1226586Sdim//=-- ExprEngineCallAndReturn.cpp - Support for call/return -----*- C++ -*-===//
2226586Sdim//
3226586Sdim//                     The LLVM Compiler Infrastructure
4226586Sdim//
5226586Sdim// This file is distributed under the University of Illinois Open Source
6226586Sdim// License. See LICENSE.TXT for details.
7226586Sdim//
8226586Sdim//===----------------------------------------------------------------------===//
9226586Sdim//
10226586Sdim//  This file defines ExprEngine's support for calls and returns.
11226586Sdim//
12226586Sdim//===----------------------------------------------------------------------===//
13226586Sdim
14239462Sdim#define DEBUG_TYPE "ExprEngine"
15239462Sdim
16226586Sdim#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
17263508Sdim#include "PrettyStackTraceLocationContext.h"
18243830Sdim#include "clang/AST/CXXInheritance.h"
19226586Sdim#include "clang/AST/DeclCXX.h"
20243830Sdim#include "clang/AST/ParentMap.h"
21249423Sdim#include "clang/Analysis/Analyses/LiveVariables.h"
22249423Sdim#include "clang/StaticAnalyzer/Core/CheckerManager.h"
23249423Sdim#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
24234353Sdim#include "llvm/ADT/SmallSet.h"
25239462Sdim#include "llvm/ADT/Statistic.h"
26234353Sdim#include "llvm/Support/SaveAndRestore.h"
27226586Sdim
28226586Sdimusing namespace clang;
29226586Sdimusing namespace ento;
30226586Sdim
// Analyzer statistics for call handling (see llvm/ADT/Statistic.h); these
// counters are incremented below and reported with LLVM's -stats machinery.
STATISTIC(NumOfDynamicDispatchPathSplits,
  "The # of times we split the path due to imprecise dynamic dispatch info");

STATISTIC(NumInlinedCalls,
  "The # of times we inlined a call");

STATISTIC(NumReachedInlineCountMax,
  "The # of times we reached inline count maximum");
39249423Sdim
40234353Sdimvoid ExprEngine::processCallEnter(CallEnter CE, ExplodedNode *Pred) {
41234353Sdim  // Get the entry block in the CFG of the callee.
42234353Sdim  const StackFrameContext *calleeCtx = CE.getCalleeContext();
43263508Sdim  PrettyStackTraceLocationContext CrashInfo(calleeCtx);
44263508Sdim
45234353Sdim  const CFG *CalleeCFG = calleeCtx->getCFG();
46234353Sdim  const CFGBlock *Entry = &(CalleeCFG->getEntry());
47234353Sdim
48234353Sdim  // Validate the CFG.
49234353Sdim  assert(Entry->empty());
50234353Sdim  assert(Entry->succ_size() == 1);
51234353Sdim
52234353Sdim  // Get the solitary sucessor.
53234353Sdim  const CFGBlock *Succ = *(Entry->succ_begin());
54234353Sdim
55234353Sdim  // Construct an edge representing the starting location in the callee.
56234353Sdim  BlockEdge Loc(Entry, Succ, calleeCtx);
57234353Sdim
58239462Sdim  ProgramStateRef state = Pred->getState();
59234353Sdim
60234353Sdim  // Construct a new node and add it to the worklist.
61234353Sdim  bool isNew;
62234353Sdim  ExplodedNode *Node = G.getNode(Loc, state, false, &isNew);
63234353Sdim  Node->addPredecessor(Pred, G);
64234353Sdim  if (isNew)
65234353Sdim    Engine.getWorkList()->enqueue(Node);
66226586Sdim}
67226586Sdim
// Find the last statement on the path to the exploded node and the
// corresponding Block. Returns a (null, null) pair if no statement is found
// before running off the top of the graph.
static std::pair<const Stmt*,
                 const CFGBlock*> getLastStmt(const ExplodedNode *Node) {
  const Stmt *S = 0;
  const CFGBlock *Blk = 0;
  // Only program points belonging to this same stack frame are considered.
  const StackFrameContext *SF =
          Node->getLocation().getLocationContext()->getCurrentStackFrame();

  // Back up through the ExplodedGraph until we reach a statement node in this
  // stack frame.
  while (Node) {
    const ProgramPoint &PP = Node->getLocation();

    if (PP.getLocationContext()->getCurrentStackFrame() == SF) {
      if (Optional<StmtPoint> SP = PP.getAs<StmtPoint>()) {
        // Found a statement executed in this frame; we are done.
        S = SP->getStmt();
        break;
      } else if (Optional<CallExitEnd> CEE = PP.getAs<CallExitEnd>()) {
        // A nested call returned here; its call site (if any) is the last
        // statement executed in this frame.
        S = CEE->getCalleeContext()->getCallSite();
        if (S)
          break;

        // If there is no statement, this is an implicitly-generated call.
        // We'll walk backwards over it and then continue the loop to find
        // an actual statement.
        Optional<CallEnter> CE;
        do {
          Node = Node->getFirstPred();
          CE = Node->getLocationAs<CallEnter>();
        } while (!CE || CE->getCalleeContext() != CEE->getCalleeContext());

        // Continue searching the graph.
      } else if (Optional<BlockEdge> BE = PP.getAs<BlockEdge>()) {
        // Track the source block of the most recent edge; this becomes the
        // CFG block paired with the statement we eventually find.
        Blk = BE->getSrc();
      }
    } else if (Optional<CallEnter> CE = PP.getAs<CallEnter>()) {
      // If we reached the CallEnter for this function, it has no statements.
      if (CE->getCalleeContext() == SF)
        break;
    }

    // Ran off the top of the graph without finding anything.
    if (Node->pred_empty())
      return std::pair<const Stmt*, const CFGBlock*>((Stmt*)0, (CFGBlock*)0);

    Node = *Node->pred_begin();
  }

  return std::pair<const Stmt*, const CFGBlock*>(S, Blk);
}
118226586Sdim
119243830Sdim/// Adjusts a return value when the called function's return type does not
120243830Sdim/// match the caller's expression type. This can happen when a dynamic call
121243830Sdim/// is devirtualized, and the overridding method has a covariant (more specific)
122243830Sdim/// return type than the parent's method. For C++ objects, this means we need
123243830Sdim/// to add base casts.
124243830Sdimstatic SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy,
125243830Sdim                              StoreManager &StoreMgr) {
126243830Sdim  // For now, the only adjustments we handle apply only to locations.
127249423Sdim  if (!V.getAs<Loc>())
128243830Sdim    return V;
129243830Sdim
130243830Sdim  // If the types already match, don't do any unnecessary work.
131243830Sdim  ExpectedTy = ExpectedTy.getCanonicalType();
132243830Sdim  ActualTy = ActualTy.getCanonicalType();
133243830Sdim  if (ExpectedTy == ActualTy)
134243830Sdim    return V;
135243830Sdim
136243830Sdim  // No adjustment is needed between Objective-C pointer types.
137243830Sdim  if (ExpectedTy->isObjCObjectPointerType() &&
138243830Sdim      ActualTy->isObjCObjectPointerType())
139243830Sdim    return V;
140243830Sdim
141243830Sdim  // C++ object pointers may need "derived-to-base" casts.
142243830Sdim  const CXXRecordDecl *ExpectedClass = ExpectedTy->getPointeeCXXRecordDecl();
143243830Sdim  const CXXRecordDecl *ActualClass = ActualTy->getPointeeCXXRecordDecl();
144243830Sdim  if (ExpectedClass && ActualClass) {
145243830Sdim    CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
146243830Sdim                       /*DetectVirtual=*/false);
147243830Sdim    if (ActualClass->isDerivedFrom(ExpectedClass, Paths) &&
148243830Sdim        !Paths.isAmbiguous(ActualTy->getCanonicalTypeUnqualified())) {
149243830Sdim      return StoreMgr.evalDerivedToBase(V, Paths.front());
150243830Sdim    }
151243830Sdim  }
152243830Sdim
153243830Sdim  // Unfortunately, Objective-C does not enforce that overridden methods have
154243830Sdim  // covariant return types, so we can't assert that that never happens.
155243830Sdim  // Be safe and return UnknownVal().
156243830Sdim  return UnknownVal();
157243830Sdim}
158243830Sdim
/// Clean up dead symbols/bindings when execution reaches the end of the
/// current function. If no last statement/block can be found, \p Pred is
/// passed through to \p Dst unchanged.
void ExprEngine::removeDeadOnEndOfFunction(NodeBuilderContext& BC,
                                           ExplodedNode *Pred,
                                           ExplodedNodeSet &Dst) {
  // Find the last statement in the function and the corresponding basic block.
  const Stmt *LastSt = 0;
  const CFGBlock *Blk = 0;
  llvm::tie(LastSt, Blk) = getLastStmt(Pred);
  if (!Blk || !LastSt) {
    // Nothing to purge; forward the node as-is.
    Dst.Add(Pred);
    return;
  }

  // Here, we destroy the current location context. We use the current
  // function's entire body as a diagnostic statement, with which the program
  // point will be associated. However, we only want to use LastStmt as a
  // reference for what to clean up if it's a ReturnStmt; otherwise, everything
  // is dead.
  SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC);
  const LocationContext *LCtx = Pred->getLocationContext();
  removeDead(Pred, Dst, dyn_cast<ReturnStmt>(LastSt), LCtx,
             LCtx->getAnalysisDeclContext()->getBody(),
             ProgramPoint::PostStmtPurgeDeadSymbolsKind);
}
182243830Sdim
183243830Sdimstatic bool wasDifferentDeclUsedForInlining(CallEventRef<> Call,
184243830Sdim    const StackFrameContext *calleeCtx) {
185243830Sdim  const Decl *RuntimeCallee = calleeCtx->getDecl();
186243830Sdim  const Decl *StaticDecl = Call->getDecl();
187243830Sdim  assert(RuntimeCallee);
188243830Sdim  if (!StaticDecl)
189243830Sdim    return true;
190243830Sdim  return RuntimeCallee->getCanonicalDecl() != StaticDecl->getCanonicalDecl();
191243830Sdim}
192243830Sdim
193249423Sdim/// Returns true if the CXXConstructExpr \p E was intended to construct a
194249423Sdim/// prvalue for the region in \p V.
195249423Sdim///
196249423Sdim/// Note that we can't just test for rvalue vs. glvalue because
197249423Sdim/// CXXConstructExprs embedded in DeclStmts and initializers are considered
198249423Sdim/// rvalues by the AST, and the analyzer would like to treat them as lvalues.
199249423Sdimstatic bool isTemporaryPRValue(const CXXConstructExpr *E, SVal V) {
200249423Sdim  if (E->isGLValue())
201249423Sdim    return false;
202249423Sdim
203249423Sdim  const MemRegion *MR = V.getAsRegion();
204249423Sdim  if (!MR)
205249423Sdim    return false;
206249423Sdim
207249423Sdim  return isa<CXXTempObjectRegion>(MR);
208249423Sdim}
209249423Sdim
/// The call exit is simulated with a sequence of nodes, which occur between
/// CallExitBegin and CallExitEnd. The following operations occur between the
/// two program points:
/// 1. CallExitBegin (triggers the start of call exit sequence)
/// 2. Bind the return value
/// 3. Run Remove dead bindings to clean up the dead symbols from the callee.
/// 4. CallExitEnd (switch to the caller context)
/// 5. PostStmt<CallExpr>
void ExprEngine::processCallExit(ExplodedNode *CEBNode) {
  // Step 1 CEBNode was generated before the call.
  PrettyStackTraceLocationContext CrashInfo(CEBNode->getLocationContext());
  const StackFrameContext *calleeCtx =
      CEBNode->getLocationContext()->getCurrentStackFrame();

  // The parent context might not be a stack frame, so make sure we
  // look up the first enclosing stack frame.
  const StackFrameContext *callerCtx =
    calleeCtx->getParent()->getCurrentStackFrame();

  // The call site; may be null (handled by the 'if (CE)' checks below).
  const Stmt *CE = calleeCtx->getCallSite();
  ProgramStateRef state = CEBNode->getState();
  // Find the last statement in the function and the corresponding basic block.
  const Stmt *LastSt = 0;
  const CFGBlock *Blk = 0;
  llvm::tie(LastSt, Blk) = getLastStmt(CEBNode);

  // Generate a CallEvent /before/ cleaning the state, so that we can get the
  // correct value for 'this' (if necessary).
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> Call = CEMgr.getCaller(calleeCtx, state);

  // Step 2: generate node with bound return value: CEBNode -> BindedRetNode.

  // If the callee returns an expression, bind its value to CallExpr.
  if (CE) {
    if (const ReturnStmt *RS = dyn_cast_or_null<ReturnStmt>(LastSt)) {
      const LocationContext *LCtx = CEBNode->getLocationContext();
      SVal V = state->getSVal(RS, LCtx);

      // Ensure that the return type matches the type of the returned Expr.
      // These can differ if the call was devirtualized to an override with a
      // covariant return type (see adjustReturnValue above).
      if (wasDifferentDeclUsedForInlining(Call, calleeCtx)) {
        QualType ReturnedTy =
          CallEvent::getDeclaredResultType(calleeCtx->getDecl());
        if (!ReturnedTy.isNull()) {
          if (const Expr *Ex = dyn_cast<Expr>(CE)) {
            V = adjustReturnValue(V, Ex->getType(), ReturnedTy,
                                  getStoreManager());
          }
        }
      }

      state = state->BindExpr(CE, callerCtx, V);
    }

    // Bind the constructed object value to CXXConstructExpr.
    if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
      loc::MemRegionVal This =
        svalBuilder.getCXXThis(CCE->getConstructor()->getParent(), calleeCtx);
      SVal ThisV = state->getSVal(This);

      // If the constructed object is a temporary prvalue, get its bindings.
      if (isTemporaryPRValue(CCE, ThisV))
        ThisV = state->getSVal(ThisV.castAs<Loc>());

      state = state->BindExpr(CCE, callerCtx, ThisV);
    }
  }

  // Step 3: BindedRetNode -> CleanedNodes
  // If we can find a statement and a block in the inlined function, run remove
  // dead bindings before returning from the call. This is important to ensure
  // that we report the issues such as leaks in the stack contexts in which
  // they occurred.
  ExplodedNodeSet CleanedNodes;
  if (LastSt && Blk && AMgr.options.AnalysisPurgeOpt != PurgeNone) {
    static SimpleProgramPointTag retValBind("ExprEngine : Bind Return Value");
    PostStmt Loc(LastSt, calleeCtx, &retValBind);
    bool isNew;
    ExplodedNode *BindedRetNode = G.getNode(Loc, state, false, &isNew);
    BindedRetNode->addPredecessor(CEBNode, G);
    // If this node already exists, this path has been explored; stop here.
    if (!isNew)
      return;

    NodeBuilderContext Ctx(getCoreEngine(), Blk, BindedRetNode);
    currBldrCtx = &Ctx;
    // Here, we call the Symbol Reaper with 0 statement and callee location
    // context, telling it to clean up everything in the callee's context
    // (and its children). We use the callee's function body as a diagnostic
    // statement, with which the program point will be associated.
    removeDead(BindedRetNode, CleanedNodes, 0, calleeCtx,
               calleeCtx->getAnalysisDeclContext()->getBody(),
               ProgramPoint::PostStmtPurgeDeadSymbolsKind);
    currBldrCtx = 0;
  } else {
    // No cleanup possible/requested; continue from the CallExitBegin node.
    CleanedNodes.Add(CEBNode);
  }

  for (ExplodedNodeSet::iterator I = CleanedNodes.begin(),
                                 E = CleanedNodes.end(); I != E; ++I) {

    // Step 4: Generate the CallExit and leave the callee's context.
    // CleanedNodes -> CEENode
    CallExitEnd Loc(calleeCtx, callerCtx);
    bool isNew;
    ProgramStateRef CEEState = (*I == CEBNode) ? state : (*I)->getState();
    ExplodedNode *CEENode = G.getNode(Loc, CEEState, false, &isNew);
    CEENode->addPredecessor(*I, G);
    if (!isNew)
      return;

    // Step 5: Perform the post-condition check of the CallExpr and enqueue the
    // result onto the work list.
    // CEENode -> Dst -> WorkList
    NodeBuilderContext Ctx(Engine, calleeCtx->getCallSiteBlock(), CEENode);
    SaveAndRestore<const NodeBuilderContext*> NBCSave(currBldrCtx,
        &Ctx);
    // Resume at the caller's statement index of the call site.
    SaveAndRestore<unsigned> CBISave(currStmtIdx, calleeCtx->getIndex());

    // Re-wrap the call event with the state after cleanup for the checkers.
    CallEventRef<> UpdatedCall = Call.cloneWithState(CEEState);

    ExplodedNodeSet DstPostCall;
    getCheckerManager().runCheckersForPostCall(DstPostCall, CEENode,
                                               *UpdatedCall, *this,
                                               /*WasInlined=*/true);

    // Run the statement-level post-checks appropriate for the call kind.
    ExplodedNodeSet Dst;
    if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
      getCheckerManager().runCheckersForPostObjCMessage(Dst, DstPostCall, *Msg,
                                                        *this,
                                                        /*WasInlined=*/true);
    } else if (CE) {
      getCheckerManager().runCheckersForPostStmt(Dst, DstPostCall, CE,
                                                 *this, /*WasInlined=*/true);
    } else {
      Dst.insert(DstPostCall);
    }

    // Enqueue the next element in the block.
    for (ExplodedNodeSet::iterator PSI = Dst.begin(), PSE = Dst.end();
                                   PSI != PSE; ++PSI) {
      Engine.getWorkList()->enqueue(*PSI, calleeCtx->getCallSiteBlock(),
                                    calleeCtx->getIndex()+1);
    }
  }
}
355226586Sdim
356243830Sdimvoid ExprEngine::examineStackFrames(const Decl *D, const LocationContext *LCtx,
357243830Sdim                               bool &IsRecursive, unsigned &StackDepth) {
358243830Sdim  IsRecursive = false;
359243830Sdim  StackDepth = 0;
360243830Sdim
361234353Sdim  while (LCtx) {
362243830Sdim    if (const StackFrameContext *SFC = dyn_cast<StackFrameContext>(LCtx)) {
363243830Sdim      const Decl *DI = SFC->getDecl();
364243830Sdim
365243830Sdim      // Mark recursive (and mutually recursive) functions and always count
366243830Sdim      // them when measuring the stack depth.
367243830Sdim      if (DI == D) {
368243830Sdim        IsRecursive = true;
369243830Sdim        ++StackDepth;
370243830Sdim        LCtx = LCtx->getParent();
371243830Sdim        continue;
372243830Sdim      }
373243830Sdim
374243830Sdim      // Do not count the small functions when determining the stack depth.
375243830Sdim      AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(DI);
376243830Sdim      const CFG *CalleeCFG = CalleeADC->getCFG();
377243830Sdim      if (CalleeCFG->getNumBlockIDs() > AMgr.options.getAlwaysInlineSize())
378243830Sdim        ++StackDepth;
379243830Sdim    }
380234353Sdim    LCtx = LCtx->getParent();
381234353Sdim  }
382243830Sdim
383234353Sdim}
384234353Sdim
385243830Sdimstatic bool IsInStdNamespace(const FunctionDecl *FD) {
386243830Sdim  const DeclContext *DC = FD->getEnclosingNamespaceContext();
387243830Sdim  const NamespaceDecl *ND = dyn_cast<NamespaceDecl>(DC);
388243830Sdim  if (!ND)
389243830Sdim    return false;
390243830Sdim
391243830Sdim  while (const DeclContext *Parent = ND->getParent()) {
392243830Sdim    if (!isa<NamespaceDecl>(Parent))
393243830Sdim      break;
394243830Sdim    ND = cast<NamespaceDecl>(Parent);
395243830Sdim  }
396243830Sdim
397243830Sdim  return ND->getName() == "std";
398243830Sdim}
399243830Sdim
// The GDM component containing the dynamic dispatch bifurcation info. When
// the exact type of the receiver is not known, we want to explore both paths -
// one on which we do inline it and the other one on which we don't. This is
// done to ensure we do not drop coverage.
// This is the map from the receiver region to a bool, specifying either we
// consider this region's information precise or not along the given path.
namespace {
  enum DynamicDispatchMode {
    // Along this path, the receiver's dynamic type info was treated as
    // precise and the call was inlined.
    DynamicDispatchModeInlined = 1,
    // Along this path, the call was evaluated conservatively instead.
    DynamicDispatchModeConservative
  };
}
REGISTER_TRAIT_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap,
                                 CLANG_ENTO_PROGRAMSTATE_MAP(const MemRegion *,
                                                             unsigned))
415234353Sdim
/// Set up inlining of \p Call into callee declaration \p D: build the
/// callee's stack frame, enter it in the state, and enqueue the CallEnter
/// node. Always returns true (the caller treats this as "call was inlined").
bool ExprEngine::inlineCall(const CallEvent &Call, const Decl *D,
                            NodeBuilder &Bldr, ExplodedNode *Pred,
                            ProgramStateRef State) {
  assert(D);

  const LocationContext *CurLC = Pred->getLocationContext();
  const StackFrameContext *CallerSFC = CurLC->getCurrentStackFrame();
  const LocationContext *ParentOfCallee = CallerSFC;
  if (Call.getKind() == CE_Block) {
    // Block calls get a block-invocation context inserted between the
    // caller's frame and the callee's frame.
    const BlockDataRegion *BR = cast<BlockCall>(Call).getBlockRegion();
    assert(BR && "If we have the block definition we should have its region");
    AnalysisDeclContext *BlockCtx = AMgr.getAnalysisDeclContext(D);
    ParentOfCallee = BlockCtx->getBlockInvocationContext(CallerSFC,
                                                         cast<BlockDecl>(D),
                                                         BR);
  }

  // This may be NULL, but that's fine.
  const Expr *CallE = Call.getOriginExpr();

  // Construct a new stack frame for the callee.
  AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
  const StackFrameContext *CalleeSFC =
    CalleeADC->getStackFrame(ParentOfCallee, CallE,
                             currBldrCtx->getBlock(),
                             currStmtIdx);


  CallEnter Loc(CallE, CalleeSFC, CurLC);

  // Construct a new state which contains the mapping from actual to
  // formal arguments.
  State = State->enterStackFrame(Call, CalleeSFC);

  // Add the CallEnter node; enqueue it only if it has not been seen before.
  bool isNew;
  if (ExplodedNode *N = G.getNode(Loc, State, false, &isNew)) {
    N->addPredecessor(Pred, G);
    if (isNew)
      Engine.getWorkList()->enqueue(N);
  }

  // If we decided to inline the call, the successor has been manually
  // added onto the work list so remove it from the node builder.
  Bldr.takeNodes(Pred);

  NumInlinedCalls++;

  // Mark the decl as visited.
  if (VisitedCallees)
    VisitedCallees->insert(D);

  return true;
}
469226586Sdim
470239462Sdimstatic ProgramStateRef getInlineFailedState(ProgramStateRef State,
471239462Sdim                                            const Stmt *CallE) {
472249423Sdim  const void *ReplayState = State->get<ReplayWithoutInlining>();
473234353Sdim  if (!ReplayState)
474234353Sdim    return 0;
475239462Sdim
476249423Sdim  assert(ReplayState == CallE && "Backtracked to the wrong call.");
477239462Sdim  (void)CallE;
478239462Sdim
479239462Sdim  return State->remove<ReplayWithoutInlining>();
480234353Sdim}
481234353Sdim
482226586Sdimvoid ExprEngine::VisitCallExpr(const CallExpr *CE, ExplodedNode *Pred,
483226586Sdim                               ExplodedNodeSet &dst) {
484226586Sdim  // Perform the previsit of the CallExpr.
485226586Sdim  ExplodedNodeSet dstPreVisit;
486226586Sdim  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, CE, *this);
487234353Sdim
488239462Sdim  // Get the call in its initial state. We use this as a template to perform
489239462Sdim  // all the checks.
490239462Sdim  CallEventManager &CEMgr = getStateManager().getCallEventManager();
491239462Sdim  CallEventRef<> CallTemplate
492239462Sdim    = CEMgr.getSimpleCall(CE, Pred->getState(), Pred->getLocationContext());
493234353Sdim
494239462Sdim  // Evaluate the function call.  We try each of the checkers
495239462Sdim  // to see if the can evaluate the function call.
496239462Sdim  ExplodedNodeSet dstCallEvaluated;
497239462Sdim  for (ExplodedNodeSet::iterator I = dstPreVisit.begin(), E = dstPreVisit.end();
498239462Sdim       I != E; ++I) {
499239462Sdim    evalCall(dstCallEvaluated, *I, *CallTemplate);
500239462Sdim  }
501226586Sdim
502239462Sdim  // Finally, perform the post-condition check of the CallExpr and store
503239462Sdim  // the created nodes in 'Dst'.
504239462Sdim  // Note that if the call was inlined, dstCallEvaluated will be empty.
505239462Sdim  // The post-CallExpr check will occur in processCallExit.
506239462Sdim  getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
507239462Sdim                                             *this);
508239462Sdim}
509226586Sdim
510239462Sdimvoid ExprEngine::evalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred,
511239462Sdim                          const CallEvent &Call) {
512239462Sdim  // WARNING: At this time, the state attached to 'Call' may be older than the
513239462Sdim  // state in 'Pred'. This is a minor optimization since CheckerManager will
514239462Sdim  // use an updated CallEvent instance when calling checkers, but if 'Call' is
515239462Sdim  // ever used directly in this function all callers should be updated to pass
516239462Sdim  // the most recent state. (It is probably not worth doing the work here since
517239462Sdim  // for some callers this will not be necessary.)
518226586Sdim
519239462Sdim  // Run any pre-call checks using the generic call interface.
520239462Sdim  ExplodedNodeSet dstPreVisit;
521239462Sdim  getCheckerManager().runCheckersForPreCall(dstPreVisit, Pred, Call, *this);
522226586Sdim
523239462Sdim  // Actually evaluate the function call.  We try each of the checkers
524239462Sdim  // to see if the can evaluate the function call, and get a callback at
525239462Sdim  // defaultEvalCall if all of them fail.
526239462Sdim  ExplodedNodeSet dstCallEvaluated;
527239462Sdim  getCheckerManager().runCheckersForEvalCall(dstCallEvaluated, dstPreVisit,
528239462Sdim                                             Call, *this);
529226586Sdim
530239462Sdim  // Finally, run any post-call checks.
531239462Sdim  getCheckerManager().runCheckersForPostCall(Dst, dstCallEvaluated,
532239462Sdim                                             Call, *this);
533239462Sdim}
534226586Sdim
535239462SdimProgramStateRef ExprEngine::bindReturnValue(const CallEvent &Call,
536239462Sdim                                            const LocationContext *LCtx,
537239462Sdim                                            ProgramStateRef State) {
538239462Sdim  const Expr *E = Call.getOriginExpr();
539239462Sdim  if (!E)
540239462Sdim    return State;
541226586Sdim
542239462Sdim  // Some method families have known return values.
543239462Sdim  if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
544239462Sdim    switch (Msg->getMethodFamily()) {
545239462Sdim    default:
546239462Sdim      break;
547239462Sdim    case OMF_autorelease:
548239462Sdim    case OMF_retain:
549239462Sdim    case OMF_self: {
550239462Sdim      // These methods return their receivers.
551239462Sdim      return State->BindExpr(E, LCtx, Msg->getReceiverSVal());
552239462Sdim    }
553239462Sdim    }
554239462Sdim  } else if (const CXXConstructorCall *C = dyn_cast<CXXConstructorCall>(&Call)){
555249423Sdim    SVal ThisV = C->getCXXThisVal();
556249423Sdim
557249423Sdim    // If the constructed object is a temporary prvalue, get its bindings.
558249423Sdim    if (isTemporaryPRValue(cast<CXXConstructExpr>(E), ThisV))
559249423Sdim      ThisV = State->getSVal(ThisV.castAs<Loc>());
560249423Sdim
561249423Sdim    return State->BindExpr(E, LCtx, ThisV);
562239462Sdim  }
563226586Sdim
564239462Sdim  // Conjure a symbol if the return value is unknown.
565239462Sdim  QualType ResultTy = Call.getResultType();
566239462Sdim  SValBuilder &SVB = getSValBuilder();
567243830Sdim  unsigned Count = currBldrCtx->blockCount();
568243830Sdim  SVal R = SVB.conjureSymbolVal(0, E, LCtx, ResultTy, Count);
569239462Sdim  return State->BindExpr(E, LCtx, R);
570239462Sdim}
571239462Sdim
572239462Sdim// Conservatively evaluate call by invalidating regions and binding
573239462Sdim// a conjured return value.
574239462Sdimvoid ExprEngine::conservativeEvalCall(const CallEvent &Call, NodeBuilder &Bldr,
575249423Sdim                                      ExplodedNode *Pred,
576249423Sdim                                      ProgramStateRef State) {
577243830Sdim  State = Call.invalidateRegions(currBldrCtx->blockCount(), State);
578239462Sdim  State = bindReturnValue(Call, Pred->getLocationContext(), State);
579239462Sdim
580239462Sdim  // And make the result node.
581239462Sdim  Bldr.generateNode(Call.getProgramPoint(), State, Pred);
582239462Sdim}
583239462Sdim
/// Verdicts for whether a particular call may be inlined.
enum CallInlinePolicy {
  /// The call may be inlined.
  CIP_Allowed,
  /// This particular call may not be inlined (per-call-site condition).
  CIP_DisallowedOnce,
  /// Calls of this kind may never be inlined (configuration-based).
  CIP_DisallowedAlways
};
589249423Sdim
/// Returns an inlining policy for \p Call based solely on the *kind* of call
/// it is (plain function, block, C++ member/constructor/destructor, allocator,
/// or Objective-C message) and the corresponding AnalyzerOptions switches.
///
/// \returns CIP_Allowed if the call may be inlined, CIP_DisallowedOnce if only
/// this particular call site must not be inlined, and CIP_DisallowedAlways if
/// the callee should never be inlined anywhere (the caller records that
/// verdict in the function summaries).
static CallInlinePolicy mayInlineCallKind(const CallEvent &Call,
                                          const ExplodedNode *Pred,
                                          AnalyzerOptions &Opts) {
  const LocationContext *CurLC = Pred->getLocationContext();
  const StackFrameContext *CallerSFC = CurLC->getCurrentStackFrame();
  switch (Call.getKind()) {
  case CE_Function:
  case CE_Block:
    // Plain functions and blocks are always eligible for inlining.
    break;
  case CE_CXXMember:
  case CE_CXXMemberOperator:
    if (!Opts.mayInlineCXXMemberFunction(CIMK_MemberFunctions))
      return CIP_DisallowedAlways;
    break;
  case CE_CXXConstructor: {
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Constructors))
      return CIP_DisallowedAlways;

    const CXXConstructorCall &Ctor = cast<CXXConstructorCall>(Call);

    // FIXME: We don't handle constructors or destructors for arrays properly.
    // Even once we do, we still need to be careful about implicitly-generated
    // initializers for array fields in default move/copy constructors.
    const MemRegion *Target = Ctor.getCXXThisVal().getAsRegion();
    if (Target && isa<ElementRegion>(Target))
      return CIP_DisallowedOnce;

    // FIXME: This is a hack. We don't use the correct region for a new
    // expression, so if we inline the constructor its result will just be
    // thrown away. This short-term hack is tracked in <rdar://problem/12180598>
    // and the longer-term possible fix is discussed in PR12014.
    const CXXConstructExpr *CtorExpr = Ctor.getOriginExpr();
    if (const Stmt *Parent = CurLC->getParentMap().getParent(CtorExpr))
      if (isa<CXXNewExpr>(Parent))
        return CIP_DisallowedOnce;

    // Inlining constructors requires including initializers in the CFG.
    const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
    assert(ADC->getCFGBuildOptions().AddInitializers && "No CFG initializers");
    (void)ADC;

    // If the destructor is trivial, it's always safe to inline the constructor.
    if (Ctor.getDecl()->getParent()->hasTrivialDestructor())
      break;

    // For other types, only inline constructors if destructor inlining is
    // also enabled.
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
      return CIP_DisallowedAlways;

    // FIXME: This is a hack. We don't handle temporary destructors
    // right now, so we shouldn't inline their constructors.
    if (CtorExpr->getConstructionKind() == CXXConstructExpr::CK_Complete)
      if (!Target || !isa<DeclRegion>(Target))
        return CIP_DisallowedOnce;

    break;
  }
  case CE_CXXDestructor: {
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
      return CIP_DisallowedAlways;

    // Inlining destructors requires building the CFG correctly.
    const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
    assert(ADC->getCFGBuildOptions().AddImplicitDtors && "No CFG destructors");
    (void)ADC;

    const CXXDestructorCall &Dtor = cast<CXXDestructorCall>(Call);

    // FIXME: We don't handle constructors or destructors for arrays properly.
    const MemRegion *Target = Dtor.getCXXThisVal().getAsRegion();
    if (Target && isa<ElementRegion>(Target))
      return CIP_DisallowedOnce;

    break;
  }
  case CE_CXXAllocator:
    // Do not inline allocators until we model deallocators.
    // This is unfortunate, but basically necessary for smart pointers and such.
    return CIP_DisallowedAlways;
  case CE_ObjCMessage:
    // Objective-C messages are only inlined when the user has opted in, and
    // only under one of the dynamic-dispatch IPA modes.
    if (!Opts.mayInlineObjCMethod())
      return CIP_DisallowedAlways;
    if (!(Opts.getIPAMode() == IPAK_DynamicDispatch ||
          Opts.getIPAMode() == IPAK_DynamicDispatchBifurcate))
      return CIP_DisallowedAlways;
    break;
  }

  return CIP_Allowed;
}
681249423Sdim
682251662Sdim/// Returns true if the given C++ class contains a member with the given name.
683251662Sdimstatic bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD,
684251662Sdim                      StringRef Name) {
685251662Sdim  const IdentifierInfo &II = Ctx.Idents.get(Name);
686251662Sdim  DeclarationName DeclName = Ctx.DeclarationNames.getIdentifier(&II);
687251662Sdim  if (!RD->lookup(DeclName).empty())
688251662Sdim    return true;
689251662Sdim
690249423Sdim  CXXBasePaths Paths(false, false, false);
691249423Sdim  if (RD->lookupInBases(&CXXRecordDecl::FindOrdinaryMember,
692251662Sdim                        DeclName.getAsOpaquePtr(),
693249423Sdim                        Paths))
694249423Sdim    return true;
695249423Sdim
696249423Sdim  return false;
697249423Sdim}
698249423Sdim
699251662Sdim/// Returns true if the given C++ class is a container or iterator.
700251662Sdim///
701251662Sdim/// Our heuristic for this is whether it contains a method named 'begin()' or a
702251662Sdim/// nested type named 'iterator' or 'iterator_category'.
703251662Sdimstatic bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD) {
704251662Sdim  return hasMember(Ctx, RD, "begin") ||
705251662Sdim         hasMember(Ctx, RD, "iterator") ||
706251662Sdim         hasMember(Ctx, RD, "iterator_category");
707251662Sdim}
708251662Sdim
709249423Sdim/// Returns true if the given function refers to a constructor or destructor of
710251662Sdim/// a C++ container or iterator.
711249423Sdim///
712249423Sdim/// We generally do a poor job modeling most containers right now, and would
713251662Sdim/// prefer not to inline their setup and teardown.
714249423Sdimstatic bool isContainerCtorOrDtor(const ASTContext &Ctx,
715249423Sdim                                  const FunctionDecl *FD) {
716249423Sdim  if (!(isa<CXXConstructorDecl>(FD) || isa<CXXDestructorDecl>(FD)))
717249423Sdim    return false;
718249423Sdim
719249423Sdim  const CXXRecordDecl *RD = cast<CXXMethodDecl>(FD)->getParent();
720249423Sdim  return isContainerClass(Ctx, RD);
721249423Sdim}
722249423Sdim
723263508Sdim/// Returns true if the given function is the destructor of a class named
724263508Sdim/// "shared_ptr".
725263508Sdimstatic bool isCXXSharedPtrDtor(const FunctionDecl *FD) {
726263508Sdim  const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(FD);
727263508Sdim  if (!Dtor)
728263508Sdim    return false;
729263508Sdim
730263508Sdim  const CXXRecordDecl *RD = Dtor->getParent();
731263508Sdim  if (const IdentifierInfo *II = RD->getDeclName().getAsIdentifierInfo())
732263508Sdim    if (II->isStr("shared_ptr"))
733263508Sdim        return true;
734263508Sdim
735263508Sdim  return false;
736263508Sdim}
737263508Sdim
/// Returns true if the function in \p CalleeADC may be inlined in general.
///
/// This checks static properties of the function, such as its signature and
/// CFG, to determine whether the analyzer should ever consider inlining it,
/// in any context. The result is cached in the function summaries by the
/// caller, so this runs at most once per function.
static bool mayInlineDecl(AnalysisDeclContext *CalleeADC,
                          AnalyzerOptions &Opts) {
  // FIXME: Do not inline variadic calls.
  if (CallEvent::isVariadic(CalleeADC->getDecl()))
    return false;

  // Check certain C++-related inlining policies.
  ASTContext &Ctx = CalleeADC->getASTContext();
  if (Ctx.getLangOpts().CPlusPlus) {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(CalleeADC->getDecl())) {
      // Conditionally control the inlining of template functions.
      if (!Opts.mayInlineTemplateFunctions())
        if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate)
          return false;

      // Conditionally control the inlining of C++ standard library functions.
      if (!Opts.mayInlineCXXStandardLibrary())
        if (Ctx.getSourceManager().isInSystemHeader(FD->getLocation()))
          if (IsInStdNamespace(FD))
            return false;

      // Conditionally control the inlining of methods on objects that look
      // like C++ containers. Container code declared in the main file is
      // exempt, on the assumption the user wants it analyzed.
      if (!Opts.mayInlineCXXContainerCtorsAndDtors())
        if (!Ctx.getSourceManager().isInMainFile(FD->getLocation()))
          if (isContainerCtorOrDtor(Ctx, FD))
            return false;

      // Conditionally control the inlining of the destructor of C++ shared_ptr.
      // We don't currently do a good job modeling shared_ptr because we can't
      // see the reference count, so treating as opaque is probably the best
      // idea.
      if (!Opts.mayInlineCXXSharedPtrDtor())
        if (isCXXSharedPtrDtor(FD))
          return false;

    }
  }

  // It is possible that the CFG cannot be constructed.
  // Be safe, and check if the CalleeCFG is valid.
  const CFG *CalleeCFG = CalleeADC->getCFG();
  if (!CalleeCFG)
    return false;

  // Do not inline large functions (measured in CFG basic blocks).
  if (CalleeCFG->getNumBlockIDs() > Opts.getMaxInlinableSize())
    return false;

  // It is possible that the live variables analysis cannot be
  // run.  If so, bail out.
  if (!CalleeADC->getAnalysis<RelaxedLiveVariables>())
    return false;

  return true;
}
799249423Sdim
/// Decides whether the definition \p D resolved for \p Call should be inlined
/// at the current node \p Pred.
///
/// Combines, in order: a temporary-destructor exclusion, the auto-synthesized
/// body fast path, the cached static verdict from the function summaries
/// (computed by mayInlineDecl on first sight), the per-call-kind policy from
/// mayInlineCallKind, and dynamic limits (stack depth, recursion, and how many
/// times a large function has already been inlined).
bool ExprEngine::shouldInlineCall(const CallEvent &Call, const Decl *D,
                                  const ExplodedNode *Pred) {
  if (!D)
    return false;

  AnalysisManager &AMgr = getAnalysisManager();
  AnalyzerOptions &Opts = AMgr.options;
  AnalysisDeclContextManager &ADCMgr = AMgr.getAnalysisDeclContextManager();
  AnalysisDeclContext *CalleeADC = ADCMgr.getContext(D);

  // Temporary object destructor processing is currently broken, so we never
  // inline them.
  // FIXME: Remove this once temp destructors are working.
  if (isa<CXXDestructorCall>(Call)) {
    if ((*currBldrCtx->getBlock())[currStmtIdx].getAs<CFGTemporaryDtor>())
      return false;
  }

  // The auto-synthesized bodies are essential to inline as they are
  // usually small and commonly used. Note: we should do this check early on to
  // ensure we always inline these calls.
  if (CalleeADC->isBodyAutosynthesized())
    return true;

  if (!AMgr.shouldInlineCall())
    return false;

  // Check if this function has been marked as non-inlinable.
  Optional<bool> MayInline = Engine.FunctionSummaries->mayInline(D);
  if (MayInline.hasValue()) {
    if (!MayInline.getValue())
      return false;

  } else {
    // We haven't actually checked the static properties of this function yet.
    // Do that now, and record our decision in the function summaries.
    if (mayInlineDecl(CalleeADC, Opts)) {
      Engine.FunctionSummaries->markMayInline(D);
    } else {
      Engine.FunctionSummaries->markShouldNotInline(D);
      return false;
    }
  }

  // Check if we should inline a call based on its kind.
  // FIXME: this checks both static and dynamic properties of the call, which
  // means we're redoing a bit of work that could be cached in the function
  // summary.
  CallInlinePolicy CIP = mayInlineCallKind(Call, Pred, Opts);
  if (CIP != CIP_Allowed) {
    if (CIP == CIP_DisallowedAlways) {
      // A permanent verdict must not contradict a cached "may inline".
      assert(!MayInline.hasValue() || MayInline.getValue());
      Engine.FunctionSummaries->markShouldNotInline(D);
    }
    return false;
  }

  const CFG *CalleeCFG = CalleeADC->getCFG();

  // Do not inline if recursive or we've reached max stack frame count.
  // Small callees (at most getAlwaysInlineSize() blocks) are exempt from the
  // depth limit, but never from the recursion check.
  bool IsRecursive = false;
  unsigned StackDepth = 0;
  examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth);
  if ((StackDepth >= Opts.InlineMaxStackDepth) &&
      ((CalleeCFG->getNumBlockIDs() > Opts.getAlwaysInlineSize())
       || IsRecursive))
    return false;

  // Do not inline large functions too many times.
  if ((Engine.FunctionSummaries->getNumTimesInlined(D) >
       Opts.getMaxTimesInlineLarge()) &&
      CalleeCFG->getNumBlockIDs() > 13) {
    NumReachedInlineCountMax++;
    return false;
  }

  if (HowToInline == Inline_Minimal &&
      (CalleeCFG->getNumBlockIDs() > Opts.getAlwaysInlineSize()
      || IsRecursive))
    return false;

  // All checks passed; record the inlining and allow it.
  Engine.FunctionSummaries->bumpNumTimesInlined(D);

  return true;
}
885249423Sdim
886249423Sdimstatic bool isTrivialObjectAssignment(const CallEvent &Call) {
887249423Sdim  const CXXInstanceCall *ICall = dyn_cast<CXXInstanceCall>(&Call);
888249423Sdim  if (!ICall)
889249423Sdim    return false;
890249423Sdim
891249423Sdim  const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(ICall->getDecl());
892249423Sdim  if (!MD)
893249423Sdim    return false;
894249423Sdim  if (!(MD->isCopyAssignmentOperator() || MD->isMoveAssignmentOperator()))
895249423Sdim    return false;
896249423Sdim
897249423Sdim  return MD->isTrivial();
898249423Sdim}
899249423Sdim
/// Default evaluation of a call: model it as a trivial copy, inline it,
/// bifurcate on imprecise dynamic-dispatch information, or fall back to
/// conservative evaluation (conjured return value + region invalidation).
void ExprEngine::defaultEvalCall(NodeBuilder &Bldr, ExplodedNode *Pred,
                                 const CallEvent &CallTemplate) {
  // Make sure we have the most recent state attached to the call.
  ProgramStateRef State = Pred->getState();
  CallEventRef<> Call = CallTemplate.cloneWithState(State);

  // Special-case trivial assignment operators.
  if (isTrivialObjectAssignment(*Call)) {
    performTrivialCopy(Bldr, Pred, *Call);
    return;
  }

  // Try to inline the call.
  // The origin expression here is just used as a kind of checksum;
  // this should still be safe even for CallEvents that don't come from exprs.
  const Expr *E = Call->getOriginExpr();

  ProgramStateRef InlinedFailedState = getInlineFailedState(State, E);
  if (InlinedFailedState) {
    // If we already tried once and failed, make sure we don't retry later.
    State = InlinedFailedState;
  } else {
    RuntimeDefinition RD = Call->getRuntimeDefinition();
    const Decl *D = RD.getDecl();
    if (shouldInlineCall(*Call, D, Pred)) {
      if (RD.mayHaveOtherDefinitions()) {
        // The runtime definition is not certain (e.g. it was found via
        // dynamic type information); decide based on the IPA mode.
        AnalyzerOptions &Options = getAnalysisManager().options;

        // Explore with and without inlining the call.
        if (Options.getIPAMode() == IPAK_DynamicDispatchBifurcate) {
          BifurcateCall(RD.getDispatchRegion(), *Call, D, Bldr, Pred);
          return;
        }

        // Don't inline if we're not in any dynamic dispatch mode.
        if (Options.getIPAMode() != IPAK_DynamicDispatch) {
          conservativeEvalCall(*Call, Bldr, Pred, State);
          return;
        }
      }

      // We are not bifurcating and we do have a Decl, so just inline.
      if (inlineCall(*Call, D, Bldr, Pred, State))
        return;
    }
  }

  // If we can't inline it, handle the return value and invalidate the regions.
  conservativeEvalCall(*Call, Bldr, Pred, State);
}
950226586Sdim
951239462Sdimvoid ExprEngine::BifurcateCall(const MemRegion *BifurReg,
952239462Sdim                               const CallEvent &Call, const Decl *D,
953239462Sdim                               NodeBuilder &Bldr, ExplodedNode *Pred) {
954239462Sdim  assert(BifurReg);
955239462Sdim  BifurReg = BifurReg->StripCasts();
956239462Sdim
957239462Sdim  // Check if we've performed the split already - note, we only want
958239462Sdim  // to split the path once per memory region.
959239462Sdim  ProgramStateRef State = Pred->getState();
960243830Sdim  const unsigned *BState =
961239462Sdim                        State->get<DynamicDispatchBifurcationMap>(BifurReg);
962239462Sdim  if (BState) {
963239462Sdim    // If we are on "inline path", keep inlining if possible.
964239462Sdim    if (*BState == DynamicDispatchModeInlined)
965239462Sdim      if (inlineCall(Call, D, Bldr, Pred, State))
966239462Sdim        return;
967239462Sdim    // If inline failed, or we are on the path where we assume we
968239462Sdim    // don't have enough info about the receiver to inline, conjure the
969239462Sdim    // return value and invalidate the regions.
970239462Sdim    conservativeEvalCall(Call, Bldr, Pred, State);
971239462Sdim    return;
972239462Sdim  }
973239462Sdim
974239462Sdim  // If we got here, this is the first time we process a message to this
975239462Sdim  // region, so split the path.
976239462Sdim  ProgramStateRef IState =
977239462Sdim      State->set<DynamicDispatchBifurcationMap>(BifurReg,
978239462Sdim                                               DynamicDispatchModeInlined);
979239462Sdim  inlineCall(Call, D, Bldr, Pred, IState);
980239462Sdim
981239462Sdim  ProgramStateRef NoIState =
982239462Sdim      State->set<DynamicDispatchBifurcationMap>(BifurReg,
983239462Sdim                                               DynamicDispatchModeConservative);
984239462Sdim  conservativeEvalCall(Call, Bldr, Pred, NoIState);
985239462Sdim
986239462Sdim  NumOfDynamicDispatchPathSplits++;
987239462Sdim  return;
988239462Sdim}
989239462Sdim
990239462Sdim
/// Visit a 'return' statement: run pre-statement checkers and, for returns
/// that carry a value, generate a statement node for each surviving
/// predecessor.
void ExprEngine::VisitReturnStmt(const ReturnStmt *RS, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {

  // Give checkers a chance to act on (or sink) the return statement first.
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, RS, *this);

  StmtNodeBuilder B(dstPreVisit, Dst, *currBldrCtx);

  // NOTE(review): nodes are only generated explicitly for value-returning
  // statements; for 'return;' the dstPreVisit nodes are presumably propagated
  // to Dst by the builder itself -- confirm against StmtNodeBuilder semantics.
  if (RS->getRetValue()) {
    for (ExplodedNodeSet::iterator it = dstPreVisit.begin(),
                                  ei = dstPreVisit.end(); it != ei; ++it) {
      // State is unchanged here; the node just marks the statement.
      B.generateNode(RS, *it, (*it)->getState());
    }
  }
}
1006