// CoreEngine.cpp — revision 226633
1//==- CoreEngine.cpp - Path-Sensitive Dataflow Engine ------------*- C++ -*-// 2// 3// The LLVM Compiler Infrastructure 4// 5// This file is distributed under the University of Illinois Open Source 6// License. See LICENSE.TXT for details. 7// 8//===----------------------------------------------------------------------===// 9// 10// This file defines a generic engine for intraprocedural, path-sensitive, 11// dataflow analysis via graph reachability engine. 12// 13//===----------------------------------------------------------------------===// 14 15#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h" 16#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h" 17#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h" 18#include "clang/Index/TranslationUnit.h" 19#include "clang/AST/Expr.h" 20#include "clang/AST/StmtCXX.h" 21#include "llvm/Support/Casting.h" 22#include "llvm/ADT/DenseMap.h" 23using namespace clang; 24using namespace ento; 25 26//===----------------------------------------------------------------------===// 27// Worklist classes for exploration of reachable states. 28//===----------------------------------------------------------------------===// 29 30WorkList::Visitor::~Visitor() {} 31 32namespace { 33class DFS : public WorkList { 34 SmallVector<WorkListUnit,20> Stack; 35public: 36 virtual bool hasWork() const { 37 return !Stack.empty(); 38 } 39 40 virtual void enqueue(const WorkListUnit& U) { 41 Stack.push_back(U); 42 } 43 44 virtual WorkListUnit dequeue() { 45 assert (!Stack.empty()); 46 const WorkListUnit& U = Stack.back(); 47 Stack.pop_back(); // This technically "invalidates" U, but we are fine. 
48 return U; 49 } 50 51 virtual bool visitItemsInWorkList(Visitor &V) { 52 for (SmallVectorImpl<WorkListUnit>::iterator 53 I = Stack.begin(), E = Stack.end(); I != E; ++I) { 54 if (V.visit(*I)) 55 return true; 56 } 57 return false; 58 } 59}; 60 61class BFS : public WorkList { 62 std::deque<WorkListUnit> Queue; 63public: 64 virtual bool hasWork() const { 65 return !Queue.empty(); 66 } 67 68 virtual void enqueue(const WorkListUnit& U) { 69 Queue.push_front(U); 70 } 71 72 virtual WorkListUnit dequeue() { 73 WorkListUnit U = Queue.front(); 74 Queue.pop_front(); 75 return U; 76 } 77 78 virtual bool visitItemsInWorkList(Visitor &V) { 79 for (std::deque<WorkListUnit>::iterator 80 I = Queue.begin(), E = Queue.end(); I != E; ++I) { 81 if (V.visit(*I)) 82 return true; 83 } 84 return false; 85 } 86}; 87 88} // end anonymous namespace 89 90// Place the dstor for WorkList here because it contains virtual member 91// functions, and we the code for the dstor generated in one compilation unit. 92WorkList::~WorkList() {} 93 94WorkList *WorkList::makeDFS() { return new DFS(); } 95WorkList *WorkList::makeBFS() { return new BFS(); } 96 97namespace { 98 class BFSBlockDFSContents : public WorkList { 99 std::deque<WorkListUnit> Queue; 100 SmallVector<WorkListUnit,20> Stack; 101 public: 102 virtual bool hasWork() const { 103 return !Queue.empty() || !Stack.empty(); 104 } 105 106 virtual void enqueue(const WorkListUnit& U) { 107 if (isa<BlockEntrance>(U.getNode()->getLocation())) 108 Queue.push_front(U); 109 else 110 Stack.push_back(U); 111 } 112 113 virtual WorkListUnit dequeue() { 114 // Process all basic blocks to completion. 115 if (!Stack.empty()) { 116 const WorkListUnit& U = Stack.back(); 117 Stack.pop_back(); // This technically "invalidates" U, but we are fine. 118 return U; 119 } 120 121 assert(!Queue.empty()); 122 // Don't use const reference. The subsequent pop_back() might make it 123 // unsafe. 
124 WorkListUnit U = Queue.front(); 125 Queue.pop_front(); 126 return U; 127 } 128 virtual bool visitItemsInWorkList(Visitor &V) { 129 for (SmallVectorImpl<WorkListUnit>::iterator 130 I = Stack.begin(), E = Stack.end(); I != E; ++I) { 131 if (V.visit(*I)) 132 return true; 133 } 134 for (std::deque<WorkListUnit>::iterator 135 I = Queue.begin(), E = Queue.end(); I != E; ++I) { 136 if (V.visit(*I)) 137 return true; 138 } 139 return false; 140 } 141 142 }; 143} // end anonymous namespace 144 145WorkList* WorkList::makeBFSBlockDFSContents() { 146 return new BFSBlockDFSContents(); 147} 148 149//===----------------------------------------------------------------------===// 150// Core analysis engine. 151//===----------------------------------------------------------------------===// 152 153/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps. 154bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned Steps, 155 const ProgramState *InitState) { 156 157 if (G->num_roots() == 0) { // Initialize the analysis by constructing 158 // the root if none exists. 159 160 const CFGBlock *Entry = &(L->getCFG()->getEntry()); 161 162 assert (Entry->empty() && 163 "Entry block must be empty."); 164 165 assert (Entry->succ_size() == 1 && 166 "Entry block must have 1 successor."); 167 168 // Get the solitary successor. 169 const CFGBlock *Succ = *(Entry->succ_begin()); 170 171 // Construct an edge representing the 172 // starting location in the function. 173 BlockEdge StartLoc(Entry, Succ, L); 174 175 // Set the current block counter to being empty. 176 WList->setBlockCounter(BCounterFactory.GetEmptyCounter()); 177 178 if (!InitState) 179 // Generate the root. 
180 generateNode(StartLoc, SubEng.getInitialState(L), 0); 181 else 182 generateNode(StartLoc, InitState, 0); 183 } 184 185 // Check if we have a steps limit 186 bool UnlimitedSteps = Steps == 0; 187 188 while (WList->hasWork()) { 189 if (!UnlimitedSteps) { 190 if (Steps == 0) 191 break; 192 --Steps; 193 } 194 195 const WorkListUnit& WU = WList->dequeue(); 196 197 // Set the current block counter. 198 WList->setBlockCounter(WU.getBlockCounter()); 199 200 // Retrieve the node. 201 ExplodedNode *Node = WU.getNode(); 202 203 // Dispatch on the location type. 204 switch (Node->getLocation().getKind()) { 205 case ProgramPoint::BlockEdgeKind: 206 HandleBlockEdge(cast<BlockEdge>(Node->getLocation()), Node); 207 break; 208 209 case ProgramPoint::BlockEntranceKind: 210 HandleBlockEntrance(cast<BlockEntrance>(Node->getLocation()), Node); 211 break; 212 213 case ProgramPoint::BlockExitKind: 214 assert (false && "BlockExit location never occur in forward analysis."); 215 break; 216 217 case ProgramPoint::CallEnterKind: 218 HandleCallEnter(cast<CallEnter>(Node->getLocation()), WU.getBlock(), 219 WU.getIndex(), Node); 220 break; 221 222 case ProgramPoint::CallExitKind: 223 HandleCallExit(cast<CallExit>(Node->getLocation()), Node); 224 break; 225 226 default: 227 assert(isa<PostStmt>(Node->getLocation()) || 228 isa<PostInitializer>(Node->getLocation())); 229 HandlePostStmt(WU.getBlock(), WU.getIndex(), Node); 230 break; 231 } 232 } 233 234 SubEng.processEndWorklist(hasWorkRemaining()); 235 return WList->hasWork(); 236} 237 238void CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L, 239 unsigned Steps, 240 const ProgramState *InitState, 241 ExplodedNodeSet &Dst) { 242 ExecuteWorkList(L, Steps, InitState); 243 for (SmallVectorImpl<ExplodedNode*>::iterator I = G->EndNodes.begin(), 244 E = G->EndNodes.end(); I != E; ++I) { 245 Dst.Add(*I); 246 } 247} 248 249void CoreEngine::HandleCallEnter(const CallEnter &L, const CFGBlock *Block, 250 unsigned Index, ExplodedNode 
*Pred) { 251 CallEnterNodeBuilder Builder(*this, Pred, L.getCallExpr(), 252 L.getCalleeContext(), Block, Index); 253 SubEng.processCallEnter(Builder); 254} 255 256void CoreEngine::HandleCallExit(const CallExit &L, ExplodedNode *Pred) { 257 CallExitNodeBuilder Builder(*this, Pred); 258 SubEng.processCallExit(Builder); 259} 260 261void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) { 262 263 const CFGBlock *Blk = L.getDst(); 264 265 // Check if we are entering the EXIT block. 266 if (Blk == &(L.getLocationContext()->getCFG()->getExit())) { 267 268 assert (L.getLocationContext()->getCFG()->getExit().size() == 0 269 && "EXIT block cannot contain Stmts."); 270 271 // Process the final state transition. 272 EndOfFunctionNodeBuilder Builder(Blk, Pred, this); 273 SubEng.processEndOfFunction(Builder); 274 275 // This path is done. Don't enqueue any more nodes. 276 return; 277 } 278 279 // Call into the subengine to process entering the CFGBlock. 280 ExplodedNodeSet dstNodes; 281 BlockEntrance BE(Blk, Pred->getLocationContext()); 282 GenericNodeBuilder<BlockEntrance> nodeBuilder(*this, Pred, BE); 283 SubEng.processCFGBlockEntrance(dstNodes, nodeBuilder); 284 285 if (dstNodes.empty()) { 286 if (!nodeBuilder.hasGeneratedNode) { 287 // Auto-generate a node and enqueue it to the worklist. 288 generateNode(BE, Pred->State, Pred); 289 } 290 } 291 else { 292 for (ExplodedNodeSet::iterator I = dstNodes.begin(), E = dstNodes.end(); 293 I != E; ++I) { 294 WList->enqueue(*I); 295 } 296 } 297 298 for (SmallVectorImpl<ExplodedNode*>::const_iterator 299 I = nodeBuilder.sinks().begin(), E = nodeBuilder.sinks().end(); 300 I != E; ++I) { 301 blocksExhausted.push_back(std::make_pair(L, *I)); 302 } 303} 304 305void CoreEngine::HandleBlockEntrance(const BlockEntrance &L, 306 ExplodedNode *Pred) { 307 308 // Increment the block counter. 
309 BlockCounter Counter = WList->getBlockCounter(); 310 Counter = BCounterFactory.IncrementCount(Counter, 311 Pred->getLocationContext()->getCurrentStackFrame(), 312 L.getBlock()->getBlockID()); 313 WList->setBlockCounter(Counter); 314 315 // Process the entrance of the block. 316 if (CFGElement E = L.getFirstElement()) { 317 StmtNodeBuilder Builder(L.getBlock(), 0, Pred, this); 318 SubEng.processCFGElement(E, Builder); 319 } 320 else 321 HandleBlockExit(L.getBlock(), Pred); 322} 323 324void CoreEngine::HandleBlockExit(const CFGBlock * B, ExplodedNode *Pred) { 325 326 if (const Stmt *Term = B->getTerminator()) { 327 switch (Term->getStmtClass()) { 328 default: 329 llvm_unreachable("Analysis for this terminator not implemented."); 330 331 case Stmt::BinaryOperatorClass: // '&&' and '||' 332 HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred); 333 return; 334 335 case Stmt::BinaryConditionalOperatorClass: 336 case Stmt::ConditionalOperatorClass: 337 HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(), 338 Term, B, Pred); 339 return; 340 341 // FIXME: Use constant-folding in CFG construction to simplify this 342 // case. 343 344 case Stmt::ChooseExprClass: 345 HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred); 346 return; 347 348 case Stmt::DoStmtClass: 349 HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred); 350 return; 351 352 case Stmt::CXXForRangeStmtClass: 353 HandleBranch(cast<CXXForRangeStmt>(Term)->getCond(), Term, B, Pred); 354 return; 355 356 case Stmt::ForStmtClass: 357 HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred); 358 return; 359 360 case Stmt::ContinueStmtClass: 361 case Stmt::BreakStmtClass: 362 case Stmt::GotoStmtClass: 363 break; 364 365 case Stmt::IfStmtClass: 366 HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred); 367 return; 368 369 case Stmt::IndirectGotoStmtClass: { 370 // Only 1 successor: the indirect goto dispatch block. 
371 assert (B->succ_size() == 1); 372 373 IndirectGotoNodeBuilder 374 builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(), 375 *(B->succ_begin()), this); 376 377 SubEng.processIndirectGoto(builder); 378 return; 379 } 380 381 case Stmt::ObjCForCollectionStmtClass: { 382 // In the case of ObjCForCollectionStmt, it appears twice in a CFG: 383 // 384 // (1) inside a basic block, which represents the binding of the 385 // 'element' variable to a value. 386 // (2) in a terminator, which represents the branch. 387 // 388 // For (1), subengines will bind a value (i.e., 0 or 1) indicating 389 // whether or not collection contains any more elements. We cannot 390 // just test to see if the element is nil because a container can 391 // contain nil elements. 392 HandleBranch(Term, Term, B, Pred); 393 return; 394 } 395 396 case Stmt::SwitchStmtClass: { 397 SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(), 398 this); 399 400 SubEng.processSwitch(builder); 401 return; 402 } 403 404 case Stmt::WhileStmtClass: 405 HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred); 406 return; 407 } 408 } 409 410 assert (B->succ_size() == 1 && 411 "Blocks with no terminator should have at most 1 successor."); 412 413 generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()), 414 Pred->State, Pred); 415} 416 417void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term, 418 const CFGBlock * B, ExplodedNode *Pred) { 419 assert(B->succ_size() == 2); 420 BranchNodeBuilder Builder(B, *(B->succ_begin()), *(B->succ_begin()+1), 421 Pred, this); 422 SubEng.processBranch(Cond, Term, Builder); 423} 424 425void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx, 426 ExplodedNode *Pred) { 427 assert (!B->empty()); 428 429 if (StmtIdx == B->size()) 430 HandleBlockExit(B, Pred); 431 else { 432 StmtNodeBuilder Builder(B, StmtIdx, Pred, this); 433 SubEng.processCFGElement((*B)[StmtIdx], Builder); 434 } 435} 436 437/// generateNode - Utility 
method to generate nodes, hook up successors, 438/// and add nodes to the worklist. 439void CoreEngine::generateNode(const ProgramPoint &Loc, 440 const ProgramState *State, 441 ExplodedNode *Pred) { 442 443 bool IsNew; 444 ExplodedNode *Node = G->getNode(Loc, State, &IsNew); 445 446 if (Pred) 447 Node->addPredecessor(Pred, *G); // Link 'Node' with its predecessor. 448 else { 449 assert (IsNew); 450 G->addRoot(Node); // 'Node' has no predecessor. Make it a root. 451 } 452 453 // Only add 'Node' to the worklist if it was freshly generated. 454 if (IsNew) WList->enqueue(Node); 455} 456 457ExplodedNode * 458GenericNodeBuilderImpl::generateNodeImpl(const ProgramState *state, 459 ExplodedNode *pred, 460 ProgramPoint programPoint, 461 bool asSink) { 462 463 hasGeneratedNode = true; 464 bool isNew; 465 ExplodedNode *node = engine.getGraph().getNode(programPoint, state, &isNew); 466 if (pred) 467 node->addPredecessor(pred, engine.getGraph()); 468 if (isNew) { 469 if (asSink) { 470 node->markAsSink(); 471 sinksGenerated.push_back(node); 472 } 473 return node; 474 } 475 return 0; 476} 477 478StmtNodeBuilder::StmtNodeBuilder(const CFGBlock *b, 479 unsigned idx, 480 ExplodedNode *N, 481 CoreEngine* e) 482 : Eng(*e), B(*b), Idx(idx), Pred(N), 483 PurgingDeadSymbols(false), BuildSinks(false), hasGeneratedNode(false), 484 PointKind(ProgramPoint::PostStmtKind), Tag(0) { 485 Deferred.insert(N); 486} 487 488StmtNodeBuilder::~StmtNodeBuilder() { 489 for (DeferredTy::iterator I=Deferred.begin(), E=Deferred.end(); I!=E; ++I) 490 if (!(*I)->isSink()) 491 GenerateAutoTransition(*I); 492} 493 494void StmtNodeBuilder::GenerateAutoTransition(ExplodedNode *N) { 495 assert (!N->isSink()); 496 497 // Check if this node entered a callee. 498 if (isa<CallEnter>(N->getLocation())) { 499 // Still use the index of the CallExpr. It's needed to create the callee 500 // StackFrameContext. 501 Eng.WList->enqueue(N, &B, Idx); 502 return; 503 } 504 505 // Do not create extra nodes. 
Move to the next CFG element. 506 if (isa<PostInitializer>(N->getLocation())) { 507 Eng.WList->enqueue(N, &B, Idx+1); 508 return; 509 } 510 511 PostStmt Loc(getStmt(), N->getLocationContext()); 512 513 if (Loc == N->getLocation()) { 514 // Note: 'N' should be a fresh node because otherwise it shouldn't be 515 // a member of Deferred. 516 Eng.WList->enqueue(N, &B, Idx+1); 517 return; 518 } 519 520 bool IsNew; 521 ExplodedNode *Succ = Eng.G->getNode(Loc, N->State, &IsNew); 522 Succ->addPredecessor(N, *Eng.G); 523 524 if (IsNew) 525 Eng.WList->enqueue(Succ, &B, Idx+1); 526} 527 528ExplodedNode *StmtNodeBuilder::MakeNode(ExplodedNodeSet &Dst, 529 const Stmt *S, 530 ExplodedNode *Pred, 531 const ProgramState *St, 532 ProgramPoint::Kind K) { 533 534 ExplodedNode *N = generateNode(S, St, Pred, K); 535 536 if (N) { 537 if (BuildSinks) 538 N->markAsSink(); 539 else 540 Dst.Add(N); 541 } 542 543 return N; 544} 545 546ExplodedNode* 547StmtNodeBuilder::generateNodeInternal(const Stmt *S, 548 const ProgramState *state, 549 ExplodedNode *Pred, 550 ProgramPoint::Kind K, 551 const ProgramPointTag *tag) { 552 553 const ProgramPoint &L = ProgramPoint::getProgramPoint(S, K, 554 Pred->getLocationContext(), tag); 555 return generateNodeInternal(L, state, Pred); 556} 557 558ExplodedNode* 559StmtNodeBuilder::generateNodeInternal(const ProgramPoint &Loc, 560 const ProgramState *State, 561 ExplodedNode *Pred) { 562 bool IsNew; 563 ExplodedNode *N = Eng.G->getNode(Loc, State, &IsNew); 564 N->addPredecessor(Pred, *Eng.G); 565 Deferred.erase(Pred); 566 567 if (IsNew) { 568 Deferred.insert(N); 569 return N; 570 } 571 572 return NULL; 573} 574 575// This function generate a new ExplodedNode but not a new branch(block edge). 
ExplodedNode *BranchNodeBuilder::generateNode(const Stmt *Condition,
                                              const ProgramState *State) {
  bool IsNew;

  // Node lives at the PostCondition point of the branch condition.
  ExplodedNode *Succ
    = Eng.G->getNode(PostCondition(Condition, Pred->getLocationContext()), State,
                     &IsNew);

  Succ->addPredecessor(Pred, *Eng.G);

  // Advance the builder's predecessor so subsequent nodes chain off Succ.
  Pred = Succ;

  if (IsNew)
    return Succ;

  // The node already existed in the graph; nothing new to explore.
  return NULL;
}

// Generate a node on the true or false out-edge of the branch.  Returns NULL
// if the branch is infeasible or the node already existed.
ExplodedNode *BranchNodeBuilder::generateNode(const ProgramState *State,
                                              bool branch) {

  // If the branch has been marked infeasible we should not generate a node.
  if (!isFeasible(branch))
    return NULL;

  bool IsNew;

  ExplodedNode *Succ =
    Eng.G->getNode(BlockEdge(Src,branch ? DstT:DstF,Pred->getLocationContext()),
                   State, &IsNew);

  Succ->addPredecessor(Pred, *Eng.G);

  // Record which arm was produced so the destructor does not auto-generate
  // it a second time.
  if (branch)
    GeneratedTrue = true;
  else
    GeneratedFalse = true;

  if (IsNew) {
    Deferred.push_back(Succ);
    return Succ;
  }

  return NULL;
}

// On destruction: auto-generate any arm the client never built (propagating
// Pred's state unchanged), then enqueue all deferred non-sink nodes.
BranchNodeBuilder::~BranchNodeBuilder() {
  if (!GeneratedTrue) generateNode(Pred->State, true);
  if (!GeneratedFalse) generateNode(Pred->State, false);

  for (DeferredTy::iterator I=Deferred.begin(), E=Deferred.end(); I!=E; ++I)
    if (!(*I)->isSink()) Eng.WList->enqueue(*I);
}


// Generate a node on the edge from the indirect-goto dispatch block to the
// target described by iterator 'I'.  Sinks are not enqueued.
ExplodedNode*
IndirectGotoNodeBuilder::generateNode(const iterator &I,
                                      const ProgramState *St,
                                      bool isSink) {
  bool IsNew;

  ExplodedNode *Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(),
                                      Pred->getLocationContext()), St, &IsNew);

  Succ->addPredecessor(Pred, *Eng.G);

  if (IsNew) {

    if (isSink)
      Succ->markAsSink();
    else
      Eng.WList->enqueue(Succ);

    return Succ;
  }

  return NULL;
}


// Generate a node on the edge from the switch block to the case labeled by
// iterator 'I', and enqueue it if fresh.
ExplodedNode*
SwitchNodeBuilder::generateCaseStmtNode(const iterator &I,
                                        const ProgramState *St) {

  bool IsNew;
  ExplodedNode *Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(),
                                       Pred->getLocationContext()),
                                      St, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);
  if (IsNew) {
    Eng.WList->enqueue(Succ);
    return Succ;
  }
  return NULL;
}


// Generate a node for the switch's default case.  The default block is, by
// CFG construction, the last successor of the switch block.
ExplodedNode*
SwitchNodeBuilder::generateDefaultCaseNode(const ProgramState *St,
                                           bool isSink) {
  // Get the block for the default case.
  assert(Src->succ_rbegin() != Src->succ_rend());
  CFGBlock *DefaultBlock = *Src->succ_rbegin();

  // Sanity check for default blocks that are unreachable and not caught
  // by earlier stages.
  if (!DefaultBlock)
    return NULL;

  bool IsNew;

  ExplodedNode *Succ = Eng.G->getNode(BlockEdge(Src, DefaultBlock,
                                       Pred->getLocationContext()), St, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);

  if (IsNew) {
    if (isSink)
      Succ->markAsSink();
    else
      Eng.WList->enqueue(Succ);

    return Succ;
  }

  return NULL;
}

EndOfFunctionNodeBuilder::~EndOfFunctionNodeBuilder() {
  // Auto-generate an EOP node if one has not been generated.
  if (!hasGeneratedNode) {
    // If we are in an inlined call, generate CallExit node.
    if (Pred->getLocationContext()->getParent())
      GenerateCallExitNode(Pred->State);
    else
      generateNode(Pred->State);
  }
}

// Generate an end-of-path node and register it with the graph.  'P', when
// given, overrides Pred as the predecessor; 'tag' overrides the builder's
// default tag.  Returns NULL if the node already existed.
ExplodedNode*
EndOfFunctionNodeBuilder::generateNode(const ProgramState *State,
                                       ExplodedNode *P,
                                       const ProgramPointTag *tag) {
  hasGeneratedNode = true;
  bool IsNew;

  ExplodedNode *Node = Eng.G->getNode(BlockEntrance(&B,
                               Pred->getLocationContext(), tag ? tag : Tag),
                                      State, &IsNew);

  Node->addPredecessor(P ? P : Pred, *Eng.G);

  if (IsNew) {
    Eng.G->addEndOfPath(Node);
    return Node;
  }

  return NULL;
}

void EndOfFunctionNodeBuilder::GenerateCallExitNode(const ProgramState *state) {
  hasGeneratedNode = true;
  // Create a CallExit node and enqueue it.
  const StackFrameContext *LocCtx
                         = cast<StackFrameContext>(Pred->getLocationContext());
  const Stmt *CE = LocCtx->getCallSite();

  // Use the callee location context.
  CallExit Loc(CE, LocCtx);

  bool isNew;
  ExplodedNode *Node = Eng.G->getNode(Loc, state, &isNew);
  Node->addPredecessor(Pred, *Eng.G);

  if (isNew)
    Eng.WList->enqueue(Node);
}


// Begin analysis of a callee.  If the callee lives in another translation
// unit, a fresh engine is spun up for it; otherwise the entry edge of the
// callee's CFG is enqueued on this engine's worklist.
void CallEnterNodeBuilder::generateNode(const ProgramState *state) {
  // Check if the callee is in the same translation unit.
  if (CalleeCtx->getTranslationUnit() !=
      Pred->getLocationContext()->getTranslationUnit()) {
    // Create a new engine. We must be careful that the new engine should not
    // reference data structures owned by the old engine.

    AnalysisManager &OldMgr = Eng.SubEng.getAnalysisManager();

    // Get the callee's translation unit.
    idx::TranslationUnit *TU = CalleeCtx->getTranslationUnit();

    // Create a new AnalysisManager with components of the callee's
    // TranslationUnit.
    // The Diagnostic is actually shared when we create ASTUnits from AST files.
    AnalysisManager AMgr(TU->getASTContext(), TU->getDiagnostic(), OldMgr);

    // Create the new engine.
    // FIXME: This cast isn't really safe.
    bool GCEnabled = static_cast<ExprEngine&>(Eng.SubEng).isObjCGCEnabled();
    ExprEngine NewEng(AMgr, GCEnabled);

    // Create the new LocationContext.
    AnalysisContext *NewAnaCtx = AMgr.getAnalysisContext(CalleeCtx->getDecl(),
                                               CalleeCtx->getTranslationUnit());
    const StackFrameContext *OldLocCtx = CalleeCtx;
    const StackFrameContext *NewLocCtx = AMgr.getStackFrame(NewAnaCtx,
                                               OldLocCtx->getParent(),
                                               OldLocCtx->getCallSite(),
                                               OldLocCtx->getCallSiteBlock(),
                                               OldLocCtx->getIndex());

    // Now create an initial state for the new engine.
    const ProgramState *NewState =
      NewEng.getStateManager().MarshalState(state, NewLocCtx);
    ExplodedNodeSet ReturnNodes;
    NewEng.ExecuteWorkListWithInitialState(NewLocCtx, AMgr.getMaxNodes(),
                                           NewState, ReturnNodes);
    return;
  }

  // Get the callee entry block.
  const CFGBlock *Entry = &(CalleeCtx->getCFG()->getEntry());
  assert(Entry->empty());
  assert(Entry->succ_size() == 1);

  // Get the solitary successor.
  const CFGBlock *SuccB = *(Entry->succ_begin());

  // Construct an edge representing the starting location in the callee.
  BlockEdge Loc(Entry, SuccB, CalleeCtx);

  bool isNew;
  ExplodedNode *Node = Eng.G->getNode(Loc, state, &isNew);
  Node->addPredecessor(const_cast<ExplodedNode*>(Pred), *Eng.G);

  if (isNew)
    Eng.WList->enqueue(Node);
}

// Return from a callee: create a PostStmt node at the call site in the
// caller's context and resume the caller at the next CFG element.
void CallExitNodeBuilder::generateNode(const ProgramState *state) {
  // Get the callee's location context.
  const StackFrameContext *LocCtx
                         = cast<StackFrameContext>(Pred->getLocationContext());
  // When exiting an implicit automatic obj dtor call, the callsite is the Stmt
  // that triggers the dtor.
  PostStmt Loc(LocCtx->getCallSite(), LocCtx->getParent());
  bool isNew;
  ExplodedNode *Node = Eng.G->getNode(Loc, state, &isNew);
  Node->addPredecessor(const_cast<ExplodedNode*>(Pred), *Eng.G);
  if (isNew)
    Eng.WList->enqueue(Node, LocCtx->getCallSiteBlock(),
                       LocCtx->getIndex() + 1);
}