// CoreEngine.cpp revision 218887
1//==- CoreEngine.cpp - Path-Sensitive Dataflow Engine ------------*- C++ -*-// 2// 3// The LLVM Compiler Infrastructure 4// 5// This file is distributed under the University of Illinois Open Source 6// License. See LICENSE.TXT for details. 7// 8//===----------------------------------------------------------------------===// 9// 10// This file defines a generic engine for intraprocedural, path-sensitive, 11// dataflow analysis via graph reachability engine. 12// 13//===----------------------------------------------------------------------===// 14 15#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h" 16#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h" 17#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h" 18#include "clang/Index/TranslationUnit.h" 19#include "clang/AST/Expr.h" 20#include "llvm/Support/Casting.h" 21#include "llvm/ADT/DenseMap.h" 22#include <vector> 23#include <queue> 24 25using llvm::cast; 26using llvm::isa; 27using namespace clang; 28using namespace ento; 29 30// This should be removed in the future. 31namespace clang { 32namespace ento { 33TransferFuncs* MakeCFRefCountTF(ASTContext& Ctx, bool GCEnabled, 34 const LangOptions& lopts); 35} 36} 37 38//===----------------------------------------------------------------------===// 39// Worklist classes for exploration of reachable states. 40//===----------------------------------------------------------------------===// 41 42WorkList::Visitor::~Visitor() {} 43 44namespace { 45class DFS : public WorkList { 46 llvm::SmallVector<WorkListUnit,20> Stack; 47public: 48 virtual bool hasWork() const { 49 return !Stack.empty(); 50 } 51 52 virtual void enqueue(const WorkListUnit& U) { 53 Stack.push_back(U); 54 } 55 56 virtual WorkListUnit dequeue() { 57 assert (!Stack.empty()); 58 const WorkListUnit& U = Stack.back(); 59 Stack.pop_back(); // This technically "invalidates" U, but we are fine. 
60 return U; 61 } 62 63 virtual bool visitItemsInWorkList(Visitor &V) { 64 for (llvm::SmallVectorImpl<WorkListUnit>::iterator 65 I = Stack.begin(), E = Stack.end(); I != E; ++I) { 66 if (V.visit(*I)) 67 return true; 68 } 69 return false; 70 } 71}; 72 73class BFS : public WorkList { 74 std::deque<WorkListUnit> Queue; 75public: 76 virtual bool hasWork() const { 77 return !Queue.empty(); 78 } 79 80 virtual void enqueue(const WorkListUnit& U) { 81 Queue.push_front(U); 82 } 83 84 virtual WorkListUnit dequeue() { 85 WorkListUnit U = Queue.front(); 86 Queue.pop_front(); 87 return U; 88 } 89 90 virtual bool visitItemsInWorkList(Visitor &V) { 91 for (std::deque<WorkListUnit>::iterator 92 I = Queue.begin(), E = Queue.end(); I != E; ++I) { 93 if (V.visit(*I)) 94 return true; 95 } 96 return false; 97 } 98}; 99 100} // end anonymous namespace 101 102// Place the dstor for WorkList here because it contains virtual member 103// functions, and we the code for the dstor generated in one compilation unit. 104WorkList::~WorkList() {} 105 106WorkList *WorkList::makeDFS() { return new DFS(); } 107WorkList *WorkList::makeBFS() { return new BFS(); } 108 109namespace { 110 class BFSBlockDFSContents : public WorkList { 111 std::deque<WorkListUnit> Queue; 112 llvm::SmallVector<WorkListUnit,20> Stack; 113 public: 114 virtual bool hasWork() const { 115 return !Queue.empty() || !Stack.empty(); 116 } 117 118 virtual void enqueue(const WorkListUnit& U) { 119 if (isa<BlockEntrance>(U.getNode()->getLocation())) 120 Queue.push_front(U); 121 else 122 Stack.push_back(U); 123 } 124 125 virtual WorkListUnit dequeue() { 126 // Process all basic blocks to completion. 127 if (!Stack.empty()) { 128 const WorkListUnit& U = Stack.back(); 129 Stack.pop_back(); // This technically "invalidates" U, but we are fine. 130 return U; 131 } 132 133 assert(!Queue.empty()); 134 // Don't use const reference. The subsequent pop_back() might make it 135 // unsafe. 
136 WorkListUnit U = Queue.front(); 137 Queue.pop_front(); 138 return U; 139 } 140 virtual bool visitItemsInWorkList(Visitor &V) { 141 for (llvm::SmallVectorImpl<WorkListUnit>::iterator 142 I = Stack.begin(), E = Stack.end(); I != E; ++I) { 143 if (V.visit(*I)) 144 return true; 145 } 146 for (std::deque<WorkListUnit>::iterator 147 I = Queue.begin(), E = Queue.end(); I != E; ++I) { 148 if (V.visit(*I)) 149 return true; 150 } 151 return false; 152 } 153 154 }; 155} // end anonymous namespace 156 157WorkList* WorkList::makeBFSBlockDFSContents() { 158 return new BFSBlockDFSContents(); 159} 160 161//===----------------------------------------------------------------------===// 162// Core analysis engine. 163//===----------------------------------------------------------------------===// 164 165/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps. 166bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned Steps, 167 const GRState *InitState) { 168 169 if (G->num_roots() == 0) { // Initialize the analysis by constructing 170 // the root if none exists. 171 172 const CFGBlock* Entry = &(L->getCFG()->getEntry()); 173 174 assert (Entry->empty() && 175 "Entry block must be empty."); 176 177 assert (Entry->succ_size() == 1 && 178 "Entry block must have 1 successor."); 179 180 // Get the solitary successor. 181 const CFGBlock* Succ = *(Entry->succ_begin()); 182 183 // Construct an edge representing the 184 // starting location in the function. 185 BlockEdge StartLoc(Entry, Succ, L); 186 187 // Set the current block counter to being empty. 188 WList->setBlockCounter(BCounterFactory.GetEmptyCounter()); 189 190 if (!InitState) 191 // Generate the root. 
192 generateNode(StartLoc, SubEng.getInitialState(L), 0); 193 else 194 generateNode(StartLoc, InitState, 0); 195 } 196 197 // Check if we have a steps limit 198 bool UnlimitedSteps = Steps == 0; 199 200 while (WList->hasWork()) { 201 if (!UnlimitedSteps) { 202 if (Steps == 0) 203 break; 204 --Steps; 205 } 206 207 const WorkListUnit& WU = WList->dequeue(); 208 209 // Set the current block counter. 210 WList->setBlockCounter(WU.getBlockCounter()); 211 212 // Retrieve the node. 213 ExplodedNode* Node = WU.getNode(); 214 215 // Dispatch on the location type. 216 switch (Node->getLocation().getKind()) { 217 case ProgramPoint::BlockEdgeKind: 218 HandleBlockEdge(cast<BlockEdge>(Node->getLocation()), Node); 219 break; 220 221 case ProgramPoint::BlockEntranceKind: 222 HandleBlockEntrance(cast<BlockEntrance>(Node->getLocation()), Node); 223 break; 224 225 case ProgramPoint::BlockExitKind: 226 assert (false && "BlockExit location never occur in forward analysis."); 227 break; 228 229 case ProgramPoint::CallEnterKind: 230 HandleCallEnter(cast<CallEnter>(Node->getLocation()), WU.getBlock(), 231 WU.getIndex(), Node); 232 break; 233 234 case ProgramPoint::CallExitKind: 235 HandleCallExit(cast<CallExit>(Node->getLocation()), Node); 236 break; 237 238 default: 239 assert(isa<PostStmt>(Node->getLocation()) || 240 isa<PostInitializer>(Node->getLocation())); 241 HandlePostStmt(WU.getBlock(), WU.getIndex(), Node); 242 break; 243 } 244 } 245 246 SubEng.processEndWorklist(hasWorkRemaining()); 247 return WList->hasWork(); 248} 249 250void CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L, 251 unsigned Steps, 252 const GRState *InitState, 253 ExplodedNodeSet &Dst) { 254 ExecuteWorkList(L, Steps, InitState); 255 for (llvm::SmallVectorImpl<ExplodedNode*>::iterator I = G->EndNodes.begin(), 256 E = G->EndNodes.end(); I != E; ++I) { 257 Dst.Add(*I); 258 } 259} 260 261void CoreEngine::HandleCallEnter(const CallEnter &L, const CFGBlock *Block, 262 unsigned Index, ExplodedNode 
*Pred) { 263 CallEnterNodeBuilder Builder(*this, Pred, L.getCallExpr(), 264 L.getCalleeContext(), Block, Index); 265 SubEng.processCallEnter(Builder); 266} 267 268void CoreEngine::HandleCallExit(const CallExit &L, ExplodedNode *Pred) { 269 CallExitNodeBuilder Builder(*this, Pred); 270 SubEng.processCallExit(Builder); 271} 272 273void CoreEngine::HandleBlockEdge(const BlockEdge& L, ExplodedNode* Pred) { 274 275 const CFGBlock* Blk = L.getDst(); 276 277 // Check if we are entering the EXIT block. 278 if (Blk == &(L.getLocationContext()->getCFG()->getExit())) { 279 280 assert (L.getLocationContext()->getCFG()->getExit().size() == 0 281 && "EXIT block cannot contain Stmts."); 282 283 // Process the final state transition. 284 EndOfFunctionNodeBuilder Builder(Blk, Pred, this); 285 SubEng.processEndOfFunction(Builder); 286 287 // This path is done. Don't enqueue any more nodes. 288 return; 289 } 290 291 // Call into the subengine to process entering the CFGBlock. 292 ExplodedNodeSet dstNodes; 293 BlockEntrance BE(Blk, Pred->getLocationContext()); 294 GenericNodeBuilder<BlockEntrance> nodeBuilder(*this, Pred, BE); 295 SubEng.processCFGBlockEntrance(dstNodes, nodeBuilder); 296 297 if (dstNodes.empty()) { 298 if (!nodeBuilder.hasGeneratedNode) { 299 // Auto-generate a node and enqueue it to the worklist. 300 generateNode(BE, Pred->State, Pred); 301 } 302 } 303 else { 304 for (ExplodedNodeSet::iterator I = dstNodes.begin(), E = dstNodes.end(); 305 I != E; ++I) { 306 WList->enqueue(*I); 307 } 308 } 309 310 for (llvm::SmallVectorImpl<ExplodedNode*>::const_iterator 311 I = nodeBuilder.sinks().begin(), E = nodeBuilder.sinks().end(); 312 I != E; ++I) { 313 blocksAborted.push_back(std::make_pair(L, *I)); 314 } 315} 316 317void CoreEngine::HandleBlockEntrance(const BlockEntrance& L, 318 ExplodedNode* Pred) { 319 320 // Increment the block counter. 
321 BlockCounter Counter = WList->getBlockCounter(); 322 Counter = BCounterFactory.IncrementCount(Counter, 323 Pred->getLocationContext()->getCurrentStackFrame(), 324 L.getBlock()->getBlockID()); 325 WList->setBlockCounter(Counter); 326 327 // Process the entrance of the block. 328 if (CFGElement E = L.getFirstElement()) { 329 StmtNodeBuilder Builder(L.getBlock(), 0, Pred, this, 330 SubEng.getStateManager()); 331 SubEng.processCFGElement(E, Builder); 332 } 333 else 334 HandleBlockExit(L.getBlock(), Pred); 335} 336 337void CoreEngine::HandleBlockExit(const CFGBlock * B, ExplodedNode* Pred) { 338 339 if (const Stmt* Term = B->getTerminator()) { 340 switch (Term->getStmtClass()) { 341 default: 342 assert(false && "Analysis for this terminator not implemented."); 343 break; 344 345 case Stmt::BinaryOperatorClass: // '&&' and '||' 346 HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred); 347 return; 348 349 case Stmt::BinaryConditionalOperatorClass: 350 case Stmt::ConditionalOperatorClass: 351 HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(), 352 Term, B, Pred); 353 return; 354 355 // FIXME: Use constant-folding in CFG construction to simplify this 356 // case. 357 358 case Stmt::ChooseExprClass: 359 HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred); 360 return; 361 362 case Stmt::DoStmtClass: 363 HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred); 364 return; 365 366 case Stmt::ForStmtClass: 367 HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred); 368 return; 369 370 case Stmt::ContinueStmtClass: 371 case Stmt::BreakStmtClass: 372 case Stmt::GotoStmtClass: 373 break; 374 375 case Stmt::IfStmtClass: 376 HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred); 377 return; 378 379 case Stmt::IndirectGotoStmtClass: { 380 // Only 1 successor: the indirect goto dispatch block. 
381 assert (B->succ_size() == 1); 382 383 IndirectGotoNodeBuilder 384 builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(), 385 *(B->succ_begin()), this); 386 387 SubEng.processIndirectGoto(builder); 388 return; 389 } 390 391 case Stmt::ObjCForCollectionStmtClass: { 392 // In the case of ObjCForCollectionStmt, it appears twice in a CFG: 393 // 394 // (1) inside a basic block, which represents the binding of the 395 // 'element' variable to a value. 396 // (2) in a terminator, which represents the branch. 397 // 398 // For (1), subengines will bind a value (i.e., 0 or 1) indicating 399 // whether or not collection contains any more elements. We cannot 400 // just test to see if the element is nil because a container can 401 // contain nil elements. 402 HandleBranch(Term, Term, B, Pred); 403 return; 404 } 405 406 case Stmt::SwitchStmtClass: { 407 SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(), 408 this); 409 410 SubEng.processSwitch(builder); 411 return; 412 } 413 414 case Stmt::WhileStmtClass: 415 HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred); 416 return; 417 } 418 } 419 420 assert (B->succ_size() == 1 && 421 "Blocks with no terminator should have at most 1 successor."); 422 423 generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()), 424 Pred->State, Pred); 425} 426 427void CoreEngine::HandleBranch(const Stmt* Cond, const Stmt* Term, 428 const CFGBlock * B, ExplodedNode* Pred) { 429 assert(B->succ_size() == 2); 430 BranchNodeBuilder Builder(B, *(B->succ_begin()), *(B->succ_begin()+1), 431 Pred, this); 432 SubEng.processBranch(Cond, Term, Builder); 433} 434 435void CoreEngine::HandlePostStmt(const CFGBlock* B, unsigned StmtIdx, 436 ExplodedNode* Pred) { 437 assert (!B->empty()); 438 439 if (StmtIdx == B->size()) 440 HandleBlockExit(B, Pred); 441 else { 442 StmtNodeBuilder Builder(B, StmtIdx, Pred, this, 443 SubEng.getStateManager()); 444 SubEng.processCFGElement((*B)[StmtIdx], Builder); 445 } 446} 447 
448/// generateNode - Utility method to generate nodes, hook up successors, 449/// and add nodes to the worklist. 450void CoreEngine::generateNode(const ProgramPoint& Loc, 451 const GRState* State, ExplodedNode* Pred) { 452 453 bool IsNew; 454 ExplodedNode* Node = G->getNode(Loc, State, &IsNew); 455 456 if (Pred) 457 Node->addPredecessor(Pred, *G); // Link 'Node' with its predecessor. 458 else { 459 assert (IsNew); 460 G->addRoot(Node); // 'Node' has no predecessor. Make it a root. 461 } 462 463 // Only add 'Node' to the worklist if it was freshly generated. 464 if (IsNew) WList->enqueue(Node); 465} 466 467ExplodedNode * 468GenericNodeBuilderImpl::generateNodeImpl(const GRState *state, 469 ExplodedNode *pred, 470 ProgramPoint programPoint, 471 bool asSink) { 472 473 hasGeneratedNode = true; 474 bool isNew; 475 ExplodedNode *node = engine.getGraph().getNode(programPoint, state, &isNew); 476 if (pred) 477 node->addPredecessor(pred, engine.getGraph()); 478 if (isNew) { 479 if (asSink) { 480 node->markAsSink(); 481 sinksGenerated.push_back(node); 482 } 483 return node; 484 } 485 return 0; 486} 487 488StmtNodeBuilder::StmtNodeBuilder(const CFGBlock* b, unsigned idx, 489 ExplodedNode* N, CoreEngine* e, 490 GRStateManager &mgr) 491 : Eng(*e), B(*b), Idx(idx), Pred(N), Mgr(mgr), 492 PurgingDeadSymbols(false), BuildSinks(false), hasGeneratedNode(false), 493 PointKind(ProgramPoint::PostStmtKind), Tag(0) { 494 Deferred.insert(N); 495 CleanedState = Pred->getState(); 496} 497 498StmtNodeBuilder::~StmtNodeBuilder() { 499 for (DeferredTy::iterator I=Deferred.begin(), E=Deferred.end(); I!=E; ++I) 500 if (!(*I)->isSink()) 501 GenerateAutoTransition(*I); 502} 503 504void StmtNodeBuilder::GenerateAutoTransition(ExplodedNode* N) { 505 assert (!N->isSink()); 506 507 // Check if this node entered a callee. 508 if (isa<CallEnter>(N->getLocation())) { 509 // Still use the index of the CallExpr. It's needed to create the callee 510 // StackFrameContext. 
511 Eng.WList->enqueue(N, &B, Idx); 512 return; 513 } 514 515 // Do not create extra nodes. Move to the next CFG element. 516 if (isa<PostInitializer>(N->getLocation())) { 517 Eng.WList->enqueue(N, &B, Idx+1); 518 return; 519 } 520 521 PostStmt Loc(getStmt(), N->getLocationContext()); 522 523 if (Loc == N->getLocation()) { 524 // Note: 'N' should be a fresh node because otherwise it shouldn't be 525 // a member of Deferred. 526 Eng.WList->enqueue(N, &B, Idx+1); 527 return; 528 } 529 530 bool IsNew; 531 ExplodedNode* Succ = Eng.G->getNode(Loc, N->State, &IsNew); 532 Succ->addPredecessor(N, *Eng.G); 533 534 if (IsNew) 535 Eng.WList->enqueue(Succ, &B, Idx+1); 536} 537 538ExplodedNode* StmtNodeBuilder::MakeNode(ExplodedNodeSet& Dst, const Stmt* S, 539 ExplodedNode* Pred, const GRState* St, 540 ProgramPoint::Kind K) { 541 542 ExplodedNode* N = generateNode(S, St, Pred, K); 543 544 if (N) { 545 if (BuildSinks) 546 N->markAsSink(); 547 else 548 Dst.Add(N); 549 } 550 551 return N; 552} 553 554static ProgramPoint GetProgramPoint(const Stmt *S, ProgramPoint::Kind K, 555 const LocationContext *LC, const void *tag){ 556 switch (K) { 557 default: 558 assert(false && "Unhandled ProgramPoint kind"); 559 case ProgramPoint::PreStmtKind: 560 return PreStmt(S, LC, tag); 561 case ProgramPoint::PostStmtKind: 562 return PostStmt(S, LC, tag); 563 case ProgramPoint::PreLoadKind: 564 return PreLoad(S, LC, tag); 565 case ProgramPoint::PostLoadKind: 566 return PostLoad(S, LC, tag); 567 case ProgramPoint::PreStoreKind: 568 return PreStore(S, LC, tag); 569 case ProgramPoint::PostStoreKind: 570 return PostStore(S, LC, tag); 571 case ProgramPoint::PostLValueKind: 572 return PostLValue(S, LC, tag); 573 case ProgramPoint::PostPurgeDeadSymbolsKind: 574 return PostPurgeDeadSymbols(S, LC, tag); 575 } 576} 577 578ExplodedNode* 579StmtNodeBuilder::generateNodeInternal(const Stmt* S, const GRState* state, 580 ExplodedNode* Pred, 581 ProgramPoint::Kind K, 582 const void *tag) { 583 584 const ProgramPoint 
&L = GetProgramPoint(S, K, Pred->getLocationContext(),tag); 585 return generateNodeInternal(L, state, Pred); 586} 587 588ExplodedNode* 589StmtNodeBuilder::generateNodeInternal(const ProgramPoint &Loc, 590 const GRState* State, 591 ExplodedNode* Pred) { 592 bool IsNew; 593 ExplodedNode* N = Eng.G->getNode(Loc, State, &IsNew); 594 N->addPredecessor(Pred, *Eng.G); 595 Deferred.erase(Pred); 596 597 if (IsNew) { 598 Deferred.insert(N); 599 return N; 600 } 601 602 return NULL; 603} 604 605ExplodedNode* BranchNodeBuilder::generateNode(const GRState* State, 606 bool branch) { 607 608 // If the branch has been marked infeasible we should not generate a node. 609 if (!isFeasible(branch)) 610 return NULL; 611 612 bool IsNew; 613 614 ExplodedNode* Succ = 615 Eng.G->getNode(BlockEdge(Src,branch ? DstT:DstF,Pred->getLocationContext()), 616 State, &IsNew); 617 618 Succ->addPredecessor(Pred, *Eng.G); 619 620 if (branch) 621 GeneratedTrue = true; 622 else 623 GeneratedFalse = true; 624 625 if (IsNew) { 626 Deferred.push_back(Succ); 627 return Succ; 628 } 629 630 return NULL; 631} 632 633BranchNodeBuilder::~BranchNodeBuilder() { 634 if (!GeneratedTrue) generateNode(Pred->State, true); 635 if (!GeneratedFalse) generateNode(Pred->State, false); 636 637 for (DeferredTy::iterator I=Deferred.begin(), E=Deferred.end(); I!=E; ++I) 638 if (!(*I)->isSink()) Eng.WList->enqueue(*I); 639} 640 641 642ExplodedNode* 643IndirectGotoNodeBuilder::generateNode(const iterator& I, const GRState* St, 644 bool isSink) { 645 bool IsNew; 646 647 ExplodedNode* Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(), 648 Pred->getLocationContext()), St, &IsNew); 649 650 Succ->addPredecessor(Pred, *Eng.G); 651 652 if (IsNew) { 653 654 if (isSink) 655 Succ->markAsSink(); 656 else 657 Eng.WList->enqueue(Succ); 658 659 return Succ; 660 } 661 662 return NULL; 663} 664 665 666ExplodedNode* 667SwitchNodeBuilder::generateCaseStmtNode(const iterator& I, const GRState* St){ 668 669 bool IsNew; 670 671 ExplodedNode* Succ = 
Eng.G->getNode(BlockEdge(Src, I.getBlock(), 672 Pred->getLocationContext()), St, &IsNew); 673 Succ->addPredecessor(Pred, *Eng.G); 674 675 if (IsNew) { 676 Eng.WList->enqueue(Succ); 677 return Succ; 678 } 679 680 return NULL; 681} 682 683 684ExplodedNode* 685SwitchNodeBuilder::generateDefaultCaseNode(const GRState* St, bool isSink) { 686 687 // Get the block for the default case. 688 assert (Src->succ_rbegin() != Src->succ_rend()); 689 CFGBlock* DefaultBlock = *Src->succ_rbegin(); 690 691 bool IsNew; 692 693 ExplodedNode* Succ = Eng.G->getNode(BlockEdge(Src, DefaultBlock, 694 Pred->getLocationContext()), St, &IsNew); 695 Succ->addPredecessor(Pred, *Eng.G); 696 697 if (IsNew) { 698 if (isSink) 699 Succ->markAsSink(); 700 else 701 Eng.WList->enqueue(Succ); 702 703 return Succ; 704 } 705 706 return NULL; 707} 708 709EndOfFunctionNodeBuilder::~EndOfFunctionNodeBuilder() { 710 // Auto-generate an EOP node if one has not been generated. 711 if (!hasGeneratedNode) { 712 // If we are in an inlined call, generate CallExit node. 713 if (Pred->getLocationContext()->getParent()) 714 GenerateCallExitNode(Pred->State); 715 else 716 generateNode(Pred->State); 717 } 718} 719 720ExplodedNode* 721EndOfFunctionNodeBuilder::generateNode(const GRState* State, const void *tag, 722 ExplodedNode* P) { 723 hasGeneratedNode = true; 724 bool IsNew; 725 726 ExplodedNode* Node = Eng.G->getNode(BlockEntrance(&B, 727 Pred->getLocationContext(), tag), State, &IsNew); 728 729 Node->addPredecessor(P ? P : Pred, *Eng.G); 730 731 if (IsNew) { 732 Eng.G->addEndOfPath(Node); 733 return Node; 734 } 735 736 return NULL; 737} 738 739void EndOfFunctionNodeBuilder::GenerateCallExitNode(const GRState *state) { 740 hasGeneratedNode = true; 741 // Create a CallExit node and enqueue it. 742 const StackFrameContext *LocCtx 743 = cast<StackFrameContext>(Pred->getLocationContext()); 744 const Stmt *CE = LocCtx->getCallSite(); 745 746 // Use the the callee location context. 
747 CallExit Loc(CE, LocCtx); 748 749 bool isNew; 750 ExplodedNode *Node = Eng.G->getNode(Loc, state, &isNew); 751 Node->addPredecessor(Pred, *Eng.G); 752 753 if (isNew) 754 Eng.WList->enqueue(Node); 755} 756 757 758void CallEnterNodeBuilder::generateNode(const GRState *state) { 759 // Check if the callee is in the same translation unit. 760 if (CalleeCtx->getTranslationUnit() != 761 Pred->getLocationContext()->getTranslationUnit()) { 762 // Create a new engine. We must be careful that the new engine should not 763 // reference data structures owned by the old engine. 764 765 AnalysisManager &OldMgr = Eng.SubEng.getAnalysisManager(); 766 767 // Get the callee's translation unit. 768 idx::TranslationUnit *TU = CalleeCtx->getTranslationUnit(); 769 770 // Create a new AnalysisManager with components of the callee's 771 // TranslationUnit. 772 // The Diagnostic is actually shared when we create ASTUnits from AST files. 773 AnalysisManager AMgr(TU->getASTContext(), TU->getDiagnostic(), 774 OldMgr.getLangOptions(), 775 OldMgr.getPathDiagnosticClient(), 776 OldMgr.getStoreManagerCreator(), 777 OldMgr.getConstraintManagerCreator(), 778 OldMgr.getCheckerManager(), 779 OldMgr.getIndexer(), 780 OldMgr.getMaxNodes(), OldMgr.getMaxVisit(), 781 OldMgr.shouldVisualizeGraphviz(), 782 OldMgr.shouldVisualizeUbigraph(), 783 OldMgr.shouldPurgeDead(), 784 OldMgr.shouldEagerlyAssume(), 785 OldMgr.shouldTrimGraph(), 786 OldMgr.shouldInlineCall(), 787 OldMgr.getAnalysisContextManager().getUseUnoptimizedCFG(), 788 OldMgr.getAnalysisContextManager().getAddImplicitDtors(), 789 OldMgr.getAnalysisContextManager().getAddInitializers(), 790 OldMgr.shouldEagerlyTrimExplodedGraph()); 791 llvm::OwningPtr<TransferFuncs> TF(MakeCFRefCountTF(AMgr.getASTContext(), 792 /* GCEnabled */ false, 793 AMgr.getLangOptions())); 794 // Create the new engine. 795 ExprEngine NewEng(AMgr, TF.take()); 796 797 // Create the new LocationContext. 
798 AnalysisContext *NewAnaCtx = AMgr.getAnalysisContext(CalleeCtx->getDecl(), 799 CalleeCtx->getTranslationUnit()); 800 const StackFrameContext *OldLocCtx = CalleeCtx; 801 const StackFrameContext *NewLocCtx = AMgr.getStackFrame(NewAnaCtx, 802 OldLocCtx->getParent(), 803 OldLocCtx->getCallSite(), 804 OldLocCtx->getCallSiteBlock(), 805 OldLocCtx->getIndex()); 806 807 // Now create an initial state for the new engine. 808 const GRState *NewState = NewEng.getStateManager().MarshalState(state, 809 NewLocCtx); 810 ExplodedNodeSet ReturnNodes; 811 NewEng.ExecuteWorkListWithInitialState(NewLocCtx, AMgr.getMaxNodes(), 812 NewState, ReturnNodes); 813 return; 814 } 815 816 // Get the callee entry block. 817 const CFGBlock *Entry = &(CalleeCtx->getCFG()->getEntry()); 818 assert(Entry->empty()); 819 assert(Entry->succ_size() == 1); 820 821 // Get the solitary successor. 822 const CFGBlock *SuccB = *(Entry->succ_begin()); 823 824 // Construct an edge representing the starting location in the callee. 825 BlockEdge Loc(Entry, SuccB, CalleeCtx); 826 827 bool isNew; 828 ExplodedNode *Node = Eng.G->getNode(Loc, state, &isNew); 829 Node->addPredecessor(const_cast<ExplodedNode*>(Pred), *Eng.G); 830 831 if (isNew) 832 Eng.WList->enqueue(Node); 833} 834 835void CallExitNodeBuilder::generateNode(const GRState *state) { 836 // Get the callee's location context. 837 const StackFrameContext *LocCtx 838 = cast<StackFrameContext>(Pred->getLocationContext()); 839 // When exiting an implicit automatic obj dtor call, the callsite is the Stmt 840 // that triggers the dtor. 841 PostStmt Loc(LocCtx->getCallSite(), LocCtx->getParent()); 842 bool isNew; 843 ExplodedNode *Node = Eng.G->getNode(Loc, state, &isNew); 844 Node->addPredecessor(const_cast<ExplodedNode*>(Pred), *Eng.G); 845 if (isNew) 846 Eng.WList->enqueue(Node, LocCtx->getCallSiteBlock(), 847 LocCtx->getIndex() + 1); 848} 849