// CoreEngine.cpp revision 341825
//===- CoreEngine.cpp - Path-Sensitive Dataflow Engine --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines a generic engine for intraprocedural, path-sensitive,
//  dataflow analysis via graph reachability engine.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/ProgramPoint.h"
#include "clang/Basic/LLVM.h"
#include "clang/StaticAnalyzer/Core/AnalyzerOptions.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/BlockCounter.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/FunctionSummary.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/SubEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/WorkList.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/ErrorHandling.h"
#include <algorithm>
#include <cassert>
#include <memory>
#include <utility>

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "CoreEngine"

STATISTIC(NumSteps,
          "The # of steps executed.");
STATISTIC(NumReachedMaxSteps,
          "The # of times we reached the max number of steps.");
STATISTIC(NumPathsExplored,
          "The # of paths explored by the analyzer.");

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

/// Build the worklist implementation that realizes the exploration strategy
/// selected in the analyzer options (DFS, BFS, mixed, or one of the
/// "unexplored first" orders).
static std::unique_ptr<WorkList> generateWorkList(AnalyzerOptions &Opts) {
  switch (Opts.getExplorationStrategy()) {
  case AnalyzerOptions::ExplorationStrategyKind::DFS:
    return WorkList::makeDFS();
  case AnalyzerOptions::ExplorationStrategyKind::BFS:
    return WorkList::makeBFS();
  case AnalyzerOptions::ExplorationStrategyKind::BFSBlockDFSContents:
    return WorkList::makeBFSBlockDFSContents();
  case AnalyzerOptions::ExplorationStrategyKind::UnexploredFirst:
    return WorkList::makeUnexploredFirst();
  case AnalyzerOptions::ExplorationStrategyKind::UnexploredFirstQueue:
    return WorkList::makeUnexploredFirstPriorityQueue();
  default:
    llvm_unreachable("Unexpected case");
  }
}

// Note: block-visit counters are allocated from the exploded graph's
// allocator, so they share its lifetime.
CoreEngine::CoreEngine(SubEngine &subengine, FunctionSummariesTy *FS,
                       AnalyzerOptions &Opts)
    : SubEng(subengine), WList(generateWorkList(Opts)),
      BCounterFactory(G.getAllocator()), FunctionSummaries(FS) {}

/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
///
/// A Steps value of 0 means "no limit". Returns the value of
/// WList->hasWork() after the loop, i.e. true when the analysis stopped with
/// work still pending (it did not finish), false when the worklist drained.
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned Steps,
                                 ProgramStateRef InitState) {
  if (G.num_roots() == 0) { // Initialize the analysis by constructing
    // the root if none exists.

    const CFGBlock *Entry = &(L->getCFG()->getEntry());

    assert(Entry->empty() && "Entry block must be empty.");

    assert(Entry->succ_size() == 1 && "Entry block must have 1 successor.");

    // Mark the entry block as visited.
    FunctionSummaries->markVisitedBasicBlock(Entry->getBlockID(),
                                             L->getDecl(),
                                             L->getCFG()->getNumBlockIDs());

    // Get the solitary successor.
    const CFGBlock *Succ = *(Entry->succ_begin());

    // Construct an edge representing the
    // starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to being empty.
    WList->setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      InitState = SubEng.getInitialState(L);

    bool IsNew;
    ExplodedNode *Node = G.getNode(StartLoc, InitState, false, &IsNew);
    assert(IsNew);
    G.addRoot(Node);

    // Let the subengine process the start of the function (e.g. parameter
    // bindings) and seed the worklist with the resulting frontier.
    NodeBuilderContext BuilderCtx(*this, StartLoc.getDst(), Node);
    ExplodedNodeSet DstBegin;
    SubEng.processBeginOfFunction(BuilderCtx, Node, DstBegin, StartLoc);

    enqueue(DstBegin);
  }

  // Check if we have a steps limit
  bool UnlimitedSteps = Steps == 0;
  // Cap our pre-reservation in the event that the user specifies
  // a very large number of maximum steps.
  const unsigned PreReservationCap = 4000000;
  if (!UnlimitedSteps)
    G.reserve(std::min(Steps, PreReservationCap));

  while (WList->hasWork()) {
    if (!UnlimitedSteps) {
      if (Steps == 0) {
        NumReachedMaxSteps++;
        break;
      }
      --Steps;
    }

    NumSteps++;

    const WorkListUnit &WU = WList->dequeue();

    // Set the current block counter.
    WList->setBlockCounter(WU.getBlockCounter());

    // Retrieve the node.
    ExplodedNode *Node = WU.getNode();

    dispatchWorkItem(Node, Node->getLocation(), WU);
  }
  SubEng.processEndWorklist(hasWorkRemaining());
  return WList->hasWork();
}

/// Route one dequeued work item to the handler that matches its program
/// point kind.
void CoreEngine::dispatchWorkItem(ExplodedNode *Pred, ProgramPoint Loc,
                                  const WorkListUnit &WU) {
  // Dispatch on the location type.
  switch (Loc.getKind()) {
  case ProgramPoint::BlockEdgeKind:
    HandleBlockEdge(Loc.castAs<BlockEdge>(), Pred);
    break;

  case ProgramPoint::BlockEntranceKind:
    HandleBlockEntrance(Loc.castAs<BlockEntrance>(), Pred);
    break;

  case ProgramPoint::BlockExitKind:
    assert(false && "BlockExit location never occur in forward analysis.");
    break;

  case ProgramPoint::CallEnterKind:
    HandleCallEnter(Loc.castAs<CallEnter>(), Pred);
    break;

  case ProgramPoint::CallExitBeginKind:
    SubEng.processCallExit(Pred);
    break;

  case ProgramPoint::EpsilonKind: {
    assert(Pred->hasSinglePred() &&
           "Assume epsilon has exactly one predecessor by construction");
    // Re-dispatch using the predecessor's location: an epsilon point is a
    // pass-through node.
    ExplodedNode *PNode = Pred->getFirstPred();
    dispatchWorkItem(Pred, PNode->getLocation(), WU);
    break;
  }
  default:
    // All remaining point kinds are handled as "after a statement": continue
    // with the next CFG element of the current block.
    assert(Loc.getAs<PostStmt>() ||
           Loc.getAs<PostInitializer>() ||
           Loc.getAs<PostImplicitCall>() ||
           Loc.getAs<CallExitEnd>() ||
           Loc.getAs<LoopExit>() ||
           Loc.getAs<PostAllocatorCall>());
    HandlePostStmt(WU.getBlock(), WU.getIndex(), Pred);
    break;
  }
}

/// Same as ExecuteWorkList, but additionally collects every end-of-path node
/// produced by the run into \p Dst. Returns ExecuteWorkList's result (true
/// when the analysis stopped before finishing).
bool CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
                                                 unsigned Steps,
                                                 ProgramStateRef InitState,
                                                 ExplodedNodeSet &Dst) {
  bool DidNotFinish = ExecuteWorkList(L, Steps, InitState);
  for (ExplodedGraph::eop_iterator I = G.eop_begin(), E = G.eop_end(); I != E;
       ++I) {
    Dst.Add(*I);
  }
  return DidNotFinish;
}

/// Process a transition from one CFG block to another: record the visit,
/// detect function exit, and otherwise hand block entrance to the subengine.
void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) {
  const CFGBlock *Blk = L.getDst();
  NodeBuilderContext BuilderCtx(*this, Blk, Pred);

  // Mark this block as visited.
  const LocationContext *LC = Pred->getLocationContext();
  FunctionSummaries->markVisitedBasicBlock(Blk->getBlockID(),
                                           LC->getDecl(),
                                           LC->getCFG()->getNumBlockIDs());

  // Check if we are entering the EXIT block.
  if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {
    assert(L.getLocationContext()->getCFG()->getExit().empty() &&
           "EXIT block cannot contain Stmts.");

    // Get return statement..
    // The return statement, if any, is the last CFGStmt of the source block.
    const ReturnStmt *RS = nullptr;
    if (!L.getSrc()->empty()) {
      if (Optional<CFGStmt> LastStmt = L.getSrc()->back().getAs<CFGStmt>()) {
        RS = dyn_cast<ReturnStmt>(LastStmt->getStmt());
      }
    }

    // Process the final state transition.
    SubEng.processEndOfFunction(BuilderCtx, Pred, RS);

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the SubEngine to process entering the CFGBlock.
  ExplodedNodeSet dstNodes;
  BlockEntrance BE(Blk, Pred->getLocationContext());
  NodeBuilderWithSinks nodeBuilder(Pred, dstNodes, BuilderCtx, BE);
  SubEng.processCFGBlockEntrance(L, nodeBuilder, Pred);

  // Auto-generate a node.
  // If the subengine generated nothing, propagate the predecessor's state
  // unchanged so the path still continues into the block.
  if (!nodeBuilder.hasGeneratedNodes()) {
    nodeBuilder.generateNode(Pred->State, Pred);
  }

  // Enqueue nodes onto the worklist.
  enqueue(dstNodes);
}

/// Process entering a CFG block: bump the per-stack-frame visit counter and
/// start executing the block's elements (or exit immediately if it is empty).
void CoreEngine::HandleBlockEntrance(const BlockEntrance &L,
                                     ExplodedNode *Pred) {
  // Increment the block counter.
  const LocationContext *LC = Pred->getLocationContext();
  unsigned BlockId = L.getBlock()->getBlockID();
  BlockCounter Counter = WList->getBlockCounter();
  Counter = BCounterFactory.IncrementCount(Counter, LC->getStackFrame(),
                                           BlockId);
  WList->setBlockCounter(Counter);

  // Process the entrance of the block.
  if (Optional<CFGElement> E = L.getFirstElement()) {
    NodeBuilderContext Ctx(*this, L.getBlock(), Pred);
    SubEng.processCFGElement(*E, Pred, 0, &Ctx);
  }
  else
    HandleBlockExit(L.getBlock(), Pred);
}

/// Process leaving a CFG block by dispatching on its terminator statement;
/// blocks without a terminator simply fall through to their sole successor.
void CoreEngine::HandleBlockExit(const CFGBlock *B, ExplodedNode *Pred) {
  if (const Stmt *Term = B->getTerminator()) {
    switch (Term->getStmtClass()) {
    default:
      llvm_unreachable("Analysis for this terminator not implemented.");

    case Stmt::CXXBindTemporaryExprClass:
      HandleCleanupTemporaryBranch(
          cast<CXXBindTemporaryExpr>(B->getTerminator().getStmt()), B, Pred);
      return;

    // Model static initializers.
    case Stmt::DeclStmtClass:
      HandleStaticInit(cast<DeclStmt>(Term), B, Pred);
      return;

    case Stmt::BinaryOperatorClass: // '&&' and '||'
      HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
      return;

    case Stmt::BinaryConditionalOperatorClass:
    case Stmt::ConditionalOperatorClass:
      HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                   Term, B, Pred);
      return;

    // FIXME: Use constant-folding in CFG construction to simplify this
    // case.

    case Stmt::ChooseExprClass:
      HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::CXXTryStmtClass:
      // Generate a node for each of the successors.
      // Our logic for EH analysis can certainly be improved.
      for (CFGBlock::const_succ_iterator it = B->succ_begin(),
           et = B->succ_end(); it != et; ++it) {
        if (const CFGBlock *succ = *it) {
          generateNode(BlockEdge(B, succ, Pred->getLocationContext()),
                       Pred->State, Pred);
        }
      }
      return;

    case Stmt::DoStmtClass:
      HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::CXXForRangeStmtClass:
      HandleBranch(cast<CXXForRangeStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::ForStmtClass:
      HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::ContinueStmtClass:
    case Stmt::BreakStmtClass:
    case Stmt::GotoStmtClass:
      // Unconditional jumps: fall through to the single-successor edge below.
      break;

    case Stmt::IfStmtClass:
      HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::IndirectGotoStmtClass: {
      // Only 1 successor: the indirect goto dispatch block.
      assert(B->succ_size() == 1);

      IndirectGotoNodeBuilder
        builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                *(B->succ_begin()), this);

      SubEng.processIndirectGoto(builder);
      return;
    }

    case Stmt::ObjCForCollectionStmtClass:
      // In the case of ObjCForCollectionStmt, it appears twice in a CFG:
      //
      //  (1) inside a basic block, which represents the binding of the
      //      'element' variable to a value.
      //  (2) in a terminator, which represents the branch.
      //
      // For (1), subengines will bind a value (i.e., 0 or 1) indicating
      // whether or not collection contains any more elements.  We cannot
      // just test to see if the element is nil because a container can
      // contain nil elements.
      HandleBranch(Term, Term, B, Pred);
      return;

    case Stmt::SwitchStmtClass: {
      SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                this);

      SubEng.processSwitch(builder);
      return;
    }

    case Stmt::WhileStmtClass:
      HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
      return;
    }
  }

  assert(B->succ_size() == 1 &&
         "Blocks with no terminator should have at most 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}

/// Process entry into an inlined callee: delegate to the subengine.
void CoreEngine::HandleCallEnter(const CallEnter &CE, ExplodedNode *Pred) {
  NodeBuilderContext BuilderCtx(*this, CE.getEntry(), Pred);
  SubEng.processCallEnter(BuilderCtx, CE, Pred);
}

/// Process a two-way branch terminator. \p B must have exactly two
/// successors: the "true" branch first, then the "false" branch.
void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term,
                              const CFGBlock *B, ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processBranch(Cond, Term, Ctx, Pred, Dst,
                       *(B->succ_begin()), *(B->succ_begin()+1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

/// Process the two-way branch that decides whether a lifetime-extended
/// temporary needs its destructor run.
void CoreEngine::HandleCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                              const CFGBlock *B,
                                              ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processCleanupTemporaryBranch(BTE, Ctx, Pred, Dst, *(B->succ_begin()),
                                       *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

/// Process the implicit "already initialized?" branch guarding a static
/// local's initializer. \p B has two successors.
void CoreEngine::HandleStaticInit(const DeclStmt *DS, const CFGBlock *B,
                                  ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processStaticInitializer(DS, Ctx, Pred, Dst,
                                  *(B->succ_begin()), *(B->succ_begin()+1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

/// Continue execution within a block: process element \p StmtIdx, or handle
/// block exit once the index is one past the last element.
void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx,
                                ExplodedNode *Pred) {
  assert(B);
  assert(!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    NodeBuilderContext Ctx(*this, B, Pred);
    SubEng.processCFGElement((*B)[StmtIdx], Pred, StmtIdx, &Ctx);
  }
}

/// generateNode - Utility method to generate nodes, hook up successors,
///  and add nodes to the worklist.
void CoreEngine::generateNode(const ProgramPoint &Loc,
                              ProgramStateRef State,
                              ExplodedNode *Pred) {
  bool IsNew;
  ExplodedNode *Node = G.getNode(Loc, State, false, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, G); // Link 'Node' with its predecessor.
  else {
    assert(IsNew);
    G.addRoot(Node); // 'Node' has no predecessor.  Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew) WList->enqueue(Node);
}

/// Enqueue a node that sits in the middle of a CFG block, deciding whether
/// it can re-use the current CFG element index or must advance past it, and
/// whether an explicit PostStmt node must be interposed first.
void CoreEngine::enqueueStmtNode(ExplodedNode *N,
                                 const CFGBlock *Block, unsigned Idx) {
  assert(Block);
  assert(!N->isSink());

  // Check if this node entered a callee.
  if (N->getLocation().getAs<CallEnter>()) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    WList->enqueue(N, Block, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (N->getLocation().getAs<PostInitializer>() ||
      N->getLocation().getAs<PostImplicitCall>()||
      N->getLocation().getAs<LoopExit>()) {
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  if (N->getLocation().getAs<EpsilonPoint>()) {
    WList->enqueue(N, Block, Idx);
    return;
  }

  if ((*Block)[Idx].getKind() == CFGElement::NewAllocator) {
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  // At this point, we know we're processing a normal statement.
  CFGStmt CS = (*Block)[Idx].castAs<CFGStmt>();
  PostStmt Loc(CS.getStmt(), N->getLocationContext());

  // Compare ignoring tags: if 'N' is already at the PostStmt point for this
  // statement, advance to the next element without creating a new node.
  if (Loc == N->getLocation().withTag(nullptr)) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  bool IsNew;
  ExplodedNode *Succ = G.getNode(Loc, N->getState(), false, &IsNew);
  Succ->addPredecessor(N, G);

  if (IsNew)
    WList->enqueue(Succ, Block, Idx+1);
}

/// Create (but do not enqueue) a CallExitBegin node after \p N. Returns
/// nullptr when an identical node already existed in the graph.
ExplodedNode *CoreEngine::generateCallExitBeginNode(ExplodedNode *N,
                                                    const ReturnStmt *RS) {
  // Create a CallExitBegin node and enqueue it.
  const auto *LocCtx = cast<StackFrameContext>(N->getLocationContext());

  // Use the callee location context.
  CallExitBegin Loc(LocCtx, RS);

  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, N->getState(), false, &isNew);
  Node->addPredecessor(N, G);
  return isNew ? Node : nullptr;
}

/// Enqueue every node in \p Set onto the worklist.
void CoreEngine::enqueue(ExplodedNodeSet &Set) {
  for (const auto I : Set)
    WList->enqueue(I);
}

/// Enqueue every node in \p Set at CFG element \p Idx of \p Block, going
/// through the per-node logic in enqueueStmtNode().
void CoreEngine::enqueue(ExplodedNodeSet &Set,
                         const CFGBlock *Block, unsigned Idx) {
  for (const auto I : Set)
    enqueueStmtNode(I, Block, Idx);
}

/// Enqueue end-of-function nodes: inlined calls continue via a CallExitBegin
/// node, while top-level paths are recorded as end-of-path in the graph.
void CoreEngine::enqueueEndOfFunction(ExplodedNodeSet &Set, const ReturnStmt *RS) {
  for (auto I : Set) {
    // If we are in an inlined call, generate CallExitBegin node.
    if (I->getLocationContext()->getParent()) {
      I = generateCallExitBeginNode(I, RS);
      if (I)
        WList->enqueue(I);
    } else {
      // TODO: We should run remove dead bindings here.
      G.addEndOfPath(I);
      NumPathsExplored++;
    }
  }
}

void NodeBuilder::anchor() {}

/// Create a node at \p Loc with state \p State and predecessor \p FromN,
/// updating the frontier. Returns nullptr if the node already existed
/// (the path merged into an existing one).
ExplodedNode *NodeBuilder::generateNodeImpl(const ProgramPoint &Loc,
                                            ProgramStateRef State,
                                            ExplodedNode *FromN,
                                            bool MarkAsSink) {
  HasGeneratedNodes = true;
  bool IsNew;
  ExplodedNode *N = C.Eng.G.getNode(Loc, State, MarkAsSink, &IsNew);
  N->addPredecessor(FromN, C.Eng.G);
  Frontier.erase(FromN);

  if (!IsNew)
    return nullptr;

  // Sinks never join the frontier: nothing is explored past them.
  if (!MarkAsSink)
    Frontier.Add(N);

  return N;
}

void NodeBuilderWithSinks::anchor() {}

// On destruction, hand this builder's frontier to the enclosing builder so
// the nodes are not lost.
StmtNodeBuilder::~StmtNodeBuilder() {
  if (EnclosingBldr)
    for (const auto I : Frontier)
      EnclosingBldr->addNodes(I);
}

void BranchNodeBuilder::anchor() {}

/// Generate a node on the true (branch == true) or false edge of the current
/// block, unless that edge has been marked infeasible.
ExplodedNode *BranchNodeBuilder::generateNode(ProgramStateRef State,
                                              bool branch,
                                              ExplodedNode *NodePred) {
  // If the branch has been marked infeasible we should not generate a node.
  if (!isFeasible(branch))
    return nullptr;

  ProgramPoint Loc = BlockEdge(C.Block, branch ? DstT : DstF,
                               NodePred->getLocationContext());
  ExplodedNode *Succ = generateNodeImpl(Loc, State, NodePred);
  return Succ;
}

/// Generate a node for one target of an indirect goto, enqueueing it unless
/// it is a sink. Returns nullptr if the node already existed.
ExplodedNode*
IndirectGotoNodeBuilder::generateNode(const iterator &I,
                                      ProgramStateRef St,
                                      bool IsSink) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}

/// Generate and enqueue a node for one 'case' target of a switch.
/// Returns nullptr if the node already existed.
ExplodedNode*
SwitchNodeBuilder::generateCaseStmtNode(const iterator &I,
                                        ProgramStateRef St) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, false, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);
  if (!IsNew)
    return nullptr;

  Eng.WList->enqueue(Succ);
  return Succ;
}

/// Generate a node for the default target of a switch (the last successor of
/// the switch block), enqueueing it unless it is a sink. Returns nullptr if
/// the default block is unreachable or the node already existed.
ExplodedNode*
SwitchNodeBuilder::generateDefaultCaseNode(ProgramStateRef St,
                                           bool IsSink) {
  // Get the block for the default case.
  assert(Src->succ_rbegin() != Src->succ_rend());
  CFGBlock *DefaultBlock = *Src->succ_rbegin();

  // Sanity check for default blocks that are unreachable and not caught
  // by earlier stages.
  if (!DefaultBlock)
    return nullptr;

  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, DefaultBlock, Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}