//==- CoreEngine.cpp - Path-Sensitive Dataflow Engine ------------*- C++ -*-//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines a generic engine for intraprocedural, path-sensitive,
// dataflow analysis via graph reachability.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "CoreEngine"

#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/AST/Expr.h"
#include "clang/AST/StmtCXX.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/Casting.h"
#include <deque>

using namespace clang;
using namespace ento;

STATISTIC(NumSteps,
            "The # of steps executed.");
STATISTIC(NumReachedMaxSteps,
            "The # of times we reached the max number of steps.");
STATISTIC(NumPathsExplored,
            "The # of paths explored by the analyzer.");

//===----------------------------------------------------------------------===//
// Worklist classes for exploration of reachable states.
//===----------------------------------------------------------------------===//

WorkList::Visitor::~Visitor() {}

namespace {
class DFS : public WorkList {
  SmallVector<WorkListUnit,20> Stack;
public:
  virtual bool hasWork() const {
    return !Stack.empty();
  }

  virtual void enqueue(const WorkListUnit& U) {
    Stack.push_back(U);
  }

  virtual WorkListUnit dequeue() {
    assert(!Stack.empty());
    const WorkListUnit& U = Stack.back();
    Stack.pop_back(); // This technically "invalidates" U, but we are fine.
    return U;
  }

  virtual bool visitItemsInWorkList(Visitor &V) {
    for (SmallVectorImpl<WorkListUnit>::iterator
           I = Stack.begin(), E = Stack.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }
};

class BFS : public WorkList {
  std::deque<WorkListUnit> Queue;
public:
  virtual bool hasWork() const {
    return !Queue.empty();
  }

  virtual void enqueue(const WorkListUnit& U) {
    Queue.push_back(U);
  }

  virtual WorkListUnit dequeue() {
    WorkListUnit U = Queue.front();
    Queue.pop_front();
    return U;
  }

  virtual bool visitItemsInWorkList(Visitor &V) {
    for (std::deque<WorkListUnit>::iterator
           I = Queue.begin(), E = Queue.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }
};

} // end anonymous namespace

// Place the destructor for WorkList here because it contains virtual member
// functions, and we want the code for the destructor generated in one
// compilation unit.
WorkList::~WorkList() {}

WorkList *WorkList::makeDFS() { return new DFS(); }
WorkList *WorkList::makeBFS() { return new BFS(); }

namespace {
  class BFSBlockDFSContents : public WorkList {
    std::deque<WorkListUnit> Queue;
    SmallVector<WorkListUnit,20> Stack;
  public:
    virtual bool hasWork() const {
      return !Queue.empty() || !Stack.empty();
    }

    virtual void enqueue(const WorkListUnit& U) {
      if (U.getNode()->getLocation().getAs<BlockEntrance>())
        Queue.push_front(U);
      else
        Stack.push_back(U);
    }

    virtual WorkListUnit dequeue() {
      // Process all basic blocks to completion.
      if (!Stack.empty()) {
        const WorkListUnit& U = Stack.back();
        Stack.pop_back(); // This technically "invalidates" U, but we are fine.
        return U;
      }

      assert(!Queue.empty());
      // Don't use a const reference here; the subsequent pop_front() would
      // make it unsafe.
      WorkListUnit U = Queue.front();
      Queue.pop_front();
      return U;
    }

    virtual bool visitItemsInWorkList(Visitor &V) {
      for (SmallVectorImpl<WorkListUnit>::iterator
             I = Stack.begin(), E = Stack.end(); I != E; ++I) {
        if (V.visit(*I))
          return true;
      }
      for (std::deque<WorkListUnit>::iterator
             I = Queue.begin(), E = Queue.end(); I != E; ++I) {
        if (V.visit(*I))
          return true;
      }
      return false;
    }
  };
} // end anonymous namespace

WorkList* WorkList::makeBFSBlockDFSContents() {
  return new BFSBlockDFSContents();
}

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned Steps,
                                 ProgramStateRef InitState) {

  if (G->num_roots() == 0) { // Initialize the analysis by constructing
                             // the root if none exists.

    const CFGBlock *Entry = &(L->getCFG()->getEntry());

    assert (Entry->empty() &&
            "Entry block must be empty.");

    assert (Entry->succ_size() == 1 &&
            "Entry block must have 1 successor.");

    // Mark the entry block as visited.
    FunctionSummaries->markVisitedBasicBlock(Entry->getBlockID(),
                                             L->getDecl(),
                                             L->getCFG()->getNumBlockIDs());

    // Get the solitary successor.
    const CFGBlock *Succ = *(Entry->succ_begin());

    // Construct an edge representing the
    // starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to be empty.
    WList->setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      // Generate the root.
      generateNode(StartLoc, SubEng.getInitialState(L), 0);
    else
      generateNode(StartLoc, InitState, 0);
  }

  // Check if we have a step limit.
  bool UnlimitedSteps = Steps == 0;

  while (WList->hasWork()) {
    if (!UnlimitedSteps) {
      if (Steps == 0) {
        NumReachedMaxSteps++;
        break;
      }
      --Steps;
    }

    NumSteps++;

    const WorkListUnit& WU = WList->dequeue();

    // Set the current block counter.
    WList->setBlockCounter(WU.getBlockCounter());

    // Retrieve the node.
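    // The node's program point determines how the work item is dispatched
    // below: block edge, block entrance, call enter/exit, or a
    // post-statement-style point.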
    ExplodedNode *Node = WU.getNode();

    dispatchWorkItem(Node, Node->getLocation(), WU);
  }
  SubEng.processEndWorklist(hasWorkRemaining());
  return WList->hasWork();
}

void CoreEngine::dispatchWorkItem(ExplodedNode* Pred, ProgramPoint Loc,
                                  const WorkListUnit& WU) {
  // Dispatch on the location type.
  switch (Loc.getKind()) {
    case ProgramPoint::BlockEdgeKind:
      HandleBlockEdge(Loc.castAs<BlockEdge>(), Pred);
      break;

    case ProgramPoint::BlockEntranceKind:
      HandleBlockEntrance(Loc.castAs<BlockEntrance>(), Pred);
      break;

    case ProgramPoint::BlockExitKind:
      assert (false && "BlockExit locations never occur in forward analysis.");
      break;

    case ProgramPoint::CallEnterKind: {
      CallEnter CEnter = Loc.castAs<CallEnter>();
      SubEng.processCallEnter(CEnter, Pred);
      break;
    }

    case ProgramPoint::CallExitBeginKind:
      SubEng.processCallExit(Pred);
      break;

    case ProgramPoint::EpsilonKind: {
      assert(Pred->hasSinglePred() &&
             "Assume epsilon has exactly one predecessor by construction");
      ExplodedNode *PNode = Pred->getFirstPred();
      dispatchWorkItem(Pred, PNode->getLocation(), WU);
      break;
    }
    default:
      assert(Loc.getAs<PostStmt>() ||
             Loc.getAs<PostInitializer>() ||
             Loc.getAs<PostImplicitCall>() ||
             Loc.getAs<CallExitEnd>());
      HandlePostStmt(WU.getBlock(), WU.getIndex(), Pred);
      break;
  }
}

bool CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
                                                 unsigned Steps,
                                                 ProgramStateRef InitState,
                                                 ExplodedNodeSet &Dst) {
  bool DidNotFinish = ExecuteWorkList(L, Steps, InitState);
  for (ExplodedGraph::eop_iterator I = G->eop_begin(),
                                   E = G->eop_end(); I != E; ++I) {
    Dst.Add(*I);
  }
  return DidNotFinish;
}

void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) {

  const CFGBlock *Blk = L.getDst();
  NodeBuilderContext BuilderCtx(*this, Blk, Pred);

  // Mark this block as visited.
  const LocationContext *LC = Pred->getLocationContext();
  FunctionSummaries->markVisitedBasicBlock(Blk->getBlockID(),
                                           LC->getDecl(),
                                           LC->getCFG()->getNumBlockIDs());

  // Check if we are entering the EXIT block.
  if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {

    assert (L.getLocationContext()->getCFG()->getExit().size() == 0
            && "EXIT block cannot contain Stmts.");

    // Process the final state transition.
    SubEng.processEndOfFunction(BuilderCtx, Pred);

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the SubEngine to process entering the CFGBlock.
  ExplodedNodeSet dstNodes;
  BlockEntrance BE(Blk, Pred->getLocationContext());
  NodeBuilderWithSinks nodeBuilder(Pred, dstNodes, BuilderCtx, BE);
  SubEng.processCFGBlockEntrance(L, nodeBuilder, Pred);

  // Auto-generate a node.
  if (!nodeBuilder.hasGeneratedNodes()) {
    nodeBuilder.generateNode(Pred->State, Pred);
  }

  // Enqueue nodes onto the worklist.
  enqueue(dstNodes);
}

void CoreEngine::HandleBlockEntrance(const BlockEntrance &L,
                                     ExplodedNode *Pred) {

  // Increment the block counter.
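  // The counter records how many times this block has been visited within the
  // current stack frame; the subengine can consult it, e.g. to bound the
  // number of times a loop block is revisited along a path.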
  const LocationContext *LC = Pred->getLocationContext();
  unsigned BlockId = L.getBlock()->getBlockID();
  BlockCounter Counter = WList->getBlockCounter();
  Counter = BCounterFactory.IncrementCount(Counter, LC->getCurrentStackFrame(),
                                           BlockId);
  WList->setBlockCounter(Counter);

  // Process the entrance of the block.
  if (Optional<CFGElement> E = L.getFirstElement()) {
    NodeBuilderContext Ctx(*this, L.getBlock(), Pred);
    SubEng.processCFGElement(*E, Pred, 0, &Ctx);
  }
  else
    HandleBlockExit(L.getBlock(), Pred);
}

void CoreEngine::HandleBlockExit(const CFGBlock * B, ExplodedNode *Pred) {

  if (const Stmt *Term = B->getTerminator()) {
    switch (Term->getStmtClass()) {
      default:
        llvm_unreachable("Analysis for this terminator not implemented.");

      // Model static initializers.
      case Stmt::DeclStmtClass:
        HandleStaticInit(cast<DeclStmt>(Term), B, Pred);
        return;

      case Stmt::BinaryOperatorClass: // '&&' and '||'
        HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
        return;

      case Stmt::BinaryConditionalOperatorClass:
      case Stmt::ConditionalOperatorClass:
        HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                     Term, B, Pred);
        return;

      // FIXME: Use constant-folding in CFG construction to simplify this
      // case.

      case Stmt::ChooseExprClass:
        HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXTryStmtClass: {
        // Generate a node for each of the successors.
        // Our logic for EH analysis can certainly be improved.
        for (CFGBlock::const_succ_iterator it = B->succ_begin(),
             et = B->succ_end(); it != et; ++it) {
          if (const CFGBlock *succ = *it) {
            generateNode(BlockEdge(B, succ, Pred->getLocationContext()),
                         Pred->State, Pred);
          }
        }
        return;
      }

      case Stmt::DoStmtClass:
        HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXForRangeStmtClass:
        HandleBranch(cast<CXXForRangeStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ForStmtClass:
        HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ContinueStmtClass:
      case Stmt::BreakStmtClass:
      case Stmt::GotoStmtClass:
        break;

      case Stmt::IfStmtClass:
        HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::IndirectGotoStmtClass: {
        // Only 1 successor: the indirect goto dispatch block.
        assert (B->succ_size() == 1);

        IndirectGotoNodeBuilder
           builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                   *(B->succ_begin()), this);

        SubEng.processIndirectGoto(builder);
        return;
      }

      case Stmt::ObjCForCollectionStmtClass: {
        // In the case of ObjCForCollectionStmt, it appears twice in a CFG:
        //
        //  (1) inside a basic block, which represents the binding of the
        //      'element' variable to a value.
        //  (2) in a terminator, which represents the branch.
        //
        // For (1), subengines will bind a value (i.e., 0 or 1) indicating
        // whether or not the collection contains any more elements. We cannot
        // just test to see if the element is nil because a container can
        // contain nil elements.
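        //
        // For (2), the statement itself is passed to HandleBranch as the
        // branch condition, so the subengine branches on the value that was
        // bound in (1).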
        HandleBranch(Term, Term, B, Pred);
        return;
      }

      case Stmt::SwitchStmtClass: {
        SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                  this);

        SubEng.processSwitch(builder);
        return;
      }

      case Stmt::WhileStmtClass:
        HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
        return;
    }
  }

  assert (B->succ_size() == 1 &&
          "Blocks with no terminator should have exactly 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}

void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term,
                              const CFGBlock * B, ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processBranch(Cond, Term, Ctx, Pred, Dst,
                       *(B->succ_begin()), *(B->succ_begin()+1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleStaticInit(const DeclStmt *DS, const CFGBlock *B,
                                  ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processStaticInitializer(DS, Ctx, Pred, Dst,
                                  *(B->succ_begin()), *(B->succ_begin()+1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx,
                                ExplodedNode *Pred) {
  assert(B);
  assert(!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    NodeBuilderContext Ctx(*this, B, Pred);
    SubEng.processCFGElement((*B)[StmtIdx], Pred, StmtIdx, &Ctx);
  }
}

/// generateNode - Utility method to generate nodes, hook up successors,
/// and add nodes to the worklist.
void CoreEngine::generateNode(const ProgramPoint &Loc,
                              ProgramStateRef State,
                              ExplodedNode *Pred) {

  bool IsNew;
  ExplodedNode *Node = G->getNode(Loc, State, false, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, *G); // Link 'Node' with its predecessor.
  else {
    assert (IsNew);
    G->addRoot(Node); // 'Node' has no predecessor. Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew) WList->enqueue(Node);
}

void CoreEngine::enqueueStmtNode(ExplodedNode *N,
                                 const CFGBlock *Block, unsigned Idx) {
  assert(Block);
  assert(!N->isSink());

  // Check if this node entered a callee.
  if (N->getLocation().getAs<CallEnter>()) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    WList->enqueue(N, Block, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (N->getLocation().getAs<PostInitializer>() ||
      N->getLocation().getAs<PostImplicitCall>()) {
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  if (N->getLocation().getAs<EpsilonPoint>()) {
    WList->enqueue(N, Block, Idx);
    return;
  }

  // At this point, we know we're processing a normal statement.
  CFGStmt CS = (*Block)[Idx].castAs<CFGStmt>();
  PostStmt Loc(CS.getStmt(), N->getLocationContext());

  if (Loc == N->getLocation()) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
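    // The node is already tagged with this statement; reuse it and simply
    // advance to the next CFG element.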
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  bool IsNew;
  ExplodedNode *Succ = G->getNode(Loc, N->getState(), false, &IsNew);
  Succ->addPredecessor(N, *G);

  if (IsNew)
    WList->enqueue(Succ, Block, Idx+1);
}

ExplodedNode *CoreEngine::generateCallExitBeginNode(ExplodedNode *N) {
  // Create a CallExitBegin node and enqueue it.
  const StackFrameContext *LocCtx
    = cast<StackFrameContext>(N->getLocationContext());

  // Use the callee location context.
  CallExitBegin Loc(LocCtx);

  bool isNew;
  ExplodedNode *Node = G->getNode(Loc, N->getState(), false, &isNew);
  Node->addPredecessor(N, *G);
  return isNew ? Node : 0;
}

void CoreEngine::enqueue(ExplodedNodeSet &Set) {
  for (ExplodedNodeSet::iterator I = Set.begin(),
                                 E = Set.end(); I != E; ++I) {
    WList->enqueue(*I);
  }
}

void CoreEngine::enqueue(ExplodedNodeSet &Set,
                         const CFGBlock *Block, unsigned Idx) {
  for (ExplodedNodeSet::iterator I = Set.begin(),
                                 E = Set.end(); I != E; ++I) {
    enqueueStmtNode(*I, Block, Idx);
  }
}

void CoreEngine::enqueueEndOfFunction(ExplodedNodeSet &Set) {
  for (ExplodedNodeSet::iterator I = Set.begin(), E = Set.end(); I != E; ++I) {
    ExplodedNode *N = *I;
    // If we are in an inlined call, generate a CallExitBegin node.
    if (N->getLocationContext()->getParent()) {
      N = generateCallExitBeginNode(N);
      if (N)
        WList->enqueue(N);
    } else {
      // TODO: We should run the remove-dead-bindings pass here.
      G->addEndOfPath(N);
      NumPathsExplored++;
    }
  }
}

void NodeBuilder::anchor() { }

ExplodedNode* NodeBuilder::generateNodeImpl(const ProgramPoint &Loc,
                                            ProgramStateRef State,
                                            ExplodedNode *FromN,
                                            bool MarkAsSink) {
  HasGeneratedNodes = true;
  bool IsNew;
  ExplodedNode *N = C.Eng.G->getNode(Loc, State, MarkAsSink, &IsNew);
  N->addPredecessor(FromN, *C.Eng.G);
  Frontier.erase(FromN);

  if (!IsNew)
    return 0;

  if (!MarkAsSink)
    Frontier.Add(N);

  return N;
}

void NodeBuilderWithSinks::anchor() { }

StmtNodeBuilder::~StmtNodeBuilder() {
  if (EnclosingBldr)
    for (ExplodedNodeSet::iterator I = Frontier.begin(),
                                   E = Frontier.end(); I != E; ++I )
      EnclosingBldr->addNodes(*I);
}

void BranchNodeBuilder::anchor() { }

ExplodedNode *BranchNodeBuilder::generateNode(ProgramStateRef State,
                                              bool branch,
                                              ExplodedNode *NodePred) {
  // If the branch has been marked infeasible we should not generate a node.
  if (!isFeasible(branch))
    return NULL;

  ProgramPoint Loc = BlockEdge(C.Block, branch ?
                                          DstT : DstF,
                               NodePred->getLocationContext());
  ExplodedNode *Succ = generateNodeImpl(Loc, State, NodePred);
  return Succ;
}

ExplodedNode*
IndirectGotoNodeBuilder::generateNode(const iterator &I,
                                      ProgramStateRef St,
                                      bool IsSink) {
  bool IsNew;
  ExplodedNode *Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(),
                                                Pred->getLocationContext()), St,
                                      IsSink, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);

  if (!IsNew)
    return 0;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}

ExplodedNode*
SwitchNodeBuilder::generateCaseStmtNode(const iterator &I,
                                        ProgramStateRef St) {

  bool IsNew;
  ExplodedNode *Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(),
                                                Pred->getLocationContext()), St,
                                      false, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);
  if (!IsNew)
    return 0;

  Eng.WList->enqueue(Succ);
  return Succ;
}

ExplodedNode*
SwitchNodeBuilder::generateDefaultCaseNode(ProgramStateRef St,
                                           bool IsSink) {
  // Get the block for the default case.
  assert(Src->succ_rbegin() != Src->succ_rend());
  CFGBlock *DefaultBlock = *Src->succ_rbegin();

  // Sanity check for default blocks that are unreachable and not caught
  // by earlier stages.
  if (!DefaultBlock)
    return NULL;

  bool IsNew;
  ExplodedNode *Succ = Eng.G->getNode(BlockEdge(Src, DefaultBlock,
                                                Pred->getLocationContext()), St,
                                      IsSink, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);

  if (!IsNew)
    return 0;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}