//=-- ExprEngine.cpp - Path-Sensitive Expression-Level Dataflow ---*- C++ -*-=
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines a meta-engine for path-sensitive dataflow analysis that
//  is built on GREngine, but provides the boilerplate to execute transfer
//  functions and build the ExplodedGraph at the expression level.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "PrettyStackTraceLocationContext.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/ParentMap.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/Basic/Builtins.h"
#include "clang/Basic/PrettyStackTrace.h"
#include "clang/Basic/SourceManager.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/LoopWidening.h"
#include "llvm/ADT/ImmutableList.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Support/SaveAndRestore.h"

#ifndef NDEBUG
#include "llvm/Support/GraphWriter.h"
#endif

using namespace clang;
using namespace ento;
using llvm::APSInt;

#define DEBUG_TYPE "ExprEngine"

STATISTIC(NumRemoveDeadBindings,
          "The # of times RemoveDeadBindings is called");
STATISTIC(NumMaxBlockCountReached,
          "The # of aborted paths due to reaching the maximum block count in "
          "a top level function");
STATISTIC(NumMaxBlockCountReachedInInlined,
          "The # of aborted paths due to reaching the maximum block count in "
          "an inlined function");
STATISTIC(NumTimesRetriedWithoutInlining,
          "The # of times we re-evaluated a call without inlining");

typedef std::pair<const CXXBindTemporaryExpr *, const StackFrameContext *>
    CXXBindTemporaryContext;

// Keeps track of whether CXXBindTemporaryExpr nodes have been evaluated.
// The StackFrameContext assures that nested calls due to inlined recursive
// functions do not interfere.
REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedTemporariesSet,
                                 llvm::ImmutableSet<CXXBindTemporaryContext>)

//===----------------------------------------------------------------------===//
// Engine construction and deletion.
//===----------------------------------------------------------------------===//

static const char* TagProviderName = "ExprEngine";

ExprEngine::ExprEngine(AnalysisManager &mgr, bool gcEnabled,
                       SetOfConstDecls *VisitedCalleesIn,
                       FunctionSummariesTy *FS,
                       InliningModes HowToInlineIn)
  : AMgr(mgr),
    AnalysisDeclContexts(mgr.getAnalysisDeclContextManager()),
    Engine(*this, FS),
    G(Engine.getGraph()),
    StateMgr(getContext(), mgr.getStoreManagerCreator(),
             mgr.getConstraintManagerCreator(), G.getAllocator(),
             this),
    SymMgr(StateMgr.getSymbolManager()),
    svalBuilder(StateMgr.getSValBuilder()),
    currStmtIdx(0), currBldrCtx(nullptr),
    ObjCNoRet(mgr.getASTContext()),
    ObjCGCEnabled(gcEnabled), BR(mgr, *this),
    VisitedCallees(VisitedCalleesIn),
    HowToInline(HowToInlineIn)
{
  unsigned TrimInterval = mgr.options.getGraphTrimInterval();
  if (TrimInterval != 0) {
    // Enable eager node reclamation when constructing the ExplodedGraph.
    G.enableNodeReclamation(TrimInterval);
  }
}

ExprEngine::~ExprEngine() {
  BR.FlushReports();
}

//===----------------------------------------------------------------------===//
// Utility methods.
//===----------------------------------------------------------------------===//

ProgramStateRef ExprEngine::getInitialState(const LocationContext *InitLoc) {
  ProgramStateRef state = StateMgr.getInitialState(InitLoc);
  const Decl *D = InitLoc->getDecl();

  // Preconditions.
  // FIXME: It would be nice if we had a more general mechanism to add
  // such preconditions. Some day.
  do {

    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
      // Precondition: the first argument of 'main' is an integer guaranteed
      // to be > 0.
      const IdentifierInfo *II = FD->getIdentifier();
      if (!II || !(II->getName() == "main" && FD->getNumParams() > 0))
        break;

      const ParmVarDecl *PD = FD->getParamDecl(0);
      QualType T = PD->getType();
      const BuiltinType *BT = dyn_cast<BuiltinType>(T);
      if (!BT || !BT->isInteger())
        break;

      const MemRegion *R = state->getRegion(PD, InitLoc);
      if (!R)
        break;

      SVal V = state->getSVal(loc::MemRegionVal(R));
      SVal Constraint_untested = evalBinOp(state, BO_GT, V,
                                           svalBuilder.makeZeroVal(T),
                                           svalBuilder.getConditionType());

      Optional<DefinedOrUnknownSVal> Constraint =
          Constraint_untested.getAs<DefinedOrUnknownSVal>();

      if (!Constraint)
        break;

      if (ProgramStateRef newState = state->assume(*Constraint, true))
        state = newState;
    }
    break;
  }
  while (0);

  if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    // Precondition: 'self' is always non-null upon entry to an Objective-C
    // method.
    const ImplicitParamDecl *SelfD = MD->getSelfDecl();
    const MemRegion *R = state->getRegion(SelfD, InitLoc);
    SVal V = state->getSVal(loc::MemRegionVal(R));

    if (Optional<Loc> LV = V.getAs<Loc>()) {
      // Assume that the pointer value in 'self' is non-null.
      state = state->assume(*LV, true);
      assert(state && "'self' cannot be null");
    }
  }

  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(D)) {
    if (!MD->isStatic()) {
      // Precondition: 'this' is always non-null upon entry to the
      // top-level function. This is our starting assumption for
      // analyzing an "open" program.
      const StackFrameContext *SFC = InitLoc->getCurrentStackFrame();
      if (SFC->getParent() == nullptr) {
        loc::MemRegionVal L = svalBuilder.getCXXThis(MD, SFC);
        SVal V = state->getSVal(L);
        if (Optional<Loc> LV = V.getAs<Loc>()) {
          state = state->assume(*LV, true);
          assert(state && "'this' cannot be null");
        }
      }
    }
  }

  return state;
}

ProgramStateRef
ExprEngine::createTemporaryRegionIfNeeded(ProgramStateRef State,
                                          const LocationContext *LC,
                                          const Expr *Ex,
                                          const Expr *Result) {
  SVal V = State->getSVal(Ex, LC);
  if (!Result) {
    // If we don't have an explicit result expression, we're in "if needed"
    // mode. Only create a region if the current value is a NonLoc.
    if (!V.getAs<NonLoc>())
      return State;
    Result = Ex;
  } else {
    // We need to create a region no matter what. For sanity, make sure we don't
    // try to stuff a Loc into a non-pointer temporary region.
    assert(!V.getAs<Loc>() || Loc::isLocType(Result->getType()) ||
           Result->getType()->isMemberPointerType());
  }

  ProgramStateManager &StateMgr = State->getStateManager();
  MemRegionManager &MRMgr = StateMgr.getRegionManager();
  StoreManager &StoreMgr = StateMgr.getStoreManager();

  // We need to be careful about treating a derived type's value as
  // bindings for a base type. Unless we're creating a temporary pointer region,
  // start by stripping and recording base casts.
  SmallVector<const CastExpr *, 4> Casts;
  const Expr *Inner = Ex->IgnoreParens();
  if (!Loc::isLocType(Result->getType())) {
    while (const CastExpr *CE = dyn_cast<CastExpr>(Inner)) {
      if (CE->getCastKind() == CK_DerivedToBase ||
          CE->getCastKind() == CK_UncheckedDerivedToBase)
        Casts.push_back(CE);
      else if (CE->getCastKind() != CK_NoOp)
        break;

      Inner = CE->getSubExpr()->IgnoreParens();
    }
  }

  // Create a temporary object region for the inner expression (which may have
  // a more derived type) and bind the value into it.
  const TypedValueRegion *TR = nullptr;
  if (const MaterializeTemporaryExpr *MT =
          dyn_cast<MaterializeTemporaryExpr>(Result)) {
    StorageDuration SD = MT->getStorageDuration();
    // If this object is bound to a reference with static storage duration, we
    // put it in a different region to prevent "address leakage" warnings.
    if (SD == SD_Static || SD == SD_Thread)
      TR = MRMgr.getCXXStaticTempObjectRegion(Inner);
  }
  if (!TR)
    TR = MRMgr.getCXXTempObjectRegion(Inner, LC);

  SVal Reg = loc::MemRegionVal(TR);

  if (V.isUnknown())
    V = getSValBuilder().conjureSymbolVal(Result, LC, TR->getValueType(),
                                          currBldrCtx->blockCount());
  State = State->bindLoc(Reg, V);

  // Re-apply the casts (from innermost to outermost) for type sanity.
  for (SmallVectorImpl<const CastExpr *>::reverse_iterator I = Casts.rbegin(),
                                                           E = Casts.rend();
       I != E; ++I) {
    Reg = StoreMgr.evalDerivedToBase(Reg, *I);
  }

  State = State->BindExpr(Result, LC, Reg);
  return State;
}

//===----------------------------------------------------------------------===//
// Top-level transfer function logic (Dispatcher).
//===----------------------------------------------------------------------===//

/// processAssume - Called by ConstraintManager. Used to call checker-specific
/// logic for handling assumptions on symbolic values.
ProgramStateRef ExprEngine::processAssume(ProgramStateRef state,
                                          SVal cond, bool assumption) {
  return getCheckerManager().runCheckersForEvalAssume(state, cond, assumption);
}

bool ExprEngine::wantsRegionChangeUpdate(ProgramStateRef state) {
  return getCheckerManager().wantsRegionChangeUpdate(state);
}

ProgramStateRef
ExprEngine::processRegionChanges(ProgramStateRef state,
                                 const InvalidatedSymbols *invalidated,
                                 ArrayRef<const MemRegion *> Explicits,
                                 ArrayRef<const MemRegion *> Regions,
                                 const CallEvent *Call) {
  return getCheckerManager().runCheckersForRegionChanges(state, invalidated,
                                                         Explicits, Regions,
                                                         Call);
}

void ExprEngine::printState(raw_ostream &Out, ProgramStateRef State,
                            const char *NL, const char *Sep) {
  getCheckerManager().runCheckersForPrintState(Out, State, NL, Sep);
}

void ExprEngine::processEndWorklist(bool hasWorkRemaining) {
  getCheckerManager().runCheckersForEndAnalysis(G, BR, *this);
}

void ExprEngine::processCFGElement(const CFGElement E, ExplodedNode *Pred,
                                   unsigned StmtIdx, NodeBuilderContext *Ctx) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  currStmtIdx = StmtIdx;
  currBldrCtx = Ctx;

  switch (E.getKind()) {
    case CFGElement::Statement:
      ProcessStmt(const_cast<Stmt*>(E.castAs<CFGStmt>().getStmt()), Pred);
      return;
    case CFGElement::Initializer:
      ProcessInitializer(E.castAs<CFGInitializer>().getInitializer(), Pred);
      return;
    case CFGElement::NewAllocator:
      ProcessNewAllocator(E.castAs<CFGNewAllocator>().getAllocatorExpr(),
                          Pred);
      return;
    case CFGElement::AutomaticObjectDtor:
    case CFGElement::DeleteDtor:
    case CFGElement::BaseDtor:
    case CFGElement::MemberDtor:
    case CFGElement::TemporaryDtor:
      ProcessImplicitDtor(E.castAs<CFGImplicitDtor>(), Pred);
      return;
  }
}

static bool shouldRemoveDeadBindings(AnalysisManager &AMgr,
                                     const CFGStmt S,
                                     const ExplodedNode *Pred,
                                     const LocationContext *LC) {

  // Are we never purging state values?
  if (AMgr.options.AnalysisPurgeOpt == PurgeNone)
    return false;

  // Is this the beginning of a basic block?
  if (Pred->getLocation().getAs<BlockEntrance>())
    return true;

  // Is this on a non-expression?
  if (!isa<Expr>(S.getStmt()))
    return true;

  // Run before processing a call.
  if (CallEvent::isCallStmt(S.getStmt()))
    return true;

  // Is this an expression that is consumed by another expression? If so,
  // postpone cleaning out the state.
  ParentMap &PM = LC->getAnalysisDeclContext()->getParentMap();
  return !PM.isConsumedExpr(cast<Expr>(S.getStmt()));
}

void ExprEngine::removeDead(ExplodedNode *Pred, ExplodedNodeSet &Out,
                            const Stmt *ReferenceStmt,
                            const LocationContext *LC,
                            const Stmt *DiagnosticStmt,
                            ProgramPoint::Kind K) {
  assert((K == ProgramPoint::PreStmtPurgeDeadSymbolsKind ||
          ReferenceStmt == nullptr || isa<ReturnStmt>(ReferenceStmt))
         && "PostStmt is not generally supported by the SymbolReaper yet");
  assert(LC && "Must pass the current (or expiring) LocationContext");

  if (!DiagnosticStmt) {
    DiagnosticStmt = ReferenceStmt;
    assert(DiagnosticStmt && "Required for clearing a LocationContext");
  }

  NumRemoveDeadBindings++;
  ProgramStateRef CleanedState = Pred->getState();

  // LC is the location context being destroyed, but SymbolReaper wants a
  // location context that is still live. (If this is the top-level stack
  // frame, this will be null.)
  if (!ReferenceStmt) {
    assert(K == ProgramPoint::PostStmtPurgeDeadSymbolsKind &&
           "Use PostStmtPurgeDeadSymbolsKind for clearing a LocationContext");
    LC = LC->getParent();
  }

  const StackFrameContext *SFC = LC ? LC->getCurrentStackFrame() : nullptr;
  SymbolReaper SymReaper(SFC, ReferenceStmt, SymMgr, getStoreManager());

  getCheckerManager().runCheckersForLiveSymbols(CleanedState, SymReaper);

  // Create a state in which dead bindings are removed from the environment
  // and the store. TODO: The function should just return new env and store,
  // not a new state.
  CleanedState = StateMgr.removeDeadBindings(CleanedState, SFC, SymReaper);

  // Process any special transfer function for dead symbols.
  // A tag to track convenience transitions, which can be removed at cleanup.
  static SimpleProgramPointTag cleanupTag(TagProviderName, "Clean Node");
  if (!SymReaper.hasDeadSymbols()) {
    // Generate a CleanedNode that has the environment and store cleaned
    // up. Since no symbols are dead, we can optimize and not clean out
    // the constraint manager.
    StmtNodeBuilder Bldr(Pred, Out, *currBldrCtx);
    Bldr.generateNode(DiagnosticStmt, Pred, CleanedState, &cleanupTag, K);

  } else {
    // Call checkers with the non-cleaned state so that they could query the
    // values of the soon to be dead symbols.
    ExplodedNodeSet CheckedSet;
    getCheckerManager().runCheckersForDeadSymbols(CheckedSet, Pred, SymReaper,
                                                  DiagnosticStmt, *this, K);

    // For each node in CheckedSet, generate CleanedNodes that have the
    // environment, the store, and the constraints cleaned up but have the
    // user-supplied states as the predecessors.
    StmtNodeBuilder Bldr(CheckedSet, Out, *currBldrCtx);
    for (ExplodedNodeSet::const_iterator
           I = CheckedSet.begin(), E = CheckedSet.end(); I != E; ++I) {
      ProgramStateRef CheckerState = (*I)->getState();

      // The constraint manager has not been cleaned up yet, so clean up now.
      CheckerState = getConstraintManager().removeDeadBindings(CheckerState,
                                                               SymReaper);

      assert(StateMgr.haveEqualEnvironments(CheckerState, Pred->getState()) &&
             "Checkers are not allowed to modify the Environment as a part of "
             "checkDeadSymbols processing.");
      assert(StateMgr.haveEqualStores(CheckerState, Pred->getState()) &&
             "Checkers are not allowed to modify the Store as a part of "
             "checkDeadSymbols processing.");

      // Create a state based on CleanedState with CheckerState GDM and
      // generate a transition to that state.
      ProgramStateRef CleanedCheckerSt =
          StateMgr.getPersistentStateWithGDM(CleanedState, CheckerState);
      Bldr.generateNode(DiagnosticStmt, *I, CleanedCheckerSt, &cleanupTag, K);
    }
  }
}

void ExprEngine::ProcessStmt(const CFGStmt S,
                             ExplodedNode *Pred) {
  // Reclaim any unnecessary nodes in the ExplodedGraph.
  G.reclaimRecentlyAllocatedNodes();

  const Stmt *currStmt = S.getStmt();
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                currStmt->getLocStart(),
                                "Error evaluating statement");

  // Remove dead bindings and symbols.
  ExplodedNodeSet CleanedStates;
  if (shouldRemoveDeadBindings(AMgr, S, Pred, Pred->getLocationContext())){
    removeDead(Pred, CleanedStates, currStmt, Pred->getLocationContext());
  } else
    CleanedStates.Add(Pred);

  // Visit the statement.
  ExplodedNodeSet Dst;
  for (ExplodedNodeSet::iterator I = CleanedStates.begin(),
                                 E = CleanedStates.end(); I != E; ++I) {
    ExplodedNodeSet DstI;
    // Visit the statement.
    Visit(currStmt, *I, DstI);
    Dst.insert(DstI);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessInitializer(const CFGInitializer Init,
                                    ExplodedNode *Pred) {
  const CXXCtorInitializer *BMI = Init.getInitializer();

  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                BMI->getSourceLocation(),
                                "Error evaluating initializer");

  // We don't clean up dead bindings here.
  const StackFrameContext *stackFrame =
      cast<StackFrameContext>(Pred->getLocationContext());
  const CXXConstructorDecl *decl =
      cast<CXXConstructorDecl>(stackFrame->getDecl());

  ProgramStateRef State = Pred->getState();
  SVal thisVal = State->getSVal(svalBuilder.getCXXThis(decl, stackFrame));

  ExplodedNodeSet Tmp(Pred);
  SVal FieldLoc;

  // Evaluate the initializer, if necessary
  if (BMI->isAnyMemberInitializer()) {
    // Constructors build the object directly in the field,
    // but non-objects must be copied in from the initializer.
    if (auto *CtorExpr = findDirectConstructorForCurrentCFGElement()) {
      assert(BMI->getInit()->IgnoreImplicit() == CtorExpr);
      (void)CtorExpr;
      // The field was directly constructed, so there is no need to bind.
    } else {
      const Expr *Init = BMI->getInit()->IgnoreImplicit();
      const ValueDecl *Field;
      if (BMI->isIndirectMemberInitializer()) {
        Field = BMI->getIndirectMember();
        FieldLoc = State->getLValue(BMI->getIndirectMember(), thisVal);
      } else {
        Field = BMI->getMember();
        FieldLoc = State->getLValue(BMI->getMember(), thisVal);
      }

      SVal InitVal;
      if (BMI->getNumArrayIndices() > 0) {
        // Handle arrays of trivial type. We can represent this with a
        // primitive load/copy from the base array region.
        const ArraySubscriptExpr *ASE;
        while ((ASE = dyn_cast<ArraySubscriptExpr>(Init)))
          Init = ASE->getBase()->IgnoreImplicit();

        SVal LValue = State->getSVal(Init, stackFrame);
        if (Optional<Loc> LValueLoc = LValue.getAs<Loc>())
          InitVal = State->getSVal(*LValueLoc);

        // If we fail to get the value for some reason, use a symbolic value.
        if (InitVal.isUnknownOrUndef()) {
          SValBuilder &SVB = getSValBuilder();
          InitVal = SVB.conjureSymbolVal(BMI->getInit(), stackFrame,
                                         Field->getType(),
                                         currBldrCtx->blockCount());
        }
      } else {
        InitVal = State->getSVal(BMI->getInit(), stackFrame);
      }

      assert(Tmp.size() == 1 && "have not generated any new nodes yet");
      assert(*Tmp.begin() == Pred && "have not generated any new nodes yet");
      Tmp.clear();

      PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
      evalBind(Tmp, Init, Pred, FieldLoc, InitVal, /*isInit=*/true, &PP);
    }
  } else {
    assert(BMI->isBaseInitializer() || BMI->isDelegatingInitializer());
    // We already did all the work when visiting the CXXConstructExpr.
  }

  // Construct PostInitializer nodes whether the state changed or not,
  // so that the diagnostics don't get confused.
  PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
  ExplodedNodeSet Dst;
  NodeBuilder Bldr(Tmp, Dst, *currBldrCtx);
  for (ExplodedNodeSet::iterator I = Tmp.begin(), E = Tmp.end(); I != E; ++I) {
    ExplodedNode *N = *I;
    Bldr.generateNode(PP, N->getState(), N);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessImplicitDtor(const CFGImplicitDtor D,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  switch (D.getKind()) {
  case CFGElement::AutomaticObjectDtor:
    ProcessAutomaticObjDtor(D.castAs<CFGAutomaticObjDtor>(), Pred, Dst);
    break;
  case CFGElement::BaseDtor:
    ProcessBaseDtor(D.castAs<CFGBaseDtor>(), Pred, Dst);
    break;
  case CFGElement::MemberDtor:
    ProcessMemberDtor(D.castAs<CFGMemberDtor>(), Pred, Dst);
    break;
  case CFGElement::TemporaryDtor:
    ProcessTemporaryDtor(D.castAs<CFGTemporaryDtor>(), Pred, Dst);
    break;
  case CFGElement::DeleteDtor:
    ProcessDeleteDtor(D.castAs<CFGDeleteDtor>(), Pred, Dst);
    break;
  default:
    llvm_unreachable("Unexpected dtor kind.");
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessNewAllocator(const CXXNewExpr *NE,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  AnalysisManager &AMgr = getAnalysisManager();
  AnalyzerOptions &Opts = AMgr.options;
  // TODO: We're not evaluating allocators for all cases just yet as
  // we're not handling the return value correctly, which causes false
  // positives when the alpha.cplusplus.NewDeleteLeaks check is on.
  if (Opts.mayInlineCXXAllocator())
    VisitCXXNewAllocatorCall(NE, Pred, Dst);
  else {
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    const LocationContext *LCtx = Pred->getLocationContext();
    PostImplicitCall PP(NE->getOperatorNew(), NE->getLocStart(), LCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
  }
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessAutomaticObjDtor(const CFGAutomaticObjDtor Dtor,
                                         ExplodedNode *Pred,
                                         ExplodedNodeSet &Dst) {
  const VarDecl *varDecl = Dtor.getVarDecl();
  QualType varType = varDecl->getType();

  ProgramStateRef state = Pred->getState();
  SVal dest = state->getLValue(varDecl, Pred->getLocationContext());
  const MemRegion *Region = dest.castAs<loc::MemRegionVal>().getRegion();

  if (const ReferenceType *refType = varType->getAs<ReferenceType>()) {
    varType = refType->getPointeeType();
    Region = state->getSVal(Region).getAsRegion();
  }

  VisitCXXDestructor(varType, Region, Dtor.getTriggerStmt(), /*IsBase=*/ false,
                     Pred, Dst);
}

void ExprEngine::ProcessDeleteDtor(const CFGDeleteDtor Dtor,
                                   ExplodedNode *Pred,
                                   ExplodedNodeSet &Dst) {
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();
  const CXXDeleteExpr *DE = Dtor.getDeleteExpr();
  const Stmt *Arg = DE->getArgument();
  SVal ArgVal = State->getSVal(Arg, LCtx);

  // If the argument to delete is known to be a null value,
  // don't run destructor.
  if (State->isNull(ArgVal).isConstrainedTrue()) {
    QualType DTy = DE->getDestroyedType();
    QualType BTy = getContext().getBaseElementType(DTy);
    const CXXRecordDecl *RD = BTy->getAsCXXRecordDecl();
    const CXXDestructorDecl *Dtor = RD->getDestructor();

    PostImplicitCall PP(Dtor, DE->getLocStart(), LCtx);
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
    return;
  }

  VisitCXXDestructor(DE->getDestroyedType(),
                     ArgVal.getAsRegion(),
                     DE, /*IsBase=*/ false,
                     Pred, Dst);
}

void ExprEngine::ProcessBaseDtor(const CFGBaseDtor D,
                                 ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisPtr = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal ThisVal = Pred->getState()->getSVal(ThisPtr);

  // Create the base object region.
  const CXXBaseSpecifier *Base = D.getBaseSpecifier();
  QualType BaseTy = Base->getType();
  SVal BaseVal = getStoreManager().evalDerivedToBase(ThisVal, BaseTy,
                                                     Base->isVirtual());

  VisitCXXDestructor(BaseTy, BaseVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/ true, Pred, Dst);
}

void ExprEngine::ProcessMemberDtor(const CFGMemberDtor D,
                                   ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const FieldDecl *Member = D.getFieldDecl();
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisVal = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal FieldVal =
      State->getLValue(Member, State->getSVal(ThisVal).castAs<Loc>());

  VisitCXXDestructor(Member->getType(),
                     FieldVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/false, Pred, Dst);
}

void ExprEngine::ProcessTemporaryDtor(const CFGTemporaryDtor D,
                                      ExplodedNode *Pred,
                                      ExplodedNodeSet &Dst) {
  ExplodedNodeSet CleanDtorState;
  StmtNodeBuilder StmtBldr(Pred, CleanDtorState, *currBldrCtx);
  ProgramStateRef State = Pred->getState();
  if (State->contains<InitializedTemporariesSet>(
          std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()))) {
    // FIXME: Currently we insert temporary destructors for default parameters,
    // but we don't insert the constructors.
    State = State->remove<InitializedTemporariesSet>(
        std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()));
  }
  StmtBldr.generateNode(D.getBindTemporaryExpr(), Pred, State);

  QualType varType = D.getBindTemporaryExpr()->getSubExpr()->getType();
  // FIXME: Currently CleanDtorState can be empty here due to temporaries being
  // bound to default parameters.
  assert(CleanDtorState.size() <= 1);
  ExplodedNode *CleanPred =
      CleanDtorState.empty() ? Pred : *CleanDtorState.begin();
  // FIXME: Inlining of temporary destructors is not supported yet anyway, so
  // we just put a NULL region for now. This will need to be changed later.
  VisitCXXDestructor(varType, nullptr, D.getBindTemporaryExpr(),
                     /*IsBase=*/false, CleanPred, Dst);
}

void ExprEngine::processCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                               NodeBuilderContext &BldCtx,
                                               ExplodedNode *Pred,
                                               ExplodedNodeSet &Dst,
                                               const CFGBlock *DstT,
                                               const CFGBlock *DstF) {
  BranchNodeBuilder TempDtorBuilder(Pred, Dst, BldCtx, DstT, DstF);
  if (Pred->getState()->contains<InitializedTemporariesSet>(
          std::make_pair(BTE, Pred->getStackFrame()))) {
    TempDtorBuilder.markInfeasible(false);
    TempDtorBuilder.generateNode(Pred->getState(), true, Pred);
  } else {
    TempDtorBuilder.markInfeasible(true);
    TempDtorBuilder.generateNode(Pred->getState(), false, Pred);
  }
}

void ExprEngine::VisitCXXBindTemporaryExpr(const CXXBindTemporaryExpr *BTE,
                                           ExplodedNodeSet &PreVisit,
                                           ExplodedNodeSet &Dst) {
  if (!getAnalysisManager().options.includeTemporaryDtorsInCFG()) {
    // In case we don't have temporary destructors in the CFG, do not mark
    // the initialization - we would otherwise never clean it up.
    Dst = PreVisit;
    return;
  }
  StmtNodeBuilder StmtBldr(PreVisit, Dst, *currBldrCtx);
  for (ExplodedNode *Node : PreVisit) {
    ProgramStateRef State = Node->getState();

    if (!State->contains<InitializedTemporariesSet>(
            std::make_pair(BTE, Node->getStackFrame()))) {
      // FIXME: Currently the state might already contain the marker due to
      // incorrect handling of temporaries bound to default parameters; for
      // those, we currently skip the CXXBindTemporaryExpr but rely on adding
      // temporary destructor nodes.
      State = State->add<InitializedTemporariesSet>(
          std::make_pair(BTE, Node->getStackFrame()));
    }
    StmtBldr.generateNode(BTE, Node, State);
  }
}

void ExprEngine::Visit(const Stmt *S, ExplodedNode *Pred,
                       ExplodedNodeSet &DstTop) {
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                S->getLocStart(),
                                "Error evaluating statement");
  ExplodedNodeSet Dst;
  StmtNodeBuilder Bldr(Pred, DstTop, *currBldrCtx);

  assert(!isa<Expr>(S) || S == cast<Expr>(S)->IgnoreParens());

  switch (S->getStmtClass()) {
    // C++ and ARC stuff we don't support yet.
    case Expr::ObjCIndirectCopyRestoreExprClass:
    case Stmt::CXXDependentScopeMemberExprClass:
    case Stmt::CXXInheritedCtorInitExprClass:
    case Stmt::CXXTryStmtClass:
    case Stmt::CXXTypeidExprClass:
    case Stmt::CXXUuidofExprClass:
    case Stmt::CXXFoldExprClass:
    case Stmt::MSPropertyRefExprClass:
    case Stmt::MSPropertySubscriptExprClass:
    case Stmt::CXXUnresolvedConstructExprClass:
    case Stmt::DependentScopeDeclRefExprClass:
    case Stmt::ArrayTypeTraitExprClass:
    case Stmt::ExpressionTraitExprClass:
    case Stmt::UnresolvedLookupExprClass:
    case Stmt::UnresolvedMemberExprClass:
    case Stmt::TypoExprClass:
    case Stmt::CXXNoexceptExprClass:
    case Stmt::PackExpansionExprClass:
    case Stmt::SubstNonTypeTemplateParmPackExprClass:
    case Stmt::FunctionParmPackExprClass:
    case Stmt::CoroutineBodyStmtClass:
    case Stmt::CoawaitExprClass:
    case Stmt::CoreturnStmtClass:
    case Stmt::CoyieldExprClass:
    case Stmt::SEHTryStmtClass:
    case Stmt::SEHExceptStmtClass:
    case Stmt::SEHLeaveStmtClass:
    case Stmt::SEHFinallyStmtClass: {
      const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
      Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      break;
    }

    case Stmt::ParenExprClass:
      llvm_unreachable("ParenExprs already handled.");
    case Stmt::GenericSelectionExprClass:
      llvm_unreachable("GenericSelectionExprs already handled.");
    // Cases that should never be evaluated simply because they shouldn't
    // appear in the CFG.
    case Stmt::BreakStmtClass:
    case Stmt::CaseStmtClass:
    case Stmt::CompoundStmtClass:
    case Stmt::ContinueStmtClass:
    case Stmt::CXXForRangeStmtClass:
    case Stmt::DefaultStmtClass:
    case Stmt::DoStmtClass:
    case Stmt::ForStmtClass:
    case Stmt::GotoStmtClass:
    case Stmt::IfStmtClass:
    case Stmt::IndirectGotoStmtClass:
    case Stmt::LabelStmtClass:
    case Stmt::NoStmtClass:
    case Stmt::NullStmtClass:
    case Stmt::SwitchStmtClass:
    case Stmt::WhileStmtClass:
    case Expr::MSDependentExistsStmtClass:
    case Stmt::CapturedStmtClass:
    case Stmt::OMPParallelDirectiveClass:
    case Stmt::OMPSimdDirectiveClass:
    case Stmt::OMPForDirectiveClass:
    case Stmt::OMPForSimdDirectiveClass:
    case Stmt::OMPSectionsDirectiveClass:
    case Stmt::OMPSectionDirectiveClass:
    case Stmt::OMPSingleDirectiveClass:
    case Stmt::OMPMasterDirectiveClass:
    case Stmt::OMPCriticalDirectiveClass:
    case Stmt::OMPParallelForDirectiveClass:
    case Stmt::OMPParallelForSimdDirectiveClass:
    case Stmt::OMPParallelSectionsDirectiveClass:
    case Stmt::OMPTaskDirectiveClass:
    case Stmt::OMPTaskyieldDirectiveClass:
    case Stmt::OMPBarrierDirectiveClass:
    case Stmt::OMPTaskwaitDirectiveClass:
    case Stmt::OMPTaskgroupDirectiveClass:
    case Stmt::OMPFlushDirectiveClass:
    case Stmt::OMPOrderedDirectiveClass:
    case Stmt::OMPAtomicDirectiveClass:
    case Stmt::OMPTargetDirectiveClass:
    case Stmt::OMPTargetDataDirectiveClass:
    case Stmt::OMPTargetEnterDataDirectiveClass:
    case Stmt::OMPTargetExitDataDirectiveClass:
    case Stmt::OMPTargetParallelDirectiveClass:
    case Stmt::OMPTargetParallelForDirectiveClass:
    case Stmt::OMPTargetUpdateDirectiveClass:
    case Stmt::OMPTeamsDirectiveClass:
    case Stmt::OMPCancellationPointDirectiveClass:
    case Stmt::OMPCancelDirectiveClass:
    case Stmt::OMPTaskLoopDirectiveClass:
    case Stmt::OMPTaskLoopSimdDirectiveClass:
    case Stmt::OMPDistributeDirectiveClass:
    case Stmt::OMPDistributeParallelForDirectiveClass:
    case Stmt::OMPDistributeParallelForSimdDirectiveClass:
    case Stmt::OMPDistributeSimdDirectiveClass:
    case Stmt::OMPTargetParallelForSimdDirectiveClass:
      llvm_unreachable("Stmt should not be in analyzer evaluation loop");

    case Stmt::ObjCSubscriptRefExprClass:
    case Stmt::ObjCPropertyRefExprClass:
      llvm_unreachable("These are handled by PseudoObjectExpr");

    case Stmt::GNUNullExprClass: {
      // GNU __null is a pointer-width integer, not an actual pointer.
      ProgramStateRef state = Pred->getState();
      state = state->BindExpr(S, Pred->getLocationContext(),
                              svalBuilder.makeIntValWithPtrWidth(0, false));
      Bldr.generateNode(S, Pred, state);
      break;
    }

    case Stmt::ObjCAtSynchronizedStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCAtSynchronizedStmt(cast<ObjCAtSynchronizedStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ExprWithCleanupsClass:
      // Handled due to fully linearised CFG.
      break;

    case Stmt::CXXBindTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);
      ExplodedNodeSet Next;
      VisitCXXBindTemporaryExpr(cast<CXXBindTemporaryExpr>(S), PreVisit, Next);
      getCheckerManager().runCheckersForPostStmt(Dst, Next, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases not handled yet; but will handle some day.
    case Stmt::DesignatedInitExprClass:
    case Stmt::DesignatedInitUpdateExprClass:
    case Stmt::ExtVectorElementExprClass:
    case Stmt::ImaginaryLiteralClass:
    case Stmt::ObjCAtCatchStmtClass:
    case Stmt::ObjCAtFinallyStmtClass:
    case Stmt::ObjCAtTryStmtClass:
    case Stmt::ObjCAutoreleasePoolStmtClass:
    case Stmt::ObjCEncodeExprClass:
    case Stmt::ObjCIsaExprClass:
    case Stmt::ObjCProtocolExprClass:
    case Stmt::ObjCSelectorExprClass:
    case Stmt::ParenListExprClass:
    case Stmt::ShuffleVectorExprClass:
    case Stmt::ConvertVectorExprClass:
    case Stmt::VAArgExprClass:
    case Stmt::CUDAKernelCallExprClass:
    case Stmt::OpaqueValueExprClass:
    case Stmt::AsTypeExprClass:
      // Fall through.

    // Cases we intentionally don't evaluate, since they don't need
    // to be explicitly evaluated.
    case Stmt::PredefinedExprClass:
    case Stmt::AddrLabelExprClass:
    case Stmt::AttributedStmtClass:
    case Stmt::IntegerLiteralClass:
    case Stmt::CharacterLiteralClass:
    case Stmt::ImplicitValueInitExprClass:
    case Stmt::CXXScalarValueInitExprClass:
    case Stmt::CXXBoolLiteralExprClass:
    case Stmt::ObjCBoolLiteralExprClass:
    case Stmt::FloatingLiteralClass:
    case Stmt::NoInitExprClass:
    case Stmt::SizeOfPackExprClass:
    case Stmt::StringLiteralClass:
    case Stmt::ObjCStringLiteralClass:
    case Stmt::CXXPseudoDestructorExprClass:
    case Stmt::SubstNonTypeTemplateParmExprClass:
    case Stmt::CXXNullPtrLiteralExprClass:
    case Stmt::OMPArraySectionExprClass:
    case Stmt::TypeTraitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);
      getCheckerManager().runCheckersForPostStmt(Dst, preVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDefaultArgExprClass:
    case Stmt::CXXDefaultInitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(PreVisit, Tmp, *currBldrCtx);

      const Expr *ArgE;
      if (const CXXDefaultArgExpr *DefE = dyn_cast<CXXDefaultArgExpr>(S))
        ArgE = DefE->getExpr();
      else if (const CXXDefaultInitExpr *DefE = dyn_cast<CXXDefaultInitExpr>(S))
        ArgE = DefE->getExpr();
      else
        llvm_unreachable("unknown constant wrapper kind");

      bool IsTemporary = false;
      if (const MaterializeTemporaryExpr *MTE =
              dyn_cast<MaterializeTemporaryExpr>(ArgE)) {
        ArgE = MTE->GetTemporaryExpr();
        IsTemporary = true;
      }

      Optional<SVal> ConstantVal = svalBuilder.getConstantVal(ArgE);
      if (!ConstantVal)
        ConstantVal = UnknownVal();

      const LocationContext *LCtx = Pred->getLocationContext();
      for (ExplodedNodeSet::iterator I = PreVisit.begin(), E = PreVisit.end();
           I != E; ++I) {
        ProgramStateRef State = (*I)->getState();
        State = State->BindExpr(S, LCtx, *ConstantVal);
        if (IsTemporary)
          State = createTemporaryRegionIfNeeded(State, LCtx,
                                                cast<Expr>(S),
                                                cast<Expr>(S));
        Bldr2.generateNode(S, *I, State);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases we evaluate as opaque expressions, conjuring a symbol.
    case Stmt::CXXStdInitializerListExprClass:
    case Expr::ObjCArrayLiteralClass:
    case Expr::ObjCDictionaryLiteralClass:
    case Expr::ObjCBoxedExprClass: {
      Bldr.takeNodes(Pred);

      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(preVisit, Tmp, *currBldrCtx);

      const Expr *Ex = cast<Expr>(S);
      QualType resultType = Ex->getType();

      for (ExplodedNodeSet::iterator it = preVisit.begin(), et = preVisit.end();
           it != et; ++it) {
        ExplodedNode *N = *it;
        const LocationContext *LCtx = N->getLocationContext();
        SVal result = svalBuilder.conjureSymbolVal(nullptr, Ex, LCtx,
                                                   resultType,
                                                   currBldrCtx->blockCount());
        ProgramStateRef state = N->getState()->BindExpr(Ex, LCtx, result);
        Bldr2.generateNode(S, N, state);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::ArraySubscriptExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalArraySubscriptExpr(cast<ArraySubscriptExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::GCCAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitGCCAsmStmt(cast<GCCAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MSAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitMSAsmStmt(cast<MSAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BlockExprClass:
      Bldr.takeNodes(Pred);
      VisitBlockExpr(cast<BlockExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::LambdaExprClass:
      if (AMgr.options.shouldInlineLambdas()) {
        Bldr.takeNodes(Pred);
        VisitLambdaExpr(cast<LambdaExpr>(S), Pred, Dst);
        Bldr.addNodes(Dst);
      } else {
        const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
        Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      }
      break;

    case Stmt::BinaryOperatorClass: {
      const BinaryOperator* B = cast<BinaryOperator>(S);
      if (B->isLogicalOp()) {
        Bldr.takeNodes(Pred);
        VisitLogicalExpr(B, Pred, Dst);
        Bldr.addNodes(Dst);
        break;
      }
      else if (B->getOpcode() == BO_Comma) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(B, Pred,
                          state->BindExpr(B, Pred->getLocationContext(),
                                          state->getSVal(B->getRHS(),
                                              Pred->getLocationContext())));
        break;
      }

      Bldr.takeNodes(Pred);

      if (AMgr.options.eagerlyAssumeBinOpBifurcation &&
          (B->isRelationalOp() || B->isEqualityOp())) {
        ExplodedNodeSet Tmp;
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, cast<Expr>(S));
      }
      else
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);

      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXOperatorCallExprClass: {
      const CXXOperatorCallExpr *OCE = cast<CXXOperatorCallExpr>(S);

      // For instance method operators, make sure the 'this' argument has a
      // valid region.
      const Decl *Callee = OCE->getCalleeDecl();
      if (const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(Callee)) {
        if (MD->isInstance()) {
          ProgramStateRef State = Pred->getState();
          const LocationContext *LCtx = Pred->getLocationContext();
          ProgramStateRef NewState =
            createTemporaryRegionIfNeeded(State, LCtx, OCE->getArg(0));
          if (NewState != State) {
            Pred = Bldr.generateNode(OCE, Pred, NewState, /*Tag=*/nullptr,
                                     ProgramPoint::PreStmtKind);
            // Did we cache out?
            if (!Pred)
              break;
          }
        }
      }
      // FALLTHROUGH
    }
    case Stmt::CallExprClass:
    case Stmt::CXXMemberCallExprClass:
    case Stmt::UserDefinedLiteralClass: {
      Bldr.takeNodes(Pred);
      VisitCallExpr(cast<CallExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXCatchStmtClass: {
      Bldr.takeNodes(Pred);
      VisitCXXCatchStmt(cast<CXXCatchStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXTemporaryObjectExprClass:
    case Stmt::CXXConstructExprClass: {
      Bldr.takeNodes(Pred);
      VisitCXXConstructExpr(cast<CXXConstructExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXNewExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PostVisit;
      VisitCXXNewExpr(cast<CXXNewExpr>(S), Pred, PostVisit);
      getCheckerManager().runCheckersForPostStmt(Dst, PostVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDeleteExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      const CXXDeleteExpr *CDE = cast<CXXDeleteExpr>(S);
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      for (ExplodedNodeSet::iterator i = PreVisit.begin(),
                                     e = PreVisit.end(); i != e ; ++i)
        VisitCXXDeleteExpr(CDE, *i, Dst);

      Bldr.addNodes(Dst);
      break;
    }
    // FIXME: ChooseExpr is really a constant. We need to fix the CFG to not
    // model it as explicit control-flow.

    case Stmt::ChooseExprClass: { // __builtin_choose_expr
      Bldr.takeNodes(Pred);
      const ChooseExpr *C = cast<ChooseExpr>(S);
      VisitGuardedExpr(C, C->getLHS(), C->getRHS(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CompoundAssignOperatorClass:
      Bldr.takeNodes(Pred);
      VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::CompoundLiteralExprClass:
      Bldr.takeNodes(Pred);
      VisitCompoundLiteralExpr(cast<CompoundLiteralExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BinaryConditionalOperatorClass:
    case Stmt::ConditionalOperatorClass: { // '?' operator
      Bldr.takeNodes(Pred);
      const AbstractConditionalOperator *C
        = cast<AbstractConditionalOperator>(S);
      VisitGuardedExpr(C, C->getTrueExpr(), C->getFalseExpr(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXThisExprClass:
      Bldr.takeNodes(Pred);
      VisitCXXThisExpr(cast<CXXThisExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::DeclRefExprClass: {
      Bldr.takeNodes(Pred);
      const DeclRefExpr *DE = cast<DeclRefExpr>(S);
      VisitCommonDeclRefExpr(DE, DE->getDecl(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::DeclStmtClass:
      Bldr.takeNodes(Pred);
      VisitDeclStmt(cast<DeclStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ImplicitCastExprClass:
    case Stmt::CStyleCastExprClass:
    case Stmt::CXXStaticCastExprClass:
    case Stmt::CXXDynamicCastExprClass:
    case Stmt::CXXReinterpretCastExprClass:
    case Stmt::CXXConstCastExprClass:
    case Stmt::CXXFunctionalCastExprClass:
    case Stmt::ObjCBridgedCastExprClass: {
      Bldr.takeNodes(Pred);
      const CastExpr *C = cast<CastExpr>(S);
      // Handle the previsit checks.
      ExplodedNodeSet dstPrevisit;
      getCheckerManager().runCheckersForPreStmt(dstPrevisit, Pred, C, *this);

      // Handle the expression itself.
      ExplodedNodeSet dstExpr;
      for (ExplodedNodeSet::iterator i = dstPrevisit.begin(),
                                     e = dstPrevisit.end(); i != e ; ++i) {
        VisitCast(C, C->getSubExpr(), *i, dstExpr);
      }

      // Handle the postvisit checks.
      getCheckerManager().runCheckersForPostStmt(Dst, dstExpr, C, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Expr::MaterializeTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      const MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(S);
      CreateCXXTemporaryObject(MTE, Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::InitListExprClass:
      Bldr.takeNodes(Pred);
      VisitInitListExpr(cast<InitListExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MemberExprClass:
      Bldr.takeNodes(Pred);
      VisitMemberExpr(cast<MemberExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::AtomicExprClass:
      Bldr.takeNodes(Pred);
      VisitAtomicExpr(cast<AtomicExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCIvarRefExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalObjCIvarRefExpr(cast<ObjCIvarRefExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCForCollectionStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCForCollectionStmt(cast<ObjCForCollectionStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCMessageExprClass:
      Bldr.takeNodes(Pred);
      VisitObjCMessage(cast<ObjCMessageExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCAtThrowStmtClass:
    case Stmt::CXXThrowExprClass:
      // FIXME: This is not complete. We basically treat @throw as
      // an abort.
      Bldr.generateSink(S, Pred, Pred->getState());
      break;

    case Stmt::ReturnStmtClass:
      Bldr.takeNodes(Pred);
      VisitReturnStmt(cast<ReturnStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::OffsetOfExprClass:
      Bldr.takeNodes(Pred);
      VisitOffsetOfExpr(cast<OffsetOfExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::UnaryExprOrTypeTraitExprClass:
      Bldr.takeNodes(Pred);
      VisitUnaryExprOrTypeTraitExpr(cast<UnaryExprOrTypeTraitExpr>(S),
                                    Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::StmtExprClass: {
      const StmtExpr *SE = cast<StmtExpr>(S);

      if (SE->getSubStmt()->body_empty()) {
        // Empty statement expression.
        assert(SE->getType() == getContext().VoidTy
               && "Empty statement expression must have void type.");
        break;
      }

      if (Expr *LastExpr = dyn_cast<Expr>(*SE->getSubStmt()->body_rbegin())) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(SE, Pred,
                          state->BindExpr(SE, Pred->getLocationContext(),
                                          state->getSVal(LastExpr,
                                              Pred->getLocationContext())));
      }
      break;
    }

    case Stmt::UnaryOperatorClass: {
      Bldr.takeNodes(Pred);
      const UnaryOperator *U = cast<UnaryOperator>(S);
      if (AMgr.options.eagerlyAssumeBinOpBifurcation && (U->getOpcode() == UO_LNot)) {
        ExplodedNodeSet Tmp;
        VisitUnaryOperator(U, Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, U);
      }
      else
        VisitUnaryOperator(U, Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::PseudoObjectExprClass: {
      Bldr.takeNodes(Pred);
      ProgramStateRef state = Pred->getState();
      const PseudoObjectExpr *PE = cast<PseudoObjectExpr>(S);
      if (const Expr *Result = PE->getResultExpr()) {
        SVal V = state->getSVal(Result, Pred->getLocationContext());
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(), V));
      }
      else
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(),
                                          UnknownVal()));

      Bldr.addNodes(Dst);
      break;
    }
  }
}

bool ExprEngine::replayWithoutInlining(ExplodedNode *N,
                                       const LocationContext *CalleeLC) {
  const StackFrameContext *CalleeSF = CalleeLC->getCurrentStackFrame();
  const StackFrameContext *CallerSF = CalleeSF->getParent()->getCurrentStackFrame();
  assert(CalleeSF && CallerSF);
  ExplodedNode *BeforeProcessingCall = nullptr;
  const Stmt *CE = CalleeSF->getCallSite();

  // Find the first node before we started processing the call expression.
  while (N) {
    ProgramPoint L = N->getLocation();
    BeforeProcessingCall = N;
    N = N->pred_empty() ? nullptr : *(N->pred_begin());

    // Skip the nodes corresponding to the inlined code.
    if (L.getLocationContext()->getCurrentStackFrame() != CallerSF)
      continue;
    // We reached the caller. Find the node right before we started
    // processing the call.
    if (L.isPurgeKind())
      continue;
    if (L.getAs<PreImplicitCall>())
      continue;
    if (L.getAs<CallEnter>())
      continue;
    if (Optional<StmtPoint> SP = L.getAs<StmtPoint>())
      if (SP->getStmt() == CE)
        continue;
    break;
  }

  if (!BeforeProcessingCall)
    return false;

  // TODO: Clean up the unneeded nodes.

  // Build an Epsilon node from which we will restart the analysis.
  // Note that CE is permitted to be NULL!
  ProgramPoint NewNodeLoc =
      EpsilonPoint(BeforeProcessingCall->getLocationContext(), CE);
  // Add the special flag to GDM to signal retrying with no inlining.
  // Note, changing the state ensures that we are not going to cache out.
  ProgramStateRef NewNodeState = BeforeProcessingCall->getState();
  NewNodeState =
      NewNodeState->set<ReplayWithoutInlining>(const_cast<Stmt *>(CE));

  // Make the new node a successor of BeforeProcessingCall.
  bool IsNew = false;
  ExplodedNode *NewNode = G.getNode(NewNodeLoc, NewNodeState, false, &IsNew);
  // We cached out at this point. Caching out is common due to us backtracking
  // from the inlined function, which might spawn several paths.
  if (!IsNew)
    return true;

  NewNode->addPredecessor(BeforeProcessingCall, G);

  // Add the new node to the work list.
  Engine.enqueueStmtNode(NewNode, CalleeSF->getCallSiteBlock(),
                         CalleeSF->getIndex());
  NumTimesRetriedWithoutInlining++;
  return true;
}

/// Block entrance. (Update counters).
void ExprEngine::processCFGBlockEntrance(const BlockEdge &L,
                                         NodeBuilderWithSinks &nodeBuilder,
                                         ExplodedNode *Pred) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());

  // If this block is terminated by a loop and it has already been visited the
  // maximum number of times, widen the loop.
  unsigned int BlockCount = nodeBuilder.getContext().blockCount();
  if (BlockCount == AMgr.options.maxBlockVisitOnPath - 1 &&
      AMgr.options.shouldWidenLoops()) {
    const Stmt *Term = nodeBuilder.getContext().getBlock()->getTerminator();
    if (!(Term &&
          (isa<ForStmt>(Term) || isa<WhileStmt>(Term) || isa<DoStmt>(Term))))
      return;
    // Widen.
    const LocationContext *LCtx = Pred->getLocationContext();
    ProgramStateRef WidenedState =
        getWidenedLoopState(Pred->getState(), LCtx, BlockCount, Term);
    nodeBuilder.generateNode(WidenedState, Pred);
    return;
  }

  // FIXME: Refactor this into a checker.
  if (BlockCount >= AMgr.options.maxBlockVisitOnPath) {
    static SimpleProgramPointTag tag(TagProviderName, "Block count exceeded");
    const ExplodedNode *Sink =
        nodeBuilder.generateSink(Pred->getState(), Pred, &tag);

    // Check if we stopped at the top level function or not.
    // Root node should have the location context of the topmost function.
    const LocationContext *CalleeLC = Pred->getLocation().getLocationContext();
    const LocationContext *CalleeSF = CalleeLC->getCurrentStackFrame();
    const LocationContext *RootLC =
        (*G.roots_begin())->getLocation().getLocationContext();
    if (RootLC->getCurrentStackFrame() != CalleeSF) {
      Engine.FunctionSummaries->markReachedMaxBlockCount(CalleeSF->getDecl());

      // Re-run the call evaluation without inlining it, by storing the
      // no-inlining policy in the state and enqueuing the new work item on
      // the list. Replay should almost never fail. Use the stats to catch it
      // if it does.
      if ((!AMgr.options.NoRetryExhausted &&
           replayWithoutInlining(Pred, CalleeLC)))
        return;
      NumMaxBlockCountReachedInInlined++;
    } else
      NumMaxBlockCountReached++;

    // Mark sink nodes as exhausted (for stats) only if retry failed.
    Engine.blocksExhausted.push_back(std::make_pair(L, Sink));
  }
}

//===----------------------------------------------------------------------===//
// Branch processing.
//===----------------------------------------------------------------------===//

/// RecoverCastedSymbol - A helper function for ProcessBranch that is used
/// to try to recover some path-sensitivity for casts of symbolic
/// integers that promote their values (which are currently not tracked well).
/// This function returns the SVal bound to Condition->IgnoreCasts if all the
/// cast(s) did was sign-extend the original value.
static SVal RecoverCastedSymbol(ProgramStateManager& StateMgr,
                                ProgramStateRef state,
                                const Stmt *Condition,
                                const LocationContext *LCtx,
                                ASTContext &Ctx) {

  const Expr *Ex = dyn_cast<Expr>(Condition);
  if (!Ex)
    return UnknownVal();

  uint64_t bits = 0;
  bool bitsInit = false;

  while (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
    QualType T = CE->getType();

    if (!T->isIntegralOrEnumerationType())
      return UnknownVal();

    uint64_t newBits = Ctx.getTypeSize(T);
    if (!bitsInit || newBits < bits) {
      bitsInit = true;
      bits = newBits;
    }

    Ex = CE->getSubExpr();
  }

  // We reached a non-cast. Is it a symbolic value?
  QualType T = Ex->getType();

  if (!bitsInit || !T->isIntegralOrEnumerationType() ||
      Ctx.getTypeSize(T) > bits)
    return UnknownVal();

  return state->getSVal(Ex, LCtx);
}

#ifndef NDEBUG
static const Stmt *getRightmostLeaf(const Stmt *Condition) {
  while (Condition) {
    const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
    if (!BO || !BO->isLogicalOp()) {
      return Condition;
    }
    Condition = BO->getRHS()->IgnoreParens();
  }
  return nullptr;
}
#endif

// Returns the condition the branch at the end of 'B' depends on and whose
// value has been evaluated within 'B'.
// In most cases, the terminator condition of 'B' will be evaluated fully in
// the last statement of 'B'; in those cases, the resolved condition is the
// given 'Condition'.
// If the condition of the branch is a logical binary operator tree, the CFG is
// optimized: in that case, we know that the expression formed by all but the
// rightmost leaf of the logical binary operator tree must be true, and thus
// the branch condition is at this point equivalent to the truth value of that
// rightmost leaf; the CFG block thus only evaluates this rightmost leaf
// expression in its final statement. As the full condition in that case was
// not evaluated, and is thus not in the SVal cache, we need to use that leaf
// expression to evaluate the truth value of the condition in the current state
// space.
static const Stmt *ResolveCondition(const Stmt *Condition,
                                    const CFGBlock *B) {
  if (const Expr *Ex = dyn_cast<Expr>(Condition))
    Condition = Ex->IgnoreParens();

  const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
  if (!BO || !BO->isLogicalOp())
    return Condition;

  assert(!B->getTerminator().isTemporaryDtorsBranch() &&
         "Temporary destructor branches handled by processBindTemporary.");

  // For logical operations, we still have the case where some branches
  // use the traditional "merge" approach and others sink the branch
  // directly into the basic blocks representing the logical operation.
  // We need to distinguish between those two cases here.

void ExprEngine::processBranch(const Stmt *Condition, const Stmt *Term,
                               NodeBuilderContext& BldCtx,
                               ExplodedNode *Pred,
                               ExplodedNodeSet &Dst,
                               const CFGBlock *DstT,
                               const CFGBlock *DstF) {
  assert((!Condition || !isa<CXXBindTemporaryExpr>(Condition)) &&
         "CXXBindTemporaryExprs are handled by processBindTemporary.");
  const LocationContext *LCtx = Pred->getLocationContext();
  PrettyStackTraceLocationContext StackCrashInfo(LCtx);
  currBldrCtx = &BldCtx;

  // Check for NULL conditions; e.g. "for(;;)".
  if (!Condition) {
    BranchNodeBuilder NullCondBldr(Pred, Dst, BldCtx, DstT, DstF);
    NullCondBldr.markInfeasible(false);
    NullCondBldr.generateNode(Pred->getState(), true, Pred);
    return;
  }

  if (const Expr *Ex = dyn_cast<Expr>(Condition))
    Condition = Ex->IgnoreParens();

  Condition = ResolveCondition(Condition, BldCtx.getBlock());
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                Condition->getLocStart(),
                                "Error evaluating branch");

  ExplodedNodeSet CheckersOutSet;
  getCheckerManager().runCheckersForBranchCondition(Condition, CheckersOutSet,
                                                    Pred, *this);
  // We generated only sinks.
  if (CheckersOutSet.empty())
    return;

  BranchNodeBuilder builder(CheckersOutSet, Dst, BldCtx, DstT, DstF);
  for (NodeBuilder::iterator I = CheckersOutSet.begin(),
                             E = CheckersOutSet.end(); E != I; ++I) {
    ExplodedNode *PredI = *I;

    if (PredI->isSink())
      continue;

    ProgramStateRef PrevState = PredI->getState();
    SVal X = PrevState->getSVal(Condition, PredI->getLocationContext());

    if (X.isUnknownOrUndef()) {
      // Give it a chance to recover from unknown.
      if (const Expr *Ex = dyn_cast<Expr>(Condition)) {
        if (Ex->getType()->isIntegralOrEnumerationType()) {
          // Try to recover some path-sensitivity. Casts of symbolic integers
          // that promote their values are currently not tracked well. If
          // 'Condition' is such an expression, try to recover the underlying
          // value and use that instead.
          SVal recovered = RecoverCastedSymbol(getStateManager(),
                                               PrevState, Condition,
                                               PredI->getLocationContext(),
                                               getContext());

          if (!recovered.isUnknown()) {
            X = recovered;
          }
        }
      }
    }

    // If the condition is still unknown, give up.
    if (X.isUnknownOrUndef()) {
      builder.generateNode(PrevState, true, PredI);
      builder.generateNode(PrevState, false, PredI);
      continue;
    }

    DefinedSVal V = X.castAs<DefinedSVal>();

    ProgramStateRef StTrue, StFalse;
    std::tie(StTrue, StFalse) = PrevState->assume(V);

    // Process the true branch.
    if (builder.isFeasible(true)) {
      if (StTrue)
        builder.generateNode(StTrue, true, PredI);
      else
        builder.markInfeasible(true);
    }

    // Process the false branch.
    if (builder.isFeasible(false)) {
      if (StFalse)
        builder.generateNode(StFalse, false, PredI);
      else
        builder.markInfeasible(false);
    }
  }
  currBldrCtx = nullptr;
}

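// Illustrative sketch of the checker side of the callback that
// runCheckersForBranchCondition() above drives. A checker subscribing to
// check::BranchCondition sees the resolved condition before the engine
// assumes it true or false; the 'BranchSanityChecker' name below is purely
// hypothetical:
//
//   class BranchSanityChecker : public Checker<check::BranchCondition> {
//   public:
//     void checkBranchCondition(const Stmt *Condition,
//                               CheckerContext &C) const {
//       ProgramStateRef State = C.getState();
//       SVal Cond = State->getSVal(Condition, C.getLocationContext());
//       // A checker may inspect 'Cond', add constraints, or emit a sink
//       // node here; sinks make processBranch() drop the path.
//     }
//   };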

/// The GDM component containing the set of global variables which have been
/// previously initialized with explicit initializers.
REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedGlobalsSet,
                                 llvm::ImmutableSet<const VarDecl *>)

void ExprEngine::processStaticInitializer(const DeclStmt *DS,
                                          NodeBuilderContext &BuilderCtx,
                                          ExplodedNode *Pred,
                                          clang::ento::ExplodedNodeSet &Dst,
                                          const CFGBlock *DstT,
                                          const CFGBlock *DstF) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  currBldrCtx = &BuilderCtx;

  const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl());
  ProgramStateRef state = Pred->getState();
  bool initHasRun = state->contains<InitializedGlobalsSet>(VD);
  BranchNodeBuilder builder(Pred, Dst, BuilderCtx, DstT, DstF);

  if (!initHasRun) {
    state = state->add<InitializedGlobalsSet>(VD);
  }

  builder.generateNode(state, initHasRun, Pred);
  builder.markInfeasible(!initHasRun);

  currBldrCtx = nullptr;
}

/// processIndirectGoto - Called by CoreEngine. Used to generate successor
/// nodes by processing the 'effects' of a computed goto jump.
void ExprEngine::processIndirectGoto(IndirectGotoNodeBuilder &builder) {

  ProgramStateRef state = builder.getState();
  SVal V = state->getSVal(builder.getTarget(), builder.getLocationContext());

  // Three possibilities:
  //
  //   (1) We know the computed label.
  //   (2) The label is NULL (or some other constant), or Undefined.
  //   (3) We have no clue about the label. Dispatch to all targets.
  //

  typedef IndirectGotoNodeBuilder::iterator iterator;

  if (Optional<loc::GotoLabel> LV = V.getAs<loc::GotoLabel>()) {
    const LabelDecl *L = LV->getLabel();

    for (iterator I = builder.begin(), E = builder.end(); I != E; ++I) {
      if (I.getLabel() == L) {
        builder.generateNode(I, state);
        return;
      }
    }

    llvm_unreachable("No block with label.");
  }

  if (V.getAs<loc::ConcreteInt>() || V.getAs<UndefinedVal>()) {
    // Dispatch to the first target and mark it as a sink.
    //ExplodedNode* N = builder.generateNode(builder.begin(), state, true);
    // FIXME: add checker visit.
    //UndefBranches.insert(N);
    return;
  }

  // This is really a catch-all. We don't support symbolics yet.
  // FIXME: Implement dispatch for symbolic pointers.

  for (iterator I = builder.begin(), E = builder.end(); I != E; ++I)
    builder.generateNode(I, state);
}

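// Illustrative input for the three cases above (GNU C computed goto):
//
//   void dispatch(int i) {
//     static void *targets[] = { &&L1, &&L2 };
//     goto *targets[i];       // symbolic target: case (3), fan out to all
//   L1: return;
//   L2: return;
//   }
//
// A target that is a known &&label takes case (1); a constant or undefined
// target takes case (2); otherwise every successor block is enqueued.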

#if 0
static bool stackFrameDoesNotContainInitializedTemporaries(ExplodedNode &Pred) {
  const StackFrameContext* Frame = Pred.getStackFrame();
  const llvm::ImmutableSet<CXXBindTemporaryContext> &Set =
      Pred.getState()->get<InitializedTemporariesSet>();
  return std::find_if(Set.begin(), Set.end(),
                      [&](const CXXBindTemporaryContext &Ctx) {
                        if (Ctx.second == Frame) {
                          Ctx.first->dump();
                          llvm::errs() << "\n";
                        }
                        return Ctx.second == Frame;
                      }) == Set.end();
}
#endif

void ExprEngine::processBeginOfFunction(NodeBuilderContext &BC,
                                        ExplodedNode *Pred,
                                        ExplodedNodeSet &Dst,
                                        const BlockEdge &L) {
  SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC);
  getCheckerManager().runCheckersForBeginFunction(Dst, L, Pred, *this);
}

/// processEndOfFunction - Called by CoreEngine. Used to generate end-of-path
/// nodes when the control reaches the end of a function.
void ExprEngine::processEndOfFunction(NodeBuilderContext& BC,
                                      ExplodedNode *Pred) {
  // FIXME: Assert that stackFrameDoesNotContainInitializedTemporaries(*Pred).
  // We currently cannot enable this assert, as lifetime extended temporaries
  // are not modelled correctly.
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  StateMgr.EndPath(Pred->getState());

  ExplodedNodeSet Dst;
  if (Pred->getLocationContext()->inTopFrame()) {
    // Remove dead symbols.
    ExplodedNodeSet AfterRemovedDead;
    removeDeadOnEndOfFunction(BC, Pred, AfterRemovedDead);

    // Notify checkers.
    for (ExplodedNodeSet::iterator I = AfterRemovedDead.begin(),
         E = AfterRemovedDead.end(); I != E; ++I) {
      getCheckerManager().runCheckersForEndFunction(BC, Dst, *I, *this);
    }
  } else {
    getCheckerManager().runCheckersForEndFunction(BC, Dst, Pred, *this);
  }

  Engine.enqueueEndOfFunction(Dst);
}

/// processSwitch - Called by CoreEngine. Used to generate successor
/// nodes by processing the 'effects' of a switch statement.
void ExprEngine::processSwitch(SwitchNodeBuilder& builder) {
  typedef SwitchNodeBuilder::iterator iterator;
  ProgramStateRef state = builder.getState();
  const Expr *CondE = builder.getCondition();
  SVal CondV_untested = state->getSVal(CondE, builder.getLocationContext());

  if (CondV_untested.isUndef()) {
    //ExplodedNode* N = builder.generateDefaultCaseNode(state, true);
    // FIXME: add checker
    //UndefBranches.insert(N);

    return;
  }
  DefinedOrUnknownSVal CondV = CondV_untested.castAs<DefinedOrUnknownSVal>();

  ProgramStateRef DefaultSt = state;

  iterator I = builder.begin(), EI = builder.end();
  bool defaultIsFeasible = I == EI;

  for ( ; I != EI; ++I) {
    // Successor may be pruned out during CFG construction.
    if (!I.getBlock())
      continue;

    const CaseStmt *Case = I.getCase();

    // Evaluate the LHS of the case value.
    llvm::APSInt V1 = Case->getLHS()->EvaluateKnownConstInt(getContext());
    assert(V1.getBitWidth() == getContext().getTypeSize(CondE->getType()));

    // Get the RHS of the case, if it exists.
    llvm::APSInt V2;
    if (const Expr *E = Case->getRHS())
      V2 = E->EvaluateKnownConstInt(getContext());
    else
      V2 = V1;

    ProgramStateRef StateCase;
    if (Optional<NonLoc> NL = CondV.getAs<NonLoc>())
      std::tie(StateCase, DefaultSt) =
          DefaultSt->assumeWithinInclusiveRange(*NL, V1, V2);
    else // UnknownVal
      StateCase = DefaultSt;

    if (StateCase)
      builder.generateCaseStmtNode(I, StateCase);

    // Now "assume" that the case doesn't match. Add this state
    // to the default state (if it is feasible).
    if (DefaultSt)
      defaultIsFeasible = true;
    else {
      defaultIsFeasible = false;
      break;
    }
  }

  if (!defaultIsFeasible)
    return;

  // If we have switch(enum value), the default branch is not
  // feasible if all of the enum constants not covered by 'case:' statements
  // are not feasible values for the switch condition.
  //
  // Note that this isn't as accurate as it could be. Even if there isn't
  // a case for a particular enum value, as long as that enum value isn't
  // feasible it shouldn't be considered for making 'default:' reachable.
  const SwitchStmt *SS = builder.getSwitch();
  const Expr *CondExpr = SS->getCond()->IgnoreParenImpCasts();
  if (CondExpr->getType()->getAs<EnumType>()) {
    if (SS->isAllEnumCasesCovered())
      return;
  }

  builder.generateDefaultCaseNode(DefaultSt);
}

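// Worked example: given
//
//   enum Color { Red, Green, Blue };
//   switch (c) {
//   case Red:   ...
//   case Green: ...
//   case Blue:  ...
//   }
//
// each case narrows 'c' with assumeWithinInclusiveRange(), and DefaultSt is
// the state in which none of the case ranges matched. Because the condition
// has an enum type and isAllEnumCasesCovered() is true, the code above does
// not generate a 'default:' node, so paths where 'c' holds a value outside
// the enumeration are not explored.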

//===----------------------------------------------------------------------===//
// Transfer functions: Loads and stores.
//===----------------------------------------------------------------------===//

void ExprEngine::VisitCommonDeclRefExpr(const Expr *Ex, const NamedDecl *D,
                                        ExplodedNode *Pred,
                                        ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);

  ProgramStateRef state = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();

  if (const VarDecl *VD = dyn_cast<VarDecl>(D)) {
    // C permits "extern void v", and if you cast the address to a valid type,
    // you can even do things with it. We simply pretend
    assert(Ex->isGLValue() || VD->getType()->isVoidType());
    const LocationContext *LocCtxt = Pred->getLocationContext();
    const Decl *D = LocCtxt->getDecl();
    const auto *MD = D ? dyn_cast<CXXMethodDecl>(D) : nullptr;
    const auto *DeclRefEx = dyn_cast<DeclRefExpr>(Ex);
    SVal V;
    bool IsReference;
    if (AMgr.options.shouldInlineLambdas() && DeclRefEx &&
        DeclRefEx->refersToEnclosingVariableOrCapture() && MD &&
        MD->getParent()->isLambda()) {
      // Lookup the field of the lambda.
      const CXXRecordDecl *CXXRec = MD->getParent();
      llvm::DenseMap<const VarDecl *, FieldDecl *> LambdaCaptureFields;
      FieldDecl *LambdaThisCaptureField;
      CXXRec->getCaptureFields(LambdaCaptureFields, LambdaThisCaptureField);
      const FieldDecl *FD = LambdaCaptureFields[VD];
      if (!FD) {
        // When a constant is captured, sometimes no corresponding field is
        // created in the lambda object.
        assert(VD->getType().isConstQualified());
        V = state->getLValue(VD, LocCtxt);
        IsReference = false;
      } else {
        Loc CXXThis =
            svalBuilder.getCXXThis(MD, LocCtxt->getCurrentStackFrame());
        SVal CXXThisVal = state->getSVal(CXXThis);
        V = state->getLValue(FD, CXXThisVal);
        IsReference = FD->getType()->isReferenceType();
      }
    } else {
      V = state->getLValue(VD, LocCtxt);
      IsReference = VD->getType()->isReferenceType();
    }

    // For references, the 'lvalue' is the pointer address stored in the
    // reference region.
    if (IsReference) {
      if (const MemRegion *R = V.getAsRegion())
        V = state->getSVal(R);
      else
        V = UnknownVal();
    }

    Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr,
                      ProgramPoint::PostLValueKind);
    return;
  }
  if (const EnumConstantDecl *ED = dyn_cast<EnumConstantDecl>(D)) {
    assert(!Ex->isGLValue());
    SVal V = svalBuilder.makeIntVal(ED->getInitVal());
    Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V));
    return;
  }
  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
    SVal V = svalBuilder.getFunctionPointer(FD);
    Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr,
                      ProgramPoint::PostLValueKind);
    return;
  }
  if (isa<FieldDecl>(D)) {
    // FIXME: Compute lvalue of field pointers-to-member.
    // Right now we just use a non-null void pointer, so that it gives proper
    // results in boolean contexts.
    SVal V = svalBuilder.conjureSymbolVal(Ex, LCtx, getContext().VoidPtrTy,
                                          currBldrCtx->blockCount());
    state = state->assume(V.castAs<DefinedOrUnknownSVal>(), true);
    Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr,
                      ProgramPoint::PostLValueKind);
    return;
  }

  llvm_unreachable("Support for this Decl not implemented.");
}

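// Illustrative example: with lambda inlining enabled, a captured variable
// referenced inside the lambda's operator() is modelled as a field of the
// closure object. For
//
//   int n = 0;
//   auto f = [n]() { return n + 1; };  // 'n' in the body is a DeclRefExpr
//   f();                               // marked as a capture reference
//
// the lambda branch above looks up the FieldDecl for 'n' via
// getCaptureFields() and forms the lvalue of the closure's field rather than
// the lvalue of the enclosing function's local 'n'.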

/// VisitLvalArraySubscriptExpr - Transfer function for array accesses.
void ExprEngine::VisitLvalArraySubscriptExpr(const ArraySubscriptExpr *A,
                                             ExplodedNode *Pred,
                                             ExplodedNodeSet &Dst){

  const Expr *Base = A->getBase()->IgnoreParens();
  const Expr *Idx  = A->getIdx()->IgnoreParens();

  ExplodedNodeSet checkerPreStmt;
  getCheckerManager().runCheckersForPreStmt(checkerPreStmt, Pred, A, *this);

  StmtNodeBuilder Bldr(checkerPreStmt, Dst, *currBldrCtx);
  assert(A->isGLValue() ||
         (!AMgr.getLangOpts().CPlusPlus &&
          A->getType().isCForbiddenLValueType()));

  for (ExplodedNodeSet::iterator it = checkerPreStmt.begin(),
                                 ei = checkerPreStmt.end(); it != ei; ++it) {
    const LocationContext *LCtx = (*it)->getLocationContext();
    ProgramStateRef state = (*it)->getState();
    SVal V = state->getLValue(A->getType(),
                              state->getSVal(Idx, LCtx),
                              state->getSVal(Base, LCtx));
    Bldr.generateNode(A, *it, state->BindExpr(A, LCtx, V), nullptr,
                      ProgramPoint::PostLValueKind);
  }
}

/// VisitMemberExpr - Transfer function for member expressions.
void ExprEngine::VisitMemberExpr(const MemberExpr *M, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {

  // FIXME: Prechecks eventually go in ::Visit().
  ExplodedNodeSet CheckedSet;
  getCheckerManager().runCheckersForPreStmt(CheckedSet, Pred, M, *this);

  ExplodedNodeSet EvalSet;
  ValueDecl *Member = M->getMemberDecl();

  // Handle static member variables and enum constants accessed via
  // member syntax.
  if (isa<VarDecl>(Member) || isa<EnumConstantDecl>(Member)) {
    for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
         I != E; ++I) {
      VisitCommonDeclRefExpr(M, Member, *I, EvalSet);
    }
  } else {
    StmtNodeBuilder Bldr(CheckedSet, EvalSet, *currBldrCtx);
    ExplodedNodeSet Tmp;

    for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
         I != E; ++I) {
      ProgramStateRef state = (*I)->getState();
      const LocationContext *LCtx = (*I)->getLocationContext();
      Expr *BaseExpr = M->getBase();

      // Handle C++ method calls.
      if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(Member)) {
        if (MD->isInstance())
          state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr);

        SVal MDVal = svalBuilder.getFunctionPointer(MD);
        state = state->BindExpr(M, LCtx, MDVal);

        Bldr.generateNode(M, *I, state);
        continue;
      }

      // Handle regular struct fields / member variables.
      state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr);
      SVal baseExprVal = state->getSVal(BaseExpr, LCtx);

      FieldDecl *field = cast<FieldDecl>(Member);
      SVal L = state->getLValue(field, baseExprVal);

      if (M->isGLValue() || M->getType()->isArrayType()) {
        // We special-case rvalues of array type because the analyzer cannot
        // reason about them, since we expect all regions to be wrapped in
        // Locs. We instead treat these as lvalues and assume that they will
        // decay to pointers as soon as they are used.
        if (!M->isGLValue()) {
          assert(M->getType()->isArrayType());
          const ImplicitCastExpr *PE =
              dyn_cast<ImplicitCastExpr>((*I)->getParentMap().getParent(M));
          if (!PE || PE->getCastKind() != CK_ArrayToPointerDecay) {
            llvm_unreachable("should always be wrapped in ArrayToPointerDecay");
          }
        }

        if (field->getType()->isReferenceType()) {
          if (const MemRegion *R = L.getAsRegion())
            L = state->getSVal(R);
          else
            L = UnknownVal();
        }

        Bldr.generateNode(M, *I, state->BindExpr(M, LCtx, L), nullptr,
                          ProgramPoint::PostLValueKind);
      } else {
        Bldr.takeNodes(*I);
        evalLoad(Tmp, M, M, *I, state, L);
        Bldr.addNodes(Tmp);
      }
    }
  }

  getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, M, *this);
}

void ExprEngine::VisitAtomicExpr(const AtomicExpr *AE, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  ExplodedNodeSet AfterPreSet;
  getCheckerManager().runCheckersForPreStmt(AfterPreSet, Pred, AE, *this);

  // For now, treat all the arguments to C11 atomics as escaping.
  // FIXME: Ideally we should model the behavior of the atomics precisely here.

  ExplodedNodeSet AfterInvalidateSet;
  StmtNodeBuilder Bldr(AfterPreSet, AfterInvalidateSet, *currBldrCtx);

  for (ExplodedNodeSet::iterator I = AfterPreSet.begin(), E = AfterPreSet.end();
       I != E; ++I) {
    ProgramStateRef State = (*I)->getState();
    const LocationContext *LCtx = (*I)->getLocationContext();

    SmallVector<SVal, 8> ValuesToInvalidate;
    for (unsigned SI = 0, Count = AE->getNumSubExprs(); SI != Count; SI++) {
      const Expr *SubExpr = AE->getSubExprs()[SI];
      SVal SubExprVal = State->getSVal(SubExpr, LCtx);
      ValuesToInvalidate.push_back(SubExprVal);
    }

    State = State->invalidateRegions(ValuesToInvalidate, AE,
                                     currBldrCtx->blockCount(),
                                     LCtx,
                                     /*CausedByPointerEscape*/true,
                                     /*Symbols=*/nullptr);

    SVal ResultVal = UnknownVal();
    State = State->BindExpr(AE, LCtx, ResultVal);
    Bldr.generateNode(AE, *I, State, nullptr,
                      ProgramPoint::PostStmtKind);
  }

  getCheckerManager().runCheckersForPostStmt(Dst, AfterInvalidateSet, AE, *this);
}

namespace {
class CollectReachableSymbolsCallback final : public SymbolVisitor {
  InvalidatedSymbols Symbols;

public:
  CollectReachableSymbolsCallback(ProgramStateRef State) {}
  const InvalidatedSymbols &getSymbols() const { return Symbols; }

  bool VisitSymbol(SymbolRef Sym) override {
    Symbols.insert(Sym);
    return true;
  }
};
} // end anonymous namespace

// A value escapes in three possible cases:
// (1) We are binding to something that is not a memory region.
// (2) We are binding to a MemRegion that does not have stack storage.
// (3) We are binding to a MemRegion with stack storage that the store
//     does not understand.
ProgramStateRef ExprEngine::processPointerEscapedOnBind(ProgramStateRef State,
                                                        SVal Loc, SVal Val) {
  // Are we storing to something that causes the value to "escape"?
  bool escapes = true;

  // TODO: Move to StoreManager.
  if (Optional<loc::MemRegionVal> regionLoc = Loc.getAs<loc::MemRegionVal>()) {
    escapes = !regionLoc->getRegion()->hasStackStorage();

    if (!escapes) {
      // To test (3), generate a new state with the binding added. If it is
      // the same state, then it escapes (since the store cannot represent
      // the binding).
      // Do this only if we know that the store is not supposed to generate
      // the same state.
      SVal StoredVal = State->getSVal(regionLoc->getRegion());
      if (StoredVal != Val)
        escapes = (State == (State->bindLoc(*regionLoc, Val)));
    }
  }

  // If our store can represent the binding and we aren't storing to something
  // that doesn't have local storage, then just return and have the simulation
  // state continue as is.
  if (!escapes)
    return State;

  // Otherwise, find all symbols referenced by 'Val' that we are tracking
  // and stop tracking them.
  CollectReachableSymbolsCallback Scanner =
      State->scanReachableSymbols<CollectReachableSymbolsCallback>(Val);
  const InvalidatedSymbols &EscapedSymbols = Scanner.getSymbols();
  State = getCheckerManager().runCheckersForPointerEscape(State,
                                                          EscapedSymbols,
                                                          /*CallEvent*/ nullptr,
                                                          PSK_EscapeOnBind,
                                                          nullptr);

  return State;
}

ProgramStateRef
ExprEngine::notifyCheckersOfPointerEscape(ProgramStateRef State,
    const InvalidatedSymbols *Invalidated,
    ArrayRef<const MemRegion *> ExplicitRegions,
    ArrayRef<const MemRegion *> Regions,
    const CallEvent *Call,
    RegionAndSymbolInvalidationTraits &ITraits) {

  if (!Invalidated || Invalidated->empty())
    return State;

  if (!Call)
    return getCheckerManager().runCheckersForPointerEscape(State,
                                                           *Invalidated,
                                                           nullptr,
                                                           PSK_EscapeOther,
                                                           &ITraits);

  // If the symbols were invalidated by a call, we want to find out which ones
  // were invalidated directly due to being arguments to the call.
  InvalidatedSymbols SymbolsDirectlyInvalidated;
  for (ArrayRef<const MemRegion *>::iterator I = ExplicitRegions.begin(),
       E = ExplicitRegions.end(); I != E; ++I) {
    if (const SymbolicRegion *R = (*I)->StripCasts()->getAs<SymbolicRegion>())
      SymbolsDirectlyInvalidated.insert(R->getSymbol());
  }

  InvalidatedSymbols SymbolsIndirectlyInvalidated;
  for (InvalidatedSymbols::const_iterator I = Invalidated->begin(),
       E = Invalidated->end(); I != E; ++I) {
    SymbolRef sym = *I;
    if (SymbolsDirectlyInvalidated.count(sym))
      continue;
    SymbolsIndirectlyInvalidated.insert(sym);
  }

  if (!SymbolsDirectlyInvalidated.empty())
    State = getCheckerManager().runCheckersForPointerEscape(State,
        SymbolsDirectlyInvalidated, Call, PSK_DirectEscapeOnCall, &ITraits);

  // Notify about the symbols that get indirectly invalidated by the call.
  if (!SymbolsIndirectlyInvalidated.empty())
    State = getCheckerManager().runCheckersForPointerEscape(State,
        SymbolsIndirectlyInvalidated, Call, PSK_IndirectEscapeOnCall, &ITraits);

  return State;
}

/// evalBind - Handle the semantics of binding a value to a specific location.
/// This method is used by evalStore and (soon) VisitDeclStmt, and others.
void ExprEngine::evalBind(ExplodedNodeSet &Dst, const Stmt *StoreE,
                          ExplodedNode *Pred,
                          SVal location, SVal Val,
                          bool atDeclInit, const ProgramPoint *PP) {

  const LocationContext *LC = Pred->getLocationContext();
  PostStmt PS(StoreE, LC);
  if (!PP)
    PP = &PS;

  // Do a previsit of the bind.
  ExplodedNodeSet CheckedSet;
  getCheckerManager().runCheckersForBind(CheckedSet, Pred, location, Val,
                                         StoreE, *this, *PP);

  StmtNodeBuilder Bldr(CheckedSet, Dst, *currBldrCtx);

  // If the location is not a 'Loc', it will already be handled by
  // the checkers. There is nothing left to do.
  if (!location.getAs<Loc>()) {
    const ProgramPoint L = PostStore(StoreE, LC, /*Loc*/nullptr,
                                     /*tag*/nullptr);
    ProgramStateRef state = Pred->getState();
    state = processPointerEscapedOnBind(state, location, Val);
    Bldr.generateNode(L, state, Pred);
    return;
  }

  for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
       I != E; ++I) {
    ExplodedNode *PredI = *I;
    ProgramStateRef state = PredI->getState();

    state = processPointerEscapedOnBind(state, location, Val);

    // When binding the value, pass on the hint that this is an initialization.
    // For initializations, we do not need to inform clients of region
    // changes.
    state = state->bindLoc(location.castAs<Loc>(),
                           Val, /* notifyChanges = */ !atDeclInit);

    const MemRegion *LocReg = nullptr;
    if (Optional<loc::MemRegionVal> LocRegVal =
            location.getAs<loc::MemRegionVal>()) {
      LocReg = LocRegVal->getRegion();
    }

    const ProgramPoint L = PostStore(StoreE, LC, LocReg, nullptr);
    Bldr.generateNode(L, state, PredI);
  }
}

/// evalStore - Handle the semantics of a store via an assignment.
///  @param Dst The node set to store generated state nodes
///  @param AssignE The assignment expression if the store happens in an
///         assignment.
///  @param LocationE The location expression that is stored to.
///  @param state The current simulation state
///  @param location The location to store the value
///  @param Val The value to be stored
void ExprEngine::evalStore(ExplodedNodeSet &Dst, const Expr *AssignE,
                           const Expr *LocationE,
                           ExplodedNode *Pred,
                           ProgramStateRef state, SVal location, SVal Val,
                           const ProgramPointTag *tag) {
  // Proceed with the store. We use AssignE as the anchor for the PostStore
  // ProgramPoint if it is non-NULL, and LocationE otherwise.
  const Expr *StoreE = AssignE ? AssignE : LocationE;

  // Evaluate the location (checks for bad dereferences).
  ExplodedNodeSet Tmp;
  evalLocation(Tmp, AssignE, LocationE, Pred, state, location, tag, false);

  if (Tmp.empty())
    return;

  if (location.isUndef())
    return;

  for (ExplodedNodeSet::iterator NI = Tmp.begin(), NE = Tmp.end(); NI != NE; ++NI)
    evalBind(Dst, StoreE, *NI, location, Val, false);
}

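// Worked example: for an assignment such as
//
//   *p = v;
//
// evalStore() first runs evalLocation() on 'p' so checkers can flag a bad
// dereference, and then calls evalBind() on each surviving node to bind the
// value of 'v' to the pointee region. If 'p' points at a region without
// stack storage (say, a global), processPointerEscapedOnBind() above reports
// the stored value's symbols as escaping via PSK_EscapeOnBind.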

void ExprEngine::evalLoad(ExplodedNodeSet &Dst,
                          const Expr *NodeEx,
                          const Expr *BoundEx,
                          ExplodedNode *Pred,
                          ProgramStateRef state,
                          SVal location,
                          const ProgramPointTag *tag,
                          QualType LoadTy) {
  assert(!location.getAs<NonLoc>() && "location cannot be a NonLoc.");

  // Are we loading from a region? This actually results in two loads; one
  // to fetch the address of the referenced value and one to fetch the
  // referenced value.
  if (const TypedValueRegion *TR =
        dyn_cast_or_null<TypedValueRegion>(location.getAsRegion())) {

    QualType ValTy = TR->getValueType();
    if (const ReferenceType *RT = ValTy->getAs<ReferenceType>()) {
      static SimpleProgramPointTag
             loadReferenceTag(TagProviderName, "Load Reference");
      ExplodedNodeSet Tmp;
      evalLoadCommon(Tmp, NodeEx, BoundEx, Pred, state,
                     location, &loadReferenceTag,
                     getContext().getPointerType(RT->getPointeeType()));

      // Perform the load from the referenced value.
      for (ExplodedNodeSet::iterator I = Tmp.begin(), E = Tmp.end(); I != E; ++I) {
        state = (*I)->getState();
        location = state->getSVal(BoundEx, (*I)->getLocationContext());
        evalLoadCommon(Dst, NodeEx, BoundEx, *I, state, location, tag, LoadTy);
      }
      return;
    }
  }

  evalLoadCommon(Dst, NodeEx, BoundEx, Pred, state, location, tag, LoadTy);
}

void ExprEngine::evalLoadCommon(ExplodedNodeSet &Dst,
                                const Expr *NodeEx,
                                const Expr *BoundEx,
                                ExplodedNode *Pred,
                                ProgramStateRef state,
                                SVal location,
                                const ProgramPointTag *tag,
                                QualType LoadTy) {
  assert(NodeEx);
  assert(BoundEx);
  // Evaluate the location (checks for bad dereferences).
  ExplodedNodeSet Tmp;
  evalLocation(Tmp, NodeEx, BoundEx, Pred, state, location, tag, true);
  if (Tmp.empty())
    return;

  StmtNodeBuilder Bldr(Tmp, Dst, *currBldrCtx);
  if (location.isUndef())
    return;

  // Proceed with the load.
  for (ExplodedNodeSet::iterator NI = Tmp.begin(), NE = Tmp.end(); NI != NE; ++NI) {
    state = (*NI)->getState();
    const LocationContext *LCtx = (*NI)->getLocationContext();

    SVal V = UnknownVal();
    if (location.isValid()) {
      if (LoadTy.isNull())
        LoadTy = BoundEx->getType();
      V = state->getSVal(location.castAs<Loc>(), LoadTy);
    }

    Bldr.generateNode(NodeEx, *NI, state->BindExpr(BoundEx, LCtx, V), tag,
                      ProgramPoint::PostLoadKind);
  }
}

void ExprEngine::evalLocation(ExplodedNodeSet &Dst,
                              const Stmt *NodeEx,
                              const Stmt *BoundEx,
                              ExplodedNode *Pred,
                              ProgramStateRef state,
                              SVal location,
                              const ProgramPointTag *tag,
                              bool isLoad) {
  StmtNodeBuilder BldrTop(Pred, Dst, *currBldrCtx);
  // Early checks for performance reasons.
  if (location.isUnknown()) {
    return;
  }

  ExplodedNodeSet Src;
  BldrTop.takeNodes(Pred);
  StmtNodeBuilder Bldr(Pred, Src, *currBldrCtx);
  if (Pred->getState() != state) {
    // Associate this new state with an ExplodedNode.
    // FIXME: If I pass null tag, the graph is incorrect, e.g for
    //   int *p;
    //   p = 0;
    //   *p = 0xDEADBEEF;
    // "p = 0" is not noted as "Null pointer value stored to 'p'" but
    // instead "int *p" is noted as
    // "Variable 'p' initialized to a null pointer value"

    static SimpleProgramPointTag tag(TagProviderName, "Location");
    Bldr.generateNode(NodeEx, Pred, state, &tag);
  }
  ExplodedNodeSet Tmp;
  getCheckerManager().runCheckersForLocation(Tmp, Src, location, isLoad,
                                             NodeEx, BoundEx, *this);
  BldrTop.addNodes(Tmp);
}

std::pair<const ProgramPointTag *, const ProgramPointTag*>
ExprEngine::geteagerlyAssumeBinOpBifurcationTags() {
  static SimpleProgramPointTag
         eagerlyAssumeBinOpBifurcationTrue(TagProviderName,
                                           "Eagerly Assume True"),
         eagerlyAssumeBinOpBifurcationFalse(TagProviderName,
                                            "Eagerly Assume False");
  return std::make_pair(&eagerlyAssumeBinOpBifurcationTrue,
                        &eagerlyAssumeBinOpBifurcationFalse);
}

void ExprEngine::evalEagerlyAssumeBinOpBifurcation(ExplodedNodeSet &Dst,
                                                   ExplodedNodeSet &Src,
                                                   const Expr *Ex) {
  StmtNodeBuilder Bldr(Src, Dst, *currBldrCtx);

  for (ExplodedNodeSet::iterator I = Src.begin(), E = Src.end(); I != E; ++I) {
    ExplodedNode *Pred = *I;
    // Test if the previous node was the same expression. This can happen
    // when the expression fails to evaluate to anything meaningful and
    // (as an optimization) we don't generate a node.
    ProgramPoint P = Pred->getLocation();
    if (!P.getAs<PostStmt>() || P.castAs<PostStmt>().getStmt() != Ex) {
      continue;
    }

    ProgramStateRef state = Pred->getState();
    SVal V = state->getSVal(Ex, Pred->getLocationContext());
    Optional<nonloc::SymbolVal> SEV = V.getAs<nonloc::SymbolVal>();
    if (SEV && SEV->isExpression()) {
      const std::pair<const ProgramPointTag *, const ProgramPointTag*> &tags =
          geteagerlyAssumeBinOpBifurcationTags();

      ProgramStateRef StateTrue, StateFalse;
      std::tie(StateTrue, StateFalse) = state->assume(*SEV);

      // First assume that the condition is true.
      if (StateTrue) {
        SVal Val = svalBuilder.makeIntVal(1U, Ex->getType());
        StateTrue = StateTrue->BindExpr(Ex, Pred->getLocationContext(), Val);
        Bldr.generateNode(Ex, Pred, StateTrue, tags.first);
      }

      // Next, assume that the condition is false.
      if (StateFalse) {
        SVal Val = svalBuilder.makeIntVal(0U, Ex->getType());
        StateFalse = StateFalse->BindExpr(Ex, Pred->getLocationContext(), Val);
        Bldr.generateNode(Ex, Pred, StateFalse, tags.second);
      }
    }
  }
}

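// Worked example: with eager assumptions enabled, a statement such as
//
//   int positive = (x > 0);
//
// does not leave 'positive' bound to the opaque symbolic expression "x > 0".
// Instead the code above bifurcates the state immediately: one successor
// assumes x > 0 and binds 1 to the expression, the other assumes x <= 0 and
// binds 0, which keeps later branches on 'positive' precise.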

void ExprEngine::VisitGCCAsmStmt(const GCCAsmStmt *A, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  // We have processed both the inputs and the outputs. All of the outputs
  // should evaluate to Locs. Nuke all of their values.

  // FIXME: Some day in the future it would be nice to allow a "plug-in"
  // which interprets the inline asm and stores proper results in the
  // outputs.

  ProgramStateRef state = Pred->getState();

  for (const Expr *O : A->outputs()) {
    SVal X = state->getSVal(O, Pred->getLocationContext());
    assert(!X.getAs<NonLoc>());  // Should be an Lval, or unknown, undef.

    if (Optional<Loc> LV = X.getAs<Loc>())
      state = state->bindLoc(*LV, UnknownVal());
  }

  Bldr.generateNode(A, Pred, state);
}

void ExprEngine::VisitMSAsmStmt(const MSAsmStmt *A, ExplodedNode *Pred,
                                ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  Bldr.generateNode(A, Pred, Pred->getState());
}

//===----------------------------------------------------------------------===//
// Visualization.
//===----------------------------------------------------------------------===//

#ifndef NDEBUG
static ExprEngine* GraphPrintCheckerState;
static SourceManager* GraphPrintSourceManager;

namespace llvm {
template<>
struct DOTGraphTraits<ExplodedNode*> : public DefaultDOTGraphTraits {

  DOTGraphTraits (bool isSimple = false) : DefaultDOTGraphTraits(isSimple) {}

  // FIXME: Since we do not cache error nodes in ExprEngine now, this does not
  // work.
  static std::string getNodeAttributes(const ExplodedNode *N, void*) {

#if 0
    // FIXME: Replace with a general scheme to tell if the node is
    // an error node.
    if (GraphPrintCheckerState->isImplicitNullDeref(N) ||
        GraphPrintCheckerState->isExplicitNullDeref(N) ||
        GraphPrintCheckerState->isUndefDeref(N) ||
        GraphPrintCheckerState->isUndefStore(N) ||
        GraphPrintCheckerState->isUndefControlFlow(N) ||
        GraphPrintCheckerState->isUndefResult(N) ||
        GraphPrintCheckerState->isBadCall(N) ||
        GraphPrintCheckerState->isUndefArg(N))
      return "color=\"red\",style=\"filled\"";

    if (GraphPrintCheckerState->isNoReturnCall(N))
      return "color=\"blue\",style=\"filled\"";
#endif
    return "";
  }

  static void printLocation(raw_ostream &Out, SourceLocation SLoc) {
    if (SLoc.isFileID()) {
      Out << "\\lline="
          << GraphPrintSourceManager->getExpansionLineNumber(SLoc)
          << " col="
          << GraphPrintSourceManager->getExpansionColumnNumber(SLoc)
          << "\\l";
    }
  }

  static std::string getNodeLabel(const ExplodedNode *N, void*){

    std::string sbuf;
    llvm::raw_string_ostream Out(sbuf);

    // Program Location.
    ProgramPoint Loc = N->getLocation();

    switch (Loc.getKind()) {
      case ProgramPoint::BlockEntranceKind: {
        Out << "Block Entrance: B"
            << Loc.castAs<BlockEntrance>().getBlock()->getBlockID();
        if (const NamedDecl *ND =
                dyn_cast<NamedDecl>(Loc.getLocationContext()->getDecl())) {
          Out << " (";
          ND->printName(Out);
          Out << ")";
        }
        break;
      }

      case ProgramPoint::BlockExitKind:
        assert(false);
        break;

      case ProgramPoint::CallEnterKind:
        Out << "CallEnter";
        break;

      case ProgramPoint::CallExitBeginKind:
        Out << "CallExitBegin";
        break;

      case ProgramPoint::CallExitEndKind:
        Out << "CallExitEnd";
        break;

      case ProgramPoint::PostStmtPurgeDeadSymbolsKind:
        Out << "PostStmtPurgeDeadSymbols";
        break;

      case ProgramPoint::PreStmtPurgeDeadSymbolsKind:
        Out << "PreStmtPurgeDeadSymbols";
        break;

      case ProgramPoint::EpsilonKind:
        Out << "Epsilon Point";
        break;

      case ProgramPoint::PreImplicitCallKind: {
        ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>();
        Out << "PreCall: ";

        // FIXME: Get proper printing options.
        PC.getDecl()->print(Out, LangOptions());
        printLocation(Out, PC.getLocation());
        break;
      }

      case ProgramPoint::PostImplicitCallKind: {
        ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>();
        Out << "PostCall: ";

        // FIXME: Get proper printing options.
        PC.getDecl()->print(Out, LangOptions());
        printLocation(Out, PC.getLocation());
        break;
      }

      case ProgramPoint::PostInitializerKind: {
        Out << "PostInitializer: ";
        const CXXCtorInitializer *Init =
            Loc.castAs<PostInitializer>().getInitializer();
        if (const FieldDecl *FD = Init->getAnyMember())
          Out << *FD;
        else {
          QualType Ty = Init->getTypeSourceInfo()->getType();
          Ty = Ty.getLocalUnqualifiedType();
          LangOptions LO; // FIXME.
          Ty.print(Out, LO);
        }
        break;
      }

      case ProgramPoint::BlockEdgeKind: {
        const BlockEdge &E = Loc.castAs<BlockEdge>();
        Out << "Edge: (B" << E.getSrc()->getBlockID() << ", B"
            << E.getDst()->getBlockID() << ')';

        if (const Stmt *T = E.getSrc()->getTerminator()) {
          SourceLocation SLoc = T->getLocStart();

          Out << "\\|Terminator: ";
          LangOptions LO; // FIXME.
          E.getSrc()->printTerminator(Out, LO);

          if (SLoc.isFileID()) {
            Out << "\\lline="
                << GraphPrintSourceManager->getExpansionLineNumber(SLoc)
                << " col="
                << GraphPrintSourceManager->getExpansionColumnNumber(SLoc);
          }

          if (isa<SwitchStmt>(T)) {
            const Stmt *Label = E.getDst()->getLabel();

            if (Label) {
              if (const CaseStmt *C = dyn_cast<CaseStmt>(Label)) {
                Out << "\\lcase ";
                LangOptions LO; // FIXME.
                if (C->getLHS())
                  C->getLHS()->printPretty(Out, nullptr, PrintingPolicy(LO));

                if (const Stmt *RHS = C->getRHS()) {
                  Out << " .. ";
                  RHS->printPretty(Out, nullptr, PrintingPolicy(LO));
                }

                Out << ":";
              }
              else {
                assert(isa<DefaultStmt>(Label));
                Out << "\\ldefault:";
              }
            }
            else
              Out << "\\l(implicit) default:";
          }
          else if (isa<IndirectGotoStmt>(T)) {
            // FIXME
          }
          else {
            Out << "\\lCondition: ";
            if (*E.getSrc()->succ_begin() == E.getDst())
              Out << "true";
            else
              Out << "false";
          }

          Out << "\\l";
        }

#if 0
        // FIXME: Replace with a general scheme to determine
        // the name of the check.
        if (GraphPrintCheckerState->isUndefControlFlow(N)) {
          Out << "\\|Control-flow based on\\lUndefined value.\\l";
        }
#endif
        break;
      }

      default: {
        const Stmt *S = Loc.castAs<StmtPoint>().getStmt();
        assert(S != nullptr && "Expecting non-null Stmt");

        Out << S->getStmtClassName() << ' ' << (const void*) S << ' ';
        LangOptions LO; // FIXME.
        S->printPretty(Out, nullptr, PrintingPolicy(LO));
        printLocation(Out, S->getLocStart());

        if (Loc.getAs<PreStmt>())
          Out << "\\lPreStmt\\l;";
        else if (Loc.getAs<PostLoad>())
          Out << "\\lPostLoad\\l;";
        else if (Loc.getAs<PostStore>())
          Out << "\\lPostStore\\l";
        else if (Loc.getAs<PostLValue>())
          Out << "\\lPostLValue\\l";

#if 0
        // FIXME: Replace with a general scheme to determine
        // the name of the check.
        if (GraphPrintCheckerState->isImplicitNullDeref(N))
          Out << "\\|Implicit-Null Dereference.\\l";
        else if (GraphPrintCheckerState->isExplicitNullDeref(N))
          Out << "\\|Explicit-Null Dereference.\\l";
        else if (GraphPrintCheckerState->isUndefDeref(N))
          Out << "\\|Dereference of undefined value.\\l";
        else if (GraphPrintCheckerState->isUndefStore(N))
          Out << "\\|Store to Undefined Loc.";
        else if (GraphPrintCheckerState->isUndefResult(N))
          Out << "\\|Result of operation is undefined.";
        else if (GraphPrintCheckerState->isNoReturnCall(N))
          Out << "\\|Call to function marked \"noreturn\".";
        else if (GraphPrintCheckerState->isBadCall(N))
          Out << "\\|Call to NULL/Undefined.";
        else if (GraphPrintCheckerState->isUndefArg(N))
          Out << "\\|Argument in call is undefined";
#endif

        break;
      }
    }

    ProgramStateRef state = N->getState();
    Out << "\\|StateID: " << (const void*) state.get()
        << " NodeID: " << (const void*) N << "\\|";
    state->printDOT(Out);

    Out << "\\l";

    if (const ProgramPointTag *tag = Loc.getTag()) {
      Out << "\\|Tag: " << tag->getTagDescription();
      Out << "\\l";
    }
    return Out.str();
  }
};
} // end llvm namespace
#endif

void ExprEngine::ViewGraph(bool trim) {
#ifndef NDEBUG
  if (trim) {
    std::vector<const ExplodedNode*> Src;

    // Flush any outstanding reports to make sure we cover all the nodes.
    // This does not cause them to get displayed.
    for (BugReporter::iterator I = BR.begin(), E = BR.end(); I != E; ++I)
      const_cast<BugType*>(*I)->FlushReports(BR);

    // Iterate through the reports and get their nodes.
    for (BugReporter::EQClasses_iterator
           EI = BR.EQClasses_begin(), EE = BR.EQClasses_end(); EI != EE; ++EI) {
      ExplodedNode *N = const_cast<ExplodedNode*>(EI->begin()->getErrorNode());
      if (N) Src.push_back(N);
    }

    ViewGraph(Src);
  }
  else {
    GraphPrintCheckerState = this;
    GraphPrintSourceManager = &getContext().getSourceManager();

    llvm::ViewGraph(*G.roots_begin(), "ExprEngine");

    GraphPrintCheckerState = nullptr;
    GraphPrintSourceManager = nullptr;
  }
#endif
}

void ExprEngine::ViewGraph(ArrayRef<const ExplodedNode*> Nodes) {
#ifndef NDEBUG
  GraphPrintCheckerState = this;
  GraphPrintSourceManager = &getContext().getSourceManager();

  std::unique_ptr<ExplodedGraph> TrimmedG(G.trim(Nodes));

  if (!TrimmedG.get())
    llvm::errs() << "warning: Trimmed ExplodedGraph is empty.\n";
  else
    llvm::ViewGraph(*TrimmedG->roots_begin(), "TrimmedExprEngine");

  GraphPrintCheckerState = nullptr;
  GraphPrintSourceManager = nullptr;
#endif
}
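
// Usage note (a sketch of typical debugging workflow, not normative): in
// assert-enabled builds the ViewGraph() overloads above are usually reached
// through the analyzer's debugging options, for example a cc1 invocation
// along the lines of
//
//   clang -cc1 -analyze -analyzer-checker=core \
//         -analyzer-viz-egraph-graphviz test.c
//
// which renders the ExplodedGraph with GraphViz; the exact flag spelling may
// differ between releases.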