// (code-browser export header removed: Home | History | Annotate | Download | only in Sema)
      1 //=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=//
      2 //
      3 //                     The LLVM Compiler Infrastructure
      4 //
      5 // This file is distributed under the University of Illinois Open Source
      6 // License. See LICENSE.TXT for details.
      7 //
      8 //===----------------------------------------------------------------------===//
      9 //
     10 // This file defines analysis_warnings::[Policy,Executor].
     11 // Together they are used by Sema to issue warnings based on inexpensive
     12 // static analysis algorithms in libAnalysis.
     13 //
     14 //===----------------------------------------------------------------------===//
     15 
     16 #include "clang/Sema/AnalysisBasedWarnings.h"
     17 #include "clang/AST/DeclCXX.h"
     18 #include "clang/AST/DeclObjC.h"
     19 #include "clang/AST/EvaluatedExprVisitor.h"
     20 #include "clang/AST/ExprCXX.h"
     21 #include "clang/AST/ExprObjC.h"
     22 #include "clang/AST/ParentMap.h"
     23 #include "clang/AST/RecursiveASTVisitor.h"
     24 #include "clang/AST/StmtCXX.h"
     25 #include "clang/AST/StmtObjC.h"
     26 #include "clang/AST/StmtVisitor.h"
     27 #include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
     28 #include "clang/Analysis/Analyses/ReachableCode.h"
     29 #include "clang/Analysis/Analyses/ThreadSafety.h"
     30 #include "clang/Analysis/Analyses/UninitializedValues.h"
     31 #include "clang/Analysis/AnalysisContext.h"
     32 #include "clang/Analysis/CFG.h"
     33 #include "clang/Analysis/CFGStmtMap.h"
     34 #include "clang/Basic/SourceLocation.h"
     35 #include "clang/Basic/SourceManager.h"
     36 #include "clang/Lex/Lexer.h"
     37 #include "clang/Lex/Preprocessor.h"
     38 #include "clang/Sema/ScopeInfo.h"
     39 #include "clang/Sema/SemaInternal.h"
     40 #include "llvm/ADT/ArrayRef.h"
     41 #include "llvm/ADT/BitVector.h"
     42 #include "llvm/ADT/FoldingSet.h"
     43 #include "llvm/ADT/ImmutableMap.h"
     44 #include "llvm/ADT/MapVector.h"
     45 #include "llvm/ADT/PostOrderIterator.h"
     46 #include "llvm/ADT/SmallString.h"
     47 #include "llvm/ADT/SmallVector.h"
     48 #include "llvm/ADT/StringRef.h"
     49 #include "llvm/Support/Casting.h"
     50 #include <algorithm>
     51 #include <deque>
     52 #include <iterator>
     53 #include <vector>
     54 
     55 using namespace clang;
     56 
     57 //===----------------------------------------------------------------------===//
     58 // Unreachable code analysis.
     59 //===----------------------------------------------------------------------===//
     60 
     61 namespace {
     62   class UnreachableCodeHandler : public reachable_code::Callback {
     63     Sema &S;
     64   public:
     65     UnreachableCodeHandler(Sema &s) : S(s) {}
     66 
     67     void HandleUnreachable(SourceLocation L, SourceRange R1, SourceRange R2) {
     68       S.Diag(L, diag::warn_unreachable) << R1 << R2;
     69     }
     70   };
     71 }
     72 
     73 /// CheckUnreachable - Check for unreachable code.
     74 static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
     75   UnreachableCodeHandler UC(S);
     76   reachable_code::FindUnreachableCode(AC, UC);
     77 }
     78 
     79 //===----------------------------------------------------------------------===//
     80 // Check for missing return value.
     81 //===----------------------------------------------------------------------===//
     82 
     83 enum ControlFlowKind {        // Result of the CheckFallThrough CFG walk below.
     84   UnknownFallThrough,         // No CFG was available to analyze.
     85   NeverFallThrough,           // Never falls off the end, but may return.
     86   MaybeFallThrough,           // Might or might not fall off the end.
     87   AlwaysFallThrough,          // Always falls off the end.
     88   NeverFallThroughOrReturn    // Neither falls off the end nor returns.
     89 };
     90 
     91 /// CheckFallThrough - Check that we don't fall off the end of a
     92 /// Statement that should return a value.
     93 ///
     94 /// \returns AlwaysFallThrough iff we always fall off the end of the statement,
     95 /// MaybeFallThrough iff we might or might not fall off the end,
     96 /// NeverFallThroughOrReturn iff we never fall off the end of the statement or
     97 /// return.  We assume NeverFallThrough iff we never fall off the end of the
     98 /// statement but we may return.  We assume that functions not marked noreturn
     99 /// will return.
    100 static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
    101   CFG *cfg = AC.getCFG();
    102   if (cfg == 0) return UnknownFallThrough;
    103 
    104   // The CFG leaves in dead things, and we don't want the dead code paths to
    105   // confuse us, so we mark all live things first.
    106   llvm::BitVector live(cfg->getNumBlockIDs());
    107   unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
    108                                                           live);
    109 
    110   bool AddEHEdges = AC.getAddEHEdges();
    111   if (!AddEHEdges && count != cfg->getNumBlockIDs())
    112     // When there are things remaining dead, and we didn't add EH edges
    113     // from CallExprs to the catch clauses, we have to go back and
    114     // mark them as live.
    115     for (CFG::iterator I = cfg->begin(), E = cfg->end(); I != E; ++I) {
    116       CFGBlock &b = **I;
    117       if (!live[b.getBlockID()]) {
    118         if (b.pred_begin() == b.pred_end()) {
    119           if (b.getTerminator() && isa<CXXTryStmt>(b.getTerminator()))
    120             // When not adding EH edges from calls, catch clauses
    121             // can otherwise seem dead.  Avoid noting them as dead.
    122             count += reachable_code::ScanReachableFromBlock(&b, live);
    123           continue;
    124         }
    125       }
    126     }
    127 
    128   // Now we know what is live, we check the live precessors of the exit block
    129   // and look for fall through paths, being careful to ignore normal returns,
    130   // and exceptional paths.
    131   bool HasLiveReturn = false;
    132   bool HasFakeEdge = false;
    133   bool HasPlainEdge = false;
    134   bool HasAbnormalEdge = false;
    135 
    136   // Ignore default cases that aren't likely to be reachable because all
    137   // enums in a switch(X) have explicit case statements.
    138   CFGBlock::FilterOptions FO;
    139   FO.IgnoreDefaultsWithCoveredEnums = 1;
    140 
    141   for (CFGBlock::filtered_pred_iterator
    142 	 I = cfg->getExit().filtered_pred_start_end(FO); I.hasMore(); ++I) {
    143     const CFGBlock& B = **I;
    144     if (!live[B.getBlockID()])
    145       continue;
    146 
    147     // Skip blocks which contain an element marked as no-return. They don't
    148     // represent actually viable edges into the exit block, so mark them as
    149     // abnormal.
    150     if (B.hasNoReturnElement()) {
    151       HasAbnormalEdge = true;
    152       continue;
    153     }
    154 
    155     // Destructors can appear after the 'return' in the CFG.  This is
    156     // normal.  We need to look pass the destructors for the return
    157     // statement (if it exists).
    158     CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();
    159 
    160     for ( ; ri != re ; ++ri)
    161       if (ri->getAs<CFGStmt>())
    162         break;
    163 
    164     // No more CFGElements in the block?
    165     if (ri == re) {
    166       if (B.getTerminator() && isa<CXXTryStmt>(B.getTerminator())) {
    167         HasAbnormalEdge = true;
    168         continue;
    169       }
    170       // A labeled empty statement, or the entry block...
    171       HasPlainEdge = true;
    172       continue;
    173     }
    174 
    175     CFGStmt CS = ri->castAs<CFGStmt>();
    176     const Stmt *S = CS.getStmt();
    177     if (isa<ReturnStmt>(S)) {
    178       HasLiveReturn = true;
    179       continue;
    180     }
    181     if (isa<ObjCAtThrowStmt>(S)) {
    182       HasFakeEdge = true;
    183       continue;
    184     }
    185     if (isa<CXXThrowExpr>(S)) {
    186       HasFakeEdge = true;
    187       continue;
    188     }
    189     if (isa<MSAsmStmt>(S)) {
    190       // TODO: Verify this is correct.
    191       HasFakeEdge = true;
    192       HasLiveReturn = true;
    193       continue;
    194     }
    195     if (isa<CXXTryStmt>(S)) {
    196       HasAbnormalEdge = true;
    197       continue;
    198     }
    199     if (std::find(B.succ_begin(), B.succ_end(), &cfg->getExit())
    200         == B.succ_end()) {
    201       HasAbnormalEdge = true;
    202       continue;
    203     }
    204 
    205     HasPlainEdge = true;
    206   }
    207   if (!HasPlainEdge) {
    208     if (HasLiveReturn)
    209       return NeverFallThrough;
    210     return NeverFallThroughOrReturn;
    211   }
    212   if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
    213     return MaybeFallThrough;
    214   // This says AlwaysFallThrough for calls to functions that are not marked
    215   // noreturn, that don't return.  If people would like this warning to be more
    216   // accurate, such functions should be marked as noreturn.
    217   return AlwaysFallThrough;
    218 }
    219 
    220 namespace {
    221 
    222 struct CheckFallThroughDiagnostics {
    223   unsigned diag_MaybeFallThrough_HasNoReturn;
    224   unsigned diag_MaybeFallThrough_ReturnsNonVoid;
    225   unsigned diag_AlwaysFallThrough_HasNoReturn;
    226   unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
    227   unsigned diag_NeverFallThroughOrReturn;
    228   enum { Function, Block, Lambda } funMode;
    229   SourceLocation FuncLoc;
    230 
    231   static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
    232     CheckFallThroughDiagnostics D;
    233     D.FuncLoc = Func->getLocation();
    234     D.diag_MaybeFallThrough_HasNoReturn =
    235       diag::warn_falloff_noreturn_function;
    236     D.diag_MaybeFallThrough_ReturnsNonVoid =
    237       diag::warn_maybe_falloff_nonvoid_function;
    238     D.diag_AlwaysFallThrough_HasNoReturn =
    239       diag::warn_falloff_noreturn_function;
    240     D.diag_AlwaysFallThrough_ReturnsNonVoid =
    241       diag::warn_falloff_nonvoid_function;
    242 
    243     // Don't suggest that virtual functions be marked "noreturn", since they
    244     // might be overridden by non-noreturn functions.
    245     bool isVirtualMethod = false;
    246     if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
    247       isVirtualMethod = Method->isVirtual();
    248 
    249     // Don't suggest that template instantiations be marked "noreturn"
    250     bool isTemplateInstantiation = false;
    251     if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func))
    252       isTemplateInstantiation = Function->isTemplateInstantiation();
    253 
    254     if (!isVirtualMethod && !isTemplateInstantiation)
    255       D.diag_NeverFallThroughOrReturn =
    256         diag::warn_suggest_noreturn_function;
    257     else
    258       D.diag_NeverFallThroughOrReturn = 0;
    259 
    260     D.funMode = Function;
    261     return D;
    262   }
    263 
    264   static CheckFallThroughDiagnostics MakeForBlock() {
    265     CheckFallThroughDiagnostics D;
    266     D.diag_MaybeFallThrough_HasNoReturn =
    267       diag::err_noreturn_block_has_return_expr;
    268     D.diag_MaybeFallThrough_ReturnsNonVoid =
    269       diag::err_maybe_falloff_nonvoid_block;
    270     D.diag_AlwaysFallThrough_HasNoReturn =
    271       diag::err_noreturn_block_has_return_expr;
    272     D.diag_AlwaysFallThrough_ReturnsNonVoid =
    273       diag::err_falloff_nonvoid_block;
    274     D.diag_NeverFallThroughOrReturn =
    275       diag::warn_suggest_noreturn_block;
    276     D.funMode = Block;
    277     return D;
    278   }
    279 
    280   static CheckFallThroughDiagnostics MakeForLambda() {
    281     CheckFallThroughDiagnostics D;
    282     D.diag_MaybeFallThrough_HasNoReturn =
    283       diag::err_noreturn_lambda_has_return_expr;
    284     D.diag_MaybeFallThrough_ReturnsNonVoid =
    285       diag::warn_maybe_falloff_nonvoid_lambda;
    286     D.diag_AlwaysFallThrough_HasNoReturn =
    287       diag::err_noreturn_lambda_has_return_expr;
    288     D.diag_AlwaysFallThrough_ReturnsNonVoid =
    289       diag::warn_falloff_nonvoid_lambda;
    290     D.diag_NeverFallThroughOrReturn = 0;
    291     D.funMode = Lambda;
    292     return D;
    293   }
    294 
    295   bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
    296                         bool HasNoReturn) const {
    297     if (funMode == Function) {
    298       return (ReturnsVoid ||
    299               D.getDiagnosticLevel(diag::warn_maybe_falloff_nonvoid_function,
    300                                    FuncLoc) == DiagnosticsEngine::Ignored)
    301         && (!HasNoReturn ||
    302             D.getDiagnosticLevel(diag::warn_noreturn_function_has_return_expr,
    303                                  FuncLoc) == DiagnosticsEngine::Ignored)
    304         && (!ReturnsVoid ||
    305             D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
    306               == DiagnosticsEngine::Ignored);
    307     }
    308 
    309     // For blocks / lambdas.
    310     return ReturnsVoid && !HasNoReturn
    311             && ((funMode == Lambda) ||
    312                 D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
    313                   == DiagnosticsEngine::Ignored);
    314   }
    315 };
    316 
    317 }
    318 
    319 /// CheckFallThroughForFunctionDef - Check that we don't fall off the end of a
    320 /// function that should return a value.  Check that we don't fall off the end
    321 /// of a noreturn function.  We assume that functions and blocks not marked
    322 /// noreturn will return.
    323 static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
    324                                     const BlockExpr *blkExpr,
    325                                     const CheckFallThroughDiagnostics& CD,
    326                                     AnalysisDeclContext &AC) {
    327 
    328   bool ReturnsVoid = false;
    329   bool HasNoReturn = false;
    330 
    331   if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
    332     ReturnsVoid = FD->getResultType()->isVoidType();
    333     HasNoReturn = FD->isNoReturn();
    334   }
    335   else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    336     ReturnsVoid = MD->getResultType()->isVoidType();
    337     HasNoReturn = MD->hasAttr<NoReturnAttr>();
    338   }
    339   else if (isa<BlockDecl>(D)) {
    340     QualType BlockTy = blkExpr->getType();
    341     if (const FunctionType *FT =
    342           BlockTy->getPointeeType()->getAs<FunctionType>()) {
    343       if (FT->getResultType()->isVoidType())
    344         ReturnsVoid = true;
    345       if (FT->getNoReturnAttr())
    346         HasNoReturn = true;
    347     }
    348   }
    349 
    350   DiagnosticsEngine &Diags = S.getDiagnostics();
    351 
    352   // Short circuit for compilation speed.
    353   if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
    354       return;
    355 
    356   // FIXME: Function try block
    357   if (const CompoundStmt *Compound = dyn_cast<CompoundStmt>(Body)) {
    358     switch (CheckFallThrough(AC)) {
    359       case UnknownFallThrough:
    360         break;
    361 
    362       case MaybeFallThrough:
    363         if (HasNoReturn)
    364           S.Diag(Compound->getRBracLoc(),
    365                  CD.diag_MaybeFallThrough_HasNoReturn);
    366         else if (!ReturnsVoid)
    367           S.Diag(Compound->getRBracLoc(),
    368                  CD.diag_MaybeFallThrough_ReturnsNonVoid);
    369         break;
    370       case AlwaysFallThrough:
    371         if (HasNoReturn)
    372           S.Diag(Compound->getRBracLoc(),
    373                  CD.diag_AlwaysFallThrough_HasNoReturn);
    374         else if (!ReturnsVoid)
    375           S.Diag(Compound->getRBracLoc(),
    376                  CD.diag_AlwaysFallThrough_ReturnsNonVoid);
    377         break;
    378       case NeverFallThroughOrReturn:
    379         if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
    380           if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
    381             S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn)
    382               << 0 << FD;
    383           } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    384             S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn)
    385               << 1 << MD;
    386           } else {
    387             S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn);
    388           }
    389         }
    390         break;
    391       case NeverFallThrough:
    392         break;
    393     }
    394   }
    395 }
    396 
    397 //===----------------------------------------------------------------------===//
    398 // -Wuninitialized
    399 //===----------------------------------------------------------------------===//
    400 
    401 namespace {
    402 /// ContainsReference - A visitor class to search for references to
    403 /// a particular declaration (the needle) within any evaluated component of an
    404 /// expression (recursively).
    405 class ContainsReference : public EvaluatedExprVisitor<ContainsReference> {
    406   bool FoundReference;
    407   const DeclRefExpr *Needle;
    408 
    409 public:
    410   ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
    411     : EvaluatedExprVisitor<ContainsReference>(Context),
    412       FoundReference(false), Needle(Needle) {}
    413 
    414   void VisitExpr(Expr *E) {
    415     // Stop evaluating if we already have a reference.
    416     if (FoundReference)
    417       return;
    418 
    419     EvaluatedExprVisitor<ContainsReference>::VisitExpr(E);
    420   }
    421 
    422   void VisitDeclRefExpr(DeclRefExpr *E) {
    423     if (E == Needle)
    424       FoundReference = true;
    425     else
    426       EvaluatedExprVisitor<ContainsReference>::VisitDeclRefExpr(E);
    427   }
    428 
    429   bool doesContainReference() const { return FoundReference; }
    430 };
    431 }
    432 
    433 static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
    434   QualType VariableTy = VD->getType().getCanonicalType();
    435   if (VariableTy->isBlockPointerType() &&
    436       !VD->hasAttr<BlocksAttr>()) {
    437     S.Diag(VD->getLocation(), diag::note_block_var_fixit_add_initialization) << VD->getDeclName()
    438     << FixItHint::CreateInsertion(VD->getLocation(), "__block ");
    439     return true;
    440   }
    441 
    442   // Don't issue a fixit if there is already an initializer.
    443   if (VD->getInit())
    444     return false;
    445 
    446   // Suggest possible initialization (if any).
    447   std::string Init = S.getFixItZeroInitializerForType(VariableTy);
    448   if (Init.empty())
    449     return false;
    450 
    451   // Don't suggest a fixit inside macros.
    452   if (VD->getLocEnd().isMacroID())
    453     return false;
    454 
    455   SourceLocation Loc = S.PP.getLocForEndOfToken(VD->getLocEnd());
    456 
    457   S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName()
    458     << FixItHint::CreateInsertion(Loc, Init);
    459   return true;
    460 }
    461 
    462 /// Create a fixit to remove an if-like statement, on the assumption that its
    463 /// condition is CondVal.
    464 static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
    465                           const Stmt *Else, bool CondVal,
    466                           FixItHint &Fixit1, FixItHint &Fixit2) {
    467   if (CondVal) {
    468     // If condition is always true, remove all but the 'then'.
    469     Fixit1 = FixItHint::CreateRemoval(
    470         CharSourceRange::getCharRange(If->getLocStart(),
    471                                       Then->getLocStart()));
    472     if (Else) {
    473       SourceLocation ElseKwLoc = Lexer::getLocForEndOfToken(
    474           Then->getLocEnd(), 0, S.getSourceManager(), S.getLangOpts());
    475       Fixit2 = FixItHint::CreateRemoval(
    476           SourceRange(ElseKwLoc, Else->getLocEnd()));
    477     }
    478   } else {
    479     // If condition is always false, remove all but the 'else'.
    480     if (Else)
    481       Fixit1 = FixItHint::CreateRemoval(
    482           CharSourceRange::getCharRange(If->getLocStart(),
    483                                         Else->getLocStart()));
    484     else
    485       Fixit1 = FixItHint::CreateRemoval(If->getSourceRange());
    486   }
    487 }
    488 
    489 /// DiagUninitUse -- Helper function to produce a diagnostic for an
    490 /// uninitialized use of a variable.
    491 static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
    492                           bool IsCapturedByBlock) {
    493   bool Diagnosed = false;
    494 
    495   // Diagnose each branch which leads to a sometimes-uninitialized use.
    496   for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
    497        I != E; ++I) {
    498     assert(Use.getKind() == UninitUse::Sometimes);
    499 
    500     const Expr *User = Use.getUser();
    501     const Stmt *Term = I->Terminator;
    502 
    503     // Information used when building the diagnostic.
    504     unsigned DiagKind;
    505     StringRef Str;
    506     SourceRange Range;
    507 
    508     // FixIts to suppress the diagnostic by removing the dead condition.
    509     // For all binary terminators, branch 0 is taken if the condition is true,
    510     // and branch 1 is taken if the condition is false.
    511     int RemoveDiagKind = -1;
    512     const char *FixitStr =
    513         S.getLangOpts().CPlusPlus ? (I->Output ? "true" : "false")
    514                                   : (I->Output ? "1" : "0");
    515     FixItHint Fixit1, Fixit2;
    516 
    517     switch (Term->getStmtClass()) {
    518     default:
    519       // Don't know how to report this. Just fall back to 'may be used
    520       // uninitialized'. This happens for range-based for, which the user
    521       // can't explicitly fix.
    522       // FIXME: This also happens if the first use of a variable is always
    523       // uninitialized, eg "for (int n; n < 10; ++n)". We should report that
    524       // with the 'is uninitialized' diagnostic.
    525       continue;
    526 
    527     // "condition is true / condition is false".
    528     case Stmt::IfStmtClass: {
    529       const IfStmt *IS = cast<IfStmt>(Term);
    530       DiagKind = 0;
    531       Str = "if";
    532       Range = IS->getCond()->getSourceRange();
    533       RemoveDiagKind = 0;
    534       CreateIfFixit(S, IS, IS->getThen(), IS->getElse(),
    535                     I->Output, Fixit1, Fixit2);
    536       break;
    537     }
    538     case Stmt::ConditionalOperatorClass: {
    539       const ConditionalOperator *CO = cast<ConditionalOperator>(Term);
    540       DiagKind = 0;
    541       Str = "?:";
    542       Range = CO->getCond()->getSourceRange();
    543       RemoveDiagKind = 0;
    544       CreateIfFixit(S, CO, CO->getTrueExpr(), CO->getFalseExpr(),
    545                     I->Output, Fixit1, Fixit2);
    546       break;
    547     }
    548     case Stmt::BinaryOperatorClass: {
    549       const BinaryOperator *BO = cast<BinaryOperator>(Term);
    550       if (!BO->isLogicalOp())
    551         continue;
    552       DiagKind = 0;
    553       Str = BO->getOpcodeStr();
    554       Range = BO->getLHS()->getSourceRange();
    555       RemoveDiagKind = 0;
    556       if ((BO->getOpcode() == BO_LAnd && I->Output) ||
    557           (BO->getOpcode() == BO_LOr && !I->Output))
    558         // true && y -> y, false || y -> y.
    559         Fixit1 = FixItHint::CreateRemoval(SourceRange(BO->getLocStart(),
    560                                                       BO->getOperatorLoc()));
    561       else
    562         // false && y -> false, true || y -> true.
    563         Fixit1 = FixItHint::CreateReplacement(BO->getSourceRange(), FixitStr);
    564       break;
    565     }
    566 
    567     // "loop is entered / loop is exited".
    568     case Stmt::WhileStmtClass:
    569       DiagKind = 1;
    570       Str = "while";
    571       Range = cast<WhileStmt>(Term)->getCond()->getSourceRange();
    572       RemoveDiagKind = 1;
    573       Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
    574       break;
    575     case Stmt::ForStmtClass:
    576       DiagKind = 1;
    577       Str = "for";
    578       Range = cast<ForStmt>(Term)->getCond()->getSourceRange();
    579       RemoveDiagKind = 1;
    580       if (I->Output)
    581         Fixit1 = FixItHint::CreateRemoval(Range);
    582       else
    583         Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
    584       break;
    585 
    586     // "condition is true / loop is exited".
    587     case Stmt::DoStmtClass:
    588       DiagKind = 2;
    589       Str = "do";
    590       Range = cast<DoStmt>(Term)->getCond()->getSourceRange();
    591       RemoveDiagKind = 1;
    592       Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
    593       break;
    594 
    595     // "switch case is taken".
    596     case Stmt::CaseStmtClass:
    597       DiagKind = 3;
    598       Str = "case";
    599       Range = cast<CaseStmt>(Term)->getLHS()->getSourceRange();
    600       break;
    601     case Stmt::DefaultStmtClass:
    602       DiagKind = 3;
    603       Str = "default";
    604       Range = cast<DefaultStmt>(Term)->getDefaultLoc();
    605       break;
    606     }
    607 
    608     S.Diag(Range.getBegin(), diag::warn_sometimes_uninit_var)
    609       << VD->getDeclName() << IsCapturedByBlock << DiagKind
    610       << Str << I->Output << Range;
    611     S.Diag(User->getLocStart(), diag::note_uninit_var_use)
    612       << IsCapturedByBlock << User->getSourceRange();
    613     if (RemoveDiagKind != -1)
    614       S.Diag(Fixit1.RemoveRange.getBegin(), diag::note_uninit_fixit_remove_cond)
    615         << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;
    616 
    617     Diagnosed = true;
    618   }
    619 
    620   if (!Diagnosed)
    621     S.Diag(Use.getUser()->getLocStart(),
    622            Use.getKind() == UninitUse::Always ? diag::warn_uninit_var
    623                                               : diag::warn_maybe_uninit_var)
    624         << VD->getDeclName() << IsCapturedByBlock
    625         << Use.getUser()->getSourceRange();
    626 }
    627 
    628 /// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
    629 /// uninitialized variable. This manages the different forms of diagnostic
    630 /// emitted for particular types of uses. Returns true if the use was diagnosed
    631 /// as a warning. If a particular use is one we omit warnings for, returns
    632 /// false.
    633 static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
    634                                      const UninitUse &Use,
    635                                      bool alwaysReportSelfInit = false) {
    636 
    637   if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Use.getUser())) {
    638     // Inspect the initializer of the variable declaration which is
    639     // being referenced prior to its initialization. We emit
    640     // specialized diagnostics for self-initialization, and we
    641     // specifically avoid warning about self references which take the
    642     // form of:
    643     //
    644     //   int x = x;
    645     //
    646     // This is used to indicate to GCC that 'x' is intentionally left
    647     // uninitialized. Proven code paths which access 'x' in
    648     // an uninitialized state after this will still warn.
    649     if (const Expr *Initializer = VD->getInit()) {
    650       if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
    651         return false;
    652 
    653       ContainsReference CR(S.Context, DRE);
    654       CR.Visit(const_cast<Expr*>(Initializer));
    655       if (CR.doesContainReference()) {
    656         S.Diag(DRE->getLocStart(),
    657                diag::warn_uninit_self_reference_in_init)
    658           << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
    659         return true;
    660       }
    661     }
    662 
    663     DiagUninitUse(S, VD, Use, false);
    664   } else {
    665     const BlockExpr *BE = cast<BlockExpr>(Use.getUser());
    666     if (VD->getType()->isBlockPointerType() && !VD->hasAttr<BlocksAttr>())
    667       S.Diag(BE->getLocStart(),
    668              diag::warn_uninit_byref_blockvar_captured_by_block)
    669         << VD->getDeclName();
    670     else
    671       DiagUninitUse(S, VD, Use, true);
    672   }
    673 
    674   // Report where the variable was declared when the use wasn't within
    675   // the initializer of that declaration & we didn't already suggest
    676   // an initialization fixit.
    677   if (!SuggestInitializationFixit(S, VD))
    678     S.Diag(VD->getLocStart(), diag::note_uninit_var_def)
    679       << VD->getDeclName();
    680 
    681   return true;
    682 }
    683 
    684 namespace {
    685   class FallthroughMapper : public RecursiveASTVisitor<FallthroughMapper> {
    686   public:
    687     FallthroughMapper(Sema &S) // Starts with no switch statements seen.
    688       : FoundSwitchStatements(false),
    689         S(S) {
    690     }
    691 
    692     bool foundSwitchStatements() const { return FoundSwitchStatements; } // Whether any switch statements were found.
    693 
    694     void markFallthroughVisited(const AttributedStmt *Stmt) { // Account for one fall-through annotation.
    695       bool Found = FallthroughStmts.erase(Stmt); // It must have been recorded earlier.
    696       assert(Found);
    697       (void)Found; // Silence unused-variable warnings in NDEBUG builds.
    698     }
    699 
    700     typedef llvm::SmallPtrSet<const AttributedStmt*, 8> AttrStmts;
    701 
    702     const AttrStmts &getFallthroughStmts() const { // Annotations not yet marked visited.
    703       return FallthroughStmts;
    704     }
    705 
    706     void fillReachableBlocks(CFG *Cfg) { // BFS that populates ReachableBlocks.
    707       assert(ReachableBlocks.empty() && "ReachableBlocks already filled");
    708       std::deque<const CFGBlock *> BlockQueue;
    709 
    710       ReachableBlocks.insert(&Cfg->getEntry()); // The entry is trivially reachable.
    711       BlockQueue.push_back(&Cfg->getEntry());
    712       // Mark all case blocks reachable to avoid problems with switching on
    713       // constants, covered enums, etc.
    714       // These blocks can contain fall-through annotations, and we don't want to
    715       // issue a warn_fallthrough_attr_unreachable for them.
    716       for (CFG::iterator I = Cfg->begin(), E = Cfg->end(); I != E; ++I) {
    717         const CFGBlock *B = *I;
    718         const Stmt *L = B->getLabel();
    719         if (L && isa<SwitchCase>(L) && ReachableBlocks.insert(B))
    720           BlockQueue.push_back(B);
    721       }
    722 
    723       while (!BlockQueue.empty()) { // Propagate reachability along successor edges.
    724         const CFGBlock *P = BlockQueue.front();
    725         BlockQueue.pop_front();
    726         for (CFGBlock::const_succ_iterator I = P->succ_begin(),
    727                                            E = P->succ_end();
    728              I != E; ++I) {
    729           if (*I && ReachableBlocks.insert(*I)) // insert() is true only on first visit.
    730             BlockQueue.push_back(*I);
    731         }
    732       }
    733     }
    734 
    /// Determine whether control can fall through into the case-labelled
    /// block \p B without an annotation.  Walks B's predecessors, classifying
    /// each as benign (switch dispatch, empty preceding label, annotated
    /// fall-through) or as an unannotated fall-through.
    /// \param [out] AnnotatedCnt number of annotated fall-through edges seen.
    /// \returns true if at least one unannotated fall-through edge exists.
    bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt) {
      assert(!ReachableBlocks.empty() && "ReachableBlocks empty");

      int UnannotatedCnt = 0;
      AnnotatedCnt = 0;

      std::deque<const CFGBlock*> BlockQueue;

      std::copy(B.pred_begin(), B.pred_end(), std::back_inserter(BlockQueue));

      while (!BlockQueue.empty()) {
        const CFGBlock *P = BlockQueue.front();
        BlockQueue.pop_front();

        const Stmt *Term = P->getTerminator();
        if (Term && isa<SwitchStmt>(Term))
          continue; // Switch statement, good.

        const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(P->getLabel());
        if (SW && SW->getSubStmt() == B.getLabel() && P->begin() == P->end())
          continue; // Previous case label has no statements, good.

        const LabelStmt *L = dyn_cast_or_null<LabelStmt>(P->getLabel());
        if (L && L->getSubStmt() == B.getLabel() && P->begin() == P->end())
          continue; // Case label is preceded with a normal label, good.

        if (!ReachableBlocks.count(P)) {
          // Unreachable predecessor: an annotation in it can never take
          // effect, so diagnose it here and count it as handled.
          for (CFGBlock::const_reverse_iterator ElemIt = P->rbegin(),
                                                ElemEnd = P->rend();
               ElemIt != ElemEnd; ++ElemIt) {
            if (Optional<CFGStmt> CS = ElemIt->getAs<CFGStmt>()) {
              if (const AttributedStmt *AS = asFallThroughAttr(CS->getStmt())) {
                S.Diag(AS->getLocStart(),
                       diag::warn_fallthrough_attr_unreachable);
                markFallthroughVisited(AS);
                ++AnnotatedCnt;
                break;
              }
              // Don't care about other unreachable statements.
            }
          }
          // If there are no unreachable statements, this may be a special
          // case in CFG:
          // case X: {
          //    A a;  // A has a destructor.
          //    break;
          // }
          // // <<<< This place is represented by a 'hanging' CFG block.
          // case Y:
          continue;
        }

        const Stmt *LastStmt = getLastStmt(*P);
        if (const AttributedStmt *AS = asFallThroughAttr(LastStmt)) {
          markFallthroughVisited(AS);
          ++AnnotatedCnt;
          continue; // Fallthrough annotation, good.
        }

        if (!LastStmt) { // This block contains no executable statements.
          // Traverse its predecessors.
          std::copy(P->pred_begin(), P->pred_end(),
                    std::back_inserter(BlockQueue));
          continue;
        }

        ++UnannotatedCnt;
      }
      return !!UnannotatedCnt;
    }
    805 
    806     // RecursiveASTVisitor setup.
    807     bool shouldWalkTypesOfTypeLocs() const { return false; }
    808 
    809     bool VisitAttributedStmt(AttributedStmt *S) {
    810       if (asFallThroughAttr(S))
    811         FallthroughStmts.insert(S);
    812       return true;
    813     }
    814 
    815     bool VisitSwitchStmt(SwitchStmt *S) {
    816       FoundSwitchStatements = true;
    817       return true;
    818     }
    819 
    820   private:
    821 
    822     static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
    823       if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(S)) {
    824         if (hasSpecificAttr<FallThroughAttr>(AS->getAttrs()))
    825           return AS;
    826       }
    827       return 0;
    828     }
    829 
    830     static const Stmt *getLastStmt(const CFGBlock &B) {
    831       if (const Stmt *Term = B.getTerminator())
    832         return Term;
    833       for (CFGBlock::const_reverse_iterator ElemIt = B.rbegin(),
    834                                             ElemEnd = B.rend();
    835                                             ElemIt != ElemEnd; ++ElemIt) {
    836         if (Optional<CFGStmt> CS = ElemIt->getAs<CFGStmt>())
    837           return CS->getStmt();
    838       }
    839       // Workaround to detect a statement thrown out by CFGBuilder:
    840       //   case X: {} case Y:
    841       //   case X: ; case Y:
    842       if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(B.getLabel()))
    843         if (!isa<SwitchCase>(SW->getSubStmt()))
    844           return SW->getSubStmt();
    845 
    846       return 0;
    847     }
    848 
    // Whether the AST traversal saw any switch statement at all.
    bool FoundSwitchStatements;
    // Annotations collected by VisitAttributedStmt and not yet matched to a
    // fall-through edge by checkFallThroughIntoBlock.
    AttrStmts FallthroughStmts;
    Sema &S;
    // Blocks reachable from the CFG entry (plus every case-labelled block);
    // populated once by fillReachableBlocks().
    llvm::SmallPtrSet<const CFGBlock *, 16> ReachableBlocks;
    853   };
    854 }
    855 
/// Diagnose switch cases that are entered by falling through from the
/// previous case without a fallthrough annotation, suggesting fix-its where
/// possible, and report annotations that do not correspond to any
/// fall-through edge.
/// \param PerFunction if true, only warn in functions that already contain
/// at least one fallthrough annotation.
static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
                                            bool PerFunction) {
  // Only perform this analysis when using C++11.  There is no good workflow
  // for this warning when not using C++11.  There is no good way to silence
  // the warning (no attribute is available) unless we are using C++11's support
  // for generalized attributes.  One could use pragmas to silence the warning,
  // but as a general solution that is gross and not in the spirit of this
  // warning.
  //
  // NOTE: This is an intermediate solution.  There are on-going discussions on
  // how to properly support this warning outside of C++11 with an annotation.
  if (!AC.getASTContext().getLangOpts().CPlusPlus11)
    return;

  FallthroughMapper FM(S);
  FM.TraverseStmt(AC.getBody());

  // Bail out cheaply when there is nothing to analyze.
  if (!FM.foundSwitchStatements())
    return;

  // In per-function mode, only bodies that already use the annotation
  // somewhere are checked.
  if (PerFunction && FM.getFallthroughStmts().empty())
    return;

  CFG *Cfg = AC.getCFG();

  if (!Cfg)
    return;

  FM.fillReachableBlocks(Cfg);

  for (CFG::reverse_iterator I = Cfg->rbegin(), E = Cfg->rend(); I != E; ++I) {
    const CFGBlock *B = *I;
    const Stmt *Label = B->getLabel();

    // Only case-labelled blocks can be fallen into.
    if (!Label || !isa<SwitchCase>(Label))
      continue;

    int AnnotatedCnt;

    if (!FM.checkFallThroughIntoBlock(*B, AnnotatedCnt))
      continue;

    S.Diag(Label->getLocStart(),
        PerFunction ? diag::warn_unannotated_fallthrough_per_function
                    : diag::warn_unannotated_fallthrough);

    if (!AnnotatedCnt) {
      SourceLocation L = Label->getLocStart();
      // Cannot attach a fix-it inside a macro expansion.
      if (L.isMacroID())
        continue;
      if (S.getLangOpts().CPlusPlus11) {
        const Stmt *Term = B->getTerminator();
        // Skip empty cases.
        while (B->empty() && !Term && B->succ_size() == 1) {
          B = *B->succ_begin();
          Term = B->getTerminator();
        }
        if (!(B->empty() && Term && isa<BreakStmt>(Term))) {
          Preprocessor &PP = S.getPreprocessor();
          TokenValue Tokens[] = {
            tok::l_square, tok::l_square, PP.getIdentifierInfo("clang"),
            tok::coloncolon, PP.getIdentifierInfo("fallthrough"),
            tok::r_square, tok::r_square
          };
          StringRef AnnotationSpelling = "[[clang::fallthrough]]";
          // If the user has a macro that expands to the annotation, suggest
          // the macro's spelling instead of the raw attribute.
          StringRef MacroName = PP.getLastMacroWithSpelling(L, Tokens);
          if (!MacroName.empty())
            AnnotationSpelling = MacroName;
          SmallString<64> TextToInsert(AnnotationSpelling);
          TextToInsert += "; ";
          S.Diag(L, diag::note_insert_fallthrough_fixit) <<
              AnnotationSpelling <<
              FixItHint::CreateInsertion(L, TextToInsert);
        }
      }
      S.Diag(L, diag::note_insert_break_fixit) <<
        FixItHint::CreateInsertion(L, "break; ");
    }
  }

  // Annotations that were never matched to a fall-through edge are placed
  // invalidly.
  const FallthroughMapper::AttrStmts &Fallthroughs = FM.getFallthroughStmts();
  for (FallthroughMapper::AttrStmts::const_iterator I = Fallthroughs.begin(),
                                                    E = Fallthroughs.end();
                                                    I != E; ++I) {
    S.Diag((*I)->getLocStart(), diag::warn_fallthrough_attr_invalid_placement);
  }

}
    944 
    945 namespace {
    946 typedef std::pair<const Stmt *,
    947                   sema::FunctionScopeInfo::WeakObjectUseMap::const_iterator>
    948         StmtUsesPair;
    949 
    950 class StmtUseSorter {
    951   const SourceManager &SM;
    952 
    953 public:
    954   explicit StmtUseSorter(const SourceManager &SM) : SM(SM) { }
    955 
    956   bool operator()(const StmtUsesPair &LHS, const StmtUsesPair &RHS) {
    957     return SM.isBeforeInTranslationUnit(LHS.first->getLocStart(),
    958                                         RHS.first->getLocStart());
    959   }
    960 };
    961 }
    962 
    963 static bool isInLoop(const ASTContext &Ctx, const ParentMap &PM,
    964                      const Stmt *S) {
    965   assert(S);
    966 
    967   do {
    968     switch (S->getStmtClass()) {
    969     case Stmt::ForStmtClass:
    970     case Stmt::WhileStmtClass:
    971     case Stmt::CXXForRangeStmtClass:
    972     case Stmt::ObjCForCollectionStmtClass:
    973       return true;
    974     case Stmt::DoStmtClass: {
    975       const Expr *Cond = cast<DoStmt>(S)->getCond();
    976       llvm::APSInt Val;
    977       if (!Cond->EvaluateAsInt(Val, Ctx))
    978         return true;
    979       return Val.getBoolValue();
    980     }
    981     default:
    982       break;
    983     }
    984   } while ((S = PM.getParent(S)));
    985 
    986   return false;
    987 }
    988 
    989 
/// Diagnose repeated reads of the same weak object within one function body:
/// one warning at the first unsafe read per distinct weak object, plus a note
/// at every other access.  A single read followed only by writes is exempt
/// unless it occurs in a loop (and even then, local-variable bases are
/// exempt, since locals are often reassigned in loops).
static void diagnoseRepeatedUseOfWeak(Sema &S,
                                      const sema::FunctionScopeInfo *CurFn,
                                      const Decl *D,
                                      const ParentMap &PM) {
  typedef sema::FunctionScopeInfo::WeakObjectProfileTy WeakObjectProfileTy;
  typedef sema::FunctionScopeInfo::WeakObjectUseMap WeakObjectUseMap;
  typedef sema::FunctionScopeInfo::WeakUseVector WeakUseVector;

  ASTContext &Ctx = S.getASTContext();

  const WeakObjectUseMap &WeakMap = CurFn->getWeakObjectUses();

  // Extract all weak objects that are referenced more than once.
  SmallVector<StmtUsesPair, 8> UsesByStmt;
  for (WeakObjectUseMap::const_iterator I = WeakMap.begin(), E = WeakMap.end();
       I != E; ++I) {
    const WeakUseVector &Uses = I->second;

    // Find the first read of the weak object.
    WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end();
    for ( ; UI != UE; ++UI) {
      if (UI->isUnsafe())
        break;
    }

    // If there were only writes to this object, don't warn.
    if (UI == UE)
      continue;

    // If there was only one read, followed by any number of writes, and the
    // read is not within a loop, don't warn. Additionally, don't warn in a
    // loop if the base object is a local variable -- local variables are often
    // changed in loops.
    if (UI == Uses.begin()) {
      WeakUseVector::const_iterator UI2 = UI;
      for (++UI2; UI2 != UE; ++UI2)
        if (UI2->isUnsafe())
          break;

      if (UI2 == UE) {
        // This was the only read.
        if (!isInLoop(Ctx, PM, UI->getUseExpr()))
          continue;

        const WeakObjectProfileTy &Profile = I->first;
        if (!Profile.isExactProfile())
          continue;

        const NamedDecl *Base = Profile.getBase();
        if (!Base)
          Base = Profile.getProperty();
        assert(Base && "A profile always has a base or property.");

        if (const VarDecl *BaseVar = dyn_cast<VarDecl>(Base))
          if (BaseVar->hasLocalStorage() && !isa<ParmVarDecl>(Base))
            continue;
      }
    }

    UsesByStmt.push_back(StmtUsesPair(UI->getUseExpr(), I));
  }

  if (UsesByStmt.empty())
    return;

  // Sort by first use so that we emit the warnings in a deterministic order.
  std::sort(UsesByStmt.begin(), UsesByStmt.end(),
            StmtUseSorter(S.getSourceManager()));

  // Classify the current code body for better warning text.
  // This enum should stay in sync with the cases in
  // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
  // FIXME: Should we use a common classification enum and the same set of
  // possibilities all throughout Sema?
  enum {
    Function,
    Method,
    Block,
    Lambda
  } FunctionKind;

  if (isa<sema::BlockScopeInfo>(CurFn))
    FunctionKind = Block;
  else if (isa<sema::LambdaScopeInfo>(CurFn))
    FunctionKind = Lambda;
  else if (isa<ObjCMethodDecl>(D))
    FunctionKind = Method;
  else
    FunctionKind = Function;

  // Iterate through the sorted problems and emit warnings for each.
  for (SmallVectorImpl<StmtUsesPair>::const_iterator I = UsesByStmt.begin(),
                                                     E = UsesByStmt.end();
       I != E; ++I) {
    const Stmt *FirstRead = I->first;
    const WeakObjectProfileTy &Key = I->second->first;
    const WeakUseVector &Uses = I->second->second;

    // For complicated expressions like 'a.b.c' and 'x.b.c', WeakObjectProfileTy
    // may not contain enough information to determine that these are different
    // properties. We can only be 100% sure of a repeated use in certain cases,
    // and we adjust the diagnostic kind accordingly so that the less certain
    // case can be turned off if it is too noisy.
    unsigned DiagKind;
    if (Key.isExactProfile())
      DiagKind = diag::warn_arc_repeated_use_of_weak;
    else
      DiagKind = diag::warn_arc_possible_repeated_use_of_weak;

    // Classify the weak object being accessed for better warning text.
    // This enum should stay in sync with the cases in
    // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
    enum {
      Variable,
      Property,
      ImplicitProperty,
      Ivar
    } ObjectKind;

    const NamedDecl *D = Key.getProperty();
    if (isa<VarDecl>(D))
      ObjectKind = Variable;
    else if (isa<ObjCPropertyDecl>(D))
      ObjectKind = Property;
    else if (isa<ObjCMethodDecl>(D))
      ObjectKind = ImplicitProperty;
    else if (isa<ObjCIvarDecl>(D))
      ObjectKind = Ivar;
    else
      llvm_unreachable("Unexpected weak object kind!");

    // Show the first time the object was read.
    S.Diag(FirstRead->getLocStart(), DiagKind)
      << ObjectKind << D << FunctionKind
      << FirstRead->getSourceRange();

    // Print all the other accesses as notes.
    for (WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end();
         UI != UE; ++UI) {
      if (UI->getUseExpr() == FirstRead)
        continue;
      S.Diag(UI->getUseExpr()->getLocStart(),
             diag::note_arc_weak_also_accessed_here)
        << UI->getUseExpr()->getSourceRange();
    }
  }
}
   1136 
   1137 
   1138 namespace {
   1139 struct SLocSort {
   1140   bool operator()(const UninitUse &a, const UninitUse &b) {
   1141     // Prefer a more confident report over a less confident one.
   1142     if (a.getKind() != b.getKind())
   1143       return a.getKind() > b.getKind();
   1144     SourceLocation aLoc = a.getUser()->getLocStart();
   1145     SourceLocation bLoc = b.getUser()->getLocStart();
   1146     return aLoc.getRawEncoding() < bLoc.getRawEncoding();
   1147   }
   1148 };
   1149 
   1150 class UninitValsDiagReporter : public UninitVariablesHandler {
   1151   Sema &S;
   1152   typedef SmallVector<UninitUse, 2> UsesVec;
   1153   typedef std::pair<UsesVec*, bool> MappedType;
   1154   // Prefer using MapVector to DenseMap, so that iteration order will be
   1155   // the same as insertion order. This is needed to obtain a deterministic
   1156   // order of diagnostics when calling flushDiagnostics().
   1157   typedef llvm::MapVector<const VarDecl *, MappedType> UsesMap;
   1158   UsesMap *uses;
   1159 
   1160 public:
   1161   UninitValsDiagReporter(Sema &S) : S(S), uses(0) {}
   1162   ~UninitValsDiagReporter() {
   1163     flushDiagnostics();
   1164   }
   1165 
   1166   MappedType &getUses(const VarDecl *vd) {
   1167     if (!uses)
   1168       uses = new UsesMap();
   1169 
   1170     MappedType &V = (*uses)[vd];
   1171     UsesVec *&vec = V.first;
   1172     if (!vec)
   1173       vec = new UsesVec();
   1174 
   1175     return V;
   1176   }
   1177 
   1178   void handleUseOfUninitVariable(const VarDecl *vd, const UninitUse &use) {
   1179     getUses(vd).first->push_back(use);
   1180   }
   1181 
   1182   void handleSelfInit(const VarDecl *vd) {
   1183     getUses(vd).second = true;
   1184   }
   1185 
   1186   void flushDiagnostics() {
   1187     if (!uses)
   1188       return;
   1189 
   1190     for (UsesMap::iterator i = uses->begin(), e = uses->end(); i != e; ++i) {
   1191       const VarDecl *vd = i->first;
   1192       const MappedType &V = i->second;
   1193 
   1194       UsesVec *vec = V.first;
   1195       bool hasSelfInit = V.second;
   1196 
   1197       // Specially handle the case where we have uses of an uninitialized
   1198       // variable, but the root cause is an idiomatic self-init.  We want
   1199       // to report the diagnostic at the self-init since that is the root cause.
   1200       if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
   1201         DiagnoseUninitializedUse(S, vd,
   1202                                  UninitUse(vd->getInit()->IgnoreParenCasts(),
   1203                                            /* isAlwaysUninit */ true),
   1204                                  /* alwaysReportSelfInit */ true);
   1205       else {
   1206         // Sort the uses by their SourceLocations.  While not strictly
   1207         // guaranteed to produce them in line/column order, this will provide
   1208         // a stable ordering.
   1209         std::sort(vec->begin(), vec->end(), SLocSort());
   1210 
   1211         for (UsesVec::iterator vi = vec->begin(), ve = vec->end(); vi != ve;
   1212              ++vi) {
   1213           // If we have self-init, downgrade all uses to 'may be uninitialized'.
   1214           UninitUse Use = hasSelfInit ? UninitUse(vi->getUser(), false) : *vi;
   1215 
   1216           if (DiagnoseUninitializedUse(S, vd, Use))
   1217             // Skip further diagnostics for this variable. We try to warn only
   1218             // on the first point at which a variable is used uninitialized.
   1219             break;
   1220         }
   1221       }
   1222 
   1223       // Release the uses vector.
   1224       delete vec;
   1225     }
   1226     delete uses;
   1227   }
   1228 
   1229 private:
   1230   static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
   1231   for (UsesVec::const_iterator i = vec->begin(), e = vec->end(); i != e; ++i) {
   1232     if (i->getKind() == UninitUse::Always) {
   1233       return true;
   1234     }
   1235   }
   1236   return false;
   1237 }
   1238 };
   1239 }
   1240 
   1241 
   1242 //===----------------------------------------------------------------------===//
   1243 // -Wthread-safety
   1244 //===----------------------------------------------------------------------===//
   1245 namespace clang {
   1246 namespace thread_safety {
// Notes that accompany a buffered thread-safety warning (often empty).
typedef SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
// A buffered warning plus its notes, held until emitDiagnostics() sorts and
// emits them.
typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
typedef std::list<DelayedDiag> DiagList;
   1250 
   1251 struct SortDiagBySourceLocation {
   1252   SourceManager &SM;
   1253   SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}
   1254 
   1255   bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
   1256     // Although this call will be slow, this is only called when outputting
   1257     // multiple warnings.
   1258     return SM.isBeforeInTranslationUnit(left.first.first, right.first.first);
   1259   }
   1260 };
   1261 
   1262 namespace {
/// Receives callbacks from the thread-safety analysis and buffers the
/// resulting diagnostics in \c Warnings; emitDiagnostics() later sorts them
/// by source location and emits them, so output order is deterministic even
/// though the analysis iterates its lockset in an unspecified order.
class ThreadSafetyReporter : public clang::thread_safety::ThreadSafetyHandler {
  Sema &S;
  DiagList Warnings;
  // Start/end of the function being analyzed; used as fallback locations
  // when the analysis cannot provide a precise one.
  SourceLocation FunLocation, FunEndLocation;

  // Helper functions
  void warnLockMismatch(unsigned DiagID, Name LockName, SourceLocation Loc) {
    // Gracefully handle rare cases when the analysis can't get a more
    // precise source location.
    if (!Loc.isValid())
      Loc = FunLocation;
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << LockName);
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }

 public:
  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
    : S(S), FunLocation(FL), FunEndLocation(FEL) {}

  /// \brief Emit all buffered diagnostics in order of sourcelocation.
  /// We need to output diagnostics produced while iterating through
  /// the lockset in deterministic order, so this function orders diagnostics
  /// and outputs them.
  void emitDiagnostics() {
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
    for (DiagList::iterator I = Warnings.begin(), E = Warnings.end();
         I != E; ++I) {
      S.Diag(I->first.first, I->first.second);
      const OptionalNotes &Notes = I->second;
      for (unsigned NoteI = 0, NoteN = Notes.size(); NoteI != NoteN; ++NoteI)
        S.Diag(Notes[NoteI].first, Notes[NoteI].second);
    }
  }

  // A lock expression could not be resolved to a lockable object.
  void handleInvalidLockExp(SourceLocation Loc) {
    PartialDiagnosticAt Warning(Loc,
                                S.PDiag(diag::warn_cannot_resolve_lock) << Loc);
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }
  void handleUnmatchedUnlock(Name LockName, SourceLocation Loc) {
    warnLockMismatch(diag::warn_unlock_but_no_lock, LockName, Loc);
  }

  void handleDoubleLock(Name LockName, SourceLocation Loc) {
    warnLockMismatch(diag::warn_double_lock, LockName, Loc);
  }

  // The lock state at the end of a scope did not match expectations; the
  // precise diagnostic depends on the kind of mismatch \p LEK.
  void handleMutexHeldEndOfScope(Name LockName, SourceLocation LocLocked,
                                 SourceLocation LocEndOfScope,
                                 LockErrorKind LEK){
    unsigned DiagID = 0;
    switch (LEK) {
      case LEK_LockedSomePredecessors:
        DiagID = diag::warn_lock_some_predecessors;
        break;
      case LEK_LockedSomeLoopIterations:
        DiagID = diag::warn_expecting_lock_held_on_loop;
        break;
      case LEK_LockedAtEndOfFunction:
        DiagID = diag::warn_no_unlock;
        break;
      case LEK_NotLockedAtEndOfFunction:
        DiagID = diag::warn_expecting_locked;
        break;
    }
    if (LocEndOfScope.isInvalid())
      LocEndOfScope = FunEndLocation;

    PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << LockName);
    PartialDiagnosticAt Note(LocLocked, S.PDiag(diag::note_locked_here));
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
  }


  // The same lock is held both exclusively and shared at two locations.
  void handleExclusiveAndShared(Name LockName, SourceLocation Loc1,
                                SourceLocation Loc2) {
    PartialDiagnosticAt Warning(
      Loc1, S.PDiag(diag::warn_lock_exclusive_and_shared) << LockName);
    PartialDiagnosticAt Note(
      Loc2, S.PDiag(diag::note_lock_exclusive_and_shared) << LockName);
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
  }

  // A guarded variable was accessed or dereferenced with no lock held at all.
  void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK,
                         AccessKind AK, SourceLocation Loc) {
    assert((POK == POK_VarAccess || POK == POK_VarDereference)
             && "Only works for variables");
    unsigned DiagID = POK == POK_VarAccess?
                        diag::warn_variable_requires_any_lock:
                        diag::warn_var_deref_requires_any_lock;
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
      << D->getNameAsString() << getLockKindFromAccessKind(AK));
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }

  // The specific required mutex was not held.  When \p PossibleMatch is
  // non-null, a similarly-named mutex was held instead, and the "precise"
  // diagnostic plus a near-match note are emitted.
  void handleMutexNotHeld(const NamedDecl *D, ProtectedOperationKind POK,
                          Name LockName, LockKind LK, SourceLocation Loc,
                          Name *PossibleMatch) {
    unsigned DiagID = 0;
    if (PossibleMatch) {
      switch (POK) {
        case POK_VarAccess:
          DiagID = diag::warn_variable_requires_lock_precise;
          break;
        case POK_VarDereference:
          DiagID = diag::warn_var_deref_requires_lock_precise;
          break;
        case POK_FunctionCall:
          DiagID = diag::warn_fun_requires_lock_precise;
          break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
        << D->getNameAsString() << LockName << LK);
      PartialDiagnosticAt Note(Loc, S.PDiag(diag::note_found_mutex_near_match)
                               << *PossibleMatch);
      Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
    } else {
      switch (POK) {
        case POK_VarAccess:
          DiagID = diag::warn_variable_requires_lock;
          break;
        case POK_VarDereference:
          DiagID = diag::warn_var_deref_requires_lock;
          break;
        case POK_FunctionCall:
          DiagID = diag::warn_fun_requires_lock;
          break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
        << D->getNameAsString() << LockName << LK);
      Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
    }
  }

  // A function annotated to exclude a mutex was called while that mutex was
  // held.
  void handleFunExcludesLock(Name FunName, Name LockName, SourceLocation Loc) {
    PartialDiagnosticAt Warning(Loc,
      S.PDiag(diag::warn_fun_excludes_mutex) << FunName << LockName);
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }
};
   1403 }
   1404 }
   1405 }
   1406 
   1407 //===----------------------------------------------------------------------===//
   1408 // AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
   1409 //  warnings on a function, method, or block.
   1410 //===----------------------------------------------------------------------===//
   1411 
/// Default analysis policy: the fall-through check is always on; the
/// unreachable-code and thread-safety analyses default to off and are
/// switched on by the AnalysisBasedWarnings constructor when their
/// diagnostics are not mapped to "ignored".
clang::sema::AnalysisBasedWarnings::Policy::Policy() {
  enableCheckFallThrough = 1;
  enableCheckUnreachable = 0;
  enableThreadSafetyAnalysis = 0;
}
   1417 
/// Zero all analysis statistics and decide which optional analyses to enable
/// by default, based on whether a representative diagnostic for each is
/// currently ignored.
clang::sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
  : S(s),
    NumFunctionsAnalyzed(0),
    NumFunctionsWithBadCFGs(0),
    NumCFGBlocks(0),
    MaxCFGBlocksPerFunction(0),
    NumUninitAnalysisFunctions(0),
    NumUninitAnalysisVariables(0),
    MaxUninitAnalysisVariablesPerFunction(0),
    NumUninitAnalysisBlockVisits(0),
    MaxUninitAnalysisBlockVisitsPerFunction(0) {
  DiagnosticsEngine &D = S.getDiagnostics();
  // Probe one representative diagnostic per analysis; if it is not ignored,
  // turn the analysis on in the default policy.
  DefaultPolicy.enableCheckUnreachable = (unsigned)
    (D.getDiagnosticLevel(diag::warn_unreachable, SourceLocation()) !=
        DiagnosticsEngine::Ignored);
  DefaultPolicy.enableThreadSafetyAnalysis = (unsigned)
    (D.getDiagnosticLevel(diag::warn_double_lock, SourceLocation()) !=
     DiagnosticsEngine::Ignored);

}
   1438 
   1439 static void flushDiagnostics(Sema &S, sema::FunctionScopeInfo *fscope) {
   1440   for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
   1441        i = fscope->PossiblyUnreachableDiags.begin(),
   1442        e = fscope->PossiblyUnreachableDiags.end();
   1443        i != e; ++i) {
   1444     const sema::PossiblyUnreachableDiag &D = *i;
   1445     S.Diag(D.Loc, D.PD);
   1446   }
   1447 }
   1448 
/// Run all enabled analysis-based warnings on the body of declaration \p D.
///
/// \param P       Per-function policy selecting which analyses to run.
/// \param fscope  Function scope holding delayed (possibly-unreachable)
///                diagnostics queued while parsing the body.
/// \param D       The function/method/block declaration being analyzed; must
///                have a body.
/// \param blkExpr The BlockExpr when \p D is a block, otherwise null; used by
///                the fall-through check.
void clang::sema::
AnalysisBasedWarnings::IssueWarnings(sema::AnalysisBasedWarnings::Policy P,
                                     sema::FunctionScopeInfo *fscope,
                                     const Decl *D, const BlockExpr *blkExpr) {

  // We avoid doing analysis-based warnings when there are errors for
  // two reasons:
  // (1) The CFGs often can't be constructed (if the body is invalid), so
  //     don't bother trying.
  // (2) The code already has problems; running the analysis just takes more
  //     time.
  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Do not do any analysis for declarations in system headers if we are
  // going to just ignore them.
  if (Diags.getSuppressSystemWarnings() &&
      S.SourceMgr.isInSystemHeader(D->getLocation()))
    return;

  // For code in dependent contexts, we'll do this at instantiation time.
  if (cast<DeclContext>(D)->isDependentContext())
    return;

  if (Diags.hasUncompilableErrorOccurred() || Diags.hasFatalErrorOccurred()) {
    // Flush out any possibly unreachable diagnostics.
    flushDiagnostics(S, fscope);
    return;
  }

  const Stmt *Body = D->getBody();
  assert(Body);

  // No manager: this context is standalone and owns its own CFG, which is
  // built lazily on the first getCFG() call below.
  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ 0, D);

  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
  // explosion for destructors that can result and the compile time hit.
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
  AC.getCFGBuildOptions().AddEHEdges = false;
  AC.getCFGBuildOptions().AddInitializers = true;
  AC.getCFGBuildOptions().AddImplicitDtors = true;
  AC.getCFGBuildOptions().AddTemporaryDtors = true;

  // Force that certain expressions appear as CFGElements in the CFG.  This
  // is used to speed up various analyses.
  // FIXME: This isn't the right factoring.  This is here for initial
  // prototyping, but we need a way for analyses to say what expressions they
  // expect to always be CFGElements and then fill in the BuildOptions
  // appropriately.  This is essentially a layering violation.
  if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis) {
    // Unreachable code analysis and thread safety require a linearized CFG.
    AC.getCFGBuildOptions().setAllAlwaysAdd();
  }
  else {
    AC.getCFGBuildOptions()
      .setAlwaysAdd(Stmt::BinaryOperatorClass)
      .setAlwaysAdd(Stmt::CompoundAssignOperatorClass)
      .setAlwaysAdd(Stmt::BlockExprClass)
      .setAlwaysAdd(Stmt::CStyleCastExprClass)
      .setAlwaysAdd(Stmt::DeclRefExprClass)
      .setAlwaysAdd(Stmt::ImplicitCastExprClass)
      .setAlwaysAdd(Stmt::UnaryOperatorClass)
      .setAlwaysAdd(Stmt::AttributedStmtClass);
  }

  // All CFG build options are now set; the CFG itself is constructed lazily
  // by the first AC.getCFG() call, so nothing below may change the options.

  // Emit delayed diagnostics.
  if (!fscope->PossiblyUnreachableDiags.empty()) {
    bool analyzed = false;

    // Register the expressions with the CFGBuilder.
    for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
         i = fscope->PossiblyUnreachableDiags.begin(),
         e = fscope->PossiblyUnreachableDiags.end();
         i != e; ++i) {
      if (const Stmt *stmt = i->stmt)
        AC.registerForcedBlockExpression(stmt);
    }

    if (AC.getCFG()) {
      analyzed = true;
      for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
            i = fscope->PossiblyUnreachableDiags.begin(),
            e = fscope->PossiblyUnreachableDiags.end();
            i != e; ++i)
      {
        const sema::PossiblyUnreachableDiag &D = *i;
        bool processed = false;
        if (const Stmt *stmt = i->stmt) {
          const CFGBlock *block = AC.getBlockForRegisteredExpression(stmt);
          CFGReverseBlockReachabilityAnalysis *cra =
              AC.getCFGReachablityAnalysis();
          // FIXME: We should be able to assert that block is non-null, but
          // the CFG analysis can skip potentially-evaluated expressions in
          // edge cases; see test/Sema/vla-2.c.
          if (block && cra) {
            // Can this block be reached from the entrance?
            if (cra->isReachable(&AC.getCFG()->getEntry(), block))
              S.Diag(D.Loc, D.PD);
            processed = true;
          }
        }
        if (!processed) {
          // Emit the warning anyway if we cannot map to a basic block.
          S.Diag(D.Loc, D.PD);
        }
      }
    }

    // CFG construction failed: fall back to emitting every delayed
    // diagnostic unconditionally rather than silently dropping them.
    if (!analyzed)
      flushDiagnostics(S, fscope);
  }


  // Warning: check missing 'return'
  if (P.enableCheckFallThrough) {
    // Pick the diagnostic flavor: blocks, lambda call operators, and plain
    // functions each get differently-worded fall-through diagnostics.
    const CheckFallThroughDiagnostics &CD =
      (isa<BlockDecl>(D) ? CheckFallThroughDiagnostics::MakeForBlock()
       : (isa<CXXMethodDecl>(D) &&
          cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call &&
          cast<CXXMethodDecl>(D)->getParent()->isLambda())
            ? CheckFallThroughDiagnostics::MakeForLambda()
            : CheckFallThroughDiagnostics::MakeForFunction(D));
    CheckFallThroughForBody(S, D, Body, blkExpr, CD, AC);
  }

  // Warning: check for unreachable code
  if (P.enableCheckUnreachable) {
    // Only check for unreachable code on non-template instantiations.
    // Different template instantiations can effectively change the control-flow
    // and it is very difficult to prove that a snippet of code in a template
    // is unreachable for all instantiations.
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
      isTemplateInstantiation = Function->isTemplateInstantiation();
    if (!isTemplateInstantiation)
      CheckUnreachable(S, AC);
  }

  // Check for thread safety violations
  if (P.enableThreadSafetyAnalysis) {
    SourceLocation FL = AC.getDecl()->getLocation();
    SourceLocation FEL = AC.getDecl()->getLocEnd();
    thread_safety::ThreadSafetyReporter Reporter(S, FL, FEL);
    // Beta-quality checks are only reported when the user opted in to the
    // -Wthread-safety-beta diagnostic group.
    if (Diags.getDiagnosticLevel(diag::warn_thread_safety_beta,D->getLocStart())
        != DiagnosticsEngine::Ignored)
      Reporter.setIssueBetaWarnings(true);

    thread_safety::runThreadSafetyAnalysis(AC, Reporter);
    Reporter.emitDiagnostics();
  }

  // Uninitialized-variable analysis runs if any of its three diagnostics
  // (-Wuninitialized, -Wsometimes-uninitialized, -Wconditional-uninitialized)
  // is enabled at this location.
  if (Diags.getDiagnosticLevel(diag::warn_uninit_var, D->getLocStart())
      != DiagnosticsEngine::Ignored ||
      Diags.getDiagnosticLevel(diag::warn_sometimes_uninit_var,D->getLocStart())
      != DiagnosticsEngine::Ignored ||
      Diags.getDiagnosticLevel(diag::warn_maybe_uninit_var, D->getLocStart())
      != DiagnosticsEngine::Ignored) {
    if (CFG *cfg = AC.getCFG()) {
      UninitValsDiagReporter reporter(S);
      UninitVariablesAnalysisStats stats;
      std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
                                        reporter, stats);

      // Accumulate the per-function stats reported by PrintStats().
      if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
        ++NumUninitAnalysisFunctions;
        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
        MaxUninitAnalysisVariablesPerFunction =
            std::max(MaxUninitAnalysisVariablesPerFunction,
                     stats.NumVariablesAnalyzed);
        MaxUninitAnalysisBlockVisitsPerFunction =
            std::max(MaxUninitAnalysisBlockVisitsPerFunction,
                     stats.NumBlockVisits);
      }
    }
  }

  bool FallThroughDiagFull =
      Diags.getDiagnosticLevel(diag::warn_unannotated_fallthrough,
                               D->getLocStart()) != DiagnosticsEngine::Ignored;
  bool FallThroughDiagPerFunction =
      Diags.getDiagnosticLevel(diag::warn_unannotated_fallthrough_per_function,
                               D->getLocStart()) != DiagnosticsEngine::Ignored;
  if (FallThroughDiagFull || FallThroughDiagPerFunction) {
    DiagnoseSwitchLabelsFallthrough(S, AC, !FallThroughDiagFull);
  }

  if (S.getLangOpts().ObjCARCWeak &&
      Diags.getDiagnosticLevel(diag::warn_arc_repeated_use_of_weak,
                               D->getLocStart()) != DiagnosticsEngine::Ignored)
    diagnoseRepeatedUseOfWeak(S, fscope, D, AC.getParentMap());

  // Collect statistics about the CFG if it was built.
  if (S.CollectStats && AC.isCFGBuilt()) {
    ++NumFunctionsAnalyzed;
    if (CFG *cfg = AC.getCFG()) {
      // If we successfully built a CFG for this context, record some more
      // detail information about it.
      NumCFGBlocks += cfg->getNumBlockIDs();
      MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
                                         cfg->getNumBlockIDs());
    } else {
      ++NumFunctionsWithBadCFGs;
    }
  }
}
   1657 
   1658 void clang::sema::AnalysisBasedWarnings::PrintStats() const {
   1659   llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";
   1660 
   1661   unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
   1662   unsigned AvgCFGBlocksPerFunction =
   1663       !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt;
   1664   llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
   1665                << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
   1666                << "  " << NumCFGBlocks << " CFG blocks built.\n"
   1667                << "  " << AvgCFGBlocksPerFunction
   1668                << " average CFG blocks per function.\n"
   1669                << "  " << MaxCFGBlocksPerFunction
   1670                << " max CFG blocks per function.\n";
   1671 
   1672   unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
   1673       : NumUninitAnalysisVariables/NumUninitAnalysisFunctions;
   1674   unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
   1675       : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions;
   1676   llvm::errs() << NumUninitAnalysisFunctions
   1677                << " functions analyzed for uninitialiazed variables\n"
   1678                << "  " << NumUninitAnalysisVariables << " variables analyzed.\n"
   1679                << "  " << AvgUninitVariablesPerFunction
   1680                << " average variables per function.\n"
   1681                << "  " << MaxUninitAnalysisVariablesPerFunction
   1682                << " max variables per function.\n"
   1683                << "  " << NumUninitAnalysisBlockVisits << " block visits.\n"
   1684                << "  " << AvgUninitBlockVisitsPerFunction
   1685                << " average block visits per function.\n"
   1686                << "  " << MaxUninitAnalysisBlockVisitsPerFunction
   1687                << " max block visits per function.\n";
   1688 }
   1689