      1 //===--- Preprocessor.h - C Language Family Preprocessor --------*- C++ -*-===//
      2 //
      3 //                     The LLVM Compiler Infrastructure
      4 //
      5 // This file is distributed under the University of Illinois Open Source
      6 // License. See LICENSE.TXT for details.
      7 //
      8 //===----------------------------------------------------------------------===//
      9 //
     10 //  This file defines the Preprocessor interface.
     11 //
     12 //===----------------------------------------------------------------------===//
     13 
     14 #ifndef LLVM_CLANG_LEX_PREPROCESSOR_H
     15 #define LLVM_CLANG_LEX_PREPROCESSOR_H
     16 
     17 #include "clang/Lex/MacroInfo.h"
     18 #include "clang/Lex/Lexer.h"
     19 #include "clang/Lex/PTHLexer.h"
     20 #include "clang/Lex/PPCallbacks.h"
     21 #include "clang/Lex/TokenLexer.h"
     22 #include "clang/Lex/PTHManager.h"
     23 #include "clang/Basic/Builtins.h"
     24 #include "clang/Basic/Diagnostic.h"
     25 #include "clang/Basic/IdentifierTable.h"
     26 #include "clang/Basic/SourceLocation.h"
     27 #include "llvm/ADT/DenseMap.h"
     28 #include "llvm/ADT/IntrusiveRefCntPtr.h"
     29 #include "llvm/ADT/SmallPtrSet.h"
     30 #include "llvm/ADT/OwningPtr.h"
     31 #include "llvm/ADT/SmallVector.h"
     32 #include "llvm/ADT/ArrayRef.h"
     33 #include "llvm/Support/Allocator.h"
     34 #include <vector>
     35 
     36 namespace clang {
     37 
     38 class SourceManager;
     39 class ExternalPreprocessorSource;
     40 class FileManager;
     41 class FileEntry;
     42 class HeaderSearch;
     43 class PragmaNamespace;
     44 class PragmaHandler;
     45 class CommentHandler;
     46 class ScratchBuffer;
     47 class TargetInfo;
     48 class PPCallbacks;
     49 class CodeCompletionHandler;
     50 class DirectoryLookup;
     51 class PreprocessingRecord;
     52 
     53 /// Preprocessor - This object engages in a tight little dance with the lexer to
     54 /// efficiently preprocess tokens.  Lexers know only about tokens within a
     55 /// single source file, and don't know anything about preprocessor-level issues
     56 /// like the #include stack, token expansion, etc.
     57 ///
     58 class Preprocessor : public llvm::RefCountedBase<Preprocessor> {
     59   Diagnostic        *Diags;
     60   LangOptions        Features;
     61   const TargetInfo  &Target;
     62   FileManager       &FileMgr;
     63   SourceManager     &SourceMgr;
     64   ScratchBuffer     *ScratchBuf;
     65   HeaderSearch      &HeaderInfo;
     66 
     67   /// \brief External source of macros.
     68   ExternalPreprocessorSource *ExternalSource;
     69 
     70   /// PTH - An optional PTHManager object used for getting tokens from
     71   ///  a token cache rather than lexing the original source file.
     72   llvm::OwningPtr<PTHManager> PTH;
     73 
     74   /// BP - A BumpPtrAllocator object used to quickly allocate and release
     75   ///  objects internal to the Preprocessor.
     76   llvm::BumpPtrAllocator BP;
     77 
     78   /// Identifiers for builtin macros and other builtins.
     79   IdentifierInfo *Ident__LINE__, *Ident__FILE__;   // __LINE__, __FILE__
     80   IdentifierInfo *Ident__DATE__, *Ident__TIME__;   // __DATE__, __TIME__
     81   IdentifierInfo *Ident__INCLUDE_LEVEL__;          // __INCLUDE_LEVEL__
     82   IdentifierInfo *Ident__BASE_FILE__;              // __BASE_FILE__
     83   IdentifierInfo *Ident__TIMESTAMP__;              // __TIMESTAMP__
     84   IdentifierInfo *Ident__COUNTER__;                // __COUNTER__
     85   IdentifierInfo *Ident_Pragma, *Ident__pragma;    // _Pragma, __pragma
     86   IdentifierInfo *Ident__VA_ARGS__;                // __VA_ARGS__
     87   IdentifierInfo *Ident__has_feature;              // __has_feature
     88   IdentifierInfo *Ident__has_extension;            // __has_extension
     89   IdentifierInfo *Ident__has_builtin;              // __has_builtin
     90   IdentifierInfo *Ident__has_attribute;            // __has_attribute
     91   IdentifierInfo *Ident__has_include;              // __has_include
     92   IdentifierInfo *Ident__has_include_next;         // __has_include_next
     93 
     94   SourceLocation DATELoc, TIMELoc;
     95   unsigned CounterValue;  // Next __COUNTER__ value.
     96 
     97   enum {
      98     /// MaxAllowedIncludeStackDepth - Maximum allowed depth of #includes.
     99     MaxAllowedIncludeStackDepth = 200
    100   };
    101 
    102   // State that is set before the preprocessor begins.
    103   bool KeepComments : 1;
    104   bool KeepMacroComments : 1;
    105 
    106   // State that changes while the preprocessor runs:
    107   bool InMacroArgs : 1;            // True if parsing fn macro invocation args.
    108 
    109   /// Whether the preprocessor owns the header search object.
    110   bool OwnsHeaderSearch : 1;
    111 
    112   /// DisableMacroExpansion - True if macro expansion is disabled.
    113   bool DisableMacroExpansion : 1;
    114 
    115   /// \brief Whether we have already loaded macros from the external source.
    116   mutable bool ReadMacrosFromExternalSource : 1;
    117 
    118   /// Identifiers - This is mapping/lookup information for all identifiers in
    119   /// the program, including program keywords.
    120   mutable IdentifierTable Identifiers;
    121 
    122   /// Selectors - This table contains all the selectors in the program. Unlike
    123   /// IdentifierTable above, this table *isn't* populated by the preprocessor.
     124   /// It is declared/expanded here because its role/lifetime is
     125   /// conceptually similar to that of the IdentifierTable. In addition, the current
     126   /// control flow (in clang::ParseAST()) makes it convenient to put it here.
    127   /// FIXME: Make sure the lifetime of Identifiers/Selectors *isn't* tied to
    128   /// the lifetime of the preprocessor.
    129   SelectorTable Selectors;
    130 
    131   /// BuiltinInfo - Information about builtins.
    132   Builtin::Context BuiltinInfo;
    133 
    134   /// PragmaHandlers - This tracks all of the pragmas that the client registered
    135   /// with this preprocessor.
    136   PragmaNamespace *PragmaHandlers;
    137 
    138   /// \brief Tracks all of the comment handlers that the client registered
    139   /// with this preprocessor.
    140   std::vector<CommentHandler *> CommentHandlers;
    141 
    142   /// \brief The code-completion handler.
    143   CodeCompletionHandler *CodeComplete;
    144 
    145   /// \brief The file that we're performing code-completion for, if any.
    146   const FileEntry *CodeCompletionFile;
    147 
    148   /// \brief The number of bytes that we will initially skip when entering the
    149   /// main file, which is used when loading a precompiled preamble, along
    150   /// with a flag that indicates whether skipping this number of bytes will
    151   /// place the lexer at the start of a line.
    152   std::pair<unsigned, bool> SkipMainFilePreamble;
    153 
    154   /// CurLexer - This is the current top of the stack that we're lexing from if
    155   /// not expanding a macro and we are lexing directly from source code.
    156   ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
    157   llvm::OwningPtr<Lexer> CurLexer;
    158 
    159   /// CurPTHLexer - This is the current top of stack that we're lexing from if
    160   ///  not expanding from a macro and we are lexing from a PTH cache.
    161   ///  Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
    162   llvm::OwningPtr<PTHLexer> CurPTHLexer;
    163 
     164   /// CurPPLexer - This is the current top of the stack that we're lexing from
    165   ///  if not expanding a macro.  This is an alias for either CurLexer or
    166   ///  CurPTHLexer.
    167   PreprocessorLexer *CurPPLexer;
    168 
     169   /// CurDirLookup - The DirectoryLookup structure used to find the current
    170   /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
    171   /// implement #include_next and find directory-specific properties.
    172   const DirectoryLookup *CurDirLookup;
    173 
    174   /// CurTokenLexer - This is the current macro we are expanding, if we are
    175   /// expanding a macro.  One of CurLexer and CurTokenLexer must be null.
    176   llvm::OwningPtr<TokenLexer> CurTokenLexer;
    177 
    178   /// IncludeMacroStack - This keeps track of the stack of files currently
    179   /// #included, and macros currently being expanded from, not counting
    180   /// CurLexer/CurTokenLexer.
    181   struct IncludeStackInfo {
    182     Lexer                 *TheLexer;
    183     PTHLexer              *ThePTHLexer;
    184     PreprocessorLexer     *ThePPLexer;
    185     TokenLexer            *TheTokenLexer;
    186     const DirectoryLookup *TheDirLookup;
    187 
    188     IncludeStackInfo(Lexer *L, PTHLexer* P, PreprocessorLexer* PPL,
    189                      TokenLexer* TL, const DirectoryLookup *D)
    190       : TheLexer(L), ThePTHLexer(P), ThePPLexer(PPL), TheTokenLexer(TL),
    191         TheDirLookup(D) {}
    192   };
    193   std::vector<IncludeStackInfo> IncludeMacroStack;
    194 
    195   /// Callbacks - These are actions invoked when some preprocessor activity is
    196   /// encountered (e.g. a file is #included, etc).
    197   PPCallbacks *Callbacks;
    198 
    199   /// Macros - For each IdentifierInfo with 'HasMacro' set, we keep a mapping
    200   /// to the actual definition of the macro.
    201   llvm::DenseMap<IdentifierInfo*, MacroInfo*> Macros;
    202 
     203   /// \brief Macros that we want to warn about because they are not used by the
     204   /// end of the translation unit; we store just their SourceLocations instead of
     205   /// something like MacroInfo*. The benefit of this is that when we are
     206   /// deserializing from a PCH, we don't need to deserialize identifiers and
     207   /// macros just to report that they are unused; we simply warn using the
     208   /// SourceLocations in this set (which will be filled in by the ASTReader).
     209   /// We use a SmallPtrSet instead of a vector for faster removal.
    210   typedef llvm::SmallPtrSet<SourceLocation, 32> WarnUnusedMacroLocsTy;
    211   WarnUnusedMacroLocsTy WarnUnusedMacroLocs;
    212 
     213   /// MacroArgCache - This is a "freelist" of MacroArgs objects that can be
    214   /// reused for quick allocation.
    215   MacroArgs *MacroArgCache;
    216   friend class MacroArgs;
    217 
    218   /// PragmaPushMacroInfo - For each IdentifierInfo used in a #pragma
    219   /// push_macro directive, we keep a MacroInfo stack used to restore
     220   /// the previous macro value.
    221   llvm::DenseMap<IdentifierInfo*, std::vector<MacroInfo*> > PragmaPushMacroInfo;
    222 
    223   /// \brief Expansion source location for the last macro that expanded
    224   /// to no tokens.
    225   SourceLocation LastEmptyMacroExpansionLoc;
    226 
    227   // Various statistics we track for performance analysis.
    228   unsigned NumDirectives, NumIncluded, NumDefined, NumUndefined, NumPragma;
    229   unsigned NumIf, NumElse, NumEndif;
    230   unsigned NumEnteredSourceFiles, MaxIncludeStackDepth;
    231   unsigned NumMacroExpanded, NumFnMacroExpanded, NumBuiltinMacroExpanded;
    232   unsigned NumFastMacroExpanded, NumTokenPaste, NumFastTokenPaste;
    233   unsigned NumSkipped;
    234 
     235   /// Predefines - This string contains the predefined macros that the
     236   /// preprocessor should use, e.g. from the command line.
    237   std::string Predefines;
    238 
    239   /// TokenLexerCache - Cache macro expanders to reduce malloc traffic.
    240   enum { TokenLexerCacheSize = 8 };
    241   unsigned NumCachedTokenLexers;
    242   TokenLexer *TokenLexerCache[TokenLexerCacheSize];
    243 
    244   /// \brief Keeps macro expanded tokens for TokenLexers.
     245   ///
     246   /// Works like a stack; a TokenLexer adds the macro expanded tokens that it is
     247   /// going to lex into the cache, and when it finishes, the tokens are removed
     248   /// from the end of the cache.
    249   llvm::SmallVector<Token, 16> MacroExpandedTokens;
    250   std::vector<std::pair<TokenLexer *, size_t> > MacroExpandingLexersStack;
    251 
    252   /// \brief A record of the macro definitions and expansions that
    253   /// occurred during preprocessing.
    254   ///
    255   /// This is an optional side structure that can be enabled with
    256   /// \c createPreprocessingRecord() prior to preprocessing.
    257   PreprocessingRecord *Record;
    258 
    259 private:  // Cached tokens state.
    260   typedef llvm::SmallVector<Token, 1> CachedTokensTy;
    261 
    262   /// CachedTokens - Cached tokens are stored here when we do backtracking or
    263   /// lookahead. They are "lexed" by the CachingLex() method.
    264   CachedTokensTy CachedTokens;
    265 
    266   /// CachedLexPos - The position of the cached token that CachingLex() should
    267   /// "lex" next. If it points beyond the CachedTokens vector, it means that
    268   /// a normal Lex() should be invoked.
    269   CachedTokensTy::size_type CachedLexPos;
    270 
    271   /// BacktrackPositions - Stack of backtrack positions, allowing nested
    272   /// backtracks. The EnableBacktrackAtThisPos() method pushes a position to
    273   /// indicate where CachedLexPos should be set when the BackTrack() method is
    274   /// invoked (at which point the last position is popped).
    275   std::vector<CachedTokensTy::size_type> BacktrackPositions;
    276 
    277   struct MacroInfoChain {
    278     MacroInfo MI;
    279     MacroInfoChain *Next;
    280     MacroInfoChain *Prev;
    281   };
    282 
    283   /// MacroInfos are managed as a chain for easy disposal.  This is the head
    284   /// of that list.
    285   MacroInfoChain *MIChainHead;
    286 
    287   /// MICache - A "freelist" of MacroInfo objects that can be reused for quick
    288   /// allocation.
    289   MacroInfoChain *MICache;
    290 
    291   MacroInfo *getInfoForMacro(IdentifierInfo *II) const;
    292 
    293 public:
    294   Preprocessor(Diagnostic &diags, const LangOptions &opts,
    295                const TargetInfo &target,
    296                SourceManager &SM, HeaderSearch &Headers,
    297                IdentifierInfoLookup *IILookup = 0,
    298                bool OwnsHeaderSearch = false);
    299 
    300   ~Preprocessor();
    301 
    302   Diagnostic &getDiagnostics() const { return *Diags; }
    303   void setDiagnostics(Diagnostic &D) { Diags = &D; }
    304 
    305   const LangOptions &getLangOptions() const { return Features; }
    306   const TargetInfo &getTargetInfo() const { return Target; }
    307   FileManager &getFileManager() const { return FileMgr; }
    308   SourceManager &getSourceManager() const { return SourceMgr; }
    309   HeaderSearch &getHeaderSearchInfo() const { return HeaderInfo; }
    310 
    311   IdentifierTable &getIdentifierTable() { return Identifiers; }
    312   SelectorTable &getSelectorTable() { return Selectors; }
    313   Builtin::Context &getBuiltinInfo() { return BuiltinInfo; }
    314   llvm::BumpPtrAllocator &getPreprocessorAllocator() { return BP; }
    315 
    316   void setPTHManager(PTHManager* pm);
    317 
    318   PTHManager *getPTHManager() { return PTH.get(); }
    319 
    320   void setExternalSource(ExternalPreprocessorSource *Source) {
    321     ExternalSource = Source;
    322   }
    323 
    324   ExternalPreprocessorSource *getExternalSource() const {
    325     return ExternalSource;
    326   }
    327 
    328   /// SetCommentRetentionState - Control whether or not the preprocessor retains
    329   /// comments in output.
    330   void SetCommentRetentionState(bool KeepComments, bool KeepMacroComments) {
    331     this->KeepComments = KeepComments | KeepMacroComments;
    332     this->KeepMacroComments = KeepMacroComments;
    333   }
    334 
    335   bool getCommentRetentionState() const { return KeepComments; }
    336 
    337   /// isCurrentLexer - Return true if we are lexing directly from the specified
    338   /// lexer.
    339   bool isCurrentLexer(const PreprocessorLexer *L) const {
    340     return CurPPLexer == L;
    341   }
    342 
    343   /// getCurrentLexer - Return the current lexer being lexed from.  Note
    344   /// that this ignores any potentially active macro expansions and _Pragma
    345   /// expansions going on at the time.
    346   PreprocessorLexer *getCurrentLexer() const { return CurPPLexer; }
    347 
    348   /// getCurrentFileLexer - Return the current file lexer being lexed from.
    349   /// Note that this ignores any potentially active macro expansions and _Pragma
    350   /// expansions going on at the time.
    351   PreprocessorLexer *getCurrentFileLexer() const;
    352 
    353   /// getPPCallbacks/addPPCallbacks - Accessors for preprocessor callbacks.
    354   /// Note that this class takes ownership of any PPCallbacks object given to
    355   /// it.
    356   PPCallbacks *getPPCallbacks() const { return Callbacks; }
    357   void addPPCallbacks(PPCallbacks *C) {
    358     if (Callbacks)
    359       C = new PPChainedCallbacks(C, Callbacks);
    360     Callbacks = C;
    361   }
    362 
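  // Illustrative sketch (not part of the original header): registering a
  // client callback object.  'MyPPCallbacks' is a hypothetical PPCallbacks
  // subclass; the preprocessor takes ownership of the pointer, and any
  // previously registered callbacks are chained behind the new one.
  //
  //   class MyPPCallbacks : public PPCallbacks {
  //     // ... override whichever PPCallbacks hooks are of interest ...
  //   };
  //
  //   PP.addPPCallbacks(new MyPPCallbacks());  // PP is a Preprocessor
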
    363   /// getMacroInfo - Given an identifier, return the MacroInfo it is #defined to
    364   /// or null if it isn't #define'd.
    365   MacroInfo *getMacroInfo(IdentifierInfo *II) const {
    366     if (!II->hasMacroDefinition())
    367       return 0;
    368 
    369     return getInfoForMacro(II);
    370   }
    371 
    372   /// setMacroInfo - Specify a macro for this identifier.
    373   ///
    374   void setMacroInfo(IdentifierInfo *II, MacroInfo *MI);
    375 
    376   /// macro_iterator/macro_begin/macro_end - This allows you to walk the current
    377   /// state of the macro table.  This visits every currently-defined macro.
    378   typedef llvm::DenseMap<IdentifierInfo*,
    379                          MacroInfo*>::const_iterator macro_iterator;
    380   macro_iterator macro_begin(bool IncludeExternalMacros = true) const;
    381   macro_iterator macro_end(bool IncludeExternalMacros = true) const;
    382 
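  // Illustrative sketch (not part of the original header): looking up one
  // macro and walking the whole macro table.  Assumes 'PP' is a Preprocessor
  // and 'II' an IdentifierInfo* obtained from its identifier table.
  //
  //   if (MacroInfo *MI = PP.getMacroInfo(II)) {
  //     // II is currently #defined; MI describes the definition.
  //   }
  //
  //   for (Preprocessor::macro_iterator I = PP.macro_begin(),
  //                                     E = PP.macro_end(); I != E; ++I) {
  //     IdentifierInfo *Name = I->first;
  //     MacroInfo *Info = I->second;
  //     // ... inspect Name/Info ...
  //   }
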
    383   /// \brief Expansion source location for the last macro that expanded
    384   /// to no tokens.
    385   SourceLocation getLastEmptyMacroExpansionLoc() const {
    386     return LastEmptyMacroExpansionLoc;
    387   }
    388 
    389   const std::string &getPredefines() const { return Predefines; }
    390   /// setPredefines - Set the predefines for this Preprocessor.  These
    391   /// predefines are automatically injected when parsing the main file.
    392   void setPredefines(const char *P) { Predefines = P; }
    393   void setPredefines(const std::string &P) { Predefines = P; }
    394 
     395   /// getIdentifierInfo - Return information about the specified preprocessor
     396   /// identifier token.  Prefer passing a StringRef over constructing a
     397   /// std::string when the identifier is already available as a character
     398   /// range; this avoids allocating and copying memory just to build the
     399   /// lookup key.
    400   IdentifierInfo *getIdentifierInfo(llvm::StringRef Name) const {
    401     return &Identifiers.get(Name);
    402   }
    403 
    404   /// AddPragmaHandler - Add the specified pragma handler to the preprocessor.
    405   /// If 'Namespace' is non-null, then it is a token required to exist on the
    406   /// pragma line before the pragma string starts, e.g. "STDC" or "GCC".
    407   void AddPragmaHandler(llvm::StringRef Namespace, PragmaHandler *Handler);
    408   void AddPragmaHandler(PragmaHandler *Handler) {
    409     AddPragmaHandler(llvm::StringRef(), Handler);
    410   }
    411 
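  // Illustrative sketch (not part of the original header): routing
  // '#pragma mytool ...' lines to a custom handler.  'MyPragmaHandler' is a
  // hypothetical PragmaHandler subclass, and the namespace string used for
  // removal must match the one used for registration.
  //
  //   PragmaHandler *H = new MyPragmaHandler();
  //   PP.AddPragmaHandler("mytool", H);
  //   // ... preprocess ...
  //   PP.RemovePragmaHandler("mytool", H);
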
    412   /// RemovePragmaHandler - Remove the specific pragma handler from
    413   /// the preprocessor. If \arg Namespace is non-null, then it should
    414   /// be the namespace that \arg Handler was added to. It is an error
    415   /// to remove a handler that has not been registered.
    416   void RemovePragmaHandler(llvm::StringRef Namespace, PragmaHandler *Handler);
    417   void RemovePragmaHandler(PragmaHandler *Handler) {
    418     RemovePragmaHandler(llvm::StringRef(), Handler);
    419   }
    420 
    421   /// \brief Add the specified comment handler to the preprocessor.
    422   void AddCommentHandler(CommentHandler *Handler);
    423 
    424   /// \brief Remove the specified comment handler.
    425   ///
    426   /// It is an error to remove a handler that has not been registered.
    427   void RemoveCommentHandler(CommentHandler *Handler);
    428 
    429   /// \brief Set the code completion handler to the given object.
    430   void setCodeCompletionHandler(CodeCompletionHandler &Handler) {
    431     CodeComplete = &Handler;
    432   }
    433 
    434   /// \brief Retrieve the current code-completion handler.
    435   CodeCompletionHandler *getCodeCompletionHandler() const {
    436     return CodeComplete;
    437   }
    438 
    439   /// \brief Clear out the code completion handler.
    440   void clearCodeCompletionHandler() {
    441     CodeComplete = 0;
    442   }
    443 
    444   /// \brief Hook used by the lexer to invoke the "natural language" code
    445   /// completion point.
    446   void CodeCompleteNaturalLanguage();
    447 
    448   /// \brief Retrieve the preprocessing record, or NULL if there is no
    449   /// preprocessing record.
    450   PreprocessingRecord *getPreprocessingRecord() const { return Record; }
    451 
    452   /// \brief Create a new preprocessing record, which will keep track of
    453   /// all macro expansions, macro definitions, etc.
    454   void createPreprocessingRecord(bool IncludeNestedMacroExpansions);
    455 
    456   /// EnterMainSourceFile - Enter the specified FileID as the main source file,
    457   /// which implicitly adds the builtin defines etc.
    458   void EnterMainSourceFile();
    459 
    460   /// EndSourceFile - Inform the preprocessor callbacks that processing is
    461   /// complete.
    462   void EndSourceFile();
    463 
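  // Illustrative sketch (not part of the original header): the typical
  // top-level loop that drives the preprocessor, roughly what a
  // preprocess-only invocation does.  Assumes 'PP' is fully initialized.
  //
  //   PP.EnterMainSourceFile();
  //   Token Tok;
  //   do {
  //     PP.Lex(Tok);
  //     // ... consume Tok ...
  //   } while (Tok.isNot(tok::eof));
  //   PP.EndSourceFile();
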
    464   /// EnterSourceFile - Add a source file to the top of the include stack and
    465   /// start lexing tokens from it instead of the current buffer.  Emit an error
    466   /// and don't enter the file on error.
    467   void EnterSourceFile(FileID CurFileID, const DirectoryLookup *Dir,
    468                        SourceLocation Loc);
    469 
    470   /// EnterMacro - Add a Macro to the top of the include stack and start lexing
    471   /// tokens from it instead of the current buffer.  Args specifies the
    472   /// tokens input to a function-like macro.
    473   ///
    474   /// ILEnd specifies the location of the ')' for a function-like macro or the
    475   /// identifier for an object-like macro.
    476   void EnterMacro(Token &Identifier, SourceLocation ILEnd, MacroArgs *Args);
    477 
    478   /// EnterTokenStream - Add a "macro" context to the top of the include stack,
    479   /// which will cause the lexer to start returning the specified tokens.
    480   ///
    481   /// If DisableMacroExpansion is true, tokens lexed from the token stream will
    482   /// not be subject to further macro expansion.  Otherwise, these tokens will
    483   /// be re-macro-expanded when/if expansion is enabled.
    484   ///
    485   /// If OwnsTokens is false, this method assumes that the specified stream of
    486   /// tokens has a permanent owner somewhere, so they do not need to be copied.
    487   /// If it is true, it assumes the array of tokens is allocated with new[] and
    488   /// must be freed.
    489   ///
    490   void EnterTokenStream(const Token *Toks, unsigned NumToks,
    491                         bool DisableMacroExpansion, bool OwnsTokens);
    492 
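  // Illustrative sketch (not part of the original header): replaying a fixed
  // stream of already-lexed tokens through subsequent Lex() calls.  'Toks' is
  // a hypothetical array that outlives the stream, so OwnsTokens is false and
  // the tokens are not copied.
  //
  //   PP.EnterTokenStream(Toks, NumToks,
  //                       /*DisableMacroExpansion=*/true,
  //                       /*OwnsTokens=*/false);
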
    493   /// RemoveTopOfLexerStack - Pop the current lexer/macro exp off the top of the
    494   /// lexer stack.  This should only be used in situations where the current
    495   /// state of the top-of-stack lexer is known.
    496   void RemoveTopOfLexerStack();
    497 
    498   /// EnableBacktrackAtThisPos - From the point that this method is called, and
    499   /// until CommitBacktrackedTokens() or Backtrack() is called, the Preprocessor
    500   /// keeps track of the lexed tokens so that a subsequent Backtrack() call will
    501   /// make the Preprocessor re-lex the same tokens.
    502   ///
    503   /// Nested backtracks are allowed, meaning that EnableBacktrackAtThisPos can
    504   /// be called multiple times and CommitBacktrackedTokens/Backtrack calls will
    505   /// be combined with the EnableBacktrackAtThisPos calls in reverse order.
    506   ///
    507   /// NOTE: *DO NOT* forget to call either CommitBacktrackedTokens or Backtrack
    508   /// at some point after EnableBacktrackAtThisPos. If you don't, caching of
    509   /// tokens will continue indefinitely.
    510   ///
    511   void EnableBacktrackAtThisPos();
    512 
    513   /// CommitBacktrackedTokens - Disable the last EnableBacktrackAtThisPos call.
    514   void CommitBacktrackedTokens();
    515 
    516   /// Backtrack - Make Preprocessor re-lex the tokens that were lexed since
    517   /// EnableBacktrackAtThisPos() was previously called.
    518   void Backtrack();
    519 
    520   /// isBacktrackEnabled - True if EnableBacktrackAtThisPos() was called and
    521   /// caching of tokens is on.
    522   bool isBacktrackEnabled() const { return !BacktrackPositions.empty(); }
    523 
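  // Illustrative sketch (not part of the original header): tentative parsing
  // with backtracking.  Every EnableBacktrackAtThisPos() call must eventually
  // be paired with either CommitBacktrackedTokens() or Backtrack().
  //
  //   PP.EnableBacktrackAtThisPos();
  //   Token Tok;
  //   PP.Lex(Tok);
  //   if (/* Tok starts the construct we speculated about */ true)
  //     PP.CommitBacktrackedTokens();  // keep the tokens; stop caching
  //   else
  //     PP.Backtrack();                // the same tokens will be re-lexed
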
    524   /// Lex - To lex a token from the preprocessor, just pull a token from the
    525   /// current lexer or macro object.
    526   void Lex(Token &Result) {
    527     if (CurLexer)
    528       CurLexer->Lex(Result);
    529     else if (CurPTHLexer)
    530       CurPTHLexer->Lex(Result);
    531     else if (CurTokenLexer)
    532       CurTokenLexer->Lex(Result);
    533     else
    534       CachingLex(Result);
    535   }
    536 
    537   /// LexNonComment - Lex a token.  If it's a comment, keep lexing until we get
    538   /// something not a comment.  This is useful in -E -C mode where comments
    539   /// would foul up preprocessor directive handling.
    540   void LexNonComment(Token &Result) {
    541     do
    542       Lex(Result);
    543     while (Result.getKind() == tok::comment);
    544   }
    545 
    546   /// LexUnexpandedToken - This is just like Lex, but this disables macro
    547   /// expansion of identifier tokens.
    548   void LexUnexpandedToken(Token &Result) {
    549     // Disable macro expansion.
    550     bool OldVal = DisableMacroExpansion;
    551     DisableMacroExpansion = true;
    552     // Lex the token.
    553     Lex(Result);
    554 
    555     // Reenable it.
    556     DisableMacroExpansion = OldVal;
    557   }
    558 
    559   /// LookAhead - This peeks ahead N tokens and returns that token without
    560   /// consuming any tokens.  LookAhead(0) returns the next token that would be
    561   /// returned by Lex(), LookAhead(1) returns the token after it, etc.  This
    562   /// returns normal tokens after phase 5.  As such, it is equivalent to using
    563   /// 'Lex', not 'LexUnexpandedToken'.
    564   const Token &LookAhead(unsigned N) {
    565     if (CachedLexPos + N < CachedTokens.size())
    566       return CachedTokens[CachedLexPos+N];
    567     else
    568       return PeekAhead(N+1);
    569   }
    570 
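  // Illustrative sketch (not part of the original header): peeking at
  // upcoming tokens without consuming anything.  LookAhead(0) is the token
  // that the next Lex() call would return.
  //
  //   const Token &Next = PP.LookAhead(0);
  //   const Token &AfterNext = PP.LookAhead(1);
  //   if (Next.is(tok::identifier) && AfterNext.is(tok::l_paren)) {
  //     // looks like a call-style construct; no tokens have been consumed
  //   }
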
    571   /// RevertCachedTokens - When backtracking is enabled and tokens are cached,
    572   /// this allows to revert a specific number of tokens.
    573   /// Note that the number of tokens being reverted should be up to the last
    574   /// backtrack position, not more.
    575   void RevertCachedTokens(unsigned N) {
    576     assert(isBacktrackEnabled() &&
    577            "Should only be called when tokens are cached for backtracking");
    578     assert(signed(CachedLexPos) - signed(N) >= signed(BacktrackPositions.back())
    579          && "Should revert tokens up to the last backtrack position, not more");
    580     assert(signed(CachedLexPos) - signed(N) >= 0 &&
    581            "Corrupted backtrack positions ?");
    582     CachedLexPos -= N;
    583   }
    584 
    585   /// EnterToken - Enters a token in the token stream to be lexed next. If
    586   /// BackTrack() is called afterwards, the token will remain at the insertion
    587   /// point.
    588   void EnterToken(const Token &Tok) {
    589     EnterCachingLexMode();
    590     CachedTokens.insert(CachedTokens.begin()+CachedLexPos, Tok);
    591   }
    592 
    593   /// AnnotateCachedTokens - We notify the Preprocessor that if it is caching
    594   /// tokens (because backtrack is enabled) it should replace the most recent
    595   /// cached tokens with the given annotation token. This function has no effect
    596   /// if backtracking is not enabled.
    597   ///
     598   /// Note that the use of this function is just an optimization, so that the
     599   /// cached tokens don't get re-parsed and re-resolved after a backtrack is
    600   /// invoked.
    601   void AnnotateCachedTokens(const Token &Tok) {
    602     assert(Tok.isAnnotation() && "Expected annotation token");
    603     if (CachedLexPos != 0 && isBacktrackEnabled())
    604       AnnotatePreviousCachedTokens(Tok);
    605   }
    606 
    607   /// \brief Replace the last token with an annotation token.
    608   ///
    609   /// Like AnnotateCachedTokens(), this routine replaces an
    610   /// already-parsed (and resolved) token with an annotation
    611   /// token. However, this routine only replaces the last token with
    612   /// the annotation token; it does not affect any other cached
    613   /// tokens. This function has no effect if backtracking is not
    614   /// enabled.
    615   void ReplaceLastTokenWithAnnotation(const Token &Tok) {
    616     assert(Tok.isAnnotation() && "Expected annotation token");
    617     if (CachedLexPos != 0 && isBacktrackEnabled())
    618       CachedTokens[CachedLexPos-1] = Tok;
    619   }
    620 
    621   /// \brief Specify the point at which code-completion will be performed.
    622   ///
    623   /// \param File the file in which code completion should occur. If
    624   /// this file is included multiple times, code-completion will
    625   /// perform completion the first time it is included. If NULL, this
    626   /// function clears out the code-completion point.
    627   ///
    628   /// \param Line the line at which code completion should occur
    629   /// (1-based).
    630   ///
    631   /// \param Column the column at which code completion should occur
    632   /// (1-based).
    633   ///
    634   /// \returns true if an error occurred, false otherwise.
    635   bool SetCodeCompletionPoint(const FileEntry *File,
    636                               unsigned Line, unsigned Column);
    637 
    638   /// \brief Determine if this source location refers into the file
    639   /// for which we are performing code completion.
    640   bool isCodeCompletionFile(SourceLocation FileLoc) const;
    641 
    642   /// \brief Determine if we are performing code completion.
    643   bool isCodeCompletionEnabled() const { return CodeCompletionFile != 0; }
    644 
     645   /// \brief Instruct the preprocessor to skip part of the main
     646   /// source file.
     647   ///
     648   /// \param Bytes The number of bytes in the preamble to skip.
     649   ///
     650   /// \param StartOfLine Whether skipping these bytes puts the lexer at the
    651   /// start of a line.
    652   void setSkipMainFilePreamble(unsigned Bytes, bool StartOfLine) {
    653     SkipMainFilePreamble.first = Bytes;
    654     SkipMainFilePreamble.second = StartOfLine;
    655   }
    656 
     657   /// Diag - Forwarding function for diagnostics.  This emits a diagnostic at
     658   /// the specified source location (or, in the overload below, at the given
     659   /// token's location).
    660   DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID) {
    661     return Diags->Report(Loc, DiagID);
    662   }
    663 
    664   DiagnosticBuilder Diag(const Token &Tok, unsigned DiagID) {
    665     return Diags->Report(Tok.getLocation(), DiagID);
    666   }
    667 
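  // Illustrative sketch (not part of the original header): emitting a
  // diagnostic through the preprocessor.  'diag::some_diag_id' stands in for
  // a real diagnostic ID; the returned DiagnosticBuilder accepts extra
  // arguments via operator<<.
  //
  //   PP.Diag(Tok, diag::some_diag_id) << Tok.getIdentifierInfo();
  //   PP.Diag(Tok.getLocation(), diag::some_diag_id) << "extra argument";
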
    668   /// getSpelling() - Return the 'spelling' of the token at the given
    669   /// location; does not go up to the spelling location or down to the
    670   /// expansion location.
    671   ///
    672   /// \param buffer A buffer which will be used only if the token requires
    673   ///   "cleaning", e.g. if it contains trigraphs or escaped newlines
    674   /// \param invalid If non-null, will be set \c true if an error occurs.
    675   llvm::StringRef getSpelling(SourceLocation loc,
    676                               llvm::SmallVectorImpl<char> &buffer,
    677                               bool *invalid = 0) const {
    678     return Lexer::getSpelling(loc, buffer, SourceMgr, Features, invalid);
    679   }
    680 
    681   /// getSpelling() - Return the 'spelling' of the Tok token.  The spelling of a
    682   /// token is the characters used to represent the token in the source file
    683   /// after trigraph expansion and escaped-newline folding.  In particular, this
    684   /// wants to get the true, uncanonicalized, spelling of things like digraphs
    685   /// UCNs, etc.
    686   ///
    687   /// \param Invalid If non-null, will be set \c true if an error occurs.
    688   std::string getSpelling(const Token &Tok, bool *Invalid = 0) const {
    689     return Lexer::getSpelling(Tok, SourceMgr, Features, Invalid);
    690   }
    691 
    692   /// getSpelling - This method is used to get the spelling of a token into a
    693   /// preallocated buffer, instead of as an std::string.  The caller is required
    694   /// to allocate enough space for the token, which is guaranteed to be at least
    695   /// Tok.getLength() bytes long.  The length of the actual result is returned.
    696   ///
    697   /// Note that this method may do two possible things: it may either fill in
    698   /// the buffer specified with characters, or it may *change the input pointer*
    699   /// to point to a constant buffer with the data already in it (avoiding a
    700   /// copy).  The caller is not allowed to modify the returned buffer pointer
    701   /// if an internal buffer is returned.
    702   unsigned getSpelling(const Token &Tok, const char *&Buffer,
    703                        bool *Invalid = 0) const {
    704     return Lexer::getSpelling(Tok, Buffer, SourceMgr, Features, Invalid);
    705   }
    706 
    707   /// getSpelling - This method is used to get the spelling of a token into a
    708   /// SmallVector. Note that the returned StringRef may not point to the
    709   /// supplied buffer if a copy can be avoided.
    710   llvm::StringRef getSpelling(const Token &Tok,
    711                               llvm::SmallVectorImpl<char> &Buffer,
    712                               bool *Invalid = 0) const;
    713 
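  // Illustrative sketch (not part of the original header): getting a token's
  // spelling without constructing a std::string.  The returned StringRef may
  // point into the source buffer rather than into 'SpellingBuf'.
  //
  //   llvm::SmallString<64> SpellingBuf;
  //   bool Invalid = false;
  //   llvm::StringRef Spelling = PP.getSpelling(Tok, SpellingBuf, &Invalid);
  //   if (!Invalid) {
  //     // ... use Spelling ...
  //   }
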
    714   /// getSpellingOfSingleCharacterNumericConstant - Tok is a numeric constant
    715   /// with length 1, return the character.
    716   char getSpellingOfSingleCharacterNumericConstant(const Token &Tok,
    717                                                    bool *Invalid = 0) const {
    718     assert(Tok.is(tok::numeric_constant) &&
    719            Tok.getLength() == 1 && "Called on unsupported token");
    720     assert(!Tok.needsCleaning() && "Token can't need cleaning with length 1");
    721 
    722     // If the token is carrying a literal data pointer, just use it.
    723     if (const char *D = Tok.getLiteralData())
    724       return *D;
    725 
    726     // Otherwise, fall back on getCharacterData, which is slower, but always
    727     // works.
    728     return *SourceMgr.getCharacterData(Tok.getLocation(), Invalid);
    729   }
    730 
    731   /// CreateString - Plop the specified string into a scratch buffer and set the
    732   /// specified token's location and length to it.  If specified, the source
    733   /// location provides a location of the expansion point of the token.
    734   void CreateString(const char *Buf, unsigned Len,
    735                     Token &Tok, SourceLocation SourceLoc = SourceLocation());
    736 
    737   /// \brief Computes the source location just past the end of the
    738   /// token at this source location.
    739   ///
    740   /// This routine can be used to produce a source location that
    741   /// points just past the end of the token referenced by \p Loc, and
    742   /// is generally used when a diagnostic needs to point just after a
     743   /// token where it expected something different than what it received. If
    744   /// the returned source location would not be meaningful (e.g., if
    745   /// it points into a macro), this routine returns an invalid
    746   /// source location.
    747   ///
    748   /// \param Offset an offset from the end of the token, where the source
    749   /// location should refer to. The default offset (0) produces a source
    750   /// location pointing just past the end of the token; an offset of 1 produces
    751   /// a source location pointing to the last character in the token, etc.
    752   SourceLocation getLocForEndOfToken(SourceLocation Loc, unsigned Offset = 0) {
    753     return Lexer::getLocForEndOfToken(Loc, Offset, SourceMgr, Features);
    754   }
    755 
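  // Illustrative sketch (not part of the original header): pointing a
  // diagnostic just past the end of a token, e.g. where a missing ';' should
  // have appeared.  'diag::some_diag_id' is a placeholder ID.
  //
  //   SourceLocation EndLoc = PP.getLocForEndOfToken(Tok.getLocation());
  //   if (EndLoc.isValid())
  //     PP.Diag(EndLoc, diag::some_diag_id);
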
    756   /// \brief Returns true if the given MacroID location points at the first
    757   /// token of the macro expansion.
    758   bool isAtStartOfMacroExpansion(SourceLocation loc) const {
    759     return Lexer::isAtStartOfMacroExpansion(loc, SourceMgr, Features);
    760   }
    761 
    762   /// \brief Returns true if the given MacroID location points at the last
    763   /// token of the macro expansion.
    764   bool isAtEndOfMacroExpansion(SourceLocation loc) const {
    765     return Lexer::isAtEndOfMacroExpansion(loc, SourceMgr, Features);
    766   }
    767 
    768   /// DumpToken - Print the token to stderr, used for debugging.
    769   ///
    770   void DumpToken(const Token &Tok, bool DumpFlags = false) const;
    771   void DumpLocation(SourceLocation Loc) const;
    772   void DumpMacro(const MacroInfo &MI) const;
    773 
    774   /// AdvanceToTokenCharacter - Given a location that specifies the start of a
    775   /// token, return a new location that specifies a character within the token.
    776   SourceLocation AdvanceToTokenCharacter(SourceLocation TokStart,
    777                                          unsigned Char) const {
    778     return Lexer::AdvanceToTokenCharacter(TokStart, Char, SourceMgr, Features);
    779   }
    780 
    781   /// IncrementPasteCounter - Increment the counters for the number of token
    782   /// paste operations performed.  If fast was specified, this is a 'fast paste'
    783   /// case we handled.
    784   ///
    785   void IncrementPasteCounter(bool isFast) {
    786     if (isFast)
    787       ++NumFastTokenPaste;
    788     else
    789       ++NumTokenPaste;
    790   }
    791 
    792   void PrintStats();
    793 
    794   size_t getTotalMemory() const;
    795 
    796   /// HandleMicrosoftCommentPaste - When the macro expander pastes together a
     797   /// comment (/##/) in Microsoft mode, this method handles updating the current
    798   /// state, returning the token on the next source line.
    799   void HandleMicrosoftCommentPaste(Token &Tok);
    800 
    801   //===--------------------------------------------------------------------===//
    802   // Preprocessor callback methods.  These are invoked by a lexer as various
    803   // directives and events are found.
    804 
    805   /// LookUpIdentifierInfo - Given a tok::raw_identifier token, look up the
    806   /// identifier information for the token and install it into the token,
    807   /// updating the token kind accordingly.
    808   IdentifierInfo *LookUpIdentifierInfo(Token &Identifier) const;
    809 
    810 private:
    811   llvm::DenseMap<IdentifierInfo*,unsigned> PoisonReasons;
    812 
    813 public:
    814 
    815   // SetPoisonReason - Call this function to indicate the reason for
    816   // poisoning an identifier. If that identifier is accessed while
    817   // poisoned, then this reason will be used instead of the default
    818   // "poisoned" diagnostic.
    819   void SetPoisonReason(IdentifierInfo *II, unsigned DiagID);
    820 
    821   // HandlePoisonedIdentifier - Display reason for poisoned
    822   // identifier.
     823   void HandlePoisonedIdentifier(Token &Tok);
    824 
     825   void MaybeHandlePoisonedIdentifier(Token &Identifier) {
     826     if (IdentifierInfo *II = Identifier.getIdentifierInfo()) {
     827       if (II->isPoisoned()) {
     828         HandlePoisonedIdentifier(Identifier);
     829       }
     830     }
     831   }
    832 
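  // Illustrative sketch (not part of the original header): poisoning an
  // identifier with a custom diagnostic.  'diag::some_diag_id' is a
  // placeholder for a real diagnostic ID.
  //
  //   IdentifierInfo *II = PP.getIdentifierInfo("gets");
  //   II->setIsPoisoned();
  //   PP.SetPoisonReason(II, diag::some_diag_id);
  //   // Later, for any lexed token that names the poisoned identifier:
  //   PP.MaybeHandlePoisonedIdentifier(Tok);
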
    833 private:
    834   /// Identifiers used for SEH handling in Borland. These are only
     835   /// allowed in particular circumstances.
    836   IdentifierInfo *Ident__exception_code, *Ident___exception_code, *Ident_GetExceptionCode; // __except block
    837   IdentifierInfo *Ident__exception_info, *Ident___exception_info, *Ident_GetExceptionInfo; // __except filter expression
    838   IdentifierInfo *Ident__abnormal_termination, *Ident___abnormal_termination, *Ident_AbnormalTermination; // __finally
    839 public:
    840   void PoisonSEHIdentifiers(bool Poison = true); // Borland
    841 
    842   /// HandleIdentifier - This callback is invoked when the lexer reads an
     843   /// identifier and has filled in the token's IdentifierInfo member.  This
    844   /// callback potentially macro expands it or turns it into a named token (like
    845   /// 'for').
    846   void HandleIdentifier(Token &Identifier);
    847 
    848 
    849   /// HandleEndOfFile - This callback is invoked when the lexer hits the end of
    850   /// the current file.  This either returns the EOF token and returns true, or
    851   /// pops a level off the include stack and returns false, at which point the
     852   /// client should call Lex() again.
    853   bool HandleEndOfFile(Token &Result, bool isEndOfMacro = false);
    854 
    855   /// HandleEndOfTokenLexer - This callback is invoked when the current
    856   /// TokenLexer hits the end of its token stream.
    857   bool HandleEndOfTokenLexer(Token &Result);
    858 
    859   /// HandleDirective - This callback is invoked when the lexer sees a # token
    860   /// at the start of a line.  This consumes the directive, modifies the
    861   /// lexer/preprocessor state, and advances the lexer(s) so that the next token
    862   /// read is the correct one.
    863   void HandleDirective(Token &Result);
    864 
    865   /// CheckEndOfDirective - Ensure that the next token is a tok::eod token.  If
    866   /// not, emit a diagnostic and consume up until the eod.  If EnableMacros is
    867   /// true, then we consider macros that expand to zero tokens as being ok.
    868   void CheckEndOfDirective(const char *Directive, bool EnableMacros = false);
    869 
    870   /// DiscardUntilEndOfDirective - Read and discard all tokens remaining on the
    871   /// current line until the tok::eod token is found.
    872   void DiscardUntilEndOfDirective();
    873 
    874   /// SawDateOrTime - This returns true if the preprocessor has seen a use of
    875   /// __DATE__ or __TIME__ in the file so far.
    876   bool SawDateOrTime() const {
    877     return DATELoc != SourceLocation() || TIMELoc != SourceLocation();
    878   }
    879   unsigned getCounterValue() const { return CounterValue; }
    880   void setCounterValue(unsigned V) { CounterValue = V; }
    881 
     882   /// AllocateMacroInfo - Allocate a new MacroInfo object with the provided
    883   ///  SourceLocation.
    884   MacroInfo *AllocateMacroInfo(SourceLocation L);
    885 
    886   /// CloneMacroInfo - Allocate a new MacroInfo object which is clone of MI.
    887   MacroInfo *CloneMacroInfo(const MacroInfo &MI);
    888 
    889   /// GetIncludeFilenameSpelling - Turn the specified lexer token into a fully
    890   /// checked and spelled filename, e.g. as an operand of #include. This returns
     891   /// true if the input filename was in <>'s or false if it was in ""'s.  The
    892   /// caller is expected to provide a buffer that is large enough to hold the
    893   /// spelling of the filename, but is also expected to handle the case when
    894   /// this method decides to use a different buffer.
    895   bool GetIncludeFilenameSpelling(SourceLocation Loc,llvm::StringRef &Filename);
    896 
    897   /// LookupFile - Given a "foo" or <foo> reference, look up the indicated file,
    898   /// return null on failure.  isAngled indicates whether the file reference is
    899   /// for system #include's or not (i.e. using <> instead of "").
    900   const FileEntry *LookupFile(llvm::StringRef Filename,
    901                               bool isAngled, const DirectoryLookup *FromDir,
    902                               const DirectoryLookup *&CurDir,
    903                               llvm::SmallVectorImpl<char> *SearchPath,
    904                               llvm::SmallVectorImpl<char> *RelativePath);
    905 
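  // Illustrative sketch (not part of the original header): resolving a header
  // name the way an #include directive would.  The out-parameter 'CurDir'
  // records the directory the file was found in, which is what makes
  // #include_next possible.
  //
  //   const DirectoryLookup *CurDir = 0;
  //   const FileEntry *FE = PP.LookupFile("vector", /*isAngled=*/true,
  //                                       /*FromDir=*/0, CurDir,
  //                                       /*SearchPath=*/0,
  //                                       /*RelativePath=*/0);
  //   if (FE) {
  //     // found; FE describes the header on disk
  //   }
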
     906   /// GetCurDirLookup - The DirectoryLookup structure used to find the current
    907   /// FileEntry, if CurLexer is non-null and if applicable.  This allows us to
    908   /// implement #include_next and find directory-specific properties.
    909   const DirectoryLookup *GetCurDirLookup() { return CurDirLookup; }
    910 
    911   /// isInPrimaryFile - Return true if we're in the top-level file, not in a
    912   /// #include.
    913   bool isInPrimaryFile() const;
    914 
    915   /// ConcatenateIncludeName - Handle cases where the #include name is expanded
    916   /// from a macro as multiple tokens, which need to be glued together.  This
    917   /// occurs for code like:
    918   ///    #define FOO <a/b.h>
    919   ///    #include FOO
    920   /// because in this case, "<a/b.h>" is returned as 7 tokens, not one.
    921   ///
    922   /// This code concatenates and consumes tokens up to the '>' token.  It
    923   /// returns false if the > was found, otherwise it returns true if it finds
    924   /// and consumes the EOD marker.
    925   bool ConcatenateIncludeName(llvm::SmallString<128> &FilenameBuffer,
    926                               SourceLocation &End);
    927 
    928   /// LexOnOffSwitch - Lex an on-off-switch (C99 6.10.6p2) and verify that it is
    929   /// followed by EOD.  Return true if the token is not a valid on-off-switch.
    930   bool LexOnOffSwitch(tok::OnOffSwitch &OOS);
    931 
    932 private:
    933 
    934   void PushIncludeMacroStack() {
    935     IncludeMacroStack.push_back(IncludeStackInfo(CurLexer.take(),
    936                                                  CurPTHLexer.take(),
    937                                                  CurPPLexer,
    938                                                  CurTokenLexer.take(),
    939                                                  CurDirLookup));
    940     CurPPLexer = 0;
    941   }
    942 
    943   void PopIncludeMacroStack() {
    944     CurLexer.reset(IncludeMacroStack.back().TheLexer);
    945     CurPTHLexer.reset(IncludeMacroStack.back().ThePTHLexer);
    946     CurPPLexer = IncludeMacroStack.back().ThePPLexer;
    947     CurTokenLexer.reset(IncludeMacroStack.back().TheTokenLexer);
    948     CurDirLookup  = IncludeMacroStack.back().TheDirLookup;
    949     IncludeMacroStack.pop_back();
    950   }
    951 
    952   /// AllocateMacroInfo - Allocate a new MacroInfo object.
    953   MacroInfo *AllocateMacroInfo();
    954 
    955   /// ReleaseMacroInfo - Release the specified MacroInfo.  This memory will
    956   ///  be reused for allocating new MacroInfo objects.
    957   void ReleaseMacroInfo(MacroInfo* MI);
    958 
    959   /// ReadMacroName - Lex and validate a macro name, which occurs after a
    960   /// #define or #undef.  This emits a diagnostic, sets the token kind to eod,
    961   /// and discards the rest of the macro line if the macro name is invalid.
    962   void ReadMacroName(Token &MacroNameTok, char isDefineUndef = 0);
    963 
    964   /// ReadMacroDefinitionArgList - The ( starting an argument list of a macro
    965   /// definition has just been read.  Lex the rest of the arguments and the
    966   /// closing ), updating MI with what we learn.  Return true if an error occurs
    967   /// parsing the arg list.
    968   bool ReadMacroDefinitionArgList(MacroInfo *MI);
    969 
    970   /// SkipExcludedConditionalBlock - We just read a #if or related directive and
    971   /// decided that the subsequent tokens are in the #if'd out portion of the
    972   /// file.  Lex the rest of the file, until we see an #endif.  If
    973   /// FoundNonSkipPortion is true, then we have already emitted code for part of
    974   /// this #if directive, so #else/#elif blocks should never be entered. If
     975   /// FoundElse is false, then #else directives are ok; if not, then we have
     976   /// already seen one, so an #else directive is a duplicate.  When this returns,
    977   /// the caller can lex the first valid token.
    978   void SkipExcludedConditionalBlock(SourceLocation IfTokenLoc,
    979                                     bool FoundNonSkipPortion, bool FoundElse);
    980 
    981   /// PTHSkipExcludedConditionalBlock - A fast PTH version of
    982   ///  SkipExcludedConditionalBlock.
    983   void PTHSkipExcludedConditionalBlock();
    984 
    985   /// EvaluateDirectiveExpression - Evaluate an integer constant expression that
    986   /// may occur after a #if or #elif directive and return it as a bool.  If the
    987   /// expression is equivalent to "!defined(X)" return X in IfNDefMacro.
    988   bool EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro);
    989 
    990   /// RegisterBuiltinPragmas - Install the standard preprocessor pragmas:
    991   /// #pragma GCC poison/system_header/dependency and #pragma once.
    992   void RegisterBuiltinPragmas();
    993 
    994   /// RegisterBuiltinMacros - Register builtin macros, such as __LINE__ with the
    995   /// identifier table.
    996   void RegisterBuiltinMacros();
    997 
    998   /// HandleMacroExpandedIdentifier - If an identifier token is read that is to
    999   /// be expanded as a macro, handle it and return the next token as 'Tok'.  If
    1000   /// the macro should not be expanded, return true; otherwise, return false.
   1001   bool HandleMacroExpandedIdentifier(Token &Tok, MacroInfo *MI);
   1002 
   1003   /// \brief Cache macro expanded tokens for TokenLexers.
    1004   ///
    1005   /// Works like a stack; a TokenLexer adds the macro expanded tokens that it is
    1006   /// going to lex into the cache, and when it finishes, the tokens are removed
    1007   /// from the end of the cache.
   1008   Token *cacheMacroExpandedTokens(TokenLexer *tokLexer,
   1009                                   llvm::ArrayRef<Token> tokens);
   1010   void removeCachedMacroExpandedTokensOfLastLexer();
   1011   friend void TokenLexer::ExpandFunctionArguments();
   1012 
   1013   /// isNextPPTokenLParen - Determine whether the next preprocessor token to be
    1014   /// lexed is a '('.  If so, consume the token and return true; if not, this
   1015   /// method should have no observable side-effect on the lexed tokens.
   1016   bool isNextPPTokenLParen();
   1017 
   1018   /// ReadFunctionLikeMacroArgs - After reading "MACRO(", this method is
   1019   /// invoked to read all of the formal arguments specified for the macro
   1020   /// invocation.  This returns null on error.
   1021   MacroArgs *ReadFunctionLikeMacroArgs(Token &MacroName, MacroInfo *MI,
   1022                                        SourceLocation &ExpansionEnd);
   1023 
   1024   /// ExpandBuiltinMacro - If an identifier token is read that is to be expanded
   1025   /// as a builtin macro, handle it and return the next token as 'Tok'.
   1026   void ExpandBuiltinMacro(Token &Tok);
   1027 
   1028   /// Handle_Pragma - Read a _Pragma directive, slice it up, process it, then
   1029   /// return the first token after the directive.  The _Pragma token has just
   1030   /// been read into 'Tok'.
   1031   void Handle_Pragma(Token &Tok);
   1032 
   1033   /// HandleMicrosoft__pragma - Like Handle_Pragma except the pragma text
   1034   /// is not enclosed within a string literal.
   1035   void HandleMicrosoft__pragma(Token &Tok);
   1036 
   1037   /// EnterSourceFileWithLexer - Add a lexer to the top of the include stack and
   1038   /// start lexing tokens from it instead of the current buffer.
   1039   void EnterSourceFileWithLexer(Lexer *TheLexer, const DirectoryLookup *Dir);
   1040 
   1041   /// EnterSourceFileWithPTH - Add a lexer to the top of the include stack and
   1042   /// start getting tokens from it using the PTH cache.
   1043   void EnterSourceFileWithPTH(PTHLexer *PL, const DirectoryLookup *Dir);
   1044 
   1045   /// IsFileLexer - Returns true if we are lexing from a file and not a
   1046   ///  pragma or a macro.
   1047   static bool IsFileLexer(const Lexer* L, const PreprocessorLexer* P) {
   1048     return L ? !L->isPragmaLexer() : P != 0;
   1049   }
   1050 
   1051   static bool IsFileLexer(const IncludeStackInfo& I) {
   1052     return IsFileLexer(I.TheLexer, I.ThePPLexer);
   1053   }
   1054 
   1055   bool IsFileLexer() const {
   1056     return IsFileLexer(CurLexer.get(), CurPPLexer);
   1057   }
   1058 
   1059   //===--------------------------------------------------------------------===//
   1060   // Caching stuff.
   1061   void CachingLex(Token &Result);
   1062   bool InCachingLexMode() const {
   1063     // If the Lexer pointers are 0 and IncludeMacroStack is empty, it means
   1064     // that we are past EOF, not that we are in CachingLex mode.
   1065     return CurPPLexer == 0 && CurTokenLexer == 0 && CurPTHLexer == 0 &&
   1066            !IncludeMacroStack.empty();
   1067   }
   1068   void EnterCachingLexMode();
   1069   void ExitCachingLexMode() {
   1070     if (InCachingLexMode())
   1071       RemoveTopOfLexerStack();
   1072   }
   1073   const Token &PeekAhead(unsigned N);
   1074   void AnnotatePreviousCachedTokens(const Token &Tok);
   1075 
   1076   //===--------------------------------------------------------------------===//
   1077   /// Handle*Directive - implement the various preprocessor directives.  These
   1078   /// should side-effect the current preprocessor object so that the next call
   1079   /// to Lex() will return the appropriate token next.
   1080   void HandleLineDirective(Token &Tok);
   1081   void HandleDigitDirective(Token &Tok);
   1082   void HandleUserDiagnosticDirective(Token &Tok, bool isWarning);
   1083   void HandleIdentSCCSDirective(Token &Tok);
   1084 
   1085   // File inclusion.
   1086   void HandleIncludeDirective(SourceLocation HashLoc,
   1087                               Token &Tok,
   1088                               const DirectoryLookup *LookupFrom = 0,
   1089                               bool isImport = false);
   1090   void HandleIncludeNextDirective(SourceLocation HashLoc, Token &Tok);
   1091   void HandleIncludeMacrosDirective(SourceLocation HashLoc, Token &Tok);
   1092   void HandleImportDirective(SourceLocation HashLoc, Token &Tok);
   1093 
   1094   // Macro handling.
   1095   void HandleDefineDirective(Token &Tok);
   1096   void HandleUndefDirective(Token &Tok);
   1097 
   1098   // Conditional Inclusion.
   1099   void HandleIfdefDirective(Token &Tok, bool isIfndef,
   1100                             bool ReadAnyTokensBeforeDirective);
   1101   void HandleIfDirective(Token &Tok, bool ReadAnyTokensBeforeDirective);
   1102   void HandleEndifDirective(Token &Tok);
   1103   void HandleElseDirective(Token &Tok);
   1104   void HandleElifDirective(Token &Tok);
   1105 
   1106   // Pragmas.
   1107   void HandlePragmaDirective(unsigned Introducer);
   1108 public:
   1109   void HandlePragmaOnce(Token &OnceTok);
   1110   void HandlePragmaMark();
   1111   void HandlePragmaPoison(Token &PoisonTok);
   1112   void HandlePragmaSystemHeader(Token &SysHeaderTok);
   1113   void HandlePragmaDependency(Token &DependencyTok);
   1114   void HandlePragmaComment(Token &CommentTok);
   1115   void HandlePragmaMessage(Token &MessageTok);
   1116   void HandlePragmaPushMacro(Token &Tok);
   1117   void HandlePragmaPopMacro(Token &Tok);
   1118   IdentifierInfo *ParsePragmaPushOrPopMacro(Token &Tok);
   1119 
   1120   // Return true and store the first token only if any CommentHandler
   1121   // has inserted some tokens and getCommentRetentionState() is false.
   1122   bool HandleComment(Token &Token, SourceRange Comment);
   1123 
   1124   /// \brief A macro is used, update information about macros that need unused
   1125   /// warnings.
   1126   void markMacroAsUsed(MacroInfo *MI);
   1127 };
   1128 
   1129 /// \brief Abstract base class that describes a handler that will receive
   1130 /// source ranges for each of the comments encountered in the source file.
   1131 class CommentHandler {
   1132 public:
   1133   virtual ~CommentHandler();
   1134 
   1135   // The handler shall return true if it has pushed any tokens
   1136   // to be read using e.g. EnterToken or EnterTokenStream.
   1137   virtual bool HandleComment(Preprocessor &PP, SourceRange Comment) = 0;
   1138 };
   1139 
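// Illustrative sketch (not part of the original header): a minimal
// CommentHandler that records comment ranges.  It is registered with
// Preprocessor::AddCommentHandler() and returns false because it never
// pushes tokens back into the stream.
//
//   class CommentCollector : public CommentHandler {
//     std::vector<SourceRange> Comments;
//   public:
//     virtual bool HandleComment(Preprocessor &PP, SourceRange Comment) {
//       Comments.push_back(Comment);
//       return false;
//     }
//   };
//
//   CommentCollector Collector;
//   PP.AddCommentHandler(&Collector);
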
   1140 }  // end namespace clang
   1141 
   1142 #endif
   1143