//===--- Preprocessor.h - C Language Family Preprocessor -------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the Preprocessor interface.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_CLANG_LEX_PREPROCESSOR_H
#define LLVM_CLANG_LEX_PREPROCESSOR_H

#include "clang/Lex/MacroInfo.h"
#include "clang/Lex/Lexer.h"
#include "clang/Lex/PTHLexer.h"
#include "clang/Lex/PPCallbacks.h"
#include "clang/Lex/TokenLexer.h"
#include "clang/Lex/PTHManager.h"
#include "clang/Basic/Builtins.h"
#include "clang/Basic/Diagnostic.h"
#include "clang/Basic/IdentifierTable.h"
#include "clang/Basic/SourceLocation.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/IntrusiveRefCntPtr.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/OwningPtr.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/Support/Allocator.h"
#include <vector>

namespace clang {

class SourceManager;
class ExternalPreprocessorSource;
class FileManager;
class FileEntry;
class HeaderSearch;
class PragmaNamespace;
class PragmaHandler;
class CommentHandler;
class ScratchBuffer;
class TargetInfo;
class PPCallbacks;
class CodeCompletionHandler;
class DirectoryLookup;
class PreprocessingRecord;
class ModuleLoader;

/// Preprocessor - This object engages in a tight little dance with the lexer
/// to efficiently preprocess tokens. Lexers know only about tokens within a
/// single source file, and don't know anything about preprocessor-level
/// issues like the #include stack, token expansion, etc.
///
class Preprocessor : public llvm::RefCountedBase<Preprocessor> {
  DiagnosticsEngine *Diags;
  LangOptions       &Features;
  const TargetInfo  *Target;
  FileManager       &FileMgr;
  SourceManager     &SourceMgr;
  ScratchBuffer     *ScratchBuf;
  HeaderSearch      &HeaderInfo;
  ModuleLoader      &TheModuleLoader;

  /// \brief External source of macros.
  ExternalPreprocessorSource *ExternalSource;


  /// PTH - An optional PTHManager object used for getting tokens from
  /// a token cache rather than lexing the original source file.
  llvm::OwningPtr<PTHManager> PTH;

  /// BP - A BumpPtrAllocator object used to quickly allocate and release
  /// objects internal to the Preprocessor.
  llvm::BumpPtrAllocator BP;

  /// Identifiers for builtin macros and other builtins.
  IdentifierInfo *Ident__LINE__, *Ident__FILE__;   // __LINE__, __FILE__
  IdentifierInfo *Ident__DATE__, *Ident__TIME__;   // __DATE__, __TIME__
  IdentifierInfo *Ident__INCLUDE_LEVEL__;          // __INCLUDE_LEVEL__
  IdentifierInfo *Ident__BASE_FILE__;              // __BASE_FILE__
  IdentifierInfo *Ident__TIMESTAMP__;              // __TIMESTAMP__
  IdentifierInfo *Ident__COUNTER__;                // __COUNTER__
  IdentifierInfo *Ident_Pragma, *Ident__pragma;    // _Pragma, __pragma
  IdentifierInfo *Ident__VA_ARGS__;                // __VA_ARGS__
  IdentifierInfo *Ident__has_feature;              // __has_feature
  IdentifierInfo *Ident__has_extension;            // __has_extension
  IdentifierInfo *Ident__has_builtin;              // __has_builtin
  IdentifierInfo *Ident__has_attribute;            // __has_attribute
  IdentifierInfo *Ident__has_include;              // __has_include
  IdentifierInfo *Ident__has_include_next;         // __has_include_next
  IdentifierInfo *Ident__has_warning;              // __has_warning

  SourceLocation DATELoc, TIMELoc;
  unsigned CounterValue;  // Next __COUNTER__ value.

  enum {
    /// MaxIncludeStackDepth - Maximum depth of #includes.
    MaxAllowedIncludeStackDepth = 200
  };

  // State that is set before the preprocessor begins.
  bool KeepComments : 1;
  bool KeepMacroComments : 1;
  bool SuppressIncludeNotFoundError : 1;
  bool AutoModuleImport : 1;

  // State that changes while the preprocessor runs:
  bool InMacroArgs : 1;  // True if parsing fn macro invocation args.

  /// Whether the preprocessor owns the header search object.
  bool OwnsHeaderSearch : 1;

  /// DisableMacroExpansion - True if macro expansion is disabled.
  bool DisableMacroExpansion : 1;

  /// \brief Whether we have already loaded macros from the external source.
  mutable bool ReadMacrosFromExternalSource : 1;

  /// Identifiers - This is mapping/lookup information for all identifiers in
  /// the program, including program keywords.
  mutable IdentifierTable Identifiers;

  /// Selectors - This table contains all the selectors in the program. Unlike
  /// IdentifierTable above, this table *isn't* populated by the preprocessor.
  /// It is declared/expanded here because its role/lifetime is conceptually
  /// similar to the IdentifierTable. In addition, the current control flow
  /// (in clang::ParseAST()) makes it convenient to put it here.
  /// FIXME: Make sure the lifetime of Identifiers/Selectors *isn't* tied to
  /// the lifetime of the preprocessor.
  SelectorTable Selectors;

  /// BuiltinInfo - Information about builtins.
  Builtin::Context BuiltinInfo;

  /// PragmaHandlers - This tracks all of the pragmas that the client
  /// registered with this preprocessor.
  PragmaNamespace *PragmaHandlers;

  /// \brief Tracks all of the comment handlers that the client registered
  /// with this preprocessor.
  std::vector<CommentHandler *> CommentHandlers;

  /// \brief The code-completion handler.
  CodeCompletionHandler *CodeComplete;

  /// \brief The file that we're performing code-completion for, if any.
  const FileEntry *CodeCompletionFile;

  /// \brief The offset in file for the code-completion point.
  unsigned CodeCompletionOffset;

  /// \brief The location for the code-completion point. This gets instantiated
  /// when the CodeCompletionFile gets #include'ed for preprocessing.
  SourceLocation CodeCompletionLoc;

  /// \brief The start location for the file of the code-completion point.
  /// This gets instantiated when the CodeCompletionFile gets #include'ed
  /// for preprocessing.
  SourceLocation CodeCompletionFileLoc;

  /// \brief The source location of the __import_module__ keyword we just
  /// lexed, if any.
  SourceLocation ModuleImportLoc;

  /// \brief The source location of the currently-active
  /// #pragma clang arc_cf_code_audited begin.
  SourceLocation PragmaARCCFCodeAuditedLoc;

  /// \brief True if we hit the code-completion point.
  bool CodeCompletionReached;

  /// \brief The number of bytes that we will initially skip when entering the
  /// main file, which is used when loading a precompiled preamble, along
  /// with a flag that indicates whether skipping this number of bytes will
  /// place the lexer at the start of a line.
  std::pair<unsigned, bool> SkipMainFilePreamble;

  /// CurLexer - This is the current top of the stack that we're lexing from if
  /// not expanding a macro and we are lexing directly from source code.
  /// Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
  llvm::OwningPtr<Lexer> CurLexer;

  /// CurPTHLexer - This is the current top of stack that we're lexing from if
  /// not expanding from a macro and we are lexing from a PTH cache.
  /// Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
  llvm::OwningPtr<PTHLexer> CurPTHLexer;

  /// CurPPLexer - This is the current top of the stack that we're lexing from
  /// if not expanding a macro. This is an alias for either CurLexer or
  /// CurPTHLexer.
  PreprocessorLexer *CurPPLexer;

  /// CurLookup - The DirectoryLookup structure used to find the current
  /// FileEntry, if CurLexer is non-null and if applicable. This allows us to
  /// implement #include_next and find directory-specific properties.
  const DirectoryLookup *CurDirLookup;

  /// CurTokenLexer - This is the current macro we are expanding, if we are
  /// expanding a macro. One of CurLexer and CurTokenLexer must be null.
  llvm::OwningPtr<TokenLexer> CurTokenLexer;

  /// \brief The kind of lexer we're currently working with.
  enum CurLexerKind {
    CLK_Lexer,
    CLK_PTHLexer,
    CLK_TokenLexer,
    CLK_CachingLexer,
    CLK_LexAfterModuleImport
  } CurLexerKind;

  /// IncludeMacroStack - This keeps track of the stack of files currently
  /// #included, and macros currently being expanded from, not counting
  /// CurLexer/CurTokenLexer.
  struct IncludeStackInfo {
    enum CurLexerKind     CurLexerKind;
    Lexer                 *TheLexer;
    PTHLexer              *ThePTHLexer;
    PreprocessorLexer     *ThePPLexer;
    TokenLexer            *TheTokenLexer;
    const DirectoryLookup *TheDirLookup;

    IncludeStackInfo(enum CurLexerKind K, Lexer *L, PTHLexer *P,
                     PreprocessorLexer *PPL,
                     TokenLexer *TL, const DirectoryLookup *D)
      : CurLexerKind(K), TheLexer(L), ThePTHLexer(P), ThePPLexer(PPL),
        TheTokenLexer(TL), TheDirLookup(D) {}
  };
  std::vector<IncludeStackInfo> IncludeMacroStack;

  /// Callbacks - These are actions invoked when some preprocessor activity is
  /// encountered (e.g. a file is #included, etc).
  PPCallbacks *Callbacks;

  /// Macros - For each IdentifierInfo with 'HasMacro' set, we keep a mapping
  /// to the actual definition of the macro.
  llvm::DenseMap<IdentifierInfo*, MacroInfo*> Macros;

  /// \brief Macros that we want to warn about because they are not used at
  /// the end of the translation unit; we store just their SourceLocations
  /// instead of something like MacroInfo*. The benefit of this is that when
  /// we are deserializing from PCH, we don't need to deserialize identifiers
  /// and macros just to report that they are unused; we just warn using the
  /// SourceLocations of this set (which will be filled in by the ASTReader).
  /// We are using a SmallPtrSet instead of a vector for faster removal.
  typedef llvm::SmallPtrSet<SourceLocation, 32> WarnUnusedMacroLocsTy;
  WarnUnusedMacroLocsTy WarnUnusedMacroLocs;

  /// MacroArgCache - This is a "freelist" of MacroArg objects that can be
  /// reused for quick allocation.
  MacroArgs *MacroArgCache;
  friend class MacroArgs;

  /// PragmaPushMacroInfo - For each IdentifierInfo used in a #pragma
  /// push_macro directive, we keep a MacroInfo stack used to restore the
  /// previous macro value.
  llvm::DenseMap<IdentifierInfo*, std::vector<MacroInfo*> > PragmaPushMacroInfo;

  // Various statistics we track for performance analysis.
  unsigned NumDirectives, NumIncluded, NumDefined, NumUndefined, NumPragma;
  unsigned NumIf, NumElse, NumEndif;
  unsigned NumEnteredSourceFiles, MaxIncludeStackDepth;
  unsigned NumMacroExpanded, NumFnMacroExpanded, NumBuiltinMacroExpanded;
  unsigned NumFastMacroExpanded, NumTokenPaste, NumFastTokenPaste;
  unsigned NumSkipped;

  /// Predefines - This string is the predefined macros that the preprocessor
  /// should use from the command line etc.
  std::string Predefines;

  /// TokenLexerCache - Cache macro expanders to reduce malloc traffic.
  enum { TokenLexerCacheSize = 8 };
  unsigned NumCachedTokenLexers;
  TokenLexer *TokenLexerCache[TokenLexerCacheSize];

  /// \brief Keeps macro expanded tokens for TokenLexers.
  ///
  /// Works like a stack; a TokenLexer adds the macro expanded tokens that it
  /// is going to lex into the cache, and when it finishes the tokens are
  /// removed from the end of the cache.
  SmallVector<Token, 16> MacroExpandedTokens;
  std::vector<std::pair<TokenLexer *, size_t> > MacroExpandingLexersStack;

  /// \brief A record of the macro definitions and expansions that
  /// occurred during preprocessing.
  ///
  /// This is an optional side structure that can be enabled with
  /// \c createPreprocessingRecord() prior to preprocessing.
  PreprocessingRecord *Record;

private:  // Cached tokens state.
  typedef SmallVector<Token, 1> CachedTokensTy;

  /// CachedTokens - Cached tokens are stored here when we do backtracking or
  /// lookahead. They are "lexed" by the CachingLex() method.
  CachedTokensTy CachedTokens;

  /// CachedLexPos - The position of the cached token that CachingLex() should
  /// "lex" next. If it points beyond the CachedTokens vector, it means that
  /// a normal Lex() should be invoked.
  CachedTokensTy::size_type CachedLexPos;

  /// BacktrackPositions - Stack of backtrack positions, allowing nested
  /// backtracks. The EnableBacktrackAtThisPos() method pushes a position to
  /// indicate where CachedLexPos should be set when the BackTrack() method is
  /// invoked (at which point the last position is popped).
  std::vector<CachedTokensTy::size_type> BacktrackPositions;

  struct MacroInfoChain {
    MacroInfo MI;
    MacroInfoChain *Next;
    MacroInfoChain *Prev;
  };

  /// MacroInfos are managed as a chain for easy disposal. This is the head
  /// of that list.
  MacroInfoChain *MIChainHead;

  /// MICache - A "freelist" of MacroInfo objects that can be reused for quick
  /// allocation.
  MacroInfoChain *MICache;

  MacroInfo *getInfoForMacro(IdentifierInfo *II) const;

public:
  Preprocessor(DiagnosticsEngine &diags, LangOptions &opts,
               const TargetInfo *target,
               SourceManager &SM, HeaderSearch &Headers,
               ModuleLoader &TheModuleLoader,
               IdentifierInfoLookup *IILookup = 0,
               bool OwnsHeaderSearch = false,
               bool DelayInitialization = false);

  ~Preprocessor();

  /// \brief Initialize the preprocessor, if the constructor did not already
  /// perform the initialization.
  ///
  /// \param Target Information about the target.
  void Initialize(const TargetInfo &Target);

  DiagnosticsEngine &getDiagnostics() const { return *Diags; }
  void setDiagnostics(DiagnosticsEngine &D) { Diags = &D; }

  const LangOptions &getLangOptions() const { return Features; }
  const TargetInfo &getTargetInfo() const { return *Target; }
  FileManager &getFileManager() const { return FileMgr; }
  SourceManager &getSourceManager() const { return SourceMgr; }
  HeaderSearch &getHeaderSearchInfo() const { return HeaderInfo; }

  IdentifierTable &getIdentifierTable() { return Identifiers; }
  SelectorTable &getSelectorTable() { return Selectors; }
  Builtin::Context &getBuiltinInfo() { return BuiltinInfo; }
  llvm::BumpPtrAllocator &getPreprocessorAllocator() { return BP; }

  void setPTHManager(PTHManager* pm);

  PTHManager *getPTHManager() { return PTH.get(); }

  void setExternalSource(ExternalPreprocessorSource *Source) {
    ExternalSource = Source;
  }

  ExternalPreprocessorSource *getExternalSource() const {
    return ExternalSource;
  }

  /// \brief Retrieve the module loader associated with this preprocessor.
  ModuleLoader &getModuleLoader() const { return TheModuleLoader; }

  /// SetCommentRetentionState - Control whether or not the preprocessor
  /// retains comments in output.
  void SetCommentRetentionState(bool KeepComments, bool KeepMacroComments) {
    this->KeepComments = KeepComments | KeepMacroComments;
    this->KeepMacroComments = KeepMacroComments;
  }

  bool getCommentRetentionState() const { return KeepComments; }

  void SetSuppressIncludeNotFoundError(bool Suppress) {
    SuppressIncludeNotFoundError = Suppress;
  }

  bool GetSuppressIncludeNotFoundError() {
    return SuppressIncludeNotFoundError;
  }

  /// \brief Specify whether automatic module imports are enabled.
  void setAutoModuleImport(bool AutoModuleImport = true) {
    this->AutoModuleImport = AutoModuleImport;
  }
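
  // Illustrative usage (not part of the original header): a minimal sketch of
  // how a client might configure the flags above before preprocessing begins.
  // The helper name configureForRawLexing() is hypothetical; only members
  // declared above are used.
  //
  // \code
  //   void configureForRawLexing(clang::Preprocessor &PP) {
  //     // Keep all comments (including those from macro expansions) in the
  //     // output token stream, e.g. for a -E -C style tool.
  //     PP.SetCommentRetentionState(/*KeepComments=*/true,
  //                                 /*KeepMacroComments=*/true);
  //     // Don't diagnose missing headers; useful for dependency scanners.
  //     PP.SetSuppressIncludeNotFoundError(true);
  //     // Leave automatic module imports off.
  //     PP.setAutoModuleImport(false);
  //   }
  // \endcode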

  /// isCurrentLexer - Return true if we are lexing directly from the specified
  /// lexer.
  bool isCurrentLexer(const PreprocessorLexer *L) const {
    return CurPPLexer == L;
  }

  /// getCurrentLexer - Return the current lexer being lexed from. Note
  /// that this ignores any potentially active macro expansions and _Pragma
  /// expansions going on at the time.
  PreprocessorLexer *getCurrentLexer() const { return CurPPLexer; }

  /// getCurrentFileLexer - Return the current file lexer being lexed from.
  /// Note that this ignores any potentially active macro expansions and
  /// _Pragma expansions going on at the time.
  PreprocessorLexer *getCurrentFileLexer() const;

  /// getPPCallbacks/addPPCallbacks - Accessors for preprocessor callbacks.
  /// Note that this class takes ownership of any PPCallbacks object given to
  /// it.
  PPCallbacks *getPPCallbacks() const { return Callbacks; }
  void addPPCallbacks(PPCallbacks *C) {
    if (Callbacks)
      C = new PPChainedCallbacks(C, Callbacks);
    Callbacks = C;
  }

  /// getMacroInfo - Given an identifier, return the MacroInfo it is #defined
  /// to or null if it isn't #define'd.
  MacroInfo *getMacroInfo(IdentifierInfo *II) const {
    if (!II->hasMacroDefinition())
      return 0;

    return getInfoForMacro(II);
  }

  /// setMacroInfo - Specify a macro for this identifier.
  ///
  void setMacroInfo(IdentifierInfo *II, MacroInfo *MI);

  /// macro_iterator/macro_begin/macro_end - This allows you to walk the
  /// current state of the macro table. This visits every currently-defined
  /// macro.
  typedef llvm::DenseMap<IdentifierInfo*,
                         MacroInfo*>::const_iterator macro_iterator;
  macro_iterator macro_begin(bool IncludeExternalMacros = true) const;
  macro_iterator macro_end(bool IncludeExternalMacros = true) const;

  const std::string &getPredefines() const { return Predefines; }
  /// setPredefines - Set the predefines for this Preprocessor. These
  /// predefines are automatically injected when parsing the main file.
  void setPredefines(const char *P) { Predefines = P; }
  void setPredefines(const std::string &P) { Predefines = P; }

  /// getIdentifierInfo - Return information about the specified preprocessor
  /// identifier token. The version of this method that takes two character
  /// pointers is preferred unless the identifier is already available as a
  /// string (this avoids allocation and copying of memory to construct an
  /// std::string).
  IdentifierInfo *getIdentifierInfo(StringRef Name) const {
    return &Identifiers.get(Name);
  }

  /// AddPragmaHandler - Add the specified pragma handler to the preprocessor.
  /// If 'Namespace' is non-null, then it is a token required to exist on the
  /// pragma line before the pragma string starts, e.g. "STDC" or "GCC".
  void AddPragmaHandler(StringRef Namespace, PragmaHandler *Handler);
  void AddPragmaHandler(PragmaHandler *Handler) {
    AddPragmaHandler(StringRef(), Handler);
  }

  /// RemovePragmaHandler - Remove the specific pragma handler from
  /// the preprocessor. If \arg Namespace is non-null, then it should
  /// be the namespace that \arg Handler was added to. It is an error
  /// to remove a handler that has not been registered.
  void RemovePragmaHandler(StringRef Namespace, PragmaHandler *Handler);
  void RemovePragmaHandler(PragmaHandler *Handler) {
    RemovePragmaHandler(StringRef(), Handler);
  }

  /// \brief Add the specified comment handler to the preprocessor.
  void AddCommentHandler(CommentHandler *Handler);

  /// \brief Remove the specified comment handler.
  ///
  /// It is an error to remove a handler that has not been registered.
  void RemoveCommentHandler(CommentHandler *Handler);
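
  // Illustrative usage (not part of the original header): a minimal sketch of
  // registering a comment handler, using the CommentHandler interface declared
  // at the end of this file. The class name SuppressedCommentCounter and the
  // surrounding setup are hypothetical.
  //
  // \code
  //   class SuppressedCommentCounter : public clang::CommentHandler {
  //     unsigned NumComments;
  //   public:
  //     SuppressedCommentCounter() : NumComments(0) {}
  //     unsigned getNumComments() const { return NumComments; }
  //     // Return false: we did not push any tokens into the preprocessor.
  //     virtual bool HandleComment(clang::Preprocessor &PP,
  //                                clang::SourceRange Comment) {
  //       ++NumComments;
  //       return false;
  //     }
  //   };
  //
  //   void countComments(clang::Preprocessor &PP,
  //                      SuppressedCommentCounter &Counter) {
  //     PP.AddCommentHandler(&Counter);
  //     // ... run the preprocessor ...
  //     PP.RemoveCommentHandler(&Counter);
  //   }
  // \endcode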

  /// \brief Set the code completion handler to the given object.
  void setCodeCompletionHandler(CodeCompletionHandler &Handler) {
    CodeComplete = &Handler;
  }

  /// \brief Retrieve the current code-completion handler.
  CodeCompletionHandler *getCodeCompletionHandler() const {
    return CodeComplete;
  }

  /// \brief Clear out the code completion handler.
  void clearCodeCompletionHandler() {
    CodeComplete = 0;
  }

  /// \brief Hook used by the lexer to invoke the "natural language" code
  /// completion point.
  void CodeCompleteNaturalLanguage();

  /// \brief Retrieve the preprocessing record, or NULL if there is no
  /// preprocessing record.
  PreprocessingRecord *getPreprocessingRecord() const { return Record; }

  /// \brief Create a new preprocessing record, which will keep track of
  /// all macro expansions, macro definitions, etc.
  void createPreprocessingRecord(bool IncludeNestedMacroExpansions);

  /// EnterMainSourceFile - Enter the specified FileID as the main source file,
  /// which implicitly adds the builtin defines etc.
  void EnterMainSourceFile();

  /// EndSourceFile - Inform the preprocessor callbacks that processing is
  /// complete.
  void EndSourceFile();

  /// EnterSourceFile - Add a source file to the top of the include stack and
  /// start lexing tokens from it instead of the current buffer. Emit an error
  /// and don't enter the file on error.
  void EnterSourceFile(FileID CurFileID, const DirectoryLookup *Dir,
                       SourceLocation Loc);

  /// EnterMacro - Add a Macro to the top of the include stack and start lexing
  /// tokens from it instead of the current buffer. Args specifies the
  /// tokens input to a function-like macro.
  ///
  /// ILEnd specifies the location of the ')' for a function-like macro or the
  /// identifier for an object-like macro.
  void EnterMacro(Token &Identifier, SourceLocation ILEnd, MacroArgs *Args);

  /// EnterTokenStream - Add a "macro" context to the top of the include stack,
  /// which will cause the lexer to start returning the specified tokens.
  ///
  /// If DisableMacroExpansion is true, tokens lexed from the token stream will
  /// not be subject to further macro expansion. Otherwise, these tokens will
  /// be re-macro-expanded when/if expansion is enabled.
  ///
  /// If OwnsTokens is false, this method assumes that the specified stream of
  /// tokens has a permanent owner somewhere, so they do not need to be copied.
  /// If it is true, it assumes the array of tokens is allocated with new[] and
  /// must be freed.
  ///
  void EnterTokenStream(const Token *Toks, unsigned NumToks,
                        bool DisableMacroExpansion, bool OwnsTokens);

  /// RemoveTopOfLexerStack - Pop the current lexer/macro exp off the top of
  /// the lexer stack. This should only be used in situations where the
  /// current state of the top-of-stack lexer is known.
  void RemoveTopOfLexerStack();
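
  // Illustrative usage (not part of the original header): a minimal sketch of
  // the usual preprocessing loop built on EnterMainSourceFile(),
  // EndSourceFile() and Lex() (declared further below). The function name
  // lexWholeFile() is hypothetical.
  //
  // \code
  //   unsigned lexWholeFile(clang::Preprocessor &PP) {
  //     unsigned NumTokens = 0;
  //     PP.EnterMainSourceFile();   // Pushes the main FileID and predefines.
  //     clang::Token Tok;
  //     do {
  //       PP.Lex(Tok);              // Fully macro-expanded token stream.
  //       ++NumTokens;
  //     } while (Tok.isNot(clang::tok::eof));
  //     PP.EndSourceFile();         // Fire the end-of-processing callbacks.
  //     return NumTokens;
  //   }
  // \endcode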

  /// EnableBacktrackAtThisPos - From the point that this method is called, and
  /// until CommitBacktrackedTokens() or Backtrack() is called, the
  /// Preprocessor keeps track of the lexed tokens so that a subsequent
  /// Backtrack() call will make the Preprocessor re-lex the same tokens.
  ///
  /// Nested backtracks are allowed, meaning that EnableBacktrackAtThisPos can
  /// be called multiple times and CommitBacktrackedTokens/Backtrack calls will
  /// be combined with the EnableBacktrackAtThisPos calls in reverse order.
  ///
  /// NOTE: *DO NOT* forget to call either CommitBacktrackedTokens or Backtrack
  /// at some point after EnableBacktrackAtThisPos. If you don't, caching of
  /// tokens will continue indefinitely.
  ///
  void EnableBacktrackAtThisPos();

  /// CommitBacktrackedTokens - Disable the last EnableBacktrackAtThisPos call.
  void CommitBacktrackedTokens();

  /// Backtrack - Make Preprocessor re-lex the tokens that were lexed since
  /// EnableBacktrackAtThisPos() was previously called.
  void Backtrack();

  /// isBacktrackEnabled - True if EnableBacktrackAtThisPos() was called and
  /// caching of tokens is on.
  bool isBacktrackEnabled() const { return !BacktrackPositions.empty(); }

  /// Lex - To lex a token from the preprocessor, just pull a token from the
  /// current lexer or macro object.
  void Lex(Token &Result) {
    switch (CurLexerKind) {
    case CLK_Lexer: CurLexer->Lex(Result); break;
    case CLK_PTHLexer: CurPTHLexer->Lex(Result); break;
    case CLK_TokenLexer: CurTokenLexer->Lex(Result); break;
    case CLK_CachingLexer: CachingLex(Result); break;
    case CLK_LexAfterModuleImport: LexAfterModuleImport(Result); break;
    }
  }

  void LexAfterModuleImport(Token &Result);

  /// LexNonComment - Lex a token. If it's a comment, keep lexing until we get
  /// something not a comment. This is useful in -E -C mode where comments
  /// would foul up preprocessor directive handling.
  void LexNonComment(Token &Result) {
    do
      Lex(Result);
    while (Result.getKind() == tok::comment);
  }

  /// LexUnexpandedToken - This is just like Lex, but this disables macro
  /// expansion of identifier tokens.
  void LexUnexpandedToken(Token &Result) {
    // Disable macro expansion.
    bool OldVal = DisableMacroExpansion;
    DisableMacroExpansion = true;
    // Lex the token.
    Lex(Result);

    // Reenable it.
    DisableMacroExpansion = OldVal;
  }

  /// LexUnexpandedNonComment - Like LexNonComment, but this disables macro
  /// expansion of identifier tokens.
  void LexUnexpandedNonComment(Token &Result) {
    do
      LexUnexpandedToken(Result);
    while (Result.getKind() == tok::comment);
  }

  /// LookAhead - This peeks ahead N tokens and returns that token without
  /// consuming any tokens. LookAhead(0) returns the next token that would be
  /// returned by Lex(), LookAhead(1) returns the token after it, etc. This
  /// returns normal tokens after phase 5. As such, it is equivalent to using
  /// 'Lex', not 'LexUnexpandedToken'.
  const Token &LookAhead(unsigned N) {
    if (CachedLexPos + N < CachedTokens.size())
      return CachedTokens[CachedLexPos+N];
    else
      return PeekAhead(N+1);
  }

  /// RevertCachedTokens - When backtracking is enabled and tokens are cached,
  /// this allows reverting a specific number of tokens.
  /// Note that the number of tokens being reverted should be up to the last
  /// backtrack position, not more.
  void RevertCachedTokens(unsigned N) {
    assert(isBacktrackEnabled() &&
           "Should only be called when tokens are cached for backtracking");
    assert(signed(CachedLexPos) - signed(N) >= signed(BacktrackPositions.back())
         && "Should revert tokens up to the last backtrack position, not more");
    assert(signed(CachedLexPos) - signed(N) >= 0 &&
           "Corrupted backtrack positions ?");
    CachedLexPos -= N;
  }
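
  // Illustrative usage (not part of the original header): a minimal sketch of
  // speculative lexing with the backtracking API above. The function name
  // startsWithIdentifierLParen() is hypothetical.
  //
  // \code
  //   bool startsWithIdentifierLParen(clang::Preprocessor &PP) {
  //     PP.EnableBacktrackAtThisPos();       // Start caching lexed tokens.
  //     clang::Token Tok;
  //     PP.Lex(Tok);
  //     bool Matches = Tok.is(clang::tok::identifier) &&
  //                    PP.LookAhead(0).is(clang::tok::l_paren);
  //     PP.Backtrack();                      // Re-lex the same tokens later.
  //     // Call CommitBacktrackedTokens() instead of Backtrack() to keep the
  //     // consumed tokens.
  //     return Matches;
  //   }
  // \endcode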

  /// EnterToken - Enters a token in the token stream to be lexed next. If
  /// BackTrack() is called afterwards, the token will remain at the insertion
  /// point.
  void EnterToken(const Token &Tok) {
    EnterCachingLexMode();
    CachedTokens.insert(CachedTokens.begin()+CachedLexPos, Tok);
  }

  /// AnnotateCachedTokens - We notify the Preprocessor that if it is caching
  /// tokens (because backtrack is enabled) it should replace the most recent
  /// cached tokens with the given annotation token. This function has no
  /// effect if backtracking is not enabled.
  ///
  /// Note that the use of this function is just for optimization, so that the
  /// cached tokens don't get re-parsed and re-resolved after a backtrack is
  /// invoked.
  void AnnotateCachedTokens(const Token &Tok) {
    assert(Tok.isAnnotation() && "Expected annotation token");
    if (CachedLexPos != 0 && isBacktrackEnabled())
      AnnotatePreviousCachedTokens(Tok);
  }

  /// \brief Replace the last token with an annotation token.
  ///
  /// Like AnnotateCachedTokens(), this routine replaces an
  /// already-parsed (and resolved) token with an annotation
  /// token. However, this routine only replaces the last token with
  /// the annotation token; it does not affect any other cached
  /// tokens. This function has no effect if backtracking is not
  /// enabled.
  void ReplaceLastTokenWithAnnotation(const Token &Tok) {
    assert(Tok.isAnnotation() && "Expected annotation token");
    if (CachedLexPos != 0 && isBacktrackEnabled())
      CachedTokens[CachedLexPos-1] = Tok;
  }

  /// \brief Specify the point at which code-completion will be performed.
  ///
  /// \param File the file in which code completion should occur. If
  /// this file is included multiple times, code-completion will
  /// perform completion the first time it is included. If NULL, this
  /// function clears out the code-completion point.
  ///
  /// \param Line the line at which code completion should occur
  /// (1-based).
  ///
  /// \param Column the column at which code completion should occur
  /// (1-based).
  ///
  /// \returns true if an error occurred, false otherwise.
  bool SetCodeCompletionPoint(const FileEntry *File,
                              unsigned Line, unsigned Column);

  /// \brief Determine if we are performing code completion.
  bool isCodeCompletionEnabled() const { return CodeCompletionFile != 0; }

  /// \brief Returns the location of the code-completion point.
  /// Returns an invalid location if code-completion is not enabled or the file
  /// containing the code-completion point has not been lexed yet.
  SourceLocation getCodeCompletionLoc() const { return CodeCompletionLoc; }

  /// \brief Returns the start location of the file of code-completion point.
  /// Returns an invalid location if code-completion is not enabled or the file
  /// containing the code-completion point has not been lexed yet.
  SourceLocation getCodeCompletionFileLoc() const {
    return CodeCompletionFileLoc;
  }

  /// \brief Returns true if code-completion is enabled and we have hit the
  /// code-completion point.
  bool isCodeCompletionReached() const { return CodeCompletionReached; }
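
  // Illustrative usage (not part of the original header): a minimal sketch of
  // installing a code-completion point before preprocessing starts. The file
  // name "input.c" and the function name enableCompletionAt() are
  // hypothetical; FileManager::getFile() is assumed to come from
  // clang/Basic/FileManager.h.
  //
  // \code
  //   bool enableCompletionAt(clang::Preprocessor &PP, unsigned Line,
  //                           unsigned Column) {
  //     const clang::FileEntry *File = PP.getFileManager().getFile("input.c");
  //     if (!File)
  //       return true;                       // Propagate the error.
  //     // Returns true on error, false on success (see above).
  //     return PP.SetCodeCompletionPoint(File, Line, Column);
  //   }
  // \endcode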

  /// \brief Note that we hit the code-completion point.
  void setCodeCompletionReached() {
    assert(isCodeCompletionEnabled() && "Code-completion not enabled!");
    CodeCompletionReached = true;
    // Silence any diagnostics that occur after we hit the code-completion.
    getDiagnostics().setSuppressAllDiagnostics(true);
  }

  /// \brief The location of the currently-active #pragma clang
  /// arc_cf_code_audited begin. Returns an invalid location if there
  /// is no such pragma active.
  SourceLocation getPragmaARCCFCodeAuditedLoc() const {
    return PragmaARCCFCodeAuditedLoc;
  }

  /// \brief Set the location of the currently-active #pragma clang
  /// arc_cf_code_audited begin. An invalid location ends the pragma.
  void setPragmaARCCFCodeAuditedLoc(SourceLocation Loc) {
    PragmaARCCFCodeAuditedLoc = Loc;
  }

  /// \brief Instruct the preprocessor to skip part of the main source file.
  ///
  /// \param Bytes The number of bytes in the preamble to skip.
  ///
  /// \param StartOfLine Whether skipping these bytes puts the lexer at the
  /// start of a line.
  void setSkipMainFilePreamble(unsigned Bytes, bool StartOfLine) {
    SkipMainFilePreamble.first = Bytes;
    SkipMainFilePreamble.second = StartOfLine;
  }

  /// Diag - Forwarding function for diagnostics. This emits a diagnostic at
  /// the specified Token's location, translating the token's start
  /// position in the current buffer into a SourcePosition object for
  /// rendering.
  DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID) const {
    return Diags->Report(Loc, DiagID);
  }

  DiagnosticBuilder Diag(const Token &Tok, unsigned DiagID) const {
    return Diags->Report(Tok.getLocation(), DiagID);
  }

  /// getSpelling() - Return the 'spelling' of the token at the given
  /// location; does not go up to the spelling location or down to the
  /// expansion location.
  ///
  /// \param buffer A buffer which will be used only if the token requires
  /// "cleaning", e.g. if it contains trigraphs or escaped newlines.
  /// \param invalid If non-null, will be set \c true if an error occurs.
  StringRef getSpelling(SourceLocation loc,
                        SmallVectorImpl<char> &buffer,
                        bool *invalid = 0) const {
    return Lexer::getSpelling(loc, buffer, SourceMgr, Features, invalid);
  }

  /// getSpelling() - Return the 'spelling' of the Tok token. The spelling of a
  /// token is the characters used to represent the token in the source file
  /// after trigraph expansion and escaped-newline folding. In particular,
  /// this wants to get the true, uncanonicalized, spelling of things like
  /// digraphs, UCNs, etc.
  ///
  /// \param Invalid If non-null, will be set \c true if an error occurs.
  std::string getSpelling(const Token &Tok, bool *Invalid = 0) const {
    return Lexer::getSpelling(Tok, SourceMgr, Features, Invalid);
  }

  /// getSpelling - This method is used to get the spelling of a token into a
  /// preallocated buffer, instead of as an std::string. The caller is required
  /// to allocate enough space for the token, which is guaranteed to be at
  /// least Tok.getLength() bytes long. The length of the actual result is
  /// returned.
  ///
  /// Note that this method may do two possible things: it may either fill in
  /// the buffer specified with characters, or it may *change the input
  /// pointer* to point to a constant buffer with the data already in it
  /// (avoiding a copy). The caller is not allowed to modify the returned
  /// buffer pointer if an internal buffer is returned.
  unsigned getSpelling(const Token &Tok, const char *&Buffer,
                       bool *Invalid = 0) const {
    return Lexer::getSpelling(Tok, Buffer, SourceMgr, Features, Invalid);
  }

  /// getSpelling - This method is used to get the spelling of a token into a
  /// SmallVector. Note that the returned StringRef may not point to the
  /// supplied buffer if a copy can be avoided.
  StringRef getSpelling(const Token &Tok,
                        SmallVectorImpl<char> &Buffer,
                        bool *Invalid = 0) const;

  /// getSpellingOfSingleCharacterNumericConstant - Tok is a numeric constant
  /// with length 1, return the character.
  char getSpellingOfSingleCharacterNumericConstant(const Token &Tok,
                                                   bool *Invalid = 0) const {
    assert(Tok.is(tok::numeric_constant) &&
           Tok.getLength() == 1 && "Called on unsupported token");
    assert(!Tok.needsCleaning() && "Token can't need cleaning with length 1");

    // If the token is carrying a literal data pointer, just use it.
    if (const char *D = Tok.getLiteralData())
      return *D;

    // Otherwise, fall back on getCharacterData, which is slower, but always
    // works.
    return *SourceMgr.getCharacterData(Tok.getLocation(), Invalid);
  }

  /// CreateString - Plop the specified string into a scratch buffer and set
  /// the specified token's location and length to it. If specified, the
  /// source location provides a location of the expansion point of the token.
  void CreateString(const char *Buf, unsigned Len, Token &Tok,
                    SourceLocation ExpansionLocStart = SourceLocation(),
                    SourceLocation ExpansionLocEnd = SourceLocation());

  /// \brief Computes the source location just past the end of the
  /// token at this source location.
  ///
  /// This routine can be used to produce a source location that
  /// points just past the end of the token referenced by \p Loc, and
  /// is generally used when a diagnostic needs to point just after a
  /// token where it expected something different from what it received. If
  /// the returned source location would not be meaningful (e.g., if
  /// it points into a macro), this routine returns an invalid
  /// source location.
  ///
  /// \param Offset an offset from the end of the token, where the source
  /// location should refer to. The default offset (0) produces a source
  /// location pointing just past the end of the token; an offset of 1 produces
  /// a source location pointing to the last character in the token, etc.
  SourceLocation getLocForEndOfToken(SourceLocation Loc, unsigned Offset = 0) {
    return Lexer::getLocForEndOfToken(Loc, Offset, SourceMgr, Features);
  }

  /// \brief Returns true if the given MacroID location points at the first
  /// token of the macro expansion.
  bool isAtStartOfMacroExpansion(SourceLocation loc) const {
    return Lexer::isAtStartOfMacroExpansion(loc, SourceMgr, Features);
  }

  /// \brief Returns true if the given MacroID location points at the last
  /// token of the macro expansion.
  bool isAtEndOfMacroExpansion(SourceLocation loc) const {
    return Lexer::isAtEndOfMacroExpansion(loc, SourceMgr, Features);
  }
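
  // Illustrative usage (not part of the original header): a minimal sketch of
  // combining getSpelling(), Diag() and getLocForEndOfToken() when reporting
  // on a token. The function name reportTokenSpelling() and the DiagID
  // parameter are hypothetical; the caller supplies a real diagnostic ID.
  //
  // \code
  //   void reportTokenSpelling(clang::Preprocessor &PP,
  //                            const clang::Token &Tok, unsigned DiagID) {
  //     llvm::SmallString<64> Buffer;
  //     bool Invalid = false;
  //     llvm::StringRef Spelling = PP.getSpelling(Tok, Buffer, &Invalid);
  //     if (Invalid)
  //       return;
  //     // The location just past the token's end, e.g. to anchor a note.
  //     clang::SourceLocation End = PP.getLocForEndOfToken(Tok.getLocation());
  //     (void)End;                           // Unused here; shown for illustration.
  //     PP.Diag(Tok, DiagID) << Spelling;
  //   }
  // \endcode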

  /// DumpToken - Print the token to stderr, used for debugging.
  ///
  void DumpToken(const Token &Tok, bool DumpFlags = false) const;
  void DumpLocation(SourceLocation Loc) const;
  void DumpMacro(const MacroInfo &MI) const;

  /// AdvanceToTokenCharacter - Given a location that specifies the start of a
  /// token, return a new location that specifies a character within the token.
  SourceLocation AdvanceToTokenCharacter(SourceLocation TokStart,
                                         unsigned Char) const {
    return Lexer::AdvanceToTokenCharacter(TokStart, Char, SourceMgr, Features);
  }

  /// IncrementPasteCounter - Increment the counters for the number of token
  /// paste operations performed. If fast was specified, this is a 'fast paste'
  /// case we handled.
  ///
  void IncrementPasteCounter(bool isFast) {
    if (isFast)
      ++NumFastTokenPaste;
    else
      ++NumTokenPaste;
  }

  void PrintStats();

  size_t getTotalMemory() const;

  /// HandleMicrosoftCommentPaste - When the macro expander pastes together a
  /// comment (/##/) in Microsoft mode, this method handles updating the
  /// current state, returning the token on the next source line.
  void HandleMicrosoftCommentPaste(Token &Tok);

  //===--------------------------------------------------------------------===//
  // Preprocessor callback methods. These are invoked by a lexer as various
  // directives and events are found.

  /// LookUpIdentifierInfo - Given a tok::raw_identifier token, look up the
  /// identifier information for the token and install it into the token,
  /// updating the token kind accordingly.
  IdentifierInfo *LookUpIdentifierInfo(Token &Identifier) const;

private:
  llvm::DenseMap<IdentifierInfo*,unsigned> PoisonReasons;

public:

  // SetPoisonReason - Call this function to indicate the reason for
  // poisoning an identifier. If that identifier is accessed while
  // poisoned, then this reason will be used instead of the default
  // "poisoned" diagnostic.
  void SetPoisonReason(IdentifierInfo *II, unsigned DiagID);

  // HandlePoisonedIdentifier - Display reason for poisoned
  // identifier.
  void HandlePoisonedIdentifier(Token &Tok);

  void MaybeHandlePoisonedIdentifier(Token &Identifier) {
    if (IdentifierInfo *II = Identifier.getIdentifierInfo()) {
      if (II->isPoisoned()) {
        HandlePoisonedIdentifier(Identifier);
      }
    }
  }

private:
  /// Identifiers used for SEH handling in Borland. These are only
  /// allowed in particular circumstances.
  // __except block
  IdentifierInfo *Ident__exception_code,
                 *Ident___exception_code, *Ident_GetExceptionCode;
  // __except filter expression
  IdentifierInfo *Ident__exception_info,
                 *Ident___exception_info, *Ident_GetExceptionInfo;
  // __finally
  IdentifierInfo *Ident__abnormal_termination,
                 *Ident___abnormal_termination, *Ident_AbnormalTermination;

public:
  void PoisonSEHIdentifiers(bool Poison = true); // Borland

  /// HandleIdentifier - This callback is invoked when the lexer reads an
  /// identifier and has filled in the token's IdentifierInfo member. This
  /// callback potentially macro expands it or turns it into a named token
  /// (like 'for').
  void HandleIdentifier(Token &Identifier);


  /// HandleEndOfFile - This callback is invoked when the lexer hits the end of
  /// the current file. This either returns the EOF token and returns true, or
  /// pops a level off the include stack and returns false, at which point the
  /// client should call lex again.
  bool HandleEndOfFile(Token &Result, bool isEndOfMacro = false);

  /// HandleEndOfTokenLexer - This callback is invoked when the current
  /// TokenLexer hits the end of its token stream.
  bool HandleEndOfTokenLexer(Token &Result);

  /// HandleDirective - This callback is invoked when the lexer sees a # token
  /// at the start of a line. This consumes the directive, modifies the
  /// lexer/preprocessor state, and advances the lexer(s) so that the next
  /// token read is the correct one.
  void HandleDirective(Token &Result);

  /// CheckEndOfDirective - Ensure that the next token is a tok::eod token. If
  /// not, emit a diagnostic and consume up until the eod. If EnableMacros is
  /// true, then we consider macros that expand to zero tokens as being ok.
  void CheckEndOfDirective(const char *Directive, bool EnableMacros = false);

  /// DiscardUntilEndOfDirective - Read and discard all tokens remaining on the
  /// current line until the tok::eod token is found.
  void DiscardUntilEndOfDirective();

  /// SawDateOrTime - This returns true if the preprocessor has seen a use of
  /// __DATE__ or __TIME__ in the file so far.
  bool SawDateOrTime() const {
    return DATELoc != SourceLocation() || TIMELoc != SourceLocation();
  }
  unsigned getCounterValue() const { return CounterValue; }
  void setCounterValue(unsigned V) { CounterValue = V; }

  /// AllocateMacroInfo - Allocate a new MacroInfo object with the provided
  /// SourceLocation.
  MacroInfo *AllocateMacroInfo(SourceLocation L);

  /// CloneMacroInfo - Allocate a new MacroInfo object which is a clone of MI.
  MacroInfo *CloneMacroInfo(const MacroInfo &MI);

  /// GetIncludeFilenameSpelling - Turn the specified lexer token into a fully
  /// checked and spelled filename, e.g. as an operand of #include. This
  /// returns true if the input filename was in <>'s or false if it was in
  /// ""'s. The caller is expected to provide a buffer that is large enough to
  /// hold the spelling of the filename, but is also expected to handle the
  /// case when this method decides to use a different buffer.
  bool GetIncludeFilenameSpelling(SourceLocation Loc, StringRef &Filename);

  /// LookupFile - Given a "foo" or <foo> reference, look up the indicated
  /// file. Returns null on failure. isAngled indicates whether the file
  /// reference is for system #include's or not (i.e. using <> instead of "").
  const FileEntry *LookupFile(StringRef Filename,
                              bool isAngled, const DirectoryLookup *FromDir,
                              const DirectoryLookup *&CurDir,
                              SmallVectorImpl<char> *SearchPath,
                              SmallVectorImpl<char> *RelativePath,
                              StringRef *SuggestedModule);

  /// GetCurDirLookup - The DirectoryLookup structure used to find the current
  /// FileEntry, if CurLexer is non-null and if applicable. This allows us to
  /// implement #include_next and find directory-specific properties.
  const DirectoryLookup *GetCurDirLookup() { return CurDirLookup; }

  /// isInPrimaryFile - Return true if we're in the top-level file, not in a
  /// #include.
  bool isInPrimaryFile() const;

  /// ConcatenateIncludeName - Handle cases where the #include name is expanded
  /// from a macro as multiple tokens, which need to be glued together. This
  /// occurs for code like:
  ///    #define FOO <a/b.h>
  ///    #include FOO
  /// because in this case, "<a/b.h>" is returned as 7 tokens, not one.
  ///
  /// This code concatenates and consumes tokens up to the '>' token. It
  /// returns false if the > was found, otherwise it returns true if it finds
  /// and consumes the EOD marker.
  bool ConcatenateIncludeName(llvm::SmallString<128> &FilenameBuffer,
                              SourceLocation &End);

  /// LexOnOffSwitch - Lex an on-off-switch (C99 6.10.6p2) and verify that it
  /// is followed by EOD. Return true if the token is not a valid
  /// on-off-switch.
  bool LexOnOffSwitch(tok::OnOffSwitch &OOS);

private:

  void PushIncludeMacroStack() {
    IncludeMacroStack.push_back(IncludeStackInfo(CurLexerKind,
                                                 CurLexer.take(),
                                                 CurPTHLexer.take(),
                                                 CurPPLexer,
                                                 CurTokenLexer.take(),
                                                 CurDirLookup));
    CurPPLexer = 0;
  }

  void PopIncludeMacroStack() {
    CurLexer.reset(IncludeMacroStack.back().TheLexer);
    CurPTHLexer.reset(IncludeMacroStack.back().ThePTHLexer);
    CurPPLexer = IncludeMacroStack.back().ThePPLexer;
    CurTokenLexer.reset(IncludeMacroStack.back().TheTokenLexer);
    CurDirLookup = IncludeMacroStack.back().TheDirLookup;
    CurLexerKind = IncludeMacroStack.back().CurLexerKind;
    IncludeMacroStack.pop_back();
  }

  /// AllocateMacroInfo - Allocate a new MacroInfo object.
  MacroInfo *AllocateMacroInfo();

  /// ReleaseMacroInfo - Release the specified MacroInfo. This memory will
  /// be reused for allocating new MacroInfo objects.
  void ReleaseMacroInfo(MacroInfo *MI);

  /// ReadMacroName - Lex and validate a macro name, which occurs after a
  /// #define or #undef. This emits a diagnostic, sets the token kind to eod,
  /// and discards the rest of the macro line if the macro name is invalid.
  void ReadMacroName(Token &MacroNameTok, char isDefineUndef = 0);

  /// ReadMacroDefinitionArgList - The ( starting an argument list of a macro
  /// definition has just been read. Lex the rest of the arguments and the
  /// closing ), updating MI with what we learn. Return true if an error
  /// occurs parsing the arg list.
  bool ReadMacroDefinitionArgList(MacroInfo *MI);

  /// SkipExcludedConditionalBlock - We just read a #if or related directive
  /// and decided that the subsequent tokens are in the #if'd out portion of
  /// the file. Lex the rest of the file, until we see an #endif. If
  /// FoundNonSkipPortion is true, then we have already emitted code for part
  /// of this #if directive, so #else/#elif blocks should never be entered. If
  /// FoundElse is false, then #else directives are ok; if not, we have
  /// already seen one, so an #else directive is a duplicate. When this
  /// returns, the caller can lex the first valid token.
  void SkipExcludedConditionalBlock(SourceLocation IfTokenLoc,
                                    bool FoundNonSkipPortion, bool FoundElse,
                                    SourceLocation ElseLoc = SourceLocation());

  /// PTHSkipExcludedConditionalBlock - A fast PTH version of
  /// SkipExcludedConditionalBlock.
  void PTHSkipExcludedConditionalBlock();

  /// EvaluateDirectiveExpression - Evaluate an integer constant expression
  /// that may occur after a #if or #elif directive and return it as a bool.
  /// If the expression is equivalent to "!defined(X)" return X in IfNDefMacro.
  bool EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro);

  /// RegisterBuiltinPragmas - Install the standard preprocessor pragmas:
  /// #pragma GCC poison/system_header/dependency and #pragma once.
  void RegisterBuiltinPragmas();

  /// RegisterBuiltinMacros - Register builtin macros, such as __LINE__ with
  /// the identifier table.
  void RegisterBuiltinMacros();

  /// HandleMacroExpandedIdentifier - If an identifier token is read that is
  /// to be expanded as a macro, handle it and return the next token as 'Tok'.
  /// If the macro should not be expanded return true, otherwise return false.
  bool HandleMacroExpandedIdentifier(Token &Tok, MacroInfo *MI);

  /// \brief Cache macro expanded tokens for TokenLexers.
  ///
  /// Works like a stack; a TokenLexer adds the macro expanded tokens that it
  /// is going to lex into the cache, and when it finishes the tokens are
  /// removed from the end of the cache.
  Token *cacheMacroExpandedTokens(TokenLexer *tokLexer,
                                  ArrayRef<Token> tokens);
  void removeCachedMacroExpandedTokensOfLastLexer();
  friend void TokenLexer::ExpandFunctionArguments();

  /// isNextPPTokenLParen - Determine whether the next preprocessor token to be
  /// lexed is a '('. If so, consume the token and return true, if not, this
  /// method should have no observable side-effect on the lexed tokens.
  bool isNextPPTokenLParen();

  /// ReadFunctionLikeMacroArgs - After reading "MACRO(", this method is
  /// invoked to read all of the formal arguments specified for the macro
  /// invocation. This returns null on error.
  MacroArgs *ReadFunctionLikeMacroArgs(Token &MacroName, MacroInfo *MI,
                                       SourceLocation &ExpansionEnd);

  /// ExpandBuiltinMacro - If an identifier token is read that is to be
  /// expanded as a builtin macro, handle it and return the next token as
  /// 'Tok'.
  void ExpandBuiltinMacro(Token &Tok);

  /// Handle_Pragma - Read a _Pragma directive, slice it up, process it, then
  /// return the first token after the directive. The _Pragma token has just
  /// been read into 'Tok'.
  void Handle_Pragma(Token &Tok);

  /// HandleMicrosoft__pragma - Like Handle_Pragma except the pragma text
  /// is not enclosed within a string literal.
  void HandleMicrosoft__pragma(Token &Tok);

  /// EnterSourceFileWithLexer - Add a lexer to the top of the include stack
  /// and start lexing tokens from it instead of the current buffer.
  void EnterSourceFileWithLexer(Lexer *TheLexer, const DirectoryLookup *Dir);

  /// EnterSourceFileWithPTH - Add a lexer to the top of the include stack and
  /// start getting tokens from it using the PTH cache.
  void EnterSourceFileWithPTH(PTHLexer *PL, const DirectoryLookup *Dir);

  /// IsFileLexer - Returns true if we are lexing from a file and not a
  /// pragma or a macro.
  static bool IsFileLexer(const Lexer *L, const PreprocessorLexer *P) {
    return L ? !L->isPragmaLexer() : P != 0;
  }

  static bool IsFileLexer(const IncludeStackInfo &I) {
    return IsFileLexer(I.TheLexer, I.ThePPLexer);
  }

  bool IsFileLexer() const {
    return IsFileLexer(CurLexer.get(), CurPPLexer);
  }

  //===--------------------------------------------------------------------===//
  // Caching stuff.
  void CachingLex(Token &Result);
  bool InCachingLexMode() const {
    // If the Lexer pointers are 0 and IncludeMacroStack is empty, it means
    // that we are past EOF, not that we are in CachingLex mode.
    return CurPPLexer == 0 && CurTokenLexer == 0 && CurPTHLexer == 0 &&
           !IncludeMacroStack.empty();
  }
  void EnterCachingLexMode();
  void ExitCachingLexMode() {
    if (InCachingLexMode())
      RemoveTopOfLexerStack();
  }
  const Token &PeekAhead(unsigned N);
  void AnnotatePreviousCachedTokens(const Token &Tok);

  //===--------------------------------------------------------------------===//
  /// Handle*Directive - implement the various preprocessor directives. These
  /// should side-effect the current preprocessor object so that the next call
  /// to Lex() will return the appropriate token next.
  void HandleLineDirective(Token &Tok);
  void HandleDigitDirective(Token &Tok);
  void HandleUserDiagnosticDirective(Token &Tok, bool isWarning);
  void HandleIdentSCCSDirective(Token &Tok);
  void HandleMacroExportDirective(Token &Tok);
  void HandleMacroPrivateDirective(Token &Tok);

  // File inclusion.
  void HandleIncludeDirective(SourceLocation HashLoc,
                              Token &Tok,
                              const DirectoryLookup *LookupFrom = 0,
                              bool isImport = false);
  void HandleIncludeNextDirective(SourceLocation HashLoc, Token &Tok);
  void HandleIncludeMacrosDirective(SourceLocation HashLoc, Token &Tok);
  void HandleImportDirective(SourceLocation HashLoc, Token &Tok);

  // Macro handling.
  void HandleDefineDirective(Token &Tok);
  void HandleUndefDirective(Token &Tok);

  // Conditional Inclusion.
  void HandleIfdefDirective(Token &Tok, bool isIfndef,
                            bool ReadAnyTokensBeforeDirective);
  void HandleIfDirective(Token &Tok, bool ReadAnyTokensBeforeDirective);
  void HandleEndifDirective(Token &Tok);
  void HandleElseDirective(Token &Tok);
  void HandleElifDirective(Token &Tok);

  // Pragmas.
  void HandlePragmaDirective(unsigned Introducer);
public:
  void HandlePragmaOnce(Token &OnceTok);
  void HandlePragmaMark();
  void HandlePragmaPoison(Token &PoisonTok);
  void HandlePragmaSystemHeader(Token &SysHeaderTok);
  void HandlePragmaDependency(Token &DependencyTok);
  void HandlePragmaComment(Token &CommentTok);
  void HandlePragmaMessage(Token &MessageTok);
  void HandlePragmaPushMacro(Token &Tok);
  void HandlePragmaPopMacro(Token &Tok);
  IdentifierInfo *ParsePragmaPushOrPopMacro(Token &Tok);

  // Return true and store the first token only if any CommentHandler
  // has inserted some tokens and getCommentRetentionState() is false.
  bool HandleComment(Token &Token, SourceRange Comment);

  /// \brief A macro is used, update information about macros that need unused
  /// warnings.
  void markMacroAsUsed(MacroInfo *MI);
};

/// \brief Abstract base class that describes a handler that will receive
/// source ranges for each of the comments encountered in the source file.
class CommentHandler {
public:
  virtual ~CommentHandler();

  // The handler shall return true if it has pushed any tokens
  // to be read using e.g. EnterToken or EnterTokenStream.
  virtual bool HandleComment(Preprocessor &PP, SourceRange Comment) = 0;
};

}  // end namespace clang

#endif