1 //===--- Preprocessor.h - C Language Family Preprocessor --------*- C++ -*-===// 2 // 3 // The LLVM Compiler Infrastructure 4 // 5 // This file is distributed under the University of Illinois Open Source 6 // License. See LICENSE.TXT for details. 7 // 8 //===----------------------------------------------------------------------===// 9 // 10 // This file defines the Preprocessor interface. 11 // 12 //===----------------------------------------------------------------------===// 13 14 #ifndef LLVM_CLANG_LEX_PREPROCESSOR_H 15 #define LLVM_CLANG_LEX_PREPROCESSOR_H 16 17 #include "clang/Basic/Builtins.h" 18 #include "clang/Basic/Diagnostic.h" 19 #include "clang/Basic/IdentifierTable.h" 20 #include "clang/Basic/SourceLocation.h" 21 #include "clang/Lex/Lexer.h" 22 #include "clang/Lex/MacroInfo.h" 23 #include "clang/Lex/ModuleMap.h" 24 #include "clang/Lex/PPCallbacks.h" 25 #include "clang/Lex/PTHLexer.h" 26 #include "clang/Lex/PTHManager.h" 27 #include "clang/Lex/TokenLexer.h" 28 #include "llvm/ADT/ArrayRef.h" 29 #include "llvm/ADT/DenseMap.h" 30 #include "llvm/ADT/IntrusiveRefCntPtr.h" 31 #include "llvm/ADT/OwningPtr.h" 32 #include "llvm/ADT/SmallPtrSet.h" 33 #include "llvm/ADT/SmallVector.h" 34 #include "llvm/Support/Allocator.h" 35 #include <vector> 36 37 namespace llvm { 38 template<unsigned InternalLen> class SmallString; 39 } 40 41 namespace clang { 42 43 class SourceManager; 44 class ExternalPreprocessorSource; 45 class FileManager; 46 class FileEntry; 47 class HeaderSearch; 48 class PragmaNamespace; 49 class PragmaHandler; 50 class CommentHandler; 51 class ScratchBuffer; 52 class TargetInfo; 53 class PPCallbacks; 54 class CodeCompletionHandler; 55 class DirectoryLookup; 56 class PreprocessingRecord; 57 class ModuleLoader; 58 class PreprocessorOptions; 59 60 /// \brief Stores token information for comparing actual tokens with 61 /// predefined values. Only handles simple tokens and identifiers. 62 class TokenValue { 63 tok::TokenKind Kind; 64 IdentifierInfo *II; 65 66 public: 67 TokenValue(tok::TokenKind Kind) : Kind(Kind), II(0) { 68 assert(Kind != tok::raw_identifier && "Raw identifiers are not supported."); 69 assert(Kind != tok::identifier && 70 "Identifiers should be created by TokenValue(IdentifierInfo *)"); 71 assert(!tok::isLiteral(Kind) && "Literals are not supported."); 72 assert(!tok::isAnnotation(Kind) && "Annotations are not supported."); 73 } 74 TokenValue(IdentifierInfo *II) : Kind(tok::identifier), II(II) {} 75 bool operator==(const Token &Tok) const { 76 return Tok.getKind() == Kind && 77 (!II || II == Tok.getIdentifierInfo()); 78 } 79 }; 80 81 /// Preprocessor - This object engages in a tight little dance with the lexer to 82 /// efficiently preprocess tokens. Lexers know only about tokens within a 83 /// single source file, and don't know anything about preprocessor-level issues 84 /// like the \#include stack, token expansion, etc. 85 /// 86 class Preprocessor : public RefCountedBase<Preprocessor> { 87 IntrusiveRefCntPtr<PreprocessorOptions> PPOpts; 88 DiagnosticsEngine *Diags; 89 LangOptions &LangOpts; 90 const TargetInfo *Target; 91 FileManager &FileMgr; 92 SourceManager &SourceMgr; 93 ScratchBuffer *ScratchBuf; 94 HeaderSearch &HeaderInfo; 95 ModuleLoader &TheModuleLoader; 96 97 /// \brief External source of macros. 98 ExternalPreprocessorSource *ExternalSource; 99 100 101 /// PTH - An optional PTHManager object used for getting tokens from 102 /// a token cache rather than lexing the original source file. 
103 OwningPtr<PTHManager> PTH;
104
105 /// BP - A BumpPtrAllocator object used to quickly allocate and release
106 /// objects internal to the Preprocessor.
107 llvm::BumpPtrAllocator BP;
108
109 /// Identifiers for builtin macros and other builtins.
110 IdentifierInfo *Ident__LINE__, *Ident__FILE__; // __LINE__, __FILE__
111 IdentifierInfo *Ident__DATE__, *Ident__TIME__; // __DATE__, __TIME__
112 IdentifierInfo *Ident__INCLUDE_LEVEL__; // __INCLUDE_LEVEL__
113 IdentifierInfo *Ident__BASE_FILE__; // __BASE_FILE__
114 IdentifierInfo *Ident__TIMESTAMP__; // __TIMESTAMP__
115 IdentifierInfo *Ident__COUNTER__; // __COUNTER__
116 IdentifierInfo *Ident_Pragma, *Ident__pragma; // _Pragma, __pragma
117 IdentifierInfo *Ident__VA_ARGS__; // __VA_ARGS__
118 IdentifierInfo *Ident__has_feature; // __has_feature
119 IdentifierInfo *Ident__has_extension; // __has_extension
120 IdentifierInfo *Ident__has_builtin; // __has_builtin
121 IdentifierInfo *Ident__has_attribute; // __has_attribute
122 IdentifierInfo *Ident__has_include; // __has_include
123 IdentifierInfo *Ident__has_include_next; // __has_include_next
124 IdentifierInfo *Ident__has_warning; // __has_warning
125 IdentifierInfo *Ident__building_module; // __building_module
126 IdentifierInfo *Ident__MODULE__; // __MODULE__
127
128 SourceLocation DATELoc, TIMELoc;
129 unsigned CounterValue; // Next __COUNTER__ value.
130
131 enum {
132 /// MaxAllowedIncludeStackDepth - Maximum depth of \#includes.
133 MaxAllowedIncludeStackDepth = 200
134 };
135
136 // State that is set before the preprocessor begins.
137 bool KeepComments : 1;
138 bool KeepMacroComments : 1;
139 bool SuppressIncludeNotFoundError : 1;
140
141 // State that changes while the preprocessor runs:
142 bool InMacroArgs : 1; // True if parsing fn macro invocation args.
143
144 /// Whether the preprocessor owns the header search object.
145 bool OwnsHeaderSearch : 1;
146
147 /// DisableMacroExpansion - True if macro expansion is disabled.
148 bool DisableMacroExpansion : 1;
149
150 /// MacroExpansionInDirectivesOverride - Temporarily disables
151 /// DisableMacroExpansion (i.e. enables expansion) when parsing preprocessor
152 /// directives.
153 bool MacroExpansionInDirectivesOverride : 1;
154
155 class ResetMacroExpansionHelper;
156
157 /// \brief Whether we have already loaded macros from the external source.
158 mutable bool ReadMacrosFromExternalSource : 1;
159
160 /// \brief True if pragmas are enabled.
161 bool PragmasEnabled : 1;
162
163 /// \brief True if the current build action is a preprocessing action.
164 bool PreprocessedOutput : 1;
165
166 /// \brief True if we are currently preprocessing a #if or #elif directive.
167 bool ParsingIfOrElifDirective;
168
169 /// \brief True if we are pre-expanding macro arguments.
170 bool InMacroArgPreExpansion;
171
172 /// Identifiers - This is mapping/lookup information for all identifiers in
173 /// the program, including program keywords.
174 mutable IdentifierTable Identifiers;
175
176 /// Selectors - This table contains all the selectors in the program. Unlike
177 /// IdentifierTable above, this table *isn't* populated by the preprocessor.
178 /// It is declared/expanded here because its role/lifetime is
179 /// conceptually similar to that of the IdentifierTable. In addition, the current
180 /// control flow (in clang::ParseAST()) makes it convenient to put it here.
181 /// FIXME: Make sure the lifetime of Identifiers/Selectors *isn't* tied to
182 /// the lifetime of the preprocessor.
183 SelectorTable Selectors;
184
185 /// BuiltinInfo - Information about builtins.
186 Builtin::Context BuiltinInfo;
187
188 /// PragmaHandlers - This tracks all of the pragmas that the client registered
189 /// with this preprocessor.
190 PragmaNamespace *PragmaHandlers;
191
192 /// \brief Tracks all of the comment handlers that the client registered
193 /// with this preprocessor.
194 std::vector<CommentHandler *> CommentHandlers;
195
196 /// \brief True if we want to ignore the EOF token and continue later on (thus
197 /// avoiding tearing down the Lexer, etc.).
198 bool IncrementalProcessing;
199
200 /// \brief The code-completion handler.
201 CodeCompletionHandler *CodeComplete;
202
203 /// \brief The file that we're performing code-completion for, if any.
204 const FileEntry *CodeCompletionFile;
205
206 /// \brief The offset in file for the code-completion point.
207 unsigned CodeCompletionOffset;
208
209 /// \brief The location for the code-completion point. This gets instantiated
210 /// when the CodeCompletionFile gets \#include'ed for preprocessing.
211 SourceLocation CodeCompletionLoc;
212
213 /// \brief The start location for the file of the code-completion point.
214 ///
215 /// This gets instantiated when the CodeCompletionFile gets \#include'ed
216 /// for preprocessing.
217 SourceLocation CodeCompletionFileLoc;
218
219 /// \brief The source location of the 'import' contextual keyword we just
220 /// lexed, if any.
221 SourceLocation ModuleImportLoc;
222
223 /// \brief The module import path that we're currently processing.
224 SmallVector<std::pair<IdentifierInfo *, SourceLocation>, 2> ModuleImportPath;
225
226 /// \brief Whether the module import expects an identifier next. Otherwise,
227 /// it expects a '.' or ';'.
228 bool ModuleImportExpectsIdentifier;
229
230 /// \brief The source location of the currently-active
231 /// #pragma clang arc_cf_code_audited begin.
232 SourceLocation PragmaARCCFCodeAuditedLoc;
233
234 /// \brief True if we hit the code-completion point.
235 bool CodeCompletionReached;
236
237 /// \brief The number of bytes that we will initially skip when entering the
238 /// main file, which is used when loading a precompiled preamble, along
239 /// with a flag that indicates whether skipping this number of bytes will
240 /// place the lexer at the start of a line.
241 std::pair<unsigned, bool> SkipMainFilePreamble;
242
243 /// CurLexer - This is the current top of the stack that we're lexing from if
244 /// not expanding a macro and we are lexing directly from source code.
245 /// Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
246 OwningPtr<Lexer> CurLexer;
247
248 /// CurPTHLexer - This is the current top of the stack that we're lexing from if
249 /// not expanding from a macro and we are lexing from a PTH cache.
250 /// Only one of CurLexer, CurPTHLexer, or CurTokenLexer will be non-null.
251 OwningPtr<PTHLexer> CurPTHLexer;
252
253 /// CurPPLexer - This is the current top of the stack that we're lexing from
254 /// if not expanding a macro. This is an alias for either CurLexer or
255 /// CurPTHLexer.
256 PreprocessorLexer *CurPPLexer;
257
258 /// CurDirLookup - The DirectoryLookup structure used to find the current
259 /// FileEntry, if CurLexer is non-null and if applicable. This allows us to
260 /// implement \#include_next and find directory-specific properties.
261 const DirectoryLookup *CurDirLookup;
262
263 /// CurTokenLexer - This is the current macro we are expanding, if we are
264 /// expanding a macro.
One of CurLexer and CurTokenLexer must be null. 265 OwningPtr<TokenLexer> CurTokenLexer; 266 267 /// \brief The kind of lexer we're currently working with. 268 enum CurLexerKind { 269 CLK_Lexer, 270 CLK_PTHLexer, 271 CLK_TokenLexer, 272 CLK_CachingLexer, 273 CLK_LexAfterModuleImport 274 } CurLexerKind; 275 276 /// IncludeMacroStack - This keeps track of the stack of files currently 277 /// \#included, and macros currently being expanded from, not counting 278 /// CurLexer/CurTokenLexer. 279 struct IncludeStackInfo { 280 enum CurLexerKind CurLexerKind; 281 Lexer *TheLexer; 282 PTHLexer *ThePTHLexer; 283 PreprocessorLexer *ThePPLexer; 284 TokenLexer *TheTokenLexer; 285 const DirectoryLookup *TheDirLookup; 286 287 IncludeStackInfo(enum CurLexerKind K, Lexer *L, PTHLexer* P, 288 PreprocessorLexer* PPL, 289 TokenLexer* TL, const DirectoryLookup *D) 290 : CurLexerKind(K), TheLexer(L), ThePTHLexer(P), ThePPLexer(PPL), 291 TheTokenLexer(TL), TheDirLookup(D) {} 292 }; 293 std::vector<IncludeStackInfo> IncludeMacroStack; 294 295 /// Callbacks - These are actions invoked when some preprocessor activity is 296 /// encountered (e.g. a file is \#included, etc). 297 PPCallbacks *Callbacks; 298 299 struct MacroExpandsInfo { 300 Token Tok; 301 MacroDirective *MD; 302 SourceRange Range; 303 MacroExpandsInfo(Token Tok, MacroDirective *MD, SourceRange Range) 304 : Tok(Tok), MD(MD), Range(Range) { } 305 }; 306 SmallVector<MacroExpandsInfo, 2> DelayedMacroExpandsCallbacks; 307 308 /// Macros - For each IdentifierInfo that was associated with a macro, we 309 /// keep a mapping to the history of all macro definitions and #undefs in 310 /// the reverse order (the latest one is in the head of the list). 311 llvm::DenseMap<const IdentifierInfo*, MacroDirective*> Macros; 312 friend class ASTReader; 313 314 /// \brief Macros that we want to warn because they are not used at the end 315 /// of the translation unit; we store just their SourceLocations instead 316 /// something like MacroInfo*. The benefit of this is that when we are 317 /// deserializing from PCH, we don't need to deserialize identifier & macros 318 /// just so that we can report that they are unused, we just warn using 319 /// the SourceLocations of this set (that will be filled by the ASTReader). 320 /// We are using SmallPtrSet instead of a vector for faster removal. 321 typedef llvm::SmallPtrSet<SourceLocation, 32> WarnUnusedMacroLocsTy; 322 WarnUnusedMacroLocsTy WarnUnusedMacroLocs; 323 324 /// MacroArgCache - This is a "freelist" of MacroArg objects that can be 325 /// reused for quick allocation. 326 MacroArgs *MacroArgCache; 327 friend class MacroArgs; 328 329 /// PragmaPushMacroInfo - For each IdentifierInfo used in a #pragma 330 /// push_macro directive, we keep a MacroInfo stack used to restore 331 /// previous macro value. 332 llvm::DenseMap<IdentifierInfo*, std::vector<MacroInfo*> > PragmaPushMacroInfo; 333 334 // Various statistics we track for performance analysis. 335 unsigned NumDirectives, NumIncluded, NumDefined, NumUndefined, NumPragma; 336 unsigned NumIf, NumElse, NumEndif; 337 unsigned NumEnteredSourceFiles, MaxIncludeStackDepth; 338 unsigned NumMacroExpanded, NumFnMacroExpanded, NumBuiltinMacroExpanded; 339 unsigned NumFastMacroExpanded, NumTokenPaste, NumFastTokenPaste; 340 unsigned NumSkipped; 341 342 /// Predefines - This string is the predefined macros that preprocessor 343 /// should use from the command line etc. 344 std::string Predefines; 345 346 /// \brief The file ID for the preprocessor predefines. 
347 FileID PredefinesFileID; 348 349 /// TokenLexerCache - Cache macro expanders to reduce malloc traffic. 350 enum { TokenLexerCacheSize = 8 }; 351 unsigned NumCachedTokenLexers; 352 TokenLexer *TokenLexerCache[TokenLexerCacheSize]; 353 354 /// \brief Keeps macro expanded tokens for TokenLexers. 355 // 356 /// Works like a stack; a TokenLexer adds the macro expanded tokens that is 357 /// going to lex in the cache and when it finishes the tokens are removed 358 /// from the end of the cache. 359 SmallVector<Token, 16> MacroExpandedTokens; 360 std::vector<std::pair<TokenLexer *, size_t> > MacroExpandingLexersStack; 361 362 /// \brief A record of the macro definitions and expansions that 363 /// occurred during preprocessing. 364 /// 365 /// This is an optional side structure that can be enabled with 366 /// \c createPreprocessingRecord() prior to preprocessing. 367 PreprocessingRecord *Record; 368 369 private: // Cached tokens state. 370 typedef SmallVector<Token, 1> CachedTokensTy; 371 372 /// CachedTokens - Cached tokens are stored here when we do backtracking or 373 /// lookahead. They are "lexed" by the CachingLex() method. 374 CachedTokensTy CachedTokens; 375 376 /// CachedLexPos - The position of the cached token that CachingLex() should 377 /// "lex" next. If it points beyond the CachedTokens vector, it means that 378 /// a normal Lex() should be invoked. 379 CachedTokensTy::size_type CachedLexPos; 380 381 /// BacktrackPositions - Stack of backtrack positions, allowing nested 382 /// backtracks. The EnableBacktrackAtThisPos() method pushes a position to 383 /// indicate where CachedLexPos should be set when the BackTrack() method is 384 /// invoked (at which point the last position is popped). 385 std::vector<CachedTokensTy::size_type> BacktrackPositions; 386 387 struct MacroInfoChain { 388 MacroInfo MI; 389 MacroInfoChain *Next; 390 MacroInfoChain *Prev; 391 }; 392 393 /// MacroInfos are managed as a chain for easy disposal. This is the head 394 /// of that list. 395 MacroInfoChain *MIChainHead; 396 397 /// MICache - A "freelist" of MacroInfo objects that can be reused for quick 398 /// allocation. 399 MacroInfoChain *MICache; 400 401 struct DeserializedMacroInfoChain { 402 MacroInfo MI; 403 unsigned OwningModuleID; // MUST be immediately after the MacroInfo object 404 // so it can be accessed by MacroInfo::getOwningModuleID(). 405 DeserializedMacroInfoChain *Next; 406 }; 407 DeserializedMacroInfoChain *DeserialMIChainHead; 408 409 public: 410 Preprocessor(IntrusiveRefCntPtr<PreprocessorOptions> PPOpts, 411 DiagnosticsEngine &diags, LangOptions &opts, 412 const TargetInfo *target, 413 SourceManager &SM, HeaderSearch &Headers, 414 ModuleLoader &TheModuleLoader, 415 IdentifierInfoLookup *IILookup = 0, 416 bool OwnsHeaderSearch = false, 417 bool DelayInitialization = false, 418 bool IncrProcessing = false); 419 420 ~Preprocessor(); 421 422 /// \brief Initialize the preprocessor, if the constructor did not already 423 /// perform the initialization. 424 /// 425 /// \param Target Information about the target. 426 void Initialize(const TargetInfo &Target); 427 428 /// \brief Retrieve the preprocessor options used to initialize this 429 /// preprocessor. 
430 PreprocessorOptions &getPreprocessorOpts() const { return *PPOpts; } 431 432 DiagnosticsEngine &getDiagnostics() const { return *Diags; } 433 void setDiagnostics(DiagnosticsEngine &D) { Diags = &D; } 434 435 const LangOptions &getLangOpts() const { return LangOpts; } 436 const TargetInfo &getTargetInfo() const { return *Target; } 437 FileManager &getFileManager() const { return FileMgr; } 438 SourceManager &getSourceManager() const { return SourceMgr; } 439 HeaderSearch &getHeaderSearchInfo() const { return HeaderInfo; } 440 441 IdentifierTable &getIdentifierTable() { return Identifiers; } 442 SelectorTable &getSelectorTable() { return Selectors; } 443 Builtin::Context &getBuiltinInfo() { return BuiltinInfo; } 444 llvm::BumpPtrAllocator &getPreprocessorAllocator() { return BP; } 445 446 void setPTHManager(PTHManager* pm); 447 448 PTHManager *getPTHManager() { return PTH.get(); } 449 450 void setExternalSource(ExternalPreprocessorSource *Source) { 451 ExternalSource = Source; 452 } 453 454 ExternalPreprocessorSource *getExternalSource() const { 455 return ExternalSource; 456 } 457 458 /// \brief Retrieve the module loader associated with this preprocessor. 459 ModuleLoader &getModuleLoader() const { return TheModuleLoader; } 460 461 bool hadModuleLoaderFatalFailure() const { 462 return TheModuleLoader.HadFatalFailure; 463 } 464 465 /// \brief True if we are currently preprocessing a #if or #elif directive 466 bool isParsingIfOrElifDirective() const { 467 return ParsingIfOrElifDirective; 468 } 469 470 /// SetCommentRetentionState - Control whether or not the preprocessor retains 471 /// comments in output. 472 void SetCommentRetentionState(bool KeepComments, bool KeepMacroComments) { 473 this->KeepComments = KeepComments | KeepMacroComments; 474 this->KeepMacroComments = KeepMacroComments; 475 } 476 477 bool getCommentRetentionState() const { return KeepComments; } 478 479 void setPragmasEnabled(bool Enabled) { PragmasEnabled = Enabled; } 480 bool getPragmasEnabled() const { return PragmasEnabled; } 481 482 void SetSuppressIncludeNotFoundError(bool Suppress) { 483 SuppressIncludeNotFoundError = Suppress; 484 } 485 486 bool GetSuppressIncludeNotFoundError() { 487 return SuppressIncludeNotFoundError; 488 } 489 490 /// Sets whether the preprocessor is responsible for producing output or if 491 /// it is producing tokens to be consumed by Parse and Sema. 492 void setPreprocessedOutput(bool IsPreprocessedOutput) { 493 PreprocessedOutput = IsPreprocessedOutput; 494 } 495 496 /// Returns true if the preprocessor is responsible for generating output, 497 /// false if it is producing tokens to be consumed by Parse and Sema. 498 bool isPreprocessedOutput() const { return PreprocessedOutput; } 499 500 /// isCurrentLexer - Return true if we are lexing directly from the specified 501 /// lexer. 502 bool isCurrentLexer(const PreprocessorLexer *L) const { 503 return CurPPLexer == L; 504 } 505 506 /// getCurrentLexer - Return the current lexer being lexed from. Note 507 /// that this ignores any potentially active macro expansions and _Pragma 508 /// expansions going on at the time. 509 PreprocessorLexer *getCurrentLexer() const { return CurPPLexer; } 510 511 /// getCurrentFileLexer - Return the current file lexer being lexed from. 512 /// Note that this ignores any potentially active macro expansions and _Pragma 513 /// expansions going on at the time. 514 PreprocessorLexer *getCurrentFileLexer() const; 515 516 /// \brief Returns the file ID for the preprocessor predefines. 
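///
/// For example (illustrative sketch; assumes a Preprocessor &PP and a
/// SourceLocation Loc are in scope), a client can test whether a location
/// falls inside the synthetic predefines buffer rather than user code:
/// \code
///   if (PP.getSourceManager().getFileID(Loc) == PP.getPredefinesFileID()) {
///     // Loc points into the injected predefines, not into a real file.
///   }
/// \endcode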
517 FileID getPredefinesFileID() const { return PredefinesFileID; }
518
519 /// getPPCallbacks/addPPCallbacks - Accessors for preprocessor callbacks.
520 /// Note that this class takes ownership of any PPCallbacks object given to
521 /// it.
522 PPCallbacks *getPPCallbacks() const { return Callbacks; }
523 void addPPCallbacks(PPCallbacks *C) {
524 if (Callbacks)
525 C = new PPChainedCallbacks(C, Callbacks);
526 Callbacks = C;
527 }
528
529 /// \brief Given an identifier, return its latest MacroDirective if it is
530 /// \#define'd, or null if it is not.
531 MacroDirective *getMacroDirective(IdentifierInfo *II) const {
532 if (!II->hasMacroDefinition())
533 return 0;
534
535 MacroDirective *MD = getMacroDirectiveHistory(II);
536 assert(MD->isDefined() && "Macro is undefined!");
537 return MD;
538 }
539
540 const MacroInfo *getMacroInfo(IdentifierInfo *II) const {
541 return const_cast<Preprocessor*>(this)->getMacroInfo(II);
542 }
543
544 MacroInfo *getMacroInfo(IdentifierInfo *II) {
545 if (MacroDirective *MD = getMacroDirective(II))
546 return MD->getMacroInfo();
547 return 0;
548 }
549
550 /// \brief Given an identifier, return the (probably #undef'd) MacroInfo
551 /// representing the most recent macro definition. One can iterate over all
552 /// previous macro definitions from it. This method should only be called for
553 /// identifiers that hadMacroDefinition().
554 MacroDirective *getMacroDirectiveHistory(const IdentifierInfo *II) const;
555
556 /// \brief Add a directive to the macro directive history for this identifier.
557 void appendMacroDirective(IdentifierInfo *II, MacroDirective *MD);
558 DefMacroDirective *appendDefMacroDirective(IdentifierInfo *II, MacroInfo *MI,
559 SourceLocation Loc,
560 bool isImported) {
561 DefMacroDirective *MD = AllocateDefMacroDirective(MI, Loc, isImported);
562 appendMacroDirective(II, MD);
563 return MD;
564 }
565 DefMacroDirective *appendDefMacroDirective(IdentifierInfo *II, MacroInfo *MI){
566 return appendDefMacroDirective(II, MI, MI->getDefinitionLoc(), false);
567 }
568 /// \brief Set a MacroDirective that was loaded from a PCH file.
569 void setLoadedMacroDirective(IdentifierInfo *II, MacroDirective *MD);
570
571 /// macro_iterator/macro_begin/macro_end - This allows you to walk the macro
572 /// history table. Currently defined macros have
573 /// IdentifierInfo::hasMacroDefinition() set and an empty
574 /// MacroInfo::getUndefLoc() at the head of the list.
575 typedef llvm::DenseMap<const IdentifierInfo *,
576 MacroDirective*>::const_iterator macro_iterator;
577 macro_iterator macro_begin(bool IncludeExternalMacros = true) const;
578 macro_iterator macro_end(bool IncludeExternalMacros = true) const;
579
580 /// \brief Return the name of the macro defined before \p Loc that has
581 /// spelling \p Tokens. If there are multiple macros with the same spelling,
582 /// return the last one defined.
583 StringRef getLastMacroWithSpelling(SourceLocation Loc,
584 ArrayRef<TokenValue> Tokens) const;
585
586 const std::string &getPredefines() const { return Predefines; }
587 /// setPredefines - Set the predefines for this Preprocessor. These
588 /// predefines are automatically injected when parsing the main file.
589 void setPredefines(const char *P) { Predefines = P; }
590 void setPredefines(const std::string &P) { Predefines = P; }
591
592 /// Return information about the specified preprocessor
593 /// identifier token.
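///
/// For example (illustrative sketch; assumes a Preprocessor &PP), this can
/// be combined with getMacroInfo() to check whether a known spelling is
/// currently \#define'd:
/// \code
///   IdentifierInfo *II = PP.getIdentifierInfo("assert");
///   if (const MacroInfo *MI = PP.getMacroInfo(II)) {
///     // "assert" is defined as a macro; MI->getDefinitionLoc() gives the
///     // location of its most recent definition.
///   }
/// \endcode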
594 IdentifierInfo *getIdentifierInfo(StringRef Name) const { 595 return &Identifiers.get(Name); 596 } 597 598 /// AddPragmaHandler - Add the specified pragma handler to the preprocessor. 599 /// If 'Namespace' is non-null, then it is a token required to exist on the 600 /// pragma line before the pragma string starts, e.g. "STDC" or "GCC". 601 void AddPragmaHandler(StringRef Namespace, PragmaHandler *Handler); 602 void AddPragmaHandler(PragmaHandler *Handler) { 603 AddPragmaHandler(StringRef(), Handler); 604 } 605 606 /// RemovePragmaHandler - Remove the specific pragma handler from 607 /// the preprocessor. If \p Namespace is non-null, then it should 608 /// be the namespace that \p Handler was added to. It is an error 609 /// to remove a handler that has not been registered. 610 void RemovePragmaHandler(StringRef Namespace, PragmaHandler *Handler); 611 void RemovePragmaHandler(PragmaHandler *Handler) { 612 RemovePragmaHandler(StringRef(), Handler); 613 } 614 615 /// \brief Add the specified comment handler to the preprocessor. 616 void addCommentHandler(CommentHandler *Handler); 617 618 /// \brief Remove the specified comment handler. 619 /// 620 /// It is an error to remove a handler that has not been registered. 621 void removeCommentHandler(CommentHandler *Handler); 622 623 /// \brief Set the code completion handler to the given object. 624 void setCodeCompletionHandler(CodeCompletionHandler &Handler) { 625 CodeComplete = &Handler; 626 } 627 628 /// \brief Retrieve the current code-completion handler. 629 CodeCompletionHandler *getCodeCompletionHandler() const { 630 return CodeComplete; 631 } 632 633 /// \brief Clear out the code completion handler. 634 void clearCodeCompletionHandler() { 635 CodeComplete = 0; 636 } 637 638 /// \brief Hook used by the lexer to invoke the "natural language" code 639 /// completion point. 640 void CodeCompleteNaturalLanguage(); 641 642 /// \brief Retrieve the preprocessing record, or NULL if there is no 643 /// preprocessing record. 644 PreprocessingRecord *getPreprocessingRecord() const { return Record; } 645 646 /// \brief Create a new preprocessing record, which will keep track of 647 /// all macro expansions, macro definitions, etc. 648 void createPreprocessingRecord(); 649 650 /// EnterMainSourceFile - Enter the specified FileID as the main source file, 651 /// which implicitly adds the builtin defines etc. 652 void EnterMainSourceFile(); 653 654 /// EndSourceFile - Inform the preprocessor callbacks that processing is 655 /// complete. 656 void EndSourceFile(); 657 658 /// EnterSourceFile - Add a source file to the top of the include stack and 659 /// start lexing tokens from it instead of the current buffer. Emit an error 660 /// and don't enter the file on error. 661 void EnterSourceFile(FileID CurFileID, const DirectoryLookup *Dir, 662 SourceLocation Loc); 663 664 /// EnterMacro - Add a Macro to the top of the include stack and start lexing 665 /// tokens from it instead of the current buffer. Args specifies the 666 /// tokens input to a function-like macro. 667 /// 668 /// ILEnd specifies the location of the ')' for a function-like macro or the 669 /// identifier for an object-like macro. 670 void EnterMacro(Token &Identifier, SourceLocation ILEnd, MacroInfo *Macro, 671 MacroArgs *Args); 672 673 /// EnterTokenStream - Add a "macro" context to the top of the include stack, 674 /// which will cause the lexer to start returning the specified tokens. 
675 /// 676 /// If DisableMacroExpansion is true, tokens lexed from the token stream will 677 /// not be subject to further macro expansion. Otherwise, these tokens will 678 /// be re-macro-expanded when/if expansion is enabled. 679 /// 680 /// If OwnsTokens is false, this method assumes that the specified stream of 681 /// tokens has a permanent owner somewhere, so they do not need to be copied. 682 /// If it is true, it assumes the array of tokens is allocated with new[] and 683 /// must be freed. 684 /// 685 void EnterTokenStream(const Token *Toks, unsigned NumToks, 686 bool DisableMacroExpansion, bool OwnsTokens); 687 688 /// RemoveTopOfLexerStack - Pop the current lexer/macro exp off the top of the 689 /// lexer stack. This should only be used in situations where the current 690 /// state of the top-of-stack lexer is known. 691 void RemoveTopOfLexerStack(); 692 693 /// EnableBacktrackAtThisPos - From the point that this method is called, and 694 /// until CommitBacktrackedTokens() or Backtrack() is called, the Preprocessor 695 /// keeps track of the lexed tokens so that a subsequent Backtrack() call will 696 /// make the Preprocessor re-lex the same tokens. 697 /// 698 /// Nested backtracks are allowed, meaning that EnableBacktrackAtThisPos can 699 /// be called multiple times and CommitBacktrackedTokens/Backtrack calls will 700 /// be combined with the EnableBacktrackAtThisPos calls in reverse order. 701 /// 702 /// NOTE: *DO NOT* forget to call either CommitBacktrackedTokens or Backtrack 703 /// at some point after EnableBacktrackAtThisPos. If you don't, caching of 704 /// tokens will continue indefinitely. 705 /// 706 void EnableBacktrackAtThisPos(); 707 708 /// CommitBacktrackedTokens - Disable the last EnableBacktrackAtThisPos call. 709 void CommitBacktrackedTokens(); 710 711 /// Backtrack - Make Preprocessor re-lex the tokens that were lexed since 712 /// EnableBacktrackAtThisPos() was previously called. 713 void Backtrack(); 714 715 /// isBacktrackEnabled - True if EnableBacktrackAtThisPos() was called and 716 /// caching of tokens is on. 717 bool isBacktrackEnabled() const { return !BacktrackPositions.empty(); } 718 719 /// Lex - To lex a token from the preprocessor, just pull a token from the 720 /// current lexer or macro object. 721 void Lex(Token &Result) { 722 switch (CurLexerKind) { 723 case CLK_Lexer: CurLexer->Lex(Result); break; 724 case CLK_PTHLexer: CurPTHLexer->Lex(Result); break; 725 case CLK_TokenLexer: CurTokenLexer->Lex(Result); break; 726 case CLK_CachingLexer: CachingLex(Result); break; 727 case CLK_LexAfterModuleImport: LexAfterModuleImport(Result); break; 728 } 729 } 730 731 void LexAfterModuleImport(Token &Result); 732 733 /// \brief Lex a string literal, which may be the concatenation of multiple 734 /// string literals and may even come from macro expansion. 735 /// \returns true on success, false if a error diagnostic has been generated. 736 bool LexStringLiteral(Token &Result, std::string &String, 737 const char *DiagnosticTag, bool AllowMacroExpansion) { 738 if (AllowMacroExpansion) 739 Lex(Result); 740 else 741 LexUnexpandedToken(Result); 742 return FinishLexStringLiteral(Result, String, DiagnosticTag, 743 AllowMacroExpansion); 744 } 745 746 /// \brief Complete the lexing of a string literal where the first token has 747 /// already been lexed (see LexStringLiteral). 748 bool FinishLexStringLiteral(Token &Result, std::string &String, 749 const char *DiagnosticTag, 750 bool AllowMacroExpansion); 751 752 /// LexNonComment - Lex a token. 
If it's a comment, keep lexing until we get 753 /// something not a comment. This is useful in -E -C mode where comments 754 /// would foul up preprocessor directive handling. 755 void LexNonComment(Token &Result) { 756 do 757 Lex(Result); 758 while (Result.getKind() == tok::comment); 759 } 760 761 /// LexUnexpandedToken - This is just like Lex, but this disables macro 762 /// expansion of identifier tokens. 763 void LexUnexpandedToken(Token &Result) { 764 // Disable macro expansion. 765 bool OldVal = DisableMacroExpansion; 766 DisableMacroExpansion = true; 767 // Lex the token. 768 Lex(Result); 769 770 // Reenable it. 771 DisableMacroExpansion = OldVal; 772 } 773 774 /// LexUnexpandedNonComment - Like LexNonComment, but this disables macro 775 /// expansion of identifier tokens. 776 void LexUnexpandedNonComment(Token &Result) { 777 do 778 LexUnexpandedToken(Result); 779 while (Result.getKind() == tok::comment); 780 } 781 782 /// Disables macro expansion everywhere except for preprocessor directives. 783 void SetMacroExpansionOnlyInDirectives() { 784 DisableMacroExpansion = true; 785 MacroExpansionInDirectivesOverride = true; 786 } 787 788 /// LookAhead - This peeks ahead N tokens and returns that token without 789 /// consuming any tokens. LookAhead(0) returns the next token that would be 790 /// returned by Lex(), LookAhead(1) returns the token after it, etc. This 791 /// returns normal tokens after phase 5. As such, it is equivalent to using 792 /// 'Lex', not 'LexUnexpandedToken'. 793 const Token &LookAhead(unsigned N) { 794 if (CachedLexPos + N < CachedTokens.size()) 795 return CachedTokens[CachedLexPos+N]; 796 else 797 return PeekAhead(N+1); 798 } 799 800 /// RevertCachedTokens - When backtracking is enabled and tokens are cached, 801 /// this allows to revert a specific number of tokens. 802 /// Note that the number of tokens being reverted should be up to the last 803 /// backtrack position, not more. 804 void RevertCachedTokens(unsigned N) { 805 assert(isBacktrackEnabled() && 806 "Should only be called when tokens are cached for backtracking"); 807 assert(signed(CachedLexPos) - signed(N) >= signed(BacktrackPositions.back()) 808 && "Should revert tokens up to the last backtrack position, not more"); 809 assert(signed(CachedLexPos) - signed(N) >= 0 && 810 "Corrupted backtrack positions ?"); 811 CachedLexPos -= N; 812 } 813 814 /// EnterToken - Enters a token in the token stream to be lexed next. If 815 /// BackTrack() is called afterwards, the token will remain at the insertion 816 /// point. 817 void EnterToken(const Token &Tok) { 818 EnterCachingLexMode(); 819 CachedTokens.insert(CachedTokens.begin()+CachedLexPos, Tok); 820 } 821 822 /// AnnotateCachedTokens - We notify the Preprocessor that if it is caching 823 /// tokens (because backtrack is enabled) it should replace the most recent 824 /// cached tokens with the given annotation token. This function has no effect 825 /// if backtracking is not enabled. 826 /// 827 /// Note that the use of this function is just for optimization; so that the 828 /// cached tokens doesn't get re-parsed and re-resolved after a backtrack is 829 /// invoked. 830 void AnnotateCachedTokens(const Token &Tok) { 831 assert(Tok.isAnnotation() && "Expected annotation token"); 832 if (CachedLexPos != 0 && isBacktrackEnabled()) 833 AnnotatePreviousCachedTokens(Tok); 834 } 835 836 /// Get the location of the last cached token, suitable for setting the end 837 /// location of an annotation token. 
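///
/// Illustrative sketch (the annotation kind, BeginLoc, and the annotation
/// value are placeholders; the Token setters used here live in Lex/Token.h):
/// \code
///   Token Annot;
///   Annot.startToken();
///   Annot.setKind(tok::annot_typename);
///   Annot.setLocation(BeginLoc);
///   Annot.setAnnotationEndLoc(PP.getLastCachedTokenLocation());
///   PP.AnnotateCachedTokens(Annot);
/// \endcode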
838 SourceLocation getLastCachedTokenLocation() const { 839 assert(CachedLexPos != 0); 840 return CachedTokens[CachedLexPos-1].getLocation(); 841 } 842 843 /// \brief Replace the last token with an annotation token. 844 /// 845 /// Like AnnotateCachedTokens(), this routine replaces an 846 /// already-parsed (and resolved) token with an annotation 847 /// token. However, this routine only replaces the last token with 848 /// the annotation token; it does not affect any other cached 849 /// tokens. This function has no effect if backtracking is not 850 /// enabled. 851 void ReplaceLastTokenWithAnnotation(const Token &Tok) { 852 assert(Tok.isAnnotation() && "Expected annotation token"); 853 if (CachedLexPos != 0 && isBacktrackEnabled()) 854 CachedTokens[CachedLexPos-1] = Tok; 855 } 856 857 /// TypoCorrectToken - Update the current token to represent the provided 858 /// identifier, in order to cache an action performed by typo correction. 859 void TypoCorrectToken(const Token &Tok) { 860 assert(Tok.getIdentifierInfo() && "Expected identifier token"); 861 if (CachedLexPos != 0 && isBacktrackEnabled()) 862 CachedTokens[CachedLexPos-1] = Tok; 863 } 864 865 /// \brief Recompute the current lexer kind based on the CurLexer/CurPTHLexer/ 866 /// CurTokenLexer pointers. 867 void recomputeCurLexerKind(); 868 869 /// \brief Returns true if incremental processing is enabled 870 bool isIncrementalProcessingEnabled() const { return IncrementalProcessing; } 871 872 /// \brief Enables the incremental processing 873 void enableIncrementalProcessing(bool value = true) { 874 IncrementalProcessing = value; 875 } 876 877 /// \brief Specify the point at which code-completion will be performed. 878 /// 879 /// \param File the file in which code completion should occur. If 880 /// this file is included multiple times, code-completion will 881 /// perform completion the first time it is included. If NULL, this 882 /// function clears out the code-completion point. 883 /// 884 /// \param Line the line at which code completion should occur 885 /// (1-based). 886 /// 887 /// \param Column the column at which code completion should occur 888 /// (1-based). 889 /// 890 /// \returns true if an error occurred, false otherwise. 891 bool SetCodeCompletionPoint(const FileEntry *File, 892 unsigned Line, unsigned Column); 893 894 /// \brief Determine if we are performing code completion. 895 bool isCodeCompletionEnabled() const { return CodeCompletionFile != 0; } 896 897 /// \brief Returns the location of the code-completion point. 898 /// Returns an invalid location if code-completion is not enabled or the file 899 /// containing the code-completion point has not been lexed yet. 900 SourceLocation getCodeCompletionLoc() const { return CodeCompletionLoc; } 901 902 /// \brief Returns the start location of the file of code-completion point. 903 /// Returns an invalid location if code-completion is not enabled or the file 904 /// containing the code-completion point has not been lexed yet. 905 SourceLocation getCodeCompletionFileLoc() const { 906 return CodeCompletionFileLoc; 907 } 908 909 /// \brief Returns true if code-completion is enabled and we have hit the 910 /// code-completion point. 911 bool isCodeCompletionReached() const { return CodeCompletionReached; } 912 913 /// \brief Note that we hit the code-completion point. 
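///
/// A client-side sketch of the surrounding flow (File, Line and Column are
/// placeholders) looks roughly like:
/// \code
///   PP.SetCodeCompletionPoint(File, Line, Column);
///   // ... lex/parse as usual ...
///   if (PP.isCodeCompletionReached()) {
///     // The completion point was hit; results went to the registered
///     // CodeCompletionHandler, if any.
///   }
/// \endcode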
914 void setCodeCompletionReached() { 915 assert(isCodeCompletionEnabled() && "Code-completion not enabled!"); 916 CodeCompletionReached = true; 917 // Silence any diagnostics that occur after we hit the code-completion. 918 getDiagnostics().setSuppressAllDiagnostics(true); 919 } 920 921 /// \brief The location of the currently-active \#pragma clang 922 /// arc_cf_code_audited begin. Returns an invalid location if there 923 /// is no such pragma active. 924 SourceLocation getPragmaARCCFCodeAuditedLoc() const { 925 return PragmaARCCFCodeAuditedLoc; 926 } 927 928 /// \brief Set the location of the currently-active \#pragma clang 929 /// arc_cf_code_audited begin. An invalid location ends the pragma. 930 void setPragmaARCCFCodeAuditedLoc(SourceLocation Loc) { 931 PragmaARCCFCodeAuditedLoc = Loc; 932 } 933 934 /// \brief Instruct the preprocessor to skip part of the main source file. 935 /// 936 /// \param Bytes The number of bytes in the preamble to skip. 937 /// 938 /// \param StartOfLine Whether skipping these bytes puts the lexer at the 939 /// start of a line. 940 void setSkipMainFilePreamble(unsigned Bytes, bool StartOfLine) { 941 SkipMainFilePreamble.first = Bytes; 942 SkipMainFilePreamble.second = StartOfLine; 943 } 944 945 /// Diag - Forwarding function for diagnostics. This emits a diagnostic at 946 /// the specified Token's location, translating the token's start 947 /// position in the current buffer into a SourcePosition object for rendering. 948 DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID) const { 949 return Diags->Report(Loc, DiagID); 950 } 951 952 DiagnosticBuilder Diag(const Token &Tok, unsigned DiagID) const { 953 return Diags->Report(Tok.getLocation(), DiagID); 954 } 955 956 /// getSpelling() - Return the 'spelling' of the token at the given 957 /// location; does not go up to the spelling location or down to the 958 /// expansion location. 959 /// 960 /// \param buffer A buffer which will be used only if the token requires 961 /// "cleaning", e.g. if it contains trigraphs or escaped newlines 962 /// \param invalid If non-null, will be set \c true if an error occurs. 963 StringRef getSpelling(SourceLocation loc, 964 SmallVectorImpl<char> &buffer, 965 bool *invalid = 0) const { 966 return Lexer::getSpelling(loc, buffer, SourceMgr, LangOpts, invalid); 967 } 968 969 /// getSpelling() - Return the 'spelling' of the Tok token. The spelling of a 970 /// token is the characters used to represent the token in the source file 971 /// after trigraph expansion and escaped-newline folding. In particular, this 972 /// wants to get the true, uncanonicalized, spelling of things like digraphs 973 /// UCNs, etc. 974 /// 975 /// \param Invalid If non-null, will be set \c true if an error occurs. 976 std::string getSpelling(const Token &Tok, bool *Invalid = 0) const { 977 return Lexer::getSpelling(Tok, SourceMgr, LangOpts, Invalid); 978 } 979 980 /// getSpelling - This method is used to get the spelling of a token into a 981 /// preallocated buffer, instead of as an std::string. The caller is required 982 /// to allocate enough space for the token, which is guaranteed to be at least 983 /// Tok.getLength() bytes long. The length of the actual result is returned. 984 /// 985 /// Note that this method may do two possible things: it may either fill in 986 /// the buffer specified with characters, or it may *change the input pointer* 987 /// to point to a constant buffer with the data already in it (avoiding a 988 /// copy). 
The caller is not allowed to modify the returned buffer pointer 989 /// if an internal buffer is returned. 990 unsigned getSpelling(const Token &Tok, const char *&Buffer, 991 bool *Invalid = 0) const { 992 return Lexer::getSpelling(Tok, Buffer, SourceMgr, LangOpts, Invalid); 993 } 994 995 /// getSpelling - This method is used to get the spelling of a token into a 996 /// SmallVector. Note that the returned StringRef may not point to the 997 /// supplied buffer if a copy can be avoided. 998 StringRef getSpelling(const Token &Tok, 999 SmallVectorImpl<char> &Buffer, 1000 bool *Invalid = 0) const; 1001 1002 /// \brief Relex the token at the specified location. 1003 /// \returns true if there was a failure, false on success. 1004 bool getRawToken(SourceLocation Loc, Token &Result) { 1005 return Lexer::getRawToken(Loc, Result, SourceMgr, LangOpts); 1006 } 1007 1008 /// getSpellingOfSingleCharacterNumericConstant - Tok is a numeric constant 1009 /// with length 1, return the character. 1010 char getSpellingOfSingleCharacterNumericConstant(const Token &Tok, 1011 bool *Invalid = 0) const { 1012 assert(Tok.is(tok::numeric_constant) && 1013 Tok.getLength() == 1 && "Called on unsupported token"); 1014 assert(!Tok.needsCleaning() && "Token can't need cleaning with length 1"); 1015 1016 // If the token is carrying a literal data pointer, just use it. 1017 if (const char *D = Tok.getLiteralData()) 1018 return *D; 1019 1020 // Otherwise, fall back on getCharacterData, which is slower, but always 1021 // works. 1022 return *SourceMgr.getCharacterData(Tok.getLocation(), Invalid); 1023 } 1024 1025 /// \brief Retrieve the name of the immediate macro expansion. 1026 /// 1027 /// This routine starts from a source location, and finds the name of the macro 1028 /// responsible for its immediate expansion. It looks through any intervening 1029 /// macro argument expansions to compute this. It returns a StringRef which 1030 /// refers to the SourceManager-owned buffer of the source where that macro 1031 /// name is spelled. Thus, the result shouldn't out-live the SourceManager. 1032 StringRef getImmediateMacroName(SourceLocation Loc) { 1033 return Lexer::getImmediateMacroName(Loc, SourceMgr, getLangOpts()); 1034 } 1035 1036 /// CreateString - Plop the specified string into a scratch buffer and set the 1037 /// specified token's location and length to it. If specified, the source 1038 /// location provides a location of the expansion point of the token. 1039 void CreateString(StringRef Str, Token &Tok, 1040 SourceLocation ExpansionLocStart = SourceLocation(), 1041 SourceLocation ExpansionLocEnd = SourceLocation()); 1042 1043 /// \brief Computes the source location just past the end of the 1044 /// token at this source location. 1045 /// 1046 /// This routine can be used to produce a source location that 1047 /// points just past the end of the token referenced by \p Loc, and 1048 /// is generally used when a diagnostic needs to point just after a 1049 /// token where it expected something different that it received. If 1050 /// the returned source location would not be meaningful (e.g., if 1051 /// it points into a macro), this routine returns an invalid 1052 /// source location. 1053 /// 1054 /// \param Offset an offset from the end of the token, where the source 1055 /// location should refer to. The default offset (0) produces a source 1056 /// location pointing just past the end of the token; an offset of 1 produces 1057 /// a source location pointing to the last character in the token, etc. 
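///
/// A typical illustrative use is pointing a diagnostic just past a token
/// (PrevTok and MyDiagID are placeholders, not names from this header):
/// \code
///   PP.Diag(PP.getLocForEndOfToken(PrevTok.getLocation()), MyDiagID);
/// \endcode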
1058 SourceLocation getLocForEndOfToken(SourceLocation Loc, unsigned Offset = 0) { 1059 return Lexer::getLocForEndOfToken(Loc, Offset, SourceMgr, LangOpts); 1060 } 1061 1062 /// \brief Returns true if the given MacroID location points at the first 1063 /// token of the macro expansion. 1064 /// 1065 /// \param MacroBegin If non-null and function returns true, it is set to 1066 /// begin location of the macro. 1067 bool isAtStartOfMacroExpansion(SourceLocation loc, 1068 SourceLocation *MacroBegin = 0) const { 1069 return Lexer::isAtStartOfMacroExpansion(loc, SourceMgr, LangOpts, 1070 MacroBegin); 1071 } 1072 1073 /// \brief Returns true if the given MacroID location points at the last 1074 /// token of the macro expansion. 1075 /// 1076 /// \param MacroEnd If non-null and function returns true, it is set to 1077 /// end location of the macro. 1078 bool isAtEndOfMacroExpansion(SourceLocation loc, 1079 SourceLocation *MacroEnd = 0) const { 1080 return Lexer::isAtEndOfMacroExpansion(loc, SourceMgr, LangOpts, MacroEnd); 1081 } 1082 1083 /// DumpToken - Print the token to stderr, used for debugging. 1084 /// 1085 void DumpToken(const Token &Tok, bool DumpFlags = false) const; 1086 void DumpLocation(SourceLocation Loc) const; 1087 void DumpMacro(const MacroInfo &MI) const; 1088 1089 /// AdvanceToTokenCharacter - Given a location that specifies the start of a 1090 /// token, return a new location that specifies a character within the token. 1091 SourceLocation AdvanceToTokenCharacter(SourceLocation TokStart, 1092 unsigned Char) const { 1093 return Lexer::AdvanceToTokenCharacter(TokStart, Char, SourceMgr, LangOpts); 1094 } 1095 1096 /// IncrementPasteCounter - Increment the counters for the number of token 1097 /// paste operations performed. If fast was specified, this is a 'fast paste' 1098 /// case we handled. 1099 /// 1100 void IncrementPasteCounter(bool isFast) { 1101 if (isFast) 1102 ++NumFastTokenPaste; 1103 else 1104 ++NumTokenPaste; 1105 } 1106 1107 void PrintStats(); 1108 1109 size_t getTotalMemory() const; 1110 1111 /// HandleMicrosoftCommentPaste - When the macro expander pastes together a 1112 /// comment (/##/) in microsoft mode, this method handles updating the current 1113 /// state, returning the token on the next source line. 1114 void HandleMicrosoftCommentPaste(Token &Tok); 1115 1116 //===--------------------------------------------------------------------===// 1117 // Preprocessor callback methods. These are invoked by a lexer as various 1118 // directives and events are found. 1119 1120 /// LookUpIdentifierInfo - Given a tok::raw_identifier token, look up the 1121 /// identifier information for the token and install it into the token, 1122 /// updating the token kind accordingly. 1123 IdentifierInfo *LookUpIdentifierInfo(Token &Identifier) const; 1124 1125 private: 1126 llvm::DenseMap<IdentifierInfo*,unsigned> PoisonReasons; 1127 1128 public: 1129 1130 // SetPoisonReason - Call this function to indicate the reason for 1131 // poisoning an identifier. If that identifier is accessed while 1132 // poisoned, then this reason will be used instead of the default 1133 // "poisoned" diagnostic. 1134 void SetPoisonReason(IdentifierInfo *II, unsigned DiagID); 1135 1136 // HandlePoisonedIdentifier - Display reason for poisoned 1137 // identifier. 
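  //
  // Illustrative sketch (II, Tok and MyDiagID are placeholders):
  //
  //   PP.SetPoisonReason(II, MyDiagID);
  //   ...
  //   PP.MaybeHandlePoisonedIdentifier(Tok); // emits MyDiagID if the
  //                                          // identifier is poisoned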
1138 void HandlePoisonedIdentifier(Token & Tok); 1139 1140 void MaybeHandlePoisonedIdentifier(Token & Identifier) { 1141 if(IdentifierInfo * II = Identifier.getIdentifierInfo()) { 1142 if(II->isPoisoned()) { 1143 HandlePoisonedIdentifier(Identifier); 1144 } 1145 } 1146 } 1147 1148 private: 1149 /// Identifiers used for SEH handling in Borland. These are only 1150 /// allowed in particular circumstances 1151 // __except block 1152 IdentifierInfo *Ident__exception_code, 1153 *Ident___exception_code, 1154 *Ident_GetExceptionCode; 1155 // __except filter expression 1156 IdentifierInfo *Ident__exception_info, 1157 *Ident___exception_info, 1158 *Ident_GetExceptionInfo; 1159 // __finally 1160 IdentifierInfo *Ident__abnormal_termination, 1161 *Ident___abnormal_termination, 1162 *Ident_AbnormalTermination; 1163 public: 1164 void PoisonSEHIdentifiers(bool Poison = true); // Borland 1165 1166 /// HandleIdentifier - This callback is invoked when the lexer reads an 1167 /// identifier and has filled in the tokens IdentifierInfo member. This 1168 /// callback potentially macro expands it or turns it into a named token (like 1169 /// 'for'). 1170 void HandleIdentifier(Token &Identifier); 1171 1172 1173 /// HandleEndOfFile - This callback is invoked when the lexer hits the end of 1174 /// the current file. This either returns the EOF token and returns true, or 1175 /// pops a level off the include stack and returns false, at which point the 1176 /// client should call lex again. 1177 bool HandleEndOfFile(Token &Result, bool isEndOfMacro = false); 1178 1179 /// HandleEndOfTokenLexer - This callback is invoked when the current 1180 /// TokenLexer hits the end of its token stream. 1181 bool HandleEndOfTokenLexer(Token &Result); 1182 1183 /// HandleDirective - This callback is invoked when the lexer sees a # token 1184 /// at the start of a line. This consumes the directive, modifies the 1185 /// lexer/preprocessor state, and advances the lexer(s) so that the next token 1186 /// read is the correct one. 1187 void HandleDirective(Token &Result); 1188 1189 /// CheckEndOfDirective - Ensure that the next token is a tok::eod token. If 1190 /// not, emit a diagnostic and consume up until the eod. If EnableMacros is 1191 /// true, then we consider macros that expand to zero tokens as being ok. 1192 void CheckEndOfDirective(const char *Directive, bool EnableMacros = false); 1193 1194 /// DiscardUntilEndOfDirective - Read and discard all tokens remaining on the 1195 /// current line until the tok::eod token is found. 1196 void DiscardUntilEndOfDirective(); 1197 1198 /// SawDateOrTime - This returns true if the preprocessor has seen a use of 1199 /// __DATE__ or __TIME__ in the file so far. 1200 bool SawDateOrTime() const { 1201 return DATELoc != SourceLocation() || TIMELoc != SourceLocation(); 1202 } 1203 unsigned getCounterValue() const { return CounterValue; } 1204 void setCounterValue(unsigned V) { CounterValue = V; } 1205 1206 /// \brief Retrieves the module that we're currently building, if any. 1207 Module *getCurrentModule(); 1208 1209 /// \brief Allocate a new MacroInfo object with the provided SourceLocation. 1210 MacroInfo *AllocateMacroInfo(SourceLocation L); 1211 1212 /// \brief Allocate a new MacroInfo object loaded from an AST file. 1213 MacroInfo *AllocateDeserializedMacroInfo(SourceLocation L, 1214 unsigned SubModuleID); 1215 1216 /// \brief Turn the specified lexer token into a fully checked and spelled 1217 /// filename, e.g. as an operand of \#include. 
1218 /// 1219 /// The caller is expected to provide a buffer that is large enough to hold 1220 /// the spelling of the filename, but is also expected to handle the case 1221 /// when this method decides to use a different buffer. 1222 /// 1223 /// \returns true if the input filename was in <>'s or false if it was 1224 /// in ""'s. 1225 bool GetIncludeFilenameSpelling(SourceLocation Loc,StringRef &Filename); 1226 1227 /// \brief Given a "foo" or \<foo> reference, look up the indicated file. 1228 /// 1229 /// Returns null on failure. \p isAngled indicates whether the file 1230 /// reference is for system \#include's or not (i.e. using <> instead of ""). 1231 const FileEntry *LookupFile(SourceLocation FilenameLoc, StringRef Filename, 1232 bool isAngled, const DirectoryLookup *FromDir, 1233 const DirectoryLookup *&CurDir, 1234 SmallVectorImpl<char> *SearchPath, 1235 SmallVectorImpl<char> *RelativePath, 1236 ModuleMap::KnownHeader *SuggestedModule, 1237 bool SkipCache = false); 1238 1239 /// GetCurLookup - The DirectoryLookup structure used to find the current 1240 /// FileEntry, if CurLexer is non-null and if applicable. This allows us to 1241 /// implement \#include_next and find directory-specific properties. 1242 const DirectoryLookup *GetCurDirLookup() { return CurDirLookup; } 1243 1244 /// \brief Return true if we're in the top-level file, not in a \#include. 1245 bool isInPrimaryFile() const; 1246 1247 /// ConcatenateIncludeName - Handle cases where the \#include name is expanded 1248 /// from a macro as multiple tokens, which need to be glued together. This 1249 /// occurs for code like: 1250 /// \code 1251 /// \#define FOO <x/y.h> 1252 /// \#include FOO 1253 /// \endcode 1254 /// because in this case, "<x/y.h>" is returned as 7 tokens, not one. 1255 /// 1256 /// This code concatenates and consumes tokens up to the '>' token. It 1257 /// returns false if the > was found, otherwise it returns true if it finds 1258 /// and consumes the EOD marker. 1259 bool ConcatenateIncludeName(SmallString<128> &FilenameBuffer, 1260 SourceLocation &End); 1261 1262 /// LexOnOffSwitch - Lex an on-off-switch (C99 6.10.6p2) and verify that it is 1263 /// followed by EOD. Return true if the token is not a valid on-off-switch. 1264 bool LexOnOffSwitch(tok::OnOffSwitch &OOS); 1265 1266 private: 1267 1268 void PushIncludeMacroStack() { 1269 IncludeMacroStack.push_back(IncludeStackInfo(CurLexerKind, 1270 CurLexer.take(), 1271 CurPTHLexer.take(), 1272 CurPPLexer, 1273 CurTokenLexer.take(), 1274 CurDirLookup)); 1275 CurPPLexer = 0; 1276 } 1277 1278 void PopIncludeMacroStack() { 1279 CurLexer.reset(IncludeMacroStack.back().TheLexer); 1280 CurPTHLexer.reset(IncludeMacroStack.back().ThePTHLexer); 1281 CurPPLexer = IncludeMacroStack.back().ThePPLexer; 1282 CurTokenLexer.reset(IncludeMacroStack.back().TheTokenLexer); 1283 CurDirLookup = IncludeMacroStack.back().TheDirLookup; 1284 CurLexerKind = IncludeMacroStack.back().CurLexerKind; 1285 IncludeMacroStack.pop_back(); 1286 } 1287 1288 /// \brief Allocate a new MacroInfo object. 1289 MacroInfo *AllocateMacroInfo(); 1290 1291 DefMacroDirective *AllocateDefMacroDirective(MacroInfo *MI, 1292 SourceLocation Loc, 1293 bool isImported); 1294 UndefMacroDirective *AllocateUndefMacroDirective(SourceLocation UndefLoc); 1295 VisibilityMacroDirective *AllocateVisibilityMacroDirective(SourceLocation Loc, 1296 bool isPublic); 1297 1298 /// \brief Release the specified MacroInfo for re-use. 1299 /// 1300 /// This memory will be reused for allocating new MacroInfo objects. 
1301 void ReleaseMacroInfo(MacroInfo* MI);
1302
1303 /// ReadMacroName - Lex and validate a macro name, which occurs after a
1304 /// \#define or \#undef. This emits a diagnostic, sets the token kind to eod,
1305 /// and discards the rest of the macro line if the macro name is invalid.
1306 void ReadMacroName(Token &MacroNameTok, char isDefineUndef = 0);
1307
1308 /// ReadMacroDefinitionArgList - The ( starting an argument list of a macro
1309 /// definition has just been read. Lex the rest of the arguments and the
1310 /// closing ), updating MI with what we learn and saving in LastTok the
1311 /// last token read.
1312 /// Return true if an error occurs parsing the arg list.
1313 bool ReadMacroDefinitionArgList(MacroInfo *MI, Token& LastTok);
1314
1315 /// We just read a \#if or related directive and decided that the
1316 /// subsequent tokens are in the \#if'd out portion of the
1317 /// file. Lex the rest of the file, until we see an \#endif. If \p
1318 /// FoundNonSkipPortion is true, then we have already emitted code for part of
1319 /// this \#if directive, so \#else/\#elif blocks should never be entered. If
1320 /// \p FoundElse is false, then \#else directives are OK; if it is true, we have
1321 /// already seen one, so another \#else directive is a duplicate. When this
1322 /// returns, the caller can lex the first valid token.
1323 void SkipExcludedConditionalBlock(SourceLocation IfTokenLoc,
1324 bool FoundNonSkipPortion, bool FoundElse,
1325 SourceLocation ElseLoc = SourceLocation());
1326
1327 /// \brief A fast PTH version of SkipExcludedConditionalBlock.
1328 void PTHSkipExcludedConditionalBlock();
1329
1330 /// EvaluateDirectiveExpression - Evaluate an integer constant expression that
1331 /// may occur after a #if or #elif directive and return it as a bool. If the
1332 /// expression is equivalent to "!defined(X)" return X in IfNDefMacro.
1333 bool EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro);
1334
1335 /// RegisterBuiltinPragmas - Install the standard preprocessor pragmas:
1336 /// \#pragma GCC poison/system_header/dependency and \#pragma once.
1337 void RegisterBuiltinPragmas();
1338
1339 /// \brief Register builtin macros such as __LINE__ with the identifier table.
1340 void RegisterBuiltinMacros();
1341
1342 /// HandleMacroExpandedIdentifier - If an identifier token is read that is to
1343 /// be expanded as a macro, handle it and return the next token as 'Tok'. If
1344 /// the macro should not be expanded, return true; otherwise, return false.
1345 bool HandleMacroExpandedIdentifier(Token &Tok, MacroDirective *MD);
1346
1347 /// \brief Cache macro expanded tokens for TokenLexers.
1348 ///
1349 /// Works like a stack; a TokenLexer adds the macro-expanded tokens that it is
1350 /// going to lex into the cache, and when it finishes, the tokens are removed
1351 /// from the end of the cache.
1352 Token *cacheMacroExpandedTokens(TokenLexer *tokLexer,
1353 ArrayRef<Token> tokens);
1354 void removeCachedMacroExpandedTokensOfLastLexer();
1355 friend void TokenLexer::ExpandFunctionArguments();
1356
1357 /// isNextPPTokenLParen - Determine whether the next preprocessor token to be
1358 /// lexed is a '('. If so, consume the token and return true; if not, this
1359 /// method should have no observable side effect on the lexed tokens.
1360 bool isNextPPTokenLParen();
1361
1362 /// ReadFunctionLikeMacroArgs - After reading "MACRO(", this method is
1363 /// invoked to read all of the formal arguments specified for the macro
1364 /// invocation. This returns null on error.
  MacroArgs *ReadFunctionLikeMacroArgs(Token &MacroName, MacroInfo *MI,
                                       SourceLocation &ExpansionEnd);

  /// ExpandBuiltinMacro - If an identifier token is read that is to be
  /// expanded as a builtin macro, handle it and return the next token as
  /// 'Tok'.
  void ExpandBuiltinMacro(Token &Tok);

  /// Handle_Pragma - Read a _Pragma directive, slice it up, process it, then
  /// return the first token after the directive.  The _Pragma token has just
  /// been read into 'Tok'.
  void Handle_Pragma(Token &Tok);

  /// HandleMicrosoft__pragma - Like Handle_Pragma except the pragma text
  /// is not enclosed within a string literal.
  void HandleMicrosoft__pragma(Token &Tok);

  /// EnterSourceFileWithLexer - Add a lexer to the top of the include stack
  /// and start lexing tokens from it instead of the current buffer.
  void EnterSourceFileWithLexer(Lexer *TheLexer, const DirectoryLookup *Dir);

  /// EnterSourceFileWithPTH - Add a lexer to the top of the include stack and
  /// start getting tokens from it using the PTH cache.
  void EnterSourceFileWithPTH(PTHLexer *PL, const DirectoryLookup *Dir);

  /// \brief Set the file ID for the preprocessor predefines.
  void setPredefinesFileID(FileID FID) {
    assert(PredefinesFileID.isInvalid() && "PredefinesFileID already set!");
    PredefinesFileID = FID;
  }

  /// IsFileLexer - Returns true if we are lexing from a file and not a
  /// pragma or a macro.
  static bool IsFileLexer(const Lexer *L, const PreprocessorLexer *P) {
    return L ? !L->isPragmaLexer() : P != 0;
  }

  static bool IsFileLexer(const IncludeStackInfo &I) {
    return IsFileLexer(I.TheLexer, I.ThePPLexer);
  }

  bool IsFileLexer() const {
    return IsFileLexer(CurLexer.get(), CurPPLexer);
  }

  //===--------------------------------------------------------------------===//
  // Caching stuff.
  void CachingLex(Token &Result);
  bool InCachingLexMode() const {
    // If the Lexer pointers are 0 and IncludeMacroStack is empty, it means
    // that we are past EOF, not that we are in CachingLex mode.
    return !CurPPLexer && !CurTokenLexer && !CurPTHLexer &&
           !IncludeMacroStack.empty();
  }
  void EnterCachingLexMode();
  void ExitCachingLexMode() {
    if (InCachingLexMode())
      RemoveTopOfLexerStack();
  }
  const Token &PeekAhead(unsigned N);
  void AnnotatePreviousCachedTokens(const Token &Tok);

  //===--------------------------------------------------------------------===//
  /// Handle*Directive - implement the various preprocessor directives.  These
  /// should side-effect the current preprocessor object so that the next call
  /// to Lex() will return the appropriate token.
  void HandleLineDirective(Token &Tok);
  void HandleDigitDirective(Token &Tok);
  void HandleUserDiagnosticDirective(Token &Tok, bool isWarning);
  void HandleIdentSCCSDirective(Token &Tok);
  void HandleMacroPublicDirective(Token &Tok);
  void HandleMacroPrivateDirective(Token &Tok);

  // File inclusion.
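  /// HandleIncludeDirective - Handle \#include and the related directives
  /// declared below, e.g. (illustrative only):
  /// \code
  ///   \#include "config.h"
  ///   \#include <vector>
  /// \endcode
  /// When \p LookupFrom is non-null, the header search resumes after that
  /// directory, which is how \#include_next is expected to be layered on top
  /// of this method.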
  void HandleIncludeDirective(SourceLocation HashLoc,
                              Token &Tok,
                              const DirectoryLookup *LookupFrom = 0,
                              bool isImport = false);
  void HandleIncludeNextDirective(SourceLocation HashLoc, Token &Tok);
  void HandleIncludeMacrosDirective(SourceLocation HashLoc, Token &Tok);
  void HandleImportDirective(SourceLocation HashLoc, Token &Tok);
  void HandleMicrosoftImportDirective(Token &Tok);

  // Macro handling.
  void HandleDefineDirective(Token &Tok, bool ImmediatelyAfterTopLevelIfndef);
  void HandleUndefDirective(Token &Tok);

  // Conditional inclusion.
  void HandleIfdefDirective(Token &Tok, bool isIfndef,
                            bool ReadAnyTokensBeforeDirective);
  void HandleIfDirective(Token &Tok, bool ReadAnyTokensBeforeDirective);
  void HandleEndifDirective(Token &Tok);
  void HandleElseDirective(Token &Tok);
  void HandleElifDirective(Token &Tok);

  // Pragmas.
  void HandlePragmaDirective(SourceLocation IntroducerLoc,
                             PragmaIntroducerKind Introducer);
public:
  void HandlePragmaOnce(Token &OnceTok);
  void HandlePragmaMark();
  void HandlePragmaPoison(Token &PoisonTok);
  void HandlePragmaSystemHeader(Token &SysHeaderTok);
  void HandlePragmaDependency(Token &DependencyTok);
  void HandlePragmaPushMacro(Token &Tok);
  void HandlePragmaPopMacro(Token &Tok);
  void HandlePragmaIncludeAlias(Token &Tok);
  IdentifierInfo *ParsePragmaPushOrPopMacro(Token &Tok);

  // Return true and store the first token only if any CommentHandler
  // has inserted some tokens and getCommentRetentionState() is false.
  bool HandleComment(Token &Token, SourceRange Comment);

  /// \brief A macro has been used; update the information about macros that
  /// need "unused macro" warnings.
  void markMacroAsUsed(MacroInfo *MI);
};

/// \brief Abstract base class that describes a handler that will receive
/// source ranges for each of the comments encountered in the source file.
class CommentHandler {
public:
  virtual ~CommentHandler();

  // The handler shall return true if it has pushed any tokens
  // to be read using e.g. EnterToken or EnterTokenStream.
  virtual bool HandleComment(Preprocessor &PP, SourceRange Comment) = 0;
};

}  // end namespace clang

#endif