//===--- MacroArgs.cpp - Formal argument info for Macros -----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the MacroArgs interface.
//
//===----------------------------------------------------------------------===//

#include "clang/Lex/MacroArgs.h"
#include "clang/Lex/LexDiagnostic.h"
#include "clang/Lex/MacroInfo.h"
#include "clang/Lex/Preprocessor.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/Support/SaveAndRestore.h"
#include <algorithm>

using namespace clang;

/// MacroArgs ctor function - This destroys the vector passed in.
MacroArgs *MacroArgs::create(const MacroInfo *MI,
                             ArrayRef<Token> UnexpArgTokens,
                             bool VarargsElided, Preprocessor &PP) {
  assert(MI->isFunctionLike() &&
         "Can't have args for an object-like macro!");
  MacroArgs **ResultEnt = 0;
  unsigned ClosestMatch = ~0U;

  // See if we have an entry with a big enough argument list to reuse on the
  // free list.  If so, reuse it.
  for (MacroArgs **Entry = &PP.MacroArgCache; *Entry;
       Entry = &(*Entry)->ArgCache)
    if ((*Entry)->NumUnexpArgTokens >= UnexpArgTokens.size() &&
        (*Entry)->NumUnexpArgTokens < ClosestMatch) {
      ResultEnt = Entry;

      // If we have an exact match, use it.
      if ((*Entry)->NumUnexpArgTokens == UnexpArgTokens.size())
        break;
      // Otherwise, use the best fit.
      ClosestMatch = (*Entry)->NumUnexpArgTokens;
    }

  MacroArgs *Result;
  if (ResultEnt == 0) {
    // Allocate memory for a MacroArgs object with the lexer tokens at the end.
    Result = (MacroArgs*)malloc(sizeof(MacroArgs) +
                                UnexpArgTokens.size() * sizeof(Token));
    // Construct the MacroArgs object.
    new (Result) MacroArgs(UnexpArgTokens.size(), VarargsElided);
  } else {
    Result = *ResultEnt;
    // Unlink this node from the preprocessor's singly linked list.
    *ResultEnt = Result->ArgCache;
    Result->NumUnexpArgTokens = UnexpArgTokens.size();
    Result->VarargsElided = VarargsElided;
  }

  // Copy the actual unexpanded tokens to immediately after the result ptr.
  if (!UnexpArgTokens.empty())
    std::copy(UnexpArgTokens.begin(), UnexpArgTokens.end(),
              const_cast<Token*>(Result->getUnexpArgument(0)));

  return Result;
}

/// destroy - Destroy and deallocate the memory for this object.
///
void MacroArgs::destroy(Preprocessor &PP) {
  StringifiedArgs.clear();

  // Don't clear the PreExpArgTokens vector itself, just clear each entry.
  // Clearing the vector would destroy the per-argument token vectors and
  // deallocate their storage, which we want to reuse.
  for (unsigned i = 0, e = PreExpArgTokens.size(); i != e; ++i)
    PreExpArgTokens[i].clear();

  // Add this to the preprocessor's free list.
  ArgCache = PP.MacroArgCache;
  PP.MacroArgCache = this;
}

/// deallocate - This should only be called by the Preprocessor when managing
/// its freelist.
MacroArgs *MacroArgs::deallocate() {
  MacroArgs *Next = ArgCache;

  // Run the dtor to deallocate the vectors.
  this->~MacroArgs();
  // Release the memory for the object.
  free(this);

  return Next;
}


/// getArgLength - Given a pointer to an expanded or unexpanded argument,
/// return the number of tokens, not counting the EOF, that make up the
/// argument.
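///
/// Both the unexpanded and pre-expanded forms of an argument are stored as a
/// token sequence terminated by a tok::eof token, so the length is simply the
/// number of tokens that precede that terminator.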
unsigned MacroArgs::getArgLength(const Token *ArgPtr) {
  unsigned NumArgTokens = 0;
  for (; ArgPtr->isNot(tok::eof); ++ArgPtr)
    ++NumArgTokens;
  return NumArgTokens;
}


/// getUnexpArgument - Return the unexpanded tokens for the specified formal.
///
const Token *MacroArgs::getUnexpArgument(unsigned Arg) const {
  // The unexpanded argument tokens start immediately after the MacroArgs
  // object in memory.
  const Token *Start = (const Token *)(this+1);
  const Token *Result = Start;
  // Scan to find Arg.
  for (; Arg; ++Result) {
    assert(Result < Start+NumUnexpArgTokens && "Invalid arg #");
    if (Result->is(tok::eof))
      --Arg;
  }
  assert(Result < Start+NumUnexpArgTokens && "Invalid arg #");
  return Result;
}


/// ArgNeedsPreexpansion - If we can prove that the argument won't be affected
/// by pre-expansion, return false.  Otherwise, conservatively return true.
bool MacroArgs::ArgNeedsPreexpansion(const Token *ArgTok,
                                     Preprocessor &PP) const {
  // If there are no identifiers in the argument list, or if the identifiers
  // are known to not be macros, pre-expansion won't modify it.
  for (; ArgTok->isNot(tok::eof); ++ArgTok)
    if (IdentifierInfo *II = ArgTok->getIdentifierInfo()) {
      if (II->hasMacroDefinition() && PP.getMacroInfo(II)->isEnabled())
        // Return true even though the macro could be a function-like macro
        // without a following '(' token.
        return true;
    }
  return false;
}

/// getPreExpArgument - Return the pre-expanded form of the specified
/// argument.
const std::vector<Token> &
MacroArgs::getPreExpArgument(unsigned Arg, const MacroInfo *MI,
                             Preprocessor &PP) {
  assert(Arg < MI->getNumArgs() && "Invalid argument number!");

  // Make sure we have a slot for this argument, then return the pre-expanded
  // form if we have already computed it.
  if (PreExpArgTokens.size() < MI->getNumArgs())
    PreExpArgTokens.resize(MI->getNumArgs());

  std::vector<Token> &Result = PreExpArgTokens[Arg];
  if (!Result.empty()) return Result;

  SaveAndRestore<bool> PreExpandingMacroArgs(PP.InMacroArgPreExpansion, true);

  const Token *AT = getUnexpArgument(Arg);
  unsigned NumToks = getArgLength(AT)+1;  // Include the EOF.

  // Otherwise, we have to pre-expand this argument, populating Result.  To do
  // this, we set up a fake TokenLexer to lex from the unexpanded argument
  // list.  With this installed, we lex expanded tokens until we hit the EOF
  // token at the end of the unexp list.
  PP.EnterTokenStream(AT, NumToks, false /*disable expand*/,
                      false /*owns tokens*/);

  // Lex all of the macro-expanded tokens into Result.
  do {
    Result.push_back(Token());
    Token &Tok = Result.back();
    PP.Lex(Tok);
  } while (Result.back().isNot(tok::eof));

  // Pop the token stream off the top of the stack.  We know that the internal
  // pointer inside of it is to the "end" of the token stream, but the stack
  // will not otherwise be popped until the next token is lexed.  The problem
  // is that the token may be lexed sometime after the vector of tokens itself
  // is destroyed, which would be badness.
  if (PP.InCachingLexMode())
    PP.ExitCachingLexMode();
  PP.RemoveTopOfLexerStack();
  return Result;
}


/// StringifyArgument - Implement C99 6.10.3.2p2, converting a sequence of
/// tokens into the literal string token that should be produced by the C #
/// preprocessor operator.
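///
/// For example, given a macro like "#define STR(X) #X", the invocation
/// STR(a "b"   c) produces the string literal "a \"b\" c": string and
/// character literals in the argument are escaped, and whitespace between
/// the argument's tokens collapses to a single space.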
///
/// If Charify is true, the result should instead be turned into a character
/// literal for the Microsoft charize (#@) extension.
///
Token MacroArgs::StringifyArgument(const Token *ArgToks,
                                   Preprocessor &PP, bool Charify,
                                   SourceLocation ExpansionLocStart,
                                   SourceLocation ExpansionLocEnd) {
  Token Tok;
  Tok.startToken();
  Tok.setKind(Charify ? tok::char_constant : tok::string_literal);

  const Token *ArgTokStart = ArgToks;

  // Stringify all the tokens.
  SmallString<128> Result;
  Result += "\"";

  bool isFirst = true;
  for (; ArgToks->isNot(tok::eof); ++ArgToks) {
    const Token &Tok = *ArgToks;
    if (!isFirst && (Tok.hasLeadingSpace() || Tok.isAtStartOfLine()))
      Result += ' ';
    isFirst = false;

    // If this is a string or character constant, escape the token as specified
    // by 6.10.3.2p2.
    if (tok::isStringLiteral(Tok.getKind()) || // "foo", u8R"x(foo)x"_bar, etc.
        Tok.is(tok::char_constant) ||          // 'x'
        Tok.is(tok::wide_char_constant) ||     // L'x'.
        Tok.is(tok::utf16_char_constant) ||    // u'x'.
        Tok.is(tok::utf32_char_constant)) {    // U'x'.
      bool Invalid = false;
      std::string TokStr = PP.getSpelling(Tok, &Invalid);
      if (!Invalid) {
        std::string Str = Lexer::Stringify(TokStr);
        Result.append(Str.begin(), Str.end());
      }
    } else if (Tok.is(tok::code_completion)) {
      PP.CodeCompleteNaturalLanguage();
    } else {
      // Otherwise, just append the token.  Do some gymnastics to get the token
      // in place and avoid copies where possible.
      unsigned CurStrLen = Result.size();
      Result.resize(CurStrLen+Tok.getLength());
      const char *BufPtr = &Result[CurStrLen];
      bool Invalid = false;
      unsigned ActualTokLen = PP.getSpelling(Tok, BufPtr, &Invalid);

      if (!Invalid) {
        // If getSpelling returned a pointer to an already uniqued version of
        // the string instead of filling in BufPtr, memcpy it onto our string.
        if (BufPtr != &Result[CurStrLen])
          memcpy(&Result[CurStrLen], BufPtr, ActualTokLen);

        // If the token was dirty, the spelling may be shorter than the token.
        if (ActualTokLen != Tok.getLength())
          Result.resize(CurStrLen+ActualTokLen);
      }
    }
  }

  // If the last character of the string is a \, and if it isn't escaped, this
  // is an invalid string literal; diagnose it as specified in C99.
  if (Result.back() == '\\') {
    // Count the number of consecutive \ characters.  If even, then they are
    // just escaped backslashes, otherwise it's an error.
    unsigned FirstNonSlash = Result.size()-2;
    // Guaranteed to find the starting " if nothing else.
    while (Result[FirstNonSlash] == '\\')
      --FirstNonSlash;
    if ((Result.size()-1-FirstNonSlash) & 1) {
      // Diagnose errors for things like: #define F(X) #X   /   F(\)
      PP.Diag(ArgToks[-1], diag::pp_invalid_string_literal);
      Result.pop_back();  // remove one of the \'s.
    }
  }
  Result += '"';

  // If this is the charify operation and the result is not a legal character
  // constant, diagnose it.
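  // For example, with a macro like "#define CHARIZE(X) #@X", CHARIZE(a)
  // yields the character constant 'a', while CHARIZE(abc) does not form a
  // valid character constant and is diagnosed below.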
  if (Charify) {
    // First step, turn double quotes into single quotes:
    Result[0] = '\'';
    Result[Result.size()-1] = '\'';

    // Check for bogus character.
    bool isBad = false;
    if (Result.size() == 3)
      isBad = Result[1] == '\'';   // ''' is not legal. '\' already fixed above.
    else
      isBad = (Result.size() != 4 || Result[1] != '\\');  // Not '\x'

    if (isBad) {
      PP.Diag(ArgTokStart[0], diag::err_invalid_character_to_charify);
      Result = "' '";  // Use something arbitrary, but legal.
    }
  }

  PP.CreateString(Result, Tok,
                  ExpansionLocStart, ExpansionLocEnd);
  return Tok;
}

/// getStringifiedArgument - Compute, cache, and return the specified argument
/// that has been 'stringified' as required by the # operator.
const Token &MacroArgs::getStringifiedArgument(unsigned ArgNo,
                                               Preprocessor &PP,
                                               SourceLocation ExpansionLocStart,
                                               SourceLocation ExpansionLocEnd) {
  assert(ArgNo < NumUnexpArgTokens && "Invalid argument number!");
  if (StringifiedArgs.empty()) {
    StringifiedArgs.resize(getNumArguments());
    memset((void*)&StringifiedArgs[0], 0,
           sizeof(StringifiedArgs[0])*getNumArguments());
  }
  if (StringifiedArgs[ArgNo].isNot(tok::string_literal))
    StringifiedArgs[ArgNo] = StringifyArgument(getUnexpArgument(ArgNo), PP,
                                               /*Charify=*/false,
                                               ExpansionLocStart,
                                               ExpansionLocEnd);
  return StringifiedArgs[ArgNo];
}