//===--- MacroArgs.cpp - Formal argument info for Macros ------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the MacroArgs interface, which holds the unexpanded
// and pre-expanded argument tokens for a single function-like macro
// invocation.
//
//===----------------------------------------------------------------------===//

#include "MacroArgs.h"
#include "clang/Lex/MacroInfo.h"
#include "clang/Lex/Preprocessor.h"
#include "clang/Lex/LexDiagnostic.h"

#include <algorithm>

using namespace clang;

/// MacroArgs ctor function - Create a MacroArgs object for an invocation of
/// the function-like macro \p MI.  The unexpanded argument tokens are copied
/// into trailing storage allocated immediately after the MacroArgs object
/// itself, so one allocation covers both.  Objects are recycled through the
/// preprocessor's free list (PP.MacroArgCache) to avoid a malloc per macro
/// invocation; \p VarargsElided records whether an empty variadic argument
/// list was omitted entirely at the call site.
MacroArgs *MacroArgs::create(const MacroInfo *MI,
                             llvm::ArrayRef<Token> UnexpArgTokens,
                             bool VarargsElided, Preprocessor &PP) {
  assert(MI->isFunctionLike() &&
         "Can't have args for an object-like macro!");
  MacroArgs **ResultEnt = 0;
  unsigned ClosestMatch = ~0U;

  // See if we have an entry with a big enough argument list to reuse on the
  // free list.  If so, reuse it.  We track the best (smallest sufficient)
  // fit seen so far in ClosestMatch so we don't waste a large cached buffer
  // on a small argument list.
  for (MacroArgs **Entry = &PP.MacroArgCache; *Entry;
       Entry = &(*Entry)->ArgCache)
    if ((*Entry)->NumUnexpArgTokens >= UnexpArgTokens.size() &&
        (*Entry)->NumUnexpArgTokens < ClosestMatch) {
      ResultEnt = Entry;

      // If we have an exact match, use it.
      if ((*Entry)->NumUnexpArgTokens == UnexpArgTokens.size())
        break;
      // Otherwise, use the best fit.
      ClosestMatch = (*Entry)->NumUnexpArgTokens;
    }

  MacroArgs *Result;
  if (ResultEnt == 0) {
    // No reusable cache entry: allocate memory for a MacroArgs object with
    // the lexer tokens stored contiguously at the end.
    Result = (MacroArgs*)malloc(sizeof(MacroArgs) +
                                UnexpArgTokens.size() * sizeof(Token));
    // Construct the MacroArgs object in the raw storage (placement new).
    new (Result) MacroArgs(UnexpArgTokens.size(), VarargsElided);
  } else {
    Result = *ResultEnt;
    // Unlink this node from the preprocessor's singly linked free list and
    // re-initialize its header fields for this invocation.
    *ResultEnt = Result->ArgCache;
    Result->NumUnexpArgTokens = UnexpArgTokens.size();
    Result->VarargsElided = VarargsElided;
  }

  // Copy the actual unexpanded tokens to immediately after the result ptr.
  if (!UnexpArgTokens.empty())
    std::copy(UnexpArgTokens.begin(), UnexpArgTokens.end(),
              const_cast<Token*>(Result->getUnexpArgument(0)));

  return Result;
}

/// destroy - "Destroy" this object by returning it to the preprocessor's
/// free list for reuse; the memory is not actually released until the
/// preprocessor drains its cache via deallocate().
void MacroArgs::destroy(Preprocessor &PP) {
  StringifiedArgs.clear();

  // Don't clear PreExpArgTokens itself, just clear the entries.  Clearing
  // the outer vector would deallocate the element vectors, defeating the
  // point of caching this object.
  for (unsigned i = 0, e = PreExpArgTokens.size(); i != e; ++i)
    PreExpArgTokens[i].clear();

  // Add this to the preprocessor's free list.
  ArgCache = PP.MacroArgCache;
  PP.MacroArgCache = this;
}

/// deallocate - This should only be called by the Preprocessor when managing
/// its freelist.  Runs the destructor, frees the storage, and returns the
/// next entry in the free list so the caller can walk the whole chain.
MacroArgs *MacroArgs::deallocate() {
  MacroArgs *Next = ArgCache;

  // Run the dtor to deallocate the vectors.
  this->~MacroArgs();
  // Release the memory for the object (it was obtained with malloc in
  // create(), not operator new).
  free(this);

  return Next;
}


/// getArgLength - Given a pointer to an expanded or unexpanded argument,
/// return the number of tokens, not counting the EOF, that make up the
/// argument.  Each argument is terminated by an eof token in the flat
/// token array.
unsigned MacroArgs::getArgLength(const Token *ArgPtr) {
  unsigned NumArgTokens = 0;
  for (; ArgPtr->isNot(tok::eof); ++ArgPtr)
    ++NumArgTokens;
  return NumArgTokens;
}
/// getUnexpArgument - Return the unexpanded tokens for the specified formal.
///
const Token *MacroArgs::getUnexpArgument(unsigned Arg) const {
  // The unexpanded argument tokens start immediately after the MacroArgs
  // object in memory (see the trailing-storage allocation in create()).
  const Token *Start = (const Token *)(this+1);
  const Token *Result = Start;
  // Scan to find Arg: arguments are laid out back to back, each terminated
  // by an eof token, so skip Arg eof markers to reach the requested one.
  for (; Arg; ++Result) {
    assert(Result < Start+NumUnexpArgTokens && "Invalid arg #");
    if (Result->is(tok::eof))
      --Arg;
  }
  assert(Result < Start+NumUnexpArgTokens && "Invalid arg #");
  return Result;
}


/// ArgNeedsPreexpansion - If we can prove that the argument won't be affected
/// by pre-expansion, return false.  Otherwise, conservatively return true.
bool MacroArgs::ArgNeedsPreexpansion(const Token *ArgTok,
                                     Preprocessor &PP) const {
  // If there are no identifiers in the argument list, or if the identifiers
  // are known to not be macros, pre-expansion won't modify it.
  for (; ArgTok->isNot(tok::eof); ++ArgTok)
    if (IdentifierInfo *II = ArgTok->getIdentifierInfo()) {
      if (II->hasMacroDefinition() && PP.getMacroInfo(II)->isEnabled())
        // Return true even though the macro could be a function-like macro
        // without a following '(' token, in which case expansion would not
        // actually occur; this is the conservative answer.
        return true;
    }
  return false;
}

/// getPreExpArgument - Return the pre-expanded form of the specified
/// argument, computing and caching it on first use.  The result always ends
/// with an eof token.
const std::vector<Token> &
MacroArgs::getPreExpArgument(unsigned Arg, const MacroInfo *MI,
                             Preprocessor &PP) {
  assert(Arg < MI->getNumArgs() && "Invalid argument number!");

  // If we have already computed this, return it.  The cache vector is lazily
  // sized to the macro's formal-argument count.
  if (PreExpArgTokens.size() < MI->getNumArgs())
    PreExpArgTokens.resize(MI->getNumArgs());

  std::vector<Token> &Result = PreExpArgTokens[Arg];
  if (!Result.empty()) return Result;

  const Token *AT = getUnexpArgument(Arg);
  unsigned NumToks = getArgLength(AT)+1;  // Include the EOF.

  // Otherwise, we have to pre-expand this argument, populating Result.  To do
  // this, we set up a fake TokenLexer to lex from the unexpanded argument
  // list.  With this installed, we lex expanded tokens until we hit the EOF
  // token at the end of the unexp list.
  PP.EnterTokenStream(AT, NumToks, false /*disable expand*/,
                      false /*owns tokens*/);

  // Lex all of the macro-expanded tokens into Result.
  do {
    Result.push_back(Token());
    Token &Tok = Result.back();
    PP.Lex(Tok);
  } while (Result.back().isNot(tok::eof));

  // Pop the token stream off the top of the stack.  We know that the internal
  // pointer inside of it is to the "end" of the token stream, but the stack
  // will not otherwise be popped until the next token is lexed.  The problem
  // is that the token may be lexed sometime after the vector of tokens itself
  // is destroyed, which would be badness.
  PP.RemoveTopOfLexerStack();
  return Result;
}


/// StringifyArgument - Implement C99 6.10.3.2p2, converting a sequence of
/// tokens into the literal string token that should be produced by the C #
/// preprocessor operator.  If Charify is true, then it should be turned into
/// a character literal for the Microsoft charize (#@) extension.
///
Token MacroArgs::StringifyArgument(const Token *ArgToks,
                                   Preprocessor &PP, bool Charify,
                                   SourceLocation ExpansionLocStart,
                                   SourceLocation ExpansionLocEnd) {
  Token Tok;
  Tok.startToken();
  Tok.setKind(Charify ? tok::char_constant : tok::string_literal);

  const Token *ArgTokStart = ArgToks;

  // Stringify all the tokens.
  llvm::SmallString<128> Result;
  Result += "\"";

  bool isFirst = true;
  for (; ArgToks->isNot(tok::eof); ++ArgToks) {
    const Token &Tok = *ArgToks;
    // Per 6.10.3.2p2, interior whitespace between tokens collapses to a
    // single space, and leading whitespace before the first token is dropped.
    if (!isFirst && (Tok.hasLeadingSpace() || Tok.isAtStartOfLine()))
      Result += ' ';
    isFirst = false;

    // If this is a string or character constant, escape the token as
    // specified by 6.10.3.2p2 (backslash-escape embedded quotes and
    // backslashes).
    if (Tok.is(tok::string_literal) ||       // "foo"
        Tok.is(tok::wide_string_literal) ||  // L"foo"
        Tok.is(tok::utf8_string_literal) ||  // u8"foo"
        Tok.is(tok::utf16_string_literal) || // u"foo"
        Tok.is(tok::utf32_string_literal) || // U"foo"
        Tok.is(tok::char_constant) ||        // 'x'
        Tok.is(tok::wide_char_constant) ||   // L'x'.
        Tok.is(tok::utf16_char_constant) ||  // u'x'.
        Tok.is(tok::utf32_char_constant)) {  // U'x'.
      bool Invalid = false;
      std::string TokStr = PP.getSpelling(Tok, &Invalid);
      if (!Invalid) {
        std::string Str = Lexer::Stringify(TokStr);
        Result.append(Str.begin(), Str.end());
      }
      // NOTE(review): an invalid spelling is silently dropped here;
      // presumably a diagnostic was already emitted when the token was lexed.
    } else if (Tok.is(tok::code_completion)) {
      PP.CodeCompleteNaturalLanguage();
    } else {
      // Otherwise, just append the token.  Do some gymnastics to get the
      // token in place and avoid copies where possible: reserve space in
      // Result and let getSpelling write directly into it.
      unsigned CurStrLen = Result.size();
      Result.resize(CurStrLen+Tok.getLength());
      const char *BufPtr = &Result[CurStrLen];
      bool Invalid = false;
      unsigned ActualTokLen = PP.getSpelling(Tok, BufPtr, &Invalid);

      if (!Invalid) {
        // If getSpelling returned a pointer to an already uniqued version of
        // the string instead of filling in BufPtr, memcpy it onto our string.
        if (BufPtr != &Result[CurStrLen])
          memcpy(&Result[CurStrLen], BufPtr, ActualTokLen);

        // If the token was dirty, the spelling may be shorter than the token.
        if (ActualTokLen != Tok.getLength())
          Result.resize(CurStrLen+ActualTokLen);
      }
    }
  }

  // If the last character of the string is a \, and if it isn't escaped, this
  // is an invalid string literal, diagnose it as specified in C99.
  // (Result always begins with '"', so for an empty argument back() is '"'
  // and this branch is not taken.)
  if (Result.back() == '\\') {
    // Count the number of consecutive \ characters.  If even, then they are
    // just escaped backslashes, otherwise it's an error.
    unsigned FirstNonSlash = Result.size()-2;
    // Guaranteed to find the starting " if nothing else.
    while (Result[FirstNonSlash] == '\\')
      --FirstNonSlash;
    if ((Result.size()-1-FirstNonSlash) & 1) {
      // Diagnose errors for things like: #define F(X) #X   /   F(\)
      PP.Diag(ArgToks[-1], diag::pp_invalid_string_literal);
      Result.pop_back();  // remove one of the \'s.
    }
  }
  Result += '"';

  // If this is the charify operation and the result is not a legal character
  // constant, diagnose it.
  if (Charify) {
    // First step, turn double quotes into single quotes:
    Result[0] = '\'';
    Result[Result.size()-1] = '\'';

    // Check for bogus character.  A legal charize result is either 'c'
    // (size 3) or an escape like '\n' (size 4 beginning with backslash).
    bool isBad = false;
    if (Result.size() == 3)
      isBad = Result[1] == '\'';  // ''' is not legal. '\' already fixed above.
    else
      isBad = (Result.size() != 4 || Result[1] != '\\');  // Not '\x'

    if (isBad) {
      PP.Diag(ArgTokStart[0], diag::err_invalid_character_to_charify);
      Result = "' '";  // Use something arbitrary, but legal.
    }
  }

  // Build the final token, attributing it to the #/#@ expansion range.
  PP.CreateString(&Result[0], Result.size(), Tok,
                  ExpansionLocStart, ExpansionLocEnd);
  return Tok;
}

/// getStringifiedArgument - Compute, cache, and return the specified argument
/// that has been 'stringified' as required by the # operator.
const Token &MacroArgs::getStringifiedArgument(unsigned ArgNo,
                                              Preprocessor &PP,
                                              SourceLocation ExpansionLocStart,
                                              SourceLocation ExpansionLocEnd) {
  assert(ArgNo < NumUnexpArgTokens && "Invalid argument number!");
  if (StringifiedArgs.empty()) {
    // Zero-fill the cache so the tok::string_literal check below reliably
    // detects not-yet-computed entries (Token has no default constructor
    // semantics we can rely on here).
    StringifiedArgs.resize(getNumArguments());
    memset((void*)&StringifiedArgs[0], 0,
           sizeof(StringifiedArgs[0])*getNumArguments());
  }
  // A zeroed Token is not a string_literal, so this computes on first use
  // and returns the cached token thereafter.
  if (StringifiedArgs[ArgNo].isNot(tok::string_literal))
    StringifiedArgs[ArgNo] = StringifyArgument(getUnexpArgument(ArgNo), PP,
                                               /*Charify=*/false,
                                               ExpansionLocStart,
                                               ExpansionLocEnd);
  return StringifiedArgs[ArgNo];
}