/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1.  Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 * 2.  Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 * 3.  Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *     its contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "BytecodeGenerator.h"

#include "BatchedTransitionOptimizer.h"
#include "JSFunction.h"
#include "Interpreter.h"
#include "ScopeChain.h"
#include "UString.h"

using namespace std;

namespace JSC {

/*
    The layout of a register frame looks like this:

    For

    function f(x, y) {
        var v1;
        function g() { }
        var v2;
        return (x) * (y);
    }

    assuming (x) and (y) generated temporaries t1 and t2, you would have

    ------------------------------------
    |  x |  y |  g | v2 | v1 | t1 | t2 | <-- value held
    ------------------------------------
    | -5 | -4 | -3 | -2 | -1 | +0 | +1 | <-- register index
    ------------------------------------
    | params->|<-locals      | temps->

    Because temporary registers are allocated in a stack-like fashion, we
    can reclaim them with a simple popping algorithm. The same goes for labels.
    (We never reclaim parameter or local registers, because parameters and
    locals are DontDelete.)

    The register layout before a function call looks like this:

    For

    function f(x, y)
    {
    }

    f(1);

    >                      <------------------------------
    <                      >  reserved: call frame  |  1 | <-- value held
    >        >snip<        <------------------------------
    <                      > +0 | +1 | +2 | +3 | +4 | +5 | <-- register index
    >                      <------------------------------
    |                      | params->|<-locals | temps->

    The call instruction fills in the "call frame" registers. It also pads
    missing arguments at the end of the call:

    >                      <-----------------------------------
    <                      >  reserved: call frame  |  1 |  ? | <-- value held ("?" stands for "undefined")
    >        >snip<        <-----------------------------------
    <                      > +0 | +1 | +2 | +3 | +4 | +5 | +6 | <-- register index
    >                      <-----------------------------------
    |                      | params->|<-locals | temps->

    After filling in missing arguments, the call instruction sets up the new
    stack frame to overlap the end of the old stack frame:

    |---------------------------------->                      <
    |  reserved: call frame  |  1 |  ? <                      > <-- value held ("?" stands for "undefined")
    |---------------------------------->        >snip<        <
    | -7 | -6 | -5 | -4 | -3 | -2 | -1 >                      < <-- register index
    |---------------------------------->                      <
    |                                  | params->|<-locals    | temps->

    That way, arguments are "copied" into the callee's stack frame for free.

    If the caller supplies too many arguments, this trick doesn't work. The
    extra arguments protrude into space reserved for locals and temporaries.
    In that case, the call instruction makes a real copy of the call frame header,
    along with just the arguments expected by the callee, leaving the original
    call frame header and arguments behind. (The call instruction can't just discard
    extra arguments, because the "arguments" object may access them later.)
    This copying strategy ensures that all named values will be at the indices
    expected by the callee.
*/

#ifndef NDEBUG
static bool s_dumpsGeneratedCode = false;
#endif

void BytecodeGenerator::setDumpsGeneratedCode(bool dumpsGeneratedCode)
{
#ifndef NDEBUG
    s_dumpsGeneratedCode = dumpsGeneratedCode;
#else
    UNUSED_PARAM(dumpsGeneratedCode);
#endif
}

bool BytecodeGenerator::dumpsGeneratedCode()
{
#ifndef NDEBUG
    return s_dumpsGeneratedCode;
#else
    return false;
#endif
}

JSObject* BytecodeGenerator::generate()
{
    m_codeBlock->setThisRegister(m_thisRegister.index());

    m_scopeNode->emitBytecode(*this);

#ifndef NDEBUG
    m_codeBlock->setInstructionCount(m_codeBlock->instructions().size());

    if (s_dumpsGeneratedCode)
        m_codeBlock->dump(m_scopeChain->globalObject->globalExec());
#endif

    if ((m_codeType == FunctionCode && !m_codeBlock->needsFullScopeChain() && !m_codeBlock->usesArguments()) || m_codeType == EvalCode)
        symbolTable().clear();

    m_codeBlock->shrinkToFit();

    if (m_expressionTooDeep)
        return createOutOfMemoryError(m_scopeChain->globalObject.get());
    return 0;
}

bool BytecodeGenerator::addVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
{
    int index = m_calleeRegisters.size();
    SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
    pair<SymbolTable::iterator, bool> result = symbolTable().add(ident.impl(), newEntry);

    if (!result.second) {
        r0 = &registerFor(result.first->second.getIndex());
        return false;
    }

    r0 = addVar();
    return true;
}

bool BytecodeGenerator::addGlobalVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
{
    int index = m_nextGlobalIndex;
    SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
    pair<SymbolTable::iterator, bool> result = symbolTable().add(ident.impl(), newEntry);

    if (!result.second)
        index = result.first->second.getIndex();
    else {
        --m_nextGlobalIndex;
        m_globals.append(index + m_globalVarStorageOffset);
    }

    r0 = &registerFor(index);
    return result.second;
}

void BytecodeGenerator::preserveLastVar()
{
    if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0)
        m_lastVar = &m_calleeRegisters.last();
}

BytecodeGenerator::BytecodeGenerator(ProgramNode* programNode, ScopeChainNode* scopeChain, SymbolTable* symbolTable, ProgramCodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
    , m_shouldEmitProfileHooks(scopeChain->globalObject->supportsProfiling())
    , m_shouldEmitRichSourceInfo(scopeChain->globalObject->supportsRichSourceInfo())
    , m_scopeChain(*scopeChain->globalData, scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(programNode)
    , m_codeBlock(codeBlock)
    , m_thisRegister(RegisterFile::ProgramCodeThisRegister)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(0)
    , m_codeType(GlobalCode)
    , m_nextGlobalIndex(-1)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_hasCreatedActivation(true)
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_globalData(scopeChain->globalData)
    , m_lastOpcodeID(op_end)
#ifndef NDEBUG
    , m_lastOpcodePosition(0)
#endif
    , m_stack(m_globalData->stack())
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
{
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    emitOpcode(op_enter);
    codeBlock->setGlobalData(m_globalData);

    // FIXME: Move code that modifies the global object to Interpreter::execute.

    m_codeBlock->m_numParameters = 1; // Allocate space for "this"

    JSGlobalObject* globalObject = scopeChain->globalObject.get();
    ExecState* exec = globalObject->globalExec();
    RegisterFile* registerFile = &exec->globalData().interpreter->registerFile();

    // Shift register indexes in generated code to elide registers allocated by intermediate stack frames.
    m_globalVarStorageOffset = -RegisterFile::CallFrameHeaderSize - m_codeBlock->m_numParameters - registerFile->size();

    // Add previously defined symbols to bookkeeping.
    m_globals.grow(symbolTable->size());
    SymbolTable::iterator end = symbolTable->end();
    for (SymbolTable::iterator it = symbolTable->begin(); it != end; ++it)
        registerFor(it->second.getIndex()).setIndex(it->second.getIndex() + m_globalVarStorageOffset);

    BatchedTransitionOptimizer optimizer(*m_globalData, globalObject);

    const VarStack& varStack = programNode->varStack();
    const FunctionStack& functionStack = programNode->functionStack();
    bool canOptimizeNewGlobals = symbolTable->size() + functionStack.size() + varStack.size() < registerFile->maxGlobals();
    if (canOptimizeNewGlobals) {
        // Shift new symbols so they get stored prior to existing symbols.
        m_nextGlobalIndex -= symbolTable->size();

        HashSet<StringImpl*, IdentifierRepHash> newGlobals;
        Vector<std::pair<int, bool>, 16> functionInfo(functionStack.size());
        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            globalObject->removeDirect(*m_globalData, function->ident()); // Make sure our new function is not shadowed by an old property.
            SymbolTableEntry entry = symbolTable->inlineGet(function->ident().impl());

            if (entry.isNull())
                newGlobals.add(function->ident().impl());
            functionInfo[i] = make_pair(entry.getIndex(), entry.isReadOnly());
        }

        Vector<bool, 16> shouldCreateVar(varStack.size());
        for (size_t i = 0; i < varStack.size(); ++i) {
            if (newGlobals.contains(varStack[i].first->impl()) || globalObject->hasProperty(exec, *varStack[i].first)) {
                shouldCreateVar[i] = false;
                continue;
            }
            shouldCreateVar[i] = true;
            newGlobals.add(varStack[i].first->impl());
        }

        int expectedSize = symbolTable->size() + newGlobals.size();
        globalObject->resizeRegisters(symbolTable->size(), expectedSize);

        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            if (functionInfo[i].second)
                continue;
            RegisterID* dst = addGlobalVar(function->ident(), false);
            JSValue value = new (exec) JSFunction(exec, makeFunction(exec, function), scopeChain);
            globalObject->registerAt(dst->index() - m_globalVarStorageOffset).set(*m_globalData, globalObject, value);
        }

        for (size_t i = 0; i < varStack.size(); ++i) {
            if (!shouldCreateVar[i])
                continue;
            addGlobalVar(*varStack[i].first, varStack[i].second & DeclarationStacks::IsConstant);
        }
        if (symbolTable->size() != expectedSize)
            CRASH();

        preserveLastVar();
    } else {
        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            globalObject->putWithAttributes(exec, function->ident(), new (exec) JSFunction(exec, makeFunction(exec, function), scopeChain), DontDelete);
        }
        for (size_t i = 0; i < varStack.size(); ++i) {
            if (globalObject->symbolTableHasProperty(*varStack[i].first) || globalObject->hasProperty(exec, *varStack[i].first))
                continue;
            int attributes = DontDelete;
            if (varStack[i].second & DeclarationStacks::IsConstant)
                attributes |= ReadOnly;
            globalObject->putWithAttributes(exec, *varStack[i].first, jsUndefined(), attributes);
        }

        preserveLastVar();
    }
    codeBlock->m_numCapturedVars = codeBlock->m_numVars;
}

BytecodeGenerator::BytecodeGenerator(FunctionBodyNode* functionBody, ScopeChainNode* scopeChain, SymbolTable* symbolTable, CodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
    , m_shouldEmitProfileHooks(scopeChain->globalObject->supportsProfiling())
    , m_shouldEmitRichSourceInfo(scopeChain->globalObject->supportsRichSourceInfo())
    , m_scopeChain(*scopeChain->globalData, scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(functionBody)
    , m_codeBlock(codeBlock)
    , m_activationRegister(0)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(0)
    , m_codeType(FunctionCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_hasCreatedActivation(false)
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_globalData(scopeChain->globalData)
    , m_lastOpcodeID(op_end)
#ifndef NDEBUG
    , m_lastOpcodePosition(0)
#endif
    , m_stack(m_globalData->stack())
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
{
    if (m_shouldEmitDebugHooks)
        m_codeBlock->setNeedsFullScopeChain(true);

    codeBlock->setGlobalData(m_globalData);

    emitOpcode(op_enter);
    if (m_codeBlock->needsFullScopeChain()) {
        m_activationRegister = addVar();
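        // The activation object itself may be created lazily (see createActivationIfNecessary()),
        // so op_init_lazy_reg below only gives this register a well-defined initial value.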
        emitInitLazyRegister(m_activationRegister);
        m_codeBlock->setActivationRegister(m_activationRegister->index());
    }

    // Both op_tear_off_activation and op_tear_off_arguments tear off the 'arguments'
    // object, if created.
    if (m_codeBlock->needsFullScopeChain() || functionBody->usesArguments()) {
        RegisterID* unmodifiedArgumentsRegister = addVar(); // Anonymous, so it can't be modified by user code.
        RegisterID* argumentsRegister = addVar(propertyNames().arguments, false); // Can be changed by assigning to 'arguments'.

        // We can save a little space by hard-coding the knowledge that the two
        // 'arguments' values are stored in consecutive registers, and storing
        // only the index of the assignable one.
        codeBlock->setArgumentsRegister(argumentsRegister->index());
        ASSERT_UNUSED(unmodifiedArgumentsRegister, unmodifiedArgumentsRegister->index() == JSC::unmodifiedArgumentsRegister(codeBlock->argumentsRegister()));

        emitInitLazyRegister(argumentsRegister);
        emitInitLazyRegister(unmodifiedArgumentsRegister);

        if (m_codeBlock->isStrictMode()) {
            emitOpcode(op_create_arguments);
            instructions().append(argumentsRegister->index());
        }

        // The debugger currently retrieves the arguments object from an activation rather than pulling
        // it from a call frame. In the long-term it should stop doing that (<rdar://problem/6911886>),
        // but for now we force eager creation of the arguments object when debugging.
        if (m_shouldEmitDebugHooks) {
            emitOpcode(op_create_arguments);
            instructions().append(argumentsRegister->index());
        }
    }

    const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
    const DeclarationStacks::VarStack& varStack = functionBody->varStack();

    // Captured variables and functions go first so that activations don't have
    // to step over the non-captured locals to mark them.
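    // (codeBlock->m_numCapturedVars, set below once the captured functions and
    // variables have been added, records where this captured prefix ends.)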
    m_hasCreatedActivation = false;
    if (functionBody->hasCapturedVariables()) {
        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            const Identifier& ident = function->ident();
            if (functionBody->captures(ident)) {
                if (!m_hasCreatedActivation) {
                    m_hasCreatedActivation = true;
                    emitOpcode(op_create_activation);
                    instructions().append(m_activationRegister->index());
                }
                m_functions.add(ident.impl());
                emitNewFunction(addVar(ident, false), function);
            }
        }
        for (size_t i = 0; i < varStack.size(); ++i) {
            const Identifier& ident = *varStack[i].first;
            if (functionBody->captures(ident))
                addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
        }
    }
    bool canLazilyCreateFunctions = !functionBody->needsActivationForMoreThanVariables() && !m_shouldEmitDebugHooks;
    if (!canLazilyCreateFunctions && !m_hasCreatedActivation) {
        m_hasCreatedActivation = true;
        emitOpcode(op_create_activation);
        instructions().append(m_activationRegister->index());
    }

    codeBlock->m_numCapturedVars = codeBlock->m_numVars;
    m_firstLazyFunction = codeBlock->m_numVars;
    for (size_t i = 0; i < functionStack.size(); ++i) {
        FunctionBodyNode* function = functionStack[i];
        const Identifier& ident = function->ident();
        if (!functionBody->captures(ident)) {
            m_functions.add(ident.impl());
            RefPtr<RegisterID> reg = addVar(ident, false);
            // Don't lazily create functions that override the name 'arguments'
            // as this would complicate lazy instantiation of actual arguments.
            if (!canLazilyCreateFunctions || ident == propertyNames().arguments)
                emitNewFunction(reg.get(), function);
            else {
                emitInitLazyRegister(reg.get());
                m_lazyFunctions.set(reg->index(), function);
            }
        }
    }
    m_lastLazyFunction = canLazilyCreateFunctions ? codeBlock->m_numVars : m_firstLazyFunction;
    for (size_t i = 0; i < varStack.size(); ++i) {
        const Identifier& ident = *varStack[i].first;
        if (!functionBody->captures(ident))
            addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
    }

    if (m_shouldEmitDebugHooks)
        codeBlock->m_numCapturedVars = codeBlock->m_numVars;

    FunctionParameters& parameters = *functionBody->parameters();
    size_t parameterCount = parameters.size();
    int nextParameterIndex = -RegisterFile::CallFrameHeaderSize - parameterCount - 1;
    m_parameters.grow(1 + parameterCount); // reserve space for "this"

    // Add "this" as a parameter
    m_thisRegister.setIndex(nextParameterIndex);
    ++m_codeBlock->m_numParameters;

    for (size_t i = 0; i < parameterCount; ++i)
        addParameter(parameters[i], ++nextParameterIndex);

    preserveLastVar();

    if (isConstructor()) {
        RefPtr<RegisterID> func = newTemporary();
        RefPtr<RegisterID> funcProto = newTemporary();

        emitOpcode(op_get_callee);
        instructions().append(func->index());
        // Load prototype.
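        // (This is the callee's 'prototype' property; op_create_this below uses it
        // as the prototype of the newly constructed 'this' object.)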
        emitGetById(funcProto.get(), func.get(), globalData()->propertyNames->prototype);

        emitOpcode(op_create_this);
        instructions().append(m_thisRegister.index());
        instructions().append(funcProto->index());
    } else if (functionBody->usesThis() || m_shouldEmitDebugHooks) {
        if (codeBlock->isStrictMode())
            emitOpcode(op_convert_this_strict);
        else
            emitOpcode(op_convert_this);
        instructions().append(m_thisRegister.index());
    }
}

BytecodeGenerator::BytecodeGenerator(EvalNode* evalNode, ScopeChainNode* scopeChain, SymbolTable* symbolTable, EvalCodeBlock* codeBlock)
    : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
    , m_shouldEmitProfileHooks(scopeChain->globalObject->supportsProfiling())
    , m_shouldEmitRichSourceInfo(scopeChain->globalObject->supportsRichSourceInfo())
    , m_scopeChain(*scopeChain->globalData, scopeChain)
    , m_symbolTable(symbolTable)
    , m_scopeNode(evalNode)
    , m_codeBlock(codeBlock)
    , m_thisRegister(RegisterFile::ProgramCodeThisRegister)
    , m_finallyDepth(0)
    , m_dynamicScopeDepth(0)
    , m_baseScopeDepth(codeBlock->baseScopeDepth())
    , m_codeType(EvalCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_hasCreatedActivation(true)
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_globalData(scopeChain->globalData)
    , m_lastOpcodeID(op_end)
#ifndef NDEBUG
    , m_lastOpcodePosition(0)
#endif
    , m_stack(m_globalData->stack())
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
{
    if (m_shouldEmitDebugHooks || m_baseScopeDepth)
        m_codeBlock->setNeedsFullScopeChain(true);

    emitOpcode(op_enter);
    codeBlock->setGlobalData(m_globalData);
    m_codeBlock->m_numParameters = 1; // Allocate space for "this"

    const DeclarationStacks::FunctionStack& functionStack = evalNode->functionStack();
    for (size_t i = 0; i < functionStack.size(); ++i)
        m_codeBlock->addFunctionDecl(makeFunction(m_globalData, functionStack[i]));

    const DeclarationStacks::VarStack& varStack = evalNode->varStack();
    unsigned numVariables = varStack.size();
    Vector<Identifier> variables;
    variables.reserveCapacity(numVariables);
    for (size_t i = 0; i < numVariables; ++i)
        variables.append(*varStack[i].first);
    codeBlock->adoptVariables(variables);
    codeBlock->m_numCapturedVars = codeBlock->m_numVars;
    preserveLastVar();
}

RegisterID* BytecodeGenerator::emitInitLazyRegister(RegisterID* reg)
{
    emitOpcode(op_init_lazy_reg);
    instructions().append(reg->index());
    return reg;
}

void BytecodeGenerator::addParameter(const Identifier& ident, int parameterIndex)
{
    // Parameters overwrite var declarations, but not function declarations.
    StringImpl* rep = ident.impl();
    if (!m_functions.contains(rep)) {
        symbolTable().set(rep, parameterIndex);
        RegisterID& parameter = registerFor(parameterIndex);
        parameter.setIndex(parameterIndex);
    }

    // To maintain the calling convention, we have to allocate unique space for
    // each parameter, even if the parameter doesn't make it into the symbol table.
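    // (A parameter stays out of the symbol table only when a function declaration
    // of the same name has already claimed it; see the m_functions check above.)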
    ++m_codeBlock->m_numParameters;
}

RegisterID* BytecodeGenerator::registerFor(const Identifier& ident)
{
    if (ident == propertyNames().thisIdentifier)
        return &m_thisRegister;

    if (!shouldOptimizeLocals())
        return 0;

    SymbolTableEntry entry = symbolTable().get(ident.impl());
    if (entry.isNull())
        return 0;

    if (ident == propertyNames().arguments)
        createArgumentsIfNecessary();

    return createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
}

bool BytecodeGenerator::willResolveToArguments(const Identifier& ident)
{
    if (ident != propertyNames().arguments)
        return false;

    if (!shouldOptimizeLocals())
        return false;

    SymbolTableEntry entry = symbolTable().get(ident.impl());
    if (entry.isNull())
        return false;

    if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
        return true;

    return false;
}

RegisterID* BytecodeGenerator::uncheckedRegisterForArguments()
{
    ASSERT(willResolveToArguments(propertyNames().arguments));

    SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.impl());
    ASSERT(!entry.isNull());
    return &registerFor(entry.getIndex());
}

RegisterID* BytecodeGenerator::createLazyRegisterIfNecessary(RegisterID* reg)
{
    if (m_lastLazyFunction <= reg->index() || reg->index() < m_firstLazyFunction)
        return reg;
    emitLazyNewFunction(reg, m_lazyFunctions.get(reg->index()));
    return reg;
}

RegisterID* BytecodeGenerator::constRegisterFor(const Identifier& ident)
{
    if (m_codeType == EvalCode)
        return 0;

    SymbolTableEntry entry = symbolTable().get(ident.impl());
    if (entry.isNull())
        return 0;

    return createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
}

bool BytecodeGenerator::isLocal(const Identifier& ident)
{
    if (ident == propertyNames().thisIdentifier)
        return true;

    return shouldOptimizeLocals() && symbolTable().contains(ident.impl());
}

bool BytecodeGenerator::isLocalConstant(const Identifier& ident)
{
    return symbolTable().get(ident.impl()).isReadOnly();
}

RegisterID* BytecodeGenerator::newRegister()
{
    m_calleeRegisters.append(m_calleeRegisters.size());
    m_codeBlock->m_numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
    return &m_calleeRegisters.last();
}

RegisterID* BytecodeGenerator::newTemporary()
{
    // Reclaim free register IDs.
    while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
        m_calleeRegisters.removeLast();

    RegisterID* result = newRegister();
    result->setTemporary();
    return result;
}

RegisterID* BytecodeGenerator::highestUsedRegister()
{
    size_t count = m_codeBlock->m_numCalleeRegisters;
    while (m_calleeRegisters.size() < count)
        newRegister();
    return &m_calleeRegisters.last();
}

PassRefPtr<LabelScope> BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
{
    // Reclaim free label scopes.
    while (m_labelScopes.size() && !m_labelScopes.last().refCount())
        m_labelScopes.removeLast();

    // Allocate new label scope.
    LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : PassRefPtr<Label>()); // Only loops have continue targets.
    m_labelScopes.append(scope);
    return &m_labelScopes.last();
}

PassRefPtr<Label> BytecodeGenerator::newLabel()
{
    // Reclaim free label IDs.
    while (m_labels.size() && !m_labels.last().refCount())
        m_labels.removeLast();

    // Allocate new label ID.
    m_labels.append(m_codeBlock);
    return &m_labels.last();
}

PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
{
    unsigned newLabelIndex = instructions().size();
    l0->setLocation(newLabelIndex);

    if (m_codeBlock->numberOfJumpTargets()) {
        unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
        ASSERT(lastLabelIndex <= newLabelIndex);
        if (newLabelIndex == lastLabelIndex) {
            // Peephole optimizations have already been disabled by emitting the last label
            return l0;
        }
    }

    m_codeBlock->addJumpTarget(newLabelIndex);

    // This disables peephole optimizations when an instruction is a jump target
    m_lastOpcodeID = op_end;
    return l0;
}

void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
{
#ifndef NDEBUG
    size_t opcodePosition = instructions().size();
    ASSERT(opcodePosition - m_lastOpcodePosition == opcodeLength(m_lastOpcodeID) || m_lastOpcodeID == op_end);
    m_lastOpcodePosition = opcodePosition;
#endif
    instructions().append(globalData()->interpreter->getOpcode(opcodeID));
    m_lastOpcodeID = opcodeID;
}

void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
{
    ASSERT(instructions().size() >= 4);
    size_t size = instructions().size();
    dstIndex = instructions().at(size - 3).u.operand;
    src1Index = instructions().at(size - 2).u.operand;
    src2Index = instructions().at(size - 1).u.operand;
}

void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
{
    ASSERT(instructions().size() >= 3);
    size_t size = instructions().size();
    dstIndex = instructions().at(size - 2).u.operand;
    srcIndex = instructions().at(size - 1).u.operand;
}

void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
{
    ASSERT(instructions().size() >= 4);
    instructions().shrink(instructions().size() - 4);
    m_lastOpcodeID = op_end;
}

void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
{
    ASSERT(instructions().size() >= 3);
    instructions().shrink(instructions().size() - 3);
    m_lastOpcodeID = op_end;
}

PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
{
    size_t begin = instructions().size();
    emitOpcode(target->isForward() ? op_jmp : op_loop);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}

PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
{
    if (m_lastOpcodeID == op_less) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jless : op_loop_if_less);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jlesseq : op_loop_if_lesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jeq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jneq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    }

    size_t begin = instructions().size();

    emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
    instructions().append(cond->index());
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}

PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
{
    if (m_lastOpcodeID == op_less && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jnless);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jnlesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_not) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jneq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jeq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    }

    size_t begin = instructions().size();
    emitOpcode(target->isForward() ? op_jfalse : op_loop_if_false);
    instructions().append(cond->index());
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}

PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_jneq_ptr);
    instructions().append(cond->index());
    instructions().append(Instruction(*m_globalData, m_codeBlock->ownerExecutable(), m_scopeChain->globalObject->callFunction()));
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}

PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_jneq_ptr);
    instructions().append(cond->index());
    instructions().append(Instruction(*m_globalData, m_codeBlock->ownerExecutable(), m_scopeChain->globalObject->applyFunction()));
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}

unsigned BytecodeGenerator::addConstant(const Identifier& ident)
{
    StringImpl* rep = ident.impl();
    pair<IdentifierMap::iterator, bool> result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
    if (result.second) // new entry
        m_codeBlock->addIdentifier(Identifier(m_globalData, rep));

    return result.first->second;
}

RegisterID* BytecodeGenerator::addConstantValue(JSValue v)
{
    int index = m_nextConstantOffset;

    pair<JSValueMap::iterator, bool> result = m_jsValueMap.add(JSValue::encode(v), m_nextConstantOffset);
    if (result.second) {
        m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
        ++m_nextConstantOffset;
        m_codeBlock->addConstant(JSValue(v));
    } else
        index = result.first->second;

    return &m_constantPoolRegisters[index];
}

unsigned BytecodeGenerator::addRegExp(PassRefPtr<RegExp> r)
{
    return m_codeBlock->addRegExp(r);
}

RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
{
    emitOpcode(op_mov);
    instructions().append(dst->index());
    instructions().append(src->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src)
{
    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitPreInc(RegisterID* srcDst)
{
    emitOpcode(op_pre_inc);
    instructions().append(srcDst->index());
    return srcDst;
}

RegisterID* BytecodeGenerator::emitPreDec(RegisterID* srcDst)
{
    emitOpcode(op_pre_dec);
    instructions().append(srcDst->index());
    return srcDst;
}

RegisterID* BytecodeGenerator::emitPostInc(RegisterID* dst, RegisterID* srcDst)
{
    emitOpcode(op_post_inc);
    instructions().append(dst->index());
    instructions().append(srcDst->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitPostDec(RegisterID* dst, RegisterID* srcDst)
{
    emitOpcode(op_post_dec);
    instructions().append(dst->index());
    instructions().append(srcDst->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
{
    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src1->index());
    instructions().append(src2->index());

    if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
        opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div)
        instructions().append(types.toInt());

    return dst;
}

RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
{
    if (m_lastOpcodeID == op_typeof) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (src1->index() == dstIndex
            && src1->isTemporary()
            && m_codeBlock->isConstantRegisterIndex(src2->index())
            && m_codeBlock->constantRegister(src2->index()).get().isString()) {
            const UString& value = asString(m_codeBlock->constantRegister(src2->index()).get())->tryGetValue();
            if (value == "undefined") {
                rewindUnaryOp();
                emitOpcode(op_is_undefined);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "boolean") {
                rewindUnaryOp();
                emitOpcode(op_is_boolean);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "number") {
                rewindUnaryOp();
                emitOpcode(op_is_number);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "string") {
                rewindUnaryOp();
                emitOpcode(op_is_string);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "object") {
                rewindUnaryOp();
                emitOpcode(op_is_object);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "function") {
                rewindUnaryOp();
                emitOpcode(op_is_function);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
        }
    }

    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src1->index());
    instructions().append(src2->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
{
    return emitLoad(dst, jsBoolean(b));
}

RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
{
    // FIXME: Our hash tables won't hold infinity, so we make a new JSValue each time.
    // Later we can do the extra work to handle that like the other cases. They also don't
    // work correctly with NaN as a key.
    if (isnan(number) || number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
        return emitLoad(dst, jsNumber(number));
    JSValue& valueInMap = m_numberMap.add(number, JSValue()).first->second;
    if (!valueInMap)
        valueInMap = jsNumber(number);
    return emitLoad(dst, valueInMap);
}

RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
{
    JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).first->second;
    if (!stringInMap)
        stringInMap = jsOwnedString(globalData(), identifier.ustring());
    return emitLoad(dst, JSValue(stringInMap));
}

RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v)
{
    RegisterID* constantID = addConstantValue(v);
    if (dst)
        return emitMove(dst, constantID);
    return constantID;
}

bool BytecodeGenerator::findScopedProperty(const Identifier& property, int& index, size_t& stackDepth, bool forWriting, bool& requiresDynamicChecks, JSObject*& globalObject)
{
    // Cases where we cannot statically optimize the lookup.
    if (property == propertyNames().arguments || !canOptimizeNonLocals()) {
        stackDepth = 0;
        index = missingSymbolMarker();

        if (shouldOptimizeLocals() && m_codeType == GlobalCode) {
            ScopeChainIterator iter = m_scopeChain->begin();
            globalObject = iter->get();
            ASSERT((++iter) == m_scopeChain->end());
        }
        return false;
    }

    size_t depth = 0;
    requiresDynamicChecks = false;
    ScopeChainIterator iter = m_scopeChain->begin();
    ScopeChainIterator end = m_scopeChain->end();
    for (; iter != end; ++iter, ++depth) {
        JSObject* currentScope = iter->get();
        if (!currentScope->isVariableObject())
            break;
        JSVariableObject* currentVariableObject = static_cast<JSVariableObject*>(currentScope);
        SymbolTableEntry entry = currentVariableObject->symbolTable().get(property.impl());

        // Found the property
        if (!entry.isNull()) {
            if (entry.isReadOnly() && forWriting) {
                stackDepth = 0;
                index = missingSymbolMarker();
                if (++iter == end)
                    globalObject = currentVariableObject;
                return false;
            }
            stackDepth = depth + m_codeBlock->needsFullScopeChain();
            index = entry.getIndex();
            if (++iter == end)
                globalObject = currentVariableObject;
            return true;
        }
        bool scopeRequiresDynamicChecks = false;
        if (currentVariableObject->isDynamicScope(scopeRequiresDynamicChecks))
            break;
        requiresDynamicChecks |= scopeRequiresDynamicChecks;
    }
    // Can't locate the property but we're able to avoid a few lookups.
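    // (Returning true with index == missingSymbolMarker() tells callers such as
    // emitResolve that 'stackDepth' scopes can be skipped before falling back to
    // a name-based lookup.)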
    stackDepth = depth + m_codeBlock->needsFullScopeChain();
    index = missingSymbolMarker();
    JSObject* scope = iter->get();
    if (++iter == end)
        globalObject = scope;
    return true;
}

void BytecodeGenerator::emitCheckHasInstance(RegisterID* base)
{
    emitOpcode(op_check_has_instance);
    instructions().append(base->index());
}

RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* base, RegisterID* basePrototype)
{
    emitOpcode(op_instanceof);
    instructions().append(dst->index());
    instructions().append(value->index());
    instructions().append(base->index());
    instructions().append(basePrototype->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitResolve(RegisterID* dst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    bool requiresDynamicChecks = false;
    if (!findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject) && !globalObject) {
        // We can't optimise at all :-(
        emitOpcode(op_resolve);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        return dst;
    }

    if (globalObject) {
        bool forceGlobalResolve = false;

        if (index != missingSymbolMarker() && !forceGlobalResolve && !requiresDynamicChecks) {
            // Directly index the property lookup across multiple scopes.
            return emitGetScopedVar(dst, depth, index, globalObject);
        }

#if ENABLE(JIT)
        m_codeBlock->addGlobalResolveInfo(instructions().size());
#else
        m_codeBlock->addGlobalResolveInstruction(instructions().size());
#endif
        emitOpcode(requiresDynamicChecks ? op_resolve_global_dynamic : op_resolve_global);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        instructions().append(0);
        instructions().append(0);
        if (requiresDynamicChecks)
            instructions().append(depth);
        return dst;
    }

    if (requiresDynamicChecks) {
        // If we get here we have eval nested inside a |with|, so just give up.
        emitOpcode(op_resolve);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        return dst;
    }

    if (index != missingSymbolMarker()) {
        // Directly index the property lookup across multiple scopes.
        return emitGetScopedVar(dst, depth, index, globalObject);
    }

    // In this case we are at least able to drop a few scope chains from the
    // lookup chain, although we still need to hash from then on.
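    // (op_resolve_skip encodes the destination, the property name, and the number
    // of scope-chain nodes that can be stepped over before hashing begins.)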
    emitOpcode(op_resolve_skip);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    instructions().append(depth);
    return dst;
}

RegisterID* BytecodeGenerator::emitGetScopedVar(RegisterID* dst, size_t depth, int index, JSValue globalObject)
{
    if (globalObject) {
        emitOpcode(op_get_global_var);
        instructions().append(dst->index());
        instructions().append(index);
        return dst;
    }

    emitOpcode(op_get_scoped_var);
    instructions().append(dst->index());
    instructions().append(index);
    instructions().append(depth);
    return dst;
}

RegisterID* BytecodeGenerator::emitPutScopedVar(size_t depth, int index, RegisterID* value, JSValue globalObject)
{
    if (globalObject) {
        emitOpcode(op_put_global_var);
        instructions().append(index);
        instructions().append(value->index());
        return value;
    }
    emitOpcode(op_put_scoped_var);
    instructions().append(index);
    instructions().append(depth);
    instructions().append(value->index());
    return value;
}

RegisterID* BytecodeGenerator::emitResolveBase(RegisterID* dst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    bool requiresDynamicChecks = false;
    findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject);
    if (!globalObject || requiresDynamicChecks) {
        // We can't optimise at all :-(
        emitOpcode(op_resolve_base);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        instructions().append(false);
        return dst;
    }

    // Global object is the base
    return emitLoad(dst, JSValue(globalObject));
}

RegisterID* BytecodeGenerator::emitResolveBaseForPut(RegisterID* dst, const Identifier& property)
{
    if (!m_codeBlock->isStrictMode())
        return emitResolveBase(dst, property);
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    bool requiresDynamicChecks = false;
    findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject);
    if (!globalObject || requiresDynamicChecks) {
        // We can't optimise at all :-(
        emitOpcode(op_resolve_base);
        instructions().append(dst->index());
        instructions().append(addConstant(property));
        instructions().append(true);
        return dst;
    }

    // Global object is the base
    RefPtr<RegisterID> result = emitLoad(dst, JSValue(globalObject));
    emitOpcode(op_ensure_property_exists);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    return result.get();
}

RegisterID* BytecodeGenerator::emitResolveWithBase(RegisterID* baseDst, RegisterID* propDst, const Identifier& property)
{
    size_t depth = 0;
    int index = 0;
    JSObject* globalObject = 0;
    bool requiresDynamicChecks = false;
    if (!findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject) || !globalObject || requiresDynamicChecks) {
        // We can't optimise at all :-(
        emitOpcode(op_resolve_with_base);
        instructions().append(baseDst->index());
        instructions().append(propDst->index());
        instructions().append(addConstant(property));
        return baseDst;
    }

    bool forceGlobalResolve = false;

    // Global object is the base
    emitLoad(baseDst, JSValue(globalObject));

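    // If the property sits in a statically-known global register, read it
    // directly; otherwise fall through to a resolve_global lookup below.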
    if (index != missingSymbolMarker() && !forceGlobalResolve) {
        // Directly index the property lookup across multiple scopes.
        emitGetScopedVar(propDst, depth, index, globalObject);
        return baseDst;
    }

#if ENABLE(JIT)
    m_codeBlock->addGlobalResolveInfo(instructions().size());
#else
    m_codeBlock->addGlobalResolveInstruction(instructions().size());
#endif
    emitOpcode(requiresDynamicChecks ? op_resolve_global_dynamic : op_resolve_global);
    instructions().append(propDst->index());
    instructions().append(addConstant(property));
    instructions().append(0);
    instructions().append(0);
    if (requiresDynamicChecks)
        instructions().append(depth);
    return baseDst;
}

void BytecodeGenerator::emitMethodCheck()
{
    emitOpcode(op_method_check);
}

RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
#if ENABLE(JIT)
    m_codeBlock->addStructureStubInfo(StructureStubInfo(access_get_by_id));
#else
    m_codeBlock->addPropertyAccessInstruction(instructions().size());
#endif

    emitOpcode(op_get_by_id);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    return dst;
}

RegisterID* BytecodeGenerator::emitGetArgumentsLength(RegisterID* dst, RegisterID* base)
{
    emitOpcode(op_get_arguments_length);
    instructions().append(dst->index());
    ASSERT(base->index() == m_codeBlock->argumentsRegister());
    instructions().append(base->index());
    instructions().append(addConstant(propertyNames().length));
    return dst;
}

RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
{
#if ENABLE(JIT)
    m_codeBlock->addStructureStubInfo(StructureStubInfo(access_put_by_id));
#else
    m_codeBlock->addPropertyAccessInstruction(instructions().size());
#endif

    emitOpcode(op_put_by_id);
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    return value;
}

RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value)
{
#if ENABLE(JIT)
    m_codeBlock->addStructureStubInfo(StructureStubInfo(access_put_by_id));
#else
    m_codeBlock->addPropertyAccessInstruction(instructions().size());
#endif

    emitOpcode(op_put_by_id);
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(property != m_globalData->propertyNames->underscoreProto);
    return value;
}

RegisterID* BytecodeGenerator::emitPutGetter(RegisterID* base, const Identifier& property, RegisterID* value)
{
    emitOpcode(op_put_getter);
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
    return value;
}

RegisterID* BytecodeGenerator::emitPutSetter(RegisterID* base, const Identifier& property, RegisterID* value)
{
    emitOpcode(op_put_setter);
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
    return value;
}

RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
    emitOpcode(op_del_by_id);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(addConstant(property));
    return dst;
}

RegisterID* BytecodeGenerator::emitGetArgumentByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    emitOpcode(op_get_argument_by_val);
    instructions().append(dst->index());
    ASSERT(base->index() == m_codeBlock->argumentsRegister());
    instructions().append(base->index());
    instructions().append(property->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    for (size_t i = m_forInContextStack.size(); i > 0; i--) {
        ForInContext& context = m_forInContextStack[i - 1];
        if (context.propertyRegister == property) {
            emitOpcode(op_get_by_pname);
            instructions().append(dst->index());
            instructions().append(base->index());
            instructions().append(property->index());
            instructions().append(context.expectedSubscriptRegister->index());
            instructions().append(context.iterRegister->index());
            instructions().append(context.indexRegister->index());
            return dst;
        }
    }
    emitOpcode(op_get_by_val);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(property->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
{
    emitOpcode(op_put_by_val);
    instructions().append(base->index());
    instructions().append(property->index());
    instructions().append(value->index());
    return value;
}

RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    emitOpcode(op_del_by_val);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(property->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitPutByIndex(RegisterID* base, unsigned index, RegisterID* value)
{
    emitOpcode(op_put_by_index);
    instructions().append(base->index());
    instructions().append(index);
    instructions().append(value->index());
    return value;
}

RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
{
    emitOpcode(op_new_object);
    instructions().append(dst->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements)
{
    Vector<RefPtr<RegisterID>, 16> argv;
    for (ElementNode* n = elements; n; n = n->next()) {
        if (n->elision())
            break;
        argv.append(newTemporary());
        // op_new_array requires the initial values to be a sequential range of registers
        ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
        emitNode(argv.last().get(), n->value());
    }
    emitOpcode(op_new_array);
    instructions().append(dst->index());
    instructions().append(argv.size() ? argv[0]->index() : 0); // argv
    instructions().append(argv.size()); // argc
    return dst;
}

RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, FunctionBodyNode* function)
{
    return emitNewFunctionInternal(dst, m_codeBlock->addFunctionDecl(makeFunction(m_globalData, function)), false);
}

RegisterID* BytecodeGenerator::emitLazyNewFunction(RegisterID* dst, FunctionBodyNode* function)
{
    std::pair<FunctionOffsetMap::iterator, bool> ptr = m_functionOffsets.add(function, 0);
    if (ptr.second)
        ptr.first->second = m_codeBlock->addFunctionDecl(makeFunction(m_globalData, function));
    return emitNewFunctionInternal(dst, ptr.first->second, true);
}

RegisterID* BytecodeGenerator::emitNewFunctionInternal(RegisterID* dst, unsigned index, bool doNullCheck)
{
    createActivationIfNecessary();
    emitOpcode(op_new_func);
    instructions().append(dst->index());
    instructions().append(index);
    instructions().append(doNullCheck);
    return dst;
}

RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, PassRefPtr<RegExp> regExp)
{
    emitOpcode(op_new_regexp);
    instructions().append(dst->index());
    instructions().append(addRegExp(regExp));
    return dst;
}

RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* r0, FuncExprNode* n)
{
    FunctionBodyNode* function = n->body();
    unsigned index = m_codeBlock->addFunctionExpr(makeFunction(m_globalData, function));

    createActivationIfNecessary();
    emitOpcode(op_new_func_exp);
    instructions().append(r0->index());
    instructions().append(index);
    return r0;
}

RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    return emitCall(op_call, dst, func, callArguments, divot, startOffset, endOffset);
}

void BytecodeGenerator::createArgumentsIfNecessary()
{
    if (m_codeType != FunctionCode)
        return;

    if (!m_codeBlock->usesArguments())
        return;

    // If we're in strict mode we tear off the arguments on function
    // entry, so there's no need to check if we need to create them
    // now
    if (m_codeBlock->isStrictMode())
        return;

    emitOpcode(op_create_arguments);
    instructions().append(m_codeBlock->argumentsRegister());
}

void BytecodeGenerator::createActivationIfNecessary()
{
    if (m_hasCreatedActivation)
        return;
    if (!m_codeBlock->needsFullScopeChain())
        return;
    emitOpcode(op_create_activation);
    instructions().append(m_activationRegister->index());
}

RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    return emitCall(op_call_eval, dst, func, callArguments, divot, startOffset, endOffset);
}

RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
    ASSERT(func->refCount());

    if (m_shouldEmitProfileHooks)

RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
    ASSERT(func->refCount());

    if (m_shouldEmitProfileHooks)
        emitMove(callArguments.profileHookRegister(), func);

    // Generate code for arguments.
    unsigned argumentIndex = 0;
    for (ArgumentListNode* n = callArguments.argumentsNode()->m_listNode; n; n = n->m_next)
        emitNode(callArguments.argumentRegister(argumentIndex++), n);

    // Reserve space for call frame.
    Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
    for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
        callFrame.append(newTemporary());

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    emitExpressionInfo(divot, startOffset, endOffset);

#if ENABLE(JIT)
    m_codeBlock->addCallLinkInfo();
#endif

    // Emit call.
    emitOpcode(opcodeID);
    instructions().append(func->index()); // func
    instructions().append(callArguments.count()); // argCount
    instructions().append(callArguments.callFrame()); // registerOffset
    if (dst != ignoredResult()) {
        emitOpcode(op_call_put_result);
        instructions().append(dst->index()); // dst
    }

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    return dst;
}

RegisterID* BytecodeGenerator::emitLoadVarargs(RegisterID* argCountDst, RegisterID* thisRegister, RegisterID* arguments)
{
    ASSERT(argCountDst->index() < arguments->index());
    emitOpcode(op_load_varargs);
    instructions().append(argCountDst->index());
    instructions().append(arguments->index());
    instructions().append(thisRegister->index() + RegisterFile::CallFrameHeaderSize); // initial registerOffset
    return argCountDst;
}

RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* argCountRegister, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    ASSERT(func->refCount());
    ASSERT(thisRegister->refCount());
    ASSERT(dst != func);
    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(func->index());
    }

    emitExpressionInfo(divot, startOffset, endOffset);

    // Emit call.
    emitOpcode(op_call_varargs);
    instructions().append(func->index()); // func
    instructions().append(argCountRegister->index()); // arg count
    instructions().append(thisRegister->index() + RegisterFile::CallFrameHeaderSize); // initial registerOffset
    if (dst != ignoredResult()) {
        emitOpcode(op_call_put_result);
        instructions().append(dst->index()); // dst
    }
    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(func->index());
    }
    return dst;
}
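
// For illustration: the load_varargs / call_varargs pair above is what an
// argument-forwarding call such as
//     function f() { return g.apply(this, arguments); }
// would likely lower to: op_load_varargs spreads the arguments object into a
// contiguous run of registers and records the count, and op_call_varargs then
// performs the call with that dynamically sized argument run.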

RegisterID* BytecodeGenerator::emitReturn(RegisterID* src)
{
    if (m_codeBlock->needsFullScopeChain()) {
        emitOpcode(op_tear_off_activation);
        instructions().append(m_activationRegister->index());
        instructions().append(m_codeBlock->argumentsRegister());
    } else if (m_codeBlock->usesArguments() && m_codeBlock->m_numParameters > 1
               && !m_codeBlock->isStrictMode()) { // If there are no named parameters, there's nothing to tear off, since extra / unnamed parameters get copied to the arguments object at construct time.
        emitOpcode(op_tear_off_arguments);
        instructions().append(m_codeBlock->argumentsRegister());
    }

    // Constructors use op_ret_object_or_this to check the result is an
    // object, unless we can trivially determine the check is not
    // necessary (currently, if the return value is 'this').
    if (isConstructor() && (src->index() != m_thisRegister.index())) {
        emitOpcode(op_ret_object_or_this);
        instructions().append(src->index());
        instructions().append(m_thisRegister.index());
        return src;
    }
    return emitUnaryNoDstOp(op_ret, src);
}

RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src)
{
    emitOpcode(opcodeID);
    instructions().append(src->index());
    return src;
}

RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
{
    ASSERT(func->refCount());

    if (m_shouldEmitProfileHooks)
        emitMove(callArguments.profileHookRegister(), func);

    // Generate code for arguments.
    unsigned argumentIndex = 0;
    if (ArgumentsNode* argumentsNode = callArguments.argumentsNode()) {
        for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next)
            emitNode(callArguments.argumentRegister(argumentIndex++), n);
    }

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_will_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    // Reserve space for call frame.
    Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
    for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
        callFrame.append(newTemporary());

    emitExpressionInfo(divot, startOffset, endOffset);

#if ENABLE(JIT)
    m_codeBlock->addCallLinkInfo();
#endif

    emitOpcode(op_construct);
    instructions().append(func->index()); // func
    instructions().append(callArguments.count()); // argCount
    instructions().append(callArguments.callFrame()); // registerOffset
    if (dst != ignoredResult()) {
        emitOpcode(op_call_put_result);
        instructions().append(dst->index()); // dst
    }

    if (m_shouldEmitProfileHooks) {
        emitOpcode(op_profile_did_call);
        instructions().append(callArguments.profileHookRegister()->index());
    }

    return dst;
}

RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
{
    emitOpcode(op_strcat);
    instructions().append(dst->index());
    instructions().append(src->index());
    instructions().append(count);

    return dst;
}

void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
{
    emitOpcode(op_to_primitive);
    instructions().append(dst->index());
    instructions().append(src->index());
}

RegisterID* BytecodeGenerator::emitPushScope(RegisterID* scope)
{
    ASSERT(scope->isTemporary());
    ControlFlowContext context;
    context.isFinallyBlock = false;
    m_scopeContextStack.append(context);
    m_dynamicScopeDepth++;

    return emitUnaryNoDstOp(op_push_scope, scope);
}

void BytecodeGenerator::emitPopScope()
{
    ASSERT(m_scopeContextStack.size());
    ASSERT(!m_scopeContextStack.last().isFinallyBlock);

    emitOpcode(op_pop_scope);

    m_scopeContextStack.removeLast();
    m_dynamicScopeDepth--;
}
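
// For illustration: emitPushScope / emitPopScope bracket constructs that extend
// the scope chain dynamically. A statement such as
//     with (o) { x = 1; }
// would typically push o via op_push_scope before its body and pop it again
// afterwards; m_dynamicScopeDepth tracks this nesting so that break, continue
// and exception unwinding can pop the right number of scopes.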

void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, int firstLine, int lastLine)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    if (debugHookID != DidReachBreakpoint)
        return;
#else
    if (!m_shouldEmitDebugHooks)
        return;
#endif
    emitOpcode(op_debug);
    instructions().append(debugHookID);
    instructions().append(firstLine);
    instructions().append(lastLine);
}

void BytecodeGenerator::pushFinallyContext(Label* target, RegisterID* retAddrDst)
{
    ControlFlowContext scope;
    scope.isFinallyBlock = true;
    FinallyContext context = { target, retAddrDst };
    scope.finallyContext = context;
    m_scopeContextStack.append(scope);
    m_finallyDepth++;
}

void BytecodeGenerator::popFinallyContext()
{
    ASSERT(m_scopeContextStack.size());
    ASSERT(m_scopeContextStack.last().isFinallyBlock);
    ASSERT(m_finallyDepth > 0);
    m_scopeContextStack.removeLast();
    m_finallyDepth--;
}

LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
{
    // Reclaim free label scopes.
    //
    // The condition below was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
    // but that sometimes appeared to make GCC enter the loop with size 0, leading to a segfault.
    // We have yet to identify anything in our own code that could legitimately cause the GCC
    // codegen to misbehave in this fashion, so the refactored loop condition below is a workaround.
    while (m_labelScopes.size()) {
        if (m_labelScopes.last().refCount())
            break;
        m_labelScopes.removeLast();
    }

    if (!m_labelScopes.size())
        return 0;

    // We special-case the following, which is a syntax error in Firefox:
    // label:
    //     break;
    if (name.isEmpty()) {
        for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
            LabelScope* scope = &m_labelScopes[i];
            if (scope->type() != LabelScope::NamedLabel) {
                ASSERT(scope->breakTarget());
                return scope;
            }
        }
        return 0;
    }

    for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
        LabelScope* scope = &m_labelScopes[i];
        if (scope->name() && *scope->name() == name) {
            ASSERT(scope->breakTarget());
            return scope;
        }
    }
    return 0;
}
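
// For illustration: given JavaScript along the lines of
//     outer: for (i = 0; i < n; ++i) {
//         for (j = 0; j < n; ++j) {
//             if (stop) break outer;
//         }
//     }
// breakTarget walks m_labelScopes from the innermost scope outwards and returns
// the named scope for "outer", whereas an unlabeled break resolves to the
// nearest scope that is not a plain named label (i.e. the nearest loop or
// switch).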

LabelScope* BytecodeGenerator::continueTarget(const Identifier& name)
{
    // Reclaim free label scopes.
    while (m_labelScopes.size() && !m_labelScopes.last().refCount())
        m_labelScopes.removeLast();

    if (!m_labelScopes.size())
        return 0;

    if (name.isEmpty()) {
        for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
            LabelScope* scope = &m_labelScopes[i];
            if (scope->type() == LabelScope::Loop) {
                ASSERT(scope->continueTarget());
                return scope;
            }
        }
        return 0;
    }

    // Continue to the loop nested nearest to the label scope that matches
    // 'name'.
    LabelScope* result = 0;
    for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
        LabelScope* scope = &m_labelScopes[i];
        if (scope->type() == LabelScope::Loop) {
            ASSERT(scope->continueTarget());
            result = scope;
        }
        if (scope->name() && *scope->name() == name)
            return result; // may be 0
    }
    return 0;
}

PassRefPtr<Label> BytecodeGenerator::emitComplexJumpScopes(Label* target, ControlFlowContext* topScope, ControlFlowContext* bottomScope)
{
    while (topScope > bottomScope) {
        // First we count the number of dynamic scopes we need to remove to get
        // to a finally block.
        int nNormalScopes = 0;
        while (topScope > bottomScope) {
            if (topScope->isFinallyBlock)
                break;
            ++nNormalScopes;
            --topScope;
        }

        if (nNormalScopes) {
            size_t begin = instructions().size();

            // We need to remove a number of dynamic scopes to get to the next
            // finally block
            emitOpcode(op_jmp_scopes);
            instructions().append(nNormalScopes);

            // If topScope == bottomScope then there isn't actually a finally block
            // left to emit, so make the jmp_scopes jump directly to the target label
            if (topScope == bottomScope) {
                instructions().append(target->bind(begin, instructions().size()));
                return target;
            }

            // Otherwise we just use jmp_scopes to pop a group of scopes and go
            // to the next instruction
            RefPtr<Label> nextInsn = newLabel();
            instructions().append(nextInsn->bind(begin, instructions().size()));
            emitLabel(nextInsn.get());
        }

        while (topScope > bottomScope && topScope->isFinallyBlock) {
            emitJumpSubroutine(topScope->finallyContext.retAddrDst, topScope->finallyContext.finallyAddr);
            --topScope;
        }
    }
    return emitJump(target);
}

PassRefPtr<Label> BytecodeGenerator::emitJumpScopes(Label* target, int targetScopeDepth)
{
    ASSERT(scopeDepth() - targetScopeDepth >= 0);
    ASSERT(target->isForward());

    size_t scopeDelta = scopeDepth() - targetScopeDepth;
    ASSERT(scopeDelta <= m_scopeContextStack.size());
    if (!scopeDelta)
        return emitJump(target);

    if (m_finallyDepth)
        return emitComplexJumpScopes(target, &m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);

    size_t begin = instructions().size();

    emitOpcode(op_jmp_scopes);
    instructions().append(scopeDelta);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}

RegisterID* BytecodeGenerator::emitGetPropertyNames(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, Label* breakTarget)
{
    size_t begin = instructions().size();

    emitOpcode(op_get_pnames);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(i->index());
    instructions().append(size->index());
    instructions().append(breakTarget->bind(begin, instructions().size()));
    return dst;
}
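
// For illustration: a for-in loop such as
//     for (var p in o)
//         use(o[p]);
// is roughly lowered as one op_get_pnames before the loop (which also supplies
// the break target used when there is nothing to enumerate) and one
// op_next_pname per iteration to fetch the next enumerable name; the
// ForInContext registered for the loop is what lets emitGetByVal above turn
// o[p] into the faster op_get_by_pname form.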

RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, RegisterID* iter, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_next_pname);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(i->index());
    instructions().append(size->index());
    instructions().append(iter->index());
    instructions().append(target->bind(begin, instructions().size()));
    return dst;
}

RegisterID* BytecodeGenerator::emitCatch(RegisterID* targetRegister, Label* start, Label* end)
{
    m_usesExceptions = true;
#if ENABLE(JIT)
    HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth, CodeLocationLabel() };
#else
    HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth };
#endif

    m_codeBlock->addExceptionHandler(info);
    emitOpcode(op_catch);
    instructions().append(targetRegister->index());
    return targetRegister;
}

void BytecodeGenerator::emitThrowReferenceError(const UString& message)
{
    emitOpcode(op_throw_reference_error);
    instructions().append(addConstantValue(jsString(globalData(), message))->index());
}

PassRefPtr<Label> BytecodeGenerator::emitJumpSubroutine(RegisterID* retAddrDst, Label* finally)
{
    size_t begin = instructions().size();

    emitOpcode(op_jsr);
    instructions().append(retAddrDst->index());
    instructions().append(finally->bind(begin, instructions().size()));
    emitLabel(newLabel().get()); // Record the fact that the next instruction is implicitly labeled, because op_sret will return to it.
    return finally;
}

void BytecodeGenerator::emitSubroutineReturn(RegisterID* retAddrSrc)
{
    emitOpcode(op_sret);
    instructions().append(retAddrSrc->index());
}

void BytecodeGenerator::emitPushNewScope(RegisterID* dst, const Identifier& property, RegisterID* value)
{
    ControlFlowContext context;
    context.isFinallyBlock = false;
    m_scopeContextStack.append(context);
    m_dynamicScopeDepth++;

    emitOpcode(op_push_new_scope);
    instructions().append(dst->index());
    instructions().append(addConstant(property));
    instructions().append(value->index());
}

void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
{
    SwitchInfo info = { instructions().size(), type };
    switch (type) {
    case SwitchInfo::SwitchImmediate:
        emitOpcode(op_switch_imm);
        break;
    case SwitchInfo::SwitchCharacter:
        emitOpcode(op_switch_char);
        break;
    case SwitchInfo::SwitchString:
        emitOpcode(op_switch_string);
        break;
    default:
        ASSERT_NOT_REACHED();
    }

    instructions().append(0); // place holder for table index
    instructions().append(0); // place holder for default target
    instructions().append(scrutineeRegister->index());
    m_switchContextStack.append(info);
}

static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
{
    UNUSED_PARAM(max);
    ASSERT(node->isNumber());
    double value = static_cast<NumberNode*>(node)->value();
    int32_t key = static_cast<int32_t>(value);
    ASSERT(key == value);
    ASSERT(key >= min);
    ASSERT(key <= max);
    return key - min;
}
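
// For illustration: a dense integer switch such as
//     switch (x) {
//     case 3: a(); break;
//     case 4: b(); break;
//     case 6: c(); break;
//     }
// would have min = 3 and max = 6 here, so the table built below gets
// max - min + 1 = 4 branchOffsets slots indexed by key - min; the unused slot
// (for 5) stays 0, which presumably makes it fall through to the default
// target.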

static void prepareJumpTableForImmediateSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
{
    jumpTable.min = min;
    jumpTable.branchOffsets.resize(max - min + 1);
    jumpTable.branchOffsets.fill(0);
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels have been fixed, so
        // the labels should not be "forward" references.
        ASSERT(!labels[i]->isForward());
        jumpTable.add(keyForImmediateSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3));
    }
}

static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
{
    UNUSED_PARAM(max);
    ASSERT(node->isString());
    StringImpl* clause = static_cast<StringNode*>(node)->value().impl();
    ASSERT(clause->length() == 1);

    int32_t key = clause->characters()[0];
    ASSERT(key >= min);
    ASSERT(key <= max);
    return key - min;
}

static void prepareJumpTableForCharacterSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
{
    jumpTable.min = min;
    jumpTable.branchOffsets.resize(max - min + 1);
    jumpTable.branchOffsets.fill(0);
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels have been fixed, so
        // the labels should not be "forward" references.
        ASSERT(!labels[i]->isForward());
        jumpTable.add(keyForCharacterSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3));
    }
}

static void prepareJumpTableForStringSwitch(StringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes)
{
    for (uint32_t i = 0; i < clauseCount; ++i) {
        // We're emitting this after the clause labels have been fixed, so
        // the labels should not be "forward" references.
        ASSERT(!labels[i]->isForward());

        ASSERT(nodes[i]->isString());
        StringImpl* clause = static_cast<StringNode*>(nodes[i])->value().impl();
        OffsetLocation location;
        location.branchOffset = labels[i]->bind(switchAddress, switchAddress + 3);
        jumpTable.offsetTable.add(clause, location);
    }
}
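
// Note on the flow: beginSwitch above writes two zero placeholders (table index
// and default target) right after the switch opcode; endSwitch below patches
// those same slots once every clause label has been bound, and hands the clause
// labels to one of the prepare* helpers to build the matching jump table.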

void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max)
{
    SwitchInfo switchInfo = m_switchContextStack.last();
    m_switchContextStack.removeLast();
    if (switchInfo.switchType == SwitchInfo::SwitchImmediate) {
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfImmediateSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        SimpleJumpTable& jumpTable = m_codeBlock->addImmediateSwitchJumpTable();
        prepareJumpTableForImmediateSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
    } else if (switchInfo.switchType == SwitchInfo::SwitchCharacter) {
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfCharacterSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        SimpleJumpTable& jumpTable = m_codeBlock->addCharacterSwitchJumpTable();
        prepareJumpTableForCharacterSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
    } else {
        ASSERT(switchInfo.switchType == SwitchInfo::SwitchString);
        instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables();
        instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);

        StringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable();
        prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
    }
}

RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
{
    // It would be nice to do an even better job of identifying exactly where the expression is.
    // And we could make the caller pass the node pointer in, if there was some way of getting
    // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
    // is still good enough to get us an accurate line number.
    m_expressionTooDeep = true;
    return newTemporary();
}

void BytecodeGenerator::setIsNumericCompareFunction(bool isNumericCompareFunction)
{
    m_codeBlock->setIsNumericCompareFunction(isNumericCompareFunction);
}

int BytecodeGenerator::argumentNumberFor(const Identifier& ident)
{
    int parameterCount = m_parameters.size(); // includes 'this'
    RegisterID* registerID = registerFor(ident);
    if (!registerID)
        return 0;
    int index = registerID->index() + RegisterFile::CallFrameHeaderSize + parameterCount;
    return (index > 0 && index < parameterCount) ? index : 0;
}

} // namespace JSC