// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_MIPS

// Note on Mips implementation:
//
// The result_register() for mips is the 'v0' register, which is defined
// by the ABI to contain function return values. However, the first
// parameter to a function is defined to be 'a0'. So there are many
// places where we have to move a previous result in v0 to a0 for the
// next call: mov(a0, v0). This is not needed on the other architectures.

#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"

#include "mips/code-stubs-mips.h"
#include "mips/macro-assembler-mips.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)


// A patch site is a location in the code that can be patched. This class
// has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is a andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
// (raw 16 bit immediate value is used) is the delta from the pc to the first
// instruction of the patchable code.
// The marker instruction is effectively a NOP (dest is zero_reg) and will
// never be emitted by normal code.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this, ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Always taken before patched.
    __ Branch(target, eq, at, Operand(zero_reg));
  }

  // When initially emitting this, ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Never taken before patched.
    __ Branch(target, ne, at, Operand(zero_reg));
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
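
// For illustration only (a sketch, not part of the build): EmitPatchInfo
// packs the instruction distance back to the patch site into the
// otherwise-unused fields of the marker instruction.  With
// kImm16Mask == 0xffff, an instruction count d is split as
//
//   rx field   = d / kImm16Mask
//   yyyy field = d % kImm16Mask
//
// so IC patching code that finds the marker at address pc can recover the
// first patchable instruction as roughly
//
//   int d = rx.code() * kImm16Mask + (instr & kImm16Mask);
//   Address patch_site = pc - d * Assembler::kInstrSize;
//
// The names above are assumptions of this sketch; the actual decoding
// lives in the inlined-smi-code patching in the MIPS IC code.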

// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right.  The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o a1: the JS function object being called (i.e. ourselves)
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-mips.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ stop("stop-at");
  }
#endif

  // Strict mode functions and builtins need to replace the receiver
  // with undefined when called as functions (without an explicit
  // receiver object). t1 is zero for method calls and non-zero for
  // function calls.
  if (!info->is_classic_mode() || info->is_native()) {
    Label ok;
    __ Branch(&ok, eq, t1, Operand(zero_reg));
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ sw(a2, MemOperand(sp, receiver_offset));
    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set
  // up the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  // The following three instructions must remain together and unmodified
  // for code aging to work properly.
  __ Push(ra, fp, cp, a1);
  __ nop(Assembler::CODE_AGE_SEQUENCE_NOP);
  // Adjust fp to point to caller's fp.
  __ Addu(fp, sp, Operand(2 * kPointerSize));
  info->AddNoFrameRange(0, masm_->pc_offset());
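  // At this point the prologue above has built the standard JS frame.  A
  // sketch of the layout (word offsets relative to the new fp; the
  // constants in frames-mips.h are authoritative):
  //
  //   fp + 4:  return address (ra)
  //   fp + 0:  caller's fp
  //   fp - 4:  context (cp)
  //   fp - 8:  JS function (a1)   <- sp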

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    ASSERT(!info->function()->is_generator() || locals_count == 0);
    if (locals_count > 0) {
      __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
      for (int i = 0; i < locals_count; i++) {
        __ push(at);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in a1.
    __ push(a1);
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in both v0 and cp.  It replaces the context
    // passed to us.  It's saved on the stack and kept live in cp.
    __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ lw(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextOperand(cp, var->index());
        __ sw(a0, target);

        // Update the write barrier.
        __ RecordWriteContextSlot(
            cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      // Load this again, if it's used by the local context below.
      __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    } else {
      __ mov(a3, a1);
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Addu(a2, fp,
            Operand(StandardFrameConstants::kCallerSPOffset + offset));
    __ li(a1, Operand(Smi::FromInt(num_parameters)));
    __ Push(a3, a2, a1);

    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adaptor frame.
    ArgumentsAccessStub::Type type;
    if (!is_classic_mode()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
    }
    ArgumentsAccessStub stub(type);
    __ CallStub(&stub);

    SetVar(arguments, v0, a1, a2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        ASSERT(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_HARMONY);
        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ LoadRoot(t0, Heap::kStackLimitRootIndex);
      __ Branch(&ok, hs, sp, Operand(t0));
      StackCheckStub stub;
      __ CallStub(&stub);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}


void FullCodeGenerator::ClearAccumulator() {
  ASSERT(Smi::FromInt(0) == 0);
  __ mov(v0, zero_reg);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));
  __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
  __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
    // Self-optimization is a one-off thing: if it fails, don't try again.
    reset_value = Smi::kMaxValue;
  }
  if (isolate()->IsDebuggerActive()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ li(a2, Operand(profiling_counter_));
  __ li(a3, Operand(Smi::FromInt(reset_value)));
  __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
}
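
// A sketch of the interrupt-budget mechanism implemented by the two helpers
// above and used by EmitBackEdgeBookkeeping and EmitReturnSequence below
// (illustrative pseudocode, not extra emitted code):
//
//   counter -= weight;            // weight ~ code size since the back edge
//   if (counter < 0) {
//     InterruptStub();            // give the runtime a chance to act
//     counter = reset_value;      // see EmitProfilingCounterReset above
//   }
//
// The counter lives as a smi in a heap Cell, so the decrement is emitted
// as smi arithmetic on the cell's value.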

void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt, so we
  // need to make sure it is constant.  Branch may emit a skip-or-jump
  // sequence instead of the normal Branch.  It seems that the "skip" part of
  // that sequence is about as long as this Branch would be, so it is safe to
  // ignore that.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;
  int weight = 1;
  if (FLAG_weighted_back_edges) {
    ASSERT(back_edge_target->is_bound());
    int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
    weight = Min(kMaxBackEdgeWeight,
                 Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  __ slt(at, a3, zero_reg);
  __ beq(at, zero_reg, &ok);
  // CallStub will emit a li t9 first, so it is safe to use the delay slot.
  InterruptStub stub;
  __ CallStub(&stub);
  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
      // Pretend that the exit is a backwards jump to the entry.
      int weight = 1;
      if (info_->ShouldSelfOptimize()) {
        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
      } else if (FLAG_weighted_back_edges) {
        int distance = masm_->pc_offset();
        weight = Min(kMaxBackEdgeWeight,
                     Max(1, distance / kCodeSizeMultiplier));
      }
      EmitProfilingCounterDecrement(weight);
      Label ok;
      __ Branch(&ok, ge, a3, Operand(zero_reg));
      __ push(v0);
      if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
        __ lw(a2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
        __ push(a2);
        __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
      } else {
        InterruptStub stub;
        __ CallStub(&stub);
      }
      __ pop(v0);
      EmitProfilingCounterReset();
      __ bind(&ok);
    }

#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    // Make sure that the constant pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      // Here we use masm_-> instead of the __ macro to prevent the code
      // coverage tool from instrumenting, as we rely on the code size here.
      int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
      CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
      __ RecordJSReturn();
      masm_->mov(sp, fp);
      int no_frame_start = masm_->pc_offset();
      masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
      masm_->Addu(sp, sp, Operand(sp_delta));
      masm_->Jump(ra);
      info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
    }

#ifdef DEBUG
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceInstructions <=
           masm_->InstructionsGeneratedSince(&check_exit_codesize));
#endif
  }
}
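
// The Plug methods below implement the full code generator's four
// expression contexts.  As a quick orientation (this summarizes the code
// that follows; it adds no behavior):
//
//   EffectContext           -- the value is discarded.
//   AccumulatorValueContext -- the value is left in v0 (result_register()).
//   StackValueContext       -- the value is pushed onto the operand stack.
//   TestContext             -- the value is converted to a boolean and
//                              control branches to true_label_/false_label_.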

void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  __ push(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ sw(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
  __ push(at);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}
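
// Split() below picks the cheapest branch shape for a test: if the false
// target is the fall-through only the true branch is emitted, if the true
// target is the fall-through the condition is negated and only the false
// branch is emitted, and otherwise a conditional plus an unconditional
// branch are needed.  For example (illustrative):
//
//   Split(eq, v0, Operand(at), if_true, if_false, if_false);
//   // emits: Branch(if_true, eq, v0, Operand(at))  -- falls into if_false.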

void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  __ mov(a0, result_register());
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, RelocInfo::CODE_TARGET, condition->test_id());
  __ mov(at, zero_reg);
  Split(ne, v0, Operand(at), if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand& rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ lw(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sw(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kRAHasBeenSaved,
                              kDontSaveFPRegs);
  }
}
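
// Note on SetVar above (context, not new behavior): a store of a heap
// pointer into a context slot must be reported to the GC via the write
// barrier, since incremental marking and the store buffer would otherwise
// miss the new pointer.  Stack slots need no barrier because the stack is
// always scanned in full.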

void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context.  Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ LoadRoot(t0, Heap::kTrueValueRootIndex);
    Split(eq, a0, Operand(t0), if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext,
             a1, Operand(t0));
    __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext,
             a1, Operand(t0));
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
        __ sw(t0, StackOperand(variable));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sw(at, ContextOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ li(a2, Operand(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      ASSERT(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ li(a1, Operand(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
        __ Push(cp, a2, a1, a0);
      } else {
        ASSERT(Smi::FromInt(0) == 0);
        __ mov(a0, zero_reg);  // Smi::FromInt(0) indicates no initial value.
        __ Push(cp, a2, a1, a0);
      }
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}
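
// A JS-level view of the hole_init handling above (illustrative only):
//
//   { f(); let x = 1; function f() { return x; } }
//
// Calling f() before the 'let' executes reads the hole and must throw a
// ReferenceError, which is why LET/CONST(_HARMONY) slots are pre-filled
// with the_hole_value and checked on load (see EmitVariableLoad below).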

void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), StackOperand(variable));
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ sw(result_register(), ContextOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                a2,
                                kRAHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      __ li(a1, Operand(Smi::FromInt(NONE)));
      __ Push(cp, a2, a1);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ASSERT(variable->location() == Variable::CONTEXT);
  ASSERT(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(a1, scope_->ContextChainLength(scope_->GlobalScope()));
  __ lw(a1, ContextOperand(a1, variable->interface()->Index()));
  __ lw(a1, ContextOperand(a1, Context::EXTENSION_INDEX));

  // Assign it.
  __ sw(a1, ContextOperand(cp, variable->index()));
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(cp,
                            Context::SlotOffset(variable->index()),
                            a1,
                            a3,
                            kRAHasBeenSaved,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(cp, a1, a0);
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ lw(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ or_(a2, a1, a0);
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
    CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
    patch_site.EmitPatchInfo();

    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }
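  // The two loops in this visitor give the emitted code the following
  // overall shape (a sketch of the layout, not extra code):
  //
  //   <push tag value>
  //   test_1: compare with label_1, branch-if-equal to body_1
  //   ...
  //   test_n: compare with label_n, branch-if-equal to body_n
  //   <drop tag>, jump to default body (or to break)
  //   body_1: ... body_n: ...
  //   break_label: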

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over.  If the object is null or undefined,
  // skip over the loop.  See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());  // Result as param to InvokeBuiltin below.
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(at));
  Register null_value = t1;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ Branch(&exit, eq, a0, Operand(null_value));
  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
  __ mov(a0, v0);
  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ bind(&convert);
  __ push(a0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ mov(a0, v0);
  __ bind(&done_convert);
  __ push(a0);

  // Check for proxies.
  Label call_runtime;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(a1, v0);
  __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));

  __ LoadInstanceDescriptors(v0, a2);
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
  __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(v0);  // Map.
  __ li(a0, Operand(Smi::FromInt(0)));
  // Push enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(a2, a1, a0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register v0. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  Handle<Cell> cell = isolate()->factory()->NewCell(
      Handle<Object>(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker),
                     isolate()));
  RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
  __ LoadHeapObject(a1, cell);
  __ li(a2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
  __ sw(a2, FieldMemOperand(a1, Cell::kValueOffset));

  __ li(a1, Operand(Smi::FromInt(1)));  // Smi indicates slow check.
  __ lw(a2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ GetObjectType(a2, a3, a3);
  __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
  __ li(a1, Operand(Smi::FromInt(0)));  // Zero indicates proxy.
  __ bind(&non_proxy);
  __ Push(a1, v0);  // Smi and array.
  __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ li(a0, Operand(Smi::FromInt(0)));
  __ Push(a1, a0);  // Fixed array length (as smi) and initial index.
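
  // Both paths above leave the same five-slot for-in state on the stack
  // (a sketch of the layout consumed by the loop below):
  //
  //   sp[0]:  current index (smi)
  //   sp[4]:  length of the cache/array (smi)
  //   sp[8]:  enum cache array (or fixed array of names)
  //   sp[12]: map of the enumerable, or smi 1 (slow check) / 0 (proxy)
  //   sp[16]: the enumerable object itself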

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  // Load the current count to a0, load the length to a1.
  __ lw(a0, MemOperand(sp, 0 * kPointerSize));
  __ lw(a1, MemOperand(sp, 1 * kPointerSize));
  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));

  // Get the current entry of the array into register a3.
  __ lw(a2, MemOperand(sp, 2 * kPointerSize));
  __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
  __ addu(t0, a2, t0);  // Array base + scaled (smi) index.
  __ lw(a3, MemOperand(t0));  // Current entry.

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register a2.
  __ lw(a2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ lw(a1, MemOperand(sp, 4 * kPointerSize));
  __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Branch(&update_each, eq, t0, Operand(a2));

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  ASSERT_EQ(Smi::FromInt(0), 0);
  __ Branch(&update_each, eq, a2, Operand(zero_reg));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ push(a1);  // Enumerable.
  __ push(a3);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ mov(a3, result_register());
  __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));

  // Update the 'each' property or variable from the possibly filtered
  // entry in register a3.
  __ bind(&update_each);
  __ mov(result_register(), a3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(a0);
  __ Addu(a0, a0, Operand(Smi::FromInt(1)));
  __ push(a0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ Branch(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ Drop(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[@@iterator]()
  VisitForAccumulatorValue(stmt->assign_iterator());
  __ mov(a0, v0);

  // As with for-in, skip the loop if the iterator is null or undefined.
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(loop_statement.break_label(), eq, a0, Operand(at));
  __ LoadRoot(at, Heap::kNullValueRootIndex);
  __ Branch(loop_statement.break_label(), eq, a0, Operand(at));

  // Convert the iterator to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
  __ bind(&convert);
  __ push(a0);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ mov(a0, v0);
  __ bind(&done_convert);
  __ push(a0);

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}
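
// VisitForOfStatement above compiles for-of roughly as this desugaring
// (matching its step comments; shown for orientation only):
//
//   var iterator = iterable[@@iterator]();
//   while (true) {
//     var result = iterator.next();
//     if (result.done) break;
//     each = result.value;
//     <body>
//   }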

void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning.  If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->language_mode(), info->is_generator());
    __ li(a0, Operand(info));
    __ push(a0);
    __ CallStub(&stub);
  } else {
    __ li(a0, Operand(info));
    __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
                              : Heap::kFalseValueRootIndex);
    __ Push(cp, a0, a1);
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(v0);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register current = cp;
  Register next = a1;
  Register temp = a2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      // Load next context in chain.
      __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(t0, Heap::kNativeContextMapRootIndex);
    __ Branch(&fast, eq, temp, Operand(t0));
    // Check that extension is NULL.
    __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX));
    __ Branch(slow, ne, temp, Operand(zero_reg));
    // Load next context in chain.
    __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX));
    __ Branch(&loop);
    __ bind(&fast);
  }

  __ lw(a0, GlobalObjectOperand());
  __ li(a2, Operand(var->name()));
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallIC(ic, mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  ASSERT(var->IsContextSlot());
  Register context = cp;
  Register next = a3;
  Register temp = t0;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
        __ Branch(slow, ne, temp, Operand(zero_reg));
      }
      __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is NULL.
  __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
  __ Branch(slow, ne, temp, Operand(zero_reg));

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
    __ Branch(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET ||
        local->mode() == CONST ||
        local->mode() == CONST_HARMONY) {
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
      if (local->mode() == CONST) {
        __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
        __ Movz(v0, a0, at);  // Conditional move: return Undefined if TheHole.
      } else {  // LET || CONST_HARMONY
        __ Branch(done, ne, at, Operand(zero_reg));
        __ li(a0, Operand(var->name()));
        __ push(a0);
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ Branch(done);
  }
}
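
// Note on the Movz trick above (context only): the MIPS movz instruction
// copies rs into rd if and only if rt is zero.  After the subtract-as-compare
// 'at = v0 - the_hole', at == 0 exactly when v0 holds the hole, so
//
//   __ Movz(v0, a0, at);  // v0 = (v0 == the_hole) ? undefined : v0
//
// maps the hole to undefined without a branch.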

void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      // Use inline caching. Variable name is passed in a2 and the global
      // object (receiver) in a0.
      __ lw(a0, GlobalObjectOperand());
      __ li(a2, Operand(var->name()));
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
      context()->Plug(v0);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code
        // and refers to a potential outside binding. Currently those
        // bindings are always looked up dynamically, i.e. in that case
        // var->location() == LOOKUP always holds.
        ASSERT(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The
        // check can be skipped in the following situation: we have a LET or
        // CONST binding in harmony mode, both the Variable and the
        // VariableProxy have the same declaration scope (i.e. they are both
        // in global code, in the same function or in the same eval code) and
        // the VariableProxy is in the source physically located after the
        // initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          GetVar(v0, var);
          __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
          __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
          if (var->mode() == LET || var->mode() == CONST_HARMONY) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            Label done;
            __ Branch(&done, ne, at, Operand(zero_reg));
            __ li(a0, Operand(var->name()));
            __ push(a0);
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
            __ bind(&done);
          } else {
            // Uninitialized const bindings outside of harmony mode are
            // unholed.
            ASSERT(var->mode() == CONST);
            __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
            __ Movz(v0, a0, at);  // Conditional move: Undefined if TheHole.
          }
          context()->Plug(v0);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      Comment cmnt(masm_, "Lookup variable");
      __ li(a1, Operand(var->name()));
      __ Push(cp, a1);  // Context and name.
      __ CallRuntime(Runtime::kLoadContextSlot, 2);
      __ bind(&done);
      context()->Plug(v0);
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // t1 = materialized value (RegExp literal)
  // t0 = JS function, literals array
  // a3 = literal index
  // a2 = RegExp pattern
  // a1 = RegExp flags
  // a0 = RegExp literal clone
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ lw(t0, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ lw(t1, FieldMemOperand(t0, literal_offset));
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&materialized, ne, t1, Operand(at));

  // Create regexp literal using runtime function.
  // Result will be in v0.
1602 __ li(a3, Operand(Smi::FromInt(expr->literal_index()))); 1603 __ li(a2, Operand(expr->pattern())); 1604 __ li(a1, Operand(expr->flags())); 1605 __ Push(t0, a3, a2, a1); 1606 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); 1607 __ mov(t1, v0); 1608 1609 __ bind(&materialized); 1610 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; 1611 Label allocated, runtime_allocate; 1612 __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT); 1613 __ jmp(&allocated); 1614 1615 __ bind(&runtime_allocate); 1616 __ push(t1); 1617 __ li(a0, Operand(Smi::FromInt(size))); 1618 __ push(a0); 1619 __ CallRuntime(Runtime::kAllocateInNewSpace, 1); 1620 __ pop(t1); 1621 1622 __ bind(&allocated); 1623 1624 // After this, registers are used as follows: 1625 // v0: Newly allocated regexp. 1626 // t1: Materialized regexp. 1627 // a2: temp. 1628 __ CopyFields(v0, t1, a2.bit(), size / kPointerSize); 1629 context()->Plug(v0); 1630 } 1631 1632 1633 void FullCodeGenerator::EmitAccessor(Expression* expression) { 1634 if (expression == NULL) { 1635 __ LoadRoot(a1, Heap::kNullValueRootIndex); 1636 __ push(a1); 1637 } else { 1638 VisitForStackValue(expression); 1639 } 1640 } 1641 1642 1643 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { 1644 Comment cmnt(masm_, "[ ObjectLiteral"); 1645 Handle<FixedArray> constant_properties = expr->constant_properties(); 1646 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1647 __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset)); 1648 __ li(a2, Operand(Smi::FromInt(expr->literal_index()))); 1649 __ li(a1, Operand(constant_properties)); 1650 int flags = expr->fast_elements() 1651 ? ObjectLiteral::kFastElements 1652 : ObjectLiteral::kNoFlags; 1653 flags |= expr->has_function() 1654 ? ObjectLiteral::kHasFunction 1655 : ObjectLiteral::kNoFlags; 1656 __ li(a0, Operand(Smi::FromInt(flags))); 1657 int properties_count = constant_properties->length() / 2; 1658 if ((FLAG_track_double_fields && expr->may_store_doubles()) || 1659 expr->depth() > 1) { 1660 __ Push(a3, a2, a1, a0); 1661 __ CallRuntime(Runtime::kCreateObjectLiteral, 4); 1662 } else if (Serializer::enabled() || flags != ObjectLiteral::kFastElements || 1663 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) { 1664 __ Push(a3, a2, a1, a0); 1665 __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4); 1666 } else { 1667 FastCloneShallowObjectStub stub(properties_count); 1668 __ CallStub(&stub); 1669 } 1670 1671 // If result_saved is true the result is on top of the stack. If 1672 // result_saved is false the result is in v0. 1673 bool result_saved = false; 1674 1675 // Mark all computed expressions that are bound to a key that 1676 // is shadowed by a later occurrence of the same key. For the 1677 // marked expressions, no store code is emitted. 1678 expr->CalculateEmitStore(zone()); 1679 1680 AccessorTable accessor_table(zone()); 1681 for (int i = 0; i < expr->properties()->length(); i++) { 1682 ObjectLiteral::Property* property = expr->properties()->at(i); 1683 if (property->IsCompileTimeValue()) continue; 1684 1685 Literal* key = property->key(); 1686 Expression* value = property->value(); 1687 if (!result_saved) { 1688 __ push(v0); // Save result on stack. 
1689 result_saved = true; 1690 } 1691 switch (property->kind()) { 1692 case ObjectLiteral::Property::CONSTANT: 1693 UNREACHABLE(); 1694 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 1695 ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value())); 1696 // Fall through. 1697 case ObjectLiteral::Property::COMPUTED: 1698 if (key->value()->IsInternalizedString()) { 1699 if (property->emit_store()) { 1700 VisitForAccumulatorValue(value); 1701 __ mov(a0, result_register()); 1702 __ li(a2, Operand(key->value())); 1703 __ lw(a1, MemOperand(sp)); 1704 Handle<Code> ic = is_classic_mode() 1705 ? isolate()->builtins()->StoreIC_Initialize() 1706 : isolate()->builtins()->StoreIC_Initialize_Strict(); 1707 CallIC(ic, RelocInfo::CODE_TARGET, key->LiteralFeedbackId()); 1708 PrepareForBailoutForId(key->id(), NO_REGISTERS); 1709 } else { 1710 VisitForEffect(value); 1711 } 1712 break; 1713 } 1714 // Duplicate receiver on stack. 1715 __ lw(a0, MemOperand(sp)); 1716 __ push(a0); 1717 VisitForStackValue(key); 1718 VisitForStackValue(value); 1719 if (property->emit_store()) { 1720 __ li(a0, Operand(Smi::FromInt(NONE))); // PropertyAttributes. 1721 __ push(a0); 1722 __ CallRuntime(Runtime::kSetProperty, 4); 1723 } else { 1724 __ Drop(3); 1725 } 1726 break; 1727 case ObjectLiteral::Property::PROTOTYPE: 1728 // Duplicate receiver on stack. 1729 __ lw(a0, MemOperand(sp)); 1730 __ push(a0); 1731 VisitForStackValue(value); 1732 if (property->emit_store()) { 1733 __ CallRuntime(Runtime::kSetPrototype, 2); 1734 } else { 1735 __ Drop(2); 1736 } 1737 break; 1738 case ObjectLiteral::Property::GETTER: 1739 accessor_table.lookup(key)->second->getter = value; 1740 break; 1741 case ObjectLiteral::Property::SETTER: 1742 accessor_table.lookup(key)->second->setter = value; 1743 break; 1744 } 1745 } 1746 1747 // Emit code to define accessors, using only a single call to the runtime for 1748 // each pair of corresponding getters and setters. 1749 for (AccessorTable::Iterator it = accessor_table.begin(); 1750 it != accessor_table.end(); 1751 ++it) { 1752 __ lw(a0, MemOperand(sp)); // Duplicate receiver. 
1753 __ push(a0); 1754 VisitForStackValue(it->first); 1755 EmitAccessor(it->second->getter); 1756 EmitAccessor(it->second->setter); 1757 __ li(a0, Operand(Smi::FromInt(NONE))); 1758 __ push(a0); 1759 __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5); 1760 } 1761 1762 if (expr->has_function()) { 1763 ASSERT(result_saved); 1764 __ lw(a0, MemOperand(sp)); 1765 __ push(a0); 1766 __ CallRuntime(Runtime::kToFastProperties, 1); 1767 } 1768 1769 if (result_saved) { 1770 context()->PlugTOS(); 1771 } else { 1772 context()->Plug(v0); 1773 } 1774 } 1775 1776 1777 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { 1778 Comment cmnt(masm_, "[ ArrayLiteral"); 1779 1780 ZoneList<Expression*>* subexprs = expr->values(); 1781 int length = subexprs->length(); 1782 1783 Handle<FixedArray> constant_elements = expr->constant_elements(); 1784 ASSERT_EQ(2, constant_elements->length()); 1785 ElementsKind constant_elements_kind = 1786 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value()); 1787 bool has_fast_elements = 1788 IsFastObjectElementsKind(constant_elements_kind); 1789 Handle<FixedArrayBase> constant_elements_values( 1790 FixedArrayBase::cast(constant_elements->get(1))); 1791 1792 __ mov(a0, result_register()); 1793 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); 1794 __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset)); 1795 __ li(a2, Operand(Smi::FromInt(expr->literal_index()))); 1796 __ li(a1, Operand(constant_elements)); 1797 if (has_fast_elements && constant_elements_values->map() == 1798 isolate()->heap()->fixed_cow_array_map()) { 1799 FastCloneShallowArrayStub stub( 1800 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, 1801 DONT_TRACK_ALLOCATION_SITE, 1802 length); 1803 __ CallStub(&stub); 1804 __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1805 1, a1, a2); 1806 } else if (expr->depth() > 1) { 1807 __ Push(a3, a2, a1); 1808 __ CallRuntime(Runtime::kCreateArrayLiteral, 3); 1809 } else if (Serializer::enabled() || 1810 length > FastCloneShallowArrayStub::kMaximumClonedLength) { 1811 __ Push(a3, a2, a1); 1812 __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3); 1813 } else { 1814 ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) || 1815 FLAG_smi_only_arrays); 1816 FastCloneShallowArrayStub::Mode mode = 1817 FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS; 1818 AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites 1819 ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE; 1820 1821 if (has_fast_elements) { 1822 mode = FastCloneShallowArrayStub::CLONE_ELEMENTS; 1823 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE; 1824 } 1825 1826 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length); 1827 __ CallStub(&stub); 1828 } 1829 1830 bool result_saved = false; // Is the result saved to the stack? 1831 1832 // Emit code to evaluate all the non-constant subexpressions and to store 1833 // them into the newly cloned array. 1834 for (int i = 0; i < length; i++) { 1835 Expression* subexpr = subexprs->at(i); 1836 // If the subexpression is a literal or a simple materialized literal it 1837 // is already set in the cloned array. 
1838 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue; 1839 1840 if (!result_saved) { 1841 __ push(v0); // array literal 1842 __ Push(Smi::FromInt(expr->literal_index())); 1843 result_saved = true; 1844 } 1845 1846 VisitForAccumulatorValue(subexpr); 1847 1848 if (IsFastObjectElementsKind(constant_elements_kind)) { 1849 int offset = FixedArray::kHeaderSize + (i * kPointerSize); 1850 __ lw(t2, MemOperand(sp, kPointerSize)); // Copy of array literal. 1851 __ lw(a1, FieldMemOperand(t2, JSObject::kElementsOffset)); 1852 __ sw(result_register(), FieldMemOperand(a1, offset)); 1853 // Update the write barrier for the array store. 1854 __ RecordWriteField(a1, offset, result_register(), a2, 1855 kRAHasBeenSaved, kDontSaveFPRegs, 1856 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK); 1857 } else { 1858 __ li(a3, Operand(Smi::FromInt(i))); 1859 __ mov(a0, result_register()); 1860 StoreArrayLiteralElementStub stub; 1861 __ CallStub(&stub); 1862 } 1863 1864 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS); 1865 } 1866 if (result_saved) { 1867 __ Pop(); // literal index 1868 context()->PlugTOS(); 1869 } else { 1870 context()->Plug(v0); 1871 } 1872 } 1873 1874 1875 void FullCodeGenerator::VisitAssignment(Assignment* expr) { 1876 Comment cmnt(masm_, "[ Assignment"); 1877 // Invalid left-hand sides are rewritten to have a 'throw ReferenceError' 1878 // on the left-hand side. 1879 if (!expr->target()->IsValidLeftHandSide()) { 1880 VisitForEffect(expr->target()); 1881 return; 1882 } 1883 1884 // Left-hand side can only be a property, a global or a (parameter or local) 1885 // slot. 1886 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; 1887 LhsKind assign_type = VARIABLE; 1888 Property* property = expr->target()->AsProperty(); 1889 if (property != NULL) { 1890 assign_type = (property->key()->IsPropertyName()) 1891 ? NAMED_PROPERTY 1892 : KEYED_PROPERTY; 1893 } 1894 1895 // Evaluate LHS expression. 1896 switch (assign_type) { 1897 case VARIABLE: 1898 // Nothing to do here. 1899 break; 1900 case NAMED_PROPERTY: 1901 if (expr->is_compound()) { 1902 // We need the receiver both on the stack and in the accumulator. 1903 VisitForAccumulatorValue(property->obj()); 1904 __ push(result_register()); 1905 } else { 1906 VisitForStackValue(property->obj()); 1907 } 1908 break; 1909 case KEYED_PROPERTY: 1910 // We need the key and receiver on both the stack and in v0 and a1. 1911 if (expr->is_compound()) { 1912 VisitForStackValue(property->obj()); 1913 VisitForAccumulatorValue(property->key()); 1914 __ lw(a1, MemOperand(sp, 0)); 1915 __ push(v0); 1916 } else { 1917 VisitForStackValue(property->obj()); 1918 VisitForStackValue(property->key()); 1919 } 1920 break; 1921 } 1922 1923 // For compound assignments we need another deoptimization point after the 1924 // variable/property load. 1925 if (expr->is_compound()) { 1926 { AccumulatorValueContext context(this); 1927 switch (assign_type) { 1928 case VARIABLE: 1929 EmitVariableLoad(expr->target()->AsVariableProxy()); 1930 PrepareForBailout(expr->target(), TOS_REG); 1931 break; 1932 case NAMED_PROPERTY: 1933 EmitNamedPropertyLoad(property); 1934 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1935 break; 1936 case KEYED_PROPERTY: 1937 EmitKeyedPropertyLoad(property); 1938 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1939 break; 1940 } 1941 } 1942 1943 Token::Value op = expr->binary_op(); 1944 __ push(v0); // Left operand goes on the stack. 
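// The right operand is evaluated into the accumulator next; both
// EmitInlineSmiBinaryOp and EmitBinaryOp expect the left operand on the
// stack and the right operand in v0.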
1945 VisitForAccumulatorValue(expr->value()); 1946 1947 OverwriteMode mode = expr->value()->ResultOverwriteAllowed() 1948 ? OVERWRITE_RIGHT 1949 : NO_OVERWRITE; 1950 SetSourcePosition(expr->position() + 1); 1951 AccumulatorValueContext context(this); 1952 if (ShouldInlineSmiCase(op)) { 1953 EmitInlineSmiBinaryOp(expr->binary_operation(), 1954 op, 1955 mode, 1956 expr->target(), 1957 expr->value()); 1958 } else { 1959 EmitBinaryOp(expr->binary_operation(), op, mode); 1960 } 1961 1962 // Deoptimization point in case the binary operation may have side effects. 1963 PrepareForBailout(expr->binary_operation(), TOS_REG); 1964 } else { 1965 VisitForAccumulatorValue(expr->value()); 1966 } 1967 1968 // Record source position before possible IC call. 1969 SetSourcePosition(expr->position()); 1970 1971 // Store the value. 1972 switch (assign_type) { 1973 case VARIABLE: 1974 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), 1975 expr->op()); 1976 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 1977 context()->Plug(v0); 1978 break; 1979 case NAMED_PROPERTY: 1980 EmitNamedPropertyAssignment(expr); 1981 break; 1982 case KEYED_PROPERTY: 1983 EmitKeyedPropertyAssignment(expr); 1984 break; 1985 } 1986 } 1987 1988 1989 void FullCodeGenerator::VisitYield(Yield* expr) { 1990 Comment cmnt(masm_, "[ Yield"); 1991 // Evaluate yielded value first; the initial iterator definition depends on 1992 // this. It stays on the stack while we update the iterator. 1993 VisitForStackValue(expr->expression()); 1994 1995 switch (expr->yield_kind()) { 1996 case Yield::SUSPEND: 1997 // Pop value from top-of-stack slot; box result into result register. 1998 EmitCreateIteratorResult(false); 1999 __ push(result_register()); 2000 // Fall through. 2001 case Yield::INITIAL: { 2002 Label suspend, continuation, post_runtime, resume; 2003 2004 __ jmp(&suspend); 2005 2006 __ bind(&continuation); 2007 __ jmp(&resume); 2008 2009 __ bind(&suspend); 2010 VisitForAccumulatorValue(expr->generator_object()); 2011 ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); 2012 __ li(a1, Operand(Smi::FromInt(continuation.pos()))); 2013 __ sw(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset)); 2014 __ sw(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset)); 2015 __ mov(a1, cp); 2016 __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2, 2017 kRAHasBeenSaved, kDontSaveFPRegs); 2018 __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset)); 2019 __ Branch(&post_runtime, eq, sp, Operand(a1)); 2020 __ push(v0); // generator object 2021 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); 2022 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2023 __ bind(&post_runtime); 2024 __ pop(result_register()); 2025 EmitReturnSequence(); 2026 2027 __ bind(&resume); 2028 context()->Plug(result_register()); 2029 break; 2030 } 2031 2032 case Yield::FINAL: { 2033 VisitForAccumulatorValue(expr->generator_object()); 2034 __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed))); 2035 __ sw(a1, FieldMemOperand(result_register(), 2036 JSGeneratorObject::kContinuationOffset)); 2037 // Pop value from top-of-stack slot, box result into result register. 
2038 EmitCreateIteratorResult(true); 2039 EmitUnwindBeforeReturn(); 2040 EmitReturnSequence(); 2041 break; 2042 } 2043 2044 case Yield::DELEGATING: { 2045 VisitForStackValue(expr->generator_object()); 2046 2047 // Initial stack layout is as follows: 2048 // [sp + 1 * kPointerSize] iter 2049 // [sp + 0 * kPointerSize] g 2050 2051 Label l_catch, l_try, l_suspend, l_continuation, l_resume; 2052 Label l_next, l_call, l_loop; 2053 // Initial send value is undefined. 2054 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); 2055 __ Branch(&l_next); 2056 2057 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; } 2058 __ bind(&l_catch); 2059 __ mov(a0, v0); 2060 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos())); 2061 __ LoadRoot(a2, Heap::kthrow_stringRootIndex); // "throw" 2062 __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter 2063 __ push(a3); // iter 2064 __ push(a0); // exception 2065 __ jmp(&l_call); 2066 2067 // try { received = %yield result } 2068 // Shuffle the received result above a try handler and yield it without 2069 // re-boxing. 2070 __ bind(&l_try); 2071 __ pop(a0); // result 2072 __ PushTryHandler(StackHandler::CATCH, expr->index()); 2073 const int handler_size = StackHandlerConstants::kSize; 2074 __ push(a0); // result 2075 __ jmp(&l_suspend); 2076 __ bind(&l_continuation); 2077 __ mov(a0, v0); 2078 __ jmp(&l_resume); 2079 __ bind(&l_suspend); 2080 const int generator_object_depth = kPointerSize + handler_size; 2081 __ lw(a0, MemOperand(sp, generator_object_depth)); 2082 __ push(a0); // g 2083 ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos())); 2084 __ li(a1, Operand(Smi::FromInt(l_continuation.pos()))); 2085 __ sw(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset)); 2086 __ sw(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset)); 2087 __ mov(a1, cp); 2088 __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2, 2089 kRAHasBeenSaved, kDontSaveFPRegs); 2090 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); 2091 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2092 __ pop(v0); // result 2093 EmitReturnSequence(); 2094 __ mov(a0, v0); 2095 __ bind(&l_resume); // received in a0 2096 __ PopTryHandler(); 2097 2098 // receiver = iter; f = 'next'; arg = received; 2099 __ bind(&l_next); 2100 __ LoadRoot(a2, Heap::knext_stringRootIndex); // "next" 2101 __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // iter 2102 __ push(a3); // iter 2103 __ push(a0); // received 2104 2105 // result = receiver[f](arg); 2106 __ bind(&l_call); 2107 Handle<Code> ic = isolate()->stub_cache()->ComputeKeyedCallInitialize(1); 2108 CallIC(ic); 2109 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2110 2111 // if (!result.done) goto l_try; 2112 __ bind(&l_loop); 2113 __ mov(a0, v0); 2114 __ push(a0); // save result 2115 __ LoadRoot(a2, Heap::kdone_stringRootIndex); // "done" 2116 Handle<Code> done_ic = isolate()->builtins()->LoadIC_Initialize(); 2117 CallIC(done_ic); // result.done in v0 2118 __ mov(a0, v0); 2119 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate()); 2120 CallIC(bool_ic); 2121 __ Branch(&l_try, eq, v0, Operand(zero_reg)); 2122 2123 // result.value 2124 __ pop(a0); // result 2125 __ LoadRoot(a2, Heap::kvalue_stringRootIndex); // "value" 2126 Handle<Code> value_ic = isolate()->builtins()->LoadIC_Initialize(); 2127 CallIC(value_ic); // result.value in v0 2128 context()->DropAndPlug(2, v0); // drop iter and g 2129 break; 2130 } 2131 } 2132 } 2133 2134 2135 void 
FullCodeGenerator::EmitGeneratorResume(Expression *generator, 2136 Expression *value, 2137 JSGeneratorObject::ResumeMode resume_mode) { 2138 // The value stays in a0, and is ultimately read by the resumed generator, as 2139 // if the CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. a1 2140 // will hold the generator object until the activation has been resumed. 2141 VisitForStackValue(generator); 2142 VisitForAccumulatorValue(value); 2143 __ pop(a1); 2144 2145 // Check generator state. 2146 Label wrong_state, done; 2147 __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset)); 2148 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting <= 0); 2149 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed <= 0); 2150 __ Branch(&wrong_state, le, a3, Operand(zero_reg)); 2151 2152 // Load suspended function and context. 2153 __ lw(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset)); 2154 __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset)); 2155 2156 // Load receiver and store as the first argument. 2157 __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset)); 2158 __ push(a2); 2159 2160 // Push holes for the rest of the arguments to the generator function. 2161 __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset)); 2162 __ lw(a3, 2163 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset)); 2164 __ LoadRoot(a2, Heap::kTheHoleValueRootIndex); 2165 Label push_argument_holes, push_frame; 2166 __ bind(&push_argument_holes); 2167 __ Subu(a3, a3, Operand(Smi::FromInt(1))); 2168 __ Branch(&push_frame, lt, a3, Operand(zero_reg)); 2169 __ push(a2); 2170 __ jmp(&push_argument_holes); 2171 2172 // Enter a new JavaScript frame, and initialize its slots as they were when 2173 // the generator was suspended. 2174 Label resume_frame; 2175 __ bind(&push_frame); 2176 __ Call(&resume_frame); 2177 __ jmp(&done); 2178 __ bind(&resume_frame); 2179 __ push(ra); // Return address. 2180 __ push(fp); // Caller's frame pointer. 2181 __ mov(fp, sp); 2182 __ push(cp); // Callee's context. 2183 __ push(t0); // Callee's JS Function. 2184 2185 // Load the operand stack size. 2186 __ lw(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset)); 2187 __ lw(a3, FieldMemOperand(a3, FixedArray::kLengthOffset)); 2188 __ SmiUntag(a3); 2189 2190 // If we are sending a value and there is no operand stack, we can jump back 2191 // in directly. 2192 if (resume_mode == JSGeneratorObject::NEXT) { 2193 Label slow_resume; 2194 __ Branch(&slow_resume, ne, a3, Operand(zero_reg)); 2195 __ lw(a3, FieldMemOperand(t0, JSFunction::kCodeEntryOffset)); 2196 __ lw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset)); 2197 __ SmiUntag(a2); 2198 __ Addu(a3, a3, Operand(a2)); 2199 __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting))); 2200 __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset)); 2201 __ Jump(a3); 2202 __ bind(&slow_resume); 2203 } 2204 2205 // Otherwise, we push holes for the operand stack and call the runtime to fix 2206 // up the stack and the handlers. 
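// (The holes only reserve the stack slots; the runtime call below is
// expected to overwrite them with the generator's saved operand stack
// values before resuming.)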
2207 Label push_operand_holes, call_resume;
2208 __ bind(&push_operand_holes);
2209 __ Subu(a3, a3, Operand(1));
2210 __ Branch(&call_resume, lt, a3, Operand(zero_reg));
2211 __ push(a2);
2212 __ Branch(&push_operand_holes);
2213 __ bind(&call_resume);
2214 __ push(a1);
2215 __ push(result_register());
2216 __ Push(Smi::FromInt(resume_mode));
2217 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2218 // Not reached: the runtime call returns elsewhere.
2219 __ stop("not-reached");
2220
2221 // Throw an error if we attempt to operate on a running generator.
2222 __ bind(&wrong_state);
2223 __ push(a1);
2224 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2225
2226 __ bind(&done);
2227 context()->Plug(result_register());
2228 }
2229
2230
2231 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2232 Label gc_required;
2233 Label allocated;
2234
2235 Handle<Map> map(isolate()->native_context()->generator_result_map());
2236
2237 __ Allocate(map->instance_size(), v0, a2, a3, &gc_required, TAG_OBJECT);
2238 __ jmp(&allocated);
2239
2240 __ bind(&gc_required);
2241 __ Push(Smi::FromInt(map->instance_size()));
2242 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2243 __ lw(context_register(),
2244 MemOperand(fp, StandardFrameConstants::kContextOffset));
2245
2246 __ bind(&allocated);
2247 __ li(a1, Operand(map));
2248 __ pop(a2);
2249 __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
2250 __ li(t0, Operand(isolate()->factory()->empty_fixed_array()));
2251 ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
2252 __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2253 __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2254 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
2255 __ sw(a2,
2256 FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset));
2257 __ sw(a3,
2258 FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset));
2259
2260 // Only the value field needs a write barrier, as the other values are in the
2261 // root set.
2262 __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset,
2263 a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
2264 }
2265
2266
2267 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2268 SetSourcePosition(prop->position());
2269 Literal* key = prop->key()->AsLiteral();
2270 __ mov(a0, result_register());
2271 __ li(a2, Operand(key->value()));
2272 // Call load IC. It has arguments receiver and property name in a0 and a2.
2273 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2274 CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
2275 }
2276
2277
2278 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2279 SetSourcePosition(prop->position());
2280 __ mov(a0, result_register());
2281 // Call keyed load IC. It has arguments key and receiver in a0 and a1.
2282 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2283 CallIC(ic, RelocInfo::CODE_TARGET, prop->PropertyFeedbackId());
2284 }
2285
2286
2287 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2288 Token::Value op,
2289 OverwriteMode mode,
2290 Expression* left_expr,
2291 Expression* right_expr) {
2292 Label done, smi_case, stub_call;
2293
2294 Register scratch1 = a2;
2295 Register scratch2 = a3;
2296
2297 // Get the arguments.
2298 Register left = a1;
2299 Register right = a0;
2300 __ pop(left);
2301 __ mov(a0, result_register());
2302
2303 // Perform combined smi check on both operands.
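// Since kSmiTag == 0, or'ing the two operands leaves the low (tag) bit
// clear only if both operands are smis.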
2304 __ Or(scratch1, left, Operand(right));
2305 STATIC_ASSERT(kSmiTag == 0);
2306 JumpPatchSite patch_site(masm_);
2307 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2308
2309 __ bind(&stub_call);
2310 BinaryOpStub stub(op, mode);
2311 CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
2312 expr->BinaryOperationFeedbackId());
2313 patch_site.EmitPatchInfo();
2314 __ jmp(&done);
2315
2316 __ bind(&smi_case);
2317 // Smi case. This code works the same way as the smi-smi case in the type
2318 // recording binary operation stub, see
2319 // BinaryOpStub::GenerateSmiSmiOperation for comments.
2320 switch (op) {
2321 case Token::SAR:
2322 __ Branch(&stub_call); // Unconditional: SAR always takes the stub, so the inline code below is currently unreachable.
2323 __ GetLeastBitsFromSmi(scratch1, right, 5);
2324 __ srav(right, left, scratch1);
2325 __ And(v0, right, Operand(~kSmiTagMask));
2326 break;
2327 case Token::SHL: {
2328 __ Branch(&stub_call); // Unconditional: shifts are currently always punted to the stub.
2329 __ SmiUntag(scratch1, left);
2330 __ GetLeastBitsFromSmi(scratch2, right, 5);
2331 __ sllv(scratch1, scratch1, scratch2);
2332 __ Addu(scratch2, scratch1, Operand(0x40000000));
2333 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2334 __ SmiTag(v0, scratch1);
2335 break;
2336 }
2337 case Token::SHR: {
2338 __ Branch(&stub_call); // Unconditional: see the note on SAR above.
2339 __ SmiUntag(scratch1, left);
2340 __ GetLeastBitsFromSmi(scratch2, right, 5);
2341 __ srlv(scratch1, scratch1, scratch2);
2342 __ And(scratch2, scratch1, 0xc0000000);
2343 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
2344 __ SmiTag(v0, scratch1);
2345 break;
2346 }
2347 case Token::ADD:
2348 __ AdduAndCheckForOverflow(v0, left, right, scratch1);
2349 __ BranchOnOverflow(&stub_call, scratch1);
2350 break;
2351 case Token::SUB:
2352 __ SubuAndCheckForOverflow(v0, left, right, scratch1);
2353 __ BranchOnOverflow(&stub_call, scratch1);
2354 break;
2355 case Token::MUL: {
2356 __ SmiUntag(scratch1, right);
2357 __ Mult(left, scratch1);
2358 __ mflo(scratch1);
2359 __ mfhi(scratch2);
2360 __ sra(scratch1, scratch1, 31);
2361 __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
2362 __ mflo(v0);
2363 __ Branch(&done, ne, v0, Operand(zero_reg));
2364 __ Addu(scratch2, right, left);
2365 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2366 ASSERT(Smi::FromInt(0) == 0);
2367 __ mov(v0, zero_reg);
2368 break;
2369 }
2370 case Token::BIT_OR:
2371 __ Or(v0, left, Operand(right));
2372 break;
2373 case Token::BIT_AND:
2374 __ And(v0, left, Operand(right));
2375 break;
2376 case Token::BIT_XOR:
2377 __ Xor(v0, left, Operand(right));
2378 break;
2379 default:
2380 UNREACHABLE();
2381 }
2382
2383 __ bind(&done);
2384 context()->Plug(v0);
2385 }
2386
2387
2388 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2389 Token::Value op,
2390 OverwriteMode mode) {
2391 __ mov(a0, result_register());
2392 __ pop(a1);
2393 BinaryOpStub stub(op, mode);
2394 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2395 CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
2396 expr->BinaryOperationFeedbackId());
2397 patch_site.EmitPatchInfo();
2398 context()->Plug(v0);
2399 }
2400
2401
2402 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2403 // Invalid left-hand sides are rewritten by the parser to have a 'throw
2404 // ReferenceError' on the left-hand side.
2405 if (!expr->IsValidLeftHandSide()) {
2406 VisitForEffect(expr);
2407 return;
2408 }
2409
2410 // Left-hand side can only be a property, a global or a (parameter or local)
2411 // slot.
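// For example (illustrative):
//   x = v;       // VARIABLE
//   o.name = v;  // NAMED_PROPERTY
//   o[key] = v;  // KEYED_PROPERTY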
2412 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY }; 2413 LhsKind assign_type = VARIABLE; 2414 Property* prop = expr->AsProperty(); 2415 if (prop != NULL) { 2416 assign_type = (prop->key()->IsPropertyName()) 2417 ? NAMED_PROPERTY 2418 : KEYED_PROPERTY; 2419 } 2420 2421 switch (assign_type) { 2422 case VARIABLE: { 2423 Variable* var = expr->AsVariableProxy()->var(); 2424 EffectContext context(this); 2425 EmitVariableAssignment(var, Token::ASSIGN); 2426 break; 2427 } 2428 case NAMED_PROPERTY: { 2429 __ push(result_register()); // Preserve value. 2430 VisitForAccumulatorValue(prop->obj()); 2431 __ mov(a1, result_register()); 2432 __ pop(a0); // Restore value. 2433 __ li(a2, Operand(prop->key()->AsLiteral()->value())); 2434 Handle<Code> ic = is_classic_mode() 2435 ? isolate()->builtins()->StoreIC_Initialize() 2436 : isolate()->builtins()->StoreIC_Initialize_Strict(); 2437 CallIC(ic); 2438 break; 2439 } 2440 case KEYED_PROPERTY: { 2441 __ push(result_register()); // Preserve value. 2442 VisitForStackValue(prop->obj()); 2443 VisitForAccumulatorValue(prop->key()); 2444 __ mov(a1, result_register()); 2445 __ pop(a2); 2446 __ pop(a0); // Restore value. 2447 Handle<Code> ic = is_classic_mode() 2448 ? isolate()->builtins()->KeyedStoreIC_Initialize() 2449 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 2450 CallIC(ic); 2451 break; 2452 } 2453 } 2454 context()->Plug(v0); 2455 } 2456 2457 2458 void FullCodeGenerator::EmitVariableAssignment(Variable* var, 2459 Token::Value op) { 2460 if (var->IsUnallocated()) { 2461 // Global var, const, or let. 2462 __ mov(a0, result_register()); 2463 __ li(a2, Operand(var->name())); 2464 __ lw(a1, GlobalObjectOperand()); 2465 Handle<Code> ic = is_classic_mode() 2466 ? isolate()->builtins()->StoreIC_Initialize() 2467 : isolate()->builtins()->StoreIC_Initialize_Strict(); 2468 CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT); 2469 2470 } else if (op == Token::INIT_CONST) { 2471 // Const initializers need a write barrier. 2472 ASSERT(!var->IsParameter()); // No const parameters. 2473 if (var->IsStackLocal()) { 2474 Label skip; 2475 __ lw(a1, StackOperand(var)); 2476 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); 2477 __ Branch(&skip, ne, a1, Operand(t0)); 2478 __ sw(result_register(), StackOperand(var)); 2479 __ bind(&skip); 2480 } else { 2481 ASSERT(var->IsContextSlot() || var->IsLookupSlot()); 2482 // Like var declarations, const declarations are hoisted to function 2483 // scope. However, unlike var initializers, const initializers are 2484 // able to drill a hole to that function context, even from inside a 2485 // 'with' context. We thus bypass the normal static scope lookup for 2486 // var->IsContextSlot(). 2487 __ push(v0); 2488 __ li(a0, Operand(var->name())); 2489 __ Push(cp, a0); // Context and name. 2490 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3); 2491 } 2492 2493 } else if (var->mode() == LET && op != Token::INIT_LET) { 2494 // Non-initializing assignment to let variable needs a write barrier. 2495 if (var->IsLookupSlot()) { 2496 __ push(v0); // Value. 2497 __ li(a1, Operand(var->name())); 2498 __ li(a0, Operand(Smi::FromInt(language_mode()))); 2499 __ Push(cp, a1, a0); // Context, name, strict mode. 
2500 __ CallRuntime(Runtime::kStoreContextSlot, 4); 2501 } else { 2502 ASSERT(var->IsStackAllocated() || var->IsContextSlot()); 2503 Label assign; 2504 MemOperand location = VarOperand(var, a1); 2505 __ lw(a3, location); 2506 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); 2507 __ Branch(&assign, ne, a3, Operand(t0)); 2508 __ li(a3, Operand(var->name())); 2509 __ push(a3); 2510 __ CallRuntime(Runtime::kThrowReferenceError, 1); 2511 // Perform the assignment. 2512 __ bind(&assign); 2513 __ sw(result_register(), location); 2514 if (var->IsContextSlot()) { 2515 // RecordWrite may destroy all its register arguments. 2516 __ mov(a3, result_register()); 2517 int offset = Context::SlotOffset(var->index()); 2518 __ RecordWriteContextSlot( 2519 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs); 2520 } 2521 } 2522 2523 } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) { 2524 // Assignment to var or initializing assignment to let/const 2525 // in harmony mode. 2526 if (var->IsStackAllocated() || var->IsContextSlot()) { 2527 MemOperand location = VarOperand(var, a1); 2528 if (generate_debug_code_ && op == Token::INIT_LET) { 2529 // Check for an uninitialized let binding. 2530 __ lw(a2, location); 2531 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex); 2532 __ Check(eq, kLetBindingReInitialization, a2, Operand(t0)); 2533 } 2534 // Perform the assignment. 2535 __ sw(v0, location); 2536 if (var->IsContextSlot()) { 2537 __ mov(a3, v0); 2538 int offset = Context::SlotOffset(var->index()); 2539 __ RecordWriteContextSlot( 2540 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs); 2541 } 2542 } else { 2543 ASSERT(var->IsLookupSlot()); 2544 __ push(v0); // Value. 2545 __ li(a1, Operand(var->name())); 2546 __ li(a0, Operand(Smi::FromInt(language_mode()))); 2547 __ Push(cp, a1, a0); // Context, name, strict mode. 2548 __ CallRuntime(Runtime::kStoreContextSlot, 4); 2549 } 2550 } 2551 // Non-initializing assignments to consts are ignored. 2552 } 2553 2554 2555 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { 2556 // Assignment to a property, using a named store IC. 2557 Property* prop = expr->target()->AsProperty(); 2558 ASSERT(prop != NULL); 2559 ASSERT(prop->key()->AsLiteral() != NULL); 2560 2561 // Record source code position before IC call. 2562 SetSourcePosition(expr->position()); 2563 __ mov(a0, result_register()); // Load the value. 2564 __ li(a2, Operand(prop->key()->AsLiteral()->value())); 2565 __ pop(a1); 2566 2567 Handle<Code> ic = is_classic_mode() 2568 ? isolate()->builtins()->StoreIC_Initialize() 2569 : isolate()->builtins()->StoreIC_Initialize_Strict(); 2570 CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId()); 2571 2572 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2573 context()->Plug(v0); 2574 } 2575 2576 2577 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { 2578 // Assignment to a property, using a keyed store IC. 2579 2580 // Record source code position before IC call. 2581 SetSourcePosition(expr->position()); 2582 // Call keyed store IC. 2583 // The arguments are: 2584 // - a0 is the value, 2585 // - a1 is the key, 2586 // - a2 is the receiver. 2587 __ mov(a0, result_register()); 2588 __ pop(a1); // Key. 2589 __ pop(a2); 2590 2591 Handle<Code> ic = is_classic_mode() 2592 ? 
isolate()->builtins()->KeyedStoreIC_Initialize() 2593 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 2594 CallIC(ic, RelocInfo::CODE_TARGET, expr->AssignmentFeedbackId()); 2595 2596 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 2597 context()->Plug(v0); 2598 } 2599 2600 2601 void FullCodeGenerator::VisitProperty(Property* expr) { 2602 Comment cmnt(masm_, "[ Property"); 2603 Expression* key = expr->key(); 2604 2605 if (key->IsPropertyName()) { 2606 VisitForAccumulatorValue(expr->obj()); 2607 EmitNamedPropertyLoad(expr); 2608 PrepareForBailoutForId(expr->LoadId(), TOS_REG); 2609 context()->Plug(v0); 2610 } else { 2611 VisitForStackValue(expr->obj()); 2612 VisitForAccumulatorValue(expr->key()); 2613 __ pop(a1); 2614 EmitKeyedPropertyLoad(expr); 2615 context()->Plug(v0); 2616 } 2617 } 2618 2619 2620 void FullCodeGenerator::CallIC(Handle<Code> code, 2621 RelocInfo::Mode rmode, 2622 TypeFeedbackId id) { 2623 ic_total_count_++; 2624 __ Call(code, rmode, id); 2625 } 2626 2627 2628 void FullCodeGenerator::EmitCallWithIC(Call* expr, 2629 Handle<Object> name, 2630 RelocInfo::Mode mode) { 2631 // Code common for calls using the IC. 2632 ZoneList<Expression*>* args = expr->arguments(); 2633 int arg_count = args->length(); 2634 { PreservePositionScope scope(masm()->positions_recorder()); 2635 for (int i = 0; i < arg_count; i++) { 2636 VisitForStackValue(args->at(i)); 2637 } 2638 __ li(a2, Operand(name)); 2639 } 2640 // Record source position for debugger. 2641 SetSourcePosition(expr->position()); 2642 // Call the IC initialization code. 2643 Handle<Code> ic = 2644 isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode); 2645 CallIC(ic, mode, expr->CallFeedbackId()); 2646 RecordJSReturnSite(expr); 2647 // Restore context register. 2648 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2649 context()->Plug(v0); 2650 } 2651 2652 2653 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr, 2654 Expression* key) { 2655 // Load the key. 2656 VisitForAccumulatorValue(key); 2657 2658 // Swap the name of the function and the receiver on the stack to follow 2659 // the calling convention for call ICs. 2660 __ pop(a1); 2661 __ push(v0); 2662 __ push(a1); 2663 2664 // Code common for calls using the IC. 2665 ZoneList<Expression*>* args = expr->arguments(); 2666 int arg_count = args->length(); 2667 { PreservePositionScope scope(masm()->positions_recorder()); 2668 for (int i = 0; i < arg_count; i++) { 2669 VisitForStackValue(args->at(i)); 2670 } 2671 } 2672 // Record source position for debugger. 2673 SetSourcePosition(expr->position()); 2674 // Call the IC initialization code. 2675 Handle<Code> ic = 2676 isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count); 2677 __ lw(a2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key. 2678 CallIC(ic, RelocInfo::CODE_TARGET, expr->CallFeedbackId()); 2679 RecordJSReturnSite(expr); 2680 // Restore context register. 2681 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2682 context()->DropAndPlug(1, v0); // Drop the key still on the stack. 2683 } 2684 2685 2686 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) { 2687 // Code common for calls using the call stub. 2688 ZoneList<Expression*>* args = expr->arguments(); 2689 int arg_count = args->length(); 2690 { PreservePositionScope scope(masm()->positions_recorder()); 2691 for (int i = 0; i < arg_count; i++) { 2692 VisitForStackValue(args->at(i)); 2693 } 2694 } 2695 // Record source position for debugger. 
2696 SetSourcePosition(expr->position());
2697
2698 // Record call targets.
2699 flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
2700 Handle<Object> uninitialized =
2701 TypeFeedbackCells::UninitializedSentinel(isolate());
2702 Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized);
2703 RecordTypeFeedbackCell(expr->CallFeedbackId(), cell);
2704 __ li(a2, Operand(cell));
2705
2706 CallFunctionStub stub(arg_count, flags);
2707 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2708 __ CallStub(&stub, expr->CallFeedbackId());
2709 RecordJSReturnSite(expr);
2710 // Restore context register.
2711 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2712 context()->DropAndPlug(1, v0);
2713 }
2714
2715
2716 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2717 // Push copy of the first argument or undefined if it doesn't exist.
2718 if (arg_count > 0) {
2719 __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
2720 } else {
2721 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2722 }
2723 __ push(a1);
2724
2725 // Push the receiver of the enclosing function.
2726 int receiver_offset = 2 + info_->scope()->num_parameters();
2727 __ lw(a1, MemOperand(fp, receiver_offset * kPointerSize));
2728 __ push(a1);
2729 // Push the language mode.
2730 __ li(a1, Operand(Smi::FromInt(language_mode())));
2731 __ push(a1);
2732
2733 // Push the start position of the scope the call resides in.
2734 __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
2735 __ push(a1);
2736
2737 // Do the runtime call.
2738 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2739 }
2740
2741
2742 void FullCodeGenerator::VisitCall(Call* expr) {
2743 #ifdef DEBUG
2744 // We want to verify that RecordJSReturnSite gets called on all paths
2745 // through this function. Avoid early returns.
2746 expr->return_is_recorded_ = false;
2747 #endif
2748
2749 Comment cmnt(masm_, "[ Call");
2750 Expression* callee = expr->expression();
2751 VariableProxy* proxy = callee->AsVariableProxy();
2752 Property* property = callee->AsProperty();
2753
2754 if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
2755 // In a call to eval, we first call %ResolvePossiblyDirectEval to
2756 // resolve the function we need to call and the receiver of the
2757 // call. Then we call the resolved function using the given
2758 // arguments.
2759 ZoneList<Expression*>* args = expr->arguments();
2760 int arg_count = args->length();
2761
2762 { PreservePositionScope pos_scope(masm()->positions_recorder());
2763 VisitForStackValue(callee);
2764 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2765 __ push(a2); // Reserved receiver slot.
2766
2767 // Push the arguments.
2768 for (int i = 0; i < arg_count; i++) {
2769 VisitForStackValue(args->at(i));
2770 }
2771
2772 // Push a copy of the function (found below the arguments) and
2773 // resolve eval.
2774 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2775 __ push(a1);
2776 EmitResolvePossiblyDirectEval(arg_count);
2777
2778 // The runtime call returns a pair of values in v0 (function) and
2779 // v1 (receiver). Touch up the stack with the right values.
2780 __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2781 __ sw(v1, MemOperand(sp, arg_count * kPointerSize));
2782 }
2783 // Record source position for debugger.
2784 SetSourcePosition(expr->position()); 2785 CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT); 2786 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize)); 2787 __ CallStub(&stub); 2788 RecordJSReturnSite(expr); 2789 // Restore context register. 2790 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 2791 context()->DropAndPlug(1, v0); 2792 } else if (proxy != NULL && proxy->var()->IsUnallocated()) { 2793 // Push global object as receiver for the call IC. 2794 __ lw(a0, GlobalObjectOperand()); 2795 __ push(a0); 2796 EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT); 2797 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { 2798 // Call to a lookup slot (dynamically introduced variable). 2799 Label slow, done; 2800 2801 { PreservePositionScope scope(masm()->positions_recorder()); 2802 // Generate code for loading from variables potentially shadowed 2803 // by eval-introduced variables. 2804 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done); 2805 } 2806 2807 __ bind(&slow); 2808 // Call the runtime to find the function to call (returned in v0) 2809 // and the object holding it (returned in v1). 2810 __ push(context_register()); 2811 __ li(a2, Operand(proxy->name())); 2812 __ push(a2); 2813 __ CallRuntime(Runtime::kLoadContextSlot, 2); 2814 __ Push(v0, v1); // Function, receiver. 2815 2816 // If fast case code has been generated, emit code to push the 2817 // function and receiver and have the slow path jump around this 2818 // code. 2819 if (done.is_linked()) { 2820 Label call; 2821 __ Branch(&call); 2822 __ bind(&done); 2823 // Push function. 2824 __ push(v0); 2825 // The receiver is implicitly the global receiver. Indicate this 2826 // by passing the hole to the call function stub. 2827 __ LoadRoot(a1, Heap::kTheHoleValueRootIndex); 2828 __ push(a1); 2829 __ bind(&call); 2830 } 2831 2832 // The receiver is either the global receiver or an object found 2833 // by LoadContextSlot. That object could be the hole if the 2834 // receiver is implicitly the global object. 2835 EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT); 2836 } else if (property != NULL) { 2837 { PreservePositionScope scope(masm()->positions_recorder()); 2838 VisitForStackValue(property->obj()); 2839 } 2840 if (property->key()->IsPropertyName()) { 2841 EmitCallWithIC(expr, 2842 property->key()->AsLiteral()->value(), 2843 RelocInfo::CODE_TARGET); 2844 } else { 2845 EmitKeyedCallWithIC(expr, property->key()); 2846 } 2847 } else { 2848 // Call to an arbitrary expression not handled specially above. 2849 { PreservePositionScope scope(masm()->positions_recorder()); 2850 VisitForStackValue(callee); 2851 } 2852 // Load global receiver object. 2853 __ lw(a1, GlobalObjectOperand()); 2854 __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset)); 2855 __ push(a1); 2856 // Emit function call. 2857 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS); 2858 } 2859 2860 #ifdef DEBUG 2861 // RecordJSReturnSite should have been called. 2862 ASSERT(expr->return_is_recorded_); 2863 #endif 2864 } 2865 2866 2867 void FullCodeGenerator::VisitCallNew(CallNew* expr) { 2868 Comment cmnt(masm_, "[ CallNew"); 2869 // According to ECMA-262, section 11.2.2, page 44, the function 2870 // expression in new calls must be evaluated before the 2871 // arguments. 2872 2873 // Push constructor on the stack. If it's not a function it's used as 2874 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is 2875 // ignored. 
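// E.g. for 'new F(x)' (an illustrative snippet), F is pushed first,
// then x; the construct stub below receives the argument count in a0
// and the function in a1.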
2876 VisitForStackValue(expr->expression()); 2877 2878 // Push the arguments ("left-to-right") on the stack. 2879 ZoneList<Expression*>* args = expr->arguments(); 2880 int arg_count = args->length(); 2881 for (int i = 0; i < arg_count; i++) { 2882 VisitForStackValue(args->at(i)); 2883 } 2884 2885 // Call the construct call builtin that handles allocation and 2886 // constructor invocation. 2887 SetSourcePosition(expr->position()); 2888 2889 // Load function and argument count into a1 and a0. 2890 __ li(a0, Operand(arg_count)); 2891 __ lw(a1, MemOperand(sp, arg_count * kPointerSize)); 2892 2893 // Record call targets in unoptimized code. 2894 Handle<Object> uninitialized = 2895 TypeFeedbackCells::UninitializedSentinel(isolate()); 2896 Handle<Cell> cell = isolate()->factory()->NewCell(uninitialized); 2897 RecordTypeFeedbackCell(expr->CallNewFeedbackId(), cell); 2898 __ li(a2, Operand(cell)); 2899 2900 CallConstructStub stub(RECORD_CALL_TARGET); 2901 __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL); 2902 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); 2903 context()->Plug(v0); 2904 } 2905 2906 2907 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { 2908 ZoneList<Expression*>* args = expr->arguments(); 2909 ASSERT(args->length() == 1); 2910 2911 VisitForAccumulatorValue(args->at(0)); 2912 2913 Label materialize_true, materialize_false; 2914 Label* if_true = NULL; 2915 Label* if_false = NULL; 2916 Label* fall_through = NULL; 2917 context()->PrepareTest(&materialize_true, &materialize_false, 2918 &if_true, &if_false, &fall_through); 2919 2920 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2921 __ And(t0, v0, Operand(kSmiTagMask)); 2922 Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through); 2923 2924 context()->Plug(if_true, if_false); 2925 } 2926 2927 2928 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) { 2929 ZoneList<Expression*>* args = expr->arguments(); 2930 ASSERT(args->length() == 1); 2931 2932 VisitForAccumulatorValue(args->at(0)); 2933 2934 Label materialize_true, materialize_false; 2935 Label* if_true = NULL; 2936 Label* if_false = NULL; 2937 Label* fall_through = NULL; 2938 context()->PrepareTest(&materialize_true, &materialize_false, 2939 &if_true, &if_false, &fall_through); 2940 2941 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2942 __ And(at, v0, Operand(kSmiTagMask | 0x80000000)); 2943 Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through); 2944 2945 context()->Plug(if_true, if_false); 2946 } 2947 2948 2949 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) { 2950 ZoneList<Expression*>* args = expr->arguments(); 2951 ASSERT(args->length() == 1); 2952 2953 VisitForAccumulatorValue(args->at(0)); 2954 2955 Label materialize_true, materialize_false; 2956 Label* if_true = NULL; 2957 Label* if_false = NULL; 2958 Label* fall_through = NULL; 2959 context()->PrepareTest(&materialize_true, &materialize_false, 2960 &if_true, &if_false, &fall_through); 2961 2962 __ JumpIfSmi(v0, if_false); 2963 __ LoadRoot(at, Heap::kNullValueRootIndex); 2964 __ Branch(if_true, eq, v0, Operand(at)); 2965 __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset)); 2966 // Undetectable objects behave like undefined when tested with typeof. 
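// (A well-known example is document.all in browser embeddings, which
// the embedder marks as undetectable.)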
2967 __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset)); 2968 __ And(at, a1, Operand(1 << Map::kIsUndetectable)); 2969 __ Branch(if_false, ne, at, Operand(zero_reg)); 2970 __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset)); 2971 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); 2972 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2973 Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE), 2974 if_true, if_false, fall_through); 2975 2976 context()->Plug(if_true, if_false); 2977 } 2978 2979 2980 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) { 2981 ZoneList<Expression*>* args = expr->arguments(); 2982 ASSERT(args->length() == 1); 2983 2984 VisitForAccumulatorValue(args->at(0)); 2985 2986 Label materialize_true, materialize_false; 2987 Label* if_true = NULL; 2988 Label* if_false = NULL; 2989 Label* fall_through = NULL; 2990 context()->PrepareTest(&materialize_true, &materialize_false, 2991 &if_true, &if_false, &fall_through); 2992 2993 __ JumpIfSmi(v0, if_false); 2994 __ GetObjectType(v0, a1, a1); 2995 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2996 Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE), 2997 if_true, if_false, fall_through); 2998 2999 context()->Plug(if_true, if_false); 3000 } 3001 3002 3003 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) { 3004 ZoneList<Expression*>* args = expr->arguments(); 3005 ASSERT(args->length() == 1); 3006 3007 VisitForAccumulatorValue(args->at(0)); 3008 3009 Label materialize_true, materialize_false; 3010 Label* if_true = NULL; 3011 Label* if_false = NULL; 3012 Label* fall_through = NULL; 3013 context()->PrepareTest(&materialize_true, &materialize_false, 3014 &if_true, &if_false, &fall_through); 3015 3016 __ JumpIfSmi(v0, if_false); 3017 __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); 3018 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset)); 3019 __ And(at, a1, Operand(1 << Map::kIsUndetectable)); 3020 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3021 Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through); 3022 3023 context()->Plug(if_true, if_false); 3024 } 3025 3026 3027 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf( 3028 CallRuntime* expr) { 3029 ZoneList<Expression*>* args = expr->arguments(); 3030 ASSERT(args->length() == 1); 3031 3032 VisitForAccumulatorValue(args->at(0)); 3033 3034 Label materialize_true, materialize_false; 3035 Label* if_true = NULL; 3036 Label* if_false = NULL; 3037 Label* fall_through = NULL; 3038 context()->PrepareTest(&materialize_true, &materialize_false, 3039 &if_true, &if_false, &fall_through); 3040 3041 __ AssertNotSmi(v0); 3042 3043 __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); 3044 __ lbu(t0, FieldMemOperand(a1, Map::kBitField2Offset)); 3045 __ And(t0, t0, 1 << Map::kStringWrapperSafeForDefaultValueOf); 3046 __ Branch(if_true, ne, t0, Operand(zero_reg)); 3047 3048 // Check for fast case object. Generate false result for slow case object. 3049 __ lw(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset)); 3050 __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset)); 3051 __ LoadRoot(t0, Heap::kHashTableMapRootIndex); 3052 __ Branch(if_false, eq, a2, Operand(t0)); 3053 3054 // Look for valueOf name in the descriptor array, and indicate false if 3055 // found. Since we omit an enumeration index check, if it is added via a 3056 // transition that shares its descriptor array, this is a false positive. 
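// Such a false positive is harmless: it can only make the answer
// conservatively false, never incorrectly true.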
3057 Label entry, loop, done;
3058
3059 // Skip loop if no descriptors are valid.
3060 __ NumberOfOwnDescriptors(a3, a1);
3061 __ Branch(&done, eq, a3, Operand(zero_reg));
3062
3063 __ LoadInstanceDescriptors(a1, t0);
3064 // t0: descriptor array.
3065 // a3: valid entries in the descriptor array.
3066 STATIC_ASSERT(kSmiTag == 0);
3067 STATIC_ASSERT(kSmiTagSize == 1);
3068 STATIC_ASSERT(kPointerSize == 4);
3069 __ li(at, Operand(DescriptorArray::kDescriptorSize));
3070 __ Mul(a3, a3, at);
3071 // Calculate location of the first key name.
3072 __ Addu(t0, t0, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3073 // Calculate the end of the descriptor array.
3074 __ mov(a2, t0);
3075 __ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize);
3076 __ Addu(a2, a2, t1);
3077
3078 // Loop through all the keys in the descriptor array. If one of these is the
3079 // string "valueOf", the result is false.
3080 // The use of t2 to store the valueOf string assumes that it is not otherwise
3081 // used in the loop below.
3082 __ li(t2, Operand(isolate()->factory()->value_of_string()));
3083 __ jmp(&entry);
3084 __ bind(&loop);
3085 __ lw(a3, MemOperand(t0, 0));
3086 __ Branch(if_false, eq, a3, Operand(t2));
3087 __ Addu(t0, t0, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3088 __ bind(&entry);
3089 __ Branch(&loop, ne, t0, Operand(a2));
3090
3091 __ bind(&done);
3092 // If a valueOf property is not found on the object, check that its
3093 // prototype is the unmodified String prototype. If not, the result is false.
3094 __ lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
3095 __ JumpIfSmi(a2, if_false);
3096 __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3097 __ lw(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3098 __ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
3099 __ lw(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3100 __ Branch(if_false, ne, a2, Operand(a3));
3101
3102 // Set the bit in the map to indicate that it has been checked safe for
3103 // default valueOf and set true result.
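// (This caches the answer: the next query on an object with this map
// takes the early if_true branch above.)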
3104 __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset)); 3105 __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf)); 3106 __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset)); 3107 __ jmp(if_true); 3108 3109 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3110 context()->Plug(if_true, if_false); 3111 } 3112 3113 3114 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) { 3115 ZoneList<Expression*>* args = expr->arguments(); 3116 ASSERT(args->length() == 1); 3117 3118 VisitForAccumulatorValue(args->at(0)); 3119 3120 Label materialize_true, materialize_false; 3121 Label* if_true = NULL; 3122 Label* if_false = NULL; 3123 Label* fall_through = NULL; 3124 context()->PrepareTest(&materialize_true, &materialize_false, 3125 &if_true, &if_false, &fall_through); 3126 3127 __ JumpIfSmi(v0, if_false); 3128 __ GetObjectType(v0, a1, a2); 3129 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3130 __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE)); 3131 __ Branch(if_false); 3132 3133 context()->Plug(if_true, if_false); 3134 } 3135 3136 3137 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { 3138 ZoneList<Expression*>* args = expr->arguments(); 3139 ASSERT(args->length() == 1); 3140 3141 VisitForAccumulatorValue(args->at(0)); 3142 3143 Label materialize_true, materialize_false; 3144 Label* if_true = NULL; 3145 Label* if_false = NULL; 3146 Label* fall_through = NULL; 3147 context()->PrepareTest(&materialize_true, &materialize_false, 3148 &if_true, &if_false, &fall_through); 3149 3150 __ JumpIfSmi(v0, if_false); 3151 __ GetObjectType(v0, a1, a1); 3152 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3153 Split(eq, a1, Operand(JS_ARRAY_TYPE), 3154 if_true, if_false, fall_through); 3155 3156 context()->Plug(if_true, if_false); 3157 } 3158 3159 3160 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) { 3161 ZoneList<Expression*>* args = expr->arguments(); 3162 ASSERT(args->length() == 1); 3163 3164 VisitForAccumulatorValue(args->at(0)); 3165 3166 Label materialize_true, materialize_false; 3167 Label* if_true = NULL; 3168 Label* if_false = NULL; 3169 Label* fall_through = NULL; 3170 context()->PrepareTest(&materialize_true, &materialize_false, 3171 &if_true, &if_false, &fall_through); 3172 3173 __ JumpIfSmi(v0, if_false); 3174 __ GetObjectType(v0, a1, a1); 3175 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3176 Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through); 3177 3178 context()->Plug(if_true, if_false); 3179 } 3180 3181 3182 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) { 3183 ASSERT(expr->arguments()->length() == 0); 3184 3185 Label materialize_true, materialize_false; 3186 Label* if_true = NULL; 3187 Label* if_false = NULL; 3188 Label* fall_through = NULL; 3189 context()->PrepareTest(&materialize_true, &materialize_false, 3190 &if_true, &if_false, &fall_through); 3191 3192 // Get the frame pointer for the calling frame. 3193 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 3194 3195 // Skip the arguments adaptor frame if it exists. 3196 Label check_frame_marker; 3197 __ lw(a1, MemOperand(a2, StandardFrameConstants::kContextOffset)); 3198 __ Branch(&check_frame_marker, ne, 3199 a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 3200 __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset)); 3201 3202 // Check the marker in the calling frame. 
3203 __ bind(&check_frame_marker); 3204 __ lw(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset)); 3205 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3206 Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)), 3207 if_true, if_false, fall_through); 3208 3209 context()->Plug(if_true, if_false); 3210 } 3211 3212 3213 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) { 3214 ZoneList<Expression*>* args = expr->arguments(); 3215 ASSERT(args->length() == 2); 3216 3217 // Load the two objects into registers and perform the comparison. 3218 VisitForStackValue(args->at(0)); 3219 VisitForAccumulatorValue(args->at(1)); 3220 3221 Label materialize_true, materialize_false; 3222 Label* if_true = NULL; 3223 Label* if_false = NULL; 3224 Label* fall_through = NULL; 3225 context()->PrepareTest(&materialize_true, &materialize_false, 3226 &if_true, &if_false, &fall_through); 3227 3228 __ pop(a1); 3229 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3230 Split(eq, v0, Operand(a1), if_true, if_false, fall_through); 3231 3232 context()->Plug(if_true, if_false); 3233 } 3234 3235 3236 void FullCodeGenerator::EmitArguments(CallRuntime* expr) { 3237 ZoneList<Expression*>* args = expr->arguments(); 3238 ASSERT(args->length() == 1); 3239 3240 // ArgumentsAccessStub expects the key in a1 and the formal 3241 // parameter count in a0. 3242 VisitForAccumulatorValue(args->at(0)); 3243 __ mov(a1, v0); 3244 __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); 3245 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT); 3246 __ CallStub(&stub); 3247 context()->Plug(v0); 3248 } 3249 3250 3251 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) { 3252 ASSERT(expr->arguments()->length() == 0); 3253 Label exit; 3254 // Get the number of formal parameters. 3255 __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters()))); 3256 3257 // Check if the calling frame is an arguments adaptor frame. 3258 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); 3259 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset)); 3260 __ Branch(&exit, ne, a3, 3261 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 3262 3263 // Arguments adaptor case: Read the arguments length from the 3264 // adaptor frame. 3265 __ lw(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset)); 3266 3267 __ bind(&exit); 3268 context()->Plug(v0); 3269 } 3270 3271 3272 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { 3273 ZoneList<Expression*>* args = expr->arguments(); 3274 ASSERT(args->length() == 1); 3275 Label done, null, function, non_function_constructor; 3276 3277 VisitForAccumulatorValue(args->at(0)); 3278 3279 // If the object is a smi, we return null. 3280 __ JumpIfSmi(v0, &null); 3281 3282 // Check that the object is a JS object but take special care of JS 3283 // functions to make sure they have 'Function' as their class. 3284 // Assume that there are only two callable types, and one of them is at 3285 // either end of the type range for JS object types. Saves extra comparisons. 3286 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); 3287 __ GetObjectType(v0, v0, a1); // Map is now in v0. 
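  // The STATIC_ASSERTs around this sequence pin the two callable instance
  // types to the two ends of the spec-object range, so the classification
  // below needs only three compares on the instance type in a1:
  //
  //   type <  FIRST_SPEC_OBJECT_TYPE  -> class is null (not a JS object)
  //   type == FIRST_SPEC_OBJECT_TYPE  -> class is 'Function' (callable)
  //   type == LAST_SPEC_OBJECT_TYPE   -> class is 'Function' (callable)
  //   anything in between             -> read the class off map->constructor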
3288   __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3289
3290   STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3291                 FIRST_SPEC_OBJECT_TYPE + 1);
3292   __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3293
3294   STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3295                 LAST_SPEC_OBJECT_TYPE - 1);
3296   __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
3297   // Assume that there is no larger type.
3298   STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3299
3300   // Check if the constructor in the map is a JS function.
3301   __ lw(v0, FieldMemOperand(v0, Map::kConstructorOffset));
3302   __ GetObjectType(v0, a1, a1);
3303   __ Branch(&non_function_constructor, ne, a1, Operand(JS_FUNCTION_TYPE));
3304
3305   // v0 now contains the constructor function. Grab the
3306   // instance class name from there.
3307   __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
3308   __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
3309   __ Branch(&done);
3310
3311   // Functions have class 'Function'.
3312   __ bind(&function);
3313   __ LoadRoot(v0, Heap::kfunction_class_stringRootIndex);
3314   __ jmp(&done);
3315
3316   // Objects with a non-function constructor have class 'Object'.
3317   __ bind(&non_function_constructor);
3318   __ LoadRoot(v0, Heap::kObject_stringRootIndex);
3319   __ jmp(&done);
3320
3321   // Non-JS objects have class null.
3322   __ bind(&null);
3323   __ LoadRoot(v0, Heap::kNullValueRootIndex);
3324
3325   // All done.
3326   __ bind(&done);
3327
3328   context()->Plug(v0);
3329 }
3330
3331
3332 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
3333   // Conditionally generate a log call.
3334   // Args:
3335   //   0 (literal string): The type of logging (corresponds to the flags).
3336   //     This is used to determine whether or not to generate the log call.
3337   //   1 (string): Format string. Access the string at argument index 2
3338   //     with '%2s' (see Logger::LogRuntime for all the formats).
3339   //   2 (array): Arguments to the format string.
3340   ZoneList<Expression*>* args = expr->arguments();
3341   ASSERT_EQ(args->length(), 3);
3342   if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
3343     VisitForStackValue(args->at(1));
3344     VisitForStackValue(args->at(2));
3345     __ CallRuntime(Runtime::kLog, 2);
3346   }
3347
3348   // Finally, we're expected to leave a value on the top of the stack.
3349   __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3350   context()->Plug(v0);
3351 }
3352
3353
3354 void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
3355   ASSERT(expr->arguments()->length() == 0);
3356   Label slow_allocate_heapnumber;
3357   Label heapnumber_allocated;
3358
3359   // Save the new heap number in callee-saved register s0, since
3360   // we call out to external C code below.
3361   __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
3362   __ AllocateHeapNumber(s0, a1, a2, t6, &slow_allocate_heapnumber);
3363   __ jmp(&heapnumber_allocated);
3364
3365   __ bind(&slow_allocate_heapnumber);
3366
3367   // Allocate a heap number.
3368   __ CallRuntime(Runtime::kNumberAlloc, 0);
3369   __ mov(s0, v0);  // Save result in s0, so it survives the C function call.
3370
3371   __ bind(&heapnumber_allocated);
3372
3373   // Convert 32 random bits in v0 to 0.(32 random bits) in a double
3374   // by computing:
3375   // (1.(20 0s)(32 random bits) x 2^20) - (1.0 x 2^20).
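  // A host-side sketch of the same bit trick (illustrative only; the emitted
  // code performs it with FPU moves on the 32 random bits returned in v0).
  // 0x41300000 is the high word of the double 1.0 * 2^20, so splicing the
  // random bits into the low mantissa word yields 2^20 + r * 2^-32:
  //
  //   #include <cstdint>
  //   #include <cstring>
  //   double RandomFromBits(uint32_t random_bits) {  // hypothetical helper
  //     uint64_t bits =
  //         (static_cast<uint64_t>(0x41300000) << 32) | random_bits;
  //     double d;
  //     std::memcpy(&d, &bits, sizeof(d));  // 1.(20 zeros)(random) * 2^20
  //     return d - 1048576.0;               // subtract 1.0 * 2^20
  //   }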
3376 __ PrepareCallCFunction(1, a0); 3377 __ lw(a0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); 3378 __ lw(a0, FieldMemOperand(a0, GlobalObject::kNativeContextOffset)); 3379 __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1); 3380 3381 // 0x41300000 is the top half of 1.0 x 2^20 as a double. 3382 __ li(a1, Operand(0x41300000)); 3383 // Move 0x41300000xxxxxxxx (x = random bits in v0) to FPU. 3384 __ Move(f12, v0, a1); 3385 // Move 0x4130000000000000 to FPU. 3386 __ Move(f14, zero_reg, a1); 3387 // Subtract and store the result in the heap number. 3388 __ sub_d(f0, f12, f14); 3389 __ sdc1(f0, FieldMemOperand(s0, HeapNumber::kValueOffset)); 3390 __ mov(v0, s0); 3391 3392 context()->Plug(v0); 3393 } 3394 3395 3396 void FullCodeGenerator::EmitSubString(CallRuntime* expr) { 3397 // Load the arguments on the stack and call the stub. 3398 SubStringStub stub; 3399 ZoneList<Expression*>* args = expr->arguments(); 3400 ASSERT(args->length() == 3); 3401 VisitForStackValue(args->at(0)); 3402 VisitForStackValue(args->at(1)); 3403 VisitForStackValue(args->at(2)); 3404 __ CallStub(&stub); 3405 context()->Plug(v0); 3406 } 3407 3408 3409 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) { 3410 // Load the arguments on the stack and call the stub. 3411 RegExpExecStub stub; 3412 ZoneList<Expression*>* args = expr->arguments(); 3413 ASSERT(args->length() == 4); 3414 VisitForStackValue(args->at(0)); 3415 VisitForStackValue(args->at(1)); 3416 VisitForStackValue(args->at(2)); 3417 VisitForStackValue(args->at(3)); 3418 __ CallStub(&stub); 3419 context()->Plug(v0); 3420 } 3421 3422 3423 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) { 3424 ZoneList<Expression*>* args = expr->arguments(); 3425 ASSERT(args->length() == 1); 3426 3427 VisitForAccumulatorValue(args->at(0)); // Load the object. 3428 3429 Label done; 3430 // If the object is a smi return the object. 3431 __ JumpIfSmi(v0, &done); 3432 // If the object is not a value type, return the object. 3433 __ GetObjectType(v0, a1, a1); 3434 __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE)); 3435 3436 __ lw(v0, FieldMemOperand(v0, JSValue::kValueOffset)); 3437 3438 __ bind(&done); 3439 context()->Plug(v0); 3440 } 3441 3442 3443 void FullCodeGenerator::EmitDateField(CallRuntime* expr) { 3444 ZoneList<Expression*>* args = expr->arguments(); 3445 ASSERT(args->length() == 2); 3446 ASSERT_NE(NULL, args->at(1)->AsLiteral()); 3447 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value())); 3448 3449 VisitForAccumulatorValue(args->at(0)); // Load the object. 
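  // JSDate caches its broken-down fields (year, month, day, ...) up to
  // kFirstUncachedField, guarded by a global date-cache stamp that the
  // runtime bumps whenever timezone or DST data changes. The fast path
  // below may read a cached field only while the object's stamp matches
  // the current stamp; otherwise it falls back to the C function, which
  // recomputes the field and refreshes the cache.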
3450
3451   Label runtime, done, not_date_object;
3452   Register object = v0;
3453   Register result = v0;
3454   Register scratch0 = t5;
3455   Register scratch1 = a1;
3456
3457   __ JumpIfSmi(object, &not_date_object);
3458   __ GetObjectType(object, scratch1, scratch1);
3459   __ Branch(&not_date_object, ne, scratch1, Operand(JS_DATE_TYPE));
3460
3461   if (index->value() == 0) {
3462     __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
3463     __ jmp(&done);
3464   } else {
3465     if (index->value() < JSDate::kFirstUncachedField) {
3466       ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3467       __ li(scratch1, Operand(stamp));
3468       __ lw(scratch1, MemOperand(scratch1));
3469       __ lw(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3470       __ Branch(&runtime, ne, scratch1, Operand(scratch0));
3471       __ lw(result, FieldMemOperand(object, JSDate::kValueOffset +
3472                                             kPointerSize * index->value()));
3473       __ jmp(&done);
3474     }
3475     __ bind(&runtime);
3476     __ PrepareCallCFunction(2, scratch1);
3477     __ li(a1, Operand(index));
3478     __ Move(a0, object);
3479     __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3480     __ jmp(&done);
3481   }
3482
3483   __ bind(&not_date_object);
3484   __ CallRuntime(Runtime::kThrowNotDateError, 0);
3485   __ bind(&done);
3486   context()->Plug(v0);
3487 }
3488
3489
3490 void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string,
3491                                                   Register index,
3492                                                   Register value,
3493                                                   uint32_t encoding_mask) {
3494   __ And(at, index, Operand(kSmiTagMask));
3495   __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3496   __ And(at, value, Operand(kSmiTagMask));
3497   __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3498
3499   __ lw(at, FieldMemOperand(string, String::kLengthOffset));
3500   __ Check(lt, kIndexIsTooLarge, index, Operand(at));
3501
3502   __ Check(ge, kIndexIsNegative, index, Operand(zero_reg));
3503
3504   __ lw(at, FieldMemOperand(string, HeapObject::kMapOffset));
3505   __ lbu(at, FieldMemOperand(at, Map::kInstanceTypeOffset));
3506
3507   __ And(at, at, Operand(kStringRepresentationMask | kStringEncodingMask));
3508   __ Subu(at, at, Operand(encoding_mask));
3509   __ Check(eq, kUnexpectedStringType, at, Operand(zero_reg));
3510 }
3511
3512
3513 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3514   ZoneList<Expression*>* args = expr->arguments();
3515   ASSERT_EQ(3, args->length());
3516
3517   Register string = v0;
3518   Register index = a1;
3519   Register value = a2;
3520
3521   VisitForStackValue(args->at(1));  // index
3522   VisitForStackValue(args->at(2));  // value
3523   __ pop(value);
3524   __ pop(index);
3525   VisitForAccumulatorValue(args->at(0));  // string
3526
3527   if (FLAG_debug_code) {
3528     static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3529     EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3530   }
3531
3532   __ SmiUntag(value, value);
3533   __ Addu(at,
3534           string,
3535           Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3536   __ SmiUntag(index);
3537   __ Addu(at, at, index);
3538   __ sb(value, MemOperand(at));
3539   context()->Plug(string);
3540 }
3541
3542
3543 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3544   ZoneList<Expression*>* args = expr->arguments();
3545   ASSERT_EQ(3, args->length());
3546
3547   Register string = v0;
3548   Register index = a1;
3549   Register value = a2;
3550
3551   VisitForStackValue(args->at(1));  // index
3552   VisitForStackValue(args->at(2));  // value
3553   __ pop(value);
3554   __ pop(index);
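  // The index and value were spilled to the stack and popped into fixed
  // registers first, so the string can be evaluated last directly into the
  // accumulator (v0), where the character store below expects it.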
VisitForAccumulatorValue(args->at(0)); // string 3556 3557 if (FLAG_debug_code) { 3558 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; 3559 EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type); 3560 } 3561 3562 __ SmiUntag(value, value); 3563 __ Addu(at, 3564 string, 3565 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); 3566 __ Addu(at, at, index); 3567 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0); 3568 __ sh(value, MemOperand(at)); 3569 context()->Plug(string); 3570 } 3571 3572 3573 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) { 3574 // Load the arguments on the stack and call the runtime function. 3575 ZoneList<Expression*>* args = expr->arguments(); 3576 ASSERT(args->length() == 2); 3577 VisitForStackValue(args->at(0)); 3578 VisitForStackValue(args->at(1)); 3579 MathPowStub stub(MathPowStub::ON_STACK); 3580 __ CallStub(&stub); 3581 context()->Plug(v0); 3582 } 3583 3584 3585 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) { 3586 ZoneList<Expression*>* args = expr->arguments(); 3587 ASSERT(args->length() == 2); 3588 3589 VisitForStackValue(args->at(0)); // Load the object. 3590 VisitForAccumulatorValue(args->at(1)); // Load the value. 3591 __ pop(a1); // v0 = value. a1 = object. 3592 3593 Label done; 3594 // If the object is a smi, return the value. 3595 __ JumpIfSmi(a1, &done); 3596 3597 // If the object is not a value type, return the value. 3598 __ GetObjectType(a1, a2, a2); 3599 __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE)); 3600 3601 // Store the value. 3602 __ sw(v0, FieldMemOperand(a1, JSValue::kValueOffset)); 3603 // Update the write barrier. Save the value as it will be 3604 // overwritten by the write barrier code and is needed afterward. 3605 __ mov(a2, v0); 3606 __ RecordWriteField( 3607 a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs); 3608 3609 __ bind(&done); 3610 context()->Plug(v0); 3611 } 3612 3613 3614 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) { 3615 ZoneList<Expression*>* args = expr->arguments(); 3616 ASSERT_EQ(args->length(), 1); 3617 3618 // Load the argument on the stack and call the stub. 
3619 VisitForStackValue(args->at(0)); 3620 3621 NumberToStringStub stub; 3622 __ CallStub(&stub); 3623 context()->Plug(v0); 3624 } 3625 3626 3627 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { 3628 ZoneList<Expression*>* args = expr->arguments(); 3629 ASSERT(args->length() == 1); 3630 3631 VisitForAccumulatorValue(args->at(0)); 3632 3633 Label done; 3634 StringCharFromCodeGenerator generator(v0, a1); 3635 generator.GenerateFast(masm_); 3636 __ jmp(&done); 3637 3638 NopRuntimeCallHelper call_helper; 3639 generator.GenerateSlow(masm_, call_helper); 3640 3641 __ bind(&done); 3642 context()->Plug(a1); 3643 } 3644 3645 3646 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) { 3647 ZoneList<Expression*>* args = expr->arguments(); 3648 ASSERT(args->length() == 2); 3649 3650 VisitForStackValue(args->at(0)); 3651 VisitForAccumulatorValue(args->at(1)); 3652 __ mov(a0, result_register()); 3653 3654 Register object = a1; 3655 Register index = a0; 3656 Register result = v0; 3657 3658 __ pop(object); 3659 3660 Label need_conversion; 3661 Label index_out_of_range; 3662 Label done; 3663 StringCharCodeAtGenerator generator(object, 3664 index, 3665 result, 3666 &need_conversion, 3667 &need_conversion, 3668 &index_out_of_range, 3669 STRING_INDEX_IS_NUMBER); 3670 generator.GenerateFast(masm_); 3671 __ jmp(&done); 3672 3673 __ bind(&index_out_of_range); 3674 // When the index is out of range, the spec requires us to return 3675 // NaN. 3676 __ LoadRoot(result, Heap::kNanValueRootIndex); 3677 __ jmp(&done); 3678 3679 __ bind(&need_conversion); 3680 // Load the undefined value into the result register, which will 3681 // trigger conversion. 3682 __ LoadRoot(result, Heap::kUndefinedValueRootIndex); 3683 __ jmp(&done); 3684 3685 NopRuntimeCallHelper call_helper; 3686 generator.GenerateSlow(masm_, call_helper); 3687 3688 __ bind(&done); 3689 context()->Plug(result); 3690 } 3691 3692 3693 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) { 3694 ZoneList<Expression*>* args = expr->arguments(); 3695 ASSERT(args->length() == 2); 3696 3697 VisitForStackValue(args->at(0)); 3698 VisitForAccumulatorValue(args->at(1)); 3699 __ mov(a0, result_register()); 3700 3701 Register object = a1; 3702 Register index = a0; 3703 Register scratch = a3; 3704 Register result = v0; 3705 3706 __ pop(object); 3707 3708 Label need_conversion; 3709 Label index_out_of_range; 3710 Label done; 3711 StringCharAtGenerator generator(object, 3712 index, 3713 scratch, 3714 result, 3715 &need_conversion, 3716 &need_conversion, 3717 &index_out_of_range, 3718 STRING_INDEX_IS_NUMBER); 3719 generator.GenerateFast(masm_); 3720 __ jmp(&done); 3721 3722 __ bind(&index_out_of_range); 3723 // When the index is out of range, the spec requires us to return 3724 // the empty string. 3725 __ LoadRoot(result, Heap::kempty_stringRootIndex); 3726 __ jmp(&done); 3727 3728 __ bind(&need_conversion); 3729 // Move smi zero into the result register, which will trigger 3730 // conversion. 
3731 __ li(result, Operand(Smi::FromInt(0))); 3732 __ jmp(&done); 3733 3734 NopRuntimeCallHelper call_helper; 3735 generator.GenerateSlow(masm_, call_helper); 3736 3737 __ bind(&done); 3738 context()->Plug(result); 3739 } 3740 3741 3742 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) { 3743 ZoneList<Expression*>* args = expr->arguments(); 3744 ASSERT_EQ(2, args->length()); 3745 VisitForStackValue(args->at(0)); 3746 VisitForStackValue(args->at(1)); 3747 3748 StringAddStub stub(STRING_ADD_CHECK_BOTH); 3749 __ CallStub(&stub); 3750 context()->Plug(v0); 3751 } 3752 3753 3754 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) { 3755 ZoneList<Expression*>* args = expr->arguments(); 3756 ASSERT_EQ(2, args->length()); 3757 3758 VisitForStackValue(args->at(0)); 3759 VisitForStackValue(args->at(1)); 3760 3761 StringCompareStub stub; 3762 __ CallStub(&stub); 3763 context()->Plug(v0); 3764 } 3765 3766 3767 void FullCodeGenerator::EmitMathSin(CallRuntime* expr) { 3768 // Load the argument on the stack and call the stub. 3769 TranscendentalCacheStub stub(TranscendentalCache::SIN, 3770 TranscendentalCacheStub::TAGGED); 3771 ZoneList<Expression*>* args = expr->arguments(); 3772 ASSERT(args->length() == 1); 3773 VisitForStackValue(args->at(0)); 3774 __ mov(a0, result_register()); // Stub requires parameter in a0 and on tos. 3775 __ CallStub(&stub); 3776 context()->Plug(v0); 3777 } 3778 3779 3780 void FullCodeGenerator::EmitMathCos(CallRuntime* expr) { 3781 // Load the argument on the stack and call the stub. 3782 TranscendentalCacheStub stub(TranscendentalCache::COS, 3783 TranscendentalCacheStub::TAGGED); 3784 ZoneList<Expression*>* args = expr->arguments(); 3785 ASSERT(args->length() == 1); 3786 VisitForStackValue(args->at(0)); 3787 __ mov(a0, result_register()); // Stub requires parameter in a0 and on tos. 3788 __ CallStub(&stub); 3789 context()->Plug(v0); 3790 } 3791 3792 3793 void FullCodeGenerator::EmitMathTan(CallRuntime* expr) { 3794 // Load the argument on the stack and call the stub. 3795 TranscendentalCacheStub stub(TranscendentalCache::TAN, 3796 TranscendentalCacheStub::TAGGED); 3797 ZoneList<Expression*>* args = expr->arguments(); 3798 ASSERT(args->length() == 1); 3799 VisitForStackValue(args->at(0)); 3800 __ mov(a0, result_register()); // Stub requires parameter in a0 and on tos. 3801 __ CallStub(&stub); 3802 context()->Plug(v0); 3803 } 3804 3805 3806 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) { 3807 // Load the argument on the stack and call the stub. 3808 TranscendentalCacheStub stub(TranscendentalCache::LOG, 3809 TranscendentalCacheStub::TAGGED); 3810 ZoneList<Expression*>* args = expr->arguments(); 3811 ASSERT(args->length() == 1); 3812 VisitForStackValue(args->at(0)); 3813 __ mov(a0, result_register()); // Stub requires parameter in a0 and on tos. 3814 __ CallStub(&stub); 3815 context()->Plug(v0); 3816 } 3817 3818 3819 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) { 3820 // Load the argument on the stack and call the runtime function. 3821 ZoneList<Expression*>* args = expr->arguments(); 3822 ASSERT(args->length() == 1); 3823 VisitForStackValue(args->at(0)); 3824 __ CallRuntime(Runtime::kMath_sqrt, 1); 3825 context()->Plug(v0); 3826 } 3827 3828 3829 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) { 3830 ZoneList<Expression*>* args = expr->arguments(); 3831 ASSERT(args->length() >= 2); 3832 3833 int arg_count = args->length() - 2; // 2 ~ receiver and function. 
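  // %_CallFunction arrives as (receiver, arg1, ..., argN, function): the
  // loop below pushes the receiver and the arguments left to right, and the
  // function itself is evaluated last, into the accumulator.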
3834   for (int i = 0; i < arg_count + 1; i++) {
3835     VisitForStackValue(args->at(i));
3836   }
3837   VisitForAccumulatorValue(args->last());  // Function.
3838
3839   Label runtime, done;
3840   // Check for non-function argument (including proxy).
3841   __ JumpIfSmi(v0, &runtime);
3842   __ GetObjectType(v0, a1, a1);
3843   __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));
3844
3845   // InvokeFunction requires the function in a1. Move it in there.
3846   __ mov(a1, result_register());
3847   ParameterCount count(arg_count);
3848   __ InvokeFunction(a1, count, CALL_FUNCTION,
3849                     NullCallWrapper(), CALL_AS_METHOD);
3850   __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3851   __ jmp(&done);
3852
3853   __ bind(&runtime);
3854   __ push(v0);
3855   __ CallRuntime(Runtime::kCall, args->length());
3856   __ bind(&done);
3857
3858   context()->Plug(v0);
3859 }
3860
3861
3862 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3863   RegExpConstructResultStub stub;
3864   ZoneList<Expression*>* args = expr->arguments();
3865   ASSERT(args->length() == 3);
3866   VisitForStackValue(args->at(0));
3867   VisitForStackValue(args->at(1));
3868   VisitForStackValue(args->at(2));
3869   __ CallStub(&stub);
3870   context()->Plug(v0);
3871 }
3872
3873
3874 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3875   ZoneList<Expression*>* args = expr->arguments();
3876   ASSERT_EQ(2, args->length());
3877
3878   ASSERT_NE(NULL, args->at(0)->AsLiteral());
3879   int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3880
3881   Handle<FixedArray> jsfunction_result_caches(
3882       isolate()->native_context()->jsfunction_result_caches());
3883   if (jsfunction_result_caches->length() <= cache_id) {
3884     __ Abort(kAttemptToUseUndefinedCache);
3885     __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3886     context()->Plug(v0);
3887     return;
3888   }
3889
3890   VisitForAccumulatorValue(args->at(1));
3891
3892   Register key = v0;
3893   Register cache = a1;
3894   __ lw(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3895   __ lw(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
3896   __ lw(cache,
3897         ContextOperand(
3898             cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3899   __ lw(cache,
3900         FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3901
3902
3903   Label done, not_found;
3904   STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3905   __ lw(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3906   // a2 now holds finger offset as a smi.
3907   __ Addu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3908   // a3 now points to the start of fixed array elements.
3909   __ sll(at, a2, kPointerSizeLog2 - kSmiTagSize);
3910   __ addu(a3, a3, at);
3911   // a3 now points to key of indexed element of cache.
3912   __ lw(a2, MemOperand(a3));
3913   __ Branch(&not_found, ne, key, Operand(a2));
3914
3915   __ lw(v0, MemOperand(a3, kPointerSize));
3916   __ Branch(&done);
3917
3918   __ bind(&not_found);
3919   // Call runtime to perform the lookup.
3920   __ Push(cache, key);
3921   __ CallRuntime(Runtime::kGetFromCache, 2);
3922
3923   __ bind(&done);
3924   context()->Plug(v0);
3925 }
3926
3927
3928 void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
3929   ZoneList<Expression*>* args = expr->arguments();
3930   ASSERT_EQ(2, args->length());
3931
3932   Register right = v0;
3933   Register left = a1;
3934   Register tmp = a2;
3935   Register tmp2 = a3;
3936
3937   VisitForStackValue(args->at(0));
3938   VisitForAccumulatorValue(args->at(1));  // Result (right) in v0.
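  // Two regexps are considered equivalent if they are the same object, or if
  // they share both a map and a data array (i.e. the same pattern and flags).
  // The checks below bail out to 'fail' on the first mismatch.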
3939 __ pop(left); 3940 3941 Label done, fail, ok; 3942 __ Branch(&ok, eq, left, Operand(right)); 3943 // Fail if either is a non-HeapObject. 3944 __ And(tmp, left, Operand(right)); 3945 __ JumpIfSmi(tmp, &fail); 3946 __ lw(tmp, FieldMemOperand(left, HeapObject::kMapOffset)); 3947 __ lbu(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset)); 3948 __ Branch(&fail, ne, tmp2, Operand(JS_REGEXP_TYPE)); 3949 __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); 3950 __ Branch(&fail, ne, tmp, Operand(tmp2)); 3951 __ lw(tmp, FieldMemOperand(left, JSRegExp::kDataOffset)); 3952 __ lw(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset)); 3953 __ Branch(&ok, eq, tmp, Operand(tmp2)); 3954 __ bind(&fail); 3955 __ LoadRoot(v0, Heap::kFalseValueRootIndex); 3956 __ jmp(&done); 3957 __ bind(&ok); 3958 __ LoadRoot(v0, Heap::kTrueValueRootIndex); 3959 __ bind(&done); 3960 3961 context()->Plug(v0); 3962 } 3963 3964 3965 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) { 3966 ZoneList<Expression*>* args = expr->arguments(); 3967 VisitForAccumulatorValue(args->at(0)); 3968 3969 Label materialize_true, materialize_false; 3970 Label* if_true = NULL; 3971 Label* if_false = NULL; 3972 Label* fall_through = NULL; 3973 context()->PrepareTest(&materialize_true, &materialize_false, 3974 &if_true, &if_false, &fall_through); 3975 3976 __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset)); 3977 __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask)); 3978 3979 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3980 Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through); 3981 3982 context()->Plug(if_true, if_false); 3983 } 3984 3985 3986 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { 3987 ZoneList<Expression*>* args = expr->arguments(); 3988 ASSERT(args->length() == 1); 3989 VisitForAccumulatorValue(args->at(0)); 3990 3991 __ AssertString(v0); 3992 3993 __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset)); 3994 __ IndexFromHash(v0, v0); 3995 3996 context()->Plug(v0); 3997 } 3998 3999 4000 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) { 4001 Label bailout, done, one_char_separator, long_separator, 4002 non_trivial_array, not_size_one_array, loop, 4003 empty_separator_loop, one_char_separator_loop, 4004 one_char_separator_loop_entry, long_separator_loop; 4005 ZoneList<Expression*>* args = expr->arguments(); 4006 ASSERT(args->length() == 2); 4007 VisitForStackValue(args->at(1)); 4008 VisitForAccumulatorValue(args->at(0)); 4009 4010 // All aliases of the same register have disjoint lifetimes. 4011 Register array = v0; 4012 Register elements = no_reg; // Will be v0. 4013 Register result = no_reg; // Will be v0. 4014 Register separator = a1; 4015 Register array_length = a2; 4016 Register result_pos = no_reg; // Will be a2. 4017 Register string_length = a3; 4018 Register string = t0; 4019 Register element = t1; 4020 Register elements_end = t2; 4021 Register scratch1 = t3; 4022 Register scratch2 = t5; 4023 Register scratch3 = t4; 4024 4025 // Separator operand is on the stack. 4026 __ pop(separator); 4027 4028 // Check that the array is a JSArray. 4029 __ JumpIfSmi(array, &bailout); 4030 __ GetObjectType(array, scratch1, scratch2); 4031 __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE)); 4032 4033 // Check that the array has fast elements. 4034 __ CheckFastElements(scratch1, scratch2, &bailout); 4035 4036 // If the array has length zero, return the empty string. 
4037   __ lw(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
4038   __ SmiUntag(array_length);
4039   __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
4040   __ LoadRoot(v0, Heap::kempty_stringRootIndex);
4041   __ Branch(&done);
4042
4043   __ bind(&non_trivial_array);
4044
4045   // Get the FixedArray containing array's elements.
4046   elements = array;
4047   __ lw(elements, FieldMemOperand(array, JSArray::kElementsOffset));
4048   array = no_reg;  // End of array's live range.
4049
4050   // Check that all array elements are sequential ASCII strings, and
4051   // accumulate the sum of their lengths, as a smi-encoded value.
4052   __ mov(string_length, zero_reg);
4053   __ Addu(element,
4054           elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4055   __ sll(elements_end, array_length, kPointerSizeLog2);
4056   __ Addu(elements_end, element, elements_end);
4057   // Loop condition: while (element < elements_end).
4058   // Live values in registers:
4059   //   elements: Fixed array of strings.
4060   //   array_length: Length of the fixed array of strings (not smi)
4061   //   separator: Separator string
4062   //   string_length: Accumulated sum of string lengths (smi).
4063   //   element: Current array element.
4064   //   elements_end: Array end.
4065   if (generate_debug_code_) {
4066     __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin,
4067               array_length, Operand(zero_reg));
4068   }
4069   __ bind(&loop);
4070   __ lw(string, MemOperand(element));
4071   __ Addu(element, element, kPointerSize);
4072   __ JumpIfSmi(string, &bailout);
4073   __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4074   __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4075   __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
4076   __ lw(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
4077   __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
4078   __ BranchOnOverflow(&bailout, scratch3);
4079   __ Branch(&loop, lt, element, Operand(elements_end));
4080
4081   // If array_length is 1, return elements[0], a string.
4082   __ Branch(&not_size_one_array, ne, array_length, Operand(1));
4083   __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
4084   __ Branch(&done);
4085
4086   __ bind(&not_size_one_array);
4087
4088   // Live values in registers:
4089   //   separator: Separator string
4090   //   array_length: Length of the array.
4091   //   string_length: Sum of string lengths (smi).
4092   //   elements: FixedArray of strings.
4093
4094   // Check that the separator is a flat ASCII string.
4095   __ JumpIfSmi(separator, &bailout);
4096   __ lw(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4097   __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4098   __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
4099
4100   // Add (separator length times array_length) - separator length to the
4101   // string_length to get the length of the result string. array_length is not
4102   // smi but the other values are, so the result is a smi.
4103   __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4104   __ Subu(string_length, string_length, Operand(scratch1));
4105   __ Mult(array_length, scratch1);
4106   // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4107   // zero.
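  // Why 33 bits: Mult leaves a 64-bit product in the hi/lo register pair.
  // The smi-tagged product is valid only if it is non-negative and fits in
  // 32 bits, i.e. hi must be all zero and the sign bit of lo clear --
  // together, the top 33 bits of the 64-bit product.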
4108 __ mfhi(scratch2); 4109 __ Branch(&bailout, ne, scratch2, Operand(zero_reg)); 4110 __ mflo(scratch2); 4111 __ And(scratch3, scratch2, Operand(0x80000000)); 4112 __ Branch(&bailout, ne, scratch3, Operand(zero_reg)); 4113 __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3); 4114 __ BranchOnOverflow(&bailout, scratch3); 4115 __ SmiUntag(string_length); 4116 4117 // Get first element in the array to free up the elements register to be used 4118 // for the result. 4119 __ Addu(element, 4120 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); 4121 result = elements; // End of live range for elements. 4122 elements = no_reg; 4123 // Live values in registers: 4124 // element: First array element 4125 // separator: Separator string 4126 // string_length: Length of result string (not smi) 4127 // array_length: Length of the array. 4128 __ AllocateAsciiString(result, 4129 string_length, 4130 scratch1, 4131 scratch2, 4132 elements_end, 4133 &bailout); 4134 // Prepare for looping. Set up elements_end to end of the array. Set 4135 // result_pos to the position of the result where to write the first 4136 // character. 4137 __ sll(elements_end, array_length, kPointerSizeLog2); 4138 __ Addu(elements_end, element, elements_end); 4139 result_pos = array_length; // End of live range for array_length. 4140 array_length = no_reg; 4141 __ Addu(result_pos, 4142 result, 4143 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 4144 4145 // Check the length of the separator. 4146 __ lw(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset)); 4147 __ li(at, Operand(Smi::FromInt(1))); 4148 __ Branch(&one_char_separator, eq, scratch1, Operand(at)); 4149 __ Branch(&long_separator, gt, scratch1, Operand(at)); 4150 4151 // Empty separator case. 4152 __ bind(&empty_separator_loop); 4153 // Live values in registers: 4154 // result_pos: the position to which we are currently copying characters. 4155 // element: Current array element. 4156 // elements_end: Array end. 4157 4158 // Copy next array element to the result. 4159 __ lw(string, MemOperand(element)); 4160 __ Addu(element, element, kPointerSize); 4161 __ lw(string_length, FieldMemOperand(string, String::kLengthOffset)); 4162 __ SmiUntag(string_length); 4163 __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag); 4164 __ CopyBytes(string, result_pos, string_length, scratch1); 4165 // End while (element < elements_end). 4166 __ Branch(&empty_separator_loop, lt, element, Operand(elements_end)); 4167 ASSERT(result.is(v0)); 4168 __ Branch(&done); 4169 4170 // One-character separator case. 4171 __ bind(&one_char_separator); 4172 // Replace separator with its ASCII character value. 4173 __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize)); 4174 // Jump into the loop after the code that copies the separator, so the first 4175 // element is not preceded by a separator. 4176 __ jmp(&one_char_separator_loop_entry); 4177 4178 __ bind(&one_char_separator_loop); 4179 // Live values in registers: 4180 // result_pos: the position to which we are currently copying characters. 4181 // element: Current array element. 4182 // elements_end: Array end. 4183 // separator: Single separator ASCII char (in lower byte). 4184 4185 // Copy the separator character to the result. 4186 __ sb(separator, MemOperand(result_pos)); 4187 __ Addu(result_pos, result_pos, 1); 4188 4189 // Copy next array element to the result. 
4190 __ bind(&one_char_separator_loop_entry); 4191 __ lw(string, MemOperand(element)); 4192 __ Addu(element, element, kPointerSize); 4193 __ lw(string_length, FieldMemOperand(string, String::kLengthOffset)); 4194 __ SmiUntag(string_length); 4195 __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag); 4196 __ CopyBytes(string, result_pos, string_length, scratch1); 4197 // End while (element < elements_end). 4198 __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end)); 4199 ASSERT(result.is(v0)); 4200 __ Branch(&done); 4201 4202 // Long separator case (separator is more than one character). Entry is at the 4203 // label long_separator below. 4204 __ bind(&long_separator_loop); 4205 // Live values in registers: 4206 // result_pos: the position to which we are currently copying characters. 4207 // element: Current array element. 4208 // elements_end: Array end. 4209 // separator: Separator string. 4210 4211 // Copy the separator to the result. 4212 __ lw(string_length, FieldMemOperand(separator, String::kLengthOffset)); 4213 __ SmiUntag(string_length); 4214 __ Addu(string, 4215 separator, 4216 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag)); 4217 __ CopyBytes(string, result_pos, string_length, scratch1); 4218 4219 __ bind(&long_separator); 4220 __ lw(string, MemOperand(element)); 4221 __ Addu(element, element, kPointerSize); 4222 __ lw(string_length, FieldMemOperand(string, String::kLengthOffset)); 4223 __ SmiUntag(string_length); 4224 __ Addu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag); 4225 __ CopyBytes(string, result_pos, string_length, scratch1); 4226 // End while (element < elements_end). 4227 __ Branch(&long_separator_loop, lt, element, Operand(elements_end)); 4228 ASSERT(result.is(v0)); 4229 __ Branch(&done); 4230 4231 __ bind(&bailout); 4232 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex); 4233 __ bind(&done); 4234 context()->Plug(v0); 4235 } 4236 4237 4238 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) { 4239 Handle<String> name = expr->name(); 4240 if (name->length() > 0 && name->Get(0) == '_') { 4241 Comment cmnt(masm_, "[ InlineRuntimeCall"); 4242 EmitInlineRuntimeCall(expr); 4243 return; 4244 } 4245 4246 Comment cmnt(masm_, "[ CallRuntime"); 4247 ZoneList<Expression*>* args = expr->arguments(); 4248 4249 if (expr->is_jsruntime()) { 4250 // Prepare for calling JS runtime function. 4251 __ lw(a0, GlobalObjectOperand()); 4252 __ lw(a0, FieldMemOperand(a0, GlobalObject::kBuiltinsOffset)); 4253 __ push(a0); 4254 } 4255 4256 // Push the arguments ("left-to-right"). 4257 int arg_count = args->length(); 4258 for (int i = 0; i < arg_count; i++) { 4259 VisitForStackValue(args->at(i)); 4260 } 4261 4262 if (expr->is_jsruntime()) { 4263 // Call the JS runtime function. 4264 __ li(a2, Operand(expr->name())); 4265 RelocInfo::Mode mode = RelocInfo::CODE_TARGET; 4266 Handle<Code> ic = 4267 isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode); 4268 CallIC(ic, mode, expr->CallRuntimeFeedbackId()); 4269 // Restore context register. 4270 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); 4271 } else { 4272 // Call the C runtime function. 
4273 __ CallRuntime(expr->function(), arg_count); 4274 } 4275 context()->Plug(v0); 4276 } 4277 4278 4279 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { 4280 switch (expr->op()) { 4281 case Token::DELETE: { 4282 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); 4283 Property* property = expr->expression()->AsProperty(); 4284 VariableProxy* proxy = expr->expression()->AsVariableProxy(); 4285 4286 if (property != NULL) { 4287 VisitForStackValue(property->obj()); 4288 VisitForStackValue(property->key()); 4289 StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE) 4290 ? kNonStrictMode : kStrictMode; 4291 __ li(a1, Operand(Smi::FromInt(strict_mode_flag))); 4292 __ push(a1); 4293 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 4294 context()->Plug(v0); 4295 } else if (proxy != NULL) { 4296 Variable* var = proxy->var(); 4297 // Delete of an unqualified identifier is disallowed in strict mode 4298 // but "delete this" is allowed. 4299 ASSERT(language_mode() == CLASSIC_MODE || var->is_this()); 4300 if (var->IsUnallocated()) { 4301 __ lw(a2, GlobalObjectOperand()); 4302 __ li(a1, Operand(var->name())); 4303 __ li(a0, Operand(Smi::FromInt(kNonStrictMode))); 4304 __ Push(a2, a1, a0); 4305 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION); 4306 context()->Plug(v0); 4307 } else if (var->IsStackAllocated() || var->IsContextSlot()) { 4308 // Result of deleting non-global, non-dynamic variables is false. 4309 // The subexpression does not have side effects. 4310 context()->Plug(var->is_this()); 4311 } else { 4312 // Non-global variable. Call the runtime to try to delete from the 4313 // context where the variable was introduced. 4314 __ push(context_register()); 4315 __ li(a2, Operand(var->name())); 4316 __ push(a2); 4317 __ CallRuntime(Runtime::kDeleteContextSlot, 2); 4318 context()->Plug(v0); 4319 } 4320 } else { 4321 // Result of deleting non-property, non-variable reference is true. 4322 // The subexpression may have side effects. 4323 VisitForEffect(expr->expression()); 4324 context()->Plug(true); 4325 } 4326 break; 4327 } 4328 4329 case Token::VOID: { 4330 Comment cmnt(masm_, "[ UnaryOperation (VOID)"); 4331 VisitForEffect(expr->expression()); 4332 context()->Plug(Heap::kUndefinedValueRootIndex); 4333 break; 4334 } 4335 4336 case Token::NOT: { 4337 Comment cmnt(masm_, "[ UnaryOperation (NOT)"); 4338 if (context()->IsEffect()) { 4339 // Unary NOT has no side effects so it's only necessary to visit the 4340 // subexpression. Match the optimizing compiler by not branching. 4341 VisitForEffect(expr->expression()); 4342 } else if (context()->IsTest()) { 4343 const TestContext* test = TestContext::cast(context()); 4344 // The labels are swapped for the recursive call. 4345 VisitForControl(expr->expression(), 4346 test->false_label(), 4347 test->true_label(), 4348 test->fall_through()); 4349 context()->Plug(test->true_label(), test->false_label()); 4350 } else { 4351 // We handle value contexts explicitly rather than simply visiting 4352 // for control and plugging the control flow into the context, 4353 // because we need to prepare a pair of extra administrative AST ids 4354 // for the optimizing compiler. 
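      // Those ids are MaterializeTrueId and MaterializeFalseId: each
      // materialization arm below records its own bailout point so the
      // deoptimizer can resume with the correct boolean already in place.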
4355       ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
4356       Label materialize_true, materialize_false, done;
4357       VisitForControl(expr->expression(),
4358                       &materialize_false,
4359                       &materialize_true,
4360                       &materialize_true);
4361       __ bind(&materialize_true);
4362       PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4363       __ LoadRoot(v0, Heap::kTrueValueRootIndex);
4364       if (context()->IsStackValue()) __ push(v0);
4365       __ jmp(&done);
4366       __ bind(&materialize_false);
4367       PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4368       __ LoadRoot(v0, Heap::kFalseValueRootIndex);
4369       if (context()->IsStackValue()) __ push(v0);
4370       __ bind(&done);
4371       }
4372       break;
4373     }
4374
4375     case Token::TYPEOF: {
4376       Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4377       { StackValueContext context(this);
4378         VisitForTypeofValue(expr->expression());
4379       }
4380       __ CallRuntime(Runtime::kTypeof, 1);
4381       context()->Plug(v0);
4382       break;
4383     }
4384
4385     default:
4386       UNREACHABLE();
4387   }
4388 }
4389
4390
4391 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4392   Comment cmnt(masm_, "[ CountOperation");
4393   SetSourcePosition(expr->position());
4394
4395   // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
4396   // as the left-hand side.
4397   if (!expr->expression()->IsValidLeftHandSide()) {
4398     VisitForEffect(expr->expression());
4399     return;
4400   }
4401
4402   // Expression can only be a property, a global or a (parameter or local)
4403   // slot.
4404   enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4405   LhsKind assign_type = VARIABLE;
4406   Property* prop = expr->expression()->AsProperty();
4407   // In case of a property we use the uninitialized expression context
4408   // of the key to detect a named property.
4409   if (prop != NULL) {
4410     assign_type =
4411         (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4412   }
4413
4414   // Evaluate expression and get value.
4415   if (assign_type == VARIABLE) {
4416     ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
4417     AccumulatorValueContext context(this);
4418     EmitVariableLoad(expr->expression()->AsVariableProxy());
4419   } else {
4420     // Reserve space for result of postfix operation.
4421     if (expr->is_postfix() && !context()->IsEffect()) {
4422       __ li(at, Operand(Smi::FromInt(0)));
4423       __ push(at);
4424     }
4425     if (assign_type == NAMED_PROPERTY) {
4426       // Put the object both on the stack and in the accumulator.
4427       VisitForAccumulatorValue(prop->obj());
4428       __ push(v0);
4429       EmitNamedPropertyLoad(prop);
4430     } else {
4431       VisitForStackValue(prop->obj());
4432       VisitForAccumulatorValue(prop->key());
4433       __ lw(a1, MemOperand(sp, 0));
4434       __ push(v0);
4435       EmitKeyedPropertyLoad(prop);
4436     }
4437   }
4438
4439   // We need a second deoptimization point after loading the value
4440   // in case evaluating the property load may have a side effect.
4441   if (assign_type == VARIABLE) {
4442     PrepareForBailout(expr->expression(), TOS_REG);
4443   } else {
4444     PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4445   }
4446
4447   // Call ToNumber only if operand is not a smi.
4448   Label no_conversion;
4449   if (ShouldInlineSmiCase(expr->op())) {
4450     __ JumpIfSmi(v0, &no_conversion);
4451   }
4452   __ mov(a0, v0);
4453   ToNumberStub convert_stub;
4454   __ CallStub(&convert_stub);
4455   __ bind(&no_conversion);
4456
4457   // Save result for postfix expressions.
4458   if (expr->is_postfix()) {
4459     if (!context()->IsEffect()) {
4460       // Save the result on the stack. If we have a named or keyed property
4461       // we store the result under the receiver that is currently on top
4462       // of the stack.
4463       switch (assign_type) {
4464         case VARIABLE:
4465           __ push(v0);
4466           break;
4467         case NAMED_PROPERTY:
4468           __ sw(v0, MemOperand(sp, kPointerSize));
4469           break;
4470         case KEYED_PROPERTY:
4471           __ sw(v0, MemOperand(sp, 2 * kPointerSize));
4472           break;
4473       }
4474     }
4475   }
4476   __ mov(a0, result_register());
4477
4478   // Inline smi case if we are in a loop.
4479   Label stub_call, done;
4480   JumpPatchSite patch_site(masm_);
4481
4482   int count_value = expr->op() == Token::INC ? 1 : -1;
4483   if (ShouldInlineSmiCase(expr->op())) {
4484     __ li(a1, Operand(Smi::FromInt(count_value)));
4485     __ AdduAndCheckForOverflow(v0, a0, a1, t0);
4486     __ BranchOnOverflow(&stub_call, t0);  // Do stub on overflow.
4487
4488     // We could eliminate this smi check if we split the code at
4489     // the first smi check before calling ToNumber.
4490     patch_site.EmitJumpIfSmi(v0, &done);
4491     __ bind(&stub_call);
4492   }
4493   __ mov(a1, a0);
4494   __ li(a0, Operand(Smi::FromInt(count_value)));
4495
4496   // Record position before stub call.
4497   SetSourcePosition(expr->position());
4498
4499   BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
4500   CallIC(stub.GetCode(isolate()),
4501          RelocInfo::CODE_TARGET,
4502          expr->CountBinOpFeedbackId());
4503   patch_site.EmitPatchInfo();
4504   __ bind(&done);
4505
4506   // Store the value returned in v0.
4507   switch (assign_type) {
4508     case VARIABLE:
4509       if (expr->is_postfix()) {
4510         { EffectContext context(this);
4511           EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4512                                  Token::ASSIGN);
4513           PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4514           context.Plug(v0);
4515         }
4516         // For all contexts except EffectContext we have the result on
4517         // top of the stack.
4518         if (!context()->IsEffect()) {
4519           context()->PlugTOS();
4520         }
4521       } else {
4522         EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4523                                Token::ASSIGN);
4524         PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4525         context()->Plug(v0);
4526       }
4527       break;
4528     case NAMED_PROPERTY: {
4529       __ mov(a0, result_register());  // Value.
4530       __ li(a2, Operand(prop->key()->AsLiteral()->value()));  // Name.
4531       __ pop(a1);  // Receiver.
4532       Handle<Code> ic = is_classic_mode()
4533           ? isolate()->builtins()->StoreIC_Initialize()
4534           : isolate()->builtins()->StoreIC_Initialize_Strict();
4535       CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId());
4536       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4537       if (expr->is_postfix()) {
4538         if (!context()->IsEffect()) {
4539           context()->PlugTOS();
4540         }
4541       } else {
4542         context()->Plug(v0);
4543       }
4544       break;
4545     }
4546     case KEYED_PROPERTY: {
4547       __ mov(a0, result_register());  // Value.
4548       __ pop(a1);  // Key.
4549       __ pop(a2);  // Receiver.
4550       Handle<Code> ic = is_classic_mode()
4551           ?
isolate()->builtins()->KeyedStoreIC_Initialize() 4552 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict(); 4553 CallIC(ic, RelocInfo::CODE_TARGET, expr->CountStoreFeedbackId()); 4554 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 4555 if (expr->is_postfix()) { 4556 if (!context()->IsEffect()) { 4557 context()->PlugTOS(); 4558 } 4559 } else { 4560 context()->Plug(v0); 4561 } 4562 break; 4563 } 4564 } 4565 } 4566 4567 4568 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) { 4569 ASSERT(!context()->IsEffect()); 4570 ASSERT(!context()->IsTest()); 4571 VariableProxy* proxy = expr->AsVariableProxy(); 4572 if (proxy != NULL && proxy->var()->IsUnallocated()) { 4573 Comment cmnt(masm_, "Global variable"); 4574 __ lw(a0, GlobalObjectOperand()); 4575 __ li(a2, Operand(proxy->name())); 4576 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 4577 // Use a regular load, not a contextual load, to avoid a reference 4578 // error. 4579 CallIC(ic); 4580 PrepareForBailout(expr, TOS_REG); 4581 context()->Plug(v0); 4582 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) { 4583 Label done, slow; 4584 4585 // Generate code for loading from variables potentially shadowed 4586 // by eval-introduced variables. 4587 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done); 4588 4589 __ bind(&slow); 4590 __ li(a0, Operand(proxy->name())); 4591 __ Push(cp, a0); 4592 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); 4593 PrepareForBailout(expr, TOS_REG); 4594 __ bind(&done); 4595 4596 context()->Plug(v0); 4597 } else { 4598 // This expression cannot throw a reference error at the top level. 4599 VisitInDuplicateContext(expr); 4600 } 4601 } 4602 4603 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, 4604 Expression* sub_expr, 4605 Handle<String> check) { 4606 Label materialize_true, materialize_false; 4607 Label* if_true = NULL; 4608 Label* if_false = NULL; 4609 Label* fall_through = NULL; 4610 context()->PrepareTest(&materialize_true, &materialize_false, 4611 &if_true, &if_false, &fall_through); 4612 4613 { AccumulatorValueContext context(this); 4614 VisitForTypeofValue(sub_expr); 4615 } 4616 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4617 4618 if (check->Equals(isolate()->heap()->number_string())) { 4619 __ JumpIfSmi(v0, if_true); 4620 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); 4621 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); 4622 Split(eq, v0, Operand(at), if_true, if_false, fall_through); 4623 } else if (check->Equals(isolate()->heap()->string_string())) { 4624 __ JumpIfSmi(v0, if_false); 4625 // Check for undetectable objects => false. 
4626 __ GetObjectType(v0, v0, a1); 4627 __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE)); 4628 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset)); 4629 __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); 4630 Split(eq, a1, Operand(zero_reg), 4631 if_true, if_false, fall_through); 4632 } else if (check->Equals(isolate()->heap()->symbol_string())) { 4633 __ JumpIfSmi(v0, if_false); 4634 __ GetObjectType(v0, v0, a1); 4635 Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through); 4636 } else if (check->Equals(isolate()->heap()->boolean_string())) { 4637 __ LoadRoot(at, Heap::kTrueValueRootIndex); 4638 __ Branch(if_true, eq, v0, Operand(at)); 4639 __ LoadRoot(at, Heap::kFalseValueRootIndex); 4640 Split(eq, v0, Operand(at), if_true, if_false, fall_through); 4641 } else if (FLAG_harmony_typeof && 4642 check->Equals(isolate()->heap()->null_string())) { 4643 __ LoadRoot(at, Heap::kNullValueRootIndex); 4644 Split(eq, v0, Operand(at), if_true, if_false, fall_through); 4645 } else if (check->Equals(isolate()->heap()->undefined_string())) { 4646 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); 4647 __ Branch(if_true, eq, v0, Operand(at)); 4648 __ JumpIfSmi(v0, if_false); 4649 // Check for undetectable objects => true. 4650 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); 4651 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset)); 4652 __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); 4653 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through); 4654 } else if (check->Equals(isolate()->heap()->function_string())) { 4655 __ JumpIfSmi(v0, if_false); 4656 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2); 4657 __ GetObjectType(v0, v0, a1); 4658 __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE)); 4659 Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE), 4660 if_true, if_false, fall_through); 4661 } else if (check->Equals(isolate()->heap()->object_string())) { 4662 __ JumpIfSmi(v0, if_false); 4663 if (!FLAG_harmony_typeof) { 4664 __ LoadRoot(at, Heap::kNullValueRootIndex); 4665 __ Branch(if_true, eq, v0, Operand(at)); 4666 } 4667 // Check for JS objects => true. 4668 __ GetObjectType(v0, v0, a1); 4669 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)); 4670 __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset)); 4671 __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE)); 4672 // Check for undetectable objects => false. 4673 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset)); 4674 __ And(a1, a1, Operand(1 << Map::kIsUndetectable)); 4675 Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through); 4676 } else { 4677 if (if_false != fall_through) __ jmp(if_false); 4678 } 4679 context()->Plug(if_true, if_false); 4680 } 4681 4682 4683 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) { 4684 Comment cmnt(masm_, "[ CompareOperation"); 4685 SetSourcePosition(expr->position()); 4686 4687 // First we try a fast inlined version of the compare when one of 4688 // the operands is a literal. 4689 if (TryLiteralCompare(expr)) return; 4690 4691 // Always perform the comparison for its control flow. Pack the result 4692 // into the expression's context after the comparison is performed. 
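// PrepareTest below hands back a label triple (if_true, if_false,
// fall_through) that encodes how the enclosing context consumes the boolean:
// a test context branches on the labels directly, while value contexts point
// them at materialization blocks that load true/false into the accumulator.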
4693 Label materialize_true, materialize_false; 4694 Label* if_true = NULL; 4695 Label* if_false = NULL; 4696 Label* fall_through = NULL; 4697 context()->PrepareTest(&materialize_true, &materialize_false, 4698 &if_true, &if_false, &fall_through); 4699 4700 Token::Value op = expr->op(); 4701 VisitForStackValue(expr->left()); 4702 switch (op) { 4703 case Token::IN: 4704 VisitForStackValue(expr->right()); 4705 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); 4706 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL); 4707 __ LoadRoot(t0, Heap::kTrueValueRootIndex); 4708 Split(eq, v0, Operand(t0), if_true, if_false, fall_through); 4709 break; 4710 4711 case Token::INSTANCEOF: { 4712 VisitForStackValue(expr->right()); 4713 InstanceofStub stub(InstanceofStub::kNoFlags); 4714 __ CallStub(&stub); 4715 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4716 // The stub returns 0 for true. 4717 Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through); 4718 break; 4719 } 4720 4721 default: { 4722 VisitForAccumulatorValue(expr->right()); 4723 Condition cc = CompareIC::ComputeCondition(op); 4724 __ mov(a0, result_register()); 4725 __ pop(a1); 4726 4727 bool inline_smi_code = ShouldInlineSmiCase(op); 4728 JumpPatchSite patch_site(masm_); 4729 if (inline_smi_code) { 4730 Label slow_case; 4731 __ Or(a2, a0, Operand(a1)); 4732 patch_site.EmitJumpIfNotSmi(a2, &slow_case); 4733 Split(cc, a1, Operand(a0), if_true, if_false, NULL); 4734 __ bind(&slow_case); 4735 } 4736 // Record position and call the compare IC. 4737 SetSourcePosition(expr->position()); 4738 Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); 4739 CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId()); 4740 patch_site.EmitPatchInfo(); 4741 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4742 Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through); 4743 } 4744 } 4745 4746 // Convert the result of the comparison into one expected for this 4747 // expression's context. 4748 context()->Plug(if_true, if_false); 4749 } 4750 4751 4752 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr, 4753 Expression* sub_expr, 4754 NilValue nil) { 4755 Label materialize_true, materialize_false; 4756 Label* if_true = NULL; 4757 Label* if_false = NULL; 4758 Label* fall_through = NULL; 4759 context()->PrepareTest(&materialize_true, &materialize_false, 4760 &if_true, &if_false, &fall_through); 4761 4762 VisitForAccumulatorValue(sub_expr); 4763 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 4764 __ mov(a0, result_register()); 4765 if (expr->op() == Token::EQ_STRICT) { 4766 Heap::RootListIndex nil_value = nil == kNullValue ? 
4767       Heap::kNullValueRootIndex :
4768       Heap::kUndefinedValueRootIndex;
4769     __ LoadRoot(a1, nil_value);
4770     Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
4771   } else {
4772     Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4773     CallIC(ic, RelocInfo::CODE_TARGET, expr->CompareOperationFeedbackId());
4774     Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
4775   }
4776   context()->Plug(if_true, if_false);
4777 }
4778
4779
4780 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4781   __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4782   context()->Plug(v0);
4783 }
4784
4785
4786 Register FullCodeGenerator::result_register() {
4787   return v0;
4788 }
4789
4790
4791 Register FullCodeGenerator::context_register() {
4792   return cp;
4793 }
4794
4795
4796 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4797   ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4798   __ sw(value, MemOperand(fp, frame_offset));
4799 }
4800
4801
4802 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4803   __ lw(dst, ContextOperand(cp, context_index));
4804 }
4805
4806
4807 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4808   Scope* declaration_scope = scope()->DeclarationScope();
4809   if (declaration_scope->is_global_scope() ||
4810       declaration_scope->is_module_scope()) {
4811     // Contexts nested in the native context have a canonical empty function
4812     // as their closure, not the anonymous closure containing the global
4813     // code. Pass a smi sentinel and let the runtime look up the empty
4814     // function.
4815     __ li(at, Operand(Smi::FromInt(0)));
4816   } else if (declaration_scope->is_eval_scope()) {
4817     // Contexts created by a call to eval have the same closure as the
4818     // context calling eval, not the anonymous closure containing the eval
4819     // code. Fetch it from the context.
4820     __ lw(at, ContextOperand(cp, Context::CLOSURE_INDEX));
4821   } else {
4822     ASSERT(declaration_scope->is_function_scope());
4823     __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4824   }
4825   __ push(at);
4826 }
4827
4828
4829 // ----------------------------------------------------------------------------
4830 // Non-local control flow support.
4831
4832 void FullCodeGenerator::EnterFinallyBlock() {
4833   ASSERT(!result_register().is(a1));
4834   // Store result register while executing finally block.
4835   __ push(result_register());
4836   // Cook return address in link register to stack (smi encoded Code* delta).
4837   __ Subu(a1, ra, Operand(masm_->CodeObject()));
4838   ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
4839   STATIC_ASSERT(0 == kSmiTag);
4840   __ Addu(a1, a1, Operand(a1));  // Convert to smi.
4841
4842   // Store the cooked return address while executing the finally block.
4843   __ push(a1);
4844
4845   // Store pending message while executing finally block.
4846   ExternalReference pending_message_obj =
4847       ExternalReference::address_of_pending_message_obj(isolate());
4848   __ li(at, Operand(pending_message_obj));
4849   __ lw(a1, MemOperand(at));
4850   __ push(a1);
4851
4852   ExternalReference has_pending_message =
4853       ExternalReference::address_of_has_pending_message(isolate());
4854   __ li(at, Operand(has_pending_message));
4855   __ lw(a1, MemOperand(at));
4856   __ SmiTag(a1);
4857   __ push(a1);
4858
4859   ExternalReference pending_message_script =
4860       ExternalReference::address_of_pending_message_script(isolate());
4861   __ li(at, Operand(pending_message_script));
4862   __ lw(a1, MemOperand(at));
4863   __ push(a1);
4864 }
4865
4866
4867 void FullCodeGenerator::ExitFinallyBlock() {
4868   ASSERT(!result_register().is(a1));
4869   // Restore pending message from stack.
4870   __ pop(a1);
4871   ExternalReference pending_message_script =
4872       ExternalReference::address_of_pending_message_script(isolate());
4873   __ li(at, Operand(pending_message_script));
4874   __ sw(a1, MemOperand(at));
4875
4876   __ pop(a1);
4877   __ SmiUntag(a1);
4878   ExternalReference has_pending_message =
4879       ExternalReference::address_of_has_pending_message(isolate());
4880   __ li(at, Operand(has_pending_message));
4881   __ sw(a1, MemOperand(at));
4882
4883   __ pop(a1);
4884   ExternalReference pending_message_obj =
4885       ExternalReference::address_of_pending_message_obj(isolate());
4886   __ li(at, Operand(pending_message_obj));
4887   __ sw(a1, MemOperand(at));
4888
4889   // Pop the cooked return address from the stack.
4890   __ pop(a1);
4891
4892   // Restore the result register, then uncook the return address and return.
4893   __ pop(result_register());
4894   ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
4895   __ sra(a1, a1, 1);  // Un-smi-tag value.
4896   __ Addu(at, a1, Operand(masm_->CodeObject()));
4897   __ Jump(at);
4898 }
4899
4900
4901 #undef __
4902
4903 #define __ ACCESS_MASM(masm())
4904
4905 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4906     int* stack_depth,
4907     int* context_length) {
4908   // The macros used here must preserve the result register.
4909
4910   // Because the handler block contains the context of the finally
4911   // code, we can restore it directly from there for the finally code
4912   // rather than iteratively unwinding contexts via their previous
4913   // links.
4914   __ Drop(*stack_depth);  // Down to the handler block.
4915   if (*context_length > 0) {
4916     // Restore the context to its dedicated register and the stack.
4917     __ lw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
4918     __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4919   }
4920   __ PopTryHandler();
4921   __ Call(finally_entry_);
4922
4923   *stack_depth = 0;
4924   *context_length = 0;
4925   return previous_;
4926 }
4927
4928
4929 #undef __
4930
4931 } }  // namespace v8::internal
4932
4933 #endif  // V8_TARGET_ARCH_MIPS
4934