// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_IA32

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"
#include "src/parser.h"
#include "src/scopes.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)


class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      DCHECK(is_uint8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);
  }

  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o edi: the JS function object being called (i.e. ourselves)
//   o esi: our context
//   o ebp: our caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-ia32.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
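  // (Callers are expected to pass undefined in the receiver slot for such
  // calls, which is why comparing the receiver slot against undefined_value
  // below suffices to detect them.)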
  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    Label ok;
    // +1 for return address.
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));

    __ cmp(ecx, isolate()->factory()->undefined_value());
    __ j(not_equal, &ok, Label::kNear);

    __ mov(ecx, GlobalObjectOperand());
    __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalProxyOffset));

    __ mov(Operand(esp, receiver_offset), ecx);

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!info->function()->is_generator() || locals_count == 0);
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      if (locals_count >= 128) {
        Label ok;
        __ mov(ecx, esp);
        __ sub(ecx, Immediate(locals_count * kPointerSize));
        ExternalReference stack_limit =
            ExternalReference::address_of_real_stack_limit(isolate());
        __ cmp(ecx, Operand::StaticVariable(stack_limit));
        __ j(above_equal, &ok, Label::kNear);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(ecx, loop_iterations);
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(eax);
        }
        __ dec(ecx);
        __ j(not_zero, &loop_header, Label::kNear);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(eax);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    // Argument to NewContext is the function, which is still in edi.
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ push(edi);
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(edi);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in eax. It replaces the context passed to us.
    // It's saved in the stack and kept live in esi.
    __ mov(esi, eax);
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);

    // Copy parameters into context if necessary.
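    // Parameters that are captured by nested closures are context-allocated;
    // each one is copied from its stack slot into the new context object,
    // with a write barrier when the context may live in old space.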
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(esi,
                                    context_offset,
                                    eax,
                                    ebx,
                                    kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
      __ push(edi);
    } else {
      __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ lea(edx,
           Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
    __ push(edx);
    __ push(Immediate(Smi::FromInt(num_parameters)));
    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (strict_mode() == STRICT) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, eax, ebx, edx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
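      // (Inside a named function expression the name binds read-only to the
      // closure itself, hence the CONST/CONST_LEGACY modes asserted below.)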
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        DCHECK(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      ExternalReference stack_limit
          = ExternalReference::address_of_stack_limit(isolate());
      __ cmp(esp, Operand::StaticVariable(stack_limit));
      __ j(above_equal, &ok, Label::kNear);
      __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ Move(eax, Immediate(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(ebx, Immediate(profiling_counter_));
  __ sub(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(delta)));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ mov(ebx, Immediate(profiling_counter_));
  __ mov(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(reset_value)));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ j(positive, &ok, Label::kNear);
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    // Common return label.
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(eax);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
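    // The weight scales with the amount of code generated, capped at
    // kMaxBackEdgeWeight, so larger functions drain the interrupt budget
    // sooner and hit the interrupt check more often.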
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ j(positive, &ok, Label::kNear);
    __ push(eax);
    __ call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(eax);
    EmitProfilingCounterReset();
    __ bind(&ok);
#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    SetSourcePosition(function()->end_position() - 1);
    __ RecordJSReturn();
    // Do not use the leave instruction here because it is too short to
    // patch with the code required by the debugger.
    __ mov(esp, ebp);
    int no_frame_start = masm_->pc_offset();
    __ pop(ebp);

    int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
    __ Ret(arguments_bytes, ecx);
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    DCHECK(Assembler::kJSReturnSequenceLength <=
           masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
    info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
  }
}


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  // Memory operands can be pushed directly.
  __ push(operand);
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeMove(result_register(), Immediate(lit));
  } else {
    __ Move(result_register(), Immediate(lit));
  }
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafePush(Immediate(lit));
  } else {
    __ push(Immediate(lit));
  }
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ mov(Operand(esp, 0), reg);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ mov(result_register(), value);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ push(Immediate(value));
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ test(result_register(), result_register());
  // The stub returns nonzero for true.
  Split(not_zero, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
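  // Parameters live above the frame pointer (the caller pushed them before
  // the return address), while locals live below it; kLocal0Offset accounts
  // for the fixed frame slots that sit in between.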
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return Operand(ebp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ mov(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
    __ cmp(ebx, isolate()->factory()->with_context_map());
    __ Check(not_equal, kDeclarationInWithContext);
    __ cmp(ebx, isolate()->factory()->catch_context_map());
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(), zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ mov(StackOperand(variable),
               Immediate(isolate()->factory()->the_hole_value()));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ mov(ContextOperand(esi, variable->index()),
               Immediate(isolate()->factory()->the_hole_value()));
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ push(esi);
      __ push(Immediate(variable->name()));
      // VariableDeclaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ push(Immediate(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ push(Immediate(isolate()->factory()->the_hole_value()));
      } else {
        __ push(Immediate(Smi::FromInt(0)));  // Indicates no initial value.
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ mov(StackOperand(variable), result_register());
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ mov(ContextOperand(esi, variable->index()), result_register());
      // We know that we have written a function, which is not a smi.
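      // The write barrier's smi check can therefore be omitted
      // (OMIT_SMI_CHECK below).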
      __ RecordWriteContextSlot(esi,
                                Context::SlotOffset(variable->index()),
                                result_register(),
                                ecx,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ push(esi);
      __ push(Immediate(variable->name()));
      __ push(Immediate(Smi::FromInt(NONE)));
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  DCHECK(variable->location() == Variable::CONTEXT);
  DCHECK(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(eax, scope_->ContextChainLength(scope_->GlobalScope()));
  __ mov(eax, ContextOperand(eax, variable->interface()->Index()));
  __ mov(eax, ContextOperand(eax, Context::EXTENSION_INDEX));

  // Assign it.
  __ mov(ContextOperand(esi, variable->index()), eax);
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(esi,
                            Context::SlotOffset(variable->index()),
                            eax,
                            ecx,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ push(esi);  // The context is the first argument.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
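  // Each comparison is performed as if by '===': an inline smi equality
  // check is emitted first (its location recorded in a JumpPatchSite), and
  // the generic CompareIC handles the non-smi slow case.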
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, TOS_REG);
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  int slot = stmt->ForInFeedbackSlot();

  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
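  // Smis and other values below FIRST_SPEC_OBJECT_TYPE are boxed via the
  // TO_OBJECT builtin; values that are already spec objects are used as-is.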
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ bind(&convert);
  __ push(eax);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(eax);

  // Check for proxies.
  Label call_runtime, use_cache, fixed_array;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
  __ j(below_equal, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);


  // We got a map in register eax. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(edx, eax);
  __ cmp(edx, Immediate(Smi::FromInt(0)));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(eax, ecx);
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(ecx);  // Enumeration cache.
  __ push(edx);  // Number of valid entries for the map in the enum cache.
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ add(esp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register eax. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  // No need for a write barrier, we are storing a Smi in the feedback vector.
  __ LoadHeapObject(ebx, FeedbackVector());
  __ mov(FieldOperand(ebx, FixedArray::OffsetOfElementAt(slot)),
         Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));

  __ mov(ebx, Immediate(Smi::FromInt(1)));  // Smi indicates slow check.
  __ mov(ecx, Operand(esp, 0 * kPointerSize));  // Get enumerated object.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
  __ j(above, &non_proxy);
  __ Move(ebx, Immediate(Smi::FromInt(0)));  // Zero indicates proxy.
  __ bind(&non_proxy);
  __ push(ebx);  // Smi
  __ push(eax);  // Array
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register ebx.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ mov(ecx, Operand(esp, 4 * kPointerSize));
  __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  DCHECK(Smi::FromInt(0) == 0);
  __ test(edx, edx);
  __ j(zero, &update_each);

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(ecx);  // Enumerable.
  __ push(ebx);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ test(eax, eax);
  __ j(equal, loop_statement.continue_label());
  __ mov(ebx, eax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register ebx.
  __ bind(&update_each);
  __ mov(result_register(), ebx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ add(esp, Immediate(5 * kPointerSize));

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[Symbol.iterator]();
  VisitForEffect(stmt->assign_iterator());

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
    __ mov(ebx, Immediate(info));
    __ CallStub(&stub);
  } else {
    __ push(esi);
    __ push(Immediate(info));
    __ push(Immediate(pretenure
                      ? isolate()->factory()->true_value()
                      : isolate()->factory()->false_value()));
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(eax);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
  Comment cmnt(masm_, "[ SuperReference ");

  __ mov(LoadDescriptor::ReceiverRegister(),
         Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));

  Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
  __ mov(LoadDescriptor::NameRegister(), home_object_symbol);

  CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());

  __ cmp(eax, isolate()->factory()->undefined_value());
  Label done;
  __ j(not_equal, &done);
  __ CallRuntime(Runtime::kThrowNonMethodError, 0);
  __ bind(&done);
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register context = esi;
  Register temp = edx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        __ j(not_equal, slow);
      }
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain. There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ mov(temp, context);
    }
    __ bind(&next);
    // Terminate at native context.
    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->native_context_map()));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is NULL.
    __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
    __ j(not_equal, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  __ mov(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
  __ mov(LoadDescriptor::NameRegister(), proxy->var()->name());
  if (FLAG_vector_ics) {
    __ mov(VectorLoadICDescriptor::SlotRegister(),
           Immediate(Smi::FromInt(proxy->VariableFeedbackSlot())));
  }

  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;

  CallLoadIC(mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = esi;
  Register temp = ebx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        __ j(not_equal, slow);
      }
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
  }
  // Check that last extension is NULL.
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  __ j(not_equal, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ cmp(eax, isolate()->factory()->the_hole_value());
      __ j(not_equal, done);
      if (local->mode() == CONST_LEGACY) {
        __ mov(eax, isolate()->factory()->undefined_value());
      } else {  // LET || CONST
        __ push(Immediate(var->name()));
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
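  // Globals load through a contextual load IC, lookup variables fall back
  // to a runtime call, and stack- or context-allocated variables read their
  // slot directly (with a hole check where initialization matters).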
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      __ mov(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
      __ mov(LoadDescriptor::NameRegister(), var->name());
      if (FLAG_vector_ics) {
        __ mov(VectorLoadICDescriptor::SlotRegister(),
               Immediate(Smi::FromInt(proxy->VariableFeedbackSlot())));
      }
      CallLoadIC(CONTEXTUAL);
      context()->Plug(eax);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The
        // check can be skipped in the following situation: we have a LET or
        // CONST binding in harmony mode, both the Variable and the
        // VariableProxy have the same declaration scope (i.e. they are both
        // in global code, in the same function or in the same eval code) and
        // the VariableProxy is in the source physically located after the
        // initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          Label done;
          GetVar(eax, var);
          __ cmp(eax, isolate()->factory()->the_hole_value());
          __ j(not_equal, &done, Label::kNear);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ push(Immediate(var->name()));
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
          } else {
            // Uninitialized const bindings outside of harmony mode are
            // unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ mov(eax, isolate()->factory()->undefined_value());
          }
          __ bind(&done);
          context()->Plug(eax);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
      __ bind(&done);
      context()->Plug(eax);
      break;
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // edi = JS function.
  // ecx = literals array.
  // ebx = regexp literal.
  // eax = regexp literal clone.
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ mov(ebx, FieldOperand(ecx, literal_offset));
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ j(not_equal, &materialized, Label::kNear);

  // Create regexp literal using runtime function.
  // Result will be in eax.
  __ push(ecx);
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(expr->pattern()));
  __ push(Immediate(expr->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(ebx, eax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ push(ebx);
  __ push(Immediate(Smi::FromInt(size)));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(ebx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ mov(edx, FieldOperand(ebx, i));
    __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
    __ mov(FieldOperand(eax, i), edx);
    __ mov(FieldOperand(eax, i + kPointerSize), ecx);
  }
  if ((size % (2 * kPointerSize)) != 0) {
    __ mov(edx, FieldOperand(ebx, size - kPointerSize));
    __ mov(FieldOperand(eax, size - kPointerSize), edx);
  }
  context()->Plug(eax);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ push(Immediate(isolate()->factory()->null_value()));
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  int properties_count = constant_properties->length() / 2;
  if (expr->may_store_doubles() || expr->depth() > 1 ||
      masm()->serializer_enabled() ||
      flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_properties));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(eax, FieldOperand(edi, JSFunction::kLiteralsOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_properties));
    __ mov(edx, Immediate(Smi::FromInt(flags)));
    FastCloneShallowObjectStub stub(isolate(), properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(eax);  // Save result on the stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(eax));
            __ mov(StoreDescriptor::NameRegister(), Immediate(key->value()));
            __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
            CallStoreIC(key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ push(Immediate(Smi::FromInt(SLOPPY)));  // Strict mode flag.
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetPrototype, 2);
        } else {
          __ Drop(2);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime
  // for each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ push(Operand(esp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ push(Immediate(Smi::FromInt(NONE)));
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ push(Operand(esp, 0));
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}


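// A hedged JS-level example of the two array-literal paths below
// (illustrative; the exact cutoff is JSObject::kInitialMaxFastElementArray):
//
//   var a = [1, 2, x];     // shallow -> FastCloneShallowArrayStub
//   var b = [[1], [2]];    // depth > 1 -> %CreateArrayLiteral
//
// Non-constant entries such as x are stored afterwards, with a write
// barrier when the elements kind is a fast object kind.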
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  int flags = expr->depth() == 1
      ? ArrayLiteral::kShallowElements
      : ArrayLiteral::kNoFlags;

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  DCHECK_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_constant_fast_elements =
      IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
    __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_elements));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
  } else {
    __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_elements));
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ push(eax);  // array literal.
      __ push(Immediate(Smi::FromInt(expr->literal_index())));
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    if (IsFastObjectElementsKind(constant_elements_kind)) {
      // Fast-case array literals with an ElementsKind of FAST_*_ELEMENTS
      // cannot transition, so we don't need to call the runtime stub.
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ mov(ebx, Operand(esp, kPointerSize));  // Copy of array literal.
      __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
      // Store the subexpression value in the array's elements.
      __ mov(FieldOperand(ebx, offset), result_register());
      // Update the write barrier for the array store.
      __ RecordWriteField(ebx, offset, result_register(), ecx,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          INLINE_SMI_CHECK);
    } else {
      // Store the subexpression value in the array's elements.
      __ mov(ecx, Immediate(Smi::FromInt(i)));
      StoreArrayLiteralElementStub stub(isolate());
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

  if (result_saved) {
    __ add(esp, Immediate(kPointerSize));  // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}


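// A hedged sketch of the evaluation order VisitAssignment implements for a
// compound assignment such as o.x += f(): the receiver is evaluated once
// and kept on the stack, the old value is loaded (with its own bailout
// point), f() is evaluated, the addition runs (inline smi fast path or
// BinaryOpIC), and only then is the store emitted.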
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ Assignment");

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, kPointerSize));
        __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    }
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    AccumulatorValueContext result_context(this);
    { AccumulatorValueContext left_operand_context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ push(eax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(eax);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}


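// VisitYield below compiles the three yield flavours. A hedged JS-level
// picture (illustrative only):
//
//   yield v      -> kSuspend/kInitial: box {value: v, done: false}, save
//                   the continuation offset and context in the generator
//                   object, and return to the caller.
//   final return -> kFinal: mark the generator closed and return
//                   {value: v, done: true}.
//   yield* iter  -> kDelegating: an inlined next/throw loop over iter.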
void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  switch (expr->yield_kind()) {
    case Yield::kSuspend:
      // Pop value from top-of-stack slot; box result into result register.
      EmitCreateIteratorResult(false);
      __ push(result_register());
      // Fall through.
    case Yield::kInitial: {
      Label suspend, continuation, post_runtime, resume;

      __ jmp(&suspend);

      __ bind(&continuation);
      __ jmp(&resume);

      __ bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
      DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
             Immediate(Smi::FromInt(continuation.pos())));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
      __ mov(ecx, esi);
      __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
                          kDontSaveFPRegs);
      __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
      __ cmp(esp, ebx);
      __ j(equal, &post_runtime);
      __ push(eax);  // generator object
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ mov(context_register(),
             Operand(ebp, StandardFrameConstants::kContextOffset));
      __ bind(&post_runtime);
      __ pop(result_register());
      EmitReturnSequence();

      __ bind(&resume);
      context()->Plug(result_register());
      break;
    }

    case Yield::kFinal: {
      VisitForAccumulatorValue(expr->generator_object());
      __ mov(FieldOperand(result_register(),
                          JSGeneratorObject::kContinuationOffset),
             Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
      // Pop value from top-of-stack slot, box result into result register.
      EmitCreateIteratorResult(true);
      EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }

    case Yield::kDelegating: {
      VisitForStackValue(expr->generator_object());

      // Initial stack layout is as follows:
      // [sp + 1 * kPointerSize] iter
      // [sp + 0 * kPointerSize] g

      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
      Label l_next, l_call, l_loop;
      Register load_receiver = LoadDescriptor::ReceiverRegister();
      Register load_name = LoadDescriptor::NameRegister();

      // Initial send value is undefined.
      __ mov(eax, isolate()->factory()->undefined_value());
      __ jmp(&l_next);

      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      __ bind(&l_catch);
      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
      __ mov(load_name, isolate()->factory()->throw_string());  // "throw"
      __ push(load_name);                                       // "throw"
      __ push(Operand(esp, 2 * kPointerSize));                  // iter
      __ push(eax);                                             // exception
      __ jmp(&l_call);

      // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ bind(&l_try);
      __ pop(eax);  // result
      __ PushTryHandler(StackHandler::CATCH, expr->index());
      const int handler_size = StackHandlerConstants::kSize;
      __ push(eax);  // result
      __ jmp(&l_suspend);
      __ bind(&l_continuation);
      __ jmp(&l_resume);
      __ bind(&l_suspend);
      const int generator_object_depth = kPointerSize + handler_size;
      __ mov(eax, Operand(esp, generator_object_depth));
      __ push(eax);  // g
      DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
             Immediate(Smi::FromInt(l_continuation.pos())));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
      __ mov(ecx, esi);
      __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
                          kDontSaveFPRegs);
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ mov(context_register(),
             Operand(ebp, StandardFrameConstants::kContextOffset));
      __ pop(eax);  // result
      EmitReturnSequence();
      __ bind(&l_resume);  // received in eax
      __ PopTryHandler();

      // receiver = iter; f = iter.next; arg = received;
      __ bind(&l_next);

      __ mov(load_name, isolate()->factory()->next_string());
      __ push(load_name);                           // "next"
      __ push(Operand(esp, 2 * kPointerSize));      // iter
      __ push(eax);                                 // received

      // result = receiver[f](arg);
      __ bind(&l_call);
      __ mov(load_receiver, Operand(esp, kPointerSize));
      if (FLAG_vector_ics) {
        __ mov(VectorLoadICDescriptor::SlotRegister(),
               Immediate(Smi::FromInt(expr->KeyedLoadFeedbackSlot())));
      }
      Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
      CallIC(ic, TypeFeedbackId::None());
      __ mov(edi, eax);
      __ mov(Operand(esp, 2 * kPointerSize), edi);
      CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
      __ CallStub(&stub);

      __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
      __ Drop(1);  // The function is still on the stack; drop it.

      // if (!result.done) goto l_try;
      __ bind(&l_loop);
      __ push(eax);  // save result
      __ Move(load_receiver, eax);  // result
      __ mov(load_name,
             isolate()->factory()->done_string());  // "done"
      if (FLAG_vector_ics) {
        __ mov(VectorLoadICDescriptor::SlotRegister(),
               Immediate(Smi::FromInt(expr->DoneFeedbackSlot())));
      }
      CallLoadIC(NOT_CONTEXTUAL);  // result.done in eax
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic);
      __ test(eax, eax);
      __ j(zero, &l_try);

      // result.value
      __ pop(load_receiver);  // result
      __ mov(load_name,
             isolate()->factory()->value_string());  // "value"
      if (FLAG_vector_ics) {
        __ mov(VectorLoadICDescriptor::SlotRegister(),
               Immediate(Smi::FromInt(expr->ValueFeedbackSlot())));
      }
      CallLoadIC(NOT_CONTEXTUAL);  // result.value in eax
      context()->DropAndPlug(2, eax);  // drop iter and g
      break;
    }
  }
}


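// A hedged sketch of the resume protocol implemented below (pseudo-JS,
// illustrative only):
//
//   resume(g, v, mode):
//     if (g.continuation == kGeneratorClosed)
//       return mode == NEXT ? {value: undefined, done: true} : throw v;
//     if (g.continuation < 0)  // executing
//       throw generator state error;
//     otherwise rebuild the frame, restore or hole-fill the operand stack,
//     and jump to the saved continuation offset with v in eax.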
void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
  // The value stays in eax, and is ultimately read by the resumed generator,
  // as if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
  // is read to throw the value when the resumed generator is already closed.
  // ebx will hold the generator object until the activation has been resumed.
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  __ pop(ebx);

  // Check generator state.
  Label wrong_state, closed_state, done;
  STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
  STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
  __ cmp(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
         Immediate(Smi::FromInt(0)));
  __ j(equal, &closed_state);
  __ j(less, &wrong_state);

  // Load suspended function and context.
  __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
  __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));

  // Push receiver.
  __ push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));

  // Push holes for arguments to generator function.
  __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(edx,
         FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  __ mov(ecx, isolate()->factory()->the_hole_value());
  Label push_argument_holes, push_frame;
  __ bind(&push_argument_holes);
  __ sub(edx, Immediate(Smi::FromInt(1)));
  __ j(carry, &push_frame);
  __ push(ecx);
  __ jmp(&push_argument_holes);

  // Enter a new JavaScript frame, and initialize its slots as they were when
  // the generator was suspended.
  Label resume_frame;
  __ bind(&push_frame);
  __ call(&resume_frame);
  __ jmp(&done);
  __ bind(&resume_frame);
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS Function.

  // Load the operand stack size.
  __ mov(edx, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
  __ mov(edx, FieldOperand(edx, FixedArray::kLengthOffset));
  __ SmiUntag(edx);

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ cmp(edx, Immediate(0));
    __ j(not_zero, &slow_resume);
    __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
    __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(ecx);
    __ add(edx, ecx);
    __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
           Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ jmp(edx);
    __ bind(&slow_resume);
  }

  // Otherwise, we push holes for the operand stack and call the runtime to fix
  // up the stack and the handlers.
  Label push_operand_holes, call_resume;
  __ bind(&push_operand_holes);
  __ sub(edx, Immediate(1));
  __ j(carry, &call_resume);
  __ push(ecx);
  __ jmp(&push_operand_holes);
  __ bind(&call_resume);
  __ push(ebx);
  __ push(result_register());
  __ Push(Smi::FromInt(resume_mode));
  __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
  // Not reached: the runtime call returns elsewhere.
  __ Abort(kGeneratorFailedToResume);

  // Reached when the generator is closed.
  __ bind(&closed_state);
  if (resume_mode == JSGeneratorObject::NEXT) {
    // Return completed iterator result when generator is closed.
    __ push(Immediate(isolate()->factory()->undefined_value()));
    // Pop value from top-of-stack slot; box result into result register.
    EmitCreateIteratorResult(true);
  } else {
    // Throw the provided value.
    __ push(eax);
    __ CallRuntime(Runtime::kThrow, 1);
  }
  __ jmp(&done);

  // Throw error if we attempt to operate on a running generator.
  __ bind(&wrong_state);
  __ push(ebx);
  __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);

  __ bind(&done);
  context()->Plug(result_register());
}


void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label gc_required;
  Label allocated;

  Handle<Map> map(isolate()->native_context()->iterator_result_map());

  __ Allocate(map->instance_size(), eax, ecx, edx, &gc_required, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&gc_required);
  __ Push(Smi::FromInt(map->instance_size()));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ mov(context_register(),
         Operand(ebp, StandardFrameConstants::kContextOffset));

  __ bind(&allocated);
  __ mov(ebx, map);
  __ pop(ecx);
  __ mov(edx, isolate()->factory()->ToBoolean(done));
  DCHECK_EQ(map->instance_size(), 5 * kPointerSize);
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSGeneratorObject::kResultValuePropertyOffset),
         ecx);
  __ mov(FieldOperand(eax, JSGeneratorObject::kResultDonePropertyOffset),
         edx);

  // Only the value field needs a write barrier, as the other values are in
  // the root set.
  __ RecordWriteField(eax, JSGeneratorObject::kResultValuePropertyOffset,
                      ecx, edx, kDontSaveFPRegs);
}


void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  __ mov(LoadDescriptor::NameRegister(), Immediate(key->value()));
  if (FLAG_vector_ics) {
    __ mov(VectorLoadICDescriptor::SlotRegister(),
           Immediate(Smi::FromInt(prop->PropertyFeedbackSlot())));
    CallLoadIC(NOT_CONTEXTUAL);
  } else {
    CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
  }
}


void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  DCHECK(prop->IsSuperAccess());

  SuperReference* super_ref = prop->obj()->AsSuperReference();
  EmitLoadHomeObject(super_ref);
  __ push(eax);
  VisitForStackValue(super_ref->this_var());
  __ push(Immediate(key->value()));
  __ CallRuntime(Runtime::kLoadFromSuper, 3);
}


void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
  if (FLAG_vector_ics) {
    __ mov(VectorLoadICDescriptor::SlotRegister(),
           Immediate(Smi::FromInt(prop->PropertyFeedbackSlot())));
    CallIC(ic);
  } else {
    CallIC(ic, prop->PropertyFeedbackId());
  }
}


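// Background for the inline smi fast path below (a hedged sketch of the
// encoding): an ia32 smi stores its value shifted left by one with tag bit
// 0 (kSmiTag == 0, kSmiTagSize == 1), e.g. 5 -> 0x0000000a and
// -1 -> 0xfffffffe. Hence (left | right) has its low bit set iff either
// operand is a heap object, so a single test against kSmiTagMask covers
// both operands, and tagged addition works directly:
// (a << 1) + (b << 1) == (a + b) << 1.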
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left,
                                              Expression* right) {
  // Do combined smi check of the operands. Left operand is on the
  // stack. Right operand is in eax.
  Label smi_case, done, stub_call;
  __ pop(edx);
  __ mov(ecx, eax);
  __ or_(eax, edx);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);

  __ bind(&stub_call);
  __ mov(eax, ecx);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done, Label::kNear);

  // Smi case.
  __ bind(&smi_case);
  __ mov(eax, edx);  // Copy left operand in case of a stub call.

  switch (op) {
    case Token::SAR:
      __ SmiUntag(ecx);
      __ sar_cl(eax);  // No checks of result necessary.
      __ and_(eax, Immediate(~kSmiTagMask));
      break;
    case Token::SHL: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shl_cl(eax);
      // Check that the *signed* result fits in a smi.
      __ cmp(eax, 0xc0000000);
      __ j(positive, &result_ok);
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::SHR: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shr_cl(eax);
      __ test(eax, Immediate(0xc0000000));
      __ j(zero, &result_ok);
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::ADD:
      __ add(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::SUB:
      __ sub(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::MUL: {
      __ SmiUntag(eax);
      __ imul(eax, ecx);
      __ j(overflow, &stub_call);
      __ test(eax, eax);
      __ j(not_zero, &done, Label::kNear);
      __ mov(ebx, edx);
      __ or_(ebx, ecx);
      __ j(negative, &stub_call);
      break;
    }
    case Token::BIT_OR:
      __ or_(eax, ecx);
      break;
    case Token::BIT_AND:
      __ and_(eax, ecx);
      break;
    case Token::BIT_XOR:
      __ xor_(eax, ecx);
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  __ pop(edx);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
  JumpPatchSite patch_site(masm_);  // Unbound, signals no inlined smi code.
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  context()->Plug(eax);
}


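// EmitAssignment below stores eax into an arbitrary reference expression
// while preserving eax as the result; the value is pushed around the
// receiver/key evaluation. A hedged example of a caller-side pattern
// (illustrative): a for-in loop assigning the current key to an LHS such
// as o.p or a[i].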
void FullCodeGenerator::EmitAssignment(Expression* expr) {
  DCHECK(expr->IsValidReferenceExpression());

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
      __ push(eax);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ Move(StoreDescriptor::ReceiverRegister(), eax);
      __ pop(StoreDescriptor::ValueRegister());  // Restore value.
      __ mov(StoreDescriptor::NameRegister(),
             prop->key()->AsLiteral()->value());
      CallStoreIC();
      break;
    }
    case KEYED_PROPERTY: {
      __ push(eax);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Move(StoreDescriptor::NameRegister(), eax);
      __ pop(StoreDescriptor::ReceiverRegister());  // Receiver.
      __ pop(StoreDescriptor::ValueRegister());  // Restore value.
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(eax);
}


void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ mov(location, eax);
  if (var->IsContextSlot()) {
    __ mov(edx, eax);
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                               Token::Value op) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(StoreDescriptor::NameRegister(), var->name());
    __ mov(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
    CallStoreIC();

  } else if (op == Token::INIT_CONST_LEGACY) {
    // Const initializers need a write barrier.
    DCHECK(!var->IsParameter());  // No const parameters.
    if (var->IsLookupSlot()) {
      __ push(eax);
      __ push(esi);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
    } else {
      DCHECK(var->IsStackLocal() || var->IsContextSlot());
      Label skip;
      MemOperand location = VarOperand(var, ecx);
      __ mov(edx, location);
      __ cmp(edx, isolate()->factory()->the_hole_value());
      __ j(not_equal, &skip, Label::kNear);
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ bind(&skip);
    }

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to let variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, ecx);
    __ mov(edx, location);
    __ cmp(edx, isolate()->factory()->the_hole_value());
    __ j(not_equal, &assign, Label::kNear);
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError, 1);
    __ bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
    if (var->IsLookupSlot()) {
      // Assignment to var.
      __ push(eax);  // Value.
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      __ push(Immediate(Smi::FromInt(strict_mode())));
      __ CallRuntime(Runtime::kStoreLookupSlot, 4);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
      MemOperand location = VarOperand(var, ecx);
      if (generate_debug_code_ && op == Token::INIT_LET) {
        // Check for an uninitialized let binding.
        __ mov(edx, location);
        __ cmp(edx, isolate()->factory()->the_hole_value());
        __ Check(equal, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }
  }
  // Non-initializing assignments to consts are ignored.
}


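// A hedged JS-level illustration of the hole checks emitted above:
//
//   { x = 1; let x; }   // store sees the hole -> %ThrowReferenceError
//   { let y; y = 1; }   // binding already initialized -> plain store
//
// The hole value marks a let/const binding that has not been initialized
// yet; initialized legacy consts are simply skipped on re-assignment.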
void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  // eax    : value
  // esp[0] : receiver

  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ mov(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
  __ pop(StoreDescriptor::ReceiverRegister());
  CallStoreIC(expr->AssignmentFeedbackId());
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  // eax               : value
  // esp[0]            : key
  // esp[kPointerSize] : receiver

  __ pop(StoreDescriptor::NameRegister());  // Key.
  __ pop(StoreDescriptor::ReceiverRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(eax));
  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
  CallIC(ic, expr->AssignmentFeedbackId());

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(eax);
}


void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    if (!expr->IsSuperAccess()) {
      VisitForAccumulatorValue(expr->obj());
      __ Move(LoadDescriptor::ReceiverRegister(), result_register());
      EmitNamedPropertyLoad(expr);
    } else {
      EmitNamedSuperPropertyLoad(expr);
    }
    PrepareForBailoutForId(expr->LoadId(), TOS_REG);
    context()->Plug(eax);
  } else {
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    __ pop(LoadDescriptor::ReceiverRegister());                  // Object.
    __ Move(LoadDescriptor::NameRegister(), result_register());  // Key.
    EmitKeyedPropertyLoad(expr);
    context()->Plug(eax);
  }
}


void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId ast_id) {
  ic_total_count_++;
  __ call(code, RelocInfo::CODE_TARGET, ast_id);
}


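// A hedged overview of the call sequences that follow: a bare f(...) loads
// f via EmitVariableLoad and pushes undefined as the receiver (patched to
// the global proxy by a sloppy-mode callee's prologue), while o.f(...) and
// o[k](...) load the target through a load IC and leave the receiver on
// the stack beneath it.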
// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  CallICState::CallType call_type =
      callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
  // Get the target function.
  if (call_type == CallICState::FUNCTION) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    __ push(Immediate(isolate()->factory()->undefined_value()));
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    // Push the target function under the receiver.
    __ push(Operand(esp, 0));
    __ mov(Operand(esp, kPointerSize), eax);
  }

  EmitCall(expr, call_type);
}


void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  // Load the function from the receiver.
  SuperReference* super_ref = callee->AsProperty()->obj()->AsSuperReference();
  EmitLoadHomeObject(super_ref);
  __ push(eax);
  VisitForAccumulatorValue(super_ref->this_var());
  __ push(eax);
  __ push(Operand(esp, kPointerSize));
  __ push(eax);
  __ push(Immediate(key->value()));
  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - home_object <-- LoadFromSuper will pop here and below.
  //  - this (receiver)
  //  - key
  __ CallRuntime(Runtime::kLoadFromSuper, 3);

  // Replace home_object with target function.
  __ mov(Operand(esp, kPointerSize), eax);

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr, CallICState::METHOD);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
  __ mov(LoadDescriptor::NameRegister(), eax);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  // Push the target function under the receiver.
  __ push(Operand(esp, 0));
  __ mov(Operand(esp, kPointerSize), eax);

  EmitCall(expr, CallICState::METHOD);
}


void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }

  // Record source position of the IC call.
  SetSourcePosition(expr->position());
  Handle<Code> ic = CallIC::initialize_stub(
      isolate(), arg_count, call_type);
  __ Move(edx, Immediate(Smi::FromInt(expr->CallFeedbackSlot())));
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);

  RecordJSReturnSite(expr);

  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));

  context()->DropAndPlug(1, eax);
}


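// A hedged note on the argument list assembled for
// Runtime::kResolvePossiblyDirectEval: the function copy pushed by
// VisitCall plus the five values pushed below (a copy of the first
// argument or undefined, the enclosing function, its receiver, the
// language mode, and the scope's start position) make up the six runtime
// arguments.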
void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  // Push copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ push(Operand(esp, arg_count * kPointerSize));
  } else {
    __ push(Immediate(isolate()->factory()->undefined_value()));
  }

  // Push the enclosing function.
  __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  // Push the receiver of the enclosing function.
  __ push(Operand(ebp, (2 + info_->scope()->num_parameters()) * kPointerSize));
  // Push the language mode.
  __ push(Immediate(Smi::FromInt(strict_mode())));

  // Push the start position of the scope the call resides in.
  __ push(Immediate(Smi::FromInt(scope()->start_position())));

  // Do the runtime call.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
}


void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function. Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  Call::CallType call_type = expr->GetCallType(isolate());

  if (call_type == Call::POSSIBLY_EVAL_CALL) {
    // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
    // to resolve the function we need to call and the receiver of the call.
    // Then we call the resolved function using the given arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();
    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      // Reserved receiver slot.
      __ push(Immediate(isolate()->factory()->undefined_value()));
      // Push the arguments.
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      // Push a copy of the function (found below the arguments) and
      // resolve eval.
      __ push(Operand(esp, (arg_count + 1) * kPointerSize));
      EmitResolvePossiblyDirectEval(arg_count);

      // The runtime call returns a pair of values in eax (function) and
      // edx (receiver). Touch up the stack with the right values.
      __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
      __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
    }
    // Record source position for debugger.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, eax);

  } else if (call_type == Call::GLOBAL_CALL) {
    EmitCallWithLoadIC(expr);

  } else if (call_type == Call::LOOKUP_SLOT_CALL) {
    // Call to a lookup slot (dynamically introduced variable).
    VariableProxy* proxy = callee->AsVariableProxy();
    Label slow, done;
    { PreservePositionScope scope(masm()->positions_recorder());
      // Generate code for loading from variables potentially shadowed by
      // eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
    }
    __ bind(&slow);
    // Call the runtime to find the function to call (returned in eax) and
    // the object holding it (returned in edx).
    __ push(context_register());
    __ push(Immediate(proxy->name()));
    __ CallRuntime(Runtime::kLoadLookupSlot, 2);
    __ push(eax);  // Function.
    __ push(edx);  // Receiver.

    // If fast case code has been generated, emit code to push the function
    // and receiver and have the slow path jump around this code.
    if (done.is_linked()) {
      Label call;
      __ jmp(&call, Label::kNear);
      __ bind(&done);
      // Push function.
      __ push(eax);
      // The receiver is implicitly the global receiver. Indicate this by
      // passing undefined to the call function stub.
      __ push(Immediate(isolate()->factory()->undefined_value()));
      __ bind(&call);
    }

    // The receiver is either the global receiver or an object found by
    // LoadContextSlot.
    EmitCall(expr);

  } else if (call_type == Call::PROPERTY_CALL) {
    Property* property = callee->AsProperty();
    bool is_named_call = property->key()->IsPropertyName();
    // super.x() is handled by EmitSuperCallWithLoadIC.
    if (property->IsSuperAccess() && is_named_call) {
      EmitSuperCallWithLoadIC(expr);
    } else {
      {
        PreservePositionScope scope(masm()->positions_recorder());
        VisitForStackValue(property->obj());
      }
      if (is_named_call) {
        EmitCallWithLoadIC(expr);
      } else {
        EmitKeyedCallWithLoadIC(expr, property->key());
      }
    }
  } else {
    DCHECK(call_type == Call::OTHER_CALL);
    // Call to an arbitrary expression not handled specially above.
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    __ push(Immediate(isolate()->factory()->undefined_value()));
    // Emit function call.
    EmitCall(expr);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  DCHECK(expr->return_is_recorded_);
#endif
}


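// A hedged sketch of how the dispatch above plays out (illustrative only):
//
//   eval(s)       -> POSSIBLY_EVAL_CALL: resolve the target at runtime.
//   f()           -> GLOBAL_CALL: load f through a call IC.
//   with (o) f()  -> LOOKUP_SLOT_CALL: %LoadLookupSlot finds f and receiver.
//   o.f(), o[k]() -> PROPERTY_CALL.
//   (f || g)()    -> OTHER_CALL with an undefined receiver.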
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());

  // Load function and argument count into edi and eax.
  __ Move(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  if (FLAG_pretenuring_call_new) {
    EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
    DCHECK(expr->AllocationSiteFeedbackSlot() ==
           expr->CallNewFeedbackSlot() + 1);
  }

  __ LoadHeapObject(ebx, FeedbackVector());
  __ mov(edx, Immediate(Smi::FromInt(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
  __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ test(eax, Immediate(kSmiTagMask));
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ test(eax, Immediate(kSmiTagMask | 0x80000000));
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, if_true);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ movzx_b(ecx, FieldOperand(ebx, Map::kBitFieldOffset));
  __ test(ecx, Immediate(1 << Map::kIsUndetectable));
  __ j(not_zero, if_false);
  __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  __ cmp(ecx, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  __ j(below, if_false);
  __ cmp(ecx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(below_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(above_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kBitFieldOffset));
  __ test(ebx, Immediate(1 << Map::kIsUndetectable));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(not_zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


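// A hedged note on the fast path below: a successful check sets
// Map::kStringWrapperSafeForDefaultValueOf in the map's second bit field,
// so later objects sharing that map skip the hash-table test and the
// descriptor-array walk for "valueOf" and only re-verify the prototype.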
void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false, skip_lookup;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ AssertNotSmi(eax);

  // Check whether this map has already been checked to be safe for default
  // valueOf.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ebx, Map::kBitField2Offset),
            1 << Map::kStringWrapperSafeForDefaultValueOf);
  __ j(not_zero, &skip_lookup);

  // Check for fast case object. Return false for slow case objects.
  __ mov(ecx, FieldOperand(eax, JSObject::kPropertiesOffset));
  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ cmp(ecx, isolate()->factory()->hash_table_map());
  __ j(equal, if_false);

  // Look for valueOf string in the descriptor array, and indicate false if
  // found. Since we omit an enumeration index check, if it is added via a
  // transition that shares its descriptor array, this is a false positive.
  Label entry, loop, done;

  // Skip loop if no descriptors are valid.
  __ NumberOfOwnDescriptors(ecx, ebx);
  __ cmp(ecx, 0);
  __ j(equal, &done);

  __ LoadInstanceDescriptors(ebx, ebx);
  // ebx: descriptor array.
  // ecx: valid entries in the descriptor array.
  // Calculate the end of the descriptor array.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kPointerSize == 4);
  __ imul(ecx, ecx, DescriptorArray::kDescriptorSize);
  __ lea(ecx, Operand(ebx, ecx, times_4, DescriptorArray::kFirstOffset));
  // Calculate location of the first key name.
  __ add(ebx, Immediate(DescriptorArray::kFirstOffset));
  // Loop through all the keys in the descriptor array. If one of these is the
  // internalized string "valueOf" the result is false.
  __ jmp(&entry);
  __ bind(&loop);
  __ mov(edx, FieldOperand(ebx, 0));
  __ cmp(edx, isolate()->factory()->value_of_string());
  __ j(equal, if_false);
  __ add(ebx, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
  __ bind(&entry);
  __ cmp(ebx, ecx);
  __ j(not_equal, &loop);

  __ bind(&done);

  // Reload map as register ebx was used as temporary above.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));

  // Set the bit in the map to indicate that there is no local valueOf field.
  __ or_(FieldOperand(ebx, Map::kBitField2Offset),
         Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));

  __ bind(&skip_lookup);

  // If a valueOf property is not found on the object, check that its
  // prototype is the unmodified String prototype. If not, the result is
  // false.
  __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  __ JumpIfSmi(ecx, if_false);
  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(edx,
         FieldOperand(edx, GlobalObject::kNativeContextOffset));
  __ cmp(ecx,
         ContextOperand(edx,
                        Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


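// Background for the -0 check below (a hedged sketch): IEEE-754 -0.0 is the
// bit pattern 0x8000000000000000, so a heap number is -0 iff its exponent
// word is 0x80000000 and its mantissa word is 0. "cmp exp, 1" overflows
// only when exp == 0x80000000 (INT_MIN minus 1 wraps), which is why
// comparing against 1 and branching on no_overflow is the shortest
// possible encoding.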
void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
  __ CheckMap(eax, map, if_false, DO_SMI_CHECK);
  // Check if the exponent half is 0x80000000. Comparing against 1 and
  // checking for overflow is the shortest possible encoding.
  __ cmp(FieldOperand(eax, HeapNumber::kExponentOffset), Immediate(0x1));
  __ j(no_overflow, if_false);
  __ cmp(FieldOperand(eax, HeapNumber::kMantissaOffset), Immediate(0x0));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Get the frame pointer for the calling frame.
  __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &check_frame_marker);
  __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
         Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ pop(ebx);
  __ cmp(eax, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  // ArgumentsAccessStub expects the key in edx and the formal
  // parameter count in eax.
  VisitForAccumulatorValue(args->at(0));
  __ mov(edx, eax);
  __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);

  Label exit;
  // Get the number of formal parameters.
  __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &exit);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  __ AssertSmi(eax);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(eax, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  // Assume that there are only two callable types, and one of them is at
  // either end of the type range for JS object types. Saves extra comparisons.
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, eax);
  // Map is now in eax.
  __ j(below, &null);
  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ j(equal, &function);

  __ CmpInstanceType(eax, LAST_SPEC_OBJECT_TYPE);
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                LAST_SPEC_OBJECT_TYPE - 1);
  __ j(equal, &function);
  // Assume that there is no larger type.
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);

  // Check if the constructor in the map is a JS function.
  __ mov(eax, FieldOperand(eax, Map::kConstructorOffset));
  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  __ j(not_equal, &non_function_constructor);

  // eax now contains the constructor function. Grab the
  // instance class name from there.
  __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ mov(eax, isolate()->factory()->Function_string());
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ mov(eax, isolate()->factory()->Object_string());
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ mov(eax, isolate()->factory()->null_value());

  // All done.
  __ bind(&done);

  context()->Plug(eax);
}


void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpExecStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(eax, &done, Label::kNear);
  // If the object is not a value type, return the object.
  __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
  __ j(not_equal, &done, Label::kNear);
  __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  DCHECK_NE(NULL, args->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.
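  // Field 0 is the date value itself. Fields below
  // JSDate::kFirstUncachedField are cached on the JSDate object and can be
  // read directly as long as the object's cache stamp still matches the
  // isolate's date cache stamp; anything else goes through the C function.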

  Label runtime, done, not_date_object;
  Register object = eax;
  Register result = eax;
  Register scratch = ecx;

  __ JumpIfSmi(object, &not_date_object);
  __ CmpObjectType(object, JS_DATE_TYPE, scratch);
  __ j(not_equal, &not_date_object);

  if (index->value() == 0) {
    __ mov(result, FieldOperand(object, JSDate::kValueOffset));
    __ jmp(&done);
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ mov(scratch, Operand::StaticVariable(stamp));
      __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
      __ j(not_equal, &runtime, Label::kNear);
      __ mov(result, FieldOperand(object, JSDate::kValueOffset +
                                          kPointerSize * index->value()));
      __ jmp(&done);
    }
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch);
    __ mov(Operand(esp, 0), object);
    __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ jmp(&done);
  }

  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kThrowNotDateError, 0);
  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = eax;
  Register index = ebx;
  Register value = ecx;

  VisitForStackValue(args->at(0));  // index
  VisitForStackValue(args->at(1));  // value
  VisitForAccumulatorValue(args->at(2));  // string

  __ pop(value);
  __ pop(index);

  if (FLAG_debug_code) {
    __ test(value, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    __ test(index, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
  }

  __ SmiUntag(value);
  __ SmiUntag(index);

  if (FLAG_debug_code) {
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
  }

  __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
           value);
  context()->Plug(string);
}


void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = eax;
  Register index = ebx;
  Register value = ecx;

  VisitForStackValue(args->at(0));  // index
  VisitForStackValue(args->at(1));  // value
  VisitForAccumulatorValue(args->at(2));  // string
  __ pop(value);
  __ pop(index);

  if (FLAG_debug_code) {
    __ test(value, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    __ test(index, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    __ SmiUntag(index);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
    __ SmiTag(index);
  }

  __ SmiUntag(value);
  // No need to untag a smi for two-byte addressing.
  __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
           value);
  context()->Plug(string);
}


void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  // Load the arguments on the stack and call the runtime function.
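  // (In ON_STACK mode the stub takes both operands from the stack; as with
  // the other stub calls in this file, the result comes back in eax.)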
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  MathPowStub stub(isolate(), MathPowStub::ON_STACK);
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));  // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(ebx);  // eax = value. ebx = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(ebx, &done, Label::kNear);

  // If the object is not a value type, return the value.
  __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
  __ j(not_equal, &done, Label::kNear);

  // Store the value.
  __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);

  // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(edx, eax);
  __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(args->length(), 1);

  // Load the argument into eax and call the stub.
  VisitForAccumulatorValue(args->at(0));

  NumberToStringStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(eax, ebx);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(ebx);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register result = edx;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ Move(result, Immediate(isolate()->factory()->nan_value()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ Move(result, Immediate(isolate()->factory()->undefined_value()));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register scratch = edx;
  Register result = eax;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ Move(result, Immediate(isolate()->factory()->empty_string()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ Move(result, Immediate(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ pop(edx);
  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; ++i) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  __ j(not_equal, &runtime);

  // InvokeFunction requires the function in edi. Move it in there.
  __ mov(edi, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(edi, count, CALL_FUNCTION, NullCallWrapper());
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(eax);
}


void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
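  // (The stub takes its three operands in registers: the pops below leave
  // the first argument in ecx, the second in ebx, and the third stays in
  // eax, the accumulator.)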
  RegExpConstructResultStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  __ pop(ebx);
  __ pop(ecx);
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());

  DCHECK_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort(kAttemptToUseUndefinedCache);
    __ mov(eax, isolate()->factory()->undefined_value());
    context()->Plug(eax);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = eax;
  Register cache = ebx;
  Register tmp = ecx;
  __ mov(cache, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
  __ mov(cache,
         FieldOperand(cache, GlobalObject::kNativeContextOffset));
  __ mov(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ mov(cache,
         FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done, not_found;
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
  // tmp now holds finger offset as a smi.
  __ cmp(key, FixedArrayElementOperand(cache, tmp));
  __ j(not_equal, &not_found);

  __ mov(eax, FixedArrayElementOperand(cache, tmp, 1));
  __ jmp(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
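  // (Runtime::kGetFromCache takes the cache and the key as its two
  // arguments; the looked-up value comes back in eax.)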
  __ push(cache);
  __ push(key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ test(FieldOperand(eax, String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
  __ IndexFromHash(eax, eax);

  context()->Plug(eax);
}


void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;

  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  // We will leave the separator on the stack until the end of the function.
  VisitForStackValue(args->at(1));
  // Load this to eax (= array)
  VisitForAccumulatorValue(args->at(0));
  // All aliases of the same register have disjoint lifetimes.
  Register array = eax;
  Register elements = no_reg;  // Will be eax.

  Register index = edx;

  Register string_length = ecx;

  Register string = esi;

  Register scratch = ebx;

  Register array_length = edi;
  Register result_pos = no_reg;  // Will be edi.

  // Separator operand is already pushed.
  Operand separator_operand = Operand(esp, 2 * kPointerSize);
  Operand result_operand = Operand(esp, 1 * kPointerSize);
  Operand array_length_operand = Operand(esp, 0);
  __ sub(esp, Immediate(2 * kPointerSize));
  __ cld();
  // Check that the array is a JSArray
  __ JumpIfSmi(array, &bailout);
  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, &bailout);

  // Check that the array has fast elements.
  __ CheckFastElements(scratch, &bailout);

  // If the array has length zero, return the empty string.
  __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length);
  __ j(not_zero, &non_trivial_array);
  __ mov(result_operand, isolate()->factory()->empty_string());
  __ jmp(&done);

  // Save the array length.
  __ bind(&non_trivial_array);
  __ mov(array_length_operand, array_length);

  // Save the FixedArray containing array's elements.
  // End of array's live range.
  elements = array;
  __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
  array = no_reg;


  // Check that all array elements are sequential one-byte strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ Move(index, Immediate(0));
  __ Move(string_length, Immediate(0));
  // Loop condition: while (index < length).
  // Live loop registers: index, array_length, string,
  //                      scratch, string_length, elements.
  if (generate_debug_code_) {
    __ cmp(index, array_length);
    __ Assert(less, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
  }
  __ bind(&loop);
  __ mov(string, FieldOperand(elements,
                              index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ JumpIfSmi(string, &bailout);
  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ and_(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
  __ j(not_equal, &bailout);
  __ add(string_length,
         FieldOperand(string, SeqOneByteString::kLengthOffset));
  __ j(overflow, &bailout);
  __ add(index, Immediate(1));
  __ cmp(index, array_length);
  __ j(less, &loop);

  // If array_length is 1, return elements[0], a string.
  __ cmp(array_length, 1);
  __ j(not_equal, &not_size_one_array);
  __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
  __ mov(result_operand, scratch);
  __ jmp(&done);

  __ bind(&not_size_one_array);

  // End of array_length live range.
  result_pos = array_length;
  array_length = no_reg;

  // Live registers:
  //   string_length: Sum of string lengths, as a smi.
  //   elements: FixedArray of strings.

  // Check that the separator is a flat one-byte string.
  __ mov(string, separator_operand);
  __ JumpIfSmi(string, &bailout);
  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ and_(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
  __ j(not_equal, &bailout);

  // Add (separator length times array_length) - separator length
  // to string_length.
  __ mov(scratch, separator_operand);
  __ mov(scratch, FieldOperand(scratch, SeqOneByteString::kLengthOffset));
  __ sub(string_length, scratch);  // May be negative, temporarily.
  __ imul(scratch, array_length_operand);
  __ j(overflow, &bailout);
  __ add(string_length, scratch);
  __ j(overflow, &bailout);

  __ shr(string_length, 1);
  // Live registers and stack values:
  //   string_length
  //   elements
  __ AllocateOneByteString(result_pos, string_length, scratch, index, string,
                           &bailout);
  __ mov(result_operand, result_pos);
  __ lea(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));


  __ mov(string, separator_operand);
  __ cmp(FieldOperand(string, SeqOneByteString::kLengthOffset),
         Immediate(Smi::FromInt(1)));
  __ j(equal, &one_char_separator);
  __ j(greater, &long_separator);


  // Empty separator case
  __ mov(index, Immediate(0));
  __ jmp(&loop_1_condition);
  // Loop condition: while (index < length).
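  // (Empty-separator fast path: the strings are simply copied back to back
  // into the result, with no separator bytes in between.)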
  __ bind(&loop_1);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.
  //   elements: the FixedArray of strings we are joining.

  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));
  __ bind(&loop_1_condition);
  __ cmp(index, array_length_operand);
  __ j(less, &loop_1);  // End while (index < length).
  __ jmp(&done);


  // One-character separator case
  __ bind(&one_char_separator);
  // Replace separator with its one-byte character value.
  __ mov_b(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ mov_b(separator_operand, scratch);

  __ Move(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the
  // first element is not preceded by a separator.
  __ jmp(&loop_2_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_2);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.

  // Copy the separator character to the result.
  __ mov_b(scratch, separator_operand);
  __ mov_b(Operand(result_pos, 0), scratch);
  __ inc(result_pos);

  __ bind(&loop_2_entry);
  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));

  __ cmp(index, array_length_operand);
  __ j(less, &loop_2);  // End while (index < length).
  __ jmp(&done);


  // Long separator case (separator is more than one character).
  __ bind(&long_separator);

  __ Move(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the
  // first element is not preceded by a separator.
  __ jmp(&loop_3_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_3);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.

  // Copy the separator to the result.
  __ mov(string, separator_operand);
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);

  __ bind(&loop_3_entry);
  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));

  __ cmp(index, array_length_operand);
  __ j(less, &loop_3);  // End while (index < length).
  __ jmp(&done);


  __ bind(&bailout);
  __ mov(result_operand, isolate()->factory()->undefined_value());
  __ bind(&done);
  __ mov(eax, result_operand);
  // Drop temp values from the stack, and restore context register.
  __ add(esp, Immediate(3 * kPointerSize));

  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->Plug(eax);
}


void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
  __ SmiTag(eax);
  context()->Plug(eax);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  if (expr->function() != NULL &&
      expr->function()->intrinsic_type == Runtime::INLINE) {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();

  if (expr->is_jsruntime()) {
    // Push the builtins object as receiver.
    __ mov(eax, GlobalObjectOperand());
    __ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset));

    // Load the function from the receiver.
    __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
    __ mov(LoadDescriptor::NameRegister(), Immediate(expr->name()));
    if (FLAG_vector_ics) {
      __ mov(VectorLoadICDescriptor::SlotRegister(),
             Immediate(Smi::FromInt(expr->CallRuntimeFeedbackSlot())));
      CallLoadIC(NOT_CONTEXTUAL);
    } else {
      CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
    }

    // Push the target function under the receiver.
    __ push(Operand(esp, 0));
    __ mov(Operand(esp, kPointerSize), eax);

    // Code common for calls using the IC.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Record source position of the IC call.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, eax);

  } else {
    // Push the arguments ("left-to-right").
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Call the C runtime function.
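    // (The call consumes the arguments pushed above and, as elsewhere in
    // this file, returns its result in eax.)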
    __ CallRuntime(expr->function(), arg_count);

    context()->Plug(eax);
  }
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ push(Immediate(Smi::FromInt(strict_mode())));
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(eax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        DCHECK(strict_mode() == SLOPPY || var->is_this());
        if (var->IsUnallocated()) {
          __ push(GlobalObjectOperand());
          __ push(Immediate(var->name()));
          __ push(Immediate(Smi::FromInt(SLOPPY)));
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(eax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false. 'this' is
          // not really a variable, though we implement it as one. The
          // subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ push(context_register());
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
          context()->Plug(eax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(isolate()->factory()->undefined_value());
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
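        // (MaterializeTrueId and MaterializeFalseId below are those two
        // extra bailout points.)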
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->true_value());
        } else {
          __ Push(isolate()->factory()->true_value());
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->false_value());
        } else {
          __ Push(isolate()->factory()->false_value());
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(eax);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ push(Immediate(Smi::FromInt(0)));
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in the register.
      VisitForStackValue(prop->obj());
      __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForStackValue(prop->key());
      __ mov(LoadDescriptor::ReceiverRegister(),
             Operand(esp, kPointerSize));  // Object.
      __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));  // Key.
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(eax);
            break;
          case NAMED_PROPERTY:
            __ mov(Operand(esp, kPointerSize), eax);
            break;
          case KEYED_PROPERTY:
            __ mov(Operand(esp, 2 * kPointerSize), eax);
            break;
        }
      }
    }

    if (expr->op() == Token::INC) {
      __ add(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    }
    __ j(no_overflow, &done, Label::kNear);
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ add(eax, Immediate(Smi::FromInt(1)));
    }
    __ jmp(&stub_call, Label::kNear);
    __ bind(&slow);
  }
  ToNumberStub convert_stub(isolate());
  __ CallStub(&convert_stub);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(eax);
          break;
        case NAMED_PROPERTY:
          __ mov(Operand(esp, kPointerSize), eax);
          break;
        case KEYED_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
      }
    }
  }

  // Record position before stub call.
  SetSourcePosition(expr->position());

  // Call stub for +1/-1.
  __ bind(&stub_call);
  __ mov(edx, eax);
  __ mov(eax, Immediate(Smi::FromInt(1)));
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), expr->binary_op(),
                                              NO_OVERWRITE).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in eax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(eax);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(eax);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::NameRegister(),
             prop->key()->AsLiteral()->value());
      __ pop(StoreDescriptor::ReceiverRegister());
      CallStoreIC(expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ pop(StoreDescriptor::NameRegister());
      __ pop(StoreDescriptor::ReceiverRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
      CallIC(ic, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        // Result is on the stack
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
  }
}


void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  VariableProxy* proxy = expr->AsVariableProxy();
  DCHECK(!context()->IsEffect());
  DCHECK(!context()->IsTest());

  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "[ Global variable");
    __ mov(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
    __ mov(LoadDescriptor::NameRegister(), Immediate(proxy->name()));
    if (FLAG_vector_ics) {
      __ mov(VectorLoadICDescriptor::SlotRegister(),
             Immediate(Smi::FromInt(proxy->VariableFeedbackSlot())));
    }
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallLoadIC(NOT_CONTEXTUAL);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(eax);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Comment cmnt(masm_, "[ Lookup slot");
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ push(esi);
    __ push(Immediate(proxy->name()));
    __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(eax);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(eax, if_true);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           isolate()->factory()->heap_number_map());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    __ j(above_equal, if_false);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    Split(zero, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, SYMBOL_TYPE, edx);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(equal, if_true);
    __ cmp(eax, isolate()->factory()->false_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ cmp(eax, isolate()->factory()->undefined_value());
    __ j(equal, if_true);
    __ JumpIfSmi(eax, if_false);
    // Check for undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
    __ test(ecx, Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(eax, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
    __ j(equal, if_true);
    __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(eax, if_false);
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_true);
    __ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx);
    __ j(below, if_false);
    __ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ j(above, if_false);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    Split(zero, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
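  // (The literal patterns, comparing a typeof result against a string
  // literal or comparing against null/undefined, are the ones handled by
  // EmitLiteralCompareTypeof and EmitLiteralCompareNil in this file.)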
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      // The stub returns 0 for true.
      Split(zero, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      __ pop(edx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ mov(ecx, edx);
        __ or_(ecx, eax);
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
        __ cmp(edx, eax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Handle<Object> nil_value = nil == kNullValue
      ? isolate()->factory()->null_value()
      : isolate()->factory()->undefined_value();
  if (expr->op() == Token::EQ_STRICT) {
    __ cmp(eax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ test(eax, eax);
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(eax);
}


Register FullCodeGenerator::result_register() {
  return eax;
}


Register FullCodeGenerator::context_register() {
  return esi;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ push(Immediate(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts nested inside eval code have the same closure as the context
    // calling eval, not the anonymous closure containing the eval code.
    // Fetch it from the context.
    __ push(ContextOperand(esi, Context::CLOSURE_INDEX));
  } else {
    DCHECK(declaration_scope->is_function_scope());
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  // Cook return address on top of stack (smi encoded Code* delta).
  DCHECK(!result_register().is(edx));
  __ pop(edx);
  __ sub(edx, Immediate(masm_->CodeObject()));
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  __ SmiTag(edx);
  __ push(edx);

  // Store result register while executing finally block.
  __ push(result_register());

  // Store pending message while executing finally block.
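  // (Three pieces of pending-message state are saved below: the message
  // object, the has-pending-message flag as a smi, and the message script.
  // ExitFinallyBlock pops them back in the reverse order.)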
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_obj));
  __ push(edx);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(edx, Operand::StaticVariable(has_pending_message));
  __ SmiTag(edx);
  __ push(edx);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_script));
  __ push(edx);
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(edx));
  // Restore pending message from stack.
  __ pop(edx);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(Operand::StaticVariable(pending_message_script), edx);

  __ pop(edx);
  __ SmiUntag(edx);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(Operand::StaticVariable(has_pending_message), edx);

  __ pop(edx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(Operand::StaticVariable(pending_message_obj), edx);

  // Restore result register from stack.
  __ pop(result_register());

  // Uncook return address.
  __ pop(edx);
  __ SmiUntag(edx);
  __ add(edx, Immediate(masm_->CodeObject()));
  __ jmp(edx);
}


#undef __

#define __ ACCESS_MASM(masm())

FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ mov(esi, Operand(esp, StackHandlerConstants::kContextOffset));
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  }
  __ PopTryHandler();
  __ call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}

#undef __


static const byte kJnsInstruction = 0x79;
static const byte kJnsOffset = 0x11;
static const byte kNopByteOne = 0x66;
static const byte kNopByteTwo = 0x90;
#ifdef DEBUG
static const byte kCallInstruction = 0xe8;
#endif


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;

  switch (target_state) {
    case INTERRUPT:
      //   sub <profiling_counter>, <delta>  ;; Not changed
      //   jns ok
      //   call <interrupt stub>
      // ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      //   sub <profiling_counter>, <delta>  ;; Not changed
      //   nop
      //   nop
      //   call <on-stack replacement>
      // ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  Assembler::set_target_address_at(call_target_address,
                                   unoptimized_code,
                                   replacement_code->entry());
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  DCHECK_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
    DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address,
                                           unoptimized_code));
    return INTERRUPT;
  }

  DCHECK_EQ(kNopByteOne, *jns_instr_address);
  DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));

  if (Assembler::target_address_at(call_target_address, unoptimized_code) ==
      isolate->builtins()->OnStackReplacement()->entry()) {
    return ON_STACK_REPLACEMENT;
  }

  DCHECK_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
            Assembler::target_address_at(call_target_address,
                                         unoptimized_code));
  return OSR_AFTER_STACK_CHECK;
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32