// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_IA32

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ia32/frames-ia32.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      DCHECK(is_uint8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);
  }

  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o edi: the JS function object being called (i.e. ourselves)
//   o edx: the new target value
//   o esi: our context
//   o ebp: our caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-ia32.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));
    __ AssertNotSmi(ecx);
    __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ecx);
    __ Assert(above_equal, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      if (locals_count >= 128) {
        Label ok;
        __ mov(ecx, esp);
        __ sub(ecx, Immediate(locals_count * kPointerSize));
        ExternalReference stack_limit =
            ExternalReference::address_of_real_stack_limit(isolate());
        __ cmp(ecx, Operand::StaticVariable(stack_limit));
        __ j(above_equal, &ok, Label::kNear);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(ecx, loop_iterations);
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(eax);
        }
        __ dec(ecx);
        __ j(not_zero, &loop_header, Label::kNear);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(eax);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    // Argument to NewContext is the function, which is still in edi.
    if (info->scope()->is_script_scope()) {
      __ push(edi);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(edx);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(edi);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(edx);  // Restore new target.
      }
    }
    function_in_register = false;
    // Context is returned in eax. It replaces the context passed to us.
    // It's saved in the stack and kept live in esi.
    __ mov(esi, eax);
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);

    // Copy parameters into context if necessary.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(esi,
                                    context_offset,
                                    eax,
                                    ebx,
                                    kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Register holding this function and new target are both trashed in case we
  // bailout here. But since that can happen only when new target is not used
  // and we allocate a context, the value of |function_in_register| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers register again, keep it marked as such.
    }
    SetVar(this_function_var, edi, ebx, ecx);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, edx, ebx, ecx);
  }

  // Possibly allocate RestParameters
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register = false;
    SetVar(rest_param, eax, ebx, edx);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Arguments object must be allocated after the context object, in
    // case the "arguments" or ".arguments" variables are in the context.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(edi);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, eax, ebx, edx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    ExternalReference stack_limit =
        ExternalReference::address_of_stack_limit(isolate());
    __ cmp(esp, Operand::StaticVariable(stack_limit));
    __ j(above_equal, &ok, Label::kNear);
    __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ Move(eax, Immediate(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(ebx, Immediate(profiling_counter_));
  __ sub(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(delta)));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ mov(ebx, Immediate(profiling_counter_));
  __ mov(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(reset_value)));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ j(positive, &ok, Label::kNear);
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ j(positive, &ok, Label::kNear);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(eax);
  }
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(eax);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    // Common return label
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(eax);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    SetReturnPosition(literal());
    __ leave();

    int arg_count = info_->scope()->num_parameters() + 1;
    int arguments_bytes = arg_count * kPointerSize;
    __ Ret(arguments_bytes, ecx);
  }
}

void FullCodeGenerator::RestoreContext() {
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  // Memory operands can be pushed directly.
  codegen()->PushOperand(operand);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeMove(result_register(), Immediate(lit));
  } else {
    __ Move(result_register(), Immediate(lit));
  }
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  codegen()->OperandStackDepthIncrement(1);
  if (lit->IsSmi()) {
    __ SafePush(Immediate(lit));
  } else {
    __ push(Immediate(lit));
  }
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNull(isolate()) || lit->IsUndefined(isolate()) ||
         !lit->IsUndetectable());
  if (lit->IsUndefined(isolate()) || lit->IsNull(isolate()) ||
      lit->IsFalse(isolate())) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ mov(Operand(esp, 0), reg);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  codegen()->OperandStackDepthIncrement(1);
  Label done;
  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ mov(result_register(), value);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  codegen()->OperandStackDepthIncrement(1);
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ push(Immediate(value));
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(equal, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return Operand(ebp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ mov(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
    __ cmp(ebx, isolate()->factory()->with_context_map());
    __ Check(not_equal, kDeclarationInWithContext);
    __ cmp(ebx, isolate()->factory()->catch_context_map());
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      DCHECK(!variable->binding_needs_init());
      globals_->Add(variable->name(), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ mov(StackOperand(variable),
               Immediate(isolate()->factory()->the_hole_value()));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ mov(ContextOperand(esi, variable->index()),
               Immediate(isolate()->factory()->the_hole_value()));
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      DCHECK_EQ(VAR, mode);
      DCHECK(!hole_init);
      __ push(Immediate(variable->name()));
      __ CallRuntime(Runtime::kDeclareEvalVar);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ mov(StackOperand(variable), result_register());
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ mov(ContextOperand(esi, variable->index()), result_register());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(esi,
                                Context::SlotOffset(variable->index()),
                                result_register(),
                                ecx,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      PushOperand(variable->name());
      VisitForStackValue(declaration->fun());
      CallRuntimeWithOperands(Runtime::kDeclareEvalFunction);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
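  // (Note: the |pairs| array was built up by VisitVariableDeclaration and
  // VisitFunctionDeclaration above as alternating name / initial-value
  // entries for the unallocated globals.)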
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ push(eax);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so will always take the slow path.
  Label call_runtime, use_cache, fixed_array;
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);


  // We got a map in register eax. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(edx, eax);
  __ cmp(edx, Immediate(Smi::FromInt(0)));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(eax, ecx);
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(ecx);  // Enumeration cache.
  __ push(edx);  // Number of valid entries for the map in the enum cache.
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ add(esp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register eax. Iterate through that.
  __ bind(&fixed_array);

  __ push(Immediate(Smi::FromInt(1)));  // Smi(1) indicates slow check
  __ push(eax);  // Array
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register ebx.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ mov(ecx, Operand(esp, 4 * kPointerSize));
  __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(edx);
  __ mov(FieldOperand(edx, FixedArray::OffsetOfElementAt(vector_index)),
         Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(ecx);  // Enumerable.
  __ push(ebx);  // Current entry.
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, loop_statement.continue_label());
  __ mov(ebx, eax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register ebx.
  __ bind(&update_each);
  __ mov(result_register(), ebx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
  __ mov(StoreDescriptor::NameRegister(),
         Immediate(isolate()->factory()->home_object_symbol()));
  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ mov(StoreDescriptor::ReceiverRegister(), eax);
  __ mov(StoreDescriptor::NameRegister(),
         Immediate(isolate()->factory()->home_object_symbol()));
  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register context = esi;
  Register temp = edx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                         Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain. There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ mov(temp, context);
    }
    __ bind(&next);
    // Terminate at native context.
    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->native_context_map()));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is "the hole".
    __ JumpIfNotRoot(ContextOperand(temp, Context::EXTENSION_INDEX),
                     Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = esi;
  Register temp = ebx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                         Heap::kTheHoleValueRootIndex, slow);
      }
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
  }
  // Check that last extension is "the hole".
  __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                   Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST) {
      __ cmp(eax, isolate()->factory()->the_hole_value());
      __ j(not_equal, done);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kThrowReferenceError);
    }
    __ jmp(done);
  }
}


void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
#ifdef DEBUG
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
#endif
  __ mov(LoadGlobalDescriptor::SlotRegister(),
         Immediate(SmiFromSlot(proxy->VariableFeedbackSlot())));
  CallLoadGlobalIC(typeof_mode);
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(eax);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");

      if (NeedsHoleCheckForLoad(proxy)) {
        // Let and const need a read barrier.
        Label done;
        GetVar(eax, var);
        __ cmp(eax, isolate()->factory()->the_hole_value());
        __ j(not_equal, &done, Label::kNear);
        if (var->mode() == LET || var->mode() == CONST) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kThrowReferenceError);
        }
        __ bind(&done);
        context()->Plug(eax);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ push(Immediate(var->name()));
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotInsideTypeof;
      __ CallRuntime(function_id);
      __ bind(&done);
      context()->Plug(eax);
      break;
    }
  }
}


void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    PushOperand(isolate()->factory()->null_value());
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
      EmitSetHomeObject(expression, offset, property->GetSlot());
    }
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->ComputeFlags();
  // If any of the keys would store to the elements array, then we shouldn't
  // allow it.
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_properties));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_properties));
    __ mov(edx, Immediate(Smi::FromInt(flags)));
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(eax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(eax));
            __ mov(StoreDescriptor::NameRegister(), Immediate(key->value()));
            __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
            EmitLoadStoreICSlot(property->GetSlot(0));
            CallStoreIC();
            PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        PushOperand(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          PushOperand(Smi::FromInt(SLOPPY));  // Language mode
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        PushOperand(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                               BailoutState::NO_REGISTERS);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
          it->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
          it->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    PushOperand(Operand(esp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);

    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);

    PushOperand(Smi::FromInt(NONE));
    CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
    PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right. All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
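  // (Each iteration of the loop below evaluates the remaining key and value
  // onto the operand stack and then defines the property with a runtime call,
  // so the dynamic properties are added to the object in source order.)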
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(eax);  // Save result on the stack
      result_saved = true;
    }

    PushOperand(Operand(esp, 0));  // Duplicate receiver.

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
      PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                             BailoutState::NO_REGISTERS);
    } else {
      EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
      VisitForStackValue(value);
      if (NeedsHomeObject(value)) {
        EmitSetHomeObject(value, 2, property->GetSlot());
      }

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            PushOperand(Smi::FromInt(NONE));
            PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
            CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
            PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                                   BailoutState::NO_REGISTERS);
          } else {
            DropOperands(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
          break;

        case ObjectLiteral::Property::SETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
          break;
      }
    }
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_constant_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_elements));
    __ push(Immediate(Smi::FromInt(expr->ComputeFlags())));
    __ CallRuntime(Runtime::kCreateArrayLiteral);
  } else {
    __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_elements));
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  int array_index = 0;
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    DCHECK(!subexpr->IsSpread());

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      PushOperand(eax);  // array literal.
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    __ mov(StoreDescriptor::NameRegister(),
           Immediate(Smi::FromInt(array_index)));
    __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
    Handle<Code> ic =
        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
    CallIC(ic);
    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  // In case the array literal contains spread expressions it has two parts.
  // The first part is the "static" array, which has a literal index and is
  // handled above. The second part is the part after the first spread
  // expression (inclusive), and these elements get appended to the array.
  // Note that the number of elements an iterable produces is unknown ahead
  // of time.
  if (array_index < length && result_saved) {
    PopOperand(eax);
    result_saved = false;
  }
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);

    PushOperand(eax);
    DCHECK(!subexpr->IsSpread());
    VisitForStackValue(subexpr);
    CallRuntimeWithOperands(Runtime::kAppendElement);

    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ Assignment");

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      PushOperand(result_register());
      if (expr->is_compound()) {
        PushOperand(MemOperand(esp, kPointerSize));
        PushOperand(result_register());
      }
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
1623 VisitForStackValue(property->obj()); 1624 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0)); 1625 } else { 1626 VisitForStackValue(property->obj()); 1627 } 1628 break; 1629 case KEYED_SUPER_PROPERTY: 1630 VisitForStackValue( 1631 property->obj()->AsSuperPropertyReference()->this_var()); 1632 VisitForStackValue( 1633 property->obj()->AsSuperPropertyReference()->home_object()); 1634 VisitForAccumulatorValue(property->key()); 1635 PushOperand(result_register()); 1636 if (expr->is_compound()) { 1637 PushOperand(MemOperand(esp, 2 * kPointerSize)); 1638 PushOperand(MemOperand(esp, 2 * kPointerSize)); 1639 PushOperand(result_register()); 1640 } 1641 break; 1642 case KEYED_PROPERTY: { 1643 if (expr->is_compound()) { 1644 VisitForStackValue(property->obj()); 1645 VisitForStackValue(property->key()); 1646 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, kPointerSize)); 1647 __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0)); 1648 } else { 1649 VisitForStackValue(property->obj()); 1650 VisitForStackValue(property->key()); 1651 } 1652 break; 1653 } 1654 } 1655 1656 // For compound assignments we need another deoptimization point after the 1657 // variable/property load. 1658 if (expr->is_compound()) { 1659 AccumulatorValueContext result_context(this); 1660 { AccumulatorValueContext left_operand_context(this); 1661 switch (assign_type) { 1662 case VARIABLE: 1663 EmitVariableLoad(expr->target()->AsVariableProxy()); 1664 PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER); 1665 break; 1666 case NAMED_SUPER_PROPERTY: 1667 EmitNamedSuperPropertyLoad(property); 1668 PrepareForBailoutForId(property->LoadId(), 1669 BailoutState::TOS_REGISTER); 1670 break; 1671 case NAMED_PROPERTY: 1672 EmitNamedPropertyLoad(property); 1673 PrepareForBailoutForId(property->LoadId(), 1674 BailoutState::TOS_REGISTER); 1675 break; 1676 case KEYED_SUPER_PROPERTY: 1677 EmitKeyedSuperPropertyLoad(property); 1678 PrepareForBailoutForId(property->LoadId(), 1679 BailoutState::TOS_REGISTER); 1680 break; 1681 case KEYED_PROPERTY: 1682 EmitKeyedPropertyLoad(property); 1683 PrepareForBailoutForId(property->LoadId(), 1684 BailoutState::TOS_REGISTER); 1685 break; 1686 } 1687 } 1688 1689 Token::Value op = expr->binary_op(); 1690 PushOperand(eax); // Left operand goes on the stack. 1691 VisitForAccumulatorValue(expr->value()); 1692 1693 if (ShouldInlineSmiCase(op)) { 1694 EmitInlineSmiBinaryOp(expr->binary_operation(), 1695 op, 1696 expr->target(), 1697 expr->value()); 1698 } else { 1699 EmitBinaryOp(expr->binary_operation(), op); 1700 } 1701 1702 // Deoptimization point in case the binary operation may have side effects. 1703 PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER); 1704 } else { 1705 VisitForAccumulatorValue(expr->value()); 1706 } 1707 1708 SetExpressionPosition(expr); 1709 1710 // Store the value. 
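// The value to store is in eax (the accumulator). Each case below routes
// it to the matching store: a stack/context slot for variables, a store
// IC for ordinary named/keyed properties, or a runtime call for super
// property stores.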
1711 switch (assign_type) { 1712 case VARIABLE: 1713 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), 1714 expr->op(), expr->AssignmentSlot()); 1715 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER); 1716 context()->Plug(eax); 1717 break; 1718 case NAMED_PROPERTY: 1719 EmitNamedPropertyAssignment(expr); 1720 break; 1721 case NAMED_SUPER_PROPERTY: 1722 EmitNamedSuperPropertyStore(property); 1723 context()->Plug(result_register()); 1724 break; 1725 case KEYED_SUPER_PROPERTY: 1726 EmitKeyedSuperPropertyStore(property); 1727 context()->Plug(result_register()); 1728 break; 1729 case KEYED_PROPERTY: 1730 EmitKeyedPropertyAssignment(expr); 1731 break; 1732 } 1733 } 1734 1735 1736 void FullCodeGenerator::VisitYield(Yield* expr) { 1737 Comment cmnt(masm_, "[ Yield"); 1738 SetExpressionPosition(expr); 1739 1740 // Evaluate yielded value first; the initial iterator definition depends on 1741 // this. It stays on the stack while we update the iterator. 1742 VisitForStackValue(expr->expression()); 1743 1744 Label suspend, continuation, post_runtime, resume, exception; 1745 1746 __ jmp(&suspend); 1747 __ bind(&continuation); 1748 // When we arrive here, eax holds the generator object. 1749 __ RecordGeneratorContinuation(); 1750 __ mov(ebx, FieldOperand(eax, JSGeneratorObject::kResumeModeOffset)); 1751 __ mov(eax, FieldOperand(eax, JSGeneratorObject::kInputOrDebugPosOffset)); 1752 STATIC_ASSERT(JSGeneratorObject::kNext < JSGeneratorObject::kReturn); 1753 STATIC_ASSERT(JSGeneratorObject::kThrow > JSGeneratorObject::kReturn); 1754 __ cmp(ebx, Immediate(Smi::FromInt(JSGeneratorObject::kReturn))); 1755 __ j(less, &resume); 1756 __ Push(result_register()); 1757 __ j(greater, &exception); 1758 EmitCreateIteratorResult(true); 1759 EmitUnwindAndReturn(); 1760 1761 __ bind(&exception); 1762 __ CallRuntime(Runtime::kThrow); 1763 1764 __ bind(&suspend); 1765 OperandStackDepthIncrement(1); // Not popped on this path. 
1766 VisitForAccumulatorValue(expr->generator_object()); 1767 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); 1768 __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset), 1769 Immediate(Smi::FromInt(continuation.pos()))); 1770 __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi); 1771 __ mov(ecx, esi); 1772 __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx, 1773 kDontSaveFPRegs); 1774 __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset)); 1775 __ cmp(esp, ebx); 1776 __ j(equal, &post_runtime); 1777 __ push(eax); // generator object 1778 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); 1779 RestoreContext(); 1780 __ bind(&post_runtime); 1781 PopOperand(result_register()); 1782 EmitReturnSequence(); 1783 1784 __ bind(&resume); 1785 context()->Plug(result_register()); 1786 } 1787 1788 void FullCodeGenerator::PushOperand(MemOperand operand) { 1789 OperandStackDepthIncrement(1); 1790 __ Push(operand); 1791 } 1792 1793 void FullCodeGenerator::EmitOperandStackDepthCheck() { 1794 if (FLAG_debug_code) { 1795 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp + 1796 operand_stack_depth_ * kPointerSize; 1797 __ mov(eax, ebp); 1798 __ sub(eax, esp); 1799 __ cmp(eax, Immediate(expected_diff)); 1800 __ Assert(equal, kUnexpectedStackDepth); 1801 } 1802 } 1803 1804 void FullCodeGenerator::EmitCreateIteratorResult(bool done) { 1805 Label allocate, done_allocate; 1806 1807 __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &allocate, 1808 NO_ALLOCATION_FLAGS); 1809 __ jmp(&done_allocate, Label::kNear); 1810 1811 __ bind(&allocate); 1812 __ Push(Smi::FromInt(JSIteratorResult::kSize)); 1813 __ CallRuntime(Runtime::kAllocateInNewSpace); 1814 1815 __ bind(&done_allocate); 1816 __ mov(ebx, NativeContextOperand()); 1817 __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX)); 1818 __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx); 1819 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), 1820 isolate()->factory()->empty_fixed_array()); 1821 __ mov(FieldOperand(eax, JSObject::kElementsOffset), 1822 isolate()->factory()->empty_fixed_array()); 1823 __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset)); 1824 __ mov(FieldOperand(eax, JSIteratorResult::kDoneOffset), 1825 isolate()->factory()->ToBoolean(done)); 1826 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize); 1827 OperandStackDepthDecrement(1); 1828 } 1829 1830 1831 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, 1832 Token::Value op, 1833 Expression* left, 1834 Expression* right) { 1835 // Do combined smi check of the operands. Left operand is on the 1836 // stack. Right operand is in eax. 1837 Label smi_case, done, stub_call; 1838 PopOperand(edx); 1839 __ mov(ecx, eax); 1840 __ or_(eax, edx); 1841 JumpPatchSite patch_site(masm_); 1842 patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear); 1843 1844 __ bind(&stub_call); 1845 __ mov(eax, ecx); 1846 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code(); 1847 CallIC(code, expr->BinaryOperationFeedbackId()); 1848 patch_site.EmitPatchInfo(); 1849 __ jmp(&done, Label::kNear); 1850 1851 // Smi case. 1852 __ bind(&smi_case); 1853 __ mov(eax, edx); // Copy left operand in case of a stub call. 
1854 1855 switch (op) { 1856 case Token::SAR: 1857 __ SmiUntag(ecx); 1858 __ sar_cl(eax); // No checks of result necessary 1859 __ and_(eax, Immediate(~kSmiTagMask)); 1860 break; 1861 case Token::SHL: { 1862 Label result_ok; 1863 __ SmiUntag(eax); 1864 __ SmiUntag(ecx); 1865 __ shl_cl(eax); 1866 // Check that the *signed* result fits in a smi. 1867 __ cmp(eax, 0xc0000000); 1868 __ j(positive, &result_ok); 1869 __ SmiTag(ecx); 1870 __ jmp(&stub_call); 1871 __ bind(&result_ok); 1872 __ SmiTag(eax); 1873 break; 1874 } 1875 case Token::SHR: { 1876 Label result_ok; 1877 __ SmiUntag(eax); 1878 __ SmiUntag(ecx); 1879 __ shr_cl(eax); 1880 __ test(eax, Immediate(0xc0000000)); 1881 __ j(zero, &result_ok); 1882 __ SmiTag(ecx); 1883 __ jmp(&stub_call); 1884 __ bind(&result_ok); 1885 __ SmiTag(eax); 1886 break; 1887 } 1888 case Token::ADD: 1889 __ add(eax, ecx); 1890 __ j(overflow, &stub_call); 1891 break; 1892 case Token::SUB: 1893 __ sub(eax, ecx); 1894 __ j(overflow, &stub_call); 1895 break; 1896 case Token::MUL: { 1897 __ SmiUntag(eax); 1898 __ imul(eax, ecx); 1899 __ j(overflow, &stub_call); 1900 __ test(eax, eax); 1901 __ j(not_zero, &done, Label::kNear); 1902 __ mov(ebx, edx); 1903 __ or_(ebx, ecx); 1904 __ j(negative, &stub_call); 1905 break; 1906 } 1907 case Token::BIT_OR: 1908 __ or_(eax, ecx); 1909 break; 1910 case Token::BIT_AND: 1911 __ and_(eax, ecx); 1912 break; 1913 case Token::BIT_XOR: 1914 __ xor_(eax, ecx); 1915 break; 1916 default: 1917 UNREACHABLE(); 1918 } 1919 1920 __ bind(&done); 1921 context()->Plug(eax); 1922 } 1923 1924 1925 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) { 1926 for (int i = 0; i < lit->properties()->length(); i++) { 1927 ObjectLiteral::Property* property = lit->properties()->at(i); 1928 Expression* value = property->value(); 1929 1930 if (property->is_static()) { 1931 PushOperand(Operand(esp, kPointerSize)); // constructor 1932 } else { 1933 PushOperand(Operand(esp, 0)); // prototype 1934 } 1935 EmitPropertyKey(property, lit->GetIdForProperty(i)); 1936 1937 // The static prototype property is read only. We handle the non computed 1938 // property name case in the parser. Since this is the only case where we 1939 // need to check for an own read only property we special case this so we do 1940 // not need to do this for every property. 
1941 if (property->is_static() && property->is_computed_name()) { 1942 __ CallRuntime(Runtime::kThrowIfStaticPrototype); 1943 __ push(eax); 1944 } 1945 1946 VisitForStackValue(value); 1947 if (NeedsHomeObject(value)) { 1948 EmitSetHomeObject(value, 2, property->GetSlot()); 1949 } 1950 1951 switch (property->kind()) { 1952 case ObjectLiteral::Property::CONSTANT: 1953 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 1954 case ObjectLiteral::Property::PROTOTYPE: 1955 UNREACHABLE(); 1956 case ObjectLiteral::Property::COMPUTED: 1957 PushOperand(Smi::FromInt(DONT_ENUM)); 1958 PushOperand(Smi::FromInt(property->NeedsSetFunctionName())); 1959 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral); 1960 break; 1961 1962 case ObjectLiteral::Property::GETTER: 1963 PushOperand(Smi::FromInt(DONT_ENUM)); 1964 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked); 1965 break; 1966 1967 case ObjectLiteral::Property::SETTER: 1968 PushOperand(Smi::FromInt(DONT_ENUM)); 1969 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked); 1970 break; 1971 } 1972 } 1973 } 1974 1975 1976 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) { 1977 PopOperand(edx); 1978 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code(); 1979 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. 1980 CallIC(code, expr->BinaryOperationFeedbackId()); 1981 patch_site.EmitPatchInfo(); 1982 context()->Plug(eax); 1983 } 1984 1985 1986 void FullCodeGenerator::EmitAssignment(Expression* expr, 1987 FeedbackVectorSlot slot) { 1988 DCHECK(expr->IsValidReferenceExpressionOrThis()); 1989 1990 Property* prop = expr->AsProperty(); 1991 LhsKind assign_type = Property::GetAssignType(prop); 1992 1993 switch (assign_type) { 1994 case VARIABLE: { 1995 Variable* var = expr->AsVariableProxy()->var(); 1996 EffectContext context(this); 1997 EmitVariableAssignment(var, Token::ASSIGN, slot); 1998 break; 1999 } 2000 case NAMED_PROPERTY: { 2001 PushOperand(eax); // Preserve value. 2002 VisitForAccumulatorValue(prop->obj()); 2003 __ Move(StoreDescriptor::ReceiverRegister(), eax); 2004 PopOperand(StoreDescriptor::ValueRegister()); // Restore value. 2005 __ mov(StoreDescriptor::NameRegister(), 2006 prop->key()->AsLiteral()->value()); 2007 EmitLoadStoreICSlot(slot); 2008 CallStoreIC(); 2009 break; 2010 } 2011 case NAMED_SUPER_PROPERTY: { 2012 PushOperand(eax); 2013 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); 2014 VisitForAccumulatorValue( 2015 prop->obj()->AsSuperPropertyReference()->home_object()); 2016 // stack: value, this; eax: home_object 2017 Register scratch = ecx; 2018 Register scratch2 = edx; 2019 __ mov(scratch, result_register()); // home_object 2020 __ mov(eax, MemOperand(esp, kPointerSize)); // value 2021 __ mov(scratch2, MemOperand(esp, 0)); // this 2022 __ mov(MemOperand(esp, kPointerSize), scratch2); // this 2023 __ mov(MemOperand(esp, 0), scratch); // home_object 2024 // stack: this, home_object. 
eax: value 2025 EmitNamedSuperPropertyStore(prop); 2026 break; 2027 } 2028 case KEYED_SUPER_PROPERTY: { 2029 PushOperand(eax); 2030 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); 2031 VisitForStackValue( 2032 prop->obj()->AsSuperPropertyReference()->home_object()); 2033 VisitForAccumulatorValue(prop->key()); 2034 Register scratch = ecx; 2035 Register scratch2 = edx; 2036 __ mov(scratch2, MemOperand(esp, 2 * kPointerSize)); // value 2037 // stack: value, this, home_object; eax: key, edx: value 2038 __ mov(scratch, MemOperand(esp, kPointerSize)); // this 2039 __ mov(MemOperand(esp, 2 * kPointerSize), scratch); 2040 __ mov(scratch, MemOperand(esp, 0)); // home_object 2041 __ mov(MemOperand(esp, kPointerSize), scratch); 2042 __ mov(MemOperand(esp, 0), eax); 2043 __ mov(eax, scratch2); 2044 // stack: this, home_object, key; eax: value. 2045 EmitKeyedSuperPropertyStore(prop); 2046 break; 2047 } 2048 case KEYED_PROPERTY: { 2049 PushOperand(eax); // Preserve value. 2050 VisitForStackValue(prop->obj()); 2051 VisitForAccumulatorValue(prop->key()); 2052 __ Move(StoreDescriptor::NameRegister(), eax); 2053 PopOperand(StoreDescriptor::ReceiverRegister()); // Receiver. 2054 PopOperand(StoreDescriptor::ValueRegister()); // Restore value. 2055 EmitLoadStoreICSlot(slot); 2056 Handle<Code> ic = 2057 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); 2058 CallIC(ic); 2059 break; 2060 } 2061 } 2062 context()->Plug(eax); 2063 } 2064 2065 2066 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot( 2067 Variable* var, MemOperand location) { 2068 __ mov(location, eax); 2069 if (var->IsContextSlot()) { 2070 __ mov(edx, eax); 2071 int offset = Context::SlotOffset(var->index()); 2072 __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs); 2073 } 2074 } 2075 2076 2077 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op, 2078 FeedbackVectorSlot slot) { 2079 if (var->IsUnallocated()) { 2080 // Global var, const, or let. 2081 __ mov(StoreDescriptor::NameRegister(), var->name()); 2082 __ mov(StoreDescriptor::ReceiverRegister(), NativeContextOperand()); 2083 __ mov(StoreDescriptor::ReceiverRegister(), 2084 ContextOperand(StoreDescriptor::ReceiverRegister(), 2085 Context::EXTENSION_INDEX)); 2086 EmitLoadStoreICSlot(slot); 2087 CallStoreIC(); 2088 2089 } else if (var->mode() == LET && op != Token::INIT) { 2090 // Non-initializing assignment to let variable needs a write barrier. 2091 DCHECK(!var->IsLookupSlot()); 2092 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); 2093 Label assign; 2094 MemOperand location = VarOperand(var, ecx); 2095 __ mov(edx, location); 2096 __ cmp(edx, isolate()->factory()->the_hole_value()); 2097 __ j(not_equal, &assign, Label::kNear); 2098 __ push(Immediate(var->name())); 2099 __ CallRuntime(Runtime::kThrowReferenceError); 2100 __ bind(&assign); 2101 EmitStoreToStackLocalOrContextSlot(var, location); 2102 2103 } else if (var->mode() == CONST && op != Token::INIT) { 2104 // Assignment to const variable needs a write barrier. 
2105 DCHECK(!var->IsLookupSlot()); 2106 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); 2107 Label const_error; 2108 MemOperand location = VarOperand(var, ecx); 2109 __ mov(edx, location); 2110 __ cmp(edx, isolate()->factory()->the_hole_value()); 2111 __ j(not_equal, &const_error, Label::kNear); 2112 __ push(Immediate(var->name())); 2113 __ CallRuntime(Runtime::kThrowReferenceError); 2114 __ bind(&const_error); 2115 __ CallRuntime(Runtime::kThrowConstAssignError); 2116 2117 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) { 2118 // Initializing assignment to const {this} needs a write barrier. 2119 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); 2120 Label uninitialized_this; 2121 MemOperand location = VarOperand(var, ecx); 2122 __ mov(edx, location); 2123 __ cmp(edx, isolate()->factory()->the_hole_value()); 2124 __ j(equal, &uninitialized_this); 2125 __ push(Immediate(var->name())); 2126 __ CallRuntime(Runtime::kThrowReferenceError); 2127 __ bind(&uninitialized_this); 2128 EmitStoreToStackLocalOrContextSlot(var, location); 2129 2130 } else if (!var->is_const_mode() || op == Token::INIT) { 2131 if (var->IsLookupSlot()) { 2132 // Assignment to var. 2133 __ Push(Immediate(var->name())); 2134 __ Push(eax); 2135 __ CallRuntime(is_strict(language_mode()) 2136 ? Runtime::kStoreLookupSlot_Strict 2137 : Runtime::kStoreLookupSlot_Sloppy); 2138 } else { 2139 // Assignment to var or initializing assignment to let/const in harmony 2140 // mode. 2141 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); 2142 MemOperand location = VarOperand(var, ecx); 2143 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) { 2144 // Check for an uninitialized let binding. 2145 __ mov(edx, location); 2146 __ cmp(edx, isolate()->factory()->the_hole_value()); 2147 __ Check(equal, kLetBindingReInitialization); 2148 } 2149 EmitStoreToStackLocalOrContextSlot(var, location); 2150 } 2151 2152 } else { 2153 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT); 2154 if (is_strict(language_mode())) { 2155 __ CallRuntime(Runtime::kThrowConstAssignError); 2156 } 2157 // Silently ignore store in sloppy mode. 2158 } 2159 } 2160 2161 2162 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { 2163 // Assignment to a property, using a named store IC. 2164 // eax : value 2165 // esp[0] : receiver 2166 Property* prop = expr->target()->AsProperty(); 2167 DCHECK(prop != NULL); 2168 DCHECK(prop->key()->IsLiteral()); 2169 2170 __ mov(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value()); 2171 PopOperand(StoreDescriptor::ReceiverRegister()); 2172 EmitLoadStoreICSlot(expr->AssignmentSlot()); 2173 CallStoreIC(); 2174 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER); 2175 context()->Plug(eax); 2176 } 2177 2178 2179 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) { 2180 // Assignment to named property of super. 2181 // eax : value 2182 // stack : receiver ('this'), home_object 2183 DCHECK(prop != NULL); 2184 Literal* key = prop->key()->AsLiteral(); 2185 DCHECK(key != NULL); 2186 2187 PushOperand(key->value()); 2188 PushOperand(eax); 2189 CallRuntimeWithOperands(is_strict(language_mode()) 2190 ? Runtime::kStoreToSuper_Strict 2191 : Runtime::kStoreToSuper_Sloppy); 2192 } 2193 2194 2195 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) { 2196 // Assignment to named property of super. 
2197 // eax : value 2198 // stack : receiver ('this'), home_object, key 2199 2200 PushOperand(eax); 2201 CallRuntimeWithOperands(is_strict(language_mode()) 2202 ? Runtime::kStoreKeyedToSuper_Strict 2203 : Runtime::kStoreKeyedToSuper_Sloppy); 2204 } 2205 2206 2207 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { 2208 // Assignment to a property, using a keyed store IC. 2209 // eax : value 2210 // esp[0] : key 2211 // esp[kPointerSize] : receiver 2212 2213 PopOperand(StoreDescriptor::NameRegister()); // Key. 2214 PopOperand(StoreDescriptor::ReceiverRegister()); 2215 DCHECK(StoreDescriptor::ValueRegister().is(eax)); 2216 Handle<Code> ic = 2217 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); 2218 EmitLoadStoreICSlot(expr->AssignmentSlot()); 2219 CallIC(ic); 2220 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER); 2221 context()->Plug(eax); 2222 } 2223 2224 2225 void FullCodeGenerator::CallIC(Handle<Code> code, 2226 TypeFeedbackId ast_id) { 2227 ic_total_count_++; 2228 __ call(code, RelocInfo::CODE_TARGET, ast_id); 2229 } 2230 2231 2232 // Code common for calls using the IC. 2233 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) { 2234 Expression* callee = expr->expression(); 2235 2236 // Get the target function. 2237 ConvertReceiverMode convert_mode; 2238 if (callee->IsVariableProxy()) { 2239 { StackValueContext context(this); 2240 EmitVariableLoad(callee->AsVariableProxy()); 2241 PrepareForBailout(callee, BailoutState::NO_REGISTERS); 2242 } 2243 // Push undefined as receiver. This is patched in the method prologue if it 2244 // is a sloppy mode method. 2245 PushOperand(isolate()->factory()->undefined_value()); 2246 convert_mode = ConvertReceiverMode::kNullOrUndefined; 2247 } else { 2248 // Load the function from the receiver. 2249 DCHECK(callee->IsProperty()); 2250 DCHECK(!callee->AsProperty()->IsSuperAccess()); 2251 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0)); 2252 EmitNamedPropertyLoad(callee->AsProperty()); 2253 PrepareForBailoutForId(callee->AsProperty()->LoadId(), 2254 BailoutState::TOS_REGISTER); 2255 // Push the target function under the receiver. 2256 PushOperand(Operand(esp, 0)); 2257 __ mov(Operand(esp, kPointerSize), eax); 2258 convert_mode = ConvertReceiverMode::kNotNullOrUndefined; 2259 } 2260 2261 EmitCall(expr, convert_mode); 2262 } 2263 2264 2265 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) { 2266 SetExpressionPosition(expr); 2267 Expression* callee = expr->expression(); 2268 DCHECK(callee->IsProperty()); 2269 Property* prop = callee->AsProperty(); 2270 DCHECK(prop->IsSuperAccess()); 2271 2272 Literal* key = prop->key()->AsLiteral(); 2273 DCHECK(!key->value()->IsSmi()); 2274 // Load the function from the receiver. 2275 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference(); 2276 VisitForStackValue(super_ref->home_object()); 2277 VisitForAccumulatorValue(super_ref->this_var()); 2278 PushOperand(eax); 2279 PushOperand(eax); 2280 PushOperand(Operand(esp, kPointerSize * 2)); 2281 PushOperand(key->value()); 2282 // Stack here: 2283 // - home_object 2284 // - this (receiver) 2285 // - this (receiver) <-- LoadFromSuper will pop here and below. 2286 // - home_object 2287 // - key 2288 CallRuntimeWithOperands(Runtime::kLoadFromSuper); 2289 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER); 2290 2291 // Replace home_object with target function. 
2292 __ mov(Operand(esp, kPointerSize), eax); 2293 2294 // Stack here: 2295 // - target function 2296 // - this (receiver) 2297 EmitCall(expr); 2298 } 2299 2300 2301 // Code common for calls using the IC. 2302 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, 2303 Expression* key) { 2304 // Load the key. 2305 VisitForAccumulatorValue(key); 2306 2307 Expression* callee = expr->expression(); 2308 2309 // Load the function from the receiver. 2310 DCHECK(callee->IsProperty()); 2311 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0)); 2312 __ mov(LoadDescriptor::NameRegister(), eax); 2313 EmitKeyedPropertyLoad(callee->AsProperty()); 2314 PrepareForBailoutForId(callee->AsProperty()->LoadId(), 2315 BailoutState::TOS_REGISTER); 2316 2317 // Push the target function under the receiver. 2318 PushOperand(Operand(esp, 0)); 2319 __ mov(Operand(esp, kPointerSize), eax); 2320 2321 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined); 2322 } 2323 2324 2325 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) { 2326 Expression* callee = expr->expression(); 2327 DCHECK(callee->IsProperty()); 2328 Property* prop = callee->AsProperty(); 2329 DCHECK(prop->IsSuperAccess()); 2330 2331 SetExpressionPosition(prop); 2332 // Load the function from the receiver. 2333 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference(); 2334 VisitForStackValue(super_ref->home_object()); 2335 VisitForAccumulatorValue(super_ref->this_var()); 2336 PushOperand(eax); 2337 PushOperand(eax); 2338 PushOperand(Operand(esp, kPointerSize * 2)); 2339 VisitForStackValue(prop->key()); 2340 // Stack here: 2341 // - home_object 2342 // - this (receiver) 2343 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below. 2344 // - home_object 2345 // - key 2346 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper); 2347 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER); 2348 2349 // Replace home_object with target function. 2350 __ mov(Operand(esp, kPointerSize), eax); 2351 2352 // Stack here: 2353 // - target function 2354 // - this (receiver) 2355 EmitCall(expr); 2356 } 2357 2358 2359 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) { 2360 // Load the arguments. 2361 ZoneList<Expression*>* args = expr->arguments(); 2362 int arg_count = args->length(); 2363 for (int i = 0; i < arg_count; i++) { 2364 VisitForStackValue(args->at(i)); 2365 } 2366 2367 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS); 2368 SetCallPosition(expr, expr->tail_call_mode()); 2369 if (expr->tail_call_mode() == TailCallMode::kAllow) { 2370 if (FLAG_trace) { 2371 __ CallRuntime(Runtime::kTraceTailCall); 2372 } 2373 // Update profiling counters before the tail call since we will 2374 // not return to this function. 2375 EmitProfilingCounterHandlingForReturnSequence(true); 2376 } 2377 Handle<Code> ic = 2378 CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode()) 2379 .code(); 2380 __ Move(edx, Immediate(SmiFromSlot(expr->CallFeedbackICSlot()))); 2381 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize)); 2382 // Don't assign a type feedback id to the IC, since type feedback is provided 2383 // by the vector above. 
2384 CallIC(ic); 2385 OperandStackDepthDecrement(arg_count + 1); 2386 2387 RecordJSReturnSite(expr); 2388 RestoreContext(); 2389 context()->DropAndPlug(1, eax); 2390 } 2391 2392 void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) { 2393 int arg_count = expr->arguments()->length(); 2394 // Push copy of the first argument or undefined if it doesn't exist. 2395 if (arg_count > 0) { 2396 __ push(Operand(esp, arg_count * kPointerSize)); 2397 } else { 2398 __ push(Immediate(isolate()->factory()->undefined_value())); 2399 } 2400 2401 // Push the enclosing function. 2402 __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); 2403 2404 // Push the language mode. 2405 __ push(Immediate(Smi::FromInt(language_mode()))); 2406 2407 // Push the start position of the scope the calls resides in. 2408 __ push(Immediate(Smi::FromInt(scope()->start_position()))); 2409 2410 // Push the source position of the eval call. 2411 __ push(Immediate(Smi::FromInt(expr->position()))); 2412 2413 // Do the runtime call. 2414 __ CallRuntime(Runtime::kResolvePossiblyDirectEval); 2415 } 2416 2417 2418 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls. 2419 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) { 2420 VariableProxy* callee = expr->expression()->AsVariableProxy(); 2421 if (callee->var()->IsLookupSlot()) { 2422 Label slow, done; 2423 SetExpressionPosition(callee); 2424 // Generate code for loading from variables potentially shadowed by 2425 // eval-introduced variables. 2426 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done); 2427 2428 __ bind(&slow); 2429 // Call the runtime to find the function to call (returned in eax) and 2430 // the object holding it (returned in edx). 2431 __ Push(callee->name()); 2432 __ CallRuntime(Runtime::kLoadLookupSlotForCall); 2433 PushOperand(eax); // Function. 2434 PushOperand(edx); // Receiver. 2435 PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS); 2436 2437 // If fast case code has been generated, emit code to push the function 2438 // and receiver and have the slow path jump around this code. 2439 if (done.is_linked()) { 2440 Label call; 2441 __ jmp(&call, Label::kNear); 2442 __ bind(&done); 2443 // Push function. 2444 __ push(eax); 2445 // The receiver is implicitly the global receiver. Indicate this by 2446 // passing the hole to the call function stub. 2447 __ push(Immediate(isolate()->factory()->undefined_value())); 2448 __ bind(&call); 2449 } 2450 } else { 2451 VisitForStackValue(callee); 2452 // refEnv.WithBaseObject() 2453 PushOperand(isolate()->factory()->undefined_value()); 2454 } 2455 } 2456 2457 2458 void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) { 2459 // In a call to eval, we first call Runtime_ResolvePossiblyDirectEval 2460 // to resolve the function we need to call. Then we call the resolved 2461 // function using the given arguments. 2462 ZoneList<Expression*>* args = expr->arguments(); 2463 int arg_count = args->length(); 2464 2465 PushCalleeAndWithBaseObject(expr); 2466 2467 // Push the arguments. 2468 for (int i = 0; i < arg_count; i++) { 2469 VisitForStackValue(args->at(i)); 2470 } 2471 2472 // Push a copy of the function (found below the arguments) and 2473 // resolve eval. 2474 __ push(Operand(esp, (arg_count + 1) * kPointerSize)); 2475 EmitResolvePossiblyDirectEval(expr); 2476 2477 // Touch up the stack with the resolved function. 
2478 __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax); 2479 2480 PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS); 2481 2482 SetCallPosition(expr); 2483 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize)); 2484 __ Set(eax, arg_count); 2485 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny, 2486 expr->tail_call_mode()), 2487 RelocInfo::CODE_TARGET); 2488 OperandStackDepthDecrement(arg_count + 1); 2489 RecordJSReturnSite(expr); 2490 RestoreContext(); 2491 context()->DropAndPlug(1, eax); 2492 } 2493 2494 2495 void FullCodeGenerator::VisitCallNew(CallNew* expr) { 2496 Comment cmnt(masm_, "[ CallNew"); 2497 // According to ECMA-262, section 11.2.2, page 44, the function 2498 // expression in new calls must be evaluated before the 2499 // arguments. 2500 2501 // Push constructor on the stack. If it's not a function it's used as 2502 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is 2503 // ignored. 2504 DCHECK(!expr->expression()->IsSuperPropertyReference()); 2505 VisitForStackValue(expr->expression()); 2506 2507 // Push the arguments ("left-to-right") on the stack. 2508 ZoneList<Expression*>* args = expr->arguments(); 2509 int arg_count = args->length(); 2510 for (int i = 0; i < arg_count; i++) { 2511 VisitForStackValue(args->at(i)); 2512 } 2513 2514 // Call the construct call builtin that handles allocation and 2515 // constructor invocation. 2516 SetConstructCallPosition(expr); 2517 2518 // Load function and argument count into edi and eax. 2519 __ Move(eax, Immediate(arg_count)); 2520 __ mov(edi, Operand(esp, arg_count * kPointerSize)); 2521 2522 // Record call targets in unoptimized code. 2523 __ EmitLoadTypeFeedbackVector(ebx); 2524 __ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot()))); 2525 2526 CallConstructStub stub(isolate()); 2527 __ call(stub.GetCode(), RelocInfo::CODE_TARGET); 2528 OperandStackDepthDecrement(arg_count + 1); 2529 PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER); 2530 RestoreContext(); 2531 context()->Plug(eax); 2532 } 2533 2534 2535 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) { 2536 SuperCallReference* super_call_ref = 2537 expr->expression()->AsSuperCallReference(); 2538 DCHECK_NOT_NULL(super_call_ref); 2539 2540 // Push the super constructor target on the stack (may be null, 2541 // but the Construct builtin can deal with that properly). 2542 VisitForAccumulatorValue(super_call_ref->this_function_var()); 2543 __ AssertFunction(result_register()); 2544 __ mov(result_register(), 2545 FieldOperand(result_register(), HeapObject::kMapOffset)); 2546 PushOperand(FieldOperand(result_register(), Map::kPrototypeOffset)); 2547 2548 // Push the arguments ("left-to-right") on the stack. 2549 ZoneList<Expression*>* args = expr->arguments(); 2550 int arg_count = args->length(); 2551 for (int i = 0; i < arg_count; i++) { 2552 VisitForStackValue(args->at(i)); 2553 } 2554 2555 // Call the construct call builtin that handles allocation and 2556 // constructor invocation. 2557 SetConstructCallPosition(expr); 2558 2559 // Load new target into edx. 2560 VisitForAccumulatorValue(super_call_ref->new_target_var()); 2561 __ mov(edx, result_register()); 2562 2563 // Load function and argument count into edi and eax. 
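// (eax = argument count, edi = the super constructor pushed above; edx
// already holds new.target, which is the register layout expected by the
// Construct builtin invoked below.)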
2564 __ Move(eax, Immediate(arg_count)); 2565 __ mov(edi, Operand(esp, arg_count * kPointerSize)); 2566 2567 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); 2568 OperandStackDepthDecrement(arg_count + 1); 2569 2570 RecordJSReturnSite(expr); 2571 RestoreContext(); 2572 context()->Plug(eax); 2573 } 2574 2575 2576 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { 2577 ZoneList<Expression*>* args = expr->arguments(); 2578 DCHECK(args->length() == 1); 2579 2580 VisitForAccumulatorValue(args->at(0)); 2581 2582 Label materialize_true, materialize_false; 2583 Label* if_true = NULL; 2584 Label* if_false = NULL; 2585 Label* fall_through = NULL; 2586 context()->PrepareTest(&materialize_true, &materialize_false, 2587 &if_true, &if_false, &fall_through); 2588 2589 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2590 __ test(eax, Immediate(kSmiTagMask)); 2591 Split(zero, if_true, if_false, fall_through); 2592 2593 context()->Plug(if_true, if_false); 2594 } 2595 2596 2597 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) { 2598 ZoneList<Expression*>* args = expr->arguments(); 2599 DCHECK(args->length() == 1); 2600 2601 VisitForAccumulatorValue(args->at(0)); 2602 2603 Label materialize_true, materialize_false; 2604 Label* if_true = NULL; 2605 Label* if_false = NULL; 2606 Label* fall_through = NULL; 2607 context()->PrepareTest(&materialize_true, &materialize_false, 2608 &if_true, &if_false, &fall_through); 2609 2610 __ JumpIfSmi(eax, if_false); 2611 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ebx); 2612 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2613 Split(above_equal, if_true, if_false, fall_through); 2614 2615 context()->Plug(if_true, if_false); 2616 } 2617 2618 2619 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { 2620 ZoneList<Expression*>* args = expr->arguments(); 2621 DCHECK(args->length() == 1); 2622 2623 VisitForAccumulatorValue(args->at(0)); 2624 2625 Label materialize_true, materialize_false; 2626 Label* if_true = NULL; 2627 Label* if_false = NULL; 2628 Label* fall_through = NULL; 2629 context()->PrepareTest(&materialize_true, &materialize_false, 2630 &if_true, &if_false, &fall_through); 2631 2632 __ JumpIfSmi(eax, if_false); 2633 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx); 2634 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2635 Split(equal, if_true, if_false, fall_through); 2636 2637 context()->Plug(if_true, if_false); 2638 } 2639 2640 2641 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) { 2642 ZoneList<Expression*>* args = expr->arguments(); 2643 DCHECK(args->length() == 1); 2644 2645 VisitForAccumulatorValue(args->at(0)); 2646 2647 Label materialize_true, materialize_false; 2648 Label* if_true = NULL; 2649 Label* if_false = NULL; 2650 Label* fall_through = NULL; 2651 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, 2652 &if_false, &fall_through); 2653 2654 __ JumpIfSmi(eax, if_false); 2655 __ CmpObjectType(eax, JS_TYPED_ARRAY_TYPE, ebx); 2656 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2657 Split(equal, if_true, if_false, fall_through); 2658 2659 context()->Plug(if_true, if_false); 2660 } 2661 2662 2663 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) { 2664 ZoneList<Expression*>* args = expr->arguments(); 2665 DCHECK(args->length() == 1); 2666 2667 VisitForAccumulatorValue(args->at(0)); 2668 2669 Label materialize_true, materialize_false; 2670 Label* if_true = NULL; 2671 Label* if_false = NULL; 2672 Label* fall_through = NULL; 2673 
context()->PrepareTest(&materialize_true, &materialize_false, 2674 &if_true, &if_false, &fall_through); 2675 2676 __ JumpIfSmi(eax, if_false); 2677 __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx); 2678 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2679 Split(equal, if_true, if_false, fall_through); 2680 2681 context()->Plug(if_true, if_false); 2682 } 2683 2684 2685 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) { 2686 ZoneList<Expression*>* args = expr->arguments(); 2687 DCHECK(args->length() == 1); 2688 2689 VisitForAccumulatorValue(args->at(0)); 2690 2691 Label materialize_true, materialize_false; 2692 Label* if_true = NULL; 2693 Label* if_false = NULL; 2694 Label* fall_through = NULL; 2695 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, 2696 &if_false, &fall_through); 2697 2698 __ JumpIfSmi(eax, if_false); 2699 __ CmpObjectType(eax, JS_PROXY_TYPE, ebx); 2700 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2701 Split(equal, if_true, if_false, fall_through); 2702 2703 context()->Plug(if_true, if_false); 2704 } 2705 2706 2707 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { 2708 ZoneList<Expression*>* args = expr->arguments(); 2709 DCHECK(args->length() == 1); 2710 Label done, null, function, non_function_constructor; 2711 2712 VisitForAccumulatorValue(args->at(0)); 2713 2714 // If the object is not a JSReceiver, we return null. 2715 __ JumpIfSmi(eax, &null, Label::kNear); 2716 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); 2717 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, eax); 2718 __ j(below, &null, Label::kNear); 2719 2720 // Return 'Function' for JSFunction and JSBoundFunction objects. 2721 __ CmpInstanceType(eax, FIRST_FUNCTION_TYPE); 2722 STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE); 2723 __ j(above_equal, &function, Label::kNear); 2724 2725 // Check if the constructor in the map is a JS function. 2726 __ GetMapConstructor(eax, eax, ebx); 2727 __ CmpInstanceType(ebx, JS_FUNCTION_TYPE); 2728 __ j(not_equal, &non_function_constructor, Label::kNear); 2729 2730 // eax now contains the constructor function. Grab the 2731 // instance class name from there. 2732 __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset)); 2733 __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset)); 2734 __ jmp(&done, Label::kNear); 2735 2736 // Non-JS objects have class null. 2737 __ bind(&null); 2738 __ mov(eax, isolate()->factory()->null_value()); 2739 __ jmp(&done, Label::kNear); 2740 2741 // Functions have class 'Function'. 2742 __ bind(&function); 2743 __ mov(eax, isolate()->factory()->Function_string()); 2744 __ jmp(&done, Label::kNear); 2745 2746 // Objects with a non-function constructor have class 'Object'. 2747 __ bind(&non_function_constructor); 2748 __ mov(eax, isolate()->factory()->Object_string()); 2749 2750 // All done. 2751 __ bind(&done); 2752 2753 context()->Plug(eax); 2754 } 2755 2756 2757 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) { 2758 ZoneList<Expression*>* args = expr->arguments(); 2759 DCHECK(args->length() == 1); 2760 2761 VisitForAccumulatorValue(args->at(0)); // Load the object. 2762 2763 Label done; 2764 // If the object is a smi return the object. 2765 __ JumpIfSmi(eax, &done, Label::kNear); 2766 // If the object is not a value type, return the object. 
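// Otherwise unwrap the JSValue wrapper: e.g. a new Number(5) argument
// yields the primitive 5 stored at kValueOffset, while a plain smi or any
// non-wrapper object is returned unchanged.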
2767 __ CmpObjectType(eax, JS_VALUE_TYPE, ebx); 2768 __ j(not_equal, &done, Label::kNear); 2769 __ mov(eax, FieldOperand(eax, JSValue::kValueOffset)); 2770 2771 __ bind(&done); 2772 context()->Plug(eax); 2773 } 2774 2775 2776 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) { 2777 ZoneList<Expression*>* args = expr->arguments(); 2778 DCHECK(args->length() == 1); 2779 2780 VisitForAccumulatorValue(args->at(0)); 2781 2782 Label done; 2783 StringCharFromCodeGenerator generator(eax, ebx); 2784 generator.GenerateFast(masm_); 2785 __ jmp(&done); 2786 2787 NopRuntimeCallHelper call_helper; 2788 generator.GenerateSlow(masm_, call_helper); 2789 2790 __ bind(&done); 2791 context()->Plug(ebx); 2792 } 2793 2794 2795 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) { 2796 ZoneList<Expression*>* args = expr->arguments(); 2797 DCHECK(args->length() == 2); 2798 2799 VisitForStackValue(args->at(0)); 2800 VisitForAccumulatorValue(args->at(1)); 2801 2802 Register object = ebx; 2803 Register index = eax; 2804 Register result = edx; 2805 2806 PopOperand(object); 2807 2808 Label need_conversion; 2809 Label index_out_of_range; 2810 Label done; 2811 StringCharCodeAtGenerator generator(object, index, result, &need_conversion, 2812 &need_conversion, &index_out_of_range); 2813 generator.GenerateFast(masm_); 2814 __ jmp(&done); 2815 2816 __ bind(&index_out_of_range); 2817 // When the index is out of range, the spec requires us to return 2818 // NaN. 2819 __ Move(result, Immediate(isolate()->factory()->nan_value())); 2820 __ jmp(&done); 2821 2822 __ bind(&need_conversion); 2823 // Move the undefined value into the result register, which will 2824 // trigger conversion. 2825 __ Move(result, Immediate(isolate()->factory()->undefined_value())); 2826 __ jmp(&done); 2827 2828 NopRuntimeCallHelper call_helper; 2829 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper); 2830 2831 __ bind(&done); 2832 context()->Plug(result); 2833 } 2834 2835 2836 void FullCodeGenerator::EmitCall(CallRuntime* expr) { 2837 ZoneList<Expression*>* args = expr->arguments(); 2838 DCHECK_LE(2, args->length()); 2839 // Push target, receiver and arguments onto the stack. 2840 for (Expression* const arg : *args) { 2841 VisitForStackValue(arg); 2842 } 2843 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS); 2844 // Move target to edi. 2845 int const argc = args->length() - 2; 2846 __ mov(edi, Operand(esp, (argc + 1) * kPointerSize)); 2847 // Call the target. 2848 __ mov(eax, Immediate(argc)); 2849 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); 2850 OperandStackDepthDecrement(argc + 1); 2851 RestoreContext(); 2852 // Discard the function left on TOS. 
2853 context()->DropAndPlug(1, eax); 2854 } 2855 2856 2857 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) { 2858 ZoneList<Expression*>* args = expr->arguments(); 2859 DCHECK(args->length() == 1); 2860 2861 VisitForAccumulatorValue(args->at(0)); 2862 2863 __ AssertString(eax); 2864 2865 Label materialize_true, materialize_false; 2866 Label* if_true = NULL; 2867 Label* if_false = NULL; 2868 Label* fall_through = NULL; 2869 context()->PrepareTest(&materialize_true, &materialize_false, 2870 &if_true, &if_false, &fall_through); 2871 2872 __ test(FieldOperand(eax, String::kHashFieldOffset), 2873 Immediate(String::kContainsCachedArrayIndexMask)); 2874 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2875 Split(zero, if_true, if_false, fall_through); 2876 2877 context()->Plug(if_true, if_false); 2878 } 2879 2880 2881 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) { 2882 ZoneList<Expression*>* args = expr->arguments(); 2883 DCHECK(args->length() == 1); 2884 VisitForAccumulatorValue(args->at(0)); 2885 2886 __ AssertString(eax); 2887 2888 __ mov(eax, FieldOperand(eax, String::kHashFieldOffset)); 2889 __ IndexFromHash(eax, eax); 2890 2891 context()->Plug(eax); 2892 } 2893 2894 2895 void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) { 2896 ZoneList<Expression*>* args = expr->arguments(); 2897 DCHECK_EQ(1, args->length()); 2898 VisitForAccumulatorValue(args->at(0)); 2899 __ AssertFunction(eax); 2900 __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset)); 2901 __ mov(eax, FieldOperand(eax, Map::kPrototypeOffset)); 2902 context()->Plug(eax); 2903 } 2904 2905 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) { 2906 DCHECK(expr->arguments()->length() == 0); 2907 ExternalReference debug_is_active = 2908 ExternalReference::debug_is_active_address(isolate()); 2909 __ movzx_b(eax, Operand::StaticVariable(debug_is_active)); 2910 __ SmiTag(eax); 2911 context()->Plug(eax); 2912 } 2913 2914 2915 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) { 2916 ZoneList<Expression*>* args = expr->arguments(); 2917 DCHECK_EQ(2, args->length()); 2918 VisitForStackValue(args->at(0)); 2919 VisitForStackValue(args->at(1)); 2920 2921 Label runtime, done; 2922 2923 __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &runtime, 2924 NO_ALLOCATION_FLAGS); 2925 __ mov(ebx, NativeContextOperand()); 2926 __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX)); 2927 __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx); 2928 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), 2929 isolate()->factory()->empty_fixed_array()); 2930 __ mov(FieldOperand(eax, JSObject::kElementsOffset), 2931 isolate()->factory()->empty_fixed_array()); 2932 __ pop(FieldOperand(eax, JSIteratorResult::kDoneOffset)); 2933 __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset)); 2934 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize); 2935 __ jmp(&done, Label::kNear); 2936 2937 __ bind(&runtime); 2938 CallRuntimeWithOperands(Runtime::kCreateIterResultObject); 2939 2940 __ bind(&done); 2941 context()->Plug(eax); 2942 } 2943 2944 2945 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) { 2946 // Push function. 2947 __ LoadGlobalFunction(expr->context_index(), eax); 2948 PushOperand(eax); 2949 2950 // Push undefined as receiver. 
2951 PushOperand(isolate()->factory()->undefined_value()); 2952 } 2953 2954 2955 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) { 2956 ZoneList<Expression*>* args = expr->arguments(); 2957 int arg_count = args->length(); 2958 2959 SetCallPosition(expr); 2960 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize)); 2961 __ Set(eax, arg_count); 2962 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined), 2963 RelocInfo::CODE_TARGET); 2964 OperandStackDepthDecrement(arg_count + 1); 2965 RestoreContext(); 2966 } 2967 2968 2969 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { 2970 switch (expr->op()) { 2971 case Token::DELETE: { 2972 Comment cmnt(masm_, "[ UnaryOperation (DELETE)"); 2973 Property* property = expr->expression()->AsProperty(); 2974 VariableProxy* proxy = expr->expression()->AsVariableProxy(); 2975 2976 if (property != NULL) { 2977 VisitForStackValue(property->obj()); 2978 VisitForStackValue(property->key()); 2979 CallRuntimeWithOperands(is_strict(language_mode()) 2980 ? Runtime::kDeleteProperty_Strict 2981 : Runtime::kDeleteProperty_Sloppy); 2982 context()->Plug(eax); 2983 } else if (proxy != NULL) { 2984 Variable* var = proxy->var(); 2985 // Delete of an unqualified identifier is disallowed in strict mode but 2986 // "delete this" is allowed. 2987 bool is_this = var->HasThisName(isolate()); 2988 DCHECK(is_sloppy(language_mode()) || is_this); 2989 if (var->IsUnallocatedOrGlobalSlot()) { 2990 __ mov(eax, NativeContextOperand()); 2991 __ push(ContextOperand(eax, Context::EXTENSION_INDEX)); 2992 __ push(Immediate(var->name())); 2993 __ CallRuntime(Runtime::kDeleteProperty_Sloppy); 2994 context()->Plug(eax); 2995 } else if (var->IsStackAllocated() || var->IsContextSlot()) { 2996 // Result of deleting non-global variables is false. 'this' is 2997 // not really a variable, though we implement it as one. The 2998 // subexpression does not have side effects. 2999 context()->Plug(is_this); 3000 } else { 3001 // Non-global variable. Call the runtime to try to delete from the 3002 // context where the variable was introduced. 3003 __ Push(var->name()); 3004 __ CallRuntime(Runtime::kDeleteLookupSlot); 3005 context()->Plug(eax); 3006 } 3007 } else { 3008 // Result of deleting non-property, non-variable reference is true. 3009 // The subexpression may have side effects. 3010 VisitForEffect(expr->expression()); 3011 context()->Plug(true); 3012 } 3013 break; 3014 } 3015 3016 case Token::VOID: { 3017 Comment cmnt(masm_, "[ UnaryOperation (VOID)"); 3018 VisitForEffect(expr->expression()); 3019 context()->Plug(isolate()->factory()->undefined_value()); 3020 break; 3021 } 3022 3023 case Token::NOT: { 3024 Comment cmnt(masm_, "[ UnaryOperation (NOT)"); 3025 if (context()->IsEffect()) { 3026 // Unary NOT has no side effects so it's only necessary to visit the 3027 // subexpression. Match the optimizing compiler by not branching. 3028 VisitForEffect(expr->expression()); 3029 } else if (context()->IsTest()) { 3030 const TestContext* test = TestContext::cast(context()); 3031 // The labels are swapped for the recursive call. 
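// Swapping the targets implements the negation without materializing a
// boolean value: for if (!x) we branch to the "false" code when x tests
// true, and vice versa.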
3032 VisitForControl(expr->expression(), 3033 test->false_label(), 3034 test->true_label(), 3035 test->fall_through()); 3036 context()->Plug(test->true_label(), test->false_label()); 3037 } else { 3038 // We handle value contexts explicitly rather than simply visiting 3039 // for control and plugging the control flow into the context, 3040 // because we need to prepare a pair of extra administrative AST ids 3041 // for the optimizing compiler. 3042 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue()); 3043 Label materialize_true, materialize_false, done; 3044 VisitForControl(expr->expression(), 3045 &materialize_false, 3046 &materialize_true, 3047 &materialize_true); 3048 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1); 3049 __ bind(&materialize_true); 3050 PrepareForBailoutForId(expr->MaterializeTrueId(), 3051 BailoutState::NO_REGISTERS); 3052 if (context()->IsAccumulatorValue()) { 3053 __ mov(eax, isolate()->factory()->true_value()); 3054 } else { 3055 __ Push(isolate()->factory()->true_value()); 3056 } 3057 __ jmp(&done, Label::kNear); 3058 __ bind(&materialize_false); 3059 PrepareForBailoutForId(expr->MaterializeFalseId(), 3060 BailoutState::NO_REGISTERS); 3061 if (context()->IsAccumulatorValue()) { 3062 __ mov(eax, isolate()->factory()->false_value()); 3063 } else { 3064 __ Push(isolate()->factory()->false_value()); 3065 } 3066 __ bind(&done); 3067 } 3068 break; 3069 } 3070 3071 case Token::TYPEOF: { 3072 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)"); 3073 { 3074 AccumulatorValueContext context(this); 3075 VisitForTypeofValue(expr->expression()); 3076 } 3077 __ mov(ebx, eax); 3078 TypeofStub typeof_stub(isolate()); 3079 __ CallStub(&typeof_stub); 3080 context()->Plug(eax); 3081 break; 3082 } 3083 3084 default: 3085 UNREACHABLE(); 3086 } 3087 } 3088 3089 3090 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) { 3091 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis()); 3092 3093 Comment cmnt(masm_, "[ CountOperation"); 3094 3095 Property* prop = expr->expression()->AsProperty(); 3096 LhsKind assign_type = Property::GetAssignType(prop); 3097 3098 // Evaluate expression and get value. 3099 if (assign_type == VARIABLE) { 3100 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL); 3101 AccumulatorValueContext context(this); 3102 EmitVariableLoad(expr->expression()->AsVariableProxy()); 3103 } else { 3104 // Reserve space for result of postfix operation. 3105 if (expr->is_postfix() && !context()->IsEffect()) { 3106 PushOperand(Smi::FromInt(0)); 3107 } 3108 switch (assign_type) { 3109 case NAMED_PROPERTY: { 3110 // Put the object both on the stack and in the register. 
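// As in the assignment case, the stack copy is the receiver for the store
// IC that writes the updated value back, and the register copy is the
// receiver for the load IC that reads the current value.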
3111 VisitForStackValue(prop->obj()); 3112 __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0)); 3113 EmitNamedPropertyLoad(prop); 3114 break; 3115 } 3116 3117 case NAMED_SUPER_PROPERTY: { 3118 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); 3119 VisitForAccumulatorValue( 3120 prop->obj()->AsSuperPropertyReference()->home_object()); 3121 PushOperand(result_register()); 3122 PushOperand(MemOperand(esp, kPointerSize)); 3123 PushOperand(result_register()); 3124 EmitNamedSuperPropertyLoad(prop); 3125 break; 3126 } 3127 3128 case KEYED_SUPER_PROPERTY: { 3129 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); 3130 VisitForStackValue( 3131 prop->obj()->AsSuperPropertyReference()->home_object()); 3132 VisitForAccumulatorValue(prop->key()); 3133 PushOperand(result_register()); 3134 PushOperand(MemOperand(esp, 2 * kPointerSize)); 3135 PushOperand(MemOperand(esp, 2 * kPointerSize)); 3136 PushOperand(result_register()); 3137 EmitKeyedSuperPropertyLoad(prop); 3138 break; 3139 } 3140 3141 case KEYED_PROPERTY: { 3142 VisitForStackValue(prop->obj()); 3143 VisitForStackValue(prop->key()); 3144 __ mov(LoadDescriptor::ReceiverRegister(), 3145 Operand(esp, kPointerSize)); // Object. 3146 __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0)); // Key. 3147 EmitKeyedPropertyLoad(prop); 3148 break; 3149 } 3150 3151 case VARIABLE: 3152 UNREACHABLE(); 3153 } 3154 } 3155 3156 // We need a second deoptimization point after loading the value 3157 // in case evaluating the property load my have a side effect. 3158 if (assign_type == VARIABLE) { 3159 PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER); 3160 } else { 3161 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER); 3162 } 3163 3164 // Inline smi case if we are in a loop. 3165 Label done, stub_call; 3166 JumpPatchSite patch_site(masm_); 3167 if (ShouldInlineSmiCase(expr->op())) { 3168 Label slow; 3169 patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear); 3170 3171 // Save result for postfix expressions. 3172 if (expr->is_postfix()) { 3173 if (!context()->IsEffect()) { 3174 // Save the result on the stack. If we have a named or keyed property 3175 // we store the result under the receiver that is currently on top 3176 // of the stack. 3177 switch (assign_type) { 3178 case VARIABLE: 3179 __ push(eax); 3180 break; 3181 case NAMED_PROPERTY: 3182 __ mov(Operand(esp, kPointerSize), eax); 3183 break; 3184 case NAMED_SUPER_PROPERTY: 3185 __ mov(Operand(esp, 2 * kPointerSize), eax); 3186 break; 3187 case KEYED_PROPERTY: 3188 __ mov(Operand(esp, 2 * kPointerSize), eax); 3189 break; 3190 case KEYED_SUPER_PROPERTY: 3191 __ mov(Operand(esp, 3 * kPointerSize), eax); 3192 break; 3193 } 3194 } 3195 } 3196 3197 if (expr->op() == Token::INC) { 3198 __ add(eax, Immediate(Smi::FromInt(1))); 3199 } else { 3200 __ sub(eax, Immediate(Smi::FromInt(1))); 3201 } 3202 __ j(no_overflow, &done, Label::kNear); 3203 // Call stub. Undo operation first. 3204 if (expr->op() == Token::INC) { 3205 __ sub(eax, Immediate(Smi::FromInt(1))); 3206 } else { 3207 __ add(eax, Immediate(Smi::FromInt(1))); 3208 } 3209 __ jmp(&stub_call, Label::kNear); 3210 __ bind(&slow); 3211 } 3212 3213 // Convert old value into a number. 3214 __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); 3215 PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER); 3216 3217 // Save result for postfix expressions. 3218 if (expr->is_postfix()) { 3219 if (!context()->IsEffect()) { 3220 // Save the result on the stack. 
If we have a named or keyed property 3221 // we store the result under the receiver that is currently on top 3222 // of the stack. 3223 switch (assign_type) { 3224 case VARIABLE: 3225 PushOperand(eax); 3226 break; 3227 case NAMED_PROPERTY: 3228 __ mov(Operand(esp, kPointerSize), eax); 3229 break; 3230 case NAMED_SUPER_PROPERTY: 3231 __ mov(Operand(esp, 2 * kPointerSize), eax); 3232 break; 3233 case KEYED_PROPERTY: 3234 __ mov(Operand(esp, 2 * kPointerSize), eax); 3235 break; 3236 case KEYED_SUPER_PROPERTY: 3237 __ mov(Operand(esp, 3 * kPointerSize), eax); 3238 break; 3239 } 3240 } 3241 } 3242 3243 SetExpressionPosition(expr); 3244 3245 // Call stub for +1/-1. 3246 __ bind(&stub_call); 3247 __ mov(edx, eax); 3248 __ mov(eax, Immediate(Smi::FromInt(1))); 3249 Handle<Code> code = 3250 CodeFactory::BinaryOpIC(isolate(), expr->binary_op()).code(); 3251 CallIC(code, expr->CountBinOpFeedbackId()); 3252 patch_site.EmitPatchInfo(); 3253 __ bind(&done); 3254 3255 // Store the value returned in eax. 3256 switch (assign_type) { 3257 case VARIABLE: 3258 if (expr->is_postfix()) { 3259 // Perform the assignment as if via '='. 3260 { EffectContext context(this); 3261 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 3262 Token::ASSIGN, expr->CountSlot()); 3263 PrepareForBailoutForId(expr->AssignmentId(), 3264 BailoutState::TOS_REGISTER); 3265 context.Plug(eax); 3266 } 3267 // For all contexts except EffectContext We have the result on 3268 // top of the stack. 3269 if (!context()->IsEffect()) { 3270 context()->PlugTOS(); 3271 } 3272 } else { 3273 // Perform the assignment as if via '='. 3274 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 3275 Token::ASSIGN, expr->CountSlot()); 3276 PrepareForBailoutForId(expr->AssignmentId(), 3277 BailoutState::TOS_REGISTER); 3278 context()->Plug(eax); 3279 } 3280 break; 3281 case NAMED_PROPERTY: { 3282 __ mov(StoreDescriptor::NameRegister(), 3283 prop->key()->AsLiteral()->value()); 3284 PopOperand(StoreDescriptor::ReceiverRegister()); 3285 EmitLoadStoreICSlot(expr->CountSlot()); 3286 CallStoreIC(); 3287 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER); 3288 if (expr->is_postfix()) { 3289 if (!context()->IsEffect()) { 3290 context()->PlugTOS(); 3291 } 3292 } else { 3293 context()->Plug(eax); 3294 } 3295 break; 3296 } 3297 case NAMED_SUPER_PROPERTY: { 3298 EmitNamedSuperPropertyStore(prop); 3299 if (expr->is_postfix()) { 3300 if (!context()->IsEffect()) { 3301 context()->PlugTOS(); 3302 } 3303 } else { 3304 context()->Plug(eax); 3305 } 3306 break; 3307 } 3308 case KEYED_SUPER_PROPERTY: { 3309 EmitKeyedSuperPropertyStore(prop); 3310 if (expr->is_postfix()) { 3311 if (!context()->IsEffect()) { 3312 context()->PlugTOS(); 3313 } 3314 } else { 3315 context()->Plug(eax); 3316 } 3317 break; 3318 } 3319 case KEYED_PROPERTY: { 3320 PopOperand(StoreDescriptor::NameRegister()); 3321 PopOperand(StoreDescriptor::ReceiverRegister()); 3322 Handle<Code> ic = 3323 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); 3324 EmitLoadStoreICSlot(expr->CountSlot()); 3325 CallIC(ic); 3326 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER); 3327 if (expr->is_postfix()) { 3328 // Result is on the stack 3329 if (!context()->IsEffect()) { 3330 context()->PlugTOS(); 3331 } 3332 } else { 3333 context()->Plug(eax); 3334 } 3335 break; 3336 } 3337 } 3338 } 3339 3340 3341 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr, 3342 Expression* sub_expr, 3343 Handle<String> check) { 3344 Label 
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(eax, if_true);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           isolate()->factory()->heap_number_map());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    Split(below, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, SYMBOL_TYPE, edx);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(equal, if_true);
    __ cmp(eax, isolate()->factory()->false_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_false);
    __ JumpIfSmi(eax, if_false);
    // Check for undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(eax, if_false);
    // Check for callable and not undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
    __ and_(ecx, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
    __ cmp(ecx, 1 << Map::kIsCallable);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(eax, if_false);
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_true);
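    // Illustrative: `typeof null` evaluates to "object", which is why the
    // null check above jumps straight to if_true before the receiver-type
    // test below.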
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, edx);
    __ j(below, if_false);
    // Check for callable or undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(zero, if_true, if_false, fall_through);
// clang-format off
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)   \
  } else if (String::Equals(check, factory->type##_string())) { \
    __ JumpIfSmi(eax, if_false);                                \
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),           \
           isolate()->factory()->type##_map());                 \
    Split(equal, if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
  // clang-format on
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      PopOperand(edx);
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cc = CompareIC::ComputeCondition(op);
      PopOperand(edx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ mov(ecx, edx);
        __ or_(ecx, eax);
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
        __ cmp(edx, eax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

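  // Illustrative note: every case above resolves the comparison as pure
  // control flow via Split, so Plug below only materializes a boolean when
  // the surrounding context actually needs a value (e.g. `var b = x < y;` as
  // opposed to `if (x < y) ...`).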
  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Handle<Object> nil_value = nil == kNullValue
      ? isolate()->factory()->null_value()
      : isolate()->factory()->undefined_value();
  if (expr->op() == Token::EQ_STRICT) {
    __ cmp(eax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    __ JumpIfSmi(eax, if_false);
    __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
    __ test_b(FieldOperand(eax, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


Register FullCodeGenerator::result_register() {
  return eax;
}


Register FullCodeGenerator::context_register() {
  return esi;
}

void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(value, Operand(ebp, frame_offset));
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ mov(eax, NativeContextOperand());
    PushOperand(ContextOperand(eax, Context::CLOSURE_INDEX));
  } else if (closure_scope->is_eval_scope()) {
    // Contexts nested inside eval code have the same closure as the context
    // calling eval, not the anonymous closure containing the eval code.
    // Fetch it from the context.
    PushOperand(ContextOperand(esi, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    PushOperand(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_obj));
  PushOperand(edx);

  ClearPendingMessage();
}

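// Illustrative note: EnterFinallyBlock above saves the pending message on the
// operand stack and then clears it, so code inside the finally body cannot
// observe or clobber it; ExitFinallyBlock below pops the saved value and
// makes it pending again, e.g. so that the message for
//   try { throw new Error("boom"); } finally { /* ... */ }
// is still pending after the finally clause completes.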
void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(edx));
  // Restore pending message from stack.
  PopOperand(edx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(Operand::StaticVariable(pending_message_obj), edx);
}


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(edx));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
  __ mov(Operand::StaticVariable(pending_message_obj), edx);
}


void FullCodeGenerator::DeferredCommands::EmitCommands() {
  DCHECK(!result_register().is(edx));
  __ Pop(result_register());  // Restore the accumulator.
  __ Pop(edx);                // Get the token.
  for (DeferredCommand cmd : commands_) {
    Label skip;
    __ cmp(edx, Immediate(Smi::FromInt(cmd.token)));
    __ j(not_equal, &skip);
    switch (cmd.command) {
      case kReturn:
        codegen_->EmitUnwindAndReturn();
        break;
      case kThrow:
        __ Push(result_register());
        __ CallRuntime(Runtime::kReThrow);
        break;
      case kContinue:
        codegen_->EmitContinue(cmd.target);
        break;
      case kBreak:
        codegen_->EmitBreak(cmd.target);
        break;
    }
    __ bind(&skip);
  }
}

#undef __


static const byte kJnsInstruction = 0x79;
static const byte kJnsOffset = 0x11;
static const byte kNopByteOne = 0x66;
static const byte kNopByteTwo = 0x90;
#ifdef DEBUG
static const byte kCallInstruction = 0xe8;
#endif


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;

  switch (target_state) {
    case INTERRUPT:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     jns ok
      //     call <interrupt stub>
      //   ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     nop
      //     nop
      //     call <on-stack replacement>
      //   ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  Assembler::set_target_address_at(unoptimized_code->GetIsolate(),
                                   call_target_address, unoptimized_code,
                                   replacement_code->entry());
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  DCHECK_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
    DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address,
                                           unoptimized_code));
    return INTERRUPT;
  }

  DCHECK_EQ(kNopByteOne, *jns_instr_address);
  DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));

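  // Illustrative: reaching this point means PatchAt replaced the two-byte
  // `jns` with the 0x66 0x90 two-byte nop, i.e. the back edge now falls
  // through unconditionally into the call to the on-stack-replacement
  // builtin checked below.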
  DCHECK_EQ(
      isolate->builtins()->OnStackReplacement()->entry(),
      Assembler::target_address_at(call_target_address, unoptimized_code));
  return ON_STACK_REPLACEMENT;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_IA32