// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)


class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance near_jump = Label::kFar) {
    __ testb(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, near_jump);  // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance near_jump = Label::kFar) {
    __ testb(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, near_jump);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      DCHECK(is_uint8(delta_to_patch_site));
      __ testl(rax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, near_jump);
  }

  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them.  The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o rdi: the JS function object being called (i.e. ourselves)
//   o rdx: the new target value
//   o rsi: our context
//   o rbp: our caller's frame pointer
//   o rsp: stack pointer (pointing to return address)
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-x64.h for its layout.
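//
// A hedged sketch of the frame addressing used below (N = number of formal
// parameters; frames-x64.h is the authoritative reference):
//
//   rbp + kCallerSPOffset + N * kPointerSize        : receiver
//   rbp + kCallerSPOffset                           : last parameter
//   rbp + kPointerSize                              : return address
//   rbp                                             : caller's frame pointer
//   rbp + StandardFrameConstants::kContextOffset    : context (also in rsi)
//   rbp + JavaScriptFrameConstants::kFunctionOffset : closure
//   rbp + JavaScriptFrameConstants::kLocal0Offset   : first stack local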
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->literal()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    StackArgumentsAccessor args(rsp, info->scope()->num_parameters());
    __ movp(rcx, args.GetReceiverOperand());
    __ AssertNotSmi(rcx);
    __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rcx);
    __ Assert(above_equal, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    if (locals_count == 1) {
      __ PushRoot(Heap::kUndefinedValueRootIndex);
    } else if (locals_count > 1) {
      if (locals_count >= 128) {
        Label ok;
        __ movp(rcx, rsp);
        __ subp(rcx, Immediate(locals_count * kPointerSize));
        __ CompareRoot(rcx, Heap::kRealStackLimitRootIndex);
        __ j(above_equal, &ok, Label::kNear);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ movp(rcx, Immediate(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ Push(rax);
        }
        // Continue loop if not done.
        __ decp(rcx);
        __ j(not_zero, &loop_header, Label::kNear);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ Push(rax);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    // Argument to NewContext is the function, which is still in rdi.
    if (info->scope()->is_script_scope()) {
      __ Push(rdi);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ Push(rdx);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
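        // (A hedged aside: the write barrier exists to record old-to-new
        // pointers for the scavenger, so stores into an object that is
        // itself still in new space never need one.)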
        need_write_barrier = false;
      } else {
        __ Push(rdi);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ Pop(rdx);  // Restore new target.
      }
    }
    function_in_register = false;
    // Context is returned in rax.  It replaces the context passed to us.
    // It's saved in the stack and kept live in rsi.
    __ movp(rsi, rax);
    __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rax);

    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ movp(rax, Operand(rbp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ movp(Operand(rsi, context_offset), rax);
        // Update the write barrier.  This clobbers rax and rbx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              rsi, context_offset, rax, rbx, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(rsi, rax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // The registers holding this function and the new target are both trashed
  // in case we bail out here. But since that can happen only when the new
  // target is not used and we allocate a context, the value of
  // |function_in_register| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS);

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register) {
      __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again, so keep it marked as
      // such.
    }
    SetVar(this_function_var, rdi, rbx, rcx);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, rdx, rbx, rcx);
  }

  // Possibly allocate RestParameters
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");

    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;

    __ Move(RestParamAccessDescriptor::parameter_count(),
            Smi::FromInt(num_parameters));
    __ leap(RestParamAccessDescriptor::parameter_pointer(),
            Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
    __ Move(RestParamAccessDescriptor::rest_parameter_index(),
            Smi::FromInt(rest_index));
    function_in_register = false;

    RestParamAccessStub stub(isolate());
    __ CallStub(&stub);

    SetVar(rest_param, rax, rbx, rdx);
  }

  // Possibly allocate an arguments object.
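  // A note on the stub's choice below: in sloppy mode with a simple
  // parameter list the arguments object is "mapped", i.e. its elements
  // alias the actual parameter slots; strict mode or non-simple parameters
  // force the unmapped variant. That is what the is_unmapped computation
  // selects.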
  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Arguments object must be allocated after the context object, in
    // case the "arguments" or ".arguments" variables are in the context.
    Comment cmnt(masm_, "[ Allocate arguments object");
    DCHECK(rdi.is(ArgumentsAccessNewDescriptor::function()));
    if (!function_in_register) {
      __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    }
    // The receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ Move(ArgumentsAccessNewDescriptor::parameter_count(),
            Smi::FromInt(num_parameters));
    __ leap(ArgumentsAccessNewDescriptor::parameter_pointer(),
            Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));

    // Arguments to ArgumentsAccessStub:
    //   function, parameter pointer, parameter count.
    // The stub will rewrite parameter pointer and parameter count if the
    // previous stack frame was an arguments adapter frame.
    bool is_unmapped = is_strict(language_mode()) || !has_simple_parameters();
    ArgumentsAccessStub::Type type = ArgumentsAccessStub::ComputeType(
        is_unmapped, literal()->has_duplicate_parameters());
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, rax, rbx, rdx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    VisitForEffect(scope()->GetIllegalRedeclaration());

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope()->declarations());
    }

    // Assert that the declarations do not use ICs. Otherwise the debugger
    // won't be able to redirect a PC at an IC to the correct IC in newly
    // recompiled code.
    DCHECK_EQ(0, ic_total_count_);

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
      __ j(above_equal, &ok, Label::kNear);
      __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(literal()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    EmitReturnSequence();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ Set(rax, 0);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
  __ SmiAddConstant(FieldOperand(rbx, Cell::kValueOffset),
                    Smi::FromInt(-delta));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
  __ Move(kScratchRegister, Smi::FromInt(reset_value));
  __ movp(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister);
}


static const byte kJnsOffset = kPointerSize == kInt64Size ? 0x1d : 0x14;
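// A hedged note: this constant matches the byte length asserted by the
// PredictableCodeSizeScope in EmitBackEdgeBookkeeping below (64-bit vs.
// x32 pointers), so the back-edge patching machinery can rely on a fixed
// distance between the jns and the interrupt-check call it rewrites.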


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);

  __ j(positive, &ok, Label::kNear);
  {
    PredictableCodeSizeScope predictable_code_size_scope(masm_, kJnsOffset);
    DontEmitDebugCodeScope dont_emit_debug_code_scope(masm_);
    __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

    // Record a mapping of this PC offset to the OSR id.  This is used to find
    // the AST id from the unoptimized code in order to use it as a key into
    // the deoptimization input data found in the optimized code.
    RecordBackEdge(stmt->OsrEntryId());

    EmitProfilingCounterReset();
  }
  __ bind(&ok);

  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ Push(rax);
      __ CallRuntime(Runtime::kTraceExit);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ j(positive, &ok, Label::kNear);
    __ Push(rax);
    __ call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ Pop(rax);
    EmitProfilingCounterReset();
    __ bind(&ok);

    SetReturnPosition(literal());
    __ leave();

    int arg_count = info_->scope()->num_parameters() + 1;
    int arguments_bytes = arg_count * kPointerSize;
    __ Ret(arguments_bytes, rcx);
  }
}
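

// The Plug overloads below feed an already-computed value to the current
// expression context: EffectContext discards it, AccumulatorValueContext
// leaves it in rax, StackValueContext pushes it, and TestContext branches
// on it. (This is a summary of the cases that follow, not extra machinery.)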
void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  __ Push(operand);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ PushRoot(index);
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeMove(result_register(), Smi::cast(*lit));
  } else {
    __ Move(result_register(), lit);
  }
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafePush(Smi::cast(*lit));
  } else {
    __ Push(lit);
  }
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Move(result_register(), lit);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ movp(Operand(rsp, 0), reg);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ Move(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Move(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ Push(isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Push(isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ PushRoot(value_root_index);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(equal, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
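  // Parameters live in the caller's part of the stack, above the saved
  // frame pointer and return address; locals live below rbp. Both are
  // addressed relative to rbp here.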
  if (var->IsParameter()) {
    offset += kFPOnStackSize + kPCOnStackSize +
              (info_->scope()->num_parameters() - 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return Operand(rbp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ movp(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ movp(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ movp(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
    __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
    __ Check(not_equal, kDeclarationInWithContext);
    __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}
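

// A note on the "hole initialization" below: let/const (and legacy const)
// bindings are pre-initialized with the-hole value so that a read occurring
// before the declaration has executed can be detected and turned into a
// ReferenceError; see the hole checks in EmitVariableLoad further down.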
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(),
                    zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
        __ movp(StackOperand(variable), kScratchRegister);
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
        __ movp(ContextOperand(rsi, variable->index()), kScratchRegister);
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ Push(variable->name());
      // Declaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ PushRoot(Heap::kTheHoleValueRootIndex);
      } else {
        __ Push(Smi::FromInt(0));  // Indicates no initial value.
      }
      __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ movp(StackOperand(variable), result_register());
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ movp(ContextOperand(rsi, variable->index()), result_register());
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(rsi,
                                offset,
                                result_register(),
                                rcx,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ Push(variable->name());
      VisitForStackValue(declaration->fun());
      __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ movp(rdx, Operand(rsp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ movp(rcx, rdx);
      __ orp(rcx, rax);
      patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);

      __ cmpp(rdx, rax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
                                             strength(language_mode())).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, TOS_REG);
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ testp(rax, rax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over.  If the object is null or undefined,
  // skip over the loop.  See ECMA-262 version 5, section 12.6.4.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(equal, &exit);
  Register null_value = rdi;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmpp(rax, null_value);
  __ j(equal, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(rax, &convert, Label::kNear);
  __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ Push(rax);

  // Check for proxies.
  Label call_runtime;
  __ CmpObjectType(rax, JS_PROXY_TYPE, rcx);
  __ j(equal, &call_runtime);

  // Check cache validity in generated code.  This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks.  If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(null_value, &call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ Push(rax);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kMetaMapRootIndex);
  __ j(not_equal, &fixed_array);

  // We got a map in register rax. Get the enumeration cache from it.
  __ bind(&use_cache);

  Label no_descriptors;

  __ EnumLength(rdx, rax);
  __ Cmp(rdx, Smi::FromInt(0));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(rax, rcx);
  __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset));
  __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
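
  // The loop below expects five words on the stack (from rsp upward):
  //   [rsp + 0 * kPointerSize]  current index (smi)
  //   [rsp + 1 * kPointerSize]  length / number of valid entries (smi)
  //   [rsp + 2 * kPointerSize]  enum cache or fixed array of keys
  //   [rsp + 3 * kPointerSize]  expected map, or a smi in the slow case
  //   [rsp + 4 * kPointerSize]  the enumerable object itself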
  // Set up the four remaining stack slots.
  __ Push(rax);  // Map.
  __ Push(rcx);  // Enumeration cache.
  __ Push(rdx);  // Number of valid entries for the map in the enum cache.
  __ Push(Smi::FromInt(0));  // Initial index.
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ addp(rsp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register rax. Iterate through that.
  __ bind(&fixed_array);

  // No need for a write barrier, we are storing a Smi in the feedback vector.
  __ EmitLoadTypeFeedbackVector(rbx);
  int vector_index = SmiFromSlot(slot)->value();
  __ Move(FieldOperand(rbx, FixedArray::OffsetOfElementAt(vector_index)),
          TypeFeedbackVector::MegamorphicSentinel(isolate()));
  __ movp(rcx, Operand(rsp, 0 * kPointerSize));  // Get enumerated object.
  __ Push(Smi::FromInt(1));  // Smi(1) indicates slow check.
  __ Push(rax);  // Array.
  __ movp(rax, FieldOperand(rax, FixedArray::kLengthOffset));
  __ Push(rax);  // Fixed array length (as smi).
  __ Push(Smi::FromInt(0));  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  __ movp(rax, Operand(rsp, 0 * kPointerSize));  // Get the current index.
  __ cmpp(rax, Operand(rsp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register rbx.
  __ movp(rbx, Operand(rsp, 2 * kPointerSize));
  SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
  __ movp(rbx, FieldOperand(rbx,
                            index.reg,
                            index.scale,
                            FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register rdx.
  __ movp(rdx, Operand(rsp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ movp(rcx, Operand(rsp, 4 * kPointerSize));
  __ cmpp(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ Push(rcx);  // Enumerable.
  __ Push(rbx);  // Current entry.
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(equal, loop_statement.continue_label());
  __ movp(rbx, rax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register rbx.
  __ bind(&update_each);
  __ movp(result_register(), rbx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before
  // stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ addp(rsp, Immediate(5 * kPointerSize));

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ Move(rbx, info);
    __ CallStub(&stub);
  } else {
    __ Push(info);
    __ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
                             : Runtime::kNewClosure);
  }
  context()->Plug(rax);
}


void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
  __ Move(StoreDescriptor::NameRegister(),
          isolate()->factory()->home_object_symbol());
  __ movp(StoreDescriptor::ValueRegister(),
          Operand(rsp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ movp(StoreDescriptor::ReceiverRegister(), rax);
  __ Move(StoreDescriptor::NameRegister(),
          isolate()->factory()->home_object_symbol());
  __ movp(StoreDescriptor::ValueRegister(),
          Operand(rsp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register context = rsi;
  Register temp = rdx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                         Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering rsi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Walk up the context chain. There is no frame effect, so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ movp(temp, context);
    }
    // Load map for comparison into register, outside loop.
    __ LoadRoot(kScratchRegister, Heap::kNativeContextMapRootIndex);
    __ bind(&next);
    // Terminate at native context.
    __ cmpp(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is "the hole".
    __ JumpIfNotRoot(ContextOperand(temp, Context::EXTENSION_INDEX),
                     Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ movp(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use the normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = rsi;
  Register temp = rbx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                         Heap::kTheHoleValueRootIndex, slow);
      }
      __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering rsi.
      context = temp;
    }
  }
  // Check that last extension is "the hole".
  __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                   Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an rsi-based operand (the write barrier cannot be allowed to
  // destroy the rsi register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
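  // (DYNAMIC_GLOBAL below means the variable is known to be a global unless
  // an intervening sloppy eval introduced a shadowing binding; DYNAMIC_LOCAL
  // likewise resolves to a known context slot unless shadowed. Both fast
  // paths jump to |slow| as soon as a context with a non-hole extension is
  // found.)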
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ movp(rax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
      __ j(not_equal, done);
      if (local->mode() == CONST_LEGACY) {
        __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
      } else {  // LET || CONST
        __ Push(var->name());
        __ CallRuntime(Runtime::kThrowReferenceError);
      }
    }
    __ jmp(done);
  }
}


void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  __ Move(LoadDescriptor::NameRegister(), var->name());
  __ LoadGlobalObject(LoadDescriptor::ReceiverRegister());
  __ Move(LoadDescriptor::SlotRegister(),
          SmiFromSlot(proxy->VariableFeedbackSlot()));
  CallLoadIC(typeof_mode);
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(rax);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context slot"
                                               : "[ Stack slot");
      if (NeedsHoleCheckForLoad(proxy)) {
        // Let and const need a read barrier.
        Label done;
        GetVar(rax, var);
        __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
        __ j(not_equal, &done, Label::kNear);
        if (var->mode() == LET || var->mode() == CONST) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          __ Push(var->name());
          __ CallRuntime(Runtime::kThrowReferenceError);
        } else {
          // Uninitialized legacy const bindings are unholed.
          DCHECK(var->mode() == CONST_LEGACY);
          __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
        }
        __ bind(&done);
        context()->Plug(rax);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup slot");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ Push(rsi);  // Context.
      __ Push(var->name());
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotNoReferenceError;
      __ CallRuntime(function_id);
      __ bind(&done);
      context()->Plug(rax);
      break;
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ Move(rax, Smi::FromInt(expr->literal_index()));
  __ Move(rcx, expr->pattern());
  __ Move(rdx, Smi::FromInt(expr->flags()));
  FastCloneRegExpStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    __ PushRoot(Heap::kNullValueRootIndex);
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
      EmitSetHomeObject(expression, offset, property->GetSlot());
    }
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->ComputeFlags();
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ Push(Smi::FromInt(expr->literal_index()));
    __ Push(constant_properties);
    __ Push(Smi::FromInt(flags));
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ Move(rbx, Smi::FromInt(expr->literal_index()));
    __ Move(rcx, constant_properties);
    __ Move(rdx, Smi::FromInt(flags));
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in rax.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      __ Push(rax);  // Save result on the stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
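        // (In other words, the property already exists on the clone, so a
        // plain store IC suffices; no property definition call is needed.)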
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(rax));
            __ Move(StoreDescriptor::NameRegister(), key->value());
            __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
            EmitLoadStoreICSlot(property->GetSlot(0));
            CallStoreIC();
            PrepareForBailoutForId(key->id(), NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        __ Push(Operand(rsp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          __ Push(Smi::FromInt(SLOPPY));  // Language mode.
          __ CallRuntime(Runtime::kSetProperty);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        __ Push(Operand(rsp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        __ CallRuntime(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                               NO_REGISTERS);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ Push(Operand(rsp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ Push(Smi::FromInt(NONE));
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right.  All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      __ Push(rax);  // Save result on the stack.
      result_saved = true;
    }

    __ Push(Operand(rsp, 0));  // Duplicate receiver.

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      __ CallRuntime(Runtime::kInternalSetPrototype);
      PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                             NO_REGISTERS);
    } else {
      EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
      VisitForStackValue(value);
      if (NeedsHomeObject(value)) {
        EmitSetHomeObject(value, 2, property->GetSlot());
      }

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            __ Push(Smi::FromInt(NONE));
            __ CallRuntime(Runtime::kDefineDataPropertyUnchecked);
          } else {
            __ Drop(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          __ Push(Smi::FromInt(NONE));
          __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked);
          break;

        case ObjectLiteral::Property::SETTER:
          __ Push(Smi::FromInt(NONE));
          __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked);
          break;
      }
    }
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ Push(Operand(rsp, 0));
    __ CallRuntime(Runtime::kToFastProperties);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(rax);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_constant_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ Push(Smi::FromInt(expr->literal_index()));
    __ Push(constant_elements);
    __ Push(Smi::FromInt(expr->ComputeFlags()));
    __ CallRuntime(Runtime::kCreateArrayLiteral);
  } else {
    __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ Move(rbx, Smi::FromInt(expr->literal_index()));
    __ Move(rcx, constant_elements);
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  int array_index = 0;
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    if (subexpr->IsSpread()) break;

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
1668     if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1669 
1670     if (!result_saved) {
1671       __ Push(rax);  // array literal
1672       result_saved = true;
1673     }
1674     VisitForAccumulatorValue(subexpr);
1675 
1676     __ Move(StoreDescriptor::NameRegister(), Smi::FromInt(array_index));
1677     __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
1678     EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1679     Handle<Code> ic =
1680         CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1681     CallIC(ic);
1682 
1683     PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1684   }
1685 
1686   // If the array literal contains spread expressions, it has two parts: a
1687   // "static" prefix, handled above, whose elements are set into the clone by
1688   // literal index, and the part starting at the first spread expression
1689   // (inclusive), whose elements are appended to the array. Note that the
1690   // number of elements an iterable produces is unknown ahead of time.
1691   if (array_index < length && result_saved) {
1692     __ Pop(rax);
1693     result_saved = false;
1694   }
1695   for (; array_index < length; array_index++) {
1696     Expression* subexpr = subexprs->at(array_index);
1697 
1698     __ Push(rax);
1699     if (subexpr->IsSpread()) {
1700       VisitForStackValue(subexpr->AsSpread()->expression());
1701       __ InvokeBuiltin(Context::CONCAT_ITERABLE_TO_ARRAY_BUILTIN_INDEX,
1702                        CALL_FUNCTION);
1703     } else {
1704       VisitForStackValue(subexpr);
1705       __ CallRuntime(Runtime::kAppendElement);
1706     }
1707 
1708     PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1709   }
1710 
1711   if (result_saved) {
1712     context()->PlugTOS();
1713   } else {
1714     context()->Plug(rax);
1715   }
1716 }
1717 
1718 
1719 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1720   DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1721 
1722   Comment cmnt(masm_, "[ Assignment");
1723   SetExpressionPosition(expr, INSERT_BREAK);
1724 
1725   Property* property = expr->target()->AsProperty();
1726   LhsKind assign_type = Property::GetAssignType(property);
1727 
1728   // Evaluate LHS expression.
1729   switch (assign_type) {
1730     case VARIABLE:
1731       // Nothing to do here.
1732       break;
1733     case NAMED_PROPERTY:
1734       if (expr->is_compound()) {
1735         // We need the receiver both on the stack and in the register.
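        // For a compound assignment such as o.x += 1, the receiver is needed
        // twice: in the register for the load of o.x just below, and on the
        // stack for the store emitted afterwards.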
1736 VisitForStackValue(property->obj()); 1737 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0)); 1738 } else { 1739 VisitForStackValue(property->obj()); 1740 } 1741 break; 1742 case NAMED_SUPER_PROPERTY: 1743 VisitForStackValue( 1744 property->obj()->AsSuperPropertyReference()->this_var()); 1745 VisitForAccumulatorValue( 1746 property->obj()->AsSuperPropertyReference()->home_object()); 1747 __ Push(result_register()); 1748 if (expr->is_compound()) { 1749 __ Push(MemOperand(rsp, kPointerSize)); 1750 __ Push(result_register()); 1751 } 1752 break; 1753 case KEYED_SUPER_PROPERTY: 1754 VisitForStackValue( 1755 property->obj()->AsSuperPropertyReference()->this_var()); 1756 VisitForStackValue( 1757 property->obj()->AsSuperPropertyReference()->home_object()); 1758 VisitForAccumulatorValue(property->key()); 1759 __ Push(result_register()); 1760 if (expr->is_compound()) { 1761 __ Push(MemOperand(rsp, 2 * kPointerSize)); 1762 __ Push(MemOperand(rsp, 2 * kPointerSize)); 1763 __ Push(result_register()); 1764 } 1765 break; 1766 case KEYED_PROPERTY: { 1767 if (expr->is_compound()) { 1768 VisitForStackValue(property->obj()); 1769 VisitForStackValue(property->key()); 1770 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize)); 1771 __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0)); 1772 } else { 1773 VisitForStackValue(property->obj()); 1774 VisitForStackValue(property->key()); 1775 } 1776 break; 1777 } 1778 } 1779 1780 // For compound assignments we need another deoptimization point after the 1781 // variable/property load. 1782 if (expr->is_compound()) { 1783 { AccumulatorValueContext context(this); 1784 switch (assign_type) { 1785 case VARIABLE: 1786 EmitVariableLoad(expr->target()->AsVariableProxy()); 1787 PrepareForBailout(expr->target(), TOS_REG); 1788 break; 1789 case NAMED_PROPERTY: 1790 EmitNamedPropertyLoad(property); 1791 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1792 break; 1793 case NAMED_SUPER_PROPERTY: 1794 EmitNamedSuperPropertyLoad(property); 1795 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1796 break; 1797 case KEYED_SUPER_PROPERTY: 1798 EmitKeyedSuperPropertyLoad(property); 1799 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1800 break; 1801 case KEYED_PROPERTY: 1802 EmitKeyedPropertyLoad(property); 1803 PrepareForBailoutForId(property->LoadId(), TOS_REG); 1804 break; 1805 } 1806 } 1807 1808 Token::Value op = expr->binary_op(); 1809 __ Push(rax); // Left operand goes on the stack. 1810 VisitForAccumulatorValue(expr->value()); 1811 1812 AccumulatorValueContext context(this); 1813 if (ShouldInlineSmiCase(op)) { 1814 EmitInlineSmiBinaryOp(expr->binary_operation(), 1815 op, 1816 expr->target(), 1817 expr->value()); 1818 } else { 1819 EmitBinaryOp(expr->binary_operation(), op); 1820 } 1821 // Deoptimization point in case the binary operation may have side effects. 1822 PrepareForBailout(expr->binary_operation(), TOS_REG); 1823 } else { 1824 VisitForAccumulatorValue(expr->value()); 1825 } 1826 1827 SetExpressionPosition(expr); 1828 1829 // Store the value. 
1830 switch (assign_type) { 1831 case VARIABLE: 1832 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(), 1833 expr->op(), expr->AssignmentSlot()); 1834 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG); 1835 context()->Plug(rax); 1836 break; 1837 case NAMED_PROPERTY: 1838 EmitNamedPropertyAssignment(expr); 1839 break; 1840 case NAMED_SUPER_PROPERTY: 1841 EmitNamedSuperPropertyStore(property); 1842 context()->Plug(rax); 1843 break; 1844 case KEYED_SUPER_PROPERTY: 1845 EmitKeyedSuperPropertyStore(property); 1846 context()->Plug(rax); 1847 break; 1848 case KEYED_PROPERTY: 1849 EmitKeyedPropertyAssignment(expr); 1850 break; 1851 } 1852 } 1853 1854 1855 void FullCodeGenerator::VisitYield(Yield* expr) { 1856 Comment cmnt(masm_, "[ Yield"); 1857 SetExpressionPosition(expr); 1858 1859 // Evaluate yielded value first; the initial iterator definition depends on 1860 // this. It stays on the stack while we update the iterator. 1861 VisitForStackValue(expr->expression()); 1862 1863 switch (expr->yield_kind()) { 1864 case Yield::kSuspend: 1865 // Pop value from top-of-stack slot; box result into result register. 1866 EmitCreateIteratorResult(false); 1867 __ Push(result_register()); 1868 // Fall through. 1869 case Yield::kInitial: { 1870 Label suspend, continuation, post_runtime, resume; 1871 1872 __ jmp(&suspend); 1873 __ bind(&continuation); 1874 __ RecordGeneratorContinuation(); 1875 __ jmp(&resume); 1876 1877 __ bind(&suspend); 1878 VisitForAccumulatorValue(expr->generator_object()); 1879 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos())); 1880 __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset), 1881 Smi::FromInt(continuation.pos())); 1882 __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi); 1883 __ movp(rcx, rsi); 1884 __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx, 1885 kDontSaveFPRegs); 1886 __ leap(rbx, Operand(rbp, StandardFrameConstants::kExpressionsOffset)); 1887 __ cmpp(rsp, rbx); 1888 __ j(equal, &post_runtime); 1889 __ Push(rax); // generator object 1890 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1); 1891 __ movp(context_register(), 1892 Operand(rbp, StandardFrameConstants::kContextOffset)); 1893 __ bind(&post_runtime); 1894 1895 __ Pop(result_register()); 1896 EmitReturnSequence(); 1897 1898 __ bind(&resume); 1899 context()->Plug(result_register()); 1900 break; 1901 } 1902 1903 case Yield::kFinal: { 1904 VisitForAccumulatorValue(expr->generator_object()); 1905 __ Move(FieldOperand(result_register(), 1906 JSGeneratorObject::kContinuationOffset), 1907 Smi::FromInt(JSGeneratorObject::kGeneratorClosed)); 1908 // Pop value from top-of-stack slot, box result into result register. 1909 EmitCreateIteratorResult(true); 1910 EmitUnwindBeforeReturn(); 1911 EmitReturnSequence(); 1912 break; 1913 } 1914 1915 case Yield::kDelegating: { 1916 VisitForStackValue(expr->generator_object()); 1917 1918 // Initial stack layout is as follows: 1919 // [sp + 1 * kPointerSize] iter 1920 // [sp + 0 * kPointerSize] g 1921 1922 Label l_catch, l_try, l_suspend, l_continuation, l_resume; 1923 Label l_next, l_call, l_loop; 1924 Register load_receiver = LoadDescriptor::ReceiverRegister(); 1925 Register load_name = LoadDescriptor::NameRegister(); 1926 1927 // Initial send value is undefined. 
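      // For example, for
      //   function* outer() { yield* inner(); }
      // the loop below first invokes the delegate roughly as
      //   iter.next(undefined)
      // and keeps iterating until result.done is true.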
1928 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); 1929 __ jmp(&l_next); 1930 1931 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; } 1932 __ bind(&l_catch); 1933 __ LoadRoot(load_name, Heap::kthrow_stringRootIndex); // "throw" 1934 __ Push(load_name); 1935 __ Push(Operand(rsp, 2 * kPointerSize)); // iter 1936 __ Push(rax); // exception 1937 __ jmp(&l_call); 1938 1939 // try { received = %yield result } 1940 // Shuffle the received result above a try handler and yield it without 1941 // re-boxing. 1942 __ bind(&l_try); 1943 __ Pop(rax); // result 1944 int handler_index = NewHandlerTableEntry(); 1945 EnterTryBlock(handler_index, &l_catch); 1946 const int try_block_size = TryCatch::kElementCount * kPointerSize; 1947 __ Push(rax); // result 1948 1949 __ jmp(&l_suspend); 1950 __ bind(&l_continuation); 1951 __ RecordGeneratorContinuation(); 1952 __ jmp(&l_resume); 1953 1954 __ bind(&l_suspend); 1955 const int generator_object_depth = kPointerSize + try_block_size; 1956 __ movp(rax, Operand(rsp, generator_object_depth)); 1957 __ Push(rax); // g 1958 __ Push(Smi::FromInt(handler_index)); // handler-index 1959 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos())); 1960 __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset), 1961 Smi::FromInt(l_continuation.pos())); 1962 __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi); 1963 __ movp(rcx, rsi); 1964 __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx, 1965 kDontSaveFPRegs); 1966 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2); 1967 __ movp(context_register(), 1968 Operand(rbp, StandardFrameConstants::kContextOffset)); 1969 __ Pop(rax); // result 1970 EmitReturnSequence(); 1971 __ bind(&l_resume); // received in rax 1972 ExitTryBlock(handler_index); 1973 1974 // receiver = iter; f = 'next'; arg = received; 1975 __ bind(&l_next); 1976 1977 __ LoadRoot(load_name, Heap::knext_stringRootIndex); 1978 __ Push(load_name); // "next" 1979 __ Push(Operand(rsp, 2 * kPointerSize)); // iter 1980 __ Push(rax); // received 1981 1982 // result = receiver[f](arg); 1983 __ bind(&l_call); 1984 __ movp(load_receiver, Operand(rsp, kPointerSize)); 1985 __ Move(LoadDescriptor::SlotRegister(), 1986 SmiFromSlot(expr->KeyedLoadFeedbackSlot())); 1987 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code(); 1988 CallIC(ic, TypeFeedbackId::None()); 1989 __ movp(rdi, rax); 1990 __ movp(Operand(rsp, 2 * kPointerSize), rdi); 1991 1992 SetCallPosition(expr); 1993 __ Set(rax, 1); 1994 __ Call( 1995 isolate()->builtins()->Call(ConvertReceiverMode::kNotNullOrUndefined), 1996 RelocInfo::CODE_TARGET); 1997 1998 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 1999 __ Drop(1); // The function is still on the stack; drop it. 
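      // Taken together, the calls above and the test below behave roughly like
      //   do { result = iter[f](arg); } while (!result.done);
      // where f is "next" on the normal path and "throw" on the catch path.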
2000 2001 // if (!result.done) goto l_try; 2002 __ bind(&l_loop); 2003 __ Move(load_receiver, rax); 2004 __ Push(load_receiver); // save result 2005 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done" 2006 __ Move(LoadDescriptor::SlotRegister(), 2007 SmiFromSlot(expr->DoneFeedbackSlot())); 2008 CallLoadIC(NOT_INSIDE_TYPEOF); // rax=result.done 2009 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate()); 2010 CallIC(bool_ic); 2011 __ CompareRoot(result_register(), Heap::kTrueValueRootIndex); 2012 __ j(not_equal, &l_try); 2013 2014 // result.value 2015 __ Pop(load_receiver); // result 2016 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value" 2017 __ Move(LoadDescriptor::SlotRegister(), 2018 SmiFromSlot(expr->ValueFeedbackSlot())); 2019 CallLoadIC(NOT_INSIDE_TYPEOF); // result.value in rax 2020 context()->DropAndPlug(2, rax); // drop iter and g 2021 break; 2022 } 2023 } 2024 } 2025 2026 2027 void FullCodeGenerator::EmitGeneratorResume(Expression *generator, 2028 Expression *value, 2029 JSGeneratorObject::ResumeMode resume_mode) { 2030 // The value stays in rax, and is ultimately read by the resumed generator, as 2031 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it 2032 // is read to throw the value when the resumed generator is already closed. 2033 // rbx will hold the generator object until the activation has been resumed. 2034 VisitForStackValue(generator); 2035 VisitForAccumulatorValue(value); 2036 __ Pop(rbx); 2037 2038 // Load suspended function and context. 2039 __ movp(rsi, FieldOperand(rbx, JSGeneratorObject::kContextOffset)); 2040 __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset)); 2041 2042 // Push receiver. 2043 __ Push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset)); 2044 2045 // Push holes for arguments to generator function. 2046 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 2047 __ LoadSharedFunctionInfoSpecialField(rdx, rdx, 2048 SharedFunctionInfo::kFormalParameterCountOffset); 2049 __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex); 2050 Label push_argument_holes, push_frame; 2051 __ bind(&push_argument_holes); 2052 __ subp(rdx, Immediate(1)); 2053 __ j(carry, &push_frame); 2054 __ Push(rcx); 2055 __ jmp(&push_argument_holes); 2056 2057 // Enter a new JavaScript frame, and initialize its slots as they were when 2058 // the generator was suspended. 2059 Label resume_frame, done; 2060 __ bind(&push_frame); 2061 __ call(&resume_frame); 2062 __ jmp(&done); 2063 __ bind(&resume_frame); 2064 __ pushq(rbp); // Caller's frame pointer. 2065 __ movp(rbp, rsp); 2066 __ Push(rsi); // Callee's context. 2067 __ Push(rdi); // Callee's JS Function. 2068 2069 // Load the operand stack size. 2070 __ movp(rdx, FieldOperand(rbx, JSGeneratorObject::kOperandStackOffset)); 2071 __ movp(rdx, FieldOperand(rdx, FixedArray::kLengthOffset)); 2072 __ SmiToInteger32(rdx, rdx); 2073 2074 // If we are sending a value and there is no operand stack, we can jump back 2075 // in directly. 
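  // In that case the resume address is simply the code entry of the suspended
  // function plus the stored continuation offset, as computed below.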
2076 if (resume_mode == JSGeneratorObject::NEXT) { 2077 Label slow_resume; 2078 __ cmpp(rdx, Immediate(0)); 2079 __ j(not_zero, &slow_resume); 2080 __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); 2081 __ SmiToInteger64(rcx, 2082 FieldOperand(rbx, JSGeneratorObject::kContinuationOffset)); 2083 __ addp(rdx, rcx); 2084 __ Move(FieldOperand(rbx, JSGeneratorObject::kContinuationOffset), 2085 Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)); 2086 __ jmp(rdx); 2087 __ bind(&slow_resume); 2088 } 2089 2090 // Otherwise, we push holes for the operand stack and call the runtime to fix 2091 // up the stack and the handlers. 2092 Label push_operand_holes, call_resume; 2093 __ bind(&push_operand_holes); 2094 __ subp(rdx, Immediate(1)); 2095 __ j(carry, &call_resume); 2096 __ Push(rcx); 2097 __ jmp(&push_operand_holes); 2098 __ bind(&call_resume); 2099 __ Push(rbx); 2100 __ Push(result_register()); 2101 __ Push(Smi::FromInt(resume_mode)); 2102 __ CallRuntime(Runtime::kResumeJSGeneratorObject); 2103 // Not reached: the runtime call returns elsewhere. 2104 __ Abort(kGeneratorFailedToResume); 2105 2106 __ bind(&done); 2107 context()->Plug(result_register()); 2108 } 2109 2110 2111 void FullCodeGenerator::EmitCreateIteratorResult(bool done) { 2112 Label allocate, done_allocate; 2113 2114 __ Allocate(JSIteratorResult::kSize, rax, rcx, rdx, &allocate, TAG_OBJECT); 2115 __ jmp(&done_allocate, Label::kNear); 2116 2117 __ bind(&allocate); 2118 __ Push(Smi::FromInt(JSIteratorResult::kSize)); 2119 __ CallRuntime(Runtime::kAllocateInNewSpace); 2120 2121 __ bind(&done_allocate); 2122 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, rbx); 2123 __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx); 2124 __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex); 2125 __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rbx); 2126 __ movp(FieldOperand(rax, JSObject::kElementsOffset), rbx); 2127 __ Pop(FieldOperand(rax, JSIteratorResult::kValueOffset)); 2128 __ LoadRoot(FieldOperand(rax, JSIteratorResult::kDoneOffset), 2129 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex); 2130 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize); 2131 } 2132 2133 2134 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) { 2135 SetExpressionPosition(prop); 2136 Literal* key = prop->key()->AsLiteral(); 2137 DCHECK(!prop->IsSuperAccess()); 2138 2139 __ Move(LoadDescriptor::NameRegister(), key->value()); 2140 __ Move(LoadDescriptor::SlotRegister(), 2141 SmiFromSlot(prop->PropertyFeedbackSlot())); 2142 CallLoadIC(NOT_INSIDE_TYPEOF, language_mode()); 2143 } 2144 2145 2146 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) { 2147 // Stack: receiver, home_object 2148 SetExpressionPosition(prop); 2149 Literal* key = prop->key()->AsLiteral(); 2150 DCHECK(!key->value()->IsSmi()); 2151 DCHECK(prop->IsSuperAccess()); 2152 2153 __ Push(key->value()); 2154 __ Push(Smi::FromInt(language_mode())); 2155 __ CallRuntime(Runtime::kLoadFromSuper); 2156 } 2157 2158 2159 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) { 2160 SetExpressionPosition(prop); 2161 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code(); 2162 __ Move(LoadDescriptor::SlotRegister(), 2163 SmiFromSlot(prop->PropertyFeedbackSlot())); 2164 CallIC(ic); 2165 } 2166 2167 2168 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) { 2169 // Stack: receiver, home_object, key. 
2170   SetExpressionPosition(prop);
2171   __ Push(Smi::FromInt(language_mode()));
2172   __ CallRuntime(Runtime::kLoadKeyedFromSuper);
2173 }
2174 
2175 
2176 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2177                                               Token::Value op,
2178                                               Expression* left,
2179                                               Expression* right) {
2180   // Do combined smi check of the operands. Left operand is on the
2181   // stack (popped into rdx). Right operand is in rax but moved into
2182   // rcx to make the shifts easier.
2183   Label done, stub_call, smi_case;
2184   __ Pop(rdx);
2185   __ movp(rcx, rax);
2186   __ orp(rax, rdx);
2187   JumpPatchSite patch_site(masm_);
2188   patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
2189 
2190   __ bind(&stub_call);
2191   __ movp(rax, rcx);
2192   Handle<Code> code =
2193       CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2194   CallIC(code, expr->BinaryOperationFeedbackId());
2195   patch_site.EmitPatchInfo();
2196   __ jmp(&done, Label::kNear);
2197 
2198   __ bind(&smi_case);
2199   switch (op) {
2200     case Token::SAR:
2201       __ SmiShiftArithmeticRight(rax, rdx, rcx);
2202       break;
2203     case Token::SHL:
2204       __ SmiShiftLeft(rax, rdx, rcx, &stub_call);
2205       break;
2206     case Token::SHR:
2207       __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
2208       break;
2209     case Token::ADD:
2210       __ SmiAdd(rax, rdx, rcx, &stub_call);
2211       break;
2212     case Token::SUB:
2213       __ SmiSub(rax, rdx, rcx, &stub_call);
2214       break;
2215     case Token::MUL:
2216       __ SmiMul(rax, rdx, rcx, &stub_call);
2217       break;
2218     case Token::BIT_OR:
2219       __ SmiOr(rax, rdx, rcx);
2220       break;
2221     case Token::BIT_AND:
2222       __ SmiAnd(rax, rdx, rcx);
2223       break;
2224     case Token::BIT_XOR:
2225       __ SmiXor(rax, rdx, rcx);
2226       break;
2227     default:
2228       UNREACHABLE();
2229       break;
2230   }
2231 
2232   __ bind(&done);
2233   context()->Plug(rax);
2234 }
2235 
2236 
2237 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2238   // Constructor is in rax.
2239   DCHECK(lit != NULL);
2240   __ Push(rax);
2241 
2242   // No access check is needed here since the constructor is created by the
2243   // class literal.
2244   Register scratch = rbx;
2245   __ movp(scratch, FieldOperand(rax, JSFunction::kPrototypeOrInitialMapOffset));
2246   __ Push(scratch);
2247 
2248   for (int i = 0; i < lit->properties()->length(); i++) {
2249     ObjectLiteral::Property* property = lit->properties()->at(i);
2250     Expression* value = property->value();
2251 
2252     if (property->is_static()) {
2253       __ Push(Operand(rsp, kPointerSize));  // constructor
2254     } else {
2255       __ Push(Operand(rsp, 0));  // prototype
2256     }
2257     EmitPropertyKey(property, lit->GetIdForProperty(i));
2258 
2259     // The static "prototype" property is read-only. The parser handles the
2260     // non-computed property name case, so a check for an own read-only
2261     // property is only needed for static computed names; we special-case
2262     // that below rather than checking every property.
2263 if (property->is_static() && property->is_computed_name()) { 2264 __ CallRuntime(Runtime::kThrowIfStaticPrototype); 2265 __ Push(rax); 2266 } 2267 2268 VisitForStackValue(value); 2269 if (NeedsHomeObject(value)) { 2270 EmitSetHomeObject(value, 2, property->GetSlot()); 2271 } 2272 2273 switch (property->kind()) { 2274 case ObjectLiteral::Property::CONSTANT: 2275 case ObjectLiteral::Property::MATERIALIZED_LITERAL: 2276 case ObjectLiteral::Property::PROTOTYPE: 2277 UNREACHABLE(); 2278 case ObjectLiteral::Property::COMPUTED: 2279 __ CallRuntime(Runtime::kDefineClassMethod); 2280 break; 2281 2282 case ObjectLiteral::Property::GETTER: 2283 __ Push(Smi::FromInt(DONT_ENUM)); 2284 __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked); 2285 break; 2286 2287 case ObjectLiteral::Property::SETTER: 2288 __ Push(Smi::FromInt(DONT_ENUM)); 2289 __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked); 2290 break; 2291 2292 default: 2293 UNREACHABLE(); 2294 } 2295 } 2296 2297 // Set both the prototype and constructor to have fast properties, and also 2298 // freeze them in strong mode. 2299 __ CallRuntime(Runtime::kFinalizeClassDefinition); 2300 } 2301 2302 2303 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) { 2304 __ Pop(rdx); 2305 Handle<Code> code = 2306 CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code(); 2307 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code. 2308 CallIC(code, expr->BinaryOperationFeedbackId()); 2309 patch_site.EmitPatchInfo(); 2310 context()->Plug(rax); 2311 } 2312 2313 2314 void FullCodeGenerator::EmitAssignment(Expression* expr, 2315 FeedbackVectorSlot slot) { 2316 DCHECK(expr->IsValidReferenceExpressionOrThis()); 2317 2318 Property* prop = expr->AsProperty(); 2319 LhsKind assign_type = Property::GetAssignType(prop); 2320 2321 switch (assign_type) { 2322 case VARIABLE: { 2323 Variable* var = expr->AsVariableProxy()->var(); 2324 EffectContext context(this); 2325 EmitVariableAssignment(var, Token::ASSIGN, slot); 2326 break; 2327 } 2328 case NAMED_PROPERTY: { 2329 __ Push(rax); // Preserve value. 2330 VisitForAccumulatorValue(prop->obj()); 2331 __ Move(StoreDescriptor::ReceiverRegister(), rax); 2332 __ Pop(StoreDescriptor::ValueRegister()); // Restore value. 
2333 __ Move(StoreDescriptor::NameRegister(), 2334 prop->key()->AsLiteral()->value()); 2335 EmitLoadStoreICSlot(slot); 2336 CallStoreIC(); 2337 break; 2338 } 2339 case NAMED_SUPER_PROPERTY: { 2340 __ Push(rax); 2341 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); 2342 VisitForAccumulatorValue( 2343 prop->obj()->AsSuperPropertyReference()->home_object()); 2344 // stack: value, this; rax: home_object 2345 Register scratch = rcx; 2346 Register scratch2 = rdx; 2347 __ Move(scratch, result_register()); // home_object 2348 __ movp(rax, MemOperand(rsp, kPointerSize)); // value 2349 __ movp(scratch2, MemOperand(rsp, 0)); // this 2350 __ movp(MemOperand(rsp, kPointerSize), scratch2); // this 2351 __ movp(MemOperand(rsp, 0), scratch); // home_object 2352 // stack: this, home_object; rax: value 2353 EmitNamedSuperPropertyStore(prop); 2354 break; 2355 } 2356 case KEYED_SUPER_PROPERTY: { 2357 __ Push(rax); 2358 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var()); 2359 VisitForStackValue( 2360 prop->obj()->AsSuperPropertyReference()->home_object()); 2361 VisitForAccumulatorValue(prop->key()); 2362 Register scratch = rcx; 2363 Register scratch2 = rdx; 2364 __ movp(scratch2, MemOperand(rsp, 2 * kPointerSize)); // value 2365 // stack: value, this, home_object; rax: key, rdx: value 2366 __ movp(scratch, MemOperand(rsp, kPointerSize)); // this 2367 __ movp(MemOperand(rsp, 2 * kPointerSize), scratch); 2368 __ movp(scratch, MemOperand(rsp, 0)); // home_object 2369 __ movp(MemOperand(rsp, kPointerSize), scratch); 2370 __ movp(MemOperand(rsp, 0), rax); 2371 __ Move(rax, scratch2); 2372 // stack: this, home_object, key; rax: value. 2373 EmitKeyedSuperPropertyStore(prop); 2374 break; 2375 } 2376 case KEYED_PROPERTY: { 2377 __ Push(rax); // Preserve value. 2378 VisitForStackValue(prop->obj()); 2379 VisitForAccumulatorValue(prop->key()); 2380 __ Move(StoreDescriptor::NameRegister(), rax); 2381 __ Pop(StoreDescriptor::ReceiverRegister()); 2382 __ Pop(StoreDescriptor::ValueRegister()); // Restore value. 2383 EmitLoadStoreICSlot(slot); 2384 Handle<Code> ic = 2385 CodeFactory::KeyedStoreIC(isolate(), language_mode()).code(); 2386 CallIC(ic); 2387 break; 2388 } 2389 } 2390 context()->Plug(rax); 2391 } 2392 2393 2394 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot( 2395 Variable* var, MemOperand location) { 2396 __ movp(location, rax); 2397 if (var->IsContextSlot()) { 2398 __ movp(rdx, rax); 2399 __ RecordWriteContextSlot( 2400 rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs); 2401 } 2402 } 2403 2404 2405 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op, 2406 FeedbackVectorSlot slot) { 2407 if (var->IsUnallocated()) { 2408 // Global var, const, or let. 2409 __ Move(StoreDescriptor::NameRegister(), var->name()); 2410 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister()); 2411 EmitLoadStoreICSlot(slot); 2412 CallStoreIC(); 2413 2414 } else if (var->mode() == LET && op != Token::INIT) { 2415 // Non-initializing assignment to let variable needs a write barrier. 
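    // The hole check emitted below makes, e.g.,
    //   { x = 1; let x; }
    // throw a ReferenceError, because x still holds the hole when the
    // assignment runs.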
2416 DCHECK(!var->IsLookupSlot()); 2417 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); 2418 Label assign; 2419 MemOperand location = VarOperand(var, rcx); 2420 __ movp(rdx, location); 2421 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); 2422 __ j(not_equal, &assign, Label::kNear); 2423 __ Push(var->name()); 2424 __ CallRuntime(Runtime::kThrowReferenceError); 2425 __ bind(&assign); 2426 EmitStoreToStackLocalOrContextSlot(var, location); 2427 2428 } else if (var->mode() == CONST && op != Token::INIT) { 2429 // Assignment to const variable needs a write barrier. 2430 DCHECK(!var->IsLookupSlot()); 2431 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); 2432 Label const_error; 2433 MemOperand location = VarOperand(var, rcx); 2434 __ movp(rdx, location); 2435 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); 2436 __ j(not_equal, &const_error, Label::kNear); 2437 __ Push(var->name()); 2438 __ CallRuntime(Runtime::kThrowReferenceError); 2439 __ bind(&const_error); 2440 __ CallRuntime(Runtime::kThrowConstAssignError); 2441 2442 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) { 2443 // Initializing assignment to const {this} needs a write barrier. 2444 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); 2445 Label uninitialized_this; 2446 MemOperand location = VarOperand(var, rcx); 2447 __ movp(rdx, location); 2448 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); 2449 __ j(equal, &uninitialized_this); 2450 __ Push(var->name()); 2451 __ CallRuntime(Runtime::kThrowReferenceError); 2452 __ bind(&uninitialized_this); 2453 EmitStoreToStackLocalOrContextSlot(var, location); 2454 2455 } else if (!var->is_const_mode() || 2456 (var->mode() == CONST && op == Token::INIT)) { 2457 if (var->IsLookupSlot()) { 2458 // Assignment to var. 2459 __ Push(rax); // Value. 2460 __ Push(rsi); // Context. 2461 __ Push(var->name()); 2462 __ Push(Smi::FromInt(language_mode())); 2463 __ CallRuntime(Runtime::kStoreLookupSlot); 2464 } else { 2465 // Assignment to var or initializing assignment to let/const in harmony 2466 // mode. 2467 DCHECK(var->IsStackAllocated() || var->IsContextSlot()); 2468 MemOperand location = VarOperand(var, rcx); 2469 if (generate_debug_code_ && var->mode() == LET && op == Token::INIT) { 2470 // Check for an uninitialized let binding. 2471 __ movp(rdx, location); 2472 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); 2473 __ Check(equal, kLetBindingReInitialization); 2474 } 2475 EmitStoreToStackLocalOrContextSlot(var, location); 2476 } 2477 2478 } else if (var->mode() == CONST_LEGACY && op == Token::INIT) { 2479 // Const initializers need a write barrier. 2480 DCHECK(!var->IsParameter()); // No const parameters. 2481 if (var->IsLookupSlot()) { 2482 __ Push(rax); 2483 __ Push(rsi); 2484 __ Push(var->name()); 2485 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot); 2486 } else { 2487 DCHECK(var->IsStackLocal() || var->IsContextSlot()); 2488 Label skip; 2489 MemOperand location = VarOperand(var, rcx); 2490 __ movp(rdx, location); 2491 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex); 2492 __ j(not_equal, &skip); 2493 EmitStoreToStackLocalOrContextSlot(var, location); 2494 __ bind(&skip); 2495 } 2496 2497 } else { 2498 DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT); 2499 if (is_strict(language_mode())) { 2500 __ CallRuntime(Runtime::kThrowConstAssignError); 2501 } 2502 // Silently ignore store in sloppy mode. 
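    // For example, for legacy sloppy-mode code such as
    //   const k = 1; k = 2;
    // the second assignment is a no-op and k remains 1, while strict mode
    // takes the throwing runtime call above.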
2503   }
2504 }
2505 
2506 
2507 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2508   // Assignment to a property, using a named store IC.
2509   Property* prop = expr->target()->AsProperty();
2510   DCHECK(prop != NULL);
2511   DCHECK(prop->key()->IsLiteral());
2512 
2513   __ Move(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
2514   __ Pop(StoreDescriptor::ReceiverRegister());
2515   EmitLoadStoreICSlot(expr->AssignmentSlot());
2516   CallStoreIC();
2517 
2518   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2519   context()->Plug(rax);
2520 }
2521 
2522 
2523 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2524   // Assignment to named property of super.
2525   // rax : value
2526   // stack : receiver ('this'), home_object
2527   DCHECK(prop != NULL);
2528   Literal* key = prop->key()->AsLiteral();
2529   DCHECK(key != NULL);
2530 
2531   __ Push(key->value());
2532   __ Push(rax);
2533   __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2534                                              : Runtime::kStoreToSuper_Sloppy));
2535 }
2536 
2537 
2538 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2539   // Assignment to keyed property of super.
2540   // rax : value
2541   // stack : receiver ('this'), home_object, key
2542   DCHECK(prop != NULL);
2543 
2544   __ Push(rax);
2545   __ CallRuntime((is_strict(language_mode())
2546                       ? Runtime::kStoreKeyedToSuper_Strict
2547                       : Runtime::kStoreKeyedToSuper_Sloppy));
2548 }
2549 
2550 
2551 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2552   // Assignment to a property, using a keyed store IC.
2553   __ Pop(StoreDescriptor::NameRegister());  // Key.
2554   __ Pop(StoreDescriptor::ReceiverRegister());
2555   DCHECK(StoreDescriptor::ValueRegister().is(rax));
2556   Handle<Code> ic =
2557       CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2558   EmitLoadStoreICSlot(expr->AssignmentSlot());
2559   CallIC(ic);
2560 
2561   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2562   context()->Plug(rax);
2563 }
2564 
2565 
2566 void FullCodeGenerator::VisitProperty(Property* expr) {
2567   Comment cmnt(masm_, "[ Property");
2568   SetExpressionPosition(expr);
2569 
2570   Expression* key = expr->key();
2571 
2572   if (key->IsPropertyName()) {
2573     if (!expr->IsSuperAccess()) {
2574       VisitForAccumulatorValue(expr->obj());
2575       DCHECK(!rax.is(LoadDescriptor::ReceiverRegister()));
2576       __ movp(LoadDescriptor::ReceiverRegister(), rax);
2577       EmitNamedPropertyLoad(expr);
2578     } else {
2579       VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2580       VisitForStackValue(
2581           expr->obj()->AsSuperPropertyReference()->home_object());
2582       EmitNamedSuperPropertyLoad(expr);
2583     }
2584   } else {
2585     if (!expr->IsSuperAccess()) {
2586       VisitForStackValue(expr->obj());
2587       VisitForAccumulatorValue(expr->key());
2588       __ Move(LoadDescriptor::NameRegister(), rax);
2589       __ Pop(LoadDescriptor::ReceiverRegister());
2590       EmitKeyedPropertyLoad(expr);
2591     } else {
2592       VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2593       VisitForStackValue(
2594           expr->obj()->AsSuperPropertyReference()->home_object());
2595       VisitForStackValue(expr->key());
2596       EmitKeyedSuperPropertyLoad(expr);
2597     }
2598   }
2599   PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2600   context()->Plug(rax);
2601 }
2602 
2603 
2604 void FullCodeGenerator::CallIC(Handle<Code> code,
2605                                TypeFeedbackId ast_id) {
2606   ic_total_count_++;
2607   __ call(code, RelocInfo::CODE_TARGET, ast_id);
2608 }
2609 
2610 
2611 // Code common for calls using the IC.
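// This handles both f(x), where the callee is a variable and undefined is
// pushed as the receiver, and o.f(x), where the callee is loaded from the
// receiver that is already on the stack.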
2612 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) { 2613 Expression* callee = expr->expression(); 2614 2615 // Get the target function. 2616 ConvertReceiverMode convert_mode; 2617 if (callee->IsVariableProxy()) { 2618 { StackValueContext context(this); 2619 EmitVariableLoad(callee->AsVariableProxy()); 2620 PrepareForBailout(callee, NO_REGISTERS); 2621 } 2622 // Push undefined as receiver. This is patched in the Call builtin if it 2623 // is a sloppy mode method. 2624 __ Push(isolate()->factory()->undefined_value()); 2625 convert_mode = ConvertReceiverMode::kNullOrUndefined; 2626 } else { 2627 // Load the function from the receiver. 2628 DCHECK(callee->IsProperty()); 2629 DCHECK(!callee->AsProperty()->IsSuperAccess()); 2630 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0)); 2631 EmitNamedPropertyLoad(callee->AsProperty()); 2632 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2633 // Push the target function under the receiver. 2634 __ Push(Operand(rsp, 0)); 2635 __ movp(Operand(rsp, kPointerSize), rax); 2636 convert_mode = ConvertReceiverMode::kNotNullOrUndefined; 2637 } 2638 2639 EmitCall(expr, convert_mode); 2640 } 2641 2642 2643 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) { 2644 Expression* callee = expr->expression(); 2645 DCHECK(callee->IsProperty()); 2646 Property* prop = callee->AsProperty(); 2647 DCHECK(prop->IsSuperAccess()); 2648 SetExpressionPosition(prop); 2649 2650 Literal* key = prop->key()->AsLiteral(); 2651 DCHECK(!key->value()->IsSmi()); 2652 // Load the function from the receiver. 2653 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference(); 2654 VisitForStackValue(super_ref->home_object()); 2655 VisitForAccumulatorValue(super_ref->this_var()); 2656 __ Push(rax); 2657 __ Push(rax); 2658 __ Push(Operand(rsp, kPointerSize * 2)); 2659 __ Push(key->value()); 2660 __ Push(Smi::FromInt(language_mode())); 2661 2662 // Stack here: 2663 // - home_object 2664 // - this (receiver) 2665 // - this (receiver) <-- LoadFromSuper will pop here and below. 2666 // - home_object 2667 // - key 2668 // - language_mode 2669 __ CallRuntime(Runtime::kLoadFromSuper); 2670 2671 // Replace home_object with target function. 2672 __ movp(Operand(rsp, kPointerSize), rax); 2673 2674 // Stack here: 2675 // - target function 2676 // - this (receiver) 2677 EmitCall(expr); 2678 } 2679 2680 2681 // Common code for calls using the IC. 2682 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, 2683 Expression* key) { 2684 // Load the key. 2685 VisitForAccumulatorValue(key); 2686 2687 Expression* callee = expr->expression(); 2688 2689 // Load the function from the receiver. 2690 DCHECK(callee->IsProperty()); 2691 __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0)); 2692 __ Move(LoadDescriptor::NameRegister(), rax); 2693 EmitKeyedPropertyLoad(callee->AsProperty()); 2694 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG); 2695 2696 // Push the target function under the receiver. 2697 __ Push(Operand(rsp, 0)); 2698 __ movp(Operand(rsp, kPointerSize), rax); 2699 2700 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined); 2701 } 2702 2703 2704 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) { 2705 Expression* callee = expr->expression(); 2706 DCHECK(callee->IsProperty()); 2707 Property* prop = callee->AsProperty(); 2708 DCHECK(prop->IsSuperAccess()); 2709 2710 SetExpressionPosition(prop); 2711 // Load the function from the receiver. 
2712   SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2713   VisitForStackValue(super_ref->home_object());
2714   VisitForAccumulatorValue(super_ref->this_var());
2715   __ Push(rax);
2716   __ Push(rax);
2717   __ Push(Operand(rsp, kPointerSize * 2));
2718   VisitForStackValue(prop->key());
2719   __ Push(Smi::FromInt(language_mode()));
2720 
2721   // Stack here:
2722   //  - home_object
2723   //  - this (receiver)
2724   //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2725   //  - home_object
2726   //  - key
2727   //  - language_mode
2728   __ CallRuntime(Runtime::kLoadKeyedFromSuper);
2729 
2730   // Replace home_object with target function.
2731   __ movp(Operand(rsp, kPointerSize), rax);
2732 
2733   // Stack here:
2734   //  - target function
2735   //  - this (receiver)
2736   EmitCall(expr);
2737 }
2738 
2739 
2740 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2741   // Load the arguments.
2742   ZoneList<Expression*>* args = expr->arguments();
2743   int arg_count = args->length();
2744   for (int i = 0; i < arg_count; i++) {
2745     VisitForStackValue(args->at(i));
2746   }
2747 
2748   PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
2749   SetCallPosition(expr);
2750   Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, mode).code();
2751   __ Move(rdx, SmiFromSlot(expr->CallFeedbackICSlot()));
2752   __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2753   // Don't assign a type feedback id to the IC, since type feedback is provided
2754   // by the vector above.
2755   CallIC(ic);
2756 
2757   RecordJSReturnSite(expr);
2758 
2759   // Restore context register.
2760   __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2761   // Discard the function left on TOS.
2762   context()->DropAndPlug(1, rax);
2763 }
2764 
2765 
2766 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2767   // Push copy of the first argument or undefined if it doesn't exist.
2768   if (arg_count > 0) {
2769     __ Push(Operand(rsp, arg_count * kPointerSize));
2770   } else {
2771     __ PushRoot(Heap::kUndefinedValueRootIndex);
2772   }
2773 
2774   // Push the enclosing function.
2775   __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
2776 
2777   // Push the language mode.
2778   __ Push(Smi::FromInt(language_mode()));
2779 
2780   // Push the start position of the scope the call resides in.
2781   __ Push(Smi::FromInt(scope()->start_position()));
2782 
2783   // Do the runtime call.
2784   __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2785 }
2786 
2787 
2788 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2789 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2790   VariableProxy* callee = expr->expression()->AsVariableProxy();
2791   if (callee->var()->IsLookupSlot()) {
2792     Label slow, done;
2793     SetExpressionPosition(callee);
2794     // Generate code for loading from variables potentially shadowed by
2795     // eval-introduced variables.
2796     EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2797     __ bind(&slow);
2798     // Call the runtime to find the function to call (returned in rax) and
2799     // the object holding it (returned in rdx).
2800     __ Push(context_register());
2801     __ Push(callee->name());
2802     __ CallRuntime(Runtime::kLoadLookupSlot);
2803     __ Push(rax);  // Function.
2804     __ Push(rdx);  // Receiver.
2805     PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
2806 
2807     // If fast case code has been generated, emit code to push the function
2808     // and receiver and have the slow path jump around this code.
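    // The fast case is emitted when the lookup can be resolved statically,
    // guarded by checks that no eval-introduced binding intervenes.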
2809 if (done.is_linked()) { 2810 Label call; 2811 __ jmp(&call, Label::kNear); 2812 __ bind(&done); 2813 // Push function. 2814 __ Push(rax); 2815 // Pass undefined as the receiver, which is the WithBaseObject of a 2816 // non-object environment record. If the callee is sloppy, it will patch 2817 // it up to be the global receiver. 2818 __ PushRoot(Heap::kUndefinedValueRootIndex); 2819 __ bind(&call); 2820 } 2821 } else { 2822 VisitForStackValue(callee); 2823 // refEnv.WithBaseObject() 2824 __ PushRoot(Heap::kUndefinedValueRootIndex); 2825 } 2826 } 2827 2828 2829 void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) { 2830 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval 2831 // to resolve the function we need to call. Then we call the resolved 2832 // function using the given arguments. 2833 ZoneList<Expression*>* args = expr->arguments(); 2834 int arg_count = args->length(); 2835 PushCalleeAndWithBaseObject(expr); 2836 2837 // Push the arguments. 2838 for (int i = 0; i < arg_count; i++) { 2839 VisitForStackValue(args->at(i)); 2840 } 2841 2842 // Push a copy of the function (found below the arguments) and resolve 2843 // eval. 2844 __ Push(Operand(rsp, (arg_count + 1) * kPointerSize)); 2845 EmitResolvePossiblyDirectEval(arg_count); 2846 2847 // Touch up the callee. 2848 __ movp(Operand(rsp, (arg_count + 1) * kPointerSize), rax); 2849 2850 PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS); 2851 2852 SetCallPosition(expr); 2853 __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize)); 2854 __ Set(rax, arg_count); 2855 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); 2856 RecordJSReturnSite(expr); 2857 // Restore context register. 2858 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 2859 context()->DropAndPlug(1, rax); 2860 } 2861 2862 2863 void FullCodeGenerator::VisitCallNew(CallNew* expr) { 2864 Comment cmnt(masm_, "[ CallNew"); 2865 // According to ECMA-262, section 11.2.2, page 44, the function 2866 // expression in new calls must be evaluated before the 2867 // arguments. 2868 2869 // Push constructor on the stack. If it's not a function it's used as 2870 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is 2871 // ignored. 2872 DCHECK(!expr->expression()->IsSuperPropertyReference()); 2873 VisitForStackValue(expr->expression()); 2874 2875 // Push the arguments ("left-to-right") on the stack. 2876 ZoneList<Expression*>* args = expr->arguments(); 2877 int arg_count = args->length(); 2878 for (int i = 0; i < arg_count; i++) { 2879 VisitForStackValue(args->at(i)); 2880 } 2881 2882 // Call the construct call builtin that handles allocation and 2883 // constructor invocation. 2884 SetConstructCallPosition(expr); 2885 2886 // Load function and argument count into rdi and rax. 2887 __ Set(rax, arg_count); 2888 __ movp(rdi, Operand(rsp, arg_count * kPointerSize)); 2889 2890 // Record call targets in unoptimized code, but not in the snapshot. 2891 __ EmitLoadTypeFeedbackVector(rbx); 2892 __ Move(rdx, SmiFromSlot(expr->CallNewFeedbackSlot())); 2893 2894 CallConstructStub stub(isolate()); 2895 __ Call(stub.GetCode(), RelocInfo::CODE_TARGET); 2896 PrepareForBailoutForId(expr->ReturnId(), TOS_REG); 2897 // Restore context register. 
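  // Calls do not preserve the context register, so rsi is reloaded from the
  // frame's context slot here, as after every call in this file.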
2898 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 2899 context()->Plug(rax); 2900 } 2901 2902 2903 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) { 2904 SuperCallReference* super_call_ref = 2905 expr->expression()->AsSuperCallReference(); 2906 DCHECK_NOT_NULL(super_call_ref); 2907 2908 // Push the super constructor target on the stack (may be null, 2909 // but the Construct builtin can deal with that properly). 2910 VisitForAccumulatorValue(super_call_ref->this_function_var()); 2911 __ AssertFunction(result_register()); 2912 __ movp(result_register(), 2913 FieldOperand(result_register(), HeapObject::kMapOffset)); 2914 __ Push(FieldOperand(result_register(), Map::kPrototypeOffset)); 2915 2916 // Push the arguments ("left-to-right") on the stack. 2917 ZoneList<Expression*>* args = expr->arguments(); 2918 int arg_count = args->length(); 2919 for (int i = 0; i < arg_count; i++) { 2920 VisitForStackValue(args->at(i)); 2921 } 2922 2923 // Call the construct call builtin that handles allocation and 2924 // constructor invocation. 2925 SetConstructCallPosition(expr); 2926 2927 // Load new target into rdx. 2928 VisitForAccumulatorValue(super_call_ref->new_target_var()); 2929 __ movp(rdx, result_register()); 2930 2931 // Load function and argument count into rdi and rax. 2932 __ Set(rax, arg_count); 2933 __ movp(rdi, Operand(rsp, arg_count * kPointerSize)); 2934 2935 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); 2936 2937 RecordJSReturnSite(expr); 2938 2939 // Restore context register. 2940 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 2941 2942 context()->Plug(rax); 2943 } 2944 2945 2946 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) { 2947 ZoneList<Expression*>* args = expr->arguments(); 2948 DCHECK(args->length() == 1); 2949 2950 VisitForAccumulatorValue(args->at(0)); 2951 2952 Label materialize_true, materialize_false; 2953 Label* if_true = NULL; 2954 Label* if_false = NULL; 2955 Label* fall_through = NULL; 2956 context()->PrepareTest(&materialize_true, &materialize_false, 2957 &if_true, &if_false, &fall_through); 2958 2959 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2960 __ JumpIfSmi(rax, if_true); 2961 __ jmp(if_false); 2962 2963 context()->Plug(if_true, if_false); 2964 } 2965 2966 2967 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) { 2968 ZoneList<Expression*>* args = expr->arguments(); 2969 DCHECK(args->length() == 1); 2970 2971 VisitForAccumulatorValue(args->at(0)); 2972 2973 Label materialize_true, materialize_false; 2974 Label* if_true = NULL; 2975 Label* if_false = NULL; 2976 Label* fall_through = NULL; 2977 context()->PrepareTest(&materialize_true, &materialize_false, 2978 &if_true, &if_false, &fall_through); 2979 2980 __ JumpIfSmi(rax, if_false); 2981 __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rbx); 2982 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 2983 Split(above_equal, if_true, if_false, fall_through); 2984 2985 context()->Plug(if_true, if_false); 2986 } 2987 2988 2989 void FullCodeGenerator::EmitIsSimdValue(CallRuntime* expr) { 2990 ZoneList<Expression*>* args = expr->arguments(); 2991 DCHECK(args->length() == 1); 2992 2993 VisitForAccumulatorValue(args->at(0)); 2994 2995 Label materialize_true, materialize_false; 2996 Label* if_true = NULL; 2997 Label* if_false = NULL; 2998 Label* fall_through = NULL; 2999 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, 3000 &if_false, &fall_through); 3001 3002 __ JumpIfSmi(rax, if_false); 
3003 __ CmpObjectType(rax, SIMD128_VALUE_TYPE, rbx); 3004 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3005 Split(equal, if_true, if_false, fall_through); 3006 3007 context()->Plug(if_true, if_false); 3008 } 3009 3010 3011 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) { 3012 ZoneList<Expression*>* args = expr->arguments(); 3013 DCHECK(args->length() == 1); 3014 3015 VisitForAccumulatorValue(args->at(0)); 3016 3017 Label materialize_true, materialize_false; 3018 Label* if_true = NULL; 3019 Label* if_false = NULL; 3020 Label* fall_through = NULL; 3021 context()->PrepareTest(&materialize_true, &materialize_false, 3022 &if_true, &if_false, &fall_through); 3023 3024 __ JumpIfSmi(rax, if_false); 3025 __ CmpObjectType(rax, FIRST_FUNCTION_TYPE, rbx); 3026 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3027 Split(above_equal, if_true, if_false, fall_through); 3028 3029 context()->Plug(if_true, if_false); 3030 } 3031 3032 3033 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) { 3034 ZoneList<Expression*>* args = expr->arguments(); 3035 DCHECK(args->length() == 1); 3036 3037 VisitForAccumulatorValue(args->at(0)); 3038 3039 Label materialize_true, materialize_false; 3040 Label* if_true = NULL; 3041 Label* if_false = NULL; 3042 Label* fall_through = NULL; 3043 context()->PrepareTest(&materialize_true, &materialize_false, 3044 &if_true, &if_false, &fall_through); 3045 3046 Handle<Map> map = masm()->isolate()->factory()->heap_number_map(); 3047 __ CheckMap(rax, map, if_false, DO_SMI_CHECK); 3048 __ cmpl(FieldOperand(rax, HeapNumber::kExponentOffset), 3049 Immediate(0x1)); 3050 __ j(no_overflow, if_false); 3051 __ cmpl(FieldOperand(rax, HeapNumber::kMantissaOffset), 3052 Immediate(0x00000000)); 3053 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3054 Split(equal, if_true, if_false, fall_through); 3055 3056 context()->Plug(if_true, if_false); 3057 } 3058 3059 3060 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) { 3061 ZoneList<Expression*>* args = expr->arguments(); 3062 DCHECK(args->length() == 1); 3063 3064 VisitForAccumulatorValue(args->at(0)); 3065 3066 Label materialize_true, materialize_false; 3067 Label* if_true = NULL; 3068 Label* if_false = NULL; 3069 Label* fall_through = NULL; 3070 context()->PrepareTest(&materialize_true, &materialize_false, 3071 &if_true, &if_false, &fall_through); 3072 3073 __ JumpIfSmi(rax, if_false); 3074 __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx); 3075 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3076 Split(equal, if_true, if_false, fall_through); 3077 3078 context()->Plug(if_true, if_false); 3079 } 3080 3081 3082 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) { 3083 ZoneList<Expression*>* args = expr->arguments(); 3084 DCHECK(args->length() == 1); 3085 3086 VisitForAccumulatorValue(args->at(0)); 3087 3088 Label materialize_true, materialize_false; 3089 Label* if_true = NULL; 3090 Label* if_false = NULL; 3091 Label* fall_through = NULL; 3092 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, 3093 &if_false, &fall_through); 3094 3095 __ JumpIfSmi(rax, if_false); 3096 __ CmpObjectType(rax, JS_TYPED_ARRAY_TYPE, rbx); 3097 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3098 Split(equal, if_true, if_false, fall_through); 3099 3100 context()->Plug(if_true, if_false); 3101 } 3102 3103 3104 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) { 3105 ZoneList<Expression*>* args = expr->arguments(); 3106 DCHECK(args->length() == 1); 3107 3108 
VisitForAccumulatorValue(args->at(0)); 3109 3110 Label materialize_true, materialize_false; 3111 Label* if_true = NULL; 3112 Label* if_false = NULL; 3113 Label* fall_through = NULL; 3114 context()->PrepareTest(&materialize_true, &materialize_false, 3115 &if_true, &if_false, &fall_through); 3116 3117 __ JumpIfSmi(rax, if_false); 3118 __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx); 3119 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3120 Split(equal, if_true, if_false, fall_through); 3121 3122 context()->Plug(if_true, if_false); 3123 } 3124 3125 3126 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) { 3127 ZoneList<Expression*>* args = expr->arguments(); 3128 DCHECK(args->length() == 1); 3129 3130 VisitForAccumulatorValue(args->at(0)); 3131 3132 Label materialize_true, materialize_false; 3133 Label* if_true = NULL; 3134 Label* if_false = NULL; 3135 Label* fall_through = NULL; 3136 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, 3137 &if_false, &fall_through); 3138 3139 3140 __ JumpIfSmi(rax, if_false); 3141 __ CmpObjectType(rax, JS_PROXY_TYPE, rbx); 3142 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3143 Split(equal, if_true, if_false, fall_through); 3144 3145 context()->Plug(if_true, if_false); 3146 } 3147 3148 3149 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) { 3150 ZoneList<Expression*>* args = expr->arguments(); 3151 DCHECK(args->length() == 2); 3152 3153 // Load the two objects into registers and perform the comparison. 3154 VisitForStackValue(args->at(0)); 3155 VisitForAccumulatorValue(args->at(1)); 3156 3157 Label materialize_true, materialize_false; 3158 Label* if_true = NULL; 3159 Label* if_false = NULL; 3160 Label* fall_through = NULL; 3161 context()->PrepareTest(&materialize_true, &materialize_false, 3162 &if_true, &if_false, &fall_through); 3163 3164 __ Pop(rbx); 3165 __ cmpp(rax, rbx); 3166 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3167 Split(equal, if_true, if_false, fall_through); 3168 3169 context()->Plug(if_true, if_false); 3170 } 3171 3172 3173 void FullCodeGenerator::EmitArguments(CallRuntime* expr) { 3174 ZoneList<Expression*>* args = expr->arguments(); 3175 DCHECK(args->length() == 1); 3176 3177 // ArgumentsAccessStub expects the key in rdx and the formal 3178 // parameter count in rax. 3179 VisitForAccumulatorValue(args->at(0)); 3180 __ movp(rdx, rax); 3181 __ Move(rax, Smi::FromInt(info_->scope()->num_parameters())); 3182 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT); 3183 __ CallStub(&stub); 3184 context()->Plug(rax); 3185 } 3186 3187 3188 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) { 3189 DCHECK(expr->arguments()->length() == 0); 3190 3191 Label exit; 3192 // Get the number of formal parameters. 3193 __ Move(rax, Smi::FromInt(info_->scope()->num_parameters())); 3194 3195 // Check if the calling frame is an arguments adaptor frame. 3196 __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 3197 __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset), 3198 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); 3199 __ j(not_equal, &exit, Label::kNear); 3200 3201 // Arguments adaptor case: Read the arguments length from the 3202 // adaptor frame. 
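  // For example, a function declared with two parameters but called as
  // f(1, 2, 3) runs under an adaptor frame whose length slot records the
  // actual argument count 3.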
3203 __ movp(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset)); 3204 3205 __ bind(&exit); 3206 __ AssertSmi(rax); 3207 context()->Plug(rax); 3208 } 3209 3210 3211 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) { 3212 ZoneList<Expression*>* args = expr->arguments(); 3213 DCHECK(args->length() == 1); 3214 Label done, null, function, non_function_constructor; 3215 3216 VisitForAccumulatorValue(args->at(0)); 3217 3218 // If the object is not a JSReceiver, we return null. 3219 __ JumpIfSmi(rax, &null, Label::kNear); 3220 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); 3221 __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rax); 3222 __ j(below, &null, Label::kNear); 3223 3224 // Return 'Function' for JSFunction objects. 3225 __ CmpInstanceType(rax, JS_FUNCTION_TYPE); 3226 __ j(equal, &function, Label::kNear); 3227 3228 // Check if the constructor in the map is a JS function. 3229 __ GetMapConstructor(rax, rax, rbx); 3230 __ CmpInstanceType(rbx, JS_FUNCTION_TYPE); 3231 __ j(not_equal, &non_function_constructor, Label::kNear); 3232 3233 // rax now contains the constructor function. Grab the 3234 // instance class name from there. 3235 __ movp(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset)); 3236 __ movp(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset)); 3237 __ jmp(&done, Label::kNear); 3238 3239 // Non-JS objects have class null. 3240 __ bind(&null); 3241 __ LoadRoot(rax, Heap::kNullValueRootIndex); 3242 __ jmp(&done, Label::kNear); 3243 3244 // Functions have class 'Function'. 3245 __ bind(&function); 3246 __ LoadRoot(rax, Heap::kFunction_stringRootIndex); 3247 __ jmp(&done, Label::kNear); 3248 3249 // Objects with a non-function constructor have class 'Object'. 3250 __ bind(&non_function_constructor); 3251 __ LoadRoot(rax, Heap::kObject_stringRootIndex); 3252 3253 // All done. 3254 __ bind(&done); 3255 3256 context()->Plug(rax); 3257 } 3258 3259 3260 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) { 3261 ZoneList<Expression*>* args = expr->arguments(); 3262 DCHECK(args->length() == 1); 3263 3264 VisitForAccumulatorValue(args->at(0)); // Load the object. 3265 3266 Label done; 3267 // If the object is a smi return the object. 3268 __ JumpIfSmi(rax, &done); 3269 // If the object is not a value type, return the object. 
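  // E.g. %_ValueOf(new Number(42)) yields 42, whereas a plain object is
  // returned unchanged.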
3270 __ CmpObjectType(rax, JS_VALUE_TYPE, rbx); 3271 __ j(not_equal, &done); 3272 __ movp(rax, FieldOperand(rax, JSValue::kValueOffset)); 3273 3274 __ bind(&done); 3275 context()->Plug(rax); 3276 } 3277 3278 3279 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) { 3280 ZoneList<Expression*>* args = expr->arguments(); 3281 DCHECK_EQ(1, args->length()); 3282 3283 VisitForAccumulatorValue(args->at(0)); 3284 3285 Label materialize_true, materialize_false; 3286 Label* if_true = nullptr; 3287 Label* if_false = nullptr; 3288 Label* fall_through = nullptr; 3289 context()->PrepareTest(&materialize_true, &materialize_false, &if_true, 3290 &if_false, &fall_through); 3291 3292 __ JumpIfSmi(rax, if_false); 3293 __ CmpObjectType(rax, JS_DATE_TYPE, rbx); 3294 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false); 3295 Split(equal, if_true, if_false, fall_through); 3296 3297 context()->Plug(if_true, if_false); 3298 } 3299 3300 3301 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) { 3302 ZoneList<Expression*>* args = expr->arguments(); 3303 DCHECK_EQ(3, args->length()); 3304 3305 Register string = rax; 3306 Register index = rbx; 3307 Register value = rcx; 3308 3309 VisitForStackValue(args->at(0)); // index 3310 VisitForStackValue(args->at(1)); // value 3311 VisitForAccumulatorValue(args->at(2)); // string 3312 __ Pop(value); 3313 __ Pop(index); 3314 3315 if (FLAG_debug_code) { 3316 __ Check(__ CheckSmi(value), kNonSmiValue); 3317 __ Check(__ CheckSmi(index), kNonSmiValue); 3318 } 3319 3320 __ SmiToInteger32(value, value); 3321 __ SmiToInteger32(index, index); 3322 3323 if (FLAG_debug_code) { 3324 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag; 3325 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type); 3326 } 3327 3328 __ movb(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize), 3329 value); 3330 context()->Plug(string); 3331 } 3332 3333 3334 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) { 3335 ZoneList<Expression*>* args = expr->arguments(); 3336 DCHECK_EQ(3, args->length()); 3337 3338 Register string = rax; 3339 Register index = rbx; 3340 Register value = rcx; 3341 3342 VisitForStackValue(args->at(0)); // index 3343 VisitForStackValue(args->at(1)); // value 3344 VisitForAccumulatorValue(args->at(2)); // string 3345 __ Pop(value); 3346 __ Pop(index); 3347 3348 if (FLAG_debug_code) { 3349 __ Check(__ CheckSmi(value), kNonSmiValue); 3350 __ Check(__ CheckSmi(index), kNonSmiValue); 3351 } 3352 3353 __ SmiToInteger32(value, value); 3354 __ SmiToInteger32(index, index); 3355 3356 if (FLAG_debug_code) { 3357 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag; 3358 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type); 3359 } 3360 3361 __ movw(FieldOperand(string, index, times_2, SeqTwoByteString::kHeaderSize), 3362 value); 3363 context()->Plug(rax); 3364 } 3365 3366 3367 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) { 3368 ZoneList<Expression*>* args = expr->arguments(); 3369 DCHECK(args->length() == 2); 3370 3371 VisitForStackValue(args->at(0)); // Load the object. 3372 VisitForAccumulatorValue(args->at(1)); // Load the value. 3373 __ Pop(rbx); // rax = value. rbx = object. 3374 3375 Label done; 3376 // If the object is a smi, return the value. 3377 __ JumpIfSmi(rbx, &done); 3378 3379 // If the object is not a value type, return the value. 
  __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
  __ j(not_equal, &done);

  // Store the value.
  __ movp(FieldOperand(rbx, JSValue::kValueOffset), rax);
  // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ movp(rdx, rax);
  __ RecordWriteField(rbx, JSValue::kValueOffset, rdx, rcx, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitToInteger(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());

  // Load the argument into rax and convert it.
  VisitForAccumulatorValue(args->at(0));

  // Convert the object to an integer.
  Label done_convert;
  __ JumpIfSmi(rax, &done_convert, Label::kNear);
  __ Push(rax);
  __ CallRuntime(Runtime::kToInteger);
  __ bind(&done_convert);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitToName(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());

  // Load the argument into rax and convert it.
  VisitForAccumulatorValue(args->at(0));

  // Convert the object to a name.
  Label convert, done_convert;
  __ JumpIfSmi(rax, &convert, Label::kNear);
  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
  __ CmpObjectType(rax, LAST_NAME_TYPE, rcx);
  __ j(below_equal, &done_convert, Label::kNear);
  __ bind(&convert);
  __ Push(rax);
  __ CallRuntime(Runtime::kToName);
  __ bind(&done_convert);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(rax, rbx);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(rbx);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = rbx;
  Register index = rax;
  Register result = rdx;

  __ Pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
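  // (Presumably the JS-level caller of %_StringCharCodeAt treats any non-smi
  // result such as undefined as "needs conversion" and redoes the operation
  // through the full runtime.)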
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = rbx;
  Register index = rax;
  Register scratch = rdx;
  Register result = rax;

  __ Pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ Move(result, Smi::FromInt(0));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
  // Move target to rdi.
  int const argc = args->length() - 2;
  __ movp(rdi, Operand(rsp, (argc + 1) * kPointerSize));
  // Call the target.
  __ Set(rax, argc);
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  // Restore context register.
  __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  // Discard the function left on TOS.
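  // (DropAndPlug(1, rax) pops that remaining slot and makes rax the value of
  // the expression; the Call builtin above expects rdi = target and rax =
  // argument count, with the receiver and arguments already on the stack.)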
  context()->DropAndPlug(1, rax);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ testl(FieldOperand(rax, String::kHashFieldOffset),
           Immediate(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ j(zero, if_true);
  __ jmp(if_false);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(rax);

  __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
  DCHECK(String::kHashShift >= kSmiTagSize);
  __ IndexFromHash(rax, rax);

  context()->Plug(rax);
}


void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(rax);
  __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
  __ movp(rax, FieldOperand(rax, Map::kPrototypeOffset));
  context()->Plug(rax);
}


void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
  Label bailout, return_result, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  // We will leave the separator on the stack until the end of the function.
  VisitForStackValue(args->at(1));
  // Load this to rax (= array).
  VisitForAccumulatorValue(args->at(0));
  // All aliases of the same register have disjoint lifetimes.
  Register array = rax;
  Register elements = no_reg;  // Will be rax.

  Register index = rdx;

  Register string_length = rcx;

  Register string = rsi;

  Register scratch = rbx;

  Register array_length = rdi;
  Register result_pos = no_reg;  // Will be rdi.

  Operand separator_operand = Operand(rsp, 2 * kPointerSize);
  Operand result_operand = Operand(rsp, 1 * kPointerSize);
  Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
  // Separator operand is already pushed. Make room for the two
  // other stack fields, and clear the direction flag in anticipation
  // of calling CopyBytes.
  __ subp(rsp, Immediate(2 * kPointerSize));
  __ cld();
  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, &bailout);

  // Check that the array has fast elements.
  __ CheckFastElements(scratch, &bailout);

  // Array has fast elements, so its length must be a smi.
  // If the array has length zero, return the empty string.
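  // (Overview of the fast path: sum the lengths of the sequential one-byte
  // strings in the array, allocate a single SeqOneByteString, then copy the
  // pieces with one of three loops specialized for an empty, one-character,
  // or longer separator. Every unexpected case jumps to &bailout, which
  // returns undefined so that the caller can fall back to the generic join.)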
  __ movp(array_length, FieldOperand(array, JSArray::kLengthOffset));
  __ SmiCompare(array_length, Smi::FromInt(0));
  __ j(not_zero, &non_trivial_array);
  __ LoadRoot(rax, Heap::kempty_stringRootIndex);
  __ jmp(&return_result);

  // Save the array length on the stack.
  __ bind(&non_trivial_array);
  __ SmiToInteger32(array_length, array_length);
  __ movl(array_length_operand, array_length);

  // Save the FixedArray containing array's elements.
  // End of array's live range.
  elements = array;
  __ movp(elements, FieldOperand(array, JSArray::kElementsOffset));
  array = no_reg;

  // Check that all array elements are sequential one-byte strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ Set(index, 0);
  __ Set(string_length, 0);
  // Loop condition: while (index < array_length).
  // Live loop registers: index(int32), array_length(int32), string(String*),
  //                      scratch, string_length(int32), elements(FixedArray*).
  if (generate_debug_code_) {
    __ cmpp(index, array_length);
    __ Assert(below, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
  }
  __ bind(&loop);
  __ movp(string, FieldOperand(elements,
                               index,
                               times_pointer_size,
                               FixedArray::kHeaderSize));
  __ JumpIfSmi(string, &bailout);
  __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ andb(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
  __ j(not_equal, &bailout);
  __ AddSmiField(string_length,
                 FieldOperand(string, SeqOneByteString::kLengthOffset));
  __ j(overflow, &bailout);
  __ incl(index);
  __ cmpl(index, array_length);
  __ j(less, &loop);

  // Live registers:
  // string_length: Sum of string lengths.
  // elements: FixedArray of strings.
  // index: Array length.
  // array_length: Array length.

  // If array_length is 1, return elements[0], a string.
  __ cmpl(array_length, Immediate(1));
  __ j(not_equal, &not_size_one_array);
  __ movp(rax, FieldOperand(elements, FixedArray::kHeaderSize));
  __ jmp(&return_result);

  __ bind(&not_size_one_array);

  // End of array_length live range.
  result_pos = array_length;
  array_length = no_reg;

  // Live registers:
  // string_length: Sum of string lengths.
  // elements: FixedArray of strings.
  // index: Array length.

  // Check that the separator is a sequential one-byte string.
  __ movp(string, separator_operand);
  __ JumpIfSmi(string, &bailout);
  __ movp(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ andb(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmpb(scratch, Immediate(kStringTag | kOneByteStringTag | kSeqStringTag));
  __ j(not_equal, &bailout);

  // Live registers:
  // string_length: Sum of string lengths.
  // elements: FixedArray of strings.
  // index: Array length.
  // string: Separator string.

  // Add (separator length times (array_length - 1)) to string_length.
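  // (For example, ["a", "bb", "c"].join("--") needs 1 + 2 + 1 string bytes
  // plus 2 separator bytes times (3 - 1) joints: 8 bytes for "a--bb--c".)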
  __ SmiToInteger32(scratch,
                    FieldOperand(string, SeqOneByteString::kLengthOffset));
  __ decl(index);
  __ imull(scratch, index);
  __ j(overflow, &bailout);
  __ addl(string_length, scratch);
  __ j(overflow, &bailout);
  // This unconditional jump disables the fast path below: control always
  // reaches &bailout, so the intrinsic returns undefined and the remaining
  // code in this function is unreachable.
  __ jmp(&bailout);

  // Bailout for large object allocations.
  __ cmpl(string_length, Immediate(Page::kMaxRegularHeapObjectSize));
  __ j(greater, &bailout);

  // Live registers and stack values:
  // string_length: Total length of result string.
  // elements: FixedArray of strings.
  __ AllocateOneByteString(result_pos, string_length, scratch, index, string,
                           &bailout);
  __ movp(result_operand, result_pos);
  __ leap(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));

  __ movp(string, separator_operand);
  __ SmiCompare(FieldOperand(string, SeqOneByteString::kLengthOffset),
                Smi::FromInt(1));
  __ j(equal, &one_char_separator);
  __ j(greater, &long_separator);

  // Empty separator case:
  __ Set(index, 0);
  __ movl(scratch, array_length_operand);
  __ jmp(&loop_1_condition);
  // Loop condition: while (index < array_length).
  __ bind(&loop_1);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  // index: which element of the elements array we are adding to the result.
  // result_pos: the position to which we are currently copying characters.
  // elements: the FixedArray of strings we are joining.
  // scratch: array length.

  // Get string = array[index].
  __ movp(string, FieldOperand(elements, index,
                               times_pointer_size,
                               FixedArray::kHeaderSize));
  __ SmiToInteger32(string_length,
                    FieldOperand(string, String::kLengthOffset));
  __ leap(string,
          FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(result_pos, string, string_length);
  __ incl(index);
  __ bind(&loop_1_condition);
  __ cmpl(index, scratch);
  __ j(less, &loop_1);  // Loop while (index < array_length).
  __ jmp(&done);

  // Generic bailout code used from several places.
  __ bind(&bailout);
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  __ jmp(&return_result);

  // One-character separator case.
  __ bind(&one_char_separator);
  // Get the separator one-byte character value.
  // Register "string" holds the separator.
  __ movzxbl(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ Set(index, 0);
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&loop_2_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_2);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  // elements: The FixedArray of strings we are joining.
  // index: which element of the elements array we are adding to the result.
  // result_pos: the position to which we are currently copying characters.
  // scratch: Separator character.

  // Copy the separator character to the result.
  __ movb(Operand(result_pos, 0), scratch);
  __ incp(result_pos);

  __ bind(&loop_2_entry);
  // Get string = array[index].
  __ movp(string, FieldOperand(elements, index,
                               times_pointer_size,
                               FixedArray::kHeaderSize));
  __ SmiToInteger32(string_length,
                    FieldOperand(string, String::kLengthOffset));
  __ leap(string,
          FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(result_pos, string, string_length);
  __ incl(index);
  __ cmpl(index, array_length_operand);
  __ j(less, &loop_2);  // End while (index < length).
  __ jmp(&done);

  // Long separator case (separator is more than one character).
  __ bind(&long_separator);

  // Make elements point to end of elements array, and index
  // count from -array_length to zero, so we don't need to maintain
  // a loop limit.
  __ movl(index, array_length_operand);
  __ leap(elements, FieldOperand(elements, index, times_pointer_size,
                                 FixedArray::kHeaderSize));
  __ negq(index);

  // Replace separator string with pointer to its first character, and
  // make scratch be its length.
  __ movp(string, separator_operand);
  __ SmiToInteger32(scratch,
                    FieldOperand(string, String::kLengthOffset));
  __ leap(string,
          FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ movp(separator_operand, string);

  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&loop_3_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_3);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  // index: which element of the elements array we are adding to the result.
  // result_pos: the position to which we are currently copying characters.
  // scratch: Separator length.
  // separator_operand (rsp[0x10]): Address of first char of separator.

  // Copy the separator to the result.
  __ movp(string, separator_operand);
  __ movl(string_length, scratch);
  __ CopyBytes(result_pos, string, string_length, 2);

  __ bind(&loop_3_entry);
  // Get string = array[index].
  __ movp(string, Operand(elements, index, times_pointer_size, 0));
  __ SmiToInteger32(string_length,
                    FieldOperand(string, String::kLengthOffset));
  __ leap(string,
          FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(result_pos, string, string_length);
  __ incq(index);
  __ j(not_equal, &loop_3);  // Loop while (index < 0).

  __ bind(&done);
  __ movp(rax, result_operand);

  __ bind(&return_result);
  // Drop temp values from the stack, and restore context register.
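  // (The three temporaries are array_length at rsp[0], the result string at
  // rsp[8], and the separator at rsp[16], matching the operands defined at
  // the top of this function.)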
  __ addp(rsp, Immediate(3 * kPointerSize));
  __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  context()->Plug(rax);
}


void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ Move(kScratchRegister, debug_is_active);
  __ movzxbp(rax, Operand(kScratchRegister, 0));
  __ Integer32ToSmi(rax, rax);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

  __ Allocate(JSIteratorResult::kSize, rax, rcx, rdx, &runtime, TAG_OBJECT);
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, rbx);
  __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx);
  __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rbx);
  __ Pop(FieldOperand(rax, JSIteratorResult::kDoneOffset));
  __ Pop(FieldOperand(rax, JSIteratorResult::kValueOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ jmp(&done, Label::kNear);

  __ bind(&runtime);
  __ CallRuntime(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push undefined as the receiver.
  __ PushRoot(Heap::kUndefinedValueRootIndex);

  __ LoadNativeContextSlot(expr->context_index(), rax);
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
  __ Set(rax, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    Comment cmnt(masm_, "[ CallRuntime");

    EmitLoadJSRuntimeFunction(expr);

    // Push the target function under the receiver.
    __ Push(Operand(rsp, 0));
    __ movp(Operand(rsp, kPointerSize), rax);

    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
    EmitCallJSRuntimeFunction(expr);

    // Restore context register.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, rax);

  } else {
    const Runtime::Function* function = expr->function();
    switch (function->function_id) {
#define CALL_INTRINSIC_GENERATOR(Name)     \
  case Runtime::kInline##Name: {           \
    Comment cmnt(masm_, "[ Inline" #Name); \
    return Emit##Name(expr);               \
  }
      FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
#undef CALL_INTRINSIC_GENERATOR
      default: {
        Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
        // Push the arguments ("left-to-right").
        for (int i = 0; i < arg_count; i++) {
          VisitForStackValue(args->at(i));
        }

        // Call the C runtime.
        PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
        __ CallRuntime(function, arg_count);
        context()->Plug(rax);
      }
    }
  }
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ CallRuntime(is_strict(language_mode())
                           ? Runtime::kDeleteProperty_Strict
                           : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(rax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        bool is_this = var->HasThisName(isolate());
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          __ movp(rax, NativeContextOperand());
          __ Push(ContextOperand(rax, Context::EXTENSION_INDEX));
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(rax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false. 'this' is
          // not really a variable, though we implement it as one. The
          // subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Push(context_register());
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteLookupSlot);
          context()->Plug(rax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
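        // (For example, in if (!x) the subexpression x is compiled so that a
        // truthy outcome branches to the *false* label of the enclosing
        // test; the negation itself then requires no code.)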
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ LoadRoot(rax, Heap::kTrueValueRootIndex);
        } else {
          __ PushRoot(Heap::kTrueValueRootIndex);
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ LoadRoot(rax, Heap::kFalseValueRootIndex);
        } else {
          __ PushRoot(Heap::kFalseValueRootIndex);
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ movp(rbx, rax);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(rax);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
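    // (For a postfix use such as o.x++ in a value context, the old value of
    // o.x must survive the store below; a smi 0 placeholder is pushed under
    // the receiver now and overwritten once the old value is known.)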
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ Push(Smi::FromInt(0));
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        VisitForStackValue(prop->obj());
        __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        __ Push(result_register());
        __ Push(MemOperand(rsp, kPointerSize));
        __ Push(result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForStackValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        VisitForAccumulatorValue(prop->key());
        __ Push(result_register());
        __ Push(MemOperand(rsp, 2 * kPointerSize));
        __ Push(MemOperand(rsp, 2 * kPointerSize));
        __ Push(result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        // Leave receiver on stack.
        __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize));
        // Copy of key, needed for later store.
        __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(rax, &slow, Label::kNear);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
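        // (The placeholder's depth matches the rsp offsets in the switch
        // below: one word above it for named properties (the receiver), two
        // for keyed and named-super accesses, and three for keyed-super.)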
        switch (assign_type) {
          case VARIABLE:
            __ Push(rax);
            break;
          case NAMED_PROPERTY:
            __ movp(Operand(rsp, kPointerSize), rax);
            break;
          case NAMED_SUPER_PROPERTY:
            __ movp(Operand(rsp, 2 * kPointerSize), rax);
            break;
          case KEYED_PROPERTY:
            __ movp(Operand(rsp, 2 * kPointerSize), rax);
            break;
          case KEYED_SUPER_PROPERTY:
            __ movp(Operand(rsp, 3 * kPointerSize), rax);
            break;
        }
      }
    }

    SmiOperationConstraints constraints =
        SmiOperationConstraint::kPreserveSourceRegister |
        SmiOperationConstraint::kBailoutOnNoOverflow;
    if (expr->op() == Token::INC) {
      __ SmiAddConstant(rax, rax, Smi::FromInt(1), constraints, &done,
                        Label::kNear);
    } else {
      __ SmiSubConstant(rax, rax, Smi::FromInt(1), constraints, &done,
                        Label::kNear);
    }
    __ jmp(&stub_call, Label::kNear);
    __ bind(&slow);
  }
  if (!is_strong(language_mode())) {
    ToNumberStub convert_stub(isolate());
    __ CallStub(&convert_stub);
    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
  }

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ Push(rax);
          break;
        case NAMED_PROPERTY:
          __ movp(Operand(rsp, kPointerSize), rax);
          break;
        case NAMED_SUPER_PROPERTY:
          __ movp(Operand(rsp, 2 * kPointerSize), rax);
          break;
        case KEYED_PROPERTY:
          __ movp(Operand(rsp, 2 * kPointerSize), rax);
          break;
        case KEYED_SUPER_PROPERTY:
          __ movp(Operand(rsp, 3 * kPointerSize), rax);
          break;
      }
    }
  }

  SetExpressionPosition(expr);

  // Call stub for +1/-1.
  __ bind(&stub_call);
  __ movp(rdx, rax);
  __ Move(rax, Smi::FromInt(1));
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), expr->binary_op(),
                                              strength(language_mode())).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  if (is_strong(language_mode())) {
    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
  }
  // Store the value returned in rax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(rax);
        }
        // For all contexts except kEffect: We have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(rax);
      }
      break;
    case NAMED_PROPERTY: {
      __ Move(StoreDescriptor::NameRegister(),
              prop->key()->AsLiteral()->value());
      __ Pop(StoreDescriptor::ReceiverRegister());
      EmitLoadStoreICSlot(expr->CountSlot());
      CallStoreIC();
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ Pop(StoreDescriptor::NameRegister());
      __ Pop(StoreDescriptor::ReceiverRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      EmitLoadStoreICSlot(expr->CountSlot());
      CallIC(ic);
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(rax, if_true);
    __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
    __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(rax, if_false);
    __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
    Split(below, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(rax, if_false);
    __ CmpObjectType(rax, SYMBOL_TYPE, rdx);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    __ j(equal, if_true);
    __ CompareRoot(rax, Heap::kFalseValueRootIndex);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
    __ j(equal, if_true);
    __ JumpIfSmi(rax, if_false);
    // Check for undetectable objects => true.
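    // (document.all is the canonical undetectable object: it reports
    // typeof document.all == "undefined" even though it is an object.)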
    __ movp(rdx, FieldOperand(rax, HeapObject::kMapOffset));
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(rax, if_false);
    // Check for callable and not undetectable objects => true.
    __ movp(rdx, FieldOperand(rax, HeapObject::kMapOffset));
    __ movzxbl(rdx, FieldOperand(rdx, Map::kBitFieldOffset));
    __ andb(rdx,
            Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    __ cmpb(rdx, Immediate(1 << Map::kIsCallable));
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(rax, if_false);
    __ CompareRoot(rax, Heap::kNullValueRootIndex);
    __ j(equal, if_true);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rdx);
    __ j(below, if_false);
    // Check for callable or undetectable objects => false.
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(zero, if_true, if_false, fall_through);
    // clang-format off
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)   \
  } else if (String::Equals(check, factory->type##_string())) { \
    __ JumpIfSmi(rax, if_false);                                \
    __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));    \
    __ CompareRoot(rax, Heap::k##Type##MapRootIndex);           \
    Split(equal, if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
  // clang-format on
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetExpressionPosition(expr);

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
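  // (For example, in if (a < b) the comparison below branches directly to
  // the statement's labels; a boolean is only materialized when the
  // surrounding context actually needs a value.)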
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ CallRuntime(Runtime::kHasProperty);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(rax, Heap::kTrueValueRootIndex);
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      __ Pop(rdx);
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(rax, Heap::kTrueValueRootIndex);
      Split(equal, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      __ Pop(rdx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ movp(rcx, rdx);
        __ orp(rcx, rax);
        patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
        __ cmpp(rdx, rax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(
          isolate(), op, strength(language_mode())).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ testp(rax, rax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ CompareRoot(rax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    Split(equal, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(rax);
}


Register FullCodeGenerator::result_register() {
  return rax;
}


Register FullCodeGenerator::context_register() {
  return rsi;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK(IsAligned(frame_offset, kPointerSize));
  __ movp(Operand(rbp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ movp(dst, ContextOperand(rsi, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ movp(rax, NativeContextOperand());
    __ Push(ContextOperand(rax, Context::CLOSURE_INDEX));
  } else if (closure_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    __ Push(ContextOperand(rsi, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }
}


// ----------------------------------------------------------------------------
// Non-local control flow support.


void FullCodeGenerator::EnterFinallyBlock() {
  DCHECK(!result_register().is(rdx));
  DCHECK(!result_register().is(rcx));
  // Cook return address on top of stack (smi encoded Code* delta).
  __ PopReturnAddressTo(rdx);
  __ Move(rcx, masm_->CodeObject());
  __ subp(rdx, rcx);
  __ Integer32ToSmi(rdx, rdx);
  __ Push(rdx);

  // Store result register while executing finally block.
  __ Push(result_register());

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Load(rdx, pending_message_obj);
  __ Push(rdx);

  ClearPendingMessage();
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(rdx));
  DCHECK(!result_register().is(rcx));
  // Restore pending message from stack.
  __ Pop(rdx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Store(pending_message_obj, rdx);

  // Restore result register from stack.
  __ Pop(result_register());

  // Uncook return address.
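  // (EnterFinallyBlock stored it as a smi-encoded offset from the code
  // object's start so a moving GC can relocate the code while the finally
  // block runs; re-adding the code object base reconstitutes the absolute
  // address.)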
  __ Pop(rdx);
  __ SmiToInteger32(rdx, rdx);
  __ Move(rcx, masm_->CodeObject());
  __ addp(rdx, rcx);
  __ jmp(rdx);
}


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(rdx));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
  __ Store(pending_message_obj, rdx);
}


void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorSlot slot) {
  DCHECK(!slot.IsInvalid());
  __ Move(VectorStoreICTrampolineDescriptor::SlotRegister(), SmiFromSlot(slot));
}


#undef __


static const byte kJnsInstruction = 0x79;
static const byte kNopByteOne = 0x66;
static const byte kNopByteTwo = 0x90;
#ifdef DEBUG
static const byte kCallInstruction = 0xe8;
#endif


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;

  switch (target_state) {
    case INTERRUPT:
      //   sub <profiling_counter>, <delta>  ;; Not changed
      //   jns ok
      //   call <interrupt stub>
      //   ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      //   sub <profiling_counter>, <delta>  ;; Not changed
      //   nop
      //   nop
      //   call <on-stack replacement>
      //   ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  Assembler::set_target_address_at(unoptimized_code->GetIsolate(),
                                   call_target_address, unoptimized_code,
                                   replacement_code->entry());
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  DCHECK_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
    DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address,
                                           unoptimized_code));
    return INTERRUPT;
  }

  DCHECK_EQ(kNopByteOne, *jns_instr_address);
  DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));

  if (Assembler::target_address_at(call_target_address,
                                   unoptimized_code) ==
      isolate->builtins()->OnStackReplacement()->entry()) {
    return ON_STACK_REPLACEMENT;
  }

  DCHECK_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
            Assembler::target_address_at(call_target_address,
                                         unoptimized_code));
  return OSR_AFTER_STACK_CHECK;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64