1 // Copyright 2012 the V8 project authors. All rights reserved. 2 // Use of this source code is governed by a BSD-style license that can be 3 // found in the LICENSE file. 4 5 #if V8_TARGET_ARCH_X64 6 7 #include "src/code-factory.h" 8 #include "src/codegen.h" 9 #include "src/counters.h" 10 #include "src/deoptimizer.h" 11 #include "src/full-codegen/full-codegen.h" 12 #include "src/objects-inl.h" 13 14 namespace v8 { 15 namespace internal { 16 17 #define __ ACCESS_MASM(masm) 18 19 void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address, 20 ExitFrameType exit_frame_type) { 21 // ----------- S t a t e ------------- 22 // -- rax : number of arguments excluding receiver 23 // -- rdi : target 24 // -- rdx : new.target 25 // -- rsp[0] : return address 26 // -- rsp[8] : last argument 27 // -- ... 28 // -- rsp[8 * argc] : first argument 29 // -- rsp[8 * (argc + 1)] : receiver 30 // ----------------------------------- 31 __ AssertFunction(rdi); 32 33 // The logic contained here is mirrored for TurboFan inlining in 34 // JSTypedLowering::ReduceJSCall{Function,Construct}. Keep these in sync. 35 36 // Make sure we operate in the context of the called function (for example 37 // ConstructStubs implemented in C++ will be run in the context of the caller 38 // instead of the callee, due to the way that [[Construct]] is defined for 39 // ordinary functions). 40 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 41 42 // JumpToExternalReference expects rax to contain the number of arguments 43 // including the receiver and the extra arguments. 44 const int num_extra_args = 3; 45 __ addp(rax, Immediate(num_extra_args + 1)); 46 47 // Unconditionally insert argc, target and new target as extra arguments. They 48 // will be used by stack frame iterators when constructing the stack trace. 49 __ PopReturnAddressTo(kScratchRegister); 50 __ Integer32ToSmi(rax, rax); 51 __ Push(rax); 52 __ SmiToInteger32(rax, rax); 53 __ Push(rdi); 54 __ Push(rdx); 55 __ PushReturnAddressFrom(kScratchRegister); 56 57 __ JumpToExternalReference(ExternalReference(address, masm->isolate()), 58 exit_frame_type == BUILTIN_EXIT); 59 } 60 61 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { 62 __ movp(kScratchRegister, 63 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 64 __ movp(kScratchRegister, 65 FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset)); 66 __ leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize)); 67 __ jmp(kScratchRegister); 68 } 69 70 static void GenerateTailCallToReturnedCode(MacroAssembler* masm, 71 Runtime::FunctionId function_id) { 72 // ----------- S t a t e ------------- 73 // -- rax : argument count (preserved for callee) 74 // -- rdx : new target (preserved for callee) 75 // -- rdi : target function (preserved for callee) 76 // ----------------------------------- 77 { 78 FrameScope scope(masm, StackFrame::INTERNAL); 79 // Push the number of arguments to the callee. 80 __ Integer32ToSmi(rax, rax); 81 __ Push(rax); 82 // Push a copy of the target function and the new target. 83 __ Push(rdi); 84 __ Push(rdx); 85 // Function is also the parameter to the runtime call. 86 __ Push(rdi); 87 88 __ CallRuntime(function_id, 1); 89 __ movp(rbx, rax); 90 91 // Restore target function and new target. 92 __ Pop(rdx); 93 __ Pop(rdi); 94 __ Pop(rax); 95 __ SmiToInteger32(rax, rax); 96 } 97 __ leap(rbx, FieldOperand(rbx, Code::kHeaderSize)); 98 __ jmp(rbx); 99 } 100 101 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { 102 // Checking whether the queued function is ready for install is optional, 103 // since we come across interrupts and stack checks elsewhere. However, 104 // not checking may delay installing ready functions, and always checking 105 // would be quite expensive. A good compromise is to first check against 106 // stack limit as a cue for an interrupt signal. 107 Label ok; 108 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); 109 __ j(above_equal, &ok); 110 111 GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode); 112 113 __ bind(&ok); 114 GenerateTailCallToSharedCode(masm); 115 } 116 117 namespace { 118 119 void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function, 120 bool create_implicit_receiver, 121 bool check_derived_construct) { 122 Label post_instantiation_deopt_entry; 123 124 // ----------- S t a t e ------------- 125 // -- rax: number of arguments 126 // -- rsi: context 127 // -- rdi: constructor function 128 // -- rdx: new target 129 // ----------------------------------- 130 131 // Enter a construct frame. 132 { 133 FrameScope scope(masm, StackFrame::CONSTRUCT); 134 135 // Preserve the incoming parameters on the stack. 136 __ Integer32ToSmi(rcx, rax); 137 __ Push(rsi); 138 __ Push(rcx); 139 140 if (create_implicit_receiver) { 141 // Allocate the new receiver object. 142 __ Push(rdi); 143 __ Push(rdx); 144 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(), 145 RelocInfo::CODE_TARGET); 146 __ movp(rbx, rax); 147 __ Pop(rdx); 148 __ Pop(rdi); 149 150 // ----------- S t a t e ------------- 151 // -- rdi: constructor function 152 // -- rbx: newly allocated object 153 // -- rdx: new target 154 // ----------------------------------- 155 156 // Retrieve smi-tagged arguments count from the stack. 157 __ SmiToInteger32(rax, Operand(rsp, 0 * kPointerSize)); 158 159 // Push the allocated receiver to the stack. We need two copies 160 // because we may have to return the original one and the calling 161 // conventions dictate that the called function pops the receiver. 162 __ Push(rbx); 163 __ Push(rbx); 164 } else { 165 __ PushRoot(Heap::kTheHoleValueRootIndex); 166 } 167 168 // Deoptimizer re-enters stub code here. 169 __ bind(&post_instantiation_deopt_entry); 170 171 // Set up pointer to last argument. 172 __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset)); 173 174 // Copy arguments and receiver to the expression stack. 175 Label loop, entry; 176 __ movp(rcx, rax); 177 __ jmp(&entry); 178 __ bind(&loop); 179 __ Push(Operand(rbx, rcx, times_pointer_size, 0)); 180 __ bind(&entry); 181 __ decp(rcx); 182 __ j(greater_equal, &loop); 183 184 // Call the function. 185 ParameterCount actual(rax); 186 __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION, 187 CheckDebugStepCallWrapper()); 188 189 // Store offset of return address for deoptimizer. 190 if (create_implicit_receiver && !is_api_function) { 191 masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset( 192 masm->pc_offset()); 193 } 194 195 // Restore context from the frame. 196 __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset)); 197 198 if (create_implicit_receiver) { 199 // If the result is an object (in the ECMA sense), we should get rid 200 // of the receiver and use the result; see ECMA-262 section 13.2.2-7 201 // on page 74. 202 Label use_receiver, exit; 203 // If the result is a smi, it is *not* an object in the ECMA sense. 204 __ JumpIfSmi(rax, &use_receiver, Label::kNear); 205 206 // If the type of the result (stored in its map) is less than 207 // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense. 208 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); 209 __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx); 210 __ j(above_equal, &exit, Label::kNear); 211 212 // Throw away the result of the constructor invocation and use the 213 // on-stack receiver as the result. 214 __ bind(&use_receiver); 215 __ movp(rax, Operand(rsp, 0)); 216 217 // Restore the arguments count and leave the construct frame. The 218 // arguments count is stored below the receiver. 219 __ bind(&exit); 220 __ movp(rbx, Operand(rsp, 1 * kPointerSize)); 221 } else { 222 __ movp(rbx, Operand(rsp, 0)); 223 } 224 225 // Leave construct frame. 226 } 227 228 // ES6 9.2.2. Step 13+ 229 // Check that the result is not a Smi, indicating that the constructor result 230 // from a derived class is neither undefined nor an Object. 231 if (check_derived_construct) { 232 Label dont_throw; 233 __ JumpIfNotSmi(rax, &dont_throw); 234 { 235 FrameScope scope(masm, StackFrame::INTERNAL); 236 __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject); 237 } 238 __ bind(&dont_throw); 239 } 240 241 // Remove caller arguments from the stack and return. 242 __ PopReturnAddressTo(rcx); 243 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2); 244 __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize)); 245 __ PushReturnAddressFrom(rcx); 246 if (create_implicit_receiver) { 247 Counters* counters = masm->isolate()->counters(); 248 __ IncrementCounter(counters->constructed_objects(), 1); 249 } 250 __ ret(0); 251 252 // Store offset of trampoline address for deoptimizer. This is the bailout 253 // point after the receiver instantiation but before the function invocation. 254 // We need to restore some registers in order to continue the above code. 255 if (create_implicit_receiver && !is_api_function) { 256 masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset( 257 masm->pc_offset()); 258 259 // ----------- S t a t e ------------- 260 // -- rax : newly allocated object 261 // -- rsp[0] : constructor function 262 // ----------------------------------- 263 264 __ Pop(rdi); 265 __ Push(rax); 266 __ Push(rax); 267 268 // Retrieve smi-tagged arguments count from the stack. 269 __ SmiToInteger32(rax, 270 Operand(rbp, ConstructFrameConstants::kLengthOffset)); 271 272 // Retrieve the new target value from the stack. This was placed into the 273 // frame description in place of the receiver by the optimizing compiler. 274 __ movp(rdx, Operand(rbp, rax, times_pointer_size, 275 StandardFrameConstants::kCallerSPOffset)); 276 277 // Continue with constructor function invocation. 278 __ jmp(&post_instantiation_deopt_entry); 279 } 280 } 281 282 } // namespace 283 284 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { 285 Generate_JSConstructStubHelper(masm, false, true, false); 286 } 287 288 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) { 289 Generate_JSConstructStubHelper(masm, true, false, false); 290 } 291 292 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) { 293 Generate_JSConstructStubHelper(masm, false, false, false); 294 } 295 296 void Builtins::Generate_JSBuiltinsConstructStubForDerived( 297 MacroAssembler* masm) { 298 Generate_JSConstructStubHelper(masm, false, false, true); 299 } 300 301 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) { 302 FrameScope scope(masm, StackFrame::INTERNAL); 303 __ Push(rdi); 304 __ CallRuntime(Runtime::kThrowConstructedNonConstructable); 305 } 306 307 enum IsTagged { kRaxIsSmiTagged, kRaxIsUntaggedInt }; 308 309 // Clobbers rcx, r11, kScratchRegister; preserves all other registers. 310 static void Generate_CheckStackOverflow(MacroAssembler* masm, 311 IsTagged rax_is_tagged) { 312 // rax : the number of items to be pushed to the stack 313 // 314 // Check the stack for overflow. We are not trying to catch 315 // interruptions (e.g. debug break and preemption) here, so the "real stack 316 // limit" is checked. 317 Label okay; 318 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex); 319 __ movp(rcx, rsp); 320 // Make rcx the space we have left. The stack might already be overflowed 321 // here which will cause rcx to become negative. 322 __ subp(rcx, kScratchRegister); 323 // Make r11 the space we need for the array when it is unrolled onto the 324 // stack. 325 if (rax_is_tagged == kRaxIsSmiTagged) { 326 __ PositiveSmiTimesPowerOfTwoToInteger64(r11, rax, kPointerSizeLog2); 327 } else { 328 DCHECK(rax_is_tagged == kRaxIsUntaggedInt); 329 __ movp(r11, rax); 330 __ shlq(r11, Immediate(kPointerSizeLog2)); 331 } 332 // Check if the arguments will overflow the stack. 333 __ cmpp(rcx, r11); 334 __ j(greater, &okay); // Signed comparison. 335 336 // Out of stack space. 337 __ CallRuntime(Runtime::kThrowStackOverflow); 338 339 __ bind(&okay); 340 } 341 342 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, 343 bool is_construct) { 344 ProfileEntryHookStub::MaybeCallEntryHook(masm); 345 346 // Expects five C++ function parameters. 347 // - Object* new_target 348 // - JSFunction* function 349 // - Object* receiver 350 // - int argc 351 // - Object*** argv 352 // (see Handle::Invoke in execution.cc). 353 354 // Open a C++ scope for the FrameScope. 355 { 356 // Platform specific argument handling. After this, the stack contains 357 // an internal frame and the pushed function and receiver, and 358 // register rax and rbx holds the argument count and argument array, 359 // while rdi holds the function pointer, rsi the context, and rdx the 360 // new.target. 361 362 #ifdef _WIN64 363 // MSVC parameters in: 364 // rcx : new_target 365 // rdx : function 366 // r8 : receiver 367 // r9 : argc 368 // [rsp+0x20] : argv 369 370 // Enter an internal frame. 371 FrameScope scope(masm, StackFrame::INTERNAL); 372 373 // Setup the context (we need to use the caller context from the isolate). 374 ExternalReference context_address(Isolate::kContextAddress, 375 masm->isolate()); 376 __ movp(rsi, masm->ExternalOperand(context_address)); 377 378 // Push the function and the receiver onto the stack. 379 __ Push(rdx); 380 __ Push(r8); 381 382 // Load the number of arguments and setup pointer to the arguments. 383 __ movp(rax, r9); 384 // Load the previous frame pointer to access C argument on stack 385 __ movp(kScratchRegister, Operand(rbp, 0)); 386 __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset)); 387 // Load the function pointer into rdi. 388 __ movp(rdi, rdx); 389 // Load the new.target into rdx. 390 __ movp(rdx, rcx); 391 #else // _WIN64 392 // GCC parameters in: 393 // rdi : new_target 394 // rsi : function 395 // rdx : receiver 396 // rcx : argc 397 // r8 : argv 398 399 __ movp(r11, rdi); 400 __ movp(rdi, rsi); 401 // rdi : function 402 // r11 : new_target 403 404 // Clear the context before we push it when entering the internal frame. 405 __ Set(rsi, 0); 406 407 // Enter an internal frame. 408 FrameScope scope(masm, StackFrame::INTERNAL); 409 410 // Setup the context (we need to use the caller context from the isolate). 411 ExternalReference context_address(Isolate::kContextAddress, 412 masm->isolate()); 413 __ movp(rsi, masm->ExternalOperand(context_address)); 414 415 // Push the function and receiver onto the stack. 416 __ Push(rdi); 417 __ Push(rdx); 418 419 // Load the number of arguments and setup pointer to the arguments. 420 __ movp(rax, rcx); 421 __ movp(rbx, r8); 422 423 // Load the new.target into rdx. 424 __ movp(rdx, r11); 425 #endif // _WIN64 426 427 // Current stack contents: 428 // [rsp + 2 * kPointerSize ... ] : Internal frame 429 // [rsp + kPointerSize] : function 430 // [rsp] : receiver 431 // Current register contents: 432 // rax : argc 433 // rbx : argv 434 // rsi : context 435 // rdi : function 436 // rdx : new.target 437 438 // Check if we have enough stack space to push all arguments. 439 // Expects argument count in rax. Clobbers rcx, r11. 440 Generate_CheckStackOverflow(masm, kRaxIsUntaggedInt); 441 442 // Copy arguments to the stack in a loop. 443 // Register rbx points to array of pointers to handle locations. 444 // Push the values of these handles. 445 Label loop, entry; 446 __ Set(rcx, 0); // Set loop variable to 0. 447 __ jmp(&entry, Label::kNear); 448 __ bind(&loop); 449 __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0)); 450 __ Push(Operand(kScratchRegister, 0)); // dereference handle 451 __ addp(rcx, Immediate(1)); 452 __ bind(&entry); 453 __ cmpp(rcx, rax); 454 __ j(not_equal, &loop); 455 456 // Invoke the builtin code. 457 Handle<Code> builtin = is_construct 458 ? masm->isolate()->builtins()->Construct() 459 : masm->isolate()->builtins()->Call(); 460 __ Call(builtin, RelocInfo::CODE_TARGET); 461 462 // Exit the internal frame. Notice that this also removes the empty 463 // context and the function left on the stack by the code 464 // invocation. 465 } 466 467 // TODO(X64): Is argument correct? Is there a receiver to remove? 468 __ ret(1 * kPointerSize); // Remove receiver. 469 } 470 471 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { 472 Generate_JSEntryTrampolineHelper(masm, false); 473 } 474 475 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { 476 Generate_JSEntryTrampolineHelper(masm, true); 477 } 478 479 // static 480 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) { 481 // ----------- S t a t e ------------- 482 // -- rax : the value to pass to the generator 483 // -- rbx : the JSGeneratorObject to resume 484 // -- rdx : the resume mode (tagged) 485 // -- rsp[0] : return address 486 // ----------------------------------- 487 __ AssertGeneratorObject(rbx); 488 489 // Store input value into generator object. 490 __ movp(FieldOperand(rbx, JSGeneratorObject::kInputOrDebugPosOffset), rax); 491 __ RecordWriteField(rbx, JSGeneratorObject::kInputOrDebugPosOffset, rax, rcx, 492 kDontSaveFPRegs); 493 494 // Store resume mode into generator object. 495 __ movp(FieldOperand(rbx, JSGeneratorObject::kResumeModeOffset), rdx); 496 497 // Load suspended function and context. 498 __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset)); 499 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 500 501 // Flood function if we are stepping. 502 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator; 503 Label stepping_prepared; 504 ExternalReference debug_hook = 505 ExternalReference::debug_hook_on_function_call_address(masm->isolate()); 506 Operand debug_hook_operand = masm->ExternalOperand(debug_hook); 507 __ cmpb(debug_hook_operand, Immediate(0)); 508 __ j(not_equal, &prepare_step_in_if_stepping); 509 510 // Flood function if we need to continue stepping in the suspended generator. 511 ExternalReference debug_suspended_generator = 512 ExternalReference::debug_suspended_generator_address(masm->isolate()); 513 Operand debug_suspended_generator_operand = 514 masm->ExternalOperand(debug_suspended_generator); 515 __ cmpp(rbx, debug_suspended_generator_operand); 516 __ j(equal, &prepare_step_in_suspended_generator); 517 __ bind(&stepping_prepared); 518 519 // Pop return address. 520 __ PopReturnAddressTo(rax); 521 522 // Push receiver. 523 __ Push(FieldOperand(rbx, JSGeneratorObject::kReceiverOffset)); 524 525 // ----------- S t a t e ------------- 526 // -- rax : return address 527 // -- rbx : the JSGeneratorObject to resume 528 // -- rdx : the resume mode (tagged) 529 // -- rdi : generator function 530 // -- rsi : generator context 531 // -- rsp[0] : generator receiver 532 // ----------------------------------- 533 534 // Push holes for arguments to generator function. Since the parser forced 535 // context allocation for any variables in generators, the actual argument 536 // values have already been copied into the context and these dummy values 537 // will never be used. 538 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 539 __ LoadSharedFunctionInfoSpecialField( 540 rcx, rcx, SharedFunctionInfo::kFormalParameterCountOffset); 541 { 542 Label done_loop, loop; 543 __ bind(&loop); 544 __ subl(rcx, Immediate(1)); 545 __ j(carry, &done_loop, Label::kNear); 546 __ PushRoot(Heap::kTheHoleValueRootIndex); 547 __ jmp(&loop); 548 __ bind(&done_loop); 549 } 550 551 // Underlying function needs to have bytecode available. 552 if (FLAG_debug_code) { 553 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 554 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kFunctionDataOffset)); 555 __ CmpObjectType(rcx, BYTECODE_ARRAY_TYPE, rcx); 556 __ Assert(equal, kMissingBytecodeArray); 557 } 558 559 // Resume (Ignition/TurboFan) generator object. 560 { 561 __ PushReturnAddressFrom(rax); 562 __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 563 __ LoadSharedFunctionInfoSpecialField( 564 rax, rax, SharedFunctionInfo::kFormalParameterCountOffset); 565 // We abuse new.target both to indicate that this is a resume call and to 566 // pass in the generator object. In ordinary calls, new.target is always 567 // undefined because generator functions are non-constructable. 568 __ movp(rdx, rbx); 569 __ jmp(FieldOperand(rdi, JSFunction::kCodeEntryOffset)); 570 } 571 572 __ bind(&prepare_step_in_if_stepping); 573 { 574 FrameScope scope(masm, StackFrame::INTERNAL); 575 __ Push(rbx); 576 __ Push(rdx); 577 __ Push(rdi); 578 __ CallRuntime(Runtime::kDebugOnFunctionCall); 579 __ Pop(rdx); 580 __ Pop(rbx); 581 __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset)); 582 } 583 __ jmp(&stepping_prepared); 584 585 __ bind(&prepare_step_in_suspended_generator); 586 { 587 FrameScope scope(masm, StackFrame::INTERNAL); 588 __ Push(rbx); 589 __ Push(rdx); 590 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator); 591 __ Pop(rdx); 592 __ Pop(rbx); 593 __ movp(rdi, FieldOperand(rbx, JSGeneratorObject::kFunctionOffset)); 594 } 595 __ jmp(&stepping_prepared); 596 } 597 598 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1, 599 Register scratch2) { 600 Register args_count = scratch1; 601 Register return_pc = scratch2; 602 603 // Get the arguments + receiver count. 604 __ movp(args_count, 605 Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp)); 606 __ movl(args_count, 607 FieldOperand(args_count, BytecodeArray::kParameterSizeOffset)); 608 609 // Leave the frame (also dropping the register file). 610 __ leave(); 611 612 // Drop receiver + arguments. 613 __ PopReturnAddressTo(return_pc); 614 __ addp(rsp, args_count); 615 __ PushReturnAddressFrom(return_pc); 616 } 617 618 // Generate code for entering a JS function with the interpreter. 619 // On entry to the function the receiver and arguments have been pushed on the 620 // stack left to right. The actual argument count matches the formal parameter 621 // count expected by the function. 622 // 623 // The live registers are: 624 // o rdi: the JS function object being called 625 // o rdx: the new target 626 // o rsi: our context 627 // o rbp: the caller's frame pointer 628 // o rsp: stack pointer (pointing to return address) 629 // 630 // The function builds an interpreter frame. See InterpreterFrameConstants in 631 // frames.h for its layout. 632 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) { 633 ProfileEntryHookStub::MaybeCallEntryHook(masm); 634 635 // Open a frame scope to indicate that there is a frame on the stack. The 636 // MANUAL indicates that the scope shouldn't actually generate code to set up 637 // the frame (that is done below). 638 FrameScope frame_scope(masm, StackFrame::MANUAL); 639 __ pushq(rbp); // Caller's frame pointer. 640 __ movp(rbp, rsp); 641 __ Push(rsi); // Callee's context. 642 __ Push(rdi); // Callee's JS function. 643 __ Push(rdx); // Callee's new target. 644 645 // Get the bytecode array from the function object (or from the DebugInfo if 646 // it is present) and load it into kInterpreterBytecodeArrayRegister. 647 __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 648 Label load_debug_bytecode_array, bytecode_array_loaded; 649 __ JumpIfNotSmi(FieldOperand(rax, SharedFunctionInfo::kDebugInfoOffset), 650 &load_debug_bytecode_array); 651 __ movp(kInterpreterBytecodeArrayRegister, 652 FieldOperand(rax, SharedFunctionInfo::kFunctionDataOffset)); 653 __ bind(&bytecode_array_loaded); 654 655 // Check whether we should continue to use the interpreter. 656 Label switch_to_different_code_kind; 657 __ Move(rcx, masm->CodeObject()); // Self-reference to this code. 658 __ cmpp(rcx, FieldOperand(rax, SharedFunctionInfo::kCodeOffset)); 659 __ j(not_equal, &switch_to_different_code_kind); 660 661 // Increment invocation count for the function. 662 __ movp(rcx, FieldOperand(rdi, JSFunction::kFeedbackVectorOffset)); 663 __ movp(rcx, FieldOperand(rcx, Cell::kValueOffset)); 664 __ SmiAddConstant( 665 FieldOperand(rcx, FeedbackVector::kInvocationCountIndex * kPointerSize + 666 FeedbackVector::kHeaderSize), 667 Smi::FromInt(1)); 668 669 // Check function data field is actually a BytecodeArray object. 670 if (FLAG_debug_code) { 671 __ AssertNotSmi(kInterpreterBytecodeArrayRegister); 672 __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE, 673 rax); 674 __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry); 675 } 676 677 // Reset code age. 678 __ movb(FieldOperand(kInterpreterBytecodeArrayRegister, 679 BytecodeArray::kBytecodeAgeOffset), 680 Immediate(BytecodeArray::kNoAgeBytecodeAge)); 681 682 // Load initial bytecode offset. 683 __ movp(kInterpreterBytecodeOffsetRegister, 684 Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag)); 685 686 // Push bytecode array and Smi tagged bytecode offset. 687 __ Push(kInterpreterBytecodeArrayRegister); 688 __ Integer32ToSmi(rcx, kInterpreterBytecodeOffsetRegister); 689 __ Push(rcx); 690 691 // Allocate the local and temporary register file on the stack. 692 { 693 // Load frame size from the BytecodeArray object. 694 __ movl(rcx, FieldOperand(kInterpreterBytecodeArrayRegister, 695 BytecodeArray::kFrameSizeOffset)); 696 697 // Do a stack check to ensure we don't go over the limit. 698 Label ok; 699 __ movp(rdx, rsp); 700 __ subp(rdx, rcx); 701 __ CompareRoot(rdx, Heap::kRealStackLimitRootIndex); 702 __ j(above_equal, &ok, Label::kNear); 703 __ CallRuntime(Runtime::kThrowStackOverflow); 704 __ bind(&ok); 705 706 // If ok, push undefined as the initial value for all register file entries. 707 Label loop_header; 708 Label loop_check; 709 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); 710 __ j(always, &loop_check); 711 __ bind(&loop_header); 712 // TODO(rmcilroy): Consider doing more than one push per loop iteration. 713 __ Push(rdx); 714 // Continue loop if not done. 715 __ bind(&loop_check); 716 __ subp(rcx, Immediate(kPointerSize)); 717 __ j(greater_equal, &loop_header, Label::kNear); 718 } 719 720 // Load accumulator and dispatch table into registers. 721 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex); 722 __ Move( 723 kInterpreterDispatchTableRegister, 724 ExternalReference::interpreter_dispatch_table_address(masm->isolate())); 725 726 // Dispatch to the first bytecode handler for the function. 727 __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister, 728 kInterpreterBytecodeOffsetRegister, times_1, 0)); 729 __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx, 730 times_pointer_size, 0)); 731 __ call(rbx); 732 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset()); 733 734 // The return value is in rax. 735 LeaveInterpreterFrame(masm, rbx, rcx); 736 __ ret(0); 737 738 // Load debug copy of the bytecode array. 739 __ bind(&load_debug_bytecode_array); 740 Register debug_info = kInterpreterBytecodeArrayRegister; 741 __ movp(debug_info, FieldOperand(rax, SharedFunctionInfo::kDebugInfoOffset)); 742 __ movp(kInterpreterBytecodeArrayRegister, 743 FieldOperand(debug_info, DebugInfo::kDebugBytecodeArrayIndex)); 744 __ jmp(&bytecode_array_loaded); 745 746 // If the shared code is no longer this entry trampoline, then the underlying 747 // function has been switched to a different kind of code and we heal the 748 // closure by switching the code entry field over to the new code as well. 749 __ bind(&switch_to_different_code_kind); 750 __ leave(); // Leave the frame so we can tail call. 751 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 752 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kCodeOffset)); 753 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize)); 754 __ movp(FieldOperand(rdi, JSFunction::kCodeEntryOffset), rcx); 755 __ RecordWriteCodeEntryField(rdi, rcx, r15); 756 __ jmp(rcx); 757 } 758 759 static void Generate_StackOverflowCheck( 760 MacroAssembler* masm, Register num_args, Register scratch, 761 Label* stack_overflow, 762 Label::Distance stack_overflow_distance = Label::kFar) { 763 // Check the stack for overflow. We are not trying to catch 764 // interruptions (e.g. debug break and preemption) here, so the "real stack 765 // limit" is checked. 766 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex); 767 __ movp(scratch, rsp); 768 // Make scratch the space we have left. The stack might already be overflowed 769 // here which will cause scratch to become negative. 770 __ subp(scratch, kScratchRegister); 771 __ sarp(scratch, Immediate(kPointerSizeLog2)); 772 // Check if the arguments will overflow the stack. 773 __ cmpp(scratch, num_args); 774 // Signed comparison. 775 __ j(less_equal, stack_overflow, stack_overflow_distance); 776 } 777 778 static void Generate_InterpreterPushArgs(MacroAssembler* masm, 779 Register num_args, 780 Register start_address, 781 Register scratch) { 782 // Find the address of the last argument. 783 __ Move(scratch, num_args); 784 __ shlp(scratch, Immediate(kPointerSizeLog2)); 785 __ negp(scratch); 786 __ addp(scratch, start_address); 787 788 // Push the arguments. 789 Label loop_header, loop_check; 790 __ j(always, &loop_check); 791 __ bind(&loop_header); 792 __ Push(Operand(start_address, 0)); 793 __ subp(start_address, Immediate(kPointerSize)); 794 __ bind(&loop_check); 795 __ cmpp(start_address, scratch); 796 __ j(greater, &loop_header, Label::kNear); 797 } 798 799 // static 800 void Builtins::Generate_InterpreterPushArgsAndCallImpl( 801 MacroAssembler* masm, TailCallMode tail_call_mode, 802 InterpreterPushArgsMode mode) { 803 // ----------- S t a t e ------------- 804 // -- rax : the number of arguments (not including the receiver) 805 // -- rbx : the address of the first argument to be pushed. Subsequent 806 // arguments should be consecutive above this, in the same order as 807 // they are to be pushed onto the stack. 808 // -- rdi : the target to call (can be any Object). 809 // ----------------------------------- 810 Label stack_overflow; 811 812 // Number of values to be pushed. 813 __ Move(rcx, rax); 814 __ addp(rcx, Immediate(1)); // Add one for receiver. 815 816 // Add a stack check before pushing arguments. 817 Generate_StackOverflowCheck(masm, rcx, rdx, &stack_overflow); 818 819 // Pop return address to allow tail-call after pushing arguments. 820 __ PopReturnAddressTo(kScratchRegister); 821 822 // rbx and rdx will be modified. 823 Generate_InterpreterPushArgs(masm, rcx, rbx, rdx); 824 825 // Call the target. 826 __ PushReturnAddressFrom(kScratchRegister); // Re-push return address. 827 828 if (mode == InterpreterPushArgsMode::kJSFunction) { 829 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny, 830 tail_call_mode), 831 RelocInfo::CODE_TARGET); 832 } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) { 833 __ Jump(masm->isolate()->builtins()->CallWithSpread(), 834 RelocInfo::CODE_TARGET); 835 } else { 836 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, 837 tail_call_mode), 838 RelocInfo::CODE_TARGET); 839 } 840 841 // Throw stack overflow exception. 842 __ bind(&stack_overflow); 843 { 844 __ TailCallRuntime(Runtime::kThrowStackOverflow); 845 // This should be unreachable. 846 __ int3(); 847 } 848 } 849 850 // static 851 void Builtins::Generate_InterpreterPushArgsAndConstructImpl( 852 MacroAssembler* masm, InterpreterPushArgsMode mode) { 853 // ----------- S t a t e ------------- 854 // -- rax : the number of arguments (not including the receiver) 855 // -- rdx : the new target (either the same as the constructor or 856 // the JSFunction on which new was invoked initially) 857 // -- rdi : the constructor to call (can be any Object) 858 // -- rbx : the allocation site feedback if available, undefined otherwise 859 // -- rcx : the address of the first argument to be pushed. Subsequent 860 // arguments should be consecutive above this, in the same order as 861 // they are to be pushed onto the stack. 862 // ----------------------------------- 863 Label stack_overflow; 864 865 // Add a stack check before pushing arguments. 866 Generate_StackOverflowCheck(masm, rax, r8, &stack_overflow); 867 868 // Pop return address to allow tail-call after pushing arguments. 869 __ PopReturnAddressTo(kScratchRegister); 870 871 // Push slot for the receiver to be constructed. 872 __ Push(Immediate(0)); 873 874 // rcx and r8 will be modified. 875 Generate_InterpreterPushArgs(masm, rax, rcx, r8); 876 877 // Push return address in preparation for the tail-call. 878 __ PushReturnAddressFrom(kScratchRegister); 879 880 __ AssertUndefinedOrAllocationSite(rbx); 881 if (mode == InterpreterPushArgsMode::kJSFunction) { 882 // Tail call to the function-specific construct stub (still in the caller 883 // context at this point). 884 __ AssertFunction(rdi); 885 886 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 887 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset)); 888 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize)); 889 // Jump to the constructor function (rax, rbx, rdx passed on). 890 __ jmp(rcx); 891 } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) { 892 // Call the constructor (rax, rdx, rdi passed on). 893 __ Jump(masm->isolate()->builtins()->ConstructWithSpread(), 894 RelocInfo::CODE_TARGET); 895 } else { 896 DCHECK_EQ(InterpreterPushArgsMode::kOther, mode); 897 // Call the constructor (rax, rdx, rdi passed on). 898 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); 899 } 900 901 // Throw stack overflow exception. 902 __ bind(&stack_overflow); 903 { 904 __ TailCallRuntime(Runtime::kThrowStackOverflow); 905 // This should be unreachable. 906 __ int3(); 907 } 908 } 909 910 // static 911 void Builtins::Generate_InterpreterPushArgsAndConstructArray( 912 MacroAssembler* masm) { 913 // ----------- S t a t e ------------- 914 // -- rax : the number of arguments (not including the receiver) 915 // -- rdx : the target to call checked to be Array function. 916 // -- rbx : the allocation site feedback 917 // -- rcx : the address of the first argument to be pushed. Subsequent 918 // arguments should be consecutive above this, in the same order as 919 // they are to be pushed onto the stack. 920 // ----------------------------------- 921 Label stack_overflow; 922 923 // Number of values to be pushed. 924 __ Move(r8, rax); 925 __ addp(r8, Immediate(1)); // Add one for receiver. 926 927 // Add a stack check before pushing arguments. 928 Generate_StackOverflowCheck(masm, r8, rdi, &stack_overflow); 929 930 // Pop return address to allow tail-call after pushing arguments. 931 __ PopReturnAddressTo(kScratchRegister); 932 933 // rcx and rdi will be modified. 934 Generate_InterpreterPushArgs(masm, r8, rcx, rdi); 935 936 // Push return address in preparation for the tail-call. 937 __ PushReturnAddressFrom(kScratchRegister); 938 939 // Array constructor expects constructor in rdi. It is same as rdx here. 940 __ Move(rdi, rdx); 941 942 ArrayConstructorStub stub(masm->isolate()); 943 __ TailCallStub(&stub); 944 945 // Throw stack overflow exception. 946 __ bind(&stack_overflow); 947 { 948 __ TailCallRuntime(Runtime::kThrowStackOverflow); 949 // This should be unreachable. 950 __ int3(); 951 } 952 } 953 954 static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) { 955 // Set the return address to the correct point in the interpreter entry 956 // trampoline. 957 Smi* interpreter_entry_return_pc_offset( 958 masm->isolate()->heap()->interpreter_entry_return_pc_offset()); 959 DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero); 960 __ Move(rbx, masm->isolate()->builtins()->InterpreterEntryTrampoline()); 961 __ addp(rbx, Immediate(interpreter_entry_return_pc_offset->value() + 962 Code::kHeaderSize - kHeapObjectTag)); 963 __ Push(rbx); 964 965 // Initialize dispatch table register. 966 __ Move( 967 kInterpreterDispatchTableRegister, 968 ExternalReference::interpreter_dispatch_table_address(masm->isolate())); 969 970 // Get the bytecode array pointer from the frame. 971 __ movp(kInterpreterBytecodeArrayRegister, 972 Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp)); 973 974 if (FLAG_debug_code) { 975 // Check function data field is actually a BytecodeArray object. 976 __ AssertNotSmi(kInterpreterBytecodeArrayRegister); 977 __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE, 978 rbx); 979 __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry); 980 } 981 982 // Get the target bytecode offset from the frame. 983 __ movp(kInterpreterBytecodeOffsetRegister, 984 Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp)); 985 __ SmiToInteger32(kInterpreterBytecodeOffsetRegister, 986 kInterpreterBytecodeOffsetRegister); 987 988 // Dispatch to the target bytecode. 989 __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister, 990 kInterpreterBytecodeOffsetRegister, times_1, 0)); 991 __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx, 992 times_pointer_size, 0)); 993 __ jmp(rbx); 994 } 995 996 void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) { 997 // Advance the current bytecode offset stored within the given interpreter 998 // stack frame. This simulates what all bytecode handlers do upon completion 999 // of the underlying operation. 1000 __ movp(rbx, Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp)); 1001 __ movp(rdx, Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp)); 1002 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); 1003 { 1004 FrameScope scope(masm, StackFrame::INTERNAL); 1005 __ Push(kInterpreterAccumulatorRegister); 1006 __ Push(rbx); // First argument is the bytecode array. 1007 __ Push(rdx); // Second argument is the bytecode offset. 1008 __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset); 1009 __ Move(rdx, rax); // Result is the new bytecode offset. 1010 __ Pop(kInterpreterAccumulatorRegister); 1011 } 1012 __ movp(Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp), rdx); 1013 1014 Generate_InterpreterEnterBytecode(masm); 1015 } 1016 1017 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) { 1018 Generate_InterpreterEnterBytecode(masm); 1019 } 1020 1021 void Builtins::Generate_CompileLazy(MacroAssembler* masm) { 1022 // ----------- S t a t e ------------- 1023 // -- rax : argument count (preserved for callee) 1024 // -- rdx : new target (preserved for callee) 1025 // -- rdi : target function (preserved for callee) 1026 // ----------------------------------- 1027 // First lookup code, maybe we don't need to compile! 1028 Label gotta_call_runtime; 1029 Label try_shared; 1030 Label loop_top, loop_bottom; 1031 1032 Register closure = rdi; 1033 Register map = r8; 1034 Register index = r9; 1035 1036 // Do we have a valid feedback vector? 1037 __ movp(rbx, FieldOperand(closure, JSFunction::kFeedbackVectorOffset)); 1038 __ movp(rbx, FieldOperand(rbx, Cell::kValueOffset)); 1039 __ JumpIfRoot(rbx, Heap::kUndefinedValueRootIndex, &gotta_call_runtime); 1040 1041 __ movp(map, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset)); 1042 __ movp(map, FieldOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset)); 1043 __ SmiToInteger32(index, FieldOperand(map, FixedArray::kLengthOffset)); 1044 __ cmpl(index, Immediate(2)); 1045 __ j(less, &try_shared); 1046 1047 // r14 : native context 1048 // r9 : length / index 1049 // r8 : optimized code map 1050 // rdx : new target 1051 // rdi : closure 1052 Register native_context = r14; 1053 __ movp(native_context, NativeContextOperand()); 1054 1055 __ bind(&loop_top); 1056 // Native context match? 1057 Register temp = r11; 1058 __ movp(temp, FieldOperand(map, index, times_pointer_size, 1059 SharedFunctionInfo::kOffsetToPreviousContext)); 1060 __ movp(temp, FieldOperand(temp, WeakCell::kValueOffset)); 1061 __ cmpp(temp, native_context); 1062 __ j(not_equal, &loop_bottom); 1063 1064 // Code available? 1065 Register entry = rcx; 1066 __ movp(entry, FieldOperand(map, index, times_pointer_size, 1067 SharedFunctionInfo::kOffsetToPreviousCachedCode)); 1068 __ movp(entry, FieldOperand(entry, WeakCell::kValueOffset)); 1069 __ JumpIfSmi(entry, &try_shared); 1070 1071 // Found code. Get it into the closure and return. 1072 __ leap(entry, FieldOperand(entry, Code::kHeaderSize)); 1073 __ movp(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry); 1074 __ RecordWriteCodeEntryField(closure, entry, r15); 1075 1076 // Link the closure into the optimized function list. 1077 // rcx : code entry (entry) 1078 // r14 : native context 1079 // rdx : new target 1080 // rdi : closure 1081 __ movp(rbx, 1082 ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST)); 1083 __ movp(FieldOperand(closure, JSFunction::kNextFunctionLinkOffset), rbx); 1084 __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, rbx, r15, 1085 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); 1086 const int function_list_offset = 1087 Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST); 1088 __ movp(ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST), 1089 closure); 1090 // Save closure before the write barrier. 1091 __ movp(rbx, closure); 1092 __ RecordWriteContextSlot(native_context, function_list_offset, closure, r15, 1093 kDontSaveFPRegs); 1094 __ movp(closure, rbx); 1095 __ jmp(entry); 1096 1097 __ bind(&loop_bottom); 1098 __ subl(index, Immediate(SharedFunctionInfo::kEntryLength)); 1099 __ cmpl(index, Immediate(1)); 1100 __ j(greater, &loop_top); 1101 1102 // We found no code. 1103 __ bind(&try_shared); 1104 __ movp(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset)); 1105 // Is the shared function marked for tier up? 1106 __ testb(FieldOperand(entry, SharedFunctionInfo::kMarkedForTierUpByteOffset), 1107 Immediate(1 << SharedFunctionInfo::kMarkedForTierUpBitWithinByte)); 1108 __ j(not_zero, &gotta_call_runtime); 1109 1110 // If SFI points to anything other than CompileLazy, install that. 1111 __ movp(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset)); 1112 __ Move(rbx, masm->CodeObject()); 1113 __ cmpp(entry, rbx); 1114 __ j(equal, &gotta_call_runtime); 1115 1116 // Install the SFI's code entry. 1117 __ leap(entry, FieldOperand(entry, Code::kHeaderSize)); 1118 __ movp(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry); 1119 __ RecordWriteCodeEntryField(closure, entry, r15); 1120 __ jmp(entry); 1121 1122 __ bind(&gotta_call_runtime); 1123 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy); 1124 } 1125 1126 void Builtins::Generate_CompileBaseline(MacroAssembler* masm) { 1127 GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline); 1128 } 1129 1130 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { 1131 GenerateTailCallToReturnedCode(masm, 1132 Runtime::kCompileOptimized_NotConcurrent); 1133 } 1134 1135 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) { 1136 GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent); 1137 } 1138 1139 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) { 1140 // ----------- S t a t e ------------- 1141 // -- rax : argument count (preserved for callee) 1142 // -- rdx : new target (preserved for callee) 1143 // -- rdi : target function (preserved for callee) 1144 // ----------------------------------- 1145 Label failed; 1146 { 1147 FrameScope scope(masm, StackFrame::INTERNAL); 1148 // Preserve argument count for later compare. 1149 __ movp(rcx, rax); 1150 // Push the number of arguments to the callee. 1151 __ Integer32ToSmi(rax, rax); 1152 __ Push(rax); 1153 // Push a copy of the target function and the new target. 1154 __ Push(rdi); 1155 __ Push(rdx); 1156 1157 // The function. 1158 __ Push(rdi); 1159 // Copy arguments from caller (stdlib, foreign, heap). 1160 Label args_done; 1161 for (int j = 0; j < 4; ++j) { 1162 Label over; 1163 if (j < 3) { 1164 __ cmpp(rcx, Immediate(j)); 1165 __ j(not_equal, &over, Label::kNear); 1166 } 1167 for (int i = j - 1; i >= 0; --i) { 1168 __ Push(Operand( 1169 rbp, StandardFrameConstants::kCallerSPOffset + i * kPointerSize)); 1170 } 1171 for (int i = 0; i < 3 - j; ++i) { 1172 __ PushRoot(Heap::kUndefinedValueRootIndex); 1173 } 1174 if (j < 3) { 1175 __ jmp(&args_done, Label::kNear); 1176 __ bind(&over); 1177 } 1178 } 1179 __ bind(&args_done); 1180 1181 // Call runtime, on success unwind frame, and parent frame. 1182 __ CallRuntime(Runtime::kInstantiateAsmJs, 4); 1183 // A smi 0 is returned on failure, an object on success. 1184 __ JumpIfSmi(rax, &failed, Label::kNear); 1185 1186 __ Drop(2); 1187 __ Pop(rcx); 1188 __ SmiToInteger32(rcx, rcx); 1189 scope.GenerateLeaveFrame(); 1190 1191 __ PopReturnAddressTo(rbx); 1192 __ incp(rcx); 1193 __ leap(rsp, Operand(rsp, rcx, times_pointer_size, 0)); 1194 __ PushReturnAddressFrom(rbx); 1195 __ ret(0); 1196 1197 __ bind(&failed); 1198 // Restore target function and new target. 1199 __ Pop(rdx); 1200 __ Pop(rdi); 1201 __ Pop(rax); 1202 __ SmiToInteger32(rax, rax); 1203 } 1204 // On failure, tail call back to regular js. 1205 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy); 1206 } 1207 1208 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) { 1209 // For now, we are relying on the fact that make_code_young doesn't do any 1210 // garbage collection which allows us to save/restore the registers without 1211 // worrying about which of them contain pointers. We also don't build an 1212 // internal frame to make the code faster, since we shouldn't have to do stack 1213 // crawls in MakeCodeYoung. This seems a bit fragile. 1214 1215 // Re-execute the code that was patched back to the young age when 1216 // the stub returns. 1217 __ subp(Operand(rsp, 0), Immediate(5)); 1218 __ Pushad(); 1219 __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate())); 1220 __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize)); 1221 { // NOLINT 1222 FrameScope scope(masm, StackFrame::MANUAL); 1223 __ PrepareCallCFunction(2); 1224 __ CallCFunction( 1225 ExternalReference::get_make_code_young_function(masm->isolate()), 2); 1226 } 1227 __ Popad(); 1228 __ ret(0); 1229 } 1230 1231 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \ 1232 void Builtins::Generate_Make##C##CodeYoungAgain(MacroAssembler* masm) { \ 1233 GenerateMakeCodeYoungAgainCommon(masm); \ 1234 } 1235 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR) 1236 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR 1237 1238 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) { 1239 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact 1240 // that make_code_young doesn't do any garbage collection which allows us to 1241 // save/restore the registers without worrying about which of them contain 1242 // pointers. 1243 __ Pushad(); 1244 __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate())); 1245 __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize)); 1246 __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength)); 1247 { // NOLINT 1248 FrameScope scope(masm, StackFrame::MANUAL); 1249 __ PrepareCallCFunction(2); 1250 __ CallCFunction( 1251 ExternalReference::get_mark_code_as_executed_function(masm->isolate()), 1252 2); 1253 } 1254 __ Popad(); 1255 1256 // Perform prologue operations usually performed by the young code stub. 1257 __ PopReturnAddressTo(kScratchRegister); 1258 __ pushq(rbp); // Caller's frame pointer. 1259 __ movp(rbp, rsp); 1260 __ Push(rsi); // Callee's context. 1261 __ Push(rdi); // Callee's JS Function. 1262 __ PushReturnAddressFrom(kScratchRegister); 1263 1264 // Jump to point after the code-age stub. 1265 __ ret(0); 1266 } 1267 1268 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) { 1269 GenerateMakeCodeYoungAgainCommon(masm); 1270 } 1271 1272 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) { 1273 Generate_MarkCodeAsExecutedOnce(masm); 1274 } 1275 1276 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm, 1277 SaveFPRegsMode save_doubles) { 1278 // Enter an internal frame. 1279 { 1280 FrameScope scope(masm, StackFrame::INTERNAL); 1281 1282 // Preserve registers across notification, this is important for compiled 1283 // stubs that tail call the runtime on deopts passing their parameters in 1284 // registers. 1285 __ Pushad(); 1286 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles); 1287 __ Popad(); 1288 // Tear down internal frame. 1289 } 1290 1291 __ DropUnderReturnAddress(1); // Ignore state offset 1292 __ ret(0); // Return to IC Miss stub, continuation still on stack. 1293 } 1294 1295 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) { 1296 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs); 1297 } 1298 1299 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) { 1300 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs); 1301 } 1302 1303 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm, 1304 Deoptimizer::BailoutType type) { 1305 // Enter an internal frame. 1306 { 1307 FrameScope scope(masm, StackFrame::INTERNAL); 1308 1309 // Pass the deoptimization type to the runtime system. 1310 __ Push(Smi::FromInt(static_cast<int>(type))); 1311 1312 __ CallRuntime(Runtime::kNotifyDeoptimized); 1313 // Tear down internal frame. 1314 } 1315 1316 // Get the full codegen state from the stack and untag it. 1317 __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize)); 1318 1319 // Switch on the state. 1320 Label not_no_registers, not_tos_rax; 1321 __ cmpp(kScratchRegister, 1322 Immediate(static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS))); 1323 __ j(not_equal, ¬_no_registers, Label::kNear); 1324 __ ret(1 * kPointerSize); // Remove state. 1325 1326 __ bind(¬_no_registers); 1327 DCHECK_EQ(kInterpreterAccumulatorRegister.code(), rax.code()); 1328 __ movp(rax, Operand(rsp, kPCOnStackSize + kPointerSize)); 1329 __ cmpp(kScratchRegister, 1330 Immediate(static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER))); 1331 __ j(not_equal, ¬_tos_rax, Label::kNear); 1332 __ ret(2 * kPointerSize); // Remove state, rax. 1333 1334 __ bind(¬_tos_rax); 1335 __ Abort(kNoCasesLeft); 1336 } 1337 1338 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) { 1339 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER); 1340 } 1341 1342 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) { 1343 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT); 1344 } 1345 1346 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { 1347 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY); 1348 } 1349 1350 // static 1351 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) { 1352 // ----------- S t a t e ------------- 1353 // -- rax : argc 1354 // -- rsp[0] : return address 1355 // -- rsp[8] : argArray 1356 // -- rsp[16] : thisArg 1357 // -- rsp[24] : receiver 1358 // ----------------------------------- 1359 1360 // 1. Load receiver into rdi, argArray into rax (if present), remove all 1361 // arguments from the stack (including the receiver), and push thisArg (if 1362 // present) instead. 1363 { 1364 Label no_arg_array, no_this_arg; 1365 StackArgumentsAccessor args(rsp, rax); 1366 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); 1367 __ movp(rbx, rdx); 1368 __ movp(rdi, args.GetReceiverOperand()); 1369 __ testp(rax, rax); 1370 __ j(zero, &no_this_arg, Label::kNear); 1371 { 1372 __ movp(rdx, args.GetArgumentOperand(1)); 1373 __ cmpp(rax, Immediate(1)); 1374 __ j(equal, &no_arg_array, Label::kNear); 1375 __ movp(rbx, args.GetArgumentOperand(2)); 1376 __ bind(&no_arg_array); 1377 } 1378 __ bind(&no_this_arg); 1379 __ PopReturnAddressTo(rcx); 1380 __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize)); 1381 __ Push(rdx); 1382 __ PushReturnAddressFrom(rcx); 1383 __ movp(rax, rbx); 1384 } 1385 1386 // ----------- S t a t e ------------- 1387 // -- rax : argArray 1388 // -- rdi : receiver 1389 // -- rsp[0] : return address 1390 // -- rsp[8] : thisArg 1391 // ----------------------------------- 1392 1393 // 2. Make sure the receiver is actually callable. 1394 Label receiver_not_callable; 1395 __ JumpIfSmi(rdi, &receiver_not_callable, Label::kNear); 1396 __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset)); 1397 __ testb(FieldOperand(rcx, Map::kBitFieldOffset), 1398 Immediate(1 << Map::kIsCallable)); 1399 __ j(zero, &receiver_not_callable, Label::kNear); 1400 1401 // 3. Tail call with no arguments if argArray is null or undefined. 1402 Label no_arguments; 1403 __ JumpIfRoot(rax, Heap::kNullValueRootIndex, &no_arguments, Label::kNear); 1404 __ JumpIfRoot(rax, Heap::kUndefinedValueRootIndex, &no_arguments, 1405 Label::kNear); 1406 1407 // 4a. Apply the receiver to the given argArray (passing undefined for 1408 // new.target). 1409 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); 1410 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); 1411 1412 // 4b. The argArray is either null or undefined, so we tail call without any 1413 // arguments to the receiver. Since we did not create a frame for 1414 // Function.prototype.apply() yet, we use a normal Call builtin here. 1415 __ bind(&no_arguments); 1416 { 1417 __ Set(rax, 0); 1418 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); 1419 } 1420 1421 // 4c. The receiver is not callable, throw an appropriate TypeError. 1422 __ bind(&receiver_not_callable); 1423 { 1424 StackArgumentsAccessor args(rsp, 0); 1425 __ movp(args.GetReceiverOperand(), rdi); 1426 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); 1427 } 1428 } 1429 1430 // static 1431 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) { 1432 // Stack Layout: 1433 // rsp[0] : Return address 1434 // rsp[8] : Argument n 1435 // rsp[16] : Argument n-1 1436 // ... 1437 // rsp[8 * n] : Argument 1 1438 // rsp[8 * (n + 1)] : Receiver (callable to call) 1439 // 1440 // rax contains the number of arguments, n, not counting the receiver. 1441 // 1442 // 1. Make sure we have at least one argument. 1443 { 1444 Label done; 1445 __ testp(rax, rax); 1446 __ j(not_zero, &done, Label::kNear); 1447 __ PopReturnAddressTo(rbx); 1448 __ PushRoot(Heap::kUndefinedValueRootIndex); 1449 __ PushReturnAddressFrom(rbx); 1450 __ incp(rax); 1451 __ bind(&done); 1452 } 1453 1454 // 2. Get the callable to call (passed as receiver) from the stack. 1455 { 1456 StackArgumentsAccessor args(rsp, rax); 1457 __ movp(rdi, args.GetReceiverOperand()); 1458 } 1459 1460 // 3. Shift arguments and return address one slot down on the stack 1461 // (overwriting the original receiver). Adjust argument count to make 1462 // the original first argument the new receiver. 1463 { 1464 Label loop; 1465 __ movp(rcx, rax); 1466 StackArgumentsAccessor args(rsp, rcx); 1467 __ bind(&loop); 1468 __ movp(rbx, args.GetArgumentOperand(1)); 1469 __ movp(args.GetArgumentOperand(0), rbx); 1470 __ decp(rcx); 1471 __ j(not_zero, &loop); // While non-zero. 1472 __ DropUnderReturnAddress(1, rbx); // Drop one slot under return address. 1473 __ decp(rax); // One fewer argument (first argument is new receiver). 1474 } 1475 1476 // 4. Call the callable. 1477 // Since we did not create a frame for Function.prototype.call() yet, 1478 // we use a normal Call builtin here. 1479 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); 1480 } 1481 1482 void Builtins::Generate_ReflectApply(MacroAssembler* masm) { 1483 // ----------- S t a t e ------------- 1484 // -- rax : argc 1485 // -- rsp[0] : return address 1486 // -- rsp[8] : argumentsList 1487 // -- rsp[16] : thisArgument 1488 // -- rsp[24] : target 1489 // -- rsp[32] : receiver 1490 // ----------------------------------- 1491 1492 // 1. Load target into rdi (if present), argumentsList into rax (if present), 1493 // remove all arguments from the stack (including the receiver), and push 1494 // thisArgument (if present) instead. 1495 { 1496 Label done; 1497 StackArgumentsAccessor args(rsp, rax); 1498 __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex); 1499 __ movp(rdx, rdi); 1500 __ movp(rbx, rdi); 1501 __ cmpp(rax, Immediate(1)); 1502 __ j(below, &done, Label::kNear); 1503 __ movp(rdi, args.GetArgumentOperand(1)); // target 1504 __ j(equal, &done, Label::kNear); 1505 __ movp(rdx, args.GetArgumentOperand(2)); // thisArgument 1506 __ cmpp(rax, Immediate(3)); 1507 __ j(below, &done, Label::kNear); 1508 __ movp(rbx, args.GetArgumentOperand(3)); // argumentsList 1509 __ bind(&done); 1510 __ PopReturnAddressTo(rcx); 1511 __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize)); 1512 __ Push(rdx); 1513 __ PushReturnAddressFrom(rcx); 1514 __ movp(rax, rbx); 1515 } 1516 1517 // ----------- S t a t e ------------- 1518 // -- rax : argumentsList 1519 // -- rdi : target 1520 // -- rsp[0] : return address 1521 // -- rsp[8] : thisArgument 1522 // ----------------------------------- 1523 1524 // 2. Make sure the target is actually callable. 1525 Label target_not_callable; 1526 __ JumpIfSmi(rdi, &target_not_callable, Label::kNear); 1527 __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset)); 1528 __ testb(FieldOperand(rcx, Map::kBitFieldOffset), 1529 Immediate(1 << Map::kIsCallable)); 1530 __ j(zero, &target_not_callable, Label::kNear); 1531 1532 // 3a. Apply the target to the given argumentsList (passing undefined for 1533 // new.target). 1534 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); 1535 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); 1536 1537 // 3b. The target is not callable, throw an appropriate TypeError. 1538 __ bind(&target_not_callable); 1539 { 1540 StackArgumentsAccessor args(rsp, 0); 1541 __ movp(args.GetReceiverOperand(), rdi); 1542 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); 1543 } 1544 } 1545 1546 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) { 1547 // ----------- S t a t e ------------- 1548 // -- rax : argc 1549 // -- rsp[0] : return address 1550 // -- rsp[8] : new.target (optional) 1551 // -- rsp[16] : argumentsList 1552 // -- rsp[24] : target 1553 // -- rsp[32] : receiver 1554 // ----------------------------------- 1555 1556 // 1. Load target into rdi (if present), argumentsList into rax (if present), 1557 // new.target into rdx (if present, otherwise use target), remove all 1558 // arguments from the stack (including the receiver), and push thisArgument 1559 // (if present) instead. 1560 { 1561 Label done; 1562 StackArgumentsAccessor args(rsp, rax); 1563 __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex); 1564 __ movp(rdx, rdi); 1565 __ movp(rbx, rdi); 1566 __ cmpp(rax, Immediate(1)); 1567 __ j(below, &done, Label::kNear); 1568 __ movp(rdi, args.GetArgumentOperand(1)); // target 1569 __ movp(rdx, rdi); // new.target defaults to target 1570 __ j(equal, &done, Label::kNear); 1571 __ movp(rbx, args.GetArgumentOperand(2)); // argumentsList 1572 __ cmpp(rax, Immediate(3)); 1573 __ j(below, &done, Label::kNear); 1574 __ movp(rdx, args.GetArgumentOperand(3)); // new.target 1575 __ bind(&done); 1576 __ PopReturnAddressTo(rcx); 1577 __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize)); 1578 __ PushRoot(Heap::kUndefinedValueRootIndex); 1579 __ PushReturnAddressFrom(rcx); 1580 __ movp(rax, rbx); 1581 } 1582 1583 // ----------- S t a t e ------------- 1584 // -- rax : argumentsList 1585 // -- rdx : new.target 1586 // -- rdi : target 1587 // -- rsp[0] : return address 1588 // -- rsp[8] : receiver (undefined) 1589 // ----------------------------------- 1590 1591 // 2. Make sure the target is actually a constructor. 1592 Label target_not_constructor; 1593 __ JumpIfSmi(rdi, &target_not_constructor, Label::kNear); 1594 __ movp(rcx, FieldOperand(rdi, HeapObject::kMapOffset)); 1595 __ testb(FieldOperand(rcx, Map::kBitFieldOffset), 1596 Immediate(1 << Map::kIsConstructor)); 1597 __ j(zero, &target_not_constructor, Label::kNear); 1598 1599 // 3. Make sure the target is actually a constructor. 1600 Label new_target_not_constructor; 1601 __ JumpIfSmi(rdx, &new_target_not_constructor, Label::kNear); 1602 __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset)); 1603 __ testb(FieldOperand(rcx, Map::kBitFieldOffset), 1604 Immediate(1 << Map::kIsConstructor)); 1605 __ j(zero, &new_target_not_constructor, Label::kNear); 1606 1607 // 4a. Construct the target with the given new.target and argumentsList. 1608 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); 1609 1610 // 4b. The target is not a constructor, throw an appropriate TypeError. 1611 __ bind(&target_not_constructor); 1612 { 1613 StackArgumentsAccessor args(rsp, 0); 1614 __ movp(args.GetReceiverOperand(), rdi); 1615 __ TailCallRuntime(Runtime::kThrowNotConstructor); 1616 } 1617 1618 // 4c. The new.target is not a constructor, throw an appropriate TypeError. 1619 __ bind(&new_target_not_constructor); 1620 { 1621 StackArgumentsAccessor args(rsp, 0); 1622 __ movp(args.GetReceiverOperand(), rdx); 1623 __ TailCallRuntime(Runtime::kThrowNotConstructor); 1624 } 1625 } 1626 1627 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) { 1628 // ----------- S t a t e ------------- 1629 // -- rax : argc 1630 // -- rsp[0] : return address 1631 // -- rsp[8] : last argument 1632 // ----------------------------------- 1633 Label generic_array_code; 1634 1635 // Get the InternalArray function. 1636 __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi); 1637 1638 if (FLAG_debug_code) { 1639 // Initial map for the builtin InternalArray functions should be maps. 1640 __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); 1641 // Will both indicate a NULL and a Smi. 1642 STATIC_ASSERT(kSmiTag == 0); 1643 Condition not_smi = NegateCondition(masm->CheckSmi(rbx)); 1644 __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction); 1645 __ CmpObjectType(rbx, MAP_TYPE, rcx); 1646 __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction); 1647 } 1648 1649 // Run the native code for the InternalArray function called as a normal 1650 // function. 1651 // tail call a stub 1652 InternalArrayConstructorStub stub(masm->isolate()); 1653 __ TailCallStub(&stub); 1654 } 1655 1656 void Builtins::Generate_ArrayCode(MacroAssembler* masm) { 1657 // ----------- S t a t e ------------- 1658 // -- rax : argc 1659 // -- rsp[0] : return address 1660 // -- rsp[8] : last argument 1661 // ----------------------------------- 1662 Label generic_array_code; 1663 1664 // Get the Array function. 1665 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rdi); 1666 1667 if (FLAG_debug_code) { 1668 // Initial map for the builtin Array functions should be maps. 1669 __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); 1670 // Will both indicate a NULL and a Smi. 1671 STATIC_ASSERT(kSmiTag == 0); 1672 Condition not_smi = NegateCondition(masm->CheckSmi(rbx)); 1673 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction); 1674 __ CmpObjectType(rbx, MAP_TYPE, rcx); 1675 __ Check(equal, kUnexpectedInitialMapForArrayFunction); 1676 } 1677 1678 __ movp(rdx, rdi); 1679 // Run the native code for the Array function called as a normal function. 1680 // tail call a stub 1681 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex); 1682 ArrayConstructorStub stub(masm->isolate()); 1683 __ TailCallStub(&stub); 1684 } 1685 1686 // static 1687 void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) { 1688 // ----------- S t a t e ------------- 1689 // -- rax : number of arguments 1690 // -- rdi : function 1691 // -- rsi : context 1692 // -- rsp[0] : return address 1693 // -- rsp[(argc - n) * 8] : arg[n] (zero-based) 1694 // -- rsp[(argc + 1) * 8] : receiver 1695 // ----------------------------------- 1696 Condition const cc = (kind == MathMaxMinKind::kMin) ? below : above; 1697 Heap::RootListIndex const root_index = 1698 (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex 1699 : Heap::kMinusInfinityValueRootIndex; 1700 XMMRegister const reg = (kind == MathMaxMinKind::kMin) ? xmm1 : xmm0; 1701 1702 // Load the accumulator with the default return value (either -Infinity or 1703 // +Infinity), with the tagged value in rdx and the double value in xmm0. 1704 __ LoadRoot(rdx, root_index); 1705 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); 1706 __ Move(rcx, rax); 1707 1708 Label done_loop, loop; 1709 __ bind(&loop); 1710 { 1711 // Check if all parameters done. 1712 __ testp(rcx, rcx); 1713 __ j(zero, &done_loop); 1714 1715 // Load the next parameter tagged value into rbx. 1716 __ movp(rbx, Operand(rsp, rcx, times_pointer_size, 0)); 1717 1718 // Load the double value of the parameter into xmm1, maybe converting the 1719 // parameter to a number first using the ToNumber builtin if necessary. 1720 Label convert, convert_smi, convert_number, done_convert; 1721 __ bind(&convert); 1722 __ JumpIfSmi(rbx, &convert_smi); 1723 __ JumpIfRoot(FieldOperand(rbx, HeapObject::kMapOffset), 1724 Heap::kHeapNumberMapRootIndex, &convert_number); 1725 { 1726 // Parameter is not a Number, use the ToNumber builtin to convert it. 1727 FrameScope scope(masm, StackFrame::MANUAL); 1728 __ Integer32ToSmi(rax, rax); 1729 __ Integer32ToSmi(rcx, rcx); 1730 __ EnterBuiltinFrame(rsi, rdi, rax); 1731 __ Push(rcx); 1732 __ Push(rdx); 1733 __ movp(rax, rbx); 1734 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); 1735 __ movp(rbx, rax); 1736 __ Pop(rdx); 1737 __ Pop(rcx); 1738 __ LeaveBuiltinFrame(rsi, rdi, rax); 1739 __ SmiToInteger32(rcx, rcx); 1740 __ SmiToInteger32(rax, rax); 1741 { 1742 // Restore the double accumulator value (xmm0). 1743 Label restore_smi, done_restore; 1744 __ JumpIfSmi(rdx, &restore_smi, Label::kNear); 1745 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); 1746 __ jmp(&done_restore, Label::kNear); 1747 __ bind(&restore_smi); 1748 __ SmiToDouble(xmm0, rdx); 1749 __ bind(&done_restore); 1750 } 1751 } 1752 __ jmp(&convert); 1753 __ bind(&convert_number); 1754 __ Movsd(xmm1, FieldOperand(rbx, HeapNumber::kValueOffset)); 1755 __ jmp(&done_convert, Label::kNear); 1756 __ bind(&convert_smi); 1757 __ SmiToDouble(xmm1, rbx); 1758 __ bind(&done_convert); 1759 1760 // Perform the actual comparison with the accumulator value on the left hand 1761 // side (xmm0) and the next parameter value on the right hand side (xmm1). 1762 Label compare_equal, compare_nan, compare_swap, done_compare; 1763 __ Ucomisd(xmm0, xmm1); 1764 __ j(parity_even, &compare_nan, Label::kNear); 1765 __ j(cc, &done_compare, Label::kNear); 1766 __ j(equal, &compare_equal, Label::kNear); 1767 1768 // Result is on the right hand side. 1769 __ bind(&compare_swap); 1770 __ Movaps(xmm0, xmm1); 1771 __ Move(rdx, rbx); 1772 __ jmp(&done_compare, Label::kNear); 1773 1774 // At least one side is NaN, which means that the result will be NaN too. 1775 __ bind(&compare_nan); 1776 __ LoadRoot(rdx, Heap::kNanValueRootIndex); 1777 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); 1778 __ jmp(&done_compare, Label::kNear); 1779 1780 // Left and right hand side are equal, check for -0 vs. +0. 1781 __ bind(&compare_equal); 1782 __ Movmskpd(kScratchRegister, reg); 1783 __ testl(kScratchRegister, Immediate(1)); 1784 __ j(not_zero, &compare_swap); 1785 1786 __ bind(&done_compare); 1787 __ decp(rcx); 1788 __ jmp(&loop); 1789 } 1790 1791 __ bind(&done_loop); 1792 __ PopReturnAddressTo(rcx); 1793 __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize)); 1794 __ PushReturnAddressFrom(rcx); 1795 __ movp(rax, rdx); 1796 __ Ret(); 1797 } 1798 1799 // static 1800 void Builtins::Generate_NumberConstructor(MacroAssembler* masm) { 1801 // ----------- S t a t e ------------- 1802 // -- rax : number of arguments 1803 // -- rdi : constructor function 1804 // -- rsi : context 1805 // -- rsp[0] : return address 1806 // -- rsp[(argc - n) * 8] : arg[n] (zero-based) 1807 // -- rsp[(argc + 1) * 8] : receiver 1808 // ----------------------------------- 1809 1810 // 1. Load the first argument into rbx. 1811 Label no_arguments; 1812 { 1813 StackArgumentsAccessor args(rsp, rax); 1814 __ testp(rax, rax); 1815 __ j(zero, &no_arguments, Label::kNear); 1816 __ movp(rbx, args.GetArgumentOperand(1)); 1817 } 1818 1819 // 2a. Convert the first argument to a number. 1820 { 1821 FrameScope scope(masm, StackFrame::MANUAL); 1822 __ Integer32ToSmi(rax, rax); 1823 __ EnterBuiltinFrame(rsi, rdi, rax); 1824 __ movp(rax, rbx); 1825 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); 1826 __ LeaveBuiltinFrame(rsi, rdi, rbx); // Argc popped to rbx. 1827 __ SmiToInteger32(rbx, rbx); 1828 } 1829 1830 { 1831 // Drop all arguments including the receiver. 1832 __ PopReturnAddressTo(rcx); 1833 __ leap(rsp, Operand(rsp, rbx, times_pointer_size, kPointerSize)); 1834 __ PushReturnAddressFrom(rcx); 1835 __ Ret(); 1836 } 1837 1838 // 2b. No arguments, return +0 (already in rax). 1839 __ bind(&no_arguments); 1840 __ ret(1 * kPointerSize); 1841 } 1842 1843 // static 1844 void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) { 1845 // ----------- S t a t e ------------- 1846 // -- rax : number of arguments 1847 // -- rdi : constructor function 1848 // -- rdx : new target 1849 // -- rsi : context 1850 // -- rsp[0] : return address 1851 // -- rsp[(argc - n) * 8] : arg[n] (zero-based) 1852 // -- rsp[(argc + 1) * 8] : receiver 1853 // ----------------------------------- 1854 1855 // 1. Make sure we operate in the context of the called function. 1856 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 1857 1858 // Store argc in r8. 1859 __ Integer32ToSmi(r8, rax); 1860 1861 // 2. Load the first argument into rbx. 1862 { 1863 StackArgumentsAccessor args(rsp, rax); 1864 Label no_arguments, done; 1865 __ testp(rax, rax); 1866 __ j(zero, &no_arguments, Label::kNear); 1867 __ movp(rbx, args.GetArgumentOperand(1)); 1868 __ jmp(&done, Label::kNear); 1869 __ bind(&no_arguments); 1870 __ Move(rbx, Smi::kZero); 1871 __ bind(&done); 1872 } 1873 1874 // 3. Make sure rbx is a number. 1875 { 1876 Label done_convert; 1877 __ JumpIfSmi(rbx, &done_convert); 1878 __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset), 1879 Heap::kHeapNumberMapRootIndex); 1880 __ j(equal, &done_convert); 1881 { 1882 FrameScope scope(masm, StackFrame::MANUAL); 1883 __ EnterBuiltinFrame(rsi, rdi, r8); 1884 __ Push(rdx); 1885 __ Move(rax, rbx); 1886 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); 1887 __ Move(rbx, rax); 1888 __ Pop(rdx); 1889 __ LeaveBuiltinFrame(rsi, rdi, r8); 1890 } 1891 __ bind(&done_convert); 1892 } 1893 1894 // 4. Check if new target and constructor differ. 1895 Label drop_frame_and_ret, new_object; 1896 __ cmpp(rdx, rdi); 1897 __ j(not_equal, &new_object); 1898 1899 // 5. Allocate a JSValue wrapper for the number. 1900 __ AllocateJSValue(rax, rdi, rbx, rcx, &new_object); 1901 __ jmp(&drop_frame_and_ret, Label::kNear); 1902 1903 // 6. Fallback to the runtime to create new object. 1904 __ bind(&new_object); 1905 { 1906 FrameScope scope(masm, StackFrame::MANUAL); 1907 __ EnterBuiltinFrame(rsi, rdi, r8); 1908 __ Push(rbx); // the first argument 1909 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(), 1910 RelocInfo::CODE_TARGET); 1911 __ Pop(FieldOperand(rax, JSValue::kValueOffset)); 1912 __ LeaveBuiltinFrame(rsi, rdi, r8); 1913 } 1914 1915 __ bind(&drop_frame_and_ret); 1916 { 1917 // Drop all arguments including the receiver. 1918 __ PopReturnAddressTo(rcx); 1919 __ SmiToInteger32(r8, r8); 1920 __ leap(rsp, Operand(rsp, r8, times_pointer_size, kPointerSize)); 1921 __ PushReturnAddressFrom(rcx); 1922 __ Ret(); 1923 } 1924 } 1925 1926 // static 1927 void Builtins::Generate_StringConstructor(MacroAssembler* masm) { 1928 // ----------- S t a t e ------------- 1929 // -- rax : number of arguments 1930 // -- rdi : constructor function 1931 // -- rsi : context 1932 // -- rsp[0] : return address 1933 // -- rsp[(argc - n) * 8] : arg[n] (zero-based) 1934 // -- rsp[(argc + 1) * 8] : receiver 1935 // ----------------------------------- 1936 1937 // 1. Load the first argument into rax. 1938 Label no_arguments; 1939 { 1940 StackArgumentsAccessor args(rsp, rax); 1941 __ Integer32ToSmi(r8, rax); // Store argc in r8. 1942 __ testp(rax, rax); 1943 __ j(zero, &no_arguments, Label::kNear); 1944 __ movp(rax, args.GetArgumentOperand(1)); 1945 } 1946 1947 // 2a. At least one argument, return rax if it's a string, otherwise 1948 // dispatch to appropriate conversion. 1949 Label drop_frame_and_ret, to_string, symbol_descriptive_string; 1950 { 1951 __ JumpIfSmi(rax, &to_string, Label::kNear); 1952 STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE); 1953 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx); 1954 __ j(above, &to_string, Label::kNear); 1955 __ j(equal, &symbol_descriptive_string, Label::kNear); 1956 __ jmp(&drop_frame_and_ret, Label::kNear); 1957 } 1958 1959 // 2b. No arguments, return the empty string (and pop the receiver). 1960 __ bind(&no_arguments); 1961 { 1962 __ LoadRoot(rax, Heap::kempty_stringRootIndex); 1963 __ ret(1 * kPointerSize); 1964 } 1965 1966 // 3a. Convert rax to a string. 1967 __ bind(&to_string); 1968 { 1969 FrameScope scope(masm, StackFrame::MANUAL); 1970 __ EnterBuiltinFrame(rsi, rdi, r8); 1971 __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET); 1972 __ LeaveBuiltinFrame(rsi, rdi, r8); 1973 } 1974 __ jmp(&drop_frame_and_ret, Label::kNear); 1975 1976 // 3b. Convert symbol in rax to a string. 1977 __ bind(&symbol_descriptive_string); 1978 { 1979 __ PopReturnAddressTo(rcx); 1980 __ SmiToInteger32(r8, r8); 1981 __ leap(rsp, Operand(rsp, r8, times_pointer_size, kPointerSize)); 1982 __ Push(rax); 1983 __ PushReturnAddressFrom(rcx); 1984 __ TailCallRuntime(Runtime::kSymbolDescriptiveString); 1985 } 1986 1987 __ bind(&drop_frame_and_ret); 1988 { 1989 // Drop all arguments including the receiver. 1990 __ PopReturnAddressTo(rcx); 1991 __ SmiToInteger32(r8, r8); 1992 __ leap(rsp, Operand(rsp, r8, times_pointer_size, kPointerSize)); 1993 __ PushReturnAddressFrom(rcx); 1994 __ Ret(); 1995 } 1996 } 1997 1998 // static 1999 void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) { 2000 // ----------- S t a t e ------------- 2001 // -- rax : number of arguments 2002 // -- rdi : constructor function 2003 // -- rdx : new target 2004 // -- rsi : context 2005 // -- rsp[0] : return address 2006 // -- rsp[(argc - n) * 8] : arg[n] (zero-based) 2007 // -- rsp[(argc + 1) * 8] : receiver 2008 // ----------------------------------- 2009 2010 // 1. Make sure we operate in the context of the called function. 2011 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 2012 2013 // Store argc in r8. 2014 __ Integer32ToSmi(r8, rax); 2015 2016 // 2. Load the first argument into rbx. 2017 { 2018 StackArgumentsAccessor args(rsp, rax); 2019 Label no_arguments, done; 2020 __ testp(rax, rax); 2021 __ j(zero, &no_arguments, Label::kNear); 2022 __ movp(rbx, args.GetArgumentOperand(1)); 2023 __ jmp(&done, Label::kNear); 2024 __ bind(&no_arguments); 2025 __ LoadRoot(rbx, Heap::kempty_stringRootIndex); 2026 __ bind(&done); 2027 } 2028 2029 // 3. Make sure rbx is a string. 2030 { 2031 Label convert, done_convert; 2032 __ JumpIfSmi(rbx, &convert, Label::kNear); 2033 __ CmpObjectType(rbx, FIRST_NONSTRING_TYPE, rcx); 2034 __ j(below, &done_convert); 2035 __ bind(&convert); 2036 { 2037 FrameScope scope(masm, StackFrame::MANUAL); 2038 __ EnterBuiltinFrame(rsi, rdi, r8); 2039 __ Push(rdx); 2040 __ Move(rax, rbx); 2041 __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET); 2042 __ Move(rbx, rax); 2043 __ Pop(rdx); 2044 __ LeaveBuiltinFrame(rsi, rdi, r8); 2045 } 2046 __ bind(&done_convert); 2047 } 2048 2049 // 4. Check if new target and constructor differ. 2050 Label drop_frame_and_ret, new_object; 2051 __ cmpp(rdx, rdi); 2052 __ j(not_equal, &new_object); 2053 2054 // 5. Allocate a JSValue wrapper for the string. 2055 __ AllocateJSValue(rax, rdi, rbx, rcx, &new_object); 2056 __ jmp(&drop_frame_and_ret, Label::kNear); 2057 2058 // 6. Fallback to the runtime to create new object. 2059 __ bind(&new_object); 2060 { 2061 FrameScope scope(masm, StackFrame::MANUAL); 2062 __ EnterBuiltinFrame(rsi, rdi, r8); 2063 __ Push(rbx); // the first argument 2064 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(), 2065 RelocInfo::CODE_TARGET); 2066 __ Pop(FieldOperand(rax, JSValue::kValueOffset)); 2067 __ LeaveBuiltinFrame(rsi, rdi, r8); 2068 } 2069 2070 __ bind(&drop_frame_and_ret); 2071 { 2072 // Drop all arguments including the receiver. 2073 __ PopReturnAddressTo(rcx); 2074 __ SmiToInteger32(r8, r8); 2075 __ leap(rsp, Operand(rsp, r8, times_pointer_size, kPointerSize)); 2076 __ PushReturnAddressFrom(rcx); 2077 __ Ret(); 2078 } 2079 } 2080 2081 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) { 2082 __ pushq(rbp); 2083 __ movp(rbp, rsp); 2084 2085 // Store the arguments adaptor context sentinel. 2086 __ Push(Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); 2087 2088 // Push the function on the stack. 2089 __ Push(rdi); 2090 2091 // Preserve the number of arguments on the stack. Must preserve rax, 2092 // rbx and rcx because these registers are used when copying the 2093 // arguments and the receiver. 2094 __ Integer32ToSmi(r8, rax); 2095 __ Push(r8); 2096 } 2097 2098 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { 2099 // Retrieve the number of arguments from the stack. Number is a Smi. 2100 __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset)); 2101 2102 // Leave the frame. 2103 __ movp(rsp, rbp); 2104 __ popq(rbp); 2105 2106 // Remove caller arguments from the stack. 2107 __ PopReturnAddressTo(rcx); 2108 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2); 2109 __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize)); 2110 __ PushReturnAddressFrom(rcx); 2111 } 2112 2113 // static 2114 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) { 2115 // ----------- S t a t e ------------- 2116 // -- rdx : requested object size (untagged) 2117 // -- rsp[0] : return address 2118 // ----------------------------------- 2119 __ Integer32ToSmi(rdx, rdx); 2120 __ PopReturnAddressTo(rcx); 2121 __ Push(rdx); 2122 __ PushReturnAddressFrom(rcx); 2123 __ Move(rsi, Smi::kZero); 2124 __ TailCallRuntime(Runtime::kAllocateInNewSpace); 2125 } 2126 2127 // static 2128 void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) { 2129 // ----------- S t a t e ------------- 2130 // -- rdx : requested object size (untagged) 2131 // -- rsp[0] : return address 2132 // ----------------------------------- 2133 __ Integer32ToSmi(rdx, rdx); 2134 __ PopReturnAddressTo(rcx); 2135 __ Push(rdx); 2136 __ Push(Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE))); 2137 __ PushReturnAddressFrom(rcx); 2138 __ Move(rsi, Smi::kZero); 2139 __ TailCallRuntime(Runtime::kAllocateInTargetSpace); 2140 } 2141 2142 // static 2143 void Builtins::Generate_Abort(MacroAssembler* masm) { 2144 // ----------- S t a t e ------------- 2145 // -- rdx : message_id as Smi 2146 // -- rsp[0] : return address 2147 // ----------------------------------- 2148 __ PopReturnAddressTo(rcx); 2149 __ Push(rdx); 2150 __ PushReturnAddressFrom(rcx); 2151 __ Move(rsi, Smi::kZero); 2152 __ TailCallRuntime(Runtime::kAbort); 2153 } 2154 2155 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { 2156 // ----------- S t a t e ------------- 2157 // -- rax : actual number of arguments 2158 // -- rbx : expected number of arguments 2159 // -- rdx : new target (passed through to callee) 2160 // -- rdi : function (passed through to callee) 2161 // ----------------------------------- 2162 2163 Label invoke, dont_adapt_arguments, stack_overflow; 2164 Counters* counters = masm->isolate()->counters(); 2165 __ IncrementCounter(counters->arguments_adaptors(), 1); 2166 2167 Label enough, too_few; 2168 __ cmpp(rax, rbx); 2169 __ j(less, &too_few); 2170 __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel)); 2171 __ j(equal, &dont_adapt_arguments); 2172 2173 { // Enough parameters: Actual >= expected. 2174 __ bind(&enough); 2175 EnterArgumentsAdaptorFrame(masm); 2176 // The registers rcx and r8 will be modified. The register rbx is only read. 2177 Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow); 2178 2179 // Copy receiver and all expected arguments. 2180 const int offset = StandardFrameConstants::kCallerSPOffset; 2181 __ leap(rax, Operand(rbp, rax, times_pointer_size, offset)); 2182 __ Set(r8, -1); // account for receiver 2183 2184 Label copy; 2185 __ bind(©); 2186 __ incp(r8); 2187 __ Push(Operand(rax, 0)); 2188 __ subp(rax, Immediate(kPointerSize)); 2189 __ cmpp(r8, rbx); 2190 __ j(less, ©); 2191 __ jmp(&invoke); 2192 } 2193 2194 { // Too few parameters: Actual < expected. 2195 __ bind(&too_few); 2196 2197 EnterArgumentsAdaptorFrame(masm); 2198 // The registers rcx and r8 will be modified. The register rbx is only read. 2199 Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow); 2200 2201 // Copy receiver and all actual arguments. 2202 const int offset = StandardFrameConstants::kCallerSPOffset; 2203 __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset)); 2204 __ Set(r8, -1); // account for receiver 2205 2206 Label copy; 2207 __ bind(©); 2208 __ incp(r8); 2209 __ Push(Operand(rdi, 0)); 2210 __ subp(rdi, Immediate(kPointerSize)); 2211 __ cmpp(r8, rax); 2212 __ j(less, ©); 2213 2214 // Fill remaining expected arguments with undefined values. 2215 Label fill; 2216 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex); 2217 __ bind(&fill); 2218 __ incp(r8); 2219 __ Push(kScratchRegister); 2220 __ cmpp(r8, rbx); 2221 __ j(less, &fill); 2222 2223 // Restore function pointer. 2224 __ movp(rdi, Operand(rbp, ArgumentsAdaptorFrameConstants::kFunctionOffset)); 2225 } 2226 2227 // Call the entry point. 2228 __ bind(&invoke); 2229 __ movp(rax, rbx); 2230 // rax : expected number of arguments 2231 // rdx : new target (passed through to callee) 2232 // rdi : function (passed through to callee) 2233 __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); 2234 __ call(rcx); 2235 2236 // Store offset of return address for deoptimizer. 2237 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); 2238 2239 // Leave frame and return. 2240 LeaveArgumentsAdaptorFrame(masm); 2241 __ ret(0); 2242 2243 // ------------------------------------------- 2244 // Dont adapt arguments. 2245 // ------------------------------------------- 2246 __ bind(&dont_adapt_arguments); 2247 __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); 2248 __ jmp(rcx); 2249 2250 __ bind(&stack_overflow); 2251 { 2252 FrameScope frame(masm, StackFrame::MANUAL); 2253 __ CallRuntime(Runtime::kThrowStackOverflow); 2254 __ int3(); 2255 } 2256 } 2257 2258 // static 2259 void Builtins::Generate_Apply(MacroAssembler* masm) { 2260 // ----------- S t a t e ------------- 2261 // -- rax : argumentsList 2262 // -- rdi : target 2263 // -- rdx : new.target (checked to be constructor or undefined) 2264 // -- rsp[0] : return address. 2265 // -- rsp[8] : thisArgument 2266 // ----------------------------------- 2267 2268 // Create the list of arguments from the array-like argumentsList. 2269 { 2270 Label create_arguments, create_array, create_holey_array, create_runtime, 2271 done_create; 2272 __ JumpIfSmi(rax, &create_runtime); 2273 2274 // Load the map of argumentsList into rcx. 2275 __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset)); 2276 2277 // Load native context into rbx. 2278 __ movp(rbx, NativeContextOperand()); 2279 2280 // Check if argumentsList is an (unmodified) arguments object. 2281 __ cmpp(rcx, ContextOperand(rbx, Context::SLOPPY_ARGUMENTS_MAP_INDEX)); 2282 __ j(equal, &create_arguments); 2283 __ cmpp(rcx, ContextOperand(rbx, Context::STRICT_ARGUMENTS_MAP_INDEX)); 2284 __ j(equal, &create_arguments); 2285 2286 // Check if argumentsList is a fast JSArray. 2287 __ CmpInstanceType(rcx, JS_ARRAY_TYPE); 2288 __ j(equal, &create_array); 2289 2290 // Ask the runtime to create the list (actually a FixedArray). 2291 __ bind(&create_runtime); 2292 { 2293 FrameScope scope(masm, StackFrame::INTERNAL); 2294 __ Push(rdi); 2295 __ Push(rdx); 2296 __ Push(rax); 2297 __ CallRuntime(Runtime::kCreateListFromArrayLike); 2298 __ Pop(rdx); 2299 __ Pop(rdi); 2300 __ SmiToInteger32(rbx, FieldOperand(rax, FixedArray::kLengthOffset)); 2301 } 2302 __ jmp(&done_create); 2303 2304 // Try to create the list from an arguments object. 2305 __ bind(&create_arguments); 2306 __ movp(rbx, FieldOperand(rax, JSArgumentsObject::kLengthOffset)); 2307 __ movp(rcx, FieldOperand(rax, JSObject::kElementsOffset)); 2308 __ cmpp(rbx, FieldOperand(rcx, FixedArray::kLengthOffset)); 2309 __ j(not_equal, &create_runtime); 2310 __ SmiToInteger32(rbx, rbx); 2311 __ movp(rax, rcx); 2312 __ jmp(&done_create); 2313 2314 __ bind(&create_holey_array); 2315 // For holey JSArrays we need to check that the array prototype chain 2316 // protector is intact and our prototype is the Array.prototype actually. 2317 __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset)); 2318 __ movp(rcx, FieldOperand(rcx, Map::kPrototypeOffset)); 2319 __ cmpp(rcx, ContextOperand(rbx, Context::INITIAL_ARRAY_PROTOTYPE_INDEX)); 2320 __ j(not_equal, &create_runtime); 2321 __ LoadRoot(rcx, Heap::kArrayProtectorRootIndex); 2322 __ Cmp(FieldOperand(rcx, PropertyCell::kValueOffset), 2323 Smi::FromInt(Isolate::kProtectorValid)); 2324 __ j(not_equal, &create_runtime); 2325 __ SmiToInteger32(rbx, FieldOperand(rax, JSArray::kLengthOffset)); 2326 __ movp(rax, FieldOperand(rax, JSArray::kElementsOffset)); 2327 __ jmp(&done_create); 2328 2329 // Try to create the list from a JSArray object. 2330 __ bind(&create_array); 2331 __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset)); 2332 __ DecodeField<Map::ElementsKindBits>(rcx); 2333 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); 2334 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); 2335 STATIC_ASSERT(FAST_ELEMENTS == 2); 2336 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3); 2337 __ cmpl(rcx, Immediate(FAST_HOLEY_SMI_ELEMENTS)); 2338 __ j(equal, &create_holey_array); 2339 __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS)); 2340 __ j(equal, &create_holey_array); 2341 __ j(above, &create_runtime); 2342 __ SmiToInteger32(rbx, FieldOperand(rax, JSArray::kLengthOffset)); 2343 __ movp(rax, FieldOperand(rax, JSArray::kElementsOffset)); 2344 2345 __ bind(&done_create); 2346 } 2347 2348 // Check for stack overflow. 2349 { 2350 // Check the stack for overflow. We are not trying to catch interruptions 2351 // (i.e. debug break and preemption) here, so check the "real stack limit". 2352 Label done; 2353 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex); 2354 __ movp(rcx, rsp); 2355 // Make rcx the space we have left. The stack might already be overflowed 2356 // here which will cause rcx to become negative. 2357 __ subp(rcx, kScratchRegister); 2358 __ sarp(rcx, Immediate(kPointerSizeLog2)); 2359 // Check if the arguments will overflow the stack. 2360 __ cmpp(rcx, rbx); 2361 __ j(greater, &done, Label::kNear); // Signed comparison. 2362 __ TailCallRuntime(Runtime::kThrowStackOverflow); 2363 __ bind(&done); 2364 } 2365 2366 // ----------- S t a t e ------------- 2367 // -- rdi : target 2368 // -- rax : args (a FixedArray built from argumentsList) 2369 // -- rbx : len (number of elements to push from args) 2370 // -- rdx : new.target (checked to be constructor or undefined) 2371 // -- rsp[0] : return address. 2372 // -- rsp[8] : thisArgument 2373 // ----------------------------------- 2374 2375 // Push arguments onto the stack (thisArgument is already on the stack). 2376 { 2377 __ PopReturnAddressTo(r8); 2378 __ Set(rcx, 0); 2379 Label done, push, loop; 2380 __ bind(&loop); 2381 __ cmpl(rcx, rbx); 2382 __ j(equal, &done, Label::kNear); 2383 // Turn the hole into undefined as we go. 2384 __ movp(r9, FieldOperand(rax, rcx, times_pointer_size, 2385 FixedArray::kHeaderSize)); 2386 __ CompareRoot(r9, Heap::kTheHoleValueRootIndex); 2387 __ j(not_equal, &push, Label::kNear); 2388 __ LoadRoot(r9, Heap::kUndefinedValueRootIndex); 2389 __ bind(&push); 2390 __ Push(r9); 2391 __ incl(rcx); 2392 __ jmp(&loop); 2393 __ bind(&done); 2394 __ PushReturnAddressFrom(r8); 2395 __ Move(rax, rcx); 2396 } 2397 2398 // Dispatch to Call or Construct depending on whether new.target is undefined. 2399 { 2400 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex); 2401 __ j(equal, masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); 2402 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); 2403 } 2404 } 2405 2406 // static 2407 void Builtins::Generate_CallForwardVarargs(MacroAssembler* masm, 2408 Handle<Code> code) { 2409 // ----------- S t a t e ------------- 2410 // -- rdi : the target to call (can be any Object) 2411 // -- rcx : start index (to support rest parameters) 2412 // -- rsp[0] : return address. 2413 // -- rsp[8] : thisArgument 2414 // ----------------------------------- 2415 2416 // Check if we have an arguments adaptor frame below the function frame. 2417 Label arguments_adaptor, arguments_done; 2418 __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 2419 __ cmpp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset), 2420 Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); 2421 __ j(equal, &arguments_adaptor, Label::kNear); 2422 { 2423 __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); 2424 __ movp(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset)); 2425 __ LoadSharedFunctionInfoSpecialField( 2426 rax, rax, SharedFunctionInfo::kFormalParameterCountOffset); 2427 __ movp(rbx, rbp); 2428 } 2429 __ jmp(&arguments_done, Label::kNear); 2430 __ bind(&arguments_adaptor); 2431 { 2432 __ SmiToInteger32( 2433 rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset)); 2434 } 2435 __ bind(&arguments_done); 2436 2437 Label stack_empty, stack_done, stack_overflow; 2438 __ subl(rax, rcx); 2439 __ j(less_equal, &stack_empty); 2440 { 2441 // Check for stack overflow. 2442 Generate_StackOverflowCheck(masm, rax, rcx, &stack_overflow, Label::kNear); 2443 2444 // Forward the arguments from the caller frame. 2445 { 2446 Label loop; 2447 __ movl(rcx, rax); 2448 __ Pop(r8); 2449 __ bind(&loop); 2450 { 2451 StackArgumentsAccessor args(rbx, rcx, ARGUMENTS_DONT_CONTAIN_RECEIVER); 2452 __ Push(args.GetArgumentOperand(0)); 2453 __ decl(rcx); 2454 __ j(not_zero, &loop); 2455 } 2456 __ Push(r8); 2457 } 2458 } 2459 __ jmp(&stack_done, Label::kNear); 2460 __ bind(&stack_overflow); 2461 __ TailCallRuntime(Runtime::kThrowStackOverflow); 2462 __ bind(&stack_empty); 2463 { 2464 // We just pass the receiver, which is already on the stack. 2465 __ Set(rax, 0); 2466 } 2467 __ bind(&stack_done); 2468 2469 __ Jump(code, RelocInfo::CODE_TARGET); 2470 } 2471 2472 namespace { 2473 2474 // Drops top JavaScript frame and an arguments adaptor frame below it (if 2475 // present) preserving all the arguments prepared for current call. 2476 // Does nothing if debugger is currently active. 2477 // ES6 14.6.3. PrepareForTailCall 2478 // 2479 // Stack structure for the function g() tail calling f(): 2480 // 2481 // ------- Caller frame: ------- 2482 // | ... 2483 // | g()'s arg M 2484 // | ... 2485 // | g()'s arg 1 2486 // | g()'s receiver arg 2487 // | g()'s caller pc 2488 // ------- g()'s frame: ------- 2489 // | g()'s caller fp <- fp 2490 // | g()'s context 2491 // | function pointer: g 2492 // | ------------------------- 2493 // | ... 2494 // | ... 2495 // | f()'s arg N 2496 // | ... 2497 // | f()'s arg 1 2498 // | f()'s receiver arg 2499 // | f()'s caller pc <- sp 2500 // ---------------------- 2501 // 2502 void PrepareForTailCall(MacroAssembler* masm, Register args_reg, 2503 Register scratch1, Register scratch2, 2504 Register scratch3) { 2505 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3)); 2506 Comment cmnt(masm, "[ PrepareForTailCall"); 2507 2508 // Prepare for tail call only if ES2015 tail call elimination is active. 2509 Label done; 2510 ExternalReference is_tail_call_elimination_enabled = 2511 ExternalReference::is_tail_call_elimination_enabled_address( 2512 masm->isolate()); 2513 __ Move(kScratchRegister, is_tail_call_elimination_enabled); 2514 __ cmpb(Operand(kScratchRegister, 0), Immediate(0)); 2515 __ j(equal, &done); 2516 2517 // Drop possible interpreter handler/stub frame. 2518 { 2519 Label no_interpreter_frame; 2520 __ cmpp(Operand(rbp, CommonFrameConstants::kContextOrFrameTypeOffset), 2521 Immediate(StackFrame::TypeToMarker(StackFrame::STUB))); 2522 __ j(not_equal, &no_interpreter_frame, Label::kNear); 2523 __ movp(rbp, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 2524 __ bind(&no_interpreter_frame); 2525 } 2526 2527 // Check if next frame is an arguments adaptor frame. 2528 Register caller_args_count_reg = scratch1; 2529 Label no_arguments_adaptor, formal_parameter_count_loaded; 2530 __ movp(scratch2, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 2531 __ cmpp(Operand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset), 2532 Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); 2533 __ j(not_equal, &no_arguments_adaptor, Label::kNear); 2534 2535 // Drop current frame and load arguments count from arguments adaptor frame. 2536 __ movp(rbp, scratch2); 2537 __ SmiToInteger32( 2538 caller_args_count_reg, 2539 Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset)); 2540 __ jmp(&formal_parameter_count_loaded, Label::kNear); 2541 2542 __ bind(&no_arguments_adaptor); 2543 // Load caller's formal parameter count 2544 __ movp(scratch1, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); 2545 __ movp(scratch1, 2546 FieldOperand(scratch1, JSFunction::kSharedFunctionInfoOffset)); 2547 __ LoadSharedFunctionInfoSpecialField( 2548 caller_args_count_reg, scratch1, 2549 SharedFunctionInfo::kFormalParameterCountOffset); 2550 2551 __ bind(&formal_parameter_count_loaded); 2552 2553 ParameterCount callee_args_count(args_reg); 2554 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2, 2555 scratch3, ReturnAddressState::kOnStack); 2556 __ bind(&done); 2557 } 2558 } // namespace 2559 2560 // static 2561 void Builtins::Generate_CallFunction(MacroAssembler* masm, 2562 ConvertReceiverMode mode, 2563 TailCallMode tail_call_mode) { 2564 // ----------- S t a t e ------------- 2565 // -- rax : the number of arguments (not including the receiver) 2566 // -- rdi : the function to call (checked to be a JSFunction) 2567 // ----------------------------------- 2568 StackArgumentsAccessor args(rsp, rax); 2569 __ AssertFunction(rdi); 2570 2571 // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) 2572 // Check that the function is not a "classConstructor". 2573 Label class_constructor; 2574 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 2575 __ testb(FieldOperand(rdx, SharedFunctionInfo::kFunctionKindByteOffset), 2576 Immediate(SharedFunctionInfo::kClassConstructorBitsWithinByte)); 2577 __ j(not_zero, &class_constructor); 2578 2579 // ----------- S t a t e ------------- 2580 // -- rax : the number of arguments (not including the receiver) 2581 // -- rdx : the shared function info. 2582 // -- rdi : the function to call (checked to be a JSFunction) 2583 // ----------------------------------- 2584 2585 // Enter the context of the function; ToObject has to run in the function 2586 // context, and we also need to take the global proxy from the function 2587 // context in case of conversion. 2588 STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset == 2589 SharedFunctionInfo::kStrictModeByteOffset); 2590 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 2591 // We need to convert the receiver for non-native sloppy mode functions. 2592 Label done_convert; 2593 __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset), 2594 Immediate((1 << SharedFunctionInfo::kNativeBitWithinByte) | 2595 (1 << SharedFunctionInfo::kStrictModeBitWithinByte))); 2596 __ j(not_zero, &done_convert); 2597 { 2598 // ----------- S t a t e ------------- 2599 // -- rax : the number of arguments (not including the receiver) 2600 // -- rdx : the shared function info. 2601 // -- rdi : the function to call (checked to be a JSFunction) 2602 // -- rsi : the function context. 2603 // ----------------------------------- 2604 2605 if (mode == ConvertReceiverMode::kNullOrUndefined) { 2606 // Patch receiver to global proxy. 2607 __ LoadGlobalProxy(rcx); 2608 } else { 2609 Label convert_to_object, convert_receiver; 2610 __ movp(rcx, args.GetReceiverOperand()); 2611 __ JumpIfSmi(rcx, &convert_to_object, Label::kNear); 2612 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); 2613 __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx); 2614 __ j(above_equal, &done_convert); 2615 if (mode != ConvertReceiverMode::kNotNullOrUndefined) { 2616 Label convert_global_proxy; 2617 __ JumpIfRoot(rcx, Heap::kUndefinedValueRootIndex, 2618 &convert_global_proxy, Label::kNear); 2619 __ JumpIfNotRoot(rcx, Heap::kNullValueRootIndex, &convert_to_object, 2620 Label::kNear); 2621 __ bind(&convert_global_proxy); 2622 { 2623 // Patch receiver to global proxy. 2624 __ LoadGlobalProxy(rcx); 2625 } 2626 __ jmp(&convert_receiver); 2627 } 2628 __ bind(&convert_to_object); 2629 { 2630 // Convert receiver using ToObject. 2631 // TODO(bmeurer): Inline the allocation here to avoid building the frame 2632 // in the fast case? (fall back to AllocateInNewSpace?) 2633 FrameScope scope(masm, StackFrame::INTERNAL); 2634 __ Integer32ToSmi(rax, rax); 2635 __ Push(rax); 2636 __ Push(rdi); 2637 __ movp(rax, rcx); 2638 __ Push(rsi); 2639 __ Call(masm->isolate()->builtins()->ToObject(), 2640 RelocInfo::CODE_TARGET); 2641 __ Pop(rsi); 2642 __ movp(rcx, rax); 2643 __ Pop(rdi); 2644 __ Pop(rax); 2645 __ SmiToInteger32(rax, rax); 2646 } 2647 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 2648 __ bind(&convert_receiver); 2649 } 2650 __ movp(args.GetReceiverOperand(), rcx); 2651 } 2652 __ bind(&done_convert); 2653 2654 // ----------- S t a t e ------------- 2655 // -- rax : the number of arguments (not including the receiver) 2656 // -- rdx : the shared function info. 2657 // -- rdi : the function to call (checked to be a JSFunction) 2658 // -- rsi : the function context. 2659 // ----------------------------------- 2660 2661 if (tail_call_mode == TailCallMode::kAllow) { 2662 PrepareForTailCall(masm, rax, rbx, rcx, r8); 2663 } 2664 2665 __ LoadSharedFunctionInfoSpecialField( 2666 rbx, rdx, SharedFunctionInfo::kFormalParameterCountOffset); 2667 ParameterCount actual(rax); 2668 ParameterCount expected(rbx); 2669 2670 __ InvokeFunctionCode(rdi, no_reg, expected, actual, JUMP_FUNCTION, 2671 CheckDebugStepCallWrapper()); 2672 2673 // The function is a "classConstructor", need to raise an exception. 2674 __ bind(&class_constructor); 2675 { 2676 FrameScope frame(masm, StackFrame::INTERNAL); 2677 __ Push(rdi); 2678 __ CallRuntime(Runtime::kThrowConstructorNonCallableError); 2679 } 2680 } 2681 2682 namespace { 2683 2684 void Generate_PushBoundArguments(MacroAssembler* masm) { 2685 // ----------- S t a t e ------------- 2686 // -- rax : the number of arguments (not including the receiver) 2687 // -- rdx : new.target (only in case of [[Construct]]) 2688 // -- rdi : target (checked to be a JSBoundFunction) 2689 // ----------------------------------- 2690 2691 // Load [[BoundArguments]] into rcx and length of that into rbx. 2692 Label no_bound_arguments; 2693 __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset)); 2694 __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset)); 2695 __ testl(rbx, rbx); 2696 __ j(zero, &no_bound_arguments); 2697 { 2698 // ----------- S t a t e ------------- 2699 // -- rax : the number of arguments (not including the receiver) 2700 // -- rdx : new.target (only in case of [[Construct]]) 2701 // -- rdi : target (checked to be a JSBoundFunction) 2702 // -- rcx : the [[BoundArguments]] (implemented as FixedArray) 2703 // -- rbx : the number of [[BoundArguments]] (checked to be non-zero) 2704 // ----------------------------------- 2705 2706 // Reserve stack space for the [[BoundArguments]]. 2707 { 2708 Label done; 2709 __ leap(kScratchRegister, Operand(rbx, times_pointer_size, 0)); 2710 __ subp(rsp, kScratchRegister); 2711 // Check the stack for overflow. We are not trying to catch interruptions 2712 // (i.e. debug break and preemption) here, so check the "real stack 2713 // limit". 2714 __ CompareRoot(rsp, Heap::kRealStackLimitRootIndex); 2715 __ j(greater, &done, Label::kNear); // Signed comparison. 2716 // Restore the stack pointer. 2717 __ leap(rsp, Operand(rsp, rbx, times_pointer_size, 0)); 2718 { 2719 FrameScope scope(masm, StackFrame::MANUAL); 2720 __ EnterFrame(StackFrame::INTERNAL); 2721 __ CallRuntime(Runtime::kThrowStackOverflow); 2722 } 2723 __ bind(&done); 2724 } 2725 2726 // Adjust effective number of arguments to include return address. 2727 __ incl(rax); 2728 2729 // Relocate arguments and return address down the stack. 2730 { 2731 Label loop; 2732 __ Set(rcx, 0); 2733 __ leap(rbx, Operand(rsp, rbx, times_pointer_size, 0)); 2734 __ bind(&loop); 2735 __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0)); 2736 __ movp(Operand(rsp, rcx, times_pointer_size, 0), kScratchRegister); 2737 __ incl(rcx); 2738 __ cmpl(rcx, rax); 2739 __ j(less, &loop); 2740 } 2741 2742 // Copy [[BoundArguments]] to the stack (below the arguments). 2743 { 2744 Label loop; 2745 __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset)); 2746 __ SmiToInteger32(rbx, FieldOperand(rcx, FixedArray::kLengthOffset)); 2747 __ bind(&loop); 2748 __ decl(rbx); 2749 __ movp(kScratchRegister, FieldOperand(rcx, rbx, times_pointer_size, 2750 FixedArray::kHeaderSize)); 2751 __ movp(Operand(rsp, rax, times_pointer_size, 0), kScratchRegister); 2752 __ leal(rax, Operand(rax, 1)); 2753 __ j(greater, &loop); 2754 } 2755 2756 // Adjust effective number of arguments (rax contains the number of 2757 // arguments from the call plus return address plus the number of 2758 // [[BoundArguments]]), so we need to subtract one for the return address. 2759 __ decl(rax); 2760 } 2761 __ bind(&no_bound_arguments); 2762 } 2763 2764 } // namespace 2765 2766 // static 2767 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm, 2768 TailCallMode tail_call_mode) { 2769 // ----------- S t a t e ------------- 2770 // -- rax : the number of arguments (not including the receiver) 2771 // -- rdi : the function to call (checked to be a JSBoundFunction) 2772 // ----------------------------------- 2773 __ AssertBoundFunction(rdi); 2774 2775 if (tail_call_mode == TailCallMode::kAllow) { 2776 PrepareForTailCall(masm, rax, rbx, rcx, r8); 2777 } 2778 2779 // Patch the receiver to [[BoundThis]]. 2780 StackArgumentsAccessor args(rsp, rax); 2781 __ movp(rbx, FieldOperand(rdi, JSBoundFunction::kBoundThisOffset)); 2782 __ movp(args.GetReceiverOperand(), rbx); 2783 2784 // Push the [[BoundArguments]] onto the stack. 2785 Generate_PushBoundArguments(masm); 2786 2787 // Call the [[BoundTargetFunction]] via the Call builtin. 2788 __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset)); 2789 __ Load(rcx, 2790 ExternalReference(Builtins::kCall_ReceiverIsAny, masm->isolate())); 2791 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize)); 2792 __ jmp(rcx); 2793 } 2794 2795 // static 2796 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode, 2797 TailCallMode tail_call_mode) { 2798 // ----------- S t a t e ------------- 2799 // -- rax : the number of arguments (not including the receiver) 2800 // -- rdi : the target to call (can be any Object) 2801 // ----------------------------------- 2802 StackArgumentsAccessor args(rsp, rax); 2803 2804 Label non_callable, non_function, non_smi; 2805 __ JumpIfSmi(rdi, &non_callable); 2806 __ bind(&non_smi); 2807 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 2808 __ j(equal, masm->isolate()->builtins()->CallFunction(mode, tail_call_mode), 2809 RelocInfo::CODE_TARGET); 2810 __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE); 2811 __ j(equal, masm->isolate()->builtins()->CallBoundFunction(tail_call_mode), 2812 RelocInfo::CODE_TARGET); 2813 2814 // Check if target has a [[Call]] internal method. 2815 __ testb(FieldOperand(rcx, Map::kBitFieldOffset), 2816 Immediate(1 << Map::kIsCallable)); 2817 __ j(zero, &non_callable); 2818 2819 __ CmpInstanceType(rcx, JS_PROXY_TYPE); 2820 __ j(not_equal, &non_function); 2821 2822 // 0. Prepare for tail call if necessary. 2823 if (tail_call_mode == TailCallMode::kAllow) { 2824 PrepareForTailCall(masm, rax, rbx, rcx, r8); 2825 } 2826 2827 // 1. Runtime fallback for Proxy [[Call]]. 2828 __ PopReturnAddressTo(kScratchRegister); 2829 __ Push(rdi); 2830 __ PushReturnAddressFrom(kScratchRegister); 2831 // Increase the arguments size to include the pushed function and the 2832 // existing receiver on the stack. 2833 __ addp(rax, Immediate(2)); 2834 // Tail-call to the runtime. 2835 __ JumpToExternalReference( 2836 ExternalReference(Runtime::kJSProxyCall, masm->isolate())); 2837 2838 // 2. Call to something else, which might have a [[Call]] internal method (if 2839 // not we raise an exception). 2840 __ bind(&non_function); 2841 // Overwrite the original receiver with the (original) target. 2842 __ movp(args.GetReceiverOperand(), rdi); 2843 // Let the "call_as_function_delegate" take care of the rest. 2844 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, rdi); 2845 __ Jump(masm->isolate()->builtins()->CallFunction( 2846 ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode), 2847 RelocInfo::CODE_TARGET); 2848 2849 // 3. Call to something that is not callable. 2850 __ bind(&non_callable); 2851 { 2852 FrameScope scope(masm, StackFrame::INTERNAL); 2853 __ Push(rdi); 2854 __ CallRuntime(Runtime::kThrowCalledNonCallable); 2855 } 2856 } 2857 2858 static void CheckSpreadAndPushToStack(MacroAssembler* masm) { 2859 Label runtime_call, push_args; 2860 // Load the spread argument into rbx. 2861 __ movp(rbx, Operand(rsp, kPointerSize)); 2862 __ JumpIfSmi(rbx, &runtime_call); 2863 // Load the map of the spread into r15. 2864 __ movp(r15, FieldOperand(rbx, HeapObject::kMapOffset)); 2865 // Load native context into r14. 2866 __ movp(r14, NativeContextOperand()); 2867 2868 // Check that the spread is an array. 2869 __ CmpInstanceType(r15, JS_ARRAY_TYPE); 2870 __ j(not_equal, &runtime_call); 2871 2872 // Check that we have the original ArrayPrototype. 2873 __ movp(rcx, FieldOperand(r15, Map::kPrototypeOffset)); 2874 __ cmpp(rcx, ContextOperand(r14, Context::INITIAL_ARRAY_PROTOTYPE_INDEX)); 2875 __ j(not_equal, &runtime_call); 2876 2877 // Check that the ArrayPrototype hasn't been modified in a way that would 2878 // affect iteration. 2879 __ LoadRoot(rcx, Heap::kArrayIteratorProtectorRootIndex); 2880 __ Cmp(FieldOperand(rcx, PropertyCell::kValueOffset), 2881 Smi::FromInt(Isolate::kProtectorValid)); 2882 __ j(not_equal, &runtime_call); 2883 2884 // Check that the map of the initial array iterator hasn't changed. 2885 __ movp(rcx, 2886 ContextOperand(r14, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX)); 2887 __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset)); 2888 __ cmpp(rcx, ContextOperand( 2889 r14, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX)); 2890 __ j(not_equal, &runtime_call); 2891 2892 // For FastPacked kinds, iteration will have the same effect as simply 2893 // accessing each property in order. 2894 Label no_protector_check; 2895 __ movzxbp(rcx, FieldOperand(r15, Map::kBitField2Offset)); 2896 __ DecodeField<Map::ElementsKindBits>(rcx); 2897 __ cmpp(rcx, Immediate(FAST_HOLEY_ELEMENTS)); 2898 __ j(above, &runtime_call); 2899 // For non-FastHoley kinds, we can skip the protector check. 2900 __ cmpp(rcx, Immediate(FAST_SMI_ELEMENTS)); 2901 __ j(equal, &no_protector_check); 2902 __ cmpp(rcx, Immediate(FAST_ELEMENTS)); 2903 __ j(equal, &no_protector_check); 2904 // Check the ArrayProtector cell. 2905 __ LoadRoot(rcx, Heap::kArrayProtectorRootIndex); 2906 __ Cmp(FieldOperand(rcx, PropertyCell::kValueOffset), 2907 Smi::FromInt(Isolate::kProtectorValid)); 2908 __ j(not_equal, &runtime_call); 2909 2910 __ bind(&no_protector_check); 2911 // Load the FixedArray backing store, but use the length from the array. 2912 __ SmiToInteger32(r9, FieldOperand(rbx, JSArray::kLengthOffset)); 2913 __ movp(rbx, FieldOperand(rbx, JSArray::kElementsOffset)); 2914 __ jmp(&push_args); 2915 2916 __ bind(&runtime_call); 2917 { 2918 // Call the builtin for the result of the spread. 2919 FrameScope scope(masm, StackFrame::INTERNAL); 2920 __ Push(rdi); // target 2921 __ Push(rdx); // new target 2922 __ Integer32ToSmi(rax, rax); 2923 __ Push(rax); // nargs 2924 __ Push(rbx); 2925 __ CallRuntime(Runtime::kSpreadIterableFixed); 2926 __ movp(rbx, rax); 2927 __ Pop(rax); // nargs 2928 __ SmiToInteger32(rax, rax); 2929 __ Pop(rdx); // new target 2930 __ Pop(rdi); // target 2931 } 2932 2933 { 2934 // Calculate the new nargs including the result of the spread. 2935 __ SmiToInteger32(r9, FieldOperand(rbx, FixedArray::kLengthOffset)); 2936 2937 __ bind(&push_args); 2938 // rax += r9 - 1. Subtract 1 for the spread itself. 2939 __ leap(rax, Operand(rax, r9, times_1, -1)); 2940 } 2941 2942 // Check for stack overflow. 2943 { 2944 // Check the stack for overflow. We are not trying to catch interruptions 2945 // (i.e. debug break and preemption) here, so check the "real stack limit". 2946 Label done; 2947 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex); 2948 __ movp(rcx, rsp); 2949 // Make rcx the space we have left. The stack might already be overflowed 2950 // here which will cause rcx to become negative. 2951 __ subp(rcx, kScratchRegister); 2952 __ sarp(rcx, Immediate(kPointerSizeLog2)); 2953 // Check if the arguments will overflow the stack. 2954 __ cmpp(rcx, r9); 2955 __ j(greater, &done, Label::kNear); // Signed comparison. 2956 __ TailCallRuntime(Runtime::kThrowStackOverflow); 2957 __ bind(&done); 2958 } 2959 2960 // Put the evaluated spread onto the stack as additional arguments. 2961 { 2962 // Pop the return address and spread argument. 2963 __ PopReturnAddressTo(r8); 2964 __ Pop(rcx); 2965 2966 __ Set(rcx, 0); 2967 Label done, push, loop; 2968 __ bind(&loop); 2969 __ cmpl(rcx, r9); 2970 __ j(equal, &done, Label::kNear); 2971 __ movp(kScratchRegister, FieldOperand(rbx, rcx, times_pointer_size, 2972 FixedArray::kHeaderSize)); 2973 __ CompareRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); 2974 __ j(not_equal, &push, Label::kNear); 2975 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex); 2976 __ bind(&push); 2977 __ Push(kScratchRegister); 2978 __ incl(rcx); 2979 __ jmp(&loop); 2980 __ bind(&done); 2981 __ PushReturnAddressFrom(r8); 2982 } 2983 } 2984 2985 // static 2986 void Builtins::Generate_CallWithSpread(MacroAssembler* masm) { 2987 // ----------- S t a t e ------------- 2988 // -- rax : the number of arguments (not including the receiver) 2989 // -- rdi : the target to call (can be any Object) 2990 // ----------------------------------- 2991 2992 // CheckSpreadAndPushToStack will push rdx to save it. 2993 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex); 2994 CheckSpreadAndPushToStack(masm); 2995 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, 2996 TailCallMode::kDisallow), 2997 RelocInfo::CODE_TARGET); 2998 } 2999 3000 // static 3001 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { 3002 // ----------- S t a t e ------------- 3003 // -- rax : the number of arguments (not including the receiver) 3004 // -- rdx : the new target (checked to be a constructor) 3005 // -- rdi : the constructor to call (checked to be a JSFunction) 3006 // ----------------------------------- 3007 __ AssertFunction(rdi); 3008 3009 // Calling convention for function specific ConstructStubs require 3010 // rbx to contain either an AllocationSite or undefined. 3011 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex); 3012 3013 // Tail call to the function-specific construct stub (still in the caller 3014 // context at this point). 3015 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 3016 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset)); 3017 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize)); 3018 __ jmp(rcx); 3019 } 3020 3021 // static 3022 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) { 3023 // ----------- S t a t e ------------- 3024 // -- rax : the number of arguments (not including the receiver) 3025 // -- rdx : the new target (checked to be a constructor) 3026 // -- rdi : the constructor to call (checked to be a JSBoundFunction) 3027 // ----------------------------------- 3028 __ AssertBoundFunction(rdi); 3029 3030 // Push the [[BoundArguments]] onto the stack. 3031 Generate_PushBoundArguments(masm); 3032 3033 // Patch new.target to [[BoundTargetFunction]] if new.target equals target. 3034 { 3035 Label done; 3036 __ cmpp(rdi, rdx); 3037 __ j(not_equal, &done, Label::kNear); 3038 __ movp(rdx, 3039 FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset)); 3040 __ bind(&done); 3041 } 3042 3043 // Construct the [[BoundTargetFunction]] via the Construct builtin. 3044 __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset)); 3045 __ Load(rcx, ExternalReference(Builtins::kConstruct, masm->isolate())); 3046 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize)); 3047 __ jmp(rcx); 3048 } 3049 3050 // static 3051 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) { 3052 // ----------- S t a t e ------------- 3053 // -- rax : the number of arguments (not including the receiver) 3054 // -- rdi : the constructor to call (checked to be a JSProxy) 3055 // -- rdx : the new target (either the same as the constructor or 3056 // the JSFunction on which new was invoked initially) 3057 // ----------------------------------- 3058 3059 // Call into the Runtime for Proxy [[Construct]]. 3060 __ PopReturnAddressTo(kScratchRegister); 3061 __ Push(rdi); 3062 __ Push(rdx); 3063 __ PushReturnAddressFrom(kScratchRegister); 3064 // Include the pushed new_target, constructor and the receiver. 3065 __ addp(rax, Immediate(3)); 3066 __ JumpToExternalReference( 3067 ExternalReference(Runtime::kJSProxyConstruct, masm->isolate())); 3068 } 3069 3070 // static 3071 void Builtins::Generate_Construct(MacroAssembler* masm) { 3072 // ----------- S t a t e ------------- 3073 // -- rax : the number of arguments (not including the receiver) 3074 // -- rdx : the new target (either the same as the constructor or 3075 // the JSFunction on which new was invoked initially) 3076 // -- rdi : the constructor to call (can be any Object) 3077 // ----------------------------------- 3078 StackArgumentsAccessor args(rsp, rax); 3079 3080 // Check if target is a Smi. 3081 Label non_constructor; 3082 __ JumpIfSmi(rdi, &non_constructor, Label::kNear); 3083 3084 // Dispatch based on instance type. 3085 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 3086 __ j(equal, masm->isolate()->builtins()->ConstructFunction(), 3087 RelocInfo::CODE_TARGET); 3088 3089 // Check if target has a [[Construct]] internal method. 3090 __ testb(FieldOperand(rcx, Map::kBitFieldOffset), 3091 Immediate(1 << Map::kIsConstructor)); 3092 __ j(zero, &non_constructor, Label::kNear); 3093 3094 // Only dispatch to bound functions after checking whether they are 3095 // constructors. 3096 __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE); 3097 __ j(equal, masm->isolate()->builtins()->ConstructBoundFunction(), 3098 RelocInfo::CODE_TARGET); 3099 3100 // Only dispatch to proxies after checking whether they are constructors. 3101 __ CmpInstanceType(rcx, JS_PROXY_TYPE); 3102 __ j(equal, masm->isolate()->builtins()->ConstructProxy(), 3103 RelocInfo::CODE_TARGET); 3104 3105 // Called Construct on an exotic Object with a [[Construct]] internal method. 3106 { 3107 // Overwrite the original receiver with the (original) target. 3108 __ movp(args.GetReceiverOperand(), rdi); 3109 // Let the "call_as_constructor_delegate" take care of the rest. 3110 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, rdi); 3111 __ Jump(masm->isolate()->builtins()->CallFunction(), 3112 RelocInfo::CODE_TARGET); 3113 } 3114 3115 // Called Construct on an Object that doesn't have a [[Construct]] internal 3116 // method. 3117 __ bind(&non_constructor); 3118 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(), 3119 RelocInfo::CODE_TARGET); 3120 } 3121 3122 // static 3123 void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) { 3124 // ----------- S t a t e ------------- 3125 // -- rax : the number of arguments (not including the receiver) 3126 // -- rdx : the new target (either the same as the constructor or 3127 // the JSFunction on which new was invoked initially) 3128 // -- rdi : the constructor to call (can be any Object) 3129 // ----------------------------------- 3130 3131 CheckSpreadAndPushToStack(masm); 3132 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); 3133 } 3134 3135 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver, 3136 Register function_template_info, 3137 Register scratch0, Register scratch1, 3138 Register scratch2, 3139 Label* receiver_check_failed) { 3140 Register signature = scratch0; 3141 Register map = scratch1; 3142 Register constructor = scratch2; 3143 3144 // If there is no signature, return the holder. 3145 __ movp(signature, FieldOperand(function_template_info, 3146 FunctionTemplateInfo::kSignatureOffset)); 3147 __ CompareRoot(signature, Heap::kUndefinedValueRootIndex); 3148 Label receiver_check_passed; 3149 __ j(equal, &receiver_check_passed, Label::kNear); 3150 3151 // Walk the prototype chain. 3152 __ movp(map, FieldOperand(receiver, HeapObject::kMapOffset)); 3153 Label prototype_loop_start; 3154 __ bind(&prototype_loop_start); 3155 3156 // Get the constructor, if any. 3157 __ GetMapConstructor(constructor, map, kScratchRegister); 3158 __ CmpInstanceType(kScratchRegister, JS_FUNCTION_TYPE); 3159 Label next_prototype; 3160 __ j(not_equal, &next_prototype, Label::kNear); 3161 3162 // Get the constructor's signature. 3163 Register type = constructor; 3164 __ movp(type, 3165 FieldOperand(constructor, JSFunction::kSharedFunctionInfoOffset)); 3166 __ movp(type, FieldOperand(type, SharedFunctionInfo::kFunctionDataOffset)); 3167 3168 // Loop through the chain of inheriting function templates. 3169 Label function_template_loop; 3170 __ bind(&function_template_loop); 3171 3172 // If the signatures match, we have a compatible receiver. 3173 __ cmpp(signature, type); 3174 __ j(equal, &receiver_check_passed, Label::kNear); 3175 3176 // If the current type is not a FunctionTemplateInfo, load the next prototype 3177 // in the chain. 3178 __ JumpIfSmi(type, &next_prototype, Label::kNear); 3179 __ CmpObjectType(type, FUNCTION_TEMPLATE_INFO_TYPE, kScratchRegister); 3180 __ j(not_equal, &next_prototype, Label::kNear); 3181 3182 // Otherwise load the parent function template and iterate. 3183 __ movp(type, 3184 FieldOperand(type, FunctionTemplateInfo::kParentTemplateOffset)); 3185 __ jmp(&function_template_loop, Label::kNear); 3186 3187 // Load the next prototype. 3188 __ bind(&next_prototype); 3189 __ testq(FieldOperand(map, Map::kBitField3Offset), 3190 Immediate(Map::HasHiddenPrototype::kMask)); 3191 __ j(zero, receiver_check_failed); 3192 __ movp(receiver, FieldOperand(map, Map::kPrototypeOffset)); 3193 __ movp(map, FieldOperand(receiver, HeapObject::kMapOffset)); 3194 // Iterate. 3195 __ jmp(&prototype_loop_start, Label::kNear); 3196 3197 __ bind(&receiver_check_passed); 3198 } 3199 3200 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) { 3201 // ----------- S t a t e ------------- 3202 // -- rax : number of arguments (not including the receiver) 3203 // -- rdi : callee 3204 // -- rsi : context 3205 // -- rsp[0] : return address 3206 // -- rsp[8] : last argument 3207 // -- ... 3208 // -- rsp[rax * 8] : first argument 3209 // -- rsp[(rax + 1) * 8] : receiver 3210 // ----------------------------------- 3211 3212 StackArgumentsAccessor args(rsp, rax); 3213 3214 // Load the FunctionTemplateInfo. 3215 __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 3216 __ movp(rbx, FieldOperand(rbx, SharedFunctionInfo::kFunctionDataOffset)); 3217 3218 // Do the compatible receiver check. 3219 Label receiver_check_failed; 3220 __ movp(rcx, args.GetReceiverOperand()); 3221 CompatibleReceiverCheck(masm, rcx, rbx, rdx, r8, r9, &receiver_check_failed); 3222 3223 // Get the callback offset from the FunctionTemplateInfo, and jump to the 3224 // beginning of the code. 3225 __ movp(rdx, FieldOperand(rbx, FunctionTemplateInfo::kCallCodeOffset)); 3226 __ movp(rdx, FieldOperand(rdx, CallHandlerInfo::kFastHandlerOffset)); 3227 __ addp(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag)); 3228 __ jmp(rdx); 3229 3230 // Compatible receiver check failed: pop return address, arguments and 3231 // receiver and throw an Illegal Invocation exception. 3232 __ bind(&receiver_check_failed); 3233 __ PopReturnAddressTo(rbx); 3234 __ leap(rax, Operand(rax, times_pointer_size, 1 * kPointerSize)); 3235 __ addp(rsp, rax); 3236 __ PushReturnAddressFrom(rbx); 3237 { 3238 FrameScope scope(masm, StackFrame::INTERNAL); 3239 __ TailCallRuntime(Runtime::kThrowIllegalInvocation); 3240 } 3241 } 3242 3243 static void Generate_OnStackReplacementHelper(MacroAssembler* masm, 3244 bool has_handler_frame) { 3245 // Lookup the function in the JavaScript frame. 3246 if (has_handler_frame) { 3247 __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 3248 __ movp(rax, Operand(rax, JavaScriptFrameConstants::kFunctionOffset)); 3249 } else { 3250 __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); 3251 } 3252 3253 { 3254 FrameScope scope(masm, StackFrame::INTERNAL); 3255 // Pass function as argument. 3256 __ Push(rax); 3257 __ CallRuntime(Runtime::kCompileForOnStackReplacement); 3258 } 3259 3260 Label skip; 3261 // If the code object is null, just return to the caller. 3262 __ cmpp(rax, Immediate(0)); 3263 __ j(not_equal, &skip, Label::kNear); 3264 __ ret(0); 3265 3266 __ bind(&skip); 3267 3268 // Drop any potential handler frame that is be sitting on top of the actual 3269 // JavaScript frame. This is the case then OSR is triggered from bytecode. 3270 if (has_handler_frame) { 3271 __ leave(); 3272 } 3273 3274 // Load deoptimization data from the code object. 3275 __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag)); 3276 3277 // Load the OSR entrypoint offset from the deoptimization data. 3278 __ SmiToInteger32( 3279 rbx, Operand(rbx, FixedArray::OffsetOfElementAt( 3280 DeoptimizationInputData::kOsrPcOffsetIndex) - 3281 kHeapObjectTag)); 3282 3283 // Compute the target address = code_obj + header_size + osr_offset 3284 __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag)); 3285 3286 // Overwrite the return address on the stack. 3287 __ movq(StackOperandForReturnAddress(0), rax); 3288 3289 // And "return" to the OSR entry point of the function. 3290 __ ret(0); 3291 } 3292 3293 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { 3294 Generate_OnStackReplacementHelper(masm, false); 3295 } 3296 3297 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) { 3298 Generate_OnStackReplacementHelper(masm, true); 3299 } 3300 3301 #undef __ 3302 3303 } // namespace internal 3304 } // namespace v8 3305 3306 #endif // V8_TARGET_ARCH_X64 3307