1 // Copyright 2012 the V8 project authors. All rights reserved. 2 // Use of this source code is governed by a BSD-style license that can be 3 // found in the LICENSE file. 4 5 #if V8_TARGET_ARCH_IA32 6 7 #include "src/code-factory.h" 8 #include "src/codegen.h" 9 #include "src/deoptimizer.h" 10 #include "src/full-codegen/full-codegen.h" 11 #include "src/ia32/frames-ia32.h" 12 13 namespace v8 { 14 namespace internal { 15 16 17 #define __ ACCESS_MASM(masm) 18 19 void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id) { 20 // ----------- S t a t e ------------- 21 // -- eax : number of arguments excluding receiver 22 // -- edi : target 23 // -- edx : new.target 24 // -- esp[0] : return address 25 // -- esp[4] : last argument 26 // -- ... 27 // -- esp[4 * argc] : first argument 28 // -- esp[4 * (argc +1)] : receiver 29 // ----------------------------------- 30 __ AssertFunction(edi); 31 32 // Make sure we operate in the context of the called function (for example 33 // ConstructStubs implemented in C++ will be run in the context of the caller 34 // instead of the callee, due to the way that [[Construct]] is defined for 35 // ordinary functions). 36 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); 37 38 // Insert extra arguments. 39 const int num_extra_args = 2; 40 __ PopReturnAddressTo(ecx); 41 __ Push(edi); 42 __ Push(edx); 43 __ PushReturnAddressFrom(ecx); 44 45 // JumpToExternalReference expects eax to contain the number of arguments 46 // including the receiver and the extra arguments. 
47 __ add(eax, Immediate(num_extra_args + 1)); 48 49 __ JumpToExternalReference(ExternalReference(id, masm->isolate())); 50 } 51 52 static void GenerateTailCallToReturnedCode(MacroAssembler* masm, 53 Runtime::FunctionId function_id) { 54 // ----------- S t a t e ------------- 55 // -- eax : argument count (preserved for callee) 56 // -- edx : new target (preserved for callee) 57 // -- edi : target function (preserved for callee) 58 // ----------------------------------- 59 { 60 FrameScope scope(masm, StackFrame::INTERNAL); 61 // Push the number of arguments to the callee. 62 __ SmiTag(eax); 63 __ push(eax); 64 // Push a copy of the target function and the new target. 65 __ push(edi); 66 __ push(edx); 67 // Function is also the parameter to the runtime call. 68 __ push(edi); 69 70 __ CallRuntime(function_id, 1); 71 __ mov(ebx, eax); 72 73 // Restore target function and new target. 74 __ pop(edx); 75 __ pop(edi); 76 __ pop(eax); 77 __ SmiUntag(eax); 78 } 79 80 __ lea(ebx, FieldOperand(ebx, Code::kHeaderSize)); 81 __ jmp(ebx); 82 } 83 84 static void GenerateTailCallToSharedCode(MacroAssembler* masm) { 85 __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); 86 __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kCodeOffset)); 87 __ lea(ebx, FieldOperand(ebx, Code::kHeaderSize)); 88 __ jmp(ebx); 89 } 90 91 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) { 92 // Checking whether the queued function is ready for install is optional, 93 // since we come across interrupts and stack checks elsewhere. However, 94 // not checking may delay installing ready functions, and always checking 95 // would be quite expensive. A good compromise is to first check against 96 // stack limit as a cue for an interrupt signal. 
97 Label ok; 98 ExternalReference stack_limit = 99 ExternalReference::address_of_stack_limit(masm->isolate()); 100 __ cmp(esp, Operand::StaticVariable(stack_limit)); 101 __ j(above_equal, &ok, Label::kNear); 102 103 GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode); 104 105 __ bind(&ok); 106 GenerateTailCallToSharedCode(masm); 107 } 108 109 static void Generate_JSConstructStubHelper(MacroAssembler* masm, 110 bool is_api_function, 111 bool create_implicit_receiver, 112 bool check_derived_construct) { 113 // ----------- S t a t e ------------- 114 // -- eax: number of arguments 115 // -- esi: context 116 // -- edi: constructor function 117 // -- ebx: allocation site or undefined 118 // -- edx: new target 119 // ----------------------------------- 120 121 // Enter a construct frame. 122 { 123 FrameScope scope(masm, StackFrame::CONSTRUCT); 124 125 // Preserve the incoming parameters on the stack. 126 __ AssertUndefinedOrAllocationSite(ebx); 127 __ push(esi); 128 __ push(ebx); 129 __ SmiTag(eax); 130 __ push(eax); 131 132 if (create_implicit_receiver) { 133 // Allocate the new receiver object. 134 __ Push(edi); 135 __ Push(edx); 136 FastNewObjectStub stub(masm->isolate()); 137 __ CallStub(&stub); 138 __ mov(ebx, eax); 139 __ Pop(edx); 140 __ Pop(edi); 141 142 // ----------- S t a t e ------------- 143 // -- edi: constructor function 144 // -- ebx: newly allocated object 145 // -- edx: new target 146 // ----------------------------------- 147 148 // Retrieve smi-tagged arguments count from the stack. 149 __ mov(eax, Operand(esp, 0)); 150 } 151 152 __ SmiUntag(eax); 153 154 if (create_implicit_receiver) { 155 // Push the allocated receiver to the stack. We need two copies 156 // because we may have to return the original one and the calling 157 // conventions dictate that the called function pops the receiver. 
158 __ push(ebx); 159 __ push(ebx); 160 } else { 161 __ PushRoot(Heap::kTheHoleValueRootIndex); 162 } 163 164 // Set up pointer to last argument. 165 __ lea(ebx, Operand(ebp, StandardFrameConstants::kCallerSPOffset)); 166 167 // Copy arguments and receiver to the expression stack. 168 Label loop, entry; 169 __ mov(ecx, eax); 170 __ jmp(&entry); 171 __ bind(&loop); 172 __ push(Operand(ebx, ecx, times_4, 0)); 173 __ bind(&entry); 174 __ dec(ecx); 175 __ j(greater_equal, &loop); 176 177 // Call the function. 178 ParameterCount actual(eax); 179 __ InvokeFunction(edi, edx, actual, CALL_FUNCTION, 180 CheckDebugStepCallWrapper()); 181 182 // Store offset of return address for deoptimizer. 183 if (create_implicit_receiver && !is_api_function) { 184 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset()); 185 } 186 187 // Restore context from the frame. 188 __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset)); 189 190 if (create_implicit_receiver) { 191 // If the result is an object (in the ECMA sense), we should get rid 192 // of the receiver and use the result. 193 Label use_receiver, exit; 194 195 // If the result is a smi, it is *not* an object in the ECMA sense. 196 __ JumpIfSmi(eax, &use_receiver); 197 198 // If the type of the result (stored in its map) is less than 199 // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense. 200 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx); 201 __ j(above_equal, &exit); 202 203 // Throw away the result of the constructor invocation and use the 204 // on-stack receiver as the result. 205 __ bind(&use_receiver); 206 __ mov(eax, Operand(esp, 0)); 207 208 // Restore the arguments count and leave the construct frame. The 209 // arguments count is stored below the receiver. 210 __ bind(&exit); 211 __ mov(ebx, Operand(esp, 1 * kPointerSize)); 212 } else { 213 __ mov(ebx, Operand(esp, 0)); 214 } 215 216 // Leave construct frame. 217 } 218 219 // ES6 9.2.2. 
Step 13+ 220 // Check that the result is not a Smi, indicating that the constructor result 221 // from a derived class is neither undefined nor an Object. 222 if (check_derived_construct) { 223 Label dont_throw; 224 __ JumpIfNotSmi(eax, &dont_throw); 225 { 226 FrameScope scope(masm, StackFrame::INTERNAL); 227 __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject); 228 } 229 __ bind(&dont_throw); 230 } 231 232 // Remove caller arguments from the stack and return. 233 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0); 234 __ pop(ecx); 235 __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver 236 __ push(ecx); 237 if (create_implicit_receiver) { 238 __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1); 239 } 240 __ ret(0); 241 } 242 243 244 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { 245 Generate_JSConstructStubHelper(masm, false, true, false); 246 } 247 248 249 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) { 250 Generate_JSConstructStubHelper(masm, true, false, false); 251 } 252 253 254 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) { 255 Generate_JSConstructStubHelper(masm, false, false, false); 256 } 257 258 259 void Builtins::Generate_JSBuiltinsConstructStubForDerived( 260 MacroAssembler* masm) { 261 Generate_JSConstructStubHelper(masm, false, false, true); 262 } 263 264 265 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) { 266 FrameScope scope(masm, StackFrame::INTERNAL); 267 __ push(edi); 268 __ CallRuntime(Runtime::kThrowConstructedNonConstructable); 269 } 270 271 272 enum IsTagged { kEaxIsSmiTagged, kEaxIsUntaggedInt }; 273 274 275 // Clobbers ecx, edx, edi; preserves all other registers. 276 static void Generate_CheckStackOverflow(MacroAssembler* masm, 277 IsTagged eax_is_tagged) { 278 // eax : the number of items to be pushed to the stack 279 // 280 // Check the stack for overflow. 
We are not trying to catch 281 // interruptions (e.g. debug break and preemption) here, so the "real stack 282 // limit" is checked. 283 Label okay; 284 ExternalReference real_stack_limit = 285 ExternalReference::address_of_real_stack_limit(masm->isolate()); 286 __ mov(edi, Operand::StaticVariable(real_stack_limit)); 287 // Make ecx the space we have left. The stack might already be overflowed 288 // here which will cause ecx to become negative. 289 __ mov(ecx, esp); 290 __ sub(ecx, edi); 291 // Make edx the space we need for the array when it is unrolled onto the 292 // stack. 293 __ mov(edx, eax); 294 int smi_tag = eax_is_tagged == kEaxIsSmiTagged ? kSmiTagSize : 0; 295 __ shl(edx, kPointerSizeLog2 - smi_tag); 296 // Check if the arguments will overflow the stack. 297 __ cmp(ecx, edx); 298 __ j(greater, &okay); // Signed comparison. 299 300 // Out of stack space. 301 __ CallRuntime(Runtime::kThrowStackOverflow); 302 303 __ bind(&okay); 304 } 305 306 307 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, 308 bool is_construct) { 309 ProfileEntryHookStub::MaybeCallEntryHook(masm); 310 311 { 312 FrameScope scope(masm, StackFrame::INTERNAL); 313 314 // Setup the context (we need to use the caller context from the isolate). 315 ExternalReference context_address(Isolate::kContextAddress, 316 masm->isolate()); 317 __ mov(esi, Operand::StaticVariable(context_address)); 318 319 // Load the previous frame pointer (ebx) to access C arguments 320 __ mov(ebx, Operand(ebp, 0)); 321 322 // Push the function and the receiver onto the stack. 323 __ push(Operand(ebx, EntryFrameConstants::kFunctionArgOffset)); 324 __ push(Operand(ebx, EntryFrameConstants::kReceiverArgOffset)); 325 326 // Load the number of arguments and setup pointer to the arguments. 327 __ mov(eax, Operand(ebx, EntryFrameConstants::kArgcOffset)); 328 __ mov(ebx, Operand(ebx, EntryFrameConstants::kArgvOffset)); 329 330 // Check if we have enough stack space to push all arguments. 
331 // Expects argument count in eax. Clobbers ecx, edx, edi. 332 Generate_CheckStackOverflow(masm, kEaxIsUntaggedInt); 333 334 // Copy arguments to the stack in a loop. 335 Label loop, entry; 336 __ Move(ecx, Immediate(0)); 337 __ jmp(&entry, Label::kNear); 338 __ bind(&loop); 339 __ mov(edx, Operand(ebx, ecx, times_4, 0)); // push parameter from argv 340 __ push(Operand(edx, 0)); // dereference handle 341 __ inc(ecx); 342 __ bind(&entry); 343 __ cmp(ecx, eax); 344 __ j(not_equal, &loop); 345 346 // Load the previous frame pointer (ebx) to access C arguments 347 __ mov(ebx, Operand(ebp, 0)); 348 349 // Get the new.target and function from the frame. 350 __ mov(edx, Operand(ebx, EntryFrameConstants::kNewTargetArgOffset)); 351 __ mov(edi, Operand(ebx, EntryFrameConstants::kFunctionArgOffset)); 352 353 // Invoke the code. 354 Handle<Code> builtin = is_construct 355 ? masm->isolate()->builtins()->Construct() 356 : masm->isolate()->builtins()->Call(); 357 __ Call(builtin, RelocInfo::CODE_TARGET); 358 359 // Exit the internal frame. Notice that this also removes the empty. 360 // context and the function left on the stack by the code 361 // invocation. 362 } 363 __ ret(kPointerSize); // Remove receiver. 364 } 365 366 367 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) { 368 Generate_JSEntryTrampolineHelper(masm, false); 369 } 370 371 372 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) { 373 Generate_JSEntryTrampolineHelper(masm, true); 374 } 375 376 // static 377 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) { 378 // ----------- S t a t e ------------- 379 // -- eax : the value to pass to the generator 380 // -- ebx : the JSGeneratorObject to resume 381 // -- edx : the resume mode (tagged) 382 // -- esp[0] : return address 383 // ----------------------------------- 384 __ AssertGeneratorObject(ebx); 385 386 // Store input value into generator object. 
387 __ mov(FieldOperand(ebx, JSGeneratorObject::kInputOrDebugPosOffset), eax); 388 __ RecordWriteField(ebx, JSGeneratorObject::kInputOrDebugPosOffset, eax, ecx, 389 kDontSaveFPRegs); 390 391 // Store resume mode into generator object. 392 __ mov(FieldOperand(ebx, JSGeneratorObject::kResumeModeOffset), edx); 393 394 // Load suspended function and context. 395 __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset)); 396 __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset)); 397 398 // Flood function if we are stepping. 399 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator; 400 Label stepping_prepared; 401 ExternalReference last_step_action = 402 ExternalReference::debug_last_step_action_address(masm->isolate()); 403 STATIC_ASSERT(StepFrame > StepIn); 404 __ cmpb(Operand::StaticVariable(last_step_action), Immediate(StepIn)); 405 __ j(greater_equal, &prepare_step_in_if_stepping); 406 407 // Flood function if we need to continue stepping in the suspended generator. 408 ExternalReference debug_suspended_generator = 409 ExternalReference::debug_suspended_generator_address(masm->isolate()); 410 __ cmp(ebx, Operand::StaticVariable(debug_suspended_generator)); 411 __ j(equal, &prepare_step_in_suspended_generator); 412 __ bind(&stepping_prepared); 413 414 // Pop return address. 415 __ PopReturnAddressTo(eax); 416 417 // Push receiver. 418 __ Push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset)); 419 420 // ----------- S t a t e ------------- 421 // -- eax : return address 422 // -- ebx : the JSGeneratorObject to resume 423 // -- edx : the resume mode (tagged) 424 // -- edi : generator function 425 // -- esi : generator context 426 // -- esp[0] : generator receiver 427 // ----------------------------------- 428 429 // Push holes for arguments to generator function. 
Since the parser forced 430 // context allocation for any variables in generators, the actual argument 431 // values have already been copied into the context and these dummy values 432 // will never be used. 433 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); 434 __ mov(ecx, 435 FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset)); 436 { 437 Label done_loop, loop; 438 __ bind(&loop); 439 __ sub(ecx, Immediate(Smi::FromInt(1))); 440 __ j(carry, &done_loop, Label::kNear); 441 __ PushRoot(Heap::kTheHoleValueRootIndex); 442 __ jmp(&loop); 443 __ bind(&done_loop); 444 } 445 446 // Dispatch on the kind of generator object. 447 Label old_generator; 448 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); 449 __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kFunctionDataOffset)); 450 __ CmpObjectType(ecx, BYTECODE_ARRAY_TYPE, ecx); 451 __ j(not_equal, &old_generator); 452 453 // New-style (ignition/turbofan) generator object 454 { 455 __ PushReturnAddressFrom(eax); 456 __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); 457 __ mov(eax, 458 FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset)); 459 // We abuse new.target both to indicate that this is a resume call and to 460 // pass in the generator object. In ordinary calls, new.target is always 461 // undefined because generator functions are non-constructable. 462 __ mov(edx, ebx); 463 __ jmp(FieldOperand(edi, JSFunction::kCodeEntryOffset)); 464 } 465 466 // Old-style (full-codegen) generator object 467 __ bind(&old_generator); 468 { 469 // Enter a new JavaScript frame, and initialize its slots as they were when 470 // the generator was suspended. 471 FrameScope scope(masm, StackFrame::MANUAL); 472 __ PushReturnAddressFrom(eax); // Return address. 473 __ Push(ebp); // Caller's frame pointer. 474 __ Move(ebp, esp); 475 __ Push(esi); // Callee's context. 476 __ Push(edi); // Callee's JS Function. 477 478 // Restore the operand stack. 
479 __ mov(eax, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset)); 480 { 481 Label done_loop, loop; 482 __ Move(ecx, Smi::FromInt(0)); 483 __ bind(&loop); 484 __ cmp(ecx, FieldOperand(eax, FixedArray::kLengthOffset)); 485 __ j(equal, &done_loop, Label::kNear); 486 __ Push(FieldOperand(eax, ecx, times_half_pointer_size, 487 FixedArray::kHeaderSize)); 488 __ add(ecx, Immediate(Smi::FromInt(1))); 489 __ jmp(&loop); 490 __ bind(&done_loop); 491 } 492 493 // Reset operand stack so we don't leak. 494 __ mov(FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset), 495 Immediate(masm->isolate()->factory()->empty_fixed_array())); 496 497 // Resume the generator function at the continuation. 498 __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); 499 __ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset)); 500 __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset)); 501 __ SmiUntag(ecx); 502 __ lea(edx, FieldOperand(edx, ecx, times_1, Code::kHeaderSize)); 503 __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset), 504 Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting))); 505 __ mov(eax, ebx); // Continuation expects generator object in eax. 
506 __ jmp(edx); 507 } 508 509 __ bind(&prepare_step_in_if_stepping); 510 { 511 FrameScope scope(masm, StackFrame::INTERNAL); 512 __ Push(ebx); 513 __ Push(edx); 514 __ Push(edi); 515 __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping); 516 __ Pop(edx); 517 __ Pop(ebx); 518 __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset)); 519 } 520 __ jmp(&stepping_prepared); 521 522 __ bind(&prepare_step_in_suspended_generator); 523 { 524 FrameScope scope(masm, StackFrame::INTERNAL); 525 __ Push(ebx); 526 __ Push(edx); 527 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator); 528 __ Pop(edx); 529 __ Pop(ebx); 530 __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset)); 531 } 532 __ jmp(&stepping_prepared); 533 } 534 535 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1, 536 Register scratch2) { 537 Register args_count = scratch1; 538 Register return_pc = scratch2; 539 540 // Get the arguments + reciever count. 541 __ mov(args_count, 542 Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp)); 543 __ mov(args_count, 544 FieldOperand(args_count, BytecodeArray::kParameterSizeOffset)); 545 546 // Leave the frame (also dropping the register file). 547 __ leave(); 548 549 // Drop receiver + arguments. 550 __ pop(return_pc); 551 __ add(esp, args_count); 552 __ push(return_pc); 553 } 554 555 // Generate code for entering a JS function with the interpreter. 556 // On entry to the function the receiver and arguments have been pushed on the 557 // stack left to right. The actual argument count matches the formal parameter 558 // count expected by the function. 559 // 560 // The live registers are: 561 // o edi: the JS function object being called 562 // o edx: the new target 563 // o esi: our context 564 // o ebp: the caller's frame pointer 565 // o esp: stack pointer (pointing to return address) 566 // 567 // The function builds an interpreter frame. See InterpreterFrameConstants in 568 // frames.h for its layout. 
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS function.
  __ push(edx);  // Callee's new target.

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  Label load_debug_bytecode_array, bytecode_array_loaded;
  __ cmp(FieldOperand(eax, SharedFunctionInfo::kDebugInfoOffset),
         Immediate(DebugInfo::uninitialized()));
  __ j(not_equal, &load_debug_bytecode_array);
  __ mov(kInterpreterBytecodeArrayRegister,
         FieldOperand(eax, SharedFunctionInfo::kFunctionDataOffset));
  __ bind(&bytecode_array_loaded);

  // Check function data field is actually a BytecodeArray object.
  Label bytecode_array_not_present;
  __ CompareRoot(kInterpreterBytecodeArrayRegister,
                 Heap::kUndefinedValueRootIndex);
  __ j(equal, &bytecode_array_not_present);
  if (FLAG_debug_code) {
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     eax);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Push bytecode array.
  __ push(kInterpreterBytecodeArrayRegister);
  // Push Smi tagged initial bytecode array offset.
  __ push(Immediate(Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag)));

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ mov(ebx, FieldOperand(kInterpreterBytecodeArrayRegister,
                             BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ mov(ecx, esp);
    __ sub(ecx, ebx);
    ExternalReference stack_limit =
        ExternalReference::address_of_real_stack_limit(masm->isolate());
    __ cmp(ecx, Operand::StaticVariable(stack_limit));
    __ j(above_equal, &ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ mov(eax, Immediate(masm->isolate()->factory()->undefined_value()));
    __ jmp(&loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(eax);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ sub(ebx, Immediate(kPointerSize));
    __ j(greater_equal, &loop_header);
  }

  // Load accumulator, bytecode offset and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ mov(kInterpreterBytecodeOffsetRegister,
         Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ mov(kInterpreterDispatchTableRegister,
         Immediate(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ movzx_b(ebx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ mov(ebx, Operand(kInterpreterDispatchTableRegister, ebx,
                      times_pointer_size, 0));
  __ call(ebx);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in eax.
  LeaveInterpreterFrame(masm, ebx, ecx);
  __ ret(0);

  // Load debug copy of the bytecode array.
  __ bind(&load_debug_bytecode_array);
  Register debug_info = kInterpreterBytecodeArrayRegister;
  __ mov(debug_info, FieldOperand(eax, SharedFunctionInfo::kDebugInfoOffset));
  __ mov(kInterpreterBytecodeArrayRegister,
         FieldOperand(debug_info, DebugInfo::kAbstractCodeIndex));
  __ jmp(&bytecode_array_loaded);

  // If the bytecode array is no longer present, then the underlying function
  // has been switched to a different kind of code and we heal the closure by
  // switching the code entry field over to the new code object as well.
  __ bind(&bytecode_array_not_present);
  __ pop(edx);  // Callee's new target.
  __ pop(edi);  // Callee's JS function.
  __ pop(esi);  // Callee's context.
  __ leave();   // Leave the frame so we can tail call.
  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kCodeOffset));
  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
  __ mov(FieldOperand(edi, JSFunction::kCodeEntryOffset), ecx);
  __ RecordWriteCodeEntryField(edi, ecx, ebx);
  __ jmp(ecx);
}

void Builtins::Generate_InterpreterMarkBaselineOnReturn(MacroAssembler* masm) {
  // Save the function and context for call to CompileBaseline.
  __ mov(edi, Operand(ebp, StandardFrameConstants::kFunctionOffset));
  __ mov(kContextRegister,
         Operand(ebp, StandardFrameConstants::kContextOffset));

  // Leave the frame before recompiling for baseline so that we don't count as
  // an activation on the stack.
  LeaveInterpreterFrame(masm, ebx, ecx);

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Push return value.
    __ push(eax);

    // Push function as argument and compile for baseline.
    __ push(edi);
    __ CallRuntime(Runtime::kCompileBaseline);

    // Restore return value.
    __ pop(eax);
  }
  __ ret(0);
}

// Pushes the arguments between ebx (inclusive) and array_limit (exclusive)
// onto the stack, iterating downwards from ebx.
static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register array_limit) {
  // ----------- S t a t e -------------
  //  -- ebx : Pointer to the last argument in the args array.
  //  -- array_limit : Pointer to one before the first argument in the
  //                   args array.
  // -----------------------------------
  Label loop_header, loop_check;
  __ jmp(&loop_check);
  __ bind(&loop_header);
  __ Push(Operand(ebx, 0));
  __ sub(ebx, Immediate(kPointerSize));
  __ bind(&loop_check);
  __ cmp(ebx, array_limit);
  __ j(greater, &loop_header, Label::kNear);
}

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- ebx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  //  -- edi : the target to call (can be any Object).
  // -----------------------------------

  // Pop return address to allow tail-call after pushing arguments.
  __ Pop(edx);

  // Find the address of the last argument.
  __ mov(ecx, eax);
  __ add(ecx, Immediate(1));  // Add one for receiver.
  __ shl(ecx, kPointerSizeLog2);
  __ neg(ecx);
  __ add(ecx, ebx);

  Generate_InterpreterPushArgs(masm, ecx);

  // Call the target.
  __ Push(edx);  // Re-push return address.
  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                            tail_call_mode),
          RelocInfo::CODE_TARGET);
}


// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the new target
  //  -- edi : the constructor
  //  -- ebx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  // -----------------------------------

  // Pop return address to allow tail-call after pushing arguments.
  __ Pop(ecx);

  // Push edi in the slot meant for receiver. We need an extra register
  // so store edi temporarily on stack.
  __ Push(edi);

  // Find the address of the last argument.
  __ mov(edi, eax);
  __ neg(edi);
  __ shl(edi, kPointerSizeLog2);
  __ add(edi, ebx);

  Generate_InterpreterPushArgs(masm, edi);

  // Restore the constructor from slot on stack. It was pushed at the slot
  // meant for receiver.
  __ mov(edi, Operand(esp, eax, times_pointer_size, 0));

  // Re-push return address.
  __ Push(ecx);

  // Call the constructor with unmodified eax, edx, edi values.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
  __ LoadHeapObject(ebx,
                    masm->isolate()->builtins()->InterpreterEntryTrampoline());
  __ add(ebx, Immediate(interpreter_entry_return_pc_offset->value() +
                        Code::kHeaderSize - kHeapObjectTag));
  __ push(ebx);

  // Initialize the dispatch table register.
  __ mov(kInterpreterDispatchTableRegister,
         Immediate(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ mov(kInterpreterBytecodeArrayRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     ebx);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ movzx_b(ebx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ mov(ebx, Operand(kInterpreterDispatchTableRegister, ebx,
                      times_pointer_size, 0));
  __ jmp(ebx);
}

void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argument count (preserved for callee)
  //  -- edx : new target (preserved for callee)
  //  -- edi : target function (preserved for callee)
  // -----------------------------------
  // First lookup code, maybe we don't need to compile!
  Label gotta_call_runtime, gotta_call_runtime_no_stack;
  Label maybe_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register closure = edi;
  Register new_target = edx;
  Register argument_count = eax;

  // Preserve the caller-visible registers on the stack; they are reused as
  // scratch below and restored before any tail call out of this builtin.
  __ push(argument_count);
  __ push(new_target);
  __ push(closure);

  Register map = argument_count;
  Register index = ebx;
  __ mov(map, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ mov(map, FieldOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ mov(index, FieldOperand(map, FixedArray::kLengthOffset));
  // An empty optimized code map has fewer than two elements — nothing cached.
  __ cmp(index, Immediate(Smi::FromInt(2)));
  __ j(less, &gotta_call_runtime);

  // Find literals.
  // edx : native context
  // ebx : length / index
  // eax : optimized code map
  // stack[0] : new target
  // stack[4] : closure
  Register native_context = edx;
  __ mov(native_context, NativeContextOperand());

  // Walk the code map backwards, one entry per iteration (see loop_bottom).
  __ bind(&loop_top);
  Register temp = edi;

  // Does the native context match?
  __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
                            SharedFunctionInfo::kOffsetToPreviousContext));
  __ mov(temp, FieldOperand(temp, WeakCell::kValueOffset));
  __ cmp(temp, native_context);
  __ j(not_equal, &loop_bottom);
  // OSR id set to none?
  __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
                            SharedFunctionInfo::kOffsetToPreviousOsrAstId));
  const int bailout_id = BailoutId::None().ToInt();
  __ cmp(temp, Immediate(Smi::FromInt(bailout_id)));
  __ j(not_equal, &loop_bottom);

  // Literals available?
  Label got_literals, maybe_cleared_weakcell;
  __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
                            SharedFunctionInfo::kOffsetToPreviousLiterals));

  // temp contains either a WeakCell pointing to the literals array or the
  // literals array directly.
  STATIC_ASSERT(WeakCell::kValueOffset == FixedArray::kLengthOffset);
  __ JumpIfSmi(FieldOperand(temp, WeakCell::kValueOffset),
               &maybe_cleared_weakcell);
  // The WeakCell value is a pointer, therefore it's a valid literals array.
  __ mov(temp, FieldOperand(temp, WeakCell::kValueOffset));
  __ jmp(&got_literals);

  // We have a smi. If it's 0, then we are looking at a cleared WeakCell
  // around the literals array, and we should visit the runtime. If it's > 0,
  // then temp already contains the literals array.
  __ bind(&maybe_cleared_weakcell);
  __ cmp(FieldOperand(temp, WeakCell::kValueOffset), Immediate(0));
  __ j(equal, &gotta_call_runtime);

  // Save the literals in the closure.
  __ bind(&got_literals);
  __ mov(ecx, Operand(esp, 0));  // Reload the closure saved on the stack.
  __ mov(FieldOperand(ecx, JSFunction::kLiteralsOffset), temp);
  __ push(index);  // index is clobbered as scratch by the write barrier.
  __ RecordWriteField(ecx, JSFunction::kLiteralsOffset, temp, index,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  __ pop(index);

  // Code available?
  Register entry = ecx;
  __ mov(entry, FieldOperand(map, index, times_half_pointer_size,
                             SharedFunctionInfo::kOffsetToPreviousCachedCode));
  __ mov(entry, FieldOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &maybe_call_runtime);

  // Found literals and code. Get them into the closure and return.
  __ pop(closure);
  // Store code entry in the closure.
  __ lea(entry, FieldOperand(entry, Code::kHeaderSize));

  Label install_optimized_code_and_tailcall;
  __ bind(&install_optimized_code_and_tailcall);
  __ mov(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
  __ RecordWriteCodeEntryField(closure, entry, eax);

  // Link the closure into the optimized function list.
  // ecx : code entry
  // edx : native context
  // edi : closure
  __ mov(ebx,
         ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ mov(FieldOperand(closure, JSFunction::kNextFunctionLinkOffset), ebx);
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, ebx, eax,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ mov(ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST),
         closure);
  // Save closure before the write barrier.
  __ mov(ebx, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, closure, eax,
                            kDontSaveFPRegs);
  __ mov(closure, ebx);
  __ pop(new_target);
  __ pop(argument_count);
  // Tail-call the cached optimized code.
  __ jmp(entry);

  __ bind(&loop_bottom);
  __ sub(index, Immediate(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
  __ cmp(index, Immediate(Smi::FromInt(1)));
  __ j(greater, &loop_top);

  // We found neither literals nor code.
  __ jmp(&gotta_call_runtime);

  __ bind(&maybe_call_runtime);
  __ pop(closure);

  // Last possibility. Check the context free optimized code map entry.
  __ mov(entry, FieldOperand(map, FixedArray::kHeaderSize +
                                      SharedFunctionInfo::kSharedCodeIndex));
  __ mov(entry, FieldOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Store code entry in the closure.
  __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
  __ jmp(&install_optimized_code_and_tailcall);

  __ bind(&try_shared);
  __ pop(new_target);
  __ pop(argument_count);
  // Is the full code valid?
  // (Continuation of Generate_CompileLazy: try the SharedFunctionInfo's code
  // field, unless it is just the CompileLazy builtin itself.)
  __ mov(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ mov(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset));
  // Extract the code kind from the flags and bail out to the runtime if the
  // shared code is a builtin (i.e. not real compiled code).
  __ mov(ebx, FieldOperand(entry, Code::kFlagsOffset));
  __ and_(ebx, Code::KindField::kMask);
  __ shr(ebx, Code::KindField::kShift);
  __ cmp(ebx, Immediate(Code::BUILTIN));
  __ j(equal, &gotta_call_runtime_no_stack);
  // Yes, install the full code.
  __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
  __ mov(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
  __ RecordWriteCodeEntryField(closure, entry, ebx);
  __ jmp(entry);

  __ bind(&gotta_call_runtime);
  // Restore the registers saved at function entry before leaving.
  __ pop(closure);
  __ pop(new_target);
  __ pop(argument_count);
  __ bind(&gotta_call_runtime_no_stack);

  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}

// Compiles baseline code for the target function via the runtime.
void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
}

// Compiles optimized code (non-concurrently) via the runtime.
void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}


// Compiles optimized code concurrently via the runtime.
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Re-execute the code that was patched back to the young age when
  // the stub returns.
  // Rewind the return address on the stack so that, on return, the patched
  // (young) code sequence is re-executed.  NOTE(review): the magic 5 appears
  // to be the ia32 call-instruction length — Generate_MarkCodeAsExecutedOnce
  // below uses Assembler::kCallInstructionLength for the same purpose;
  // consider using the named constant here too (confirm they are equal).
  __ sub(Operand(esp, 0), Immediate(5));
  __ pushad();
  // Load the (rewound) return address as the first C argument.
  __ mov(eax, Operand(esp, 8 * kPointerSize));
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2, ebx);
    __ mov(Operand(esp, 1 * kPointerSize),
           Immediate(ExternalReference::isolate_address(masm->isolate())));
    __ mov(Operand(esp, 0), eax);
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  }
  __ popad();
  __ ret(0);
}

// Generates the even/odd-marking "make code young again" builtins for every
// code age in CODE_AGE_LIST; all of them share the common implementation.
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.
  __ pushad();
  __ mov(eax, Operand(esp, 8 * kPointerSize));
  // Point at the start of the call instruction that invoked this stub.
  __ sub(eax, Immediate(Assembler::kCallInstructionLength));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2, ebx);
    __ mov(Operand(esp, 1 * kPointerSize),
           Immediate(ExternalReference::isolate_address(masm->isolate())));
    __ mov(Operand(esp, 0), eax);
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
        2);
  }
  __ popad();

  // Perform prologue operations usually performed by the young code stub.
  __ pop(eax);  // Pop return address into scratch register.
  // (Continuation of Generate_MarkCodeAsExecutedOnce: build the standard
  // frame that the skipped young-code prologue would have built.)
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS Function.
  __ push(eax);  // Push return address after frame prologue.

  // Jump to point after the code-age stub.
  __ ret(0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}


// Notifies the runtime of a stub failure, preserving all registers, then
// returns to the IC miss stub whose continuation is still on the stack.
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ pushad();
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ popad();
    // Tear down internal frame.
  }

  __ pop(MemOperand(esp, 0));  // Ignore state offset
  __ ret(0);  // Return to IC Miss stub, continuation still on stack.
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


// Notifies the runtime of a deoptimization of the given bailout type, then
// continues at the deopt continuation according to the bailout state left on
// the stack.
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass deoptimization type to the runtime system.
    __ push(Immediate(Smi::FromInt(static_cast<int>(type))));
    __ CallRuntime(Runtime::kNotifyDeoptimized);

    // Tear down internal frame.
  }

  // Get the full codegen state from the stack and untag it.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ SmiUntag(ecx);

  // Switch on the state.
  Label not_no_registers, not_tos_eax;
  __ cmp(ecx, static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS));
  __ j(not_equal, &not_no_registers, Label::kNear);
  __ ret(1 * kPointerSize);  // Remove state.

  __ bind(&not_no_registers);
  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), eax.code());
  __ mov(eax, Operand(esp, 2 * kPointerSize));
  __ cmp(ecx, static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER));
  __ j(not_equal, &not_tos_eax, Label::kNear);
  __ ret(2 * kPointerSize);  // Remove state, eax.

  __ bind(&not_tos_eax);
  __ Abort(kNoCasesLeft);
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


// static
// Implements the Date.prototype getters: loads the requested date field,
// using the per-isolate date cache when its stamp is still valid.
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- eax    : number of arguments
  //  -- edi    : function
  //  -- esi    : context
  //  -- esp[0] : return address
  //  -- esp[4] : receiver
  // -----------------------------------

  // 1. Load receiver into eax and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    __ mov(eax, Operand(esp, kPointerSize));
    __ JumpIfSmi(eax, &receiver_not_date);
    __ CmpObjectType(eax, JS_DATE_TYPE, ebx);
    __ j(not_equal, &receiver_not_date);
  }

  // 2.
  // Load the specified date field, falling back to the runtime as necessary.
  if (field_index == JSDate::kDateValue) {
    __ mov(eax, FieldOperand(eax, JSDate::kValueOffset));
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      // Fast path: the field is cached on the JSDate object; valid only while
      // the object's cache stamp matches the isolate's date cache stamp.
      Label stamp_mismatch;
      __ mov(edx, Operand::StaticVariable(
                      ExternalReference::date_cache_stamp(masm->isolate())));
      __ cmp(edx, FieldOperand(eax, JSDate::kCacheStampOffset));
      __ j(not_equal, &stamp_mismatch, Label::kNear);
      __ mov(eax, FieldOperand(
                      eax, JSDate::kValueOffset + field_index * kPointerSize));
      __ ret(1 * kPointerSize);
      __ bind(&stamp_mismatch);
    }
    // Slow path: compute the field in C++.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(2, ebx);
    __ mov(Operand(esp, 0), eax);
    __ mov(Operand(esp, 1 * kPointerSize),
           Immediate(Smi::FromInt(field_index)));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ ret(1 * kPointerSize);

  // 3. Raise a TypeError if the receiver is not a date.
  __ bind(&receiver_not_date);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(ebp);
    __ Move(ebp, esp);
    __ Push(esi);
    __ Push(edi);
    __ Push(Immediate(0));
    __ CallRuntime(Runtime::kThrowNotDateError);
  }
}

// static
// Implements Function.prototype.apply.
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax     : argc
  //  -- esp[0]  : return address
  //  -- esp[4]  : argArray
  //  -- esp[8]  : thisArg
  //  -- esp[12] : receiver
  // -----------------------------------

  // 1. Load receiver into edi, argArray into eax (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label no_arg_array, no_this_arg;
    // Default thisArg (edx) and argArray (ebx) to undefined.
    __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
    __ mov(ebx, edx);
    __ mov(edi, Operand(esp, eax, times_pointer_size, kPointerSize));
    __ test(eax, eax);
    __ j(zero, &no_this_arg, Label::kNear);
    {
      __ mov(edx, Operand(esp, eax, times_pointer_size, 0));
      __ cmp(eax, Immediate(1));
      __ j(equal, &no_arg_array, Label::kNear);
      __ mov(ebx, Operand(esp, eax, times_pointer_size, -kPointerSize));
      __ bind(&no_arg_array);
    }
    __ bind(&no_this_arg);
    // Drop all arguments including the receiver, then push thisArg.
    __ PopReturnAddressTo(ecx);
    __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
    __ Push(edx);
    __ PushReturnAddressFrom(ecx);
    __ Move(eax, ebx);
  }

  // ----------- S t a t e -------------
  //  -- eax    : argArray
  //  -- edi    : receiver
  //  -- esp[0] : return address
  //  -- esp[4] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(edi, &receiver_not_callable, Label::kNear);
  __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(1 << Map::kIsCallable));
  __ j(zero, &receiver_not_callable, Label::kNear);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(eax, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
  __ JumpIfRoot(eax, Heap::kUndefinedValueRootIndex, &no_arguments,
                Label::kNear);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ Set(eax, 0);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    __ mov(Operand(esp, kPointerSize), edi);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}


// static
// Implements Function.prototype.call.
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // Stack Layout:
  //   esp[0]           : Return address
  //   esp[8]           : Argument n
  //   esp[16]          : Argument n-1
  //   ...
  //   esp[8 * n]       : Argument 1
  //   esp[8 * (n + 1)] : Receiver (callable to call)
  //
  // NOTE(review): slot size on ia32 is 4 bytes (kPointerSize); the 8-byte
  // offsets in the layout comment above look inherited from the x64 version —
  // the code itself uses times_pointer_size throughout. Confirm and fix the
  // comment upstream.
  //
  // eax contains the number of arguments, n, not counting the receiver.
  //
  // 1. Make sure we have at least one argument.
  {
    Label done;
    __ test(eax, eax);
    __ j(not_zero, &done, Label::kNear);
    __ PopReturnAddressTo(ebx);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ PushReturnAddressFrom(ebx);
    __ inc(eax);
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  __ mov(edi, Operand(esp, eax, times_pointer_size, kPointerSize));

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  {
    Label loop;
    __ mov(ecx, eax);
    __ bind(&loop);
    __ mov(ebx, Operand(esp, ecx, times_pointer_size, 0));
    __ mov(Operand(esp, ecx, times_pointer_size, kPointerSize), ebx);
    __ dec(ecx);
    __ j(not_sign, &loop);  // While non-negative (to copy return address).
    __ pop(ebx);  // Discard copy of return address.
    __ dec(eax);  // One fewer argument (first argument is new receiver).
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}


// Implements Reflect.apply.
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax     : argc
  //  -- esp[0]  : return address
  //  -- esp[4]  : argumentsList
  //  -- esp[8]  : thisArgument
  //  -- esp[12] : target
  //  -- esp[16] : receiver
  // -----------------------------------

  // 1. Load target into edi (if present), argumentsList into eax (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label done;
    // Default target (edi), thisArgument (edx) and argumentsList (ebx) to
    // undefined for missing arguments.
    __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
    __ mov(edx, edi);
    __ mov(ebx, edi);
    __ cmp(eax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ mov(edi, Operand(esp, eax, times_pointer_size, -0 * kPointerSize));
    __ j(equal, &done, Label::kNear);  // Flags from cmp(eax, 1) still valid.
    __ mov(edx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
    __ cmp(eax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ mov(ebx, Operand(esp, eax, times_pointer_size, -2 * kPointerSize));
    __ bind(&done);
    // Drop all arguments including the receiver, then push thisArgument.
    __ PopReturnAddressTo(ecx);
    __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
    __ Push(edx);
    __ PushReturnAddressFrom(ecx);
    __ Move(eax, ebx);
  }

  // ----------- S t a t e -------------
  //  -- eax    : argumentsList
  //  -- edi    : target
  //  -- esp[0] : return address
  //  -- esp[4] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(edi, &target_not_callable, Label::kNear);
  __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(1 << Map::kIsCallable));
  __ j(zero, &target_not_callable, Label::kNear);

  // 3a.
  // Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    __ mov(Operand(esp, kPointerSize), edi);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}

// Implements Reflect.construct.
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax     : argc
  //  -- esp[0]  : return address
  //  -- esp[4]  : new.target (optional)
  //  -- esp[8]  : argumentsList
  //  -- esp[12] : target
  //  -- esp[16] : receiver
  // -----------------------------------

  // 1. Load target into edi (if present), argumentsList into eax (if present),
  // new.target into edx (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    Label done;
    // Default target (edi), new.target (edx) and argumentsList (ebx) to
    // undefined for missing arguments.
    __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
    __ mov(edx, edi);
    __ mov(ebx, edi);
    __ cmp(eax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ mov(edi, Operand(esp, eax, times_pointer_size, -0 * kPointerSize));
    __ mov(edx, edi);  // new.target defaults to the target.
    __ j(equal, &done, Label::kNear);  // Flags from cmp(eax, 1) still valid.
    __ mov(ebx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
    __ cmp(eax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ mov(edx, Operand(esp, eax, times_pointer_size, -2 * kPointerSize));
    __ bind(&done);
    // Drop all arguments including the receiver, then push undefined as the
    // receiver for the construct call.
    __ PopReturnAddressTo(ecx);
    __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ PushReturnAddressFrom(ecx);
    __ Move(eax, ebx);
  }

  // ----------- S t a t e -------------
  //  -- eax    : argumentsList
  //  -- edx    : new.target
  //  -- edi    : target
  //  -- esp[0] : return address
  //  -- esp[4] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(edi, &target_not_constructor, Label::kNear);
  __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(1 << Map::kIsConstructor));
  __ j(zero, &target_not_constructor, Label::kNear);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(edx, &new_target_not_constructor, Label::kNear);
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(1 << Map::kIsConstructor));
  __ j(zero, &new_target_not_constructor, Label::kNear);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b.
  // The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    __ mov(Operand(esp, kPointerSize), edi);
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ mov(Operand(esp, kPointerSize), edx);
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}


// Entry point for the InternalArray function called as a normal function.
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : argc
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the InternalArray function.
  __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, edi);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray function should be a map.
    __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    __ test(ebx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForInternalArrayFunction);
    __ CmpObjectType(ebx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


// Entry point for the Array function called as a normal function.
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : argc
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, edi);
  // edx is set to the function itself — presumably the new.target expected by
  // ArrayConstructorStub; confirm against the stub's calling convention.
  __ mov(edx, edi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array function should be a map.
    __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    __ test(ebx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ebx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  __ mov(ebx, masm->isolate()->factory()->undefined_value());
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


// static
// Implements Math.max and Math.min over the actual arguments.
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- eax                 : number of arguments
  //  -- edi                 : function
  //  -- esi                 : context
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- esp[(argc + 1) * 8] : receiver
  // -----------------------------------
  // NOTE(review): slot size on ia32 is 4 bytes; the "* 8" offsets in the
  // state comment look inherited from the x64 version — the code uses
  // times_pointer_size. Confirm and fix the comment upstream.
  Condition const cc = (kind == MathMaxMinKind::kMin) ? below : above;
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  XMMRegister const reg = (kind == MathMaxMinKind::kMin) ? xmm1 : xmm0;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in edx and the double value in xmm0.
  __ LoadRoot(edx, root_index);
  __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
  __ Move(ecx, eax);  // ecx counts the remaining parameters.

  Label done_loop, loop;
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ test(ecx, ecx);
    __ j(zero, &done_loop);

    // Load the next parameter tagged value into ebx.
    __ mov(ebx, Operand(esp, ecx, times_pointer_size, 0));

    // Load the double value of the parameter into xmm1, maybe converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(ebx, &convert_smi);
    __ JumpIfRoot(FieldOperand(ebx, HeapObject::kMapOffset),
                  Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumber builtin to convert it.
      // Build a manual frame and save the Smi-tagged loop state (eax = argc,
      // ecx = index) plus the tagged accumulator (edx) across the call.
      FrameScope scope(masm, StackFrame::MANUAL);
      __ Push(ebp);
      __ Move(ebp, esp);
      __ Push(esi);
      __ Push(edi);
      __ SmiTag(eax);
      __ SmiTag(ecx);
      __ Push(eax);
      __ Push(ecx);
      __ Push(edx);
      __ mov(eax, ebx);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ mov(ebx, eax);
      __ Pop(edx);
      __ Pop(ecx);
      __ Pop(eax);
      __ Pop(edi);
      __ Pop(esi);
      {
        // Restore the double accumulator value (xmm0).
        Label restore_smi, done_restore;
        __ JumpIfSmi(edx, &restore_smi, Label::kNear);
        __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
        __ jmp(&done_restore, Label::kNear);
        __ bind(&restore_smi);
        // edx holds a Smi: untag, convert to double, re-tag.
        __ SmiUntag(edx);
        __ Cvtsi2sd(xmm0, edx);
        __ SmiTag(edx);
        __ bind(&done_restore);
      }
      __ SmiUntag(ecx);
      __ SmiUntag(eax);
      __ leave();
    }
    // Re-check the converted value (it may still need another round trip).
    __ jmp(&convert);
    __ bind(&convert_number);
    __ movsd(xmm1, FieldOperand(ebx, HeapNumber::kValueOffset));
    __ jmp(&done_convert, Label::kNear);
    __ bind(&convert_smi);
    __ SmiUntag(ebx);
    __ Cvtsi2sd(xmm1, ebx);
    __ SmiTag(ebx);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left
    // hand side (xmm0) and the next parameter value on the right hand side
    // (xmm1).
    Label compare_equal, compare_nan, compare_swap, done_compare;
    __ ucomisd(xmm0, xmm1);
    __ j(parity_even, &compare_nan, Label::kNear);  // Unordered => NaN.
    __ j(cc, &done_compare, Label::kNear);
    __ j(equal, &compare_equal, Label::kNear);

    // Result is on the right hand side.
    __ bind(&compare_swap);
    __ movaps(xmm0, xmm1);
    __ mov(edx, ebx);
    __ jmp(&done_compare, Label::kNear);

    // At least one side is NaN, which means that the result will be NaN too.
    __ bind(&compare_nan);
    __ LoadRoot(edx, Heap::kNanValueRootIndex);
    __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
    __ jmp(&done_compare, Label::kNear);

    // Left and right hand side are equal, check for -0 vs. +0.
    __ bind(&compare_equal);
    __ Push(edi);  // Preserve function in edi.
    // Inspect the sign bit of the relevant operand to distinguish -0 from +0.
    __ movmskpd(edi, reg);
    __ test(edi, Immediate(1));
    __ Pop(edi);
    __ j(not_zero, &compare_swap);

    __ bind(&done_compare);
    __ dec(ecx);
    __ jmp(&loop);
  }

  __ bind(&done_loop);
  // Drop all arguments including the receiver and return the tagged result.
  __ PopReturnAddressTo(ecx);
  __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
  __ PushReturnAddressFrom(ecx);
  __ mov(eax, edx);
  __ Ret();
}

// static
// Implements the Number constructor when called as a plain function.
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax                 : number of arguments
  //  -- edi                 : constructor function
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  // 1. Load the first argument into eax and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ test(eax, eax);
    __ j(zero, &no_arguments, Label::kNear);
    __ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
    __ PopReturnAddressTo(ecx);
    __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(ecx);
    __ mov(eax, ebx);
  }

  // 2a. Convert the first argument to a number.
  __ Jump(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);

  // 2b. No arguments, return +0 (already in eax).
  __ bind(&no_arguments);
  __ ret(1 * kPointerSize);
}


// static
// Implements the Number constructor when invoked with 'new'.
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax                 : number of arguments
  //  -- edi                 : constructor function
  //  -- edx                 : new target
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  // 1.
  // Make sure we operate in the context of the called function.
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // 2. Load the first argument into ebx and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ test(eax, eax);
    __ j(zero, &no_arguments, Label::kNear);
    __ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
    __ jmp(&done, Label::kNear);
    __ bind(&no_arguments);
    __ Move(ebx, Smi::FromInt(0));  // Default the wrapped value to +0.
    __ bind(&done);
    __ PopReturnAddressTo(ecx);
    __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(ecx);
  }

  // 3. Make sure ebx is a number.
  {
    Label done_convert;
    __ JumpIfSmi(ebx, &done_convert);
    __ CompareRoot(FieldOperand(ebx, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(equal, &done_convert);
    {
      // Preserve the constructor and new target across the ToNumber call.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(edi);
      __ Push(edx);
      __ Move(eax, ebx);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ Move(ebx, eax);
      __ Pop(edx);
      __ Pop(edi);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ cmp(edx, edi);
  __ j(not_equal, &new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(eax, edi, ebx, ecx, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
1782 __ bind(&new_object); 1783 { 1784 FrameScope scope(masm, StackFrame::INTERNAL); 1785 __ Push(ebx); // the first argument 1786 FastNewObjectStub stub(masm->isolate()); 1787 __ CallStub(&stub); 1788 __ Pop(FieldOperand(eax, JSValue::kValueOffset)); 1789 } 1790 __ Ret(); 1791 } 1792 1793 1794 // static 1795 void Builtins::Generate_StringConstructor(MacroAssembler* masm) { 1796 // ----------- S t a t e ------------- 1797 // -- eax : number of arguments 1798 // -- edi : constructor function 1799 // -- esp[0] : return address 1800 // -- esp[(argc - n) * 4] : arg[n] (zero-based) 1801 // -- esp[(argc + 1) * 4] : receiver 1802 // ----------------------------------- 1803 1804 // 1. Load the first argument into eax and get rid of the rest (including the 1805 // receiver). 1806 Label no_arguments; 1807 { 1808 __ test(eax, eax); 1809 __ j(zero, &no_arguments, Label::kNear); 1810 __ mov(ebx, Operand(esp, eax, times_pointer_size, 0)); 1811 __ PopReturnAddressTo(ecx); 1812 __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize)); 1813 __ PushReturnAddressFrom(ecx); 1814 __ mov(eax, ebx); 1815 } 1816 1817 // 2a. At least one argument, return eax if it's a string, otherwise 1818 // dispatch to appropriate conversion. 1819 Label to_string, symbol_descriptive_string; 1820 { 1821 __ JumpIfSmi(eax, &to_string, Label::kNear); 1822 STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE); 1823 __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx); 1824 __ j(above, &to_string, Label::kNear); 1825 __ j(equal, &symbol_descriptive_string, Label::kNear); 1826 __ Ret(); 1827 } 1828 1829 // 2b. No arguments, return the empty string (and pop the receiver). 1830 __ bind(&no_arguments); 1831 { 1832 __ LoadRoot(eax, Heap::kempty_stringRootIndex); 1833 __ ret(1 * kPointerSize); 1834 } 1835 1836 // 3a. Convert eax to a string. 1837 __ bind(&to_string); 1838 { 1839 ToStringStub stub(masm->isolate()); 1840 __ TailCallStub(&stub); 1841 } 1842 1843 // 3b. Convert symbol in eax to a string. 
  __ bind(&symbol_descriptive_string);
  {
    // Re-push the symbol as the sole runtime argument, then tail-call.
    __ PopReturnAddressTo(ecx);
    __ Push(eax);
    __ PushReturnAddressFrom(ecx);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}


// static
// ES6 'new String(value)': coerces the first argument to a string and wraps
// it in a JSValue (a String wrapper object).
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax                 : number of arguments
  //  -- edi                 : constructor function
  //  -- edx                 : new target
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // 2. Load the first argument into ebx and get rid of the rest (including the
  // receiver). With no arguments the value defaults to the empty string.
  {
    Label no_arguments, done;
    __ test(eax, eax);
    __ j(zero, &no_arguments, Label::kNear);
    __ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
    __ jmp(&done, Label::kNear);
    __ bind(&no_arguments);
    __ LoadRoot(ebx, Heap::kempty_stringRootIndex);
    __ bind(&done);
    __ PopReturnAddressTo(ecx);
    __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(ecx);
  }

  // 3. Make sure ebx is a string; anything else is converted with the
  // ToString stub, preserving constructor (edi) and new target (edx).
  {
    Label convert, done_convert;
    __ JumpIfSmi(ebx, &convert, Label::kNear);
    __ CmpObjectType(ebx, FIRST_NONSTRING_TYPE, ecx);
    __ j(below, &done_convert);
    __ bind(&convert);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(edi);
      __ Push(edx);
      __ Move(eax, ebx);
      __ CallStub(&stub);
      __ Move(ebx, eax);
      __ Pop(edx);
      __ Pop(edi);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ (i.e. a subclass is being
  // constructed); if so, take the generic object-creation path.
  Label new_object;
  __ cmp(edx, edi);
  __ j(not_equal, &new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(eax, edi, ebx, ecx, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object. The wrapped value is
  // kept on the stack across the stub call and then popped straight into
  // the new object's value slot.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(ebx);  // the first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(FieldOperand(eax, JSValue::kValueOffset));
  }
  __ Ret();
}


// Bails out to |stack_overflow| if pushing |ebx| (expected argument count)
// more pointers would run past the real stack limit. Clobbers ecx and edi.
static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
                                       Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- eax : actual number of arguments
  //  -- ebx : expected number of arguments
  //  -- edx : new target (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  ExternalReference real_stack_limit =
      ExternalReference::address_of_real_stack_limit(masm->isolate());
  __ mov(edi, Operand::StaticVariable(real_stack_limit));
  // Make ecx the space we have left. The stack might already be overflowed
  // here which will cause ecx to become negative.
  __ mov(ecx, esp);
  __ sub(ecx, edi);
  // Make edi the space we need for the array when it is unrolled onto the
  // stack.
  __ mov(edi, ebx);
  __ shl(edi, kPointerSizeLog2);
  // Check if the arguments will overflow the stack.
  __ cmp(ecx, edi);
  __ j(less_equal, stack_overflow);  // Signed comparison.
}


// Builds an arguments adaptor frame: saved ebp, sentinel, function, and the
// Smi-tagged actual argument count.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ push(ebp);
  __ mov(ebp, esp);

  // Store the arguments adaptor context sentinel.
  __ push(Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Push the function on the stack.
  __ push(edi);

  // Preserve the number of arguments on the stack. Must preserve eax,
  // ebx and ecx because these registers are used when copying the
  // arguments and the receiver. The lea computes eax * 2 + kSmiTag, i.e.
  // Smi-tags eax without touching it.
  STATIC_ASSERT(kSmiTagSize == 1);
  __ lea(edi, Operand(eax, eax, times_1, kSmiTag));
  __ push(edi);
}


// Tears down an arguments adaptor frame and removes the actual arguments
// (count was saved Smi-tagged in the frame) plus the receiver.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack.
  __ mov(ebx, Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ leave();

  // Remove caller arguments from the stack. ebx is a Smi, so times_2 already
  // scales it by the pointer size (Smi value * 2 * 2 = value * 4).
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ pop(ecx);
  __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize));  // 1 ~ receiver
  __ push(ecx);
}


// static
// ES6 Reflect.apply / Function.prototype.apply fast path: spreads the
// elements of argumentsList onto the stack and dispatches to Call or
// Construct.
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : argumentsList
  //  -- edi    : target
  //  -- edx    : new.target (checked to be constructor or undefined)
  //  -- esp[0] : return address.
  //  -- esp[4] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(eax, &create_runtime);

    // Load the map of argumentsList into ecx.
    __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));

    // Load native context into ebx.
    __ mov(ebx, NativeContextOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ cmp(ecx, ContextOperand(ebx, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ j(equal, &create_arguments);
    __ cmp(ecx, ContextOperand(ebx, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ j(equal, &create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CmpInstanceType(ecx, JS_ARRAY_TYPE);
    __ j(equal, &create_array);

    // Ask the runtime to create the list (actually a FixedArray). Target
    // (edi) and new.target (edx) are preserved across the call; the runtime
    // result (a FixedArray) comes back in eax.
    __ bind(&create_runtime);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(edi);
      __ Push(edx);
      __ Push(eax);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ Pop(edx);
      __ Pop(edi);
      __ mov(ebx, FieldOperand(eax, FixedArray::kLengthOffset));
      __ SmiUntag(ebx);
    }
    __ jmp(&done_create);

    // Try to create the list from an arguments object. Only safe if the
    // length property still matches the backing elements' length (i.e. the
    // object was not modified); otherwise fall back to the runtime.
    __ bind(&create_arguments);
    __ mov(ebx, FieldOperand(eax, JSArgumentsObject::kLengthOffset));
    __ mov(ecx, FieldOperand(eax, JSObject::kElementsOffset));
    __ cmp(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
    __ j(not_equal, &create_runtime);
    __ SmiUntag(ebx);
    __ mov(eax, ecx);
    __ jmp(&done_create);

    // Try to create the list from a JSArray object. Only packed/holey Smi
    // and object element kinds up to FAST_ELEMENTS are handled here;
    // FAST_HOLEY_SMI_ELEMENTS is excluded because holes would leak as
    // the_hole values.
    __ bind(&create_array);
    __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(ecx);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    __ cmp(ecx, Immediate(FAST_ELEMENTS));
    __ j(above, &create_runtime);
    __ cmp(ecx, Immediate(FAST_HOLEY_SMI_ELEMENTS));
    __ j(equal, &create_runtime);
    __ mov(ebx, FieldOperand(eax, JSArray::kLengthOffset));
    __ SmiUntag(ebx);
    __ mov(eax, FieldOperand(eax, JSArray::kElementsOffset));

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    ExternalReference real_stack_limit =
        ExternalReference::address_of_real_stack_limit(masm->isolate());
    __ mov(ecx, Operand::StaticVariable(real_stack_limit));
    // Make ecx the space we have left. The stack might already be overflowed
    // here which will cause ecx to become negative.
    __ neg(ecx);
    __ add(ecx, esp);
    // Convert the remaining byte count to a slot count for comparison with
    // the element count in ebx.
    __ sar(ecx, kPointerSizeLog2);
    // Check if the arguments will overflow the stack.
    __ cmp(ecx, ebx);
    __ j(greater, &done, Label::kNear);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- edi    : target
  //  -- eax    : args (a FixedArray built from argumentsList)
  //  -- ebx    : len (number of elements to push from args)
  //  -- edx    : new.target (checked to be constructor or undefined)
  //  -- esp[0] : return address.
  //  -- esp[4] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  // Registers are scarce here: new.target is parked in xmm0 so that edx can
  // temporarily hold the return address while the elements are pushed.
  {
    __ movd(xmm0, edx);
    __ PopReturnAddressTo(edx);
    __ Move(ecx, Immediate(0));
    Label done, loop;
    __ bind(&loop);
    __ cmp(ecx, ebx);
    __ j(equal, &done, Label::kNear);
    __ Push(
        FieldOperand(eax, ecx, times_pointer_size, FixedArray::kHeaderSize));
    __ inc(ecx);
    __ jmp(&loop);
    __ bind(&done);
    __ PushReturnAddressFrom(edx);
    __ movd(edx, xmm0);
    __ Move(eax, ebx);
  }

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    __ CompareRoot(edx, Heap::kUndefinedValueRootIndex);
    __ j(equal, masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}

namespace {

// Drops top JavaScript frame and an arguments adaptor frame below it (if
// present) preserving all the arguments prepared for current call.
// Does nothing if debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// |  ...
// |  g()'s arg M
// |  ...
// |  g()'s arg 1
// |  g()'s receiver arg
// |  g()'s caller pc
// ------- g()'s frame: -------
// |  g()'s caller fp      <- fp
// |  g()'s context
// |  function pointer: g
// |  -------------------------
// |  ...
// |  ...
// |  f()'s arg N
// |  ...
// |  f()'s arg 1
// |  f()'s receiver arg
// |  f()'s caller pc      <- sp
// ----------------------
//
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if ES2015 tail call elimination is enabled.
  Label done;
  ExternalReference is_tail_call_elimination_enabled =
      ExternalReference::is_tail_call_elimination_enabled_address(
          masm->isolate());
  __ movzx_b(scratch1,
             Operand::StaticVariable(is_tail_call_elimination_enabled));
  __ cmp(scratch1, Immediate(0));
  __ j(equal, &done, Label::kNear);

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ cmp(Operand(ebp, CommonFrameConstants::kContextOrFrameTypeOffset),
           Immediate(Smi::FromInt(StackFrame::STUB)));
    __ j(not_equal, &no_interpreter_frame, Label::kNear);
    __ mov(ebp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Register caller_args_count_reg = scratch1;
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ mov(scratch2, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ cmp(Operand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &no_arguments_adaptor, Label::kNear);

  // Drop current frame and load arguments count from arguments adaptor frame.
  __ mov(ebp, scratch2);
  __ mov(caller_args_count_reg,
         Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);
  __ jmp(&formal_parameter_count_loaded, Label::kNear);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count from its SharedFunctionInfo; with
  // no adaptor frame, actual and formal counts are equal.
  __ mov(scratch1, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(scratch1,
         FieldOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ mov(
      caller_args_count_reg,
      FieldOperand(scratch1, SharedFunctionInfo::kFormalParameterCountOffset));
  __ SmiUntag(caller_args_count_reg);

  __ bind(&formal_parameter_count_loaded);

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3, ReturnAddressState::kOnStack, 0);
  __ bind(&done);
}
}  // namespace

// static
// ES6 9.2.1 [[Call]] for ordinary JSFunctions: converts the receiver when
// required by the function's language mode and invokes the function's code.
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(edi);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ test_b(FieldOperand(edx, SharedFunctionInfo::kFunctionKindByteOffset),
            Immediate(SharedFunctionInfo::kClassConstructorBitsWithinByte));
  __ j(not_zero, &class_constructor);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  // The assert guarantees the native and strict-mode bits live in the same
  // byte, so a single test_b below can check both at once.
  STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
                SharedFunctionInfo::kStrictModeByteOffset);
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ test_b(FieldOperand(edx, SharedFunctionInfo::kNativeByteOffset),
            Immediate((1 << SharedFunctionInfo::kNativeBitWithinByte) |
                      (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
  __ j(not_zero, &done_convert);
  {
    // ----------- S t a t e -------------
    //  -- eax : the number of arguments (not including the receiver)
    //  -- edx : the shared function info.
    //  -- edi : the function to call (checked to be a JSFunction)
    //  -- esi : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(ecx);
    } else {
      Label convert_to_object, convert_receiver;
      __ mov(ecx, Operand(esp, eax, times_pointer_size, kPointerSize));
      __ JumpIfSmi(ecx, &convert_to_object, Label::kNear);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ebx);
      __ j(above_equal, &done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(ecx, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy, Label::kNear);
        __ JumpIfNotRoot(ecx, Heap::kNullValueRootIndex, &convert_to_object,
                         Label::kNear);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(ecx);
        }
        __ jmp(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject. The argument count (Smi-tagged)
        // and the function are preserved across the stub call.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(eax);
        __ Push(eax);
        __ Push(edi);
        __ mov(eax, ecx);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ mov(ecx, eax);
        __ Pop(edi);
        __ Pop(eax);
        __ SmiUntag(eax);
      }
      // Reload shared function info; edx was clobbered above.
      __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    // Store the converted receiver back into its stack slot.
    __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ecx);
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the shared function info.
  //  -- edi : the function to call (checked to be a JSFunction)
  //  -- esi : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, eax, ebx, ecx, edx);
    // Reload shared function info.
    __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  }

  __ mov(ebx,
         FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  __ SmiUntag(ebx);
  ParameterCount actual(eax);
  ParameterCount expected(ebx);
  __ InvokeFunctionCode(edi, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());
  // The function is a "classConstructor", need to raise an exception.
  // The runtime call throws and does not return.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ push(edi);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}


namespace {

// Pushes the [[BoundArguments]] of the JSBoundFunction in edi below the
// already-pushed call arguments, shifting those (and the return address)
// down to make room. Used by both [[Call]] and [[Construct]] paths.
void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : new.target (only in case of [[Construct]])
  //  -- edi : target (checked to be a JSBoundFunction)
  // -----------------------------------

  // Load [[BoundArguments]] into ecx and length of that into ebx.
  Label no_bound_arguments;
  __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
  __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
  __ SmiUntag(ebx);
  __ test(ebx, ebx);
  __ j(zero, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- eax : the number of arguments (not including the receiver)
    //  -- edx : new.target (only in case of [[Construct]])
    //  -- edi : target (checked to be a JSBoundFunction)
    //  -- ecx : the [[BoundArguments]] (implemented as FixedArray)
    //  -- ebx : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ lea(ecx, Operand(ebx, times_pointer_size, 0));
      __ sub(esp, ecx);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(esp, ecx, Heap::kRealStackLimitRootIndex);
      __ j(greater, &done, Label::kNear);  // Signed comparison.
      // Restore the stack pointer.
      __ lea(esp, Operand(esp, ebx, times_pointer_size, 0));
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Adjust effective number of arguments to include return address.
    __ inc(eax);

    // Relocate arguments and return address down the stack. ebx is turned
    // into a pointer to the old stack top; xmm0 is a scratch register for
    // the word-by-word copy (all GP registers are live here).
    {
      Label loop;
      __ Set(ecx, 0);
      __ lea(ebx, Operand(esp, ebx, times_pointer_size, 0));
      __ bind(&loop);
      __ movd(xmm0, Operand(ebx, ecx, times_pointer_size, 0));
      __ movd(Operand(esp, ecx, times_pointer_size, 0), xmm0);
      __ inc(ecx);
      __ cmp(ecx, eax);
      __ j(less, &loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments). The loop
    // counts ebx down from the length to 0; movd and lea do not clobber
    // EFLAGS, so j(greater) still tests the dec above.
    {
      Label loop;
      __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
      __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
      __ SmiUntag(ebx);
      __ bind(&loop);
      __ dec(ebx);
      __ movd(xmm0, FieldOperand(ecx, ebx, times_pointer_size,
                                 FixedArray::kHeaderSize));
      __ movd(Operand(esp, eax, times_pointer_size, 0), xmm0);
      __ lea(eax, Operand(eax, 1));
      __ j(greater, &loop);
    }

    // Adjust effective number of arguments (eax contains the number of
    // arguments from the call plus return address plus the number of
    // [[BoundArguments]]), so we need to subtract one for the return address.
    __ dec(eax);
  }
  __ bind(&no_bound_arguments);
}

}  // namespace


// static
// ES6 9.4.1.1 [[Call]] for bound functions: installs [[BoundThis]] as the
// receiver, pushes [[BoundArguments]], and calls [[BoundTargetFunction]].
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(edi);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, eax, ebx, ecx, edx);
  }

  // Patch the receiver to [[BoundThis]].
  __ mov(ebx, FieldOperand(edi, JSBoundFunction::kBoundThisOffset));
  __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ebx);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ecx, Operand::StaticVariable(ExternalReference(
                  Builtins::kCall_ReceiverIsAny, masm->isolate())));
  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
  __ jmp(ecx);
}


// static
// ES6 7.3.12 Call(F, V, [argumentsList]): generic call dispatcher for any
// target object (JSFunction, bound function, proxy, or callable exotic).
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(edi, &non_callable);
  __ bind(&non_smi);
  // Plain JSFunctions and bound functions get their specialized builtins.
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(equal, masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
       RelocInfo::CODE_TARGET);
  __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
  __ j(equal, masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
       RelocInfo::CODE_TARGET);

  // Check if target has a [[Call]] internal method.
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(1 << Map::kIsCallable));
  __ j(zero, &non_callable);

  __ CmpInstanceType(ecx, JS_PROXY_TYPE);
  __ j(not_equal, &non_function);

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, eax, ebx, ecx, edx);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ PopReturnAddressTo(ecx);
  __ Push(edi);
  __ PushReturnAddressFrom(ecx);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ add(eax, Immediate(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadGlobalFunction(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, edi);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable; throws (does not return).
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(edi);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}


// static
// [[Construct]] for ordinary JSFunctions: tail calls the function-specific
// construct stub from the SharedFunctionInfo.
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the new target (checked to be a constructor)
  //  -- edi : the constructor to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(edi);

  // Calling convention for function specific ConstructStubs require
  // ebx to contain either an AllocationSite or undefined.
  __ LoadRoot(ebx, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
  __ jmp(ecx);
}


// static
// ES6 9.4.1.2 [[Construct]] for bound functions: pushes [[BoundArguments]]
// and constructs the [[BoundTargetFunction]].
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the new target (checked to be a constructor)
  //  -- edi : the constructor to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(edi);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label done;
    __ cmp(edi, edx);
    __ j(not_equal, &done, Label::kNear);
    __ mov(edx, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&done);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ecx, Operand::StaticVariable(
                  ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
  __ jmp(ecx);
}


// static
// ES6 9.5.13 [[Construct]] for proxies: defers entirely to the runtime.
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the constructor to call (checked to be a JSProxy)
  //  -- edx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ PopReturnAddressTo(ecx);
  __ Push(edi);
  __ Push(edx);
  __ PushReturnAddressFrom(ecx);
  // Include the pushed new_target, constructor and the receiver.
  __ add(eax, Immediate(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}


// static
// ES6 7.3.13 Construct(F, [argumentsList], [newTarget]): generic construct
// dispatcher for any target object.
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- edi : the constructor to call (can be any Object)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(edi, &non_constructor, Label::kNear);

  // Dispatch based on instance type.
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(equal, masm->isolate()->builtins()->ConstructFunction(),
       RelocInfo::CODE_TARGET);

  // Check if target has a [[Construct]] internal method.
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(1 << Map::kIsConstructor));
  __ j(zero, &non_constructor, Label::kNear);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
  __ j(equal, masm->isolate()->builtins()->ConstructBoundFunction(),
       RelocInfo::CODE_TARGET);

  // Only dispatch to proxies after checking whether they are constructors.
  __ CmpInstanceType(ecx, JS_PROXY_TYPE);
  __ j(equal, masm->isolate()->builtins()->ConstructProxy(),
       RelocInfo::CODE_TARGET);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadGlobalFunction(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, edi);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}

// static
// Tail calls the runtime to allocate an object of the requested (untagged)
// size in new space.
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- edx    : requested object size (untagged)
  //  -- esp[0] : return address
  // -----------------------------------
  __ SmiTag(edx);
  __ PopReturnAddressTo(ecx);
  __ Push(edx);
  __ PushReturnAddressFrom(ecx);
  // This builtin can be entered without a valid context; install Smi zero
  // so the runtime entry sees a well-defined esi.
  __ Move(esi, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}

// static
// Tail calls the runtime to allocate an object of the requested (untagged)
// size directly in old space.
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- edx    : requested object size (untagged)
  //  -- esp[0] : return address
  // -----------------------------------
  __ SmiTag(edx);
  __ PopReturnAddressTo(ecx);
  __ Push(edx);
  __ Push(Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ PushReturnAddressFrom(ecx);
  __ Move(esi, Smi::FromInt(0));
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}

// static
// Converts the string in eax to a number, fast-pathing strings that carry a
// cached array index in their hash field.
void Builtins::Generate_StringToNumber(MacroAssembler* masm) {
  // The StringToNumber stub takes one argument in eax.
  __ AssertString(eax);

  // Check if string has a cached array index.
  Label runtime;
  __ test(FieldOperand(eax, String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));
  __ j(not_zero, &runtime, Label::kNear);
  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
  __ IndexFromHash(eax, eax);
  __ Ret();

  __ bind(&runtime);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    // Push argument.
    __ push(eax);
    // We cannot use a tail call here because this builtin can also be called
    // from wasm.
    __ CallRuntime(Runtime::kStringToNumber);
  }
  __ Ret();
}

// static
// ES6 7.1.3 ToNumber: Smis and heap numbers are returned unchanged; all
// other values are handled by the NonNumberToNumber builtin.
void Builtins::Generate_ToNumber(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in eax.
  Label not_smi;
  __ JumpIfNotSmi(eax, &not_smi, Label::kNear);
  __ Ret();
  __ bind(&not_smi);

  Label not_heap_number;
  __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
  __ j(not_equal, &not_heap_number, Label::kNear);
  __ Ret();
  __ bind(&not_heap_number);

  __ Jump(masm->isolate()->builtins()->NonNumberToNumber(),
          RelocInfo::CODE_TARGET);
}

// static
// ToNumber for values that are neither Smi nor heap number: strings go to
// StringToNumber, oddballs use their cached to_number value, everything
// else falls back to the runtime.
void Builtins::Generate_NonNumberToNumber(MacroAssembler* masm) {
  // The NonNumberToNumber stub takes one argument in eax.
  __ AssertNotNumber(eax);

  Label not_string;
  __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edi);
  // eax: object
  // edi: object map
  __ j(above_equal, &not_string, Label::kNear);
  __ Jump(masm->isolate()->builtins()->StringToNumber(),
          RelocInfo::CODE_TARGET);
  __ bind(&not_string);

  Label not_oddball;
  __ CmpInstanceType(edi, ODDBALL_TYPE);
  __ j(not_equal, &not_oddball, Label::kNear);
  __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset));
  __ Ret();
  __ bind(&not_oddball);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    // Push argument.
    __ push(eax);
    // We cannot use a tail call here because this builtin can also be called
    // from wasm.
    __ CallRuntime(Runtime::kToNumber);
  }
  __ Ret();
}

// NOTE(review): this function continues beyond the end of this chunk; only
// its head is visible here.
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : actual number of arguments
  //  -- ebx : expected number of arguments
  //  -- edx : new target (passed through to callee)
  //  -- edi : function (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;
  __ IncrementCounter(masm->isolate()->counters()->arguments_adaptors(), 1);

  Label enough, too_few;
  __ cmp(eax, ebx);
  __ j(less, &too_few);
  __ cmp(ebx, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
  __ j(equal, &dont_adapt_arguments);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentsAdaptorStackCheck(masm, &stack_overflow);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(edi, Operand(ebp, eax, times_4, offset));
    __ mov(eax, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ inc(eax);
    __ push(Operand(edi, 0));
    __ sub(edi, Immediate(kPointerSize));
    __ cmp(eax, ebx);
    __ j(less, &copy);
    // eax now contains the expected number of arguments.
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentsAdaptorStackCheck(masm, &stack_overflow);

    // Remember expected arguments in ecx.
    __ mov(ecx, ebx);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(edi, Operand(ebp, eax, times_4, offset));
    // ebx = expected - actual.
2781 __ sub(ebx, eax); 2782 // eax = -actual - 1 2783 __ neg(eax); 2784 __ sub(eax, Immediate(1)); 2785 2786 Label copy; 2787 __ bind(©); 2788 __ inc(eax); 2789 __ push(Operand(edi, 0)); 2790 __ sub(edi, Immediate(kPointerSize)); 2791 __ test(eax, eax); 2792 __ j(not_zero, ©); 2793 2794 // Fill remaining expected arguments with undefined values. 2795 Label fill; 2796 __ bind(&fill); 2797 __ inc(eax); 2798 __ push(Immediate(masm->isolate()->factory()->undefined_value())); 2799 __ cmp(eax, ebx); 2800 __ j(less, &fill); 2801 2802 // Restore expected arguments. 2803 __ mov(eax, ecx); 2804 } 2805 2806 // Call the entry point. 2807 __ bind(&invoke); 2808 // Restore function pointer. 2809 __ mov(edi, Operand(ebp, ArgumentsAdaptorFrameConstants::kFunctionOffset)); 2810 // eax : expected number of arguments 2811 // edx : new target (passed through to callee) 2812 // edi : function (passed through to callee) 2813 __ mov(ecx, FieldOperand(edi, JSFunction::kCodeEntryOffset)); 2814 __ call(ecx); 2815 2816 // Store offset of return address for deoptimizer. 2817 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset()); 2818 2819 // Leave frame and return. 2820 LeaveArgumentsAdaptorFrame(masm); 2821 __ ret(0); 2822 2823 // ------------------------------------------- 2824 // Dont adapt arguments. 2825 // ------------------------------------------- 2826 __ bind(&dont_adapt_arguments); 2827 __ mov(ecx, FieldOperand(edi, JSFunction::kCodeEntryOffset)); 2828 __ jmp(ecx); 2829 2830 __ bind(&stack_overflow); 2831 { 2832 FrameScope frame(masm, StackFrame::MANUAL); 2833 __ CallRuntime(Runtime::kThrowStackOverflow); 2834 __ int3(); 2835 } 2836 } 2837 2838 2839 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver, 2840 Register function_template_info, 2841 Register scratch0, Register scratch1, 2842 Label* receiver_check_failed) { 2843 // If there is no signature, return the holder. 
  // No signature (undefined) means any receiver is acceptable.
  __ CompareRoot(FieldOperand(function_template_info,
                              FunctionTemplateInfo::kSignatureOffset),
                 Heap::kUndefinedValueRootIndex);
  Label receiver_check_passed;
  __ j(equal, &receiver_check_passed, Label::kNear);

  // Walk the prototype chain.
  __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any. GetMapConstructor leaves the constructor in
  // scratch0 and its instance type in scratch1.
  __ GetMapConstructor(scratch0, scratch0, scratch1);
  __ CmpInstanceType(scratch1, JS_FUNCTION_TYPE);
  Label next_prototype;
  __ j(not_equal, &next_prototype, Label::kNear);

  // Get the constructor's signature (the function data of its shared info).
  __ mov(scratch0,
         FieldOperand(scratch0, JSFunction::kSharedFunctionInfoOffset));
  __ mov(scratch0,
         FieldOperand(scratch0, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ cmp(scratch0, FieldOperand(function_template_info,
                                FunctionTemplateInfo::kSignatureOffset));
  __ j(equal, &receiver_check_passed, Label::kNear);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(scratch0, &next_prototype, Label::kNear);
  __ CmpObjectType(scratch0, FUNCTION_TEMPLATE_INFO_TYPE, scratch1);
  __ j(not_equal, &next_prototype, Label::kNear);

  // Otherwise load the parent function template and iterate.
  __ mov(scratch0,
         FieldOperand(scratch0, FunctionTemplateInfo::kParentTemplateOffset));
  __ jmp(&function_template_loop, Label::kNear);

  // Load the next prototype.
  __ bind(&next_prototype);
  // Reload the receiver's map; only hidden prototypes may be walked — hitting
  // an ordinary prototype boundary fails the check.
  __ mov(receiver, FieldOperand(receiver, HeapObject::kMapOffset));
  __ test(FieldOperand(receiver, Map::kBitField3Offset),
          Immediate(Map::HasHiddenPrototype::kMask));
  __ j(zero, receiver_check_failed);

  // Advance to the prototype and its map, then iterate.
  __ mov(receiver, FieldOperand(receiver, Map::kPrototypeOffset));
  __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
  // Iterate.
  __ jmp(&prototype_loop_start, Label::kNear);

  __ bind(&receiver_check_passed);
}


void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax                : number of arguments (not including the receiver)
  //  -- edi                : callee
  //  -- esi                : context
  //  -- esp[0]             : return address
  //  -- esp[4]             : last argument
  //  -- ...
  //  -- esp[eax * 4]       : first argument
  //  -- esp[(eax + 1) * 4] : receiver
  // -----------------------------------

  // Load the FunctionTemplateInfo.
  __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check; eax is saved across the check because
  // it is handed to CompatibleReceiverCheck as a scratch register.
  Label receiver_check_failed;
  __ mov(ecx, Operand(esp, eax, times_pointer_size, kPCOnStackSize));
  __ Push(eax);
  CompatibleReceiverCheck(masm, ecx, ebx, edx, eax, &receiver_check_failed);
  __ Pop(eax);
  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ mov(edx, FieldOperand(ebx, FunctionTemplateInfo::kCallCodeOffset));
  __ mov(edx, FieldOperand(edx, CallHandlerInfo::kFastHandlerOffset));
  __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(edx);

  // Compatible receiver check failed: pop return address, arguments and
  // receiver and throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  __ Pop(eax);
  __ PopReturnAddressTo(ebx);
  // Drop the arguments plus the receiver: (eax + 1) * kPointerSize bytes.
  __ lea(eax, Operand(eax, times_pointer_size, 1 * kPointerSize));
  __ add(esp, eax);
  __ PushReturnAddressFrom(ebx);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
  }
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(eax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  Label skip;
  // If the code object is null, just return to the unoptimized code.
  __ cmp(eax, Immediate(0));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Load deoptimization data from the code object.
  __ mov(ebx, Operand(eax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ mov(ebx, Operand(ebx, FixedArray::OffsetOfElementAt(
                               DeoptimizationInputData::kOsrPcOffsetIndex) -
                               kHeapObjectTag));
  __ SmiUntag(ebx);

  // Compute the target address = code_obj + header_size + osr_offset
  __ lea(eax, Operand(eax, ebx, times_1, Code::kHeaderSize - kHeapObjectTag));

  // Overwrite the return address on the stack.
  __ mov(Operand(esp, 0), eax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}


#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_IA32