// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_X64

#include "codegen.h"
#include "deoptimizer.h"
#include "full-codegen.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments excluding receiver
  //  -- rdi                 : called function (only guaranteed when
  //                           extra_args requires it)
  //  -- rsi                 : context
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[8 * argc]       : first argument (argc == rax)
  //  -- rsp[8 * (argc + 1)] : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ PopReturnAddressTo(kScratchRegister);
    __ push(rdi);
    __ PushReturnAddressFrom(kScratchRegister);
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects rax to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addq(rax, Immediate(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
}


static void CallRuntimePassFunction(MacroAssembler* masm,
                                    Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  __ push(rdi);
  // Push call kind information.
  __ push(rcx);
  // Function is also the parameter to the runtime call.
  __ push(rdi);

  __ CallRuntime(function_id, 1);
  // Restore call kind information.
  __ pop(rcx);
  // Restore the function.
  __ pop(rdi);
}
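

// Note on the tail calls below: a Code object's first instruction is located
// Code::kHeaderSize bytes past its tagged address, so jump targets are
// computed with lea(..., FieldOperand(code, Code::kHeaderSize)) rather than
// by jumping to the Code pointer itself.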
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ movq(kScratchRegister,
          FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movq(kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
  __ lea(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
  __ jmp(kScratchRegister);
}


void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // the stack limit as a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);

  CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
  // Tail call to returned code.
  __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rax);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  // -----------------------------------

  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Store a smi-tagged arguments count on the stack.
    __ Integer32ToSmi(rax, rax);
    __ push(rax);

    // Push the function to invoke on the stack.
    __ push(rdi);

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      Label undo_allocation;

#ifdef ENABLE_DEBUGGER_SUPPORT
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(masm->isolate());
      __ Move(kScratchRegister, debug_step_in_fp);
      __ cmpq(Operand(kScratchRegister, 0), Immediate(0));
      __ j(not_equal, &rt_call);
#endif

      // Verified that the constructor is a JSFunction.
      // Load the initial map and verify that it is in fact a map.
      // rdi: constructor
      __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
      // The Smi check below catches both a NULL value and a Smi.
      ASSERT(kSmiTag == 0);
      __ JumpIfSmi(rax, &rt_call);
      // rdi: constructor
      // rax: initial map (if proven valid below)
      __ CmpObjectType(rax, MAP_TYPE, rbx);
      __ j(not_equal, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc). In which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // rdi: constructor
      // rax: initial map
      __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
      __ j(equal, &rt_call);

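      // With count_constructions, every construction decrements a counter on
      // the shared function info; once it reaches zero, the runtime is asked
      // to finalize (shrink) the instance size so unused in-object slack can
      // be reclaimed.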
      if (count_constructions) {
        Label allocate;
        // Decrease generous allocation count.
        __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
        __ decb(FieldOperand(rcx,
                             SharedFunctionInfo::kConstructionCountOffset));
        __ j(not_zero, &allocate);

        __ push(rax);
        __ push(rdi);

        __ push(rdi);  // constructor
        // The call will replace the stub, so the countdown is only done once.
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ pop(rdi);
        __ pop(rax);

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
      __ shl(rdi, Immediate(kPointerSizeLog2));
      // rdi: size of new object
      __ Allocate(rdi,
                  rbx,
                  rdi,
                  no_reg,
                  &rt_call,
                  NO_ALLOCATION_FLAGS);
      // Allocated the JSObject, now initialize the fields.
      // rax: initial map
      // rbx: JSObject (not HeapObject tagged - the actual address).
      // rdi: start of next object
      __ movq(Operand(rbx, JSObject::kMapOffset), rax);
      __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
      __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx);
      __ movq(Operand(rbx, JSObject::kElementsOffset), rcx);
      // Set extra fields in the newly allocated object.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object
      __ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      if (count_constructions) {
        __ movzxbq(rsi,
                   FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
        __ lea(rsi,
               Operand(rbx, rsi, times_pointer_size, JSObject::kHeaderSize));
        // rsi: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ cmpq(rsi, rdi);
          __ Assert(less_equal,
                    kUnexpectedNumberOfPreAllocatedPropertyFields);
        }
        __ InitializeFieldsWithFiller(rcx, rsi, rdx);
        __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
      }
      __ InitializeFieldsWithFiller(rcx, rdi, rdx);

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object
      __ or_(rbx, Immediate(kHeapObjectTag));

      // Check if a non-empty properties array is needed.
      // Allocate and initialize a FixedArray if it is.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object
      // Calculate the total number of properties described by the map.
      __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
      __ movzxbq(rcx,
                 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
      __ addq(rdx, rcx);
      // Calculate unused properties past the end of the in-object properties.
      __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
      __ subq(rdx, rcx);
      // Done if no extra properties are to be allocated.
      __ j(zero, &allocated);
      __ Assert(positive, kPropertyAllocationCountFailed);

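      // rdx now holds unused_property_fields + pre_allocated_property_fields
      // - in_object_properties, i.e. the number of out-of-object property
      // slots still required; e.g. 2 + 6 - 4 leaves a 4-element backing
      // store to allocate below.
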
      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // rbx: JSObject
      // rdi: start of next object (will be start of FixedArray)
      // rdx: number of elements in properties array
      __ Allocate(FixedArray::kHeaderSize,
                  times_pointer_size,
                  rdx,
                  rdi,
                  rax,
                  no_reg,
                  &undo_allocation,
                  RESULT_CONTAINS_TOP);

      // Initialize the FixedArray.
      // rbx: JSObject
      // rdi: FixedArray
      // rdx: number of elements
      // rax: start of next object
      __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
      __ movq(Operand(rdi, HeapObject::kMapOffset), rcx);  // setup the map
      __ Integer32ToSmi(rdx, rdx);
      __ movq(Operand(rdi, FixedArray::kLengthOffset), rdx);  // and length

      // Initialize the fields to undefined.
      // rbx: JSObject
      // rdi: FixedArray
      // rax: start of next object
      // rdx: number of elements
      { Label loop, entry;
        __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
        __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize));
        __ jmp(&entry);
        __ bind(&loop);
        __ movq(Operand(rcx, 0), rdx);
        __ addq(rcx, Immediate(kPointerSize));
        __ bind(&entry);
        __ cmpq(rcx, rax);
        __ j(below, &loop);
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject.
      // rbx: JSObject
      // rdi: FixedArray
      __ or_(rdi, Immediate(kHeapObjectTag));  // add the heap tag
      __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);

      // Continue with JSObject being successfully allocated.
      // rbx: JSObject
      __ jmp(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated object's unused properties.
      // rbx: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(rbx);
    }

    // Allocate the new receiver object using the runtime call.
    // rdi: function (constructor)
    __ bind(&rt_call);
    // Must restore rdi (constructor) before calling runtime.
    __ movq(rdi, Operand(rsp, 0));
    __ push(rdi);
    __ CallRuntime(Runtime::kNewObject, 1);
    __ movq(rbx, rax);  // store result in rbx

    // New object allocated.
    // rbx: newly allocated object
    __ bind(&allocated);
    // Retrieve the function from the stack.
    __ pop(rdi);

    // Retrieve smi-tagged arguments count from the stack.
    __ movq(rax, Operand(rsp, 0));
    __ SmiToInteger32(rax, rax);

    // Push the allocated receiver to the stack. We need two copies
    // because we may have to return the original one and the calling
    // conventions dictate that the called function pops the receiver.
    __ push(rbx);
    __ push(rbx);

    // Set up pointer to last argument.
    __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

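    // Sketch of the caller's argument area for a hypothetical `new F(a, b)`
    // (rax == 2) at this point:
    //   rbx + 16 : receiver (in the caller's frame)
    //   rbx + 8  : a (first argument)
    //   rbx + 0  : b (last argument)
    // The loop below walks rcx from rax - 1 down to 0, pushing a and then b,
    // so the first argument ends up deepest on the expression stack.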
    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ movq(rcx, rax);
    __ jmp(&entry);
    __ bind(&loop);
    __ push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decq(rcx);
    __ j(greater_equal, &loop);

    // Call the function.
    if (is_api_function) {
      __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      ParameterCount expected(0);
      __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET,
                    CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
    } else {
      ParameterCount actual(rax);
      __ InvokeFunction(rdi, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function && !count_constructions) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;
    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(rax, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ movq(rax, Operand(rsp, 0));

    // Restore the arguments count and leave the construct frame.
    __ bind(&exit);
    __ movq(rbx, Operand(rsp, kPointerSize));  // Get arguments count.

    // Leave construct frame.
  }

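  // rbx holds the smi-tagged argument count. On x64 a smi keeps its 32-bit
  // value in the upper half of the word, so SmiToIndex(rbx, rbx,
  // kPointerSizeLog2) can shift it straight into a byte offset (argc * 8);
  // the extra 1 * kPointerSize in the lea below also skips the receiver slot.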
  // Remove caller arguments from the stack and return.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->constructed_objects(), 1);
  __ ret(0);
}


void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Expects five C++ function parameters.
  // - Address entry (ignored)
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Open a C++ scope for the FrameScope.
  {
    // Platform specific argument handling. After this, the stack contains
    // an internal frame and the pushed function and receiver; registers
    // rax and rbx hold the argument count and the argument array, while
    // rdi holds the function pointer and rsi the context.

#ifdef _WIN64
    // MSVC parameters in:
    // rcx        : entry (ignored)
    // rdx        : function
    // r8         : receiver
    // r9         : argc
    // [rsp+0x20] : argv

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Load the function context into rsi.
    __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset));

    // Push the function and the receiver onto the stack.
    __ push(rdx);
    __ push(r8);

    // Load the number of arguments and set up the pointer to the arguments.
    __ movq(rax, r9);
    // Load the previous frame pointer to access the C arguments on the stack.
    __ movq(kScratchRegister, Operand(rbp, 0));
    __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
    // Load the function pointer into rdi.
    __ movq(rdi, rdx);
#else  // _WIN64
    // GCC parameters in:
    // rdi : entry (ignored)
    // rsi : function
    // rdx : receiver
    // rcx : argc
    // r8  : argv

    __ movq(rdi, rsi);
    // rdi : function

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push the function and receiver and set up the context.
    __ push(rdi);
    __ push(rdx);
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Load the number of arguments and set up the pointer to the arguments.
    __ movq(rax, rcx);
    __ movq(rbx, r8);
#endif  // _WIN64

    // Current stack contents:
    // [rsp + 2 * kPointerSize ... ] : Internal frame
    // [rsp + kPointerSize]          : function
    // [rsp]                         : receiver
    // Current register contents:
    // rax : argc
    // rbx : argv
    // rsi : context
    // rdi : function

    // Copy arguments to the stack in a loop.
    // Register rbx points to array of pointers to handle locations.
    // Push the values of these handles.
    Label loop, entry;
    __ Set(rcx, 0);  // Set loop variable to 0.
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
    __ push(Operand(kScratchRegister, 0));  // dereference handle
    __ addq(rcx, Immediate(1));
    __ bind(&entry);
    __ cmpq(rcx, rax);
    __ j(not_equal, &loop);

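    // rbx is an Object*** -- a C array of handle locations -- so each
    // iteration above first loads a handle (Object**) and then pushes the
    // Object* it points at, hence the double dereference.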
    // Invoke the code.
    if (is_construct) {
      // No type feedback cell is available.
      Handle<Object> undefined_sentinel(
          masm->isolate()->factory()->undefined_value());
      __ Move(rbx, undefined_sentinel);
      // Expects rdi to hold function pointer.
      CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(rax);
      // Function must be in rdi.
      __ InvokeFunction(rdi, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }
    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }

  // TODO(X64): Is argument correct? Is there a receiver to remove?
  __ ret(1 * kPointerSize);  // Remove receiver.
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kLazyCompile);
  // Do a tail-call of the compiled function.
  __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rax);
}


void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
  // Do a tail-call of the compiled function.
  __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rax);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do
  // stack crawls in MakeCodeYoung. This seems a bit fragile.

  // Re-execute the code that was patched back to the young age when
  // the stub returns.
  __ subq(Operand(rsp, 0), Immediate(5));
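  // The 5 subtracted above is the length of the short call (0xE8 + rel32)
  // that entered this stub -- cf. Assembler::kShortCallInstructionLength in
  // Generate_MarkCodeAsExecutedOnce below -- so the ret at the end re-runs
  // the freshly patched young sequence at the call site.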
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movq(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(1);
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 1);
  }
  __ Popad();
  __ ret(0);
}


#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the
  // fact that make_code_young doesn't do any garbage collection which allows
  // us to save/restore the registers without worrying about which of them
  // contain pointers.
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movq(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  __ subq(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(1);
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
        1);
  }
  __ Popad();

  // Perform prologue operations usually performed by the young code stub.
  __ PopReturnAddressTo(kScratchRegister);
  __ push(rbp);  // Caller's frame pointer.
  __ movq(rbp, rsp);
  __ push(rsi);  // Callee's context.
  __ push(rdi);  // Callee's JS Function.
  __ PushReturnAddressFrom(kScratchRegister);

  // Jump to point after the code-age stub.
  __ ret(0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification: this is important for compiled
    // stubs that tail call the runtime on deopts, passing their parameters
    // in registers.
    __ Pushad();
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ Popad();
    // Tear down internal frame.
  }

  __ pop(MemOperand(rsp, 0));  // Ignore state offset.
  __ ret(0);  // Return to IC Miss stub, continuation still on stack.
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ Push(Smi::FromInt(static_cast<int>(type)));

    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
    // Tear down internal frame.
  }

  // Get the full codegen state from the stack and untag it.
  __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize));

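  // kScratchRegister now holds a FullCodeGenerator::State value. NO_REGISTERS
  // means nothing needs to be restored and only the state slot is dropped;
  // TOS_REG means the saved top-of-stack value sitting one slot above the
  // state must be reloaded into rax before returning.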
  // Switch on the state.
  Label not_no_registers, not_tos_rax;
  __ cmpq(kScratchRegister, Immediate(FullCodeGenerator::NO_REGISTERS));
  __ j(not_equal, &not_no_registers, Label::kNear);
  __ ret(1 * kPointerSize);  // Remove state.

  __ bind(&not_no_registers);
  __ movq(rax, Operand(rsp, kPCOnStackSize + kPointerSize));
  __ cmpq(kScratchRegister, Immediate(FullCodeGenerator::TOS_REG));
  __ j(not_equal, &not_tos_rax, Label::kNear);
  __ ret(2 * kPointerSize);  // Remove state, rax.

  __ bind(&not_tos_rax);
  __ Abort(kNoCasesLeft);
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // Stack Layout:
  //   rsp[0]           : Return address
  //   rsp[8]           : Argument n
  //   rsp[16]          : Argument n-1
  //   ...
  //   rsp[8 * n]       : Argument 1
  //   rsp[8 * (n + 1)] : Receiver (function to call)
  //
  // rax contains the number of arguments, n, not counting the receiver.
  //
  // 1. Make sure we have at least one argument.
  { Label done;
    __ testq(rax, rax);
    __ j(not_zero, &done);
    __ PopReturnAddressTo(rbx);
    __ Push(masm->isolate()->factory()->undefined_value());
    __ PushReturnAddressFrom(rbx);
    __ incq(rax);
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  Label slow, non_function;
  StackArgumentsAccessor args(rsp, rax);
  __ movq(rdi, args.GetReceiverOperand());
  __ JumpIfSmi(rdi, &non_function);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &slow);

  // 3a. Patch the first argument if necessary when calling a function.
  Label shift_arguments;
  __ Set(rdx, 0);  // indicate regular JS_FUNCTION
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset),
             Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
    __ j(not_equal, &shift_arguments);

    // Do not transform the receiver for natives.
    // SharedFunctionInfo is already loaded into rbx.
    __ testb(FieldOperand(rbx, SharedFunctionInfo::kNativeByteOffset),
             Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
    __ j(not_zero, &shift_arguments);

    // Compute the receiver in non-strict mode.
    __ movq(rbx, args.GetArgumentOperand(1));
    __ JumpIfSmi(rbx, &convert_to_object, Label::kNear);

    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_receiver);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_receiver);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &shift_arguments);

    __ bind(&convert_to_object);
    {
      // Enter an internal frame in order to preserve argument count.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Integer32ToSmi(rax, rax);
      __ push(rax);

      __ push(rbx);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ movq(rbx, rax);
      __ Set(rdx, 0);  // indicate regular JS_FUNCTION

      __ pop(rax);
      __ SmiToInteger32(rax, rax);
    }

    // Restore the function to rdi.
    __ movq(rdi, args.GetReceiverOperand());
    __ jmp(&patch_receiver, Label::kNear);

    // Use the global receiver object from the called function as the
    // receiver.
    __ bind(&use_global_receiver);
    const int kGlobalIndex =
        Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
    __ movq(rbx, FieldOperand(rsi, kGlobalIndex));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kNativeContextOffset));
    __ movq(rbx, FieldOperand(rbx, kGlobalIndex));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ movq(args.GetArgumentOperand(1), rbx);

    __ jmp(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ Set(rdx, 1);  // indicate function proxy
  __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
  __ j(equal, &shift_arguments);
  __ bind(&non_function);
  __ Set(rdx, 2);  // indicate non-function

  // 3c. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  __ movq(args.GetArgumentOperand(1), rdi);

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
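  //    E.g. for rax == 2 with stack [ret][arg2][arg1][receiver] (rsp at
  //    the left), each slot is copied one position up, giving
  //    [ret][ret][arg2][arg1]; the duplicate return address is then popped
  //    and rax becomes 1, leaving arg1 in the receiver slot.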
  __ bind(&shift_arguments);
  { Label loop;
    __ movq(rcx, rax);
    __ bind(&loop);
    __ movq(rbx, Operand(rsp, rcx, times_pointer_size, 0));
    __ movq(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx);
    __ decq(rcx);
    __ j(not_sign, &loop);  // While non-negative (to copy return address).
    __ pop(rbx);  // Discard copy of return address.
    __ decq(rax);  // One fewer argument (first argument is new receiver).
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  { Label function, non_proxy;
    __ testq(rdx, rdx);
    __ j(zero, &function);
    __ Set(rbx, 0);
    __ SetCallKind(rcx, CALL_AS_METHOD);
    __ cmpq(rdx, Immediate(1));
    __ j(not_equal, &non_proxy);

    __ PopReturnAddressTo(rdx);
    __ push(rdi);  // re-add proxy object as additional argument
    __ PushReturnAddressFrom(rdx);
    __ incq(rax);
    __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
    __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing. If so, jump
  //     (tail-call) to the code in register rdx without checking arguments.
  __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movsxlq(rbx,
             FieldOperand(rdx,
                          SharedFunctionInfo::kFormalParameterCountOffset));
  __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ SetCallKind(rcx, CALL_AS_METHOD);
  __ cmpq(rax, rbx);
  __ j(not_equal,
       masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
       RelocInfo::CODE_TARGET);

  ParameterCount expected(0);
  __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION,
                NullCallWrapper(), CALL_AS_METHOD);
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  // Stack at entry:
  //   rsp     : return address
  //   rsp[8]  : arguments
  //   rsp[16] : receiver ("this")
  //   rsp[24] : function
  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Stack frame:
    //   rbp     : Old base pointer
    //   rbp[8]  : return address
    //   rbp[16] : function arguments
    //   rbp[24] : receiver
    //   rbp[32] : function
    static const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
    static const int kReceiverOffset = kArgumentsOffset + kPointerSize;
    static const int kFunctionOffset = kReceiverOffset + kPointerSize;

    __ push(Operand(rbp, kFunctionOffset));
    __ push(Operand(rbp, kArgumentsOffset));
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real
    // stack limit" is checked.
    Label okay;
    __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
    __ movq(rcx, rsp);
    // Make rcx the space we have left. The stack might already be overflowed
    // here which will cause rcx to become negative.
    __ subq(rcx, kScratchRegister);
    // Make rdx the space we need for the array when it is unrolled onto the
    // stack.
    __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
    // Check if the arguments will overflow the stack.
    __ cmpq(rcx, rdx);
    __ j(greater, &okay);  // Signed comparison.

    // Out of stack space.
    __ push(Operand(rbp, kFunctionOffset));
    __ push(rax);
    __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
    __ bind(&okay);
    // End of stack check.

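    // Note that rax (pushed as the limit below) is the smi-tagged argument
    // count produced by APPLY_PREPARE, and the index is advanced with
    // SmiAddConstant, so the copy loop that follows compares smi to smi.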
    // Push current index and limit.
    const int kLimitOffset =
        StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
    const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
    __ push(rax);  // limit
    __ push(Immediate(0));  // index

    // Get the receiver.
    __ movq(rbx, Operand(rbp, kReceiverOffset));

    // Check that the function is a JS function (otherwise it must be a
    // proxy).
    Label push_receiver;
    __ movq(rdi, Operand(rbp, kFunctionOffset));
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
    __ j(not_equal, &push_receiver);

    // Change context eagerly to get the right global object if necessary.
    __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_receiver;
    __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
             Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
    __ j(not_equal, &push_receiver);

    // Do not transform the receiver for natives.
    __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
             Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
    __ j(not_equal, &push_receiver);

    // Compute the receiver in non-strict mode.
    __ JumpIfSmi(rbx, &call_to_object, Label::kNear);
    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_receiver);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_receiver);

    // If the given receiver is already a JavaScript object then there's no
    // reason to convert it.
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &push_receiver);

    // Convert the receiver to an object.
    __ bind(&call_to_object);
    __ push(rbx);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ movq(rbx, rax);
    __ jmp(&push_receiver, Label::kNear);

    // Use the current global receiver object as the receiver.
    __ bind(&use_global_receiver);
    const int kGlobalOffset =
        Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
    __ movq(rbx, FieldOperand(rsi, kGlobalOffset));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kNativeContextOffset));
    __ movq(rbx, FieldOperand(rbx, kGlobalOffset));
    __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

    // Push the receiver.
    __ bind(&push_receiver);
    __ push(rbx);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    __ movq(rax, Operand(rbp, kIndexOffset));
    __ jmp(&entry);
    __ bind(&loop);
    __ movq(rdx, Operand(rbp, kArgumentsOffset));  // load arguments

    // Use inline caching to speed up access to arguments.
    Handle<Code> ic =
        masm->isolate()->builtins()->KeyedLoadIC_Initialize();
    __ Call(ic, RelocInfo::CODE_TARGET);
    // It is important that we do not have a test instruction after the
    // call. A test instruction after the call is used to indicate that
    // we have generated an inline version of the keyed load. In this
    // case, we know that we are not generating a test instruction next.

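    // The IC is called with the arguments object in rdx (the receiver) and
    // the smi index in rax (the key), and leaves the loaded element in rax,
    // which is pushed as the next outgoing argument just below.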
    // Push the nth argument.
    __ push(rax);

    // Update the index on the stack and in register rax.
    __ movq(rax, Operand(rbp, kIndexOffset));
    __ SmiAddConstant(rax, rax, Smi::FromInt(1));
    __ movq(Operand(rbp, kIndexOffset), rax);

    __ bind(&entry);
    __ cmpq(rax, Operand(rbp, kLimitOffset));
    __ j(not_equal, &loop);

    // Invoke the function.
    Label call_proxy;
    ParameterCount actual(rax);
    __ SmiToInteger32(rax, rax);
    __ movq(rdi, Operand(rbp, kFunctionOffset));
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
    __ j(not_equal, &call_proxy);
    __ InvokeFunction(rdi, actual, CALL_FUNCTION,
                      NullCallWrapper(), CALL_AS_METHOD);

    frame_scope.GenerateLeaveFrame();
    __ ret(3 * kPointerSize);  // remove this, receiver, and arguments

    // Invoke the function proxy.
    __ bind(&call_proxy);
    __ push(rdi);  // add function proxy as last argument
    __ incq(rax);
    __ Set(rbx, 0);
    __ SetCallKind(rcx, CALL_AS_METHOD);
    __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
    __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }
  __ ret(3 * kPointerSize);  // remove this, receiver, and arguments
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the InternalArray function.
  __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // The Smi check below catches both a NULL value and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // Tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // The Smi check below catches both a NULL value and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
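  // rbx carries the type feedback for the constructor stub; the undefined
  // sentinel loaded below means no feedback cell is available, matching the
  // construct path in Generate_JSEntryTrampolineHelper above.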
  // Tail call a stub.
  Handle<Object> undefined_sentinel(
      masm->isolate()->heap()->undefined_value(),
      masm->isolate());
  __ Move(rbx, undefined_sentinel);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rdi                 : constructor function
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1);

  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx);
    __ cmpq(rdi, rcx);
    __ Assert(equal, kUnexpectedStringFunction);
  }

  // Load the first argument into rax and get rid of the rest
  // (including the receiver).
  StackArgumentsAccessor args(rsp, rax);
  Label no_arguments;
  __ testq(rax, rax);
  __ j(zero, &no_arguments);
  __ movq(rbx, args.GetArgumentOperand(1));
  __ PopReturnAddressTo(rcx);
  __ lea(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ movq(rax, rbx);

  // Look up the argument in the number to string cache.
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(rax,  // Input.
                             rbx,  // Result.
                             rcx,  // Scratch 1.
                             rdx,  // Scratch 2.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- rbx    : argument converted to string
  //  -- rdi    : constructor function
  //  -- rsp[0] : return address
  // -----------------------------------

  // Allocate a JSValue and put the tagged pointer into rax.
  Label gc_required;
  __ Allocate(JSValue::kSize,
              rax,  // Result.
              rcx,  // New allocation top (we ignore it).
              no_reg,
              &gc_required,
              TAG_OBJECT);

  // Set the map.
  __ LoadGlobalFunctionInitialMap(rdi, rcx);
  if (FLAG_debug_code) {
    __ cmpb(FieldOperand(rcx, Map::kInstanceSizeOffset),
            Immediate(JSValue::kSize >> kPointerSizeLog2));
    __ Assert(equal, kUnexpectedStringWrapperInstanceSize);
    __ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0));
    __ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper);
  }
  __ movq(FieldOperand(rax, HeapObject::kMapOffset), rcx);

  // Set properties and elements.
  __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
  __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rcx);
  __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx);

  // Set the value.
  __ movq(FieldOperand(rax, JSValue::kValueOffset), rbx);

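  // A JSValue consists of exactly four fields -- map, properties, elements
  // and value -- all of which were stored above; the assertion below ties
  // that completeness to the object size.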
  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);

  // We're done. Return.
  __ ret(0);

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(rax, &convert_argument);
  Condition is_string = masm->IsObjectStringType(rax, rbx, rcx);
  __ j(NegateCondition(is_string), &convert_argument);
  __ movq(rbx, rax);
  __ IncrementCounter(counters->string_ctor_string_value(), 1);
  __ jmp(&argument_is_string);

  // Invoke the conversion builtin and put the result into rbx.
  __ bind(&convert_argument);
  __ IncrementCounter(counters->string_ctor_conversions(), 1);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(rdi);  // Preserve the function.
    __ push(rax);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
    __ pop(rdi);
  }
  __ movq(rbx, rax);
  __ jmp(&argument_is_string);

  // Load the empty string into rbx, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
  __ PopReturnAddressTo(rcx);
  __ lea(rsp, Operand(rsp, kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ jmp(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(rbx);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ ret(0);
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ push(rbp);
  __ movq(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));

  // Push the function on the stack.
  __ push(rdi);

  // Preserve the number of arguments on the stack. Must preserve rax,
  // rbx and rcx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(r8, rax);
  __ push(r8);
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movq(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movq(rsp, rbp);
  __ pop(rbp);

  // Remove caller arguments from the stack.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rcx : call kind information
  //  -- rdx : code entry to call
  // -----------------------------------

  Label invoke, dont_adapt_arguments;
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->arguments_adaptors(), 1);

  Label enough, too_few;
  __ cmpq(rax, rbx);
  __ j(less, &too_few);
  __ cmpq(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);

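  // Example: calling f(1) when f expects three parameters takes the
  // "too few" path below: the receiver and the single actual argument are
  // copied, then undefined is pushed twice before invoking the code entry.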
  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incq(r8);
    __ push(Operand(rax, 0));
    __ subq(rax, Immediate(kPointerSize));
    __ cmpq(r8, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incq(r8);
    __ push(Operand(rdi, 0));
    __ subq(rdi, Immediate(kPointerSize));
    __ cmpq(r8, rax);
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incq(r8);
    __ push(kScratchRegister);
    __ cmpq(r8, rbx);
    __ j(less, &fill);

    // Restore function pointer.
    __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ call(rdx);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ jmp(rdx);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Look up the function in the JavaScript frame.
  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Look up and calculate the pc offset.
    __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerPCOffset));
    __ movq(rbx, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
    __ subq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ subq(rdx, FieldOperand(rbx, SharedFunctionInfo::kCodeOffset));
    __ Integer32ToSmi(rdx, rdx);

    // Pass both function and pc offset as arguments.
    __ push(rax);
    __ push(rdx);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 2);
  }

  Label skip;
  // If the code object is null, just return to the unoptimized code.
  __ cmpq(rax, Immediate(0));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Load deoptimization data from the code object.
  __ movq(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ SmiToInteger32(rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));

  // Compute the target address = code_obj + header_size + osr_offset.
  __ lea(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));

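  // Rather than jumping to the optimized code directly, the stub swaps its
  // own return address for the OSR entry point computed above, so the ret
  // below "returns" straight into the optimized function.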
  // Overwrite the return address on the stack.
  __ movq(Operand(rsp, 0), rax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as an indicator that recompilation might be
  // done.
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ jmp(masm->isolate()->builtins()->OnStackReplacement(),
         RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ ret(0);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64