// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "ic-inl.h"
#include "codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register receiver,
                       Register name,
                       // The offset is scaled by 4, based on
                       // kHeapObjectTagSize, which is two bits
                       Register offset) {
  // We need to scale up the pointer by 2 because the offset is scaled by less
  // than the pointer size.
  ASSERT(kPointerSizeLog2 == kHeapObjectTagSize + 1);
  ScaleFactor scale_factor = times_2;

  ASSERT_EQ(24, sizeof(StubCache::Entry));
  // The offset register holds the entry offset times four (due to masking
  // and shifting optimizations).
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  Label miss;

  // Multiply by 3 because there are 3 fields per entry (name, code, map).
  __ lea(offset, Operand(offset, offset, times_2, 0));

  __ LoadAddress(kScratchRegister, key_offset);

  // Check that the key in the entry matches the name.
  // Multiply entry offset by 16 to get the entry address. Since the
  // offset register already holds the entry offset times four, multiply
  // by a further four.
  __ cmpl(name, Operand(kScratchRegister, offset, scale_factor, 0));
  __ j(not_equal, &miss);

  // Get the map entry from the cache.
  // Use key_offset + kPointerSize * 2, rather than loading map_offset.
  __ movq(kScratchRegister,
          Operand(kScratchRegister, offset, scale_factor, kPointerSize * 2));
  __ cmpq(kScratchRegister, FieldOperand(receiver, HeapObject::kMapOffset));
  __ j(not_equal, &miss);

  // Get the code entry from the cache.
  __ LoadAddress(kScratchRegister, value_offset);
  __ movq(kScratchRegister,
          Operand(kScratchRegister, offset, scale_factor, 0));

  // Check that the flags match what we're looking for.
  __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
  __ and_(offset, Immediate(~Code::kFlagsNotUsedInLookup));
  __ cmpl(offset, Immediate(flags));
  __ j(not_equal, &miss);

#ifdef DEBUG
  if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
    __ jmp(&miss);
  } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
    __ jmp(&miss);
  }
#endif

  // Jump to the first instruction in the code stub.
  __ addq(kScratchRegister, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(kScratchRegister);

  __ bind(&miss);
}


// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so miss_label
// must always call a backup property check that is complete.
// This function is safe to call if the receiver has fast properties.
// Name must be a symbol and receiver must be a heap object.
static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                             Label* miss_label,
                                             Register receiver,
                                             Handle<String> name,
                                             Register r0,
                                             Register r1) {
  ASSERT(name->IsSymbol());
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1);

  __ movq(r0, FieldOperand(receiver, HeapObject::kMapOffset));

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  __ testb(FieldOperand(r0, Map::kBitFieldOffset),
           Immediate(kInterceptorOrAccessCheckNeededMask));
  __ j(not_zero, miss_label);

  // Check that receiver is a JSObject.
  __ CmpInstanceType(r0, FIRST_SPEC_OBJECT_TYPE);
  __ j(below, miss_label);

  // Load properties array.
  Register properties = r0;
  __ movq(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));

  // Check that the properties array is a dictionary.
  __ CompareRoot(FieldOperand(properties, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, miss_label);

  Label done;
  StringDictionaryLookupStub::GenerateNegativeLookup(masm,
                                                     miss_label,
                                                     &done,
                                                     properties,
                                                     name,
                                                     r1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1);
}


void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2,
                              Register extra3) {
  Isolate* isolate = masm->isolate();
  Label miss;
  USE(extra);   // The register extra is not used on the X64 platform.
  USE(extra2);  // The register extra2 is not used on the X64 platform.
  USE(extra3);  // The register extra3 is not used on the X64 platform.
  // Make sure that code is valid. The multiplying code relies on the
  // entry size being 24.
  ASSERT(sizeof(Entry) == 24);

  // Make sure the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));

  // Check scratch register is valid, extra and extra2 are unused.
  ASSERT(!scratch.is(no_reg));
  ASSERT(extra2.is(no_reg));
  ASSERT(extra3.is(no_reg));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
  __ movl(scratch, FieldOperand(name, String::kHashFieldOffset));
  // Use only the low 32 bits of the map pointer.
  __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xor_(scratch, Immediate(flags));
  // We mask out the last two bits because they are not part of the hash and
  // they are always 01 for maps. Also in the two 'and' instructions below.
  __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the primary table.
  ProbeTable(isolate, masm, flags, kPrimary, receiver, name, scratch);

  // Primary miss: Compute hash for secondary probe.
  __ movl(scratch, FieldOperand(name, String::kHashFieldOffset));
  __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xor_(scratch, Immediate(flags));
  __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
  __ subl(scratch, name);
  __ addl(scratch, Immediate(flags));
  __ and_(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the secondary table.
  ProbeTable(isolate, masm, flags, kSecondary, receiver, name, scratch);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
}


void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ movq(prototype,
          Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  __ movq(prototype,
          FieldOperand(prototype, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  __ movq(prototype, Operand(prototype, Context::SlotOffset(index)));
  // Load the initial map. The global functions all have initial maps.
  __ movq(prototype,
          FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
    int index,
    Register prototype,
    Label* miss) {
  Isolate* isolate = masm->isolate();
  // Check we're still in the same context.
  __ Move(prototype, isolate->global());
  __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)),
          prototype);
  __ j(not_equal, miss);
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(isolate->global_context()->get(index)));
  // Load its initial map. The global functions all have initial maps.
  __ Move(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, miss_label);

  // Load length directly from the JS array.
  __ movq(rax, FieldOperand(receiver, JSArray::kLengthOffset));
  __ ret(0);
}


// Generate code to check if an object is a string. If the object is
// a string, the map's instance type is left in the scratch register.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, smi);

  // Check that the object is a string.
  __ movq(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ movzxbq(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ testl(scratch, Immediate(kNotStringTag));
  __ j(not_zero, non_string_object);
}


void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss,
                                            bool support_wrappers) {
  Label check_wrapper;

  // Check if the object is a string leaving the instance type in the
  // scratch register.
  GenerateStringCheck(masm, receiver, scratch1, miss,
                      support_wrappers ? &check_wrapper : miss);

  // Load length directly from the string.
  __ movq(rax, FieldOperand(receiver, String::kLengthOffset));
  __ ret(0);

  if (support_wrappers) {
    // Check if the object is a JSValue wrapper.
    __ bind(&check_wrapper);
    __ cmpl(scratch1, Immediate(JS_VALUE_TYPE));
    __ j(not_equal, miss);

    // Check if the wrapped value is a string and load the length
    // directly if it is.
    __ movq(scratch2, FieldOperand(receiver, JSValue::kValueOffset));
    GenerateStringCheck(masm, scratch2, scratch1, miss, miss);
    __ movq(rax, FieldOperand(scratch2, String::kLengthOffset));
    __ ret(0);
  }
}


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register result,
                                                 Register scratch,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, result, miss_label);
  if (!result.is(rax)) __ movq(rax, result);
  __ ret(0);
}


// Load a fast property out of a holder object (src). In-object properties
// are loaded directly otherwise the property is loaded from the properties
// fixed array.
void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst,
                                            Register src,
                                            Handle<JSObject> holder,
                                            int index) {
  // Adjust for the number of properties stored in the holder.
  index -= holder->map()->inobject_properties();
  if (index < 0) {
    // Get the property straight out of the holder.
    int offset = holder->map()->instance_size() + (index * kPointerSize);
    __ movq(dst, FieldOperand(src, offset));
  } else {
    // Calculate the offset into the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    __ movq(dst, FieldOperand(src, JSObject::kPropertiesOffset));
    __ movq(dst, FieldOperand(dst, offset));
  }
}


static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  __ push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  __ Move(kScratchRegister, interceptor);
  __ push(kScratchRegister);
  __ push(receiver);
  __ push(holder);
  __ push(FieldOperand(kScratchRegister, InterceptorInfo::kDataOffset));
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
                        masm->isolate());
  __ Set(rax, 5);
  __ LoadAddress(rbx, ref);

  CEntryStub stub(1);
  __ CallStub(&stub);
}


// Number of pointers to be reserved on stack for fast API call.
static const int kFastApiCallArguments = 3;


// Reserves space for the extra arguments to API function in the
// caller's frame.
//
// These arguments are set by CheckPrototypes and GenerateFastApiCall.
static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument in the internal frame of the caller
  // -----------------------------------
  __ movq(scratch, Operand(rsp, 0));
  __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
  __ movq(Operand(rsp, 0), scratch);
  __ Move(scratch, Smi::FromInt(0));
  for (int i = 1; i <= kFastApiCallArguments; i++) {
    __ movq(Operand(rsp, i * kPointerSize), scratch);
  }
}


// Undoes the effects of ReserveSpaceForFastApiCall.
static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
  // ----------- S t a t e -------------
  //  -- rsp[0]  : return address.
  //  -- rsp[8]  : last fast api call extra argument.
  //  -- ...
  //  -- rsp[kFastApiCallArguments * 8] : first fast api call extra argument.
  //  -- rsp[kFastApiCallArguments * 8 + 8] : last argument in the internal
  //                                          frame.
  // -----------------------------------
  __ movq(scratch, Operand(rsp, 0));
  __ movq(Operand(rsp, kFastApiCallArguments * kPointerSize), scratch);
  __ addq(rsp, Immediate(kPointerSize * kFastApiCallArguments));
}


// Generates call to API function.
static void GenerateFastApiCall(MacroAssembler* masm,
                                const CallOptimization& optimization,
                                int argc) {
  // ----------- S t a t e -------------
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : object passing the type check
  //                           (last fast api call extra argument,
  //                            set by CheckPrototypes)
  //  -- rsp[16]             : api function
  //                           (first fast api call extra argument)
  //  -- rsp[24]             : api call data
  //  -- rsp[32]             : last argument
  //  -- ...
  //  -- rsp[(argc + 3) * 8] : first argument
  //  -- rsp[(argc + 4) * 8] : receiver
  // -----------------------------------
  // Get the function and setup the context.
  Handle<JSFunction> function = optimization.constant_function();
  __ LoadHeapObject(rdi, function);
  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Pass the additional arguments.
  __ movq(Operand(rsp, 2 * kPointerSize), rdi);
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data(api_call_info->data());
  if (masm->isolate()->heap()->InNewSpace(*call_data)) {
    __ Move(rcx, api_call_info);
    __ movq(rbx, FieldOperand(rcx, CallHandlerInfo::kDataOffset));
    __ movq(Operand(rsp, 3 * kPointerSize), rbx);
  } else {
    __ Move(Operand(rsp, 3 * kPointerSize), call_data);
  }

  // Prepare arguments.
  __ lea(rbx, Operand(rsp, 3 * kPointerSize));

#ifdef _WIN64
  // Win64 uses first register--rcx--for returned value.
  Register arguments_arg = rdx;
#else
  Register arguments_arg = rdi;
#endif

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 4;

  __ PrepareCallApiFunction(kApiStackSpace);

  __ movq(StackSpaceOperand(0), rbx);  // v8::Arguments::implicit_args_.
  __ addq(rbx, Immediate(argc * kPointerSize));
  __ movq(StackSpaceOperand(1), rbx);  // v8::Arguments::values_.
  __ Set(StackSpaceOperand(2), argc);  // v8::Arguments::length_.
  // v8::Arguments::is_construct_call_.
  __ Set(StackSpaceOperand(3), 0);

  // v8::InvocationCallback's argument.
  __ lea(arguments_arg, StackSpaceOperand(0));

  // Function address is a foreign pointer outside V8's heap.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  __ CallApiFunctionAndReturn(function_address,
                              argc + kFastApiCallArguments + 1);
}


class CallInterceptorCompiler BASE_EMBEDDED {
 public:
  CallInterceptorCompiler(StubCompiler* stub_compiler,
                          const ParameterCount& arguments,
                          Register name,
                          Code::ExtraICState extra_ic_state)
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        name_(name),
        extra_ic_state_(extra_ic_state) {}

  void Compile(MacroAssembler* masm,
               Handle<JSObject> object,
               Handle<JSObject> holder,
               Handle<String> name,
               LookupResult* lookup,
               Register receiver,
               Register scratch1,
               Register scratch2,
               Register scratch3,
               Label* miss) {
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

    // Check that the receiver isn't a smi.
    __ JumpIfSmi(receiver, miss);

    CallOptimization optimization(lookup);
    if (optimization.is_constant_call()) {
      CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
                       holder, lookup, name, optimization, miss);
    } else {
      CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,
                     name, holder, miss);
    }
  }

 private:
  void CompileCacheable(MacroAssembler* masm,
                        Handle<JSObject> object,
                        Register receiver,
                        Register scratch1,
                        Register scratch2,
                        Register scratch3,
                        Handle<JSObject> interceptor_holder,
                        LookupResult* lookup,
                        Handle<String> name,
                        const CallOptimization& optimization,
                        Label* miss_label) {
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());

    int depth1 = kInvalidProtoDepth;
    int depth2 = kInvalidProtoDepth;
    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
        !lookup->holder()->IsGlobalObject()) {
      depth1 = optimization.GetPrototypeDepthOfExpectedType(
          object, interceptor_holder);
      if (depth1 == kInvalidProtoDepth) {
        depth2 = optimization.GetPrototypeDepthOfExpectedType(
            interceptor_holder, Handle<JSObject>(lookup->holder()));
      }
      can_do_fast_api_call =
          depth1 != kInvalidProtoDepth || depth2 != kInvalidProtoDepth;
    }

    Counters* counters = masm->isolate()->counters();
    __ IncrementCounter(counters->call_const_interceptor(), 1);

    if (can_do_fast_api_call) {
      __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1);
      ReserveSpaceForFastApiCall(masm, scratch1);
    }

    // Check that the maps from receiver to interceptor's holder
    // haven't changed and thus we can invoke interceptor.
    Label miss_cleanup;
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3,
                                        name, depth1, miss);

    // Invoke an interceptor and if it provides a value,
    // branch to |regular_invoke|.
    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder,
                        &regular_invoke);

    // Interceptor returned nothing for this property. Try to use cached
    // constant function.

    // Check that the maps from interceptor's holder to constant function's
    // holder haven't changed and thus we can use cached constant function.
    if (*interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
                                      Handle<JSObject>(lookup->holder()),
                                      scratch1, scratch2, scratch3,
                                      name, depth2, miss);
    } else {
      // CheckPrototypes has a side effect of fetching a 'holder'
      // for API (object which is instanceof for the signature). It's
      // safe to omit it here, as if present, it should be fetched
      // by the previous CheckPrototypes.
      ASSERT(depth2 == kInvalidProtoDepth);
    }

    // Invoke function.
    if (can_do_fast_api_call) {
      GenerateFastApiCall(masm, optimization, arguments_.immediate());
    } else {
      CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state_)
          ? CALL_AS_FUNCTION
          : CALL_AS_METHOD;
      __ InvokeFunction(optimization.constant_function(), arguments_,
                        JUMP_FUNCTION, NullCallWrapper(), call_kind);
    }

    // Deferred code for fast API call case---clean preallocated space.
    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm, scratch1);
      __ jmp(miss_label);
    }

    // Invoke a regular function.
    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm, scratch1);
    }
  }

  void CompileRegular(MacroAssembler* masm,
                      Handle<JSObject> object,
                      Register receiver,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      Handle<String> name,
                      Handle<JSObject> interceptor_holder,
                      Label* miss_label) {
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3,
                                        name, miss_label);

    FrameScope scope(masm, StackFrame::INTERNAL);
    // Save the name_ register across the call.
    __ push(name_);

    PushInterceptorArguments(masm, receiver, holder, name_, interceptor_holder);

    __ CallExternalReference(
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
                          masm->isolate()),
        5);

    // Restore the name_ register.
    __ pop(name_);

    // Leave the internal frame.
  }

  void LoadWithInterceptor(MacroAssembler* masm,
                           Register receiver,
                           Register holder,
                           Handle<JSObject> holder_obj,
                           Label* interceptor_succeeded) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ push(holder);  // Save the holder.
      __ push(name_);  // Save the name.

      CompileCallLoadPropertyWithInterceptor(masm,
                                             receiver,
                                             holder,
                                             name_,
                                             holder_obj);

      __ pop(name_);  // Restore the name.
      __ pop(receiver);  // Restore the holder.
      // Leave the internal frame.
    }

    __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
    __ j(not_equal, interceptor_succeeded);
  }

  StubCompiler* stub_compiler_;
  const ParameterCount& arguments_;
  Register name_;
  Code::ExtraICState extra_ic_state_;
};


void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
  ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
  Handle<Code> code = (kind == Code::LOAD_IC)
      ? masm->isolate()->builtins()->LoadIC_Miss()
      : masm->isolate()->builtins()->KeyedLoadIC_Miss();
  __ Jump(code, RelocInfo::CODE_TARGET);
}


void StubCompiler::GenerateKeyedLoadMissForceGeneric(MacroAssembler* masm) {
  Handle<Code> code =
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ Jump(code, RelocInfo::CODE_TARGET);
}


// Both name_reg and receiver_reg are preserved on jumps to miss_label,
// but may be destroyed if store is successful.
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      Handle<JSObject> object,
                                      int index,
                                      Handle<Map> transition,
                                      Register receiver_reg,
                                      Register name_reg,
                                      Register scratch,
                                      Label* miss_label) {
  // Check that the map of the object hasn't changed.
  CompareMapMode mode = transition.is_null() ? ALLOW_ELEMENT_TRANSITION_MAPS
                                             : REQUIRE_EXACT_MAP;
  __ CheckMap(receiver_reg, Handle<Map>(object->map()),
              miss_label, DO_SMI_CHECK, mode);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if (!transition.is_null() && (object->map()->unused_property_fields() == 0)) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ pop(scratch);  // Return address.
    __ push(receiver_reg);
    __ Push(transition);
    __ push(rax);
    __ push(scratch);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3,
        1);
    return;
  }

  if (!transition.is_null()) {
    // Update the map of the object; no write barrier updating is
    // needed because the map is never in new space.
    __ Move(FieldOperand(receiver_reg, HeapObject::kMapOffset), transition);
  }

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ movq(FieldOperand(receiver_reg, offset), rax);

    // Update the write barrier for the array address.
    // Pass the value being stored in the now unused name_reg.
    __ movq(name_reg, rax);
    __ RecordWriteField(
        receiver_reg, offset, name_reg, scratch, kDontSaveFPRegs);
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ movq(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ movq(FieldOperand(scratch, offset), rax);

    // Update the write barrier for the array address.
    // Pass the value being stored in the now unused name_reg.
    __ movq(name_reg, rax);
    __ RecordWriteField(
        scratch, offset, name_reg, receiver_reg, kDontSaveFPRegs);
  }

  // Return the value (register rax).
  __ ret(0);
}


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
static void GenerateCheckPropertyCell(MacroAssembler* masm,
                                      Handle<GlobalObject> global,
                                      Handle<String> name,
                                      Register scratch,
                                      Label* miss) {
  Handle<JSGlobalPropertyCell> cell =
      GlobalObject::EnsurePropertyCell(global, name);
  ASSERT(cell->value()->IsTheHole());
  __ Move(scratch, cell);
  __ Cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset),
         masm->isolate()->factory()->the_hole_value());
  __ j(not_equal, miss);
}


// Calls GenerateCheckPropertyCell for each global object in the prototype chain
// from object to (but not including) holder.
static void GenerateCheckPropertyCells(MacroAssembler* masm,
                                       Handle<JSObject> object,
                                       Handle<JSObject> holder,
                                       Handle<String> name,
                                       Register scratch,
                                       Label* miss) {
  Handle<JSObject> current = object;
  while (!current.is_identical_to(holder)) {
    if (current->IsGlobalObject()) {
      GenerateCheckPropertyCell(masm,
                                Handle<GlobalObject>::cast(current),
                                name,
                                scratch,
                                miss);
    }
    current = Handle<JSObject>(JSObject::cast(current->GetPrototype()));
  }
}

#undef __
#define __ ACCESS_MASM((masm()))


Register StubCompiler::CheckPrototypes(Handle<JSObject> object,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Handle<String> name,
                                       int save_at_depth,
                                       Label* miss) {
  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.  On the first
  // iteration, reg is an alias for object_reg, on later iterations,
  // it is an alias for holder_reg.
  Register reg = object_reg;
  int depth = 0;

  if (save_at_depth == depth) {
    __ movq(Operand(rsp, kPointerSize), object_reg);
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  Handle<JSObject> current = object;
  while (!current.is_identical_to(holder)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    Handle<JSObject> prototype(JSObject::cast(current->GetPrototype()));
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      if (!name->IsSymbol()) {
        name = factory()->LookupSymbol(name);
      }
      ASSERT(current->property_dictionary()->FindEntry(*name) ==
             StringDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    } else {
      bool in_new_space = heap()->InNewSpace(*prototype);
      Handle<Map> current_map(current->map());
      if (in_new_space) {
        // Save the map in scratch1 for later.
        __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      }
      __ CheckMap(reg, Handle<Map>(current_map),
                  miss, DONT_DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);

      // Check access rights to the global object.  This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch2, miss);
      }
      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (in_new_space) {
        // The prototype is in new space; we cannot store a reference to it
        // in the code.  Load it from the map.
        __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
      } else {
        // The prototype is in old space; load it directly.
        __ Move(reg, prototype);
      }
    }

    if (save_at_depth == depth) {
      __ movq(Operand(rsp, kPointerSize), reg);
    }

    // Go to the next object in the prototype chain.
    current = prototype;
  }
  ASSERT(current.is_identical_to(holder));

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  // Check the holder map.
  __ CheckMap(reg, Handle<Map>(holder->map()),
              miss, DONT_DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS);

  // Perform security check for access to the global object.
  ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
  if (current->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // If we've skipped any global objects, it's not enough to verify that
  // their maps haven't changed.  We also need to check that the property
  // cell for the property is still empty.
  GenerateCheckPropertyCells(masm(), object, holder, name, scratch1, miss);

  // Return the register containing the holder.
  return reg;
}


void StubCompiler::GenerateLoadField(Handle<JSObject> object,
                                     Handle<JSObject> holder,
                                     Register receiver,
                                     Register scratch1,
                                     Register scratch2,
                                     Register scratch3,
                                     int index,
                                     Handle<String> name,
                                     Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check the prototype chain.
  Register reg = CheckPrototypes(
      object, receiver, holder, scratch1, scratch2, scratch3, name, miss);

  // Get the value from the properties.
  GenerateFastPropertyLoad(masm(), rax, reg, holder, index);
  __ ret(0);
}


void StubCompiler::GenerateLoadCallback(Handle<JSObject> object,
                                        Handle<JSObject> holder,
                                        Register receiver,
                                        Register name_reg,
                                        Register scratch1,
                                        Register scratch2,
                                        Register scratch3,
                                        Handle<AccessorInfo> callback,
                                        Handle<String> name,
                                        Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the maps haven't changed.
  Register reg = CheckPrototypes(object, receiver, holder, scratch1,
                                 scratch2, scratch3, name, miss);

  // Insert additional parameters into the stack frame above return address.
  ASSERT(!scratch2.is(reg));
  __ pop(scratch2);  // Get return address to place it below.

  __ push(receiver);  // receiver
  __ push(reg);  // holder
  if (heap()->InNewSpace(callback->data())) {
    __ Move(scratch1, callback);
    __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset));  // data
  } else {
    __ Push(Handle<Object>(callback->data()));
  }
  __ push(name_reg);  // name
  // Save a pointer to where we pushed the arguments pointer.
  // This will be passed as the const AccessorInfo& to the C++ callback.

#ifdef _WIN64
  // Win64 uses first register--rcx--for returned value.
  Register accessor_info_arg = r8;
  Register name_arg = rdx;
#else
  Register accessor_info_arg = rsi;
  Register name_arg = rdi;
#endif

  ASSERT(!name_arg.is(scratch2));
  __ movq(name_arg, rsp);
  __ push(scratch2);  // Restore return address.

  // 3 elements array for v8::Arguments::values_ and handler for name.
  const int kStackSpace = 4;

  // Allocate v8::AccessorInfo in non-GCed stack space.
  const int kArgStackSpace = 1;

  __ PrepareCallApiFunction(kArgStackSpace);
  __ lea(rax, Operand(name_arg, 3 * kPointerSize));

  // v8::AccessorInfo::args_.
  __ movq(StackSpaceOperand(0), rax);

  // The context register (rsi) has been saved in PrepareCallApiFunction and
  // could be used to pass arguments.
  __ lea(accessor_info_arg, StackSpaceOperand(0));

  Address getter_address = v8::ToCData<Address>(callback->getter());
  __ CallApiFunctionAndReturn(getter_address, kStackSpace);
}


void StubCompiler::GenerateLoadConstant(Handle<JSObject> object,
                                        Handle<JSObject> holder,
                                        Register receiver,
                                        Register scratch1,
                                        Register scratch2,
                                        Register scratch3,
                                        Handle<JSFunction> value,
                                        Handle<String> name,
                                        Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the maps haven't changed.
  CheckPrototypes(
      object, receiver, holder, scratch1, scratch2, scratch3, name, miss);

  // Return the constant value.
  __ LoadHeapObject(rax, value);
  __ ret(0);
}


void StubCompiler::GenerateLoadInterceptor(Handle<JSObject> object,
                                           Handle<JSObject> interceptor_holder,
                                           LookupResult* lookup,
                                           Register receiver,
                                           Register name_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Handle<String> name,
                                           Label* miss) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // So far the most popular follow ups for interceptor loads are FIELD
  // and CALLBACKS, so inline only them, other cases may be added
  // later.
  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->type() == FIELD) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
               lookup->GetCallbackObject()->IsAccessorInfo()) {
      compile_followup_inline =
          AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL;
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));

    // Preserve the receiver register explicitly whenever it is different from
    // the holder and it is needed should the interceptor return without any
    // result. The CALLBACKS case needs the receiver to be passed into C++ code,
    // the FIELD case might cause a miss during the prototype check.
    bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver.is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    {
      FrameScope frame_scope(masm(), StackFrame::INTERNAL);

      if (must_preserve_receiver_reg) {
        __ push(receiver);
      }
      __ push(holder_reg);
      __ push(name_reg);

      // Invoke an interceptor.  Note: map checks from receiver to
      // interceptor's holder have been compiled before (see a caller
      // of this method.)
      CompileCallLoadPropertyWithInterceptor(masm(),
                                             receiver,
                                             holder_reg,
                                             name_reg,
                                             interceptor_holder);

      // Check if interceptor provided a value for property.  If it's
      // the case, return immediately.
      Label interceptor_failed;
      __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
      __ j(equal, &interceptor_failed);
      frame_scope.GenerateLeaveFrame();
      __ ret(0);

      __ bind(&interceptor_failed);
      __ pop(name_reg);
      __ pop(holder_reg);
      if (must_preserve_receiver_reg) {
        __ pop(receiver);
      }

      // Leave the internal frame.
    }

    // Check that the maps from interceptor's holder to lookup's holder
    // haven't changed.  And load lookup's holder into |holder| register.
    if (must_perform_prototype_check) {
      holder_reg = CheckPrototypes(interceptor_holder,
                                   holder_reg,
                                   Handle<JSObject>(lookup->holder()),
                                   scratch1,
                                   scratch2,
                                   scratch3,
                                   name,
                                   miss);
    }

    if (lookup->type() == FIELD) {
      // We found FIELD property in prototype chain of interceptor's holder.
      // Retrieve a field from field's holder.
      GenerateFastPropertyLoad(masm(), rax, holder_reg,
                               Handle<JSObject>(lookup->holder()),
                               lookup->GetFieldIndex());
      __ ret(0);
    } else {
      // We found CALLBACKS property in prototype chain of interceptor's
      // holder.
      ASSERT(lookup->type() == CALLBACKS);
      Handle<AccessorInfo> callback(
          AccessorInfo::cast(lookup->GetCallbackObject()));
      ASSERT(callback->getter() != NULL);

      // Tail call to runtime.
      // Important invariant in CALLBACKS case: the code above must be
      // structured to never clobber |receiver| register.
      __ pop(scratch2);  // return address
      __ push(receiver);
      __ push(holder_reg);
      __ Move(holder_reg, callback);
      __ push(FieldOperand(holder_reg, AccessorInfo::kDataOffset));
      __ push(holder_reg);
      __ push(name_reg);
      __ push(scratch2);  // restore return address

      ExternalReference ref =
          ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
                            isolate());
      __ TailCallExternalReference(ref, 5, 1);
    }
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    __ pop(scratch2);  // save old return address
    PushInterceptorArguments(masm(), receiver, holder_reg,
                             name_reg, interceptor_holder);
    __ push(scratch2);  // restore old return address

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
    __ TailCallExternalReference(ref, 5, 1);
  }
}


void CallStubCompiler::GenerateNameCheck(Handle<String> name, Label* miss) {
  if (kind_ == Code::KEYED_CALL_IC) {
    __ Cmp(rcx, name);
    __ j(not_equal, miss);
  }
}


void CallStubCompiler::GenerateGlobalReceiverCheck(Handle<JSObject> object,
                                                   Handle<JSObject> holder,
                                                   Handle<String> name,
                                                   Label* miss) {
  ASSERT(holder->IsGlobalObject());

  // Get the number of arguments.
  const int argc = arguments().immediate();

  // Get the receiver from the stack.
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Check that the maps haven't changed.
  __ JumpIfSmi(rdx, miss);
  CheckPrototypes(object, rdx, holder, rbx, rax, rdi, name, miss);
}


void CallStubCompiler::GenerateLoadFunctionFromCell(
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Label* miss) {
  // Get the value from the cell.
  __ Move(rdi, cell);
  __ movq(rdi, FieldOperand(rdi, JSGlobalPropertyCell::kValueOffset));

  // Check that the cell contains the same function.
  if (heap()->InNewSpace(*function)) {
    // We can't embed a pointer to a function in new space so we have
    // to verify that the shared function info is unchanged. This has
    // the nice side effect that multiple closures based on the same
    // function can all use this call IC. Before we load through the
    // function, we have to verify that it still is a function.
    __ JumpIfSmi(rdi, miss);
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rax);
    __ j(not_equal, miss);

    // Check the shared function info. Make sure it hasn't changed.
    __ Move(rax, Handle<SharedFunctionInfo>(function->shared()));
    __ cmpq(FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset), rax);
  } else {
    __ Cmp(rdi, function);
  }
  __ j(not_equal, miss);
}


void CallStubCompiler::GenerateMissBranch() {
  Handle<Code> code =
      isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
                                               kind_,
                                               extra_state_);
  __ Jump(code, RelocInfo::CODE_TARGET);
}


Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
                                                Handle<JSObject> holder,
                                                int index,
                                                Handle<String> name) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------
  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  // Do the right check and compute the holder register.
  Register reg = CheckPrototypes(object, rdx, holder, rbx, rax, rdi,
                                 name, &miss);

  GenerateFastPropertyLoad(masm(), rdi, reg, holder, index);

  // Check that the function really is a function.
  __ JumpIfSmi(rdi, &miss);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rbx);
  __ j(not_equal, &miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
    __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
  }

  // Invoke the function.
  CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION,
                    NullCallWrapper(), call_kind);

  // Handle call cache miss.
  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(FIELD, name);
}


Handle<Code> CallStubCompiler::CompileArrayPushCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  // ----------- S t a t e -------------
  //  -- rcx                 : name
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- ...
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();

  Label miss;
  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
                  name, &miss);

  if (argc == 0) {
    // Noop, return the length.
    __ movq(rax, FieldOperand(rdx, JSArray::kLengthOffset));
    __ ret((argc + 1) * kPointerSize);
  } else {
    Label call_builtin;

    if (argc == 1) {  // Otherwise fall through to call builtin.
      Label attempt_to_grow_elements, with_write_barrier;

      // Get the elements array of the object.
      __ movq(rdi, FieldOperand(rdx, JSArray::kElementsOffset));

      // Check that the elements are in fast mode and writable.
      __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
             factory()->fixed_array_map());
      __ j(not_equal, &call_builtin);

      // Get the array's length into rax and calculate new length.
      __ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset));
      STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue);
      __ addl(rax, Immediate(argc));

      // Get the elements' length into rcx.
      __ SmiToInteger32(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));

      // Check if we could survive without allocation.
      __ cmpl(rax, rcx);
      __ j(greater, &attempt_to_grow_elements);

      // Check if value is a smi.
      __ movq(rcx, Operand(rsp, argc * kPointerSize));
      __ JumpIfNotSmi(rcx, &with_write_barrier);

      // Save new length.
      __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);

      // Store the value.
      __ movq(FieldOperand(rdi,
                           rax,
                           times_pointer_size,
                           FixedArray::kHeaderSize - argc * kPointerSize),
              rcx);

      __ Integer32ToSmi(rax, rax);  // Return new length as smi.
      __ ret((argc + 1) * kPointerSize);

      __ bind(&with_write_barrier);

      __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));

      if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
        Label fast_object, not_fast_object;
        __ CheckFastObjectElements(rbx, &not_fast_object, Label::kNear);
        __ jmp(&fast_object);
        // In case of fast smi-only, convert to fast object, otherwise bail out.
        __ bind(&not_fast_object);
        __ CheckFastSmiOnlyElements(rbx, &call_builtin);
        // rdx: receiver
        // rbx: map
        __ movq(r9, rdi);  // Backup rdi as it is going to be trashed.
        __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
                                               FAST_ELEMENTS,
                                               rbx,
                                               rdi,
                                               &call_builtin);
        ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm());
        __ movq(rdi, r9);
        __ bind(&fast_object);
      } else {
        __ CheckFastObjectElements(rbx, &call_builtin);
      }

      // Save new length.
      __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);

      // Store the value.
      __ lea(rdx, FieldOperand(rdi,
                               rax, times_pointer_size,
                               FixedArray::kHeaderSize - argc * kPointerSize));
      __ movq(Operand(rdx, 0), rcx);

      __ RecordWrite(rdi, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                     OMIT_SMI_CHECK);

      __ Integer32ToSmi(rax, rax);  // Return new length as smi.
      __ ret((argc + 1) * kPointerSize);

      __ bind(&attempt_to_grow_elements);
      if (!FLAG_inline_new) {
        __ jmp(&call_builtin);
      }

      __ movq(rbx, Operand(rsp, argc * kPointerSize));
      // Growing elements that are SMI-only requires special handling in case
      // the new element is non-Smi. For now, delegate to the builtin.
      Label no_fast_elements_check;
      __ JumpIfSmi(rbx, &no_fast_elements_check);
      __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
      __ CheckFastObjectElements(rcx, &call_builtin, Label::kFar);
      __ bind(&no_fast_elements_check);

      ExternalReference new_space_allocation_top =
          ExternalReference::new_space_allocation_top_address(isolate());
      ExternalReference new_space_allocation_limit =
          ExternalReference::new_space_allocation_limit_address(isolate());

      const int kAllocationDelta = 4;
      // Load top.
      __ Load(rcx, new_space_allocation_top);

      // Check if it's the end of elements.
      __ lea(rdx, FieldOperand(rdi,
                               rax, times_pointer_size,
                               FixedArray::kHeaderSize - argc * kPointerSize));
      __ cmpq(rdx, rcx);
      __ j(not_equal, &call_builtin);
      __ addq(rcx, Immediate(kAllocationDelta * kPointerSize));
      Operand limit_operand =
          masm()->ExternalOperand(new_space_allocation_limit);
      __ cmpq(rcx, limit_operand);
      __ j(above, &call_builtin);

      // We fit and could grow elements.
      __ Store(new_space_allocation_top, rcx);

      // Push the argument...
      __ movq(Operand(rdx, 0), rbx);
      // ... and fill the rest with holes.
      __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
      for (int i = 1; i < kAllocationDelta; i++) {
        __ movq(Operand(rdx, i * kPointerSize), kScratchRegister);
      }

      // We know the elements array is in new space so we don't need the
      // remembered set, but we just pushed a value onto it so we may have to
      // tell the incremental marker to rescan the object that we just grew.  We
      // don't need to worry about the holes because they are in old space and
      // already marked black.
      __ RecordWrite(rdi, rdx, rbx, kDontSaveFPRegs, OMIT_REMEMBERED_SET);

      // Restore receiver to rdx as finish sequence assumes it's here.
      __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

      // Increment element's and array's sizes.
      __ SmiAddConstant(FieldOperand(rdi, FixedArray::kLengthOffset),
                        Smi::FromInt(kAllocationDelta));

      // Make new length a smi before returning it.
      __ Integer32ToSmi(rax, rax);
      __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax);

      __ ret((argc + 1) * kPointerSize);
    }

    __ bind(&call_builtin);
    __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
                                                   isolate()),
                                 argc + 1,
                                 1);
  }

  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(function);
}


Handle<Code> CallStubCompiler::CompileArrayPopCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  // ----------- S t a t e -------------
  //  -- rcx                 : name
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- ...
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || !cell.is_null()) return Handle<Code>::null();

  Label miss, return_undefined, call_builtin;
  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
                  name, &miss);

  // Get the elements array of the object.
  __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset));

  // Check that the elements are in fast mode and writable.
  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                 Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &call_builtin);

  // Get the array's length into rcx and calculate new length.
  __ SmiToInteger32(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
  __ subl(rcx, Immediate(1));
  __ j(negative, &return_undefined);

  // Get the last element.
  __ LoadRoot(r9, Heap::kTheHoleValueRootIndex);
  __ movq(rax, FieldOperand(rbx,
                            rcx, times_pointer_size,
                            FixedArray::kHeaderSize));
  // Check if element is already the hole.
  __ cmpq(rax, r9);
  // If so, call slow-case to also check prototypes for value.
  __ j(equal, &call_builtin);

  // Set the array's length.
  __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rcx);

  // Fill with the hole and return original value.
  __ movq(FieldOperand(rbx,
                       rcx, times_pointer_size,
                       FixedArray::kHeaderSize),
          r9);
  __ ret((argc + 1) * kPointerSize);

  __ bind(&return_undefined);
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  __ ret((argc + 1) * kPointerSize);

  __ bind(&call_builtin);
  __ TailCallExternalReference(
      ExternalReference(Builtins::c_ArrayPop, isolate()),
      argc + 1,
      1);

  __ bind(&miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(function);
}


Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  // ----------- S t a t e -------------
  //  -- rcx                 : function name
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- ...
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();

  const int argc = arguments().immediate();

  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;
  if (kind_ == Code::CALL_IC &&
      (CallICBase::StringStubState::decode(extra_state_) ==
       DEFAULT_STRING_STUB)) {
    index_out_of_range_label = &miss;
  }
  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            rax,
                                            &miss);
  ASSERT(!object.is_identical_to(holder));
  CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())),
                  rax, holder, rbx, rdx, rdi, name, &miss);

  Register receiver = rbx;
  Register index = rdi;
  Register result = rax;
  __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize));
  if (argc > 0) {
    __ movq(index, Operand(rsp, (argc - 0) * kPointerSize));
  } else {
    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
  }

  StringCharCodeAtGenerator generator(receiver,
                                      index,
                                      result,
                                      &miss,  // When not a string.
                                      &miss,  // When not a number.
                                      index_out_of_range_label,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm());
  __ ret((argc + 1) * kPointerSize);

  StubRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ LoadRoot(rax, Heap::kNanValueRootIndex);
    __ ret((argc + 1) * kPointerSize);
  }

  __ bind(&miss);
  // Restore function name in rcx.
  __ Move(rcx, name);
  __ bind(&name_miss);
  GenerateMissBranch();

  // Return the generated code.
  return GetCode(function);
}


Handle<Code> CallStubCompiler::CompileStringCharAtCall(
    Handle<Object> object,
    Handle<JSObject> holder,
    Handle<JSGlobalPropertyCell> cell,
    Handle<JSFunction> function,
    Handle<String> name) {
  // ----------- S t a t e -------------
  //  -- rcx                 : function name
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- ...
1710 // -- rsp[(argc + 1) * 8] : receiver 1711 // ----------------------------------- 1712 1713 // If object is not a string, bail out to regular call. 1714 if (!object->IsString() || !cell.is_null()) return Handle<Code>::null(); 1715 1716 const int argc = arguments().immediate(); 1717 Label miss; 1718 Label name_miss; 1719 Label index_out_of_range; 1720 Label* index_out_of_range_label = &index_out_of_range; 1721 if (kind_ == Code::CALL_IC && 1722 (CallICBase::StringStubState::decode(extra_state_) == 1723 DEFAULT_STRING_STUB)) { 1724 index_out_of_range_label = &miss; 1725 } 1726 GenerateNameCheck(name, &name_miss); 1727 1728 // Check that the maps starting from the prototype haven't changed. 1729 GenerateDirectLoadGlobalFunctionPrototype(masm(), 1730 Context::STRING_FUNCTION_INDEX, 1731 rax, 1732 &miss); 1733 ASSERT(!object.is_identical_to(holder)); 1734 CheckPrototypes(Handle<JSObject>(JSObject::cast(object->GetPrototype())), 1735 rax, holder, rbx, rdx, rdi, name, &miss); 1736 1737 Register receiver = rax; 1738 Register index = rdi; 1739 Register scratch = rdx; 1740 Register result = rax; 1741 __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize)); 1742 if (argc > 0) { 1743 __ movq(index, Operand(rsp, (argc - 0) * kPointerSize)); 1744 } else { 1745 __ LoadRoot(index, Heap::kUndefinedValueRootIndex); 1746 } 1747 1748 StringCharAtGenerator generator(receiver, 1749 index, 1750 scratch, 1751 result, 1752 &miss, // When not a string. 1753 &miss, // When not a number. 1754 index_out_of_range_label, 1755 STRING_INDEX_IS_NUMBER); 1756 generator.GenerateFast(masm()); 1757 __ ret((argc + 1) * kPointerSize); 1758 1759 StubRuntimeCallHelper call_helper; 1760 generator.GenerateSlow(masm(), call_helper); 1761 1762 if (index_out_of_range.is_linked()) { 1763 __ bind(&index_out_of_range); 1764 __ LoadRoot(rax, Heap::kEmptyStringRootIndex); 1765 __ ret((argc + 1) * kPointerSize); 1766 } 1767 __ bind(&miss); 1768 // Restore function name in rcx. 1769 __ Move(rcx, name); 1770 __ bind(&name_miss); 1771 GenerateMissBranch(); 1772 1773 // Return the generated code. 1774 return GetCode(function); 1775 } 1776 1777 1778 Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall( 1779 Handle<Object> object, 1780 Handle<JSObject> holder, 1781 Handle<JSGlobalPropertyCell> cell, 1782 Handle<JSFunction> function, 1783 Handle<String> name) { 1784 // ----------- S t a t e ------------- 1785 // -- rcx : function name 1786 // -- rsp[0] : return address 1787 // -- rsp[(argc - n) * 8] : arg[n] (zero-based) 1788 // -- ... 1789 // -- rsp[(argc + 1) * 8] : receiver 1790 // ----------------------------------- 1791 1792 // If the object is not a JSObject or we got an unexpected number of 1793 // arguments, bail out to the regular call. 1794 const int argc = arguments().immediate(); 1795 if (!object->IsJSObject() || argc != 1) return Handle<Code>::null(); 1796 1797 Label miss; 1798 GenerateNameCheck(name, &miss); 1799 1800 if (cell.is_null()) { 1801 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); 1802 __ JumpIfSmi(rdx, &miss); 1803 CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi, 1804 name, &miss); 1805 } else { 1806 ASSERT(cell->value() == *function); 1807 GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name, 1808 &miss); 1809 GenerateLoadFunctionFromCell(cell, function, &miss); 1810 } 1811 1812 // Load the char code argument. 1813 Register code = rbx; 1814 __ movq(code, Operand(rsp, 1 * kPointerSize)); 1815 1816 // Check the code is a smi. 
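// Only smi char codes are handled on this fast path; a heap-number
// argument falls through to the slow case below, which simply invokes the
// builtin function. The 0xffff mask applied next implements the ToUint16
// conversion that String.fromCharCode performs on its argument.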
1817 Label slow;
1818 __ JumpIfNotSmi(code, &slow);
1819
1820 // Convert the smi code to uint16.
1821 __ SmiAndConstant(code, code, Smi::FromInt(0xffff));
1822
1823 StringCharFromCodeGenerator generator(code, rax);
1824 generator.GenerateFast(masm());
1825 __ ret(2 * kPointerSize);
1826
1827 StubRuntimeCallHelper call_helper;
1828 generator.GenerateSlow(masm(), call_helper);
1829
1830 // Tail call the full function. We do not have to patch the receiver
1831 // because the function makes no use of it.
1832 __ bind(&slow);
1833 CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
1834 ? CALL_AS_FUNCTION
1835 : CALL_AS_METHOD;
1836 __ InvokeFunction(function, arguments(), JUMP_FUNCTION,
1837 NullCallWrapper(), call_kind);
1838
1839 __ bind(&miss);
1840 // rcx: function name.
1841 GenerateMissBranch();
1842
1843 // Return the generated code.
1844 return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
1845 }
1846
1847
1848 Handle<Code> CallStubCompiler::CompileMathFloorCall(
1849 Handle<Object> object,
1850 Handle<JSObject> holder,
1851 Handle<JSGlobalPropertyCell> cell,
1852 Handle<JSFunction> function,
1853 Handle<String> name) {
1854 // TODO(872): implement this.
1855 return Handle<Code>::null();
1856 }
1857
1858
1859 Handle<Code> CallStubCompiler::CompileMathAbsCall(
1860 Handle<Object> object,
1861 Handle<JSObject> holder,
1862 Handle<JSGlobalPropertyCell> cell,
1863 Handle<JSFunction> function,
1864 Handle<String> name) {
1865 // ----------- S t a t e -------------
1866 // -- rcx : function name
1867 // -- rsp[0] : return address
1868 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1869 // -- ...
1870 // -- rsp[(argc + 1) * 8] : receiver
1871 // -----------------------------------
1872
1873 // If the object is not a JSObject or we got an unexpected number of
1874 // arguments, bail out to the regular call.
1875 const int argc = arguments().immediate();
1876 if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
1877
1878 Label miss;
1879 GenerateNameCheck(name, &miss);
1880
1881 if (cell.is_null()) {
1882 __ movq(rdx, Operand(rsp, 2 * kPointerSize));
1883 __ JumpIfSmi(rdx, &miss);
1884 CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
1885 name, &miss);
1886 } else {
1887 ASSERT(cell->value() == *function);
1888 GenerateGlobalReceiverCheck(Handle<JSObject>::cast(object), holder, name,
1889 &miss);
1890 GenerateLoadFunctionFromCell(cell, function, &miss);
1891 }
1892 // Load the (only) argument into rax.
1893 __ movq(rax, Operand(rsp, 1 * kPointerSize));
1894
1895 // Check if the argument is a smi.
1896 Label not_smi;
1897 STATIC_ASSERT(kSmiTag == 0);
1898 __ JumpIfNotSmi(rax, &not_smi);
1899 __ SmiToInteger32(rax, rax);
1900
1901 // Set rbx to 1...1 (== -1) if the argument is negative, or to 0...0
1902 // otherwise.
1903 __ movl(rbx, rax);
1904 __ sarl(rbx, Immediate(kBitsPerInt - 1));
1905
1906 // Do bitwise not or do nothing depending on rbx.
1907 __ xorl(rax, rbx);
1908
1909 // Add 1 or do nothing depending on rbx.
1910 __ subl(rax, rbx);
1911
1912 // If the result is still negative, go to the slow case.
1913 // This only happens for the most negative smi.
1914 Label slow;
1915 __ j(negative, &slow);
1916
1917 // Smi case done.
1918 __ Integer32ToSmi(rax, rax);
1919 __ ret(2 * kPointerSize);
1920
1921 // Check if the argument is a heap number and load its value.
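// For heap numbers no arithmetic is needed: Math.abs only has to clear
// the IEEE 754 sign bit. The code below tests the sign bit of the raw
// 64-bit value and, when it is set, allocates a fresh heap number holding
// the value with that bit cleared.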
1922 __ bind(&not_smi);
1923 __ CheckMap(rax, factory()->heap_number_map(), &slow, DONT_DO_SMI_CHECK);
1924 __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset));
1925
1926 // Check the sign of the argument. If the argument is positive,
1927 // just return it.
1928 Label negative_sign;
1929 const int sign_mask_shift =
1930 (HeapNumber::kExponentOffset - HeapNumber::kValueOffset) * kBitsPerByte;
1931 __ movq(rdi, static_cast<int64_t>(HeapNumber::kSignMask) << sign_mask_shift,
1932 RelocInfo::NONE);
1933 __ testq(rbx, rdi);
1934 __ j(not_zero, &negative_sign);
1935 __ ret(2 * kPointerSize);
1936
1937 // If the argument is negative, clear the sign, and return a new
1938 // number. We still have the sign mask in rdi.
1939 __ bind(&negative_sign);
1940 __ xor_(rbx, rdi);
1941 __ AllocateHeapNumber(rax, rdx, &slow);
1942 __ movq(FieldOperand(rax, HeapNumber::kValueOffset), rbx);
1943 __ ret(2 * kPointerSize);
1944
1945 // Tail call the full function. We do not have to patch the receiver
1946 // because the function makes no use of it.
1947 __ bind(&slow);
1948 CallKind call_kind = CallICBase::Contextual::decode(extra_state_)
1949 ? CALL_AS_FUNCTION
1950 : CALL_AS_METHOD;
1951 __ InvokeFunction(function, arguments(), JUMP_FUNCTION,
1952 NullCallWrapper(), call_kind);
1953
1954 __ bind(&miss);
1955 // rcx: function name.
1956 GenerateMissBranch();
1957
1958 // Return the generated code.
1959 return cell.is_null() ? GetCode(function) : GetCode(NORMAL, name);
1960 }
1961
1962
1963 Handle<Code> CallStubCompiler::CompileFastApiCall(
1964 const CallOptimization& optimization,
1965 Handle<Object> object,
1966 Handle<JSObject> holder,
1967 Handle<JSGlobalPropertyCell> cell,
1968 Handle<JSFunction> function,
1969 Handle<String> name) {
1970 ASSERT(optimization.is_simple_api_call());
1971 // Bail out if object is a global object, as we don't want to
1972 // repatch it to the global receiver.
1973 if (object->IsGlobalObject()) return Handle<Code>::null();
1974 if (!cell.is_null()) return Handle<Code>::null();
1975 if (!object->IsJSObject()) return Handle<Code>::null();
1976 int depth = optimization.GetPrototypeDepthOfExpectedType(
1977 Handle<JSObject>::cast(object), holder);
1978 if (depth == kInvalidProtoDepth) return Handle<Code>::null();
1979
1980 Label miss, miss_before_stack_reserved;
1981 GenerateNameCheck(name, &miss_before_stack_reserved);
1982
1983 // Get the receiver from the stack.
1984 const int argc = arguments().immediate();
1985 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1986
1987 // Check that the receiver isn't a smi.
1988 __ JumpIfSmi(rdx, &miss_before_stack_reserved);
1989
1990 Counters* counters = isolate()->counters();
1991 __ IncrementCounter(counters->call_const(), 1);
1992 __ IncrementCounter(counters->call_const_fast_api(), 1);
1993
1994 // Allocate space for v8::Arguments implicit values. Must be initialized
1995 // before calling any runtime function.
1996 __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
1997
1998 // Check that the maps haven't changed and find the holder as a side effect.
1999 CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, rdi,
2000 name, depth, &miss);
2001
2002 // Move the return address on top of the stack.
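// The return address was pushed before the v8::Arguments slots were
// reserved, so it is copied from its old slot down to the new top of the
// stack (the 3 * kPointerSize offset below is assumed to match
// kFastApiCallArguments); the reserved words then sit between the return
// address and the actual call arguments.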
2003 __ movq(rax, Operand(rsp, 3 * kPointerSize)); 2004 __ movq(Operand(rsp, 0 * kPointerSize), rax); 2005 2006 GenerateFastApiCall(masm(), optimization, argc); 2007 2008 __ bind(&miss); 2009 __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize)); 2010 2011 __ bind(&miss_before_stack_reserved); 2012 GenerateMissBranch(); 2013 2014 // Return the generated code. 2015 return GetCode(function); 2016 } 2017 2018 2019 Handle<Code> CallStubCompiler::CompileCallConstant(Handle<Object> object, 2020 Handle<JSObject> holder, 2021 Handle<JSFunction> function, 2022 Handle<String> name, 2023 CheckType check) { 2024 // ----------- S t a t e ------------- 2025 // rcx : function name 2026 // rsp[0] : return address 2027 // rsp[8] : argument argc 2028 // rsp[16] : argument argc - 1 2029 // ... 2030 // rsp[argc * 8] : argument 1 2031 // rsp[(argc + 1) * 8] : argument 0 = receiver 2032 // ----------------------------------- 2033 2034 if (HasCustomCallGenerator(function)) { 2035 Handle<Code> code = CompileCustomCall(object, holder, 2036 Handle<JSGlobalPropertyCell>::null(), 2037 function, name); 2038 // A null handle means bail out to the regular compiler code below. 2039 if (!code.is_null()) return code; 2040 } 2041 2042 Label miss; 2043 GenerateNameCheck(name, &miss); 2044 2045 // Get the receiver from the stack. 2046 const int argc = arguments().immediate(); 2047 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); 2048 2049 // Check that the receiver isn't a smi. 2050 if (check != NUMBER_CHECK) { 2051 __ JumpIfSmi(rdx, &miss); 2052 } 2053 2054 // Make sure that it's okay not to patch the on stack receiver 2055 // unless we're doing a receiver map check. 2056 ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK); 2057 2058 Counters* counters = isolate()->counters(); 2059 switch (check) { 2060 case RECEIVER_MAP_CHECK: 2061 __ IncrementCounter(counters->call_const(), 1); 2062 2063 // Check that the maps haven't changed. 2064 CheckPrototypes(Handle<JSObject>::cast(object), rdx, holder, rbx, rax, 2065 rdi, name, &miss); 2066 2067 // Patch the receiver on the stack with the global proxy if 2068 // necessary. 2069 if (object->IsGlobalObject()) { 2070 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset)); 2071 __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx); 2072 } 2073 break; 2074 2075 case STRING_CHECK: 2076 if (function->IsBuiltin() || !function->shared()->is_classic_mode()) { 2077 // Check that the object is a two-byte string or a symbol. 2078 __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, rax); 2079 __ j(above_equal, &miss); 2080 // Check that the maps starting from the prototype haven't changed. 2081 GenerateDirectLoadGlobalFunctionPrototype( 2082 masm(), Context::STRING_FUNCTION_INDEX, rax, &miss); 2083 CheckPrototypes( 2084 Handle<JSObject>(JSObject::cast(object->GetPrototype())), 2085 rax, holder, rbx, rdx, rdi, name, &miss); 2086 } else { 2087 // Calling non-strict non-builtins with a value as the receiver 2088 // requires boxing. 2089 __ jmp(&miss); 2090 } 2091 break; 2092 2093 case NUMBER_CHECK: 2094 if (function->IsBuiltin() || !function->shared()->is_classic_mode()) { 2095 Label fast; 2096 // Check that the object is a smi or a heap number. 2097 __ JumpIfSmi(rdx, &fast); 2098 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rax); 2099 __ j(not_equal, &miss); 2100 __ bind(&fast); 2101 // Check that the maps starting from the prototype haven't changed. 
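// For a number receiver the checks run against Number.prototype: the
// prototype object is loaded from the global context's number function
// and the chain from there up to the holder is verified.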
2102 GenerateDirectLoadGlobalFunctionPrototype( 2103 masm(), Context::NUMBER_FUNCTION_INDEX, rax, &miss); 2104 CheckPrototypes( 2105 Handle<JSObject>(JSObject::cast(object->GetPrototype())), 2106 rax, holder, rbx, rdx, rdi, name, &miss); 2107 } else { 2108 // Calling non-strict non-builtins with a value as the receiver 2109 // requires boxing. 2110 __ jmp(&miss); 2111 } 2112 break; 2113 2114 case BOOLEAN_CHECK: 2115 if (function->IsBuiltin() || !function->shared()->is_classic_mode()) { 2116 Label fast; 2117 // Check that the object is a boolean. 2118 __ CompareRoot(rdx, Heap::kTrueValueRootIndex); 2119 __ j(equal, &fast); 2120 __ CompareRoot(rdx, Heap::kFalseValueRootIndex); 2121 __ j(not_equal, &miss); 2122 __ bind(&fast); 2123 // Check that the maps starting from the prototype haven't changed. 2124 GenerateDirectLoadGlobalFunctionPrototype( 2125 masm(), Context::BOOLEAN_FUNCTION_INDEX, rax, &miss); 2126 CheckPrototypes( 2127 Handle<JSObject>(JSObject::cast(object->GetPrototype())), 2128 rax, holder, rbx, rdx, rdi, name, &miss); 2129 } else { 2130 // Calling non-strict non-builtins with a value as the receiver 2131 // requires boxing. 2132 __ jmp(&miss); 2133 } 2134 break; 2135 } 2136 2137 CallKind call_kind = CallICBase::Contextual::decode(extra_state_) 2138 ? CALL_AS_FUNCTION 2139 : CALL_AS_METHOD; 2140 __ InvokeFunction(function, arguments(), JUMP_FUNCTION, 2141 NullCallWrapper(), call_kind); 2142 2143 // Handle call cache miss. 2144 __ bind(&miss); 2145 GenerateMissBranch(); 2146 2147 // Return the generated code. 2148 return GetCode(function); 2149 } 2150 2151 2152 Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object, 2153 Handle<JSObject> holder, 2154 Handle<String> name) { 2155 // ----------- S t a t e ------------- 2156 // rcx : function name 2157 // rsp[0] : return address 2158 // rsp[8] : argument argc 2159 // rsp[16] : argument argc - 1 2160 // ... 2161 // rsp[argc * 8] : argument 1 2162 // rsp[(argc + 1) * 8] : argument 0 = receiver 2163 // ----------------------------------- 2164 Label miss; 2165 GenerateNameCheck(name, &miss); 2166 2167 // Get the number of arguments. 2168 const int argc = arguments().immediate(); 2169 2170 LookupResult lookup(isolate()); 2171 LookupPostInterceptor(holder, name, &lookup); 2172 2173 // Get the receiver from the stack. 2174 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); 2175 2176 CallInterceptorCompiler compiler(this, arguments(), rcx, extra_state_); 2177 compiler.Compile(masm(), object, holder, name, &lookup, rdx, rbx, rdi, rax, 2178 &miss); 2179 2180 // Restore receiver. 2181 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); 2182 2183 // Check that the function really is a function. 2184 __ JumpIfSmi(rax, &miss); 2185 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx); 2186 __ j(not_equal, &miss); 2187 2188 // Patch the receiver on the stack with the global proxy if 2189 // necessary. 2190 if (object->IsGlobalObject()) { 2191 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset)); 2192 __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx); 2193 } 2194 2195 // Invoke the function. 2196 __ movq(rdi, rax); 2197 CallKind call_kind = CallICBase::Contextual::decode(extra_state_) 2198 ? CALL_AS_FUNCTION 2199 : CALL_AS_METHOD; 2200 __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION, 2201 NullCallWrapper(), call_kind); 2202 2203 // Handle load cache miss. 2204 __ bind(&miss); 2205 GenerateMissBranch(); 2206 2207 // Return the generated code. 
2208 return GetCode(INTERCEPTOR, name); 2209 } 2210 2211 2212 Handle<Code> CallStubCompiler::CompileCallGlobal( 2213 Handle<JSObject> object, 2214 Handle<GlobalObject> holder, 2215 Handle<JSGlobalPropertyCell> cell, 2216 Handle<JSFunction> function, 2217 Handle<String> name) { 2218 // ----------- S t a t e ------------- 2219 // rcx : function name 2220 // rsp[0] : return address 2221 // rsp[8] : argument argc 2222 // rsp[16] : argument argc - 1 2223 // ... 2224 // rsp[argc * 8] : argument 1 2225 // rsp[(argc + 1) * 8] : argument 0 = receiver 2226 // ----------------------------------- 2227 2228 if (HasCustomCallGenerator(function)) { 2229 Handle<Code> code = CompileCustomCall(object, holder, cell, function, name); 2230 // A null handle means bail out to the regular compiler code below. 2231 if (!code.is_null()) return code; 2232 } 2233 2234 Label miss; 2235 GenerateNameCheck(name, &miss); 2236 2237 // Get the number of arguments. 2238 const int argc = arguments().immediate(); 2239 GenerateGlobalReceiverCheck(object, holder, name, &miss); 2240 GenerateLoadFunctionFromCell(cell, function, &miss); 2241 2242 // Patch the receiver on the stack with the global proxy. 2243 if (object->IsGlobalObject()) { 2244 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset)); 2245 __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx); 2246 } 2247 2248 // Set up the context (function already in rdi). 2249 __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 2250 2251 // Jump to the cached code (tail call). 2252 Counters* counters = isolate()->counters(); 2253 __ IncrementCounter(counters->call_global_inline(), 1); 2254 ParameterCount expected(function->shared()->formal_parameter_count()); 2255 CallKind call_kind = CallICBase::Contextual::decode(extra_state_) 2256 ? CALL_AS_FUNCTION 2257 : CALL_AS_METHOD; 2258 // We call indirectly through the code field in the function to 2259 // allow recompilation to take effect without changing any of the 2260 // call sites. 2261 __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); 2262 __ InvokeCode(rdx, expected, arguments(), JUMP_FUNCTION, 2263 NullCallWrapper(), call_kind); 2264 2265 // Handle call cache miss. 2266 __ bind(&miss); 2267 __ IncrementCounter(counters->call_global_inline_miss(), 1); 2268 GenerateMissBranch(); 2269 2270 // Return the generated code. 2271 return GetCode(NORMAL, name); 2272 } 2273 2274 2275 Handle<Code> StoreStubCompiler::CompileStoreField(Handle<JSObject> object, 2276 int index, 2277 Handle<Map> transition, 2278 Handle<String> name) { 2279 // ----------- S t a t e ------------- 2280 // -- rax : value 2281 // -- rcx : name 2282 // -- rdx : receiver 2283 // -- rsp[0] : return address 2284 // ----------------------------------- 2285 Label miss; 2286 2287 // Generate store field code. Preserves receiver and name on jump to miss. 2288 GenerateStoreField(masm(), object, index, transition, rdx, rcx, rbx, &miss); 2289 2290 // Handle store cache miss. 2291 __ bind(&miss); 2292 Handle<Code> ic = isolate()->builtins()->StoreIC_Miss(); 2293 __ Jump(ic, RelocInfo::CODE_TARGET); 2294 2295 // Return the generated code. 2296 return GetCode(transition.is_null() ? 
FIELD : MAP_TRANSITION, name); 2297 } 2298 2299 2300 Handle<Code> StoreStubCompiler::CompileStoreCallback( 2301 Handle<JSObject> object, 2302 Handle<AccessorInfo> callback, 2303 Handle<String> name) { 2304 // ----------- S t a t e ------------- 2305 // -- rax : value 2306 // -- rcx : name 2307 // -- rdx : receiver 2308 // -- rsp[0] : return address 2309 // ----------------------------------- 2310 Label miss; 2311 2312 // Check that the map of the object hasn't changed. 2313 __ CheckMap(rdx, Handle<Map>(object->map()), &miss, 2314 DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS); 2315 2316 // Perform global security token check if needed. 2317 if (object->IsJSGlobalProxy()) { 2318 __ CheckAccessGlobalProxy(rdx, rbx, &miss); 2319 } 2320 2321 // Stub never generated for non-global objects that require access 2322 // checks. 2323 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); 2324 2325 __ pop(rbx); // remove the return address 2326 __ push(rdx); // receiver 2327 __ Push(callback); // callback info 2328 __ push(rcx); // name 2329 __ push(rax); // value 2330 __ push(rbx); // restore return address 2331 2332 // Do tail-call to the runtime system. 2333 ExternalReference store_callback_property = 2334 ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate()); 2335 __ TailCallExternalReference(store_callback_property, 4, 1); 2336 2337 // Handle store cache miss. 2338 __ bind(&miss); 2339 Handle<Code> ic = isolate()->builtins()->StoreIC_Miss(); 2340 __ Jump(ic, RelocInfo::CODE_TARGET); 2341 2342 // Return the generated code. 2343 return GetCode(CALLBACKS, name); 2344 } 2345 2346 2347 Handle<Code> StoreStubCompiler::CompileStoreInterceptor( 2348 Handle<JSObject> receiver, 2349 Handle<String> name) { 2350 // ----------- S t a t e ------------- 2351 // -- rax : value 2352 // -- rcx : name 2353 // -- rdx : receiver 2354 // -- rsp[0] : return address 2355 // ----------------------------------- 2356 Label miss; 2357 2358 // Check that the map of the object hasn't changed. 2359 __ CheckMap(rdx, Handle<Map>(receiver->map()), &miss, 2360 DO_SMI_CHECK, ALLOW_ELEMENT_TRANSITION_MAPS); 2361 2362 // Perform global security token check if needed. 2363 if (receiver->IsJSGlobalProxy()) { 2364 __ CheckAccessGlobalProxy(rdx, rbx, &miss); 2365 } 2366 2367 // Stub never generated for non-global objects that require access 2368 // checks. 2369 ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded()); 2370 2371 __ pop(rbx); // remove the return address 2372 __ push(rdx); // receiver 2373 __ push(rcx); // name 2374 __ push(rax); // value 2375 __ Push(Smi::FromInt(strict_mode_)); 2376 __ push(rbx); // restore return address 2377 2378 // Do tail-call to the runtime system. 2379 ExternalReference store_ic_property = 2380 ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate()); 2381 __ TailCallExternalReference(store_ic_property, 4, 1); 2382 2383 // Handle store cache miss. 2384 __ bind(&miss); 2385 Handle<Code> ic = isolate()->builtins()->StoreIC_Miss(); 2386 __ Jump(ic, RelocInfo::CODE_TARGET); 2387 2388 // Return the generated code. 
2389 return GetCode(INTERCEPTOR, name); 2390 } 2391 2392 2393 Handle<Code> StoreStubCompiler::CompileStoreGlobal( 2394 Handle<GlobalObject> object, 2395 Handle<JSGlobalPropertyCell> cell, 2396 Handle<String> name) { 2397 // ----------- S t a t e ------------- 2398 // -- rax : value 2399 // -- rcx : name 2400 // -- rdx : receiver 2401 // -- rsp[0] : return address 2402 // ----------------------------------- 2403 Label miss; 2404 2405 // Check that the map of the global has not changed. 2406 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset), 2407 Handle<Map>(object->map())); 2408 __ j(not_equal, &miss); 2409 2410 // Compute the cell operand to use. 2411 __ Move(rbx, cell); 2412 Operand cell_operand = FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset); 2413 2414 // Check that the value in the cell is not the hole. If it is, this 2415 // cell could have been deleted and reintroducing the global needs 2416 // to update the property details in the property dictionary of the 2417 // global object. We bail out to the runtime system to do that. 2418 __ CompareRoot(cell_operand, Heap::kTheHoleValueRootIndex); 2419 __ j(equal, &miss); 2420 2421 // Store the value in the cell. 2422 __ movq(cell_operand, rax); 2423 // Cells are always rescanned, so no write barrier here. 2424 2425 // Return the value (register rax). 2426 Counters* counters = isolate()->counters(); 2427 __ IncrementCounter(counters->named_store_global_inline(), 1); 2428 __ ret(0); 2429 2430 // Handle store cache miss. 2431 __ bind(&miss); 2432 __ IncrementCounter(counters->named_store_global_inline_miss(), 1); 2433 Handle<Code> ic = isolate()->builtins()->StoreIC_Miss(); 2434 __ Jump(ic, RelocInfo::CODE_TARGET); 2435 2436 // Return the generated code. 2437 return GetCode(NORMAL, name); 2438 } 2439 2440 2441 Handle<Code> KeyedStoreStubCompiler::CompileStoreField(Handle<JSObject> object, 2442 int index, 2443 Handle<Map> transition, 2444 Handle<String> name) { 2445 // ----------- S t a t e ------------- 2446 // -- rax : value 2447 // -- rcx : key 2448 // -- rdx : receiver 2449 // -- rsp[0] : return address 2450 // ----------------------------------- 2451 Label miss; 2452 2453 Counters* counters = isolate()->counters(); 2454 __ IncrementCounter(counters->keyed_store_field(), 1); 2455 2456 // Check that the name has not changed. 2457 __ Cmp(rcx, name); 2458 __ j(not_equal, &miss); 2459 2460 // Generate store field code. Preserves receiver and name on jump to miss. 2461 GenerateStoreField(masm(), object, index, transition, rdx, rcx, rbx, &miss); 2462 2463 // Handle store cache miss. 2464 __ bind(&miss); 2465 __ DecrementCounter(counters->keyed_store_field(), 1); 2466 Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss(); 2467 __ Jump(ic, RelocInfo::CODE_TARGET); 2468 2469 // Return the generated code. 2470 return GetCode(transition.is_null() ? 
FIELD : MAP_TRANSITION, name); 2471 } 2472 2473 2474 Handle<Code> KeyedStoreStubCompiler::CompileStoreElement( 2475 Handle<Map> receiver_map) { 2476 // ----------- S t a t e ------------- 2477 // -- rax : value 2478 // -- rcx : key 2479 // -- rdx : receiver 2480 // -- rsp[0] : return address 2481 // ----------------------------------- 2482 2483 ElementsKind elements_kind = receiver_map->elements_kind(); 2484 bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE; 2485 Handle<Code> stub = 2486 KeyedStoreElementStub(is_js_array, elements_kind, grow_mode_).GetCode(); 2487 2488 __ DispatchMap(rdx, receiver_map, stub, DO_SMI_CHECK); 2489 2490 Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss(); 2491 __ jmp(ic, RelocInfo::CODE_TARGET); 2492 2493 // Return the generated code. 2494 return GetCode(NORMAL, factory()->empty_string()); 2495 } 2496 2497 2498 Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic( 2499 MapHandleList* receiver_maps, 2500 CodeHandleList* handler_stubs, 2501 MapHandleList* transitioned_maps) { 2502 // ----------- S t a t e ------------- 2503 // -- rax : value 2504 // -- rcx : key 2505 // -- rdx : receiver 2506 // -- rsp[0] : return address 2507 // ----------------------------------- 2508 Label miss; 2509 __ JumpIfSmi(rdx, &miss, Label::kNear); 2510 2511 __ movq(rdi, FieldOperand(rdx, HeapObject::kMapOffset)); 2512 int receiver_count = receiver_maps->length(); 2513 for (int i = 0; i < receiver_count; ++i) { 2514 // Check map and tail call if there's a match 2515 __ Cmp(rdi, receiver_maps->at(i)); 2516 if (transitioned_maps->at(i).is_null()) { 2517 __ j(equal, handler_stubs->at(i), RelocInfo::CODE_TARGET); 2518 } else { 2519 Label next_map; 2520 __ j(not_equal, &next_map, Label::kNear); 2521 __ movq(rbx, transitioned_maps->at(i), RelocInfo::EMBEDDED_OBJECT); 2522 __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET); 2523 __ bind(&next_map); 2524 } 2525 } 2526 2527 __ bind(&miss); 2528 Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss(); 2529 __ jmp(ic, RelocInfo::CODE_TARGET); 2530 2531 // Return the generated code. 2532 return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC); 2533 } 2534 2535 2536 Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<String> name, 2537 Handle<JSObject> object, 2538 Handle<JSObject> last) { 2539 // ----------- S t a t e ------------- 2540 // -- rax : receiver 2541 // -- rcx : name 2542 // -- rsp[0] : return address 2543 // ----------------------------------- 2544 Label miss; 2545 2546 // Check that receiver is not a smi. 2547 __ JumpIfSmi(rax, &miss); 2548 2549 // Check the maps of the full prototype chain. Also check that 2550 // global property cells up to (but not including) the last object 2551 // in the prototype chain are empty. 2552 CheckPrototypes(object, rax, last, rbx, rdx, rdi, name, &miss); 2553 2554 // If the last object in the prototype chain is a global object, 2555 // check that the global property cell is empty. 2556 if (last->IsGlobalObject()) { 2557 GenerateCheckPropertyCell( 2558 masm(), Handle<GlobalObject>::cast(last), name, rdx, &miss); 2559 } 2560 2561 // Return undefined if maps of the full prototype chain are still the 2562 // same and no global property with this name contains a value. 2563 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex); 2564 __ ret(0); 2565 2566 __ bind(&miss); 2567 GenerateLoadMiss(masm(), Code::LOAD_IC); 2568 2569 // Return the generated code. 
2570 return GetCode(NONEXISTENT, factory()->empty_string()); 2571 } 2572 2573 2574 Handle<Code> LoadStubCompiler::CompileLoadField(Handle<JSObject> object, 2575 Handle<JSObject> holder, 2576 int index, 2577 Handle<String> name) { 2578 // ----------- S t a t e ------------- 2579 // -- rax : receiver 2580 // -- rcx : name 2581 // -- rsp[0] : return address 2582 // ----------------------------------- 2583 Label miss; 2584 2585 GenerateLoadField(object, holder, rax, rbx, rdx, rdi, index, name, &miss); 2586 __ bind(&miss); 2587 GenerateLoadMiss(masm(), Code::LOAD_IC); 2588 2589 // Return the generated code. 2590 return GetCode(FIELD, name); 2591 } 2592 2593 2594 Handle<Code> LoadStubCompiler::CompileLoadCallback( 2595 Handle<String> name, 2596 Handle<JSObject> object, 2597 Handle<JSObject> holder, 2598 Handle<AccessorInfo> callback) { 2599 // ----------- S t a t e ------------- 2600 // -- rax : receiver 2601 // -- rcx : name 2602 // -- rsp[0] : return address 2603 // ----------------------------------- 2604 Label miss; 2605 GenerateLoadCallback(object, holder, rax, rcx, rdx, rbx, rdi, callback, 2606 name, &miss); 2607 __ bind(&miss); 2608 GenerateLoadMiss(masm(), Code::LOAD_IC); 2609 2610 // Return the generated code. 2611 return GetCode(CALLBACKS, name); 2612 } 2613 2614 2615 Handle<Code> LoadStubCompiler::CompileLoadConstant(Handle<JSObject> object, 2616 Handle<JSObject> holder, 2617 Handle<JSFunction> value, 2618 Handle<String> name) { 2619 // ----------- S t a t e ------------- 2620 // -- rax : receiver 2621 // -- rcx : name 2622 // -- rsp[0] : return address 2623 // ----------------------------------- 2624 Label miss; 2625 2626 GenerateLoadConstant(object, holder, rax, rbx, rdx, rdi, value, name, &miss); 2627 __ bind(&miss); 2628 GenerateLoadMiss(masm(), Code::LOAD_IC); 2629 2630 // Return the generated code. 2631 return GetCode(CONSTANT_FUNCTION, name); 2632 } 2633 2634 2635 Handle<Code> LoadStubCompiler::CompileLoadInterceptor(Handle<JSObject> receiver, 2636 Handle<JSObject> holder, 2637 Handle<String> name) { 2638 // ----------- S t a t e ------------- 2639 // -- rax : receiver 2640 // -- rcx : name 2641 // -- rsp[0] : return address 2642 // ----------------------------------- 2643 Label miss; 2644 LookupResult lookup(isolate()); 2645 LookupPostInterceptor(holder, name, &lookup); 2646 2647 // TODO(368): Compile in the whole chain: all the interceptors in 2648 // prototypes and ultimate answer. 2649 GenerateLoadInterceptor(receiver, holder, &lookup, rax, rcx, rdx, rbx, rdi, 2650 name, &miss); 2651 __ bind(&miss); 2652 GenerateLoadMiss(masm(), Code::LOAD_IC); 2653 2654 // Return the generated code. 2655 return GetCode(INTERCEPTOR, name); 2656 } 2657 2658 2659 Handle<Code> LoadStubCompiler::CompileLoadGlobal( 2660 Handle<JSObject> object, 2661 Handle<GlobalObject> holder, 2662 Handle<JSGlobalPropertyCell> cell, 2663 Handle<String> name, 2664 bool is_dont_delete) { 2665 // ----------- S t a t e ------------- 2666 // -- rax : receiver 2667 // -- rcx : name 2668 // -- rsp[0] : return address 2669 // ----------------------------------- 2670 Label miss; 2671 2672 // Check that the maps haven't changed. 2673 __ JumpIfSmi(rax, &miss); 2674 CheckPrototypes(object, rax, holder, rbx, rdx, rdi, name, &miss); 2675 2676 // Get the value from the cell. 2677 __ Move(rbx, cell); 2678 __ movq(rbx, FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset)); 2679 2680 // Check for deleted property if property can actually be deleted. 
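// Deleting a global property leaves the hole in its cell, so finding the
// hole here means the cached cell is stale and the load must miss.
// DONT_DELETE properties can never hold the hole, which the debug-code
// check below asserts.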
2681 if (!is_dont_delete) { 2682 __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex); 2683 __ j(equal, &miss); 2684 } else if (FLAG_debug_code) { 2685 __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex); 2686 __ Check(not_equal, "DontDelete cells can't contain the hole"); 2687 } 2688 2689 Counters* counters = isolate()->counters(); 2690 __ IncrementCounter(counters->named_load_global_stub(), 1); 2691 __ movq(rax, rbx); 2692 __ ret(0); 2693 2694 __ bind(&miss); 2695 __ IncrementCounter(counters->named_load_global_stub_miss(), 1); 2696 GenerateLoadMiss(masm(), Code::LOAD_IC); 2697 2698 // Return the generated code. 2699 return GetCode(NORMAL, name); 2700 } 2701 2702 2703 Handle<Code> KeyedLoadStubCompiler::CompileLoadField(Handle<String> name, 2704 Handle<JSObject> receiver, 2705 Handle<JSObject> holder, 2706 int index) { 2707 // ----------- S t a t e ------------- 2708 // -- rax : key 2709 // -- rdx : receiver 2710 // -- rsp[0] : return address 2711 // ----------------------------------- 2712 Label miss; 2713 2714 Counters* counters = isolate()->counters(); 2715 __ IncrementCounter(counters->keyed_load_field(), 1); 2716 2717 // Check that the name has not changed. 2718 __ Cmp(rax, name); 2719 __ j(not_equal, &miss); 2720 2721 GenerateLoadField(receiver, holder, rdx, rbx, rcx, rdi, index, name, &miss); 2722 2723 __ bind(&miss); 2724 __ DecrementCounter(counters->keyed_load_field(), 1); 2725 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2726 2727 // Return the generated code. 2728 return GetCode(FIELD, name); 2729 } 2730 2731 2732 Handle<Code> KeyedLoadStubCompiler::CompileLoadCallback( 2733 Handle<String> name, 2734 Handle<JSObject> receiver, 2735 Handle<JSObject> holder, 2736 Handle<AccessorInfo> callback) { 2737 // ----------- S t a t e ------------- 2738 // -- rax : key 2739 // -- rdx : receiver 2740 // -- rsp[0] : return address 2741 // ----------------------------------- 2742 Label miss; 2743 Counters* counters = isolate()->counters(); 2744 __ IncrementCounter(counters->keyed_load_callback(), 1); 2745 2746 // Check that the name has not changed. 2747 __ Cmp(rax, name); 2748 __ j(not_equal, &miss); 2749 2750 GenerateLoadCallback(receiver, holder, rdx, rax, rbx, rcx, rdi, callback, 2751 name, &miss); 2752 __ bind(&miss); 2753 __ DecrementCounter(counters->keyed_load_callback(), 1); 2754 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2755 2756 // Return the generated code. 2757 return GetCode(CALLBACKS, name); 2758 } 2759 2760 2761 Handle<Code> KeyedLoadStubCompiler::CompileLoadConstant( 2762 Handle<String> name, 2763 Handle<JSObject> receiver, 2764 Handle<JSObject> holder, 2765 Handle<JSFunction> value) { 2766 // ----------- S t a t e ------------- 2767 // -- rax : key 2768 // -- rdx : receiver 2769 // -- rsp[0] : return address 2770 // ----------------------------------- 2771 Label miss; 2772 2773 Counters* counters = isolate()->counters(); 2774 __ IncrementCounter(counters->keyed_load_constant_function(), 1); 2775 2776 // Check that the name has not changed. 2777 __ Cmp(rax, name); 2778 __ j(not_equal, &miss); 2779 2780 GenerateLoadConstant(receiver, holder, rdx, rbx, rcx, rdi, 2781 value, name, &miss); 2782 __ bind(&miss); 2783 __ DecrementCounter(counters->keyed_load_constant_function(), 1); 2784 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2785 2786 // Return the generated code. 
2787 return GetCode(CONSTANT_FUNCTION, name); 2788 } 2789 2790 2791 Handle<Code> KeyedLoadStubCompiler::CompileLoadInterceptor( 2792 Handle<JSObject> receiver, 2793 Handle<JSObject> holder, 2794 Handle<String> name) { 2795 // ----------- S t a t e ------------- 2796 // -- rax : key 2797 // -- rdx : receiver 2798 // -- rsp[0] : return address 2799 // ----------------------------------- 2800 Label miss; 2801 Counters* counters = isolate()->counters(); 2802 __ IncrementCounter(counters->keyed_load_interceptor(), 1); 2803 2804 // Check that the name has not changed. 2805 __ Cmp(rax, name); 2806 __ j(not_equal, &miss); 2807 2808 LookupResult lookup(isolate()); 2809 LookupPostInterceptor(holder, name, &lookup); 2810 GenerateLoadInterceptor(receiver, holder, &lookup, rdx, rax, rcx, rbx, rdi, 2811 name, &miss); 2812 __ bind(&miss); 2813 __ DecrementCounter(counters->keyed_load_interceptor(), 1); 2814 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2815 2816 // Return the generated code. 2817 return GetCode(INTERCEPTOR, name); 2818 } 2819 2820 2821 Handle<Code> KeyedLoadStubCompiler::CompileLoadArrayLength( 2822 Handle<String> name) { 2823 // ----------- S t a t e ------------- 2824 // -- rax : key 2825 // -- rdx : receiver 2826 // -- rsp[0] : return address 2827 // ----------------------------------- 2828 Label miss; 2829 2830 Counters* counters = isolate()->counters(); 2831 __ IncrementCounter(counters->keyed_load_array_length(), 1); 2832 2833 // Check that the name has not changed. 2834 __ Cmp(rax, name); 2835 __ j(not_equal, &miss); 2836 2837 GenerateLoadArrayLength(masm(), rdx, rcx, &miss); 2838 __ bind(&miss); 2839 __ DecrementCounter(counters->keyed_load_array_length(), 1); 2840 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2841 2842 // Return the generated code. 2843 return GetCode(CALLBACKS, name); 2844 } 2845 2846 2847 Handle<Code> KeyedLoadStubCompiler::CompileLoadStringLength( 2848 Handle<String> name) { 2849 // ----------- S t a t e ------------- 2850 // -- rax : key 2851 // -- rdx : receiver 2852 // -- rsp[0] : return address 2853 // ----------------------------------- 2854 Label miss; 2855 2856 Counters* counters = isolate()->counters(); 2857 __ IncrementCounter(counters->keyed_load_string_length(), 1); 2858 2859 // Check that the name has not changed. 2860 __ Cmp(rax, name); 2861 __ j(not_equal, &miss); 2862 2863 GenerateLoadStringLength(masm(), rdx, rcx, rbx, &miss, true); 2864 __ bind(&miss); 2865 __ DecrementCounter(counters->keyed_load_string_length(), 1); 2866 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2867 2868 // Return the generated code. 2869 return GetCode(CALLBACKS, name); 2870 } 2871 2872 2873 Handle<Code> KeyedLoadStubCompiler::CompileLoadFunctionPrototype( 2874 Handle<String> name) { 2875 // ----------- S t a t e ------------- 2876 // -- rax : key 2877 // -- rdx : receiver 2878 // -- rsp[0] : return address 2879 // ----------------------------------- 2880 Label miss; 2881 2882 Counters* counters = isolate()->counters(); 2883 __ IncrementCounter(counters->keyed_load_function_prototype(), 1); 2884 2885 // Check that the name has not changed. 2886 __ Cmp(rax, name); 2887 __ j(not_equal, &miss); 2888 2889 GenerateLoadFunctionPrototype(masm(), rdx, rcx, rbx, &miss); 2890 __ bind(&miss); 2891 __ DecrementCounter(counters->keyed_load_function_prototype(), 1); 2892 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); 2893 2894 // Return the generated code. 
2895 return GetCode(CALLBACKS, name);
2896 }
2897
2898
2899 Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
2900 Handle<Map> receiver_map) {
2901 // ----------- S t a t e -------------
2902 // -- rax : key
2903 // -- rdx : receiver
2904 // -- rsp[0] : return address
2905 // -----------------------------------
2906 ElementsKind elements_kind = receiver_map->elements_kind();
2907 Handle<Code> stub = KeyedLoadElementStub(elements_kind).GetCode();
2908
2909 __ DispatchMap(rdx, receiver_map, stub, DO_SMI_CHECK);
2910
2911 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Miss();
2912 __ jmp(ic, RelocInfo::CODE_TARGET);
2913
2914 // Return the generated code.
2915 return GetCode(NORMAL, factory()->empty_string());
2916 }
2917
2918
2919 Handle<Code> KeyedLoadStubCompiler::CompileLoadPolymorphic(
2920 MapHandleList* receiver_maps,
2921 CodeHandleList* handler_ics) {
2922 // ----------- S t a t e -------------
2923 // -- rax : key
2924 // -- rdx : receiver
2925 // -- rsp[0] : return address
2926 // -----------------------------------
2927 Label miss;
2928 __ JumpIfSmi(rdx, &miss);
2929
2930 Register map_reg = rbx;
2931 __ movq(map_reg, FieldOperand(rdx, HeapObject::kMapOffset));
2932 int receiver_count = receiver_maps->length();
2933 for (int current = 0; current < receiver_count; ++current) {
2934 // Check the map and tail call if there's a match.
2935 __ Cmp(map_reg, receiver_maps->at(current));
2936 __ j(equal, handler_ics->at(current), RelocInfo::CODE_TARGET);
2937 }
2938
2939 __ bind(&miss);
2940 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2941
2942 // Return the generated code.
2943 return GetCode(NORMAL, factory()->empty_string(), MEGAMORPHIC);
2944 }
2945
2946
2947 // Specialized stub for constructing objects from functions which only have
2948 // simple assignments of the form this.x = ...; in their body.
2949 Handle<Code> ConstructStubCompiler::CompileConstructStub(
2950 Handle<JSFunction> function) {
2951 // ----------- S t a t e -------------
2952 // -- rax : argc
2953 // -- rdi : constructor
2954 // -- rsp[0] : return address
2955 // -- rsp[8] : last argument
2956 // -----------------------------------
2957 Label generic_stub_call;
2958
2959 // Use r8 for holding undefined, which is used in several places below.
2960 __ Move(r8, factory()->undefined_value());
2961
2962 #ifdef ENABLE_DEBUGGER_SUPPORT
2963 // Check to see whether there are any break points in the function code. If
2964 // there are, jump to the generic constructor stub, which calls the actual
2965 // code for the function and thereby hits the break points.
2966 __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2967 __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kDebugInfoOffset));
2968 __ cmpq(rbx, r8);
2969 __ j(not_equal, &generic_stub_call);
2970 #endif
2971
2972 // Load the initial map and verify that it is in fact a map.
2973 __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
2974 // A smi check catches both a NULL field and an actual smi.
2975 STATIC_ASSERT(kSmiTag == 0);
2976 __ JumpIfSmi(rbx, &generic_stub_call);
2977 __ CmpObjectType(rbx, MAP_TYPE, rcx);
2978 __ j(not_equal, &generic_stub_call);
2979
2980 #ifdef DEBUG
2981 // Cannot construct functions this way.
2982 // rdi: constructor
2983 // rbx: initial map
2984 __ CmpInstanceType(rbx, JS_FUNCTION_TYPE);
2985 __ Assert(not_equal, "Function constructed by construct stub.");
2986 #endif
2987
2988 // Now allocate the JSObject in new space.
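// The instance size recorded in the map is measured in words; shifting it
// left by kPointerSizeLog2 below converts it to the allocation size in
// bytes.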
2989 // rdi: constructor 2990 // rbx: initial map 2991 __ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset)); 2992 __ shl(rcx, Immediate(kPointerSizeLog2)); 2993 __ AllocateInNewSpace(rcx, rdx, rcx, no_reg, 2994 &generic_stub_call, NO_ALLOCATION_FLAGS); 2995 2996 // Allocated the JSObject, now initialize the fields and add the heap tag. 2997 // rbx: initial map 2998 // rdx: JSObject (untagged) 2999 __ movq(Operand(rdx, JSObject::kMapOffset), rbx); 3000 __ Move(rbx, factory()->empty_fixed_array()); 3001 __ movq(Operand(rdx, JSObject::kPropertiesOffset), rbx); 3002 __ movq(Operand(rdx, JSObject::kElementsOffset), rbx); 3003 3004 // rax: argc 3005 // rdx: JSObject (untagged) 3006 // Load the address of the first in-object property into r9. 3007 __ lea(r9, Operand(rdx, JSObject::kHeaderSize)); 3008 // Calculate the location of the first argument. The stack contains only the 3009 // return address on top of the argc arguments. 3010 __ lea(rcx, Operand(rsp, rax, times_pointer_size, 0)); 3011 3012 // rax: argc 3013 // rcx: first argument 3014 // rdx: JSObject (untagged) 3015 // r8: undefined 3016 // r9: first in-object property of the JSObject 3017 // Fill the initialized properties with a constant value or a passed argument 3018 // depending on the this.x = ...; assignment in the function. 3019 Handle<SharedFunctionInfo> shared(function->shared()); 3020 for (int i = 0; i < shared->this_property_assignments_count(); i++) { 3021 if (shared->IsThisPropertyAssignmentArgument(i)) { 3022 // Check if the argument assigned to the property is actually passed. 3023 // If argument is not passed the property is set to undefined, 3024 // otherwise find it on the stack. 3025 int arg_number = shared->GetThisPropertyAssignmentArgument(i); 3026 __ movq(rbx, r8); 3027 __ cmpq(rax, Immediate(arg_number)); 3028 __ cmovq(above, rbx, Operand(rcx, arg_number * -kPointerSize)); 3029 // Store value in the property. 3030 __ movq(Operand(r9, i * kPointerSize), rbx); 3031 } else { 3032 // Set the property to the constant value. 3033 Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i)); 3034 __ Move(Operand(r9, i * kPointerSize), constant); 3035 } 3036 } 3037 3038 // Fill the unused in-object property fields with undefined. 3039 ASSERT(function->has_initial_map()); 3040 for (int i = shared->this_property_assignments_count(); 3041 i < function->initial_map()->inobject_properties(); 3042 i++) { 3043 __ movq(Operand(r9, i * kPointerSize), r8); 3044 } 3045 3046 // rax: argc 3047 // rdx: JSObject (untagged) 3048 // Move argc to rbx and the JSObject to return to rax and tag it. 3049 __ movq(rbx, rax); 3050 __ movq(rax, rdx); 3051 __ or_(rax, Immediate(kHeapObjectTag)); 3052 3053 // rax: JSObject 3054 // rbx: argc 3055 // Remove caller arguments and receiver from the stack and return. 3056 __ pop(rcx); 3057 __ lea(rsp, Operand(rsp, rbx, times_pointer_size, 1 * kPointerSize)); 3058 __ push(rcx); 3059 Counters* counters = isolate()->counters(); 3060 __ IncrementCounter(counters->constructed_objects(), 1); 3061 __ IncrementCounter(counters->constructed_objects_stub(), 1); 3062 __ ret(0); 3063 3064 // Jump to the generic stub in case the specialized code cannot handle the 3065 // construction. 3066 __ bind(&generic_stub_call); 3067 Handle<Code> code = isolate()->builtins()->JSConstructStubGeneric(); 3068 __ Jump(code, RelocInfo::CODE_TARGET); 3069 3070 // Return the generated code. 
3071 return GetCode(); 3072 } 3073 3074 3075 #undef __ 3076 #define __ ACCESS_MASM(masm) 3077 3078 3079 void KeyedLoadStubCompiler::GenerateLoadDictionaryElement( 3080 MacroAssembler* masm) { 3081 // ----------- S t a t e ------------- 3082 // -- rax : key 3083 // -- rdx : receiver 3084 // -- rsp[0] : return address 3085 // ----------------------------------- 3086 Label slow, miss_force_generic; 3087 3088 // This stub is meant to be tail-jumped to, the receiver must already 3089 // have been verified by the caller to not be a smi. 3090 3091 __ JumpIfNotSmi(rax, &miss_force_generic); 3092 __ SmiToInteger32(rbx, rax); 3093 __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset)); 3094 3095 // Check whether the elements is a number dictionary. 3096 // rdx: receiver 3097 // rax: key 3098 // rbx: key as untagged int32 3099 // rcx: elements 3100 __ LoadFromNumberDictionary(&slow, rcx, rax, rbx, r9, rdi, rax); 3101 __ ret(0); 3102 3103 __ bind(&slow); 3104 // ----------- S t a t e ------------- 3105 // -- rax : key 3106 // -- rdx : receiver 3107 // -- rsp[0] : return address 3108 // ----------------------------------- 3109 Handle<Code> slow_ic = 3110 masm->isolate()->builtins()->KeyedLoadIC_Slow(); 3111 __ jmp(slow_ic, RelocInfo::CODE_TARGET); 3112 3113 __ bind(&miss_force_generic); 3114 // ----------- S t a t e ------------- 3115 // -- rax : key 3116 // -- rdx : receiver 3117 // -- rsp[0] : return address 3118 // ----------------------------------- 3119 Handle<Code> miss_ic = 3120 masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric(); 3121 __ jmp(miss_ic, RelocInfo::CODE_TARGET); 3122 } 3123 3124 void KeyedLoadStubCompiler::GenerateLoadExternalArray( 3125 MacroAssembler* masm, 3126 ElementsKind elements_kind) { 3127 // ----------- S t a t e ------------- 3128 // -- rax : key 3129 // -- rdx : receiver 3130 // -- rsp[0] : return address 3131 // ----------------------------------- 3132 Label slow, miss_force_generic; 3133 3134 // This stub is meant to be tail-jumped to, the receiver must already 3135 // have been verified by the caller to not be a smi. 3136 3137 // Check that the key is a smi. 3138 __ JumpIfNotSmi(rax, &miss_force_generic); 3139 3140 // Check that the index is in range. 3141 __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset)); 3142 __ SmiToInteger32(rcx, rax); 3143 __ cmpq(rax, FieldOperand(rbx, ExternalArray::kLengthOffset)); 3144 // Unsigned comparison catches both negative and too-large values. 
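// A negative key, reinterpreted as an unsigned value, compares above any
// valid array length, so a single unsigned branch covers both bounds.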
3145 __ j(above_equal, &miss_force_generic); 3146 3147 // rax: index (as a smi) 3148 // rdx: receiver (JSObject) 3149 // rcx: untagged index 3150 // rbx: elements array 3151 __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset)); 3152 // rbx: base pointer of external storage 3153 switch (elements_kind) { 3154 case EXTERNAL_BYTE_ELEMENTS: 3155 __ movsxbq(rcx, Operand(rbx, rcx, times_1, 0)); 3156 break; 3157 case EXTERNAL_PIXEL_ELEMENTS: 3158 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: 3159 __ movzxbq(rcx, Operand(rbx, rcx, times_1, 0)); 3160 break; 3161 case EXTERNAL_SHORT_ELEMENTS: 3162 __ movsxwq(rcx, Operand(rbx, rcx, times_2, 0)); 3163 break; 3164 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: 3165 __ movzxwq(rcx, Operand(rbx, rcx, times_2, 0)); 3166 break; 3167 case EXTERNAL_INT_ELEMENTS: 3168 __ movsxlq(rcx, Operand(rbx, rcx, times_4, 0)); 3169 break; 3170 case EXTERNAL_UNSIGNED_INT_ELEMENTS: 3171 __ movl(rcx, Operand(rbx, rcx, times_4, 0)); 3172 break; 3173 case EXTERNAL_FLOAT_ELEMENTS: 3174 __ cvtss2sd(xmm0, Operand(rbx, rcx, times_4, 0)); 3175 break; 3176 case EXTERNAL_DOUBLE_ELEMENTS: 3177 __ movsd(xmm0, Operand(rbx, rcx, times_8, 0)); 3178 break; 3179 default: 3180 UNREACHABLE(); 3181 break; 3182 } 3183 3184 // rax: index 3185 // rdx: receiver 3186 // For integer array types: 3187 // rcx: value 3188 // For floating-point array type: 3189 // xmm0: value as double. 3190 3191 ASSERT(kSmiValueSize == 32); 3192 if (elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) { 3193 // For the UnsignedInt array type, we need to see whether 3194 // the value can be represented in a Smi. If not, we need to convert 3195 // it to a HeapNumber. 3196 Label box_int; 3197 3198 __ JumpIfUIntNotValidSmiValue(rcx, &box_int, Label::kNear); 3199 3200 __ Integer32ToSmi(rax, rcx); 3201 __ ret(0); 3202 3203 __ bind(&box_int); 3204 3205 // Allocate a HeapNumber for the int and perform int-to-double 3206 // conversion. 3207 // The value is zero-extended since we loaded the value from memory 3208 // with movl. 3209 __ cvtqsi2sd(xmm0, rcx); 3210 3211 __ AllocateHeapNumber(rcx, rbx, &slow); 3212 // Set the value. 3213 __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0); 3214 __ movq(rax, rcx); 3215 __ ret(0); 3216 } else if (elements_kind == EXTERNAL_FLOAT_ELEMENTS || 3217 elements_kind == EXTERNAL_DOUBLE_ELEMENTS) { 3218 // For the floating-point array type, we need to always allocate a 3219 // HeapNumber. 3220 __ AllocateHeapNumber(rcx, rbx, &slow); 3221 // Set the value. 3222 __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0); 3223 __ movq(rax, rcx); 3224 __ ret(0); 3225 } else { 3226 __ Integer32ToSmi(rax, rcx); 3227 __ ret(0); 3228 } 3229 3230 // Slow case: Jump to runtime. 3231 __ bind(&slow); 3232 Counters* counters = masm->isolate()->counters(); 3233 __ IncrementCounter(counters->keyed_load_external_array_slow(), 1); 3234 3235 // ----------- S t a t e ------------- 3236 // -- rax : key 3237 // -- rdx : receiver 3238 // -- rsp[0] : return address 3239 // ----------------------------------- 3240 3241 Handle<Code> ic = masm->isolate()->builtins()->KeyedLoadIC_Slow(); 3242 __ jmp(ic, RelocInfo::CODE_TARGET); 3243 3244 // Miss case: Jump to runtime. 
3245 __ bind(&miss_force_generic); 3246 3247 // ----------- S t a t e ------------- 3248 // -- rax : key 3249 // -- rdx : receiver 3250 // -- rsp[0] : return address 3251 // ----------------------------------- 3252 Handle<Code> miss_ic = 3253 masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric(); 3254 __ jmp(miss_ic, RelocInfo::CODE_TARGET); 3255 } 3256 3257 3258 void KeyedStoreStubCompiler::GenerateStoreExternalArray( 3259 MacroAssembler* masm, 3260 ElementsKind elements_kind) { 3261 // ----------- S t a t e ------------- 3262 // -- rax : value 3263 // -- rcx : key 3264 // -- rdx : receiver 3265 // -- rsp[0] : return address 3266 // ----------------------------------- 3267 Label slow, miss_force_generic; 3268 3269 // This stub is meant to be tail-jumped to, the receiver must already 3270 // have been verified by the caller to not be a smi. 3271 3272 // Check that the key is a smi. 3273 __ JumpIfNotSmi(rcx, &miss_force_generic); 3274 3275 // Check that the index is in range. 3276 __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset)); 3277 __ SmiToInteger32(rdi, rcx); // Untag the index. 3278 __ cmpq(rcx, FieldOperand(rbx, ExternalArray::kLengthOffset)); 3279 // Unsigned comparison catches both negative and too-large values. 3280 __ j(above_equal, &miss_force_generic); 3281 3282 // Handle both smis and HeapNumbers in the fast path. Go to the 3283 // runtime for all other kinds of values. 3284 // rax: value 3285 // rcx: key (a smi) 3286 // rdx: receiver (a JSObject) 3287 // rbx: elements array 3288 // rdi: untagged key 3289 Label check_heap_number; 3290 if (elements_kind == EXTERNAL_PIXEL_ELEMENTS) { 3291 // Float to pixel conversion is only implemented in the runtime for now. 3292 __ JumpIfNotSmi(rax, &slow); 3293 } else { 3294 __ JumpIfNotSmi(rax, &check_heap_number, Label::kNear); 3295 } 3296 // No more branches to slow case on this path. Key and receiver not needed. 3297 __ SmiToInteger32(rdx, rax); 3298 __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset)); 3299 // rbx: base pointer of external storage 3300 switch (elements_kind) { 3301 case EXTERNAL_PIXEL_ELEMENTS: 3302 { // Clamp the value to [0..255]. 3303 Label done; 3304 __ testl(rdx, Immediate(0xFFFFFF00)); 3305 __ j(zero, &done, Label::kNear); 3306 __ setcc(negative, rdx); // 1 if negative, 0 if positive. 3307 __ decb(rdx); // 0 if negative, 255 if positive. 3308 __ bind(&done); 3309 } 3310 __ movb(Operand(rbx, rdi, times_1, 0), rdx); 3311 break; 3312 case EXTERNAL_BYTE_ELEMENTS: 3313 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: 3314 __ movb(Operand(rbx, rdi, times_1, 0), rdx); 3315 break; 3316 case EXTERNAL_SHORT_ELEMENTS: 3317 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: 3318 __ movw(Operand(rbx, rdi, times_2, 0), rdx); 3319 break; 3320 case EXTERNAL_INT_ELEMENTS: 3321 case EXTERNAL_UNSIGNED_INT_ELEMENTS: 3322 __ movl(Operand(rbx, rdi, times_4, 0), rdx); 3323 break; 3324 case EXTERNAL_FLOAT_ELEMENTS: 3325 // Need to perform int-to-float conversion. 3326 __ cvtlsi2ss(xmm0, rdx); 3327 __ movss(Operand(rbx, rdi, times_4, 0), xmm0); 3328 break; 3329 case EXTERNAL_DOUBLE_ELEMENTS: 3330 // Need to perform int-to-float conversion. 
3331 __ cvtlsi2sd(xmm0, rdx); 3332 __ movsd(Operand(rbx, rdi, times_8, 0), xmm0); 3333 break; 3334 case FAST_ELEMENTS: 3335 case FAST_SMI_ONLY_ELEMENTS: 3336 case FAST_DOUBLE_ELEMENTS: 3337 case DICTIONARY_ELEMENTS: 3338 case NON_STRICT_ARGUMENTS_ELEMENTS: 3339 UNREACHABLE(); 3340 break; 3341 } 3342 __ ret(0); 3343 3344 // TODO(danno): handle heap number -> pixel array conversion 3345 if (elements_kind != EXTERNAL_PIXEL_ELEMENTS) { 3346 __ bind(&check_heap_number); 3347 // rax: value 3348 // rcx: key (a smi) 3349 // rdx: receiver (a JSObject) 3350 // rbx: elements array 3351 // rdi: untagged key 3352 __ CmpObjectType(rax, HEAP_NUMBER_TYPE, kScratchRegister); 3353 __ j(not_equal, &slow); 3354 // No more branches to slow case on this path. 3355 3356 // The WebGL specification leaves the behavior of storing NaN and 3357 // +/-Infinity into integer arrays basically undefined. For more 3358 // reproducible behavior, convert these to zero. 3359 __ movsd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset)); 3360 __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset)); 3361 // rdi: untagged index 3362 // rbx: base pointer of external storage 3363 // top of FPU stack: value 3364 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) { 3365 __ cvtsd2ss(xmm0, xmm0); 3366 __ movss(Operand(rbx, rdi, times_4, 0), xmm0); 3367 __ ret(0); 3368 } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) { 3369 __ movsd(Operand(rbx, rdi, times_8, 0), xmm0); 3370 __ ret(0); 3371 } else { 3372 // Perform float-to-int conversion with truncation (round-to-zero) 3373 // behavior. 3374 3375 // Convert to int32 and store the low byte/word. 3376 // If the value is NaN or +/-infinity, the result is 0x80000000, 3377 // which is automatically zero when taken mod 2^n, n < 32. 3378 // rdx: value (converted to an untagged integer) 3379 // rdi: untagged index 3380 // rbx: base pointer of external storage 3381 switch (elements_kind) { 3382 case EXTERNAL_BYTE_ELEMENTS: 3383 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: 3384 __ cvttsd2si(rdx, xmm0); 3385 __ movb(Operand(rbx, rdi, times_1, 0), rdx); 3386 break; 3387 case EXTERNAL_SHORT_ELEMENTS: 3388 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: 3389 __ cvttsd2si(rdx, xmm0); 3390 __ movw(Operand(rbx, rdi, times_2, 0), rdx); 3391 break; 3392 case EXTERNAL_INT_ELEMENTS: 3393 case EXTERNAL_UNSIGNED_INT_ELEMENTS: 3394 // Convert to int64, so that NaN and infinities become 3395 // 0x8000000000000000, which is zero mod 2^32. 3396 __ cvttsd2siq(rdx, xmm0); 3397 __ movl(Operand(rbx, rdi, times_4, 0), rdx); 3398 break; 3399 case EXTERNAL_PIXEL_ELEMENTS: 3400 case EXTERNAL_FLOAT_ELEMENTS: 3401 case EXTERNAL_DOUBLE_ELEMENTS: 3402 case FAST_ELEMENTS: 3403 case FAST_SMI_ONLY_ELEMENTS: 3404 case FAST_DOUBLE_ELEMENTS: 3405 case DICTIONARY_ELEMENTS: 3406 case NON_STRICT_ARGUMENTS_ELEMENTS: 3407 UNREACHABLE(); 3408 break; 3409 } 3410 __ ret(0); 3411 } 3412 } 3413 3414 // Slow case: call runtime. 3415 __ bind(&slow); 3416 3417 // ----------- S t a t e ------------- 3418 // -- rax : value 3419 // -- rcx : key 3420 // -- rdx : receiver 3421 // -- rsp[0] : return address 3422 // ----------------------------------- 3423 3424 Handle<Code> ic = masm->isolate()->builtins()->KeyedStoreIC_Slow(); 3425 __ jmp(ic, RelocInfo::CODE_TARGET); 3426 3427 // Miss case: call runtime. 
      // rdx: value (converted to an untagged integer)
      // rdi: untagged index
      // rbx: base pointer of external storage
      switch (elements_kind) {
        case EXTERNAL_BYTE_ELEMENTS:
        case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
          __ cvttsd2si(rdx, xmm0);
          __ movb(Operand(rbx, rdi, times_1, 0), rdx);
          break;
        case EXTERNAL_SHORT_ELEMENTS:
        case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
          __ cvttsd2si(rdx, xmm0);
          __ movw(Operand(rbx, rdi, times_2, 0), rdx);
          break;
        case EXTERNAL_INT_ELEMENTS:
        case EXTERNAL_UNSIGNED_INT_ELEMENTS:
          // Convert to int64, so that NaN and infinities become
          // 0x8000000000000000, which is zero mod 2^32.
          __ cvttsd2siq(rdx, xmm0);
          __ movl(Operand(rbx, rdi, times_4, 0), rdx);
          break;
        case EXTERNAL_PIXEL_ELEMENTS:
        case EXTERNAL_FLOAT_ELEMENTS:
        case EXTERNAL_DOUBLE_ELEMENTS:
        case FAST_ELEMENTS:
        case FAST_SMI_ONLY_ELEMENTS:
        case FAST_DOUBLE_ELEMENTS:
        case DICTIONARY_ELEMENTS:
        case NON_STRICT_ARGUMENTS_ELEMENTS:
          UNREACHABLE();
          break;
      }
      __ ret(0);
    }
  }

  // Slow case: call runtime.
  __ bind(&slow);

  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  Handle<Code> ic = masm->isolate()->builtins()->KeyedStoreIC_Slow();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Miss case: call runtime.
  __ bind(&miss_force_generic);

  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ jmp(miss_ic, RelocInfo::CODE_TARGET);
}


void KeyedLoadStubCompiler::GenerateLoadFastElement(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss_force_generic;

  // This stub is meant to be tail-jumped to; the receiver must already
  // have been verified by the caller not to be a smi.

  // Check that the key is a smi.
  __ JumpIfNotSmi(rax, &miss_force_generic);

  // Get the elements array.
  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ AssertFastElements(rcx);

  // Check that the key is within bounds.
  __ SmiCompare(rax, FieldOperand(rcx, FixedArray::kLengthOffset));
  __ j(above_equal, &miss_force_generic);

  // Load the result and make sure it's not the hole.
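  // SmiToIndex untags the key and returns an (index, scale) pair that
  // already accounts for the pointer-size element width used below.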
  SmiIndex index = masm->SmiToIndex(rbx, rax, kPointerSizeLog2);
  __ movq(rbx, FieldOperand(rcx,
                            index.reg,
                            index.scale,
                            FixedArray::kHeaderSize));
  __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
  __ j(equal, &miss_force_generic);
  __ movq(rax, rbx);
  __ ret(0);

  __ bind(&miss_force_generic);
  Code* code = masm->isolate()->builtins()->builtin(
      Builtins::kKeyedLoadIC_MissForceGeneric);
  Handle<Code> ic(code);
  __ jmp(ic, RelocInfo::CODE_TARGET);
}


void KeyedLoadStubCompiler::GenerateLoadFastDoubleElement(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss_force_generic, slow_allocate_heapnumber;

  // This stub is meant to be tail-jumped to; the receiver must already
  // have been verified by the caller not to be a smi.

  // Check that the key is a smi.
  __ JumpIfNotSmi(rax, &miss_force_generic);

  // Get the elements array.
  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ AssertFastElements(rcx);

  // Check that the key is within bounds.
  __ SmiCompare(rax, FieldOperand(rcx, FixedArray::kLengthOffset));
  __ j(above_equal, &miss_force_generic);

  // Check for the hole.
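  // The hole is marked by a distinguished NaN bit pattern. Its upper 32
  // bits are kHoleNanUpper32, so comparing only the upper word of the
  // element is sufficient; the offset below skips the lower 32 bits.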
  __ SmiToInteger32(kScratchRegister, rax);
  uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
  __ cmpl(FieldOperand(rcx, kScratchRegister, times_8, offset),
          Immediate(kHoleNanUpper32));
  __ j(equal, &miss_force_generic);

  // Always allocate a heap number for the result.
  __ movsd(xmm0, FieldOperand(rcx, kScratchRegister, times_8,
                              FixedDoubleArray::kHeaderSize));
  __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber);
  // Set the value.
  __ movq(rax, rcx);
  __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0);
  __ ret(0);

  __ bind(&slow_allocate_heapnumber);
  Handle<Code> slow_ic =
      masm->isolate()->builtins()->KeyedLoadIC_Slow();
  __ jmp(slow_ic, RelocInfo::CODE_TARGET);

  __ bind(&miss_force_generic);
  Handle<Code> miss_ic =
      masm->isolate()->builtins()->KeyedLoadIC_MissForceGeneric();
  __ jmp(miss_ic, RelocInfo::CODE_TARGET);
}


void KeyedStoreStubCompiler::GenerateStoreFastElement(
    MacroAssembler* masm,
    bool is_js_array,
    ElementsKind elements_kind,
    KeyedAccessGrowMode grow_mode) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss_force_generic, transition_elements_kind, finish_store, grow;
  Label check_capacity, slow;

  // This stub is meant to be tail-jumped to; the receiver must already
  // have been verified by the caller not to be a smi.

  // Check that the key is a smi.
  __ JumpIfNotSmi(rcx, &miss_force_generic);

  if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
    __ JumpIfNotSmi(rax, &transition_elements_kind);
  }

  // Get the elements array and make sure it is a fast element array, not 'cow'.
  __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
  // Check that the key is within bounds.
  if (is_js_array) {
    __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
    if (grow_mode == ALLOW_JSARRAY_GROWTH) {
      __ j(above_equal, &grow);
    } else {
      __ j(above_equal, &miss_force_generic);
    }
  } else {
    __ SmiCompare(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
    __ j(above_equal, &miss_force_generic);
  }

  __ CompareRoot(FieldOperand(rdi, HeapObject::kMapOffset),
                 Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &miss_force_generic);

  __ bind(&finish_store);
  if (elements_kind == FAST_SMI_ONLY_ELEMENTS) {
    __ SmiToInteger32(rcx, rcx);
    __ movq(FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize),
            rax);
  } else {
    // Do the store and update the write barrier.
    ASSERT(elements_kind == FAST_ELEMENTS);
    __ SmiToInteger32(rcx, rcx);
    __ lea(rcx,
           FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize));
    __ movq(Operand(rcx, 0), rax);
    // Make sure to preserve the value in register rax.
    __ movq(rbx, rax);
    __ RecordWrite(rdi, rcx, rbx, kDontSaveFPRegs);
  }

  // Done.
  __ ret(0);

  // Handle store cache miss.
  __ bind(&miss_force_generic);
  Handle<Code> ic_force_generic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ jmp(ic_force_generic, RelocInfo::CODE_TARGET);

  __ bind(&transition_elements_kind);
  Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
  __ jmp(ic_miss, RelocInfo::CODE_TARGET);

  if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
    // Grow the array by a single element if possible.
    __ bind(&grow);

    // Make sure the array is only growing by a single element; anything else
    // must be handled by the runtime. Flags are already set by the previous
    // compare.
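    // That bounds check compared the key against the array length and
    // branched here on above_equal, so not_equal now means the key is
    // larger than the length, i.e. the store is not a simple append of
    // one element.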
    __ j(not_equal, &miss_force_generic);

    // Check for the empty array, and preallocate a small backing store if
    // possible.
    __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
    __ CompareRoot(rdi, Heap::kEmptyFixedArrayRootIndex);
    __ j(not_equal, &check_capacity);

    int size = FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
    __ AllocateInNewSpace(size, rdi, rbx, r8, &slow, TAG_OBJECT);

    // rax: value
    // rcx: key
    // rdx: receiver
    // rdi: elements
    // Initialize the new backing store: set its map and length, and fill
    // the unused slots with the hole value.
    __ Move(FieldOperand(rdi, JSObject::kMapOffset),
            masm->isolate()->factory()->fixed_array_map());
    __ Move(FieldOperand(rdi, FixedArray::kLengthOffset),
            Smi::FromInt(JSArray::kPreallocatedArrayElements));
    __ LoadRoot(rbx, Heap::kTheHoleValueRootIndex);
    for (int i = 1; i < JSArray::kPreallocatedArrayElements; ++i) {
      __ movq(FieldOperand(rdi, FixedArray::SizeFor(i)), rbx);
    }

    // Store the element at index zero.
    __ movq(FieldOperand(rdi, FixedArray::SizeFor(0)), rax);

    // Install the new backing store in the JSArray.
    __ movq(FieldOperand(rdx, JSObject::kElementsOffset), rdi);
    __ RecordWriteField(rdx, JSObject::kElementsOffset, rdi, rbx,
                        kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);

    // Increment the length of the array.
    __ Move(FieldOperand(rdx, JSArray::kLengthOffset), Smi::FromInt(1));
    __ ret(0);

    __ bind(&check_capacity);
    // Check for cow elements; in general they are not handled by this stub.
    __ CompareRoot(FieldOperand(rdi, HeapObject::kMapOffset),
                   Heap::kFixedCOWArrayMapRootIndex);
    __ j(equal, &miss_force_generic);

    // rax: value
    // rcx: key
    // rdx: receiver
    // rdi: elements
    // Make sure that the backing store can hold additional elements.
    __ cmpq(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
    __ j(above_equal, &slow);

    // Grow the array and finish the store.
    __ SmiAddConstant(FieldOperand(rdx, JSArray::kLengthOffset),
                      Smi::FromInt(1));
    __ jmp(&finish_store);

    __ bind(&slow);
    Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
    __ jmp(ic_slow, RelocInfo::CODE_TARGET);
  }
}


void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
    MacroAssembler* masm,
    bool is_js_array,
    KeyedAccessGrowMode grow_mode) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss_force_generic, transition_elements_kind, finish_store;
  Label grow, slow, check_capacity;

  // This stub is meant to be tail-jumped to; the receiver must already
  // have been verified by the caller not to be a smi.

  // Check that the key is a smi.
  __ JumpIfNotSmi(rcx, &miss_force_generic);

  // Get the elements array.
  __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
  __ AssertFastElements(rdi);

  // Check that the key is within bounds.
  if (is_js_array) {
    __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
    if (grow_mode == ALLOW_JSARRAY_GROWTH) {
      __ j(above_equal, &grow);
    } else {
      __ j(above_equal, &miss_force_generic);
    }
  } else {
    __ SmiCompare(rcx, FieldOperand(rdi, FixedDoubleArray::kLengthOffset));
    __ j(above_equal, &miss_force_generic);
  }

  // Handle smi values specially.
  __ bind(&finish_store);
  __ SmiToInteger32(rcx, rcx);
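  // StoreNumberToDoubleElements writes the value as a raw double into the
  // FixedDoubleArray (roughly: smis are converted to doubles, heap numbers
  // have their value copied, and anything else jumps to the fail label, here
  // transition_elements_kind).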
  __ StoreNumberToDoubleElements(rax, rdi, rcx, xmm0,
                                 &transition_elements_kind);
  __ ret(0);

  // Handle store cache miss, replacing the ic with the generic stub.
  __ bind(&miss_force_generic);
  Handle<Code> ic_force_generic =
      masm->isolate()->builtins()->KeyedStoreIC_MissForceGeneric();
  __ jmp(ic_force_generic, RelocInfo::CODE_TARGET);

  __ bind(&transition_elements_kind);
  // Restore smi-tagging of rcx.
  __ Integer32ToSmi(rcx, rcx);
  Handle<Code> ic_miss = masm->isolate()->builtins()->KeyedStoreIC_Miss();
  __ jmp(ic_miss, RelocInfo::CODE_TARGET);

  if (is_js_array && grow_mode == ALLOW_JSARRAY_GROWTH) {
    // Grow the array by a single element if possible.
    __ bind(&grow);

    // Make sure the array is only growing by a single element; anything else
    // must be handled by the runtime. Flags are already set by the previous
    // compare.
    __ j(not_equal, &miss_force_generic);

    // Transition on values that can't be stored in a FixedDoubleArray.
    Label value_is_smi;
    __ JumpIfSmi(rax, &value_is_smi);
    __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &transition_elements_kind);
    __ bind(&value_is_smi);

    // Check for the empty array, and preallocate a small backing store if
    // possible.
    __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
    __ CompareRoot(rdi, Heap::kEmptyFixedArrayRootIndex);
    __ j(not_equal, &check_capacity);

    int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
    __ AllocateInNewSpace(size, rdi, rbx, r8, &slow, TAG_OBJECT);

    // rax: value
    // rcx: key
    // rdx: receiver
    // rdi: elements
    // Initialize the new FixedDoubleArray. Leave elements uninitialized for
    // efficiency; they are guaranteed to be initialized before use.
    __ Move(FieldOperand(rdi, JSObject::kMapOffset),
            masm->isolate()->factory()->fixed_double_array_map());
    __ Move(FieldOperand(rdi, FixedDoubleArray::kLengthOffset),
            Smi::FromInt(JSArray::kPreallocatedArrayElements));

    // Install the new backing store in the JSArray.
    __ movq(FieldOperand(rdx, JSObject::kElementsOffset), rdi);
    __ RecordWriteField(rdx, JSObject::kElementsOffset, rdi, rbx,
                        kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);

    // Increment the length of the array.
    __ Move(FieldOperand(rdx, JSArray::kLengthOffset), Smi::FromInt(1));
    __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
    __ jmp(&finish_store);

    __ bind(&check_capacity);
    // rax: value
    // rcx: key
    // rdx: receiver
    // rdi: elements
    // Make sure that the backing store can hold additional elements.
    __ cmpq(rcx, FieldOperand(rdi, FixedDoubleArray::kLengthOffset));
    __ j(above_equal, &slow);

    // Grow the array and finish the store.
    __ SmiAddConstant(FieldOperand(rdx, JSArray::kLengthOffset),
                      Smi::FromInt(1));
    __ jmp(&finish_store);

    __ bind(&slow);
    Handle<Code> ic_slow = masm->isolate()->builtins()->KeyedStoreIC_Slow();
    __ jmp(ic_slow, RelocInfo::CODE_TARGET);
  }
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64