Home | History | Annotate | Download | only in x64
      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "src/v8.h"
      6 
      7 #if V8_TARGET_ARCH_X64
      8 
      9 #include "src/arguments.h"
     10 #include "src/ic-inl.h"
     11 #include "src/codegen.h"
     12 #include "src/stub-cache.h"
     13 
     14 namespace v8 {
     15 namespace internal {
     16 
     17 #define __ ACCESS_MASM(masm)
     18 
     19 
// Probes one table (primary or secondary) of the megamorphic stub cache
// for an entry matching (name, receiver map, flags).  On a hit, control
// jumps straight into the cached code object and never returns here; on a
// miss, execution falls through past the locally bound |miss| label so the
// caller can try the next table or enter the runtime.
static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register receiver,
                       Register name,
                       // The offset is scaled by 4, based on
                       // kHeapObjectTagSize, which is two bits
                       Register offset) {
  // We need to scale up the pointer by 2 when the offset is scaled by less
  // than the pointer size.
  ASSERT(kPointerSize == kInt64Size
      ? kPointerSizeLog2 == kHeapObjectTagSize + 1
      : kPointerSizeLog2 == kHeapObjectTagSize);
  ScaleFactor scale_factor = kPointerSize == kInt64Size ? times_2 : times_1;

  ASSERT_EQ(3 * kPointerSize, sizeof(StubCache::Entry));
  // The offset register holds the entry offset times four (due to masking
  // and shifting optimizations).
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  Label miss;

  // Multiply by 3 because there are 3 fields per entry (name, code, map).
  __ leap(offset, Operand(offset, offset, times_2, 0));

  __ LoadAddress(kScratchRegister, key_offset);

  // Check that the key in the entry matches the name.
  // Multiply entry offset by 16 to get the entry address. Since the
  // offset register already holds the entry offset times four, multiply
  // by a further four.
  __ cmpl(name, Operand(kScratchRegister, offset, scale_factor, 0));
  __ j(not_equal, &miss);

  // Get the map entry from the cache.
  // Use key_offset + kPointerSize * 2, rather than loading map_offset.
  // The key, value and map tables are laid out contiguously, so the map
  // slot of this entry lives two pointers past its key slot.
  __ movp(kScratchRegister,
          Operand(kScratchRegister, offset, scale_factor, kPointerSize * 2));
  __ cmpp(kScratchRegister, FieldOperand(receiver, HeapObject::kMapOffset));
  __ j(not_equal, &miss);

  // Get the code entry from the cache.
  __ LoadAddress(kScratchRegister, value_offset);
  __ movp(kScratchRegister,
          Operand(kScratchRegister, offset, scale_factor, 0));

  // Check that the flags match what we're looking for.
  // This clobbers |offset|, which is fine: it is not needed again on the
  // hit path, and the miss path simply falls through to the caller.
  __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
  __ andp(offset, Immediate(~Code::kFlagsNotUsedInLookup));
  __ cmpl(offset, Immediate(flags));
  __ j(not_equal, &miss);

#ifdef DEBUG
    // In debug builds, flags can force an artificial miss in one table so
    // the other table's code path gets test coverage.
    if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
      __ jmp(&miss);
    } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
      __ jmp(&miss);
    }
#endif

  // Jump to the first instruction in the code stub.
  __ addp(kScratchRegister, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(kScratchRegister);

  __ bind(&miss);
}
     87 
     88 
// Emits code proving that |name| is NOT an own property of |receiver|,
// by checking the receiver's slow-mode (dictionary) property backing
// store.  Jumps to |miss_label| whenever absence cannot be established
// cheaply: the receiver has a named interceptor or needs access checks,
// is not a spec object, does not use a dictionary backing store, or the
// dictionary lookup finds (or may find) the name.
// Clobbers scratch0; scratch1 is handed to the dictionary lookup stub.
void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                                    Label* miss_label,
                                                    Register receiver,
                                                    Handle<Name> name,
                                                    Register scratch0,
                                                    Register scratch1) {
  ASSERT(name->IsUniqueName());
  ASSERT(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1);
  // Optimistically count a miss too; decremented below on success.
  __ IncrementCounter(counters->negative_lookups_miss(), 1);

  __ movp(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  __ testb(FieldOperand(scratch0, Map::kBitFieldOffset),
           Immediate(kInterceptorOrAccessCheckNeededMask));
  __ j(not_zero, miss_label);

  // Check that receiver is a JSObject.
  __ CmpInstanceType(scratch0, FIRST_SPEC_OBJECT_TYPE);
  __ j(below, miss_label);

  // Load properties array.
  Register properties = scratch0;
  __ movp(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));

  // Check that the properties array is a dictionary.
  __ CompareRoot(FieldOperand(properties, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, miss_label);

  Label done;
  NameDictionaryLookupStub::GenerateNegativeLookup(masm,
                                                   miss_label,
                                                   &done,
                                                   properties,
                                                   name,
                                                   scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1);
}
    134 
    135 
// Probes the primary and then the secondary stub cache table for a code
// stub matching (receiver map, name, flags).  On a hit, ProbeTable jumps
// into the cached stub and does not return; if both probes miss,
// execution falls through (after the |miss| label) so the caller can
// enter the runtime.  |scratch| is clobbered by the hash computation.
void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2,
                              Register extra3) {
  Isolate* isolate = masm->isolate();
  Label miss;
  USE(extra);   // The register extra is not used on the X64 platform.
  USE(extra2);  // The register extra2 is not used on the X64 platform.
  USE(extra3);  // The register extra3 is not used on the X64 platform.
  // Make sure that code is valid. The multiplying code relies on the
  // entry size being 3 * kPointerSize.
  ASSERT(sizeof(Entry) == 3 * kPointerSize);

  // Make sure the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));

  // Check scratch register is valid, extra2 and extra3 are unused.
  ASSERT(!scratch.is(no_reg));
  ASSERT(extra2.is(no_reg));
  ASSERT(extra3.is(no_reg));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
  __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
  // Use only the low 32 bits of the map pointer.
  __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xorp(scratch, Immediate(flags));
  // We mask out the last two bits because they are not part of the hash and
  // they are always 01 for maps.  Also in the two 'and' instructions below.
  __ andp(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the primary table.
  ProbeTable(isolate, masm, flags, kPrimary, receiver, name, scratch);

  // Primary miss: Compute hash for secondary probe.
  // ProbeTable clobbered |scratch|, so the primary hash is rebuilt from
  // scratch before being transformed into the secondary hash.
  __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
  __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xorp(scratch, Immediate(flags));
  __ andp(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
  __ subl(scratch, name);
  __ addl(scratch, Immediate(flags));
  __ andp(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the secondary table.
  ProbeTable(isolate, masm, flags, kSecondary, receiver, name, scratch);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
}
    200 
    201 
// Loads into |prototype| the prototype object of the global function at
// slot |index| of the current native context, walking:
// context -> global object -> native context -> function -> initial map
// -> prototype.  Reads the context from rsi; clobbers only |prototype|.
void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ movp(prototype,
          Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  __ movp(prototype,
          FieldOperand(prototype, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  __ movp(prototype, Operand(prototype, Context::SlotOffset(index)));
  // Load the initial map.  The global functions all have initial maps.
  __ movp(prototype,
          FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ movp(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}
    219 
    220 
// Faster variant of GenerateLoadGlobalFunctionPrototype that embeds the
// global function's initial map directly in the generated code.  To keep
// the embedded constant from going stale, it first verifies at runtime
// that the current native context still holds the same function at
// |index|, jumping to |miss| otherwise.  Clobbers |prototype| (also used
// as the scratch register for the context check).
void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
    int index,
    Register prototype,
    Label* miss) {
  Isolate* isolate = masm->isolate();
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(isolate->native_context()->get(index)));

  // Check we're still in the same context.
  Register scratch = prototype;
  const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
  __ movp(scratch, Operand(rsi, offset));
  __ movp(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
  __ Cmp(Operand(scratch, Context::SlotOffset(index)), function);
  __ j(not_equal, miss);

  // Load its initial map. The global functions all have initial maps.
  __ Move(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ movp(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}
    244 
    245 
// Emits a load of a JS array's length into rax followed by a return.
// Jumps to |miss_label| if |receiver| is a smi or not a JSArray.
// |scratch| receives the receiver's map as a side effect of the type
// check.
void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, miss_label);

  // Load length directly from the JS array.
  __ movp(rax, FieldOperand(receiver, JSArray::kLengthOffset));
  __ ret(0);
}
    261 
    262 
// Emits a load of a function receiver's prototype into |result| (and
// then rax) followed by a return.  The actual extraction, including the
// failure checks that jump to |miss_label|, is delegated to
// MacroAssembler::TryGetFunctionPrototype.
void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register result,
                                                 Register scratch,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, result, miss_label);
  // The calling convention returns the value in rax; move only if needed.
  if (!result.is(rax)) __ movp(rax, result);
  __ ret(0);
}
    272 
    273 
// Emits a load of the fast (non-dictionary) property at |index| of the
// object in |src| into |dst|.  If |inobject| is false, the property lives
// in the out-of-line properties array, which is loaded first using |dst|
// as a temporary.  Unboxed double fields are not supported here.
void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst,
                                            Register src,
                                            bool inobject,
                                            int index,
                                            Representation representation) {
  ASSERT(!representation.IsDouble());
  int offset = index * kPointerSize;
  if (!inobject) {
    // Calculate the offset into the properties array.
    offset = offset + FixedArray::kHeaderSize;
    __ movp(dst, FieldOperand(src, JSObject::kPropertiesOffset));
    src = dst;
  }
  __ movp(dst, FieldOperand(src, offset));
}
    290 
    291 
// Pushes the four arguments for a named-interceptor runtime call in the
// order fixed by the StubCache::kInterceptorArgs* indices:
// name, interceptor info, receiver (this), holder.
static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
  STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
  STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
  STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);
  __ Push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  // The interceptor info must be in old space so the embedded constant
  // below cannot be moved by a scavenge.
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  __ Move(kScratchRegister, interceptor);
  __ Push(kScratchRegister);
  __ Push(receiver);
  __ Push(holder);
}
    310 
    311 
// Emits a call into the runtime that invokes the named interceptor:
// pushes the standard interceptor arguments, then calls the IC utility
// function identified by |id|.
static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj,
    IC::UtilityId id) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallExternalReference(
      ExternalReference(IC_Utility(id), masm->isolate()),
      StubCache::kInterceptorArgsLength);
}
    324 
    325 
    326 // Generate call to api function.
    327 void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
    328                                        const CallOptimization& optimization,
    329                                        Handle<Map> receiver_map,
    330                                        Register receiver,
    331                                        Register scratch_in,
    332                                        bool is_store,
    333                                        int argc,
    334                                        Register* values) {
    335   ASSERT(optimization.is_simple_api_call());
    336 
    337   __ PopReturnAddressTo(scratch_in);
    338   // receiver
    339   __ Push(receiver);
    340   // Write the arguments to stack frame.
    341   for (int i = 0; i < argc; i++) {
    342     Register arg = values[argc-1-i];
    343     ASSERT(!receiver.is(arg));
    344     ASSERT(!scratch_in.is(arg));
    345     __ Push(arg);
    346   }
    347   __ PushReturnAddressFrom(scratch_in);
    348   // Stack now matches JSFunction abi.
    349 
    350   // Abi for CallApiFunctionStub.
    351   Register callee = rax;
    352   Register call_data = rbx;
    353   Register holder = rcx;
    354   Register api_function_address = rdx;
    355   Register scratch = rdi;  // scratch_in is no longer valid.
    356 
    357   // Put holder in place.
    358   CallOptimization::HolderLookup holder_lookup;
    359   Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
    360       receiver_map,
    361       &holder_lookup);
    362   switch (holder_lookup) {
    363     case CallOptimization::kHolderIsReceiver:
    364       __ Move(holder, receiver);
    365       break;
    366     case CallOptimization::kHolderFound:
    367       __ Move(holder, api_holder);
    368      break;
    369     case CallOptimization::kHolderNotFound:
    370       UNREACHABLE();
    371       break;
    372   }
    373 
    374   Isolate* isolate = masm->isolate();
    375   Handle<JSFunction> function = optimization.constant_function();
    376   Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
    377   Handle<Object> call_data_obj(api_call_info->data(), isolate);
    378 
    379   // Put callee in place.
    380   __ Move(callee, function);
    381 
    382   bool call_data_undefined = false;
    383   // Put call_data in place.
    384   if (isolate->heap()->InNewSpace(*call_data_obj)) {
    385     __ Move(scratch, api_call_info);
    386     __ movp(call_data, FieldOperand(scratch, CallHandlerInfo::kDataOffset));
    387   } else if (call_data_obj->IsUndefined()) {
    388     call_data_undefined = true;
    389     __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
    390   } else {
    391     __ Move(call_data, call_data_obj);
    392   }
    393 
    394   // Put api_function_address in place.
    395   Address function_address = v8::ToCData<Address>(api_call_info->callback());
    396   __ Move(
    397       api_function_address, function_address, RelocInfo::EXTERNAL_REFERENCE);
    398 
    399   // Jump to stub.
    400   CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
    401   __ TailCallStub(&stub);
    402 }
    403 
    404 
// If |label| was ever jumped to, binds it and re-materializes |name| as a
// constant in the name register before falling through — presumably
// because the register may have been clobbered on the paths that jump
// here; confirm against the callers.  If the label is unused, emits
// nothing.
void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
                                            Label* label,
                                            Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ Move(this->name(), name);
  }
}
    413 
    414 
// Ensures |global| has a property cell for |name| (creating one holding
// the hole if necessary) and emits a runtime check that the cell still
// holds the hole — i.e. that the property is still absent.  Jumps to
// |miss| if the cell has been given a value.  Clobbers |scratch|.
void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
                                             Handle<JSGlobalObject> global,
                                             Handle<Name> name,
                                             Register scratch,
                                             Label* miss) {
  Handle<PropertyCell> cell =
      JSGlobalObject::EnsurePropertyCell(global, name);
  // At compile time the property must be absent (cell holds the hole).
  ASSERT(cell->value()->IsTheHole());
  __ Move(scratch, cell);
  __ Cmp(FieldOperand(scratch, Cell::kValueOffset),
         masm->isolate()->factory()->the_hole_value());
  __ j(not_equal, miss);
}
    428 
    429 
// Emits a check that |name| is not an own property of |holder|:
// via the global property cell for global objects, or via a dictionary
// negative lookup for slow-mode, non-global-proxy objects.  In all other
// cases no dynamic check is emitted.  Jumps to |miss| if the name may be
// present.
void StoreStubCompiler::GenerateNegativeHolderLookup(
    MacroAssembler* masm,
    Handle<JSObject> holder,
    Register holder_reg,
    Handle<Name> name,
    Label* miss) {
  if (holder->IsJSGlobalObject()) {
    GenerateCheckPropertyCell(
        masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
    GenerateDictionaryNegativeLookup(
        masm, miss, holder_reg, name, scratch1(), scratch2());
  }
}
    444 
    445 
// Receiver_reg is preserved on jumps to miss_label, but may be destroyed if
// store is successful.
//
// Emits a store of |value_reg| into |object| while transitioning the
// object's map to |transition| (which adds the property described by the
// transition map's last-added descriptor).  First validates that the
// value matches the new field's representation (constant equality, smi,
// heap object with an allowed map, or double), jumping to |miss_label|
// on mismatch.  For double fields a fresh heap number is allocated in
// |storage_reg| (jumping to |slow| if inline allocation fails).  If the
// object has no unused property slots, falls back to a runtime call that
// grows the backing store and performs the store.  On success, returns
// with the value in rax.
void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                                Handle<JSObject> object,
                                                LookupResult* lookup,
                                                Handle<Map> transition,
                                                Handle<Name> name,
                                                Register receiver_reg,
                                                Register storage_reg,
                                                Register value_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register unused,
                                                Label* miss_label,
                                                Label* slow) {
  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

  if (details.type() == CONSTANT) {
    // Constant properties only "store" if the value equals the constant.
    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
    __ Cmp(value_reg, constant);
    __ j(not_equal, miss_label);
  } else if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
    HeapType* field_type = descriptors->GetFieldType(descriptor);
    HeapType::Iterator<Map> it = field_type->Classes();
    if (!it.Done()) {
      Label do_store;
      // Compare the value's map against each allowed class; only the
      // final comparison branches to miss on failure.
      while (true) {
        __ CompareMap(value_reg, it.Current());
        it.Advance();
        if (it.Done()) {
          __ j(not_equal, miss_label);
          break;
        }
        __ j(equal, &do_store, Label::kNear);
      }
      __ bind(&do_store);
    }
  } else if (representation.IsDouble()) {
    // Box the incoming value (smi or heap number) into a freshly
    // allocated heap number held in storage_reg.
    Label do_store, heap_number;
    __ AllocateHeapNumber(storage_reg, scratch1, slow);

    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiToInteger32(scratch1, value_reg);
    __ Cvtlsi2sd(xmm0, scratch1);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
                miss_label, DONT_DO_SMI_CHECK);
    __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));

    __ bind(&do_store);
    __ movsd(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if (details.type() == FIELD &&
      object->map()->unused_property_fields() == 0) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ PopReturnAddressTo(scratch1);
    __ Push(receiver_reg);
    __ Push(transition);
    __ Push(value_reg);
    __ PushReturnAddressFrom(scratch1);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3,
        1);
    return;
  }

  // Update the map of the object.
  __ Move(scratch1, transition);
  __ movp(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);

  // Update the write barrier for the map field.
  __ RecordWriteField(receiver_reg,
                      HeapObject::kMapOffset,
                      scratch1,
                      scratch2,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  if (details.type() == CONSTANT) {
    // Nothing to store for constants; the map transition is the store.
    ASSERT(value_reg.is(rax));
    __ ret(0);
    return;
  }

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    if (representation.IsDouble()) {
      // For doubles, store the boxed heap number allocated above.
      __ movp(FieldOperand(receiver_reg, offset), storage_reg);
    } else {
      __ movp(FieldOperand(receiver_reg, offset), value_reg);
    }

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!representation.IsDouble()) {
        __ movp(storage_reg, value_reg);
      }
      __ RecordWriteField(
          receiver_reg, offset, storage_reg, scratch1, kDontSaveFPRegs,
          EMIT_REMEMBERED_SET, smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ movp(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    if (representation.IsDouble()) {
      __ movp(FieldOperand(scratch1, offset), storage_reg);
    } else {
      __ movp(FieldOperand(scratch1, offset), value_reg);
    }

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!representation.IsDouble()) {
        __ movp(storage_reg, value_reg);
      }
      __ RecordWriteField(
          scratch1, offset, storage_reg, receiver_reg, kDontSaveFPRegs,
          EMIT_REMEMBERED_SET, smi_check);
    }
  }

  // Return the value (register rax).
  ASSERT(value_reg.is(rax));
  __ ret(0);
}
    604 
    605 
// Both name_reg and receiver_reg are preserved on jumps to miss_label,
// but may be destroyed if store is successful.
//
// Emits a store of |value_reg| into an already-existing field of
// |object| (no map transition).  Validates that the value matches the
// field's representation (smi, heap object with an allowed map, or
// double), jumping to |miss_label| on mismatch.  Double fields are
// stored by overwriting the value in the field's existing heap-number
// box.  On success, returns with the value in rax.
void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
                                           Handle<JSObject> object,
                                           LookupResult* lookup,
                                           Register receiver_reg,
                                           Register name_reg,
                                           Register value_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* miss_label) {
  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  FieldIndex index = lookup->GetFieldIndex();

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
    HeapType* field_type = lookup->GetFieldType();
    HeapType::Iterator<Map> it = field_type->Classes();
    if (!it.Done()) {
      Label do_store;
      // Compare the value's map against each allowed class; only the
      // final comparison branches to miss on failure.
      while (true) {
        __ CompareMap(value_reg, it.Current());
        it.Advance();
        if (it.Done()) {
          __ j(not_equal, miss_label);
          break;
        }
        __ j(equal, &do_store, Label::kNear);
      }
      __ bind(&do_store);
    }
  } else if (representation.IsDouble()) {
    // Load the double storage.
    if (index.is_inobject()) {
      __ movp(scratch1, FieldOperand(receiver_reg, index.offset()));
    } else {
      __ movp(scratch1,
              FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
      __ movp(scratch1, FieldOperand(scratch1, index.offset()));
    }

    // Store the value into the storage.
    Label do_store, heap_number;
    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiToInteger32(scratch2, value_reg);
    __ Cvtlsi2sd(xmm0, scratch2);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
                miss_label, DONT_DO_SMI_CHECK);
    __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
    __ bind(&do_store);
    // Mutate the existing box in place; no write barrier is needed for a
    // raw double payload.
    __ movsd(FieldOperand(scratch1, HeapNumber::kValueOffset), xmm0);
    // Return the value (register rax).
    ASSERT(value_reg.is(rax));
    __ ret(0);
    return;
  }

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index.is_inobject()) {
    // Set the property straight into the object.
    __ movp(FieldOperand(receiver_reg, index.offset()), value_reg);

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      // Pass the value being stored in the now unused name_reg.
      __ movp(name_reg, value_reg);
      __ RecordWriteField(
          receiver_reg, index.offset(), name_reg, scratch1, kDontSaveFPRegs,
          EMIT_REMEMBERED_SET, smi_check);
    }
  } else {
    // Write to the properties array.
    // Get the properties array (optimistically).
    __ movp(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ movp(FieldOperand(scratch1, index.offset()), value_reg);

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      // Pass the value being stored in the now unused name_reg.
      __ movp(name_reg, value_reg);
      __ RecordWriteField(
          scratch1, index.offset(), name_reg, receiver_reg, kDontSaveFPRegs,
          EMIT_REMEMBERED_SET, smi_check);
    }
  }

  // Return the value (register rax).
  ASSERT(value_reg.is(rax));
  __ ret(0);
}
    708 
    709 
// Transfers control to |code| with an unconditional jump (no call frame is
// created), so |code| returns directly to this stub's caller.
void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
  __ jmp(code, RelocInfo::CODE_TARGET);
}
    713 
    714 
    715 #undef __
    716 #define __ ACCESS_MASM((masm()))
    717 
    718 
// Emits checks that the prototype chain from the object in |object_reg| up to
// (but excluding) |holder| is unchanged: map checks for fast/global objects,
// or a negative dictionary lookup of |name| for normal dictionary-mode
// objects. Jumps to |miss| if any check fails. Returns the register holding
// the holder at the end: |object_reg| if the chain is empty, otherwise
// |holder_reg|. |scratch1| and |scratch2| are clobbered.
Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Handle<Name> name,
                                       Label* miss,
                                       PrototypeCheckType check) {
  Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));

  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.  On the first
  // iteration, reg is an alias for object_reg, on later iterations,
  // it is an alias for holder_reg.
  Register reg = object_reg;
  int depth = 0;

  // |current| tracks the handle of the object whose map is being checked;
  // it is only known (non-null) when the type is a constant.
  Handle<JSObject> current = Handle<JSObject>::null();
  if (type->IsConstant()) {
    current = Handle<JSObject>::cast(type->AsConstant()->Value());
  }
  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap() &&
        !current_map->IsJSGlobalProxyMap()) {
      // Dictionary-mode (normal) object: prove the property is absent by a
      // negative lookup instead of a map check (dictionary maps are shared).
      if (!name->IsUniqueName()) {
        ASSERT(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      ASSERT(current.is_null() ||
             current->property_dictionary()->FindEntry(name) ==
             NameDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      // Advance down the chain by loading the prototype out of the map.
      __ movp(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ movp(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    } else {
      bool in_new_space = heap()->InNewSpace(*prototype);
      if (in_new_space) {
        // Save the map in scratch1 for later.
        __ movp(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // At depth 1 the receiver's map check may be skipped (presumably the
      // caller has already checked it) unless CHECK_ALL_MAPS is requested.
      if (depth != 1 || check == CHECK_ALL_MAPS) {
        __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
      }

      // Check access rights to the global object.  This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      if (current_map->IsJSGlobalProxyMap()) {
        __ CheckAccessGlobalProxy(reg, scratch2, miss);
      } else if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(
            masm(), Handle<JSGlobalObject>::cast(current), name,
            scratch2, miss);
      }
      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (in_new_space) {
        // The prototype is in new space; we cannot store a reference to it
        // in the code.  Load it from the map.
        __ movp(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
      } else {
        // The prototype is in old space; load it directly.
        __ Move(reg, prototype);
      }
    }

    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (depth != 0 || check == CHECK_ALL_MAPS) {
    // Check the holder map.
    __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
  }

  // Perform security check for access to the global object.
  ASSERT(current_map->IsJSGlobalProxyMap() ||
         !current_map->is_access_check_needed());
  if (current_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // Return the register containing the holder.
  return reg;
}
    831 
    832 
    833 void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
    834   if (!miss->is_unused()) {
    835     Label success;
    836     __ jmp(&success);
    837     __ bind(miss);
    838     TailCallBuiltin(masm(), MissBuiltin(kind()));
    839     __ bind(&success);
    840   }
    841 }
    842 
    843 
    844 void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
    845   if (!miss->is_unused()) {
    846     Label success;
    847     __ jmp(&success);
    848     GenerateRestoreName(masm(), miss, name);
    849     TailCallBuiltin(masm(), MissBuiltin(kind()));
    850     __ bind(&success);
    851   }
    852 }
    853 
    854 
// Runs the handler frontend for a callback load and additionally verifies,
// for dictionary-mode non-global holders, that the holder's property
// dictionary still maps the name to |callback|. Returns the register holding
// the holder object.
Register LoadStubCompiler::CallbackHandlerFrontend(
    Handle<HeapType> type,
    Register object_reg,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<Object> callback) {
  Label miss;

  Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    // Dictionary maps are shared, so the map check above does not pin the
    // property: re-probe the dictionary and compare the stored value.
    ASSERT(!reg.is(scratch2()));
    ASSERT(!reg.is(scratch3()));
    ASSERT(!reg.is(scratch4()));

    // Load the properties dictionary.
    Register dictionary = scratch4();
    __ movp(dictionary, FieldOperand(reg, JSObject::kPropertiesOffset));

    // Probe the dictionary.
    Label probe_done;
    NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
                                                     &miss,
                                                     &probe_done,
                                                     dictionary,
                                                     this->name(),
                                                     scratch2(),
                                                     scratch3());
    __ bind(&probe_done);

    // If probing finds an entry in the dictionary, scratch3 contains the
    // index into the dictionary. Check that the value is the callback.
    Register index = scratch3();
    const int kElementsStartOffset =
        NameDictionary::kHeaderSize +
        NameDictionary::kElementsStartIndex * kPointerSize;
    const int kValueOffset = kElementsStartOffset + kPointerSize;
    __ movp(scratch2(),
            Operand(dictionary, index, times_pointer_size,
                    kValueOffset - kHeapObjectTag));
    __ Move(scratch3(), callback, RelocInfo::EMBEDDED_OBJECT);
    __ cmpp(scratch2(), scratch3());
    __ j(not_equal, &miss);
  }

  HandlerFrontendFooter(name, &miss);
  return reg;
}
    903 
    904 
    905 void LoadStubCompiler::GenerateLoadField(Register reg,
    906                                          Handle<JSObject> holder,
    907                                          FieldIndex field,
    908                                          Representation representation) {
    909   if (!reg.is(receiver())) __ movp(receiver(), reg);
    910   if (kind() == Code::LOAD_IC) {
    911     LoadFieldStub stub(isolate(), field);
    912     GenerateTailCall(masm(), stub.GetCode());
    913   } else {
    914     KeyedLoadFieldStub stub(isolate(), field);
    915     GenerateTailCall(masm(), stub.GetCode());
    916   }
    917 }
    918 
    919 
// Emits a load through an API accessor: builds the PropertyCallbackArguments
// block on the stack (layout pinned by the STATIC_ASSERTs below), then
// tail-calls CallApiGetterStub with the getter's C entry point in r8.
void LoadStubCompiler::GenerateLoadCallback(
    Register reg,
    Handle<ExecutableAccessorInfo> callback) {
  // Insert additional parameters into the stack frame above return address.
  ASSERT(!scratch4().is(reg));
  __ PopReturnAddressTo(scratch4());

  // The push order below must match the PropertyCallbackArguments indices.
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
  __ Push(receiver());  // receiver
  if (heap()->InNewSpace(callback->data())) {
    // The data object may move; load it from the callback at runtime instead
    // of embedding a direct reference in the code.
    ASSERT(!scratch2().is(reg));
    __ Move(scratch2(), callback);
    __ Push(FieldOperand(scratch2(),
                         ExecutableAccessorInfo::kDataOffset));  // data
  } else {
    __ Push(Handle<Object>(callback->data(), isolate()));
  }
  ASSERT(!kScratchRegister.is(reg));
  __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
  __ Push(kScratchRegister);  // return value
  __ Push(kScratchRegister);  // return value default
  __ PushAddress(ExternalReference::isolate_address(isolate()));
  __ Push(reg);  // holder
  __ Push(name());  // name
  // Save a pointer to where we pushed the arguments pointer.  This will be
  // passed as the const PropertyAccessorInfo& to the C++ callback.

  __ PushReturnAddressFrom(scratch4());

  // Abi for CallApiGetter
  Register api_function_address = r8;
  Address getter_address = v8::ToCData<Address>(callback->getter());
  __ Move(api_function_address, getter_address, RelocInfo::EXTERNAL_REFERENCE);

  CallApiGetterStub stub(isolate());
  __ TailCallStub(&stub);
}
    963 
    964 
// Emits a constant load: materialize |value| in rax (the load IC result
// register) and return.
void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ Move(rax, value);
  __ ret(0);
}
    970 
    971 
// Emits a load through a named interceptor. When the post-interceptor lookup
// is a cacheable FIELD or compatible CALLBACKS result, the interceptor call
// is inlined with a fast fall-through to the real property; otherwise the
// whole load is delegated to the runtime.
void LoadStubCompiler::GenerateLoadInterceptor(
    Register holder_reg,
    Handle<Object> object,
    Handle<JSObject> interceptor_holder,
    LookupResult* lookup,
    Handle<Name> name) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // So far the most popular follow ups for interceptor loads are FIELD
  // and CALLBACKS, so inline only them, other cases may be added
  // later.
  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->IsField()) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
               lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
      ExecutableAccessorInfo* callback =
          ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
      compile_followup_inline = callback->getter() != NULL &&
          callback->IsCompatibleReceiver(*object);
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

    // Preserve the receiver register explicitly whenever it is different from
    // the holder and it is needed should the interceptor return without any
    // result. The CALLBACKS case needs the receiver to be passed into C++ code,
    // the FIELD case might cause a miss during the prototype check.
    // NOTE(review): "perfrom" below is a long-standing typo of "perform" in
    // this local's name; behavior is unaffected.
    bool must_perfrom_prototype_check = *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perfrom_prototype_check);

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    {
      FrameScope frame_scope(masm(), StackFrame::INTERNAL);

      if (must_preserve_receiver_reg) {
        __ Push(receiver());
      }
      __ Push(holder_reg);
      __ Push(this->name());

      // Invoke an interceptor.  Note: map checks from receiver to
      // interceptor's holder has been compiled before (see a caller
      // of this method.)
      CompileCallLoadPropertyWithInterceptor(
          masm(), receiver(), holder_reg, this->name(), interceptor_holder,
          IC::kLoadPropertyWithInterceptorOnly);

      // Check if interceptor provided a value for property.  If it's
      // the case, return immediately.
      Label interceptor_failed;
      __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
      __ j(equal, &interceptor_failed);
      frame_scope.GenerateLeaveFrame();
      __ ret(0);

      __ bind(&interceptor_failed);
      // Pops must mirror the pushes above, in reverse order.
      __ Pop(this->name());
      __ Pop(holder_reg);
      if (must_preserve_receiver_reg) {
        __ Pop(receiver());
      }

      // Leave the internal frame.
    }

    GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    __ PopReturnAddressTo(scratch2());
    PushInterceptorArguments(masm(), receiver(), holder_reg,
                             this->name(), interceptor_holder);
    __ PushReturnAddressFrom(scratch2());

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptor), isolate());
    __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
  }
}
   1061 
   1062 
// Compiles a store handler for a property backed by an API setter callback:
// after the frontend checks pass, the receiver, holder, callback info, name
// and value are pushed and the store is completed by the
// kStoreCallbackProperty runtime function via tail call.
Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  Register holder_reg = HandlerFrontend(
      IC::CurrentTypeOf(object, isolate()), receiver(), holder, name);

  // Push the 5 runtime arguments underneath the return address.
  __ PopReturnAddressTo(scratch1());
  __ Push(receiver());
  __ Push(holder_reg);
  __ Push(callback);  // callback info
  __ Push(name);
  __ Push(value());
  __ PushReturnAddressFrom(scratch1());

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 5, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}
   1087 
   1088 
   1089 #undef __
   1090 #define __ ACCESS_MASM(masm)
   1091 
   1092 
// Emits a store through a JavaScript setter function. The stored value is
// preserved across the call so the IC can return it (a store expression
// evaluates to the assigned value, not the setter's result). A null |setter|
// compiles only the deopt-continuation skeleton.
void StoreStubCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm,
    Handle<HeapType> type,
    Register receiver,
    Handle<JSFunction> setter) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save value register, so we can restore it later.
    __ Push(value());

    if (!setter.is_null()) {
      // Call the JavaScript setter with receiver and value on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ movp(receiver,
                FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
      }
      __ Push(receiver);
      __ Push(value());
      ParameterCount actual(1);
      ParameterCount expected(setter);
      __ InvokeFunction(setter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ Pop(rax);

    // Restore context register.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  }
  __ ret(0);
}
   1134 
   1135 
   1136 #undef __
   1137 #define __ ACCESS_MASM(masm())
   1138 
   1139 
// Compiles a store handler for an object with a named interceptor: pushes
// receiver, name and value and tail-calls the kStoreInterceptorProperty
// runtime function, which performs the actual interceptor store.
Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
    Handle<JSObject> object,
    Handle<Name> name) {
  __ PopReturnAddressTo(scratch1());
  __ Push(receiver());
  __ Push(this->name());
  __ Push(value());
  __ PushReturnAddressFrom(scratch1());

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 3, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}
   1157 
   1158 
// Emits the body of the array-length store handler: pushes receiver and the
// new length value, then tail-calls the kStoreIC_ArrayLength runtime entry
// which performs the actual length update.
void StoreStubCompiler::GenerateStoreArrayLength() {
  // Prepare tail call to StoreIC_ArrayLength.
  __ PopReturnAddressTo(scratch1());
  __ Push(receiver());
  __ Push(value());
  __ PushReturnAddressFrom(scratch1());

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength),
                        masm()->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}
   1171 
   1172 
// Compiles a polymorphic keyed-store dispatcher: compares the receiver's map
// against each entry of |receiver_maps| and jumps to the matching handler.
// When a transitioned map is supplied for an entry, it is loaded into the
// transition-map register before jumping so the handler can perform the map
// transition. Falls through to the miss builtin when no map matches.
Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
    MapHandleList* receiver_maps,
    CodeHandleList* handler_stubs,
    MapHandleList* transitioned_maps) {
  Label miss;
  __ JumpIfSmi(receiver(), &miss, Label::kNear);

  __ movp(scratch1(), FieldOperand(receiver(), HeapObject::kMapOffset));
  int receiver_count = receiver_maps->length();
  for (int i = 0; i < receiver_count; ++i) {
    // Check map and tail call if there's a match
    __ Cmp(scratch1(), receiver_maps->at(i));
    if (transitioned_maps->at(i).is_null()) {
      __ j(equal, handler_stubs->at(i), RelocInfo::CODE_TARGET);
    } else {
      Label next_map;
      __ j(not_equal, &next_map, Label::kNear);
      __ Move(transition_map(),
              transitioned_maps->at(i),
              RelocInfo::EMBEDDED_OBJECT);
      __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET);
      __ bind(&next_map);
    }
  }

  __ bind(&miss);

  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  return GetICCode(
      kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
}
   1206 
   1207 
// Compiles a load handler for a property proven absent along the whole
// prototype chain (ending at |last|): after the negative-lookup frontend
// passes, the result is simply undefined.
Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<HeapType> type,
                                                      Handle<JSObject> last,
                                                      Handle<Name> name) {
  NonexistentHandlerFrontend(type, last, name);

  // Return undefined if maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  __ ret(0);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}
   1221 
   1222 
// Register allocation for LOAD_IC stubs (rax doubles as the receiver and
// result register; see the LOAD_IC calling convention users in this file).
Register* LoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { rax, rcx, rdx, rbx, rdi, r8 };
  return registers;
}
   1228 
   1229 
// Register allocation for KEYED_LOAD_IC stubs (receiver in rdx, key/name in
// rax, matching GenerateLoadDictionaryElement below).
Register* KeyedLoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { rdx, rax, rbx, rcx, rdi, r8 };
  return registers;
}
   1235 
   1236 
// The register holding the value being stored; rax is also the IC's result
// register, so the stored value is returned without an extra move.
Register StoreStubCompiler::value() {
  return rax;
}
   1240 
   1241 
// Register allocation for STORE_IC stubs (rax is reserved for value(), so it
// does not appear here).
Register* StoreStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3.
  static Register registers[] = { rdx, rcx, rbx, rdi, r8 };
  return registers;
}
   1247 
   1248 
// Register allocation for KEYED_STORE_IC stubs; identical to the plain
// store allocation above.
Register* KeyedStoreStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3.
  static Register registers[] = { rdx, rcx, rbx, rdi, r8 };
  return registers;
}
   1254 
   1255 
   1256 #undef __
   1257 #define __ ACCESS_MASM(masm)
   1258 
   1259 
// Emits a load through a JavaScript getter function, invoked inside an
// internal frame with the receiver as the sole stack argument. A null
// |getter| compiles only the deopt-continuation skeleton.
void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
                                             Handle<HeapType> type,
                                             Register receiver,
                                             Handle<JSFunction> getter) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    if (!getter.is_null()) {
      // Call the JavaScript getter with the receiver on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ movp(receiver,
                FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
      }
      __ Push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(getter);
      __ InvokeFunction(getter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  }
  __ ret(0);
}
   1295 
   1296 
   1297 #undef __
   1298 #define __ ACCESS_MASM(masm())
   1299 
   1300 
// Compiles a load handler for a global property stored in a PropertyCell.
// The value is read out of the cell at runtime; unless the property is
// DontDelete, a the-hole check guards against the property having been
// deleted since the stub was compiled.
Handle<Code> LoadStubCompiler::CompileLoadGlobal(
    Handle<HeapType> type,
    Handle<GlobalObject> global,
    Handle<PropertyCell> cell,
    Handle<Name> name,
    bool is_dont_delete) {
  Label miss;
  // TODO(verwaest): Directly store to rax. Currently we cannot do this, since
  // rax is used as receiver(), which we would otherwise clobber before a
  // potential miss.
  HandlerFrontendHeader(type, receiver(), global, name, &miss);

  // Get the value from the cell.
  __ Move(rbx, cell);
  __ movp(rbx, FieldOperand(rbx, PropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (!is_dont_delete) {
    __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
    __ j(equal, &miss);
  } else if (FLAG_debug_code) {
    // DontDelete cells can never hold the hole; assert that in debug code.
    __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
    __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1);
  __ movp(rax, rbx);
  __ ret(0);

  HandlerFrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), Code::NORMAL, name);
}
   1336 
   1337 
// Compiles a polymorphic IC dispatcher: optionally checks the property name
// (keyed ICs with PROPERTY check), then compares the receiver's map against
// each non-deprecated type in |types| and jumps to the matching handler.
// Smi receivers are routed to the Number handler when one is present,
// otherwise to miss.
Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
    TypeHandleList* types,
    CodeHandleList* handlers,
    Handle<Name> name,
    Code::StubType type,
    IcCheckType check) {
  Label miss;

  if (check == PROPERTY &&
      (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
    __ Cmp(this->name(), name);
    __ j(not_equal, &miss);
  }

  Label number_case;
  Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
  __ JumpIfSmi(receiver(), smi_target);

  Register map_reg = scratch1();
  __ movp(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset));
  int receiver_count = types->length();
  int number_of_handled_maps = 0;
  for (int current = 0; current < receiver_count; ++current) {
    Handle<HeapType> type = types->at(current);
    Handle<Map> map = IC::TypeToMap(*type, isolate());
    if (!map->is_deprecated()) {
      number_of_handled_maps++;
      // Check map and tail call if there's a match
      __ Cmp(map_reg, map);
      if (type->Is(HeapType::Number())) {
        // Smi receivers jump here and share the heap-number handler.
        ASSERT(!number_case.is_unused());
        __ bind(&number_case);
      }
      __ j(equal, handlers->at(current), RelocInfo::CODE_TARGET);
    }
  }
  ASSERT(number_of_handled_maps > 0);

  __  bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  InlineCacheState state =
      number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
  return GetICCode(kind(), type, name, state);
}
   1384 
   1385 
   1386 #undef __
   1387 #define __ ACCESS_MASM(masm)
   1388 
   1389 
// Emits the keyed load path for receivers whose elements are stored in a
// number (seeded) dictionary: a smi key is probed in the dictionary; a
// non-smi key goes to the generic miss builtin, and a failed probe goes to
// the slow builtin.
void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow, miss;

  // This stub is meant to be tail-jumped to, the receiver must already
  // have been verified by the caller to not be a smi.

  __ JumpIfNotSmi(rax, &miss);
  __ SmiToInteger32(rbx, rax);
  __ movp(rcx, FieldOperand(rdx, JSObject::kElementsOffset));

  // Check whether the elements is a number dictionary.
  // rdx: receiver
  // rax: key
  // rbx: key as untagged int32
  // rcx: elements
  __ LoadFromNumberDictionary(&slow, rcx, rax, rbx, r9, rdi, rax);
  __ ret(0);

  __ bind(&slow);
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);

  __ bind(&miss);
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
}
   1430 
   1431 
   1432 #undef __
   1433 
   1434 } }  // namespace v8::internal
   1435 
   1436 #endif  // V8_TARGET_ARCH_X64
   1437