      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Redistribution and use in source and binary forms, with or without
      3 // modification, are permitted provided that the following conditions are
      4 // met:
      5 //
      6 //     * Redistributions of source code must retain the above copyright
      7 //       notice, this list of conditions and the following disclaimer.
      8 //     * Redistributions in binary form must reproduce the above
      9 //       copyright notice, this list of conditions and the following
     10 //       disclaimer in the documentation and/or other materials provided
     11 //       with the distribution.
     12 //     * Neither the name of Google Inc. nor the names of its
     13 //       contributors may be used to endorse or promote products derived
     14 //       from this software without specific prior written permission.
     15 //
     16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
     17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
     18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
     19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
     20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
     21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
     22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
     23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
     24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
     25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
     26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 
     28 #include "v8.h"
     29 
     30 #if defined(V8_TARGET_ARCH_X64)
     31 
     32 #include "codegen.h"
     33 #include "ic-inl.h"
     34 #include "runtime.h"
     35 #include "stub-cache.h"
     36 
     37 namespace v8 {
     38 namespace internal {
     39 
     40 // ----------------------------------------------------------------------------
     41 // Static IC stub generators.
     42 //
     43 
     44 #define __ ACCESS_MASM(masm)
     45 
     46 
     47 static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
     48                                             Register type,
     49                                             Label* global_object) {
     50   // Register usage:
     51   //   type: holds the receiver instance type on entry.
     52   __ cmpb(type, Immediate(JS_GLOBAL_OBJECT_TYPE));
     53   __ j(equal, global_object);
     54   __ cmpb(type, Immediate(JS_BUILTINS_OBJECT_TYPE));
     55   __ j(equal, global_object);
     56   __ cmpb(type, Immediate(JS_GLOBAL_PROXY_TYPE));
     57   __ j(equal, global_object);
     58 }
     59 
     60 
     61 // Generated code falls through if the receiver is a regular non-global
     62 // JS object with slow properties and no interceptors.
     63 static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
     64                                                   Register receiver,
     65                                                   Register r0,
     66                                                   Register r1,
     67                                                   Label* miss) {
     68   // Register usage:
     69   //   receiver: holds the receiver on entry and is unchanged.
     70   //   r0: used to hold receiver instance type.
     71   //       Holds the property dictionary on fall through.
     72   //   r1: used to hold receiver's map.
     73 
     74   __ JumpIfSmi(receiver, miss);
     75 
     76   // Check that the receiver is a valid JS object.
     77   __ movq(r1, FieldOperand(receiver, HeapObject::kMapOffset));
     78   __ movb(r0, FieldOperand(r1, Map::kInstanceTypeOffset));
     79   __ cmpb(r0, Immediate(FIRST_SPEC_OBJECT_TYPE));
     80   __ j(below, miss);
     81 
     82   // If this assert fails, we have to check the upper bound too.
     83   STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
     84 
     85   GenerateGlobalInstanceTypeCheck(masm, r0, miss);
     86 
     87   // Check for non-global object that requires access check.
     88   __ testb(FieldOperand(r1, Map::kBitFieldOffset),
     89            Immediate((1 << Map::kIsAccessCheckNeeded) |
     90                      (1 << Map::kHasNamedInterceptor)));
     91   __ j(not_zero, miss);
     92 
     93   __ movq(r0, FieldOperand(receiver, JSObject::kPropertiesOffset));
     94   __ CompareRoot(FieldOperand(r0, HeapObject::kMapOffset),
     95                  Heap::kHashTableMapRootIndex);
     96   __ j(not_equal, miss);
     97 }
     98 
     99 
    100 
    101 // Helper function used to load a property from a dictionary backing storage.
    102 // This function may return false negatives, so code at miss_label
    103 // must always call a backup property load that is complete.
    104 // This function is safe to call if name is not a symbol, and will jump to
    105 // the miss_label in that case.
    106 // The generated code assumes that the receiver has slow properties,
    107 // is not a global object and does not have interceptors.
    108 static void GenerateDictionaryLoad(MacroAssembler* masm,
    109                                    Label* miss_label,
    110                                    Register elements,
    111                                    Register name,
    112                                    Register r0,
    113                                    Register r1,
    114                                    Register result) {
    115   // Register use:
    116   //
    117   // elements - holds the property dictionary on entry and is unchanged.
    118   //
    119   // name - holds the name of the property on entry and is unchanged.
    120   //
    121   // r0   - used to hold the capacity of the property dictionary.
    122   //
    123   // r1   - used to hold the index into the property dictionary.
    124   //
    125   // result - holds the result on exit if the load succeeded.
    126 
    127   Label done;
    128 
    129   // Probe the dictionary.
    130   StringDictionaryLookupStub::GeneratePositiveLookup(masm,
    131                                                      miss_label,
    132                                                      &done,
    133                                                      elements,
    134                                                      name,
    135                                                      r0,
    136                                                      r1);
    137 
    138   // If probing finds an entry in the dictionary, r0 contains the
    139   // index into the dictionary. Check that the value is a normal
    140   // property.
    141   __ bind(&done);
    142   const int kElementsStartOffset =
    143       StringDictionary::kHeaderSize +
    144       StringDictionary::kElementsStartIndex * kPointerSize;
    145   const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
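          // Each dictionary entry is a (key, value, details) triple, so the
          // details word sits two pointers past the element start and the value
          // (loaded below) sits one pointer past it.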
    146   __ Test(Operand(elements, r1, times_pointer_size,
    147                   kDetailsOffset - kHeapObjectTag),
    148           Smi::FromInt(PropertyDetails::TypeField::kMask));
    149   __ j(not_zero, miss_label);
    150 
    151   // Get the value at the masked, scaled index.
    152   const int kValueOffset = kElementsStartOffset + kPointerSize;
    153   __ movq(result,
    154           Operand(elements, r1, times_pointer_size,
    155                   kValueOffset - kHeapObjectTag));
    156 }
    157 
    158 
    159 // Helper function used to store a property to a dictionary backing
    160 // storage. This function may fail to store a property even though it
    161 // is in the dictionary, so code at miss_label must always call a
    162 // backup property store that is complete. This function is safe to
    163 // call if name is not a symbol, and will jump to the miss_label in
    164 // that case. The generated code assumes that the receiver has slow
    165 // properties, is not a global object and does not have interceptors.
    166 static void GenerateDictionaryStore(MacroAssembler* masm,
    167                                     Label* miss_label,
    168                                     Register elements,
    169                                     Register name,
    170                                     Register value,
    171                                     Register scratch0,
    172                                     Register scratch1) {
    173   // Register use:
    174   //
    175   // elements - holds the property dictionary on entry and is clobbered.
    176   //
    177   // name - holds the name of the property on entry and is unchanged.
    178   //
    179   // value - holds the value to store and is unchanged.
    180   //
    181   // scratch0 - used for index into the property dictionary and is clobbered.
    182   //
    183   // scratch1 - used to hold the capacity of the property dictionary and is
    184   //            clobbered.
    185   Label done;
    186 
    187   // Probe the dictionary.
    188   StringDictionaryLookupStub::GeneratePositiveLookup(masm,
    189                                                      miss_label,
    190                                                      &done,
    191                                                      elements,
    192                                                      name,
    193                                                      scratch0,
    194                                                      scratch1);
    195 
    196   // If probing finds an entry in the dictionary, scratch0 contains the
    197   // index into the dictionary. Check that the value is a normal
    198   // property that is not read only.
    199   __ bind(&done);
    200   const int kElementsStartOffset =
    201       StringDictionary::kHeaderSize +
    202       StringDictionary::kElementsStartIndex * kPointerSize;
    203   const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
    204   const int kTypeAndReadOnlyMask =
    205       (PropertyDetails::TypeField::kMask |
    206        PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
    207   __ Test(Operand(elements,
    208                   scratch1,
    209                   times_pointer_size,
    210                   kDetailsOffset - kHeapObjectTag),
    211           Smi::FromInt(kTypeAndReadOnlyMask));
    212   __ j(not_zero, miss_label);
    213 
    214   // Store the value at the masked, scaled index.
    215   const int kValueOffset = kElementsStartOffset + kPointerSize;
    216   __ lea(scratch1, Operand(elements,
    217                            scratch1,
    218                            times_pointer_size,
    219                            kValueOffset - kHeapObjectTag));
    220   __ movq(Operand(scratch1, 0), value);
    221 
    222   // Update write barrier. Make sure not to clobber the value.
    223   __ movq(scratch0, value);
    224   __ RecordWrite(elements, scratch1, scratch0, kDontSaveFPRegs);
    225 }
    226 
    227 
    228 void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
    229   // ----------- S t a t e -------------
    230   //  -- rax    : receiver
    231   //  -- rcx    : name
    232   //  -- rsp[0] : return address
    233   // -----------------------------------
    234   Label miss;
    235 
    236   StubCompiler::GenerateLoadArrayLength(masm, rax, rdx, &miss);
    237   __ bind(&miss);
    238   StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
    239 }
    240 
    241 
    242 void LoadIC::GenerateStringLength(MacroAssembler* masm, bool support_wrappers) {
    243   // ----------- S t a t e -------------
    244   //  -- rax    : receiver
    245   //  -- rcx    : name
    246   //  -- rsp[0] : return address
    247   // -----------------------------------
    248   Label miss;
    249 
    250   StubCompiler::GenerateLoadStringLength(masm, rax, rdx, rbx, &miss,
    251                                          support_wrappers);
    252   __ bind(&miss);
    253   StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
    254 }
    255 
    256 
    257 void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
    258   // ----------- S t a t e -------------
    259   //  -- rax    : receiver
    260   //  -- rcx    : name
    261   //  -- rsp[0] : return address
    262   // -----------------------------------
    263   Label miss;
    264 
    265   StubCompiler::GenerateLoadFunctionPrototype(masm, rax, rdx, rbx, &miss);
    266   __ bind(&miss);
    267   StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
    268 }
    269 
    270 
    271 // Checks the receiver for special cases (value type, slow case bits).
    272 // Falls through for regular JS object.
    273 static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
    274                                            Register receiver,
    275                                            Register map,
    276                                            int interceptor_bit,
    277                                            Label* slow) {
    278   // Register use:
    279   //   receiver - holds the receiver and is unchanged.
    280   // Scratch registers:
    281   //   map - used to hold the map of the receiver.
    282 
    283   // Check that the object isn't a smi.
    284   __ JumpIfSmi(receiver, slow);
    285 
    286   // Check that the object is some kind of JS object EXCEPT JS Value type.
    287   // In the case that the object is a value-wrapper object,
    288   // we enter the runtime system to make sure that indexing
    289   // into string objects works as intended.
    290   ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
    291   __ CmpObjectType(receiver, JS_OBJECT_TYPE, map);
    292   __ j(below, slow);
    293 
    294   // Check bit field.
    295   __ testb(FieldOperand(map, Map::kBitFieldOffset),
    296            Immediate((1 << Map::kIsAccessCheckNeeded) |
    297                      (1 << interceptor_bit)));
    298   __ j(not_zero, slow);
    299 }
    300 
    301 
    302 // Loads an indexed element from a fast case array.
    303 // If not_fast_array is NULL, doesn't perform the elements map check.
    304 static void GenerateFastArrayLoad(MacroAssembler* masm,
    305                                   Register receiver,
    306                                   Register key,
    307                                   Register elements,
    308                                   Register scratch,
    309                                   Register result,
    310                                   Label* not_fast_array,
    311                                   Label* out_of_range) {
    312   // Register use:
    313   //
    314   // receiver - holds the receiver on entry.
    315   //            Unchanged unless 'result' is the same register.
    316   //
    317   // key      - holds the smi key on entry.
    318   //            Unchanged unless 'result' is the same register.
    319   //
    320   // elements - holds the elements of the receiver on exit.
    321   //
    322   // result   - holds the result on exit if the load succeeded.
    323   //            Allowed to be the same as 'receiver' or 'key'.
    324   //            Unchanged on bailout so 'receiver' and 'key' can be safely
    325   //            used by further computation.
    326   //
    327   // Scratch registers:
    328   //
    329   //   scratch - used to hold elements of the receiver and the loaded value.
    330 
    331   __ movq(elements, FieldOperand(receiver, JSObject::kElementsOffset));
    332   if (not_fast_array != NULL) {
    333     // Check that the object is in fast mode and writable.
    334     __ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
    335                    Heap::kFixedArrayMapRootIndex);
    336     __ j(not_equal, not_fast_array);
    337   } else {
    338     __ AssertFastElements(elements);
    339   }
    340   // Check that the key (index) is within bounds.
    341   __ SmiCompare(key, FieldOperand(elements, FixedArray::kLengthOffset));
    342   // Unsigned comparison rejects negative indices.
    343   __ j(above_equal, out_of_range);
    344   // Fast case: Do the load.
    345   SmiIndex index = masm->SmiToIndex(scratch, key, kPointerSizeLog2);
    346   __ movq(scratch, FieldOperand(elements,
    347                                 index.reg,
    348                                 index.scale,
    349                                 FixedArray::kHeaderSize));
    350   __ CompareRoot(scratch, Heap::kTheHoleValueRootIndex);
    351   // In case the loaded value is the_hole we have to consult GetProperty
    352   // to ensure the prototype chain is searched.
    353   __ j(equal, out_of_range);
    354   if (!result.is(scratch)) {
    355     __ movq(result, scratch);
    356   }
    357 }
    358 
    359 
    360 // Checks whether a key is an array index string or a symbol string.
    361 // Falls through if the key is a symbol.
    362 static void GenerateKeyStringCheck(MacroAssembler* masm,
    363                                    Register key,
    364                                    Register map,
    365                                    Register hash,
    366                                    Label* index_string,
    367                                    Label* not_symbol) {
    368   // Register use:
    369   //   key - holds the key and is unchanged. Assumed to be non-smi.
    370   // Scratch registers:
    371   //   map - used to hold the map of the key.
    372   //   hash - used to hold the hash of the key.
    373   __ CmpObjectType(key, FIRST_NONSTRING_TYPE, map);
    374   __ j(above_equal, not_symbol);
    375   // Is the string an array index, with cached numeric value?
    376   __ movl(hash, FieldOperand(key, String::kHashFieldOffset));
    377   __ testl(hash, Immediate(String::kContainsCachedArrayIndexMask));
    378   __ j(zero, index_string);  // The value in hash is used at jump target.
    379 
    380   // Is the string a symbol?
    381   STATIC_ASSERT(kSymbolTag != 0);
    382   __ testb(FieldOperand(map, Map::kInstanceTypeOffset),
    383            Immediate(kIsSymbolMask));
    384   __ j(zero, not_symbol);
    385 }
    386 
    387 
    388 
    389 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
    390   // ----------- S t a t e -------------
    391   //  -- rax    : key
    392   //  -- rdx    : receiver
    393   //  -- rsp[0] : return address
    394   // -----------------------------------
    395   Label slow, check_string, index_smi, index_string, property_array_property;
    396   Label probe_dictionary, check_number_dictionary;
    397 
    398   // Check that the key is a smi.
    399   __ JumpIfNotSmi(rax, &check_string);
    400   __ bind(&index_smi);
    401   // Now the key is known to be a smi. This place is also jumped to from below
    402   // where a numeric string is converted to a smi.
    403 
    404   GenerateKeyedLoadReceiverCheck(
    405       masm, rdx, rcx, Map::kHasIndexedInterceptor, &slow);
    406 
    407   // Check the receiver's map to see if it has fast elements.
    408   __ CheckFastElements(rcx, &check_number_dictionary);
    409 
    410   GenerateFastArrayLoad(masm,
    411                         rdx,
    412                         rax,
    413                         rcx,
    414                         rbx,
    415                         rax,
    416                         NULL,
    417                         &slow);
    418   Counters* counters = masm->isolate()->counters();
    419   __ IncrementCounter(counters->keyed_load_generic_smi(), 1);
    420   __ ret(0);
    421 
    422   __ bind(&check_number_dictionary);
    423   __ SmiToInteger32(rbx, rax);
    424   __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
    425 
    426   // Check whether the elements object is a number dictionary.
    427   // rdx: receiver
    428   // rax: key
    429   // rbx: key as untagged int32
    430   // rcx: elements
    431   __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
    432                  Heap::kHashTableMapRootIndex);
    433   __ j(not_equal, &slow);
    434   __ LoadFromNumberDictionary(&slow, rcx, rax, rbx, r9, rdi, rax);
    435   __ ret(0);
    436 
    437   __ bind(&slow);
    438   // Slow case: Jump to runtime.
    439   // rdx: receiver
    440   // rax: key
    441   __ IncrementCounter(counters->keyed_load_generic_slow(), 1);
    442   GenerateRuntimeGetProperty(masm);
    443 
    444   __ bind(&check_string);
    445   GenerateKeyStringCheck(masm, rax, rcx, rbx, &index_string, &slow);
    446 
    447   GenerateKeyedLoadReceiverCheck(
    448       masm, rdx, rcx, Map::kHasNamedInterceptor, &slow);
    449 
    450   // If the receiver is a fast-case object, check the keyed lookup
    451   // cache. Otherwise probe the dictionary leaving result in rcx.
    452   __ movq(rbx, FieldOperand(rdx, JSObject::kPropertiesOffset));
    453   __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
    454                  Heap::kHashTableMapRootIndex);
    455   __ j(equal, &probe_dictionary);
    456 
    457   // Load the map of the receiver, compute the keyed lookup cache hash
    458   // based on 32 bits of the map pointer and the string hash.
    459   __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
    460   __ movl(rcx, rbx);
    461   __ shr(rcx, Immediate(KeyedLookupCache::kMapHashShift));
    462   __ movl(rdi, FieldOperand(rax, String::kHashFieldOffset));
    463   __ shr(rdi, Immediate(String::kHashShift));
    464   __ xor_(rcx, rdi);
    465   int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
    466   __ and_(rcx, Immediate(mask));
    467 
    468   // Load the key (consisting of map and symbol) from the cache and
    469   // check for match.
    470   Label load_in_object_property;
    471   static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
    472   Label hit_on_nth_entry[kEntriesPerBucket];
    473   ExternalReference cache_keys
    474       = ExternalReference::keyed_lookup_cache_keys(masm->isolate());
    475 
    476   for (int i = 0; i < kEntriesPerBucket - 1; i++) {
    477     Label try_next_entry;
    478     __ movq(rdi, rcx);
    479     __ shl(rdi, Immediate(kPointerSizeLog2 + 1));
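            // Each cache entry holds two pointers (the map and the symbol), so
            // the bucket index is scaled by 2 * kPointerSize.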
    480     __ LoadAddress(kScratchRegister, cache_keys);
    481     int off = kPointerSize * i * 2;
    482     __ cmpq(rbx, Operand(kScratchRegister, rdi, times_1, off));
    483     __ j(not_equal, &try_next_entry);
    484     __ cmpq(rax, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
    485     __ j(equal, &hit_on_nth_entry[i]);
    486     __ bind(&try_next_entry);
    487   }
    488 
    489   int off = kPointerSize * (kEntriesPerBucket - 1) * 2;
    490   __ cmpq(rbx, Operand(kScratchRegister, rdi, times_1, off));
    491   __ j(not_equal, &slow);
    492   __ cmpq(rax, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
    493   __ j(not_equal, &slow);
    494 
    495   // Get field offset, which is a 32-bit integer.
    496   ExternalReference cache_field_offsets
    497       = ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
    498 
    499   // Hit on nth entry.
    500   for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
    501     __ bind(&hit_on_nth_entry[i]);
    502     if (i != 0) {
    503       __ addl(rcx, Immediate(i));
    504     }
    505     __ LoadAddress(kScratchRegister, cache_field_offsets);
    506     __ movl(rdi, Operand(kScratchRegister, rcx, times_4, 0));
    507     __ movzxbq(rcx, FieldOperand(rbx, Map::kInObjectPropertiesOffset));
    508     __ subq(rdi, rcx);
    509     __ j(above_equal, &property_array_property);
    510     if (i != 0) {
    511       __ jmp(&load_in_object_property);
    512     }
    513   }
    514 
    515   // Load in-object property.
    516   __ bind(&load_in_object_property);
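          // rdi holds the property index minus the number of in-object
          // properties (negative for in-object fields); adding the instance size
          // in words gives the field's word offset from the start of the object.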
    517   __ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset));
    518   __ addq(rcx, rdi);
    519   __ movq(rax, FieldOperand(rdx, rcx, times_pointer_size, 0));
    520   __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
    521   __ ret(0);
    522 
    523   // Load property array property.
    524   __ bind(&property_array_property);
    525   __ movq(rax, FieldOperand(rdx, JSObject::kPropertiesOffset));
    526   __ movq(rax, FieldOperand(rax, rdi, times_pointer_size,
    527                             FixedArray::kHeaderSize));
    528   __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
    529   __ ret(0);
    530 
    531   // Do a quick inline probe of the receiver's dictionary, if it
    532   // exists.
    533   __ bind(&probe_dictionary);
    534   // rdx: receiver
    535   // rax: key
    536   // rbx: elements
    537 
    538   __ movq(rcx, FieldOperand(rdx, JSObject::kMapOffset));
    539   __ movb(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset));
    540   GenerateGlobalInstanceTypeCheck(masm, rcx, &slow);
    541 
    542   GenerateDictionaryLoad(masm, &slow, rbx, rax, rcx, rdi, rax);
    543   __ IncrementCounter(counters->keyed_load_generic_symbol(), 1);
    544   __ ret(0);
    545 
    546   __ bind(&index_string);
    547   __ IndexFromHash(rbx, rax);
    548   __ jmp(&index_smi);
    549 }
    550 
    551 
    552 void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
    553   // ----------- S t a t e -------------
    554   //  -- rax    : key
    555   //  -- rdx    : receiver
    556   //  -- rsp[0] : return address
    557   // -----------------------------------
    558   Label miss;
    559 
    560   Register receiver = rdx;
    561   Register index = rax;
    562   Register scratch = rcx;
    563   Register result = rax;
    564 
    565   StringCharAtGenerator char_at_generator(receiver,
    566                                           index,
    567                                           scratch,
    568                                           result,
    569                                           &miss,  // When not a string.
    570                                           &miss,  // When not a number.
    571                                           &miss,  // When index out of range.
    572                                           STRING_INDEX_IS_ARRAY_INDEX);
    573   char_at_generator.GenerateFast(masm);
    574   __ ret(0);
    575 
    576   StubRuntimeCallHelper call_helper;
    577   char_at_generator.GenerateSlow(masm, call_helper);
    578 
    579   __ bind(&miss);
    580   GenerateMiss(masm, false);
    581 }
    582 
    583 
    584 void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
    585   // ----------- S t a t e -------------
    586   //  -- rax    : key
    587   //  -- rdx    : receiver
    588   //  -- rsp[0] : return address
    589   // -----------------------------------
    590   Label slow;
    591 
    592   // Check that the receiver isn't a smi.
    593   __ JumpIfSmi(rdx, &slow);
    594 
    595   // Check that the key is an array index, that is, a Uint32.
    596   STATIC_ASSERT(kSmiValueSize <= 32);
    597   __ JumpUnlessNonNegativeSmi(rax, &slow);
    598 
    599   // Get the map of the receiver.
    600   __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
    601 
    602   // Check that it has an indexed interceptor and that access checks
    603   // are not enabled for this object.
    604   __ movb(rcx, FieldOperand(rcx, Map::kBitFieldOffset));
    605   __ andb(rcx, Immediate(kSlowCaseBitFieldMask));
    606   __ cmpb(rcx, Immediate(1 << Map::kHasIndexedInterceptor));
    607   __ j(not_zero, &slow);
    608 
    609   // Everything is fine, call runtime.
    610   __ pop(rcx);
    611   __ push(rdx);  // receiver
    612   __ push(rax);  // key
    613   __ push(rcx);  // return address
    614 
    615   // Perform tail call to the entry.
    616   __ TailCallExternalReference(
    617       ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor),
    618                         masm->isolate()),
    619       2,
    620       1);
    621 
    622   __ bind(&slow);
    623   GenerateMiss(masm, false);
    624 }
    625 
    626 
    627 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
    628                                    StrictModeFlag strict_mode) {
    629   // ----------- S t a t e -------------
    630   //  -- rax     : value
    631   //  -- rcx     : key
    632   //  -- rdx     : receiver
    633   //  -- rsp[0]  : return address
    634   // -----------------------------------
    635   Label slow, slow_with_tagged_index, fast, array, extra, check_extra_double;
    636   Label fast_object_with_map_check, fast_object_without_map_check;
    637   Label fast_double_with_map_check, fast_double_without_map_check;
    638   Label transition_smi_elements, finish_object_store, non_double_value;
    639   Label transition_double_elements;
    640 
    641   // Check that the object isn't a smi.
    642   __ JumpIfSmi(rdx, &slow_with_tagged_index);
    643   // Get the map from the receiver.
    644   __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset));
    645   // Check that the receiver does not require access checks.  We need
    646   // to do this because this generic stub does not perform map checks.
    647   __ testb(FieldOperand(r9, Map::kBitFieldOffset),
    648            Immediate(1 << Map::kIsAccessCheckNeeded));
    649   __ j(not_zero, &slow_with_tagged_index);
    650   // Check that the key is a smi.
    651   __ JumpIfNotSmi(rcx, &slow_with_tagged_index);
    652   __ SmiToInteger32(rcx, rcx);
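          // From here on the key in rcx is an untagged int32; the slow path
          // re-tags it before jumping to the runtime entry.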
    653 
    654   __ CmpInstanceType(r9, JS_ARRAY_TYPE);
    655   __ j(equal, &array);
    656   // Check that the object is some kind of JSObject.
    657   __ CmpInstanceType(r9, FIRST_JS_OBJECT_TYPE);
    658   __ j(below, &slow);
    659 
    660   // Object case: Check key against length in the elements array.
    661   // rax: value
    662   // rdx: JSObject
    663   // rcx: index
    664   __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
    665   // Check array bounds.
    666   __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), rcx);
    667   // rax: value
    668   // rbx: FixedArray
    669   // rcx: index
    670   __ j(above, &fast_object_with_map_check);
    671 
    672   // Slow case: call runtime.
    673   __ bind(&slow);
    674   __ Integer32ToSmi(rcx, rcx);
    675   __ bind(&slow_with_tagged_index);
    676   GenerateRuntimeSetProperty(masm, strict_mode);
    677   // Never returns to here.
    678 
    679   // Extra capacity case: Check if there is extra capacity to
    680   // perform the store and update the length. Used for adding one
    681   // element to the array by writing to array[array.length].
    682   __ bind(&extra);
    683   // rax: value
    684   // rdx: receiver (a JSArray)
    685   // rbx: receiver's elements array (a FixedArray)
    686   // rcx: index
    687   // flags: smicompare (rdx.length(), rcx)
    688   __ j(not_equal, &slow);  // do not leave holes in the array
    689   __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), rcx);
    690   __ j(below_equal, &slow);
    691   // Increment index to get new length.
    692   __ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
    693   __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
    694   __ j(not_equal, &check_extra_double);
    695   __ leal(rdi, Operand(rcx, 1));
    696   __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi);
    697   __ jmp(&fast_object_without_map_check);
    698 
    699   __ bind(&check_extra_double);
    700   // rdi: elements array's map
    701   __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
    702   __ j(not_equal, &slow);
    703   __ leal(rdi, Operand(rcx, 1));
    704   __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rdi);
    705   __ jmp(&fast_double_without_map_check);
    706 
    707   // Array case: Get the length and the elements array from the JS
    708   // array. Check that the array is in fast mode (and writable); if it
    709   // is, the length is always a smi.
    710   __ bind(&array);
    711   // rax: value
    712   // rdx: receiver (a JSArray)
    713   // rcx: index
    714   __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
    715 
    716   // Check the key against the length in the array, compute the
    717   // address to store into and fall through to fast case.
    718   __ SmiCompareInteger32(FieldOperand(rdx, JSArray::kLengthOffset), rcx);
    719   __ j(below_equal, &extra);
    720 
    721   // Fast case: Do the store.
    722   __ bind(&fast_object_with_map_check);
    723   // rax: value
    724   // rbx: receiver's elements array (a FixedArray)
    725   // rcx: index
    726   // rdx: receiver (a JSArray)
    727   __ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
    728   __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
    729   __ j(not_equal, &fast_double_with_map_check);
    730   __ bind(&fast_object_without_map_check);
    731   // Smi stores don't require further checks.
    732   Label non_smi_value;
    733   __ JumpIfNotSmi(rax, &non_smi_value);
    734   // It's irrelevant whether the array is smi-only or not when writing a smi.
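          // No write barrier is needed: a smi is not a heap pointer, so the
          // store cannot create a reference the garbage collector has to track.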
    735   __ movq(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize),
    736           rax);
    737   __ ret(0);
    738 
    739   __ bind(&non_smi_value);
    740   // Writing a non-smi, check whether array allows non-smi elements.
    741   // r9: receiver's map
    742   __ CheckFastObjectElements(r9, &transition_smi_elements);
    743   __ bind(&finish_object_store);
    744   __ movq(FieldOperand(rbx, rcx, times_pointer_size, FixedArray::kHeaderSize),
    745           rax);
    746   __ movq(rdx, rax);  // Preserve the value which is returned.
    747   __ RecordWriteArray(
    748       rbx, rdx, rcx, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
    749   __ ret(0);
    750 
    751   __ bind(&fast_double_with_map_check);
    752   // Check for fast double array case. If this fails, call through to the
    753   // runtime.
    754   // rdi: elements array's map
    755   __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
    756   __ j(not_equal, &slow);
    757   __ bind(&fast_double_without_map_check);
    758   // If the value is a number, store it as a double in the FastDoubleElements
    759   // array.
    760   __ StoreNumberToDoubleElements(rax, rbx, rcx, xmm0,
    761                                  &transition_double_elements);
    762   __ ret(0);
    763 
    764   __ bind(&transition_smi_elements);
    765   __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
    766 
    767   // Transition the array appropriately depending on the value type.
    768   __ movq(r9, FieldOperand(rax, HeapObject::kMapOffset));
    769   __ CompareRoot(r9, Heap::kHeapNumberMapRootIndex);
    770   __ j(not_equal, &non_double_value);
    771 
    772   // Value is a double. Transition FAST_SMI_ONLY_ELEMENTS ->
    773   // FAST_DOUBLE_ELEMENTS and complete the store.
    774   __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
    775                                          FAST_DOUBLE_ELEMENTS,
    776                                          rbx,
    777                                          rdi,
    778                                          &slow);
    779   ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &slow);
    780   __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
    781   __ jmp(&fast_double_without_map_check);
    782 
    783   __ bind(&non_double_value);
    784   // Value is not a double, FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
    785   __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
    786                                          FAST_ELEMENTS,
    787                                          rbx,
    788                                          rdi,
    789                                          &slow);
    790   ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
    791   __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
    792   __ jmp(&finish_object_store);
    793 
    794   __ bind(&transition_double_elements);
    795   // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
    796   // HeapNumber. Make sure that the receiver is an Array with FAST_ELEMENTS and
    797   // transition the array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS.
    798   __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
    799   __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
    800                                          FAST_ELEMENTS,
    801                                          rbx,
    802                                          rdi,
    803                                          &slow);
    804   ElementsTransitionGenerator::GenerateDoubleToObject(masm, &slow);
    805   __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
    806   __ jmp(&finish_object_store);
    807 }
    808 
    809 
    810 // The generated code does not accept smi keys.
    811 // The generated code falls through if both probes miss.
    812 void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
    813                                                int argc,
    814                                                Code::Kind kind,
    815                                                Code::ExtraICState extra_state) {
    816   // ----------- S t a t e -------------
    817   // rcx                      : function name
    818   // rdx                      : receiver
    819   // -----------------------------------
    820   Label number, non_number, non_string, boolean, probe, miss;
    821 
    822   // Probe the stub cache.
    823   Code::Flags flags = Code::ComputeFlags(kind,
    824                                          MONOMORPHIC,
    825                                          extra_state,
    826                                          NORMAL,
    827                                          argc);
    828   Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
    829                                                   rax);
    830 
    831   // If the stub cache probing failed, the receiver might be a value.
    832   // For value objects, we use the map of the prototype objects for
    833   // the corresponding JSValue for the cache and that is what we need
    834   // to probe.
    835   //
    836   // Check for number.
    837   __ JumpIfSmi(rdx, &number);
    838   __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rbx);
    839   __ j(not_equal, &non_number);
    840   __ bind(&number);
    841   StubCompiler::GenerateLoadGlobalFunctionPrototype(
    842       masm, Context::NUMBER_FUNCTION_INDEX, rdx);
    843   __ jmp(&probe);
    844 
    845   // Check for string.
    846   __ bind(&non_number);
    847   __ CmpInstanceType(rbx, FIRST_NONSTRING_TYPE);
    848   __ j(above_equal, &non_string);
    849   StubCompiler::GenerateLoadGlobalFunctionPrototype(
    850       masm, Context::STRING_FUNCTION_INDEX, rdx);
    851   __ jmp(&probe);
    852 
    853   // Check for boolean.
    854   __ bind(&non_string);
    855   __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
    856   __ j(equal, &boolean);
    857   __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
    858   __ j(not_equal, &miss);
    859   __ bind(&boolean);
    860   StubCompiler::GenerateLoadGlobalFunctionPrototype(
    861       masm, Context::BOOLEAN_FUNCTION_INDEX, rdx);
    862 
    863   // Probe the stub cache for the value object.
    864   __ bind(&probe);
    865   Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
    866                                                   no_reg);
    867 
    868   __ bind(&miss);
    869 }
    870 
    871 
    872 static void GenerateFunctionTailCall(MacroAssembler* masm,
    873                                      int argc,
    874                                      Label* miss) {
    875   // ----------- S t a t e -------------
    876   // rcx                    : function name
    877   // rdi                    : function
    878   // rsp[0]                 : return address
    879   // rsp[8]                 : argument argc
    880   // rsp[16]                : argument argc - 1
    881   // ...
    882   // rsp[argc * 8]          : argument 1
    883   // rsp[(argc + 1) * 8]    : argument 0 = receiver
    884   // -----------------------------------
    885   __ JumpIfSmi(rdi, miss);
    886   // Check that the value is a JavaScript function.
    887   __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rdx);
    888   __ j(not_equal, miss);
    889 
    890   // Invoke the function.
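          // JUMP_FUNCTION makes this a tail call: control does not return here.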
    891   ParameterCount actual(argc);
    892   __ InvokeFunction(rdi, actual, JUMP_FUNCTION,
    893                     NullCallWrapper(), CALL_AS_METHOD);
    894 }
    895 
    896 
    897 // The generated code falls through if the call should be handled by runtime.
    898 void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) {
    899   // ----------- S t a t e -------------
    900   // rcx                    : function name
    901   // rsp[0]                 : return address
    902   // rsp[8]                 : argument argc
    903   // rsp[16]                : argument argc - 1
    904   // ...
    905   // rsp[argc * 8]          : argument 1
    906   // rsp[(argc + 1) * 8]    : argument 0 = receiver
    907   // -----------------------------------
    908   Label miss;
    909 
    910   // Get the receiver of the function from the stack.
    911   __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
    912 
    913   GenerateStringDictionaryReceiverCheck(masm, rdx, rax, rbx, &miss);
    914 
    915   // rax: elements
    916   // Search the dictionary placing the result in rdi.
    917   GenerateDictionaryLoad(masm, &miss, rax, rcx, rbx, rdi, rdi);
    918 
    919   GenerateFunctionTailCall(masm, argc, &miss);
    920 
    921   __ bind(&miss);
    922 }
    923 
    924 
    925 void CallICBase::GenerateMiss(MacroAssembler* masm,
    926                               int argc,
    927                               IC::UtilityId id,
    928                               Code::ExtraICState extra_state) {
    929   // ----------- S t a t e -------------
    930   // rcx                      : function name
    931   // rsp[0]                   : return address
    932   // rsp[8]                   : argument argc
    933   // rsp[16]                  : argument argc - 1
    934   // ...
    935   // rsp[argc * 8]            : argument 1
    936   // rsp[(argc + 1) * 8]      : argument 0 = receiver
    937   // -----------------------------------
    938 
    939   Counters* counters = masm->isolate()->counters();
    940   if (id == IC::kCallIC_Miss) {
    941     __ IncrementCounter(counters->call_miss(), 1);
    942   } else {
    943     __ IncrementCounter(counters->keyed_call_miss(), 1);
    944   }
    945 
    946   // Get the receiver of the function from the stack; 1 ~ return address.
    947   __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
    948 
    949   // Enter an internal frame.
    950   {
    951     FrameScope scope(masm, StackFrame::INTERNAL);
    952 
    953     // Push the receiver and the name of the function.
    954     __ push(rdx);
    955     __ push(rcx);
    956 
    957     // Call the entry.
    958     CEntryStub stub(1);
    959     __ Set(rax, 2);
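            // rax holds the argument count for the C entry: the receiver and the
            // name pushed above.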
    960     __ LoadAddress(rbx, ExternalReference(IC_Utility(id), masm->isolate()));
    961     __ CallStub(&stub);
    962 
    963     // Move result to rdi and exit the internal frame.
    964     __ movq(rdi, rax);
    965   }
    966 
    967   // Check if the receiver is a global object of some sort.
    968   // This can happen only for regular CallIC but not KeyedCallIC.
    969   if (id == IC::kCallIC_Miss) {
    970     Label invoke, global;
    971     __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));  // receiver
    972     __ JumpIfSmi(rdx, &invoke);
    973     __ CmpObjectType(rdx, JS_GLOBAL_OBJECT_TYPE, rcx);
    974     __ j(equal, &global);
    975     __ CmpInstanceType(rcx, JS_BUILTINS_OBJECT_TYPE);
    976     __ j(not_equal, &invoke);
    977 
    978     // Patch the receiver on the stack.
    979     __ bind(&global);
    980     __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
    981     __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
    982     __ bind(&invoke);
    983   }
    984 
    985   // Invoke the function.
    986   CallKind call_kind = CallICBase::Contextual::decode(extra_state)
    987       ? CALL_AS_FUNCTION
    988       : CALL_AS_METHOD;
    989   ParameterCount actual(argc);
    990   __ InvokeFunction(rdi,
    991                     actual,
    992                     JUMP_FUNCTION,
    993                     NullCallWrapper(),
    994                     call_kind);
    995 }
    996 
    997 
    998 void CallIC::GenerateMegamorphic(MacroAssembler* masm,
    999                                  int argc,
   1000                                  Code::ExtraICState extra_ic_state) {
   1001   // ----------- S t a t e -------------
   1002   // rcx                      : function name
   1003   // rsp[0]                   : return address
   1004   // rsp[8]                   : argument argc
   1005   // rsp[16]                  : argument argc - 1
   1006   // ...
   1007   // rsp[argc * 8]            : argument 1
   1008   // rsp[(argc + 1) * 8]      : argument 0 = receiver
   1009   // -----------------------------------
   1010 
   1011   // Get the receiver of the function from the stack; 1 ~ return address.
   1012   __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
   1013   GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state);
   1014   GenerateMiss(masm, argc, extra_ic_state);
   1015 }
   1016 
   1017 
   1018 void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
   1019   // ----------- S t a t e -------------
   1020   // rcx                      : function name
   1021   // rsp[0]                   : return address
   1022   // rsp[8]                   : argument argc
   1023   // rsp[16]                  : argument argc - 1
   1024   // ...
   1025   // rsp[argc * 8]            : argument 1
   1026   // rsp[(argc + 1) * 8]      : argument 0 = receiver
   1027   // -----------------------------------
   1028 
   1029   // Get the receiver of the function from the stack; 1 ~ return address.
   1030   __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
   1031 
   1032   Label do_call, slow_call, slow_load;
   1033   Label check_number_dictionary, check_string, lookup_monomorphic_cache;
   1034   Label index_smi, index_string;
   1035 
   1036   // Check that the key is a smi.
   1037   __ JumpIfNotSmi(rcx, &check_string);
   1038 
   1039   __ bind(&index_smi);
   1040   // Now the key is known to be a smi. This place is also jumped to from below
   1041   // where a numeric string is converted to a smi.
   1042 
   1043   GenerateKeyedLoadReceiverCheck(
   1044       masm, rdx, rax, Map::kHasIndexedInterceptor, &slow_call);
   1045 
   1046   GenerateFastArrayLoad(
   1047       masm, rdx, rcx, rax, rbx, rdi, &check_number_dictionary, &slow_load);
   1048   Counters* counters = masm->isolate()->counters();
   1049   __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1);
   1050 
   1051   __ bind(&do_call);
   1052   // receiver in rdx is not used after this point.
   1053   // rcx: key
   1054   // rdi: function
   1055   GenerateFunctionTailCall(masm, argc, &slow_call);
   1056 
   1057   __ bind(&check_number_dictionary);
   1058   // rax: elements
   1059   // rcx: smi key
   1060   // Check whether the elements object is a number dictionary.
   1061   __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
   1062                  Heap::kHashTableMapRootIndex);
   1063   __ j(not_equal, &slow_load);
   1064   __ SmiToInteger32(rbx, rcx);
   1065   // rbx: untagged index
   1066   __ LoadFromNumberDictionary(&slow_load, rax, rcx, rbx, r9, rdi, rdi);
   1067   __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1);
   1068   __ jmp(&do_call);
   1069 
   1070   __ bind(&slow_load);
   1071   // This branch is taken when calling KeyedCallIC_Miss is neither required
   1072   // nor beneficial.
   1073   __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1);
   1074   {
   1075     FrameScope scope(masm, StackFrame::INTERNAL);
   1076     __ push(rcx);  // save the key
   1077     __ push(rdx);  // pass the receiver
   1078     __ push(rcx);  // pass the key
   1079     __ CallRuntime(Runtime::kKeyedGetProperty, 2);
   1080     __ pop(rcx);  // restore the key
   1081   }
   1082   __ movq(rdi, rax);
   1083   __ jmp(&do_call);
   1084 
   1085   __ bind(&check_string);
   1086   GenerateKeyStringCheck(masm, rcx, rax, rbx, &index_string, &slow_call);
   1087 
   1088   // The key is known to be a symbol.
   1089   // If the receiver is a regular JS object with slow properties then do
   1090   // a quick inline probe of the receiver's dictionary.
   1091   // Otherwise do the monomorphic cache probe.
   1092   GenerateKeyedLoadReceiverCheck(
   1093       masm, rdx, rax, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);
   1094 
   1095   __ movq(rbx, FieldOperand(rdx, JSObject::kPropertiesOffset));
   1096   __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
   1097                  Heap::kHashTableMapRootIndex);
   1098   __ j(not_equal, &lookup_monomorphic_cache);
   1099 
   1100   GenerateDictionaryLoad(masm, &slow_load, rbx, rcx, rax, rdi, rdi);
   1101   __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1);
   1102   __ jmp(&do_call);
   1103 
   1104   __ bind(&lookup_monomorphic_cache);
   1105   __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1);
   1106   GenerateMonomorphicCacheProbe(masm,
   1107                                 argc,
   1108                                 Code::KEYED_CALL_IC,
   1109                                 Code::kNoExtraICState);
   1110   // Fall through on miss.
   1111 
   1112   __ bind(&slow_call);
   1113   // This branch is taken if:
   1114   // - the receiver requires boxing or access check,
   1115   // - the key is neither smi nor symbol,
   1116   // - the value loaded is not a function,
   1117   // - there is hope that the runtime will create a monomorphic call stub
   1118   //   that will get fetched next time.
   1119   __ IncrementCounter(counters->keyed_call_generic_slow(), 1);
   1120   GenerateMiss(masm, argc);
   1121 
   1122   __ bind(&index_string);
   1123   __ IndexFromHash(rbx, rcx);
   1124   // Now jump to the place where smi keys are handled.
   1125   __ jmp(&index_smi);
   1126 }
   1127 
   1128 
   1129 void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
   1130   // ----------- S t a t e -------------
   1131   // rcx                      : function name
   1132   // rsp[0]                   : return address
   1133   // rsp[8]                   : argument argc
   1134   // rsp[16]                  : argument argc - 1
   1135   // ...
   1136   // rsp[argc * 8]            : argument 1
   1137   // rsp[(argc + 1) * 8]      : argument 0 = receiver
   1138   // -----------------------------------
   1139 
   1140   // Check if the name is a string.
   1141   Label miss;
   1142   __ JumpIfSmi(rcx, &miss);
   1143   Condition cond = masm->IsObjectStringType(rcx, rax, rax);
   1144   __ j(NegateCondition(cond), &miss);
   1145   CallICBase::GenerateNormal(masm, argc);
   1146   __ bind(&miss);
   1147   GenerateMiss(masm, argc);
   1148 }
   1149 
   1150 
   1151 static Operand GenerateMappedArgumentsLookup(MacroAssembler* masm,
   1152                                              Register object,
   1153                                              Register key,
   1154                                              Register scratch1,
   1155                                              Register scratch2,
   1156                                              Register scratch3,
   1157                                              Label* unmapped_case,
   1158                                              Label* slow_case) {
   1159   Heap* heap = masm->isolate()->heap();
   1160 
   1161   // Check that the receiver is a JSObject. Because of the elements
   1162   // map check later, we do not need to check for interceptors or
   1163   // whether it requires access checks.
   1164   __ JumpIfSmi(object, slow_case);
   1165   // Check that the object is some kind of JSObject.
   1166   __ CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, scratch1);
   1167   __ j(below, slow_case);
   1168 
   1169   // Check that the key is a positive smi.
   1170   Condition check = masm->CheckNonNegativeSmi(key);
   1171   __ j(NegateCondition(check), slow_case);
   1172 
   1173   // Load the elements into scratch1 and check its map. If not, jump
   1174   // to the unmapped lookup with the parameter map in scratch1.
   1175   Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
   1176   __ movq(scratch1, FieldOperand(object, JSObject::kElementsOffset));
   1177   __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK);
   1178 
   1179   // Check if element is in the range of mapped arguments.
   1180   __ movq(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset));
   1181   __ SmiSubConstant(scratch2, scratch2, Smi::FromInt(2));
   1182   __ cmpq(key, scratch2);
   1183   __ j(greater_equal, unmapped_case);
   1184 
   1185   // Load element index and check whether it is the hole.
   1186   const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize;
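          // The first two slots of the parameter map hold the context and the
          // arguments backing store, so the mapped entries start two pointers in.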
   1187   __ SmiToInteger64(scratch3, key);
   1188   __ movq(scratch2, FieldOperand(scratch1,
   1189                                  scratch3,
   1190                                  times_pointer_size,
   1191                                  kHeaderSize));
   1192   __ CompareRoot(scratch2, Heap::kTheHoleValueRootIndex);
   1193   __ j(equal, unmapped_case);
   1194 
   1195   // Load value from context and return it. We can reuse scratch1 because
   1196   // we do not jump to the unmapped lookup (which requires the parameter
   1197   // map in scratch1).
   1198   __ movq(scratch1, FieldOperand(scratch1, FixedArray::kHeaderSize));
   1199   __ SmiToInteger64(scratch3, scratch2);
   1200   return FieldOperand(scratch1,
   1201                       scratch3,
   1202                       times_pointer_size,
   1203                       Context::kHeaderSize);
   1204 }
   1205 
   1206 
   1207 static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
   1208                                                Register key,
   1209                                                Register parameter_map,
   1210                                                Register scratch,
   1211                                                Label* slow_case) {
   1212   // Element is in the arguments backing store, which is referenced by the
   1213   // second element of the parameter_map. The parameter_map register
   1214   // must be loaded with the parameter map of the arguments object and is
   1215   // overwritten.
   1216   const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
   1217   Register backing_store = parameter_map;
   1218   __ movq(backing_store, FieldOperand(parameter_map, kBackingStoreOffset));
   1219   Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
   1220   __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK);
   1221   __ movq(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset));
   1222   __ cmpq(key, scratch);
   1223   __ j(greater_equal, slow_case);
   1224   __ SmiToInteger64(scratch, key);
   1225   return FieldOperand(backing_store,
   1226                       scratch,
   1227                       times_pointer_size,
   1228                       FixedArray::kHeaderSize);
   1229 }
   1230 
   1231 
   1232 void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
   1233   // ----------- S t a t e -------------
   1234   //  -- rax    : key
   1235   //  -- rdx    : receiver
   1236   //  -- rsp[0] : return address
   1237   // -----------------------------------
   1238   Label slow, notin;
   1239   Operand mapped_location =
   1240       GenerateMappedArgumentsLookup(
   1241           masm, rdx, rax, rbx, rcx, rdi, &notin, &slow);
   1242   __ movq(rax, mapped_location);
   1243   __ Ret();
   1244   __ bind(&notin);
   1245   // The unmapped lookup expects that the parameter map is in rbx.
   1246   Operand unmapped_location =
   1247       GenerateUnmappedArgumentsLookup(masm, rax, rbx, rcx, &slow);
   1248   __ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex);
   1249   __ j(equal, &slow);
   1250   __ movq(rax, unmapped_location);
   1251   __ Ret();
   1252   __ bind(&slow);
   1253   GenerateMiss(masm, false);
   1254 }
   1255 
   1256 
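          // Store counterpart of the lookup above: write through the parameter map
          // when the key is mapped, otherwise into the arguments backing store, and
          // emit a write barrier for the stored value in either case.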
   1257 void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
   1258   // ----------- S t a t e -------------
   1259   //  -- rax     : value
   1260   //  -- rcx     : key
   1261   //  -- rdx     : receiver
   1262   //  -- rsp[0]  : return address
   1263   // -----------------------------------
   1264   Label slow, notin;
   1265   Operand mapped_location = GenerateMappedArgumentsLookup(
   1266       masm, rdx, rcx, rbx, rdi, r8, &notin, &slow);
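          // Store the value, then run the write barrier: r9 receives the slot
          // address and the value is copied into r8 so that rax still holds it on
          // return; rbx is the object owning the slot (the context here).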
   1267   __ movq(mapped_location, rax);
   1268   __ lea(r9, mapped_location);
   1269   __ movq(r8, rax);
   1270   __ RecordWrite(rbx,
   1271                  r9,
   1272                  r8,
   1273                  kDontSaveFPRegs,
   1274                  EMIT_REMEMBERED_SET,
   1275                  INLINE_SMI_CHECK);
   1276   __ Ret();
   1277   __ bind(&notin);
   1278   // The unmapped lookup expects that the parameter map is in rbx.
   1279   Operand unmapped_location =
   1280       GenerateUnmappedArgumentsLookup(masm, rcx, rbx, rdi, &slow);
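          // Same store-plus-write-barrier sequence; here rbx holds the arguments
          // backing store that owns the slot.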
   1281   __ movq(unmapped_location, rax);
   1282   __ lea(r9, unmapped_location);
   1283   __ movq(r8, rax);
   1284   __ RecordWrite(rbx,
   1285                  r9,
   1286                  r8,
   1287                  kDontSaveFPRegs,
   1288                  EMIT_REMEMBERED_SET,
   1289                  INLINE_SMI_CHECK);
   1290   __ Ret();
   1291   __ bind(&slow);
   1292   GenerateMiss(masm, false);
   1293 }
   1294 
   1295 
   1296 void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm,
   1297                                              int argc) {
   1298   // ----------- S t a t e -------------
   1299   // rcx                      : function name
   1300   // rsp[0]                   : return address
   1301   // rsp[8]                   : argument argc
   1302   // rsp[16]                  : argument argc - 1
   1303   // ...
   1304   // rsp[argc * 8]            : argument 1
   1305   // rsp[(argc + 1) * 8]      : argument 0 = receiver
   1306   // -----------------------------------
   1307   Label slow, notin;
   1308   __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
   1309   Operand mapped_location = GenerateMappedArgumentsLookup(
   1310       masm, rdx, rcx, rbx, rax, r8, &notin, &slow);
   1311   __ movq(rdi, mapped_location);
   1312   GenerateFunctionTailCall(masm, argc, &slow);
   1313   __ bind(&notin);
   1314   // The unmapped lookup expects that the parameter map is in rbx.
   1315   Operand unmapped_location =
   1316       GenerateUnmappedArgumentsLookup(masm, rcx, rbx, rax, &slow);
   1317   __ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex);
   1318   __ j(equal, &slow);
   1319   __ movq(rdi, unmapped_location);
   1320   GenerateFunctionTailCall(masm, argc, &slow);
   1321   __ bind(&slow);
   1322   GenerateMiss(masm, argc);
   1323 }
   1324 
   1325 
   1326 void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
   1327   // ----------- S t a t e -------------
   1328   //  -- rax    : receiver
   1329   //  -- rcx    : name
   1330   //  -- rsp[0] : return address
   1331   // -----------------------------------
   1332 
   1333   // Probe the stub cache.
   1334   Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC, MONOMORPHIC);
   1335   Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rax, rcx, rbx,
   1336                                                   rdx);
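          // A probe hit jumps straight to the cached handler; a miss falls through.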
   1337 
   1338   // Cache miss: Jump to runtime.
   1339   StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
   1340 }
   1341 
   1342 
   1343 void LoadIC::GenerateNormal(MacroAssembler* masm) {
   1344   // ----------- S t a t e -------------
   1345   //  -- rax    : receiver
   1346   //  -- rcx    : name
   1347   //  -- rsp[0] : return address
   1348   // -----------------------------------
   1349   Label miss;
   1350 
   1351   GenerateStringDictionaryReceiverCheck(masm, rax, rdx, rbx, &miss);
   1352 
   1353   //  rdx: elements
   1354   // Search the dictionary placing the result in rax.
   1355   GenerateDictionaryLoad(masm, &miss, rdx, rcx, rbx, rdi, rax);
   1356   __ ret(0);
   1357 
   1358   // Cache miss: Jump to runtime.
   1359   __ bind(&miss);
   1360   GenerateMiss(masm);
   1361 }
   1362 
   1363 
   1364 void LoadIC::GenerateMiss(MacroAssembler* masm) {
   1365   // ----------- S t a t e -------------
   1366   //  -- rax    : receiver
   1367   //  -- rcx    : name
   1368   //  -- rsp[0] : return address
   1369   // -----------------------------------
   1370 
   1371   Counters* counters = masm->isolate()->counters();
   1372   __ IncrementCounter(counters->load_miss(), 1);
   1373 
   1374   __ pop(rbx);
   1375   __ push(rax);  // receiver
   1376   __ push(rcx);  // name
   1377   __ push(rbx);  // return address
   1378 
   1379   // Perform tail call to the entry.
   1380   ExternalReference ref =
   1381       ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate());
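          // Tail call with the two arguments pushed above (receiver and name) and
          // a single return value.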
   1382   __ TailCallExternalReference(ref, 2, 1);
   1383 }
   1384 
   1385 
   1386 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
   1387   // ----------- S t a t e -------------
   1388   //  -- rax    : key
   1389   //  -- rdx    : receiver
   1390   //  -- rsp[0] : return address
   1391   // -----------------------------------
   1392 
   1393   Counters* counters = masm->isolate()->counters();
   1394   __ IncrementCounter(counters->keyed_load_miss(), 1);
   1395 
   1396   __ pop(rbx);
   1397   __ push(rdx);  // receiver
   1398   __ push(rax);  // name
   1399   __ push(rbx);  // return address
   1400 
   1401   // Perform tail call to the entry.
   1402   ExternalReference ref = force_generic
   1403       ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric),
   1404                           masm->isolate())
   1405       : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
   1406   __ TailCallExternalReference(ref, 2, 1);
   1407 }
   1408 
   1409 
   1410 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
   1411   // ----------- S t a t e -------------
   1412   //  -- rax    : key
   1413   //  -- rdx    : receiver
   1414   //  -- rsp[0] : return address
   1415   // -----------------------------------
   1416 
   1417   __ pop(rbx);
   1418   __ push(rdx);  // receiver
   1419   __ push(rax);  // name
   1420   __ push(rbx);  // return address
   1421 
   1422   // Perform tail call to the entry.
   1423   __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
   1424 }
   1425 
   1426 
   1427 void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
   1428                                   StrictModeFlag strict_mode) {
   1429   // ----------- S t a t e -------------
   1430   //  -- rax    : value
   1431   //  -- rcx    : name
   1432   //  -- rdx    : receiver
   1433   //  -- rsp[0] : return address
   1434   // -----------------------------------
   1435 
   1436   // The receiver is in rdx; probe the stub cache.
   1437   Code::Flags flags =
   1438       Code::ComputeFlags(Code::STORE_IC, MONOMORPHIC, strict_mode);
   1439   Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
   1440                                                   no_reg);
   1441 
   1442   // Cache miss: Jump to runtime.
   1443   GenerateMiss(masm);
   1444 }
   1445 
   1446 
   1447 void StoreIC::GenerateMiss(MacroAssembler* masm) {
   1448   // ----------- S t a t e -------------
   1449   //  -- rax    : value
   1450   //  -- rcx    : name
   1451   //  -- rdx    : receiver
   1452   //  -- rsp[0] : return address
   1453   // -----------------------------------
   1454 
   1455   __ pop(rbx);
   1456   __ push(rdx);  // receiver
   1457   __ push(rcx);  // name
   1458   __ push(rax);  // value
   1459   __ push(rbx);  // return address
   1460 
   1461   // Perform tail call to the entry.
   1462   ExternalReference ref =
   1463       ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
   1464   __ TailCallExternalReference(ref, 3, 1);
   1465 }
   1466 
   1467 
   1468 void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
   1469   // ----------- S t a t e -------------
   1470   //  -- rax    : value
   1471   //  -- rcx    : name
   1472   //  -- rdx    : receiver
   1473   //  -- rsp[0] : return address
   1474   // -----------------------------------
   1475   //
   1476   // This accepts as a receiver anything JSArray::SetElementsLength accepts
   1477   // (currently anything except external arrays, i.e. anything whose elements
   1478   // are of FixedArray type). The value must be a number, but only smis are
   1479   // handled here since they are the most common case.
   1480 
   1481   Label miss;
   1482 
   1483   Register receiver = rdx;
   1484   Register value = rax;
   1485   Register scratch = rbx;
   1486 
   1487   // Check that the receiver isn't a smi.
   1488   __ JumpIfSmi(receiver, &miss);
   1489 
   1490   // Check that the object is a JS array.
   1491   __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
   1492   __ j(not_equal, &miss);
   1493 
   1494   // Check that elements are FixedArray.
   1495   // We rely on StoreIC_ArrayLength below to deal with all types of
   1496   // fast elements (including COW).
   1497   __ movq(scratch, FieldOperand(receiver, JSArray::kElementsOffset));
   1498   __ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch);
   1499   __ j(not_equal, &miss);
   1500 
   1501   // Check that the array has fast properties, otherwise the length
   1502   // property might have been redefined.
   1503   __ movq(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset));
   1504   __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset),
   1505                  Heap::kHashTableMapRootIndex);
   1506   __ j(equal, &miss);
   1507 
   1508   // Check that value is a smi.
   1509   __ JumpIfNotSmi(value, &miss);
   1510 
   1511   // Prepare tail call to StoreIC_ArrayLength.
   1512   __ pop(scratch);
   1513   __ push(receiver);
   1514   __ push(value);
   1515   __ push(scratch);  // return address
   1516 
   1517   ExternalReference ref =
   1518       ExternalReference(IC_Utility(kStoreIC_ArrayLength), masm->isolate());
   1519   __ TailCallExternalReference(ref, 2, 1);
   1520 
   1521   __ bind(&miss);
   1522 
   1523   GenerateMiss(masm);
   1524 }
   1525 
   1526 
   1527 void StoreIC::GenerateNormal(MacroAssembler* masm) {
   1528   // ----------- S t a t e -------------
   1529   //  -- rax    : value
   1530   //  -- rcx    : name
   1531   //  -- rdx    : receiver
   1532   //  -- rsp[0] : return address
   1533   // -----------------------------------
   1534 
   1535   Label miss;
   1536 
   1537   GenerateStringDictionaryReceiverCheck(masm, rdx, rbx, rdi, &miss);
   1538 
   1539   GenerateDictionaryStore(masm, &miss, rbx, rcx, rax, r8, r9);
   1540   Counters* counters = masm->isolate()->counters();
   1541   __ IncrementCounter(counters->store_normal_hit(), 1);
   1542   __ ret(0);
   1543 
   1544   __ bind(&miss);
   1545   __ IncrementCounter(counters->store_normal_miss(), 1);
   1546   GenerateMiss(masm);
   1547 }
   1548 
   1549 
   1550 void StoreIC::GenerateGlobalProxy(MacroAssembler* masm,
   1551                                   StrictModeFlag strict_mode) {
   1552   // ----------- S t a t e -------------
   1553   //  -- rax    : value
   1554   //  -- rcx    : name
   1555   //  -- rdx    : receiver
   1556   //  -- rsp[0] : return address
   1557   // -----------------------------------
   1558   __ pop(rbx);
   1559   __ push(rdx);  // receiver
   1560   __ push(rcx);  // name
   1561   __ push(rax);  // value
   1562   __ Push(Smi::FromInt(NONE));  // PropertyAttributes
   1563   __ Push(Smi::FromInt(strict_mode));
   1564   __ push(rbx);  // return address
   1565 
   1566   // Do tail-call to runtime routine.
   1567   __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
   1568 }
   1569 
   1570 
   1571 void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
   1572                                               StrictModeFlag strict_mode) {
   1573   // ----------- S t a t e -------------
   1574   //  -- rax     : value
   1575   //  -- rcx     : key
   1576   //  -- rdx     : receiver
   1577   //  -- rsp[0]  : return address
   1578   // -----------------------------------
   1579 
   1580   __ pop(rbx);
   1581   __ push(rdx);  // receiver
   1582   __ push(rcx);  // key
   1583   __ push(rax);  // value
   1584   __ Push(Smi::FromInt(NONE));          // PropertyAttributes
   1585   __ Push(Smi::FromInt(strict_mode));   // Strict mode.
   1586   __ push(rbx);  // return address
   1587 
   1588   // Do tail-call to runtime routine.
   1589   __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
   1590 }
   1591 
   1592 
   1593 void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
   1594   // ----------- S t a t e -------------
   1595   //  -- rax     : value
   1596   //  -- rcx     : key
   1597   //  -- rdx     : receiver
   1598   //  -- rsp[0]  : return address
   1599   // -----------------------------------
   1600 
   1601   __ pop(rbx);
   1602   __ push(rdx);  // receiver
   1603   __ push(rcx);  // key
   1604   __ push(rax);  // value
   1605   __ push(rbx);  // return address
   1606 
   1607   // Do tail-call to runtime routine.
   1608   ExternalReference ref(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
   1609   __ TailCallExternalReference(ref, 3, 1);
   1610 }
   1611 
   1612 
   1613 void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
   1614   // ----------- S t a t e -------------
   1615   //  -- rax     : value
   1616   //  -- rcx     : key
   1617   //  -- rdx     : receiver
   1618   //  -- rsp[0]  : return address
   1619   // -----------------------------------
   1620 
   1621   __ pop(rbx);
   1622   __ push(rdx);  // receiver
   1623   __ push(rcx);  // key
   1624   __ push(rax);  // value
   1625   __ push(rbx);  // return address
   1626 
   1627   // Do tail-call to runtime routine.
   1628   ExternalReference ref = force_generic
   1629     ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
   1630                         masm->isolate())
   1631     : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
   1632   __ TailCallExternalReference(ref, 3, 1);
   1633 }
   1634 
   1635 
   1636 void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
   1637   // ----------- S t a t e -------------
   1638   //  -- rbx     : target map
   1639   //  -- rdx     : receiver
   1640   //  -- rsp[0]  : return address
   1641   // -----------------------------------
   1642   // Must return the modified receiver in rax.
   1643   if (!FLAG_trace_elements_transitions) {
   1644     Label fail;
   1645     ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail);
   1646     __ movq(rax, rdx);
   1647     __ Ret();
   1648     __ bind(&fail);
   1649   }
   1650 
   1651   __ pop(rbx);
   1652   __ push(rdx);
   1653   __ push(rbx);  // return address
   1654   __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1);
   1655 }
   1656 
   1657 
   1658 void KeyedStoreIC::GenerateTransitionElementsDoubleToObject(
   1659     MacroAssembler* masm) {
   1660   // ----------- S t a t e -------------
   1661   //  -- rbx     : target map
   1662   //  -- rdx     : receiver
   1663   //  -- rsp[0]  : return address
   1664   // -----------------------------------
   1665   // Must return the modified receiver in rax.
   1666   if (!FLAG_trace_elements_transitions) {
   1667     Label fail;
   1668     ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail);
   1669     __ movq(rax, rdx);
   1670     __ Ret();
   1671     __ bind(&fail);
   1672   }
   1673 
   1674   __ pop(rbx);
   1675   __ push(rdx);
   1676   __ push(rbx);  // return address
   1677   __ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1);
   1678 }
   1679 
   1680 
   1681 #undef __
   1682 
   1683 
   1684 Condition CompareIC::ComputeCondition(Token::Value op) {
   1685   switch (op) {
   1686     case Token::EQ_STRICT:
   1687     case Token::EQ:
   1688       return equal;
   1689     case Token::LT:
   1690       return less;
   1691     case Token::GT:
   1692       return greater;
   1693     case Token::LTE:
   1694       return less_equal;
   1695     case Token::GTE:
   1696       return greater_equal;
   1697     default:
   1698       UNREACHABLE();
   1699       return no_condition;
   1700   }
   1701 }
   1702 
   1703 
   1704 static bool HasInlinedSmiCode(Address address) {
   1705   // The address of the instruction following the call.
   1706   Address test_instruction_address =
   1707       address + Assembler::kCallTargetAddressOffset;
   1708 
   1709   // If the instruction following the call is not a test al instruction,
   1710   // nothing was inlined.
   1711   return *test_instruction_address == Assembler::kTestAlByte;
   1712 }
   1713 
   1714 
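          // Picks a comparison stub that matches the operand types observed so far
          // and installs it as the new IC target: GENERIC uses the plain
          // CompareStub, every other state a specialized ICCompareStub.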
   1715 void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
   1716   HandleScope scope;
   1717   Handle<Code> rewritten;
   1718   State previous_state = GetState();
   1719 
   1720   State state = TargetState(previous_state, HasInlinedSmiCode(address()), x, y);
   1721   if (state == GENERIC) {
   1722     CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
   1723     rewritten = stub.GetCode();
   1724   } else {
   1725     ICCompareStub stub(op_, state);
   1726     if (state == KNOWN_OBJECTS) {
   1727       stub.set_known_map(Handle<Map>(Handle<JSObject>::cast(x)->map()));
   1728     }
   1729     rewritten = stub.GetCode();
   1730   }
   1731   set_target(*rewritten);
   1732 
   1733 #ifdef DEBUG
   1734   if (FLAG_trace_ic) {
   1735     PrintF("[CompareIC (%s->%s)#%s]\n",
   1736            GetStateName(previous_state),
   1737            GetStateName(state),
   1738            Token::Name(op_));
   1739   }
   1740 #endif
   1741 
   1742   // Activate inlined smi code.
   1743   if (previous_state == UNINITIALIZED) {
   1744     PatchInlinedSmiCode(address());
   1745   }
   1746 }
   1747 
   1748 void PatchInlinedSmiCode(Address address) {
   1749   // The address of the instruction following the call.
   1750   Address test_instruction_address =
   1751       address + Assembler::kCallTargetAddressOffset;
   1752 
   1753   // If the instruction following the call is not a test al instruction,
   1754   // nothing was inlined.
   1755   if (*test_instruction_address != Assembler::kTestAlByte) {
   1756     ASSERT(*test_instruction_address == Assembler::kNopByte);
   1757     return;
   1758   }
   1759 
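          // The test al instruction is a one-byte opcode followed by an 8-bit
          // immediate; that immediate byte carries the recorded delta.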
   1760   Address delta_address = test_instruction_address + 1;
   1761   // The delta locates the short jump that is patched below; the condition
   1762   // code for the rewritten jump is derived from the opcode found there.
   1763   int8_t delta = *reinterpret_cast<int8_t*>(delta_address);
   1764   if (FLAG_trace_ic) {
   1765     PrintF("[  patching ic at %p, test=%p, delta=%d\n",
   1766            address, test_instruction_address, delta);
   1767   }
   1768 
   1769   // Patch with a short conditional jump. There must be a
   1770   // short jump-if-carry/not-carry at this position.
   1771   Address jmp_address = test_instruction_address - delta;
   1772   ASSERT(*jmp_address == Assembler::kJncShortOpcode ||
   1773          *jmp_address == Assembler::kJcShortOpcode);
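          // The unpatched jump keys off the carry flag, which the preceding test
          // instruction always clears, so it is statically taken or not taken.
          // Rewriting it to a zero-flag jump makes it react to the inlined smi tag
          // test, which is what activates the inlined smi code.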
   1774   Condition cc = *jmp_address == Assembler::kJncShortOpcode
   1775       ? not_zero
   1776       : zero;
   1777   *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
   1778 }
   1779 
   1780 
   1781 } }  // namespace v8::internal
   1782 
   1783 #endif  // V8_TARGET_ARCH_X64
   1784