Home | History | Annotate | Download | only in x64
      1 // Copyright 2010 the V8 project authors. All rights reserved.
      2 // Redistribution and use in source and binary forms, with or without
      3 // modification, are permitted provided that the following conditions are
      4 // met:
      5 //
      6 //     * Redistributions of source code must retain the above copyright
      7 //       notice, this list of conditions and the following disclaimer.
      8 //     * Redistributions in binary form must reproduce the above
      9 //       copyright notice, this list of conditions and the following
     10 //       disclaimer in the documentation and/or other materials provided
     11 //       with the distribution.
     12 //     * Neither the name of Google Inc. nor the names of its
     13 //       contributors may be used to endorse or promote products derived
     14 //       from this software without specific prior written permission.
     15 //
     16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
     17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
     18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
     19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
     20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
     21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
     22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
     23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
     24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
     25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
     26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     27 
     28 #include "v8.h"
     29 
     30 #include "codegen-inl.h"
     31 #include "ic-inl.h"
     32 #include "runtime.h"
     33 #include "stub-cache.h"
     34 #include "utils.h"
     35 
     36 namespace v8 {
     37 namespace internal {
     38 
     39 // ----------------------------------------------------------------------------
     40 // Static IC stub generators.
     41 //
     42 
     43 #define __ ACCESS_MASM(masm)
     44 
     45 
     46 // Helper function used to load a property from a dictionary backing storage.
     47 // This function may return false negatives, so miss_label
     48 // must always call a backup property load that is complete.
     49 // This function is safe to call if the receiver has fast properties,
     50 // or if name is not a symbol, and will jump to the miss_label in that case.
      51 static void GenerateDictionaryLoad(MacroAssembler* masm,
      52                                    Label* miss_label,
      53                                    Register r0,
      54                                    Register r1,
      55                                    Register r2,
      56                                    Register name,
      57                                    DictionaryCheck check_dictionary) {
      58   // Register use:
      59   //
      60   // r0   - used to hold the property dictionary.
      61   //
      62   // r1   - initially the receiver
      63   //      - used for the index into the property dictionary
      64   //      - holds the result on exit.
      65   //
      66   // r2   - used to hold the capacity of the property dictionary.
      67   //
      68   // name - holds the name of the property and is unchanged.
      69 
      70   Label done;
      71 
      72   // Check for the absence of an interceptor.
      73   // Load the map into r0.
      74   __ movq(r0, FieldOperand(r1, JSObject::kMapOffset));
      75   // Test the has_named_interceptor bit in the map.
           // The attributes word is read as 32 bits; the byte holding the
           // interceptor bit is the fourth byte of that word, hence the
           // extra (3 * 8) added to the bit position.
      76   __ testl(FieldOperand(r0, Map::kInstanceAttributesOffset),
      77           Immediate(1 << (Map::kHasNamedInterceptor + (3 * 8))));
      78 
      79   // Jump to miss if the interceptor bit is set.
      80   __ j(not_zero, miss_label);
      81 
      82   // Bail out if we have a JS global proxy object.
      83   __ movzxbq(r0, FieldOperand(r0, Map::kInstanceTypeOffset));
      84   __ cmpb(r0, Immediate(JS_GLOBAL_PROXY_TYPE));
      85   __ j(equal, miss_label);
      86 
      87   // Possible work-around for http://crbug.com/16276.
      88   __ cmpb(r0, Immediate(JS_GLOBAL_OBJECT_TYPE));
      89   __ j(equal, miss_label);
      90   __ cmpb(r0, Immediate(JS_BUILTINS_OBJECT_TYPE));
      91   __ j(equal, miss_label);
      92 
      93   // Load properties array.
      94   __ movq(r0, FieldOperand(r1, JSObject::kPropertiesOffset));
      95 
      96   if (check_dictionary == CHECK_DICTIONARY) {
      97     // Check that the properties array is a dictionary.
      98     __ Cmp(FieldOperand(r0, HeapObject::kMapOffset), Factory::hash_table_map());
      99     __ j(not_equal, miss_label);
     100   }
     101 
     102   // Compute the capacity mask.
     103   const int kCapacityOffset =
     104       StringDictionary::kHeaderSize +
     105       StringDictionary::kCapacityIndex * kPointerSize;
     106   __ movq(r2, FieldOperand(r0, kCapacityOffset));
     107   __ SmiToInteger32(r2, r2);
           // r2 = capacity - 1, used as the "& mask" in the probe loop below.
     108   __ decl(r2);
     109 
     110   // Generate an unrolled loop that performs a few probes before
     111   // giving up. Measurements done on Gmail indicate that 2 probes
     112   // cover ~93% of loads from dictionaries.
     113   static const int kProbes = 4;
     114   const int kElementsStartOffset =
     115       StringDictionary::kHeaderSize +
     116       StringDictionary::kElementsStartIndex * kPointerSize;
     117   for (int i = 0; i < kProbes; i++) {
     118     // Compute the masked index: (hash + i + i * i) & mask.
     119     __ movl(r1, FieldOperand(name, String::kHashFieldOffset));
     120     __ shrl(r1, Immediate(String::kHashShift));
     121     if (i > 0) {
     122       __ addl(r1, Immediate(StringDictionary::GetProbeOffset(i)));
     123     }
     124     __ and_(r1, r2);
     125 
     126     // Scale the index by multiplying by the entry size.
     127     ASSERT(StringDictionary::kEntrySize == 3);
     128     __ lea(r1, Operand(r1, r1, times_2, 0));  // r1 = r1 * 3
     129 
     130     // Check if the key is identical to the name.
     131     __ cmpq(name, Operand(r0, r1, times_pointer_size,
     132                           kElementsStartOffset - kHeapObjectTag));
     133     if (i != kProbes - 1) {
     134       __ j(equal, &done);
     135     } else {
             // Last probe: give up and jump straight to miss on mismatch.
     136       __ j(not_equal, miss_label);
     137     }
     138   }
     139 
     140   // Check that the value is a normal property.
     141   __ bind(&done);
           // The details word is the third pointer of each 3-pointer entry.
     142   const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
     143   __ Test(Operand(r0, r1, times_pointer_size, kDetailsOffset - kHeapObjectTag),
     144           Smi::FromInt(PropertyDetails::TypeField::mask()));
     145   __ j(not_zero, miss_label);
     146 
     147   // Get the value at the masked, scaled index.
           // The value is the second pointer of the entry; leave it in r1.
     148   const int kValueOffset = kElementsStartOffset + kPointerSize;
     149   __ movq(r1,
     150           Operand(r0, r1, times_pointer_size, kValueOffset - kHeapObjectTag));
     151 }
    152 
    153 
     154 // One byte opcode for test eax,0xXXXXXXXX.
         // Used by PatchInlinedMapCheck below as a marker: a call site with an
         // inlined fast case places this test instruction right after the call.
     155 static const byte kTestEaxByte = 0xA9;
    156 
    157 
     158 static bool PatchInlinedMapCheck(Address address, Object* map) {
     159   // Arguments are address of start of call sequence that called
     160   // the IC,
     161   Address test_instruction_address =
     162       address + Assembler::kCallTargetAddressOffset;
           // test_instruction_address now points just past the IC call.
     163   // The keyed load has a fast inlined case if the IC call instruction
     164   // is immediately followed by a test instruction.
           // If the marker test instruction is absent there is no inlined map
           // check at this site; report failure so the caller can fall back.
     165   if (*test_instruction_address != kTestEaxByte) return false;
     166 
     167   // Fetch the offset from the test instruction to the map compare
     168   // instructions (starting with the 64-bit immediate mov of the map
     169   // address). This offset is stored in the last 4 bytes of the 5
     170   // byte test instruction.
     171   Address delta_address = test_instruction_address + 1;
     172   int delta = *reinterpret_cast<int*>(delta_address);
     173   // Compute the map address.  The map address is in the last 8 bytes
     174   // of the 10-byte immediate mov instruction (incl. REX prefix), so we add 2
     175   // to the offset to get the map address.
     176   Address map_address = test_instruction_address + delta + 2;
     177   // Patch the map check.
           // Overwrite the 8-byte immediate with the new map pointer.
     178   *(reinterpret_cast<Object**>(map_address)) = map;
     179   return true;
     180 }
    181 
    182 
     183 bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) {
           // Patch the map immediate of the inlined map check at this keyed
           // load site; returns false if the site has no inlined fast case.
     184   return PatchInlinedMapCheck(address, map);
     185 }
    186 
    187 
     188 bool KeyedStoreIC::PatchInlinedStore(Address address, Object* map) {
           // Patch the map immediate of the inlined map check at this keyed
           // store site; returns false if the site has no inlined fast case.
     189   return PatchInlinedMapCheck(address, map);
     190 }
    191 
    192 
     193 void KeyedLoadIC::ClearInlinedVersion(Address address) {
     194   // Insert null as the map to check for to make sure the map check fails
     195   // sending control flow to the IC instead of the inlined version.
           // (null_value can never equal a real map, so the check always fails.
           // The return value is ignored: if there is no inlined version at
           // this site there is nothing to clear.)
     196   PatchInlinedLoad(address, Heap::null_value());
     197 }
    198 
    199 
     200 void KeyedStoreIC::ClearInlinedVersion(Address address) {
     201   // Insert null as the elements map to check for.  This will make
     202   // sure that the elements fast-case map check fails so that control
     203   // flows to the IC instead of the inlined version.
           // The return value is ignored: if there is no inlined version at
           // this site there is nothing to clear.
     204   PatchInlinedStore(address, Heap::null_value());
     205 }
    206 
    207 
     208 void KeyedStoreIC::RestoreInlinedVersion(Address address) {
     209   // Restore the fast-case elements map check so that the inlined
     210   // version can be used again.
           // fixed_array_map is the map of fast-case (FixedArray) elements.
     211   PatchInlinedStore(address, Heap::fixed_array_map());
     212 }
    213 
    214 
     215 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
     216   // ----------- S t a t e -------------
     217   //  -- rsp[0]  : return address
     218   //  -- rsp[8]  : name
     219   //  -- rsp[16] : receiver
     220   // -----------------------------------
     221 
           // Re-push (receiver, name) below the return address so they become
           // the arguments of the miss handler runtime call.
     222   __ pop(rbx);
     223   __ push(Operand(rsp, 1 * kPointerSize));  // receiver
     224   __ push(Operand(rsp, 1 * kPointerSize));  // name
     225   __ push(rbx);  // return address
     226 
     227   // Perform tail call to the entry.
     228   __ TailCallRuntime(ExternalReference(IC_Utility(kKeyedLoadIC_Miss)), 2, 1);
     229 }
    230 
    231 
     232 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
     233   // ----------- S t a t e -------------
     234   //  -- rsp[0]  : return address
     235   //  -- rsp[8]  : name
     236   //  -- rsp[16] : receiver
     237   // -----------------------------------
     238 
           // Re-push (receiver, name) below the return address and tail-call
           // the generic runtime property lookup.
     239   __ pop(rbx);
     240   __ push(Operand(rsp, 1 * kPointerSize));  // receiver
     241   __ push(Operand(rsp, 1 * kPointerSize));  // name
     242   __ push(rbx);  // return address
     243 
     244   // Perform tail call to the entry.
     245   __ TailCallRuntime(ExternalReference(Runtime::kKeyedGetProperty), 2, 1);
     246 }
    247 
    248 
     249 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
     250   // ----------- S t a t e -------------
     251   //  -- rsp[0] : return address
     252   //  -- rsp[8] : name
     253   //  -- rsp[16] : receiver
     254   // -----------------------------------
     255   Label slow, check_string, index_int, index_string;
     256   Label check_pixel_array, probe_dictionary;
     257 
     258   // Load name and receiver.
     259   __ movq(rax, Operand(rsp, kPointerSize));
     260   __ movq(rcx, Operand(rsp, 2 * kPointerSize));
     261 
     262   // Check that the object isn't a smi.
     263   __ JumpIfSmi(rcx, &slow);
     264 
     265   // Check that the object is some kind of JS object EXCEPT JS Value type.
     266   // In the case that the object is a value-wrapper object,
     267   // we enter the runtime system to make sure that indexing
     268   // into string objects work as intended.
     269   ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
     270   __ CmpObjectType(rcx, JS_OBJECT_TYPE, rdx);
     271   __ j(below, &slow);
     272 
     273   // Check bit field.
           // Any slow-case bit set (e.g. access checks, interceptors) sends
           // the load to the runtime.
     274   __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
     275            Immediate(kSlowCaseBitFieldMask));
     276   __ j(not_zero, &slow);
     277 
     278   // Check that the key is a smi.
     279   __ JumpIfNotSmi(rax, &check_string);
     280   __ SmiToInteger32(rax, rax);
     281   // Get the elements array of the object.
     282   __ bind(&index_int);
     283   __ movq(rcx, FieldOperand(rcx, JSObject::kElementsOffset));
     284   // Check that the object is in fast mode (not dictionary).
     285   __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
     286                  Heap::kFixedArrayMapRootIndex);
     287   __ j(not_equal, &check_pixel_array);
     288   // Check that the key (index) is within bounds.
     289   __ cmpl(rax, FieldOperand(rcx, FixedArray::kLengthOffset));
     290   __ j(above_equal, &slow);  // Unsigned comparison rejects negative indices.
     291   // Fast case: Do the load.
     292   __ movq(rax, Operand(rcx, rax, times_pointer_size,
     293                       FixedArray::kHeaderSize - kHeapObjectTag));
     294   __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
     295   // In case the loaded value is the_hole we have to consult GetProperty
     296   // to ensure the prototype chain is searched.
     297   __ j(equal, &slow);
     298   __ IncrementCounter(&Counters::keyed_load_generic_smi, 1);
     299   __ ret(0);
     300 
     301   // Check whether the elements is a pixel array.
     302   // rax: untagged index
     303   // rcx: elements array
     304   __ bind(&check_pixel_array);
     305   __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
     306                  Heap::kPixelArrayMapRootIndex);
     307   __ j(not_equal, &slow);
     308   __ cmpl(rax, FieldOperand(rcx, PixelArray::kLengthOffset));
     309   __ j(above_equal, &slow);
     310   __ movq(rcx, FieldOperand(rcx, PixelArray::kExternalPointerOffset));
     311   __ movzxbq(rax, Operand(rcx, rax, times_1, 0));
     312   __ Integer32ToSmi(rax, rax);
     313   __ ret(0);
     314 
     315   // Slow case: Load name and receiver from stack and jump to runtime.
     316   __ bind(&slow);
     317   __ IncrementCounter(&Counters::keyed_load_generic_slow, 1);
     318   GenerateRuntimeGetProperty(masm);
           // The tail call emitted above never returns; check_string is only
           // reached via explicit jumps.
     319   __ bind(&check_string);
     320   // The key is not a smi.
     321   // Is it a string?
     322   __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
     323   __ j(above_equal, &slow);
     324   // Is the string an array index, with cached numeric value?
     325   __ movl(rbx, FieldOperand(rax, String::kHashFieldOffset));
     326   __ testl(rbx, Immediate(String::kIsArrayIndexMask));
     327 
     328   // Is the string a symbol?
     329   __ j(not_zero, &index_string);  // The value in rbx is used at jump target.
     330   ASSERT(kSymbolTag != 0);
     331   __ testb(FieldOperand(rdx, Map::kInstanceTypeOffset),
     332            Immediate(kIsSymbolMask));
     333   __ j(zero, &slow);
     334 
     335   // If the receiver is a fast-case object, check the keyed lookup
     336   // cache. Otherwise probe the dictionary leaving result in rcx.
     337   __ movq(rbx, FieldOperand(rcx, JSObject::kPropertiesOffset));
     338   __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), Factory::hash_table_map());
     339   __ j(equal, &probe_dictionary);
     340 
     341   // Load the map of the receiver, compute the keyed lookup cache hash
     342   // based on 32 bits of the map pointer and the string hash.
     343   __ movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
     344   __ movl(rdx, rbx);
     345   __ shr(rdx, Immediate(KeyedLookupCache::kMapHashShift));
     346   __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
     347   __ shr(rax, Immediate(String::kHashShift));
     348   __ xor_(rdx, rax);
     349   __ and_(rdx, Immediate(KeyedLookupCache::kCapacityMask));
     350 
     351   // Load the key (consisting of map and symbol) from the cache and
     352   // check for match.
     353   ExternalReference cache_keys
     354       = ExternalReference::keyed_lookup_cache_keys();
     355   __ movq(rdi, rdx);
           // Each cache entry is a (map, symbol) pair, so scale the entry
           // index by 2 * kPointerSize.
     356   __ shl(rdi, Immediate(kPointerSizeLog2 + 1));
     357   __ movq(kScratchRegister, cache_keys);
     358   __ cmpq(rbx, Operand(kScratchRegister, rdi, times_1, 0));
     359   __ j(not_equal, &slow);
     360   __ movq(rdi, Operand(kScratchRegister, rdi, times_1, kPointerSize));
     361   __ cmpq(Operand(rsp, kPointerSize), rdi);
     362   __ j(not_equal, &slow);
     363 
     364   // Get field offset which is a 32-bit integer and check that it is
     365   // an in-object property.
     366   ExternalReference cache_field_offsets
     367       = ExternalReference::keyed_lookup_cache_field_offsets();
     368   __ movq(kScratchRegister, cache_field_offsets);
     369   __ movl(rax, Operand(kScratchRegister, rdx, times_4, 0));
     370   __ movzxbq(rdx, FieldOperand(rbx, Map::kInObjectPropertiesOffset));
     371   __ cmpq(rax, rdx);
     372   __ j(above_equal, &slow);
     373 
     374   // Load in-object property.
           // Offset (in words) from the start of the object:
           // field index - inobject property count + instance size.
     375   __ subq(rax, rdx);
     376   __ movzxbq(rdx, FieldOperand(rbx, Map::kInstanceSizeOffset));
     377   __ addq(rax, rdx);
     378   __ movq(rax, FieldOperand(rcx, rax, times_pointer_size, 0));
     379   __ ret(0);
     380 
     381   // Do a quick inline probe of the receiver's dictionary, if it
     382   // exists.
     383   __ bind(&probe_dictionary);
     384   GenerateDictionaryLoad(masm,
     385                          &slow,
     386                          rbx,
     387                          rcx,
     388                          rdx,
     389                          rax,
     390                          DICTIONARY_CHECK_DONE);
           // GenerateDictionaryLoad leaves the property value in rcx (its r1).
     391   __ movq(rax, rcx);
     392   __ IncrementCounter(&Counters::keyed_load_generic_symbol, 1);
     393   __ ret(0);
     394   // If the hash field contains an array index pick it out. The assert checks
     395   // that the constants for the maximum number of digits for an array index
     396   // cached in the hash field and the number of bits reserved for it does not
     397   // conflict.
     398   ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
     399          (1 << String::kArrayIndexValueBits));
     400   __ bind(&index_string);
           // rbx still holds the hash field loaded above; extract the cached
           // numeric index from it and reuse the integer-index fast path.
     401   __ movl(rax, rbx);
     402   __ and_(rax, Immediate(String::kArrayIndexHashMask));
     403   __ shrl(rax, Immediate(String::kHashShift));
     404   __ jmp(&index_int);
     405 }
    406 
    407 
     408 void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
     409   // ----------- S t a t e -------------
     410   //  -- rsp[0] : return address
     411   //  -- rsp[8] : name
     412   //  -- rsp[16] : receiver
     413   // -----------------------------------
     414 
           // No specialized string stub here; the generic stub sends
           // non-JS-object receivers (including strings) to the runtime.
     415   GenerateGeneric(masm);
     416 }
    417 
    418 
     419 void KeyedLoadIC::GenerateExternalArray(MacroAssembler* masm,
     420                                         ExternalArrayType array_type) {
     421   // ----------- S t a t e -------------
     422   //  -- rsp[0] : return address
     423   //  -- rsp[8] : name
     424   //  -- rsp[16] : receiver
     425   // -----------------------------------
     426   Label slow, failed_allocation;
     427 
     428   // Load name and receiver.
     429   __ movq(rax, Operand(rsp, kPointerSize));
     430   __ movq(rcx, Operand(rsp, 2 * kPointerSize));
     431 
     432   // Check that the object isn't a smi.
     433   __ JumpIfSmi(rcx, &slow);
     434 
     435   // Check that the key is a smi.
     436   __ JumpIfNotSmi(rax, &slow);
     437 
     438   // Check that the object is a JS object.
     439   __ CmpObjectType(rcx, JS_OBJECT_TYPE, rdx);
     440   __ j(not_equal, &slow);
     441   // Check that the receiver does not require access checks.  We need
     442   // to check this explicitly since this generic stub does not perform
     443   // map checks.  The map is already in rdx.
     444   __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
     445            Immediate(1 << Map::kIsAccessCheckNeeded));
     446   __ j(not_zero, &slow);
     447 
     448   // Check that the elements array is the appropriate type of
     449   // ExternalArray.
     450   // rax: index (as a smi)
     451   // rcx: JSObject
     452   __ movq(rcx, FieldOperand(rcx, JSObject::kElementsOffset));
     453   __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
     454                  Heap::RootIndexForExternalArrayType(array_type));
     455   __ j(not_equal, &slow);
     456 
     457   // Check that the index is in range.
     458   __ SmiToInteger32(rax, rax);
     459   __ cmpl(rax, FieldOperand(rcx, ExternalArray::kLengthOffset));
     460   // Unsigned comparison catches both negative and too-large values.
     461   __ j(above_equal, &slow);
     462 
     463   // rax: untagged index
     464   // rcx: elements array
     465   __ movq(rcx, FieldOperand(rcx, ExternalArray::kExternalPointerOffset));
     466   // rcx: base pointer of external storage
           // Load the element into rax, sign- or zero-extending as the element
           // type requires (float elements go onto the x87 stack instead).
     467   switch (array_type) {
     468     case kExternalByteArray:
     469       __ movsxbq(rax, Operand(rcx, rax, times_1, 0));
     470       break;
     471     case kExternalUnsignedByteArray:
     472       __ movzxbq(rax, Operand(rcx, rax, times_1, 0));
     473       break;
     474     case kExternalShortArray:
     475       __ movsxwq(rax, Operand(rcx, rax, times_2, 0));
     476       break;
     477     case kExternalUnsignedShortArray:
     478       __ movzxwq(rax, Operand(rcx, rax, times_2, 0));
     479       break;
     480     case kExternalIntArray:
     481       __ movsxlq(rax, Operand(rcx, rax, times_4, 0));
     482       break;
     483     case kExternalUnsignedIntArray:
     484       __ movl(rax, Operand(rcx, rax, times_4, 0));
     485       break;
     486     case kExternalFloatArray:
     487       __ fld_s(Operand(rcx, rax, times_4, 0));
     488       break;
     489     default:
     490       UNREACHABLE();
     491       break;
     492   }
     493 
     494   // For integer array types:
     495   // rax: value
     496   // For floating-point array type:
     497   // FP(0): value
     498 
     499   if (array_type == kExternalIntArray ||
     500       array_type == kExternalUnsignedIntArray) {
     501     // For the Int and UnsignedInt array types, we need to see whether
     502     // the value can be represented in a Smi. If not, we need to convert
     503     // it to a HeapNumber.
     504     Label box_int;
     505     if (array_type == kExternalIntArray) {
     506       __ JumpIfNotValidSmiValue(rax, &box_int);
     507     } else {
     508       ASSERT_EQ(array_type, kExternalUnsignedIntArray);
     509       __ JumpIfUIntNotValidSmiValue(rax, &box_int);
     510     }
     511 
     512     __ Integer32ToSmi(rax, rax);
     513     __ ret(0);
     514 
     515     __ bind(&box_int);
     516 
     517     // Allocate a HeapNumber for the int and perform int-to-double
     518     // conversion.
     519     __ push(rax);
     520     if (array_type == kExternalIntArray) {
     521       __ fild_s(Operand(rsp, 0));
     522     } else {
     523       ASSERT(array_type == kExternalUnsignedIntArray);
     524       // Need to zero-extend the value.
             // The movl above already zero-extended rax, so the pushed 64-bit
             // slot holds the unsigned value; loading it as a signed 64-bit
             // integer (fild_d) therefore yields the correct magnitude.
     525       __ fild_d(Operand(rsp, 0));
     526     }
     527     __ pop(rax);
     528     // FP(0): value
     529     __ AllocateHeapNumber(rax, rbx, &failed_allocation);
     530     // Set the value.
     531     __ fstp_d(FieldOperand(rax, HeapNumber::kValueOffset));
     532     __ ret(0);
     533   } else if (array_type == kExternalFloatArray) {
     534     // For the floating-point array type, we need to always allocate a
     535     // HeapNumber.
     536     __ AllocateHeapNumber(rax, rbx, &failed_allocation);
     537     // Set the value.
     538     __ fstp_d(FieldOperand(rax, HeapNumber::kValueOffset));
     539     __ ret(0);
     540   } else {
             // Byte and short elements always fit in a smi.
     541     __ Integer32ToSmi(rax, rax);
     542     __ ret(0);
     543   }
     544 
     545   // If we fail allocation of the HeapNumber, we still have a value on
     546   // top of the FPU stack. Remove it.
     547   __ bind(&failed_allocation);
     548   __ ffree();
     549   __ fincstp();
     550   // Fall through to slow case.
     551 
     552   // Slow case: Load name and receiver from stack and jump to runtime.
     553   __ bind(&slow);
     554   __ IncrementCounter(&Counters::keyed_load_external_array_slow, 1);
     555   GenerateRuntimeGetProperty(masm);
     556 }
    557 
    558 
     559 void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
     560   // ----------- S t a t e -------------
     561   //  -- rsp[0] : return address
     562   //  -- rsp[8] : key
     563   //  -- rsp[16] : receiver
     564   // -----------------------------------
     565   Label slow;
     566 
     567   // Load key and receiver.
     568   __ movq(rax, Operand(rsp, kPointerSize));
     569   __ movq(rcx, Operand(rsp, 2 * kPointerSize));
     570 
     571   // Check that the receiver isn't a smi.
     572   __ JumpIfSmi(rcx, &slow);
     573 
     574   // Check that the key is a smi.
     575   __ JumpIfNotSmi(rax, &slow);
     576 
     577   // Get the map of the receiver.
     578   __ movq(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
     579 
     580   // Check that it has indexed interceptor and access checks
     581   // are not enabled for this object.
           // After masking, the bit field must equal exactly the
           // has-indexed-interceptor bit: that bit set and every other
           // slow-case bit clear.
     582   __ movb(rdx, FieldOperand(rdx, Map::kBitFieldOffset));
     583   __ andb(rdx, Immediate(kSlowCaseBitFieldMask));
     584   __ cmpb(rdx, Immediate(1 << Map::kHasIndexedInterceptor));
     585   __ j(not_zero, &slow);
     586 
     587   // Everything is fine, call runtime.
           // Rebuild the argument list as (receiver, key) below the return
           // address for the runtime call.
     588   __ pop(rdx);
     589   __ push(rcx);  // receiver
     590   __ push(rax);  // key
     591   __ push(rdx);  // return address
     592 
     593   // Perform tail call to the entry.
     594   __ TailCallRuntime(ExternalReference(
     595         IC_Utility(kKeyedLoadPropertyWithInterceptor)), 2, 1);
     596 
     597   __ bind(&slow);
     598   GenerateMiss(masm);
     599 }
    600 
    601 
     602 void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
     603   // ----------- S t a t e -------------
     604   //  -- rax     : value
     605   //  -- rsp[0]  : return address
     606   //  -- rsp[8]  : key
     607   //  -- rsp[16] : receiver
     608   // -----------------------------------
     609 
           // Re-push (receiver, key, value) below the return address so they
           // become the arguments of the miss handler runtime call.
     610   __ pop(rcx);
     611   __ push(Operand(rsp, 1 * kPointerSize));  // receiver
     612   __ push(Operand(rsp, 1 * kPointerSize));  // key
     613   __ push(rax);  // value
     614   __ push(rcx);  // return address
     615 
     616   // Do tail-call to runtime routine.
     617   __ TailCallRuntime(ExternalReference(IC_Utility(kKeyedStoreIC_Miss)), 3, 1);
     618 }
    619 
    620 
     621 void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm) {
     622   // ----------- S t a t e -------------
     623   //  -- rax     : value
     624   //  -- rsp[0]  : return address
     625   //  -- rsp[8]  : key
     626   //  -- rsp[16] : receiver
     627   // -----------------------------------
     628 
           // Re-push (receiver, key, value) below the return address and
           // tail-call the generic runtime set-property entry.
     629   __ pop(rcx);
     630   __ push(Operand(rsp, 1 * kPointerSize));  // receiver
     631   __ push(Operand(rsp, 1 * kPointerSize));  // key
     632   __ push(rax);  // value
     633   __ push(rcx);  // return address
     634 
     635   // Do tail-call to runtime routine.
     636   __ TailCallRuntime(ExternalReference(Runtime::kSetProperty), 3, 1);
     637 }
    638 
    639 
     640 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
     641   // ----------- S t a t e -------------
     642   //  -- rax    : value
     643   //  -- rsp[0] : return address
     644   //  -- rsp[8] : key
     645   //  -- rsp[16] : receiver
     646   // -----------------------------------
     647   Label slow, fast, array, extra, check_pixel_array;
     648 
     649   // Get the receiver from the stack.
     650   __ movq(rdx, Operand(rsp, 2 * kPointerSize));  // 2 ~ return address, key
     651   // Check that the object isn't a smi.
     652   __ JumpIfSmi(rdx, &slow);
     653   // Get the map from the receiver.
     654   __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
     655   // Check that the receiver does not require access checks.  We need
     656   // to do this because this generic stub does not perform map checks.
     657   __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
     658            Immediate(1 << Map::kIsAccessCheckNeeded));
     659   __ j(not_zero, &slow);
     660   // Get the key from the stack.
     661   __ movq(rbx, Operand(rsp, 1 * kPointerSize));  // 1 ~ return address
     662   // Check that the key is a smi.
     663   __ JumpIfNotSmi(rbx, &slow);
     664 
     665   __ CmpInstanceType(rcx, JS_ARRAY_TYPE);
     666   __ j(equal, &array);
     667   // Check that the object is some kind of JS object.
     668   __ CmpInstanceType(rcx, FIRST_JS_OBJECT_TYPE);
     669   __ j(below, &slow);
     670 
     671   // Object case: Check key against length in the elements array.
     672   // rax: value
     673   // rdx: JSObject
     674   // rbx: index (as a smi)
     675   __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
     676   // Check that the object is in fast mode (not dictionary).
     677   __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
     678                  Heap::kFixedArrayMapRootIndex);
     679   __ j(not_equal, &check_pixel_array);
     680   // Untag the key (for checking against untagged length in the fixed array).
           // rdx (the receiver) is no longer needed on this path, so it is
           // reused for the untagged index.
     681   __ SmiToInteger32(rdx, rbx);
           // NOTE(review): Array::kLengthOffset here vs. FixedArray::kLengthOffset
           // used elsewhere in this file -- presumably the same offset; confirm.
     682   __ cmpl(rdx, FieldOperand(rcx, Array::kLengthOffset));
     683   // rax: value
     684   // rcx: FixedArray
     685   // rbx: index (as a smi)
     686   __ j(below, &fast);
     687 
     688   // Slow case: call runtime.
     689   __ bind(&slow);
     690   GenerateRuntimeSetProperty(masm);
     691 
     692   // Check whether the elements is a pixel array.
     693   // rax: value
     694   // rcx: elements array
     695   // rbx: index (as a smi), zero-extended.
     696   __ bind(&check_pixel_array);
     697   __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
     698                  Heap::kPixelArrayMapRootIndex);
     699   __ j(not_equal, &slow);
     700   // Check that the value is a smi. If a conversion is needed call into the
     701   // runtime to convert and clamp.
     702   __ JumpIfNotSmi(rax, &slow);
     703   __ SmiToInteger32(rbx, rbx);
     704   __ cmpl(rbx, FieldOperand(rcx, PixelArray::kLengthOffset));
     705   __ j(above_equal, &slow);
     706   __ movq(rdx, rax);  // Save the value.
     707   __ SmiToInteger32(rax, rax);
     708   {  // Clamp the value to [0..255].
     709     Label done;
     710     __ testl(rax, Immediate(0xFFFFFF00));
     711     __ j(zero, &done);
     712     __ setcc(negative, rax);  // 1 if negative, 0 if positive.
     713     __ decb(rax);  // 0 if negative, 255 if positive.
     714     __ bind(&done);
     715   }
     716   __ movq(rcx, FieldOperand(rcx, PixelArray::kExternalPointerOffset));
     717   __ movb(Operand(rcx, rbx, times_1, 0), rax);
     718   __ movq(rax, rdx);  // Return the original value.
     719   __ ret(0);
     720 
     721   // Extra capacity case: Check if there is extra capacity to
     722   // perform the store and update the length. Used for adding one
     723   // element to the array by writing to array[array.length].
     724   __ bind(&extra);
     725   // rax: value
     726   // rdx: JSArray
     727   // rcx: FixedArray
     728   // rbx: index (as a smi)
     729   // flags: smicompare (rdx.length(), rbx)
           // Flags are still live from the SmiCompare before the jump here:
           // only index == length (appending exactly one element) may proceed.
     730   __ j(not_equal, &slow);  // do not leave holes in the array
     731   __ SmiToInteger64(rbx, rbx);
     732   __ cmpl(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
     733   __ j(above_equal, &slow);
     734   // Increment and restore smi-tag.
     735   __ Integer64PlusConstantToSmi(rbx, rbx, 1);
     736   __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rbx);
     737   __ SmiSubConstant(rbx, rbx, Smi::FromInt(1));
     738   __ jmp(&fast);
     739 
     740   // Array case: Get the length and the elements array from the JS
     741   // array. Check that the array is in fast mode; if it is the
     742   // length is always a smi.
     743   __ bind(&array);
     744   // rax: value
     745   // rdx: JSArray
     746   // rbx: index (as a smi)
     747   __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
     748   __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
     749                  Heap::kFixedArrayMapRootIndex);
     750   __ j(not_equal, &slow);
     751 
     752   // Check the key against the length in the array, compute the
     753   // address to store into and fall through to fast case.
     754   __ SmiCompare(FieldOperand(rdx, JSArray::kLengthOffset), rbx);
     755   __ j(below_equal, &extra);
     756 
     757   // Fast case: Do the store.
     758   __ bind(&fast);
     759   // rax: value
     760   // rcx: FixedArray
     761   // rbx: index (as a smi)
     762   Label non_smi_value;
     763   __ JumpIfNotSmi(rax, &non_smi_value);
           // Storing a smi: no write barrier call on this path
           // (cf. non_smi_value below, which calls RecordWriteNonSmi).
     764   SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
     765   __ movq(Operand(rcx, index.reg, index.scale,
     766                   FixedArray::kHeaderSize - kHeapObjectTag),
     767           rax);
     768   __ ret(0);
     769   __ bind(&non_smi_value);
     770   // Slow case that needs to retain rbx for use by RecordWrite.
     771   // Update write barrier for the elements array address.
     772   SmiIndex index2 = masm->SmiToIndex(kScratchRegister, rbx, kPointerSizeLog2);
     773   __ movq(Operand(rcx, index2.reg, index2.scale,
     774                   FixedArray::kHeaderSize - kHeapObjectTag),
     775           rax);
     776   __ movq(rdx, rax);
     777   __ RecordWriteNonSmi(rcx, 0, rdx, rbx);
     778   __ ret(0);
     779 }
    780 
    781 
// Generates the generic keyed-store stub for external (typed) arrays
// holding elements of |array_type|.  Smi values are stored directly
// after untagging; heap numbers are converted via the x87 FPU; any
// other value kind, out-of-range index, or guarded receiver falls
// through to the runtime (GenerateRuntimeSetProperty).
void KeyedStoreIC::GenerateExternalArray(MacroAssembler* masm,
                                         ExternalArrayType array_type) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rsp[0] : return address
  //  -- rsp[8] : key
  //  -- rsp[16] : receiver
  // -----------------------------------
  Label slow, check_heap_number;

  // Get the receiver from the stack.
  __ movq(rdx, Operand(rsp, 2 * kPointerSize));
  // Check that the object isn't a smi.
  __ JumpIfSmi(rdx, &slow);
  // Get the map from the receiver.
  __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks.  We need
  // to do this because this generic stub does not perform map checks.
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsAccessCheckNeeded));
  __ j(not_zero, &slow);
  // Get the key from the stack.
  __ movq(rbx, Operand(rsp, 1 * kPointerSize));  // 1 ~ return address
  // Check that the key is a smi.
  __ JumpIfNotSmi(rbx, &slow);

  // Check that the object is a JS object.
  __ CmpInstanceType(rcx, JS_OBJECT_TYPE);
  __ j(not_equal, &slow);

  // Check that the elements array is the appropriate type of
  // ExternalArray.
  // rax: value
  // rdx: JSObject
  // rbx: index (as a smi)
  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
                 Heap::RootIndexForExternalArrayType(array_type));
  __ j(not_equal, &slow);

  // Check that the index is in range.
  __ SmiToInteger32(rbx, rbx);  // Untag the index.
  __ cmpl(rbx, FieldOperand(rcx, ExternalArray::kLengthOffset));
  // Unsigned comparison catches both negative and too-large values.
  __ j(above_equal, &slow);

  // Handle both smis and HeapNumbers in the fast path. Go to the
  // runtime for all other kinds of values.
  // rax: value
  // rcx: elements array
  // rbx: untagged index
  __ JumpIfNotSmi(rax, &check_heap_number);
  // The original (tagged) value is preserved in rdx so it can be
  // returned unchanged to the caller after the store.
  __ movq(rdx, rax);  // Save the value.
  __ SmiToInteger32(rax, rax);
  __ movq(rcx, FieldOperand(rcx, ExternalArray::kExternalPointerOffset));
  // rcx: base pointer of external storage
  switch (array_type) {
    case kExternalByteArray:
    case kExternalUnsignedByteArray:
      __ movb(Operand(rcx, rbx, times_1, 0), rax);
      break;
    case kExternalShortArray:
    case kExternalUnsignedShortArray:
      __ movw(Operand(rcx, rbx, times_2, 0), rax);
      break;
    case kExternalIntArray:
    case kExternalUnsignedIntArray:
      __ movl(Operand(rcx, rbx, times_4, 0), rax);
      break;
    case kExternalFloatArray:
      // Need to perform int-to-float conversion.
      // The integer is pushed to memory because fild_s loads from a
      // memory operand, not from a register.
      __ push(rax);
      __ fild_s(Operand(rsp, 0));
      __ pop(rax);
      __ fstp_s(Operand(rcx, rbx, times_4, 0));
      break;
    default:
      UNREACHABLE();
      break;
  }
  __ movq(rax, rdx);  // Return the original value.
  __ ret(0);

  __ bind(&check_heap_number);
  __ CmpObjectType(rax, HEAP_NUMBER_TYPE, rdx);
  __ j(not_equal, &slow);

  // The WebGL specification leaves the behavior of storing NaN and
  // +/-Infinity into integer arrays basically undefined. For more
  // reproducible behavior, convert these to zero.
  __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
  __ movq(rdx, rax);  // Save the value.
  __ movq(rcx, FieldOperand(rcx, ExternalArray::kExternalPointerOffset));
  // rbx: untagged index
  // rcx: base pointer of external storage
  // top of FPU stack: value
  if (array_type == kExternalFloatArray) {
    __ fstp_s(Operand(rcx, rbx, times_4, 0));
    __ movq(rax, rdx);  // Return the original value.
    __ ret(0);
  } else {
    // Need to perform float-to-int conversion.
    // Test the top of the FP stack for NaN.  fucomi compares ST(0)
    // with itself; an unordered result (i.e. NaN) sets the parity
    // flag.
    Label is_nan;
    __ fucomi(0);
    __ j(parity_even, &is_nan);

    __ push(rax);  // Make room on stack
    __ fistp_d(Operand(rsp, 0));
    __ pop(rax);
    // rax: untagged integer value
    switch (array_type) {
      case kExternalByteArray:
      case kExternalUnsignedByteArray:
        __ movb(Operand(rcx, rbx, times_1, 0), rax);
        break;
      case kExternalShortArray:
      case kExternalUnsignedShortArray:
        __ movw(Operand(rcx, rbx, times_2, 0), rax);
        break;
      case kExternalIntArray:
      case kExternalUnsignedIntArray: {
        // We also need to explicitly check for +/-Infinity. These are
        // converted to MIN_INT, but we need to be careful not to
        // confuse with legal uses of MIN_INT.
        Label not_infinity;
        // This test would apparently detect both NaN and Infinity,
        // but we've already checked for NaN using the FPU hardware
        // above.
        // Load the top 16 bits of the IEEE-754 double (sign bit plus
        // the 11 exponent bits); 0x7FF0 masks the exponent, and an
        // all-ones exponent with NaN excluded means +/-Infinity.
        __ movzxwq(rdi, FieldOperand(rdx, HeapNumber::kValueOffset + 6));
        __ and_(rdi, Immediate(0x7FF0));
        __ cmpw(rdi, Immediate(0x7FF0));
        __ j(not_equal, &not_infinity);
        __ movq(rax, Immediate(0));
        __ bind(&not_infinity);
        __ movl(Operand(rcx, rbx, times_4, 0), rax);
        break;
      }
      default:
        UNREACHABLE();
        break;
    }
    __ movq(rax, rdx);  // Return the original value.
    __ ret(0);

    __ bind(&is_nan);
    // Discard the NaN from the FPU stack (free the register and
    // advance the stack pointer) and store zero instead.
    __ ffree();
    __ fincstp();
    __ movq(rax, Immediate(0));
    switch (array_type) {
      case kExternalByteArray:
      case kExternalUnsignedByteArray:
        __ movb(Operand(rcx, rbx, times_1, 0), rax);
        break;
      case kExternalShortArray:
      case kExternalUnsignedShortArray:
        __ movw(Operand(rcx, rbx, times_2, 0), rax);
        break;
      case kExternalIntArray:
      case kExternalUnsignedIntArray:
        __ movl(Operand(rcx, rbx, times_4, 0), rax);
        break;
      default:
        UNREACHABLE();
        break;
    }
    __ movq(rax, rdx);  // Return the original value.
    __ ret(0);
  }

  // Slow case: call runtime.
  __ bind(&slow);
  GenerateRuntimeSetProperty(masm);
}
    956 
    957 
// Generates the call-IC miss handler: calls the kCallIC_Miss runtime
// entry to resolve the callee, patches the receiver on the stack when
// it is a (builtins) global object, and tail-invokes the resolved
// function.
void CallIC::GenerateMiss(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  // rcx                      : function name
  // rsp[0]                   : return address
  // rsp[8]                   : argument argc
  // rsp[16]                  : argument argc - 1
  // ...
  // rsp[argc * 8]            : argument 1
  // rsp[(argc + 1) * 8]      : argument 0 = receiver
  // -----------------------------------
  // Get the receiver of the function from the stack; 1 ~ return address.
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Enter an internal frame.
  __ EnterInternalFrame();

  // Push the receiver and the name of the function.
  __ push(rdx);
  __ push(rcx);

  // Call the entry.  rax holds the C argument count (receiver and
  // name, the two values just pushed); rbx holds the C function.
  CEntryStub stub(1);
  __ movq(rax, Immediate(2));
  __ movq(rbx, ExternalReference(IC_Utility(kCallIC_Miss)));
  __ CallStub(&stub);

  // Move result to rdi and exit the internal frame.
  __ movq(rdi, rax);
  __ LeaveInternalFrame();

  // Check if the receiver is a global object of some sort.
  Label invoke, global;
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));  // receiver
  __ JumpIfSmi(rdx, &invoke);
  __ CmpObjectType(rdx, JS_GLOBAL_OBJECT_TYPE, rcx);
  __ j(equal, &global);
  __ CmpInstanceType(rcx, JS_BUILTINS_OBJECT_TYPE);
  __ j(not_equal, &invoke);

  // Patch the receiver on the stack: replace the global object with
  // its global receiver (the proxy callers should actually see).
  __ bind(&global);
  __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
  __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);

  // Invoke the function.
  ParameterCount actual(argc);
  __ bind(&invoke);
  __ InvokeFunction(rdi, actual, JUMP_FUNCTION);
}
   1007 
   1008 
   1009 // Defined in ic.cc.
   1010 Object* CallIC_Miss(Arguments args);
   1011 
// Generates the megamorphic call IC: probes the stub cache with the
// receiver's own map, and, for value receivers (numbers, strings,
// booleans), retries with the map of the corresponding JSValue
// prototype before falling back to the miss handler.
void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  // rcx                      : function name
  // rsp[0]                   : return address
  // rsp[8]                   : argument argc
  // rsp[16]                  : argument argc - 1
  // ...
  // rsp[argc * 8]            : argument 1
  // rsp[(argc + 1) * 8]      : argument 0 = receiver
  // -----------------------------------
  Label number, non_number, non_string, boolean, probe, miss;

  // Get the receiver of the function from the stack; 1 ~ return address.
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Probe the stub cache.  On a hit this jumps directly to the cached
  // stub and does not return here.
  Code::Flags flags =
      Code::ComputeFlags(Code::CALL_IC, NOT_IN_LOOP, MONOMORPHIC, NORMAL, argc);
  StubCache::GenerateProbe(masm, flags, rdx, rcx, rbx, rax);

  // If the stub cache probing failed, the receiver might be a value.
  // For value objects, we use the map of the prototype objects for
  // the corresponding JSValue for the cache and that is what we need
  // to probe.
  //
  // Check for number.
  __ JumpIfSmi(rdx, &number);
  __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rbx);
  __ j(not_equal, &non_number);
  __ bind(&number);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::NUMBER_FUNCTION_INDEX, rdx);
  __ jmp(&probe);

  // Check for string.  rbx still holds the receiver's map from
  // CmpObjectType above.
  __ bind(&non_number);
  __ CmpInstanceType(rbx, FIRST_NONSTRING_TYPE);
  __ j(above_equal, &non_string);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::STRING_FUNCTION_INDEX, rdx);
  __ jmp(&probe);

  // Check for boolean.
  __ bind(&non_string);
  __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
  __ j(equal, &boolean);
  __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
  __ j(not_equal, &miss);
  __ bind(&boolean);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::BOOLEAN_FUNCTION_INDEX, rdx);

  // Probe the stub cache for the value object.
  __ bind(&probe);
  StubCache::GenerateProbe(masm, flags, rdx, rcx, rbx, no_reg);

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  GenerateMiss(masm, argc);
}
   1072 
   1073 
// Helper for CallIC::GenerateNormal: looks the function name up in the
// receiver's property dictionary, verifies the result is a JSFunction,
// optionally swaps the stack receiver for the global receiver, and
// tail-invokes the function.  Jumps to |miss| on any failure.
static void GenerateNormalHelper(MacroAssembler* masm,
                                 int argc,
                                 bool is_global_object,
                                 Label* miss) {
  // ----------- S t a t e -------------
  // rcx                    : function name
  // rdx                    : receiver
  // rsp[0]                 : return address
  // rsp[8]                 : argument argc
  // rsp[16]                : argument argc - 1
  // ...
  // rsp[argc * 8]          : argument 1
  // rsp[(argc + 1) * 8]    : argument 0 = receiver
  // -----------------------------------
  // Search dictionary - put result in register rdx.
  GenerateDictionaryLoad(masm, miss, rax, rdx, rbx, rcx, CHECK_DICTIONARY);

  // Move the result to register rdi and check that it isn't a smi.
  __ movq(rdi, rdx);
  __ JumpIfSmi(rdx, miss);

  // Check that the value is a JavaScript function.
  __ CmpObjectType(rdx, JS_FUNCTION_TYPE, rdx);
  __ j(not_equal, miss);

  // Patch the receiver with the global proxy if necessary.
  if (is_global_object) {
    __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
    __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
    __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
  }

  // Invoke the function (tail call; does not return here).
  ParameterCount actual(argc);
  __ InvokeFunction(rdi, actual, JUMP_FUNCTION);
}
   1110 
   1111 
// Generates the call IC for receivers with normal (dictionary-mode)
// properties: validates the receiver type and access-check bits, then
// dispatches through GenerateNormalHelper.  Global proxies get an
// access-rights check first; anything unexpected goes to the miss
// handler.
void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  // rcx                    : function name
  // rsp[0]                 : return address
  // rsp[8]                 : argument argc
  // rsp[16]                : argument argc - 1
  // ...
  // rsp[argc * 8]          : argument 1
  // rsp[(argc + 1) * 8]    : argument 0 = receiver
  // -----------------------------------
  Label miss, global_object, non_global_object;

  // Get the receiver of the function from the stack.
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  // Check that the receiver is a valid JS object.
  // Because there are so many map checks and type checks, do not
  // use CmpObjectType, but load map and type into registers.
  __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
  __ movb(rax, FieldOperand(rbx, Map::kInstanceTypeOffset));
  __ cmpb(rax, Immediate(FIRST_JS_OBJECT_TYPE));
  __ j(below, &miss);

  // If this assert fails, we have to check upper bound too.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);

  // Check for access to global object.
  __ cmpb(rax, Immediate(JS_GLOBAL_OBJECT_TYPE));
  __ j(equal, &global_object);
  __ cmpb(rax, Immediate(JS_BUILTINS_OBJECT_TYPE));
  __ j(not_equal, &non_global_object);

  // Accessing global object: Load and invoke.
  __ bind(&global_object);
  // Check that the global object does not require access checks.
  __ movb(rbx, FieldOperand(rbx, Map::kBitFieldOffset));
  __ testb(rbx, Immediate(1 << Map::kIsAccessCheckNeeded));
  __ j(not_equal, &miss);
  // Does not return on success (helper tail-calls the function).
  GenerateNormalHelper(masm, argc, true, &miss);

  // Accessing non-global object: Check for access to global proxy.
  Label global_proxy, invoke;
  __ bind(&non_global_object);
  __ cmpb(rax, Immediate(JS_GLOBAL_PROXY_TYPE));
  __ j(equal, &global_proxy);
  // Check that the non-global, non-global-proxy object does not
  // require access checks.
  __ movb(rbx, FieldOperand(rbx, Map::kBitFieldOffset));
  __ testb(rbx, Immediate(1 << Map::kIsAccessCheckNeeded));
  __ j(not_equal, &miss);
  __ bind(&invoke);
  GenerateNormalHelper(masm, argc, false, &miss);

  // Global object proxy access: Check access rights.
  __ bind(&global_proxy);
  __ CheckAccessGlobalProxy(rdx, rax, &miss);
  __ jmp(&invoke);

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  GenerateMiss(masm, argc);
}
   1177 
   1178 
// The offset (in bytes) from the inlined patch site to the start of
// the inlined load instruction.  Must agree with the code emitted by
// the full code generator; PatchInlinedLoad below depends on it.
const int LoadIC::kOffsetToLoadInstruction = 20;
   1182 
   1183 
// Disables any inlined inobject load at the call site |address|.
void LoadIC::ClearInlinedVersion(Address address) {
  // Reset the map check of the inlined inobject property load (if
  // present) to guarantee failure by holding an invalid map (the null
  // value).  The offset can be patched to anything.
  PatchInlinedLoad(address, Heap::null_value(), kMaxInt);
}
   1190 
   1191 
// Generates the load IC miss handler: rearranges the stack into the
// runtime calling convention (receiver, name, return address) and
// tail-calls the kLoadIC_Miss runtime entry.
void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rcx    : name
  //  -- rsp[0] : return address
  //  -- rsp[8] : receiver
  // -----------------------------------

  __ pop(rbx);               // Save the return address.
  __ push(Operand(rsp, 0));  // receiver
  __ push(rcx);  // name
  __ push(rbx);  // return address

  // Perform tail call to the entry (2 arguments, 1 result).
  __ TailCallRuntime(ExternalReference(IC_Utility(kLoadIC_Miss)), 2, 1);
}
   1207 
   1208 
// Generates the specialized load stub for the JSArray "length"
// property; falls back to the generic load miss stub if the receiver
// check fails.
void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rcx    : name
  //  -- rsp[0] : return address
  //  -- rsp[8] : receiver
  // -----------------------------------
  Label miss;

  // Load the receiver from the stack.
  __ movq(rax, Operand(rsp, kPointerSize));

  StubCompiler::GenerateLoadArrayLength(masm, rax, rdx, &miss);
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
   1223 
   1224 
// Generates the specialized load stub for the JSFunction "prototype"
// property; falls back to the generic load miss stub if the receiver
// check fails.
void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rcx    : name
  //  -- rsp[0] : return address
  //  -- rsp[8] : receiver
  // -----------------------------------
  Label miss;

  // Load the receiver from the stack.
  __ movq(rax, Operand(rsp, kPointerSize));

  StubCompiler::GenerateLoadFunctionPrototype(masm, rax, rdx, rbx, &miss);
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
   1239 
   1240 
// Generates the megamorphic load IC: probes the stub cache for a
// monomorphic stub matching (receiver map, name) and jumps to it on a
// hit; otherwise falls through to the generic load miss stub.
void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rcx    : name
  //  -- rsp[0] : return address
  //  -- rsp[8] : receiver
  // -----------------------------------

  // Load the receiver from the stack.
  __ movq(rax, Operand(rsp, kPointerSize));

  // Probe the stub cache (a hit jumps away and does not return here).
  Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC,
                                         NOT_IN_LOOP,
                                         MONOMORPHIC);
  StubCache::GenerateProbe(masm, flags, rax, rcx, rbx, rdx);

  // Cache miss: Jump to runtime.
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
   1259 
   1260 
// Generates the load IC for receivers with normal (dictionary-mode)
// properties: validates the receiver, checks access rights (with a
// special path for the global proxy), and loads the property from the
// receiver's dictionary.  Anything unexpected goes to the miss stub.
void LoadIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rcx    : name
  //  -- rsp[0] : return address
  //  -- rsp[8] : receiver
  // -----------------------------------
  Label miss, probe, global;

  // Load the receiver from the stack.
  __ movq(rax, Operand(rsp, kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rax, &miss);

  // Check that the receiver is a valid JS object.
  __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rbx);
  __ j(below, &miss);

  // If this assert fails, we have to check upper bound too.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);

  // Check for access to global object (unlikely).
  __ CmpInstanceType(rbx, JS_GLOBAL_PROXY_TYPE);
  __ j(equal, &global);

  // Check for non-global object that requires access check.
  // NOTE(review): this uses a 32-bit test on the byte-wide bit-field
  // slot; the immediate only selects a bit in the low byte, but the
  // sibling code in CallIC::GenerateNormal uses testb here — confirm
  // the wider read is intentional.
  __ testl(FieldOperand(rbx, Map::kBitFieldOffset),
          Immediate(1 << Map::kIsAccessCheckNeeded));
  __ j(not_zero, &miss);

  // Search the dictionary placing the result in rax.
  __ bind(&probe);
  GenerateDictionaryLoad(masm, &miss, rdx, rax, rbx, rcx, CHECK_DICTIONARY);
  __ ret(0);

  // Global object access: Check access rights.
  __ bind(&global);
  __ CheckAccessGlobalProxy(rax, rdx, &miss);
  __ jmp(&probe);

  // Cache miss: Restore receiver from stack and jump to runtime.
  __ bind(&miss);
  __ movq(rax, Operand(rsp, 1 * kPointerSize));
  GenerateMiss(masm);
}
   1305 
   1306 
// Generates the specialized load stub for the String "length"
// property; falls back to the generic load miss stub if the receiver
// check fails.
void LoadIC::GenerateStringLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rcx    : name
  //  -- rsp[0] : return address
  //  -- rsp[8] : receiver
  // -----------------------------------
  Label miss;

  // Load the receiver from the stack.
  __ movq(rax, Operand(rsp, kPointerSize));

  StubCompiler::GenerateLoadStringLength(masm, rax, rdx, rbx, &miss);
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
   1321 
   1322 
// Patches an inlined inobject property load at call site |address| to
// check for |map| and load the field at |offset|.  Returns false when
// no load was inlined at the site (no marker instruction present).
// The byte offsets below are tied to the exact instruction sequence
// the code generator emits and to kOffsetToLoadInstruction above.
bool LoadIC::PatchInlinedLoad(Address address, Object* map, int offset) {
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;
  // If the instruction following the call is not a test eax, nothing
  // was inlined.
  if (*test_instruction_address != kTestEaxByte) return false;

  Address delta_address = test_instruction_address + 1;
  // The delta to the start of the map check instruction.
  int delta = *reinterpret_cast<int*>(delta_address);

  // The map address is the last 8 bytes of the 10-byte
  // immediate move instruction, so we add 2 to get the
  // offset to the last 8 bytes.
  Address map_address = test_instruction_address + delta + 2;
  *(reinterpret_cast<Object**>(map_address)) = map;

  // The offset is in the 32-bit displacement of a seven byte
  // memory-to-register move instruction (REX.W 0x88 ModR/M disp32),
  // so we add 3 to get the offset of the displacement.
  Address offset_address =
      test_instruction_address + delta + kOffsetToLoadInstruction + 3;
  // Subtract the heap-object tag so the patched displacement addresses
  // the field directly from the tagged pointer.
  *reinterpret_cast<int*>(offset_address) = offset - kHeapObjectTag;
  return true;
}
   1349 
   1350 
// Generates the store IC miss handler: rearranges the stack into the
// runtime calling convention (receiver, name, value, return address)
// and tail-calls the kStoreIC_Miss runtime entry.
void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  __ pop(rbx);   // Save the return address.
  __ push(rdx);  // receiver
  __ push(rcx);  // name
  __ push(rax);  // value
  __ push(rbx);  // return address

  // Perform tail call to the entry (3 arguments, 1 result).
  __ TailCallRuntime(ExternalReference(IC_Utility(kStoreIC_Miss)), 3, 1);
}
   1368 
   1369 
// Generates the megamorphic store IC: probes the stub cache for a
// monomorphic stub matching (receiver map, name) and jumps to it on a
// hit; otherwise falls through to the miss handler.
void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  // Get the receiver from the stack and probe the stub cache (a hit
  // jumps away and does not return here).
  Code::Flags flags = Code::ComputeFlags(Code::STORE_IC,
                                         NOT_IN_LOOP,
                                         MONOMORPHIC);
  StubCache::GenerateProbe(masm, flags, rdx, rcx, rbx, no_reg);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}
   1387 
   1388 
   1389 #undef __
   1390 
   1391 
   1392 } }  // namespace v8::internal
   1393