Home | History | Annotate | Download | only in ia32
      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "src/v8.h"
      6 
      7 #if V8_TARGET_ARCH_IA32
      8 
      9 #include "src/codegen.h"
     10 #include "src/ic/ic.h"
     11 #include "src/ic/ic-compiler.h"
     12 #include "src/ic/stub-cache.h"
     13 
     14 namespace v8 {
     15 namespace internal {
     16 
     17 // ----------------------------------------------------------------------------
     18 // Static IC stub generators.
     19 //
     20 
     21 #define __ ACCESS_MASM(masm)
     22 
     23 
     24 static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm, Register type,
     25                                             Label* global_object) {
     26   // Register usage:
     27   //   type: holds the receiver instance type on entry.
     28   __ cmp(type, JS_GLOBAL_OBJECT_TYPE);
     29   __ j(equal, global_object);
     30   __ cmp(type, JS_BUILTINS_OBJECT_TYPE);
     31   __ j(equal, global_object);
     32   __ cmp(type, JS_GLOBAL_PROXY_TYPE);
     33   __ j(equal, global_object);
     34 }
     35 
     36 
// Helper function used to load a property from a dictionary backing
// storage. This function may fail to load a property even though it is
// in the dictionary, so code at miss_label must always call a backup
// property load that is complete. This function is safe to call if
// name is not internalized, and will jump to the miss_label in that
// case. The generated code assumes that the receiver has slow
// properties, is not a global object and does not have interceptors.
static void GenerateDictionaryLoad(MacroAssembler* masm, Label* miss_label,
                                   Register elements, Register name,
                                   Register r0, Register r1, Register result) {
  // Register use:
  //
  // elements - holds the property dictionary on entry and is unchanged.
  //
  // name - holds the name of the property on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0   - used for the index into the property dictionary
  //
  // r1   - used to hold the capacity of the property dictionary.
  //
  // result - holds the result on exit.

  Label done;

  // Probe the dictionary. On a miss this jumps straight to miss_label;
  // on a hit it falls through to &done with the entry index in r0.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm, miss_label, &done,
                                                   elements, name, r0, r1);

  // If probing finds an entry in the dictionary, r0 contains the
  // index into the dictionary. Check that the value is a normal
  // property.
  __ bind(&done);
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  // Each dictionary entry is a (key, value, details) triple; the details
  // word is the third slot, hence the 2 * kPointerSize offset.
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  // The details field is a smi, so the type bits sit above the smi tag
  // (kMask << kSmiTagSize). Only an all-zero type field (a plain data
  // property) passes; anything else bails to the complete backup load.
  __ test(Operand(elements, r0, times_4, kDetailsOffset - kHeapObjectTag),
          Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  __ j(not_zero, miss_label);

  // Get the value at the masked, scaled index. The value is the second
  // slot of the entry (kElementsStartOffset + kPointerSize).
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ mov(result, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag));
}
     83 
     84 
// Helper function used to store a property to a dictionary backing
// storage. This function may fail to store a property even though it
// is in the dictionary, so code at miss_label must always call a
// backup property store that is complete. This function is safe to
// call if name is not internalized, and will jump to the miss_label in
// that case. The generated code assumes that the receiver has slow
// properties, is not a global object and does not have interceptors.
static void GenerateDictionaryStore(MacroAssembler* masm, Label* miss_label,
                                    Register elements, Register name,
                                    Register value, Register r0, Register r1) {
  // Register use:
  //
  // elements - holds the property dictionary on entry and is clobbered.
  //
  // name - holds the name of the property on entry and is unchanged.
  //
  // value - holds the value to store and is unchanged.
  //
  // r0 - used for index into the property dictionary and is clobbered.
  //
  // r1 - used to hold the capacity of the property dictionary and is clobbered.
  Label done;


  // Probe the dictionary. On a miss this jumps straight to miss_label;
  // on a hit it falls through to &done with the entry index in r0.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm, miss_label, &done,
                                                   elements, name, r0, r1);

  // If probing finds an entry in the dictionary, r0 contains the
  // index into the dictionary. Check that the value is a normal
  // property that is not read only.
  __ bind(&done);
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  // The details word is the third slot of the (key, value, details) entry.
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  // Reject the store when either the property type bits or the READ_ONLY
  // attribute bit is set. The details field is a smi, so the mask is
  // shifted past the smi tag.
  const int kTypeAndReadOnlyMask =
      (PropertyDetails::TypeField::kMask |
       PropertyDetails::AttributesField::encode(READ_ONLY))
      << kSmiTagSize;
  __ test(Operand(elements, r0, times_4, kDetailsOffset - kHeapObjectTag),
          Immediate(kTypeAndReadOnlyMask));
  __ j(not_zero, miss_label);

  // Store the value at the masked, scaled index. The address is
  // materialized in r0 first so it can be reused for the write barrier.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ lea(r0, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag));
  __ mov(Operand(r0, 0), value);

  // Update write barrier. Make sure not to clobber the value: the
  // barrier needs a scratch register, so value is copied into r1 first.
  __ mov(r1, value);
  __ RecordWrite(elements, r0, r1, kDontSaveFPRegs);
}
    138 
    139 
// Checks the receiver for special cases (value type, slow case bits).
// Falls through for regular JS object; jumps to |slow| for smis,
// receivers needing access checks, receivers with the given interceptor
// bit set, and anything below JS_OBJECT_TYPE (including JSValue).
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver, Register map,
                                           int interceptor_bit, Label* slow) {
  // Register use:
  //   receiver - holds the receiver and is unchanged.
  // Scratch registers:
  //   map - used to hold the map of the receiver.

  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);

  // Get the map of the receiver.
  __ mov(map, FieldOperand(receiver, HeapObject::kMapOffset));

  // Check bit field: bail out if the receiver requires access checks or
  // has the caller-specified interceptor (indexed or named).
  __ test_b(FieldOperand(map, Map::kBitFieldOffset),
            (1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit));
  __ j(not_zero, slow);
  // Check that the object is some kind of JS object EXCEPT JS Value type.
  // In the case that the object is a value-wrapper object,
  // we enter the runtime system to make sure that indexing
  // into string objects works as intended.
  DCHECK(JS_OBJECT_TYPE > JS_VALUE_TYPE);

  // A single unsigned below-compare excludes both JSValue and all
  // non-JSObject types, relying on the enum ordering asserted above.
  __ CmpInstanceType(map, JS_OBJECT_TYPE);
  __ j(below, slow);
}
    169 
    170 
// Loads an indexed element from a fast case array.
// If not_fast_array is NULL, doesn't perform the elements map check.
// Jumps to |out_of_range| when the key is past the array length or the
// loaded slot holds the hole (so the prototype chain must be consulted).
static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver,
                                  Register key, Register scratch,
                                  Register result, Label* not_fast_array,
                                  Label* out_of_range) {
  // Register use:
  //   receiver - holds the receiver and is unchanged.
  //   key - holds the key and is unchanged (must be a smi).
  // Scratch registers:
  //   scratch - used to hold elements of the receiver and the loaded value.
  //   result - holds the result on exit if the load succeeds and
  //            we fall through.

  __ mov(scratch, FieldOperand(receiver, JSObject::kElementsOffset));
  if (not_fast_array != NULL) {
    // Check that the object is in fast mode and writable.
    __ CheckMap(scratch, masm->isolate()->factory()->fixed_array_map(),
                not_fast_array, DONT_DO_SMI_CHECK);
  } else {
    // Caller guarantees fast elements; only assert in debug builds.
    __ AssertFastElements(scratch);
  }
  // Check that the key (index) is within bounds. Both are smis, so the
  // comparison works on the tagged values directly.
  __ cmp(key, FieldOperand(scratch, FixedArray::kLengthOffset));
  __ j(above_equal, out_of_range);
  // Fast case: Do the load. A smi key carries a 1-bit tag, so times_2
  // scaling turns the tagged index into a byte offset (2 * key = 4 * index).
  STATIC_ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0));
  __ mov(scratch, FieldOperand(scratch, key, times_2, FixedArray::kHeaderSize));
  __ cmp(scratch, Immediate(masm->isolate()->factory()->the_hole_value()));
  // In case the loaded value is the_hole we have to consult GetProperty
  // to ensure the prototype chain is searched.
  __ j(equal, out_of_range);
  if (!result.is(scratch)) {
    __ mov(result, scratch);
  }
}
    207 
    208 
// Checks whether a key is an array index string or a unique name.
// Falls through if the key is a unique name. Jumps to |index_string|
// for strings with a cached array index (hash still in |hash|), and to
// |not_unique| for everything else.
static void GenerateKeyNameCheck(MacroAssembler* masm, Register key,
                                 Register map, Register hash,
                                 Label* index_string, Label* not_unique) {
  // Register use:
  //   key - holds the key and is unchanged. Assumed to be non-smi.
  // Scratch registers:
  //   map - used to hold the map of the key.
  //   hash - used to hold the hash of the key.
  Label unique;
  // Above LAST_UNIQUE_NAME_TYPE: definitely not a unique name.
  __ CmpObjectType(key, LAST_UNIQUE_NAME_TYPE, map);
  __ j(above, not_unique);
  // Exactly LAST_UNIQUE_NAME_TYPE (== FIRST_NONSTRING_TYPE, i.e. a
  // non-string unique name such as a symbol): done.
  STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
  __ j(equal, &unique);

  // Is the string an array index, with cached numeric value?
  __ mov(hash, FieldOperand(key, Name::kHashFieldOffset));
  __ test(hash, Immediate(Name::kContainsCachedArrayIndexMask));
  __ j(zero, index_string);

  // Is the string internalized? We already know it's a string so a single
  // bit test is enough.
  STATIC_ASSERT(kNotInternalizedTag != 0);
  __ test_b(FieldOperand(map, Map::kInstanceTypeOffset),
            kIsNotInternalizedMask);
  __ j(not_zero, not_unique);

  __ bind(&unique);
}
    239 
    240 
// Emits a lookup of |key| in the mapped portion of a sloppy-arguments
// object. On success, returns an Operand addressing the context slot
// that holds the element (scratch1 = context, scratch2 = slot index).
// Jumps to |unmapped_case| (with the parameter map left in scratch1)
// when the key is out of the mapped range or the mapped entry is the
// hole, and to |slow_case| when the receiver/key shape is wrong.
static Operand GenerateMappedArgumentsLookup(
    MacroAssembler* masm, Register object, Register key, Register scratch1,
    Register scratch2, Label* unmapped_case, Label* slow_case) {
  Heap* heap = masm->isolate()->heap();
  Factory* factory = masm->isolate()->factory();

  // Check that the receiver is a JSObject. Because of the elements
  // map check later, we do not need to check for interceptors or
  // whether it requires access checks.
  __ JumpIfSmi(object, slow_case);
  // Check that the object is some kind of JSObject.
  __ CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, scratch1);
  __ j(below, slow_case);

  // Check that the key is a positive smi: 0x80000001 tests both the
  // sign bit and the smi tag bit in one instruction.
  __ test(key, Immediate(0x80000001));
  __ j(not_zero, slow_case);

  // Load the elements into scratch1 and check its map.
  Handle<Map> arguments_map(heap->sloppy_arguments_elements_map());
  __ mov(scratch1, FieldOperand(object, JSObject::kElementsOffset));
  __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK);

  // Check if element is in the range of mapped arguments. If not, jump
  // to the unmapped lookup with the parameter map in scratch1. The first
  // two parameter-map slots are context and backing store, hence the -2.
  __ mov(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset));
  __ sub(scratch2, Immediate(Smi::FromInt(2)));
  __ cmp(key, scratch2);
  __ j(above_equal, unmapped_case);

  // Load element index and check whether it is the hole. Mapped entries
  // start after the two header slots (context, backing store).
  const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize;
  __ mov(scratch2,
         FieldOperand(scratch1, key, times_half_pointer_size, kHeaderSize));
  __ cmp(scratch2, factory->the_hole_value());
  __ j(equal, unmapped_case);

  // Load value from context and return it. We can reuse scratch1 because
  // we do not jump to the unmapped lookup (which requires the parameter
  // map in scratch1).
  const int kContextOffset = FixedArray::kHeaderSize;
  __ mov(scratch1, FieldOperand(scratch1, kContextOffset));
  return FieldOperand(scratch1, scratch2, times_half_pointer_size,
                      Context::kHeaderSize);
}
    286 
    287 
// Emits a lookup of |key| in the unmapped (backing store) portion of a
// sloppy-arguments object. |parameter_map| holds the parameter map on
// entry and is clobbered (reused for the backing store). Returns an
// Operand addressing the element; jumps to |slow_case| when the backing
// store is not a FixedArray or the key is out of bounds.
static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
                                               Register key,
                                               Register parameter_map,
                                               Register scratch,
                                               Label* slow_case) {
  // Element is in arguments backing store, which is referenced by the
  // second element of the parameter_map.
  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
  Register backing_store = parameter_map;
  __ mov(backing_store, FieldOperand(parameter_map, kBackingStoreOffset));
  Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
  __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK);
  // Bounds check: key and length are both smis, compared as such.
  __ mov(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset));
  __ cmp(key, scratch);
  __ j(greater_equal, slow_case);
  return FieldOperand(backing_store, key, times_half_pointer_size,
                      FixedArray::kHeaderSize);
}
    306 
    307 
// Generic keyed load: handles smi keys into fast or dictionary
// elements, and name keys via the keyed lookup cache or the property
// dictionary. Anything unusual falls through to the runtime.
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
  // The return address is on the stack.
  Label slow, check_name, index_smi, index_name, property_array_property;
  Label probe_dictionary, check_number_dictionary;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  DCHECK(receiver.is(edx));
  DCHECK(key.is(ecx));

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &check_name);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(masm, receiver, eax,
                                 Map::kHasIndexedInterceptor, &slow);

  // Check the receiver's map (in eax) to see if it has fast elements.
  __ CheckFastElements(eax, &check_number_dictionary);

  // eax doubles as scratch and result register for the fast path.
  GenerateFastArrayLoad(masm, receiver, key, eax, eax, NULL, &slow);
  Isolate* isolate = masm->isolate();
  Counters* counters = isolate->counters();
  __ IncrementCounter(counters->keyed_load_generic_smi(), 1);
  __ ret(0);

  __ bind(&check_number_dictionary);
  __ mov(ebx, key);
  __ SmiUntag(ebx);
  __ mov(eax, FieldOperand(receiver, JSObject::kElementsOffset));

  // Check whether the elements is a number dictionary.
  // ebx: untagged index
  // eax: elements
  __ CheckMap(eax, isolate->factory()->hash_table_map(), &slow,
              DONT_DO_SMI_CHECK);
  Label slow_pop_receiver;
  // Push receiver on the stack to free up a register for the dictionary
  // probing.
  __ push(receiver);
  __ LoadFromNumberDictionary(&slow_pop_receiver, eax, key, ebx, edx, edi, eax);
  // Pop receiver before returning.
  __ pop(receiver);
  __ ret(0);

  __ bind(&slow_pop_receiver);
  // Pop the receiver from the stack and jump to runtime.
  __ pop(receiver);

  __ bind(&slow);
  // Slow case: jump to runtime.
  __ IncrementCounter(counters->keyed_load_generic_slow(), 1);
  GenerateRuntimeGetProperty(masm);

  // Key is not a smi: classify it as array-index string, unique name, or
  // neither (the latter goes to &slow).
  __ bind(&check_name);
  GenerateKeyNameCheck(masm, key, eax, ebx, &index_name, &slow);

  GenerateKeyedLoadReceiverCheck(masm, receiver, eax, Map::kHasNamedInterceptor,
                                 &slow);

  // If the receiver is a fast-case object, check the keyed lookup
  // cache. Otherwise probe the dictionary.
  __ mov(ebx, FieldOperand(receiver, JSObject::kPropertiesOffset));
  __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
         Immediate(isolate->factory()->hash_table_map()));
  __ j(equal, &probe_dictionary);

  // The receiver's map is still in eax, compute the keyed lookup cache hash
  // based on 32 bits of the map pointer and the string hash.
  if (FLAG_debug_code) {
    __ cmp(eax, FieldOperand(receiver, HeapObject::kMapOffset));
    __ Check(equal, kMapIsNoLongerInEax);
  }
  __ mov(ebx, eax);  // Keep the map around for later.
  __ shr(eax, KeyedLookupCache::kMapHashShift);
  __ mov(edi, FieldOperand(key, String::kHashFieldOffset));
  __ shr(edi, String::kHashShift);
  __ xor_(eax, edi);
  // eax now holds the bucket index into the lookup cache.
  __ and_(eax, KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);

  // Load the key (consisting of map and internalized string) from the cache and
  // check for match.
  Label load_in_object_property;
  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
  Label hit_on_nth_entry[kEntriesPerBucket];
  ExternalReference cache_keys =
      ExternalReference::keyed_lookup_cache_keys(masm->isolate());

  // Probe all but the last entry of the bucket. Each cache entry is a
  // (map, name) pair of two pointers, hence the shift by
  // kPointerSizeLog2 + 1 to convert the index to a byte offset.
  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
    Label try_next_entry;
    __ mov(edi, eax);
    __ shl(edi, kPointerSizeLog2 + 1);
    if (i != 0) {
      __ add(edi, Immediate(kPointerSize * i * 2));
    }
    // Compare the cached map first, then the cached name.
    __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
    __ j(not_equal, &try_next_entry);
    __ add(edi, Immediate(kPointerSize));
    __ cmp(key, Operand::StaticArray(edi, times_1, cache_keys));
    __ j(equal, &hit_on_nth_entry[i]);
    __ bind(&try_next_entry);
  }

  // Last entry of the bucket: a mismatch here means a cache miss (&slow).
  __ lea(edi, Operand(eax, 1));
  __ shl(edi, kPointerSizeLog2 + 1);
  __ add(edi, Immediate(kPointerSize * (kEntriesPerBucket - 1) * 2));
  __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
  __ j(not_equal, &slow);
  __ add(edi, Immediate(kPointerSize));
  __ cmp(key, Operand::StaticArray(edi, times_1, cache_keys));
  __ j(not_equal, &slow);

  // Get field offset.
  // ebx      : receiver's map
  // eax      : lookup cache index
  ExternalReference cache_field_offsets =
      ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());

  // Hit on nth entry.
  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
    __ bind(&hit_on_nth_entry[i]);
    if (i != 0) {
      // Adjust the bucket index to the entry that actually hit.
      __ add(eax, Immediate(i));
    }
    __ mov(edi,
           Operand::StaticArray(eax, times_pointer_size, cache_field_offsets));
    // edi = field index; subtracting the in-object property count tells
    // us whether the property lives in-object (negative -> in-object) or
    // in the property backing store (non-negative, unsigned >=).
    __ movzx_b(eax, FieldOperand(ebx, Map::kInObjectPropertiesOffset));
    __ sub(edi, eax);
    __ j(above_equal, &property_array_property);
    if (i != 0) {
      __ jmp(&load_in_object_property);
    }
  }

  // Load in-object property.
  __ bind(&load_in_object_property);
  __ movzx_b(eax, FieldOperand(ebx, Map::kInstanceSizeOffset));
  __ add(eax, edi);  // Index of the property within the object.
  __ mov(eax, FieldOperand(receiver, eax, times_pointer_size, 0));
  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
  __ ret(0);

  // Load property array property.
  __ bind(&property_array_property);
  __ mov(eax, FieldOperand(receiver, JSObject::kPropertiesOffset));
  __ mov(eax,
         FieldOperand(eax, edi, times_pointer_size, FixedArray::kHeaderSize));
  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
  __ ret(0);

  // Do a quick inline probe of the receiver's dictionary, if it
  // exists.
  __ bind(&probe_dictionary);

  __ mov(eax, FieldOperand(receiver, JSObject::kMapOffset));
  __ movzx_b(eax, FieldOperand(eax, Map::kInstanceTypeOffset));
  // Global objects need the runtime (cell-based properties); bail out.
  GenerateGlobalInstanceTypeCheck(masm, eax, &slow);

  GenerateDictionaryLoad(masm, &slow, ebx, key, eax, edi, eax);
  __ IncrementCounter(counters->keyed_load_generic_symbol(), 1);
  __ ret(0);

  // The key is a string with a cached array index: extract the index
  // from the hash (left in ebx by GenerateKeyNameCheck).
  __ bind(&index_name);
  __ IndexFromHash(ebx, key);
  // Now jump to the place where smi keys are handled.
  __ jmp(&index_smi);
}
    477 
    478 
// Keyed load specialized for string receivers: returns the character at
// the given array index via StringCharAtGenerator, missing to the
// generic IC when the receiver is not a string, the index is not a
// number, or the index is out of range.
void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
  // Return address is on the stack.
  Label miss;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register index = LoadDescriptor::NameRegister();
  Register scratch = ebx;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));
  Register result = eax;
  DCHECK(!result.is(scratch));

  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  // Deferred (slow) code for the char-at generator, e.g. non-flat
  // strings; emitted after the fast-path return.
  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, call_helper);

  __ bind(&miss);
  GenerateMiss(masm);
}
    504 
    505 
// Keyed store specialized for sloppy-arguments objects: writes through
// the mapped (context) slot when the key is a mapped parameter,
// otherwise through the unmapped backing store; misses to the generic
// handler for anything else.
void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) {
  // Return address is on the stack.
  Label slow, notin;
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register name = StoreDescriptor::NameRegister();
  Register value = StoreDescriptor::ValueRegister();
  DCHECK(receiver.is(edx));
  DCHECK(name.is(ecx));
  DCHECK(value.is(eax));

  Operand mapped_location = GenerateMappedArgumentsLookup(
      masm, receiver, name, ebx, edi, &notin, &slow);
  __ mov(mapped_location, value);
  // Write barrier for the context store: compute the slot address in ecx
  // and copy value to edx so the original value register stays intact.
  __ lea(ecx, mapped_location);
  __ mov(edx, value);
  __ RecordWrite(ebx, ecx, edx, kDontSaveFPRegs);
  __ Ret();
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in ebx.
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, name, ebx, edi, &slow);
  __ mov(unmapped_location, value);
  // Write barrier for the backing-store write, mirroring the mapped case.
  __ lea(edi, unmapped_location);
  __ mov(edx, value);
  __ RecordWrite(ebx, edi, edx, kDontSaveFPRegs);
  __ Ret();
  __ bind(&slow);
  GenerateMiss(masm);
}
    535 
    536 
// Shared tail of the generic keyed store: performs the actual element
// write for fast (object or double) elements, handling hole checks,
// optional length increment (for array grow stores) and elements-kind
// transitions (smi -> double, smi -> object, double -> object).
static void KeyedStoreGenerateGenericHelper(
    MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow,
    KeyedStoreCheckMap check_map, KeyedStoreIncrementLength increment_length) {
  Label transition_smi_elements;
  Label finish_object_store, non_double_value, transition_double_elements;
  Label fast_double_without_map_check;
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register key = StoreDescriptor::NameRegister();
  Register value = StoreDescriptor::ValueRegister();
  DCHECK(receiver.is(edx));
  DCHECK(key.is(ecx));
  DCHECK(value.is(eax));
  // key is a smi.
  // ebx: FixedArray receiver->elements
  // edi: receiver map
  // Fast case: Do the store, could either Object or double.
  __ bind(fast_object);
  if (check_map == kCheckMap) {
    __ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset));
    __ cmp(edi, masm->isolate()->factory()->fixed_array_map());
    __ j(not_equal, fast_double);
  }

  // HOLECHECK: guards "A[i] = V"
  // We have to go to the runtime if the current value is the hole because
  // there may be a callback on the element
  Label holecheck_passed1;
  __ cmp(FixedArrayElementOperand(ebx, key),
         masm->isolate()->factory()->the_hole_value());
  __ j(not_equal, &holecheck_passed1);
  // Storing over a hole is only safe if no prototype has dictionary
  // elements (which could carry accessors); otherwise go slow. Reload
  // elements afterwards since the scan clobbers ebx.
  __ JumpIfDictionaryInPrototypeChain(receiver, ebx, edi, slow);
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));

  __ bind(&holecheck_passed1);

  // Smi stores don't require further checks.
  Label non_smi_value;
  __ JumpIfNotSmi(value, &non_smi_value);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(FieldOperand(receiver, JSArray::kLengthOffset),
           Immediate(Smi::FromInt(1)));
  }
  // It's irrelevant whether array is smi-only or not when writing a smi.
  // No write barrier needed: a smi is not a heap pointer.
  __ mov(FixedArrayElementOperand(ebx, key), value);
  __ ret(0);

  __ bind(&non_smi_value);
  // Escape to elements kind transition case.
  __ mov(edi, FieldOperand(receiver, HeapObject::kMapOffset));
  __ CheckFastObjectElements(edi, &transition_smi_elements);

  // Fast elements array, store the value to the elements backing store.
  __ bind(&finish_object_store);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(FieldOperand(receiver, JSArray::kLengthOffset),
           Immediate(Smi::FromInt(1)));
  }
  __ mov(FixedArrayElementOperand(ebx, key), value);
  // Update write barrier for the elements array address.
  __ mov(edx, value);  // Preserve the value which is returned.
  __ RecordWriteArray(ebx, edx, key, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ ret(0);

  __ bind(fast_double);
  if (check_map == kCheckMap) {
    // Check for fast double array case. If this fails, call through to the
    // runtime.
    __ cmp(edi, masm->isolate()->factory()->fixed_double_array_map());
    __ j(not_equal, slow);
    // If the value is a number, store it as a double in the FastDoubleElements
    // array.
  }

  // HOLECHECK: guards "A[i] double hole?"
  // We have to see if the double version of the hole is present. If so
  // go to the runtime. Only the upper 32 bits of the NaN need comparing
  // to identify the hole pattern, hence the sizeof(kHoleNanLower32) skip.
  uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
  __ cmp(FieldOperand(ebx, key, times_4, offset), Immediate(kHoleNanUpper32));
  __ j(not_equal, &fast_double_without_map_check);
  __ JumpIfDictionaryInPrototypeChain(receiver, ebx, edi, slow);
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));

  __ bind(&fast_double_without_map_check);
  __ StoreNumberToDoubleElements(value, ebx, key, edi, xmm0,
                                 &transition_double_elements);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(FieldOperand(receiver, JSArray::kLengthOffset),
           Immediate(Smi::FromInt(1)));
  }
  __ ret(0);

  __ bind(&transition_smi_elements);
  __ mov(ebx, FieldOperand(receiver, HeapObject::kMapOffset));

  // Transition the array appropriately depending on the value type.
  __ CheckMap(value, masm->isolate()->factory()->heap_number_map(),
              &non_double_value, DONT_DO_SMI_CHECK);

  // Value is a double. Transition FAST_SMI_ELEMENTS -> FAST_DOUBLE_ELEMENTS
  // and complete the store.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                         FAST_DOUBLE_ELEMENTS, ebx, edi, slow);
  AllocationSiteMode mode =
      AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_DOUBLE_ELEMENTS);
  ElementsTransitionGenerator::GenerateSmiToDouble(masm, receiver, key, value,
                                                   ebx, mode, slow);
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&fast_double_without_map_check);

  __ bind(&non_double_value);
  // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, FAST_ELEMENTS, ebx,
                                         edi, slow);
  mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
      masm, receiver, key, value, ebx, mode, slow);
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);

  __ bind(&transition_double_elements);
  // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
  // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
  // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
  __ mov(ebx, FieldOperand(receiver, HeapObject::kMapOffset));
  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS,
                                         ebx, edi, slow);
  mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateDoubleToObject(masm, receiver, key,
                                                      value, ebx, mode, slow);
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);
}
    673 
    674 
// Generic (megamorphic) keyed store stub. Dispatches on the receiver's
// elements kind and either performs the store inline -- fast (tagged) or
// fast double elements, optionally growing the backing store by exactly
// one element -- or tail-calls the runtime set-property helper for
// everything else (smis, access-checked/observed maps, non-smi keys,
// out-of-bounds stores, unknown backing-store maps).
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
                                   StrictMode strict_mode) {
  // Return address is on the stack.
  Label slow, fast_object, fast_object_grow;
  Label fast_double, fast_double_grow;
  Label array, extra, check_if_double_array;
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register key = StoreDescriptor::NameRegister();
  DCHECK(receiver.is(edx));
  DCHECK(key.is(ecx));

  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, &slow);
  // Get the map from the receiver.
  __ mov(edi, FieldOperand(receiver, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks and is not observed.
  // The generic stub does not perform map checks or handle observed objects.
  __ test_b(FieldOperand(edi, Map::kBitFieldOffset),
            1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved);
  __ j(not_zero, &slow);
  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &slow);
  // JSArrays need their length updated on growth, so they take a
  // separate path below.
  __ CmpInstanceType(edi, JS_ARRAY_TYPE);
  __ j(equal, &array);
  // Check that the object is some kind of JSObject.
  __ CmpInstanceType(edi, FIRST_JS_OBJECT_TYPE);
  __ j(below, &slow);

  // Object case: Check key against length in the elements array.
  // Key is a smi.
  // edi: receiver map
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));
  // Check array bounds. Both the key and the length of FixedArray are smis.
  __ cmp(key, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ j(below, &fast_object);

  // Slow case: call runtime.
  __ bind(&slow);
  PropertyICCompiler::GenerateRuntimeSetProperty(masm, strict_mode);

  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  __ bind(&extra);
  // receiver is a JSArray.
  // key is a smi.
  // ebx: receiver->elements, a FixedArray
  // edi: receiver map
  // flags: compare (key, receiver.length())
  // The flags are still set by the key vs. JSArray length compare
  // performed before jumping here; key == length means appending.
  // do not leave holes in the array:
  __ j(not_equal, &slow);
  // The store fits only if the backing store has spare capacity beyond
  // the current length.
  __ cmp(key, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ j(above_equal, &slow);
  // Dispatch on the elements map to the matching grow path.
  __ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset));
  __ cmp(edi, masm->isolate()->factory()->fixed_array_map());
  __ j(not_equal, &check_if_double_array);
  __ jmp(&fast_object_grow);

  __ bind(&check_if_double_array);
  __ cmp(edi, masm->isolate()->factory()->fixed_double_array_map());
  __ j(not_equal, &slow);
  __ jmp(&fast_double_grow);

  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode (and writable); if it
  // is the length is always a smi.
  __ bind(&array);
  // receiver is a JSArray.
  // key is a smi.
  // edi: receiver map
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));

  // Check the key against the length in the array and fall through to the
  // common store code.
  __ cmp(key, FieldOperand(receiver, JSArray::kLengthOffset));  // Compare smis.
  __ j(above_equal, &extra);

  // Emit the shared fast-store tails: one in-bounds variant that checks
  // the elements map, and one growing variant that bumps the length.
  KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double, &slow,
                                  kCheckMap, kDontIncrementLength);
  KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
                                  &slow, kDontCheckMap, kIncrementLength);
}
    757 
    758 
// Load a property from a receiver whose properties are in dictionary
// (slow) mode by probing the property dictionary inline. On probe failure
// it tail-calls the runtime get-property path directly (no miss handler).
void LoadIC::GenerateNormal(MacroAssembler* masm) {
  Register dictionary = eax;
  DCHECK(!dictionary.is(LoadDescriptor::ReceiverRegister()));
  DCHECK(!dictionary.is(LoadDescriptor::NameRegister()));

  Label slow;

  // Load the receiver's property dictionary.
  __ mov(dictionary, FieldOperand(LoadDescriptor::ReceiverRegister(),
                                  JSObject::kPropertiesOffset));
  // Probe the dictionary; on success the loaded value is left in eax
  // (the result register passed below) and we return to the caller.
  GenerateDictionaryLoad(masm, &slow, dictionary,
                         LoadDescriptor::NameRegister(), edi, ebx, eax);
  __ ret(0);

  // Dictionary load failed, go slow (but don't miss).
  __ bind(&slow);
  GenerateRuntimeGetProperty(masm);
}
    776 
    777 
// Push the load IC's (receiver, name) arguments below the return address,
// producing the stack layout expected by the miss/runtime entry points.
static void LoadIC_PushArgs(MacroAssembler* masm) {
  Register receiver = LoadDescriptor::ReceiverRegister();
  Register name = LoadDescriptor::NameRegister();
  // ebx is used to stash the return address, so it must not alias either
  // argument register.
  DCHECK(!ebx.is(receiver) && !ebx.is(name));

  // Pop the return address, push the two arguments, then put the return
  // address back on top so a subsequent tail call returns to the caller.
  __ pop(ebx);
  __ push(receiver);
  __ push(name);
  __ push(ebx);
}
    788 
    789 
// Miss handler stub for LoadIC: bumps the miss counter and tail-calls the
// kLoadIC_Miss runtime entry with (receiver, name) on the stack.
void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  __ IncrementCounter(masm->isolate()->counters()->load_miss(), 1);

  LoadIC_PushArgs(masm);

  // Perform tail call to the entry (2 arguments, result size 1).
  ExternalReference ref =
      ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}
    801 
    802 
// Tail-call Runtime::kGetProperty with (receiver, name) on the stack;
// used as the slow path when the inline dictionary probe fails.
void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // Return address is on the stack.
  LoadIC_PushArgs(masm);

  // Perform tail call to the entry (2 arguments, result size 1).
  __ TailCallRuntime(Runtime::kGetProperty, 2, 1);
}
    810 
    811 
// Miss handler stub for KeyedLoadIC: bumps the keyed-load miss counter and
// tail-calls the kKeyedLoadIC_Miss runtime entry with (receiver, name).
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  __ IncrementCounter(masm->isolate()->counters()->keyed_load_miss(), 1);

  LoadIC_PushArgs(masm);

  // Perform tail call to the entry (2 arguments, result size 1).
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}
    823 
    824 
// Tail-call Runtime::kKeyedGetProperty with (receiver, key) on the stack.
void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // Return address is on the stack.
  LoadIC_PushArgs(masm);

  // Perform tail call to the entry (2 arguments, result size 1).
  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
}
    832 
    833 
// Megamorphic store stub: probe the isolate's stub cache for a store
// handler matching (receiver, name); on a cache hit the probe jumps to the
// handler, otherwise fall through to the miss handler.
void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
  // Return address is on the stack.
  Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
      Code::ComputeHandlerFlags(Code::STORE_IC));
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, flags, false, StoreDescriptor::ReceiverRegister(),
      StoreDescriptor::NameRegister(), ebx, no_reg);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}
    845 
    846 
// Push the store IC's (receiver, name, value) arguments below the return
// address, producing the stack layout expected by the store miss entries.
static void StoreIC_PushArgs(MacroAssembler* masm) {
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register name = StoreDescriptor::NameRegister();
  Register value = StoreDescriptor::ValueRegister();

  // ebx temporarily holds the return address, so it must not alias any
  // of the argument registers.
  DCHECK(!ebx.is(receiver) && !ebx.is(name) && !ebx.is(value));

  // Pop the return address, push the three arguments, then put the return
  // address back on top so a subsequent tail call returns to the caller.
  __ pop(ebx);
  __ push(receiver);
  __ push(name);
  __ push(value);
  __ push(ebx);
}
    860 
    861 
// Miss handler stub for StoreIC: tail-calls the kStoreIC_Miss runtime
// entry with (receiver, name, value) on the stack.
void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);

  // Perform tail call to the entry (3 arguments, result size 1).
  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
    871 
    872 
// Store to a receiver with dictionary (slow) properties by updating the
// property dictionary inline; falls back to the miss handler if the probe
// or store cannot be done inline.
void StoreIC::GenerateNormal(MacroAssembler* masm) {
  Label restore_miss;
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register name = StoreDescriptor::NameRegister();
  Register value = StoreDescriptor::ValueRegister();
  Register dictionary = ebx;

  // Load the receiver's property dictionary.
  __ mov(dictionary, FieldOperand(receiver, JSObject::kPropertiesOffset));

  // A lot of registers are needed for storing to slow case
  // objects. Push and restore receiver but rely on
  // GenerateDictionaryStore preserving the value and name.
  __ push(receiver);
  GenerateDictionaryStore(masm, &restore_miss, dictionary, name, value,
                          receiver, edi);
  __ Drop(1);  // Success: discard the saved receiver.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(), 1);
  __ ret(0);

  __ bind(&restore_miss);
  // Failure: restore the receiver (clobbered above) before missing.
  __ pop(receiver);
  __ IncrementCounter(counters->store_normal_miss(), 1);
  GenerateMiss(masm);
}
    898 
    899 
// Miss handler stub for KeyedStoreIC: tail-calls the kKeyedStoreIC_Miss
// runtime entry with (receiver, key, value) on the stack.
void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);

  // Do tail-call to runtime routine (3 arguments, result size 1).
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
    909 
    910 
    911 #undef __
    912 
    913 
    914 Condition CompareIC::ComputeCondition(Token::Value op) {
    915   switch (op) {
    916     case Token::EQ_STRICT:
    917     case Token::EQ:
    918       return equal;
    919     case Token::LT:
    920       return less;
    921     case Token::GT:
    922       return greater;
    923     case Token::LTE:
    924       return less_equal;
    925     case Token::GTE:
    926       return greater_equal;
    927     default:
    928       UNREACHABLE();
    929       return no_condition;
    930   }
    931 }
    932 
    933 
    934 bool CompareIC::HasInlinedSmiCode(Address address) {
    935   // The address of the instruction following the call.
    936   Address test_instruction_address =
    937       address + Assembler::kCallTargetAddressOffset;
    938 
    939   // If the instruction following the call is not a test al, nothing
    940   // was inlined.
    941   return *test_instruction_address == Assembler::kTestAlByte;
    942 }
    943 
    944 
// Enable or disable the inlined smi check at an IC call site by rewriting
// one byte of generated code. The byte after the call is either a test-al
// marker (smi check present) or a nop (nothing to patch); the byte after
// the marker is a backward delta to the short conditional jump whose
// condition code gets flipped between carry-based (check disabled) and
// zero-based (check enabled) encodings, preserving the jump's sense.
// NOTE(review): assumes `address` points at the call such that adding
// Assembler::kCallTargetAddressOffset yields the following instruction.
void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a test al, nothing
  // was inlined.
  if (*test_instruction_address != Assembler::kTestAlByte) {
    DCHECK(*test_instruction_address == Assembler::kNopByte);
    return;
  }

  Address delta_address = test_instruction_address + 1;
  // The delta to the start of the map check instruction and the
  // condition code uses at the patched jump.
  uint8_t delta = *reinterpret_cast<uint8_t*>(delta_address);
  if (FLAG_trace_ic) {
    PrintF("[  patching ic at %p, test=%p, delta=%d\n", address,
           test_instruction_address, delta);
  }

  // Patch with a short conditional jump. Enabling means switching from a short
  // jump-if-carry/not-carry to jump-if-zero/not-zero, whereas disabling is the
  // reverse operation of that.
  Address jmp_address = test_instruction_address - delta;
  // Sanity-check the opcode currently at the jump site matches the state
  // we are transitioning away from.
  DCHECK((check == ENABLE_INLINED_SMI_CHECK)
             ? (*jmp_address == Assembler::kJncShortOpcode ||
                *jmp_address == Assembler::kJcShortOpcode)
             : (*jmp_address == Assembler::kJnzShortOpcode ||
                *jmp_address == Assembler::kJzShortOpcode));
  // Pick the new condition so taken/not-taken polarity is preserved.
  Condition cc =
      (check == ENABLE_INLINED_SMI_CHECK)
          ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero)
          : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry);
  *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
}
    981 }
    982 }  // namespace v8::internal
    983 
    984 #endif  // V8_TARGET_ARCH_IA32
    985