Home | History | Annotate | Download | only in ic
      1 // Copyright 2016 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "src/ic/keyed-store-generic.h"
      6 
      7 #include "src/code-factory.h"
      8 #include "src/code-stub-assembler.h"
      9 #include "src/contexts.h"
     10 #include "src/ic/accessor-assembler.h"
     11 #include "src/interface-descriptors.h"
     12 #include "src/isolate.h"
     13 #include "src/objects-inl.h"
     14 
     15 namespace v8 {
     16 namespace internal {
     17 
     18 using compiler::Node;
     19 
// Assembler that emits the generic (megamorphic) keyed store stub. It
// inlines fast paths for element (integer-indexed) stores and named
// property stores, and falls back to a |slow| runtime path for
// everything it cannot handle inline.
class KeyedStoreGenericAssembler : public AccessorAssembler {
 public:
  explicit KeyedStoreGenericAssembler(compiler::CodeAssemblerState* state)
      : AccessorAssembler(state) {}

  // Entry point: emits the complete generic keyed-store code for the
  // given |language_mode| (sloppy or strict).
  void KeyedStoreGeneric(LanguageMode language_mode);

 private:
  // Describes how a store affects a JSArray's length property.
  enum UpdateLength {
    kDontChangeLength,      // Store within the current length.
    kIncrementLengthByOne,  // Store at index == length (append).
    kBumpLengthWithGap      // Store past the length, introducing holes.
  };

  // Emits the fast paths for integer-indexed (element) stores.
  void EmitGenericElementStore(Node* receiver, Node* receiver_map,
                               Node* instance_type, Node* intptr_index,
                               Node* value, Node* context, Label* slow);

  // Emits the fast paths for named property stores.
  void EmitGenericPropertyStore(Node* receiver, Node* receiver_map,
                                const StoreICParameters* p, Label* slow,
                                LanguageMode language_mode);

  // Branches on whether any object on |receiver_map|'s prototype chain
  // has non-fast elements (see the definition for details).
  void BranchIfPrototypesHaveNonFastElements(Node* receiver_map,
                                             Label* non_fast_elements,
                                             Label* only_fast_elements);

  // Transitions |receiver| from |from_kind| to |to_kind| elements,
  // migrating the backing store when the representation changes.
  void TryRewriteElements(Node* receiver, Node* receiver_map, Node* elements,
                          Node* native_context, ElementsKind from_kind,
                          ElementsKind to_kind, Label* bailout);

  // Stores |value| at |intptr_index| into a backing store known to have
  // sufficient capacity, transitioning the ElementsKind as needed.
  void StoreElementWithCapacity(Node* receiver, Node* receiver_map,
                                Node* elements, Node* elements_kind,
                                Node* intptr_index, Node* value, Node* context,
                                Label* slow, UpdateLength update_length);

  // Writes back the new array length (if requested) and returns |value|.
  void MaybeUpdateLengthAndReturn(Node* receiver, Node* index, Node* value,
                                  UpdateLength update_length);

  // Helpers that switch the receiver's map from a packed ElementsKind to
  // the corresponding holey kind when holes are about to be introduced.
  void TryChangeToHoleyMapHelper(Node* receiver, Node* receiver_map,
                                 Node* native_context, ElementsKind packed_kind,
                                 ElementsKind holey_kind, Label* done,
                                 Label* map_mismatch, Label* bailout);
  void TryChangeToHoleyMap(Node* receiver, Node* receiver_map,
                           Node* current_elements_kind, Node* context,
                           ElementsKind packed_kind, Label* bailout);
  void TryChangeToHoleyMapMulti(Node* receiver, Node* receiver_map,
                                Node* current_elements_kind, Node* context,
                                ElementsKind packed_kind,
                                ElementsKind packed_kind_2, Label* bailout);

  // Jumps to |writable| for writable data properties, to |readonly| for
  // read-only properties, and falls through for accessor properties.
  void JumpIfDataProperty(Node* details, Label* writable, Label* readonly);

  // Walks the prototype chain looking for |name|; see the definition for
  // the meaning of the out-labels and out-variables.
  void LookupPropertyOnPrototypeChain(Node* receiver_map, Node* name,
                                      Label* accessor,
                                      Variable* var_accessor_pair,
                                      Variable* var_accessor_holder,
                                      Label* readonly, Label* bailout);

  // Verifies that |value| matches the field representation and field
  // type recorded in |descriptors| at |name_index|.
  void CheckFieldType(Node* descriptors, Node* name_index, Node* representation,
                      Node* value, Label* bailout);
  // Overwrites the value of an existing fast (descriptor-based) data
  // property in place.
  void OverwriteExistingFastProperty(Node* object, Node* object_map,
                                     Node* properties, Node* descriptors,
                                     Node* descriptor_name_index, Node* details,
                                     Node* value, Label* slow);
};
     84 
     85 void KeyedStoreGenericGenerator::Generate(compiler::CodeAssemblerState* state,
     86                                           LanguageMode language_mode) {
     87   KeyedStoreGenericAssembler assembler(state);
     88   assembler.KeyedStoreGeneric(language_mode);
     89 }
     90 
// Walks the prototype chain of |receiver_map| and branches to
// |only_fast_elements| if every prototype (up to null) has fast or no
// elements, and to |non_fast_elements| otherwise. Receivers with custom
// element handling (proxies, JSValues, ...) count as non-fast.
void KeyedStoreGenericAssembler::BranchIfPrototypesHaveNonFastElements(
    Node* receiver_map, Label* non_fast_elements, Label* only_fast_elements) {
  Variable var_map(this, MachineRepresentation::kTagged);
  var_map.Bind(receiver_map);
  Label loop_body(this, &var_map);
  Goto(&loop_body);

  Bind(&loop_body);
  {
    Node* map = var_map.value();
    Node* prototype = LoadMapPrototype(map);
    // Reached the end of the chain without finding non-fast elements.
    GotoIf(WordEqual(prototype, NullConstant()), only_fast_elements);
    Node* prototype_map = LoadMap(prototype);
    var_map.Bind(prototype_map);
    Node* instance_type = LoadMapInstanceType(prototype_map);
    // Instance types up to LAST_CUSTOM_ELEMENTS_RECEIVER have custom
    // element handling and must be treated as non-fast.
    STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
    STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
    GotoIf(Int32LessThanOrEqual(instance_type,
                                Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER)),
           non_fast_elements);
    Node* elements_kind = LoadMapElementsKind(prototype_map);
    STATIC_ASSERT(FIRST_ELEMENTS_KIND == FIRST_FAST_ELEMENTS_KIND);
    // Fast elements or no elements at all: keep walking up the chain.
    GotoIf(IsFastElementsKind(elements_kind), &loop_body);
    GotoIf(Word32Equal(elements_kind, Int32Constant(NO_ELEMENTS)), &loop_body);
    Goto(non_fast_elements);
  }
}
    118 
// Transitions |receiver| from |from_kind| to |to_kind| elements. Only the
// two context-default maps (packed and holey |from_kind|) are supported;
// any other receiver map bails out. When the element representation
// changes (Smi/object vs. double), the backing store is migrated too.
void KeyedStoreGenericAssembler::TryRewriteElements(
    Node* receiver, Node* receiver_map, Node* elements, Node* native_context,
    ElementsKind from_kind, ElementsKind to_kind, Label* bailout) {
  DCHECK(IsFastPackedElementsKind(from_kind));
  ElementsKind holey_from_kind = GetHoleyElementsKind(from_kind);
  ElementsKind holey_to_kind = GetHoleyElementsKind(to_kind);
  if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
    // Site-tracked transitions must not be performed on objects carrying
    // an allocation memento; defer those to the runtime.
    TrapAllocationMemento(receiver, bailout);
  }
  Label perform_transition(this), check_holey_map(this);
  Variable var_target_map(this, MachineRepresentation::kTagged);
  // Check if the receiver has the default |from_kind| map.
  {
    Node* packed_map =
        LoadContextElement(native_context, Context::ArrayMapIndex(from_kind));
    GotoIf(WordNotEqual(receiver_map, packed_map), &check_holey_map);
    var_target_map.Bind(
        LoadContextElement(native_context, Context::ArrayMapIndex(to_kind)));
    Goto(&perform_transition);
  }

  // Check if the receiver has the default |holey_from_kind| map.
  Bind(&check_holey_map);
  {
    Node* holey_map = LoadContextElement(
        native_context, Context::ArrayMapIndex(holey_from_kind));
    GotoIf(WordNotEqual(receiver_map, holey_map), bailout);
    var_target_map.Bind(LoadContextElement(
        native_context, Context::ArrayMapIndex(holey_to_kind)));
    Goto(&perform_transition);
  }

  // Found a supported transition target map, perform the transition!
  Bind(&perform_transition);
  {
    if (IsFastDoubleElementsKind(from_kind) !=
        IsFastDoubleElementsKind(to_kind)) {
      // The element representation changes, so re-allocate the backing
      // store (with unchanged capacity) in the new representation.
      Node* capacity = SmiUntag(LoadFixedArrayBaseLength(elements));
      GrowElementsCapacity(receiver, elements, from_kind, to_kind, capacity,
                           capacity, INTPTR_PARAMETERS, bailout);
    }
    StoreMap(receiver, var_target_map.value());
  }
}
    163 
    164 void KeyedStoreGenericAssembler::TryChangeToHoleyMapHelper(
    165     Node* receiver, Node* receiver_map, Node* native_context,
    166     ElementsKind packed_kind, ElementsKind holey_kind, Label* done,
    167     Label* map_mismatch, Label* bailout) {
    168   Node* packed_map =
    169       LoadContextElement(native_context, Context::ArrayMapIndex(packed_kind));
    170   GotoIf(WordNotEqual(receiver_map, packed_map), map_mismatch);
    171   if (AllocationSite::GetMode(packed_kind, holey_kind) ==
    172       TRACK_ALLOCATION_SITE) {
    173     TrapAllocationMemento(receiver, bailout);
    174   }
    175   Node* holey_map =
    176       LoadContextElement(native_context, Context::ArrayMapIndex(holey_kind));
    177   StoreMap(receiver, holey_map);
    178   Goto(done);
    179 }
    180 
    181 void KeyedStoreGenericAssembler::TryChangeToHoleyMap(
    182     Node* receiver, Node* receiver_map, Node* current_elements_kind,
    183     Node* context, ElementsKind packed_kind, Label* bailout) {
    184   ElementsKind holey_kind = GetHoleyElementsKind(packed_kind);
    185   Label already_holey(this);
    186 
    187   GotoIf(Word32Equal(current_elements_kind, Int32Constant(holey_kind)),
    188          &already_holey);
    189   Node* native_context = LoadNativeContext(context);
    190   TryChangeToHoleyMapHelper(receiver, receiver_map, native_context, packed_kind,
    191                             holey_kind, &already_holey, bailout, bailout);
    192   Bind(&already_holey);
    193 }
    194 
    195 void KeyedStoreGenericAssembler::TryChangeToHoleyMapMulti(
    196     Node* receiver, Node* receiver_map, Node* current_elements_kind,
    197     Node* context, ElementsKind packed_kind, ElementsKind packed_kind_2,
    198     Label* bailout) {
    199   ElementsKind holey_kind = GetHoleyElementsKind(packed_kind);
    200   ElementsKind holey_kind_2 = GetHoleyElementsKind(packed_kind_2);
    201   Label already_holey(this), check_other_kind(this);
    202 
    203   GotoIf(Word32Equal(current_elements_kind, Int32Constant(holey_kind)),
    204          &already_holey);
    205   GotoIf(Word32Equal(current_elements_kind, Int32Constant(holey_kind_2)),
    206          &already_holey);
    207 
    208   Node* native_context = LoadNativeContext(context);
    209   TryChangeToHoleyMapHelper(receiver, receiver_map, native_context, packed_kind,
    210                             holey_kind, &already_holey, &check_other_kind,
    211                             bailout);
    212   Bind(&check_other_kind);
    213   TryChangeToHoleyMapHelper(receiver, receiver_map, native_context,
    214                             packed_kind_2, holey_kind_2, &already_holey,
    215                             bailout, bailout);
    216   Bind(&already_holey);
    217 }
    218 
    219 void KeyedStoreGenericAssembler::MaybeUpdateLengthAndReturn(
    220     Node* receiver, Node* index, Node* value, UpdateLength update_length) {
    221   if (update_length != kDontChangeLength) {
    222     Node* new_length = SmiTag(IntPtrAdd(index, IntPtrConstant(1)));
    223     StoreObjectFieldNoWriteBarrier(receiver, JSArray::kLengthOffset, new_length,
    224                                    MachineRepresentation::kTagged);
    225   }
    226   Return(value);
    227 }
    228 
// Stores |value| at |intptr_index| into |elements|, which is known to
// have sufficient capacity. Dispatches on the backing store's map
// (FixedArray vs. FixedDoubleArray vs. anything else, e.g. COW arrays)
// and transitions the receiver's ElementsKind when the representation of
// |value| requires it. |update_length| describes how the JSArray length
// must be adjusted; for length-changing stores the receiver is known to
// be a JSArray.
void KeyedStoreGenericAssembler::StoreElementWithCapacity(
    Node* receiver, Node* receiver_map, Node* elements, Node* elements_kind,
    Node* intptr_index, Node* value, Node* context, Label* slow,
    UpdateLength update_length) {
  if (update_length != kDontChangeLength) {
    CSA_ASSERT(this, Word32Equal(LoadMapInstanceType(receiver_map),
                                 Int32Constant(JS_ARRAY_TYPE)));
    // Check if the length property is writable. The fast check is only
    // supported for fast properties.
    GotoIf(IsDictionaryMap(receiver_map), slow);
    // The length property is non-configurable, so it's guaranteed to always
    // be the first property.
    Node* descriptors = LoadMapDescriptors(receiver_map);
    Node* details =
        LoadFixedArrayElement(descriptors, DescriptorArray::ToDetailsIndex(0));
    GotoIf(IsSetSmi(details, PropertyDetails::kAttributesReadOnlyMask), slow);
  }
  // The same header offset works for both backing store types below.
  STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
  const int kHeaderSize = FixedArray::kHeaderSize - kHeapObjectTag;

  Label check_double_elements(this), check_cow_elements(this);
  Node* elements_map = LoadMap(elements);
  GotoIf(WordNotEqual(elements_map, LoadRoot(Heap::kFixedArrayMapRootIndex)),
         &check_double_elements);

  // FixedArray backing store -> Smi or object elements.
  {
    Node* offset = ElementOffsetFromIndex(intptr_index, FAST_ELEMENTS,
                                          INTPTR_PARAMETERS, kHeaderSize);
    // Check if we're about to overwrite the hole. We can safely do that
    // only if there can be no setters on the prototype chain.
    // If we know that we're storing beyond the previous array length, we
    // can skip the hole check (and always assume the hole).
    {
      Label hole_check_passed(this);
      if (update_length == kDontChangeLength) {
        Node* element = Load(MachineType::AnyTagged(), elements, offset);
        GotoIf(WordNotEqual(element, TheHoleConstant()), &hole_check_passed);
      }
      BranchIfPrototypesHaveNonFastElements(receiver_map, slow,
                                            &hole_check_passed);
      Bind(&hole_check_passed);
    }

    // Check if the value we're storing matches the elements_kind. Smis
    // can always be stored.
    {
      Label non_smi_value(this);
      GotoIfNot(TaggedIsSmi(value), &non_smi_value);
      // If we're about to introduce holes, ensure holey elements.
      if (update_length == kBumpLengthWithGap) {
        TryChangeToHoleyMapMulti(receiver, receiver_map, elements_kind, context,
                                 FAST_SMI_ELEMENTS, FAST_ELEMENTS, slow);
      }
      // Smis never require a write barrier.
      StoreNoWriteBarrier(MachineRepresentation::kTagged, elements, offset,
                          value);
      MaybeUpdateLengthAndReturn(receiver, intptr_index, value, update_length);

      Bind(&non_smi_value);
    }

    // Check if we already have object elements; just do the store if so.
    {
      Label must_transition(this);
      STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
      STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
      GotoIf(Int32LessThanOrEqual(elements_kind,
                                  Int32Constant(FAST_HOLEY_SMI_ELEMENTS)),
             &must_transition);
      if (update_length == kBumpLengthWithGap) {
        TryChangeToHoleyMap(receiver, receiver_map, elements_kind, context,
                            FAST_ELEMENTS, slow);
      }
      Store(elements, offset, value);
      MaybeUpdateLengthAndReturn(receiver, intptr_index, value, update_length);

      Bind(&must_transition);
    }

    // Transition to the required ElementsKind.
    {
      Label transition_to_double(this), transition_to_object(this);
      Node* native_context = LoadNativeContext(context);
      // HeapNumber values transition Smi elements to double elements;
      // any other heap object transitions to object elements.
      Branch(WordEqual(LoadMap(value), LoadRoot(Heap::kHeapNumberMapRootIndex)),
             &transition_to_double, &transition_to_object);
      Bind(&transition_to_double);
      {
        // If we're adding holes at the end, always transition to a holey
        // elements kind, otherwise try to remain packed.
        ElementsKind target_kind = update_length == kBumpLengthWithGap
                                       ? FAST_HOLEY_DOUBLE_ELEMENTS
                                       : FAST_DOUBLE_ELEMENTS;
        TryRewriteElements(receiver, receiver_map, elements, native_context,
                           FAST_SMI_ELEMENTS, target_kind, slow);
        // Reload migrated elements.
        Node* double_elements = LoadElements(receiver);
        Node* double_offset = ElementOffsetFromIndex(
            intptr_index, FAST_DOUBLE_ELEMENTS, INTPTR_PARAMETERS, kHeaderSize);
        // Make sure we do not store signalling NaNs into double arrays.
        Node* double_value = Float64SilenceNaN(LoadHeapNumberValue(value));
        StoreNoWriteBarrier(MachineRepresentation::kFloat64, double_elements,
                            double_offset, double_value);
        MaybeUpdateLengthAndReturn(receiver, intptr_index, value,
                                   update_length);
      }

      Bind(&transition_to_object);
      {
        // If we're adding holes at the end, always transition to a holey
        // elements kind, otherwise try to remain packed.
        ElementsKind target_kind = update_length == kBumpLengthWithGap
                                       ? FAST_HOLEY_ELEMENTS
                                       : FAST_ELEMENTS;
        TryRewriteElements(receiver, receiver_map, elements, native_context,
                           FAST_SMI_ELEMENTS, target_kind, slow);
        // The elements backing store didn't change, no reload necessary.
        CSA_ASSERT(this, WordEqual(elements, LoadElements(receiver)));
        Store(elements, offset, value);
        MaybeUpdateLengthAndReturn(receiver, intptr_index, value,
                                   update_length);
      }
    }
  }

  Bind(&check_double_elements);
  Node* fixed_double_array_map = LoadRoot(Heap::kFixedDoubleArrayMapRootIndex);
  GotoIf(WordNotEqual(elements_map, fixed_double_array_map),
         &check_cow_elements);
  // FixedDoubleArray backing store -> double elements.
  {
    Node* offset = ElementOffsetFromIndex(intptr_index, FAST_DOUBLE_ELEMENTS,
                                          INTPTR_PARAMETERS, kHeaderSize);
    // Check if we're about to overwrite the hole. We can safely do that
    // only if there can be no setters on the prototype chain.
    {
      Label hole_check_passed(this);
      // If we know that we're storing beyond the previous array length, we
      // can skip the hole check (and always assume the hole).
      if (update_length == kDontChangeLength) {
        Label found_hole(this);
        LoadDoubleWithHoleCheck(elements, offset, &found_hole,
                                MachineType::None());
        Goto(&hole_check_passed);
        Bind(&found_hole);
      }
      BranchIfPrototypesHaveNonFastElements(receiver_map, slow,
                                            &hole_check_passed);
      Bind(&hole_check_passed);
    }

    // Try to store the value as a double.
    {
      Label non_number_value(this);
      Node* double_value = TryTaggedToFloat64(value, &non_number_value);

      // Make sure we do not store signalling NaNs into double arrays.
      double_value = Float64SilenceNaN(double_value);
      // If we're about to introduce holes, ensure holey elements.
      if (update_length == kBumpLengthWithGap) {
        TryChangeToHoleyMap(receiver, receiver_map, elements_kind, context,
                            FAST_DOUBLE_ELEMENTS, slow);
      }
      StoreNoWriteBarrier(MachineRepresentation::kFloat64, elements, offset,
                          double_value);
      MaybeUpdateLengthAndReturn(receiver, intptr_index, value, update_length);

      Bind(&non_number_value);
    }

    // Transition to object elements.
    {
      Node* native_context = LoadNativeContext(context);
      ElementsKind target_kind = update_length == kBumpLengthWithGap
                                     ? FAST_HOLEY_ELEMENTS
                                     : FAST_ELEMENTS;
      TryRewriteElements(receiver, receiver_map, elements, native_context,
                         FAST_DOUBLE_ELEMENTS, target_kind, slow);
      // Reload migrated elements.
      Node* fast_elements = LoadElements(receiver);
      Node* fast_offset = ElementOffsetFromIndex(
          intptr_index, FAST_ELEMENTS, INTPTR_PARAMETERS, kHeaderSize);
      Store(fast_elements, fast_offset, value);
      MaybeUpdateLengthAndReturn(receiver, intptr_index, value, update_length);
    }
  }

  Bind(&check_cow_elements);
  {
    // TODO(jkummerow): Use GrowElementsCapacity instead of bailing out.
    Goto(slow);
  }
}
    421 
// Emits the element-store fast paths: classifies the store as in-bounds,
// appending (index == length), or gap-creating (index > length within
// capacity), and delegates to StoreElementWithCapacity. Out-of-capacity
// stores, dictionary elements, typed arrays, and all other non-fast
// kinds go to |slow|.
void KeyedStoreGenericAssembler::EmitGenericElementStore(
    Node* receiver, Node* receiver_map, Node* instance_type, Node* intptr_index,
    Node* value, Node* context, Label* slow) {
  Label if_fast(this), if_in_bounds(this), if_increment_length_by_one(this),
      if_bump_length_with_gap(this), if_grow(this), if_nonfast(this),
      if_typed_array(this), if_dictionary(this);
  Node* elements = LoadElements(receiver);
  Node* elements_kind = LoadMapElementsKind(receiver_map);
  Branch(IsFastElementsKind(elements_kind), &if_fast, &if_nonfast);
  Bind(&if_fast);

  Label if_array(this);
  GotoIf(Word32Equal(instance_type, Int32Constant(JS_ARRAY_TYPE)), &if_array);
  {
    // Non-JSArray receiver: only the backing store capacity matters.
    Node* capacity = SmiUntag(LoadFixedArrayBaseLength(elements));
    Branch(UintPtrLessThan(intptr_index, capacity), &if_in_bounds, &if_grow);
  }
  Bind(&if_array);
  {
    // JSArray receiver: compare against both the length and the capacity
    // to distinguish in-bounds, append, and gap-creating stores.
    Node* length = SmiUntag(LoadJSArrayLength(receiver));
    GotoIf(UintPtrLessThan(intptr_index, length), &if_in_bounds);
    Node* capacity = SmiUntag(LoadFixedArrayBaseLength(elements));
    GotoIf(UintPtrGreaterThanOrEqual(intptr_index, capacity), &if_grow);
    Branch(WordEqual(intptr_index, length), &if_increment_length_by_one,
           &if_bump_length_with_gap);
  }

  Bind(&if_in_bounds);
  {
    StoreElementWithCapacity(receiver, receiver_map, elements, elements_kind,
                             intptr_index, value, context, slow,
                             kDontChangeLength);
  }

  Bind(&if_increment_length_by_one);
  {
    StoreElementWithCapacity(receiver, receiver_map, elements, elements_kind,
                             intptr_index, value, context, slow,
                             kIncrementLengthByOne);
  }

  Bind(&if_bump_length_with_gap);
  {
    StoreElementWithCapacity(receiver, receiver_map, elements, elements_kind,
                             intptr_index, value, context, slow,
                             kBumpLengthWithGap);
  }

  // Out-of-capacity accesses (index >= capacity) jump here. Additionally,
  // an ElementsKind transition might be necessary.
  // The index can also be negative at this point! Jump to the runtime in that
  // case to convert it to a named property.
  Bind(&if_grow);
  {
    Comment("Grow backing store");
    // TODO(jkummerow): Support inline backing store growth.
    Goto(slow);
  }

  // Any ElementsKind > LAST_FAST_ELEMENTS_KIND jumps here for further dispatch.
  Bind(&if_nonfast);
  {
    STATIC_ASSERT(LAST_ELEMENTS_KIND == LAST_FIXED_TYPED_ARRAY_ELEMENTS_KIND);
    GotoIf(Int32GreaterThanOrEqual(
               elements_kind,
               Int32Constant(FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND)),
           &if_typed_array);
    GotoIf(Word32Equal(elements_kind, Int32Constant(DICTIONARY_ELEMENTS)),
           &if_dictionary);
    Goto(slow);
  }

  Bind(&if_dictionary);
  {
    Comment("Dictionary");
    // TODO(jkummerow): Support storing to dictionary elements.
    Goto(slow);
  }

  Bind(&if_typed_array);
  {
    Comment("Typed array");
    // TODO(jkummerow): Support typed arrays.
    Goto(slow);
  }
}
    508 
    509 void KeyedStoreGenericAssembler::JumpIfDataProperty(Node* details,
    510                                                     Label* writable,
    511                                                     Label* readonly) {
    512   // Accessor properties never have the READ_ONLY attribute set.
    513   GotoIf(IsSetWord32(details, PropertyDetails::kAttributesReadOnlyMask),
    514          readonly);
    515   Node* kind = DecodeWord32<PropertyDetails::KindField>(details);
    516   GotoIf(Word32Equal(kind, Int32Constant(kData)), writable);
    517   // Fall through if it's an accessor property.
    518 }
    519 
// Walks the prototype chain of |receiver_map| looking for |name|.
// Jumps to |readonly| when a read-only data property is found; jumps to
// |accessor| (with |var_accessor_pair| and |var_accessor_holder| bound)
// when an accessor is found; jumps to |bailout| for cases that need the
// runtime (e.g. typed-array prototypes on the chain); and falls through
// when the property is absent or writable, i.e. the store may proceed.
void KeyedStoreGenericAssembler::LookupPropertyOnPrototypeChain(
    Node* receiver_map, Node* name, Label* accessor,
    Variable* var_accessor_pair, Variable* var_accessor_holder, Label* readonly,
    Label* bailout) {
  Label ok_to_write(this);
  Variable var_holder(this, MachineRepresentation::kTagged);
  var_holder.Bind(LoadMapPrototype(receiver_map));
  Variable var_holder_map(this, MachineRepresentation::kTagged);
  var_holder_map.Bind(LoadMap(var_holder.value()));

  Variable* merged_variables[] = {&var_holder, &var_holder_map};
  Label loop(this, arraysize(merged_variables), merged_variables);
  Goto(&loop);
  Bind(&loop);
  {
    Node* holder = var_holder.value();
    Node* holder_map = var_holder_map.value();
    Node* instance_type = LoadMapInstanceType(holder_map);
    Label next_proto(this);
    {
      Label found(this), found_fast(this), found_dict(this), found_global(this);
      Variable var_meta_storage(this, MachineRepresentation::kTagged);
      Variable var_entry(this, MachineType::PointerRepresentation());
      TryLookupProperty(holder, holder_map, instance_type, name, &found_fast,
                        &found_dict, &found_global, &var_meta_storage,
                        &var_entry, &next_proto, bailout);
      // Property found on a fast-properties holder: details come from the
      // descriptor array.
      Bind(&found_fast);
      {
        Node* descriptors = var_meta_storage.value();
        Node* name_index = var_entry.value();
        Node* details =
            LoadDetailsByKeyIndex<DescriptorArray>(descriptors, name_index);
        JumpIfDataProperty(details, &ok_to_write, readonly);

        // Accessor case.
        // TODO(jkummerow): Implement a trimmed-down LoadAccessorFromFastObject.
        Variable var_details(this, MachineRepresentation::kWord32);
        LoadPropertyFromFastObject(holder, holder_map, descriptors, name_index,
                                   &var_details, var_accessor_pair);
        var_accessor_holder->Bind(holder);
        Goto(accessor);
      }

      // Property found in a NameDictionary (slow-properties holder).
      Bind(&found_dict);
      {
        Node* dictionary = var_meta_storage.value();
        Node* entry = var_entry.value();
        Node* details =
            LoadDetailsByKeyIndex<NameDictionary>(dictionary, entry);
        JumpIfDataProperty(details, &ok_to_write, readonly);

        // Accessor case.
        var_accessor_pair->Bind(
            LoadValueByKeyIndex<NameDictionary>(dictionary, entry));
        var_accessor_holder->Bind(holder);
        Goto(accessor);
      }

      // Property found in a GlobalDictionary: value lives in a
      // PropertyCell.
      Bind(&found_global);
      {
        Node* dictionary = var_meta_storage.value();
        Node* entry = var_entry.value();
        Node* property_cell =
            LoadValueByKeyIndex<GlobalDictionary>(dictionary, entry);
        Node* value =
            LoadObjectField(property_cell, PropertyCell::kValueOffset);
        // The hole marks a deleted/invalidated cell: treat as not found.
        GotoIf(WordEqual(value, TheHoleConstant()), &next_proto);
        Node* details = LoadAndUntagToWord32ObjectField(
            property_cell, PropertyCell::kDetailsOffset);
        JumpIfDataProperty(details, &ok_to_write, readonly);

        // Accessor case.
        var_accessor_pair->Bind(value);
        var_accessor_holder->Bind(holder);
        Goto(accessor);
      }
    }

    Bind(&next_proto);
    // Bailout if it can be an integer indexed exotic case.
    GotoIf(Word32Equal(instance_type, Int32Constant(JS_TYPED_ARRAY_TYPE)),
           bailout);
    Node* proto = LoadMapPrototype(holder_map);
    // End of the chain: the property does not exist anywhere, so the
    // store may proceed.
    GotoIf(WordEqual(proto, NullConstant()), &ok_to_write);
    var_holder.Bind(proto);
    var_holder_map.Bind(LoadMap(proto));
    Goto(&loop);
  }
  Bind(&ok_to_write);
}
    610 
// Verifies that |value| is compatible with the field |representation|
// (Smi, Double, HeapObject, or Tagged) and, for HeapObject fields, with
// the FieldType recorded in |descriptors| at |name_index|. Jumps to
// |bailout| on any mismatch; falls through when the value fits.
void KeyedStoreGenericAssembler::CheckFieldType(Node* descriptors,
                                                Node* name_index,
                                                Node* representation,
                                                Node* value, Label* bailout) {
  Label r_smi(this), r_double(this), r_heapobject(this), all_fine(this);
  // Ignore FLAG_track_fields etc. and always emit code for all checks,
  // because this builtin is part of the snapshot and therefore should
  // be flag independent.
  GotoIf(Word32Equal(representation, Int32Constant(Representation::kSmi)),
         &r_smi);
  GotoIf(Word32Equal(representation, Int32Constant(Representation::kDouble)),
         &r_double);
  GotoIf(
      Word32Equal(representation, Int32Constant(Representation::kHeapObject)),
      &r_heapobject);
  GotoIf(Word32Equal(representation, Int32Constant(Representation::kNone)),
         bailout);
  // Only kTagged remains, and tagged fields accept any value.
  CSA_ASSERT(this, Word32Equal(representation,
                               Int32Constant(Representation::kTagged)));
  Goto(&all_fine);

  Bind(&r_smi);
  { Branch(TaggedIsSmi(value), &all_fine, bailout); }

  Bind(&r_double);
  {
    // Smis can always be represented as doubles.
    GotoIf(TaggedIsSmi(value), &all_fine);
    Node* value_map = LoadMap(value);
    // While supporting mutable HeapNumbers would be straightforward, such
    // objects should not end up here anyway.
    CSA_ASSERT(this,
               WordNotEqual(value_map,
                            LoadRoot(Heap::kMutableHeapNumberMapRootIndex)));
    Branch(IsHeapNumberMap(value_map), &all_fine, bailout);
  }

  Bind(&r_heapobject);
  {
    GotoIf(TaggedIsSmi(value), bailout);
    Node* field_type =
        LoadValueByKeyIndex<DescriptorArray>(descriptors, name_index);
    intptr_t kNoneType = reinterpret_cast<intptr_t>(FieldType::None());
    intptr_t kAnyType = reinterpret_cast<intptr_t>(FieldType::Any());
    // FieldType::None can't hold any value.
    GotoIf(WordEqual(field_type, IntPtrConstant(kNoneType)), bailout);
    // FieldType::Any can hold any value.
    GotoIf(WordEqual(field_type, IntPtrConstant(kAnyType)), &all_fine);
    CSA_ASSERT(this, IsWeakCell(field_type));
    // Cleared WeakCells count as FieldType::None, which can't hold any value.
    field_type = LoadWeakCellValue(field_type, bailout);
    // FieldType::Class(...) performs a map check.
    CSA_ASSERT(this, IsMap(field_type));
    Branch(WordEqual(LoadMap(value), field_type), &all_fine, bailout);
  }

  Bind(&all_fine);
}
    668 
// Stores |value| into an existing fast (descriptor-based) data property of
// |object|, either in-object or in the out-of-object |properties| backing
// store, honoring the field's representation. Jumps to |slow| whenever the
// store would require a map transition or a representation/field-type change.
void KeyedStoreGenericAssembler::OverwriteExistingFastProperty(
    Node* object, Node* object_map, Node* properties, Node* descriptors,
    Node* descriptor_name_index, Node* details, Node* value, Label* slow) {
  // Properties in descriptors can't be overwritten without map transition.
  GotoIf(Word32NotEqual(DecodeWord32<PropertyDetails::LocationField>(details),
                        Int32Constant(kField)),
         slow);

  if (FLAG_track_constant_fields) {
    // TODO(ishell): Taking the slow path is not necessary if new and old
    // values are identical.
    GotoIf(Word32Equal(DecodeWord32<PropertyDetails::ConstnessField>(details),
                       Int32Constant(kConst)),
           slow);
  }

  Label done(this);
  Node* representation =
      DecodeWord32<PropertyDetails::RepresentationField>(details);

  // Bail out to |slow| if |value| doesn't fit the field's representation
  // or declared field type.
  CheckFieldType(descriptors, descriptor_name_index, representation, value,
                 slow);
  Node* field_index =
      DecodeWordFromWord32<PropertyDetails::FieldIndexField>(details);
  Node* inobject_properties = LoadMapInobjectProperties(object_map);

  // Field indices below the in-object count live inside the object itself;
  // the rest live in the properties backing store.
  Label inobject(this), backing_store(this);
  Branch(UintPtrLessThan(field_index, inobject_properties), &inobject,
         &backing_store);

  Bind(&inobject);
  {
    // In-object fields occupy the last |inobject_properties| slots of the
    // instance, so the byte offset counts back from the instance size.
    Node* field_offset =
        IntPtrMul(IntPtrSub(LoadMapInstanceSize(object_map),
                            IntPtrSub(inobject_properties, field_index)),
                  IntPtrConstant(kPointerSize));
    Label tagged_rep(this), double_rep(this);
    Branch(Word32Equal(representation, Int32Constant(Representation::kDouble)),
           &double_rep, &tagged_rep);
    Bind(&double_rep);
    {
      Node* double_value = ChangeNumberToFloat64(value);
      if (FLAG_unbox_double_fields) {
        // Unboxed doubles: write the raw float64 directly into the field.
        StoreObjectFieldNoWriteBarrier(object, field_offset, double_value,
                                       MachineRepresentation::kFloat64);
      } else {
        // Boxed doubles: the field holds a MutableHeapNumber; update it
        // in place rather than allocating a new one.
        Node* mutable_heap_number = LoadObjectField(object, field_offset);
        StoreHeapNumberValue(mutable_heap_number, double_value);
      }
      Goto(&done);
    }

    Bind(&tagged_rep);
    {
      StoreObjectField(object, field_offset, value);
      Goto(&done);
    }
  }

  Bind(&backing_store);
  {
    // Out-of-object fields are indexed relative to the backing store.
    Node* backing_store_index = IntPtrSub(field_index, inobject_properties);
    Label tagged_rep(this), double_rep(this);
    Branch(Word32Equal(representation, Int32Constant(Representation::kDouble)),
           &double_rep, &tagged_rep);
    Bind(&double_rep);
    {
      // Backing-store doubles are always boxed as MutableHeapNumbers,
      // regardless of FLAG_unbox_double_fields.
      Node* double_value = ChangeNumberToFloat64(value);
      Node* mutable_heap_number =
          LoadFixedArrayElement(properties, backing_store_index);
      StoreHeapNumberValue(mutable_heap_number, double_value);
      Goto(&done);
    }
    Bind(&tagged_rep);
    {
      StoreFixedArrayElement(properties, backing_store_index, value);
      Goto(&done);
    }
  }
  Bind(&done);
}
    750 
// Generic named-property store: dispatches on the receiver's property
// storage (fast descriptors vs. dictionary), handles accessor pairs and
// read-only properties per |language_mode|, and falls back to the stub
// cache and ultimately the KeyedStoreIC_Miss runtime entry. Jumps to
// |slow| for anything it cannot handle inline.
void KeyedStoreGenericAssembler::EmitGenericPropertyStore(
    Node* receiver, Node* receiver_map, const StoreICParameters* p, Label* slow,
    LanguageMode language_mode) {
  Variable var_accessor_pair(this, MachineRepresentation::kTagged);
  Variable var_accessor_holder(this, MachineRepresentation::kTagged);
  Label stub_cache(this), fast_properties(this), dictionary_properties(this),
      accessor(this), readonly(this);
  Node* properties = LoadProperties(receiver);
  Node* properties_map = LoadMap(properties);
  // A hash-table-mapped properties object means dictionary-mode properties.
  Branch(WordEqual(properties_map, LoadRoot(Heap::kHashTableMapRootIndex)),
         &dictionary_properties, &fast_properties);

  Bind(&fast_properties);
  {
    Comment("fast property store");
    Node* bitfield3 = LoadMapBitField3(receiver_map);
    Node* descriptors = LoadMapDescriptors(receiver_map);
    Label descriptor_found(this);
    Variable var_name_index(this, MachineType::PointerRepresentation());
    // TODO(jkummerow): Maybe look for existing map transitions?
    Label* notfound = &stub_cache;
    DescriptorLookup(p->name, descriptors, bitfield3, &descriptor_found,
                     &var_name_index, notfound);

    Bind(&descriptor_found);
    {
      Node* name_index = var_name_index.value();
      Node* details =
          LoadDetailsByKeyIndex<DescriptorArray>(descriptors, name_index);
      Label data_property(this);
      // Writable data properties continue below; read-only ones go to
      // &readonly; everything else falls through to the accessor case.
      JumpIfDataProperty(details, &data_property, &readonly);

      // Accessor case.
      // TODO(jkummerow): Implement a trimmed-down LoadAccessorFromFastObject.
      Variable var_details(this, MachineRepresentation::kWord32);
      LoadPropertyFromFastObject(receiver, receiver_map, descriptors,
                                 name_index, &var_details, &var_accessor_pair);
      var_accessor_holder.Bind(receiver);
      Goto(&accessor);

      Bind(&data_property);
      {
        OverwriteExistingFastProperty(receiver, receiver_map, properties,
                                      descriptors, name_index, details,
                                      p->value, slow);
        Return(p->value);
      }
    }
  }

  Bind(&dictionary_properties);
  {
    Comment("dictionary property store");
    // We checked for LAST_CUSTOM_ELEMENTS_RECEIVER before, which rules out
    // seeing global objects here (which would need special handling).

    Variable var_name_index(this, MachineType::PointerRepresentation());
    Label dictionary_found(this, &var_name_index), not_found(this);
    NameDictionaryLookup<NameDictionary>(properties, p->name, &dictionary_found,
                                         &var_name_index, &not_found);
    Bind(&dictionary_found);
    {
      Label overwrite(this);
      Node* details = LoadDetailsByKeyIndex<NameDictionary>(
          properties, var_name_index.value());
      JumpIfDataProperty(details, &overwrite, &readonly);

      // Accessor case.
      var_accessor_pair.Bind(LoadValueByKeyIndex<NameDictionary>(
          properties, var_name_index.value()));
      var_accessor_holder.Bind(receiver);
      Goto(&accessor);

      Bind(&overwrite);
      {
        // Dictionary entries are always tagged; overwrite in place.
        StoreValueByKeyIndex<NameDictionary>(properties, var_name_index.value(),
                                             p->value);
        Return(p->value);
      }
    }

    Bind(&not_found);
    {
      // Walk the prototype chain first: an inherited setter or read-only
      // property must intercept the store before we add an own property.
      LookupPropertyOnPrototypeChain(receiver_map, p->name, &accessor,
                                     &var_accessor_pair, &var_accessor_holder,
                                     &readonly, slow);
      Add<NameDictionary>(properties, p->name, p->value, slow);
      Return(p->value);
    }
  }

  Bind(&accessor);
  {
    Label not_callable(this);
    Node* accessor_pair = var_accessor_pair.value();
    // AccessorInfo (API callbacks) are handled in the runtime.
    GotoIf(IsAccessorInfoMap(LoadMap(accessor_pair)), slow);
    CSA_ASSERT(this, HasInstanceType(accessor_pair, ACCESSOR_PAIR_TYPE));
    Node* setter = LoadObjectField(accessor_pair, AccessorPair::kSetterOffset);
    Node* setter_map = LoadMap(setter);
    // FunctionTemplateInfo setters are not supported yet.
    GotoIf(IsFunctionTemplateInfoMap(setter_map), slow);
    GotoIfNot(IsCallableMap(setter_map), &not_callable);

    Callable callable = CodeFactory::Call(isolate());
    CallJS(callable, p->context, setter, receiver, p->value);
    Return(p->value);

    Bind(&not_callable);
    {
      // A getter-only accessor: throw in strict mode, ignore in sloppy mode.
      if (language_mode == STRICT) {
        Node* message =
            SmiConstant(Smi::FromInt(MessageTemplate::kNoSetterInCallback));
        TailCallRuntime(Runtime::kThrowTypeError, p->context, message, p->name,
                        var_accessor_holder.value());
      } else {
        DCHECK_EQ(SLOPPY, language_mode);
        Return(p->value);
      }
    }
  }

  Bind(&readonly);
  {
    // Read-only property: throw in strict mode, silently succeed in sloppy.
    if (language_mode == STRICT) {
      Node* message =
          SmiConstant(Smi::FromInt(MessageTemplate::kStrictReadOnlyProperty));
      Node* type = Typeof(p->receiver, p->context);
      TailCallRuntime(Runtime::kThrowTypeError, p->context, message, p->name,
                      type, p->receiver);
    } else {
      DCHECK_EQ(SLOPPY, language_mode);
      Return(p->value);
    }
  }

  Bind(&stub_cache);
  {
    Comment("stub cache probe");
    Variable var_handler(this, MachineRepresentation::kTagged);
    Label found_handler(this, &var_handler), stub_cache_miss(this);
    TryProbeStubCache(isolate()->store_stub_cache(), receiver, p->name,
                      &found_handler, &var_handler, &stub_cache_miss);
    Bind(&found_handler);
    {
      Comment("KeyedStoreGeneric found handler");
      HandleStoreICHandlerCase(p, var_handler.value(), &stub_cache_miss);
    }
    Bind(&stub_cache_miss);
    {
      Comment("KeyedStoreGeneric_miss");
      // Let the IC system handle (and learn from) this store.
      TailCallRuntime(Runtime::kKeyedStoreIC_Miss, p->context, p->value,
                      p->slot, p->vector, p->receiver, p->name);
    }
  }
}
    906 
// Entry point for the KeyedStoreGeneric builtin: classifies the key as an
// integer index or a unique name and dispatches to the element or named
// property store path; everything else goes to the Runtime::kSetProperty
// slow path.
void KeyedStoreGenericAssembler::KeyedStoreGeneric(LanguageMode language_mode) {
  typedef StoreWithVectorDescriptor Descriptor;

  Node* receiver = Parameter(Descriptor::kReceiver);
  Node* name = Parameter(Descriptor::kName);
  Node* value = Parameter(Descriptor::kValue);
  Node* slot = Parameter(Descriptor::kSlot);
  Node* vector = Parameter(Descriptor::kVector);
  Node* context = Parameter(Descriptor::kContext);

  Variable var_index(this, MachineType::PointerRepresentation());
  Variable var_unique(this, MachineRepresentation::kTagged);
  var_unique.Bind(name);  // Dummy initialization.
  Label if_index(this), if_unique_name(this), slow(this);

  // Smi receivers have no properties; handle in the runtime.
  GotoIf(TaggedIsSmi(receiver), &slow);
  Node* receiver_map = LoadMap(receiver);
  Node* instance_type = LoadMapInstanceType(receiver_map);
  // Receivers requiring non-standard element accesses (interceptors, access
  // checks, strings and string wrappers, proxies) are handled in the runtime.
  GotoIf(Int32LessThanOrEqual(instance_type,
                              Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER)),
         &slow);

  // Classify |name|: array index -> &if_index, unique name -> &if_unique_name,
  // anything needing internalization or conversion -> &slow.
  TryToName(name, &if_index, &var_index, &if_unique_name, &var_unique, &slow);

  Bind(&if_index);
  {
    Comment("integer index");
    EmitGenericElementStore(receiver, receiver_map, instance_type,
                            var_index.value(), value, context, &slow);
  }

  Bind(&if_unique_name);
  {
    Comment("key is unique name");
    StoreICParameters p(context, receiver, var_unique.value(), value, slot,
                        vector);
    EmitGenericPropertyStore(receiver, receiver_map, &p, &slow, language_mode);
  }

  Bind(&slow);
  {
    Comment("KeyedStoreGeneric_slow");
    TailCallRuntime(Runtime::kSetProperty, context, receiver, name, value,
                    SmiConstant(language_mode));
  }
}
    955 
    956 }  // namespace internal
    957 }  // namespace v8
    958