// (code-viewer navigation header removed — extraction artifact, not part of the source)
      1 // Copyright 2015 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "src/compiler/js-native-context-specialization.h"
      6 
      7 #include "src/accessors.h"
      8 #include "src/code-factory.h"
      9 #include "src/compilation-dependencies.h"
     10 #include "src/compiler/access-builder.h"
     11 #include "src/compiler/access-info.h"
     12 #include "src/compiler/js-graph.h"
     13 #include "src/compiler/js-operator.h"
     14 #include "src/compiler/linkage.h"
     15 #include "src/compiler/node-matchers.h"
     16 #include "src/field-index-inl.h"
     17 #include "src/isolate-inl.h"
     18 #include "src/objects-inl.h"  // TODO(mstarzinger): Temporary cycle breaker!
     19 #include "src/type-cache.h"
     20 #include "src/type-feedback-vector.h"
     21 
     22 namespace v8 {
     23 namespace internal {
     24 namespace compiler {
     25 
// Constructs the reducer.  {native_context} is a MaybeHandle and may be
// unset when the native context is not known at compile time (the
// per-node lookup via GetNativeContext() is used instead).
// {dependencies} records the compilation assumptions (e.g. stable
// prototype chains) this specialization introduces, and {zone} backs
// the temporary ZoneVectors allocated during reduction.
JSNativeContextSpecialization::JSNativeContextSpecialization(
    Editor* editor, JSGraph* jsgraph, Flags flags,
    MaybeHandle<Context> native_context, CompilationDependencies* dependencies,
    Zone* zone)
    : AdvancedReducer(editor),
      jsgraph_(jsgraph),
      flags_(flags),
      native_context_(native_context),
      dependencies_(dependencies),
      zone_(zone),
      type_cache_(TypeCache::Get()) {}
     37 
     38 
     39 Reduction JSNativeContextSpecialization::Reduce(Node* node) {
     40   switch (node->opcode()) {
     41     case IrOpcode::kJSLoadNamed:
     42       return ReduceJSLoadNamed(node);
     43     case IrOpcode::kJSStoreNamed:
     44       return ReduceJSStoreNamed(node);
     45     case IrOpcode::kJSLoadProperty:
     46       return ReduceJSLoadProperty(node);
     47     case IrOpcode::kJSStoreProperty:
     48       return ReduceJSStoreProperty(node);
     49     default:
     50       break;
     51   }
     52   return NoChange();
     53 }
     54 
     55 
// Lowers a named property access (JSLoadNamed/JSStoreNamed, or the keyed
// variants when the key is a known Name) into explicit map / instance-type
// checks followed by direct constant or field accesses — one polymorphic
// branch per PropertyAccessInfo computed from {receiver_maps}.  Every
// failing runtime check is collected into {exit_controls} and routed to a
// single eager Deoptimize node.  {value} is the value to store (a Dead
// placeholder for loads); {index}, when non-null, is the dynamic key node
// of a keyed access and is checked against {name} at runtime.  Returns
// NoChange() when lowering is impossible (deopt support disabled, native
// context unknown, or no usable access infos).
Reduction JSNativeContextSpecialization::ReduceNamedAccess(
    Node* node, Node* value, MapHandleList const& receiver_maps,
    Handle<Name> name, AccessMode access_mode, LanguageMode language_mode,
    Node* index) {
  DCHECK(node->opcode() == IrOpcode::kJSLoadNamed ||
         node->opcode() == IrOpcode::kJSStoreNamed ||
         node->opcode() == IrOpcode::kJSLoadProperty ||
         node->opcode() == IrOpcode::kJSStoreProperty);
  Node* receiver = NodeProperties::GetValueInput(node, 0);
  Node* frame_state = NodeProperties::GetFrameStateInput(node, 1);
  Node* effect = NodeProperties::GetEffectInput(node);
  Node* control = NodeProperties::GetControlInput(node);

  // Not much we can do if deoptimization support is disabled.
  if (!(flags() & kDeoptimizationEnabled)) return NoChange();

  // Retrieve the native context from the given {node}.
  Handle<Context> native_context;
  if (!GetNativeContext(node).ToHandle(&native_context)) return NoChange();

  // Compute property access infos for the receiver maps.
  AccessInfoFactory access_info_factory(dependencies(), native_context,
                                        graph()->zone());
  ZoneVector<PropertyAccessInfo> access_infos(zone());
  if (!access_info_factory.ComputePropertyAccessInfos(
          receiver_maps, name, access_mode, &access_infos)) {
    return NoChange();
  }

  // Nothing to do if we have no non-deprecated maps.
  if (access_infos.empty()) return NoChange();

  // The final states for every polymorphic branch. We join them with
  // Merge+Phi+EffectPhi at the bottom.
  ZoneVector<Node*> values(zone());
  ZoneVector<Node*> effects(zone());
  ZoneVector<Node*> controls(zone());

  // The list of "exiting" controls, which currently go to a single deoptimize.
  // TODO(bmeurer): Consider using an IC as fallback.
  Node* const exit_effect = effect;
  ZoneVector<Node*> exit_controls(zone());

  // Ensure that {index} matches the specified {name} (if {index} is given).
  if (index != nullptr) {
    Node* check = graph()->NewNode(simplified()->ReferenceEqual(Type::Name()),
                                   index, jsgraph()->HeapConstant(name));
    Node* branch =
        graph()->NewNode(common()->Branch(BranchHint::kTrue), check, control);
    exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
    control = graph()->NewNode(common()->IfTrue(), branch);
  }

  // Ensure that {receiver} is a heap object.  The Smi case is kept alive
  // here: it is either consumed below by an access info that covers Number
  // receivers, or (if still non-null at the end) routed to the deoptimize.
  Node* check = graph()->NewNode(simplified()->ObjectIsSmi(), receiver);
  Node* branch = graph()->NewNode(common()->Branch(), check, control);
  control = graph()->NewNode(common()->IfFalse(), branch);
  Node* receiverissmi_control = graph()->NewNode(common()->IfTrue(), branch);
  Node* receiverissmi_effect = effect;

  // Load the {receiver} map. The resulting effect is the dominating effect for
  // all (polymorphic) branches.
  Node* receiver_map = effect =
      graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
                       receiver, effect, control);

  // Generate code for the various different property access patterns.
  Node* fallthrough_control = control;
  for (PropertyAccessInfo const& access_info : access_infos) {
    Node* this_value = value;
    Node* this_receiver = receiver;
    Node* this_effect = effect;
    Node* this_control;

    // Perform map check on {receiver}.
    Type* receiver_type = access_info.receiver_type();
    if (receiver_type->Is(Type::String())) {
      // Emit an instance type check for strings.
      Node* receiver_instance_type = this_effect = graph()->NewNode(
          simplified()->LoadField(AccessBuilder::ForMapInstanceType()),
          receiver_map, this_effect, fallthrough_control);
      Node* check =
          graph()->NewNode(machine()->Uint32LessThan(), receiver_instance_type,
                           jsgraph()->Uint32Constant(FIRST_NONSTRING_TYPE));
      Node* branch =
          graph()->NewNode(common()->Branch(), check, fallthrough_control);
      fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
      this_control = graph()->NewNode(common()->IfTrue(), branch);
    } else {
      // Emit a (sequence of) map checks for other {receiver}s.
      ZoneVector<Node*> this_controls(zone());
      ZoneVector<Node*> this_effects(zone());
      for (auto i = access_info.receiver_type()->Classes(); !i.Done();
           i.Advance()) {
        Handle<Map> map = i.Current();
        Node* check =
            graph()->NewNode(simplified()->ReferenceEqual(Type::Internal()),
                             receiver_map, jsgraph()->Constant(map));
        Node* branch =
            graph()->NewNode(common()->Branch(), check, fallthrough_control);
        fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
        this_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
        this_effects.push_back(this_effect);
      }

      // The Number case requires special treatment to also deal with Smis.
      if (receiver_type->Is(Type::Number())) {
        // Join this check with the "receiver is smi" check above, and mark the
        // "receiver is smi" check as "consumed" so that we don't deoptimize if
        // the {receiver} is actually a Smi.
        if (receiverissmi_control != nullptr) {
          this_controls.push_back(receiverissmi_control);
          this_effects.push_back(receiverissmi_effect);
          receiverissmi_control = receiverissmi_effect = nullptr;
        }
      }

      // Create dominating Merge+EffectPhi for this {receiver} type.
      int const this_control_count = static_cast<int>(this_controls.size());
      this_control =
          (this_control_count == 1)
              ? this_controls.front()
              : graph()->NewNode(common()->Merge(this_control_count),
                                 this_control_count, &this_controls.front());
      // Note: the merge control is appended to {this_effects} as the
      // trailing input for the EffectPhi; in the single-predecessor case
      // front() still yields the lone effect, so no EffectPhi is needed.
      this_effects.push_back(this_control);
      int const this_effect_count = static_cast<int>(this_effects.size());
      this_effect =
          (this_control_count == 1)
              ? this_effects.front()
              : graph()->NewNode(common()->EffectPhi(this_control_count),
                                 this_effect_count, &this_effects.front());
    }

    // Determine actual holder and perform prototype chain checks.
    Handle<JSObject> holder;
    if (access_info.holder().ToHandle(&holder)) {
      AssumePrototypesStable(receiver_type, native_context, holder);
    }

    // Generate the actual property access.
    if (access_info.IsNotFound()) {
      // Property is absent: loads yield undefined (strong-mode loads must
      // not, so they bail out to the deoptimize instead).
      DCHECK_EQ(AccessMode::kLoad, access_mode);
      if (is_strong(language_mode)) {
        // TODO(bmeurer/mstarzinger): Add support for lowering inside try
        // blocks rewiring the IfException edge to a runtime call/throw.
        exit_controls.push_back(this_control);
        continue;
      } else {
        this_value = jsgraph()->UndefinedConstant();
      }
    } else if (access_info.IsDataConstant()) {
      this_value = jsgraph()->Constant(access_info.constant());
      if (access_mode == AccessMode::kStore) {
        // Storing to a constant field only succeeds when the stored value
        // is reference-equal to the known constant; otherwise deoptimize.
        Node* check = graph()->NewNode(
            simplified()->ReferenceEqual(Type::Tagged()), value, this_value);
        Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
                                        check, this_control);
        exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
        this_control = graph()->NewNode(common()->IfTrue(), branch);
      }
    } else {
      DCHECK(access_info.IsDataField());
      FieldIndex const field_index = access_info.field_index();
      FieldCheck const field_check = access_info.field_check();
      Type* const field_type = access_info.field_type();
      switch (field_check) {
        case FieldCheck::kNone:
          break;
        case FieldCheck::kJSArrayBufferViewBufferNotNeutered: {
          // Deoptimize if the view's backing JSArrayBuffer was neutered
          // (WasNeutered bit set in the buffer's bit field).
          Node* this_buffer = this_effect =
              graph()->NewNode(simplified()->LoadField(
                                   AccessBuilder::ForJSArrayBufferViewBuffer()),
                               this_receiver, this_effect, this_control);
          Node* this_buffer_bit_field = this_effect =
              graph()->NewNode(simplified()->LoadField(
                                   AccessBuilder::ForJSArrayBufferBitField()),
                               this_buffer, this_effect, this_control);
          Node* check = graph()->NewNode(
              machine()->Word32Equal(),
              graph()->NewNode(machine()->Word32And(), this_buffer_bit_field,
                               jsgraph()->Int32Constant(
                                   1 << JSArrayBuffer::WasNeutered::kShift)),
              jsgraph()->Int32Constant(0));
          Node* branch = graph()->NewNode(common()->Branch(BranchHint::kFalse),
                                          check, this_control);
          exit_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
          this_control = graph()->NewNode(common()->IfFalse(), branch);
          break;
        }
      }
      // Loads from a property on the prototype chain can read directly
      // from the (constant) holder instead of the receiver.
      if (access_mode == AccessMode::kLoad &&
          access_info.holder().ToHandle(&holder)) {
        this_receiver = jsgraph()->Constant(holder);
      }
      Node* this_storage = this_receiver;
      if (!field_index.is_inobject()) {
        // Out-of-object field: indirect through the properties backing store.
        this_storage = this_effect = graph()->NewNode(
            simplified()->LoadField(AccessBuilder::ForJSObjectProperties()),
            this_storage, this_effect, this_control);
      }
      FieldAccess field_access = {kTaggedBase, field_index.offset(), name,
                                  field_type, MachineType::AnyTagged()};
      if (access_mode == AccessMode::kLoad) {
        if (field_type->Is(Type::UntaggedFloat64())) {
          // Boxed double field: load the HeapNumber first, then read its
          // value; unboxed doubles are read from the field directly.
          if (!field_index.is_inobject() || field_index.is_hidden_field() ||
              !FLAG_unbox_double_fields) {
            this_storage = this_effect =
                graph()->NewNode(simplified()->LoadField(field_access),
                                 this_storage, this_effect, this_control);
            field_access.offset = HeapNumber::kValueOffset;
            field_access.name = MaybeHandle<Name>();
          }
          field_access.machine_type = MachineType::Float64();
        }
        this_value = this_effect =
            graph()->NewNode(simplified()->LoadField(field_access),
                             this_storage, this_effect, this_control);
      } else {
        DCHECK_EQ(AccessMode::kStore, access_mode);
        if (field_type->Is(Type::UntaggedFloat64())) {
          // The stored value must be a Number; otherwise deoptimize.
          Node* check =
              graph()->NewNode(simplified()->ObjectIsNumber(), this_value);
          Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
                                          check, this_control);
          exit_controls.push_back(
              graph()->NewNode(common()->IfFalse(), branch));
          this_control = graph()->NewNode(common()->IfTrue(), branch);
          this_value = graph()->NewNode(common()->Guard(Type::Number()),
                                        this_value, this_control);

          if (!field_index.is_inobject() || field_index.is_hidden_field() ||
              !FLAG_unbox_double_fields) {
            if (access_info.HasTransitionMap()) {
              // Allocate a MutableHeapNumber for the new property.
              Callable callable =
                  CodeFactory::AllocateMutableHeapNumber(isolate());
              CallDescriptor* desc = Linkage::GetStubCallDescriptor(
                  isolate(), jsgraph()->zone(), callable.descriptor(), 0,
                  CallDescriptor::kNoFlags, Operator::kNoThrow);
              Node* this_box = this_effect = graph()->NewNode(
                  common()->Call(desc),
                  jsgraph()->HeapConstant(callable.code()),
                  jsgraph()->NoContextConstant(), this_effect, this_control);
              this_effect = graph()->NewNode(
                  simplified()->StoreField(AccessBuilder::ForHeapNumberValue()),
                  this_box, this_value, this_effect, this_control);
              this_value = this_box;

              field_access.type = Type::TaggedPointer();
            } else {
              // We just store directly to the MutableHeapNumber.
              this_storage = this_effect =
                  graph()->NewNode(simplified()->LoadField(field_access),
                                   this_storage, this_effect, this_control);
              field_access.offset = HeapNumber::kValueOffset;
              field_access.name = MaybeHandle<Name>();
              field_access.machine_type = MachineType::Float64();
            }
          } else {
            // Unboxed double field, we store directly to the field.
            field_access.machine_type = MachineType::Float64();
          }
        } else if (field_type->Is(Type::TaggedSigned())) {
          // Field only holds Smis: deoptimize on non-Smi values.
          Node* check =
              graph()->NewNode(simplified()->ObjectIsSmi(), this_value);
          Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
                                          check, this_control);
          exit_controls.push_back(
              graph()->NewNode(common()->IfFalse(), branch));
          this_control = graph()->NewNode(common()->IfTrue(), branch);
          this_value = graph()->NewNode(common()->Guard(type_cache_.kSmi),
                                        this_value, this_control);
        } else if (field_type->Is(Type::TaggedPointer())) {
          // Field only holds heap objects: deoptimize on Smi values.
          Node* check =
              graph()->NewNode(simplified()->ObjectIsSmi(), this_value);
          Node* branch = graph()->NewNode(common()->Branch(BranchHint::kFalse),
                                          check, this_control);
          exit_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
          this_control = graph()->NewNode(common()->IfFalse(), branch);
          if (field_type->NumClasses() > 0) {
            // Emit a (sequence of) map checks for the value.
            ZoneVector<Node*> this_controls(zone());
            Node* this_value_map = this_effect = graph()->NewNode(
                simplified()->LoadField(AccessBuilder::ForMap()), this_value,
                this_effect, this_control);
            for (auto i = field_type->Classes(); !i.Done(); i.Advance()) {
              Handle<Map> field_map(i.Current());
              check = graph()->NewNode(
                  simplified()->ReferenceEqual(Type::Internal()),
                  this_value_map, jsgraph()->Constant(field_map));
              branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
                                        check, this_control);
              this_control = graph()->NewNode(common()->IfFalse(), branch);
              this_controls.push_back(
                  graph()->NewNode(common()->IfTrue(), branch));
            }
            // The remaining control (no map matched) deoptimizes.
            exit_controls.push_back(this_control);
            int const this_control_count =
                static_cast<int>(this_controls.size());
            this_control =
                (this_control_count == 1)
                    ? this_controls.front()
                    : graph()->NewNode(common()->Merge(this_control_count),
                                       this_control_count,
                                       &this_controls.front());
          }
        } else {
          DCHECK(field_type->Is(Type::Tagged()));
        }
        Handle<Map> transition_map;
        if (access_info.transition_map().ToHandle(&transition_map)) {
          // Transitioning store: update the map and store the value inside
          // an atomic (BeginRegion/FinishRegion) effect region.
          this_effect = graph()->NewNode(common()->BeginRegion(), this_effect);
          this_effect = graph()->NewNode(
              simplified()->StoreField(AccessBuilder::ForMap()), this_receiver,
              jsgraph()->Constant(transition_map), this_effect, this_control);
        }
        this_effect = graph()->NewNode(simplified()->StoreField(field_access),
                                       this_storage, this_value, this_effect,
                                       this_control);
        if (access_info.HasTransitionMap()) {
          this_effect =
              graph()->NewNode(common()->FinishRegion(),
                               jsgraph()->UndefinedConstant(), this_effect);
        }
      }
    }

    // Remember the final state for this property access.
    values.push_back(this_value);
    effects.push_back(this_effect);
    controls.push_back(this_control);
  }

  // Collect the fallthrough control as final "exit" control.
  if (fallthrough_control != control) {
    // Mark the last fallthrough branch as deferred.
    MarkAsDeferred(fallthrough_control);
  }
  exit_controls.push_back(fallthrough_control);

  // Also collect the "receiver is smi" control if we didn't handle the case of
  // Number primitives in the polymorphic branches above.
  if (receiverissmi_control != nullptr) {
    // Mark the "receiver is smi" case as deferred.
    MarkAsDeferred(receiverissmi_control);
    DCHECK_EQ(exit_effect, receiverissmi_effect);
    exit_controls.push_back(receiverissmi_control);
  }

  // Generate the single "exit" point, where we get if either all map/instance
  // type checks failed, or one of the assumptions inside one of the cases
  // fails (i.e. failing prototype chain check).
  // TODO(bmeurer): Consider falling back to IC here if deoptimization is
  // disabled.
  int const exit_control_count = static_cast<int>(exit_controls.size());
  Node* exit_control =
      (exit_control_count == 1)
          ? exit_controls.front()
          : graph()->NewNode(common()->Merge(exit_control_count),
                             exit_control_count, &exit_controls.front());
  Node* deoptimize =
      graph()->NewNode(common()->Deoptimize(DeoptimizeKind::kEager),
                       frame_state, exit_effect, exit_control);
  // TODO(bmeurer): This should be on the AdvancedReducer somehow.
  NodeProperties::MergeControlToEnd(graph(), common(), deoptimize);

  // Generate the final merge point for all (polymorphic) branches.
  int const control_count = static_cast<int>(controls.size());
  if (control_count == 0) {
    // Every branch bailed out (e.g. strong-mode load of absent property):
    // the access itself is dead.
    value = effect = control = jsgraph()->Dead();
  } else if (control_count == 1) {
    value = values.front();
    effect = effects.front();
    control = controls.front();
  } else {
    control = graph()->NewNode(common()->Merge(control_count), control_count,
                               &controls.front());
    // Phi/EffectPhi take the merge control as trailing input, hence the
    // {control_count + 1} input counts below.
    values.push_back(control);
    value = graph()->NewNode(
        common()->Phi(MachineRepresentation::kTagged, control_count),
        control_count + 1, &values.front());
    effects.push_back(control);
    effect = graph()->NewNode(common()->EffectPhi(control_count),
                              control_count + 1, &effects.front());
  }
  ReplaceWithValue(node, value, effect, control);
  return Replace(value);
}
    444 
    445 
    446 Reduction JSNativeContextSpecialization::ReduceJSLoadNamed(Node* node) {
    447   DCHECK_EQ(IrOpcode::kJSLoadNamed, node->opcode());
    448   NamedAccess const& p = NamedAccessOf(node->op());
    449   Node* const value = jsgraph()->Dead();
    450 
    451   // Extract receiver maps from the LOAD_IC using the LoadICNexus.
    452   MapHandleList receiver_maps;
    453   if (!p.feedback().IsValid()) return NoChange();
    454   LoadICNexus nexus(p.feedback().vector(), p.feedback().slot());
    455   if (nexus.ExtractMaps(&receiver_maps) == 0) return NoChange();
    456   DCHECK_LT(0, receiver_maps.length());
    457 
    458   // Try to lower the named access based on the {receiver_maps}.
    459   return ReduceNamedAccess(node, value, receiver_maps, p.name(),
    460                            AccessMode::kLoad, p.language_mode());
    461 }
    462 
    463 
    464 Reduction JSNativeContextSpecialization::ReduceJSStoreNamed(Node* node) {
    465   DCHECK_EQ(IrOpcode::kJSStoreNamed, node->opcode());
    466   NamedAccess const& p = NamedAccessOf(node->op());
    467   Node* const value = NodeProperties::GetValueInput(node, 1);
    468 
    469   // Extract receiver maps from the STORE_IC using the StoreICNexus.
    470   MapHandleList receiver_maps;
    471   if (!p.feedback().IsValid()) return NoChange();
    472   StoreICNexus nexus(p.feedback().vector(), p.feedback().slot());
    473   if (nexus.ExtractMaps(&receiver_maps) == 0) return NoChange();
    474   DCHECK_LT(0, receiver_maps.length());
    475 
    476   // Try to lower the named access based on the {receiver_maps}.
    477   return ReduceNamedAccess(node, value, receiver_maps, p.name(),
    478                            AccessMode::kStore, p.language_mode());
    479 }
    480 
    481 
    482 Reduction JSNativeContextSpecialization::ReduceElementAccess(
    483     Node* node, Node* index, Node* value, MapHandleList const& receiver_maps,
    484     AccessMode access_mode, LanguageMode language_mode,
    485     KeyedAccessStoreMode store_mode) {
    486   DCHECK(node->opcode() == IrOpcode::kJSLoadProperty ||
    487          node->opcode() == IrOpcode::kJSStoreProperty);
    488   Node* receiver = NodeProperties::GetValueInput(node, 0);
    489   Node* context = NodeProperties::GetContextInput(node);
    490   Node* frame_state = NodeProperties::GetFrameStateInput(node, 1);
    491   Node* effect = NodeProperties::GetEffectInput(node);
    492   Node* control = NodeProperties::GetControlInput(node);
    493 
    494   // Not much we can do if deoptimization support is disabled.
    495   if (!(flags() & kDeoptimizationEnabled)) return NoChange();
    496 
    497   // TODO(bmeurer): Add support for non-standard stores.
    498   if (store_mode != STANDARD_STORE) return NoChange();
    499 
    500   // Retrieve the native context from the given {node}.
    501   Handle<Context> native_context;
    502   if (!GetNativeContext(node).ToHandle(&native_context)) return NoChange();
    503 
    504   // Compute element access infos for the receiver maps.
    505   AccessInfoFactory access_info_factory(dependencies(), native_context,
    506                                         graph()->zone());
    507   ZoneVector<ElementAccessInfo> access_infos(zone());
    508   if (!access_info_factory.ComputeElementAccessInfos(receiver_maps, access_mode,
    509                                                      &access_infos)) {
    510     return NoChange();
    511   }
    512 
    513   // Nothing to do if we have no non-deprecated maps.
    514   if (access_infos.empty()) return NoChange();
    515 
    516   // The final states for every polymorphic branch. We join them with
    517   // Merge+Phi+EffectPhi at the bottom.
    518   ZoneVector<Node*> values(zone());
    519   ZoneVector<Node*> effects(zone());
    520   ZoneVector<Node*> controls(zone());
    521 
    522   // The list of "exiting" controls, which currently go to a single deoptimize.
    523   // TODO(bmeurer): Consider using an IC as fallback.
    524   Node* const exit_effect = effect;
    525   ZoneVector<Node*> exit_controls(zone());
    526 
    527   // Ensure that {receiver} is a heap object.
    528   Node* check = graph()->NewNode(simplified()->ObjectIsSmi(), receiver);
    529   Node* branch =
    530       graph()->NewNode(common()->Branch(BranchHint::kFalse), check, control);
    531   exit_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
    532   control = graph()->NewNode(common()->IfFalse(), branch);
    533 
    534   // Load the {receiver} map. The resulting effect is the dominating effect for
    535   // all (polymorphic) branches.
    536   Node* receiver_map = effect =
    537       graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
    538                        receiver, effect, control);
    539 
    540   // Generate code for the various different element access patterns.
    541   Node* fallthrough_control = control;
    542   for (ElementAccessInfo const& access_info : access_infos) {
    543     Node* this_receiver = receiver;
    544     Node* this_value = value;
    545     Node* this_index = index;
    546     Node* this_effect;
    547     Node* this_control;
    548 
    549     // Perform map check on {receiver}.
    550     Type* receiver_type = access_info.receiver_type();
    551     bool receiver_is_jsarray = true;
    552     {
    553       ZoneVector<Node*> this_controls(zone());
    554       ZoneVector<Node*> this_effects(zone());
    555       for (auto i = access_info.receiver_type()->Classes(); !i.Done();
    556            i.Advance()) {
    557         Handle<Map> map = i.Current();
    558         Node* check =
    559             graph()->NewNode(simplified()->ReferenceEqual(Type::Any()),
    560                              receiver_map, jsgraph()->Constant(map));
    561         Node* branch =
    562             graph()->NewNode(common()->Branch(), check, fallthrough_control);
    563         this_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
    564         this_effects.push_back(effect);
    565         fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
    566         if (!map->IsJSArrayMap()) receiver_is_jsarray = false;
    567       }
    568 
    569       // Generate possible elements kind transitions.
    570       for (auto transition : access_info.transitions()) {
    571         Handle<Map> transition_source = transition.first;
    572         Handle<Map> transition_target = transition.second;
    573 
    574         // Check if {receiver} has the specified {transition_source} map.
    575         Node* check = graph()->NewNode(
    576             simplified()->ReferenceEqual(Type::Any()), receiver_map,
    577             jsgraph()->HeapConstant(transition_source));
    578         Node* branch =
    579             graph()->NewNode(common()->Branch(), check, fallthrough_control);
    580 
    581         // Migrate {receiver} from {transition_source} to {transition_target}.
    582         Node* transition_control = graph()->NewNode(common()->IfTrue(), branch);
    583         Node* transition_effect = effect;
    584         if (IsSimpleMapChangeTransition(transition_source->elements_kind(),
    585                                         transition_target->elements_kind())) {
    586           // In-place migration, just store the {transition_target} map.
    587           transition_effect = graph()->NewNode(
    588               simplified()->StoreField(AccessBuilder::ForMap()), receiver,
    589               jsgraph()->HeapConstant(transition_target), transition_effect,
    590               transition_control);
    591         } else {
    592           // Instance migration, let the stub deal with the {receiver}.
    593           TransitionElementsKindStub stub(isolate(),
    594                                           transition_source->elements_kind(),
    595                                           transition_target->elements_kind(),
    596                                           transition_source->IsJSArrayMap());
    597           CallDescriptor const* const desc = Linkage::GetStubCallDescriptor(
    598               isolate(), graph()->zone(), stub.GetCallInterfaceDescriptor(), 0,
    599               CallDescriptor::kNeedsFrameState, node->op()->properties());
    600           transition_effect = graph()->NewNode(
    601               common()->Call(desc), jsgraph()->HeapConstant(stub.GetCode()),
    602               receiver, jsgraph()->HeapConstant(transition_target), context,
    603               frame_state, transition_effect, transition_control);
    604         }
    605         this_controls.push_back(transition_control);
    606         this_effects.push_back(transition_effect);
    607 
    608         fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
    609       }
    610 
    611       // Create single chokepoint for the control.
    612       int const this_control_count = static_cast<int>(this_controls.size());
    613       if (this_control_count == 1) {
    614         this_control = this_controls.front();
    615         this_effect = this_effects.front();
    616       } else {
    617         this_control =
    618             graph()->NewNode(common()->Merge(this_control_count),
    619                              this_control_count, &this_controls.front());
    620         this_effects.push_back(this_control);
    621         this_effect =
    622             graph()->NewNode(common()->EffectPhi(this_control_count),
    623                              this_control_count + 1, &this_effects.front());
    624       }
    625     }
    626 
    627     // Certain stores need a prototype chain check because shape changes
    628     // could allow callbacks on elements in the prototype chain that are
    629     // not compatible with (monomorphic) keyed stores.
    630     Handle<JSObject> holder;
    631     if (access_info.holder().ToHandle(&holder)) {
    632       AssumePrototypesStable(receiver_type, native_context, holder);
    633     }
    634 
    635     // Check that the {index} is actually a Number.
    636     if (!NumberMatcher(this_index).HasValue()) {
    637       Node* check =
    638           graph()->NewNode(simplified()->ObjectIsNumber(), this_index);
    639       Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
    640                                       check, this_control);
    641       exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
    642       this_control = graph()->NewNode(common()->IfTrue(), branch);
    643       this_index = graph()->NewNode(common()->Guard(Type::Number()), this_index,
    644                                     this_control);
    645     }
    646 
    647     // Convert the {index} to an unsigned32 value and check if the result is
    648     // equal to the original {index}.
    649     if (!NumberMatcher(this_index).IsInRange(0.0, kMaxUInt32)) {
    650       Node* this_index32 =
    651           graph()->NewNode(simplified()->NumberToUint32(), this_index);
    652       Node* check = graph()->NewNode(simplified()->NumberEqual(), this_index32,
    653                                      this_index);
    654       Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
    655                                       check, this_control);
    656       exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
    657       this_control = graph()->NewNode(common()->IfTrue(), branch);
    658       this_index = this_index32;
    659     }
    660 
    661     // TODO(bmeurer): We currently specialize based on elements kind. We should
    662     // also be able to properly support strings and other JSObjects here.
    663     ElementsKind elements_kind = access_info.elements_kind();
    664 
    665     // Load the elements for the {receiver}.
    666     Node* this_elements = this_effect = graph()->NewNode(
    667         simplified()->LoadField(AccessBuilder::ForJSObjectElements()),
    668         this_receiver, this_effect, this_control);
    669 
    670     // Don't try to store to a copy-on-write backing store.
    671     if (access_mode == AccessMode::kStore &&
    672         IsFastSmiOrObjectElementsKind(elements_kind)) {
    673       Node* this_elements_map = this_effect =
    674           graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
    675                            this_elements, this_effect, this_control);
    676       check = graph()->NewNode(
    677           simplified()->ReferenceEqual(Type::Any()), this_elements_map,
    678           jsgraph()->HeapConstant(factory()->fixed_array_map()));
    679       branch = graph()->NewNode(common()->Branch(BranchHint::kTrue), check,
    680                                 this_control);
    681       exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
    682       this_control = graph()->NewNode(common()->IfTrue(), branch);
    683     }
    684 
    685     // Load the length of the {receiver}.
    686     Node* this_length = this_effect =
    687         receiver_is_jsarray
    688             ? graph()->NewNode(
    689                   simplified()->LoadField(
    690                       AccessBuilder::ForJSArrayLength(elements_kind)),
    691                   this_receiver, this_effect, this_control)
    692             : graph()->NewNode(
    693                   simplified()->LoadField(AccessBuilder::ForFixedArrayLength()),
    694                   this_elements, this_effect, this_control);
    695 
    696     // Check that the {index} is in the valid range for the {receiver}.
    697     Node* check = graph()->NewNode(simplified()->NumberLessThan(), this_index,
    698                                    this_length);
    699     Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue), check,
    700                                     this_control);
    701     exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
    702     this_control = graph()->NewNode(common()->IfTrue(), branch);
    703 
    704     // Compute the element access.
    705     Type* element_type = Type::Any();
    706     MachineType element_machine_type = MachineType::AnyTagged();
    707     if (IsFastDoubleElementsKind(elements_kind)) {
    708       element_type = type_cache_.kFloat64;
    709       element_machine_type = MachineType::Float64();
    710     } else if (IsFastSmiElementsKind(elements_kind)) {
    711       element_type = type_cache_.kSmi;
    712     }
    713     ElementAccess element_access = {kTaggedBase, FixedArray::kHeaderSize,
    714                                     element_type, element_machine_type};
    715 
    716     // Access the actual element.
    717     // TODO(bmeurer): Refactor this into separate methods or even a separate
    718     // class that deals with the elements access.
    719     if (access_mode == AccessMode::kLoad) {
    720       // Compute the real element access type, which includes the hole in case
    721       // of holey backing stores.
    722       if (elements_kind == FAST_HOLEY_ELEMENTS ||
    723           elements_kind == FAST_HOLEY_SMI_ELEMENTS) {
    724         element_access.type = Type::Union(
    725             element_type,
    726             Type::Constant(factory()->the_hole_value(), graph()->zone()),
    727             graph()->zone());
    728       }
    729       // Perform the actual backing store access.
    730       this_value = this_effect = graph()->NewNode(
    731           simplified()->LoadElement(element_access), this_elements, this_index,
    732           this_effect, this_control);
    733       // Handle loading from holey backing stores correctly, by either mapping
    734       // the hole to undefined if possible, or deoptimizing otherwise.
    735       if (elements_kind == FAST_HOLEY_ELEMENTS ||
    736           elements_kind == FAST_HOLEY_SMI_ELEMENTS) {
    737         // Perform the hole check on the result.
    738         Node* check =
    739             graph()->NewNode(simplified()->ReferenceEqual(element_access.type),
    740                              this_value, jsgraph()->TheHoleConstant());
    741         Node* branch = graph()->NewNode(common()->Branch(BranchHint::kFalse),
    742                                         check, this_control);
    743         Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
    744         Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
    745         // Check if we are allowed to turn the hole into undefined.
    746         Type* initial_holey_array_type = Type::Class(
    747             handle(isolate()->get_initial_js_array_map(elements_kind)),
    748             graph()->zone());
    749         if (receiver_type->NowIs(initial_holey_array_type) &&
    750             isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
    751           // Add a code dependency on the array protector cell.
    752           AssumePrototypesStable(receiver_type, native_context,
    753                                  isolate()->initial_object_prototype());
    754           dependencies()->AssumePropertyCell(factory()->array_protector());
    755           // Turn the hole into undefined.
    756           this_control =
    757               graph()->NewNode(common()->Merge(2), if_true, if_false);
    758           this_value = graph()->NewNode(
    759               common()->Phi(MachineRepresentation::kTagged, 2),
    760               jsgraph()->UndefinedConstant(), this_value, this_control);
    761           element_type =
    762               Type::Union(element_type, Type::Undefined(), graph()->zone());
    763         } else {
    764           // Deoptimize in case of the hole.
    765           exit_controls.push_back(if_true);
    766           this_control = if_false;
    767         }
    768         // Rename the result to represent the actual type (not polluted by the
    769         // hole).
    770         this_value = graph()->NewNode(common()->Guard(element_type), this_value,
    771                                       this_control);
    772       } else if (elements_kind == FAST_HOLEY_DOUBLE_ELEMENTS) {
    773         // Perform the hole check on the result.
    774         Node* check =
    775             graph()->NewNode(simplified()->NumberIsHoleNaN(), this_value);
    776         // Check if we are allowed to return the hole directly.
    777         Type* initial_holey_array_type = Type::Class(
    778             handle(isolate()->get_initial_js_array_map(elements_kind)),
    779             graph()->zone());
    780         if (receiver_type->NowIs(initial_holey_array_type) &&
    781             isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
    782           // Add a code dependency on the array protector cell.
    783           AssumePrototypesStable(receiver_type, native_context,
    784                                  isolate()->initial_object_prototype());
    785           dependencies()->AssumePropertyCell(factory()->array_protector());
    786           // Turn the hole into undefined.
    787           this_value = graph()->NewNode(
    788               common()->Select(MachineRepresentation::kTagged,
    789                                BranchHint::kFalse),
    790               check, jsgraph()->UndefinedConstant(), this_value);
    791         } else {
    792           // Deoptimize in case of the hole.
    793           Node* branch = graph()->NewNode(common()->Branch(BranchHint::kFalse),
    794                                           check, this_control);
    795           this_control = graph()->NewNode(common()->IfFalse(), branch);
    796           exit_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
    797         }
    798       }
    799     } else {
    800       DCHECK_EQ(AccessMode::kStore, access_mode);
    801       if (IsFastSmiElementsKind(elements_kind)) {
    802         Node* check = graph()->NewNode(simplified()->ObjectIsSmi(), this_value);
    803         Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
    804                                         check, this_control);
    805         exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
    806         this_control = graph()->NewNode(common()->IfTrue(), branch);
    807         this_value = graph()->NewNode(common()->Guard(type_cache_.kSmi),
    808                                       this_value, this_control);
    809       } else if (IsFastDoubleElementsKind(elements_kind)) {
    810         Node* check =
    811             graph()->NewNode(simplified()->ObjectIsNumber(), this_value);
    812         Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
    813                                         check, this_control);
    814         exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
    815         this_control = graph()->NewNode(common()->IfTrue(), branch);
    816         this_value = graph()->NewNode(common()->Guard(Type::Number()),
    817                                       this_value, this_control);
    818       }
    819       this_effect = graph()->NewNode(simplified()->StoreElement(element_access),
    820                                      this_elements, this_index, this_value,
    821                                      this_effect, this_control);
    822     }
    823 
    824     // Remember the final state for this element access.
    825     values.push_back(this_value);
    826     effects.push_back(this_effect);
    827     controls.push_back(this_control);
    828   }
    829 
    830   // Collect the fallthrough control as final "exit" control.
    831   if (fallthrough_control != control) {
    832     // Mark the last fallthrough branch as deferred.
    833     MarkAsDeferred(fallthrough_control);
    834   }
    835   exit_controls.push_back(fallthrough_control);
    836 
    837   // Generate the single "exit" point, where we get if either all map/instance
    838   // type checks failed, or one of the assumptions inside one of the cases
  // fails (i.e. a failing prototype chain check).
    840   // TODO(bmeurer): Consider falling back to IC here if deoptimization is
    841   // disabled.
    842   int const exit_control_count = static_cast<int>(exit_controls.size());
    843   Node* exit_control =
    844       (exit_control_count == 1)
    845           ? exit_controls.front()
    846           : graph()->NewNode(common()->Merge(exit_control_count),
    847                              exit_control_count, &exit_controls.front());
    848   Node* deoptimize =
    849       graph()->NewNode(common()->Deoptimize(DeoptimizeKind::kEager),
    850                        frame_state, exit_effect, exit_control);
    851   // TODO(bmeurer): This should be on the AdvancedReducer somehow.
    852   NodeProperties::MergeControlToEnd(graph(), common(), deoptimize);
    853 
    854   // Generate the final merge point for all (polymorphic) branches.
    855   int const control_count = static_cast<int>(controls.size());
    856   if (control_count == 0) {
    857     value = effect = control = jsgraph()->Dead();
    858   } else if (control_count == 1) {
    859     value = values.front();
    860     effect = effects.front();
    861     control = controls.front();
    862   } else {
    863     control = graph()->NewNode(common()->Merge(control_count), control_count,
    864                                &controls.front());
    865     values.push_back(control);
    866     value = graph()->NewNode(
    867         common()->Phi(MachineRepresentation::kTagged, control_count),
    868         control_count + 1, &values.front());
    869     effects.push_back(control);
    870     effect = graph()->NewNode(common()->EffectPhi(control_count),
    871                               control_count + 1, &effects.front());
    872   }
    873   ReplaceWithValue(node, value, effect, control);
    874   return Replace(value);
    875 }
    876 
    877 
    878 Reduction JSNativeContextSpecialization::ReduceKeyedAccess(
    879     Node* node, Node* index, Node* value, FeedbackNexus const& nexus,
    880     AccessMode access_mode, LanguageMode language_mode,
    881     KeyedAccessStoreMode store_mode) {
    882   DCHECK(node->opcode() == IrOpcode::kJSLoadProperty ||
    883          node->opcode() == IrOpcode::kJSStoreProperty);
    884 
    885   // Extract receiver maps from the {nexus}.
    886   MapHandleList receiver_maps;
    887   if (nexus.ExtractMaps(&receiver_maps) == 0) return NoChange();
    888   DCHECK_LT(0, receiver_maps.length());
    889 
    890   // Optimize access for constant {index}.
    891   HeapObjectMatcher mindex(index);
    892   if (mindex.HasValue() && mindex.Value()->IsPrimitive()) {
    893     // Keyed access requires a ToPropertyKey on the {index} first before
    894     // looking up the property on the object (see ES6 section 12.3.2.1).
    895     // We can only do this for non-observable ToPropertyKey invocations,
    896     // so we limit the constant indices to primitives at this point.
    897     Handle<Name> name;
    898     if (Object::ToName(isolate(), mindex.Value()).ToHandle(&name)) {
    899       uint32_t array_index;
    900       if (name->AsArrayIndex(&array_index)) {
    901         // Use the constant array index.
    902         index = jsgraph()->Constant(static_cast<double>(array_index));
    903       } else {
    904         name = factory()->InternalizeName(name);
    905         return ReduceNamedAccess(node, value, receiver_maps, name, access_mode,
    906                                  language_mode);
    907       }
    908     }
    909   }
    910 
    911   // Check if we have feedback for a named access.
    912   if (Name* name = nexus.FindFirstName()) {
    913     return ReduceNamedAccess(node, value, receiver_maps,
    914                              handle(name, isolate()), access_mode,
    915                              language_mode, index);
    916   }
    917 
    918   // Try to lower the element access based on the {receiver_maps}.
    919   return ReduceElementAccess(node, index, value, receiver_maps, access_mode,
    920                              language_mode, store_mode);
    921 }
    922 
    923 
    924 Reduction JSNativeContextSpecialization::ReduceJSLoadProperty(Node* node) {
    925   DCHECK_EQ(IrOpcode::kJSLoadProperty, node->opcode());
    926   PropertyAccess const& p = PropertyAccessOf(node->op());
    927   Node* const index = NodeProperties::GetValueInput(node, 1);
    928   Node* const value = jsgraph()->Dead();
    929 
    930   // Extract receiver maps from the KEYED_LOAD_IC using the KeyedLoadICNexus.
    931   if (!p.feedback().IsValid()) return NoChange();
    932   KeyedLoadICNexus nexus(p.feedback().vector(), p.feedback().slot());
    933 
    934   // Try to lower the keyed access based on the {nexus}.
    935   return ReduceKeyedAccess(node, index, value, nexus, AccessMode::kLoad,
    936                            p.language_mode(), STANDARD_STORE);
    937 }
    938 
    939 
    940 Reduction JSNativeContextSpecialization::ReduceJSStoreProperty(Node* node) {
    941   DCHECK_EQ(IrOpcode::kJSStoreProperty, node->opcode());
    942   PropertyAccess const& p = PropertyAccessOf(node->op());
    943   Node* const index = NodeProperties::GetValueInput(node, 1);
    944   Node* const value = NodeProperties::GetValueInput(node, 2);
    945 
    946   // Extract receiver maps from the KEYED_STORE_IC using the KeyedStoreICNexus.
    947   if (!p.feedback().IsValid()) return NoChange();
    948   KeyedStoreICNexus nexus(p.feedback().vector(), p.feedback().slot());
    949 
    950   // Extract the keyed access store mode from the KEYED_STORE_IC.
    951   KeyedAccessStoreMode store_mode = nexus.GetKeyedAccessStoreMode();
    952 
    953   // Try to lower the keyed access based on the {nexus}.
    954   return ReduceKeyedAccess(node, index, value, nexus, AccessMode::kStore,
    955                            p.language_mode(), store_mode);
    956 }
    957 
    958 
    959 void JSNativeContextSpecialization::AssumePrototypesStable(
    960     Type* receiver_type, Handle<Context> native_context,
    961     Handle<JSObject> holder) {
    962   // Determine actual holder and perform prototype chain checks.
    963   for (auto i = receiver_type->Classes(); !i.Done(); i.Advance()) {
    964     Handle<Map> map = i.Current();
    965     // Perform the implicit ToObject for primitives here.
    966     // Implemented according to ES6 section 7.3.2 GetV (V, P).
    967     Handle<JSFunction> constructor;
    968     if (Map::GetConstructorFunction(map, native_context)
    969             .ToHandle(&constructor)) {
    970       map = handle(constructor->initial_map(), isolate());
    971     }
    972     dependencies()->AssumePrototypeMapsStable(map, holder);
    973   }
    974 }
    975 
    976 
    977 void JSNativeContextSpecialization::MarkAsDeferred(Node* if_projection) {
    978   Node* branch = NodeProperties::GetControlInput(if_projection);
    979   DCHECK_EQ(IrOpcode::kBranch, branch->opcode());
    980   if (if_projection->opcode() == IrOpcode::kIfTrue) {
    981     NodeProperties::ChangeOp(branch, common()->Branch(BranchHint::kFalse));
    982   } else {
    983     DCHECK_EQ(IrOpcode::kIfFalse, if_projection->opcode());
    984     NodeProperties::ChangeOp(branch, common()->Branch(BranchHint::kTrue));
    985   }
    986 }
    987 
    988 
    989 MaybeHandle<Context> JSNativeContextSpecialization::GetNativeContext(
    990     Node* node) {
    991   Node* const context = NodeProperties::GetContextInput(node);
    992   return NodeProperties::GetSpecializationNativeContext(context,
    993                                                         native_context());
    994 }
    995 
    996 
// Shorthand accessor for the underlying TurboFan graph.
Graph* JSNativeContextSpecialization::graph() const {
  return jsgraph()->graph();
}
   1000 
   1001 
// Shorthand accessor for the isolate owning the graph.
Isolate* JSNativeContextSpecialization::isolate() const {
  return jsgraph()->isolate();
}
   1005 
   1006 
// Shorthand accessor for the isolate's heap object factory.
Factory* JSNativeContextSpecialization::factory() const {
  return isolate()->factory();
}
   1010 
   1011 
// Shorthand accessor for the machine-level operator builder.
MachineOperatorBuilder* JSNativeContextSpecialization::machine() const {
  return jsgraph()->machine();
}
   1015 
   1016 
// Shorthand accessor for the common operator builder.
CommonOperatorBuilder* JSNativeContextSpecialization::common() const {
  return jsgraph()->common();
}
   1020 
   1021 
// Shorthand accessor for the JavaScript-level operator builder.
JSOperatorBuilder* JSNativeContextSpecialization::javascript() const {
  return jsgraph()->javascript();
}
   1025 
   1026 
// Shorthand accessor for the simplified operator builder.
SimplifiedOperatorBuilder* JSNativeContextSpecialization::simplified() const {
  return jsgraph()->simplified();
}
   1030 
   1031 }  // namespace compiler
   1032 }  // namespace internal
   1033 }  // namespace v8
   1034