Home | History | Annotate | Download | only in src
      1 // Copyright 2016 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "src/code-stub-assembler.h"
      6 #include "src/code-factory.h"
      7 #include "src/frames-inl.h"
      8 #include "src/frames.h"
      9 #include "src/ic/stub-cache.h"
     10 
     11 namespace v8 {
     12 namespace internal {
     13 
     14 using compiler::Node;
     15 
// Constructs an assembler for a stub whose parameters are described by a
// CallInterfaceDescriptor; simply forwards to the CodeAssembler base.
CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
                                     const CallInterfaceDescriptor& descriptor,
                                     Code::Flags flags, const char* name,
                                     size_t result_size)
    : compiler::CodeAssembler(isolate, zone, descriptor, flags, name,
                              result_size) {}
     22 
// Constructs an assembler for a stub with a raw (untyped) parameter count;
// forwards to the CodeAssembler base.
CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
                                     int parameter_count, Code::Flags flags,
                                     const char* name)
    : compiler::CodeAssembler(isolate, zone, parameter_count, flags, name) {}
     27 
// Debug-build-only check: emits code that breaks into the debugger when
// {condition} is false. Compiles to nothing in release builds.
void CodeStubAssembler::Assert(Node* condition) {
#if defined(DEBUG)
  Label ok(this);
  Comment("[ Assert");
  GotoIf(condition, &ok);
  // Condition failed: trap. The Goto afterwards keeps the graph well-formed.
  DebugBreak();
  Goto(&ok);
  Bind(&ok);
  Comment("] Assert");
#endif
}
     39 
// Returns the canonical boolean map as a heap constant.
Node* CodeStubAssembler::BooleanMapConstant() {
  return HeapConstant(isolate()->factory()->boolean_map());
}
     43 
// Returns the canonical empty string, loaded from the roots list.
Node* CodeStubAssembler::EmptyStringConstant() {
  return LoadRoot(Heap::kempty_stringRootIndex);
}
     47 
// Returns the HeapNumber map as a heap constant.
Node* CodeStubAssembler::HeapNumberMapConstant() {
  return HeapConstant(isolate()->factory()->heap_number_map());
}
     51 
// Returns the sentinel used when a runtime call does not need a context:
// the Smi zero.
Node* CodeStubAssembler::NoContextConstant() {
  return SmiConstant(Smi::FromInt(0));
}
     55 
// Returns the null value, loaded from the roots list.
Node* CodeStubAssembler::NullConstant() {
  return LoadRoot(Heap::kNullValueRootIndex);
}
     59 
// Returns the undefined value, loaded from the roots list.
Node* CodeStubAssembler::UndefinedConstant() {
  return LoadRoot(Heap::kUndefinedValueRootIndex);
}
     63 
// Returns the-hole value, loaded from the roots list.
Node* CodeStubAssembler::TheHoleConstant() {
  return LoadRoot(Heap::kTheHoleValueRootIndex);
}
     67 
// Returns the heap's hash seed (stored as a Smi root) untagged to word32.
Node* CodeStubAssembler::HashSeed() {
  return SmiToWord32(LoadRoot(Heap::kHashSeedRootIndex));
}
     71 
// Returns the stale-register sentinel, loaded from the roots list.
Node* CodeStubAssembler::StaleRegisterConstant() {
  return LoadRoot(Heap::kStaleRegisterRootIndex);
}
     75 
     76 Node* CodeStubAssembler::Float64Round(Node* x) {
     77   Node* one = Float64Constant(1.0);
     78   Node* one_half = Float64Constant(0.5);
     79 
     80   Variable var_x(this, MachineRepresentation::kFloat64);
     81   Label return_x(this);
     82 
     83   // Round up {x} towards Infinity.
     84   var_x.Bind(Float64Ceil(x));
     85 
     86   GotoIf(Float64LessThanOrEqual(Float64Sub(var_x.value(), one_half), x),
     87          &return_x);
     88   var_x.Bind(Float64Sub(var_x.value(), one));
     89   Goto(&return_x);
     90 
     91   Bind(&return_x);
     92   return var_x.value();
     93 }
     94 
// Rounds {x} towards +Infinity. Uses the hardware round-up instruction when
// available; otherwise emulates it with the 2^52 trick: adding and then
// subtracting 2^52 discards the fractional bits of any double whose magnitude
// is below 2^52, after which one compare/adjust fixes the rounding direction.
Node* CodeStubAssembler::Float64Ceil(Node* x) {
  if (IsFloat64RoundUpSupported()) {
    return Float64RoundUp(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  Variable var_x(this, MachineRepresentation::kFloat64);
  Label return_x(this), return_minus_x(this);
  var_x.Bind(x);

  // Check if {x} is greater than zero.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  Bind(&if_xgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]0,2^52[.
    GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

    // Round positive {x} towards Infinity.
    var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
    // The 2^52 trick rounds to nearest; bump up by one if it rounded down.
    GotoUnless(Float64LessThan(var_x.value(), x), &return_x);
    var_x.Bind(Float64Add(var_x.value(), one));
    Goto(&return_x);
  }

  Bind(&if_xnotgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]-2^52,0[
    // (this also returns -0, NaN and -Infinity unchanged).
    GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
    GotoUnless(Float64LessThan(x, zero), &return_x);

    // Round negated {x} towards Infinity and return the result negated.
    Node* minus_x = Float64Neg(x);
    var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
    // Ceiling of a negative value is -floor(-value); fix over-rounding.
    GotoUnless(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
    var_x.Bind(Float64Sub(var_x.value(), one));
    Goto(&return_minus_x);
  }

  Bind(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  Bind(&return_x);
  return var_x.value();
}
    147 
// Rounds {x} towards -Infinity. Uses the hardware round-down instruction when
// available; otherwise emulates it with the same 2^52 add/subtract trick as
// Float64Ceil, adjusting in the opposite direction.
Node* CodeStubAssembler::Float64Floor(Node* x) {
  if (IsFloat64RoundDownSupported()) {
    return Float64RoundDown(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  Variable var_x(this, MachineRepresentation::kFloat64);
  Label return_x(this), return_minus_x(this);
  var_x.Bind(x);

  // Check if {x} is greater than zero.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  Bind(&if_xgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]0,2^52[.
    GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

    // Round positive {x} towards -Infinity.
    var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
    // The 2^52 trick rounds to nearest; step down by one if it rounded up.
    GotoUnless(Float64GreaterThan(var_x.value(), x), &return_x);
    var_x.Bind(Float64Sub(var_x.value(), one));
    Goto(&return_x);
  }

  Bind(&if_xnotgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]-2^52,0[
    // (this also returns -0, NaN and -Infinity unchanged).
    GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
    GotoUnless(Float64LessThan(x, zero), &return_x);

    // Round negated {x} towards -Infinity and return the result negated.
    Node* minus_x = Float64Neg(x);
    var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
    // floor(x) == -ceil(-x); fix under-rounding of the negated value.
    GotoUnless(Float64LessThan(var_x.value(), minus_x), &return_minus_x);
    var_x.Bind(Float64Add(var_x.value(), one));
    Goto(&return_minus_x);
  }

  Bind(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  Bind(&return_x);
  return var_x.value();
}
    200 
// Rounds {x} towards zero. Uses the hardware truncate instruction when
// available; otherwise truncation is floor for positive inputs and ceil for
// negative inputs, each emulated per-sign (with the hardware floor/ceil
// instruction if only that one is supported, else the 2^52 trick).
Node* CodeStubAssembler::Float64Trunc(Node* x) {
  if (IsFloat64RoundTruncateSupported()) {
    return Float64RoundTruncate(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  Variable var_x(this, MachineRepresentation::kFloat64);
  Label return_x(this), return_minus_x(this);
  var_x.Bind(x);

  // Check if {x} is greater than 0.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  Bind(&if_xgreaterthanzero);
  {
    // For positive {x}, truncation is rounding down.
    if (IsFloat64RoundDownSupported()) {
      var_x.Bind(Float64RoundDown(x));
    } else {
      // Just return {x} unless it's in the range ]0,2^52[.
      GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

      // Round positive {x} towards -Infinity.
      var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
      // The 2^52 trick rounds to nearest; step down by one if it rounded up.
      GotoUnless(Float64GreaterThan(var_x.value(), x), &return_x);
      var_x.Bind(Float64Sub(var_x.value(), one));
    }
    Goto(&return_x);
  }

  Bind(&if_xnotgreaterthanzero);
  {
    // For non-positive {x}, truncation is rounding up.
    if (IsFloat64RoundUpSupported()) {
      var_x.Bind(Float64RoundUp(x));
      Goto(&return_x);
    } else {
      // Just return {x} unless its in the range ]-2^52,0[.
      GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
      GotoUnless(Float64LessThan(x, zero), &return_x);

      // Round negated {x} towards -Infinity and return result negated.
      Node* minus_x = Float64Neg(x);
      var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
      // ceil(x) == -floor(-x); fix over-rounding of the negated value.
      GotoUnless(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
      var_x.Bind(Float64Sub(var_x.value(), one));
      Goto(&return_minus_x);
    }
  }

  Bind(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  Bind(&return_x);
  return var_x.value();
}
    262 
// Tags a word32 value as a Smi: sign-extend to pointer width, then shift the
// payload into place.
Node* CodeStubAssembler::SmiFromWord32(Node* value) {
  value = ChangeInt32ToIntPtr(value);
  return WordShl(value, SmiShiftBitsConstant());
}
    267 
// Tags a word-sized value as a Smi, folding to an embedded constant when the
// input is a compile-time constant in Smi range.
Node* CodeStubAssembler::SmiTag(Node* value) {
  int32_t constant_value;
  if (ToInt32Constant(value, constant_value) && Smi::IsValid(constant_value)) {
    return SmiConstant(Smi::FromInt(constant_value));
  }
  return WordShl(value, SmiShiftBitsConstant());
}
    275 
// Untags a Smi to a word-sized integer (arithmetic shift preserves the sign).
Node* CodeStubAssembler::SmiUntag(Node* value) {
  return WordSar(value, SmiShiftBitsConstant());
}
    279 
// Untags a Smi to a word32 value, truncating the pointer-sized intermediate
// on 64-bit targets.
Node* CodeStubAssembler::SmiToWord32(Node* value) {
  Node* result = WordSar(value, SmiShiftBitsConstant());
  if (Is64()) {
    result = TruncateInt64ToInt32(result);
  }
  return result;
}
    287 
// Converts a Smi to a float64 via its word32 payload.
Node* CodeStubAssembler::SmiToFloat64(Node* value) {
  return ChangeInt32ToFloat64(SmiToWord32(value));
}
    291 
    292 Node* CodeStubAssembler::SmiAdd(Node* a, Node* b) { return IntPtrAdd(a, b); }
    293 
// Tagged Smi addition with an overflow projection (see SmiAdd).
Node* CodeStubAssembler::SmiAddWithOverflow(Node* a, Node* b) {
  return IntPtrAddWithOverflow(a, b);
}
    297 
    298 Node* CodeStubAssembler::SmiSub(Node* a, Node* b) { return IntPtrSub(a, b); }
    299 
// Tagged Smi subtraction with an overflow projection (see SmiSub).
Node* CodeStubAssembler::SmiSubWithOverflow(Node* a, Node* b) {
  return IntPtrSubWithOverflow(a, b);
}
    303 
    304 Node* CodeStubAssembler::SmiEqual(Node* a, Node* b) { return WordEqual(a, b); }
    305 
// Unsigned >= comparison on tagged Smis (tag bits do not affect ordering).
Node* CodeStubAssembler::SmiAboveOrEqual(Node* a, Node* b) {
  return UintPtrGreaterThanOrEqual(a, b);
}
    309 
// Signed < comparison on tagged Smis.
Node* CodeStubAssembler::SmiLessThan(Node* a, Node* b) {
  return IntPtrLessThan(a, b);
}
    313 
// Signed <= comparison on tagged Smis.
Node* CodeStubAssembler::SmiLessThanOrEqual(Node* a, Node* b) {
  return IntPtrLessThanOrEqual(a, b);
}
    317 
    318 Node* CodeStubAssembler::SmiMin(Node* a, Node* b) {
    319   // TODO(bmeurer): Consider using Select once available.
    320   Variable min(this, MachineRepresentation::kTagged);
    321   Label if_a(this), if_b(this), join(this);
    322   BranchIfSmiLessThan(a, b, &if_a, &if_b);
    323   Bind(&if_a);
    324   min.Bind(a);
    325   Goto(&join);
    326   Bind(&if_b);
    327   min.Bind(b);
    328   Goto(&join);
    329   Bind(&join);
    330   return min.value();
    331 }
    332 
// True iff the low tag bits of {a} are zero, i.e. {a} is a Smi.
Node* CodeStubAssembler::WordIsSmi(Node* a) {
  return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask)), IntPtrConstant(0));
}
    336 
// True iff {a} is a Smi with a clear sign bit, i.e. a non-negative Smi.
Node* CodeStubAssembler::WordIsPositiveSmi(Node* a) {
  return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask | kSmiSignMask)),
                   IntPtrConstant(0));
}
    341 
// Bump-pointer allocates {size_in_bytes} in the space described by
// {top_address}/{limit_address}, falling back to a runtime call when the
// space is exhausted. Returns a tagged pointer to the new (uninitialized)
// object.
Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
                                              AllocationFlags flags,
                                              Node* top_address,
                                              Node* limit_address) {
  Node* top = Load(MachineType::Pointer(), top_address);
  Node* limit = Load(MachineType::Pointer(), limit_address);

  // If there's not enough space, call the runtime.
  Variable result(this, MachineRepresentation::kTagged);
  Label runtime_call(this, Label::kDeferred), no_runtime_call(this);
  Label merge_runtime(this, &result);

  Node* new_top = IntPtrAdd(top, size_in_bytes);
  Branch(UintPtrGreaterThanOrEqual(new_top, limit), &runtime_call,
         &no_runtime_call);

  Bind(&runtime_call);
  // AllocateInTargetSpace does not use the context.
  Node* context = SmiConstant(Smi::FromInt(0));

  Node* runtime_result;
  if (flags & kPretenured) {
    // Pretenured allocations go straight to old space.
    Node* runtime_flags = SmiConstant(
        Smi::FromInt(AllocateDoubleAlignFlag::encode(false) |
                     AllocateTargetSpace::encode(AllocationSpace::OLD_SPACE)));
    runtime_result = CallRuntime(Runtime::kAllocateInTargetSpace, context,
                                 SmiTag(size_in_bytes), runtime_flags);
  } else {
    runtime_result = CallRuntime(Runtime::kAllocateInNewSpace, context,
                                 SmiTag(size_in_bytes));
  }
  result.Bind(runtime_result);
  Goto(&merge_runtime);

  // When there is enough space, return `top' and bump it up.
  Bind(&no_runtime_call);
  Node* no_runtime_result = top;
  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
                      new_top);
  // Tag the raw address to produce a HeapObject pointer.
  no_runtime_result = BitcastWordToTagged(
      IntPtrAdd(no_runtime_result, IntPtrConstant(kHeapObjectTag)));
  result.Bind(no_runtime_result);
  Goto(&merge_runtime);

  Bind(&merge_runtime);
  return result.value();
}
    389 
// As AllocateRawUnaligned, but when kDoubleAlignment is requested pads the
// allocation by one pointer and, if the start was misaligned, stores a
// one-pointer filler so the returned object is double-aligned.
Node* CodeStubAssembler::AllocateRawAligned(Node* size_in_bytes,
                                            AllocationFlags flags,
                                            Node* top_address,
                                            Node* limit_address) {
  Node* top = Load(MachineType::Pointer(), top_address);
  Node* limit = Load(MachineType::Pointer(), limit_address);
  Variable adjusted_size(this, MachineType::PointerRepresentation());
  adjusted_size.Bind(size_in_bytes);
  if (flags & kDoubleAlignment) {
    // TODO(epertoso): Simd128 alignment.
    Label aligned(this), not_aligned(this), merge(this, &adjusted_size);
    Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &not_aligned,
           &aligned);

    Bind(&not_aligned);
    // Reserve one extra pointer for the filler that fixes the alignment.
    Node* not_aligned_size =
        IntPtrAdd(size_in_bytes, IntPtrConstant(kPointerSize));
    adjusted_size.Bind(not_aligned_size);
    Goto(&merge);

    Bind(&aligned);
    Goto(&merge);

    Bind(&merge);
  }

  Variable address(this, MachineRepresentation::kTagged);
  address.Bind(AllocateRawUnaligned(adjusted_size.value(), kNone, top, limit));

  Label needs_filler(this), doesnt_need_filler(this),
      merge_address(this, &address);
  // A size adjustment above means the start of the block is misaligned.
  Branch(IntPtrEqual(adjusted_size.value(), size_in_bytes), &doesnt_need_filler,
         &needs_filler);

  Bind(&needs_filler);
  // Store a filler and increase the address by kPointerSize.
  // TODO(epertoso): this code assumes that we only align to kDoubleSize. Change
  // it when Simd128 alignment is supported.
  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top,
                      LoadRoot(Heap::kOnePointerFillerMapRootIndex));
  address.Bind(BitcastWordToTagged(
      IntPtrAdd(address.value(), IntPtrConstant(kPointerSize))));
  Goto(&merge_address);

  Bind(&doesnt_need_filler);
  Goto(&merge_address);

  Bind(&merge_address);
  // Update the top.
  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
                      IntPtrAdd(top, adjusted_size.value()));
  return address.value();
}
    443 
// Allocates {size_in_bytes} in new space (or old space with kPretenured),
// honoring kDoubleAlignment only on 32-bit hosts where doubles are not
// naturally pointer-aligned.
Node* CodeStubAssembler::Allocate(Node* size_in_bytes, AllocationFlags flags) {
  bool const new_space = !(flags & kPretenured);
  Node* top_address = ExternalConstant(
      new_space
          ? ExternalReference::new_space_allocation_top_address(isolate())
          : ExternalReference::old_space_allocation_top_address(isolate()));
  Node* limit_address = ExternalConstant(
      new_space
          ? ExternalReference::new_space_allocation_limit_address(isolate())
          : ExternalReference::old_space_allocation_limit_address(isolate()));

#ifdef V8_HOST_ARCH_32_BIT
  if (flags & kDoubleAlignment) {
    return AllocateRawAligned(size_in_bytes, flags, top_address, limit_address);
  }
#endif

  return AllocateRawUnaligned(size_in_bytes, flags, top_address, limit_address);
}
    463 
// Convenience overload for a compile-time-constant allocation size.
Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) {
  return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
}
    467 
// Returns a tagged pointer {offset} bytes past {previous} — used for carving
// sub-objects out of a single larger allocation.
Node* CodeStubAssembler::InnerAllocate(Node* previous, Node* offset) {
  return BitcastWordToTagged(IntPtrAdd(previous, offset));
}
    471 
// Convenience overload for a compile-time-constant inner offset.
Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) {
  return InnerAllocate(previous, IntPtrConstant(offset));
}
    475 
// Loads a value at {offset} from the current frame pointer.
compiler::Node* CodeStubAssembler::LoadFromFrame(int offset, MachineType rep) {
  Node* frame_pointer = LoadFramePointer();
  return Load(rep, frame_pointer, IntPtrConstant(offset));
}
    480 
// Loads a value at {offset} from the caller's frame pointer.
compiler::Node* CodeStubAssembler::LoadFromParentFrame(int offset,
                                                       MachineType rep) {
  Node* frame_pointer = LoadParentFramePointer();
  return Load(rep, frame_pointer, IntPtrConstant(offset));
}
    486 
// Loads from an untagged buffer at a constant byte offset (no tag adjustment).
Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset,
                                          MachineType rep) {
  return Load(rep, buffer, IntPtrConstant(offset));
}
    491 
// Loads a field from a tagged HeapObject; subtracting kHeapObjectTag converts
// the tagged pointer plus field offset into the raw address.
Node* CodeStubAssembler::LoadObjectField(Node* object, int offset,
                                         MachineType rep) {
  return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag));
}
    496 
// As above, but with a dynamically computed field offset.
Node* CodeStubAssembler::LoadObjectField(Node* object, Node* offset,
                                         MachineType rep) {
  return Load(rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)));
}
    501 
// Loads the float64 payload of a HeapNumber.
Node* CodeStubAssembler::LoadHeapNumberValue(Node* object) {
  return LoadObjectField(object, HeapNumber::kValueOffset,
                         MachineType::Float64());
}
    506 
// Loads the map of a HeapObject.
Node* CodeStubAssembler::LoadMap(Node* object) {
  return LoadObjectField(object, HeapObject::kMapOffset);
}
    510 
// Loads the instance type of a HeapObject via its map.
Node* CodeStubAssembler::LoadInstanceType(Node* object) {
  return LoadMapInstanceType(LoadMap(object));
}
    514 
// Debug-only check that {object} has exactly the given instance type.
void CodeStubAssembler::AssertInstanceType(Node* object,
                                           InstanceType instance_type) {
  Assert(Word32Equal(LoadInstanceType(object), Int32Constant(instance_type)));
}
    519 
// Loads the properties backing store of a JSObject.
Node* CodeStubAssembler::LoadProperties(Node* object) {
  return LoadObjectField(object, JSObject::kPropertiesOffset);
}
    523 
// Loads the elements backing store of a JSObject.
Node* CodeStubAssembler::LoadElements(Node* object) {
  return LoadObjectField(object, JSObject::kElementsOffset);
}
    527 
// Loads the (Smi) length field of a FixedArrayBase.
Node* CodeStubAssembler::LoadFixedArrayBaseLength(Node* array) {
  return LoadObjectField(array, FixedArrayBase::kLengthOffset);
}
    531 
// Loads the 8-bit bit_field of a Map.
Node* CodeStubAssembler::LoadMapBitField(Node* map) {
  return LoadObjectField(map, Map::kBitFieldOffset, MachineType::Uint8());
}
    535 
// Loads the 8-bit bit_field2 of a Map.
Node* CodeStubAssembler::LoadMapBitField2(Node* map) {
  return LoadObjectField(map, Map::kBitField2Offset, MachineType::Uint8());
}
    539 
// Loads the 32-bit bit_field3 of a Map.
Node* CodeStubAssembler::LoadMapBitField3(Node* map) {
  return LoadObjectField(map, Map::kBitField3Offset, MachineType::Uint32());
}
    543 
// Loads the 8-bit instance type of a Map.
Node* CodeStubAssembler::LoadMapInstanceType(Node* map) {
  return LoadObjectField(map, Map::kInstanceTypeOffset, MachineType::Uint8());
}
    547 
// Loads the descriptor array of a Map.
Node* CodeStubAssembler::LoadMapDescriptors(Node* map) {
  return LoadObjectField(map, Map::kDescriptorsOffset);
}
    551 
// Loads the prototype stored on a Map.
Node* CodeStubAssembler::LoadMapPrototype(Node* map) {
  return LoadObjectField(map, Map::kPrototypeOffset);
}
    555 
// Loads the instance size (in words, stored as uint8) from a Map.
Node* CodeStubAssembler::LoadMapInstanceSize(Node* map) {
  return LoadObjectField(map, Map::kInstanceSizeOffset, MachineType::Uint8());
}
    559 
// Loads the number of in-object properties from a Map. Only valid for
// JSObject maps — the same byte encodes a constructor-function index for
// primitive maps, hence the debug-mode instance-type check.
Node* CodeStubAssembler::LoadMapInobjectProperties(Node* map) {
  // See Map::GetInObjectProperties() for details.
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  Assert(Int32GreaterThanOrEqual(LoadMapInstanceType(map),
                                 Int32Constant(FIRST_JS_OBJECT_TYPE)));
  return LoadObjectField(
      map, Map::kInObjectPropertiesOrConstructorFunctionIndexOffset,
      MachineType::Uint8());
}
    569 
// Loads the raw 32-bit hash field of a Name (hash plus flag bits).
Node* CodeStubAssembler::LoadNameHashField(Node* name) {
  return LoadObjectField(name, Name::kHashFieldOffset, MachineType::Uint32());
}
    573 
    574 Node* CodeStubAssembler::LoadNameHash(Node* name, Label* if_hash_not_computed) {
    575   Node* hash_field = LoadNameHashField(name);
    576   if (if_hash_not_computed != nullptr) {
    577     GotoIf(WordEqual(
    578                Word32And(hash_field, Int32Constant(Name::kHashNotComputedMask)),
    579                Int32Constant(0)),
    580            if_hash_not_computed);
    581   }
    582   return Word32Shr(hash_field, Int32Constant(Name::kHashShift));
    583 }
    584 
// Loads the (Smi) length of a String.
Node* CodeStubAssembler::LoadStringLength(Node* object) {
  return LoadObjectField(object, String::kLengthOffset);
}
    588 
// Loads the wrapped primitive of a JSValue wrapper object.
Node* CodeStubAssembler::LoadJSValueValue(Node* object) {
  return LoadObjectField(object, JSValue::kValueOffset);
}
    592 
// Loads the value of a WeakCell. When {if_cleared} is provided, jumps there
// if the cell has been cleared (its value slot holds the raw word 0).
Node* CodeStubAssembler::LoadWeakCellValue(Node* weak_cell, Label* if_cleared) {
  Node* value = LoadObjectField(weak_cell, WeakCell::kValueOffset);
  if (if_cleared != nullptr) {
    GotoIf(WordEqual(value, IntPtrConstant(0)), if_cleared);
  }
  return value;
}
    600 
// Allocates a FixedArray of {length} (untagged word) elements, setting only
// the map and length; the element slots are left uninitialized and must be
// filled by the caller before the next GC-safe point.
Node* CodeStubAssembler::AllocateUninitializedFixedArray(Node* length) {
  Node* header_size = IntPtrConstant(FixedArray::kHeaderSize);
  Node* data_size = WordShl(length, IntPtrConstant(kPointerSizeLog2));
  Node* total_size = IntPtrAdd(data_size, header_size);

  Node* result = Allocate(total_size, kNone);
  StoreMapNoWriteBarrier(result, LoadRoot(Heap::kFixedArrayMapRootIndex));
  StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset,
      SmiTag(length));

  return result;
}
    613 
// Loads a tagged element from a FixedArray. {index_node} is interpreted per
// {parameter_mode}; {additional_offset} is an extra byte offset added to the
// element address.
Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node,
                                               int additional_offset,
                                               ParameterMode parameter_mode) {
  int32_t header_size =
      FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
  Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS,
                                        parameter_mode, header_size);
  return Load(MachineType::AnyTagged(), object, offset);
}
    623 
// Loads an element from a FixedDoubleArray with the given machine type
// (e.g. Float64, or a narrower type to inspect raw bits).
Node* CodeStubAssembler::LoadFixedDoubleArrayElement(
    Node* object, Node* index_node, MachineType machine_type,
    int additional_offset, ParameterMode parameter_mode) {
  int32_t header_size =
      FixedDoubleArray::kHeaderSize + additional_offset - kHeapObjectTag;
  Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_DOUBLE_ELEMENTS,
                                        parameter_mode, header_size);
  return Load(machine_type, object, offset);
}
    633 
// Loads the native context reachable from any context.
Node* CodeStubAssembler::LoadNativeContext(Node* context) {
  return LoadFixedArrayElement(context,
                               Int32Constant(Context::NATIVE_CONTEXT_INDEX));
}
    638 
// Loads the JSArray map for the given elements kind from the native context.
Node* CodeStubAssembler::LoadJSArrayElementsMap(ElementsKind kind,
                                                Node* native_context) {
  return LoadFixedArrayElement(native_context,
                               Int32Constant(Context::ArrayMapIndex(kind)));
}
    644 
// Stores the float64 payload of a HeapNumber. No write barrier is needed
// since the payload is not a tagged pointer.
Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) {
  return StoreNoWriteBarrier(
      MachineRepresentation::kFloat64, object,
      IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag), value);
}
    650 
// Stores a tagged value into an object field, with the full write barrier.
Node* CodeStubAssembler::StoreObjectField(
    Node* object, int offset, Node* value) {
  return Store(MachineRepresentation::kTagged, object,
               IntPtrConstant(offset - kHeapObjectTag), value);
}
    656 
// Stores into an object field without a write barrier. Only safe when the
// value is a Smi/untagged datum or the object is freshly allocated.
Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
    Node* object, int offset, Node* value, MachineRepresentation rep) {
  return StoreNoWriteBarrier(rep, object,
                             IntPtrConstant(offset - kHeapObjectTag), value);
}
    662 
    663 Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
    664   return StoreNoWriteBarrier(
    665       MachineRepresentation::kTagged, object,
    666       IntPtrConstant(HeapNumber::kMapOffset - kHeapObjectTag), map);
    667 }
    668 
// Stores a tagged value into a FixedArray element, with or without a write
// barrier depending on {barrier_mode}.
Node* CodeStubAssembler::StoreFixedArrayElement(Node* object, Node* index_node,
                                                Node* value,
                                                WriteBarrierMode barrier_mode,
                                                ParameterMode parameter_mode) {
  DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
         barrier_mode == UPDATE_WRITE_BARRIER);
  Node* offset =
      ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, parameter_mode,
                             FixedArray::kHeaderSize - kHeapObjectTag);
  MachineRepresentation rep = MachineRepresentation::kTagged;
  if (barrier_mode == SKIP_WRITE_BARRIER) {
    return StoreNoWriteBarrier(rep, object, offset, value);
  } else {
    return Store(rep, object, offset, value);
  }
}
    685 
// Stores a float64 into a FixedDoubleArray element. No write barrier needed:
// the payload is not a tagged pointer.
Node* CodeStubAssembler::StoreFixedDoubleArrayElement(
    Node* object, Node* index_node, Node* value, ParameterMode parameter_mode) {
  Node* offset =
      ElementOffsetFromIndex(index_node, FAST_DOUBLE_ELEMENTS, parameter_mode,
                             FixedArray::kHeaderSize - kHeapObjectTag);
  MachineRepresentation rep = MachineRepresentation::kFloat64;
  return StoreNoWriteBarrier(rep, object, offset, value);
}
    694 
// Allocates a HeapNumber with its map set but its value uninitialized.
Node* CodeStubAssembler::AllocateHeapNumber() {
  Node* result = Allocate(HeapNumber::kSize, kNone);
  StoreMapNoWriteBarrier(result, HeapNumberMapConstant());
  return result;
}
    700 
// Allocates a HeapNumber initialized with the given float64 {value}.
Node* CodeStubAssembler::AllocateHeapNumberWithValue(Node* value) {
  Node* result = AllocateHeapNumber();
  StoreHeapNumberValue(result, value);
  return result;
}
    706 
// Allocates a SeqOneByteString of compile-time-constant {length}; the
// character storage is left uninitialized for the caller to fill.
Node* CodeStubAssembler::AllocateSeqOneByteString(int length) {
  Node* result = Allocate(SeqOneByteString::SizeFor(length));
  StoreMapNoWriteBarrier(result, LoadRoot(Heap::kOneByteStringMapRootIndex));
  StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
                                 SmiConstant(Smi::FromInt(length)));
  // Mark the hash as not yet computed.
  StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
                                 IntPtrConstant(String::kEmptyHashField),
                                 MachineRepresentation::kWord32);
  return result;
}
    717 
// Allocates a SeqOneByteString of dynamic (untagged word) {length}. Small
// strings are allocated inline in new space; sizes beyond the regular-object
// limit go through the runtime (which may use large object space).
Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length) {
  Variable var_result(this, MachineRepresentation::kTagged);

  // Compute the SeqOneByteString size and check if it fits into new space.
  Label if_sizeissmall(this), if_notsizeissmall(this, Label::kDeferred),
      if_join(this);
  // size = align(length + header, kObjectAlignment).
  Node* size = WordAnd(
      IntPtrAdd(
          IntPtrAdd(length, IntPtrConstant(SeqOneByteString::kHeaderSize)),
          IntPtrConstant(kObjectAlignmentMask)),
      IntPtrConstant(~kObjectAlignmentMask));
  Branch(IntPtrLessThanOrEqual(size,
                               IntPtrConstant(Page::kMaxRegularHeapObjectSize)),
         &if_sizeissmall, &if_notsizeissmall);

  Bind(&if_sizeissmall);
  {
    // Just allocate the SeqOneByteString in new space.
    Node* result = Allocate(size);
    StoreMapNoWriteBarrier(result, LoadRoot(Heap::kOneByteStringMapRootIndex));
    StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
                                   SmiFromWord(length));
    // Mark the hash as not yet computed.
    StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
                                   IntPtrConstant(String::kEmptyHashField),
                                   MachineRepresentation::kWord32);
    var_result.Bind(result);
    Goto(&if_join);
  }

  Bind(&if_notsizeissmall);
  {
    // We might need to allocate in large object space, go to the runtime.
    Node* result = CallRuntime(Runtime::kAllocateSeqOneByteString, context,
                               SmiFromWord(length));
    var_result.Bind(result);
    Goto(&if_join);
  }

  Bind(&if_join);
  return var_result.value();
}
    759 
    760 Node* CodeStubAssembler::AllocateSeqTwoByteString(int length) {
    761   Node* result = Allocate(SeqTwoByteString::SizeFor(length));
    762   StoreMapNoWriteBarrier(result, LoadRoot(Heap::kStringMapRootIndex));
    763   StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
    764                                  SmiConstant(Smi::FromInt(length)));
    765   StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
    766                                  IntPtrConstant(String::kEmptyHashField),
    767                                  MachineRepresentation::kWord32);
    768   return result;
    769 }
    770 
// Allocates a SeqTwoByteString whose |length| is only known at runtime (an
// untagged intptr). Strings that fit into a regular page are allocated inline
// in new space; anything larger is handed off to the runtime, which can
// allocate in large object space. Returns the (tagged) string.
Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length) {
  Variable var_result(this, MachineRepresentation::kTagged);

  // Compute the SeqTwoByteString size and check if it fits into new space.
  Label if_sizeissmall(this), if_notsizeissmall(this, Label::kDeferred),
      if_join(this);
  // size = align(header + 2 * length); two bytes per character.
  Node* size = WordAnd(
      IntPtrAdd(IntPtrAdd(WordShl(length, 1),
                          IntPtrConstant(SeqTwoByteString::kHeaderSize)),
                IntPtrConstant(kObjectAlignmentMask)),
      IntPtrConstant(~kObjectAlignmentMask));
  Branch(IntPtrLessThanOrEqual(size,
                               IntPtrConstant(Page::kMaxRegularHeapObjectSize)),
         &if_sizeissmall, &if_notsizeissmall);

  Bind(&if_sizeissmall);
  {
    // Just allocate the SeqTwoByteString in new space.
    Node* result = Allocate(size);
    StoreMapNoWriteBarrier(result, LoadRoot(Heap::kStringMapRootIndex));
    StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
                                   SmiFromWord(length));
    // The hash is computed lazily; start with the empty hash field.
    StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
                                   IntPtrConstant(String::kEmptyHashField),
                                   MachineRepresentation::kWord32);
    var_result.Bind(result);
    Goto(&if_join);
  }

  Bind(&if_notsizeissmall);
  {
    // We might need to allocate in large object space, go to the runtime.
    Node* result = CallRuntime(Runtime::kAllocateSeqTwoByteString, context,
                               SmiFromWord(length));
    var_result.Bind(result);
    Goto(&if_join);
  }

  Bind(&if_join);
  return var_result.value();
}
    812 
// Allocates a JSArray of the given elements |kind| together with its backing
// store in one contiguous chunk laid out as:
//   [JSArray header][AllocationMemento (only if |allocation_site| != nullptr)]
//   [FixedArray / FixedDoubleArray elements]
// |capacity_node| and |length_node| are interpreted according to |mode|
// (tagged Smis for SMI_PARAMETERS, otherwise untagged integers). All element
// slots are initialized to the hole. Returns the (tagged) array.
Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
                                         Node* capacity_node, Node* length_node,
                                         compiler::Node* allocation_site,
                                         ParameterMode mode) {
  bool is_double = IsFastDoubleElementsKind(kind);
  int base_size = JSArray::kSize + FixedArray::kHeaderSize;
  int elements_offset = JSArray::kSize;

  Comment("begin allocation of JSArray");

  // A memento, if requested, sits between the array header and the elements.
  if (allocation_site != nullptr) {
    base_size += AllocationMemento::kSize;
    elements_offset += AllocationMemento::kSize;
  }

  int32_t capacity;
  bool constant_capacity = ToInt32Constant(capacity_node, capacity);
  // Total size = fixed header sizes (base_size) + capacity * element size.
  Node* total_size =
      ElementOffsetFromIndex(capacity_node, kind, mode, base_size);

  // Allocate both array and elements object, and initialize the JSArray.
  Heap* heap = isolate()->heap();
  Node* array = Allocate(total_size);
  StoreMapNoWriteBarrier(array, array_map);
  Node* empty_properties =
      HeapConstant(Handle<HeapObject>(heap->empty_fixed_array()));
  StoreObjectFieldNoWriteBarrier(array, JSArray::kPropertiesOffset,
                                 empty_properties);
  StoreObjectFieldNoWriteBarrier(
      array, JSArray::kLengthOffset,
      mode == SMI_PARAMETERS ? length_node : SmiTag(length_node));

  if (allocation_site != nullptr) {
    InitializeAllocationMemento(array, JSArray::kSize, allocation_site);
  }

  // Setup elements object.
  Node* elements = InnerAllocate(array, elements_offset);
  StoreObjectFieldNoWriteBarrier(array, JSArray::kElementsOffset, elements);
  Handle<Map> elements_map(is_double ? heap->fixed_double_array_map()
                                     : heap->fixed_array_map());
  StoreMapNoWriteBarrier(elements, HeapConstant(elements_map));
  StoreObjectFieldNoWriteBarrier(
      elements, FixedArray::kLengthOffset,
      mode == SMI_PARAMETERS ? capacity_node : SmiTag(capacity_node));

  // Fill the elements with the hole. For small constant capacities the loop
  // is unrolled; otherwise a backwards-iterating store loop is emitted.
  int const first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
  Node* hole = HeapConstant(Handle<HeapObject>(heap->the_hole_value()));
  Node* double_hole =
      Is64() ? Int64Constant(kHoleNanInt64) : Int32Constant(kHoleNanLower32);
  DCHECK_EQ(kHoleNanLower32, kHoleNanUpper32);
  if (constant_capacity && capacity <= kElementLoopUnrollThreshold) {
    for (int i = 0; i < capacity; ++i) {
      if (is_double) {
        Node* offset = ElementOffsetFromIndex(Int32Constant(i), kind, mode,
                                              first_element_offset);
        // Don't use doubles to store the hole double, since manipulating the
        // signaling NaN used for the hole in C++, e.g. with bit_cast, will
        // change its value on ia32 (the x87 stack is used to return values
        // and stores to the stack silently clear the signalling bit).
        //
        // TODO(danno): When we have a Float32/Float64 wrapper class that
        // preserves double bits during manipulation, remove this code/change
        // this to an indexed Float64 store.
        if (Is64()) {
          StoreNoWriteBarrier(MachineRepresentation::kWord64, elements, offset,
                              double_hole);
        } else {
          // 32-bit: store the hole NaN as two word32 halves.
          StoreNoWriteBarrier(MachineRepresentation::kWord32, elements, offset,
                              double_hole);
          offset = ElementOffsetFromIndex(Int32Constant(i), kind, mode,
                                          first_element_offset + kPointerSize);
          StoreNoWriteBarrier(MachineRepresentation::kWord32, elements, offset,
                              double_hole);
        }
      } else {
        StoreFixedArrayElement(elements, Int32Constant(i), hole,
                               SKIP_WRITE_BARRIER);
      }
    }
  } else {
    // Loop from one-past-the-last element down to the first, storing the hole
    // at each slot. |current| is a raw (untagged) address into the elements.
    Variable current(this, MachineRepresentation::kTagged);
    Label test(this);
    Label decrement(this, &current);
    Label done(this);
    Node* limit = IntPtrAdd(elements, IntPtrConstant(first_element_offset));
    current.Bind(
        IntPtrAdd(limit, ElementOffsetFromIndex(capacity_node, kind, mode, 0)));

    Branch(WordEqual(current.value(), limit), &done, &decrement);

    Bind(&decrement);
    current.Bind(IntPtrSub(
        current.value(),
        Int32Constant(IsFastDoubleElementsKind(kind) ? kDoubleSize
                                                     : kPointerSize)));
    if (is_double) {
      // Don't use doubles to store the hole double, since manipulating the
      // signaling NaN used for the hole in C++, e.g. with bit_cast, will
      // change its value on ia32 (the x87 stack is used to return values
      // and stores to the stack silently clear the signalling bit).
      //
      // TODO(danno): When we have a Float32/Float64 wrapper class that
      // preserves double bits during manipulation, remove this code/change
      // this to an indexed Float64 store.
      if (Is64()) {
        StoreNoWriteBarrier(MachineRepresentation::kWord64, current.value(),
                            double_hole);
      } else {
        // 32-bit: store the hole NaN as two word32 halves.
        StoreNoWriteBarrier(MachineRepresentation::kWord32, current.value(),
                            double_hole);
        StoreNoWriteBarrier(
            MachineRepresentation::kWord32,
            IntPtrAdd(current.value(), Int32Constant(kPointerSize)),
            double_hole);
      }
    } else {
      StoreNoWriteBarrier(MachineRepresentation::kTagged, current.value(),
                          hole);
    }
    Node* compare = WordNotEqual(current.value(), limit);
    Branch(compare, &decrement, &done);

    Bind(&done);
  }

  return array;
}
    941 
// Writes an AllocationMemento immediately behind |base_allocation| (at offset
// |base_allocation_size|), pointing at |allocation_site|, and bumps the
// site's pretenure creation count when pretenuring decisions are enabled.
void CodeStubAssembler::InitializeAllocationMemento(
    compiler::Node* base_allocation, int base_allocation_size,
    compiler::Node* allocation_site) {
  StoreObjectFieldNoWriteBarrier(
      base_allocation, AllocationMemento::kMapOffset + base_allocation_size,
      HeapConstant(Handle<Map>(isolate()->heap()->allocation_memento_map())));
  StoreObjectFieldNoWriteBarrier(
      base_allocation,
      AllocationMemento::kAllocationSiteOffset + base_allocation_size,
      allocation_site);
  if (FLAG_allocation_site_pretenuring) {
    Node* count = LoadObjectField(allocation_site,
                                  AllocationSite::kPretenureCreateCountOffset);
    // Adding two tagged Smis as plain words yields the Smi of the sum, so no
    // untagging is needed here (assumes the count stays within Smi range).
    Node* incremented_count = IntPtrAdd(count, SmiConstant(Smi::FromInt(1)));
    StoreObjectFieldNoWriteBarrier(allocation_site,
                                   AllocationSite::kPretenureCreateCountOffset,
                                   incremented_count);
  }
}
    961 
// Converts the tagged |value| to an untagged float64. Smis and HeapNumbers
// are handled inline; any other HeapObject is first converted with the
// NonNumberToNumber stub and the loop retries on the result.
Node* CodeStubAssembler::TruncateTaggedToFloat64(Node* context, Node* value) {
  // We might need to loop once due to ToNumber conversion.
  Variable var_value(this, MachineRepresentation::kTagged),
      var_result(this, MachineRepresentation::kFloat64);
  Label loop(this, &var_value), done_loop(this, &var_result);
  var_value.Bind(value);
  Goto(&loop);
  Bind(&loop);
  {
    // Load the current {value}.
    value = var_value.value();

    // Check if the {value} is a Smi or a HeapObject.
    Label if_valueissmi(this), if_valueisnotsmi(this);
    Branch(WordIsSmi(value), &if_valueissmi, &if_valueisnotsmi);

    Bind(&if_valueissmi);
    {
      // Convert the Smi {value}.
      var_result.Bind(SmiToFloat64(value));
      Goto(&done_loop);
    }

    Bind(&if_valueisnotsmi);
    {
      // Check if {value} is a HeapNumber.
      Label if_valueisheapnumber(this),
          if_valueisnotheapnumber(this, Label::kDeferred);
      Branch(WordEqual(LoadMap(value), HeapNumberMapConstant()),
             &if_valueisheapnumber, &if_valueisnotheapnumber);

      Bind(&if_valueisheapnumber);
      {
        // Load the floating point value.
        var_result.Bind(LoadHeapNumberValue(value));
        Goto(&done_loop);
      }

      Bind(&if_valueisnotheapnumber);
      {
        // Convert the {value} to a Number first.
        Callable callable = CodeFactory::NonNumberToNumber(isolate());
        var_value.Bind(CallStub(callable, context, value));
        Goto(&loop);
      }
    }
  }
  Bind(&done_loop);
  return var_result.value();
}
   1012 
// Converts the tagged |value| to an untagged word32 with ToInt32 truncation
// semantics. Smis and HeapNumbers are handled inline; any other HeapObject is
// first converted with the NonNumberToNumber stub and the loop retries.
Node* CodeStubAssembler::TruncateTaggedToWord32(Node* context, Node* value) {
  // We might need to loop once due to ToNumber conversion.
  Variable var_value(this, MachineRepresentation::kTagged),
      var_result(this, MachineRepresentation::kWord32);
  Label loop(this, &var_value), done_loop(this, &var_result);
  var_value.Bind(value);
  Goto(&loop);
  Bind(&loop);
  {
    // Load the current {value}.
    value = var_value.value();

    // Check if the {value} is a Smi or a HeapObject.
    Label if_valueissmi(this), if_valueisnotsmi(this);
    Branch(WordIsSmi(value), &if_valueissmi, &if_valueisnotsmi);

    Bind(&if_valueissmi);
    {
      // Convert the Smi {value}.
      var_result.Bind(SmiToWord32(value));
      Goto(&done_loop);
    }

    Bind(&if_valueisnotsmi);
    {
      // Check if {value} is a HeapNumber.
      Label if_valueisheapnumber(this),
          if_valueisnotheapnumber(this, Label::kDeferred);
      Branch(WordEqual(LoadMap(value), HeapNumberMapConstant()),
             &if_valueisheapnumber, &if_valueisnotheapnumber);

      Bind(&if_valueisheapnumber);
      {
        // Truncate the floating point value.
        var_result.Bind(TruncateHeapNumberValueToWord32(value));
        Goto(&done_loop);
      }

      Bind(&if_valueisnotheapnumber);
      {
        // Convert the {value} to a Number first.
        Callable callable = CodeFactory::NonNumberToNumber(isolate());
        var_value.Bind(CallStub(callable, context, value));
        Goto(&loop);
      }
    }
  }
  Bind(&done_loop);
  return var_result.value();
}
   1063 
   1064 Node* CodeStubAssembler::TruncateHeapNumberValueToWord32(Node* object) {
   1065   Node* value = LoadHeapNumberValue(object);
   1066   return TruncateFloat64ToWord32(value);
   1067 }
   1068 
// Converts the untagged float64 |value| into a tagged Number: a Smi when the
// value round-trips through int32 and is not -0, otherwise a freshly
// allocated HeapNumber.
Node* CodeStubAssembler::ChangeFloat64ToTagged(Node* value) {
  // Round-trip through int32; if the result compares equal to the original,
  // the value is integral and (modulo -0) Smi-representable.
  Node* value32 = RoundFloat64ToInt32(value);
  Node* value64 = ChangeInt32ToFloat64(value32);

  Label if_valueisint32(this), if_valueisheapnumber(this), if_join(this);

  Label if_valueisequal(this), if_valueisnotequal(this);
  Branch(Float64Equal(value, value64), &if_valueisequal, &if_valueisnotequal);
  Bind(&if_valueisequal);
  {
    // value32 == 0 cannot distinguish 0.0 from -0.0; check the sign bit in
    // the high word to route -0.0 to the HeapNumber path.
    GotoUnless(Word32Equal(value32, Int32Constant(0)), &if_valueisint32);
    BranchIfInt32LessThan(Float64ExtractHighWord32(value), Int32Constant(0),
                          &if_valueisheapnumber, &if_valueisint32);
  }
  Bind(&if_valueisnotequal);
  Goto(&if_valueisheapnumber);

  Variable var_result(this, MachineRepresentation::kTagged);
  Bind(&if_valueisint32);
  {
    if (Is64()) {
      // Every int32 fits into a Smi on 64-bit architectures.
      Node* result = SmiTag(ChangeInt32ToInt64(value32));
      var_result.Bind(result);
      Goto(&if_join);
    } else {
      // Smi-tagging is value + value; overflow means it is not Smi range.
      Node* pair = Int32AddWithOverflow(value32, value32);
      Node* overflow = Projection(1, pair);
      Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
      Branch(overflow, &if_overflow, &if_notoverflow);
      Bind(&if_overflow);
      Goto(&if_valueisheapnumber);
      Bind(&if_notoverflow);
      {
        Node* result = Projection(0, pair);
        var_result.Bind(result);
        Goto(&if_join);
      }
    }
  }
  Bind(&if_valueisheapnumber);
  {
    Node* result = AllocateHeapNumberWithValue(value);
    var_result.Bind(result);
    Goto(&if_join);
  }
  Bind(&if_join);
  return var_result.value();
}
   1117 
   1118 Node* CodeStubAssembler::ChangeInt32ToTagged(Node* value) {
   1119   if (Is64()) {
   1120     return SmiTag(ChangeInt32ToInt64(value));
   1121   }
   1122   Variable var_result(this, MachineRepresentation::kTagged);
   1123   Node* pair = Int32AddWithOverflow(value, value);
   1124   Node* overflow = Projection(1, pair);
   1125   Label if_overflow(this, Label::kDeferred), if_notoverflow(this),
   1126       if_join(this);
   1127   Branch(overflow, &if_overflow, &if_notoverflow);
   1128   Bind(&if_overflow);
   1129   {
   1130     Node* value64 = ChangeInt32ToFloat64(value);
   1131     Node* result = AllocateHeapNumberWithValue(value64);
   1132     var_result.Bind(result);
   1133   }
   1134   Goto(&if_join);
   1135   Bind(&if_notoverflow);
   1136   {
   1137     Node* result = Projection(0, pair);
   1138     var_result.Bind(result);
   1139   }
   1140   Goto(&if_join);
   1141   Bind(&if_join);
   1142   return var_result.value();
   1143 }
   1144 
// Converts the untagged uint32 |value| into a tagged Number. Values above
// 2^31 - 1 (which appear negative when reinterpreted as int32) always become
// HeapNumbers; smaller values become Smis where they fit.
Node* CodeStubAssembler::ChangeUint32ToTagged(Node* value) {
  Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
      if_join(this);
  Variable var_result(this, MachineRepresentation::kTagged);
  // If {value} > 2^31 - 1, we need to store it in a HeapNumber.
  Branch(Int32LessThan(value, Int32Constant(0)), &if_overflow,
         &if_not_overflow);
  Bind(&if_not_overflow);
  {
    if (Is64()) {
      // Every uint32 in int32 range fits into a Smi on 64-bit architectures.
      var_result.Bind(SmiTag(ChangeUint32ToUint64(value)));
    } else {
      // If tagging {value} results in an overflow, we need to use a HeapNumber
      // to represent it.
      Node* pair = Int32AddWithOverflow(value, value);
      Node* overflow = Projection(1, pair);
      GotoIf(overflow, &if_overflow);

      Node* result = Projection(0, pair);
      var_result.Bind(result);
    }
  }
  Goto(&if_join);

  Bind(&if_overflow);
  {
    Node* float64_value = ChangeUint32ToFloat64(value);
    var_result.Bind(AllocateHeapNumberWithValue(float64_value));
  }
  Goto(&if_join);

  Bind(&if_join);
  return var_result.value();
}
   1179 
// Coerces a String.prototype method receiver to a String: returns |value| if
// it already is one, throws for null/undefined (reporting |method_name| in
// the error), and otherwise converts via ToString / NumberToString.
Node* CodeStubAssembler::ToThisString(Node* context, Node* value,
                                      char const* method_name) {
  Variable var_value(this, MachineRepresentation::kTagged);
  var_value.Bind(value);

  // Check if the {value} is a Smi or a HeapObject.
  Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this),
      if_valueisstring(this);
  Branch(WordIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
  Bind(&if_valueisnotsmi);
  {
    // Load the instance type of the {value}.
    Node* value_instance_type = LoadInstanceType(value);

    // Check if the {value} is already String.
    Label if_valueisnotstring(this, Label::kDeferred);
    // String instance types are below FIRST_NONSTRING_TYPE.
    Branch(
        Int32LessThan(value_instance_type, Int32Constant(FIRST_NONSTRING_TYPE)),
        &if_valueisstring, &if_valueisnotstring);
    Bind(&if_valueisnotstring);
    {
      // Check if the {value} is null.
      Label if_valueisnullorundefined(this, Label::kDeferred),
          if_valueisnotnullorundefined(this, Label::kDeferred),
          if_valueisnotnull(this, Label::kDeferred);
      Branch(WordEqual(value, NullConstant()), &if_valueisnullorundefined,
             &if_valueisnotnull);
      Bind(&if_valueisnotnull);
      {
        // Check if the {value} is undefined.
        Branch(WordEqual(value, UndefinedConstant()),
               &if_valueisnullorundefined, &if_valueisnotnullorundefined);
        Bind(&if_valueisnotnullorundefined);
        {
          // Convert the {value} to a String.
          Callable callable = CodeFactory::ToString(isolate());
          var_value.Bind(CallStub(callable, context, value));
          Goto(&if_valueisstring);
        }
      }

      Bind(&if_valueisnullorundefined);
      {
        // The {value} is either null or undefined.
        CallRuntime(Runtime::kThrowCalledOnNullOrUndefined, context,
                    HeapConstant(factory()->NewStringFromAsciiChecked(
                        method_name, TENURED)));
        Goto(&if_valueisstring);  // Never reached.
      }
    }
  }
  Bind(&if_valueissmi);
  {
    // The {value} is a Smi, convert it to a String.
    Callable callable = CodeFactory::NumberToString(isolate());
    var_value.Bind(CallStub(callable, context, value));
    Goto(&if_valueisstring);
  }
  Bind(&if_valueisstring);
  return var_value.value();
}
   1241 
// Loads the character code at position |index| (a Smi) from |string| and
// returns it as an untagged word32. Sequential and short external strings are
// read inline; cons strings with an empty right-hand side and sliced strings
// are unwrapped and the loop retries; the remaining cases (non-flat cons,
// compressed external strings) fall back to the runtime.
Node* CodeStubAssembler::StringCharCodeAt(Node* string, Node* index) {
  // Translate the {index} into a Word.
  index = SmiToWord(index);

  // We may need to loop in case of cons or sliced strings.
  Variable var_index(this, MachineType::PointerRepresentation());
  Variable var_result(this, MachineRepresentation::kWord32);
  Variable var_string(this, MachineRepresentation::kTagged);
  Variable* loop_vars[] = {&var_index, &var_string};
  Label done_loop(this, &var_result), loop(this, 2, loop_vars);
  var_string.Bind(string);
  var_index.Bind(index);
  Goto(&loop);
  Bind(&loop);
  {
    // Load the current {index}.
    index = var_index.value();

    // Load the current {string}.
    string = var_string.value();

    // Load the instance type of the {string}.
    Node* string_instance_type = LoadInstanceType(string);

    // Check if the {string} is a SeqString.
    Label if_stringissequential(this), if_stringisnotsequential(this);
    Branch(Word32Equal(Word32And(string_instance_type,
                                 Int32Constant(kStringRepresentationMask)),
                       Int32Constant(kSeqStringTag)),
           &if_stringissequential, &if_stringisnotsequential);

    Bind(&if_stringissequential);
    {
      // Check if the {string} is a TwoByteSeqString or a OneByteSeqString.
      Label if_stringistwobyte(this), if_stringisonebyte(this);
      Branch(Word32Equal(Word32And(string_instance_type,
                                   Int32Constant(kStringEncodingMask)),
                         Int32Constant(kTwoByteStringTag)),
             &if_stringistwobyte, &if_stringisonebyte);

      Bind(&if_stringisonebyte);
      {
        // Direct byte load from the sequential one-byte payload.
        var_result.Bind(
            Load(MachineType::Uint8(), string,
                 IntPtrAdd(index, IntPtrConstant(SeqOneByteString::kHeaderSize -
                                                 kHeapObjectTag))));
        Goto(&done_loop);
      }

      Bind(&if_stringistwobyte);
      {
        // Two bytes per character, hence the index is scaled by 2.
        var_result.Bind(
            Load(MachineType::Uint16(), string,
                 IntPtrAdd(WordShl(index, IntPtrConstant(1)),
                           IntPtrConstant(SeqTwoByteString::kHeaderSize -
                                          kHeapObjectTag))));
        Goto(&done_loop);
      }
    }

    Bind(&if_stringisnotsequential);
    {
      // Check if the {string} is a ConsString.
      Label if_stringiscons(this), if_stringisnotcons(this);
      Branch(Word32Equal(Word32And(string_instance_type,
                                   Int32Constant(kStringRepresentationMask)),
                         Int32Constant(kConsStringTag)),
             &if_stringiscons, &if_stringisnotcons);

      Bind(&if_stringiscons);
      {
        // Check whether the right hand side is the empty string (i.e. if
        // this is really a flat string in a cons string). If that is not
        // the case we flatten the string first.
        Label if_rhsisempty(this), if_rhsisnotempty(this, Label::kDeferred);
        Node* rhs = LoadObjectField(string, ConsString::kSecondOffset);
        Branch(WordEqual(rhs, EmptyStringConstant()), &if_rhsisempty,
               &if_rhsisnotempty);

        Bind(&if_rhsisempty);
        {
          // Just operate on the left hand side of the {string}.
          var_string.Bind(LoadObjectField(string, ConsString::kFirstOffset));
          Goto(&loop);
        }

        Bind(&if_rhsisnotempty);
        {
          // Flatten the {string} and lookup in the resulting string.
          var_string.Bind(CallRuntime(Runtime::kFlattenString,
                                      NoContextConstant(), string));
          Goto(&loop);
        }
      }

      Bind(&if_stringisnotcons);
      {
        // Check if the {string} is an ExternalString.
        Label if_stringisexternal(this), if_stringisnotexternal(this);
        Branch(Word32Equal(Word32And(string_instance_type,
                                     Int32Constant(kStringRepresentationMask)),
                           Int32Constant(kExternalStringTag)),
               &if_stringisexternal, &if_stringisnotexternal);

        Bind(&if_stringisexternal);
        {
          // Check if the {string} is a short external string.
          Label if_stringisshort(this),
              if_stringisnotshort(this, Label::kDeferred);
          Branch(Word32Equal(Word32And(string_instance_type,
                                       Int32Constant(kShortExternalStringMask)),
                             Int32Constant(0)),
                 &if_stringisshort, &if_stringisnotshort);

          Bind(&if_stringisshort);
          {
            // Load the actual resource data from the {string}.
            Node* string_resource_data =
                LoadObjectField(string, ExternalString::kResourceDataOffset,
                                MachineType::Pointer());

            // Check if the {string} is a TwoByteExternalString or a
            // OneByteExternalString.
            Label if_stringistwobyte(this), if_stringisonebyte(this);
            Branch(Word32Equal(Word32And(string_instance_type,
                                         Int32Constant(kStringEncodingMask)),
                               Int32Constant(kTwoByteStringTag)),
                   &if_stringistwobyte, &if_stringisonebyte);

            Bind(&if_stringisonebyte);
            {
              var_result.Bind(
                  Load(MachineType::Uint8(), string_resource_data, index));
              Goto(&done_loop);
            }

            Bind(&if_stringistwobyte);
            {
              var_result.Bind(Load(MachineType::Uint16(), string_resource_data,
                                   WordShl(index, IntPtrConstant(1))));
              Goto(&done_loop);
            }
          }

          Bind(&if_stringisnotshort);
          {
            // The {string} might be compressed, call the runtime.
            var_result.Bind(SmiToWord32(
                CallRuntime(Runtime::kExternalStringGetChar,
                            NoContextConstant(), string, SmiTag(index))));
            Goto(&done_loop);
          }
        }

        Bind(&if_stringisnotexternal);
        {
          // The {string} is a SlicedString, continue with its parent.
          Node* string_offset =
              SmiToWord(LoadObjectField(string, SlicedString::kOffsetOffset));
          Node* string_parent =
              LoadObjectField(string, SlicedString::kParentOffset);
          var_index.Bind(IntPtrAdd(index, string_offset));
          var_string.Bind(string_parent);
          Goto(&loop);
        }
      }
    }
  }

  Bind(&done_loop);
  return var_result.value();
}
   1414 
// Creates a single-character string for the given character |code| (untagged
// word32). One-byte codes are served from (and stored into) the isolate-wide
// single character string cache; two-byte codes always allocate a fresh
// SeqTwoByteString.
Node* CodeStubAssembler::StringFromCharCode(Node* code) {
  Variable var_result(this, MachineRepresentation::kTagged);

  // Check if the {code} is a one-byte char code.
  Label if_codeisonebyte(this), if_codeistwobyte(this, Label::kDeferred),
      if_done(this);
  Branch(Int32LessThanOrEqual(code, Int32Constant(String::kMaxOneByteCharCode)),
         &if_codeisonebyte, &if_codeistwobyte);
  Bind(&if_codeisonebyte);
  {
    // Load the isolate wide single character string cache.
    Node* cache = LoadRoot(Heap::kSingleCharacterStringCacheRootIndex);

    // Check if we have an entry for the {code} in the single character string
    // cache already.
    Label if_entryisundefined(this, Label::kDeferred),
        if_entryisnotundefined(this);
    Node* entry = LoadFixedArrayElement(cache, code);
    Branch(WordEqual(entry, UndefinedConstant()), &if_entryisundefined,
           &if_entryisnotundefined);

    Bind(&if_entryisundefined);
    {
      // Allocate a new SeqOneByteString for {code} and store it in the {cache}.
      Node* result = AllocateSeqOneByteString(1);
      // Write the character byte directly into the string's payload.
      StoreNoWriteBarrier(
          MachineRepresentation::kWord8, result,
          IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag), code);
      StoreFixedArrayElement(cache, code, result);
      var_result.Bind(result);
      Goto(&if_done);
    }

    Bind(&if_entryisnotundefined);
    {
      // Return the entry from the {cache}.
      var_result.Bind(entry);
      Goto(&if_done);
    }
  }

  Bind(&if_codeistwobyte);
  {
    // Allocate a new SeqTwoByteString for {code}.
    Node* result = AllocateSeqTwoByteString(1);
    // Write the character as a 16-bit unit into the string's payload.
    StoreNoWriteBarrier(
        MachineRepresentation::kWord16, result,
        IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code);
    var_result.Bind(result);
    Goto(&if_done);
  }

  Bind(&if_done);
  return var_result.value();
}
   1470 
   1471 Node* CodeStubAssembler::BitFieldDecode(Node* word32, uint32_t shift,
   1472                                         uint32_t mask) {
   1473   return Word32Shr(Word32And(word32, Int32Constant(mask)),
   1474                    Int32Constant(shift));
   1475 }
   1476 
   1477 void CodeStubAssembler::SetCounter(StatsCounter* counter, int value) {
   1478   if (FLAG_native_code_counters && counter->Enabled()) {
   1479     Node* counter_address = ExternalConstant(ExternalReference(counter));
   1480     StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address,
   1481                         Int32Constant(value));
   1482   }
   1483 }
   1484 
   1485 void CodeStubAssembler::IncrementCounter(StatsCounter* counter, int delta) {
   1486   DCHECK(delta > 0);
   1487   if (FLAG_native_code_counters && counter->Enabled()) {
   1488     Node* counter_address = ExternalConstant(ExternalReference(counter));
   1489     Node* value = Load(MachineType::Int32(), counter_address);
   1490     value = Int32Add(value, Int32Constant(delta));
   1491     StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
   1492   }
   1493 }
   1494 
   1495 void CodeStubAssembler::DecrementCounter(StatsCounter* counter, int delta) {
   1496   DCHECK(delta > 0);
   1497   if (FLAG_native_code_counters && counter->Enabled()) {
   1498     Node* counter_address = ExternalConstant(ExternalReference(counter));
   1499     Node* value = Load(MachineType::Int32(), counter_address);
   1500     value = Int32Sub(value, Int32Constant(delta));
   1501     StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
   1502   }
   1503 }
   1504 
   1505 void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex,
   1506                                   Variable* var_index, Label* if_keyisunique,
   1507                                   Label* if_bailout) {
   1508   DCHECK_EQ(MachineRepresentation::kWord32, var_index->rep());
   1509   Comment("TryToName");
   1510 
   1511   Label if_keyissmi(this), if_keyisnotsmi(this);
   1512   Branch(WordIsSmi(key), &if_keyissmi, &if_keyisnotsmi);
   1513   Bind(&if_keyissmi);
   1514   {
   1515     // Negative smi keys are named properties. Handle in the runtime.
   1516     GotoUnless(WordIsPositiveSmi(key), if_bailout);
   1517 
   1518     var_index->Bind(SmiToWord32(key));
   1519     Goto(if_keyisindex);
   1520   }
   1521 
   1522   Bind(&if_keyisnotsmi);
   1523 
   1524   Node* key_instance_type = LoadInstanceType(key);
   1525   // Symbols are unique.
   1526   GotoIf(Word32Equal(key_instance_type, Int32Constant(SYMBOL_TYPE)),
   1527          if_keyisunique);
   1528 
   1529   Label if_keyisinternalized(this);
   1530   Node* bits =
   1531       WordAnd(key_instance_type,
   1532               Int32Constant(kIsNotStringMask | kIsNotInternalizedMask));
   1533   Branch(Word32Equal(bits, Int32Constant(kStringTag | kInternalizedTag)),
   1534          &if_keyisinternalized, if_bailout);
   1535   Bind(&if_keyisinternalized);
   1536 
   1537   // Check whether the key is an array index passed in as string. Handle
   1538   // uniform with smi keys if so.
   1539   // TODO(verwaest): Also support non-internalized strings.
   1540   Node* hash = LoadNameHashField(key);
   1541   Node* bit = Word32And(hash, Int32Constant(Name::kIsNotArrayIndexMask));
   1542   GotoIf(Word32NotEqual(bit, Int32Constant(0)), if_keyisunique);
   1543   // Key is an index. Check if it is small enough to be encoded in the
   1544   // hash_field. Handle too big array index in runtime.
   1545   bit = Word32And(hash, Int32Constant(Name::kContainsCachedArrayIndexMask));
   1546   GotoIf(Word32NotEqual(bit, Int32Constant(0)), if_bailout);
   1547   var_index->Bind(BitFieldDecode<Name::ArrayIndexValueBits>(hash));
   1548   Goto(if_keyisindex);
   1549 }
   1550 
   1551 template <typename Dictionary>
   1552 Node* CodeStubAssembler::EntryToIndex(Node* entry, int field_index) {
   1553   Node* entry_index = Int32Mul(entry, Int32Constant(Dictionary::kEntrySize));
   1554   return Int32Add(entry_index,
   1555                   Int32Constant(Dictionary::kElementsStartIndex + field_index));
   1556 }
   1557 
// Looks up {unique_name} in {dictionary} using the same open-addressing
// probe sequence as Dictionary::FirstProbe()/NextProbe(). On a hit, jumps
// to {if_found} with *{var_name_index} bound to the FixedArray index of the
// entry's key slot; jumps to {if_not_found} once an undefined (never-used)
// slot terminates the probe chain. The first {inlined_probes} probes are
// emitted as straight-line code before falling back to a generic loop.
template <typename Dictionary>
void CodeStubAssembler::NameDictionaryLookup(Node* dictionary,
                                             Node* unique_name, Label* if_found,
                                             Variable* var_name_index,
                                             Label* if_not_found,
                                             int inlined_probes) {
  DCHECK_EQ(MachineRepresentation::kWord32, var_name_index->rep());
  Comment("NameDictionaryLookup");

  // The (capacity - 1) mask assumes a power-of-two capacity.
  Node* capacity = SmiToWord32(LoadFixedArrayElement(
      dictionary, Int32Constant(Dictionary::kCapacityIndex)));
  Node* mask = Int32Sub(capacity, Int32Constant(1));
  Node* hash = LoadNameHash(unique_name);

  // See Dictionary::FirstProbe().
  Node* count = Int32Constant(0);
  Node* entry = Word32And(hash, mask);

  // Unrolled probes only test for a hit; they do not check for undefined.
  // A miss simply continues probing, and the loop below performs the
  // undefined (not-found) check.
  for (int i = 0; i < inlined_probes; i++) {
    Node* index = EntryToIndex<Dictionary>(entry);
    var_name_index->Bind(index);

    Node* current = LoadFixedArrayElement(dictionary, index);
    GotoIf(WordEqual(current, unique_name), if_found);

    // See Dictionary::NextProbe().
    count = Int32Constant(i + 1);
    entry = Word32And(Int32Add(entry, count), mask);
  }

  Node* undefined = UndefinedConstant();

  // Generic probe loop; {count} and {entry} carry over from the unrolled
  // probes via the loop variables.
  Variable var_count(this, MachineRepresentation::kWord32);
  Variable var_entry(this, MachineRepresentation::kWord32);
  Variable* loop_vars[] = {&var_count, &var_entry, var_name_index};
  Label loop(this, 3, loop_vars);
  var_count.Bind(count);
  var_entry.Bind(entry);
  Goto(&loop);
  Bind(&loop);
  {
    // Shadow the pre-loop nodes with the current loop-variable values.
    Node* count = var_count.value();
    Node* entry = var_entry.value();

    Node* index = EntryToIndex<Dictionary>(entry);
    var_name_index->Bind(index);

    Node* current = LoadFixedArrayElement(dictionary, index);
    // An undefined slot terminates the probe chain: the name is absent.
    GotoIf(WordEqual(current, undefined), if_not_found);
    GotoIf(WordEqual(current, unique_name), if_found);

    // See Dictionary::NextProbe().
    count = Int32Add(count, Int32Constant(1));
    entry = Word32And(Int32Add(entry, count), mask);

    var_count.Bind(count);
    var_entry.Bind(entry);
    Goto(&loop);
  }
}
   1618 
// Instantiate template methods to workaround GCC compilation issue.
// (Explicit instantiations for the two name-keyed dictionary flavours.)
template void CodeStubAssembler::NameDictionaryLookup<NameDictionary>(
    Node*, Node*, Label*, Variable*, Label*, int);
template void CodeStubAssembler::NameDictionaryLookup<GlobalDictionary>(
    Node*, Node*, Label*, Variable*, Label*, int);
   1624 
   1625 Node* CodeStubAssembler::ComputeIntegerHash(Node* key, Node* seed) {
   1626   // See v8::internal::ComputeIntegerHash()
   1627   Node* hash = key;
   1628   hash = Word32Xor(hash, seed);
   1629   hash = Int32Add(Word32Xor(hash, Int32Constant(0xffffffff)),
   1630                   Word32Shl(hash, Int32Constant(15)));
   1631   hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(12)));
   1632   hash = Int32Add(hash, Word32Shl(hash, Int32Constant(2)));
   1633   hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(4)));
   1634   hash = Int32Mul(hash, Int32Constant(2057));
   1635   hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(16)));
   1636   return Word32And(hash, Int32Constant(0x3fffffff));
   1637 }
   1638 
// Looks up the integer {key} in {dictionary} (a seeded or unseeded number
// dictionary) using the Dictionary::FirstProbe()/NextProbe() sequence.
// Jumps to {if_found} with *{var_entry} bound to the matching entry number,
// or to {if_not_found} once an undefined slot terminates the probe chain.
template <typename Dictionary>
void CodeStubAssembler::NumberDictionaryLookup(Node* dictionary, Node* key,
                                               Label* if_found,
                                               Variable* var_entry,
                                               Label* if_not_found) {
  DCHECK_EQ(MachineRepresentation::kWord32, var_entry->rep());
  Comment("NumberDictionaryLookup");

  // The (capacity - 1) mask assumes a power-of-two capacity.
  Node* capacity = SmiToWord32(LoadFixedArrayElement(
      dictionary, Int32Constant(Dictionary::kCapacityIndex)));
  Node* mask = Int32Sub(capacity, Int32Constant(1));

  // Seeded dictionaries mix the isolate's hash seed into the key hash.
  Node* seed;
  if (Dictionary::ShapeT::UsesSeed) {
    seed = HashSeed();
  } else {
    seed = Int32Constant(kZeroHashSeed);
  }
  Node* hash = ComputeIntegerHash(key, seed);
  // Stored keys may be heap numbers; precompute the float64 form of {key}
  // once for those comparisons.
  Node* key_as_float64 = ChangeUint32ToFloat64(key);

  // See Dictionary::FirstProbe().
  Node* count = Int32Constant(0);
  Node* entry = Word32And(hash, mask);

  Node* undefined = UndefinedConstant();
  Node* the_hole = TheHoleConstant();

  Variable var_count(this, MachineRepresentation::kWord32);
  Variable* loop_vars[] = {&var_count, var_entry};
  Label loop(this, 2, loop_vars);
  var_count.Bind(count);
  var_entry->Bind(entry);
  Goto(&loop);
  Bind(&loop);
  {
    // Shadow the pre-loop nodes with the current loop-variable values.
    Node* count = var_count.value();
    Node* entry = var_entry->value();

    Node* index = EntryToIndex<Dictionary>(entry);
    Node* current = LoadFixedArrayElement(dictionary, index);
    // Undefined marks a never-used slot: the key is not present.
    GotoIf(WordEqual(current, undefined), if_not_found);
    Label next_probe(this);
    {
      Label if_currentissmi(this), if_currentisnotsmi(this);
      Branch(WordIsSmi(current), &if_currentissmi, &if_currentisnotsmi);
      Bind(&if_currentissmi);
      {
        // Smi-stored key: compare as word32.
        Node* current_value = SmiToWord32(current);
        Branch(Word32Equal(current_value, key), if_found, &next_probe);
      }
      Bind(&if_currentisnotsmi);
      {
        // The hole marks a deleted entry; keep probing past it.
        GotoIf(WordEqual(current, the_hole), &next_probe);
        // Current must be the Number.
        Node* current_value = LoadHeapNumberValue(current);
        Branch(Float64Equal(current_value, key_as_float64), if_found,
               &next_probe);
      }
    }

    Bind(&next_probe);
    // See Dictionary::NextProbe().
    count = Int32Add(count, Int32Constant(1));
    entry = Word32And(Int32Add(entry, count), mask);

    var_count.Bind(count);
    var_entry->Bind(entry);
    Goto(&loop);
  }
}
   1710 
// Generic own-property lookup on {object} (whose {map} and {instance_type}
// are already loaded). Dispatches on how the object stores properties:
//  - fast objects: linear scan of the descriptor array; hits jump to
//    {if_found_fast} with *{var_meta_storage} = descriptors and
//    *{var_name_index} = FixedArray index of the key slot,
//  - dictionary-mode objects: NameDictionary lookup -> {if_found_dict},
//  - global objects: GlobalDictionary lookup -> {if_found_global}.
// Misses go to {if_not_found}; other special receivers, interceptors and
// access-checked maps bail out to {if_bailout}.
void CodeStubAssembler::TryLookupProperty(
    Node* object, Node* map, Node* instance_type, Node* unique_name,
    Label* if_found_fast, Label* if_found_dict, Label* if_found_global,
    Variable* var_meta_storage, Variable* var_name_index, Label* if_not_found,
    Label* if_bailout) {
  DCHECK_EQ(MachineRepresentation::kTagged, var_meta_storage->rep());
  DCHECK_EQ(MachineRepresentation::kWord32, var_name_index->rep());

  Label if_objectisspecial(this);
  STATIC_ASSERT(JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE);
  GotoIf(Int32LessThanOrEqual(instance_type,
                              Int32Constant(LAST_SPECIAL_RECEIVER_TYPE)),
         &if_objectisspecial);

  // Non-special receivers must have neither a named interceptor nor a
  // pending access check (debug-mode sanity check).
  Node* bit_field = LoadMapBitField(map);
  Node* mask = Int32Constant(1 << Map::kHasNamedInterceptor |
                             1 << Map::kIsAccessCheckNeeded);
  Assert(Word32Equal(Word32And(bit_field, mask), Int32Constant(0)));

  // Fast vs. dictionary properties is encoded in bit field 3 of the map.
  Node* bit_field3 = LoadMapBitField3(map);
  Node* bit = BitFieldDecode<Map::DictionaryMap>(bit_field3);
  Label if_isfastmap(this), if_isslowmap(this);
  Branch(Word32Equal(bit, Int32Constant(0)), &if_isfastmap, &if_isslowmap);
  Bind(&if_isfastmap);
  {
    Comment("DescriptorArrayLookup");
    Node* nof = BitFieldDecode<Map::NumberOfOwnDescriptorsBits>(bit_field3);
    // Bail out to the runtime for large numbers of own descriptors. The stub
    // only does linear search, which becomes too expensive in that case.
    {
      static const int32_t kMaxLinear = 210;
      GotoIf(Int32GreaterThan(nof, Int32Constant(kMaxLinear)), if_bailout);
    }
    Node* descriptors = LoadMapDescriptors(map);
    var_meta_storage->Bind(descriptors);

    // Linear scan over the descriptor keys until a match or {nof} is hit.
    Variable var_descriptor(this, MachineRepresentation::kWord32);
    Label loop(this, &var_descriptor);
    var_descriptor.Bind(Int32Constant(0));
    Goto(&loop);
    Bind(&loop);
    {
      Node* index = var_descriptor.value();
      Node* name_offset = Int32Constant(DescriptorArray::ToKeyIndex(0));
      Node* factor = Int32Constant(DescriptorArray::kDescriptorSize);
      GotoIf(Word32Equal(index, nof), if_not_found);

      Node* name_index = Int32Add(name_offset, Int32Mul(index, factor));
      Node* name = LoadFixedArrayElement(descriptors, name_index);

      var_name_index->Bind(name_index);
      GotoIf(WordEqual(name, unique_name), if_found_fast);

      var_descriptor.Bind(Int32Add(index, Int32Constant(1)));
      Goto(&loop);
    }
  }
  Bind(&if_isslowmap);
  {
    Node* dictionary = LoadProperties(object);
    var_meta_storage->Bind(dictionary);

    NameDictionaryLookup<NameDictionary>(dictionary, unique_name, if_found_dict,
                                         var_name_index, if_not_found);
  }
  Bind(&if_objectisspecial);
  {
    // Handle global object here and other special objects in runtime.
    GotoUnless(Word32Equal(instance_type, Int32Constant(JS_GLOBAL_OBJECT_TYPE)),
               if_bailout);

    // Handle interceptors and access checks in runtime.
    Node* bit_field = LoadMapBitField(map);
    Node* mask = Int32Constant(1 << Map::kHasNamedInterceptor |
                               1 << Map::kIsAccessCheckNeeded);
    GotoIf(Word32NotEqual(Word32And(bit_field, mask), Int32Constant(0)),
           if_bailout);

    Node* dictionary = LoadProperties(object);
    var_meta_storage->Bind(dictionary);

    NameDictionaryLookup<GlobalDictionary>(
        dictionary, unique_name, if_found_global, var_name_index, if_not_found);
  }
}
   1796 
   1797 void CodeStubAssembler::TryHasOwnProperty(compiler::Node* object,
   1798                                           compiler::Node* map,
   1799                                           compiler::Node* instance_type,
   1800                                           compiler::Node* unique_name,
   1801                                           Label* if_found, Label* if_not_found,
   1802                                           Label* if_bailout) {
   1803   Comment("TryHasOwnProperty");
   1804   Variable var_meta_storage(this, MachineRepresentation::kTagged);
   1805   Variable var_name_index(this, MachineRepresentation::kWord32);
   1806 
   1807   Label if_found_global(this);
   1808   TryLookupProperty(object, map, instance_type, unique_name, if_found, if_found,
   1809                     &if_found_global, &var_meta_storage, &var_name_index,
   1810                     if_not_found, if_bailout);
   1811   Bind(&if_found_global);
   1812   {
   1813     Variable var_value(this, MachineRepresentation::kTagged);
   1814     Variable var_details(this, MachineRepresentation::kWord32);
   1815     // Check if the property cell is not deleted.
   1816     LoadPropertyFromGlobalDictionary(var_meta_storage.value(),
   1817                                      var_name_index.value(), &var_value,
   1818                                      &var_details, if_not_found);
   1819     Goto(if_found);
   1820   }
   1821 }
   1822 
// Loads the value and PropertyDetails of the property described by the
// descriptor entry whose key sits at {name_index} in {descriptors}:
//  - field properties live on the object, either in-object or in the
//    properties backing store; double fields are re-boxed into a freshly
//    allocated HeapNumber,
//  - descriptor-stored properties are read straight from the descriptor
//    array.
void CodeStubAssembler::LoadPropertyFromFastObject(Node* object, Node* map,
                                                   Node* descriptors,
                                                   Node* name_index,
                                                   Variable* var_details,
                                                   Variable* var_value) {
  DCHECK_EQ(MachineRepresentation::kWord32, var_details->rep());
  DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
  Comment("[ LoadPropertyFromFastObject");

  // Offsets from a descriptor's key slot to its details and value slots.
  const int name_to_details_offset =
      (DescriptorArray::kDescriptorDetails - DescriptorArray::kDescriptorKey) *
      kPointerSize;
  const int name_to_value_offset =
      (DescriptorArray::kDescriptorValue - DescriptorArray::kDescriptorKey) *
      kPointerSize;

  Node* details = SmiToWord32(
      LoadFixedArrayElement(descriptors, name_index, name_to_details_offset));
  var_details->Bind(details);

  Node* location = BitFieldDecode<PropertyDetails::LocationField>(details);

  Label if_in_field(this), if_in_descriptor(this), done(this);
  Branch(Word32Equal(location, Int32Constant(kField)), &if_in_field,
         &if_in_descriptor);
  Bind(&if_in_field);
  {
    Node* field_index =
        BitFieldDecode<PropertyDetails::FieldIndexField>(details);
    Node* representation =
        BitFieldDecode<PropertyDetails::RepresentationField>(details);

    Node* inobject_properties = LoadMapInobjectProperties(map);

    // Field indices below the in-object count address in-object slots;
    // larger indices address the properties backing store.
    Label if_inobject(this), if_backing_store(this);
    Variable var_double_value(this, MachineRepresentation::kFloat64);
    Label rebox_double(this, &var_double_value);
    BranchIfInt32LessThan(field_index, inobject_properties, &if_inobject,
                          &if_backing_store);
    Bind(&if_inobject);
    {
      Comment("if_inobject");
      // In-object fields sit at the end of the object: compute the byte
      // offset from the instance size and the field's distance from the end.
      Node* field_offset = ChangeInt32ToIntPtr(
          Int32Mul(Int32Sub(LoadMapInstanceSize(map),
                            Int32Sub(inobject_properties, field_index)),
                   Int32Constant(kPointerSize)));

      Label if_double(this), if_tagged(this);
      BranchIfWord32NotEqual(representation,
                             Int32Constant(Representation::kDouble), &if_tagged,
                             &if_double);
      Bind(&if_tagged);
      {
        var_value->Bind(LoadObjectField(object, field_offset));
        Goto(&done);
      }
      Bind(&if_double);
      {
        // With unboxed double fields the raw float64 is stored inline;
        // otherwise the field holds a heap number to read the value from.
        if (FLAG_unbox_double_fields) {
          var_double_value.Bind(
              LoadObjectField(object, field_offset, MachineType::Float64()));
        } else {
          Node* mutable_heap_number = LoadObjectField(object, field_offset);
          var_double_value.Bind(LoadHeapNumberValue(mutable_heap_number));
        }
        Goto(&rebox_double);
      }
    }
    Bind(&if_backing_store);
    {
      Comment("if_backing_store");
      // Backing-store fields are indexed relative to the in-object count.
      Node* properties = LoadProperties(object);
      field_index = Int32Sub(field_index, inobject_properties);
      Node* value = LoadFixedArrayElement(properties, field_index);

      Label if_double(this), if_tagged(this);
      BranchIfWord32NotEqual(representation,
                             Int32Constant(Representation::kDouble), &if_tagged,
                             &if_double);
      Bind(&if_tagged);
      {
        var_value->Bind(value);
        Goto(&done);
      }
      Bind(&if_double);
      {
        var_double_value.Bind(LoadHeapNumberValue(value));
        Goto(&rebox_double);
      }
    }
    Bind(&rebox_double);
    {
      Comment("rebox_double");
      // Double-represented fields are returned as a fresh HeapNumber.
      Node* heap_number = AllocateHeapNumber();
      StoreHeapNumberValue(heap_number, var_double_value.value());
      var_value->Bind(heap_number);
      Goto(&done);
    }
  }
  Bind(&if_in_descriptor);
  {
    // The value is stored directly in the descriptor array.
    Node* value =
        LoadFixedArrayElement(descriptors, name_index, name_to_value_offset);
    var_value->Bind(value);
    Goto(&done);
  }
  Bind(&done);

  Comment("] LoadPropertyFromFastObject");
}
   1933 
   1934 void CodeStubAssembler::LoadPropertyFromNameDictionary(Node* dictionary,
   1935                                                        Node* name_index,
   1936                                                        Variable* var_details,
   1937                                                        Variable* var_value) {
   1938   Comment("LoadPropertyFromNameDictionary");
   1939 
   1940   const int name_to_details_offset =
   1941       (NameDictionary::kEntryDetailsIndex - NameDictionary::kEntryKeyIndex) *
   1942       kPointerSize;
   1943   const int name_to_value_offset =
   1944       (NameDictionary::kEntryValueIndex - NameDictionary::kEntryKeyIndex) *
   1945       kPointerSize;
   1946 
   1947   Node* details = SmiToWord32(
   1948       LoadFixedArrayElement(dictionary, name_index, name_to_details_offset));
   1949 
   1950   var_details->Bind(details);
   1951   var_value->Bind(
   1952       LoadFixedArrayElement(dictionary, name_index, name_to_value_offset));
   1953 
   1954   Comment("] LoadPropertyFromNameDictionary");
   1955 }
   1956 
   1957 void CodeStubAssembler::LoadPropertyFromGlobalDictionary(Node* dictionary,
   1958                                                          Node* name_index,
   1959                                                          Variable* var_details,
   1960                                                          Variable* var_value,
   1961                                                          Label* if_deleted) {
   1962   Comment("[ LoadPropertyFromGlobalDictionary");
   1963 
   1964   const int name_to_value_offset =
   1965       (GlobalDictionary::kEntryValueIndex - GlobalDictionary::kEntryKeyIndex) *
   1966       kPointerSize;
   1967 
   1968   Node* property_cell =
   1969       LoadFixedArrayElement(dictionary, name_index, name_to_value_offset);
   1970 
   1971   Node* value = LoadObjectField(property_cell, PropertyCell::kValueOffset);
   1972   GotoIf(WordEqual(value, TheHoleConstant()), if_deleted);
   1973 
   1974   var_value->Bind(value);
   1975 
   1976   Node* details =
   1977       SmiToWord32(LoadObjectField(property_cell, PropertyCell::kDetailsOffset));
   1978   var_details->Bind(details);
   1979 
   1980   Comment("] LoadPropertyFromGlobalDictionary");
   1981 }
   1982 
// Looks up the own property {unique_name} on {object} and loads its value:
//  - data properties jump to {if_found_value} with *{var_value} bound,
//  - accessor pairs invoke the getter via CallJS on {receiver} (or yield
//    undefined when the getter is not callable),
//  - absent (or deleted global) properties go to {if_not_found},
//  - AccessorInfo accessors, FunctionTemplateInfo getters and objects that
//    need runtime handling bail out to {if_bailout}.
void CodeStubAssembler::TryGetOwnProperty(
    Node* context, Node* receiver, Node* object, Node* map, Node* instance_type,
    Node* unique_name, Label* if_found_value, Variable* var_value,
    Label* if_not_found, Label* if_bailout) {
  DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
  Comment("TryGetOwnProperty");

  // Outputs of the generic lookup: the storage holding the property
  // (descriptors or dictionary) and the index of the key within it.
  Variable var_meta_storage(this, MachineRepresentation::kTagged);
  Variable var_entry(this, MachineRepresentation::kWord32);

  Label if_found_fast(this), if_found_dict(this), if_found_global(this);

  Variable var_details(this, MachineRepresentation::kWord32);
  Variable* vars[] = {var_value, &var_details};
  Label if_found(this, 2, vars);

  TryLookupProperty(object, map, instance_type, unique_name, &if_found_fast,
                    &if_found_dict, &if_found_global, &var_meta_storage,
                    &var_entry, if_not_found, if_bailout);
  Bind(&if_found_fast);
  {
    Node* descriptors = var_meta_storage.value();
    Node* name_index = var_entry.value();

    LoadPropertyFromFastObject(object, map, descriptors, name_index,
                               &var_details, var_value);
    Goto(&if_found);
  }
  Bind(&if_found_dict);
  {
    Node* dictionary = var_meta_storage.value();
    Node* entry = var_entry.value();
    LoadPropertyFromNameDictionary(dictionary, entry, &var_details, var_value);
    Goto(&if_found);
  }
  Bind(&if_found_global);
  {
    Node* dictionary = var_meta_storage.value();
    Node* entry = var_entry.value();

    // Deleted global properties (hole in the PropertyCell) count as
    // not found.
    LoadPropertyFromGlobalDictionary(dictionary, entry, &var_details, var_value,
                                     if_not_found);
    Goto(&if_found);
  }
  // Here we have details and value which could be an accessor.
  Bind(&if_found);
  {
    Node* details = var_details.value();
    Node* kind = BitFieldDecode<PropertyDetails::KindField>(details);

    // Data properties are done; accessors need the getter invoked.
    Label if_accessor(this);
    Branch(Word32Equal(kind, Int32Constant(kData)), if_found_value,
           &if_accessor);
    Bind(&if_accessor);
    {
      Node* accessor_pair = var_value->value();
      // API (AccessorInfo) accessors are not handled here.
      GotoIf(Word32Equal(LoadInstanceType(accessor_pair),
                         Int32Constant(ACCESSOR_INFO_TYPE)),
             if_bailout);
      AssertInstanceType(accessor_pair, ACCESSOR_PAIR_TYPE);
      Node* getter =
          LoadObjectField(accessor_pair, AccessorPair::kGetterOffset);
      Node* getter_map = LoadMap(getter);
      Node* instance_type = LoadMapInstanceType(getter_map);
      // FunctionTemplateInfo getters are not supported yet.
      GotoIf(Word32Equal(instance_type,
                         Int32Constant(FUNCTION_TEMPLATE_INFO_TYPE)),
             if_bailout);

      // Return undefined if the {getter} is not callable.
      var_value->Bind(UndefinedConstant());
      GotoIf(Word32Equal(Word32And(LoadMapBitField(getter_map),
                                   Int32Constant(1 << Map::kIsCallable)),
                         Int32Constant(0)),
             if_found_value);

      // Call the accessor.
      Callable callable = CodeFactory::Call(isolate());
      Node* result = CallJS(callable, context, getter, receiver);
      var_value->Bind(result);
      Goto(if_found_value);
    }
  }
}
   2067 
// Checks whether {object} has an element at {index}. Dispatches on the
// map's elements kind and jumps to {if_found} or {if_not_found}; elements
// kinds not handled here, as well as special receivers, bail out to
// {if_bailout}.
void CodeStubAssembler::TryLookupElement(Node* object, Node* map,
                                         Node* instance_type, Node* index,
                                         Label* if_found, Label* if_not_found,
                                         Label* if_bailout) {
  // Handle special objects in runtime.
  GotoIf(Int32LessThanOrEqual(instance_type,
                              Int32Constant(LAST_SPECIAL_RECEIVER_TYPE)),
         if_bailout);

  Node* bit_field2 = LoadMapBitField2(map);
  Node* elements_kind = BitFieldDecode<Map::ElementsKindBits>(bit_field2);

  // TODO(verwaest): Support other elements kinds as well.
  Label if_isobjectorsmi(this), if_isdouble(this), if_isdictionary(this),
      if_isfaststringwrapper(this), if_isslowstringwrapper(this);
  // clang-format off
  int32_t values[] = {
      // Handled by {if_isobjectorsmi}.
      FAST_SMI_ELEMENTS, FAST_HOLEY_SMI_ELEMENTS, FAST_ELEMENTS,
          FAST_HOLEY_ELEMENTS,
      // Handled by {if_isdouble}.
      FAST_DOUBLE_ELEMENTS, FAST_HOLEY_DOUBLE_ELEMENTS,
      // Handled by {if_isdictionary}.
      DICTIONARY_ELEMENTS,
      // Handled by {if_isfaststringwrapper}.
      FAST_STRING_WRAPPER_ELEMENTS,
      // Handled by {if_isslowstringwrapper}.
      SLOW_STRING_WRAPPER_ELEMENTS,
      // Handled by {if_not_found}.
      NO_ELEMENTS,
  };
  Label* labels[] = {
      &if_isobjectorsmi, &if_isobjectorsmi, &if_isobjectorsmi,
          &if_isobjectorsmi,
      &if_isdouble, &if_isdouble,
      &if_isdictionary,
      &if_isfaststringwrapper,
      &if_isslowstringwrapper,
      if_not_found,
  };
  // clang-format on
  STATIC_ASSERT(arraysize(values) == arraysize(labels));
  Switch(elements_kind, if_bailout, values, labels, arraysize(values));

  Bind(&if_isobjectorsmi);
  {
    Node* elements = LoadElements(object);
    Node* length = LoadFixedArrayBaseLength(elements);

    // Out-of-bounds indices are simply absent.
    GotoIf(Int32GreaterThanOrEqual(index, SmiToWord32(length)), if_not_found);

    // Holey kinds mark absent elements with the hole.
    Node* element = LoadFixedArrayElement(elements, index);
    Node* the_hole = TheHoleConstant();
    Branch(WordEqual(element, the_hole), if_not_found, if_found);
  }
  Bind(&if_isdouble);
  {
    Node* elements = LoadElements(object);
    Node* length = LoadFixedArrayBaseLength(elements);

    GotoIf(Int32GreaterThanOrEqual(index, SmiToWord32(length)), if_not_found);

    // Holes in double arrays are a special NaN bit pattern: on 64-bit
    // targets compare the full word, otherwise the upper 32 bits suffice.
    if (kPointerSize == kDoubleSize) {
      Node* element =
          LoadFixedDoubleArrayElement(elements, index, MachineType::Uint64());
      Node* the_hole = Int64Constant(kHoleNanInt64);
      Branch(Word64Equal(element, the_hole), if_not_found, if_found);
    } else {
      Node* element_upper =
          LoadFixedDoubleArrayElement(elements, index, MachineType::Uint32(),
                                      kIeeeDoubleExponentWordOffset);
      Branch(Word32Equal(element_upper, Int32Constant(kHoleNanUpper32)),
             if_not_found, if_found);
    }
  }
  Bind(&if_isdictionary);
  {
    Variable var_entry(this, MachineRepresentation::kWord32);
    Node* elements = LoadElements(object);
    NumberDictionaryLookup<SeededNumberDictionary>(elements, index, if_found,
                                                   &var_entry, if_not_found);
  }
  Bind(&if_isfaststringwrapper);
  {
    AssertInstanceType(object, JS_VALUE_TYPE);
    Node* string = LoadJSValueValue(object);
    Assert(Int32LessThan(LoadInstanceType(string),
                         Int32Constant(FIRST_NONSTRING_TYPE)));
    // Indices within the wrapped string are found; larger indices fall
    // through to the wrapper's own (fast) elements.
    Node* length = LoadStringLength(string);
    GotoIf(Int32LessThan(index, SmiToWord32(length)), if_found);
    Goto(&if_isobjectorsmi);
  }
  Bind(&if_isslowstringwrapper);
  {
    AssertInstanceType(object, JS_VALUE_TYPE);
    Node* string = LoadJSValueValue(object);
    Assert(Int32LessThan(LoadInstanceType(string),
                         Int32Constant(FIRST_NONSTRING_TYPE)));
    // Indices within the wrapped string are found; larger indices fall
    // through to the wrapper's dictionary elements.
    Node* length = LoadStringLength(string);
    GotoIf(Int32LessThan(index, SmiToWord32(length)), if_found);
    Goto(&if_isdictionary);
  }
}
   2171 
// Instantiate template methods to workaround GCC compilation issue.
// Explicit instantiations for both dictionary types used in this file;
// without them GCC fails to emit the template bodies referenced elsewhere.
template void CodeStubAssembler::NumberDictionaryLookup<SeededNumberDictionary>(
    Node*, Node*, Label*, Variable*, Label*);
template void CodeStubAssembler::NumberDictionaryLookup<
    UnseededNumberDictionary>(Node*, Node*, Label*, Variable*, Label*);
   2177 
// Implements the OrdinaryHasInstance check (cf. Runtime::kOrdinaryHasInstance):
// walks {object}'s prototype chain looking for {callable}'s "prototype"
// property and returns a Boolean.  Bails out to the runtime for every case
// needing special handling: Smi inputs, non-JSFunction callables,
// non-constructors, callables with a non-instance "prototype", proxies, and
// objects requiring access checks.  Results are memoized in the global
// instanceof cache roots.
Node* CodeStubAssembler::OrdinaryHasInstance(Node* context, Node* callable,
                                             Node* object) {
  Variable var_result(this, MachineRepresentation::kTagged);
  Label return_false(this), return_true(this),
      return_runtime(this, Label::kDeferred), return_result(this);

  // Goto runtime if {object} is a Smi.
  GotoIf(WordIsSmi(object), &return_runtime);

  // Load map of {object}.
  Node* object_map = LoadMap(object);

  // Lookup the {callable} and {object} map in the global instanceof cache.
  // Note: This is safe because we clear the global instanceof cache whenever
  // we change the prototype of any object.
  Node* instanceof_cache_function =
      LoadRoot(Heap::kInstanceofCacheFunctionRootIndex);
  Node* instanceof_cache_map = LoadRoot(Heap::kInstanceofCacheMapRootIndex);
  {
    Label instanceof_cache_miss(this);
    GotoUnless(WordEqual(instanceof_cache_function, callable),
               &instanceof_cache_miss);
    GotoUnless(WordEqual(instanceof_cache_map, object_map),
               &instanceof_cache_miss);
    // Cache hit: the previously computed answer is still valid.
    var_result.Bind(LoadRoot(Heap::kInstanceofCacheAnswerRootIndex));
    Goto(&return_result);
    Bind(&instanceof_cache_miss);
  }

  // Goto runtime if {callable} is a Smi.
  GotoIf(WordIsSmi(callable), &return_runtime);

  // Load map of {callable}.
  Node* callable_map = LoadMap(callable);

  // Goto runtime if {callable} is not a JSFunction.
  Node* callable_instance_type = LoadMapInstanceType(callable_map);
  GotoUnless(
      Word32Equal(callable_instance_type, Int32Constant(JS_FUNCTION_TYPE)),
      &return_runtime);

  // Goto runtime if {callable} is not a constructor or has
  // a non-instance "prototype".
  // Both bits are tested with a single masked compare: the masked value must
  // have kIsConstructor set and kHasNonInstancePrototype clear.
  Node* callable_bitfield = LoadMapBitField(callable_map);
  GotoUnless(
      Word32Equal(Word32And(callable_bitfield,
                            Int32Constant((1 << Map::kHasNonInstancePrototype) |
                                          (1 << Map::kIsConstructor))),
                  Int32Constant(1 << Map::kIsConstructor)),
      &return_runtime);

  // Get the "prototype" (or initial map) of the {callable}.
  Node* callable_prototype =
      LoadObjectField(callable, JSFunction::kPrototypeOrInitialMapOffset);
  {
    Variable var_callable_prototype(this, MachineRepresentation::kTagged);
    Label callable_prototype_valid(this);
    var_callable_prototype.Bind(callable_prototype);

    // Resolve the "prototype" if the {callable} has an initial map.  Afterwards
    // the {callable_prototype} will be either the JSReceiver prototype object
    // or the hole value, which means that no instances of the {callable} were
    // created so far and hence we should return false.
    Node* callable_prototype_instance_type =
        LoadInstanceType(callable_prototype);
    GotoUnless(
        Word32Equal(callable_prototype_instance_type, Int32Constant(MAP_TYPE)),
        &callable_prototype_valid);
    var_callable_prototype.Bind(
        LoadObjectField(callable_prototype, Map::kPrototypeOffset));
    Goto(&callable_prototype_valid);
    Bind(&callable_prototype_valid);
    callable_prototype = var_callable_prototype.value();
  }

  // Update the global instanceof cache with the current {object} map and
  // {callable}.  The cached answer will be set when it is known below.
  StoreRoot(Heap::kInstanceofCacheFunctionRootIndex, callable);
  StoreRoot(Heap::kInstanceofCacheMapRootIndex, object_map);

  // Loop through the prototype chain looking for the {callable} prototype.
  Variable var_object_map(this, MachineRepresentation::kTagged);
  var_object_map.Bind(object_map);
  Label loop(this, &var_object_map);
  Goto(&loop);
  Bind(&loop);
  {
    Node* object_map = var_object_map.value();

    // Check if the current {object} needs to be access checked.
    Node* object_bitfield = LoadMapBitField(object_map);
    GotoUnless(
        Word32Equal(Word32And(object_bitfield,
                              Int32Constant(1 << Map::kIsAccessCheckNeeded)),
                    Int32Constant(0)),
        &return_runtime);

    // Check if the current {object} is a proxy.
    Node* object_instance_type = LoadMapInstanceType(object_map);
    GotoIf(Word32Equal(object_instance_type, Int32Constant(JS_PROXY_TYPE)),
           &return_runtime);

    // Check the current {object} prototype.
    Node* object_prototype = LoadMapPrototype(object_map);
    GotoIf(WordEqual(object_prototype, NullConstant()), &return_false);
    GotoIf(WordEqual(object_prototype, callable_prototype), &return_true);

    // Continue with the prototype.
    var_object_map.Bind(LoadMap(object_prototype));
    Goto(&loop);
  }

  Bind(&return_true);
  StoreRoot(Heap::kInstanceofCacheAnswerRootIndex, BooleanConstant(true));
  var_result.Bind(BooleanConstant(true));
  Goto(&return_result);

  Bind(&return_false);
  StoreRoot(Heap::kInstanceofCacheAnswerRootIndex, BooleanConstant(false));
  var_result.Bind(BooleanConstant(false));
  Goto(&return_result);

  Bind(&return_runtime);
  {
    // Invalidate the global instanceof cache.
    StoreRoot(Heap::kInstanceofCacheFunctionRootIndex, SmiConstant(0));
    // Fallback to the runtime implementation.
    var_result.Bind(
        CallRuntime(Runtime::kOrdinaryHasInstance, context, callable, object));
  }
  Goto(&return_result);

  Bind(&return_result);
  return var_result.value();
}
   2313 
   2314 compiler::Node* CodeStubAssembler::ElementOffsetFromIndex(Node* index_node,
   2315                                                           ElementsKind kind,
   2316                                                           ParameterMode mode,
   2317                                                           int base_size) {
   2318   bool is_double = IsFastDoubleElementsKind(kind);
   2319   int element_size_shift = is_double ? kDoubleSizeLog2 : kPointerSizeLog2;
   2320   int element_size = 1 << element_size_shift;
   2321   int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
   2322   int32_t index = 0;
   2323   bool constant_index = false;
   2324   if (mode == SMI_PARAMETERS) {
   2325     element_size_shift -= kSmiShiftBits;
   2326     intptr_t temp = 0;
   2327     constant_index = ToIntPtrConstant(index_node, temp);
   2328     index = temp >> kSmiShiftBits;
   2329   } else {
   2330     constant_index = ToInt32Constant(index_node, index);
   2331   }
   2332   if (constant_index) {
   2333     return IntPtrConstant(base_size + element_size * index);
   2334   }
   2335   if (Is64() && mode == INTEGER_PARAMETERS) {
   2336     index_node = ChangeInt32ToInt64(index_node);
   2337   }
   2338   if (base_size == 0) {
   2339     return (element_size_shift >= 0)
   2340                ? WordShl(index_node, IntPtrConstant(element_size_shift))
   2341                : WordShr(index_node, IntPtrConstant(-element_size_shift));
   2342   }
   2343   return IntPtrAdd(
   2344       IntPtrConstant(base_size),
   2345       (element_size_shift >= 0)
   2346           ? WordShl(index_node, IntPtrConstant(element_size_shift))
   2347           : WordShr(index_node, IntPtrConstant(-element_size_shift)));
   2348 }
   2349 
   2350 compiler::Node* CodeStubAssembler::LoadTypeFeedbackVectorForStub() {
   2351   Node* function =
   2352       LoadFromParentFrame(JavaScriptFrameConstants::kFunctionOffset);
   2353   Node* literals = LoadObjectField(function, JSFunction::kLiteralsOffset);
   2354   return LoadObjectField(literals, LiteralsArray::kFeedbackVectorOffset);
   2355 }
   2356 
// Returns the map of {receiver}.  A Smi receiver has no map to load, so it
// yields the heap number map instead.
compiler::Node* CodeStubAssembler::LoadReceiverMap(compiler::Node* receiver) {
  Variable var_receiver_map(this, MachineRepresentation::kTagged);
  // TODO(ishell): defer blocks when it works.
  Label load_smi_map(this /*, Label::kDeferred*/), load_receiver_map(this),
      if_result(this);

  Branch(WordIsSmi(receiver), &load_smi_map, &load_receiver_map);
  Bind(&load_smi_map);
  {
    // Smis are handled via the HeapNumber map.
    var_receiver_map.Bind(LoadRoot(Heap::kHeapNumberMapRootIndex));
    Goto(&if_result);
  }
  Bind(&load_receiver_map);
  {
    var_receiver_map.Bind(LoadMap(receiver));
    Goto(&if_result);
  }
  Bind(&if_result);
  return var_receiver_map.value();
}
   2377 
// Attempts the monomorphic IC fast path: loads the feedback for {p->slot}
// from {p->vector}; if its WeakCell value equals {receiver_map}, binds the
// handler stored in the following slot into {var_handler} and jumps to
// {if_handler}, otherwise jumps to {if_miss}.  Returns the raw feedback
// value so callers can continue dispatching on it after a miss.
compiler::Node* CodeStubAssembler::TryMonomorphicCase(
    const LoadICParameters* p, compiler::Node* receiver_map, Label* if_handler,
    Variable* var_handler, Label* if_miss) {
  DCHECK_EQ(MachineRepresentation::kTagged, var_handler->rep());

  // TODO(ishell): add helper class that hides offset computations for a series
  // of loads.
  int32_t header_size = FixedArray::kHeaderSize - kHeapObjectTag;
  Node* offset = ElementOffsetFromIndex(p->slot, FAST_HOLEY_ELEMENTS,
                                        SMI_PARAMETERS, header_size);
  Node* feedback = Load(MachineType::AnyTagged(), p->vector, offset);

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  GotoUnless(WordEqual(receiver_map, LoadWeakCellValue(feedback)), if_miss);

  // The handler lives in the vector slot immediately after the weak cell.
  Node* handler = Load(MachineType::AnyTagged(), p->vector,
                       IntPtrAdd(offset, IntPtrConstant(kPointerSize)));

  var_handler->Bind(handler);
  Goto(if_handler);
  return feedback;
}
   2402 
// Dispatches on a polymorphic {feedback} FixedArray of (map weak cell,
// handler) pairs.  The first {unroll_count} entries are checked with
// straight-line (unrolled) code, the remainder with a loop.  On a match the
// handler is bound into {var_handler} and control jumps to {if_handler}; if
// no entry matches {receiver_map}, control jumps to {if_miss}.
void CodeStubAssembler::HandlePolymorphicCase(
    const LoadICParameters* p, compiler::Node* receiver_map,
    compiler::Node* feedback, Label* if_handler, Variable* var_handler,
    Label* if_miss, int unroll_count) {
  DCHECK_EQ(MachineRepresentation::kTagged, var_handler->rep());

  // Iterate {feedback} array.
  // Each entry occupies two slots: the cached map's weak cell, then the
  // associated handler.
  const int kEntrySize = 2;

  for (int i = 0; i < unroll_count; i++) {
    Label next_entry(this);
    Node* cached_map = LoadWeakCellValue(
        LoadFixedArrayElement(feedback, Int32Constant(i * kEntrySize)));
    GotoIf(WordNotEqual(receiver_map, cached_map), &next_entry);

    // Found, now call handler.
    Node* handler =
        LoadFixedArrayElement(feedback, Int32Constant(i * kEntrySize + 1));
    var_handler->Bind(handler);
    Goto(if_handler);

    Bind(&next_entry);
  }
  Node* length = SmiToWord32(LoadFixedArrayBaseLength(feedback));

  // Loop from {unroll_count}*kEntrySize to {length}.
  Variable var_index(this, MachineRepresentation::kWord32);
  Label loop(this, &var_index);
  var_index.Bind(Int32Constant(unroll_count * kEntrySize));
  Goto(&loop);
  Bind(&loop);
  {
    Node* index = var_index.value();
    // Past the end of the feedback array: overall miss.
    GotoIf(Int32GreaterThanOrEqual(index, length), if_miss);

    Node* cached_map =
        LoadWeakCellValue(LoadFixedArrayElement(feedback, index));

    Label next_entry(this);
    GotoIf(WordNotEqual(receiver_map, cached_map), &next_entry);

    // Found, now call handler.
    // The handler sits one pointer-size past the map's element.
    Node* handler = LoadFixedArrayElement(feedback, index, kPointerSize);
    var_handler->Bind(handler);
    Goto(if_handler);

    Bind(&next_entry);
    var_index.Bind(Int32Add(index, Int32Constant(kEntrySize)));
    Goto(&loop);
  }
}
   2454 
   2455 compiler::Node* CodeStubAssembler::StubCachePrimaryOffset(compiler::Node* name,
   2456                                                           Code::Flags flags,
   2457                                                           compiler::Node* map) {
   2458   // See v8::internal::StubCache::PrimaryOffset().
   2459   STATIC_ASSERT(StubCache::kCacheIndexShift == Name::kHashShift);
   2460   // Compute the hash of the name (use entire hash field).
   2461   Node* hash_field = LoadNameHashField(name);
   2462   Assert(WordEqual(
   2463       Word32And(hash_field, Int32Constant(Name::kHashNotComputedMask)),
   2464       Int32Constant(0)));
   2465 
   2466   // Using only the low bits in 64-bit mode is unlikely to increase the
   2467   // risk of collision even if the heap is spread over an area larger than
   2468   // 4Gb (and not at all if it isn't).
   2469   Node* hash = Int32Add(hash_field, map);
   2470   // We always set the in_loop bit to zero when generating the lookup code
   2471   // so do it here too so the hash codes match.
   2472   uint32_t iflags =
   2473       (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
   2474   // Base the offset on a simple combination of name, flags, and map.
   2475   hash = Word32Xor(hash, Int32Constant(iflags));
   2476   uint32_t mask = (StubCache::kPrimaryTableSize - 1)
   2477                   << StubCache::kCacheIndexShift;
   2478   return Word32And(hash, Int32Constant(mask));
   2479 }
   2480 
   2481 compiler::Node* CodeStubAssembler::StubCacheSecondaryOffset(
   2482     compiler::Node* name, Code::Flags flags, compiler::Node* seed) {
   2483   // See v8::internal::StubCache::SecondaryOffset().
   2484 
   2485   // Use the seed from the primary cache in the secondary cache.
   2486   Node* hash = Int32Sub(seed, name);
   2487   // We always set the in_loop bit to zero when generating the lookup code
   2488   // so do it here too so the hash codes match.
   2489   uint32_t iflags =
   2490       (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
   2491   hash = Int32Add(hash, Int32Constant(iflags));
   2492   int32_t mask = (StubCache::kSecondaryTableSize - 1)
   2493                  << StubCache::kCacheIndexShift;
   2494   return Word32And(hash, Int32Constant(mask));
   2495 }
   2496 
// Assembler-side mirror of StubCache::Table; the values must stay in sync
// with StubCache::kPrimary / StubCache::kSecondary.
enum CodeStubAssembler::StubCacheTable : int {
  kPrimary = static_cast<int>(StubCache::kPrimary),
  kSecondary = static_cast<int>(StubCache::kSecondary)
};
   2501 
// Probes a single stub cache table ({table_id}) for an entry matching
// {name}, {map} and {flags}.  {entry_offset} is the hash-derived offset
// produced by StubCachePrimaryOffset/StubCacheSecondaryOffset.  On a hit the
// cached code object is bound into {var_handler} and control jumps to
// {if_handler}; any mismatch jumps to {if_miss}.
void CodeStubAssembler::TryProbeStubCacheTable(
    StubCache* stub_cache, StubCacheTable table_id,
    compiler::Node* entry_offset, compiler::Node* name, Code::Flags flags,
    compiler::Node* map, Label* if_handler, Variable* var_handler,
    Label* if_miss) {
  StubCache::Table table = static_cast<StubCache::Table>(table_id);
#ifdef DEBUG
  // These test flags force all lookups into one table; probes of the
  // disabled table become unconditional misses.
  if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
    Goto(if_miss);
    return;
  } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
    Goto(if_miss);
    return;
  }
#endif
  // The {table_offset} holds the entry offset times four (due to masking
  // and shifting optimizations).
  const int kMultiplier = sizeof(StubCache::Entry) >> Name::kHashShift;
  entry_offset = Int32Mul(entry_offset, Int32Constant(kMultiplier));

  // Check that the key in the entry matches the name.
  Node* key_base =
      ExternalConstant(ExternalReference(stub_cache->key_reference(table)));
  Node* entry_key = Load(MachineType::Pointer(), key_base, entry_offset);
  GotoIf(WordNotEqual(name, entry_key), if_miss);

  // Get the map entry from the cache.
  // Entry layout relative to the key: value at +kPointerSize, map at
  // +2*kPointerSize (enforced by the DCHECKs below).
  DCHECK_EQ(kPointerSize * 2, stub_cache->map_reference(table).address() -
                                  stub_cache->key_reference(table).address());
  Node* entry_map =
      Load(MachineType::Pointer(), key_base,
           Int32Add(entry_offset, Int32Constant(kPointerSize * 2)));
  GotoIf(WordNotEqual(map, entry_map), if_miss);

  // Check that the flags match what we're looking for.
  DCHECK_EQ(kPointerSize, stub_cache->value_reference(table).address() -
                              stub_cache->key_reference(table).address());
  Node* code = Load(MachineType::Pointer(), key_base,
                    Int32Add(entry_offset, Int32Constant(kPointerSize)));

  // Flags that are irrelevant to the lookup are masked out of the stored
  // code's flags before comparing against the requested {flags}.
  Node* code_flags =
      LoadObjectField(code, Code::kFlagsOffset, MachineType::Uint32());
  GotoIf(Word32NotEqual(Int32Constant(flags),
                        Word32And(code_flags,
                                  Int32Constant(~Code::kFlagsNotUsedInLookup))),
         if_miss);

  // We found the handler.
  var_handler->Bind(code);
  Goto(if_handler);
}
   2553 
// Probes the megamorphic stub cache for a handler matching {receiver}'s map,
// {name} and {flags}: the primary table first, then the secondary table.
// A hit binds the handler into {var_handler} and jumps to {if_handler};
// a miss bumps the miss counter and jumps to {if_miss}.  Smi receivers
// always miss.
void CodeStubAssembler::TryProbeStubCache(
    StubCache* stub_cache, Code::Flags flags, compiler::Node* receiver,
    compiler::Node* name, Label* if_handler, Variable* var_handler,
    Label* if_miss) {
  Label try_secondary(this), miss(this);

  Counters* counters = isolate()->counters();
  IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);

  // Check that the {receiver} isn't a smi.
  GotoIf(WordIsSmi(receiver), &miss);

  Node* receiver_map = LoadMap(receiver);

  // Probe the primary table.
  Node* primary_offset = StubCachePrimaryOffset(name, flags, receiver_map);
  TryProbeStubCacheTable(stub_cache, kPrimary, primary_offset, name, flags,
                         receiver_map, if_handler, var_handler, &try_secondary);

  Bind(&try_secondary);
  {
    // Probe the secondary table.
    // The secondary offset is derived from the primary offset as its seed.
    Node* secondary_offset =
        StubCacheSecondaryOffset(name, flags, primary_offset);
    TryProbeStubCacheTable(stub_cache, kSecondary, secondary_offset, name,
                           flags, receiver_map, if_handler, var_handler, &miss);
  }

  Bind(&miss);
  {
    IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
    Goto(if_miss);
  }
}
   2588 
// Generates the full LoadIC dispatch: tries, in order, the monomorphic
// feedback, a polymorphic feedback array, the megamorphic stub cache, and
// finally tail-calls the %LoadIC_Miss runtime function.  Every path ends in
// a tail call, so this emits a complete stub body.
void CodeStubAssembler::LoadIC(const LoadICParameters* p) {
  Variable var_handler(this, MachineRepresentation::kTagged);
  // TODO(ishell): defer blocks when it works.
  Label if_handler(this, &var_handler), try_polymorphic(this),
      try_megamorphic(this /*, Label::kDeferred*/),
      miss(this /*, Label::kDeferred*/);

  Node* receiver_map = LoadReceiverMap(p->receiver);

  // Check monomorphic case.
  Node* feedback = TryMonomorphicCase(p, receiver_map, &if_handler,
                                      &var_handler, &try_polymorphic);
  Bind(&if_handler);
  {
    // Dispatch to whichever handler was found.
    LoadWithVectorDescriptor descriptor(isolate());
    TailCallStub(descriptor, var_handler.value(), p->context, p->receiver,
                 p->name, p->slot, p->vector);
  }

  Bind(&try_polymorphic);
  {
    // Check polymorphic case.
    // Polymorphic feedback is stored as a FixedArray of (map, handler) pairs.
    GotoUnless(
        WordEqual(LoadMap(feedback), LoadRoot(Heap::kFixedArrayMapRootIndex)),
        &try_megamorphic);
    HandlePolymorphicCase(p, receiver_map, feedback, &if_handler, &var_handler,
                          &miss, 2);
  }

  Bind(&try_megamorphic);
  {
    // Check megamorphic case.
    GotoUnless(
        WordEqual(feedback, LoadRoot(Heap::kmegamorphic_symbolRootIndex)),
        &miss);

    Code::Flags code_flags =
        Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::LOAD_IC));

    TryProbeStubCache(isolate()->stub_cache(), code_flags, p->receiver, p->name,
                      &if_handler, &var_handler, &miss);
  }
  Bind(&miss);
  {
    TailCallRuntime(Runtime::kLoadIC_Miss, p->context, p->receiver, p->name,
                    p->slot, p->vector);
  }
}
   2637 
// Generates the LoadGlobalIC dispatch.  The feedback slot holds a WeakCell
// pointing at the global's PropertyCell: return the cell's value when it is
// present, otherwise dispatch to a handler Code object stored in the slot
// after it, and fall back to the %LoadGlobalIC_Miss runtime function.
void CodeStubAssembler::LoadGlobalIC(const LoadICParameters* p) {
  Label try_handler(this), miss(this);
  Node* weak_cell =
      LoadFixedArrayElement(p->vector, p->slot, 0, SMI_PARAMETERS);
  AssertInstanceType(weak_cell, WEAK_CELL_TYPE);

  // Load value or try handler case if the {weak_cell} is cleared.
  Node* property_cell = LoadWeakCellValue(weak_cell, &try_handler);
  AssertInstanceType(property_cell, PROPERTY_CELL_TYPE);

  // A hole value in the cell means there is no usable cached value.
  Node* value = LoadObjectField(property_cell, PropertyCell::kValueOffset);
  GotoIf(WordEqual(value, TheHoleConstant()), &miss);
  Return(value);

  Bind(&try_handler);
  {
    // The handler occupies the vector slot after the weak cell.
    Node* handler =
        LoadFixedArrayElement(p->vector, p->slot, kPointerSize, SMI_PARAMETERS);
    GotoIf(WordEqual(handler, LoadRoot(Heap::kuninitialized_symbolRootIndex)),
           &miss);

    // In this case {handler} must be a Code object.
    AssertInstanceType(handler, CODE_TYPE);
    LoadWithVectorDescriptor descriptor(isolate());
    Node* native_context = LoadNativeContext(p->context);
    // The global proxy (native context extension) acts as the receiver.
    Node* receiver = LoadFixedArrayElement(
        native_context, Int32Constant(Context::EXTENSION_INDEX));
    // The descriptor requires a name argument; pass a dummy zero here
    // (assumed unused by the handler on this path -- TODO confirm).
    Node* fake_name = IntPtrConstant(0);
    TailCallStub(descriptor, handler, p->context, receiver, fake_name, p->slot,
                 p->vector);
  }
  Bind(&miss);
  {
    TailCallRuntime(Runtime::kLoadGlobalIC_Miss, p->context, p->slot,
                    p->vector);
  }
}
   2675 
   2676 }  // namespace internal
   2677 }  // namespace v8
   2678