// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/api.h"
#include "src/builtins/builtins-utils-gen.h"
#include "src/builtins/builtins.h"
#include "src/code-stub-assembler.h"
#include "src/heap/heap-inl.h"
#include "src/ic/accessor-assembler.h"
#include "src/ic/keyed-store-generic.h"
#include "src/macro-assembler.h"
#include "src/objects/debug-objects.h"
#include "src/objects/shared-function-info.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

template <typename T>
using TNode = compiler::TNode<T>;

// -----------------------------------------------------------------------------
// Interrupt and stack checks.

void Builtins::Generate_InterruptCheck(MacroAssembler* masm) {
  masm->TailCallRuntime(Runtime::kInterrupt);
}

void Builtins::Generate_StackCheck(MacroAssembler* masm) {
  masm->TailCallRuntime(Runtime::kStackGuard);
}

// -----------------------------------------------------------------------------
// TurboFan support builtins.

TF_BUILTIN(CopyFastSmiOrObjectElements, CodeStubAssembler) {
  Node* object = Parameter(Descriptor::kObject);

  // Load the {object}'s elements.
  Node* source = LoadObjectField(object, JSObject::kElementsOffset);
  Node* target = CloneFixedArray(source, ExtractFixedArrayFlag::kFixedArrays);
  StoreObjectField(object, JSObject::kElementsOffset, target);
  Return(target);
}

TF_BUILTIN(GrowFastDoubleElements, CodeStubAssembler) {
  Node* object = Parameter(Descriptor::kObject);
  Node* key = Parameter(Descriptor::kKey);
  Node* context = Parameter(Descriptor::kContext);

  Label runtime(this, Label::kDeferred);
  Node* elements = LoadElements(object);
  elements = TryGrowElementsCapacity(object, elements, PACKED_DOUBLE_ELEMENTS,
                                     key, &runtime);
  Return(elements);

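  // The deferred path is taken when TryGrowElementsCapacity cannot grow the
  // backing store inline; in that case we defer to the runtime.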
  BIND(&runtime);
  TailCallRuntime(Runtime::kGrowArrayElements, context, object, key);
}

TF_BUILTIN(GrowFastSmiOrObjectElements, CodeStubAssembler) {
  Node* object = Parameter(Descriptor::kObject);
  Node* key = Parameter(Descriptor::kKey);
  Node* context = Parameter(Descriptor::kContext);

  Label runtime(this, Label::kDeferred);
  Node* elements = LoadElements(object);
  elements =
      TryGrowElementsCapacity(object, elements, PACKED_ELEMENTS, key, &runtime);
  Return(elements);

  BIND(&runtime);
  TailCallRuntime(Runtime::kGrowArrayElements, context, object, key);
}

TF_BUILTIN(NewArgumentsElements, CodeStubAssembler) {
  Node* frame = Parameter(Descriptor::kFrame);
  TNode<IntPtrT> length = SmiToIntPtr(Parameter(Descriptor::kLength));
  TNode<IntPtrT> mapped_count =
      SmiToIntPtr(Parameter(Descriptor::kMappedCount));

  // Check if we can allocate in new space.
  ElementsKind kind = PACKED_ELEMENTS;
  int max_elements = FixedArray::GetMaxLengthForNewSpaceAllocation(kind);
  Label if_newspace(this), if_oldspace(this, Label::kDeferred);
  Branch(IntPtrLessThan(length, IntPtrConstant(max_elements)), &if_newspace,
         &if_oldspace);

  BIND(&if_newspace);
  {
    // Prefer EmptyFixedArray in case of non-positive {length} (the {length}
    // can be negative here for rest parameters).
    Label if_empty(this), if_notempty(this);
    Branch(IntPtrLessThanOrEqual(length, IntPtrConstant(0)), &if_empty,
           &if_notempty);

    BIND(&if_empty);
    Return(EmptyFixedArrayConstant());

    BIND(&if_notempty);
    {
      // Allocate a FixedArray in new space.
      TNode<FixedArray> result = CAST(AllocateFixedArray(kind, length));

      // The elements might be used to back mapped arguments. In that case fill
      // the mapped elements (i.e. the first {mapped_count}) with the hole, but
      // make sure not to overshoot the {length} if some arguments are missing.
      TNode<IntPtrT> number_of_holes = IntPtrMin(mapped_count, length);
      Node* the_hole = TheHoleConstant();

      // Fill the first elements up to {number_of_holes} with the hole.
      TVARIABLE(IntPtrT, var_index, IntPtrConstant(0));
      Label loop1(this, &var_index), done_loop1(this);
      Goto(&loop1);
      BIND(&loop1);
      {
        // Load the current {index}.
        TNode<IntPtrT> index = var_index.value();

        // Check if we are done.
        GotoIf(WordEqual(index, number_of_holes), &done_loop1);

        // Store the hole into the {result}.
        StoreFixedArrayElement(result, index, the_hole, SKIP_WRITE_BARRIER);

        // Continue with next {index}.
        var_index = IntPtrAdd(index, IntPtrConstant(1));
        Goto(&loop1);
      }
      BIND(&done_loop1);

      // Compute the effective {offset} into the {frame}.
      TNode<IntPtrT> offset = IntPtrAdd(length, IntPtrConstant(1));
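      // The parameter at {index} is loaded from {frame} at byte offset
      // ({offset} - {index}) * kPointerSize (see the Load below), so the
      // parameters are read out of the frame in reverse address order.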

      // Copy the parameters from {frame} (starting at {offset}) to {result}.
      Label loop2(this, &var_index), done_loop2(this);
      Goto(&loop2);
      BIND(&loop2);
      {
        // Load the current {index}.
        TNode<IntPtrT> index = var_index.value();

        // Check if we are done.
        GotoIf(WordEqual(index, length), &done_loop2);

        // Load the parameter at the given {index}.
        TNode<Object> value =
            CAST(Load(MachineType::AnyTagged(), frame,
                      TimesPointerSize(IntPtrSub(offset, index))));

        // Store the {value} into the {result}.
        StoreFixedArrayElement(result, index, value, SKIP_WRITE_BARRIER);

        // Continue with next {index}.
        var_index = IntPtrAdd(index, IntPtrConstant(1));
        Goto(&loop2);
      }
      BIND(&done_loop2);

      Return(result);
    }
  }

  BIND(&if_oldspace);
  {
    // Allocate in old space (or large object space).
    TailCallRuntime(Runtime::kNewArgumentsElements, NoContextConstant(),
                    BitcastWordToTagged(frame), SmiFromIntPtr(length),
                    SmiFromIntPtr(mapped_count));
  }
}

TF_BUILTIN(ReturnReceiver, CodeStubAssembler) {
  Return(Parameter(Descriptor::kReceiver));
}

TF_BUILTIN(DebugBreakTrampoline, CodeStubAssembler) {
  Label tailcall_to_shared(this);
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));
  TNode<Object> new_target = CAST(Parameter(Descriptor::kJSNewTarget));
  TNode<Int32T> arg_count =
      UncheckedCast<Int32T>(Parameter(Descriptor::kJSActualArgumentsCount));
  TNode<JSFunction> function = CAST(Parameter(Descriptor::kJSTarget));

  // Check break-at-entry flag on the debug info.
  TNode<SharedFunctionInfo> shared =
      CAST(LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset));
  TNode<Object> maybe_heap_object_or_smi =
      LoadObjectField(shared, SharedFunctionInfo::kScriptOrDebugInfoOffset);
  TNode<HeapObject> maybe_debug_info =
      TaggedToHeapObject(maybe_heap_object_or_smi, &tailcall_to_shared);
  GotoIfNot(HasInstanceType(maybe_debug_info, InstanceType::DEBUG_INFO_TYPE),
            &tailcall_to_shared);

  {
    TNode<DebugInfo> debug_info = CAST(maybe_debug_info);
    TNode<Smi> flags =
        CAST(LoadObjectField(debug_info, DebugInfo::kFlagsOffset));
    GotoIfNot(SmiToInt32(SmiAnd(flags, SmiConstant(DebugInfo::kBreakAtEntry))),
              &tailcall_to_shared);

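    // The break-at-entry flag is set: let the runtime report the debug break,
    // then resume by tail-calling into the function's actual code below.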
    CallRuntime(Runtime::kDebugBreakAtEntry, context, function);
    Goto(&tailcall_to_shared);
  }

  BIND(&tailcall_to_shared);
  // Tail call into the code object on the SharedFunctionInfo.
  TNode<Code> code = GetSharedFunctionInfoCode(shared);
  TailCallJSCode(code, context, function, new_target, arg_count);
}

class RecordWriteCodeStubAssembler : public CodeStubAssembler {
 public:
  explicit RecordWriteCodeStubAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

  Node* IsMarking() {
    Node* is_marking_addr = ExternalConstant(
        ExternalReference::heap_is_marking_flag_address(this->isolate()));
    return Load(MachineType::Uint8(), is_marking_addr);
  }

  Node* IsPageFlagSet(Node* object, int mask) {
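    // Mask the object's address down to its page start; the page flags live
    // at a fixed offset in the MemoryChunk header at the start of the page.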
    Node* page = WordAnd(object, IntPtrConstant(~kPageAlignmentMask));
    Node* flags = Load(MachineType::Pointer(), page,
                       IntPtrConstant(MemoryChunk::kFlagsOffset));
    return WordNotEqual(WordAnd(flags, IntPtrConstant(mask)),
                        IntPtrConstant(0));
  }

  Node* IsWhite(Node* object) {
    DCHECK_EQ(strcmp(Marking::kWhiteBitPattern, "00"), 0);
    Node* cell;
    Node* mask;
    GetMarkBit(object, &cell, &mask);
    mask = TruncateIntPtrToInt32(mask);
    // Non-white marking patterns have a 1 in the first bit, so it suffices to
    // check the first bit.
    return Word32Equal(Word32And(Load(MachineType::Int32(), cell), mask),
                       Int32Constant(0));
  }

  void GetMarkBit(Node* object, Node** cell, Node** mask) {
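    // Compute the address of the bitmap cell holding {object}'s mark bit, and
    // the bit's mask within that cell. The marking bitmap lives in the
    // MemoryChunk header and, as the shifts below imply, holds one mark bit
    // per pointer-sized word of the page.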
    Node* page = WordAnd(object, IntPtrConstant(~kPageAlignmentMask));

    {
      // Temp variable to calculate cell offset in bitmap.
      Node* r0;
      int shift = Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 -
                  Bitmap::kBytesPerCellLog2;
      r0 = WordShr(object, IntPtrConstant(shift));
      r0 = WordAnd(r0, IntPtrConstant((kPageAlignmentMask >> shift) &
                                      ~(Bitmap::kBytesPerCell - 1)));
      *cell = IntPtrAdd(IntPtrAdd(page, r0),
                        IntPtrConstant(MemoryChunk::kHeaderSize));
    }
    {
      // Temp variable to calculate bit offset in cell.
      Node* r1;
      r1 = WordShr(object, IntPtrConstant(kPointerSizeLog2));
      r1 = WordAnd(r1, IntPtrConstant((1 << Bitmap::kBitsPerCellLog2) - 1));
      // The least-significant byte of the shift count (e.g. cl on x64) appears
      // to be used automatically, so no manual masking is needed. Uncomment the
      // following line otherwise.
      // WordAnd(r1, IntPtrConstant((1 << kBitsPerByte) - 1)));
      *mask = WordShl(IntPtrConstant(1), r1);
    }
  }

  Node* ShouldSkipFPRegs(Node* mode) {
    return WordEqual(mode, SmiConstant(kDontSaveFPRegs));
  }

  Node* ShouldEmitRememberSet(Node* remembered_set) {
    return WordEqual(remembered_set, SmiConstant(EMIT_REMEMBERED_SET));
  }

  void CallCFunction1WithCallerSavedRegistersMode(MachineType return_type,
                                                  MachineType arg0_type,
                                                  Node* function, Node* arg0,
                                                  Node* mode, Label* next) {
    Label dont_save_fp(this), save_fp(this);
    Branch(ShouldSkipFPRegs(mode), &dont_save_fp, &save_fp);
    BIND(&dont_save_fp);
    {
      CallCFunction1WithCallerSavedRegisters(return_type, arg0_type, function,
                                             arg0, kDontSaveFPRegs);
      Goto(next);
    }

    BIND(&save_fp);
    {
      CallCFunction1WithCallerSavedRegisters(return_type, arg0_type, function,
                                             arg0, kSaveFPRegs);
      Goto(next);
    }
  }

  void CallCFunction3WithCallerSavedRegistersMode(
      MachineType return_type, MachineType arg0_type, MachineType arg1_type,
      MachineType arg2_type, Node* function, Node* arg0, Node* arg1, Node* arg2,
      Node* mode, Label* next) {
    Label dont_save_fp(this), save_fp(this);
    Branch(ShouldSkipFPRegs(mode), &dont_save_fp, &save_fp);
    BIND(&dont_save_fp);
    {
      CallCFunction3WithCallerSavedRegisters(return_type, arg0_type, arg1_type,
                                             arg2_type, function, arg0, arg1,
                                             arg2, kDontSaveFPRegs);
      Goto(next);
    }

    BIND(&save_fp);
    {
      CallCFunction3WithCallerSavedRegisters(return_type, arg0_type, arg1_type,
                                             arg2_type, function, arg0, arg1,
                                             arg2, kSaveFPRegs);
      Goto(next);
    }
  }

  void InsertToStoreBufferAndGoto(Node* isolate, Node* slot, Node* mode,
                                  Label* next) {
    Node* store_buffer_top_addr =
        ExternalConstant(ExternalReference::store_buffer_top(this->isolate()));
    Node* store_buffer_top =
        Load(MachineType::Pointer(), store_buffer_top_addr);
    StoreNoWriteBarrier(MachineType::PointerRepresentation(), store_buffer_top,
                        slot);
    Node* new_store_buffer_top =
        IntPtrAdd(store_buffer_top, IntPtrConstant(kPointerSize));
    StoreNoWriteBarrier(MachineType::PointerRepresentation(),
                        store_buffer_top_addr, new_store_buffer_top);

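    // Presumably the store buffer spans a power-of-two range: masking the new
    // top yields zero exactly when it crosses the buffer boundary, i.e. the
    // buffer is full and must be processed in C++.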
    Node* test = WordAnd(new_store_buffer_top,
                         IntPtrConstant(Heap::store_buffer_mask_constant()));

    Label overflow(this);
    Branch(WordEqual(test, IntPtrConstant(0)), &overflow, next);

    BIND(&overflow);
    {
      Node* function =
          ExternalConstant(ExternalReference::store_buffer_overflow_function());
      CallCFunction1WithCallerSavedRegistersMode(MachineType::Int32(),
                                                 MachineType::Pointer(),
                                                 function, isolate, mode, next);
    }
  }
};

TF_BUILTIN(RecordWrite, RecordWriteCodeStubAssembler) {
  Node* object = BitcastTaggedToWord(Parameter(Descriptor::kObject));
  Node* slot = Parameter(Descriptor::kSlot);
  Node* isolate = Parameter(Descriptor::kIsolate);
  Node* remembered_set = Parameter(Descriptor::kRememberedSet);
  Node* fp_mode = Parameter(Descriptor::kFPMode);

  Node* value = Load(MachineType::Pointer(), slot);

  Label generational_wb(this);
  Label incremental_wb(this);
  Label exit(this);

  Branch(ShouldEmitRememberSet(remembered_set), &generational_wb,
         &incremental_wb);

  BIND(&generational_wb);
  {
    Label test_old_to_new_flags(this);
    Label store_buffer_exit(this), store_buffer_incremental_wb(this);
    // When incremental marking is off, we skip the cross-generation pointer
    // check here, because there are checks for
    // `kPointersFromHereAreInterestingMask` and
    // `kPointersToHereAreInterestingMask` in
    // `src/compiler/<arch>/code-generator-<arch>.cc` before calling this stub,
    // which serve as the cross-generation check.
    Branch(IsMarking(), &test_old_to_new_flags, &store_buffer_exit);

    BIND(&test_old_to_new_flags);
    {
      // TODO(albertnetymk): Try to cache the page flag for value and object,
      // instead of calling IsPageFlagSet each time.
      Node* value_in_new_space =
          IsPageFlagSet(value, MemoryChunk::kIsInNewSpaceMask);
      GotoIfNot(value_in_new_space, &incremental_wb);

      Node* object_in_new_space =
          IsPageFlagSet(object, MemoryChunk::kIsInNewSpaceMask);
      GotoIf(object_in_new_space, &incremental_wb);

      Goto(&store_buffer_incremental_wb);
    }

    BIND(&store_buffer_exit);
    { InsertToStoreBufferAndGoto(isolate, slot, fp_mode, &exit); }

    BIND(&store_buffer_incremental_wb);
    { InsertToStoreBufferAndGoto(isolate, slot, fp_mode, &incremental_wb); }
  }

  BIND(&incremental_wb);
  {
    Label call_incremental_wb(this);

    // There are two cases in which we need to call the incremental write
    // barrier.
    // 1) The {value} is white (i.e. not yet marked).
    GotoIf(IsWhite(value), &call_incremental_wb);

    // 2) is_compacting && {value} is on an evacuation candidate page && the
    //    {object}'s page does not skip evacuation slot recording
    //    (is_compacting is true whenever is_marking is true).
    GotoIfNot(IsPageFlagSet(value, MemoryChunk::kEvacuationCandidateMask),
              &exit);
    GotoIf(
        IsPageFlagSet(object, MemoryChunk::kSkipEvacuationSlotsRecordingMask),
        &exit);

    Goto(&call_incremental_wb);

    BIND(&call_incremental_wb);
    {
      Node* function = ExternalConstant(
          ExternalReference::incremental_marking_record_write_function());
      CallCFunction3WithCallerSavedRegistersMode(
          MachineType::Int32(), MachineType::Pointer(), MachineType::Pointer(),
          MachineType::Pointer(), function, object, slot, isolate, fp_mode,
          &exit);
    }
  }

  BIND(&exit);
  Return(TrueConstant());
}

class DeletePropertyBaseAssembler : public AccessorAssembler {
 public:
  explicit DeletePropertyBaseAssembler(compiler::CodeAssemblerState* state)
      : AccessorAssembler(state) {}

  void DeleteDictionaryProperty(TNode<Object> receiver,
                                TNode<NameDictionary> properties,
                                TNode<Name> name, TNode<Context> context,
                                Label* dont_delete, Label* notfound) {
    TVARIABLE(IntPtrT, var_name_index);
    Label dictionary_found(this, &var_name_index);
    NameDictionaryLookup<NameDictionary>(properties, name, &dictionary_found,
                                         &var_name_index, notfound);

    BIND(&dictionary_found);
    TNode<IntPtrT> key_index = var_name_index.value();
    TNode<Uint32T> details =
        LoadDetailsByKeyIndex<NameDictionary>(properties, key_index);
    GotoIf(IsSetWord32(details, PropertyDetails::kAttributesDontDeleteMask),
           dont_delete);
    // Overwrite the entry itself (see NameDictionary::SetEntry).
    TNode<HeapObject> filler = TheHoleConstant();
    DCHECK(Heap::RootIsImmortalImmovable(Heap::kTheHoleValueRootIndex));
    StoreFixedArrayElement(properties, key_index, filler, SKIP_WRITE_BARRIER);
    StoreValueByKeyIndex<NameDictionary>(properties, key_index, filler,
                                         SKIP_WRITE_BARRIER);
    StoreDetailsByKeyIndex<NameDictionary>(properties, key_index,
                                           SmiConstant(0));

    // Update bookkeeping information (see NameDictionary::ElementRemoved).
    TNode<Smi> nof = GetNumberOfElements<NameDictionary>(properties);
    TNode<Smi> new_nof = SmiSub(nof, SmiConstant(1));
    SetNumberOfElements<NameDictionary>(properties, new_nof);
    TNode<Smi> num_deleted =
        GetNumberOfDeletedElements<NameDictionary>(properties);
    TNode<Smi> new_deleted = SmiAdd(num_deleted, SmiConstant(1));
    SetNumberOfDeletedElements<NameDictionary>(properties, new_deleted);

    // Shrink the dictionary if necessary (see NameDictionary::Shrink).
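    // Shrinking only pays off when at most a quarter of the capacity is in
    // use; small dictionaries (fewer than 16 elements) are left alone.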
    Label shrinking_done(this);
    TNode<Smi> capacity = GetCapacity<NameDictionary>(properties);
    GotoIf(SmiGreaterThan(new_nof, SmiShr(capacity, 2)), &shrinking_done);
    GotoIf(SmiLessThan(new_nof, SmiConstant(16)), &shrinking_done);
    CallRuntime(Runtime::kShrinkPropertyDictionary, context, receiver);
    Goto(&shrinking_done);
    BIND(&shrinking_done);

    Return(TrueConstant());
  }
};

TF_BUILTIN(DeleteProperty, DeletePropertyBaseAssembler) {
  TNode<Object> receiver = CAST(Parameter(Descriptor::kObject));
  TNode<Object> key = CAST(Parameter(Descriptor::kKey));
  TNode<Smi> language_mode = CAST(Parameter(Descriptor::kLanguageMode));
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));

  VARIABLE(var_index, MachineType::PointerRepresentation());
  VARIABLE(var_unique, MachineRepresentation::kTagged, key);
  Label if_index(this), if_unique_name(this), if_notunique(this),
      if_notfound(this), slow(this);

  GotoIf(TaggedIsSmi(receiver), &slow);
  TNode<Map> receiver_map = LoadMap(CAST(receiver));
  TNode<Int32T> instance_type = LoadMapInstanceType(receiver_map);
  GotoIf(IsCustomElementsReceiverInstanceType(instance_type), &slow);
  TryToName(key, &if_index, &var_index, &if_unique_name, &var_unique, &slow,
            &if_notunique);

  BIND(&if_index);
  {
    Comment("integer index");
    Goto(&slow);  // TODO(jkummerow): Implement more smarts here.
  }

  BIND(&if_unique_name);
  {
    Comment("key is unique name");
    TNode<Name> unique = CAST(var_unique.value());
    CheckForAssociatedProtector(unique, &slow);

    Label dictionary(this), dont_delete(this);
    GotoIf(IsDictionaryMap(receiver_map), &dictionary);

    // Fast properties need to clear recorded slots, which can only be done
    // in C++.
    Goto(&slow);

    BIND(&dictionary);
    {
      InvalidateValidityCellIfPrototype(receiver_map);

      TNode<NameDictionary> properties =
          CAST(LoadSlowProperties(CAST(receiver)));
      DeleteDictionaryProperty(receiver, properties, unique, context,
                               &dont_delete, &if_notfound);
    }

    BIND(&dont_delete);
    {
      STATIC_ASSERT(LanguageModeSize == 2);
      GotoIf(SmiNotEqual(language_mode, SmiConstant(LanguageMode::kSloppy)),
             &slow);
      Return(FalseConstant());
    }
  }

  BIND(&if_notunique);
  {
    // If the string was not found in the string table, then no object can
    // have a property with that name.
    TryInternalizeString(key, &if_index, &var_index, &if_unique_name,
                         &var_unique, &if_notfound, &slow);
  }

  BIND(&if_notfound);
  Return(TrueConstant());

  BIND(&slow);
  {
    TailCallRuntime(Runtime::kDeleteProperty, context, receiver, key,
                    language_mode);
  }
}

TF_BUILTIN(ForInEnumerate, CodeStubAssembler) {
  Node* receiver = Parameter(Descriptor::kReceiver);
  Node* context = Parameter(Descriptor::kContext);

  Label if_empty(this), if_runtime(this, Label::kDeferred);
  Node* receiver_map = CheckEnumCache(receiver, &if_empty, &if_runtime);
  Return(receiver_map);

  BIND(&if_empty);
  Return(EmptyFixedArrayConstant());

  BIND(&if_runtime);
  TailCallRuntime(Runtime::kForInEnumerate, context, receiver);
}

TF_BUILTIN(ForInFilter, CodeStubAssembler) {
  Node* key = Parameter(Descriptor::kKey);
  Node* object = Parameter(Descriptor::kObject);
  Node* context = Parameter(Descriptor::kContext);

  CSA_ASSERT(this, IsString(key));

  Label if_true(this), if_false(this);
  TNode<Oddball> result = HasProperty(context, object, key, kForInHasProperty);
  Branch(IsTrue(result), &if_true, &if_false);

  BIND(&if_true);
  Return(key);

  BIND(&if_false);
  Return(UndefinedConstant());
}

TF_BUILTIN(SameValue, CodeStubAssembler) {
  Node* lhs = Parameter(Descriptor::kLeft);
  Node* rhs = Parameter(Descriptor::kRight);

  Label if_true(this), if_false(this);
  BranchIfSameValue(lhs, rhs, &if_true, &if_false);

  BIND(&if_true);
  Return(TrueConstant());

  BIND(&if_false);
  Return(FalseConstant());
}

class InternalBuiltinsAssembler : public CodeStubAssembler {
 public:
  explicit InternalBuiltinsAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

  TNode<IntPtrT> GetPendingMicrotaskCount();
  void SetPendingMicrotaskCount(TNode<IntPtrT> count);

  TNode<FixedArray> GetMicrotaskQueue();
  void SetMicrotaskQueue(TNode<FixedArray> queue);

  TNode<Context> GetCurrentContext();
  void SetCurrentContext(TNode<Context> context);

  void EnterMicrotaskContext(TNode<Context> context);
  void LeaveMicrotaskContext();

  void RunPromiseHook(Runtime::FunctionId id, TNode<Context> context,
                      SloppyTNode<HeapObject> promise_or_capability);

  TNode<Object> GetPendingException() {
    auto ref = ExternalReference::Create(kPendingExceptionAddress, isolate());
    return TNode<Object>::UncheckedCast(
        Load(MachineType::AnyTagged(), ExternalConstant(ref)));
  }
  void ClearPendingException() {
    auto ref = ExternalReference::Create(kPendingExceptionAddress, isolate());
    StoreNoWriteBarrier(MachineRepresentation::kTagged, ExternalConstant(ref),
                        TheHoleConstant());
  }

  TNode<Object> GetScheduledException() {
    auto ref = ExternalReference::scheduled_exception_address(isolate());
    return TNode<Object>::UncheckedCast(
        Load(MachineType::AnyTagged(), ExternalConstant(ref)));
  }
  void ClearScheduledException() {
    auto ref = ExternalReference::scheduled_exception_address(isolate());
    StoreNoWriteBarrier(MachineRepresentation::kTagged, ExternalConstant(ref),
                        TheHoleConstant());
  }

  template <typename Descriptor>
  void GenerateAdaptorWithExitFrameType(
      Builtins::ExitFrameType exit_frame_type);
};

template <typename Descriptor>
void InternalBuiltinsAssembler::GenerateAdaptorWithExitFrameType(
    Builtins::ExitFrameType exit_frame_type) {
  TNode<JSFunction> target = CAST(Parameter(Descriptor::kTarget));
  TNode<Object> new_target = CAST(Parameter(Descriptor::kNewTarget));
  TNode<WordT> c_function =
      UncheckedCast<WordT>(Parameter(Descriptor::kCFunction));

  // The logic contained here is mirrored for TurboFan inlining in
  // JSTypedLowering::ReduceJSCall{Function,Construct}. Keep these in sync.

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  TNode<Context> context =
      CAST(LoadObjectField(target, JSFunction::kContextOffset));

  // Update arguments count for CEntry to contain the number of arguments
  // including the receiver and the extra arguments.
  TNode<Int32T> argc =
      UncheckedCast<Int32T>(Parameter(Descriptor::kActualArgumentsCount));
  argc = Int32Add(
      argc,
      Int32Constant(BuiltinExitFrameConstants::kNumExtraArgsWithReceiver));

  TNode<Code> code = HeapConstant(
      CodeFactory::CEntry(isolate(), 1, kDontSaveFPRegs, kArgvOnStack,
                          exit_frame_type == Builtins::BUILTIN_EXIT));

  // Unconditionally push argc, target and new target as extra stack arguments.
  // They will be used by stack frame iterators when constructing the stack
  // trace.
  TailCallStub(CEntry1ArgvOnStackDescriptor{},  // descriptor
               code, context,       // standard arguments for TailCallStub
               argc, c_function,    // register arguments
               TheHoleConstant(),   // additional stack argument 1 (padding)
               SmiFromInt32(argc),  // additional stack argument 2
               target,              // additional stack argument 3
               new_target);         // additional stack argument 4
}

TF_BUILTIN(AdaptorWithExitFrame, InternalBuiltinsAssembler) {
  GenerateAdaptorWithExitFrameType<Descriptor>(Builtins::EXIT);
}

TF_BUILTIN(AdaptorWithBuiltinExitFrame, InternalBuiltinsAssembler) {
  GenerateAdaptorWithExitFrameType<Descriptor>(Builtins::BUILTIN_EXIT);
}

TNode<IntPtrT> InternalBuiltinsAssembler::GetPendingMicrotaskCount() {
  auto ref = ExternalReference::pending_microtask_count_address(isolate());
  if (kIntSize == 8) {
    return TNode<IntPtrT>::UncheckedCast(
        Load(MachineType::Int64(), ExternalConstant(ref)));
  } else {
    Node* const value = Load(MachineType::Int32(), ExternalConstant(ref));
    return ChangeInt32ToIntPtr(value);
  }
}

void InternalBuiltinsAssembler::SetPendingMicrotaskCount(TNode<IntPtrT> count) {
  auto ref = ExternalReference::pending_microtask_count_address(isolate());
  auto rep = kIntSize == 8 ? MachineRepresentation::kWord64
                           : MachineRepresentation::kWord32;
  if (kIntSize == 4 && kPointerSize == 8) {
    Node* const truncated_count =
        TruncateInt64ToInt32(TNode<Int64T>::UncheckedCast(count));
    StoreNoWriteBarrier(rep, ExternalConstant(ref), truncated_count);
  } else {
    StoreNoWriteBarrier(rep, ExternalConstant(ref), count);
  }
}

TNode<FixedArray> InternalBuiltinsAssembler::GetMicrotaskQueue() {
  return TNode<FixedArray>::UncheckedCast(
      LoadRoot(Heap::kMicrotaskQueueRootIndex));
}

void InternalBuiltinsAssembler::SetMicrotaskQueue(TNode<FixedArray> queue) {
  StoreRoot(Heap::kMicrotaskQueueRootIndex, queue);
}

TNode<Context> InternalBuiltinsAssembler::GetCurrentContext() {
  auto ref = ExternalReference::Create(kContextAddress, isolate());
  return TNode<Context>::UncheckedCast(
      Load(MachineType::AnyTagged(), ExternalConstant(ref)));
}

void InternalBuiltinsAssembler::SetCurrentContext(TNode<Context> context) {
  auto ref = ExternalReference::Create(kContextAddress, isolate());
  StoreNoWriteBarrier(MachineRepresentation::kTagged, ExternalConstant(ref),
                      context);
}

void InternalBuiltinsAssembler::EnterMicrotaskContext(
    TNode<Context> microtask_context) {
  auto ref = ExternalReference::handle_scope_implementer_address(isolate());
  Node* const hsi = Load(MachineType::Pointer(), ExternalConstant(ref));
  StoreNoWriteBarrier(
      MachineType::PointerRepresentation(), hsi,
      IntPtrConstant(HandleScopeImplementerOffsets::kMicrotaskContext),
      BitcastTaggedToWord(microtask_context));

  // Load mirrored std::vector length from
  // HandleScopeImplementer::entered_contexts_count_
  auto type = kSizetSize == 8 ? MachineType::Uint64() : MachineType::Uint32();
  Node* entered_contexts_length = Load(
      type, hsi,
      IntPtrConstant(HandleScopeImplementerOffsets::kEnteredContextsCount));

  auto rep = kSizetSize == 8 ? MachineRepresentation::kWord64
                             : MachineRepresentation::kWord32;

  StoreNoWriteBarrier(
      rep, hsi,
      IntPtrConstant(
          HandleScopeImplementerOffsets::kEnteredContextCountDuringMicrotasks),
      entered_contexts_length);
}

void InternalBuiltinsAssembler::LeaveMicrotaskContext() {
  auto ref = ExternalReference::handle_scope_implementer_address(isolate());

  Node* const hsi = Load(MachineType::Pointer(), ExternalConstant(ref));
  StoreNoWriteBarrier(
      MachineType::PointerRepresentation(), hsi,
      IntPtrConstant(HandleScopeImplementerOffsets::kMicrotaskContext),
      IntPtrConstant(0));
  if (kSizetSize == 4) {
    StoreNoWriteBarrier(
        MachineRepresentation::kWord32, hsi,
        IntPtrConstant(HandleScopeImplementerOffsets::
                           kEnteredContextCountDuringMicrotasks),
        Int32Constant(0));
  } else {
    StoreNoWriteBarrier(
        MachineRepresentation::kWord64, hsi,
        IntPtrConstant(HandleScopeImplementerOffsets::
                           kEnteredContextCountDuringMicrotasks),
        Int64Constant(0));
  }
}

void InternalBuiltinsAssembler::RunPromiseHook(
    Runtime::FunctionId id, TNode<Context> context,
    SloppyTNode<HeapObject> promise_or_capability) {
  Label hook(this, Label::kDeferred), done_hook(this);
  GotoIf(IsDebugActive(), &hook);
  Branch(IsPromiseHookEnabledOrHasAsyncEventDelegate(), &hook, &done_hook);
  BIND(&hook);
  {
    // Get to the underlying JSPromise instance.
    Node* const promise = Select<HeapObject>(
        IsJSPromise(promise_or_capability),
        [=] { return promise_or_capability; },
        [=] {
          return CAST(LoadObjectField(promise_or_capability,
                                      PromiseCapability::kPromiseOffset));
        });
    CallRuntime(id, context, promise);
    Goto(&done_hook);
  }
  BIND(&done_hook);
}

TF_BUILTIN(EnqueueMicrotask, InternalBuiltinsAssembler) {
  Node* microtask = Parameter(Descriptor::kMicrotask);

  TNode<IntPtrT> num_tasks = GetPendingMicrotaskCount();
  TNode<IntPtrT> new_num_tasks = IntPtrAdd(num_tasks, IntPtrConstant(1));
  TNode<FixedArray> queue = GetMicrotaskQueue();
  TNode<IntPtrT> queue_length = LoadAndUntagFixedArrayBaseLength(queue);

  Label if_append(this), if_grow(this), done(this);
  Branch(WordEqual(num_tasks, queue_length), &if_grow, &if_append);

  BIND(&if_grow);
  {
    // Determine the new queue length and check if we need to allocate
    // in large object space (instead of just going to new space, where
    // we also know that we don't need any write barriers for setting
    // up the new queue object).
    Label if_newspace(this), if_lospace(this, Label::kDeferred);
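    // The new capacity is double the current number of tasks, with a minimum
    // of 8 entries.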
    TNode<IntPtrT> new_queue_length =
        IntPtrMax(IntPtrConstant(8), IntPtrAdd(num_tasks, num_tasks));
    Branch(IntPtrLessThanOrEqual(new_queue_length,
                                 IntPtrConstant(FixedArray::kMaxRegularLength)),
           &if_newspace, &if_lospace);

    BIND(&if_newspace);
    {
      // This is the likely case where the new queue fits into new space,
      // and thus we don't need any write barriers for initializing it.
      TNode<FixedArray> new_queue =
          CAST(AllocateFixedArray(PACKED_ELEMENTS, new_queue_length));
      CopyFixedArrayElements(PACKED_ELEMENTS, queue, new_queue, num_tasks,
                             SKIP_WRITE_BARRIER);
      StoreFixedArrayElement(new_queue, num_tasks, microtask,
                             SKIP_WRITE_BARRIER);
      FillFixedArrayWithValue(PACKED_ELEMENTS, new_queue, new_num_tasks,
                              new_queue_length, Heap::kUndefinedValueRootIndex);
      SetMicrotaskQueue(new_queue);
      Goto(&done);
    }

    BIND(&if_lospace);
    {
      // The fallback case where the new queue ends up in large object space.
      TNode<FixedArray> new_queue = CAST(AllocateFixedArray(
          PACKED_ELEMENTS, new_queue_length, INTPTR_PARAMETERS,
          AllocationFlag::kAllowLargeObjectAllocation));
      CopyFixedArrayElements(PACKED_ELEMENTS, queue, new_queue, num_tasks);
      StoreFixedArrayElement(new_queue, num_tasks, microtask);
      FillFixedArrayWithValue(PACKED_ELEMENTS, new_queue, new_num_tasks,
                              new_queue_length, Heap::kUndefinedValueRootIndex);
      SetMicrotaskQueue(new_queue);
      Goto(&done);
    }
  }

  BIND(&if_append);
  {
    StoreFixedArrayElement(queue, num_tasks, microtask);
    Goto(&done);
  }

  BIND(&done);
  SetPendingMicrotaskCount(new_num_tasks);
  Return(UndefinedConstant());
}

TF_BUILTIN(RunMicrotasks, InternalBuiltinsAssembler) {
  // Load the current context from the isolate.
  TNode<Context> current_context = GetCurrentContext();

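  // Microtasks may enqueue further microtasks while running, so the queue is
  // drained in batches: detach and run the current queue, then loop back to
  // pick up anything that was enqueued in the meantime.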
  Label init_queue_loop(this);
  Goto(&init_queue_loop);
  BIND(&init_queue_loop);
  {
    TVARIABLE(IntPtrT, index, IntPtrConstant(0));
    Label loop(this, &index), loop_next(this);

    TNode<IntPtrT> num_tasks = GetPendingMicrotaskCount();
    ReturnIf(IntPtrEqual(num_tasks, IntPtrConstant(0)), UndefinedConstant());

    TNode<FixedArray> queue = GetMicrotaskQueue();

    CSA_ASSERT(this, IntPtrGreaterThanOrEqual(
                         LoadAndUntagFixedArrayBaseLength(queue), num_tasks));
    CSA_ASSERT(this, IntPtrGreaterThan(num_tasks, IntPtrConstant(0)));

    SetPendingMicrotaskCount(IntPtrConstant(0));
    SetMicrotaskQueue(EmptyFixedArrayConstant());

    Goto(&loop);
    BIND(&loop);
    {
      TNode<HeapObject> microtask =
          CAST(LoadFixedArrayElement(queue, index.value()));
      index = IntPtrAdd(index.value(), IntPtrConstant(1));

      CSA_ASSERT(this, TaggedIsNotSmi(microtask));

      TNode<Map> microtask_map = LoadMap(microtask);
      TNode<Int32T> microtask_type = LoadMapInstanceType(microtask_map);

      VARIABLE(var_exception, MachineRepresentation::kTagged,
               TheHoleConstant());
      Label if_exception(this, Label::kDeferred);
      Label is_callable(this), is_callback(this),
          is_promise_fulfill_reaction_job(this),
          is_promise_reject_reaction_job(this),
          is_promise_resolve_thenable_job(this),
          is_unreachable(this, Label::kDeferred);

      int32_t case_values[] = {CALLABLE_TASK_TYPE, CALLBACK_TASK_TYPE,
                               PROMISE_FULFILL_REACTION_JOB_TASK_TYPE,
                               PROMISE_REJECT_REACTION_JOB_TASK_TYPE,
                               PROMISE_RESOLVE_THENABLE_JOB_TASK_TYPE};
      Label* case_labels[] = {
          &is_callable, &is_callback, &is_promise_fulfill_reaction_job,
          &is_promise_reject_reaction_job, &is_promise_resolve_thenable_job};
      static_assert(arraysize(case_values) == arraysize(case_labels), "");
      Switch(microtask_type, &is_unreachable, case_values, case_labels,
             arraysize(case_labels));

      BIND(&is_callable);
      {
        // Enter the context of the {microtask}.
        TNode<Context> microtask_context =
            LoadObjectField<Context>(microtask, CallableTask::kContextOffset);
        TNode<Context> native_context = LoadNativeContext(microtask_context);

        CSA_ASSERT(this, IsNativeContext(native_context));
        EnterMicrotaskContext(microtask_context);
        SetCurrentContext(native_context);

        TNode<JSReceiver> callable = LoadObjectField<JSReceiver>(
            microtask, CallableTask::kCallableOffset);
        Node* const result = CallJS(
            CodeFactory::Call(isolate(), ConvertReceiverMode::kNullOrUndefined),
            microtask_context, callable, UndefinedConstant());
        GotoIfException(result, &if_exception, &var_exception);
        LeaveMicrotaskContext();
        SetCurrentContext(current_context);
        Goto(&loop_next);
      }

      BIND(&is_callback);
      {
        Node* const microtask_callback =
            LoadObjectField(microtask, CallbackTask::kCallbackOffset);
        Node* const microtask_data =
            LoadObjectField(microtask, CallbackTask::kDataOffset);

        // If this turns out to become a bottleneck because of the calls
        // to C++ via CEntry, we can choose to speed them up using a
        // mechanism similar to the one we use for the CallApiFunction stub,
        // except that calling the MicrotaskCallback is even easier, since
        // it doesn't accept any tagged parameters, doesn't return a value
        // and ignores exceptions.
        //
        // But from our current measurements it doesn't seem to be a
        // serious performance problem, even if the microtask queue is full
        // of CallHandlerTasks (which is not a realistic use case anyway).
        Node* const result =
            CallRuntime(Runtime::kRunMicrotaskCallback, current_context,
                        microtask_callback, microtask_data);
        GotoIfException(result, &if_exception, &var_exception);
        Goto(&loop_next);
      }

      BIND(&is_promise_resolve_thenable_job);
      {
        // Enter the context of the {microtask}.
        TNode<Context> microtask_context = LoadObjectField<Context>(
            microtask, PromiseResolveThenableJobTask::kContextOffset);
        TNode<Context> native_context = LoadNativeContext(microtask_context);
        CSA_ASSERT(this, IsNativeContext(native_context));
        EnterMicrotaskContext(microtask_context);
        SetCurrentContext(native_context);

        Node* const promise_to_resolve = LoadObjectField(
            microtask, PromiseResolveThenableJobTask::kPromiseToResolveOffset);
        Node* const then = LoadObjectField(
            microtask, PromiseResolveThenableJobTask::kThenOffset);
        Node* const thenable = LoadObjectField(
            microtask, PromiseResolveThenableJobTask::kThenableOffset);

        Node* const result =
            CallBuiltin(Builtins::kPromiseResolveThenableJob, native_context,
                        promise_to_resolve, thenable, then);
        GotoIfException(result, &if_exception, &var_exception);
        LeaveMicrotaskContext();
        SetCurrentContext(current_context);
        Goto(&loop_next);
      }

      BIND(&is_promise_fulfill_reaction_job);
      {
        // Enter the context of the {microtask}.
        TNode<Context> microtask_context = LoadObjectField<Context>(
            microtask, PromiseReactionJobTask::kContextOffset);
        TNode<Context> native_context = LoadNativeContext(microtask_context);
        CSA_ASSERT(this, IsNativeContext(native_context));
        EnterMicrotaskContext(microtask_context);
        SetCurrentContext(native_context);

        Node* const argument =
            LoadObjectField(microtask, PromiseReactionJobTask::kArgumentOffset);
        Node* const handler =
            LoadObjectField(microtask, PromiseReactionJobTask::kHandlerOffset);
        Node* const promise_or_capability = LoadObjectField(
            microtask, PromiseReactionJobTask::kPromiseOrCapabilityOffset);

        // Run the promise before/debug hook if enabled.
        RunPromiseHook(Runtime::kPromiseHookBefore, microtask_context,
                       promise_or_capability);

        Node* const result =
            CallBuiltin(Builtins::kPromiseFulfillReactionJob, microtask_context,
                        argument, handler, promise_or_capability);
        GotoIfException(result, &if_exception, &var_exception);

        // Run the promise after/debug hook if enabled.
        RunPromiseHook(Runtime::kPromiseHookAfter, microtask_context,
                       promise_or_capability);

        LeaveMicrotaskContext();
        SetCurrentContext(current_context);
        Goto(&loop_next);
      }

      BIND(&is_promise_reject_reaction_job);
      {
        // Enter the context of the {microtask}.
        TNode<Context> microtask_context = LoadObjectField<Context>(
            microtask, PromiseReactionJobTask::kContextOffset);
        TNode<Context> native_context = LoadNativeContext(microtask_context);
        CSA_ASSERT(this, IsNativeContext(native_context));
        EnterMicrotaskContext(microtask_context);
        SetCurrentContext(native_context);

        Node* const argument =
            LoadObjectField(microtask, PromiseReactionJobTask::kArgumentOffset);
        Node* const handler =
            LoadObjectField(microtask, PromiseReactionJobTask::kHandlerOffset);
        Node* const promise_or_capability = LoadObjectField(
            microtask, PromiseReactionJobTask::kPromiseOrCapabilityOffset);

        // Run the promise before/debug hook if enabled.
        RunPromiseHook(Runtime::kPromiseHookBefore, microtask_context,
                       promise_or_capability);

        Node* const result =
            CallBuiltin(Builtins::kPromiseRejectReactionJob, microtask_context,
                        argument, handler, promise_or_capability);
        GotoIfException(result, &if_exception, &var_exception);

        // Run the promise after/debug hook if enabled.
        RunPromiseHook(Runtime::kPromiseHookAfter, microtask_context,
                       promise_or_capability);

        LeaveMicrotaskContext();
        SetCurrentContext(current_context);
        Goto(&loop_next);
      }

      BIND(&is_unreachable);
      Unreachable();

      BIND(&if_exception);
      {
        // Report unhandled exceptions from microtasks.
        CallRuntime(Runtime::kReportMessage, current_context,
                    var_exception.value());
        LeaveMicrotaskContext();
        SetCurrentContext(current_context);
        Goto(&loop_next);
      }

      BIND(&loop_next);
      Branch(IntPtrLessThan(index.value(), num_tasks), &loop, &init_queue_loop);
    }
  }
}

TF_BUILTIN(AllocateInNewSpace, CodeStubAssembler) {
  TNode<Int32T> requested_size =
      UncheckedCast<Int32T>(Parameter(Descriptor::kRequestedSize));

  TailCallRuntime(Runtime::kAllocateInNewSpace, NoContextConstant(),
                  SmiFromInt32(requested_size));
}

TF_BUILTIN(AllocateInOldSpace, CodeStubAssembler) {
  TNode<Int32T> requested_size =
      UncheckedCast<Int32T>(Parameter(Descriptor::kRequestedSize));

  int flags = AllocateTargetSpace::encode(OLD_SPACE);
  TailCallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
                  SmiFromInt32(requested_size), SmiConstant(flags));
}

TF_BUILTIN(Abort, CodeStubAssembler) {
  TNode<Smi> message_id = CAST(Parameter(Descriptor::kMessageOrMessageId));
  TailCallRuntime(Runtime::kAbort, NoContextConstant(), message_id);
}

TF_BUILTIN(AbortJS, CodeStubAssembler) {
  TNode<String> message = CAST(Parameter(Descriptor::kMessageOrMessageId));
  TailCallRuntime(Runtime::kAbortJS, NoContextConstant(), message);
}

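// Each CEntry variant below is identified by its result size (1 or 2 words),
// whether FP registers are saved, how argv is passed (on the stack or in a
// register), and whether a builtin exit frame is used; the parameters passed
// to Generate_CEntry mirror the name.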
void Builtins::Generate_CEntry_Return1_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kDontSaveFPRegs, kArgvOnStack, false);
}

void Builtins::Generate_CEntry_Return1_DontSaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kDontSaveFPRegs, kArgvOnStack, true);
}

void Builtins::
    Generate_CEntry_Return1_DontSaveFPRegs_ArgvInRegister_NoBuiltinExit(
        MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kDontSaveFPRegs, kArgvInRegister, false);
}

void Builtins::Generate_CEntry_Return1_SaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kSaveFPRegs, kArgvOnStack, false);
}

void Builtins::Generate_CEntry_Return1_SaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kSaveFPRegs, kArgvOnStack, true);
}

void Builtins::Generate_CEntry_Return2_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kDontSaveFPRegs, kArgvOnStack, false);
}

void Builtins::Generate_CEntry_Return2_DontSaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kDontSaveFPRegs, kArgvOnStack, true);
}

void Builtins::
    Generate_CEntry_Return2_DontSaveFPRegs_ArgvInRegister_NoBuiltinExit(
        MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kDontSaveFPRegs, kArgvInRegister, false);
}

void Builtins::Generate_CEntry_Return2_SaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kSaveFPRegs, kArgvOnStack, false);
}

void Builtins::Generate_CEntry_Return2_SaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kSaveFPRegs, kArgvOnStack, true);
}

void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
  // CallApiGetterStub only exists as a stub to avoid duplicating code between
  // here and code-stubs-<arch>.cc. For example, see CallApiFunctionAndReturn.
  // Here we abuse the instantiated stub to generate code.
  CallApiGetterStub stub(masm->isolate());
  stub.Generate(masm);
}

void Builtins::Generate_CallApiCallback_Argc0(MacroAssembler* masm) {
  // The common variants of CallApiCallbackStub (i.e. all that are embedded into
  // the snapshot) are generated as builtins. The rest remain available as code
  // stubs. Here we abuse the instantiated stub to generate code and avoid
  // duplication.
  const int kArgc = 0;
  CallApiCallbackStub stub(masm->isolate(), kArgc);
  stub.Generate(masm);
}

void Builtins::Generate_CallApiCallback_Argc1(MacroAssembler* masm) {
  // The common variants of CallApiCallbackStub (i.e. all that are embedded into
  // the snapshot) are generated as builtins. The rest remain available as code
  // stubs. Here we abuse the instantiated stub to generate code and avoid
  // duplication.
  const int kArgc = 1;
  CallApiCallbackStub stub(masm->isolate(), kArgc);
  stub.Generate(masm);
}

// ES6 [[Get]] operation.
TF_BUILTIN(GetProperty, CodeStubAssembler) {
  Label call_runtime(this, Label::kDeferred), return_undefined(this), end(this);

  Node* object = Parameter(Descriptor::kObject);
  Node* key = Parameter(Descriptor::kKey);
  Node* context = Parameter(Descriptor::kContext);
  VARIABLE(var_result, MachineRepresentation::kTagged);

  CodeStubAssembler::LookupInHolder lookup_property_in_holder =
      [=, &var_result, &end](Node* receiver, Node* holder, Node* holder_map,
                             Node* holder_instance_type, Node* unique_name,
                             Label* next_holder, Label* if_bailout) {
        VARIABLE(var_value, MachineRepresentation::kTagged);
        Label if_found(this);
        TryGetOwnProperty(context, receiver, holder, holder_map,
                          holder_instance_type, unique_name, &if_found,
                          &var_value, next_holder, if_bailout);
        BIND(&if_found);
        {
          var_result.Bind(var_value.value());
          Goto(&end);
        }
      };

  CodeStubAssembler::LookupInHolder lookup_element_in_holder =
      [=](Node* receiver, Node* holder, Node* holder_map,
          Node* holder_instance_type, Node* index, Label* next_holder,
          Label* if_bailout) {
        // Not supported yet.
        Use(next_holder);
        Goto(if_bailout);
      };

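  // Walk the prototype chain, performing the named-property lookup inline in
  // each holder; element (integer-indexed) lookups are not handled inline and
  // bail out to the runtime below.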
  TryPrototypeChainLookup(object, key, lookup_property_in_holder,
                          lookup_element_in_holder, &return_undefined,
                          &call_runtime);

  BIND(&return_undefined);
  {
    var_result.Bind(UndefinedConstant());
    Goto(&end);
  }

  BIND(&call_runtime);
  {
    var_result.Bind(CallRuntime(Runtime::kGetProperty, context, object, key));
    Goto(&end);
  }

  BIND(&end);
  Return(var_result.value());
}

// ES6 [[Set]] operation.
TF_BUILTIN(SetProperty, CodeStubAssembler) {
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));
  TNode<Object> receiver = CAST(Parameter(Descriptor::kReceiver));
  TNode<Object> key = CAST(Parameter(Descriptor::kKey));
  TNode<Object> value = CAST(Parameter(Descriptor::kValue));

  KeyedStoreGenericGenerator::SetProperty(state(), context, receiver, key,
                                          value, LanguageMode::kStrict);
}

}  // namespace internal
}  // namespace v8