// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/builtins/builtins-constructor.h"
#include "src/ast/ast.h"
#include "src/builtins/builtins-utils.h"
#include "src/builtins/builtins.h"
#include "src/code-factory.h"
#include "src/code-stub-assembler.h"
#include "src/counters.h"
#include "src/interface-descriptors.h"
#include "src/objects-inl.h"

namespace v8 {
namespace internal {

typedef compiler::Node Node;

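// Allocates and initializes a JSFunction for the given SharedFunctionInfo
// without calling into the runtime. A sketch of JS-level cases that reach
// the different map choices below (illustrative, not exhaustive):
//
//   function f() {}                // sloppy function map
//   'use strict'; function s() {}  // strict function map
//   function* g() {}               // generator function map
//   ({ m() {} });                  // concise method: map without prototype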
Node* ConstructorBuiltinsAssembler::EmitFastNewClosure(Node* shared_info,
                                                       Node* feedback_vector,
                                                       Node* slot,
                                                       Node* context) {
  typedef compiler::CodeAssembler::Label Label;
  typedef compiler::CodeAssembler::Variable Variable;

  Isolate* isolate = this->isolate();
  Factory* factory = isolate->factory();
  IncrementCounter(isolate->counters()->fast_new_closure_total(), 1);

  // Create a new closure from the given function info in new space
  Node* result = Allocate(JSFunction::kSize);

  // Calculate the index of the map we should install on the function based on
  // the FunctionKind and LanguageMode of the function.
  // Note: Must be kept in sync with Context::FunctionMapIndex
  Node* compiler_hints =
      LoadObjectField(shared_info, SharedFunctionInfo::kCompilerHintsOffset,
                      MachineType::Uint32());
  Node* is_strict = Word32And(
      compiler_hints, Int32Constant(1 << SharedFunctionInfo::kStrictModeBit));

  Label if_normal(this), if_generator(this), if_async(this),
      if_class_constructor(this), if_function_without_prototype(this),
      load_map(this);
  Variable map_index(this, MachineType::PointerRepresentation());

  STATIC_ASSERT(FunctionKind::kNormalFunction == 0);
  Node* is_not_normal =
      Word32And(compiler_hints,
                Int32Constant(SharedFunctionInfo::kAllFunctionKindBitsMask));
  GotoIfNot(is_not_normal, &if_normal);

  Node* is_generator = Word32And(
      compiler_hints, Int32Constant(FunctionKind::kGeneratorFunction
                                    << SharedFunctionInfo::kFunctionKindShift));
  GotoIf(is_generator, &if_generator);

  Node* is_async = Word32And(
      compiler_hints, Int32Constant(FunctionKind::kAsyncFunction
                                    << SharedFunctionInfo::kFunctionKindShift));
  GotoIf(is_async, &if_async);

  Node* is_class_constructor = Word32And(
      compiler_hints, Int32Constant(FunctionKind::kClassConstructor
                                    << SharedFunctionInfo::kFunctionKindShift));
  GotoIf(is_class_constructor, &if_class_constructor);

  if (FLAG_debug_code) {
    // The function must be one of the kinds that has no prototype.
    CSA_ASSERT(
        this,
        Word32And(compiler_hints,
                  Int32Constant((FunctionKind::kAccessorFunction |
                                 FunctionKind::kArrowFunction |
                                 FunctionKind::kConciseMethod)
                                << SharedFunctionInfo::kFunctionKindShift)));
  }
  Goto(&if_function_without_prototype);

  Bind(&if_normal);
  {
    map_index.Bind(SelectIntPtrConstant(is_strict,
                                        Context::STRICT_FUNCTION_MAP_INDEX,
                                        Context::SLOPPY_FUNCTION_MAP_INDEX));
    Goto(&load_map);
  }

  Bind(&if_generator);
  {
    map_index.Bind(IntPtrConstant(Context::GENERATOR_FUNCTION_MAP_INDEX));
    Goto(&load_map);
  }

  Bind(&if_async);
  {
    map_index.Bind(IntPtrConstant(Context::ASYNC_FUNCTION_MAP_INDEX));
    Goto(&load_map);
  }

  Bind(&if_class_constructor);
  {
    map_index.Bind(IntPtrConstant(Context::CLASS_FUNCTION_MAP_INDEX));
    Goto(&load_map);
  }

  Bind(&if_function_without_prototype);
  {
    map_index.Bind(
        IntPtrConstant(Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX));
    Goto(&load_map);
  }

  Bind(&load_map);

  // Get the function map in the current native context and set that
  // as the map of the allocated object.
  Node* native_context = LoadNativeContext(context);
  Node* map_slot_value =
      LoadFixedArrayElement(native_context, map_index.value());
  StoreMapNoWriteBarrier(result, map_slot_value);

  // Initialize the rest of the function.
  Node* empty_fixed_array = HeapConstant(factory->empty_fixed_array());
  StoreObjectFieldNoWriteBarrier(result, JSObject::kPropertiesOffset,
                                 empty_fixed_array);
  StoreObjectFieldNoWriteBarrier(result, JSObject::kElementsOffset,
                                 empty_fixed_array);
  Node* literals_cell = LoadFixedArrayElement(
      feedback_vector, slot, 0, CodeStubAssembler::SMI_PARAMETERS);
  {
    // Bump the closure counter encoded in the cell's map.
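    // The cell's map encodes how many closures have been created from this
    // feedback slot so far, as a small monotonic state machine:
    //   NoClosuresCellMap -> OneClosureCellMap -> ManyClosuresCellMap
    // Only the map changes; the cell's value is left untouched, and these
    // maps are strong roots, which is why StoreMapNoWriteBarrier suffices.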
    Node* cell_map = LoadMap(literals_cell);
    Label no_closures(this), one_closure(this), cell_done(this);

    GotoIf(IsNoClosuresCellMap(cell_map), &no_closures);
    GotoIf(IsOneClosureCellMap(cell_map), &one_closure);
    CSA_ASSERT(this, IsManyClosuresCellMap(cell_map));
    Goto(&cell_done);

    Bind(&no_closures);
    StoreMapNoWriteBarrier(literals_cell, Heap::kOneClosureCellMapRootIndex);
    Goto(&cell_done);

    Bind(&one_closure);
    StoreMapNoWriteBarrier(literals_cell, Heap::kManyClosuresCellMapRootIndex);
    Goto(&cell_done);

    Bind(&cell_done);
  }
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kFeedbackVectorOffset,
                                 literals_cell);
  StoreObjectFieldNoWriteBarrier(
      result, JSFunction::kPrototypeOrInitialMapOffset, TheHoleConstant());
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kSharedFunctionInfoOffset,
                                 shared_info);
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kContextOffset, context);
  Handle<Code> lazy_builtin_handle(
      isolate->builtins()->builtin(Builtins::kCompileLazy));
  Node* lazy_builtin = HeapConstant(lazy_builtin_handle);
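  // The code entry is a raw instruction address: untag the code object
  // (subtract kHeapObjectTag) and skip past the Code header to reach the
  // first instruction of the lazy-compile stub.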
  Node* lazy_builtin_entry =
      IntPtrAdd(BitcastTaggedToWord(lazy_builtin),
                IntPtrConstant(Code::kHeaderSize - kHeapObjectTag));
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kCodeEntryOffset,
                                 lazy_builtin_entry,
                                 MachineType::PointerRepresentation());
  StoreObjectFieldNoWriteBarrier(result, JSFunction::kNextFunctionLinkOffset,
                                 UndefinedConstant());

  return result;
}

TF_BUILTIN(FastNewClosure, ConstructorBuiltinsAssembler) {
  Node* shared = Parameter(FastNewClosureDescriptor::kSharedFunctionInfo);
  Node* context = Parameter(FastNewClosureDescriptor::kContext);
  Node* vector = Parameter(FastNewClosureDescriptor::kVector);
  Node* slot = Parameter(FastNewClosureDescriptor::kSlot);
  Return(EmitFastNewClosure(shared, vector, slot, context));
}

TF_BUILTIN(FastNewObject, ConstructorBuiltinsAssembler) {
  typedef FastNewObjectDescriptor Descriptor;
  Node* context = Parameter(Descriptor::kContext);
  Node* target = Parameter(Descriptor::kTarget);
  Node* new_target = Parameter(Descriptor::kNewTarget);

  Label call_runtime(this);

  Node* result = EmitFastNewObject(context, target, new_target, &call_runtime);
  Return(result);

  Bind(&call_runtime);
  TailCallRuntime(Runtime::kNewObject, context, target, new_target);
}

Node* ConstructorBuiltinsAssembler::EmitFastNewObject(Node* context,
                                                      Node* target,
                                                      Node* new_target) {
  Variable var_obj(this, MachineRepresentation::kTagged);
  Label call_runtime(this), end(this);

  Node* result = EmitFastNewObject(context, target, new_target, &call_runtime);
  var_obj.Bind(result);
  Goto(&end);

  Bind(&call_runtime);
  var_obj.Bind(CallRuntime(Runtime::kNewObject, context, target, new_target));
  Goto(&end);

  Bind(&end);
  return var_obj.value();
}

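// Fast path for new-object allocation: creates an instance from new_target's
// initial map and jumps to |call_runtime| whenever that is unsafe, e.g. when
// new_target is not a JSFunction, has no initial map yet, or is not the
// constructor recorded in that map (as can happen with Reflect.construct).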
Node* ConstructorBuiltinsAssembler::EmitFastNewObject(
    Node* context, Node* target, Node* new_target,
    CodeAssemblerLabel* call_runtime) {
  CSA_ASSERT(this, HasInstanceType(target, JS_FUNCTION_TYPE));
  CSA_ASSERT(this, IsJSReceiver(new_target));

  // Verify that the new target is a JSFunction.
  Label fast(this), end(this);
  GotoIf(HasInstanceType(new_target, JS_FUNCTION_TYPE), &fast);
  Goto(call_runtime);

  Bind(&fast);

  // Load the initial map and verify that it's in fact a map.
  Node* initial_map =
      LoadObjectField(new_target, JSFunction::kPrototypeOrInitialMapOffset);
  GotoIf(TaggedIsSmi(initial_map), call_runtime);
  GotoIf(DoesntHaveInstanceType(initial_map, MAP_TYPE), call_runtime);

  // Fall back to runtime if the target differs from the new target's
  // initial map constructor.
  Node* new_target_constructor =
      LoadObjectField(initial_map, Map::kConstructorOrBackPointerOffset);
  GotoIf(WordNotEqual(target, new_target_constructor), call_runtime);

  Node* instance_size_words = ChangeUint32ToWord(LoadObjectField(
      initial_map, Map::kInstanceSizeOffset, MachineType::Uint8()));
  Node* instance_size =
      WordShl(instance_size_words, IntPtrConstant(kPointerSizeLog2));

  Node* object = Allocate(instance_size);
  StoreMapNoWriteBarrier(object, initial_map);
  Node* empty_array = LoadRoot(Heap::kEmptyFixedArrayRootIndex);
  StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOffset,
                                 empty_array);
  StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset,
                                 empty_array);

  instance_size_words = ChangeUint32ToWord(LoadObjectField(
      initial_map, Map::kInstanceSizeOffset, MachineType::Uint8()));
  instance_size =
      WordShl(instance_size_words, IntPtrConstant(kPointerSizeLog2));

  // Perform in-object slack tracking if requested.
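  // While the map's ConstructionCounter is non-zero, instances are allocated
  // at the generous initial size, unused fields are filled with one-pointer
  // fillers, and the counter is decremented; once it reaches zero,
  // Runtime::kFinalizeInstanceSize shrinks the map to the observed size.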
  Node* bit_field3 = LoadMapBitField3(initial_map);
  Label slack_tracking(this), finalize(this, Label::kDeferred), done(this);
  GotoIf(IsSetWord32<Map::ConstructionCounter>(bit_field3), &slack_tracking);

  // Initialize remaining fields.
  {
    Comment("no slack tracking");
    InitializeFieldsWithRoot(object, IntPtrConstant(JSObject::kHeaderSize),
                             instance_size, Heap::kUndefinedValueRootIndex);
    Goto(&end);
  }

  {
    Bind(&slack_tracking);

    // Decrease generous allocation count.
    STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
    Comment("update allocation count");
    Node* new_bit_field3 = Int32Sub(
        bit_field3, Int32Constant(1 << Map::ConstructionCounter::kShift));
    StoreObjectFieldNoWriteBarrier(initial_map, Map::kBitField3Offset,
                                   new_bit_field3,
                                   MachineRepresentation::kWord32);
    GotoIf(IsClearWord32<Map::ConstructionCounter>(new_bit_field3), &finalize);

    Node* unused_fields = LoadObjectField(
        initial_map, Map::kUnusedPropertyFieldsOffset, MachineType::Uint8());
    Node* used_size =
        IntPtrSub(instance_size, WordShl(ChangeUint32ToWord(unused_fields),
                                         IntPtrConstant(kPointerSizeLog2)));

    Comment("initialize filler fields (no finalize)");
    InitializeFieldsWithRoot(object, used_size, instance_size,
                             Heap::kOnePointerFillerMapRootIndex);

    Comment("initialize undefined fields (no finalize)");
    InitializeFieldsWithRoot(object, IntPtrConstant(JSObject::kHeaderSize),
                             used_size, Heap::kUndefinedValueRootIndex);
    Goto(&end);
  }

  {
    // Finalize the instance size.
    Bind(&finalize);

    Node* unused_fields = LoadObjectField(
        initial_map, Map::kUnusedPropertyFieldsOffset, MachineType::Uint8());
    Node* used_size =
        IntPtrSub(instance_size, WordShl(ChangeUint32ToWord(unused_fields),
                                         IntPtrConstant(kPointerSizeLog2)));

    Comment("initialize filler fields (finalize)");
    InitializeFieldsWithRoot(object, used_size, instance_size,
                             Heap::kOnePointerFillerMapRootIndex);

    Comment("initialize undefined fields (finalize)");
    InitializeFieldsWithRoot(object, IntPtrConstant(JSObject::kHeaderSize),
                             used_size, Heap::kUndefinedValueRootIndex);

    CallRuntime(Runtime::kFinalizeInstanceSize, context, initial_map);
    Goto(&end);
  }

  Bind(&end);
  return object;
}

Node* ConstructorBuiltinsAssembler::EmitFastNewFunctionContext(
    Node* function, Node* slots, Node* context, ScopeType scope_type) {
  slots = ChangeUint32ToWord(slots);

  // TODO(ishell): Use CSA::OptimalParameterMode() here.
  CodeStubAssembler::ParameterMode mode = CodeStubAssembler::INTPTR_PARAMETERS;
  Node* min_context_slots = IntPtrConstant(Context::MIN_CONTEXT_SLOTS);
  Node* length = IntPtrAdd(slots, min_context_slots);
  Node* size = GetFixedArrayAllocationSize(length, FAST_ELEMENTS, mode);

  // Create a new function context in new space.
  Node* function_context = Allocate(size);

  Heap::RootListIndex context_type;
  switch (scope_type) {
    case EVAL_SCOPE:
      context_type = Heap::kEvalContextMapRootIndex;
      break;
    case FUNCTION_SCOPE:
      context_type = Heap::kFunctionContextMapRootIndex;
      break;
    default:
      UNREACHABLE();
  }
  StoreMapNoWriteBarrier(function_context, context_type);
  StoreObjectFieldNoWriteBarrier(function_context, Context::kLengthOffset,
                                 SmiTag(length));

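  // A function context is laid out like a FixedArray: map and length, then
  // the fixed slots (closure, previous context, extension, native context),
  // followed by one slot per context-allocated variable.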
  // Set up the fixed slots.
  StoreFixedArrayElement(function_context, Context::CLOSURE_INDEX, function,
                         SKIP_WRITE_BARRIER);
  StoreFixedArrayElement(function_context, Context::PREVIOUS_INDEX, context,
                         SKIP_WRITE_BARRIER);
  StoreFixedArrayElement(function_context, Context::EXTENSION_INDEX,
                         TheHoleConstant(), SKIP_WRITE_BARRIER);

  // Copy the native context from the previous context.
  Node* native_context = LoadNativeContext(context);
  StoreFixedArrayElement(function_context, Context::NATIVE_CONTEXT_INDEX,
                         native_context, SKIP_WRITE_BARRIER);

  // Initialize the rest of the slots to undefined.
  Node* undefined = UndefinedConstant();
  BuildFastFixedArrayForEach(
      function_context, FAST_ELEMENTS, min_context_slots, length,
      [this, undefined](Node* context, Node* offset) {
        StoreNoWriteBarrier(MachineRepresentation::kTagged, context, offset,
                            undefined);
      },
      mode);

  return function_context;
}

// static
int ConstructorBuiltinsAssembler::MaximumFunctionContextSlots() {
  return FLAG_test_small_max_function_context_stub_size ? kSmallMaximumSlots
                                                        : kMaximumSlots;
}

TF_BUILTIN(FastNewFunctionContextEval, ConstructorBuiltinsAssembler) {
  Node* function = Parameter(FastNewFunctionContextDescriptor::kFunction);
  Node* slots = Parameter(FastNewFunctionContextDescriptor::kSlots);
  Node* context = Parameter(FastNewFunctionContextDescriptor::kContext);
  Return(EmitFastNewFunctionContext(function, slots, context,
                                    ScopeType::EVAL_SCOPE));
}

TF_BUILTIN(FastNewFunctionContextFunction, ConstructorBuiltinsAssembler) {
  Node* function = Parameter(FastNewFunctionContextDescriptor::kFunction);
  Node* slots = Parameter(FastNewFunctionContextDescriptor::kSlots);
  Node* context = Parameter(FastNewFunctionContextDescriptor::kContext);
  Return(EmitFastNewFunctionContext(function, slots, context,
                                    ScopeType::FUNCTION_SCOPE));
}

Handle<Code> Builtins::NewFunctionContext(ScopeType scope_type) {
  switch (scope_type) {
    case ScopeType::EVAL_SCOPE:
      return FastNewFunctionContextEval();
    case ScopeType::FUNCTION_SCOPE:
      return FastNewFunctionContextFunction();
    default:
      UNREACHABLE();
  }
  return Handle<Code>::null();
}

Node* ConstructorBuiltinsAssembler::EmitFastCloneRegExp(Node* closure,
                                                        Node* literal_index,
                                                        Node* pattern,
                                                        Node* flags,
                                                        Node* context) {
  typedef CodeStubAssembler::Label Label;
  typedef CodeStubAssembler::Variable Variable;
  typedef compiler::Node Node;

  Label call_runtime(this, Label::kDeferred), end(this);

  Variable result(this, MachineRepresentation::kTagged);

  Node* cell = LoadObjectField(closure, JSFunction::kFeedbackVectorOffset);
  Node* feedback_vector = LoadObjectField(cell, Cell::kValueOffset);
  Node* boilerplate = LoadFixedArrayElement(feedback_vector, literal_index, 0,
                                            CodeStubAssembler::SMI_PARAMETERS);
  GotoIf(IsUndefined(boilerplate), &call_runtime);

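  // A JSRegExp boilerplate has a fixed size, so the clone is a straight
  // field-by-field copy; no write barriers are needed because the copy is
  // freshly allocated.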
  {
    int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
    Node* copy = Allocate(size);
    for (int offset = 0; offset < size; offset += kPointerSize) {
      Node* value = LoadObjectField(boilerplate, offset);
      StoreObjectFieldNoWriteBarrier(copy, offset, value);
    }
    result.Bind(copy);
    Goto(&end);
  }

  Bind(&call_runtime);
  {
    result.Bind(CallRuntime(Runtime::kCreateRegExpLiteral, context, closure,
                            literal_index, pattern, flags));
    Goto(&end);
  }

  Bind(&end);
  return result.value();
}

TF_BUILTIN(FastCloneRegExp, ConstructorBuiltinsAssembler) {
  Node* closure = Parameter(FastCloneRegExpDescriptor::kClosure);
  Node* literal_index = Parameter(FastCloneRegExpDescriptor::kLiteralIndex);
  Node* pattern = Parameter(FastCloneRegExpDescriptor::kPattern);
  Node* flags = Parameter(FastCloneRegExpDescriptor::kFlags);
  Node* context = Parameter(FastCloneRegExpDescriptor::kContext);

  Return(EmitFastCloneRegExp(closure, literal_index, pattern, flags, context));
}

Node* ConstructorBuiltinsAssembler::NonEmptyShallowClone(
    Node* boilerplate, Node* boilerplate_map, Node* boilerplate_elements,
    Node* allocation_site, Node* capacity, ElementsKind kind) {
  typedef CodeStubAssembler::ParameterMode ParameterMode;

  ParameterMode param_mode = OptimalParameterMode();

  Node* length = LoadJSArrayLength(boilerplate);
  capacity = TaggedToParameter(capacity, param_mode);

  Node *array, *elements;
  std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
      kind, boilerplate_map, length, allocation_site, capacity, param_mode);

  Comment("copy elements header");
  // Header consists of map and length.
  STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);
  StoreMap(elements, LoadMap(boilerplate_elements));
  {
    int offset = FixedArrayBase::kLengthOffset;
    StoreObjectFieldNoWriteBarrier(
        elements, offset, LoadObjectField(boilerplate_elements, offset));
  }

  length = TaggedToParameter(length, param_mode);

  Comment("copy boilerplate elements");
  CopyFixedArrayElements(kind, boilerplate_elements, elements, length,
                         SKIP_WRITE_BARRIER, param_mode);
  IncrementCounter(isolate()->counters()->inlined_copied_elements(), 1);

  return array;
}

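// Clones a boilerplate JSArray found in the closure's feedback vector. Four
// cases are handled below: zero-capacity arrays, copy-on-write elements
// (which are shared rather than copied), fast elements, and fast double
// elements; anything unexpected falls back to |call_runtime|.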
Node* ConstructorBuiltinsAssembler::EmitFastCloneShallowArray(
    Node* closure, Node* literal_index, Node* context,
    CodeAssemblerLabel* call_runtime, AllocationSiteMode allocation_site_mode) {
  typedef CodeStubAssembler::Label Label;
  typedef CodeStubAssembler::Variable Variable;
  typedef compiler::Node Node;

  Label zero_capacity(this), cow_elements(this), fast_elements(this),
      return_result(this);
  Variable result(this, MachineRepresentation::kTagged);

  Node* cell = LoadObjectField(closure, JSFunction::kFeedbackVectorOffset);
  Node* feedback_vector = LoadObjectField(cell, Cell::kValueOffset);
  Node* allocation_site = LoadFixedArrayElement(
      feedback_vector, literal_index, 0, CodeStubAssembler::SMI_PARAMETERS);

  GotoIf(IsUndefined(allocation_site), call_runtime);
  allocation_site = LoadFixedArrayElement(feedback_vector, literal_index, 0,
                                          CodeStubAssembler::SMI_PARAMETERS);

  Node* boilerplate =
      LoadObjectField(allocation_site, AllocationSite::kTransitionInfoOffset);
  Node* boilerplate_map = LoadMap(boilerplate);
  Node* boilerplate_elements = LoadElements(boilerplate);
  Node* capacity = LoadFixedArrayBaseLength(boilerplate_elements);
  allocation_site =
      allocation_site_mode == TRACK_ALLOCATION_SITE ? allocation_site : nullptr;

  Node* zero = SmiConstant(Smi::kZero);
  GotoIf(SmiEqual(capacity, zero), &zero_capacity);

  Node* elements_map = LoadMap(boilerplate_elements);
  GotoIf(IsFixedCOWArrayMap(elements_map), &cow_elements);

  GotoIf(IsFixedArrayMap(elements_map), &fast_elements);
  {
    Comment("fast double elements path");
    if (FLAG_debug_code) {
      Label correct_elements_map(this), abort(this, Label::kDeferred);
      Branch(IsFixedDoubleArrayMap(elements_map), &correct_elements_map,
             &abort);

      Bind(&abort);
      {
        Node* abort_id = SmiConstant(
            Smi::FromInt(BailoutReason::kExpectedFixedDoubleArrayMap));
        CallRuntime(Runtime::kAbort, context, abort_id);
        result.Bind(UndefinedConstant());
        Goto(&return_result);
      }
      Bind(&correct_elements_map);
    }

    Node* array =
        NonEmptyShallowClone(boilerplate, boilerplate_map, boilerplate_elements,
                             allocation_site, capacity, FAST_DOUBLE_ELEMENTS);
    result.Bind(array);
    Goto(&return_result);
  }

  Bind(&fast_elements);
  {
    Comment("fast elements path");
    Node* array =
        NonEmptyShallowClone(boilerplate, boilerplate_map, boilerplate_elements,
                             allocation_site, capacity, FAST_ELEMENTS);
    result.Bind(array);
    Goto(&return_result);
  }

  Variable length(this, MachineRepresentation::kTagged),
      elements(this, MachineRepresentation::kTagged);
  Label allocate_without_elements(this);

  Bind(&cow_elements);
  {
    Comment("fixed cow path");
    length.Bind(LoadJSArrayLength(boilerplate));
    elements.Bind(boilerplate_elements);

    Goto(&allocate_without_elements);
  }

  Bind(&zero_capacity);
  {
    Comment("zero capacity path");
    length.Bind(zero);
    elements.Bind(LoadRoot(Heap::kEmptyFixedArrayRootIndex));

    Goto(&allocate_without_elements);
  }

  Bind(&allocate_without_elements);
  {
    Node* array = AllocateUninitializedJSArrayWithoutElements(
        FAST_ELEMENTS, boilerplate_map, length.value(), allocation_site);
    StoreObjectField(array, JSObject::kElementsOffset, elements.value());
    result.Bind(array);
    Goto(&return_result);
  }

  Bind(&return_result);
  return result.value();
}

void ConstructorBuiltinsAssembler::CreateFastCloneShallowArrayBuiltin(
    AllocationSiteMode allocation_site_mode) {
  typedef compiler::Node Node;
  typedef CodeStubAssembler::Label Label;

  Node* closure = Parameter(FastCloneShallowArrayDescriptor::kClosure);
  Node* literal_index =
      Parameter(FastCloneShallowArrayDescriptor::kLiteralIndex);
  Node* constant_elements =
      Parameter(FastCloneShallowArrayDescriptor::kConstantElements);
  Node* context = Parameter(FastCloneShallowArrayDescriptor::kContext);
  Label call_runtime(this, Label::kDeferred);
  Return(EmitFastCloneShallowArray(closure, literal_index, context,
                                   &call_runtime, allocation_site_mode));

  Bind(&call_runtime);
  {
    Comment("call runtime");
    Node* flags =
        SmiConstant(Smi::FromInt(ArrayLiteral::kShallowElements |
                                 (allocation_site_mode == TRACK_ALLOCATION_SITE
                                      ? 0
                                      : ArrayLiteral::kDisableMementos)));
    Return(CallRuntime(Runtime::kCreateArrayLiteral, context, closure,
                       literal_index, constant_elements, flags));
  }
}

TF_BUILTIN(FastCloneShallowArrayTrack, ConstructorBuiltinsAssembler) {
  CreateFastCloneShallowArrayBuiltin(TRACK_ALLOCATION_SITE);
}

TF_BUILTIN(FastCloneShallowArrayDontTrack, ConstructorBuiltinsAssembler) {
  CreateFastCloneShallowArrayBuiltin(DONT_TRACK_ALLOCATION_SITE);
}

Handle<Code> Builtins::NewCloneShallowArray(
    AllocationSiteMode allocation_mode) {
  switch (allocation_mode) {
    case TRACK_ALLOCATION_SITE:
      return FastCloneShallowArrayTrack();
    case DONT_TRACK_ALLOCATION_SITE:
      return FastCloneShallowArrayDontTrack();
    default:
      UNREACHABLE();
  }
  return Handle<Code>::null();
}

// static
int ConstructorBuiltinsAssembler::FastCloneShallowObjectPropertiesCount(
    int literal_length) {
  // This heuristic of giving empty literals
  // kInitialGlobalObjectUnusedPropertiesCount in-object properties must
  // remain in sync with the runtime.
  // TODO(verwaest): Unify this with the heuristic in the runtime.
  return literal_length == 0
             ? JSObject::kInitialGlobalObjectUnusedPropertiesCount
             : literal_length;
}

Node* ConstructorBuiltinsAssembler::EmitFastCloneShallowObject(
    CodeAssemblerLabel* call_runtime, Node* closure, Node* literals_index,
    Node* properties_count) {
  Node* cell = LoadObjectField(closure, JSFunction::kFeedbackVectorOffset);
  Node* feedback_vector = LoadObjectField(cell, Cell::kValueOffset);
  Node* allocation_site = LoadFixedArrayElement(
      feedback_vector, literals_index, 0, CodeStubAssembler::SMI_PARAMETERS);
  GotoIf(IsUndefined(allocation_site), call_runtime);

  // Calculate the object and allocation size based on the properties count.
  Node* object_size = IntPtrAdd(WordShl(properties_count, kPointerSizeLog2),
                                IntPtrConstant(JSObject::kHeaderSize));
  Node* allocation_size = object_size;
  if (FLAG_allocation_site_pretenuring) {
    allocation_size =
        IntPtrAdd(object_size, IntPtrConstant(AllocationMemento::kSize));
  }
  Node* boilerplate =
      LoadObjectField(allocation_site, AllocationSite::kTransitionInfoOffset);
  Node* boilerplate_map = LoadMap(boilerplate);
  Node* instance_size = LoadMapInstanceSize(boilerplate_map);
  Node* size_in_words = WordShr(object_size, kPointerSizeLog2);
  GotoIfNot(WordEqual(instance_size, size_in_words), call_runtime);

  Node* copy = Allocate(allocation_size);
  // Copy the boilerplate's fields word by word.
  Variable offset(this, MachineType::PointerRepresentation());
  offset.Bind(IntPtrConstant(-kHeapObjectTag));
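  // Starting the offset at -kHeapObjectTag turns the tagged boilerplate and
  // copy pointers into untagged base addresses, so the loop below copies the
  // map word and every in-object field verbatim, one pointer-sized word at a
  // time.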
  Node* end_offset = IntPtrAdd(object_size, offset.value());
  Label loop_body(this, &offset), loop_check(this, &offset);
  // We should always have an object size greater than zero.
  Goto(&loop_body);
  Bind(&loop_body);
  {
    // The Allocate above guarantees that the copy lies in new space. This
    // allows us to skip write barriers. This is necessary since we may also be
    // copying unboxed doubles.
    Node* field = Load(MachineType::IntPtr(), boilerplate, offset.value());
    StoreNoWriteBarrier(MachineType::PointerRepresentation(), copy,
                        offset.value(), field);
    Goto(&loop_check);
  }
  Bind(&loop_check);
  {
    offset.Bind(IntPtrAdd(offset.value(), IntPtrConstant(kPointerSize)));
    GotoIfNot(IntPtrGreaterThanOrEqual(offset.value(), end_offset), &loop_body);
  }

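  // With pretenuring enabled, an AllocationMemento is placed immediately
  // after the object so the GC can find the AllocationSite and bump its
  // creation count; that count feeds the pretenuring heuristic.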
  if (FLAG_allocation_site_pretenuring) {
    Node* memento = InnerAllocate(copy, object_size);
    StoreMapNoWriteBarrier(memento, Heap::kAllocationMementoMapRootIndex);
    StoreObjectFieldNoWriteBarrier(
        memento, AllocationMemento::kAllocationSiteOffset, allocation_site);
    Node* memento_create_count = LoadObjectField(
        allocation_site, AllocationSite::kPretenureCreateCountOffset);
    memento_create_count =
        SmiAdd(memento_create_count, SmiConstant(Smi::FromInt(1)));
    StoreObjectFieldNoWriteBarrier(allocation_site,
                                   AllocationSite::kPretenureCreateCountOffset,
                                   memento_create_count);
  }

  // TODO(verwaest): Allocate and fill in double boxes.
  return copy;
}

void ConstructorBuiltinsAssembler::CreateFastCloneShallowObjectBuiltin(
    int properties_count) {
  DCHECK_GE(properties_count, 0);
  DCHECK_LE(properties_count, kMaximumClonedShallowObjectProperties);
  Label call_runtime(this);
  Node* closure = Parameter(0);
  Node* literals_index = Parameter(1);

  Node* properties_count_node =
      IntPtrConstant(FastCloneShallowObjectPropertiesCount(properties_count));
  Node* copy = EmitFastCloneShallowObject(
      &call_runtime, closure, literals_index, properties_count_node);
  Return(copy);

  Bind(&call_runtime);
  Node* constant_properties = Parameter(2);
  Node* flags = Parameter(3);
  Node* context = Parameter(4);
  TailCallRuntime(Runtime::kCreateObjectLiteral, context, closure,
                  literals_index, constant_properties, flags);
}

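// Instantiate one specialized builtin per supported property count (0..6);
// callers pick the right variant via Builtins::NewCloneShallowObject below.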
#define SHALLOW_OBJECT_BUILTIN(props)                                       \
  TF_BUILTIN(FastCloneShallowObject##props, ConstructorBuiltinsAssembler) { \
    CreateFastCloneShallowObjectBuiltin(props);                             \
  }

SHALLOW_OBJECT_BUILTIN(0);
SHALLOW_OBJECT_BUILTIN(1);
SHALLOW_OBJECT_BUILTIN(2);
SHALLOW_OBJECT_BUILTIN(3);
SHALLOW_OBJECT_BUILTIN(4);
SHALLOW_OBJECT_BUILTIN(5);
SHALLOW_OBJECT_BUILTIN(6);

Handle<Code> Builtins::NewCloneShallowObject(int length) {
  switch (length) {
    case 0:
      return FastCloneShallowObject0();
    case 1:
      return FastCloneShallowObject1();
    case 2:
      return FastCloneShallowObject2();
    case 3:
      return FastCloneShallowObject3();
    case 4:
      return FastCloneShallowObject4();
    case 5:
      return FastCloneShallowObject5();
    case 6:
      return FastCloneShallowObject6();
    default:
      UNREACHABLE();
  }
  return Handle<Code>::null();
}

}  // namespace internal
}  // namespace v8