      1 // Copyright 2012 the V8 project authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "src/v8.h"
      6 
      7 #include "src/api.h"
      8 #include "src/arguments.h"
      9 #include "src/base/once.h"
     10 #include "src/bootstrapper.h"
     11 #include "src/builtins.h"
     12 #include "src/cpu-profiler.h"
     13 #include "src/gdb-jit.h"
     14 #include "src/ic-inl.h"
     15 #include "src/heap-profiler.h"
     16 #include "src/mark-compact.h"
     17 #include "src/stub-cache.h"
     18 #include "src/vm-state-inl.h"
     19 
     20 namespace v8 {
     21 namespace internal {
     22 
     23 namespace {
     24 
     25 // Arguments object passed to C++ builtins.
     26 template <BuiltinExtraArguments extra_args>
     27 class BuiltinArguments : public Arguments {
     28  public:
     29   BuiltinArguments(int length, Object** arguments)
     30       : Arguments(length, arguments) { }
     31 
     32   Object*& operator[] (int index) {
     33     ASSERT(index < length());
     34     return Arguments::operator[](index);
     35   }
     36 
     37   template <class S> Handle<S> at(int index) {
     38     ASSERT(index < length());
     39     return Arguments::at<S>(index);
     40   }
     41 
     42   Handle<Object> receiver() {
     43     return Arguments::at<Object>(0);
     44   }
     45 
     46   Handle<JSFunction> called_function() {
     47     STATIC_ASSERT(extra_args == NEEDS_CALLED_FUNCTION);
     48     return Arguments::at<JSFunction>(Arguments::length() - 1);
     49   }
     50 
     51   // Gets the total number of arguments including the receiver (but
     52   // excluding extra arguments).
     53   int length() const {
     54     STATIC_ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
     55     return Arguments::length();
     56   }
     57 
     58 #ifdef DEBUG
     59   void Verify() {
     60     // Check we have at least the receiver.
     61     ASSERT(Arguments::length() >= 1);
     62   }
     63 #endif
     64 };
     65 
     66 
     67 // Specialize BuiltinArguments for the called function extra argument.
     68 
     69 template <>
     70 int BuiltinArguments<NEEDS_CALLED_FUNCTION>::length() const {
     71   return Arguments::length() - 1;
     72 }
     73 
     74 #ifdef DEBUG
     75 template <>
     76 void BuiltinArguments<NEEDS_CALLED_FUNCTION>::Verify() {
     77   // Check we have at least the receiver and the called function.
     78   ASSERT(Arguments::length() >= 2);
     79   // Make sure cast to JSFunction succeeds.
     80   called_function();
     81 }
     82 #endif
     83 
     84 
     85 #define DEF_ARG_TYPE(name, spec)                      \
     86   typedef BuiltinArguments<spec> name##ArgumentsType;
     87 BUILTIN_LIST_C(DEF_ARG_TYPE)
     88 #undef DEF_ARG_TYPE
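        // As an illustration, assuming a list entry such as
        // V(ArrayPush, NO_EXTRA_ARGUMENTS) in BUILTIN_LIST_C, the macro above
        // expands to a typedef of the form:
        //
        //   typedef BuiltinArguments<NO_EXTRA_ARGUMENTS> ArrayPushArgumentsType;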
     89 
     90 }  // namespace
     91 
     92 // ----------------------------------------------------------------------------
     93 // Support macro for defining builtins in C++.
     94 // ----------------------------------------------------------------------------
     95 //
     96 // A builtin function is defined by writing:
     97 //
     98 //   BUILTIN(name) {
     99 //     ...
    100 //   }
    101 //
    102 // In the body of the builtin function the arguments can be accessed
    103 // through the BuiltinArguments object args.
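        //
        // Purely for illustration (IllustrativeArgumentCount is a hypothetical
        // name, not a builtin that is actually registered in BUILTIN_LIST_C), a
        // builtin returning its JavaScript argument count might look roughly
        // like this:
        //
        //   BUILTIN(IllustrativeArgumentCount) {
        //     HandleScope scope(isolate);
        //     return Smi::FromInt(args.length() - 1);  // Exclude the receiver.
        //   }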
    104 
    105 #ifdef DEBUG
    106 
    107 #define BUILTIN(name)                                            \
    108   MUST_USE_RESULT static Object* Builtin_Impl_##name(            \
    109       name##ArgumentsType args, Isolate* isolate);               \
    110   MUST_USE_RESULT static Object* Builtin_##name(                 \
    111       int args_length, Object** args_object, Isolate* isolate) { \
    112     name##ArgumentsType args(args_length, args_object);          \
    113     args.Verify();                                               \
    114     return Builtin_Impl_##name(args, isolate);                   \
    115   }                                                              \
    116   MUST_USE_RESULT static Object* Builtin_Impl_##name(            \
    117       name##ArgumentsType args, Isolate* isolate)
    118 
    119 #else  // For release mode.
    120 
    121 #define BUILTIN(name)                                            \
    122   static Object* Builtin_impl##name(                             \
    123       name##ArgumentsType args, Isolate* isolate);               \
    124   static Object* Builtin_##name(                                 \
    125       int args_length, Object** args_object, Isolate* isolate) { \
    126     name##ArgumentsType args(args_length, args_object);          \
    127     return Builtin_impl##name(args, isolate);                    \
    128   }                                                              \
    129   static Object* Builtin_impl##name(                             \
    130       name##ArgumentsType args, Isolate* isolate)
    131 #endif
    132 
    133 
    134 #ifdef DEBUG
    135 static inline bool CalledAsConstructor(Isolate* isolate) {
    136   // Calculate the result using a full stack frame iterator and check
    137   // that the state of the stack is as we assume it to be in the
    138   // code below.
    139   StackFrameIterator it(isolate);
    140   ASSERT(it.frame()->is_exit());
    141   it.Advance();
    142   StackFrame* frame = it.frame();
    143   bool reference_result = frame->is_construct();
    144   Address fp = Isolate::c_entry_fp(isolate->thread_local_top());
    145   // Because we know fp points to an exit frame we can use the relevant
    146   // part of ExitFrame::ComputeCallerState directly.
    147   const int kCallerOffset = ExitFrameConstants::kCallerFPOffset;
    148   Address caller_fp = Memory::Address_at(fp + kCallerOffset);
    149   // This inlines the part of StackFrame::ComputeType that grabs the
    150   // type of the current frame.  Note that StackFrame::ComputeType
    151 // has been specialized for each architecture, so if any one of them
    152 // changes, this code has to be changed as well.
    153   const int kMarkerOffset = StandardFrameConstants::kMarkerOffset;
    154   const Smi* kConstructMarker = Smi::FromInt(StackFrame::CONSTRUCT);
    155   Object* marker = Memory::Object_at(caller_fp + kMarkerOffset);
    156   bool result = (marker == kConstructMarker);
    157   ASSERT_EQ(result, reference_result);
    158   return result;
    159 }
    160 #endif
    161 
    162 
    163 // ----------------------------------------------------------------------------
    164 
    165 BUILTIN(Illegal) {
    166   UNREACHABLE();
    167   return isolate->heap()->undefined_value();  // Make compiler happy.
    168 }
    169 
    170 
    171 BUILTIN(EmptyFunction) {
    172   return isolate->heap()->undefined_value();
    173 }
    174 
    175 
    176 static void MoveDoubleElements(FixedDoubleArray* dst, int dst_index,
    177                                FixedDoubleArray* src, int src_index, int len) {
    178   if (len == 0) return;
    179   MemMove(dst->data_start() + dst_index, src->data_start() + src_index,
    180           len * kDoubleSize);
    181 }
    182 
    183 
    184 static FixedArrayBase* LeftTrimFixedArray(Heap* heap,
    185                                           FixedArrayBase* elms,
    186                                           int to_trim) {
    187   ASSERT(heap->CanMoveObjectStart(elms));
    188 
    189   Map* map = elms->map();
    190   int entry_size;
    191   if (elms->IsFixedArray()) {
    192     entry_size = kPointerSize;
    193   } else {
    194     entry_size = kDoubleSize;
    195   }
    196   ASSERT(elms->map() != heap->fixed_cow_array_map());
    197   // For now this trick is only applied to fixed arrays in new and paged space.
    198   // In large object space the object's start must coincide with the chunk
    199   // start, and thus the trick is simply not applicable there.
    200   ASSERT(!heap->lo_space()->Contains(elms));
    201 
    202   STATIC_ASSERT(FixedArrayBase::kMapOffset == 0);
    203   STATIC_ASSERT(FixedArrayBase::kLengthOffset == kPointerSize);
    204   STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);
    205 
    206   Object** former_start = HeapObject::RawField(elms, 0);
    207 
    208   const int len = elms->length();
    209 
    210   if (to_trim * entry_size > FixedArrayBase::kHeaderSize &&
    211       elms->IsFixedArray() &&
    212       !heap->new_space()->Contains(elms)) {
    213     // If we are doing a big trim in old space then we zap the space that was
    214     // formerly part of the array so that the GC (aided by the card-based
    215     // remembered set) won't find pointers to new-space there.
    216     Object** zap = reinterpret_cast<Object**>(elms->address());
    217     zap++;  // Header of filler must be at least one word so skip that.
    218     for (int i = 1; i < to_trim; i++) {
    219       *zap++ = Smi::FromInt(0);
    220     }
    221   }
    222   // Technically in new space this write might be omitted (except for
    223   // debug mode, which iterates through the heap), but to be safe
    224   // we still do it.
    225   // Since left trimming is only performed on pages which are not concurrently
    226   // swept, creating a filler object does not require synchronization.
    227   heap->CreateFillerObjectAt(elms->address(), to_trim * entry_size);
    228 
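          // Write a fresh map and length word at the new start so that the
          // surviving tail of the old array is itself a valid FixedArrayBase.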
    229   int new_start_index = to_trim * (entry_size / kPointerSize);
    230   former_start[new_start_index] = map;
    231   former_start[new_start_index + 1] = Smi::FromInt(len - to_trim);
    232 
    233   // Maintain marking consistency for HeapObjectIterator and
    234   // IncrementalMarking.
    235   int size_delta = to_trim * entry_size;
    236   Address new_start = elms->address() + size_delta;
    237   heap->marking()->TransferMark(elms->address(), new_start);
    238   heap->AdjustLiveBytes(new_start, -size_delta, Heap::FROM_MUTATOR);
    239 
    240   FixedArrayBase* new_elms =
    241       FixedArrayBase::cast(HeapObject::FromAddress(new_start));
    242   HeapProfiler* profiler = heap->isolate()->heap_profiler();
    243   if (profiler->is_tracking_object_moves()) {
    244     profiler->ObjectMoveEvent(elms->address(),
    245                               new_elms->address(),
    246                               new_elms->Size());
    247   }
    248   return new_elms;
    249 }
    250 
    251 
    252 static bool ArrayPrototypeHasNoElements(Heap* heap,
    253                                         Context* native_context,
    254                                         JSObject* array_proto) {
    255   DisallowHeapAllocation no_gc;
    256   // This method depends on the non-writability of the Object and Array
    257   // prototype fields.
    258   if (array_proto->elements() != heap->empty_fixed_array()) return false;
    259   // Object.prototype
    260   Object* proto = array_proto->GetPrototype();
    261   if (proto == heap->null_value()) return false;
    262   array_proto = JSObject::cast(proto);
    263   if (array_proto != native_context->initial_object_prototype()) return false;
    264   if (array_proto->elements() != heap->empty_fixed_array()) return false;
    265   return array_proto->GetPrototype()->IsNull();
    266 }
    267 
    268 
    269 // Returns an empty handle if not applicable.
    270 MUST_USE_RESULT
    271 static inline MaybeHandle<FixedArrayBase> EnsureJSArrayWithWritableFastElements(
    272     Isolate* isolate,
    273     Handle<Object> receiver,
    274     Arguments* args,
    275     int first_added_arg) {
    276   if (!receiver->IsJSArray()) return MaybeHandle<FixedArrayBase>();
    277   Handle<JSArray> array = Handle<JSArray>::cast(receiver);
    278   // If there may be elements accessors in the prototype chain, the fast path
    279   // cannot be used if there are arguments to add to the array.
    280   if (args != NULL && array->map()->DictionaryElementsInPrototypeChainOnly()) {
    281     return MaybeHandle<FixedArrayBase>();
    282   }
    283   if (array->map()->is_observed()) return MaybeHandle<FixedArrayBase>();
    284   if (!array->map()->is_extensible()) return MaybeHandle<FixedArrayBase>();
    285   Handle<FixedArrayBase> elms(array->elements(), isolate);
    286   Heap* heap = isolate->heap();
    287   Map* map = elms->map();
    288   if (map == heap->fixed_array_map()) {
    289     if (args == NULL || array->HasFastObjectElements()) return elms;
    290   } else if (map == heap->fixed_cow_array_map()) {
    291     elms = JSObject::EnsureWritableFastElements(array);
    292     if (args == NULL || array->HasFastObjectElements()) return elms;
    293   } else if (map == heap->fixed_double_array_map()) {
    294     if (args == NULL) return elms;
    295   } else {
    296     return MaybeHandle<FixedArrayBase>();
    297   }
    298 
    299   // Need to ensure that the arguments passed in args can be contained in
    300   // the array.
    301   int args_length = args->length();
    302   if (first_added_arg >= args_length) return handle(array->elements(), isolate);
    303 
    304   ElementsKind origin_kind = array->map()->elements_kind();
    305   ASSERT(!IsFastObjectElementsKind(origin_kind));
    306   ElementsKind target_kind = origin_kind;
    307   {
    308     DisallowHeapAllocation no_gc;
    309     int arg_count = args->length() - first_added_arg;
    310     Object** arguments = args->arguments() - first_added_arg - (arg_count - 1);
    311     for (int i = 0; i < arg_count; i++) {
    312       Object* arg = arguments[i];
    313       if (arg->IsHeapObject()) {
    314         if (arg->IsHeapNumber()) {
    315           target_kind = FAST_DOUBLE_ELEMENTS;
    316         } else {
    317           target_kind = FAST_ELEMENTS;
    318           break;
    319         }
    320       }
    321     }
    322   }
    323   if (target_kind != origin_kind) {
    324     JSObject::TransitionElementsKind(array, target_kind);
    325     return handle(array->elements(), isolate);
    326   }
    327   return elms;
    328 }
    329 
    330 
    331 static inline bool IsJSArrayFastElementMovingAllowed(Heap* heap,
    332                                                      JSArray* receiver) {
    333   if (!FLAG_clever_optimizations) return false;
    334   DisallowHeapAllocation no_gc;
    335   Context* native_context = heap->isolate()->context()->native_context();
    336   JSObject* array_proto =
    337       JSObject::cast(native_context->array_function()->prototype());
    338   return receiver->GetPrototype() == array_proto &&
    339          ArrayPrototypeHasNoElements(heap, native_context, array_proto);
    340 }
    341 
    342 
    343 MUST_USE_RESULT static Object* CallJsBuiltin(
    344     Isolate* isolate,
    345     const char* name,
    346     BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
    347   HandleScope handleScope(isolate);
    348 
    349   Handle<Object> js_builtin = Object::GetProperty(
    350       isolate,
    351       handle(isolate->native_context()->builtins(), isolate),
    352       name).ToHandleChecked();
    353   Handle<JSFunction> function = Handle<JSFunction>::cast(js_builtin);
    354   int argc = args.length() - 1;
    355   ScopedVector<Handle<Object> > argv(argc);
    356   for (int i = 0; i < argc; ++i) {
    357     argv[i] = args.at<Object>(i + 1);
    358   }
    359   Handle<Object> result;
    360   ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
    361       isolate, result,
    362       Execution::Call(isolate,
    363                       function,
    364                       args.receiver(),
    365                       argc,
    366                       argv.start()));
    367   return *result;
    368 }
    369 
    370 
    371 BUILTIN(ArrayPush) {
    372   HandleScope scope(isolate);
    373   Handle<Object> receiver = args.receiver();
    374   MaybeHandle<FixedArrayBase> maybe_elms_obj =
    375       EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 1);
    376   Handle<FixedArrayBase> elms_obj;
    377   if (!maybe_elms_obj.ToHandle(&elms_obj)) {
    378     return CallJsBuiltin(isolate, "ArrayPush", args);
    379   }
    380 
    381   Handle<JSArray> array = Handle<JSArray>::cast(receiver);
    382   int len = Smi::cast(array->length())->value();
    383   int to_add = args.length() - 1;
    384   if (to_add > 0 && JSArray::WouldChangeReadOnlyLength(array, len + to_add)) {
    385     return CallJsBuiltin(isolate, "ArrayPush", args);
    386   }
    387   ASSERT(!array->map()->is_observed());
    388 
    389   ElementsKind kind = array->GetElementsKind();
    390 
    391   if (IsFastSmiOrObjectElementsKind(kind)) {
    392     Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
    393     if (to_add == 0) {
    394       return Smi::FromInt(len);
    395     }
    396     // Currently fixed arrays cannot grow too big, so
    397     // we should never hit this case.
    398     ASSERT(to_add <= (Smi::kMaxValue - len));
    399 
    400     int new_length = len + to_add;
    401 
    402     if (new_length > elms->length()) {
    403       // New backing storage is needed.
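              // Grow to roughly 1.5 times the required length plus 16 slack
              // slots; e.g. a new_length of 100 yields a capacity of 166.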
    404       int capacity = new_length + (new_length >> 1) + 16;
    405       Handle<FixedArray> new_elms =
    406           isolate->factory()->NewUninitializedFixedArray(capacity);
    407 
    408       ElementsAccessor* accessor = array->GetElementsAccessor();
    409       accessor->CopyElements(
    410           elms_obj, 0, kind, new_elms, 0,
    411           ElementsAccessor::kCopyToEndAndInitializeToHole);
    412 
    413       elms = new_elms;
    414     }
    415 
    416     // Add the provided values.
    417     DisallowHeapAllocation no_gc;
    418     WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
    419     for (int index = 0; index < to_add; index++) {
    420       elms->set(index + len, args[index + 1], mode);
    421     }
    422 
    423     if (*elms != array->elements()) {
    424       array->set_elements(*elms);
    425     }
    426 
    427     // Set the length.
    428     array->set_length(Smi::FromInt(new_length));
    429     return Smi::FromInt(new_length);
    430   } else {
    431     int elms_len = elms_obj->length();
    432     if (to_add == 0) {
    433       return Smi::FromInt(len);
    434     }
    435     // Currently fixed arrays cannot grow too big, so
    436     // we should never hit this case.
    437     ASSERT(to_add <= (Smi::kMaxValue - len));
    438 
    439     int new_length = len + to_add;
    440 
    441     Handle<FixedDoubleArray> new_elms;
    442 
    443     if (new_length > elms_len) {
    444       // New backing storage is needed.
    445       int capacity = new_length + (new_length >> 1) + 16;
    446       // Create new backing store; since capacity > 0, we can
    447       // safely cast to FixedDoubleArray.
    448       new_elms = Handle<FixedDoubleArray>::cast(
    449           isolate->factory()->NewFixedDoubleArray(capacity));
    450 
    451       ElementsAccessor* accessor = array->GetElementsAccessor();
    452       accessor->CopyElements(
    453           elms_obj, 0, kind, new_elms, 0,
    454           ElementsAccessor::kCopyToEndAndInitializeToHole);
    455 
    456     } else {
    457       // to_add is > 0 and new_length <= elms_len, so elms_obj cannot be the
    458       // empty_fixed_array.
    459       new_elms = Handle<FixedDoubleArray>::cast(elms_obj);
    460     }
    461 
    462     // Add the provided values.
    463     DisallowHeapAllocation no_gc;
    464     int index;
    465     for (index = 0; index < to_add; index++) {
    466       Object* arg = args[index + 1];
    467       new_elms->set(index + len, arg->Number());
    468     }
    469 
    470     if (*new_elms != array->elements()) {
    471       array->set_elements(*new_elms);
    472     }
    473 
    474     // Set the length.
    475     array->set_length(Smi::FromInt(new_length));
    476     return Smi::FromInt(new_length);
    477   }
    478 }
    479 
    480 
    481 BUILTIN(ArrayPop) {
    482   HandleScope scope(isolate);
    483   Handle<Object> receiver = args.receiver();
    484   MaybeHandle<FixedArrayBase> maybe_elms_obj =
    485       EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0);
    486   Handle<FixedArrayBase> elms_obj;
    487   if (!maybe_elms_obj.ToHandle(&elms_obj)) {
    488     return CallJsBuiltin(isolate, "ArrayPop", args);
    489   }
    490 
    491   Handle<JSArray> array = Handle<JSArray>::cast(receiver);
    492   ASSERT(!array->map()->is_observed());
    493 
    494   int len = Smi::cast(array->length())->value();
    495   if (len == 0) return isolate->heap()->undefined_value();
    496 
    497   ElementsAccessor* accessor = array->GetElementsAccessor();
    498   int new_length = len - 1;
    499   Handle<Object> element =
    500       accessor->Get(array, array, new_length, elms_obj).ToHandleChecked();
    501   if (element->IsTheHole()) {
    502     return CallJsBuiltin(isolate, "ArrayPop", args);
    503   }
    504   RETURN_FAILURE_ON_EXCEPTION(
    505       isolate,
    506       accessor->SetLength(array, handle(Smi::FromInt(new_length), isolate)));
    507   return *element;
    508 }
    509 
    510 
    511 BUILTIN(ArrayShift) {
    512   HandleScope scope(isolate);
    513   Heap* heap = isolate->heap();
    514   Handle<Object> receiver = args.receiver();
    515   MaybeHandle<FixedArrayBase> maybe_elms_obj =
    516       EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0);
    517   Handle<FixedArrayBase> elms_obj;
    518   if (!maybe_elms_obj.ToHandle(&elms_obj) ||
    519       !IsJSArrayFastElementMovingAllowed(heap,
    520                                          *Handle<JSArray>::cast(receiver))) {
    521     return CallJsBuiltin(isolate, "ArrayShift", args);
    522   }
    523   Handle<JSArray> array = Handle<JSArray>::cast(receiver);
    524   ASSERT(!array->map()->is_observed());
    525 
    526   int len = Smi::cast(array->length())->value();
    527   if (len == 0) return heap->undefined_value();
    528 
    529   // Get the first element.
    530   ElementsAccessor* accessor = array->GetElementsAccessor();
    531   Handle<Object> first =
    532     accessor->Get(array, array, 0, elms_obj).ToHandleChecked();
    533   if (first->IsTheHole()) {
    534     return CallJsBuiltin(isolate, "ArrayShift", args);
    535   }
    536 
    537   if (heap->CanMoveObjectStart(*elms_obj)) {
    538     array->set_elements(LeftTrimFixedArray(heap, *elms_obj, 1));
    539   } else {
    540     // Shift the elements.
    541     if (elms_obj->IsFixedArray()) {
    542       Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
    543       DisallowHeapAllocation no_gc;
    544       heap->MoveElements(*elms, 0, 1, len - 1);
    545       elms->set(len - 1, heap->the_hole_value());
    546     } else {
    547       Handle<FixedDoubleArray> elms = Handle<FixedDoubleArray>::cast(elms_obj);
    548       MoveDoubleElements(*elms, 0, *elms, 1, len - 1);
    549       elms->set_the_hole(len - 1);
    550     }
    551   }
    552 
    553   // Set the length.
    554   array->set_length(Smi::FromInt(len - 1));
    555 
    556   return *first;
    557 }
    558 
    559 
    560 BUILTIN(ArrayUnshift) {
    561   HandleScope scope(isolate);
    562   Heap* heap = isolate->heap();
    563   Handle<Object> receiver = args.receiver();
    564   MaybeHandle<FixedArrayBase> maybe_elms_obj =
    565       EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0);
    566   Handle<FixedArrayBase> elms_obj;
    567   if (!maybe_elms_obj.ToHandle(&elms_obj) ||
    568       !IsJSArrayFastElementMovingAllowed(heap,
    569                                          *Handle<JSArray>::cast(receiver))) {
    570     return CallJsBuiltin(isolate, "ArrayUnshift", args);
    571   }
    572   Handle<JSArray> array = Handle<JSArray>::cast(receiver);
    573   ASSERT(!array->map()->is_observed());
    574   if (!array->HasFastSmiOrObjectElements()) {
    575     return CallJsBuiltin(isolate, "ArrayUnshift", args);
    576   }
    577   int len = Smi::cast(array->length())->value();
    578   int to_add = args.length() - 1;
    579   int new_length = len + to_add;
    580   // Currently fixed arrays cannot grow too big, so
    581   // we should never hit this case.
    582   ASSERT(to_add <= (Smi::kMaxValue - len));
    583 
    584   if (to_add > 0 && JSArray::WouldChangeReadOnlyLength(array, len + to_add)) {
    585     return CallJsBuiltin(isolate, "ArrayUnshift", args);
    586   }
    587 
    588   Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
    589 
    590   JSObject::EnsureCanContainElements(array, &args, 1, to_add,
    591                                      DONT_ALLOW_DOUBLE_ELEMENTS);
    592 
    593   if (new_length > elms->length()) {
    594     // New backing storage is needed.
    595     int capacity = new_length + (new_length >> 1) + 16;
    596     Handle<FixedArray> new_elms =
    597         isolate->factory()->NewUninitializedFixedArray(capacity);
    598 
    599     ElementsKind kind = array->GetElementsKind();
    600     ElementsAccessor* accessor = array->GetElementsAccessor();
    601     accessor->CopyElements(
    602         elms, 0, kind, new_elms, to_add,
    603         ElementsAccessor::kCopyToEndAndInitializeToHole);
    604 
    605     elms = new_elms;
    606     array->set_elements(*elms);
    607   } else {
    608     DisallowHeapAllocation no_gc;
    609     heap->MoveElements(*elms, to_add, 0, len);
    610   }
    611 
    612   // Add the provided values.
    613   DisallowHeapAllocation no_gc;
    614   WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
    615   for (int i = 0; i < to_add; i++) {
    616     elms->set(i, args[i + 1], mode);
    617   }
    618 
    619   // Set the length.
    620   array->set_length(Smi::FromInt(new_length));
    621   return Smi::FromInt(new_length);
    622 }
    623 
    624 
    625 BUILTIN(ArraySlice) {
    626   HandleScope scope(isolate);
    627   Heap* heap = isolate->heap();
    628   Handle<Object> receiver = args.receiver();
    629   int len = -1;
    630   int relative_start = 0;
    631   int relative_end = 0;
    632   {
    633     DisallowHeapAllocation no_gc;
    634     if (receiver->IsJSArray()) {
    635       JSArray* array = JSArray::cast(*receiver);
    636       if (!IsJSArrayFastElementMovingAllowed(heap, array)) {
    637         AllowHeapAllocation allow_allocation;
    638         return CallJsBuiltin(isolate, "ArraySlice", args);
    639       }
    640 
    641       if (!array->HasFastElements()) {
    642         AllowHeapAllocation allow_allocation;
    643         return CallJsBuiltin(isolate, "ArraySlice", args);
    644       }
    645 
    646       len = Smi::cast(array->length())->value();
    647     } else {
    648       // Array.slice(arguments, ...) is quite a common idiom (notably more
    649       // than 50% of invocations in Web apps), so we handle it in C++ as well.
    650       Map* arguments_map = isolate->context()->native_context()->
    651           sloppy_arguments_boilerplate()->map();
    652 
    653       bool is_arguments_object_with_fast_elements =
    654           receiver->IsJSObject() &&
    655           JSObject::cast(*receiver)->map() == arguments_map;
    656       if (!is_arguments_object_with_fast_elements) {
    657         AllowHeapAllocation allow_allocation;
    658         return CallJsBuiltin(isolate, "ArraySlice", args);
    659       }
    660       JSObject* object = JSObject::cast(*receiver);
    661 
    662       if (!object->HasFastElements()) {
    663         AllowHeapAllocation allow_allocation;
    664         return CallJsBuiltin(isolate, "ArraySlice", args);
    665       }
    666 
    667       Object* len_obj = object->InObjectPropertyAt(Heap::kArgumentsLengthIndex);
    668       if (!len_obj->IsSmi()) {
    669         AllowHeapAllocation allow_allocation;
    670         return CallJsBuiltin(isolate, "ArraySlice", args);
    671       }
    672       len = Smi::cast(len_obj)->value();
    673       if (len > object->elements()->length()) {
    674         AllowHeapAllocation allow_allocation;
    675         return CallJsBuiltin(isolate, "ArraySlice", args);
    676       }
    677     }
    678 
    679     ASSERT(len >= 0);
    680     int n_arguments = args.length() - 1;
    681 
    682     // Note the carefully chosen defaults: if an argument is missing, it is
    683     // undefined, which gets converted to 0 for relative_start
    684     // and to len for relative_end.
    685     relative_start = 0;
    686     relative_end = len;
    687     if (n_arguments > 0) {
    688       Object* arg1 = args[1];
    689       if (arg1->IsSmi()) {
    690         relative_start = Smi::cast(arg1)->value();
    691       } else if (arg1->IsHeapNumber()) {
    692         double start = HeapNumber::cast(arg1)->value();
    693         if (start < kMinInt || start > kMaxInt) {
    694           AllowHeapAllocation allow_allocation;
    695           return CallJsBuiltin(isolate, "ArraySlice", args);
    696         }
    697         relative_start = std::isnan(start) ? 0 : static_cast<int>(start);
    698       } else if (!arg1->IsUndefined()) {
    699         AllowHeapAllocation allow_allocation;
    700         return CallJsBuiltin(isolate, "ArraySlice", args);
    701       }
    702       if (n_arguments > 1) {
    703         Object* arg2 = args[2];
    704         if (arg2->IsSmi()) {
    705           relative_end = Smi::cast(arg2)->value();
    706         } else if (arg2->IsHeapNumber()) {
    707           double end = HeapNumber::cast(arg2)->value();
    708           if (end < kMinInt || end > kMaxInt) {
    709             AllowHeapAllocation allow_allocation;
    710             return CallJsBuiltin(isolate, "ArraySlice", args);
    711           }
    712           relative_end = std::isnan(end) ? 0 : static_cast<int>(end);
    713         } else if (!arg2->IsUndefined()) {
    714           AllowHeapAllocation allow_allocation;
    715           return CallJsBuiltin(isolate, "ArraySlice", args);
    716         }
    717       }
    718     }
    719   }
    720 
    721   // ECMA-262, 3rd Edition, Section 15.4.4.10, step 6.
    722   int k = (relative_start < 0) ? Max(len + relative_start, 0)
    723                                : Min(relative_start, len);
    724 
    725   // ECMA-262, 3rd Edition, Section 15.4.4.10, step 8.
    726   int final = (relative_end < 0) ? Max(len + relative_end, 0)
    727                                  : Min(relative_end, len);
    728 
    729   // Calculate the length of the result array.
    730   int result_len = Max(final - k, 0);
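          // Purely as an illustration: with len == 5, relative_start == -2 and
          // relative_end left at its default of len, we get k == 3, final == 5
          // and result_len == 2, i.e. a copy of the last two elements.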
    731 
    732   Handle<JSObject> object = Handle<JSObject>::cast(receiver);
    733   Handle<FixedArrayBase> elms(object->elements(), isolate);
    734 
    735   ElementsKind kind = object->GetElementsKind();
    736   if (IsHoleyElementsKind(kind)) {
    737     DisallowHeapAllocation no_gc;
    738     bool packed = true;
    739     ElementsAccessor* accessor = ElementsAccessor::ForKind(kind);
    740     for (int i = k; i < final; i++) {
    741       if (!accessor->HasElement(object, object, i, elms)) {
    742         packed = false;
    743         break;
    744       }
    745     }
    746     if (packed) {
    747       kind = GetPackedElementsKind(kind);
    748     } else if (!receiver->IsJSArray()) {
    749       AllowHeapAllocation allow_allocation;
    750       return CallJsBuiltin(isolate, "ArraySlice", args);
    751     }
    752   }
    753 
    754   Handle<JSArray> result_array =
    755       isolate->factory()->NewJSArray(kind, result_len, result_len);
    756 
    757   DisallowHeapAllocation no_gc;
    758   if (result_len == 0) return *result_array;
    759 
    760   ElementsAccessor* accessor = object->GetElementsAccessor();
    761   accessor->CopyElements(
    762       elms, k, kind, handle(result_array->elements(), isolate), 0, result_len);
    763   return *result_array;
    764 }
    765 
    766 
    767 BUILTIN(ArraySplice) {
    768   HandleScope scope(isolate);
    769   Heap* heap = isolate->heap();
    770   Handle<Object> receiver = args.receiver();
    771   MaybeHandle<FixedArrayBase> maybe_elms_obj =
    772       EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 3);
    773   Handle<FixedArrayBase> elms_obj;
    774   if (!maybe_elms_obj.ToHandle(&elms_obj) ||
    775       !IsJSArrayFastElementMovingAllowed(heap,
    776                                          *Handle<JSArray>::cast(receiver))) {
    777     return CallJsBuiltin(isolate, "ArraySplice", args);
    778   }
    779   Handle<JSArray> array = Handle<JSArray>::cast(receiver);
    780   ASSERT(!array->map()->is_observed());
    781 
    782   int len = Smi::cast(array->length())->value();
    783 
    784   int n_arguments = args.length() - 1;
    785 
    786   int relative_start = 0;
    787   if (n_arguments > 0) {
    788     DisallowHeapAllocation no_gc;
    789     Object* arg1 = args[1];
    790     if (arg1->IsSmi()) {
    791       relative_start = Smi::cast(arg1)->value();
    792     } else if (arg1->IsHeapNumber()) {
    793       double start = HeapNumber::cast(arg1)->value();
    794       if (start < kMinInt || start > kMaxInt) {
    795         AllowHeapAllocation allow_allocation;
    796         return CallJsBuiltin(isolate, "ArraySplice", args);
    797       }
    798       relative_start = std::isnan(start) ? 0 : static_cast<int>(start);
    799     } else if (!arg1->IsUndefined()) {
    800       AllowHeapAllocation allow_allocation;
    801       return CallJsBuiltin(isolate, "ArraySplice", args);
    802     }
    803   }
    804   int actual_start = (relative_start < 0) ? Max(len + relative_start, 0)
    805                                           : Min(relative_start, len);
    806 
    807   // SpiderMonkey, TraceMonkey and JSC treat the case where no delete count is
    808   // given as a request to delete all the elements from the start, which
    809   // differs from the case of an undefined delete count.
    810   // This does not follow ECMA-262, but we do the same for
    811   // compatibility.
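          // For example, [1, 2, 3].splice(1) removes the elements 2 and 3,
          // whereas [1, 2, 3].splice(1, undefined) removes nothing.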
    812   int actual_delete_count;
    813   if (n_arguments == 1) {
    814     ASSERT(len - actual_start >= 0);
    815     actual_delete_count = len - actual_start;
    816   } else {
    817     int value = 0;  // ToInteger(undefined) == 0
    818     if (n_arguments > 1) {
    819       DisallowHeapAllocation no_gc;
    820       Object* arg2 = args[2];
    821       if (arg2->IsSmi()) {
    822         value = Smi::cast(arg2)->value();
    823       } else {
    824         AllowHeapAllocation allow_allocation;
    825         return CallJsBuiltin(isolate, "ArraySplice", args);
    826       }
    827     }
    828     actual_delete_count = Min(Max(value, 0), len - actual_start);
    829   }
    830 
    831   ElementsKind elements_kind = array->GetElementsKind();
    832 
    833   int item_count = (n_arguments > 1) ? (n_arguments - 2) : 0;
    834   int new_length = len - actual_delete_count + item_count;
    835 
    836   // For double mode we do not support changing the length.
    837   if (new_length > len && IsFastDoubleElementsKind(elements_kind)) {
    838     return CallJsBuiltin(isolate, "ArraySplice", args);
    839   }
    840 
    841   if (new_length == 0) {
    842     Handle<JSArray> result = isolate->factory()->NewJSArrayWithElements(
    843         elms_obj, elements_kind, actual_delete_count);
    844     array->set_elements(heap->empty_fixed_array());
    845     array->set_length(Smi::FromInt(0));
    846     return *result;
    847   }
    848 
    849   Handle<JSArray> result_array =
    850       isolate->factory()->NewJSArray(elements_kind,
    851                                      actual_delete_count,
    852                                      actual_delete_count);
    853 
    854   if (actual_delete_count > 0) {
    855     DisallowHeapAllocation no_gc;
    856     ElementsAccessor* accessor = array->GetElementsAccessor();
    857     accessor->CopyElements(
    858         elms_obj, actual_start, elements_kind,
    859         handle(result_array->elements(), isolate), 0, actual_delete_count);
    860   }
    861 
    862   bool elms_changed = false;
    863   if (item_count < actual_delete_count) {
    864     // Shrink the array.
    865     const bool trim_array = !heap->lo_space()->Contains(*elms_obj) &&
    866       ((actual_start + item_count) <
    867           (len - actual_delete_count - actual_start));
    868     if (trim_array) {
    869       const int delta = actual_delete_count - item_count;
    870 
    871       if (elms_obj->IsFixedDoubleArray()) {
    872         Handle<FixedDoubleArray> elms =
    873             Handle<FixedDoubleArray>::cast(elms_obj);
    874         MoveDoubleElements(*elms, delta, *elms, 0, actual_start);
    875       } else {
    876         Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
    877         DisallowHeapAllocation no_gc;
    878         heap->MoveElements(*elms, delta, 0, actual_start);
    879       }
    880 
    881       if (heap->CanMoveObjectStart(*elms_obj)) {
    882         // On the fast path we move the start of the object in memory.
    883         elms_obj = handle(LeftTrimFixedArray(heap, *elms_obj, delta), isolate);
    884       } else {
    885         // This is the slow path. We are going to move the elements to the left
    886         // by copying them. For trimmed values we store the hole.
    887         if (elms_obj->IsFixedDoubleArray()) {
    888           Handle<FixedDoubleArray> elms =
    889               Handle<FixedDoubleArray>::cast(elms_obj);
    890           MoveDoubleElements(*elms, 0, *elms, delta, len - delta);
    891           elms->FillWithHoles(len - delta, len);
    892         } else {
    893           Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
    894           DisallowHeapAllocation no_gc;
    895           heap->MoveElements(*elms, 0, delta, len - delta);
    896           elms->FillWithHoles(len - delta, len);
    897         }
    898       }
    899       elms_changed = true;
    900     } else {
    901       if (elms_obj->IsFixedDoubleArray()) {
    902         Handle<FixedDoubleArray> elms =
    903             Handle<FixedDoubleArray>::cast(elms_obj);
    904         MoveDoubleElements(*elms, actual_start + item_count,
    905                            *elms, actual_start + actual_delete_count,
    906                            (len - actual_delete_count - actual_start));
    907         elms->FillWithHoles(new_length, len);
    908       } else {
    909         Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
    910         DisallowHeapAllocation no_gc;
    911         heap->MoveElements(*elms, actual_start + item_count,
    912                            actual_start + actual_delete_count,
    913                            (len - actual_delete_count - actual_start));
    914         elms->FillWithHoles(new_length, len);
    915       }
    916     }
    917   } else if (item_count > actual_delete_count) {
    918     Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
    919     // Currently fixed arrays cannot grow too big, so
    920     // we should never hit this case.
    921     ASSERT((item_count - actual_delete_count) <= (Smi::kMaxValue - len));
    922 
    923     // Check if the array needs to grow.
    924     if (new_length > elms->length()) {
    925       // New backing storage is needed.
    926       int capacity = new_length + (new_length >> 1) + 16;
    927       Handle<FixedArray> new_elms =
    928           isolate->factory()->NewUninitializedFixedArray(capacity);
    929 
    930       DisallowHeapAllocation no_gc;
    931 
    932       ElementsKind kind = array->GetElementsKind();
    933       ElementsAccessor* accessor = array->GetElementsAccessor();
    934       if (actual_start > 0) {
    935         // Copy the part before actual_start as is.
    936         accessor->CopyElements(
    937             elms, 0, kind, new_elms, 0, actual_start);
    938       }
    939       accessor->CopyElements(
    940           elms, actual_start + actual_delete_count, kind,
    941           new_elms, actual_start + item_count,
    942           ElementsAccessor::kCopyToEndAndInitializeToHole);
    943 
    944       elms_obj = new_elms;
    945       elms_changed = true;
    946     } else {
    947       DisallowHeapAllocation no_gc;
    948       heap->MoveElements(*elms, actual_start + item_count,
    949                          actual_start + actual_delete_count,
    950                          (len - actual_delete_count - actual_start));
    951     }
    952   }
    953 
    954   if (IsFastDoubleElementsKind(elements_kind)) {
    955     Handle<FixedDoubleArray> elms = Handle<FixedDoubleArray>::cast(elms_obj);
    956     for (int k = actual_start; k < actual_start + item_count; k++) {
    957       Object* arg = args[3 + k - actual_start];
    958       if (arg->IsSmi()) {
    959         elms->set(k, Smi::cast(arg)->value());
    960       } else {
    961         elms->set(k, HeapNumber::cast(arg)->value());
    962       }
    963     }
    964   } else {
    965     Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
    966     DisallowHeapAllocation no_gc;
    967     WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
    968     for (int k = actual_start; k < actual_start + item_count; k++) {
    969       elms->set(k, args[3 + k - actual_start], mode);
    970     }
    971   }
    972 
    973   if (elms_changed) {
    974     array->set_elements(*elms_obj);
    975   }
    976   // Set the length.
    977   array->set_length(Smi::FromInt(new_length));
    978 
    979   return *result_array;
    980 }
    981 
    982 
    983 BUILTIN(ArrayConcat) {
    984   HandleScope scope(isolate);
    985 
    986   int n_arguments = args.length();
    987   int result_len = 0;
    988   ElementsKind elements_kind = GetInitialFastElementsKind();
    989   bool has_double = false;
    990   {
    991     DisallowHeapAllocation no_gc;
    992     Heap* heap = isolate->heap();
    993     Context* native_context = isolate->context()->native_context();
    994     JSObject* array_proto =
    995         JSObject::cast(native_context->array_function()->prototype());
    996     if (!ArrayPrototypeHasNoElements(heap, native_context, array_proto)) {
    997       AllowHeapAllocation allow_allocation;
    998       return CallJsBuiltin(isolate, "ArrayConcatJS", args);
    999     }
   1000 
   1001     // Iterate through all the arguments performing checks
   1002     // and calculating total length.
   1003     bool is_holey = false;
   1004     for (int i = 0; i < n_arguments; i++) {
   1005       Object* arg = args[i];
   1006       if (!arg->IsJSArray() ||
   1007           !JSArray::cast(arg)->HasFastElements() ||
   1008           JSArray::cast(arg)->GetPrototype() != array_proto) {
   1009         AllowHeapAllocation allow_allocation;
   1010         return CallJsBuiltin(isolate, "ArrayConcatJS", args);
   1011       }
   1012       int len = Smi::cast(JSArray::cast(arg)->length())->value();
   1013 
   1014       // We shouldn't overflow when adding another len.
   1015       const int kHalfOfMaxInt = 1 << (kBitsPerInt - 2);
   1016       STATIC_ASSERT(FixedArray::kMaxLength < kHalfOfMaxInt);
   1017       USE(kHalfOfMaxInt);
   1018       result_len += len;
   1019       ASSERT(result_len >= 0);
   1020 
   1021       if (result_len > FixedDoubleArray::kMaxLength) {
   1022         AllowHeapAllocation allow_allocation;
   1023         return CallJsBuiltin(isolate, "ArrayConcatJS", args);
   1024       }
   1025 
   1026       ElementsKind arg_kind = JSArray::cast(arg)->map()->elements_kind();
   1027       has_double = has_double || IsFastDoubleElementsKind(arg_kind);
   1028       is_holey = is_holey || IsFastHoleyElementsKind(arg_kind);
   1029       if (IsMoreGeneralElementsKindTransition(elements_kind, arg_kind)) {
   1030         elements_kind = arg_kind;
   1031       }
   1032     }
   1033     if (is_holey) elements_kind = GetHoleyElementsKind(elements_kind);
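            // For example, assuming the usual kind lattice (SMI -> DOUBLE ->
            // OBJECT, each with a holey variant), concatenating a
            // FAST_SMI_ELEMENTS array with a FAST_DOUBLE_ELEMENTS array yields
            // a FAST_DOUBLE_ELEMENTS result, and any holey input makes the
            // result holey.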
   1034   }
   1035 
   1036   // If a double array is concatenated into a fast elements array, the fast
   1037   // elements array needs to be initialized to contain proper holes, since
   1038   // boxing doubles may cause incremental marking.
   1039   ArrayStorageAllocationMode mode =
   1040       has_double && IsFastObjectElementsKind(elements_kind)
   1041       ? INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE : DONT_INITIALIZE_ARRAY_ELEMENTS;
   1042   Handle<JSArray> result_array =
   1043       isolate->factory()->NewJSArray(elements_kind,
   1044                                      result_len,
   1045                                      result_len,
   1046                                      mode);
   1047   if (result_len == 0) return *result_array;
   1048 
   1049   int j = 0;
   1050   Handle<FixedArrayBase> storage(result_array->elements(), isolate);
   1051   ElementsAccessor* accessor = ElementsAccessor::ForKind(elements_kind);
   1052   for (int i = 0; i < n_arguments; i++) {
   1053     // TODO(ishell): It is crucial to keep |array| as a raw pointer to avoid
   1054     // performance degradation. Revisit this later.
   1055     JSArray* array = JSArray::cast(args[i]);
   1056     int len = Smi::cast(array->length())->value();
   1057     ElementsKind from_kind = array->GetElementsKind();
   1058     if (len > 0) {
   1059       accessor->CopyElements(array, 0, from_kind, storage, j, len);
   1060       j += len;
   1061     }
   1062   }
   1063 
   1064   ASSERT(j == result_len);
   1065 
   1066   return *result_array;
   1067 }
   1068 
   1069 
   1070 // -----------------------------------------------------------------------------
   1071 // Generator and strict mode poison pills
   1072 
   1073 
   1074 BUILTIN(StrictModePoisonPill) {
   1075   HandleScope scope(isolate);
   1076   return isolate->Throw(*isolate->factory()->NewTypeError(
   1077       "strict_poison_pill", HandleVector<Object>(NULL, 0)));
   1078 }
   1079 
   1080 
   1081 BUILTIN(GeneratorPoisonPill) {
   1082   HandleScope scope(isolate);
   1083   return isolate->Throw(*isolate->factory()->NewTypeError(
   1084       "generator_poison_pill", HandleVector<Object>(NULL, 0)));
   1085 }
   1086 
   1087 
   1088 // -----------------------------------------------------------------------------
   1089 //
   1090 
   1091 
   1092 // Searches the hidden prototype chain of the given object for the first
   1093 // object that is an instance of the given type.  If no such object can
   1094 // be found then Heap::null_value() is returned.
   1095 static inline Object* FindHidden(Heap* heap,
   1096                                  Object* object,
   1097                                  FunctionTemplateInfo* type) {
   1098   if (type->IsTemplateFor(object)) return object;
   1099   Object* proto = object->GetPrototype(heap->isolate());
   1100   if (proto->IsJSObject() &&
   1101       JSObject::cast(proto)->map()->is_hidden_prototype()) {
   1102     return FindHidden(heap, proto, type);
   1103   }
   1104   return heap->null_value();
   1105 }
   1106 
   1107 
   1108 // Returns the holder JSObject if the function can legally be called
   1109 // with this receiver.  Returns Heap::null_value() if the call is
   1110 // illegal.  Any arguments that don't fit the expected type is
   1111 // overwritten with undefined.  Note that holder and the arguments are
   1112 // implicitly rewritten with the first object in the hidden prototype
   1113 // chain that actually has the expected type.
   1114 static inline Object* TypeCheck(Heap* heap,
   1115                                 int argc,
   1116                                 Object** argv,
   1117                                 FunctionTemplateInfo* info) {
   1118   Object* recv = argv[0];
   1119   // API calls are only supported with JSObject receivers.
   1120   if (!recv->IsJSObject()) return heap->null_value();
   1121   Object* sig_obj = info->signature();
   1122   if (sig_obj->IsUndefined()) return recv;
   1123   SignatureInfo* sig = SignatureInfo::cast(sig_obj);
   1124   // If necessary, check the receiver
   1125   Object* recv_type = sig->receiver();
   1126   Object* holder = recv;
   1127   if (!recv_type->IsUndefined()) {
   1128     holder = FindHidden(heap, holder, FunctionTemplateInfo::cast(recv_type));
   1129     if (holder == heap->null_value()) return heap->null_value();
   1130   }
   1131   Object* args_obj = sig->args();
   1132   // If there is no argument signature we're done
   1133   if (args_obj->IsUndefined()) return holder;
   1134   FixedArray* args = FixedArray::cast(args_obj);
   1135   int length = args->length();
   1136   if (argc <= length) length = argc - 1;
   1137   for (int i = 0; i < length; i++) {
   1138     Object* argtype = args->get(i);
   1139     if (argtype->IsUndefined()) continue;
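            // The JavaScript arguments are stored at negative offsets below the
            // receiver (argv[0]), so argument i is addressed as argv[-1 - i].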
   1140     Object** arg = &argv[-1 - i];
   1141     Object* current = *arg;
   1142     current = FindHidden(heap, current, FunctionTemplateInfo::cast(argtype));
   1143     if (current == heap->null_value()) current = heap->undefined_value();
   1144     *arg = current;
   1145   }
   1146   return holder;
   1147 }
   1148 
   1149 
   1150 template <bool is_construct>
   1151 MUST_USE_RESULT static Object* HandleApiCallHelper(
   1152     BuiltinArguments<NEEDS_CALLED_FUNCTION> args, Isolate* isolate) {
   1153   ASSERT(is_construct == CalledAsConstructor(isolate));
   1154   Heap* heap = isolate->heap();
   1155 
   1156   HandleScope scope(isolate);
   1157   Handle<JSFunction> function = args.called_function();
   1158   ASSERT(function->shared()->IsApiFunction());
   1159 
   1160   Handle<FunctionTemplateInfo> fun_data(
   1161       function->shared()->get_api_func_data(), isolate);
   1162   if (is_construct) {
   1163     ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
   1164         isolate, fun_data,
   1165         isolate->factory()->ConfigureInstance(
   1166             fun_data, Handle<JSObject>::cast(args.receiver())));
   1167   }
   1168 
   1169   SharedFunctionInfo* shared = function->shared();
   1170   if (shared->strict_mode() == SLOPPY && !shared->native()) {
   1171     Object* recv = args[0];
   1172     ASSERT(!recv->IsNull());
   1173     if (recv->IsUndefined()) {
   1174       args[0] = function->context()->global_object()->global_receiver();
   1175     }
   1176   }
   1177 
   1178   Object* raw_holder = TypeCheck(heap, args.length(), &args[0], *fun_data);
   1179 
   1180   if (raw_holder->IsNull()) {
   1181     // This function cannot be called with the given receiver.  Abort!
   1182     Handle<Object> obj =
   1183         isolate->factory()->NewTypeError(
   1184             "illegal_invocation", HandleVector(&function, 1));
   1185     return isolate->Throw(*obj);
   1186   }
   1187 
   1188   Object* raw_call_data = fun_data->call_code();
   1189   if (!raw_call_data->IsUndefined()) {
   1190     CallHandlerInfo* call_data = CallHandlerInfo::cast(raw_call_data);
   1191     Object* callback_obj = call_data->callback();
   1192     v8::FunctionCallback callback =
   1193         v8::ToCData<v8::FunctionCallback>(callback_obj);
   1194     Object* data_obj = call_data->data();
   1195     Object* result;
   1196 
   1197     LOG(isolate, ApiObjectAccess("call", JSObject::cast(*args.receiver())));
   1198     ASSERT(raw_holder->IsJSObject());
   1199 
   1200     FunctionCallbackArguments custom(isolate,
   1201                                      data_obj,
   1202                                      *function,
   1203                                      raw_holder,
   1204                                      &args[0] - 1,
   1205                                      args.length() - 1,
   1206                                      is_construct);
   1207 
   1208     v8::Handle<v8::Value> value = custom.Call(callback);
   1209     if (value.IsEmpty()) {
   1210       result = heap->undefined_value();
   1211     } else {
   1212       result = *reinterpret_cast<Object**>(*value);
   1213       result->VerifyApiCallResultType();
   1214     }
   1215 
   1216     RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate);
   1217     if (!is_construct || result->IsJSObject()) return result;
   1218   }
   1219 
   1220   return *args.receiver();
   1221 }
   1222 
   1223 
   1224 BUILTIN(HandleApiCall) {
   1225   return HandleApiCallHelper<false>(args, isolate);
   1226 }
   1227 
   1228 
   1229 BUILTIN(HandleApiCallConstruct) {
   1230   return HandleApiCallHelper<true>(args, isolate);
   1231 }
   1232 
   1233 
   1234 // Helper function to handle calls to non-function objects created through the
   1235 // API. The object can be called as either a constructor (using new) or just as
   1236 // a function (without new).
   1237 MUST_USE_RESULT static Object* HandleApiCallAsFunctionOrConstructor(
   1238     Isolate* isolate,
   1239     bool is_construct_call,
   1240     BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
   1241   // Non-functions are never called as constructors. Even if this is an object
   1242 // called as a constructor, the delegate call is not a construct call.
   1243   ASSERT(!CalledAsConstructor(isolate));
   1244   Heap* heap = isolate->heap();
   1245 
   1246   Handle<Object> receiver = args.receiver();
   1247 
   1248   // Get the object called.
   1249   JSObject* obj = JSObject::cast(*receiver);
   1250 
   1251   // Get the invocation callback from the function descriptor that was
   1252   // used to create the called object.
   1253   ASSERT(obj->map()->has_instance_call_handler());
   1254   JSFunction* constructor = JSFunction::cast(obj->map()->constructor());
   1255   ASSERT(constructor->shared()->IsApiFunction());
   1256   Object* handler =
   1257       constructor->shared()->get_api_func_data()->instance_call_handler();
   1258   ASSERT(!handler->IsUndefined());
   1259   CallHandlerInfo* call_data = CallHandlerInfo::cast(handler);
   1260   Object* callback_obj = call_data->callback();
   1261   v8::FunctionCallback callback =
   1262       v8::ToCData<v8::FunctionCallback>(callback_obj);
   1263 
   1264   // Get the data for the call and perform the callback.
   1265   Object* result;
   1266   {
   1267     HandleScope scope(isolate);
   1268     LOG(isolate, ApiObjectAccess("call non-function", obj));
   1269 
   1270     FunctionCallbackArguments custom(isolate,
   1271                                      call_data->data(),
   1272                                      constructor,
   1273                                      obj,
   1274                                      &args[0] - 1,
   1275                                      args.length() - 1,
   1276                                      is_construct_call);
   1277     v8::Handle<v8::Value> value = custom.Call(callback);
   1278     if (value.IsEmpty()) {
   1279       result = heap->undefined_value();
   1280     } else {
   1281       result = *reinterpret_cast<Object**>(*value);
   1282       result->VerifyApiCallResultType();
   1283     }
   1284   }
   1285   // Check for exceptions and return result.
   1286   RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate);
   1287   return result;
   1288 }
   1289 
   1290 
   1291 // Handle calls to non-function objects created through the API. This delegate
   1292 // function is used when the call is a normal function call.
   1293 BUILTIN(HandleApiCallAsFunction) {
   1294   return HandleApiCallAsFunctionOrConstructor(isolate, false, args);
   1295 }
   1296 
   1297 
   1298 // Handle calls to non-function objects created through the API. This delegate
   1299 // function is used when the call is a construct call.
   1300 BUILTIN(HandleApiCallAsConstructor) {
   1301   return HandleApiCallAsFunctionOrConstructor(isolate, true, args);
   1302 }
   1303 
   1304 
static void Generate_LoadIC_Miss(MacroAssembler* masm) {
  LoadIC::GenerateMiss(masm);
}


static void Generate_LoadIC_Normal(MacroAssembler* masm) {
  LoadIC::GenerateNormal(masm);
}


static void Generate_LoadIC_Getter_ForDeopt(MacroAssembler* masm) {
  LoadStubCompiler::GenerateLoadViaGetterForDeopt(masm);
}


static void Generate_LoadIC_Slow(MacroAssembler* masm) {
  LoadIC::GenerateRuntimeGetProperty(masm);
}


static void Generate_KeyedLoadIC_Initialize(MacroAssembler* masm) {
  KeyedLoadIC::GenerateInitialize(masm);
}


static void Generate_KeyedLoadIC_Slow(MacroAssembler* masm) {
  KeyedLoadIC::GenerateRuntimeGetProperty(masm);
}


static void Generate_KeyedLoadIC_Miss(MacroAssembler* masm) {
  KeyedLoadIC::GenerateMiss(masm);
}


static void Generate_KeyedLoadIC_Generic(MacroAssembler* masm) {
  KeyedLoadIC::GenerateGeneric(masm);
}


static void Generate_KeyedLoadIC_String(MacroAssembler* masm) {
  KeyedLoadIC::GenerateString(masm);
}


static void Generate_KeyedLoadIC_PreMonomorphic(MacroAssembler* masm) {
  KeyedLoadIC::GeneratePreMonomorphic(masm);
}


static void Generate_KeyedLoadIC_IndexedInterceptor(MacroAssembler* masm) {
  KeyedLoadIC::GenerateIndexedInterceptor(masm);
}


static void Generate_KeyedLoadIC_SloppyArguments(MacroAssembler* masm) {
  KeyedLoadIC::GenerateSloppyArguments(masm);
}


static void Generate_StoreIC_Slow(MacroAssembler* masm) {
  StoreIC::GenerateSlow(masm);
}


static void Generate_StoreIC_Miss(MacroAssembler* masm) {
  StoreIC::GenerateMiss(masm);
}


static void Generate_StoreIC_Normal(MacroAssembler* masm) {
  StoreIC::GenerateNormal(masm);
}


static void Generate_StoreIC_Setter_ForDeopt(MacroAssembler* masm) {
  StoreStubCompiler::GenerateStoreViaSetterForDeopt(masm);
}


static void Generate_KeyedStoreIC_Generic(MacroAssembler* masm) {
  KeyedStoreIC::GenerateGeneric(masm, SLOPPY);
}


static void Generate_KeyedStoreIC_Generic_Strict(MacroAssembler* masm) {
  KeyedStoreIC::GenerateGeneric(masm, STRICT);
}


static void Generate_KeyedStoreIC_Miss(MacroAssembler* masm) {
  KeyedStoreIC::GenerateMiss(masm);
}


static void Generate_KeyedStoreIC_Slow(MacroAssembler* masm) {
  KeyedStoreIC::GenerateSlow(masm);
}


static void Generate_KeyedStoreIC_Initialize(MacroAssembler* masm) {
  KeyedStoreIC::GenerateInitialize(masm);
}


static void Generate_KeyedStoreIC_Initialize_Strict(MacroAssembler* masm) {
  KeyedStoreIC::GenerateInitialize(masm);
}


static void Generate_KeyedStoreIC_PreMonomorphic(MacroAssembler* masm) {
  KeyedStoreIC::GeneratePreMonomorphic(masm);
}


static void Generate_KeyedStoreIC_PreMonomorphic_Strict(MacroAssembler* masm) {
  KeyedStoreIC::GeneratePreMonomorphic(masm);
}


static void Generate_KeyedStoreIC_SloppyArguments(MacroAssembler* masm) {
  KeyedStoreIC::GenerateSloppyArguments(masm);
}


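// The *_DebugBreak and *_LiveEdit generators below emit the debugger support
// code for the corresponding call sites; each one simply dispatches to
// DebugCodegen.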
static void Generate_CallICStub_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateCallICStubDebugBreak(masm);
}


static void Generate_LoadIC_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateLoadICDebugBreak(masm);
}


static void Generate_StoreIC_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateStoreICDebugBreak(masm);
}


static void Generate_KeyedLoadIC_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateKeyedLoadICDebugBreak(masm);
}


static void Generate_KeyedStoreIC_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateKeyedStoreICDebugBreak(masm);
}


static void Generate_CompareNilIC_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateCompareNilICDebugBreak(masm);
}


static void Generate_Return_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateReturnDebugBreak(masm);
}


static void Generate_CallFunctionStub_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateCallFunctionStubDebugBreak(masm);
}


static void Generate_CallConstructStub_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateCallConstructStubDebugBreak(masm);
}


static void Generate_CallConstructStub_Recording_DebugBreak(
    MacroAssembler* masm) {
  DebugCodegen::GenerateCallConstructStubRecordDebugBreak(masm);
}


static void Generate_Slot_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateSlotDebugBreak(masm);
}


static void Generate_PlainReturn_LiveEdit(MacroAssembler* masm) {
  DebugCodegen::GeneratePlainReturnLiveEdit(masm);
}


static void Generate_FrameDropper_LiveEdit(MacroAssembler* masm) {
  DebugCodegen::GenerateFrameDropperLiveEdit(masm);
}


Builtins::Builtins() : initialized_(false) {
  memset(builtins_, 0, sizeof(builtins_[0]) * builtin_count);
  memset(names_, 0, sizeof(names_[0]) * builtin_count);
}


Builtins::~Builtins() {
}


#define DEF_ENUM_C(name, ignore) FUNCTION_ADDR(Builtin_##name),
Address const Builtins::c_functions_[cfunction_count] = {
  BUILTIN_LIST_C(DEF_ENUM_C)
};
#undef DEF_ENUM_C

#define DEF_JS_NAME(name, ignore) #name,
#define DEF_JS_ARGC(ignore, argc) argc,
const char* const Builtins::javascript_names_[id_count] = {
  BUILTINS_LIST_JS(DEF_JS_NAME)
};

int const Builtins::javascript_argc_[id_count] = {
  BUILTINS_LIST_JS(DEF_JS_ARGC)
};
#undef DEF_JS_NAME
#undef DEF_JS_ARGC

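// Descriptor for a single builtin. Assembler builtins provide only a code
// generator; C++ builtins are entered through Generate_Adaptor and also
// record the address of their C entry point in c_code.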
struct BuiltinDesc {
  byte* generator;
  byte* c_code;
  const char* s_name;  // s_name is used only for generating log information.
  int name;
  Code::Flags flags;
  BuiltinExtraArguments extra_args;
};

#define BUILTIN_FUNCTION_TABLE_INIT { V8_ONCE_INIT, {} }

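// Process-wide table of builtin descriptors, initialized exactly once on
// first access via base::CallOnce.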
class BuiltinFunctionTable {
 public:
  BuiltinDesc* functions() {
    base::CallOnce(&once_, &Builtins::InitBuiltinFunctionTable);
    return functions_;
  }

  base::OnceType once_;
  BuiltinDesc functions_[Builtins::builtin_count + 1];

  friend class Builtins;
};

static BuiltinFunctionTable builtin_function_table =
    BUILTIN_FUNCTION_TABLE_INIT;

// Define array of pointers to generators and C builtin functions.
// We do this in a sort of roundabout way so that we can do the initialization
// within the lexical scope of Builtins:: and within a context where
// Code::Flags names a non-abstract type.
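//
// For illustration only (assuming BUILTIN_LIST_C contains an entry such as
// V(Illegal, NO_EXTRA_ARGUMENTS)), the DEF_FUNCTION_PTR_C expansion below
// fills in one table slot roughly as follows:
//
//   functions->generator = FUNCTION_ADDR(Generate_Adaptor);
//   functions->c_code = FUNCTION_ADDR(Builtin_Illegal);
//   functions->s_name = "Illegal";
//   functions->name = c_Illegal;
//   functions->flags = Code::ComputeFlags(Code::BUILTIN);
//   functions->extra_args = NO_EXTRA_ARGUMENTS;
//   ++functions;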
void Builtins::InitBuiltinFunctionTable() {
  BuiltinDesc* functions = builtin_function_table.functions_;
  functions[builtin_count].generator = NULL;
  functions[builtin_count].c_code = NULL;
  functions[builtin_count].s_name = NULL;
  functions[builtin_count].name = builtin_count;
  functions[builtin_count].flags = static_cast<Code::Flags>(0);
  functions[builtin_count].extra_args = NO_EXTRA_ARGUMENTS;

#define DEF_FUNCTION_PTR_C(aname, aextra_args)                         \
    functions->generator = FUNCTION_ADDR(Generate_Adaptor);            \
    functions->c_code = FUNCTION_ADDR(Builtin_##aname);                \
    functions->s_name = #aname;                                        \
    functions->name = c_##aname;                                       \
    functions->flags = Code::ComputeFlags(Code::BUILTIN);              \
    functions->extra_args = aextra_args;                               \
    ++functions;

#define DEF_FUNCTION_PTR_A(aname, kind, state, extra)                       \
    functions->generator = FUNCTION_ADDR(Generate_##aname);                 \
    functions->c_code = NULL;                                               \
    functions->s_name = #aname;                                             \
    functions->name = k##aname;                                             \
    functions->flags = Code::ComputeFlags(Code::kind,                       \
                                          state,                            \
                                          extra);                           \
    functions->extra_args = NO_EXTRA_ARGUMENTS;                             \
    ++functions;

#define DEF_FUNCTION_PTR_H(aname, kind)                                     \
    functions->generator = FUNCTION_ADDR(Generate_##aname);                 \
    functions->c_code = NULL;                                               \
    functions->s_name = #aname;                                             \
    functions->name = k##aname;                                             \
    functions->flags = Code::ComputeHandlerFlags(Code::kind);               \
    functions->extra_args = NO_EXTRA_ARGUMENTS;                             \
    ++functions;

  BUILTIN_LIST_C(DEF_FUNCTION_PTR_C)
  BUILTIN_LIST_A(DEF_FUNCTION_PTR_A)
  BUILTIN_LIST_H(DEF_FUNCTION_PTR_H)
  BUILTIN_LIST_DEBUG_A(DEF_FUNCTION_PTR_A)

#undef DEF_FUNCTION_PTR_C
#undef DEF_FUNCTION_PTR_A
#undef DEF_FUNCTION_PTR_H
}


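// Generates code for every builtin when create_heap_objects is true and
// installs the resulting Code objects in builtins_. When deserializing from a
// snapshot, the slots are left empty and are filled in later via
// IterateBuiltins.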
void Builtins::SetUp(Isolate* isolate, bool create_heap_objects) {
  ASSERT(!initialized_);

  // Create a scope for the handles in the builtins.
  HandleScope scope(isolate);

  const BuiltinDesc* functions = builtin_function_table.functions();

  // For now we generate builtin adaptor code into a stack-allocated
  // buffer, before copying it into individual code objects. Be careful
  // with alignment; some platforms don't like unaligned code.
#ifdef DEBUG
  // We can generate a lot of debug code on Arm64.
  const size_t buffer_size = 32*KB;
#else
  const size_t buffer_size = 8*KB;
#endif
  union { int force_alignment; byte buffer[buffer_size]; } u;

  // Traverse the list of builtins and generate an adaptor in a
  // separate code object for each one.
  for (int i = 0; i < builtin_count; i++) {
    if (create_heap_objects) {
      MacroAssembler masm(isolate, u.buffer, sizeof u.buffer);
      // Generate the code/adaptor.
      typedef void (*Generator)(MacroAssembler*, int, BuiltinExtraArguments);
      Generator g = FUNCTION_CAST<Generator>(functions[i].generator);
      // We pass all arguments to the generator, but it may not use all of
      // them.  This works because the first arguments are on top of the
      // stack.
      ASSERT(!masm.has_frame());
      g(&masm, functions[i].name, functions[i].extra_args);
      // Move the code into the object heap.
      CodeDesc desc;
      masm.GetCode(&desc);
      Code::Flags flags = functions[i].flags;
      Handle<Code> code =
          isolate->factory()->NewCode(desc, flags, masm.CodeObject());
      // Log the event and add the code to the builtins array.
      PROFILE(isolate,
              CodeCreateEvent(Logger::BUILTIN_TAG, *code, functions[i].s_name));
      GDBJIT(AddCode(GDBJITInterface::BUILTIN, functions[i].s_name, *code));
      builtins_[i] = *code;
#ifdef ENABLE_DISASSEMBLER
      if (FLAG_print_builtin_code) {
        CodeTracer::Scope trace_scope(isolate->GetCodeTracer());
        PrintF(trace_scope.file(), "Builtin: %s\n", functions[i].s_name);
        code->Disassemble(functions[i].s_name, trace_scope.file());
        PrintF(trace_scope.file(), "\n");
      }
#endif
    } else {
      // Deserializing. The values will be filled in during IterateBuiltins.
      builtins_[i] = NULL;
    }
    names_[i] = functions[i].s_name;
  }

  // Mark as initialized.
  initialized_ = true;
}


void Builtins::TearDown() {
  initialized_ = false;
}


void Builtins::IterateBuiltins(ObjectVisitor* v) {
  v->VisitPointers(&builtins_[0], &builtins_[0] + builtin_count);
}


const char* Builtins::Lookup(byte* pc) {
  // May be called during initialization (e.g. from the disassembler).
  if (initialized_) {
    for (int i = 0; i < builtin_count; i++) {
      Code* entry = Code::cast(builtins_[i]);
      if (entry->contains(pc)) {
        return names_[i];
      }
    }
  }
  return NULL;
}


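// Both of the following builtins simply tail-call into the runtime system;
// they serve as the targets of interrupt and stack-overflow checks emitted
// into generated code.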
void Builtins::Generate_InterruptCheck(MacroAssembler* masm) {
  masm->TailCallRuntime(Runtime::kHiddenInterrupt, 0, 1);
}


void Builtins::Generate_StackCheck(MacroAssembler* masm) {
  masm->TailCallRuntime(Runtime::kHiddenStackGuard, 0, 1);
}


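// Define the Builtins::<name>() accessors. Each one returns a Handle<Code>
// whose location is the corresponding slot in builtins_ rather than the code
// object itself, so the handle remains valid if the builtin's code object is
// replaced.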
#define DEFINE_BUILTIN_ACCESSOR_C(name, ignore)               \
Handle<Code> Builtins::name() {                               \
  Code** code_address =                                       \
      reinterpret_cast<Code**>(builtin_address(k##name));     \
  return Handle<Code>(code_address);                          \
}
#define DEFINE_BUILTIN_ACCESSOR_A(name, kind, state, extra) \
Handle<Code> Builtins::name() {                             \
  Code** code_address =                                     \
      reinterpret_cast<Code**>(builtin_address(k##name));   \
  return Handle<Code>(code_address);                        \
}
#define DEFINE_BUILTIN_ACCESSOR_H(name, kind)               \
Handle<Code> Builtins::name() {                             \
  Code** code_address =                                     \
      reinterpret_cast<Code**>(builtin_address(k##name));   \
  return Handle<Code>(code_address);                        \
}
BUILTIN_LIST_C(DEFINE_BUILTIN_ACCESSOR_C)
BUILTIN_LIST_A(DEFINE_BUILTIN_ACCESSOR_A)
BUILTIN_LIST_H(DEFINE_BUILTIN_ACCESSOR_H)
BUILTIN_LIST_DEBUG_A(DEFINE_BUILTIN_ACCESSOR_A)
#undef DEFINE_BUILTIN_ACCESSOR_C
#undef DEFINE_BUILTIN_ACCESSOR_A
#undef DEFINE_BUILTIN_ACCESSOR_H


} }  // namespace v8::internal