// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "api.h"
#include "arguments.h"
#include "bootstrapper.h"
#include "builtins.h"
#include "cpu-profiler.h"
#include "gdb-jit.h"
#include "ic-inl.h"
#include "heap-profiler.h"
#include "mark-compact.h"
#include "stub-cache.h"
#include "vm-state-inl.h"

namespace v8 {
namespace internal {

namespace {

// Arguments object passed to C++ builtins.
template <BuiltinExtraArguments extra_args>
class BuiltinArguments : public Arguments {
 public:
  BuiltinArguments(int length, Object** arguments)
      : Arguments(length, arguments) { }

  Object*& operator[] (int index) {
    ASSERT(index < length());
    return Arguments::operator[](index);
  }

  template <class S> Handle<S> at(int index) {
    ASSERT(index < length());
    return Arguments::at<S>(index);
  }

  Handle<Object> receiver() {
    return Arguments::at<Object>(0);
  }

  Handle<JSFunction> called_function() {
    STATIC_ASSERT(extra_args == NEEDS_CALLED_FUNCTION);
    return Arguments::at<JSFunction>(Arguments::length() - 1);
  }

  // Gets the total number of arguments including the receiver (but
  // excluding extra arguments).
  int length() const {
    STATIC_ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
    return Arguments::length();
  }

#ifdef DEBUG
  void Verify() {
    // Check we have at least the receiver.
    ASSERT(Arguments::length() >= 1);
  }
#endif
};


// Specialize BuiltinArguments for the called function extra argument.

template <>
int BuiltinArguments<NEEDS_CALLED_FUNCTION>::length() const {
  return Arguments::length() - 1;
}

#ifdef DEBUG
template <>
void BuiltinArguments<NEEDS_CALLED_FUNCTION>::Verify() {
  // Check we have at least the receiver and the called function.
  ASSERT(Arguments::length() >= 2);
  // Make sure cast to JSFunction succeeds.
  called_function();
}
#endif


#define DEF_ARG_TYPE(name, spec)                      \
  typedef BuiltinArguments<spec> name##ArgumentsType;
BUILTIN_LIST_C(DEF_ARG_TYPE)
#undef DEF_ARG_TYPE

}  // namespace

// ----------------------------------------------------------------------------
// Support macro for defining builtins in C++.
// ----------------------------------------------------------------------------
//
// A builtin function is defined by writing:
//
//   BUILTIN(name) {
//     ...
//   }
//
// In the body of the builtin function the arguments can be accessed
// through the BuiltinArguments object args.

#ifdef DEBUG

#define BUILTIN(name)                                            \
  MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name(       \
      name##ArgumentsType args, Isolate* isolate);               \
  MUST_USE_RESULT static MaybeObject* Builtin_##name(            \
      int args_length, Object** args_object, Isolate* isolate) { \
    name##ArgumentsType args(args_length, args_object);          \
    args.Verify();                                               \
    return Builtin_Impl_##name(args, isolate);                   \
  }                                                              \
  MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name(       \
      name##ArgumentsType args, Isolate* isolate)

#else  // For release mode.

#define BUILTIN(name)                                            \
  static MaybeObject* Builtin_impl##name(                        \
      name##ArgumentsType args, Isolate* isolate);               \
  static MaybeObject* Builtin_##name(                            \
      int args_length, Object** args_object, Isolate* isolate) { \
    name##ArgumentsType args(args_length, args_object);          \
    return Builtin_impl##name(args, isolate);                    \
  }                                                              \
  static MaybeObject* Builtin_impl##name(                        \
      name##ArgumentsType args, Isolate* isolate)
#endif


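// Returns whether the currently executing C++ builtin was invoked as a
// constructor (i.e. via 'new'), by inspecting the caller's stack frame.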
static inline bool CalledAsConstructor(Isolate* isolate) {
#ifdef DEBUG
  // Calculate the result using a full stack frame iterator and check
  // that the state of the stack is as we assume it to be in the
  // code below.
  StackFrameIterator it(isolate);
  ASSERT(it.frame()->is_exit());
  it.Advance();
  StackFrame* frame = it.frame();
  bool reference_result = frame->is_construct();
#endif
  Address fp = Isolate::c_entry_fp(isolate->thread_local_top());
  // Because we know fp points to an exit frame we can use the relevant
  // part of ExitFrame::ComputeCallerState directly.
  const int kCallerOffset = ExitFrameConstants::kCallerFPOffset;
  Address caller_fp = Memory::Address_at(fp + kCallerOffset);
  // This inlines the part of StackFrame::ComputeType that grabs the
  // type of the current frame.  Note that StackFrame::ComputeType
  // has been specialized for each architecture so if any one of them
  // changes this code has to be changed as well.
  const int kMarkerOffset = StandardFrameConstants::kMarkerOffset;
  const Smi* kConstructMarker = Smi::FromInt(StackFrame::CONSTRUCT);
  Object* marker = Memory::Object_at(caller_fp + kMarkerOffset);
  bool result = (marker == kConstructMarker);
  ASSERT_EQ(result, reference_result);
  return result;
}


// ----------------------------------------------------------------------------

BUILTIN(Illegal) {
  UNREACHABLE();
  return isolate->heap()->undefined_value();  // Make compiler happy.
}


BUILTIN(EmptyFunction) {
  return isolate->heap()->undefined_value();
}


static void MoveDoubleElements(FixedDoubleArray* dst,
                               int dst_index,
                               FixedDoubleArray* src,
                               int src_index,
                               int len) {
  if (len == 0) return;
  OS::MemMove(dst->data_start() + dst_index,
              src->data_start() + src_index,
              len * kDoubleSize);
}


static void FillWithHoles(Heap* heap, FixedArray* dst, int from, int to) {
  ASSERT(dst->map() != heap->fixed_cow_array_map());
  MemsetPointer(dst->data_start() + from, heap->the_hole_value(), to - from);
}


static void FillWithHoles(FixedDoubleArray* dst, int from, int to) {
  for (int i = from; i < to; i++) {
    dst->set_the_hole(i);
  }
}


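// Left-trims the given fixed array in place by |to_trim| entries: the map and
// length are rewritten |to_trim| entries further into the object, a filler
// object covers the freed prefix, and the relocated array is returned. Only
// valid for arrays in new and paged space.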
static FixedArrayBase* LeftTrimFixedArray(Heap* heap,
                                          FixedArrayBase* elms,
                                          int to_trim) {
  Map* map = elms->map();
  int entry_size;
  if (elms->IsFixedArray()) {
    entry_size = kPointerSize;
  } else {
    entry_size = kDoubleSize;
  }
  ASSERT(elms->map() != heap->fixed_cow_array_map());
  // For now this trick is only applied to fixed arrays in new and paged space.
  // In large object space the object's start must coincide with the chunk
  // start, and thus the trick is not applicable.
  ASSERT(!heap->lo_space()->Contains(elms));

  STATIC_ASSERT(FixedArrayBase::kMapOffset == 0);
  STATIC_ASSERT(FixedArrayBase::kLengthOffset == kPointerSize);
  STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);

  Object** former_start = HeapObject::RawField(elms, 0);

  const int len = elms->length();

  if (to_trim * entry_size > FixedArrayBase::kHeaderSize &&
      elms->IsFixedArray() &&
      !heap->new_space()->Contains(elms)) {
    // If we are doing a big trim in old space then we zap the space that was
    // formerly part of the array so that the GC (aided by the card-based
    // remembered set) won't find pointers to new-space there.
    Object** zap = reinterpret_cast<Object**>(elms->address());
    zap++;  // Header of filler must be at least one word so skip that.
    for (int i = 1; i < to_trim; i++) {
      *zap++ = Smi::FromInt(0);
    }
  }
  // Technically in new space this write might be omitted (except for
  // debug mode, which iterates through the heap), but to play it safe
  // we still do it.
  heap->CreateFillerObjectAt(elms->address(), to_trim * entry_size);

  int new_start_index = to_trim * (entry_size / kPointerSize);
  former_start[new_start_index] = map;
  former_start[new_start_index + 1] = Smi::FromInt(len - to_trim);

  // Maintain marking consistency for HeapObjectIterator and
  // IncrementalMarking.
  int size_delta = to_trim * entry_size;
  if (heap->marking()->TransferMark(elms->address(),
                                    elms->address() + size_delta)) {
    MemoryChunk::IncrementLiveBytesFromMutator(elms->address(), -size_delta);
  }

  FixedArrayBase* new_elms = FixedArrayBase::cast(HeapObject::FromAddress(
      elms->address() + size_delta));
  HeapProfiler* profiler = heap->isolate()->heap_profiler();
  if (profiler->is_tracking_object_moves()) {
    profiler->ObjectMoveEvent(elms->address(),
                              new_elms->address(),
                              new_elms->Size());
  }
  return new_elms;
}


static bool ArrayPrototypeHasNoElements(Heap* heap,
                                        Context* native_context,
                                        JSObject* array_proto) {
  // This method depends on the non-writability of the Object and Array
  // prototype fields.
  if (array_proto->elements() != heap->empty_fixed_array()) return false;
  // Object.prototype
  Object* proto = array_proto->GetPrototype();
  if (proto == heap->null_value()) return false;
  array_proto = JSObject::cast(proto);
  if (array_proto != native_context->initial_object_prototype()) return false;
  if (array_proto->elements() != heap->empty_fixed_array()) return false;
  return array_proto->GetPrototype()->IsNull();
}


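// Returns the receiver's elements if the receiver is an unobserved, extensible
// JSArray with writable fast elements (copy-on-write backing stores are made
// writable) that can hold the arguments from |first_added_arg| on,
// transitioning the elements kind if necessary. Returns NULL to signal that
// the caller must fall back to the JavaScript builtin, and may return a
// failure if an allocation or transition fails.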
MUST_USE_RESULT
static inline MaybeObject* EnsureJSArrayWithWritableFastElements(
    Heap* heap, Object* receiver, Arguments* args, int first_added_arg) {
  if (!receiver->IsJSArray()) return NULL;
  JSArray* array = JSArray::cast(receiver);
  if (array->map()->is_observed()) return NULL;
  if (!array->map()->is_extensible()) return NULL;
  HeapObject* elms = array->elements();
  Map* map = elms->map();
  if (map == heap->fixed_array_map()) {
    if (args == NULL || array->HasFastObjectElements()) return elms;
  } else if (map == heap->fixed_cow_array_map()) {
    MaybeObject* maybe_writable_result = array->EnsureWritableFastElements();
    if (args == NULL || array->HasFastObjectElements() ||
        !maybe_writable_result->To(&elms)) {
      return maybe_writable_result;
    }
  } else if (map == heap->fixed_double_array_map()) {
    if (args == NULL) return elms;
  } else {
    return NULL;
  }

  // Need to ensure that the arguments passed in args can be contained in
  // the array.
  int args_length = args->length();
  if (first_added_arg >= args_length) return array->elements();

  ElementsKind origin_kind = array->map()->elements_kind();
  ASSERT(!IsFastObjectElementsKind(origin_kind));
  ElementsKind target_kind = origin_kind;
  int arg_count = args->length() - first_added_arg;
  Object** arguments = args->arguments() - first_added_arg - (arg_count - 1);
  for (int i = 0; i < arg_count; i++) {
    Object* arg = arguments[i];
    if (arg->IsHeapObject()) {
      if (arg->IsHeapNumber()) {
        target_kind = FAST_DOUBLE_ELEMENTS;
      } else {
        target_kind = FAST_ELEMENTS;
        break;
      }
    }
  }
  if (target_kind != origin_kind) {
    MaybeObject* maybe_failure = array->TransitionElementsKind(target_kind);
    if (maybe_failure->IsFailure()) return maybe_failure;
    return array->elements();
  }
  return elms;
}


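// Moving fast elements in C++ (shift/slice/splice) is only allowed when
// FLAG_clever_optimizations is enabled and the receiver's prototype is the
// unmodified Array prototype with no own elements on it or on Object.prototype.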
static inline bool IsJSArrayFastElementMovingAllowed(Heap* heap,
                                                     JSArray* receiver) {
  if (!FLAG_clever_optimizations) return false;
  Context* native_context = heap->isolate()->context()->native_context();
  JSObject* array_proto =
      JSObject::cast(native_context->array_function()->prototype());
  return receiver->GetPrototype() == array_proto &&
         ArrayPrototypeHasNoElements(heap, native_context, array_proto);
}


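// Looks up the named builtin on the native context's builtins object and calls
// it with the original receiver and arguments. Used as the generic fallback
// whenever the fast C++ path cannot handle the receiver.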
MUST_USE_RESULT static MaybeObject* CallJsBuiltin(
    Isolate* isolate,
    const char* name,
    BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
  HandleScope handleScope(isolate);

  Handle<Object> js_builtin =
      GetProperty(Handle<JSObject>(isolate->native_context()->builtins()),
                  name);
  Handle<JSFunction> function = Handle<JSFunction>::cast(js_builtin);
  int argc = args.length() - 1;
  ScopedVector<Handle<Object> > argv(argc);
  for (int i = 0; i < argc; ++i) {
    argv[i] = args.at<Object>(i + 1);
  }
  bool pending_exception;
  Handle<Object> result = Execution::Call(isolate,
                                          function,
                                          args.receiver(),
                                          argc,
                                          argv.start(),
                                          &pending_exception);
  if (pending_exception) return Failure::Exception();
  return *result;
}


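// Array.prototype.push: appends the given arguments to a fast-elements array,
// growing the backing store (roughly 1.5x plus slack) when needed, and returns
// the new length. Falls back to the JS builtin for anything else.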
BUILTIN(ArrayPush) {
  Heap* heap = isolate->heap();
  Object* receiver = *args.receiver();
  FixedArrayBase* elms_obj;
  MaybeObject* maybe_elms_obj =
      EnsureJSArrayWithWritableFastElements(heap, receiver, &args, 1);
  if (maybe_elms_obj == NULL) {
    return CallJsBuiltin(isolate, "ArrayPush", args);
  }
  if (!maybe_elms_obj->To(&elms_obj)) return maybe_elms_obj;

  JSArray* array = JSArray::cast(receiver);
  ASSERT(!array->map()->is_observed());

  ElementsKind kind = array->GetElementsKind();

  if (IsFastSmiOrObjectElementsKind(kind)) {
    FixedArray* elms = FixedArray::cast(elms_obj);

    int len = Smi::cast(array->length())->value();
    int to_add = args.length() - 1;
    if (to_add == 0) {
      return Smi::FromInt(len);
    }
    // Currently fixed arrays cannot grow too big, so
    // we should never hit this case.
    ASSERT(to_add <= (Smi::kMaxValue - len));

    int new_length = len + to_add;

    if (new_length > elms->length()) {
      // New backing storage is needed.
      int capacity = new_length + (new_length >> 1) + 16;
      FixedArray* new_elms;
      MaybeObject* maybe_obj = heap->AllocateUninitializedFixedArray(capacity);
      if (!maybe_obj->To(&new_elms)) return maybe_obj;

      ElementsAccessor* accessor = array->GetElementsAccessor();
      MaybeObject* maybe_failure = accessor->CopyElements(
           NULL, 0, kind, new_elms, 0,
           ElementsAccessor::kCopyToEndAndInitializeToHole, elms_obj);
      ASSERT(!maybe_failure->IsFailure());
      USE(maybe_failure);

      elms = new_elms;
    }

    // Add the provided values.
    DisallowHeapAllocation no_gc;
    WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
    for (int index = 0; index < to_add; index++) {
      elms->set(index + len, args[index + 1], mode);
    }

    if (elms != array->elements()) {
      array->set_elements(elms);
    }

    // Set the length.
    array->set_length(Smi::FromInt(new_length));
    return Smi::FromInt(new_length);
  } else {
    int len = Smi::cast(array->length())->value();
    int elms_len = elms_obj->length();

    int to_add = args.length() - 1;
    if (to_add == 0) {
      return Smi::FromInt(len);
    }
    // Currently fixed arrays cannot grow too big, so
    // we should never hit this case.
    ASSERT(to_add <= (Smi::kMaxValue - len));

    int new_length = len + to_add;

    FixedDoubleArray* new_elms;

    if (new_length > elms_len) {
      // New backing storage is needed.
      int capacity = new_length + (new_length >> 1) + 16;
      MaybeObject* maybe_obj =
          heap->AllocateUninitializedFixedDoubleArray(capacity);
      if (!maybe_obj->To(&new_elms)) return maybe_obj;

      ElementsAccessor* accessor = array->GetElementsAccessor();
      MaybeObject* maybe_failure = accessor->CopyElements(
              NULL, 0, kind, new_elms, 0,
              ElementsAccessor::kCopyToEndAndInitializeToHole, elms_obj);
      ASSERT(!maybe_failure->IsFailure());
      USE(maybe_failure);
    } else {
      // to_add is > 0 and new_length <= elms_len, so elms_obj cannot be the
      // empty_fixed_array.
      new_elms = FixedDoubleArray::cast(elms_obj);
    }

    // Add the provided values.
    DisallowHeapAllocation no_gc;
    int index;
    for (index = 0; index < to_add; index++) {
      Object* arg = args[index + 1];
      new_elms->set(index + len, arg->Number());
    }

    if (new_elms != array->elements()) {
      array->set_elements(new_elms);
    }

    // Set the length.
    array->set_length(Smi::FromInt(new_length));
    return Smi::FromInt(new_length);
  }
}


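// Array.prototype.pop: removes and returns the last element of a fast-elements
// array (reading from the prototype chain when the slot is a hole), or falls
// back to the JS builtin.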
BUILTIN(ArrayPop) {
  Heap* heap = isolate->heap();
  Object* receiver = *args.receiver();
  FixedArrayBase* elms_obj;
  MaybeObject* maybe_elms =
      EnsureJSArrayWithWritableFastElements(heap, receiver, NULL, 0);
  if (maybe_elms == NULL) return CallJsBuiltin(isolate, "ArrayPop", args);
  if (!maybe_elms->To(&elms_obj)) return maybe_elms;

  JSArray* array = JSArray::cast(receiver);
  ASSERT(!array->map()->is_observed());

  int len = Smi::cast(array->length())->value();
  if (len == 0) return heap->undefined_value();

  ElementsAccessor* accessor = array->GetElementsAccessor();
  int new_length = len - 1;
  MaybeObject* maybe_result;
  if (accessor->HasElement(array, array, new_length, elms_obj)) {
    maybe_result = accessor->Get(array, array, new_length, elms_obj);
  } else {
    maybe_result = array->GetPrototype()->GetElement(isolate, len - 1);
  }
  if (maybe_result->IsFailure()) return maybe_result;
  MaybeObject* maybe_failure =
      accessor->SetLength(array, Smi::FromInt(new_length));
  if (maybe_failure->IsFailure()) return maybe_failure;
  return maybe_result;
}


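// Array.prototype.shift: removes and returns the first element, either by
// left-trimming the backing store in place or by moving the remaining elements
// down by one. Falls back to the JS builtin when fast element moving is not
// allowed.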
BUILTIN(ArrayShift) {
  Heap* heap = isolate->heap();
  Object* receiver = *args.receiver();
  FixedArrayBase* elms_obj;
  MaybeObject* maybe_elms_obj =
      EnsureJSArrayWithWritableFastElements(heap, receiver, NULL, 0);
  if (maybe_elms_obj == NULL)
      return CallJsBuiltin(isolate, "ArrayShift", args);
  if (!maybe_elms_obj->To(&elms_obj)) return maybe_elms_obj;

  if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
    return CallJsBuiltin(isolate, "ArrayShift", args);
  }
  JSArray* array = JSArray::cast(receiver);
  ASSERT(!array->map()->is_observed());

  int len = Smi::cast(array->length())->value();
  if (len == 0) return heap->undefined_value();

  // Get first element
  ElementsAccessor* accessor = array->GetElementsAccessor();
  Object* first;
  MaybeObject* maybe_first = accessor->Get(receiver, array, 0, elms_obj);
  if (!maybe_first->To(&first)) return maybe_first;
  if (first->IsTheHole()) {
    first = heap->undefined_value();
  }

  if (!heap->lo_space()->Contains(elms_obj)) {
    array->set_elements(LeftTrimFixedArray(heap, elms_obj, 1));
  } else {
    // Shift the elements.
    if (elms_obj->IsFixedArray()) {
      FixedArray* elms = FixedArray::cast(elms_obj);
      DisallowHeapAllocation no_gc;
      heap->MoveElements(elms, 0, 1, len - 1);
      elms->set(len - 1, heap->the_hole_value());
    } else {
      FixedDoubleArray* elms = FixedDoubleArray::cast(elms_obj);
      MoveDoubleElements(elms, 0, elms, 1, len - 1);
      elms->set_the_hole(len - 1);
    }
  }

  // Set the length.
  array->set_length(Smi::FromInt(len - 1));

  return first;
}


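// Array.prototype.unshift: prepends the given arguments to a fast smi/object
// elements array, growing or shifting the backing store as required, and
// returns the new length. Falls back to the JS builtin otherwise.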
BUILTIN(ArrayUnshift) {
  Heap* heap = isolate->heap();
  Object* receiver = *args.receiver();
  FixedArrayBase* elms_obj;
  MaybeObject* maybe_elms_obj =
      EnsureJSArrayWithWritableFastElements(heap, receiver, NULL, 0);
  if (maybe_elms_obj == NULL)
      return CallJsBuiltin(isolate, "ArrayUnshift", args);
  if (!maybe_elms_obj->To(&elms_obj)) return maybe_elms_obj;

  if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
    return CallJsBuiltin(isolate, "ArrayUnshift", args);
  }
  JSArray* array = JSArray::cast(receiver);
  ASSERT(!array->map()->is_observed());
  if (!array->HasFastSmiOrObjectElements()) {
    return CallJsBuiltin(isolate, "ArrayUnshift", args);
  }
  FixedArray* elms = FixedArray::cast(elms_obj);

  int len = Smi::cast(array->length())->value();
  int to_add = args.length() - 1;
  int new_length = len + to_add;
  // Currently fixed arrays cannot grow too big, so
  // we should never hit this case.
  ASSERT(to_add <= (Smi::kMaxValue - len));

  MaybeObject* maybe_object =
      array->EnsureCanContainElements(&args, 1, to_add,
                                      DONT_ALLOW_DOUBLE_ELEMENTS);
  if (maybe_object->IsFailure()) return maybe_object;

  if (new_length > elms->length()) {
    // New backing storage is needed.
    int capacity = new_length + (new_length >> 1) + 16;
    FixedArray* new_elms;
    MaybeObject* maybe_elms = heap->AllocateUninitializedFixedArray(capacity);
    if (!maybe_elms->To(&new_elms)) return maybe_elms;

    ElementsKind kind = array->GetElementsKind();
    ElementsAccessor* accessor = array->GetElementsAccessor();
    MaybeObject* maybe_failure = accessor->CopyElements(
            NULL, 0, kind, new_elms, to_add,
            ElementsAccessor::kCopyToEndAndInitializeToHole, elms);
    ASSERT(!maybe_failure->IsFailure());
    USE(maybe_failure);

    elms = new_elms;
    array->set_elements(elms);
  } else {
    DisallowHeapAllocation no_gc;
    heap->MoveElements(elms, to_add, 0, len);
  }

  // Add the provided values.
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
  for (int i = 0; i < to_add; i++) {
    elms->set(i, args[i + 1], mode);
  }

  // Set the length.
  array->set_length(Smi::FromInt(new_length));
  return Smi::FromInt(new_length);
}


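// Array.prototype.slice: copies the [start, end) range of a fast-elements
// array (or of an arguments object with fast elements) into a freshly
// allocated array. Falls back to the JS builtin for anything it cannot handle
// safely.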
BUILTIN(ArraySlice) {
  Heap* heap = isolate->heap();
  Object* receiver = *args.receiver();
  FixedArrayBase* elms;
  int len = -1;
  if (receiver->IsJSArray()) {
    JSArray* array = JSArray::cast(receiver);
    if (!IsJSArrayFastElementMovingAllowed(heap, array)) {
      return CallJsBuiltin(isolate, "ArraySlice", args);
    }

    if (array->HasFastElements()) {
      elms = array->elements();
    } else {
      return CallJsBuiltin(isolate, "ArraySlice", args);
    }

    len = Smi::cast(array->length())->value();
  } else {
    // Array.slice(arguments, ...) is quite a common idiom (notably more
    // than 50% of invocations in Web apps).  Handle it in C++ as well.
    Map* arguments_map =
        isolate->context()->native_context()->arguments_boilerplate()->map();

    bool is_arguments_object_with_fast_elements =
        receiver->IsJSObject() &&
        JSObject::cast(receiver)->map() == arguments_map;
    if (!is_arguments_object_with_fast_elements) {
      return CallJsBuiltin(isolate, "ArraySlice", args);
    }
    JSObject* object = JSObject::cast(receiver);

    if (object->HasFastElements()) {
      elms = object->elements();
    } else {
      return CallJsBuiltin(isolate, "ArraySlice", args);
    }
    Object* len_obj = object->InObjectPropertyAt(Heap::kArgumentsLengthIndex);
    if (!len_obj->IsSmi()) {
      return CallJsBuiltin(isolate, "ArraySlice", args);
    }
    len = Smi::cast(len_obj)->value();
    if (len > elms->length()) {
      return CallJsBuiltin(isolate, "ArraySlice", args);
    }
  }

  JSObject* object = JSObject::cast(receiver);

  ASSERT(len >= 0);
  int n_arguments = args.length() - 1;

  // Note the carefully chosen defaults: if an argument is missing, it is
  // undefined, which converts to 0 for relative_start and to len for
  // relative_end.
  int relative_start = 0;
  int relative_end = len;
  if (n_arguments > 0) {
    Object* arg1 = args[1];
    if (arg1->IsSmi()) {
      relative_start = Smi::cast(arg1)->value();
    } else if (arg1->IsHeapNumber()) {
      double start = HeapNumber::cast(arg1)->value();
      if (start < kMinInt || start > kMaxInt) {
        return CallJsBuiltin(isolate, "ArraySlice", args);
      }
      relative_start = std::isnan(start) ? 0 : static_cast<int>(start);
    } else if (!arg1->IsUndefined()) {
      return CallJsBuiltin(isolate, "ArraySlice", args);
    }
    if (n_arguments > 1) {
      Object* arg2 = args[2];
      if (arg2->IsSmi()) {
        relative_end = Smi::cast(arg2)->value();
      } else if (arg2->IsHeapNumber()) {
        double end = HeapNumber::cast(arg2)->value();
        if (end < kMinInt || end > kMaxInt) {
          return CallJsBuiltin(isolate, "ArraySlice", args);
        }
        relative_end = std::isnan(end) ? 0 : static_cast<int>(end);
      } else if (!arg2->IsUndefined()) {
        return CallJsBuiltin(isolate, "ArraySlice", args);
      }
    }
  }

  // ECMA-262, 3rd Edition, Section 15.4.4.10, step 6.
  int k = (relative_start < 0) ? Max(len + relative_start, 0)
                               : Min(relative_start, len);

  // ECMA-262, 3rd Edition, Section 15.4.4.10, step 8.
  int final = (relative_end < 0) ? Max(len + relative_end, 0)
                                 : Min(relative_end, len);

  // Calculate the length of result array.
  int result_len = Max(final - k, 0);

  ElementsKind kind = object->GetElementsKind();
  if (IsHoleyElementsKind(kind)) {
    bool packed = true;
    ElementsAccessor* accessor = ElementsAccessor::ForKind(kind);
    for (int i = k; i < final; i++) {
      if (!accessor->HasElement(object, object, i, elms)) {
        packed = false;
        break;
      }
    }
    if (packed) {
      kind = GetPackedElementsKind(kind);
    } else if (!receiver->IsJSArray()) {
      return CallJsBuiltin(isolate, "ArraySlice", args);
    }
  }

  JSArray* result_array;
  MaybeObject* maybe_array = heap->AllocateJSArrayAndStorage(kind,
                                                             result_len,
                                                             result_len);

  DisallowHeapAllocation no_gc;
  if (result_len == 0) return maybe_array;
  if (!maybe_array->To(&result_array)) return maybe_array;

  ElementsAccessor* accessor = object->GetElementsAccessor();
  MaybeObject* maybe_failure = accessor->CopyElements(
      NULL, k, kind, result_array->elements(), 0, result_len, elms);
  ASSERT(!maybe_failure->IsFailure());
  USE(maybe_failure);

  return result_array;
}

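// Array.prototype.splice: removes actual_delete_count elements starting at
// actual_start, inserts the remaining arguments in their place, shifting or
// trimming the backing store as needed, and returns a new array holding the
// removed elements. Falls back to the JS builtin for receivers it cannot
// handle.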
BUILTIN(ArraySplice) {
  Heap* heap = isolate->heap();
  Object* receiver = *args.receiver();
  FixedArrayBase* elms_obj;
  MaybeObject* maybe_elms =
      EnsureJSArrayWithWritableFastElements(heap, receiver, &args, 3);
  if (maybe_elms == NULL) {
    return CallJsBuiltin(isolate, "ArraySplice", args);
  }
  if (!maybe_elms->To(&elms_obj)) return maybe_elms;

  if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
    return CallJsBuiltin(isolate, "ArraySplice", args);
  }
  JSArray* array = JSArray::cast(receiver);
  ASSERT(!array->map()->is_observed());

  int len = Smi::cast(array->length())->value();

  int n_arguments = args.length() - 1;

  int relative_start = 0;
  if (n_arguments > 0) {
    Object* arg1 = args[1];
    if (arg1->IsSmi()) {
      relative_start = Smi::cast(arg1)->value();
    } else if (arg1->IsHeapNumber()) {
      double start = HeapNumber::cast(arg1)->value();
      if (start < kMinInt || start > kMaxInt) {
        return CallJsBuiltin(isolate, "ArraySplice", args);
      }
      relative_start = std::isnan(start) ? 0 : static_cast<int>(start);
    } else if (!arg1->IsUndefined()) {
      return CallJsBuiltin(isolate, "ArraySplice", args);
    }
  }
  int actual_start = (relative_start < 0) ? Max(len + relative_start, 0)
                                          : Min(relative_start, len);

  // SpiderMonkey, TraceMonkey and JSC treat the case where no delete count is
  // given as a request to delete all the elements from the start, which
  // differs from the case of an explicit undefined delete count.
  // This does not follow ECMA-262, but we do the same for compatibility.
  int actual_delete_count;
  if (n_arguments == 1) {
    ASSERT(len - actual_start >= 0);
    actual_delete_count = len - actual_start;
  } else {
    int value = 0;  // ToInteger(undefined) == 0
    if (n_arguments > 1) {
      Object* arg2 = args[2];
      if (arg2->IsSmi()) {
        value = Smi::cast(arg2)->value();
      } else {
        return CallJsBuiltin(isolate, "ArraySplice", args);
      }
    }
    actual_delete_count = Min(Max(value, 0), len - actual_start);
  }

  ElementsKind elements_kind = array->GetElementsKind();

  int item_count = (n_arguments > 1) ? (n_arguments - 2) : 0;
  int new_length = len - actual_delete_count + item_count;

  // For double mode we do not support changing the length.
  if (new_length > len && IsFastDoubleElementsKind(elements_kind)) {
    return CallJsBuiltin(isolate, "ArraySplice", args);
  }

  if (new_length == 0) {
    MaybeObject* maybe_array = heap->AllocateJSArrayWithElements(
        elms_obj, elements_kind, actual_delete_count);
    if (maybe_array->IsFailure()) return maybe_array;
    array->set_elements(heap->empty_fixed_array());
    array->set_length(Smi::FromInt(0));
    return maybe_array;
  }

  JSArray* result_array = NULL;
  MaybeObject* maybe_array =
      heap->AllocateJSArrayAndStorage(elements_kind,
                                      actual_delete_count,
                                      actual_delete_count);
  if (!maybe_array->To(&result_array)) return maybe_array;

  if (actual_delete_count > 0) {
    DisallowHeapAllocation no_gc;
    ElementsAccessor* accessor = array->GetElementsAccessor();
    MaybeObject* maybe_failure = accessor->CopyElements(
        NULL, actual_start, elements_kind, result_array->elements(),
        0, actual_delete_count, elms_obj);
    // Cannot fail since the origin and target array are of the same elements
    // kind.
    ASSERT(!maybe_failure->IsFailure());
    USE(maybe_failure);
  }

  bool elms_changed = false;
  if (item_count < actual_delete_count) {
    // Shrink the array.
    const bool trim_array = !heap->lo_space()->Contains(elms_obj) &&
      ((actual_start + item_count) <
          (len - actual_delete_count - actual_start));
    if (trim_array) {
      const int delta = actual_delete_count - item_count;

      if (elms_obj->IsFixedDoubleArray()) {
        FixedDoubleArray* elms = FixedDoubleArray::cast(elms_obj);
        MoveDoubleElements(elms, delta, elms, 0, actual_start);
      } else {
        FixedArray* elms = FixedArray::cast(elms_obj);
        DisallowHeapAllocation no_gc;
        heap->MoveElements(elms, delta, 0, actual_start);
      }

      elms_obj = LeftTrimFixedArray(heap, elms_obj, delta);

      elms_changed = true;
    } else {
      if (elms_obj->IsFixedDoubleArray()) {
        FixedDoubleArray* elms = FixedDoubleArray::cast(elms_obj);
        MoveDoubleElements(elms, actual_start + item_count,
                           elms, actual_start + actual_delete_count,
                           (len - actual_delete_count - actual_start));
        FillWithHoles(elms, new_length, len);
      } else {
        FixedArray* elms = FixedArray::cast(elms_obj);
        DisallowHeapAllocation no_gc;
        heap->MoveElements(elms, actual_start + item_count,
                           actual_start + actual_delete_count,
                           (len - actual_delete_count - actual_start));
        FillWithHoles(heap, elms, new_length, len);
      }
    }
  } else if (item_count > actual_delete_count) {
    FixedArray* elms = FixedArray::cast(elms_obj);
    // Currently fixed arrays cannot grow too big, so
    // we should never hit this case.
    ASSERT((item_count - actual_delete_count) <= (Smi::kMaxValue - len));

    // Check if the array needs to grow.
    if (new_length > elms->length()) {
      // New backing storage is needed.
      int capacity = new_length + (new_length >> 1) + 16;
      FixedArray* new_elms;
      MaybeObject* maybe_obj = heap->AllocateUninitializedFixedArray(capacity);
      if (!maybe_obj->To(&new_elms)) return maybe_obj;

      DisallowHeapAllocation no_gc;

      ElementsKind kind = array->GetElementsKind();
      ElementsAccessor* accessor = array->GetElementsAccessor();
      if (actual_start > 0) {
        // Copy the part before actual_start as is.
        MaybeObject* maybe_failure = accessor->CopyElements(
            NULL, 0, kind, new_elms, 0, actual_start, elms);
        ASSERT(!maybe_failure->IsFailure());
        USE(maybe_failure);
      }
      MaybeObject* maybe_failure = accessor->CopyElements(
          NULL, actual_start + actual_delete_count, kind, new_elms,
          actual_start + item_count,
          ElementsAccessor::kCopyToEndAndInitializeToHole, elms);
      ASSERT(!maybe_failure->IsFailure());
      USE(maybe_failure);

      elms_obj = new_elms;
      elms_changed = true;
    } else {
      DisallowHeapAllocation no_gc;
      heap->MoveElements(elms, actual_start + item_count,
                         actual_start + actual_delete_count,
                         (len - actual_delete_count - actual_start));
    }
  }

  if (IsFastDoubleElementsKind(elements_kind)) {
    FixedDoubleArray* elms = FixedDoubleArray::cast(elms_obj);
    for (int k = actual_start; k < actual_start + item_count; k++) {
      Object* arg = args[3 + k - actual_start];
      if (arg->IsSmi()) {
        elms->set(k, Smi::cast(arg)->value());
      } else {
        elms->set(k, HeapNumber::cast(arg)->value());
      }
    }
  } else {
    FixedArray* elms = FixedArray::cast(elms_obj);
    DisallowHeapAllocation no_gc;
    WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
    for (int k = actual_start; k < actual_start + item_count; k++) {
      elms->set(k, args[3 + k - actual_start], mode);
    }
  }

  if (elms_changed) {
    array->set_elements(elms_obj);
  }
  // Set the length.
  array->set_length(Smi::FromInt(new_length));

  return result_array;
}


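// Array.prototype.concat: concatenates fast-elements JSArrays whose prototype
// is the untouched Array prototype into a single new array, using the most
// general elements kind among the inputs. Falls back to the JS builtin
// otherwise.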
BUILTIN(ArrayConcat) {
  Heap* heap = isolate->heap();
  Context* native_context = isolate->context()->native_context();
  JSObject* array_proto =
      JSObject::cast(native_context->array_function()->prototype());
  if (!ArrayPrototypeHasNoElements(heap, native_context, array_proto)) {
    return CallJsBuiltin(isolate, "ArrayConcat", args);
  }

  // Iterate through all the arguments performing checks
  // and calculating total length.
  int n_arguments = args.length();
  int result_len = 0;
  ElementsKind elements_kind = GetInitialFastElementsKind();
  bool has_double = false;
  bool is_holey = false;
  for (int i = 0; i < n_arguments; i++) {
    Object* arg = args[i];
    if (!arg->IsJSArray() ||
        !JSArray::cast(arg)->HasFastElements() ||
        JSArray::cast(arg)->GetPrototype() != array_proto) {
      return CallJsBuiltin(isolate, "ArrayConcat", args);
    }
    int len = Smi::cast(JSArray::cast(arg)->length())->value();

    // We shouldn't overflow when adding another len.
    const int kHalfOfMaxInt = 1 << (kBitsPerInt - 2);
    STATIC_ASSERT(FixedArray::kMaxLength < kHalfOfMaxInt);
    USE(kHalfOfMaxInt);
    result_len += len;
    ASSERT(result_len >= 0);

    if (result_len > FixedDoubleArray::kMaxLength) {
      return CallJsBuiltin(isolate, "ArrayConcat", args);
    }

    ElementsKind arg_kind = JSArray::cast(arg)->map()->elements_kind();
    has_double = has_double || IsFastDoubleElementsKind(arg_kind);
    is_holey = is_holey || IsFastHoleyElementsKind(arg_kind);
    if (IsMoreGeneralElementsKindTransition(elements_kind, arg_kind)) {
      elements_kind = arg_kind;
    }
  }

  if (is_holey) elements_kind = GetHoleyElementsKind(elements_kind);

  // If a double array is concatenated into a fast elements array, the fast
  // elements array needs to be initialized to contain proper holes, since
  // boxing doubles may cause incremental marking.
  ArrayStorageAllocationMode mode =
      has_double && IsFastObjectElementsKind(elements_kind)
      ? INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE : DONT_INITIALIZE_ARRAY_ELEMENTS;
  JSArray* result_array;
  // Allocate result.
  MaybeObject* maybe_array =
      heap->AllocateJSArrayAndStorage(elements_kind,
                                      result_len,
                                      result_len,
                                      mode);
  if (!maybe_array->To(&result_array)) return maybe_array;
  if (result_len == 0) return result_array;

  int j = 0;
  FixedArrayBase* storage = result_array->elements();
  ElementsAccessor* accessor = ElementsAccessor::ForKind(elements_kind);
  for (int i = 0; i < n_arguments; i++) {
    JSArray* array = JSArray::cast(args[i]);
    int len = Smi::cast(array->length())->value();
    ElementsKind from_kind = array->GetElementsKind();
    if (len > 0) {
      MaybeObject* maybe_failure =
          accessor->CopyElements(array, 0, from_kind, storage, j, len);
      if (maybe_failure->IsFailure()) return maybe_failure;
      j += len;
    }
  }

  ASSERT(j == result_len);

  return result_array;
}


// -----------------------------------------------------------------------------
// Strict mode poison pills


BUILTIN(StrictModePoisonPill) {
  HandleScope scope(isolate);
  return isolate->Throw(*isolate->factory()->NewTypeError(
      "strict_poison_pill", HandleVector<Object>(NULL, 0)));
}


// -----------------------------------------------------------------------------
//


// Searches the hidden prototype chain of the given object for the first
// object that is an instance of the given type.  If no such object can
// be found then Heap::null_value() is returned.
static inline Object* FindHidden(Heap* heap,
                                 Object* object,
                                 FunctionTemplateInfo* type) {
  if (type->IsTemplateFor(object)) return object;
  Object* proto = object->GetPrototype(heap->isolate());
  if (proto->IsJSObject() &&
      JSObject::cast(proto)->map()->is_hidden_prototype()) {
    return FindHidden(heap, proto, type);
  }
  return heap->null_value();
}


// Returns the holder JSObject if the function can legally be called
// with this receiver.  Returns Heap::null_value() if the call is
// illegal.  Any arguments that don't fit the expected type are
// overwritten with undefined.  Note that holder and the arguments are
// implicitly rewritten with the first object in the hidden prototype
// chain that actually has the expected type.
static inline Object* TypeCheck(Heap* heap,
                                int argc,
                                Object** argv,
                                FunctionTemplateInfo* info) {
  Object* recv = argv[0];
  // API calls are only supported with JSObject receivers.
  if (!recv->IsJSObject()) return heap->null_value();
  Object* sig_obj = info->signature();
  if (sig_obj->IsUndefined()) return recv;
  SignatureInfo* sig = SignatureInfo::cast(sig_obj);
  // If necessary, check the receiver
  Object* recv_type = sig->receiver();
  Object* holder = recv;
  if (!recv_type->IsUndefined()) {
    holder = FindHidden(heap, holder, FunctionTemplateInfo::cast(recv_type));
    if (holder == heap->null_value()) return heap->null_value();
  }
  Object* args_obj = sig->args();
  // If there is no argument signature we're done
  if (args_obj->IsUndefined()) return holder;
  FixedArray* args = FixedArray::cast(args_obj);
  int length = args->length();
  if (argc <= length) length = argc - 1;
  for (int i = 0; i < length; i++) {
    Object* argtype = args->get(i);
    if (argtype->IsUndefined()) continue;
    Object** arg = &argv[-1 - i];
    Object* current = *arg;
    current = FindHidden(heap, current, FunctionTemplateInfo::cast(argtype));
    if (current == heap->null_value()) current = heap->undefined_value();
    *arg = current;
  }
  return holder;
}


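// Shared implementation for API function calls and construct calls: configures
// the instance when called as a constructor, type-checks the receiver and
// arguments against the function's signature, invokes the C++ callback if one
// is installed, and returns the callback's result (or the receiver for
// construct calls that do not return an object).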
template <bool is_construct>
MUST_USE_RESULT static MaybeObject* HandleApiCallHelper(
    BuiltinArguments<NEEDS_CALLED_FUNCTION> args, Isolate* isolate) {
  ASSERT(is_construct == CalledAsConstructor(isolate));
  Heap* heap = isolate->heap();

  HandleScope scope(isolate);
  Handle<JSFunction> function = args.called_function();
  ASSERT(function->shared()->IsApiFunction());

  FunctionTemplateInfo* fun_data = function->shared()->get_api_func_data();
  if (is_construct) {
    Handle<FunctionTemplateInfo> desc(fun_data, isolate);
    bool pending_exception = false;
    isolate->factory()->ConfigureInstance(
        desc, Handle<JSObject>::cast(args.receiver()), &pending_exception);
    ASSERT(isolate->has_pending_exception() == pending_exception);
    if (pending_exception) return Failure::Exception();
    fun_data = *desc;
  }

  Object* raw_holder = TypeCheck(heap, args.length(), &args[0], fun_data);

  if (raw_holder->IsNull()) {
    // This function cannot be called with the given receiver.  Abort!
    Handle<Object> obj =
        isolate->factory()->NewTypeError(
            "illegal_invocation", HandleVector(&function, 1));
    return isolate->Throw(*obj);
  }

  Object* raw_call_data = fun_data->call_code();
  if (!raw_call_data->IsUndefined()) {
    CallHandlerInfo* call_data = CallHandlerInfo::cast(raw_call_data);
    Object* callback_obj = call_data->callback();
    v8::FunctionCallback callback =
        v8::ToCData<v8::FunctionCallback>(callback_obj);
    Object* data_obj = call_data->data();
    Object* result;

    LOG(isolate, ApiObjectAccess("call", JSObject::cast(*args.receiver())));
    ASSERT(raw_holder->IsJSObject());

    FunctionCallbackArguments custom(isolate,
                                     data_obj,
                                     *function,
                                     raw_holder,
                                     &args[0] - 1,
                                     args.length() - 1,
                                     is_construct);

    v8::Handle<v8::Value> value = custom.Call(callback);
    if (value.IsEmpty()) {
      result = heap->undefined_value();
    } else {
      result = *reinterpret_cast<Object**>(*value);
      result->VerifyApiCallResultType();
    }

    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
    if (!is_construct || result->IsJSObject()) return result;
  }

  return *args.receiver();
}


BUILTIN(HandleApiCall) {
  return HandleApiCallHelper<false>(args, isolate);
}


BUILTIN(HandleApiCallConstruct) {
  return HandleApiCallHelper<true>(args, isolate);
}


// Helper function to handle calls to non-function objects created through the
// API. The object can be called as either a constructor (using new) or just as
// a function (without new).
MUST_USE_RESULT static MaybeObject* HandleApiCallAsFunctionOrConstructor(
    Isolate* isolate,
    bool is_construct_call,
    BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
  // Non-functions are never called as constructors. Even if this is an object
  // called as a constructor the delegate call is not a construct call.
  ASSERT(!CalledAsConstructor(isolate));
  Heap* heap = isolate->heap();

  Handle<Object> receiver = args.receiver();

  // Get the object called.
  JSObject* obj = JSObject::cast(*receiver);

  // Get the invocation callback from the function descriptor that was
  // used to create the called object.
  ASSERT(obj->map()->has_instance_call_handler());
  JSFunction* constructor = JSFunction::cast(obj->map()->constructor());
  ASSERT(constructor->shared()->IsApiFunction());
  Object* handler =
      constructor->shared()->get_api_func_data()->instance_call_handler();
  ASSERT(!handler->IsUndefined());
  CallHandlerInfo* call_data = CallHandlerInfo::cast(handler);
  Object* callback_obj = call_data->callback();
  v8::FunctionCallback callback =
      v8::ToCData<v8::FunctionCallback>(callback_obj);

  // Get the data for the call and perform the callback.
  Object* result;
  {
    HandleScope scope(isolate);
    LOG(isolate, ApiObjectAccess("call non-function", obj));

    FunctionCallbackArguments custom(isolate,
                                     call_data->data(),
                                     constructor,
                                     obj,
                                     &args[0] - 1,
                                     args.length() - 1,
                                     is_construct_call);
    v8::Handle<v8::Value> value = custom.Call(callback);
    if (value.IsEmpty()) {
      result = heap->undefined_value();
    } else {
      result = *reinterpret_cast<Object**>(*value);
      result->VerifyApiCallResultType();
    }
  }
  // Check for exceptions and return result.
  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
  return result;
}


// Handle calls to non-function objects created through the API. This delegate
// function is used when the call is a normal function call.
BUILTIN(HandleApiCallAsFunction) {
  return HandleApiCallAsFunctionOrConstructor(isolate, false, args);
}


// Handle calls to non-function objects created through the API. This delegate
// function is used when the call is a construct call.
BUILTIN(HandleApiCallAsConstructor) {
  return HandleApiCallAsFunctionOrConstructor(isolate, true, args);
}


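// Code generators for the IC builtins below: each one simply emits the
// corresponding IC or stub-compiler code for the builtin of the same name.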
static void Generate_LoadIC_Initialize(MacroAssembler* masm) {
  LoadIC::GenerateInitialize(masm);
}


static void Generate_LoadIC_PreMonomorphic(MacroAssembler* masm) {
  LoadIC::GeneratePreMonomorphic(masm);
}


static void Generate_LoadIC_Miss(MacroAssembler* masm) {
  LoadIC::GenerateMiss(masm);
}


static void Generate_LoadIC_Megamorphic(MacroAssembler* masm) {
  LoadIC::GenerateMegamorphic(masm);
}


static void Generate_LoadIC_Normal(MacroAssembler* masm) {
  LoadIC::GenerateNormal(masm);
}


static void Generate_LoadIC_Getter_ForDeopt(MacroAssembler* masm) {
  LoadStubCompiler::GenerateLoadViaGetter(
      masm, LoadStubCompiler::registers()[0], Handle<JSFunction>());
}


static void Generate_LoadIC_Slow(MacroAssembler* masm) {
  LoadIC::GenerateRuntimeGetProperty(masm);
}


static void Generate_KeyedLoadIC_Initialize(MacroAssembler* masm) {
  KeyedLoadIC::GenerateInitialize(masm);
}


static void Generate_KeyedLoadIC_Slow(MacroAssembler* masm) {
  KeyedLoadIC::GenerateRuntimeGetProperty(masm);
}


static void Generate_KeyedLoadIC_Miss(MacroAssembler* masm) {
  KeyedLoadIC::GenerateMiss(masm);
}


static void Generate_KeyedLoadIC_Generic(MacroAssembler* masm) {
  KeyedLoadIC::GenerateGeneric(masm);
}


static void Generate_KeyedLoadIC_String(MacroAssembler* masm) {
  KeyedLoadIC::GenerateString(masm);
}


static void Generate_KeyedLoadIC_PreMonomorphic(MacroAssembler* masm) {
  KeyedLoadIC::GeneratePreMonomorphic(masm);
}


static void Generate_KeyedLoadIC_IndexedInterceptor(MacroAssembler* masm) {
  KeyedLoadIC::GenerateIndexedInterceptor(masm);
}


static void Generate_KeyedLoadIC_NonStrictArguments(MacroAssembler* masm) {
  KeyedLoadIC::GenerateNonStrictArguments(masm);
}


static void Generate_StoreIC_Slow(MacroAssembler* masm) {
  StoreIC::GenerateSlow(masm);
}


static void Generate_StoreIC_Initialize(MacroAssembler* masm) {
  StoreIC::GenerateInitialize(masm);
}


static void Generate_StoreIC_Initialize_Strict(MacroAssembler* masm) {
  StoreIC::GenerateInitialize(masm);
}


static void Generate_StoreIC_PreMonomorphic(MacroAssembler* masm) {
  StoreIC::GeneratePreMonomorphic(masm);
}


static void Generate_StoreIC_PreMonomorphic_Strict(MacroAssembler* masm) {
  StoreIC::GeneratePreMonomorphic(masm);
}


static void Generate_StoreIC_Miss(MacroAssembler* masm) {
  StoreIC::GenerateMiss(masm);
}


static void Generate_StoreIC_Normal(MacroAssembler* masm) {
  StoreIC::GenerateNormal(masm);
}


static void Generate_StoreIC_Megamorphic(MacroAssembler* masm) {
  StoreIC::GenerateMegamorphic(masm,
                               StoreIC::ComputeExtraICState(kNonStrictMode));
}


static void Generate_StoreIC_Megamorphic_Strict(MacroAssembler* masm) {
  StoreIC::GenerateMegamorphic(masm,
                               StoreIC::ComputeExtraICState(kStrictMode));
}


static void Generate_StoreIC_Setter_ForDeopt(MacroAssembler* masm) {
  StoreStubCompiler::GenerateStoreViaSetter(masm, Handle<JSFunction>());
   1428 }
   1429 
   1430 
   1431 static void Generate_StoreIC_Generic(MacroAssembler* masm) {
   1432   StoreIC::GenerateRuntimeSetProperty(masm, kNonStrictMode);
   1433 }
   1434 
   1435 
   1436 static void Generate_StoreIC_Generic_Strict(MacroAssembler* masm) {
   1437   StoreIC::GenerateRuntimeSetProperty(masm, kStrictMode);
   1438 }
   1439 
   1440 
   1441 static void Generate_KeyedStoreIC_Generic(MacroAssembler* masm) {
   1442   KeyedStoreIC::GenerateGeneric(masm, kNonStrictMode);
   1443 }
   1444 
   1445 
   1446 static void Generate_KeyedStoreIC_Generic_Strict(MacroAssembler* masm) {
   1447   KeyedStoreIC::GenerateGeneric(masm, kStrictMode);
   1448 }
   1449 
   1450 
   1451 static void Generate_KeyedStoreIC_Miss(MacroAssembler* masm) {
   1452   KeyedStoreIC::GenerateMiss(masm);
   1453 }
   1454 
   1455 
   1456 static void Generate_KeyedStoreIC_Slow(MacroAssembler* masm) {
   1457   KeyedStoreIC::GenerateSlow(masm);
   1458 }
   1459 
   1460 
   1461 static void Generate_KeyedStoreIC_Initialize(MacroAssembler* masm) {
   1462   KeyedStoreIC::GenerateInitialize(masm);
   1463 }
   1464 
   1465 
   1466 static void Generate_KeyedStoreIC_Initialize_Strict(MacroAssembler* masm) {
   1467   KeyedStoreIC::GenerateInitialize(masm);
   1468 }
   1469 
   1470 
   1471 static void Generate_KeyedStoreIC_PreMonomorphic(MacroAssembler* masm) {
   1472   KeyedStoreIC::GeneratePreMonomorphic(masm);
   1473 }
   1474 
   1475 
   1476 static void Generate_KeyedStoreIC_PreMonomorphic_Strict(MacroAssembler* masm) {
   1477   KeyedStoreIC::GeneratePreMonomorphic(masm);
   1478 }
   1479 
   1480 
   1481 static void Generate_KeyedStoreIC_NonStrictArguments(MacroAssembler* masm) {
   1482   KeyedStoreIC::GenerateNonStrictArguments(masm);
   1483 }
   1484 
   1485 
   1486 #ifdef ENABLE_DEBUGGER_SUPPORT
   1487 static void Generate_LoadIC_DebugBreak(MacroAssembler* masm) {
   1488   Debug::GenerateLoadICDebugBreak(masm);
   1489 }
   1490 
   1491 
   1492 static void Generate_StoreIC_DebugBreak(MacroAssembler* masm) {
   1493   Debug::GenerateStoreICDebugBreak(masm);
   1494 }
   1495 
   1496 
   1497 static void Generate_KeyedLoadIC_DebugBreak(MacroAssembler* masm) {
   1498   Debug::GenerateKeyedLoadICDebugBreak(masm);
   1499 }
   1500 
   1501 
   1502 static void Generate_KeyedStoreIC_DebugBreak(MacroAssembler* masm) {
   1503   Debug::GenerateKeyedStoreICDebugBreak(masm);
   1504 }
   1505 
   1506 
   1507 static void Generate_CompareNilIC_DebugBreak(MacroAssembler* masm) {
   1508   Debug::GenerateCompareNilICDebugBreak(masm);
   1509 }
   1510 
   1511 
   1512 static void Generate_Return_DebugBreak(MacroAssembler* masm) {
   1513   Debug::GenerateReturnDebugBreak(masm);
   1514 }
   1515 
   1516 
   1517 static void Generate_CallFunctionStub_DebugBreak(MacroAssembler* masm) {
   1518   Debug::GenerateCallFunctionStubDebugBreak(masm);
   1519 }
   1520 
   1521 
   1522 static void Generate_CallFunctionStub_Recording_DebugBreak(
   1523     MacroAssembler* masm) {
   1524   Debug::GenerateCallFunctionStubRecordDebugBreak(masm);
   1525 }
   1526 
   1527 
   1528 static void Generate_CallConstructStub_DebugBreak(MacroAssembler* masm) {
   1529   Debug::GenerateCallConstructStubDebugBreak(masm);
   1530 }
   1531 
   1532 
   1533 static void Generate_CallConstructStub_Recording_DebugBreak(
   1534     MacroAssembler* masm) {
   1535   Debug::GenerateCallConstructStubRecordDebugBreak(masm);
   1536 }
   1537 
   1538 
   1539 static void Generate_Slot_DebugBreak(MacroAssembler* masm) {
   1540   Debug::GenerateSlotDebugBreak(masm);
   1541 }
   1542 
   1543 
   1544 static void Generate_PlainReturn_LiveEdit(MacroAssembler* masm) {
   1545   Debug::GeneratePlainReturnLiveEdit(masm);
   1546 }
   1547 
   1548 
   1549 static void Generate_FrameDropper_LiveEdit(MacroAssembler* masm) {
   1550   Debug::GenerateFrameDropperLiveEdit(masm);
   1551 }
   1552 #endif
   1553 
   1554 
   1555 Builtins::Builtins() : initialized_(false) {
   1556   memset(builtins_, 0, sizeof(builtins_[0]) * builtin_count);
   1557   memset(names_, 0, sizeof(names_[0]) * builtin_count);
   1558 }
   1559 
   1560 
   1561 Builtins::~Builtins() {
   1562 }
   1563 
   1564 
   1565 #define DEF_ENUM_C(name, ignore) FUNCTION_ADDR(Builtin_##name),
   1566 Address const Builtins::c_functions_[cfunction_count] = {
   1567   BUILTIN_LIST_C(DEF_ENUM_C)
   1568 };
   1569 #undef DEF_ENUM_C
   1570 
   1571 #define DEF_JS_NAME(name, ignore) #name,
   1572 #define DEF_JS_ARGC(ignore, argc) argc,
   1573 const char* const Builtins::javascript_names_[id_count] = {
   1574   BUILTINS_LIST_JS(DEF_JS_NAME)
   1575 };
   1576 
   1577 int const Builtins::javascript_argc_[id_count] = {
   1578   BUILTINS_LIST_JS(DEF_JS_ARGC)
   1579 };
   1580 #undef DEF_JS_NAME
   1581 #undef DEF_JS_ARGC
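         // For illustration (the entry names are examples; the authoritative lists
         // live in builtins.h): a BUILTIN_LIST_C entry such as
         //   V(Illegal, NO_EXTRA_ARGUMENTS)
         // contributes FUNCTION_ADDR(Builtin_Illegal) to c_functions_, and a
         // BUILTINS_LIST_JS entry such as V(EQUALS, 1) contributes "EQUALS" to
         // javascript_names_ and 1 to javascript_argc_.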
   1582 
   1583 struct BuiltinDesc {
   1584   byte* generator;
   1585   byte* c_code;
    1586   const char* s_name;  // The name is used only for generating log information.
   1587   int name;
   1588   Code::Flags flags;
   1589   BuiltinExtraArguments extra_args;
   1590 };
   1591 
   1592 #define BUILTIN_FUNCTION_TABLE_INIT { V8_ONCE_INIT, {} }
   1593 
   1594 class BuiltinFunctionTable {
   1595  public:
   1596   BuiltinDesc* functions() {
   1597     CallOnce(&once_, &Builtins::InitBuiltinFunctionTable);
   1598     return functions_;
   1599   }
   1600 
   1601   OnceType once_;
   1602   BuiltinDesc functions_[Builtins::builtin_count + 1];
   1603 
   1604   friend class Builtins;
   1605 };
   1606 
   1607 static BuiltinFunctionTable builtin_function_table =
   1608     BUILTIN_FUNCTION_TABLE_INIT;
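         // functions() fills in functions_ at most once: CallOnce guarantees that
         // InitBuiltinFunctionTable runs exactly one time even if several threads
         // request the table concurrently.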
   1609 
    1610 // Define the array of pointers to generators and C builtin functions.
    1611 // The initialization is done indirectly so that it can run within the
    1612 // lexical scope of Builtins:: and in a context where Code::Flags names a
    1613 // non-abstract type.
   1614 void Builtins::InitBuiltinFunctionTable() {
   1615   BuiltinDesc* functions = builtin_function_table.functions_;
   1616   functions[builtin_count].generator = NULL;
   1617   functions[builtin_count].c_code = NULL;
   1618   functions[builtin_count].s_name = NULL;
   1619   functions[builtin_count].name = builtin_count;
   1620   functions[builtin_count].flags = static_cast<Code::Flags>(0);
   1621   functions[builtin_count].extra_args = NO_EXTRA_ARGUMENTS;
   1622 
   1623 #define DEF_FUNCTION_PTR_C(aname, aextra_args)                         \
   1624     functions->generator = FUNCTION_ADDR(Generate_Adaptor);            \
   1625     functions->c_code = FUNCTION_ADDR(Builtin_##aname);                \
   1626     functions->s_name = #aname;                                        \
   1627     functions->name = c_##aname;                                       \
   1628     functions->flags = Code::ComputeFlags(Code::BUILTIN);              \
   1629     functions->extra_args = aextra_args;                               \
   1630     ++functions;
   1631 
   1632 #define DEF_FUNCTION_PTR_A(aname, kind, state, extra)                       \
   1633     functions->generator = FUNCTION_ADDR(Generate_##aname);                 \
   1634     functions->c_code = NULL;                                               \
   1635     functions->s_name = #aname;                                             \
   1636     functions->name = k##aname;                                             \
   1637     functions->flags = Code::ComputeFlags(Code::kind,                       \
   1638                                           state,                            \
   1639                                           extra);                           \
   1640     functions->extra_args = NO_EXTRA_ARGUMENTS;                             \
   1641     ++functions;
   1642 
   1643 #define DEF_FUNCTION_PTR_H(aname, kind)                                     \
   1644     functions->generator = FUNCTION_ADDR(Generate_##aname);                 \
   1645     functions->c_code = NULL;                                               \
   1646     functions->s_name = #aname;                                             \
   1647     functions->name = k##aname;                                             \
   1648     functions->flags = Code::ComputeFlags(                                  \
   1649         Code::HANDLER, MONOMORPHIC, kNoExtraICState,                        \
   1650         Code::NORMAL, Code::kind);                                          \
   1651     functions->extra_args = NO_EXTRA_ARGUMENTS;                             \
   1652     ++functions;
   1653 
   1654   BUILTIN_LIST_C(DEF_FUNCTION_PTR_C)
   1655   BUILTIN_LIST_A(DEF_FUNCTION_PTR_A)
   1656   BUILTIN_LIST_H(DEF_FUNCTION_PTR_H)
   1657   BUILTIN_LIST_DEBUG_A(DEF_FUNCTION_PTR_A)
   1658 
    1659 #undef DEF_FUNCTION_PTR_C
    1660 #undef DEF_FUNCTION_PTR_A
         #undef DEF_FUNCTION_PTR_H
   1661 }
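         // For illustration of the table that InitBuiltinFunctionTable() above
         // builds (the exact kind/state/extra arguments live in the BUILTIN_LIST_A
         // entry in builtins.h, so the ones shown here are assumptions): an entry
         // such as
         //   V(LoadIC_Initialize, LOAD_IC, UNINITIALIZED, kNoExtraICState)
         // makes DEF_FUNCTION_PTR_A fill one slot with
         //   generator  = FUNCTION_ADDR(Generate_LoadIC_Initialize)
         //   c_code     = NULL
         //   s_name     = "LoadIC_Initialize"
         //   name       = kLoadIC_Initialize
         //   flags      = Code::ComputeFlags(Code::LOAD_IC, UNINITIALIZED,
         //                                   kNoExtraICState)
         //   extra_args = NO_EXTRA_ARGUMENTS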
   1662 
   1663 
   1664 void Builtins::SetUp(Isolate* isolate, bool create_heap_objects) {
   1665   ASSERT(!initialized_);
   1666   Heap* heap = isolate->heap();
   1667 
   1668   // Create a scope for the handles in the builtins.
   1669   HandleScope scope(isolate);
   1670 
   1671   const BuiltinDesc* functions = builtin_function_table.functions();
   1672 
    1673   // For now we generate builtin adaptor code into a stack-allocated
    1674   // buffer before copying it into individual code objects. Be careful
    1675   // with alignment; some platforms don't like unaligned code.
   1676   union { int force_alignment; byte buffer[8*KB]; } u;
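           // The otherwise unused int member only forces int alignment on the
           // byte buffer (a pre-C++11 stand-in for alignas).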
   1677 
   1678   // Traverse the list of builtins and generate an adaptor in a
   1679   // separate code object for each one.
   1680   for (int i = 0; i < builtin_count; i++) {
   1681     if (create_heap_objects) {
   1682       MacroAssembler masm(isolate, u.buffer, sizeof u.buffer);
   1683       // Generate the code/adaptor.
   1684       typedef void (*Generator)(MacroAssembler*, int, BuiltinExtraArguments);
   1685       Generator g = FUNCTION_CAST<Generator>(functions[i].generator);
   1686       // We pass all arguments to the generator, but it may not use all of
   1687       // them.  This works because the first arguments are on top of the
   1688       // stack.
   1689       ASSERT(!masm.has_frame());
   1690       g(&masm, functions[i].name, functions[i].extra_args);
   1691       // Move the code into the object heap.
   1692       CodeDesc desc;
   1693       masm.GetCode(&desc);
    1694       Code::Flags flags = functions[i].flags;
   1695       Object* code = NULL;
   1696       {
   1697         // During startup it's OK to always allocate and defer GC to later.
   1698         // This simplifies things because we don't need to retry.
   1699         AlwaysAllocateScope __scope__;
   1700         { MaybeObject* maybe_code =
   1701               heap->CreateCode(desc, flags, masm.CodeObject());
   1702           if (!maybe_code->ToObject(&code)) {
   1703             v8::internal::V8::FatalProcessOutOfMemory("CreateCode");
   1704           }
   1705         }
   1706       }
   1707       // Log the event and add the code to the builtins array.
   1708       PROFILE(isolate,
   1709               CodeCreateEvent(Logger::BUILTIN_TAG,
   1710                               Code::cast(code),
   1711                               functions[i].s_name));
   1712       GDBJIT(AddCode(GDBJITInterface::BUILTIN,
   1713                      functions[i].s_name,
   1714                      Code::cast(code)));
   1715       builtins_[i] = code;
   1716 #ifdef ENABLE_DISASSEMBLER
   1717       if (FLAG_print_builtin_code) {
   1718         CodeTracer::Scope trace_scope(isolate->GetCodeTracer());
   1719         PrintF(trace_scope.file(), "Builtin: %s\n", functions[i].s_name);
   1720         Code::cast(code)->Disassemble(functions[i].s_name, trace_scope.file());
   1721         PrintF(trace_scope.file(), "\n");
   1722       }
   1723 #endif
   1724     } else {
   1725       // Deserializing. The values will be filled in during IterateBuiltins.
   1726       builtins_[i] = NULL;
   1727     }
   1728     names_[i] = functions[i].s_name;
   1729   }
   1730 
   1731   // Mark as initialized.
   1732   initialized_ = true;
   1733 }
   1734 
   1735 
   1736 void Builtins::TearDown() {
   1737   initialized_ = false;
   1738 }
   1739 
   1740 
   1741 void Builtins::IterateBuiltins(ObjectVisitor* v) {
   1742   v->VisitPointers(&builtins_[0], &builtins_[0] + builtin_count);
   1743 }
   1744 
   1745 
   1746 const char* Builtins::Lookup(byte* pc) {
    1747   // May be called during initialization (e.g. by the disassembler).
   1748   if (initialized_) {
   1749     for (int i = 0; i < builtin_count; i++) {
   1750       Code* entry = Code::cast(builtins_[i]);
   1751       if (entry->contains(pc)) {
   1752         return names_[i];
   1753       }
   1754     }
   1755   }
   1756   return NULL;
   1757 }
   1758 
   1759 
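         // Both stubs below just tail-call into the runtime; the trailing
         // arguments to TailCallRuntime are the JS argument count (0) and the
         // result size in words (1).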
   1760 void Builtins::Generate_InterruptCheck(MacroAssembler* masm) {
   1761   masm->TailCallRuntime(Runtime::kInterrupt, 0, 1);
   1762 }
   1763 
   1764 
   1765 void Builtins::Generate_StackCheck(MacroAssembler* masm) {
   1766   masm->TailCallRuntime(Runtime::kStackGuard, 0, 1);
   1767 }
   1768 
   1769 
   1770 #define DEFINE_BUILTIN_ACCESSOR_C(name, ignore)               \
   1771 Handle<Code> Builtins::name() {                               \
   1772   Code** code_address =                                       \
   1773       reinterpret_cast<Code**>(builtin_address(k##name));     \
   1774   return Handle<Code>(code_address);                          \
   1775 }
   1776 #define DEFINE_BUILTIN_ACCESSOR_A(name, kind, state, extra) \
   1777 Handle<Code> Builtins::name() {                             \
   1778   Code** code_address =                                     \
   1779       reinterpret_cast<Code**>(builtin_address(k##name));   \
   1780   return Handle<Code>(code_address);                        \
   1781 }
   1782 #define DEFINE_BUILTIN_ACCESSOR_H(name, kind)               \
   1783 Handle<Code> Builtins::name() {                             \
   1784   Code** code_address =                                     \
   1785       reinterpret_cast<Code**>(builtin_address(k##name));   \
   1786   return Handle<Code>(code_address);                        \
   1787 }
   1788 BUILTIN_LIST_C(DEFINE_BUILTIN_ACCESSOR_C)
   1789 BUILTIN_LIST_A(DEFINE_BUILTIN_ACCESSOR_A)
   1790 BUILTIN_LIST_H(DEFINE_BUILTIN_ACCESSOR_H)
   1791 BUILTIN_LIST_DEBUG_A(DEFINE_BUILTIN_ACCESSOR_A)
    1792 #undef DEFINE_BUILTIN_ACCESSOR_C
    1793 #undef DEFINE_BUILTIN_ACCESSOR_A
         #undef DEFINE_BUILTIN_ACCESSOR_H
   1794 
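         // For illustration, the generated accessors are used as, e.g.,
         //   Handle<Code> code = isolate->builtins()->LoadIC_Initialize();
         // (the builtin name is just an example).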
   1795 
   1796 } }  // namespace v8::internal
   1797