// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"

#include "api.h"
#include "arguments.h"
#include "bootstrapper.h"
#include "builtins.h"
#include "cpu-profiler.h"
#include "gdb-jit.h"
#include "ic-inl.h"
#include "heap-profiler.h"
#include "mark-compact.h"
#include "stub-cache.h"
#include "vm-state-inl.h"

namespace v8 {
namespace internal {

namespace {

// Arguments object passed to C++ builtins.  Wraps the raw argument block
// handed over by the calling stub and adds (debug-checked) bounds on every
// access.  The template parameter records which extra trailing arguments,
// if any, were pushed beyond the receiver and the user-visible arguments.
template <BuiltinExtraArguments extra_args>
class BuiltinArguments : public Arguments {
 public:
  BuiltinArguments(int length, Object** arguments)
      : Arguments(length, arguments) { }

  // Raw element access, bounds-checked in debug builds only.
  Object*& operator[] (int index) {
    ASSERT(index < length());
    return Arguments::operator[](index);
  }

  // Handlified element access, bounds-checked in debug builds only.
  template <class S> Handle<S> at(int index) {
    ASSERT(index < length());
    return Arguments::at<S>(index);
  }

  // The receiver always occupies slot 0.
  Handle<Object> receiver() {
    return Arguments::at<Object>(0);
  }

  // The called function, when present, sits in the last slot; only valid
  // for the NEEDS_CALLED_FUNCTION instantiation.
  Handle<JSFunction> called_function() {
    STATIC_ASSERT(extra_args == NEEDS_CALLED_FUNCTION);
    return Arguments::at<JSFunction>(Arguments::length() - 1);
  }

  // Gets the total number of arguments including the receiver (but
  // excluding extra arguments).
  int length() const {
    STATIC_ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
    return Arguments::length();
  }

#ifdef DEBUG
  void Verify() {
    // Check we have at least the receiver.
    ASSERT(Arguments::length() >= 1);
  }
#endif
};


// Specialize BuiltinArguments for the called function extra argument.

// length() excludes the trailing called-function slot.
template <>
int BuiltinArguments<NEEDS_CALLED_FUNCTION>::length() const {
  return Arguments::length() - 1;
}

#ifdef DEBUG
template <>
void BuiltinArguments<NEEDS_CALLED_FUNCTION>::Verify() {
  // Check we have at least the receiver and the called function.
  ASSERT(Arguments::length() >= 2);
  // Make sure cast to JSFunction succeeds.
  called_function();
}
#endif


// For every C++ builtin listed in BUILTIN_LIST_C, typedef the matching
// BuiltinArguments instantiation as <Name>ArgumentsType.
#define DEF_ARG_TYPE(name, spec) \
  typedef BuiltinArguments<spec> name##ArgumentsType;
BUILTIN_LIST_C(DEF_ARG_TYPE)
#undef DEF_ARG_TYPE

}  // namespace

// ----------------------------------------------------------------------------
// Support macro for defining builtins in C++.
// ----------------------------------------------------------------------------
//
// A builtin function is defined by writing:
//
//   BUILTIN(name) {
//     ...
//   }
//
// In the body of the builtin function the arguments can be accessed
// through the BuiltinArguments object args.

#ifdef DEBUG

// Debug variant: a wrapper verifies the argument block before dispatching
// to the actual implementation Builtin_Impl_<name>.
#define BUILTIN(name)                                            \
  MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name(       \
      name##ArgumentsType args, Isolate* isolate);               \
  MUST_USE_RESULT static MaybeObject* Builtin_##name(            \
      int args_length, Object** args_object, Isolate* isolate) { \
    name##ArgumentsType args(args_length, args_object);          \
    ASSERT(isolate == Isolate::Current());                       \
    args.Verify();                                               \
    return Builtin_Impl_##name(args, isolate);                   \
  }                                                              \
  MUST_USE_RESULT static MaybeObject* Builtin_impl##name(        \
      name##ArgumentsType args, Isolate* isolate)

#else  // For release mode.

#define BUILTIN(name)                                            \
  static MaybeObject* Builtin_impl##name(                        \
      name##ArgumentsType args, Isolate* isolate);               \
  static MaybeObject* Builtin_##name(                            \
      int args_length, Object** args_object, Isolate* isolate) { \
    name##ArgumentsType args(args_length, args_object);          \
    return Builtin_impl##name(args, isolate);                    \
  }                                                              \
  static MaybeObject* Builtin_impl##name(                        \
      name##ArgumentsType args, Isolate* isolate)
#endif


// Returns true if the currently executing builtin was invoked as a
// constructor (i.e. through a construct frame).
static inline bool CalledAsConstructor(Isolate* isolate) {
#ifdef DEBUG
  // Calculate the result using a full stack frame iterator and check
  // that the state of the stack is as we assume it to be in the
  // code below.
  StackFrameIterator it(isolate);
  ASSERT(it.frame()->is_exit());
  it.Advance();
  StackFrame* frame = it.frame();
  bool reference_result = frame->is_construct();
#endif
  Address fp = Isolate::c_entry_fp(isolate->thread_local_top());
  // Because we know fp points to an exit frame we can use the relevant
  // part of ExitFrame::ComputeCallerState directly.
  const int kCallerOffset = ExitFrameConstants::kCallerFPOffset;
  Address caller_fp = Memory::Address_at(fp + kCallerOffset);
  // This inlines the part of StackFrame::ComputeType that grabs the
  // type of the current frame.  Note that StackFrame::ComputeType
  // has been specialized for each architecture so if any one of them
  // changes this code has to be changed as well.
  const int kMarkerOffset = StandardFrameConstants::kMarkerOffset;
  const Smi* kConstructMarker = Smi::FromInt(StackFrame::CONSTRUCT);
  Object* marker = Memory::Object_at(caller_fp + kMarkerOffset);
  bool result = (marker == kConstructMarker);
  // In debug builds, cross-check the fast frame-marker probe against the
  // full frame-iterator answer computed above.
  ASSERT_EQ(result, reference_result);
  return result;
}


// ----------------------------------------------------------------------------

// Filler for builtin table slots that must never be executed.
BUILTIN(Illegal) {
  UNREACHABLE();
  return isolate->heap()->undefined_value();  // Make compiler happy.
}


// A function that does nothing and returns undefined.
BUILTIN(EmptyFunction) {
  return isolate->heap()->undefined_value();
}


// Common body of the generic (Internal)Array constructor builtins.
// 'constructor' is the array function whose instances are created; the
// remaining args (past the receiver) are the constructor call arguments.
static MaybeObject* ArrayCodeGenericCommon(Arguments* args,
                                           Isolate* isolate,
                                           JSFunction* constructor) {
  ASSERT(args->length() >= 1);
  Heap* heap = isolate->heap();
  isolate->counters()->array_function_runtime()->Increment();

  JSArray* array;
  if (CalledAsConstructor(isolate)) {
    // Called as a constructor: the receiver is the freshly allocated array.
    array = JSArray::cast((*args)[0]);
    // Initialize elements and length in case later allocations fail so that
    // the array object is initialized in a valid state.
    MaybeObject* maybe_array = array->Initialize(0);
    if (maybe_array->IsFailure()) return maybe_array;

    AllocationMemento* memento = AllocationMemento::FindForJSObject(array);
    if (memento != NULL && memento->IsValid()) {
      AllocationSite* site = memento->GetAllocationSite();
      ElementsKind to_kind = site->GetElementsKind();
      if (IsMoreGeneralElementsKindTransition(array->GetElementsKind(),
                                              to_kind)) {
        // We have advice that we should change the elements kind.
        if (FLAG_trace_track_allocation_sites) {
          PrintF("AllocationSite: pre-transitioning array %p(%s->%s)\n",
                 reinterpret_cast<void*>(array),
                 ElementsKindToString(array->GetElementsKind()),
                 ElementsKindToString(to_kind));
        }

        maybe_array = array->TransitionElementsKind(to_kind);
        if (maybe_array->IsFailure()) return maybe_array;
      }
    }

    if (!FLAG_smi_only_arrays) {
      // Without smi-only arrays, jump straight to the terminal fast map.
      Context* native_context = isolate->context()->native_context();
      if (array->GetElementsKind() == GetInitialFastElementsKind() &&
          !native_context->js_array_maps()->IsUndefined()) {
        FixedArray* map_array =
            FixedArray::cast(native_context->js_array_maps());
        array->set_map(Map::cast(map_array->
                                 get(TERMINAL_FAST_ELEMENTS_KIND)));
      }
    }
  } else {
    // Allocate the JS Array
    MaybeObject* maybe_obj = heap->AllocateJSObject(constructor);
    if (!maybe_obj->To(&array)) return maybe_obj;
  }

  // Strip the receiver off the front so only the constructor call
  // arguments remain.
  Arguments adjusted_arguments(args->length() - 1, args->arguments() - 1);
  ASSERT(adjusted_arguments.length() < 1 ||
         adjusted_arguments[0] == (*args)[1]);
  return ArrayConstructInitializeElements(array, &adjusted_arguments);
}


BUILTIN(InternalArrayCodeGeneric) {
  return ArrayCodeGenericCommon(
      &args,
      isolate,
      isolate->context()->native_context()->internal_array_function());
}


BUILTIN(ArrayCodeGeneric) {
  return ArrayCodeGenericCommon(
      &args,
      isolate,
      isolate->context()->native_context()->array_function());
}


// Moves 'len' doubles from src[src_index] to dst[dst_index]; uses memmove
// semantics so overlapping source and destination ranges are safe.
static void MoveDoubleElements(FixedDoubleArray* dst,
                               int dst_index,
                               FixedDoubleArray* src,
                               int src_index,
                               int len) {
  if (len == 0) return;
  OS::MemMove(dst->data_start() + dst_index,
              src->data_start() + src_index,
              len * kDoubleSize);
}


// Fills dst[from..to) with the hole value.  The backing store must be
// writable (not copy-on-write).
static void FillWithHoles(Heap* heap, FixedArray* dst, int from, int to) {
  ASSERT(dst->map() != heap->fixed_cow_array_map());
  MemsetPointer(dst->data_start() + from, heap->the_hole_value(), to - from);
}


// Double-array overload of FillWithHoles.
static void FillWithHoles(FixedDoubleArray* dst, int from, int to) {
  for (int i = from; i < to; i++) {
    dst->set_the_hole(i);
  }
}


// Removes 'to_trim' leading elements from 'elms' in place: the array
// header (map + length) is rewritten 'to_trim' entries further along and
// a filler object is left over the vacated prefix.  Returns the relocated
// array.  Must not be used on copy-on-write or large-object-space arrays.
static FixedArrayBase* LeftTrimFixedArray(Heap* heap,
                                          FixedArrayBase* elms,
                                          int to_trim) {
  Map* map = elms->map();
  int entry_size;
  if (elms->IsFixedArray()) {
    entry_size = kPointerSize;
  } else {
    entry_size = kDoubleSize;
  }
  ASSERT(elms->map() != HEAP->fixed_cow_array_map());
  // For now this trick is only applied to fixed arrays in new and paged space.
  // In large object space the object's start must coincide with chunk
  // and thus the trick is just not applicable.
  ASSERT(!HEAP->lo_space()->Contains(elms));

  STATIC_ASSERT(FixedArrayBase::kMapOffset == 0);
  STATIC_ASSERT(FixedArrayBase::kLengthOffset == kPointerSize);
  STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);

  Object** former_start = HeapObject::RawField(elms, 0);

  const int len = elms->length();

  if (to_trim * entry_size > FixedArrayBase::kHeaderSize &&
      elms->IsFixedArray() &&
      !heap->new_space()->Contains(elms)) {
    // If we are doing a big trim in old space then we zap the space that was
    // formerly part of the array so that the GC (aided by the card-based
    // remembered set) won't find pointers to new-space there.
    Object** zap = reinterpret_cast<Object**>(elms->address());
    zap++;  // Header of filler must be at least one word so skip that.
    for (int i = 1; i < to_trim; i++) {
      *zap++ = Smi::FromInt(0);
    }
  }
  // Technically in new space this write might be omitted (except for
  // debug mode which iterates through the heap), but to play safer
  // we still do it.
  heap->CreateFillerObjectAt(elms->address(), to_trim * entry_size);

  // Rebuild the array header (map, then length) at the new start.
  int new_start_index = to_trim * (entry_size / kPointerSize);
  former_start[new_start_index] = map;
  former_start[new_start_index + 1] = Smi::FromInt(len - to_trim);

  // Maintain marking consistency for HeapObjectIterator and
  // IncrementalMarking.
  int size_delta = to_trim * entry_size;
  if (heap->marking()->TransferMark(elms->address(),
                                    elms->address() + size_delta)) {
    MemoryChunk::IncrementLiveBytesFromMutator(elms->address(), -size_delta);
  }

  HEAP_PROFILE(heap, ObjectMoveEvent(elms->address(),
                                     elms->address() + size_delta));
  return FixedArrayBase::cast(HeapObject::FromAddress(
      elms->address() + to_trim * entry_size));
}


// Returns true if Array.prototype and Object.prototype contain no
// elements, so indexed loads cannot be satisfied from the prototype chain.
static bool ArrayPrototypeHasNoElements(Heap* heap,
                                        Context* native_context,
                                        JSObject* array_proto) {
  // This method depends on non writability of Object and Array prototype
  // fields.
  if (array_proto->elements() != heap->empty_fixed_array()) return false;
  // Object.prototype
  Object* proto = array_proto->GetPrototype();
  if (proto == heap->null_value()) return false;
  array_proto = JSObject::cast(proto);
  if (array_proto != native_context->initial_object_prototype()) return false;
  if (array_proto->elements() != heap->empty_fixed_array()) return false;
  return array_proto->GetPrototype()->IsNull();
}


// Returns the receiver's elements when it is a JSArray with writable fast
// elements, making a copy-on-write store writable and transitioning the
// elements kind if the arguments to be added (starting at first_added_arg)
// require it.  Returns NULL when the caller must fall back to the generic
// JavaScript builtin, or a failure from any attempted allocation.
MUST_USE_RESULT
static inline MaybeObject* EnsureJSArrayWithWritableFastElements(
    Heap* heap, Object* receiver, Arguments* args, int first_added_arg) {
  if (!receiver->IsJSArray()) return NULL;
  JSArray* array = JSArray::cast(receiver);
  HeapObject* elms = array->elements();
  Map* map = elms->map();
  if (map == heap->fixed_array_map()) {
    if (args == NULL || array->HasFastObjectElements()) return elms;
  } else if (map == heap->fixed_cow_array_map()) {
    // Copy-on-write store: make a private writable copy before mutation.
    MaybeObject* maybe_writable_result = array->EnsureWritableFastElements();
    if (args == NULL || array->HasFastObjectElements() ||
        !maybe_writable_result->To(&elms)) {
      return maybe_writable_result;
    }
  } else if (map == heap->fixed_double_array_map()) {
    if (args == NULL) return elms;
  } else {
    return NULL;
  }

  // Need to ensure that the arguments passed in args can be contained in
  // the array.
  int args_length = args->length();
  if (first_added_arg >= args_length) return array->elements();

  ElementsKind origin_kind = array->map()->elements_kind();
  ASSERT(!IsFastObjectElementsKind(origin_kind));
  ElementsKind target_kind = origin_kind;
  int arg_count = args->length() - first_added_arg;
  // NOTE(review): the subtraction assumes argument slots are laid out at
  // descending addresses — verify against the Arguments accessors.
  Object** arguments = args->arguments() - first_added_arg - (arg_count - 1);
  for (int i = 0; i < arg_count; i++) {
    Object* arg = arguments[i];
    if (arg->IsHeapObject()) {
      // Heap numbers only need doubles; any other heap object forces the
      // fully general FAST_ELEMENTS kind, so we can stop scanning.
      if (arg->IsHeapNumber()) {
        target_kind = FAST_DOUBLE_ELEMENTS;
      } else {
        target_kind = FAST_ELEMENTS;
        break;
      }
    }
  }
  if (target_kind != origin_kind) {
    MaybeObject* maybe_failure = array->TransitionElementsKind(target_kind);
    if (maybe_failure->IsFailure()) return maybe_failure;
    return array->elements();
  }
  return elms;
}


// Moving elements of a fast array wholesale is only allowed when the
// receiver's prototype is the unmodified Array.prototype and the prototype
// chain holds no elements that could shadow the moved indices.
static inline bool IsJSArrayFastElementMovingAllowed(Heap* heap,
                                                     JSArray* receiver) {
  if (!FLAG_clever_optimizations) return false;
  Context* native_context = heap->isolate()->context()->native_context();
  JSObject* array_proto =
      JSObject::cast(native_context->array_function()->prototype());
  return receiver->GetPrototype() == array_proto &&
         ArrayPrototypeHasNoElements(heap, native_context, array_proto);
}


// Slow-path fallback: invoke the JavaScript builtin of the given name with
// the same receiver and arguments, propagating any pending exception.
MUST_USE_RESULT static MaybeObject* CallJsBuiltin(
    Isolate* isolate,
    const char* name,
    BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
  HandleScope handleScope(isolate);

  Handle<Object> js_builtin =
      GetProperty(Handle<JSObject>(isolate->native_context()->builtins()),
                  name);
  Handle<JSFunction> function = Handle<JSFunction>::cast(js_builtin);
  int argc = args.length() - 1;
  ScopedVector<Handle<Object> > argv(argc);
  for (int i = 0; i < argc; ++i) {
    argv[i] = args.at<Object>(i + 1);
  }
  bool pending_exception;
  Handle<Object> result = Execution::Call(function,
                                          args.receiver(),
                                          argc,
                                          argv.start(),
                                          &pending_exception);
  if (pending_exception) return Failure::Exception();
  return *result;
}


// Array.prototype.push fast path.  Handles fast smi/object and fast
// double arrays in place; everything else falls back to the JS builtin.
BUILTIN(ArrayPush) {
  Heap* heap = isolate->heap();
  Object* receiver = *args.receiver();
  FixedArrayBase* elms_obj;
  MaybeObject* maybe_elms_obj =
      EnsureJSArrayWithWritableFastElements(heap, receiver, &args, 1);
  if (maybe_elms_obj == NULL) {
    return CallJsBuiltin(isolate, "ArrayPush", args);
  }
  if (!maybe_elms_obj->To(&elms_obj)) return maybe_elms_obj;

  JSArray* array = JSArray::cast(receiver);
  ASSERT(!array->map()->is_observed());

  ElementsKind kind = array->GetElementsKind();

  if (IsFastSmiOrObjectElementsKind(kind)) {
    FixedArray* elms = FixedArray::cast(elms_obj);

    int len = Smi::cast(array->length())->value();
    int to_add = args.length() - 1;
    if (to_add == 0) {
      return Smi::FromInt(len);
    }
    // Currently fixed arrays cannot grow too big, so
    // we should never hit this case.
    ASSERT(to_add <= (Smi::kMaxValue - len));

    int new_length = len + to_add;

    if (new_length > elms->length()) {
      // New backing storage is needed.  Grow by ~1.5x plus slack.
      int capacity = new_length + (new_length >> 1) + 16;
      FixedArray* new_elms;
      MaybeObject* maybe_obj = heap->AllocateUninitializedFixedArray(capacity);
      if (!maybe_obj->To(&new_elms)) return maybe_obj;

      ElementsAccessor* accessor = array->GetElementsAccessor();
      MaybeObject* maybe_failure = accessor->CopyElements(
          NULL, 0, kind, new_elms, 0,
          ElementsAccessor::kCopyToEndAndInitializeToHole, elms_obj);
      // Same elements kind on both sides, so the copy cannot fail.
      ASSERT(!maybe_failure->IsFailure());
      USE(maybe_failure);

      elms = new_elms;
    }

    // Add the provided values.
    DisallowHeapAllocation no_gc;
    WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
    for (int index = 0; index < to_add; index++) {
      elms->set(index + len, args[index + 1], mode);
    }

    if (elms != array->elements()) {
      array->set_elements(elms);
    }

    // Set the length.
    array->set_length(Smi::FromInt(new_length));
    return Smi::FromInt(new_length);
  } else {
    // Fast double elements path.
    int len = Smi::cast(array->length())->value();
    int elms_len = elms_obj->length();

    int to_add = args.length() - 1;
    if (to_add == 0) {
      return Smi::FromInt(len);
    }
    // Currently fixed arrays cannot grow too big, so
    // we should never hit this case.
    ASSERT(to_add <= (Smi::kMaxValue - len));

    int new_length = len + to_add;

    FixedDoubleArray* new_elms;

    if (new_length > elms_len) {
      // New backing storage is needed.
      int capacity = new_length + (new_length >> 1) + 16;
      MaybeObject* maybe_obj =
          heap->AllocateUninitializedFixedDoubleArray(capacity);
      if (!maybe_obj->To(&new_elms)) return maybe_obj;

      ElementsAccessor* accessor = array->GetElementsAccessor();
      MaybeObject* maybe_failure = accessor->CopyElements(
          NULL, 0, kind, new_elms, 0,
          ElementsAccessor::kCopyToEndAndInitializeToHole, elms_obj);
      ASSERT(!maybe_failure->IsFailure());
      USE(maybe_failure);
    } else {
      // to_add is > 0 and new_length <= elms_len, so elms_obj cannot be the
      // empty_fixed_array.
      new_elms = FixedDoubleArray::cast(elms_obj);
    }

    // Add the provided values.
    DisallowHeapAllocation no_gc;
    int index;
    for (index = 0; index < to_add; index++) {
      Object* arg = args[index + 1];
      new_elms->set(index + len, arg->Number());
    }

    if (new_elms != array->elements()) {
      array->set_elements(new_elms);
    }

    // Set the length.
    array->set_length(Smi::FromInt(new_length));
    return Smi::FromInt(new_length);
  }
}


// Array.prototype.pop fast path.
BUILTIN(ArrayPop) {
  Heap* heap = isolate->heap();
  Object* receiver = *args.receiver();
  FixedArrayBase* elms_obj;
  MaybeObject* maybe_elms =
      EnsureJSArrayWithWritableFastElements(heap, receiver, NULL, 0);
  if (maybe_elms == NULL) return CallJsBuiltin(isolate, "ArrayPop", args);
  if (!maybe_elms->To(&elms_obj)) return maybe_elms;

  JSArray* array = JSArray::cast(receiver);
  ASSERT(!array->map()->is_observed());

  int len = Smi::cast(array->length())->value();
  if (len == 0) return heap->undefined_value();

  ElementsAccessor* accessor = array->GetElementsAccessor();
  int new_length = len - 1;
  MaybeObject* maybe_result;
  if (accessor->HasElement(array, array, new_length, elms_obj)) {
    maybe_result = accessor->Get(array, array, new_length, elms_obj);
  } else {
    // Hole at the end: the popped value, if any, comes from the prototype.
    maybe_result = array->GetPrototype()->GetElement(len - 1);
  }
  if (maybe_result->IsFailure()) return maybe_result;
  MaybeObject* maybe_failure =
      accessor->SetLength(array, Smi::FromInt(new_length));
  if (maybe_failure->IsFailure()) return maybe_failure;
  return maybe_result;
}


// Array.prototype.shift fast path.
BUILTIN(ArrayShift) {
  Heap* heap = isolate->heap();
  Object* receiver = *args.receiver();
  FixedArrayBase* elms_obj;
  MaybeObject* maybe_elms_obj =
      EnsureJSArrayWithWritableFastElements(heap, receiver, NULL, 0);
  if (maybe_elms_obj == NULL)
    return CallJsBuiltin(isolate, "ArrayShift", args);
  if (!maybe_elms_obj->To(&elms_obj)) return maybe_elms_obj;

  if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
    return CallJsBuiltin(isolate, "ArrayShift", args);
  }
  JSArray* array = JSArray::cast(receiver);
  ASSERT(!array->map()->is_observed());

  int len = Smi::cast(array->length())->value();
  if (len == 0) return heap->undefined_value();

  // Get first
  // element — this is the shift() result; a hole surfaces as undefined.
  ElementsAccessor* accessor = array->GetElementsAccessor();
  Object* first;
  MaybeObject* maybe_first = accessor->Get(receiver, array, 0, elms_obj);
  if (!maybe_first->To(&first)) return maybe_first;
  if (first->IsTheHole()) {
    first = heap->undefined_value();
  }

  if (!heap->lo_space()->Contains(elms_obj)) {
    // Outside large-object space we can trim the store in place instead of
    // moving every element.
    array->set_elements(LeftTrimFixedArray(heap, elms_obj, 1));
  } else {
    // Shift the elements.
    if (elms_obj->IsFixedArray()) {
      FixedArray* elms = FixedArray::cast(elms_obj);
      DisallowHeapAllocation no_gc;
      heap->MoveElements(elms, 0, 1, len - 1);
      elms->set(len - 1, heap->the_hole_value());
    } else {
      FixedDoubleArray* elms = FixedDoubleArray::cast(elms_obj);
      MoveDoubleElements(elms, 0, elms, 1, len - 1);
      elms->set_the_hole(len - 1);
    }
  }

  // Set the length.
  array->set_length(Smi::FromInt(len - 1));

  return first;
}


// Array.prototype.unshift fast path for fast smi/object arrays.
BUILTIN(ArrayUnshift) {
  Heap* heap = isolate->heap();
  Object* receiver = *args.receiver();
  FixedArrayBase* elms_obj;
  MaybeObject* maybe_elms_obj =
      EnsureJSArrayWithWritableFastElements(heap, receiver, NULL, 0);
  if (maybe_elms_obj == NULL)
    return CallJsBuiltin(isolate, "ArrayUnshift", args);
  if (!maybe_elms_obj->To(&elms_obj)) return maybe_elms_obj;

  if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
    return CallJsBuiltin(isolate, "ArrayUnshift", args);
  }
  JSArray* array = JSArray::cast(receiver);
  ASSERT(!array->map()->is_observed());
  if (!array->HasFastSmiOrObjectElements()) {
    return CallJsBuiltin(isolate, "ArrayUnshift", args);
  }
  FixedArray* elms = FixedArray::cast(elms_obj);

  int len = Smi::cast(array->length())->value();
  int to_add = args.length() - 1;
  int new_length = len + to_add;
  // Currently fixed arrays cannot grow too big, so
  // we should never hit this case.
  ASSERT(to_add <= (Smi::kMaxValue - len));

  MaybeObject* maybe_object =
      array->EnsureCanContainElements(&args, 1, to_add,
                                      DONT_ALLOW_DOUBLE_ELEMENTS);
  if (maybe_object->IsFailure()) return maybe_object;

  if (new_length > elms->length()) {
    // New backing storage is needed.  Copy the old elements shifted right
    // by to_add so the front slots are free for the new values.
    int capacity = new_length + (new_length >> 1) + 16;
    FixedArray* new_elms;
    MaybeObject* maybe_elms = heap->AllocateUninitializedFixedArray(capacity);
    if (!maybe_elms->To(&new_elms)) return maybe_elms;

    ElementsKind kind = array->GetElementsKind();
    ElementsAccessor* accessor = array->GetElementsAccessor();
    MaybeObject* maybe_failure = accessor->CopyElements(
        NULL, 0, kind, new_elms, to_add,
        ElementsAccessor::kCopyToEndAndInitializeToHole, elms);
    ASSERT(!maybe_failure->IsFailure());
    USE(maybe_failure);

    elms = new_elms;
    array->set_elements(elms);
  } else {
    DisallowHeapAllocation no_gc;
    heap->MoveElements(elms, to_add, 0, len);
  }

  // Add the provided values.
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
  for (int i = 0; i < to_add; i++) {
    elms->set(i, args[i + 1], mode);
  }

  // Set the length.
  array->set_length(Smi::FromInt(new_length));
  return Smi::FromInt(new_length);
}


// Array.prototype.slice fast path for fast-mode arrays and for the
// arguments object.
BUILTIN(ArraySlice) {
  Heap* heap = isolate->heap();
  Object* receiver = *args.receiver();
  FixedArrayBase* elms;
  int len = -1;
  if (receiver->IsJSArray()) {
    JSArray* array = JSArray::cast(receiver);
    if (!IsJSArrayFastElementMovingAllowed(heap, array)) {
      return CallJsBuiltin(isolate, "ArraySlice", args);
    }

    if (array->HasFastElements()) {
      elms = array->elements();
    } else {
      return CallJsBuiltin(isolate, "ArraySlice", args);
    }

    len = Smi::cast(array->length())->value();
  } else {
    // Array.slice(arguments, ...)
    // is quite a common idiom (notably more
    // than 50% of invocations in Web apps).  Treat it in C++ as well.
    Map* arguments_map =
        isolate->context()->native_context()->arguments_boilerplate()->map();

    bool is_arguments_object_with_fast_elements =
        receiver->IsJSObject() &&
        JSObject::cast(receiver)->map() == arguments_map;
    if (!is_arguments_object_with_fast_elements) {
      return CallJsBuiltin(isolate, "ArraySlice", args);
    }
    JSObject* object = JSObject::cast(receiver);

    if (object->HasFastElements()) {
      elms = object->elements();
    } else {
      return CallJsBuiltin(isolate, "ArraySlice", args);
    }
    // The arguments object stores its length as an in-object property.
    Object* len_obj = object->InObjectPropertyAt(Heap::kArgumentsLengthIndex);
    if (!len_obj->IsSmi()) {
      return CallJsBuiltin(isolate, "ArraySlice", args);
    }
    len = Smi::cast(len_obj)->value();
    if (len > elms->length()) {
      return CallJsBuiltin(isolate, "ArraySlice", args);
    }
  }

  JSObject* object = JSObject::cast(receiver);

  ASSERT(len >= 0);
  int n_arguments = args.length() - 1;

  // Note carefully chosen defaults---if argument is missing,
  // it's undefined which gets converted to 0 for relative_start
  // and to len for relative_end.
  int relative_start = 0;
  int relative_end = len;
  if (n_arguments > 0) {
    Object* arg1 = args[1];
    if (arg1->IsSmi()) {
      relative_start = Smi::cast(arg1)->value();
    } else if (arg1->IsHeapNumber()) {
      double start = HeapNumber::cast(arg1)->value();
      // Out-of-int-range starts go to the generic builtin.
      if (start < kMinInt || start > kMaxInt) {
        return CallJsBuiltin(isolate, "ArraySlice", args);
      }
      relative_start = std::isnan(start) ? 0 : static_cast<int>(start);
    } else if (!arg1->IsUndefined()) {
      return CallJsBuiltin(isolate, "ArraySlice", args);
    }
    if (n_arguments > 1) {
      Object* arg2 = args[2];
      if (arg2->IsSmi()) {
        relative_end = Smi::cast(arg2)->value();
      } else if (arg2->IsHeapNumber()) {
        double end = HeapNumber::cast(arg2)->value();
        if (end < kMinInt || end > kMaxInt) {
          return CallJsBuiltin(isolate, "ArraySlice", args);
        }
        relative_end = std::isnan(end) ? 0 : static_cast<int>(end);
      } else if (!arg2->IsUndefined()) {
        return CallJsBuiltin(isolate, "ArraySlice", args);
      }
    }
  }

  // ECMAScript 232, 3rd Edition, Section 15.4.4.10, step 6.
  int k = (relative_start < 0) ? Max(len + relative_start, 0)
                               : Min(relative_start, len);

  // ECMAScript 232, 3rd Edition, Section 15.4.4.10, step 8.
  int final = (relative_end < 0) ? Max(len + relative_end, 0)
                                 : Min(relative_end, len);

  // Calculate the length of result array.
  int result_len = Max(final - k, 0);

  ElementsKind kind = object->GetElementsKind();
  if (IsHoleyElementsKind(kind)) {
    // If the sliced range has no holes, the result can use the packed
    // variant of the elements kind.
    bool packed = true;
    ElementsAccessor* accessor = ElementsAccessor::ForKind(kind);
    for (int i = k; i < final; i++) {
      if (!accessor->HasElement(object, object, i, elms)) {
        packed = false;
        break;
      }
    }
    if (packed) {
      kind = GetPackedElementsKind(kind);
    } else if (!receiver->IsJSArray()) {
      return CallJsBuiltin(isolate, "ArraySlice", args);
    }
  }

  JSArray* result_array;
  MaybeObject* maybe_array = heap->AllocateJSArrayAndStorage(kind,
                                                             result_len,
                                                             result_len);

  DisallowHeapAllocation no_gc;
  if (result_len == 0) return maybe_array;
  if (!maybe_array->To(&result_array)) return maybe_array;

  ElementsAccessor* accessor = object->GetElementsAccessor();
  MaybeObject* maybe_failure = accessor->CopyElements(
      NULL, k, kind, result_array->elements(), 0, result_len, elms);
  // Result array was allocated with the same elements kind, so the copy
  // cannot fail.
  ASSERT(!maybe_failure->IsFailure());
  USE(maybe_failure);

  return result_array;
}


// Array.prototype.splice fast path.
BUILTIN(ArraySplice) {
  Heap* heap = isolate->heap();
  Object* receiver = *args.receiver();
  FixedArrayBase* elms_obj;
  MaybeObject* maybe_elms =
      EnsureJSArrayWithWritableFastElements(heap, receiver, &args, 3);
  if (maybe_elms == NULL) {
    return CallJsBuiltin(isolate, "ArraySplice", args);
  }
  if (!maybe_elms->To(&elms_obj)) return maybe_elms;

  if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
    return CallJsBuiltin(isolate, "ArraySplice", args);
  }
  JSArray* array = JSArray::cast(receiver);
  ASSERT(!array->map()->is_observed());

  int len = Smi::cast(array->length())->value();

  int n_arguments = args.length() - 1;

  int relative_start = 0;
  if (n_arguments > 0) {
    Object* arg1 = args[1];
    if (arg1->IsSmi()) {
      relative_start = Smi::cast(arg1)->value();
    } else if (arg1->IsHeapNumber()) {
      double start = HeapNumber::cast(arg1)->value();
      if (start < kMinInt || start > kMaxInt) {
        return CallJsBuiltin(isolate, "ArraySplice", args);
      }
      relative_start = std::isnan(start) ? 0 : static_cast<int>(start);
    } else if (!arg1->IsUndefined()) {
      return CallJsBuiltin(isolate, "ArraySplice", args);
    }
  }
  int actual_start = (relative_start < 0) ? Max(len + relative_start, 0)
                                          : Min(relative_start, len);

  // SpiderMonkey, TraceMonkey and JSC treat the case where no delete count is
  // given as a request to delete all the elements from the start.
  // And it differs from the case of undefined delete count.
  // This does not follow ECMA-262, but we do the same for
  // compatibility.
902 int actual_delete_count; 903 if (n_arguments == 1) { 904 ASSERT(len - actual_start >= 0); 905 actual_delete_count = len - actual_start; 906 } else { 907 int value = 0; // ToInteger(undefined) == 0 908 if (n_arguments > 1) { 909 Object* arg2 = args[2]; 910 if (arg2->IsSmi()) { 911 value = Smi::cast(arg2)->value(); 912 } else { 913 return CallJsBuiltin(isolate, "ArraySplice", args); 914 } 915 } 916 actual_delete_count = Min(Max(value, 0), len - actual_start); 917 } 918 919 ElementsKind elements_kind = array->GetElementsKind(); 920 921 int item_count = (n_arguments > 1) ? (n_arguments - 2) : 0; 922 int new_length = len - actual_delete_count + item_count; 923 924 // For double mode we do not support changing the length. 925 if (new_length > len && IsFastDoubleElementsKind(elements_kind)) { 926 return CallJsBuiltin(isolate, "ArraySplice", args); 927 } 928 929 if (new_length == 0) { 930 MaybeObject* maybe_array = heap->AllocateJSArrayWithElements( 931 elms_obj, elements_kind, actual_delete_count); 932 if (maybe_array->IsFailure()) return maybe_array; 933 array->set_elements(heap->empty_fixed_array()); 934 array->set_length(Smi::FromInt(0)); 935 return maybe_array; 936 } 937 938 JSArray* result_array = NULL; 939 MaybeObject* maybe_array = 940 heap->AllocateJSArrayAndStorage(elements_kind, 941 actual_delete_count, 942 actual_delete_count); 943 if (!maybe_array->To(&result_array)) return maybe_array; 944 945 if (actual_delete_count > 0) { 946 DisallowHeapAllocation no_gc; 947 ElementsAccessor* accessor = array->GetElementsAccessor(); 948 MaybeObject* maybe_failure = accessor->CopyElements( 949 NULL, actual_start, elements_kind, result_array->elements(), 950 0, actual_delete_count, elms_obj); 951 // Cannot fail since the origin and target array are of the same elements 952 // kind. 953 ASSERT(!maybe_failure->IsFailure()); 954 USE(maybe_failure); 955 } 956 957 bool elms_changed = false; 958 if (item_count < actual_delete_count) { 959 // Shrink the array. 
960 const bool trim_array = !heap->lo_space()->Contains(elms_obj) && 961 ((actual_start + item_count) < 962 (len - actual_delete_count - actual_start)); 963 if (trim_array) { 964 const int delta = actual_delete_count - item_count; 965 966 if (elms_obj->IsFixedDoubleArray()) { 967 FixedDoubleArray* elms = FixedDoubleArray::cast(elms_obj); 968 MoveDoubleElements(elms, delta, elms, 0, actual_start); 969 } else { 970 FixedArray* elms = FixedArray::cast(elms_obj); 971 DisallowHeapAllocation no_gc; 972 heap->MoveElements(elms, delta, 0, actual_start); 973 } 974 975 elms_obj = LeftTrimFixedArray(heap, elms_obj, delta); 976 977 elms_changed = true; 978 } else { 979 if (elms_obj->IsFixedDoubleArray()) { 980 FixedDoubleArray* elms = FixedDoubleArray::cast(elms_obj); 981 MoveDoubleElements(elms, actual_start + item_count, 982 elms, actual_start + actual_delete_count, 983 (len - actual_delete_count - actual_start)); 984 FillWithHoles(elms, new_length, len); 985 } else { 986 FixedArray* elms = FixedArray::cast(elms_obj); 987 DisallowHeapAllocation no_gc; 988 heap->MoveElements(elms, actual_start + item_count, 989 actual_start + actual_delete_count, 990 (len - actual_delete_count - actual_start)); 991 FillWithHoles(heap, elms, new_length, len); 992 } 993 } 994 } else if (item_count > actual_delete_count) { 995 FixedArray* elms = FixedArray::cast(elms_obj); 996 // Currently fixed arrays cannot grow too big, so 997 // we should never hit this case. 998 ASSERT((item_count - actual_delete_count) <= (Smi::kMaxValue - len)); 999 1000 // Check if array need to grow. 1001 if (new_length > elms->length()) { 1002 // New backing storage is needed. 
1003 int capacity = new_length + (new_length >> 1) + 16; 1004 FixedArray* new_elms; 1005 MaybeObject* maybe_obj = heap->AllocateUninitializedFixedArray(capacity); 1006 if (!maybe_obj->To(&new_elms)) return maybe_obj; 1007 1008 DisallowHeapAllocation no_gc; 1009 1010 ElementsKind kind = array->GetElementsKind(); 1011 ElementsAccessor* accessor = array->GetElementsAccessor(); 1012 if (actual_start > 0) { 1013 // Copy the part before actual_start as is. 1014 MaybeObject* maybe_failure = accessor->CopyElements( 1015 NULL, 0, kind, new_elms, 0, actual_start, elms); 1016 ASSERT(!maybe_failure->IsFailure()); 1017 USE(maybe_failure); 1018 } 1019 MaybeObject* maybe_failure = accessor->CopyElements( 1020 NULL, actual_start + actual_delete_count, kind, new_elms, 1021 actual_start + item_count, 1022 ElementsAccessor::kCopyToEndAndInitializeToHole, elms); 1023 ASSERT(!maybe_failure->IsFailure()); 1024 USE(maybe_failure); 1025 1026 elms_obj = new_elms; 1027 elms_changed = true; 1028 } else { 1029 DisallowHeapAllocation no_gc; 1030 heap->MoveElements(elms, actual_start + item_count, 1031 actual_start + actual_delete_count, 1032 (len - actual_delete_count - actual_start)); 1033 } 1034 } 1035 1036 if (IsFastDoubleElementsKind(elements_kind)) { 1037 FixedDoubleArray* elms = FixedDoubleArray::cast(elms_obj); 1038 for (int k = actual_start; k < actual_start + item_count; k++) { 1039 Object* arg = args[3 + k - actual_start]; 1040 if (arg->IsSmi()) { 1041 elms->set(k, Smi::cast(arg)->value()); 1042 } else { 1043 elms->set(k, HeapNumber::cast(arg)->value()); 1044 } 1045 } 1046 } else { 1047 FixedArray* elms = FixedArray::cast(elms_obj); 1048 DisallowHeapAllocation no_gc; 1049 WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc); 1050 for (int k = actual_start; k < actual_start + item_count; k++) { 1051 elms->set(k, args[3 + k - actual_start], mode); 1052 } 1053 } 1054 1055 if (elms_changed) { 1056 array->set_elements(elms_obj); 1057 } 1058 // Set the length. 
1059 array->set_length(Smi::FromInt(new_length)); 1060 1061 return result_array; 1062 } 1063 1064 1065 BUILTIN(ArrayConcat) { 1066 Heap* heap = isolate->heap(); 1067 Context* native_context = isolate->context()->native_context(); 1068 JSObject* array_proto = 1069 JSObject::cast(native_context->array_function()->prototype()); 1070 if (!ArrayPrototypeHasNoElements(heap, native_context, array_proto)) { 1071 return CallJsBuiltin(isolate, "ArrayConcat", args); 1072 } 1073 1074 // Iterate through all the arguments performing checks 1075 // and calculating total length. 1076 int n_arguments = args.length(); 1077 int result_len = 0; 1078 ElementsKind elements_kind = GetInitialFastElementsKind(); 1079 bool has_double = false; 1080 bool is_holey = false; 1081 for (int i = 0; i < n_arguments; i++) { 1082 Object* arg = args[i]; 1083 if (!arg->IsJSArray() || 1084 !JSArray::cast(arg)->HasFastElements() || 1085 JSArray::cast(arg)->GetPrototype() != array_proto) { 1086 return CallJsBuiltin(isolate, "ArrayConcat", args); 1087 } 1088 int len = Smi::cast(JSArray::cast(arg)->length())->value(); 1089 1090 // We shouldn't overflow when adding another len. 
    // Each len is at most kMaxLength < 2^30, so the running sum of two such
    // values cannot overflow a signed int; the STATIC_ASSERT pins this down.
    const int kHalfOfMaxInt = 1 << (kBitsPerInt - 2);
    STATIC_ASSERT(FixedArray::kMaxLength < kHalfOfMaxInt);
    USE(kHalfOfMaxInt);
    result_len += len;
    ASSERT(result_len >= 0);

    if (result_len > FixedDoubleArray::kMaxLength) {
      return CallJsBuiltin(isolate, "ArrayConcat", args);
    }

    // Track the most general elements kind seen across all sources.
    ElementsKind arg_kind = JSArray::cast(arg)->map()->elements_kind();
    has_double = has_double || IsFastDoubleElementsKind(arg_kind);
    is_holey = is_holey || IsFastHoleyElementsKind(arg_kind);
    if (IsMoreGeneralElementsKindTransition(elements_kind, arg_kind)) {
      elements_kind = arg_kind;
    }
  }

  if (is_holey) elements_kind = GetHoleyElementsKind(elements_kind);

  // If a double array is concatted into a fast elements array, the fast
  // elements array needs to be initialized to contain proper holes, since
  // boxing doubles may cause incremental marking.
  ArrayStorageAllocationMode mode =
      has_double && IsFastObjectElementsKind(elements_kind)
      ? INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE : DONT_INITIALIZE_ARRAY_ELEMENTS;
  JSArray* result_array;
  // Allocate result.
  MaybeObject* maybe_array =
      heap->AllocateJSArrayAndStorage(elements_kind,
                                      result_len,
                                      result_len,
                                      mode);
  if (!maybe_array->To(&result_array)) return maybe_array;
  if (result_len == 0) return result_array;

  // Copy each source array into the result at offset j.
  int j = 0;
  FixedArrayBase* storage = result_array->elements();
  ElementsAccessor* accessor = ElementsAccessor::ForKind(elements_kind);
  for (int i = 0; i < n_arguments; i++) {
    JSArray* array = JSArray::cast(args[i]);
    int len = Smi::cast(array->length())->value();
    ElementsKind from_kind = array->GetElementsKind();
    if (len > 0) {
      MaybeObject* maybe_failure =
          accessor->CopyElements(array, 0, from_kind, storage, j, len);
      if (maybe_failure->IsFailure()) return maybe_failure;
      j += len;
    }
  }

  ASSERT(j == result_len);

  return result_array;
}


// -----------------------------------------------------------------------------
// Strict mode poison pills


// Installed on strict-mode-forbidden accessors (e.g. arguments.callee);
// always throws a TypeError.
BUILTIN(StrictModePoisonPill) {
  HandleScope scope(isolate);
  return isolate->Throw(*isolate->factory()->NewTypeError(
      "strict_poison_pill", HandleVector<Object>(NULL, 0)));
}


// -----------------------------------------------------------------------------
//


// Searches the hidden prototype chain of the given object for the first
// object that is an instance of the given type.  If no such object can
// be found then Heap::null_value() is returned.
// Walks the hidden prototype chain starting at |object| looking for an
// instance of |type|; returns the matching object or null_value().  Recurses
// one hidden-prototype link at a time.
static inline Object* FindHidden(Heap* heap,
                                 Object* object,
                                 FunctionTemplateInfo* type) {
  if (object->IsInstanceOf(type)) return object;
  Object* proto = object->GetPrototype(heap->isolate());
  if (proto->IsJSObject() &&
      JSObject::cast(proto)->map()->is_hidden_prototype()) {
    return FindHidden(heap, proto, type);
  }
  return heap->null_value();
}


// Returns the holder JSObject if the function can legally be called
// with this receiver.  Returns Heap::null_value() if the call is
// illegal.  Any arguments that don't fit the expected type is
// overwritten with undefined.  Note that holder and the arguments are
// implicitly rewritten with the first object in the hidden prototype
// chain that actually has the expected type.
static inline Object* TypeCheck(Heap* heap,
                                int argc,
                                Object** argv,
                                FunctionTemplateInfo* info) {
  Object* recv = argv[0];
  // API calls are only supported with JSObject receivers.
  if (!recv->IsJSObject()) return heap->null_value();
  Object* sig_obj = info->signature();
  if (sig_obj->IsUndefined()) return recv;
  SignatureInfo* sig = SignatureInfo::cast(sig_obj);
  // If necessary, check the receiver
  Object* recv_type = sig->receiver();
  Object* holder = recv;
  if (!recv_type->IsUndefined()) {
    holder = FindHidden(heap, holder, FunctionTemplateInfo::cast(recv_type));
    if (holder == heap->null_value()) return heap->null_value();
  }
  Object* args_obj = sig->args();
  // If there is no argument signature we're done
  if (args_obj->IsUndefined()) return holder;
  FixedArray* args = FixedArray::cast(args_obj);
  int length = args->length();
  // Only check as many arguments as were actually passed (minus receiver).
  if (argc <= length) length = argc - 1;
  for (int i = 0; i < length; i++) {
    Object* argtype = args->get(i);
    if (argtype->IsUndefined()) continue;
    // Arguments live below the receiver on the stack, hence the negative
    // index; a type-mismatched argument is rewritten to undefined in place.
    Object** arg = &argv[-1 - i];
    Object* current = *arg;
    current = FindHidden(heap, current, FunctionTemplateInfo::cast(argtype));
    if (current == heap->null_value()) current = heap->undefined_value();
    *arg = current;
  }
  return holder;
}


// Shared implementation for calls into API (FunctionTemplate-backed)
// functions, instantiated for normal calls and construct calls.  Performs
// signature type-checking, invokes the registered InvocationCallback, and
// converts the v8::Value result back to an internal Object*.
template <bool is_construct>
MUST_USE_RESULT static MaybeObject* HandleApiCallHelper(
    BuiltinArguments<NEEDS_CALLED_FUNCTION> args, Isolate* isolate) {
  ASSERT(is_construct == CalledAsConstructor(isolate));
  Heap* heap = isolate->heap();

  HandleScope scope(isolate);
  Handle<JSFunction> function = args.called_function();
  ASSERT(function->shared()->IsApiFunction());

  FunctionTemplateInfo* fun_data = function->shared()->get_api_func_data();
  if (is_construct) {
    // Finish configuring the freshly created instance; this may run
    // arbitrary code and GC, so fun_data is reloaded from the handle after.
    Handle<FunctionTemplateInfo> desc(fun_data, isolate);
    bool pending_exception = false;
    isolate->factory()->ConfigureInstance(
        desc, Handle<JSObject>::cast(args.receiver()), &pending_exception);
    ASSERT(isolate->has_pending_exception() == pending_exception);
    if (pending_exception) return Failure::Exception();
    fun_data = *desc;
  }

  Object* raw_holder = TypeCheck(heap, args.length(), &args[0], fun_data);

  if (raw_holder->IsNull()) {
    // This function cannot be called with the given receiver.  Abort!
    Handle<Object> obj =
        isolate->factory()->NewTypeError(
            "illegal_invocation", HandleVector(&function, 1));
    return isolate->Throw(*obj);
  }

  Object* raw_call_data = fun_data->call_code();
  if (!raw_call_data->IsUndefined()) {
    CallHandlerInfo* call_data = CallHandlerInfo::cast(raw_call_data);
    Object* callback_obj = call_data->callback();
    v8::InvocationCallback callback =
        v8::ToCData<v8::InvocationCallback>(callback_obj);
    Object* data_obj = call_data->data();
    Object* result;

    LOG(isolate, ApiObjectAccess("call", JSObject::cast(*args.receiver())));
    ASSERT(raw_holder->IsJSObject());

    FunctionCallbackArguments custom(isolate,
                                     data_obj,
                                     *function,
                                     raw_holder,
                                     &args[0] - 1,
                                     args.length() - 1,
                                     is_construct);

    v8::Handle<v8::Value> value = custom.Call(callback);
    if (value.IsEmpty()) {
      // Callback returned nothing: treat as undefined.
      result = heap->undefined_value();
    } else {
      result = *reinterpret_cast<Object**>(*value);
      result->VerifyApiCallResultType();
    }

    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
    // A construct call only keeps the callback result if it is an object;
    // otherwise the receiver (the new instance) is returned below.
    if (!is_construct || result->IsJSObject()) return result;
  }

  return *args.receiver();
}


BUILTIN(HandleApiCall) {
  return HandleApiCallHelper<false>(args, isolate);
}


BUILTIN(HandleApiCallConstruct) {
  return HandleApiCallHelper<true>(args, isolate);
}


// Helper function to handle calls to non-function objects created through the
// API. The object can be called as either a constructor (using new) or just as
// a function (without new).
// Invokes the instance call handler registered on an API-created,
// non-function callable object.  |is_construct_call| only affects what is
// reported to the callback; the delegate call itself is never a construct
// call (see assert below).
MUST_USE_RESULT static MaybeObject* HandleApiCallAsFunctionOrConstructor(
    Isolate* isolate,
    bool is_construct_call,
    BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
  // Non-functions are never called as constructors. Even if this is an object
  // called as a constructor the delegate call is not a construct call.
  ASSERT(!CalledAsConstructor(isolate));
  Heap* heap = isolate->heap();

  Handle<Object> receiver = args.receiver();

  // Get the object called.
  JSObject* obj = JSObject::cast(*receiver);

  // Get the invocation callback from the function descriptor that was
  // used to create the called object.
  ASSERT(obj->map()->has_instance_call_handler());
  JSFunction* constructor = JSFunction::cast(obj->map()->constructor());
  ASSERT(constructor->shared()->IsApiFunction());
  Object* handler =
      constructor->shared()->get_api_func_data()->instance_call_handler();
  ASSERT(!handler->IsUndefined());
  CallHandlerInfo* call_data = CallHandlerInfo::cast(handler);
  Object* callback_obj = call_data->callback();
  v8::InvocationCallback callback =
      v8::ToCData<v8::InvocationCallback>(callback_obj);

  // Get the data for the call and perform the callback.
  Object* result;
  {
    HandleScope scope(isolate);
    LOG(isolate, ApiObjectAccess("call non-function", obj));

    FunctionCallbackArguments custom(isolate,
                                     call_data->data(),
                                     constructor,
                                     obj,
                                     &args[0] - 1,
                                     args.length() - 1,
                                     is_construct_call);
    v8::Handle<v8::Value> value = custom.Call(callback);
    if (value.IsEmpty()) {
      // Callback returned nothing: treat as undefined.
      result = heap->undefined_value();
    } else {
      result = *reinterpret_cast<Object**>(*value);
      result->VerifyApiCallResultType();
    }
  }
  // Check for exceptions and return result.
  RETURN_IF_SCHEDULED_EXCEPTION(isolate);
  return result;
}


// Handle calls to non-function objects created through the API. This delegate
// function is used when the call is a normal function call.
BUILTIN(HandleApiCallAsFunction) {
  return HandleApiCallAsFunctionOrConstructor(isolate, false, args);
}


// Handle calls to non-function objects created through the API. This delegate
// function is used when the call is a construct call.
BUILTIN(HandleApiCallAsConstructor) {
  return HandleApiCallAsFunctionOrConstructor(isolate, true, args);
}


// -----------------------------------------------------------------------------
// Thin generator trampolines used by the builtin table below.  Each simply
// forwards to the corresponding IC / stub-compiler code generator.

// LoadIC stub generators.
static void Generate_LoadIC_Initialize(MacroAssembler* masm) {
  LoadIC::GenerateInitialize(masm);
}


static void Generate_LoadIC_PreMonomorphic(MacroAssembler* masm) {
  LoadIC::GeneratePreMonomorphic(masm);
}


static void Generate_LoadIC_Miss(MacroAssembler* masm) {
  LoadIC::GenerateMiss(masm);
}


static void Generate_LoadIC_Megamorphic(MacroAssembler* masm) {
  LoadIC::GenerateMegamorphic(masm);
}


static void Generate_LoadIC_Normal(MacroAssembler* masm) {
  LoadIC::GenerateNormal(masm);
}


static void Generate_LoadIC_Getter_ForDeopt(MacroAssembler* masm) {
  LoadStubCompiler::GenerateLoadViaGetter(masm, Handle<JSFunction>());
}


static void Generate_LoadIC_Slow(MacroAssembler* masm) {
  LoadIC::GenerateRuntimeGetProperty(masm);
}


// KeyedLoadIC stub generators.
static void Generate_KeyedLoadIC_Initialize(MacroAssembler* masm) {
  KeyedLoadIC::GenerateInitialize(masm);
}


static void Generate_KeyedLoadIC_Slow(MacroAssembler* masm) {
  KeyedLoadIC::GenerateRuntimeGetProperty(masm);
}


static void Generate_KeyedLoadIC_Miss(MacroAssembler* masm) {
  KeyedLoadIC::GenerateMiss(masm, MISS);
}


static void Generate_KeyedLoadIC_MissForceGeneric(MacroAssembler* masm) {
  KeyedLoadIC::GenerateMiss(masm, MISS_FORCE_GENERIC);
}


static void Generate_KeyedLoadIC_Generic(MacroAssembler* masm) {
  KeyedLoadIC::GenerateGeneric(masm);
}


static void Generate_KeyedLoadIC_String(MacroAssembler* masm) {
  KeyedLoadIC::GenerateString(masm);
}


static void Generate_KeyedLoadIC_PreMonomorphic(MacroAssembler* masm) {
  KeyedLoadIC::GeneratePreMonomorphic(masm);
}


static void Generate_KeyedLoadIC_IndexedInterceptor(MacroAssembler* masm) {
  KeyedLoadIC::GenerateIndexedInterceptor(masm);
}


static void Generate_KeyedLoadIC_NonStrictArguments(MacroAssembler* masm) {
  KeyedLoadIC::GenerateNonStrictArguments(masm);
}


// StoreIC stub generators.  "_Strict" variants share the generator where the
// strictness only matters at dispatch time.
static void Generate_StoreIC_Slow(MacroAssembler* masm) {
  StoreIC::GenerateSlow(masm);
}


static void Generate_StoreIC_Initialize(MacroAssembler* masm) {
  StoreIC::GenerateInitialize(masm);
}


static void Generate_StoreIC_Initialize_Strict(MacroAssembler* masm) {
  StoreIC::GenerateInitialize(masm);
}


static void Generate_StoreIC_Miss(MacroAssembler* masm) {
  StoreIC::GenerateMiss(masm);
}


static void Generate_StoreIC_Normal(MacroAssembler* masm) {
  StoreIC::GenerateNormal(masm);
}


static void Generate_StoreIC_Normal_Strict(MacroAssembler* masm) {
  StoreIC::GenerateNormal(masm);
}


static void Generate_StoreIC_Megamorphic(MacroAssembler* masm) {
  StoreIC::GenerateMegamorphic(masm, kNonStrictMode);
}


static void Generate_StoreIC_Megamorphic_Strict(MacroAssembler* masm) {
  StoreIC::GenerateMegamorphic(masm, kStrictMode);
}


static void Generate_StoreIC_GlobalProxy(MacroAssembler* masm) {
  StoreIC::GenerateRuntimeSetProperty(masm, kNonStrictMode);
}


static void Generate_StoreIC_GlobalProxy_Strict(MacroAssembler* masm) {
  StoreIC::GenerateRuntimeSetProperty(masm, kStrictMode);
}


static void Generate_StoreIC_Setter_ForDeopt(MacroAssembler* masm) {
  StoreStubCompiler::GenerateStoreViaSetter(masm, Handle<JSFunction>());
}


static void Generate_StoreIC_Generic(MacroAssembler* masm) {
  StoreIC::GenerateRuntimeSetProperty(masm, kNonStrictMode);
}


static void Generate_StoreIC_Generic_Strict(MacroAssembler* masm) {
  StoreIC::GenerateRuntimeSetProperty(masm, kStrictMode);
}


// KeyedStoreIC stub generators.
static void Generate_KeyedStoreIC_Generic(MacroAssembler* masm) {
  KeyedStoreIC::GenerateGeneric(masm, kNonStrictMode);
}


static void Generate_KeyedStoreIC_Generic_Strict(MacroAssembler* masm) {
  KeyedStoreIC::GenerateGeneric(masm, kStrictMode);
}


static void Generate_KeyedStoreIC_Miss(MacroAssembler* masm) {
  KeyedStoreIC::GenerateMiss(masm, MISS);
}


static void Generate_KeyedStoreIC_MissForceGeneric(MacroAssembler* masm) {
  KeyedStoreIC::GenerateMiss(masm, MISS_FORCE_GENERIC);
}


static void Generate_KeyedStoreIC_Slow(MacroAssembler* masm) {
  KeyedStoreIC::GenerateSlow(masm);
}


static void Generate_KeyedStoreIC_Initialize(MacroAssembler* masm) {
  KeyedStoreIC::GenerateInitialize(masm);
}


static void Generate_KeyedStoreIC_Initialize_Strict(MacroAssembler* masm) {
  KeyedStoreIC::GenerateInitialize(masm);
}


static void Generate_KeyedStoreIC_NonStrictArguments(MacroAssembler* masm) {
  KeyedStoreIC::GenerateNonStrictArguments(masm);
}


// Debugger break-point stubs; only compiled in when debugger support is on.
#ifdef ENABLE_DEBUGGER_SUPPORT
static void Generate_LoadIC_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateLoadICDebugBreak(masm);
}


static void Generate_StoreIC_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateStoreICDebugBreak(masm);
}


static void Generate_KeyedLoadIC_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateKeyedLoadICDebugBreak(masm);
}


static void Generate_KeyedStoreIC_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateKeyedStoreICDebugBreak(masm);
}


static void Generate_CompareNilIC_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateCompareNilICDebugBreak(masm);
}


static void Generate_Return_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateReturnDebugBreak(masm);
}


static void Generate_CallFunctionStub_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateCallFunctionStubDebugBreak(masm);
}


static void Generate_CallFunctionStub_Recording_DebugBreak(
    MacroAssembler* masm) {
  Debug::GenerateCallFunctionStubRecordDebugBreak(masm);
}


static void Generate_CallConstructStub_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateCallConstructStubDebugBreak(masm);
}


static void Generate_CallConstructStub_Recording_DebugBreak(
    MacroAssembler* masm) {
  Debug::GenerateCallConstructStubRecordDebugBreak(masm);
}


static void Generate_Slot_DebugBreak(MacroAssembler* masm) {
  Debug::GenerateSlotDebugBreak(masm);
}


static void Generate_PlainReturn_LiveEdit(MacroAssembler* masm) {
  Debug::GeneratePlainReturnLiveEdit(masm);
}


static void Generate_FrameDropper_LiveEdit(MacroAssembler* masm) {
  Debug::GenerateFrameDropperLiveEdit(masm);
}
#endif


// Zero out both tables; the real contents are produced by SetUp().
Builtins::Builtins() : initialized_(false) {
  memset(builtins_, 0, sizeof(builtins_[0]) * builtin_count);
  memset(names_, 0, sizeof(names_[0]) * builtin_count);
}


Builtins::~Builtins() {
}


// Table of C++ builtin entry points, one per BUILTIN_LIST_C entry.
#define DEF_ENUM_C(name, ignore) FUNCTION_ADDR(Builtin_##name),
Address const
Builtins::c_functions_[cfunction_count] = {
  BUILTIN_LIST_C(DEF_ENUM_C)
};
#undef DEF_ENUM_C

// Names and argument counts of the JavaScript-implemented builtins.
#define DEF_JS_NAME(name, ignore) #name,
#define DEF_JS_ARGC(ignore, argc) argc,
const char* const Builtins::javascript_names_[id_count] = {
  BUILTINS_LIST_JS(DEF_JS_NAME)
};

int const Builtins::javascript_argc_[id_count] = {
  BUILTINS_LIST_JS(DEF_JS_ARGC)
};
#undef DEF_JS_NAME
#undef DEF_JS_ARGC

// One table row per builtin: how to generate it and how to tag the
// resulting Code object.
struct BuiltinDesc {
  byte* generator;
  byte* c_code;
  const char* s_name;  // name is only used for generating log information.
  int name;
  Code::Flags flags;
  BuiltinExtraArguments extra_args;
};

#define BUILTIN_FUNCTION_TABLE_INIT { V8_ONCE_INIT, {} }

// Lazily-initialized process-wide table of BuiltinDescs; filled exactly once
// via CallOnce.
class BuiltinFunctionTable {
 public:
  BuiltinDesc* functions() {
    CallOnce(&once_, &Builtins::InitBuiltinFunctionTable);
    return functions_;
  }

  OnceType once_;
  BuiltinDesc functions_[Builtins::builtin_count + 1];

  friend class Builtins;
};

static BuiltinFunctionTable builtin_function_table =
    BUILTIN_FUNCTION_TABLE_INIT;

// Define array of pointers to generators and C builtin functions.
// We do this in a sort of roundabout way so that we can do the initialization
// within the lexical scope of Builtins:: and within a context where
// Code::Flags names a non-abstract type.
void Builtins::InitBuiltinFunctionTable() {
  BuiltinDesc* functions = builtin_function_table.functions_;
  // The extra slot past the end acts as a zeroed sentinel entry.
  functions[builtin_count].generator = NULL;
  functions[builtin_count].c_code = NULL;
  functions[builtin_count].s_name = NULL;
  functions[builtin_count].name = builtin_count;
  functions[builtin_count].flags = static_cast<Code::Flags>(0);
  functions[builtin_count].extra_args = NO_EXTRA_ARGUMENTS;

// C++ builtins all go through the Generate_Adaptor trampoline.
#define DEF_FUNCTION_PTR_C(aname, aextra_args)                     \
    functions->generator = FUNCTION_ADDR(Generate_Adaptor);        \
    functions->c_code = FUNCTION_ADDR(Builtin_##aname);            \
    functions->s_name = #aname;                                    \
    functions->name = c_##aname;                                   \
    functions->flags = Code::ComputeFlags(Code::BUILTIN);          \
    functions->extra_args = aextra_args;                           \
    ++functions;

// Assembler builtins have a dedicated Generate_* function and no C code.
#define DEF_FUNCTION_PTR_A(aname, kind, state, extra)              \
    functions->generator = FUNCTION_ADDR(Generate_##aname);        \
    functions->c_code = NULL;                                      \
    functions->s_name = #aname;                                    \
    functions->name = k##aname;                                    \
    functions->flags = Code::ComputeFlags(Code::kind,              \
                                          state,                   \
                                          extra);                  \
    functions->extra_args = NO_EXTRA_ARGUMENTS;                    \
    ++functions;

  BUILTIN_LIST_C(DEF_FUNCTION_PTR_C)
  BUILTIN_LIST_A(DEF_FUNCTION_PTR_A)
  BUILTIN_LIST_DEBUG_A(DEF_FUNCTION_PTR_A)

#undef DEF_FUNCTION_PTR_C
#undef DEF_FUNCTION_PTR_A
}


// Generates (or, when deserializing, reserves) a Code object for every
// builtin and records its name.  Must run before any builtin is invoked.
void Builtins::SetUp(bool create_heap_objects) {
  ASSERT(!initialized_);
  Isolate* isolate = Isolate::Current();
  Heap* heap = isolate->heap();

  // Create a scope for the handles in the builtins.
  HandleScope scope(isolate);

  const BuiltinDesc* functions = builtin_function_table.functions();

  // For now we generate builtin adaptor code into a stack-allocated
  // buffer, before copying it into individual code objects. Be careful
  // with alignment, some platforms don't like unaligned code.
  // The int member forces natural alignment of the code buffer.
  union { int force_alignment; byte buffer[8*KB]; } u;

  // Traverse the list of builtins and generate an adaptor in a
  // separate code object for each one.
  for (int i = 0; i < builtin_count; i++) {
    if (create_heap_objects) {
      MacroAssembler masm(isolate, u.buffer, sizeof u.buffer);
      // Generate the code/adaptor.
      typedef void (*Generator)(MacroAssembler*, int, BuiltinExtraArguments);
      Generator g = FUNCTION_CAST<Generator>(functions[i].generator);
      // We pass all arguments to the generator, but it may not use all of
      // them.  This works because the first arguments are on top of the
      // stack.
      ASSERT(!masm.has_frame());
      g(&masm, functions[i].name, functions[i].extra_args);
      // Move the code into the object heap.
      CodeDesc desc;
      masm.GetCode(&desc);
      Code::Flags flags = functions[i].flags;
      Object* code = NULL;
      {
        // During startup it's OK to always allocate and defer GC to later.
        // This simplifies things because we don't need to retry.
        AlwaysAllocateScope __scope__;
        { MaybeObject* maybe_code =
              heap->CreateCode(desc, flags, masm.CodeObject());
          // Builtins cannot be created without code objects; bail out hard.
          if (!maybe_code->ToObject(&code)) {
            v8::internal::V8::FatalProcessOutOfMemory("CreateCode");
          }
        }
      }
      // Log the event and add the code to the builtins array.
      PROFILE(isolate,
              CodeCreateEvent(Logger::BUILTIN_TAG,
                              Code::cast(code),
                              functions[i].s_name));
      GDBJIT(AddCode(GDBJITInterface::BUILTIN,
                     functions[i].s_name,
                     Code::cast(code)));
      builtins_[i] = code;
#ifdef ENABLE_DISASSEMBLER
      if (FLAG_print_builtin_code) {
        PrintF("Builtin: %s\n", functions[i].s_name);
        Code::cast(code)->Disassemble(functions[i].s_name);
        PrintF("\n");
      }
#endif
    } else {
      // Deserializing. The values will be filled in during IterateBuiltins.
      builtins_[i] = NULL;
    }
    names_[i] = functions[i].s_name;
  }

  // Mark as initialized.
  initialized_ = true;
}


void Builtins::TearDown() {
  initialized_ = false;
}


// Lets the GC see (and possibly update) the builtin code pointers.
void Builtins::IterateBuiltins(ObjectVisitor* v) {
  v->VisitPointers(&builtins_[0], &builtins_[0] + builtin_count);
}


// Returns the name of the builtin whose code contains |pc|, or NULL.
const char* Builtins::Lookup(byte* pc) {
  // may be called during initialization (disassembler!)
  if (initialized_) {
    for (int i = 0; i < builtin_count; i++) {
      Code* entry = Code::cast(builtins_[i]);
      if (entry->contains(pc)) {
        return names_[i];
      }
    }
  }
  return NULL;
}


// Accessor per builtin returning a Handle<Code> that tracks relocation of
// the underlying code object (it points into the builtins_ table itself).
#define DEFINE_BUILTIN_ACCESSOR_C(name, ignore)               \
Handle<Code> Builtins::name() {                               \
  Code** code_address =                                       \
      reinterpret_cast<Code**>(builtin_address(k##name));     \
  return Handle<Code>(code_address);                          \
}
#define DEFINE_BUILTIN_ACCESSOR_A(name, kind, state, extra)   \
Handle<Code> Builtins::name() {                               \
  Code** code_address =                                       \
      reinterpret_cast<Code**>(builtin_address(k##name));     \
  return Handle<Code>(code_address);                          \
}
BUILTIN_LIST_C(DEFINE_BUILTIN_ACCESSOR_C)
BUILTIN_LIST_A(DEFINE_BUILTIN_ACCESSOR_A)
BUILTIN_LIST_DEBUG_A(DEFINE_BUILTIN_ACCESSOR_A)
#undef DEFINE_BUILTIN_ACCESSOR_C
#undef DEFINE_BUILTIN_ACCESSOR_A


} }  // namespace v8::internal