// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"

#include "accessors.h"
#include "allocation-site-scopes.h"
#include "api.h"
#include "arguments.h"
#include "bootstrapper.h"
#include "codegen.h"
#include "code-stubs.h"
#include "cpu-profiler.h"
#include "debug.h"
#include "deoptimizer.h"
#include "date.h"
#include "elements.h"
#include "execution.h"
#include "full-codegen.h"
#include "hydrogen.h"
#include "isolate-inl.h"
#include "log.h"
#include "objects-inl.h"
#include "objects-visiting.h"
#include "objects-visiting-inl.h"
#include "macro-assembler.h"
#include "mark-compact.h"
#include "safepoint-table.h"
#include "string-stream.h"
#include "utils.h"

#ifdef ENABLE_DISASSEMBLER
#include "disasm.h"
#include "disassembler.h"
#endif

namespace v8 {
namespace internal {


// Allocates a fresh JSValue wrapper object using |constructor| and stores
// |value| in it.  Propagates allocation failure via MaybeObject.
MUST_USE_RESULT static MaybeObject* CreateJSValue(JSFunction* constructor,
                                                  Object* value) {
  Object* result;
  { MaybeObject* maybe_result =
        constructor->GetHeap()->AllocateJSObject(constructor);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }
  JSValue::cast(result)->set_value(value);
  return result;
}


// Wraps a primitive (number, boolean, string) in the corresponding JSValue
// from |native_context|; receivers that are already JS objects are returned
// unchanged.
MaybeObject* Object::ToObject(Context* native_context) {
  if (IsNumber()) {
    return CreateJSValue(native_context->number_function(), this);
  } else if (IsBoolean()) {
    return CreateJSValue(native_context->boolean_function(), this);
  } else if (IsString()) {
    return CreateJSValue(native_context->string_function(), this);
  }
  ASSERT(IsJSObject());
  return this;
}


// As above, but uses the native context of the isolate's current context and
// also handles symbols.  Undefined/null fall through to the error return.
MaybeObject* Object::ToObject(Isolate* isolate) {
  if (IsJSReceiver()) {
    return this;
  } else if (IsNumber()) {
    Context* native_context = isolate->context()->native_context();
    return CreateJSValue(native_context->number_function(), this);
  } else if (IsBoolean()) {
    Context* native_context = isolate->context()->native_context();
    return CreateJSValue(native_context->boolean_function(), this);
  } else if (IsString()) {
    Context* native_context = isolate->context()->native_context();
    return CreateJSValue(native_context->string_function(), this);
  } else if (IsSymbol()) {
    Context* native_context = isolate->context()->native_context();
    return CreateJSValue(native_context->symbol_function(), this);
  }

  // Throw a type error.
  return Failure::InternalError();
}


// JavaScript-style boolean conversion of this object.
bool Object::BooleanValue() {
  if (IsBoolean()) return IsTrue();
  if (IsSmi()) return Smi::cast(this)->value() != 0;
  if (IsUndefined() || IsNull()) return false;
  if (IsUndetectableObject()) return false;   // Undetectable object is false.
  if (IsString()) return String::cast(this)->length() != 0;
  if (IsHeapNumber()) return HeapNumber::cast(this)->HeapNumberBooleanValue();
  return true;
}


// True if this object can be called as a function: a JSFunction, a function
// proxy whose (transitively resolved) call trap is callable, or an object
// whose map has an instance call handler.
bool Object::IsCallable() {
  Object* fun = this;
  while (fun->IsJSFunctionProxy()) {
    fun = JSFunctionProxy::cast(fun)->call_trap();
  }
  return fun->IsJSFunction() ||
         (fun->IsHeapObject() &&
          HeapObject::cast(fun)->map()->has_instance_call_handler());
}


// Looks up |name| on this object, substituting the relevant wrapper
// prototype when the receiver is a primitive.
void Object::Lookup(Name* name, LookupResult* result) {
  Object* holder = NULL;
  if (IsJSReceiver()) {
    holder = this;
  } else {
    Context* native_context = result->isolate()->context()->native_context();
    if (IsNumber()) {
      holder = native_context->number_function()->instance_prototype();
    } else if (IsString()) {
      holder = native_context->string_function()->instance_prototype();
    } else if (IsSymbol()) {
      holder = native_context->symbol_function()->instance_prototype();
    } else if (IsBoolean()) {
      holder = native_context->boolean_function()->instance_prototype();
    } else {
      result->isolate()->PushStackTraceAndDie(
          0xDEAD0000, this, JSReceiver::cast(this)->map(), 0xDEAD0001);
    }
  }
  ASSERT(holder != NULL);  // Cannot handle null or undefined.
  JSReceiver::cast(holder)->Lookup(name, result);
}


Handle<Object> Object::GetPropertyWithReceiver(
    Handle<Object> object,
    Handle<Object> receiver,
    Handle<Name> name,
    PropertyAttributes* attributes) {
  LookupResult lookup(name->GetIsolate());
  object->Lookup(*name, &lookup);
  Handle<Object> result =
      GetProperty(object, receiver, &lookup, name, attributes);
  ASSERT(*attributes <= ABSENT);
  return result;
}


MaybeObject* Object::GetPropertyWithReceiver(Object* receiver,
                                             Name* name,
                                             PropertyAttributes* attributes) {
  LookupResult result(name->GetIsolate());
  Lookup(name, &result);
  MaybeObject* value = GetProperty(receiver, &result, name, attributes);
  ASSERT(*attributes <= ABSENT);
  return value;
}


// Stores this object's value in *value if it is exactly representable as an
// int32; returns false otherwise (the round-trip check rejects fractions,
// out-of-range values, and -0).
bool Object::ToInt32(int32_t* value) {
  if (IsSmi()) {
    *value = Smi::cast(this)->value();
    return true;
  }
  if (IsHeapNumber()) {
    double num = HeapNumber::cast(this)->value();
    if (FastI2D(FastD2I(num)) == num) {
      *value = FastD2I(num);
      return true;
    }
  }
  return false;
}


// Unsigned counterpart of ToInt32; additionally rejects negative values.
bool Object::ToUint32(uint32_t* value) {
  if (IsSmi()) {
    int num = Smi::cast(this)->value();
    if (num >= 0) {
      *value = static_cast<uint32_t>(num);
      return true;
    }
  }
  if (IsHeapNumber()) {
    double num = HeapNumber::cast(this)->value();
    if (num >= 0 && FastUI2D(FastD2UI(num)) == num) {
      *value = FastD2UI(num);
      return true;
    }
  }
  return false;
}


bool FunctionTemplateInfo::IsTemplateFor(Object* object) {
  if (!object->IsHeapObject()) return false;
  return IsTemplateFor(HeapObject::cast(object)->map());
}


bool FunctionTemplateInfo::IsTemplateFor(Map* map) {
  // There is a constraint on the object; check.
  if (!map->IsJSObjectMap()) return false;
  // Fetch the constructor function of the object.
  Object* cons_obj = map->constructor();
  if (!cons_obj->IsJSFunction()) return false;
  JSFunction* fun = JSFunction::cast(cons_obj);
  // Iterate through the chain of inheriting function templates to
  // see if the required one occurs.
  for (Object* type = fun->shared()->function_data();
       type->IsFunctionTemplateInfo();
       type = FunctionTemplateInfo::cast(type)->parent_template()) {
    if (type == this) return true;
  }
  // Didn't find the required type in the inheritance chain.
  return false;
}


// Casts a raw pointer to To*, asserting (in debug builds) that the address
// is suitably aligned for To.
template<typename To>
static inline To* CheckedCast(void *from) {
  uintptr_t temp = reinterpret_cast<uintptr_t>(from);
  ASSERT(temp % sizeof(To) == 0);
  return reinterpret_cast<To*>(temp);
}


// Reads a 1/2/4-byte value at |ptr| and compares it against the descriptor's
// compare value under the descriptor's bitmask; returns a heap boolean.
static MaybeObject* PerformCompare(const BitmaskCompareDescriptor& descriptor,
                                   char* ptr,
                                   Heap* heap) {
  uint32_t bitmask = descriptor.bitmask;
  uint32_t compare_value = descriptor.compare_value;
  uint32_t value;
  switch (descriptor.size) {
    case 1:
      value = static_cast<uint32_t>(*CheckedCast<uint8_t>(ptr));
      compare_value &= 0xff;
      bitmask &= 0xff;
      break;
    case 2:
      value = static_cast<uint32_t>(*CheckedCast<uint16_t>(ptr));
      compare_value &= 0xffff;
      bitmask &= 0xffff;
      break;
    case 4:
      value = *CheckedCast<uint32_t>(ptr);
      break;
    default:
      UNREACHABLE();
      return NULL;
  }
  return heap->ToBoolean((bitmask & value) == (bitmask & compare_value));
}


// Pointer-equality compare of the word at |ptr| with the descriptor's
// compare value; returns a heap boolean.
static MaybeObject* PerformCompare(const PointerCompareDescriptor& descriptor,
                                   char* ptr,
                                   Heap* heap) {
  uintptr_t compare_value =
      reinterpret_cast<uintptr_t>(descriptor.compare_value);
  uintptr_t value = *CheckedCast<uintptr_t>(ptr);
  return heap->ToBoolean(compare_value == value);
}


// Reads a primitive of the descriptor's data type at |ptr| and boxes it as a
// heap number or boolean.  Signed/unsigned integers up to 16 bits (and
// int32) funnel through the int32 path at the bottom.
static MaybeObject* GetPrimitiveValue(
    const PrimitiveValueDescriptor& descriptor,
    char* ptr,
    Heap* heap) {
  int32_t int32_value = 0;
  switch (descriptor.data_type) {
    case kDescriptorInt8Type:
      int32_value = *CheckedCast<int8_t>(ptr);
      break;
    case kDescriptorUint8Type:
      int32_value = *CheckedCast<uint8_t>(ptr);
      break;
    case kDescriptorInt16Type:
      int32_value = *CheckedCast<int16_t>(ptr);
      break;
    case kDescriptorUint16Type:
      int32_value = *CheckedCast<uint16_t>(ptr);
      break;
    case kDescriptorInt32Type:
      int32_value = *CheckedCast<int32_t>(ptr);
      break;
    case kDescriptorUint32Type: {
      uint32_t value = *CheckedCast<uint32_t>(ptr);
      return heap->NumberFromUint32(value);
    }
    case kDescriptorBoolType: {
      uint8_t byte = *CheckedCast<uint8_t>(ptr);
      return heap->ToBoolean(byte & (0x1 << descriptor.bool_offset));
    }
    case kDescriptorFloatType: {
      float value = *CheckedCast<float>(ptr);
      return heap->NumberFromDouble(value);
    }
    case kDescriptorDoubleType: {
      double value = *CheckedCast<double>(ptr);
      return heap->NumberFromDouble(value);
    }
  }
  return heap->NumberFromInt32(int32_value);
}


// Evaluates a declared-accessor descriptor program against |receiver|:
// walks the descriptor operations (dereference, shift, internal-field read)
// until a terminal operation (return object, compare, primitive read)
// produces a result.  The iterator's Complete() asserts check that terminal
// ops only occur at the end of the descriptor chain.
static MaybeObject* GetDeclaredAccessorProperty(Object* receiver,
                                                DeclaredAccessorInfo* info,
                                                Isolate* isolate) {
  char* current = reinterpret_cast<char*>(receiver);
  DeclaredAccessorDescriptorIterator iterator(info->descriptor());
  while (true) {
    const DeclaredAccessorDescriptorData* data = iterator.Next();
    switch (data->type) {
      case kDescriptorReturnObject: {
        ASSERT(iterator.Complete());
        current = *CheckedCast<char*>(current);
        return *CheckedCast<Object*>(current);
      }
      case kDescriptorPointerDereference:
        ASSERT(!iterator.Complete());
        current = *reinterpret_cast<char**>(current);
        break;
      case kDescriptorPointerShift:
        ASSERT(!iterator.Complete());
        current += data->pointer_shift_descriptor.byte_offset;
        break;
      case kDescriptorObjectDereference: {
        ASSERT(!iterator.Complete());
        Object* object = CheckedCast<Object>(current);
        int field = data->object_dereference_descriptor.internal_field;
        Object* smi = JSObject::cast(object)->GetInternalField(field);
        ASSERT(smi->IsSmi());
        current = reinterpret_cast<char*>(smi);
        break;
      }
      case kDescriptorBitmaskCompare:
        ASSERT(iterator.Complete());
        return PerformCompare(data->bitmask_compare_descriptor,
                              current,
                              isolate->heap());
      case kDescriptorPointerCompare:
        ASSERT(iterator.Complete());
        return PerformCompare(data->pointer_compare_descriptor,
                              current,
                              isolate->heap());
      case kDescriptorPrimitiveValue:
        ASSERT(iterator.Complete());
        return GetPrimitiveValue(data->primitive_value_descriptor,
                                 current,
                                 isolate->heap());
    }
  }
  UNREACHABLE();
  return NULL;
}


Handle<FixedArray> JSObject::EnsureWritableFastElements(
    Handle<JSObject> object) {
  CALL_HEAP_FUNCTION(object->GetIsolate(),
                     object->EnsureWritableFastElements(),
                     FixedArray);
}


// Loads a property of |object| through an accessor |structure|, which may be
// a Foreign (old-style native getter), an AccessorInfo (API callback), or an
// AccessorPair (__defineGetter__-style JS getter).  Returns a null handle if
// an exception was thrown.
Handle<Object> JSObject::GetPropertyWithCallback(Handle<JSObject> object,
                                                 Handle<Object> receiver,
                                                 Handle<Object> structure,
                                                 Handle<Name> name) {
  Isolate* isolate = name->GetIsolate();
  // To accommodate both the old and the new api we switch on the
  // data structure used to store the callbacks.  Eventually foreign
  // callbacks should be phased out.
  if (structure->IsForeign()) {
    AccessorDescriptor* callback =
        reinterpret_cast<AccessorDescriptor*>(
            Handle<Foreign>::cast(structure)->foreign_address());
    CALL_HEAP_FUNCTION(isolate,
                       (callback->getter)(isolate, *receiver, callback->data),
                       Object);
  }

  // api style callbacks.
  if (structure->IsAccessorInfo()) {
    Handle<AccessorInfo> accessor_info = Handle<AccessorInfo>::cast(structure);
    if (!accessor_info->IsCompatibleReceiver(*receiver)) {
      Handle<Object> args[2] = { name, receiver };
      Handle<Object> error =
          isolate->factory()->NewTypeError("incompatible_method_receiver",
                                           HandleVector(args,
                                                        ARRAY_SIZE(args)));
      isolate->Throw(*error);
      return Handle<Object>::null();
    }
    // TODO(rossberg): Handling symbols in the API requires changing the API,
    // so we do not support it for now.
    if (name->IsSymbol()) return isolate->factory()->undefined_value();
    if (structure->IsDeclaredAccessorInfo()) {
      CALL_HEAP_FUNCTION(
          isolate,
          GetDeclaredAccessorProperty(*receiver,
                                      DeclaredAccessorInfo::cast(*structure),
                                      isolate),
          Object);
    }

    Handle<ExecutableAccessorInfo> data =
        Handle<ExecutableAccessorInfo>::cast(structure);
    v8::AccessorGetterCallback call_fun =
        v8::ToCData<v8::AccessorGetterCallback>(data->getter());
    if (call_fun == NULL) return isolate->factory()->undefined_value();

    HandleScope scope(isolate);
    Handle<JSObject> self = Handle<JSObject>::cast(receiver);
    Handle<String> key = Handle<String>::cast(name);
    LOG(isolate, ApiNamedPropertyAccess("load", *self, *name));
    PropertyCallbackArguments args(isolate, data->data(), *self, *object);
    v8::Handle<v8::Value> result =
        args.Call(call_fun, v8::Utils::ToLocal(key));
    RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
    if (result.IsEmpty()) {
      return isolate->factory()->undefined_value();
    }
    Handle<Object> return_value = v8::Utils::OpenHandle(*result);
    return_value->VerifyApiCallResultType();
    return scope.CloseAndEscape(return_value);
  }

  // __defineGetter__ callback
  Handle<Object> getter(Handle<AccessorPair>::cast(structure)->getter(),
                        isolate);
  if (getter->IsSpecFunction()) {
    // TODO(rossberg): nicer would be to cast to some JSCallable here...
    CALL_HEAP_FUNCTION(
        isolate,
        object->GetPropertyWithDefinedGetter(*receiver,
                                             JSReceiver::cast(*getter)),
        Object);
  }
  // Getter is not a function.
  return isolate->factory()->undefined_value();
}


// Loads a named property through this proxy's "get" trap.
MaybeObject* JSProxy::GetPropertyWithHandler(Object* receiver_raw,
                                             Name* name_raw) {
  Isolate* isolate = GetIsolate();
  HandleScope scope(isolate);
  Handle<Object> receiver(receiver_raw, isolate);
  Handle<Object> name(name_raw, isolate);

  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
  if (name->IsSymbol()) return isolate->heap()->undefined_value();

  Handle<Object> args[] = { receiver, name };
  Handle<Object> result = CallTrap(
      "get", isolate->derived_get_trap(), ARRAY_SIZE(args), args);
  if (isolate->has_pending_exception()) return Failure::Exception();

  return *result;
}


Handle<Object> Object::GetProperty(Handle<Object> object,
                                   Handle<Name> name) {
  // TODO(rossberg): The index test should not be here but in the GetProperty
  // method (or somewhere else entirely). Needs more global clean-up.
  uint32_t index;
  Isolate* isolate = name->GetIsolate();
  if (name->AsArrayIndex(&index))
    return GetElement(isolate, object, index);
  CALL_HEAP_FUNCTION(isolate, object->GetProperty(*name), Object);
}


Handle<Object> Object::GetElement(Isolate* isolate,
                                  Handle<Object> object,
                                  uint32_t index) {
  CALL_HEAP_FUNCTION(isolate, object->GetElement(isolate, index), Object);
}


// Element access on a proxy: convert the index to a string name and go
// through the named-property handler path.
MaybeObject* JSProxy::GetElementWithHandler(Object* receiver,
                                            uint32_t index) {
  String* name;
  MaybeObject* maybe = GetHeap()->Uint32ToString(index);
  if (!maybe->To<String>(&name)) return maybe;
  return GetPropertyWithHandler(receiver, name);
}


Handle<Object> JSProxy::SetElementWithHandler(Handle<JSProxy> proxy,
                                              Handle<JSReceiver> receiver,
                                              uint32_t index,
                                              Handle<Object> value,
                                              StrictModeFlag strict_mode) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<String> name = isolate->factory()->Uint32ToString(index);
  return SetPropertyWithHandler(
      proxy, receiver, name, value, NONE, strict_mode);
}


bool JSProxy::HasElementWithHandler(Handle<JSProxy> proxy, uint32_t index) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<String> name = isolate->factory()->Uint32ToString(index);
  return HasPropertyWithHandler(proxy, name);
}


// Invokes a JS-defined getter on |receiver|.  Returns Failure::Exception()
// if the call threw.
MaybeObject* Object::GetPropertyWithDefinedGetter(Object* receiver,
                                                  JSReceiver* getter) {
  Isolate* isolate = getter->GetIsolate();
  HandleScope scope(isolate);
  Handle<JSReceiver> fun(getter);
  Handle<Object> self(receiver, isolate);
#ifdef ENABLE_DEBUGGER_SUPPORT
  Debug* debug = isolate->debug();
  // Handle stepping into a getter if step into is active.
  // TODO(rossberg): should this apply to getters that are function proxies?
  if (debug->StepInActive() && fun->IsJSFunction()) {
    debug->HandleStepIn(
        Handle<JSFunction>::cast(fun), Handle<Object>::null(), 0, false);
  }
#endif

  bool has_pending_exception;
  Handle<Object> result = Execution::Call(
      isolate, fun, self, 0, NULL, &has_pending_exception, true);
  // Check for pending exception and return the result.
  if (has_pending_exception) return Failure::Exception();
  return *result;
}


// Only deal with CALLBACKS and INTERCEPTOR
Handle<Object> JSObject::GetPropertyWithFailedAccessCheck(
    Handle<JSObject> object,
    Handle<Object> receiver,
    LookupResult* result,
    Handle<Name> name,
    PropertyAttributes* attributes) {
  Isolate* isolate = name->GetIsolate();
  if (result->IsProperty()) {
    switch (result->type()) {
      case CALLBACKS: {
        // Only allow API accessors.
        Handle<Object> callback_obj(result->GetCallbackObject(), isolate);
        if (callback_obj->IsAccessorInfo()) {
          if (!AccessorInfo::cast(*callback_obj)->all_can_read()) break;
          *attributes = result->GetAttributes();
          // Fall through to GetPropertyWithCallback.
        } else if (callback_obj->IsAccessorPair()) {
          if (!AccessorPair::cast(*callback_obj)->all_can_read()) break;
          // Fall through to GetPropertyWithCallback.
        } else {
          break;
        }
        Handle<JSObject> holder(result->holder(), isolate);
        return GetPropertyWithCallback(holder, receiver, callback_obj, name);
      }
      case NORMAL:
      case FIELD:
      case CONSTANT: {
        // Search ALL_CAN_READ accessors in prototype chain.
        LookupResult r(isolate);
        result->holder()->LookupRealNamedPropertyInPrototypes(*name, &r);
        if (r.IsProperty()) {
          return GetPropertyWithFailedAccessCheck(
              object, receiver, &r, name, attributes);
        }
        break;
      }
      case INTERCEPTOR: {
        // If the object has an interceptor, try real named properties.
        // No access check in GetPropertyAttributeWithInterceptor.
        LookupResult r(isolate);
        result->holder()->LookupRealNamedProperty(*name, &r);
        if (r.IsProperty()) {
          return GetPropertyWithFailedAccessCheck(
              object, receiver, &r, name, attributes);
        }
        break;
      }
      default:
        UNREACHABLE();
    }
  }

  // No accessible property found.
  *attributes = ABSENT;
  isolate->ReportFailedAccessCheck(*object, v8::ACCESS_GET);
  RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
  return isolate->factory()->undefined_value();
}


// Attribute counterpart of GetPropertyWithFailedAccessCheck: only
// ALL_CAN_READ accessors yield attributes; everything else reports the
// failed access check and returns ABSENT.
PropertyAttributes JSObject::GetPropertyAttributeWithFailedAccessCheck(
    Object* receiver,
    LookupResult* result,
    Name* name,
    bool continue_search) {
  if (result->IsProperty()) {
    switch (result->type()) {
      case CALLBACKS: {
        // Only allow API accessors.
        Object* obj = result->GetCallbackObject();
        if (obj->IsAccessorInfo()) {
          AccessorInfo* info = AccessorInfo::cast(obj);
          if (info->all_can_read()) {
            return result->GetAttributes();
          }
        } else if (obj->IsAccessorPair()) {
          AccessorPair* pair = AccessorPair::cast(obj);
          if (pair->all_can_read()) {
            return result->GetAttributes();
          }
        }
        break;
      }

      case NORMAL:
      case FIELD:
      case CONSTANT: {
        if (!continue_search) break;
        // Search ALL_CAN_READ accessors in prototype chain.
        LookupResult r(GetIsolate());
        result->holder()->LookupRealNamedPropertyInPrototypes(name, &r);
        if (r.IsProperty()) {
          return GetPropertyAttributeWithFailedAccessCheck(receiver,
                                                           &r,
                                                           name,
                                                           continue_search);
        }
        break;
      }

      case INTERCEPTOR: {
        // If the object has an interceptor, try real named properties.
        // No access check in GetPropertyAttributeWithInterceptor.
        LookupResult r(GetIsolate());
        if (continue_search) {
          result->holder()->LookupRealNamedProperty(name, &r);
        } else {
          result->holder()->LocalLookupRealNamedProperty(name, &r);
        }
        if (!r.IsFound()) break;
        return GetPropertyAttributeWithFailedAccessCheck(receiver,
                                                         &r,
                                                         name,
                                                         continue_search);
      }

      case HANDLER:
      case TRANSITION:
      case NONEXISTENT:
        UNREACHABLE();
    }
  }

  GetIsolate()->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
  return ABSENT;
}


// Reads a property from the slow (dictionary) backing store.  For global
// objects the dictionary stores PropertyCells, so unwrap the cell.
Object* JSObject::GetNormalizedProperty(LookupResult* result) {
  ASSERT(!HasFastProperties());
  Object* value = property_dictionary()->ValueAt(result->GetDictionaryEntry());
  if (IsGlobalObject()) {
    value = PropertyCell::cast(value)->value();
  }
  ASSERT(!value->IsPropertyCell() && !value->IsCell());
  return value;
}


// Writes a property into the slow (dictionary) backing store at the entry
// already located by |result|.
void JSObject::SetNormalizedProperty(Handle<JSObject> object,
                                     LookupResult* result,
                                     Handle<Object> value) {
  ASSERT(!object->HasFastProperties());
  NameDictionary* property_dictionary = object->property_dictionary();
  if (object->IsGlobalObject()) {
    Handle<PropertyCell> cell(PropertyCell::cast(
        property_dictionary->ValueAt(result->GetDictionaryEntry())));
    PropertyCell::SetValueInferType(cell, value);
  } else {
    property_dictionary->ValueAtPut(result->GetDictionaryEntry(), *value);
  }
}


// TODO(mstarzinger): Temporary wrapper until handlified.
718 static Handle<NameDictionary> NameDictionaryAdd(Handle<NameDictionary> dict, 719 Handle<Name> name, 720 Handle<Object> value, 721 PropertyDetails details) { 722 CALL_HEAP_FUNCTION(dict->GetIsolate(), 723 dict->Add(*name, *value, details), 724 NameDictionary); 725 } 726 727 728 void JSObject::SetNormalizedProperty(Handle<JSObject> object, 729 Handle<Name> name, 730 Handle<Object> value, 731 PropertyDetails details) { 732 ASSERT(!object->HasFastProperties()); 733 Handle<NameDictionary> property_dictionary(object->property_dictionary()); 734 735 if (!name->IsUniqueName()) { 736 name = object->GetIsolate()->factory()->InternalizedStringFromString( 737 Handle<String>::cast(name)); 738 } 739 740 int entry = property_dictionary->FindEntry(*name); 741 if (entry == NameDictionary::kNotFound) { 742 Handle<Object> store_value = value; 743 if (object->IsGlobalObject()) { 744 store_value = object->GetIsolate()->factory()->NewPropertyCell(value); 745 } 746 747 property_dictionary = 748 NameDictionaryAdd(property_dictionary, name, store_value, details); 749 object->set_properties(*property_dictionary); 750 return; 751 } 752 753 PropertyDetails original_details = property_dictionary->DetailsAt(entry); 754 int enumeration_index; 755 // Preserve the enumeration index unless the property was deleted. 756 if (original_details.IsDeleted()) { 757 enumeration_index = property_dictionary->NextEnumerationIndex(); 758 property_dictionary->SetNextEnumerationIndex(enumeration_index + 1); 759 } else { 760 enumeration_index = original_details.dictionary_index(); 761 ASSERT(enumeration_index > 0); 762 } 763 764 details = PropertyDetails( 765 details.attributes(), details.type(), enumeration_index); 766 767 if (object->IsGlobalObject()) { 768 Handle<PropertyCell> cell( 769 PropertyCell::cast(property_dictionary->ValueAt(entry))); 770 PropertyCell::SetValueInferType(cell, value); 771 // Please note we have to update the property details. 
772 property_dictionary->DetailsAtPut(entry, details); 773 } else { 774 property_dictionary->SetEntry(entry, *name, *value, details); 775 } 776 } 777 778 779 // TODO(mstarzinger): Temporary wrapper until target is handlified. 780 Handle<NameDictionary> NameDictionaryShrink(Handle<NameDictionary> dict, 781 Handle<Name> name) { 782 CALL_HEAP_FUNCTION(dict->GetIsolate(), dict->Shrink(*name), NameDictionary); 783 } 784 785 786 Handle<Object> JSObject::DeleteNormalizedProperty(Handle<JSObject> object, 787 Handle<Name> name, 788 DeleteMode mode) { 789 ASSERT(!object->HasFastProperties()); 790 Isolate* isolate = object->GetIsolate(); 791 Handle<NameDictionary> dictionary(object->property_dictionary()); 792 int entry = dictionary->FindEntry(*name); 793 if (entry != NameDictionary::kNotFound) { 794 // If we have a global object set the cell to the hole. 795 if (object->IsGlobalObject()) { 796 PropertyDetails details = dictionary->DetailsAt(entry); 797 if (details.IsDontDelete()) { 798 if (mode != FORCE_DELETION) return isolate->factory()->false_value(); 799 // When forced to delete global properties, we have to make a 800 // map change to invalidate any ICs that think they can load 801 // from the DontDelete cell without checking if it contains 802 // the hole value. 
803 Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map())); 804 ASSERT(new_map->is_dictionary_map()); 805 object->set_map(*new_map); 806 } 807 Handle<PropertyCell> cell(PropertyCell::cast(dictionary->ValueAt(entry))); 808 Handle<Object> value = isolate->factory()->the_hole_value(); 809 PropertyCell::SetValueInferType(cell, value); 810 dictionary->DetailsAtPut(entry, details.AsDeleted()); 811 } else { 812 Handle<Object> deleted(dictionary->DeleteProperty(entry, mode), isolate); 813 if (*deleted == isolate->heap()->true_value()) { 814 Handle<NameDictionary> new_properties = 815 NameDictionaryShrink(dictionary, name); 816 object->set_properties(*new_properties); 817 } 818 return deleted; 819 } 820 } 821 return isolate->factory()->true_value(); 822 } 823 824 825 bool JSObject::IsDirty() { 826 Object* cons_obj = map()->constructor(); 827 if (!cons_obj->IsJSFunction()) 828 return true; 829 JSFunction* fun = JSFunction::cast(cons_obj); 830 if (!fun->shared()->IsApiFunction()) 831 return true; 832 // If the object is fully fast case and has the same map it was 833 // created with then no changes can have been made to it. 
834 return map() != fun->initial_map() 835 || !HasFastObjectElements() 836 || !HasFastProperties(); 837 } 838 839 840 Handle<Object> Object::GetProperty(Handle<Object> object, 841 Handle<Object> receiver, 842 LookupResult* result, 843 Handle<Name> key, 844 PropertyAttributes* attributes) { 845 Isolate* isolate = result->isolate(); 846 CALL_HEAP_FUNCTION( 847 isolate, 848 object->GetProperty(*receiver, result, *key, attributes), 849 Object); 850 } 851 852 853 MaybeObject* Object::GetPropertyOrFail(Handle<Object> object, 854 Handle<Object> receiver, 855 LookupResult* result, 856 Handle<Name> key, 857 PropertyAttributes* attributes) { 858 Isolate* isolate = result->isolate(); 859 CALL_HEAP_FUNCTION_PASS_EXCEPTION( 860 isolate, 861 object->GetProperty(*receiver, result, *key, attributes)); 862 } 863 864 865 // TODO(yangguo): handlify this and get rid of. 866 MaybeObject* Object::GetProperty(Object* receiver, 867 LookupResult* result, 868 Name* name, 869 PropertyAttributes* attributes) { 870 Isolate* isolate = name->GetIsolate(); 871 Heap* heap = isolate->heap(); 872 873 #ifdef DEBUG 874 // TODO(mstarzinger): Only because of the AssertNoContextChange, drop as soon 875 // as this method has been fully handlified. 876 HandleScope scope(isolate); 877 #endif 878 879 // Make sure that the top context does not change when doing 880 // callbacks or interceptor calls. 881 AssertNoContextChange ncc(isolate); 882 883 // Traverse the prototype chain from the current object (this) to 884 // the holder and check for access rights. This avoids traversing the 885 // objects more than once in case of interceptors, because the 886 // holder will always be the interceptor holder and the search may 887 // only continue with a current object just after the interceptor 888 // holder in the prototype chain. 889 // Proxy handlers do not use the proxy's prototype, so we can skip this. 890 if (!result->IsHandler()) { 891 Object* last = result->IsProperty() 892 ? 
result->holder() 893 : Object::cast(heap->null_value()); 894 ASSERT(this != this->GetPrototype(isolate)); 895 for (Object* current = this; 896 true; 897 current = current->GetPrototype(isolate)) { 898 if (current->IsAccessCheckNeeded()) { 899 // Check if we're allowed to read from the current object. Note 900 // that even though we may not actually end up loading the named 901 // property from the current object, we still check that we have 902 // access to it. 903 JSObject* checked = JSObject::cast(current); 904 if (!isolate->MayNamedAccess(checked, name, v8::ACCESS_GET)) { 905 HandleScope scope(isolate); 906 Handle<Object> value = JSObject::GetPropertyWithFailedAccessCheck( 907 handle(checked, isolate), 908 handle(receiver, isolate), 909 result, 910 handle(name, isolate), 911 attributes); 912 RETURN_IF_EMPTY_HANDLE(isolate, value); 913 return *value; 914 } 915 } 916 // Stop traversing the chain once we reach the last object in the 917 // chain; either the holder of the result or null in case of an 918 // absent property. 919 if (current == last) break; 920 } 921 } 922 923 if (!result->IsProperty()) { 924 *attributes = ABSENT; 925 return heap->undefined_value(); 926 } 927 *attributes = result->GetAttributes(); 928 Object* value; 929 switch (result->type()) { 930 case NORMAL: 931 value = result->holder()->GetNormalizedProperty(result); 932 ASSERT(!value->IsTheHole() || result->IsReadOnly()); 933 return value->IsTheHole() ? heap->undefined_value() : value; 934 case FIELD: { 935 MaybeObject* maybe_result = result->holder()->FastPropertyAt( 936 result->representation(), 937 result->GetFieldIndex().field_index()); 938 if (!maybe_result->To(&value)) return maybe_result; 939 ASSERT(!value->IsTheHole() || result->IsReadOnly()); 940 return value->IsTheHole() ? 
          heap->undefined_value() : value;
    }
    case CONSTANT:
      // Constant properties are stored directly in the descriptor array.
      return result->GetConstant();
    case CALLBACKS: {
      // Accessor pair or API callback; may run arbitrary JS/API code, so
      // switch to handles for the duration of the call.
      HandleScope scope(isolate);
      Handle<Object> value = JSObject::GetPropertyWithCallback(
          handle(result->holder(), isolate),
          handle(receiver, isolate),
          handle(result->GetCallbackObject(), isolate),
          handle(name, isolate));
      RETURN_IF_EMPTY_HANDLE(isolate, value);
      return *value;
    }
    case HANDLER:
      // Harmony proxy: delegate to the proxy's trap machinery.
      return result->proxy()->GetPropertyWithHandler(receiver, name);
    case INTERCEPTOR: {
      HandleScope scope(isolate);
      Handle<Object> value = JSObject::GetPropertyWithInterceptor(
          handle(result->holder(), isolate),
          handle(receiver, isolate),
          handle(name, isolate),
          attributes);
      RETURN_IF_EMPTY_HANDLE(isolate, value);
      return *value;
    }
    case TRANSITION:
    case NONEXISTENT:
      // Lookup results of these types never reach this code path.
      UNREACHABLE();
      break;
  }
  UNREACHABLE();
  return NULL;
}


// Returns the value of the element at |index|, searching the prototype
// chain starting at |this|.  Non-JSObject holders (numbers, strings,
// symbols, booleans) are replaced by the instance prototype of their
// wrapper function; proxies delegate to their handler.  Returns
// undefined if the element is absent on the whole chain.
MaybeObject* Object::GetElementWithReceiver(Isolate* isolate,
                                            Object* receiver,
                                            uint32_t index) {
  Heap* heap = isolate->heap();
  Object* holder = this;

  // Iterate up the prototype chain until an element is found or the null
  // prototype is encountered.
  for (holder = this;
       holder != heap->null_value();
       holder = holder->GetPrototype(isolate)) {
    if (!holder->IsJSObject()) {
      Context* native_context = isolate->context()->native_context();
      if (holder->IsNumber()) {
        holder = native_context->number_function()->instance_prototype();
      } else if (holder->IsString()) {
        holder = native_context->string_function()->instance_prototype();
      } else if (holder->IsSymbol()) {
        holder = native_context->symbol_function()->instance_prototype();
      } else if (holder->IsBoolean()) {
        holder = native_context->boolean_function()->instance_prototype();
      } else if (holder->IsJSProxy()) {
        return JSProxy::cast(holder)->GetElementWithHandler(receiver, index);
      } else {
        // Undefined and null have no indexed properties.
        ASSERT(holder->IsUndefined() || holder->IsNull());
        return heap->undefined_value();
      }
    }

    // Inline the case for JSObjects. Doing so significantly improves the
    // performance of fetching elements where checking the prototype chain is
    // necessary.
    JSObject* js_object = JSObject::cast(holder);

    // Check access rights if needed.
    if (js_object->IsAccessCheckNeeded()) {
      // NOTE(review): this shadows the |isolate| parameter; both refer to
      // the same isolate.
      Isolate* isolate = heap->isolate();
      if (!isolate->MayIndexedAccess(js_object, index, v8::ACCESS_GET)) {
        isolate->ReportFailedAccessCheck(js_object, v8::ACCESS_GET);
        RETURN_IF_SCHEDULED_EXCEPTION(isolate);
        return heap->undefined_value();
      }
    }

    if (js_object->HasIndexedInterceptor()) {
      return js_object->GetElementWithInterceptor(receiver, index);
    }

    if (js_object->elements() != heap->empty_fixed_array()) {
      MaybeObject* result = js_object->GetElementsAccessor()->Get(
          receiver, js_object, index);
      // The hole means "absent on this object"; keep walking the chain.
      if (result != heap->the_hole_value()) return result;
    }
  }

  return heap->undefined_value();
}


// Returns the prototype of the receiver.  Smis and other primitives use
// the instance prototype of the corresponding wrapper function from the
// current native context.
Object* Object::GetPrototype(Isolate* isolate) {
  if (IsSmi()) {
    Context* context = isolate->context()->native_context();
    return context->number_function()->instance_prototype();
  }

  HeapObject* heap_object = HeapObject::cast(this);

  // The object is either a number, a string, a boolean,
  // a real JS object, or a Harmony proxy.
  if (heap_object->IsJSReceiver()) {
    return heap_object->map()->prototype();
  }
  Context* context = isolate->context()->native_context();

  if (heap_object->IsHeapNumber()) {
    return context->number_function()->instance_prototype();
  }
  if (heap_object->IsString()) {
    return context->string_function()->instance_prototype();
  }
  if (heap_object->IsSymbol()) {
    return context->symbol_function()->instance_prototype();
  }
  if (heap_object->IsBoolean()) {
    return context->boolean_function()->instance_prototype();
  } else {
    // Undefined, null and the remaining oddballs have no prototype.
    return isolate->heap()->null_value();
  }
}


// Returns a map identifying this object's type even for Smis (which have
// no map of their own); Smis are treated as heap numbers.
Map* Object::GetMarkerMap(Isolate* isolate) {
  if (IsSmi()) return isolate->heap()->heap_number_map();
  return HeapObject::cast(this)->map();
}


// Returns a Smi hash for numbers, names and oddballs; JS receivers use
// their identity hash (see GetOrCreateHash for the allocating variant).
Object* Object::GetHash() {
  // The object is either a number, a name, an odd-ball,
  // a real JS object, or a Harmony proxy.
  if (IsNumber()) {
    uint32_t hash = ComputeLongHash(double_to_uint64(Number()));
    return Smi::FromInt(hash & Smi::kMaxValue);
  }
  if (IsName()) {
    uint32_t hash = Name::cast(this)->Hash();
    return Smi::FromInt(hash);
  }
  if (IsOddball()) {
    uint32_t hash = Oddball::cast(this)->to_string()->Hash();
    return Smi::FromInt(hash);
  }

  ASSERT(IsJSReceiver());
  return JSReceiver::cast(this)->GetIdentityHash();
}


// As GetHash(), but allocates an identity hash for JS receivers that do
// not have one yet (hence the handlified signature).
Handle<Object> Object::GetOrCreateHash(Handle<Object> object,
                                       Isolate* isolate) {
  Handle<Object> hash(object->GetHash(), isolate);
  if (hash->IsSmi())
    return hash;

  ASSERT(object->IsJSReceiver());
  return JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver>::cast(object));
}


// ES5 SameValue: like pointer/numeric equality, except NaN equals NaN
// and +0 does not equal -0.
bool Object::SameValue(Object* other) {
  if (other == this) return true;

  // The object is either a number, a name, an odd-ball,
  // a real JS object, or a Harmony proxy.
  if (IsNumber() && other->IsNumber()) {
    double this_value = Number();
    double other_value = other->Number();
    bool equal = this_value == other_value;
    // SameValue(NaN, NaN) is true.
    if (!equal) return std::isnan(this_value) && std::isnan(other_value);
    // SameValue(0.0, -0.0) is false: distinguish via the sign of 1/x.
    return (this_value != 0) || ((1 / this_value) == (1 / other_value));
  }
  if (IsString() && other->IsString()) {
    return String::cast(this)->Equals(String::cast(other));
  }
  return false;
}


// Prints a short description of this object to |out| via a temporary
// string-stream accumulator.
void Object::ShortPrint(FILE* out) {
  HeapStringAllocator allocator;
  StringStream accumulator(&allocator);
  ShortPrint(&accumulator);
  accumulator.OutputToFile(out);
}


// Dispatches short-printing on the object's representation.
void Object::ShortPrint(StringStream* accumulator) {
  if (IsSmi()) {
    Smi::cast(this)->SmiPrint(accumulator);
  } else if (IsFailure()) {
    Failure::cast(this)->FailurePrint(accumulator);
  } else {
    HeapObject::cast(this)->HeapObjectShortPrint(accumulator);
  }
}


void Smi::SmiPrint(FILE* out) {
  PrintF(out, "%d", value());
}


void Smi::SmiPrint(StringStream* accumulator) {
  accumulator->Add("%d", value());
}


void Failure::FailurePrint(StringStream* accumulator) {
  accumulator->Add("Failure(%p)", reinterpret_cast<void*>(value()));
}


void Failure::FailurePrint(FILE* out) {
  PrintF(out, "Failure(%p)", reinterpret_cast<void*>(value()));
}


// Should a word be prefixed by 'a' or 'an' in order to read naturally in
// English? Returns false for non-ASCII or words that don't start with
// a capital letter. The a/an rule follows pronunciation in English.
// We don't use the BBC's overcorrect "an historic occasion" though if
// you speak a dialect you may well say "an 'istoric occasion".
static bool AnWord(String* str) {
  if (str->length() == 0) return false;  // A nothing.
  int c0 = str->Get(0);
  int c1 = str->length() > 1 ? str->Get(1) : 0;
  if (c0 == 'U') {
    if (c1 > 'Z') {
      return true;  // An Umpire, but a UTF8String, a U.
    }
  } else if (c0 == 'A' || c0 == 'E' || c0 == 'I' || c0 == 'O') {
    return true;  // An Ape, an ABCBook.
  } else if ((c1 == 0 || (c1 >= 'A' && c1 <= 'Z')) &&
      (c0 == 'F' || c0 == 'H' || c0 == 'M' || c0 == 'N' || c0 == 'R' ||
       c0 == 'S' || c0 == 'X')) {
    return true;  // An MP3File, an M.
  }
  return false;
}


// Flattens a cons string by copying both halves into a freshly allocated
// sequential string, then short-circuits the cons cell to point at the
// flat result.  Non-cons representations are returned unchanged.
MaybeObject* String::SlowTryFlatten(PretenureFlag pretenure) {
#ifdef DEBUG
  // Do not attempt to flatten in debug mode when allocation is not
  // allowed. This is to avoid an assertion failure when allocating.
  // Flattening strings is the only case where we always allow
  // allocation because no GC is performed if the allocation fails.
  if (!AllowHeapAllocation::IsAllowed()) return this;
#endif

  Heap* heap = GetHeap();
  switch (StringShape(this).representation_tag()) {
    case kConsStringTag: {
      ConsString* cs = ConsString::cast(this);
      if (cs->second()->length() == 0) {
        // Already flat: all content lives in first().
        return cs->first();
      }
      // There's little point in putting the flat string in new space if the
      // cons string is in old space. It can never get GCed until there is
      // an old space GC.
      PretenureFlag tenure = heap->InNewSpace(this) ? pretenure : TENURED;
      int len = length();
      Object* object;
      String* result;
      if (IsOneByteRepresentation()) {
        { MaybeObject* maybe_object =
              heap->AllocateRawOneByteString(len, tenure);
          if (!maybe_object->ToObject(&object)) return maybe_object;
        }
        result = String::cast(object);
        String* first = cs->first();
        int first_length = first->length();
        uint8_t* dest = SeqOneByteString::cast(result)->GetChars();
        WriteToFlat(first, dest, 0, first_length);
        String* second = cs->second();
        WriteToFlat(second,
                    dest + first_length,
                    0,
                    len - first_length);
      } else {
        { MaybeObject* maybe_object =
              heap->AllocateRawTwoByteString(len, tenure);
          if (!maybe_object->ToObject(&object)) return maybe_object;
        }
        result = String::cast(object);
        uc16* dest = SeqTwoByteString::cast(result)->GetChars();
        String* first = cs->first();
        int first_length = first->length();
        WriteToFlat(first, dest, 0, first_length);
        String* second = cs->second();
        WriteToFlat(second,
                    dest + first_length,
                    0,
                    len - first_length);
      }
      // Short-circuit the cons cell so future reads hit the flat string.
      cs->set_first(result);
      cs->set_second(heap->empty_string(), SKIP_WRITE_BARRIER);
      return result;
    }
    default:
      return this;
  }
}


// Morphs this string in place into an external two-byte string backed by
// |resource|.  Returns false if the object is too small to be converted.
bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
  // Externalizing twice leaks the external resource, so it's
  // prohibited by the API.
  ASSERT(!this->IsExternalString());
#ifdef ENABLE_SLOW_ASSERTS
  if (FLAG_enable_slow_asserts) {
    // Assert that the resource and the string are equivalent.
    ASSERT(static_cast<size_t>(this->length()) == resource->length());
    ScopedVector<uc16> smart_chars(this->length());
    String::WriteToFlat(this, smart_chars.start(), 0, this->length());
    ASSERT(memcmp(smart_chars.start(),
                  resource->data(),
                  resource->length() * sizeof(smart_chars[0])) == 0);
  }
#endif  // ENABLE_SLOW_ASSERTS
  Heap* heap = GetHeap();
  int size = this->Size();  // Byte size of the original string.
  if (size < ExternalString::kShortSize) {
    return false;
  }
  bool is_ascii = this->IsOneByteRepresentation();
  bool is_internalized = this->IsInternalizedString();

  // Morph the object to an external string by adjusting the map and
  // reinitializing the fields.
  if (size >= ExternalString::kSize) {
    this->set_map_no_write_barrier(
        is_internalized
            ? (is_ascii
                   ? heap->external_internalized_string_with_one_byte_data_map()
                   : heap->external_internalized_string_map())
            : (is_ascii
                   ? heap->external_string_with_one_byte_data_map()
                   : heap->external_string_map()));
  } else {
    // Not enough room for a full external string: use the short variant.
    this->set_map_no_write_barrier(
        is_internalized
            ? (is_ascii
                   ? heap->
                       short_external_internalized_string_with_one_byte_data_map()
                   : heap->short_external_internalized_string_map())
            : (is_ascii
                   ? heap->short_external_string_with_one_byte_data_map()
                   : heap->short_external_string_map()));
  }
  ExternalTwoByteString* self = ExternalTwoByteString::cast(this);
  self->set_resource(resource);
  if (is_internalized) self->Hash();  // Force regeneration of the hash value.

  // Fill the remainder of the string with dead wood.
  int new_size = this->Size();  // Byte size of the external String object.
  heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
  if (Marking::IsBlack(Marking::MarkBitFrom(this))) {
    // Keep the incremental marker's live-byte count in sync with the
    // shrunken object (new_size - size is negative here).
    MemoryChunk::IncrementLiveBytesFromMutator(this->address(),
                                               new_size - size);
  }
  return true;
}


// One-byte variant of MakeExternal above; morphs this string in place
// into an external ASCII string backed by |resource|.
bool String::MakeExternal(v8::String::ExternalAsciiStringResource* resource) {
#ifdef ENABLE_SLOW_ASSERTS
  if (FLAG_enable_slow_asserts) {
    // Assert that the resource and the string are equivalent.
    ASSERT(static_cast<size_t>(this->length()) == resource->length());
    if (this->IsTwoByteRepresentation()) {
      // A two-byte string may only be externalized as ASCII if its
      // contents are actually one-byte.
      ScopedVector<uint16_t> smart_chars(this->length());
      String::WriteToFlat(this, smart_chars.start(), 0, this->length());
      ASSERT(String::IsOneByte(smart_chars.start(), this->length()));
    }
    ScopedVector<char> smart_chars(this->length());
    String::WriteToFlat(this, smart_chars.start(), 0, this->length());
    ASSERT(memcmp(smart_chars.start(),
                  resource->data(),
                  resource->length() * sizeof(smart_chars[0])) == 0);
  }
#endif  // ENABLE_SLOW_ASSERTS
  Heap* heap = GetHeap();
  int size = this->Size();  // Byte size of the original string.
  if (size < ExternalString::kShortSize) {
    return false;
  }
  bool is_internalized = this->IsInternalizedString();

  // Morph the object to an external string by adjusting the map and
  // reinitializing the fields. Use short version if space is limited.
  if (size >= ExternalString::kSize) {
    this->set_map_no_write_barrier(
        is_internalized ? heap->external_ascii_internalized_string_map()
                        : heap->external_ascii_string_map());
  } else {
    this->set_map_no_write_barrier(
        is_internalized ?
            heap->short_external_ascii_internalized_string_map()
          : heap->short_external_ascii_string_map());
  }
  ExternalAsciiString* self = ExternalAsciiString::cast(this);
  self->set_resource(resource);
  if (is_internalized) self->Hash();  // Force regeneration of the hash value.

  // Fill the remainder of the string with dead wood.
  int new_size = this->Size();  // Byte size of the external String object.
  heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
  if (Marking::IsBlack(Marking::MarkBitFrom(this))) {
    // Keep the incremental marker's live-byte count in sync with the
    // shrunken object (new_size - size is negative here).
    MemoryChunk::IncrementLiveBytesFromMutator(this->address(),
                                               new_size - size);
  }
  return true;
}


// Prints an abbreviated, single-line description of this string into
// |accumulator|; non-printable characters are backslash-escaped.
void String::StringShortPrint(StringStream* accumulator) {
  int len = length();
  if (len > kMaxShortPrintLength) {
    accumulator->Add("<Very long string[%u]>", len);
    return;
  }

  if (!LooksValid()) {
    accumulator->Add("<Invalid String>");
    return;
  }

  ConsStringIteratorOp op;
  StringCharacterStream stream(this, &op);

  bool truncated = false;
  if (len > kMaxShortPrintLength) {
    len = kMaxShortPrintLength;
    truncated = true;
  }
  // First pass: decide whether the whole string is printable ASCII.
  bool ascii = true;
  for (int i = 0; i < len; i++) {
    uint16_t c = stream.GetNext();

    if (c < 32 || c >= 127) {
      ascii = false;
    }
  }
  stream.Reset(this);
  if (ascii) {
    accumulator->Add("<String[%u]: ", length());
    for (int i = 0; i < len; i++) {
      accumulator->Put(static_cast<char>(stream.GetNext()));
    }
    accumulator->Put('>');
  } else {
    // Backslash indicates that the string contains control
    // characters and that backslashes are therefore escaped.
    accumulator->Add("<String[%u]\\: ", length());
    for (int i = 0; i < len; i++) {
      uint16_t c = stream.GetNext();
      if (c == '\n') {
        accumulator->Add("\\n");
      } else if (c == '\r') {
        accumulator->Add("\\r");
      } else if (c == '\\') {
        accumulator->Add("\\\\");
      } else if (c < 32 || c > 126) {
        accumulator->Add("\\x%02x", c);
      } else {
        accumulator->Put(static_cast<char>(c));
      }
    }
    if (truncated) {
      accumulator->Put('.');
      accumulator->Put('.');
      accumulator->Put('.');
    }
    accumulator->Put('>');
  }
  return;
}


// Prints a short, human-readable description of this JS object into
// |accumulator|, dispatching on its instance type.
void JSObject::JSObjectShortPrint(StringStream* accumulator) {
  switch (map()->instance_type()) {
    case JS_ARRAY_TYPE: {
      double length = JSArray::cast(this)->length()->IsUndefined()
          ? 0
          : JSArray::cast(this)->length()->Number();
      accumulator->Add("<JS Array[%u]>", static_cast<uint32_t>(length));
      break;
    }
    case JS_WEAK_MAP_TYPE: {
      accumulator->Add("<JS WeakMap>");
      break;
    }
    case JS_WEAK_SET_TYPE: {
      accumulator->Add("<JS WeakSet>");
      break;
    }
    case JS_REGEXP_TYPE: {
      accumulator->Add("<JS RegExp>");
      break;
    }
    case JS_FUNCTION_TYPE: {
      JSFunction* function = JSFunction::cast(this);
      Object* fun_name = function->shared()->DebugName();
      bool printed = false;
      if (fun_name->IsString()) {
        String* str = String::cast(fun_name);
        if (str->length() > 0) {
          accumulator->Add("<JS Function ");
          accumulator->Put(str);
          printed = true;
        }
      }
      if (!printed) {
        accumulator->Add("<JS Function");
      }
      accumulator->Add(" (SharedFunctionInfo %p)",
                       reinterpret_cast<void*>(function->shared()));
      accumulator->Put('>');
      break;
    }
    case JS_GENERATOR_OBJECT_TYPE: {
      accumulator->Add("<JS Generator>");
      break;
    }
    case JS_MODULE_TYPE: {
      accumulator->Add("<JS Module>");
      break;
    }
    // All other JSObjects are rather similar to each other (JSObject,
    // JSGlobalProxy, JSGlobalObject, JSUndetectableObject, JSValue).
    default: {
      Map* map_of_this = map();
      Heap* heap = GetHeap();
      Object* constructor = map_of_this->constructor();
      bool printed = false;
      if (constructor->IsHeapObject() &&
          !heap->Contains(HeapObject::cast(constructor))) {
        // Defensive: usable from crash dumps, so validate heap pointers.
        accumulator->Add("!!!INVALID CONSTRUCTOR!!!");
      } else {
        bool global_object = IsJSGlobalProxy();
        if (constructor->IsJSFunction()) {
          if (!heap->Contains(JSFunction::cast(constructor)->shared())) {
            accumulator->Add("!!!INVALID SHARED ON CONSTRUCTOR!!!");
          } else {
            Object* constructor_name =
                JSFunction::cast(constructor)->shared()->name();
            if (constructor_name->IsString()) {
              String* str = String::cast(constructor_name);
              if (str->length() > 0) {
                bool vowel = AnWord(str);
                accumulator->Add("<%sa%s ",
                                 global_object ? "Global Object: " : "",
                                 vowel ? "n" : "");
                accumulator->Put(str);
                accumulator->Add(" with %smap %p",
                                 map_of_this->is_deprecated() ? "deprecated " : "",
                                 map_of_this);
                printed = true;
              }
            }
          }
        }
        if (!printed) {
          accumulator->Add("<JS %sObject", global_object ? "Global " : "");
        }
      }
      if (IsJSValue()) {
        accumulator->Add(" value = ");
        JSValue::cast(this)->value()->ShortPrint(accumulator);
      }
      accumulator->Put('>');
      break;
    }
  }
}


// Logs an elements-kind transition to |file|; silent when the kind does
// not actually change (same-kind backing-store swaps).
void JSObject::PrintElementsTransition(
    FILE* file, ElementsKind from_kind, FixedArrayBase* from_elements,
    ElementsKind to_kind, FixedArrayBase* to_elements) {
  if (from_kind != to_kind) {
    PrintF(file, "elements transition [");
    PrintElementsKind(file, from_kind);
    PrintF(file, " -> ");
    PrintElementsKind(file, to_kind);
    PrintF(file, "] in ");
    JavaScriptFrame::PrintTop(GetIsolate(), file, false, true);
    PrintF(file, " for ");
    ShortPrint(file);
    PrintF(file, " from ");
    from_elements->ShortPrint(file);
    PrintF(file, " to ");
    to_elements->ShortPrint(file);
    PrintF(file, "\n");
  }
}


// Logs a field-representation generalization performed on this map.
void Map::PrintGeneralization(FILE* file,
                              const char* reason,
                              int modify_index,
                              int split,
                              int descriptors,
                              bool constant_to_field,
                              Representation old_representation,
                              Representation new_representation) {
  PrintF(file, "[generalizing ");
  constructor_name()->PrintOn(file);
  PrintF(file, "] ");
  String::cast(instance_descriptors()->GetKey(modify_index))->PrintOn(file);
  if (constant_to_field) {
    PrintF(file, ":c->f");
  } else {
    PrintF(file, ":%s->%s",
           old_representation.Mnemonic(),
           new_representation.Mnemonic());
  }
  PrintF(file, " (");
  if (strlen(reason) > 0) {
    PrintF(file, "%s", reason);
  } else {
    PrintF(file, "+%i maps", descriptors - split);
  }
  PrintF(file, ") [");
  JavaScriptFrame::PrintTop(GetIsolate(), file, false, true);
  PrintF(file, "]\n");
}


// Logs the per-descriptor changes made when this object migrates from
// |original_map| to |new_map|.
void JSObject::PrintInstanceMigration(FILE* file,
                                      Map* original_map,
                                      Map* new_map) {
  PrintF(file, "[migrating ");
  map()->constructor_name()->PrintOn(file);
  PrintF(file, "] ");
  DescriptorArray* o = original_map->instance_descriptors();
  DescriptorArray* n = new_map->instance_descriptors();
  for (int i = 0; i < original_map->NumberOfOwnDescriptors(); i++) {
    Representation o_r = o->GetDetails(i).representation();
    Representation n_r = n->GetDetails(i).representation();
    if (!o_r.Equals(n_r)) {
      // Representation change: print "name:old->new".
      String::cast(o->GetKey(i))->PrintOn(file);
      PrintF(file, ":%s->%s ", o_r.Mnemonic(), n_r.Mnemonic());
    } else if (o->GetDetails(i).type() == CONSTANT &&
               n->GetDetails(i).type() == FIELD) {
      // Constant turned into a field: print the property name only.
      Name* name = o->GetKey(i);
      if (name->IsString()) {
        String::cast(name)->PrintOn(file);
      } else {
        PrintF(file, "???");
      }
      PrintF(file, " ");
    }
  }
  PrintF(file, "\n");
}


// Prints a short description of any heap object, dispatching on its
// instance type.  Defensively validates that the object and its map lie
// inside the heap so this is usable when producing crash dumps.
void HeapObject::HeapObjectShortPrint(StringStream* accumulator) {
  Heap* heap = GetHeap();
  if (!heap->Contains(this)) {
    accumulator->Add("!!!INVALID POINTER!!!");
    return;
  }
  if (!heap->Contains(map())) {
    accumulator->Add("!!!INVALID MAP!!!");
    return;
  }

  accumulator->Add("%p ", this);

  if (IsString()) {
    String::cast(this)->StringShortPrint(accumulator);
    return;
  }
  if (IsJSObject()) {
    JSObject::cast(this)->JSObjectShortPrint(accumulator);
    return;
  }
  switch (map()->instance_type()) {
    case MAP_TYPE:
      accumulator->Add("<Map(elements=%u)>", Map::cast(this)->elements_kind());
      break;
    case FIXED_ARRAY_TYPE:
      accumulator->Add("<FixedArray[%u]>", FixedArray::cast(this)->length());
      break;
    case FIXED_DOUBLE_ARRAY_TYPE:
      accumulator->Add("<FixedDoubleArray[%u]>",
                       FixedDoubleArray::cast(this)->length());
      break;
    case BYTE_ARRAY_TYPE:
      accumulator->Add("<ByteArray[%u]>", ByteArray::cast(this)->length());
      break;
    case FREE_SPACE_TYPE:
      accumulator->Add("<FreeSpace[%u]>", FreeSpace::cast(this)->Size());
      break;
    case EXTERNAL_PIXEL_ARRAY_TYPE:
      accumulator->Add("<ExternalPixelArray[%u]>",
                       ExternalPixelArray::cast(this)->length());
      break;
    case EXTERNAL_BYTE_ARRAY_TYPE:
      accumulator->Add("<ExternalByteArray[%u]>",
                       ExternalByteArray::cast(this)->length());
      break;
    case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
      accumulator->Add("<ExternalUnsignedByteArray[%u]>",
                       ExternalUnsignedByteArray::cast(this)->length());
      break;
    case EXTERNAL_SHORT_ARRAY_TYPE:
      accumulator->Add("<ExternalShortArray[%u]>",
                       ExternalShortArray::cast(this)->length());
      break;
    case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
      accumulator->Add("<ExternalUnsignedShortArray[%u]>",
                       ExternalUnsignedShortArray::cast(this)->length());
      break;
    case EXTERNAL_INT_ARRAY_TYPE:
      accumulator->Add("<ExternalIntArray[%u]>",
                       ExternalIntArray::cast(this)->length());
      break;
    case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
      accumulator->Add("<ExternalUnsignedIntArray[%u]>",
                       ExternalUnsignedIntArray::cast(this)->length());
      break;
    case EXTERNAL_FLOAT_ARRAY_TYPE:
      accumulator->Add("<ExternalFloatArray[%u]>",
                       ExternalFloatArray::cast(this)->length());
      break;
    case EXTERNAL_DOUBLE_ARRAY_TYPE:
      accumulator->Add("<ExternalDoubleArray[%u]>",
                       ExternalDoubleArray::cast(this)->length());
      break;
    case SHARED_FUNCTION_INFO_TYPE: {
      SharedFunctionInfo* shared = SharedFunctionInfo::cast(this);
      SmartArrayPointer<char> debug_name =
          shared->DebugName()->ToCString();
      if (debug_name[0] != 0) {
        accumulator->Add("<SharedFunctionInfo %s>", *debug_name);
      } else {
        accumulator->Add("<SharedFunctionInfo>");
      }
      break;
    }
    case JS_MESSAGE_OBJECT_TYPE:
      accumulator->Add("<JSMessageObject>");
      break;
#define MAKE_STRUCT_CASE(NAME, Name, name) \
  case NAME##_TYPE:                        \
    accumulator->Put('<');                 \
    accumulator->Add(#Name);               \
    accumulator->Put('>');                 \
    break;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    case CODE_TYPE:
      accumulator->Add("<Code>");
      break;
    case ODDBALL_TYPE: {
      if (IsUndefined())
        accumulator->Add("<undefined>");
      else if (IsTheHole())
        accumulator->Add("<the hole>");
      else if (IsNull())
        accumulator->Add("<null>");
      else if (IsTrue())
        accumulator->Add("<true>");
      else if (IsFalse())
        accumulator->Add("<false>");
      else
        accumulator->Add("<Odd Oddball>");
      break;
    }
    case SYMBOL_TYPE: {
      Symbol* symbol = Symbol::cast(this);
      accumulator->Add("<Symbol: %d", symbol->Hash());
      if (!symbol->name()->IsUndefined()) {
        accumulator->Add(" ");
        String::cast(symbol->name())->StringShortPrint(accumulator);
      }
      accumulator->Add(">");
      break;
    }
    case HEAP_NUMBER_TYPE:
      accumulator->Add("<Number: ");
      HeapNumber::cast(this)->HeapNumberPrint(accumulator);
      accumulator->Put('>');
      break;
    case JS_PROXY_TYPE:
      accumulator->Add("<JSProxy>");
      break;
    case JS_FUNCTION_PROXY_TYPE:
      accumulator->Add("<JSFunctionProxy>");
      break;
    case FOREIGN_TYPE:
      accumulator->Add("<Foreign>");
      break;
    case CELL_TYPE:
      accumulator->Add("Cell for ");
      Cell::cast(this)->value()->ShortPrint(accumulator);
      break;
    case PROPERTY_CELL_TYPE:
      accumulator->Add("PropertyCell for ");
      PropertyCell::cast(this)->value()->ShortPrint(accumulator);
      break;
    default:
      accumulator->Add("<Other heap object (%d)>", map()->instance_type());
      break;
  }
}


// Visits the map pointer, then the type-specific body of this object.
void HeapObject::Iterate(ObjectVisitor* v) {
  // Handle header
  IteratePointer(v, kMapOffset);
  // Handle object body
  Map* m = map();
  IterateBody(m->instance_type(), SizeFromMap(m), v);
}


// Dispatches to the BodyDescriptor of the given instance type to visit
// all pointer fields of this object.  Called from the GC, so it must
// not use checked casts that read the (possibly encoded) map pointer.
void HeapObject::IterateBody(InstanceType type, int object_size,
                             ObjectVisitor* v) {
  // Avoiding
  // <Type>::cast(this) because it accesses the map pointer field.
  // During GC, the map pointer field is encoded.
  if (type < FIRST_NONSTRING_TYPE) {
    switch (type & kStringRepresentationMask) {
      case kSeqStringTag:
        // Sequential strings contain no pointers.
        break;
      case kConsStringTag:
        ConsString::BodyDescriptor::IterateBody(this, v);
        break;
      case kSlicedStringTag:
        SlicedString::BodyDescriptor::IterateBody(this, v);
        break;
      case kExternalStringTag:
        if ((type & kStringEncodingMask) == kOneByteStringTag) {
          reinterpret_cast<ExternalAsciiString*>(this)->
              ExternalAsciiStringIterateBody(v);
        } else {
          reinterpret_cast<ExternalTwoByteString*>(this)->
              ExternalTwoByteStringIterateBody(v);
        }
        break;
    }
    return;
  }

  switch (type) {
    case FIXED_ARRAY_TYPE:
      FixedArray::BodyDescriptor::IterateBody(this, object_size, v);
      break;
    case CONSTANT_POOL_ARRAY_TYPE:
      reinterpret_cast<ConstantPoolArray*>(this)->ConstantPoolIterateBody(v);
      break;
    case FIXED_DOUBLE_ARRAY_TYPE:
      break;
    case JS_OBJECT_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_GENERATOR_OBJECT_TYPE:
    case JS_MODULE_TYPE:
    case JS_VALUE_TYPE:
    case JS_DATE_TYPE:
    case JS_ARRAY_TYPE:
    case JS_ARRAY_BUFFER_TYPE:
    case JS_TYPED_ARRAY_TYPE:
    case JS_DATA_VIEW_TYPE:
    case JS_SET_TYPE:
    case JS_MAP_TYPE:
    case JS_WEAK_MAP_TYPE:
    case JS_WEAK_SET_TYPE:
    case JS_REGEXP_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_BUILTINS_OBJECT_TYPE:
    case JS_MESSAGE_OBJECT_TYPE:
      JSObject::BodyDescriptor::IterateBody(this, object_size, v);
      break;
    case JS_FUNCTION_TYPE:
      reinterpret_cast<JSFunction*>(this)
          ->JSFunctionIterateBody(object_size, v);
      break;
    case ODDBALL_TYPE:
      Oddball::BodyDescriptor::IterateBody(this, v);
      break;
    case JS_PROXY_TYPE:
      JSProxy::BodyDescriptor::IterateBody(this, v);
      break;
    case JS_FUNCTION_PROXY_TYPE:
      JSFunctionProxy::BodyDescriptor::IterateBody(this, v);
      break;
    case FOREIGN_TYPE:
      reinterpret_cast<Foreign*>(this)->ForeignIterateBody(v);
      break;
    case MAP_TYPE:
      Map::BodyDescriptor::IterateBody(this, v);
      break;
    case CODE_TYPE:
      reinterpret_cast<Code*>(this)->CodeIterateBody(v);
      break;
    case CELL_TYPE:
      Cell::BodyDescriptor::IterateBody(this, v);
      break;
    case PROPERTY_CELL_TYPE:
      PropertyCell::BodyDescriptor::IterateBody(this, v);
      break;
    case SYMBOL_TYPE:
      Symbol::BodyDescriptor::IterateBody(this, v);
      break;
    case HEAP_NUMBER_TYPE:
    case FILLER_TYPE:
    case BYTE_ARRAY_TYPE:
    case FREE_SPACE_TYPE:
    case EXTERNAL_PIXEL_ARRAY_TYPE:
    case EXTERNAL_BYTE_ARRAY_TYPE:
    case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
    case EXTERNAL_SHORT_ARRAY_TYPE:
    case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
    case EXTERNAL_INT_ARRAY_TYPE:
    case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
    case EXTERNAL_FLOAT_ARRAY_TYPE:
    case EXTERNAL_DOUBLE_ARRAY_TYPE:
      // These types contain no pointer fields to visit.
      break;
    case SHARED_FUNCTION_INFO_TYPE: {
      SharedFunctionInfo::BodyDescriptor::IterateBody(this, v);
      break;
    }

#define MAKE_STRUCT_CASE(NAME, Name, name) \
        case NAME##_TYPE:
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      // All struct types share the generic descriptor, except allocation
      // sites which need weak-field handling.
      if (type == ALLOCATION_SITE_TYPE) {
        AllocationSite::BodyDescriptor::IterateBody(this, v);
      } else {
        StructBodyDescriptor::IterateBody(this, object_size, v);
      }
      break;
    default:
      PrintF("Unknown type: %d\n", type);
      UNREACHABLE();
  }
}


// Converts this heap number to a boolean: NaN, +0 and -0 are false,
// everything else is true.
bool HeapNumber::HeapNumberBooleanValue() {
  // NaN, +0, and -0 should return the false object
#if __BYTE_ORDER == __LITTLE_ENDIAN
  union IeeeDoubleLittleEndianArchType u;
#elif __BYTE_ORDER == __BIG_ENDIAN
  union IeeeDoubleBigEndianArchType u;
1899 #endif 1900 u.d = value(); 1901 if (u.bits.exp == 2047) { 1902 // Detect NaN for IEEE double precision floating point. 1903 if ((u.bits.man_low | u.bits.man_high) != 0) return false; 1904 } 1905 if (u.bits.exp == 0) { 1906 // Detect +0, and -0 for IEEE double precision floating point. 1907 if ((u.bits.man_low | u.bits.man_high) == 0) return false; 1908 } 1909 return true; 1910 } 1911 1912 1913 void HeapNumber::HeapNumberPrint(FILE* out) { 1914 PrintF(out, "%.16g", Number()); 1915 } 1916 1917 1918 void HeapNumber::HeapNumberPrint(StringStream* accumulator) { 1919 // The Windows version of vsnprintf can allocate when printing a %g string 1920 // into a buffer that may not be big enough. We don't want random memory 1921 // allocation when producing post-crash stack traces, so we print into a 1922 // buffer that is plenty big enough for any floating point number, then 1923 // print that using vsnprintf (which may truncate but never allocate if 1924 // there is no more space in the buffer). 1925 EmbeddedVector<char, 100> buffer; 1926 OS::SNPrintF(buffer, "%.16g", Number()); 1927 accumulator->Add("%s", buffer.start()); 1928 } 1929 1930 1931 String* JSReceiver::class_name() { 1932 if (IsJSFunction() && IsJSFunctionProxy()) { 1933 return GetHeap()->function_class_string(); 1934 } 1935 if (map()->constructor()->IsJSFunction()) { 1936 JSFunction* constructor = JSFunction::cast(map()->constructor()); 1937 return String::cast(constructor->shared()->instance_class_name()); 1938 } 1939 // If the constructor is not present, return "Object". 
  return GetHeap()->Object_string();
}


// Derives a printable constructor name for instances of this map: the
// constructor function's own name if non-empty, else its inferred name,
// else (recursively) the name found on the prototype chain. Falls back to
// "Object".
String* Map::constructor_name() {
  if (constructor()->IsJSFunction()) {
    JSFunction* constructor = JSFunction::cast(this->constructor());
    String* name = String::cast(constructor->shared()->name());
    if (name->length() > 0) return name;
    String* inferred_name = constructor->shared()->inferred_name();
    if (inferred_name->length() > 0) return inferred_name;
    Object* proto = prototype();
    if (proto->IsJSObject()) return JSObject::cast(proto)->constructor_name();
  }
  // TODO(rossberg): what about proxies?
  // If the constructor is not present, return "Object".
  return GetHeap()->Object_string();
}


// Delegates to the map; see Map::constructor_name above.
String* JSReceiver::constructor_name() {
  return map()->constructor_name();
}


// Allocates boxed storage for |object| when |representation| requires it
// (e.g. a HeapNumber box for a double field); otherwise returns the value
// itself. TODO(mstarzinger): Temporary wrapper until handlified.
static Handle<Object> NewStorageFor(Isolate* isolate,
                                    Handle<Object> object,
                                    Representation representation) {
  Heap* heap = isolate->heap();
  CALL_HEAP_FUNCTION(isolate,
                     object->AllocateNewStorageFor(heap, representation),
                     Object);
}


// Installs |new_map| on |object| and stores |value| in the field at
// |field_index|, growing the out-of-object properties backing store first
// when the current map has no unused fields left.
void JSObject::AddFastPropertyUsingMap(Handle<JSObject> object,
                                       Handle<Map> new_map,
                                       Handle<Name> name,
                                       Handle<Object> value,
                                       int field_index,
                                       Representation representation) {
  Isolate* isolate = object->GetIsolate();

  // This method is used to transition to a field. If we are transitioning to a
  // double field, allocate new storage.
  Handle<Object> storage = NewStorageFor(isolate, value, representation);

  if (object->map()->unused_property_fields() == 0) {
    // Grow the properties array by the new map's slack plus one slot for the
    // field being added.
    int new_unused = new_map->unused_property_fields();
    Handle<FixedArray> properties(object->properties());
    Handle<FixedArray> values = isolate->factory()->CopySizeFixedArray(
        properties, properties->length() + new_unused + 1);
    object->set_properties(*values);
  }

  object->set_map(*new_map);
  object->FastPropertyAtPut(field_index, *storage);
}


// Raw-object helper: copies |map| with a FIELD descriptor for |name| added
// and adjusts the unused-field count (replenishing by kFieldsAdded when it
// would go negative).
static MaybeObject* CopyAddFieldDescriptor(Map* map,
                                           Name* name,
                                           int index,
                                           PropertyAttributes attributes,
                                           Representation representation,
                                           TransitionFlag flag) {
  Map* new_map;
  FieldDescriptor new_field_desc(name, index, attributes, representation);
  MaybeObject* maybe_map = map->CopyAddDescriptor(&new_field_desc, flag);
  if (!maybe_map->To(&new_map)) return maybe_map;
  int unused_property_fields = map->unused_property_fields() - 1;
  if (unused_property_fields < 0) {
    unused_property_fields += JSObject::kFieldsAdded;
  }
  new_map->set_unused_property_fields(unused_property_fields);
  return new_map;
}


// Handlified wrapper around the raw CopyAddFieldDescriptor above; retries on
// allocation failure via CALL_HEAP_FUNCTION.
static Handle<Map> CopyAddFieldDescriptor(Handle<Map> map,
                                          Handle<Name> name,
                                          int index,
                                          PropertyAttributes attributes,
                                          Representation representation,
                                          TransitionFlag flag) {
  CALL_HEAP_FUNCTION(map->GetIsolate(),
                     CopyAddFieldDescriptor(
                         *map, *name, index, attributes, representation, flag),
                     Map);
}


// Adds |name| as a new in-place field on |object|, normalizing to dictionary
// properties instead when the name is not cacheable or the object already has
// too many fast properties.
void JSObject::AddFastProperty(Handle<JSObject> object,
                               Handle<Name> name,
                               Handle<Object> value,
                               PropertyAttributes attributes,
                               StoreFromKeyed store_mode,
                               ValueType value_type,
                               TransitionFlag flag) {
  ASSERT(!object->IsJSGlobalProxy());
  ASSERT(DescriptorArray::kNotFound ==
         object->map()->instance_descriptors()->Search(
             *name, object->map()->NumberOfOwnDescriptors()));

  // Normalize the object if the name is an actual name (not the
  // hidden strings) and is not a real identifier.
  // Normalize the object if it will have too many fast properties.
  Isolate* isolate = object->GetIsolate();
  if (!name->IsCacheable(isolate) ||
      object->TooManyFastProperties(store_mode)) {
    NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
    AddSlowProperty(object, name, value, attributes);
    return;
  }

  // Compute the new index for new field.
  int index = object->map()->NextFreePropertyIndex();

  // Allocate new instance descriptors with (name, index) added
  if (object->IsJSContextExtensionObject()) value_type = FORCE_TAGGED;
  Representation representation = value->OptimalRepresentation(value_type);
  Handle<Map> new_map = CopyAddFieldDescriptor(
      handle(object->map()), name, index, attributes, representation, flag);

  AddFastPropertyUsingMap(object, new_map, name, value, index, representation);
}


// Raw-object helper: copies |map| with a CONSTANT descriptor for |name|.
static MaybeObject* CopyAddConstantDescriptor(Map* map,
                                              Name* name,
                                              Object* value,
                                              PropertyAttributes attributes,
                                              TransitionFlag flag) {
  ConstantDescriptor new_constant_desc(name, value, attributes);
  return map->CopyAddDescriptor(&new_constant_desc, flag);
}


// Handlified wrapper around the raw CopyAddConstantDescriptor above.
static Handle<Map> CopyAddConstantDescriptor(Handle<Map> map,
                                             Handle<Name> name,
                                             Handle<Object> value,
                                             PropertyAttributes attributes,
                                             TransitionFlag flag) {
  CALL_HEAP_FUNCTION(map->GetIsolate(),
                     CopyAddConstantDescriptor(
                         *map, *name, *value, attributes, flag),
                     Map);
}


// Adds |name| as a CONSTANT property by transitioning |object| to a map with
// the constant recorded in its descriptors (no field slot is consumed).
void JSObject::AddConstantProperty(Handle<JSObject> object,
                                   Handle<Name> name,
                                   Handle<Object> constant,
                                   PropertyAttributes attributes,
                                   TransitionFlag initial_flag) {
  TransitionFlag flag =
      // Do not add transitions to global objects.
      (object->IsGlobalObject() ||
       // Don't add transitions to special properties with non-trivial
       // attributes.
       attributes != NONE)
      ? OMIT_TRANSITION
      : initial_flag;

  // Allocate new instance descriptors with (name, constant) added.
  Handle<Map> new_map = CopyAddConstantDescriptor(
      handle(object->map()), name, constant, attributes, flag);

  object->set_map(*new_map);
}


// Adds |name| to |object|'s property dictionary. Global objects store the
// value in a PropertyCell (reusing an orphaned cell when one exists) so that
// inline caches can reference the cell directly.
void JSObject::AddSlowProperty(Handle<JSObject> object,
                               Handle<Name> name,
                               Handle<Object> value,
                               PropertyAttributes attributes) {
  ASSERT(!object->HasFastProperties());
  Isolate* isolate = object->GetIsolate();
  Handle<NameDictionary> dict(object->property_dictionary());
  if (object->IsGlobalObject()) {
    // In case name is an orphaned property reuse the cell.
    int entry = dict->FindEntry(*name);
    if (entry != NameDictionary::kNotFound) {
      Handle<PropertyCell> cell(PropertyCell::cast(dict->ValueAt(entry)));
      PropertyCell::SetValueInferType(cell, value);
      // Assign an enumeration index to the property and update
      // SetNextEnumerationIndex.
      int index = dict->NextEnumerationIndex();
      PropertyDetails details = PropertyDetails(attributes, NORMAL, index);
      dict->SetNextEnumerationIndex(index + 1);
      dict->SetEntry(entry, *name, *cell, details);
      return;
    }
    // No reusable cell: allocate a fresh one and store it as the value.
    Handle<PropertyCell> cell = isolate->factory()->NewPropertyCell(value);
    PropertyCell::SetValueInferType(cell, value);
    value = cell;
  }
  // Enumeration index 0 means "use the next available index".
  PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
  Handle<NameDictionary> result = NameDictionaryAdd(dict, name, value, details);
  if (*dict != *result) object->set_properties(*result);
}


// Adds a new own property to |object|, choosing constant, fast-field, or
// dictionary storage. Returns |value| on success, or an empty handle with a
// pending TypeError when the object is not extensible in strict mode.
Handle<Object> JSObject::AddProperty(Handle<JSObject> object,
                                     Handle<Name> name,
                                     Handle<Object> value,
                                     PropertyAttributes attributes,
                                     StrictModeFlag strict_mode,
                                     JSReceiver::StoreFromKeyed store_mode,
                                     ExtensibilityCheck extensibility_check,
                                     ValueType value_type,
                                     StoreMode mode,
                                     TransitionFlag transition_flag) {
  ASSERT(!object->IsJSGlobalProxy());
  Isolate* isolate = object->GetIsolate();

  if (!name->IsUniqueName()) {
    // Internalize so descriptor arrays and dictionaries can compare by
    // identity.
    name = isolate->factory()->InternalizedStringFromString(
        Handle<String>::cast(name));
  }

  if (extensibility_check == PERFORM_EXTENSIBILITY_CHECK &&
      !object->map()->is_extensible()) {
    if (strict_mode == kNonStrictMode) {
      // Sloppy mode silently ignores stores on non-extensible objects.
      return value;
    } else {
      Handle<Object> args[1] = { name };
      Handle<Object> error = isolate->factory()->NewTypeError(
          "object_not_extensible", HandleVector(args, ARRAY_SIZE(args)));
      isolate->Throw(*error);
      return Handle<Object>();
    }
  }

  if (object->HasFastProperties()) {
    // Ensure the descriptor array does not get too big.
    if (object->map()->NumberOfOwnDescriptors() <= kMaxNumberOfDescriptors) {
      // TODO(verwaest): Support other constants.
      // if (mode == ALLOW_AS_CONSTANT &&
      //     !value->IsTheHole() &&
      //     !value->IsConsString()) {
      if (value->IsJSFunction()) {
        AddConstantProperty(object, name, value, attributes, transition_flag);
      } else {
        AddFastProperty(object, name, value, attributes, store_mode,
                        value_type, transition_flag);
      }
    } else {
      // Normalize the object to prevent very large instance descriptors.
      // This eliminates unwanted N^2 allocation and lookup behavior.
      NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
      AddSlowProperty(object, name, value, attributes);
    }
  } else {
    AddSlowProperty(object, name, value, attributes);
  }

  // Notify Object.observe observers of the "add", except for the hidden
  // string, which is an implementation detail.
  if (FLAG_harmony_observation &&
      object->map()->is_observed() &&
      *name != isolate->heap()->hidden_string()) {
    Handle<Object> old_value = isolate->factory()->the_hole_value();
    EnqueueChangeRecord(object, "add", name, old_value);
  }

  return value;
}


// Delivers an Object.observe change record of kind |type_str| for |name| on
// |object| by calling the native observers-notify-change function.
void JSObject::EnqueueChangeRecord(Handle<JSObject> object,
                                   const char* type_str,
                                   Handle<Name> name,
                                   Handle<Object> old_value) {
  Isolate* isolate = object->GetIsolate();
  HandleScope scope(isolate);
  Handle<String> type = isolate->factory()->InternalizeUtf8String(type_str);
  if (object->IsJSGlobalObject()) {
    // Observers see the global receiver (proxy), not the internal global.
    object = handle(JSGlobalObject::cast(*object)->global_receiver(), isolate);
  }
  Handle<Object> args[] = { type, object, name, old_value };
  // Trailing arguments are optional: omit |name| when null and |old_value|
  // when it is the hole.
  int argc = name.is_null() ? 2 : old_value->IsTheHole() ? 3 : 4;
  bool threw;

  Execution::Call(isolate,
                  Handle<JSFunction>(isolate->observers_notify_change()),
                  isolate->factory()->undefined_value(),
                  argc, args,
                  &threw);
  ASSERT(!threw);
}


// Stores |name| on |object| after its named interceptor has declined the
// store: updates an existing property or transition, consults prototype
// setters, and otherwise adds a new real property.
Handle<Object> JSObject::SetPropertyPostInterceptor(
    Handle<JSObject> object,
    Handle<Name> name,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictModeFlag strict_mode) {
  // Check local property, ignore interceptor.
  LookupResult result(object->GetIsolate());
  object->LocalLookupRealNamedProperty(*name, &result);
  if (!result.IsFound()) {
    object->map()->LookupTransition(*object, *name, &result);
  }
  if (result.IsFound()) {
    // An existing property or a map transition was found. Use set property to
    // handle all these cases.
    return SetPropertyForResult(object, &result, name, value, attributes,
                                strict_mode, MAY_BE_STORE_FROM_KEYED);
  }
  bool done = false;
  Handle<Object> result_object = SetPropertyViaPrototypes(
      object, name, value, attributes, strict_mode, &done);
  if (done) return result_object;
  // Add a new real property.
  return AddProperty(object, name, value, attributes, strict_mode);
}


// Overwrites a dictionary-mode property, preserving its existing enumeration
// index when present so for-in order is stable.
static void ReplaceSlowProperty(Handle<JSObject> object,
                                Handle<Name> name,
                                Handle<Object> value,
                                PropertyAttributes attributes) {
  NameDictionary* dictionary = object->property_dictionary();
  int old_index = dictionary->FindEntry(*name);
  int new_enumeration_index = 0;  // 0 means "Use the next available index."
  if (old_index != -1) {
    // All calls to ReplaceSlowProperty have had all transitions removed.
    new_enumeration_index = dictionary->DetailsAt(old_index).dictionary_index();
  }

  PropertyDetails new_details(attributes, NORMAL, new_enumeration_index);
  JSObject::SetNormalizedProperty(object, name, value, new_details);
}


// Single-character mnemonic for this representation, used in tracing output.
const char* Representation::Mnemonic() const {
  switch (kind_) {
    case kNone: return "v";
    case kTagged: return "t";
    case kSmi: return "s";
    case kDouble: return "d";
    case kInteger32: return "i";
    case kHeapObject: return "h";
    case kExternal: return "x";
    default:
      UNREACHABLE();
      return NULL;
  }
}


// Distinguishes trims performed by the GC from trims performed by the
// mutator; the live-bytes bookkeeping below differs between the two.
enum RightTrimMode { FROM_GC, FROM_MUTATOR };


// Overwrites the trimmed-off tail of a fixed array (past the filler header
// word) with Smi zero so stale pointers are not left behind.
static void ZapEndOfFixedArray(Address new_end, int to_trim) {
  // If we are doing a big trim in old space then we zap the space.
  Object** zap = reinterpret_cast<Object**>(new_end);
  zap++;  // Header of filler must be at least one word so skip that.
  for (int i = 1; i < to_trim; i++) {
    *zap++ = Smi::FromInt(0);
  }
}


// Shrinks |elms| in place by |to_trim| elements, installing a filler object
// over the freed tail and keeping heap bookkeeping (live bytes, profiler
// object sizes) consistent.
template<RightTrimMode trim_mode>
static void RightTrimFixedArray(Heap* heap, FixedArray* elms, int to_trim) {
  ASSERT(elms->map() != heap->fixed_cow_array_map());
  // For now this trick is only applied to fixed arrays in new and paged space.
  ASSERT(!heap->lo_space()->Contains(elms));

  const int len = elms->length();

  ASSERT(to_trim < len);

  Address new_end = elms->address() + FixedArray::SizeFor(len - to_trim);

  if (trim_mode != FROM_GC || Heap::ShouldZapGarbage()) {
    ZapEndOfFixedArray(new_end, to_trim);
  }

  int size_delta = to_trim * kPointerSize;

  // Technically in new space this write might be omitted (except for
  // debug mode which iterates through the heap), but to play safer
  // we still do it.
  heap->CreateFillerObjectAt(new_end, size_delta);

  elms->set_length(len - to_trim);

  // Maintain marking consistency for IncrementalMarking.
  if (Marking::IsBlack(Marking::MarkBitFrom(elms))) {
    if (trim_mode == FROM_GC) {
      MemoryChunk::IncrementLiveBytesFromGC(elms->address(), -size_delta);
    } else {
      MemoryChunk::IncrementLiveBytesFromMutator(elms->address(), -size_delta);
    }
  }

  // The array may not be moved during GC,
  // and size has to be adjusted nevertheless.
  HeapProfiler* profiler = heap->isolate()->heap_profiler();
  if (profiler->is_tracking_allocations()) {
    profiler->UpdateObjectSizeEvent(elms->address(), elms->Size());
  }
}


// Decides whether instances must be rewritten (fields copied) when migrating
// from this map to |target|, or whether simply setting the map suffices.
bool Map::InstancesNeedRewriting(Map* target,
                                 int target_number_of_fields,
                                 int target_inobject,
                                 int target_unused) {
  // If fields were added (or removed), rewrite the instance.
  int number_of_fields = NumberOfFields();
  ASSERT(target_number_of_fields >= number_of_fields);
  if (target_number_of_fields != number_of_fields) return true;

  if (FLAG_track_double_fields) {
    // If smi descriptors were replaced by double descriptors, rewrite.
    DescriptorArray* old_desc = instance_descriptors();
    DescriptorArray* new_desc = target->instance_descriptors();
    int limit = NumberOfOwnDescriptors();
    for (int i = 0; i < limit; i++) {
      if (new_desc->GetDetails(i).representation().IsDouble() &&
          !old_desc->GetDetails(i).representation().IsDouble()) {
        return true;
      }
    }
  }

  // If no fields were added, and no inobject properties were removed, setting
  // the map is sufficient.
  if (target_inobject == inobject_properties()) return false;
  // In-object slack tracking may have reduced the object size of the new map.
  // In that case, succeed if all existing fields were inobject, and they still
  // fit within the new inobject size.
  ASSERT(target_inobject < inobject_properties());
  if (target_number_of_fields <= target_inobject) {
    ASSERT(target_number_of_fields + target_unused == target_inobject);
    return false;
  }
  // Otherwise, properties will need to be moved to the backing store.
  return true;
}


// To migrate an instance to a map:
// - First check whether the instance needs to be rewritten. If not, simply
//   change the map.
// - Otherwise, allocate a fixed array large enough to hold all fields, in
//   addition to unused space.
// - Copy all existing properties in, in the following order: backing store
//   properties, unused fields, inobject properties.
// - If all allocation succeeded, commit the state atomically:
//   * Copy inobject properties from the backing store back into the object.
//   * Trim the difference in instance size of the object. This also cleanly
//     frees inobject properties that moved to the backing store.
//   * If there are properties left in the backing store, trim of the space used
//     to temporarily store the inobject properties.
//   * If there are properties left in the backing store, install the backing
//     store.
void JSObject::MigrateToMap(Handle<JSObject> object, Handle<Map> new_map) {
  Isolate* isolate = object->GetIsolate();
  Handle<Map> old_map(object->map());
  int number_of_fields = new_map->NumberOfFields();
  int inobject = new_map->inobject_properties();
  int unused = new_map->unused_property_fields();

  // Nothing to do if no functions were converted to fields and no smis were
  // converted to doubles.
  if (!old_map->InstancesNeedRewriting(
          *new_map, number_of_fields, inobject, unused)) {
    object->set_map(*new_map);
    return;
  }

  int total_size = number_of_fields + unused;
  int external = total_size - inobject;
  // Temporary array holding every field; inobject fields occupy the tail so
  // they can be copied back into the object below.
  Handle<FixedArray> array = isolate->factory()->NewFixedArray(total_size);

  Handle<DescriptorArray> old_descriptors(old_map->instance_descriptors());
  Handle<DescriptorArray> new_descriptors(new_map->instance_descriptors());
  int descriptors = new_map->NumberOfOwnDescriptors();

  for (int i = 0; i < descriptors; i++) {
    PropertyDetails details = new_descriptors->GetDetails(i);
    if (details.type() != FIELD) continue;
    PropertyDetails old_details = old_descriptors->GetDetails(i);
    if (old_details.type() == CALLBACKS) {
      ASSERT(details.representation().IsTagged());
      continue;
    }
    ASSERT(old_details.type() == CONSTANT ||
           old_details.type() == FIELD);
    Object* raw_value = old_details.type() == CONSTANT
        ? old_descriptors->GetValue(i)
        : object->RawFastPropertyAt(old_descriptors->GetFieldIndex(i));
    Handle<Object> value(raw_value, isolate);
    if (FLAG_track_double_fields &&
        !old_details.representation().IsDouble() &&
        details.representation().IsDouble()) {
      if (old_details.representation().IsNone()) {
        // Uninitialized fields default to Smi zero before boxing.
        value = handle(Smi::FromInt(0), isolate);
      }
      value = NewStorageFor(isolate, value, details.representation());
    }
    ASSERT(!(FLAG_track_double_fields &&
             details.representation().IsDouble() &&
             value->IsSmi()));
    int target_index = new_descriptors->GetFieldIndex(i) - inobject;
    if (target_index < 0) target_index += total_size;
    array->set(target_index, *value);
  }

  // From here on we cannot fail and we shouldn't GC anymore.
  DisallowHeapAllocation no_allocation;

  // Copy (real) inobject properties. If necessary, stop at number_of_fields to
  // avoid overwriting |one_pointer_filler_map|.
  int limit = Min(inobject, number_of_fields);
  for (int i = 0; i < limit; i++) {
    object->FastPropertyAtPut(i, array->get(external + i));
  }

  // Create filler object past the new instance size.
  int new_instance_size = new_map->instance_size();
  int instance_size_delta = old_map->instance_size() - new_instance_size;
  ASSERT(instance_size_delta >= 0);
  Address address = object->address() + new_instance_size;
  isolate->heap()->CreateFillerObjectAt(address, instance_size_delta);

  // If there are properties in the new backing store, trim it to the correct
  // size and install the backing store into the object.
  if (external > 0) {
    RightTrimFixedArray<FROM_MUTATOR>(isolate->heap(), *array, inobject);
    object->set_properties(*array);
  }

  object->set_map(*new_map);
}


// Handlified wrapper for the raw Map::AddTransition; retries on allocation
// failure via CALL_HEAP_FUNCTION.
Handle<TransitionArray> Map::AddTransition(Handle<Map> map,
                                           Handle<Name> key,
                                           Handle<Map> target,
                                           SimpleTransitionFlag flag) {
  CALL_HEAP_FUNCTION(map->GetIsolate(),
                     map->AddTransition(*key, *target, flag),
                     TransitionArray);
}


// Generalizes the representation of the field at |modify_index| and migrates
// the object if the generalization produced a different map.
void JSObject::GeneralizeFieldRepresentation(Handle<JSObject> object,
                                             int modify_index,
                                             Representation new_representation,
                                             StoreMode store_mode) {
  Handle<Map> new_map = Map::GeneralizeRepresentation(
      handle(object->map()), modify_index, new_representation, store_mode);
  if (object->map() == *new_map) return;
  return MigrateToMap(object, new_map);
}


// Counts own descriptors of type FIELD.
int Map::NumberOfFields() {
  DescriptorArray* descriptors = instance_descriptors();
  int result = 0;
  for (int i = 0; i < NumberOfOwnDescriptors(); i++) {
    if (descriptors->GetDetails(i).type() == FIELD) result++;
  }
  return result;
}


// Copies |map| with every representation reset to Tagged (the most general),
// converting |modify_index| into a FIELD when FORCE_FIELD requires it.
// |reason| is only used for --trace-generalization output.
Handle<Map> Map::CopyGeneralizeAllRepresentations(Handle<Map> map,
                                                  int modify_index,
                                                  StoreMode store_mode,
                                                  PropertyAttributes attributes,
                                                  const char* reason) {
  Handle<Map> new_map = Copy(map);

  DescriptorArray* descriptors = new_map->instance_descriptors();
  descriptors->InitializeRepresentations(Representation::Tagged());

  // Unless the instance is being migrated, ensure that modify_index is a field.
  PropertyDetails details = descriptors->GetDetails(modify_index);
  if (store_mode == FORCE_FIELD && details.type() != FIELD) {
    FieldDescriptor d(descriptors->GetKey(modify_index),
                      new_map->NumberOfFields(),
                      attributes,
                      Representation::Tagged());
    d.SetSortedKeyIndex(details.pointer());
    descriptors->Set(modify_index, &d);
    // The new field consumes a slot; replenish by kFieldsAdded if exhausted.
    int unused_property_fields = new_map->unused_property_fields() - 1;
    if (unused_property_fields < 0) {
      unused_property_fields += JSObject::kFieldsAdded;
    }
    new_map->set_unused_property_fields(unused_property_fields);
  }

  if (FLAG_trace_generalization) {
    map->PrintGeneralization(stdout, reason, modify_index,
                             new_map->NumberOfOwnDescriptors(),
                             new_map->NumberOfOwnDescriptors(),
                             details.type() == CONSTANT && store_mode == FORCE_FIELD,
                             Representation::Tagged(), Representation::Tagged());
  }
  return new_map;
}


// Marks this map and its whole transition subtree as deprecated and
// deoptimizes code that depends on the transitions.
void Map::DeprecateTransitionTree() {
  if (!FLAG_track_fields) return;
  if (is_deprecated()) return;
  if (HasTransitionArray()) {
    TransitionArray* transitions = this->transitions();
    for (int i = 0; i < transitions->number_of_transitions(); i++) {
      transitions->GetTarget(i)->DeprecateTransitionTree();
    }
  }
  deprecate();
  dependent_code()->DeoptimizeDependentCodeGroup(
      GetIsolate(), DependentCode::kTransitionGroup);
  NotifyLeafMapLayoutChange();
}


// Invalidates a transition target at |key|, and installs |new_descriptors| over
// the current instance_descriptors to ensure proper sharing of descriptor
// arrays.
void Map::DeprecateTarget(Name* key, DescriptorArray* new_descriptors) {
  if (HasTransitionArray()) {
    TransitionArray* transitions = this->transitions();
    int transition = transitions->Search(key);
    if (transition != TransitionArray::kNotFound) {
      transitions->GetTarget(transition)->DeprecateTransitionTree();
    }
  }

  // Don't overwrite the empty descriptor array.
  if (NumberOfOwnDescriptors() == 0) return;

  // Walk the back-pointer chain, replacing every map that shares the old
  // descriptor array so they all point at |new_descriptors|.
  DescriptorArray* to_replace = instance_descriptors();
  Map* current = this;
  while (current->instance_descriptors() == to_replace) {
    current->SetEnumLength(kInvalidEnumCacheSentinel);
    current->set_instance_descriptors(new_descriptors);
    Object* next = current->GetBackPointer();
    if (next->IsUndefined()) break;
    current = Map::cast(next);
  }

  set_owns_descriptors(false);
}


// Follows back pointers to the root of this map's transition tree.
Map* Map::FindRootMap() {
  Map* result = this;
  while (true) {
    Object* back = result->GetBackPointer();
    if (back->IsUndefined()) return result;
    result = Map::cast(back);
  }
}


// Returns NULL if the updated map is incompatible.
Map* Map::FindUpdatedMap(int verbatim,
                         int length,
                         DescriptorArray* descriptors) {
  // This can only be called on roots of transition trees.
  ASSERT(GetBackPointer()->IsUndefined());

  Map* current = this;

  // Walk the transition tree along the keys in |descriptors|, starting after
  // the |verbatim| descriptors shared with the root.
  for (int i = verbatim; i < length; i++) {
    if (!current->HasTransitionArray()) break;
    Name* name = descriptors->GetKey(i);
    TransitionArray* transitions = current->transitions();
    int transition = transitions->Search(name);
    if (transition == TransitionArray::kNotFound) break;
    current = transitions->GetTarget(transition);
    PropertyDetails details = descriptors->GetDetails(i);
    PropertyDetails target_details =
        current->instance_descriptors()->GetDetails(i);
    // Incompatible attributes or mismatched accessor (CALLBACKS) entries make
    // the updated map unusable.
    if (details.attributes() != target_details.attributes()) return NULL;
    if (details.type() == CALLBACKS) {
      if (target_details.type() != CALLBACKS) return NULL;
      if (descriptors->GetValue(i) !=
          current->instance_descriptors()->GetValue(i)) {
        return NULL;
      }
    }
  }

  return current;
}


// Walks the transition tree from this root along |descriptors| and returns
// the deepest map whose own descriptors still match exactly (value, type,
// attributes, and representation).
Map* Map::FindLastMatchMap(int verbatim,
                           int length,
                           DescriptorArray* descriptors) {
  // This can only be called on roots of transition trees.
  ASSERT(GetBackPointer()->IsUndefined());

  Map* current = this;

  for (int i = verbatim; i < length; i++) {
    if (!current->HasTransitionArray()) break;
    Name* name = descriptors->GetKey(i);
    TransitionArray* transitions = current->transitions();
    int transition = transitions->Search(name);
    if (transition == TransitionArray::kNotFound) break;

    Map* next = transitions->GetTarget(transition);
    DescriptorArray* next_descriptors = next->instance_descriptors();

    if (next_descriptors->GetValue(i) != descriptors->GetValue(i)) break;

    PropertyDetails details = descriptors->GetDetails(i);
    PropertyDetails next_details = next_descriptors->GetDetails(i);
    if (details.type() != next_details.type()) break;
    if (details.attributes() != next_details.attributes()) break;
    if (!details.representation().Equals(next_details.representation())) break;

    current = next;
  }
  return current;
}


// Generalize the representation of the descriptor at |modify_index|.
// This method rewrites the transition tree to reflect the new change. To avoid
// high degrees over polymorphism, and to stabilize quickly, on every rewrite
// the new type is deduced by merging the current type with any potential new
// (partial) version of the type in the transition tree.
// To do this, on each rewrite:
// - Search the root of the transition tree using FindRootMap.
// - Find |updated|, the newest matching version of this map using
//   FindUpdatedMap. This uses the keys in the own map's descriptor array to
//   walk the transition tree.
// - Merge/generalize the descriptor array of the current map and |updated|.
// - Generalize the |modify_index| descriptor using |new_representation|.
// - Walk the tree again starting from the root towards |updated|. Stop at
//   |split_map|, the first map who's descriptor array does not match the merged
//   descriptor array.
// - If |updated| == |split_map|, |updated| is in the expected state. Return it.
// - Otherwise, invalidate the outdated transition target from |updated|, and
//   replace its transition tree with a new branch for the updated descriptors.
Handle<Map> Map::GeneralizeRepresentation(Handle<Map> old_map,
                                          int modify_index,
                                          Representation new_representation,
                                          StoreMode store_mode) {
  Handle<DescriptorArray> old_descriptors(old_map->instance_descriptors());
  PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
  Representation old_representation = old_details.representation();

  // It's fine to transition from None to anything but double without any
  // modification to the object, because the default uninitialized value for
  // representation None can be overwritten by both smi and tagged values.
  // Doubles, however, would require a box allocation.
  if (old_representation.IsNone() &&
      !new_representation.IsNone() &&
      !new_representation.IsDouble()) {
    old_descriptors->SetRepresentation(modify_index, new_representation);
    return old_map;
  }

  int descriptors = old_map->NumberOfOwnDescriptors();
  Handle<Map> root_map(old_map->FindRootMap());

  // Check the state of the root map.
  if (!old_map->EquivalentToForTransition(*root_map)) {
    return CopyGeneralizeAllRepresentations(old_map, modify_index, store_mode,
        old_details.attributes(), "not equivalent");
  }

  int verbatim = root_map->NumberOfOwnDescriptors();

  // Descriptors owned by the root cannot be rewritten in place; fall back to
  // generalizing everything unless the store allows a constant.
  if (store_mode != ALLOW_AS_CONSTANT && modify_index < verbatim) {
    return CopyGeneralizeAllRepresentations(old_map, modify_index, store_mode,
        old_details.attributes(), "root modification");
  }

  Map* raw_updated = root_map->FindUpdatedMap(
      verbatim, descriptors, *old_descriptors);
  if (raw_updated == NULL) {
    return CopyGeneralizeAllRepresentations(old_map, modify_index, store_mode,
        old_details.attributes(), "incompatible");
  }

  Handle<Map> updated(raw_updated);
  Handle<DescriptorArray> updated_descriptors(updated->instance_descriptors());

  int valid = updated->NumberOfOwnDescriptors();

  // Directly change the map if the target map is more general. Ensure that the
  // target type of the modify_index is a FIELD, unless we are migrating.
  if (updated_descriptors->IsMoreGeneralThan(
          verbatim, valid, descriptors, *old_descriptors) &&
      (store_mode == ALLOW_AS_CONSTANT ||
       updated_descriptors->GetDetails(modify_index).type() == FIELD)) {
    Representation updated_representation =
        updated_descriptors->GetDetails(modify_index).representation();
    if (new_representation.fits_into(updated_representation)) return updated;
  }

  // Otherwise merge the two descriptor arrays and generalize |modify_index|.
  Handle<DescriptorArray> new_descriptors = DescriptorArray::Merge(
      updated_descriptors, verbatim, valid, descriptors, modify_index,
      store_mode, old_descriptors);
  ASSERT(store_mode == ALLOW_AS_CONSTANT ||
         new_descriptors->GetDetails(modify_index).type() == FIELD);

  old_representation =
      new_descriptors->GetDetails(modify_index).representation();
  Representation updated_representation =
      new_representation.generalize(old_representation);
  if (!updated_representation.Equals(old_representation)) {
    new_descriptors->SetRepresentation(modify_index, updated_representation);
  }

  Handle<Map> split_map(root_map->FindLastMatchMap(
      verbatim, descriptors, *new_descriptors));

  int split_descriptors = split_map->NumberOfOwnDescriptors();
  // This is shadowed by |updated_descriptors| being more general than
  // |old_descriptors|.
  ASSERT(descriptors != split_descriptors);

  int descriptor = split_descriptors;
  split_map->DeprecateTarget(
      old_descriptors->GetKey(descriptor), *new_descriptors);

  if (FLAG_trace_generalization) {
    old_map->PrintGeneralization(
        stdout, "", modify_index, descriptor, descriptors,
        old_descriptors->GetDetails(modify_index).type() == CONSTANT &&
            store_mode == FORCE_FIELD,
        old_representation, updated_representation);
  }

  // Add missing transitions.
  Handle<Map> new_map = split_map;
  for (; descriptor < descriptors; descriptor++) {
    new_map = Map::CopyInstallDescriptors(new_map, descriptor, new_descriptors);
  }

  new_map->set_owns_descriptors(true);
  return new_map;
}


// Generalize the representation of all FIELD descriptors.
Handle<Map> Map::GeneralizeAllFieldRepresentations(
    Handle<Map> map,
    Representation new_representation) {
  Handle<DescriptorArray> descriptors(map->instance_descriptors());
  for (int i = 0; i < map->NumberOfOwnDescriptors(); i++) {
    PropertyDetails details = descriptors->GetDetails(i);
    if (details.type() == FIELD) {
      map = GeneralizeRepresentation(map, i, new_representation, FORCE_FIELD);
    }
  }
  return map;
}


// Migrates deprecated instances found on |map|'s prototype chain, then
// resolves |map| itself via CurrentMapForDeprecatedInternal.
Handle<Map> Map::CurrentMapForDeprecated(Handle<Map> map) {
  Handle<Map> proto_map(map);
  while (proto_map->prototype()->IsJSObject()) {
    Handle<JSObject> holder(JSObject::cast(proto_map->prototype()));
    if (holder->map()->is_deprecated()) {
      JSObject::TryMigrateInstance(holder);
    }
    proto_map = Handle<Map>(holder->map());
  }
  return CurrentMapForDeprecatedInternal(map);
}


// Returns the up-to-date replacement for a deprecated |map|, or an empty
// handle when no compatible, strictly-more-general map exists in the
// transition tree.
Handle<Map> Map::CurrentMapForDeprecatedInternal(Handle<Map> map) {
  if (!map->is_deprecated()) return map;

  DisallowHeapAllocation no_allocation;
  DescriptorArray* old_descriptors = map->instance_descriptors();

  int descriptors = map->NumberOfOwnDescriptors();
  Map* root_map = map->FindRootMap();

  // Check the state of the root map.
  if (!map->EquivalentToForTransition(root_map)) return Handle<Map>();
  int verbatim = root_map->NumberOfOwnDescriptors();

  Map* updated = root_map->FindUpdatedMap(
      verbatim, descriptors, old_descriptors);
  if (updated == NULL) return Handle<Map>();

  // Only accept the updated map if its descriptors are strictly more
  // general than the deprecated map's.
  DescriptorArray* updated_descriptors = updated->instance_descriptors();
  int valid = updated->NumberOfOwnDescriptors();
  if (!updated_descriptors->IsMoreGeneralThan(
          verbatim, valid, descriptors, old_descriptors)) {
    return Handle<Map>();
  }

  return handle(updated);
}


// Stores |value| on |object| via its named interceptor's setter callback.
// If the interceptor declines (returns an empty result), falls through to
// SetPropertyPostInterceptor.  Returns |value| on success, or an empty
// handle if an exception is scheduled.
Handle<Object> JSObject::SetPropertyWithInterceptor(
    Handle<JSObject> object,
    Handle<Name> name,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictModeFlag strict_mode) {
  // TODO(rossberg): Support symbols in the API.
  if (name->IsSymbol()) return value;
  Isolate* isolate = object->GetIsolate();
  Handle<String> name_string = Handle<String>::cast(name);
  Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
  if (!interceptor->setter()->IsUndefined()) {
    LOG(isolate,
        ApiNamedPropertyAccess("interceptor-named-set", *object, *name));
    PropertyCallbackArguments args(
        isolate, interceptor->data(), *object, *object);
    v8::NamedPropertySetterCallback setter =
        v8::ToCData<v8::NamedPropertySetterCallback>(interceptor->setter());
    // Never expose the hole to the embedder; substitute undefined.
    Handle<Object> value_unhole = value->IsTheHole()
        ? Handle<Object>(isolate->factory()->undefined_value()) : value;
    v8::Handle<v8::Value> result = args.Call(setter,
                                             v8::Utils::ToLocal(name_string),
                                             v8::Utils::ToLocal(value_unhole));
    RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
    // A non-empty result means the interceptor handled the store.
    if (!result.IsEmpty()) return value;
  }
  Handle<Object> result =
      SetPropertyPostInterceptor(object, name, value, attributes, strict_mode);
  RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
  return result;
}


// Top-level property store: looks up |name| locally (including map
// transitions on a miss) and dispatches to the LookupResult-based
// SetProperty overload.
Handle<Object> JSReceiver::SetProperty(Handle<JSReceiver> object,
                                       Handle<Name> name,
                                       Handle<Object> value,
                                       PropertyAttributes attributes,
                                       StrictModeFlag strict_mode,
                                       StoreFromKeyed store_mode) {
  LookupResult result(object->GetIsolate());
  object->LocalLookup(*name, &result, true);
  if (!result.IsFound()) {
    object->map()->LookupTransition(JSObject::cast(*object), *name, &result);
  }
  return SetProperty(object, &result, name, value, attributes, strict_mode,
                     store_mode);
}


// Invokes the setter stored in |structure| (a Foreign,
// ExecutableAccessorInfo, AccessorPair, or DeclaredAccessorInfo) to store
// |value| on |object|.  Returns |value| on success, or an empty handle if
// an exception was thrown/scheduled.
Handle<Object> JSObject::SetPropertyWithCallback(Handle<JSObject> object,
                                                 Handle<Object> structure,
                                                 Handle<Name> name,
                                                 Handle<Object> value,
                                                 Handle<JSObject> holder,
                                                 StrictModeFlag strict_mode) {
  Isolate* isolate = object->GetIsolate();

  // We should never get here to initialize a const with the hole
  // value since a const declaration would conflict with the setter.
  ASSERT(!value->IsTheHole());

  // To accommodate both the old and the new api we switch on the
  // data structure used to store the callbacks.  Eventually foreign
  // callbacks should be phased out.
  // Old-style (pre-API) callback stored as a raw AccessorDescriptor
  // behind a Foreign.
  if (structure->IsForeign()) {
    AccessorDescriptor* callback =
        reinterpret_cast<AccessorDescriptor*>(
            Handle<Foreign>::cast(structure)->foreign_address());
    CALL_AND_RETRY_OR_DIE(isolate,
                          (callback->setter)(
                              isolate, *object, *value, callback->data),
                          break,
                          return Handle<Object>());
    RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return value;
  }

  if (structure->IsExecutableAccessorInfo()) {
    // api style callbacks
    ExecutableAccessorInfo* data = ExecutableAccessorInfo::cast(*structure);
    if (!data->IsCompatibleReceiver(*object)) {
      Handle<Object> args[2] = { name, object };
      Handle<Object> error =
          isolate->factory()->NewTypeError("incompatible_method_receiver",
                                           HandleVector(args,
                                                        ARRAY_SIZE(args)));
      isolate->Throw(*error);
      return Handle<Object>();
    }
    // TODO(rossberg): Support symbols in the API.
    if (name->IsSymbol()) return value;
    Object* call_obj = data->setter();
    v8::AccessorSetterCallback call_fun =
        v8::ToCData<v8::AccessorSetterCallback>(call_obj);
    // A missing setter callback silently succeeds.
    if (call_fun == NULL) return value;
    Handle<String> key = Handle<String>::cast(name);
    LOG(isolate, ApiNamedPropertyAccess("store", *object, *name));
    PropertyCallbackArguments args(
        isolate, data->data(), *object, JSObject::cast(*holder));
    args.Call(call_fun,
              v8::Utils::ToLocal(key),
              v8::Utils::ToLocal(value));
    RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return value;
  }

  // JavaScript getter/setter pair: call the setter if it is callable,
  // otherwise throw in strict mode (silently succeed in sloppy mode).
  if (structure->IsAccessorPair()) {
    Handle<Object> setter(AccessorPair::cast(*structure)->setter(), isolate);
    if (setter->IsSpecFunction()) {
      // TODO(rossberg): nicer would be to cast to some JSCallable here...
      return SetPropertyWithDefinedSetter(
          object, Handle<JSReceiver>::cast(setter), value);
    } else {
      if (strict_mode == kNonStrictMode) {
        return value;
      }
      Handle<Object> args[2] = { name, holder };
      Handle<Object> error =
          isolate->factory()->NewTypeError("no_setter_in_callback",
                                           HandleVector(args, 2));
      isolate->Throw(*error);
      return Handle<Object>();
    }
  }

  // TODO(dcarney): Handle correctly.
  if (structure->IsDeclaredAccessorInfo()) {
    return value;
  }

  UNREACHABLE();
  return Handle<Object>();
}


// Calls a JavaScript setter function (or function proxy) with |value| on
// behalf of a property store.  Returns |value|, or an empty handle if the
// call threw.
Handle<Object> JSReceiver::SetPropertyWithDefinedSetter(
    Handle<JSReceiver> object,
    Handle<JSReceiver> setter,
    Handle<Object> value) {
  Isolate* isolate = object->GetIsolate();

#ifdef ENABLE_DEBUGGER_SUPPORT
  Debug* debug = isolate->debug();
  // Handle stepping into a setter if step into is active.
  // TODO(rossberg): should this apply to getters that are function proxies?
  if (debug->StepInActive() && setter->IsJSFunction()) {
    debug->HandleStepIn(
        Handle<JSFunction>::cast(setter), Handle<Object>::null(), 0, false);
  }
#endif

  bool has_pending_exception;
  Handle<Object> argv[] = { value };
  Execution::Call(
      isolate, setter, object, ARRAY_SIZE(argv), argv, &has_pending_exception);
  // Check for pending exception and return the result.
  if (has_pending_exception) return Handle<Object>();
  return value;
}


// Scans the prototype chain for an element setter (a CALLBACKS entry in a
// dictionary-elements prototype, or a proxy) for |index| and invokes it.
// Sets |*found| accordingly; returns the hole when nothing was found.
Handle<Object> JSObject::SetElementWithCallbackSetterInPrototypes(
    Handle<JSObject> object,
    uint32_t index,
    Handle<Object> value,
    bool* found,
    StrictModeFlag strict_mode) {
  Isolate *isolate = object->GetIsolate();
  for (Handle<Object> proto = handle(object->GetPrototype(), isolate);
       !proto->IsNull();
       proto = handle(proto->GetPrototype(isolate), isolate)) {
    if (proto->IsJSProxy()) {
      return JSProxy::SetPropertyViaPrototypesWithHandler(
          Handle<JSProxy>::cast(proto),
          object,
          isolate->factory()->Uint32ToString(index),  // name
          value,
          NONE,
          strict_mode,
          found);
    }
    Handle<JSObject> js_proto = Handle<JSObject>::cast(proto);
    // Only dictionary-mode elements can carry accessor entries.
    if (!js_proto->HasDictionaryElements()) {
      continue;
    }
    Handle<SeededNumberDictionary> dictionary(js_proto->element_dictionary());
    int entry = dictionary->FindEntry(index);
    if (entry != SeededNumberDictionary::kNotFound) {
      PropertyDetails details = dictionary->DetailsAt(entry);
      if (details.type() == CALLBACKS) {
        *found = true;
        Handle<Object> structure(dictionary->ValueAt(entry), isolate);
        return SetElementWithCallback(object, structure, index, value, js_proto,
                                      strict_mode);
      }
    }
  }
  *found = false;
  return isolate->factory()->the_hole_value();
}


// Attempts to complete a named store via the prototype chain: an accessor
// or proxy on a prototype may handle it, or a read-only property may block
// it.  |*done| is set when the store was fully handled (or rejected);
// otherwise the caller should proceed with a local store.
Handle<Object> JSObject::SetPropertyViaPrototypes(Handle<JSObject> object,
                                                 Handle<Name> name,
                                                 Handle<Object> value,
                                                 PropertyAttributes attributes,
                                                 StrictModeFlag strict_mode,
                                                 bool* done) {
  Isolate* isolate = object->GetIsolate();

  *done = false;
  // We could not find a local property so let's check whether there is an
  // accessor that wants to handle the property, or whether the property is
  // read-only on the prototype chain.
  LookupResult result(isolate);
  object->LookupRealNamedPropertyInPrototypes(*name, &result);
  if (result.IsFound()) {
    switch (result.type()) {
      case NORMAL:
      case FIELD:
      case CONSTANT:
        // A read-only data property on a prototype blocks the store.
        *done = result.IsReadOnly();
        break;
      case INTERCEPTOR: {
        PropertyAttributes attr =
            result.holder()->GetPropertyAttributeWithInterceptor(
                *object, *name, true);
        *done = !!(attr & READ_ONLY);
        break;
      }
      case CALLBACKS: {
        if (!FLAG_es5_readonly && result.IsReadOnly()) break;
        *done = true;
        Handle<Object> callback_object(result.GetCallbackObject(), isolate);
        return SetPropertyWithCallback(object, callback_object, name, value,
                                       handle(result.holder()), strict_mode);
      }
      case HANDLER: {
        Handle<JSProxy> proxy(result.proxy());
        return JSProxy::SetPropertyViaPrototypesWithHandler(
            proxy, object, name, value, attributes, strict_mode, done);
      }
      case TRANSITION:
      case NONEXISTENT:
        UNREACHABLE();
        break;
    }
  }

  // If we get here with *done true, we have encountered a read-only property.
  // Pre-ES5 semantics ignore read-only on the prototype chain entirely.
  if (!FLAG_es5_readonly) *done = false;
  if (*done) {
    // Strict mode throws on a blocked store; sloppy mode silently succeeds.
    if (strict_mode == kNonStrictMode) return value;
    Handle<Object> args[] = { name, object };
    Handle<Object> error = isolate->factory()->NewTypeError(
        "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args)));
    isolate->Throw(*error);
    return Handle<Object>();
  }
  return isolate->factory()->the_hole_value();
}


// Ensures |map|'s descriptor array has at least |slack| unused slots,
// reallocating and copying the existing descriptors if necessary.
void Map::EnsureDescriptorSlack(Handle<Map> map, int slack) {
  Handle<DescriptorArray> descriptors(map->instance_descriptors());
  if (slack <= descriptors->NumberOfSlackDescriptors()) return;
  int number_of_descriptors = descriptors->number_of_descriptors();
  Isolate* isolate = map->GetIsolate();
  Handle<DescriptorArray> new_descriptors =
      isolate->factory()->NewDescriptorArray(number_of_descriptors, slack);
  DescriptorArray::WhitenessWitness witness(*new_descriptors);

  for (int i = 0; i < number_of_descriptors; ++i) {
    new_descriptors->CopyFrom(i, *descriptors, i, witness);
  }

  map->set_instance_descriptors(*new_descriptors);
}


// Appends the AccessorInfo callbacks from |callbacks| into |array|,
// skipping names that are already present.  The appender policy T
// (DescriptorArrayAppender or FixedArrayAppender below) supplies the
// Contains/Insert operations.  Returns the new number of valid entries.
template<class T>
static int AppendUniqueCallbacks(NeanderArray* callbacks,
                                 Handle<typename T::Array> array,
                                 int valid_descriptors) {
  int nof_callbacks = callbacks->length();

  Isolate* isolate = array->GetIsolate();
  // Ensure the keys are unique names before writing them into the
  // instance descriptor.  Since it may cause a GC, it has to be done before we
  // temporarily put the heap in an invalid state while appending descriptors.
  for (int i = 0; i < nof_callbacks; ++i) {
    Handle<AccessorInfo> entry(AccessorInfo::cast(callbacks->get(i)));
    if (entry->name()->IsUniqueName()) continue;
    Handle<String> key =
        isolate->factory()->InternalizedStringFromString(
            Handle<String>(String::cast(entry->name())));
    entry->set_name(*key);
  }

  // Fill in new callback descriptors.  Process the callbacks from
  // back to front so that the last callback with a given name takes
  // precedence over previously added callbacks with that name.
  for (int i = nof_callbacks - 1; i >= 0; i--) {
    AccessorInfo* entry = AccessorInfo::cast(callbacks->get(i));
    Name* key = Name::cast(entry->name());
    // Check if a descriptor with this name already exists before writing.
    if (!T::Contains(key, entry, valid_descriptors, array)) {
      T::Insert(key, entry, valid_descriptors, array);
      valid_descriptors++;
    }
  }

  return valid_descriptors;
}

// Appender policy that writes CallbacksDescriptor entries into a map's
// DescriptorArray (used by Map::AppendCallbackDescriptors).
struct DescriptorArrayAppender {
  typedef DescriptorArray Array;
  static bool Contains(Name* key,
                       AccessorInfo* entry,
                       int valid_descriptors,
                       Handle<DescriptorArray> array) {
    return array->Search(key, valid_descriptors) != DescriptorArray::kNotFound;
  }
  static void Insert(Name* key,
                     AccessorInfo* entry,
                     int valid_descriptors,
                     Handle<DescriptorArray> array) {
    CallbacksDescriptor desc(key, entry, entry->property_attributes());
    array->Append(&desc);
  }
};


// Appender policy that stores raw AccessorInfo objects into a FixedArray
// (used by AccessorInfo::AppendUnique); duplicate detection is a linear
// name scan over the valid prefix.
struct FixedArrayAppender {
  typedef FixedArray Array;
  static bool Contains(Name* key,
                       AccessorInfo* entry,
                       int valid_descriptors,
                       Handle<FixedArray> array) {
    for (int i = 0; i < valid_descriptors; i++) {
      if (key == AccessorInfo::cast(array->get(i))->name()) return true;
    }
    return false;
  }
  static void Insert(Name* key,
                     AccessorInfo* entry,
                     int valid_descriptors,
                     Handle<FixedArray> array)
  {
    array->set(valid_descriptors, entry);
  }
};


// Appends the callback accessors in |descriptors| (a NeanderArray) to
// |map|'s descriptor array.  The caller must have reserved enough slack
// (see the ASSERT).
void Map::AppendCallbackDescriptors(Handle<Map> map,
                                    Handle<Object> descriptors) {
  int nof = map->NumberOfOwnDescriptors();
  Handle<DescriptorArray> array(map->instance_descriptors());
  NeanderArray callbacks(descriptors);
  ASSERT(array->NumberOfSlackDescriptors() >= callbacks.length());
  nof = AppendUniqueCallbacks<DescriptorArrayAppender>(&callbacks, array, nof);
  map->SetNumberOfOwnDescriptors(nof);
}


// Appends the callbacks in |descriptors| to |array| (a plain FixedArray),
// skipping duplicates; returns the new count of valid entries.
int AccessorInfo::AppendUnique(Handle<Object> descriptors,
                               Handle<FixedArray> array,
                               int valid_descriptors) {
  NeanderArray callbacks(descriptors);
  ASSERT(array->length() >= callbacks.length() + valid_descriptors);
  return AppendUniqueCallbacks<FixedArrayAppender>(&callbacks,
                                                   array,
                                                   valid_descriptors);
}


// Returns true if |map| is (identical to an entry) in |maps|.
static bool ContainsMap(MapHandleList* maps, Handle<Map> map) {
  ASSERT(!map.is_null());
  for (int i = 0; i < maps->length(); ++i) {
    if (!maps->at(i).is_null() && maps->at(i).is_identical_to(map)) return true;
  }
  return false;
}


// Wraps a possibly-NULL raw pointer in a handle (null handle for NULL).
template <class T>
static Handle<T> MaybeNull(T* p) {
  if (p == NULL) return Handle<T>::null();
  return Handle<T>(p);
}


// Walks this map's elements-kind transitions towards more general fast
// kinds and returns the most general transitioned map that appears in
// |candidates| (tracking packed-ness), or a null handle if none does.
Handle<Map> Map::FindTransitionedMap(MapHandleList* candidates) {
  ElementsKind kind = elements_kind();
  Handle<Map> transitioned_map = Handle<Map>::null();
  Handle<Map> current_map(this);
  bool packed = IsFastPackedElementsKind(kind);
  if (IsTransitionableFastElementsKind(kind)) {
    while (CanTransitionToMoreGeneralFastElementsKind(kind, false)) {
      kind = GetNextMoreGeneralFastElementsKind(kind, false);
      Handle<Map> maybe_transitioned_map =
          MaybeNull(current_map->LookupElementsTransitionMap(kind));
      if (maybe_transitioned_map.is_null()) break;
      // Only accept a candidate that does not lose packed-ness we still have.
      if (ContainsMap(candidates, maybe_transitioned_map) &&
          (packed || !IsFastPackedElementsKind(kind))) {
        transitioned_map = maybe_transitioned_map;
        if (!IsFastPackedElementsKind(kind)) packed = false;
      }
      current_map = maybe_transitioned_map;
    }
  }
  return transitioned_map;
}


// Follows existing elements-kind transitions from |map| as far as possible
// towards |to_kind| and returns the closest map reached (which may or may
// not have reached |to_kind| itself).
static Map* FindClosestElementsTransition(Map* map, ElementsKind to_kind) {
  Map* current_map = map;
  int index = GetSequenceIndexFromFastElementsKind(map->elements_kind());
  // Non-fast targets walk to the end of the fast-kind sequence first.
  int to_index = IsFastElementsKind(to_kind)
      ? GetSequenceIndexFromFastElementsKind(to_kind)
      : GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);

  ASSERT(index <= to_index);

  for (; index < to_index; ++index) {
    if (!current_map->HasElementsTransition()) return current_map;
    current_map = current_map->elements_transition_map();
  }
  // A non-fast target may hang one extra transition off the terminal
  // fast map.
  if (!IsFastElementsKind(to_kind) && current_map->HasElementsTransition()) {
    Map* next_map = current_map->elements_transition_map();
    if (next_map->elements_kind() == to_kind) return next_map;
  }
  ASSERT(IsFastElementsKind(to_kind)
         ?
           current_map->elements_kind() == to_kind
         : current_map->elements_kind() == TERMINAL_FAST_ELEMENTS_KIND);
  return current_map;
}


// Returns the existing transition target with elements kind |to_kind|, or
// NULL if the transition chain stops short of it.
Map* Map::LookupElementsTransitionMap(ElementsKind to_kind) {
  Map* to_map = FindClosestElementsTransition(this, to_kind);
  if (to_map->elements_kind() == to_kind) return to_map;
  return NULL;
}


// True if this map belongs to the initial Array.prototype or
// Object.prototype of its isolate's native context.
bool Map::IsMapInArrayPrototypeChain() {
  Isolate* isolate = GetIsolate();
  if (isolate->initial_array_prototype()->map() == this) {
    return true;
  }

  if (isolate->initial_object_prototype()->map() == this) {
    return true;
  }

  return false;
}


// Creates (with INSERT_TRANSITION) every elements-kind transition between
// |map|'s kind and |to_kind|, returning the final map or an allocation
// failure.
static MaybeObject* AddMissingElementsTransitions(Map* map,
                                                  ElementsKind to_kind) {
  ASSERT(IsFastElementsKind(map->elements_kind()));
  int index = GetSequenceIndexFromFastElementsKind(map->elements_kind());
  int to_index = IsFastElementsKind(to_kind)
      ? GetSequenceIndexFromFastElementsKind(to_kind)
      : GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);

  ASSERT(index <= to_index);

  Map* current_map = map;

  for (; index < to_index; ++index) {
    ElementsKind next_kind = GetFastElementsKindFromSequenceIndex(index + 1);
    MaybeObject* maybe_next_map =
        current_map->CopyAsElementsKind(next_kind, INSERT_TRANSITION);
    if (!maybe_next_map->To(&current_map)) return maybe_next_map;
  }

  // In case we are exiting the fast elements kind system, just add the map in
  // the end.
  if (!IsFastElementsKind(to_kind)) {
    MaybeObject* maybe_next_map =
        current_map->CopyAsElementsKind(to_kind, INSERT_TRANSITION);
    if (!maybe_next_map->To(&current_map)) return maybe_next_map;
  }

  ASSERT(current_map->elements_kind() == to_kind);
  return current_map;
}


// Handle-based wrapper around the raw GetElementsTransitionMap heap call;
// retries on allocation failure via CALL_HEAP_FUNCTION.
Handle<Map> JSObject::GetElementsTransitionMap(Handle<JSObject> object,
                                               ElementsKind to_kind) {
  Isolate* isolate = object->GetIsolate();
  CALL_HEAP_FUNCTION(isolate,
                     object->GetElementsTransitionMap(isolate, to_kind),
                     Map);
}


// Returns a map with elements kind |to_kind| for this object, either by
// inserting it into the transition chain or, when transitions are not
// allowed, as a free-standing copy.
MaybeObject* JSObject::GetElementsTransitionMapSlow(ElementsKind to_kind) {
  Map* start_map = map();
  ElementsKind from_kind = start_map->elements_kind();

  if (from_kind == to_kind) {
    return start_map;
  }

  bool allow_store_transition =
      // Only remember the map transition if there is not an already existing
      // non-matching element transition.
      !start_map->IsUndefined() && !start_map->is_shared() &&
      IsFastElementsKind(from_kind);

  // Only store fast element maps in ascending generality.
  if (IsFastElementsKind(to_kind)) {
    allow_store_transition &=
        IsTransitionableFastElementsKind(from_kind) &&
        IsMoreGeneralElementsKindTransition(from_kind, to_kind);
  }

  if (!allow_store_transition) {
    // Do not link the copy into the transition tree.
    return start_map->CopyAsElementsKind(to_kind, OMIT_TRANSITION);
  }

  return start_map->AsElementsKind(to_kind);
}


// Returns this map's transition target with elements kind |kind|, creating
// any missing intermediate transitions along the way.
MaybeObject* Map::AsElementsKind(ElementsKind kind) {
  Map* closest_map = FindClosestElementsTransition(this, kind);

  if (closest_map->elements_kind() == kind) {
    return closest_map;
  }

  return AddMissingElementsTransitions(closest_map, kind);
}


// Looks up |name| as a real (non-interceptor) own property, following a
// global proxy to its global object, and handling both fast and
// dictionary-mode property storage.
void JSObject::LocalLookupRealNamedProperty(Name* name, LookupResult* result) {
  if (IsJSGlobalProxy()) {
    Object* proto = GetPrototype();
    if (proto->IsNull()) return result->NotFound();
    ASSERT(proto->IsJSGlobalObject());
    return JSObject::cast(proto)->LocalLookupRealNamedProperty(name, result);
  }

  if (HasFastProperties()) {
    map()->LookupDescriptor(this, name, result);
    // A property or a map transition was found.  We return all of these result
    // types because LocalLookupRealNamedProperty is used when setting
    // properties where map transitions are handled.
    ASSERT(!result->IsFound() ||
           (result->holder() == this && result->IsFastPropertyType()));
    // Disallow caching for uninitialized constants.  These can only
    // occur as fields.
    if (result->IsField() &&
        result->IsReadOnly() &&
        RawFastPropertyAt(result->GetFieldIndex().field_index())->IsTheHole()) {
      result->DisallowCaching();
    }
    return;
  }

  int entry = property_dictionary()->FindEntry(name);
  if (entry != NameDictionary::kNotFound) {
    Object* value = property_dictionary()->ValueAt(entry);
    if (IsGlobalObject()) {
      PropertyDetails d = property_dictionary()->DetailsAt(entry);
      if (d.IsDeleted()) {
        result->NotFound();
        return;
      }
      // Global properties are stored indirectly through PropertyCells.
      value = PropertyCell::cast(value)->value();
    }
    // Make sure to disallow caching for uninitialized constants
    // found in the dictionary-mode objects.
    if (value->IsTheHole()) result->DisallowCaching();
    result->DictionaryResult(this, entry);
    return;
  }

  result->NotFound();
}


// Looks up |name| locally first, then along the prototype chain.
void JSObject::LookupRealNamedProperty(Name* name, LookupResult* result) {
  LocalLookupRealNamedProperty(name, result);
  if (result->IsFound()) return;

  LookupRealNamedPropertyInPrototypes(name, result);
}


// Looks up |name| as a real property on each prototype in turn, stopping
// at the first hit or at a proxy (reported as a handler result).
void JSObject::LookupRealNamedPropertyInPrototypes(Name* name,
                                                   LookupResult* result) {
  Isolate* isolate = GetIsolate();
  Heap* heap = isolate->heap();
  for (Object* pt = GetPrototype();
       pt != heap->null_value();
       pt = pt->GetPrototype(isolate)) {
    if (pt->IsJSProxy()) {
      return result->HandlerResult(JSProxy::cast(pt));
    }
    JSObject::cast(pt)->LocalLookupRealNamedProperty(name, result);
    ASSERT(!(result->IsFound() && result->type() == INTERCEPTOR));
    if (result->IsFound()) return;
  }
  result->NotFound();
}


// We only need to deal with CALLBACKS and INTERCEPTORS
Handle<Object> JSObject::SetPropertyWithFailedAccessCheck(
    Handle<JSObject> object,
    LookupResult* result,
    Handle<Name> name,
    Handle<Object> value,
    bool check_prototype,
    StrictModeFlag strict_mode) {
  if (check_prototype && !result->IsProperty()) {
    object->LookupRealNamedPropertyInPrototypes(*name, result);
  }

  if (result->IsProperty()) {
    if (!result->IsReadOnly()) {
      switch (result->type()) {
        case CALLBACKS: {
          Object* obj = result->GetCallbackObject();
          if (obj->IsAccessorInfo()) {
            Handle<AccessorInfo> info(AccessorInfo::cast(obj));
            // all_can_write accessors bypass the failed access check.
            if (info->all_can_write()) {
              return SetPropertyWithCallback(object,
                                             info,
                                             name,
                                             value,
                                             handle(result->holder()),
                                             strict_mode);
            }
          } else if (obj->IsAccessorPair()) {
            Handle<AccessorPair> pair(AccessorPair::cast(obj));
            // NOTE(review): this store path checks all_can_read() rather
            // than a write permission on the AccessorPair — confirm this is
            // intentional and not a copy/paste from the getter path.
            if (pair->all_can_read()) {
              return SetPropertyWithCallback(object,
                                             pair,
                                             name,
                                             value,
                                             handle(result->holder()),
                                             strict_mode);
            }
          }
          break;
        }
        case INTERCEPTOR: {
          // Try lookup real named properties.  Note that the only properties
          // that can be set this way are callbacks marked as ALL_CAN_WRITE
          // on the prototype chain.
          LookupResult r(object->GetIsolate());
          object->LookupRealNamedProperty(*name, &r);
          if (r.IsProperty()) {
            return SetPropertyWithFailedAccessCheck(object,
                                                    &r,
                                                    name,
                                                    value,
                                                    check_prototype,
                                                    strict_mode);
          }
          break;
        }
        default: {
          break;
        }
      }
    }
  }

  // No permitted accessor handled the store: report the access failure to
  // the embedder and return |value| unless an exception got scheduled.
  Isolate* isolate = object->GetIsolate();
  isolate->ReportFailedAccessCheck(*object, v8::ACCESS_SET);
  RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
  return value;
}


// Dispatches a store based on an existing LookupResult: proxies go through
// their handler, everything else through JSObject::SetPropertyForResult.
Handle<Object> JSReceiver::SetProperty(Handle<JSReceiver> object,
                                       LookupResult* result,
                                       Handle<Name> key,
                                       Handle<Object> value,
                                       PropertyAttributes attributes,
                                       StrictModeFlag strict_mode,
                                       StoreFromKeyed store_mode) {
  if (result->IsHandler()) {
    return JSProxy::SetPropertyWithHandler(handle(result->proxy()),
        object, key, value, attributes, strict_mode);
  } else {
    return JSObject::SetPropertyForResult(Handle<JSObject>::cast(object),
        result, key, value, attributes, strict_mode, store_mode);
  }
}


// Implements the proxy "has" trap; returns false for symbols and when the
// trap call throws.
bool JSProxy::HasPropertyWithHandler(Handle<JSProxy> proxy, Handle<Name> name) {
  Isolate* isolate = proxy->GetIsolate();

  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
  if (name->IsSymbol()) return false;

  Handle<Object> args[] = { name };
  Handle<Object> result = proxy->CallTrap(
      "has", isolate->derived_has_trap(), ARRAY_SIZE(args), args);
  if (isolate->has_pending_exception()) return false;

  return result->BooleanValue();
}


// Implements the proxy "set" trap; returns |value| on success or an empty
// handle if the trap threw.
Handle<Object> JSProxy::SetPropertyWithHandler(Handle<JSProxy> proxy,
                                               Handle<JSReceiver> receiver,
                                               Handle<Name> name,
                                               Handle<Object> value,
                                               PropertyAttributes attributes,
                                               StrictModeFlag strict_mode) {
  Isolate* isolate = proxy->GetIsolate();

  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
  if (name->IsSymbol()) return value;

  Handle<Object> args[] = { receiver, name, value };
  proxy->CallTrap("set", isolate->derived_set_trap(), ARRAY_SIZE(args), args);
  if (isolate->has_pending_exception()) return Handle<Object>();

  return value;
}


// Handles a store that reaches a proxy on the prototype chain: consults the
// proxy's "getPropertyDescriptor" trap and emulates [[Put]] against the
// returned descriptor (read-only data property, accessor with/without a
// setter).  |*done| is cleared when the proxy does not define the property
// and the caller should continue with a local store.
Handle<Object> JSProxy::SetPropertyViaPrototypesWithHandler(
    Handle<JSProxy> proxy,
    Handle<JSReceiver> receiver,
    Handle<Name> name,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictModeFlag strict_mode,
    bool* done) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<Object> handler(proxy->handler(), isolate);  // Trap might morph proxy.

  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
  if (name->IsSymbol()) {
    *done = false;
    return isolate->factory()->the_hole_value();
  }

  *done = true;  // except where redefined...
  Handle<Object> args[] = { name };
  Handle<Object> result = proxy->CallTrap(
      "getPropertyDescriptor", Handle<Object>(), ARRAY_SIZE(args), args);
  if (isolate->has_pending_exception()) return Handle<Object>();

  if (result->IsUndefined()) {
    *done = false;
    return isolate->factory()->the_hole_value();
  }

  // Emulate [[GetProperty]] semantics for proxies.
  bool has_pending_exception;
  Handle<Object> argv[] = { result };
  Handle<Object> desc = Execution::Call(
      isolate, isolate->to_complete_property_descriptor(), result,
      ARRAY_SIZE(argv), argv, &has_pending_exception);
  if (has_pending_exception) return Handle<Object>();

  // [[GetProperty]] requires to check that all properties are configurable.
  Handle<String> configurable_name =
      isolate->factory()->InternalizeOneByteString(
          STATIC_ASCII_VECTOR("configurable_"));
  Handle<Object> configurable(
      v8::internal::GetProperty(isolate, desc, configurable_name));
  ASSERT(!isolate->has_pending_exception());
  ASSERT(configurable->IsTrue() || configurable->IsFalse());
  if (configurable->IsFalse()) {
    Handle<String> trap =
        isolate->factory()->InternalizeOneByteString(
            STATIC_ASCII_VECTOR("getPropertyDescriptor"));
    Handle<Object> args[] = { handler, trap, name };
    Handle<Object> error = isolate->factory()->NewTypeError(
        "proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args)));
    isolate->Throw(*error);
    return Handle<Object>();
  }
  ASSERT(configurable->IsTrue());

  // Check for DataDescriptor.
  Handle<String> hasWritable_name =
      isolate->factory()->InternalizeOneByteString(
          STATIC_ASCII_VECTOR("hasWritable_"));
  Handle<Object> hasWritable(
      v8::internal::GetProperty(isolate, desc, hasWritable_name));
  ASSERT(!isolate->has_pending_exception());
  ASSERT(hasWritable->IsTrue() || hasWritable->IsFalse());
  if (hasWritable->IsTrue()) {
    Handle<String> writable_name =
        isolate->factory()->InternalizeOneByteString(
            STATIC_ASCII_VECTOR("writable_"));
    Handle<Object> writable(
        v8::internal::GetProperty(isolate, desc, writable_name));
    ASSERT(!isolate->has_pending_exception());
    ASSERT(writable->IsTrue() || writable->IsFalse());
    // A writable data property lets the caller perform the store locally.
    *done = writable->IsFalse();
    if (!*done) return isolate->factory()->the_hole_value();
    // Read-only data property: throw in strict mode, ignore otherwise.
    if (strict_mode == kNonStrictMode) return value;
    Handle<Object> args[] = { name, receiver };
    Handle<Object> error = isolate->factory()->NewTypeError(
        "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args)));
    isolate->Throw(*error);
    return Handle<Object>();
  }

  // We have an AccessorDescriptor.
  Handle<String> set_name = isolate->factory()->InternalizeOneByteString(
      STATIC_ASCII_VECTOR("set_"));
  Handle<Object> setter(v8::internal::GetProperty(isolate, desc, set_name));
  ASSERT(!isolate->has_pending_exception());
  if (!setter->IsUndefined()) {
    // TODO(rossberg): nicer would be to cast to some JSCallable here...
    return SetPropertyWithDefinedSetter(
        receiver, Handle<JSReceiver>::cast(setter), value);
  }

  // Accessor without a setter: throw in strict mode, ignore otherwise.
  if (strict_mode == kNonStrictMode) return value;
  Handle<Object> args2[] = { name, proxy };
  Handle<Object> error = isolate->factory()->NewTypeError(
      "no_setter_in_callback", HandleVector(args2, ARRAY_SIZE(args2)));
  isolate->Throw(*error);
  return Handle<Object>();
}


// Implements the proxy "delete" trap; in strict deletion mode a falsy trap
// result is turned into a "handler_failed" TypeError.
Handle<Object> JSProxy::DeletePropertyWithHandler(
    Handle<JSProxy> proxy, Handle<Name> name, DeleteMode mode) {
  Isolate* isolate = proxy->GetIsolate();

  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
  if (name->IsSymbol()) return isolate->factory()->false_value();

  Handle<Object> args[] = { name };
  Handle<Object> result = proxy->CallTrap(
      "delete", Handle<Object>(), ARRAY_SIZE(args), args);
  if (isolate->has_pending_exception()) return Handle<Object>();

  bool result_bool = result->BooleanValue();
  if (mode == STRICT_DELETION && !result_bool) {
    Handle<Object> handler(proxy->handler(), isolate);
    Handle<String> trap_name = isolate->factory()->InternalizeOneByteString(
        STATIC_ASCII_VECTOR("delete"));
    Handle<Object> args[] = { handler, trap_name };
    Handle<Object> error = isolate->factory()->NewTypeError(
        "handler_failed", HandleVector(args, ARRAY_SIZE(args)));
    isolate->Throw(*error);
    return Handle<Object>();
  }
  return isolate->factory()->ToBoolean(result_bool);
}


// Deletes an indexed property through the proxy's handler by converting
// the index to its string name.
Handle<Object> JSProxy::DeleteElementWithHandler(
    Handle<JSProxy> proxy, uint32_t index, DeleteMode mode) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<String> name = isolate->factory()->Uint32ToString(index);
  return JSProxy::DeletePropertyWithHandler(proxy, name, mode);
}


// Computes property attributes through the proxy's "getPropertyDescriptor"
// trap.  Returns ABSENT for symbols and undefined descriptors, NONE when a
// trap call throws.
MUST_USE_RESULT PropertyAttributes JSProxy::GetPropertyAttributeWithHandler(
    JSReceiver* receiver_raw,
    Name* name_raw) {
  Isolate* isolate = GetIsolate();
  HandleScope scope(isolate);
  Handle<JSProxy> proxy(this);
  Handle<Object> handler(this->handler(), isolate);  // Trap might morph proxy.
  Handle<JSReceiver> receiver(receiver_raw);
  Handle<Object> name(name_raw, isolate);

  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
  if (name->IsSymbol()) return ABSENT;

  Handle<Object> args[] = { name };
  Handle<Object> result = CallTrap(
      "getPropertyDescriptor", Handle<Object>(), ARRAY_SIZE(args), args);
  if (isolate->has_pending_exception()) return NONE;

  if (result->IsUndefined()) return ABSENT;

  // Normalize the trap result into a complete property descriptor.
  bool has_pending_exception;
  Handle<Object> argv[] = { result };
  Handle<Object> desc = Execution::Call(
      isolate, isolate->to_complete_property_descriptor(), result,
      ARRAY_SIZE(argv), argv, &has_pending_exception);
  if (has_pending_exception) return NONE;

  // Convert result to PropertyAttributes.
  Handle<String> enum_n = isolate->factory()->InternalizeOneByteString(
      STATIC_ASCII_VECTOR("enumerable_"));
  Handle<Object> enumerable(v8::internal::GetProperty(isolate, desc, enum_n));
  if (isolate->has_pending_exception()) return NONE;
  Handle<String> conf_n = isolate->factory()->InternalizeOneByteString(
      STATIC_ASCII_VECTOR("configurable_"));
  Handle<Object> configurable(v8::internal::GetProperty(isolate, desc, conf_n));
  if (isolate->has_pending_exception()) return NONE;
  Handle<String> writ_n = isolate->factory()->InternalizeOneByteString(
      STATIC_ASCII_VECTOR("writable_"));
  Handle<Object> writable(v8::internal::GetProperty(isolate, desc, writ_n));
  if (isolate->has_pending_exception()) return NONE;
  if (!writable->BooleanValue()) {
    // An accessor with a setter counts as writable.
    Handle<String> set_n = isolate->factory()->InternalizeOneByteString(
        STATIC_ASCII_VECTOR("set_"));
    Handle<Object> setter(v8::internal::GetProperty(isolate, desc, set_n));
    if (isolate->has_pending_exception()) return NONE;
    writable = isolate->factory()->ToBoolean(!setter->IsUndefined());
  }

  // Non-configurable results from the trap are rejected with a TypeError.
  if (configurable->IsFalse()) {
    Handle<String> trap = isolate->factory()->InternalizeOneByteString(
        STATIC_ASCII_VECTOR("getPropertyDescriptor"));
    Handle<Object> args[] = { handler, trap, name };
    Handle<Object> error = isolate->factory()->NewTypeError(
        "proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args)));
    isolate->Throw(*error);
    return NONE;
  }

  int attributes = NONE;
  if (!enumerable->BooleanValue()) attributes |= DONT_ENUM;
  if (!configurable->BooleanValue()) attributes |= DONT_DELETE;
  if (!writable->BooleanValue()) attributes |= READ_ONLY;
  return static_cast<PropertyAttributes>(attributes);
}


// Computes attributes for an indexed property through the proxy's handler
// by converting the index to its string name.
MUST_USE_RESULT PropertyAttributes JSProxy::GetElementAttributeWithHandler(
    JSReceiver* receiver_raw,
    uint32_t index) {
  Isolate* isolate = GetIsolate();
  HandleScope scope(isolate);
  Handle<JSProxy> proxy(this);
  Handle<JSReceiver> receiver(receiver_raw);
  Handle<String> name = isolate->factory()->Uint32ToString(index);
  return proxy->GetPropertyAttributeWithHandler(*receiver, *name);
}


// Turns a proxy into a plain JSObject/JSFunction in place, preserving its
// identity hash if one was assigned.
void JSProxy::Fix(Handle<JSProxy> proxy) {
  Isolate* isolate = proxy->GetIsolate();

  // Save identity hash.
  Handle<Object> hash(proxy->GetIdentityHash(), isolate);

  if (proxy->IsJSFunctionProxy()) {
    isolate->factory()->BecomeJSFunction(proxy);
    // Code will be set on the JavaScript side.
  } else {
    isolate->factory()->BecomeJSObject(proxy);
  }
  ASSERT(proxy->IsJSObject());

  // Inherit identity, if it was present.
  if (hash->IsSmi()) {
    JSObject::SetIdentityHash(Handle<JSObject>::cast(proxy),
                              Handle<Smi>::cast(hash));
  }
}


// Fetches trap |name| from the proxy's handler and calls it with |argv|.
// Falls back to |derived| when the handler does not define the trap;
// throws "handler_trap_missing" when there is no fallback either.
MUST_USE_RESULT Handle<Object> JSProxy::CallTrap(const char* name,
                                                 Handle<Object> derived,
                                                 int argc,
                                                 Handle<Object> argv[]) {
  Isolate* isolate = GetIsolate();
  Handle<Object> handler(this->handler(), isolate);

  Handle<String> trap_name = isolate->factory()->InternalizeUtf8String(name);
  Handle<Object> trap(v8::internal::GetProperty(isolate, handler, trap_name));
  if (isolate->has_pending_exception()) return trap;

  if (trap->IsUndefined()) {
    if (derived.is_null()) {
      Handle<Object> args[] = { handler, trap_name };
      Handle<Object> error = isolate->factory()->NewTypeError(
          "handler_trap_missing", HandleVector(args, ARRAY_SIZE(args)));
      isolate->Throw(*error);
      return Handle<Object>();
    }
    trap = Handle<Object>(derived);
  }

  bool threw;
  return Execution::Call(isolate, trap, handler, argc, argv, &threw);
}


// TODO(mstarzinger): Temporary wrapper until handlified.
// Handle-based wrapper around the raw Map::AsElementsKind heap function.
static Handle<Map> MapAsElementsKind(Handle<Map> map, ElementsKind kind) {
  CALL_HEAP_FUNCTION(map->GetIsolate(), map->AsElementsKind(kind), Map);
}


// Reshapes `object`'s elements kind and out-of-object property backing store
// so that `map` can be installed on it, then installs the map.
void JSObject::AllocateStorageForMap(Handle<JSObject> object, Handle<Map> map) {
  ASSERT(object->map()->inobject_properties() == map->inobject_properties());
  ElementsKind obj_kind = object->map()->elements_kind();
  ElementsKind map_kind = map->elements_kind();
  if (map_kind != obj_kind) {
    // Prefer the object's kind when it is the more general of the two
    // (or already dictionary-mode) so no information is lost.
    ElementsKind to_kind = map_kind;
    if (IsMoreGeneralElementsKindTransition(map_kind, obj_kind) ||
        IsDictionaryElementsKind(obj_kind)) {
      to_kind = obj_kind;
    }
    if (IsDictionaryElementsKind(to_kind)) {
      NormalizeElements(object);
    } else {
      TransitionElementsKind(object, to_kind);
    }
    map = MapAsElementsKind(map, to_kind);
  }
  // Resize the out-of-object properties array to exactly what the new map
  // expects (descriptors plus unused slots, minus in-object slots).
  int total_size =
      map->NumberOfOwnDescriptors() + map->unused_property_fields();
  int out_of_object = total_size - map->inobject_properties();
  if (out_of_object != object->properties()->length()) {
    Isolate* isolate = object->GetIsolate();
    Handle<FixedArray> new_properties = isolate->factory()->CopySizeFixedArray(
        handle(object->properties()), out_of_object);
    object->set_properties(*new_properties);
  }
  object->set_map(*map);
}


// Eagerly migrates `object` away from a deprecated map.
void JSObject::MigrateInstance(Handle<JSObject> object) {
  // Converting any field to the most specific type will cause the
  // GeneralizeFieldRepresentation algorithm to create the most general existing
  // transition that matches the object. This achieves what is needed.
  Handle<Map> original_map(object->map());
  GeneralizeFieldRepresentation(
      object, 0, Representation::None(), ALLOW_AS_CONSTANT);
  object->map()->set_migration_target(true);
  if (FLAG_trace_migration) {
    object->PrintInstanceMigration(stdout, *original_map, object->map());
  }
}


// Best-effort migration: returns an empty handle if no up-to-date map can be
// found for the deprecated one (caller must handle that case).
Handle<Object> JSObject::TryMigrateInstance(Handle<JSObject> object) {
  Handle<Map> original_map(object->map());
  Handle<Map> new_map = Map::CurrentMapForDeprecatedInternal(original_map);
  if (new_map.is_null()) return Handle<Object>();
  JSObject::MigrateToMap(object, new_map);
  if (FLAG_trace_migration) {
    object->PrintInstanceMigration(stdout, *original_map, object->map());
  }
  return object;
}


// Stores `value` by following a map transition found by `lookup`, updating
// field representations (and possibly generalizing the transition map) as
// needed so the stored value fits.
Handle<Object> JSObject::SetPropertyUsingTransition(
    Handle<JSObject> object,
    LookupResult* lookup,
    Handle<Name> name,
    Handle<Object> value,
    PropertyAttributes attributes) {
  Handle<Map> transition_map(lookup->GetTransitionTarget());
  int descriptor = transition_map->LastAdded();

  DescriptorArray* descriptors = transition_map->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);

  if (details.type() == CALLBACKS || attributes != details.attributes()) {
    // AddProperty will either normalize the object, or create a new fast copy
    // of the map. If we get a fast copy of the map, all field representations
    // will be tagged since the transition is omitted.
    return JSObject::AddProperty(
        object, name, value, attributes, kNonStrictMode,
        JSReceiver::CERTAINLY_NOT_STORE_FROM_KEYED,
        JSReceiver::OMIT_EXTENSIBILITY_CHECK,
        JSObject::FORCE_TAGGED, FORCE_FIELD, OMIT_TRANSITION);
  }

  // Keep the target CONSTANT if the same value is stored.
  // TODO(verwaest): Also support keeping the placeholder
  // (value->IsUninitialized) as constant.
  if (details.type() == CONSTANT &&
      descriptors->GetValue(descriptor) == *value) {
    object->set_map(*transition_map);
    return value;
  }

  Representation representation = details.representation();

  if (!value->FitsRepresentation(representation) ||
      details.type() == CONSTANT) {
    // Widen the field representation on the transition map; if that created
    // a new tree, first migrate the object to the updated back-pointer map.
    transition_map = Map::GeneralizeRepresentation(transition_map,
        descriptor, value->OptimalRepresentation(), FORCE_FIELD);
    Object* back = transition_map->GetBackPointer();
    if (back->IsMap()) {
      MigrateToMap(object, handle(Map::cast(back)));
    }
    descriptors = transition_map->instance_descriptors();
    representation = descriptors->GetDetails(descriptor).representation();
  }

  int field_index = descriptors->GetFieldIndex(descriptor);
  AddFastPropertyUsingMap(
      object, transition_map, name, value, field_index, representation);
  return value;
}


// Writes `value` into the existing fast field described by `lookup`,
// generalizing the field representation first if the value does not fit.
static void SetPropertyToField(LookupResult* lookup,
                               Handle<Name> name,
                               Handle<Object> value) {
  Representation representation = lookup->representation();
  if (!value->FitsRepresentation(representation) ||
      lookup->type() == CONSTANT) {
    JSObject::GeneralizeFieldRepresentation(handle(lookup->holder()),
                                            lookup->GetDescriptorIndex(),
                                            value->OptimalRepresentation(),
                                            FORCE_FIELD);
    DescriptorArray* desc = lookup->holder()->map()->instance_descriptors();
    int descriptor = lookup->GetDescriptorIndex();
    representation = desc->GetDetails(descriptor).representation();
  }

  if (FLAG_track_double_fields && representation.IsDouble()) {
    // Double fields are stored in a mutable HeapNumber box; update it
    // in place instead of replacing the field slot.
    HeapNumber* storage = HeapNumber::cast(lookup->holder()->RawFastPropertyAt(
        lookup->GetFieldIndex().field_index()));
    storage->set_value(value->Number());
    return;
  }

  lookup->holder()->FastPropertyAtPut(
      lookup->GetFieldIndex().field_index(), *value);
}


// Replaces an existing property with a plain field carrying `attributes`,
// migrating the holder to a generalized map (or to dictionary mode when it
// has too many fast properties).
static void
ConvertAndSetLocalProperty(LookupResult* lookup,
                           Handle<Name> name,
                           Handle<Object> value,
                           PropertyAttributes attributes) {
  Handle<JSObject> object(lookup->holder());
  if (object->TooManyFastProperties()) {
    JSObject::NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
  }

  if (!object->HasFastProperties()) {
    ReplaceSlowProperty(object, name, value, attributes);
    return;
  }

  int descriptor_index = lookup->GetDescriptorIndex();
  if (lookup->GetAttributes() == attributes) {
    JSObject::GeneralizeFieldRepresentation(
        object, descriptor_index, Representation::Tagged(), FORCE_FIELD);
  } else {
    // Attribute change requires a fresh map with all representations
    // generalized; there is no transition for this case.
    Handle<Map> old_map(object->map());
    Handle<Map> new_map = Map::CopyGeneralizeAllRepresentations(old_map,
        descriptor_index, FORCE_FIELD, attributes, "attributes mismatch");
    JSObject::MigrateToMap(object, new_map);
  }

  DescriptorArray* descriptors = object->map()->instance_descriptors();
  int index = descriptors->GetDetails(descriptor_index).field_index();
  object->FastPropertyAtPut(index, *value);
}


// Dispatches between the fast-field store and the attribute-changing
// conversion path depending on whether the attributes match.
static void SetPropertyToFieldWithAttributes(LookupResult* lookup,
                                             Handle<Name> name,
                                             Handle<Object> value,
                                             PropertyAttributes attributes) {
  if (lookup->GetAttributes() == attributes) {
    if (value->IsUninitialized()) return;
    SetPropertyToField(lookup, name, value);
  } else {
    ConvertAndSetLocalProperty(lookup, name, value, attributes);
  }
}


// Main named-property store path: performs access checks, walks the
// prototype chain for setters, dispatches on the lookup result type, and
// emits Object.observe change records when observation is enabled.
// Returns an empty handle when an exception is pending.
Handle<Object> JSObject::SetPropertyForResult(Handle<JSObject> object,
                                              LookupResult* lookup,
                                              Handle<Name> name,
                                              Handle<Object> value,
                                              PropertyAttributes attributes,
                                              StrictModeFlag strict_mode,
                                              StoreFromKeyed store_mode) {
  Isolate* isolate = object->GetIsolate();

  // Make sure that the top context does not change when doing callbacks or
  // interceptor calls.
  AssertNoContextChange ncc(isolate);

  // Optimization for 2-byte strings often used as keys in a decompression
  // dictionary. We internalize these short keys to avoid constantly
  // reallocating them.
  if (name->IsString() && !name->IsInternalizedString() &&
      Handle<String>::cast(name)->length() <= 2) {
    name = isolate->factory()->InternalizeString(Handle<String>::cast(name));
  }

  // Check access rights if needed.
  if (object->IsAccessCheckNeeded()) {
    if (!isolate->MayNamedAccess(*object, *name, v8::ACCESS_SET)) {
      return SetPropertyWithFailedAccessCheck(object, lookup, name, value,
                                              true, strict_mode);
    }
  }

  // A global proxy forwards the store to the actual global object behind it.
  if (object->IsJSGlobalProxy()) {
    Handle<Object> proto(object->GetPrototype(), isolate);
    if (proto->IsNull()) return value;
    ASSERT(proto->IsJSGlobalObject());
    return SetPropertyForResult(Handle<JSObject>::cast(proto),
        lookup, name, value, attributes, strict_mode, store_mode);
  }

  ASSERT(!lookup->IsFound() || lookup->holder() == *object ||
         lookup->holder()->map()->is_hidden_prototype());

  // If the property is not a real own property, give setters/proxies in the
  // prototype chain a chance to intercept the store.
  if (!lookup->IsProperty() && !object->IsJSContextExtensionObject()) {
    bool done = false;
    Handle<Object> result_object = SetPropertyViaPrototypes(
        object, name, value, attributes, strict_mode, &done);
    if (done) return result_object;
  }

  if (!lookup->IsFound()) {
    // Neither properties nor transitions found.
    return AddProperty(
        object, name, value, attributes, strict_mode, store_mode);
  }

  // Read-only properties throw in strict mode and are silently ignored
  // otherwise.
  if (lookup->IsProperty() && lookup->IsReadOnly()) {
    if (strict_mode == kStrictMode) {
      Handle<Object> args[] = { name, object };
      Handle<Object> error = isolate->factory()->NewTypeError(
          "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args)));
      isolate->Throw(*error);
      return Handle<Object>();
    } else {
      return value;
    }
  }

  // Capture the previous value for Object.observe change records.
  Handle<Object> old_value = isolate->factory()->the_hole_value();
  bool is_observed = FLAG_harmony_observation &&
                     object->map()->is_observed() &&
                     *name != isolate->heap()->hidden_string();
  if (is_observed && lookup->IsDataProperty()) {
    old_value = Object::GetProperty(object, name);
  }

  // This is a real property that is not read-only, or it is a
  // transition or null descriptor and there are no setters in the prototypes.
  Handle<Object> result = value;
  switch (lookup->type()) {
    case NORMAL:
      SetNormalizedProperty(handle(lookup->holder()), lookup, value);
      break;
    case FIELD:
      SetPropertyToField(lookup, name, value);
      break;
    case CONSTANT:
      // Only replace the constant if necessary.
      if (*value == lookup->GetConstant()) return value;
      SetPropertyToField(lookup, name, value);
      break;
    case CALLBACKS: {
      Handle<Object> callback_object(lookup->GetCallbackObject(), isolate);
      return SetPropertyWithCallback(object, callback_object, name, value,
                                     handle(lookup->holder()), strict_mode);
    }
    case INTERCEPTOR:
      result = SetPropertyWithInterceptor(handle(lookup->holder()), name, value,
                                          attributes, strict_mode);
      break;
    case TRANSITION:
      result = SetPropertyUsingTransition(handle(lookup->holder()), lookup,
                                          name, value, attributes);
      break;
    case HANDLER:
    case NONEXISTENT:
      UNREACHABLE();
  }

  RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<Object>());

  if (is_observed) {
    if (lookup->IsTransition()) {
      EnqueueChangeRecord(object, "add", name, old_value);
    } else {
      // Re-lookup to see whether the store actually changed the value
      // (interceptors/setters may have left it unchanged).
      LookupResult new_lookup(isolate);
      object->LocalLookup(*name, &new_lookup, true);
      if (new_lookup.IsDataProperty()) {
        Handle<Object> new_value = Object::GetProperty(object, name);
        if (!new_value->SameValue(*old_value)) {
          EnqueueChangeRecord(object, "update", name, old_value);
        }
      }
    }
  }

  return result;
}


// Set a real local property, even if it is READ_ONLY. If the property is not
// present, add it with attributes NONE. This code is an exact clone of
// SetProperty, with the check for IsReadOnly and the check for a
// callback setter removed. The two lines looking up the LookupResult
// result are also added. If one of the functions is changed, the other
// should be.
// Note that this method cannot be used to set the prototype of a function
// because ConvertDescriptorToField() which is called in "case CALLBACKS:"
// doesn't handle function prototypes correctly.
Handle<Object> JSObject::SetLocalPropertyIgnoreAttributes(
    Handle<JSObject> object,
    Handle<Name> name,
    Handle<Object> value,
    PropertyAttributes attributes,
    ValueType value_type,
    StoreMode mode,
    ExtensibilityCheck extensibility_check) {
  Isolate* isolate = object->GetIsolate();

  // Make sure that the top context does not change when doing callbacks or
  // interceptor calls.
  AssertNoContextChange ncc(isolate);

  LookupResult lookup(isolate);
  object->LocalLookup(*name, &lookup, true);
  if (!lookup.IsFound()) {
    object->map()->LookupTransition(*object, *name, &lookup);
  }

  // Check access rights if needed.
  if (object->IsAccessCheckNeeded()) {
    if (!isolate->MayNamedAccess(*object, *name, v8::ACCESS_SET)) {
      return SetPropertyWithFailedAccessCheck(object, &lookup, name, value,
                                              false, kNonStrictMode);
    }
  }

  // A global proxy forwards the store to the actual global object behind it.
  if (object->IsJSGlobalProxy()) {
    Handle<Object> proto(object->GetPrototype(), isolate);
    if (proto->IsNull()) return value;
    ASSERT(proto->IsJSGlobalObject());
    return SetLocalPropertyIgnoreAttributes(Handle<JSObject>::cast(proto),
        name, value, attributes, value_type, mode, extensibility_check);
  }

  // Interceptors and callbacks are bypassed: re-do the lookup on the real
  // named properties only.
  if (lookup.IsFound() &&
      (lookup.type() == INTERCEPTOR || lookup.type() == CALLBACKS)) {
    object->LocalLookupRealNamedProperty(*name, &lookup);
  }

  // Check for accessor in prototype chain removed here in clone.
  if (!lookup.IsFound()) {
    object->map()->LookupTransition(*object, *name, &lookup);
    TransitionFlag flag = lookup.IsFound()
        ? OMIT_TRANSITION : INSERT_TRANSITION;
    // Neither properties nor transitions found.
    return AddProperty(object, name, value, attributes, kNonStrictMode,
        MAY_BE_STORE_FROM_KEYED, extensibility_check, value_type, mode, flag);
  }

  // Capture previous value/attributes for Object.observe change records.
  Handle<Object> old_value = isolate->factory()->the_hole_value();
  PropertyAttributes old_attributes = ABSENT;
  bool is_observed = FLAG_harmony_observation &&
                     object->map()->is_observed() &&
                     *name != isolate->heap()->hidden_string();
  if (is_observed && lookup.IsProperty()) {
    if (lookup.IsDataProperty()) old_value =
        Object::GetProperty(object, name);
    old_attributes = lookup.GetAttributes();
  }

  // Check of IsReadOnly removed from here in clone.
  switch (lookup.type()) {
    case NORMAL:
      ReplaceSlowProperty(object, name, value, attributes);
      break;
    case FIELD:
      SetPropertyToFieldWithAttributes(&lookup, name, value, attributes);
      break;
    case CONSTANT:
      // Only replace the constant if necessary.
      if (lookup.GetAttributes() != attributes ||
          *value != lookup.GetConstant()) {
        SetPropertyToFieldWithAttributes(&lookup, name, value, attributes);
      }
      break;
    case CALLBACKS:
      ConvertAndSetLocalProperty(&lookup, name, value, attributes);
      break;
    case TRANSITION: {
      Handle<Object> result = SetPropertyUsingTransition(
          handle(lookup.holder()), &lookup, name, value, attributes);
      RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<Object>());
      break;
    }
    case NONEXISTENT:
    case HANDLER:
    case INTERCEPTOR:
      UNREACHABLE();
  }

  if (is_observed) {
    if (lookup.IsTransition()) {
      EnqueueChangeRecord(object, "add", name, old_value);
    } else if (old_value->IsTheHole()) {
      EnqueueChangeRecord(object, "reconfigure", name, old_value);
    } else {
      // Distinguish "reconfigure" (attribute change) from plain "update"
      // by re-inspecting the property after the store.
      LookupResult new_lookup(isolate);
      object->LocalLookup(*name, &new_lookup, true);
      bool value_changed = false;
      if (new_lookup.IsDataProperty()) {
        Handle<Object> new_value =
            Object::GetProperty(object, name);
        value_changed = !old_value->SameValue(*new_value);
      }
      if (new_lookup.GetAttributes() != old_attributes) {
        if (!value_changed) old_value = isolate->factory()->the_hole_value();
        EnqueueChangeRecord(object, "reconfigure", name, old_value);
      } else if (value_changed) {
        EnqueueChangeRecord(object, "update", name, old_value);
      }
    }
  }

  return value;
}


// Attribute lookup after an interceptor declined: checks the real own
// property, then (optionally) continues up the prototype chain.
PropertyAttributes JSObject::GetPropertyAttributePostInterceptor(
    JSObject* receiver,
    Name* name,
    bool continue_search) {
  // Check local property, ignore interceptor.
  LookupResult result(GetIsolate());
  LocalLookupRealNamedProperty(name, &result);
  if (result.IsFound()) return result.GetAttributes();

  if (continue_search) {
    // Continue searching via the prototype chain.
    Object* pt = GetPrototype();
    if (!pt->IsNull()) {
      return JSObject::cast(pt)->
          GetPropertyAttributeWithReceiver(receiver, name);
    }
  }
  return ABSENT;
}


// Attribute lookup that consults the named-property interceptor's query
// (preferred) or getter callback before falling back to the real property.
PropertyAttributes JSObject::GetPropertyAttributeWithInterceptor(
    JSObject* receiver,
    Name* name,
    bool continue_search) {
  // TODO(rossberg): Support symbols in the API.
  if (name->IsSymbol()) return ABSENT;

  Isolate* isolate = GetIsolate();
  HandleScope scope(isolate);

  // Make sure that the top context does not change when doing
  // callbacks or interceptor calls.
  AssertNoContextChange ncc(isolate);

  Handle<InterceptorInfo> interceptor(GetNamedInterceptor());
  Handle<JSObject> receiver_handle(receiver);
  Handle<JSObject> holder_handle(this);
  Handle<String> name_handle(String::cast(name));
  PropertyCallbackArguments args(isolate, interceptor->data(), receiver, this);
  if (!interceptor->query()->IsUndefined()) {
    // A query callback returns the attributes directly as an Int32.
    v8::NamedPropertyQueryCallback query =
        v8::ToCData<v8::NamedPropertyQueryCallback>(interceptor->query());
    LOG(isolate,
        ApiNamedPropertyAccess("interceptor-named-has", *holder_handle, name));
    v8::Handle<v8::Integer> result =
        args.Call(query, v8::Utils::ToLocal(name_handle));
    if (!result.IsEmpty()) {
      ASSERT(result->IsInt32());
      return static_cast<PropertyAttributes>(result->Int32Value());
    }
  } else if (!interceptor->getter()->IsUndefined()) {
    // Without a query callback, a successful getter call only proves the
    // property exists; report it as DONT_ENUM.
    v8::NamedPropertyGetterCallback getter =
        v8::ToCData<v8::NamedPropertyGetterCallback>(interceptor->getter());
    LOG(isolate,
        ApiNamedPropertyAccess("interceptor-named-get-has", this, name));
    v8::Handle<v8::Value> result =
        args.Call(getter, v8::Utils::ToLocal(name_handle));
    if (!result.IsEmpty()) return DONT_ENUM;
  }
  return holder_handle->GetPropertyAttributePostInterceptor(*receiver_handle,
                                                            *name_handle,
                                                            continue_search);
}


// Entry point for attribute lookup with an explicit receiver; routes array
// indices to the element path and names to the lookup-result path.
PropertyAttributes JSReceiver::GetPropertyAttributeWithReceiver(
    JSReceiver* receiver,
    Name* key) {
  uint32_t index = 0;
  if (IsJSObject() && key->AsArrayIndex(&index)) {
    return JSObject::cast(this)->GetElementAttributeWithReceiver(
        receiver, index, true);
  }
  // Named property.
  LookupResult lookup(GetIsolate());
  Lookup(key, &lookup);
  return GetPropertyAttributeForResult(receiver, &lookup, key, true);
}


// Converts an already-performed lookup into property attributes, dispatching
// to proxy handlers and interceptors where the lookup found one.
PropertyAttributes JSReceiver::GetPropertyAttributeForResult(
    JSReceiver* receiver,
    LookupResult* lookup,
    Name* name,
    bool continue_search) {
  // Check access rights if needed.
  if (IsAccessCheckNeeded()) {
    JSObject* this_obj = JSObject::cast(this);
    Heap* heap = GetHeap();
    if (!heap->isolate()->MayNamedAccess(this_obj, name, v8::ACCESS_HAS)) {
      return this_obj->GetPropertyAttributeWithFailedAccessCheck(
          receiver, lookup, name, continue_search);
    }
  }
  if (lookup->IsFound()) {
    switch (lookup->type()) {
      case NORMAL:  // fall through
      case FIELD:
      case CONSTANT:
      case CALLBACKS:
        return lookup->GetAttributes();
      case HANDLER: {
        return JSProxy::cast(lookup->proxy())->GetPropertyAttributeWithHandler(
            receiver, name);
      }
      case INTERCEPTOR:
        return lookup->holder()->GetPropertyAttributeWithInterceptor(
            JSObject::cast(receiver), name, continue_search);
      case TRANSITION:
      case NONEXISTENT:
        UNREACHABLE();
    }
  }
  return ABSENT;
}


// Attribute lookup restricted to own properties (no prototype chain walk).
PropertyAttributes JSReceiver::GetLocalPropertyAttribute(Name* name) {
  // Check whether the name is an array index.
  uint32_t index = 0;
  if (IsJSObject() && name->AsArrayIndex(&index)) {
    return GetLocalElementAttribute(index);
  }
  // Named property.
  LookupResult lookup(GetIsolate());
  LocalLookup(name, &lookup, true);
  return GetPropertyAttributeForResult(this, &lookup, name, false);
}


// Element (indexed property) attribute lookup: performs access checks,
// unwraps global proxies, and consults indexed interceptors.
PropertyAttributes JSObject::GetElementAttributeWithReceiver(
    JSReceiver* receiver, uint32_t index, bool continue_search) {
  Isolate* isolate = GetIsolate();

  // Check access rights if needed.
  if (IsAccessCheckNeeded()) {
    if (!isolate->MayIndexedAccess(this, index, v8::ACCESS_HAS)) {
      isolate->ReportFailedAccessCheck(this, v8::ACCESS_HAS);
      return ABSENT;
    }
  }

  // A global proxy forwards the query to the actual global object behind it.
  if (IsJSGlobalProxy()) {
    Object* proto = GetPrototype();
    if (proto->IsNull()) return ABSENT;
    ASSERT(proto->IsJSGlobalObject());
    return JSObject::cast(proto)->GetElementAttributeWithReceiver(
        receiver, index, continue_search);
  }

  // Check for lookup interceptor except when bootstrapping.
  if (HasIndexedInterceptor() && !isolate->bootstrapper()->IsActive()) {
    return GetElementAttributeWithInterceptor(receiver, index, continue_search);
  }

  return GetElementAttributeWithoutInterceptor(
      receiver, index, continue_search);
}


// Indexed analog of GetPropertyAttributeWithInterceptor: tries the indexed
// query callback, then the indexed getter, then falls back to the real
// elements.
PropertyAttributes JSObject::GetElementAttributeWithInterceptor(
    JSReceiver* receiver, uint32_t index, bool continue_search) {
  Isolate* isolate = GetIsolate();
  HandleScope scope(isolate);

  // Make sure that the top context does not change when doing
  // callbacks or interceptor calls.
  AssertNoContextChange ncc(isolate);

  Handle<InterceptorInfo> interceptor(GetIndexedInterceptor());
  Handle<JSReceiver> hreceiver(receiver);
  Handle<JSObject> holder(this);
  PropertyCallbackArguments args(isolate, interceptor->data(), receiver, this);
  if (!interceptor->query()->IsUndefined()) {
    // A query callback returns the attributes directly as an Int32.
    v8::IndexedPropertyQueryCallback query =
        v8::ToCData<v8::IndexedPropertyQueryCallback>(interceptor->query());
    LOG(isolate,
        ApiIndexedPropertyAccess("interceptor-indexed-has", this, index));
    v8::Handle<v8::Integer> result = args.Call(query, index);
    if (!result.IsEmpty())
      return static_cast<PropertyAttributes>(result->Int32Value());
  } else if (!interceptor->getter()->IsUndefined()) {
    // A successful getter call only proves existence; report NONE.
    v8::IndexedPropertyGetterCallback getter =
        v8::ToCData<v8::IndexedPropertyGetterCallback>(interceptor->getter());
    LOG(isolate,
        ApiIndexedPropertyAccess("interceptor-indexed-get-has", this, index));
    v8::Handle<v8::Value> result = args.Call(getter, index);
    if (!result.IsEmpty()) return NONE;
  }

  return holder->GetElementAttributeWithoutInterceptor(
      *hreceiver, index, continue_search);
}


// Reads element attributes from the elements backing store, then handles
// String wrapper characters and (optionally) the prototype chain.
PropertyAttributes JSObject::GetElementAttributeWithoutInterceptor(
    JSReceiver* receiver, uint32_t index, bool continue_search) {
  PropertyAttributes attr = GetElementsAccessor()->GetAttributes(
      receiver, this, index);
  if (attr != ABSENT) return attr;

  // Handle [] on String objects.
  if (IsStringObjectWithCharacterAt(index)) {
    return static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
  }

  if (!continue_search) return ABSENT;

  Object* pt = GetPrototype();
  if (pt->IsJSProxy()) {
    // We need to follow the spec and simulate a call to [[GetOwnProperty]].
    return JSProxy::cast(pt)->GetElementAttributeWithHandler(receiver, index);
  }
  if (pt->IsNull()) return ABSENT;
  return JSObject::cast(pt)->GetElementAttributeWithReceiver(
      receiver, index, true);
}


// Returns a normalized (dictionary-mode) map for `obj`, reusing a cached
// shared map when an equivalent one is present in this hash-indexed cache,
// and inserting a freshly copied one otherwise.
Handle<Map> NormalizedMapCache::Get(Handle<NormalizedMapCache> cache,
                                    Handle<JSObject> obj,
                                    PropertyNormalizationMode mode) {
  int index = obj->map()->Hash() % kEntries;
  Handle<Object> result = handle(cache->get(index), cache->GetIsolate());
  if (result->IsMap() &&
      Handle<Map>::cast(result)->EquivalentToForNormalization(obj->map(),
                                                              mode)) {
#ifdef VERIFY_HEAP
    if (FLAG_verify_heap) {
      Handle<Map>::cast(result)->SharedMapVerify();
    }
#endif
#ifdef ENABLE_SLOW_ASSERTS
    if (FLAG_enable_slow_asserts) {
      // The cached map should match newly created normalized map bit-by-bit,
      // except for the code cache, which can contain some ics which can be
      // applied to the shared map.
      Handle<Map> fresh = Map::CopyNormalized(handle(obj->map()), mode,
                                              SHARED_NORMALIZED_MAP);

      ASSERT(memcmp(fresh->address(),
                    Handle<Map>::cast(result)->address(),
                    Map::kCodeCacheOffset) == 0);
      STATIC_ASSERT(Map::kDependentCodeOffset ==
                    Map::kCodeCacheOffset + kPointerSize);
      int offset = Map::kDependentCodeOffset + kPointerSize;
      ASSERT(memcmp(fresh->address() + offset,
                    Handle<Map>::cast(result)->address() + offset,
                    Map::kSize - offset) == 0);
    }
#endif
    return Handle<Map>::cast(result);
  }

  // Cache miss: create a new shared normalized map and overwrite the slot.
  Isolate* isolate = cache->GetIsolate();
  Handle<Map> map = Map::CopyNormalized(handle(obj->map()), mode,
                                        SHARED_NORMALIZED_MAP);
  ASSERT(map->is_dictionary_map());
  cache->set(index, *map);
  isolate->counters()->normalized_maps()->Increment();

  return map;
}


// Drops all cached normalized maps by clearing every cache slot.
void NormalizedMapCache::Clear() {
  int entries = length();
  for (int i = 0; i != entries; i++) {
    set_undefined(i);
  }
}


// Handle-based wrapper: records `code` for `name` in the map's code cache.
void HeapObject::UpdateMapCodeCache(Handle<HeapObject> object,
                                    Handle<Name> name,
                                    Handle<Code> code) {
  Handle<Map> map(object->map());
  Map::UpdateCodeCache(map, name, code);
}


// Converts `object` from fast (descriptor-based) properties to slow
// (NameDictionary) properties, shrinking the instance in place and
// installing a shared normalized map. No-op if already normalized.
void JSObject::NormalizeProperties(Handle<JSObject> object,
                                   PropertyNormalizationMode mode,
                                   int expected_additional_properties) {
  if (!object->HasFastProperties()) return;

  // The global object is always normalized.
  ASSERT(!object->IsGlobalObject());
  // JSGlobalProxy must never be normalized
  ASSERT(!object->IsJSGlobalProxy());

  Isolate* isolate = object->GetIsolate();
  HandleScope scope(isolate);
  Handle<Map> map(object->map());

  // Allocate new content.
  int real_size = map->NumberOfOwnDescriptors();
  int property_count = real_size;
  if (expected_additional_properties > 0) {
    property_count += expected_additional_properties;
  } else {
    property_count += 2;  // Make space for two more properties.
  }
  Handle<NameDictionary> dictionary =
      isolate->factory()->NewNameDictionary(property_count);

  // Copy every own descriptor into the dictionary, preserving attributes
  // and using i + 1 as the enumeration index to keep ordering stable.
  Handle<DescriptorArray> descs(map->instance_descriptors());
  for (int i = 0; i < real_size; i++) {
    PropertyDetails details = descs->GetDetails(i);
    switch (details.type()) {
      case CONSTANT: {
        Handle<Name> key(descs->GetKey(i));
        Handle<Object> value(descs->GetConstant(i), isolate);
        PropertyDetails d = PropertyDetails(
            details.attributes(), NORMAL, i + 1);
        dictionary = NameDictionaryAdd(dictionary, key, value, d);
        break;
      }
      case FIELD: {
        Handle<Name> key(descs->GetKey(i));
        Handle<Object> value(
            object->RawFastPropertyAt(descs->GetFieldIndex(i)), isolate);
        PropertyDetails d =
            PropertyDetails(details.attributes(), NORMAL, i + 1);
        dictionary = NameDictionaryAdd(dictionary, key, value, d);
        break;
      }
      case CALLBACKS: {
        Handle<Name> key(descs->GetKey(i));
        Handle<Object> value(descs->GetCallbacksObject(i), isolate);
        PropertyDetails d = PropertyDetails(
            details.attributes(), CALLBACKS, i + 1);
        dictionary = NameDictionaryAdd(dictionary, key, value, d);
        break;
      }
      case INTERCEPTOR:
        break;
      case HANDLER:
      case NORMAL:
      case TRANSITION:
      case NONEXISTENT:
        UNREACHABLE();
        break;
    }
  }

  // Copy the next enumeration index from instance descriptor.
  dictionary->SetNextEnumerationIndex(real_size + 1);

  Handle<NormalizedMapCache> cache(
      isolate->context()->native_context()->normalized_map_cache());
  Handle<Map> new_map = NormalizedMapCache::Get(cache, object, mode);
  ASSERT(new_map->is_dictionary_map());

  // From here on we cannot fail and we shouldn't GC anymore.
  DisallowHeapAllocation no_allocation;

  // Resize the object in the heap if necessary.
  int new_instance_size = new_map->instance_size();
  int instance_size_delta = map->instance_size() - new_instance_size;
  ASSERT(instance_size_delta >= 0);
  // Fill the freed tail of the object so the heap stays iterable.
  isolate->heap()->CreateFillerObjectAt(object->address() + new_instance_size,
                                        instance_size_delta);
  if (Marking::IsBlack(Marking::MarkBitFrom(*object))) {
    MemoryChunk::IncrementLiveBytesFromMutator(object->address(),
                                               -instance_size_delta);
  }

  object->set_map(*new_map);
  map->NotifyLeafMapLayoutChange();

  object->set_properties(*dictionary);

  isolate->counters()->props_to_dictionary()->Increment();

#ifdef DEBUG
  if (FLAG_trace_normalization) {
    PrintF("Object properties have been normalized:\n");
    object->Print();
  }
#endif
}


// Converts slow (dictionary) properties back to fast properties, reserving
// `unused_property_fields` extra slots. No-op if already fast.
void JSObject::TransformToFastProperties(Handle<JSObject> object,
                                         int unused_property_fields) {
  if (object->HasFastProperties()) return;
  ASSERT(!object->IsGlobalObject());
  CALL_HEAP_FUNCTION_VOID(
      object->GetIsolate(),
      object->property_dictionary()->TransformPropertiesToFastFor(
          *object, unused_property_fields));
}


// Copies the first `length` elements of a fast backing store (FixedArray or
// FixedDoubleArray) into `dictionary`, skipping holes. May fail with a
// retry-after-GC MaybeObject.
static MUST_USE_RESULT MaybeObject* CopyFastElementsToDictionary(
    Isolate* isolate,
    FixedArrayBase* array,
    int length,
    SeededNumberDictionary* dictionary) {
  Heap* heap = isolate->heap();
  bool has_double_elements = array->IsFixedDoubleArray();
  for (int i = 0; i < length; i++) {
    Object* value = NULL;
    if (has_double_elements) {
      FixedDoubleArray* double_array = FixedDoubleArray::cast(array);
      if (double_array->is_the_hole(i)) {
        value = isolate->heap()->the_hole_value();
      } else {
        // Objects must be allocated in the old object space, since the
        // overall number of HeapNumbers needed for the conversion might
        // exceed the capacity of new space, and we would fail repeatedly
        // trying to convert the FixedDoubleArray.
        MaybeObject* maybe_value_object =
            heap->AllocateHeapNumber(double_array->get_scalar(i), TENURED);
        if (!maybe_value_object->ToObject(&value)) return maybe_value_object;
      }
    } else {
      value = FixedArray::cast(array)->get(i);
    }
    // Holes are simply not transferred; only real values get an entry.
    if (!value->IsTheHole()) {
      PropertyDetails details = PropertyDetails(NONE, NORMAL, 0);
      // AddNumberEntry may reallocate the dictionary; keep using the
      // (possibly new) dictionary returned through |maybe_result|.
      MaybeObject* maybe_result =
          dictionary->AddNumberEntry(i, value, details);
      if (!maybe_result->To(&dictionary)) return maybe_result;
    }
  }
  return dictionary;
}


// Handle-based wrapper around the raw-pointer CopyFastElementsToDictionary
// above; retries on allocation failure via CALL_HEAP_FUNCTION.
static Handle<SeededNumberDictionary> CopyFastElementsToDictionary(
    Handle<FixedArrayBase> array,
    int length,
    Handle<SeededNumberDictionary> dict) {
  Isolate* isolate = array->GetIsolate();
  CALL_HEAP_FUNCTION(isolate,
                     CopyFastElementsToDictionary(
                         isolate, *array, length, *dict),
                     SeededNumberDictionary);
}


// Handle-based wrapper for the raw NormalizeElements below.
Handle<SeededNumberDictionary> JSObject::NormalizeElements(
    Handle<JSObject> object) {
  CALL_HEAP_FUNCTION(object->GetIsolate(),
                     object->NormalizeElements(),
                     SeededNumberDictionary);
}


// Converts this object's elements backing store to dictionary (slow) mode
// and returns the element dictionary. Already-normalized stores are
// returned unchanged. External arrays cannot be normalized.
MaybeObject* JSObject::NormalizeElements() {
  ASSERT(!HasExternalArrayElements());

  // Find the backing store.
  FixedArrayBase* array = FixedArrayBase::cast(elements());
  Map* old_map = array->map();
  bool is_arguments =
      (old_map == old_map->GetHeap()->non_strict_arguments_elements_map());
  if (is_arguments) {
    // For non-strict arguments the real backing store sits in slot 1 of
    // the parameter map.
    array = FixedArrayBase::cast(FixedArray::cast(array)->get(1));
  }
  if (array->IsDictionary()) return array;

  ASSERT(HasFastSmiOrObjectElements() ||
         HasFastDoubleElements() ||
         HasFastArgumentsElements());
  // Compute the effective length and allocate a new backing store.
  int length = IsJSArray()
      ?
      Smi::cast(JSArray::cast(this)->length())->value()
      : array->length();
  int old_capacity = 0;
  int used_elements = 0;
  GetElementsCapacityAndUsage(&old_capacity, &used_elements);
  SeededNumberDictionary* dictionary;
  // Size the dictionary by the number of used elements, not capacity.
  MaybeObject* maybe_dictionary =
      SeededNumberDictionary::Allocate(GetHeap(), used_elements);
  if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;

  maybe_dictionary = CopyFastElementsToDictionary(
      GetIsolate(), array, length, dictionary);
  if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary;

  // Switch to using the dictionary as the backing storage for elements.
  if (is_arguments) {
    // Arguments objects keep their map; only the inner store is swapped.
    FixedArray::cast(elements())->set(1, dictionary);
  } else {
    // Set the new map first to satisfy the elements type assert in
    // set_elements().
    Map* new_map;
    MaybeObject* maybe = GetElementsTransitionMap(GetIsolate(),
                                                  DICTIONARY_ELEMENTS);
    if (!maybe->To(&new_map)) return maybe;
    set_map(new_map);
    set_elements(dictionary);
  }

  old_map->GetHeap()->isolate()->counters()->elements_to_dictionary()->
      Increment();

#ifdef DEBUG
  if (FLAG_trace_normalization) {
    PrintF("Object elements have been normalized:\n");
    Print();
  }
#endif

  ASSERT(HasDictionaryElements() || HasDictionaryArgumentsElements());
  return dictionary;
}


// Produces a random non-zero identity hash that fits in a Smi.
Smi* JSReceiver::GenerateIdentityHash() {
  Isolate* isolate = GetIsolate();

  int hash_value;
  int attempts = 0;
  do {
    // Generate a random 32-bit hash value but limit range to fit
    // within a smi.
    hash_value = isolate->random_number_generator()->NextInt() & Smi::kMaxValue;
    attempts++;
  } while (hash_value == 0 && attempts < 30);
  hash_value = hash_value != 0 ?
      hash_value : 1;  // never return 0

  return Smi::FromInt(hash_value);
}


// Stores |hash| as the object's identity hash, kept as a hidden property
// under the identity-hash key.
void JSObject::SetIdentityHash(Handle<JSObject> object, Handle<Smi> hash) {
  Isolate* isolate = object->GetIsolate();
  SetHiddenProperty(object, isolate->factory()->identity_hash_string(), hash);
}


// Returns the stored identity hash Smi, or undefined if none is set.
Object* JSObject::GetIdentityHash() {
  Object* stored_value = GetHiddenProperty(GetHeap()->identity_hash_string());
  return stored_value->IsSmi() ? stored_value : GetHeap()->undefined_value();
}


// Returns the existing identity hash, or generates and stores a fresh one.
// For a detached global proxy the hash cannot be stored; Smi 0 is returned.
Handle<Object> JSObject::GetOrCreateIdentityHash(Handle<JSObject> object) {
  Handle<Object> hash(object->GetIdentityHash(), object->GetIsolate());
  if (hash->IsSmi())
    return hash;

  Isolate* isolate = object->GetIsolate();

  hash = handle(object->GenerateIdentityHash(), isolate);
  Handle<Object> result = SetHiddenProperty(object,
      isolate->factory()->identity_hash_string(), hash);

  if (result->IsUndefined()) {
    // Trying to get hash of detached proxy.
    return handle(Smi::FromInt(0), isolate);
  }

  return hash;
}


// Proxies keep their identity hash in a dedicated field.
Object* JSProxy::GetIdentityHash() {
  return this->hash();
}


Handle<Object> JSProxy::GetOrCreateIdentityHash(Handle<JSProxy> proxy) {
  Isolate* isolate = proxy->GetIsolate();

  Handle<Object> hash(proxy->GetIdentityHash(), isolate);
  if (hash->IsSmi())
    return hash;

  hash = handle(proxy->GenerateIdentityHash(), isolate);
  proxy->set_hash(*hash);
  return hash;
}


// Looks up a hidden property by unique name. Returns the hole if the
// property is absent (note: callers distinguish the hole from undefined).
Object* JSObject::GetHiddenProperty(Name* key) {
  ASSERT(key->IsUniqueName());
  if (IsJSGlobalProxy()) {
    // For a proxy, use the prototype as target object.
    Object* proxy_parent = GetPrototype();
    // If the proxy is detached, return undefined.
    if (proxy_parent->IsNull()) return GetHeap()->the_hole_value();
    ASSERT(proxy_parent->IsJSGlobalObject());
    return JSObject::cast(proxy_parent)->GetHiddenProperty(key);
  }
  ASSERT(!IsJSGlobalProxy());
  Object* inline_value = GetHiddenPropertiesHashTable();

  // A Smi here means only the identity hash is stored (inline), with no
  // backing hash table allocated yet.
  if (inline_value->IsSmi()) {
    // Handle inline-stored identity hash.
    if (key == GetHeap()->identity_hash_string()) {
      return inline_value;
    } else {
      return GetHeap()->the_hole_value();
    }
  }

  if (inline_value->IsUndefined()) return GetHeap()->the_hole_value();

  ObjectHashTable* hashtable = ObjectHashTable::cast(inline_value);
  Object* entry = hashtable->Lookup(key);
  return entry;
}


// Stores a hidden property on |object|. Returns the object on success, or
// undefined when the receiver is a detached global proxy.
Handle<Object> JSObject::SetHiddenProperty(Handle<JSObject> object,
                                           Handle<Name> key,
                                           Handle<Object> value) {
  Isolate* isolate = object->GetIsolate();

  ASSERT(key->IsUniqueName());
  if (object->IsJSGlobalProxy()) {
    // For a proxy, use the prototype as target object.
    Handle<Object> proxy_parent(object->GetPrototype(), isolate);
    // If the proxy is detached, return undefined.
    if (proxy_parent->IsNull()) return isolate->factory()->undefined_value();
    ASSERT(proxy_parent->IsJSGlobalObject());
    return SetHiddenProperty(Handle<JSObject>::cast(proxy_parent), key, value);
  }
  ASSERT(!object->IsJSGlobalProxy());

  Handle<Object> inline_value(object->GetHiddenPropertiesHashTable(), isolate);

  // If there is no backing store yet, store the identity hash inline.
  if (value->IsSmi() &&
      *key == *isolate->factory()->identity_hash_string() &&
      (inline_value->IsUndefined() || inline_value->IsSmi())) {
    return JSObject::SetHiddenPropertiesHashTable(object, value);
  }

  Handle<ObjectHashTable> hashtable =
      GetOrCreateHiddenPropertiesHashtable(object);

  // If it was found, check if the key is already in the dictionary.
  Handle<ObjectHashTable> new_table = ObjectHashTable::Put(hashtable, key,
                                                           value);
  if (*new_table != *hashtable) {
    // If adding the key expanded the dictionary (i.e., Add returned a new
    // dictionary), store it back to the object.
    SetHiddenPropertiesHashTable(object, new_table);
  }

  // Return this to mark success.
  return object;
}


// Removes a hidden property by overwriting its table entry with the hole.
// Identity hashes are never deleted (asserted below).
void JSObject::DeleteHiddenProperty(Handle<JSObject> object, Handle<Name> key) {
  Isolate* isolate = object->GetIsolate();
  ASSERT(key->IsUniqueName());

  if (object->IsJSGlobalProxy()) {
    // Forward to the global object behind the proxy; a detached proxy has
    // nothing to delete.
    Handle<Object> proto(object->GetPrototype(), isolate);
    if (proto->IsNull()) return;
    ASSERT(proto->IsJSGlobalObject());
    return DeleteHiddenProperty(Handle<JSObject>::cast(proto), key);
  }

  Object* inline_value = object->GetHiddenPropertiesHashTable();

  // We never delete (inline-stored) identity hashes.
  ASSERT(*key != *isolate->factory()->identity_hash_string());
  if (inline_value->IsUndefined() || inline_value->IsSmi()) return;

  Handle<ObjectHashTable> hashtable(ObjectHashTable::cast(inline_value));
  ObjectHashTable::Put(hashtable, key, isolate->factory()->the_hole_value());
}


// True if the object carries the hidden-properties slot (keyed by the
// hidden string), checked without invoking interceptors.
bool JSObject::HasHiddenProperties() {
  return GetPropertyAttributePostInterceptor(this,
                                             GetHeap()->hidden_string(),
                                             false) != ABSENT;
}


// Returns the hidden-properties backing store: an ObjectHashTable, a Smi
// (inline identity hash only), or undefined when nothing is stored.
Object* JSObject::GetHiddenPropertiesHashTable() {
  ASSERT(!IsJSGlobalProxy());
  if (HasFastProperties()) {
    // If the object has fast properties, check whether the first slot
    // in the descriptor array matches the hidden string. Since the
    // hidden strings hash code is zero (and no other name has hash
    // code zero) it will always occupy the first entry if present.
    DescriptorArray* descriptors = this->map()->instance_descriptors();
    if (descriptors->number_of_descriptors() > 0) {
      int sorted_index = descriptors->GetSortedKeyIndex(0);
      if (descriptors->GetKey(sorted_index) == GetHeap()->hidden_string() &&
          sorted_index < map()->NumberOfOwnDescriptors()) {
        ASSERT(descriptors->GetType(sorted_index) == FIELD);
        ASSERT(descriptors->GetDetails(sorted_index).representation().
               IsCompatibleForLoad(Representation::Tagged()));
        return this->RawFastPropertyAt(
            descriptors->GetFieldIndex(sorted_index));
      } else {
        return GetHeap()->undefined_value();
      }
    } else {
      return GetHeap()->undefined_value();
    }
  } else {
    PropertyAttributes attributes;
    // You can't install a getter on a property indexed by the hidden string,
    // so we can be sure that GetLocalPropertyPostInterceptor returns a real
    // object.
    return GetLocalPropertyPostInterceptor(this,
                                           GetHeap()->hidden_string(),
                                           &attributes)->ToObjectUnchecked();
  }
}

// Returns the hidden-properties hash table, allocating one (and migrating
// an inline-stored identity hash into it) if it does not exist yet.
Handle<ObjectHashTable> JSObject::GetOrCreateHiddenPropertiesHashtable(
    Handle<JSObject> object) {
  Isolate* isolate = object->GetIsolate();

  static const int kInitialCapacity = 4;
  Handle<Object> inline_value(object->GetHiddenPropertiesHashTable(), isolate);
  if (inline_value->IsHashTable()) {
    return Handle<ObjectHashTable>::cast(inline_value);
  }

  Handle<ObjectHashTable> hashtable = isolate->factory()->NewObjectHashTable(
      kInitialCapacity,
      USE_CUSTOM_MINIMUM_CAPACITY);

  if (inline_value->IsSmi()) {
    // We were storing the identity hash inline and now allocated an actual
    // dictionary. Put the identity hash into the new dictionary.
    hashtable = ObjectHashTable::Put(hashtable,
                                     isolate->factory()->identity_hash_string(),
                                     inline_value);
  }

  // Install the new table under the hidden string, bypassing enumeration,
  // representation and extensibility checks.
  JSObject::SetLocalPropertyIgnoreAttributes(
      object,
      isolate->factory()->hidden_string(),
      hashtable,
      DONT_ENUM,
      OPTIMAL_REPRESENTATION,
      ALLOW_AS_CONSTANT,
      OMIT_EXTENSIBILITY_CHECK);

  return hashtable;
}


// Stores |value| (either the hidden-properties hash table or an inline
// identity-hash Smi) in the slot keyed by the hidden string.
Handle<Object> JSObject::SetHiddenPropertiesHashTable(Handle<JSObject> object,
                                                      Handle<Object> value) {
  ASSERT(!object->IsJSGlobalProxy());

  Isolate* isolate = object->GetIsolate();

  // We can store the identity hash inline iff there is no backing store
  // for hidden properties yet.
  ASSERT(object->HasHiddenProperties() != value->IsSmi());
  if (object->HasFastProperties()) {
    // If the object has fast properties, check whether the first slot
    // in the descriptor array matches the hidden string. Since the
    // hidden strings hash code is zero (and no other name has hash
    // code zero) it will always occupy the first entry if present.
    DescriptorArray* descriptors = object->map()->instance_descriptors();
    if (descriptors->number_of_descriptors() > 0) {
      int sorted_index = descriptors->GetSortedKeyIndex(0);
      if (descriptors->GetKey(sorted_index) == isolate->heap()->hidden_string()
          && sorted_index < object->map()->NumberOfOwnDescriptors()) {
        ASSERT(descriptors->GetType(sorted_index) == FIELD);
        // Fast path: overwrite the existing hidden-string field in place.
        object->FastPropertyAtPut(descriptors->GetFieldIndex(sorted_index),
                                  *value);
        return object;
      }
    }
  }

  SetLocalPropertyIgnoreAttributes(object,
                                   isolate->factory()->hidden_string(),
                                   value,
                                   DONT_ENUM,
                                   OPTIMAL_REPRESENTATION,
                                   ALLOW_AS_CONSTANT,
                                   OMIT_EXTENSIBILITY_CHECK);
  return object;
}


// Deletes a local (own) property, bypassing any interceptor. Returns
// true_value when the property does not exist; otherwise delegates to
// DeleteNormalizedProperty after normalizing the object.
Handle<Object> JSObject::DeletePropertyPostInterceptor(Handle<JSObject> object,
                                                       Handle<Name> name,
                                                       DeleteMode mode) {
  // Check local property, ignore interceptor.
  Isolate* isolate = object->GetIsolate();
  LookupResult result(isolate);
  object->LocalLookupRealNamedProperty(*name, &result);
  if (!result.IsFound()) return isolate->factory()->true_value();

  // Normalize object if needed.
  NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);

  return DeleteNormalizedProperty(object, name, mode);
}


// Deletes a property by first giving the named interceptor's deleter
// callback a chance to handle (or veto) the deletion.
Handle<Object> JSObject::DeletePropertyWithInterceptor(Handle<JSObject> object,
                                                       Handle<Name> name) {
  Isolate* isolate = object->GetIsolate();

  // TODO(rossberg): Support symbols in the API.
  if (name->IsSymbol()) return isolate->factory()->false_value();

  Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
  if (!interceptor->deleter()->IsUndefined()) {
    v8::NamedPropertyDeleterCallback deleter =
        v8::ToCData<v8::NamedPropertyDeleterCallback>(interceptor->deleter());
    LOG(isolate,
        ApiNamedPropertyAccess("interceptor-named-delete", *object, *name));
    PropertyCallbackArguments args(
        isolate, interceptor->data(), *object, *object);
    v8::Handle<v8::Boolean> result =
        args.Call(deleter, v8::Utils::ToLocal(Handle<String>::cast(name)));
    RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
    // A non-empty result means the interceptor handled the deletion.
    if (!result.IsEmpty()) {
      ASSERT(result->IsBoolean());
      Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
      result_internal->VerifyApiCallResultType();
      // Rebox CustomArguments::kReturnValueOffset before returning.
      return handle(*result_internal, isolate);
    }
  }
  // The interceptor declined; fall through to the regular deletion path.
  Handle<Object> result =
      DeletePropertyPostInterceptor(object, name, NORMAL_DELETION);
  RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
  return result;
}


// TODO(mstarzinger): Temporary wrapper until handlified.
static Handle<Object> AccessorDelete(Handle<JSObject> object,
                                     uint32_t index,
                                     JSObject::DeleteMode mode) {
  CALL_HEAP_FUNCTION(object->GetIsolate(),
                     object->GetElementsAccessor()->Delete(*object,
                                                           index,
                                                           mode),
                     Object);
}


// Deletes an indexed property, letting the indexed interceptor's deleter
// callback handle (or veto) the deletion first.
Handle<Object> JSObject::DeleteElementWithInterceptor(Handle<JSObject> object,
                                                      uint32_t index) {
  Isolate* isolate = object->GetIsolate();
  Factory* factory = isolate->factory();

  // Make sure that the top context does not change when doing
  // callbacks or interceptor calls.
  AssertNoContextChange ncc(isolate);

  Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
  if (interceptor->deleter()->IsUndefined()) return factory->false_value();
  v8::IndexedPropertyDeleterCallback deleter =
      v8::ToCData<v8::IndexedPropertyDeleterCallback>(interceptor->deleter());
  LOG(isolate,
      ApiIndexedPropertyAccess("interceptor-indexed-delete", *object, index));
  PropertyCallbackArguments args(
      isolate, interceptor->data(), *object, *object);
  v8::Handle<v8::Boolean> result = args.Call(deleter, index);
  RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
  // A non-empty result means the interceptor handled the deletion.
  if (!result.IsEmpty()) {
    ASSERT(result->IsBoolean());
    Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
    result_internal->VerifyApiCallResultType();
    // Rebox CustomArguments::kReturnValueOffset before returning.
    return handle(*result_internal, isolate);
  }
  // The interceptor declined; delete through the elements accessor.
  Handle<Object> delete_result = AccessorDelete(object, index, NORMAL_DELETION);
  RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
  return delete_result;
}


// Deletes the element at |index|, implementing [[Delete]] for indexed
// properties: access checks, string-wrapper characters, global proxy
// forwarding, interceptors and Object.observe change records.
Handle<Object> JSObject::DeleteElement(Handle<JSObject> object,
                                       uint32_t index,
                                       DeleteMode mode) {
  Isolate* isolate = object->GetIsolate();
  Factory* factory = isolate->factory();

  // Check access rights if needed.
  if (object->IsAccessCheckNeeded() &&
      !isolate->MayIndexedAccess(*object, index, v8::ACCESS_DELETE)) {
    isolate->ReportFailedAccessCheck(*object, v8::ACCESS_DELETE);
    RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return factory->false_value();
  }

  // Characters of a String object are not configurable.
  if (object->IsStringObjectWithCharacterAt(index)) {
    if (mode == STRICT_DELETION) {
      // Deleting a non-configurable property in strict mode.
      Handle<Object> name = factory->NewNumberFromUint(index);
      Handle<Object> args[2] = { name, object };
      Handle<Object> error =
          factory->NewTypeError("strict_delete_property",
                                HandleVector(args, 2));
      isolate->Throw(*error);
      return Handle<Object>();  // Empty handle signals a pending exception.
    }
    return factory->false_value();
  }

  if (object->IsJSGlobalProxy()) {
    Handle<Object> proto(object->GetPrototype(), isolate);
    if (proto->IsNull()) return factory->false_value();
    ASSERT(proto->IsJSGlobalObject());
    return DeleteElement(Handle<JSObject>::cast(proto), index, mode);
  }

  // For observed objects, capture the old value (unless it is an
  // accessor) so a "delete" change record can be emitted afterwards.
  Handle<Object> old_value;
  bool should_enqueue_change_record = false;
  if (FLAG_harmony_observation && object->map()->is_observed()) {
    should_enqueue_change_record = HasLocalElement(object, index);
    if (should_enqueue_change_record) {
      old_value = object->GetLocalElementAccessorPair(index) != NULL
          ? Handle<Object>::cast(factory->the_hole_value())
          : Object::GetElement(isolate, object, index);
    }
  }

  // Skip interceptor if forcing deletion.
  Handle<Object> result;
  if (object->HasIndexedInterceptor() && mode != FORCE_DELETION) {
    result = DeleteElementWithInterceptor(object, index);
  } else {
    result = AccessorDelete(object, index, mode);
  }

  // Only report a change if the element is actually gone now.
  if (should_enqueue_change_record && !HasLocalElement(object, index)) {
    Handle<String> name = factory->Uint32ToString(index);
    EnqueueChangeRecord(object, "delete", name, old_value);
  }

  return result;
}


// Deletes a named property, implementing [[Delete]]: access checks,
// global proxy forwarding, array-index names, DONT_DELETE handling,
// interceptors and Object.observe change records.
Handle<Object> JSObject::DeleteProperty(Handle<JSObject> object,
                                        Handle<Name> name,
                                        DeleteMode mode) {
  Isolate* isolate = object->GetIsolate();
  // ECMA-262, 3rd, 8.6.2.5
  ASSERT(name->IsName());

  // Check access rights if needed.
  if (object->IsAccessCheckNeeded() &&
      !isolate->MayNamedAccess(*object, *name, v8::ACCESS_DELETE)) {
    isolate->ReportFailedAccessCheck(*object, v8::ACCESS_DELETE);
    RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return isolate->factory()->false_value();
  }

  if (object->IsJSGlobalProxy()) {
    Object* proto = object->GetPrototype();
    if (proto->IsNull()) return isolate->factory()->false_value();
    ASSERT(proto->IsJSGlobalObject());
    return JSGlobalObject::DeleteProperty(
        handle(JSGlobalObject::cast(proto)), name, mode);
  }

  // Names that parse as array indices are handled as element deletions.
  uint32_t index = 0;
  if (name->AsArrayIndex(&index)) {
    return DeleteElement(object, index, mode);
  }

  LookupResult lookup(isolate);
  object->LocalLookup(*name, &lookup, true);
  if (!lookup.IsFound()) return isolate->factory()->true_value();
  // Ignore attributes if forcing a deletion.
  if (lookup.IsDontDelete() && mode != FORCE_DELETION) {
    if (mode == STRICT_DELETION) {
      // Deleting a non-configurable property in strict mode.
      Handle<Object> args[2] = { name, object };
      Handle<Object> error = isolate->factory()->NewTypeError(
          "strict_delete_property", HandleVector(args, ARRAY_SIZE(args)));
      isolate->Throw(*error);
      return Handle<Object>();  // Empty handle signals a pending exception.
    }
    return isolate->factory()->false_value();
  }

  // For observed objects (excluding the hidden-properties slot), capture
  // the old value so a "delete" change record can be emitted afterwards.
  Handle<Object> old_value = isolate->factory()->the_hole_value();
  bool is_observed = FLAG_harmony_observation &&
                     object->map()->is_observed() &&
                     *name != isolate->heap()->hidden_string();
  if (is_observed && lookup.IsDataProperty()) {
    old_value = Object::GetProperty(object, name);
  }
  Handle<Object> result;

  // Check for interceptor.
  if (lookup.IsInterceptor()) {
    // Skip interceptor if forcing a deletion.
    if (mode == FORCE_DELETION) {
      result = DeletePropertyPostInterceptor(object, name, mode);
    } else {
      result = DeletePropertyWithInterceptor(object, name);
    }
  } else {
    // Normalize object if needed.
    NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
    // Make sure the properties are normalized before removing the entry.
    result = DeleteNormalizedProperty(object, name, mode);
  }

  // Only report a change if the property is actually gone now.
  if (is_observed && !HasLocalProperty(object, name)) {
    EnqueueChangeRecord(object, "delete", name, old_value);
  }

  return result;
}


// Receiver-level element deletion: dispatches to the proxy handler for
// JSProxy receivers, otherwise to JSObject::DeleteElement.
Handle<Object> JSReceiver::DeleteElement(Handle<JSReceiver> object,
                                         uint32_t index,
                                         DeleteMode mode) {
  if (object->IsJSProxy()) {
    return JSProxy::DeleteElementWithHandler(
        Handle<JSProxy>::cast(object), index, mode);
  }
  return JSObject::DeleteElement(Handle<JSObject>::cast(object), index, mode);
}


// Receiver-level property deletion: dispatches to the proxy handler for
// JSProxy receivers, otherwise to JSObject::DeleteProperty.
Handle<Object> JSReceiver::DeleteProperty(Handle<JSReceiver> object,
                                          Handle<Name> name,
                                          DeleteMode mode) {
  if (object->IsJSProxy()) {
    return JSProxy::DeletePropertyWithHandler(
        Handle<JSProxy>::cast(object), name, mode);
  }
  return JSObject::DeleteProperty(Handle<JSObject>::cast(object), name, mode);
}


// Scans an elements backing store (fast object kinds or a dictionary) for
// a direct reference to |object|.
bool JSObject::ReferencesObjectFromElements(FixedArray* elements,
                                            ElementsKind kind,
                                            Object* object) {
  ASSERT(IsFastObjectElementsKind(kind) ||
         kind == DICTIONARY_ELEMENTS);
  if (IsFastObjectElementsKind(kind)) {
    int length = IsJSArray()
        ?
        Smi::cast(JSArray::cast(this)->length())->value()
        : elements->length();
    for (int i = 0; i < length; ++i) {
      Object* element = elements->get(i);
      if (!element->IsTheHole() && element == object) return true;
    }
  } else {
    // Dictionary elements: reverse-lookup the value directly.
    Object* key =
        SeededNumberDictionary::cast(elements)->SlowReverseLookup(object);
    if (!key->IsUndefined()) return true;
  }
  return false;
}


// Check whether this object references another object.
bool JSObject::ReferencesObject(Object* obj) {
  Map* map_of_this = map();
  Heap* heap = GetHeap();
  // Raw pointers are held across the whole scan; no allocation allowed.
  DisallowHeapAllocation no_allocation;

  // Is the object the constructor for this object?
  if (map_of_this->constructor() == obj) {
    return true;
  }

  // Is the object the prototype for this object?
  if (map_of_this->prototype() == obj) {
    return true;
  }

  // Check if the object is among the named properties.
  Object* key = SlowReverseLookup(obj);
  if (!key->IsUndefined()) {
    return true;
  }

  // Check if the object is among the indexed properties.
  ElementsKind kind = GetElementsKind();
  switch (kind) {
    case EXTERNAL_PIXEL_ELEMENTS:
    case EXTERNAL_BYTE_ELEMENTS:
    case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
    case EXTERNAL_SHORT_ELEMENTS:
    case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
    case EXTERNAL_INT_ELEMENTS:
    case EXTERNAL_UNSIGNED_INT_ELEMENTS:
    case EXTERNAL_FLOAT_ELEMENTS:
    case EXTERNAL_DOUBLE_ELEMENTS:
    case FAST_DOUBLE_ELEMENTS:
    case FAST_HOLEY_DOUBLE_ELEMENTS:
      // Raw pixels and external arrays do not reference other
      // objects.
      break;
    case FAST_SMI_ELEMENTS:
    case FAST_HOLEY_SMI_ELEMENTS:
      // Smi-only elements cannot reference heap objects.
      break;
    case FAST_ELEMENTS:
    case FAST_HOLEY_ELEMENTS:
    case DICTIONARY_ELEMENTS: {
      FixedArray* elements = FixedArray::cast(this->elements());
      if (ReferencesObjectFromElements(elements, kind, obj)) return true;
      break;
    }
    case NON_STRICT_ARGUMENTS_ELEMENTS: {
      FixedArray* parameter_map = FixedArray::cast(elements());
      // Check the mapped parameters.
      int length = parameter_map->length();
      for (int i = 2; i < length; ++i) {
        Object* value = parameter_map->get(i);
        if (!value->IsTheHole() && value == obj) return true;
      }
      // Check the arguments.
      FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
      kind = arguments->IsDictionary() ? DICTIONARY_ELEMENTS :
          FAST_HOLEY_ELEMENTS;
      if (ReferencesObjectFromElements(arguments, kind, obj)) return true;
      break;
    }
  }

  // For functions check the context.
  if (IsJSFunction()) {
    // Get the constructor function for arguments array.
    JSObject* arguments_boilerplate =
        heap->isolate()->context()->native_context()->
            arguments_boilerplate();
    JSFunction* arguments_function =
        JSFunction::cast(arguments_boilerplate->map()->constructor());

    // Get the context and don't check if it is the native context.
    JSFunction* f = JSFunction::cast(this);
    Context* context = f->context();
    if (context->IsNativeContext()) {
      return false;
    }

    // Check the non-special context slots.
    for (int i = Context::MIN_CONTEXT_SLOTS; i < context->length(); i++) {
      // Only check JS objects.
      if (context->get(i)->IsJSObject()) {
        JSObject* ctxobj = JSObject::cast(context->get(i));
        // If it is an arguments array check the content.
        if (ctxobj->map()->constructor() == arguments_function) {
          // Recurse into the arguments object's own references.
          if (ctxobj->ReferencesObject(obj)) {
            return true;
          }
        } else if (ctxobj == obj) {
          return true;
        }
      }
    }

    // Check the context extension (if any) if it can have references.
    if (context->has_extension() && !context->IsCatchContext()) {
      return JSObject::cast(context->extension())->ReferencesObject(obj);
    }
  }

  // No references to object.
  return false;
}


// Implements Object.preventExtensions: normalizes elements, then installs
// a copied map with the extensible bit cleared. Returns the object, a
// false value on a failed access check, or an empty handle on exception.
Handle<Object> JSObject::PreventExtensions(Handle<JSObject> object) {
  Isolate* isolate = object->GetIsolate();

  if (!object->map()->is_extensible()) return object;

  if (object->IsAccessCheckNeeded() &&
      !isolate->MayNamedAccess(*object,
                               isolate->heap()->undefined_value(),
                               v8::ACCESS_KEYS)) {
    isolate->ReportFailedAccessCheck(*object, v8::ACCESS_KEYS);
    RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return isolate->factory()->false_value();
  }

  if (object->IsJSGlobalProxy()) {
    Handle<Object> proto(object->GetPrototype(), isolate);
    if (proto->IsNull()) return object;
    ASSERT(proto->IsJSGlobalObject());
    return PreventExtensions(Handle<JSObject>::cast(proto));
  }

  // It's not possible to seal objects with external array elements
  if (object->HasExternalArrayElements()) {
    Handle<Object> error =
        isolate->factory()->NewTypeError(
            "cant_prevent_ext_external_array_elements",
            HandleVector(&object, 1));
    isolate->Throw(*error);
    return Handle<Object>();
  }

  // If there are fast elements we normalize.
  Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
  ASSERT(object->HasDictionaryElements() ||
         object->HasDictionaryArgumentsElements());

  // Make sure that we never go back to fast case.
  dictionary->set_requires_slow_elements();

  // Do a map transition, other objects with this map may still
  // be extensible.
  // TODO(adamk): Extend the NormalizedMapCache to handle non-extensible maps.
  Handle<Map> new_map = Map::Copy(handle(object->map()));

  new_map->set_is_extensible(false);
  object->set_map(*new_map);
  ASSERT(!object->map()->is_extensible());

  if (FLAG_harmony_observation && object->map()->is_observed()) {
    EnqueueChangeRecord(object, "preventExtensions", Handle<Name>(),
                        isolate->factory()->the_hole_value());
  }
  return object;
}


// Marks every entry of a property or element dictionary as DONT_DELETE,
// and additionally READ_ONLY except for accessor pairs (setters/getters
// must stay writable in the attribute sense).
template<typename Dictionary>
static void FreezeDictionary(Dictionary* dictionary) {
  int capacity = dictionary->Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = dictionary->KeyAt(i);
    if (dictionary->IsKey(k)) {
      PropertyDetails details = dictionary->DetailsAt(i);
      int attrs = DONT_DELETE;
      // READ_ONLY is an invalid attribute for JS setters/getters.
      if (details.type() != CALLBACKS ||
          !dictionary->ValueAt(i)->IsAccessorPair()) {
        attrs |= READ_ONLY;
      }
      details = details.CopyAddAttributes(
          static_cast<PropertyAttributes>(attrs));
      dictionary->DetailsAtPut(i, details);
    }
  }
}


// Implements Object.freeze: makes all own properties non-configurable
// (and non-writable where applicable) and the object non-extensible.
Handle<Object> JSObject::Freeze(Handle<JSObject> object) {
  // Freezing non-strict arguments should be handled elsewhere.
  ASSERT(!object->HasNonStrictArgumentsElements());
  ASSERT(!object->map()->is_observed());

  if (object->map()->is_frozen()) return object;

  Isolate* isolate = object->GetIsolate();
  if (object->IsAccessCheckNeeded() &&
      !isolate->MayNamedAccess(*object,
                               isolate->heap()->undefined_value(),
                               v8::ACCESS_KEYS)) {
    isolate->ReportFailedAccessCheck(*object, v8::ACCESS_KEYS);
    RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return isolate->factory()->false_value();
  }

  if (object->IsJSGlobalProxy()) {
    Handle<Object> proto(object->GetPrototype(), isolate);
    if (proto->IsNull()) return object;
    ASSERT(proto->IsJSGlobalObject());
    return Freeze(Handle<JSObject>::cast(proto));
  }

  // It's not possible to freeze objects with external array elements
  if (object->HasExternalArrayElements()) {
    Handle<Object> error =
        isolate->factory()->NewTypeError(
            "cant_prevent_ext_external_array_elements",
            HandleVector(&object, 1));
    isolate->Throw(*error);
    return Handle<Object>();
  }

  // Fast elements must be converted to a dictionary so that per-element
  // attributes can be frozen.
  Handle<SeededNumberDictionary> new_element_dictionary;
  if (!object->elements()->IsDictionary()) {
    int length = object->IsJSArray()
        ? Smi::cast(Handle<JSArray>::cast(object)->length())->value()
        : object->elements()->length();
    if (length > 0) {
      int capacity = 0;
      int used = 0;
      object->GetElementsCapacityAndUsage(&capacity, &used);
      new_element_dictionary =
          isolate->factory()->NewSeededNumberDictionary(used);

      // Move elements to a dictionary; avoid calling NormalizeElements to avoid
      // unnecessary transitions.
      new_element_dictionary = CopyFastElementsToDictionary(
          handle(object->elements()), length, new_element_dictionary);
    } else {
      // No existing elements, use a pre-allocated empty backing store
      new_element_dictionary =
          isolate->factory()->empty_slow_element_dictionary();
    }
  }

  // Reuse an existing "frozen" map transition if one was recorded.
  LookupResult result(isolate);
  Handle<Map> old_map(object->map());
  old_map->LookupTransition(*object, isolate->heap()->frozen_symbol(), &result);
  if (result.IsTransition()) {
    Map* transition_map = result.GetTransitionTarget();
    ASSERT(transition_map->has_dictionary_elements());
    ASSERT(transition_map->is_frozen());
    ASSERT(!transition_map->is_extensible());
    object->set_map(transition_map);
  } else if (object->HasFastProperties() && old_map->CanHaveMoreTransitions()) {
    // Create a new descriptor array with fully-frozen properties
    int num_descriptors = old_map->NumberOfOwnDescriptors();
    Handle<DescriptorArray> new_descriptors =
        DescriptorArray::CopyUpToAddAttributes(
            handle(old_map->instance_descriptors()), num_descriptors, FROZEN);
    Handle<Map> new_map = Map::CopyReplaceDescriptors(
        old_map, new_descriptors, INSERT_TRANSITION,
        isolate->factory()->frozen_symbol());
    new_map->freeze();
    new_map->set_is_extensible(false);
    new_map->set_elements_kind(DICTIONARY_ELEMENTS);
    object->set_map(*new_map);
  } else {
    // Slow path: need to normalize properties for safety
    NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);

    // Create a new map, since other objects with this map may be extensible.
    // TODO(adamk): Extend the NormalizedMapCache to handle non-extensible maps.
5619 Handle<Map> new_map = Map::Copy(handle(object->map())); 5620 new_map->freeze(); 5621 new_map->set_is_extensible(false); 5622 new_map->set_elements_kind(DICTIONARY_ELEMENTS); 5623 object->set_map(*new_map); 5624 5625 // Freeze dictionary-mode properties 5626 FreezeDictionary(object->property_dictionary()); 5627 } 5628 5629 ASSERT(object->map()->has_dictionary_elements()); 5630 if (!new_element_dictionary.is_null()) { 5631 object->set_elements(*new_element_dictionary); 5632 } 5633 5634 if (object->elements() != isolate->heap()->empty_slow_element_dictionary()) { 5635 SeededNumberDictionary* dictionary = object->element_dictionary(); 5636 // Make sure we never go back to the fast case 5637 dictionary->set_requires_slow_elements(); 5638 // Freeze all elements in the dictionary 5639 FreezeDictionary(dictionary); 5640 } 5641 5642 return object; 5643 } 5644 5645 5646 void JSObject::SetObserved(Handle<JSObject> object) { 5647 Isolate* isolate = object->GetIsolate(); 5648 5649 if (object->map()->is_observed()) 5650 return; 5651 5652 LookupResult result(isolate); 5653 object->map()->LookupTransition(*object, 5654 isolate->heap()->observed_symbol(), 5655 &result); 5656 5657 Handle<Map> new_map; 5658 if (result.IsTransition()) { 5659 new_map = handle(result.GetTransitionTarget()); 5660 ASSERT(new_map->is_observed()); 5661 } else if (object->map()->CanHaveMoreTransitions()) { 5662 new_map = Map::CopyForObserved(handle(object->map())); 5663 } else { 5664 new_map = Map::Copy(handle(object->map())); 5665 new_map->set_is_observed(); 5666 } 5667 object->set_map(*new_map); 5668 } 5669 5670 5671 Handle<JSObject> JSObject::Copy(Handle<JSObject> object) { 5672 Isolate* isolate = object->GetIsolate(); 5673 CALL_HEAP_FUNCTION(isolate, 5674 isolate->heap()->CopyJSObject(*object), JSObject); 5675 } 5676 5677 5678 template<class ContextObject> 5679 class JSObjectWalkVisitor { 5680 public: 5681 JSObjectWalkVisitor(ContextObject* site_context, bool copying, 5682 JSObject::DeepCopyHints 
// Walks an object graph rooted at a JSObject, optionally producing a deep
// copy. ContextObject supplies allocation-site bookkeeping (creation vs.
// usage context); when |copying| is false the walk only records sites and
// returns the original object.
template<class ContextObject>
class JSObjectWalkVisitor {
 public:
  JSObjectWalkVisitor(ContextObject* site_context, bool copying,
                      JSObject::DeepCopyHints hints)
    : site_context_(site_context),
      copying_(copying),
      hints_(hints) {}

  // Entry point: walks (and, if copying, duplicates) |object| recursively.
  // Returns a null handle on stack overflow or allocation failure.
  Handle<JSObject> StructureWalk(Handle<JSObject> object);

 protected:
  // Recurses into one JSObject-valued element or property, bracketed by an
  // allocation-site scope so nested literal sites are tracked per value.
  inline Handle<JSObject> VisitElementOrProperty(Handle<JSObject> object,
                                                 Handle<JSObject> value) {
    Handle<AllocationSite> current_site = site_context()->EnterNewScope();
    Handle<JSObject> copy_of_value = StructureWalk(value);
    site_context()->ExitScope(current_site, value);
    return copy_of_value;
  }

  inline ContextObject* site_context() { return site_context_; }
  inline Isolate* isolate() { return site_context()->isolate(); }

  inline bool copying() const { return copying_; }

 private:
  ContextObject* site_context_;
  const bool copying_;                     // true => produce a copy, false => walk only.
  const JSObject::DeepCopyHints hints_;    // kObjectIsShallowArray skips recursion.
};


// Core of DeepWalk/DeepCopy. Statement order is GC-sensitive: raw pointers
// are re-derived from handles after every potential allocation.
template <class ContextObject>
Handle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk(
    Handle<JSObject> object) {
  Isolate* isolate = this->isolate();
  bool copying = this->copying();
  bool shallow = hints_ == JSObject::kObjectIsShallowArray;

  if (!shallow) {
    // Recursion depth is bounded only by the object graph; check the stack.
    StackLimitCheck check(isolate);

    if (check.HasOverflowed()) {
      isolate->StackOverflow();
      return Handle<JSObject>::null();
    }
  }

  if (object->map()->is_deprecated()) {
    JSObject::MigrateInstance(object);
  }

  Handle<JSObject> copy;
  if (copying) {
    Handle<AllocationSite> site_to_pass;
    if (site_context()->ShouldCreateMemento(object)) {
      site_to_pass = site_context()->current();
    }
    // Retry-on-GC allocation of the shallow clone; bail out with an empty
    // handle if allocation ultimately fails.
    CALL_AND_RETRY_OR_DIE(isolate,
                          isolate->heap()->CopyJSObject(*object,
                              site_to_pass.is_null() ? NULL : *site_to_pass),
                          { copy = Handle<JSObject>(JSObject::cast(__object__),
                                                    isolate);
                            break;
                          },
                          return Handle<JSObject>());
  } else {
    copy = object;
  }

  ASSERT(copying || copy.is_identical_to(object));

  ElementsKind kind = copy->GetElementsKind();
  if (copying && IsFastSmiOrObjectElementsKind(kind) &&
      FixedArray::cast(copy->elements())->map() ==
        isolate->heap()->fixed_cow_array_map()) {
    // The clone shares a copy-on-write backing store; count it.
    isolate->counters()->cow_arrays_created_runtime()->Increment();
  }

  if (!shallow) {
    HandleScope scope(isolate);

    // Deep copy local properties.
    if (copy->HasFastProperties()) {
      Handle<DescriptorArray> descriptors(copy->map()->instance_descriptors());
      int limit = copy->map()->NumberOfOwnDescriptors();
      for (int i = 0; i < limit; i++) {
        PropertyDetails details = descriptors->GetDetails(i);
        if (details.type() != FIELD) continue;
        int index = descriptors->GetFieldIndex(i);
        // NOTE(review): field values are read from |object|, not |copy|;
        // after CopyJSObject both hold the same values, so this is
        // presumably equivalent — confirm against upstream history.
        Handle<Object> value(object->RawFastPropertyAt(index), isolate);
        if (value->IsJSObject()) {
          value = VisitElementOrProperty(copy, Handle<JSObject>::cast(value));
          RETURN_IF_EMPTY_HANDLE_VALUE(isolate, value, Handle<JSObject>());
        } else {
          // Non-object field: re-box according to its representation
          // (e.g. allocate a fresh HeapNumber for double fields).
          Representation representation = details.representation();
          value = NewStorageFor(isolate, value, representation);
        }
        if (copying) {
          copy->FastPropertyAtPut(index, *value);
        }
      }
    } else {
      // Dictionary-mode properties: enumerate names, recurse into
      // JSObject-valued, non-special (attributes == NONE) properties.
      Handle<FixedArray> names =
          isolate->factory()->NewFixedArray(copy->NumberOfLocalProperties());
      copy->GetLocalPropertyNames(*names, 0);
      for (int i = 0; i < names->length(); i++) {
        ASSERT(names->get(i)->IsString());
        Handle<String> key_string(String::cast(names->get(i)));
        PropertyAttributes attributes =
            copy->GetLocalPropertyAttribute(*key_string);
        // Only deep copy fields from the object literal expression.
        // In particular, don't try to copy the length attribute of
        // an array.
        if (attributes != NONE) continue;
        Handle<Object> value(
            copy->GetProperty(*key_string, &attributes)->ToObjectUnchecked(),
            isolate);
        if (value->IsJSObject()) {
          Handle<JSObject> result = VisitElementOrProperty(
              copy, Handle<JSObject>::cast(value));
          RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<JSObject>());
          if (copying) {
            // Creating object copy for literals. No strict mode needed.
            CHECK_NOT_EMPTY_HANDLE(isolate, JSObject::SetProperty(
                copy, key_string, result, NONE, kNonStrictMode));
          }
        }
      }
    }

    // Deep copy local elements.
    // Pixel elements cannot be created using an object literal.
    ASSERT(!copy->HasExternalArrayElements());
    switch (kind) {
      case FAST_SMI_ELEMENTS:
      case FAST_ELEMENTS:
      case FAST_HOLEY_SMI_ELEMENTS:
      case FAST_HOLEY_ELEMENTS: {
        Handle<FixedArray> elements(FixedArray::cast(copy->elements()));
        if (elements->map() == isolate->heap()->fixed_cow_array_map()) {
          // COW array: must not contain JSObjects, so nothing to recurse into.
#ifdef DEBUG
          for (int i = 0; i < elements->length(); i++) {
            ASSERT(!elements->get(i)->IsJSObject());
          }
#endif
        } else {
          for (int i = 0; i < elements->length(); i++) {
            Handle<Object> value(elements->get(i), isolate);
            ASSERT(value->IsSmi() ||
                   value->IsTheHole() ||
                   (IsFastObjectElementsKind(copy->GetElementsKind())));
            if (value->IsJSObject()) {
              Handle<JSObject> result = VisitElementOrProperty(
                  copy, Handle<JSObject>::cast(value));
              RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<JSObject>());
              if (copying) {
                elements->set(i, *result);
              }
            }
          }
        }
        break;
      }
      case DICTIONARY_ELEMENTS: {
        Handle<SeededNumberDictionary> element_dictionary(
            copy->element_dictionary());
        int capacity = element_dictionary->Capacity();
        for (int i = 0; i < capacity; i++) {
          Object* k = element_dictionary->KeyAt(i);
          if (element_dictionary->IsKey(k)) {
            Handle<Object> value(element_dictionary->ValueAt(i), isolate);
            if (value->IsJSObject()) {
              Handle<JSObject> result = VisitElementOrProperty(
                  copy, Handle<JSObject>::cast(value));
              RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<JSObject>());
              if (copying) {
                element_dictionary->ValueAtPut(i, *result);
              }
            }
          }
        }
        break;
      }
      case NON_STRICT_ARGUMENTS_ELEMENTS:
        // Arguments objects cannot appear in object/array literals.
        UNIMPLEMENTED();
        break;
      case EXTERNAL_PIXEL_ELEMENTS:
      case EXTERNAL_BYTE_ELEMENTS:
      case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
      case EXTERNAL_SHORT_ELEMENTS:
      case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
      case EXTERNAL_INT_ELEMENTS:
      case EXTERNAL_UNSIGNED_INT_ELEMENTS:
      case EXTERNAL_FLOAT_ELEMENTS:
      case EXTERNAL_DOUBLE_ELEMENTS:
      case FAST_DOUBLE_ELEMENTS:
      case FAST_HOLEY_DOUBLE_ELEMENTS:
        // No contained objects, nothing to do.
        break;
    }
  }

  return copy;
}


// Walk-only pass: records allocation sites, never copies. The result is
// either the original object or null (stack overflow).
Handle<JSObject> JSObject::DeepWalk(
    Handle<JSObject> object,
    AllocationSiteCreationContext* site_context) {
  JSObjectWalkVisitor<AllocationSiteCreationContext> v(site_context, false,
                                                       kNoHints);
  Handle<JSObject> result = v.StructureWalk(object);
  ASSERT(result.is_null() || result.is_identical_to(object));
  return result;
}


// Copying pass: returns a deep (or, with kObjectIsShallowArray, shallow)
// copy of |object|, or an empty handle on failure.
Handle<JSObject> JSObject::DeepCopy(Handle<JSObject> object,
                                    AllocationSiteUsageContext* site_context,
                                    DeepCopyHints hints) {
  JSObjectWalkVisitor<AllocationSiteUsageContext> v(site_context, true, hints);
  Handle<JSObject> copy = v.StructureWalk(object);
  ASSERT(!copy.is_identical_to(object));
  return copy;
}
// Tests for the fast common case for property enumeration:
// - This object and all prototypes have a valid enum cache (which implies
//   no proxies, no interceptors, no access checks).
// - This object has no enumerable elements.
// - No prototype has enumerable properties or elements.
bool JSReceiver::IsSimpleEnum() {
  Heap* heap = GetHeap();
  for (Object* o = this;
       o != heap->null_value();
       o = JSObject::cast(o)->GetPrototype()) {
    if (!o->IsJSObject()) return false;
    JSObject* curr = JSObject::cast(o);
    int enum_length = curr->map()->EnumLength();
    // kInvalidEnumCacheSentinel: no cached enum length -> not simple.
    if (enum_length == kInvalidEnumCacheSentinel) return false;
    ASSERT(!curr->HasNamedInterceptor());
    ASSERT(!curr->HasIndexedInterceptor());
    ASSERT(!curr->IsAccessCheckNeeded());
    if (curr->NumberOfEnumElements() > 0) return false;
    // Prototypes must contribute nothing enumerable.
    if (curr != this && enum_length != 0) return false;
  }
  return true;
}


// Counts descriptors that pass |filter| (attribute bits that must NOT be
// set; SYMBOLIC additionally excludes symbol-named keys). |which| selects
// between all descriptors and only own descriptors.
int Map::NumberOfDescribedProperties(DescriptorFlag which,
                                     PropertyAttributes filter) {
  int result = 0;
  DescriptorArray* descs = instance_descriptors();
  int limit = which == ALL_DESCRIPTORS
      ? descs->number_of_descriptors()
      : NumberOfOwnDescriptors();
  for (int i = 0; i < limit; i++) {
    if ((descs->GetDetails(i).attributes() & filter) == 0 &&
        ((filter & SYMBOLIC) == 0 || !descs->GetKey(i)->IsSymbol())) {
      result++;
    }
  }
  return result;
}


// Returns the smallest field index not used by any own FIELD descriptor,
// i.e. where the next in-object/backing-store property slot would go.
int Map::NextFreePropertyIndex() {
  int max_index = -1;
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DescriptorArray* descs = instance_descriptors();
  for (int i = 0; i < number_of_own_descriptors; i++) {
    if (descs->GetType(i) == FIELD) {
      int current_index = descs->GetFieldIndex(i);
      if (current_index > max_index) max_index = current_index;
    }
  }
  return max_index + 1;
}
// Returns the AccessorDescriptor of the own CALLBACKS descriptor named
// |name|, or NULL when no such descriptor exists on this map.
AccessorDescriptor* Map::FindAccessor(Name* name) {
  DescriptorArray* descs = instance_descriptors();
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  for (int i = 0; i < number_of_own_descriptors; i++) {
    if (descs->GetType(i) == CALLBACKS && name->Equals(descs->GetKey(i))) {
      return descs->GetCallbacks(i);
    }
  }
  return NULL;
}


// Looks up |name| on the receiver itself (no prototype walk, except through
// the global proxy and, when requested, hidden prototypes). Results are
// reported through |result|: NotFound, HandlerResult (proxies),
// InterceptorResult, or a real named property.
void JSReceiver::LocalLookup(
    Name* name, LookupResult* result, bool search_hidden_prototypes) {
  ASSERT(name->IsName());

  Heap* heap = GetHeap();

  if (IsJSGlobalProxy()) {
    // The global proxy is transparent: forward to the actual global object.
    Object* proto = GetPrototype();
    if (proto->IsNull()) return result->NotFound();
    ASSERT(proto->IsJSGlobalObject());
    return JSReceiver::cast(proto)->LocalLookup(
        name, result, search_hidden_prototypes);
  }

  if (IsJSProxy()) {
    result->HandlerResult(JSProxy::cast(this));
    return;
  }

  // Do not use inline caching if the object is a non-global object
  // that requires access checks.
  if (IsAccessCheckNeeded()) {
    result->DisallowCaching();
  }

  JSObject* js_object = JSObject::cast(this);

  // Check for lookup interceptor except when bootstrapping.
  if (js_object->HasNamedInterceptor() &&
      !heap->isolate()->bootstrapper()->IsActive()) {
    result->InterceptorResult(js_object);
    return;
  }

  js_object->LocalLookupRealNamedProperty(name, result);
  if (result->IsFound() || !search_hidden_prototypes) return;

  // Not found locally: continue into a hidden prototype if there is one
  // (hidden prototypes behave as part of the receiver).
  Object* proto = js_object->GetPrototype();
  if (!proto->IsJSReceiver()) return;
  JSReceiver* receiver = JSReceiver::cast(proto);
  if (receiver->map()->is_hidden_prototype()) {
    receiver->LocalLookup(name, result, search_hidden_prototypes);
  }
}


// Full prototype-chain lookup of |name| per ES3 8.6.2.4; stops at the first
// object on the chain that reports the property.
void JSReceiver::Lookup(Name* name, LookupResult* result) {
  // Ecma-262 3rd 8.6.2.4
  Heap* heap = GetHeap();
  for (Object* current = this;
       current != heap->null_value();
       current = JSObject::cast(current)->GetPrototype()) {
    JSReceiver::cast(current)->LocalLookup(name, result, false);
    if (result->IsFound()) return;
  }
  result->NotFound();
}
// Search object and its prototype chain for callback properties; stops at
// the first CALLBACKS-typed result, otherwise reports NotFound.
void JSObject::LookupCallbackProperty(Name* name, LookupResult* result) {
  Heap* heap = GetHeap();
  for (Object* current = this;
       current != heap->null_value() && current->IsJSObject();
       current = JSObject::cast(current)->GetPrototype()) {
    JSObject::cast(current)->LocalLookupRealNamedProperty(name, result);
    if (result->IsPropertyCallbacks()) return;
  }
  result->NotFound();
}


// Try to update an accessor in an elements dictionary. Return true if the
// update succeeded (an existing CALLBACKS AccessorPair entry was rewritten
// in place, updating attributes if they changed), and false otherwise.
static bool UpdateGetterSetterInDictionary(
    SeededNumberDictionary* dictionary,
    uint32_t index,
    Object* getter,
    Object* setter,
    PropertyAttributes attributes) {
  int entry = dictionary->FindEntry(index);
  if (entry != SeededNumberDictionary::kNotFound) {
    Object* result = dictionary->ValueAt(entry);
    PropertyDetails details = dictionary->DetailsAt(entry);
    if (details.type() == CALLBACKS && result->IsAccessorPair()) {
      ASSERT(!details.IsDontDelete());
      if (details.attributes() != attributes) {
        // Third PropertyDetails argument is presumably the dictionary
        // storage index being preserved — confirm against PropertyDetails.
        dictionary->DetailsAtPut(
            entry,
            PropertyDetails(attributes, CALLBACKS, index));
      }
      AccessorPair::cast(result)->SetComponents(getter, setter);
      return true;
    }
  }
  return false;
}
// Installs a getter/setter pair for element |index| on |object|.
// Fast paths: external-array elements silently ignore accessors; an existing
// AccessorPair in a dictionary backing store (including an arguments
// dictionary) is updated in place. Otherwise a fresh AccessorPair is
// allocated and installed via SetElementCallback.
void JSObject::DefineElementAccessor(Handle<JSObject> object,
                                     uint32_t index,
                                     Handle<Object> getter,
                                     Handle<Object> setter,
                                     PropertyAttributes attributes,
                                     v8::AccessControl access_control) {
  switch (object->GetElementsKind()) {
    case FAST_SMI_ELEMENTS:
    case FAST_ELEMENTS:
    case FAST_DOUBLE_ELEMENTS:
    case FAST_HOLEY_SMI_ELEMENTS:
    case FAST_HOLEY_ELEMENTS:
    case FAST_HOLEY_DOUBLE_ELEMENTS:
      break;
    case EXTERNAL_PIXEL_ELEMENTS:
    case EXTERNAL_BYTE_ELEMENTS:
    case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
    case EXTERNAL_SHORT_ELEMENTS:
    case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
    case EXTERNAL_INT_ELEMENTS:
    case EXTERNAL_UNSIGNED_INT_ELEMENTS:
    case EXTERNAL_FLOAT_ELEMENTS:
    case EXTERNAL_DOUBLE_ELEMENTS:
      // Ignore getters and setters on pixel and external array elements.
      return;
    case DICTIONARY_ELEMENTS:
      if (UpdateGetterSetterInDictionary(object->element_dictionary(),
                                         index,
                                         *getter,
                                         *setter,
                                         attributes)) {
        return;
      }
      break;
    case NON_STRICT_ARGUMENTS_ELEMENTS: {
      // Ascertain whether we have read-only properties or an existing
      // getter/setter pair in an arguments elements dictionary backing
      // store.
      FixedArray* parameter_map = FixedArray::cast(object->elements());
      uint32_t length = parameter_map->length();
      // Aliased parameters live at slot index + 2; a non-hole entry means
      // the element is still an aliased formal parameter.
      Object* probe =
          index < (length - 2) ? parameter_map->get(index + 2) : NULL;
      if (probe == NULL || probe->IsTheHole()) {
        FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
        if (arguments->IsDictionary()) {
          SeededNumberDictionary* dictionary =
              SeededNumberDictionary::cast(arguments);
          if (UpdateGetterSetterInDictionary(dictionary,
                                             index,
                                             *getter,
                                             *setter,
                                             attributes)) {
            return;
          }
        }
      }
      break;
    }
  }

  Isolate* isolate = object->GetIsolate();
  Handle<AccessorPair> accessors = isolate->factory()->NewAccessorPair();
  accessors->SetComponents(*getter, *setter);
  accessors->set_access_flags(access_control);

  SetElementCallback(object, index, accessors, attributes);
}
// e.g. have to fall back to the slow case while adding a setter after 6149 // successfully reusing a map transition for a getter. Nevertheless, this is 6150 // OK, because the assertion only holds for the whole addition of both 6151 // accessors, not for the addition of each part. See first comment in 6152 // DefinePropertyAccessor below. 6153 Object* obj = result.GetCallbackObject(); 6154 if (obj->IsAccessorPair()) { 6155 return AccessorPair::Copy(handle(AccessorPair::cast(obj), isolate)); 6156 } 6157 } 6158 return isolate->factory()->NewAccessorPair(); 6159 } 6160 6161 6162 void JSObject::DefinePropertyAccessor(Handle<JSObject> object, 6163 Handle<Name> name, 6164 Handle<Object> getter, 6165 Handle<Object> setter, 6166 PropertyAttributes attributes, 6167 v8::AccessControl access_control) { 6168 // We could assert that the property is configurable here, but we would need 6169 // to do a lookup, which seems to be a bit of overkill. 6170 bool only_attribute_changes = getter->IsNull() && setter->IsNull(); 6171 if (object->HasFastProperties() && !only_attribute_changes && 6172 access_control == v8::DEFAULT && 6173 (object->map()->NumberOfOwnDescriptors() <= kMaxNumberOfDescriptors)) { 6174 bool getterOk = getter->IsNull() || 6175 DefineFastAccessor(object, name, ACCESSOR_GETTER, getter, attributes); 6176 bool setterOk = !getterOk || setter->IsNull() || 6177 DefineFastAccessor(object, name, ACCESSOR_SETTER, setter, attributes); 6178 if (getterOk && setterOk) return; 6179 } 6180 6181 Handle<AccessorPair> accessors = CreateAccessorPairFor(object, name); 6182 accessors->SetComponents(*getter, *setter); 6183 accessors->set_access_flags(access_control); 6184 6185 SetPropertyCallback(object, name, accessors, attributes); 6186 } 6187 6188 6189 bool JSObject::CanSetCallback(Name* name) { 6190 ASSERT(!IsAccessCheckNeeded() || 6191 GetIsolate()->MayNamedAccess(this, name, v8::ACCESS_SET)); 6192 6193 // Check if there is an API defined callback object which prohibits 6194 // 
callback overwriting in this object or its prototype chain. 6195 // This mechanism is needed for instance in a browser setting, where 6196 // certain accessors such as window.location should not be allowed 6197 // to be overwritten because allowing overwriting could potentially 6198 // cause security problems. 6199 LookupResult callback_result(GetIsolate()); 6200 LookupCallbackProperty(name, &callback_result); 6201 if (callback_result.IsFound()) { 6202 Object* obj = callback_result.GetCallbackObject(); 6203 if (obj->IsAccessorInfo()) { 6204 return !AccessorInfo::cast(obj)->prohibits_overwriting(); 6205 } 6206 if (obj->IsAccessorPair()) { 6207 return !AccessorPair::cast(obj)->prohibits_overwriting(); 6208 } 6209 } 6210 return true; 6211 } 6212 6213 6214 bool Map::DictionaryElementsInPrototypeChainOnly() { 6215 Heap* heap = GetHeap(); 6216 6217 if (IsDictionaryElementsKind(elements_kind())) { 6218 return false; 6219 } 6220 6221 for (Object* prototype = this->prototype(); 6222 prototype != heap->null_value(); 6223 prototype = prototype->GetPrototype(GetIsolate())) { 6224 if (prototype->IsJSProxy()) { 6225 // Be conservative, don't walk into proxies. 6226 return true; 6227 } 6228 6229 if (IsDictionaryElementsKind( 6230 JSObject::cast(prototype)->map()->elements_kind())) { 6231 return true; 6232 } 6233 } 6234 6235 return false; 6236 } 6237 6238 6239 void JSObject::SetElementCallback(Handle<JSObject> object, 6240 uint32_t index, 6241 Handle<Object> structure, 6242 PropertyAttributes attributes) { 6243 Heap* heap = object->GetHeap(); 6244 PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0); 6245 6246 // Normalize elements to make this operation simple. 6247 bool had_dictionary_elements = object->HasDictionaryElements(); 6248 Handle<SeededNumberDictionary> dictionary = NormalizeElements(object); 6249 ASSERT(object->HasDictionaryElements() || 6250 object->HasDictionaryArgumentsElements()); 6251 // Update the dictionary with the new CALLBACKS property. 
// Stores |structure| (an AccessorPair or AccessorInfo) as the CALLBACKS
// entry for element |index|, normalizing elements to dictionary mode first.
// Handles both plain dictionaries and the arguments parameter-map case.
void JSObject::SetElementCallback(Handle<JSObject> object,
                                  uint32_t index,
                                  Handle<Object> structure,
                                  PropertyAttributes attributes) {
  Heap* heap = object->GetHeap();
  PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);

  // Normalize elements to make this operation simple.
  bool had_dictionary_elements = object->HasDictionaryElements();
  Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
  ASSERT(object->HasDictionaryElements() ||
         object->HasDictionaryArgumentsElements());
  // Update the dictionary with the new CALLBACKS property.
  dictionary = SeededNumberDictionary::Set(dictionary, index, structure,
                                           details);
  // Accessor elements must never be turned back into a fast backing store.
  dictionary->set_requires_slow_elements();

  // Update the dictionary backing store on the object.
  if (object->elements()->map() == heap->non_strict_arguments_elements_map()) {
    // Also delete any parameter alias.
    //
    // TODO(kmillikin): when deleting the last parameter alias we could
    // switch to a direct backing store without the parameter map. This
    // would allow GC of the context.
    FixedArray* parameter_map = FixedArray::cast(object->elements());
    if (index < static_cast<uint32_t>(parameter_map->length()) - 2) {
      parameter_map->set(index + 2, heap->the_hole_value());
    }
    // Slot 1 of the parameter map holds the actual backing store.
    parameter_map->set(1, *dictionary);
  } else {
    object->set_elements(*dictionary);

    if (!had_dictionary_elements) {
      // KeyedStoreICs (at least the non-generic ones) need a reset.
      heap->ClearAllICsByKind(Code::KEYED_STORE_IC);
    }
  }
}
// Stores |structure| as the CALLBACKS entry for named property |name|,
// normalizing properties to dictionary mode first. Global objects get a
// fresh map (and global deopt) so old inline caches cannot keep a stale
// property-cell reference.
void JSObject::SetPropertyCallback(Handle<JSObject> object,
                                   Handle<Name> name,
                                   Handle<Object> structure,
                                   PropertyAttributes attributes) {
  // Normalize object to make this operation simple.
  NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);

  // For the global object allocate a new map to invalidate the global inline
  // caches which have a global property cell reference directly in the code.
  if (object->IsGlobalObject()) {
    Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
    ASSERT(new_map->is_dictionary_map());
    object->set_map(*new_map);

    // When running crankshaft, changing the map is not enough. We
    // need to deoptimize all functions that rely on this global
    // object.
    Deoptimizer::DeoptimizeGlobalObject(*object);
  }

  // Update the dictionary with the new CALLBACKS property.
  PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);
  SetNormalizedProperty(object, name, structure, details);
}


// Public entry point for __defineGetter__/__defineSetter__ and
// Object.defineProperty accessor definitions. Performs access checks,
// unwraps the global proxy, dispatches to element vs. named-property
// definition, and emits an Object.observe change record when observed.
void JSObject::DefineAccessor(Handle<JSObject> object,
                              Handle<Name> name,
                              Handle<Object> getter,
                              Handle<Object> setter,
                              PropertyAttributes attributes,
                              v8::AccessControl access_control) {
  Isolate* isolate = object->GetIsolate();
  // Check access rights if needed.
  if (object->IsAccessCheckNeeded() &&
      !isolate->MayNamedAccess(*object, *name, v8::ACCESS_SET)) {
    isolate->ReportFailedAccessCheck(*object, v8::ACCESS_SET);
    return;
  }

  if (object->IsJSGlobalProxy()) {
    // Define on the real global object behind the proxy.
    Handle<Object> proto(object->GetPrototype(), isolate);
    if (proto->IsNull()) return;
    ASSERT(proto->IsJSGlobalObject());
    DefineAccessor(Handle<JSObject>::cast(proto),
                   name,
                   getter,
                   setter,
                   attributes,
                   access_control);
    return;
  }

  // Make sure that the top context does not change when doing callbacks or
  // interceptor calls.
  AssertNoContextChange ncc(isolate);

  // Try to flatten before operating on the string.
  if (name->IsString()) String::cast(*name)->TryFlatten();

  if (!object->CanSetCallback(*name)) return;

  uint32_t index = 0;
  bool is_element = name->AsArrayIndex(&index);

  // Record the old value for the Object.observe change record, but only
  // for data properties (an existing accessor has no observable old value).
  Handle<Object> old_value = isolate->factory()->the_hole_value();
  bool is_observed = FLAG_harmony_observation &&
                     object->map()->is_observed() &&
                     *name != isolate->heap()->hidden_string();
  bool preexists = false;
  if (is_observed) {
    if (is_element) {
      preexists = HasLocalElement(object, index);
      if (preexists && object->GetLocalElementAccessorPair(index) == NULL) {
        old_value = Object::GetElement(isolate, object, index);
      }
    } else {
      LookupResult lookup(isolate);
      object->LocalLookup(*name, &lookup, true);
      preexists = lookup.IsProperty();
      if (preexists && lookup.IsDataProperty()) {
        old_value = Object::GetProperty(object, name);
      }
    }
  }

  if (is_element) {
    DefineElementAccessor(
        object, index, getter, setter, attributes, access_control);
  } else {
    DefinePropertyAccessor(
        object, name, getter, setter, attributes, access_control);
  }

  if (is_observed) {
    const char* type = preexists ? "reconfigure" : "add";
    EnqueueChangeRecord(object, type, name, old_value);
  }
}
// Follows an existing map transition if the transition target already holds
// exactly the same accessor with the same attributes at |target_descriptor|;
// returns true on success, false to signal fallback to the slow path.
static bool TryAccessorTransition(JSObject* self,
                                  Map* transitioned_map,
                                  int target_descriptor,
                                  AccessorComponent component,
                                  Object* accessor,
                                  PropertyAttributes attributes) {
  DescriptorArray* descs = transitioned_map->instance_descriptors();
  PropertyDetails details = descs->GetDetails(target_descriptor);

  // If the transition target was not callbacks, fall back to the slow case.
  if (details.type() != CALLBACKS) return false;
  Object* descriptor = descs->GetCallbacksObject(target_descriptor);
  if (!descriptor->IsAccessorPair()) return false;

  Object* target_accessor = AccessorPair::cast(descriptor)->get(component);
  PropertyAttributes target_attributes = details.attributes();

  // Reuse transition if adding same accessor with same attributes.
  if (target_accessor == accessor && target_attributes == attributes) {
    self->set_map(transitioned_map);
    return true;
  }

  // If either not the same accessor, or not the same attributes, fall back to
  // the slow case.
  return false;
}


// Raw-heap helper: copies |map| with a CALLBACKS descriptor for |name|
// inserted, registering a transition on the source map.
static MaybeObject* CopyInsertDescriptor(Map* map,
                                         Name* name,
                                         AccessorPair* accessors,
                                         PropertyAttributes attributes) {
  CallbacksDescriptor new_accessors_desc(name, accessors, attributes);
  return map->CopyInsertDescriptor(&new_accessors_desc, INSERT_TRANSITION);
}


// Handlified wrapper around the raw CopyInsertDescriptor above; retries on
// allocation failure via CALL_HEAP_FUNCTION.
static Handle<Map> CopyInsertDescriptor(Handle<Map> map,
                                        Handle<Name> name,
                                        Handle<AccessorPair> accessors,
                                        PropertyAttributes attributes) {
  CALL_HEAP_FUNCTION(map->GetIsolate(),
                     CopyInsertDescriptor(*map, *name, *accessors, attributes),
                     Map);
}


// Fast-path accessor definition: keeps |object| in fast-properties mode by
// reusing or creating map transitions. Returns true when the accessor was
// installed this way; false means the caller must use the slow (normalized)
// path. Only called for fast-mode objects with v8::DEFAULT access control.
bool JSObject::DefineFastAccessor(Handle<JSObject> object,
                                  Handle<Name> name,
                                  AccessorComponent component,
                                  Handle<Object> accessor,
                                  PropertyAttributes attributes) {
  ASSERT(accessor->IsSpecFunction() || accessor->IsUndefined());
  Isolate* isolate = object->GetIsolate();
  LookupResult result(isolate);
  object->LocalLookup(*name, &result);

  // An existing non-callbacks property cannot be upgraded on the fast path.
  if (result.IsFound() && !result.IsPropertyCallbacks()) {
    return false;
  }

  // Return success if the same accessor with the same attributes already exist.
  AccessorPair* source_accessors = NULL;
  if (result.IsPropertyCallbacks()) {
    Object* callback_value = result.GetCallbackObject();
    if (callback_value->IsAccessorPair()) {
      source_accessors = AccessorPair::cast(callback_value);
      Object* entry = source_accessors->get(component);
      if (entry == *accessor && result.GetAttributes() == attributes) {
        return true;
      }
    } else {
      return false;
    }

    int descriptor_number = result.GetDescriptorIndex();

    // |result| is reused for the transition lookup below.
    object->map()->LookupTransition(*object, *name, &result);

    if (result.IsFound()) {
      Map* target = result.GetTransitionTarget();
      ASSERT(target->NumberOfOwnDescriptors() ==
             object->map()->NumberOfOwnDescriptors());
      // This works since descriptors are sorted in order of addition.
      ASSERT(object->map()->instance_descriptors()->
             GetKey(descriptor_number) == *name);
      return TryAccessorTransition(*object, target, descriptor_number,
                                   component, *accessor, attributes);
    }
  } else {
    // If not, lookup a transition.
    object->map()->LookupTransition(*object, *name, &result);

    // If there is a transition, try to follow it.
    if (result.IsFound()) {
      Map* target = result.GetTransitionTarget();
      int descriptor_number = target->LastAdded();
      ASSERT(target->instance_descriptors()->GetKey(descriptor_number)
             ->Equals(*name));
      return TryAccessorTransition(*object, target, descriptor_number,
                                   component, *accessor, attributes);
    }
  }

  // If there is no transition yet, add a transition to the a new accessor pair
  // containing the accessor. Allocate a new pair if there were no source
  // accessors. Otherwise, copy the pair and modify the accessor.
  Handle<AccessorPair> accessors = source_accessors != NULL
      ? AccessorPair::Copy(Handle<AccessorPair>(source_accessors))
      : isolate->factory()->NewAccessorPair();
  accessors->set(component, *accessor);
  Handle<Map> new_map = CopyInsertDescriptor(Handle<Map>(object->map()),
                                             name, accessors, attributes);
  object->set_map(*new_map);
  return true;
}
// Installs an API-defined AccessorInfo callback on |object| under the name
// carried by |info|. Returns |object| on success, undefined when the
// definition is refused (access check, non-configurable conflict, external
// array elements, or index on a JSArray), or an empty handle when an access
// check scheduled an exception.
Handle<Object> JSObject::SetAccessor(Handle<JSObject> object,
                                     Handle<AccessorInfo> info) {
  Isolate* isolate = object->GetIsolate();
  Factory* factory = isolate->factory();
  Handle<Name> name(Name::cast(info->name()));

  // Check access rights if needed.
  if (object->IsAccessCheckNeeded() &&
      !isolate->MayNamedAccess(*object, *name, v8::ACCESS_SET)) {
    isolate->ReportFailedAccessCheck(*object, v8::ACCESS_SET);
    RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return factory->undefined_value();
  }

  if (object->IsJSGlobalProxy()) {
    // Install on the real global object behind the proxy.
    Handle<Object> proto(object->GetPrototype(), isolate);
    if (proto->IsNull()) return object;
    ASSERT(proto->IsJSGlobalObject());
    return SetAccessor(Handle<JSObject>::cast(proto), info);
  }

  // Make sure that the top context does not change when doing callbacks or
  // interceptor calls.
  AssertNoContextChange ncc(isolate);

  // Try to flatten before operating on the string.
  if (name->IsString()) FlattenString(Handle<String>::cast(name));

  if (!object->CanSetCallback(*name)) return factory->undefined_value();

  uint32_t index = 0;
  bool is_element = name->AsArrayIndex(&index);

  if (is_element) {
    // Indexed accessors are not supported on JSArrays (length interactions).
    if (object->IsJSArray()) return factory->undefined_value();

    // Accessors overwrite previous callbacks (cf. with getters/setters).
    switch (object->GetElementsKind()) {
      case FAST_SMI_ELEMENTS:
      case FAST_ELEMENTS:
      case FAST_DOUBLE_ELEMENTS:
      case FAST_HOLEY_SMI_ELEMENTS:
      case FAST_HOLEY_ELEMENTS:
      case FAST_HOLEY_DOUBLE_ELEMENTS:
        break;
      case EXTERNAL_PIXEL_ELEMENTS:
      case EXTERNAL_BYTE_ELEMENTS:
      case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
      case EXTERNAL_SHORT_ELEMENTS:
      case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
      case EXTERNAL_INT_ELEMENTS:
      case EXTERNAL_UNSIGNED_INT_ELEMENTS:
      case EXTERNAL_FLOAT_ELEMENTS:
      case EXTERNAL_DOUBLE_ELEMENTS:
        // Ignore getters and setters on pixel and external array
        // elements.
        return factory->undefined_value();
      case DICTIONARY_ELEMENTS:
        break;
      case NON_STRICT_ARGUMENTS_ELEMENTS:
        UNIMPLEMENTED();
        break;
    }

    SetElementCallback(object, index, info, info->property_attributes());
  } else {
    // Lookup the name.
    LookupResult result(isolate);
    object->LocalLookup(*name, &result, true);
    // ES5 forbids turning a property into an accessor if it's not
    // configurable (that is IsDontDelete in ES3 and v8), see 8.6.1 (Table 5).
    if (result.IsFound() && (result.IsReadOnly() || result.IsDontDelete())) {
      return factory->undefined_value();
    }

    SetPropertyCallback(object, name, info, info->property_attributes());
  }

  return object;
}
6591 if (object->IsAccessCheckNeeded() && 6592 !isolate->MayNamedAccess(*object, *name, v8::ACCESS_HAS)) { 6593 isolate->ReportFailedAccessCheck(*object, v8::ACCESS_HAS); 6594 RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); 6595 return isolate->factory()->undefined_value(); 6596 } 6597 6598 // Make the lookup and include prototypes. 6599 uint32_t index = 0; 6600 if (name->AsArrayIndex(&index)) { 6601 for (Handle<Object> obj = object; 6602 !obj->IsNull(); 6603 obj = handle(JSReceiver::cast(*obj)->GetPrototype(), isolate)) { 6604 if (obj->IsJSObject() && JSObject::cast(*obj)->HasDictionaryElements()) { 6605 JSObject* js_object = JSObject::cast(*obj); 6606 SeededNumberDictionary* dictionary = js_object->element_dictionary(); 6607 int entry = dictionary->FindEntry(index); 6608 if (entry != SeededNumberDictionary::kNotFound) { 6609 Object* element = dictionary->ValueAt(entry); 6610 if (dictionary->DetailsAt(entry).type() == CALLBACKS && 6611 element->IsAccessorPair()) { 6612 return handle(AccessorPair::cast(element)->GetComponent(component), 6613 isolate); 6614 } 6615 } 6616 } 6617 } 6618 } else { 6619 for (Handle<Object> obj = object; 6620 !obj->IsNull(); 6621 obj = handle(JSReceiver::cast(*obj)->GetPrototype(), isolate)) { 6622 LookupResult result(isolate); 6623 JSReceiver::cast(*obj)->LocalLookup(*name, &result); 6624 if (result.IsFound()) { 6625 if (result.IsReadOnly()) return isolate->factory()->undefined_value(); 6626 if (result.IsPropertyCallbacks()) { 6627 Object* obj = result.GetCallbackObject(); 6628 if (obj->IsAccessorPair()) { 6629 return handle(AccessorPair::cast(obj)->GetComponent(component), 6630 isolate); 6631 } 6632 } 6633 } 6634 } 6635 } 6636 return isolate->factory()->undefined_value(); 6637 } 6638 6639 6640 Object* JSObject::SlowReverseLookup(Object* value) { 6641 if (HasFastProperties()) { 6642 int number_of_own_descriptors = map()->NumberOfOwnDescriptors(); 6643 DescriptorArray* descs = map()->instance_descriptors(); 6644 for (int i = 0; i < 
number_of_own_descriptors; i++) { 6645 if (descs->GetType(i) == FIELD) { 6646 Object* property = RawFastPropertyAt(descs->GetFieldIndex(i)); 6647 if (FLAG_track_double_fields && 6648 descs->GetDetails(i).representation().IsDouble()) { 6649 ASSERT(property->IsHeapNumber()); 6650 if (value->IsNumber() && property->Number() == value->Number()) { 6651 return descs->GetKey(i); 6652 } 6653 } else if (property == value) { 6654 return descs->GetKey(i); 6655 } 6656 } else if (descs->GetType(i) == CONSTANT) { 6657 if (descs->GetConstant(i) == value) { 6658 return descs->GetKey(i); 6659 } 6660 } 6661 } 6662 return GetHeap()->undefined_value(); 6663 } else { 6664 return property_dictionary()->SlowReverseLookup(value); 6665 } 6666 } 6667 6668 6669 Handle<Map> Map::RawCopy(Handle<Map> map, 6670 int instance_size) { 6671 CALL_HEAP_FUNCTION(map->GetIsolate(), 6672 map->RawCopy(instance_size), 6673 Map); 6674 } 6675 6676 6677 MaybeObject* Map::RawCopy(int instance_size) { 6678 Map* result; 6679 MaybeObject* maybe_result = 6680 GetHeap()->AllocateMap(instance_type(), instance_size); 6681 if (!maybe_result->To(&result)) return maybe_result; 6682 6683 result->set_prototype(prototype()); 6684 result->set_constructor(constructor()); 6685 result->set_bit_field(bit_field()); 6686 result->set_bit_field2(bit_field2()); 6687 int new_bit_field3 = bit_field3(); 6688 new_bit_field3 = OwnsDescriptors::update(new_bit_field3, true); 6689 new_bit_field3 = NumberOfOwnDescriptorsBits::update(new_bit_field3, 0); 6690 new_bit_field3 = EnumLengthBits::update(new_bit_field3, 6691 kInvalidEnumCacheSentinel); 6692 new_bit_field3 = Deprecated::update(new_bit_field3, false); 6693 new_bit_field3 = IsUnstable::update(new_bit_field3, false); 6694 result->set_bit_field3(new_bit_field3); 6695 return result; 6696 } 6697 6698 6699 Handle<Map> Map::CopyNormalized(Handle<Map> map, 6700 PropertyNormalizationMode mode, 6701 NormalizedMapSharingMode sharing) { 6702 int new_instance_size = map->instance_size(); 6703 if 
(mode == CLEAR_INOBJECT_PROPERTIES) { 6704 new_instance_size -= map->inobject_properties() * kPointerSize; 6705 } 6706 6707 Handle<Map> result = Map::RawCopy(map, new_instance_size); 6708 6709 if (mode != CLEAR_INOBJECT_PROPERTIES) { 6710 result->set_inobject_properties(map->inobject_properties()); 6711 } 6712 6713 result->set_is_shared(sharing == SHARED_NORMALIZED_MAP); 6714 result->set_dictionary_map(true); 6715 result->set_migration_target(false); 6716 6717 #ifdef VERIFY_HEAP 6718 if (FLAG_verify_heap && result->is_shared()) { 6719 result->SharedMapVerify(); 6720 } 6721 #endif 6722 6723 return result; 6724 } 6725 6726 6727 Handle<Map> Map::CopyDropDescriptors(Handle<Map> map) { 6728 CALL_HEAP_FUNCTION(map->GetIsolate(), map->CopyDropDescriptors(), Map); 6729 } 6730 6731 6732 MaybeObject* Map::CopyDropDescriptors() { 6733 Map* result; 6734 MaybeObject* maybe_result = RawCopy(instance_size()); 6735 if (!maybe_result->To(&result)) return maybe_result; 6736 6737 // Please note instance_type and instance_size are set when allocated. 6738 result->set_inobject_properties(inobject_properties()); 6739 result->set_unused_property_fields(unused_property_fields()); 6740 6741 result->set_pre_allocated_property_fields(pre_allocated_property_fields()); 6742 result->set_is_shared(false); 6743 result->ClearCodeCache(GetHeap()); 6744 NotifyLeafMapLayoutChange(); 6745 return result; 6746 } 6747 6748 6749 MaybeObject* Map::ShareDescriptor(DescriptorArray* descriptors, 6750 Descriptor* descriptor) { 6751 // Sanity check. This path is only to be taken if the map owns its descriptor 6752 // array, implying that its NumberOfOwnDescriptors equals the number of 6753 // descriptors in the descriptor array. 
6754 ASSERT(NumberOfOwnDescriptors() == 6755 instance_descriptors()->number_of_descriptors()); 6756 Map* result; 6757 MaybeObject* maybe_result = CopyDropDescriptors(); 6758 if (!maybe_result->To(&result)) return maybe_result; 6759 6760 Name* name = descriptor->GetKey(); 6761 6762 TransitionArray* transitions; 6763 MaybeObject* maybe_transitions = 6764 AddTransition(name, result, SIMPLE_TRANSITION); 6765 if (!maybe_transitions->To(&transitions)) return maybe_transitions; 6766 6767 int old_size = descriptors->number_of_descriptors(); 6768 6769 DescriptorArray* new_descriptors; 6770 6771 if (descriptors->NumberOfSlackDescriptors() > 0) { 6772 new_descriptors = descriptors; 6773 new_descriptors->Append(descriptor); 6774 } else { 6775 // Descriptor arrays grow by 50%. 6776 MaybeObject* maybe_descriptors = DescriptorArray::Allocate( 6777 GetIsolate(), old_size, old_size < 4 ? 1 : old_size / 2); 6778 if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors; 6779 6780 DescriptorArray::WhitenessWitness witness(new_descriptors); 6781 6782 // Copy the descriptors, inserting a descriptor. 6783 for (int i = 0; i < old_size; ++i) { 6784 new_descriptors->CopyFrom(i, descriptors, i, witness); 6785 } 6786 6787 new_descriptors->Append(descriptor, witness); 6788 6789 if (old_size > 0) { 6790 // If the source descriptors had an enum cache we copy it. This ensures 6791 // that the maps to which we push the new descriptor array back can rely 6792 // on a cache always being available once it is set. If the map has more 6793 // enumerated descriptors than available in the original cache, the cache 6794 // will be lazily replaced by the extended cache when needed. 6795 if (descriptors->HasEnumCache()) { 6796 new_descriptors->CopyEnumCacheFrom(descriptors); 6797 } 6798 6799 Map* map; 6800 // Replace descriptors by new_descriptors in all maps that share it. 
6801 for (Object* current = GetBackPointer(); 6802 !current->IsUndefined(); 6803 current = map->GetBackPointer()) { 6804 map = Map::cast(current); 6805 if (map->instance_descriptors() != descriptors) break; 6806 map->set_instance_descriptors(new_descriptors); 6807 } 6808 6809 set_instance_descriptors(new_descriptors); 6810 } 6811 } 6812 6813 result->SetBackPointer(this); 6814 result->InitializeDescriptors(new_descriptors); 6815 ASSERT(result->NumberOfOwnDescriptors() == NumberOfOwnDescriptors() + 1); 6816 6817 set_transitions(transitions); 6818 set_owns_descriptors(false); 6819 6820 return result; 6821 } 6822 6823 6824 Handle<Map> Map::CopyReplaceDescriptors(Handle<Map> map, 6825 Handle<DescriptorArray> descriptors, 6826 TransitionFlag flag, 6827 Handle<Name> name) { 6828 CALL_HEAP_FUNCTION(map->GetIsolate(), 6829 map->CopyReplaceDescriptors(*descriptors, flag, *name), 6830 Map); 6831 } 6832 6833 6834 MaybeObject* Map::CopyReplaceDescriptors(DescriptorArray* descriptors, 6835 TransitionFlag flag, 6836 Name* name, 6837 SimpleTransitionFlag simple_flag) { 6838 ASSERT(descriptors->IsSortedNoDuplicates()); 6839 6840 Map* result; 6841 MaybeObject* maybe_result = CopyDropDescriptors(); 6842 if (!maybe_result->To(&result)) return maybe_result; 6843 6844 result->InitializeDescriptors(descriptors); 6845 6846 if (flag == INSERT_TRANSITION && CanHaveMoreTransitions()) { 6847 TransitionArray* transitions; 6848 MaybeObject* maybe_transitions = AddTransition(name, result, simple_flag); 6849 if (!maybe_transitions->To(&transitions)) return maybe_transitions; 6850 set_transitions(transitions); 6851 result->SetBackPointer(this); 6852 } else { 6853 descriptors->InitializeRepresentations(Representation::Tagged()); 6854 } 6855 6856 return result; 6857 } 6858 6859 6860 // Since this method is used to rewrite an existing transition tree, it can 6861 // always insert transitions without checking. 
// Copies |map| and installs |descriptors| on the copy, exposing descriptors
// up to and including |new_descriptor|. The copy is wired into |map|'s
// transition tree via a simple transition keyed on the new descriptor's name.
Handle<Map> Map::CopyInstallDescriptors(Handle<Map> map,
                                        int new_descriptor,
                                        Handle<DescriptorArray> descriptors) {
  ASSERT(descriptors->IsSortedNoDuplicates());

  Handle<Map> result = Map::CopyDropDescriptors(map);

  result->InitializeDescriptors(*descriptors);
  result->SetNumberOfOwnDescriptors(new_descriptor + 1);

  // A FIELD descriptor consumes one property slot; grow by kFieldsAdded when
  // the parent map had no unused slots left.
  int unused_property_fields = map->unused_property_fields();
  if (descriptors->GetDetails(new_descriptor).type() == FIELD) {
    unused_property_fields = map->unused_property_fields() - 1;
    if (unused_property_fields < 0) {
      unused_property_fields += JSObject::kFieldsAdded;
    }
  }

  result->set_unused_property_fields(unused_property_fields);
  // The descriptor array is shared with |map|; the copy does not own it.
  result->set_owns_descriptors(false);

  Handle<Name> name = handle(descriptors->GetKey(new_descriptor));
  Handle<TransitionArray> transitions = Map::AddTransition(map, name, result,
                                                           SIMPLE_TRANSITION);

  map->set_transitions(*transitions);
  result->SetBackPointer(*map);

  return result;
}


// Copies this map with its elements kind changed to |kind|, optionally
// (INSERT_TRANSITION) registering the copy as this map's elements transition.
MaybeObject* Map::CopyAsElementsKind(ElementsKind kind, TransitionFlag flag) {
  if (flag == INSERT_TRANSITION) {
    ASSERT(!HasElementsTransition() ||
           ((elements_transition_map()->elements_kind() == DICTIONARY_ELEMENTS ||
             IsExternalArrayElementsKind(
                 elements_transition_map()->elements_kind())) &&
            (kind == DICTIONARY_ELEMENTS ||
             IsExternalArrayElementsKind(kind))));
    ASSERT(!IsFastElementsKind(kind) ||
           IsMoreGeneralElementsKindTransition(elements_kind(), kind));
    ASSERT(kind != elements_kind());
  }

  bool insert_transition =
      flag == INSERT_TRANSITION && !HasElementsTransition();

  if (insert_transition && owns_descriptors()) {
    // In case the map owned its own descriptors, share the descriptors and
    // transfer ownership to the new map.
    Map* new_map;
    MaybeObject* maybe_new_map = CopyDropDescriptors();
    if (!maybe_new_map->To(&new_map)) return maybe_new_map;

    MaybeObject* added_elements = set_elements_transition_map(new_map);
    if (added_elements->IsFailure()) return added_elements;

    new_map->set_elements_kind(kind);
    new_map->InitializeDescriptors(instance_descriptors());
    new_map->SetBackPointer(this);
    set_owns_descriptors(false);
    return new_map;
  }

  // In case the map did not own its own descriptors, a split is forced by
  // copying the map; creating a new descriptor array cell.
  // Create a new free-floating map only if we are not allowed to store it.
  Map* new_map;
  MaybeObject* maybe_new_map = Copy();
  if (!maybe_new_map->To(&new_map)) return maybe_new_map;

  new_map->set_elements_kind(kind);

  if (insert_transition) {
    MaybeObject* added_elements = set_elements_transition_map(new_map);
    if (added_elements->IsFailure()) return added_elements;
    new_map->SetBackPointer(this);
  }

  return new_map;
}


// Copies |map| into a new map with is_observed set, registering the copy
// under the observed symbol in |map|'s transition array.
Handle<Map> Map::CopyForObserved(Handle<Map> map) {
  ASSERT(!map->is_observed());

  Isolate* isolate = map->GetIsolate();

  // In case the map owned its own descriptors, share the descriptors and
  // transfer ownership to the new map.
  Handle<Map> new_map;
  if (map->owns_descriptors()) {
    new_map = Map::CopyDropDescriptors(map);
  } else {
    new_map = Map::Copy(map);
  }

  Handle<TransitionArray> transitions =
      Map::AddTransition(map, isolate->factory()->observed_symbol(), new_map,
                         FULL_TRANSITION);

  map->set_transitions(*transitions);

  new_map->set_is_observed();

  if (map->owns_descriptors()) {
    // Share the descriptor array and move ownership to the copy.
    new_map->InitializeDescriptors(map->instance_descriptors());
    map->set_owns_descriptors(false);
  }

  new_map->SetBackPointer(*map);
  return new_map;
}


// Copies this map, seeding the copy's descriptors from the constructor's
// initial map when properties were pre-allocated.
MaybeObject* Map::CopyWithPreallocatedFieldDescriptors() {
  if (pre_allocated_property_fields() == 0) return CopyDropDescriptors();

  // If the map has pre-allocated properties always start out with a descriptor
  // array describing these properties.
  ASSERT(constructor()->IsJSFunction());
  JSFunction* ctor = JSFunction::cast(constructor());
  Map* map = ctor->initial_map();
  DescriptorArray* descriptors = map->instance_descriptors();

  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
  DescriptorArray* new_descriptors;
  MaybeObject* maybe_descriptors =
      descriptors->CopyUpTo(number_of_own_descriptors);
  if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;

  return CopyReplaceDescriptors(new_descriptors, OMIT_TRANSITION);
}


// Handle-based wrapper around the raw Map::Copy below.
Handle<Map> Map::Copy(Handle<Map> map) {
  CALL_HEAP_FUNCTION(map->GetIsolate(), map->Copy(), Map);
}


// Copies this map together with a fresh copy of its own descriptors, without
// inserting a transition (the copy is free-floating).
MaybeObject* Map::Copy() {
  DescriptorArray* descriptors = instance_descriptors();
  DescriptorArray* new_descriptors;
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  MaybeObject* maybe_descriptors =
      descriptors->CopyUpTo(number_of_own_descriptors);
  if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;

  return CopyReplaceDescriptors(new_descriptors, OMIT_TRANSITION);
}


// Copies this map and appends |descriptor| to the copy's descriptor array.
// Fast path: when this map owns its descriptors and may grow its transition
// array, the descriptor array itself is shared via ShareDescriptor.
MaybeObject* Map::CopyAddDescriptor(Descriptor* descriptor,
                                    TransitionFlag flag) {
  DescriptorArray* descriptors = instance_descriptors();

  // Ensure the key is unique.
  MaybeObject* maybe_failure = descriptor->KeyToUniqueName();
  if (maybe_failure->IsFailure()) return maybe_failure;

  int old_size = NumberOfOwnDescriptors();
  int new_size = old_size + 1;

  if (flag == INSERT_TRANSITION &&
      owns_descriptors() &&
      CanHaveMoreTransitions()) {
    return ShareDescriptor(descriptors, descriptor);
  }

  DescriptorArray* new_descriptors;
  MaybeObject* maybe_descriptors =
      DescriptorArray::Allocate(GetIsolate(), old_size, 1);
  if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;

  DescriptorArray::WhitenessWitness witness(new_descriptors);

  // Copy the descriptors, inserting a descriptor.
  for (int i = 0; i < old_size; ++i) {
    new_descriptors->CopyFrom(i, descriptors, i, witness);
  }

  // If the source array had descriptors beyond our own (shared array), the
  // appended entry may be out of order and the copy must be re-sorted.
  if (old_size != descriptors->number_of_descriptors()) {
    new_descriptors->SetNumberOfDescriptors(new_size);
    new_descriptors->Set(old_size, descriptor, witness);
    new_descriptors->Sort();
  } else {
    new_descriptors->Append(descriptor, witness);
  }

  Name* key = descriptor->GetKey();
  return CopyReplaceDescriptors(new_descriptors, flag, key, SIMPLE_TRANSITION);
}


// Copies this map with |descriptor| inserted, replacing an existing
// descriptor with the same key if one is present.
MaybeObject* Map::CopyInsertDescriptor(Descriptor* descriptor,
                                       TransitionFlag flag) {
  DescriptorArray* old_descriptors = instance_descriptors();

  // Ensure the key is unique.
  MaybeObject* maybe_result = descriptor->KeyToUniqueName();
  if (maybe_result->IsFailure()) return maybe_result;

  // We replace the key if it is already present.
  int index = old_descriptors->SearchWithCache(descriptor->GetKey(), this);
  if (index != DescriptorArray::kNotFound) {
    return CopyReplaceDescriptor(old_descriptors, descriptor, index, flag);
  }
  return CopyAddDescriptor(descriptor, flag);
}


// Handle-based wrapper around the raw CopyUpToAddAttributes below.
Handle<DescriptorArray> DescriptorArray::CopyUpToAddAttributes(
    Handle<DescriptorArray> desc,
    int enumeration_index,
    PropertyAttributes attributes) {
  CALL_HEAP_FUNCTION(desc->GetIsolate(),
                     desc->CopyUpToAddAttributes(enumeration_index, attributes),
                     DescriptorArray);
}


// Copies the first |enumeration_index| descriptors into a new array, OR-ing
// |attributes| into each copied entry's property details.
MaybeObject* DescriptorArray::CopyUpToAddAttributes(
    int enumeration_index, PropertyAttributes attributes) {
  if (enumeration_index == 0) return GetHeap()->empty_descriptor_array();

  int size = enumeration_index;

  DescriptorArray* descriptors;
  MaybeObject* maybe_descriptors = Allocate(GetIsolate(), size);
  if (!maybe_descriptors->To(&descriptors)) return maybe_descriptors;
  DescriptorArray::WhitenessWitness witness(descriptors);

  if (attributes != NONE) {
    for (int i = 0; i < size; ++i) {
      Object* value = GetValue(i);
      PropertyDetails details = GetDetails(i);
      int mask = DONT_DELETE | DONT_ENUM;
      // READ_ONLY is an invalid attribute for JS setters/getters.
      if (details.type() != CALLBACKS || !value->IsAccessorPair()) {
        mask |= READ_ONLY;
      }
      details = details.CopyAddAttributes(
          static_cast<PropertyAttributes>(attributes & mask));
      Descriptor desc(GetKey(i), value, details);
      descriptors->Set(i, &desc, witness);
    }
  } else {
    // No attributes to add: plain element-wise copy.
    for (int i = 0; i < size; ++i) {
      descriptors->CopyFrom(i, this, i, witness);
    }
  }

  if (number_of_descriptors() != enumeration_index) descriptors->Sort();

  return descriptors;
}


// Copies this map with a new descriptor array in which |descriptor| replaces
// the entry at |insertion_index| (same key, per the ASSERT below).
MaybeObject* Map::CopyReplaceDescriptor(DescriptorArray* descriptors,
                                        Descriptor* descriptor,
                                        int insertion_index,
                                        TransitionFlag flag) {
  // Ensure the key is unique.
  MaybeObject* maybe_failure = descriptor->KeyToUniqueName();
  if (maybe_failure->IsFailure()) return maybe_failure;

  Name* key = descriptor->GetKey();
  ASSERT(key == descriptors->GetKey(insertion_index));

  int new_size = NumberOfOwnDescriptors();
  ASSERT(0 <= insertion_index && insertion_index < new_size);

  // NOTE(review): redundant with the range ASSERT directly above.
  ASSERT_LT(insertion_index, new_size);

  DescriptorArray* new_descriptors;
  MaybeObject* maybe_descriptors =
      DescriptorArray::Allocate(GetIsolate(), new_size);
  if (!maybe_descriptors->To(&new_descriptors)) return maybe_descriptors;
  DescriptorArray::WhitenessWitness witness(new_descriptors);

  for (int i = 0; i < new_size; ++i) {
    if (i == insertion_index) {
      new_descriptors->Set(i, descriptor, witness);
    } else {
      new_descriptors->CopyFrom(i, descriptors, i, witness);
    }
  }

  // Re-sort if descriptors were removed.
  if (new_size != descriptors->length()) new_descriptors->Sort();

  // Replacing the last descriptor can use a simple transition.
  SimpleTransitionFlag simple_flag =
      (insertion_index == descriptors->number_of_descriptors() - 1)
          ? SIMPLE_TRANSITION
          : FULL_TRANSITION;
  return CopyReplaceDescriptors(new_descriptors, flag, key, simple_flag);
}


// Handle-based wrapper around the raw UpdateCodeCache below.
void Map::UpdateCodeCache(Handle<Map> map,
                          Handle<Name> name,
                          Handle<Code> code) {
  Isolate* isolate = map->GetIsolate();
  CALL_HEAP_FUNCTION_VOID(isolate,
                          map->UpdateCodeCache(*name, *code));
}


// Inserts (name, code) into this map's code cache, allocating the CodeCache
// object on first use. An empty cache slot holds a FixedArray; a populated
// one holds a CodeCache (cf. FindInCodeCache below).
MaybeObject* Map::UpdateCodeCache(Name* name, Code* code) {
  // Allocate the code cache if not present.
  if (code_cache()->IsFixedArray()) {
    Object* result;
    { MaybeObject* maybe_result = GetHeap()->AllocateCodeCache();
      if (!maybe_result->ToObject(&result)) return maybe_result;
    }
    set_code_cache(result);
  }

  // Update the code cache.
  return CodeCache::cast(code_cache())->Update(name, code);
}


// Looks up |name| with |flags| in the code cache; returns undefined when the
// cache has never been allocated or on a miss inside CodeCache::Lookup.
Object* Map::FindInCodeCache(Name* name, Code::Flags flags) {
  // Do a lookup if a code cache exists.
  if (!code_cache()->IsFixedArray()) {
    return CodeCache::cast(code_cache())->Lookup(name, flags);
  } else {
    return GetHeap()->undefined_value();
  }
}


// Returns the internal index of |code| in the code cache, or -1 when the
// cache was never allocated.
int Map::IndexInCodeCache(Object* name, Code* code) {
  // Get the internal index if a code cache exists.
  if (!code_cache()->IsFixedArray()) {
    return CodeCache::cast(code_cache())->GetIndex(name, code);
  }
  return -1;
}


// Removes the entry previously located via IndexInCodeCache.
void Map::RemoveFromCodeCache(Name* name, Code* code, int index) {
  // No GC is supposed to happen between a call to IndexInCodeCache and
  // RemoveFromCodeCache so the code cache must be there.
  ASSERT(!code_cache()->IsFixedArray());
  CodeCache::cast(code_cache())->RemoveByIndex(name, code, index);
}


// An iterator over all map transitions in a descriptor array, reusing the map
// field of the contents array while it is running.
7215 class IntrusiveMapTransitionIterator { 7216 public: 7217 explicit IntrusiveMapTransitionIterator(TransitionArray* transition_array) 7218 : transition_array_(transition_array) { } 7219 7220 void Start() { 7221 ASSERT(!IsIterating()); 7222 *TransitionArrayHeader() = Smi::FromInt(0); 7223 } 7224 7225 bool IsIterating() { 7226 return (*TransitionArrayHeader())->IsSmi(); 7227 } 7228 7229 Map* Next() { 7230 ASSERT(IsIterating()); 7231 int index = Smi::cast(*TransitionArrayHeader())->value(); 7232 int number_of_transitions = transition_array_->number_of_transitions(); 7233 while (index < number_of_transitions) { 7234 *TransitionArrayHeader() = Smi::FromInt(index + 1); 7235 return transition_array_->GetTarget(index); 7236 } 7237 7238 *TransitionArrayHeader() = transition_array_->GetHeap()->fixed_array_map(); 7239 return NULL; 7240 } 7241 7242 private: 7243 Object** TransitionArrayHeader() { 7244 return HeapObject::RawField(transition_array_, TransitionArray::kMapOffset); 7245 } 7246 7247 TransitionArray* transition_array_; 7248 }; 7249 7250 7251 // An iterator over all prototype transitions, reusing the map field of the 7252 // underlying array while it is running. 
// Walks the prototype-transition entries stored in a FixedArray, keeping its
// cursor in the array's map word (restored to the fixed array map when done).
class IntrusivePrototypeTransitionIterator {
 public:
  explicit IntrusivePrototypeTransitionIterator(HeapObject* proto_trans)
      : proto_trans_(proto_trans) { }

  // Begin iterating: a Smi cursor overwrites the array's map word.
  void Start() {
    ASSERT(!IsIterating());
    *Header() = Smi::FromInt(0);
  }

  // True exactly while the map word still holds the Smi cursor.
  bool IsIterating() {
    return (*Header())->IsSmi();
  }

  // Returns the next transition target, or NULL when exhausted; on exhaustion
  // the fixed array map is written back into the map word.
  Map* Next() {
    ASSERT(IsIterating());
    int transitionNumber = Smi::cast(*Header())->value();
    if (transitionNumber < NumberOfTransitions()) {
      *Header() = Smi::FromInt(transitionNumber + 1);
      return GetTransition(transitionNumber);
    }
    *Header() = proto_trans_->GetHeap()->fixed_array_map();
    return NULL;
  }

 private:
  // Raw address of the array's map word, reused as the iteration cursor.
  Object** Header() {
    return HeapObject::RawField(proto_trans_, FixedArray::kMapOffset);
  }

  int NumberOfTransitions() {
    FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
    Object* num = proto_trans->get(Map::kProtoTransitionNumberOfEntriesOffset);
    return Smi::cast(num)->value();
  }

  Map* GetTransition(int transitionNumber) {
    FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
    return Map::cast(proto_trans->get(IndexFor(transitionNumber)));
  }

  // Slot index of the map for entry |transitionNumber| within the array.
  int IndexFor(int transitionNumber) {
    return Map::kProtoTransitionHeaderSize +
           Map::kProtoTransitionMapOffset +
           transitionNumber * Map::kProtoTransitionElementsPerEntry;
  }

  HeapObject* proto_trans_;
};


// To traverse the transition tree iteratively, we have to store two kinds of
// information in a map: The parent map in the traversal and which children of a
// node have already been visited. To do this without additional memory, we
// temporarily reuse two maps with known values:
//
// (1) The map of the map temporarily holds the parent, and is restored to the
//     meta map afterwards.
7311 // 7312 // (2) The info which children have already been visited depends on which part 7313 // of the map we currently iterate: 7314 // 7315 // (a) If we currently follow normal map transitions, we temporarily store 7316 // the current index in the map of the FixedArray of the desciptor 7317 // array's contents, and restore it to the fixed array map afterwards. 7318 // Note that a single descriptor can have 0, 1, or 2 transitions. 7319 // 7320 // (b) If we currently follow prototype transitions, we temporarily store 7321 // the current index in the map of the FixedArray holding the prototype 7322 // transitions, and restore it to the fixed array map afterwards. 7323 // 7324 // Note that the child iterator is just a concatenation of two iterators: One 7325 // iterating over map transitions and one iterating over prototype transisitons. 7326 class TraversableMap : public Map { 7327 public: 7328 // Record the parent in the traversal within this map. Note that this destroys 7329 // this map's map! 7330 void SetParent(TraversableMap* parent) { set_map_no_write_barrier(parent); } 7331 7332 // Reset the current map's map, returning the parent previously stored in it. 7333 TraversableMap* GetAndResetParent() { 7334 TraversableMap* old_parent = static_cast<TraversableMap*>(map()); 7335 set_map_no_write_barrier(GetHeap()->meta_map()); 7336 return old_parent; 7337 } 7338 7339 // Start iterating over this map's children, possibly destroying a FixedArray 7340 // map (see explanation above). 7341 void ChildIteratorStart() { 7342 if (HasTransitionArray()) { 7343 if (HasPrototypeTransitions()) { 7344 IntrusivePrototypeTransitionIterator(GetPrototypeTransitions()).Start(); 7345 } 7346 7347 IntrusiveMapTransitionIterator(transitions()).Start(); 7348 } 7349 } 7350 7351 // If we have an unvisited child map, return that one and advance. If we have 7352 // none, return NULL and reset any destroyed FixedArray maps. 
7353 TraversableMap* ChildIteratorNext() { 7354 TransitionArray* transition_array = unchecked_transition_array(); 7355 if (!transition_array->map()->IsSmi() && 7356 !transition_array->IsTransitionArray()) { 7357 return NULL; 7358 } 7359 7360 if (transition_array->HasPrototypeTransitions()) { 7361 HeapObject* proto_transitions = 7362 transition_array->UncheckedPrototypeTransitions(); 7363 IntrusivePrototypeTransitionIterator proto_iterator(proto_transitions); 7364 if (proto_iterator.IsIterating()) { 7365 Map* next = proto_iterator.Next(); 7366 if (next != NULL) return static_cast<TraversableMap*>(next); 7367 } 7368 } 7369 7370 IntrusiveMapTransitionIterator transition_iterator(transition_array); 7371 if (transition_iterator.IsIterating()) { 7372 Map* next = transition_iterator.Next(); 7373 if (next != NULL) return static_cast<TraversableMap*>(next); 7374 } 7375 7376 return NULL; 7377 } 7378 }; 7379 7380 7381 // Traverse the transition tree in postorder without using the C++ stack by 7382 // doing pointer reversal. 7383 void Map::TraverseTransitionTree(TraverseCallback callback, void* data) { 7384 TraversableMap* current = static_cast<TraversableMap*>(this); 7385 current->ChildIteratorStart(); 7386 while (true) { 7387 TraversableMap* child = current->ChildIteratorNext(); 7388 if (child != NULL) { 7389 child->ChildIteratorStart(); 7390 child->SetParent(current); 7391 current = child; 7392 } else { 7393 TraversableMap* parent = current->GetAndResetParent(); 7394 callback(current, data); 7395 if (current == this) break; 7396 current = parent; 7397 } 7398 } 7399 } 7400 7401 7402 MaybeObject* CodeCache::Update(Name* name, Code* code) { 7403 // The number of monomorphic stubs for normal load/store/call IC's can grow to 7404 // a large number and therefore they need to go into a hash table. They are 7405 // used to load global properties from cells. 7406 if (code->type() == Code::NORMAL) { 7407 // Make sure that a hash table is allocated for the normal load code cache. 
    // Lazily allocate the hash table backing the normal-type cache on first
    // use; an allocation failure is propagated to the caller for retry.
    if (normal_type_cache()->IsUndefined()) {
      Object* result;
      { MaybeObject* maybe_result =
            CodeCacheHashTable::Allocate(GetHeap(),
                                         CodeCacheHashTable::kInitialSize);
        if (!maybe_result->ToObject(&result)) return maybe_result;
      }
      set_normal_type_cache(result);
    }
    return UpdateNormalTypeCache(name, code);
  } else {
    ASSERT(default_cache()->IsFixedArray());
    return UpdateDefaultCache(name, code);
  }
}


// Adds (name, code) to the linear default cache, reusing deleted (null)
// slots and growing the backing FixedArray when it is full.  Returns |this|
// on success or an allocation failure to be retried by the caller.
MaybeObject* CodeCache::UpdateDefaultCache(Name* name, Code* code) {
  // When updating the default code cache we disregard the type encoded in the
  // flags. This allows call constant stubs to overwrite call field
  // stubs, etc.
  Code::Flags flags = Code::RemoveTypeFromFlags(code->flags());

  // First check whether we can update existing code cache without
  // extending it.
  FixedArray* cache = default_cache();
  int length = cache->length();
  int deleted_index = -1;
  for (int i = 0; i < length; i += kCodeCacheEntrySize) {
    Object* key = cache->get(i);
    if (key->IsNull()) {
      // Remember the first deleted slot so it can be reused below.
      if (deleted_index < 0) deleted_index = i;
      continue;
    }
    if (key->IsUndefined()) {
      // Undefined marks the end of the used portion of the cache; insert
      // here (or in an earlier deleted slot if one was seen).
      if (deleted_index >= 0) i = deleted_index;
      cache->set(i + kCodeCacheEntryNameOffset, name);
      cache->set(i + kCodeCacheEntryCodeOffset, code);
      return this;
    }
    if (name->Equals(Name::cast(key))) {
      Code::Flags found =
          Code::cast(cache->get(i + kCodeCacheEntryCodeOffset))->flags();
      if (Code::RemoveTypeFromFlags(found) == flags) {
        // Same name and (type-stripped) flags: overwrite in place.
        cache->set(i + kCodeCacheEntryCodeOffset, code);
        return this;
      }
    }
  }

  // Reached the end of the code cache. If there were deleted
  // elements, reuse the space for the first of them.
  if (deleted_index >= 0) {
    cache->set(deleted_index + kCodeCacheEntryNameOffset, name);
    cache->set(deleted_index + kCodeCacheEntryCodeOffset, code);
    return this;
  }

  // Extend the code cache with some new entries (at least one). Must be a
  // multiple of the entry size.
  int new_length = length + ((length >> 1)) + kCodeCacheEntrySize;
  new_length = new_length - new_length % kCodeCacheEntrySize;
  ASSERT((new_length % kCodeCacheEntrySize) == 0);
  Object* result;
  { MaybeObject* maybe_result = cache->CopySize(new_length);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }

  // Add the (name, code) pair to the new cache.
  cache = FixedArray::cast(result);
  cache->set(length + kCodeCacheEntryNameOffset, name);
  cache->set(length + kCodeCacheEntryCodeOffset, code);
  set_default_cache(cache);
  return this;
}


// Adds (name, code) to the hash-table cache used for NORMAL code objects.
// Put may reallocate the table, so the cache field is re-set afterwards.
MaybeObject* CodeCache::UpdateNormalTypeCache(Name* name, Code* code) {
  // Adding a new entry can cause a new cache to be allocated.
  CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
  Object* new_cache;
  { MaybeObject* maybe_new_cache = cache->Put(name, code);
    if (!maybe_new_cache->ToObject(&new_cache)) return maybe_new_cache;
  }
  set_normal_type_cache(new_cache);
  return this;
}


// Looks up |name| in both caches; returns the matching Code object or
// undefined.  The type part of |flags| is ignored, mirroring the update
// paths above.
Object* CodeCache::Lookup(Name* name, Code::Flags flags) {
  flags = Code::RemoveTypeFromFlags(flags);
  Object* result = LookupDefaultCache(name, flags);
  if (result->IsCode()) return result;
  return LookupNormalTypeCache(name, flags);
}


// Linear scan of the default cache; returns the matching Code or undefined.
Object* CodeCache::LookupDefaultCache(Name* name, Code::Flags flags) {
  FixedArray* cache = default_cache();
  int length = cache->length();
  for (int i = 0; i < length; i += kCodeCacheEntrySize) {
    Object* key = cache->get(i + kCodeCacheEntryNameOffset);
    // Skip deleted elements.
    if (key->IsNull()) continue;
    // Undefined marks the end of the used entries; it doubles as the
    // not-found result.
    if (key->IsUndefined()) return key;
    if (name->Equals(Name::cast(key))) {
      Code* code = Code::cast(cache->get(i + kCodeCacheEntryCodeOffset));
      if (Code::RemoveTypeFromFlags(code->flags()) == flags) {
        return code;
      }
    }
  }
  return GetHeap()->undefined_value();
}


// Hash-table lookup for NORMAL code; returns undefined if the table has
// never been allocated.
Object* CodeCache::LookupNormalTypeCache(Name* name, Code::Flags flags) {
  if (!normal_type_cache()->IsUndefined()) {
    CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
    return cache->Lookup(name, flags);
  } else {
    return GetHeap()->undefined_value();
  }
}


// Returns an index identifying |code|'s position in the cache, suitable for
// RemoveByIndex, or -1 if absent.  The default cache encodes the position
// as (array index of the entry) + 1.
int CodeCache::GetIndex(Object* name, Code* code) {
  if (code->type() == Code::NORMAL) {
    if (normal_type_cache()->IsUndefined()) return -1;
    CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
    return cache->GetIndex(Name::cast(name), code->flags());
  }

  FixedArray* array = default_cache();
  int len = array->length();
  for (int i = 0; i < len; i += kCodeCacheEntrySize) {
    if (array->get(i + kCodeCacheEntryCodeOffset) == code) return i + 1;
  }
  return -1;
}


// Removes the cache entry at |index| (as returned by GetIndex).
void CodeCache::RemoveByIndex(Object* name, Code* code, int index) {
  if (code->type() == Code::NORMAL) {
    ASSERT(!normal_type_cache()->IsUndefined());
    CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
    ASSERT(cache->GetIndex(Name::cast(name), code->flags()) == index);
    cache->RemoveByIndex(index);
  } else {
    FixedArray* array = default_cache();
    ASSERT(array->length() >= index && array->get(index)->IsCode());
    // Use null instead of undefined for deleted elements to distinguish
    // deleted elements from unused elements. This distinction is used
    // when looking up in the cache and when updating the cache.
    ASSERT_EQ(1, kCodeCacheEntryCodeOffset - kCodeCacheEntryNameOffset);
    array->set_null(index - 1);  // Name.
    array->set_null(index);      // Code.
  }
}


// The key in the code cache hash table consists of the property name and the
// code object. The actual match is on the name and the code flags. If a key
// is created using the flags and not a code object it can only be used for
// lookup not to create a new entry.
class CodeCacheHashTableKey : public HashTableKey {
 public:
  // Lookup-only key: carries flags but no code object.
  CodeCacheHashTableKey(Name* name, Code::Flags flags)
      : name_(name), flags_(flags), code_(NULL) { }

  // Insertion-capable key: flags are derived from the code object.
  CodeCacheHashTableKey(Name* name, Code* code)
      : name_(name), flags_(code->flags()), code_(code) { }


  // Matches against a stored key, which is a 2-element FixedArray holding
  // {name, code}.
  bool IsMatch(Object* other) {
    if (!other->IsFixedArray()) return false;
    FixedArray* pair = FixedArray::cast(other);
    Name* name = Name::cast(pair->get(0));
    Code::Flags flags = Code::cast(pair->get(1))->flags();
    if (flags != flags_) {
      return false;
    }
    return name_->Equals(name);
  }

  static uint32_t NameFlagsHashHelper(Name* name, Code::Flags flags) {
    return name->Hash() ^ flags;
  }

  uint32_t Hash() { return NameFlagsHashHelper(name_, flags_); }

  uint32_t HashForObject(Object* obj) {
    FixedArray* pair = FixedArray::cast(obj);
    Name* name = Name::cast(pair->get(0));
    Code* code = Code::cast(pair->get(1));
    return NameFlagsHashHelper(name, code->flags());
  }

  // Materializes the key as a 2-element FixedArray {name, code}.  Only
  // valid for insertion-capable keys (code_ != NULL).
  MUST_USE_RESULT MaybeObject* AsObject(Heap* heap) {
    ASSERT(code_ != NULL);
    Object* obj;
    { MaybeObject* maybe_obj = heap->AllocateFixedArray(2);
      if (!maybe_obj->ToObject(&obj)) return maybe_obj;
    }
    FixedArray* pair = FixedArray::cast(obj);
    pair->set(0, name_);
    pair->set(1, code_);
    return pair;
  }

 private:
  Name* name_;
  Code::Flags flags_;
  // TODO(jkummerow): We should be able to get by without this.
  Code* code_;
};


// Returns the cached Code for (name, flags) or undefined.
Object* CodeCacheHashTable::Lookup(Name* name, Code::Flags flags) {
  CodeCacheHashTableKey key(name, flags);
  int entry = FindEntry(&key);
  if (entry == kNotFound) return GetHeap()->undefined_value();
  return get(EntryToIndex(entry) + 1);
}


// Inserts (name, code); may grow the table, in which case the (possibly
// new) table is returned for the caller to store.
MaybeObject* CodeCacheHashTable::Put(Name* name, Code* code) {
  CodeCacheHashTableKey key(name, code);
  Object* obj;
  { MaybeObject* maybe_obj = EnsureCapacity(1, &key);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Don't use |this|, as the table might have grown.
  CodeCacheHashTable* cache = reinterpret_cast<CodeCacheHashTable*>(obj);

  int entry = cache->FindInsertionEntry(key.Hash());
  Object* k;
  { MaybeObject* maybe_k = key.AsObject(GetHeap());
    if (!maybe_k->ToObject(&k)) return maybe_k;
  }

  cache->set(EntryToIndex(entry), k);
  cache->set(EntryToIndex(entry) + 1, code);
  cache->ElementAdded();
  return cache;
}


// Returns the hash table entry index for (name, flags), or -1 if absent.
int CodeCacheHashTable::GetIndex(Name* name, Code::Flags flags) {
  CodeCacheHashTableKey key(name, flags);
  int entry = FindEntry(&key);
  return (entry == kNotFound) ? -1 : entry;
}


// Deletes the entry at |index| by overwriting key and value with the hole.
void CodeCacheHashTable::RemoveByIndex(int index) {
  ASSERT(index >= 0);
  Heap* heap = GetHeap();
  set(EntryToIndex(index), heap->the_hole_value());
  set(EntryToIndex(index) + 1, heap->the_hole_value());
  ElementRemoved();
}


// Handle-based wrapper around the raw Update below; retries on allocation
// failure via CALL_HEAP_FUNCTION_VOID.
void PolymorphicCodeCache::Update(Handle<PolymorphicCodeCache> cache,
                                  MapHandleList* maps,
                                  Code::Flags flags,
                                  Handle<Code> code) {
  Isolate* isolate = cache->GetIsolate();
  CALL_HEAP_FUNCTION_VOID(isolate, cache->Update(maps, flags, *code));
}


MaybeObject* PolymorphicCodeCache::Update(MapHandleList* maps,
                                          Code::Flags flags,
                                          Code* code) {
  // Initialize cache if necessary.
  if (cache()->IsUndefined()) {
    Object* result;
    { MaybeObject* maybe_result =
          PolymorphicCodeCacheHashTable::Allocate(
              GetHeap(),
              PolymorphicCodeCacheHashTable::kInitialSize);
      if (!maybe_result->ToObject(&result)) return maybe_result;
    }
    set_cache(result);
  } else {
    // This entry shouldn't be contained in the cache yet.
    ASSERT(PolymorphicCodeCacheHashTable::cast(cache())
               ->Lookup(maps, flags)->IsUndefined());
  }
  PolymorphicCodeCacheHashTable* hash_table =
      PolymorphicCodeCacheHashTable::cast(cache());
  Object* new_cache;
  { MaybeObject* maybe_new_cache = hash_table->Put(maps, flags, code);
    if (!maybe_new_cache->ToObject(&new_cache)) return maybe_new_cache;
  }
  set_cache(new_cache);
  return this;
}


// Returns the cached Code for (maps, flags) or undefined, as a handle.
Handle<Object> PolymorphicCodeCache::Lookup(MapHandleList* maps,
                                            Code::Flags flags) {
  if (!cache()->IsUndefined()) {
    PolymorphicCodeCacheHashTable* hash_table =
        PolymorphicCodeCacheHashTable::cast(cache());
    return Handle<Object>(hash_table->Lookup(maps, flags), GetIsolate());
  } else {
    return GetIsolate()->factory()->undefined_value();
  }
}


// Despite their name, objects of this class are not stored in the actual
// hash table; instead they're temporarily used for lookups. It is therefore
// safe to have a weak (non-owning) pointer to a MapList as a member field.
class PolymorphicCodeCacheHashTableKey : public HashTableKey {
 public:
  // Callers must ensure that |maps| outlives the newly constructed object.
  PolymorphicCodeCacheHashTableKey(MapHandleList* maps, int code_flags)
      : maps_(maps),
        code_flags_(code_flags) {}

  // Matches against a stored key, which is a FixedArray of the form
  // [code_flags, map0, map1, ...] (see AsObject/FromObject below).
  bool IsMatch(Object* other) {
    MapHandleList other_maps(kDefaultListAllocationSize);
    int other_flags;
    FromObject(other, &other_flags, &other_maps);
    if (code_flags_ != other_flags) return false;
    if (maps_->length() != other_maps.length()) return false;
    // Compare just the hashes first because it's faster.
    int this_hash = MapsHashHelper(maps_, code_flags_);
    int other_hash = MapsHashHelper(&other_maps, other_flags);
    if (this_hash != other_hash) return false;

    // Full comparison: for each map in maps_, look for an equivalent map in
    // other_maps. This implementation is slow, but probably good enough for
    // now because the lists are short (<= 4 elements currently).
    for (int i = 0; i < maps_->length(); ++i) {
      bool match_found = false;
      for (int j = 0; j < other_maps.length(); ++j) {
        if (*(maps_->at(i)) == *(other_maps.at(j))) {
          match_found = true;
          break;
        }
      }
      if (!match_found) return false;
    }
    return true;
  }

  // Order-independent hash: XOR of the map hashes, seeded with the flags.
  static uint32_t MapsHashHelper(MapHandleList* maps, int code_flags) {
    uint32_t hash = code_flags;
    for (int i = 0; i < maps->length(); ++i) {
      hash ^= maps->at(i)->Hash();
    }
    return hash;
  }

  uint32_t Hash() {
    return MapsHashHelper(maps_, code_flags_);
  }

  uint32_t HashForObject(Object* obj) {
    MapHandleList other_maps(kDefaultListAllocationSize);
    int other_flags;
    FromObject(obj, &other_flags, &other_maps);
    return MapsHashHelper(&other_maps, other_flags);
  }

  // Materializes the key as a FixedArray [code_flags, map0, map1, ...].
  MUST_USE_RESULT MaybeObject* AsObject(Heap* heap) {
    Object* obj;
    // The maps in |maps_| must be copied to a newly allocated FixedArray,
    // both because the referenced MapList is short-lived, and because C++
    // objects can't be stored in the heap anyway.
    { MaybeObject* maybe_obj =
          heap->AllocateUninitializedFixedArray(maps_->length() + 1);
      if (!maybe_obj->ToObject(&obj)) return maybe_obj;
    }
    FixedArray* list = FixedArray::cast(obj);
    list->set(0, Smi::FromInt(code_flags_));
    for (int i = 0; i < maps_->length(); ++i) {
      list->set(i + 1, *maps_->at(i));
    }
    return list;
  }

 private:
  // Unpacks a stored FixedArray key back into (code_flags, maps).
  static MapHandleList* FromObject(Object* obj,
                                   int* code_flags,
                                   MapHandleList* maps) {
    FixedArray* list = FixedArray::cast(obj);
    maps->Rewind(0);
    *code_flags = Smi::cast(list->get(0))->value();
    for (int i = 1; i < list->length(); ++i) {
      maps->Add(Handle<Map>(Map::cast(list->get(i))));
    }
    return maps;
  }

  MapHandleList* maps_;  // weak.
  int code_flags_;
  static const int kDefaultListAllocationSize = kMaxKeyedPolymorphism + 1;
};


// Returns the cached Code for (maps, code_flags) or undefined.
Object* PolymorphicCodeCacheHashTable::Lookup(MapHandleList* maps,
                                              int code_flags) {
  PolymorphicCodeCacheHashTableKey key(maps, code_flags);
  int entry = FindEntry(&key);
  if (entry == kNotFound) return GetHeap()->undefined_value();
  return get(EntryToIndex(entry) + 1);
}


// Inserts (maps, code_flags) -> code; may grow the table, in which case the
// (possibly new) table is returned for the caller to store.
MaybeObject* PolymorphicCodeCacheHashTable::Put(MapHandleList* maps,
                                                int code_flags,
                                                Code* code) {
  PolymorphicCodeCacheHashTableKey key(maps, code_flags);
  Object* obj;
  { MaybeObject* maybe_obj = EnsureCapacity(1, &key);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  PolymorphicCodeCacheHashTable* cache =
      reinterpret_cast<PolymorphicCodeCacheHashTable*>(obj);
  int entry = cache->FindInsertionEntry(key.Hash());
  { MaybeObject* maybe_obj = key.AsObject(GetHeap());
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  cache->set(EntryToIndex(entry), obj);
  cache->set(EntryToIndex(entry) + 1, code);
  cache->ElementAdded();
  return cache;
}


MaybeObject*
FixedArray::AddKeysFromJSArray(JSArray* array) {
  // Adds |array|'s element keys to this FixedArray via the array's elements
  // accessor; returns the (possibly new) result array.
  ElementsAccessor* accessor = array->GetElementsAccessor();
  MaybeObject* maybe_result =
      accessor->AddElementsToFixedArray(array, array, this);
  FixedArray* result;
  if (!maybe_result->To<FixedArray>(&result)) return maybe_result;
#ifdef ENABLE_SLOW_ASSERTS
  if (FLAG_enable_slow_asserts) {
    // The resulting key array must contain only numbers and names.
    for (int i = 0; i < result->length(); i++) {
      Object* current = result->get(i);
      ASSERT(current->IsNumber() || current->IsName());
    }
  }
#endif
  return result;
}


// Returns a FixedArray containing the union of this array's keys and
// |other|'s elements.
MaybeObject* FixedArray::UnionOfKeys(FixedArray* other) {
  ElementsAccessor* accessor = ElementsAccessor::ForArray(other);
  MaybeObject* maybe_result =
      accessor->AddElementsToFixedArray(NULL, NULL, this, other);
  FixedArray* result;
  if (!maybe_result->To(&result)) return maybe_result;
#ifdef ENABLE_SLOW_ASSERTS
  if (FLAG_enable_slow_asserts) {
    for (int i = 0; i < result->length(); i++) {
      Object* current = result->get(i);
      ASSERT(current->IsNumber() || current->IsName());
    }
  }
#endif
  return result;
}


// Returns a copy of this array resized to |new_length|; elements beyond the
// shorter of the two lengths are not copied.
MaybeObject* FixedArray::CopySize(int new_length, PretenureFlag pretenure) {
  Heap* heap = GetHeap();
  if (new_length == 0) return heap->empty_fixed_array();
  Object* obj;
  { MaybeObject* maybe_obj = heap->AllocateFixedArray(new_length, pretenure);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  FixedArray* result = FixedArray::cast(obj);
  // Copy the content
  DisallowHeapAllocation no_gc;
  int len = length();
  if (new_length < len) len = new_length;
  // We are taking the map from the old fixed array so the map is sure to
  // be an immortal immutable object.
  result->set_map_no_write_barrier(map());
  WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
  for (int i = 0; i < len; i++) {
    result->set(i, get(i), mode);
  }
  return result;
}


// Copies |len| elements starting at |pos| into |dest| beginning at
// |dest_pos|, using |dest|'s write-barrier mode.
void FixedArray::CopyTo(int pos, FixedArray* dest, int dest_pos, int len) {
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = dest->GetWriteBarrierMode(no_gc);
  for (int index = 0; index < len; index++) {
    dest->set(dest_pos+index, get(pos+index), mode);
  }
}


#ifdef DEBUG
// Element-wise identity comparison; debug-only helper.
bool FixedArray::IsEqualTo(FixedArray* other) {
  if (length() != other->length()) return false;
  for (int i = 0 ; i < length(); ++i) {
    if (get(i) != other->get(i)) return false;
  }
  return true;
}
#endif


// Allocates a descriptor array with room for |number_of_descriptors| plus
// |slack| descriptors.
MaybeObject* DescriptorArray::Allocate(Isolate* isolate,
                                       int number_of_descriptors,
                                       int slack) {
  Heap* heap = isolate->heap();
  // Do not use DescriptorArray::cast on incomplete object.
  int size = number_of_descriptors + slack;
  if (size == 0) return heap->empty_descriptor_array();
  FixedArray* result;
  // Allocate the array of keys.
  MaybeObject* maybe_array = heap->AllocateFixedArray(LengthFor(size));
  if (!maybe_array->To(&result)) return maybe_array;

  result->set(kDescriptorLengthIndex, Smi::FromInt(number_of_descriptors));
  result->set(kEnumCacheIndex, Smi::FromInt(0));
  return result;
}


// Drops the enum cache by resetting its slot to the Smi zero sentinel.
void DescriptorArray::ClearEnumCache() {
  set(kEnumCacheIndex, Smi::FromInt(0));
}


// Installs a new enum cache (and index cache) through the given bridge
// storage array.
void DescriptorArray::SetEnumCache(FixedArray* bridge_storage,
                                   FixedArray* new_cache,
                                   Object* new_index_cache) {
  ASSERT(bridge_storage->length() >= kEnumCacheBridgeLength);
  ASSERT(new_index_cache->IsSmi() || new_index_cache->IsFixedArray());
  ASSERT(!IsEmpty());
  ASSERT(!HasEnumCache() || new_cache->length() > GetEnumCache()->length());
  FixedArray::cast(bridge_storage)->
      set(kEnumCacheBridgeCacheIndex, new_cache);
  FixedArray::cast(bridge_storage)->
      set(kEnumCacheBridgeIndicesCacheIndex, new_index_cache);
  set(kEnumCacheIndex, bridge_storage);
}


// Copies the descriptor at |src_index| in |src| into slot |dst_index| of
// this array.
void DescriptorArray::CopyFrom(int dst_index,
                               DescriptorArray* src,
                               int src_index,
                               const WhitenessWitness& witness) {
  Object* value = src->GetValue(src_index);
  PropertyDetails details = src->GetDetails(src_index);
  Descriptor desc(src->GetKey(src_index), value, details);
  Set(dst_index, &desc, witness);
}


// Handle-based wrapper for the raw Merge below; retries on allocation
// failure.
Handle<DescriptorArray> DescriptorArray::Merge(Handle<DescriptorArray> desc,
                                               int verbatim,
                                               int valid,
                                               int new_size,
                                               int modify_index,
                                               StoreMode store_mode,
                                               Handle<DescriptorArray> other) {
  CALL_HEAP_FUNCTION(desc->GetIsolate(),
                     desc->Merge(verbatim, valid, new_size, modify_index,
                                 store_mode, *other),
                     DescriptorArray);
}


// Generalize the |other| descriptor array by merging it into the (at least
// partly) updated |this| descriptor array.
// The method merges two descriptor arrays in three parts. Both descriptor
// arrays are identical up to |verbatim|. They also overlap in keys up to
// |valid|. Between |verbatim| and |valid|, the resulting descriptor type as
// well as the representation are generalized from both |this| and |other|.
// Beyond |valid|, the descriptors are copied verbatim from |other| up to
// |new_size|. In case of incompatible types, the type and representation of
// |other| is used.
MaybeObject* DescriptorArray::Merge(int verbatim,
                                    int valid,
                                    int new_size,
                                    int modify_index,
                                    StoreMode store_mode,
                                    DescriptorArray* other) {
  ASSERT(verbatim <= valid);
  ASSERT(valid <= new_size);

  DescriptorArray* result;
  // Allocate a new descriptor array large enough to hold the required
  // descriptors, with minimally the exact same size as this descriptor array.
  MaybeObject* maybe_descriptors = DescriptorArray::Allocate(
      GetIsolate(), new_size,
      Max(new_size, other->number_of_descriptors()) - new_size);
  if (!maybe_descriptors->To(&result)) return maybe_descriptors;
  ASSERT(result->length() > length() ||
         result->NumberOfSlackDescriptors() > 0 ||
         result->number_of_descriptors() == other->number_of_descriptors());
  ASSERT(result->number_of_descriptors() == new_size);

  DescriptorArray::WhitenessWitness witness(result);

  int descriptor;

  // 0 -> |verbatim|
  int current_offset = 0;
  for (descriptor = 0; descriptor < verbatim; descriptor++) {
    // Track the field count so later field descriptors get fresh offsets.
    if (GetDetails(descriptor).type() == FIELD) current_offset++;
    result->CopyFrom(descriptor, other, descriptor, witness);
  }

  // |verbatim| -> |valid|
  for (; descriptor < valid; descriptor++) {
    Name* key = GetKey(descriptor);
    PropertyDetails details = GetDetails(descriptor);
    PropertyDetails other_details = other->GetDetails(descriptor);

    if (details.type() == FIELD || other_details.type() == FIELD ||
        (store_mode == FORCE_FIELD && descriptor == modify_index) ||
        (details.type() == CONSTANT &&
         other_details.type() == CONSTANT &&
         GetValue(descriptor) != other->GetValue(descriptor))) {
      // Field-backed or conflicting descriptors degrade to a field whose
      // representation generalizes both sides.
      Representation representation =
          details.representation().generalize(other_details.representation());
      FieldDescriptor d(key,
                        current_offset++,
                        other_details.attributes(),
                        representation);
      result->Set(descriptor, &d, witness);
    } else {
      result->CopyFrom(descriptor, other, descriptor, witness);
    }
  }

  // |valid| -> |new_size|
  for (; descriptor < new_size; descriptor++) {
    PropertyDetails details = other->GetDetails(descriptor);
    if (details.type() == FIELD ||
        (store_mode == FORCE_FIELD && descriptor == modify_index)) {
      Name* key = other->GetKey(descriptor);
      FieldDescriptor d(key,
                        current_offset++,
                        details.attributes(),
                        details.representation());
      result->Set(descriptor, &d, witness);
    } else {
      result->CopyFrom(descriptor, other, descriptor, witness);
    }
  }

  result->Sort();
  return result;
}


// Checks whether a merge of |other| into |this| would return a copy of
// |this|.
bool DescriptorArray::IsMoreGeneralThan(int verbatim,
                                        int valid,
                                        int new_size,
                                        DescriptorArray* other) {
  ASSERT(verbatim <= valid);
  ASSERT(valid <= new_size);
  // A differing tail would force a real merge, so |this| cannot subsume
  // |other| unless the overlap covers everything.
  if (valid != new_size) return false;

  for (int descriptor = verbatim; descriptor < valid; descriptor++) {
    PropertyDetails details = GetDetails(descriptor);
    PropertyDetails other_details = other->GetDetails(descriptor);
    if (!other_details.representation().fits_into(details.representation())) {
      return false;
    }
    if (details.type() == CONSTANT) {
      if (other_details.type() != CONSTANT) return false;
      if (GetValue(descriptor) != other->GetValue(descriptor)) return false;
    }
  }

  return true;
}


// We need the whiteness witness since sort will reshuffle the entries in the
// descriptor array. If the descriptor array were to be black, the shuffling
// would move a slot that was already recorded as pointing into an evacuation
// candidate. This would result in missing updates upon evacuation.
void DescriptorArray::Sort() {
  // In-place heap sort.
  int len = number_of_descriptors();
  // Reset sorting since the descriptor array might contain invalid pointers.
  for (int i = 0; i < len; ++i) SetSortedKey(i, i);
  // Bottom-up max-heap construction.
  // Index of the last node with children
  const int max_parent_index = (len / 2) - 1;
  for (int i = max_parent_index; i >= 0; --i) {
    int parent_index = i;
    const uint32_t parent_hash = GetSortedKey(i)->Hash();
    while (parent_index <= max_parent_index) {
      int child_index = 2 * parent_index + 1;
      uint32_t child_hash = GetSortedKey(child_index)->Hash();
      if (child_index + 1 < len) {
        // Pick the larger of the two children to sift against.
        uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
        if (right_child_hash > child_hash) {
          child_index++;
          child_hash = right_child_hash;
        }
      }
      if (child_hash <= parent_hash) break;
      SwapSortedKeys(parent_index, child_index);
      // Now element at child_index could be < its children.
      parent_index = child_index;  // parent_hash remains correct.
    }
  }

  // Extract elements and create sorted array.
  for (int i = len - 1; i > 0; --i) {
    // Put max element at the back of the array.
    SwapSortedKeys(0, i);
    // Shift down the new top element.
    int parent_index = 0;
    const uint32_t parent_hash = GetSortedKey(parent_index)->Hash();
    const int max_parent_index = (i / 2) - 1;
    while (parent_index <= max_parent_index) {
      int child_index = parent_index * 2 + 1;
      uint32_t child_hash = GetSortedKey(child_index)->Hash();
      if (child_index + 1 < i) {
        uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
        if (right_child_hash > child_hash) {
          child_index++;
          child_hash = right_child_hash;
        }
      }
      if (child_hash <= parent_hash) break;
      SwapSortedKeys(parent_index, child_index);
      parent_index = child_index;
    }
  }
  ASSERT(IsSortedNoDuplicates());
}


// Allocates a fresh AccessorPair holding the same getter and setter.
Handle<AccessorPair> AccessorPair::Copy(Handle<AccessorPair> pair) {
  Handle<AccessorPair> copy = pair->GetIsolate()->factory()->NewAccessorPair();
  copy->set_getter(pair->getter());
  copy->set_setter(pair->setter());
  return copy;
}


// Returns the requested accessor, mapping the hole (unset) to undefined.
Object* AccessorPair::GetComponent(AccessorComponent component) {
  Object* accessor = get(component);
  return accessor->IsTheHole() ?
         GetHeap()->undefined_value() : accessor;
}


// Allocates the FixedArray backing deoptimization input data.
MaybeObject* DeoptimizationInputData::Allocate(Isolate* isolate,
                                               int deopt_entry_count,
                                               PretenureFlag pretenure) {
  ASSERT(deopt_entry_count > 0);
  return isolate->heap()->AllocateFixedArray(LengthFor(deopt_entry_count),
                                             pretenure);
}


// Allocates the FixedArray backing deoptimization output data.
MaybeObject* DeoptimizationOutputData::Allocate(Isolate* isolate,
                                                int number_of_deopt_points,
                                                PretenureFlag pretenure) {
  if (number_of_deopt_points == 0) return isolate->heap()->empty_fixed_array();
  return isolate->heap()->AllocateFixedArray(
      LengthOfFixedArray(number_of_deopt_points), pretenure);
}


#ifdef DEBUG
// Element-wise identity comparison; debug-only helper.
bool DescriptorArray::IsEqualTo(DescriptorArray* other) {
  if (IsEmpty()) return other->IsEmpty();
  if (other->IsEmpty()) return false;
  if (length() != other->length()) return false;
  for (int i = 0; i < length(); ++i) {
    if (get(i) != other->get(i)) return false;
  }
  return true;
}
#endif


static bool IsIdentifier(UnicodeCache* cache, Name* name) {
  // Checks whether the buffer contains an identifier (no escape).
  if (!name->IsString()) return false;
  String* string = String::cast(name);
  if (string->length() == 0) return false;
  ConsStringIteratorOp op;
  StringCharacterStream stream(string, &op);
  if (!cache->IsIdentifierStart(stream.GetNext())) {
    return false;
  }
  while (stream.HasMore()) {
    if (!cache->IsIdentifierPart(stream.GetNext())) {
      return false;
    }
  }
  return true;
}


// A name is cacheable if it is a symbol, a plain identifier, or the hidden
// string.
bool Name::IsCacheable(Isolate* isolate) {
  return IsSymbol() ||
         IsIdentifier(isolate->unicode_cache(), this) ||
         this == isolate->heap()->hidden_string();
}


// Cheap sanity check used by robust string traversal.
bool String::LooksValid() {
  if (!GetIsolate()->heap()->Contains(this)) return false;
  return true;
}


// Returns a view over the string's flat character data, or an empty
// FlatContent if the string is not flat.  Allocation is disallowed while
// the result is in use, since GC could move the underlying characters.
String::FlatContent String::GetFlatContent() {
  ASSERT(!AllowHeapAllocation::IsAllowed());
  int length = this->length();
  StringShape shape(this);
  String* string = this;
  int offset = 0;
  if (shape.representation_tag() == kConsStringTag) {
    ConsString* cons = ConsString::cast(string);
    if (cons->second()->length() != 0) {
      // Not flattened: no contiguous character data available.
      return FlatContent();
    }
    string = cons->first();
    shape = StringShape(string);
  }
  if (shape.representation_tag() == kSlicedStringTag) {
    SlicedString* slice = SlicedString::cast(string);
    offset = slice->offset();
    string = slice->parent();
    shape = StringShape(string);
    ASSERT(shape.representation_tag() != kConsStringTag &&
           shape.representation_tag() != kSlicedStringTag);
  }
  if (shape.encoding_tag() == kOneByteStringTag) {
    const uint8_t* start;
    if (shape.representation_tag() == kSeqStringTag) {
      start = SeqOneByteString::cast(string)->GetChars();
    } else {
      start = ExternalAsciiString::cast(string)->GetChars();
    }
    return FlatContent(Vector<const uint8_t>(start + offset, length));
  } else {
    ASSERT(shape.encoding_tag() == kTwoByteStringTag);
    const uc16*
        start;
    if (shape.representation_tag() == kSeqStringTag) {
      start = SeqTwoByteString::cast(string)->GetChars();
    } else {
      start = ExternalTwoByteString::cast(string)->GetChars();
    }
    return FlatContent(Vector<const uc16>(start + offset, length));
  }
}


// Converts [offset, offset + length) of this string to a NUL-terminated
// UTF-8 C string.  A negative |length| means "to the end of the string".
// If |length_return| is non-NULL it receives the UTF-8 byte count
// (excluding the terminator).
SmartArrayPointer<char> String::ToCString(AllowNullsFlag allow_nulls,
                                          RobustnessFlag robust_flag,
                                          int offset,
                                          int length,
                                          int* length_return) {
  if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
    return SmartArrayPointer<char>(NULL);
  }
  Heap* heap = GetHeap();

  // Negative length means to the end of the string.
  if (length < 0) length = kMaxInt - offset;

  // Compute the size of the UTF-8 string. Start at the specified offset.
  Access<ConsStringIteratorOp> op(
      heap->isolate()->objects_string_iterator());
  StringCharacterStream stream(this, op.value(), offset);
  int character_position = offset;
  int utf8_bytes = 0;
  int last = unibrow::Utf16::kNoPreviousCharacter;
  while (stream.HasMore() && character_position++ < offset + length) {
    uint16_t character = stream.GetNext();
    utf8_bytes += unibrow::Utf8::Length(character, last);
    last = character;
  }

  if (length_return) {
    *length_return = utf8_bytes;
  }

  char* result = NewArray<char>(utf8_bytes + 1);

  // Convert the UTF-16 string to a UTF-8 buffer. Start at the specified
  // offset.
  stream.Reset(this, offset);
  character_position = offset;
  int utf8_byte_position = 0;
  last = unibrow::Utf16::kNoPreviousCharacter;
  while (stream.HasMore() && character_position++ < offset + length) {
    uint16_t character = stream.GetNext();
    if (allow_nulls == DISALLOW_NULLS && character == 0) {
      // Replace embedded NULs so the result is a valid C string.
      character = ' ';
    }
    utf8_byte_position +=
        unibrow::Utf8::Encode(result + utf8_byte_position, character, last);
    last = character;
  }
  result[utf8_byte_position] = 0;
  return SmartArrayPointer<char>(result);
}


// Convenience overload converting the whole string.
SmartArrayPointer<char> String::ToCString(AllowNullsFlag allow_nulls,
                                          RobustnessFlag robust_flag,
                                          int* length_return) {
  return ToCString(allow_nulls, robust_flag, 0, -1, length_return);
}


// Returns a pointer to the two-byte character data beginning at |start|.
// Only valid for strings with a two-byte representation underneath.
const uc16* String::GetTwoByteData(unsigned start) {
  ASSERT(!IsOneByteRepresentationUnderneath());
  switch (StringShape(this).representation_tag()) {
    case kSeqStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGetData(start);
    case kExternalStringTag:
      return ExternalTwoByteString::cast(this)->
          ExternalTwoByteStringGetData(start);
    case kSlicedStringTag: {
      SlicedString* slice = SlicedString::cast(this);
      return slice->parent()->GetTwoByteData(start + slice->offset());
    }
    case kConsStringTag:
      UNREACHABLE();
      return NULL;
  }
  UNREACHABLE();
  return NULL;
}


// Converts this string to a NUL-terminated UC16 C string.
SmartArrayPointer<uc16> String::ToWideCString(RobustnessFlag robust_flag) {
  if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
    return SmartArrayPointer<uc16>();
  }
  Heap* heap = GetHeap();

  Access<ConsStringIteratorOp> op(
      heap->isolate()->objects_string_iterator());
  StringCharacterStream stream(this, op.value());

  uc16* result = NewArray<uc16>(length() + 1);

  int i = 0;
  while (stream.HasMore()) {
    uint16_t character =
        stream.GetNext();
    result[i++] = character;
  }
  result[i] = 0;
  return SmartArrayPointer<uc16>(result);
}


// Returns a pointer into the sequential two-byte character payload.
const uc16* SeqTwoByteString::SeqTwoByteStringGetData(unsigned start) {
  return reinterpret_cast<uc16*>(
      reinterpret_cast<char*>(this) - kHeapObjectTag + kHeaderSize) + start;
}


// Notifies every live Relocatable on this isolate that a GC happened, by
// walking the intrusive prev_ chain.
void Relocatable::PostGarbageCollectionProcessing(Isolate* isolate) {
  Relocatable* current = isolate->relocatable_top();
  while (current != NULL) {
    current->PostGarbageCollection();
    current = current->prev_;
  }
}


// Reserve space for statics needing saving and restoring.
int Relocatable::ArchiveSpacePerThread() {
  return sizeof(Relocatable*);  // NOLINT
}


// Archive statics that are thread local.
char* Relocatable::ArchiveState(Isolate* isolate, char* to) {
  *reinterpret_cast<Relocatable**>(to) = isolate->relocatable_top();
  isolate->set_relocatable_top(NULL);
  return to + ArchiveSpacePerThread();
}


// Restore statics that are thread local.
char* Relocatable::RestoreState(Isolate* isolate, char* from) {
  isolate->set_relocatable_top(*reinterpret_cast<Relocatable**>(from));
  return from + ArchiveSpacePerThread();
}


// Visits the Relocatable chain archived in |thread_storage|.
char* Relocatable::Iterate(ObjectVisitor* v, char* thread_storage) {
  Relocatable* top = *reinterpret_cast<Relocatable**>(thread_storage);
  Iterate(v, top);
  return thread_storage + ArchiveSpacePerThread();
}


// Visits the isolate's current Relocatable chain.
void Relocatable::Iterate(Isolate* isolate, ObjectVisitor* v) {
  Iterate(v, isolate->relocatable_top());
}


// Walks the intrusive list starting at |top| and visits each instance.
void Relocatable::Iterate(ObjectVisitor* v, Relocatable* top) {
  Relocatable* current = top;
  while (current != NULL) {
    current->IterateInstance(v);
    current = current->prev_;
  }
}


// Reader over a heap string; re-resolves its flat content after every GC
// (see PostGarbageCollection below).
FlatStringReader::FlatStringReader(Isolate* isolate, Handle<String> str)
    : Relocatable(isolate),
      str_(str.location()),
      length_(str->length()) {
  PostGarbageCollection();
}


// Reader over an external character buffer; str_ == 0 marks it as
// GC-independent, so PostGarbageCollection is a no-op for it.
FlatStringReader::FlatStringReader(Isolate* isolate, Vector<const char> input)
    : Relocatable(isolate),
      str_(0),
      is_ascii_(true),
      length_(input.length()),
      start_(input.start()) { }


// Re-reads the (possibly relocated) flat content pointer after a GC.
void FlatStringReader::PostGarbageCollection() {
  if (str_ == NULL) return;
  Handle<String> str(str_);
  ASSERT(str->IsFlat());
  DisallowHeapAllocation no_gc;
  // This does not actually prevent the vector from being relocated later.
String::FlatContent content = str->GetFlatContent();
  ASSERT(content.IsFlat());
  is_ascii_ = content.IsAscii();
  if (is_ascii_) {
    start_ = content.ToOneByteVector().start();
  } else {
    start_ = content.ToUC16Vector().start();
  }
}


// Begins iteration over |string| at *offset_out; returns the leaf string
// containing that offset and reports its type and length via the out params.
String* ConsStringIteratorOp::Operate(String* string,
                                      unsigned* offset_out,
                                      int32_t* type_out,
                                      unsigned* length_out) {
  ASSERT(string->IsConsString());
  ConsString* cons_string = ConsString::cast(string);
  // Set up search data.
  root_ = cons_string;
  consumed_ = *offset_out;
  // Now search.
  return Search(offset_out, type_out, length_out);
}


// Descends from root_ to the leaf containing offset consumed_, maintaining
// the explicit stack (frames_) used by later NextLeaf() calls.
String* ConsStringIteratorOp::Search(unsigned* offset_out,
                                     int32_t* type_out,
                                     unsigned* length_out) {
  ConsString* cons_string = root_;
  // Reset the stack, pushing the root string.
  depth_ = 1;
  maximum_depth_ = 1;
  frames_[0] = cons_string;
  const unsigned consumed = consumed_;
  unsigned offset = 0;
  while (true) {
    // Loop until the string is found which contains the target offset.
    String* string = cons_string->first();
    unsigned length = string->length();
    int32_t type;
    if (consumed < offset + length) {
      // Target offset is in the left branch.
      // Keep going if we're still in a ConsString.
      type = string->map()->instance_type();
      if ((type & kStringRepresentationMask) == kConsStringTag) {
        cons_string = ConsString::cast(string);
        PushLeft(cons_string);
        continue;
      }
      // Tell the stack we're done descending.
      AdjustMaximumDepth();
    } else {
      // Descend right.
      // Update progress through the string.
      offset += length;
      // Keep going if we're still in a ConsString.
string = cons_string->second();
      type = string->map()->instance_type();
      if ((type & kStringRepresentationMask) == kConsStringTag) {
        cons_string = ConsString::cast(string);
        PushRight(cons_string);
        // TODO(dcarney) Add back root optimization.
        continue;
      }
      // Need this to be updated for the current string.
      length = string->length();
      // Account for the possibility of an empty right leaf.
      // This happens only if we have asked for an offset outside the string.
      if (length == 0) {
        // Reset depth so future operations will return null immediately.
        Reset();
        return NULL;
      }
      // Tell the stack we're done descending.
      AdjustMaximumDepth();
      // Pop stack so next iteration is in correct place.
      Pop();
    }
    ASSERT(length != 0);
    // Adjust return values and exit.
    consumed_ = offset + length;
    *offset_out = consumed - offset;
    *type_out = type;
    *length_out = length;
    return string;
  }
  UNREACHABLE();
  return NULL;
}


// Advances to the next leaf in left-to-right order using the stack built by
// Search().  Sets *blew_stack when the tree is deeper than kStackSize and the
// caller must restart via Operate().
String* ConsStringIteratorOp::NextLeaf(bool* blew_stack,
                                       int32_t* type_out,
                                       unsigned* length_out) {
  while (true) {
    // Tree traversal complete.
    if (depth_ == 0) {
      *blew_stack = false;
      return NULL;
    }
    // We've lost track of higher nodes.
    if (maximum_depth_ - depth_ == kStackSize) {
      *blew_stack = true;
      return NULL;
    }
    // Go right.
    ConsString* cons_string = frames_[OffsetForDepth(depth_ - 1)];
    String* string = cons_string->second();
    int32_t type = string->map()->instance_type();
    if ((type & kStringRepresentationMask) != kConsStringTag) {
      // Pop stack so next iteration is in correct place.
      Pop();
      unsigned length = static_cast<unsigned>(string->length());
      // Could be a flattened ConsString.
if (length == 0) continue;
      *length_out = length;
      *type_out = type;
      consumed_ += length;
      return string;
    }
    cons_string = ConsString::cast(string);
    // TODO(dcarney) Add back root optimization.
    PushRight(cons_string);
    // Need to traverse all the way left.
    while (true) {
      // Continue left.
      string = cons_string->first();
      type = string->map()->instance_type();
      if ((type & kStringRepresentationMask) != kConsStringTag) {
        AdjustMaximumDepth();
        unsigned length = static_cast<unsigned>(string->length());
        ASSERT(length != 0);
        *length_out = length;
        *type_out = type;
        consumed_ += length;
        return string;
      }
      cons_string = ConsString::cast(string);
      PushLeft(cons_string);
    }
  }
  UNREACHABLE();
  return NULL;
}


// Character access for a cons string: walks down the tree iteratively,
// adjusting |index| when descending into a right child.
uint16_t ConsString::ConsStringGet(int index) {
  ASSERT(index >= 0 && index < this->length());

  // Check for a flattened cons string.
  if (second()->length() == 0) {
    String* left = first();
    return left->Get(index);
  }

  String* string = String::cast(this);

  while (true) {
    if (StringShape(string).IsCons()) {
      ConsString* cons_string = ConsString::cast(string);
      String* left = cons_string->first();
      if (left->length() > index) {
        string = left;
      } else {
        index -= left->length();
        string = cons_string->second();
      }
    } else {
      return string->Get(index);
    }
  }

  UNREACHABLE();
  return 0;
}


// A sliced string is a view into its parent, shifted by offset().
uint16_t SlicedString::SlicedStringGet(int index) {
  return parent()->Get(offset() + index);
}


// Copies characters [f, t) of |src| into |sink|, dispatching on the string's
// representation and recursing over the shorter side of cons strings so the
// iteration stays mostly loop-based for left-heavy trees.
template <typename sinkchar>
void String::WriteToFlat(String* src,
                         sinkchar* sink,
                         int f,
                         int t) {
  String* source = src;
  int from = f;
  int to = t;
  while (true) {
    ASSERT(0 <= from && from <= to && to <= source->length());
    switch
(StringShape(source).full_representation_tag()) {
      case kOneByteStringTag | kExternalStringTag: {
        CopyChars(sink,
                  ExternalAsciiString::cast(source)->GetChars() + from,
                  to - from);
        return;
      }
      case kTwoByteStringTag | kExternalStringTag: {
        const uc16* data =
            ExternalTwoByteString::cast(source)->GetChars();
        CopyChars(sink,
                  data + from,
                  to - from);
        return;
      }
      case kOneByteStringTag | kSeqStringTag: {
        CopyChars(sink,
                  SeqOneByteString::cast(source)->GetChars() + from,
                  to - from);
        return;
      }
      case kTwoByteStringTag | kSeqStringTag: {
        CopyChars(sink,
                  SeqTwoByteString::cast(source)->GetChars() + from,
                  to - from);
        return;
      }
      case kOneByteStringTag | kConsStringTag:
      case kTwoByteStringTag | kConsStringTag: {
        ConsString* cons_string = ConsString::cast(source);
        String* first = cons_string->first();
        int boundary = first->length();
        if (to - boundary >= boundary - from) {
          // Right hand side is longer. Recurse over left.
          if (from < boundary) {
            WriteToFlat(first, sink, from, boundary);
            sink += boundary - from;
            from = 0;
          } else {
            from -= boundary;
          }
          to -= boundary;
          source = cons_string->second();
        } else {
          // Left hand side is longer. Recurse over right.
          if (to > boundary) {
            String* second = cons_string->second();
            // When repeatedly appending to a string, we get a cons string that
            // is unbalanced to the left, a list, essentially. We inline the
            // common case of sequential ascii right child.
if (to - boundary == 1) {
              // Single appended character: write it directly.
              sink[boundary - from] = static_cast<sinkchar>(second->Get(0));
            } else if (second->IsSeqOneByteString()) {
              CopyChars(sink + boundary - from,
                        SeqOneByteString::cast(second)->GetChars(),
                        to - boundary);
            } else {
              WriteToFlat(second,
                          sink + boundary - from,
                          0,
                          to - boundary);
            }
            to = boundary;
          }
          source = first;
        }
        break;
      }
      case kOneByteStringTag | kSlicedStringTag:
      case kTwoByteStringTag | kSlicedStringTag: {
        SlicedString* slice = SlicedString::cast(source);
        unsigned offset = slice->offset();
        WriteToFlat(slice->parent(), sink, from + offset, to + offset);
        return;
      }
    }
  }
}


// Compares the contents of two strings by reading and comparing
// int-sized blocks of characters.
template <typename Char>
static inline bool CompareRawStringContents(const Char* const a,
                                            const Char* const b,
                                            int length) {
  int i = 0;
#ifndef V8_HOST_CAN_READ_UNALIGNED
  // If this architecture isn't comfortable reading unaligned ints
  // then we have to check that the strings are aligned before
  // comparing them blockwise.
  const int kAlignmentMask = sizeof(uint32_t) - 1;  // NOLINT
  uint32_t pa_addr = reinterpret_cast<uint32_t>(a);
  uint32_t pb_addr = reinterpret_cast<uint32_t>(b);
  if (((pa_addr & kAlignmentMask) | (pb_addr & kAlignmentMask)) == 0) {
#endif
    const int kStepSize = sizeof(int) / sizeof(Char);  // NOLINT
    int endpoint = length - kStepSize;
    // Compare blocks until we reach near the end of the string.
for (; i <= endpoint; i += kStepSize) {
      uint32_t wa = *reinterpret_cast<const uint32_t*>(a + i);
      uint32_t wb = *reinterpret_cast<const uint32_t*>(b + i);
      if (wa != wb) {
        return false;
      }
    }
#ifndef V8_HOST_CAN_READ_UNALIGNED
  }
#endif
  // Compare the remaining characters that didn't fit into a block.
  for (; i < length; i++) {
    if (a[i] != b[i]) {
      return false;
    }
  }
  return true;
}


// Element-wise comparison for mixed-width character buffers; the equal-width
// cases below specialize to the faster blockwise comparison.
template<typename Chars1, typename Chars2>
class RawStringComparator : public AllStatic {
 public:
  static inline bool compare(const Chars1* a, const Chars2* b, int len) {
    ASSERT(sizeof(Chars1) != sizeof(Chars2));
    for (int i = 0; i < len; i++) {
      if (a[i] != b[i]) {
        return false;
      }
    }
    return true;
  }
};


template<>
class RawStringComparator<uint16_t, uint16_t> {
 public:
  static inline bool compare(const uint16_t* a, const uint16_t* b, int len) {
    return CompareRawStringContents(a, b, len);
  }
};


template<>
class RawStringComparator<uint8_t, uint8_t> {
 public:
  static inline bool compare(const uint8_t* a, const uint8_t* b, int len) {
    return CompareRawStringContents(a, b, len);
  }
};


// Compares two (possibly cons) strings segment by segment.  Each State tracks
// the current leaf buffer of one string via a ConsStringIteratorOp.
class StringComparator {
  class State {
   public:
    explicit inline State(ConsStringIteratorOp* op)
      : op_(op), is_one_byte_(true), length_(0), buffer8_(NULL) {}

    // Positions the state at the first leaf segment of |string|.
    inline void Init(String* string, unsigned len) {
      op_->Reset();
      int32_t type = string->map()->instance_type();
      String::Visit(string, 0, *this, *op_, type, len);
    }

    inline void VisitOneByteString(const uint8_t* chars, unsigned length) {
      is_one_byte_ = true;
      buffer8_ = chars;
      length_ = length;
    }

    inline void VisitTwoByteString(const uint16_t* chars, unsigned length) {
      is_one_byte_ = false;
      buffer16_ = chars;
length_ = length;
    }

    // Consumes |consumed| characters, moving to the next leaf segment when
    // the current buffer is exhausted.
    void Advance(unsigned consumed) {
      ASSERT(consumed <= length_);
      // Still in buffer.
      if (length_ != consumed) {
        if (is_one_byte_) {
          buffer8_ += consumed;
        } else {
          buffer16_ += consumed;
        }
        length_ -= consumed;
        return;
      }
      // Advance state.
      ASSERT(op_->HasMore());
      int32_t type = 0;
      unsigned length = 0;
      String* next = op_->ContinueOperation(&type, &length);
      ASSERT(next != NULL);
      ConsStringNullOp null_op;
      String::Visit(next, 0, *this, null_op, type, length);
    }

    ConsStringIteratorOp* const op_;
    bool is_one_byte_;
    unsigned length_;
    // buffer8_/buffer16_ alias: which one is live is tracked by is_one_byte_.
    union {
      const uint8_t* buffer8_;
      const uint16_t* buffer16_;
    };

   private:
    DISALLOW_IMPLICIT_CONSTRUCTORS(State);
  };

 public:
  inline StringComparator(ConsStringIteratorOp* op_1,
                          ConsStringIteratorOp* op_2)
    : state_1_(op_1),
      state_2_(op_2) {
  }

  // Compares |to_check| characters from both states' current buffers,
  // reinterpreting buffer8_ at the requested character width.
  template<typename Chars1, typename Chars2>
  static inline bool Equals(State* state_1, State* state_2, unsigned to_check) {
    const Chars1* a = reinterpret_cast<const Chars1*>(state_1->buffer8_);
    const Chars2* b = reinterpret_cast<const Chars2*>(state_2->buffer8_);
    return RawStringComparator<Chars1, Chars2>::compare(a, b, to_check);
  }

  // True if the first |length| characters of the two strings are equal.
  bool Equals(unsigned length, String* string_1, String* string_2) {
    ASSERT(length != 0);
    state_1_.Init(string_1, length);
    state_2_.Init(string_2, length);
    while (true) {
      // Compare only up to the shorter of the two current segments.
      unsigned to_check = Min(state_1_.length_, state_2_.length_);
      ASSERT(to_check > 0 && to_check <= length);
      bool is_equal;
      if (state_1_.is_one_byte_) {
        if (state_2_.is_one_byte_) {
          is_equal = Equals<uint8_t, uint8_t>(&state_1_, &state_2_, to_check);
        } else {
          is_equal = Equals<uint8_t, uint16_t>(&state_1_, &state_2_, to_check);
        }
      } else {
        if (state_2_.is_one_byte_) {
          is_equal =
Equals<uint16_t, uint8_t>(&state_1_, &state_2_, to_check);
        } else {
          is_equal = Equals<uint16_t, uint16_t>(&state_1_, &state_2_, to_check);
        }
      }
      // Looping done.
      if (!is_equal) return false;
      length -= to_check;
      // Exit condition. Strings are equal.
      if (length == 0) return true;
      state_1_.Advance(to_check);
      state_2_.Advance(to_check);
    }
  }

 private:
  State state_1_;
  State state_2_;
  DISALLOW_IMPLICIT_CONSTRUCTORS(StringComparator);
};


// Full content comparison, used when identity and cheap checks have failed.
bool String::SlowEquals(String* other) {
  // Fast check: negative check with lengths.
  int len = length();
  if (len != other->length()) return false;
  if (len == 0) return true;

  // Fast check: if hash code is computed for both strings
  // a fast negative check can be performed.
  if (HasHashCode() && other->HasHashCode()) {
#ifdef ENABLE_SLOW_ASSERTS
    if (FLAG_enable_slow_asserts) {
      // Equal hashes must imply equal content; verify by character scan.
      if (Hash() != other->Hash()) {
        bool found_difference = false;
        for (int i = 0; i < len; i++) {
          if (Get(i) != other->Get(i)) {
            found_difference = true;
            break;
          }
        }
        ASSERT(found_difference);
      }
    }
#endif
    if (Hash() != other->Hash()) return false;
  }

  // We know the strings are both non-empty. Compare the first chars
  // before we try to flatten the strings.
  if (this->Get(0) != other->Get(0)) return false;

  String* lhs = this->TryFlattenGetString();
  String* rhs = other->TryFlattenGetString();

  // TODO(dcarney): Compare all types of flat strings with a Visitor.
if (StringShape(lhs).IsSequentialAscii() &&
      StringShape(rhs).IsSequentialAscii()) {
    // Fast path: both flat one-byte, compare raw bytes directly.
    const uint8_t* str1 = SeqOneByteString::cast(lhs)->GetChars();
    const uint8_t* str2 = SeqOneByteString::cast(rhs)->GetChars();
    return CompareRawStringContents(str1, str2, len);
  }

  Isolate* isolate = GetIsolate();
  StringComparator comparator(isolate->objects_string_compare_iterator_a(),
                              isolate->objects_string_compare_iterator_b());

  return comparator.Equals(static_cast<unsigned>(len), lhs, rhs);
}


// Swaps this string's map for the corresponding "undetectable" map, when one
// exists for its current representation.  Internalized strings are excluded.
bool String::MarkAsUndetectable() {
  if (StringShape(this).IsInternalized()) return false;

  Map* map = this->map();
  Heap* heap = GetHeap();
  if (map == heap->string_map()) {
    this->set_map(heap->undetectable_string_map());
    return true;
  } else if (map == heap->ascii_string_map()) {
    this->set_map(heap->undetectable_ascii_string_map());
    return true;
  }
  // Rest cannot be marked as undetectable.
  return false;
}


// Compares this string against UTF-8 data in |str|, decoding |str| on the
// fly.  With allow_prefix_match, |str| may stop before this string ends.
bool String::IsUtf8EqualTo(Vector<const char> str, bool allow_prefix_match) {
  int slen = length();
  // Can't check exact length equality, but we can check bounds.
int str_len = str.length();
  if (!allow_prefix_match &&
      (str_len < slen ||
          str_len > slen*static_cast<int>(unibrow::Utf8::kMaxEncodedSize))) {
    return false;
  }
  int i;
  unsigned remaining_in_str = static_cast<unsigned>(str_len);
  const uint8_t* utf8_data = reinterpret_cast<const uint8_t*>(str.start());
  for (i = 0; i < slen && remaining_in_str > 0; i++) {
    unsigned cursor = 0;
    uint32_t r = unibrow::Utf8::ValueOf(utf8_data, remaining_in_str, &cursor);
    ASSERT(cursor > 0 && cursor <= remaining_in_str);
    if (r > unibrow::Utf16::kMaxNonSurrogateCharCode) {
      // Supplementary-plane character: occupies two UTF-16 code units.
      if (i > slen - 1) return false;
      if (Get(i++) != unibrow::Utf16::LeadSurrogate(r)) return false;
      if (Get(i) != unibrow::Utf16::TrailSurrogate(r)) return false;
    } else {
      if (Get(i) != r) return false;
    }
    utf8_data += cursor;
    remaining_in_str -= cursor;
  }
  return (allow_prefix_match || i == slen) && remaining_in_str == 0;
}


// Compares against a one-byte (Latin-1) vector; uses the flat fast path when
// the content is available as a contiguous one-byte buffer.
bool String::IsOneByteEqualTo(Vector<const uint8_t> str) {
  int slen = length();
  if (str.length() != slen) return false;
  DisallowHeapAllocation no_gc;
  FlatContent content = GetFlatContent();
  if (content.IsAscii()) {
    return CompareChars(content.ToOneByteVector().start(),
                        str.start(), slen) == 0;
  }
  for (int i = 0; i < slen; i++) {
    if (Get(i) != static_cast<uint16_t>(str[i])) return false;
  }
  return true;
}


// Compares against a two-byte (UC16) vector, analogous to IsOneByteEqualTo.
bool String::IsTwoByteEqualTo(Vector<const uc16> str) {
  int slen = length();
  if (str.length() != slen) return false;
  DisallowHeapAllocation no_gc;
  FlatContent content = GetFlatContent();
  if (content.IsTwoByte()) {
    return CompareChars(content.ToUC16Vector().start(), str.start(), slen) == 0;
  }
  for (int i = 0; i < slen; i++) {
    if (Get(i) != str[i]) return false;
  }
  return true;
}


// Hashes a string by visiting its leaf segments without flattening it first.
class IteratingStringHasher: public StringHasher
{
 public:
  // Computes the hash field for |string| with the given |seed|.
  static inline uint32_t Hash(String* string, uint32_t seed) {
    const unsigned len = static_cast<unsigned>(string->length());
    IteratingStringHasher hasher(len, seed);
    if (hasher.has_trivial_hash()) {
      return hasher.GetHashField();
    }
    int32_t type = string->map()->instance_type();
    ConsStringNullOp null_op;
    String::Visit(string, 0, hasher, null_op, type, len);
    // Flat strings terminate immediately.
    if (hasher.consumed_ == len) {
      ASSERT(!string->IsConsString());
      return hasher.GetHashField();
    }
    ASSERT(string->IsConsString());
    // This is a ConsString, iterate across it.
    ConsStringIteratorOp op;
    unsigned offset = 0;
    unsigned leaf_length = len;
    string = op.Operate(string, &offset, &type, &leaf_length);
    while (true) {
      ASSERT(hasher.consumed_ < len);
      String::Visit(string, 0, hasher, null_op, type, leaf_length);
      if (hasher.consumed_ == len) break;
      string = op.ContinueOperation(&type, &leaf_length);
      // This should be taken care of by the length check.
      ASSERT(string != NULL);
    }
    return hasher.GetHashField();
  }
  inline void VisitOneByteString(const uint8_t* chars, unsigned length) {
    AddCharacters(chars, static_cast<int>(length));
    consumed_ += length;
  }
  inline void VisitTwoByteString(const uint16_t* chars, unsigned length) {
    AddCharacters(chars, static_cast<int>(length));
    consumed_ += length;
  }

 private:
  inline IteratingStringHasher(int len, uint32_t seed)
      : StringHasher(len, seed),
        consumed_(0) {}
  // Number of characters hashed so far; equals the string length when done.
  unsigned consumed_;
  DISALLOW_COPY_AND_ASSIGN(IteratingStringHasher);
};


// Computes, stores and returns this string's hash (sans field flags).
uint32_t String::ComputeAndSetHash() {
  // Should only be called if hash code has not yet been computed.
  ASSERT(!HasHashCode());

  // Store the hash code in the object.
uint32_t field = IteratingStringHasher::Hash(this, GetHeap()->HashSeed());
  set_hash_field(field);

  // Check the hash code is there.
  ASSERT(HasHashCode());
  uint32_t result = field >> kHashShift;
  ASSERT(result != 0);  // Ensure that the hash value of 0 is never computed.
  return result;
}


// Parses this string as an unsigned 32-bit array index.  Returns false for
// empty/over-long strings, non-digits, leading zeros, and overflow.
bool String::ComputeArrayIndex(uint32_t* index) {
  int length = this->length();
  if (length == 0 || length > kMaxArrayIndexSize) return false;
  ConsStringIteratorOp op;
  StringCharacterStream stream(this, &op);
  uint16_t ch = stream.GetNext();

  // If the string begins with a '0' character, it must only consist
  // of it to be a legal array index.
  if (ch == '0') {
    *index = 0;
    return length == 1;
  }

  // Convert string to uint32 array index; character by character.
  int d = ch - '0';
  if (d < 0 || d > 9) return false;
  uint32_t result = d;
  while (stream.HasMore()) {
    d = stream.GetNext() - '0';
    if (d < 0 || d > 9) return false;
    // Check that the new result is below the 32 bit limit.
    if (result > 429496729U - ((d > 5) ? 1 : 0)) return false;
    result = (result * 10) + d;
  }

  *index = result;
  return true;
}


// Like ComputeArrayIndex, but short strings reuse the index cached inside the
// hash field instead of re-parsing.
bool String::SlowAsArrayIndex(uint32_t* index) {
  if (length() <= kMaxCachedArrayIndexLength) {
    Hash();  // Force computation of hash code.
    uint32_t field = hash_field();
    if ((field & kIsNotArrayIndexMask) != 0) return false;
    // Isolate the array index from the full hash field.
*index = (kArrayIndexHashMask & field) >> kHashShift;
    return true;
  } else {
    return ComputeArrayIndex(index);
  }
}


// Shrinks a sequential string in place to |new_length| characters, releasing
// the tail either by lowering the new-space allocation top or by installing a
// filler object over the freed bytes.
Handle<String> SeqString::Truncate(Handle<SeqString> string, int new_length) {
  int new_size, old_size;
  int old_length = string->length();
  if (old_length <= new_length) return string;

  if (string->IsSeqOneByteString()) {
    old_size = SeqOneByteString::SizeFor(old_length);
    new_size = SeqOneByteString::SizeFor(new_length);
  } else {
    ASSERT(string->IsSeqTwoByteString());
    old_size = SeqTwoByteString::SizeFor(old_length);
    new_size = SeqTwoByteString::SizeFor(new_length);
  }

  int delta = old_size - new_size;
  string->set_length(new_length);

  Address start_of_string = string->address();
  ASSERT_OBJECT_ALIGNED(start_of_string);
  ASSERT_OBJECT_ALIGNED(start_of_string + new_size);

  Heap* heap = string->GetHeap();
  NewSpace* newspace = heap->new_space();
  if (newspace->Contains(start_of_string) &&
      newspace->top() == start_of_string + old_size) {
    // Last allocated object in new space. Simply lower allocation top.
    newspace->set_top(start_of_string + new_size);
  } else {
    // Sizes are pointer size aligned, so that we can use filler objects
    // that are a multiple of pointer size.
    heap->CreateFillerObjectAt(start_of_string + new_size, delta);
  }
  if (Marking::IsBlack(Marking::MarkBitFrom(start_of_string))) {
    // Keep live-byte accounting in sync with the released tail.
    MemoryChunk::IncrementLiveBytesFromMutator(start_of_string, -delta);
  }


  if (new_length == 0) return heap->isolate()->factory()->empty_string();
  return string;
}


AllocationMemento* AllocationMemento::FindForJSObject(JSObject* object,
                                                      bool in_GC) {
  // Currently, AllocationMemento objects are only allocated immediately
  // after JSArrays and some JSObjects in NewSpace.
// Detecting whether a
  // memento is present involves carefully checking the object immediately
  // after the current object (if there is one) to see if it's an
  // AllocationMemento.
  if (FLAG_track_allocation_sites && object->GetHeap()->InNewSpace(object)) {
    Address ptr_end = (reinterpret_cast<Address>(object) - kHeapObjectTag) +
        object->Size();
    Address top;
    if (in_GC) {
      top = object->GetHeap()->new_space()->FromSpacePageHigh();
    } else {
      top = object->GetHeap()->NewSpaceTop();
    }
    if ((ptr_end + AllocationMemento::kSize) <= top) {
      // There is room in newspace for allocation info. Do we have some?
      Map** possible_allocation_memento_map =
          reinterpret_cast<Map**>(ptr_end);
      if (*possible_allocation_memento_map ==
          object->GetHeap()->allocation_memento_map()) {
        AllocationMemento* memento = AllocationMemento::cast(
            reinterpret_cast<Object*>(ptr_end + kHeapObjectTag));
        if (memento->IsValid()) {
          return memento;
        }
      }
    }
  }
  return NULL;
}


uint32_t StringHasher::MakeArrayIndexHash(uint32_t value, int length) {
  // For array indexes mix the length into the hash as an array index could
  // be zero.
ASSERT(length > 0);
  ASSERT(length <= String::kMaxArrayIndexSize);
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));

  value <<= String::kHashShift;
  value |= length << String::kArrayIndexHashLengthShift;

  ASSERT((value & String::kIsNotArrayIndexMask) == 0);
  ASSERT((length > String::kMaxCachedArrayIndexLength) ||
         (value & String::kContainsCachedArrayIndexMask) == 0);
  return value;
}


// Assembles the final hash field: an array-index encoding when applicable,
// otherwise the running hash (or, for very long strings, the length itself).
uint32_t StringHasher::GetHashField() {
  if (length_ <= String::kMaxHashCalcLength) {
    if (is_array_index_) {
      return MakeArrayIndexHash(array_index_, length_);
    }
    return (GetHashCore(raw_running_hash_) << String::kHashShift) |
           String::kIsNotArrayIndexMask;
  } else {
    return (length_ << String::kHashShift) | String::kIsNotArrayIndexMask;
  }
}


// Hashes UTF-8 input as the UTF-16 string it decodes to, also reporting the
// decoded UTF-16 length via |utf16_length_out|.
uint32_t StringHasher::ComputeUtf8Hash(Vector<const char> chars,
                                       uint32_t seed,
                                       int* utf16_length_out) {
  int vector_length = chars.length();
  // Handle some edge cases.
  if (vector_length <= 1) {
    ASSERT(vector_length == 0 ||
           static_cast<uint8_t>(chars.start()[0]) <=
               unibrow::Utf8::kMaxOneByteChar);
    *utf16_length_out = vector_length;
    return HashSequentialString(chars.start(), vector_length, seed);
  }
  // Start with a fake length which won't affect computation.
  // It will be updated later.
StringHasher hasher(String::kMaxArrayIndexSize, seed);
  unsigned remaining = static_cast<unsigned>(vector_length);
  const uint8_t* stream = reinterpret_cast<const uint8_t*>(chars.start());
  int utf16_length = 0;
  bool is_index = true;
  ASSERT(hasher.is_array_index_);
  while (remaining > 0) {
    unsigned consumed = 0;
    uint32_t c = unibrow::Utf8::ValueOf(stream, remaining, &consumed);
    ASSERT(consumed > 0 && consumed <= remaining);
    stream += consumed;
    remaining -= consumed;
    // Characters outside the BMP expand to a surrogate pair in UTF-16.
    bool is_two_characters = c > unibrow::Utf16::kMaxNonSurrogateCharCode;
    utf16_length += is_two_characters ? 2 : 1;
    // No need to keep hashing. But we do need to calculate utf16_length.
    if (utf16_length > String::kMaxHashCalcLength) continue;
    if (is_two_characters) {
      uint16_t c1 = unibrow::Utf16::LeadSurrogate(c);
      uint16_t c2 = unibrow::Utf16::TrailSurrogate(c);
      hasher.AddCharacter(c1);
      hasher.AddCharacter(c2);
      if (is_index) is_index = hasher.UpdateIndex(c1);
      if (is_index) is_index = hasher.UpdateIndex(c2);
    } else {
      hasher.AddCharacter(c);
      if (is_index) is_index = hasher.UpdateIndex(c);
    }
  }
  *utf16_length_out = static_cast<int>(utf16_length);
  // Must set length here so that hash computation is correct.
hasher.length_ = utf16_length;
  return hasher.GetHashField();
}


// Returns the substring [start, end); returns |this| when the range covers
// the whole string.
MaybeObject* String::SubString(int start, int end, PretenureFlag pretenure) {
  Heap* heap = GetHeap();
  if (start == 0 && end == length()) return this;
  MaybeObject* result = heap->AllocateSubString(this, start, end, pretenure);
  return result;
}


// Writes this string's characters to |file| one at a time.
void String::PrintOn(FILE* file) {
  int length = this->length();
  for (int i = 0; i < length; i++) {
    PrintF(file, "%c", Get(i));
  }
}


// Shrinks the descriptor array's enum cache (and enum indices cache) to the
// number of live enumerable properties of |map|.
static void TrimEnumCache(Heap* heap, Map* map, DescriptorArray* descriptors) {
  int live_enum = map->EnumLength();
  if (live_enum == kInvalidEnumCacheSentinel) {
    live_enum = map->NumberOfDescribedProperties(OWN_DESCRIPTORS, DONT_ENUM);
  }
  if (live_enum == 0) return descriptors->ClearEnumCache();

  FixedArray* enum_cache = descriptors->GetEnumCache();

  int to_trim = enum_cache->length() - live_enum;
  if (to_trim <= 0) return;
  RightTrimFixedArray<FROM_GC>(heap, descriptors->GetEnumCache(), to_trim);

  if (!descriptors->HasEnumIndicesCache()) return;
  FixedArray* enum_indices_cache = descriptors->GetEnumIndicesCache();
  RightTrimFixedArray<FROM_GC>(heap, enum_indices_cache, to_trim);
}


// Trims |descriptors| down to |number_of_own_descriptors| entries, trimming
// the enum cache to match and restoring sorted order afterwards.
static void TrimDescriptorArray(Heap* heap,
                                Map* map,
                                DescriptorArray* descriptors,
                                int number_of_own_descriptors) {
  int number_of_descriptors = descriptors->number_of_descriptors_storage();
  int to_trim = number_of_descriptors - number_of_own_descriptors;
  if (to_trim == 0) return;

  RightTrimFixedArray<FROM_GC>(
      heap, descriptors, to_trim * DescriptorArray::kDescriptorSize);
  descriptors->SetNumberOfDescriptors(number_of_own_descriptors);

  if (descriptors->HasEnumCache()) TrimEnumCache(heap, map, descriptors);
  descriptors->Sort();
}


// Clear a possible back pointer in case the transition
// leads to a dead map.
// Return true in case a back pointer has been cleared and false otherwise.
static bool ClearBackPointer(Heap* heap, Map* target) {
  // A set mark bit means the target map is live; leave its back pointer.
  if (Marking::MarkBitFrom(target).Get()) return false;
  target->SetBackPointer(heap->undefined_value(), SKIP_WRITE_BARRIER);
  return true;
}


// TODO(mstarzinger): This method should be moved into MarkCompactCollector,
// because it cannot be called from outside the GC and we already have methods
// depending on the transitions layout in the GC anyways.
void Map::ClearNonLiveTransitions(Heap* heap) {
  // If there are no transitions to be cleared, return.
  // TODO(verwaest) Should be an assert, otherwise back pointers are not
  // properly cleared.
  if (!HasTransitionArray()) return;

  TransitionArray* t = transitions();
  MarkCompactCollector* collector = heap->mark_compact_collector();

  int transition_index = 0;

  DescriptorArray* descriptors = instance_descriptors();
  bool descriptors_owner_died = false;

  // Compact all live descriptors to the left.
  for (int i = 0; i < t->number_of_transitions(); ++i) {
    Map* target = t->GetTarget(i);
    if (ClearBackPointer(heap, target)) {
      // Dead transition target; remember if it owned our descriptor array.
      if (target->instance_descriptors() == descriptors) {
        descriptors_owner_died = true;
      }
    } else {
      if (i != transition_index) {
        Name* key = t->GetKey(i);
        t->SetKey(transition_index, key);
        Object** key_slot = t->GetKeySlot(transition_index);
        collector->RecordSlot(key_slot, key_slot, key);
        // Target slots do not need to be recorded since maps are not compacted.
        t->SetTarget(transition_index, t->GetTarget(i));
      }
      transition_index++;
    }
  }

  // If there are no transitions to be cleared, return.
  // TODO(verwaest) Should be an assert, otherwise back pointers are not
  // properly cleared.
if (transition_index == t->number_of_transitions()) return;

  int number_of_own_descriptors = NumberOfOwnDescriptors();

  if (descriptors_owner_died) {
    if (number_of_own_descriptors > 0) {
      // The previous owner died; take over the (trimmed) descriptor array.
      TrimDescriptorArray(heap, this, descriptors, number_of_own_descriptors);
      ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors);
      set_owns_descriptors(true);
    } else {
      ASSERT(descriptors == GetHeap()->empty_descriptor_array());
    }
  }

  // Trim the transition array down to the surviving transitions.
  int trim = t->number_of_transitions() - transition_index;
  if (trim > 0) {
    RightTrimFixedArray<FROM_GC>(heap, t, t->IsSimpleTransition()
        ? trim : trim * TransitionArray::kTransitionSize);
  }
}


int Map::Hash() {
  // For performance reasons we only hash the 3 most variable fields of a map:
  // constructor, prototype and bit_field2.

  // Shift away the tag.
  int hash = (static_cast<uint32_t>(
        reinterpret_cast<uintptr_t>(constructor())) >> 2);

  // XOR-ing the prototype and constructor directly yields too many zero bits
  // when the two pointers are close (which is fairly common).
  // To avoid this we shift the prototype 4 bits relatively to the constructor.
9438 hash ^= (static_cast<uint32_t>( 9439 reinterpret_cast<uintptr_t>(prototype())) << 2); 9440 9441 return hash ^ (hash >> 16) ^ bit_field2(); 9442 } 9443 9444 9445 static bool CheckEquivalent(Map* first, Map* second) { 9446 return 9447 first->constructor() == second->constructor() && 9448 first->prototype() == second->prototype() && 9449 first->instance_type() == second->instance_type() && 9450 first->bit_field() == second->bit_field() && 9451 first->bit_field2() == second->bit_field2() && 9452 first->is_observed() == second->is_observed() && 9453 first->function_with_prototype() == second->function_with_prototype(); 9454 } 9455 9456 9457 bool Map::EquivalentToForTransition(Map* other) { 9458 return CheckEquivalent(this, other); 9459 } 9460 9461 9462 bool Map::EquivalentToForNormalization(Map* other, 9463 PropertyNormalizationMode mode) { 9464 int properties = mode == CLEAR_INOBJECT_PROPERTIES 9465 ? 0 : other->inobject_properties(); 9466 return CheckEquivalent(this, other) && inobject_properties() == properties; 9467 } 9468 9469 9470 void ConstantPoolArray::ConstantPoolIterateBody(ObjectVisitor* v) { 9471 int first_ptr_offset = OffsetOfElementAt(first_ptr_index()); 9472 int last_ptr_offset = 9473 OffsetOfElementAt(first_ptr_index() + count_of_ptr_entries()); 9474 v->VisitPointers( 9475 HeapObject::RawField(this, first_ptr_offset), 9476 HeapObject::RawField(this, last_ptr_offset)); 9477 } 9478 9479 9480 void JSFunction::JSFunctionIterateBody(int object_size, ObjectVisitor* v) { 9481 // Iterate over all fields in the body but take care in dealing with 9482 // the code entry. 
9483 IteratePointers(v, kPropertiesOffset, kCodeEntryOffset); 9484 v->VisitCodeEntry(this->address() + kCodeEntryOffset); 9485 IteratePointers(v, kCodeEntryOffset + kPointerSize, object_size); 9486 } 9487 9488 9489 void JSFunction::MarkForLazyRecompilation() { 9490 ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints()); 9491 ASSERT(!IsOptimized()); 9492 ASSERT(shared()->allows_lazy_compilation() || 9493 code()->optimizable()); 9494 ASSERT(!shared()->is_generator()); 9495 set_code_no_write_barrier( 9496 GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile)); 9497 // No write barrier required, since the builtin is part of the root set. 9498 } 9499 9500 9501 void JSFunction::MarkForConcurrentRecompilation() { 9502 ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints()); 9503 ASSERT(!IsOptimized()); 9504 ASSERT(shared()->allows_lazy_compilation() || code()->optimizable()); 9505 ASSERT(!shared()->is_generator()); 9506 ASSERT(GetIsolate()->concurrent_recompilation_enabled()); 9507 if (FLAG_trace_concurrent_recompilation) { 9508 PrintF(" ** Marking "); 9509 PrintName(); 9510 PrintF(" for concurrent recompilation.\n"); 9511 } 9512 set_code_no_write_barrier( 9513 GetIsolate()->builtins()->builtin(Builtins::kConcurrentRecompile)); 9514 // No write barrier required, since the builtin is part of the root set. 9515 } 9516 9517 9518 void JSFunction::MarkInRecompileQueue() { 9519 // We can only arrive here via the concurrent-recompilation builtin. If 9520 // break points were set, the code would point to the lazy-compile builtin. 
9521 ASSERT(!GetIsolate()->DebuggerHasBreakPoints()); 9522 ASSERT(IsMarkedForConcurrentRecompilation() && !IsOptimized()); 9523 ASSERT(shared()->allows_lazy_compilation() || code()->optimizable()); 9524 ASSERT(GetIsolate()->concurrent_recompilation_enabled()); 9525 if (FLAG_trace_concurrent_recompilation) { 9526 PrintF(" ** Queueing "); 9527 PrintName(); 9528 PrintF(" for concurrent recompilation.\n"); 9529 } 9530 set_code_no_write_barrier( 9531 GetIsolate()->builtins()->builtin(Builtins::kInRecompileQueue)); 9532 // No write barrier required, since the builtin is part of the root set. 9533 } 9534 9535 9536 static bool CompileLazyHelper(CompilationInfo* info, 9537 ClearExceptionFlag flag) { 9538 // Compile the source information to a code object. 9539 ASSERT(info->IsOptimizing() || !info->shared_info()->is_compiled()); 9540 ASSERT(!info->isolate()->has_pending_exception()); 9541 bool result = Compiler::CompileLazy(info); 9542 ASSERT(result != info->isolate()->has_pending_exception()); 9543 if (!result && flag == CLEAR_EXCEPTION) { 9544 info->isolate()->clear_pending_exception(); 9545 } 9546 return result; 9547 } 9548 9549 9550 bool SharedFunctionInfo::CompileLazy(Handle<SharedFunctionInfo> shared, 9551 ClearExceptionFlag flag) { 9552 ASSERT(shared->allows_lazy_compilation_without_context()); 9553 CompilationInfoWithZone info(shared); 9554 return CompileLazyHelper(&info, flag); 9555 } 9556 9557 9558 void SharedFunctionInfo::AddToOptimizedCodeMap( 9559 Handle<SharedFunctionInfo> shared, 9560 Handle<Context> native_context, 9561 Handle<Code> code, 9562 Handle<FixedArray> literals) { 9563 CALL_HEAP_FUNCTION_VOID( 9564 shared->GetIsolate(), 9565 shared->AddToOptimizedCodeMap(*native_context, *code, *literals)); 9566 } 9567 9568 9569 MaybeObject* SharedFunctionInfo::AddToOptimizedCodeMap(Context* native_context, 9570 Code* code, 9571 FixedArray* literals) { 9572 ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION); 9573 ASSERT(native_context->IsNativeContext()); 9574 
STATIC_ASSERT(kEntryLength == 3); 9575 Heap* heap = GetHeap(); 9576 FixedArray* new_code_map; 9577 Object* value = optimized_code_map(); 9578 if (value->IsSmi()) { 9579 // No optimized code map. 9580 ASSERT_EQ(0, Smi::cast(value)->value()); 9581 // Create 3 entries per context {context, code, literals}. 9582 MaybeObject* maybe = heap->AllocateFixedArray(kInitialLength); 9583 if (!maybe->To(&new_code_map)) return maybe; 9584 new_code_map->set(kEntriesStart + 0, native_context); 9585 new_code_map->set(kEntriesStart + 1, code); 9586 new_code_map->set(kEntriesStart + 2, literals); 9587 } else { 9588 // Copy old map and append one new entry. 9589 FixedArray* old_code_map = FixedArray::cast(value); 9590 ASSERT_EQ(-1, SearchOptimizedCodeMap(native_context)); 9591 int old_length = old_code_map->length(); 9592 int new_length = old_length + kEntryLength; 9593 MaybeObject* maybe = old_code_map->CopySize(new_length); 9594 if (!maybe->To(&new_code_map)) return maybe; 9595 new_code_map->set(old_length + 0, native_context); 9596 new_code_map->set(old_length + 1, code); 9597 new_code_map->set(old_length + 2, literals); 9598 // Zap the old map for the sake of the heap verifier. 
9599 if (Heap::ShouldZapGarbage()) { 9600 Object** data = old_code_map->data_start(); 9601 MemsetPointer(data, heap->the_hole_value(), old_length); 9602 } 9603 } 9604 #ifdef DEBUG 9605 for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) { 9606 ASSERT(new_code_map->get(i)->IsNativeContext()); 9607 ASSERT(new_code_map->get(i + 1)->IsCode()); 9608 ASSERT(Code::cast(new_code_map->get(i + 1))->kind() == 9609 Code::OPTIMIZED_FUNCTION); 9610 ASSERT(new_code_map->get(i + 2)->IsFixedArray()); 9611 } 9612 #endif 9613 set_optimized_code_map(new_code_map); 9614 return new_code_map; 9615 } 9616 9617 9618 void SharedFunctionInfo::InstallFromOptimizedCodeMap(JSFunction* function, 9619 int index) { 9620 ASSERT(index > kEntriesStart); 9621 FixedArray* code_map = FixedArray::cast(optimized_code_map()); 9622 if (!bound()) { 9623 FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1)); 9624 ASSERT(cached_literals != NULL); 9625 function->set_literals(cached_literals); 9626 } 9627 Code* code = Code::cast(code_map->get(index)); 9628 ASSERT(code != NULL); 9629 ASSERT(function->context()->native_context() == code_map->get(index - 1)); 9630 function->ReplaceCode(code); 9631 } 9632 9633 9634 void SharedFunctionInfo::ClearOptimizedCodeMap() { 9635 FixedArray* code_map = FixedArray::cast(optimized_code_map()); 9636 9637 // If the next map link slot is already used then the function was 9638 // enqueued with code flushing and we remove it now. 
9639 if (!code_map->get(kNextMapIndex)->IsUndefined()) { 9640 CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher(); 9641 flusher->EvictOptimizedCodeMap(this); 9642 } 9643 9644 ASSERT(code_map->get(kNextMapIndex)->IsUndefined()); 9645 set_optimized_code_map(Smi::FromInt(0)); 9646 } 9647 9648 9649 void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code, 9650 const char* reason) { 9651 if (optimized_code_map()->IsSmi()) return; 9652 9653 int i; 9654 bool removed_entry = false; 9655 FixedArray* code_map = FixedArray::cast(optimized_code_map()); 9656 for (i = kEntriesStart; i < code_map->length(); i += kEntryLength) { 9657 ASSERT(code_map->get(i)->IsNativeContext()); 9658 if (Code::cast(code_map->get(i + 1)) == optimized_code) { 9659 if (FLAG_trace_opt) { 9660 PrintF("[evicting entry from optimizing code map (%s) for ", reason); 9661 ShortPrint(); 9662 PrintF("]\n"); 9663 } 9664 removed_entry = true; 9665 break; 9666 } 9667 } 9668 while (i < (code_map->length() - kEntryLength)) { 9669 code_map->set(i, code_map->get(i + kEntryLength)); 9670 code_map->set(i + 1, code_map->get(i + 1 + kEntryLength)); 9671 code_map->set(i + 2, code_map->get(i + 2 + kEntryLength)); 9672 i += kEntryLength; 9673 } 9674 if (removed_entry) { 9675 // Always trim even when array is cleared because of heap verifier. 9676 RightTrimFixedArray<FROM_MUTATOR>(GetHeap(), code_map, kEntryLength); 9677 if (code_map->length() == kEntriesStart) { 9678 ClearOptimizedCodeMap(); 9679 } 9680 } 9681 } 9682 9683 9684 void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) { 9685 FixedArray* code_map = FixedArray::cast(optimized_code_map()); 9686 ASSERT(shrink_by % kEntryLength == 0); 9687 ASSERT(shrink_by <= code_map->length() - kEntriesStart); 9688 // Always trim even when array is cleared because of heap verifier. 
9689 RightTrimFixedArray<FROM_GC>(GetHeap(), code_map, shrink_by); 9690 if (code_map->length() == kEntriesStart) { 9691 ClearOptimizedCodeMap(); 9692 } 9693 } 9694 9695 9696 bool JSFunction::CompileLazy(Handle<JSFunction> function, 9697 ClearExceptionFlag flag) { 9698 bool result = true; 9699 if (function->shared()->is_compiled()) { 9700 function->ReplaceCode(function->shared()->code()); 9701 } else { 9702 ASSERT(function->shared()->allows_lazy_compilation()); 9703 CompilationInfoWithZone info(function); 9704 result = CompileLazyHelper(&info, flag); 9705 ASSERT(!result || function->is_compiled()); 9706 } 9707 return result; 9708 } 9709 9710 9711 Handle<Code> JSFunction::CompileOsr(Handle<JSFunction> function, 9712 BailoutId osr_ast_id, 9713 ClearExceptionFlag flag) { 9714 CompilationInfoWithZone info(function); 9715 info.SetOptimizing(osr_ast_id); 9716 if (CompileLazyHelper(&info, flag)) { 9717 // TODO(titzer): don't install the OSR code. 9718 // ASSERT(function->code() != *info.code()); 9719 return info.code(); 9720 } else { 9721 return Handle<Code>::null(); 9722 } 9723 } 9724 9725 9726 bool JSFunction::CompileOptimized(Handle<JSFunction> function, 9727 ClearExceptionFlag flag) { 9728 CompilationInfoWithZone info(function); 9729 info.SetOptimizing(BailoutId::None()); 9730 return CompileLazyHelper(&info, flag); 9731 } 9732 9733 9734 bool JSFunction::EnsureCompiled(Handle<JSFunction> function, 9735 ClearExceptionFlag flag) { 9736 return function->is_compiled() || CompileLazy(function, flag); 9737 } 9738 9739 9740 void JSObject::OptimizeAsPrototype(Handle<JSObject> object) { 9741 if (object->IsGlobalObject()) return; 9742 9743 // Make sure prototypes are fast objects and their maps have the bit set 9744 // so they remain fast. 
9745 if (!object->HasFastProperties()) { 9746 TransformToFastProperties(object, 0); 9747 } 9748 } 9749 9750 9751 static MUST_USE_RESULT MaybeObject* CacheInitialJSArrayMaps( 9752 Context* native_context, Map* initial_map) { 9753 // Replace all of the cached initial array maps in the native context with 9754 // the appropriate transitioned elements kind maps. 9755 Heap* heap = native_context->GetHeap(); 9756 MaybeObject* maybe_maps = 9757 heap->AllocateFixedArrayWithHoles(kElementsKindCount, TENURED); 9758 FixedArray* maps; 9759 if (!maybe_maps->To(&maps)) return maybe_maps; 9760 9761 Map* current_map = initial_map; 9762 ElementsKind kind = current_map->elements_kind(); 9763 ASSERT(kind == GetInitialFastElementsKind()); 9764 maps->set(kind, current_map); 9765 for (int i = GetSequenceIndexFromFastElementsKind(kind) + 1; 9766 i < kFastElementsKindCount; ++i) { 9767 Map* new_map; 9768 ElementsKind next_kind = GetFastElementsKindFromSequenceIndex(i); 9769 if (current_map->HasElementsTransition()) { 9770 new_map = current_map->elements_transition_map(); 9771 ASSERT(new_map->elements_kind() == next_kind); 9772 } else { 9773 MaybeObject* maybe_new_map = 9774 current_map->CopyAsElementsKind(next_kind, INSERT_TRANSITION); 9775 if (!maybe_new_map->To(&new_map)) return maybe_new_map; 9776 } 9777 maps->set(next_kind, new_map); 9778 current_map = new_map; 9779 } 9780 native_context->set_js_array_maps(maps); 9781 return initial_map; 9782 } 9783 9784 9785 Handle<Object> CacheInitialJSArrayMaps(Handle<Context> native_context, 9786 Handle<Map> initial_map) { 9787 CALL_HEAP_FUNCTION(native_context->GetIsolate(), 9788 CacheInitialJSArrayMaps(*native_context, *initial_map), 9789 Object); 9790 } 9791 9792 9793 void JSFunction::SetInstancePrototype(Handle<JSFunction> function, 9794 Handle<Object> value) { 9795 ASSERT(value->IsJSReceiver()); 9796 9797 // First some logic for the map of the prototype to make sure it is in fast 9798 // mode. 
9799 if (value->IsJSObject()) { 9800 JSObject::OptimizeAsPrototype(Handle<JSObject>::cast(value)); 9801 } 9802 9803 // Now some logic for the maps of the objects that are created by using this 9804 // function as a constructor. 9805 if (function->has_initial_map()) { 9806 // If the function has allocated the initial map replace it with a 9807 // copy containing the new prototype. Also complete any in-object 9808 // slack tracking that is in progress at this point because it is 9809 // still tracking the old copy. 9810 if (function->shared()->IsInobjectSlackTrackingInProgress()) { 9811 function->shared()->CompleteInobjectSlackTracking(); 9812 } 9813 Handle<Map> new_map = Map::Copy(handle(function->initial_map())); 9814 new_map->set_prototype(*value); 9815 9816 // If the function is used as the global Array function, cache the 9817 // initial map (and transitioned versions) in the native context. 9818 Context* native_context = function->context()->native_context(); 9819 Object* array_function = native_context->get(Context::ARRAY_FUNCTION_INDEX); 9820 if (array_function->IsJSFunction() && 9821 *function == JSFunction::cast(array_function)) { 9822 CacheInitialJSArrayMaps(handle(native_context), new_map); 9823 } 9824 9825 function->set_initial_map(*new_map); 9826 } else { 9827 // Put the value in the initial map field until an initial map is 9828 // needed. At that point, a new initial map is created and the 9829 // prototype is put into the initial map where it belongs. 9830 function->set_prototype_or_initial_map(*value); 9831 } 9832 function->GetHeap()->ClearInstanceofCache(); 9833 } 9834 9835 9836 void JSFunction::SetPrototype(Handle<JSFunction> function, 9837 Handle<Object> value) { 9838 ASSERT(function->should_have_prototype()); 9839 Handle<Object> construct_prototype = value; 9840 9841 // If the value is not a JSReceiver, store the value in the map's 9842 // constructor field so it can be accessed. 
Also, set the prototype 9843 // used for constructing objects to the original object prototype. 9844 // See ECMA-262 13.2.2. 9845 if (!value->IsJSReceiver()) { 9846 // Copy the map so this does not affect unrelated functions. 9847 // Remove map transitions because they point to maps with a 9848 // different prototype. 9849 Handle<Map> new_map = Map::Copy(handle(function->map())); 9850 9851 function->set_map(*new_map); 9852 new_map->set_constructor(*value); 9853 new_map->set_non_instance_prototype(true); 9854 Isolate* isolate = new_map->GetIsolate(); 9855 construct_prototype = handle( 9856 isolate->context()->native_context()->initial_object_prototype(), 9857 isolate); 9858 } else { 9859 function->map()->set_non_instance_prototype(false); 9860 } 9861 9862 return SetInstancePrototype(function, construct_prototype); 9863 } 9864 9865 9866 void JSFunction::RemovePrototype() { 9867 Context* native_context = context()->native_context(); 9868 Map* no_prototype_map = shared()->is_classic_mode() 9869 ? native_context->function_without_prototype_map() 9870 : native_context->strict_mode_function_without_prototype_map(); 9871 9872 if (map() == no_prototype_map) return; 9873 9874 ASSERT(map() == (shared()->is_classic_mode() 9875 ? native_context->function_map() 9876 : native_context->strict_mode_function_map())); 9877 9878 set_map(no_prototype_map); 9879 set_prototype_or_initial_map(no_prototype_map->GetHeap()->the_hole_value()); 9880 } 9881 9882 9883 void JSFunction::EnsureHasInitialMap(Handle<JSFunction> function) { 9884 if (function->has_initial_map()) return; 9885 Isolate* isolate = function->GetIsolate(); 9886 9887 // First create a new map with the size and number of in-object properties 9888 // suggested by the function. 
9889 InstanceType instance_type; 9890 int instance_size; 9891 int in_object_properties; 9892 if (function->shared()->is_generator()) { 9893 instance_type = JS_GENERATOR_OBJECT_TYPE; 9894 instance_size = JSGeneratorObject::kSize; 9895 in_object_properties = 0; 9896 } else { 9897 instance_type = JS_OBJECT_TYPE; 9898 instance_size = function->shared()->CalculateInstanceSize(); 9899 in_object_properties = function->shared()->CalculateInObjectProperties(); 9900 } 9901 Handle<Map> map = isolate->factory()->NewMap(instance_type, instance_size); 9902 9903 // Fetch or allocate prototype. 9904 Handle<Object> prototype; 9905 if (function->has_instance_prototype()) { 9906 prototype = handle(function->instance_prototype(), isolate); 9907 } else { 9908 prototype = isolate->factory()->NewFunctionPrototype(function); 9909 } 9910 map->set_inobject_properties(in_object_properties); 9911 map->set_unused_property_fields(in_object_properties); 9912 map->set_prototype(*prototype); 9913 ASSERT(map->has_fast_object_elements()); 9914 9915 if (!function->shared()->is_generator()) { 9916 function->shared()->StartInobjectSlackTracking(*map); 9917 } 9918 9919 // Finally link initial map and constructor function. 
9920 function->set_initial_map(*map); 9921 map->set_constructor(*function); 9922 } 9923 9924 9925 void JSFunction::SetInstanceClassName(String* name) { 9926 shared()->set_instance_class_name(name); 9927 } 9928 9929 9930 void JSFunction::PrintName(FILE* out) { 9931 SmartArrayPointer<char> name = shared()->DebugName()->ToCString(); 9932 PrintF(out, "%s", *name); 9933 } 9934 9935 9936 Context* JSFunction::NativeContextFromLiterals(FixedArray* literals) { 9937 return Context::cast(literals->get(JSFunction::kLiteralNativeContextIndex)); 9938 } 9939 9940 9941 // The filter is a pattern that matches function names in this way: 9942 // "*" all; the default 9943 // "-" all but the top-level function 9944 // "-name" all but the function "name" 9945 // "" only the top-level function 9946 // "name" only the function "name" 9947 // "name*" only functions starting with "name" 9948 bool JSFunction::PassesFilter(const char* raw_filter) { 9949 if (*raw_filter == '*') return true; 9950 String* name = shared()->DebugName(); 9951 Vector<const char> filter = CStrVector(raw_filter); 9952 if (filter.length() == 0) return name->length() == 0; 9953 if (filter[0] == '-') { 9954 if (filter.length() == 1) { 9955 return (name->length() != 0); 9956 } else if (!name->IsUtf8EqualTo(filter.SubVector(1, filter.length()))) { 9957 return true; 9958 } 9959 } else if (name->IsUtf8EqualTo(filter)) { 9960 return true; 9961 } 9962 if (filter[filter.length() - 1] == '*' && 9963 name->IsUtf8EqualTo(filter.SubVector(0, filter.length() - 1), true)) { 9964 return true; 9965 } 9966 return false; 9967 } 9968 9969 9970 MaybeObject* Oddball::Initialize(Heap* heap, 9971 const char* to_string, 9972 Object* to_number, 9973 byte kind) { 9974 String* internalized_to_string; 9975 { MaybeObject* maybe_string = 9976 heap->InternalizeUtf8String( 9977 CStrVector(to_string)); 9978 if (!maybe_string->To(&internalized_to_string)) return maybe_string; 9979 } 9980 set_to_string(internalized_to_string); 9981 
set_to_number(to_number); 9982 set_kind(kind); 9983 return this; 9984 } 9985 9986 9987 String* SharedFunctionInfo::DebugName() { 9988 Object* n = name(); 9989 if (!n->IsString() || String::cast(n)->length() == 0) return inferred_name(); 9990 return String::cast(n); 9991 } 9992 9993 9994 bool SharedFunctionInfo::HasSourceCode() { 9995 return !script()->IsUndefined() && 9996 !reinterpret_cast<Script*>(script())->source()->IsUndefined(); 9997 } 9998 9999 10000 Handle<Object> SharedFunctionInfo::GetSourceCode() { 10001 if (!HasSourceCode()) return GetIsolate()->factory()->undefined_value(); 10002 Handle<String> source(String::cast(Script::cast(script())->source())); 10003 return GetIsolate()->factory()->NewSubString( 10004 source, start_position(), end_position()); 10005 } 10006 10007 10008 bool SharedFunctionInfo::IsInlineable() { 10009 // Check that the function has a script associated with it. 10010 if (!script()->IsScript()) return false; 10011 if (optimization_disabled()) return false; 10012 // If we never ran this (unlikely) then lets try to optimize it. 10013 if (code()->kind() != Code::FUNCTION) return true; 10014 return code()->optimizable(); 10015 } 10016 10017 10018 int SharedFunctionInfo::SourceSize() { 10019 return end_position() - start_position(); 10020 } 10021 10022 10023 int SharedFunctionInfo::CalculateInstanceSize() { 10024 int instance_size = 10025 JSObject::kHeaderSize + 10026 expected_nof_properties() * kPointerSize; 10027 if (instance_size > JSObject::kMaxInstanceSize) { 10028 instance_size = JSObject::kMaxInstanceSize; 10029 } 10030 return instance_size; 10031 } 10032 10033 10034 int SharedFunctionInfo::CalculateInObjectProperties() { 10035 return (CalculateInstanceSize() - JSObject::kHeaderSize) / kPointerSize; 10036 } 10037 10038 10039 // Support function for printing the source code to a StringStream 10040 // without any allocation in the heap. 
10041 void SharedFunctionInfo::SourceCodePrint(StringStream* accumulator, 10042 int max_length) { 10043 // For some native functions there is no source. 10044 if (!HasSourceCode()) { 10045 accumulator->Add("<No Source>"); 10046 return; 10047 } 10048 10049 // Get the source for the script which this function came from. 10050 // Don't use String::cast because we don't want more assertion errors while 10051 // we are already creating a stack dump. 10052 String* script_source = 10053 reinterpret_cast<String*>(Script::cast(script())->source()); 10054 10055 if (!script_source->LooksValid()) { 10056 accumulator->Add("<Invalid Source>"); 10057 return; 10058 } 10059 10060 if (!is_toplevel()) { 10061 accumulator->Add("function "); 10062 Object* name = this->name(); 10063 if (name->IsString() && String::cast(name)->length() > 0) { 10064 accumulator->PrintName(name); 10065 } 10066 } 10067 10068 int len = end_position() - start_position(); 10069 if (len <= max_length || max_length < 0) { 10070 accumulator->Put(script_source, start_position(), end_position()); 10071 } else { 10072 accumulator->Put(script_source, 10073 start_position(), 10074 start_position() + max_length); 10075 accumulator->Add("...\n"); 10076 } 10077 } 10078 10079 10080 static bool IsCodeEquivalent(Code* code, Code* recompiled) { 10081 if (code->instruction_size() != recompiled->instruction_size()) return false; 10082 ByteArray* code_relocation = code->relocation_info(); 10083 ByteArray* recompiled_relocation = recompiled->relocation_info(); 10084 int length = code_relocation->length(); 10085 if (length != recompiled_relocation->length()) return false; 10086 int compare = memcmp(code_relocation->GetDataStartAddress(), 10087 recompiled_relocation->GetDataStartAddress(), 10088 length); 10089 return compare == 0; 10090 } 10091 10092 10093 void SharedFunctionInfo::EnableDeoptimizationSupport(Code* recompiled) { 10094 ASSERT(!has_deoptimization_support()); 10095 DisallowHeapAllocation no_allocation; 10096 Code* 
code = this->code(); 10097 if (IsCodeEquivalent(code, recompiled)) { 10098 // Copy the deoptimization data from the recompiled code. 10099 code->set_deoptimization_data(recompiled->deoptimization_data()); 10100 code->set_has_deoptimization_support(true); 10101 } else { 10102 // TODO(3025757): In case the recompiled isn't equivalent to the 10103 // old code, we have to replace it. We should try to avoid this 10104 // altogether because it flushes valuable type feedback by 10105 // effectively resetting all IC state. 10106 ReplaceCode(recompiled); 10107 } 10108 ASSERT(has_deoptimization_support()); 10109 } 10110 10111 10112 void SharedFunctionInfo::DisableOptimization(BailoutReason reason) { 10113 // Disable optimization for the shared function info and mark the 10114 // code as non-optimizable. The marker on the shared function info 10115 // is there because we flush non-optimized code thereby loosing the 10116 // non-optimizable information for the code. When the code is 10117 // regenerated and set on the shared function info it is marked as 10118 // non-optimizable if optimization is disabled for the shared 10119 // function info. 10120 set_optimization_disabled(true); 10121 set_bailout_reason(reason); 10122 // Code should be the lazy compilation stub or else unoptimized. If the 10123 // latter, disable optimization for the code too. 
10124 ASSERT(code()->kind() == Code::FUNCTION || code()->kind() == Code::BUILTIN); 10125 if (code()->kind() == Code::FUNCTION) { 10126 code()->set_optimizable(false); 10127 } 10128 PROFILE(GetIsolate(), 10129 LogExistingFunction(Handle<SharedFunctionInfo>(this), 10130 Handle<Code>(code()))); 10131 if (FLAG_trace_opt) { 10132 PrintF("[disabled optimization for "); 10133 ShortPrint(); 10134 PrintF(", reason: %s]\n", GetBailoutReason(reason)); 10135 } 10136 } 10137 10138 10139 bool SharedFunctionInfo::VerifyBailoutId(BailoutId id) { 10140 ASSERT(!id.IsNone()); 10141 Code* unoptimized = code(); 10142 DeoptimizationOutputData* data = 10143 DeoptimizationOutputData::cast(unoptimized->deoptimization_data()); 10144 unsigned ignore = Deoptimizer::GetOutputInfo(data, id, this); 10145 USE(ignore); 10146 return true; // Return true if there was no ASSERT. 10147 } 10148 10149 10150 void SharedFunctionInfo::StartInobjectSlackTracking(Map* map) { 10151 ASSERT(!IsInobjectSlackTrackingInProgress()); 10152 10153 if (!FLAG_clever_optimizations) return; 10154 10155 // Only initiate the tracking the first time. 10156 if (live_objects_may_exist()) return; 10157 set_live_objects_may_exist(true); 10158 10159 // No tracking during the snapshot construction phase. 10160 if (Serializer::enabled()) return; 10161 10162 if (map->unused_property_fields() == 0) return; 10163 10164 // Nonzero counter is a leftover from the previous attempt interrupted 10165 // by GC, keep it. 10166 if (construction_count() == 0) { 10167 set_construction_count(kGenerousAllocationCount); 10168 } 10169 set_initial_map(map); 10170 Builtins* builtins = map->GetHeap()->isolate()->builtins(); 10171 ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubGeneric), 10172 construct_stub()); 10173 set_construct_stub(builtins->builtin(Builtins::kJSConstructStubCountdown)); 10174 } 10175 10176 10177 // Called from GC, hence reinterpret_cast and unchecked accessors. 
void SharedFunctionInfo::DetachInitialMap() {
  Map* map = reinterpret_cast<Map*>(initial_map());

  // Make the map remember to restore the link if it survives the GC.
  map->set_bit_field2(
      map->bit_field2() | (1 << Map::kAttachedToSharedFunctionInfo));

  // Undo state changes made by StartInobjectTracking (except the
  // construction_count). This way if the initial map does not survive the GC
  // then StartInobjectTracking will be called again the next time the
  // constructor is called. The countdown will continue and (possibly after
  // several more GCs) CompleteInobjectSlackTracking will eventually be called.
  Heap* heap = map->GetHeap();
  set_initial_map(heap->undefined_value());
  Builtins* builtins = heap->isolate()->builtins();
  ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubCountdown),
            *RawField(this, kConstructStubOffset));
  set_construct_stub(builtins->builtin(Builtins::kJSConstructStubGeneric));
  // It is safe to clear the flag: it will be set again if the map is live.
  set_live_objects_may_exist(false);
}


// Called from GC, hence reinterpret_cast and unchecked accessors.
void SharedFunctionInfo::AttachInitialMap(Map* map) {
  // Clear the survival marker set by DetachInitialMap.
  map->set_bit_field2(
      map->bit_field2() & ~(1 << Map::kAttachedToSharedFunctionInfo));

  // Resume inobject slack tracking.
  set_initial_map(map);
  Builtins* builtins = map->GetHeap()->isolate()->builtins();
  ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubGeneric),
            *RawField(this, kConstructStubOffset));
  set_construct_stub(builtins->builtin(Builtins::kJSConstructStubCountdown));
  // The map survived the gc, so there may be objects referencing it.
  set_live_objects_may_exist(true);
}


// Resets per-context state (ICs, profiler ticks, opt/deopt counts) so the
// function gets a fresh chance in a new context.
void SharedFunctionInfo::ResetForNewContext(int new_ic_age) {
  code()->ClearInlineCaches();
  set_ic_age(new_ic_age);
  if (code()->kind() == Code::FUNCTION) {
    code()->set_profiler_ticks(0);
    if (optimization_disabled() &&
        opt_count() >= FLAG_max_opt_count) {
      // Re-enable optimizations if they were disabled due to opt_count limit.
      set_optimization_disabled(false);
      code()->set_optimizable(true);
    }
    set_opt_count(0);
    set_deopt_count(0);
  }
}


// Transition-tree callback: folds the minimum unused-property slack into
// the int pointed to by |data|.
static void GetMinInobjectSlack(Map* map, void* data) {
  int slack = map->unused_property_fields();
  if (*reinterpret_cast<int*>(data) > slack) {
    *reinterpret_cast<int*>(data) = slack;
  }
}


// Transition-tree callback: shrinks |map| by the slack (in pointer-sized
// slots) pointed to by |data|.
static void ShrinkInstanceSize(Map* map, void* data) {
  int slack = *reinterpret_cast<int*>(data);
  map->set_inobject_properties(map->inobject_properties() - slack);
  map->set_unused_property_fields(map->unused_property_fields() - slack);
  map->set_instance_size(map->instance_size() - slack * kPointerSize);

  // Visitor id might depend on the instance size, recalculate it.
  map->set_visitor_id(StaticVisitorBase::GetVisitorId(map));
}


// Finishes in-object slack tracking: restores the generic construct stub
// and reclaims unused in-object property slots across the whole transition
// tree of the initial map.
void SharedFunctionInfo::CompleteInobjectSlackTracking() {
  ASSERT(live_objects_may_exist() && IsInobjectSlackTrackingInProgress());
  Map* map = Map::cast(initial_map());

  Heap* heap = map->GetHeap();
  set_initial_map(heap->undefined_value());
  Builtins* builtins = heap->isolate()->builtins();
  ASSERT_EQ(builtins->builtin(Builtins::kJSConstructStubCountdown),
            construct_stub());
  set_construct_stub(builtins->builtin(Builtins::kJSConstructStubGeneric));

  // The slack we may reclaim is the minimum over all maps in the tree.
  int slack = map->unused_property_fields();
  map->TraverseTransitionTree(&GetMinInobjectSlack, &slack);
  if (slack != 0) {
    // Resize the initial map and all maps in its transition tree.
    map->TraverseTransitionTree(&ShrinkInstanceSize, &slack);

    // Give the correct expected_nof_properties to initial maps created later.
    ASSERT(expected_nof_properties() >= slack);
    set_expected_nof_properties(expected_nof_properties() - slack);
  }
}


// Returns the code-slot index for |native_context| in the optimized code
// map, or -1 if absent (or caching is disabled).
int SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context) {
  ASSERT(native_context->IsNativeContext());
  if (!FLAG_cache_optimized_code) return -1;
  Object* value = optimized_code_map();
  if (!value->IsSmi()) {
    FixedArray* optimized_code_map = FixedArray::cast(value);
    int length = optimized_code_map->length();
    for (int i = kEntriesStart; i < length; i += kEntryLength) {
      if (optimized_code_map->get(i) == native_context) {
        // i holds the context; i + 1 is the cached code.
        return i + 1;
      }
    }
    if (FLAG_trace_opt) {
      PrintF("[didn't find optimized code in optimized code map for ");
      ShortPrint();
      PrintF("]\n");
    }
  }
  return -1;
}


#define DECLARE_TAG(ignore1, name, ignore2) name,
const char* const VisitorSynchronization::kTags[
    VisitorSynchronization::kNumberOfSyncTags] = {
  VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
};
#undef DECLARE_TAG


#define DECLARE_TAG(ignore1, ignore2, name) name,
const char* const VisitorSynchronization::kTagNames[
    VisitorSynchronization::kNumberOfSyncTags] = {
  VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
};
#undef DECLARE_TAG


void ObjectVisitor::VisitCodeTarget(RelocInfo* rinfo) {
  ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  Object* old_target = target;
  VisitPointer(&target);
  CHECK_EQ(target, old_target);  // VisitPointer doesn't change Code* *target.
}


void ObjectVisitor::VisitCodeAgeSequence(RelocInfo* rinfo) {
  ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Object* stub = rinfo->code_age_stub();
  // The stub may be NULL; only visit when present.
  if (stub) {
    VisitPointer(&stub);
  }
}


void ObjectVisitor::VisitCodeEntry(Address entry_address) {
  Object* code = Code::GetObjectFromEntryAddress(entry_address);
  Object* old_code = code;
  VisitPointer(&code);
  // If the visitor moved the code object, rewrite the raw entry address.
  if (code != old_code) {
    Memory::Address_at(entry_address) = reinterpret_cast<Code*>(code)->entry();
  }
}


void ObjectVisitor::VisitCell(RelocInfo* rinfo) {
  ASSERT(rinfo->rmode() == RelocInfo::CELL);
  Object* cell = rinfo->target_cell();
  Object* old_cell = cell;
  VisitPointer(&cell);
  // If the visitor moved the cell, patch the relocation entry.
  if (cell != old_cell) {
    rinfo->set_target_cell(reinterpret_cast<Cell*>(cell));
  }
}


void ObjectVisitor::VisitDebugTarget(RelocInfo* rinfo) {
  ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  Object* old_target = target;
  VisitPointer(&target);
  CHECK_EQ(target, old_target);  // VisitPointer doesn't change Code* *target.
}


void ObjectVisitor::VisitEmbeddedPointer(RelocInfo* rinfo) {
  ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  Object* p = rinfo->target_object();
  VisitPointer(&p);
}


void ObjectVisitor::VisitExternalReference(RelocInfo* rinfo) {
  Address p = rinfo->target_reference();
  VisitExternalReference(&p);
}


// Marks this code object's relocation info as invalid by replacing it with
// the empty byte array.
void Code::InvalidateRelocation() {
  set_relocation_info(GetHeap()->empty_byte_array());
}


// Overwrites every embedded object reference in this code with undefined.
void Code::InvalidateEmbeddedObjects() {
  Object* undefined = GetHeap()->undefined_value();
  int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
  for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (mode == RelocInfo::EMBEDDED_OBJECT) {
      it.rinfo()->set_target_object(undefined, SKIP_WRITE_BARRIER);
    }
  }
}


// Applies |delta| to every relocatable entry and flushes the instruction
// cache for the patched range.
void Code::Relocate(intptr_t delta) {
  for (RelocIterator it(this, RelocInfo::kApplyMask); !it.done(); it.next()) {
    it.rinfo()->apply(delta);
  }
  CPU::FlushICache(instruction_start(), instruction_size());
}


void Code::CopyFrom(const CodeDesc& desc) {
  ASSERT(Marking::Color(this) == Marking::WHITE_OBJECT);

  // copy code
  CopyBytes(instruction_start(), desc.buffer,
            static_cast<size_t>(desc.instr_size));

  // copy reloc info
  CopyBytes(relocation_start(),
            desc.buffer + desc.buffer_size - desc.reloc_size,
            static_cast<size_t>(desc.reloc_size));

  // unbox handles and relocate
  intptr_t delta = instruction_start() - desc.buffer;
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) | 10422 RelocInfo::kApplyMask; 10423 // Needed to find target_object and runtime_entry on X64 10424 Assembler* origin = desc.origin; 10425 AllowDeferredHandleDereference embedding_raw_address; 10426 for (RelocIterator it(this, mode_mask); !it.done(); it.next()) { 10427 RelocInfo::Mode mode = it.rinfo()->rmode(); 10428 if (mode == RelocInfo::EMBEDDED_OBJECT) { 10429 Handle<Object> p = it.rinfo()->target_object_handle(origin); 10430 it.rinfo()->set_target_object(*p, SKIP_WRITE_BARRIER); 10431 } else if (mode == RelocInfo::CELL) { 10432 Handle<Cell> cell = it.rinfo()->target_cell_handle(); 10433 it.rinfo()->set_target_cell(*cell, SKIP_WRITE_BARRIER); 10434 } else if (RelocInfo::IsCodeTarget(mode)) { 10435 // rewrite code handles in inline cache targets to direct 10436 // pointers to the first instruction in the code object 10437 Handle<Object> p = it.rinfo()->target_object_handle(origin); 10438 Code* code = Code::cast(*p); 10439 it.rinfo()->set_target_address(code->instruction_start(), 10440 SKIP_WRITE_BARRIER); 10441 } else if (RelocInfo::IsRuntimeEntry(mode)) { 10442 Address p = it.rinfo()->target_runtime_entry(origin); 10443 it.rinfo()->set_target_runtime_entry(p, SKIP_WRITE_BARRIER); 10444 } else if (mode == RelocInfo::CODE_AGE_SEQUENCE) { 10445 Handle<Object> p = it.rinfo()->code_age_stub_handle(origin); 10446 Code* code = Code::cast(*p); 10447 it.rinfo()->set_code_age_stub(code); 10448 } else { 10449 it.rinfo()->apply(delta); 10450 } 10451 } 10452 CPU::FlushICache(instruction_start(), instruction_size()); 10453 } 10454 10455 10456 // Locate the source position which is closest to the address in the code. This 10457 // is using the source position information embedded in the relocation info. 10458 // The position returned is relative to the beginning of the script where the 10459 // source for this function is found. 
int Code::SourcePosition(Address pc) {
  int distance = kMaxInt;
  int position = RelocInfo::kNoPosition;  // Initially no position found.
  // Run through all the relocation info to find the best matching source
  // position. All the code needs to be considered as the sequence of the
  // instructions in the code does not necessarily follow the same order as the
  // source.
  RelocIterator it(this, RelocInfo::kPositionMask);
  while (!it.done()) {
    // Only look at positions at addresses before the given pc
    // (it.rinfo()->pc() < pc).
    if (it.rinfo()->pc() < pc) {
      // Get position and distance.

      int dist = static_cast<int>(pc - it.rinfo()->pc());
      int pos = static_cast<int>(it.rinfo()->data());
      // If this position is closer than the current candidate or if it has the
      // same distance as the current candidate and the position is higher then
      // this position is the new candidate.
      if ((dist < distance) ||
          (dist == distance && pos > position)) {
        position = pos;
        distance = dist;
      }
    }
    it.next();
  }
  return position;
}


// Same as Code::SourcePosition above except it only looks for statement
// positions.
int Code::SourceStatementPosition(Address pc) {
  // First find the position as close as possible using all position
  // information.
  int position = SourcePosition(pc);
  // Now find the closest statement position before the position.
  int statement_position = 0;
  RelocIterator it(this, RelocInfo::kPositionMask);
  while (!it.done()) {
    if (RelocInfo::IsStatementPosition(it.rinfo()->rmode())) {
      int p = static_cast<int>(it.rinfo()->data());
      if (statement_position < p && p <= position) {
        statement_position = p;
      }
    }
    it.next();
  }
  return statement_position;
}


// Finds the safepoint entry covering |pc| in this code's safepoint table.
SafepointEntry Code::GetSafepointEntry(Address pc) {
  SafepointTable table(this);
  return table.FindEntry(pc);
}


// Returns the n-th embedded heap object whose map is |match_map|, or NULL if
// fewer than n such objects are embedded in the instruction stream.
Object* Code::FindNthObject(int n, Map* match_map) {
  ASSERT(is_inline_cache_stub());
  DisallowHeapAllocation no_allocation;
  int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
  for (RelocIterator it(this, mask); !it.done(); it.next()) {
    RelocInfo* info = it.rinfo();
    Object* object = info->target_object();
    if (object->IsHeapObject()) {
      if (HeapObject::cast(object)->map() == match_map) {
        if (--n == 0) return object;
      }
    }
  }
  return NULL;
}


// Returns the first embedded Map (an object whose map is the meta map),
// or NULL when none is embedded.
Map* Code::FindFirstMap() {
  Object* result = FindNthObject(1, GetHeap()->meta_map());
  return (result != NULL) ? Map::cast(result) : NULL;
}


// Replaces the n-th embedded object whose map is |match_map| with
// |replace_with|.  The n-th occurrence must exist (UNREACHABLE otherwise).
void Code::ReplaceNthObject(int n,
                            Map* match_map,
                            Object* replace_with) {
  ASSERT(is_inline_cache_stub() || is_handler());
  DisallowHeapAllocation no_allocation;
  int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
  for (RelocIterator it(this, mask); !it.done(); it.next()) {
    RelocInfo* info = it.rinfo();
    Object* object = info->target_object();
    if (object->IsHeapObject()) {
      if (HeapObject::cast(object)->map() == match_map) {
        if (--n == 0) {
          info->set_target_object(replace_with);
          return;
        }
      }
    }
  }
  UNREACHABLE();
}


// Collects every Map embedded in the instruction stream into |maps|.
void Code::FindAllMaps(MapHandleList* maps) {
  ASSERT(is_inline_cache_stub());
  DisallowHeapAllocation no_allocation;
  int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
  for (RelocIterator it(this, mask); !it.done(); it.next()) {
    RelocInfo* info = it.rinfo();
    Object* object = info->target_object();
    if (object->IsMap()) maps->Add(handle(Map::cast(object)));
  }
}


// Collects the IC type corresponding to every embedded Map into |types|.
void Code::FindAllTypes(TypeHandleList* types) {
  ASSERT(is_inline_cache_stub());
  DisallowHeapAllocation no_allocation;
  int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
  Isolate* isolate = GetIsolate();
  for (RelocIterator it(this, mask); !it.done(); it.next()) {
    RelocInfo* info = it.rinfo();
    Object* object = info->target_object();
    if (object->IsMap()) {
      Handle<Map> map(Map::cast(object));
      types->Add(handle(IC::MapToType(map), isolate));
    }
  }
}


// Replaces the first embedded Map with |replace_with|.
void Code::ReplaceFirstMap(Map* replace_with) {
  ReplaceNthObject(1, GetHeap()->meta_map(), replace_with);
}


// Returns the first code target of kind HANDLER, or NULL if none is found.
Code* Code::FindFirstHandler() {
  ASSERT(is_inline_cache_stub());
  DisallowHeapAllocation no_allocation;
  int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
  for (RelocIterator it(this, mask); !it.done(); it.next()) {
    RelocInfo* info = it.rinfo();
    Code* code = Code::GetCodeFromTargetAddress(info->target_address());
    if (code->kind() == Code::HANDLER) return code;
  }
  return NULL;
}


// Collects up to |length| leading HANDLER code targets into |code_list|.
// Returns true iff exactly |length| handlers were found before a
// non-handler target (or the end of the reloc info).
bool Code::FindHandlers(CodeHandleList* code_list, int length) {
  ASSERT(is_inline_cache_stub());
  DisallowHeapAllocation no_allocation;
  int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET);
  int i = 0;
  for (RelocIterator it(this, mask); !it.done(); it.next()) {
    if (i == length) return true;
    RelocInfo* info = it.rinfo();
    Code* code = Code::GetCodeFromTargetAddress(info->target_address());
    // IC stubs with handlers never contain non-handler code objects before
    // handler targets.
    if (code->kind() != Code::HANDLER) break;
    code_list->Add(Handle<Code>(code));
    i++;
  }
  return i == length;
}


// Returns the first embedded Name object, or NULL if none is embedded.
Name* Code::FindFirstName() {
  ASSERT(is_inline_cache_stub());
  DisallowHeapAllocation no_allocation;
  int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
  for (RelocIterator it(this, mask); !it.done(); it.next()) {
    RelocInfo* info = it.rinfo();
    Object* object = info->target_object();
    if (object->IsName()) return Name::cast(object);
  }
  return NULL;
}


// Replaces the n-th CELL reloc target with |replace_with|.  The n-th cell
// must exist (UNREACHABLE otherwise).
void Code::ReplaceNthCell(int n, Cell* replace_with) {
  ASSERT(is_inline_cache_stub());
  DisallowHeapAllocation no_allocation;
  int mask = RelocInfo::ModeMask(RelocInfo::CELL);
  for (RelocIterator it(this, mask); !it.done(); it.next()) {
    RelocInfo* info = it.rinfo();
    if (--n == 0) {
      info->set_target_cell(replace_with);
      return;
    }
  }
  UNREACHABLE();
}


// Clears inline caches of every kind (NULL means "no kind filter").
void Code::ClearInlineCaches() {
  ClearInlineCaches(NULL);
}


// Clears only the inline caches of the given kind.
void Code::ClearInlineCaches(Code::Kind kind) {
  ClearInlineCaches(&kind);
}


// Walks all call-like reloc targets of this code object and clears every
// target that is an inline cache stub, optionally restricted to |*kind|.
void Code::ClearInlineCaches(Code::Kind* kind) {
  int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
             RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
             RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID) |
             RelocInfo::ModeMask(RelocInfo::CODE_TARGET_CONTEXT);
  for (RelocIterator it(this, mask); !it.done(); it.next()) {
    RelocInfo* info = it.rinfo();
    Code* target(Code::GetCodeFromTargetAddress(info->target_address()));
    if (target->is_inline_cache_stub()) {
      if (kind == NULL || *kind == target->kind()) {
        IC::Clear(this->GetIsolate(), info->pc());
      }
    }
  }
}


// Resets the type feedback cells of unoptimized FUNCTION code back to the
// uninitialized sentinel, preserving AllocationSite values.
void Code::ClearTypeFeedbackCells(Heap* heap) {
  if (kind() != FUNCTION) return;
  Object* raw_info = type_feedback_info();
  if (raw_info->IsTypeFeedbackInfo()) {
    TypeFeedbackCells* type_feedback_cells =
        TypeFeedbackInfo::cast(raw_info)->type_feedback_cells();
    for (int i = 0; i < type_feedback_cells->CellCount(); i++) {
      Cell* cell = type_feedback_cells->GetCell(i);
      // Don't clear AllocationSites
      Object* value = cell->value();
      if (value == NULL || !value->IsAllocationSite()) {
        cell->set_value(TypeFeedbackCells::RawUninitializedSentinel(heap));
      }
    }
  }
}


// Maps a back-edge pc offset to its AST id via the back edge table.
// Returns BailoutId::None() if no back edge matches |pc_offset|.
BailoutId Code::TranslatePcOffsetToAstId(uint32_t pc_offset) {
  DisallowHeapAllocation no_gc;
  ASSERT(kind() == FUNCTION);
  BackEdgeTable back_edges(this, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    if (back_edges.pc_offset(i) == pc_offset) return back_edges.ast_id(i);
  }
  return BailoutId::None();
}


// Patches a code-age sequence back to the "young" (no-age) state.
void Code::MakeCodeAgeSequenceYoung(byte* sequence, Isolate* isolate) {
  PatchPlatformCodeAge(isolate, sequence, kNoAgeCodeAge, NO_MARKING_PARITY);
}


// Patches a code-age sequence to record that the code executed once.
void Code::MarkCodeAsExecuted(byte* sequence, Isolate* isolate) {
  PatchPlatformCodeAge(isolate, sequence, kExecutedOnceCodeAge,
      NO_MARKING_PARITY);
}


// Translates the raw stored age into the age used for flushing decisions:
// never-executed code counts as old, executed-once code as pre-aged.
static Code::Age EffectiveAge(Code::Age age) {
  if (age == Code::kNotExecutedCodeAge) {
    // Treat that's never been executed as old immediately.
    age = Code::kIsOldCodeAge;
  } else if (age == Code::kExecutedOnceCodeAge) {
    // Pre-age code that has only been executed once.
    age = Code::kPreAgedCodeAge;
  }
  return age;
}


// Advances this code's age by one step on each GC with flipped marking
// parity; the parity check ensures at most one step per GC cycle.
void Code::MakeOlder(MarkingParity current_parity) {
  byte* sequence = FindCodeAgeSequence();
  if (sequence != NULL) {
    Age age;
    MarkingParity code_parity;
    GetCodeAgeAndParity(sequence, &age, &code_parity);
    age = EffectiveAge(age);
    if (age != kLastCodeAge && code_parity != current_parity) {
      PatchPlatformCodeAge(GetIsolate(),
                           sequence,
                           static_cast<Age>(age + 1),
                           current_parity);
    }
  }
}


// True when the effective age has reached the "old" threshold.
bool Code::IsOld() {
  return GetAge() >= kIsOldCodeAge;
}


// Returns the address of the code-age patch sequence in the prologue, or
// NULL when code aging does not apply to this code object.
byte* Code::FindCodeAgeSequence() {
  return FLAG_age_code &&
      prologue_offset() != Code::kPrologueOffsetNotSet &&
      (kind() == OPTIMIZED_FUNCTION ||
       (kind() == FUNCTION && !has_debug_break_slots()))
      ? instruction_start() + prologue_offset()
      : NULL;
}


// The effective (flushing-relevant) age; see EffectiveAge above.
Code::Age Code::GetAge() {
  return EffectiveAge(GetRawAge());
}


// The age exactly as encoded in the code-age sequence, without the
// not-executed/executed-once adjustments.
Code::Age Code::GetRawAge() {
  byte* sequence = FindCodeAgeSequence();
  if (sequence == NULL) {
    return kNoAgeCodeAge;
  }
  Age age;
  MarkingParity parity;
  GetCodeAgeAndParity(sequence, &age, &parity);
  return age;
}


// Decodes age and marking parity by matching |code| against the known
// young-again / executed builtins stubs.  |code| must be one of them.
void Code::GetCodeAgeAndParity(Code* code, Age* age,
                               MarkingParity* parity) {
  Isolate* isolate = code->GetIsolate();
  Builtins* builtins = isolate->builtins();
  Code* stub = NULL;
#define HANDLE_CODE_AGE(AGE)                                            \
  stub = *builtins->Make##AGE##CodeYoungAgainEvenMarking();             \
  if (code == stub) {                                                   \
    *age = k##AGE##CodeAge;                                             \
    *parity = EVEN_MARKING_PARITY;                                      \
    return;                                                             \
  }                                                                     \
  stub = *builtins->Make##AGE##CodeYoungAgainOddMarking();              \
  if (code == stub) {                                                   \
    *age = k##AGE##CodeAge;                                             \
    *parity = ODD_MARKING_PARITY;                                       \
    return;                                                             \
  }
  CODE_AGE_LIST(HANDLE_CODE_AGE)
#undef HANDLE_CODE_AGE
  stub = *builtins->MarkCodeAsExecutedOnce();
  if (code == stub) {
    *age = kNotExecutedCodeAge;
    *parity = NO_MARKING_PARITY;
    return;
  }
  stub = *builtins->MarkCodeAsExecutedTwice();
  if (code == stub) {
    *age = kExecutedOnceCodeAge;
    *parity = NO_MARKING_PARITY;
    return;
  }
  UNREACHABLE();
}


// Inverse of GetCodeAgeAndParity: returns the builtin stub encoding the
// given age/parity combination.
Code* Code::GetCodeAgeStub(Isolate* isolate, Age age, MarkingParity parity) {
  Builtins* builtins = isolate->builtins();
  switch (age) {
#define HANDLE_CODE_AGE(AGE)                                            \
    case k##AGE##CodeAge: {                                             \
      Code* stub = parity == EVEN_MARKING_PARITY                        \
          ? *builtins->Make##AGE##CodeYoungAgainEvenMarking()           \
          : *builtins->Make##AGE##CodeYoungAgainOddMarking();           \
      return stub;                                                      \
    }
    CODE_AGE_LIST(HANDLE_CODE_AGE)
#undef HANDLE_CODE_AGE
    case kNotExecutedCodeAge: {
      ASSERT(parity == NO_MARKING_PARITY);
      return *builtins->MarkCodeAsExecutedOnce();
    }
    case kExecutedOnceCodeAge: {
      ASSERT(parity == NO_MARKING_PARITY);
      return *builtins->MarkCodeAsExecutedTwice();
    }
    default:
      UNREACHABLE();
      break;
  }
  return NULL;
}


// Prints the comment preceding the runtime entry whose deoptimization id
// matches |bailout_id| (checking both EAGER and SOFT tables).  Prints
// nothing if no entry matches.
void Code::PrintDeoptLocation(FILE* out, int bailout_id) {
  const char* last_comment = NULL;
  int mask = RelocInfo::ModeMask(RelocInfo::COMMENT)
      | RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
  for (RelocIterator it(this, mask); !it.done(); it.next()) {
    RelocInfo* info = it.rinfo();
    if (info->rmode() == RelocInfo::COMMENT) {
      last_comment = reinterpret_cast<const char*>(info->data());
    } else if (last_comment != NULL) {
      if ((bailout_id == Deoptimizer::GetDeoptimizationId(
              GetIsolate(), info->target_address(), Deoptimizer::EAGER)) ||
          (bailout_id == Deoptimizer::GetDeoptimizationId(
              GetIsolate(), info->target_address(), Deoptimizer::SOFT))) {
        CHECK(RelocInfo::IsRuntimeEntry(info->rmode()));
        PrintF(out, " %s\n", last_comment);
        return;
      }
    }
  }
}


// True iff |pc| is a recorded deoptimization point of this code object
// (entries with pc == -1 are skipped as invalid).
bool Code::CanDeoptAt(Address pc) {
  DeoptimizationInputData* deopt_data =
      DeoptimizationInputData::cast(deoptimization_data());
  Address code_start_address = instruction_start();
  for (int i = 0; i < deopt_data->DeoptCount(); i++) {
    if (deopt_data->Pc(i)->value() == -1) continue;
    Address address = code_start_address + deopt_data->Pc(i)->value();
    if (address == pc) return true;
  }
  return false;
}


// Identify kind of code.
10884 const char* Code::Kind2String(Kind kind) { 10885 switch (kind) { 10886 #define CASE(name) case name: return #name; 10887 CODE_KIND_LIST(CASE) 10888 #undef CASE 10889 case NUMBER_OF_KINDS: break; 10890 } 10891 UNREACHABLE(); 10892 return NULL; 10893 } 10894 10895 10896 #ifdef ENABLE_DISASSEMBLER 10897 10898 void DeoptimizationInputData::DeoptimizationInputDataPrint(FILE* out) { 10899 disasm::NameConverter converter; 10900 int deopt_count = DeoptCount(); 10901 PrintF(out, "Deoptimization Input Data (deopt points = %d)\n", deopt_count); 10902 if (0 == deopt_count) return; 10903 10904 PrintF(out, "%6s %6s %6s %6s %12s\n", "index", "ast id", "argc", "pc", 10905 FLAG_print_code_verbose ? "commands" : ""); 10906 for (int i = 0; i < deopt_count; i++) { 10907 PrintF(out, "%6d %6d %6d %6d", 10908 i, 10909 AstId(i).ToInt(), 10910 ArgumentsStackHeight(i)->value(), 10911 Pc(i)->value()); 10912 10913 if (!FLAG_print_code_verbose) { 10914 PrintF(out, "\n"); 10915 continue; 10916 } 10917 // Print details of the frame translation. 
10918 int translation_index = TranslationIndex(i)->value(); 10919 TranslationIterator iterator(TranslationByteArray(), translation_index); 10920 Translation::Opcode opcode = 10921 static_cast<Translation::Opcode>(iterator.Next()); 10922 ASSERT(Translation::BEGIN == opcode); 10923 int frame_count = iterator.Next(); 10924 int jsframe_count = iterator.Next(); 10925 PrintF(out, " %s {frame count=%d, js frame count=%d}\n", 10926 Translation::StringFor(opcode), 10927 frame_count, 10928 jsframe_count); 10929 10930 while (iterator.HasNext() && 10931 Translation::BEGIN != 10932 (opcode = static_cast<Translation::Opcode>(iterator.Next()))) { 10933 PrintF(out, "%24s %s ", "", Translation::StringFor(opcode)); 10934 10935 switch (opcode) { 10936 case Translation::BEGIN: 10937 UNREACHABLE(); 10938 break; 10939 10940 case Translation::JS_FRAME: { 10941 int ast_id = iterator.Next(); 10942 int function_id = iterator.Next(); 10943 unsigned height = iterator.Next(); 10944 PrintF(out, "{ast_id=%d, function=", ast_id); 10945 if (function_id != Translation::kSelfLiteralId) { 10946 Object* function = LiteralArray()->get(function_id); 10947 JSFunction::cast(function)->PrintName(out); 10948 } else { 10949 PrintF(out, "<self>"); 10950 } 10951 PrintF(out, ", height=%u}", height); 10952 break; 10953 } 10954 10955 case Translation::COMPILED_STUB_FRAME: { 10956 Code::Kind stub_kind = static_cast<Code::Kind>(iterator.Next()); 10957 PrintF(out, "{kind=%d}", stub_kind); 10958 break; 10959 } 10960 10961 case Translation::ARGUMENTS_ADAPTOR_FRAME: 10962 case Translation::CONSTRUCT_STUB_FRAME: { 10963 int function_id = iterator.Next(); 10964 JSFunction* function = 10965 JSFunction::cast(LiteralArray()->get(function_id)); 10966 unsigned height = iterator.Next(); 10967 PrintF(out, "{function="); 10968 function->PrintName(out); 10969 PrintF(out, ", height=%u}", height); 10970 break; 10971 } 10972 10973 case Translation::GETTER_STUB_FRAME: 10974 case Translation::SETTER_STUB_FRAME: { 10975 int function_id 
= iterator.Next(); 10976 JSFunction* function = 10977 JSFunction::cast(LiteralArray()->get(function_id)); 10978 PrintF(out, "{function="); 10979 function->PrintName(out); 10980 PrintF(out, "}"); 10981 break; 10982 } 10983 10984 case Translation::REGISTER: { 10985 int reg_code = iterator.Next(); 10986 PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code)); 10987 break; 10988 } 10989 10990 case Translation::INT32_REGISTER: { 10991 int reg_code = iterator.Next(); 10992 PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code)); 10993 break; 10994 } 10995 10996 case Translation::UINT32_REGISTER: { 10997 int reg_code = iterator.Next(); 10998 PrintF(out, "{input=%s (unsigned)}", 10999 converter.NameOfCPURegister(reg_code)); 11000 break; 11001 } 11002 11003 case Translation::DOUBLE_REGISTER: { 11004 int reg_code = iterator.Next(); 11005 PrintF(out, "{input=%s}", 11006 DoubleRegister::AllocationIndexToString(reg_code)); 11007 break; 11008 } 11009 11010 case Translation::STACK_SLOT: { 11011 int input_slot_index = iterator.Next(); 11012 PrintF(out, "{input=%d}", input_slot_index); 11013 break; 11014 } 11015 11016 case Translation::INT32_STACK_SLOT: { 11017 int input_slot_index = iterator.Next(); 11018 PrintF(out, "{input=%d}", input_slot_index); 11019 break; 11020 } 11021 11022 case Translation::UINT32_STACK_SLOT: { 11023 int input_slot_index = iterator.Next(); 11024 PrintF(out, "{input=%d (unsigned)}", input_slot_index); 11025 break; 11026 } 11027 11028 case Translation::DOUBLE_STACK_SLOT: { 11029 int input_slot_index = iterator.Next(); 11030 PrintF(out, "{input=%d}", input_slot_index); 11031 break; 11032 } 11033 11034 case Translation::LITERAL: { 11035 unsigned literal_index = iterator.Next(); 11036 PrintF(out, "{literal_id=%u}", literal_index); 11037 break; 11038 } 11039 11040 case Translation::DUPLICATED_OBJECT: { 11041 int object_index = iterator.Next(); 11042 PrintF(out, "{object_index=%d}", object_index); 11043 break; 11044 } 11045 11046 case 
Translation::ARGUMENTS_OBJECT: 11047 case Translation::CAPTURED_OBJECT: { 11048 int args_length = iterator.Next(); 11049 PrintF(out, "{length=%d}", args_length); 11050 break; 11051 } 11052 } 11053 PrintF(out, "\n"); 11054 } 11055 } 11056 } 11057 11058 11059 void DeoptimizationOutputData::DeoptimizationOutputDataPrint(FILE* out) { 11060 PrintF(out, "Deoptimization Output Data (deopt points = %d)\n", 11061 this->DeoptPoints()); 11062 if (this->DeoptPoints() == 0) return; 11063 11064 PrintF(out, "%6s %8s %s\n", "ast id", "pc", "state"); 11065 for (int i = 0; i < this->DeoptPoints(); i++) { 11066 int pc_and_state = this->PcAndState(i)->value(); 11067 PrintF(out, "%6d %8d %s\n", 11068 this->AstId(i).ToInt(), 11069 FullCodeGenerator::PcField::decode(pc_and_state), 11070 FullCodeGenerator::State2String( 11071 FullCodeGenerator::StateField::decode(pc_and_state))); 11072 } 11073 } 11074 11075 11076 const char* Code::ICState2String(InlineCacheState state) { 11077 switch (state) { 11078 case UNINITIALIZED: return "UNINITIALIZED"; 11079 case PREMONOMORPHIC: return "PREMONOMORPHIC"; 11080 case MONOMORPHIC: return "MONOMORPHIC"; 11081 case MONOMORPHIC_PROTOTYPE_FAILURE: return "MONOMORPHIC_PROTOTYPE_FAILURE"; 11082 case POLYMORPHIC: return "POLYMORPHIC"; 11083 case MEGAMORPHIC: return "MEGAMORPHIC"; 11084 case GENERIC: return "GENERIC"; 11085 case DEBUG_STUB: return "DEBUG_STUB"; 11086 } 11087 UNREACHABLE(); 11088 return NULL; 11089 } 11090 11091 11092 const char* Code::StubType2String(StubType type) { 11093 switch (type) { 11094 case NORMAL: return "NORMAL"; 11095 case FAST: return "FAST"; 11096 } 11097 UNREACHABLE(); // keep the compiler happy 11098 return NULL; 11099 } 11100 11101 11102 void Code::PrintExtraICState(FILE* out, Kind kind, ExtraICState extra) { 11103 PrintF(out, "extra_ic_state = "); 11104 const char* name = NULL; 11105 switch (kind) { 11106 case CALL_IC: 11107 if (extra == STRING_INDEX_OUT_OF_BOUNDS) { 11108 name = "STRING_INDEX_OUT_OF_BOUNDS"; 11109 } 11110 
break; 11111 case STORE_IC: 11112 case KEYED_STORE_IC: 11113 if (extra == kStrictMode) { 11114 name = "STRICT"; 11115 } 11116 break; 11117 default: 11118 break; 11119 } 11120 if (name != NULL) { 11121 PrintF(out, "%s\n", name); 11122 } else { 11123 PrintF(out, "%d\n", extra); 11124 } 11125 } 11126 11127 11128 void Code::Disassemble(const char* name, FILE* out) { 11129 PrintF(out, "kind = %s\n", Kind2String(kind())); 11130 if (has_major_key()) { 11131 PrintF(out, "major_key = %s\n", 11132 CodeStub::MajorName(CodeStub::GetMajorKey(this), true)); 11133 } 11134 if (is_inline_cache_stub()) { 11135 PrintF(out, "ic_state = %s\n", ICState2String(ic_state())); 11136 PrintExtraICState(out, kind(), needs_extended_extra_ic_state(kind()) ? 11137 extended_extra_ic_state() : extra_ic_state()); 11138 if (ic_state() == MONOMORPHIC) { 11139 PrintF(out, "type = %s\n", StubType2String(type())); 11140 } 11141 if (is_call_stub() || is_keyed_call_stub()) { 11142 PrintF(out, "argc = %d\n", arguments_count()); 11143 } 11144 if (is_compare_ic_stub()) { 11145 ASSERT(major_key() == CodeStub::CompareIC); 11146 CompareIC::State left_state, right_state, handler_state; 11147 Token::Value op; 11148 ICCompareStub::DecodeMinorKey(stub_info(), &left_state, &right_state, 11149 &handler_state, &op); 11150 PrintF(out, "compare_state = %s*%s -> %s\n", 11151 CompareIC::GetStateName(left_state), 11152 CompareIC::GetStateName(right_state), 11153 CompareIC::GetStateName(handler_state)); 11154 PrintF(out, "compare_operation = %s\n", Token::Name(op)); 11155 } 11156 } 11157 if ((name != NULL) && (name[0] != '\0')) { 11158 PrintF(out, "name = %s\n", name); 11159 } 11160 if (kind() == OPTIMIZED_FUNCTION) { 11161 PrintF(out, "stack_slots = %d\n", stack_slots()); 11162 } 11163 11164 PrintF(out, "Instructions (size = %d)\n", instruction_size()); 11165 Disassembler::Decode(out, this); 11166 PrintF(out, "\n"); 11167 11168 if (kind() == FUNCTION) { 11169 DeoptimizationOutputData* data = 11170 
DeoptimizationOutputData::cast(this->deoptimization_data()); 11171 data->DeoptimizationOutputDataPrint(out); 11172 } else if (kind() == OPTIMIZED_FUNCTION) { 11173 DeoptimizationInputData* data = 11174 DeoptimizationInputData::cast(this->deoptimization_data()); 11175 data->DeoptimizationInputDataPrint(out); 11176 } 11177 PrintF(out, "\n"); 11178 11179 if (is_crankshafted()) { 11180 SafepointTable table(this); 11181 PrintF(out, "Safepoints (size = %u)\n", table.size()); 11182 for (unsigned i = 0; i < table.length(); i++) { 11183 unsigned pc_offset = table.GetPcOffset(i); 11184 PrintF(out, "%p %4d ", (instruction_start() + pc_offset), pc_offset); 11185 table.PrintEntry(i, out); 11186 PrintF(out, " (sp -> fp)"); 11187 SafepointEntry entry = table.GetEntry(i); 11188 if (entry.deoptimization_index() != Safepoint::kNoDeoptimizationIndex) { 11189 PrintF(out, " %6d", entry.deoptimization_index()); 11190 } else { 11191 PrintF(out, " <none>"); 11192 } 11193 if (entry.argument_count() > 0) { 11194 PrintF(out, " argc: %d", entry.argument_count()); 11195 } 11196 PrintF(out, "\n"); 11197 } 11198 PrintF(out, "\n"); 11199 } else if (kind() == FUNCTION) { 11200 unsigned offset = back_edge_table_offset(); 11201 // If there is no back edge table, the "table start" will be at or after 11202 // (due to alignment) the end of the instruction stream. 
  // Dump the back-edge table: for each entry, the AST id of the loop, the pc
  // offset of the back edge, and its loop depth.
  if (static_cast<int>(offset) < instruction_size()) {
    DisallowHeapAllocation no_gc;
    BackEdgeTable back_edges(this, &no_gc);

    PrintF(out, "Back edges (size = %u)\n", back_edges.length());
    PrintF(out, "ast_id pc_offset loop_depth\n");

    for (uint32_t i = 0; i < back_edges.length(); i++) {
      PrintF(out, "%6d %9u %10u\n", back_edges.ast_id(i).ToInt(),
             back_edges.pc_offset(i),
             back_edges.loop_depth(i));
    }

    PrintF(out, "\n");
  }
#ifdef OBJECT_PRINT
  if (!type_feedback_info()->IsUndefined()) {
    TypeFeedbackInfo::cast(type_feedback_info())->TypeFeedbackInfoPrint(out);
    PrintF(out, "\n");
  }
#endif
  }

  // Relocation information is printed for every kind of code object.
  PrintF(out, "RelocInfo (size = %d)\n", relocation_size());
  for (RelocIterator it(this); !it.done(); it.next()) {
    it.rinfo()->Print(GetIsolate(), out);
  }
  PrintF(out, "\n");
}
#endif // ENABLE_DISASSEMBLER


// Handle-based wrapper around the raw resize below; retries on allocation
// failure via CALL_HEAP_FUNCTION and returns the new backing store.
Handle<FixedArray> JSObject::SetFastElementsCapacityAndLength(
    Handle<JSObject> object,
    int capacity,
    int length,
    SetFastElementsCapacitySmiMode smi_mode) {
  CALL_HEAP_FUNCTION(
      object->GetIsolate(),
      object->SetFastElementsCapacityAndLength(capacity, length, smi_mode),
      FixedArray);
}


// Allocates a new fast (Smi or object) elements backing store of |capacity|,
// copies the existing elements over, transitions the map to the matching
// elements kind, and (for JSArrays) sets the new |length|.  Returns the new
// backing store on success, or a failure if any allocation fails.
MaybeObject* JSObject::SetFastElementsCapacityAndLength(
    int capacity,
    int length,
    SetFastElementsCapacitySmiMode smi_mode) {
  Heap* heap = GetHeap();
  // We should never end in here with a pixel or external array.
  ASSERT(!HasExternalArrayElements());

  // Allocate a new fast elements backing store.
  FixedArray* new_elements;
  MaybeObject* maybe = heap->AllocateUninitializedFixedArray(capacity);
  if (!maybe->To(&new_elements)) return maybe;

  ElementsKind elements_kind = GetElementsKind();
  ElementsKind new_elements_kind;
  // The resized array has FAST_*_SMI_ELEMENTS if the capacity mode forces it,
  // or if it's allowed and the old elements array contained only SMIs.
  bool has_fast_smi_elements =
      (smi_mode == kForceSmiElements) ||
      ((smi_mode == kAllowSmiElements) && HasFastSmiElements());
  if (has_fast_smi_elements) {
    if (IsHoleyElementsKind(elements_kind)) {
      new_elements_kind = FAST_HOLEY_SMI_ELEMENTS;
    } else {
      new_elements_kind = FAST_SMI_ELEMENTS;
    }
  } else {
    if (IsHoleyElementsKind(elements_kind)) {
      new_elements_kind = FAST_HOLEY_ELEMENTS;
    } else {
      new_elements_kind = FAST_ELEMENTS;
    }
  }
  FixedArrayBase* old_elements = elements();
  ElementsAccessor* accessor = ElementsAccessor::ForKind(new_elements_kind);
  MaybeObject* maybe_obj =
      accessor->CopyElements(this, new_elements, elements_kind);
  if (maybe_obj->IsFailure()) return maybe_obj;

  if (elements_kind != NON_STRICT_ARGUMENTS_ELEMENTS) {
    Map* new_map = map();
    if (new_elements_kind != elements_kind) {
      MaybeObject* maybe =
          GetElementsTransitionMap(GetIsolate(), new_elements_kind);
      if (!maybe->To(&new_map)) return maybe;
    }
    ValidateElements();
    set_map_and_elements(new_map, new_elements);

    // Transition through the allocation site as well if present.
    maybe_obj = UpdateAllocationSite(new_elements_kind);
    if (maybe_obj->IsFailure()) return maybe_obj;
  } else {
    // Non-strict arguments: the real backing store lives in slot 1 of the
    // parameter map, so only that slot needs updating.
    FixedArray* parameter_map = FixedArray::cast(old_elements);
    parameter_map->set(1, new_elements);
  }

  if (FLAG_trace_elements_transitions) {
    PrintElementsTransition(stdout, elements_kind, old_elements,
                            GetElementsKind(), new_elements);
  }

  if (IsJSArray()) {
    JSArray::cast(this)->set_length(Smi::FromInt(length));
  }
  return new_elements;
}


// True if |object|, when embedded in code of the given |kind|, should be
// referenced weakly.  Only optimized code embeds weak objects, and only when
// the corresponding flags allow it.
bool Code::IsWeakEmbeddedObject(Kind kind, Object* object) {
  if (kind != Code::OPTIMIZED_FUNCTION) return false;

  if (object->IsMap()) {
    return Map::cast(object)->CanTransition() &&
        FLAG_collect_maps &&
        FLAG_weak_embedded_maps_in_optimized_code;
  }

  if (object->IsJSObject()) {
    return FLAG_weak_embedded_objects_in_optimized_code;
  }

  return false;
}


// Handle-based wrapper around the double-elements resize below.
void JSObject::SetFastDoubleElementsCapacityAndLength(Handle<JSObject> object,
                                                      int capacity,
                                                      int length) {
  CALL_HEAP_FUNCTION_VOID(
      object->GetIsolate(),
      object->SetFastDoubleElementsCapacityAndLength(capacity, length));
}


// Allocates a FixedDoubleArray backing store of |capacity|, copies the
// existing elements, and transitions to a FAST_(HOLEY_)DOUBLE_ELEMENTS map.
MaybeObject* JSObject::SetFastDoubleElementsCapacityAndLength(
    int capacity,
    int length) {
  Heap* heap = GetHeap();
  // We should never end in here with a pixel or external array.
  ASSERT(!HasExternalArrayElements());

  FixedArrayBase* elems;
  { MaybeObject* maybe_obj =
        heap->AllocateUninitializedFixedDoubleArray(capacity);
    if (!maybe_obj->To(&elems)) return maybe_obj;
  }

  ElementsKind elements_kind = GetElementsKind();
  // Preserve holeyness when transitioning to a double elements kind.
  ElementsKind new_elements_kind = elements_kind;
  if (IsHoleyElementsKind(elements_kind)) {
    new_elements_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
  } else {
    new_elements_kind = FAST_DOUBLE_ELEMENTS;
  }

  Map* new_map;
  { MaybeObject* maybe_obj =
        GetElementsTransitionMap(heap->isolate(), new_elements_kind);
    if (!maybe_obj->To(&new_map)) return maybe_obj;
  }

  FixedArrayBase* old_elements = elements();
  ElementsAccessor* accessor = ElementsAccessor::ForKind(FAST_DOUBLE_ELEMENTS);
  { MaybeObject* maybe_obj =
        accessor->CopyElements(this, elems, elements_kind);
    if (maybe_obj->IsFailure()) return maybe_obj;
  }
  if (elements_kind != NON_STRICT_ARGUMENTS_ELEMENTS) {
    ValidateElements();
    set_map_and_elements(new_map, elems);
  } else {
    // Non-strict arguments: the backing store is slot 1 of the parameter map.
    FixedArray* parameter_map = FixedArray::cast(old_elements);
    parameter_map->set(1, elems);
  }

  if (FLAG_trace_elements_transitions) {
    PrintElementsTransition(stdout, elements_kind, old_elements,
                            GetElementsKind(), elems);
  }

  if (IsJSArray()) {
    JSArray::cast(this)->set_length(Smi::FromInt(length));
  }

  return this;
}


// Sets up the elements backing store of a fresh JSArray, initializing all
// |capacity| slots with the hole.
MaybeObject* JSArray::Initialize(int capacity, int length) {
  ASSERT(capacity >= 0);
  return GetHeap()->AllocateJSArrayStorage(this, length, capacity,
      INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
}


// Grows the array so that at least |required_size| elements fit.
void JSArray::Expand(int required_size) {
  GetIsolate()->factory()->SetElementsCapacityAndLength(
      Handle<JSArray>(this), required_size, required_size);
}


// Returns false if the passed-in index is marked non-configurable,
// which will cause the ES5 truncation operation to halt, and thus
// no further old values need be collected.
// NOTE(review): this compares the attribute set for *equality* with
// DONT_DELETE rather than testing the DONT_DELETE bit, so an element that is
// also DONT_ENUM or READ_ONLY would not stop collection here -- confirm this
// is intended.
static bool GetOldValue(Isolate* isolate,
                        Handle<JSObject> object,
                        uint32_t index,
                        List<Handle<Object> >* old_values,
                        List<uint32_t>* indices) {
  PropertyAttributes attributes = object->GetLocalElementAttribute(index);
  ASSERT(attributes != ABSENT);
  if (attributes == DONT_DELETE) return false;
  // Accessor-backed elements record the hole; plain elements record their
  // current value.
  old_values->Add(object->GetLocalElementAccessorPair(index) == NULL
      ? Object::GetElement(isolate, object, index)
      : Handle<Object>::cast(isolate->factory()->the_hole_value()));
  indices->Add(index);
  return true;
}

// Notifies the object-observation machinery of a splice on |object| at
// |index| that removed the elements in |deleted| and added |add_count|
// elements.
static void EnqueueSpliceRecord(Handle<JSArray> object,
                                uint32_t index,
                                Handle<JSArray> deleted,
                                uint32_t add_count) {
  Isolate* isolate = object->GetIsolate();
  HandleScope scope(isolate);
  Handle<Object> index_object = isolate->factory()->NewNumberFromUint(index);
  Handle<Object> add_count_object =
      isolate->factory()->NewNumberFromUint(add_count);

  Handle<Object> args[] =
      { object, index_object, deleted, add_count_object };

  bool threw;
  Execution::Call(isolate,
                  Handle<JSFunction>(isolate->observers_enqueue_splice()),
                  isolate->factory()->undefined_value(), ARRAY_SIZE(args), args,
                  &threw);
  ASSERT(!threw);
}


// Marks the start of a compound splice operation for observers.
static void BeginPerformSplice(Handle<JSArray> object) {
  Isolate* isolate = object->GetIsolate();
  HandleScope scope(isolate);
  Handle<Object> args[] = { object };

  bool threw;
  Execution::Call(isolate,
                  Handle<JSFunction>(isolate->observers_begin_perform_splice()),
                  isolate->factory()->undefined_value(), ARRAY_SIZE(args), args,
                  &threw);
  ASSERT(!threw);
}


// Marks the end of a compound splice operation for observers.
static void EndPerformSplice(Handle<JSArray>
object) {
  Isolate* isolate = object->GetIsolate();
  HandleScope scope(isolate);
  Handle<Object> args[] = { object };

  bool threw;
  Execution::Call(isolate,
                  Handle<JSFunction>(isolate->observers_end_perform_splice()),
                  isolate->factory()->undefined_value(), ARRAY_SIZE(args), args,
                  &threw);
  ASSERT(!threw);
}


// Sets the array's length.  When the array is observed, also records
// "delete"/"update" change records and a splice record for the removed
// range.  Returns the resulting length value, or a failure.
MaybeObject* JSArray::SetElementsLength(Object* len) {
  // We should never end in here with a pixel or external array.
  ASSERT(AllowsSetElementsLength());
  // Fast path: unobserved arrays need no old-value bookkeeping.
  if (!(FLAG_harmony_observation && map()->is_observed()))
    return GetElementsAccessor()->SetLength(this, len);

  Isolate* isolate = GetIsolate();
  HandleScope scope(isolate);
  Handle<JSArray> self(this);
  List<uint32_t> indices;
  List<Handle<Object> > old_values;
  Handle<Object> old_length_handle(self->length(), isolate);
  Handle<Object> new_length_handle(len, isolate);
  uint32_t old_length = 0;
  CHECK(old_length_handle->ToArrayIndex(&old_length));
  uint32_t new_length = 0;
  if (!new_length_handle->ToArrayIndex(&new_length))
    return Failure::InternalError();

  // Collect the old values of the to-be-removed elements, walking downward
  // from the old length so collection can stop at the first
  // non-configurable element (see GetOldValue).
  static const PropertyAttributes kNoAttrFilter = NONE;
  int num_elements = self->NumberOfLocalElements(kNoAttrFilter);
  if (num_elements > 0) {
    if (old_length == static_cast<uint32_t>(num_elements)) {
      // Simple case for arrays without holes.
      for (uint32_t i = old_length - 1; i + 1 > new_length; --i) {
        if (!GetOldValue(isolate, self, i, &old_values, &indices)) break;
      }
    } else {
      // For sparse arrays, only iterate over existing elements.
      // TODO(rafaelw): For fast, sparse arrays, we can avoid iterating over
      // the to-be-removed indices twice.
      Handle<FixedArray> keys = isolate->factory()->NewFixedArray(num_elements);
      self->GetLocalElementKeys(*keys, kNoAttrFilter);
      while (num_elements-- > 0) {
        uint32_t index = NumberToUint32(keys->get(num_elements));
        if (index < new_length) break;
        if (!GetOldValue(isolate, self, index, &old_values, &indices)) break;
      }
    }
  }

  MaybeObject* result =
      self->GetElementsAccessor()->SetLength(*self, *new_length_handle);
  Handle<Object> hresult;
  if (!result->ToHandle(&hresult, isolate)) return result;

  // Reload the length: SetLength may have clamped it (e.g. at a
  // non-configurable element).
  CHECK(self->length()->ToArrayIndex(&new_length));
  if (old_length == new_length) return *hresult;

  BeginPerformSplice(self);

  for (int i = 0; i < indices.length(); ++i) {
    JSObject::EnqueueChangeRecord(
        self, "delete", isolate->factory()->Uint32ToString(indices[i]),
        old_values[i]);
  }
  JSObject::EnqueueChangeRecord(
      self, "update", isolate->factory()->length_string(),
      old_length_handle);

  EndPerformSplice(self);

  uint32_t index = Min(old_length, new_length);
  uint32_t add_count = new_length > old_length ? new_length - old_length : 0;
  uint32_t delete_count = new_length < old_length ?
      old_length - new_length : 0;
  Handle<JSArray> deleted = isolate->factory()->NewJSArray(0);
  if (delete_count > 0) {
    // Materialize the deleted elements (re-indexed relative to the splice
    // start) on the |deleted| array handed to observers.
    for (int i = indices.length() - 1; i >= 0; i--) {
      JSObject::SetElement(deleted, indices[i] - index, old_values[i], NONE,
                           kNonStrictMode);
    }

    SetProperty(deleted, isolate->factory()->length_string(),
                isolate->factory()->NewNumberFromUint(delete_count),
                NONE, kNonStrictMode);
  }

  EnqueueSpliceRecord(self, index, deleted, add_count);

  return *hresult;
}


// Looks up a cached map transition for changing the prototype to
// |prototype|.  Returns a null handle if no cached transition exists.
Handle<Map> Map::GetPrototypeTransition(Handle<Map> map,
                                        Handle<Object> prototype) {
  FixedArray* cache = map->GetPrototypeTransitions();
  int number_of_transitions = map->NumberOfProtoTransitions();
  const int proto_offset =
      kProtoTransitionHeaderSize + kProtoTransitionPrototypeOffset;
  const int map_offset = kProtoTransitionHeaderSize + kProtoTransitionMapOffset;
  const int step = kProtoTransitionElementsPerEntry;
  for (int i = 0; i < number_of_transitions; i++) {
    if (cache->get(proto_offset + i * step) == *prototype) {
      Object* result = cache->get(map_offset + i * step);
      return Handle<Map>(Map::cast(result));
    }
  }
  return Handle<Map>();
}


// Caches |target_map| as the transition reached by setting |prototype| on
// |map|, growing the transition cache as needed.  Returns |map|.
Handle<Map> Map::PutPrototypeTransition(Handle<Map> map,
                                        Handle<Object> prototype,
                                        Handle<Map> target_map) {
  ASSERT(target_map->IsMap());
  ASSERT(HeapObject::cast(*prototype)->map()->IsMap());
  // Don't cache prototype transition if this map is shared.
  if (map->is_shared() || !FLAG_cache_prototype_transitions) return map;

  const int step = kProtoTransitionElementsPerEntry;
  const int header = kProtoTransitionHeaderSize;

  Handle<FixedArray> cache(map->GetPrototypeTransitions());
  int capacity = (cache->length() - header) / step;
  int transitions = map->NumberOfProtoTransitions() + 1;

  if (transitions > capacity) {
    // Give up rather than grow the cache without bound.
    if (capacity > kMaxCachedPrototypeTransitions) return map;

    // Grow array by factor 2 over and above what we need.
    Factory* factory = map->GetIsolate()->factory();
    cache = factory->CopySizeFixedArray(cache, transitions * 2 * step + header);

    CALL_AND_RETRY_OR_DIE(map->GetIsolate(),
                          map->SetPrototypeTransitions(*cache),
                          break,
                          return Handle<Map>());
  }

  // Reload number of transitions as GC might shrink them.
  int last = map->NumberOfProtoTransitions();
  int entry = header + last * step;

  cache->set(entry + kProtoTransitionPrototypeOffset, *prototype);
  cache->set(entry + kProtoTransitionMapOffset, *target_map);
  map->SetNumberOfProtoTransitions(last + 1);

  return map;
}


// Overwrites the transition array with the hole so stale entries cannot be
// mistaken for live data.
void Map::ZapTransitions() {
  TransitionArray* transition_array = transitions();
  // TODO(mstarzinger): Temporarily use a slower version instead of the faster
  // MemsetPointer to investigate a crasher. Switch back to MemsetPointer.
  Object** data = transition_array->data_start();
  Object* the_hole = GetHeap()->the_hole_value();
  int length = transition_array->length();
  for (int i = 0; i < length; i++) {
    data[i] = the_hole;
  }
}


// Overwrites the prototype-transition cache with the hole.
void Map::ZapPrototypeTransitions() {
  FixedArray* proto_transitions = GetPrototypeTransitions();
  MemsetPointer(proto_transitions->data_start(),
                GetHeap()->the_hole_value(),
                proto_transitions->length());
}


// Registers |info| as depending on this map for dependency |group|, both in
// the map's dependent-code list and in the compilation info's own
// bookkeeping.
void Map::AddDependentCompilationInfo(DependentCode::DependencyGroup group,
                                      CompilationInfo* info) {
  Handle<DependentCode> dep(dependent_code());
  Handle<DependentCode> codes =
      DependentCode::Insert(dep, group, info->object_wrapper());
  if (*codes != dependent_code()) set_dependent_code(*codes);
  info->dependencies(group)->Add(Handle<HeapObject>(this), info->zone());
}


// Registers a finished |code| object as depending on this map.
void Map::AddDependentCode(DependentCode::DependencyGroup group,
                           Handle<Code> code) {
  Handle<DependentCode> codes = DependentCode::Insert(
      Handle<DependentCode>(dependent_code()), group, code);
  if (*codes != dependent_code()) set_dependent_code(*codes);
}


DependentCode::GroupStartIndexes::GroupStartIndexes(DependentCode* entries) {
  Recompute(entries);
}


// Recomputes the prefix sums of the per-group entry counts:
// start_indexes_[g] is the first slot of group g, and
// start_indexes_[kGroupCount] is the total number of entries.
void DependentCode::GroupStartIndexes::Recompute(DependentCode* entries) {
  start_indexes_[0] = 0;
  for (int g = 1; g <= kGroupCount; g++) {
    int count = entries->number_of_entries(static_cast<DependencyGroup>(g - 1));
    start_indexes_[g] = start_indexes_[g - 1] + count;
  }
}


// Returns the dependent-code list stored on |object| for dependency |group|
// (property cells and allocation sites carry their own lists; everything
// else is a map).
DependentCode* DependentCode::ForObject(Handle<HeapObject> object,
                                        DependencyGroup group) {
  AllowDeferredHandleDereference dependencies_are_safe;
  if (group == DependentCode::kPropertyCellChangedGroup) {
    return Handle<PropertyCell>::cast(object)->dependent_code();
  } else
      if (group == DependentCode::kAllocationSiteTenuringChangedGroup ||
      group == DependentCode::kAllocationSiteTransitionChangedGroup) {
    return Handle<AllocationSite>::cast(object)->dependent_code();
  }
  return Handle<Map>::cast(object)->dependent_code();
}


// Adds |object| (a code object or a CompilationInfo wrapper) to the entries
// of dependency |group|, growing the underlying fixed array when needed.
// Returns the (possibly reallocated) dependent-code array.
Handle<DependentCode> DependentCode::Insert(Handle<DependentCode> entries,
                                            DependencyGroup group,
                                            Handle<Object> object) {
  GroupStartIndexes starts(*entries);
  int start = starts.at(group);
  int end = starts.at(group + 1);
  int number_of_entries = starts.number_of_entries();
  // Check for existing entry to avoid duplicates.
  for (int i = start; i < end; i++) {
    if (entries->object_at(i) == *object) return entries;
  }
  if (entries->length() < kCodesStartIndex + number_of_entries + 1) {
    Factory* factory = entries->GetIsolate()->factory();
    int capacity = kCodesStartIndex + number_of_entries + 1;
    if (capacity > 5) capacity = capacity * 5 / 4;
    Handle<DependentCode> new_entries = Handle<DependentCode>::cast(
        factory->CopySizeFixedArray(entries, capacity, TENURED));
    // The number of codes can change after GC.
    starts.Recompute(*entries);
    start = starts.at(group);
    end = starts.at(group + 1);
    number_of_entries = starts.number_of_entries();
    // Clear the old array so its stale entries do not keep objects alive.
    for (int i = 0; i < number_of_entries; i++) {
      entries->clear_at(i);
    }
    // If the old fixed array was empty, we need to reset counters of the
    // new array.
    if (number_of_entries == 0) {
      for (int g = 0; g < kGroupCount; g++) {
        new_entries->set_number_of_entries(static_cast<DependencyGroup>(g), 0);
      }
    }
    entries = new_entries;
  }
  entries->ExtendGroup(group);
  entries->set_object_at(end, *object);
  entries->set_number_of_entries(group, end + 1 - start);
  return entries;
}


// Replaces the CompilationInfo wrapper for |info| with the finished |code|
// object in the entries of |group|.
void DependentCode::UpdateToFinishedCode(DependencyGroup group,
                                         CompilationInfo* info,
                                         Code* code) {
  DisallowHeapAllocation no_gc;
  AllowDeferredHandleDereference get_object_wrapper;
  Foreign* info_wrapper = *info->object_wrapper();
  GroupStartIndexes starts(this);
  int start = starts.at(group);
  int end = starts.at(group + 1);
  for (int i = start; i < end; i++) {
    if (object_at(i) == info_wrapper) {
      set_object_at(i, code);
      break;
    }
  }

#ifdef DEBUG
  for (int i = start; i < end; i++) {
    ASSERT(is_code_at(i) || compilation_info_at(i) != info);
  }
#endif
}


// Removes the entry for |info| from |group|, compacting the following
// groups down to close the gap.
void DependentCode::RemoveCompilationInfo(DependentCode::DependencyGroup group,
                                          CompilationInfo* info) {
  DisallowHeapAllocation no_allocation;
  AllowDeferredHandleDereference get_object_wrapper;
  Foreign* info_wrapper = *info->object_wrapper();
  GroupStartIndexes starts(this);
  int start = starts.at(group);
  int end = starts.at(group + 1);
  // Find compilation info wrapper.
  int info_pos = -1;
  for (int i = start; i < end; i++) {
    if (object_at(i) == info_wrapper) {
      info_pos = i;
      break;
    }
  }
  if (info_pos == -1) return;  // Not found.
  int gap = info_pos;
  // Use the last of each group to fill the gap in the previous group.
  for (int i = group; i < kGroupCount; i++) {
    int last_of_group = starts.at(i + 1) - 1;
    ASSERT(last_of_group >= gap);
    if (last_of_group == gap) continue;
    copy(last_of_group, gap);
    gap = last_of_group;
  }
  ASSERT(gap == starts.number_of_entries() - 1);
  clear_at(gap);  // Clear last gap.
  set_number_of_entries(group, end - start - 1);

#ifdef DEBUG
  for (int i = start; i < end - 1; i++) {
    ASSERT(is_code_at(i) || compilation_info_at(i) != info);
  }
#endif
}


// True if |code| is registered in the entries of |group|.
bool DependentCode::Contains(DependencyGroup group, Code* code) {
  GroupStartIndexes starts(this);
  int start = starts.at(group);
  int end = starts.at(group + 1);
  for (int i = start; i < end; i++) {
    if (object_at(i) == code) return true;
  }
  return false;
}


// Marks every code object in |group| for deoptimization (aborting
// not-yet-finished compilations), removes the group's entries, and triggers
// deoptimization of the marked code.
void DependentCode::DeoptimizeDependentCodeGroup(
    Isolate* isolate,
    DependentCode::DependencyGroup group) {
  ASSERT(AllowCodeDependencyChange::IsAllowed());
  DisallowHeapAllocation no_allocation_scope;
  DependentCode::GroupStartIndexes starts(this);
  int start = starts.at(group);
  int end = starts.at(group + 1);
  int code_entries = starts.number_of_entries();
  if (start == end) return;

  // Mark all the code that needs to be deoptimized.
  bool marked = false;
  for (int i = start; i < end; i++) {
    if (is_code_at(i)) {
      Code* code = code_at(i);
      if (!code->marked_for_deoptimization()) {
        code->set_marked_for_deoptimization(true);
        marked = true;
      }
    } else {
      CompilationInfo* info = compilation_info_at(i);
      info->AbortDueToDependencyChange();
    }
  }
  // Compact the array by moving all subsequent groups to fill in the new holes.
  for (int src = end, dst = start; src < code_entries; src++, dst++) {
    copy(src, dst);
  }
  // Now the holes are at the end of the array, zap them for heap-verifier.
  int removed = end - start;
  for (int i = code_entries - removed; i < code_entries; i++) {
    clear_at(i);
  }
  set_number_of_entries(group, 0);

  if (marked) Deoptimizer::DeoptimizeMarkedCode(isolate);
}


// Sets |value| as the [[Prototype]] of |object|.  Silently ignores values
// that are neither JSReceiver nor null, throws a TypeError for
// non-extensible objects and for cyclic prototype chains (returning a null
// handle on exception), and caches the map change as a prototype transition.
Handle<Object> JSObject::SetPrototype(Handle<JSObject> object,
                                      Handle<Object> value,
                                      bool skip_hidden_prototypes) {
#ifdef DEBUG
  int size = object->Size();
#endif

  Isolate* isolate = object->GetIsolate();
  Heap* heap = isolate->heap();
  // Silently ignore the change if value is not a JSObject or null.
  // SpiderMonkey behaves this way.
  if (!value->IsJSReceiver() && !value->IsNull()) return value;

  // From 8.6.2 Object Internal Methods
  // ...
  // In addition, if [[Extensible]] is false the value of the [[Class]] and
  // [[Prototype]] internal properties of the object may not be modified.
  // ...
  // Implementation specific extensions that modify [[Class]], [[Prototype]]
  // or [[Extensible]] must not violate the invariants defined in the preceding
  // paragraph.
  if (!object->map()->is_extensible()) {
    Handle<Object> args[] = { object };
    Handle<Object> error = isolate->factory()->NewTypeError(
        "non_extensible_proto", HandleVector(args, ARRAY_SIZE(args)));
    isolate->Throw(*error);
    return Handle<Object>();
  }

  // Before we can set the prototype we need to be sure
  // prototype cycles are prevented.
  // It is sufficient to validate that the receiver is not in the new prototype
  // chain.
  for (Object* pt = *value;
       pt != heap->null_value();
       pt = pt->GetPrototype(isolate)) {
    if (JSReceiver::cast(pt) == *object) {
      // Cycle detected.
      Handle<Object> error = isolate->factory()->NewError(
          "cyclic_proto", HandleVector<Object>(NULL, 0));
      isolate->Throw(*error);
      return Handle<Object>();
    }
  }

  bool dictionary_elements_in_chain =
      object->map()->DictionaryElementsInPrototypeChainOnly();
  Handle<JSObject> real_receiver = object;

  if (skip_hidden_prototypes) {
    // Find the first object in the chain whose prototype object is not
    // hidden and set the new prototype on that object.
    Object* current_proto = real_receiver->GetPrototype();
    while (current_proto->IsJSObject() &&
           JSObject::cast(current_proto)->map()->is_hidden_prototype()) {
      real_receiver = handle(JSObject::cast(current_proto), isolate);
      current_proto = current_proto->GetPrototype(isolate);
    }
  }

  // Set the new prototype of the object.
  Handle<Map> map(real_receiver->map());

  // Nothing to do if prototype is already set.
  if (map->prototype() == *value) return value;

  if (value->IsJSObject()) {
    JSObject::OptimizeAsPrototype(Handle<JSObject>::cast(value));
  }

  // Reuse a cached prototype transition if one exists; otherwise copy the
  // map and cache the new transition.
  Handle<Map> new_map = Map::GetPrototypeTransition(map, value);
  if (new_map.is_null()) {
    new_map = Map::Copy(map);
    Map::PutPrototypeTransition(map, value, new_map);
    new_map->set_prototype(*value);
  }
  ASSERT(new_map->prototype() == *value);
  real_receiver->set_map(*new_map);

  if (!dictionary_elements_in_chain &&
      new_map->DictionaryElementsInPrototypeChainOnly()) {
    // If the prototype chain didn't previously have element callbacks, then
    // KeyedStoreICs need to be cleared to ensure any that involve this
    // map go generic.
    object->GetHeap()->ClearAllICsByKind(Code::KEYED_STORE_IC);
  }

  heap->ClearInstanceofCache();
  ASSERT(size == object->Size());
  return value;
}


// Checks that the |arg_count| stack arguments starting at |first_arg| can be
// stored in this object's elements without requiring a kind transition.
MaybeObject* JSObject::EnsureCanContainElements(Arguments* args,
                                               uint32_t first_arg,
                                               uint32_t arg_count,
                                               EnsureElementsMode mode) {
  // Elements in |Arguments| are ordered backwards (because they're on the
  // stack), but the method that's called here iterates over them in forward
  // direction.
  return EnsureCanContainElements(
      args->arguments() - first_arg - (arg_count - 1),
      arg_count, mode);
}


// Returns the AccessorPair for the own property |name| (element or named
// property), or NULL if the property is not an accessor.
AccessorPair* JSObject::GetLocalPropertyAccessorPair(Name* name) {
  uint32_t index = 0;
  if (name->AsArrayIndex(&index)) {
    return GetLocalElementAccessorPair(index);
  }

  LookupResult lookup(GetIsolate());
  LocalLookupRealNamedProperty(name, &lookup);

  if (lookup.IsPropertyCallbacks() &&
      lookup.GetCallbackObject()->IsAccessorPair()) {
    return AccessorPair::cast(lookup.GetCallbackObject());
  }
  return NULL;
}


// Returns the AccessorPair for the own element |index|, or NULL.  Global
// proxies forward to their global object.
AccessorPair* JSObject::GetLocalElementAccessorPair(uint32_t index) {
  if (IsJSGlobalProxy()) {
    Object* proto = GetPrototype();
    if (proto->IsNull()) return NULL;
    ASSERT(proto->IsJSGlobalObject());
    return JSObject::cast(proto)->GetLocalElementAccessorPair(index);
  }

  // Check for lookup interceptor.
  if (HasIndexedInterceptor()) return NULL;

  return GetElementsAccessor()->GetAccessorPair(this, this, index);
}


// Stores |value| at element |index| through the object's indexed
// interceptor, falling back to SetElementWithoutInterceptor when the
// interceptor does not handle the store.  Returns a null handle if an
// exception is scheduled.
Handle<Object> JSObject::SetElementWithInterceptor(
    Handle<JSObject> object,
    uint32_t index,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictModeFlag strict_mode,
    bool check_prototype,
    SetPropertyMode set_mode) {
  Isolate* isolate = object->GetIsolate();

  // Make sure that the top context does not change when doing
  // callbacks or interceptor calls.
  AssertNoContextChange ncc(isolate);

  Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
  if (!interceptor->setter()->IsUndefined()) {
    v8::IndexedPropertySetterCallback setter =
        v8::ToCData<v8::IndexedPropertySetterCallback>(interceptor->setter());
    LOG(isolate,
        ApiIndexedPropertyAccess("interceptor-indexed-set", *object, index));
    PropertyCallbackArguments args(isolate, interceptor->data(), *object,
                                   *object);
    v8::Handle<v8::Value> result =
        args.Call(setter, index, v8::Utils::ToLocal(value));
    RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
    // A non-empty result means the interceptor handled the store.
    if (!result.IsEmpty()) return value;
  }

  return SetElementWithoutInterceptor(object, index, value, attributes,
                                      strict_mode,
                                      check_prototype,
                                      set_mode);
}


// Loads element |index| through an accessor |structure| (API callback,
// JavaScript getter, or declared accessor) defined on |holder|.
MaybeObject* JSObject::GetElementWithCallback(Object* receiver,
                                              Object* structure,
                                              uint32_t index,
                                              Object* holder) {
  Isolate* isolate = GetIsolate();
  ASSERT(!structure->IsForeign());

  // api style callbacks.
  if (structure->IsExecutableAccessorInfo()) {
    Handle<ExecutableAccessorInfo> data(
        ExecutableAccessorInfo::cast(structure));
    Object* fun_obj = data->getter();
    v8::AccessorGetterCallback call_fun =
        v8::ToCData<v8::AccessorGetterCallback>(fun_obj);
    if (call_fun == NULL) return isolate->heap()->undefined_value();
    HandleScope scope(isolate);
    Handle<JSObject> self(JSObject::cast(receiver));
    Handle<JSObject> holder_handle(JSObject::cast(holder));
    // API getters are keyed by name, so convert the index to a string key.
    Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
    Handle<String> key = isolate->factory()->NumberToString(number);
    LOG(isolate, ApiNamedPropertyAccess("load", *self, *key));
    PropertyCallbackArguments
        args(isolate, data->data(), *self, *holder_handle);
    v8::Handle<v8::Value> result = args.Call(call_fun, v8::Utils::ToLocal(key));
    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
    if (result.IsEmpty()) return isolate->heap()->undefined_value();
    Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
    result_internal->VerifyApiCallResultType();
    return *result_internal;
  }

  // __defineGetter__ callback
  if (structure->IsAccessorPair()) {
    Object* getter = AccessorPair::cast(structure)->getter();
    if (getter->IsSpecFunction()) {
      // TODO(rossberg): nicer would be to cast to some JSCallable here...
      return GetPropertyWithDefinedGetter(receiver, JSReceiver::cast(getter));
    }
    // Getter is not a function.
    return isolate->heap()->undefined_value();
  }

  if (structure->IsDeclaredAccessorInfo()) {
    return GetDeclaredAccessorProperty(receiver,
                                       DeclaredAccessorInfo::cast(structure),
                                       isolate);
  }

  UNREACHABLE();
  return NULL;
}


// Stores |value| at element |index| through an accessor |structure| (API
// callback or JavaScript setter) defined on |holder|.  A missing setter is
// a no-op in non-strict mode and a TypeError in strict mode; returns a null
// handle on exception.
Handle<Object> JSObject::SetElementWithCallback(Handle<JSObject> object,
                                                Handle<Object> structure,
                                                uint32_t index,
                                                Handle<Object> value,
                                                Handle<JSObject> holder,
                                                StrictModeFlag strict_mode) {
  Isolate* isolate = object->GetIsolate();

  // We should never get here to initialize a const with the hole
  // value since a const declaration would conflict with the setter.
  ASSERT(!value->IsTheHole());

  // To accommodate both the old and the new api we switch on the
  // data structure used to store the callbacks. Eventually foreign
  // callbacks should be phased out.
  ASSERT(!structure->IsForeign());

  if (structure->IsExecutableAccessorInfo()) {
    // api style callbacks
    Handle<ExecutableAccessorInfo> data =
        Handle<ExecutableAccessorInfo>::cast(structure);
    Object* call_obj = data->setter();
    v8::AccessorSetterCallback call_fun =
        v8::ToCData<v8::AccessorSetterCallback>(call_obj);
    if (call_fun == NULL) return value;
    Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
    Handle<String> key(isolate->factory()->NumberToString(number));
    LOG(isolate, ApiNamedPropertyAccess("store", *object, *key));
    PropertyCallbackArguments
        args(isolate, data->data(), *object, *holder);
    args.Call(call_fun,
              v8::Utils::ToLocal(key),
              v8::Utils::ToLocal(value));
    RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return value;
  }

  if (structure->IsAccessorPair()) {
    Handle<Object> setter(AccessorPair::cast(*structure)->setter(), isolate);
    if (setter->IsSpecFunction()) {
      // TODO(rossberg): nicer would be to cast to some JSCallable here...
      return SetPropertyWithDefinedSetter(
          object, Handle<JSReceiver>::cast(setter), value);
    } else {
      if (strict_mode == kNonStrictMode) {
        return value;
      }
      Handle<Object> key(isolate->factory()->NewNumberFromUint(index));
      Handle<Object> args[2] = { key, holder };
      Handle<Object> error = isolate->factory()->NewTypeError(
          "no_setter_in_callback", HandleVector(args, 2));
      isolate->Throw(*error);
      return Handle<Object>();
    }
  }

  // TODO(dcarney): Handle correctly.
  if (structure->IsDeclaredAccessorInfo()) return value;

  UNREACHABLE();
  return Handle<Object>();
}


// True if this object has non-strict arguments elements whose backing store
// (slot 1 of the parameter map) is a plain, non-dictionary fixed array.
bool JSObject::HasFastArgumentsElements() {
  Heap* heap = GetHeap();
  if (!elements()->IsFixedArray()) return false;
  FixedArray* elements = FixedArray::cast(this->elements());
  if (elements->map() != heap->non_strict_arguments_elements_map()) {
    return false;
  }
  FixedArray* arguments = FixedArray::cast(elements->get(1));
  return !arguments->IsDictionary();
}


// True if this object has non-strict arguments elements backed by a
// dictionary.
bool JSObject::HasDictionaryArgumentsElements() {
  Heap* heap = GetHeap();
  if (!elements()->IsFixedArray()) return false;
  FixedArray* elements = FixedArray::cast(this->elements());
  if (elements->map() != heap->non_strict_arguments_elements_map()) {
    return false;
  }
  FixedArray* arguments = FixedArray::cast(elements->get(1));
  return arguments->IsDictionary();
}


// Adding n elements in fast case is O(n*n).
// Note: revisit design to have dual undefined values to capture absent
// elements.
// Stores |value| at |index| in an object with fast (Smi/object) or fast
// arguments elements, growing the backing store, transitioning elements
// kinds, or normalizing to dictionary elements as needed.
Handle<Object> JSObject::SetFastElement(Handle<JSObject> object,
                                        uint32_t index,
                                        Handle<Object> value,
                                        StrictModeFlag strict_mode,
                                        bool check_prototype) {
  ASSERT(object->HasFastSmiOrObjectElements() ||
         object->HasFastArgumentsElements());

  Isolate* isolate = object->GetIsolate();

  // Array optimizations rely on the prototype lookups of Array objects always
  // returning undefined. If there is a store to the initial prototype object,
  // make sure all of these optimizations are invalidated.
  if (isolate->is_initial_object_prototype(*object) ||
      isolate->is_initial_array_prototype(*object)) {
    object->map()->dependent_code()->DeoptimizeDependentCodeGroup(isolate,
        DependentCode::kElementsCantBeAddedGroup);
  }

  Handle<FixedArray> backing_store(FixedArray::cast(object->elements()));
  if (backing_store->map() ==
      isolate->heap()->non_strict_arguments_elements_map()) {
    // Arguments object: the real backing store is slot 1 of the parameter
    // map.
    backing_store = handle(FixedArray::cast(backing_store->get(1)));
  } else {
    backing_store = EnsureWritableFastElements(object);
  }
  uint32_t capacity = static_cast<uint32_t>(backing_store->length());

  // A store to a previously absent element must consult setters found on the
  // prototype chain first.
  if (check_prototype &&
      (index >= capacity || backing_store->get(index)->IsTheHole())) {
    bool found;
    Handle<Object> result = SetElementWithCallbackSetterInPrototypes(
        object, index, value, &found, strict_mode);
    if (found) return result;
  }

  uint32_t new_capacity = capacity;
  // Check if the length property of this object needs to be updated.
  uint32_t array_length = 0;
  bool must_update_array_length = false;
  bool introduces_holes = true;
  if (object->IsJSArray()) {
    CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&array_length));
    introduces_holes = index > array_length;
    if (index >= array_length) {
      must_update_array_length = true;
      array_length = index + 1;
    }
  } else {
    introduces_holes = index >= capacity;
  }

  // If the array is growing, and it's not growth by a single element at the
  // end, make sure that the ElementsKind is HOLEY.
  ElementsKind elements_kind = object->GetElementsKind();
  if (introduces_holes &&
      IsFastElementsKind(elements_kind) &&
      !IsFastHoleyElementsKind(elements_kind)) {
    ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
    TransitionElementsKind(object, transitioned_kind);
  }

  // Check if the capacity of the backing store needs to be increased, or if
  // a transition to slow elements is necessary.
  if (index >= capacity) {
    bool convert_to_slow = true;
    if ((index - capacity) < kMaxGap) {
      new_capacity = NewElementsCapacity(index + 1);
      ASSERT(new_capacity > index);
      if (!object->ShouldConvertToSlowElements(new_capacity)) {
        convert_to_slow = false;
      }
    }
    if (convert_to_slow) {
      NormalizeElements(object);
      return SetDictionaryElement(object, index, value, NONE, strict_mode,
                                  check_prototype);
    }
  }
  // Convert to fast double elements if appropriate.
  if (object->HasFastSmiElements() && !value->IsSmi() && value->IsNumber()) {
    // Consider fixing the boilerplate as well if we have one.
    ElementsKind to_kind = IsHoleyElementsKind(elements_kind)
        ? FAST_HOLEY_DOUBLE_ELEMENTS
        : FAST_DOUBLE_ELEMENTS;

    UpdateAllocationSite(object, to_kind);

    SetFastDoubleElementsCapacityAndLength(object, new_capacity, array_length);
    FixedDoubleArray::cast(object->elements())->set(index, value->Number());
    object->ValidateElements();
    return value;
  }
  // Change elements kind from Smi-only to generic FAST if necessary.
  if (object->HasFastSmiElements() && !value->IsSmi()) {
    ElementsKind kind = object->HasFastHoleyElements()
        ? FAST_HOLEY_ELEMENTS
        : FAST_ELEMENTS;

    UpdateAllocationSite(object, kind);
    Handle<Map> new_map = GetElementsTransitionMap(object, kind);
    object->set_map(*new_map);
    ASSERT(IsFastObjectElementsKind(object->GetElementsKind()));
  }
  // Increase backing store capacity if that's been decided previously.
  if (new_capacity != capacity) {
    SetFastElementsCapacitySmiMode smi_mode =
        value->IsSmi() && object->HasFastSmiElements()
            ? kAllowSmiElements
            : kDontAllowSmiElements;
    Handle<FixedArray> new_elements =
        SetFastElementsCapacityAndLength(object, new_capacity, array_length,
                                         smi_mode);
    new_elements->set(index, *value);
    object->ValidateElements();
    return value;
  }

  // Finally, set the new element and length.
12274 ASSERT(object->elements()->IsFixedArray()); 12275 backing_store->set(index, *value); 12276 if (must_update_array_length) { 12277 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(array_length)); 12278 } 12279 return value; 12280 } 12281 12282 12283 Handle<Object> JSObject::SetDictionaryElement(Handle<JSObject> object, 12284 uint32_t index, 12285 Handle<Object> value, 12286 PropertyAttributes attributes, 12287 StrictModeFlag strict_mode, 12288 bool check_prototype, 12289 SetPropertyMode set_mode) { 12290 ASSERT(object->HasDictionaryElements() || 12291 object->HasDictionaryArgumentsElements()); 12292 Isolate* isolate = object->GetIsolate(); 12293 12294 // Insert element in the dictionary. 12295 Handle<FixedArray> elements(FixedArray::cast(object->elements())); 12296 bool is_arguments = 12297 (elements->map() == isolate->heap()->non_strict_arguments_elements_map()); 12298 Handle<SeededNumberDictionary> dictionary(is_arguments 12299 ? SeededNumberDictionary::cast(elements->get(1)) 12300 : SeededNumberDictionary::cast(*elements)); 12301 12302 int entry = dictionary->FindEntry(index); 12303 if (entry != SeededNumberDictionary::kNotFound) { 12304 Handle<Object> element(dictionary->ValueAt(entry), isolate); 12305 PropertyDetails details = dictionary->DetailsAt(entry); 12306 if (details.type() == CALLBACKS && set_mode == SET_PROPERTY) { 12307 return SetElementWithCallback(object, element, index, value, object, 12308 strict_mode); 12309 } else { 12310 dictionary->UpdateMaxNumberKey(index); 12311 // If a value has not been initialized we allow writing to it even if it 12312 // is read-only (a declared const that has not been initialized). If a 12313 // value is being defined we skip attribute checks completely. 
12314 if (set_mode == DEFINE_PROPERTY) { 12315 details = PropertyDetails( 12316 attributes, NORMAL, details.dictionary_index()); 12317 dictionary->DetailsAtPut(entry, details); 12318 } else if (details.IsReadOnly() && !element->IsTheHole()) { 12319 if (strict_mode == kNonStrictMode) { 12320 return isolate->factory()->undefined_value(); 12321 } else { 12322 Handle<Object> number = isolate->factory()->NewNumberFromUint(index); 12323 Handle<Object> args[2] = { number, object }; 12324 Handle<Object> error = 12325 isolate->factory()->NewTypeError("strict_read_only_property", 12326 HandleVector(args, 2)); 12327 isolate->Throw(*error); 12328 return Handle<Object>(); 12329 } 12330 } 12331 // Elements of the arguments object in slow mode might be slow aliases. 12332 if (is_arguments && element->IsAliasedArgumentsEntry()) { 12333 Handle<AliasedArgumentsEntry> entry = 12334 Handle<AliasedArgumentsEntry>::cast(element); 12335 Handle<Context> context(Context::cast(elements->get(0))); 12336 int context_index = entry->aliased_context_slot(); 12337 ASSERT(!context->get(context_index)->IsTheHole()); 12338 context->set(context_index, *value); 12339 // For elements that are still writable we keep slow aliasing. 12340 if (!details.IsReadOnly()) value = element; 12341 } 12342 dictionary->ValueAtPut(entry, *value); 12343 } 12344 } else { 12345 // Index not already used. Look for an accessor in the prototype chain. 12346 // Can cause GC! 12347 if (check_prototype) { 12348 bool found; 12349 Handle<Object> result = SetElementWithCallbackSetterInPrototypes(object, 12350 index, value, &found, strict_mode); 12351 if (found) return result; 12352 } 12353 12354 // When we set the is_extensible flag to false we always force the 12355 // element into dictionary mode (and force them to stay there). 
12356 if (!object->map()->is_extensible()) { 12357 if (strict_mode == kNonStrictMode) { 12358 return isolate->factory()->undefined_value(); 12359 } else { 12360 Handle<Object> number = isolate->factory()->NewNumberFromUint(index); 12361 Handle<String> name = isolate->factory()->NumberToString(number); 12362 Handle<Object> args[1] = { name }; 12363 Handle<Object> error = 12364 isolate->factory()->NewTypeError("object_not_extensible", 12365 HandleVector(args, 1)); 12366 isolate->Throw(*error); 12367 return Handle<Object>(); 12368 } 12369 } 12370 12371 PropertyDetails details = PropertyDetails(attributes, NORMAL, 0); 12372 Handle<SeededNumberDictionary> new_dictionary = 12373 SeededNumberDictionary::AddNumberEntry(dictionary, index, value, 12374 details); 12375 if (*dictionary != *new_dictionary) { 12376 if (is_arguments) { 12377 elements->set(1, *new_dictionary); 12378 } else { 12379 object->set_elements(*new_dictionary); 12380 } 12381 dictionary = new_dictionary; 12382 } 12383 } 12384 12385 // Update the array length if this JSObject is an array. 12386 if (object->IsJSArray()) { 12387 JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray>::cast(object), index, 12388 value); 12389 } 12390 12391 // Attempt to put this object back in fast case. 12392 if (object->ShouldConvertToFastElements()) { 12393 uint32_t new_length = 0; 12394 if (object->IsJSArray()) { 12395 CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&new_length)); 12396 } else { 12397 new_length = dictionary->max_number_key() + 1; 12398 } 12399 SetFastElementsCapacitySmiMode smi_mode = FLAG_smi_only_arrays 12400 ? 
kAllowSmiElements 12401 : kDontAllowSmiElements; 12402 bool has_smi_only_elements = false; 12403 bool should_convert_to_fast_double_elements = 12404 object->ShouldConvertToFastDoubleElements(&has_smi_only_elements); 12405 if (has_smi_only_elements) { 12406 smi_mode = kForceSmiElements; 12407 } 12408 12409 if (should_convert_to_fast_double_elements) { 12410 SetFastDoubleElementsCapacityAndLength(object, new_length, new_length); 12411 } else { 12412 SetFastElementsCapacityAndLength(object, new_length, new_length, 12413 smi_mode); 12414 } 12415 object->ValidateElements(); 12416 #ifdef DEBUG 12417 if (FLAG_trace_normalization) { 12418 PrintF("Object elements are fast case again:\n"); 12419 object->Print(); 12420 } 12421 #endif 12422 } 12423 return value; 12424 } 12425 12426 Handle<Object> JSObject::SetFastDoubleElement( 12427 Handle<JSObject> object, 12428 uint32_t index, 12429 Handle<Object> value, 12430 StrictModeFlag strict_mode, 12431 bool check_prototype) { 12432 ASSERT(object->HasFastDoubleElements()); 12433 12434 Handle<FixedArrayBase> base_elms(FixedArrayBase::cast(object->elements())); 12435 uint32_t elms_length = static_cast<uint32_t>(base_elms->length()); 12436 12437 // If storing to an element that isn't in the array, pass the store request 12438 // up the prototype chain before storing in the receiver's elements. 12439 if (check_prototype && 12440 (index >= elms_length || 12441 Handle<FixedDoubleArray>::cast(base_elms)->is_the_hole(index))) { 12442 bool found; 12443 Handle<Object> result = SetElementWithCallbackSetterInPrototypes(object, 12444 index, value, &found, strict_mode); 12445 if (found) return result; 12446 } 12447 12448 // If the value object is not a heap number, switch to fast elements and try 12449 // again. 
12450 bool value_is_smi = value->IsSmi(); 12451 bool introduces_holes = true; 12452 uint32_t length = elms_length; 12453 if (object->IsJSArray()) { 12454 CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&length)); 12455 introduces_holes = index > length; 12456 } else { 12457 introduces_holes = index >= elms_length; 12458 } 12459 12460 if (!value->IsNumber()) { 12461 SetFastElementsCapacityAndLength(object, elms_length, length, 12462 kDontAllowSmiElements); 12463 Handle<Object> result = SetFastElement(object, index, value, strict_mode, 12464 check_prototype); 12465 RETURN_IF_EMPTY_HANDLE_VALUE(object->GetIsolate(), result, 12466 Handle<Object>()); 12467 object->ValidateElements(); 12468 return result; 12469 } 12470 12471 double double_value = value_is_smi 12472 ? static_cast<double>(Handle<Smi>::cast(value)->value()) 12473 : Handle<HeapNumber>::cast(value)->value(); 12474 12475 // If the array is growing, and it's not growth by a single element at the 12476 // end, make sure that the ElementsKind is HOLEY. 12477 ElementsKind elements_kind = object->GetElementsKind(); 12478 if (introduces_holes && !IsFastHoleyElementsKind(elements_kind)) { 12479 ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind); 12480 TransitionElementsKind(object, transitioned_kind); 12481 } 12482 12483 // Check whether there is extra space in the fixed array. 12484 if (index < elms_length) { 12485 Handle<FixedDoubleArray> elms(FixedDoubleArray::cast(object->elements())); 12486 elms->set(index, double_value); 12487 if (object->IsJSArray()) { 12488 // Update the length of the array if needed. 12489 uint32_t array_length = 0; 12490 CHECK( 12491 Handle<JSArray>::cast(object)->length()->ToArrayIndex(&array_length)); 12492 if (index >= array_length) { 12493 Handle<JSArray>::cast(object)->set_length(Smi::FromInt(index + 1)); 12494 } 12495 } 12496 return value; 12497 } 12498 12499 // Allow gap in fast case. 
12500 if ((index - elms_length) < kMaxGap) { 12501 // Try allocating extra space. 12502 int new_capacity = NewElementsCapacity(index+1); 12503 if (!object->ShouldConvertToSlowElements(new_capacity)) { 12504 ASSERT(static_cast<uint32_t>(new_capacity) > index); 12505 SetFastDoubleElementsCapacityAndLength(object, new_capacity, index + 1); 12506 FixedDoubleArray::cast(object->elements())->set(index, double_value); 12507 object->ValidateElements(); 12508 return value; 12509 } 12510 } 12511 12512 // Otherwise default to slow case. 12513 ASSERT(object->HasFastDoubleElements()); 12514 ASSERT(object->map()->has_fast_double_elements()); 12515 ASSERT(object->elements()->IsFixedDoubleArray()); 12516 12517 NormalizeElements(object); 12518 ASSERT(object->HasDictionaryElements()); 12519 return SetElement(object, index, value, NONE, strict_mode, check_prototype); 12520 } 12521 12522 12523 Handle<Object> JSReceiver::SetElement(Handle<JSReceiver> object, 12524 uint32_t index, 12525 Handle<Object> value, 12526 PropertyAttributes attributes, 12527 StrictModeFlag strict_mode) { 12528 if (object->IsJSProxy()) { 12529 return JSProxy::SetElementWithHandler( 12530 Handle<JSProxy>::cast(object), object, index, value, strict_mode); 12531 } 12532 return JSObject::SetElement( 12533 Handle<JSObject>::cast(object), index, value, attributes, strict_mode); 12534 } 12535 12536 12537 Handle<Object> JSObject::SetOwnElement(Handle<JSObject> object, 12538 uint32_t index, 12539 Handle<Object> value, 12540 StrictModeFlag strict_mode) { 12541 ASSERT(!object->HasExternalArrayElements()); 12542 return JSObject::SetElement(object, index, value, NONE, strict_mode, false); 12543 } 12544 12545 12546 Handle<Object> JSObject::SetElement(Handle<JSObject> object, 12547 uint32_t index, 12548 Handle<Object> value, 12549 PropertyAttributes attributes, 12550 StrictModeFlag strict_mode, 12551 bool check_prototype, 12552 SetPropertyMode set_mode) { 12553 Isolate* isolate = object->GetIsolate(); 12554 12555 if 
(object->HasExternalArrayElements()) { 12556 if (!value->IsNumber() && !value->IsUndefined()) { 12557 bool has_exception; 12558 Handle<Object> number = 12559 Execution::ToNumber(isolate, value, &has_exception); 12560 if (has_exception) return Handle<Object>(); 12561 value = number; 12562 } 12563 } 12564 12565 // Check access rights if needed. 12566 if (object->IsAccessCheckNeeded()) { 12567 if (!isolate->MayIndexedAccess(*object, index, v8::ACCESS_SET)) { 12568 isolate->ReportFailedAccessCheck(*object, v8::ACCESS_SET); 12569 RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object); 12570 return value; 12571 } 12572 } 12573 12574 if (object->IsJSGlobalProxy()) { 12575 Handle<Object> proto(object->GetPrototype(), isolate); 12576 if (proto->IsNull()) return value; 12577 ASSERT(proto->IsJSGlobalObject()); 12578 return SetElement(Handle<JSObject>::cast(proto), index, value, attributes, 12579 strict_mode, 12580 check_prototype, 12581 set_mode); 12582 } 12583 12584 // Don't allow element properties to be redefined for external arrays. 12585 if (object->HasExternalArrayElements() && set_mode == DEFINE_PROPERTY) { 12586 Handle<Object> number = isolate->factory()->NewNumberFromUint(index); 12587 Handle<Object> args[] = { object, number }; 12588 Handle<Object> error = isolate->factory()->NewTypeError( 12589 "redef_external_array_element", HandleVector(args, ARRAY_SIZE(args))); 12590 isolate->Throw(*error); 12591 return Handle<Object>(); 12592 } 12593 12594 // Normalize the elements to enable attributes on the property. 12595 if ((attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) != 0) { 12596 Handle<SeededNumberDictionary> dictionary = NormalizeElements(object); 12597 // Make sure that we never go back to fast case. 12598 dictionary->set_requires_slow_elements(); 12599 } 12600 12601 if (!(FLAG_harmony_observation && object->map()->is_observed())) { 12602 return object->HasIndexedInterceptor() 12603 ? 
SetElementWithInterceptor(object, index, value, attributes, strict_mode, 12604 check_prototype, 12605 set_mode) 12606 : SetElementWithoutInterceptor(object, index, value, attributes, 12607 strict_mode, 12608 check_prototype, 12609 set_mode); 12610 } 12611 12612 PropertyAttributes old_attributes = object->GetLocalElementAttribute(index); 12613 Handle<Object> old_value = isolate->factory()->the_hole_value(); 12614 Handle<Object> old_length_handle; 12615 Handle<Object> new_length_handle; 12616 12617 if (old_attributes != ABSENT) { 12618 if (object->GetLocalElementAccessorPair(index) == NULL) 12619 old_value = Object::GetElement(isolate, object, index); 12620 } else if (object->IsJSArray()) { 12621 // Store old array length in case adding an element grows the array. 12622 old_length_handle = handle(Handle<JSArray>::cast(object)->length(), 12623 isolate); 12624 } 12625 12626 // Check for lookup interceptor 12627 Handle<Object> result = object->HasIndexedInterceptor() 12628 ? SetElementWithInterceptor(object, index, value, attributes, strict_mode, 12629 check_prototype, 12630 set_mode) 12631 : SetElementWithoutInterceptor(object, index, value, attributes, 12632 strict_mode, 12633 check_prototype, 12634 set_mode); 12635 RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<Object>()); 12636 12637 Handle<String> name = isolate->factory()->Uint32ToString(index); 12638 PropertyAttributes new_attributes = object->GetLocalElementAttribute(index); 12639 if (old_attributes == ABSENT) { 12640 if (object->IsJSArray() && 12641 !old_length_handle->SameValue( 12642 Handle<JSArray>::cast(object)->length())) { 12643 new_length_handle = handle(Handle<JSArray>::cast(object)->length(), 12644 isolate); 12645 uint32_t old_length = 0; 12646 uint32_t new_length = 0; 12647 CHECK(old_length_handle->ToArrayIndex(&old_length)); 12648 CHECK(new_length_handle->ToArrayIndex(&new_length)); 12649 12650 BeginPerformSplice(Handle<JSArray>::cast(object)); 12651 EnqueueChangeRecord(object, "add", name, 
old_value); 12652 EnqueueChangeRecord(object, "update", isolate->factory()->length_string(), 12653 old_length_handle); 12654 EndPerformSplice(Handle<JSArray>::cast(object)); 12655 Handle<JSArray> deleted = isolate->factory()->NewJSArray(0); 12656 EnqueueSpliceRecord(Handle<JSArray>::cast(object), old_length, deleted, 12657 new_length - old_length); 12658 } else { 12659 EnqueueChangeRecord(object, "add", name, old_value); 12660 } 12661 } else if (old_value->IsTheHole()) { 12662 EnqueueChangeRecord(object, "reconfigure", name, old_value); 12663 } else { 12664 Handle<Object> new_value = Object::GetElement(isolate, object, index); 12665 bool value_changed = !old_value->SameValue(*new_value); 12666 if (old_attributes != new_attributes) { 12667 if (!value_changed) old_value = isolate->factory()->the_hole_value(); 12668 EnqueueChangeRecord(object, "reconfigure", name, old_value); 12669 } else if (value_changed) { 12670 EnqueueChangeRecord(object, "update", name, old_value); 12671 } 12672 } 12673 12674 return result; 12675 } 12676 12677 12678 Handle<Object> JSObject::SetElementWithoutInterceptor( 12679 Handle<JSObject> object, 12680 uint32_t index, 12681 Handle<Object> value, 12682 PropertyAttributes attributes, 12683 StrictModeFlag strict_mode, 12684 bool check_prototype, 12685 SetPropertyMode set_mode) { 12686 ASSERT(object->HasDictionaryElements() || 12687 object->HasDictionaryArgumentsElements() || 12688 (attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) == 0); 12689 Isolate* isolate = object->GetIsolate(); 12690 if (FLAG_trace_external_array_abuse && 12691 IsExternalArrayElementsKind(object->GetElementsKind())) { 12692 CheckArrayAbuse(*object, "external elements write", index); 12693 } 12694 if (FLAG_trace_js_array_abuse && 12695 !IsExternalArrayElementsKind(object->GetElementsKind())) { 12696 if (object->IsJSArray()) { 12697 CheckArrayAbuse(*object, "elements write", index, true); 12698 } 12699 } 12700 switch (object->GetElementsKind()) { 12701 case 
FAST_SMI_ELEMENTS: 12702 case FAST_ELEMENTS: 12703 case FAST_HOLEY_SMI_ELEMENTS: 12704 case FAST_HOLEY_ELEMENTS: 12705 return SetFastElement(object, index, value, strict_mode, check_prototype); 12706 case FAST_DOUBLE_ELEMENTS: 12707 case FAST_HOLEY_DOUBLE_ELEMENTS: 12708 return SetFastDoubleElement(object, index, value, strict_mode, 12709 check_prototype); 12710 case EXTERNAL_PIXEL_ELEMENTS: { 12711 ExternalPixelArray* pixels = ExternalPixelArray::cast(object->elements()); 12712 return handle(pixels->SetValue(index, *value), isolate); 12713 } 12714 case EXTERNAL_BYTE_ELEMENTS: { 12715 Handle<ExternalByteArray> array( 12716 ExternalByteArray::cast(object->elements())); 12717 return ExternalByteArray::SetValue(array, index, value); 12718 } 12719 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: { 12720 Handle<ExternalUnsignedByteArray> array( 12721 ExternalUnsignedByteArray::cast(object->elements())); 12722 return ExternalUnsignedByteArray::SetValue(array, index, value); 12723 } 12724 case EXTERNAL_SHORT_ELEMENTS: { 12725 Handle<ExternalShortArray> array(ExternalShortArray::cast( 12726 object->elements())); 12727 return ExternalShortArray::SetValue(array, index, value); 12728 } 12729 case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: { 12730 Handle<ExternalUnsignedShortArray> array( 12731 ExternalUnsignedShortArray::cast(object->elements())); 12732 return ExternalUnsignedShortArray::SetValue(array, index, value); 12733 } 12734 case EXTERNAL_INT_ELEMENTS: { 12735 Handle<ExternalIntArray> array( 12736 ExternalIntArray::cast(object->elements())); 12737 return ExternalIntArray::SetValue(array, index, value); 12738 } 12739 case EXTERNAL_UNSIGNED_INT_ELEMENTS: { 12740 Handle<ExternalUnsignedIntArray> array( 12741 ExternalUnsignedIntArray::cast(object->elements())); 12742 return ExternalUnsignedIntArray::SetValue(array, index, value); 12743 } 12744 case EXTERNAL_FLOAT_ELEMENTS: { 12745 Handle<ExternalFloatArray> array( 12746 ExternalFloatArray::cast(object->elements())); 12747 return 
ExternalFloatArray::SetValue(array, index, value); 12748 } 12749 case EXTERNAL_DOUBLE_ELEMENTS: { 12750 Handle<ExternalDoubleArray> array( 12751 ExternalDoubleArray::cast(object->elements())); 12752 return ExternalDoubleArray::SetValue(array, index, value); 12753 } 12754 case DICTIONARY_ELEMENTS: 12755 return SetDictionaryElement(object, index, value, attributes, strict_mode, 12756 check_prototype, 12757 set_mode); 12758 case NON_STRICT_ARGUMENTS_ELEMENTS: { 12759 Handle<FixedArray> parameter_map(FixedArray::cast(object->elements())); 12760 uint32_t length = parameter_map->length(); 12761 Handle<Object> probe = index < length - 2 ? 12762 Handle<Object>(parameter_map->get(index + 2), isolate) : 12763 Handle<Object>(); 12764 if (!probe.is_null() && !probe->IsTheHole()) { 12765 Handle<Context> context(Context::cast(parameter_map->get(0))); 12766 int context_index = Handle<Smi>::cast(probe)->value(); 12767 ASSERT(!context->get(context_index)->IsTheHole()); 12768 context->set(context_index, *value); 12769 // Redefining attributes of an aliased element destroys fast aliasing. 12770 if (set_mode == SET_PROPERTY || attributes == NONE) return value; 12771 parameter_map->set_the_hole(index + 2); 12772 // For elements that are still writable we re-establish slow aliasing. 12773 if ((attributes & READ_ONLY) == 0) { 12774 value = Handle<Object>::cast( 12775 isolate->factory()->NewAliasedArgumentsEntry(context_index)); 12776 } 12777 } 12778 Handle<FixedArray> arguments(FixedArray::cast(parameter_map->get(1))); 12779 if (arguments->IsDictionary()) { 12780 return SetDictionaryElement(object, index, value, attributes, 12781 strict_mode, 12782 check_prototype, 12783 set_mode); 12784 } else { 12785 return SetFastElement(object, index, value, strict_mode, 12786 check_prototype); 12787 } 12788 } 12789 } 12790 // All possible cases have been handled above. Add a return to avoid the 12791 // complaints from the compiler. 
12792 UNREACHABLE(); 12793 return isolate->factory()->null_value(); 12794 } 12795 12796 12797 void JSObject::TransitionElementsKind(Handle<JSObject> object, 12798 ElementsKind to_kind) { 12799 CALL_HEAP_FUNCTION_VOID(object->GetIsolate(), 12800 object->TransitionElementsKind(to_kind)); 12801 } 12802 12803 12804 const double AllocationSite::kPretenureRatio = 0.60; 12805 12806 12807 bool AllocationSite::IsNestedSite() { 12808 ASSERT(FLAG_trace_track_allocation_sites); 12809 Object* current = GetHeap()->allocation_sites_list(); 12810 while (current != NULL && current->IsAllocationSite()) { 12811 AllocationSite* current_site = AllocationSite::cast(current); 12812 if (current_site->nested_site() == this) { 12813 return true; 12814 } 12815 current = current_site->weak_next(); 12816 } 12817 return false; 12818 } 12819 12820 12821 MaybeObject* AllocationSite::DigestTransitionFeedback(ElementsKind to_kind) { 12822 Isolate* isolate = GetIsolate(); 12823 12824 if (SitePointsToLiteral() && transition_info()->IsJSArray()) { 12825 JSArray* transition_info = JSArray::cast(this->transition_info()); 12826 ElementsKind kind = transition_info->GetElementsKind(); 12827 // if kind is holey ensure that to_kind is as well. 12828 if (IsHoleyElementsKind(kind)) { 12829 to_kind = GetHoleyElementsKind(to_kind); 12830 } 12831 if (IsMoreGeneralElementsKindTransition(kind, to_kind)) { 12832 // If the array is huge, it's not likely to be defined in a local 12833 // function, so we shouldn't make new instances of it very often. 12834 uint32_t length = 0; 12835 CHECK(transition_info->length()->ToArrayIndex(&length)); 12836 if (length <= kMaximumArrayBytesToPretransition) { 12837 if (FLAG_trace_track_allocation_sites) { 12838 bool is_nested = IsNestedSite(); 12839 PrintF( 12840 "AllocationSite: JSArray %p boilerplate %s updated %s->%s\n", 12841 reinterpret_cast<void*>(this), 12842 is_nested ? 
"(nested)" : "", 12843 ElementsKindToString(kind), 12844 ElementsKindToString(to_kind)); 12845 } 12846 MaybeObject* result = transition_info->TransitionElementsKind(to_kind); 12847 if (result->IsFailure()) return result; 12848 dependent_code()->DeoptimizeDependentCodeGroup( 12849 isolate, DependentCode::kAllocationSiteTransitionChangedGroup); 12850 } 12851 } 12852 } else { 12853 ElementsKind kind = GetElementsKind(); 12854 // if kind is holey ensure that to_kind is as well. 12855 if (IsHoleyElementsKind(kind)) { 12856 to_kind = GetHoleyElementsKind(to_kind); 12857 } 12858 if (IsMoreGeneralElementsKindTransition(kind, to_kind)) { 12859 if (FLAG_trace_track_allocation_sites) { 12860 PrintF("AllocationSite: JSArray %p site updated %s->%s\n", 12861 reinterpret_cast<void*>(this), 12862 ElementsKindToString(kind), 12863 ElementsKindToString(to_kind)); 12864 } 12865 SetElementsKind(to_kind); 12866 dependent_code()->DeoptimizeDependentCodeGroup( 12867 isolate, DependentCode::kAllocationSiteTransitionChangedGroup); 12868 } 12869 } 12870 return this; 12871 } 12872 12873 12874 void AllocationSite::AddDependentCompilationInfo(Reason reason, 12875 CompilationInfo* info) { 12876 DependentCode::DependencyGroup group = ToDependencyGroup(reason); 12877 Handle<DependentCode> dep(dependent_code()); 12878 Handle<DependentCode> codes = 12879 DependentCode::Insert(dep, group, info->object_wrapper()); 12880 if (*codes != dependent_code()) set_dependent_code(*codes); 12881 info->dependencies(group)->Add(Handle<HeapObject>(this), info->zone()); 12882 } 12883 12884 12885 void AllocationSite::AddDependentCode(Reason reason, Handle<Code> code) { 12886 DependentCode::DependencyGroup group = ToDependencyGroup(reason); 12887 Handle<DependentCode> codes = DependentCode::Insert( 12888 Handle<DependentCode>(dependent_code()), group, code); 12889 if (*codes != dependent_code()) set_dependent_code(*codes); 12890 } 12891 12892 12893 void JSObject::UpdateAllocationSite(Handle<JSObject> object, 12894 
ElementsKind to_kind) { 12895 CALL_HEAP_FUNCTION_VOID(object->GetIsolate(), 12896 object->UpdateAllocationSite(to_kind)); 12897 } 12898 12899 12900 MaybeObject* JSObject::UpdateAllocationSite(ElementsKind to_kind) { 12901 if (!FLAG_track_allocation_sites || !IsJSArray()) { 12902 return this; 12903 } 12904 12905 AllocationMemento* memento = AllocationMemento::FindForJSObject(this); 12906 if (memento == NULL || !memento->IsValid()) { 12907 return this; 12908 } 12909 12910 // Walk through to the Allocation Site 12911 AllocationSite* site = memento->GetAllocationSite(); 12912 return site->DigestTransitionFeedback(to_kind); 12913 } 12914 12915 12916 MaybeObject* JSObject::TransitionElementsKind(ElementsKind to_kind) { 12917 ElementsKind from_kind = map()->elements_kind(); 12918 12919 if (IsFastHoleyElementsKind(from_kind)) { 12920 to_kind = GetHoleyElementsKind(to_kind); 12921 } 12922 12923 if (from_kind == to_kind) return this; 12924 // Don't update the site if to_kind isn't fast 12925 if (IsFastElementsKind(to_kind)) { 12926 MaybeObject* maybe_failure = UpdateAllocationSite(to_kind); 12927 if (maybe_failure->IsFailure()) return maybe_failure; 12928 } 12929 12930 Isolate* isolate = GetIsolate(); 12931 if (elements() == isolate->heap()->empty_fixed_array() || 12932 (IsFastSmiOrObjectElementsKind(from_kind) && 12933 IsFastSmiOrObjectElementsKind(to_kind)) || 12934 (from_kind == FAST_DOUBLE_ELEMENTS && 12935 to_kind == FAST_HOLEY_DOUBLE_ELEMENTS)) { 12936 ASSERT(from_kind != TERMINAL_FAST_ELEMENTS_KIND); 12937 // No change is needed to the elements() buffer, the transition 12938 // only requires a map change. 
12939 MaybeObject* maybe_new_map = GetElementsTransitionMap(isolate, to_kind); 12940 Map* new_map; 12941 if (!maybe_new_map->To(&new_map)) return maybe_new_map; 12942 set_map(new_map); 12943 if (FLAG_trace_elements_transitions) { 12944 FixedArrayBase* elms = FixedArrayBase::cast(elements()); 12945 PrintElementsTransition(stdout, from_kind, elms, to_kind, elms); 12946 } 12947 return this; 12948 } 12949 12950 FixedArrayBase* elms = FixedArrayBase::cast(elements()); 12951 uint32_t capacity = static_cast<uint32_t>(elms->length()); 12952 uint32_t length = capacity; 12953 12954 if (IsJSArray()) { 12955 Object* raw_length = JSArray::cast(this)->length(); 12956 if (raw_length->IsUndefined()) { 12957 // If length is undefined, then JSArray is being initialized and has no 12958 // elements, assume a length of zero. 12959 length = 0; 12960 } else { 12961 CHECK(JSArray::cast(this)->length()->ToArrayIndex(&length)); 12962 } 12963 } 12964 12965 if (IsFastSmiElementsKind(from_kind) && 12966 IsFastDoubleElementsKind(to_kind)) { 12967 MaybeObject* maybe_result = 12968 SetFastDoubleElementsCapacityAndLength(capacity, length); 12969 if (maybe_result->IsFailure()) return maybe_result; 12970 ValidateElements(); 12971 return this; 12972 } 12973 12974 if (IsFastDoubleElementsKind(from_kind) && 12975 IsFastObjectElementsKind(to_kind)) { 12976 MaybeObject* maybe_result = SetFastElementsCapacityAndLength( 12977 capacity, length, kDontAllowSmiElements); 12978 if (maybe_result->IsFailure()) return maybe_result; 12979 ValidateElements(); 12980 return this; 12981 } 12982 12983 // This method should never be called for any other case than the ones 12984 // handled above. 12985 UNREACHABLE(); 12986 return GetIsolate()->heap()->null_value(); 12987 } 12988 12989 12990 // static 12991 bool Map::IsValidElementsTransition(ElementsKind from_kind, 12992 ElementsKind to_kind) { 12993 // Transitions can't go backwards. 
12994 if (!IsMoreGeneralElementsKindTransition(from_kind, to_kind)) { 12995 return false; 12996 } 12997 12998 // Transitions from HOLEY -> PACKED are not allowed. 12999 return !IsFastHoleyElementsKind(from_kind) || 13000 IsFastHoleyElementsKind(to_kind); 13001 } 13002 13003 13004 void JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray> array, 13005 uint32_t index, 13006 Handle<Object> value) { 13007 CALL_HEAP_FUNCTION_VOID(array->GetIsolate(), 13008 array->JSArrayUpdateLengthFromIndex(index, *value)); 13009 } 13010 13011 13012 MaybeObject* JSArray::JSArrayUpdateLengthFromIndex(uint32_t index, 13013 Object* value) { 13014 uint32_t old_len = 0; 13015 CHECK(length()->ToArrayIndex(&old_len)); 13016 // Check to see if we need to update the length. For now, we make 13017 // sure that the length stays within 32-bits (unsigned). 13018 if (index >= old_len && index != 0xffffffff) { 13019 Object* len; 13020 { MaybeObject* maybe_len = 13021 GetHeap()->NumberFromDouble(static_cast<double>(index) + 1); 13022 if (!maybe_len->ToObject(&len)) return maybe_len; 13023 } 13024 set_length(len); 13025 } 13026 return value; 13027 } 13028 13029 13030 MaybeObject* JSObject::GetElementWithInterceptor(Object* receiver, 13031 uint32_t index) { 13032 Isolate* isolate = GetIsolate(); 13033 HandleScope scope(isolate); 13034 13035 // Make sure that the top context does not change when doing 13036 // callbacks or interceptor calls. 
  AssertNoContextChange ncc(isolate);

  // Handlify raw pointers before calling out to the embedder: the API
  // callback below may trigger GC.
  Handle<InterceptorInfo> interceptor(GetIndexedInterceptor(), isolate);
  Handle<Object> this_handle(receiver, isolate);
  Handle<JSObject> holder_handle(this, isolate);
  if (!interceptor->getter()->IsUndefined()) {
    v8::IndexedPropertyGetterCallback getter =
        v8::ToCData<v8::IndexedPropertyGetterCallback>(interceptor->getter());
    LOG(isolate,
        ApiIndexedPropertyAccess("interceptor-indexed-get", this, index));
    PropertyCallbackArguments
        args(isolate, interceptor->data(), receiver, this);
    v8::Handle<v8::Value> result = args.Call(getter, index);
    RETURN_IF_SCHEDULED_EXCEPTION(isolate);
    if (!result.IsEmpty()) {
      Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
      result_internal->VerifyApiCallResultType();
      // Dereference the handle: callers of this pre-handlified API expect a
      // raw MaybeObject* result.
      return *result_internal;
    }
  }

  // Interceptor produced nothing: consult the regular elements backing store.
  Heap* heap = holder_handle->GetHeap();
  ElementsAccessor* handler = holder_handle->GetElementsAccessor();
  MaybeObject* raw_result = handler->Get(*this_handle,
                                         *holder_handle,
                                         index);
  if (raw_result != heap->the_hole_value()) return raw_result;

  RETURN_IF_SCHEDULED_EXCEPTION(isolate);

  // Not found locally: continue the lookup on the prototype chain.
  Object* pt = holder_handle->GetPrototype();
  if (pt == heap->null_value()) return heap->undefined_value();
  return pt->GetElementWithReceiver(isolate, *this_handle, index);
}


// An object's elements are "dense" when more than half of the backing-store
// capacity is actually used (or when there is no capacity at all).
bool JSObject::HasDenseElements() {
  int capacity = 0;
  int used = 0;
  GetElementsCapacityAndUsage(&capacity, &used);
  return (capacity == 0) || (used > (capacity / 2));
}


// Reports the capacity of the elements backing store and the number of
// slots actually occupied (non-hole), for every elements kind. Used by the
// fast/slow elements conversion heuristics.
void JSObject::GetElementsCapacityAndUsage(int* capacity, int* used) {
  *capacity = 0;
  *used = 0;

  FixedArrayBase* backing_store_base = FixedArrayBase::cast(elements());
  FixedArray* backing_store = NULL;
  switch (GetElementsKind()) {
    case NON_STRICT_ARGUMENTS_ELEMENTS:
      // The arguments object stores a parameter map; slot 1 holds the real
      // backing store, which may itself be a dictionary.
      backing_store_base =
          FixedArray::cast(FixedArray::cast(backing_store_base)->get(1));
      backing_store = FixedArray::cast(backing_store_base);
      if (backing_store->IsDictionary()) {
        SeededNumberDictionary* dictionary =
            SeededNumberDictionary::cast(backing_store);
        *capacity = dictionary->Capacity();
        *used = dictionary->NumberOfElements();
        break;
      }
      // Fall through.
    case FAST_SMI_ELEMENTS:
    case FAST_ELEMENTS:
      if (IsJSArray()) {
        // Packed JSArray: everything below length is in use.
        *capacity = backing_store_base->length();
        *used = Smi::cast(JSArray::cast(this)->length())->value();
        break;
      }
      // Fall through if packing is not guaranteed.
    case FAST_HOLEY_SMI_ELEMENTS:
    case FAST_HOLEY_ELEMENTS:
      // Holey store: count non-hole slots individually.
      backing_store = FixedArray::cast(backing_store_base);
      *capacity = backing_store->length();
      for (int i = 0; i < *capacity; ++i) {
        if (!backing_store->get(i)->IsTheHole()) ++(*used);
      }
      break;
    case DICTIONARY_ELEMENTS: {
      SeededNumberDictionary* dictionary = element_dictionary();
      *capacity = dictionary->Capacity();
      *used = dictionary->NumberOfElements();
      break;
    }
    case FAST_DOUBLE_ELEMENTS:
      if (IsJSArray()) {
        *capacity = backing_store_base->length();
        *used = Smi::cast(JSArray::cast(this)->length())->value();
        break;
      }
      // Fall through if packing is not guaranteed.
    case FAST_HOLEY_DOUBLE_ELEMENTS: {
      FixedDoubleArray* elms = FixedDoubleArray::cast(elements());
      *capacity = elms->length();
      for (int i = 0; i < *capacity; i++) {
        if (!elms->is_the_hole(i)) ++(*used);
      }
      break;
    }
    case EXTERNAL_BYTE_ELEMENTS:
    case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
    case EXTERNAL_SHORT_ELEMENTS:
    case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
    case EXTERNAL_INT_ELEMENTS:
    case EXTERNAL_UNSIGNED_INT_ELEMENTS:
    case EXTERNAL_FLOAT_ELEMENTS:
    case EXTERNAL_DOUBLE_ELEMENTS:
    case EXTERNAL_PIXEL_ELEMENTS:
      // External arrays are considered 100% used.
      ExternalArray* external_array = ExternalArray::cast(elements());
      *capacity = external_array->length();
      *used = external_array->length();
      break;
  }
}


// Heuristic: returns true when growing the fast backing store to
// |new_capacity| would waste roughly 3x the space a dictionary would use.
// Small capacities (and somewhat larger ones for new-space objects) always
// stay fast.
bool JSObject::ShouldConvertToSlowElements(int new_capacity) {
  STATIC_ASSERT(kMaxUncheckedOldFastElementsLength <=
                kMaxUncheckedFastElementsLength);
  if (new_capacity <= kMaxUncheckedOldFastElementsLength ||
      (new_capacity <= kMaxUncheckedFastElementsLength &&
       GetHeap()->InNewSpace(this))) {
    return false;
  }
  // If the fast-case backing storage takes up roughly three times as
  // much space (in machine words) as a dictionary backing storage
  // would, the object should have slow elements.
  int old_capacity = 0;
  int used_elements = 0;
  GetElementsCapacityAndUsage(&old_capacity, &used_elements);
  int dictionary_size = SeededNumberDictionary::ComputeCapacity(used_elements) *
      SeededNumberDictionary::kEntrySize;
  return 3 * dictionary_size <= new_capacity;
}


// Heuristic: decides whether dictionary-mode elements should be converted
// back to a fast (array-backed) representation.
bool JSObject::ShouldConvertToFastElements() {
  ASSERT(HasDictionaryElements() || HasDictionaryArgumentsElements());
  // If the elements are sparse, we should not go back to fast case.
  if (!HasDenseElements()) return false;
  // An object requiring access checks is never allowed to have fast
  // elements.  If it had fast elements we would skip security checks.
  if (IsAccessCheckNeeded()) return false;
  // Observed objects may not go to fast mode because they rely on map checks,
  // and for fast element accesses we sometimes check element kinds only.
  if (FLAG_harmony_observation && map()->is_observed()) return false;

  // Locate the elements dictionary; for non-strict arguments it is stored
  // in slot 1 of the parameter map.
  FixedArray* elements = FixedArray::cast(this->elements());
  SeededNumberDictionary* dictionary = NULL;
  if (elements->map() == GetHeap()->non_strict_arguments_elements_map()) {
    dictionary = SeededNumberDictionary::cast(elements->get(1));
  } else {
    dictionary = SeededNumberDictionary::cast(elements);
  }
  // If an element has been added at a very high index in the elements
  // dictionary, we cannot go back to fast case.
  if (dictionary->requires_slow_elements()) return false;
  // If the dictionary backing storage takes up roughly half as much
  // space (in machine words) as a fast-case backing storage would,
  // the object should have fast elements.
  uint32_t array_size = 0;
  if (IsJSArray()) {
    CHECK(JSArray::cast(this)->length()->ToArrayIndex(&array_size));
  } else {
    // For non-arrays the highest used index bounds the fast store size.
    array_size = dictionary->max_number_key();
  }
  uint32_t dictionary_size = static_cast<uint32_t>(dictionary->Capacity()) *
      SeededNumberDictionary::kEntrySize;
  return 2 * dictionary_size >= array_size;
}


// Returns true if the dictionary elements should become a FAST_DOUBLE store,
// i.e. every numeric key maps to a number and at least one value is a
// non-Smi (unboxable double). On return, |*has_smi_only_elements| is true
// when all values were Smis (caller should prefer a SMI kind instead).
bool JSObject::ShouldConvertToFastDoubleElements(
    bool* has_smi_only_elements) {
  *has_smi_only_elements = false;
  if (FLAG_unbox_double_arrays) {
    ASSERT(HasDictionaryElements());
    SeededNumberDictionary* dictionary = element_dictionary();
    bool found_double = false;
    for (int i = 0; i < dictionary->Capacity(); i++) {
      Object* key = dictionary->KeyAt(i);
      if (key->IsNumber()) {
        Object* value = dictionary->ValueAt(i);
        // A single non-number value rules out a double store entirely.
        if (!value->IsNumber()) return false;
        if (!value->IsSmi()) {
          found_double = true;
        }
      }
    }
    *has_smi_only_elements = !found_double;
    return found_double;
  } else {
    return false;
  }
}


// Certain compilers request function template instantiation when they
// see the definition of the other template functions in the
// class.  This requires us to have the template functions put
// together, so even though this function belongs in objects-debug.cc,
// we keep it here instead to satisfy certain compilers.
#ifdef OBJECT_PRINT
// Debug-only: prints every key/value pair in the dictionary to |out|.
template<typename Shape, typename Key>
void Dictionary<Shape, Key>::Print(FILE* out) {
  int capacity = HashTable<Shape, Key>::Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = HashTable<Shape, Key>::KeyAt(i);
    if (HashTable<Shape, Key>::IsKey(k)) {
      PrintF(out, " ");
      if (k->IsString()) {
        String::cast(k)->StringPrint(out);
      } else {
        k->ShortPrint(out);
      }
      PrintF(out, ": ");
      ValueAt(i)->ShortPrint(out);
      PrintF(out, "\n");
    }
  }
}
#endif


// Copies all values of occupied entries into |elements|, in table order.
// |elements| must have exactly as many slots as the dictionary has entries
// (checked by the trailing ASSERT).
template<typename Shape, typename Key>
void Dictionary<Shape, Key>::CopyValuesTo(FixedArray* elements) {
  int pos = 0;
  int capacity = HashTable<Shape, Key>::Capacity();
  // No GC may move entries while raw pointers are being copied.
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = elements->GetWriteBarrierMode(no_gc);
  for (int i = 0; i < capacity; i++) {
    Object* k = Dictionary<Shape, Key>::KeyAt(i);
    if (Dictionary<Shape, Key>::IsKey(k)) {
      elements->set(pos++, ValueAt(i), mode);
    }
  }
  ASSERT(pos == elements->length());
}


// Retrieves the named-property interceptor registered on this object's
// (API-created) constructor. Must only be called when the map says one
// exists.
InterceptorInfo* JSObject::GetNamedInterceptor() {
  ASSERT(map()->has_named_interceptor());
  JSFunction* constructor = JSFunction::cast(map()->constructor());
  ASSERT(constructor->shared()->IsApiFunction());
  Object* result =
      constructor->shared()->get_api_func_data()->named_property_handler();
  return InterceptorInfo::cast(result);
}


// Same as above, but for the indexed-property interceptor.
InterceptorInfo* JSObject::GetIndexedInterceptor() {
  ASSERT(map()->has_indexed_interceptor());
  JSFunction* constructor = JSFunction::cast(map()->constructor());
  ASSERT(constructor->shared()->IsApiFunction());
  Object* result =
      constructor->shared()->get_api_func_data()->indexed_property_handler();
  return InterceptorInfo::cast(result);
}


// Property lookup on |object| that deliberately skips its interceptor:
// checks the real local properties first and otherwise continues up the
// prototype chain.
Handle<Object>
JSObject::GetPropertyPostInterceptor(
    Handle<JSObject> object,
    Handle<Object> receiver,
    Handle<Name> name,
    PropertyAttributes* attributes) {
  // Check local property in holder, ignore interceptor.
  Isolate* isolate = object->GetIsolate();
  LookupResult lookup(isolate);
  object->LocalLookupRealNamedProperty(*name, &lookup);
  Handle<Object> result;
  if (lookup.IsFound()) {
    result = GetProperty(object, receiver, &lookup, name, attributes);
  } else {
    // Continue searching via the prototype chain.
    Handle<Object> prototype(object->GetPrototype(), isolate);
    *attributes = ABSENT;
    if (prototype->IsNull()) return isolate->factory()->undefined_value();
    result = GetPropertyWithReceiver(prototype, receiver, name, attributes);
  }
  return result;
}


// Local-only variant: looks up the real (non-interceptor) local property
// and returns undefined if it is absent; never walks the prototype chain.
MaybeObject* JSObject::GetLocalPropertyPostInterceptor(
    Object* receiver,
    Name* name,
    PropertyAttributes* attributes) {
  // Check local property in holder, ignore interceptor.
  LookupResult result(GetIsolate());
  LocalLookupRealNamedProperty(name, &result);
  if (result.IsFound()) {
    return GetProperty(receiver, &result, name, attributes);
  }
  return GetHeap()->undefined_value();
}


// Property load that first consults the object's named interceptor, then
// falls back to the ordinary lookup (GetPropertyPostInterceptor).
Handle<Object> JSObject::GetPropertyWithInterceptor(
    Handle<JSObject> object,
    Handle<Object> receiver,
    Handle<Name> name,
    PropertyAttributes* attributes) {
  Isolate* isolate = object->GetIsolate();

  // TODO(rossberg): Support symbols in the API.
  if (name->IsSymbol()) return isolate->factory()->undefined_value();

  Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor(), isolate);
  Handle<String> name_string = Handle<String>::cast(name);

  if (!interceptor->getter()->IsUndefined()) {
    v8::NamedPropertyGetterCallback getter =
        v8::ToCData<v8::NamedPropertyGetterCallback>(interceptor->getter());
    LOG(isolate,
        ApiNamedPropertyAccess("interceptor-named-get", *object, *name));
    PropertyCallbackArguments
        args(isolate, interceptor->data(), *receiver, *object);
    v8::Handle<v8::Value> result =
        args.Call(getter, v8::Utils::ToLocal(name_string));
    RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
    if (!result.IsEmpty()) {
      *attributes = NONE;
      Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
      result_internal->VerifyApiCallResultType();
      // Rebox handle to escape this scope.
      return handle(*result_internal, isolate);
    }
  }

  return GetPropertyPostInterceptor(object, receiver, name, attributes);
}


// Returns true if |object| has a real (non-interceptor) local property
// named |key|. Respects access checks; a failed check reports and returns
// false.
bool JSObject::HasRealNamedProperty(Handle<JSObject> object,
                                    Handle<Name> key) {
  Isolate* isolate = object->GetIsolate();
  // This path allocates no handles; seal the scope to enforce that.
  SealHandleScope shs(isolate);
  // Check access rights if needed.
  if (object->IsAccessCheckNeeded()) {
    if (!isolate->MayNamedAccess(*object, *key, v8::ACCESS_HAS)) {
      isolate->ReportFailedAccessCheck(*object, v8::ACCESS_HAS);
      return false;
    }
  }

  LookupResult result(isolate);
  object->LocalLookupRealNamedProperty(*key, &result);
  return result.IsFound() && !result.IsInterceptor();
}


// Returns true if |object| has a real (non-interceptor) element at |index|.
// For global proxies the query is forwarded to the hidden global object.
bool JSObject::HasRealElementProperty(Handle<JSObject> object, uint32_t index) {
  Isolate* isolate = object->GetIsolate();
  SealHandleScope shs(isolate);
  // Check access rights if needed.
  if (object->IsAccessCheckNeeded()) {
    if (!isolate->MayIndexedAccess(*object, index, v8::ACCESS_HAS)) {
      isolate->ReportFailedAccessCheck(*object, v8::ACCESS_HAS);
      return false;
    }
  }

  if (object->IsJSGlobalProxy()) {
    // Delegate to the proxy's hidden prototype (the real global object).
    HandleScope scope(isolate);
    Handle<Object> proto(object->GetPrototype(), isolate);
    if (proto->IsNull()) return false;
    ASSERT(proto->IsJSGlobalObject());
    return HasRealElementProperty(Handle<JSObject>::cast(proto), index);
  }

  return object->GetElementAttributeWithoutInterceptor(
             *object, index, false) != ABSENT;
}


// Returns true if |object| has a local accessor (callback) property named
// |key|, ignoring interceptors. Respects access checks.
bool JSObject::HasRealNamedCallbackProperty(Handle<JSObject> object,
                                            Handle<Name> key) {
  Isolate* isolate = object->GetIsolate();
  SealHandleScope shs(isolate);
  // Check access rights if needed.
  if (object->IsAccessCheckNeeded()) {
    if (!isolate->MayNamedAccess(*object, *key, v8::ACCESS_HAS)) {
      isolate->ReportFailedAccessCheck(*object, v8::ACCESS_HAS);
      return false;
    }
  }

  LookupResult result(isolate);
  object->LocalLookupRealNamedProperty(*key, &result);
  return result.IsPropertyCallbacks();
}


// Counts the object's own named properties matching |filter|, using the
// map's cached counts on the fast path and the property dictionary
// otherwise.
int JSObject::NumberOfLocalProperties(PropertyAttributes filter) {
  if (HasFastProperties()) {
    Map* map = this->map();
    if (filter == NONE) return map->NumberOfOwnDescriptors();
    if (filter & DONT_ENUM) {
      // The enum cache, when valid, is exactly the enumerable-property count.
      int result = map->EnumLength();
      if (result != kInvalidEnumCacheSentinel) return result;
    }
    return map->NumberOfDescribedProperties(OWN_DESCRIPTORS, filter);
  }
  return property_dictionary()->NumberOfElementsFilterAttributes(filter);
}


// Swaps entries i and j in this array and, when |numbers| is a distinct
// array, the corresponding entries in |numbers| as well (kept in lockstep
// for pair sorting).
void FixedArray::SwapPairs(FixedArray* numbers, int i, int j) {
  Object* temp = get(i);
  set(i, get(j));
  set(j, temp);
  if (this != numbers) {
    temp = numbers->get(i);
    numbers->set(i, Smi::cast(numbers->get(j)));
    numbers->set(j, Smi::cast(temp));
  }
}


// Insertion sort of (content, numbers) pairs by the numeric keys in
// |numbers|; used for small inputs.
static void InsertionSortPairs(FixedArray* content,
                               FixedArray* numbers,
                               int len) {
  for (int i = 1; i < len; i++) {
    int j = i;
    while (j > 0 &&
           (NumberToUint32(numbers->get(j - 1)) >
            NumberToUint32(numbers->get(j)))) {
      content->SwapPairs(numbers, j - 1, j);
      j--;
    }
  }
}


// Heap sort of (content, numbers) pairs by the numeric keys in |numbers|.
void HeapSortPairs(FixedArray* content, FixedArray* numbers, int len) {
  // In-place heap sort.
  ASSERT(content->length() == numbers->length());

  // Bottom-up max-heap construction.
  for (int i = 1; i < len; ++i) {
    int child_index = i;
    while (child_index > 0) {
      int parent_index = ((child_index + 1) >> 1) - 1;
      uint32_t parent_value = NumberToUint32(numbers->get(parent_index));
      uint32_t child_value = NumberToUint32(numbers->get(child_index));
      if (parent_value < child_value) {
        content->SwapPairs(numbers, parent_index, child_index);
      } else {
        break;
      }
      child_index = parent_index;
    }
  }

  // Extract elements and create sorted array.
  for (int i = len - 1; i > 0; --i) {
    // Put max element at the back of the array.
    content->SwapPairs(numbers, 0, i);
    // Sift down the new top element.
    int parent_index = 0;
    while (true) {
      int child_index = ((parent_index + 1) << 1) - 1;
      if (child_index >= i) break;
      // Note: the child_index + 1 read stays within the array even when it
      // falls outside the heap boundary; its value is then discarded by the
      // short-circuit below.
      uint32_t child1_value = NumberToUint32(numbers->get(child_index));
      uint32_t child2_value = NumberToUint32(numbers->get(child_index + 1));
      uint32_t parent_value = NumberToUint32(numbers->get(parent_index));
      if (child_index + 1 >= i || child1_value > child2_value) {
        if (parent_value > child1_value) break;
        content->SwapPairs(numbers, parent_index, child_index);
        parent_index = child_index;
      } else {
        if (parent_value > child2_value) break;
        content->SwapPairs(numbers, parent_index, child_index + 1);
        parent_index = child_index + 1;
      }
    }
  }
}


// Sort this array and the numbers as pairs wrt. the (distinct) numbers.
void FixedArray::SortPairs(FixedArray* numbers, uint32_t len) {
  ASSERT(this->length() == numbers->length());
  // For small arrays, simply use insertion sort.
  if (len <= 10) {
    InsertionSortPairs(this, numbers, len);
    return;
  }
  // Check the range of indices.
  uint32_t min_index = NumberToUint32(numbers->get(0));
  uint32_t max_index = min_index;
  uint32_t i;
  for (i = 1; i < len; i++) {
    if (NumberToUint32(numbers->get(i)) < min_index) {
      min_index = NumberToUint32(numbers->get(i));
    } else if (NumberToUint32(numbers->get(i)) > max_index) {
      max_index = NumberToUint32(numbers->get(i));
    }
  }
  if (max_index - min_index + 1 == len) {
    // Indices form a contiguous range, unless there are duplicates.
    // Do an in-place linear time sort assuming distinct numbers, but
    // avoid hanging in case they are not.
    for (i = 0; i < len; i++) {
      uint32_t p;
      uint32_t j = 0;
      // While the current element at i is not at its correct position p,
      // swap the elements at these two positions.
      // The j++ < len guard bounds the loop when numbers are not distinct.
      while ((p = NumberToUint32(numbers->get(i)) - min_index) != i &&
             j++ < len) {
        SwapPairs(numbers, i, p);
      }
    }
  } else {
    HeapSortPairs(this, numbers, len);
    return;
  }
}


// Fill in the names of local properties into the supplied storage. The main
// purpose of this function is to provide reflection information for the object
// mirrors.
void JSObject::GetLocalPropertyNames(
    FixedArray* storage, int index, PropertyAttributes filter) {
  ASSERT(storage->length() >= (NumberOfLocalProperties(filter) - index));
  if (HasFastProperties()) {
    int real_size = map()->NumberOfOwnDescriptors();
    DescriptorArray* descs = map()->instance_descriptors();
    for (int i = 0; i < real_size; i++) {
      // SYMBOLIC in the filter excludes symbol-named properties.
      if ((descs->GetDetails(i).attributes() & filter) == 0 &&
          ((filter & SYMBOLIC) == 0 || !descs->GetKey(i)->IsSymbol())) {
        storage->set(index++, descs->GetKey(i));
      }
    }
  } else {
    property_dictionary()->CopyKeysTo(storage,
                                      index,
                                      filter,
                                      NameDictionary::UNSORTED);
  }
}


// Counts own elements matching |filter| (NULL storage means count only).
int JSObject::NumberOfLocalElements(PropertyAttributes filter) {
  return GetLocalElementKeys(NULL, filter);
}


// Counts the object's own enumerable elements.
int JSObject::NumberOfEnumElements() {
  // Fast case for objects with no elements.
  if (!IsJSValue() && HasFastObjectElements()) {
    uint32_t length = IsJSArray() ?
        static_cast<uint32_t>(
            Smi::cast(JSArray::cast(this)->length())->value()) :
        static_cast<uint32_t>(FixedArray::cast(elements())->length());
    if (length == 0) return 0;
  }
  // Compute the number of enumerable elements.
  return NumberOfLocalElements(static_cast<PropertyAttributes>(DONT_ENUM));
}


// Counts — and, when |storage| is non-NULL, also writes into |storage| —
// the indices of all own elements passing |filter|. Returns the count.
// Handles every elements kind, plus the string characters of wrapped
// JSValue strings.
int JSObject::GetLocalElementKeys(FixedArray* storage,
                                  PropertyAttributes filter) {
  int counter = 0;
  switch (GetElementsKind()) {
    case FAST_SMI_ELEMENTS:
    case FAST_ELEMENTS:
    case FAST_HOLEY_SMI_ELEMENTS:
    case FAST_HOLEY_ELEMENTS: {
      int length = IsJSArray() ?
          Smi::cast(JSArray::cast(this)->length())->value() :
          FixedArray::cast(elements())->length();
      for (int i = 0; i < length; i++) {
        if (!FixedArray::cast(elements())->get(i)->IsTheHole()) {
          if (storage != NULL) {
            storage->set(counter, Smi::FromInt(i));
          }
          counter++;
        }
      }
      ASSERT(!storage || storage->length() >= counter);
      break;
    }
    case FAST_DOUBLE_ELEMENTS:
    case FAST_HOLEY_DOUBLE_ELEMENTS: {
      int length = IsJSArray() ?
          Smi::cast(JSArray::cast(this)->length())->value() :
          FixedDoubleArray::cast(elements())->length();
      for (int i = 0; i < length; i++) {
        if (!FixedDoubleArray::cast(elements())->is_the_hole(i)) {
          if (storage != NULL) {
            storage->set(counter, Smi::FromInt(i));
          }
          counter++;
        }
      }
      ASSERT(!storage || storage->length() >= counter);
      break;
    }
    case EXTERNAL_PIXEL_ELEMENTS: {
      // External stores have no holes: every index up to length is a key.
      int length = ExternalPixelArray::cast(elements())->length();
      while (counter < length) {
        if (storage != NULL) {
          storage->set(counter, Smi::FromInt(counter));
        }
        counter++;
      }
      ASSERT(!storage || storage->length() >= counter);
      break;
    }
    case EXTERNAL_BYTE_ELEMENTS:
    case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
    case EXTERNAL_SHORT_ELEMENTS:
    case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
    case EXTERNAL_INT_ELEMENTS:
    case EXTERNAL_UNSIGNED_INT_ELEMENTS:
    case EXTERNAL_FLOAT_ELEMENTS:
    case EXTERNAL_DOUBLE_ELEMENTS: {
      int length = ExternalArray::cast(elements())->length();
      while (counter < length) {
        if (storage != NULL) {
          storage->set(counter, Smi::FromInt(counter));
        }
        counter++;
      }
      ASSERT(!storage || storage->length() >= counter);
      break;
    }
    case DICTIONARY_ELEMENTS: {
      if (storage != NULL) {
        element_dictionary()->CopyKeysTo(storage,
                                         filter,
                                         SeededNumberDictionary::SORTED);
      }
      counter += element_dictionary()->NumberOfElementsFilterAttributes(filter);
      break;
    }
    case NON_STRICT_ARGUMENTS_ELEMENTS: {
      // Keys come from both the parameter map (slots 2..) and the backing
      // store in slot 1.
      FixedArray* parameter_map = FixedArray::cast(elements());
      int mapped_length = parameter_map->length() - 2;
      FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
      if (arguments->IsDictionary()) {
        // Copy the keys from arguments first, because Dictionary::CopyKeysTo
        // will insert in storage starting at index 0.
        SeededNumberDictionary* dictionary =
            SeededNumberDictionary::cast(arguments);
        if (storage != NULL) {
          dictionary->CopyKeysTo(
              storage, filter, SeededNumberDictionary::UNSORTED);
        }
        counter += dictionary->NumberOfElementsFilterAttributes(filter);
        for (int i = 0; i < mapped_length; ++i) {
          if (!parameter_map->get(i + 2)->IsTheHole()) {
            if (storage != NULL) storage->set(counter, Smi::FromInt(i));
            ++counter;
          }
        }
        // Restore sorted order after mixing mapped and dictionary keys.
        if (storage != NULL) storage->SortPairs(storage, counter);

      } else {
        int backing_length = arguments->length();
        int i = 0;
        for (; i < mapped_length; ++i) {
          if (!parameter_map->get(i + 2)->IsTheHole()) {
            if (storage != NULL) storage->set(counter, Smi::FromInt(i));
            ++counter;
          } else if (i < backing_length && !arguments->get(i)->IsTheHole()) {
            if (storage != NULL) storage->set(counter, Smi::FromInt(i));
            ++counter;
          }
        }
        for (; i < backing_length; ++i) {
          if (storage != NULL) storage->set(counter, Smi::FromInt(i));
          ++counter;
        }
      }
      break;
    }
  }

  if (this->IsJSValue()) {
    // String wrappers additionally expose one element per character.
    Object* val = JSValue::cast(this)->value();
    if (val->IsString()) {
      String* str = String::cast(val);
      if (storage) {
        for (int i = 0; i < str->length(); i++) {
          storage->set(counter + i, Smi::FromInt(i));
        }
      }
      counter += str->length();
    }
  }
  ASSERT(!storage || storage->length() == counter);
  return counter;
}


// Convenience wrapper: collects only enumerable element keys.
int JSObject::GetEnumElementKeys(FixedArray* storage) {
  return GetLocalElementKeys(storage,
                             static_cast<PropertyAttributes>(DONT_ENUM));
}


// StringKey simply carries a string object as key.
class StringKey : public HashTableKey {
 public:
  explicit StringKey(String* string) :
      string_(string),
      hash_(HashForObject(string)) { }

  bool IsMatch(Object* string) {
    // We know that all entries in a hash table had their hash keys created.
    // Use that knowledge to have fast failure.
    if (hash_ != HashForObject(string)) {
      return false;
    }
    return string_->Equals(String::cast(string));
  }

  uint32_t Hash() { return hash_; }

  uint32_t HashForObject(Object* other) { return String::cast(other)->Hash(); }

  Object* AsObject(Heap* heap) { return string_; }

  String* string_;   // The string carried as the key.
  uint32_t hash_;    // Cached hash, computed once in the constructor.
};


// StringSharedKeys are used as keys in the eval cache.
// Key for the compilation (eval) cache: identifies a compilation by source
// string, calling function's SharedFunctionInfo, language mode, and the
// scope position of the eval call. The stored table entry is a 4-element
// FixedArray [shared, source, language_mode, scope_position].
class StringSharedKey : public HashTableKey {
 public:
  StringSharedKey(String* source,
                  SharedFunctionInfo* shared,
                  LanguageMode language_mode,
                  int scope_position)
      : source_(source),
        shared_(shared),
        language_mode_(language_mode),
        scope_position_(scope_position) { }

  bool IsMatch(Object* other) {
    if (!other->IsFixedArray()) return false;
    FixedArray* other_array = FixedArray::cast(other);
    SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
    if (shared != shared_) return false;
    int language_unchecked = Smi::cast(other_array->get(2))->value();
    ASSERT(language_unchecked == CLASSIC_MODE ||
           language_unchecked == STRICT_MODE ||
           language_unchecked == EXTENDED_MODE);
    LanguageMode language_mode = static_cast<LanguageMode>(language_unchecked);
    if (language_mode != language_mode_) return false;
    int scope_position = Smi::cast(other_array->get(3))->value();
    if (scope_position != scope_position_) return false;
    String* source = String::cast(other_array->get(1));
    return source->Equals(source_);
  }

  static uint32_t StringSharedHashHelper(String* source,
                                         SharedFunctionInfo* shared,
                                         LanguageMode language_mode,
                                         int scope_position) {
    uint32_t hash = source->Hash();
    if (shared->HasSourceCode()) {
      // Instead of using the SharedFunctionInfo pointer in the hash
      // code computation, we use a combination of the hash of the
      // script source code and the start position of the calling scope.
      // We do this to ensure that the cache entries can survive garbage
      // collection.
      Script* script = Script::cast(shared->script());
      hash ^= String::cast(script->source())->Hash();
      // Distinct bit flips keep strict and extended mode entries apart.
      if (language_mode == STRICT_MODE) hash ^= 0x8000;
      if (language_mode == EXTENDED_MODE) hash ^= 0x0080;
      hash += scope_position;
    }
    return hash;
  }

  uint32_t Hash() {
    return StringSharedHashHelper(
        source_, shared_, language_mode_, scope_position_);
  }

  uint32_t HashForObject(Object* obj) {
    FixedArray* other_array = FixedArray::cast(obj);
    SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
    String* source = String::cast(other_array->get(1));
    int language_unchecked = Smi::cast(other_array->get(2))->value();
    ASSERT(language_unchecked == CLASSIC_MODE ||
           language_unchecked == STRICT_MODE ||
           language_unchecked == EXTENDED_MODE);
    LanguageMode language_mode = static_cast<LanguageMode>(language_unchecked);
    int scope_position = Smi::cast(other_array->get(3))->value();
    return StringSharedHashHelper(
        source, shared, language_mode, scope_position);
  }

  // Materializes the key as the FixedArray form stored in the table.
  MUST_USE_RESULT MaybeObject* AsObject(Heap* heap) {
    Object* obj;
    { MaybeObject* maybe_obj = heap->AllocateFixedArray(4);
      if (!maybe_obj->ToObject(&obj)) return maybe_obj;
    }
    FixedArray* other_array = FixedArray::cast(obj);
    other_array->set(0, shared_);
    other_array->set(1, source_);
    other_array->set(2, Smi::FromInt(language_mode_));
    other_array->set(3, Smi::FromInt(scope_position_));
    return other_array;
  }

 private:
  String* source_;
  SharedFunctionInfo* shared_;
  LanguageMode language_mode_;
  int scope_position_;
};


// RegExpKey carries the source and flags of a regular expression as key.
class RegExpKey : public HashTableKey {
 public:
  RegExpKey(String* string, JSRegExp::Flags flags)
      : string_(string),
        flags_(Smi::FromInt(flags.value())) { }

  // Rather than storing the key in the hash table, a pointer to the
  // stored value is stored where the key should be.  IsMatch then
  // compares the search key to the found object, rather than comparing
  // a key to a key.
  bool IsMatch(Object* obj) {
    FixedArray* val = FixedArray::cast(obj);
    return string_->Equals(String::cast(val->get(JSRegExp::kSourceIndex)))
        && (flags_ == val->get(JSRegExp::kFlagsIndex));
  }

  uint32_t Hash() { return RegExpHash(string_, flags_); }

  Object* AsObject(Heap* heap) {
    // Plain hash maps, which is where regexp keys are used, don't
    // use this function.
    UNREACHABLE();
    return NULL;
  }

  uint32_t HashForObject(Object* obj) {
    FixedArray* val = FixedArray::cast(obj);
    return RegExpHash(String::cast(val->get(JSRegExp::kSourceIndex)),
                      Smi::cast(val->get(JSRegExp::kFlagsIndex)));
  }

  static uint32_t RegExpHash(String* string, Smi* flags) {
    return string->Hash() + flags->value();
  }

  String* string_;  // Regexp source pattern.
  Smi* flags_;      // Regexp flags, encoded as a Smi.
};


// Utf8StringKey carries a vector of chars as key.
class Utf8StringKey : public HashTableKey {
 public:
  explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  bool IsMatch(Object* string) {
    return String::cast(string)->IsUtf8EqualTo(string_);
  }

  uint32_t Hash() {
    // hash_field_ == 0 doubles as "not yet computed"; compute lazily,
    // caching both the hash field and the decoded character count.
    if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
    hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
    uint32_t result = hash_field_ >> String::kHashShift;
    ASSERT(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  uint32_t HashForObject(Object* other) {
    return String::cast(other)->Hash();
  }

  MaybeObject* AsObject(Heap* heap) {
    // Make sure hash_field_ (and chars_) are populated before allocating.
    if (hash_field_ == 0) Hash();
    return heap->AllocateInternalizedStringFromUtf8(string_,
                                                    chars_,
                                                    hash_field_);
  }

  Vector<const char> string_;
  uint32_t hash_field_;
  int chars_;  // Caches the number of characters when computing the hash code.
  uint32_t seed_;
};


// Base class for keys over a flat sequence of characters (one- or
// two-byte).  Subclasses supply IsMatch and AsObject.
template <typename Char>
class SequentialStringKey : public HashTableKey {
 public:
  explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  uint32_t Hash() {
    hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
                                                           string_.length(),
                                                           seed_);

    uint32_t result = hash_field_ >> String::kHashShift;
    ASSERT(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }


  uint32_t HashForObject(Object* other) {
    return String::cast(other)->Hash();
  }

  Vector<const Char> string_;
  uint32_t hash_field_;
  uint32_t seed_;
};



// Key over a vector of one-byte (Latin-1) characters.
class OneByteStringKey : public SequentialStringKey<uint8_t> {
 public:
  OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
      : SequentialStringKey<uint8_t>(str, seed) { }

  bool IsMatch(Object* string) {
    return String::cast(string)->IsOneByteEqualTo(string_);
  }

  MaybeObject* AsObject(Heap* heap) {
    if (hash_field_ == 0) Hash();
    return heap->AllocateOneByteInternalizedString(string_, hash_field_);
  }
};


// Key over a [from, from + length) slice of an existing sequential
// one-byte string; avoids materializing the substring for the lookup.
class SubStringOneByteStringKey : public HashTableKey {
 public:
  explicit SubStringOneByteStringKey(Handle<SeqOneByteString> string,
                                     int from,
                                     int length)
      : string_(string), from_(from), length_(length) { }

  uint32_t Hash() {
    ASSERT(length_ >= 0);
    ASSERT(from_ + length_ <= string_->length());
    uint8_t* chars = string_->GetChars() + from_;
    hash_field_ = StringHasher::HashSequentialString(
        chars, length_, string_->GetHeap()->HashSeed());
    uint32_t result = hash_field_ >> String::kHashShift;
    ASSERT(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }


  uint32_t HashForObject(Object* other) {
    return String::cast(other)->Hash();
  }

  bool IsMatch(Object* string) {
    // NOTE(review): the raw character pointer is recomputed on each call
    // rather than cached, presumably because GC may move string_ between
    // calls — confirm against Handle semantics.
    Vector<const uint8_t> chars(string_->GetChars() + from_, length_);
    return String::cast(string)->IsOneByteEqualTo(chars);
  }

  MaybeObject* AsObject(Heap* heap) {
    if (hash_field_ == 0) Hash();
    Vector<const uint8_t> chars(string_->GetChars() + from_, length_);
    return heap->AllocateOneByteInternalizedString(chars, hash_field_);
  }

 private:
  Handle<SeqOneByteString> string_;
  int from_;
  int length_;
  uint32_t hash_field_;
};


// Key over a vector of two-byte (UC16) characters.
class TwoByteStringKey : public SequentialStringKey<uc16> {
 public:
  explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
      : SequentialStringKey<uc16>(str, seed) { }

  bool IsMatch(Object* string) {
    return String::cast(string)->IsTwoByteEqualTo(string_);
  }

  MaybeObject* AsObject(Heap* heap) {
    if (hash_field_ == 0) Hash();
    return heap->AllocateTwoByteInternalizedString(string_, hash_field_);
  }
};


// InternalizedStringKey carries a string/internalized-string object as key.
class InternalizedStringKey : public HashTableKey {
 public:
  explicit InternalizedStringKey(String* string)
      : string_(string) { }

  bool IsMatch(Object* string) {
    return String::cast(string)->Equals(string_);
  }

  uint32_t Hash() { return string_->Hash(); }

  uint32_t HashForObject(Object* other) {
    return String::cast(other)->Hash();
  }

  MaybeObject* AsObject(Heap* heap) {
    // Attempt to flatten the string, so that internalized strings will most
    // often be flat strings.
    string_ = string_->TryFlattenGetString();
    // Internalize the string if possible.  If a dedicated internalized map
    // exists for this string's representation, the string can be
    // internalized in place by just swapping its map.
    Map* map = heap->InternalizedStringMapForString(string_);
    if (map != NULL) {
      string_->set_map_no_write_barrier(map);
      ASSERT(string_->IsInternalizedString());
      return string_;
    }
    // Otherwise allocate a new internalized string.
    return heap->AllocateInternalizedStringImpl(
        string_, string_->length(), string_->hash_field());
  }

  static uint32_t StringHash(Object* obj) {
    return String::cast(obj)->Hash();
  }

  String* string_;
};


// Visits the pointer-valued prefix fields of the table (everything
// before the first element slot).
template<typename Shape, typename Key>
void HashTable<Shape, Key>::IteratePrefix(ObjectVisitor* v) {
  IteratePointers(v, 0, kElementsStartOffset);
}


// Visits all element slots of the table.
template<typename Shape, typename Key>
void HashTable<Shape, Key>::IterateElements(ObjectVisitor* v) {
  IteratePointers(v,
                  kElementsStartOffset,
                  kHeaderSize + length() * kPointerSize);
}


// Allocates an empty hash table with room for at_least_space_for
// elements.  With USE_CUSTOM_MINIMUM_CAPACITY the value is used as the
// capacity directly (and must be a power of two); otherwise a suitable
// capacity is computed.  Fails if the capacity exceeds kMaxCapacity or
// the underlying heap allocation fails.
template<typename Shape, typename Key>
MaybeObject* HashTable<Shape, Key>::Allocate(Heap* heap,
                                             int at_least_space_for,
                                             MinimumCapacity capacity_option,
                                             PretenureFlag pretenure) {
  ASSERT(!capacity_option || IS_POWER_OF_TWO(at_least_space_for));
  int capacity = (capacity_option == USE_CUSTOM_MINIMUM_CAPACITY)
                     ? at_least_space_for
                     : ComputeCapacity(at_least_space_for);
  if (capacity > HashTable::kMaxCapacity) {
    return Failure::OutOfMemoryException(0x10);
  }

  Object* obj;
  { MaybeObject* maybe_obj =
        heap->AllocateHashTable(EntryToIndex(capacity), pretenure);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  HashTable::cast(obj)->SetNumberOfElements(0);
  HashTable::cast(obj)->SetNumberOfDeletedElements(0);
  HashTable::cast(obj)->SetCapacity(capacity);
  return obj;
}


// Find entry for key otherwise return kNotFound.
// Find entry for key otherwise return kNotFound.
int NameDictionary::FindEntry(Name* key) {
  if (!key->IsUniqueName()) {
    return HashTable<NameDictionaryShape, Name*>::FindEntry(key);
  }

  // Optimized for unique names. Knowledge of the key type allows:
  // 1. Move the check if the key is unique out of the loop.
  // 2. Avoid comparing hash codes in unique-to-unique comparison.
  // 3. Detect a case when a dictionary key is not unique but the key is.
  //    In case of positive result the dictionary key may be replaced by the
  //    internalized string with minimal performance penalty. It gives a chance
  //    to perform further lookups in code stubs (and significant performance
  //    boost a certain style of code).

  // EnsureCapacity will guarantee the hash table is never full.
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(key->Hash(), capacity);
  uint32_t count = 1;

  while (true) {
    int index = EntryToIndex(entry);
    Object* element = get(index);
    if (element->IsUndefined()) break;  // Empty entry.
    // Unique names can be compared by identity.
    if (key == element) return entry;
    if (!element->IsUniqueName() &&
        !element->IsTheHole() &&
        Name::cast(element)->Equals(key)) {
      // Replace a key that is a non-internalized string by the equivalent
      // internalized string for faster further lookups.
      set(index, key);
      return entry;
    }
    ASSERT(element->IsTheHole() || !Name::cast(element)->Equals(key));
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}


// Copies all live entries of this table into new_table, recomputing
// each entry's slot from its hash.  Caller guarantees new_table has
// enough capacity (asserted below).
template<typename Shape, typename Key>
MaybeObject* HashTable<Shape, Key>::Rehash(HashTable* new_table, Key key) {
  ASSERT(NumberOfElements() < new_table->Capacity());

  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = new_table->GetWriteBarrierMode(no_gc);

  // Copy prefix to new array.
  for (int i = kPrefixStartIndex;
       i < kPrefixStartIndex + Shape::kPrefixSize;
       i++) {
    new_table->set(i, get(i), mode);
  }

  // Rehash the elements.
  int capacity = Capacity();
  for (int i = 0; i < capacity; i++) {
    uint32_t from_index = EntryToIndex(i);
    Object* k = get(from_index);
    if (IsKey(k)) {
      uint32_t hash = HashTable<Shape, Key>::HashForObject(key, k);
      uint32_t insertion_index =
          EntryToIndex(new_table->FindInsertionEntry(hash));
      // Copy the whole entry (key plus any associated slots).
      for (int j = 0; j < Shape::kEntrySize; j++) {
        new_table->set(insertion_index + j, get(from_index + j), mode);
      }
    }
  }
  new_table->SetNumberOfElements(NumberOfElements());
  new_table->SetNumberOfDeletedElements(0);
  return new_table;
}


// Walks the first (probe - 1) steps of k's probe sequence.  Returns
// `expected` as soon as the sequence reaches it, otherwise the entry
// reached after those steps.  Used by the in-place Rehash below.
template<typename Shape, typename Key>
uint32_t HashTable<Shape, Key>::EntryForProbe(Key key,
                                              Object* k,
                                              int probe,
                                              uint32_t expected) {
  uint32_t hash = HashTable<Shape, Key>::HashForObject(key, k);
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(hash, capacity);
  for (int i = 1; i < probe; i++) {
    if (entry == expected) return expected;
    entry = NextProbe(entry, i, capacity);
  }
  return entry;
}


// Swaps two complete entries (kEntrySize consecutive slots each),
// buffering one entry on the stack.
template<typename Shape, typename Key>
void HashTable<Shape, Key>::Swap(uint32_t entry1,
                                 uint32_t entry2,
                                 WriteBarrierMode mode) {
  int index1 = EntryToIndex(entry1);
  int index2 = EntryToIndex(entry2);
  Object* temp[Shape::kEntrySize];
  for (int j = 0; j < Shape::kEntrySize; j++) {
    temp[j] = get(index1 + j);
  }
  for (int j = 0; j < Shape::kEntrySize; j++) {
    set(index1 + j, get(index2 + j), mode);
  }
  for (int j = 0; j < Shape::kEntrySize; j++) {
    set(index2 + j, temp[j], mode);
  }
}


// In-place rehash: repeatedly moves entries toward their canonical
// probe positions until a fixed point is reached.  No allocation is
// performed (enforced by DisallowHeapAllocation).
template<typename Shape, typename Key>
void HashTable<Shape, Key>::Rehash(Key key) {
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = GetWriteBarrierMode(no_gc);
  uint32_t capacity = Capacity();
  bool done = false;
  for (int probe = 1; !done; probe++) {
    // All elements at entries given by one of the first _probe_ probes
    // are placed correctly. Other elements might need to be moved.
    done = true;
    for (uint32_t current = 0; current < capacity; current++) {
      Object* current_key = get(EntryToIndex(current));
      if (IsKey(current_key)) {
        uint32_t target = EntryForProbe(key, current_key, probe, current);
        if (current == target) continue;
        Object* target_key = get(EntryToIndex(target));
        if (!IsKey(target_key) ||
            EntryForProbe(key, target_key, probe, target) != target) {
          // Put the current element into the correct position.
          Swap(current, target, mode);
          // The other element will be processed on the next iteration.
          current--;
        } else {
          // The place for the current element is occupied. Leave the element
          // for the next probe.
          done = false;
        }
      }
    }
  }
}


// Ensures room for n additional elements, growing (and rehashing into
// a fresh table) when the occupancy/deleted-element thresholds below
// are exceeded.  Returns this table unchanged when no growth is needed.
template<typename Shape, typename Key>
MaybeObject* HashTable<Shape, Key>::EnsureCapacity(int n,
                                                   Key key,
                                                   PretenureFlag pretenure) {
  int capacity = Capacity();
  int nof = NumberOfElements() + n;
  int nod = NumberOfDeletedElements();
  // Return if:
  //   50% is still free after adding n elements and
  //   at most 50% of the free elements are deleted elements.
  if (nod <= (capacity - nof) >> 1) {
    int needed_free = nof >> 1;
    if (nof + needed_free <= capacity) return this;
  }

  // Large tables that have survived into old space (or an explicit
  // TENURED request) are pretenured to avoid repeated promotion.
  const int kMinCapacityForPretenure = 256;
  bool should_pretenure = pretenure == TENURED ||
      ((capacity > kMinCapacityForPretenure) && !GetHeap()->InNewSpace(this));
  Object* obj;
  { MaybeObject* maybe_obj =
        Allocate(GetHeap(),
                 nof * 2,
                 USE_DEFAULT_MINIMUM_CAPACITY,
                 should_pretenure ? TENURED : NOT_TENURED);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  return Rehash(HashTable::cast(obj), key);
}


// Shrinks the table when it is at most a quarter full, rehashing into
// a smaller freshly-allocated table.  Returns this table unchanged
// when shrinking is not worthwhile.
template<typename Shape, typename Key>
MaybeObject* HashTable<Shape, Key>::Shrink(Key key) {
  int capacity = Capacity();
  int nof = NumberOfElements();

  // Shrink to fit the number of elements if only a quarter of the
  // capacity is filled with elements.
  if (nof > (capacity >> 2)) return this;
  // Allocate a new dictionary with room for at least the current
  // number of elements. The allocation method will make sure that
  // there is extra room in the dictionary for additions. Don't go
  // lower than room for 16 elements.
  int at_least_room_for = nof;
  if (at_least_room_for < 16) return this;

  const int kMinCapacityForPretenure = 256;
  bool pretenure =
      (at_least_room_for > kMinCapacityForPretenure) &&
      !GetHeap()->InNewSpace(this);
  Object* obj;
  { MaybeObject* maybe_obj =
        Allocate(GetHeap(),
                 at_least_room_for,
                 USE_DEFAULT_MINIMUM_CAPACITY,
                 pretenure ? TENURED : NOT_TENURED);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  return Rehash(HashTable::cast(obj), key);
}


// Returns the first free (undefined or hole) entry on hash's probe
// sequence.  Termination relies on EnsureCapacity keeping the table
// from ever being completely full.
template<typename Shape, typename Key>
uint32_t HashTable<Shape, Key>::FindInsertionEntry(uint32_t hash) {
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(hash, capacity);
  uint32_t count = 1;
  // EnsureCapacity will guarantee the hash table is never full.
  while (true) {
    Object* element = KeyAt(entry);
    if (element->IsUndefined() || element->IsTheHole()) break;
    entry = NextProbe(entry, count++, capacity);
  }
  return entry;
}


// Force instantiation of template instances class.
// Please note this list is compiler dependent.

template class HashTable<StringTableShape, HashTableKey*>;

template class HashTable<CompilationCacheShape, HashTableKey*>;

template class HashTable<MapCacheShape, HashTableKey*>;

template class HashTable<ObjectHashTableShape<1>, Object*>;

template class HashTable<ObjectHashTableShape<2>, Object*>;

template class HashTable<WeakHashTableShape<2>, Object*>;

template class Dictionary<NameDictionaryShape, Name*>;

template class Dictionary<SeededNumberDictionaryShape, uint32_t>;

template class Dictionary<UnseededNumberDictionaryShape, uint32_t>;

template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::
    Allocate(Heap* heap, int at_least_space_for, PretenureFlag pretenure);

template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>::
    Allocate(Heap* heap, int at_least_space_for, PretenureFlag pretenure);

template MaybeObject* Dictionary<NameDictionaryShape, Name*>::
    Allocate(Heap* heap, int n, PretenureFlag pretenure);

template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::AtPut(
    uint32_t, Object*);

template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>::
    AtPut(uint32_t, Object*);

template Object* Dictionary<SeededNumberDictionaryShape, uint32_t>::
    SlowReverseLookup(Object* value);

template Object* Dictionary<UnseededNumberDictionaryShape, uint32_t>::
    SlowReverseLookup(Object* value);

template Object* Dictionary<NameDictionaryShape, Name*>::SlowReverseLookup(
    Object*);

template void Dictionary<SeededNumberDictionaryShape, uint32_t>::CopyKeysTo(
    FixedArray*,
    PropertyAttributes,
    Dictionary<SeededNumberDictionaryShape, uint32_t>::SortMode);

template Object* Dictionary<NameDictionaryShape, Name*>::DeleteProperty(
    int, JSObject::DeleteMode);

template Object* Dictionary<SeededNumberDictionaryShape, uint32_t>::
    DeleteProperty(int, JSObject::DeleteMode);

template MaybeObject* Dictionary<NameDictionaryShape, Name*>::Shrink(Name* n);

template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::Shrink(
    uint32_t);

template void Dictionary<NameDictionaryShape, Name*>::CopyKeysTo(
    FixedArray*,
    int,
    PropertyAttributes,
    Dictionary<NameDictionaryShape, Name*>::SortMode);

template int
Dictionary<NameDictionaryShape, Name*>::NumberOfElementsFilterAttributes(
    PropertyAttributes);

template MaybeObject* Dictionary<NameDictionaryShape, Name*>::Add(
    Name*, Object*, PropertyDetails);

template MaybeObject*
Dictionary<NameDictionaryShape, Name*>::GenerateNewEnumerationIndices();

template int
Dictionary<SeededNumberDictionaryShape, uint32_t>::
    NumberOfElementsFilterAttributes(PropertyAttributes);

template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::Add(
    uint32_t, Object*, PropertyDetails);

template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>::Add(
    uint32_t, Object*, PropertyDetails);

template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::
    EnsureCapacity(int, uint32_t);

template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>::
    EnsureCapacity(int, uint32_t);

template MaybeObject* Dictionary<NameDictionaryShape, Name*>::
    EnsureCapacity(int, Name*);

template MaybeObject* Dictionary<SeededNumberDictionaryShape, uint32_t>::
    AddEntry(uint32_t, Object*, PropertyDetails, uint32_t);

template MaybeObject* Dictionary<UnseededNumberDictionaryShape, uint32_t>::
    AddEntry(uint32_t, Object*, PropertyDetails, uint32_t);

template MaybeObject* Dictionary<NameDictionaryShape, Name*>::AddEntry(
    Name*, Object*, PropertyDetails, uint32_t);

template
int Dictionary<SeededNumberDictionaryShape, uint32_t>::NumberOfEnumElements();

template
int Dictionary<NameDictionaryShape, Name*>::NumberOfEnumElements();

template
int HashTable<SeededNumberDictionaryShape, uint32_t>::FindEntry(uint32_t);


// Handlified wrapper around the raw PrepareSlowElementsForSort below.
Handle<Object> JSObject::PrepareSlowElementsForSort(
    Handle<JSObject> object, uint32_t limit) {
  CALL_HEAP_FUNCTION(object->GetIsolate(),
                     object->PrepareSlowElementsForSort(limit),
                     Object);
}


// Collates undefined and unexisting elements below limit from position
// zero of the elements. The object stays in Dictionary mode.
// Returns the number of non-undefined elements collected (as a Smi or
// HeapNumber), or Smi -1 to signal that the caller must bail out and
// sort in JS instead.
MaybeObject* JSObject::PrepareSlowElementsForSort(uint32_t limit) {
  ASSERT(HasDictionaryElements());
  // Must stay in dictionary mode, either because of requires_slow_elements,
  // or because we are not going to sort (and therefore compact) all of the
  // elements.
  SeededNumberDictionary* dict = element_dictionary();
  HeapNumber* result_double = NULL;
  if (limit > static_cast<uint32_t>(Smi::kMaxValue)) {
    // Allocate space for result before we start mutating the object.
    Object* new_double;
    { MaybeObject* maybe_new_double = GetHeap()->AllocateHeapNumber(0.0);
      if (!maybe_new_double->ToObject(&new_double)) return maybe_new_double;
    }
    result_double = HeapNumber::cast(new_double);
  }

  Object* obj;
  { MaybeObject* maybe_obj =
        SeededNumberDictionary::Allocate(GetHeap(), dict->NumberOfElements());
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  SeededNumberDictionary* new_dict = SeededNumberDictionary::cast(obj);

  // From here on no allocation may happen: new_dict was sized above to
  // hold every element without growing.
  DisallowHeapAllocation no_alloc;

  uint32_t pos = 0;
  uint32_t undefs = 0;
  int capacity = dict->Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = dict->KeyAt(i);
    if (dict->IsKey(k)) {
      ASSERT(k->IsNumber());
      ASSERT(!k->IsSmi() || Smi::cast(k)->value() >= 0);
      ASSERT(!k->IsHeapNumber() || HeapNumber::cast(k)->value() >= 0);
      ASSERT(!k->IsHeapNumber() || HeapNumber::cast(k)->value() <= kMaxUInt32);
      Object* value = dict->ValueAt(i);
      PropertyDetails details = dict->DetailsAt(i);
      if (details.type() == CALLBACKS || details.IsReadOnly()) {
        // Bail out and do the sorting of undefineds and array holes in JS.
        // Also bail out if the element is not supposed to be moved.
        return Smi::FromInt(-1);
      }
      uint32_t key = NumberToUint32(k);
      // In the following we assert that adding the entry to the new dictionary
      // does not cause GC.  This is the case because we made sure to allocate
      // the dictionary big enough above, so it need not grow.
      if (key < limit) {
        if (value->IsUndefined()) {
          // Undefineds are counted here and appended after the defined
          // values below.
          undefs++;
        } else {
          if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
            // Adding an entry with the key beyond smi-range requires
            // allocation. Bailout.
            return Smi::FromInt(-1);
          }
          new_dict->AddNumberEntry(pos, value, details)->ToObjectUnchecked();
          pos++;
        }
      } else {
        // Elements at or beyond limit keep their original keys.
        if (key > static_cast<uint32_t>(Smi::kMaxValue)) {
          // Adding an entry with the key beyond smi-range requires
          // allocation. Bailout.
          return Smi::FromInt(-1);
        }
        new_dict->AddNumberEntry(key, value, details)->ToObjectUnchecked();
      }
    }
  }

  uint32_t result = pos;
  PropertyDetails no_details = PropertyDetails(NONE, NORMAL, 0);
  Heap* heap = GetHeap();
  // Append the collected undefineds after the defined values.
  while (undefs > 0) {
    if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
      // Adding an entry with the key beyond smi-range requires
      // allocation. Bailout.
      return Smi::FromInt(-1);
    }
    new_dict->AddNumberEntry(pos, heap->undefined_value(), no_details)->
        ToObjectUnchecked();
    pos++;
    undefs--;
  }

  set_elements(new_dict);

  if (result <= static_cast<uint32_t>(Smi::kMaxValue)) {
    return Smi::FromInt(static_cast<int>(result));
  }

  // result > Smi::kMaxValue implies limit was too, so result_double was
  // preallocated above.
  ASSERT_NE(NULL, result_double);
  result_double->set_value(static_cast<double>(result));
  return result_double;
}


// Collects all defined (non-hole) and non-undefined (array) elements at
// the start of the elements array.
// If the object is in dictionary mode, it is converted to fast elements
// mode.
Handle<Object> JSObject::PrepareElementsForSort(Handle<JSObject> object,
                                                uint32_t limit) {
  Isolate* isolate = object->GetIsolate();

  ASSERT(!object->map()->is_observed());
  if (object->HasDictionaryElements()) {
    // Convert to fast elements containing only the existing properties.
    // Ordering is irrelevant, since we are going to sort anyway.
    Handle<SeededNumberDictionary> dict(object->element_dictionary());
    if (object->IsJSArray() || dict->requires_slow_elements() ||
        dict->max_number_key() >= limit) {
      return JSObject::PrepareSlowElementsForSort(object, limit);
    }
    // Convert to fast elements.

    Handle<Map> new_map =
        JSObject::GetElementsTransitionMap(object, FAST_HOLEY_ELEMENTS);

    PretenureFlag tenure = isolate->heap()->InNewSpace(*object) ?
        NOT_TENURED : TENURED;
    Handle<FixedArray> fast_elements =
        isolate->factory()->NewFixedArray(dict->NumberOfElements(), tenure);
    dict->CopyValuesTo(*fast_elements);
    object->ValidateElements();

    object->set_map_and_elements(*new_map, *fast_elements);
  } else if (object->HasExternalArrayElements()) {
    // External arrays cannot have holes or undefined elements.
    return handle(Smi::FromInt(
        ExternalArray::cast(object->elements())->length()), isolate);
  } else if (!object->HasFastDoubleElements()) {
    EnsureWritableFastElements(object);
  }
  ASSERT(object->HasFastSmiOrObjectElements() ||
         object->HasFastDoubleElements());

  // Collect holes at the end, undefined before that and the rest at the
  // start, and return the number of non-hole, non-undefined values.

  Handle<FixedArrayBase> elements_base(object->elements());
  uint32_t elements_length = static_cast<uint32_t>(elements_base->length());
  // Never touch elements beyond the backing store's length.
  if (limit > elements_length) {
    limit = elements_length;
  }
  if (limit == 0) {
    return handle(Smi::FromInt(0), isolate);
  }

  uint32_t result = 0;
  if (elements_base->map() == isolate->heap()->fixed_double_array_map()) {
    FixedDoubleArray* elements = FixedDoubleArray::cast(*elements_base);
    // Split elements into defined and the_hole, in that order.
    // Double arrays cannot contain undefined, so only holes are moved.
    unsigned int holes = limit;
    // Assume most arrays contain no holes and undefined values, so minimize the
    // number of stores of non-undefined, non-the-hole values.
    for (unsigned int i = 0; i < holes; i++) {
      if (elements->is_the_hole(i)) {
        holes--;
      } else {
        continue;
      }
      // Position i needs to be filled: pull the last defined value down.
      while (holes > i) {
        if (elements->is_the_hole(holes)) {
          holes--;
        } else {
          elements->set(i, elements->get_scalar(holes));
          break;
        }
      }
    }
    result = holes;
    while (holes < limit) {
      elements->set_the_hole(holes);
      holes++;
    }
  } else {
    FixedArray* elements = FixedArray::cast(*elements_base);
    DisallowHeapAllocation no_gc;

    // Split elements into defined, undefined and the_hole, in that order.  Only
    // count locations for undefined and the hole, and fill them afterwards.
    WriteBarrierMode write_barrier = elements->GetWriteBarrierMode(no_gc);
    unsigned int undefs = limit;
    unsigned int holes = limit;
    // Assume most arrays contain no holes and undefined values, so minimize the
    // number of stores of non-undefined, non-the-hole values.
    for (unsigned int i = 0; i < undefs; i++) {
      Object* current = elements->get(i);
      if (current->IsTheHole()) {
        holes--;
        undefs--;
      } else if (current->IsUndefined()) {
        undefs--;
      } else {
        continue;
      }
      // Position i needs to be filled: pull the last defined value down.
      while (undefs > i) {
        current = elements->get(undefs);
        if (current->IsTheHole()) {
          holes--;
          undefs--;
        } else if (current->IsUndefined()) {
          undefs--;
        } else {
          elements->set(i, current, write_barrier);
          break;
        }
      }
    }
    // [0, undefs) defined, [undefs, holes) undefined, [holes, limit) holes.
    result = undefs;
    while (undefs < holes) {
      elements->set_undefined(undefs);
      undefs++;
    }
    while (holes < limit) {
      elements->set_the_hole(holes);
      holes++;
    }
  }

  return isolate->factory()->NewNumberFromUint(result);
}


// Maps the backing store's instance type to the public external array
// type enum; returns -1 for non-external backing stores.
ExternalArrayType JSTypedArray::type() {
  switch (elements()->map()->instance_type()) {
    case EXTERNAL_BYTE_ARRAY_TYPE:
      return kExternalByteArray;
    case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
      return kExternalUnsignedByteArray;
    case EXTERNAL_SHORT_ARRAY_TYPE:
      return kExternalShortArray;
    case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
      return kExternalUnsignedShortArray;
    case EXTERNAL_INT_ARRAY_TYPE:
      return kExternalIntArray;
    case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
      return kExternalUnsignedIntArray;
    case EXTERNAL_FLOAT_ARRAY_TYPE:
      return kExternalFloatArray;
    case EXTERNAL_DOUBLE_ARRAY_TYPE:
      return kExternalDoubleArray;
    case EXTERNAL_PIXEL_ARRAY_TYPE:
      return kExternalPixelArray;
    default:
      return static_cast<ExternalArrayType>(-1);
  }
}


// Size in bytes of one element of the typed array's backing store.
size_t JSTypedArray::element_size() {
  switch (elements()->map()->instance_type()) {
    case EXTERNAL_BYTE_ARRAY_TYPE:
      return 1;
    case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
      return 1;
    case EXTERNAL_SHORT_ARRAY_TYPE:
      return 2;
    case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
      return 2;
    case EXTERNAL_INT_ARRAY_TYPE:
      return 4;
    case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
      return 4;
    case EXTERNAL_FLOAT_ARRAY_TYPE:
      return 4;
    case EXTERNAL_DOUBLE_ARRAY_TYPE:
      return 8;
    case EXTERNAL_PIXEL_ARRAY_TYPE:
      return 1;
    default:
      UNREACHABLE();
      return 0;
  }
}


// Stores value at index with pixel-array (clamped uint8) semantics and
// returns the clamped value as a Smi.  Out-of-bounds writes are ignored
// but still return the (zero) clamped value.
Object* ExternalPixelArray::SetValue(uint32_t index, Object* value) {
  uint8_t clamped_value = 0;
  if (index < static_cast<uint32_t>(length())) {
    if (value->IsSmi()) {
      int int_value = Smi::cast(value)->value();
      if (int_value < 0) {
        clamped_value = 0;
      } else if (int_value > 255) {
        clamped_value = 255;
      } else {
        clamped_value = static_cast<uint8_t>(int_value);
      }
    } else if (value->IsHeapNumber()) {
      double double_value = HeapNumber::cast(value)->value();
      if (!(double_value > 0)) {
        // NaN and less than zero clamp to zero.
        clamped_value = 0;
      } else if (double_value > 255) {
        // Greater than 255 clamp to 255.
        clamped_value = 255;
      } else {
        // Other doubles are rounded to the nearest integer.
        clamped_value = static_cast<uint8_t>(lrint(double_value));
      }
    } else {
      // Clamp undefined to zero (default). All other types have been
      // converted to a number type further up in the call chain.
14769 ASSERT(value->IsUndefined()); 14770 } 14771 set(index, clamped_value); 14772 } 14773 return Smi::FromInt(clamped_value); 14774 } 14775 14776 14777 template<typename ExternalArrayClass, typename ValueType> 14778 static MaybeObject* ExternalArrayIntSetter(Heap* heap, 14779 ExternalArrayClass* receiver, 14780 uint32_t index, 14781 Object* value) { 14782 ValueType cast_value = 0; 14783 if (index < static_cast<uint32_t>(receiver->length())) { 14784 if (value->IsSmi()) { 14785 int int_value = Smi::cast(value)->value(); 14786 cast_value = static_cast<ValueType>(int_value); 14787 } else if (value->IsHeapNumber()) { 14788 double double_value = HeapNumber::cast(value)->value(); 14789 cast_value = static_cast<ValueType>(DoubleToInt32(double_value)); 14790 } else { 14791 // Clamp undefined to zero (default). All other types have been 14792 // converted to a number type further up in the call chain. 14793 ASSERT(value->IsUndefined()); 14794 } 14795 receiver->set(index, cast_value); 14796 } 14797 return heap->NumberFromInt32(cast_value); 14798 } 14799 14800 14801 Handle<Object> ExternalByteArray::SetValue(Handle<ExternalByteArray> array, 14802 uint32_t index, 14803 Handle<Object> value) { 14804 CALL_HEAP_FUNCTION(array->GetIsolate(), 14805 array->SetValue(index, *value), 14806 Object); 14807 } 14808 14809 14810 MaybeObject* ExternalByteArray::SetValue(uint32_t index, Object* value) { 14811 return ExternalArrayIntSetter<ExternalByteArray, int8_t> 14812 (GetHeap(), this, index, value); 14813 } 14814 14815 14816 Handle<Object> ExternalUnsignedByteArray::SetValue( 14817 Handle<ExternalUnsignedByteArray> array, 14818 uint32_t index, 14819 Handle<Object> value) { 14820 CALL_HEAP_FUNCTION(array->GetIsolate(), 14821 array->SetValue(index, *value), 14822 Object); 14823 } 14824 14825 14826 MaybeObject* ExternalUnsignedByteArray::SetValue(uint32_t index, 14827 Object* value) { 14828 return ExternalArrayIntSetter<ExternalUnsignedByteArray, uint8_t> 14829 (GetHeap(), this, index, 
value); 14830 } 14831 14832 14833 Handle<Object> ExternalShortArray::SetValue( 14834 Handle<ExternalShortArray> array, 14835 uint32_t index, 14836 Handle<Object> value) { 14837 CALL_HEAP_FUNCTION(array->GetIsolate(), 14838 array->SetValue(index, *value), 14839 Object); 14840 } 14841 14842 14843 MaybeObject* ExternalShortArray::SetValue(uint32_t index, 14844 Object* value) { 14845 return ExternalArrayIntSetter<ExternalShortArray, int16_t> 14846 (GetHeap(), this, index, value); 14847 } 14848 14849 14850 Handle<Object> ExternalUnsignedShortArray::SetValue( 14851 Handle<ExternalUnsignedShortArray> array, 14852 uint32_t index, 14853 Handle<Object> value) { 14854 CALL_HEAP_FUNCTION(array->GetIsolate(), 14855 array->SetValue(index, *value), 14856 Object); 14857 } 14858 14859 14860 MaybeObject* ExternalUnsignedShortArray::SetValue(uint32_t index, 14861 Object* value) { 14862 return ExternalArrayIntSetter<ExternalUnsignedShortArray, uint16_t> 14863 (GetHeap(), this, index, value); 14864 } 14865 14866 14867 Handle<Object> ExternalIntArray::SetValue(Handle<ExternalIntArray> array, 14868 uint32_t index, 14869 Handle<Object> value) { 14870 CALL_HEAP_FUNCTION(array->GetIsolate(), 14871 array->SetValue(index, *value), 14872 Object); 14873 } 14874 14875 14876 MaybeObject* ExternalIntArray::SetValue(uint32_t index, Object* value) { 14877 return ExternalArrayIntSetter<ExternalIntArray, int32_t> 14878 (GetHeap(), this, index, value); 14879 } 14880 14881 14882 Handle<Object> ExternalUnsignedIntArray::SetValue( 14883 Handle<ExternalUnsignedIntArray> array, 14884 uint32_t index, 14885 Handle<Object> value) { 14886 CALL_HEAP_FUNCTION(array->GetIsolate(), 14887 array->SetValue(index, *value), 14888 Object); 14889 } 14890 14891 14892 MaybeObject* ExternalUnsignedIntArray::SetValue(uint32_t index, Object* value) { 14893 uint32_t cast_value = 0; 14894 Heap* heap = GetHeap(); 14895 if (index < static_cast<uint32_t>(length())) { 14896 if (value->IsSmi()) { 14897 int int_value = 
Smi::cast(value)->value();
      cast_value = static_cast<uint32_t>(int_value);
    } else if (value->IsHeapNumber()) {
      double double_value = HeapNumber::cast(value)->value();
      cast_value = static_cast<uint32_t>(DoubleToUint32(double_value));
    } else {
      // Clamp undefined to zero (default). All other types have been
      // converted to a number type further up in the call chain.
      ASSERT(value->IsUndefined());
    }
    set(index, cast_value);
  }
  return heap->NumberFromUint32(cast_value);
}


// Handle-based wrapper over the raw-pointer SetValue below; retries on
// allocation failure via CALL_HEAP_FUNCTION.
Handle<Object> ExternalFloatArray::SetValue(Handle<ExternalFloatArray> array,
                                            uint32_t index,
                                            Handle<Object> value) {
  CALL_HEAP_FUNCTION(array->GetIsolate(),
                     array->SetValue(index, *value),
                     Object);
}


// Stores |value| at |index| as a float; undefined stores NaN.  Out-of-range
// indices are ignored.  Always returns a freshly allocated HeapNumber.
MaybeObject* ExternalFloatArray::SetValue(uint32_t index, Object* value) {
  float cast_value = static_cast<float>(OS::nan_value());
  Heap* heap = GetHeap();
  if (index < static_cast<uint32_t>(length())) {
    if (value->IsSmi()) {
      int int_value = Smi::cast(value)->value();
      cast_value = static_cast<float>(int_value);
    } else if (value->IsHeapNumber()) {
      double double_value = HeapNumber::cast(value)->value();
      cast_value = static_cast<float>(double_value);
    } else {
      // Clamp undefined to NaN (default). All other types have been
      // converted to a number type further up in the call chain.
      ASSERT(value->IsUndefined());
    }
    set(index, cast_value);
  }
  return heap->AllocateHeapNumber(cast_value);
}


// Handle-based wrapper, as above.
Handle<Object> ExternalDoubleArray::SetValue(Handle<ExternalDoubleArray> array,
                                             uint32_t index,
                                             Handle<Object> value) {
  CALL_HEAP_FUNCTION(array->GetIsolate(),
                     array->SetValue(index, *value),
                     Object);
}


// Stores |value| at |index| as a double; undefined stores NaN.  Out-of-range
// indices are ignored.  Always returns a freshly allocated HeapNumber.
MaybeObject* ExternalDoubleArray::SetValue(uint32_t index, Object* value) {
  double double_value = OS::nan_value();
  Heap* heap = GetHeap();
  if (index < static_cast<uint32_t>(length())) {
    if (value->IsSmi()) {
      int int_value = Smi::cast(value)->value();
      double_value = static_cast<double>(int_value);
    } else if (value->IsHeapNumber()) {
      double_value = HeapNumber::cast(value)->value();
    } else {
      // Clamp undefined to NaN (default). All other types have been
      // converted to a number type further up in the call chain.
      ASSERT(value->IsUndefined());
    }
    set(index, double_value);
  }
  return heap->AllocateHeapNumber(double_value);
}


// Returns the PropertyCell backing a slow-mode global property found by
// |result|.  Only valid for dictionary-mode (non-fast) global objects.
PropertyCell* GlobalObject::GetPropertyCell(LookupResult* result) {
  ASSERT(!HasFastProperties());
  Object* value = property_dictionary()->ValueAt(result->GetDictionaryEntry());
  return PropertyCell::cast(value);
}


// Returns the PropertyCell for |name| on |global|, creating a hole-valued
// cell (marked deleted) and inserting it into the property dictionary if the
// name is not yet present.
Handle<PropertyCell> JSGlobalObject::EnsurePropertyCell(
    Handle<JSGlobalObject> global,
    Handle<Name> name) {
  ASSERT(!global->HasFastProperties());
  int entry = global->property_dictionary()->FindEntry(*name);
  if (entry == NameDictionary::kNotFound) {
    Isolate* isolate = global->GetIsolate();
    Handle<PropertyCell> cell = isolate->factory()->NewPropertyCell(
        isolate->factory()->the_hole_value());
    PropertyDetails details(NONE, NORMAL, 0);
    details = details.AsDeleted();
    Handle<NameDictionary> dictionary =
NameDictionaryAdd(
        handle(global->property_dictionary()), name, cell, details);
    global->set_properties(*dictionary);
    return cell;
  } else {
    // Name already present: its dictionary value is the cell itself.
    Object* value = global->property_dictionary()->ValueAt(entry);
    ASSERT(value->IsPropertyCell());
    return handle(PropertyCell::cast(value));
  }
}


// Looks up |string| in the string table, adding it if absent.  On success
// *s holds the internalized string and the (possibly grown) table is
// returned.
MaybeObject* StringTable::LookupString(String* string, Object** s) {
  InternalizedStringKey key(string);
  return LookupKey(&key, s);
}


// This class is used for looking up two character strings in the string table.
// If we don't have a hit we don't want to waste much time so we unroll the
// string hash calculation loop here for speed. Doesn't work if the two
// characters form a decimal integer, since such strings have a different hash
// algorithm.
class TwoCharHashTableKey : public HashTableKey {
 public:
  TwoCharHashTableKey(uint16_t c1, uint16_t c2, uint32_t seed)
      : c1_(c1), c2_(c2) {
    // Inlined two-step Jenkins-style mix matching
    // StringHasher::HashSequentialString for a length-2 string.
    // Char 1.
    uint32_t hash = seed;
    hash += c1;
    hash += hash << 10;
    hash ^= hash >> 6;
    // Char 2.
    hash += c2;
    hash += hash << 10;
    hash ^= hash >> 6;
    // GetHash.
    hash += hash << 3;
    hash ^= hash >> 11;
    hash += hash << 15;
    if ((hash & String::kHashBitMask) == 0) hash = StringHasher::kZeroHash;
    hash_ = hash;
#ifdef DEBUG
    // If this assert fails then we failed to reproduce the two-character
    // version of the string hashing algorithm above. One reason could be
    // that we were passed two digits as characters, since the hash
    // algorithm is different in that case.
    uint16_t chars[2] = {c1, c2};
    uint32_t check_hash = StringHasher::HashSequentialString(chars, 2, seed);
    hash = (hash << String::kHashShift) | String::kIsNotArrayIndexMask;
    ASSERT_EQ(static_cast<int32_t>(hash), static_cast<int32_t>(check_hash));
#endif
  }

  // Matches only length-2 strings with the same two code units.
  bool IsMatch(Object* o) {
    if (!o->IsString()) return false;
    String* other = String::cast(o);
    if (other->length() != 2) return false;
    if (other->Get(0) != c1_) return false;
    return other->Get(1) == c2_;
  }

  uint32_t Hash() { return hash_; }
  uint32_t HashForObject(Object* key) {
    if (!key->IsString()) return 0;
    return String::cast(key)->Hash();
  }

  Object* AsObject(Heap* heap) {
    // The TwoCharHashTableKey is only used for looking in the string
    // table, not for adding to it.
    UNREACHABLE();
    return NULL;
  }

 private:
  uint16_t c1_;
  uint16_t c2_;
  uint32_t hash_;
};


// Non-allocating lookup: returns true and sets *result only if |string| is
// already internalized in the table.
bool StringTable::LookupStringIfExists(String* string, String** result) {
  InternalizedStringKey key(string);
  int entry = FindEntry(&key);
  if (entry == kNotFound) {
    return false;
  } else {
    *result = String::cast(KeyAt(entry));
    ASSERT(StringShape(*result).IsInternalized());
    return true;
  }
}


// Non-allocating lookup of a two-character string via TwoCharHashTableKey.
bool StringTable::LookupTwoCharsStringIfExists(uint16_t c1,
                                               uint16_t c2,
                                               String** result) {
  TwoCharHashTableKey key(c1, c2, GetHeap()->HashSeed());
  int entry = FindEntry(&key);
  if (entry == kNotFound) {
    return false;
  } else {
    *result = String::cast(KeyAt(entry));
    ASSERT(StringShape(*result).IsInternalized());
    return true;
  }
}


// Internalizes a UTF-8 encoded character sequence.
MaybeObject* StringTable::LookupUtf8String(Vector<const char> str,
                                           Object** s) {
  Utf8StringKey key(str, GetHeap()->HashSeed());
  return LookupKey(&key, s);
}

// Internalizes a one-byte (Latin-1) character sequence.
MaybeObject* StringTable::LookupOneByteString(Vector<const uint8_t> str,
                                              Object** s) {
  OneByteStringKey key(str, GetHeap()->HashSeed());
  return LookupKey(&key, s);
}


// Internalizes the substring str[from, from + length).
MaybeObject* StringTable::LookupSubStringOneByteString(
    Handle<SeqOneByteString> str,
    int from,
    int length,
    Object** s) {
  SubStringOneByteStringKey key(str, from, length);
  return LookupKey(&key, s);
}


// Internalizes a two-byte (UC16) character sequence.
MaybeObject* StringTable::LookupTwoByteString(Vector<const uc16> str,
                                              Object** s) {
  TwoByteStringKey key(str, GetHeap()->HashSeed());
  return LookupKey(&key, s);
}


// Core lookup/insert: finds |key| in the table, or grows the table, creates
// the string via key->AsObject() and inserts it.  *s receives the
// internalized string; the return value is the table to use from now on
// (it may differ from |this| if EnsureCapacity reallocated).
MaybeObject* StringTable::LookupKey(HashTableKey* key, Object** s) {
  int entry = FindEntry(key);

  // String already in table.
  if (entry != kNotFound) {
    *s = KeyAt(entry);
    return this;
  }

  // Adding new string. Grow table if needed.
  Object* obj;
  { MaybeObject* maybe_obj = EnsureCapacity(1, key);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  // Create string object.
  Object* string;
  { MaybeObject* maybe_string = key->AsObject(GetHeap());
    if (!maybe_string->ToObject(&string)) return maybe_string;
  }

  // If the string table grew as part of EnsureCapacity, obj is not
  // the current string table and therefore we cannot use
  // StringTable::cast here.
  StringTable* table = reinterpret_cast<StringTable*>(obj);

  // Add the new string and return it along with the string table.
  entry = table->FindInsertionEntry(key->Hash());
  table->set(EntryToIndex(entry), string);
  table->ElementAdded();
  *s = string;
  return table;
}


// The key for the script compilation cache is dependent on the mode flags,
// because they change the global language mode and thus binding behaviour.
// If flags change at some point, we must ensure that we do not hit the cache
// for code compiled with different settings.
static LanguageMode CurrentGlobalLanguageMode() {
  return FLAG_use_strict
      ? (FLAG_harmony_scoping ? EXTENDED_MODE : STRICT_MODE)
      : CLASSIC_MODE;
}


// Looks up a cached script compilation for |src| compiled in the current
// global language mode; returns undefined on a miss.
Object* CompilationCacheTable::Lookup(String* src, Context* context) {
  SharedFunctionInfo* shared = context->closure()->shared();
  StringSharedKey key(src,
                      shared,
                      CurrentGlobalLanguageMode(),
                      RelocInfo::kNoPosition);
  int entry = FindEntry(&key);
  if (entry == kNotFound) return GetHeap()->undefined_value();
  return get(EntryToIndex(entry) + 1);
}


// Looks up a cached eval compilation, keyed additionally by the explicit
// language mode and the scope position of the eval call site.
Object* CompilationCacheTable::LookupEval(String* src,
                                          Context* context,
                                          LanguageMode language_mode,
                                          int scope_position) {
  StringSharedKey key(src,
                      context->closure()->shared(),
                      language_mode,
                      scope_position);
  int entry = FindEntry(&key);
  if (entry == kNotFound) return GetHeap()->undefined_value();
  return get(EntryToIndex(entry) + 1);
}


// Looks up cached compiled-regexp data for (pattern, flags).
Object* CompilationCacheTable::LookupRegExp(String* src,
                                            JSRegExp::Flags flags) {
  RegExpKey key(src, flags);
  int entry = FindEntry(&key);
  if (entry == kNotFound) return GetHeap()->undefined_value();
  return get(EntryToIndex(entry) + 1);
}


// Inserts a script compilation result; returns the (possibly grown) cache
// table that callers must use from now on.
MaybeObject* CompilationCacheTable::Put(String* src,
                                        Context* context,
                                        Object* value) {
  SharedFunctionInfo* shared = context->closure()->shared();
  StringSharedKey key(src,
                      shared,
                      CurrentGlobalLanguageMode(),
                      RelocInfo::kNoPosition);
  CompilationCacheTable* cache;
  MaybeObject* maybe_cache = EnsureCapacity(1, &key);
  if (!maybe_cache->To(&cache)) return maybe_cache;

  Object* k;
  MaybeObject* maybe_k = key.AsObject(GetHeap());
  if (!maybe_k->To(&k)) return maybe_k;

  int entry = cache->FindInsertionEntry(key.Hash());
  cache->set(EntryToIndex(entry), k);
  cache->set(EntryToIndex(entry) + 1, value);
  cache->ElementAdded();
  return cache;
}


// Inserts an eval compilation result, keyed by the value's own language
// mode; returns the (possibly grown) cache table.
MaybeObject* CompilationCacheTable::PutEval(String* src,
                                            Context* context,
                                            SharedFunctionInfo* value,
                                            int scope_position) {
  StringSharedKey key(src,
                      context->closure()->shared(),
                      value->language_mode(),
                      scope_position);
  Object* obj;
  { MaybeObject* maybe_obj = EnsureCapacity(1, &key);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  CompilationCacheTable* cache =
      reinterpret_cast<CompilationCacheTable*>(obj);
  int entry = cache->FindInsertionEntry(key.Hash());

  Object* k;
  { MaybeObject* maybe_k = key.AsObject(GetHeap());
    if (!maybe_k->ToObject(&k)) return maybe_k;
  }

  cache->set(EntryToIndex(entry), k);
  cache->set(EntryToIndex(entry) + 1, value);
  cache->ElementAdded();
  return cache;
}


// Inserts compiled-regexp data; returns the (possibly grown) cache table.
MaybeObject* CompilationCacheTable::PutRegExp(String* src,
                                              JSRegExp::Flags flags,
                                              FixedArray* value) {
  RegExpKey key(src, flags);
  Object* obj;
  { MaybeObject* maybe_obj = EnsureCapacity(1, &key);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  CompilationCacheTable* cache =
      reinterpret_cast<CompilationCacheTable*>(obj);
  int entry = cache->FindInsertionEntry(key.Hash());
  // We store the value in the key slot, and compare the search key
  // to the stored value with a custom IsMatch function during lookups.
  cache->set(EntryToIndex(entry), value);
  cache->set(EntryToIndex(entry) + 1, value);
  cache->ElementAdded();
  return cache;
}


// Removes every entry whose cached value is |value|, replacing key and value
// slots with the hole (NoWriteBarrierSet: the hole needs no barrier).
void CompilationCacheTable::Remove(Object* value) {
  Object* the_hole_value = GetHeap()->the_hole_value();
  for (int entry = 0, size = Capacity(); entry < size; entry++) {
    int entry_index = EntryToIndex(entry);
    int value_index = entry_index + 1;
    if (get(value_index) == value) {
      NoWriteBarrierSet(this, entry_index, the_hole_value);
      NoWriteBarrierSet(this, value_index, the_hole_value);
      ElementRemoved();
    }
  }
  return;
}


// StringsKey used for HashTable where key is array of internalized strings.
class StringsKey : public HashTableKey {
 public:
  explicit StringsKey(FixedArray* strings) : strings_(strings) { }

  // Element-wise identity comparison; internalized strings make pointer
  // equality sufficient.
  bool IsMatch(Object* strings) {
    FixedArray* o = FixedArray::cast(strings);
    int len = strings_->length();
    if (o->length() != len) return false;
    for (int i = 0; i < len; i++) {
      if (o->get(i) != strings_->get(i)) return false;
    }
    return true;
  }

  uint32_t Hash() { return HashForObject(strings_); }

  // XOR of the element hashes (order-insensitive).
  uint32_t HashForObject(Object* obj) {
    FixedArray* strings = FixedArray::cast(obj);
    int len = strings->length();
    uint32_t hash = 0;
    for (int i = 0; i < len; i++) {
      hash ^= String::cast(strings->get(i))->Hash();
    }
    return hash;
  }

  Object* AsObject(Heap* heap) { return strings_; }

 private:
  FixedArray* strings_;
};


// Returns the cached Map for the given name array, or undefined on a miss.
Object* MapCache::Lookup(FixedArray* array) {
  StringsKey key(array);
  int entry = FindEntry(&key);
  if (entry == kNotFound) return GetHeap()->undefined_value();
  return get(EntryToIndex(entry) + 1);
}


// Caches |value| under the name array |array|; returns the (possibly grown)
// cache table.
MaybeObject* MapCache::Put(FixedArray* array, Map* value) {
  StringsKey
key(array);
  Object* obj;
  { MaybeObject* maybe_obj = EnsureCapacity(1, &key);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  MapCache* cache = reinterpret_cast<MapCache*>(obj);
  int entry = cache->FindInsertionEntry(key.Hash());
  cache->set(EntryToIndex(entry), array);
  cache->set(EntryToIndex(entry) + 1, value);
  cache->ElementAdded();
  return cache;
}


// Allocates a dictionary with room for |at_least_space_for| elements and
// seeds its next-enumeration-index counter.
template<typename Shape, typename Key>
MaybeObject* Dictionary<Shape, Key>::Allocate(Heap* heap,
                                              int at_least_space_for,
                                              PretenureFlag pretenure) {
  Object* obj;
  { MaybeObject* maybe_obj =
        HashTable<Shape, Key>::Allocate(
            heap,
            at_least_space_for,
            USE_DEFAULT_MINIMUM_CAPACITY,
            pretenure);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  // Initialize the next enumeration index.
  Dictionary<Shape, Key>::cast(obj)->
      SetNextEnumerationIndex(PropertyDetails::kInitialIndex);
  return obj;
}


// Handle-based, void wrapper over GenerateNewEnumerationIndices below.
void NameDictionary::DoGenerateNewEnumerationIndices(
    Handle<NameDictionary> dictionary) {
  CALL_HEAP_FUNCTION_VOID(dictionary->GetIsolate(),
                          dictionary->GenerateNewEnumerationIndices());
}

// Compacts the enumeration indices of all entries into the dense range
// [kInitialIndex, kInitialIndex + length) while preserving the existing
// relative enumeration order.  Allocates two scratch FixedArrays.
template<typename Shape, typename Key>
MaybeObject* Dictionary<Shape, Key>::GenerateNewEnumerationIndices() {
  Heap* heap = Dictionary<Shape, Key>::GetHeap();
  int length = HashTable<Shape, Key>::NumberOfElements();

  // Allocate and initialize iteration order array.
  Object* obj;
  { MaybeObject* maybe_obj = heap->AllocateFixedArray(length);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  FixedArray* iteration_order = FixedArray::cast(obj);
  for (int i = 0; i < length; i++) {
    iteration_order->set(i, Smi::FromInt(i));
  }

  // Allocate array with enumeration order.
  { MaybeObject* maybe_obj = heap->AllocateFixedArray(length);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  FixedArray* enumeration_order = FixedArray::cast(obj);

  // Fill the enumeration order array with property details.
  int capacity = HashTable<Shape, Key>::Capacity();
  int pos = 0;
  for (int i = 0; i < capacity; i++) {
    if (Dictionary<Shape, Key>::IsKey(Dictionary<Shape, Key>::KeyAt(i))) {
      int index = DetailsAt(i).dictionary_index();
      enumeration_order->set(pos++, Smi::FromInt(index));
    }
  }

  // Sort the arrays wrt. enumeration order.
  iteration_order->SortPairs(enumeration_order, enumeration_order->length());

  // Overwrite the enumeration_order with the enumeration indices.
  for (int i = 0; i < length; i++) {
    int index = Smi::cast(iteration_order->get(i))->value();
    int enum_index = PropertyDetails::kInitialIndex + i;
    enumeration_order->set(index, Smi::FromInt(enum_index));
  }

  // Update the dictionary with new indices.
  capacity = HashTable<Shape, Key>::Capacity();
  pos = 0;
  for (int i = 0; i < capacity; i++) {
    if (Dictionary<Shape, Key>::IsKey(Dictionary<Shape, Key>::KeyAt(i))) {
      int enum_index = Smi::cast(enumeration_order->get(pos++))->value();
      PropertyDetails details = DetailsAt(i);
      PropertyDetails new_details = PropertyDetails(
          details.attributes(), details.type(), enum_index);
      DetailsAtPut(i, new_details);
    }
  }

  // Set the next enumeration index.
  SetNextEnumerationIndex(PropertyDetails::kInitialIndex+length);
  return this;
}

// Like HashTable::EnsureCapacity, but first regenerates enumeration indices
// if adding n more entries would exhaust the valid index range.
template<typename Shape, typename Key>
MaybeObject* Dictionary<Shape, Key>::EnsureCapacity(int n, Key key) {
  // Check whether there are enough enumeration indices to add n elements.
  if (Shape::kIsEnumerable &&
      !PropertyDetails::IsValidIndex(NextEnumerationIndex() + n)) {
    // If not, we generate new indices for the properties.
    Object* result;
    { MaybeObject* maybe_result = GenerateNewEnumerationIndices();
      if (!maybe_result->ToObject(&result)) return maybe_result;
    }
  }
  return HashTable<Shape, Key>::EnsureCapacity(n, key);
}


// Deletes the property at |entry| by overwriting key and value with the
// hole.  Honors DONT_DELETE unless mode is FORCE_DELETION; returns the
// true/false oddball indicating success.
template<typename Shape, typename Key>
Object* Dictionary<Shape, Key>::DeleteProperty(int entry,
                                               JSReceiver::DeleteMode mode) {
  Heap* heap = Dictionary<Shape, Key>::GetHeap();
  PropertyDetails details = DetailsAt(entry);
  // Ignore attributes if forcing a deletion.
  if (details.IsDontDelete() && mode != JSReceiver::FORCE_DELETION) {
    return heap->false_value();
  }
  SetEntry(entry, heap->the_hole_value(), heap->the_hole_value());
  HashTable<Shape, Key>::ElementRemoved();
  return heap->true_value();
}


// Thin forwarder to the base-class table shrink.
template<typename Shape, typename Key>
MaybeObject* Dictionary<Shape, Key>::Shrink(Key key) {
  return HashTable<Shape, Key>::Shrink(key);
}


// Sets key -> value, overwriting in place when the key exists and inserting
// (with default NONE/NORMAL details) otherwise.  Returns the dictionary to
// use from now on, which may differ from |this| after growth.
template<typename Shape, typename Key>
MaybeObject* Dictionary<Shape, Key>::AtPut(Key key, Object* value) {
  int entry = this->FindEntry(key);

  // If the entry is present set the value.
  if (entry != Dictionary<Shape, Key>::kNotFound) {
    ValueAtPut(entry, value);
    return this;
  }

  // Check whether the dictionary should be extended.
  Object* obj;
  { MaybeObject* maybe_obj = EnsureCapacity(1, key);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  Object* k;
  { MaybeObject* maybe_k = Shape::AsObject(this->GetHeap(), key);
    if (!maybe_k->ToObject(&k)) return maybe_k;
  }
  PropertyDetails details = PropertyDetails(NONE, NORMAL, 0);

  return Dictionary<Shape, Key>::cast(obj)->AddEntry(key, value, details,
      Dictionary<Shape, Key>::Hash(key));
}


// Inserts a key that must not already be present.  Returns the dictionary to
// use from now on, which may differ from |this| after growth.
template<typename Shape, typename Key>
MaybeObject* Dictionary<Shape, Key>::Add(Key key,
                                         Object* value,
                                         PropertyDetails details) {
  // Validate key is absent.
  SLOW_ASSERT((this->FindEntry(key) == Dictionary<Shape, Key>::kNotFound));
  // Check whether the dictionary should be extended.
  Object* obj;
  { MaybeObject* maybe_obj = EnsureCapacity(1, key);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }

  return Dictionary<Shape, Key>::cast(obj)->AddEntry(key, value, details,
      Dictionary<Shape, Key>::Hash(key));
}


// Add a key, value pair to the dictionary.
template<typename Shape, typename Key>
MaybeObject* Dictionary<Shape, Key>::AddEntry(Key key,
                                              Object* value,
                                              PropertyDetails details,
                                              uint32_t hash) {
  // Compute the key object.
  Object* k;
  { MaybeObject* maybe_k = Shape::AsObject(this->GetHeap(), key);
    if (!maybe_k->ToObject(&k)) return maybe_k;
  }

  uint32_t entry = Dictionary<Shape, Key>::FindInsertionEntry(hash);
  // Insert element at empty or deleted entry.
  if (!details.IsDeleted() &&
      details.dictionary_index() == 0 &&
      Shape::kIsEnumerable) {
    // Assign an enumeration index to the property and update
    // SetNextEnumerationIndex.
    int index = NextEnumerationIndex();
    details = PropertyDetails(details.attributes(), details.type(), index);
    SetNextEnumerationIndex(index + 1);
  }
  SetEntry(entry, k, value, details);
  ASSERT((Dictionary<Shape, Key>::KeyAt(entry)->IsNumber() ||
          Dictionary<Shape, Key>::KeyAt(entry)->IsName()));
  HashTable<Shape, Key>::ElementAdded();
  return this;
}


// Tracks the largest numeric key seen so far; once a key exceeds
// kRequiresSlowElementsLimit the dictionary is permanently flagged as
// requiring slow elements and the max-key bookkeeping stops.
void SeededNumberDictionary::UpdateMaxNumberKey(uint32_t key) {
  // If the dictionary requires slow elements an element has already
  // been added at a high index.
  if (requires_slow_elements()) return;
  // Check if this index is high enough that we should require slow
  // elements.
  if (key > kRequiresSlowElementsLimit) {
    set_requires_slow_elements();
    return;
  }
  // Update max key value.
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi() || max_number_key() < key) {
    // The max key shares its slot with the requires-slow-elements tag, hence
    // the shift.
    FixedArray::set(kMaxNumberKeyIndex,
                    Smi::FromInt(key << kRequiresSlowElementsTagSize));
  }
}

// Handle-based wrapper over the raw AddNumberEntry below.
Handle<SeededNumberDictionary> SeededNumberDictionary::AddNumberEntry(
    Handle<SeededNumberDictionary> dictionary,
    uint32_t key,
    Handle<Object> value,
    PropertyDetails details) {
  CALL_HEAP_FUNCTION(dictionary->GetIsolate(),
                     dictionary->AddNumberEntry(key, *value, details),
                     SeededNumberDictionary);
}

// Inserts a numeric key that must not already be present, updating the
// max-key bookkeeping first.
MaybeObject* SeededNumberDictionary::AddNumberEntry(uint32_t key,
                                                    Object* value,
                                                    PropertyDetails details) {
  UpdateMaxNumberKey(key);
  SLOW_ASSERT(this->FindEntry(key) == kNotFound);
  return Add(key, value, details);
}


// As above, but for the unseeded variant with default details.
MaybeObject* UnseededNumberDictionary::AddNumberEntry(uint32_t key,
                                                      Object* value) {
  SLOW_ASSERT(this->FindEntry(key) == kNotFound);
  return Add(key, value, PropertyDetails(NONE, NORMAL, 0));
}


// Insert-or-overwrite for a numeric key, with max-key bookkeeping.
MaybeObject*
SeededNumberDictionary::AtNumberPut(uint32_t key, Object* value) {
  UpdateMaxNumberKey(key);
  return AtPut(key, value);
}


// Insert-or-overwrite for a numeric key (unseeded variant).
MaybeObject* UnseededNumberDictionary::AtNumberPut(uint32_t key,
                                                   Object* value) {
  return AtPut(key, value);
}


// Handle-based wrapper over the raw Set below.
Handle<SeededNumberDictionary> SeededNumberDictionary::Set(
    Handle<SeededNumberDictionary> dictionary,
    uint32_t index,
    Handle<Object> value,
    PropertyDetails details) {
  CALL_HEAP_FUNCTION(dictionary->GetIsolate(),
                     dictionary->Set(index, *value, details),
                     SeededNumberDictionary);
}


// Handle-based wrapper over the raw Set below.
Handle<UnseededNumberDictionary> UnseededNumberDictionary::Set(
    Handle<UnseededNumberDictionary> dictionary,
    uint32_t index,
    Handle<Object> value) {
  CALL_HEAP_FUNCTION(dictionary->GetIsolate(),
                     dictionary->Set(index, *value),
                     UnseededNumberDictionary);
}


// Sets key -> value, keeping the existing enumeration index when the key is
// already present.
MaybeObject* SeededNumberDictionary::Set(uint32_t key,
                                         Object* value,
                                         PropertyDetails details) {
  int entry = FindEntry(key);
  if (entry == kNotFound) return AddNumberEntry(key, value, details);
  // Preserve enumeration index.
  details = PropertyDetails(details.attributes(),
                            details.type(),
                            DetailsAt(entry).dictionary_index());
  MaybeObject* maybe_object_key =
      SeededNumberDictionaryShape::AsObject(GetHeap(), key);
  Object* object_key;
  if (!maybe_object_key->ToObject(&object_key)) return maybe_object_key;
  SetEntry(entry, object_key, value, details);
  return this;
}


// Sets key -> value, overwriting in place if present (unseeded variant).
MaybeObject* UnseededNumberDictionary::Set(uint32_t key,
                                           Object* value) {
  int entry = FindEntry(key);
  if (entry == kNotFound) return AddNumberEntry(key, value);
  MaybeObject* maybe_object_key =
      UnseededNumberDictionaryShape::AsObject(GetHeap(), key);
  Object* object_key;
  if (!maybe_object_key->ToObject(&object_key)) return maybe_object_key;
  SetEntry(entry, object_key, value);
  return this;
}



// Counts live (non-deleted) entries whose attributes pass |filter|; the
// SYMBOLIC bit additionally excludes symbol keys.
template<typename Shape, typename Key>
int Dictionary<Shape, Key>::NumberOfElementsFilterAttributes(
    PropertyAttributes filter) {
  int capacity = HashTable<Shape, Key>::Capacity();
  int result = 0;
  for (int i = 0; i < capacity; i++) {
    Object* k = HashTable<Shape, Key>::KeyAt(i);
    if (HashTable<Shape, Key>::IsKey(k) &&
        ((filter & SYMBOLIC) == 0 || !k->IsSymbol())) {
      PropertyDetails details = DetailsAt(i);
      if (details.IsDeleted()) continue;
      PropertyAttributes attr = details.attributes();
      if ((attr & filter) == 0) result++;
    }
  }
  return result;
}


// Number of enumerable (non-DONT_ENUM) live entries.
template<typename Shape, typename Key>
int Dictionary<Shape, Key>::NumberOfEnumElements() {
  return NumberOfElementsFilterAttributes(
      static_cast<PropertyAttributes>(DONT_ENUM));
}


// Copies keys passing |filter| into |storage| starting at index 0,
// optionally sorting the copied prefix.
template<typename Shape, typename Key>
void Dictionary<Shape, Key>::CopyKeysTo(
    FixedArray* storage,
    PropertyAttributes filter,
    typename Dictionary<Shape, Key>::SortMode sort_mode) {
  ASSERT(storage->length() >= NumberOfEnumElements());
  int capacity = HashTable<Shape, Key>::Capacity();
  int index = 0;
  for (int i = 0; i < capacity; i++) {
    Object* k = HashTable<Shape, Key>::KeyAt(i);
    if (HashTable<Shape, Key>::IsKey(k)) {
      PropertyDetails details = DetailsAt(i);
      if (details.IsDeleted()) continue;
      PropertyAttributes attr = details.attributes();
      if ((attr & filter) == 0) storage->set(index++, k);
    }
  }
  if (sort_mode == Dictionary<Shape, Key>::SORTED) {
    storage->SortPairs(storage, index);
  }
  ASSERT(storage->length() >= index);
}


// Copies enumerable keys into |storage| positioned by their enumeration
// index, then compacts out any holes; may return the empty fixed array or
// right-trim |storage| when fewer keys were written than its length.
FixedArray* NameDictionary::CopyEnumKeysTo(FixedArray* storage) {
  int length = storage->length();
  ASSERT(length >= NumberOfEnumElements());
  Heap* heap = GetHeap();
  Object* undefined_value = heap->undefined_value();
  int capacity = Capacity();
  int properties = 0;

  // Fill in the enumeration array by assigning enumerable keys at their
  // enumeration index. This will leave holes in the array if there are keys
  // that are deleted or not enumerable.
  for (int i = 0; i < capacity; i++) {
    Object* k = KeyAt(i);
    if (IsKey(k) && !k->IsSymbol()) {
      PropertyDetails details = DetailsAt(i);
      if (details.IsDeleted() || details.IsDontEnum()) continue;
      properties++;
      storage->set(details.dictionary_index() - 1, k);
      if (properties == length) break;
    }
  }

  // There are holes in the enumeration array if fewer properties were
  // assigned than the length of the array. If so, crunch all the existing
  // properties together by shifting them to the left (maintaining the
  // enumeration order), and trimming off the right side of the array.
  if (properties < length) {
    if (properties == 0) return heap->empty_fixed_array();
    properties = 0;
    for (int i = 0; i < length; ++i) {
      Object* value = storage->get(i);
      if (value != undefined_value) {
        storage->set(properties, value);
        ++properties;
      }
    }
    RightTrimFixedArray<FROM_MUTATOR>(heap, storage, length - properties);
  }
  return storage;
}


// As the two-argument CopyKeysTo above, but appends starting at |index|.
template<typename Shape, typename Key>
void Dictionary<Shape, Key>::CopyKeysTo(
    FixedArray* storage,
    int index,
    PropertyAttributes filter,
    typename Dictionary<Shape, Key>::SortMode sort_mode) {
  ASSERT(storage->length() >= NumberOfElementsFilterAttributes(
      static_cast<PropertyAttributes>(NONE)));
  int capacity = HashTable<Shape, Key>::Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = HashTable<Shape, Key>::KeyAt(i);
    if (HashTable<Shape, Key>::IsKey(k)) {
      PropertyDetails details = DetailsAt(i);
      if (details.IsDeleted()) continue;
      PropertyAttributes attr = details.attributes();
      if ((attr & filter) == 0) storage->set(index++, k);
    }
  }
  if (sort_mode == Dictionary<Shape, Key>::SORTED) {
    storage->SortPairs(storage, index);
  }
  ASSERT(storage->length() >= index);
}


// Backwards lookup (slow).
// Maps a value back to its key by scanning every slot — O(capacity).
template<typename Shape, typename Key>
Object* Dictionary<Shape, Key>::SlowReverseLookup(Object* value) {
  int capacity = HashTable<Shape, Key>::Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = HashTable<Shape, Key>::KeyAt(i);
    if (Dictionary<Shape, Key>::IsKey(k)) {
      Object* e = ValueAt(i);
      // Some values are boxed in a PropertyCell; compare the cell's payload,
      // not the cell itself.
      if (e->IsPropertyCell()) {
        e = PropertyCell::cast(e)->value();
      }
      if (e == value) return k;
    }
  }
  Heap* heap = Dictionary<Shape, Key>::GetHeap();
  return heap->undefined_value();
}


// Converts |obj| from dictionary-mode (slow) properties to fast properties
// backed by a descriptor array, keeping |unused_property_fields| slots of
// headroom.  Returns |obj| unchanged when fast mode is not worthwhile, or an
// allocation failure that the caller must retry.
MaybeObject* NameDictionary::TransformPropertiesToFastFor(
    JSObject* obj, int unused_property_fields) {
  // Make sure we preserve dictionary representation if there are too many
  // descriptors.
  int number_of_elements = NumberOfElements();
  if (number_of_elements > kMaxNumberOfDescriptors) return obj;

  // Descriptor slots are assigned from the dictionary enumeration indices
  // below, so those must be dense (0..n-1) before we proceed.
  if (number_of_elements != NextEnumerationIndex()) {
    MaybeObject* maybe_result = GenerateNewEnumerationIndices();
    if (maybe_result->IsFailure()) return maybe_result;
  }

  int instance_descriptor_length = 0;
  int number_of_fields = 0;

  Heap* heap = GetHeap();

  // Compute the length of the instance descriptor.
  int capacity = Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = KeyAt(i);
    if (IsKey(k)) {
      Object* value = ValueAt(i);
      PropertyType type = DetailsAt(i).type();
      ASSERT(type != FIELD);
      instance_descriptor_length++;
      // JSFunction values become constant descriptors, not fields.
      if (type == NORMAL && !value->IsJSFunction()) {
        number_of_fields += 1;
      }
    }
  }

  int inobject_props = obj->map()->inobject_properties();

  // Allocate new map.
  Map* new_map;
  MaybeObject* maybe_new_map = obj->map()->CopyDropDescriptors();
  if (!maybe_new_map->To(&new_map)) return maybe_new_map;
  new_map->set_dictionary_map(false);

  if (instance_descriptor_length == 0) {
    ASSERT_LE(unused_property_fields, inobject_props);
    // Transform the object.
    new_map->set_unused_property_fields(inobject_props);
    obj->set_map(new_map);
    obj->set_properties(heap->empty_fixed_array());
    // Check that it really works.
    ASSERT(obj->HasFastProperties());
    return obj;
  }

  // Allocate the instance descriptor.
  DescriptorArray* descriptors;
  MaybeObject* maybe_descriptors =
      DescriptorArray::Allocate(GetIsolate(), instance_descriptor_length);
  if (!maybe_descriptors->To(&descriptors)) {
    return maybe_descriptors;
  }

  DescriptorArray::WhitenessWitness witness(descriptors);

  // Fields that do not fit in-object spill into a separate backing array.
  int number_of_allocated_fields =
      number_of_fields + unused_property_fields - inobject_props;
  if (number_of_allocated_fields < 0) {
    // There is enough inobject space for all fields (including unused).
    number_of_allocated_fields = 0;
    unused_property_fields = inobject_props - number_of_fields;
  }

  // Allocate the fixed array for the fields.
  FixedArray* fields;
  MaybeObject* maybe_fields =
      heap->AllocateFixedArray(number_of_allocated_fields);
  if (!maybe_fields->To(&fields)) return maybe_fields;

  // Fill in the instance descriptor and the fields.
  int current_offset = 0;
  for (int i = 0; i < capacity; i++) {
    Object* k = KeyAt(i);
    if (IsKey(k)) {
      Object* value = ValueAt(i);
      Name* key;
      if (k->IsSymbol()) {
        key = Symbol::cast(k);
      } else {
        // Ensure the key is a unique name before writing into the
        // instance descriptor.
        MaybeObject* maybe_key = heap->InternalizeString(String::cast(k));
        if (!maybe_key->To(&key)) return maybe_key;
      }

      PropertyDetails details = DetailsAt(i);
      // Enumeration indices are 1-based; descriptor slots are 0-based.
      int enumeration_index = details.dictionary_index();
      PropertyType type = details.type();

      if (value->IsJSFunction()) {
        ConstantDescriptor d(key, value, details.attributes());
        descriptors->Set(enumeration_index - 1, &d, witness);
      } else if (type == NORMAL) {
        if (current_offset < inobject_props) {
          obj->InObjectPropertyAtPut(current_offset,
                                     value,
                                     UPDATE_WRITE_BARRIER);
        } else {
          int offset = current_offset - inobject_props;
          fields->set(offset, value);
        }
        FieldDescriptor d(key,
                          current_offset++,
                          details.attributes(),
                          // TODO(verwaest): value->OptimalRepresentation();
                          Representation::Tagged());
        descriptors->Set(enumeration_index - 1, &d, witness);
      } else if (type == CALLBACKS) {
        CallbacksDescriptor d(key,
                              value,
                              details.attributes());
        descriptors->Set(enumeration_index - 1, &d, witness);
      } else {
        UNREACHABLE();
      }
    }
  }
  ASSERT(current_offset == number_of_fields);

  descriptors->Sort();

  new_map->InitializeDescriptors(descriptors);
  new_map->set_unused_property_fields(unused_property_fields);

  // Transform the object.
  obj->set_map(new_map);

  obj->set_properties(fields);
  ASSERT(obj->IsJSObject());

  // Check that it really works.
  ASSERT(obj->HasFastProperties());

  return obj;
}


// Handlified wrapper: grows the set for |n| more entries, retrying through
// CALL_HEAP_FUNCTION when the underlying allocation fails.
Handle<ObjectHashSet> ObjectHashSet::EnsureCapacity(
    Handle<ObjectHashSet> table,
    int n,
    Handle<Object> key,
    PretenureFlag pretenure) {
  Handle<HashTable<ObjectHashTableShape<1>, Object*> > table_base = table;
  CALL_HEAP_FUNCTION(table_base->GetIsolate(),
                     table_base->EnsureCapacity(n, *key, pretenure),
                     ObjectHashSet);
}


// Handlified wrapper around HashTable::Shrink.
Handle<ObjectHashSet> ObjectHashSet::Shrink(Handle<ObjectHashSet> table,
                                            Handle<Object> key) {
  Handle<HashTable<ObjectHashTableShape<1>, Object*> > table_base = table;
  CALL_HEAP_FUNCTION(table_base->GetIsolate(),
                     table_base->Shrink(*key),
                     ObjectHashSet);
}


// Membership test by identity hash.
bool ObjectHashSet::Contains(Object* key) {
  ASSERT(IsKey(key));

  // If the object does not have an identity hash, it was never used as a key.
  Object* hash = key->GetHash();
  if (hash->IsUndefined()) return false;

  return (FindEntry(key) != kNotFound);
}


// Inserts |key| (no-op when already present), returning the possibly
// reallocated set.
Handle<ObjectHashSet> ObjectHashSet::Add(Handle<ObjectHashSet> table,
                                         Handle<Object> key) {
  ASSERT(table->IsKey(*key));

  // Make sure the key object has an identity hash code.
  Handle<Object> object_hash = Object::GetOrCreateHash(key,
                                                       table->GetIsolate());

  int entry = table->FindEntry(*key);

  // Check whether key is already present.
  if (entry != kNotFound) return table;

  // Check whether the hash set should be extended and add entry.
  // Grow (if needed) first, then insert into the possibly new table.
  Handle<ObjectHashSet> new_table =
      ObjectHashSet::EnsureCapacity(table, 1, key);
  entry = new_table->FindInsertionEntry(Smi::cast(*object_hash)->value());
  new_table->set(EntryToIndex(entry), *key);
  new_table->ElementAdded();
  return new_table;
}


// Removes |key| if present, shrinking the backing store when appropriate.
Handle<ObjectHashSet> ObjectHashSet::Remove(Handle<ObjectHashSet> table,
                                            Handle<Object> key) {
  ASSERT(table->IsKey(*key));

  // If the object does not have an identity hash, it was never used as a key.
  if (key->GetHash()->IsUndefined()) return table;

  int entry = table->FindEntry(*key);

  // Check whether key is actually present.
  if (entry == kNotFound) return table;

  // Remove entry and try to shrink this hash set.
  table->set_the_hole(EntryToIndex(entry));
  table->ElementRemoved();

  return ObjectHashSet::Shrink(table, key);
}


// Handlified wrapper: grows the table for |n| more entries, retrying through
// CALL_HEAP_FUNCTION when the underlying allocation fails.
Handle<ObjectHashTable> ObjectHashTable::EnsureCapacity(
    Handle<ObjectHashTable> table,
    int n,
    Handle<Object> key,
    PretenureFlag pretenure) {
  Handle<HashTable<ObjectHashTableShape<2>, Object*> > table_base = table;
  CALL_HEAP_FUNCTION(table_base->GetIsolate(),
                     table_base->EnsureCapacity(n, *key, pretenure),
                     ObjectHashTable);
}


// Handlified wrapper around HashTable::Shrink.
Handle<ObjectHashTable> ObjectHashTable::Shrink(
    Handle<ObjectHashTable> table, Handle<Object> key) {
  Handle<HashTable<ObjectHashTableShape<2>, Object*> > table_base = table;
  CALL_HEAP_FUNCTION(table_base->GetIsolate(),
                     table_base->Shrink(*key),
                     ObjectHashTable);
}


// Returns the value mapped to |key|, or the hole value when absent.
Object* ObjectHashTable::Lookup(Object* key) {
  ASSERT(IsKey(key));

  // If the object does not have an identity hash, it was never used as a key.
  Object* hash = key->GetHash();
  if (hash->IsUndefined()) {
    return GetHeap()->the_hole_value();
  }
  int entry = FindEntry(key);
  if (entry == kNotFound) return GetHeap()->the_hole_value();
  // The value lives in the slot directly after its key.
  return get(EntryToIndex(entry) + 1);
}


// Maps |key| to |value|.  Storing the hole removes the key instead.  Returns
// the possibly reallocated table.
Handle<ObjectHashTable> ObjectHashTable::Put(Handle<ObjectHashTable> table,
                                             Handle<Object> key,
                                             Handle<Object> value) {
  ASSERT(table->IsKey(*key));

  Isolate* isolate = table->GetIsolate();

  // Make sure the key object has an identity hash code.
  Handle<Object> hash = Object::GetOrCreateHash(key, isolate);

  int entry = table->FindEntry(*key);

  // Check whether to perform removal operation.
  if (value->IsTheHole()) {
    if (entry == kNotFound) return table;
    table->RemoveEntry(entry);
    return Shrink(table, key);
  }

  // Key is already in table, just overwrite value.
  if (entry != kNotFound) {
    table->set(EntryToIndex(entry) + 1, *value);
    return table;
  }

  // Check whether the hash table should be extended.
  table = EnsureCapacity(table, 1, key);
  table->AddEntry(table->FindInsertionEntry(Handle<Smi>::cast(hash)->value()),
                  *key,
                  *value);
  return table;
}


// Writes a key/value pair into a free entry and bumps the element count.
void ObjectHashTable::AddEntry(int entry, Object* key, Object* value) {
  set(EntryToIndex(entry), key);
  set(EntryToIndex(entry) + 1, value);
  ElementAdded();
}


// Clears both slots of an entry and decrements the element count.
void ObjectHashTable::RemoveEntry(int entry) {
  set_the_hole(EntryToIndex(entry));
  set_the_hole(EntryToIndex(entry) + 1);
  ElementRemoved();
}


// Returns the value for |key|, or the hole value when absent.
Object* WeakHashTable::Lookup(Object* key) {
  ASSERT(IsKey(key));
  int entry = FindEntry(key);
  if (entry == kNotFound) return GetHeap()->the_hole_value();
  return get(EntryToValueIndex(entry));
}


// Raw (non-handlified) insert/update; may return an allocation failure from
// EnsureCapacity which the caller must handle/retry.
MaybeObject* WeakHashTable::Put(Object* key, Object* value) {
  ASSERT(IsKey(key));
  int entry = FindEntry(key);
  // Key is already in table, just overwrite value.
  if (entry != kNotFound) {
    set(EntryToValueIndex(entry), value);
    return this;
  }

  // Check whether the hash table should be extended.
  // Grow the table (tenured — weak tables are long-lived) and insert into
  // the possibly new copy.
  Object* obj;
  { MaybeObject* maybe_obj = EnsureCapacity(1, key, TENURED);
    if (!maybe_obj->ToObject(&obj)) return maybe_obj;
  }
  WeakHashTable* table = WeakHashTable::cast(obj);
  table->AddEntry(table->FindInsertionEntry(Hash(key)), key, value);
  return table;
}


// Writes a key/value pair into a free entry and bumps the element count.
void WeakHashTable::AddEntry(int entry, Object* key, Object* value) {
  set(EntryToIndex(entry), key);
  set(EntryToValueIndex(entry), value);
  ElementAdded();
}


// Iterates over the fixed-size records serialized into a descriptor's
// byte array.
DeclaredAccessorDescriptorIterator::DeclaredAccessorDescriptorIterator(
    DeclaredAccessorDescriptor* descriptor)
    : array_(descriptor->serialized_data()->GetDataStartAddress()),
      length_(descriptor->serialized_data()->length()),
      offset_(0) {
}


// Returns the next serialized record and advances the cursor.
// NOTE(review): |array_| is a raw pointer into the heap, so this is only
// valid while no GC can move the byte array — confirm callers guarantee it.
const DeclaredAccessorDescriptorData*
DeclaredAccessorDescriptorIterator::Next() {
  ASSERT(offset_ < length_);
  uint8_t* ptr = &array_[offset_];
  ASSERT(reinterpret_cast<uintptr_t>(ptr) % sizeof(uintptr_t) == 0);
  const DeclaredAccessorDescriptorData* data =
      reinterpret_cast<const DeclaredAccessorDescriptorData*>(ptr);
  offset_ += sizeof(*data);
  ASSERT(offset_ <= length_);
  return data;
}


// Creates a new descriptor whose serialized data is |previous|'s data (when
// non-null) with |descriptor| appended.
Handle<DeclaredAccessorDescriptor> DeclaredAccessorDescriptor::Create(
    Isolate* isolate,
    const DeclaredAccessorDescriptorData& descriptor,
    Handle<DeclaredAccessorDescriptor> previous) {
  int previous_length =
      previous.is_null() ? 0 : previous->serialized_data()->length();
  int length = sizeof(descriptor) + previous_length;
  Handle<ByteArray> serialized_descriptor =
      isolate->factory()->NewByteArray(length);
  Handle<DeclaredAccessorDescriptor> value =
      isolate->factory()->NewDeclaredAccessorDescriptor();
  value->set_serialized_data(*serialized_descriptor);
  // Copy in the data.
  {
    // Raw pointers into the heap are used below, so allocation (and hence
    // object motion) must be disabled for the duration of the copy.
    DisallowHeapAllocation no_allocation;
    uint8_t* array = serialized_descriptor->GetDataStartAddress();
    if (previous_length != 0) {
      uint8_t* previous_array =
          previous->serialized_data()->GetDataStartAddress();
      OS::MemCopy(array, previous_array, previous_length);
      array += previous_length;
    }
    ASSERT(reinterpret_cast<uintptr_t>(array) % sizeof(uintptr_t) == 0);
    DeclaredAccessorDescriptorData* data =
        reinterpret_cast<DeclaredAccessorDescriptorData*>(array);
    *data = descriptor;
  }
  return value;
}


#ifdef ENABLE_DEBUGGER_SUPPORT
// Check if there is a break point at this code position.
bool DebugInfo::HasBreakPoint(int code_position) {
  // Get the break point info object for this code position.
  Object* break_point_info = GetBreakPointInfo(code_position);

  // If there is no break point info object or no break points in the break
  // point info object there is no break point at this code position.
  if (break_point_info->IsUndefined()) return false;
  return BreakPointInfo::cast(break_point_info)->GetBreakPointCount() > 0;
}


// Get the break point info object for this code position.
Object* DebugInfo::GetBreakPointInfo(int code_position) {
  // Find the index of the break point info object for this code position.
  int index = GetBreakPointInfoIndex(code_position);

  // Return the break point info object if any.
  if (index == kNoBreakPointInfo) return GetHeap()->undefined_value();
  return BreakPointInfo::cast(break_points()->get(index));
}


// Clear a break point at the specified code position.
// Removes |break_point_object| from the BreakPointInfo (if any) registered
// at |code_position|.
void DebugInfo::ClearBreakPoint(Handle<DebugInfo> debug_info,
                                int code_position,
                                Handle<Object> break_point_object) {
  Handle<Object> break_point_info(debug_info->GetBreakPointInfo(code_position),
                                  debug_info->GetIsolate());
  if (break_point_info->IsUndefined()) return;
  BreakPointInfo::ClearBreakPoint(
      Handle<BreakPointInfo>::cast(break_point_info),
      break_point_object);
}


// Registers |break_point_object| at |code_position|, allocating a new
// BreakPointInfo entry (and growing the break_points array) if needed.
void DebugInfo::SetBreakPoint(Handle<DebugInfo> debug_info,
                              int code_position,
                              int source_position,
                              int statement_position,
                              Handle<Object> break_point_object) {
  Isolate* isolate = debug_info->GetIsolate();
  Handle<Object> break_point_info(debug_info->GetBreakPointInfo(code_position),
                                  isolate);
  // Fast path: a BreakPointInfo already exists for this position.
  if (!break_point_info->IsUndefined()) {
    BreakPointInfo::SetBreakPoint(
        Handle<BreakPointInfo>::cast(break_point_info),
        break_point_object);
    return;
  }

  // Adding a new break point for a code position which did not have any
  // break points before. Try to find a free slot.
  int index = kNoBreakPointInfo;
  for (int i = 0; i < debug_info->break_points()->length(); i++) {
    if (debug_info->break_points()->get(i)->IsUndefined()) {
      index = i;
      break;
    }
  }
  if (index == kNoBreakPointInfo) {
    // No free slot - extend break point info array.
    Handle<FixedArray> old_break_points =
        Handle<FixedArray>(FixedArray::cast(debug_info->break_points()));
    Handle<FixedArray> new_break_points =
        isolate->factory()->NewFixedArray(
            old_break_points->length() +
            Debug::kEstimatedNofBreakPointsInFunction);

    debug_info->set_break_points(*new_break_points);
    for (int i = 0; i < old_break_points->length(); i++) {
      new_break_points->set(i, old_break_points->get(i));
    }
    index = old_break_points->length();
  }
  ASSERT(index != kNoBreakPointInfo);

  // Allocate new BreakPointInfo object and set the break point.
  Handle<BreakPointInfo> new_break_point_info = Handle<BreakPointInfo>::cast(
      isolate->factory()->NewStruct(BREAK_POINT_INFO_TYPE));
  new_break_point_info->set_code_position(Smi::FromInt(code_position));
  new_break_point_info->set_source_position(Smi::FromInt(source_position));
  new_break_point_info->
      set_statement_position(Smi::FromInt(statement_position));
  new_break_point_info->set_break_point_objects(
      isolate->heap()->undefined_value());
  BreakPointInfo::SetBreakPoint(new_break_point_info, break_point_object);
  debug_info->break_points()->set(index, *new_break_point_info);
}


// Get the break point objects for a code position.
Object* DebugInfo::GetBreakPointObjects(int code_position) {
  Object* break_point_info = GetBreakPointInfo(code_position);
  if (break_point_info->IsUndefined()) {
    return GetHeap()->undefined_value();
  }
  return BreakPointInfo::cast(break_point_info)->break_point_objects();
}


// Get the total number of break points.
16291 int DebugInfo::GetBreakPointCount() { 16292 if (break_points()->IsUndefined()) return 0; 16293 int count = 0; 16294 for (int i = 0; i < break_points()->length(); i++) { 16295 if (!break_points()->get(i)->IsUndefined()) { 16296 BreakPointInfo* break_point_info = 16297 BreakPointInfo::cast(break_points()->get(i)); 16298 count += break_point_info->GetBreakPointCount(); 16299 } 16300 } 16301 return count; 16302 } 16303 16304 16305 Object* DebugInfo::FindBreakPointInfo(Handle<DebugInfo> debug_info, 16306 Handle<Object> break_point_object) { 16307 Heap* heap = debug_info->GetHeap(); 16308 if (debug_info->break_points()->IsUndefined()) return heap->undefined_value(); 16309 for (int i = 0; i < debug_info->break_points()->length(); i++) { 16310 if (!debug_info->break_points()->get(i)->IsUndefined()) { 16311 Handle<BreakPointInfo> break_point_info = 16312 Handle<BreakPointInfo>(BreakPointInfo::cast( 16313 debug_info->break_points()->get(i))); 16314 if (BreakPointInfo::HasBreakPointObject(break_point_info, 16315 break_point_object)) { 16316 return *break_point_info; 16317 } 16318 } 16319 } 16320 return heap->undefined_value(); 16321 } 16322 16323 16324 // Find the index of the break point info object for the specified code 16325 // position. 16326 int DebugInfo::GetBreakPointInfoIndex(int code_position) { 16327 if (break_points()->IsUndefined()) return kNoBreakPointInfo; 16328 for (int i = 0; i < break_points()->length(); i++) { 16329 if (!break_points()->get(i)->IsUndefined()) { 16330 BreakPointInfo* break_point_info = 16331 BreakPointInfo::cast(break_points()->get(i)); 16332 if (break_point_info->code_position()->value() == code_position) { 16333 return i; 16334 } 16335 } 16336 } 16337 return kNoBreakPointInfo; 16338 } 16339 16340 16341 // Remove the specified break point object. 
// Removes |break_point_object| from this info's break point list, handling
// the three storage shapes: undefined (none), a single object, or a
// FixedArray of objects.
void BreakPointInfo::ClearBreakPoint(Handle<BreakPointInfo> break_point_info,
                                     Handle<Object> break_point_object) {
  Isolate* isolate = break_point_info->GetIsolate();
  // If there are no break points just ignore.
  if (break_point_info->break_point_objects()->IsUndefined()) return;
  // If there is a single break point clear it if it is the same.
  if (!break_point_info->break_point_objects()->IsFixedArray()) {
    if (break_point_info->break_point_objects() == *break_point_object) {
      break_point_info->set_break_point_objects(
          isolate->heap()->undefined_value());
    }
    return;
  }
  // If there are multiple break points shrink the array
  ASSERT(break_point_info->break_point_objects()->IsFixedArray());
  Handle<FixedArray> old_array =
      Handle<FixedArray>(
          FixedArray::cast(break_point_info->break_point_objects()));
  Handle<FixedArray> new_array =
      isolate->factory()->NewFixedArray(old_array->length() - 1);
  int found_count = 0;
  for (int i = 0; i < old_array->length(); i++) {
    if (old_array->get(i) == *break_point_object) {
      // Each object is expected to appear at most once in the list.
      ASSERT(found_count == 0);
      found_count++;
    } else {
      new_array->set(i - found_count, old_array->get(i));
    }
  }
  // If the break point was found in the list change it.
  if (found_count > 0) break_point_info->set_break_point_objects(*new_array);
}


// Add the specified break point object.  Storage is upgraded lazily:
// undefined -> single object -> FixedArray of objects.
void BreakPointInfo::SetBreakPoint(Handle<BreakPointInfo> break_point_info,
                                   Handle<Object> break_point_object) {
  Isolate* isolate = break_point_info->GetIsolate();

  // If there was no break point objects before just set it.
  if (break_point_info->break_point_objects()->IsUndefined()) {
    break_point_info->set_break_point_objects(*break_point_object);
    return;
  }
  // If the break point object is the same as before just ignore.
  if (break_point_info->break_point_objects() == *break_point_object) return;
  // If there was one break point object before replace with array.
  if (!break_point_info->break_point_objects()->IsFixedArray()) {
    Handle<FixedArray> array = isolate->factory()->NewFixedArray(2);
    array->set(0, break_point_info->break_point_objects());
    array->set(1, *break_point_object);
    break_point_info->set_break_point_objects(*array);
    return;
  }
  // If there was more than one break point before extend array.
  Handle<FixedArray> old_array =
      Handle<FixedArray>(
          FixedArray::cast(break_point_info->break_point_objects()));
  Handle<FixedArray> new_array =
      isolate->factory()->NewFixedArray(old_array->length() + 1);
  for (int i = 0; i < old_array->length(); i++) {
    // If the break point was there before just ignore.
    if (old_array->get(i) == *break_point_object) return;
    new_array->set(i, old_array->get(i));
  }
  // Add the new break point.
  new_array->set(old_array->length(), *break_point_object);
  break_point_info->set_break_point_objects(*new_array);
}


// Returns true when |break_point_object| is registered on this info.
bool BreakPointInfo::HasBreakPointObject(
    Handle<BreakPointInfo> break_point_info,
    Handle<Object> break_point_object) {
  // No break point.
  if (break_point_info->break_point_objects()->IsUndefined()) return false;
  // Single break point.
  if (!break_point_info->break_point_objects()->IsFixedArray()) {
    return break_point_info->break_point_objects() == *break_point_object;
  }
  // Multiple break points.
  FixedArray* array = FixedArray::cast(break_point_info->break_point_objects());
  for (int i = 0; i < array->length(); i++) {
    if (array->get(i) == *break_point_object) {
      return true;
    }
  }
  return false;
}


// Get the number of break points.
// Counts registered break point objects (0, 1, or array length).
int BreakPointInfo::GetBreakPointCount() {
  // No break point.
  if (break_point_objects()->IsUndefined()) return 0;
  // Single break point.
  if (!break_point_objects()->IsFixedArray()) return 1;
  // Multiple break points.
  return FixedArray::cast(break_point_objects())->length();
}
#endif  // ENABLE_DEBUGGER_SUPPORT


// Entry point used with a Smi-encoded field index; forwards to DoGetField.
Object* JSDate::GetField(Object* object, Smi* index) {
  return JSDate::cast(object)->DoGetField(
      static_cast<FieldIndex>(index->value()));
}


// Returns the requested date component.  Local-time components below
// kFirstUncachedField are served from per-object cached fields that are
// refreshed when the DateCache stamp has changed.
Object* JSDate::DoGetField(FieldIndex index) {
  ASSERT(index != kDateValue);

  DateCache* date_cache = GetIsolate()->date_cache();

  if (index < kFirstUncachedField) {
    Object* stamp = cache_stamp();
    // A stale Smi stamp means the cached fields were computed for an older
    // time zone configuration and must be recomputed.
    if (stamp != date_cache->stamp() && stamp->IsSmi()) {
      // Since the stamp is not NaN, the value is also not NaN.
      int64_t local_time_ms =
          date_cache->ToLocal(static_cast<int64_t>(value()->Number()));
      SetLocalFields(local_time_ms, date_cache);
    }
    switch (index) {
      case kYear: return year();
      case kMonth: return month();
      case kDay: return day();
      case kWeekday: return weekday();
      case kHour: return hour();
      case kMinute: return min();
      case kSecond: return sec();
      default: UNREACHABLE();
    }
  }

  if (index >= kFirstUTCField) {
    return GetUTCField(index, value()->Number(), date_cache);
  }

  double time = value()->Number();
  if (std::isnan(time)) return GetIsolate()->heap()->nan_value();

  int64_t local_time_ms = date_cache->ToLocal(static_cast<int64_t>(time));
  int days = DateCache::DaysFromTime(local_time_ms);

  if (index == kDays) return Smi::FromInt(days);

  int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
  if (index == kMillisecond) return Smi::FromInt(time_in_day_ms % 1000);
  ASSERT(index == kTimeInDay);
  return Smi::FromInt(time_in_day_ms);
}


// Computes a UTC date component directly from |value| (time in ms).
Object* JSDate::GetUTCField(FieldIndex index,
                            double value,
                            DateCache* date_cache) {
  ASSERT(index >= kFirstUTCField);

  if (std::isnan(value)) return GetIsolate()->heap()->nan_value();

  int64_t time_ms = static_cast<int64_t>(value);

  if (index == kTimezoneOffset) {
    return Smi::FromInt(date_cache->TimezoneOffset(time_ms));
  }

  int days = DateCache::DaysFromTime(time_ms);

  if (index == kWeekdayUTC) return Smi::FromInt(date_cache->Weekday(days));

  if (index <= kDayUTC) {
    int year, month, day;
    date_cache->YearMonthDayFromDays(days, &year, &month, &day);
    if (index == kYearUTC) return Smi::FromInt(year);
    if (index == kMonthUTC) return Smi::FromInt(month);
    ASSERT(index == kDayUTC);
    return Smi::FromInt(day);
  }

  int time_in_day_ms = DateCache::TimeInDay(time_ms, days);
  switch (index) {
    case kHourUTC: return Smi::FromInt(time_in_day_ms / (60 * 60 * 1000));
    case kMinuteUTC: return Smi::FromInt((time_in_day_ms / (60 * 1000)) % 60);
    case kSecondUTC: return Smi::FromInt((time_in_day_ms / 1000) % 60);
    case kMillisecondUTC: return Smi::FromInt(time_in_day_ms % 1000);
    case kDaysUTC: return Smi::FromInt(days);
    case kTimeInDayUTC: return Smi::FromInt(time_in_day_ms);
    default: UNREACHABLE();
  }

  UNREACHABLE();
  return NULL;
}


// Stores a new time value and invalidates the cached local-time fields.
void JSDate::SetValue(Object* value, bool is_value_nan) {
  set_value(value);
  if (is_value_nan) {
    HeapNumber* nan = GetIsolate()->heap()->nan_value();
    // NaN marks every cached field invalid.  NOTE(review): write barriers
    // are skipped here — presumably nan_value is a heap root that never
    // moves; confirm before changing.
    set_cache_stamp(nan, SKIP_WRITE_BARRIER);
    set_year(nan, SKIP_WRITE_BARRIER);
    set_month(nan, SKIP_WRITE_BARRIER);
    set_day(nan, SKIP_WRITE_BARRIER);
    set_hour(nan, SKIP_WRITE_BARRIER);
    set_min(nan, SKIP_WRITE_BARRIER);
    set_sec(nan, SKIP_WRITE_BARRIER);
    set_weekday(nan, SKIP_WRITE_BARRIER);
  } else {
    // Non-NaN value: just invalidate the stamp so the cached local fields
    // are recomputed lazily on the next access.
    set_cache_stamp(Smi::FromInt(DateCache::kInvalidStamp), SKIP_WRITE_BARRIER);
  }
}


// Recomputes and caches all local-time components for |local_time_ms|,
// stamping the cache with the DateCache's current stamp.
void JSDate::SetLocalFields(int64_t local_time_ms, DateCache* date_cache) {
  int days = DateCache::DaysFromTime(local_time_ms);
  int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
  int year, month, day;
  date_cache->YearMonthDayFromDays(days, &year, &month, &day);
  int weekday = date_cache->Weekday(days);
  int hour = time_in_day_ms / (60 * 60 * 1000);
  int min = (time_in_day_ms / (60 * 1000)) % 60;
  int sec = (time_in_day_ms / 1000) % 60;
  set_cache_stamp(date_cache->stamp());
  // Smis never move, so write barriers can be skipped.
  set_year(Smi::FromInt(year), SKIP_WRITE_BARRIER);
  set_month(Smi::FromInt(month), SKIP_WRITE_BARRIER);
  set_day(Smi::FromInt(day), SKIP_WRITE_BARRIER);
  set_weekday(Smi::FromInt(weekday), SKIP_WRITE_BARRIER);
  set_hour(Smi::FromInt(hour), SKIP_WRITE_BARRIER);
  set_min(Smi::FromInt(min), SKIP_WRITE_BARRIER);
  set_sec(Smi::FromInt(sec), SKIP_WRITE_BARRIER);
}


// Detaches an externally backed array buffer from its backing store.
void JSArrayBuffer::Neuter() {
  ASSERT(is_external());
  set_backing_store(NULL);
  set_byte_length(Smi::FromInt(0));
}


// Zeroes this view's window into its (neutered) buffer.
void JSArrayBufferView::NeuterView() {
  set_byte_offset(Smi::FromInt(0));
  set_byte_length(Smi::FromInt(0));
}


void JSDataView::Neuter() {
  NeuterView();
}


// Typed arrays additionally drop their length and swap in an empty
// external-elements backing store for their element kind.
void JSTypedArray::Neuter() {
  NeuterView();
  set_length(Smi::FromInt(0));
  set_elements(GetHeap()->EmptyExternalArrayForMap(map()));
}


// The cell's tracked type is stored in a raw field; cast on access.
Type* PropertyCell::type() {
  return static_cast<Type*>(type_raw());
}


void PropertyCell::set_type(Type* type, WriteBarrierMode ignored) {
  ASSERT(IsPropertyCell());
  set_type_raw(type, ignored);
}


// Computes the type the cell should track after |value| is stored, and
// deoptimizes code that depended on the old type when the type changes.
Handle<Type>
PropertyCell::UpdatedType(Handle<PropertyCell> cell,
                          Handle<Object> value) {
  Isolate* isolate = cell->GetIsolate();
  Handle<Type> old_type(cell->type(), isolate);
  // TODO(2803): Do not track ConsString as constant because they cannot be
  // embedded into code.
  Handle<Type> new_type(value->IsConsString() || value->IsTheHole()
                        ? Type::Any()
                        : Type::Constant(value, isolate), isolate);

  if (new_type->Is(old_type)) {
    return old_type;
  }

  // The tracked type is changing, so any optimized code specialized on the
  // old type is now invalid.
  cell->dependent_code()->DeoptimizeDependentCodeGroup(
      isolate, DependentCode::kPropertyCellChangedGroup);

  // Going from "no useful type yet" straight to the new type; otherwise
  // widen all the way to Any.
  if (old_type->Is(Type::None()) || old_type->Is(Type::Undefined())) {
    return new_type;
  }

  return handle(Type::Any(), isolate);
}


// Stores |value| in the cell and widens the tracked type if necessary.
void PropertyCell::SetValueInferType(Handle<PropertyCell> cell,
                                     Handle<Object> value) {
  cell->set_value(*value);
  if (!Type::Any()->Is(cell->type())) {
    Handle<Type> new_type = UpdatedType(cell, value);
    cell->set_type(*new_type);
  }
}


// Registers the compilation in |info| as dependent on this cell's value.
void PropertyCell::AddDependentCompilationInfo(CompilationInfo* info) {
  Handle<DependentCode> dep(dependent_code());
  Handle<DependentCode> codes =
      DependentCode::Insert(dep, DependentCode::kPropertyCellChangedGroup,
                            info->object_wrapper());
  // Insert may return a new (grown) DependentCode array.
  if (*codes != dependent_code()) set_dependent_code(*codes);
  info->dependencies(DependentCode::kPropertyCellChangedGroup)->Add(
      Handle<HeapObject>(this), info->zone());
}


// Registers an already-compiled code object as dependent on this cell.
void PropertyCell::AddDependentCode(Handle<Code> code) {
  Handle<DependentCode> codes = DependentCode::Insert(
      Handle<DependentCode>(dependent_code()),
      DependentCode::kPropertyCellChangedGroup, code);
  if (*codes != dependent_code()) set_dependent_code(*codes);
}


// Maps a BailoutReason enum value to its human-readable message text.
const char* GetBailoutReason(BailoutReason reason) {
  ASSERT(reason < kLastErrorMessage);
#define ERROR_MESSAGES_TEXTS(C, T) T,
  static const char* error_messages_[] = {
      ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS)
  };
#undef ERROR_MESSAGES_TEXTS
  return error_messages_[reason];
}


} }  // namespace v8::internal