1 // Copyright 2013 the V8 project authors. All rights reserved. 2 // Use of this source code is governed by a BSD-style license that can be 3 // found in the LICENSE file. 4 5 #include "src/v8.h" 6 7 #include "src/accessors.h" 8 #include "src/allocation-site-scopes.h" 9 #include "src/api.h" 10 #include "src/arguments.h" 11 #include "src/bootstrapper.h" 12 #include "src/codegen.h" 13 #include "src/code-stubs.h" 14 #include "src/cpu-profiler.h" 15 #include "src/debug.h" 16 #include "src/deoptimizer.h" 17 #include "src/date.h" 18 #include "src/elements.h" 19 #include "src/execution.h" 20 #include "src/field-index.h" 21 #include "src/field-index-inl.h" 22 #include "src/full-codegen.h" 23 #include "src/hydrogen.h" 24 #include "src/isolate-inl.h" 25 #include "src/log.h" 26 #include "src/lookup.h" 27 #include "src/objects-inl.h" 28 #include "src/objects-visiting-inl.h" 29 #include "src/macro-assembler.h" 30 #include "src/mark-compact.h" 31 #include "src/safepoint-table.h" 32 #include "src/string-search.h" 33 #include "src/string-stream.h" 34 #include "src/utils.h" 35 36 #ifdef ENABLE_DISASSEMBLER 37 #include "src/disasm.h" 38 #include "src/disassembler.h" 39 #endif 40 41 namespace v8 { 42 namespace internal { 43 44 Handle<HeapType> Object::OptimalType(Isolate* isolate, 45 Representation representation) { 46 if (representation.IsNone()) return HeapType::None(isolate); 47 if (FLAG_track_field_types) { 48 if (representation.IsHeapObject() && IsHeapObject()) { 49 // We can track only JavaScript objects with stable maps. 
      // (continued from OptimalType's head above) Only heap objects with a
      // stable map in the non-callable spec-object range can be tracked as a
      // precise class type; everything else degrades to Any below.
      Handle<Map> map(HeapObject::cast(this)->map(), isolate);
      if (map->is_stable() &&
          map->instance_type() >= FIRST_NONCALLABLE_SPEC_OBJECT_TYPE &&
          map->instance_type() <= LAST_NONCALLABLE_SPEC_OBJECT_TYPE) {
        return HeapType::Class(map, isolate);
      }
    }
  }
  // No precise type can be tracked for this value.
  return HeapType::Any(isolate);
}


// ES ToObject(): returns JS receivers unchanged and wraps primitives
// (number, boolean, string, symbol) in their corresponding JSValue wrapper
// object from |native_context|. Returns an empty MaybeHandle for values
// with no wrapper (undefined/null); raising the TypeError is left to the
// caller.
MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
                                         Handle<Object> object,
                                         Handle<Context> native_context) {
  if (object->IsJSReceiver()) return Handle<JSReceiver>::cast(object);
  Handle<JSFunction> constructor;
  if (object->IsNumber()) {
    constructor = handle(native_context->number_function(), isolate);
  } else if (object->IsBoolean()) {
    constructor = handle(native_context->boolean_function(), isolate);
  } else if (object->IsString()) {
    constructor = handle(native_context->string_function(), isolate);
  } else if (object->IsSymbol()) {
    constructor = handle(native_context->symbol_function(), isolate);
  } else {
    // Undefined and null cannot be converted to an object.
    return MaybeHandle<JSReceiver>();
  }
  Handle<JSObject> result = isolate->factory()->NewJSObject(constructor);
  // The wrapper is a JSValue; store the primitive as its [[PrimitiveValue]].
  Handle<JSValue>::cast(result)->set_value(*object);
  return result;
}


// ES ToBoolean(): false for false, 0, null, undefined, undetectable
// objects, the empty string, and HeapNumbers whose boolean value is false
// (NaN/±0); true for everything else.
bool Object::BooleanValue() {
  if (IsBoolean()) return IsTrue();
  if (IsSmi()) return Smi::cast(this)->value() != 0;
  if (IsUndefined() || IsNull()) return false;
  if (IsUndetectableObject()) return false;   // Undetectable object is false.
  if (IsString()) return String::cast(this)->length() != 0;
  if (IsHeapNumber()) return HeapNumber::cast(this)->HeapNumberBooleanValue();
  return true;
}


// Returns true if this object can be invoked as a function: a JSFunction,
// a function proxy whose call-trap chain bottoms out in one, or a heap
// object whose map carries an instance call handler (API callable).
bool Object::IsCallable() {
  Object* fun = this;
  // Unwrap function proxies down to the underlying call trap.
  while (fun->IsJSFunctionProxy()) {
    fun = JSFunctionProxy::cast(fun)->call_trap();
  }
  return fun->IsJSFunction() ||
         (fun->IsHeapObject() &&
          HeapObject::cast(fun)->map()->has_instance_call_handler());
}


// Looks up |name| into |result|, starting at this object. Primitives start
// the lookup at the corresponding wrapper prototype taken from the current
// native context. Undefined/null receivers are fatal here.
void Object::Lookup(Handle<Name> name, LookupResult* result) {
  DisallowHeapAllocation no_gc;
  Object* holder = NULL;
  if (IsJSReceiver()) {
    holder = this;
  } else {
    Context* native_context = result->isolate()->context()->native_context();
    if (IsNumber()) {
      holder = native_context->number_function()->instance_prototype();
    } else if (IsString()) {
      holder = native_context->string_function()->instance_prototype();
    } else if (IsSymbol()) {
      holder = native_context->symbol_function()->instance_prototype();
    } else if (IsBoolean()) {
      holder = native_context->boolean_function()->instance_prototype();
    } else {
      // Unexpected receiver kind: dump diagnostics and abort the process.
      result->isolate()->PushStackTraceAndDie(
          0xDEAD0000, this, JSReceiver::cast(this)->map(), 0xDEAD0001);
    }
  }
  ASSERT(holder != NULL);  // Cannot handle null or undefined.
  JSReceiver::cast(holder)->Lookup(name, result);
}


// Drives a LookupIterator along the receiver's lookup chain and loads the
// property value, dispatching on the iterator state: proxy handler,
// interceptor, access check, then accessor or data property.
MaybeHandle<Object> Object::GetProperty(LookupIterator* it) {
  for (; it->IsFound(); it->Next()) {
    switch (it->state()) {
      case LookupIterator::NOT_FOUND:
        UNREACHABLE();
      case LookupIterator::JSPROXY:
        return JSProxy::GetPropertyWithHandler(
            it->GetJSProxy(), it->GetReceiver(), it->name());
      case LookupIterator::INTERCEPTOR: {
        MaybeHandle<Object> maybe_result = JSObject::GetPropertyWithInterceptor(
            it->GetHolder(), it->GetReceiver(), it->name());
        // Non-null result: the interceptor handled the load.
        if (!maybe_result.is_null()) return maybe_result;
        // Null result with a pending exception: propagate the failure.
        if (it->isolate()->has_pending_exception()) return maybe_result;
        break;
      }
      case LookupIterator::ACCESS_CHECK:
        if (it->HasAccess(v8::ACCESS_GET)) break;
        return JSObject::GetPropertyWithFailedAccessCheck(it);
      case LookupIterator::PROPERTY:
        if (it->HasProperty()) {
          switch (it->property_kind()) {
            case LookupIterator::ACCESSOR:
              return GetPropertyWithAccessor(
                  it->GetReceiver(), it->name(),
                  it->GetHolder(), it->GetAccessors());
            case LookupIterator::DATA:
              return it->GetDataValue();
          }
        }
        break;
    }
  }
  // Property not found anywhere along the chain.
  return it->factory()->undefined_value();
}


// Extracts an int32 when the value is a Smi, or a HeapNumber whose double
// round-trips losslessly through int32. Returns false otherwise and leaves
// |value| untouched.
bool Object::ToInt32(int32_t* value) {
  if (IsSmi()) {
    *value = Smi::cast(this)->value();
    return true;
  }
  if (IsHeapNumber()) {
    double num = HeapNumber::cast(this)->value();
    if (FastI2D(FastD2I(num)) == num) {
      *value = FastD2I(num);
      return true;
    }
  }
  return false;
}


// Uint32 counterpart of ToInt32: succeeds only for non-negative values
// that round-trip losslessly through uint32.
bool Object::ToUint32(uint32_t* value) {
  if (IsSmi()) {
    int num = Smi::cast(this)->value();
    if (num >= 0) {
      *value = static_cast<uint32_t>(num);
      return true;
    }
  }
  if (IsHeapNumber()) {
    double num = HeapNumber::cast(this)->value();
    if (num >= 0 && FastUI2D(FastD2UI(num)) == num) {
      *value = FastD2UI(num);
      return true;
    }
  }
  return false;
}


// (Return type of the next definition; its signature continues below.)
bool
FunctionTemplateInfo::IsTemplateFor(Object* object) {
  // Object overload: only heap objects can match; delegate to the
  // map-based check.
  if (!object->IsHeapObject()) return false;
  return IsTemplateFor(HeapObject::cast(object)->map());
}


// Returns true if objects with the given map were instantiated from this
// function template, directly or through template inheritance.
bool FunctionTemplateInfo::IsTemplateFor(Map* map) {
  // There is a constraint on the object; check.
  if (!map->IsJSObjectMap()) return false;
  // Fetch the constructor function of the object.
  Object* cons_obj = map->constructor();
  if (!cons_obj->IsJSFunction()) return false;
  JSFunction* fun = JSFunction::cast(cons_obj);
  // Iterate through the chain of inheriting function templates to
  // see if the required one occurs.
  for (Object* type = fun->shared()->function_data();
       type->IsFunctionTemplateInfo();
       type = FunctionTemplateInfo::cast(type)->parent_template()) {
    if (type == this) return true;
  }
  // Didn't find the required type in the inheritance chain.
  return false;
}


// Casts a raw pointer to To*, asserting (debug-only) that the address is
// suitably aligned for To.
template<typename To>
static inline To* CheckedCast(void *from) {
  uintptr_t temp = reinterpret_cast<uintptr_t>(from);
  ASSERT(temp % sizeof(To) == 0);
  return reinterpret_cast<To*>(temp);
}


// Declared-accessor terminal step: reads a 1/2/4-byte unsigned value at
// |ptr| and compares it to the descriptor's compare_value under the
// descriptor's bitmask, returning the result as a JS boolean.
static Handle<Object> PerformCompare(const BitmaskCompareDescriptor& descriptor,
                                     char* ptr,
                                     Isolate* isolate) {
  uint32_t bitmask = descriptor.bitmask;
  uint32_t compare_value = descriptor.compare_value;
  uint32_t value;
  switch (descriptor.size) {
    case 1:
      value = static_cast<uint32_t>(*CheckedCast<uint8_t>(ptr));
      // Narrow mask and expected value to the loaded width.
      compare_value &= 0xff;
      bitmask &= 0xff;
      break;
    case 2:
      value = static_cast<uint32_t>(*CheckedCast<uint16_t>(ptr));
      compare_value &= 0xffff;
      bitmask &= 0xffff;
      break;
    case 4:
      value = *CheckedCast<uint32_t>(ptr);
      break;
    default:
      UNREACHABLE();
      return isolate->factory()->undefined_value();
  }
  return isolate->factory()->ToBoolean(
      (bitmask & value) == (bitmask & compare_value));
}


// Declared-accessor terminal step: word-sized pointer-equality compare of
// the value at |ptr| against the descriptor's compare_value.
static Handle<Object> PerformCompare(const PointerCompareDescriptor& descriptor,
                                     char* ptr,
                                     Isolate* isolate) {
  uintptr_t compare_value =
      reinterpret_cast<uintptr_t>(descriptor.compare_value);
  uintptr_t value = *CheckedCast<uintptr_t>(ptr);
  return isolate->factory()->ToBoolean(compare_value == value);
}


// Declared-accessor terminal step: loads a primitive of the descriptor's
// data type from |ptr| and boxes it as a JS number or boolean. The
// AllowHeapAllocation scopes re-enable allocation (the caller holds a
// DisallowHeapAllocation) only where a HeapNumber may be allocated.
static Handle<Object> GetPrimitiveValue(
    const PrimitiveValueDescriptor& descriptor,
    char* ptr,
    Isolate* isolate) {
  int32_t int32_value = 0;
  switch (descriptor.data_type) {
    case kDescriptorInt8Type:
      int32_value = *CheckedCast<int8_t>(ptr);
      break;
    case kDescriptorUint8Type:
      int32_value = *CheckedCast<uint8_t>(ptr);
      break;
    case kDescriptorInt16Type:
      int32_value = *CheckedCast<int16_t>(ptr);
      break;
    case kDescriptorUint16Type:
      int32_value = *CheckedCast<uint16_t>(ptr);
      break;
    case kDescriptorInt32Type:
      int32_value = *CheckedCast<int32_t>(ptr);
      break;
    case kDescriptorUint32Type: {
      uint32_t value = *CheckedCast<uint32_t>(ptr);
      AllowHeapAllocation allow_gc;
      return isolate->factory()->NewNumberFromUint(value);
    }
    case kDescriptorBoolType: {
      // Test a single bit at bool_offset within the byte.
      uint8_t byte = *CheckedCast<uint8_t>(ptr);
      return isolate->factory()->ToBoolean(
          byte & (0x1 << descriptor.bool_offset));
    }
    case kDescriptorFloatType: {
      float value = *CheckedCast<float>(ptr);
      AllowHeapAllocation allow_gc;
      return isolate->factory()->NewNumber(value);
    }
    case kDescriptorDoubleType: {
      double value = *CheckedCast<double>(ptr);
      AllowHeapAllocation allow_gc;
      return isolate->factory()->NewNumber(value);
    }
  }
  // All int-like cases fall through to here.
  AllowHeapAllocation allow_gc;
  return isolate->factory()->NewNumberFromInt(int32_value);
}


// Evaluates a declared accessor's descriptor program against |receiver|:
// starting from the receiver's address, applies a chain of dereference /
// shift / internal-field steps over raw memory until a terminal step
// (return-object, compare, or primitive-value) produces the JS result.
// Runs under DisallowHeapAllocation because raw pointers are followed;
// the terminal helpers re-enable allocation only where safe.
static Handle<Object> GetDeclaredAccessorProperty(
    Handle<Object> receiver,
    Handle<DeclaredAccessorInfo> info,
    Isolate* isolate) {
  DisallowHeapAllocation no_gc;
  char* current = reinterpret_cast<char*>(*receiver);
  DeclaredAccessorDescriptorIterator iterator(info->descriptor());
  while (true) {
    const DeclaredAccessorDescriptorData* data = iterator.Next();
    switch (data->type) {
      case kDescriptorReturnObject: {
        // Terminal: dereference once more and return the Object* found.
        ASSERT(iterator.Complete());
        current = *CheckedCast<char*>(current);
        return handle(*CheckedCast<Object*>(current), isolate);
      }
      case kDescriptorPointerDereference:
        ASSERT(!iterator.Complete());
        current = *reinterpret_cast<char**>(current);
        break;
      case kDescriptorPointerShift:
        ASSERT(!iterator.Complete());
        current += data->pointer_shift_descriptor.byte_offset;
        break;
      case kDescriptorObjectDereference: {
        // Load a Smi-encoded pointer out of a JSObject internal field.
        ASSERT(!iterator.Complete());
        Object* object = CheckedCast<Object>(current);
        int field = data->object_dereference_descriptor.internal_field;
        Object* smi = JSObject::cast(object)->GetInternalField(field);
        ASSERT(smi->IsSmi());
        current = reinterpret_cast<char*>(smi);
        break;
      }
      case kDescriptorBitmaskCompare:
        ASSERT(iterator.Complete());
        return PerformCompare(data->bitmask_compare_descriptor,
                              current,
                              isolate);
      case kDescriptorPointerCompare:
        ASSERT(iterator.Complete());
        return PerformCompare(data->pointer_compare_descriptor,
                              current,
                              isolate);
      case kDescriptorPrimitiveValue:
        ASSERT(iterator.Complete());
        return GetPrimitiveValue(data->primitive_value_descriptor,
                                 current,
                                 isolate);
    }
  }
  // Not reached: every descriptor program ends in a terminal step above.
  UNREACHABLE();
  return isolate->factory()->undefined_value();
}


// Makes the object's fast elements writable: if the backing FixedArray is
// copy-on-write, replaces it with a mutable copy and bumps the conversion
// counter. Returns the (possibly new) backing store.
Handle<FixedArray> JSObject::EnsureWritableFastElements(
    Handle<JSObject> object) {
  ASSERT(object->HasFastSmiOrObjectElements());
  Isolate* isolate = object->GetIsolate();
  Handle<FixedArray> elems(FixedArray::cast(object->elements()), isolate);
  // Already writable if the map is not the COW array map.
  if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
  Handle<FixedArray> writable_elems = isolate->factory()->CopyFixedArrayWithMap(
      elems, isolate->factory()->fixed_array_map());
  object->set_elements(*writable_elems);
  isolate->counters()->cow_arrays_converted()->Increment();
  return writable_elems;
}


// Harmony proxies: loads a named property by calling the proxy's "get"
// trap (falling back to the derived trap when none is installed).
MaybeHandle<Object> JSProxy::GetPropertyWithHandler(Handle<JSProxy> proxy,
                                                    Handle<Object> receiver,
                                                    Handle<Name> name) {
  Isolate* isolate = proxy->GetIsolate();

  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
  if (name->IsSymbol()) return isolate->factory()->undefined_value();

  Handle<Object> args[] = { receiver, name };
  return CallTrap(
      proxy, "get", isolate->derived_get_trap(), ARRAY_SIZE(args), args);
}


// Loads a property through an accessor |structure|: an API-style
// AccessorInfo (declared or executable callback), or a JS
// __defineGetter__-style AccessorPair. Throws a TypeError when the
// receiver is incompatible with the API accessor.
MaybeHandle<Object> Object::GetPropertyWithAccessor(Handle<Object> receiver,
                                                    Handle<Name> name,
                                                    Handle<JSObject> holder,
                                                    Handle<Object> structure) {
  Isolate* isolate = name->GetIsolate();
  ASSERT(!structure->IsForeign());
  // api style callbacks.
  if (structure->IsAccessorInfo()) {
    Handle<AccessorInfo> accessor_info = Handle<AccessorInfo>::cast(structure);
    if (!accessor_info->IsCompatibleReceiver(*receiver)) {
      Handle<Object> args[2] = { name, receiver };
      Handle<Object> error =
          isolate->factory()->NewTypeError("incompatible_method_receiver",
                                           HandleVector(args,
                                                        ARRAY_SIZE(args)));
      return isolate->Throw<Object>(error);
    }
    // TODO(rossberg): Handling symbols in the API requires changing the API,
    // so we do not support it for now.
    // Symbols are not supported through the external accessor API (yet).
    if (name->IsSymbol()) return isolate->factory()->undefined_value();
    if (structure->IsDeclaredAccessorInfo()) {
      // Declared accessors are evaluated by a descriptor interpreter
      // instead of calling back into the embedder.
      return GetDeclaredAccessorProperty(
          receiver,
          Handle<DeclaredAccessorInfo>::cast(structure),
          isolate);
    }

    Handle<ExecutableAccessorInfo> data =
        Handle<ExecutableAccessorInfo>::cast(structure);
    v8::AccessorGetterCallback call_fun =
        v8::ToCData<v8::AccessorGetterCallback>(data->getter());
    // A write-only accessor: reads yield undefined.
    if (call_fun == NULL) return isolate->factory()->undefined_value();

    Handle<String> key = Handle<String>::cast(name);
    LOG(isolate, ApiNamedPropertyAccess("load", *holder, *name));
    PropertyCallbackArguments args(isolate, data->data(), *receiver, *holder);
    v8::Handle<v8::Value> result =
        args.Call(call_fun, v8::Utils::ToLocal(key));
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    if (result.IsEmpty()) {
      return isolate->factory()->undefined_value();
    }
    Handle<Object> return_value = v8::Utils::OpenHandle(*result);
    return_value->VerifyApiCallResultType();
    // Rebox handle before return.
    return handle(*return_value, isolate);
  }

  // __defineGetter__ callback
  Handle<Object> getter(Handle<AccessorPair>::cast(structure)->getter(),
                        isolate);
  if (getter->IsSpecFunction()) {
    // TODO(rossberg): nicer would be to cast to some JSCallable here...
    return Object::GetPropertyWithDefinedGetter(
        receiver, Handle<JSReceiver>::cast(getter));
  }
  // Getter is not a function.
  return isolate->factory()->undefined_value();
}


// Stores |value| through an accessor |structure| on |holder|: either an
// API-style ExecutableAccessorInfo setter callback or a JS
// __defineSetter__-style AccessorPair. Throws a TypeError for an
// incompatible API receiver, and (strict mode only) when no setter exists.
// Returns |value| on success.
MaybeHandle<Object> Object::SetPropertyWithCallback(Handle<Object> receiver,
                                                    Handle<Name> name,
                                                    Handle<Object> value,
                                                    Handle<JSObject> holder,
                                                    Handle<Object> structure,
                                                    StrictMode strict_mode) {
  Isolate* isolate = name->GetIsolate();

  // We should never get here to initialize a const with the hole
  // value since a const declaration would conflict with the setter.
  ASSERT(!value->IsTheHole());
  ASSERT(!structure->IsForeign());
  if (structure->IsExecutableAccessorInfo()) {
    // api style callbacks
    ExecutableAccessorInfo* data = ExecutableAccessorInfo::cast(*structure);
    if (!data->IsCompatibleReceiver(*receiver)) {
      Handle<Object> args[2] = { name, receiver };
      Handle<Object> error =
          isolate->factory()->NewTypeError("incompatible_method_receiver",
                                           HandleVector(args,
                                                        ARRAY_SIZE(args)));
      return isolate->Throw<Object>(error);
    }
    // TODO(rossberg): Support symbols in the API.
    if (name->IsSymbol()) return value;
    Object* call_obj = data->setter();
    v8::AccessorSetterCallback call_fun =
        v8::ToCData<v8::AccessorSetterCallback>(call_obj);
    // A read-only accessor: the store is silently dropped.
    if (call_fun == NULL) return value;
    Handle<String> key = Handle<String>::cast(name);
    LOG(isolate, ApiNamedPropertyAccess("store", *holder, *name));
    PropertyCallbackArguments args(isolate, data->data(), *receiver, *holder);
    args.Call(call_fun,
              v8::Utils::ToLocal(key),
              v8::Utils::ToLocal(value));
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return value;
  }

  if (structure->IsAccessorPair()) {
    Handle<Object> setter(AccessorPair::cast(*structure)->setter(), isolate);
    if (setter->IsSpecFunction()) {
      // TODO(rossberg): nicer would be to cast to some JSCallable here...
      return SetPropertyWithDefinedSetter(
          receiver, Handle<JSReceiver>::cast(setter), value);
    } else {
      // No setter: silently ignore in sloppy mode, throw in strict mode.
      if (strict_mode == SLOPPY) return value;
      Handle<Object> args[2] = { name, holder };
      Handle<Object> error =
          isolate->factory()->NewTypeError("no_setter_in_callback",
                                           HandleVector(args, 2));
      return isolate->Throw<Object>(error);
    }
  }

  // TODO(dcarney): Handle correctly.
  if (structure->IsDeclaredAccessorInfo()) {
    return value;
  }

  UNREACHABLE();
  return MaybeHandle<Object>();
}


// Invokes a JS-defined getter as a call on |receiver|, notifying the
// debugger first so "step into" works across the accessor boundary.
MaybeHandle<Object> Object::GetPropertyWithDefinedGetter(
    Handle<Object> receiver,
    Handle<JSReceiver> getter) {
  Isolate* isolate = getter->GetIsolate();
  Debug* debug = isolate->debug();
  // Handle stepping into a getter if step into is active.
  // TODO(rossberg): should this apply to getters that are function proxies?
  if (debug->StepInActive() && getter->IsJSFunction()) {
    debug->HandleStepIn(
        Handle<JSFunction>::cast(getter), Handle<Object>::null(), 0, false);
  }

  return Execution::Call(isolate, getter, receiver, 0, NULL, true);
}


// Invokes a JS-defined setter with |value| as its single argument, again
// notifying the debugger for step-into. Returns |value| on success.
MaybeHandle<Object> Object::SetPropertyWithDefinedSetter(
    Handle<Object> receiver,
    Handle<JSReceiver> setter,
    Handle<Object> value) {
  Isolate* isolate = setter->GetIsolate();

  Debug* debug = isolate->debug();
  // Handle stepping into a setter if step into is active.
  // TODO(rossberg): should this apply to getters that are function proxies?
  if (debug->StepInActive() && setter->IsJSFunction()) {
    debug->HandleStepIn(
        Handle<JSFunction>::cast(setter), Handle<Object>::null(), 0, false);
  }

  Handle<Object> argv[] = { value };
  RETURN_ON_EXCEPTION(
      isolate,
      Execution::Call(isolate, setter, receiver, ARRAY_SIZE(argv), argv),
      Object);
  return value;
}


// Advances the iterator (skipping interceptors and access checks) looking
// for an accessor property whose AccessorInfo/AccessorPair is flagged
// all_can_read. Used to answer reads that failed the access check.
static bool FindAllCanReadHolder(LookupIterator* it) {
  it->skip_interceptor();
  it->skip_access_check();
  for (; it->IsFound(); it->Next()) {
    if (it->state() == LookupIterator::PROPERTY &&
        it->HasProperty() &&
        it->property_kind() == LookupIterator::ACCESSOR) {
      Handle<Object> accessors = it->GetAccessors();
      if (accessors->IsAccessorInfo()) {
        if (AccessorInfo::cast(*accessors)->all_can_read()) return true;
      } else if (accessors->IsAccessorPair()) {
        if (AccessorPair::cast(*accessors)->all_can_read()) return true;
      }
    }
  }
  return false;
}


// Fallback load after a failed access check: only all_can_read accessors
// may still be read; otherwise the failure is reported to the embedder and
// undefined is returned (or a scheduled exception is propagated).
MaybeHandle<Object> JSObject::GetPropertyWithFailedAccessCheck(
    LookupIterator* it) {
  // Capture the holder now; FindAllCanReadHolder advances the iterator.
  Handle<JSObject> checked = Handle<JSObject>::cast(it->GetHolder());
  if (FindAllCanReadHolder(it)) {
    return GetPropertyWithAccessor(
        it->GetReceiver(), it->name(), it->GetHolder(), it->GetAccessors());
  }
  it->isolate()->ReportFailedAccessCheck(checked, v8::ACCESS_GET);
  RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(it->isolate(), Object);
  return it->factory()->undefined_value();
}


// Attribute-query counterpart of GetPropertyWithFailedAccessCheck.
PropertyAttributes JSObject::GetPropertyAttributesWithFailedAccessCheck(
    LookupIterator* it) {
  Handle<JSObject> checked = Handle<JSObject>::cast(it->GetHolder());
  if (FindAllCanReadHolder(it)) return it->property_details().attributes();
  it->isolate()->ReportFailedAccessCheck(checked, v8::ACCESS_HAS);
  // TODO(yangguo): Issue 3269, check for scheduled exception missing?
  return ABSENT;
}


// LookupResult-based analogue of FindAllCanReadHolder for stores: searches
// the holder (and, if requested, its prototype chain) for a CALLBACKS
// property flagged all_can_write.
static bool FindAllCanWriteHolder(LookupResult* result,
                                  Handle<Name> name,
                                  bool check_prototype) {
  if (result->IsInterceptor()) {
    // Look past the interceptor to the real property, if any.
    result->holder()->LookupOwnRealNamedProperty(name, result);
  }

  while (result->IsProperty()) {
    if (result->type() == CALLBACKS) {
      Object* callback_obj = result->GetCallbackObject();
      if (callback_obj->IsAccessorInfo()) {
        if (AccessorInfo::cast(callback_obj)->all_can_write()) return true;
      } else if (callback_obj->IsAccessorPair()) {
        if (AccessorPair::cast(callback_obj)->all_can_write()) return true;
      }
    }
    if (!check_prototype) break;
    result->holder()->LookupRealNamedPropertyInPrototypes(name, result);
  }
  return false;
}


// Fallback store after a failed access check: only all_can_write callbacks
// may still be written; otherwise the failure is reported to the embedder
// and |value| is returned unchanged.
MaybeHandle<Object> JSObject::SetPropertyWithFailedAccessCheck(
    Handle<JSObject> object,
    LookupResult* result,
    Handle<Name> name,
    Handle<Object> value,
    bool check_prototype,
    StrictMode strict_mode) {
  if (check_prototype && !result->IsProperty()) {
    object->LookupRealNamedPropertyInPrototypes(name, result);
  }

  if (FindAllCanWriteHolder(result, name, check_prototype)) {
    Handle<JSObject> holder(result->holder());
    Handle<Object> callbacks(result->GetCallbackObject(), result->isolate());
    return SetPropertyWithCallback(
        object, name, value, holder, callbacks, strict_mode);
  }

  Isolate* isolate = object->GetIsolate();
  isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET);
  RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
  return value;
}


// Raw-pointer variant: reads the value of a dictionary-mode property found
// by |result|, unwrapping the PropertyCell for global objects. See the
// handle-based overload for the GC-safe version.
Object* JSObject::GetNormalizedProperty(const LookupResult* result) {
  ASSERT(!HasFastProperties());
  Object* value = property_dictionary()->ValueAt(result->GetDictionaryEntry());
  if (IsGlobalObject()) {
    value = PropertyCell::cast(value)->value();
  }
  // Cells must never leak out of the dictionary.
  ASSERT(!value->IsPropertyCell() && !value->IsCell());
  return value;
}
// Handle-based variant: reads the value of a dictionary-mode property
// found by |result|, unwrapping the PropertyCell for global objects.
Handle<Object> JSObject::GetNormalizedProperty(Handle<JSObject> object,
                                               const LookupResult* result) {
  ASSERT(!object->HasFastProperties());
  Isolate* isolate = object->GetIsolate();
  Handle<Object> value(object->property_dictionary()->ValueAt(
      result->GetDictionaryEntry()), isolate);
  if (object->IsGlobalObject()) {
    // Globals store values indirectly through a PropertyCell.
    value = Handle<Object>(Handle<PropertyCell>::cast(value)->value(), isolate);
  }
  ASSERT(!value->IsPropertyCell() && !value->IsCell());
  return value;
}


// Overwrites the value of an existing dictionary-mode property found by
// |result|, writing through the PropertyCell for global objects.
void JSObject::SetNormalizedProperty(Handle<JSObject> object,
                                     const LookupResult* result,
                                     Handle<Object> value) {
  ASSERT(!object->HasFastProperties());
  NameDictionary* property_dictionary = object->property_dictionary();
  if (object->IsGlobalObject()) {
    Handle<PropertyCell> cell(PropertyCell::cast(
        property_dictionary->ValueAt(result->GetDictionaryEntry())));
    PropertyCell::SetValueInferType(cell, value);
  } else {
    property_dictionary->ValueAtPut(result->GetDictionaryEntry(), *value);
  }
}


// Adds or updates dictionary-mode property |name| with |value|/|details|.
// Internalizes non-unique names, boxes new global values in a fresh
// PropertyCell, and preserves the enumeration index of live entries.
void JSObject::SetNormalizedProperty(Handle<JSObject> object,
                                     Handle<Name> name,
                                     Handle<Object> value,
                                     PropertyDetails details) {
  ASSERT(!object->HasFastProperties());
  Handle<NameDictionary> property_dictionary(object->property_dictionary());

  // Dictionary keys must be unique names.
  if (!name->IsUniqueName()) {
    name = object->GetIsolate()->factory()->InternalizeString(
        Handle<String>::cast(name));
  }

  int entry = property_dictionary->FindEntry(name);
  if (entry == NameDictionary::kNotFound) {
    // New property: globals store the value boxed in a PropertyCell.
    Handle<Object> store_value = value;
    if (object->IsGlobalObject()) {
      store_value = object->GetIsolate()->factory()->NewPropertyCell(value);
    }

    // Add may reallocate the dictionary; re-install it on the object.
    property_dictionary = NameDictionary::Add(
        property_dictionary, name, store_value, details);
    object->set_properties(*property_dictionary);
    return;
  }

  PropertyDetails original_details = property_dictionary->DetailsAt(entry);
  int enumeration_index;
  // Preserve the enumeration index unless the property was deleted.
  if (original_details.IsDeleted()) {
    enumeration_index = property_dictionary->NextEnumerationIndex();
    property_dictionary->SetNextEnumerationIndex(enumeration_index + 1);
  } else {
    enumeration_index = original_details.dictionary_index();
    ASSERT(enumeration_index > 0);
  }

  details = PropertyDetails(
      details.attributes(), details.type(), enumeration_index);

  if (object->IsGlobalObject()) {
    Handle<PropertyCell> cell(
        PropertyCell::cast(property_dictionary->ValueAt(entry)));
    PropertyCell::SetValueInferType(cell, value);
    // Please note we have to update the property details.
    property_dictionary->DetailsAtPut(entry, details);
  } else {
    property_dictionary->SetEntry(entry, name, value, details);
  }
}


// Deletes a dictionary-mode property. Globals keep their cell but have it
// set to the hole and marked deleted; a FORCE_DELETION of a DontDelete
// global also swaps the map to invalidate ICs that load straight from the
// cell. Returns a JS boolean indicating success.
Handle<Object> JSObject::DeleteNormalizedProperty(Handle<JSObject> object,
                                                  Handle<Name> name,
                                                  DeleteMode mode) {
  ASSERT(!object->HasFastProperties());
  Isolate* isolate = object->GetIsolate();
  Handle<NameDictionary> dictionary(object->property_dictionary());
  int entry = dictionary->FindEntry(name);
  if (entry != NameDictionary::kNotFound) {
    // If we have a global object set the cell to the hole.
    if (object->IsGlobalObject()) {
      PropertyDetails details = dictionary->DetailsAt(entry);
      if (details.IsDontDelete()) {
        if (mode != FORCE_DELETION) return isolate->factory()->false_value();
        // When forced to delete global properties, we have to make a
        // map change to invalidate any ICs that think they can load
        // from the DontDelete cell without checking if it contains
        // the hole value.
        Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
        ASSERT(new_map->is_dictionary_map());
        object->set_map(*new_map);
      }
      Handle<PropertyCell> cell(PropertyCell::cast(dictionary->ValueAt(entry)));
      Handle<Object> value = isolate->factory()->the_hole_value();
      PropertyCell::SetValueInferType(cell, value);
      dictionary->DetailsAtPut(entry, details.AsDeleted());
    } else {
      Handle<Object> deleted(
          NameDictionary::DeleteProperty(dictionary, entry, mode));
      if (*deleted == isolate->heap()->true_value()) {
        // Deletion succeeded: shrink the dictionary if it is now sparse.
        Handle<NameDictionary> new_properties =
            NameDictionary::Shrink(dictionary, name);
        object->set_properties(*new_properties);
      }
      return deleted;
    }
  }
  return isolate->factory()->true_value();
}


// Returns true if the object may have been modified since its creation by
// an API function: foreign constructor, non-API function, changed map,
// non-fast elements, or dictionary properties.
bool JSObject::IsDirty() {
  Object* cons_obj = map()->constructor();
  if (!cons_obj->IsJSFunction())
    return true;
  JSFunction* fun = JSFunction::cast(cons_obj);
  if (!fun->shared()->IsApiFunction())
    return true;
  // If the object is fully fast case and has the same map it was
  // created with then no changes can have been made to it.
  return map() != fun->initial_map()
      || !HasFastObjectElements()
      || !HasFastProperties();
}


// Loads element |index| starting at |object| and walking up the prototype
// chain. Primitives are redirected to their wrapper prototypes, proxies
// delegate to the element handler, and access checks and indexed
// interceptors are honored on each JSObject visited.
MaybeHandle<Object> Object::GetElementWithReceiver(Isolate* isolate,
                                                   Handle<Object> object,
                                                   Handle<Object> receiver,
                                                   uint32_t index) {
  Handle<Object> holder;

  // Iterate up the prototype chain until an element is found or the null
  // prototype is encountered.
  for (holder = object;
       !holder->IsNull();
       holder = Handle<Object>(holder->GetPrototype(isolate), isolate)) {
    if (!holder->IsJSObject()) {
      Context* native_context = isolate->context()->native_context();
      if (holder->IsNumber()) {
        holder = Handle<Object>(
            native_context->number_function()->instance_prototype(), isolate);
      } else if (holder->IsString()) {
        holder = Handle<Object>(
            native_context->string_function()->instance_prototype(), isolate);
      } else if (holder->IsSymbol()) {
        holder = Handle<Object>(
            native_context->symbol_function()->instance_prototype(), isolate);
      } else if (holder->IsBoolean()) {
        holder = Handle<Object>(
            native_context->boolean_function()->instance_prototype(), isolate);
      } else if (holder->IsJSProxy()) {
        return JSProxy::GetElementWithHandler(
            Handle<JSProxy>::cast(holder), receiver, index);
      } else {
        // Undefined and null have no indexed properties.
        ASSERT(holder->IsUndefined() || holder->IsNull());
        return isolate->factory()->undefined_value();
      }
    }

    // Inline the case for JSObjects. Doing so significantly improves the
    // performance of fetching elements where checking the prototype chain is
    // necessary.
    Handle<JSObject> js_object = Handle<JSObject>::cast(holder);

    // Check access rights if needed.
    if (js_object->IsAccessCheckNeeded()) {
      if (!isolate->MayIndexedAccess(js_object, index, v8::ACCESS_GET)) {
        isolate->ReportFailedAccessCheck(js_object, v8::ACCESS_GET);
        RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
        return isolate->factory()->undefined_value();
      }
    }

    if (js_object->HasIndexedInterceptor()) {
      return JSObject::GetElementWithInterceptor(js_object, receiver, index);
    }

    if (js_object->elements() != isolate->heap()->empty_fixed_array()) {
      Handle<Object> result;
      ASSIGN_RETURN_ON_EXCEPTION(
          isolate, result,
          js_object->GetElementsAccessor()->Get(receiver, js_object, index),
          Object);
      // The hole means "absent here": keep walking the chain.
      if (!result->IsTheHole()) return result;
    }
  }

  return isolate->factory()->undefined_value();
}


// Returns the object's prototype: the map's prototype for JS receivers,
// the corresponding wrapper prototype for primitives, and null otherwise.
Object* Object::GetPrototype(Isolate* isolate) {
  DisallowHeapAllocation no_alloc;
  if (IsSmi()) {
    Context* context = isolate->context()->native_context();
    return context->number_function()->instance_prototype();
  }

  HeapObject* heap_object = HeapObject::cast(this);

  // The object is either a number, a string, a boolean,
  // a real JS object, or a Harmony proxy.
  if (heap_object->IsJSReceiver()) {
    return heap_object->map()->prototype();
  }
  Context* context = isolate->context()->native_context();

  if (heap_object->IsHeapNumber()) {
    return context->number_function()->instance_prototype();
  }
  if (heap_object->IsString()) {
    return context->string_function()->instance_prototype();
  }
  if (heap_object->IsSymbol()) {
    return context->symbol_function()->instance_prototype();
  }
  if (heap_object->IsBoolean()) {
    return context->boolean_function()->instance_prototype();
  } else {
    return isolate->heap()->null_value();
  }
}


// Handlified wrapper around the raw GetPrototype above.
Handle<Object> Object::GetPrototype(Isolate* isolate,
                                    Handle<Object> object) {
  return handle(object->GetPrototype(isolate), isolate);
}


// Returns the object's hash as a Smi for numbers, names and oddballs.
// For JS receivers the identity hash is returned, which may be a non-Smi
// when none has been created yet (see GetOrCreateHash).
Object* Object::GetHash() {
  // The object is either a number, a name, an odd-ball,
  // a real JS object, or a Harmony proxy.
  if (IsNumber()) {
    uint32_t hash = ComputeLongHash(double_to_uint64(Number()));
    return Smi::FromInt(hash & Smi::kMaxValue);
  }
  if (IsName()) {
    uint32_t hash = Name::cast(this)->Hash();
    return Smi::FromInt(hash);
  }
  if (IsOddball()) {
    uint32_t hash = Oddball::cast(this)->to_string()->Hash();
    return Smi::FromInt(hash);
  }

  ASSERT(IsJSReceiver());
  return JSReceiver::cast(this)->GetIdentityHash();
}


// Like GetHash, but lazily creates the identity hash for JS receivers
// that do not have one yet.
Handle<Smi> Object::GetOrCreateHash(Isolate* isolate, Handle<Object> object) {
  Handle<Object> hash(object->GetHash(), isolate);
  if (hash->IsSmi()) return Handle<Smi>::cast(hash);

  ASSERT(object->IsJSReceiver());
  return JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver>::cast(object));
}


// ES SameValue: like ===, except NaN equals NaN and +0 differs from -0.
bool Object::SameValue(Object* other) {
  if (other == this) return true;

  // The object is either a number, a name, an odd-ball,
  // a real JS object, or a Harmony proxy.
  if (IsNumber() && other->IsNumber()) {
    double this_value = Number();
    double other_value = other->Number();
    bool equal = this_value == other_value;
    // SameValue(NaN, NaN) is true.
    if (!equal) return std::isnan(this_value) && std::isnan(other_value);
    // SameValue(0.0, -0.0) is false.
    return (this_value != 0) || ((1 / this_value) == (1 / other_value));
  }
  if (IsString() && other->IsString()) {
    return String::cast(this)->Equals(String::cast(other));
  }
  return false;
}


// ES SameValueZero: like SameValue but +0 and -0 are considered equal.
bool Object::SameValueZero(Object* other) {
  if (other == this) return true;

  // The object is either a number, a name, an odd-ball,
  // a real JS object, or a Harmony proxy.
  if (IsNumber() && other->IsNumber()) {
    double this_value = Number();
    double other_value = other->Number();
    // +0 == -0 is true
    return this_value == other_value
        || (std::isnan(this_value) && std::isnan(other_value));
  }
  if (IsString() && other->IsString()) {
    return String::cast(this)->Equals(String::cast(other));
  }
  return false;
}


// Prints a short description of the object to |out| via a StringStream.
void Object::ShortPrint(FILE* out) {
  HeapStringAllocator allocator;
  StringStream accumulator(&allocator);
  ShortPrint(&accumulator);
  accumulator.OutputToFile(out);
}


// Appends a short description of the object to |accumulator|.
void Object::ShortPrint(StringStream* accumulator) {
  if (IsSmi()) {
    Smi::cast(this)->SmiPrint(accumulator);
  } else {
    HeapObject::cast(this)->HeapObjectShortPrint(accumulator);
  }
}


// Prints the Smi's integer value to |out|.
void Smi::SmiPrint(FILE* out) {
  PrintF(out, "%d", value());
}


// Appends the Smi's integer value to |accumulator|.
void Smi::SmiPrint(StringStream* accumulator) {
  accumulator->Add("%d", value());
}


// Should a word be prefixed by 'a' or 'an' in order to read naturally in
// English? Returns false for non-ASCII or words that don't start with
// a capital letter. The a/an rule follows pronunciation in English.
1010 // We don't use the BBC's overcorrect "an historic occasion" though if 1011 // you speak a dialect you may well say "an 'istoric occasion". 1012 static bool AnWord(String* str) { 1013 if (str->length() == 0) return false; // A nothing. 1014 int c0 = str->Get(0); 1015 int c1 = str->length() > 1 ? str->Get(1) : 0; 1016 if (c0 == 'U') { 1017 if (c1 > 'Z') { 1018 return true; // An Umpire, but a UTF8String, a U. 1019 } 1020 } else if (c0 == 'A' || c0 == 'E' || c0 == 'I' || c0 == 'O') { 1021 return true; // An Ape, an ABCBook. 1022 } else if ((c1 == 0 || (c1 >= 'A' && c1 <= 'Z')) && 1023 (c0 == 'F' || c0 == 'H' || c0 == 'M' || c0 == 'N' || c0 == 'R' || 1024 c0 == 'S' || c0 == 'X')) { 1025 return true; // An MP3File, an M. 1026 } 1027 return false; 1028 } 1029 1030 1031 Handle<String> String::SlowFlatten(Handle<ConsString> cons, 1032 PretenureFlag pretenure) { 1033 ASSERT(AllowHeapAllocation::IsAllowed()); 1034 ASSERT(cons->second()->length() != 0); 1035 Isolate* isolate = cons->GetIsolate(); 1036 int length = cons->length(); 1037 PretenureFlag tenure = isolate->heap()->InNewSpace(*cons) ? 
pretenure 1038 : TENURED; 1039 Handle<SeqString> result; 1040 if (cons->IsOneByteRepresentation()) { 1041 Handle<SeqOneByteString> flat = isolate->factory()->NewRawOneByteString( 1042 length, tenure).ToHandleChecked(); 1043 DisallowHeapAllocation no_gc; 1044 WriteToFlat(*cons, flat->GetChars(), 0, length); 1045 result = flat; 1046 } else { 1047 Handle<SeqTwoByteString> flat = isolate->factory()->NewRawTwoByteString( 1048 length, tenure).ToHandleChecked(); 1049 DisallowHeapAllocation no_gc; 1050 WriteToFlat(*cons, flat->GetChars(), 0, length); 1051 result = flat; 1052 } 1053 cons->set_first(*result); 1054 cons->set_second(isolate->heap()->empty_string()); 1055 ASSERT(result->IsFlat()); 1056 return result; 1057 } 1058 1059 1060 1061 bool String::MakeExternal(v8::String::ExternalStringResource* resource) { 1062 // Externalizing twice leaks the external resource, so it's 1063 // prohibited by the API. 1064 ASSERT(!this->IsExternalString()); 1065 #ifdef ENABLE_SLOW_ASSERTS 1066 if (FLAG_enable_slow_asserts) { 1067 // Assert that the resource and the string are equivalent. 1068 ASSERT(static_cast<size_t>(this->length()) == resource->length()); 1069 ScopedVector<uc16> smart_chars(this->length()); 1070 String::WriteToFlat(this, smart_chars.start(), 0, this->length()); 1071 ASSERT(memcmp(smart_chars.start(), 1072 resource->data(), 1073 resource->length() * sizeof(smart_chars[0])) == 0); 1074 } 1075 #endif // DEBUG 1076 Heap* heap = GetHeap(); 1077 int size = this->Size(); // Byte size of the original string. 1078 if (size < ExternalString::kShortSize) { 1079 return false; 1080 } 1081 bool is_ascii = this->IsOneByteRepresentation(); 1082 bool is_internalized = this->IsInternalizedString(); 1083 1084 // Morph the string to an external string by replacing the map and 1085 // reinitializing the fields. This won't work if 1086 // - the space the existing string occupies is too small for a regular 1087 // external string. 
1088 // - the existing string is in old pointer space and the backing store of 1089 // the external string is not aligned. The GC cannot deal with a field 1090 // containing a possibly unaligned address to outside of V8's heap. 1091 // In either case we resort to a short external string instead, omitting 1092 // the field caching the address of the backing store. When we encounter 1093 // short external strings in generated code, we need to bailout to runtime. 1094 Map* new_map; 1095 if (size < ExternalString::kSize || 1096 heap->old_pointer_space()->Contains(this)) { 1097 new_map = is_internalized 1098 ? (is_ascii 1099 ? heap-> 1100 short_external_internalized_string_with_one_byte_data_map() 1101 : heap->short_external_internalized_string_map()) 1102 : (is_ascii 1103 ? heap->short_external_string_with_one_byte_data_map() 1104 : heap->short_external_string_map()); 1105 } else { 1106 new_map = is_internalized 1107 ? (is_ascii 1108 ? heap->external_internalized_string_with_one_byte_data_map() 1109 : heap->external_internalized_string_map()) 1110 : (is_ascii 1111 ? heap->external_string_with_one_byte_data_map() 1112 : heap->external_string_map()); 1113 } 1114 1115 // Byte size of the external String object. 1116 int new_size = this->SizeFromMap(new_map); 1117 heap->CreateFillerObjectAt(this->address() + new_size, size - new_size); 1118 1119 // We are storing the new map using release store after creating a filler for 1120 // the left-over space to avoid races with the sweeper thread. 1121 this->synchronized_set_map(new_map); 1122 1123 ExternalTwoByteString* self = ExternalTwoByteString::cast(this); 1124 self->set_resource(resource); 1125 if (is_internalized) self->Hash(); // Force regeneration of the hash value. 
1126 1127 heap->AdjustLiveBytes(this->address(), new_size - size, Heap::FROM_MUTATOR); 1128 return true; 1129 } 1130 1131 1132 bool String::MakeExternal(v8::String::ExternalAsciiStringResource* resource) { 1133 #ifdef ENABLE_SLOW_ASSERTS 1134 if (FLAG_enable_slow_asserts) { 1135 // Assert that the resource and the string are equivalent. 1136 ASSERT(static_cast<size_t>(this->length()) == resource->length()); 1137 if (this->IsTwoByteRepresentation()) { 1138 ScopedVector<uint16_t> smart_chars(this->length()); 1139 String::WriteToFlat(this, smart_chars.start(), 0, this->length()); 1140 ASSERT(String::IsOneByte(smart_chars.start(), this->length())); 1141 } 1142 ScopedVector<char> smart_chars(this->length()); 1143 String::WriteToFlat(this, smart_chars.start(), 0, this->length()); 1144 ASSERT(memcmp(smart_chars.start(), 1145 resource->data(), 1146 resource->length() * sizeof(smart_chars[0])) == 0); 1147 } 1148 #endif // DEBUG 1149 Heap* heap = GetHeap(); 1150 int size = this->Size(); // Byte size of the original string. 1151 if (size < ExternalString::kShortSize) { 1152 return false; 1153 } 1154 bool is_internalized = this->IsInternalizedString(); 1155 1156 // Morph the string to an external string by replacing the map and 1157 // reinitializing the fields. This won't work if 1158 // - the space the existing string occupies is too small for a regular 1159 // external string. 1160 // - the existing string is in old pointer space and the backing store of 1161 // the external string is not aligned. The GC cannot deal with a field 1162 // containing a possibly unaligned address to outside of V8's heap. 1163 // In either case we resort to a short external string instead, omitting 1164 // the field caching the address of the backing store. When we encounter 1165 // short external strings in generated code, we need to bailout to runtime. 
1166 Map* new_map; 1167 if (size < ExternalString::kSize || 1168 heap->old_pointer_space()->Contains(this)) { 1169 new_map = is_internalized 1170 ? heap->short_external_ascii_internalized_string_map() 1171 : heap->short_external_ascii_string_map(); 1172 } else { 1173 new_map = is_internalized 1174 ? heap->external_ascii_internalized_string_map() 1175 : heap->external_ascii_string_map(); 1176 } 1177 1178 // Byte size of the external String object. 1179 int new_size = this->SizeFromMap(new_map); 1180 heap->CreateFillerObjectAt(this->address() + new_size, size - new_size); 1181 1182 // We are storing the new map using release store after creating a filler for 1183 // the left-over space to avoid races with the sweeper thread. 1184 this->synchronized_set_map(new_map); 1185 1186 ExternalAsciiString* self = ExternalAsciiString::cast(this); 1187 self->set_resource(resource); 1188 if (is_internalized) self->Hash(); // Force regeneration of the hash value. 1189 1190 heap->AdjustLiveBytes(this->address(), new_size - size, Heap::FROM_MUTATOR); 1191 return true; 1192 } 1193 1194 1195 void String::StringShortPrint(StringStream* accumulator) { 1196 int len = length(); 1197 if (len > kMaxShortPrintLength) { 1198 accumulator->Add("<Very long string[%u]>", len); 1199 return; 1200 } 1201 1202 if (!LooksValid()) { 1203 accumulator->Add("<Invalid String>"); 1204 return; 1205 } 1206 1207 ConsStringIteratorOp op; 1208 StringCharacterStream stream(this, &op); 1209 1210 bool truncated = false; 1211 if (len > kMaxShortPrintLength) { 1212 len = kMaxShortPrintLength; 1213 truncated = true; 1214 } 1215 bool ascii = true; 1216 for (int i = 0; i < len; i++) { 1217 uint16_t c = stream.GetNext(); 1218 1219 if (c < 32 || c >= 127) { 1220 ascii = false; 1221 } 1222 } 1223 stream.Reset(this); 1224 if (ascii) { 1225 accumulator->Add("<String[%u]: ", length()); 1226 for (int i = 0; i < len; i++) { 1227 accumulator->Put(static_cast<char>(stream.GetNext())); 1228 } 1229 accumulator->Put('>'); 1230 } 
else { 1231 // Backslash indicates that the string contains control 1232 // characters and that backslashes are therefore escaped. 1233 accumulator->Add("<String[%u]\\: ", length()); 1234 for (int i = 0; i < len; i++) { 1235 uint16_t c = stream.GetNext(); 1236 if (c == '\n') { 1237 accumulator->Add("\\n"); 1238 } else if (c == '\r') { 1239 accumulator->Add("\\r"); 1240 } else if (c == '\\') { 1241 accumulator->Add("\\\\"); 1242 } else if (c < 32 || c > 126) { 1243 accumulator->Add("\\x%02x", c); 1244 } else { 1245 accumulator->Put(static_cast<char>(c)); 1246 } 1247 } 1248 if (truncated) { 1249 accumulator->Put('.'); 1250 accumulator->Put('.'); 1251 accumulator->Put('.'); 1252 } 1253 accumulator->Put('>'); 1254 } 1255 return; 1256 } 1257 1258 1259 void JSObject::JSObjectShortPrint(StringStream* accumulator) { 1260 switch (map()->instance_type()) { 1261 case JS_ARRAY_TYPE: { 1262 double length = JSArray::cast(this)->length()->IsUndefined() 1263 ? 0 1264 : JSArray::cast(this)->length()->Number(); 1265 accumulator->Add("<JS Array[%u]>", static_cast<uint32_t>(length)); 1266 break; 1267 } 1268 case JS_WEAK_MAP_TYPE: { 1269 accumulator->Add("<JS WeakMap>"); 1270 break; 1271 } 1272 case JS_WEAK_SET_TYPE: { 1273 accumulator->Add("<JS WeakSet>"); 1274 break; 1275 } 1276 case JS_REGEXP_TYPE: { 1277 accumulator->Add("<JS RegExp>"); 1278 break; 1279 } 1280 case JS_FUNCTION_TYPE: { 1281 JSFunction* function = JSFunction::cast(this); 1282 Object* fun_name = function->shared()->DebugName(); 1283 bool printed = false; 1284 if (fun_name->IsString()) { 1285 String* str = String::cast(fun_name); 1286 if (str->length() > 0) { 1287 accumulator->Add("<JS Function "); 1288 accumulator->Put(str); 1289 printed = true; 1290 } 1291 } 1292 if (!printed) { 1293 accumulator->Add("<JS Function"); 1294 } 1295 accumulator->Add(" (SharedFunctionInfo %p)", 1296 reinterpret_cast<void*>(function->shared())); 1297 accumulator->Put('>'); 1298 break; 1299 } 1300 case JS_GENERATOR_OBJECT_TYPE: { 1301 
accumulator->Add("<JS Generator>"); 1302 break; 1303 } 1304 case JS_MODULE_TYPE: { 1305 accumulator->Add("<JS Module>"); 1306 break; 1307 } 1308 // All other JSObjects are rather similar to each other (JSObject, 1309 // JSGlobalProxy, JSGlobalObject, JSUndetectableObject, JSValue). 1310 default: { 1311 Map* map_of_this = map(); 1312 Heap* heap = GetHeap(); 1313 Object* constructor = map_of_this->constructor(); 1314 bool printed = false; 1315 if (constructor->IsHeapObject() && 1316 !heap->Contains(HeapObject::cast(constructor))) { 1317 accumulator->Add("!!!INVALID CONSTRUCTOR!!!"); 1318 } else { 1319 bool global_object = IsJSGlobalProxy(); 1320 if (constructor->IsJSFunction()) { 1321 if (!heap->Contains(JSFunction::cast(constructor)->shared())) { 1322 accumulator->Add("!!!INVALID SHARED ON CONSTRUCTOR!!!"); 1323 } else { 1324 Object* constructor_name = 1325 JSFunction::cast(constructor)->shared()->name(); 1326 if (constructor_name->IsString()) { 1327 String* str = String::cast(constructor_name); 1328 if (str->length() > 0) { 1329 bool vowel = AnWord(str); 1330 accumulator->Add("<%sa%s ", 1331 global_object ? "Global Object: " : "", 1332 vowel ? "n" : ""); 1333 accumulator->Put(str); 1334 accumulator->Add(" with %smap %p", 1335 map_of_this->is_deprecated() ? "deprecated " : "", 1336 map_of_this); 1337 printed = true; 1338 } 1339 } 1340 } 1341 } 1342 if (!printed) { 1343 accumulator->Add("<JS %sObject", global_object ? 
"Global " : ""); 1344 } 1345 } 1346 if (IsJSValue()) { 1347 accumulator->Add(" value = "); 1348 JSValue::cast(this)->value()->ShortPrint(accumulator); 1349 } 1350 accumulator->Put('>'); 1351 break; 1352 } 1353 } 1354 } 1355 1356 1357 void JSObject::PrintElementsTransition( 1358 FILE* file, Handle<JSObject> object, 1359 ElementsKind from_kind, Handle<FixedArrayBase> from_elements, 1360 ElementsKind to_kind, Handle<FixedArrayBase> to_elements) { 1361 if (from_kind != to_kind) { 1362 PrintF(file, "elements transition ["); 1363 PrintElementsKind(file, from_kind); 1364 PrintF(file, " -> "); 1365 PrintElementsKind(file, to_kind); 1366 PrintF(file, "] in "); 1367 JavaScriptFrame::PrintTop(object->GetIsolate(), file, false, true); 1368 PrintF(file, " for "); 1369 object->ShortPrint(file); 1370 PrintF(file, " from "); 1371 from_elements->ShortPrint(file); 1372 PrintF(file, " to "); 1373 to_elements->ShortPrint(file); 1374 PrintF(file, "\n"); 1375 } 1376 } 1377 1378 1379 void Map::PrintGeneralization(FILE* file, 1380 const char* reason, 1381 int modify_index, 1382 int split, 1383 int descriptors, 1384 bool constant_to_field, 1385 Representation old_representation, 1386 Representation new_representation, 1387 HeapType* old_field_type, 1388 HeapType* new_field_type) { 1389 PrintF(file, "[generalizing "); 1390 constructor_name()->PrintOn(file); 1391 PrintF(file, "] "); 1392 Name* name = instance_descriptors()->GetKey(modify_index); 1393 if (name->IsString()) { 1394 String::cast(name)->PrintOn(file); 1395 } else { 1396 PrintF(file, "{symbol %p}", static_cast<void*>(name)); 1397 } 1398 PrintF(file, ":"); 1399 if (constant_to_field) { 1400 PrintF(file, "c"); 1401 } else { 1402 PrintF(file, "%s", old_representation.Mnemonic()); 1403 PrintF(file, "{"); 1404 old_field_type->TypePrint(file, HeapType::SEMANTIC_DIM); 1405 PrintF(file, "}"); 1406 } 1407 PrintF(file, "->%s", new_representation.Mnemonic()); 1408 PrintF(file, "{"); 1409 new_field_type->TypePrint(file, 
HeapType::SEMANTIC_DIM); 1410 PrintF(file, "}"); 1411 PrintF(file, " ("); 1412 if (strlen(reason) > 0) { 1413 PrintF(file, "%s", reason); 1414 } else { 1415 PrintF(file, "+%i maps", descriptors - split); 1416 } 1417 PrintF(file, ") ["); 1418 JavaScriptFrame::PrintTop(GetIsolate(), file, false, true); 1419 PrintF(file, "]\n"); 1420 } 1421 1422 1423 void JSObject::PrintInstanceMigration(FILE* file, 1424 Map* original_map, 1425 Map* new_map) { 1426 PrintF(file, "[migrating "); 1427 map()->constructor_name()->PrintOn(file); 1428 PrintF(file, "] "); 1429 DescriptorArray* o = original_map->instance_descriptors(); 1430 DescriptorArray* n = new_map->instance_descriptors(); 1431 for (int i = 0; i < original_map->NumberOfOwnDescriptors(); i++) { 1432 Representation o_r = o->GetDetails(i).representation(); 1433 Representation n_r = n->GetDetails(i).representation(); 1434 if (!o_r.Equals(n_r)) { 1435 String::cast(o->GetKey(i))->PrintOn(file); 1436 PrintF(file, ":%s->%s ", o_r.Mnemonic(), n_r.Mnemonic()); 1437 } else if (o->GetDetails(i).type() == CONSTANT && 1438 n->GetDetails(i).type() == FIELD) { 1439 Name* name = o->GetKey(i); 1440 if (name->IsString()) { 1441 String::cast(name)->PrintOn(file); 1442 } else { 1443 PrintF(file, "{symbol %p}", static_cast<void*>(name)); 1444 } 1445 PrintF(file, " "); 1446 } 1447 } 1448 PrintF(file, "\n"); 1449 } 1450 1451 1452 void HeapObject::HeapObjectShortPrint(StringStream* accumulator) { 1453 Heap* heap = GetHeap(); 1454 if (!heap->Contains(this)) { 1455 accumulator->Add("!!!INVALID POINTER!!!"); 1456 return; 1457 } 1458 if (!heap->Contains(map())) { 1459 accumulator->Add("!!!INVALID MAP!!!"); 1460 return; 1461 } 1462 1463 accumulator->Add("%p ", this); 1464 1465 if (IsString()) { 1466 String::cast(this)->StringShortPrint(accumulator); 1467 return; 1468 } 1469 if (IsJSObject()) { 1470 JSObject::cast(this)->JSObjectShortPrint(accumulator); 1471 return; 1472 } 1473 switch (map()->instance_type()) { 1474 case MAP_TYPE: 1475 
accumulator->Add("<Map(elements=%u)>", Map::cast(this)->elements_kind()); 1476 break; 1477 case FIXED_ARRAY_TYPE: 1478 accumulator->Add("<FixedArray[%u]>", FixedArray::cast(this)->length()); 1479 break; 1480 case FIXED_DOUBLE_ARRAY_TYPE: 1481 accumulator->Add("<FixedDoubleArray[%u]>", 1482 FixedDoubleArray::cast(this)->length()); 1483 break; 1484 case BYTE_ARRAY_TYPE: 1485 accumulator->Add("<ByteArray[%u]>", ByteArray::cast(this)->length()); 1486 break; 1487 case FREE_SPACE_TYPE: 1488 accumulator->Add("<FreeSpace[%u]>", FreeSpace::cast(this)->Size()); 1489 break; 1490 #define TYPED_ARRAY_SHORT_PRINT(Type, type, TYPE, ctype, size) \ 1491 case EXTERNAL_##TYPE##_ARRAY_TYPE: \ 1492 accumulator->Add("<External" #Type "Array[%u]>", \ 1493 External##Type##Array::cast(this)->length()); \ 1494 break; \ 1495 case FIXED_##TYPE##_ARRAY_TYPE: \ 1496 accumulator->Add("<Fixed" #Type "Array[%u]>", \ 1497 Fixed##Type##Array::cast(this)->length()); \ 1498 break; 1499 1500 TYPED_ARRAYS(TYPED_ARRAY_SHORT_PRINT) 1501 #undef TYPED_ARRAY_SHORT_PRINT 1502 1503 case SHARED_FUNCTION_INFO_TYPE: { 1504 SharedFunctionInfo* shared = SharedFunctionInfo::cast(this); 1505 SmartArrayPointer<char> debug_name = 1506 shared->DebugName()->ToCString(); 1507 if (debug_name[0] != 0) { 1508 accumulator->Add("<SharedFunctionInfo %s>", debug_name.get()); 1509 } else { 1510 accumulator->Add("<SharedFunctionInfo>"); 1511 } 1512 break; 1513 } 1514 case JS_MESSAGE_OBJECT_TYPE: 1515 accumulator->Add("<JSMessageObject>"); 1516 break; 1517 #define MAKE_STRUCT_CASE(NAME, Name, name) \ 1518 case NAME##_TYPE: \ 1519 accumulator->Put('<'); \ 1520 accumulator->Add(#Name); \ 1521 accumulator->Put('>'); \ 1522 break; 1523 STRUCT_LIST(MAKE_STRUCT_CASE) 1524 #undef MAKE_STRUCT_CASE 1525 case CODE_TYPE: 1526 accumulator->Add("<Code>"); 1527 break; 1528 case ODDBALL_TYPE: { 1529 if (IsUndefined()) 1530 accumulator->Add("<undefined>"); 1531 else if (IsTheHole()) 1532 accumulator->Add("<the hole>"); 1533 else if (IsNull()) 1534 
accumulator->Add("<null>"); 1535 else if (IsTrue()) 1536 accumulator->Add("<true>"); 1537 else if (IsFalse()) 1538 accumulator->Add("<false>"); 1539 else 1540 accumulator->Add("<Odd Oddball>"); 1541 break; 1542 } 1543 case SYMBOL_TYPE: { 1544 Symbol* symbol = Symbol::cast(this); 1545 accumulator->Add("<Symbol: %d", symbol->Hash()); 1546 if (!symbol->name()->IsUndefined()) { 1547 accumulator->Add(" "); 1548 String::cast(symbol->name())->StringShortPrint(accumulator); 1549 } 1550 accumulator->Add(">"); 1551 break; 1552 } 1553 case HEAP_NUMBER_TYPE: 1554 accumulator->Add("<Number: "); 1555 HeapNumber::cast(this)->HeapNumberPrint(accumulator); 1556 accumulator->Put('>'); 1557 break; 1558 case JS_PROXY_TYPE: 1559 accumulator->Add("<JSProxy>"); 1560 break; 1561 case JS_FUNCTION_PROXY_TYPE: 1562 accumulator->Add("<JSFunctionProxy>"); 1563 break; 1564 case FOREIGN_TYPE: 1565 accumulator->Add("<Foreign>"); 1566 break; 1567 case CELL_TYPE: 1568 accumulator->Add("Cell for "); 1569 Cell::cast(this)->value()->ShortPrint(accumulator); 1570 break; 1571 case PROPERTY_CELL_TYPE: 1572 accumulator->Add("PropertyCell for "); 1573 PropertyCell::cast(this)->value()->ShortPrint(accumulator); 1574 break; 1575 default: 1576 accumulator->Add("<Other heap object (%d)>", map()->instance_type()); 1577 break; 1578 } 1579 } 1580 1581 1582 void HeapObject::Iterate(ObjectVisitor* v) { 1583 // Handle header 1584 IteratePointer(v, kMapOffset); 1585 // Handle object body 1586 Map* m = map(); 1587 IterateBody(m->instance_type(), SizeFromMap(m), v); 1588 } 1589 1590 1591 void HeapObject::IterateBody(InstanceType type, int object_size, 1592 ObjectVisitor* v) { 1593 // Avoiding <Type>::cast(this) because it accesses the map pointer field. 1594 // During GC, the map pointer field is encoded. 
1595 if (type < FIRST_NONSTRING_TYPE) { 1596 switch (type & kStringRepresentationMask) { 1597 case kSeqStringTag: 1598 break; 1599 case kConsStringTag: 1600 ConsString::BodyDescriptor::IterateBody(this, v); 1601 break; 1602 case kSlicedStringTag: 1603 SlicedString::BodyDescriptor::IterateBody(this, v); 1604 break; 1605 case kExternalStringTag: 1606 if ((type & kStringEncodingMask) == kOneByteStringTag) { 1607 reinterpret_cast<ExternalAsciiString*>(this)-> 1608 ExternalAsciiStringIterateBody(v); 1609 } else { 1610 reinterpret_cast<ExternalTwoByteString*>(this)-> 1611 ExternalTwoByteStringIterateBody(v); 1612 } 1613 break; 1614 } 1615 return; 1616 } 1617 1618 switch (type) { 1619 case FIXED_ARRAY_TYPE: 1620 FixedArray::BodyDescriptor::IterateBody(this, object_size, v); 1621 break; 1622 case CONSTANT_POOL_ARRAY_TYPE: 1623 reinterpret_cast<ConstantPoolArray*>(this)->ConstantPoolIterateBody(v); 1624 break; 1625 case FIXED_DOUBLE_ARRAY_TYPE: 1626 break; 1627 case JS_OBJECT_TYPE: 1628 case JS_CONTEXT_EXTENSION_OBJECT_TYPE: 1629 case JS_GENERATOR_OBJECT_TYPE: 1630 case JS_MODULE_TYPE: 1631 case JS_VALUE_TYPE: 1632 case JS_DATE_TYPE: 1633 case JS_ARRAY_TYPE: 1634 case JS_ARRAY_BUFFER_TYPE: 1635 case JS_TYPED_ARRAY_TYPE: 1636 case JS_DATA_VIEW_TYPE: 1637 case JS_SET_TYPE: 1638 case JS_MAP_TYPE: 1639 case JS_SET_ITERATOR_TYPE: 1640 case JS_MAP_ITERATOR_TYPE: 1641 case JS_WEAK_MAP_TYPE: 1642 case JS_WEAK_SET_TYPE: 1643 case JS_REGEXP_TYPE: 1644 case JS_GLOBAL_PROXY_TYPE: 1645 case JS_GLOBAL_OBJECT_TYPE: 1646 case JS_BUILTINS_OBJECT_TYPE: 1647 case JS_MESSAGE_OBJECT_TYPE: 1648 JSObject::BodyDescriptor::IterateBody(this, object_size, v); 1649 break; 1650 case JS_FUNCTION_TYPE: 1651 reinterpret_cast<JSFunction*>(this) 1652 ->JSFunctionIterateBody(object_size, v); 1653 break; 1654 case ODDBALL_TYPE: 1655 Oddball::BodyDescriptor::IterateBody(this, v); 1656 break; 1657 case JS_PROXY_TYPE: 1658 JSProxy::BodyDescriptor::IterateBody(this, v); 1659 break; 1660 case 
JS_FUNCTION_PROXY_TYPE: 1661 JSFunctionProxy::BodyDescriptor::IterateBody(this, v); 1662 break; 1663 case FOREIGN_TYPE: 1664 reinterpret_cast<Foreign*>(this)->ForeignIterateBody(v); 1665 break; 1666 case MAP_TYPE: 1667 Map::BodyDescriptor::IterateBody(this, v); 1668 break; 1669 case CODE_TYPE: 1670 reinterpret_cast<Code*>(this)->CodeIterateBody(v); 1671 break; 1672 case CELL_TYPE: 1673 Cell::BodyDescriptor::IterateBody(this, v); 1674 break; 1675 case PROPERTY_CELL_TYPE: 1676 PropertyCell::BodyDescriptor::IterateBody(this, v); 1677 break; 1678 case SYMBOL_TYPE: 1679 Symbol::BodyDescriptor::IterateBody(this, v); 1680 break; 1681 1682 case HEAP_NUMBER_TYPE: 1683 case FILLER_TYPE: 1684 case BYTE_ARRAY_TYPE: 1685 case FREE_SPACE_TYPE: 1686 break; 1687 1688 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \ 1689 case EXTERNAL_##TYPE##_ARRAY_TYPE: \ 1690 case FIXED_##TYPE##_ARRAY_TYPE: \ 1691 break; 1692 1693 TYPED_ARRAYS(TYPED_ARRAY_CASE) 1694 #undef TYPED_ARRAY_CASE 1695 1696 case SHARED_FUNCTION_INFO_TYPE: { 1697 SharedFunctionInfo::BodyDescriptor::IterateBody(this, v); 1698 break; 1699 } 1700 1701 #define MAKE_STRUCT_CASE(NAME, Name, name) \ 1702 case NAME##_TYPE: 1703 STRUCT_LIST(MAKE_STRUCT_CASE) 1704 #undef MAKE_STRUCT_CASE 1705 if (type == ALLOCATION_SITE_TYPE) { 1706 AllocationSite::BodyDescriptor::IterateBody(this, v); 1707 } else { 1708 StructBodyDescriptor::IterateBody(this, object_size, v); 1709 } 1710 break; 1711 default: 1712 PrintF("Unknown type: %d\n", type); 1713 UNREACHABLE(); 1714 } 1715 } 1716 1717 1718 bool HeapNumber::HeapNumberBooleanValue() { 1719 // NaN, +0, and -0 should return the false object 1720 #if __BYTE_ORDER == __LITTLE_ENDIAN 1721 union IeeeDoubleLittleEndianArchType u; 1722 #elif __BYTE_ORDER == __BIG_ENDIAN 1723 union IeeeDoubleBigEndianArchType u; 1724 #endif 1725 u.d = value(); 1726 if (u.bits.exp == 2047) { 1727 // Detect NaN for IEEE double precision floating point. 
1728 if ((u.bits.man_low | u.bits.man_high) != 0) return false; 1729 } 1730 if (u.bits.exp == 0) { 1731 // Detect +0, and -0 for IEEE double precision floating point. 1732 if ((u.bits.man_low | u.bits.man_high) == 0) return false; 1733 } 1734 return true; 1735 } 1736 1737 1738 void HeapNumber::HeapNumberPrint(FILE* out) { 1739 PrintF(out, "%.16g", Number()); 1740 } 1741 1742 1743 void HeapNumber::HeapNumberPrint(StringStream* accumulator) { 1744 // The Windows version of vsnprintf can allocate when printing a %g string 1745 // into a buffer that may not be big enough. We don't want random memory 1746 // allocation when producing post-crash stack traces, so we print into a 1747 // buffer that is plenty big enough for any floating point number, then 1748 // print that using vsnprintf (which may truncate but never allocate if 1749 // there is no more space in the buffer). 1750 EmbeddedVector<char, 100> buffer; 1751 SNPrintF(buffer, "%.16g", Number()); 1752 accumulator->Add("%s", buffer.start()); 1753 } 1754 1755 1756 String* JSReceiver::class_name() { 1757 if (IsJSFunction() && IsJSFunctionProxy()) { 1758 return GetHeap()->function_class_string(); 1759 } 1760 if (map()->constructor()->IsJSFunction()) { 1761 JSFunction* constructor = JSFunction::cast(map()->constructor()); 1762 return String::cast(constructor->shared()->instance_class_name()); 1763 } 1764 // If the constructor is not present, return "Object". 
1765 return GetHeap()->Object_string(); 1766 } 1767 1768 1769 String* Map::constructor_name() { 1770 if (constructor()->IsJSFunction()) { 1771 JSFunction* constructor = JSFunction::cast(this->constructor()); 1772 String* name = String::cast(constructor->shared()->name()); 1773 if (name->length() > 0) return name; 1774 String* inferred_name = constructor->shared()->inferred_name(); 1775 if (inferred_name->length() > 0) return inferred_name; 1776 Object* proto = prototype(); 1777 if (proto->IsJSObject()) return JSObject::cast(proto)->constructor_name(); 1778 } 1779 // TODO(rossberg): what about proxies? 1780 // If the constructor is not present, return "Object". 1781 return GetHeap()->Object_string(); 1782 } 1783 1784 1785 String* JSReceiver::constructor_name() { 1786 return map()->constructor_name(); 1787 } 1788 1789 1790 MaybeHandle<Map> Map::CopyWithField(Handle<Map> map, 1791 Handle<Name> name, 1792 Handle<HeapType> type, 1793 PropertyAttributes attributes, 1794 Representation representation, 1795 TransitionFlag flag) { 1796 ASSERT(DescriptorArray::kNotFound == 1797 map->instance_descriptors()->Search( 1798 *name, map->NumberOfOwnDescriptors())); 1799 1800 // Ensure the descriptor array does not get too big. 1801 if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) { 1802 return MaybeHandle<Map>(); 1803 } 1804 1805 Isolate* isolate = map->GetIsolate(); 1806 1807 // Compute the new index for new field. 
1808 int index = map->NextFreePropertyIndex(); 1809 1810 if (map->instance_type() == JS_CONTEXT_EXTENSION_OBJECT_TYPE) { 1811 representation = Representation::Tagged(); 1812 type = HeapType::Any(isolate); 1813 } 1814 1815 FieldDescriptor new_field_desc(name, index, type, attributes, representation); 1816 Handle<Map> new_map = Map::CopyAddDescriptor(map, &new_field_desc, flag); 1817 int unused_property_fields = new_map->unused_property_fields() - 1; 1818 if (unused_property_fields < 0) { 1819 unused_property_fields += JSObject::kFieldsAdded; 1820 } 1821 new_map->set_unused_property_fields(unused_property_fields); 1822 return new_map; 1823 } 1824 1825 1826 MaybeHandle<Map> Map::CopyWithConstant(Handle<Map> map, 1827 Handle<Name> name, 1828 Handle<Object> constant, 1829 PropertyAttributes attributes, 1830 TransitionFlag flag) { 1831 // Ensure the descriptor array does not get too big. 1832 if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) { 1833 return MaybeHandle<Map>(); 1834 } 1835 1836 // Allocate new instance descriptors with (name, constant) added. 
1837 ConstantDescriptor new_constant_desc(name, constant, attributes); 1838 return Map::CopyAddDescriptor(map, &new_constant_desc, flag); 1839 } 1840 1841 1842 void JSObject::AddFastProperty(Handle<JSObject> object, 1843 Handle<Name> name, 1844 Handle<Object> value, 1845 PropertyAttributes attributes, 1846 StoreFromKeyed store_mode, 1847 ValueType value_type, 1848 TransitionFlag flag) { 1849 ASSERT(!object->IsJSGlobalProxy()); 1850 1851 MaybeHandle<Map> maybe_map; 1852 if (value->IsJSFunction()) { 1853 maybe_map = Map::CopyWithConstant( 1854 handle(object->map()), name, value, attributes, flag); 1855 } else if (!object->TooManyFastProperties(store_mode)) { 1856 Isolate* isolate = object->GetIsolate(); 1857 Representation representation = value->OptimalRepresentation(value_type); 1858 maybe_map = Map::CopyWithField( 1859 handle(object->map(), isolate), name, 1860 value->OptimalType(isolate, representation), 1861 attributes, representation, flag); 1862 } 1863 1864 Handle<Map> new_map; 1865 if (!maybe_map.ToHandle(&new_map)) { 1866 NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0); 1867 return; 1868 } 1869 1870 JSObject::MigrateToNewProperty(object, new_map, value); 1871 } 1872 1873 1874 void JSObject::AddSlowProperty(Handle<JSObject> object, 1875 Handle<Name> name, 1876 Handle<Object> value, 1877 PropertyAttributes attributes) { 1878 ASSERT(!object->HasFastProperties()); 1879 Isolate* isolate = object->GetIsolate(); 1880 Handle<NameDictionary> dict(object->property_dictionary()); 1881 if (object->IsGlobalObject()) { 1882 // In case name is an orphaned property reuse the cell. 1883 int entry = dict->FindEntry(name); 1884 if (entry != NameDictionary::kNotFound) { 1885 Handle<PropertyCell> cell(PropertyCell::cast(dict->ValueAt(entry))); 1886 PropertyCell::SetValueInferType(cell, value); 1887 // Assign an enumeration index to the property and update 1888 // SetNextEnumerationIndex. 
      // Reuse the orphaned cell: give the property a fresh enumeration index
      // so iteration order reflects (re-)insertion order.
      int index = dict->NextEnumerationIndex();
      PropertyDetails details = PropertyDetails(attributes, NORMAL, index);
      dict->SetNextEnumerationIndex(index + 1);
      dict->SetEntry(entry, name, cell, details);
      return;
    }
    Handle<PropertyCell> cell = isolate->factory()->NewPropertyCell(value);
    PropertyCell::SetValueInferType(cell, value);
    value = cell;
  }
  PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
  Handle<NameDictionary> result =
      NameDictionary::Add(dict, name, value, details);
  // Add may have grown/reallocated the dictionary; install the new store.
  if (*dict != *result) object->set_properties(*result);
}


// Adds a new own property to |object|. Dispatches to fast (descriptor-based)
// or slow (dictionary) storage depending on the object's current state, and
// fires an Object.observe "add" record for observed objects. Returns |value|
// on success, or throws a TypeError if the object is not extensible and
// |strict_mode| is strict.
MaybeHandle<Object> JSObject::AddProperty(
    Handle<JSObject> object,
    Handle<Name> name,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictMode strict_mode,
    JSReceiver::StoreFromKeyed store_mode,
    ExtensibilityCheck extensibility_check,
    ValueType value_type,
    StoreMode mode,
    TransitionFlag transition_flag) {
  ASSERT(!object->IsJSGlobalProxy());
  Isolate* isolate = object->GetIsolate();

  // Property tables require unique (internalized or symbol) keys.
  if (!name->IsUniqueName()) {
    name = isolate->factory()->InternalizeString(
        Handle<String>::cast(name));
  }

  if (extensibility_check == PERFORM_EXTENSIBILITY_CHECK &&
      !object->map()->is_extensible()) {
    if (strict_mode == SLOPPY) {
      // Sloppy mode silently ignores the failed add.
      return value;
    } else {
      Handle<Object> args[1] = { name };
      Handle<Object> error = isolate->factory()->NewTypeError(
          "object_not_extensible", HandleVector(args, ARRAY_SIZE(args)));
      return isolate->Throw<Object>(error);
    }
  }

  if (object->HasFastProperties()) {
    AddFastProperty(object, name, value, attributes, store_mode,
                    value_type, transition_flag);
  }

  // Re-check: AddFastProperty may have normalized the object into
  // dictionary mode, in which case the slow path does the actual add.
  if (!object->HasFastProperties()) {
    AddSlowProperty(object, name, value, attributes);
  }

  if (object->map()->is_observed() &&
      *name != isolate->heap()->hidden_string()) {
    Handle<Object> old_value = isolate->factory()->the_hole_value();
    EnqueueChangeRecord(object, "add", name, old_value);
  }

  return value;
}


// Returns the native context in which this object was created, derived from
// the constructor stored in the object's map.
Context* JSObject::GetCreationContext() {
  Object* constructor = this->map()->constructor();
  JSFunction* function;
  if (!constructor->IsJSFunction()) {
    // Functions have null as a constructor,
    // but any JSFunction knows its context immediately.
    function = JSFunction::cast(this);
  } else {
    function = JSFunction::cast(constructor);
  }

  return function->context()->native_context();
}


// Delivers an Object.observe change record of kind |type_str| for |name| on
// |object| by calling the isolate's observer-notification JS function.
void JSObject::EnqueueChangeRecord(Handle<JSObject> object,
                                   const char* type_str,
                                   Handle<Name> name,
                                   Handle<Object> old_value) {
  ASSERT(!object->IsJSGlobalProxy());
  ASSERT(!object->IsJSGlobalObject());
  Isolate* isolate = object->GetIsolate();
  HandleScope scope(isolate);
  Handle<String> type = isolate->factory()->InternalizeUtf8String(type_str);
  Handle<Object> args[] = { type, object, name, old_value };
  // Trailing arguments are optional: omit |name| and/or |old_value| (the
  // hole marks "no old value") so the JS callee sees only meaningful args.
  int argc = name.is_null() ? 2 : old_value->IsTheHole() ? 3 : 4;

  Execution::Call(isolate,
                  Handle<JSFunction>(isolate->observers_notify_change()),
                  isolate->factory()->undefined_value(),
                  argc, args).Assert();
}


// Stores a property after an interceptor declined to handle the store.
MaybeHandle<Object> JSObject::SetPropertyPostInterceptor(
    Handle<JSObject> object,
    Handle<Name> name,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictMode strict_mode) {
  // Check own property, ignore interceptor.
  Isolate* isolate = object->GetIsolate();
  LookupResult result(isolate);
  object->LookupOwnRealNamedProperty(name, &result);
  if (!result.IsFound()) {
    // Fall back to a map transition for this name, if one exists.
    object->map()->LookupTransition(*object, *name, &result);
  }
  return SetPropertyForResult(object, &result, name, value, attributes,
                              strict_mode, MAY_BE_STORE_FROM_KEYED);
}


// Overwrites a dictionary-mode property in place, preserving its existing
// enumeration index so for-in order is unchanged.
static void ReplaceSlowProperty(Handle<JSObject> object,
                                Handle<Name> name,
                                Handle<Object> value,
                                PropertyAttributes attributes) {
  NameDictionary* dictionary = object->property_dictionary();
  int old_index = dictionary->FindEntry(name);
  int new_enumeration_index = 0;  // 0 means "Use the next available index."
  if (old_index != -1) {
    // All calls to ReplaceSlowProperty have had all transitions removed.
    new_enumeration_index = dictionary->DetailsAt(old_index).dictionary_index();
  }

  PropertyDetails new_details(attributes, NORMAL, new_enumeration_index);
  JSObject::SetNormalizedProperty(object, name, value, new_details);
}


// Single-letter mnemonic for a representation kind, used in trace output.
const char* Representation::Mnemonic() const {
  switch (kind_) {
    case kNone: return "v";
    case kTagged: return "t";
    case kSmi: return "s";
    case kDouble: return "d";
    case kInteger32: return "i";
    case kHeapObject: return "h";
    case kExternal: return "x";
    default:
      UNREACHABLE();
      return NULL;
  }
}


// Overwrites the trimmed-off tail of a fixed array with Smi zeros so stale
// pointers are not left behind in old space.
static void ZapEndOfFixedArray(Address new_end, int to_trim) {
  // If we are doing a big trim in old space then we zap the space.
  Object** zap = reinterpret_cast<Object**>(new_end);
  zap++;  // Header of filler must be at least one word so skip that.
  for (int i = 1; i < to_trim; i++) {
    *zap++ = Smi::FromInt(0);
  }
}


// Shrinks |elms| by |to_trim| elements from the end, creating a filler
// object over the freed space and updating heap bookkeeping. |mode| states
// whether the caller is the GC or the mutator.
template<Heap::InvocationMode mode>
static void RightTrimFixedArray(Heap* heap, FixedArray* elms, int to_trim) {
  ASSERT(elms->map() != heap->fixed_cow_array_map());
  // For now this trick is only applied to fixed arrays in new and paged space.
  ASSERT(!heap->lo_space()->Contains(elms));

  const int len = elms->length();

  ASSERT(to_trim < len);

  Address new_end = elms->address() + FixedArray::SizeFor(len - to_trim);

  if (mode != Heap::FROM_GC || Heap::ShouldZapGarbage()) {
    ZapEndOfFixedArray(new_end, to_trim);
  }

  int size_delta = to_trim * kPointerSize;

  // Technically in new space this write might be omitted (except for
  // debug mode which iterates through the heap), but to play safer
  // we still do it.
  heap->CreateFillerObjectAt(new_end, size_delta);

  // We are storing the new length using release store after creating a filler
  // for the left-over space to avoid races with the sweeper thread.
  elms->synchronized_set_length(len - to_trim);

  heap->AdjustLiveBytes(elms->address(), -size_delta, mode);

  // The array may not be moved during GC,
  // and size has to be adjusted nevertheless.
  HeapProfiler* profiler = heap->isolate()->heap_profiler();
  if (profiler->is_tracking_allocations()) {
    profiler->UpdateObjectSizeEvent(elms->address(), elms->Size());
  }
}


// Decides whether instances must be rewritten (fields copied/boxed) when
// migrating from this map to |target|, or whether swapping the map pointer
// alone is sufficient.
bool Map::InstancesNeedRewriting(Map* target,
                                 int target_number_of_fields,
                                 int target_inobject,
                                 int target_unused) {
  // If fields were added (or removed), rewrite the instance.
  int number_of_fields = NumberOfFields();
  ASSERT(target_number_of_fields >= number_of_fields);
  if (target_number_of_fields != number_of_fields) return true;

  // If smi descriptors were replaced by double descriptors, rewrite.
  DescriptorArray* old_desc = instance_descriptors();
  DescriptorArray* new_desc = target->instance_descriptors();
  int limit = NumberOfOwnDescriptors();
  for (int i = 0; i < limit; i++) {
    if (new_desc->GetDetails(i).representation().IsDouble() &&
        !old_desc->GetDetails(i).representation().IsDouble()) {
      return true;
    }
  }

  // If no fields were added, and no inobject properties were removed, setting
  // the map is sufficient.
  if (target_inobject == inobject_properties()) return false;
  // In-object slack tracking may have reduced the object size of the new map.
  // In that case, succeed if all existing fields were inobject, and they still
  // fit within the new inobject size.
  ASSERT(target_inobject < inobject_properties());
  if (target_number_of_fields <= target_inobject) {
    ASSERT(target_number_of_fields + target_unused == target_inobject);
    return false;
  }
  // Otherwise, properties will need to be moved to the backing store.
  return true;
}


// Records |transitioned_map| as |map|'s elements-kind transition target by
// inserting it under the elements-transition symbol.
Handle<TransitionArray> Map::SetElementsTransitionMap(
    Handle<Map> map, Handle<Map> transitioned_map) {
  Handle<TransitionArray> transitions = TransitionArray::CopyInsert(
      map,
      map->GetIsolate()->factory()->elements_transition_symbol(),
      transitioned_map,
      FULL_TRANSITION);
  map->set_transitions(*transitions);
  return transitions;
}


// To migrate an instance to a map:
// - First check whether the instance needs to be rewritten. If not, simply
//   change the map.
// - Otherwise, allocate a fixed array large enough to hold all fields, in
//   addition to unused space.
// - Copy all existing properties in, in the following order: backing store
//   properties, unused fields, inobject properties.
2144 // - If all allocation succeeded, commit the state atomically: 2145 // * Copy inobject properties from the backing store back into the object. 2146 // * Trim the difference in instance size of the object. This also cleanly 2147 // frees inobject properties that moved to the backing store. 2148 // * If there are properties left in the backing store, trim of the space used 2149 // to temporarily store the inobject properties. 2150 // * If there are properties left in the backing store, install the backing 2151 // store. 2152 void JSObject::MigrateToMap(Handle<JSObject> object, Handle<Map> new_map) { 2153 Isolate* isolate = object->GetIsolate(); 2154 Handle<Map> old_map(object->map()); 2155 int number_of_fields = new_map->NumberOfFields(); 2156 int inobject = new_map->inobject_properties(); 2157 int unused = new_map->unused_property_fields(); 2158 2159 // Nothing to do if no functions were converted to fields and no smis were 2160 // converted to doubles. 2161 if (!old_map->InstancesNeedRewriting( 2162 *new_map, number_of_fields, inobject, unused)) { 2163 // Writing the new map here does not require synchronization since it does 2164 // not change the actual object size. 2165 object->synchronized_set_map(*new_map); 2166 return; 2167 } 2168 2169 int total_size = number_of_fields + unused; 2170 int external = total_size - inobject; 2171 Handle<FixedArray> array = isolate->factory()->NewFixedArray(total_size); 2172 2173 Handle<DescriptorArray> old_descriptors(old_map->instance_descriptors()); 2174 Handle<DescriptorArray> new_descriptors(new_map->instance_descriptors()); 2175 int old_nof = old_map->NumberOfOwnDescriptors(); 2176 int new_nof = new_map->NumberOfOwnDescriptors(); 2177 2178 // This method only supports generalizing instances to at least the same 2179 // number of properties. 
2180 ASSERT(old_nof <= new_nof); 2181 2182 for (int i = 0; i < old_nof; i++) { 2183 PropertyDetails details = new_descriptors->GetDetails(i); 2184 if (details.type() != FIELD) continue; 2185 PropertyDetails old_details = old_descriptors->GetDetails(i); 2186 if (old_details.type() == CALLBACKS) { 2187 ASSERT(details.representation().IsTagged()); 2188 continue; 2189 } 2190 ASSERT(old_details.type() == CONSTANT || 2191 old_details.type() == FIELD); 2192 Object* raw_value = old_details.type() == CONSTANT 2193 ? old_descriptors->GetValue(i) 2194 : object->RawFastPropertyAt(FieldIndex::ForDescriptor(*old_map, i)); 2195 Handle<Object> value(raw_value, isolate); 2196 if (!old_details.representation().IsDouble() && 2197 details.representation().IsDouble()) { 2198 if (old_details.representation().IsNone()) { 2199 value = handle(Smi::FromInt(0), isolate); 2200 } 2201 value = Object::NewStorageFor(isolate, value, details.representation()); 2202 } 2203 ASSERT(!(details.representation().IsDouble() && value->IsSmi())); 2204 int target_index = new_descriptors->GetFieldIndex(i) - inobject; 2205 if (target_index < 0) target_index += total_size; 2206 array->set(target_index, *value); 2207 } 2208 2209 for (int i = old_nof; i < new_nof; i++) { 2210 PropertyDetails details = new_descriptors->GetDetails(i); 2211 if (details.type() != FIELD) continue; 2212 Handle<Object> value; 2213 if (details.representation().IsDouble()) { 2214 value = isolate->factory()->NewHeapNumber(0); 2215 } else { 2216 value = isolate->factory()->uninitialized_value(); 2217 } 2218 int target_index = new_descriptors->GetFieldIndex(i) - inobject; 2219 if (target_index < 0) target_index += total_size; 2220 array->set(target_index, *value); 2221 } 2222 2223 // From here on we cannot fail and we shouldn't GC anymore. 2224 DisallowHeapAllocation no_allocation; 2225 2226 // Copy (real) inobject properties. If necessary, stop at number_of_fields to 2227 // avoid overwriting |one_pointer_filler_map|. 
2228 int limit = Min(inobject, number_of_fields); 2229 for (int i = 0; i < limit; i++) { 2230 FieldIndex index = FieldIndex::ForPropertyIndex(*new_map, i); 2231 object->FastPropertyAtPut(index, array->get(external + i)); 2232 } 2233 2234 // Create filler object past the new instance size. 2235 int new_instance_size = new_map->instance_size(); 2236 int instance_size_delta = old_map->instance_size() - new_instance_size; 2237 ASSERT(instance_size_delta >= 0); 2238 Address address = object->address() + new_instance_size; 2239 2240 // The trimming is performed on a newly allocated object, which is on a 2241 // fresly allocated page or on an already swept page. Hence, the sweeper 2242 // thread can not get confused with the filler creation. No synchronization 2243 // needed. 2244 isolate->heap()->CreateFillerObjectAt(address, instance_size_delta); 2245 2246 // If there are properties in the new backing store, trim it to the correct 2247 // size and install the backing store into the object. 2248 if (external > 0) { 2249 RightTrimFixedArray<Heap::FROM_MUTATOR>(isolate->heap(), *array, inobject); 2250 object->set_properties(*array); 2251 } 2252 2253 // The trimming is performed on a newly allocated object, which is on a 2254 // fresly allocated page or on an already swept page. Hence, the sweeper 2255 // thread can not get confused with the filler creation. No synchronization 2256 // needed. 
2257 object->set_map(*new_map); 2258 } 2259 2260 2261 void JSObject::GeneralizeFieldRepresentation(Handle<JSObject> object, 2262 int modify_index, 2263 Representation new_representation, 2264 Handle<HeapType> new_field_type, 2265 StoreMode store_mode) { 2266 Handle<Map> new_map = Map::GeneralizeRepresentation( 2267 handle(object->map()), modify_index, new_representation, 2268 new_field_type, store_mode); 2269 if (object->map() == *new_map) return; 2270 return MigrateToMap(object, new_map); 2271 } 2272 2273 2274 int Map::NumberOfFields() { 2275 DescriptorArray* descriptors = instance_descriptors(); 2276 int result = 0; 2277 for (int i = 0; i < NumberOfOwnDescriptors(); i++) { 2278 if (descriptors->GetDetails(i).type() == FIELD) result++; 2279 } 2280 return result; 2281 } 2282 2283 2284 Handle<Map> Map::CopyGeneralizeAllRepresentations(Handle<Map> map, 2285 int modify_index, 2286 StoreMode store_mode, 2287 PropertyAttributes attributes, 2288 const char* reason) { 2289 Isolate* isolate = map->GetIsolate(); 2290 Handle<Map> new_map = Copy(map); 2291 2292 DescriptorArray* descriptors = new_map->instance_descriptors(); 2293 int length = descriptors->number_of_descriptors(); 2294 for (int i = 0; i < length; i++) { 2295 descriptors->SetRepresentation(i, Representation::Tagged()); 2296 if (descriptors->GetDetails(i).type() == FIELD) { 2297 descriptors->SetValue(i, HeapType::Any()); 2298 } 2299 } 2300 2301 // Unless the instance is being migrated, ensure that modify_index is a field. 
2302 PropertyDetails details = descriptors->GetDetails(modify_index); 2303 if (store_mode == FORCE_FIELD && details.type() != FIELD) { 2304 FieldDescriptor d(handle(descriptors->GetKey(modify_index), isolate), 2305 new_map->NumberOfFields(), 2306 attributes, 2307 Representation::Tagged()); 2308 descriptors->Replace(modify_index, &d); 2309 int unused_property_fields = new_map->unused_property_fields() - 1; 2310 if (unused_property_fields < 0) { 2311 unused_property_fields += JSObject::kFieldsAdded; 2312 } 2313 new_map->set_unused_property_fields(unused_property_fields); 2314 } 2315 2316 if (FLAG_trace_generalization) { 2317 HeapType* field_type = (details.type() == FIELD) 2318 ? map->instance_descriptors()->GetFieldType(modify_index) 2319 : NULL; 2320 map->PrintGeneralization(stdout, reason, modify_index, 2321 new_map->NumberOfOwnDescriptors(), 2322 new_map->NumberOfOwnDescriptors(), 2323 details.type() == CONSTANT && store_mode == FORCE_FIELD, 2324 details.representation(), Representation::Tagged(), 2325 field_type, HeapType::Any()); 2326 } 2327 return new_map; 2328 } 2329 2330 2331 // static 2332 Handle<Map> Map::CopyGeneralizeAllRepresentations(Handle<Map> map, 2333 int modify_index, 2334 StoreMode store_mode, 2335 const char* reason) { 2336 PropertyDetails details = 2337 map->instance_descriptors()->GetDetails(modify_index); 2338 return CopyGeneralizeAllRepresentations(map, modify_index, store_mode, 2339 details.attributes(), reason); 2340 } 2341 2342 2343 void Map::DeprecateTransitionTree() { 2344 if (is_deprecated()) return; 2345 if (HasTransitionArray()) { 2346 TransitionArray* transitions = this->transitions(); 2347 for (int i = 0; i < transitions->number_of_transitions(); i++) { 2348 transitions->GetTarget(i)->DeprecateTransitionTree(); 2349 } 2350 } 2351 deprecate(); 2352 dependent_code()->DeoptimizeDependentCodeGroup( 2353 GetIsolate(), DependentCode::kTransitionGroup); 2354 NotifyLeafMapLayoutChange(); 2355 } 2356 2357 2358 // Invalidates a transition 
target at |key|, and installs |new_descriptors| over 2359 // the current instance_descriptors to ensure proper sharing of descriptor 2360 // arrays. 2361 void Map::DeprecateTarget(Name* key, DescriptorArray* new_descriptors) { 2362 if (HasTransitionArray()) { 2363 TransitionArray* transitions = this->transitions(); 2364 int transition = transitions->Search(key); 2365 if (transition != TransitionArray::kNotFound) { 2366 transitions->GetTarget(transition)->DeprecateTransitionTree(); 2367 } 2368 } 2369 2370 // Don't overwrite the empty descriptor array. 2371 if (NumberOfOwnDescriptors() == 0) return; 2372 2373 DescriptorArray* to_replace = instance_descriptors(); 2374 Map* current = this; 2375 GetHeap()->incremental_marking()->RecordWrites(to_replace); 2376 while (current->instance_descriptors() == to_replace) { 2377 current->SetEnumLength(kInvalidEnumCacheSentinel); 2378 current->set_instance_descriptors(new_descriptors); 2379 Object* next = current->GetBackPointer(); 2380 if (next->IsUndefined()) break; 2381 current = Map::cast(next); 2382 } 2383 2384 set_owns_descriptors(false); 2385 } 2386 2387 2388 Map* Map::FindRootMap() { 2389 Map* result = this; 2390 while (true) { 2391 Object* back = result->GetBackPointer(); 2392 if (back->IsUndefined()) return result; 2393 result = Map::cast(back); 2394 } 2395 } 2396 2397 2398 Map* Map::FindLastMatchMap(int verbatim, 2399 int length, 2400 DescriptorArray* descriptors) { 2401 DisallowHeapAllocation no_allocation; 2402 2403 // This can only be called on roots of transition trees. 
2404 ASSERT(GetBackPointer()->IsUndefined()); 2405 2406 Map* current = this; 2407 2408 for (int i = verbatim; i < length; i++) { 2409 if (!current->HasTransitionArray()) break; 2410 Name* name = descriptors->GetKey(i); 2411 TransitionArray* transitions = current->transitions(); 2412 int transition = transitions->Search(name); 2413 if (transition == TransitionArray::kNotFound) break; 2414 2415 Map* next = transitions->GetTarget(transition); 2416 DescriptorArray* next_descriptors = next->instance_descriptors(); 2417 2418 PropertyDetails details = descriptors->GetDetails(i); 2419 PropertyDetails next_details = next_descriptors->GetDetails(i); 2420 if (details.type() != next_details.type()) break; 2421 if (details.attributes() != next_details.attributes()) break; 2422 if (!details.representation().Equals(next_details.representation())) break; 2423 if (next_details.type() == FIELD) { 2424 if (!descriptors->GetFieldType(i)->NowIs( 2425 next_descriptors->GetFieldType(i))) break; 2426 } else { 2427 if (descriptors->GetValue(i) != next_descriptors->GetValue(i)) break; 2428 } 2429 2430 current = next; 2431 } 2432 return current; 2433 } 2434 2435 2436 Map* Map::FindFieldOwner(int descriptor) { 2437 DisallowHeapAllocation no_allocation; 2438 ASSERT_EQ(FIELD, instance_descriptors()->GetDetails(descriptor).type()); 2439 Map* result = this; 2440 while (true) { 2441 Object* back = result->GetBackPointer(); 2442 if (back->IsUndefined()) break; 2443 Map* parent = Map::cast(back); 2444 if (parent->NumberOfOwnDescriptors() <= descriptor) break; 2445 result = parent; 2446 } 2447 return result; 2448 } 2449 2450 2451 void Map::UpdateDescriptor(int descriptor_number, Descriptor* desc) { 2452 DisallowHeapAllocation no_allocation; 2453 if (HasTransitionArray()) { 2454 TransitionArray* transitions = this->transitions(); 2455 for (int i = 0; i < transitions->number_of_transitions(); ++i) { 2456 transitions->GetTarget(i)->UpdateDescriptor(descriptor_number, desc); 2457 } 2458 } 2459 
instance_descriptors()->Replace(descriptor_number, desc);; 2460 } 2461 2462 2463 // static 2464 Handle<HeapType> Map::GeneralizeFieldType(Handle<HeapType> type1, 2465 Handle<HeapType> type2, 2466 Isolate* isolate) { 2467 static const int kMaxClassesPerFieldType = 5; 2468 if (type1->NowIs(type2)) return type2; 2469 if (type2->NowIs(type1)) return type1; 2470 if (type1->NowStable() && type2->NowStable()) { 2471 Handle<HeapType> type = HeapType::Union(type1, type2, isolate); 2472 if (type->NumClasses() <= kMaxClassesPerFieldType) { 2473 ASSERT(type->NowStable()); 2474 ASSERT(type1->NowIs(type)); 2475 ASSERT(type2->NowIs(type)); 2476 return type; 2477 } 2478 } 2479 return HeapType::Any(isolate); 2480 } 2481 2482 2483 // static 2484 void Map::GeneralizeFieldType(Handle<Map> map, 2485 int modify_index, 2486 Handle<HeapType> new_field_type) { 2487 Isolate* isolate = map->GetIsolate(); 2488 2489 // Check if we actually need to generalize the field type at all. 2490 Handle<HeapType> old_field_type( 2491 map->instance_descriptors()->GetFieldType(modify_index), isolate); 2492 if (new_field_type->NowIs(old_field_type)) { 2493 ASSERT(Map::GeneralizeFieldType(old_field_type, 2494 new_field_type, 2495 isolate)->NowIs(old_field_type)); 2496 return; 2497 } 2498 2499 // Determine the field owner. 2500 Handle<Map> field_owner(map->FindFieldOwner(modify_index), isolate); 2501 Handle<DescriptorArray> descriptors( 2502 field_owner->instance_descriptors(), isolate); 2503 ASSERT_EQ(*old_field_type, descriptors->GetFieldType(modify_index)); 2504 2505 // Determine the generalized new field type. 
2506 new_field_type = Map::GeneralizeFieldType( 2507 old_field_type, new_field_type, isolate); 2508 2509 PropertyDetails details = descriptors->GetDetails(modify_index); 2510 FieldDescriptor d(handle(descriptors->GetKey(modify_index), isolate), 2511 descriptors->GetFieldIndex(modify_index), 2512 new_field_type, 2513 details.attributes(), 2514 details.representation()); 2515 field_owner->UpdateDescriptor(modify_index, &d); 2516 field_owner->dependent_code()->DeoptimizeDependentCodeGroup( 2517 isolate, DependentCode::kFieldTypeGroup); 2518 2519 if (FLAG_trace_generalization) { 2520 map->PrintGeneralization( 2521 stdout, "field type generalization", 2522 modify_index, map->NumberOfOwnDescriptors(), 2523 map->NumberOfOwnDescriptors(), false, 2524 details.representation(), details.representation(), 2525 *old_field_type, *new_field_type); 2526 } 2527 } 2528 2529 2530 // Generalize the representation of the descriptor at |modify_index|. 2531 // This method rewrites the transition tree to reflect the new change. To avoid 2532 // high degrees over polymorphism, and to stabilize quickly, on every rewrite 2533 // the new type is deduced by merging the current type with any potential new 2534 // (partial) version of the type in the transition tree. 2535 // To do this, on each rewrite: 2536 // - Search the root of the transition tree using FindRootMap. 2537 // - Find |target_map|, the newest matching version of this map using the keys 2538 // in the |old_map|'s descriptor array to walk the transition tree. 2539 // - Merge/generalize the descriptor array of the |old_map| and |target_map|. 2540 // - Generalize the |modify_index| descriptor using |new_representation| and 2541 // |new_field_type|. 2542 // - Walk the tree again starting from the root towards |target_map|. Stop at 2543 // |split_map|, the first map who's descriptor array does not match the merged 2544 // descriptor array. 2545 // - If |target_map| == |split_map|, |target_map| is in the expected state. 
2546 // Return it. 2547 // - Otherwise, invalidate the outdated transition target from |target_map|, and 2548 // replace its transition tree with a new branch for the updated descriptors. 2549 Handle<Map> Map::GeneralizeRepresentation(Handle<Map> old_map, 2550 int modify_index, 2551 Representation new_representation, 2552 Handle<HeapType> new_field_type, 2553 StoreMode store_mode) { 2554 Isolate* isolate = old_map->GetIsolate(); 2555 2556 Handle<DescriptorArray> old_descriptors( 2557 old_map->instance_descriptors(), isolate); 2558 int old_nof = old_map->NumberOfOwnDescriptors(); 2559 PropertyDetails old_details = old_descriptors->GetDetails(modify_index); 2560 Representation old_representation = old_details.representation(); 2561 2562 // It's fine to transition from None to anything but double without any 2563 // modification to the object, because the default uninitialized value for 2564 // representation None can be overwritten by both smi and tagged values. 2565 // Doubles, however, would require a box allocation. 2566 if (old_representation.IsNone() && 2567 !new_representation.IsNone() && 2568 !new_representation.IsDouble()) { 2569 ASSERT(old_details.type() == FIELD); 2570 ASSERT(old_descriptors->GetFieldType(modify_index)->NowIs( 2571 HeapType::None())); 2572 if (FLAG_trace_generalization) { 2573 old_map->PrintGeneralization( 2574 stdout, "uninitialized field", 2575 modify_index, old_map->NumberOfOwnDescriptors(), 2576 old_map->NumberOfOwnDescriptors(), false, 2577 old_representation, new_representation, 2578 old_descriptors->GetFieldType(modify_index), *new_field_type); 2579 } 2580 old_descriptors->SetRepresentation(modify_index, new_representation); 2581 old_descriptors->SetValue(modify_index, *new_field_type); 2582 return old_map; 2583 } 2584 2585 // Check the state of the root map. 
2586 Handle<Map> root_map(old_map->FindRootMap(), isolate); 2587 if (!old_map->EquivalentToForTransition(*root_map)) { 2588 return CopyGeneralizeAllRepresentations( 2589 old_map, modify_index, store_mode, "not equivalent"); 2590 } 2591 int root_nof = root_map->NumberOfOwnDescriptors(); 2592 if (modify_index < root_nof) { 2593 PropertyDetails old_details = old_descriptors->GetDetails(modify_index); 2594 if ((old_details.type() != FIELD && store_mode == FORCE_FIELD) || 2595 (old_details.type() == FIELD && 2596 (!new_field_type->NowIs(old_descriptors->GetFieldType(modify_index)) || 2597 !new_representation.fits_into(old_details.representation())))) { 2598 return CopyGeneralizeAllRepresentations( 2599 old_map, modify_index, store_mode, "root modification"); 2600 } 2601 } 2602 2603 Handle<Map> target_map = root_map; 2604 for (int i = root_nof; i < old_nof; ++i) { 2605 int j = target_map->SearchTransition(old_descriptors->GetKey(i)); 2606 if (j == TransitionArray::kNotFound) break; 2607 Handle<Map> tmp_map(target_map->GetTransition(j), isolate); 2608 Handle<DescriptorArray> tmp_descriptors = handle( 2609 tmp_map->instance_descriptors(), isolate); 2610 2611 // Check if target map is incompatible. 
2612 PropertyDetails old_details = old_descriptors->GetDetails(i); 2613 PropertyDetails tmp_details = tmp_descriptors->GetDetails(i); 2614 PropertyType old_type = old_details.type(); 2615 PropertyType tmp_type = tmp_details.type(); 2616 if (tmp_details.attributes() != old_details.attributes() || 2617 ((tmp_type == CALLBACKS || old_type == CALLBACKS) && 2618 (tmp_type != old_type || 2619 tmp_descriptors->GetValue(i) != old_descriptors->GetValue(i)))) { 2620 return CopyGeneralizeAllRepresentations( 2621 old_map, modify_index, store_mode, "incompatible"); 2622 } 2623 Representation old_representation = old_details.representation(); 2624 Representation tmp_representation = tmp_details.representation(); 2625 if (!old_representation.fits_into(tmp_representation) || 2626 (!new_representation.fits_into(tmp_representation) && 2627 modify_index == i)) { 2628 break; 2629 } 2630 if (tmp_type == FIELD) { 2631 // Generalize the field type as necessary. 2632 Handle<HeapType> old_field_type = (old_type == FIELD) 2633 ? handle(old_descriptors->GetFieldType(i), isolate) 2634 : old_descriptors->GetValue(i)->OptimalType( 2635 isolate, tmp_representation); 2636 if (modify_index == i) { 2637 old_field_type = GeneralizeFieldType( 2638 new_field_type, old_field_type, isolate); 2639 } 2640 GeneralizeFieldType(tmp_map, i, old_field_type); 2641 } else if (tmp_type == CONSTANT) { 2642 if (old_type != CONSTANT || 2643 old_descriptors->GetConstant(i) != tmp_descriptors->GetConstant(i)) { 2644 break; 2645 } 2646 } else { 2647 ASSERT_EQ(tmp_type, old_type); 2648 ASSERT_EQ(tmp_descriptors->GetValue(i), old_descriptors->GetValue(i)); 2649 } 2650 target_map = tmp_map; 2651 } 2652 2653 // Directly change the map if the target map is more general. 
2654 Handle<DescriptorArray> target_descriptors( 2655 target_map->instance_descriptors(), isolate); 2656 int target_nof = target_map->NumberOfOwnDescriptors(); 2657 if (target_nof == old_nof && 2658 (store_mode != FORCE_FIELD || 2659 target_descriptors->GetDetails(modify_index).type() == FIELD)) { 2660 ASSERT(modify_index < target_nof); 2661 ASSERT(new_representation.fits_into( 2662 target_descriptors->GetDetails(modify_index).representation())); 2663 ASSERT(target_descriptors->GetDetails(modify_index).type() != FIELD || 2664 new_field_type->NowIs( 2665 target_descriptors->GetFieldType(modify_index))); 2666 return target_map; 2667 } 2668 2669 // Find the last compatible target map in the transition tree. 2670 for (int i = target_nof; i < old_nof; ++i) { 2671 int j = target_map->SearchTransition(old_descriptors->GetKey(i)); 2672 if (j == TransitionArray::kNotFound) break; 2673 Handle<Map> tmp_map(target_map->GetTransition(j), isolate); 2674 Handle<DescriptorArray> tmp_descriptors( 2675 tmp_map->instance_descriptors(), isolate); 2676 2677 // Check if target map is compatible. 2678 PropertyDetails old_details = old_descriptors->GetDetails(i); 2679 PropertyDetails tmp_details = tmp_descriptors->GetDetails(i); 2680 if (tmp_details.attributes() != old_details.attributes() || 2681 ((tmp_details.type() == CALLBACKS || old_details.type() == CALLBACKS) && 2682 (tmp_details.type() != old_details.type() || 2683 tmp_descriptors->GetValue(i) != old_descriptors->GetValue(i)))) { 2684 return CopyGeneralizeAllRepresentations( 2685 old_map, modify_index, store_mode, "incompatible"); 2686 } 2687 target_map = tmp_map; 2688 } 2689 target_nof = target_map->NumberOfOwnDescriptors(); 2690 target_descriptors = handle(target_map->instance_descriptors(), isolate); 2691 2692 // Allocate a new descriptor array large enough to hold the required 2693 // descriptors, with minimally the exact same size as the old descriptor 2694 // array. 
  // (Interior of Map::GeneralizeRepresentation; the entry point and the
  // derivation of root_map/root_nof/old_nof/target_nof precede this chunk.)
  //
  // Allocate a fresh descriptor array with at least as much slack as the old
  // one had beyond |old_nof|, so maps sharing the array can keep appending.
  int new_slack = Max(
      old_nof, old_descriptors->number_of_descriptors()) - old_nof;
  Handle<DescriptorArray> new_descriptors = DescriptorArray::Allocate(
      isolate, old_nof, new_slack);
  ASSERT(new_descriptors->length() > target_descriptors->length() ||
         new_descriptors->NumberOfSlackDescriptors() > 0 ||
         new_descriptors->number_of_descriptors() ==
         old_descriptors->number_of_descriptors());
  ASSERT(new_descriptors->number_of_descriptors() == old_nof);

  // 0 -> |root_nof|
  // Descriptors shared with the root map are copied verbatim.
  // |current_offset| counts FIELD descriptors seen so far; it becomes the
  // in-object/property-array index assigned to each new FieldDescriptor below.
  int current_offset = 0;
  for (int i = 0; i < root_nof; ++i) {
    PropertyDetails old_details = old_descriptors->GetDetails(i);
    if (old_details.type() == FIELD) current_offset++;
    Descriptor d(handle(old_descriptors->GetKey(i), isolate),
                 handle(old_descriptors->GetValue(i), isolate),
                 old_details);
    new_descriptors->Set(i, &d);
  }

  // |root_nof| -> |target_nof|
  // Merge the old and target descriptors: representations are generalized
  // pairwise, and |modify_index| additionally absorbs |new_representation|.
  for (int i = root_nof; i < target_nof; ++i) {
    Handle<Name> target_key(target_descriptors->GetKey(i), isolate);
    PropertyDetails old_details = old_descriptors->GetDetails(i);
    PropertyDetails target_details = target_descriptors->GetDetails(i);
    target_details = target_details.CopyWithRepresentation(
        old_details.representation().generalize(
            target_details.representation()));
    if (modify_index == i) {
      target_details = target_details.CopyWithRepresentation(
          new_representation.generalize(target_details.representation()));
    }
    ASSERT_EQ(old_details.attributes(), target_details.attributes());
    // A descriptor becomes a field if either side already is a field, the
    // store explicitly forces a field at |modify_index|, or the two sides
    // disagree on the stored value (so no single constant can be kept).
    if (old_details.type() == FIELD ||
        target_details.type() == FIELD ||
        (modify_index == i && store_mode == FORCE_FIELD) ||
        (target_descriptors->GetValue(i) != old_descriptors->GetValue(i))) {
      // Non-FIELD entries contribute the optimal type of their value.
      Handle<HeapType> old_field_type = (old_details.type() == FIELD)
          ? handle(old_descriptors->GetFieldType(i), isolate)
          : old_descriptors->GetValue(i)->OptimalType(
              isolate, target_details.representation());
      Handle<HeapType> target_field_type = (target_details.type() == FIELD)
          ? handle(target_descriptors->GetFieldType(i), isolate)
          : target_descriptors->GetValue(i)->OptimalType(
              isolate, target_details.representation());
      target_field_type = GeneralizeFieldType(
          target_field_type, old_field_type, isolate);
      if (modify_index == i) {
        target_field_type = GeneralizeFieldType(
            target_field_type, new_field_type, isolate);
      }
      FieldDescriptor d(target_key,
                        current_offset++,
                        target_field_type,
                        target_details.attributes(),
                        target_details.representation());
      new_descriptors->Set(i, &d);
    } else {
      ASSERT_NE(FIELD, target_details.type());
      Descriptor d(target_key,
                   handle(target_descriptors->GetValue(i), isolate),
                   target_details);
      new_descriptors->Set(i, &d);
    }
  }

  // |target_nof| -> |old_nof|
  // Descriptors beyond the target map exist only on the old map; copy them,
  // still applying the requested generalization at |modify_index|.
  for (int i = target_nof; i < old_nof; ++i) {
    PropertyDetails old_details = old_descriptors->GetDetails(i);
    Handle<Name> old_key(old_descriptors->GetKey(i), isolate);
    if (modify_index == i) {
      old_details = old_details.CopyWithRepresentation(
          new_representation.generalize(old_details.representation()));
    }
    if (old_details.type() == FIELD) {
      Handle<HeapType> old_field_type(
          old_descriptors->GetFieldType(i), isolate);
      if (modify_index == i) {
        old_field_type = GeneralizeFieldType(
            old_field_type, new_field_type, isolate);
      }
      FieldDescriptor d(old_key,
                        current_offset++,
                        old_field_type,
                        old_details.attributes(),
                        old_details.representation());
      new_descriptors->Set(i, &d);
    } else {
      ASSERT(old_details.type() == CONSTANT || old_details.type() == CALLBACKS);
      if (modify_index == i && store_mode == FORCE_FIELD) {
        // Forced field conversion of a CONSTANT/CALLBACKS entry: the field
        // type starts from the optimal type of the previously stored value.
        FieldDescriptor d(old_key,
                          current_offset++,
                          GeneralizeFieldType(
                              old_descriptors->GetValue(i)->OptimalType(
                                  isolate, old_details.representation()),
                              new_field_type, isolate),
                          old_details.attributes(),
                          old_details.representation());
        new_descriptors->Set(i, &d);
      } else {
        ASSERT_NE(FIELD, old_details.type());
        Descriptor d(old_key,
                     handle(old_descriptors->GetValue(i), isolate),
                     old_details);
        new_descriptors->Set(i, &d);
      }
    }
  }

  new_descriptors->Sort();

  ASSERT(store_mode != FORCE_FIELD ||
         new_descriptors->GetDetails(modify_index).type() == FIELD);

  // Find the last map in the old transition tree that is still compatible
  // with the generalized descriptors; everything past it gets deprecated.
  Handle<Map> split_map(root_map->FindLastMatchMap(
      root_nof, old_nof, *new_descriptors), isolate);
  int split_nof = split_map->NumberOfOwnDescriptors();
  ASSERT_NE(old_nof, split_nof);

  split_map->DeprecateTarget(
      old_descriptors->GetKey(split_nof), *new_descriptors);

  if (FLAG_trace_generalization) {
    PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
    PropertyDetails new_details = new_descriptors->GetDetails(modify_index);
    Handle<HeapType> old_field_type = (old_details.type() == FIELD)
        ? handle(old_descriptors->GetFieldType(modify_index), isolate)
        : HeapType::Constant(handle(old_descriptors->GetValue(modify_index),
                                    isolate), isolate);
    Handle<HeapType> new_field_type = (new_details.type() == FIELD)
        ? handle(new_descriptors->GetFieldType(modify_index), isolate)
        : HeapType::Constant(handle(new_descriptors->GetValue(modify_index),
                                    isolate), isolate);
    old_map->PrintGeneralization(
        stdout, "", modify_index, split_nof, old_nof,
        old_details.type() == CONSTANT && store_mode == FORCE_FIELD,
        old_details.representation(), new_details.representation(),
        *old_field_type, *new_field_type);
  }

  // Add missing transitions.
  // (End of Map::GeneralizeRepresentation.)  Rebuild the transition chain
  // from the split point up to |old_nof| using the generalized descriptors.
  Handle<Map> new_map = split_map;
  for (int i = split_nof; i < old_nof; ++i) {
    new_map = CopyInstallDescriptors(new_map, i, new_descriptors);
  }
  new_map->set_owns_descriptors(true);
  return new_map;
}


// Generalize the representation of all FIELD descriptors.
// Runs GeneralizeRepresentation once per FIELD descriptor, each time forcing
// the field to the most general (Tagged / Any) form; |map| is re-read after
// every step because generalization may produce a new map.
Handle<Map> Map::GeneralizeAllFieldRepresentations(
    Handle<Map> map) {
  Handle<DescriptorArray> descriptors(map->instance_descriptors());
  for (int i = 0; i < map->NumberOfOwnDescriptors(); ++i) {
    if (descriptors->GetDetails(i).type() == FIELD) {
      map = GeneralizeRepresentation(map, i, Representation::Tagged(),
                                     HeapType::Any(map->GetIsolate()),
                                     FORCE_FIELD);
    }
  }
  return map;
}


// static
// Walks the prototype chain, migrating any prototype object whose map is
// deprecated, then resolves |map| itself via CurrentMapForDeprecatedInternal.
// Returns an empty MaybeHandle when no updated map can be found.
MaybeHandle<Map> Map::CurrentMapForDeprecated(Handle<Map> map) {
  Handle<Map> proto_map(map);
  while (proto_map->prototype()->IsJSObject()) {
    Handle<JSObject> holder(JSObject::cast(proto_map->prototype()));
    proto_map = Handle<Map>(holder->map());
    if (proto_map->is_deprecated() && JSObject::TryMigrateInstance(holder)) {
      // Migration changed the holder's map; continue walking from it.
      proto_map = Handle<Map>(holder->map());
    }
  }
  return CurrentMapForDeprecatedInternal(map);
}


// static
// Side-effect-free search for the non-deprecated map matching |old_map|:
// replays old_map's transitions from its root map and checks that every
// descriptor along the way is compatible (same attributes, representation
// fits, field types/constants agree).  Returns empty on any mismatch.
MaybeHandle<Map> Map::CurrentMapForDeprecatedInternal(Handle<Map> old_map) {
  // Raw Map*/DescriptorArray* pointers below are safe only because neither
  // allocation nor deoptimization can happen in this scope.
  DisallowHeapAllocation no_allocation;
  DisallowDeoptimization no_deoptimization(old_map->GetIsolate());

  if (!old_map->is_deprecated()) return old_map;

  // Check the state of the root map.
  Map* root_map = old_map->FindRootMap();
  if (!old_map->EquivalentToForTransition(root_map)) return MaybeHandle<Map>();
  int root_nof = root_map->NumberOfOwnDescriptors();

  int old_nof = old_map->NumberOfOwnDescriptors();
  DescriptorArray* old_descriptors = old_map->instance_descriptors();

  Map* new_map = root_map;
  for (int i = root_nof; i < old_nof; ++i) {
    // Follow the transition keyed by the same property name; bail out if the
    // up-to-date transition tree no longer has it.
    int j = new_map->SearchTransition(old_descriptors->GetKey(i));
    if (j == TransitionArray::kNotFound) return MaybeHandle<Map>();
    new_map = new_map->GetTransition(j);
    DescriptorArray* new_descriptors = new_map->instance_descriptors();

    PropertyDetails new_details = new_descriptors->GetDetails(i);
    PropertyDetails old_details = old_descriptors->GetDetails(i);
    if (old_details.attributes() != new_details.attributes() ||
        !old_details.representation().fits_into(new_details.representation())) {
      return MaybeHandle<Map>();
    }
    PropertyType new_type = new_details.type();
    PropertyType old_type = old_details.type();
    Object* new_value = new_descriptors->GetValue(i);
    Object* old_value = old_descriptors->GetValue(i);
    switch (new_type) {
      case FIELD:
        // The old descriptor must be representable in the new field type:
        // old FIELD type must be a subtype, an old CONSTANT must be contained,
        // and for old CALLBACKS the new field type must admit anything.
        if ((old_type == FIELD &&
             !HeapType::cast(old_value)->NowIs(HeapType::cast(new_value))) ||
            (old_type == CONSTANT &&
             !HeapType::cast(new_value)->NowContains(old_value)) ||
            (old_type == CALLBACKS &&
             !HeapType::Any()->Is(HeapType::cast(new_value)))) {
          return MaybeHandle<Map>();
        }
        break;

      case CONSTANT:
      case CALLBACKS:
        // Constants and accessors must match exactly.
        if (old_type != new_type || old_value != new_value) {
          return MaybeHandle<Map>();
        }
        break;

      case NORMAL:
      case HANDLER:
      case INTERCEPTOR:
      case NONEXISTENT:
        UNREACHABLE();
    }
  }
  // The replayed map must end with exactly the same number of descriptors.
  if (new_map->NumberOfOwnDescriptors() != old_nof) return MaybeHandle<Map>();
  return handle(new_map);
}


MaybeHandle<Object>
// Stores |value| on |object| through its named-property interceptor, falling
// back to SetPropertyPostInterceptor when the interceptor has no setter or
// the setter declines (returns an empty handle).
JSObject::SetPropertyWithInterceptor(
    Handle<JSObject> object,
    Handle<Name> name,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictMode strict_mode) {
  // TODO(rossberg): Support symbols in the API.
  if (name->IsSymbol()) return value;
  Isolate* isolate = object->GetIsolate();
  Handle<String> name_string = Handle<String>::cast(name);
  Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
  if (!interceptor->setter()->IsUndefined()) {
    LOG(isolate,
        ApiNamedPropertyAccess("interceptor-named-set", *object, *name));
    PropertyCallbackArguments args(
        isolate, interceptor->data(), *object, *object);
    v8::NamedPropertySetterCallback setter =
        v8::ToCData<v8::NamedPropertySetterCallback>(interceptor->setter());
    // Never expose the hole to the embedder; substitute undefined.
    Handle<Object> value_unhole = value->IsTheHole()
        ? Handle<Object>(isolate->factory()->undefined_value()) : value;
    v8::Handle<v8::Value> result = args.Call(setter,
                                             v8::Utils::ToLocal(name_string),
                                             v8::Utils::ToLocal(value_unhole));
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    // A non-empty result means the interceptor handled the store.
    if (!result.IsEmpty()) return value;
  }
  return SetPropertyPostInterceptor(
      object, name, value, attributes, strict_mode);
}


// Generic receiver-level property store: looks the property up on the object
// itself (including a possible map transition) and dispatches to the
// LookupResult-based SetProperty overload.
MaybeHandle<Object> JSReceiver::SetProperty(Handle<JSReceiver> object,
                                            Handle<Name> name,
                                            Handle<Object> value,
                                            PropertyAttributes attributes,
                                            StrictMode strict_mode,
                                            StoreFromKeyed store_mode) {
  LookupResult result(object->GetIsolate());
  object->LookupOwn(name, &result, true);
  if (!result.IsFound()) {
    object->map()->LookupTransition(JSObject::cast(*object), *name, &result);
  }
  return SetProperty(object, &result, name, value, attributes, strict_mode,
                     store_mode);
}


// Walks |object|'s prototype chain looking for an element accessor (or a
// proxy) at |index| that should intercept the store.  Sets *found to whether
// such a handler was hit; when false the returned hole value means the caller
// should perform the store itself.
MaybeHandle<Object> JSObject::SetElementWithCallbackSetterInPrototypes(
    Handle<JSObject> object,
    uint32_t index,
    Handle<Object> value,
    bool* found,
    StrictMode strict_mode) {
  Isolate *isolate = object->GetIsolate();
  for (Handle<Object> proto = handle(object->GetPrototype(), isolate);
       !proto->IsNull();
       proto = handle(proto->GetPrototype(isolate), isolate)) {
    if (proto->IsJSProxy()) {
      return JSProxy::SetPropertyViaPrototypesWithHandler(
          Handle<JSProxy>::cast(proto),
          object,
          isolate->factory()->Uint32ToString(index),  // name
          value,
          NONE,
          strict_mode,
          found);
    }
    Handle<JSObject> js_proto = Handle<JSObject>::cast(proto);
    // Only dictionary-mode elements can carry CALLBACKS entries; skip others.
    if (!js_proto->HasDictionaryElements()) {
      continue;
    }
    Handle<SeededNumberDictionary> dictionary(js_proto->element_dictionary());
    int entry = dictionary->FindEntry(index);
    if (entry != SeededNumberDictionary::kNotFound) {
      PropertyDetails details = dictionary->DetailsAt(entry);
      if (details.type() == CALLBACKS) {
        *found = true;
        Handle<Object> structure(dictionary->ValueAt(entry), isolate);
        return SetElementWithCallback(object, structure, index, value, js_proto,
                                      strict_mode);
      }
    }
  }
  *found = false;
  return isolate->factory()->the_hole_value();
}


// Attempts to complete a named store via the prototype chain (accessors,
// proxies, read-only checks).  *done reports whether the store was fully
// handled; a hole return with *done == false tells the caller to proceed
// with an own-property store.
MaybeHandle<Object> JSObject::SetPropertyViaPrototypes(
    Handle<JSObject> object,
    Handle<Name> name,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictMode strict_mode,
    bool* done) {
  Isolate* isolate = object->GetIsolate();

  *done = false;
  // We could not find an own property, so let's check whether there is an
  // accessor that wants to handle the property, or whether the property is
  // read-only on the prototype chain.
  // (Body of JSObject::SetPropertyViaPrototypes, continued.)
  LookupResult result(isolate);
  object->LookupRealNamedPropertyInPrototypes(name, &result);
  if (result.IsFound()) {
    switch (result.type()) {
      case NORMAL:
      case FIELD:
      case CONSTANT:
        // A plain data property only blocks the store if it is read-only.
        *done = result.IsReadOnly();
        break;
      case INTERCEPTOR: {
        LookupIterator it(object, name, handle(result.holder()));
        PropertyAttributes attr = GetPropertyAttributes(&it);
        *done = !!(attr & READ_ONLY);
        break;
      }
      case CALLBACKS: {
        *done = true;
        if (!result.IsReadOnly()) {
          Handle<Object> callback_object(result.GetCallbackObject(), isolate);
          return SetPropertyWithCallback(object, name, value,
                                         handle(result.holder()),
                                         callback_object, strict_mode);
        }
        break;
      }
      case HANDLER: {
        // Delegate the whole store to the proxy's trap machinery.
        Handle<JSProxy> proxy(result.proxy());
        return JSProxy::SetPropertyViaPrototypesWithHandler(
            proxy, object, name, value, attributes, strict_mode, done);
      }
      case NONEXISTENT:
        UNREACHABLE();
        break;
    }
  }

  // If we get here with *done true, we have encountered a read-only property.
  if (*done) {
    // Sloppy mode silently ignores read-only violations; strict mode throws.
    if (strict_mode == SLOPPY) return value;
    Handle<Object> args[] = { name, object };
    Handle<Object> error = isolate->factory()->NewTypeError(
        "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args)));
    return isolate->Throw<Object>(error);
  }
  return isolate->factory()->the_hole_value();
}


// Guarantees that |map|'s descriptor array has at least |slack| unused
// slots, reallocating and re-linking the array into all maps that share it.
void Map::EnsureDescriptorSlack(Handle<Map> map, int slack) {
  // Only supports adding slack to owned descriptors.
  ASSERT(map->owns_descriptors());

  Handle<DescriptorArray> descriptors(map->instance_descriptors());
  int old_size = map->NumberOfOwnDescriptors();
  if (slack <= descriptors->NumberOfSlackDescriptors()) return;

  Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
      descriptors, old_size, slack);

  if (old_size == 0) {
    map->set_instance_descriptors(*new_descriptors);
    return;
  }

  // If the source descriptors had an enum cache we copy it. This ensures
  // that the maps to which we push the new descriptor array back can rely
  // on a cache always being available once it is set. If the map has more
  // enumerated descriptors than available in the original cache, the cache
  // will be lazily replaced by the extended cache when needed.
  if (descriptors->HasEnumCache()) {
    new_descriptors->CopyEnumCacheFrom(*descriptors);
  }

  // Replace descriptors by new_descriptors in all maps that share it.
  map->GetHeap()->incremental_marking()->RecordWrites(*descriptors);

  // Walk the back-pointer chain; every ancestor still pointing at the old
  // shared array gets the new one.  Stop at the first map that does not.
  Map* walk_map;
  for (Object* current = map->GetBackPointer();
       !current->IsUndefined();
       current = walk_map->GetBackPointer()) {
    walk_map = Map::cast(current);
    if (walk_map->instance_descriptors() != *descriptors) break;
    walk_map->set_instance_descriptors(*new_descriptors);
  }

  map->set_instance_descriptors(*new_descriptors);
}


// Appends the AccessorInfo entries from |callbacks| to |array|, skipping
// duplicates.  T supplies the container-specific Contains/Insert operations
// (see DescriptorArrayAppender / FixedArrayAppender below in this file).
// Returns the new number of valid descriptors.
template<class T>
static int AppendUniqueCallbacks(NeanderArray* callbacks,
                                 Handle<typename T::Array> array,
                                 int valid_descriptors) {
  int nof_callbacks = callbacks->length();

  Isolate* isolate = array->GetIsolate();
  // Ensure the keys are unique names before writing them into the
  // instance descriptor. Since it may cause a GC, it has to be done before we
  // temporarily put the heap in an invalid state while appending descriptors.
  // (Body of AppendUniqueCallbacks, continued: internalize all names first.)
  for (int i = 0; i < nof_callbacks; ++i) {
    Handle<AccessorInfo> entry(AccessorInfo::cast(callbacks->get(i)));
    if (entry->name()->IsUniqueName()) continue;
    Handle<String> key =
        isolate->factory()->InternalizeString(
            Handle<String>(String::cast(entry->name())));
    entry->set_name(*key);
  }

  // Fill in new callback descriptors.  Process the callbacks from
  // back to front so that the last callback with a given name takes
  // precedence over previously added callbacks with that name.
  for (int i = nof_callbacks - 1; i >= 0; i--) {
    Handle<AccessorInfo> entry(AccessorInfo::cast(callbacks->get(i)));
    Handle<Name> key(Name::cast(entry->name()));
    // Check if a descriptor with this name already exists before writing.
    if (!T::Contains(key, entry, valid_descriptors, array)) {
      T::Insert(key, entry, valid_descriptors, array);
      valid_descriptors++;
    }
  }

  return valid_descriptors;
}

// Policy for AppendUniqueCallbacks that targets a map's DescriptorArray:
// duplicates are detected via descriptor search, inserts append a
// CallbacksDescriptor.
struct DescriptorArrayAppender {
  typedef DescriptorArray Array;
  static bool Contains(Handle<Name> key,
                       Handle<AccessorInfo> entry,
                       int valid_descriptors,
                       Handle<DescriptorArray> array) {
    DisallowHeapAllocation no_gc;
    return array->Search(*key, valid_descriptors) != DescriptorArray::kNotFound;
  }
  static void Insert(Handle<Name> key,
                     Handle<AccessorInfo> entry,
                     int valid_descriptors,
                     Handle<DescriptorArray> array) {
    DisallowHeapAllocation no_gc;
    CallbacksDescriptor desc(key, entry, entry->property_attributes());
    array->Append(&desc);
  }
};


// Policy for AppendUniqueCallbacks that targets a plain FixedArray of
// AccessorInfo objects: duplicates are found by linear name scan, inserts
// write at the next free slot.
struct FixedArrayAppender {
  typedef FixedArray Array;
  static bool Contains(Handle<Name> key,
                       Handle<AccessorInfo> entry,
                       int valid_descriptors,
                       Handle<FixedArray> array) {
    for (int i = 0; i < valid_descriptors; i++) {
      if (*key == AccessorInfo::cast(array->get(i))->name()) return true;
    }
    return false;
  }
  static void Insert(Handle<Name> key,
                     Handle<AccessorInfo> entry,
                     int valid_descriptors,
                     Handle<FixedArray> array) {
    DisallowHeapAllocation no_gc;
    array->set(valid_descriptors, *entry);
  }
};


// Appends the callback accessors in |descriptors| (a NeanderArray) to
// |map|'s own descriptor array and updates the map's descriptor count.
// Requires pre-allocated slack (see the ASSERT).
void Map::AppendCallbackDescriptors(Handle<Map> map,
                                    Handle<Object> descriptors) {
  int nof = map->NumberOfOwnDescriptors();
  Handle<DescriptorArray> array(map->instance_descriptors());
  NeanderArray callbacks(descriptors);
  ASSERT(array->NumberOfSlackDescriptors() >= callbacks.length());
  nof = AppendUniqueCallbacks<DescriptorArrayAppender>(&callbacks, array, nof);
  map->SetNumberOfOwnDescriptors(nof);
}


// Same as above but appends into a raw FixedArray; returns the updated
// count of valid entries.
int AccessorInfo::AppendUnique(Handle<Object> descriptors,
                               Handle<FixedArray> array,
                               int valid_descriptors) {
  NeanderArray callbacks(descriptors);
  ASSERT(array->length() >= callbacks.length() + valid_descriptors);
  return AppendUniqueCallbacks<FixedArrayAppender>(&callbacks,
                                                   array,
                                                   valid_descriptors);
}


// Returns true if |map| occurs (by handle identity) in |maps|.
static bool ContainsMap(MapHandleList* maps, Handle<Map> map) {
  ASSERT(!map.is_null());
  for (int i = 0; i < maps->length(); ++i) {
    if (!maps->at(i).is_null() && maps->at(i).is_identical_to(map)) return true;
  }
  return false;
}


// Wraps a possibly-NULL raw pointer in a Handle (null handle for NULL).
template <class T>
static Handle<T> MaybeNull(T* p) {
  if (p == NULL) return Handle<T>::null();
  return Handle<T>(p);
}


// Among |candidates|, finds the map reachable from this map through
// more-general fast-elements-kind transitions; prefers to preserve
// packed-ness where possible.  Returns a null handle if none matches.
Handle<Map> Map::FindTransitionedMap(MapHandleList* candidates) {
  ElementsKind kind = elements_kind();
  Handle<Map> transitioned_map = Handle<Map>::null();
  Handle<Map> current_map(this);
  bool packed = IsFastPackedElementsKind(kind);
  if (IsTransitionableFastElementsKind(kind)) {
    while (CanTransitionToMoreGeneralFastElementsKind(kind, false)) {
      kind = GetNextMoreGeneralFastElementsKind(kind, false);
      Handle<Map> maybe_transitioned_map =
          // (Body of Map::FindTransitionedMap, continued.)
          MaybeNull(current_map->LookupElementsTransitionMap(kind));
      if (maybe_transitioned_map.is_null()) break;
      // Only accept a candidate that does not lose packed-ness unless we
      // have already given it up.
      if (ContainsMap(candidates, maybe_transitioned_map) &&
          (packed || !IsFastPackedElementsKind(kind))) {
        transitioned_map = maybe_transitioned_map;
        if (!IsFastPackedElementsKind(kind)) packed = false;
      }
      current_map = maybe_transitioned_map;
    }
  }
  return transitioned_map;
}


// Follows existing elements-kind transitions from |map| toward |to_kind|
// and returns the closest map reached; the result's kind equals |to_kind|
// only if a full transition chain already exists.
static Map* FindClosestElementsTransition(Map* map, ElementsKind to_kind) {
  Map* current_map = map;
  int target_kind =
      IsFastElementsKind(to_kind) || IsExternalArrayElementsKind(to_kind)
      ? to_kind
      : TERMINAL_FAST_ELEMENTS_KIND;

  // Support for legacy API: SetIndexedPropertiesTo{External,Pixel}Data
  // allows to change elements from arbitrary kind to any ExternalArray
  // elements kind. Satisfy its requirements, checking whether we already
  // have the cached transition.
  if (IsExternalArrayElementsKind(to_kind) &&
      !IsFixedTypedArrayElementsKind(map->elements_kind())) {
    if (map->HasElementsTransition()) {
      Map* next_map = map->elements_transition_map();
      if (next_map->elements_kind() == to_kind) return next_map;
    }
    return map;
  }

  // Walk the transition chain one kind at a time until |target_kind| or the
  // chain ends.
  ElementsKind kind = map->elements_kind();
  while (kind != target_kind) {
    kind = GetNextTransitionElementsKind(kind);
    if (!current_map->HasElementsTransition()) return current_map;
    current_map = current_map->elements_transition_map();
  }

  // One extra hop may lead out of the fast system (to dictionary elements).
  if (to_kind != kind && current_map->HasElementsTransition()) {
    ASSERT(to_kind == DICTIONARY_ELEMENTS);
    Map* next_map = current_map->elements_transition_map();
    if (next_map->elements_kind() == to_kind) return next_map;
  }

  ASSERT(current_map->elements_kind() == target_kind);
  return current_map;
}


// Returns the existing transition map with elements kind |to_kind|, or NULL
// if the transition chain does not reach it.
Map* Map::LookupElementsTransitionMap(ElementsKind to_kind) {
  Map* to_map =
      FindClosestElementsTransition(this, to_kind);
  if (to_map->elements_kind() == to_kind) return to_map;
  return NULL;
}


// True if this map is the map of the initial Array.prototype or
// Object.prototype of the current isolate's native context.
bool Map::IsMapInArrayPrototypeChain() {
  Isolate* isolate = GetIsolate();
  if (isolate->initial_array_prototype()->map() == this) {
    return true;
  }

  if (isolate->initial_object_prototype()->map() == this) {
    return true;
  }

  return false;
}


// Extends |map|'s elements-kind transition chain until it reaches |to_kind|,
// inserting one transition per intermediate kind.
static Handle<Map> AddMissingElementsTransitions(Handle<Map> map,
                                                 ElementsKind to_kind) {
  ASSERT(IsTransitionElementsKind(map->elements_kind()));

  Handle<Map> current_map = map;

  ElementsKind kind = map->elements_kind();
  while (kind != to_kind && !IsTerminalElementsKind(kind)) {
    kind = GetNextTransitionElementsKind(kind);
    current_map = Map::CopyAsElementsKind(
        current_map, kind, INSERT_TRANSITION);
  }

  // In case we are exiting the fast elements kind system, just add the map in
  // the end.
  // (End of AddMissingElementsTransitions: final hop out of the fast system.)
  if (kind != to_kind) {
    current_map = Map::CopyAsElementsKind(
        current_map, to_kind, INSERT_TRANSITION);
  }

  ASSERT(current_map->elements_kind() == to_kind);
  return current_map;
}


// Returns a map like |map| but with elements kind |to_kind|.  Fast path:
// when |map| is one of the native context's cached JSArray maps, the cached
// map for |to_kind| is returned directly; otherwise the slow path runs.
Handle<Map> Map::TransitionElementsTo(Handle<Map> map,
                                      ElementsKind to_kind) {
  ElementsKind from_kind = map->elements_kind();
  if (from_kind == to_kind) return map;

  Isolate* isolate = map->GetIsolate();
  Context* native_context = isolate->context()->native_context();
  Object* maybe_array_maps = native_context->js_array_maps();
  if (maybe_array_maps->IsFixedArray()) {
    DisallowHeapAllocation no_gc;
    FixedArray* array_maps = FixedArray::cast(maybe_array_maps);
    if (array_maps->get(from_kind) == *map) {
      Object* maybe_transitioned_map = array_maps->get(to_kind);
      if (maybe_transitioned_map->IsMap()) {
        return handle(Map::cast(maybe_transitioned_map));
      }
    }
  }

  return TransitionElementsToSlow(map, to_kind);
}


// Slow path of TransitionElementsTo: decides whether the new map should be
// recorded as a transition on |map| or created as a detached copy.
Handle<Map> Map::TransitionElementsToSlow(Handle<Map> map,
                                          ElementsKind to_kind) {
  ElementsKind from_kind = map->elements_kind();

  if (from_kind == to_kind) {
    return map;
  }

  bool allow_store_transition =
      // Only remember the map transition if there is not an already existing
      // non-matching element transition.
      !map->IsUndefined() && !map->is_shared() &&
      IsTransitionElementsKind(from_kind);

  // Only store fast element maps in ascending generality.
  if (IsFastElementsKind(to_kind)) {
    allow_store_transition &=
        IsTransitionableFastElementsKind(from_kind) &&
        IsMoreGeneralElementsKindTransition(from_kind, to_kind);
  }

  if (!allow_store_transition) {
    // Detached copy: no transition is recorded on |map|.
    return Map::CopyAsElementsKind(map, to_kind, OMIT_TRANSITION);
  }

  return Map::AsElementsKind(map, to_kind);
}


// static
// Returns the transition map for |kind|, reusing an existing transition
// chain where possible and creating the missing tail otherwise.
Handle<Map> Map::AsElementsKind(Handle<Map> map, ElementsKind kind) {
  Handle<Map> closest_map(FindClosestElementsTransition(*map, kind));

  if (closest_map->elements_kind() == kind) {
    return closest_map;
  }

  return AddMissingElementsTransitions(closest_map, kind);
}


// Convenience wrapper: elements-kind transition starting from |object|'s
// current map.
Handle<Map> JSObject::GetElementsTransitionMap(Handle<JSObject> object,
                                               ElementsKind to_kind) {
  Handle<Map> map(object->map());
  return Map::TransitionElementsTo(map, to_kind);
}


// Looks up |name| as a real (non-interceptor) own property, checking fast
// descriptors or the property dictionary depending on the object's mode.
// Global proxies forward to their hidden global object.
void JSObject::LookupOwnRealNamedProperty(Handle<Name> name,
                                          LookupResult* result) {
  DisallowHeapAllocation no_gc;
  if (IsJSGlobalProxy()) {
    Object* proto = GetPrototype();
    if (proto->IsNull()) return result->NotFound();
    ASSERT(proto->IsJSGlobalObject());
    return JSObject::cast(proto)->LookupOwnRealNamedProperty(name, result);
  }

  if (HasFastProperties()) {
    map()->LookupDescriptor(this, *name, result);
    // A property or a map transition was found. We return all of these result
    // types because LookupOwnRealNamedProperty is used when setting
    // properties where map transitions are handled.
    ASSERT(!result->IsFound() ||
           (result->holder() == this && result->IsFastPropertyType()));
    // Disallow caching for uninitialized constants. These can only
    // occur as fields.
    // (Body of JSObject::LookupOwnRealNamedProperty, continued: fast case.)
    if (result->IsField() &&
        result->IsReadOnly() &&
        RawFastPropertyAt(result->GetFieldIndex())->IsTheHole()) {
      result->DisallowCaching();
    }
    return;
  }

  // Dictionary-mode (slow) properties.
  int entry = property_dictionary()->FindEntry(name);
  if (entry != NameDictionary::kNotFound) {
    Object* value = property_dictionary()->ValueAt(entry);
    if (IsGlobalObject()) {
      // Global object properties are stored in PropertyCells; a deleted cell
      // means the property is gone.
      PropertyDetails d = property_dictionary()->DetailsAt(entry);
      if (d.IsDeleted()) {
        result->NotFound();
        return;
      }
      value = PropertyCell::cast(value)->value();
    }
    // Make sure to disallow caching for uninitialized constants
    // found in the dictionary-mode objects.
    if (value->IsTheHole()) result->DisallowCaching();
    result->DictionaryResult(this, entry);
    return;
  }

  result->NotFound();
}


// Looks for a real named property on this object, then along its prototype
// chain.
void JSObject::LookupRealNamedProperty(Handle<Name> name,
                                       LookupResult* result) {
  DisallowHeapAllocation no_gc;
  LookupOwnRealNamedProperty(name, result);
  if (result->IsFound()) return;

  LookupRealNamedPropertyInPrototypes(name, result);
}


// Walks the prototype chain looking for a real named property; a proxy on
// the chain short-circuits the walk with a HANDLER result.
void JSObject::LookupRealNamedPropertyInPrototypes(Handle<Name> name,
                                                   LookupResult* result) {
  DisallowHeapAllocation no_gc;
  Isolate* isolate = GetIsolate();
  Heap* heap = isolate->heap();
  for (Object* pt = GetPrototype();
       pt != heap->null_value();
       pt = pt->GetPrototype(isolate)) {
    if (pt->IsJSProxy()) {
      return result->HandlerResult(JSProxy::cast(pt));
    }
    JSObject::cast(pt)->LookupOwnRealNamedProperty(name, result);
    ASSERT(!(result->IsFound() && result->type() == INTERCEPTOR));
    if (result->IsFound()) return;
  }
  result->NotFound();
}


// LookupResult-based store dispatcher: proxies go through their handler,
// everything else through JSObject::SetPropertyForResult.
MaybeHandle<Object> JSReceiver::SetProperty(Handle<JSReceiver> object,
                                            LookupResult* result,
                                            Handle<Name> key,
                                            Handle<Object> value,
                                            PropertyAttributes attributes,
                                            StrictMode strict_mode,
                                            StoreFromKeyed store_mode) {
  if (result->IsHandler()) {
    return JSProxy::SetPropertyWithHandler(handle(result->proxy()),
        object, key, value, attributes, strict_mode);
  } else {
    return JSObject::SetPropertyForResult(Handle<JSObject>::cast(object),
        result, key, value, attributes, strict_mode, store_mode);
  }
}


// Invokes the proxy's "has" trap (or the derived trap) and coerces the
// result to a boolean.  Returns false if an exception is scheduled.
bool JSProxy::HasPropertyWithHandler(Handle<JSProxy> proxy, Handle<Name> name) {
  Isolate* isolate = proxy->GetIsolate();

  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
  if (name->IsSymbol()) return false;

  Handle<Object> args[] = { name };
  Handle<Object> result;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
      isolate, result,
      CallTrap(proxy,
               "has",
               isolate->derived_has_trap(),
               ARRAY_SIZE(args),
               args),
      false);

  return result->BooleanValue();
}


// Invokes the proxy's "set" trap (or the derived trap); the stored |value|
// is returned on success.
MaybeHandle<Object> JSProxy::SetPropertyWithHandler(
    Handle<JSProxy> proxy,
    Handle<JSReceiver> receiver,
    Handle<Name> name,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictMode strict_mode) {
  Isolate* isolate = proxy->GetIsolate();

  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
  if (name->IsSymbol()) return value;

  Handle<Object> args[] = { receiver, name, value };
  RETURN_ON_EXCEPTION(
      isolate,
      CallTrap(proxy,
               "set",
               isolate->derived_set_trap(),
               ARRAY_SIZE(args),
               args),
      Object);

  return value;
}


// Handles a store that found this proxy on the receiver's prototype chain:
// consults the proxy's "getPropertyDescriptor" trap to decide whether the
// store is handled here (accessor/read-only) or should fall through to the
// receiver (*done == false).
MaybeHandle<Object> JSProxy::SetPropertyViaPrototypesWithHandler(
    Handle<JSProxy> proxy,
    Handle<JSReceiver> receiver,
    Handle<Name> name,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictMode strict_mode,
    bool* done) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<Object> handler(proxy->handler(), isolate);  // Trap might morph proxy.
  // (Body of JSProxy::SetPropertyViaPrototypesWithHandler, continued.)
  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
  if (name->IsSymbol()) {
    *done = false;
    return isolate->factory()->the_hole_value();
  }

  *done = true;  // except where redefined...
  Handle<Object> args[] = { name };
  Handle<Object> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate, result,
      CallTrap(proxy,
               "getPropertyDescriptor",
               Handle<Object>(),
               ARRAY_SIZE(args),
               args),
      Object);

  // No descriptor: the proxy does not handle this property; caller proceeds.
  if (result->IsUndefined()) {
    *done = false;
    return isolate->factory()->the_hole_value();
  }

  // Emulate [[GetProperty]] semantics for proxies.
  Handle<Object> argv[] = { result };
  Handle<Object> desc;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate, desc,
      Execution::Call(isolate,
                      isolate->to_complete_property_descriptor(),
                      result,
                      ARRAY_SIZE(argv),
                      argv),
      Object);

  // [[GetProperty]] requires to check that all properties are configurable.
  Handle<String> configurable_name =
      isolate->factory()->InternalizeOneByteString(
          STATIC_ASCII_VECTOR("configurable_"));
  Handle<Object> configurable =
      Object::GetProperty(desc, configurable_name).ToHandleChecked();
  ASSERT(configurable->IsBoolean());
  if (configurable->IsFalse()) {
    Handle<String> trap =
        isolate->factory()->InternalizeOneByteString(
            STATIC_ASCII_VECTOR("getPropertyDescriptor"));
    Handle<Object> args[] = { handler, trap, name };
    Handle<Object> error = isolate->factory()->NewTypeError(
        "proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args)));
    return isolate->Throw<Object>(error);
  }
  ASSERT(configurable->IsTrue());

  // Check for DataDescriptor.
  Handle<String> hasWritable_name =
      isolate->factory()->InternalizeOneByteString(
          STATIC_ASCII_VECTOR("hasWritable_"));
  Handle<Object> hasWritable =
      Object::GetProperty(desc, hasWritable_name).ToHandleChecked();
  ASSERT(hasWritable->IsBoolean());
  if (hasWritable->IsTrue()) {
    Handle<String> writable_name =
        isolate->factory()->InternalizeOneByteString(
            STATIC_ASCII_VECTOR("writable_"));
    Handle<Object> writable =
        Object::GetProperty(desc, writable_name).ToHandleChecked();
    ASSERT(writable->IsBoolean());
    // Writable data property: not handled here, caller performs the store.
    *done = writable->IsFalse();
    if (!*done) return isolate->factory()->the_hole_value();
    // Read-only data property: ignored in sloppy mode, throws in strict mode.
    if (strict_mode == SLOPPY) return value;
    Handle<Object> args[] = { name, receiver };
    Handle<Object> error = isolate->factory()->NewTypeError(
        "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args)));
    return isolate->Throw<Object>(error);
  }

  // We have an AccessorDescriptor.
  Handle<String> set_name = isolate->factory()->InternalizeOneByteString(
      STATIC_ASCII_VECTOR("set_"));
  Handle<Object> setter = Object::GetProperty(desc, set_name).ToHandleChecked();
  if (!setter->IsUndefined()) {
    // TODO(rossberg): nicer would be to cast to some JSCallable here...
    return SetPropertyWithDefinedSetter(
        receiver, Handle<JSReceiver>::cast(setter), value);
  }

  // Accessor without a setter: ignored in sloppy mode, throws in strict mode.
  if (strict_mode == SLOPPY) return value;
  Handle<Object> args2[] = { name, proxy };
  Handle<Object> error = isolate->factory()->NewTypeError(
      "no_setter_in_callback", HandleVector(args2, ARRAY_SIZE(args2)));
  return isolate->Throw<Object>(error);
}


// Invokes the proxy's "delete" trap; in strict deletion mode a falsy trap
// result is turned into a TypeError.
MaybeHandle<Object> JSProxy::DeletePropertyWithHandler(
    Handle<JSProxy> proxy, Handle<Name> name, DeleteMode mode) {
  Isolate* isolate = proxy->GetIsolate();

  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
3672 if (name->IsSymbol()) return isolate->factory()->false_value(); 3673 3674 Handle<Object> args[] = { name }; 3675 Handle<Object> result; 3676 ASSIGN_RETURN_ON_EXCEPTION( 3677 isolate, result, 3678 CallTrap(proxy, 3679 "delete", 3680 Handle<Object>(), 3681 ARRAY_SIZE(args), 3682 args), 3683 Object); 3684 3685 bool result_bool = result->BooleanValue(); 3686 if (mode == STRICT_DELETION && !result_bool) { 3687 Handle<Object> handler(proxy->handler(), isolate); 3688 Handle<String> trap_name = isolate->factory()->InternalizeOneByteString( 3689 STATIC_ASCII_VECTOR("delete")); 3690 Handle<Object> args[] = { handler, trap_name }; 3691 Handle<Object> error = isolate->factory()->NewTypeError( 3692 "handler_failed", HandleVector(args, ARRAY_SIZE(args))); 3693 return isolate->Throw<Object>(error); 3694 } 3695 return isolate->factory()->ToBoolean(result_bool); 3696 } 3697 3698 3699 MaybeHandle<Object> JSProxy::DeleteElementWithHandler( 3700 Handle<JSProxy> proxy, uint32_t index, DeleteMode mode) { 3701 Isolate* isolate = proxy->GetIsolate(); 3702 Handle<String> name = isolate->factory()->Uint32ToString(index); 3703 return JSProxy::DeletePropertyWithHandler(proxy, name, mode); 3704 } 3705 3706 3707 PropertyAttributes JSProxy::GetPropertyAttributesWithHandler( 3708 Handle<JSProxy> proxy, 3709 Handle<Object> receiver, 3710 Handle<Name> name) { 3711 Isolate* isolate = proxy->GetIsolate(); 3712 HandleScope scope(isolate); 3713 3714 // TODO(rossberg): adjust once there is a story for symbols vs proxies. 
  if (name->IsSymbol()) return ABSENT;

  Handle<Object> args[] = { name };
  Handle<Object> result;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
      isolate, result,
      proxy->CallTrap(proxy,
                      "getPropertyDescriptor",
                      Handle<Object>(),  // No default: trap is mandatory.
                      ARRAY_SIZE(args),
                      args),
      NONE);

  if (result->IsUndefined()) return ABSENT;

  // Normalize the raw trap result into a complete property descriptor
  // object via the JS-level ToCompletePropertyDescriptor helper.
  Handle<Object> argv[] = { result };
  Handle<Object> desc;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
      isolate, desc,
      Execution::Call(isolate,
                      isolate->to_complete_property_descriptor(),
                      result,
                      ARRAY_SIZE(argv),
                      argv),
      NONE);

  // Convert result to PropertyAttributes.
  Handle<String> enum_n = isolate->factory()->InternalizeOneByteString(
      STATIC_ASCII_VECTOR("enumerable_"));
  Handle<Object> enumerable;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
      isolate, enumerable, Object::GetProperty(desc, enum_n), NONE);
  Handle<String> conf_n = isolate->factory()->InternalizeOneByteString(
      STATIC_ASCII_VECTOR("configurable_"));
  Handle<Object> configurable;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
      isolate, configurable, Object::GetProperty(desc, conf_n), NONE);
  Handle<String> writ_n = isolate->factory()->InternalizeOneByteString(
      STATIC_ASCII_VECTOR("writable_"));
  Handle<Object> writable;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
      isolate, writable, Object::GetProperty(desc, writ_n), NONE);
  if (!writable->BooleanValue()) {
    // An accessor descriptor with a setter counts as writable here.
    Handle<String> set_n = isolate->factory()->InternalizeOneByteString(
        STATIC_ASCII_VECTOR("set_"));
    Handle<Object> setter;
    ASSIGN_RETURN_ON_EXCEPTION_VALUE(
        isolate, setter, Object::GetProperty(desc, set_n), NONE);
    writable = isolate->factory()->ToBoolean(!setter->IsUndefined());
  }

  // A proxy-reported descriptor must be configurable; otherwise the trap
  // result is rejected with a TypeError.
  if (configurable->IsFalse()) {
    Handle<Object> handler(proxy->handler(), isolate);
    Handle<String> trap = isolate->factory()->InternalizeOneByteString(
        STATIC_ASCII_VECTOR("getPropertyDescriptor"));
    Handle<Object> args[] = { handler, trap, name };
    Handle<Object> error = isolate->factory()->NewTypeError(
        "proxy_prop_not_configurable", HandleVector(args, ARRAY_SIZE(args)));
    isolate->Throw(*error);
    return NONE;  // Caller must check for the scheduled exception.
  }

  // Fold the three booleans into an attribute bit set.
  int attributes = NONE;
  if (!enumerable->BooleanValue()) attributes |= DONT_ENUM;
  if (!configurable->BooleanValue()) attributes |= DONT_DELETE;
  if (!writable->BooleanValue()) attributes |= READ_ONLY;
  return static_cast<PropertyAttributes>(attributes);
}


// Element variant: converts the index to a string name and delegates to
// GetPropertyAttributesWithHandler.
PropertyAttributes JSProxy::GetElementAttributeWithHandler(
    Handle<JSProxy> proxy,
    Handle<JSReceiver> receiver,
    uint32_t index) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<String> name = isolate->factory()->Uint32ToString(index);
  return GetPropertyAttributesWithHandler(proxy, receiver, name);
}


// In-place conversion of a proxy into a regular JSObject/JSFunction
// (proxy "fixing"). The identity hash is preserved across the transition.
void JSProxy::Fix(Handle<JSProxy> proxy) {
  Isolate* isolate = proxy->GetIsolate();

  // Save identity hash.
  Handle<Object> hash(proxy->GetIdentityHash(), isolate);

  if (proxy->IsJSFunctionProxy()) {
    isolate->factory()->BecomeJSFunction(proxy);
    // Code will be set on the JavaScript side.
  } else {
    isolate->factory()->BecomeJSObject(proxy);
  }
  ASSERT(proxy->IsJSObject());

  // Inherit identity, if it was present.
  // A Smi hash means one was actually computed for the proxy; carry it over
  // so object identity (e.g. in weak collections) survives the fix.
  if (hash->IsSmi()) {
    JSObject::SetIdentityHash(Handle<JSObject>::cast(proxy),
                              Handle<Smi>::cast(hash));
  }
}


// Looks up the named trap on the proxy's handler and invokes it with the
// handler as receiver. If the trap is absent, falls back to |derived|
// (a derived-trap default); a null |derived| makes the trap mandatory and
// a missing one a TypeError.
MaybeHandle<Object> JSProxy::CallTrap(Handle<JSProxy> proxy,
                                      const char* name,
                                      Handle<Object> derived,
                                      int argc,
                                      Handle<Object> argv[]) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<Object> handler(proxy->handler(), isolate);

  Handle<String> trap_name = isolate->factory()->InternalizeUtf8String(name);
  Handle<Object> trap;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate, trap,
      Object::GetPropertyOrElement(handler, trap_name),
      Object);

  if (trap->IsUndefined()) {
    if (derived.is_null()) {
      Handle<Object> args[] = { handler, trap_name };
      Handle<Object> error = isolate->factory()->NewTypeError(
          "handler_trap_missing", HandleVector(args, ARRAY_SIZE(args)));
      return isolate->Throw<Object>(error);
    }
    trap = Handle<Object>(derived);
  }

  return Execution::Call(isolate, trap, handler, argc, argv);
}


// Migrates |object| to |map|, first reconciling any elements-kind mismatch
// between the object's current map and the target map.
void JSObject::AllocateStorageForMap(Handle<JSObject> object, Handle<Map> map) {
  ASSERT(object->map()->inobject_properties() == map->inobject_properties());
  ElementsKind obj_kind = object->map()->elements_kind();
  ElementsKind map_kind = map->elements_kind();
  if (map_kind != obj_kind) {
    ElementsKind to_kind = map_kind;
    // Keep the more general (or dictionary) kind of the two.
    if (IsMoreGeneralElementsKindTransition(map_kind, obj_kind) ||
        IsDictionaryElementsKind(obj_kind)) {
      to_kind = obj_kind;
    }
    if (IsDictionaryElementsKind(to_kind)) {
      NormalizeElements(object);
    } else {
      TransitionElementsKind(object, to_kind);
    }
    map = Map::AsElementsKind(map, to_kind);
  }
  JSObject::MigrateToMap(object, map);
}


// Eagerly migrates a (deprecated) instance to the most general existing
// map transition, and marks the resulting map as a migration target.
void JSObject::MigrateInstance(Handle<JSObject> object) {
  // Converting any field to the most specific type will cause the
  // GeneralizeFieldRepresentation algorithm to create the most general existing
  // transition that matches the object. This achieves what is needed.
  Handle<Map> original_map(object->map());
  GeneralizeFieldRepresentation(
      object, 0, Representation::None(),
      HeapType::None(object->GetIsolate()),
      ALLOW_AS_CONSTANT);
  object->map()->set_migration_target(true);
  if (FLAG_trace_migration) {
    object->PrintInstanceMigration(stdout, *original_map, object->map());
  }
}


// static
// Non-allocating variant of MigrateInstance: only migrates when an
// up-to-date map already exists for the deprecated one. Returns false if
// no such map was found (no observable change in that case).
bool JSObject::TryMigrateInstance(Handle<JSObject> object) {
  Isolate* isolate = object->GetIsolate();
  DisallowDeoptimization no_deoptimization(isolate);
  Handle<Map> original_map(object->map(), isolate);
  Handle<Map> new_map;
  if (!Map::CurrentMapForDeprecatedInternal(original_map).ToHandle(&new_map)) {
    return false;
  }
  JSObject::MigrateToMap(object, new_map);
  if (FLAG_trace_migration) {
    object->PrintInstanceMigration(stdout, *original_map, object->map());
  }
  return true;
}


// Stores |value| by following the map transition recorded in |lookup|,
// generalizing the target field's representation/type when the value does
// not fit the transition target's expectations.
MaybeHandle<Object> JSObject::SetPropertyUsingTransition(
    Handle<JSObject> object,
    LookupResult* lookup,
    Handle<Name> name,
    Handle<Object> value,
    PropertyAttributes attributes) {
  Handle<Map> transition_map(lookup->GetTransitionTarget());
  int descriptor = transition_map->LastAdded();

  Handle<DescriptorArray> descriptors(transition_map->instance_descriptors());
  PropertyDetails details = descriptors->GetDetails(descriptor);

  if (details.type() == CALLBACKS || attributes != details.attributes()) {
    // AddProperty will either normalize the object, or create a new fast copy
    // of the map. If we get a fast copy of the map, all field representations
    // will be tagged since the transition is omitted.
    return JSObject::AddProperty(
        object, name, value, attributes, SLOPPY,
        JSReceiver::CERTAINLY_NOT_STORE_FROM_KEYED,
        JSReceiver::OMIT_EXTENSIBILITY_CHECK,
        JSObject::FORCE_TAGGED, FORCE_FIELD, OMIT_TRANSITION);
  }

  // Keep the target CONSTANT if the same value is stored.
  // TODO(verwaest): Also support keeping the placeholder
  // (value->IsUninitialized) as constant.
  if (!lookup->CanHoldValue(value)) {
    // Generalize the transition target so the new value fits its field.
    Representation field_representation = value->OptimalRepresentation();
    Handle<HeapType> field_type = value->OptimalType(
        lookup->isolate(), field_representation);
    transition_map = Map::GeneralizeRepresentation(
        transition_map, descriptor,
        field_representation, field_type, FORCE_FIELD);
  }

  JSObject::MigrateToNewProperty(object, transition_map, value);
  return value;
}


// Migrates |object| to |map| and, if the newly added descriptor is a FIELD,
// writes |value| into that field. Non-FIELD descriptors need no store.
void JSObject::MigrateToNewProperty(Handle<JSObject> object,
                                    Handle<Map> map,
                                    Handle<Object> value) {
  JSObject::MigrateToMap(object, map);
  if (map->GetLastDescriptorDetails().type() != FIELD) return;
  object->WriteToField(map->LastAdded(), *value);
}


// Raw store of |value| into the field backing the given descriptor. For
// double-representation fields the existing HeapNumber box is mutated in
// place rather than replaced. Must not allocate (raw Object* in hand).
void JSObject::WriteToField(int descriptor, Object* value) {
  DisallowHeapAllocation no_gc;

  DescriptorArray* desc = map()->instance_descriptors();
  PropertyDetails details = desc->GetDetails(descriptor);

  ASSERT(details.type() == FIELD);

  FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor);
  if (details.representation().IsDouble()) {
    // Nothing more to be done.
    if (value->IsUninitialized()) return;
    HeapNumber* box = HeapNumber::cast(RawFastPropertyAt(index));
    box->set_value(value->Number());
  } else {
    FastPropertyAtPut(index, value);
  }
}


// Stores |value| into the data field found by |lookup|, first generalizing
// the field's representation/type when the current descriptor (CONSTANT or
// too-narrow FIELD) cannot hold the value.
static void SetPropertyToField(LookupResult* lookup,
                               Handle<Object> value) {
  if (lookup->type() == CONSTANT || !lookup->CanHoldValue(value)) {
    Representation field_representation = value->OptimalRepresentation();
    Handle<HeapType> field_type = value->OptimalType(
        lookup->isolate(), field_representation);
    JSObject::GeneralizeFieldRepresentation(handle(lookup->holder()),
                                            lookup->GetDescriptorIndex(),
                                            field_representation, field_type,
                                            FORCE_FIELD);
  }
  lookup->holder()->WriteToField(lookup->GetDescriptorIndex(), *value);
}


// Reconfigures an own property to a plain data field (possibly with new
// attributes) and stores |value|. Falls back to dictionary-mode storage
// when the object has (or is normalized to) slow properties.
static void ConvertAndSetOwnProperty(LookupResult* lookup,
                                     Handle<Name> name,
                                     Handle<Object> value,
                                     PropertyAttributes attributes) {
  Handle<JSObject> object(lookup->holder());
  if (object->TooManyFastProperties()) {
    JSObject::NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
  }

  if (!object->HasFastProperties()) {
    ReplaceSlowProperty(object, name, value, attributes);
    return;
  }

  int descriptor_index = lookup->GetDescriptorIndex();
  if (lookup->GetAttributes() == attributes) {
    // Same attributes: just widen the field to hold anything.
    JSObject::GeneralizeFieldRepresentation(
        object, descriptor_index, Representation::Tagged(),
        HeapType::Any(lookup->isolate()), FORCE_FIELD);
  } else {
    // Attribute change requires a new map with all representations
    // generalized.
    Handle<Map> old_map(object->map());
    Handle<Map> new_map = Map::CopyGeneralizeAllRepresentations(old_map,
        descriptor_index, FORCE_FIELD, attributes, "attributes mismatch");
    JSObject::MigrateToMap(object, new_map);
  }

  object->WriteToField(descriptor_index, *value);
}


// Attribute-aware field store: fast path when attributes are unchanged,
// full conversion otherwise.
static void SetPropertyToFieldWithAttributes(LookupResult* lookup,
                                             Handle<Name> name,
                                             Handle<Object> value,
                                             PropertyAttributes attributes) {
  if (lookup->GetAttributes() == attributes) {
    // Uninitialized sentinel means "leave the field as is".
    if (value->IsUninitialized()) return;
    SetPropertyToField(lookup, value);
  } else {
    ConvertAndSetOwnProperty(lookup, name, value, attributes);
  }
}


// Central property-store path for JSObjects. Dispatches on the lookup
// result (transition, normal/dictionary, field, constant, callback,
// interceptor), honors access checks, global-proxy forwarding, strict-mode
// read-only errors, and emits Object.observe change records when observed.
MaybeHandle<Object> JSObject::SetPropertyForResult(
    Handle<JSObject> object,
    LookupResult* lookup,
    Handle<Name> name,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictMode strict_mode,
    StoreFromKeyed store_mode) {
  Isolate* isolate = object->GetIsolate();

  // Make sure that the top context does not change when doing callbacks or
  // interceptor calls.
  AssertNoContextChange ncc(isolate);

  // Optimization for 2-byte strings often used as keys in a decompression
  // dictionary. We internalize these short keys to avoid constantly
  // reallocating them.
  if (name->IsString() && !name->IsInternalizedString() &&
      Handle<String>::cast(name)->length() <= 2) {
    name = isolate->factory()->InternalizeString(Handle<String>::cast(name));
  }

  // Check access rights if needed.
  if (object->IsAccessCheckNeeded()) {
    if (!isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) {
      return SetPropertyWithFailedAccessCheck(object, lookup, name, value,
                                              true, strict_mode);
    }
  }

  if (object->IsJSGlobalProxy()) {
    // Forward stores through the global proxy to the real global object.
    Handle<Object> proto(object->GetPrototype(), isolate);
    if (proto->IsNull()) return value;
    ASSERT(proto->IsJSGlobalObject());
    return SetPropertyForResult(Handle<JSObject>::cast(proto),
        lookup, name, value, attributes, strict_mode, store_mode);
  }

  ASSERT(!lookup->IsFound() || lookup->holder() == *object ||
         lookup->holder()->map()->is_hidden_prototype());

  if (!lookup->IsProperty() && !object->IsJSContextExtensionObject()) {
    // Give setters in the prototype chain a chance to intercept the store.
    bool done = false;
    Handle<Object> result_object;
    ASSIGN_RETURN_ON_EXCEPTION(
        isolate, result_object,
        SetPropertyViaPrototypes(
            object, name, value, attributes, strict_mode, &done),
        Object);
    if (done) return result_object;
  }

  if (!lookup->IsFound()) {
    // Neither properties nor transitions found.
    return AddProperty(
        object, name, value, attributes, strict_mode, store_mode);
  }

  if (lookup->IsProperty() && lookup->IsReadOnly()) {
    if (strict_mode == STRICT) {
      Handle<Object> args[] = { name, object };
      Handle<Object> error = isolate->factory()->NewTypeError(
          "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args)));
      return isolate->Throw<Object>(error);
    } else {
      // Sloppy mode silently ignores writes to read-only properties.
      return value;
    }
  }

  // Capture the old value for Object.observe before mutating.
  Handle<Object> old_value = isolate->factory()->the_hole_value();
  bool is_observed = object->map()->is_observed() &&
                     *name != isolate->heap()->hidden_string();
  if (is_observed && lookup->IsDataProperty()) {
    old_value = Object::GetPropertyOrElement(object, name).ToHandleChecked();
  }

  // This is a real property that is not read-only, or it is a
  // transition or null descriptor and there are no setters in the prototypes.
  MaybeHandle<Object> maybe_result = value;
  if (lookup->IsTransition()) {
    maybe_result = SetPropertyUsingTransition(handle(lookup->holder()), lookup,
                                              name, value, attributes);
  } else {
    switch (lookup->type()) {
      case NORMAL:
        SetNormalizedProperty(handle(lookup->holder()), lookup, value);
        break;
      case FIELD:
        SetPropertyToField(lookup, value);
        break;
      case CONSTANT:
        // Only replace the constant if necessary.
        if (*value == lookup->GetConstant()) return value;
        SetPropertyToField(lookup, value);
        break;
      case CALLBACKS: {
        Handle<Object> callback_object(lookup->GetCallbackObject(), isolate);
        return SetPropertyWithCallback(object, name, value,
                                       handle(lookup->holder()),
                                       callback_object, strict_mode);
      }
      case INTERCEPTOR:
        maybe_result = SetPropertyWithInterceptor(
            handle(lookup->holder()), name, value, attributes, strict_mode);
        break;
      case HANDLER:
      case NONEXISTENT:
        UNREACHABLE();
    }
  }

  Handle<Object> result;
  ASSIGN_RETURN_ON_EXCEPTION(isolate, result, maybe_result, Object);

  if (is_observed) {
    if (lookup->IsTransition()) {
      EnqueueChangeRecord(object, "add", name, old_value);
    } else {
      // Re-lookup to see whether the store actually changed a data value.
      LookupResult new_lookup(isolate);
      object->LookupOwn(name, &new_lookup, true);
      if (new_lookup.IsDataProperty()) {
        Handle<Object> new_value =
            Object::GetPropertyOrElement(object, name).ToHandleChecked();
        if (!new_value->SameValue(*old_value)) {
          EnqueueChangeRecord(object, "update", name, old_value);
        }
      }
    }
  }

  return result;
}


// Set a real own property, even if it is READ_ONLY.  If the property is not
// present, add it with attributes NONE.  This code is an exact clone of
// SetProperty, with the check for IsReadOnly and the check for a
// callback setter removed.  The two lines looking up the LookupResult
// result are also added.  If one of the functions is changed, the other
// should be.
MaybeHandle<Object> JSObject::SetOwnPropertyIgnoreAttributes(
    Handle<JSObject> object,
    Handle<Name> name,
    Handle<Object> value,
    PropertyAttributes attributes,
    ValueType value_type,
    StoreMode mode,
    ExtensibilityCheck extensibility_check,
    StoreFromKeyed store_from_keyed,
    ExecutableAccessorInfoHandling handling) {
  Isolate* isolate = object->GetIsolate();

  // Make sure that the top context does not change when doing callbacks or
  // interceptor calls.
  AssertNoContextChange ncc(isolate);

  // Own lookup first; fall back to a map transition if nothing is found.
  LookupResult lookup(isolate);
  object->LookupOwn(name, &lookup, true);
  if (!lookup.IsFound()) {
    object->map()->LookupTransition(*object, *name, &lookup);
  }

  // Check access rights if needed.
  if (object->IsAccessCheckNeeded()) {
    if (!isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) {
      return SetPropertyWithFailedAccessCheck(object, &lookup, name, value,
                                              false, SLOPPY);
    }
  }

  if (object->IsJSGlobalProxy()) {
    // Forward through the global proxy to the real global object.
    Handle<Object> proto(object->GetPrototype(), isolate);
    if (proto->IsNull()) return value;
    ASSERT(proto->IsJSGlobalObject());
    return SetOwnPropertyIgnoreAttributes(Handle<JSObject>::cast(proto),
        name, value, attributes, value_type, mode, extensibility_check);
  }

  // Interceptors and callbacks are bypassed: re-lookup the real property.
  if (lookup.IsInterceptor() ||
      (lookup.IsDescriptorOrDictionary() && lookup.type() == CALLBACKS)) {
    object->LookupOwnRealNamedProperty(name, &lookup);
  }

  // Check for accessor in prototype chain removed here in clone.
  if (!lookup.IsFound()) {
    object->map()->LookupTransition(*object, *name, &lookup);
    TransitionFlag flag = lookup.IsFound()
        ? OMIT_TRANSITION : INSERT_TRANSITION;
    // Neither properties nor transitions found.
    return AddProperty(object, name, value, attributes, SLOPPY,
        store_from_keyed, extensibility_check, value_type, mode, flag);
  }

  // Capture previous value/attributes for Object.observe records.
  Handle<Object> old_value = isolate->factory()->the_hole_value();
  PropertyAttributes old_attributes = ABSENT;
  bool is_observed = object->map()->is_observed() &&
                     *name != isolate->heap()->hidden_string();
  if (is_observed && lookup.IsProperty()) {
    if (lookup.IsDataProperty()) {
      old_value = Object::GetPropertyOrElement(object, name).ToHandleChecked();
    }
    old_attributes = lookup.GetAttributes();
  }

  bool executed_set_prototype = false;

  // Check of IsReadOnly removed from here in clone.
  if (lookup.IsTransition()) {
    Handle<Object> result;
    ASSIGN_RETURN_ON_EXCEPTION(
        isolate, result,
        SetPropertyUsingTransition(
            handle(lookup.holder()), &lookup, name, value, attributes),
        Object);
  } else {
    switch (lookup.type()) {
      case NORMAL:
        ReplaceSlowProperty(object, name, value, attributes);
        break;
      case FIELD:
        SetPropertyToFieldWithAttributes(&lookup, name, value, attributes);
        break;
      case CONSTANT:
        // Only replace the constant if necessary.
        if (lookup.GetAttributes() != attributes ||
            *value != lookup.GetConstant()) {
          SetPropertyToFieldWithAttributes(&lookup, name, value, attributes);
        }
        break;
      case CALLBACKS:
      {
        Handle<Object> callback(lookup.GetCallbackObject(), isolate);
        if (callback->IsExecutableAccessorInfo() &&
            handling == DONT_FORCE_FIELD) {
          // Keep the accessor, run its setter, and clone-reconfigure it if
          // the requested attributes differ from the current ones.
          Handle<Object> result;
          ASSIGN_RETURN_ON_EXCEPTION(
              isolate, result,
              JSObject::SetPropertyWithCallback(object,
                                                name,
                                                value,
                                                handle(lookup.holder()),
                                                callback,
                                                STRICT),
              Object);

          if (attributes != lookup.GetAttributes()) {
            Handle<ExecutableAccessorInfo> new_data =
                Accessors::CloneAccessor(
                    isolate, Handle<ExecutableAccessorInfo>::cast(callback));
            new_data->set_property_attributes(attributes);
            if (attributes & READ_ONLY) {
              // This way we don't have to introduce a lookup to the setter,
              // simply make it unavailable to reflect the attributes.
              new_data->clear_setter();
            }

            SetPropertyCallback(object, name, new_data, attributes);
          }
          if (is_observed) {
            // If we are setting the prototype of a function and are observed,
            // don't send change records because the prototype handles that
            // itself.
            executed_set_prototype = object->IsJSFunction() &&
                String::Equals(isolate->factory()->prototype_string(),
                               Handle<String>::cast(name)) &&
                Handle<JSFunction>::cast(object)->should_have_prototype();
          }
        } else {
          ConvertAndSetOwnProperty(&lookup, name, value, attributes);
        }
        break;
      }
      case NONEXISTENT:
      case HANDLER:
      case INTERCEPTOR:
        UNREACHABLE();
    }
  }

  if (is_observed && !executed_set_prototype) {
    if (lookup.IsTransition()) {
      EnqueueChangeRecord(object, "add", name, old_value);
    } else if (old_value->IsTheHole()) {
      EnqueueChangeRecord(object, "reconfigure", name, old_value);
    } else {
      // Distinguish "update" (value only) from "reconfigure" (attributes).
      LookupResult new_lookup(isolate);
      object->LookupOwn(name, &new_lookup, true);
      bool value_changed = false;
      if (new_lookup.IsDataProperty()) {
        Handle<Object> new_value =
            Object::GetPropertyOrElement(object, name).ToHandleChecked();
        value_changed = !old_value->SameValue(*new_value);
      }
      if (new_lookup.GetAttributes() != old_attributes) {
        if (!value_changed) old_value = isolate->factory()->the_hole_value();
        EnqueueChangeRecord(object, "reconfigure", name, old_value);
      } else if (value_changed) {
        EnqueueChangeRecord(object, "update", name, old_value);
      }
    }
  }

  return value;
}


// Queries property attributes through a named interceptor. Returns an
// empty Maybe when the interceptor does not answer, so the caller can
// continue the regular lookup.
Maybe<PropertyAttributes> JSObject::GetPropertyAttributesWithInterceptor(
    Handle<JSObject> holder,
    Handle<Object> receiver,
    Handle<Name> name) {
  // TODO(rossberg): Support symbols in the API.
  if (name->IsSymbol()) return Maybe<PropertyAttributes>(ABSENT);

  Isolate* isolate = holder->GetIsolate();
  HandleScope scope(isolate);

  // Make sure that the top context does not change when doing
  // callbacks or interceptor calls.
  AssertNoContextChange ncc(isolate);

  Handle<InterceptorInfo> interceptor(holder->GetNamedInterceptor());
  PropertyCallbackArguments args(
      isolate, interceptor->data(), *receiver, *holder);
  if (!interceptor->query()->IsUndefined()) {
    // Prefer the query callback: it reports attributes directly.
    v8::NamedPropertyQueryCallback query =
        v8::ToCData<v8::NamedPropertyQueryCallback>(interceptor->query());
    LOG(isolate,
        ApiNamedPropertyAccess("interceptor-named-has", *holder, *name));
    v8::Handle<v8::Integer> result =
        args.Call(query, v8::Utils::ToLocal(Handle<String>::cast(name)));
    if (!result.IsEmpty()) {
      ASSERT(result->IsInt32());
      return Maybe<PropertyAttributes>(
          static_cast<PropertyAttributes>(result->Int32Value()));
    }
  } else if (!interceptor->getter()->IsUndefined()) {
    // Fall back to the getter: a non-empty result only proves existence,
    // so report DONT_ENUM as the attributes.
    v8::NamedPropertyGetterCallback getter =
        v8::ToCData<v8::NamedPropertyGetterCallback>(interceptor->getter());
    LOG(isolate,
        ApiNamedPropertyAccess("interceptor-named-get-has", *holder, *name));
    v8::Handle<v8::Value> result =
        args.Call(getter, v8::Utils::ToLocal(Handle<String>::cast(name)));
    if (!result.IsEmpty()) return Maybe<PropertyAttributes>(DONT_ENUM);
  }
  // Empty Maybe: interceptor declined, caller continues the lookup.
  return Maybe<PropertyAttributes>();
}


// Returns the attributes of an own property, routing array-index names to
// the element path and everything else through a CHECK_OWN LookupIterator.
PropertyAttributes JSReceiver::GetOwnPropertyAttributes(
    Handle<JSReceiver> object, Handle<Name> name) {
  // Check whether the name is an array index.
  uint32_t index = 0;
  if (object->IsJSObject() && name->AsArrayIndex(&index)) {
    return GetOwnElementAttribute(object, index);
  }
  LookupIterator it(object, name, LookupIterator::CHECK_OWN);
  return GetPropertyAttributes(&it);
}


// Walks the LookupIterator states (proxy, interceptor, access check,
// property) and returns the first definitive attribute answer, or ABSENT.
PropertyAttributes JSReceiver::GetPropertyAttributes(LookupIterator* it) {
  for (; it->IsFound(); it->Next()) {
    switch (it->state()) {
      case LookupIterator::NOT_FOUND:
        UNREACHABLE();
      case LookupIterator::JSPROXY:
        return JSProxy::GetPropertyAttributesWithHandler(
            it->GetJSProxy(), it->GetReceiver(), it->name());
      case LookupIterator::INTERCEPTOR: {
        Maybe<PropertyAttributes> result =
            JSObject::GetPropertyAttributesWithInterceptor(
                it->GetHolder(), it->GetReceiver(), it->name());
        // No answer from the interceptor: keep iterating.
        if (result.has_value) return result.value;
        break;
      }
      case LookupIterator::ACCESS_CHECK:
        if (it->HasAccess(v8::ACCESS_HAS)) break;
        return JSObject::GetPropertyAttributesWithFailedAccessCheck(it);
      case LookupIterator::PROPERTY:
        if (it->HasProperty()) return it->property_details().attributes();
        break;
    }
  }
  return ABSENT;
}


// Element-attribute lookup entry point: handles access checks, global-proxy
// forwarding, and indexed interceptors before the plain element lookup.
PropertyAttributes JSObject::GetElementAttributeWithReceiver(
    Handle<JSObject> object,
    Handle<JSReceiver> receiver,
    uint32_t index,
    bool check_prototype) {
  Isolate* isolate = object->GetIsolate();

  // Check access rights if needed.
  if (object->IsAccessCheckNeeded()) {
    if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_HAS)) {
      isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS);
      // TODO(yangguo): Issue 3269, check for scheduled exception missing?
      return ABSENT;
    }
  }

  if (object->IsJSGlobalProxy()) {
    // Forward through the global proxy to the real global object.
    Handle<Object> proto(object->GetPrototype(), isolate);
    if (proto->IsNull()) return ABSENT;
    ASSERT(proto->IsJSGlobalObject());
    return JSObject::GetElementAttributeWithReceiver(
        Handle<JSObject>::cast(proto), receiver, index, check_prototype);
  }

  // Check for lookup interceptor except when bootstrapping.
  if (object->HasIndexedInterceptor() && !isolate->bootstrapper()->IsActive()) {
    return JSObject::GetElementAttributeWithInterceptor(
        object, receiver, index, check_prototype);
  }

  return GetElementAttributeWithoutInterceptor(
      object, receiver, index, check_prototype);
}


// Queries element attributes through an indexed interceptor; falls through
// to the interceptor-free lookup when the interceptor does not answer.
PropertyAttributes JSObject::GetElementAttributeWithInterceptor(
    Handle<JSObject> object,
    Handle<JSReceiver> receiver,
    uint32_t index,
    bool check_prototype) {
  Isolate* isolate = object->GetIsolate();
  HandleScope scope(isolate);

  // Make sure that the top context does not change when doing
  // callbacks or interceptor calls.
  AssertNoContextChange ncc(isolate);

  Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
  PropertyCallbackArguments args(
      isolate, interceptor->data(), *receiver, *object);
  if (!interceptor->query()->IsUndefined()) {
    // The query callback reports attributes directly.
    v8::IndexedPropertyQueryCallback query =
        v8::ToCData<v8::IndexedPropertyQueryCallback>(interceptor->query());
    LOG(isolate,
        ApiIndexedPropertyAccess("interceptor-indexed-has", *object, index));
    v8::Handle<v8::Integer> result = args.Call(query, index);
    if (!result.IsEmpty())
      return static_cast<PropertyAttributes>(result->Int32Value());
  } else if (!interceptor->getter()->IsUndefined()) {
    // The getter only proves existence; report NONE as the attributes.
    v8::IndexedPropertyGetterCallback getter =
        v8::ToCData<v8::IndexedPropertyGetterCallback>(interceptor->getter());
    LOG(isolate,
        ApiIndexedPropertyAccess(
            "interceptor-indexed-get-has", *object, index));
    v8::Handle<v8::Value> result = args.Call(getter, index);
    if (!result.IsEmpty()) return NONE;
  }

  return GetElementAttributeWithoutInterceptor(
      object, receiver, index, check_prototype);
}


// Plain element-attribute lookup: asks the elements accessor, then handles
// string-wrapper characters, then optionally walks the prototype chain.
PropertyAttributes JSObject::GetElementAttributeWithoutInterceptor(
    Handle<JSObject> object,
    Handle<JSReceiver> receiver,
    uint32_t index,
    bool check_prototype) {
  PropertyAttributes attr = object->GetElementsAccessor()->GetAttributes(
      receiver, object, index);
  if (attr != ABSENT) return attr;

  // Handle [] on String objects.
  if (object->IsStringObjectWithCharacterAt(index)) {
    return static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
  }

  if (!check_prototype) return ABSENT;

  Handle<Object> proto(object->GetPrototype(), object->GetIsolate());
  if (proto->IsJSProxy()) {
    // We need to follow the spec and simulate a call to [[GetOwnProperty]].
    return JSProxy::GetElementAttributeWithHandler(
        Handle<JSProxy>::cast(proto), receiver, index);
  }
  if (proto->IsNull()) return ABSENT;
  return GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(proto), receiver, index, true);
}


// Creates a fresh normalized-map cache, backed by a tenured fixed array of
// kEntries slots.
Handle<NormalizedMapCache> NormalizedMapCache::New(Isolate* isolate) {
  Handle<FixedArray> array(
      isolate->factory()->NewFixedArray(kEntries, TENURED));
  return Handle<NormalizedMapCache>::cast(array);
}


// Cache lookup: returns the cached normalized map for |fast_map| if the
// slot holds an equivalent map for the given normalization mode.
MaybeHandle<Map> NormalizedMapCache::Get(Handle<Map> fast_map,
                                         PropertyNormalizationMode mode) {
  DisallowHeapAllocation no_gc;
  Object* value = FixedArray::get(GetIndex(fast_map));
  if (!value->IsMap() ||
      !Map::cast(value)->EquivalentToForNormalization(*fast_map, mode)) {
    return MaybeHandle<Map>();
  }
  return handle(Map::cast(value));
}


// Cache insert: stores |normalized_map| in the slot keyed by |fast_map|.
void NormalizedMapCache::Set(Handle<Map> fast_map,
                             Handle<Map> normalized_map) {
  DisallowHeapAllocation no_gc;
  ASSERT(normalized_map->is_dictionary_map());
  FixedArray::set(GetIndex(fast_map), *normalized_map);
}


// Drops every cache entry by overwriting the slots with undefined.
void NormalizedMapCache::Clear() {
  int entries = length();
  for (int i = 0; i != entries; i++) {
    set_undefined(i);
  }
}


// Records |code| under |name| in the code cache of this object's map.
void HeapObject::UpdateMapCodeCache(Handle<HeapObject> object,
                                    Handle<Name> name,
                                    Handle<Code> code) {
  Handle<Map> map(object->map());
  Map::UpdateCodeCache(map, name, code);
}


// Converts a fast-properties object to dictionary (slow) properties:
// copies all own descriptors into a NameDictionary, shrinks the instance,
// and installs the normalized map. No-op if already normalized.
void JSObject::NormalizeProperties(Handle<JSObject> object,
                                   PropertyNormalizationMode mode,
                                   int expected_additional_properties) {
  if (!object->HasFastProperties()) return;

  // The global object is always normalized.
4564 ASSERT(!object->IsGlobalObject()); 4565 // JSGlobalProxy must never be normalized 4566 ASSERT(!object->IsJSGlobalProxy()); 4567 4568 Isolate* isolate = object->GetIsolate(); 4569 HandleScope scope(isolate); 4570 Handle<Map> map(object->map()); 4571 Handle<Map> new_map = Map::Normalize(map, mode); 4572 4573 // Allocate new content. 4574 int real_size = map->NumberOfOwnDescriptors(); 4575 int property_count = real_size; 4576 if (expected_additional_properties > 0) { 4577 property_count += expected_additional_properties; 4578 } else { 4579 property_count += 2; // Make space for two more properties. 4580 } 4581 Handle<NameDictionary> dictionary = 4582 NameDictionary::New(isolate, property_count); 4583 4584 Handle<DescriptorArray> descs(map->instance_descriptors()); 4585 for (int i = 0; i < real_size; i++) { 4586 PropertyDetails details = descs->GetDetails(i); 4587 switch (details.type()) { 4588 case CONSTANT: { 4589 Handle<Name> key(descs->GetKey(i)); 4590 Handle<Object> value(descs->GetConstant(i), isolate); 4591 PropertyDetails d = PropertyDetails( 4592 details.attributes(), NORMAL, i + 1); 4593 dictionary = NameDictionary::Add(dictionary, key, value, d); 4594 break; 4595 } 4596 case FIELD: { 4597 Handle<Name> key(descs->GetKey(i)); 4598 FieldIndex index = FieldIndex::ForDescriptor(*map, i); 4599 Handle<Object> value( 4600 object->RawFastPropertyAt(index), isolate); 4601 PropertyDetails d = 4602 PropertyDetails(details.attributes(), NORMAL, i + 1); 4603 dictionary = NameDictionary::Add(dictionary, key, value, d); 4604 break; 4605 } 4606 case CALLBACKS: { 4607 Handle<Name> key(descs->GetKey(i)); 4608 Handle<Object> value(descs->GetCallbacksObject(i), isolate); 4609 PropertyDetails d = PropertyDetails( 4610 details.attributes(), CALLBACKS, i + 1); 4611 dictionary = NameDictionary::Add(dictionary, key, value, d); 4612 break; 4613 } 4614 case INTERCEPTOR: 4615 break; 4616 case HANDLER: 4617 case NORMAL: 4618 case NONEXISTENT: 4619 UNREACHABLE(); 4620 break; 4621 } 
4622 } 4623 4624 // Copy the next enumeration index from instance descriptor. 4625 dictionary->SetNextEnumerationIndex(real_size + 1); 4626 4627 // From here on we cannot fail and we shouldn't GC anymore. 4628 DisallowHeapAllocation no_allocation; 4629 4630 // Resize the object in the heap if necessary. 4631 int new_instance_size = new_map->instance_size(); 4632 int instance_size_delta = map->instance_size() - new_instance_size; 4633 ASSERT(instance_size_delta >= 0); 4634 Heap* heap = isolate->heap(); 4635 heap->CreateFillerObjectAt(object->address() + new_instance_size, 4636 instance_size_delta); 4637 heap->AdjustLiveBytes(object->address(), 4638 -instance_size_delta, 4639 Heap::FROM_MUTATOR); 4640 4641 // We are storing the new map using release store after creating a filler for 4642 // the left-over space to avoid races with the sweeper thread. 4643 object->synchronized_set_map(*new_map); 4644 4645 object->set_properties(*dictionary); 4646 4647 isolate->counters()->props_to_dictionary()->Increment(); 4648 4649 #ifdef DEBUG 4650 if (FLAG_trace_normalization) { 4651 PrintF("Object properties have been normalized:\n"); 4652 object->Print(); 4653 } 4654 #endif 4655 } 4656 4657 4658 void JSObject::TransformToFastProperties(Handle<JSObject> object, 4659 int unused_property_fields) { 4660 if (object->HasFastProperties()) return; 4661 ASSERT(!object->IsGlobalObject()); 4662 Isolate* isolate = object->GetIsolate(); 4663 Factory* factory = isolate->factory(); 4664 Handle<NameDictionary> dictionary(object->property_dictionary()); 4665 4666 // Make sure we preserve dictionary representation if there are too many 4667 // descriptors. 
  int number_of_elements = dictionary->NumberOfElements();
  if (number_of_elements > kMaxNumberOfDescriptors) return;

  // Compact/renumber enumeration indices if they are not dense (e.g. after
  // deletions), so dictionary_index() below maps 1:1 onto descriptor slots.
  if (number_of_elements != dictionary->NextEnumerationIndex()) {
    NameDictionary::DoGenerateNewEnumerationIndices(dictionary);
  }

  int instance_descriptor_length = 0;
  int number_of_fields = 0;

  // Compute the length of the instance descriptor.
  // JSFunction values become CONSTANT descriptors; everything else of type
  // NORMAL needs a real field slot.
  int capacity = dictionary->Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = dictionary->KeyAt(i);
    if (dictionary->IsKey(k)) {
      Object* value = dictionary->ValueAt(i);
      PropertyType type = dictionary->DetailsAt(i).type();
      ASSERT(type != FIELD);
      instance_descriptor_length++;
      if (type == NORMAL && !value->IsJSFunction()) {
        number_of_fields += 1;
      }
    }
  }

  int inobject_props = object->map()->inobject_properties();

  // Allocate new map.
  Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
  new_map->set_dictionary_map(false);

  // Fast path: no properties at all — just install the new map and an empty
  // backing store.
  if (instance_descriptor_length == 0) {
    DisallowHeapAllocation no_gc;
    ASSERT_LE(unused_property_fields, inobject_props);
    // Transform the object.
    new_map->set_unused_property_fields(inobject_props);
    object->set_map(*new_map);
    object->set_properties(isolate->heap()->empty_fixed_array());
    // Check that it really works.
    ASSERT(object->HasFastProperties());
    return;
  }

  // Allocate the instance descriptor.
  Handle<DescriptorArray> descriptors = DescriptorArray::Allocate(
      isolate, instance_descriptor_length);

  // Fields that do not fit in-object go to an out-of-object properties array.
  int number_of_allocated_fields =
      number_of_fields + unused_property_fields - inobject_props;
  if (number_of_allocated_fields < 0) {
    // There is enough inobject space for all fields (including unused).
    number_of_allocated_fields = 0;
    unused_property_fields = inobject_props - number_of_fields;
  }

  // Allocate the fixed array for the fields.
  Handle<FixedArray> fields = factory->NewFixedArray(
      number_of_allocated_fields);

  // Fill in the instance descriptor and the fields.
  int current_offset = 0;
  for (int i = 0; i < capacity; i++) {
    Object* k = dictionary->KeyAt(i);
    if (dictionary->IsKey(k)) {
      Object* value = dictionary->ValueAt(i);
      Handle<Name> key;
      if (k->IsSymbol()) {
        key = handle(Symbol::cast(k));
      } else {
        // Ensure the key is a unique name before writing into the
        // instance descriptor.
        key = factory->InternalizeString(handle(String::cast(k)));
      }

      PropertyDetails details = dictionary->DetailsAt(i);
      // Enumeration indices are 1-based; descriptor slots are 0-based.
      int enumeration_index = details.dictionary_index();
      PropertyType type = details.type();

      if (value->IsJSFunction()) {
        ConstantDescriptor d(key,
                             handle(value, isolate),
                             details.attributes());
        descriptors->Set(enumeration_index - 1, &d);
      } else if (type == NORMAL) {
        // Place the value in-object while slots remain, then spill into the
        // out-of-object |fields| array.
        if (current_offset < inobject_props) {
          object->InObjectPropertyAtPut(current_offset,
                                        value,
                                        UPDATE_WRITE_BARRIER);
        } else {
          int offset = current_offset - inobject_props;
          fields->set(offset, value);
        }
        FieldDescriptor d(key,
                          current_offset++,
                          details.attributes(),
                          // TODO(verwaest): value->OptimalRepresentation();
                          Representation::Tagged());
        descriptors->Set(enumeration_index - 1, &d);
      } else if (type == CALLBACKS) {
        CallbacksDescriptor d(key,
                              handle(value, isolate),
                              details.attributes());
        descriptors->Set(enumeration_index - 1, &d);
      } else {
        UNREACHABLE();
      }
    }
  }
  ASSERT(current_offset == number_of_fields);

  descriptors->Sort();

  DisallowHeapAllocation no_gc;
  new_map->InitializeDescriptors(*descriptors);

  new_map->set_unused_property_fields(unused_property_fields);

  // Transform the object.
  object->set_map(*new_map);

  object->set_properties(*fields);
  ASSERT(object->IsJSObject());

  // Check that it really works.
  ASSERT(object->HasFastProperties());
}


// Resets the object's elements to the initial (empty) backing store for its
// map's elements kind. Must not be called on sloppy-arguments elements.
void JSObject::ResetElements(Handle<JSObject> object) {
  Heap* heap = object->GetIsolate()->heap();
  CHECK(object->map() != heap->sloppy_arguments_elements_map());
  object->set_elements(object->map()->GetInitialElements());
}


// Copies the first |length| elements of |array| (a fast or fast-double
// backing store) into |dictionary|, skipping holes. Double values are boxed
// as HeapNumbers. Returns the (possibly reallocated) dictionary.
static Handle<SeededNumberDictionary> CopyFastElementsToDictionary(
    Handle<FixedArrayBase> array,
    int length,
    Handle<SeededNumberDictionary> dictionary) {
  Isolate* isolate = array->GetIsolate();
  Factory* factory = isolate->factory();
  bool has_double_elements = array->IsFixedDoubleArray();
  for (int i = 0; i < length; i++) {
    Handle<Object> value;
    if (has_double_elements) {
      Handle<FixedDoubleArray> double_array =
          Handle<FixedDoubleArray>::cast(array);
      if (double_array->is_the_hole(i)) {
        value = factory->the_hole_value();
      } else {
        value = factory->NewHeapNumber(double_array->get_scalar(i));
      }
    } else {
      value = handle(Handle<FixedArray>::cast(array)->get(i), isolate);
    }
    if (!value->IsTheHole()) {
      PropertyDetails details = PropertyDetails(NONE, NORMAL, 0);
      dictionary =
          SeededNumberDictionary::AddNumberEntry(dictionary, i, value, details);
    }
  }
  return dictionary;
}


// Converts the object's elements backing store to dictionary (slow) mode and
// returns the resulting SeededNumberDictionary. Handles plain fast elements
// as well as the arguments-object's mapped-arguments backing store. Must not
// be called on external or fixed-typed-array elements.
Handle<SeededNumberDictionary> JSObject::NormalizeElements(
    Handle<JSObject> object) {
  ASSERT(!object->HasExternalArrayElements() &&
         !object->HasFixedTypedArrayElements());
  Isolate* isolate = object->GetIsolate();

  // Find the backing store.
  Handle<FixedArrayBase> array(FixedArrayBase::cast(object->elements()));
  bool is_arguments =
      (array->map() == isolate->heap()->sloppy_arguments_elements_map());
  if (is_arguments) {
    // For sloppy arguments the real backing store sits in slot 1 of the
    // parameter map.
    array = handle(FixedArrayBase::cast(
        Handle<FixedArray>::cast(array)->get(1)));
  }
  // Already normalized: nothing to do.
  if (array->IsDictionary()) return Handle<SeededNumberDictionary>::cast(array);

  ASSERT(object->HasFastSmiOrObjectElements() ||
         object->HasFastDoubleElements() ||
         object->HasFastArgumentsElements());
  // Compute the effective length and allocate a new backing store.
  int length = object->IsJSArray()
      ? Smi::cast(Handle<JSArray>::cast(object)->length())->value()
      : array->length();
  int old_capacity = 0;
  int used_elements = 0;
  object->GetElementsCapacityAndUsage(&old_capacity, &used_elements);
  Handle<SeededNumberDictionary> dictionary =
      SeededNumberDictionary::New(isolate, used_elements);

  dictionary = CopyFastElementsToDictionary(array, length, dictionary);

  // Switch to using the dictionary as the backing storage for elements.
  if (is_arguments) {
    FixedArray::cast(object->elements())->set(1, *dictionary);
  } else {
    // Set the new map first to satisfy the elements type assert in
    // set_elements().
    Handle<Map> new_map =
        JSObject::GetElementsTransitionMap(object, DICTIONARY_ELEMENTS);

    JSObject::MigrateToMap(object, new_map);
    object->set_elements(*dictionary);
  }

  isolate->counters()->elements_to_dictionary()->Increment();

#ifdef DEBUG
  if (FLAG_trace_normalization) {
    PrintF("Object elements have been normalized:\n");
    object->Print();
  }
#endif

  ASSERT(object->HasDictionaryElements() ||
         object->HasDictionaryArgumentsElements());
  return dictionary;
}


// Produces a random identity hash in Smi range. Retries up to 30 times to
// avoid 0 (the "absent" sentinel), then falls back to 1 so 0 is never
// returned.
static Smi* GenerateIdentityHash(Isolate* isolate) {
  int hash_value;
  int attempts = 0;
  do {
    // Generate a random 32-bit hash value but limit range to fit
    // within a smi.
    hash_value = isolate->random_number_generator()->NextInt() & Smi::kMaxValue;
    attempts++;
  } while (hash_value == 0 && attempts < 30);
  hash_value = hash_value != 0 ? hash_value : 1;  // never return 0

  return Smi::FromInt(hash_value);
}


// Stores |hash| as the object's identity hash, kept as a hidden property
// under the identity-hash string key.
void JSObject::SetIdentityHash(Handle<JSObject> object, Handle<Smi> hash) {
  ASSERT(!object->IsJSGlobalProxy());
  Isolate* isolate = object->GetIsolate();
  SetHiddenProperty(object, isolate->factory()->identity_hash_string(), hash);
}


// Shared implementation for proxy-like objects (JSProxy, JSGlobalProxy) that
// keep their identity hash in a dedicated |hash| field: returns the existing
// Smi hash, or generates, stores, and returns a fresh one.
template<typename ProxyType>
static Handle<Smi> GetOrCreateIdentityHashHelper(Handle<ProxyType> proxy) {
  Isolate* isolate = proxy->GetIsolate();

  Handle<Object> maybe_hash(proxy->hash(), isolate);
  if (maybe_hash->IsSmi()) return Handle<Smi>::cast(maybe_hash);

  Handle<Smi> hash(GenerateIdentityHash(isolate), isolate);
  proxy->set_hash(*hash);
  return hash;
}


// Returns the object's identity hash as a Smi, or undefined if none has been
// assigned yet. Never allocates.
Object* JSObject::GetIdentityHash() {
  DisallowHeapAllocation no_gc;
  Isolate* isolate = GetIsolate();
  if (IsJSGlobalProxy()) {
    return JSGlobalProxy::cast(this)->hash();
  }
  Object* stored_value =
      GetHiddenProperty(isolate->factory()->identity_hash_string());
  return stored_value->IsSmi()
      ? stored_value
      : isolate->heap()->undefined_value();
}


// Returns the object's identity hash, creating and storing one on first use.
Handle<Smi> JSObject::GetOrCreateIdentityHash(Handle<JSObject> object) {
  if (object->IsJSGlobalProxy()) {
    return GetOrCreateIdentityHashHelper(Handle<JSGlobalProxy>::cast(object));
  }

  Isolate* isolate = object->GetIsolate();

  Handle<Object> maybe_hash(object->GetIdentityHash(), isolate);
  if (maybe_hash->IsSmi()) return Handle<Smi>::cast(maybe_hash);

  Handle<Smi> hash(GenerateIdentityHash(isolate), isolate);
  SetHiddenProperty(object, isolate->factory()->identity_hash_string(), hash);
  return hash;
}


// Proxies keep their identity hash in a dedicated field.
Object* JSProxy::GetIdentityHash() {
  return this->hash();
}


Handle<Smi> JSProxy::GetOrCreateIdentityHash(Handle<JSProxy> proxy) {
  return GetOrCreateIdentityHashHelper(proxy);
}


// Looks up the hidden property |key| on this object. Returns the hole value
// when the property is absent (or the global proxy is detached). Never
// allocates.
Object* JSObject::GetHiddenProperty(Handle<Name> key) {
  DisallowHeapAllocation no_gc;
  ASSERT(key->IsUniqueName());
  if (IsJSGlobalProxy()) {
    // JSGlobalProxies store their hash internally.
    ASSERT(*key != GetHeap()->identity_hash_string());
    // For a proxy, use the prototype as target object.
    Object* proxy_parent = GetPrototype();
    // If the proxy is detached, return undefined.
    if (proxy_parent->IsNull()) return GetHeap()->the_hole_value();
    ASSERT(proxy_parent->IsJSGlobalObject());
    return JSObject::cast(proxy_parent)->GetHiddenProperty(key);
  }
  ASSERT(!IsJSGlobalProxy());
  Object* inline_value = GetHiddenPropertiesHashTable();

  if (inline_value->IsSmi()) {
    // Handle inline-stored identity hash: a bare Smi in the hidden-properties
    // slot means only the identity hash exists, with no hash table yet.
    if (*key == GetHeap()->identity_hash_string()) {
      return inline_value;
    } else {
      return GetHeap()->the_hole_value();
    }
  }

  if (inline_value->IsUndefined()) return GetHeap()->the_hole_value();

  ObjectHashTable* hashtable = ObjectHashTable::cast(inline_value);
  Object* entry = hashtable->Lookup(key);
  return entry;
}


// Sets hidden property |key| to |value| on |object|. Returns |object| on
// success, or undefined if a detached global proxy made the store impossible.
Handle<Object> JSObject::SetHiddenProperty(Handle<JSObject> object,
                                           Handle<Name> key,
                                           Handle<Object> value) {
  Isolate* isolate = object->GetIsolate();

  ASSERT(key->IsUniqueName());
  if (object->IsJSGlobalProxy()) {
    // JSGlobalProxies store their hash internally.
    ASSERT(*key != *isolate->factory()->identity_hash_string());
    // For a proxy, use the prototype as target object.
    Handle<Object> proxy_parent(object->GetPrototype(), isolate);
    // If the proxy is detached, return undefined.
    if (proxy_parent->IsNull()) return isolate->factory()->undefined_value();
    ASSERT(proxy_parent->IsJSGlobalObject());
    return SetHiddenProperty(Handle<JSObject>::cast(proxy_parent), key, value);
  }
  ASSERT(!object->IsJSGlobalProxy());

  Handle<Object> inline_value(object->GetHiddenPropertiesHashTable(), isolate);

  // If there is no backing store yet, store the identity hash inline.
  if (value->IsSmi() &&
      *key == *isolate->factory()->identity_hash_string() &&
      (inline_value->IsUndefined() || inline_value->IsSmi())) {
    return JSObject::SetHiddenPropertiesHashTable(object, value);
  }

  Handle<ObjectHashTable> hashtable =
      GetOrCreateHiddenPropertiesHashtable(object);

  // If it was found, check if the key is already in the dictionary.
  Handle<ObjectHashTable> new_table = ObjectHashTable::Put(hashtable, key,
                                                           value);
  if (*new_table != *hashtable) {
    // If adding the key expanded the dictionary (i.e., Add returned a new
    // dictionary), store it back to the object.
    SetHiddenPropertiesHashTable(object, new_table);
  }

  // Return this to mark success.
  return object;
}


// Removes hidden property |key| from |object|. Silently does nothing when
// there is no hidden-property backing store (or only an inline identity
// hash), or when the global proxy is detached.
void JSObject::DeleteHiddenProperty(Handle<JSObject> object, Handle<Name> key) {
  Isolate* isolate = object->GetIsolate();
  ASSERT(key->IsUniqueName());

  if (object->IsJSGlobalProxy()) {
    Handle<Object> proto(object->GetPrototype(), isolate);
    if (proto->IsNull()) return;
    ASSERT(proto->IsJSGlobalObject());
    return DeleteHiddenProperty(Handle<JSObject>::cast(proto), key);
  }

  Object* inline_value = object->GetHiddenPropertiesHashTable();

  // We never delete (inline-stored) identity hashes.
  ASSERT(*key != *isolate->factory()->identity_hash_string());
  if (inline_value->IsUndefined() || inline_value->IsSmi()) return;

  Handle<ObjectHashTable> hashtable(ObjectHashTable::cast(inline_value));
  bool was_present = false;
  ObjectHashTable::Remove(hashtable, key, &was_present);
}


// Returns true if |object| has the (own, real) hidden-properties property.
bool JSObject::HasHiddenProperties(Handle<JSObject> object) {
  Handle<Name> hidden = object->GetIsolate()->factory()->hidden_string();
  LookupIterator it(object, hidden, LookupIterator::CHECK_OWN_REAL);
  return GetPropertyAttributes(&it) != ABSENT;
}


// Returns the value stored under the hidden string: either an ObjectHashTable
// backing store, an inline identity hash (Smi), or undefined if neither
// exists yet.
Object* JSObject::GetHiddenPropertiesHashTable() {
  ASSERT(!IsJSGlobalProxy());
  if (HasFastProperties()) {
    // If the object has fast properties, check whether the first slot
    // in the descriptor array matches the hidden string. Since the
    // hidden strings hash code is zero (and no other name has hash
    // code zero) it will always occupy the first entry if present.
    DescriptorArray* descriptors = this->map()->instance_descriptors();
    if (descriptors->number_of_descriptors() > 0) {
      int sorted_index = descriptors->GetSortedKeyIndex(0);
      if (descriptors->GetKey(sorted_index) == GetHeap()->hidden_string() &&
          sorted_index < map()->NumberOfOwnDescriptors()) {
        ASSERT(descriptors->GetType(sorted_index) == FIELD);
        ASSERT(descriptors->GetDetails(sorted_index).representation().
               IsCompatibleForLoad(Representation::Tagged()));
        FieldIndex index = FieldIndex::ForDescriptor(this->map(),
                                                     sorted_index);
        return this->RawFastPropertyAt(index);
      } else {
        return GetHeap()->undefined_value();
      }
    } else {
      return GetHeap()->undefined_value();
    }
  } else {
    // Dictionary-mode properties: do a normal own-property lookup.
    Isolate* isolate = GetIsolate();
    LookupResult result(isolate);
    LookupOwnRealNamedProperty(isolate->factory()->hidden_string(), &result);
    if (result.IsFound()) {
      ASSERT(result.IsNormal());
      ASSERT(result.holder() == this);
      Object* value = GetNormalizedProperty(&result);
      if (!value->IsTheHole()) return value;
    }
    return GetHeap()->undefined_value();
  }
}

// Returns the hidden-properties ObjectHashTable for |object|, creating and
// installing one (initial capacity 4) if it does not exist yet. An inline
// identity hash, if present, is migrated into the new table.
Handle<ObjectHashTable> JSObject::GetOrCreateHiddenPropertiesHashtable(
    Handle<JSObject> object) {
  Isolate* isolate = object->GetIsolate();

  static const int kInitialCapacity = 4;
  Handle<Object> inline_value(object->GetHiddenPropertiesHashTable(), isolate);
  if (inline_value->IsHashTable()) {
    return Handle<ObjectHashTable>::cast(inline_value);
  }

  Handle<ObjectHashTable> hashtable = ObjectHashTable::New(
      isolate, kInitialCapacity, USE_CUSTOM_MINIMUM_CAPACITY);

  if (inline_value->IsSmi()) {
    // We were storing the identity hash inline and now allocated an actual
    // dictionary. Put the identity hash into the new dictionary.
    hashtable = ObjectHashTable::Put(hashtable,
                                     isolate->factory()->identity_hash_string(),
                                     inline_value);
  }

  // Install the table as a non-enumerable own property under the hidden
  // string, bypassing the extensibility check.
  JSObject::SetOwnPropertyIgnoreAttributes(
      object,
      isolate->factory()->hidden_string(),
      hashtable,
      DONT_ENUM,
      OPTIMAL_REPRESENTATION,
      ALLOW_AS_CONSTANT,
      OMIT_EXTENSIBILITY_CHECK).Assert();

  return hashtable;
}


// Stores |value| (either an ObjectHashTable or an inline identity-hash Smi)
// in the hidden-properties slot of |object|, writing directly to the field
// when the fast-properties descriptor already exists. Returns |object|.
Handle<Object> JSObject::SetHiddenPropertiesHashTable(Handle<JSObject> object,
                                                      Handle<Object> value) {
  ASSERT(!object->IsJSGlobalProxy());

  Isolate* isolate = object->GetIsolate();

  // We can store the identity hash inline iff there is no backing store
  // for hidden properties yet.
  ASSERT(JSObject::HasHiddenProperties(object) != value->IsSmi());
  if (object->HasFastProperties()) {
    // If the object has fast properties, check whether the first slot
    // in the descriptor array matches the hidden string. Since the
    // hidden strings hash code is zero (and no other name has hash
    // code zero) it will always occupy the first entry if present.
    DescriptorArray* descriptors = object->map()->instance_descriptors();
    if (descriptors->number_of_descriptors() > 0) {
      int sorted_index = descriptors->GetSortedKeyIndex(0);
      if (descriptors->GetKey(sorted_index) == isolate->heap()->hidden_string()
          && sorted_index < object->map()->NumberOfOwnDescriptors()) {
        object->WriteToField(sorted_index, *value);
        return object;
      }
    }
  }

  SetOwnPropertyIgnoreAttributes(object,
                                 isolate->factory()->hidden_string(),
                                 value,
                                 DONT_ENUM,
                                 OPTIMAL_REPRESENTATION,
                                 ALLOW_AS_CONSTANT,
                                 OMIT_EXTENSIBILITY_CHECK).Assert();
  return object;
}


// Deletes |name| from |object| without consulting any interceptor. Returns
// true if the property did not exist; otherwise normalizes the object and
// removes the dictionary entry.
Handle<Object> JSObject::DeletePropertyPostInterceptor(Handle<JSObject> object,
                                                       Handle<Name> name,
                                                       DeleteMode mode) {
  // Check own property, ignore interceptor.
  Isolate* isolate = object->GetIsolate();
  LookupResult result(isolate);
  object->LookupOwnRealNamedProperty(name, &result);
  if (!result.IsFound()) return isolate->factory()->true_value();

  // Normalize object if needed.
  NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);

  return DeleteNormalizedProperty(object, name, mode);
}


// Deletes |name| via the object's named-property deleter interceptor, if any.
// Falls back to DeletePropertyPostInterceptor when the interceptor declines
// (returns an empty result). May return an empty MaybeHandle on a scheduled
// exception from the callback.
MaybeHandle<Object> JSObject::DeletePropertyWithInterceptor(
    Handle<JSObject> object, Handle<Name> name) {
  Isolate* isolate = object->GetIsolate();

  // TODO(rossberg): Support symbols in the API.
  if (name->IsSymbol()) return isolate->factory()->false_value();

  Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor());
  if (!interceptor->deleter()->IsUndefined()) {
    v8::NamedPropertyDeleterCallback deleter =
        v8::ToCData<v8::NamedPropertyDeleterCallback>(interceptor->deleter());
    LOG(isolate,
        ApiNamedPropertyAccess("interceptor-named-delete", *object, *name));
    PropertyCallbackArguments args(
        isolate, interceptor->data(), *object, *object);
    v8::Handle<v8::Boolean> result =
        args.Call(deleter, v8::Utils::ToLocal(Handle<String>::cast(name)));
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    if (!result.IsEmpty()) {
      ASSERT(result->IsBoolean());
      Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
      result_internal->VerifyApiCallResultType();
      // Rebox CustomArguments::kReturnValueOffset before returning.
      return handle(*result_internal, isolate);
    }
  }
  // Interceptor absent or declined: fall through to the regular path.
  Handle<Object> result =
      DeletePropertyPostInterceptor(object, name, NORMAL_DELETION);
  return result;
}


// Deletes element |index| via the object's indexed-property deleter
// interceptor, falling back to the elements accessor when the interceptor is
// absent or declines. May return an empty MaybeHandle on a scheduled
// exception from the callback.
MaybeHandle<Object> JSObject::DeleteElementWithInterceptor(
    Handle<JSObject> object,
    uint32_t index) {
  Isolate* isolate = object->GetIsolate();
  Factory* factory = isolate->factory();

  // Make sure that the top context does not change when doing
  // callbacks or interceptor calls.
  AssertNoContextChange ncc(isolate);

  Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
  if (interceptor->deleter()->IsUndefined()) return factory->false_value();
  v8::IndexedPropertyDeleterCallback deleter =
      v8::ToCData<v8::IndexedPropertyDeleterCallback>(interceptor->deleter());
  LOG(isolate,
      ApiIndexedPropertyAccess("interceptor-indexed-delete", *object, index));
  PropertyCallbackArguments args(
      isolate, interceptor->data(), *object, *object);
  v8::Handle<v8::Boolean> result = args.Call(deleter, index);
  RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
  if (!result.IsEmpty()) {
    ASSERT(result->IsBoolean());
    Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
    result_internal->VerifyApiCallResultType();
    // Rebox CustomArguments::kReturnValueOffset before returning.
    return handle(*result_internal, isolate);
  }
  MaybeHandle<Object> delete_result = object->GetElementsAccessor()->Delete(
      object, index, NORMAL_DELETION);
  return delete_result;
}


// Deletes element |index| from |object|, honoring access checks, String
// wrapper character properties, global-proxy forwarding, interceptors, and
// Object.observe change records. Returns true/false per the delete operator,
// an empty handle when a TypeError was thrown, or an empty MaybeHandle on a
// scheduled exception.
MaybeHandle<Object> JSObject::DeleteElement(Handle<JSObject> object,
                                            uint32_t index,
                                            DeleteMode mode) {
  Isolate* isolate = object->GetIsolate();
  Factory* factory = isolate->factory();

  // Check access rights if needed.
  if (object->IsAccessCheckNeeded() &&
      !isolate->MayIndexedAccess(object, index, v8::ACCESS_DELETE)) {
    isolate->ReportFailedAccessCheck(object, v8::ACCESS_DELETE);
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return factory->false_value();
  }

  // Characters of a String wrapper are non-configurable.
  if (object->IsStringObjectWithCharacterAt(index)) {
    if (mode == STRICT_DELETION) {
      // Deleting a non-configurable property in strict mode.
      Handle<Object> name = factory->NewNumberFromUint(index);
      Handle<Object> args[2] = { name, object };
      Handle<Object> error =
          factory->NewTypeError("strict_delete_property",
                                HandleVector(args, 2));
      isolate->Throw(*error);
      return Handle<Object>();
    }
    return factory->false_value();
  }

  if (object->IsJSGlobalProxy()) {
    Handle<Object> proto(object->GetPrototype(), isolate);
    if (proto->IsNull()) return factory->false_value();
    ASSERT(proto->IsJSGlobalObject());
    return DeleteElement(Handle<JSObject>::cast(proto), index, mode);
  }

  // For observed objects, capture the old value so a change record can be
  // enqueued after a successful delete.
  Handle<Object> old_value;
  bool should_enqueue_change_record = false;
  if (object->map()->is_observed()) {
    should_enqueue_change_record = HasOwnElement(object, index);
    if (should_enqueue_change_record) {
      if (!GetOwnElementAccessorPair(object, index).is_null()) {
        old_value = Handle<Object>::cast(factory->the_hole_value());
      } else {
        old_value = Object::GetElement(
            isolate, object, index).ToHandleChecked();
      }
    }
  }

  // Skip interceptor if forcing deletion.
  MaybeHandle<Object> maybe_result;
  if (object->HasIndexedInterceptor() && mode != FORCE_DELETION) {
    maybe_result = DeleteElementWithInterceptor(object, index);
  } else {
    maybe_result = object->GetElementsAccessor()->Delete(object, index, mode);
  }
  Handle<Object> result;
  ASSIGN_RETURN_ON_EXCEPTION(isolate, result, maybe_result, Object);

  // Notify observers only if the element is actually gone now.
  if (should_enqueue_change_record && !HasOwnElement(object, index)) {
    Handle<String> name = factory->Uint32ToString(index);
    EnqueueChangeRecord(object, "delete", name, old_value);
  }

  return result;
}


// Deletes named property |name| from |object| per the [[Delete]] internal
// method: handles access checks, global-proxy forwarding, array-index names,
// DONT_DELETE attributes (TypeError in strict mode), interceptors, and
// Object.observe change records.
MaybeHandle<Object> JSObject::DeleteProperty(Handle<JSObject> object,
                                             Handle<Name> name,
                                             DeleteMode mode) {
  Isolate* isolate = object->GetIsolate();
  // ECMA-262, 3rd, 8.6.2.5
  ASSERT(name->IsName());

  // Check access rights if needed.
  if (object->IsAccessCheckNeeded() &&
      !isolate->MayNamedAccess(object, name, v8::ACCESS_DELETE)) {
    isolate->ReportFailedAccessCheck(object, v8::ACCESS_DELETE);
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return isolate->factory()->false_value();
  }

  if (object->IsJSGlobalProxy()) {
    Object* proto = object->GetPrototype();
    if (proto->IsNull()) return isolate->factory()->false_value();
    ASSERT(proto->IsJSGlobalObject());
    return JSGlobalObject::DeleteProperty(
        handle(JSGlobalObject::cast(proto)), name, mode);
  }

  // Names that parse as array indices are deleted through the element path.
  uint32_t index = 0;
  if (name->AsArrayIndex(&index)) {
    return DeleteElement(object, index, mode);
  }

  LookupResult lookup(isolate);
  object->LookupOwn(name, &lookup, true);
  if (!lookup.IsFound()) return isolate->factory()->true_value();
  // Ignore attributes if forcing a deletion.
  if (lookup.IsDontDelete() && mode != FORCE_DELETION) {
    if (mode == STRICT_DELETION) {
      // Deleting a non-configurable property in strict mode.
      Handle<Object> args[2] = { name, object };
      Handle<Object> error = isolate->factory()->NewTypeError(
          "strict_delete_property", HandleVector(args, ARRAY_SIZE(args)));
      isolate->Throw(*error);
      return Handle<Object>();
    }
    return isolate->factory()->false_value();
  }

  // For observed objects, capture the old value before deleting; the hidden
  // string is exempt from observation.
  Handle<Object> old_value = isolate->factory()->the_hole_value();
  bool is_observed = object->map()->is_observed() &&
                     *name != isolate->heap()->hidden_string();
  if (is_observed && lookup.IsDataProperty()) {
    old_value = Object::GetPropertyOrElement(object, name).ToHandleChecked();
  }
  Handle<Object> result;

  // Check for interceptor.
  if (lookup.IsInterceptor()) {
    // Skip interceptor if forcing a deletion.
    if (mode == FORCE_DELETION) {
      result = DeletePropertyPostInterceptor(object, name, mode);
    } else {
      ASSIGN_RETURN_ON_EXCEPTION(
          isolate, result,
          DeletePropertyWithInterceptor(object, name),
          Object);
    }
  } else {
    // Normalize object if needed.
    NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);
    // Make sure the properties are normalized before removing the entry.
    result = DeleteNormalizedProperty(object, name, mode);
  }

  if (is_observed && !HasOwnProperty(object, name)) {
    EnqueueChangeRecord(object, "delete", name, old_value);
  }

  return result;
}


// Dispatches element deletion to the proxy handler or the JSObject
// implementation, depending on the receiver's type.
MaybeHandle<Object> JSReceiver::DeleteElement(Handle<JSReceiver> object,
                                              uint32_t index,
                                              DeleteMode mode) {
  if (object->IsJSProxy()) {
    return JSProxy::DeleteElementWithHandler(
        Handle<JSProxy>::cast(object), index, mode);
  }
  return JSObject::DeleteElement(Handle<JSObject>::cast(object), index, mode);
}


// Dispatches named-property deletion to the proxy handler or the JSObject
// implementation, depending on the receiver's type.
MaybeHandle<Object> JSReceiver::DeleteProperty(Handle<JSReceiver> object,
                                               Handle<Name> name,
                                               DeleteMode mode) {
  if (object->IsJSProxy()) {
    return JSProxy::DeletePropertyWithHandler(
        Handle<JSProxy>::cast(object), name, mode);
  }
  return JSObject::DeleteProperty(Handle<JSObject>::cast(object), name, mode);
}


// Returns true if |object| occurs among the given elements backing store
// (linear scan for fast-object kinds, reverse lookup for dictionaries).
bool JSObject::ReferencesObjectFromElements(FixedArray* elements,
                                            ElementsKind kind,
                                            Object* object) {
  ASSERT(IsFastObjectElementsKind(kind) ||
         kind == DICTIONARY_ELEMENTS);
  if (IsFastObjectElementsKind(kind)) {
    int length = IsJSArray()
        ? Smi::cast(JSArray::cast(this)->length())->value()
        : elements->length();
    for (int i = 0; i < length; ++i) {
      Object* element = elements->get(i);
      if (!element->IsTheHole() && element == object) return true;
    }
  } else {
    Object* key =
        SeededNumberDictionary::cast(elements)->SlowReverseLookup(object);
    if (!key->IsUndefined()) return true;
  }
  return false;
}


// Check whether this object references another object.
bool JSObject::ReferencesObject(Object* obj) {
  Map* map_of_this = map();
  Heap* heap = GetHeap();
  DisallowHeapAllocation no_allocation;

  // Is the object the constructor for this object?
  if (map_of_this->constructor() == obj) {
    return true;
  }

  // Is the object the prototype for this object?
  if (map_of_this->prototype() == obj) {
    return true;
  }

  // Check if the object is among the named properties.
  Object* key = SlowReverseLookup(obj);
  if (!key->IsUndefined()) {
    return true;
  }

  // Check if the object is among the indexed properties.
  ElementsKind kind = GetElementsKind();
  switch (kind) {
    // Raw pixels and external arrays do not reference other
    // objects.
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                        \
    case EXTERNAL_##TYPE##_ELEMENTS:                                           \
    case TYPE##_ELEMENTS:                                                      \
      break;

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE

    // Smi and double elements cannot hold heap-object references either.
    case FAST_DOUBLE_ELEMENTS:
    case FAST_HOLEY_DOUBLE_ELEMENTS:
      break;
    case FAST_SMI_ELEMENTS:
    case FAST_HOLEY_SMI_ELEMENTS:
      break;
    case FAST_ELEMENTS:
    case FAST_HOLEY_ELEMENTS:
    case DICTIONARY_ELEMENTS: {
      FixedArray* elements = FixedArray::cast(this->elements());
      if (ReferencesObjectFromElements(elements, kind, obj)) return true;
      break;
    }
    case SLOPPY_ARGUMENTS_ELEMENTS: {
      FixedArray* parameter_map = FixedArray::cast(elements());
      // Check the mapped parameters.
      int length = parameter_map->length();
      for (int i = 2; i < length; ++i) {
        Object* value = parameter_map->get(i);
        if (!value->IsTheHole() && value == obj) return true;
      }
      // Check the arguments.
      FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
      kind = arguments->IsDictionary() ? DICTIONARY_ELEMENTS :
          FAST_HOLEY_ELEMENTS;
      if (ReferencesObjectFromElements(arguments, kind, obj)) return true;
      break;
    }
  }

  // For functions check the context.
  if (IsJSFunction()) {
    // Get the constructor function for arguments array.
    JSObject* arguments_boilerplate =
        heap->isolate()->context()->native_context()->
            sloppy_arguments_boilerplate();
    JSFunction* arguments_function =
        JSFunction::cast(arguments_boilerplate->map()->constructor());

    // Get the context and don't check if it is the native context.
    JSFunction* f = JSFunction::cast(this);
    Context* context = f->context();
    if (context->IsNativeContext()) {
      return false;
    }

    // Check the non-special context slots.
    for (int i = Context::MIN_CONTEXT_SLOTS; i < context->length(); i++) {
      // Only check JS objects.
      if (context->get(i)->IsJSObject()) {
        JSObject* ctxobj = JSObject::cast(context->get(i));
        // If it is an arguments array check the content.
        if (ctxobj->map()->constructor() == arguments_function) {
          if (ctxobj->ReferencesObject(obj)) {
            return true;
          }
        } else if (ctxobj == obj) {
          return true;
        }
      }
    }

    // Check the context extension (if any) if it can have references.
    if (context->has_extension() && !context->IsCatchContext()) {
      // With harmony scoping, a JSFunction may have a global context.
      // TODO(mvstanton): walk into the ScopeInfo.
      if (FLAG_harmony_scoping && context->IsGlobalContext()) {
        return false;
      }

      return JSObject::cast(context->extension())->ReferencesObject(obj);
    }
  }

  // No references to object.
  return false;
}


// Marks |object| non-extensible (Object.preventExtensions): checks access
// rights, forwards through the global proxy, rejects external-array-backed
// objects with a TypeError, and normalizes elements so the object can never
// return to a fast elements kind. (Function continues past this chunk.)
MaybeHandle<Object> JSObject::PreventExtensions(Handle<JSObject> object) {
  Isolate* isolate = object->GetIsolate();

  if (!object->map()->is_extensible()) return object;

  if (object->IsAccessCheckNeeded() &&
      !isolate->MayNamedAccess(
          object, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) {
    isolate->ReportFailedAccessCheck(object, v8::ACCESS_KEYS);
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return isolate->factory()->false_value();
  }

  if (object->IsJSGlobalProxy()) {
    Handle<Object> proto(object->GetPrototype(), isolate);
    if (proto->IsNull()) return object;
    ASSERT(proto->IsJSGlobalObject());
    return PreventExtensions(Handle<JSObject>::cast(proto));
  }

  // It's not possible to seal objects with external array elements
  if (object->HasExternalArrayElements() ||
      object->HasFixedTypedArrayElements()) {
    Handle<Object> error =
        isolate->factory()->NewTypeError(
            "cant_prevent_ext_external_array_elements",
            HandleVector(&object, 1));
    return isolate->Throw<Object>(error);
  }

  // If there are fast elements we normalize.
  Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
  ASSERT(object->HasDictionaryElements() ||
         object->HasDictionaryArgumentsElements());

  // Make sure that we never go back to fast case.
  dictionary->set_requires_slow_elements();

  // Do a map transition, other objects with this map may still
  // be extensible.
  // TODO(adamk): Extend the NormalizedMapCache to handle non-extensible maps.
  Handle<Map> new_map = Map::Copy(handle(object->map()));

  new_map->set_is_extensible(false);
  JSObject::MigrateToMap(object, new_map);
  ASSERT(!object->map()->is_extensible());

  // Object.observe: report the state change on observed objects.
  if (object->map()->is_observed()) {
    EnqueueChangeRecord(object, "preventExtensions", Handle<Name>(),
                        isolate->factory()->the_hole_value());
  }
  return object;
}


// Marks every entry of a (property or element) dictionary DONT_DELETE, and
// READ_ONLY except for accessor pairs (READ_ONLY is an invalid attribute for
// JS setters/getters).  Used by Freeze below.
template<typename Dictionary>
static void FreezeDictionary(Dictionary* dictionary) {
  int capacity = dictionary->Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = dictionary->KeyAt(i);
    if (dictionary->IsKey(k)) {
      PropertyDetails details = dictionary->DetailsAt(i);
      int attrs = DONT_DELETE;
      // READ_ONLY is an invalid attribute for JS setters/getters.
      if (details.type() == CALLBACKS) {
        Object* v = dictionary->ValueAt(i);
        // Global objects hold their values in PropertyCells; unwrap first.
        if (v->IsPropertyCell()) v = PropertyCell::cast(v)->value();
        if (!v->IsAccessorPair()) attrs |= READ_ONLY;
      } else {
        attrs |= READ_ONLY;
      }
      details = details.CopyAddAttributes(
          static_cast<PropertyAttributes>(attrs));
      dictionary->DetailsAtPut(i, details);
    }
  }
}


// ES5 Object.freeze: makes |object| non-extensible and all its own properties
// and elements non-configurable (and non-writable, except accessors).
MaybeHandle<Object> JSObject::Freeze(Handle<JSObject> object) {
  // Freezing sloppy arguments should be handled elsewhere.
  ASSERT(!object->HasSloppyArgumentsElements());
  ASSERT(!object->map()->is_observed());

  // Already frozen: nothing to do.
  if (object->map()->is_frozen()) return object;

  Isolate* isolate = object->GetIsolate();
  if (object->IsAccessCheckNeeded() &&
      !isolate->MayNamedAccess(
          object, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) {
    isolate->ReportFailedAccessCheck(object, v8::ACCESS_KEYS);
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return isolate->factory()->false_value();
  }

  if (object->IsJSGlobalProxy()) {
    // Forward to the real global object behind the proxy.
    Handle<Object> proto(object->GetPrototype(), isolate);
    if (proto->IsNull()) return object;
    ASSERT(proto->IsJSGlobalObject());
    return Freeze(Handle<JSObject>::cast(proto));
  }

  // It's not possible to freeze objects with external array elements
  if (object->HasExternalArrayElements() ||
      object->HasFixedTypedArrayElements()) {
    Handle<Object> error =
        isolate->factory()->NewTypeError(
            "cant_prevent_ext_external_array_elements",
            HandleVector(&object, 1));
    return isolate->Throw<Object>(error);
  }

  // Build the slow element dictionary up front, before any map changes below,
  // so element copying happens against the unmodified object.
  Handle<SeededNumberDictionary> new_element_dictionary;
  if (!object->elements()->IsDictionary()) {
    int length = object->IsJSArray()
        ? Smi::cast(Handle<JSArray>::cast(object)->length())->value()
        : object->elements()->length();
    if (length > 0) {
      int capacity = 0;
      int used = 0;
      object->GetElementsCapacityAndUsage(&capacity, &used);
      new_element_dictionary = SeededNumberDictionary::New(isolate, used);

      // Move elements to a dictionary; avoid calling NormalizeElements to avoid
      // unnecessary transitions.
      new_element_dictionary = CopyFastElementsToDictionary(
          handle(object->elements()), length, new_element_dictionary);
    } else {
      // No existing elements, use a pre-allocated empty backing store
      new_element_dictionary =
          isolate->factory()->empty_slow_element_dictionary();
    }
  }

  // Prefer reusing an existing "frozen" map transition; otherwise create a
  // new transition; otherwise fall back to normalized (dictionary) properties.
  Handle<Map> old_map(object->map(), isolate);
  int transition_index = old_map->SearchTransition(
      isolate->heap()->frozen_symbol());
  if (transition_index != TransitionArray::kNotFound) {
    Handle<Map> transition_map(old_map->GetTransition(transition_index));
    ASSERT(transition_map->has_dictionary_elements());
    ASSERT(transition_map->is_frozen());
    ASSERT(!transition_map->is_extensible());
    JSObject::MigrateToMap(object, transition_map);
  } else if (object->HasFastProperties() && old_map->CanHaveMoreTransitions()) {
    // Create a new descriptor array with fully-frozen properties
    Handle<Map> new_map = Map::CopyForFreeze(old_map);
    JSObject::MigrateToMap(object, new_map);
  } else {
    // Slow path: need to normalize properties for safety
    NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);

    // Create a new map, since other objects with this map may be extensible.
    // TODO(adamk): Extend the NormalizedMapCache to handle non-extensible maps.
    Handle<Map> new_map = Map::Copy(handle(object->map()));
    new_map->freeze();
    new_map->set_is_extensible(false);
    new_map->set_elements_kind(DICTIONARY_ELEMENTS);
    JSObject::MigrateToMap(object, new_map);

    // Freeze dictionary-mode properties
    FreezeDictionary(object->property_dictionary());
  }

  ASSERT(object->map()->has_dictionary_elements());
  if (!new_element_dictionary.is_null()) {
    object->set_elements(*new_element_dictionary);
  }

  if (object->elements() != isolate->heap()->empty_slow_element_dictionary()) {
    SeededNumberDictionary* dictionary = object->element_dictionary();
    // Make sure we never go back to the fast case
    dictionary->set_requires_slow_elements();
    // Freeze all elements in the dictionary
    FreezeDictionary(dictionary);
  }

  return object;
}


// Object.observe: migrates |object| to a map with the observed bit set,
// reusing or creating an "observed" map transition where possible.
void JSObject::SetObserved(Handle<JSObject> object) {
  ASSERT(!object->IsJSGlobalProxy());
  ASSERT(!object->IsJSGlobalObject());
  Isolate* isolate = object->GetIsolate();
  Handle<Map> new_map;
  Handle<Map> old_map(object->map(), isolate);
  ASSERT(!old_map->is_observed());
  int transition_index = old_map->SearchTransition(
      isolate->heap()->observed_symbol());
  if (transition_index != TransitionArray::kNotFound) {
    // Reuse the existing transition to an observed map.
    new_map = handle(old_map->GetTransition(transition_index), isolate);
    ASSERT(new_map->is_observed());
  } else if (object->HasFastProperties() && old_map->CanHaveMoreTransitions()) {
    new_map = Map::CopyForObserved(old_map);
  } else {
    // No transition possible: plain map copy with the observed bit set.
    new_map = Map::Copy(old_map);
    new_map->set_is_observed();
  }
  JSObject::MigrateToMap(object, new_map);
}


// Reads the in-object/backing-store property at |index|, boxing the raw value
// according to |representation| (e.g. allocating a HeapNumber for doubles).
Handle<Object> JSObject::FastPropertyAt(Handle<JSObject> object,
                                        Representation representation,
                                        FieldIndex index) {
  Isolate* isolate = object->GetIsolate();
  Handle<Object> raw_value(object->RawFastPropertyAt(index), isolate);
  return Object::NewStorageFor(isolate, raw_value, representation);
}


// Recursively walks (and optionally deep-copies) a JSObject graph, tracking
// allocation sites via the ContextObject policy.  |copying| selects between
// a pure walk (DeepWalk) and a deep copy (DeepCopy).
template<class ContextObject>
class JSObjectWalkVisitor {
 public:
  JSObjectWalkVisitor(ContextObject* site_context, bool copying,
                      JSObject::DeepCopyHints hints)
      : site_context_(site_context),
        copying_(copying),
        hints_(hints) {}

  MUST_USE_RESULT MaybeHandle<JSObject> StructureWalk(Handle<JSObject> object);

 protected:
  // Walks a nested JSObject inside a fresh allocation-site scope.
  MUST_USE_RESULT inline MaybeHandle<JSObject> VisitElementOrProperty(
      Handle<JSObject> object,
      Handle<JSObject> value) {
    Handle<AllocationSite> current_site = site_context()->EnterNewScope();
    MaybeHandle<JSObject> copy_of_value = StructureWalk(value);
    site_context()->ExitScope(current_site, value);
    return copy_of_value;
  }

  inline ContextObject* site_context() { return site_context_; }
  inline Isolate* isolate() { return site_context()->isolate(); }

  inline bool copying() const { return copying_; }

 private:
  ContextObject* site_context_;
  const bool copying_;
  const JSObject::DeepCopyHints hints_;
};


template <class ContextObject>
MaybeHandle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk(
    Handle<JSObject> object) {
  Isolate* isolate = this->isolate();
  bool copying = this->copying();
  bool shallow = hints_ == JSObject::kObjectIsShallowArray;

  if (!shallow) {
    // The walk recurses; guard against runaway stacks.
    StackLimitCheck check(isolate);

    if (check.HasOverflowed()) {
      isolate->StackOverflow();
      return MaybeHandle<JSObject>();
    }
  }

  if (object->map()->is_deprecated()) {
    JSObject::MigrateInstance(object);
  }

  Handle<JSObject> copy;
  if (copying) {
    Handle<AllocationSite> site_to_pass;
    if (site_context()->ShouldCreateMemento(object)) {
      site_to_pass = site_context()->current();
    }
    copy = isolate->factory()->CopyJSObjectWithAllocationSite(
        object, site_to_pass);
  } else {
    // Walk-only mode operates on the original object in place.
    copy = object;
  }

  ASSERT(copying || copy.is_identical_to(object));

  ElementsKind kind = copy->GetElementsKind();
  if (copying && IsFastSmiOrObjectElementsKind(kind) &&
      FixedArray::cast(copy->elements())->map() ==
        isolate->heap()->fixed_cow_array_map()) {
    isolate->counters()->cow_arrays_created_runtime()->Increment();
  }

  if (!shallow) {
    HandleScope scope(isolate);

    // Deep copy own properties.
    if (copy->HasFastProperties()) {
      Handle<DescriptorArray> descriptors(copy->map()->instance_descriptors());
      int limit = copy->map()->NumberOfOwnDescriptors();
      for (int i = 0; i < limit; i++) {
        PropertyDetails details = descriptors->GetDetails(i);
        // Only in-object/backing-store fields can hold nested objects.
        if (details.type() != FIELD) continue;
        FieldIndex index = FieldIndex::ForDescriptor(copy->map(), i);
        Handle<Object> value(object->RawFastPropertyAt(index), isolate);
        if (value->IsJSObject()) {
          ASSIGN_RETURN_ON_EXCEPTION(
              isolate, value,
              VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
              JSObject);
        } else {
          Representation representation = details.representation();
          value = Object::NewStorageFor(isolate, value, representation);
        }
        if (copying) {
          copy->FastPropertyAtPut(index, *value);
        }
      }
    } else {
      // Dictionary-mode properties: enumerate names and copy one by one.
      Handle<FixedArray> names =
          isolate->factory()->NewFixedArray(copy->NumberOfOwnProperties());
      copy->GetOwnPropertyNames(*names, 0);
      for (int i = 0; i < names->length(); i++) {
        ASSERT(names->get(i)->IsString());
        Handle<String> key_string(String::cast(names->get(i)));
        PropertyAttributes attributes =
            JSReceiver::GetOwnPropertyAttributes(copy, key_string);
        // Only deep copy fields from the object literal expression.
        // In particular, don't try to copy the length attribute of
        // an array.
        if (attributes != NONE) continue;
        Handle<Object> value =
            Object::GetProperty(copy, key_string).ToHandleChecked();
        if (value->IsJSObject()) {
          Handle<JSObject> result;
          ASSIGN_RETURN_ON_EXCEPTION(
              isolate, result,
              VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
              JSObject);
          if (copying) {
            // Creating object copy for literals. No strict mode needed.
            JSObject::SetProperty(
                copy, key_string, result, NONE, SLOPPY).Assert();
          }
        }
      }
    }

    // Deep copy own elements.
    // Pixel elements cannot be created using an object literal.
    ASSERT(!copy->HasExternalArrayElements());
    switch (kind) {
      case FAST_SMI_ELEMENTS:
      case FAST_ELEMENTS:
      case FAST_HOLEY_SMI_ELEMENTS:
      case FAST_HOLEY_ELEMENTS: {
        Handle<FixedArray> elements(FixedArray::cast(copy->elements()));
        if (elements->map() == isolate->heap()->fixed_cow_array_map()) {
          // COW arrays from literals never contain nested JSObjects, so
          // there is nothing to recurse into.
#ifdef DEBUG
          for (int i = 0; i < elements->length(); i++) {
            ASSERT(!elements->get(i)->IsJSObject());
          }
#endif
        } else {
          for (int i = 0; i < elements->length(); i++) {
            Handle<Object> value(elements->get(i), isolate);
            ASSERT(value->IsSmi() ||
                   value->IsTheHole() ||
                   (IsFastObjectElementsKind(copy->GetElementsKind())));
            if (value->IsJSObject()) {
              Handle<JSObject> result;
              ASSIGN_RETURN_ON_EXCEPTION(
                  isolate, result,
                  VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
                  JSObject);
              if (copying) {
                elements->set(i, *result);
              }
            }
          }
        }
        break;
      }
      case DICTIONARY_ELEMENTS: {
        Handle<SeededNumberDictionary> element_dictionary(
            copy->element_dictionary());
        int capacity = element_dictionary->Capacity();
        for (int i = 0; i < capacity; i++) {
          Object* k = element_dictionary->KeyAt(i);
          if (element_dictionary->IsKey(k)) {
            Handle<Object> value(element_dictionary->ValueAt(i), isolate);
            if (value->IsJSObject()) {
              Handle<JSObject> result;
              ASSIGN_RETURN_ON_EXCEPTION(
                  isolate, result,
                  VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
                  JSObject);
              if (copying) {
                element_dictionary->ValueAtPut(i, *result);
              }
            }
          }
        }
        break;
      }
      case SLOPPY_ARGUMENTS_ELEMENTS:
        UNIMPLEMENTED();
        break;


#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                        \
      case EXTERNAL_##TYPE##_ELEMENTS:                                         \
      case TYPE##_ELEMENTS:                                                    \

      TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE

      case FAST_DOUBLE_ELEMENTS:
      case FAST_HOLEY_DOUBLE_ELEMENTS:
        // No contained objects, nothing to do.
        break;
    }
  }

  return copy;
}


// Walks the object graph without copying, creating allocation sites as a
// side effect.  The returned handle, when present, is the input object.
MaybeHandle<JSObject> JSObject::DeepWalk(
    Handle<JSObject> object,
    AllocationSiteCreationContext* site_context) {
  JSObjectWalkVisitor<AllocationSiteCreationContext> v(site_context, false,
                                                       kNoHints);
  MaybeHandle<JSObject> result = v.StructureWalk(object);
  Handle<JSObject> for_assert;
  ASSERT(!result.ToHandle(&for_assert) || for_assert.is_identical_to(object));
  return result;
}


// Deep-copies the object graph, consuming previously recorded allocation
// sites.  The returned handle, when present, is a fresh object.
MaybeHandle<JSObject> JSObject::DeepCopy(
    Handle<JSObject> object,
    AllocationSiteUsageContext* site_context,
    DeepCopyHints hints) {
  JSObjectWalkVisitor<AllocationSiteUsageContext> v(site_context, true, hints);
  MaybeHandle<JSObject> copy = v.StructureWalk(object);
  Handle<JSObject> for_assert;
  ASSERT(!copy.ToHandle(&for_assert) || !for_assert.is_identical_to(object));
  return copy;
}


// Returns the value of the own data property |key|, or undefined if the
// property is missing or is not a plain data property (callbacks, handlers
// and interceptors yield undefined).
Handle<Object> JSObject::GetDataProperty(Handle<JSObject> object,
                                         Handle<Name> key) {
  Isolate* isolate = object->GetIsolate();
  LookupResult lookup(isolate);
  {
    // LookupResult may hold raw pointers; keep the GC out for the lookup.
    DisallowHeapAllocation no_allocation;
    object->LookupRealNamedProperty(key, &lookup);
  }
  Handle<Object> result =
      isolate->factory()->undefined_value();
  if (lookup.IsFound() && !lookup.IsTransition()) {
    switch (lookup.type()) {
      case NORMAL:
        result = GetNormalizedProperty(
            Handle<JSObject>(lookup.holder(), isolate), &lookup);
        break;
      case FIELD:
        result = FastPropertyAt(Handle<JSObject>(lookup.holder(), isolate),
                                lookup.representation(),
                                lookup.GetFieldIndex());
        break;
      case CONSTANT:
        result = Handle<Object>(lookup.GetConstant(), isolate);
        break;
      case CALLBACKS:
      case HANDLER:
      case INTERCEPTOR:
        // Not a plain data property; fall through to undefined.
        break;
      case NONEXISTENT:
        UNREACHABLE();
    }
  }
  return result;
}


// Tests for the fast common case for property enumeration:
// - This object and all prototypes has an enum cache (which means that
//   it is no proxy, has no interceptors and needs no access checks).
// - This object has no elements.
// - No prototype has enumerable properties/elements.
bool JSReceiver::IsSimpleEnum() {
  Heap* heap = GetHeap();
  for (Object* o = this;
       o != heap->null_value();
       o = JSObject::cast(o)->GetPrototype()) {
    if (!o->IsJSObject()) return false;
    JSObject* curr = JSObject::cast(o);
    int enum_length = curr->map()->EnumLength();
    if (enum_length == kInvalidEnumCacheSentinel) return false;
    if (curr->IsAccessCheckNeeded()) return false;
    ASSERT(!curr->HasNamedInterceptor());
    ASSERT(!curr->HasIndexedInterceptor());
    if (curr->NumberOfEnumElements() > 0) return false;
    // Prototypes must contribute no enumerable properties at all.
    if (curr != this && enum_length != 0) return false;
  }
  return true;
}


// Returns true if |key| should be excluded according to |filter| (symbols,
// private symbols or strings, depending on the filter bits).
static bool FilterKey(Object* key, PropertyAttributes filter) {
  if ((filter & SYMBOLIC) && key->IsSymbol()) {
    return true;
  }

  if ((filter & PRIVATE_SYMBOL) &&
      key->IsSymbol() && Symbol::cast(key)->is_private()) {
    return true;
  }

  if ((filter & STRING) && !key->IsSymbol()) {
    return true;
  }

  return false;
}


// Counts descriptors (all, or only own, per |which|) whose attributes and key
// survive |filter|.
int Map::NumberOfDescribedProperties(DescriptorFlag which,
                                     PropertyAttributes filter) {
  int result = 0;
  DescriptorArray* descs = instance_descriptors();
  int limit = which == ALL_DESCRIPTORS
      ? descs->number_of_descriptors()
      : NumberOfOwnDescriptors();
  for (int i = 0; i < limit; i++) {
    if ((descs->GetDetails(i).attributes() & filter) == 0 &&
        !FilterKey(descs->GetKey(i), filter)) {
      result++;
    }
  }
  return result;
}


// Returns one past the highest field index currently used by own FIELD
// descriptors, i.e. the next free property slot.
int Map::NextFreePropertyIndex() {
  int max_index = -1;
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DescriptorArray* descs = instance_descriptors();
  for (int i = 0; i < number_of_own_descriptors; i++) {
    if (descs->GetType(i) == FIELD) {
      int current_index = descs->GetFieldIndex(i);
      if (current_index > max_index) max_index = current_index;
    }
  }
  return max_index + 1;
}


// Looks up |name| on this receiver only (no prototype walk), except that
// hidden prototypes are optionally searched as if they were part of the
// receiver.  Global proxies forward to their global object; proxies report
// a handler result.
void JSReceiver::LookupOwn(
    Handle<Name> name, LookupResult* result, bool search_hidden_prototypes) {
  DisallowHeapAllocation no_gc;
  ASSERT(name->IsName());

  if (IsJSGlobalProxy()) {
    Object* proto = GetPrototype();
    if (proto->IsNull()) return result->NotFound();
    ASSERT(proto->IsJSGlobalObject());
    return JSReceiver::cast(proto)->LookupOwn(
        name, result, search_hidden_prototypes);
  }

  if (IsJSProxy()) {
    result->HandlerResult(JSProxy::cast(this));
    return;
  }

  // Do not use inline caching if the object is a non-global object
  // that requires access checks.
  if (IsAccessCheckNeeded()) {
    result->DisallowCaching();
  }

  JSObject* js_object = JSObject::cast(this);

  // Check for lookup interceptor except when bootstrapping.
  if (js_object->HasNamedInterceptor() &&
      !GetIsolate()->bootstrapper()->IsActive()) {
    result->InterceptorResult(js_object);
    return;
  }

  js_object->LookupOwnRealNamedProperty(name, result);
  if (result->IsFound() || !search_hidden_prototypes) return;

  // Continue the "own" lookup through hidden prototypes only.
  Object* proto = js_object->GetPrototype();
  if (!proto->IsJSReceiver()) return;
  JSReceiver* receiver = JSReceiver::cast(proto);
  if (receiver->map()->is_hidden_prototype()) {
    receiver->LookupOwn(name, result, search_hidden_prototypes);
  }
}


// Full prototype-chain lookup of |name|, per Ecma-262 3rd 8.6.2.4.
void JSReceiver::Lookup(Handle<Name> name, LookupResult* result) {
  DisallowHeapAllocation no_gc;
  // Ecma-262 3rd 8.6.2.4
  Handle<Object> null_value = GetIsolate()->factory()->null_value();
  for (Object* current = this;
       current != *null_value;
       current = JSObject::cast(current)->GetPrototype()) {
    JSReceiver::cast(current)->LookupOwn(name, result, false);
    if (result->IsFound()) return;
  }
  result->NotFound();
}


// Debug helper: every entry of a key array must be a string or a number.
static bool ContainsOnlyValidKeys(Handle<FixedArray> array) {
  int len = array->length();
  for (int i = 0; i < len; i++) {
    Object* e = array->get(i);
    if (!(e->IsString() || e->IsNumber())) return false;
  }
  return true;
}


// Returns a prefix of |array| of exactly |length| entries, reusing the input
// array when it already has the requested length.
static Handle<FixedArray> ReduceFixedArrayTo(
    Handle<FixedArray> array, int length) {
  ASSERT(array->length() >= length);
  if (array->length() == length) return array;

  Handle<FixedArray> new_array =
      array->GetIsolate()->factory()->NewFixedArray(length);
  for (int i = 0; i < length; ++i) new_array->set(i, array->get(i));
  return new_array;
}


// Collects the enumerable own property keys of |object|, using and (when
// |cache_result| is set) refreshing the map's enum cache for fast-mode
// objects; dictionary-mode objects are enumerated directly.
static Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object,
                                              bool cache_result) {
  Isolate* isolate = object->GetIsolate();
  if (object->HasFastProperties()) {
    int own_property_count = object->map()->EnumLength();
    // If the enum length of the
    // given map is set to kInvalidEnumCacheSentinel, this
    // means that the map itself has never used the present enum cache. The
    // first step to using the cache is to set the enum length of the map by
    // counting the number of own descriptors that are not DONT_ENUM or
    // SYMBOLIC.
    if (own_property_count == kInvalidEnumCacheSentinel) {
      own_property_count = object->map()->NumberOfDescribedProperties(
          OWN_DESCRIPTORS, DONT_SHOW);
    } else {
      ASSERT(own_property_count == object->map()->NumberOfDescribedProperties(
          OWN_DESCRIPTORS, DONT_SHOW));
    }

    if (object->map()->instance_descriptors()->HasEnumCache()) {
      DescriptorArray* desc = object->map()->instance_descriptors();
      Handle<FixedArray> keys(desc->GetEnumCache(), isolate);

      // In case the number of properties required in the enum are actually
      // present, we can reuse the enum cache. Otherwise, this means that the
      // enum cache was generated for a previous (smaller) version of the
      // Descriptor Array. In that case we regenerate the enum cache.
      if (own_property_count <= keys->length()) {
        if (cache_result) object->map()->SetEnumLength(own_property_count);
        isolate->counters()->enum_cache_hits()->Increment();
        return ReduceFixedArrayTo(keys, own_property_count);
      }
    }

    Handle<Map> map(object->map());

    if (map->instance_descriptors()->IsEmpty()) {
      isolate->counters()->enum_cache_hits()->Increment();
      if (cache_result) map->SetEnumLength(0);
      return isolate->factory()->empty_fixed_array();
    }

    isolate->counters()->enum_cache_misses()->Increment();

    // Regenerate the enum cache: collect keys and, when every enumerable
    // property is a FIELD, a parallel array of load-by-field indices.
    Handle<FixedArray> storage = isolate->factory()->NewFixedArray(
        own_property_count);
    Handle<FixedArray> indices = isolate->factory()->NewFixedArray(
        own_property_count);

    Handle<DescriptorArray> descs =
        Handle<DescriptorArray>(object->map()->instance_descriptors(), isolate);

    int size = map->NumberOfOwnDescriptors();
    int index = 0;

    for (int i = 0; i < size; i++) {
      PropertyDetails details = descs->GetDetails(i);
      Object* key = descs->GetKey(i);
      if (!(details.IsDontEnum() || key->IsSymbol())) {
        storage->set(index, key);
        if (!indices.is_null()) {
          if (details.type() != FIELD) {
            // A non-field property invalidates the indices side array.
            indices = Handle<FixedArray>();
          } else {
            FieldIndex field_index = FieldIndex::ForDescriptor(*map, i);
            int load_by_field_index = field_index.GetLoadByFieldIndex();
            indices->set(index, Smi::FromInt(load_by_field_index));
          }
        }
        index++;
      }
    }
    ASSERT(index == storage->length());

    // Install the freshly built cache on the descriptor array via a bridge.
    Handle<FixedArray> bridge_storage =
        isolate->factory()->NewFixedArray(
            DescriptorArray::kEnumCacheBridgeLength);
    DescriptorArray* desc = object->map()->instance_descriptors();
    desc->SetEnumCache(*bridge_storage,
                       *storage,
                       indices.is_null() ? Object::cast(Smi::FromInt(0))
                                         : Object::cast(*indices));
    if (cache_result) {
      object->map()->SetEnumLength(own_property_count);
    }
    return storage;
  } else {
    // Dictionary-mode properties: no enum cache, enumerate the dictionary.
    Handle<NameDictionary> dictionary(object->property_dictionary());
    int length = dictionary->NumberOfEnumElements();
    if (length == 0) {
      return Handle<FixedArray>(isolate->heap()->empty_fixed_array());
    }
    Handle<FixedArray> storage = isolate->factory()->NewFixedArray(length);
    dictionary->CopyEnumKeysTo(*storage);
    return storage;
  }
}


// Collects the enumerable keys of |object|, walking the prototype chain
// unless |type| is OWN_ONLY.  Handles proxies (via the proxy_enumerate
// runtime helper), interceptors and access checks along the way.
MaybeHandle<FixedArray> JSReceiver::GetKeys(Handle<JSReceiver> object,
                                            KeyCollectionType type) {
  USE(ContainsOnlyValidKeys);
  Isolate* isolate = object->GetIsolate();
  Handle<FixedArray> content = isolate->factory()->empty_fixed_array();
  Handle<JSObject> arguments_boilerplate = Handle<JSObject>(
      isolate->context()->native_context()->sloppy_arguments_boilerplate(),
      isolate);
  Handle<JSFunction> arguments_function = Handle<JSFunction>(
      JSFunction::cast(arguments_boilerplate->map()->constructor()),
      isolate);

  // Only collect keys if access is permitted.
  for (Handle<Object> p = object;
       *p != isolate->heap()->null_value();
       p = Handle<Object>(p->GetPrototype(isolate), isolate)) {
    if (p->IsJSProxy()) {
      // Proxies enumerate through the JS-level proxy_enumerate helper and
      // terminate the prototype walk.
      Handle<JSProxy> proxy(JSProxy::cast(*p), isolate);
      Handle<Object> args[] = { proxy };
      Handle<Object> names;
      ASSIGN_RETURN_ON_EXCEPTION(
          isolate, names,
          Execution::Call(isolate,
                          isolate->proxy_enumerate(),
                          object,
                          ARRAY_SIZE(args),
                          args),
          FixedArray);
      ASSIGN_RETURN_ON_EXCEPTION(
          isolate, content,
          FixedArray::AddKeysFromArrayLike(
              content, Handle<JSObject>::cast(names)),
          FixedArray);
      break;
    }

    Handle<JSObject> current(JSObject::cast(*p), isolate);

    // Check access rights if required.
    if (current->IsAccessCheckNeeded() &&
        !isolate->MayNamedAccess(
            current, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) {
      // Access denied: report and stop walking the chain.
      isolate->ReportFailedAccessCheck(current, v8::ACCESS_KEYS);
      RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, FixedArray);
      break;
    }

    // Compute the element keys.
    Handle<FixedArray> element_keys =
        isolate->factory()->NewFixedArray(current->NumberOfEnumElements());
    current->GetEnumElementKeys(*element_keys);
    ASSIGN_RETURN_ON_EXCEPTION(
        isolate, content,
        FixedArray::UnionOfKeys(content, element_keys),
        FixedArray);
    ASSERT(ContainsOnlyValidKeys(content));

    // Add the element keys from the interceptor.
    if (current->HasIndexedInterceptor()) {
      Handle<JSObject> result;
      if (JSObject::GetKeysForIndexedInterceptor(
              current, object).ToHandle(&result)) {
        ASSIGN_RETURN_ON_EXCEPTION(
            isolate, content,
            FixedArray::AddKeysFromArrayLike(content, result),
            FixedArray);
      }
      ASSERT(ContainsOnlyValidKeys(content));
    }

    // We can cache the computed property keys if access checks are
    // not needed and no interceptors are involved.
    //
    // We do not use the cache if the object has elements and
    // therefore it does not make sense to cache the property names
    // for arguments objects.  Arguments objects will always have
    // elements.
    // Wrapped strings have elements, but don't have an elements
    // array or dictionary.  So the fast inline test for whether to
    // use the cache says yes, so we should not create a cache.
    bool cache_enum_keys =
        ((current->map()->constructor() != *arguments_function) &&
         !current->IsJSValue() &&
         !current->IsAccessCheckNeeded() &&
         !current->HasNamedInterceptor() &&
         !current->HasIndexedInterceptor());
    // Compute the property keys and cache them if possible.
    ASSIGN_RETURN_ON_EXCEPTION(
        isolate, content,
        FixedArray::UnionOfKeys(
            content, GetEnumPropertyKeys(current, cache_enum_keys)),
        FixedArray);
    ASSERT(ContainsOnlyValidKeys(content));

    // Add the property keys from the interceptor.
    if (current->HasNamedInterceptor()) {
      Handle<JSObject> result;
      if (JSObject::GetKeysForNamedInterceptor(
              current, object).ToHandle(&result)) {
        ASSIGN_RETURN_ON_EXCEPTION(
            isolate, content,
            FixedArray::AddKeysFromArrayLike(content, result),
            FixedArray);
      }
      ASSERT(ContainsOnlyValidKeys(content));
    }

    // If we only want own properties we bail out after the first
    // iteration.
    if (type == OWN_ONLY) break;
  }
  return content;
}


// Try to update an accessor in an elements dictionary.  Return true if the
// update succeeded, and false otherwise.
static bool UpdateGetterSetterInDictionary(
    SeededNumberDictionary* dictionary,
    uint32_t index,
    Object* getter,
    Object* setter,
    PropertyAttributes attributes) {
  int entry = dictionary->FindEntry(index);
  if (entry != SeededNumberDictionary::kNotFound) {
    Object* result = dictionary->ValueAt(entry);
    PropertyDetails details = dictionary->DetailsAt(entry);
    // Only an existing accessor pair can be updated in place.
    if (details.type() == CALLBACKS && result->IsAccessorPair()) {
      ASSERT(!details.IsDontDelete());
      if (details.attributes() != attributes) {
        dictionary->DetailsAtPut(
            entry,
            PropertyDetails(attributes, CALLBACKS, index));
      }
      AccessorPair::cast(result)->SetComponents(getter, setter);
      return true;
    }
  }
  return false;
}


// Installs a getter/setter pair on element |index|, either by updating an
// existing accessor pair in a dictionary backing store or by installing a
// fresh AccessorPair via SetElementCallback.
void JSObject::DefineElementAccessor(Handle<JSObject> object,
                                     uint32_t index,
                                     Handle<Object> getter,
                                     Handle<Object> setter,
                                     PropertyAttributes attributes,
                                     v8::AccessControl access_control) {
  switch (object->GetElementsKind()) {
    case
FAST_SMI_ELEMENTS: 6447 case FAST_ELEMENTS: 6448 case FAST_DOUBLE_ELEMENTS: 6449 case FAST_HOLEY_SMI_ELEMENTS: 6450 case FAST_HOLEY_ELEMENTS: 6451 case FAST_HOLEY_DOUBLE_ELEMENTS: 6452 break; 6453 6454 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \ 6455 case EXTERNAL_##TYPE##_ELEMENTS: \ 6456 case TYPE##_ELEMENTS: \ 6457 6458 TYPED_ARRAYS(TYPED_ARRAY_CASE) 6459 #undef TYPED_ARRAY_CASE 6460 // Ignore getters and setters on pixel and external array elements. 6461 return; 6462 6463 case DICTIONARY_ELEMENTS: 6464 if (UpdateGetterSetterInDictionary(object->element_dictionary(), 6465 index, 6466 *getter, 6467 *setter, 6468 attributes)) { 6469 return; 6470 } 6471 break; 6472 case SLOPPY_ARGUMENTS_ELEMENTS: { 6473 // Ascertain whether we have read-only properties or an existing 6474 // getter/setter pair in an arguments elements dictionary backing 6475 // store. 6476 FixedArray* parameter_map = FixedArray::cast(object->elements()); 6477 uint32_t length = parameter_map->length(); 6478 Object* probe = 6479 index < (length - 2) ? 
parameter_map->get(index + 2) : NULL; 6480 if (probe == NULL || probe->IsTheHole()) { 6481 FixedArray* arguments = FixedArray::cast(parameter_map->get(1)); 6482 if (arguments->IsDictionary()) { 6483 SeededNumberDictionary* dictionary = 6484 SeededNumberDictionary::cast(arguments); 6485 if (UpdateGetterSetterInDictionary(dictionary, 6486 index, 6487 *getter, 6488 *setter, 6489 attributes)) { 6490 return; 6491 } 6492 } 6493 } 6494 break; 6495 } 6496 } 6497 6498 Isolate* isolate = object->GetIsolate(); 6499 Handle<AccessorPair> accessors = isolate->factory()->NewAccessorPair(); 6500 accessors->SetComponents(*getter, *setter); 6501 accessors->set_access_flags(access_control); 6502 6503 SetElementCallback(object, index, accessors, attributes); 6504 } 6505 6506 6507 Handle<AccessorPair> JSObject::CreateAccessorPairFor(Handle<JSObject> object, 6508 Handle<Name> name) { 6509 Isolate* isolate = object->GetIsolate(); 6510 LookupResult result(isolate); 6511 object->LookupOwnRealNamedProperty(name, &result); 6512 if (result.IsPropertyCallbacks()) { 6513 // Note that the result can actually have IsDontDelete() == true when we 6514 // e.g. have to fall back to the slow case while adding a setter after 6515 // successfully reusing a map transition for a getter. Nevertheless, this is 6516 // OK, because the assertion only holds for the whole addition of both 6517 // accessors, not for the addition of each part. See first comment in 6518 // DefinePropertyAccessor below. 
// (continuation of JSObject::CreateAccessorPairFor — head is in the
// preceding chunk.)
    Object* obj = result.GetCallbackObject();
    if (obj->IsAccessorPair()) {
      return AccessorPair::Copy(handle(AccessorPair::cast(obj), isolate));
    }
  }
  return isolate->factory()->NewAccessorPair();
}


// Installs a getter/setter pair for named property |name|.  Tries the fast
// path (map transitions via DefineFastAccessor) first; otherwise falls back
// to a normalized-property CALLBACKS entry via SetPropertyCallback.
void JSObject::DefinePropertyAccessor(Handle<JSObject> object,
                                      Handle<Name> name,
                                      Handle<Object> getter,
                                      Handle<Object> setter,
                                      PropertyAttributes attributes,
                                      v8::AccessControl access_control) {
  // We could assert that the property is configurable here, but we would need
  // to do a lookup, which seems to be a bit of overkill.
  bool only_attribute_changes = getter->IsNull() && setter->IsNull();
  if (object->HasFastProperties() && !only_attribute_changes &&
      access_control == v8::DEFAULT &&
      (object->map()->NumberOfOwnDescriptors() <= kMaxNumberOfDescriptors)) {
    // Each component is installed independently; if the getter succeeded on
    // the fast path the setter must be attempted there too (see the
    // IsDontDelete comment in CreateAccessorPairFor).
    bool getterOk = getter->IsNull() ||
        DefineFastAccessor(object, name, ACCESSOR_GETTER, getter, attributes);
    bool setterOk = !getterOk || setter->IsNull() ||
        DefineFastAccessor(object, name, ACCESSOR_SETTER, setter, attributes);
    if (getterOk && setterOk) return;
  }

  Handle<AccessorPair> accessors = CreateAccessorPairFor(object, name);
  accessors->SetComponents(*getter, *setter);
  accessors->set_access_flags(access_control);

  SetPropertyCallback(object, name, accessors, attributes);
}


// Returns true when this map's own elements are NOT dictionary-mode but some
// object on its prototype chain has dictionary-mode elements (proxies are
// conservatively treated as having them).
bool Map::DictionaryElementsInPrototypeChainOnly() {
  Heap* heap = GetHeap();

  if (IsDictionaryElementsKind(elements_kind())) {
    return false;
  }

  for (Object* prototype = this->prototype();
       prototype != heap->null_value();
       prototype = prototype->GetPrototype(GetIsolate())) {
    if (prototype->IsJSProxy()) {
      // Be conservative, don't walk into proxies.
      return true;
    }

    if (IsDictionaryElementsKind(
            JSObject::cast(prototype)->map()->elements_kind())) {
      return true;
    }
  }

  return false;
}


// Installs |structure| (an AccessorPair or AccessorInfo) as a CALLBACKS
// entry for element |index|, normalizing the elements to dictionary mode
// first.
void JSObject::SetElementCallback(Handle<JSObject> object,
                                  uint32_t index,
                                  Handle<Object> structure,
                                  PropertyAttributes attributes) {
  Heap* heap = object->GetHeap();
  PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);

  // Normalize elements to make this operation simple.
  bool had_dictionary_elements = object->HasDictionaryElements();
  Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
  ASSERT(object->HasDictionaryElements() ||
         object->HasDictionaryArgumentsElements());
  // Update the dictionary with the new CALLBACKS property.
  dictionary = SeededNumberDictionary::Set(dictionary, index, structure,
                                           details);
  dictionary->set_requires_slow_elements();

  // Update the dictionary backing store on the object.
  if (object->elements()->map() == heap->sloppy_arguments_elements_map()) {
    // Also delete any parameter alias.
    //
    // TODO(kmillikin): when deleting the last parameter alias we could
    // switch to a direct backing store without the parameter map.  This
    // would allow GC of the context.
    FixedArray* parameter_map = FixedArray::cast(object->elements());
    if (index < static_cast<uint32_t>(parameter_map->length()) - 2) {
      parameter_map->set(index + 2, heap->the_hole_value());
    }
    parameter_map->set(1, *dictionary);
  } else {
    object->set_elements(*dictionary);

    if (!had_dictionary_elements) {
      // KeyedStoreICs (at least the non-generic ones) need a reset.
      heap->ClearAllICsByKind(Code::KEYED_STORE_IC);
    }
  }
}


// Installs |structure| as a CALLBACKS entry for named property |name|,
// normalizing the object's properties first.  Global objects additionally
// get a fresh map (and dependent code deoptimized) to invalidate inline
// caches holding property-cell references.
void JSObject::SetPropertyCallback(Handle<JSObject> object,
                                   Handle<Name> name,
                                   Handle<Object> structure,
                                   PropertyAttributes attributes) {
  // Normalize object to make this operation simple.
  NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);

  // For the global object allocate a new map to invalidate the global inline
  // caches which have a global property cell reference directly in the code.
  if (object->IsGlobalObject()) {
    Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
    ASSERT(new_map->is_dictionary_map());
    object->set_map(*new_map);

    // When running crankshaft, changing the map is not enough. We
    // need to deoptimize all functions that rely on this global
    // object.
    Deoptimizer::DeoptimizeGlobalObject(*object);
  }

  // Update the dictionary with the new CALLBACKS property.
  PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);
  SetNormalizedProperty(object, name, structure, details);
}


// Public entry point for Object.defineProperty-style accessor definition.
// Handles access checks, global-proxy forwarding, element vs. named
// dispatch, and Object.observe change records.
void JSObject::DefineAccessor(Handle<JSObject> object,
                              Handle<Name> name,
                              Handle<Object> getter,
                              Handle<Object> setter,
                              PropertyAttributes attributes,
                              v8::AccessControl access_control) {
  Isolate* isolate = object->GetIsolate();
  // Check access rights if needed.
  if (object->IsAccessCheckNeeded() &&
      !isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) {
    isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET);
    // TODO(yangguo): Issue 3269, check for scheduled exception missing?
// (continuation of JSObject::DefineAccessor — head is in the preceding
// chunk.)
    return;
  }

  if (object->IsJSGlobalProxy()) {
    // Forward the definition to the real global object behind the proxy.
    Handle<Object> proto(object->GetPrototype(), isolate);
    if (proto->IsNull()) return;
    ASSERT(proto->IsJSGlobalObject());
    DefineAccessor(Handle<JSObject>::cast(proto),
                   name,
                   getter,
                   setter,
                   attributes,
                   access_control);
    return;
  }

  // Make sure that the top context does not change when doing callbacks or
  // interceptor calls.
  AssertNoContextChange ncc(isolate);

  // Try to flatten before operating on the string.
  if (name->IsString()) name = String::Flatten(Handle<String>::cast(name));

  uint32_t index = 0;
  bool is_element = name->AsArrayIndex(&index);

  // For observed objects, capture the pre-existing data value (if any) so a
  // change record can be enqueued after the accessor is installed.
  Handle<Object> old_value = isolate->factory()->the_hole_value();
  bool is_observed = object->map()->is_observed() &&
                     *name != isolate->heap()->hidden_string();
  bool preexists = false;
  if (is_observed) {
    if (is_element) {
      preexists = HasOwnElement(object, index);
      if (preexists && GetOwnElementAccessorPair(object, index).is_null()) {
        old_value =
            Object::GetElement(isolate, object, index).ToHandleChecked();
      }
    } else {
      LookupResult lookup(isolate);
      object->LookupOwn(name, &lookup, true);
      preexists = lookup.IsProperty();
      if (preexists && lookup.IsDataProperty()) {
        old_value =
            Object::GetPropertyOrElement(object, name).ToHandleChecked();
      }
    }
  }

  if (is_element) {
    DefineElementAccessor(
        object, index, getter, setter, attributes, access_control);
  } else {
    DefinePropertyAccessor(
        object, name, getter, setter, attributes, access_control);
  }

  if (is_observed) {
    const char* type = preexists ? "reconfigure" : "add";
    EnqueueChangeRecord(object, type, name, old_value);
  }
}


// Attempts to reuse an existing map transition for installing an accessor.
// Succeeds (migrating |self| to |transitioned_map|) only when the target
// descriptor already holds the identical accessor with identical attributes.
static bool TryAccessorTransition(Handle<JSObject> self,
                                  Handle<Map> transitioned_map,
                                  int target_descriptor,
                                  AccessorComponent component,
                                  Handle<Object> accessor,
                                  PropertyAttributes attributes) {
  DescriptorArray* descs = transitioned_map->instance_descriptors();
  PropertyDetails details = descs->GetDetails(target_descriptor);

  // If the transition target was not callbacks, fall back to the slow case.
  if (details.type() != CALLBACKS) return false;
  Object* descriptor = descs->GetCallbacksObject(target_descriptor);
  if (!descriptor->IsAccessorPair()) return false;

  Object* target_accessor = AccessorPair::cast(descriptor)->get(component);
  PropertyAttributes target_attributes = details.attributes();

  // Reuse transition if adding same accessor with same attributes.
  if (target_accessor == *accessor && target_attributes == attributes) {
    JSObject::MigrateToMap(self, transitioned_map);
    return true;
  }

  // If either not the same accessor, or not the same attributes, fall back to
  // the slow case.
  return false;
}


// Installs one accessor component on a fast-properties object by following
// or creating a map transition.  Returns false to request the slow
// (normalized) path; returns true when the accessor is installed (or was
// already present).
bool JSObject::DefineFastAccessor(Handle<JSObject> object,
                                  Handle<Name> name,
                                  AccessorComponent component,
                                  Handle<Object> accessor,
                                  PropertyAttributes attributes) {
  ASSERT(accessor->IsSpecFunction() || accessor->IsUndefined());
  Isolate* isolate = object->GetIsolate();
  LookupResult result(isolate);
  object->LookupOwn(name, &result);

  // An existing non-callbacks property forces the slow path.
  if (result.IsFound() && !result.IsPropertyCallbacks()) {
    return false;
  }

  // Return success if the same accessor with the same attributes already exist.
  AccessorPair* source_accessors = NULL;
  if (result.IsPropertyCallbacks()) {
    Object* callback_value = result.GetCallbackObject();
    if (callback_value->IsAccessorPair()) {
      source_accessors = AccessorPair::cast(callback_value);
      Object* entry = source_accessors->get(component);
      if (entry == *accessor && result.GetAttributes() == attributes) {
        return true;
      }
    } else {
      return false;
    }

    int descriptor_number = result.GetDescriptorIndex();

    object->map()->LookupTransition(*object, *name, &result);

    if (result.IsFound()) {
      Handle<Map> target(result.GetTransitionTarget());
      ASSERT(target->NumberOfOwnDescriptors() ==
             object->map()->NumberOfOwnDescriptors());
      // This works since descriptors are sorted in order of addition.
      ASSERT(Name::Equals(handle(object->map()->instance_descriptors()->
          GetKey(descriptor_number)), name));
      return TryAccessorTransition(object, target, descriptor_number,
                                   component, accessor, attributes);
    }
  } else {
    // If not, lookup a transition.
    object->map()->LookupTransition(*object, *name, &result);

    // If there is a transition, try to follow it.
    if (result.IsFound()) {
      Handle<Map> target(result.GetTransitionTarget());
      int descriptor_number = target->LastAdded();
      ASSERT(Name::Equals(name,
          handle(target->instance_descriptors()->GetKey(descriptor_number))));
      return TryAccessorTransition(object, target, descriptor_number,
                                   component, accessor, attributes);
    }
  }

  // If there is no transition yet, add a transition to the a new accessor pair
  // containing the accessor.  Allocate a new pair if there were no source
  // accessors.  Otherwise, copy the pair and modify the accessor.
  Handle<AccessorPair> accessors = source_accessors != NULL
      ?
// (continuation of JSObject::DefineFastAccessor — head is in the preceding
// chunk; completes the conditional choosing between copying the existing
// AccessorPair and allocating a fresh one.)
        AccessorPair::Copy(Handle<AccessorPair>(source_accessors))
      : isolate->factory()->NewAccessorPair();
  accessors->set(component, *accessor);

  CallbacksDescriptor new_accessors_desc(name, accessors, attributes);
  Handle<Map> new_map = Map::CopyInsertDescriptor(
      handle(object->map()), &new_accessors_desc, INSERT_TRANSITION);

  JSObject::MigrateToMap(object, new_map);
  return true;
}


// Installs an API-defined AccessorInfo callback on |object| under the name
// stored in |info|.  Returns undefined (without installing) when access is
// denied, the receiver is a JSArray element target, the element kind cannot
// take accessors, or an existing property is non-configurable.
MaybeHandle<Object> JSObject::SetAccessor(Handle<JSObject> object,
                                          Handle<AccessorInfo> info) {
  Isolate* isolate = object->GetIsolate();
  Factory* factory = isolate->factory();
  Handle<Name> name(Name::cast(info->name()));

  // Check access rights if needed.
  if (object->IsAccessCheckNeeded() &&
      !isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) {
    isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET);
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return factory->undefined_value();
  }

  if (object->IsJSGlobalProxy()) {
    // Forward to the real global object behind the proxy.
    Handle<Object> proto(object->GetPrototype(), isolate);
    if (proto->IsNull()) return object;
    ASSERT(proto->IsJSGlobalObject());
    return SetAccessor(Handle<JSObject>::cast(proto), info);
  }

  // Make sure that the top context does not change when doing callbacks or
  // interceptor calls.
  AssertNoContextChange ncc(isolate);

  // Try to flatten before operating on the string.
  if (name->IsString()) name = String::Flatten(Handle<String>::cast(name));

  uint32_t index = 0;
  bool is_element = name->AsArrayIndex(&index);

  if (is_element) {
    if (object->IsJSArray()) return factory->undefined_value();

    // Accessors overwrite previous callbacks (cf. with getters/setters).
    switch (object->GetElementsKind()) {
      case FAST_SMI_ELEMENTS:
      case FAST_ELEMENTS:
      case FAST_DOUBLE_ELEMENTS:
      case FAST_HOLEY_SMI_ELEMENTS:
      case FAST_HOLEY_ELEMENTS:
      case FAST_HOLEY_DOUBLE_ELEMENTS:
        break;

#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                        \
      case EXTERNAL_##TYPE##_ELEMENTS:                                         \
      case TYPE##_ELEMENTS:                                                    \

      TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
        // Ignore getters and setters on pixel and external array
        // elements.
        return factory->undefined_value();

      case DICTIONARY_ELEMENTS:
        break;
      case SLOPPY_ARGUMENTS_ELEMENTS:
        UNIMPLEMENTED();
        break;
    }

    SetElementCallback(object, index, info, info->property_attributes());
  } else {
    // Lookup the name.
    LookupResult result(isolate);
    object->LookupOwn(name, &result, true);
    // ES5 forbids turning a property into an accessor if it's not
    // configurable (that is IsDontDelete in ES3 and v8), see 8.6.1 (Table 5).
    if (result.IsFound() && (result.IsReadOnly() || result.IsDontDelete())) {
      return factory->undefined_value();
    }

    SetPropertyCallback(object, name, info, info->property_attributes());
  }

  return object;
}


// Looks up the requested accessor component (getter or setter) for |name|,
// walking the prototype chain.  Returns undefined when not found, access is
// denied, or the property is not an AccessorPair callback.
MaybeHandle<Object> JSObject::GetAccessor(Handle<JSObject> object,
                                          Handle<Name> name,
                                          AccessorComponent component) {
  Isolate* isolate = object->GetIsolate();

  // Make sure that the top context does not change when doing callbacks or
  // interceptor calls.
  AssertNoContextChange ncc(isolate);

  // Make the lookup and include prototypes.
  uint32_t index = 0;
  if (name->AsArrayIndex(&index)) {
    // Element path: only dictionary-mode elements can hold accessors.
    for (Handle<Object> obj = object;
         !obj->IsNull();
         obj = handle(JSReceiver::cast(*obj)->GetPrototype(), isolate)) {
      if (obj->IsAccessCheckNeeded() &&
          !isolate->MayNamedAccess(Handle<JSObject>::cast(obj), name,
                                   v8::ACCESS_HAS)) {
        isolate->ReportFailedAccessCheck(Handle<JSObject>::cast(obj),
                                         v8::ACCESS_HAS);
        RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
        return isolate->factory()->undefined_value();
      }

      if (obj->IsJSObject() && JSObject::cast(*obj)->HasDictionaryElements()) {
        JSObject* js_object = JSObject::cast(*obj);
        SeededNumberDictionary* dictionary = js_object->element_dictionary();
        int entry = dictionary->FindEntry(index);
        if (entry != SeededNumberDictionary::kNotFound) {
          Object* element = dictionary->ValueAt(entry);
          if (dictionary->DetailsAt(entry).type() == CALLBACKS &&
              element->IsAccessorPair()) {
            return handle(AccessorPair::cast(element)->GetComponent(component),
                          isolate);
          }
        }
      }
    }
  } else {
    // Named-property path.
    for (Handle<Object> obj = object;
         !obj->IsNull();
         obj = handle(JSReceiver::cast(*obj)->GetPrototype(), isolate)) {
      if (obj->IsAccessCheckNeeded() &&
          !isolate->MayNamedAccess(Handle<JSObject>::cast(obj), name,
                                   v8::ACCESS_HAS)) {
        isolate->ReportFailedAccessCheck(Handle<JSObject>::cast(obj),
                                         v8::ACCESS_HAS);
        RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
        return isolate->factory()->undefined_value();
      }
      LookupResult result(isolate);
      JSReceiver::cast(*obj)->LookupOwn(name, &result);
      if (result.IsFound()) {
        if (result.IsReadOnly()) return isolate->factory()->undefined_value();
        if (result.IsPropertyCallbacks()) {
          Object* obj = result.GetCallbackObject();
          if (obj->IsAccessorPair()) {
            return handle(AccessorPair::cast(obj)->GetComponent(component),
                          isolate);
          }
        }
      }
    }
  }
  return isolate->factory()->undefined_value();
}


// Returns the name of the first own property whose value is |value|
// (with numeric double-field comparison for double representations),
// or undefined when no property holds it.
Object* JSObject::SlowReverseLookup(Object* value) {
  if (HasFastProperties()) {
    int number_of_own_descriptors = map()->NumberOfOwnDescriptors();
    DescriptorArray* descs = map()->instance_descriptors();
    for (int i = 0; i < number_of_own_descriptors; i++) {
      if (descs->GetType(i) == FIELD) {
        Object* property =
            RawFastPropertyAt(FieldIndex::ForDescriptor(map(), i));
        if (descs->GetDetails(i).representation().IsDouble()) {
          // Double fields box their value; compare numerically.
          ASSERT(property->IsHeapNumber());
          if (value->IsNumber() && property->Number() == value->Number()) {
            return descs->GetKey(i);
          }
        } else if (property == value) {
          return descs->GetKey(i);
        }
      } else if (descs->GetType(i) == CONSTANT) {
        if (descs->GetConstant(i) == value) {
          return descs->GetKey(i);
        }
      }
    }
    return GetHeap()->undefined_value();
  } else {
    return property_dictionary()->SlowReverseLookup(value);
  }
}


// Allocates a fresh map copying |map|'s type, prototype, constructor and
// bit fields, while resetting per-map bookkeeping (descriptor ownership,
// own-descriptor count, enum cache, deprecation, slack tracking).
Handle<Map> Map::RawCopy(Handle<Map> map, int instance_size) {
  Handle<Map> result = map->GetIsolate()->factory()->NewMap(
      map->instance_type(), instance_size);
  result->set_prototype(map->prototype());
  result->set_constructor(map->constructor());
  result->set_bit_field(map->bit_field());
  result->set_bit_field2(map->bit_field2());
  int new_bit_field3 = map->bit_field3();
  new_bit_field3 = OwnsDescriptors::update(new_bit_field3, true);
  new_bit_field3 = NumberOfOwnDescriptorsBits::update(new_bit_field3, 0);
  new_bit_field3 = EnumLengthBits::update(new_bit_field3,
                                          kInvalidEnumCacheSentinel);
  new_bit_field3 = Deprecated::update(new_bit_field3, false);
  if (!map->is_dictionary_map()) {
    new_bit_field3 = IsUnstable::update(new_bit_field3, false);
  }
  new_bit_field3 = ConstructionCount::update(new_bit_field3,
                                             JSFunction::kNoSlackTracking);
// (continuation of Map::RawCopy — head is in the preceding chunk.)
  result->set_bit_field3(new_bit_field3);
  return result;
}


// Returns the dictionary-mode ("normalized") map equivalent to |fast_map|,
// using the per-native-context NormalizedMapCache to share maps; also marks
// |fast_map| as no longer a stable leaf.
Handle<Map> Map::Normalize(Handle<Map> fast_map,
                           PropertyNormalizationMode mode) {
  ASSERT(!fast_map->is_dictionary_map());

  Isolate* isolate = fast_map->GetIsolate();
  Handle<NormalizedMapCache> cache(
      isolate->context()->native_context()->normalized_map_cache());

  Handle<Map> new_map;
  if (cache->Get(fast_map, mode).ToHandle(&new_map)) {
#ifdef VERIFY_HEAP
    if (FLAG_verify_heap) {
      new_map->SharedMapVerify();
    }
#endif
#ifdef ENABLE_SLOW_ASSERTS
    if (FLAG_enable_slow_asserts) {
      // The cached map should match newly created normalized map bit-by-bit,
      // except for the code cache, which can contain some ics which can be
      // applied to the shared map.
      Handle<Map> fresh = Map::CopyNormalized(
          fast_map, mode, SHARED_NORMALIZED_MAP);

      ASSERT(memcmp(fresh->address(),
                    new_map->address(),
                    Map::kCodeCacheOffset) == 0);
      STATIC_ASSERT(Map::kDependentCodeOffset ==
                    Map::kCodeCacheOffset + kPointerSize);
      int offset = Map::kDependentCodeOffset + kPointerSize;
      ASSERT(memcmp(fresh->address() + offset,
                    new_map->address() + offset,
                    Map::kSize - offset) == 0);
    }
#endif
  } else {
    new_map = Map::CopyNormalized(fast_map, mode, SHARED_NORMALIZED_MAP);
    cache->Set(fast_map, new_map);
    isolate->counters()->normalized_maps()->Increment();
  }
  fast_map->NotifyLeafMapLayoutChange();
  return new_map;
}


// Builds a dictionary-mode copy of |map|, optionally dropping in-object
// property slots (CLEAR_INOBJECT_PROPERTIES shrinks the instance size).
Handle<Map> Map::CopyNormalized(Handle<Map> map,
                                PropertyNormalizationMode mode,
                                NormalizedMapSharingMode sharing) {
  int new_instance_size = map->instance_size();
  if (mode == CLEAR_INOBJECT_PROPERTIES) {
    new_instance_size -= map->inobject_properties() * kPointerSize;
  }

  Handle<Map> result = RawCopy(map, new_instance_size);

  if (mode != CLEAR_INOBJECT_PROPERTIES) {
    result->set_inobject_properties(map->inobject_properties());
  }

  result->set_is_shared(sharing == SHARED_NORMALIZED_MAP);
  result->set_dictionary_map(true);
  result->set_migration_target(false);

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap && result->is_shared()) {
    result->SharedMapVerify();
  }
#endif

  return result;
}


// Copies |map| (same instance size and field accounting) without carrying
// over its descriptor array or code cache; notifies the source map that its
// leaf layout changed.
Handle<Map> Map::CopyDropDescriptors(Handle<Map> map) {
  Handle<Map> result = RawCopy(map, map->instance_size());

  // Please note instance_type and instance_size are set when allocated.
  result->set_inobject_properties(map->inobject_properties());
  result->set_unused_property_fields(map->unused_property_fields());

  result->set_pre_allocated_property_fields(
      map->pre_allocated_property_fields());
  result->set_is_shared(false);
  result->ClearCodeCache(map->GetHeap());
  map->NotifyLeafMapLayoutChange();
  return result;
}


// Creates a child map that appends |descriptor| while SHARING the parent's
// descriptor array (parent must own its descriptors); records a simple
// transition on the parent and transfers descriptor ownership to the child.
Handle<Map> Map::ShareDescriptor(Handle<Map> map,
                                 Handle<DescriptorArray> descriptors,
                                 Descriptor* descriptor) {
  // Sanity check. This path is only to be taken if the map owns its descriptor
  // array, implying that its NumberOfOwnDescriptors equals the number of
  // descriptors in the descriptor array.
  ASSERT(map->NumberOfOwnDescriptors() ==
         map->instance_descriptors()->number_of_descriptors());

  Handle<Map> result = CopyDropDescriptors(map);
  Handle<Name> name = descriptor->GetKey();
  Handle<TransitionArray> transitions =
      TransitionArray::CopyInsert(map, name, result, SIMPLE_TRANSITION);

  // Ensure there's space for the new descriptor in the shared descriptor array.
  if (descriptors->NumberOfSlackDescriptors() == 0) {
    int old_size = descriptors->number_of_descriptors();
    if (old_size == 0) {
      descriptors = DescriptorArray::Allocate(map->GetIsolate(), 0, 1);
    } else {
      EnsureDescriptorSlack(map, old_size < 4 ? 1 : old_size / 2);
      descriptors = handle(map->instance_descriptors());
    }
  }

  // Commit the state atomically.
  DisallowHeapAllocation no_gc;

  descriptors->Append(descriptor);
  result->SetBackPointer(*map);
  result->InitializeDescriptors(*descriptors);

  ASSERT(result->NumberOfOwnDescriptors() == map->NumberOfOwnDescriptors() + 1);

  map->set_transitions(*transitions);
  map->set_owns_descriptors(false);

  return result;
}


// Copies |map| with |descriptors| as its descriptor array.  When inserting a
// transition, links parent and child; otherwise the (free-floating) copy's
// descriptors are generalized to tagged/any so they cannot over-constrain.
Handle<Map> Map::CopyReplaceDescriptors(Handle<Map> map,
                                        Handle<DescriptorArray> descriptors,
                                        TransitionFlag flag,
                                        MaybeHandle<Name> maybe_name,
                                        SimpleTransitionFlag simple_flag) {
  ASSERT(descriptors->IsSortedNoDuplicates());

  Handle<Map> result = CopyDropDescriptors(map);
  result->InitializeDescriptors(*descriptors);

  if (flag == INSERT_TRANSITION && map->CanHaveMoreTransitions()) {
    Handle<Name> name;
    CHECK(maybe_name.ToHandle(&name));
    Handle<TransitionArray> transitions = TransitionArray::CopyInsert(
        map, name, result, simple_flag);
    map->set_transitions(*transitions);
    result->SetBackPointer(*map);
  } else {
    int length = descriptors->number_of_descriptors();
    for (int i = 0; i < length; i++) {
      descriptors->SetRepresentation(i, Representation::Tagged());
      if (descriptors->GetDetails(i).type() == FIELD) {
        descriptors->SetValue(i, HeapType::Any());
      }
    }
  }

  return result;
}


// Since this method is used to rewrite an existing transition tree, it can
// always insert transitions without checking.
7185 Handle<Map> Map::CopyInstallDescriptors(Handle<Map> map, 7186 int new_descriptor, 7187 Handle<DescriptorArray> descriptors) { 7188 ASSERT(descriptors->IsSortedNoDuplicates()); 7189 7190 Handle<Map> result = CopyDropDescriptors(map); 7191 7192 result->InitializeDescriptors(*descriptors); 7193 result->SetNumberOfOwnDescriptors(new_descriptor + 1); 7194 7195 int unused_property_fields = map->unused_property_fields(); 7196 if (descriptors->GetDetails(new_descriptor).type() == FIELD) { 7197 unused_property_fields = map->unused_property_fields() - 1; 7198 if (unused_property_fields < 0) { 7199 unused_property_fields += JSObject::kFieldsAdded; 7200 } 7201 } 7202 7203 result->set_unused_property_fields(unused_property_fields); 7204 result->set_owns_descriptors(false); 7205 7206 Handle<Name> name = handle(descriptors->GetKey(new_descriptor)); 7207 Handle<TransitionArray> transitions = TransitionArray::CopyInsert( 7208 map, name, result, SIMPLE_TRANSITION); 7209 7210 map->set_transitions(*transitions); 7211 result->SetBackPointer(*map); 7212 7213 return result; 7214 } 7215 7216 7217 Handle<Map> Map::CopyAsElementsKind(Handle<Map> map, ElementsKind kind, 7218 TransitionFlag flag) { 7219 if (flag == INSERT_TRANSITION) { 7220 ASSERT(!map->HasElementsTransition() || 7221 ((map->elements_transition_map()->elements_kind() == 7222 DICTIONARY_ELEMENTS || 7223 IsExternalArrayElementsKind( 7224 map->elements_transition_map()->elements_kind())) && 7225 (kind == DICTIONARY_ELEMENTS || 7226 IsExternalArrayElementsKind(kind)))); 7227 ASSERT(!IsFastElementsKind(kind) || 7228 IsMoreGeneralElementsKindTransition(map->elements_kind(), kind)); 7229 ASSERT(kind != map->elements_kind()); 7230 } 7231 7232 bool insert_transition = 7233 flag == INSERT_TRANSITION && !map->HasElementsTransition(); 7234 7235 if (insert_transition && map->owns_descriptors()) { 7236 // In case the map owned its own descriptors, share the descriptors and 7237 // transfer ownership to the new map. 
7238 Handle<Map> new_map = CopyDropDescriptors(map); 7239 7240 SetElementsTransitionMap(map, new_map); 7241 7242 new_map->set_elements_kind(kind); 7243 new_map->InitializeDescriptors(map->instance_descriptors()); 7244 new_map->SetBackPointer(*map); 7245 map->set_owns_descriptors(false); 7246 return new_map; 7247 } 7248 7249 // In case the map did not own its own descriptors, a split is forced by 7250 // copying the map; creating a new descriptor array cell. 7251 // Create a new free-floating map only if we are not allowed to store it. 7252 Handle<Map> new_map = Copy(map); 7253 7254 new_map->set_elements_kind(kind); 7255 7256 if (insert_transition) { 7257 SetElementsTransitionMap(map, new_map); 7258 new_map->SetBackPointer(*map); 7259 } 7260 7261 return new_map; 7262 } 7263 7264 7265 Handle<Map> Map::CopyForObserved(Handle<Map> map) { 7266 ASSERT(!map->is_observed()); 7267 7268 Isolate* isolate = map->GetIsolate(); 7269 7270 // In case the map owned its own descriptors, share the descriptors and 7271 // transfer ownership to the new map. 
7272 Handle<Map> new_map; 7273 if (map->owns_descriptors()) { 7274 new_map = CopyDropDescriptors(map); 7275 } else { 7276 new_map = Copy(map); 7277 } 7278 7279 Handle<TransitionArray> transitions = TransitionArray::CopyInsert( 7280 map, isolate->factory()->observed_symbol(), new_map, FULL_TRANSITION); 7281 7282 map->set_transitions(*transitions); 7283 7284 new_map->set_is_observed(); 7285 7286 if (map->owns_descriptors()) { 7287 new_map->InitializeDescriptors(map->instance_descriptors()); 7288 map->set_owns_descriptors(false); 7289 } 7290 7291 new_map->SetBackPointer(*map); 7292 return new_map; 7293 } 7294 7295 7296 Handle<Map> Map::Copy(Handle<Map> map) { 7297 Handle<DescriptorArray> descriptors(map->instance_descriptors()); 7298 int number_of_own_descriptors = map->NumberOfOwnDescriptors(); 7299 Handle<DescriptorArray> new_descriptors = 7300 DescriptorArray::CopyUpTo(descriptors, number_of_own_descriptors); 7301 return CopyReplaceDescriptors( 7302 map, new_descriptors, OMIT_TRANSITION, MaybeHandle<Name>()); 7303 } 7304 7305 7306 Handle<Map> Map::Create(Handle<JSFunction> constructor, 7307 int extra_inobject_properties) { 7308 Handle<Map> copy = Copy(handle(constructor->initial_map())); 7309 7310 // Check that we do not overflow the instance size when adding the 7311 // extra inobject properties. 7312 int instance_size_delta = extra_inobject_properties * kPointerSize; 7313 int max_instance_size_delta = 7314 JSObject::kMaxInstanceSize - copy->instance_size(); 7315 int max_extra_properties = max_instance_size_delta >> kPointerSizeLog2; 7316 7317 // If the instance size overflows, we allocate as many properties as we can as 7318 // inobject properties. 7319 if (extra_inobject_properties > max_extra_properties) { 7320 instance_size_delta = max_instance_size_delta; 7321 extra_inobject_properties = max_extra_properties; 7322 } 7323 7324 // Adjust the map with the extra inobject properties. 
7325 int inobject_properties = 7326 copy->inobject_properties() + extra_inobject_properties; 7327 copy->set_inobject_properties(inobject_properties); 7328 copy->set_unused_property_fields(inobject_properties); 7329 copy->set_instance_size(copy->instance_size() + instance_size_delta); 7330 copy->set_visitor_id(StaticVisitorBase::GetVisitorId(*copy)); 7331 return copy; 7332 } 7333 7334 7335 Handle<Map> Map::CopyForFreeze(Handle<Map> map) { 7336 int num_descriptors = map->NumberOfOwnDescriptors(); 7337 Isolate* isolate = map->GetIsolate(); 7338 Handle<DescriptorArray> new_desc = DescriptorArray::CopyUpToAddAttributes( 7339 handle(map->instance_descriptors(), isolate), num_descriptors, FROZEN); 7340 Handle<Map> new_map = CopyReplaceDescriptors( 7341 map, new_desc, INSERT_TRANSITION, isolate->factory()->frozen_symbol()); 7342 new_map->freeze(); 7343 new_map->set_is_extensible(false); 7344 new_map->set_elements_kind(DICTIONARY_ELEMENTS); 7345 return new_map; 7346 } 7347 7348 7349 Handle<Map> Map::CopyAddDescriptor(Handle<Map> map, 7350 Descriptor* descriptor, 7351 TransitionFlag flag) { 7352 Handle<DescriptorArray> descriptors(map->instance_descriptors()); 7353 7354 // Ensure the key is unique. 7355 descriptor->KeyToUniqueName(); 7356 7357 if (flag == INSERT_TRANSITION && 7358 map->owns_descriptors() && 7359 map->CanHaveMoreTransitions()) { 7360 return ShareDescriptor(map, descriptors, descriptor); 7361 } 7362 7363 Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo( 7364 descriptors, map->NumberOfOwnDescriptors(), 1); 7365 new_descriptors->Append(descriptor); 7366 7367 return CopyReplaceDescriptors( 7368 map, new_descriptors, flag, descriptor->GetKey(), SIMPLE_TRANSITION); 7369 } 7370 7371 7372 Handle<Map> Map::CopyInsertDescriptor(Handle<Map> map, 7373 Descriptor* descriptor, 7374 TransitionFlag flag) { 7375 Handle<DescriptorArray> old_descriptors(map->instance_descriptors()); 7376 7377 // Ensure the key is unique. 
  descriptor->KeyToUniqueName();

  // We replace the key if it is already present.
  int index = old_descriptors->SearchWithCache(*descriptor->GetKey(), *map);
  if (index != DescriptorArray::kNotFound) {
    return CopyReplaceDescriptor(map, old_descriptors, descriptor, index, flag);
  }
  return CopyAddDescriptor(map, descriptor, flag);
}


// Copies the first |enumeration_index| descriptors of |desc| verbatim,
// reserving |slack| extra slots. Convenience wrapper adding no attributes.
Handle<DescriptorArray> DescriptorArray::CopyUpTo(
    Handle<DescriptorArray> desc,
    int enumeration_index,
    int slack) {
  return DescriptorArray::CopyUpToAddAttributes(
      desc, enumeration_index, NONE, slack);
}


// Copies the first |enumeration_index| descriptors of |desc| into a new
// array with |slack| spare slots, OR-ing |attributes| into each copied
// property's details (subject to the per-descriptor mask computed below).
Handle<DescriptorArray> DescriptorArray::CopyUpToAddAttributes(
    Handle<DescriptorArray> desc,
    int enumeration_index,
    PropertyAttributes attributes,
    int slack) {
  if (enumeration_index + slack == 0) {
    return desc->GetIsolate()->factory()->empty_descriptor_array();
  }

  int size = enumeration_index;

  Handle<DescriptorArray> descriptors =
      DescriptorArray::Allocate(desc->GetIsolate(), size, slack);
  // The new array is freshly allocated (white), which the witness asserts;
  // this lets Set() skip incremental-marking bookkeeping.
  DescriptorArray::WhitenessWitness witness(*descriptors);

  if (attributes != NONE) {
    for (int i = 0; i < size; ++i) {
      Object* value = desc->GetValue(i);
      PropertyDetails details = desc->GetDetails(i);
      int mask = DONT_DELETE | DONT_ENUM;
      // READ_ONLY is an invalid attribute for JS setters/getters.
      if (details.type() != CALLBACKS || !value->IsAccessorPair()) {
        mask |= READ_ONLY;
      }
      details = details.CopyAddAttributes(
          static_cast<PropertyAttributes>(attributes & mask));
      Descriptor inner_desc(handle(desc->GetKey(i)),
                            handle(value, desc->GetIsolate()),
                            details);
      descriptors->Set(i, &inner_desc, witness);
    }
  } else {
    // No attribute changes requested: plain element-wise copy.
    for (int i = 0; i < size; ++i) {
      descriptors->CopyFrom(i, *desc, witness);
    }
  }

  // Copying a strict prefix of the source can leave the sorted-key order
  // stale, so re-establish it.
  if (desc->number_of_descriptors() != enumeration_index) descriptors->Sort();

  return descriptors;
}


// Copies |map|'s descriptors and overwrites the descriptor at
// |insertion_index| with |descriptor| (which must have the same key).
Handle<Map> Map::CopyReplaceDescriptor(Handle<Map> map,
                                       Handle<DescriptorArray> descriptors,
                                       Descriptor* descriptor,
                                       int insertion_index,
                                       TransitionFlag flag) {
  // Ensure the key is unique.
  descriptor->KeyToUniqueName();

  Handle<Name> key = descriptor->GetKey();
  ASSERT(*key == descriptors->GetKey(insertion_index));

  Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
      descriptors, map->NumberOfOwnDescriptors());

  new_descriptors->Replace(insertion_index, descriptor);

  // Only replacing the last descriptor qualifies as a simple transition.
  SimpleTransitionFlag simple_flag =
      (insertion_index == descriptors->number_of_descriptors() - 1)
      ? SIMPLE_TRANSITION
      : FULL_TRANSITION;
  return CopyReplaceDescriptors(map, new_descriptors, flag, key, simple_flag);
}


// Adds (name, code) to |map|'s per-map code cache, allocating the cache
// structure on first use.
void Map::UpdateCodeCache(Handle<Map> map,
                          Handle<Name> name,
                          Handle<Code> code) {
  Isolate* isolate = map->GetIsolate();
  HandleScope scope(isolate);
  // Allocate the code cache if not present. An absent cache is represented
  // by a bare FixedArray rather than a CodeCache struct (see the
  // IsFixedArray checks in the lookup functions below).
  if (map->code_cache()->IsFixedArray()) {
    Handle<Object> result = isolate->factory()->NewCodeCache();
    map->set_code_cache(*result);
  }

  // Update the code cache.
  Handle<CodeCache> code_cache(CodeCache::cast(map->code_cache()), isolate);
  CodeCache::Update(code_cache, name, code);
}


// Looks up |name| with |flags| in this map's code cache; returns undefined
// when the cache is absent (still a bare FixedArray) or has no match.
Object* Map::FindInCodeCache(Name* name, Code::Flags flags) {
  // Do a lookup if a code cache exists.
  if (!code_cache()->IsFixedArray()) {
    return CodeCache::cast(code_cache())->Lookup(name, flags);
  } else {
    return GetHeap()->undefined_value();
  }
}


// Returns an internal index for (name, code), or -1 when the cache is
// absent. The index is only meaningful for a later RemoveFromCodeCache call.
int Map::IndexInCodeCache(Object* name, Code* code) {
  // Get the internal index if a code cache exists.
  if (!code_cache()->IsFixedArray()) {
    return CodeCache::cast(code_cache())->GetIndex(name, code);
  }
  return -1;
}


// Removes the entry previously located via IndexInCodeCache.
void Map::RemoveFromCodeCache(Name* name, Code* code, int index) {
  // No GC is supposed to happen between a call to IndexInCodeCache and
  // RemoveFromCodeCache so the code cache must be there.
  ASSERT(!code_cache()->IsFixedArray());
  CodeCache::cast(code_cache())->RemoveByIndex(name, code, index);
}


// An iterator over all map transitions in a descriptor array, reusing the
// constructor field of the map while it is running. Negative values in
// the constructor field indicate an active map transition iteration. The
// original constructor is restored after iterating over all entries.
class IntrusiveMapTransitionIterator {
 public:
  IntrusiveMapTransitionIterator(
      Map* map, TransitionArray* transition_array, Object* constructor)
      : map_(map),
        transition_array_(transition_array),
        constructor_(constructor) { }

  // Starts the iteration by stashing the Smi -1 in the constructor slot,
  // unless an iteration is already in progress there.
  void StartIfNotStarted() {
    ASSERT(!(*IteratorField())->IsSmi() || IsIterating());
    if (!(*IteratorField())->IsSmi()) {
      ASSERT(*IteratorField() == constructor_);
      *IteratorField() = Smi::FromInt(-1);
    }
  }

  // Iteration is active while the constructor slot holds a negative Smi.
  bool IsIterating() {
    return (*IteratorField())->IsSmi() &&
           Smi::cast(*IteratorField())->value() < 0;
  }

  // Returns the next transition target and advances, or returns NULL and
  // restores the original constructor when exhausted. The stored Smi value
  // -n-1 encodes the next transition index n.
  Map* Next() {
    ASSERT(IsIterating());
    int value = Smi::cast(*IteratorField())->value();
    int index = -value - 1;
    int number_of_transitions = transition_array_->number_of_transitions();
    // NOTE(review): this `while` always returns on its first pass, so it
    // behaves exactly like an `if`.
    while (index < number_of_transitions) {
      *IteratorField() = Smi::FromInt(value - 1);
      return transition_array_->GetTarget(index);
    }

    *IteratorField() = constructor_;
    return NULL;
  }

 private:
  // The map's constructor slot, which doubles as the iteration state.
  Object** IteratorField() {
    return HeapObject::RawField(map_, Map::kConstructorOffset);
  }

  Map* map_;
  TransitionArray* transition_array_;
  Object* constructor_;
};


// An iterator over all prototype transitions, reusing the constructor field
// of the map while it is running. Positive values in the constructor field
// indicate an active prototype transition iteration. The original constructor
// is restored after iterating over all entries.
class IntrusivePrototypeTransitionIterator {
 public:
  IntrusivePrototypeTransitionIterator(
      Map* map, HeapObject* proto_trans, Object* constructor)
      : map_(map), proto_trans_(proto_trans), constructor_(constructor) { }

  // Starts the iteration by stashing index 0 in the constructor slot,
  // unless an iteration is already in progress there.
  void StartIfNotStarted() {
    if (!(*IteratorField())->IsSmi()) {
      ASSERT(*IteratorField() == constructor_);
      *IteratorField() = Smi::FromInt(0);
    }
  }

  // Iteration is active while the constructor slot holds a non-negative Smi.
  bool IsIterating() {
    return (*IteratorField())->IsSmi() &&
           Smi::cast(*IteratorField())->value() >= 0;
  }

  // Returns the next prototype-transition target and advances, or returns
  // NULL and restores the original constructor when exhausted.
  Map* Next() {
    ASSERT(IsIterating());
    int transitionNumber = Smi::cast(*IteratorField())->value();
    if (transitionNumber < NumberOfTransitions()) {
      *IteratorField() = Smi::FromInt(transitionNumber + 1);
      return GetTransition(transitionNumber);
    }
    *IteratorField() = constructor_;
    return NULL;
  }

 private:
  // The map's constructor slot, which doubles as the iteration state.
  Object** IteratorField() {
    return HeapObject::RawField(map_, Map::kConstructorOffset);
  }

  // Reads the entry count stored in the prototype-transition FixedArray.
  int NumberOfTransitions() {
    FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
    Object* num = proto_trans->get(Map::kProtoTransitionNumberOfEntriesOffset);
    return Smi::cast(num)->value();
  }

  Map* GetTransition(int transitionNumber) {
    FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
    return Map::cast(proto_trans->get(IndexFor(transitionNumber)));
  }

  // Flat index of entry |transitionNumber|'s map slot within the
  // prototype-transition FixedArray.
  int IndexFor(int transitionNumber) {
    return Map::kProtoTransitionHeaderSize +
        Map::kProtoTransitionMapOffset +
        transitionNumber * Map::kProtoTransitionElementsPerEntry;
  }

  Map* map_;
  HeapObject* proto_trans_;
  Object* constructor_;
};


// To traverse the transition tree iteratively, we have to store two kinds of
// information in a map: The parent map in the traversal and which children of a
// node have already been visited.
// To do this without additional memory, we
// temporarily reuse two fields with known values:
//
// (1) The map of the map temporarily holds the parent, and is restored to the
//     meta map afterwards.
//
// (2) The info which children have already been visited depends on which part
//     of the map we currently iterate. We use the constructor field of the
//     map to store the current index. We can do that because the constructor
//     is the same for all involved maps.
//
//    (a) If we currently follow normal map transitions, we temporarily store
//        the current index in the constructor field, and restore it to the
//        original constructor afterwards. Note that a single descriptor can
//        have 0, 1, or 2 transitions.
//
//    (b) If we currently follow prototype transitions, we temporarily store
//        the current index in the constructor field, and restore it to the
//        original constructor afterwards.
//
// Note that the child iterator is just a concatenation of two iterators: One
// iterating over map transitions and one iterating over prototype transitions.
class TraversableMap : public Map {
 public:
  // Record the parent in the traversal within this map. Note that this destroys
  // this map's map!
  void SetParent(TraversableMap* parent) { set_map_no_write_barrier(parent); }

  // Reset the current map's map, returning the parent previously stored in it.
  TraversableMap* GetAndResetParent() {
    TraversableMap* old_parent = static_cast<TraversableMap*>(map());
    set_map_no_write_barrier(GetHeap()->meta_map());
    return old_parent;
  }

  // If we have an unvisited child map, return that one and advance. If we have
  // none, return NULL and restore the overwritten constructor field.
  TraversableMap* ChildIteratorNext(Object* constructor) {
    if (!HasTransitionArray()) return NULL;

    // Exhaust prototype transitions first, then normal map transitions;
    // both iterators share the constructor slot as their state.
    TransitionArray* transition_array = transitions();
    if (transition_array->HasPrototypeTransitions()) {
      HeapObject* proto_transitions =
          transition_array->GetPrototypeTransitions();
      IntrusivePrototypeTransitionIterator proto_iterator(this,
                                                          proto_transitions,
                                                          constructor);
      proto_iterator.StartIfNotStarted();
      if (proto_iterator.IsIterating()) {
        Map* next = proto_iterator.Next();
        if (next != NULL) return static_cast<TraversableMap*>(next);
      }
    }

    IntrusiveMapTransitionIterator transition_iterator(this,
                                                       transition_array,
                                                       constructor);
    transition_iterator.StartIfNotStarted();
    if (transition_iterator.IsIterating()) {
      Map* next = transition_iterator.Next();
      if (next != NULL) return static_cast<TraversableMap*>(next);
    }

    return NULL;
  }
};


// Traverse the transition tree in postorder without using the C++ stack by
// doing pointer reversal.
void Map::TraverseTransitionTree(TraverseCallback callback, void* data) {
  // Make sure that we do not allocate in the callback.
  DisallowHeapAllocation no_allocation;

  TraversableMap* current = static_cast<TraversableMap*>(this);
  // Get the root constructor here to restore it later when finished iterating
  // over maps.
  Object* root_constructor = constructor();
  while (true) {
    TraversableMap* child = current->ChildIteratorNext(root_constructor);
    if (child != NULL) {
      // Descend: remember the parent in the child's map field.
      child->SetParent(current);
      current = child;
    } else {
      // All children visited: invoke the callback (postorder) and ascend.
      TraversableMap* parent = current->GetAndResetParent();
      callback(current, data);
      if (current == this) break;
      current = parent;
    }
  }
}


// Adds (name, code) to |code_cache|. NORMAL-typed code goes into a hash
// table; everything else into the flat default cache.
void CodeCache::Update(
    Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) {
  // The number of monomorphic stubs for normal load/store/call IC's can grow to
  // a large number and therefore they need to go into a hash table. They are
  // used to load global properties from cells.
  if (code->type() == Code::NORMAL) {
    // Make sure that a hash table is allocated for the normal load code cache.
    if (code_cache->normal_type_cache()->IsUndefined()) {
      Handle<Object> result =
          CodeCacheHashTable::New(code_cache->GetIsolate(),
                                  CodeCacheHashTable::kInitialSize);
      code_cache->set_normal_type_cache(*result);
    }
    UpdateNormalTypeCache(code_cache, name, code);
  } else {
    ASSERT(code_cache->default_cache()->IsFixedArray());
    UpdateDefaultCache(code_cache, name, code);
  }
}


// Inserts (name, code) into the flat default cache, overwriting an entry
// with the same name and typeless flags, reusing deleted slots, and growing
// the backing array when full.
void CodeCache::UpdateDefaultCache(
    Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) {
  // When updating the default code cache we disregard the type encoded in the
  // flags. This allows call constant stubs to overwrite call field
  // stubs, etc.
  Code::Flags flags = Code::RemoveTypeFromFlags(code->flags());

  // First check whether we can update existing code cache without
  // extending it.
  Handle<FixedArray> cache = handle(code_cache->default_cache());
  int length = cache->length();
  {
    DisallowHeapAllocation no_alloc;
    int deleted_index = -1;
    for (int i = 0; i < length; i += kCodeCacheEntrySize) {
      Object* key = cache->get(i);
      if (key->IsNull()) {
        // Deleted slot: remember the first one for possible reuse.
        if (deleted_index < 0) deleted_index = i;
        continue;
      }
      if (key->IsUndefined()) {
        // End of the used region: insert here, or in an earlier deleted slot.
        if (deleted_index >= 0) i = deleted_index;
        cache->set(i + kCodeCacheEntryNameOffset, *name);
        cache->set(i + kCodeCacheEntryCodeOffset, *code);
        return;
      }
      if (name->Equals(Name::cast(key))) {
        Code::Flags found =
            Code::cast(cache->get(i + kCodeCacheEntryCodeOffset))->flags();
        if (Code::RemoveTypeFromFlags(found) == flags) {
          // Same name and typeless flags: overwrite the code object.
          cache->set(i + kCodeCacheEntryCodeOffset, *code);
          return;
        }
      }
    }

    // Reached the end of the code cache. If there were deleted
    // elements, reuse the space for the first of them.
    if (deleted_index >= 0) {
      cache->set(deleted_index + kCodeCacheEntryNameOffset, *name);
      cache->set(deleted_index + kCodeCacheEntryCodeOffset, *code);
      return;
    }
  }

  // Extend the code cache with some new entries (at least one). Must be a
  // multiple of the entry size.
  int new_length = length + ((length >> 1)) + kCodeCacheEntrySize;
  new_length = new_length - new_length % kCodeCacheEntrySize;
  ASSERT((new_length % kCodeCacheEntrySize) == 0);
  cache = FixedArray::CopySize(cache, new_length);

  // Add the (name, code) pair to the new cache.
  cache->set(length + kCodeCacheEntryNameOffset, *name);
  cache->set(length + kCodeCacheEntryCodeOffset, *code);
  code_cache->set_default_cache(*cache);
}


// Inserts (name, code) into the NORMAL-type hash table cache.
void CodeCache::UpdateNormalTypeCache(
    Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) {
  // Adding a new entry can cause a new cache to be allocated.
  Handle<CodeCacheHashTable> cache(
      CodeCacheHashTable::cast(code_cache->normal_type_cache()));
  Handle<Object> new_cache = CodeCacheHashTable::Put(cache, name, code);
  code_cache->set_normal_type_cache(*new_cache);
}


// Looks up |name| with exact |flags|: the flat default cache is consulted
// first, then the NORMAL-type hash table. Returns undefined on a miss.
Object* CodeCache::Lookup(Name* name, Code::Flags flags) {
  Object* result = LookupDefaultCache(name, Code::RemoveTypeFromFlags(flags));
  if (result->IsCode()) {
    // The default cache matched on typeless flags; re-check the full flags.
    if (Code::cast(result)->flags() == flags) return result;
    return GetHeap()->undefined_value();
  }
  return LookupNormalTypeCache(name, flags);
}


// Linear scan of the flat default cache, matching name and typeless flags.
Object* CodeCache::LookupDefaultCache(Name* name, Code::Flags flags) {
  FixedArray* cache = default_cache();
  int length = cache->length();
  for (int i = 0; i < length; i += kCodeCacheEntrySize) {
    Object* key = cache->get(i + kCodeCacheEntryNameOffset);
    // Skip deleted elements.
    if (key->IsNull()) continue;
    // Undefined marks the end of the used region.
    if (key->IsUndefined()) return key;
    if (name->Equals(Name::cast(key))) {
      Code* code = Code::cast(cache->get(i + kCodeCacheEntryCodeOffset));
      if (Code::RemoveTypeFromFlags(code->flags()) == flags) {
        return code;
      }
    }
  }
  return GetHeap()->undefined_value();
}


// Looks up |name|/|flags| in the NORMAL-type hash table, or returns
// undefined when the table was never allocated.
Object* CodeCache::LookupNormalTypeCache(Name* name, Code::Flags flags) {
  if (!normal_type_cache()->IsUndefined()) {
    CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
    return cache->Lookup(name, flags);
  } else {
    return GetHeap()->undefined_value();
  }
}


// Returns an index usable by RemoveByIndex, or -1 when not present. For the
// default cache this is the code slot's array index (name index + 1).
int CodeCache::GetIndex(Object* name, Code* code) {
  if (code->type() == Code::NORMAL) {
    if (normal_type_cache()->IsUndefined()) return -1;
    CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
    return cache->GetIndex(Name::cast(name), code->flags());
  }

  FixedArray* array = default_cache();
  int len = array->length();
  for (int i = 0; i < len; i += kCodeCacheEntrySize) {
    if (array->get(i + kCodeCacheEntryCodeOffset) == code) return i + 1;
  }
  return -1;
}


// Removes the entry at |index| (as returned by GetIndex for this same
// name/code pair).
void CodeCache::RemoveByIndex(Object* name, Code* code, int index) {
  if (code->type() == Code::NORMAL) {
    ASSERT(!normal_type_cache()->IsUndefined());
    CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
    ASSERT(cache->GetIndex(Name::cast(name), code->flags()) == index);
    cache->RemoveByIndex(index);
  } else {
    FixedArray* array = default_cache();
    ASSERT(array->length() >= index && array->get(index)->IsCode());
    // Use null instead of undefined for deleted elements to distinguish
    // deleted elements from unused elements. This distinction is used
    // when looking up in the cache and when updating the cache.
    ASSERT_EQ(1, kCodeCacheEntryCodeOffset - kCodeCacheEntryNameOffset);
    array->set_null(index - 1);  // Name.
    array->set_null(index);      // Code.
  }
}


// The key in the code cache hash table consists of the property name and the
// code object. The actual match is on the name and the code flags. If a key
// is created using the flags and not a code object it can only be used for
// lookup not to create a new entry.
class CodeCacheHashTableKey : public HashTableKey {
 public:
  // Lookup-only key: no code object available, so AsHandle must not be used.
  CodeCacheHashTableKey(Handle<Name> name, Code::Flags flags)
      : name_(name), flags_(flags), code_() { }

  // Insertion-capable key: the flags are derived from the code object.
  CodeCacheHashTableKey(Handle<Name> name, Handle<Code> code)
      : name_(name), flags_(code->flags()), code_(code) { }

  // Stored entries are (name, code) FixedArray pairs; match on name + flags.
  bool IsMatch(Object* other) V8_OVERRIDE {
    if (!other->IsFixedArray()) return false;
    FixedArray* pair = FixedArray::cast(other);
    Name* name = Name::cast(pair->get(0));
    Code::Flags flags = Code::cast(pair->get(1))->flags();
    if (flags != flags_) {
      return false;
    }
    return name_->Equals(name);
  }

  static uint32_t NameFlagsHashHelper(Name* name, Code::Flags flags) {
    return name->Hash() ^ flags;
  }

  uint32_t Hash() V8_OVERRIDE { return NameFlagsHashHelper(*name_, flags_); }

  uint32_t HashForObject(Object* obj) V8_OVERRIDE {
    FixedArray* pair = FixedArray::cast(obj);
    Name* name = Name::cast(pair->get(0));
    Code* code = Code::cast(pair->get(1));
    return NameFlagsHashHelper(name, code->flags());
  }

  // Materializes the stored (name, code) pair; requires the insertion
  // constructor (ToHandleChecked asserts a code object is present).
  MUST_USE_RESULT Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
    Handle<Code> code = code_.ToHandleChecked();
    Handle<FixedArray> pair = isolate->factory()->NewFixedArray(2);
    pair->set(0, *name_);
    pair->set(1, *code);
    return pair;
  }

 private:
  Handle<Name> name_;
  Code::Flags flags_;
  // TODO(jkummerow): We should be able to get by without this.
  MaybeHandle<Code> code_;
};


Object* CodeCacheHashTable::Lookup(Name* name, Code::Flags flags) {
  DisallowHeapAllocation no_alloc;
  CodeCacheHashTableKey key(handle(name), flags);
  int entry = FindEntry(&key);
  if (entry == kNotFound) return GetHeap()->undefined_value();
  // The code object lives right after the (name, code) pair key.
  return get(EntryToIndex(entry) + 1);
}


Handle<CodeCacheHashTable> CodeCacheHashTable::Put(
    Handle<CodeCacheHashTable> cache, Handle<Name> name, Handle<Code> code) {
  CodeCacheHashTableKey key(name, code);

  // May reallocate; callers must use the returned table.
  Handle<CodeCacheHashTable> new_cache = EnsureCapacity(cache, 1, &key);

  int entry = new_cache->FindInsertionEntry(key.Hash());
  Handle<Object> k = key.AsHandle(cache->GetIsolate());

  new_cache->set(EntryToIndex(entry), *k);
  new_cache->set(EntryToIndex(entry) + 1, *code);
  new_cache->ElementAdded();
  return new_cache;
}


// Returns the hash table entry index of (name, flags), or -1 when absent.
int CodeCacheHashTable::GetIndex(Name* name, Code::Flags flags) {
  DisallowHeapAllocation no_alloc;
  CodeCacheHashTableKey key(handle(name), flags);
  int entry = FindEntry(&key);
  return (entry == kNotFound) ? -1 : entry;
}


// Deletes the entry at |index| by overwriting key and value with the hole.
void CodeCacheHashTable::RemoveByIndex(int index) {
  ASSERT(index >= 0);
  Heap* heap = GetHeap();
  set(EntryToIndex(index), heap->the_hole_value());
  set(EntryToIndex(index) + 1, heap->the_hole_value());
  ElementRemoved();
}


// Adds (maps, flags) -> code to |code_cache|, allocating the hash table on
// first use.
void PolymorphicCodeCache::Update(Handle<PolymorphicCodeCache> code_cache,
                                  MapHandleList* maps,
                                  Code::Flags flags,
                                  Handle<Code> code) {
  Isolate* isolate = code_cache->GetIsolate();
  if (code_cache->cache()->IsUndefined()) {
    Handle<PolymorphicCodeCacheHashTable> result =
        PolymorphicCodeCacheHashTable::New(
            isolate,
            PolymorphicCodeCacheHashTable::kInitialSize);
    code_cache->set_cache(*result);
  } else {
    // This entry shouldn't be contained in the cache yet.
    ASSERT(PolymorphicCodeCacheHashTable::cast(code_cache->cache())
               ->Lookup(maps, flags)->IsUndefined());
  }
  Handle<PolymorphicCodeCacheHashTable> hash_table =
      handle(PolymorphicCodeCacheHashTable::cast(code_cache->cache()));
  Handle<PolymorphicCodeCacheHashTable> new_cache =
      PolymorphicCodeCacheHashTable::Put(hash_table, maps, flags, code);
  code_cache->set_cache(*new_cache);
}


// Returns the cached code for (maps, flags), or undefined when absent.
Handle<Object> PolymorphicCodeCache::Lookup(MapHandleList* maps,
                                            Code::Flags flags) {
  if (!cache()->IsUndefined()) {
    PolymorphicCodeCacheHashTable* hash_table =
        PolymorphicCodeCacheHashTable::cast(cache());
    return Handle<Object>(hash_table->Lookup(maps, flags), GetIsolate());
  } else {
    return GetIsolate()->factory()->undefined_value();
  }
}


// Despite their name, objects of this class are not stored in the actual
// hash table; instead they're temporarily used for lookups. It is therefore
// safe to have a weak (non-owning) pointer to a MapList as a member field.
class PolymorphicCodeCacheHashTableKey : public HashTableKey {
 public:
  // Callers must ensure that |maps| outlives the newly constructed object.
  PolymorphicCodeCacheHashTableKey(MapHandleList* maps, int code_flags)
      : maps_(maps),
        code_flags_(code_flags) {}

  // Order-insensitive comparison of the map sets, plus exact flag equality.
  bool IsMatch(Object* other) V8_OVERRIDE {
    MapHandleList other_maps(kDefaultListAllocationSize);
    int other_flags;
    FromObject(other, &other_flags, &other_maps);
    if (code_flags_ != other_flags) return false;
    if (maps_->length() != other_maps.length()) return false;
    // Compare just the hashes first because it's faster.
    int this_hash = MapsHashHelper(maps_, code_flags_);
    int other_hash = MapsHashHelper(&other_maps, other_flags);
    if (this_hash != other_hash) return false;

    // Full comparison: for each map in maps_, look for an equivalent map in
    // other_maps. This implementation is slow, but probably good enough for
    // now because the lists are short (<= 4 elements currently).
    for (int i = 0; i < maps_->length(); ++i) {
      bool match_found = false;
      for (int j = 0; j < other_maps.length(); ++j) {
        if (*(maps_->at(i)) == *(other_maps.at(j))) {
          match_found = true;
          break;
        }
      }
      if (!match_found) return false;
    }
    return true;
  }

  // XOR of the maps' hashes with the flags; insensitive to map order.
  static uint32_t MapsHashHelper(MapHandleList* maps, int code_flags) {
    uint32_t hash = code_flags;
    for (int i = 0; i < maps->length(); ++i) {
      hash ^= maps->at(i)->Hash();
    }
    return hash;
  }

  uint32_t Hash() V8_OVERRIDE {
    return MapsHashHelper(maps_, code_flags_);
  }

  uint32_t HashForObject(Object* obj) V8_OVERRIDE {
    MapHandleList other_maps(kDefaultListAllocationSize);
    int other_flags;
    FromObject(obj, &other_flags, &other_maps);
    return MapsHashHelper(&other_maps, other_flags);
  }

  // Encodes the key as a [flags, map0, map1, ...] FixedArray for storage.
  MUST_USE_RESULT Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
    // The maps in |maps_| must be copied to a newly allocated FixedArray,
    // both because the referenced MapList is short-lived, and because C++
    // objects can't be stored in the heap anyway.
    Handle<FixedArray> list =
        isolate->factory()->NewUninitializedFixedArray(maps_->length() + 1);
    list->set(0, Smi::FromInt(code_flags_));
    for (int i = 0; i < maps_->length(); ++i) {
      list->set(i + 1, *maps_->at(i));
    }
    return list;
  }

 private:
  // Decodes a stored [flags, map...] FixedArray back into flags and maps.
  static MapHandleList* FromObject(Object* obj,
                                   int* code_flags,
                                   MapHandleList* maps) {
    FixedArray* list = FixedArray::cast(obj);
    maps->Rewind(0);
    *code_flags = Smi::cast(list->get(0))->value();
    for (int i = 1; i < list->length(); ++i) {
      maps->Add(Handle<Map>(Map::cast(list->get(i))));
    }
    return maps;
  }

  MapHandleList* maps_;  // weak.
  int code_flags_;
  static const int kDefaultListAllocationSize = kMaxKeyedPolymorphism + 1;
};


Object* PolymorphicCodeCacheHashTable::Lookup(MapHandleList* maps,
                                              int code_kind) {
  DisallowHeapAllocation no_alloc;
  PolymorphicCodeCacheHashTableKey key(maps, code_kind);
  int entry = FindEntry(&key);
  if (entry == kNotFound) return GetHeap()->undefined_value();
  // The code object lives right after the encoded key.
  return get(EntryToIndex(entry) + 1);
}


Handle<PolymorphicCodeCacheHashTable> PolymorphicCodeCacheHashTable::Put(
    Handle<PolymorphicCodeCacheHashTable> hash_table,
    MapHandleList* maps,
    int code_kind,
    Handle<Code> code) {
  PolymorphicCodeCacheHashTableKey key(maps, code_kind);
  // May reallocate; callers must use the returned table.
  Handle<PolymorphicCodeCacheHashTable> cache =
      EnsureCapacity(hash_table, 1, &key);
  int entry = cache->FindInsertionEntry(key.Hash());

  Handle<Object> obj = key.AsHandle(hash_table->GetIsolate());
  cache->set(EntryToIndex(entry), *obj);
  cache->set(EntryToIndex(entry) + 1, *code);
  cache->ElementAdded();
  return cache;
}


// Right-trims this array in place down to |new_length| elements.
void FixedArray::Shrink(int new_length) {
  ASSERT(0 <= new_length && new_length <= length());
  if (new_length < length()) {
    RightTrimFixedArray<Heap::FROM_MUTATOR>(
        GetHeap(), this, length() - new_length);
  }
}


// Returns |content| extended with the keys of |array|'s elements.
MaybeHandle<FixedArray> FixedArray::AddKeysFromArrayLike(
    Handle<FixedArray> content,
    Handle<JSObject> array) {
  ASSERT(array->IsJSArray() || array->HasSloppyArgumentsElements());
  ElementsAccessor* accessor = array->GetElementsAccessor();
  Handle<FixedArray> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      array->GetIsolate(), result,
      accessor->AddElementsToFixedArray(array, array, content),
      FixedArray);

#ifdef ENABLE_SLOW_ASSERTS
  if (FLAG_enable_slow_asserts) {
    DisallowHeapAllocation no_allocation;
    // Keys must be numbers (indices) or names.
    for (int i = 0; i < result->length(); i++) {
      Object* current = result->get(i);
      ASSERT(current->IsNumber() || current->IsName());
    }
  }
#endif
  return result;
}


// Returns the union of the two key arrays.
MaybeHandle<FixedArray> FixedArray::UnionOfKeys(Handle<FixedArray> first,
                                                Handle<FixedArray> second) {
  ElementsAccessor* accessor = ElementsAccessor::ForArray(second);
  Handle<FixedArray> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      first->GetIsolate(), result,
      accessor->AddElementsToFixedArray(
          Handle<Object>::null(),     // receiver
          Handle<JSObject>::null(),   // holder
          first,
          Handle<FixedArrayBase>::cast(second)),
      FixedArray);

#ifdef ENABLE_SLOW_ASSERTS
  if (FLAG_enable_slow_asserts) {
    DisallowHeapAllocation no_allocation;
    // Keys must be numbers (indices) or names.
    for (int i = 0; i < result->length(); i++) {
      Object* current = result->get(i);
      ASSERT(current->IsNumber() || current->IsName());
    }
  }
#endif
  return result;
}


// Returns a copy of |array| resized to |new_length|, preserving the first
// min(old length, new length) elements.
Handle<FixedArray> FixedArray::CopySize(
    Handle<FixedArray> array, int new_length, PretenureFlag pretenure) {
  Isolate* isolate = array->GetIsolate();
  if (new_length == 0) return isolate->factory()->empty_fixed_array();
  Handle<FixedArray> result =
      isolate->factory()->NewFixedArray(new_length, pretenure);
  // Copy the content.
  DisallowHeapAllocation no_gc;
  int len = array->length();
  if (new_length < len) len = new_length;
  // We are taking the map from the old fixed array so the map is sure to
  // be an immortal immutable object.
  result->set_map_no_write_barrier(array->map());
  WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
  for (int i = 0; i < len; i++) {
    result->set(i, array->get(i), mode);
  }
  return result;
}


// Copies |len| elements starting at |pos| into |dest| starting at
// |dest_pos|, with a single write-barrier-mode decision for the whole run.
void FixedArray::CopyTo(int pos, FixedArray* dest, int dest_pos, int len) {
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = dest->GetWriteBarrierMode(no_gc);
  for (int index = 0; index < len; index++) {
    dest->set(dest_pos+index, get(pos+index), mode);
  }
}


#ifdef DEBUG
// Element-wise comparison by object identity.
bool FixedArray::IsEqualTo(FixedArray* other) {
  if (length() != other->length()) return false;
  for (int i = 0 ; i < length(); ++i) {
    if (get(i) != other->get(i)) return false;
  }
  return true;
}
#endif


// Allocates a descriptor array with room for |number_of_descriptors|
// descriptors plus |slack| spare slots.
Handle<DescriptorArray> DescriptorArray::Allocate(Isolate* isolate,
                                                  int number_of_descriptors,
                                                  int slack) {
  ASSERT(0 <= number_of_descriptors);
  Factory* factory = isolate->factory();
  // Do not use DescriptorArray::cast on incomplete object.
  int size = number_of_descriptors + slack;
  if (size == 0) return factory->empty_descriptor_array();
  // Allocate the array of keys.
  Handle<FixedArray> result = factory->NewFixedArray(LengthFor(size));

  result->set(kDescriptorLengthIndex, Smi::FromInt(number_of_descriptors));
  // Smi zero means "no enum cache".
  result->set(kEnumCacheIndex, Smi::FromInt(0));
  return Handle<DescriptorArray>::cast(result);
}


// Drops the enum cache by resetting its slot to the Smi zero sentinel.
void DescriptorArray::ClearEnumCache() {
  set(kEnumCacheIndex, Smi::FromInt(0));
}


// Overwrites the descriptor at |index|, preserving its position in the
// sorted key order.
void DescriptorArray::Replace(int index, Descriptor* descriptor) {
  descriptor->SetSortedKeyIndex(GetSortedKeyIndex(index));
  Set(index, descriptor);
}


// Installs |new_cache| (and optional |new_index_cache|) as this array's
// enum cache, stored via the provided bridge FixedArray.
void DescriptorArray::SetEnumCache(FixedArray* bridge_storage,
                                   FixedArray* new_cache,
                                   Object* new_index_cache) {
  ASSERT(bridge_storage->length() >= kEnumCacheBridgeLength);
  ASSERT(new_index_cache->IsSmi() || new_index_cache->IsFixedArray());
  ASSERT(!IsEmpty());
  // An existing cache may only be replaced by a strictly larger one.
  ASSERT(!HasEnumCache() || new_cache->length() > GetEnumCache()->length());
  FixedArray::cast(bridge_storage)->
      set(kEnumCacheBridgeCacheIndex, new_cache);
  FixedArray::cast(bridge_storage)->
      set(kEnumCacheBridgeIndicesCacheIndex, new_index_cache);
  set(kEnumCacheIndex, bridge_storage);
}


// Copies the descriptor at |index| in |src| into the same slot here.
void DescriptorArray::CopyFrom(int index,
                               DescriptorArray* src,
                               const WhitenessWitness& witness) {
  Object* value = src->GetValue(index);
  PropertyDetails details = src->GetDetails(index);
  Descriptor desc(handle(src->GetKey(index)),
                  handle(value, src->GetIsolate()),
                  details);
  Set(index, &desc, witness);
}


// We need the whiteness witness since sort will reshuffle the entries in the
// descriptor array. If the descriptor array were to be black, the shuffling
// would move a slot that was already recorded as pointing into an evacuation
// candidate. This would result in missing updates upon evacuation.
void DescriptorArray::Sort() {
  // In-place heap sort.
8286 int len = number_of_descriptors(); 8287 // Reset sorting since the descriptor array might contain invalid pointers. 8288 for (int i = 0; i < len; ++i) SetSortedKey(i, i); 8289 // Bottom-up max-heap construction. 8290 // Index of the last node with children 8291 const int max_parent_index = (len / 2) - 1; 8292 for (int i = max_parent_index; i >= 0; --i) { 8293 int parent_index = i; 8294 const uint32_t parent_hash = GetSortedKey(i)->Hash(); 8295 while (parent_index <= max_parent_index) { 8296 int child_index = 2 * parent_index + 1; 8297 uint32_t child_hash = GetSortedKey(child_index)->Hash(); 8298 if (child_index + 1 < len) { 8299 uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash(); 8300 if (right_child_hash > child_hash) { 8301 child_index++; 8302 child_hash = right_child_hash; 8303 } 8304 } 8305 if (child_hash <= parent_hash) break; 8306 SwapSortedKeys(parent_index, child_index); 8307 // Now element at child_index could be < its children. 8308 parent_index = child_index; // parent_hash remains correct. 8309 } 8310 } 8311 8312 // Extract elements and create sorted array. 8313 for (int i = len - 1; i > 0; --i) { 8314 // Put max element at the back of the array. 8315 SwapSortedKeys(0, i); 8316 // Shift down the new top element. 
8317 int parent_index = 0; 8318 const uint32_t parent_hash = GetSortedKey(parent_index)->Hash(); 8319 const int max_parent_index = (i / 2) - 1; 8320 while (parent_index <= max_parent_index) { 8321 int child_index = parent_index * 2 + 1; 8322 uint32_t child_hash = GetSortedKey(child_index)->Hash(); 8323 if (child_index + 1 < i) { 8324 uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash(); 8325 if (right_child_hash > child_hash) { 8326 child_index++; 8327 child_hash = right_child_hash; 8328 } 8329 } 8330 if (child_hash <= parent_hash) break; 8331 SwapSortedKeys(parent_index, child_index); 8332 parent_index = child_index; 8333 } 8334 } 8335 ASSERT(IsSortedNoDuplicates()); 8336 } 8337 8338 8339 Handle<AccessorPair> AccessorPair::Copy(Handle<AccessorPair> pair) { 8340 Handle<AccessorPair> copy = pair->GetIsolate()->factory()->NewAccessorPair(); 8341 copy->set_getter(pair->getter()); 8342 copy->set_setter(pair->setter()); 8343 return copy; 8344 } 8345 8346 8347 Object* AccessorPair::GetComponent(AccessorComponent component) { 8348 Object* accessor = get(component); 8349 return accessor->IsTheHole() ? 
      GetHeap()->undefined_value() : accessor;
}


// Allocates input data for |deopt_entry_count| deoptimization entries.
Handle<DeoptimizationInputData> DeoptimizationInputData::New(
    Isolate* isolate,
    int deopt_entry_count,
    PretenureFlag pretenure) {
  ASSERT(deopt_entry_count > 0);
  return Handle<DeoptimizationInputData>::cast(
      isolate->factory()->NewFixedArray(
          LengthFor(deopt_entry_count), pretenure));
}


// Allocates output data for |number_of_deopt_points| deopt points; shares
// the canonical empty fixed array when there are none.
Handle<DeoptimizationOutputData> DeoptimizationOutputData::New(
    Isolate* isolate,
    int number_of_deopt_points,
    PretenureFlag pretenure) {
  Handle<FixedArray> result;
  if (number_of_deopt_points == 0) {
    result = isolate->factory()->empty_fixed_array();
  } else {
    result = isolate->factory()->NewFixedArray(
        LengthOfFixedArray(number_of_deopt_points), pretenure);
  }
  return Handle<DeoptimizationOutputData>::cast(result);
}


#ifdef DEBUG
// Element-wise identity comparison; used only in debug checks.
bool DescriptorArray::IsEqualTo(DescriptorArray* other) {
  if (IsEmpty()) return other->IsEmpty();
  if (other->IsEmpty()) return false;
  if (length() != other->length()) return false;
  for (int i = 0; i < length(); ++i) {
    if (get(i) != other->get(i)) return false;
  }
  return true;
}
#endif


// Cheap sanity check used by robust string traversal: the string must at
// least live inside this isolate's heap.
bool String::LooksValid() {
  if (!GetIsolate()->heap()->Contains(this)) return false;
  return true;
}


// Returns a view of the character data if this string is flat (or a cons
// with an empty right child / a slice over a flat parent); otherwise
// returns a non-flat FlatContent. The returned pointers are only valid
// while heap allocation is disallowed.
String::FlatContent String::GetFlatContent() {
  ASSERT(!AllowHeapAllocation::IsAllowed());
  int length = this->length();
  StringShape shape(this);
  String* string = this;
  int offset = 0;
  if (shape.representation_tag() == kConsStringTag) {
    ConsString* cons = ConsString::cast(string);
    if (cons->second()->length() != 0) {
      return FlatContent();
    }
    string = cons->first();
    shape = StringShape(string);
  }
  if (shape.representation_tag() == kSlicedStringTag) {
    SlicedString* slice = SlicedString::cast(string);
    offset = slice->offset();
    string = slice->parent();
    shape = StringShape(string);
    // Parents of slices are always flat (never cons or slice themselves).
    ASSERT(shape.representation_tag() != kConsStringTag &&
           shape.representation_tag() != kSlicedStringTag);
  }
  if (shape.encoding_tag() == kOneByteStringTag) {
    const uint8_t* start;
    if (shape.representation_tag() == kSeqStringTag) {
      start = SeqOneByteString::cast(string)->GetChars();
    } else {
      start = ExternalAsciiString::cast(string)->GetChars();
    }
    return FlatContent(start + offset, length);
  } else {
    ASSERT(shape.encoding_tag() == kTwoByteStringTag);
    const uc16* start;
    if (shape.representation_tag() == kSeqStringTag) {
      start = SeqTwoByteString::cast(string)->GetChars();
    } else {
      start = ExternalTwoByteString::cast(string)->GetChars();
    }
    return FlatContent(start + offset, length);
  }
}


// Converts characters [offset, offset + length) of this string to a fresh
// NUL-terminated UTF-8 buffer. If |length_return| is non-NULL it receives
// the number of UTF-8 bytes written (excluding the terminator). With
// DISALLOW_NULLS, embedded NUL characters are replaced by spaces.
SmartArrayPointer<char> String::ToCString(AllowNullsFlag allow_nulls,
                                          RobustnessFlag robust_flag,
                                          int offset,
                                          int length,
                                          int* length_return) {
  if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
    return SmartArrayPointer<char>(NULL);
  }
  Heap* heap = GetHeap();

  // A negative length means to the end of the string.
  if (length < 0) length = kMaxInt - offset;

  // Compute the size of the UTF-8 string. Start at the specified offset.
  Access<ConsStringIteratorOp> op(
      heap->isolate()->objects_string_iterator());
  StringCharacterStream stream(this, op.value(), offset);
  int character_position = offset;
  int utf8_bytes = 0;
  int last = unibrow::Utf16::kNoPreviousCharacter;
  while (stream.HasMore() && character_position++ < offset + length) {
    uint16_t character = stream.GetNext();
    utf8_bytes += unibrow::Utf8::Length(character, last);
    last = character;
  }

  if (length_return) {
    *length_return = utf8_bytes;
  }

  char* result = NewArray<char>(utf8_bytes + 1);

  // Convert the UTF-16 string to a UTF-8 buffer. Start at the specified offset.
  stream.Reset(this, offset);
  character_position = offset;
  int utf8_byte_position = 0;
  last = unibrow::Utf16::kNoPreviousCharacter;
  while (stream.HasMore() && character_position++ < offset + length) {
    uint16_t character = stream.GetNext();
    if (allow_nulls == DISALLOW_NULLS && character == 0) {
      character = ' ';
    }
    utf8_byte_position +=
        unibrow::Utf8::Encode(result + utf8_byte_position, character, last);
    last = character;
  }
  result[utf8_byte_position] = 0;
  return SmartArrayPointer<char>(result);
}


// Convenience overload: converts the whole string.
SmartArrayPointer<char> String::ToCString(AllowNullsFlag allow_nulls,
                                          RobustnessFlag robust_flag,
                                          int* length_return) {
  return ToCString(allow_nulls, robust_flag, 0, -1, length_return);
}


// Returns a pointer to the two-byte character data starting at |start|.
// Only valid on strings that are two-byte underneath; cons strings must
// have been flattened before calling this.
const uc16* String::GetTwoByteData(unsigned start) {
  ASSERT(!IsOneByteRepresentationUnderneath());
  switch (StringShape(this).representation_tag()) {
    case kSeqStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGetData(start);
    case kExternalStringTag:
      return ExternalTwoByteString::cast(this)->
        ExternalTwoByteStringGetData(start);
    case kSlicedStringTag: {
      SlicedString* slice = SlicedString::cast(this);
      return slice->parent()->GetTwoByteData(start + slice->offset());
    }
    case kConsStringTag:
      UNREACHABLE();
      return NULL;
  }
  UNREACHABLE();
  return NULL;
}


// Copies the whole string into a freshly allocated, NUL-terminated uc16
// buffer.
SmartArrayPointer<uc16> String::ToWideCString(RobustnessFlag robust_flag) {
  if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
    return SmartArrayPointer<uc16>();
  }
  Heap* heap = GetHeap();

  Access<ConsStringIteratorOp> op(
      heap->isolate()->objects_string_iterator());
  StringCharacterStream stream(this, op.value());

  uc16* result = NewArray<uc16>(length() + 1);

  int i = 0;
  while (stream.HasMore()) {
    uint16_t character = stream.GetNext();
    result[i++] = character;
  }
  result[i] = 0;
  return SmartArrayPointer<uc16>(result);
}


// Computes the address of the character data inside the heap object.
const uc16* SeqTwoByteString::SeqTwoByteStringGetData(unsigned start) {
  return reinterpret_cast<uc16*>(
      reinterpret_cast<char*>(this) - kHeapObjectTag + kHeaderSize) + start;
}


// Notifies every live Relocatable on this isolate's list that a GC may have
// moved the data it caches.
void Relocatable::PostGarbageCollectionProcessing(Isolate* isolate) {
  Relocatable* current = isolate->relocatable_top();
  while (current != NULL) {
    current->PostGarbageCollection();
    current = current->prev_;
  }
}


// Reserve space for statics needing saving and restoring.
int Relocatable::ArchiveSpacePerThread() {
  return sizeof(Relocatable*);  // NOLINT
}


// Archive statics that are thread-local.
char* Relocatable::ArchiveState(Isolate* isolate, char* to) {
  *reinterpret_cast<Relocatable**>(to) = isolate->relocatable_top();
  isolate->set_relocatable_top(NULL);
  return to + ArchiveSpacePerThread();
}


// Restore statics that are thread-local.
char* Relocatable::RestoreState(Isolate* isolate, char* from) {
  isolate->set_relocatable_top(*reinterpret_cast<Relocatable**>(from));
  return from + ArchiveSpacePerThread();
}


// Visits the Relocatable list archived in |thread_storage|.
char* Relocatable::Iterate(ObjectVisitor* v, char* thread_storage) {
  Relocatable* top = *reinterpret_cast<Relocatable**>(thread_storage);
  Iterate(v, top);
  return thread_storage + ArchiveSpacePerThread();
}


// Visits the isolate's live Relocatable list.
void Relocatable::Iterate(Isolate* isolate, ObjectVisitor* v) {
  Iterate(v, isolate->relocatable_top());
}


// Walks the intrusive list starting at |top|, visiting each instance.
void Relocatable::Iterate(ObjectVisitor* v, Relocatable* top) {
  Relocatable* current = top;
  while (current != NULL) {
    current->IterateInstance(v);
    current = current->prev_;
  }
}


// Reader over a flat string; re-derives its cached character pointer after
// every GC via PostGarbageCollection().
FlatStringReader::FlatStringReader(Isolate* isolate, Handle<String> str)
    : Relocatable(isolate),
      str_(str.location()),
      length_(str->length()) {
  PostGarbageCollection();
}


// Reader over an external character vector; nothing to relocate.
FlatStringReader::FlatStringReader(Isolate* isolate, Vector<const char> input)
    : Relocatable(isolate),
      str_(0),
      is_ascii_(true),
      length_(input.length()),
      start_(input.start()) { }


// Re-caches the character pointer of the underlying (flat) string, which a
// GC may have moved.
void FlatStringReader::PostGarbageCollection() {
  if (str_ == NULL) return;
  Handle<String> str(str_);
  ASSERT(str->IsFlat());
  DisallowHeapAllocation no_gc;
  // This does not actually prevent the vector from being relocated later.
  String::FlatContent content = str->GetFlatContent();
  ASSERT(content.IsFlat());
  is_ascii_ = content.IsAscii();
  if (is_ascii_) {
    start_ = content.ToOneByteVector().start();
  } else {
    start_ = content.ToUC16Vector().start();
  }
}


// Prepares iteration over |cons_string| starting at |offset|; the stack is
// deliberately marked blown so the first Continue() runs Search().
void ConsStringIteratorOp::Initialize(ConsString* cons_string, int offset) {
  ASSERT(cons_string != NULL);
  root_ = cons_string;
  consumed_ = offset;
  // Force stack blown condition to trigger restart.
  depth_ = 1;
  maximum_depth_ = kStackSize + depth_;
  ASSERT(StackBlown());
}


// Returns the next leaf string, or NULL when iteration is complete.
// *offset_out receives the offset into the returned leaf.
String* ConsStringIteratorOp::Continue(int* offset_out) {
  ASSERT(depth_ != 0);
  ASSERT_EQ(0, *offset_out);
  bool blew_stack = StackBlown();
  String* string = NULL;
  // Get the next leaf if there is one.
  if (!blew_stack) string = NextLeaf(&blew_stack);
  // Restart search from root.
  if (blew_stack) {
    ASSERT(string == NULL);
    string = Search(offset_out);
  }
  // Ensure future calls return null immediately.
  if (string == NULL) Reset(NULL);
  return string;
}


// Descends from the root to the leaf containing character index consumed_,
// rebuilding the traversal stack along the way.
String* ConsStringIteratorOp::Search(int* offset_out) {
  ConsString* cons_string = root_;
  // Reset the stack, pushing the root string.
  depth_ = 1;
  maximum_depth_ = 1;
  frames_[0] = cons_string;
  const int consumed = consumed_;
  int offset = 0;
  while (true) {
    // Loop until the string is found which contains the target offset.
    String* string = cons_string->first();
    int length = string->length();
    int32_t type;
    if (consumed < offset + length) {
      // Target offset is in the left branch.
      // Keep going if we're still in a ConsString.
      type = string->map()->instance_type();
      if ((type & kStringRepresentationMask) == kConsStringTag) {
        cons_string = ConsString::cast(string);
        PushLeft(cons_string);
        continue;
      }
      // Tell the stack we're done descending.
      AdjustMaximumDepth();
    } else {
      // Descend right.
      // Update progress through the string.
      offset += length;
      // Keep going if we're still in a ConsString.
      string = cons_string->second();
      type = string->map()->instance_type();
      if ((type & kStringRepresentationMask) == kConsStringTag) {
        cons_string = ConsString::cast(string);
        PushRight(cons_string);
        continue;
      }
      // Need this to be updated for the current string.
      length = string->length();
      // Account for the possibility of an empty right leaf.
      // This happens only if we have asked for an offset outside the string.
      if (length == 0) {
        // Reset so future operations will return null immediately.
        Reset(NULL);
        return NULL;
      }
      // Tell the stack we're done descending.
      AdjustMaximumDepth();
      // Pop stack so next iteration is in correct place.
      Pop();
    }
    ASSERT(length != 0);
    // Adjust return values and exit.
    consumed_ = offset + length;
    *offset_out = consumed - offset;
    return string;
  }
  UNREACHABLE();
  return NULL;
}


// Advances to the next leaf using the traversal stack. Sets *blew_stack if
// the stack no longer covers the current position (caller must re-Search).
String* ConsStringIteratorOp::NextLeaf(bool* blew_stack) {
  while (true) {
    // Tree traversal complete.
    if (depth_ == 0) {
      *blew_stack = false;
      return NULL;
    }
    // We've lost track of higher nodes.
    if (StackBlown()) {
      *blew_stack = true;
      return NULL;
    }
    // Go right.
    ConsString* cons_string = frames_[OffsetForDepth(depth_ - 1)];
    String* string = cons_string->second();
    int32_t type = string->map()->instance_type();
    if ((type & kStringRepresentationMask) != kConsStringTag) {
      // Pop stack so next iteration is in correct place.
      Pop();
      int length = string->length();
      // Could be a flattened ConsString.
      if (length == 0) continue;
      consumed_ += length;
      return string;
    }
    cons_string = ConsString::cast(string);
    PushRight(cons_string);
    // Need to traverse all the way left.
    while (true) {
      // Continue left.
      string = cons_string->first();
      type = string->map()->instance_type();
      if ((type & kStringRepresentationMask) != kConsStringTag) {
        AdjustMaximumDepth();
        int length = string->length();
        ASSERT(length != 0);
        consumed_ += length;
        return string;
      }
      cons_string = ConsString::cast(string);
      PushLeft(cons_string);
    }
  }
  UNREACHABLE();
  return NULL;
}


// Returns the character at |index| by walking down the cons tree without
// recursion.
uint16_t ConsString::ConsStringGet(int index) {
  ASSERT(index >= 0 && index < this->length());

  // Check for a flattened cons string.
  if (second()->length() == 0) {
    String* left = first();
    return left->Get(index);
  }

  String* string = String::cast(this);

  while (true) {
    if (StringShape(string).IsCons()) {
      ConsString* cons_string = ConsString::cast(string);
      String* left = cons_string->first();
      if (left->length() > index) {
        string = left;
      } else {
        index -= left->length();
        string = cons_string->second();
      }
    } else {
      return string->Get(index);
    }
  }

  UNREACHABLE();
  return 0;
}


// Delegates to the parent string, adjusted by the slice offset.
uint16_t SlicedString::SlicedStringGet(int index) {
  return parent()->Get(offset() + index);
}


// Copies characters [f, t) of |src| into |sink|, iteratively unwinding cons
// and sliced strings.
template <typename sinkchar>
void String::WriteToFlat(String* src,
                         sinkchar* sink,
                         int f,
                         int t) {
  String* source = src;
  int from = f;
  int to = t;
  while (true) {
    ASSERT(0 <= from && from <= to && to <= source->length());
    switch (StringShape(source).full_representation_tag()) {
      case kOneByteStringTag | kExternalStringTag: {
        CopyChars(sink,
                  ExternalAsciiString::cast(source)->GetChars() + from,
                  to - from);
        return;
      }
      case kTwoByteStringTag | kExternalStringTag: {
        const uc16* data =
            ExternalTwoByteString::cast(source)->GetChars();
        CopyChars(sink,
                  data + from,
                  to - from);
        return;
      }
      case
          kOneByteStringTag | kSeqStringTag: {
        CopyChars(sink,
                  SeqOneByteString::cast(source)->GetChars() + from,
                  to - from);
        return;
      }
      case kTwoByteStringTag | kSeqStringTag: {
        CopyChars(sink,
                  SeqTwoByteString::cast(source)->GetChars() + from,
                  to - from);
        return;
      }
      case kOneByteStringTag | kConsStringTag:
      case kTwoByteStringTag | kConsStringTag: {
        ConsString* cons_string = ConsString::cast(source);
        String* first = cons_string->first();
        int boundary = first->length();
        if (to - boundary >= boundary - from) {
          // Right hand side is longer. Recurse over left.
          if (from < boundary) {
            WriteToFlat(first, sink, from, boundary);
            sink += boundary - from;
            from = 0;
          } else {
            from -= boundary;
          }
          to -= boundary;
          source = cons_string->second();
        } else {
          // Left hand side is longer. Recurse over right.
          if (to > boundary) {
            String* second = cons_string->second();
            // When repeatedly appending to a string, we get a cons string that
            // is unbalanced to the left, a list, essentially. We inline the
            // common case of sequential ascii right child.
            if (to - boundary == 1) {
              sink[boundary - from] = static_cast<sinkchar>(second->Get(0));
            } else if (second->IsSeqOneByteString()) {
              CopyChars(sink + boundary - from,
                        SeqOneByteString::cast(second)->GetChars(),
                        to - boundary);
            } else {
              WriteToFlat(second,
                          sink + boundary - from,
                          0,
                          to - boundary);
            }
            to = boundary;
          }
          source = first;
        }
        break;
      }
      case kOneByteStringTag | kSlicedStringTag:
      case kTwoByteStringTag | kSlicedStringTag: {
        SlicedString* slice = SlicedString::cast(source);
        unsigned offset = slice->offset();
        WriteToFlat(slice->parent(), sink, from + offset, to + offset);
        return;
      }
    }
  }
}


// Scans |src| for '\n' characters and records their positions in
// |line_ends|. If |include_ending_line| is set, a final line without a
// terminating newline is counted as well.
template <typename SourceChar>
static void CalculateLineEndsImpl(Isolate* isolate,
                                  List<int>* line_ends,
                                  Vector<const SourceChar> src,
                                  bool include_ending_line) {
  const int src_len = src.length();
  StringSearch<uint8_t, SourceChar> search(isolate, STATIC_ASCII_VECTOR("\n"));

  // Find and record line ends.
  int position = 0;
  while (position != -1 && position < src_len) {
    position = search.Search(src, position);
    if (position != -1) {
      line_ends->Add(position);
      position++;
    } else if (include_ending_line) {
      // Even if the last line misses a line end, it is counted.
      line_ends->Add(src_len);
      return;
    }
  }
}


// Returns a FixedArray of Smi line-end positions for |src|.
Handle<FixedArray> String::CalculateLineEnds(Handle<String> src,
                                             bool include_ending_line) {
  src = Flatten(src);
  // Rough estimate of line count based on a roughly estimated average
  // length of (unpacked) code.
  int line_count_estimate = src->length() >> 4;
  List<int> line_ends(line_count_estimate);
  Isolate* isolate = src->GetIsolate();
  { DisallowHeapAllocation no_allocation;  // ensure vectors stay valid.
    // Dispatch on type of strings.
    String::FlatContent content = src->GetFlatContent();
    ASSERT(content.IsFlat());
    if (content.IsAscii()) {
      CalculateLineEndsImpl(isolate,
                            &line_ends,
                            content.ToOneByteVector(),
                            include_ending_line);
    } else {
      CalculateLineEndsImpl(isolate,
                            &line_ends,
                            content.ToUC16Vector(),
                            include_ending_line);
    }
  }
  int line_count = line_ends.length();
  Handle<FixedArray> array = isolate->factory()->NewFixedArray(line_count);
  for (int i = 0; i < line_count; i++) {
    array->set(i, Smi::FromInt(line_ends[i]));
  }
  return array;
}


// Compares the contents of two strings by reading and comparing
// int-sized blocks of characters.
template <typename Char>
static inline bool CompareRawStringContents(const Char* const a,
                                            const Char* const b,
                                            int length) {
  int i = 0;
#ifndef V8_HOST_CAN_READ_UNALIGNED
  // If this architecture isn't comfortable reading unaligned ints
  // then we have to check that the strings are aligned before
  // comparing them blockwise.
  const int kAlignmentMask = sizeof(uint32_t) - 1;  // NOLINT
  uint32_t pa_addr = reinterpret_cast<uint32_t>(a);
  uint32_t pb_addr = reinterpret_cast<uint32_t>(b);
  if (((pa_addr & kAlignmentMask) | (pb_addr & kAlignmentMask)) == 0) {
#endif
    const int kStepSize = sizeof(int) / sizeof(Char);  // NOLINT
    int endpoint = length - kStepSize;
    // Compare blocks until we reach near the end of the string.
    for (; i <= endpoint; i += kStepSize) {
      uint32_t wa = *reinterpret_cast<const uint32_t*>(a + i);
      uint32_t wb = *reinterpret_cast<const uint32_t*>(b + i);
      if (wa != wb) {
        return false;
      }
    }
#ifndef V8_HOST_CAN_READ_UNALIGNED
  }
#endif
  // Compare the remaining characters that didn't fit into a block.
  for (; i < length; i++) {
    if (a[i] != b[i]) {
      return false;
    }
  }
  return true;
}


// Generic character-wise comparison for mixed character widths.
template<typename Chars1, typename Chars2>
class RawStringComparator : public AllStatic {
 public:
  static inline bool compare(const Chars1* a, const Chars2* b, int len) {
    ASSERT(sizeof(Chars1) != sizeof(Chars2));
    for (int i = 0; i < len; i++) {
      if (a[i] != b[i]) {
        return false;
      }
    }
    return true;
  }
};


// Same-width two-byte comparison uses the fast blockwise path.
template<>
class RawStringComparator<uint16_t, uint16_t> {
 public:
  static inline bool compare(const uint16_t* a, const uint16_t* b, int len) {
    return CompareRawStringContents(a, b, len);
  }
};


// Same-width one-byte comparison uses the fast blockwise path.
template<>
class RawStringComparator<uint8_t, uint8_t> {
 public:
  static inline bool compare(const uint8_t* a, const uint8_t* b, int len) {
    return CompareRawStringContents(a, b, len);
  }
};


// Compares two possibly non-flat strings segment by segment, using a pair of
// cons-string iterators to stream over their leaves.
class StringComparator {
  // Streams over one string: holds the current flat segment and advances to
  // the next leaf when the segment is exhausted.
  class State {
   public:
    explicit inline State(ConsStringIteratorOp* op)
      : op_(op), is_one_byte_(true), length_(0), buffer8_(NULL) {}

    inline void Init(String* string) {
      ConsString* cons_string = String::VisitFlat(this, string);
      op_->Reset(cons_string);
      if (cons_string != NULL) {
        int offset;
        string = op_->Next(&offset);
        String::VisitFlat(this, string, offset);
      }
    }

    inline void VisitOneByteString(const uint8_t* chars, int length) {
      is_one_byte_ = true;
      buffer8_ = chars;
      length_ = length;
    }

    inline void VisitTwoByteString(const uint16_t* chars, int length) {
      is_one_byte_ = false;
      buffer16_ = chars;
      length_ = length;
    }

    void Advance(int consumed) {
      ASSERT(consumed <= length_);
      // Still in buffer.
      if (length_ != consumed) {
        if (is_one_byte_) {
          buffer8_ += consumed;
        } else {
          buffer16_ += consumed;
        }
        length_ -= consumed;
        return;
      }
      // Advance state.
      int offset;
      String* next = op_->Next(&offset);
      ASSERT_EQ(0, offset);
      ASSERT(next != NULL);
      String::VisitFlat(this, next);
    }

    ConsStringIteratorOp* const op_;
    bool is_one_byte_;
    int length_;
    union {
      const uint8_t* buffer8_;
      const uint16_t* buffer16_;
    };

   private:
    DISALLOW_IMPLICIT_CONSTRUCTORS(State);
  };

 public:
  inline StringComparator(ConsStringIteratorOp* op_1,
                          ConsStringIteratorOp* op_2)
    : state_1_(op_1),
      state_2_(op_2) {
  }

  template<typename Chars1, typename Chars2>
  static inline bool Equals(State* state_1, State* state_2, int to_check) {
    // buffer8_/buffer16_ share a union, so reading through buffer8_ and
    // reinterpreting covers both encodings.
    const Chars1* a = reinterpret_cast<const Chars1*>(state_1->buffer8_);
    const Chars2* b = reinterpret_cast<const Chars2*>(state_2->buffer8_);
    return RawStringComparator<Chars1, Chars2>::compare(a, b, to_check);
  }

  bool Equals(String* string_1, String* string_2) {
    int length = string_1->length();
    state_1_.Init(string_1);
    state_2_.Init(string_2);
    while (true) {
      int to_check = Min(state_1_.length_, state_2_.length_);
      ASSERT(to_check > 0 && to_check <= length);
      bool is_equal;
      if (state_1_.is_one_byte_) {
        if (state_2_.is_one_byte_) {
          is_equal = Equals<uint8_t, uint8_t>(&state_1_, &state_2_, to_check);
        } else {
          is_equal = Equals<uint8_t, uint16_t>(&state_1_, &state_2_, to_check);
        }
      } else {
        if (state_2_.is_one_byte_) {
          is_equal = Equals<uint16_t, uint8_t>(&state_1_, &state_2_, to_check);
        } else {
          is_equal = Equals<uint16_t, uint16_t>(&state_1_, &state_2_, to_check);
        }
      }
      // Looping done.
      if (!is_equal) return false;
      length -= to_check;
      // Exit condition. Strings are equal.
      if (length == 0) return true;
      state_1_.Advance(to_check);
      state_2_.Advance(to_check);
    }
  }

 private:
  State state_1_;
  State state_2_;
  DISALLOW_IMPLICIT_CONSTRUCTORS(StringComparator);
};


// Full equality check used when identity and fast paths fail.
bool String::SlowEquals(String* other) {
  DisallowHeapAllocation no_gc;
  // Fast check: negative check with lengths.
  int len = length();
  if (len != other->length()) return false;
  if (len == 0) return true;

  // Fast check: if hash code is computed for both strings
  // a fast negative check can be performed.
  if (HasHashCode() && other->HasHashCode()) {
#ifdef ENABLE_SLOW_ASSERTS
    if (FLAG_enable_slow_asserts) {
      if (Hash() != other->Hash()) {
        bool found_difference = false;
        for (int i = 0; i < len; i++) {
          if (Get(i) != other->Get(i)) {
            found_difference = true;
            break;
          }
        }
        ASSERT(found_difference);
      }
    }
#endif
    if (Hash() != other->Hash()) return false;
  }

  // We know the strings are both non-empty. Compare the first chars
  // before we try to flatten the strings.
  if (this->Get(0) != other->Get(0)) return false;

  if (IsSeqOneByteString() && other->IsSeqOneByteString()) {
    const uint8_t* str1 = SeqOneByteString::cast(this)->GetChars();
    const uint8_t* str2 = SeqOneByteString::cast(other)->GetChars();
    return CompareRawStringContents(str1, str2, len);
  }

  Isolate* isolate = GetIsolate();
  StringComparator comparator(isolate->objects_string_compare_iterator_a(),
                              isolate->objects_string_compare_iterator_b());

  return comparator.Equals(this, other);
}


// Handle-based variant: may flatten the strings (allocating) before the
// content comparison.
bool String::SlowEquals(Handle<String> one, Handle<String> two) {
  // Fast check: negative check with lengths.
  int one_length = one->length();
  if (one_length != two->length()) return false;
  if (one_length == 0) return true;

  // Fast check: if hash code is computed for both strings
  // a fast negative check can be performed.
  if (one->HasHashCode() && two->HasHashCode()) {
#ifdef ENABLE_SLOW_ASSERTS
    if (FLAG_enable_slow_asserts) {
      if (one->Hash() != two->Hash()) {
        bool found_difference = false;
        for (int i = 0; i < one_length; i++) {
          if (one->Get(i) != two->Get(i)) {
            found_difference = true;
            break;
          }
        }
        ASSERT(found_difference);
      }
    }
#endif
    if (one->Hash() != two->Hash()) return false;
  }

  // We know the strings are both non-empty. Compare the first chars
  // before we try to flatten the strings.
  if (one->Get(0) != two->Get(0)) return false;

  one = String::Flatten(one);
  two = String::Flatten(two);

  DisallowHeapAllocation no_gc;
  String::FlatContent flat1 = one->GetFlatContent();
  String::FlatContent flat2 = two->GetFlatContent();

  if (flat1.IsAscii() && flat2.IsAscii()) {
    return CompareRawStringContents(flat1.ToOneByteVector().start(),
                                    flat2.ToOneByteVector().start(),
                                    one_length);
  } else {
    for (int i = 0; i < one_length; i++) {
      if (flat1.Get(i) != flat2.Get(i)) return false;
    }
    return true;
  }
}


// Swaps in the undetectable variant of the string map where one exists.
// Returns false for internalized strings and shapes with no such map.
bool String::MarkAsUndetectable() {
  if (StringShape(this).IsInternalized()) return false;

  Map* map = this->map();
  Heap* heap = GetHeap();
  if (map == heap->string_map()) {
    this->set_map(heap->undetectable_string_map());
    return true;
  } else if (map == heap->ascii_string_map()) {
    this->set_map(heap->undetectable_ascii_string_map());
    return true;
  }
  // Rest cannot be marked as undetectable
  return false;
}


// Decodes |str| as UTF-8 and compares it against this (UTF-16) string,
// including surrogate-pair handling for supplementary code points.
bool String::IsUtf8EqualTo(Vector<const char> str, bool allow_prefix_match) {
  int slen = length();
  // Can't check exact length equality, but we can check bounds.
  int str_len = str.length();
  if (!allow_prefix_match &&
      (str_len < slen ||
          str_len > slen*static_cast<int>(unibrow::Utf8::kMaxEncodedSize))) {
    return false;
  }
  int i;
  unsigned remaining_in_str = static_cast<unsigned>(str_len);
  const uint8_t* utf8_data = reinterpret_cast<const uint8_t*>(str.start());
  for (i = 0; i < slen && remaining_in_str > 0; i++) {
    unsigned cursor = 0;
    uint32_t r = unibrow::Utf8::ValueOf(utf8_data, remaining_in_str, &cursor);
    ASSERT(cursor > 0 && cursor <= remaining_in_str);
    if (r > unibrow::Utf16::kMaxNonSurrogateCharCode) {
      // Supplementary code point: must match a surrogate pair in this string.
      if (i > slen - 1) return false;
      if (Get(i++) != unibrow::Utf16::LeadSurrogate(r)) return false;
      if (Get(i) != unibrow::Utf16::TrailSurrogate(r)) return false;
    } else {
      if (Get(i) != r) return false;
    }
    utf8_data += cursor;
    remaining_in_str -= cursor;
  }
  return (allow_prefix_match || i == slen) && remaining_in_str == 0;
}


// Compares this string against a Latin-1 character vector.
bool String::IsOneByteEqualTo(Vector<const uint8_t> str) {
  int slen = length();
  if (str.length() != slen) return false;
  DisallowHeapAllocation no_gc;
  FlatContent content = GetFlatContent();
  if (content.IsAscii()) {
    return CompareChars(content.ToOneByteVector().start(),
                        str.start(), slen) == 0;
  }
  // Slow path for non-flat or two-byte content.
  for (int i = 0; i < slen; i++) {
    if (Get(i) != static_cast<uint16_t>(str[i])) return false;
  }
  return true;
}


// Compares this string against a two-byte character vector.
bool String::IsTwoByteEqualTo(Vector<const uc16> str) {
  int slen = length();
  if (str.length() != slen) return false;
  DisallowHeapAllocation no_gc;
  FlatContent content = GetFlatContent();
  if (content.IsTwoByte()) {
    return CompareChars(content.ToUC16Vector().start(), str.start(), slen) == 0;
  }
  // Slow path for non-flat or one-byte content.
  for (int i = 0; i < slen; i++) {
    if (Get(i) != str[i]) return false;
  }
  return true;
}


// StringHasher that can walk a (possibly non-flat) string leaf by leaf.
class IteratingStringHasher: public StringHasher {
 public:
  static inline uint32_t Hash(String* string, uint32_t seed) {
    IteratingStringHasher hasher(string->length(), seed);
    // Nothing to do.
    if (hasher.has_trivial_hash()) return hasher.GetHashField();
    ConsString* cons_string = String::VisitFlat(&hasher, string);
    // The string was flat.
    if (cons_string == NULL) return hasher.GetHashField();
    // This is a ConsString, iterate across it.
    ConsStringIteratorOp op(cons_string);
    int offset;
    while (NULL != (string = op.Next(&offset))) {
      String::VisitFlat(&hasher, string, offset);
    }
    return hasher.GetHashField();
  }
  inline void VisitOneByteString(const uint8_t* chars, int length) {
    AddCharacters(chars, length);
  }
  inline void VisitTwoByteString(const uint16_t* chars, int length) {
    AddCharacters(chars, length);
  }

 private:
  inline IteratingStringHasher(int len, uint32_t seed)
      : StringHasher(len, seed) {
  }
  DISALLOW_COPY_AND_ASSIGN(IteratingStringHasher);
};


// Computes the hash field, stores it on the string, and returns the hash.
uint32_t String::ComputeAndSetHash() {
  // Should only be called if hash code has not yet been computed.
  ASSERT(!HasHashCode());

  // Store the hash code in the object.
  uint32_t field = IteratingStringHasher::Hash(this, GetHeap()->HashSeed());
  set_hash_field(field);

  // Check the hash code is there.
  ASSERT(HasHashCode());
  uint32_t result = field >> kHashShift;
  ASSERT(result != 0);  // Ensure that the hash value of 0 is never computed.
  return result;
}


// Parses this string as an unsigned 32-bit array index. Returns false on
// non-digits, leading zeros (except "0" itself), or overflow.
bool String::ComputeArrayIndex(uint32_t* index) {
  int length = this->length();
  if (length == 0 || length > kMaxArrayIndexSize) return false;
  ConsStringIteratorOp op;
  StringCharacterStream stream(this, &op);
  uint16_t ch = stream.GetNext();

  // If the string begins with a '0' character, it must only consist
  // of it to be a legal array index.
  if (ch == '0') {
    *index = 0;
    return length == 1;
  }

  // Convert string to uint32 array index; character by character.
  int d = ch - '0';
  if (d < 0 || d > 9) return false;
  uint32_t result = d;
  while (stream.HasMore()) {
    d = stream.GetNext() - '0';
    if (d < 0 || d > 9) return false;
    // Check that the new result is below the 32 bit limit.
    if (result > 429496729U - ((d > 5) ? 1 : 0)) return false;
    result = (result * 10) + d;
  }

  *index = result;
  return true;
}


// Returns the array-index interpretation of this string, reading it from the
// cached hash field when the string is short enough to have one.
bool String::SlowAsArrayIndex(uint32_t* index) {
  if (length() <= kMaxCachedArrayIndexLength) {
    Hash();  // force computation of hash code
    uint32_t field = hash_field();
    if ((field & kIsNotArrayIndexMask) != 0) return false;
    // Isolate the array index from the full hash field.
    *index = ArrayIndexValueBits::decode(field);
    return true;
  } else {
    return ComputeArrayIndex(index);
  }
}


// Shrinks a sequential string in place to |new_length| characters, returning
// the freed tail to the heap (lowering new-space top or inserting a filler).
Handle<String> SeqString::Truncate(Handle<SeqString> string, int new_length) {
  int new_size, old_size;
  int old_length = string->length();
  if (old_length <= new_length) return string;

  if (string->IsSeqOneByteString()) {
    old_size = SeqOneByteString::SizeFor(old_length);
    new_size = SeqOneByteString::SizeFor(new_length);
  } else {
    ASSERT(string->IsSeqTwoByteString());
    old_size = SeqTwoByteString::SizeFor(old_length);
    new_size = SeqTwoByteString::SizeFor(new_length);
  }

  int delta = old_size - new_size;

  Address start_of_string = string->address();
  ASSERT_OBJECT_ALIGNED(start_of_string);
  ASSERT_OBJECT_ALIGNED(start_of_string + new_size);

  Heap* heap = string->GetHeap();
  NewSpace* newspace = heap->new_space();
  if (newspace->Contains(start_of_string) &&
      newspace->top() == start_of_string + old_size) {
    // Last allocated object in new space. Simply lower allocation top.
    newspace->set_top(start_of_string + new_size);
  } else {
    // Sizes are pointer size aligned, so that we can use filler objects
    // that are a multiple of pointer size.
    heap->CreateFillerObjectAt(start_of_string + new_size, delta);
  }
  heap->AdjustLiveBytes(start_of_string, -delta, Heap::FROM_MUTATOR);

  // We are storing the new length using release store after creating a filler
  // for the left-over space to avoid races with the sweeper thread.
  string->synchronized_set_length(new_length);

  if (new_length == 0) return heap->isolate()->factory()->empty_string();
  return string;
}


uint32_t StringHasher::MakeArrayIndexHash(uint32_t value, int length) {
  // For array indexes mix the length into the hash as an array index could
  // be zero.
9449 ASSERT(length > 0); 9450 ASSERT(length <= String::kMaxArrayIndexSize); 9451 ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) < 9452 (1 << String::kArrayIndexValueBits)); 9453 9454 value <<= String::ArrayIndexValueBits::kShift; 9455 value |= length << String::ArrayIndexLengthBits::kShift; 9456 9457 ASSERT((value & String::kIsNotArrayIndexMask) == 0); 9458 ASSERT((length > String::kMaxCachedArrayIndexLength) || 9459 (value & String::kContainsCachedArrayIndexMask) == 0); 9460 return value; 9461 } 9462 9463 9464 uint32_t StringHasher::GetHashField() { 9465 if (length_ <= String::kMaxHashCalcLength) { 9466 if (is_array_index_) { 9467 return MakeArrayIndexHash(array_index_, length_); 9468 } 9469 return (GetHashCore(raw_running_hash_) << String::kHashShift) | 9470 String::kIsNotArrayIndexMask; 9471 } else { 9472 return (length_ << String::kHashShift) | String::kIsNotArrayIndexMask; 9473 } 9474 } 9475 9476 9477 uint32_t StringHasher::ComputeUtf8Hash(Vector<const char> chars, 9478 uint32_t seed, 9479 int* utf16_length_out) { 9480 int vector_length = chars.length(); 9481 // Handle some edge cases 9482 if (vector_length <= 1) { 9483 ASSERT(vector_length == 0 || 9484 static_cast<uint8_t>(chars.start()[0]) <= 9485 unibrow::Utf8::kMaxOneByteChar); 9486 *utf16_length_out = vector_length; 9487 return HashSequentialString(chars.start(), vector_length, seed); 9488 } 9489 // Start with a fake length which won't affect computation. 9490 // It will be updated later. 
9491 StringHasher hasher(String::kMaxArrayIndexSize, seed); 9492 unsigned remaining = static_cast<unsigned>(vector_length); 9493 const uint8_t* stream = reinterpret_cast<const uint8_t*>(chars.start()); 9494 int utf16_length = 0; 9495 bool is_index = true; 9496 ASSERT(hasher.is_array_index_); 9497 while (remaining > 0) { 9498 unsigned consumed = 0; 9499 uint32_t c = unibrow::Utf8::ValueOf(stream, remaining, &consumed); 9500 ASSERT(consumed > 0 && consumed <= remaining); 9501 stream += consumed; 9502 remaining -= consumed; 9503 bool is_two_characters = c > unibrow::Utf16::kMaxNonSurrogateCharCode; 9504 utf16_length += is_two_characters ? 2 : 1; 9505 // No need to keep hashing. But we do need to calculate utf16_length. 9506 if (utf16_length > String::kMaxHashCalcLength) continue; 9507 if (is_two_characters) { 9508 uint16_t c1 = unibrow::Utf16::LeadSurrogate(c); 9509 uint16_t c2 = unibrow::Utf16::TrailSurrogate(c); 9510 hasher.AddCharacter(c1); 9511 hasher.AddCharacter(c2); 9512 if (is_index) is_index = hasher.UpdateIndex(c1); 9513 if (is_index) is_index = hasher.UpdateIndex(c2); 9514 } else { 9515 hasher.AddCharacter(c); 9516 if (is_index) is_index = hasher.UpdateIndex(c); 9517 } 9518 } 9519 *utf16_length_out = static_cast<int>(utf16_length); 9520 // Must set length here so that hash computation is correct. 
9521 hasher.length_ = utf16_length; 9522 return hasher.GetHashField(); 9523 } 9524 9525 9526 void String::PrintOn(FILE* file) { 9527 int length = this->length(); 9528 for (int i = 0; i < length; i++) { 9529 PrintF(file, "%c", Get(i)); 9530 } 9531 } 9532 9533 9534 static void TrimEnumCache(Heap* heap, Map* map, DescriptorArray* descriptors) { 9535 int live_enum = map->EnumLength(); 9536 if (live_enum == kInvalidEnumCacheSentinel) { 9537 live_enum = map->NumberOfDescribedProperties(OWN_DESCRIPTORS, DONT_ENUM); 9538 } 9539 if (live_enum == 0) return descriptors->ClearEnumCache(); 9540 9541 FixedArray* enum_cache = descriptors->GetEnumCache(); 9542 9543 int to_trim = enum_cache->length() - live_enum; 9544 if (to_trim <= 0) return; 9545 RightTrimFixedArray<Heap::FROM_GC>( 9546 heap, descriptors->GetEnumCache(), to_trim); 9547 9548 if (!descriptors->HasEnumIndicesCache()) return; 9549 FixedArray* enum_indices_cache = descriptors->GetEnumIndicesCache(); 9550 RightTrimFixedArray<Heap::FROM_GC>(heap, enum_indices_cache, to_trim); 9551 } 9552 9553 9554 static void TrimDescriptorArray(Heap* heap, 9555 Map* map, 9556 DescriptorArray* descriptors, 9557 int number_of_own_descriptors) { 9558 int number_of_descriptors = descriptors->number_of_descriptors_storage(); 9559 int to_trim = number_of_descriptors - number_of_own_descriptors; 9560 if (to_trim == 0) return; 9561 9562 RightTrimFixedArray<Heap::FROM_GC>( 9563 heap, descriptors, to_trim * DescriptorArray::kDescriptorSize); 9564 descriptors->SetNumberOfDescriptors(number_of_own_descriptors); 9565 9566 if (descriptors->HasEnumCache()) TrimEnumCache(heap, map, descriptors); 9567 descriptors->Sort(); 9568 } 9569 9570 9571 // Clear a possible back pointer in case the transition leads to a dead map. 9572 // Return true in case a back pointer has been cleared and false otherwise. 
static bool ClearBackPointer(Heap* heap, Map* target) {
  // A marked target map is still live; keep its back pointer intact.
  if (Marking::MarkBitFrom(target).Get()) return false;
  target->SetBackPointer(heap->undefined_value(), SKIP_WRITE_BARRIER);
  return true;
}


// TODO(mstarzinger): This method should be moved into MarkCompactCollector,
// because it cannot be called from outside the GC and we already have methods
// depending on the transitions layout in the GC anyways.
void Map::ClearNonLiveTransitions(Heap* heap) {
  // If there are no transitions to be cleared, return.
  // TODO(verwaest) Should be an assert, otherwise back pointers are not
  // properly cleared.
  if (!HasTransitionArray()) return;

  TransitionArray* t = transitions();
  MarkCompactCollector* collector = heap->mark_compact_collector();

  int transition_index = 0;

  DescriptorArray* descriptors = instance_descriptors();
  bool descriptors_owner_died = false;

  // Compact all live transitions to the left, dropping entries whose target
  // map is dead (and whose back pointer was therefore just cleared).
  for (int i = 0; i < t->number_of_transitions(); ++i) {
    Map* target = t->GetTarget(i);
    if (ClearBackPointer(heap, target)) {
      if (target->instance_descriptors() == descriptors) {
        // A dead target shared our descriptor array; it may need trimming.
        descriptors_owner_died = true;
      }
    } else {
      if (i != transition_index) {
        Name* key = t->GetKey(i);
        t->SetKey(transition_index, key);
        Object** key_slot = t->GetKeySlot(transition_index);
        collector->RecordSlot(key_slot, key_slot, key);
        // Target slots do not need to be recorded since maps are not compacted.
        t->SetTarget(transition_index, t->GetTarget(i));
      }
      transition_index++;
    }
  }

  // If there are no transitions to be cleared, return.
  // TODO(verwaest) Should be an assert, otherwise back pointers are not
  // properly cleared.
  if (transition_index == t->number_of_transitions()) return;

  int number_of_own_descriptors = NumberOfOwnDescriptors();

  if (descriptors_owner_died) {
    if (number_of_own_descriptors > 0) {
      TrimDescriptorArray(heap, this, descriptors, number_of_own_descriptors);
      ASSERT(descriptors->number_of_descriptors() == number_of_own_descriptors);
      set_owns_descriptors(true);
    } else {
      ASSERT(descriptors == GetHeap()->empty_descriptor_array());
    }
  }

  // Note that we never eliminate a transition array, though we might right-trim
  // such that number_of_transitions() == 0. If this assumption changes,
  // TransitionArray::CopyInsert() will need to deal with the case that a
  // transition array disappeared during GC.
  int trim = t->number_of_transitions() - transition_index;
  if (trim > 0) {
    RightTrimFixedArray<Heap::FROM_GC>(heap, t, t->IsSimpleTransition()
        ? trim : trim * TransitionArray::kTransitionSize);
  }
  ASSERT(HasTransitionArray());
}


int Map::Hash() {
  // For performance reasons we only hash the 3 most variable fields of a map:
  // constructor, prototype and bit_field2.

  // Shift away the tag.
  int hash = (static_cast<uint32_t>(
      reinterpret_cast<uintptr_t>(constructor())) >> 2);

  // XOR-ing the prototype and constructor directly yields too many zero bits
  // when the two pointers are close (which is fairly common).
  // To avoid this we shift the prototype 4 bits relatively to the constructor.
  hash ^= (static_cast<uint32_t>(
      reinterpret_cast<uintptr_t>(prototype())) << 2);

  return hash ^ (hash >> 16) ^ bit_field2();
}


// Returns true if the two maps agree on all fields relevant for map
// equivalence (constructor, prototype, instance type, bit fields, frozen
// state, instance call handler).
static bool CheckEquivalent(Map* first, Map* second) {
  return
    first->constructor() == second->constructor() &&
    first->prototype() == second->prototype() &&
    first->instance_type() == second->instance_type() &&
    first->bit_field() == second->bit_field() &&
    first->bit_field2() == second->bit_field2() &&
    first->is_frozen() == second->is_frozen() &&
    first->has_instance_call_handler() == second->has_instance_call_handler();
}


bool Map::EquivalentToForTransition(Map* other) {
  return CheckEquivalent(this, other);
}


// Like EquivalentToForTransition, but additionally requires matching
// in-object property counts (treated as zero when the mode clears them).
bool Map::EquivalentToForNormalization(Map* other,
                                       PropertyNormalizationMode mode) {
  int properties = mode == CLEAR_INOBJECT_PROPERTIES
      ? 0 : other->inobject_properties();
  return CheckEquivalent(this, other) && inobject_properties() == properties;
}


// Visits all code-pointer and heap-pointer entries of the constant pool.
void ConstantPoolArray::ConstantPoolIterateBody(ObjectVisitor* v) {
  ConstantPoolArray::Iterator code_iter(this, ConstantPoolArray::CODE_PTR);
  while (!code_iter.is_finished()) {
    v->VisitCodeEntry(reinterpret_cast<Address>(
        RawFieldOfElementAt(code_iter.next_index())));
  }

  ConstantPoolArray::Iterator heap_iter(this, ConstantPoolArray::HEAP_PTR);
  while (!heap_iter.is_finished()) {
    v->VisitPointer(RawFieldOfElementAt(heap_iter.next_index()));
  }
}


// Resets all pointer entries of the constant pool to safe defaults:
// code pointers to the Illegal builtin, heap pointers to undefined.
void ConstantPoolArray::ClearPtrEntries(Isolate* isolate) {
  Type type[] = { CODE_PTR, HEAP_PTR };
  Address default_value[] = {
      isolate->builtins()->builtin(Builtins::kIllegal)->entry(),
      reinterpret_cast<Address>(isolate->heap()->undefined_value()) };

  for (int i = 0; i < 2; ++i) {
    for (int s = 0; s <= final_section(); ++s) {
      LayoutSection section = static_cast<LayoutSection>(s);
      if (number_of_entries(type[i], section) > 0) {
        int offset = OffsetOfElementAt(first_index(type[i], section));
        MemsetPointer(
          reinterpret_cast<Address*>(HeapObject::RawField(this, offset)),
          default_value[i],
          number_of_entries(type[i], section));
      }
    }
  }
}


void JSFunction::JSFunctionIterateBody(int object_size, ObjectVisitor* v) {
  // Iterate over all fields in the body but take care in dealing with
  // the code entry.
  IteratePointers(v, kPropertiesOffset, kCodeEntryOffset);
  v->VisitCodeEntry(this->address() + kCodeEntryOffset);
  IteratePointers(v, kCodeEntryOffset + kPointerSize, object_size);
}


// Marks this function for (non-concurrent) optimized recompilation by
// installing the CompileOptimized builtin as its code.
void JSFunction::MarkForOptimization() {
  ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
  ASSERT(!IsOptimized());
  ASSERT(shared()->allows_lazy_compilation() ||
         code()->optimizable());
  ASSERT(!shared()->is_generator());
  set_code_no_write_barrier(
      GetIsolate()->builtins()->builtin(Builtins::kCompileOptimized));
  // No write barrier required, since the builtin is part of the root set.
}


// Marks this function for concurrent optimized recompilation by installing
// the CompileOptimizedConcurrent builtin as its code.
void JSFunction::MarkForConcurrentOptimization() {
  ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
  ASSERT(!IsOptimized());
  ASSERT(shared()->allows_lazy_compilation() || code()->optimizable());
  ASSERT(!shared()->is_generator());
  ASSERT(GetIsolate()->concurrent_recompilation_enabled());
  if (FLAG_trace_concurrent_recompilation) {
    PrintF("  ** Marking ");
    PrintName();
    PrintF(" for concurrent recompilation.\n");
  }
  set_code_no_write_barrier(
      GetIsolate()->builtins()->builtin(Builtins::kCompileOptimizedConcurrent));
  // No write barrier required, since the builtin is part of the root set.
}


void JSFunction::MarkInOptimizationQueue() {
  // We can only arrive here via the concurrent-recompilation builtin.  If
  // break points were set, the code would point to the lazy-compile builtin.
  ASSERT(!GetIsolate()->DebuggerHasBreakPoints());
  ASSERT(IsMarkedForConcurrentOptimization() && !IsOptimized());
  ASSERT(shared()->allows_lazy_compilation() || code()->optimizable());
  ASSERT(GetIsolate()->concurrent_recompilation_enabled());
  if (FLAG_trace_concurrent_recompilation) {
    PrintF("  ** Queueing ");
    PrintName();
    PrintF(" for concurrent recompilation.\n");
  }
  set_code_no_write_barrier(
      GetIsolate()->builtins()->builtin(Builtins::kInOptimizationQueue));
  // No write barrier required, since the builtin is part of the root set.
}


// Appends an entry {native context, code, literals, osr ast id} to the
// shared function info's optimized code map, creating the map on first use.
void SharedFunctionInfo::AddToOptimizedCodeMap(
    Handle<SharedFunctionInfo> shared,
    Handle<Context> native_context,
    Handle<Code> code,
    Handle<FixedArray> literals,
    BailoutId osr_ast_id) {
  Isolate* isolate = shared->GetIsolate();
  ASSERT(code->kind() == Code::OPTIMIZED_FUNCTION);
  ASSERT(native_context->IsNativeContext());
  STATIC_ASSERT(kEntryLength == 4);
  Handle<FixedArray> new_code_map;
  Handle<Object> value(shared->optimized_code_map(), isolate);
  int old_length;
  if (value->IsSmi()) {
    // No optimized code map.
    ASSERT_EQ(0, Smi::cast(*value)->value());
    // Create 4 entries per context {context, code, literals, ast-id}.
    new_code_map = isolate->factory()->NewFixedArray(kInitialLength);
    old_length = kEntriesStart;
  } else {
    // Copy old map and append one new entry.
    Handle<FixedArray> old_code_map = Handle<FixedArray>::cast(value);
    ASSERT_EQ(-1, shared->SearchOptimizedCodeMap(*native_context, osr_ast_id));
    old_length = old_code_map->length();
    new_code_map = FixedArray::CopySize(
        old_code_map, old_length + kEntryLength);
    // Zap the old map for the sake of the heap verifier.
    if (Heap::ShouldZapGarbage()) {
      Object** data = old_code_map->data_start();
      MemsetPointer(data, isolate->heap()->the_hole_value(), old_length);
    }
  }
  new_code_map->set(old_length + kContextOffset, *native_context);
  new_code_map->set(old_length + kCachedCodeOffset, *code);
  new_code_map->set(old_length + kLiteralsOffset, *literals);
  new_code_map->set(old_length + kOsrAstIdOffset,
                    Smi::FromInt(osr_ast_id.ToInt()));

#ifdef DEBUG
  for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) {
    ASSERT(new_code_map->get(i + kContextOffset)->IsNativeContext());
    ASSERT(new_code_map->get(i + kCachedCodeOffset)->IsCode());
    ASSERT(Code::cast(new_code_map->get(i + kCachedCodeOffset))->kind() ==
           Code::OPTIMIZED_FUNCTION);
    ASSERT(new_code_map->get(i + kLiteralsOffset)->IsFixedArray());
    ASSERT(new_code_map->get(i + kOsrAstIdOffset)->IsSmi());
  }
#endif
  shared->set_optimized_code_map(*new_code_map);
}


// Returns the cached literals array stored next to the code at the given
// entry index, or NULL for bound functions (which have no literals).
FixedArray* SharedFunctionInfo::GetLiteralsFromOptimizedCodeMap(int index) {
  ASSERT(index > kEntriesStart);
  FixedArray* code_map = FixedArray::cast(optimized_code_map());
  if (!bound()) {
    // index points at the cached-code slot; literals are in the next slot.
    FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1));
    ASSERT_NE(NULL, cached_literals);
    return cached_literals;
  }
  return NULL;
}


// Returns the optimized code object cached at the given entry index.
Code* SharedFunctionInfo::GetCodeFromOptimizedCodeMap(int index) {
  ASSERT(index > kEntriesStart);
  FixedArray* code_map = FixedArray::cast(optimized_code_map());
  Code* code = Code::cast(code_map->get(index));
  ASSERT_NE(NULL, code);
  return code;
}


// Drops the whole optimized code map, first evicting it from the code
// flusher if it had been enqueued there.
void SharedFunctionInfo::ClearOptimizedCodeMap() {
  FixedArray* code_map = FixedArray::cast(optimized_code_map());

  // If the next map link slot is already used then the function was
  // enqueued with code flushing and we remove it now.
  if (!code_map->get(kNextMapIndex)->IsUndefined()) {
    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
    flusher->EvictOptimizedCodeMap(this);
  }

  ASSERT(code_map->get(kNextMapIndex)->IsUndefined());
  set_optimized_code_map(Smi::FromInt(0));
}


// Removes all entries holding the given code object from the optimized code
// map, compacting the remaining entries to the left and trimming the array.
void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
                                                   const char* reason) {
  DisallowHeapAllocation no_gc;
  if (optimized_code_map()->IsSmi()) return;

  FixedArray* code_map = FixedArray::cast(optimized_code_map());
  int dst = kEntriesStart;
  int length = code_map->length();
  for (int src = kEntriesStart; src < length; src += kEntryLength) {
    ASSERT(code_map->get(src)->IsNativeContext());
    if (Code::cast(code_map->get(src + kCachedCodeOffset)) == optimized_code) {
      // Evict the src entry by not copying it to the dst entry.
      if (FLAG_trace_opt) {
        PrintF("[evicting entry from optimizing code map (%s) for ", reason);
        ShortPrint();
        BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value());
        if (osr.IsNone()) {
          PrintF("]\n");
        } else {
          PrintF(" (osr ast id %d)]\n", osr.ToInt());
        }
      }
    } else {
      // Keep the src entry by copying it to the dst entry.
      if (dst != src) {
        code_map->set(dst + kContextOffset,
                      code_map->get(src + kContextOffset));
        code_map->set(dst + kCachedCodeOffset,
                      code_map->get(src + kCachedCodeOffset));
        code_map->set(dst + kLiteralsOffset,
                      code_map->get(src + kLiteralsOffset));
        code_map->set(dst + kOsrAstIdOffset,
                      code_map->get(src + kOsrAstIdOffset));
      }
      dst += kEntryLength;
    }
  }
  if (dst != length) {
    // Always trim even when array is cleared because of heap verifier.
    RightTrimFixedArray<Heap::FROM_MUTATOR>(GetHeap(), code_map, length - dst);
    if (code_map->length() == kEntriesStart) ClearOptimizedCodeMap();
  }
}


// Trims shrink_by elements (a whole number of entries) off the end of the
// optimized code map; clears the map entirely if no entries remain.
void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) {
  FixedArray* code_map = FixedArray::cast(optimized_code_map());
  ASSERT(shrink_by % kEntryLength == 0);
  ASSERT(shrink_by <= code_map->length() - kEntriesStart);
  // Always trim even when array is cleared because of heap verifier.
  RightTrimFixedArray<Heap::FROM_GC>(GetHeap(), code_map, shrink_by);
  if (code_map->length() == kEntriesStart) {
    ClearOptimizedCodeMap();
  }
}


void JSObject::OptimizeAsPrototype(Handle<JSObject> object) {
  if (object->IsGlobalObject()) return;

  // Make sure prototypes are fast objects and their maps have the bit set
  // so they remain fast.
  if (!object->HasFastProperties()) {
    TransformToFastProperties(object, 0);
  }
}


Handle<Object> CacheInitialJSArrayMaps(
    Handle<Context> native_context, Handle<Map> initial_map) {
  // Replace all of the cached initial array maps in the native context with
  // the appropriate transitioned elements kind maps.
  Factory* factory = native_context->GetIsolate()->factory();
  Handle<FixedArray> maps = factory->NewFixedArrayWithHoles(
      kElementsKindCount, TENURED);

  Handle<Map> current_map = initial_map;
  ElementsKind kind = current_map->elements_kind();
  ASSERT(kind == GetInitialFastElementsKind());
  maps->set(kind, *current_map);
  // Walk (or create) the elements-kind transition chain, caching one map per
  // fast elements kind.
  for (int i = GetSequenceIndexFromFastElementsKind(kind) + 1;
       i < kFastElementsKindCount; ++i) {
    Handle<Map> new_map;
    ElementsKind next_kind = GetFastElementsKindFromSequenceIndex(i);
    if (current_map->HasElementsTransition()) {
      new_map = handle(current_map->elements_transition_map());
      ASSERT(new_map->elements_kind() == next_kind);
    } else {
      new_map = Map::CopyAsElementsKind(
          current_map, next_kind, INSERT_TRANSITION);
    }
    maps->set(next_kind, *new_map);
    current_map = new_map;
  }
  native_context->set_js_array_maps(*maps);
  return initial_map;
}


void JSFunction::SetInstancePrototype(Handle<JSFunction> function,
                                      Handle<Object> value) {
  Isolate* isolate = function->GetIsolate();

  ASSERT(value->IsJSReceiver());

  // First some logic for the map of the prototype to make sure it is in fast
  // mode.
  if (value->IsJSObject()) {
    JSObject::OptimizeAsPrototype(Handle<JSObject>::cast(value));
  }

  // Now some logic for the maps of the objects that are created by using this
  // function as a constructor.
  if (function->has_initial_map()) {
    // If the function has allocated the initial map replace it with a
    // copy containing the new prototype.  Also complete any in-object
    // slack tracking that is in progress at this point because it is
    // still tracking the old copy.
    if (function->IsInobjectSlackTrackingInProgress()) {
      function->CompleteInobjectSlackTracking();
    }
    Handle<Map> initial_map(function->initial_map(), isolate);
    Handle<Map> new_map = Map::Copy(initial_map);
    new_map->set_prototype(*value);

    // If the function is used as the global Array function, cache the
    // initial map (and transitioned versions) in the native context.
    Context* native_context = function->context()->native_context();
    Object* array_function = native_context->get(Context::ARRAY_FUNCTION_INDEX);
    if (array_function->IsJSFunction() &&
        *function == JSFunction::cast(array_function)) {
      CacheInitialJSArrayMaps(handle(native_context, isolate), new_map);
    }

    function->set_initial_map(*new_map);

    // Deoptimize all code that embeds the previous initial map.
    initial_map->dependent_code()->DeoptimizeDependentCodeGroup(
        isolate, DependentCode::kInitialMapChangedGroup);
  } else {
    // Put the value in the initial map field until an initial map is
    // needed.  At that point, a new initial map is created and the
    // prototype is put into the initial map where it belongs.
    function->set_prototype_or_initial_map(*value);
  }
  isolate->heap()->ClearInstanceofCache();
}


void JSFunction::SetPrototype(Handle<JSFunction> function,
                              Handle<Object> value) {
  ASSERT(function->should_have_prototype());
  Handle<Object> construct_prototype = value;

  // If the value is not a JSReceiver, store the value in the map's
  // constructor field so it can be accessed.  Also, set the prototype
  // used for constructing objects to the original object prototype.
  // See ECMA-262 13.2.2.
  if (!value->IsJSReceiver()) {
    // Copy the map so this does not affect unrelated functions.
    // Remove map transitions because they point to maps with a
    // different prototype.
    Handle<Map> new_map = Map::Copy(handle(function->map()));

    JSObject::MigrateToMap(function, new_map);
    new_map->set_constructor(*value);
    new_map->set_non_instance_prototype(true);
    Isolate* isolate = new_map->GetIsolate();
    construct_prototype = handle(
        isolate->context()->native_context()->initial_object_prototype(),
        isolate);
  } else {
    function->map()->set_non_instance_prototype(false);
  }

  return SetInstancePrototype(function, construct_prototype);
}


// Switches this function to the without-prototype map for its strictness
// mode. Returns false (in DEBUG builds) if the function's map is not the
// canonical function map and therefore cannot safely be replaced.
bool JSFunction::RemovePrototype() {
  Context* native_context = context()->native_context();
  Map* no_prototype_map = shared()->strict_mode() == SLOPPY
      ? native_context->sloppy_function_without_prototype_map()
      : native_context->strict_function_without_prototype_map();

  if (map() == no_prototype_map) return true;

#ifdef DEBUG
  if (map() != (shared()->strict_mode() == SLOPPY
                   ? native_context->sloppy_function_map()
                   : native_context->strict_function_map())) {
    return false;
  }
#endif

  set_map(no_prototype_map);
  set_prototype_or_initial_map(no_prototype_map->GetHeap()->the_hole_value());
  return true;
}


void JSFunction::EnsureHasInitialMap(Handle<JSFunction> function) {
  if (function->has_initial_map()) return;
  Isolate* isolate = function->GetIsolate();

  // First create a new map with the size and number of in-object properties
  // suggested by the function.
  InstanceType instance_type;
  int instance_size;
  int in_object_properties;
  if (function->shared()->is_generator()) {
    instance_type = JS_GENERATOR_OBJECT_TYPE;
    instance_size = JSGeneratorObject::kSize;
    in_object_properties = 0;
  } else {
    instance_type = JS_OBJECT_TYPE;
    instance_size = function->shared()->CalculateInstanceSize();
    in_object_properties = function->shared()->CalculateInObjectProperties();
  }
  Handle<Map> map = isolate->factory()->NewMap(instance_type, instance_size);

  // Fetch or allocate prototype.
  Handle<Object> prototype;
  if (function->has_instance_prototype()) {
    prototype = handle(function->instance_prototype(), isolate);
    // Optimize every JSObject on the prototype chain (stops at null or a
    // proxy).
    for (Handle<Object> p = prototype; !p->IsNull() && !p->IsJSProxy();
         p = Object::GetPrototype(isolate, p)) {
      JSObject::OptimizeAsPrototype(Handle<JSObject>::cast(p));
    }
  } else {
    prototype = isolate->factory()->NewFunctionPrototype(function);
  }
  map->set_inobject_properties(in_object_properties);
  map->set_unused_property_fields(in_object_properties);
  map->set_prototype(*prototype);
  ASSERT(map->has_fast_object_elements());

  // Finally link initial map and constructor function.
  function->set_initial_map(*map);
  map->set_constructor(*function);

  if (!function->shared()->is_generator()) {
    function->StartInobjectSlackTracking();
  }
}


void JSFunction::SetInstanceClassName(String* name) {
  shared()->set_instance_class_name(name);
}


// Prints the function's debug name to the given file.
void JSFunction::PrintName(FILE* out) {
  SmartArrayPointer<char> name = shared()->DebugName()->ToCString();
  PrintF(out, "%s", name.get());
}


Context* JSFunction::NativeContextFromLiterals(FixedArray* literals) {
  return Context::cast(literals->get(JSFunction::kLiteralNativeContextIndex));
}


// The filter is a pattern that matches function names in this way:
//   "*"      all; the default
//   "-"      all but the top-level function
//   "-name"  all but the function "name"
//   ""       only the top-level function
//   "name"   only the function "name"
//   "name*"  only functions starting with "name"
bool JSFunction::PassesFilter(const char* raw_filter) {
  if (*raw_filter == '*') return true;
  String* name = shared()->DebugName();
  Vector<const char> filter = CStrVector(raw_filter);
  if (filter.length() == 0) return name->length() == 0;
  if (filter[0] == '-') {
    // Negative filter.
    if (filter.length() == 1) {
      return (name->length() != 0);
    } else if (name->IsUtf8EqualTo(filter.SubVector(1, filter.length()))) {
      return false;
    }
    if (filter[filter.length() - 1] == '*' &&
        name->IsUtf8EqualTo(filter.SubVector(1, filter.length() - 1), true)) {
      return false;
    }
    return true;

  } else if (name->IsUtf8EqualTo(filter)) {
    return true;
  }
  if (filter[filter.length() - 1] == '*' &&
      name->IsUtf8EqualTo(filter.SubVector(0, filter.length() - 1), true)) {
    return true;
  }
  return false;
}


// Initializes an oddball with its string representation (internalized),
// number representation and kind tag.
void Oddball::Initialize(Isolate* isolate,
                         Handle<Oddball> oddball,
                         const char* to_string,
                         Handle<Object> to_number,
                         byte kind) {
  Handle<String> internalized_to_string =
      isolate->factory()->InternalizeUtf8String(to_string);
  oddball->set_to_string(*internalized_to_string);
  oddball->set_to_number(*to_number);
  oddball->set_kind(kind);
}


// Lazily computes and caches the array of line-end positions for the
// script's source. A script without a string source gets an empty array.
void Script::InitLineEnds(Handle<Script> script) {
  if (!script->line_ends()->IsUndefined()) return;

  Isolate* isolate = script->GetIsolate();

  if (!script->source()->IsString()) {
    ASSERT(script->source()->IsUndefined());
    Handle<FixedArray> empty = isolate->factory()->NewFixedArray(0);
    script->set_line_ends(*empty);
    ASSERT(script->line_ends()->IsFixedArray());
    return;
  }

  Handle<String> src(String::cast(script->source()), isolate);

  Handle<FixedArray> array = String::CalculateLineEnds(src, true);

  if (*array != isolate->heap()->empty_fixed_array()) {
    array->set_map(isolate->heap()->fixed_cow_array_map());
  }

  script->set_line_ends(*array);
  ASSERT(script->line_ends()->IsFixedArray());
}


// Returns the 0-based column of code_pos within its line, adjusted by the
// script's column offset on the first line; -1 if the line is unknown.
int Script::GetColumnNumber(Handle<Script> script, int code_pos) {
  int line_number = GetLineNumber(script, code_pos);
  if (line_number == -1) return -1;

  DisallowHeapAllocation no_allocation;
  FixedArray* line_ends_array = FixedArray::cast(script->line_ends());
  line_number = line_number - script->line_offset()->value();
  if (line_number == 0) return code_pos + script->column_offset()->value();
  int prev_line_end_pos =
      Smi::cast(line_ends_array->get(line_number - 1))->value();
  return code_pos - (prev_line_end_pos + 1);
}


// Binary-searches the precomputed line-ends array for the line containing
// code_pos; requires InitLineEnds to have run. Returns -1 for empty sources.
int Script::GetLineNumberWithArray(int code_pos) {
  DisallowHeapAllocation no_allocation;
  ASSERT(line_ends()->IsFixedArray());
  FixedArray* line_ends_array = FixedArray::cast(line_ends());
  int line_ends_len = line_ends_array->length();
  if (line_ends_len == 0) return -1;

  if ((Smi::cast(line_ends_array->get(0)))->value() >= code_pos) {
    return line_offset()->value();
  }

  int left = 0;
  int right = line_ends_len;
  while (int half = (right - left) / 2) {
    if ((Smi::cast(line_ends_array->get(left + half)))->value() > code_pos) {
      right -= half;
    } else {
      left += half;
    }
  }
  return right + line_offset()->value();
}


int Script::GetLineNumber(Handle<Script> script, int code_pos) {
  InitLineEnds(script);
  return script->GetLineNumberWithArray(code_pos);
}


int Script::GetLineNumber(int code_pos) {
  DisallowHeapAllocation no_allocation;
  if (!line_ends()->IsUndefined()) return GetLineNumberWithArray(code_pos);

  // Slow mode: we do not have line_ends. We have to iterate through source.
  if (!source()->IsString()) return -1;

  String* source_string = String::cast(source());
  int line = 0;
  int len = source_string->length();
  for (int pos = 0; pos < len; pos++) {
    if (pos == code_pos) break;
    if (source_string->Get(pos) == '\n') line++;
  }
  return line;
}


// Calls the JS-level nameOrSourceURL() method on the script wrapper and
// returns its result, or undefined if the call throws.
Handle<Object> Script::GetNameOrSourceURL(Handle<Script> script) {
  Isolate* isolate = script->GetIsolate();
  Handle<String> name_or_source_url_key =
      isolate->factory()->InternalizeOneByteString(
          STATIC_ASCII_VECTOR("nameOrSourceURL"));
  Handle<JSObject> script_wrapper = Script::GetWrapper(script);
  Handle<Object> property = Object::GetProperty(
      script_wrapper, name_or_source_url_key).ToHandleChecked();
  ASSERT(property->IsJSFunction());
  Handle<JSFunction> method = Handle<JSFunction>::cast(property);
  Handle<Object> result;
  // Do not check against pending exception, since this function may be called
  // when an exception has already been pending.
  if (!Execution::TryCall(method, script_wrapper, 0, NULL).ToHandle(&result)) {
    return isolate->factory()->undefined_value();
  }
  return result;
}


// Wrappers for scripts are kept alive and cached in weak global
// handles referred from foreign objects held by the scripts as long as
// they are used. When they are not used anymore, the garbage
// collector will call the weak callback on the global handle
// associated with the wrapper and get rid of both the wrapper and the
// handle.
10295 static void ClearWrapperCache( 10296 const v8::WeakCallbackData<v8::Value, void>& data) { 10297 Object** location = reinterpret_cast<Object**>(data.GetParameter()); 10298 JSValue* wrapper = JSValue::cast(*location); 10299 Foreign* foreign = Script::cast(wrapper->value())->wrapper(); 10300 ASSERT_EQ(foreign->foreign_address(), reinterpret_cast<Address>(location)); 10301 foreign->set_foreign_address(0); 10302 GlobalHandles::Destroy(location); 10303 Isolate* isolate = reinterpret_cast<Isolate*>(data.GetIsolate()); 10304 isolate->counters()->script_wrappers()->Decrement(); 10305 } 10306 10307 10308 Handle<JSObject> Script::GetWrapper(Handle<Script> script) { 10309 if (script->wrapper()->foreign_address() != NULL) { 10310 // Return a handle for the existing script wrapper from the cache. 10311 return Handle<JSValue>( 10312 *reinterpret_cast<JSValue**>(script->wrapper()->foreign_address())); 10313 } 10314 Isolate* isolate = script->GetIsolate(); 10315 // Construct a new script wrapper. 10316 isolate->counters()->script_wrappers()->Increment(); 10317 Handle<JSFunction> constructor = isolate->script_function(); 10318 Handle<JSValue> result = 10319 Handle<JSValue>::cast(isolate->factory()->NewJSObject(constructor)); 10320 10321 result->set_value(*script); 10322 10323 // Create a new weak global handle and use it to cache the wrapper 10324 // for future use. The cache will automatically be cleared by the 10325 // garbage collector when it is not used anymore. 
10326 Handle<Object> handle = isolate->global_handles()->Create(*result); 10327 GlobalHandles::MakeWeak(handle.location(), 10328 reinterpret_cast<void*>(handle.location()), 10329 &ClearWrapperCache); 10330 script->wrapper()->set_foreign_address( 10331 reinterpret_cast<Address>(handle.location())); 10332 return result; 10333 } 10334 10335 10336 String* SharedFunctionInfo::DebugName() { 10337 Object* n = name(); 10338 if (!n->IsString() || String::cast(n)->length() == 0) return inferred_name(); 10339 return String::cast(n); 10340 } 10341 10342 10343 bool SharedFunctionInfo::HasSourceCode() { 10344 return !script()->IsUndefined() && 10345 !reinterpret_cast<Script*>(script())->source()->IsUndefined(); 10346 } 10347 10348 10349 Handle<Object> SharedFunctionInfo::GetSourceCode() { 10350 if (!HasSourceCode()) return GetIsolate()->factory()->undefined_value(); 10351 Handle<String> source(String::cast(Script::cast(script())->source())); 10352 return GetIsolate()->factory()->NewSubString( 10353 source, start_position(), end_position()); 10354 } 10355 10356 10357 bool SharedFunctionInfo::IsInlineable() { 10358 // Check that the function has a script associated with it. 10359 if (!script()->IsScript()) return false; 10360 if (optimization_disabled()) return false; 10361 // If we never ran this (unlikely) then lets try to optimize it. 
10362 if (code()->kind() != Code::FUNCTION) return true; 10363 return code()->optimizable(); 10364 } 10365 10366 10367 int SharedFunctionInfo::SourceSize() { 10368 return end_position() - start_position(); 10369 } 10370 10371 10372 int SharedFunctionInfo::CalculateInstanceSize() { 10373 int instance_size = 10374 JSObject::kHeaderSize + 10375 expected_nof_properties() * kPointerSize; 10376 if (instance_size > JSObject::kMaxInstanceSize) { 10377 instance_size = JSObject::kMaxInstanceSize; 10378 } 10379 return instance_size; 10380 } 10381 10382 10383 int SharedFunctionInfo::CalculateInObjectProperties() { 10384 return (CalculateInstanceSize() - JSObject::kHeaderSize) / kPointerSize; 10385 } 10386 10387 10388 // Support function for printing the source code to a StringStream 10389 // without any allocation in the heap. 10390 void SharedFunctionInfo::SourceCodePrint(StringStream* accumulator, 10391 int max_length) { 10392 // For some native functions there is no source. 10393 if (!HasSourceCode()) { 10394 accumulator->Add("<No Source>"); 10395 return; 10396 } 10397 10398 // Get the source for the script which this function came from. 10399 // Don't use String::cast because we don't want more assertion errors while 10400 // we are already creating a stack dump. 
10401 String* script_source = 10402 reinterpret_cast<String*>(Script::cast(script())->source()); 10403 10404 if (!script_source->LooksValid()) { 10405 accumulator->Add("<Invalid Source>"); 10406 return; 10407 } 10408 10409 if (!is_toplevel()) { 10410 accumulator->Add("function "); 10411 Object* name = this->name(); 10412 if (name->IsString() && String::cast(name)->length() > 0) { 10413 accumulator->PrintName(name); 10414 } 10415 } 10416 10417 int len = end_position() - start_position(); 10418 if (len <= max_length || max_length < 0) { 10419 accumulator->Put(script_source, start_position(), end_position()); 10420 } else { 10421 accumulator->Put(script_source, 10422 start_position(), 10423 start_position() + max_length); 10424 accumulator->Add("...\n"); 10425 } 10426 } 10427 10428 10429 static bool IsCodeEquivalent(Code* code, Code* recompiled) { 10430 if (code->instruction_size() != recompiled->instruction_size()) return false; 10431 ByteArray* code_relocation = code->relocation_info(); 10432 ByteArray* recompiled_relocation = recompiled->relocation_info(); 10433 int length = code_relocation->length(); 10434 if (length != recompiled_relocation->length()) return false; 10435 int compare = memcmp(code_relocation->GetDataStartAddress(), 10436 recompiled_relocation->GetDataStartAddress(), 10437 length); 10438 return compare == 0; 10439 } 10440 10441 10442 void SharedFunctionInfo::EnableDeoptimizationSupport(Code* recompiled) { 10443 ASSERT(!has_deoptimization_support()); 10444 DisallowHeapAllocation no_allocation; 10445 Code* code = this->code(); 10446 if (IsCodeEquivalent(code, recompiled)) { 10447 // Copy the deoptimization data from the recompiled code. 10448 code->set_deoptimization_data(recompiled->deoptimization_data()); 10449 code->set_has_deoptimization_support(true); 10450 } else { 10451 // TODO(3025757): In case the recompiled isn't equivalent to the 10452 // old code, we have to replace it. 
We should try to avoid this 10453 // altogether because it flushes valuable type feedback by 10454 // effectively resetting all IC state. 10455 ReplaceCode(recompiled); 10456 } 10457 ASSERT(has_deoptimization_support()); 10458 } 10459 10460 10461 void SharedFunctionInfo::DisableOptimization(BailoutReason reason) { 10462 // Disable optimization for the shared function info and mark the 10463 // code as non-optimizable. The marker on the shared function info 10464 // is there because we flush non-optimized code thereby loosing the 10465 // non-optimizable information for the code. When the code is 10466 // regenerated and set on the shared function info it is marked as 10467 // non-optimizable if optimization is disabled for the shared 10468 // function info. 10469 set_optimization_disabled(true); 10470 set_bailout_reason(reason); 10471 // Code should be the lazy compilation stub or else unoptimized. If the 10472 // latter, disable optimization for the code too. 10473 ASSERT(code()->kind() == Code::FUNCTION || code()->kind() == Code::BUILTIN); 10474 if (code()->kind() == Code::FUNCTION) { 10475 code()->set_optimizable(false); 10476 } 10477 PROFILE(GetIsolate(), CodeDisableOptEvent(code(), this)); 10478 if (FLAG_trace_opt) { 10479 PrintF("[disabled optimization for "); 10480 ShortPrint(); 10481 PrintF(", reason: %s]\n", GetBailoutReason(reason)); 10482 } 10483 } 10484 10485 10486 bool SharedFunctionInfo::VerifyBailoutId(BailoutId id) { 10487 ASSERT(!id.IsNone()); 10488 Code* unoptimized = code(); 10489 DeoptimizationOutputData* data = 10490 DeoptimizationOutputData::cast(unoptimized->deoptimization_data()); 10491 unsigned ignore = Deoptimizer::GetOutputInfo(data, id, this); 10492 USE(ignore); 10493 return true; // Return true if there was no ASSERT. 
10494 } 10495 10496 10497 void JSFunction::StartInobjectSlackTracking() { 10498 ASSERT(has_initial_map() && !IsInobjectSlackTrackingInProgress()); 10499 10500 if (!FLAG_clever_optimizations) return; 10501 Map* map = initial_map(); 10502 10503 // Only initiate the tracking the first time. 10504 if (map->done_inobject_slack_tracking()) return; 10505 map->set_done_inobject_slack_tracking(true); 10506 10507 // No tracking during the snapshot construction phase. 10508 Isolate* isolate = GetIsolate(); 10509 if (isolate->serializer_enabled()) return; 10510 10511 if (map->unused_property_fields() == 0) return; 10512 10513 map->set_construction_count(kGenerousAllocationCount); 10514 } 10515 10516 10517 void SharedFunctionInfo::ResetForNewContext(int new_ic_age) { 10518 code()->ClearInlineCaches(); 10519 // If we clear ICs, we need to clear the type feedback vector too, since 10520 // CallICs are synced with a feedback vector slot. 10521 ClearTypeFeedbackInfo(); 10522 set_ic_age(new_ic_age); 10523 if (code()->kind() == Code::FUNCTION) { 10524 code()->set_profiler_ticks(0); 10525 if (optimization_disabled() && 10526 opt_count() >= FLAG_max_opt_count) { 10527 // Re-enable optimizations if they were disabled due to opt_count limit. 
10528 set_optimization_disabled(false); 10529 code()->set_optimizable(true); 10530 } 10531 set_opt_count(0); 10532 set_deopt_count(0); 10533 } 10534 } 10535 10536 10537 static void GetMinInobjectSlack(Map* map, void* data) { 10538 int slack = map->unused_property_fields(); 10539 if (*reinterpret_cast<int*>(data) > slack) { 10540 *reinterpret_cast<int*>(data) = slack; 10541 } 10542 } 10543 10544 10545 static void ShrinkInstanceSize(Map* map, void* data) { 10546 int slack = *reinterpret_cast<int*>(data); 10547 map->set_inobject_properties(map->inobject_properties() - slack); 10548 map->set_unused_property_fields(map->unused_property_fields() - slack); 10549 map->set_instance_size(map->instance_size() - slack * kPointerSize); 10550 10551 // Visitor id might depend on the instance size, recalculate it. 10552 map->set_visitor_id(StaticVisitorBase::GetVisitorId(map)); 10553 } 10554 10555 10556 void JSFunction::CompleteInobjectSlackTracking() { 10557 ASSERT(has_initial_map()); 10558 Map* map = initial_map(); 10559 10560 ASSERT(map->done_inobject_slack_tracking()); 10561 map->set_construction_count(kNoSlackTracking); 10562 10563 int slack = map->unused_property_fields(); 10564 map->TraverseTransitionTree(&GetMinInobjectSlack, &slack); 10565 if (slack != 0) { 10566 // Resize the initial map and all maps in its transition tree. 
10567 map->TraverseTransitionTree(&ShrinkInstanceSize, &slack); 10568 } 10569 } 10570 10571 10572 int SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context, 10573 BailoutId osr_ast_id) { 10574 DisallowHeapAllocation no_gc; 10575 ASSERT(native_context->IsNativeContext()); 10576 if (!FLAG_cache_optimized_code) return -1; 10577 Object* value = optimized_code_map(); 10578 if (!value->IsSmi()) { 10579 FixedArray* optimized_code_map = FixedArray::cast(value); 10580 int length = optimized_code_map->length(); 10581 Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt()); 10582 for (int i = kEntriesStart; i < length; i += kEntryLength) { 10583 if (optimized_code_map->get(i + kContextOffset) == native_context && 10584 optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) { 10585 return i + kCachedCodeOffset; 10586 } 10587 } 10588 if (FLAG_trace_opt) { 10589 PrintF("[didn't find optimized code in optimized code map for "); 10590 ShortPrint(); 10591 PrintF("]\n"); 10592 } 10593 } 10594 return -1; 10595 } 10596 10597 10598 #define DECLARE_TAG(ignore1, name, ignore2) name, 10599 const char* const VisitorSynchronization::kTags[ 10600 VisitorSynchronization::kNumberOfSyncTags] = { 10601 VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG) 10602 }; 10603 #undef DECLARE_TAG 10604 10605 10606 #define DECLARE_TAG(ignore1, ignore2, name) name, 10607 const char* const VisitorSynchronization::kTagNames[ 10608 VisitorSynchronization::kNumberOfSyncTags] = { 10609 VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG) 10610 }; 10611 #undef DECLARE_TAG 10612 10613 10614 void ObjectVisitor::VisitCodeTarget(RelocInfo* rinfo) { 10615 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); 10616 Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address()); 10617 Object* old_target = target; 10618 VisitPointer(&target); 10619 CHECK_EQ(target, old_target); // VisitPointer doesn't change Code* *target. 
10620 } 10621 10622 10623 void ObjectVisitor::VisitCodeAgeSequence(RelocInfo* rinfo) { 10624 ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode())); 10625 Object* stub = rinfo->code_age_stub(); 10626 if (stub) { 10627 VisitPointer(&stub); 10628 } 10629 } 10630 10631 10632 void ObjectVisitor::VisitCodeEntry(Address entry_address) { 10633 Object* code = Code::GetObjectFromEntryAddress(entry_address); 10634 Object* old_code = code; 10635 VisitPointer(&code); 10636 if (code != old_code) { 10637 Memory::Address_at(entry_address) = reinterpret_cast<Code*>(code)->entry(); 10638 } 10639 } 10640 10641 10642 void ObjectVisitor::VisitCell(RelocInfo* rinfo) { 10643 ASSERT(rinfo->rmode() == RelocInfo::CELL); 10644 Object* cell = rinfo->target_cell(); 10645 Object* old_cell = cell; 10646 VisitPointer(&cell); 10647 if (cell != old_cell) { 10648 rinfo->set_target_cell(reinterpret_cast<Cell*>(cell)); 10649 } 10650 } 10651 10652 10653 void ObjectVisitor::VisitDebugTarget(RelocInfo* rinfo) { 10654 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) && 10655 rinfo->IsPatchedReturnSequence()) || 10656 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && 10657 rinfo->IsPatchedDebugBreakSlotSequence())); 10658 Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address()); 10659 Object* old_target = target; 10660 VisitPointer(&target); 10661 CHECK_EQ(target, old_target); // VisitPointer doesn't change Code* *target. 
10662 } 10663 10664 10665 void ObjectVisitor::VisitEmbeddedPointer(RelocInfo* rinfo) { 10666 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT); 10667 Object* p = rinfo->target_object(); 10668 VisitPointer(&p); 10669 } 10670 10671 10672 void ObjectVisitor::VisitExternalReference(RelocInfo* rinfo) { 10673 Address p = rinfo->target_reference(); 10674 VisitExternalReference(&p); 10675 } 10676 10677 10678 void Code::InvalidateRelocation() { 10679 set_relocation_info(GetHeap()->empty_byte_array()); 10680 } 10681 10682 10683 void Code::InvalidateEmbeddedObjects() { 10684 Object* undefined = GetHeap()->undefined_value(); 10685 Cell* undefined_cell = GetHeap()->undefined_cell(); 10686 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) | 10687 RelocInfo::ModeMask(RelocInfo::CELL); 10688 for (RelocIterator it(this, mode_mask); !it.done(); it.next()) { 10689 RelocInfo::Mode mode = it.rinfo()->rmode(); 10690 if (mode == RelocInfo::EMBEDDED_OBJECT) { 10691 it.rinfo()->set_target_object(undefined, SKIP_WRITE_BARRIER); 10692 } else if (mode == RelocInfo::CELL) { 10693 it.rinfo()->set_target_cell(undefined_cell, SKIP_WRITE_BARRIER); 10694 } 10695 } 10696 } 10697 10698 10699 void Code::Relocate(intptr_t delta) { 10700 for (RelocIterator it(this, RelocInfo::kApplyMask); !it.done(); it.next()) { 10701 it.rinfo()->apply(delta, SKIP_ICACHE_FLUSH); 10702 } 10703 CPU::FlushICache(instruction_start(), instruction_size()); 10704 } 10705 10706 10707 void Code::CopyFrom(const CodeDesc& desc) { 10708 ASSERT(Marking::Color(this) == Marking::WHITE_OBJECT); 10709 10710 // copy code 10711 CopyBytes(instruction_start(), desc.buffer, 10712 static_cast<size_t>(desc.instr_size)); 10713 10714 // copy reloc info 10715 CopyBytes(relocation_start(), 10716 desc.buffer + desc.buffer_size - desc.reloc_size, 10717 static_cast<size_t>(desc.reloc_size)); 10718 10719 // unbox handles and relocate 10720 intptr_t delta = instruction_start() - desc.buffer; 10721 int mode_mask = 
RelocInfo::kCodeTargetMask | 10722 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) | 10723 RelocInfo::ModeMask(RelocInfo::CELL) | 10724 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) | 10725 RelocInfo::kApplyMask; 10726 // Needed to find target_object and runtime_entry on X64 10727 Assembler* origin = desc.origin; 10728 AllowDeferredHandleDereference embedding_raw_address; 10729 for (RelocIterator it(this, mode_mask); !it.done(); it.next()) { 10730 RelocInfo::Mode mode = it.rinfo()->rmode(); 10731 if (mode == RelocInfo::EMBEDDED_OBJECT) { 10732 Handle<Object> p = it.rinfo()->target_object_handle(origin); 10733 it.rinfo()->set_target_object(*p, SKIP_WRITE_BARRIER, SKIP_ICACHE_FLUSH); 10734 } else if (mode == RelocInfo::CELL) { 10735 Handle<Cell> cell = it.rinfo()->target_cell_handle(); 10736 it.rinfo()->set_target_cell(*cell, SKIP_WRITE_BARRIER, SKIP_ICACHE_FLUSH); 10737 } else if (RelocInfo::IsCodeTarget(mode)) { 10738 // rewrite code handles in inline cache targets to direct 10739 // pointers to the first instruction in the code object 10740 Handle<Object> p = it.rinfo()->target_object_handle(origin); 10741 Code* code = Code::cast(*p); 10742 it.rinfo()->set_target_address(code->instruction_start(), 10743 SKIP_WRITE_BARRIER, 10744 SKIP_ICACHE_FLUSH); 10745 } else if (RelocInfo::IsRuntimeEntry(mode)) { 10746 Address p = it.rinfo()->target_runtime_entry(origin); 10747 it.rinfo()->set_target_runtime_entry(p, SKIP_WRITE_BARRIER, 10748 SKIP_ICACHE_FLUSH); 10749 } else if (mode == RelocInfo::CODE_AGE_SEQUENCE) { 10750 Handle<Object> p = it.rinfo()->code_age_stub_handle(origin); 10751 Code* code = Code::cast(*p); 10752 it.rinfo()->set_code_age_stub(code, SKIP_ICACHE_FLUSH); 10753 } else { 10754 it.rinfo()->apply(delta, SKIP_ICACHE_FLUSH); 10755 } 10756 } 10757 CPU::FlushICache(instruction_start(), instruction_size()); 10758 } 10759 10760 10761 // Locate the source position which is closest to the address in the code. 
This 10762 // is using the source position information embedded in the relocation info. 10763 // The position returned is relative to the beginning of the script where the 10764 // source for this function is found. 10765 int Code::SourcePosition(Address pc) { 10766 int distance = kMaxInt; 10767 int position = RelocInfo::kNoPosition; // Initially no position found. 10768 // Run through all the relocation info to find the best matching source 10769 // position. All the code needs to be considered as the sequence of the 10770 // instructions in the code does not necessarily follow the same order as the 10771 // source. 10772 RelocIterator it(this, RelocInfo::kPositionMask); 10773 while (!it.done()) { 10774 // Only look at positions after the current pc. 10775 if (it.rinfo()->pc() < pc) { 10776 // Get position and distance. 10777 10778 int dist = static_cast<int>(pc - it.rinfo()->pc()); 10779 int pos = static_cast<int>(it.rinfo()->data()); 10780 // If this position is closer than the current candidate or if it has the 10781 // same distance as the current candidate and the position is higher then 10782 // this position is the new candidate. 10783 if ((dist < distance) || 10784 (dist == distance && pos > position)) { 10785 position = pos; 10786 distance = dist; 10787 } 10788 } 10789 it.next(); 10790 } 10791 return position; 10792 } 10793 10794 10795 // Same as Code::SourcePosition above except it only looks for statement 10796 // positions. 10797 int Code::SourceStatementPosition(Address pc) { 10798 // First find the position as close as possible using all position 10799 // information. 10800 int position = SourcePosition(pc); 10801 // Now find the closest statement position before the position. 
10802 int statement_position = 0; 10803 RelocIterator it(this, RelocInfo::kPositionMask); 10804 while (!it.done()) { 10805 if (RelocInfo::IsStatementPosition(it.rinfo()->rmode())) { 10806 int p = static_cast<int>(it.rinfo()->data()); 10807 if (statement_position < p && p <= position) { 10808 statement_position = p; 10809 } 10810 } 10811 it.next(); 10812 } 10813 return statement_position; 10814 } 10815 10816 10817 SafepointEntry Code::GetSafepointEntry(Address pc) { 10818 SafepointTable table(this); 10819 return table.FindEntry(pc); 10820 } 10821 10822 10823 Object* Code::FindNthObject(int n, Map* match_map) { 10824 ASSERT(is_inline_cache_stub()); 10825 DisallowHeapAllocation no_allocation; 10826 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); 10827 for (RelocIterator it(this, mask); !it.done(); it.next()) { 10828 RelocInfo* info = it.rinfo(); 10829 Object* object = info->target_object(); 10830 if (object->IsHeapObject()) { 10831 if (HeapObject::cast(object)->map() == match_map) { 10832 if (--n == 0) return object; 10833 } 10834 } 10835 } 10836 return NULL; 10837 } 10838 10839 10840 AllocationSite* Code::FindFirstAllocationSite() { 10841 Object* result = FindNthObject(1, GetHeap()->allocation_site_map()); 10842 return (result != NULL) ? AllocationSite::cast(result) : NULL; 10843 } 10844 10845 10846 Map* Code::FindFirstMap() { 10847 Object* result = FindNthObject(1, GetHeap()->meta_map()); 10848 return (result != NULL) ? 
Map::cast(result) : NULL; 10849 } 10850 10851 10852 void Code::FindAndReplace(const FindAndReplacePattern& pattern) { 10853 ASSERT(is_inline_cache_stub() || is_handler()); 10854 DisallowHeapAllocation no_allocation; 10855 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); 10856 STATIC_ASSERT(FindAndReplacePattern::kMaxCount < 32); 10857 int current_pattern = 0; 10858 for (RelocIterator it(this, mask); !it.done(); it.next()) { 10859 RelocInfo* info = it.rinfo(); 10860 Object* object = info->target_object(); 10861 if (object->IsHeapObject()) { 10862 Map* map = HeapObject::cast(object)->map(); 10863 if (map == *pattern.find_[current_pattern]) { 10864 info->set_target_object(*pattern.replace_[current_pattern]); 10865 if (++current_pattern == pattern.count_) return; 10866 } 10867 } 10868 } 10869 UNREACHABLE(); 10870 } 10871 10872 10873 void Code::FindAllMaps(MapHandleList* maps) { 10874 ASSERT(is_inline_cache_stub()); 10875 DisallowHeapAllocation no_allocation; 10876 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); 10877 for (RelocIterator it(this, mask); !it.done(); it.next()) { 10878 RelocInfo* info = it.rinfo(); 10879 Object* object = info->target_object(); 10880 if (object->IsMap()) maps->Add(handle(Map::cast(object))); 10881 } 10882 } 10883 10884 10885 Code* Code::FindFirstHandler() { 10886 ASSERT(is_inline_cache_stub()); 10887 DisallowHeapAllocation no_allocation; 10888 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET); 10889 for (RelocIterator it(this, mask); !it.done(); it.next()) { 10890 RelocInfo* info = it.rinfo(); 10891 Code* code = Code::GetCodeFromTargetAddress(info->target_address()); 10892 if (code->kind() == Code::HANDLER) return code; 10893 } 10894 return NULL; 10895 } 10896 10897 10898 bool Code::FindHandlers(CodeHandleList* code_list, int length) { 10899 ASSERT(is_inline_cache_stub()); 10900 DisallowHeapAllocation no_allocation; 10901 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET); 10902 int i = 0; 10903 for 
(RelocIterator it(this, mask); !it.done(); it.next()) { 10904 if (i == length) return true; 10905 RelocInfo* info = it.rinfo(); 10906 Code* code = Code::GetCodeFromTargetAddress(info->target_address()); 10907 // IC stubs with handlers never contain non-handler code objects before 10908 // handler targets. 10909 if (code->kind() != Code::HANDLER) break; 10910 code_list->Add(Handle<Code>(code)); 10911 i++; 10912 } 10913 return i == length; 10914 } 10915 10916 10917 Name* Code::FindFirstName() { 10918 ASSERT(is_inline_cache_stub()); 10919 DisallowHeapAllocation no_allocation; 10920 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); 10921 for (RelocIterator it(this, mask); !it.done(); it.next()) { 10922 RelocInfo* info = it.rinfo(); 10923 Object* object = info->target_object(); 10924 if (object->IsName()) return Name::cast(object); 10925 } 10926 return NULL; 10927 } 10928 10929 10930 void Code::ClearInlineCaches() { 10931 ClearInlineCaches(NULL); 10932 } 10933 10934 10935 void Code::ClearInlineCaches(Code::Kind kind) { 10936 ClearInlineCaches(&kind); 10937 } 10938 10939 10940 void Code::ClearInlineCaches(Code::Kind* kind) { 10941 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) | 10942 RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) | 10943 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID); 10944 for (RelocIterator it(this, mask); !it.done(); it.next()) { 10945 RelocInfo* info = it.rinfo(); 10946 Code* target(Code::GetCodeFromTargetAddress(info->target_address())); 10947 if (target->is_inline_cache_stub()) { 10948 if (kind == NULL || *kind == target->kind()) { 10949 IC::Clear(this->GetIsolate(), info->pc(), 10950 info->host()->constant_pool()); 10951 } 10952 } 10953 } 10954 } 10955 10956 10957 void SharedFunctionInfo::ClearTypeFeedbackInfo() { 10958 FixedArray* vector = feedback_vector(); 10959 Heap* heap = GetHeap(); 10960 int length = vector->length(); 10961 10962 for (int i = 0; i < length; i++) { 10963 Object* obj = vector->get(i); 10964 if 
(obj->IsHeapObject()) {
      InstanceType instance_type =
          HeapObject::cast(obj)->map()->instance_type();
      switch (instance_type) {
        case ALLOCATION_SITE_TYPE:
          // AllocationSites are not cleared because they do not store
          // information that leaks.
          break;
          // Fall through...
        default:
          vector->set(i, TypeFeedbackInfo::RawUninitializedSentinel(heap),
                      SKIP_WRITE_BARRIER);
      }
    }
  }
}


// Maps a pc offset within this (full-codegen) code object back to the AST id
// of the loop whose back edge is recorded at that offset.  Linear search of
// the back edge table; returns BailoutId::None() when no entry matches.
BailoutId Code::TranslatePcOffsetToAstId(uint32_t pc_offset) {
  DisallowHeapAllocation no_gc;
  ASSERT(kind() == FUNCTION);
  BackEdgeTable back_edges(this, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    if (back_edges.pc_offset(i) == pc_offset) return back_edges.ast_id(i);
  }
  return BailoutId::None();
}


// Inverse of TranslatePcOffsetToAstId.  Unlike that function, the given
// AST id is required to be present in the back edge table.
uint32_t Code::TranslateAstIdToPcOffset(BailoutId ast_id) {
  DisallowHeapAllocation no_gc;
  ASSERT(kind() == FUNCTION);
  BackEdgeTable back_edges(this, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    if (back_edges.ast_id(i) == ast_id) return back_edges.pc_offset(i);
  }
  UNREACHABLE();  // We expect to find the back edge.
  return 0;
}


// Rewrites the code age sequence at |sequence| back to the "young" state.
void Code::MakeCodeAgeSequenceYoung(byte* sequence, Isolate* isolate) {
  PatchPlatformCodeAge(isolate, sequence, kNoAgeCodeAge, NO_MARKING_PARITY);
}


// Patches the age sequence to record that the code has now been executed
// (at least) once.
void Code::MarkCodeAsExecuted(byte* sequence, Isolate* isolate) {
  PatchPlatformCodeAge(isolate, sequence, kExecutedOnceCodeAge,
                       NO_MARKING_PARITY);
}


// Folds the two execution-count pseudo-ages into the concrete age they
// behave as for code-flushing decisions.
static Code::Age EffectiveAge(Code::Age age) {
  if (age == Code::kNotExecutedCodeAge) {
    // Treat code that's never been executed as old immediately.
    age = Code::kIsOldCodeAge;
  } else if (age == Code::kExecutedOnceCodeAge) {
    // Pre-age code that has only been executed once.
    age = Code::kPreAgedCodeAge;
  }
  return age;
}


// Advances this code object's age by one step, at most once per GC cycle:
// a matching marking parity means the sequence was already aged this cycle.
void Code::MakeOlder(MarkingParity current_parity) {
  byte* sequence = FindCodeAgeSequence();
  if (sequence != NULL) {
    Age age;
    MarkingParity code_parity;
    Isolate* isolate = GetIsolate();
    GetCodeAgeAndParity(isolate, sequence, &age, &code_parity);
    age = EffectiveAge(age);
    if (age != kLastCodeAge && code_parity != current_parity) {
      PatchPlatformCodeAge(isolate,
                           sequence,
                           static_cast<Age>(age + 1),
                           current_parity);
    }
  }
}


bool Code::IsOld() {
  return GetAge() >= kIsOldCodeAge;
}


// Returns the address of the code age patch sequence (in the prologue), or
// NULL when this code object is not subject to aging (flag off, no recorded
// prologue offset, or a kind/debug configuration that is never aged).
byte* Code::FindCodeAgeSequence() {
  return FLAG_age_code &&
      prologue_offset() != Code::kPrologueOffsetNotSet &&
      (kind() == OPTIMIZED_FUNCTION ||
       (kind() == FUNCTION && !has_debug_break_slots()))
      ? instruction_start() + prologue_offset()
      : NULL;
}


Code::Age Code::GetAge() {
  return EffectiveAge(GetRawAge());
}


// Like GetAge(), but without folding the execution-count pseudo-ages.
Code::Age Code::GetRawAge() {
  byte* sequence = FindCodeAgeSequence();
  if (sequence == NULL) {
    return kNoAgeCodeAge;
  }
  Age age;
  MarkingParity parity;
  GetCodeAgeAndParity(GetIsolate(), sequence, &age, &parity);
  return age;
}


// Decodes age and marking parity by matching |code| (the stub the age
// sequence calls) against every age/parity builtin.
void Code::GetCodeAgeAndParity(Code* code, Age* age,
                               MarkingParity* parity) {
  Isolate* isolate = code->GetIsolate();
  Builtins* builtins = isolate->builtins();
  Code* stub = NULL;
#define HANDLE_CODE_AGE(AGE)                                            \
  stub = *builtins->Make##AGE##CodeYoungAgainEvenMarking();             \
  if (code == stub) {                                                   \
    *age = k##AGE##CodeAge;                                             \
    *parity = EVEN_MARKING_PARITY;                                      \
    return;                                                             \
  }                                                                     \
  stub = *builtins->Make##AGE##CodeYoungAgainOddMarking();              \
  if (code == stub) {                                                   \
    *age = k##AGE##CodeAge;                                             \
    *parity = ODD_MARKING_PARITY;                                       \
    return;                                                             \
  }
  CODE_AGE_LIST(HANDLE_CODE_AGE)
#undef HANDLE_CODE_AGE
  stub = *builtins->MarkCodeAsExecutedOnce();
  if (code == stub) {
    *age = kNotExecutedCodeAge;
    *parity = NO_MARKING_PARITY;
    return;
  }
  stub = *builtins->MarkCodeAsExecutedTwice();
  if (code == stub) {
    *age = kExecutedOnceCodeAge;
    *parity = NO_MARKING_PARITY;
    return;
  }
  UNREACHABLE();
}


// Inverse of GetCodeAgeAndParity: returns the builtin stub that encodes the
// given age/parity combination in a patched age sequence.
Code* Code::GetCodeAgeStub(Isolate* isolate, Age age, MarkingParity parity) {
  Builtins* builtins = isolate->builtins();
  switch (age) {
#define HANDLE_CODE_AGE(AGE)                                            \
    case k##AGE##CodeAge: {                                             \
      Code* stub = parity == EVEN_MARKING_PARITY                        \
          ? *builtins->Make##AGE##CodeYoungAgainEvenMarking()           \
          : *builtins->Make##AGE##CodeYoungAgainOddMarking();           \
      return stub;                                                      \
    }
    CODE_AGE_LIST(HANDLE_CODE_AGE)
#undef HANDLE_CODE_AGE
    case kNotExecutedCodeAge: {
      ASSERT(parity == NO_MARKING_PARITY);
      return *builtins->MarkCodeAsExecutedOnce();
    }
    case kExecutedOnceCodeAge: {
      ASSERT(parity == NO_MARKING_PARITY);
      return *builtins->MarkCodeAsExecutedTwice();
    }
    default:
      UNREACHABLE();
      break;
  }
  return NULL;
}


// Prints the assembler comment preceding the deopt runtime entry whose
// deoptimization id matches |bailout_id|, if such an entry exists.
void Code::PrintDeoptLocation(FILE* out, int bailout_id) {
  const char* last_comment = NULL;
  int mask = RelocInfo::ModeMask(RelocInfo::COMMENT)
      | RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
  for (RelocIterator it(this, mask); !it.done(); it.next()) {
    RelocInfo* info = it.rinfo();
    if (info->rmode() == RelocInfo::COMMENT) {
      last_comment = reinterpret_cast<const char*>(info->data());
    } else if (last_comment != NULL) {
      if ((bailout_id == Deoptimizer::GetDeoptimizationId(
              GetIsolate(), info->target_address(), Deoptimizer::EAGER)) ||
          (bailout_id == Deoptimizer::GetDeoptimizationId(
              GetIsolate(),
info->target_address(), Deoptimizer::SOFT))) {
        CHECK(RelocInfo::IsRuntimeEntry(info->rmode()));
        PrintF(out, " %s\n", last_comment);
        return;
      }
    }
  }
}


// Returns true if |pc| is one of the deoptimization targets recorded in
// this code object's DeoptimizationInputData (a Pc value of -1 marks an
// entry with no recorded pc and is skipped).
bool Code::CanDeoptAt(Address pc) {
  DeoptimizationInputData* deopt_data =
      DeoptimizationInputData::cast(deoptimization_data());
  Address code_start_address = instruction_start();
  for (int i = 0; i < deopt_data->DeoptCount(); i++) {
    if (deopt_data->Pc(i)->value() == -1) continue;
    Address address = code_start_address + deopt_data->Pc(i)->value();
    if (address == pc) return true;
  }
  return false;
}


// Identify kind of code.
const char* Code::Kind2String(Kind kind) {
  switch (kind) {
#define CASE(name) case name: return #name;
    CODE_KIND_LIST(CASE)
#undef CASE
    case NUMBER_OF_KINDS: break;
  }
  UNREACHABLE();
  return NULL;
}


#ifdef ENABLE_DISASSEMBLER

// Dumps every deopt point of an optimized code object; with
// --print-code-verbose it also decodes each point's frame translation.
void DeoptimizationInputData::DeoptimizationInputDataPrint(FILE* out) {
  disasm::NameConverter converter;
  int deopt_count = DeoptCount();
  PrintF(out, "Deoptimization Input Data (deopt points = %d)\n", deopt_count);
  if (0 == deopt_count) return;

  PrintF(out, "%6s %6s %6s %6s %12s\n", "index", "ast id", "argc", "pc",
         FLAG_print_code_verbose ? "commands" : "");
  for (int i = 0; i < deopt_count; i++) {
    PrintF(out, "%6d %6d %6d %6d",
           i,
           AstId(i).ToInt(),
           ArgumentsStackHeight(i)->value(),
           Pc(i)->value());

    if (!FLAG_print_code_verbose) {
      PrintF(out, "\n");
      continue;
    }
    // Print details of the frame translation.
    int translation_index = TranslationIndex(i)->value();
    TranslationIterator iterator(TranslationByteArray(), translation_index);
    Translation::Opcode opcode =
        static_cast<Translation::Opcode>(iterator.Next());
    ASSERT(Translation::BEGIN == opcode);
    int frame_count = iterator.Next();
    int jsframe_count = iterator.Next();
    PrintF(out, " %s {frame count=%d, js frame count=%d}\n",
           Translation::StringFor(opcode),
           frame_count,
           jsframe_count);

    // Decode translation commands until the next BEGIN (the start of the
    // following deopt point's translation) or the end of the byte array.
    while (iterator.HasNext() &&
           Translation::BEGIN !=
           (opcode = static_cast<Translation::Opcode>(iterator.Next()))) {
      PrintF(out, "%24s %s ", "", Translation::StringFor(opcode));

      switch (opcode) {
        case Translation::BEGIN:
          UNREACHABLE();
          break;

        case Translation::JS_FRAME: {
          int ast_id = iterator.Next();
          int function_id = iterator.Next();
          unsigned height = iterator.Next();
          PrintF(out, "{ast_id=%d, function=", ast_id);
          if (function_id != Translation::kSelfLiteralId) {
            Object* function = LiteralArray()->get(function_id);
            JSFunction::cast(function)->PrintName(out);
          } else {
            PrintF(out, "<self>");
          }
          PrintF(out, ", height=%u}", height);
          break;
        }

        case Translation::COMPILED_STUB_FRAME: {
          Code::Kind stub_kind = static_cast<Code::Kind>(iterator.Next());
          PrintF(out, "{kind=%d}", stub_kind);
          break;
        }

        case Translation::ARGUMENTS_ADAPTOR_FRAME:
        case Translation::CONSTRUCT_STUB_FRAME: {
          int function_id = iterator.Next();
          JSFunction* function =
              JSFunction::cast(LiteralArray()->get(function_id));
          unsigned height = iterator.Next();
          PrintF(out, "{function=");
          function->PrintName(out);
          PrintF(out, ", height=%u}", height);
          break;
        }

        case Translation::GETTER_STUB_FRAME:
        case Translation::SETTER_STUB_FRAME: {
          int function_id = iterator.Next();
          JSFunction* function =
              JSFunction::cast(LiteralArray()->get(function_id));
          PrintF(out, "{function=");
          function->PrintName(out);
          PrintF(out, "}");
          break;
        }

        case Translation::REGISTER: {
          int reg_code = iterator.Next();
          PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code));
          break;
        }

        case Translation::INT32_REGISTER: {
          int reg_code = iterator.Next();
          PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code));
          break;
        }

        case Translation::UINT32_REGISTER: {
          int reg_code = iterator.Next();
          PrintF(out, "{input=%s (unsigned)}",
                 converter.NameOfCPURegister(reg_code));
          break;
        }

        case Translation::DOUBLE_REGISTER: {
          int reg_code = iterator.Next();
          PrintF(out, "{input=%s}",
                 DoubleRegister::AllocationIndexToString(reg_code));
          break;
        }

        case Translation::STACK_SLOT: {
          int input_slot_index = iterator.Next();
          PrintF(out, "{input=%d}", input_slot_index);
          break;
        }

        case Translation::INT32_STACK_SLOT: {
          int input_slot_index = iterator.Next();
          PrintF(out, "{input=%d}", input_slot_index);
          break;
        }

        case Translation::UINT32_STACK_SLOT: {
          int input_slot_index = iterator.Next();
          PrintF(out, "{input=%d (unsigned)}", input_slot_index);
          break;
        }

        case Translation::DOUBLE_STACK_SLOT: {
          int input_slot_index = iterator.Next();
          PrintF(out, "{input=%d}", input_slot_index);
          break;
        }

        case Translation::LITERAL: {
          unsigned literal_index = iterator.Next();
          PrintF(out, "{literal_id=%u}", literal_index);
          break;
        }

        case Translation::DUPLICATED_OBJECT: {
          int object_index = iterator.Next();
          PrintF(out, "{object_index=%d}", object_index);
          break;
        }

        case Translation::ARGUMENTS_OBJECT:
        case Translation::CAPTURED_OBJECT: {
          int args_length = iterator.Next();
          PrintF(out, "{length=%d}", args_length);
          break;
        }
      }
      PrintF(out, "\n");
    }
  }
}


// Dumps the ast-id -> pc/state table used by full-codegen code for
// OSR/deopt re-entry.
void DeoptimizationOutputData::DeoptimizationOutputDataPrint(FILE* out) {
  PrintF(out, "Deoptimization Output Data (deopt points = %d)\n",
         this->DeoptPoints());
  if (this->DeoptPoints() == 0) return;

  PrintF(out, "%6s %8s %s\n", "ast id", "pc", "state");
  for (int i = 0; i < this->DeoptPoints(); i++) {
    // pc and state are packed into a single Smi; decode both fields.
    int pc_and_state = this->PcAndState(i)->value();
    PrintF(out, "%6d %8d %s\n",
           this->AstId(i).ToInt(),
           FullCodeGenerator::PcField::decode(pc_and_state),
           FullCodeGenerator::State2String(
               FullCodeGenerator::StateField::decode(pc_and_state)));
  }
}


const char* Code::ICState2String(InlineCacheState state) {
  switch (state) {
    case UNINITIALIZED: return "UNINITIALIZED";
    case PREMONOMORPHIC: return "PREMONOMORPHIC";
    case MONOMORPHIC: return "MONOMORPHIC";
    case MONOMORPHIC_PROTOTYPE_FAILURE: return "MONOMORPHIC_PROTOTYPE_FAILURE";
    case POLYMORPHIC: return "POLYMORPHIC";
    case MEGAMORPHIC: return "MEGAMORPHIC";
    case GENERIC: return "GENERIC";
    case DEBUG_STUB: return "DEBUG_STUB";
  }
  UNREACHABLE();
  return NULL;
}


const char* Code::StubType2String(StubType type) {
  switch (type) {
    case NORMAL: return "NORMAL";
    case FAST: return "FAST";
  }
  UNREACHABLE();  // keep the compiler happy
  return NULL;
}


// Prints a symbolic name for the extra IC state when one is known for the
// given kind, and the raw numeric value otherwise.
void Code::PrintExtraICState(FILE* out, Kind kind, ExtraICState extra) {
  PrintF(out, "extra_ic_state = ");
  const char* name = NULL;
  switch (kind) {
    case STORE_IC:
    case KEYED_STORE_IC:
      if (extra == STRICT) name = "STRICT";
      break;
    default:
break; 11406 } 11407 if (name != NULL) { 11408 PrintF(out, "%s\n", name); 11409 } else { 11410 PrintF(out, "%d\n", extra); 11411 } 11412 } 11413 11414 11415 void Code::Disassemble(const char* name, FILE* out) { 11416 PrintF(out, "kind = %s\n", Kind2String(kind())); 11417 if (has_major_key()) { 11418 PrintF(out, "major_key = %s\n", 11419 CodeStub::MajorName(CodeStub::GetMajorKey(this), true)); 11420 } 11421 if (is_inline_cache_stub()) { 11422 PrintF(out, "ic_state = %s\n", ICState2String(ic_state())); 11423 PrintExtraICState(out, kind(), extra_ic_state()); 11424 if (ic_state() == MONOMORPHIC) { 11425 PrintF(out, "type = %s\n", StubType2String(type())); 11426 } 11427 if (is_compare_ic_stub()) { 11428 ASSERT(major_key() == CodeStub::CompareIC); 11429 CompareIC::State left_state, right_state, handler_state; 11430 Token::Value op; 11431 ICCompareStub::DecodeMinorKey(stub_info(), &left_state, &right_state, 11432 &handler_state, &op); 11433 PrintF(out, "compare_state = %s*%s -> %s\n", 11434 CompareIC::GetStateName(left_state), 11435 CompareIC::GetStateName(right_state), 11436 CompareIC::GetStateName(handler_state)); 11437 PrintF(out, "compare_operation = %s\n", Token::Name(op)); 11438 } 11439 } 11440 if ((name != NULL) && (name[0] != '\0')) { 11441 PrintF(out, "name = %s\n", name); 11442 } 11443 if (kind() == OPTIMIZED_FUNCTION) { 11444 PrintF(out, "stack_slots = %d\n", stack_slots()); 11445 } 11446 11447 PrintF(out, "Instructions (size = %d)\n", instruction_size()); 11448 Disassembler::Decode(out, this); 11449 PrintF(out, "\n"); 11450 11451 if (kind() == FUNCTION) { 11452 DeoptimizationOutputData* data = 11453 DeoptimizationOutputData::cast(this->deoptimization_data()); 11454 data->DeoptimizationOutputDataPrint(out); 11455 } else if (kind() == OPTIMIZED_FUNCTION) { 11456 DeoptimizationInputData* data = 11457 DeoptimizationInputData::cast(this->deoptimization_data()); 11458 data->DeoptimizationInputDataPrint(out); 11459 } 11460 PrintF(out, "\n"); 11461 11462 if 
(is_crankshafted()) { 11463 SafepointTable table(this); 11464 PrintF(out, "Safepoints (size = %u)\n", table.size()); 11465 for (unsigned i = 0; i < table.length(); i++) { 11466 unsigned pc_offset = table.GetPcOffset(i); 11467 PrintF(out, "%p %4d ", (instruction_start() + pc_offset), pc_offset); 11468 table.PrintEntry(i, out); 11469 PrintF(out, " (sp -> fp)"); 11470 SafepointEntry entry = table.GetEntry(i); 11471 if (entry.deoptimization_index() != Safepoint::kNoDeoptimizationIndex) { 11472 PrintF(out, " %6d", entry.deoptimization_index()); 11473 } else { 11474 PrintF(out, " <none>"); 11475 } 11476 if (entry.argument_count() > 0) { 11477 PrintF(out, " argc: %d", entry.argument_count()); 11478 } 11479 PrintF(out, "\n"); 11480 } 11481 PrintF(out, "\n"); 11482 } else if (kind() == FUNCTION) { 11483 unsigned offset = back_edge_table_offset(); 11484 // If there is no back edge table, the "table start" will be at or after 11485 // (due to alignment) the end of the instruction stream. 11486 if (static_cast<int>(offset) < instruction_size()) { 11487 DisallowHeapAllocation no_gc; 11488 BackEdgeTable back_edges(this, &no_gc); 11489 11490 PrintF(out, "Back edges (size = %u)\n", back_edges.length()); 11491 PrintF(out, "ast_id pc_offset loop_depth\n"); 11492 11493 for (uint32_t i = 0; i < back_edges.length(); i++) { 11494 PrintF(out, "%6d %9u %10u\n", back_edges.ast_id(i).ToInt(), 11495 back_edges.pc_offset(i), 11496 back_edges.loop_depth(i)); 11497 } 11498 11499 PrintF(out, "\n"); 11500 } 11501 #ifdef OBJECT_PRINT 11502 if (!type_feedback_info()->IsUndefined()) { 11503 TypeFeedbackInfo::cast(type_feedback_info())->TypeFeedbackInfoPrint(out); 11504 PrintF(out, "\n"); 11505 } 11506 #endif 11507 } 11508 11509 PrintF(out, "RelocInfo (size = %d)\n", relocation_size()); 11510 for (RelocIterator it(this); !it.done(); it.next()) { 11511 it.rinfo()->Print(GetIsolate(), out); 11512 } 11513 PrintF(out, "\n"); 11514 } 11515 #endif // ENABLE_DISASSEMBLER 11516 11517 11518 Handle<FixedArray> 
JSObject::SetFastElementsCapacityAndLength(
    Handle<JSObject> object,
    int capacity,
    int length,
    SetFastElementsCapacitySmiMode smi_mode) {
  // We should never end in here with a pixel or external array.
  ASSERT(!object->HasExternalArrayElements());

  // Allocate a new fast elements backing store.
  Handle<FixedArray> new_elements =
      object->GetIsolate()->factory()->NewUninitializedFixedArray(capacity);

  ElementsKind elements_kind = object->GetElementsKind();
  ElementsKind new_elements_kind;
  // The resized array has FAST_*_SMI_ELEMENTS if the capacity mode forces it,
  // or if it's allowed and the old elements array contained only SMIs.
  bool has_fast_smi_elements =
      (smi_mode == kForceSmiElements) ||
      ((smi_mode == kAllowSmiElements) && object->HasFastSmiElements());
  if (has_fast_smi_elements) {
    // Preserve holeyness of the old kind in the new kind.
    if (IsHoleyElementsKind(elements_kind)) {
      new_elements_kind = FAST_HOLEY_SMI_ELEMENTS;
    } else {
      new_elements_kind = FAST_SMI_ELEMENTS;
    }
  } else {
    if (IsHoleyElementsKind(elements_kind)) {
      new_elements_kind = FAST_HOLEY_ELEMENTS;
    } else {
      new_elements_kind = FAST_ELEMENTS;
    }
  }
  Handle<FixedArrayBase> old_elements(object->elements());
  ElementsAccessor* accessor = ElementsAccessor::ForKind(new_elements_kind);
  accessor->CopyElements(object, new_elements, elements_kind);

  if (elements_kind != SLOPPY_ARGUMENTS_ELEMENTS) {
    // Install the new backing store, transitioning the map only when the
    // elements kind actually changed.
    Handle<Map> new_map = (new_elements_kind != elements_kind)
        ? GetElementsTransitionMap(object, new_elements_kind)
        : handle(object->map());
    JSObject::ValidateElements(object);
    JSObject::SetMapAndElements(object, new_map, new_elements);

    // Transition through the allocation site as well if present.
    JSObject::UpdateAllocationSite(object, new_elements_kind);
  } else {
    // Sloppy arguments: the "elements" array is a parameter map whose
    // slot 1 holds the actual backing store.
    Handle<FixedArray> parameter_map = Handle<FixedArray>::cast(old_elements);
    parameter_map->set(1, *new_elements);
  }

  if (FLAG_trace_elements_transitions) {
    PrintElementsTransition(stdout, object, elements_kind, old_elements,
                            object->GetElementsKind(), new_elements);
  }

  if (object->IsJSArray()) {
    Handle<JSArray>::cast(object)->set_length(Smi::FromInt(length));
  }
  return new_elements;
}


// Like SetFastElementsCapacityAndLength, but reallocates into a
// FixedDoubleArray backing store (FAST_[HOLEY_]DOUBLE_ELEMENTS).
void JSObject::SetFastDoubleElementsCapacityAndLength(Handle<JSObject> object,
                                                      int capacity,
                                                      int length) {
  // We should never end in here with a pixel or external array.
  ASSERT(!object->HasExternalArrayElements());

  Handle<FixedArrayBase> elems =
      object->GetIsolate()->factory()->NewFixedDoubleArray(capacity);

  ElementsKind elements_kind = object->GetElementsKind();
  CHECK(elements_kind != SLOPPY_ARGUMENTS_ELEMENTS);
  ElementsKind new_elements_kind = elements_kind;
  if (IsHoleyElementsKind(elements_kind)) {
    new_elements_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
  } else {
    new_elements_kind = FAST_DOUBLE_ELEMENTS;
  }

  Handle<Map> new_map = GetElementsTransitionMap(object, new_elements_kind);

  Handle<FixedArrayBase> old_elements(object->elements());
  ElementsAccessor* accessor = ElementsAccessor::ForKind(FAST_DOUBLE_ELEMENTS);
  accessor->CopyElements(object, elems, elements_kind);

  JSObject::ValidateElements(object);
  JSObject::SetMapAndElements(object, new_map, elems);

  if (FLAG_trace_elements_transitions) {
    PrintElementsTransition(stdout, object, elements_kind, old_elements,
                            object->GetElementsKind(), elems);
  }

  if (object->IsJSArray()) {
    Handle<JSArray>::cast(object)->set_length(Smi::FromInt(length));
  }
}


// static
static 11619 void JSArray::Initialize(Handle<JSArray> array, int capacity, int length) { 11620 ASSERT(capacity >= 0); 11621 array->GetIsolate()->factory()->NewJSArrayStorage( 11622 array, length, capacity, INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE); 11623 } 11624 11625 11626 void JSArray::Expand(Handle<JSArray> array, int required_size) { 11627 ElementsAccessor* accessor = array->GetElementsAccessor(); 11628 accessor->SetCapacityAndLength(array, required_size, required_size); 11629 } 11630 11631 11632 // Returns false if the passed-in index is marked non-configurable, 11633 // which will cause the ES5 truncation operation to halt, and thus 11634 // no further old values need be collected. 11635 static bool GetOldValue(Isolate* isolate, 11636 Handle<JSObject> object, 11637 uint32_t index, 11638 List<Handle<Object> >* old_values, 11639 List<uint32_t>* indices) { 11640 PropertyAttributes attributes = 11641 JSReceiver::GetOwnElementAttribute(object, index); 11642 ASSERT(attributes != ABSENT); 11643 if (attributes == DONT_DELETE) return false; 11644 Handle<Object> value; 11645 if (!JSObject::GetOwnElementAccessorPair(object, index).is_null()) { 11646 value = Handle<Object>::cast(isolate->factory()->the_hole_value()); 11647 } else { 11648 value = Object::GetElement(isolate, object, index).ToHandleChecked(); 11649 } 11650 old_values->Add(value); 11651 indices->Add(index); 11652 return true; 11653 } 11654 11655 static void EnqueueSpliceRecord(Handle<JSArray> object, 11656 uint32_t index, 11657 Handle<JSArray> deleted, 11658 uint32_t add_count) { 11659 Isolate* isolate = object->GetIsolate(); 11660 HandleScope scope(isolate); 11661 Handle<Object> index_object = isolate->factory()->NewNumberFromUint(index); 11662 Handle<Object> add_count_object = 11663 isolate->factory()->NewNumberFromUint(add_count); 11664 11665 Handle<Object> args[] = 11666 { object, index_object, deleted, add_count_object }; 11667 11668 Execution::Call(isolate, 11669 
                  Handle<JSFunction>(isolate->observers_enqueue_splice()),
                  isolate->factory()->undefined_value(),
                  ARRAY_SIZE(args),
                  args).Assert();
}


// Notifies the Object.observe machinery that a compound splice operation
// on |object| is starting; paired with EndPerformSplice below.
static void BeginPerformSplice(Handle<JSArray> object) {
  Isolate* isolate = object->GetIsolate();
  HandleScope scope(isolate);
  Handle<Object> args[] = { object };

  Execution::Call(isolate,
                  Handle<JSFunction>(isolate->observers_begin_perform_splice()),
                  isolate->factory()->undefined_value(),
                  ARRAY_SIZE(args),
                  args).Assert();
}


// Counterpart of BeginPerformSplice: marks the compound splice as finished.
static void EndPerformSplice(Handle<JSArray> object) {
  Isolate* isolate = object->GetIsolate();
  HandleScope scope(isolate);
  Handle<Object> args[] = { object };

  Execution::Call(isolate,
                  Handle<JSFunction>(isolate->observers_end_perform_splice()),
                  isolate->factory()->undefined_value(),
                  ARRAY_SIZE(args),
                  args).Assert();
}


// Sets the array's length, additionally emitting Object.observe change and
// splice records when the array is observed.  The unobserved fast path
// delegates straight to the elements accessor.
MaybeHandle<Object> JSArray::SetElementsLength(
    Handle<JSArray> array,
    Handle<Object> new_length_handle) {
  // We should never end in here with a pixel or external array.
  ASSERT(array->AllowsSetElementsLength());
  if (!array->map()->is_observed()) {
    return array->GetElementsAccessor()->SetLength(array, new_length_handle);
  }

  Isolate* isolate = array->GetIsolate();
  List<uint32_t> indices;
  List<Handle<Object> > old_values;
  Handle<Object> old_length_handle(array->length(), isolate);
  uint32_t old_length = 0;
  CHECK(old_length_handle->ToArrayIndex(&old_length));
  uint32_t new_length = 0;
  CHECK(new_length_handle->ToArrayIndex(&new_length));

  // Collect old values of all elements that truncation will remove, walking
  // from the highest index downwards so collection can stop at the first
  // non-deletable element (see GetOldValue).
  static const PropertyAttributes kNoAttrFilter = NONE;
  int num_elements = array->NumberOfOwnElements(kNoAttrFilter);
  if (num_elements > 0) {
    if (old_length == static_cast<uint32_t>(num_elements)) {
      // Simple case for arrays without holes.
      for (uint32_t i = old_length - 1; i + 1 > new_length; --i) {
        if (!GetOldValue(isolate, array, i, &old_values, &indices)) break;
      }
    } else {
      // For sparse arrays, only iterate over existing elements.
      // TODO(rafaelw): For fast, sparse arrays, we can avoid iterating over
      // the to-be-removed indices twice.
      Handle<FixedArray> keys = isolate->factory()->NewFixedArray(num_elements);
      array->GetOwnElementKeys(*keys, kNoAttrFilter);
      while (num_elements-- > 0) {
        uint32_t index = NumberToUint32(keys->get(num_elements));
        if (index < new_length) break;
        if (!GetOldValue(isolate, array, index, &old_values, &indices)) break;
      }
    }
  }

  Handle<Object> hresult;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate, hresult,
      array->GetElementsAccessor()->SetLength(array, new_length_handle),
      Object);

  // Reload the length: truncation may have stopped early at a
  // non-configurable element.
  CHECK(array->length()->ToArrayIndex(&new_length));
  if (old_length == new_length) return hresult;

  BeginPerformSplice(array);

  for (int i = 0; i < indices.length(); ++i) {
    // For deletions where the property was an accessor, old_values[i]
    // will be the hole, which instructs EnqueueChangeRecord to elide
    // the "oldValue" property.
    JSObject::EnqueueChangeRecord(
        array, "delete", isolate->factory()->Uint32ToString(indices[i]),
        old_values[i]);
  }
  JSObject::EnqueueChangeRecord(
      array, "update", isolate->factory()->length_string(),
      old_length_handle);

  EndPerformSplice(array);

  uint32_t index = Min(old_length, new_length);
  uint32_t add_count = new_length > old_length ? new_length - old_length : 0;
  uint32_t delete_count = new_length < old_length ?
      old_length - new_length : 0;
  Handle<JSArray> deleted = isolate->factory()->NewJSArray(0);
  if (delete_count > 0) {
    // Build the "deleted" array carried by the splice record.
    for (int i = indices.length() - 1; i >= 0; i--) {
      // Skip deletions where the property was an accessor, leaving holes
      // in the array of old values.
      if (old_values[i]->IsTheHole()) continue;
      JSObject::SetElement(
          deleted, indices[i] - index, old_values[i], NONE, SLOPPY).Assert();
    }

    SetProperty(deleted, isolate->factory()->length_string(),
                isolate->factory()->NewNumberFromUint(delete_count),
                NONE, SLOPPY).Assert();
  }

  EnqueueSpliceRecord(array, index, deleted, add_count);

  return hresult;
}


// Looks up a cached map for |map| with its prototype replaced by
// |prototype|.  Returns a null handle when no transition is cached.
Handle<Map> Map::GetPrototypeTransition(Handle<Map> map,
                                        Handle<Object> prototype) {
  FixedArray* cache = map->GetPrototypeTransitions();
  int number_of_transitions = map->NumberOfProtoTransitions();
  const int proto_offset =
      kProtoTransitionHeaderSize + kProtoTransitionPrototypeOffset;
  const int map_offset = kProtoTransitionHeaderSize + kProtoTransitionMapOffset;
  const int step = kProtoTransitionElementsPerEntry;
  for (int i = 0; i < number_of_transitions; i++) {
    if (cache->get(proto_offset + i * step) == *prototype) {
      Object* result = cache->get(map_offset + i * step);
      return Handle<Map>(Map::cast(result));
    }
  }
  return Handle<Map>();
}


// Caches |target_map| as the prototype transition of |map| for |prototype|,
// growing the transition array when needed.  Returns |map| unchanged when
// caching is disabled, the map is shared, or the cache is full.
Handle<Map> Map::PutPrototypeTransition(Handle<Map> map,
                                        Handle<Object> prototype,
                                        Handle<Map> target_map) {
  ASSERT(target_map->IsMap());
  ASSERT(HeapObject::cast(*prototype)->map()->IsMap());
  // Don't cache prototype transition if this map is shared.
  if (map->is_shared() || !FLAG_cache_prototype_transitions) return map;

  const int step = kProtoTransitionElementsPerEntry;
  const int header = kProtoTransitionHeaderSize;

  Handle<FixedArray> cache(map->GetPrototypeTransitions());
  int capacity = (cache->length() - header) / step;
  int transitions = map->NumberOfProtoTransitions() + 1;

  if (transitions > capacity) {
    if (capacity > kMaxCachedPrototypeTransitions) return map;

    // Grow array by factor 2 over and above what we need.
    cache = FixedArray::CopySize(cache, transitions * 2 * step + header);

    SetPrototypeTransitions(map, cache);
  }

  // Reload number of transitions as GC might shrink them.
  int last = map->NumberOfProtoTransitions();
  int entry = header + last * step;

  cache->set(entry + kProtoTransitionPrototypeOffset, *prototype);
  cache->set(entry + kProtoTransitionMapOffset, *target_map);
  map->SetNumberOfProtoTransitions(last + 1);

  return map;
}


// Overwrites the transition array with the hole so the heap verifier does
// not see stale pointers.
void Map::ZapTransitions() {
  TransitionArray* transition_array = transitions();
  // TODO(mstarzinger): Temporarily use a slower version instead of the faster
  // MemsetPointer to investigate a crasher. Switch back to MemsetPointer.
  Object** data = transition_array->data_start();
  Object* the_hole = GetHeap()->the_hole_value();
  int length = transition_array->length();
  for (int i = 0; i < length; i++) {
    data[i] = the_hole;
  }
}


// Same zapping for the prototype transition cache.
void Map::ZapPrototypeTransitions() {
  FixedArray* proto_transitions = GetPrototypeTransitions();
  MemsetPointer(proto_transitions->data_start(),
                GetHeap()->the_hole_value(),
                proto_transitions->length());
}


// static
void Map::AddDependentCompilationInfo(Handle<Map> map,
                                      DependentCode::DependencyGroup group,
                                      CompilationInfo* info) {
  Handle<DependentCode> codes =
      DependentCode::Insert(handle(map->dependent_code(), info->isolate()),
                            group, info->object_wrapper());
  // Insert may have reallocated the array; only write back if it did.
  if (*codes != map->dependent_code()) map->set_dependent_code(*codes);
  info->dependencies(group)->Add(map, info->zone());
}


// static
void Map::AddDependentCode(Handle<Map> map,
                           DependentCode::DependencyGroup group,
                           Handle<Code> code) {
  Handle<DependentCode> codes = DependentCode::Insert(
      Handle<DependentCode>(map->dependent_code()), group, code);
  if (*codes != map->dependent_code()) map->set_dependent_code(*codes);
}


// static
void Map::AddDependentIC(Handle<Map> map,
                         Handle<Code> stub) {
  ASSERT(stub->next_code_link()->IsUndefined());
  int n = map->dependent_code()->number_of_entries(DependentCode::kWeakICGroup);
  if (n == 0) {
    // Slow path: insert the head of the list with possible heap allocation.
    Map::AddDependentCode(map, DependentCode::kWeakICGroup, stub);
  } else {
    // Fast path: link the stub to the existing head of the list without any
    // heap allocation.
    ASSERT(n == 1);
    map->dependent_code()->AddToDependentICList(stub);
  }
}


DependentCode::GroupStartIndexes::GroupStartIndexes(DependentCode* entries) {
  Recompute(entries);
}


// Recomputes the prefix sums of group sizes: start_indexes_[g] is the first
// entry index of group g, and start_indexes_[kGroupCount] the total count.
void DependentCode::GroupStartIndexes::Recompute(DependentCode* entries) {
  start_indexes_[0] = 0;
  for (int g = 1; g <= kGroupCount; g++) {
    int count = entries->number_of_entries(static_cast<DependencyGroup>(g - 1));
    start_indexes_[g] = start_indexes_[g - 1] + count;
  }
}


// Returns the dependent-code array stored on |object|, whose concrete type
// (PropertyCell, AllocationSite, or Map) is implied by the dependency group.
DependentCode* DependentCode::ForObject(Handle<HeapObject> object,
                                        DependencyGroup group) {
  AllowDeferredHandleDereference dependencies_are_safe;
  if (group == DependentCode::kPropertyCellChangedGroup) {
    return Handle<PropertyCell>::cast(object)->dependent_code();
  } else if (group == DependentCode::kAllocationSiteTenuringChangedGroup ||
             group == DependentCode::kAllocationSiteTransitionChangedGroup) {
    return Handle<AllocationSite>::cast(object)->dependent_code();
  }
  return Handle<Map>::cast(object)->dependent_code();
}


// Inserts |object| into the given dependency group, reallocating the backing
// array when full.  Returns the (possibly new) array; callers must write it
// back if it changed.
Handle<DependentCode> DependentCode::Insert(Handle<DependentCode> entries,
                                            DependencyGroup group,
                                            Handle<Object> object) {
  GroupStartIndexes starts(*entries);
  int start = starts.at(group);
  int end = starts.at(group + 1);
  int number_of_entries = starts.number_of_entries();
  // Check for existing entry to avoid duplicates.
  for (int i = start; i < end; i++) {
    if (entries->object_at(i) == *object) return entries;
  }
  if (entries->length() < kCodesStartIndex + number_of_entries + 1) {
    int capacity = kCodesStartIndex + number_of_entries + 1;
    if (capacity > 5) capacity = capacity * 5 / 4;
    Handle<DependentCode> new_entries = Handle<DependentCode>::cast(
        FixedArray::CopySize(entries, capacity, TENURED));
    // The number of codes can change after GC.
    starts.Recompute(*entries);
    start = starts.at(group);
    end = starts.at(group + 1);
    number_of_entries = starts.number_of_entries();
    // Zap the old array so stale entries don't keep code alive.
    for (int i = 0; i < number_of_entries; i++) {
      entries->clear_at(i);
    }
    // If the old fixed array was empty, we need to reset counters of the
    // new array.
    if (number_of_entries == 0) {
      for (int g = 0; g < kGroupCount; g++) {
        new_entries->set_number_of_entries(static_cast<DependencyGroup>(g), 0);
      }
    }
    entries = new_entries;
  }
  entries->ExtendGroup(group);
  entries->set_object_at(end, *object);
  entries->set_number_of_entries(group, end + 1 - start);
  return entries;
}


// Replaces the CompilationInfo wrapper of a just-finished compilation with
// the resulting code object, within the given group.
void DependentCode::UpdateToFinishedCode(DependencyGroup group,
                                         CompilationInfo* info,
                                         Code* code) {
  DisallowHeapAllocation no_gc;
  AllowDeferredHandleDereference get_object_wrapper;
  Foreign* info_wrapper = *info->object_wrapper();
  GroupStartIndexes starts(this);
  int start = starts.at(group);
  int end = starts.at(group + 1);
  for (int i = start; i < end; i++) {
    if (object_at(i) == info_wrapper) {
      set_object_at(i, code);
      break;
    }
  }

#ifdef DEBUG
  for (int i = start; i < end; i++) {
    ASSERT(is_code_at(i) || compilation_info_at(i) != info);
  }
#endif
}


void DependentCode::RemoveCompilationInfo(DependentCode::DependencyGroup group,
CompilationInfo* info) { 11998 DisallowHeapAllocation no_allocation; 11999 AllowDeferredHandleDereference get_object_wrapper; 12000 Foreign* info_wrapper = *info->object_wrapper(); 12001 GroupStartIndexes starts(this); 12002 int start = starts.at(group); 12003 int end = starts.at(group + 1); 12004 // Find compilation info wrapper. 12005 int info_pos = -1; 12006 for (int i = start; i < end; i++) { 12007 if (object_at(i) == info_wrapper) { 12008 info_pos = i; 12009 break; 12010 } 12011 } 12012 if (info_pos == -1) return; // Not found. 12013 int gap = info_pos; 12014 // Use the last of each group to fill the gap in the previous group. 12015 for (int i = group; i < kGroupCount; i++) { 12016 int last_of_group = starts.at(i + 1) - 1; 12017 ASSERT(last_of_group >= gap); 12018 if (last_of_group == gap) continue; 12019 copy(last_of_group, gap); 12020 gap = last_of_group; 12021 } 12022 ASSERT(gap == starts.number_of_entries() - 1); 12023 clear_at(gap); // Clear last gap. 12024 set_number_of_entries(group, end - start - 1); 12025 12026 #ifdef DEBUG 12027 for (int i = start; i < end - 1; i++) { 12028 ASSERT(is_code_at(i) || compilation_info_at(i) != info); 12029 } 12030 #endif 12031 } 12032 12033 12034 static bool CodeListContains(Object* head, Code* code) { 12035 while (!head->IsUndefined()) { 12036 if (head == code) return true; 12037 head = Code::cast(head)->next_code_link(); 12038 } 12039 return false; 12040 } 12041 12042 12043 bool DependentCode::Contains(DependencyGroup group, Code* code) { 12044 GroupStartIndexes starts(this); 12045 int start = starts.at(group); 12046 int end = starts.at(group + 1); 12047 if (group == kWeakICGroup) { 12048 return CodeListContains(object_at(start), code); 12049 } 12050 for (int i = start; i < end; i++) { 12051 if (object_at(i) == code) return true; 12052 } 12053 return false; 12054 } 12055 12056 12057 bool DependentCode::MarkCodeForDeoptimization( 12058 Isolate* isolate, 12059 DependentCode::DependencyGroup group) { 12060 
  DisallowHeapAllocation no_allocation_scope;
  DependentCode::GroupStartIndexes starts(this);
  int start = starts.at(group);
  int end = starts.at(group + 1);
  int code_entries = starts.number_of_entries();
  if (start == end) return false;  // Group is empty; nothing to deoptimize.

  // Mark all the code that needs to be deoptimized.
  bool marked = false;
  for (int i = start; i < end; i++) {
    if (is_code_at(i)) {
      Code* code = code_at(i);
      if (!code->marked_for_deoptimization()) {
        code->set_marked_for_deoptimization(true);
        marked = true;
      }
    } else {
      // Entry is a CompilationInfo still being compiled; abort it instead.
      CompilationInfo* info = compilation_info_at(i);
      info->AbortDueToDependencyChange();
    }
  }
  // Compact the array by moving all subsequent groups to fill in the new holes.
  for (int src = end, dst = start; src < code_entries; src++, dst++) {
    copy(src, dst);
  }
  // Now the holes are at the end of the array, zap them for heap-verifier.
  int removed = end - start;
  for (int i = code_entries - removed; i < code_entries; i++) {
    clear_at(i);
  }
  set_number_of_entries(group, 0);
  return marked;
}


// Marks the whole dependency |group| for deoptimization and, if any code was
// actually newly marked, triggers deoptimization of all marked code.
void DependentCode::DeoptimizeDependentCodeGroup(
    Isolate* isolate,
    DependentCode::DependencyGroup group) {
  ASSERT(AllowCodeDependencyChange::IsAllowed());
  DisallowHeapAllocation no_allocation_scope;
  bool marked = MarkCodeForDeoptimization(isolate, group);

  if (marked) Deoptimizer::DeoptimizeMarkedCode(isolate);
}


// Links |stub| into the weak-IC dependent code list stored in the single
// kWeakICGroup slot of this table.
void DependentCode::AddToDependentICList(Handle<Code> stub) {
  DisallowHeapAllocation no_heap_allocation;
  GroupStartIndexes starts(this);
  int i = starts.at(kWeakICGroup);
  Object* head = object_at(i);
  // Try to insert the stub after the head of the list to minimize number of
  // writes to the DependentCode array, since a write to the array can make it
  // strong if it was already marked by
  // incremental marker.
  if (head->IsCode()) {
    // Non-empty list: splice |stub| in right after the head, touching only
    // Code objects (no write to the array itself).
    stub->set_next_code_link(Code::cast(head)->next_code_link());
    Code::cast(head)->set_next_code_link(*stub);
  } else {
    // Empty list: |stub| becomes the new head.
    stub->set_next_code_link(head);
    set_object_at(i, *stub);
  }
}


// Returns a map like |map| but with its prototype set to |prototype|.
// Reuses a cached prototype transition when one exists; otherwise copies the
// map, records the transition, and sets the prototype on the copy.
Handle<Map> Map::TransitionToPrototype(Handle<Map> map,
                                       Handle<Object> prototype) {
  Handle<Map> new_map = GetPrototypeTransition(map, prototype);
  if (new_map.is_null()) {
    new_map = Copy(map);
    PutPrototypeTransition(map, prototype, new_map);
    new_map->set_prototype(*prototype);
  }
  return new_map;
}


// Sets the [[Prototype]] of |object| to |value|. When |skip_hidden_prototypes|
// is true, the prototype is set on the last object in the hidden-prototype
// chain instead of on |object| itself. Returns |value| on success or throws a
// TypeError for non-extensible receivers / cyclic prototype chains
// (continues below).
MaybeHandle<Object> JSObject::SetPrototype(Handle<JSObject> object,
                                           Handle<Object> value,
                                           bool skip_hidden_prototypes) {
#ifdef DEBUG
  int size = object->Size();  // Object size must not change; checked at exit.
#endif

  Isolate* isolate = object->GetIsolate();
  Heap* heap = isolate->heap();
  // Silently ignore the change if value is not a JSObject or null.
  // SpiderMonkey behaves this way.
  if (!value->IsJSReceiver() && !value->IsNull()) return value;

  // From 8.6.2 Object Internal Methods
  // ...
  // In addition, if [[Extensible]] is false the value of the [[Class]] and
  // [[Prototype]] internal properties of the object may not be modified.
  // ...
  // Implementation specific extensions that modify [[Class]], [[Prototype]]
  // or [[Extensible]] must not violate the invariants defined in the preceding
  // paragraph.
  if (!object->map()->is_extensible()) {
    Handle<Object> args[] = { object };
    Handle<Object> error = isolate->factory()->NewTypeError(
        "non_extensible_proto", HandleVector(args, ARRAY_SIZE(args)));
    return isolate->Throw<Object>(error);
  }

  // Before we can set the prototype we need to be sure
  // prototype cycles are prevented.
  // It is sufficient to validate that the receiver is not in the new prototype
  // chain.
  for (Object* pt = *value;
       pt != heap->null_value();
       pt = pt->GetPrototype(isolate)) {
    if (JSReceiver::cast(pt) == *object) {
      // Cycle detected.
      Handle<Object> error = isolate->factory()->NewError(
          "cyclic_proto", HandleVector<Object>(NULL, 0));
      return isolate->Throw<Object>(error);
    }
  }

  // Remember whether the old chain already required dictionary elements;
  // used below to decide whether KeyedStoreICs must be cleared.
  bool dictionary_elements_in_chain =
      object->map()->DictionaryElementsInPrototypeChainOnly();
  Handle<JSObject> real_receiver = object;

  if (skip_hidden_prototypes) {
    // Find the first object in the chain whose prototype object is not
    // hidden and set the new prototype on that object.
    Object* current_proto = real_receiver->GetPrototype();
    while (current_proto->IsJSObject() &&
           JSObject::cast(current_proto)->map()->is_hidden_prototype()) {
      real_receiver = handle(JSObject::cast(current_proto), isolate);
      current_proto = current_proto->GetPrototype(isolate);
    }
  }

  // Set the new prototype of the object.
  Handle<Map> map(real_receiver->map());

  // Nothing to do if prototype is already set.
  if (map->prototype() == *value) return value;

  if (value->IsJSObject()) {
    JSObject::OptimizeAsPrototype(Handle<JSObject>::cast(value));
  }

  Handle<Map> new_map = Map::TransitionToPrototype(map, value);
  ASSERT(new_map->prototype() == *value);
  JSObject::MigrateToMap(real_receiver, new_map);

  if (!dictionary_elements_in_chain &&
      new_map->DictionaryElementsInPrototypeChainOnly()) {
    // If the prototype chain didn't previously have element callbacks, then
    // KeyedStoreICs need to be cleared to ensure any that involve this
    // map go generic.
    object->GetHeap()->ClearAllICsByKind(Code::KEYED_STORE_IC);
  }

  heap->ClearInstanceofCache();
  ASSERT(size == object->Size());
  return value;
}


// Overload taking a stack-allocated |Arguments| slice: forwards the range
// [first_arg, first_arg + arg_count) to the pointer-based variant.
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Arguments* args,
                                        uint32_t first_arg,
                                        uint32_t arg_count,
                                        EnsureElementsMode mode) {
  // Elements in |Arguments| are ordered backwards (because they're on the
  // stack), but the method that's called here iterates over them in forward
  // direction.
  return EnsureCanContainElements(
      object, args->arguments() - first_arg - (arg_count - 1), arg_count, mode);
}


// Returns the AccessorPair (getter/setter) for the own property |name| of
// |object|, or an empty MaybeHandle if the property is absent or not an
// accessor. Array-index names are routed to the element variant.
MaybeHandle<AccessorPair> JSObject::GetOwnPropertyAccessorPair(
    Handle<JSObject> object,
    Handle<Name> name) {
  uint32_t index = 0;
  if (name->AsArrayIndex(&index)) {
    return GetOwnElementAccessorPair(object, index);
  }

  Isolate* isolate = object->GetIsolate();
  LookupResult lookup(isolate);
  object->LookupOwnRealNamedProperty(name, &lookup);

  if (lookup.IsPropertyCallbacks() &&
      lookup.GetCallbackObject()->IsAccessorPair()) {
    return handle(AccessorPair::cast(lookup.GetCallbackObject()), isolate);
  }
  return MaybeHandle<AccessorPair>();
}


// Returns the AccessorPair for the own element |index| of |object|, or empty
// if there is none. Global proxies delegate to their hidden global object
// (continues below).
MaybeHandle<AccessorPair> JSObject::GetOwnElementAccessorPair(
    Handle<JSObject> object,
    uint32_t index) {
  if (object->IsJSGlobalProxy()) {
    Handle<Object> proto(object->GetPrototype(), object->GetIsolate());
    if (proto->IsNull()) return MaybeHandle<AccessorPair>();
    ASSERT(proto->IsJSGlobalObject());
    return GetOwnElementAccessorPair(Handle<JSObject>::cast(proto), index);
  }

  // Check for lookup interceptor.
  // With an indexed interceptor installed we cannot report an accessor pair.
  if (object->HasIndexedInterceptor()) return MaybeHandle<AccessorPair>();

  return object->GetElementsAccessor()->GetAccessorPair(object, object, index);
}


// Stores element |index| of |object|, first offering the store to the
// object's indexed-property setter interceptor. If the interceptor handles
// the store (returns a non-empty result), |value| is returned unchanged;
// otherwise the store falls through to SetElementWithoutInterceptor.
MaybeHandle<Object> JSObject::SetElementWithInterceptor(
    Handle<JSObject> object,
    uint32_t index,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictMode strict_mode,
    bool check_prototype,
    SetPropertyMode set_mode) {
  Isolate* isolate = object->GetIsolate();

  // Make sure that the top context does not change when doing
  // callbacks or interceptor calls.
  AssertNoContextChange ncc(isolate);

  Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
  if (!interceptor->setter()->IsUndefined()) {
    v8::IndexedPropertySetterCallback setter =
        v8::ToCData<v8::IndexedPropertySetterCallback>(interceptor->setter());
    LOG(isolate,
        ApiIndexedPropertyAccess("interceptor-indexed-set", *object, index));
    PropertyCallbackArguments args(isolate, interceptor->data(), *object,
                                   *object);
    v8::Handle<v8::Value> result =
        args.Call(setter, index, v8::Utils::ToLocal(value));
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    // Non-empty result means the interceptor consumed the store.
    if (!result.IsEmpty()) return value;
  }

  return SetElementWithoutInterceptor(object, index, value, attributes,
                                      strict_mode,
                                      check_prototype,
                                      set_mode);
}


// Loads element |index| through an accessor |structure|, which is one of:
// ExecutableAccessorInfo (API callback), AccessorPair (JS getter/setter), or
// DeclaredAccessorInfo. |receiver| is the original lookup receiver; |holder|
// is the object the accessor was found on (continues below).
MaybeHandle<Object> JSObject::GetElementWithCallback(
    Handle<JSObject> object,
    Handle<Object> receiver,
    Handle<Object> structure,
    uint32_t index,
    Handle<Object> holder) {
  Isolate* isolate = object->GetIsolate();
  ASSERT(!structure->IsForeign());
  // api style callbacks.
  if (structure->IsExecutableAccessorInfo()) {
    Handle<ExecutableAccessorInfo> data =
        Handle<ExecutableAccessorInfo>::cast(structure);
    Object* fun_obj = data->getter();
    v8::AccessorGetterCallback call_fun =
        v8::ToCData<v8::AccessorGetterCallback>(fun_obj);
    // Accessor without a C getter yields undefined.
    if (call_fun == NULL) return isolate->factory()->undefined_value();
    Handle<JSObject> holder_handle = Handle<JSObject>::cast(holder);
    // API getters take a string key, so stringify the element index.
    Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
    Handle<String> key = isolate->factory()->NumberToString(number);
    LOG(isolate, ApiNamedPropertyAccess("load", *holder_handle, *key));
    PropertyCallbackArguments
        args(isolate, data->data(), *receiver, *holder_handle);
    v8::Handle<v8::Value> result = args.Call(call_fun, v8::Utils::ToLocal(key));
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    if (result.IsEmpty()) return isolate->factory()->undefined_value();
    Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
    result_internal->VerifyApiCallResultType();
    // Rebox handle before return.
    return handle(*result_internal, isolate);
  }

  // __defineGetter__ callback
  if (structure->IsAccessorPair()) {
    Handle<Object> getter(Handle<AccessorPair>::cast(structure)->getter(),
                          isolate);
    if (getter->IsSpecFunction()) {
      // TODO(rossberg): nicer would be to cast to some JSCallable here...
      return GetPropertyWithDefinedGetter(
          receiver, Handle<JSReceiver>::cast(getter));
    }
    // Getter is not a function.
    return isolate->factory()->undefined_value();
  }

  if (structure->IsDeclaredAccessorInfo()) {
    return GetDeclaredAccessorProperty(
        receiver, Handle<DeclaredAccessorInfo>::cast(structure), isolate);
  }

  UNREACHABLE();
  return MaybeHandle<Object>();
}


// Stores |value| into element |index| through an accessor |structure|
// (ExecutableAccessorInfo API setter, AccessorPair JS setter, or
// DeclaredAccessorInfo). Throws a TypeError in strict mode when an
// AccessorPair has no callable setter (continues below).
MaybeHandle<Object> JSObject::SetElementWithCallback(Handle<JSObject> object,
                                                     Handle<Object> structure,
                                                     uint32_t index,
                                                     Handle<Object> value,
                                                     Handle<JSObject> holder,
                                                     StrictMode strict_mode) {
  Isolate* isolate = object->GetIsolate();

  // We should never get here to initialize a const with the hole
  // value since a const declaration would conflict with the setter.
  ASSERT(!value->IsTheHole());
  ASSERT(!structure->IsForeign());
  if (structure->IsExecutableAccessorInfo()) {
    // api style callbacks
    Handle<ExecutableAccessorInfo> data =
        Handle<ExecutableAccessorInfo>::cast(structure);
    Object* call_obj = data->setter();
    v8::AccessorSetterCallback call_fun =
        v8::ToCData<v8::AccessorSetterCallback>(call_obj);
    // Accessor without a C setter: store is silently ignored.
    if (call_fun == NULL) return value;
    Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
    Handle<String> key(isolate->factory()->NumberToString(number));
    LOG(isolate, ApiNamedPropertyAccess("store", *object, *key));
    PropertyCallbackArguments
        args(isolate, data->data(), *object, *holder);
    args.Call(call_fun,
              v8::Utils::ToLocal(key),
              v8::Utils::ToLocal(value));
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return value;
  }

  if (structure->IsAccessorPair()) {
    Handle<Object> setter(AccessorPair::cast(*structure)->setter(), isolate);
    if (setter->IsSpecFunction()) {
      // TODO(rossberg): nicer would be to cast to some JSCallable here...
      return SetPropertyWithDefinedSetter(
          object, Handle<JSReceiver>::cast(setter), value);
    } else {
      // Accessor pair without a callable setter: sloppy mode ignores the
      // store, strict mode throws.
      if (strict_mode == SLOPPY) return value;
      Handle<Object> key(isolate->factory()->NewNumberFromUint(index));
      Handle<Object> args[2] = { key, holder };
      Handle<Object> error = isolate->factory()->NewTypeError(
          "no_setter_in_callback", HandleVector(args, 2));
      return isolate->Throw<Object>(error);
    }
  }

  // TODO(dcarney): Handle correctly.
  if (structure->IsDeclaredAccessorInfo()) return value;

  UNREACHABLE();
  return MaybeHandle<Object>();
}


// Returns true if this object has sloppy-arguments elements whose backing
// store (slot 1 of the parameter map) is a non-dictionary FixedArray.
bool JSObject::HasFastArgumentsElements() {
  Heap* heap = GetHeap();
  if (!elements()->IsFixedArray()) return false;
  FixedArray* elements = FixedArray::cast(this->elements());
  if (elements->map() != heap->sloppy_arguments_elements_map()) {
    return false;
  }
  // Slot 1 of the sloppy-arguments elements holds the backing store.
  FixedArray* arguments = FixedArray::cast(elements->get(1));
  return !arguments->IsDictionary();
}


// Returns true if this object has sloppy-arguments elements whose backing
// store (slot 1 of the parameter map) is a dictionary.
bool JSObject::HasDictionaryArgumentsElements() {
  Heap* heap = GetHeap();
  if (!elements()->IsFixedArray()) return false;
  FixedArray* elements = FixedArray::cast(this->elements());
  if (elements->map() != heap->sloppy_arguments_elements_map()) {
    return false;
  }
  FixedArray* arguments = FixedArray::cast(elements->get(1));
  return arguments->IsDictionary();
}


// Adding n elements in fast case is O(n*n).
// Note: revisit design to have dual undefined values to capture absent
// elements.
// Stores |value| at element |index| of an object with fast Smi-or-object (or
// fast sloppy-arguments) elements. Handles prototype-chain setter lookups,
// elements-kind transitions (holey / double / generic), capacity growth, and
// fallback to dictionary (slow) elements. Returns |value| on success.
MaybeHandle<Object> JSObject::SetFastElement(Handle<JSObject> object,
                                             uint32_t index,
                                             Handle<Object> value,
                                             StrictMode strict_mode,
                                             bool check_prototype) {
  ASSERT(object->HasFastSmiOrObjectElements() ||
         object->HasFastArgumentsElements());

  Isolate* isolate = object->GetIsolate();

  // Array optimizations rely on the prototype lookups of Array objects always
  // returning undefined. If there is a store to the initial prototype object,
  // make sure all of these optimizations are invalidated.
  if (isolate->is_initial_object_prototype(*object) ||
      isolate->is_initial_array_prototype(*object)) {
    object->map()->dependent_code()->DeoptimizeDependentCodeGroup(isolate,
        DependentCode::kElementsCantBeAddedGroup);
  }

  Handle<FixedArray> backing_store(FixedArray::cast(object->elements()));
  if (backing_store->map() ==
      isolate->heap()->sloppy_arguments_elements_map()) {
    // Sloppy arguments: the real backing store lives in slot 1.
    backing_store = handle(FixedArray::cast(backing_store->get(1)));
  } else {
    backing_store = EnsureWritableFastElements(object);
  }
  uint32_t capacity = static_cast<uint32_t>(backing_store->length());

  // A store to a hole (or past the end) may hit a setter somewhere on the
  // prototype chain; if one is found it consumes the store.
  if (check_prototype &&
      (index >= capacity || backing_store->get(index)->IsTheHole())) {
    bool found;
    MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes(
        object, index, value, &found, strict_mode);
    if (found) return result;
  }

  uint32_t new_capacity = capacity;
  // Check if the length property of this object needs to be updated.
  uint32_t array_length = 0;
  bool must_update_array_length = false;
  bool introduces_holes = true;
  if (object->IsJSArray()) {
    CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&array_length));
    // Writing exactly at the current length extends the array without a hole.
    introduces_holes = index > array_length;
    if (index >= array_length) {
      must_update_array_length = true;
      array_length = index + 1;
    }
  } else {
    introduces_holes = index >= capacity;
  }

  // If the array is growing, and it's not growth by a single element at the
  // end, make sure that the ElementsKind is HOLEY.
  ElementsKind elements_kind = object->GetElementsKind();
  if (introduces_holes &&
      IsFastElementsKind(elements_kind) &&
      !IsFastHoleyElementsKind(elements_kind)) {
    ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
    TransitionElementsKind(object, transitioned_kind);
  }

  // Check if the capacity of the backing store needs to be increased, or if
  // a transition to slow elements is necessary.
  if (index >= capacity) {
    bool convert_to_slow = true;
    if ((index - capacity) < kMaxGap) {
      new_capacity = NewElementsCapacity(index + 1);
      ASSERT(new_capacity > index);
      if (!object->ShouldConvertToSlowElements(new_capacity)) {
        convert_to_slow = false;
      }
    }
    if (convert_to_slow) {
      NormalizeElements(object);
      return SetDictionaryElement(object, index, value, NONE, strict_mode,
                                  check_prototype);
    }
  }
  // Convert to fast double elements if appropriate.
  if (object->HasFastSmiElements() && !value->IsSmi() && value->IsNumber()) {
    // Consider fixing the boilerplate as well if we have one.
    ElementsKind to_kind = IsHoleyElementsKind(elements_kind)
        ? FAST_HOLEY_DOUBLE_ELEMENTS
        : FAST_DOUBLE_ELEMENTS;

    UpdateAllocationSite(object, to_kind);

    SetFastDoubleElementsCapacityAndLength(object, new_capacity, array_length);
    FixedDoubleArray::cast(object->elements())->set(index, value->Number());
    JSObject::ValidateElements(object);
    return value;
  }
  // Change elements kind from Smi-only to generic FAST if necessary.
  if (object->HasFastSmiElements() && !value->IsSmi()) {
    ElementsKind kind = object->HasFastHoleyElements()
        ? FAST_HOLEY_ELEMENTS
        : FAST_ELEMENTS;

    UpdateAllocationSite(object, kind);
    Handle<Map> new_map = GetElementsTransitionMap(object, kind);
    JSObject::MigrateToMap(object, new_map);
    ASSERT(IsFastObjectElementsKind(object->GetElementsKind()));
  }
  // Increase backing store capacity if that's been decided previously.
  if (new_capacity != capacity) {
    SetFastElementsCapacitySmiMode smi_mode =
        value->IsSmi() && object->HasFastSmiElements()
            ? kAllowSmiElements
            : kDontAllowSmiElements;
    Handle<FixedArray> new_elements =
        SetFastElementsCapacityAndLength(object, new_capacity, array_length,
                                         smi_mode);
    new_elements->set(index, *value);
    JSObject::ValidateElements(object);
    return value;
  }

  // Finally, set the new element and length.
  ASSERT(object->elements()->IsFixedArray());
  backing_store->set(index, *value);
  if (must_update_array_length) {
    Handle<JSArray>::cast(object)->set_length(Smi::FromInt(array_length));
  }
  return value;
}


// Stores |value| at element |index| of an object with dictionary (slow)
// elements, honoring attributes, read-only checks, aliased arguments entries,
// and accessor callbacks. May convert the object back to fast elements
// (continues below).
MaybeHandle<Object> JSObject::SetDictionaryElement(
    Handle<JSObject> object,
    uint32_t index,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictMode strict_mode,
    bool check_prototype,
    SetPropertyMode set_mode) {
  ASSERT(object->HasDictionaryElements() ||
         object->HasDictionaryArgumentsElements());
  Isolate* isolate = object->GetIsolate();

  // Insert element in the dictionary.
  Handle<FixedArray> elements(FixedArray::cast(object->elements()));
  bool is_arguments =
      (elements->map() == isolate->heap()->sloppy_arguments_elements_map());
  // For sloppy arguments the dictionary lives in slot 1 of the parameter map.
  Handle<SeededNumberDictionary> dictionary(is_arguments
      ? SeededNumberDictionary::cast(elements->get(1))
      : SeededNumberDictionary::cast(*elements));

  int entry = dictionary->FindEntry(index);
  if (entry != SeededNumberDictionary::kNotFound) {
    Handle<Object> element(dictionary->ValueAt(entry), isolate);
    PropertyDetails details = dictionary->DetailsAt(entry);
    if (details.type() == CALLBACKS && set_mode == SET_PROPERTY) {
      return SetElementWithCallback(object, element, index, value, object,
                                    strict_mode);
    } else {
      dictionary->UpdateMaxNumberKey(index);
      // If a value has not been initialized we allow writing to it even if it
      // is read-only (a declared const that has not been initialized). If a
      // value is being defined we skip attribute checks completely.
      if (set_mode == DEFINE_PROPERTY) {
        // Defining: overwrite the attributes unconditionally.
        details = PropertyDetails(
            attributes, NORMAL, details.dictionary_index());
        dictionary->DetailsAtPut(entry, details);
      } else if (details.IsReadOnly() && !element->IsTheHole()) {
        // Plain store to an initialized read-only element: sloppy mode
        // ignores the store, strict mode throws.
        if (strict_mode == SLOPPY) {
          return isolate->factory()->undefined_value();
        } else {
          Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
          Handle<Object> args[2] = { number, object };
          Handle<Object> error =
              isolate->factory()->NewTypeError("strict_read_only_property",
                                               HandleVector(args, 2));
          return isolate->Throw<Object>(error);
        }
      }
      // Elements of the arguments object in slow mode might be slow aliases.
      if (is_arguments && element->IsAliasedArgumentsEntry()) {
        Handle<AliasedArgumentsEntry> entry =
            Handle<AliasedArgumentsEntry>::cast(element);
        // Slot 0 of the parameter map holds the aliased context.
        Handle<Context> context(Context::cast(elements->get(0)));
        int context_index = entry->aliased_context_slot();
        ASSERT(!context->get(context_index)->IsTheHole());
        context->set(context_index, *value);
        // For elements that are still writable we keep slow aliasing.
        if (!details.IsReadOnly()) value = element;
      }
      dictionary->ValueAtPut(entry, *value);
    }
  } else {
    // Index not already used. Look for an accessor in the prototype chain.
    // Can cause GC!
    if (check_prototype) {
      bool found;
      MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes(
          object, index, value, &found, strict_mode);
      if (found) return result;
    }

    // When we set the is_extensible flag to false we always force the
    // element into dictionary mode (and force them to stay there).
    if (!object->map()->is_extensible()) {
      // New element on a non-extensible object: ignore in sloppy mode,
      // throw in strict mode.
      if (strict_mode == SLOPPY) {
        return isolate->factory()->undefined_value();
      } else {
        Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
        Handle<String> name = isolate->factory()->NumberToString(number);
        Handle<Object> args[1] = { name };
        Handle<Object> error =
            isolate->factory()->NewTypeError("object_not_extensible",
                                             HandleVector(args, 1));
        return isolate->Throw<Object>(error);
      }
    }

    PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
    Handle<SeededNumberDictionary> new_dictionary =
        SeededNumberDictionary::AddNumberEntry(dictionary, index, value,
                                               details);
    // Adding may have reallocated the dictionary; re-wire the holder.
    if (*dictionary != *new_dictionary) {
      if (is_arguments) {
        elements->set(1, *new_dictionary);
      } else {
        object->set_elements(*new_dictionary);
      }
      dictionary = new_dictionary;
    }
  }

  // Update the array length if this JSObject is an array.
  if (object->IsJSArray()) {
    JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray>::cast(object), index,
                                          value);
  }

  // Attempt to put this object back in fast case.
  if (object->ShouldConvertToFastElements()) {
    uint32_t new_length = 0;
    if (object->IsJSArray()) {
      CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&new_length));
    } else {
      new_length = dictionary->max_number_key() + 1;
    }
    SetFastElementsCapacitySmiMode smi_mode = FLAG_smi_only_arrays
        ?
 kAllowSmiElements
        : kDontAllowSmiElements;
    bool has_smi_only_elements = false;
    bool should_convert_to_fast_double_elements =
        object->ShouldConvertToFastDoubleElements(&has_smi_only_elements);
    if (has_smi_only_elements) {
      smi_mode = kForceSmiElements;
    }

    // Pick the fast elements representation that fits the current contents.
    if (should_convert_to_fast_double_elements) {
      SetFastDoubleElementsCapacityAndLength(object, new_length, new_length);
    } else {
      SetFastElementsCapacityAndLength(object, new_length, new_length,
                                       smi_mode);
    }
    JSObject::ValidateElements(object);
#ifdef DEBUG
    if (FLAG_trace_normalization) {
      PrintF("Object elements are fast case again:\n");
      object->Print();
    }
#endif
  }
  return value;
}

// Stores |value| at element |index| of an object with fast double elements.
// Non-numeric values force a transition to generic fast elements; growth and
// holes are handled analogously to SetFastElement, with a final fallback to
// dictionary elements (continues below).
MaybeHandle<Object> JSObject::SetFastDoubleElement(
    Handle<JSObject> object,
    uint32_t index,
    Handle<Object> value,
    StrictMode strict_mode,
    bool check_prototype) {
  ASSERT(object->HasFastDoubleElements());

  Handle<FixedArrayBase> base_elms(FixedArrayBase::cast(object->elements()));
  uint32_t elms_length = static_cast<uint32_t>(base_elms->length());

  // If storing to an element that isn't in the array, pass the store request
  // up the prototype chain before storing in the receiver's elements.
  if (check_prototype &&
      (index >= elms_length ||
       Handle<FixedDoubleArray>::cast(base_elms)->is_the_hole(index))) {
    bool found;
    MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes(
        object, index, value, &found, strict_mode);
    if (found) return result;
  }

  // If the value object is not a heap number, switch to fast elements and try
  // again.
  bool value_is_smi = value->IsSmi();
  bool introduces_holes = true;
  uint32_t length = elms_length;
  if (object->IsJSArray()) {
    CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&length));
    // Writing exactly at the current length extends without a hole.
    introduces_holes = index > length;
  } else {
    introduces_holes = index >= elms_length;
  }

  if (!value->IsNumber()) {
    // Transition away from double elements, then retry via SetFastElement.
    SetFastElementsCapacityAndLength(object, elms_length, length,
                                     kDontAllowSmiElements);
    Handle<Object> result;
    ASSIGN_RETURN_ON_EXCEPTION(
        object->GetIsolate(), result,
        SetFastElement(object, index, value, strict_mode, check_prototype),
        Object);
    JSObject::ValidateElements(object);
    return result;
  }

  double double_value = value_is_smi
      ? static_cast<double>(Handle<Smi>::cast(value)->value())
      : Handle<HeapNumber>::cast(value)->value();

  // If the array is growing, and it's not growth by a single element at the
  // end, make sure that the ElementsKind is HOLEY.
  ElementsKind elements_kind = object->GetElementsKind();
  if (introduces_holes && !IsFastHoleyElementsKind(elements_kind)) {
    ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
    TransitionElementsKind(object, transitioned_kind);
  }

  // Check whether there is extra space in the fixed array.
  if (index < elms_length) {
    Handle<FixedDoubleArray> elms(FixedDoubleArray::cast(object->elements()));
    elms->set(index, double_value);
    if (object->IsJSArray()) {
      // Update the length of the array if needed.
      uint32_t array_length = 0;
      CHECK(
          Handle<JSArray>::cast(object)->length()->ToArrayIndex(&array_length));
      if (index >= array_length) {
        Handle<JSArray>::cast(object)->set_length(Smi::FromInt(index + 1));
      }
    }
    return value;
  }

  // Allow gap in fast case.
  if ((index - elms_length) < kMaxGap) {
    // Try allocating extra space.
    int new_capacity = NewElementsCapacity(index+1);
    if (!object->ShouldConvertToSlowElements(new_capacity)) {
      ASSERT(static_cast<uint32_t>(new_capacity) > index);
      SetFastDoubleElementsCapacityAndLength(object, new_capacity, index + 1);
      FixedDoubleArray::cast(object->elements())->set(index, double_value);
      JSObject::ValidateElements(object);
      return value;
    }
  }

  // Otherwise default to slow case.
  ASSERT(object->HasFastDoubleElements());
  ASSERT(object->map()->has_fast_double_elements());
  ASSERT(object->elements()->IsFixedDoubleArray() ||
         object->elements()->length() == 0);

  NormalizeElements(object);
  ASSERT(object->HasDictionaryElements());
  return SetElement(object, index, value, NONE, strict_mode, check_prototype);
}


// Generic receiver entry point for element stores: proxies are dispatched to
// their handler trap, everything else goes through JSObject::SetElement.
MaybeHandle<Object> JSReceiver::SetElement(Handle<JSReceiver> object,
                                           uint32_t index,
                                           Handle<Object> value,
                                           PropertyAttributes attributes,
                                           StrictMode strict_mode) {
  if (object->IsJSProxy()) {
    return JSProxy::SetElementWithHandler(
        Handle<JSProxy>::cast(object), object, index, value, strict_mode);
  }
  return JSObject::SetElement(
      Handle<JSObject>::cast(object), index, value, attributes, strict_mode);
}


// Stores an own element without consulting the prototype chain
// (check_prototype == false) and with default (NONE) attributes.
MaybeHandle<Object> JSObject::SetOwnElement(Handle<JSObject> object,
                                            uint32_t index,
                                            Handle<Object> value,
                                            StrictMode strict_mode) {
  ASSERT(!object->HasExternalArrayElements());
  return JSObject::SetElement(object, index, value, NONE, strict_mode, false);
}


// Main JSObject element-store entry point: handles typed-array coercion,
// access checks, global proxies, attribute normalization, interceptors, and
// observation (body continues below this excerpt).
MaybeHandle<Object> JSObject::SetElement(Handle<JSObject> object,
                                         uint32_t index,
                                         Handle<Object> value,
                                         PropertyAttributes attributes,
                                         StrictMode strict_mode,
                                         bool check_prototype,
                                         SetPropertyMode set_mode) {
  Isolate*
isolate = object->GetIsolate(); 12844 12845 if (object->HasExternalArrayElements() || 12846 object->HasFixedTypedArrayElements()) { 12847 if (!value->IsNumber() && !value->IsUndefined()) { 12848 ASSIGN_RETURN_ON_EXCEPTION( 12849 isolate, value, 12850 Execution::ToNumber(isolate, value), Object); 12851 } 12852 } 12853 12854 // Check access rights if needed. 12855 if (object->IsAccessCheckNeeded()) { 12856 if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_SET)) { 12857 isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET); 12858 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object); 12859 return value; 12860 } 12861 } 12862 12863 if (object->IsJSGlobalProxy()) { 12864 Handle<Object> proto(object->GetPrototype(), isolate); 12865 if (proto->IsNull()) return value; 12866 ASSERT(proto->IsJSGlobalObject()); 12867 return SetElement(Handle<JSObject>::cast(proto), index, value, attributes, 12868 strict_mode, 12869 check_prototype, 12870 set_mode); 12871 } 12872 12873 // Don't allow element properties to be redefined for external arrays. 12874 if ((object->HasExternalArrayElements() || 12875 object->HasFixedTypedArrayElements()) && 12876 set_mode == DEFINE_PROPERTY) { 12877 Handle<Object> number = isolate->factory()->NewNumberFromUint(index); 12878 Handle<Object> args[] = { object, number }; 12879 Handle<Object> error = isolate->factory()->NewTypeError( 12880 "redef_external_array_element", HandleVector(args, ARRAY_SIZE(args))); 12881 return isolate->Throw<Object>(error); 12882 } 12883 12884 // Normalize the elements to enable attributes on the property. 12885 if ((attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) != 0) { 12886 Handle<SeededNumberDictionary> dictionary = NormalizeElements(object); 12887 // Make sure that we never go back to fast case. 12888 dictionary->set_requires_slow_elements(); 12889 } 12890 12891 if (!object->map()->is_observed()) { 12892 return object->HasIndexedInterceptor() 12893 ? 
SetElementWithInterceptor(object, index, value, attributes, 12894 strict_mode, check_prototype, set_mode) 12895 : SetElementWithoutInterceptor(object, index, value, attributes, 12896 strict_mode, check_prototype, set_mode); 12897 } 12898 12899 PropertyAttributes old_attributes = 12900 JSReceiver::GetOwnElementAttribute(object, index); 12901 Handle<Object> old_value = isolate->factory()->the_hole_value(); 12902 Handle<Object> old_length_handle; 12903 Handle<Object> new_length_handle; 12904 12905 if (old_attributes != ABSENT) { 12906 if (GetOwnElementAccessorPair(object, index).is_null()) { 12907 old_value = Object::GetElement(isolate, object, index).ToHandleChecked(); 12908 } 12909 } else if (object->IsJSArray()) { 12910 // Store old array length in case adding an element grows the array. 12911 old_length_handle = handle(Handle<JSArray>::cast(object)->length(), 12912 isolate); 12913 } 12914 12915 // Check for lookup interceptor 12916 Handle<Object> result; 12917 ASSIGN_RETURN_ON_EXCEPTION( 12918 isolate, result, 12919 object->HasIndexedInterceptor() 12920 ? 
SetElementWithInterceptor( 12921 object, index, value, attributes, 12922 strict_mode, check_prototype, set_mode) 12923 : SetElementWithoutInterceptor( 12924 object, index, value, attributes, 12925 strict_mode, check_prototype, set_mode), 12926 Object); 12927 12928 Handle<String> name = isolate->factory()->Uint32ToString(index); 12929 PropertyAttributes new_attributes = GetOwnElementAttribute(object, index); 12930 if (old_attributes == ABSENT) { 12931 if (object->IsJSArray() && 12932 !old_length_handle->SameValue( 12933 Handle<JSArray>::cast(object)->length())) { 12934 new_length_handle = handle(Handle<JSArray>::cast(object)->length(), 12935 isolate); 12936 uint32_t old_length = 0; 12937 uint32_t new_length = 0; 12938 CHECK(old_length_handle->ToArrayIndex(&old_length)); 12939 CHECK(new_length_handle->ToArrayIndex(&new_length)); 12940 12941 BeginPerformSplice(Handle<JSArray>::cast(object)); 12942 EnqueueChangeRecord(object, "add", name, old_value); 12943 EnqueueChangeRecord(object, "update", isolate->factory()->length_string(), 12944 old_length_handle); 12945 EndPerformSplice(Handle<JSArray>::cast(object)); 12946 Handle<JSArray> deleted = isolate->factory()->NewJSArray(0); 12947 EnqueueSpliceRecord(Handle<JSArray>::cast(object), old_length, deleted, 12948 new_length - old_length); 12949 } else { 12950 EnqueueChangeRecord(object, "add", name, old_value); 12951 } 12952 } else if (old_value->IsTheHole()) { 12953 EnqueueChangeRecord(object, "reconfigure", name, old_value); 12954 } else { 12955 Handle<Object> new_value = 12956 Object::GetElement(isolate, object, index).ToHandleChecked(); 12957 bool value_changed = !old_value->SameValue(*new_value); 12958 if (old_attributes != new_attributes) { 12959 if (!value_changed) old_value = isolate->factory()->the_hole_value(); 12960 EnqueueChangeRecord(object, "reconfigure", name, old_value); 12961 } else if (value_changed) { 12962 EnqueueChangeRecord(object, "update", name, old_value); 12963 } 12964 } 12965 12966 return result; 
}


// Performs the actual indexed store once SetElement has done coercion,
// access checks, and observation bookkeeping.  Dispatches on the
// receiver's ElementsKind.
MaybeHandle<Object> JSObject::SetElementWithoutInterceptor(
    Handle<JSObject> object,
    uint32_t index,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictMode strict_mode,
    bool check_prototype,
    SetPropertyMode set_mode) {
  // Non-default attributes are only representable in dictionary mode.
  ASSERT(object->HasDictionaryElements() ||
         object->HasDictionaryArgumentsElements() ||
         (attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) == 0);
  Isolate* isolate = object->GetIsolate();
  if (FLAG_trace_external_array_abuse &&
      IsExternalArrayElementsKind(object->GetElementsKind())) {
    CheckArrayAbuse(object, "external elements write", index);
  }
  if (FLAG_trace_js_array_abuse &&
      !IsExternalArrayElementsKind(object->GetElementsKind())) {
    if (object->IsJSArray()) {
      CheckArrayAbuse(object, "elements write", index, true);
    }
  }
  // Writing past a read-only array length: silently ignored in sloppy
  // mode, TypeError in strict mode.
  if (object->IsJSArray() && JSArray::WouldChangeReadOnlyLength(
      Handle<JSArray>::cast(object), index)) {
    if (strict_mode == SLOPPY) {
      return value;
    } else {
      return JSArray::ReadOnlyLengthError(Handle<JSArray>::cast(object));
    }
  }
  switch (object->GetElementsKind()) {
    case FAST_SMI_ELEMENTS:
    case FAST_ELEMENTS:
    case FAST_HOLEY_SMI_ELEMENTS:
    case FAST_HOLEY_ELEMENTS:
      return SetFastElement(object, index, value, strict_mode, check_prototype);
    case FAST_DOUBLE_ELEMENTS:
    case FAST_HOLEY_DOUBLE_ELEMENTS:
      return SetFastDoubleElement(object, index, value, strict_mode,
                                  check_prototype);

#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                       \
    case EXTERNAL_##TYPE##_ELEMENTS: {                                        \
      Handle<External##Type##Array> array(                                    \
          External##Type##Array::cast(object->elements()));                   \
      return External##Type##Array::SetValue(array, index, value);            \
    }                                                                         \
    case TYPE##_ELEMENTS: {                                                   \
      Handle<Fixed##Type##Array> array(                                       \
          Fixed##Type##Array::cast(object->elements()));                      \
      return Fixed##Type##Array::SetValue(array, index, value);               \
    }

    TYPED_ARRAYS(TYPED_ARRAY_CASE)

#undef TYPED_ARRAY_CASE

    case DICTIONARY_ELEMENTS:
      return SetDictionaryElement(object, index, value, attributes, strict_mode,
                                  check_prototype,
                                  set_mode);
    case SLOPPY_ARGUMENTS_ELEMENTS: {
      Handle<FixedArray> parameter_map(FixedArray::cast(object->elements()));
      uint32_t length = parameter_map->length();
      // Slots 0 and 1 of the parameter map hold the context and the
      // arguments backing store; mapped entries start at index 2.
      Handle<Object> probe = index < length - 2 ?
          Handle<Object>(parameter_map->get(index + 2), isolate) :
          Handle<Object>();
      if (!probe.is_null() && !probe->IsTheHole()) {
        // The element is aliased to a context slot; write through to it.
        Handle<Context> context(Context::cast(parameter_map->get(0)));
        int context_index = Handle<Smi>::cast(probe)->value();
        ASSERT(!context->get(context_index)->IsTheHole());
        context->set(context_index, *value);
        // Redefining attributes of an aliased element destroys fast aliasing.
        if (set_mode == SET_PROPERTY || attributes == NONE) return value;
        parameter_map->set_the_hole(index + 2);
        // For elements that are still writable we re-establish slow aliasing.
        if ((attributes & READ_ONLY) == 0) {
          value = Handle<Object>::cast(
              isolate->factory()->NewAliasedArgumentsEntry(context_index));
        }
      }
      Handle<FixedArray> arguments(FixedArray::cast(parameter_map->get(1)));
      if (arguments->IsDictionary()) {
        return SetDictionaryElement(object, index, value, attributes,
                                    strict_mode,
                                    check_prototype,
                                    set_mode);
      } else {
        return SetFastElement(object, index, value, strict_mode,
                              check_prototype);
      }
    }
  }
  // All possible cases have been handled above. Add a return to avoid the
  // complaints from the compiler.
  UNREACHABLE();
  return isolate->factory()->null_value();
}


// Threshold ratio used by the pretenuring decision heuristics
// (consumers live outside this part of the file).
const double AllocationSite::kPretenureRatio = 0.85;


// Forgets any previous pretenuring decision and clears both memento
// counters.
void AllocationSite::ResetPretenureDecision() {
  set_pretenure_decision(kUndecided);
  set_memento_found_count(0);
  set_memento_create_count(0);
}


// Maps the site's decision onto an allocation flag: only an explicit
// kTenure decision yields TENURED.
PretenureFlag AllocationSite::GetPretenureMode() {
  PretenureDecision mode = pretenure_decision();
  // Zombie objects "decide" to be untenured.
  return mode == kTenure ? TENURED : NOT_TENURED;
}


// Returns true if this site appears as the nested_site() of any site on
// the heap's allocation-sites list.  Only used for tracing output.
bool AllocationSite::IsNestedSite() {
  ASSERT(FLAG_trace_track_allocation_sites);
  Object* current = GetHeap()->allocation_sites_list();
  while (current->IsAllocationSite()) {
    AllocationSite* current_site = AllocationSite::cast(current);
    if (current_site->nested_site() == this) {
      return true;
    }
    current = current_site->weak_next();
  }
  return false;
}


// Records that an object allocated from this site transitioned its
// elements to to_kind: either transitions the boilerplate literal or
// updates the site's stored ElementsKind, deoptimizing dependent code
// when a real change happened.
void AllocationSite::DigestTransitionFeedback(Handle<AllocationSite> site,
                                              ElementsKind to_kind) {
  Isolate* isolate = site->GetIsolate();

  if (site->SitePointsToLiteral() && site->transition_info()->IsJSArray()) {
    Handle<JSArray> transition_info =
        handle(JSArray::cast(site->transition_info()));
    ElementsKind kind = transition_info->GetElementsKind();
    // if kind is holey ensure that to_kind is as well.
    if (IsHoleyElementsKind(kind)) {
      to_kind = GetHoleyElementsKind(to_kind);
    }
    if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
      // If the array is huge, it's not likely to be defined in a local
      // function, so we shouldn't make new instances of it very often.
      uint32_t length = 0;
      CHECK(transition_info->length()->ToArrayIndex(&length));
      if (length <= kMaximumArrayBytesToPretransition) {
        if (FLAG_trace_track_allocation_sites) {
          bool is_nested = site->IsNestedSite();
          PrintF(
              "AllocationSite: JSArray %p boilerplate %s updated %s->%s\n",
              reinterpret_cast<void*>(*site),
              is_nested ? "(nested)" : "",
              ElementsKindToString(kind),
              ElementsKindToString(to_kind));
        }
        JSObject::TransitionElementsKind(transition_info, to_kind);
        site->dependent_code()->DeoptimizeDependentCodeGroup(
            isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
      }
    }
  } else {
    ElementsKind kind = site->GetElementsKind();
    // if kind is holey ensure that to_kind is as well.
    if (IsHoleyElementsKind(kind)) {
      to_kind = GetHoleyElementsKind(to_kind);
    }
    if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
      if (FLAG_trace_track_allocation_sites) {
        PrintF("AllocationSite: JSArray %p site updated %s->%s\n",
               reinterpret_cast<void*>(*site),
               ElementsKindToString(kind),
               ElementsKindToString(to_kind));
      }
      site->SetElementsKind(to_kind);
      site->dependent_code()->DeoptimizeDependentCodeGroup(
          isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
    }
  }
}


// static
// Registers info's code as dependent on this site for the given reason
// so the site can trigger its deoptimization later.
void AllocationSite::AddDependentCompilationInfo(Handle<AllocationSite> site,
                                                 Reason reason,
                                                 CompilationInfo* info) {
  DependentCode::DependencyGroup group = site->ToDependencyGroup(reason);
  Handle<DependentCode> dep(site->dependent_code());
  Handle<DependentCode> codes =
      DependentCode::Insert(dep, group, info->object_wrapper());
  // Insert may return a new array; write it back only if it changed.
  if (*codes != site->dependent_code()) site->set_dependent_code(*codes);
  info->dependencies(group)->Add(Handle<HeapObject>(*site), info->zone());
}


// Human-readable name for a PretenureDecision, for tracing.
const char*
AllocationSite::PretenureDecisionName(PretenureDecision decision) { 13168 switch (decision) { 13169 case kUndecided: return "undecided"; 13170 case kDontTenure: return "don't tenure"; 13171 case kMaybeTenure: return "maybe tenure"; 13172 case kTenure: return "tenure"; 13173 case kZombie: return "zombie"; 13174 default: UNREACHABLE(); 13175 } 13176 return NULL; 13177 } 13178 13179 13180 void JSObject::UpdateAllocationSite(Handle<JSObject> object, 13181 ElementsKind to_kind) { 13182 if (!object->IsJSArray()) return; 13183 13184 Heap* heap = object->GetHeap(); 13185 if (!heap->InNewSpace(*object)) return; 13186 13187 Handle<AllocationSite> site; 13188 { 13189 DisallowHeapAllocation no_allocation; 13190 13191 AllocationMemento* memento = heap->FindAllocationMemento(*object); 13192 if (memento == NULL) return; 13193 13194 // Walk through to the Allocation Site 13195 site = handle(memento->GetAllocationSite()); 13196 } 13197 AllocationSite::DigestTransitionFeedback(site, to_kind); 13198 } 13199 13200 13201 void JSObject::TransitionElementsKind(Handle<JSObject> object, 13202 ElementsKind to_kind) { 13203 ElementsKind from_kind = object->map()->elements_kind(); 13204 13205 if (IsFastHoleyElementsKind(from_kind)) { 13206 to_kind = GetHoleyElementsKind(to_kind); 13207 } 13208 13209 if (from_kind == to_kind) return; 13210 // Don't update the site if to_kind isn't fast 13211 if (IsFastElementsKind(to_kind)) { 13212 UpdateAllocationSite(object, to_kind); 13213 } 13214 13215 Isolate* isolate = object->GetIsolate(); 13216 if (object->elements() == isolate->heap()->empty_fixed_array() || 13217 (IsFastSmiOrObjectElementsKind(from_kind) && 13218 IsFastSmiOrObjectElementsKind(to_kind)) || 13219 (from_kind == FAST_DOUBLE_ELEMENTS && 13220 to_kind == FAST_HOLEY_DOUBLE_ELEMENTS)) { 13221 ASSERT(from_kind != TERMINAL_FAST_ELEMENTS_KIND); 13222 // No change is needed to the elements() buffer, the transition 13223 // only requires a map change. 
13224 Handle<Map> new_map = GetElementsTransitionMap(object, to_kind); 13225 MigrateToMap(object, new_map); 13226 if (FLAG_trace_elements_transitions) { 13227 Handle<FixedArrayBase> elms(object->elements()); 13228 PrintElementsTransition(stdout, object, from_kind, elms, to_kind, elms); 13229 } 13230 return; 13231 } 13232 13233 Handle<FixedArrayBase> elms(object->elements()); 13234 uint32_t capacity = static_cast<uint32_t>(elms->length()); 13235 uint32_t length = capacity; 13236 13237 if (object->IsJSArray()) { 13238 Object* raw_length = Handle<JSArray>::cast(object)->length(); 13239 if (raw_length->IsUndefined()) { 13240 // If length is undefined, then JSArray is being initialized and has no 13241 // elements, assume a length of zero. 13242 length = 0; 13243 } else { 13244 CHECK(raw_length->ToArrayIndex(&length)); 13245 } 13246 } 13247 13248 if (IsFastSmiElementsKind(from_kind) && 13249 IsFastDoubleElementsKind(to_kind)) { 13250 SetFastDoubleElementsCapacityAndLength(object, capacity, length); 13251 JSObject::ValidateElements(object); 13252 return; 13253 } 13254 13255 if (IsFastDoubleElementsKind(from_kind) && 13256 IsFastObjectElementsKind(to_kind)) { 13257 SetFastElementsCapacityAndLength(object, capacity, length, 13258 kDontAllowSmiElements); 13259 JSObject::ValidateElements(object); 13260 return; 13261 } 13262 13263 // This method should never be called for any other case than the ones 13264 // handled above. 13265 UNREACHABLE(); 13266 } 13267 13268 13269 // static 13270 bool Map::IsValidElementsTransition(ElementsKind from_kind, 13271 ElementsKind to_kind) { 13272 // Transitions can't go backwards. 13273 if (!IsMoreGeneralElementsKindTransition(from_kind, to_kind)) { 13274 return false; 13275 } 13276 13277 // Transitions from HOLEY -> PACKED are not allowed. 
13278 return !IsFastHoleyElementsKind(from_kind) || 13279 IsFastHoleyElementsKind(to_kind); 13280 } 13281 13282 13283 void JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray> array, 13284 uint32_t index, 13285 Handle<Object> value) { 13286 uint32_t old_len = 0; 13287 CHECK(array->length()->ToArrayIndex(&old_len)); 13288 // Check to see if we need to update the length. For now, we make 13289 // sure that the length stays within 32-bits (unsigned). 13290 if (index >= old_len && index != 0xffffffff) { 13291 Handle<Object> len = array->GetIsolate()->factory()->NewNumber( 13292 static_cast<double>(index) + 1); 13293 array->set_length(*len); 13294 } 13295 } 13296 13297 13298 bool JSArray::IsReadOnlyLengthDescriptor(Handle<Map> jsarray_map) { 13299 Isolate* isolate = jsarray_map->GetIsolate(); 13300 ASSERT(!jsarray_map->is_dictionary_map()); 13301 LookupResult lookup(isolate); 13302 Handle<Name> length_string = isolate->factory()->length_string(); 13303 jsarray_map->LookupDescriptor(NULL, *length_string, &lookup); 13304 return lookup.IsReadOnly(); 13305 } 13306 13307 13308 bool JSArray::WouldChangeReadOnlyLength(Handle<JSArray> array, 13309 uint32_t index) { 13310 uint32_t length = 0; 13311 CHECK(array->length()->ToArrayIndex(&length)); 13312 if (length <= index) { 13313 Isolate* isolate = array->GetIsolate(); 13314 LookupResult lookup(isolate); 13315 Handle<Name> length_string = isolate->factory()->length_string(); 13316 array->LookupOwnRealNamedProperty(length_string, &lookup); 13317 return lookup.IsReadOnly(); 13318 } 13319 return false; 13320 } 13321 13322 13323 MaybeHandle<Object> JSArray::ReadOnlyLengthError(Handle<JSArray> array) { 13324 Isolate* isolate = array->GetIsolate(); 13325 Handle<Name> length = isolate->factory()->length_string(); 13326 Handle<Object> args[2] = { length, array }; 13327 Handle<Object> error = isolate->factory()->NewTypeError( 13328 "strict_read_only_property", HandleVector(args, ARRAY_SIZE(args))); 13329 return 
  isolate->Throw<Object>(error);
}


// Loads an element through the object's indexed interceptor; falls back
// to the regular elements accessor and then the prototype chain when the
// interceptor does not produce a value.
MaybeHandle<Object> JSObject::GetElementWithInterceptor(
    Handle<JSObject> object,
    Handle<Object> receiver,
    uint32_t index) {
  Isolate* isolate = object->GetIsolate();

  // Make sure that the top context does not change when doing
  // callbacks or interceptor calls.
  AssertNoContextChange ncc(isolate);

  Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor(), isolate);
  if (!interceptor->getter()->IsUndefined()) {
    v8::IndexedPropertyGetterCallback getter =
        v8::ToCData<v8::IndexedPropertyGetterCallback>(interceptor->getter());
    LOG(isolate,
        ApiIndexedPropertyAccess("interceptor-indexed-get", *object, index));
    PropertyCallbackArguments
        args(isolate, interceptor->data(), *receiver, *object);
    v8::Handle<v8::Value> result = args.Call(getter, index);
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    if (!result.IsEmpty()) {
      Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
      result_internal->VerifyApiCallResultType();
      // Rebox handle before return.
      return handle(*result_internal, isolate);
    }
  }

  // Interceptor declined: use the regular elements accessor.
  ElementsAccessor* handler = object->GetElementsAccessor();
  Handle<Object> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate, result, handler->Get(receiver, object, index),
      Object);
  if (!result->IsTheHole()) return result;

  // Not found locally: continue the lookup on the prototype chain.
  Handle<Object> proto(object->GetPrototype(), isolate);
  if (proto->IsNull()) return isolate->factory()->undefined_value();
  return Object::GetElementWithReceiver(isolate, proto, receiver, index);
}


// An object's elements are "dense" when more than half of the backing
// store's capacity is in use (or there is no capacity at all).
bool JSObject::HasDenseElements() {
  int capacity = 0;
  int used = 0;
  GetElementsCapacityAndUsage(&capacity, &used);
  return (capacity == 0) || (used > (capacity / 2));
}


// Reports the capacity of the elements backing store and the number of
// occupied slots; input to the fast/slow elements heuristics below.
void JSObject::GetElementsCapacityAndUsage(int* capacity, int* used) {
  *capacity = 0;
  *used = 0;

  FixedArrayBase* backing_store_base = FixedArrayBase::cast(elements());
  FixedArray* backing_store = NULL;
  switch (GetElementsKind()) {
    case SLOPPY_ARGUMENTS_ELEMENTS:
      // Slot 1 of the parameter map holds the actual backing store.
      backing_store_base =
          FixedArray::cast(FixedArray::cast(backing_store_base)->get(1));
      backing_store = FixedArray::cast(backing_store_base);
      if (backing_store->IsDictionary()) {
        SeededNumberDictionary* dictionary =
            SeededNumberDictionary::cast(backing_store);
        *capacity = dictionary->Capacity();
        *used = dictionary->NumberOfElements();
        break;
      }
      // Fall through.
    case FAST_SMI_ELEMENTS:
    case FAST_ELEMENTS:
      if (IsJSArray()) {
        // Packed array: every slot below length counts as used.
        *capacity = backing_store_base->length();
        *used = Smi::cast(JSArray::cast(this)->length())->value();
        break;
      }
      // Fall through if packing is not guaranteed.
    case FAST_HOLEY_SMI_ELEMENTS:
    case FAST_HOLEY_ELEMENTS:
      backing_store = FixedArray::cast(backing_store_base);
      *capacity = backing_store->length();
      // Holey kinds: count non-hole slots one by one.
      for (int i = 0; i < *capacity; ++i) {
        if (!backing_store->get(i)->IsTheHole()) ++(*used);
      }
      break;
    case DICTIONARY_ELEMENTS: {
      SeededNumberDictionary* dictionary = element_dictionary();
      *capacity = dictionary->Capacity();
      *used = dictionary->NumberOfElements();
      break;
    }
    case FAST_DOUBLE_ELEMENTS:
      if (IsJSArray()) {
        *capacity = backing_store_base->length();
        *used = Smi::cast(JSArray::cast(this)->length())->value();
        break;
      }
      // Fall through if packing is not guaranteed.
    case FAST_HOLEY_DOUBLE_ELEMENTS: {
      *capacity = elements()->length();
      if (*capacity == 0) break;
      FixedDoubleArray * elms = FixedDoubleArray::cast(elements());
      for (int i = 0; i < *capacity; i++) {
        if (!elms->is_the_hole(i)) ++(*used);
      }
      break;
    }

#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                       \
    case EXTERNAL_##TYPE##_ELEMENTS:                                          \
    case TYPE##_ELEMENTS:                                                     \

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
    {
      // External arrays are considered 100% used.
      FixedArrayBase* external_array = FixedArrayBase::cast(elements());
      *capacity = external_array->length();
      *used = external_array->length();
      break;
    }
  }
}


// Returns true if storing at the index denoted by key would push this
// object's fast elements over the slow-elements thresholds.
bool JSObject::WouldConvertToSlowElements(Handle<Object> key) {
  uint32_t index;
  if (HasFastElements() && key->ToArrayIndex(&index)) {
    Handle<FixedArrayBase> backing_store(FixedArrayBase::cast(elements()));
    uint32_t capacity = static_cast<uint32_t>(backing_store->length());
    if (index >= capacity) {
      // A store far past the current capacity always dictionarizes.
      if ((index - capacity) >= kMaxGap) return true;
      uint32_t new_capacity = NewElementsCapacity(index + 1);
      return ShouldConvertToSlowElements(new_capacity);
    }
  }
  return false;
}


// Decides whether growing the fast backing store to new_capacity is
// still worthwhile compared to switching to a dictionary.
bool JSObject::ShouldConvertToSlowElements(int new_capacity) {
  STATIC_ASSERT(kMaxUncheckedOldFastElementsLength <=
                kMaxUncheckedFastElementsLength);
  // Small capacities always stay fast (larger limit in new space).
  if (new_capacity <= kMaxUncheckedOldFastElementsLength ||
      (new_capacity <= kMaxUncheckedFastElementsLength &&
       GetHeap()->InNewSpace(this))) {
    return false;
  }
  // If the fast-case backing storage takes up roughly three times as
  // much space (in machine words) as a dictionary backing storage
  // would, the object should have slow elements.
  int old_capacity = 0;
  int used_elements = 0;
  GetElementsCapacityAndUsage(&old_capacity, &used_elements);
  int dictionary_size = SeededNumberDictionary::ComputeCapacity(used_elements) *
      SeededNumberDictionary::kEntrySize;
  return 3 * dictionary_size <= new_capacity;
}


// Heuristic for dictionary-mode objects: return to fast elements when
// the elements are dense enough and nothing pins the object to slow
// mode (access checks, observation, high-index stores).
bool JSObject::ShouldConvertToFastElements() {
  ASSERT(HasDictionaryElements() || HasDictionaryArgumentsElements());
  // If the elements are sparse, we should not go back to fast case.
  if (!HasDenseElements()) return false;
  // An object requiring access checks is never allowed to have fast
  // elements.
  // If it had fast elements we would skip security checks.
  if (IsAccessCheckNeeded()) return false;
  // Observed objects may not go to fast mode because they rely on map checks,
  // and for fast element accesses we sometimes check element kinds only.
  if (map()->is_observed()) return false;

  FixedArray* elements = FixedArray::cast(this->elements());
  SeededNumberDictionary* dictionary = NULL;
  if (elements->map() == GetHeap()->sloppy_arguments_elements_map()) {
    // Sloppy arguments keep their dictionary in slot 1.
    dictionary = SeededNumberDictionary::cast(elements->get(1));
  } else {
    dictionary = SeededNumberDictionary::cast(elements);
  }
  // If an element has been added at a very high index in the elements
  // dictionary, we cannot go back to fast case.
  if (dictionary->requires_slow_elements()) return false;
  // If the dictionary backing storage takes up roughly half as much
  // space (in machine words) as a fast-case backing storage would,
  // the object should have fast elements.
  uint32_t array_size = 0;
  if (IsJSArray()) {
    CHECK(JSArray::cast(this)->length()->ToArrayIndex(&array_size));
  } else {
    array_size = dictionary->max_number_key();
  }
  uint32_t dictionary_size = static_cast<uint32_t>(dictionary->Capacity()) *
      SeededNumberDictionary::kEntrySize;
  return 2 * dictionary_size >= array_size;
}


// Returns true if this dictionary-mode object's numeric elements are
// all numbers with at least one non-Smi, in which case converting to a
// double backing store pays off.  *has_smi_only_elements is set when
// every numeric value turned out to be a Smi.
bool JSObject::ShouldConvertToFastDoubleElements(
    bool* has_smi_only_elements) {
  *has_smi_only_elements = false;
  if (HasSloppyArgumentsElements()) return false;
  if (FLAG_unbox_double_arrays) {
    ASSERT(HasDictionaryElements());
    SeededNumberDictionary* dictionary = element_dictionary();
    bool found_double = false;
    for (int i = 0; i < dictionary->Capacity(); i++) {
      Object* key = dictionary->KeyAt(i);
      if (key->IsNumber()) {
        Object* value = dictionary->ValueAt(i);
        // A single non-number value rules out a double backing store.
        if (!value->IsNumber()) return false;
        if (!value->IsSmi()) {
          found_double = true;
        }
      }
    }
    *has_smi_only_elements = !found_double;
    return found_double;
  } else {
    return false;
  }
}


// Certain compilers request function template instantiation when they
// see the definition of the other template functions in the
// class. This requires us to have the template functions put
// together, so even though this function belongs in objects-debug.cc,
// we keep it here instead to satisfy certain compilers.
#ifdef OBJECT_PRINT
// Dumps every key/value pair in the dictionary to out.
template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::Print(FILE* out) {
  int capacity = DerivedHashTable::Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = DerivedHashTable::KeyAt(i);
    if (DerivedHashTable::IsKey(k)) {
      PrintF(out, " ");
      if (k->IsString()) {
        String::cast(k)->StringPrint(out);
      } else {
        k->ShortPrint(out);
      }
      PrintF(out, ": ");
      ValueAt(i)->ShortPrint(out);
      PrintF(out, "\n");
    }
  }
}
#endif


// Copies the dictionary's values (in table order) into elements, which
// must have exactly as many slots as the dictionary has entries.
template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::CopyValuesTo(FixedArray* elements) {
  int pos = 0;
  int capacity = DerivedHashTable::Capacity();
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = elements->GetWriteBarrierMode(no_gc);
  for (int i = 0; i < capacity; i++) {
    Object* k = Dictionary::KeyAt(i);
    if (Dictionary::IsKey(k)) {
      elements->set(pos++, ValueAt(i), mode);
    }
  }
  ASSERT(pos == elements->length());
}


// Fetches the named-property interceptor recorded on this object's
// constructor's API function data.
InterceptorInfo* JSObject::GetNamedInterceptor() {
  ASSERT(map()->has_named_interceptor());
  JSFunction* constructor = JSFunction::cast(map()->constructor());
  ASSERT(constructor->shared()->IsApiFunction());
  Object* result =
      constructor->shared()->get_api_func_data()->named_property_handler();
  return InterceptorInfo::cast(result);
}


// Fetches the indexed-property interceptor recorded on this object's
// constructor's API function data.
InterceptorInfo* JSObject::GetIndexedInterceptor() {
  ASSERT(map()->has_indexed_interceptor());
  JSFunction* constructor = JSFunction::cast(map()->constructor());
  ASSERT(constructor->shared()->IsApiFunction());
  Object* result =
      constructor->shared()->get_api_func_data()->indexed_property_handler();
  return InterceptorInfo::cast(result);
}


// Loads a named property through the holder's named interceptor.
// Returns undefined for symbols (the API cannot express them yet) and
// an empty MaybeHandle when the interceptor declines.
MaybeHandle<Object> JSObject::GetPropertyWithInterceptor(
    Handle<JSObject> holder,
    Handle<Object> receiver,
    Handle<Name> name) {
  Isolate* isolate = holder->GetIsolate();

  // TODO(rossberg): Support symbols in the API.
  if (name->IsSymbol()) return isolate->factory()->undefined_value();

  Handle<InterceptorInfo> interceptor(holder->GetNamedInterceptor(), isolate);
  Handle<String> name_string = Handle<String>::cast(name);

  if (interceptor->getter()->IsUndefined()) return MaybeHandle<Object>();

  v8::NamedPropertyGetterCallback getter =
      v8::ToCData<v8::NamedPropertyGetterCallback>(interceptor->getter());
  LOG(isolate,
      ApiNamedPropertyAccess("interceptor-named-get", *holder, *name));
  PropertyCallbackArguments
      args(isolate, interceptor->data(), *receiver, *holder);
  v8::Handle<v8::Value> result =
      args.Call(getter, v8::Utils::ToLocal(name_string));
  RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
  if (result.IsEmpty()) return MaybeHandle<Object>();

  Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
  result_internal->VerifyApiCallResultType();
  // Rebox handle before return
  return handle(*result_internal, isolate);
}


// Compute the property keys from the interceptor.
// TODO(rossberg): support symbols in API, and filter here if needed.
13651 MaybeHandle<JSObject> JSObject::GetKeysForNamedInterceptor( 13652 Handle<JSObject> object, Handle<JSReceiver> receiver) { 13653 Isolate* isolate = receiver->GetIsolate(); 13654 Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor()); 13655 PropertyCallbackArguments 13656 args(isolate, interceptor->data(), *receiver, *object); 13657 v8::Handle<v8::Object> result; 13658 if (!interceptor->enumerator()->IsUndefined()) { 13659 v8::NamedPropertyEnumeratorCallback enum_fun = 13660 v8::ToCData<v8::NamedPropertyEnumeratorCallback>( 13661 interceptor->enumerator()); 13662 LOG(isolate, ApiObjectAccess("interceptor-named-enum", *object)); 13663 result = args.Call(enum_fun); 13664 } 13665 if (result.IsEmpty()) return MaybeHandle<JSObject>(); 13666 #if ENABLE_EXTRA_CHECKS 13667 CHECK(v8::Utils::OpenHandle(*result)->IsJSArray() || 13668 v8::Utils::OpenHandle(*result)->HasSloppyArgumentsElements()); 13669 #endif 13670 // Rebox before returning. 13671 return handle(*v8::Utils::OpenHandle(*result), isolate); 13672 } 13673 13674 13675 // Compute the element keys from the interceptor. 
13676 MaybeHandle<JSObject> JSObject::GetKeysForIndexedInterceptor( 13677 Handle<JSObject> object, Handle<JSReceiver> receiver) { 13678 Isolate* isolate = receiver->GetIsolate(); 13679 Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor()); 13680 PropertyCallbackArguments 13681 args(isolate, interceptor->data(), *receiver, *object); 13682 v8::Handle<v8::Object> result; 13683 if (!interceptor->enumerator()->IsUndefined()) { 13684 v8::IndexedPropertyEnumeratorCallback enum_fun = 13685 v8::ToCData<v8::IndexedPropertyEnumeratorCallback>( 13686 interceptor->enumerator()); 13687 LOG(isolate, ApiObjectAccess("interceptor-indexed-enum", *object)); 13688 result = args.Call(enum_fun); 13689 } 13690 if (result.IsEmpty()) return MaybeHandle<JSObject>(); 13691 #if ENABLE_EXTRA_CHECKS 13692 CHECK(v8::Utils::OpenHandle(*result)->IsJSArray() || 13693 v8::Utils::OpenHandle(*result)->HasSloppyArgumentsElements()); 13694 #endif 13695 // Rebox before returning. 13696 return handle(*v8::Utils::OpenHandle(*result), isolate); 13697 } 13698 13699 13700 bool JSObject::HasRealNamedProperty(Handle<JSObject> object, 13701 Handle<Name> key) { 13702 Isolate* isolate = object->GetIsolate(); 13703 SealHandleScope shs(isolate); 13704 // Check access rights if needed. 13705 if (object->IsAccessCheckNeeded()) { 13706 if (!isolate->MayNamedAccess(object, key, v8::ACCESS_HAS)) { 13707 isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS); 13708 // TODO(yangguo): Issue 3269, check for scheduled exception missing? 13709 return false; 13710 } 13711 } 13712 13713 LookupResult result(isolate); 13714 object->LookupOwnRealNamedProperty(key, &result); 13715 return result.IsFound() && !result.IsInterceptor(); 13716 } 13717 13718 13719 bool JSObject::HasRealElementProperty(Handle<JSObject> object, uint32_t index) { 13720 Isolate* isolate = object->GetIsolate(); 13721 HandleScope scope(isolate); 13722 // Check access rights if needed. 
  if (object->IsAccessCheckNeeded()) {
    if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_HAS)) {
      isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS);
      // TODO(yangguo): Issue 3269, check for scheduled exception missing?
      return false;
    }
  }

  if (object->IsJSGlobalProxy()) {
    HandleScope scope(isolate);
    Handle<Object> proto(object->GetPrototype(), isolate);
    if (proto->IsNull()) return false;
    ASSERT(proto->IsJSGlobalObject());
    return HasRealElementProperty(Handle<JSObject>::cast(proto), index);
  }

  return GetElementAttributeWithoutInterceptor(
             object, object, index, false) != ABSENT;
}


// Returns true if the object itself has a real callback-style property
// named key.  A failed access check reports and answers false.
bool JSObject::HasRealNamedCallbackProperty(Handle<JSObject> object,
                                            Handle<Name> key) {
  Isolate* isolate = object->GetIsolate();
  SealHandleScope shs(isolate);
  // Check access rights if needed.
  if (object->IsAccessCheckNeeded()) {
    if (!isolate->MayNamedAccess(object, key, v8::ACCESS_HAS)) {
      isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS);
      // TODO(yangguo): Issue 3269, check for scheduled exception missing?
      return false;
    }
  }

  LookupResult result(isolate);
  object->LookupOwnRealNamedProperty(key, &result);
  return result.IsPropertyCallbacks();
}


// Counts own properties matching filter, using the map's descriptor
// counts and enum cache for fast-mode objects.
int JSObject::NumberOfOwnProperties(PropertyAttributes filter) {
  if (HasFastProperties()) {
    Map* map = this->map();
    if (filter == NONE) return map->NumberOfOwnDescriptors();
    if (filter & DONT_ENUM) {
      int result = map->EnumLength();
      if (result != kInvalidEnumCacheSentinel) return result;
    }
    return map->NumberOfDescribedProperties(OWN_DESCRIPTORS, filter);
  }
  return property_dictionary()->NumberOfElementsFilterAttributes(filter);
}


// Swaps entries i and j in this array and, when the arrays are
// distinct, in the parallel numbers array as well.
void FixedArray::SwapPairs(FixedArray* numbers, int i, int j) {
  Object* temp = get(i);
  set(i, get(j));
  set(j, temp);
  if (this != numbers) {
    temp = numbers->get(i);
    numbers->set(i, Smi::cast(numbers->get(j)));
    numbers->set(j, Smi::cast(temp));
  }
}


// Insertion sort of content keyed by the uint32 values in numbers.
static void InsertionSortPairs(FixedArray* content,
                               FixedArray* numbers,
                               int len) {
  for (int i = 1; i < len; i++) {
    int j = i;
    while (j > 0 &&
           (NumberToUint32(numbers->get(j - 1)) >
            NumberToUint32(numbers->get(j)))) {
      content->SwapPairs(numbers, j - 1, j);
      j--;
    }
  }
}


// In-place heap sort of content keyed by the uint32 values in numbers.
void HeapSortPairs(FixedArray* content, FixedArray* numbers, int len) {
  // In-place heap sort.
  ASSERT(content->length() == numbers->length());

  // Bottom-up max-heap construction.
13809 for (int i = 1; i < len; ++i) { 13810 int child_index = i; 13811 while (child_index > 0) { 13812 int parent_index = ((child_index + 1) >> 1) - 1; 13813 uint32_t parent_value = NumberToUint32(numbers->get(parent_index)); 13814 uint32_t child_value = NumberToUint32(numbers->get(child_index)); 13815 if (parent_value < child_value) { 13816 content->SwapPairs(numbers, parent_index, child_index); 13817 } else { 13818 break; 13819 } 13820 child_index = parent_index; 13821 } 13822 } 13823 13824 // Extract elements and create sorted array. 13825 for (int i = len - 1; i > 0; --i) { 13826 // Put max element at the back of the array. 13827 content->SwapPairs(numbers, 0, i); 13828 // Sift down the new top element. 13829 int parent_index = 0; 13830 while (true) { 13831 int child_index = ((parent_index + 1) << 1) - 1; 13832 if (child_index >= i) break; 13833 uint32_t child1_value = NumberToUint32(numbers->get(child_index)); 13834 uint32_t child2_value = NumberToUint32(numbers->get(child_index + 1)); 13835 uint32_t parent_value = NumberToUint32(numbers->get(parent_index)); 13836 if (child_index + 1 >= i || child1_value > child2_value) { 13837 if (parent_value > child1_value) break; 13838 content->SwapPairs(numbers, parent_index, child_index); 13839 parent_index = child_index; 13840 } else { 13841 if (parent_value > child2_value) break; 13842 content->SwapPairs(numbers, parent_index, child_index + 1); 13843 parent_index = child_index + 1; 13844 } 13845 } 13846 } 13847 } 13848 13849 13850 // Sort this array and the numbers as pairs wrt. the (distinct) numbers. 13851 void FixedArray::SortPairs(FixedArray* numbers, uint32_t len) { 13852 ASSERT(this->length() == numbers->length()); 13853 // For small arrays, simply use insertion sort. 13854 if (len <= 10) { 13855 InsertionSortPairs(this, numbers, len); 13856 return; 13857 } 13858 // Check the range of indices. 
13859 uint32_t min_index = NumberToUint32(numbers->get(0)); 13860 uint32_t max_index = min_index; 13861 uint32_t i; 13862 for (i = 1; i < len; i++) { 13863 if (NumberToUint32(numbers->get(i)) < min_index) { 13864 min_index = NumberToUint32(numbers->get(i)); 13865 } else if (NumberToUint32(numbers->get(i)) > max_index) { 13866 max_index = NumberToUint32(numbers->get(i)); 13867 } 13868 } 13869 if (max_index - min_index + 1 == len) { 13870 // Indices form a contiguous range, unless there are duplicates. 13871 // Do an in-place linear time sort assuming distinct numbers, but 13872 // avoid hanging in case they are not. 13873 for (i = 0; i < len; i++) { 13874 uint32_t p; 13875 uint32_t j = 0; 13876 // While the current element at i is not at its correct position p, 13877 // swap the elements at these two positions. 13878 while ((p = NumberToUint32(numbers->get(i)) - min_index) != i && 13879 j++ < len) { 13880 SwapPairs(numbers, i, p); 13881 } 13882 } 13883 } else { 13884 HeapSortPairs(this, numbers, len); 13885 return; 13886 } 13887 } 13888 13889 13890 // Fill in the names of own properties into the supplied storage. The main 13891 // purpose of this function is to provide reflection information for the object 13892 // mirrors. 
void JSObject::GetOwnPropertyNames(
    FixedArray* storage, int index, PropertyAttributes filter) {
  // |storage| must have room for every filtered own property past |index|.
  ASSERT(storage->length() >= (NumberOfOwnProperties(filter) - index));
  if (HasFastProperties()) {
    int real_size = map()->NumberOfOwnDescriptors();
    DescriptorArray* descs = map()->instance_descriptors();
    for (int i = 0; i < real_size; i++) {
      // Skip properties whose attributes match |filter| and keys rejected
      // by FilterKey.
      if ((descs->GetDetails(i).attributes() & filter) == 0 &&
          !FilterKey(descs->GetKey(i), filter)) {
        storage->set(index++, descs->GetKey(i));
      }
    }
  } else {
    property_dictionary()->CopyKeysTo(storage,
                                      index,
                                      filter,
                                      NameDictionary::UNSORTED);
  }
}


// Counts own elements passing |filter| without collecting them (NULL storage).
int JSObject::NumberOfOwnElements(PropertyAttributes filter) {
  return GetOwnElementKeys(NULL, filter);
}


// Counts the enumerable own elements of this object.
int JSObject::NumberOfEnumElements() {
  // Fast case for objects with no elements.
  if (!IsJSValue() && HasFastObjectElements()) {
    uint32_t length = IsJSArray() ?
        static_cast<uint32_t>(
            Smi::cast(JSArray::cast(this)->length())->value()) :
        static_cast<uint32_t>(FixedArray::cast(elements())->length());
    if (length == 0) return 0;
  }
  // Compute the number of enumerable elements.
  return NumberOfOwnElements(static_cast<PropertyAttributes>(DONT_ENUM));
}


// Collects the keys of own elements passing |filter| into |storage| (when
// non-NULL) and returns the number of such elements.  Handles every elements
// kind; for JSValue-wrapped strings the string indices are appended last.
int JSObject::GetOwnElementKeys(FixedArray* storage,
                                PropertyAttributes filter) {
  int counter = 0;
  switch (GetElementsKind()) {
    case FAST_SMI_ELEMENTS:
    case FAST_ELEMENTS:
    case FAST_HOLEY_SMI_ELEMENTS:
    case FAST_HOLEY_ELEMENTS: {
      // For arrays the JS-visible length governs, which may be shorter than
      // the backing store.
      int length = IsJSArray() ?
          Smi::cast(JSArray::cast(this)->length())->value() :
          FixedArray::cast(elements())->length();
      for (int i = 0; i < length; i++) {
        if (!FixedArray::cast(elements())->get(i)->IsTheHole()) {
          if (storage != NULL) {
            storage->set(counter, Smi::FromInt(i));
          }
          counter++;
        }
      }
      ASSERT(!storage || storage->length() >= counter);
      break;
    }
    case FAST_DOUBLE_ELEMENTS:
    case FAST_HOLEY_DOUBLE_ELEMENTS: {
      int length = IsJSArray() ?
          Smi::cast(JSArray::cast(this)->length())->value() :
          FixedArrayBase::cast(elements())->length();
      for (int i = 0; i < length; i++) {
        if (!FixedDoubleArray::cast(elements())->is_the_hole(i)) {
          if (storage != NULL) {
            storage->set(counter, Smi::FromInt(i));
          }
          counter++;
        }
      }
      ASSERT(!storage || storage->length() >= counter);
      break;
    }

    // Typed arrays have no holes: every index up to length is a key.
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                      \
    case EXTERNAL_##TYPE##_ELEMENTS:                                         \
    case TYPE##_ELEMENTS:                                                    \

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
    {
      int length = FixedArrayBase::cast(elements())->length();
      while (counter < length) {
        if (storage != NULL) {
          storage->set(counter, Smi::FromInt(counter));
        }
        counter++;
      }
      ASSERT(!storage || storage->length() >= counter);
      break;
    }

    case DICTIONARY_ELEMENTS: {
      if (storage != NULL) {
        element_dictionary()->CopyKeysTo(storage,
                                         filter,
                                         SeededNumberDictionary::SORTED);
      }
      counter += element_dictionary()->NumberOfElementsFilterAttributes(filter);
      break;
    }
    case SLOPPY_ARGUMENTS_ELEMENTS: {
      // parameter_map layout: [context, backing arguments, mapped slots...],
      // so the mapped entries start at offset 2.
      FixedArray* parameter_map = FixedArray::cast(elements());
      int mapped_length = parameter_map->length() - 2;
      FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
      if (arguments->IsDictionary()) {
        // Copy the keys from arguments first, because Dictionary::CopyKeysTo
        // will insert in storage starting at index 0.
        SeededNumberDictionary* dictionary =
            SeededNumberDictionary::cast(arguments);
        if (storage != NULL) {
          dictionary->CopyKeysTo(
              storage, filter, SeededNumberDictionary::UNSORTED);
        }
        counter += dictionary->NumberOfElementsFilterAttributes(filter);
        for (int i = 0; i < mapped_length; ++i) {
          if (!parameter_map->get(i + 2)->IsTheHole()) {
            if (storage != NULL) storage->set(counter, Smi::FromInt(i));
            ++counter;
          }
        }
        // Keys came from two sources; sort them into index order.
        if (storage != NULL) storage->SortPairs(storage, counter);

      } else {
        // An index is present if it is mapped, or if the backing store has a
        // non-hole value at that position.
        int backing_length = arguments->length();
        int i = 0;
        for (; i < mapped_length; ++i) {
          if (!parameter_map->get(i + 2)->IsTheHole()) {
            if (storage != NULL) storage->set(counter, Smi::FromInt(i));
            ++counter;
          } else if (i < backing_length && !arguments->get(i)->IsTheHole()) {
            if (storage != NULL) storage->set(counter, Smi::FromInt(i));
            ++counter;
          }
        }
        for (; i < backing_length; ++i) {
          if (storage != NULL) storage->set(counter, Smi::FromInt(i));
          ++counter;
        }
      }
      break;
    }
  }

  // String wrappers additionally expose one key per character.
  if (this->IsJSValue()) {
    Object* val = JSValue::cast(this)->value();
    if (val->IsString()) {
      String* str = String::cast(val);
      if (storage) {
        for (int i = 0; i < str->length(); i++) {
          storage->set(counter + i, Smi::FromInt(i));
        }
      }
      counter += str->length();
    }
  }
  ASSERT(!storage || storage->length() == counter);
  return counter;
}


// Collects the enumerable own element keys into |storage|.
int JSObject::GetEnumElementKeys(FixedArray* storage) {
  return GetOwnElementKeys(storage, static_cast<PropertyAttributes>(DONT_ENUM));
}


// StringKey simply carries a string object as key.
// NOTE(review): unlike the sibling key classes below, StringKey does not use
// V8_OVERRIDE and provides AsObject(Heap*) rather than AsHandle(Isolate*) —
// confirm this class is still referenced before relying on it.
class StringKey : public HashTableKey {
 public:
  explicit StringKey(String* string) :
      string_(string),
      hash_(HashForObject(string)) { }

  bool IsMatch(Object* string) {
    // We know that all entries in a hash table had their hash keys created.
    // Use that knowledge to have fast failure.
    if (hash_ != HashForObject(string)) {
      return false;
    }
    return string_->Equals(String::cast(string));
  }

  uint32_t Hash() { return hash_; }

  uint32_t HashForObject(Object* other) { return String::cast(other)->Hash(); }

  Object* AsObject(Heap* heap) { return string_; }

  String* string_;
  uint32_t hash_;
};


// StringSharedKeys are used as keys in the eval cache.
// The stored form is a FixedArray of
// [shared function info, source string, strict mode, scope position].
class StringSharedKey : public HashTableKey {
 public:
  StringSharedKey(Handle<String> source,
                  Handle<SharedFunctionInfo> shared,
                  StrictMode strict_mode,
                  int scope_position)
      : source_(source),
        shared_(shared),
        strict_mode_(strict_mode),
        scope_position_(scope_position) { }

  bool IsMatch(Object* other) V8_OVERRIDE {
    DisallowHeapAllocation no_allocation;
    if (!other->IsFixedArray()) return false;
    FixedArray* other_array = FixedArray::cast(other);
    SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
    if (shared != *shared_) return false;
    int strict_unchecked = Smi::cast(other_array->get(2))->value();
    ASSERT(strict_unchecked == SLOPPY || strict_unchecked == STRICT);
    StrictMode strict_mode = static_cast<StrictMode>(strict_unchecked);
    if (strict_mode != strict_mode_) return false;
    int scope_position = Smi::cast(other_array->get(3))->value();
    if (scope_position != scope_position_) return false;
    String* source = String::cast(other_array->get(1));
    return source->Equals(*source_);
  }

  static uint32_t StringSharedHashHelper(String* source,
                                         SharedFunctionInfo* shared,
                                         StrictMode strict_mode,
                                         int scope_position) {
    uint32_t hash = source->Hash();
    if (shared->HasSourceCode()) {
      // Instead of using the SharedFunctionInfo pointer in the hash
      // code computation, we use a combination of the hash of the
      // script source code and the start position of the calling scope.
      // We do this to ensure that the cache entries can survive garbage
      // collection.
      Script* script(Script::cast(shared->script()));
      hash ^= String::cast(script->source())->Hash();
      if (strict_mode == STRICT) hash ^= 0x8000;
      hash += scope_position;
    }
    return hash;
  }

  uint32_t Hash() V8_OVERRIDE {
    return StringSharedHashHelper(*source_, *shared_, strict_mode_,
                                  scope_position_);
  }

  uint32_t HashForObject(Object* obj) V8_OVERRIDE {
    DisallowHeapAllocation no_allocation;
    FixedArray* other_array = FixedArray::cast(obj);
    SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
    String* source = String::cast(other_array->get(1));
    int strict_unchecked = Smi::cast(other_array->get(2))->value();
    ASSERT(strict_unchecked == SLOPPY || strict_unchecked == STRICT);
    StrictMode strict_mode = static_cast<StrictMode>(strict_unchecked);
    int scope_position = Smi::cast(other_array->get(3))->value();
    return StringSharedHashHelper(
        source, shared, strict_mode, scope_position);
  }


  Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
    Handle<FixedArray> array = isolate->factory()->NewFixedArray(4);
    array->set(0, *shared_);
    array->set(1, *source_);
    array->set(2, Smi::FromInt(strict_mode_));
    array->set(3, Smi::FromInt(scope_position_));
    return array;
  }

 private:
  Handle<String> source_;
  Handle<SharedFunctionInfo> shared_;
  StrictMode strict_mode_;
  int scope_position_;
};


// RegExpKey carries the source and flags of a regular expression as key.
class RegExpKey : public HashTableKey {
 public:
  RegExpKey(Handle<String> string, JSRegExp::Flags flags)
      : string_(string),
        flags_(Smi::FromInt(flags.value())) { }

  // Rather than storing the key in the hash table, a pointer to the
  // stored value is stored where the key should be. IsMatch then
  // compares the search key to the found object, rather than comparing
  // a key to a key.
  bool IsMatch(Object* obj) V8_OVERRIDE {
    FixedArray* val = FixedArray::cast(obj);
    return string_->Equals(String::cast(val->get(JSRegExp::kSourceIndex)))
        && (flags_ == val->get(JSRegExp::kFlagsIndex));
  }

  uint32_t Hash() V8_OVERRIDE { return RegExpHash(*string_, flags_); }

  Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
    // Plain hash maps, which is where regexp keys are used, don't
    // use this function.
    UNREACHABLE();
    return MaybeHandle<Object>().ToHandleChecked();
  }

  uint32_t HashForObject(Object* obj) V8_OVERRIDE {
    FixedArray* val = FixedArray::cast(obj);
    return RegExpHash(String::cast(val->get(JSRegExp::kSourceIndex)),
                      Smi::cast(val->get(JSRegExp::kFlagsIndex)));
  }

  static uint32_t RegExpHash(String* string, Smi* flags) {
    return string->Hash() + flags->value();
  }

  Handle<String> string_;
  Smi* flags_;
};


// Materializes a one-byte internalized string for this key, computing the
// hash field first if it has not been computed yet.
Handle<Object> OneByteStringKey::AsHandle(Isolate* isolate) {
  if (hash_field_ == 0) Hash();
  return isolate->factory()->NewOneByteInternalizedString(string_, hash_field_);
}


// Two-byte counterpart of OneByteStringKey::AsHandle above.
Handle<Object> TwoByteStringKey::AsHandle(Isolate* isolate) {
  if (hash_field_ == 0) Hash();
  return isolate->factory()->NewTwoByteInternalizedString(string_, hash_field_);
}


// Returns the raw character data of the underlying one-byte string,
// which may be sequential or external.
template<>
const uint8_t* SubStringKey<uint8_t>::GetChars() {
  return string_->IsSeqOneByteString()
      ? SeqOneByteString::cast(*string_)->GetChars()
      : ExternalAsciiString::cast(*string_)->GetChars();
}


// Returns the raw character data of the underlying two-byte string,
// which may be sequential or external.
template<>
const uint16_t* SubStringKey<uint16_t>::GetChars() {
  return string_->IsSeqTwoByteString()
      ? SeqTwoByteString::cast(*string_)->GetChars()
      : ExternalTwoByteString::cast(*string_)->GetChars();
}


// Internalizes the [from_, from_ + length_) substring as a one-byte string.
template<>
Handle<Object> SubStringKey<uint8_t>::AsHandle(Isolate* isolate) {
  if (hash_field_ == 0) Hash();
  Vector<const uint8_t> chars(GetChars() + from_, length_);
  return isolate->factory()->NewOneByteInternalizedString(chars, hash_field_);
}


// Internalizes the [from_, from_ + length_) substring as a two-byte string.
template<>
Handle<Object> SubStringKey<uint16_t>::AsHandle(Isolate* isolate) {
  if (hash_field_ == 0) Hash();
  Vector<const uint16_t> chars(GetChars() + from_, length_);
  return isolate->factory()->NewTwoByteInternalizedString(chars, hash_field_);
}


// Compares the substring this key denotes with |string|, byte-wise.
template<>
bool SubStringKey<uint8_t>::IsMatch(Object* string) {
  Vector<const uint8_t> chars(GetChars() + from_, length_);
  return String::cast(string)->IsOneByteEqualTo(chars);
}


// Compares the substring this key denotes with |string|, code-unit-wise.
template<>
bool SubStringKey<uint16_t>::IsMatch(Object* string) {
  Vector<const uint16_t> chars(GetChars() + from_, length_);
  return String::cast(string)->IsTwoByteEqualTo(chars);
}


template class SubStringKey<uint8_t>;
template class SubStringKey<uint16_t>;


// InternalizedStringKey carries a string/internalized-string object as key.
class InternalizedStringKey : public HashTableKey {
 public:
  explicit InternalizedStringKey(Handle<String> string)
      : string_(string) { }

  virtual bool IsMatch(Object* string) V8_OVERRIDE {
    return String::cast(string)->Equals(*string_);
  }

  virtual uint32_t Hash() V8_OVERRIDE { return string_->Hash(); }

  virtual uint32_t HashForObject(Object* other) V8_OVERRIDE {
    return String::cast(other)->Hash();
  }

  virtual Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
    // Internalize the string if possible.
    MaybeHandle<Map> maybe_map =
        isolate->factory()->InternalizedStringMapForString(string_);
    Handle<Map> map;
    if (maybe_map.ToHandle(&map)) {
      // The string can be internalized in place by swapping its map.
      string_->set_map_no_write_barrier(*map);
      ASSERT(string_->IsInternalizedString());
      return string_;
    }
    // Otherwise allocate a new internalized string.
    return isolate->factory()->NewInternalizedStringImpl(
        string_, string_->length(), string_->hash_field());
  }

  static uint32_t StringHash(Object* obj) {
    return String::cast(obj)->Hash();
  }

  Handle<String> string_;
};


// Visits the pointers in the table's fixed prefix (before the elements).
template<typename Derived, typename Shape, typename Key>
void HashTable<Derived, Shape, Key>::IteratePrefix(ObjectVisitor* v) {
  IteratePointers(v, 0, kElementsStartOffset);
}


// Visits the pointers in the element portion of the table.
template<typename Derived, typename Shape, typename Key>
void HashTable<Derived, Shape, Key>::IterateElements(ObjectVisitor* v) {
  IteratePointers(v,
                  kElementsStartOffset,
                  kHeaderSize + length() * kPointerSize);
}


// Allocates a new, empty hash table with room for |at_least_space_for|
// elements.  With USE_CUSTOM_MINIMUM_CAPACITY the argument is used as the
// capacity directly (and must be a power of two); otherwise a capacity is
// computed from it.  Aborts the process on an over-large request.
template<typename Derived, typename Shape, typename Key>
Handle<Derived> HashTable<Derived, Shape, Key>::New(
    Isolate* isolate,
    int at_least_space_for,
    MinimumCapacity capacity_option,
    PretenureFlag pretenure) {
  ASSERT(0 <= at_least_space_for);
  ASSERT(!capacity_option || IsPowerOf2(at_least_space_for));
  int capacity = (capacity_option == USE_CUSTOM_MINIMUM_CAPACITY)
                     ? at_least_space_for
                     : ComputeCapacity(at_least_space_for);
  if (capacity > HashTable::kMaxCapacity) {
    v8::internal::Heap::FatalProcessOutOfMemory("invalid table size", true);
  }

  Factory* factory = isolate->factory();
  int length = EntryToIndex(capacity);
  Handle<FixedArray> array = factory->NewFixedArray(length, pretenure);
  array->set_map_no_write_barrier(*factory->hash_table_map());
  Handle<Derived> table = Handle<Derived>::cast(array);

  table->SetNumberOfElements(0);
  table->SetNumberOfDeletedElements(0);
  table->SetCapacity(capacity);
  return table;
}


// Find entry for key otherwise return kNotFound.
int NameDictionary::FindEntry(Handle<Name> key) {
  if (!key->IsUniqueName()) {
    return DerivedHashTable::FindEntry(key);
  }

  // Optimized for unique names. Knowledge of the key type allows:
  // 1. Move the check if the key is unique out of the loop.
  // 2. Avoid comparing hash codes in unique-to-unique comparison.
  // 3. Detect a case when a dictionary key is not unique but the key is.
  //    In case of positive result the dictionary key may be replaced by the
  //    internalized string with minimal performance penalty. It gives a chance
  //    to perform further lookups in code stubs (and significant performance
  //    boost a certain style of code).

  // EnsureCapacity will guarantee the hash table is never full.
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(key->Hash(), capacity);
  uint32_t count = 1;

  while (true) {
    int index = EntryToIndex(entry);
    Object* element = get(index);
    if (element->IsUndefined()) break;  // Empty entry.
    // Unique names compare by identity, so a pointer check suffices.
    if (*key == element) return entry;
    if (!element->IsUniqueName() &&
        !element->IsTheHole() &&
        Name::cast(element)->Equals(*key)) {
      // Replace a key that is a non-internalized string by the equivalent
      // internalized string for faster further lookups.
      set(index, *key);
      return entry;
    }
    ASSERT(element->IsTheHole() || !Name::cast(element)->Equals(*key));
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}


// Copies the prefix and re-inserts every live entry of this table into
// |new_table| at its new probe position.  Runs without allocating, so raw
// pointers stay valid throughout.
template<typename Derived, typename Shape, typename Key>
void HashTable<Derived, Shape, Key>::Rehash(
    Handle<Derived> new_table,
    Key key) {
  ASSERT(NumberOfElements() < new_table->Capacity());

  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = new_table->GetWriteBarrierMode(no_gc);

  // Copy prefix to new array.
  for (int i = kPrefixStartIndex;
       i < kPrefixStartIndex + Shape::kPrefixSize;
       i++) {
    new_table->set(i, get(i), mode);
  }

  // Rehash the elements.
  int capacity = Capacity();
  for (int i = 0; i < capacity; i++) {
    uint32_t from_index = EntryToIndex(i);
    Object* k = get(from_index);
    if (IsKey(k)) {
      uint32_t hash = HashTable::HashForObject(key, k);
      uint32_t insertion_index =
          EntryToIndex(new_table->FindInsertionEntry(hash));
      // Copy the whole entry (key plus any value slots).
      for (int j = 0; j < Shape::kEntrySize; j++) {
        new_table->set(insertion_index + j, get(from_index + j), mode);
      }
    }
  }
  new_table->SetNumberOfElements(NumberOfElements());
  new_table->SetNumberOfDeletedElements(0);
}


// Walks the probe sequence of |k| and returns the first entry among the
// first |probe| probes that is |expected|, or the entry reached after those
// probes otherwise.  Used by the in-place Rehash below.
template<typename Derived, typename Shape, typename Key>
uint32_t HashTable<Derived, Shape, Key>::EntryForProbe(
    Key key,
    Object* k,
    int probe,
    uint32_t expected) {
  uint32_t hash = HashTable::HashForObject(key, k);
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(hash, capacity);
  for (int i = 1; i < probe; i++) {
    if (entry == expected) return expected;
    entry = NextProbe(entry, i, capacity);
  }
  return entry;
}


// Swaps the full entries (key plus value slots) at |entry1| and |entry2|.
template<typename Derived, typename Shape, typename Key>
void HashTable<Derived, Shape, Key>::Swap(uint32_t entry1,
                                          uint32_t entry2,
                                          WriteBarrierMode mode) {
  int index1 = EntryToIndex(entry1);
  int index2 = EntryToIndex(entry2);
  Object* temp[Shape::kEntrySize];
  for (int j = 0; j < Shape::kEntrySize; j++) {
    temp[j] = get(index1 + j);
  }
  for (int j = 0; j < Shape::kEntrySize; j++) {
    set(index1 + j, get(index2 + j), mode);
  }
  for (int j = 0; j < Shape::kEntrySize; j++) {
    set(index2 + j, temp[j], mode);
  }
}


// In-place rehash: repeatedly moves entries towards their probe-order
// positions until every entry is placed.  Runs without allocating.
template<typename Derived, typename Shape, typename Key>
void HashTable<Derived, Shape, Key>::Rehash(Key key) {
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = GetWriteBarrierMode(no_gc);
  uint32_t capacity = Capacity();
  bool done = false;
  for (int probe = 1; !done; probe++) {
    // All elements at entries given by one of the first _probe_ probes
    // are placed correctly. Other elements might need to be moved.
    done = true;
    for (uint32_t current = 0; current < capacity; current++) {
      Object* current_key = get(EntryToIndex(current));
      if (IsKey(current_key)) {
        uint32_t target = EntryForProbe(key, current_key, probe, current);
        if (current == target) continue;
        Object* target_key = get(EntryToIndex(target));
        if (!IsKey(target_key) ||
            EntryForProbe(key, target_key, probe, target) != target) {
          // Put the current element into the correct position.
          Swap(current, target, mode);
          // The other element will be processed on the next iteration.
          current--;
        } else {
          // The place for the current element is occupied. Leave the element
          // for the next probe.
          done = false;
        }
      }
    }
  }
}


// Returns |table| itself if it still has room for |n| more elements,
// otherwise allocates a larger table and rehashes into it.  Large tables
// that have left new space are pretenured.
template<typename Derived, typename Shape, typename Key>
Handle<Derived> HashTable<Derived, Shape, Key>::EnsureCapacity(
    Handle<Derived> table,
    int n,
    Key key,
    PretenureFlag pretenure) {
  Isolate* isolate = table->GetIsolate();
  int capacity = table->Capacity();
  int nof = table->NumberOfElements() + n;
  int nod = table->NumberOfDeletedElements();
  // Return if:
  //   50% is still free after adding n elements and
  //   at most 50% of the free elements are deleted elements.
  if (nod <= (capacity - nof) >> 1) {
    int needed_free = nof >> 1;
    if (nof + needed_free <= capacity) return table;
  }

  const int kMinCapacityForPretenure = 256;
  bool should_pretenure = pretenure == TENURED ||
      ((capacity > kMinCapacityForPretenure) &&
          !isolate->heap()->InNewSpace(*table));
  Handle<Derived> new_table = HashTable::New(
      isolate,
      nof * 2,
      USE_DEFAULT_MINIMUM_CAPACITY,
      should_pretenure ? TENURED : NOT_TENURED);

  table->Rehash(new_table, key);
  return new_table;
}


// Shrinks |table| when it is at most a quarter full (but never below room
// for 16 elements); otherwise returns it unchanged.
template<typename Derived, typename Shape, typename Key>
Handle<Derived> HashTable<Derived, Shape, Key>::Shrink(Handle<Derived> table,
                                                       Key key) {
  int capacity = table->Capacity();
  int nof = table->NumberOfElements();

  // Shrink to fit the number of elements if only a quarter of the
  // capacity is filled with elements.
  if (nof > (capacity >> 2)) return table;
  // Allocate a new dictionary with room for at least the current
  // number of elements. The allocation method will make sure that
  // there is extra room in the dictionary for additions. Don't go
  // lower than room for 16 elements.
  int at_least_room_for = nof;
  if (at_least_room_for < 16) return table;

  Isolate* isolate = table->GetIsolate();
  const int kMinCapacityForPretenure = 256;
  bool pretenure =
      (at_least_room_for > kMinCapacityForPretenure) &&
      !isolate->heap()->InNewSpace(*table);
  Handle<Derived> new_table = HashTable::New(
      isolate,
      at_least_room_for,
      USE_DEFAULT_MINIMUM_CAPACITY,
      pretenure ? TENURED : NOT_TENURED);

  table->Rehash(new_table, key);
  return new_table;
}


// Returns the first free (undefined or hole) entry on |hash|'s probe
// sequence.  EnsureCapacity guarantees such an entry exists.
template<typename Derived, typename Shape, typename Key>
uint32_t HashTable<Derived, Shape, Key>::FindInsertionEntry(uint32_t hash) {
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(hash, capacity);
  uint32_t count = 1;
  // EnsureCapacity will guarantee the hash table is never full.
  while (true) {
    Object* element = KeyAt(entry);
    if (element->IsUndefined() || element->IsTheHole()) break;
    entry = NextProbe(entry, count++, capacity);
  }
  return entry;
}


// Force instantiation of template instances class.
// Please note this list is compiler dependent.

template class HashTable<StringTable, StringTableShape, HashTableKey*>;

template class HashTable<CompilationCacheTable,
                         CompilationCacheShape,
                         HashTableKey*>;

template class HashTable<MapCache, MapCacheShape, HashTableKey*>;

template class HashTable<ObjectHashTable,
                         ObjectHashTableShape,
                         Handle<Object> >;

template class HashTable<WeakHashTable, WeakHashTableShape<2>, Handle<Object> >;

template class Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >;

template class Dictionary<SeededNumberDictionary,
                          SeededNumberDictionaryShape,
                          uint32_t>;

template class Dictionary<UnseededNumberDictionary,
                          UnseededNumberDictionaryShape,
                          uint32_t>;

template Handle<SeededNumberDictionary>
Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
    New(Isolate*, int at_least_space_for, PretenureFlag pretenure);

template Handle<UnseededNumberDictionary>
Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
    New(Isolate*, int at_least_space_for, PretenureFlag pretenure);

// Explicit instantiations of Dictionary/HashTable member functions that are
// referenced from other translation units.

template Handle<NameDictionary>
Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
    New(Isolate*, int n, PretenureFlag pretenure);

template Handle<SeededNumberDictionary>
Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
    AtPut(Handle<SeededNumberDictionary>, uint32_t, Handle<Object>);

template Handle<UnseededNumberDictionary>
Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
    AtPut(Handle<UnseededNumberDictionary>, uint32_t, Handle<Object>);

template Object*
Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
    SlowReverseLookup(Object* value);

template Object*
Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
    SlowReverseLookup(Object* value);

template Object*
Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
    SlowReverseLookup(Object* value);

template void
Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
    CopyKeysTo(
        FixedArray*,
        PropertyAttributes,
        Dictionary<SeededNumberDictionary,
                   SeededNumberDictionaryShape,
                   uint32_t>::SortMode);

template Handle<Object>
Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::DeleteProperty(
    Handle<NameDictionary>, int, JSObject::DeleteMode);

template Handle<Object>
Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
    DeleteProperty(Handle<SeededNumberDictionary>, int, JSObject::DeleteMode);

template Handle<NameDictionary>
HashTable<NameDictionary, NameDictionaryShape, Handle<Name> >::
    New(Isolate*, int, MinimumCapacity, PretenureFlag);

template Handle<NameDictionary>
HashTable<NameDictionary, NameDictionaryShape, Handle<Name> >::
    Shrink(Handle<NameDictionary>, Handle<Name>);

template Handle<SeededNumberDictionary>
HashTable<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
    Shrink(Handle<SeededNumberDictionary>, uint32_t);

template void Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
    CopyKeysTo(
        FixedArray*,
        int,
        PropertyAttributes,
        Dictionary<
            NameDictionary, NameDictionaryShape, Handle<Name> >::SortMode);

template int
Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
    NumberOfElementsFilterAttributes(PropertyAttributes);

template Handle<NameDictionary>
Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::Add(
    Handle<NameDictionary>, Handle<Name>, Handle<Object>, PropertyDetails);

template void
Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
    GenerateNewEnumerationIndices(Handle<NameDictionary>);

template int
Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
    NumberOfElementsFilterAttributes(PropertyAttributes);

template Handle<SeededNumberDictionary>
Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
    Add(Handle<SeededNumberDictionary>,
        uint32_t,
        Handle<Object>,
        PropertyDetails);

template Handle<UnseededNumberDictionary>
Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
    Add(Handle<UnseededNumberDictionary>,
        uint32_t,
        Handle<Object>,
        PropertyDetails);

template Handle<SeededNumberDictionary>
Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
    EnsureCapacity(Handle<SeededNumberDictionary>, int, uint32_t);

template Handle<UnseededNumberDictionary>
Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
    EnsureCapacity(Handle<UnseededNumberDictionary>, int, uint32_t);

template Handle<NameDictionary>
Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
    EnsureCapacity(Handle<NameDictionary>, int, Handle<Name>);

template
int Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
    NumberOfEnumElements();

template
int Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
    NumberOfEnumElements();

template
int HashTable<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
    FindEntry(uint32_t);


// Compacts the dictionary-mode elements of |object| below |limit| into a
// fresh dictionary with consecutive keys starting at 0: defined values
// first, then any undefined values counted during the scan. Elements at or
// above |limit| keep their original keys. Returns the number of defined,
// non-undefined values as a heap number, or Smi -1 ("bailout") when the
// remaining work must be done in JS: accessor (CALLBACKS) or read-only
// elements, or keys/positions outside Smi range (adding those would need
// allocation, which is forbidden under the DisallowHeapAllocation below).
Handle<Object> JSObject::PrepareSlowElementsForSort(
    Handle<JSObject> object, uint32_t limit) {
  ASSERT(object->HasDictionaryElements());
  Isolate* isolate = object->GetIsolate();
  // Must stay in dictionary mode, either because of requires_slow_elements,
  // or because we are not going to sort (and therefore compact) all of the
  // elements.
  Handle<SeededNumberDictionary> dict(object->element_dictionary(), isolate);
  Handle<SeededNumberDictionary> new_dict =
      SeededNumberDictionary::New(isolate, dict->NumberOfElements());

  uint32_t pos = 0;     // Next compacted key to assign in new_dict.
  uint32_t undefs = 0;  // Count of undefined values seen below limit.
  int capacity = dict->Capacity();
  Handle<Smi> bailout(Smi::FromInt(-1), isolate);
  // Entry to the new dictionary does not cause it to grow, as we have
  // allocated one that is large enough for all entries.
  DisallowHeapAllocation no_gc;
  for (int i = 0; i < capacity; i++) {
    Object* k = dict->KeyAt(i);
    if (!dict->IsKey(k)) continue;

    ASSERT(k->IsNumber());
    ASSERT(!k->IsSmi() || Smi::cast(k)->value() >= 0);
    ASSERT(!k->IsHeapNumber() || HeapNumber::cast(k)->value() >= 0);
    ASSERT(!k->IsHeapNumber() || HeapNumber::cast(k)->value() <= kMaxUInt32);

    HandleScope scope(isolate);
    Handle<Object> value(dict->ValueAt(i), isolate);
    PropertyDetails details = dict->DetailsAt(i);
    if (details.type() == CALLBACKS || details.IsReadOnly()) {
      // Bail out and do the sorting of undefineds and array holes in JS.
      // Also bail out if the element is not supposed to be moved.
      return bailout;
    }

    uint32_t key = NumberToUint32(k);
    if (key < limit) {
      if (value->IsUndefined()) {
        // Defer undefineds; they are appended after the defined values below.
        undefs++;
      } else if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
        // Adding an entry with the key beyond smi-range requires
        // allocation. Bailout.
        return bailout;
      } else {
        Handle<Object> result = SeededNumberDictionary::AddNumberEntry(
            new_dict, pos, value, details);
        ASSERT(result.is_identical_to(new_dict));
        USE(result);
        pos++;
      }
    } else if (key > static_cast<uint32_t>(Smi::kMaxValue)) {
      // Adding an entry with the key beyond smi-range requires
      // allocation. Bailout.
      return bailout;
    } else {
      // Element at or above limit: keep its original key.
      Handle<Object> result = SeededNumberDictionary::AddNumberEntry(
          new_dict, key, value, details);
      ASSERT(result.is_identical_to(new_dict));
      USE(result);
    }
  }

  uint32_t result = pos;
  PropertyDetails no_details = PropertyDetails(NONE, NORMAL, 0);
  // Append the deferred undefined values directly after the defined ones.
  while (undefs > 0) {
    if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
      // Adding an entry with the key beyond smi-range requires
      // allocation. Bailout.
      return bailout;
    }
    HandleScope scope(isolate);
    Handle<Object> result = SeededNumberDictionary::AddNumberEntry(
        new_dict, pos, isolate->factory()->undefined_value(), no_details);
    ASSERT(result.is_identical_to(new_dict));
    USE(result);
    pos++;
    undefs--;
  }

  object->set_elements(*new_dict);

  AllowHeapAllocation allocate_return_value;
  return isolate->factory()->NewNumberFromUint(result);
}


// Collects all defined (non-hole) and non-undefined (array) elements at
// the start of the elements array.
// If the object is in dictionary mode, it is converted to fast elements
// mode.
// Returns the number of defined, non-undefined values (as a number), or
// Smi -1 when sorting must be completed in JS (sloppy arguments, observed
// objects, or the slow-elements bailout above).
Handle<Object> JSObject::PrepareElementsForSort(Handle<JSObject> object,
                                                uint32_t limit) {
  Isolate* isolate = object->GetIsolate();
  if (object->HasSloppyArgumentsElements() ||
      object->map()->is_observed()) {
    return handle(Smi::FromInt(-1), isolate);
  }

  if (object->HasDictionaryElements()) {
    // Convert to fast elements containing only the existing properties.
    // Ordering is irrelevant, since we are going to sort anyway.
    Handle<SeededNumberDictionary> dict(object->element_dictionary());
    if (object->IsJSArray() || dict->requires_slow_elements() ||
        dict->max_number_key() >= limit) {
      return JSObject::PrepareSlowElementsForSort(object, limit);
    }
    // Convert to fast elements.

    Handle<Map> new_map =
        JSObject::GetElementsTransitionMap(object, FAST_HOLEY_ELEMENTS);

    PretenureFlag tenure = isolate->heap()->InNewSpace(*object) ?
        NOT_TENURED: TENURED;
    Handle<FixedArray> fast_elements =
        isolate->factory()->NewFixedArray(dict->NumberOfElements(), tenure);
    dict->CopyValuesTo(*fast_elements);
    JSObject::ValidateElements(object);

    JSObject::SetMapAndElements(object, new_map, fast_elements);
  } else if (object->HasExternalArrayElements() ||
             object->HasFixedTypedArrayElements()) {
    // Typed arrays cannot have holes or undefined elements.
    return handle(Smi::FromInt(
        FixedArrayBase::cast(object->elements())->length()), isolate);
  } else if (!object->HasFastDoubleElements()) {
    EnsureWritableFastElements(object);
  }
  ASSERT(object->HasFastSmiOrObjectElements() ||
         object->HasFastDoubleElements());

  // Collect holes at the end, undefined before that and the rest at the
  // start, and return the number of non-hole, non-undefined values.

  Handle<FixedArrayBase> elements_base(object->elements());
  uint32_t elements_length = static_cast<uint32_t>(elements_base->length());
  if (limit > elements_length) {
    limit = elements_length;
  }
  if (limit == 0) {
    return handle(Smi::FromInt(0), isolate);
  }

  uint32_t result = 0;
  if (elements_base->map() == isolate->heap()->fixed_double_array_map()) {
    // Double arrays have no undefined values, only holes.
    FixedDoubleArray* elements = FixedDoubleArray::cast(*elements_base);
    // Split elements into defined and the_hole, in that order.
    unsigned int holes = limit;
    // Assume most arrays contain no holes and undefined values, so minimize the
    // number of stores of non-undefined, non-the-hole values.
    for (unsigned int i = 0; i < holes; i++) {
      if (elements->is_the_hole(i)) {
        holes--;
      } else {
        continue;
      }
      // Position i needs to be filled. Scan backward from the end for a
      // defined value to move into it.
      while (holes > i) {
        if (elements->is_the_hole(holes)) {
          holes--;
        } else {
          elements->set(i, elements->get_scalar(holes));
          break;
        }
      }
    }
    result = holes;
    // Fill the tail [holes, limit) with holes.
    while (holes < limit) {
      elements->set_the_hole(holes);
      holes++;
    }
  } else {
    FixedArray* elements = FixedArray::cast(*elements_base);
    DisallowHeapAllocation no_gc;

    // Split elements into defined, undefined and the_hole, in that order. Only
    // count locations for undefined and the hole, and fill them afterwards.
    WriteBarrierMode write_barrier = elements->GetWriteBarrierMode(no_gc);
    unsigned int undefs = limit;  // Start of the undefined section.
    unsigned int holes = limit;   // Start of the hole section.
    // Assume most arrays contain no holes and undefined values, so minimize the
    // number of stores of non-undefined, non-the-hole values.
    for (unsigned int i = 0; i < undefs; i++) {
      Object* current = elements->get(i);
      if (current->IsTheHole()) {
        holes--;
        undefs--;
      } else if (current->IsUndefined()) {
        undefs--;
      } else {
        continue;
      }
      // Position i needs to be filled.
        clamped_value = static_cast<uint8_t>(int_value);
      }
    } else if (value->IsHeapNumber()) {
      double double_value = Handle<HeapNumber>::cast(value)->value();
      if (!(double_value > 0)) {
        // NaN and less than zero clamp to zero.
        clamped_value = 0;
      } else if (double_value > 255) {
        // Greater than 255 clamp to 255.
        clamped_value = 255;
      } else {
        // Other doubles are rounded to the nearest integer.
        clamped_value = static_cast<uint8_t>(lrint(double_value));
      }
    } else {
      // Clamp undefined to zero (default). All other types have been
      // converted to a number type further up in the call chain.
      ASSERT(value->IsUndefined());
    }
    array->set(index, clamped_value);
  }
  return handle(Smi::FromInt(clamped_value), array->GetIsolate());
}


// Stores |value| into |receiver| at |index| after converting it to the
// element type ValueType (Smi: plain cast; HeapNumber: via DoubleToInt32;
// undefined: 0). Out-of-range indices are silently ignored. Returns the
// stored value as a number.
template<typename ExternalArrayClass, typename ValueType>
static Handle<Object> ExternalArrayIntSetter(
    Isolate* isolate,
    Handle<ExternalArrayClass> receiver,
    uint32_t index,
    Handle<Object> value) {
  ValueType cast_value = 0;
  if (index < static_cast<uint32_t>(receiver->length())) {
    if (value->IsSmi()) {
      int int_value = Handle<Smi>::cast(value)->value();
      cast_value = static_cast<ValueType>(int_value);
    } else if (value->IsHeapNumber()) {
      double double_value = Handle<HeapNumber>::cast(value)->value();
      cast_value = static_cast<ValueType>(DoubleToInt32(double_value));
    } else {
      // Clamp undefined to zero (default). All other types have been
      // converted to a number type further up in the call chain.
      ASSERT(value->IsUndefined());
    }
    receiver->set(index, cast_value);
  }
  return isolate->factory()->NewNumberFromInt(cast_value);
}


// The following SetValue wrappers dispatch to ExternalArrayIntSetter with
// the matching element type.

Handle<Object> ExternalInt8Array::SetValue(Handle<ExternalInt8Array> array,
                                           uint32_t index,
                                           Handle<Object> value) {
  return ExternalArrayIntSetter<ExternalInt8Array, int8_t>(
      array->GetIsolate(), array, index, value);
}


Handle<Object> ExternalUint8Array::SetValue(Handle<ExternalUint8Array> array,
                                            uint32_t index,
                                            Handle<Object> value) {
  return ExternalArrayIntSetter<ExternalUint8Array, uint8_t>(
      array->GetIsolate(), array, index, value);
}


Handle<Object> ExternalInt16Array::SetValue(Handle<ExternalInt16Array> array,
                                            uint32_t index,
                                            Handle<Object> value) {
  return ExternalArrayIntSetter<ExternalInt16Array, int16_t>(
      array->GetIsolate(), array, index, value);
}


Handle<Object> ExternalUint16Array::SetValue(Handle<ExternalUint16Array> array,
                                             uint32_t index,
                                             Handle<Object> value) {
  return ExternalArrayIntSetter<ExternalUint16Array, uint16_t>(
      array->GetIsolate(), array, index, value);
}


Handle<Object> ExternalInt32Array::SetValue(Handle<ExternalInt32Array> array,
                                            uint32_t index,
                                            Handle<Object> value) {
  return ExternalArrayIntSetter<ExternalInt32Array, int32_t>(
      array->GetIsolate(), array, index, value);
}


// Uint32 cannot share ExternalArrayIntSetter: HeapNumbers convert via
// DoubleToUint32 and the result is returned with NewNumberFromUint.
Handle<Object> ExternalUint32Array::SetValue(
    Handle<ExternalUint32Array> array,
    uint32_t index,
    Handle<Object> value) {
  uint32_t cast_value = 0;
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsSmi()) {
      int int_value = Handle<Smi>::cast(value)->value();
      cast_value = static_cast<uint32_t>(int_value);
    } else if (value->IsHeapNumber()) {
      double double_value = Handle<HeapNumber>::cast(value)->value();
      cast_value = static_cast<uint32_t>(DoubleToUint32(double_value));
    } else {
      // Clamp undefined to zero (default). All other types have been
      // converted to a number type further up in the call chain.
      ASSERT(value->IsUndefined());
    }
    array->set(index, cast_value);
  }
  return array->GetIsolate()->factory()->NewNumberFromUint(cast_value);
}


// Stores |value| as a float at |index| (undefined stores NaN); out-of-range
// indices are ignored. Returns the stored value as a number.
Handle<Object> ExternalFloat32Array::SetValue(
    Handle<ExternalFloat32Array> array,
    uint32_t index,
    Handle<Object> value) {
  float cast_value = static_cast<float>(OS::nan_value());
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsSmi()) {
      int int_value = Handle<Smi>::cast(value)->value();
      cast_value = static_cast<float>(int_value);
    } else if (value->IsHeapNumber()) {
      double double_value = Handle<HeapNumber>::cast(value)->value();
      cast_value = static_cast<float>(double_value);
    } else {
      // Clamp undefined to NaN (default). All other types have been
      // converted to a number type further up in the call chain.
      ASSERT(value->IsUndefined());
    }
    array->set(index, cast_value);
  }
  return array->GetIsolate()->factory()->NewNumber(cast_value);
}


// Stores |value| as a double at |index| (undefined stores NaN); out-of-range
// indices are ignored. Returns the stored value as a number.
Handle<Object> ExternalFloat64Array::SetValue(
    Handle<ExternalFloat64Array> array,
    uint32_t index,
    Handle<Object> value) {
  double double_value = OS::nan_value();
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsNumber()) {
      double_value = value->Number();
    } else {
      // Clamp undefined to NaN (default). All other types have been
      // converted to a number type further up in the call chain.
      ASSERT(value->IsUndefined());
    }
    array->set(index, double_value);
  }
  return array->GetIsolate()->factory()->NewNumber(double_value);
}


// Returns the PropertyCell holding the value for the property described by
// |result|, read from this global object's slow-mode property dictionary.
PropertyCell* GlobalObject::GetPropertyCell(LookupResult* result) {
  ASSERT(!HasFastProperties());
  Object* value = property_dictionary()->ValueAt(result->GetDictionaryEntry());
  return PropertyCell::cast(value);
}


// Returns the PropertyCell for |name| on |global|, creating one if the
// property does not exist yet. A freshly created cell holds the-hole and is
// inserted into the property dictionary with "deleted" details, so it acts
// as a placeholder until a real value is stored.
Handle<PropertyCell> JSGlobalObject::EnsurePropertyCell(
    Handle<JSGlobalObject> global,
    Handle<Name> name) {
  ASSERT(!global->HasFastProperties());
  int entry = global->property_dictionary()->FindEntry(name);
  if (entry == NameDictionary::kNotFound) {
    Isolate* isolate = global->GetIsolate();
    Handle<PropertyCell> cell = isolate->factory()->NewPropertyCell(
        isolate->factory()->the_hole_value());
    PropertyDetails details(NONE, NORMAL, 0);
    details = details.AsDeleted();
    Handle<NameDictionary> dictionary = NameDictionary::Add(
        handle(global->property_dictionary()), name, cell, details);
    global->set_properties(*dictionary);
    return cell;
  } else {
    Object* value = global->property_dictionary()->ValueAt(entry);
    ASSERT(value->IsPropertyCell());
    return handle(PropertyCell::cast(value));
  }
}


// This class is used for looking up two character strings in the string table.
// If we don't have a hit we don't want to waste much time so we unroll the
// string hash calculation loop here for speed. Doesn't work if the two
// characters form a decimal integer, since such strings have a different hash
// algorithm.
class TwoCharHashTableKey : public HashTableKey {
 public:
  // Computes the hash inline, mirroring StringHasher's add/shift/xor steps
  // for exactly two characters.
  TwoCharHashTableKey(uint16_t c1, uint16_t c2, uint32_t seed)
    : c1_(c1), c2_(c2) {
    // Char 1.
    uint32_t hash = seed;
    hash += c1;
    hash += hash << 10;
    hash ^= hash >> 6;
    // Char 2.
    hash += c2;
    hash += hash << 10;
    hash ^= hash >> 6;
    // GetHash.
    hash += hash << 3;
    hash ^= hash >> 11;
    hash += hash << 15;
    // A hash of zero is reserved; substitute the canonical non-zero value.
    if ((hash & String::kHashBitMask) == 0) hash = StringHasher::kZeroHash;
    hash_ = hash;
#ifdef DEBUG
    // If this assert fails then we failed to reproduce the two-character
    // version of the string hashing algorithm above. One reason could be
    // that we were passed two digits as characters, since the hash
    // algorithm is different in that case.
    uint16_t chars[2] = {c1, c2};
    uint32_t check_hash = StringHasher::HashSequentialString(chars, 2, seed);
    hash = (hash << String::kHashShift) | String::kIsNotArrayIndexMask;
    ASSERT_EQ(static_cast<int32_t>(hash), static_cast<int32_t>(check_hash));
#endif
  }

  // Matches any two-character string with exactly our two characters.
  bool IsMatch(Object* o) V8_OVERRIDE {
    if (!o->IsString()) return false;
    String* other = String::cast(o);
    if (other->length() != 2) return false;
    if (other->Get(0) != c1_) return false;
    return other->Get(1) == c2_;
  }

  uint32_t Hash() V8_OVERRIDE { return hash_; }
  uint32_t HashForObject(Object* key) V8_OVERRIDE {
    if (!key->IsString()) return 0;
    return String::cast(key)->Hash();
  }

  Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE {
    // The TwoCharHashTableKey is only used for looking in the string
    // table, not for adding to it.
    UNREACHABLE();
    return MaybeHandle<Object>().ToHandleChecked();
  }

 private:
  uint16_t c1_;
  uint16_t c2_;
  uint32_t hash_;
};


// Returns |string| if it is already internalized, otherwise looks it up in
// the string table without inserting. Empty MaybeHandle on a miss.
MaybeHandle<String> StringTable::InternalizeStringIfExists(
    Isolate* isolate,
    Handle<String> string) {
  if (string->IsInternalizedString()) {
    return string;
  }
  return LookupStringIfExists(isolate, string);
}


// Looks up |string| in the string table; never inserts. Returns the
// internalized string on a hit, empty MaybeHandle on a miss.
MaybeHandle<String> StringTable::LookupStringIfExists(
    Isolate* isolate,
    Handle<String> string) {
  Handle<StringTable> string_table = isolate->factory()->string_table();
  InternalizedStringKey key(string);
  int entry = string_table->FindEntry(&key);
  if (entry == kNotFound) {
    return MaybeHandle<String>();
  } else {
    Handle<String> result(String::cast(string_table->KeyAt(entry)), isolate);
    ASSERT(StringShape(*result).IsInternalized());
    return result;
  }
}


// Lookup-only fast path for two-character strings; uses the unrolled
// TwoCharHashTableKey so no string needs to be allocated for the probe.
MaybeHandle<String> StringTable::LookupTwoCharsStringIfExists(
    Isolate* isolate,
    uint16_t c1,
    uint16_t c2) {
  Handle<StringTable> string_table = isolate->factory()->string_table();
  TwoCharHashTableKey key(c1, c2, isolate->heap()->HashSeed());
  int entry = string_table->FindEntry(&key);
  if (entry == kNotFound) {
    return MaybeHandle<String>();
  } else {
    Handle<String> result(String::cast(string_table->KeyAt(entry)), isolate);
    ASSERT(StringShape(*result).IsInternalized());
    return result;
  }
}


// Internalizes |string|: returns the canonical table entry, inserting a new
// one if necessary.
Handle<String> StringTable::LookupString(Isolate* isolate,
                                         Handle<String> string) {
  InternalizedStringKey key(string);
  return LookupKey(isolate, &key);
}


// Looks up |key| in the string table and inserts the string it describes on
// a miss. Always returns the canonical (internalized) string.
Handle<String> StringTable::LookupKey(Isolate* isolate, HashTableKey* key) {
  Handle<StringTable> table = isolate->factory()->string_table();
  int entry = table->FindEntry(key);

  // String already in table.
  if (entry != kNotFound) {
    return handle(String::cast(table->KeyAt(entry)), isolate);
  }

  // Adding new string. Grow table if needed.
  table = StringTable::EnsureCapacity(table, 1, key);

  // Create string object.
  Handle<Object> string = key->AsHandle(isolate);
  // There must be no attempts to internalize strings that could throw
  // InvalidStringLength error.
  CHECK(!string.is_null());

  // Add the new string and return it along with the string table.
  entry = table->FindInsertionEntry(key->Hash());
  table->set(EntryToIndex(entry), *string);
  table->ElementAdded();

  // EnsureCapacity may have produced a new table; publish it on the factory.
  isolate->factory()->set_string_table(table);
  return Handle<String>::cast(string);
}


// Looks up the cached compilation result for script source |src| compiled in
// |context|. Returns undefined on a miss.
Handle<Object> CompilationCacheTable::Lookup(Handle<String> src,
                                             Handle<Context> context) {
  Isolate* isolate = GetIsolate();
  Handle<SharedFunctionInfo> shared(context->closure()->shared());
  StringSharedKey key(src, shared, FLAG_use_strict ?
                      STRICT : SLOPPY,
                      RelocInfo::kNoPosition);
  int entry = FindEntry(&key);
  if (entry == kNotFound) return isolate->factory()->undefined_value();
  // The value is stored in the slot directly after the key.
  return Handle<Object>(get(EntryToIndex(entry) + 1), isolate);
}


// Looks up a cached eval() compilation for |src| in |context| with the given
// strict mode and scope position. Returns undefined on a miss.
Handle<Object> CompilationCacheTable::LookupEval(Handle<String> src,
                                                 Handle<Context> context,
                                                 StrictMode strict_mode,
                                                 int scope_position) {
  Isolate* isolate = GetIsolate();
  Handle<SharedFunctionInfo> shared(context->closure()->shared());
  StringSharedKey key(src, shared, strict_mode, scope_position);
  int entry = FindEntry(&key);
  if (entry == kNotFound) return isolate->factory()->undefined_value();
  return Handle<Object>(get(EntryToIndex(entry) + 1), isolate);
}


// Looks up cached compiled RegExp data for pattern |src| with |flags|.
// Returns undefined on a miss.
Handle<Object> CompilationCacheTable::LookupRegExp(Handle<String> src,
                                                   JSRegExp::Flags flags) {
  Isolate* isolate = GetIsolate();
  DisallowHeapAllocation no_allocation;
  RegExpKey key(src, flags);
  int entry = FindEntry(&key);
  if (entry == kNotFound) return isolate->factory()->undefined_value();
  return Handle<Object>(get(EntryToIndex(entry) + 1), isolate);
}


// Caches |value| as the compilation result for script source |src| in
// |context|. May grow and therefore return a new table handle.
Handle<CompilationCacheTable> CompilationCacheTable::Put(
    Handle<CompilationCacheTable> cache, Handle<String> src,
    Handle<Context> context, Handle<Object> value) {
  Isolate* isolate = cache->GetIsolate();
  Handle<SharedFunctionInfo> shared(context->closure()->shared());
  StringSharedKey key(src, shared, FLAG_use_strict ?
                      STRICT : SLOPPY,
                      RelocInfo::kNoPosition);
  cache = EnsureCapacity(cache, 1, &key);
  Handle<Object> k = key.AsHandle(isolate);
  int entry = cache->FindInsertionEntry(key.Hash());
  cache->set(EntryToIndex(entry), *k);
  cache->set(EntryToIndex(entry) + 1, *value);
  cache->ElementAdded();
  return cache;
}


// Caches an eval() compilation result. The strict mode is taken from the
// compiled function itself. May grow and return a new table handle.
Handle<CompilationCacheTable> CompilationCacheTable::PutEval(
    Handle<CompilationCacheTable> cache, Handle<String> src,
    Handle<Context> context, Handle<SharedFunctionInfo> value,
    int scope_position) {
  Isolate* isolate = cache->GetIsolate();
  Handle<SharedFunctionInfo> shared(context->closure()->shared());
  StringSharedKey key(src, shared, value->strict_mode(), scope_position);
  cache = EnsureCapacity(cache, 1, &key);
  Handle<Object> k = key.AsHandle(isolate);
  int entry = cache->FindInsertionEntry(key.Hash());
  cache->set(EntryToIndex(entry), *k);
  cache->set(EntryToIndex(entry) + 1, *value);
  cache->ElementAdded();
  return cache;
}


// Caches compiled RegExp data. May grow and return a new table handle.
Handle<CompilationCacheTable> CompilationCacheTable::PutRegExp(
    Handle<CompilationCacheTable> cache, Handle<String> src,
    JSRegExp::Flags flags, Handle<FixedArray> value) {
  RegExpKey key(src, flags);
  cache = EnsureCapacity(cache, 1, &key);
  int entry = cache->FindInsertionEntry(key.Hash());
  // We store the value in the key slot, and compare the search key
  // to the stored value with a custom IsMatch function during lookups.
  cache->set(EntryToIndex(entry), *value);
  cache->set(EntryToIndex(entry) + 1, *value);
  cache->ElementAdded();
  return cache;
}


// Removes every cache entry whose value is |value|, replacing key and value
// slots with the-hole. Uses NoWriteBarrierSet since the-hole needs no
// barrier.
void CompilationCacheTable::Remove(Object* value) {
  DisallowHeapAllocation no_allocation;
  Object* the_hole_value = GetHeap()->the_hole_value();
  for (int entry = 0, size = Capacity(); entry < size; entry++) {
    int entry_index = EntryToIndex(entry);
    int value_index = entry_index + 1;
    if (get(value_index) == value) {
      NoWriteBarrierSet(this, entry_index, the_hole_value);
      NoWriteBarrierSet(this, value_index, the_hole_value);
      ElementRemoved();
    }
  }
  return;
}


// StringsKey used for HashTable where key is array of internalized strings.
class StringsKey : public HashTableKey {
 public:
  explicit StringsKey(Handle<FixedArray> strings) : strings_(strings) { }

  // Element-wise identity comparison; assumes the strings are internalized
  // so pointer equality suffices.
  bool IsMatch(Object* strings) V8_OVERRIDE {
    FixedArray* o = FixedArray::cast(strings);
    int len = strings_->length();
    if (o->length() != len) return false;
    for (int i = 0; i < len; i++) {
      if (o->get(i) != strings_->get(i)) return false;
    }
    return true;
  }

  uint32_t Hash() V8_OVERRIDE { return HashForObject(*strings_); }

  // XOR of the individual string hashes.
  uint32_t HashForObject(Object* obj) V8_OVERRIDE {
    FixedArray* strings = FixedArray::cast(obj);
    int len = strings->length();
    uint32_t hash = 0;
    for (int i = 0; i < len; i++) {
      hash ^= String::cast(strings->get(i))->Hash();
    }
    return hash;
  }

  Handle<Object> AsHandle(Isolate* isolate) V8_OVERRIDE { return strings_; }

 private:
  Handle<FixedArray> strings_;
};


// Looks up the map cached for the given array of property names.
// Returns undefined on a miss.
Object* MapCache::Lookup(FixedArray* array) {
  DisallowHeapAllocation no_alloc;
  StringsKey key(handle(array));
  int entry = FindEntry(&key);
  if (entry == kNotFound) return
      GetHeap()->undefined_value();
  return get(EntryToIndex(entry) + 1);
}


// Caches |value| as the map for the property-name array |array|, growing the
// cache if needed.  Returns the (possibly reallocated) cache.
Handle<MapCache> MapCache::Put(
    Handle<MapCache> map_cache, Handle<FixedArray> array, Handle<Map> value) {
  StringsKey key(array);

  Handle<MapCache> new_cache = EnsureCapacity(map_cache, 1, &key);
  int entry = new_cache->FindInsertionEntry(key.Hash());
  new_cache->set(EntryToIndex(entry), *array);
  new_cache->set(EntryToIndex(entry) + 1, *value);
  new_cache->ElementAdded();
  return new_cache;
}


// Allocates a new dictionary with room for at least |at_least_space_for|
// elements and the enumeration index counter reset to its initial value.
template<typename Derived, typename Shape, typename Key>
Handle<Derived> Dictionary<Derived, Shape, Key>::New(
    Isolate* isolate,
    int at_least_space_for,
    PretenureFlag pretenure) {
  ASSERT(0 <= at_least_space_for);
  Handle<Derived> dict = DerivedHashTable::New(isolate,
                                               at_least_space_for,
                                               USE_DEFAULT_MINIMUM_CAPACITY,
                                               pretenure);

  // Initialize the next enumeration index.
  dict->SetNextEnumerationIndex(PropertyDetails::kInitialIndex);
  return dict;
}


// Reassigns dense enumeration indices (starting at kInitialIndex) to all
// live entries while preserving their relative enumeration order.  Called
// when the index counter is about to overflow its field.
template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::GenerateNewEnumerationIndices(
    Handle<Derived> dictionary) {
  Factory* factory = dictionary->GetIsolate()->factory();
  int length = dictionary->NumberOfElements();

  // Allocate and initialize iteration order array.
  Handle<FixedArray> iteration_order = factory->NewFixedArray(length);
  for (int i = 0; i < length; i++) {
    iteration_order->set(i, Smi::FromInt(i));
  }

  // Allocate array with enumeration order.
  Handle<FixedArray> enumeration_order = factory->NewFixedArray(length);

  // Fill the enumeration order array with property details.
  int capacity = dictionary->Capacity();
  int pos = 0;
  for (int i = 0; i < capacity; i++) {
    if (dictionary->IsKey(dictionary->KeyAt(i))) {
      int index = dictionary->DetailsAt(i).dictionary_index();
      enumeration_order->set(pos++, Smi::FromInt(index));
    }
  }

  // Sort the arrays wrt. enumeration order.
  iteration_order->SortPairs(*enumeration_order, enumeration_order->length());

  // Overwrite the enumeration_order with the enumeration indices.
  for (int i = 0; i < length; i++) {
    int index = Smi::cast(iteration_order->get(i))->value();
    int enum_index = PropertyDetails::kInitialIndex + i;
    enumeration_order->set(index, Smi::FromInt(enum_index));
  }

  // Update the dictionary with new indices.
  capacity = dictionary->Capacity();
  pos = 0;
  for (int i = 0; i < capacity; i++) {
    if (dictionary->IsKey(dictionary->KeyAt(i))) {
      int enum_index = Smi::cast(enumeration_order->get(pos++))->value();
      PropertyDetails details = dictionary->DetailsAt(i);
      PropertyDetails new_details = PropertyDetails(
          details.attributes(), details.type(), enum_index);
      dictionary->DetailsAtPut(i, new_details);
    }
  }

  // Set the next enumeration index.
  dictionary->SetNextEnumerationIndex(PropertyDetails::kInitialIndex+length);
}


// Grows the dictionary so that |n| more elements fit, first compacting the
// enumeration indices if adding |n| entries would overflow the index field.
template<typename Derived, typename Shape, typename Key>
Handle<Derived> Dictionary<Derived, Shape, Key>::EnsureCapacity(
    Handle<Derived> dictionary, int n, Key key) {
  // Check whether there are enough enumeration indices to add n elements.
  if (Shape::kIsEnumerable &&
      !PropertyDetails::IsValidIndex(dictionary->NextEnumerationIndex() + n)) {
    // If not, we generate new indices for the properties.
    GenerateNewEnumerationIndices(dictionary);
  }
  return DerivedHashTable::EnsureCapacity(dictionary, n, key);
}


// Removes the entry at |entry|, honoring DONT_DELETE unless the deletion is
// forced.  Returns true_value on success, false_value if blocked.
template<typename Derived, typename Shape, typename Key>
Handle<Object> Dictionary<Derived, Shape, Key>::DeleteProperty(
    Handle<Derived> dictionary,
    int entry,
    JSObject::DeleteMode mode) {
  Factory* factory = dictionary->GetIsolate()->factory();
  PropertyDetails details = dictionary->DetailsAt(entry);
  // Ignore attributes if forcing a deletion.
  if (details.IsDontDelete() && mode != JSReceiver::FORCE_DELETION) {
    return factory->false_value();
  }

  dictionary->SetEntry(
      entry, factory->the_hole_value(), factory->the_hole_value());
  dictionary->ElementRemoved();
  return factory->true_value();
}


// Sets |key| to |value|, overwriting in place when the key already exists
// and inserting (with default details) otherwise.  Returns the possibly
// reallocated dictionary.
template<typename Derived, typename Shape, typename Key>
Handle<Derived> Dictionary<Derived, Shape, Key>::AtPut(
    Handle<Derived> dictionary, Key key, Handle<Object> value) {
  int entry = dictionary->FindEntry(key);

  // If the entry is present set the value.
  if (entry != Dictionary::kNotFound) {
    dictionary->ValueAtPut(entry, *value);
    return dictionary;
  }

  // Check whether the dictionary should be extended.
  dictionary = EnsureCapacity(dictionary, 1, key);
#ifdef DEBUG
  // Exercise key-to-handle conversion in debug mode to catch keys that
  // cannot be materialized; the result is deliberately unused.
  USE(Shape::AsHandle(dictionary->GetIsolate(), key));
#endif
  PropertyDetails details = PropertyDetails(NONE, NORMAL, 0);

  AddEntry(dictionary, key, value, details, dictionary->Hash(key));
  return dictionary;
}


// Inserts a key that the caller guarantees is not yet present.  Returns the
// possibly reallocated dictionary.
template<typename Derived, typename Shape, typename Key>
Handle<Derived> Dictionary<Derived, Shape, Key>::Add(
    Handle<Derived> dictionary,
    Key key,
    Handle<Object> value,
    PropertyDetails details) {
  // Validate key is absent.
  SLOW_ASSERT((dictionary->FindEntry(key) == Dictionary::kNotFound));
  // Check whether the dictionary should be extended.
  dictionary = EnsureCapacity(dictionary, 1, key);

  AddEntry(dictionary, key, value, details, dictionary->Hash(key));
  return dictionary;
}


// Add a key, value pair to the dictionary.
template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::AddEntry(
    Handle<Derived> dictionary,
    Key key,
    Handle<Object> value,
    PropertyDetails details,
    uint32_t hash) {
  // Compute the key object.
  Handle<Object> k = Shape::AsHandle(dictionary->GetIsolate(), key);

  uint32_t entry = dictionary->FindInsertionEntry(hash);
  // Insert element at empty or deleted entry.
  if (!details.IsDeleted() &&
      details.dictionary_index() == 0 &&
      Shape::kIsEnumerable) {
    // Assign an enumeration index to the property and update
    // SetNextEnumerationIndex.
    int index = dictionary->NextEnumerationIndex();
    details = PropertyDetails(details.attributes(), details.type(), index);
    dictionary->SetNextEnumerationIndex(index + 1);
  }
  dictionary->SetEntry(entry, k, value, details);
  ASSERT((dictionary->KeyAt(entry)->IsNumber() ||
          dictionary->KeyAt(entry)->IsName()));
  dictionary->ElementAdded();
}


// Tracks the largest numeric key seen so far, switching the dictionary into
// "requires slow elements" mode once a key exceeds the limit.
void SeededNumberDictionary::UpdateMaxNumberKey(uint32_t key) {
  DisallowHeapAllocation no_allocation;
  // If the dictionary requires slow elements an element has already
  // been added at a high index.
  if (requires_slow_elements()) return;
  // Check if this index is high enough that we should require slow
  // elements.
  if (key > kRequiresSlowElementsLimit) {
    set_requires_slow_elements();
    return;
  }
  // Update max key value.
  // The max key is stored shifted left by kRequiresSlowElementsTagSize so it
  // shares a field with the requires-slow-elements tag bit.
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi() || max_number_key() < key) {
    FixedArray::set(kMaxNumberKeyIndex,
                    Smi::FromInt(key << kRequiresSlowElementsTagSize));
  }
}


// Adds an absent numeric key, updating the max-key bookkeeping first.
Handle<SeededNumberDictionary> SeededNumberDictionary::AddNumberEntry(
    Handle<SeededNumberDictionary> dictionary,
    uint32_t key,
    Handle<Object> value,
    PropertyDetails details) {
  dictionary->UpdateMaxNumberKey(key);
  SLOW_ASSERT(dictionary->FindEntry(key) == kNotFound);
  return Add(dictionary, key, value, details);
}


// Adds an absent numeric key with default property details.
Handle<UnseededNumberDictionary> UnseededNumberDictionary::AddNumberEntry(
    Handle<UnseededNumberDictionary> dictionary,
    uint32_t key,
    Handle<Object> value) {
  SLOW_ASSERT(dictionary->FindEntry(key) == kNotFound);
  return Add(dictionary, key, value, PropertyDetails(NONE, NORMAL, 0));
}


// Inserts or overwrites a numeric key, updating max-key bookkeeping.
Handle<SeededNumberDictionary> SeededNumberDictionary::AtNumberPut(
    Handle<SeededNumberDictionary> dictionary,
    uint32_t key,
    Handle<Object> value) {
  dictionary->UpdateMaxNumberKey(key);
  return AtPut(dictionary, key, value);
}


// Inserts or overwrites a numeric key.
Handle<UnseededNumberDictionary> UnseededNumberDictionary::AtNumberPut(
    Handle<UnseededNumberDictionary> dictionary,
    uint32_t key,
    Handle<Object> value) {
  return AtPut(dictionary, key, value);
}


// Sets |key| to |value| with the given details; an existing entry keeps its
// enumeration index, a missing entry is added fresh.
Handle<SeededNumberDictionary> SeededNumberDictionary::Set(
    Handle<SeededNumberDictionary> dictionary,
    uint32_t key,
    Handle<Object> value,
    PropertyDetails details) {
  int entry = dictionary->FindEntry(key);
  if (entry == kNotFound) {
    return AddNumberEntry(dictionary, key, value, details);
  }
  // Preserve enumeration index.
  details = PropertyDetails(details.attributes(),
                            details.type(),
                            dictionary->DetailsAt(entry).dictionary_index());
  Handle<Object> object_key =
      SeededNumberDictionaryShape::AsHandle(dictionary->GetIsolate(), key);
  dictionary->SetEntry(entry, object_key, value, details);
  return dictionary;
}


// Sets |key| to |value|, adding the entry if absent.
Handle<UnseededNumberDictionary> UnseededNumberDictionary::Set(
    Handle<UnseededNumberDictionary> dictionary,
    uint32_t key,
    Handle<Object> value) {
  int entry = dictionary->FindEntry(key);
  if (entry == kNotFound) return AddNumberEntry(dictionary, key, value);
  Handle<Object> object_key =
      UnseededNumberDictionaryShape::AsHandle(dictionary->GetIsolate(), key);
  dictionary->SetEntry(entry, object_key, value);
  return dictionary;
}



// Counts the live, non-deleted entries whose attributes pass |filter|.
template<typename Derived, typename Shape, typename Key>
int Dictionary<Derived, Shape, Key>::NumberOfElementsFilterAttributes(
    PropertyAttributes filter) {
  int capacity = DerivedHashTable::Capacity();
  int result = 0;
  for (int i = 0; i < capacity; i++) {
    Object* k = DerivedHashTable::KeyAt(i);
    if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) {
      PropertyDetails details = DetailsAt(i);
      if (details.IsDeleted()) continue;
      PropertyAttributes attr = details.attributes();
      if ((attr & filter) == 0) result++;
    }
  }
  return result;
}


// Counts the entries visible to for-in style enumeration (not DONT_ENUM,
// not symbol-keyed).
template<typename Derived, typename Shape, typename Key>
int Dictionary<Derived, Shape, Key>::NumberOfEnumElements() {
  return NumberOfElementsFilterAttributes(
      static_cast<PropertyAttributes>(DONT_ENUM | SYMBOLIC));
}


// Copies all keys passing |filter| into |storage| starting at index 0,
// optionally sorting the result.
template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::CopyKeysTo(
    FixedArray* storage,
    PropertyAttributes filter,
    typename Dictionary<Derived, Shape, Key>::SortMode
        sort_mode) {
  ASSERT(storage->length() >= NumberOfElementsFilterAttributes(filter));
  int capacity = DerivedHashTable::Capacity();
  int index = 0;
  for (int i = 0; i < capacity; i++) {
    Object* k = DerivedHashTable::KeyAt(i);
    if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) {
      PropertyDetails details = DetailsAt(i);
      if (details.IsDeleted()) continue;
      PropertyAttributes attr = details.attributes();
      if ((attr & filter) == 0) storage->set(index++, k);
    }
  }
  if (sort_mode == Dictionary::SORTED) {
    storage->SortPairs(storage, index);
  }
  ASSERT(storage->length() >= index);
}


// Orders dictionary entries (referenced by Smi-encoded slot index) by their
// stored enumeration index.  Used as a comparator for std::sort below.
struct EnumIndexComparator {
  explicit EnumIndexComparator(NameDictionary* dict) : dict(dict) { }
  bool operator() (Smi* a, Smi* b) {
    PropertyDetails da(dict->DetailsAt(a->value()));
    PropertyDetails db(dict->DetailsAt(b->value()));
    return da.dictionary_index() < db.dictionary_index();
  }
  NameDictionary* dict;
};


// Fills |storage| with the enumerable, non-symbol keys in enumeration-index
// order.  |storage| must be pre-sized to exactly the number of such keys.
void NameDictionary::CopyEnumKeysTo(FixedArray* storage) {
  int length = storage->length();
  int capacity = Capacity();
  int properties = 0;
  // First pass: record the slot index of each enumerable property.
  for (int i = 0; i < capacity; i++) {
    Object* k = KeyAt(i);
    if (IsKey(k) && !k->IsSymbol()) {
      PropertyDetails details = DetailsAt(i);
      if (details.IsDeleted() || details.IsDontEnum()) continue;
      storage->set(properties, Smi::FromInt(i));
      properties++;
      if (properties == length) break;
    }
  }
  CHECK_EQ(length, properties);
  // Sort the slot indices by enumeration index, then replace each slot
  // index with the actual key.  Sorting raw Smi pointers is safe: no
  // allocation happens during the sort.
  EnumIndexComparator cmp(this);
  Smi** start = reinterpret_cast<Smi**>(storage->GetFirstElementAddress());
  std::sort(start, start + length, cmp);
  for (int i = 0; i < length; i++) {
    int index = Smi::cast(storage->get(i))->value();
    storage->set(i, KeyAt(index));
  }
}


// Copies all keys passing |filter| into |storage| starting at |index|,
// optionally sorting the result.
template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::CopyKeysTo(
    FixedArray* storage,
    int index,
    PropertyAttributes filter,
    typename Dictionary<Derived, Shape, Key>::SortMode sort_mode) {
  ASSERT(storage->length() >= NumberOfElementsFilterAttributes(filter));
  int capacity = DerivedHashTable::Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = DerivedHashTable::KeyAt(i);
    if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) {
      PropertyDetails details = DetailsAt(i);
      if (details.IsDeleted()) continue;
      PropertyAttributes attr = details.attributes();
      if ((attr & filter) == 0) storage->set(index++, k);
    }
  }
  if (sort_mode == Dictionary::SORTED) {
    storage->SortPairs(storage, index);
  }
  ASSERT(storage->length() >= index);
}


// Backwards lookup (slow).
template<typename Derived, typename Shape, typename Key>
Object* Dictionary<Derived, Shape, Key>::SlowReverseLookup(Object* value) {
  int capacity = DerivedHashTable::Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = DerivedHashTable::KeyAt(i);
    if (Dictionary::IsKey(k)) {
      Object* e = ValueAt(i);
      // Global-object properties are boxed in PropertyCells; compare the
      // cell contents, not the cell itself.
      if (e->IsPropertyCell()) {
        e = PropertyCell::cast(e)->value();
      }
      if (e == value) return k;
    }
  }
  Heap* heap = Dictionary::GetHeap();
  return heap->undefined_value();
}


// Returns the value for |key|, or the hole if the key is not present.
Object* ObjectHashTable::Lookup(Handle<Object> key) {
  DisallowHeapAllocation no_gc;
  ASSERT(IsKey(*key));

  // If the object does not have an identity hash, it was never used as a key.
  Object* hash = key->GetHash();
  if (hash->IsUndefined()) {
    return GetHeap()->the_hole_value();
  }
  int entry = FindEntry(key);
  if (entry == kNotFound) return GetHeap()->the_hole_value();
  return get(EntryToIndex(entry) + 1);
}


// Sets |key| to |value|, creating the key's identity hash on demand and
// growing the table if necessary.  |value| must not be the hole (the hole
// marks deleted entries).
Handle<ObjectHashTable> ObjectHashTable::Put(Handle<ObjectHashTable> table,
                                             Handle<Object> key,
                                             Handle<Object> value) {
  ASSERT(table->IsKey(*key));
  ASSERT(!value->IsTheHole());

  Isolate* isolate = table->GetIsolate();

  // Make sure the key object has an identity hash code.
  Handle<Smi> hash = Object::GetOrCreateHash(isolate, key);

  int entry = table->FindEntry(key);

  // Key is already in table, just overwrite value.
  if (entry != kNotFound) {
    table->set(EntryToIndex(entry) + 1, *value);
    return table;
  }

  // Check whether the hash table should be extended.
  table = EnsureCapacity(table, 1, key);
  table->AddEntry(table->FindInsertionEntry(hash->value()),
                  *key,
                  *value);
  return table;
}


// Removes |key| if present, reporting via |was_present| whether it was, and
// shrinking the table when it becomes sparse.
Handle<ObjectHashTable> ObjectHashTable::Remove(Handle<ObjectHashTable> table,
                                                Handle<Object> key,
                                                bool* was_present) {
  ASSERT(table->IsKey(*key));

  // A key without an identity hash was never inserted.
  Object* hash = key->GetHash();
  if (hash->IsUndefined()) {
    *was_present = false;
    return table;
  }

  int entry = table->FindEntry(key);
  if (entry == kNotFound) {
    *was_present = false;
    return table;
  }

  *was_present = true;
  table->RemoveEntry(entry);
  return Shrink(table, key);
}


// Writes a key/value pair into the slot pair for |entry|.
void ObjectHashTable::AddEntry(int entry, Object* key, Object* value) {
  set(EntryToIndex(entry), key);
  set(EntryToIndex(entry) + 1, value);
  ElementAdded();
}


// Clears the slot pair for |entry| back to the hole.
void ObjectHashTable::RemoveEntry(int entry) {
  set_the_hole(EntryToIndex(entry));
  set_the_hole(EntryToIndex(entry) + 1);
  ElementRemoved();
}


// Returns the value for |key|, or the hole if the key is not present.
Object* WeakHashTable::Lookup(Handle<Object> key) {
  DisallowHeapAllocation no_gc;
  ASSERT(IsKey(*key));
  int entry = FindEntry(key);
  if (entry == kNotFound) return GetHeap()->the_hole_value();
  return get(EntryToValueIndex(entry));
}


// Sets |key| to |value|, growing (tenured) if necessary.
Handle<WeakHashTable> WeakHashTable::Put(Handle<WeakHashTable> table,
                                         Handle<Object> key,
                                         Handle<Object> value) {
  ASSERT(table->IsKey(*key));
  int entry = table->FindEntry(key);
  // Key is already in table, just overwrite value.
  if (entry != kNotFound) {
    // TODO(ulan): Skipping write barrier is a temporary solution to avoid
    // memory leaks. Remove this once we have special visitor for weak fixed
    // arrays.
    table->set(EntryToValueIndex(entry), *value, SKIP_WRITE_BARRIER);
    return table;
  }

  // Check whether the hash table should be extended.
  table = EnsureCapacity(table, 1, key, TENURED);

  table->AddEntry(table->FindInsertionEntry(table->Hash(key)), key, value);
  return table;
}


// Writes a key/value pair into the slots for |entry| without write barriers
// (see TODO below).
void WeakHashTable::AddEntry(int entry,
                             Handle<Object> key,
                             Handle<Object> value) {
  DisallowHeapAllocation no_allocation;
  // TODO(ulan): Skipping write barrier is a temporary solution to avoid
  // memory leaks. Remove this once we have special visitor for weak fixed
  // arrays.
  set(EntryToIndex(entry), *key, SKIP_WRITE_BARRIER);
  set(EntryToValueIndex(entry), *value, SKIP_WRITE_BARRIER);
  ElementAdded();
}


// Allocates a fresh ordered hash table with the given (rounded-up) capacity:
// a header, |capacity / kLoadFactor| bucket heads, and the entry slots.
template<class Derived, class Iterator, int entrysize>
Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Allocate(
    Isolate* isolate, int capacity, PretenureFlag pretenure) {
  // Capacity must be a power of two, since we depend on being able
  // to divide and multiply by 2 (kLoadFactor) to derive capacity
  // from number of buckets. If we decide to change kLoadFactor
  // to something other than 2, capacity should be stored as another
  // field of this object.
  capacity = RoundUpToPowerOf2(Max(kMinCapacity, capacity));
  if (capacity > kMaxCapacity) {
    v8::internal::Heap::FatalProcessOutOfMemory("invalid table size", true);
  }
  int num_buckets = capacity / kLoadFactor;
  Handle<FixedArray> backing_store = isolate->factory()->NewFixedArray(
      kHashTableStartIndex + num_buckets + (capacity * kEntrySize), pretenure);
  backing_store->set_map_no_write_barrier(
      isolate->heap()->ordered_hash_table_map());
  Handle<Derived> table = Handle<Derived>::cast(backing_store);
  // Empty buckets hold kNotFound as the chain terminator.
  for (int i = 0; i < num_buckets; ++i) {
    table->set(kHashTableStartIndex + i, Smi::FromInt(kNotFound));
  }
  table->SetNumberOfBuckets(num_buckets);
  table->SetNumberOfElements(0);
  table->SetNumberOfDeletedElements(0);
  return table;
}


// Ensures at least one free entry slot, rehashing into a (possibly larger)
// table when the used capacity is exhausted.
template<class Derived, class Iterator, int entrysize>
Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::EnsureGrowable(
    Handle<Derived> table) {
  ASSERT(!table->IsObsolete());

  int nof = table->NumberOfElements();
  int nod = table->NumberOfDeletedElements();
  int capacity = table->Capacity();
  if ((nof + nod) < capacity) return table;
  // Don't need to grow if we can simply clear out deleted entries instead.
  // Note that we can't compact in place, though, so we always allocate
  // a new table.
  return Rehash(table, (nod < (capacity >> 1)) ? capacity << 1 : capacity);
}


// Halves the capacity when the table drops below one quarter full.
template<class Derived, class Iterator, int entrysize>
Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Shrink(
    Handle<Derived> table) {
  ASSERT(!table->IsObsolete());

  int nof = table->NumberOfElements();
  int capacity = table->Capacity();
  if (nof >= (capacity >> 2)) return table;
  return Rehash(table, capacity / 2);
}


// Replaces the table with a fresh minimal one; the old table is marked
// obsolete (deleted-count -1) so live iterators can transition to it.
template<class Derived, class Iterator, int entrysize>
Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Clear(
    Handle<Derived> table) {
  ASSERT(!table->IsObsolete());

  Handle<Derived> new_table =
      Allocate(table->GetIsolate(),
               kMinCapacity,
               table->GetHeap()->InNewSpace(*table) ? NOT_TENURED : TENURED);

  table->SetNextTable(*new_table);
  table->SetNumberOfDeletedElements(-1);

  return new_table;
}


// Removes |key| if present, reporting via |was_present|, and shrinks the
// table when it becomes sparse.
template<class Derived, class Iterator, int entrysize>
Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Remove(
    Handle<Derived> table, Handle<Object> key, bool* was_present) {
  int entry = table->FindEntry(key);
  if (entry == kNotFound) {
    *was_present = false;
    return table;
  }
  *was_present = true;
  table->RemoveEntry(entry);
  return Shrink(table);
}


// Copies live entries (in insertion order) into a new table of
// |new_capacity|, recording the removed-hole positions in the old table so
// iterators can adjust their indices during Transition().
template<class Derived, class Iterator, int entrysize>
Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Rehash(
    Handle<Derived> table, int new_capacity) {
  ASSERT(!table->IsObsolete());

  Handle<Derived> new_table =
      Allocate(table->GetIsolate(),
               new_capacity,
               table->GetHeap()->InNewSpace(*table) ? NOT_TENURED : TENURED);
  int nof = table->NumberOfElements();
  int nod = table->NumberOfDeletedElements();
  int new_buckets = new_table->NumberOfBuckets();
  int new_entry = 0;
  int removed_holes_index = 0;

  for (int old_entry = 0; old_entry < (nof + nod); ++old_entry) {
    Object* key = table->KeyAt(old_entry);
    if (key->IsTheHole()) {
      table->SetRemovedIndexAt(removed_holes_index++, old_entry);
      continue;
    }

    // The key has a hash: it was hashed on insertion.
    Object* hash = key->GetHash();
    int bucket = Smi::cast(hash)->value() & (new_buckets - 1);
    // Prepend the entry to its bucket chain.
    Object* chain_entry = new_table->get(kHashTableStartIndex + bucket);
    new_table->set(kHashTableStartIndex + bucket, Smi::FromInt(new_entry));
    int new_index = new_table->EntryToIndex(new_entry);
    int old_index = table->EntryToIndex(old_entry);
    for (int i = 0; i < entrysize; ++i) {
      Object* value = table->get(old_index + i);
      new_table->set(new_index + i, value);
    }
    new_table->set(new_index + kChainOffset, chain_entry);
    ++new_entry;
  }

  ASSERT_EQ(nod, removed_holes_index);

  new_table->SetNumberOfElements(nof);
  table->SetNextTable(*new_table);

  return new_table;
}


// Returns the entry holding |key| (SameValueZero comparison) or kNotFound.
template<class Derived, class Iterator, int entrysize>
int OrderedHashTable<Derived, Iterator, entrysize>::FindEntry(
    Handle<Object> key) {
  ASSERT(!IsObsolete());

  DisallowHeapAllocation no_gc;
  ASSERT(!key->IsTheHole());
  // No identity hash means the key was never inserted.
  Object* hash = key->GetHash();
  if (hash->IsUndefined()) return kNotFound;
  for (int entry = HashToEntry(Smi::cast(hash)->value());
       entry != kNotFound;
       entry = ChainAt(entry)) {
    Object* candidate = KeyAt(entry);
    if (candidate->SameValueZero(*key))
      return entry;
  }
  return kNotFound;
}


// Reserves the next entry slot for |hash| and links it into its bucket
// chain; returns the slot index for the caller to fill in key/value.
template<class Derived, class Iterator, int entrysize>
int OrderedHashTable<Derived, Iterator,
                    entrysize>::AddEntry(int hash) {
  ASSERT(!IsObsolete());

  int entry = UsedCapacity();
  int bucket = HashToBucket(hash);
  int index = EntryToIndex(entry);
  // Prepend the new entry to its bucket chain.
  Object* chain_entry = get(kHashTableStartIndex + bucket);
  set(kHashTableStartIndex + bucket, Smi::FromInt(entry));
  set(index + kChainOffset, chain_entry);
  SetNumberOfElements(NumberOfElements() + 1);
  return index;
}


// Holes out all slots of |entry| and updates the element/deleted counters.
// The chain link is left intact so lookups can still walk past the hole.
template<class Derived, class Iterator, int entrysize>
void OrderedHashTable<Derived, Iterator, entrysize>::RemoveEntry(int entry) {
  ASSERT(!IsObsolete());

  int index = EntryToIndex(entry);
  for (int i = 0; i < entrysize; ++i) {
    set_the_hole(index + i);
  }
  SetNumberOfElements(NumberOfElements() - 1);
  SetNumberOfDeletedElements(NumberOfDeletedElements() + 1);
}


// Explicit instantiations for the OrderedHashSet (entrysize 1) flavor.
template Handle<OrderedHashSet>
OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Allocate(
    Isolate* isolate, int capacity, PretenureFlag pretenure);

template Handle<OrderedHashSet>
OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::EnsureGrowable(
    Handle<OrderedHashSet> table);

template Handle<OrderedHashSet>
OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Shrink(
    Handle<OrderedHashSet> table);

template Handle<OrderedHashSet>
OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Clear(
    Handle<OrderedHashSet> table);

template Handle<OrderedHashSet>
OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Remove(
    Handle<OrderedHashSet> table, Handle<Object> key, bool* was_present);

template int
OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::FindEntry(
    Handle<Object> key);

template int
OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::AddEntry(int hash);

template void
OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::RemoveEntry(int entry);


// Explicit instantiations for the OrderedHashMap (entrysize 2) flavor.
template Handle<OrderedHashMap>
OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Allocate(
    Isolate* isolate, int capacity, PretenureFlag pretenure);

template Handle<OrderedHashMap>
OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::EnsureGrowable(
    Handle<OrderedHashMap> table);

template Handle<OrderedHashMap>
OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Shrink(
    Handle<OrderedHashMap> table);

template Handle<OrderedHashMap>
OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Clear(
    Handle<OrderedHashMap> table);

template Handle<OrderedHashMap>
OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Remove(
    Handle<OrderedHashMap> table, Handle<Object> key, bool* was_present);

template int
OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::FindEntry(
    Handle<Object> key);

template int
OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::AddEntry(int hash);

template void
OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::RemoveEntry(int entry);


// Returns true when |key| is a member of the set.
bool OrderedHashSet::Contains(Handle<Object> key) {
  return FindEntry(key) != kNotFound;
}


// Adds |key| to the set; a no-op when it is already present.
Handle<OrderedHashSet> OrderedHashSet::Add(Handle<OrderedHashSet> table,
                                           Handle<Object> key) {
  if (table->FindEntry(key) != kNotFound) return table;

  table = EnsureGrowable(table);

  Handle<Smi> hash = GetOrCreateHash(table->GetIsolate(), key);
  int index = table->AddEntry(hash->value());
  table->set(index, *key);
  return table;
}


// Returns the value for |key|, or the hole if absent.
Object* OrderedHashMap::Lookup(Handle<Object> key) {
  DisallowHeapAllocation no_gc;
  int entry = FindEntry(key);
  if (entry == kNotFound) return GetHeap()->the_hole_value();
  return ValueAt(entry);
}


// Sets |key| to |value|, overwriting an existing entry in place.
Handle<OrderedHashMap>
OrderedHashMap::Put(Handle<OrderedHashMap> table,
                    Handle<Object> key,
                    Handle<Object> value) {
  ASSERT(!key->IsTheHole());

  int entry = table->FindEntry(key);

  if (entry != kNotFound) {
    table->set(table->EntryToIndex(entry) + kValueOffset, *value);
    return table;
  }

  table = EnsureGrowable(table);

  Handle<Smi> hash = GetOrCreateHash(table->GetIsolate(), key);
  int index = table->AddEntry(hash->value());
  table->set(index, *key);
  table->set(index + kValueOffset, *value);
  return table;
}


// Advances the iterator, skipping deleted (hole) entries, and returns an
// iterator-result object {value, done}.  A consumed iterator has its table
// slot set to undefined.
template<class Derived, class TableType>
Handle<JSObject> OrderedHashTableIterator<Derived, TableType>::Next(
    Handle<Derived> iterator) {
  Isolate* isolate = iterator->GetIsolate();
  Factory* factory = isolate->factory();

  Handle<Object> maybe_table(iterator->table(), isolate);
  if (!maybe_table->IsUndefined()) {
    // Follow the chain of obsolete tables to the current one first.
    iterator->Transition();

    Handle<TableType> table(TableType::cast(iterator->table()), isolate);
    int index = Smi::cast(iterator->index())->value();
    int used_capacity = table->UsedCapacity();

    while (index < used_capacity && table->KeyAt(index)->IsTheHole()) {
      index++;
    }

    if (index < used_capacity) {
      int entry_index = table->EntryToIndex(index);
      Handle<Object> value =
          Derived::ValueForKind(iterator, entry_index);
      iterator->set_index(Smi::FromInt(index + 1));
      return factory->NewIteratorResultObject(value, false);
    }

    // Exhausted: detach from the table so future calls report done.
    iterator->set_table(iterator->GetHeap()->undefined_value());
  }

  return factory->NewIteratorResultObject(factory->undefined_value(), true);
}


// Moves the iterator from an obsolete table to its live successor,
// rebasing the entry index past entries removed by intervening rehashes
// (or resetting it after a Clear, signalled by deleted-count -1).
template<class Derived, class TableType>
void OrderedHashTableIterator<Derived, TableType>::Transition() {
  Isolate* isolate = GetIsolate();
  Handle<TableType> table(TableType::cast(this->table()), isolate);
  if (!table->IsObsolete()) return;

  int index = Smi::cast(this->index())->value();
  while (table->IsObsolete()) {
    Handle<TableType> next_table(table->NextTable(), isolate);

    if (index > 0) {
      int nod = table->NumberOfDeletedElements();

      // When we clear the table we set the number of deleted elements to -1.
      if (nod == -1) {
        index = 0;
      } else {
        // Subtract one for every removed entry that preceded our position.
        int old_index = index;
        for (int i = 0; i < nod; ++i) {
          int removed_index = table->RemovedIndexAt(i);
          if (removed_index >= old_index) break;
          --index;
        }
      }
    }

    table = next_table;
  }

  set_table(*table);
  set_index(Smi::FromInt(index));
}


template Handle<JSObject>
OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::Next(
    Handle<JSSetIterator> iterator);

template void
OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::Transition();


template Handle<JSObject>
OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::Next(
    Handle<JSMapIterator> iterator);

template void
OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::Transition();


// Produces the iteration value at |entry_index| for the iterator's kind:
// the bare value, or a [value, value] pair array for the entries kind.
Handle<Object> JSSetIterator::ValueForKind(
    Handle<JSSetIterator> iterator, int entry_index) {
  int kind = iterator->kind()->value();
  // Set.prototype only has values and entries.
  ASSERT(kind == kKindValues || kind == kKindEntries);

  Isolate* isolate = iterator->GetIsolate();
  Factory* factory = isolate->factory();

  Handle<OrderedHashSet> table(
      OrderedHashSet::cast(iterator->table()), isolate);
  Handle<Object> value = Handle<Object>(table->get(entry_index), isolate);

  if (kind == kKindEntries) {
    // For sets, an "entry" is the value paired with itself.
    Handle<FixedArray> array = factory->NewFixedArray(2);
    array->set(0, *value);
    array->set(1, *value);
    return factory->NewJSArrayWithElements(array);
  }

  return value;
}


// Produces the iteration value at |entry_index| for the iterator's kind:
// key, value, or a [key, value] pair array.
Handle<Object> JSMapIterator::ValueForKind(
    Handle<JSMapIterator> iterator, int entry_index) {
  int kind = iterator->kind()->value();
  ASSERT(kind == kKindKeys || kind == kKindValues || kind == kKindEntries);

  Isolate* isolate = iterator->GetIsolate();
  Factory* factory = isolate->factory();

  Handle<OrderedHashMap> table(
      OrderedHashMap::cast(iterator->table()), isolate);

  switch (kind) {
    case kKindKeys:
      return Handle<Object>(table->get(entry_index), isolate);

    case kKindValues:
      return Handle<Object>(table->get(entry_index + 1), isolate);

    case kKindEntries: {
      Handle<Object> key(table->get(entry_index), isolate);
      Handle<Object> value(table->get(entry_index + 1), isolate);
      Handle<FixedArray> array = factory->NewFixedArray(2);
      array->set(0, *key);
      array->set(1, *value);
      return factory->NewJSArrayWithElements(array);
    }
  }

  UNREACHABLE();
  return factory->undefined_value();
}


// Positions the iterator at the start of the descriptor's serialized data.
DeclaredAccessorDescriptorIterator::DeclaredAccessorDescriptorIterator(
    DeclaredAccessorDescriptor* descriptor)
    : array_(descriptor->serialized_data()->GetDataStartAddress()),
      length_(descriptor->serialized_data()->length()),
      offset_(0) {
}


// Returns the next serialized descriptor record and advances the cursor.
// The data is expected to be properly aligned for in-place reinterpretation.
const DeclaredAccessorDescriptorData*
DeclaredAccessorDescriptorIterator::Next() {
  ASSERT(offset_ < length_);
  uint8_t* ptr = &array_[offset_];
  ASSERT(reinterpret_cast<uintptr_t>(ptr) % sizeof(uintptr_t) == 0);
  const DeclaredAccessorDescriptorData* data =
      reinterpret_cast<const DeclaredAccessorDescriptorData*>(ptr);
  offset_ += sizeof(*data);
  ASSERT(offset_ <= length_);
  return data;
}


// Builds a new descriptor whose serialized data is |previous|'s data (if
// any) followed by |descriptor|.
Handle<DeclaredAccessorDescriptor> DeclaredAccessorDescriptor::Create(
    Isolate* isolate,
    const DeclaredAccessorDescriptorData& descriptor,
    Handle<DeclaredAccessorDescriptor> previous) {
  int previous_length =
      previous.is_null() ? 0 : previous->serialized_data()->length();
  int length = sizeof(descriptor) + previous_length;
  Handle<ByteArray> serialized_descriptor =
      isolate->factory()->NewByteArray(length);
  Handle<DeclaredAccessorDescriptor> value =
      isolate->factory()->NewDeclaredAccessorDescriptor();
  value->set_serialized_data(*serialized_descriptor);
  // Copy in the data.
  {
    DisallowHeapAllocation no_allocation;
    uint8_t* array = serialized_descriptor->GetDataStartAddress();
    if (previous_length != 0) {
      uint8_t* previous_array =
          previous->serialized_data()->GetDataStartAddress();
      MemCopy(array, previous_array, previous_length);
      array += previous_length;
    }
    ASSERT(reinterpret_cast<uintptr_t>(array) % sizeof(uintptr_t) == 0);
    DeclaredAccessorDescriptorData* data =
        reinterpret_cast<DeclaredAccessorDescriptorData*>(array);
    *data = descriptor;
  }
  return value;
}


// Check if there is a break point at this code position.
bool DebugInfo::HasBreakPoint(int code_position) {
  // Get the break point info object for this code position.
  Object* break_point_info = GetBreakPointInfo(code_position);

  // If there is no break point info object or no break points in the break
  // point info object there is no break point at this code position.
  if (break_point_info->IsUndefined()) return false;
  return BreakPointInfo::cast(break_point_info)->GetBreakPointCount() > 0;
}


// Get the break point info object for this code position.
Object* DebugInfo::GetBreakPointInfo(int code_position) {
  // Find the index of the break point info object for this code position.
  int index = GetBreakPointInfoIndex(code_position);

  // Return the break point info object if any.
  if (index == kNoBreakPointInfo) return GetHeap()->undefined_value();
  return BreakPointInfo::cast(break_points()->get(index));
}


// Clear a break point at the specified code position.
void DebugInfo::ClearBreakPoint(Handle<DebugInfo> debug_info,
                                int code_position,
                                Handle<Object> break_point_object) {
  Handle<Object> break_point_info(debug_info->GetBreakPointInfo(code_position),
                                  debug_info->GetIsolate());
  if (break_point_info->IsUndefined()) return;
  BreakPointInfo::ClearBreakPoint(
      Handle<BreakPointInfo>::cast(break_point_info),
      break_point_object);
}


// Registers |break_point_object| at |code_position|, reusing an existing
// BreakPointInfo for that position when one exists.
void DebugInfo::SetBreakPoint(Handle<DebugInfo> debug_info,
                              int code_position,
                              int source_position,
                              int statement_position,
                              Handle<Object> break_point_object) {
  Isolate* isolate = debug_info->GetIsolate();
  Handle<Object> break_point_info(debug_info->GetBreakPointInfo(code_position),
                                  isolate);
  if (!break_point_info->IsUndefined()) {
    BreakPointInfo::SetBreakPoint(
        Handle<BreakPointInfo>::cast(break_point_info),
        break_point_object);
    return;
  }

  // Adding a new break point for a code position which did not have any
  // break
points before. Try to find a free slot. 16532 int index = kNoBreakPointInfo; 16533 for (int i = 0; i < debug_info->break_points()->length(); i++) { 16534 if (debug_info->break_points()->get(i)->IsUndefined()) { 16535 index = i; 16536 break; 16537 } 16538 } 16539 if (index == kNoBreakPointInfo) { 16540 // No free slot - extend break point info array. 16541 Handle<FixedArray> old_break_points = 16542 Handle<FixedArray>(FixedArray::cast(debug_info->break_points())); 16543 Handle<FixedArray> new_break_points = 16544 isolate->factory()->NewFixedArray( 16545 old_break_points->length() + 16546 DebugInfo::kEstimatedNofBreakPointsInFunction); 16547 16548 debug_info->set_break_points(*new_break_points); 16549 for (int i = 0; i < old_break_points->length(); i++) { 16550 new_break_points->set(i, old_break_points->get(i)); 16551 } 16552 index = old_break_points->length(); 16553 } 16554 ASSERT(index != kNoBreakPointInfo); 16555 16556 // Allocate new BreakPointInfo object and set the break point. 16557 Handle<BreakPointInfo> new_break_point_info = Handle<BreakPointInfo>::cast( 16558 isolate->factory()->NewStruct(BREAK_POINT_INFO_TYPE)); 16559 new_break_point_info->set_code_position(Smi::FromInt(code_position)); 16560 new_break_point_info->set_source_position(Smi::FromInt(source_position)); 16561 new_break_point_info-> 16562 set_statement_position(Smi::FromInt(statement_position)); 16563 new_break_point_info->set_break_point_objects( 16564 isolate->heap()->undefined_value()); 16565 BreakPointInfo::SetBreakPoint(new_break_point_info, break_point_object); 16566 debug_info->break_points()->set(index, *new_break_point_info); 16567 } 16568 16569 16570 // Get the break point objects for a code position. 
16571 Object* DebugInfo::GetBreakPointObjects(int code_position) { 16572 Object* break_point_info = GetBreakPointInfo(code_position); 16573 if (break_point_info->IsUndefined()) { 16574 return GetHeap()->undefined_value(); 16575 } 16576 return BreakPointInfo::cast(break_point_info)->break_point_objects(); 16577 } 16578 16579 16580 // Get the total number of break points. 16581 int DebugInfo::GetBreakPointCount() { 16582 if (break_points()->IsUndefined()) return 0; 16583 int count = 0; 16584 for (int i = 0; i < break_points()->length(); i++) { 16585 if (!break_points()->get(i)->IsUndefined()) { 16586 BreakPointInfo* break_point_info = 16587 BreakPointInfo::cast(break_points()->get(i)); 16588 count += break_point_info->GetBreakPointCount(); 16589 } 16590 } 16591 return count; 16592 } 16593 16594 16595 Object* DebugInfo::FindBreakPointInfo(Handle<DebugInfo> debug_info, 16596 Handle<Object> break_point_object) { 16597 Heap* heap = debug_info->GetHeap(); 16598 if (debug_info->break_points()->IsUndefined()) return heap->undefined_value(); 16599 for (int i = 0; i < debug_info->break_points()->length(); i++) { 16600 if (!debug_info->break_points()->get(i)->IsUndefined()) { 16601 Handle<BreakPointInfo> break_point_info = 16602 Handle<BreakPointInfo>(BreakPointInfo::cast( 16603 debug_info->break_points()->get(i))); 16604 if (BreakPointInfo::HasBreakPointObject(break_point_info, 16605 break_point_object)) { 16606 return *break_point_info; 16607 } 16608 } 16609 } 16610 return heap->undefined_value(); 16611 } 16612 16613 16614 // Find the index of the break point info object for the specified code 16615 // position. 
int DebugInfo::GetBreakPointInfoIndex(int code_position) {
  // No break point array means no break points at all.
  if (break_points()->IsUndefined()) return kNoBreakPointInfo;
  // Linear scan of the (small) break point array for a matching position.
  for (int i = 0; i < break_points()->length(); i++) {
    if (!break_points()->get(i)->IsUndefined()) {
      BreakPointInfo* break_point_info =
          BreakPointInfo::cast(break_points()->get(i));
      if (break_point_info->code_position()->value() == code_position) {
        return i;
      }
    }
  }
  return kNoBreakPointInfo;
}


// Remove the specified break point object.
// Note: break_point_objects() is either undefined (none), a single object,
// or a FixedArray of objects; all three shapes are handled below.
void BreakPointInfo::ClearBreakPoint(Handle<BreakPointInfo> break_point_info,
                                     Handle<Object> break_point_object) {
  Isolate* isolate = break_point_info->GetIsolate();
  // If there are no break points just ignore.
  if (break_point_info->break_point_objects()->IsUndefined()) return;
  // If there is a single break point clear it if it is the same.
  if (!break_point_info->break_point_objects()->IsFixedArray()) {
    if (break_point_info->break_point_objects() == *break_point_object) {
      break_point_info->set_break_point_objects(
          isolate->heap()->undefined_value());
    }
    return;
  }
  // If there are multiple break points shrink the array.
  ASSERT(break_point_info->break_point_objects()->IsFixedArray());
  Handle<FixedArray> old_array =
      Handle<FixedArray>(
          FixedArray::cast(break_point_info->break_point_objects()));
  Handle<FixedArray> new_array =
      isolate->factory()->NewFixedArray(old_array->length() - 1);
  int found_count = 0;
  for (int i = 0; i < old_array->length(); i++) {
    if (old_array->get(i) == *break_point_object) {
      // A break point object appears at most once in the array.
      ASSERT(found_count == 0);
      found_count++;
    } else {
      new_array->set(i - found_count, old_array->get(i));
    }
  }
  // If the break point was found in the list change it.
  if (found_count > 0) break_point_info->set_break_point_objects(*new_array);
}


// Add the specified break point object.
void BreakPointInfo::SetBreakPoint(Handle<BreakPointInfo> break_point_info,
                                   Handle<Object> break_point_object) {
  Isolate* isolate = break_point_info->GetIsolate();

  // If there was no break point objects before just set it.
  if (break_point_info->break_point_objects()->IsUndefined()) {
    break_point_info->set_break_point_objects(*break_point_object);
    return;
  }
  // If the break point object is the same as before just ignore.
  if (break_point_info->break_point_objects() == *break_point_object) return;
  // If there was one break point object before replace with array.
  if (!break_point_info->break_point_objects()->IsFixedArray()) {
    Handle<FixedArray> array = isolate->factory()->NewFixedArray(2);
    array->set(0, break_point_info->break_point_objects());
    array->set(1, *break_point_object);
    break_point_info->set_break_point_objects(*array);
    return;
  }
  // If there was more than one break point before extend array.
  Handle<FixedArray> old_array =
      Handle<FixedArray>(
          FixedArray::cast(break_point_info->break_point_objects()));
  Handle<FixedArray> new_array =
      isolate->factory()->NewFixedArray(old_array->length() + 1);
  for (int i = 0; i < old_array->length(); i++) {
    // If the break point was there before just ignore.
    if (old_array->get(i) == *break_point_object) return;
    new_array->set(i, old_array->get(i));
  }
  // Add the new break point.
  new_array->set(old_array->length(), *break_point_object);
  break_point_info->set_break_point_objects(*new_array);
}


// Check whether |break_point_object| is registered in this BreakPointInfo.
bool BreakPointInfo::HasBreakPointObject(
    Handle<BreakPointInfo> break_point_info,
    Handle<Object> break_point_object) {
  // No break point.
  if (break_point_info->break_point_objects()->IsUndefined()) return false;
  // Single break point.
  if (!break_point_info->break_point_objects()->IsFixedArray()) {
    return break_point_info->break_point_objects() == *break_point_object;
  }
  // Multiple break points.
  FixedArray* array = FixedArray::cast(break_point_info->break_point_objects());
  for (int i = 0; i < array->length(); i++) {
    if (array->get(i) == *break_point_object) {
      return true;
    }
  }
  return false;
}


// Get the number of break points.
int BreakPointInfo::GetBreakPointCount() {
  // No break point.
  if (break_point_objects()->IsUndefined()) return 0;
  // Single break point.
  if (!break_point_objects()->IsFixedArray()) return 1;
  // Multiple break points.
  return FixedArray::cast(break_point_objects())->length();
}


// Entry point used to read one broken-down date field (see FieldIndex)
// from a JSDate object.
Object* JSDate::GetField(Object* object, Smi* index) {
  return JSDate::cast(object)->DoGetField(
      static_cast<FieldIndex>(index->value()));
}


Object* JSDate::DoGetField(FieldIndex index) {
  // The raw time value is not served through this path.
  ASSERT(index != kDateValue);

  DateCache* date_cache = GetIsolate()->date_cache();

  if (index < kFirstUncachedField) {
    // Cached local-time fields (year .. sec): refresh the cache first if the
    // DateCache stamp has moved since the fields were computed.
    Object* stamp = cache_stamp();
    if (stamp != date_cache->stamp() && stamp->IsSmi()) {
      // Since the stamp is not NaN, the value is also not NaN.
      int64_t local_time_ms =
          date_cache->ToLocal(static_cast<int64_t>(value()->Number()));
      SetCachedFields(local_time_ms, date_cache);
    }
    switch (index) {
      case kYear: return year();
      case kMonth: return month();
      case kDay: return day();
      case kWeekday: return weekday();
      case kHour: return hour();
      case kMinute: return min();
      case kSecond: return sec();
      default: UNREACHABLE();
    }
  }

  if (index >= kFirstUTCField) {
    return GetUTCField(index, value()->Number(), date_cache);
  }

  // Remaining (uncached, local-time) fields: kDays, kMillisecond,
  // kTimeInDay.
  double time = value()->Number();
  if (std::isnan(time)) return GetIsolate()->heap()->nan_value();

  int64_t local_time_ms = date_cache->ToLocal(static_cast<int64_t>(time));
  int days = DateCache::DaysFromTime(local_time_ms);

  if (index == kDays) return Smi::FromInt(days);

  int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
  if (index == kMillisecond) return Smi::FromInt(time_in_day_ms % 1000);
  ASSERT(index == kTimeInDay);
  return Smi::FromInt(time_in_day_ms);
}


// Compute a UTC date field directly from the raw time |value| (no caching).
Object* JSDate::GetUTCField(FieldIndex index,
                            double value,
                            DateCache* date_cache) {
  ASSERT(index >= kFirstUTCField);

  // An invalid date has a NaN time value; every field then reads as NaN.
  if (std::isnan(value)) return GetIsolate()->heap()->nan_value();

  int64_t time_ms = static_cast<int64_t>(value);

  if (index == kTimezoneOffset) {
    return Smi::FromInt(date_cache->TimezoneOffset(time_ms));
  }

  int days = DateCache::DaysFromTime(time_ms);

  if (index == kWeekdayUTC) return Smi::FromInt(date_cache->Weekday(days));

  if (index <= kDayUTC) {
    int year, month, day;
    date_cache->YearMonthDayFromDays(days, &year, &month, &day);
    if (index == kYearUTC) return Smi::FromInt(year);
    if (index == kMonthUTC) return Smi::FromInt(month);
    ASSERT(index == kDayUTC);
    return Smi::FromInt(day);
  }

  int time_in_day_ms = DateCache::TimeInDay(time_ms, days);
  switch (index) {
    case kHourUTC: return Smi::FromInt(time_in_day_ms / (60 * 60 * 1000));
    case kMinuteUTC: return Smi::FromInt((time_in_day_ms / (60 * 1000)) % 60);
    case kSecondUTC: return Smi::FromInt((time_in_day_ms / 1000) % 60);
    case kMillisecondUTC: return Smi::FromInt(time_in_day_ms % 1000);
    case kDaysUTC: return Smi::FromInt(days);
    case kTimeInDayUTC: return Smi::FromInt(time_in_day_ms);
    default: UNREACHABLE();
  }

  // Not reached; the switch above handles every remaining field index.
  UNREACHABLE();
  return NULL;
}


// Store a new time value and either NaN out the cached fields (invalid
// date) or invalidate the cache stamp so they are recomputed lazily.
void JSDate::SetValue(Object* value, bool is_value_nan) {
  set_value(value);
  if (is_value_nan) {
    // NOTE(review): SKIP_WRITE_BARRIER here presumably relies on nan_value
    // being an immortal heap object -- confirm against the barrier rules.
    HeapNumber* nan = GetIsolate()->heap()->nan_value();
    set_cache_stamp(nan, SKIP_WRITE_BARRIER);
    set_year(nan, SKIP_WRITE_BARRIER);
    set_month(nan, SKIP_WRITE_BARRIER);
    set_day(nan, SKIP_WRITE_BARRIER);
    set_hour(nan, SKIP_WRITE_BARRIER);
    set_min(nan, SKIP_WRITE_BARRIER);
    set_sec(nan, SKIP_WRITE_BARRIER);
  } else {
    set_cache_stamp(Smi::FromInt(DateCache::kInvalidStamp), SKIP_WRITE_BARRIER);
  }
}


// Recompute and store the cached local-time fields for |local_time_ms|,
// stamping the cache with the current DateCache stamp.  All stored values
// are Smis, hence the skipped write barriers.
void JSDate::SetCachedFields(int64_t local_time_ms, DateCache* date_cache) {
  int days = DateCache::DaysFromTime(local_time_ms);
  int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
  int year, month, day;
  date_cache->YearMonthDayFromDays(days, &year, &month, &day);
  int weekday = date_cache->Weekday(days);
  int hour = time_in_day_ms / (60 * 60 * 1000);
  int min = (time_in_day_ms / (60 * 1000)) % 60;
  int sec = (time_in_day_ms / 1000) % 60;
  set_cache_stamp(date_cache->stamp());
  set_year(Smi::FromInt(year), SKIP_WRITE_BARRIER);
  set_month(Smi::FromInt(month), SKIP_WRITE_BARRIER);
  set_day(Smi::FromInt(day), SKIP_WRITE_BARRIER);
  set_weekday(Smi::FromInt(weekday), SKIP_WRITE_BARRIER);
  set_hour(Smi::FromInt(hour), SKIP_WRITE_BARRIER);
  set_min(Smi::FromInt(min), SKIP_WRITE_BARRIER);
  set_sec(Smi::FromInt(sec), SKIP_WRITE_BARRIER);
}


// Detach the buffer from its backing store.  Only buffers with external
// (embedder-owned) backing stores may be neutered.
void JSArrayBuffer::Neuter() {
  ASSERT(is_external());
  set_backing_store(NULL);
  set_byte_length(Smi::FromInt(0));
}


// Shared view-neutering: zero the view's window into the buffer.
void JSArrayBufferView::NeuterView() {
  set_byte_offset(Smi::FromInt(0));
  set_byte_length(Smi::FromInt(0));
}


void JSDataView::Neuter() {
  NeuterView();
}


void JSTypedArray::Neuter() {
  NeuterView();
  set_length(Smi::FromInt(0));
  // Swap in the canonical empty external array for this map so element
  // accesses remain well-defined after neutering.
  set_elements(GetHeap()->EmptyExternalArrayForMap(map()));
}


// Map an on-heap (fixed) typed-array elements kind to the corresponding
// external (off-heap backing store) elements kind.
static ElementsKind FixedToExternalElementsKind(ElementsKind elements_kind) {
  switch (elements_kind) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                       \
    case TYPE##_ELEMENTS: return EXTERNAL_##TYPE##_ELEMENTS;

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE

    default:
      UNREACHABLE();
      return FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND;
  }
}


// Move a typed array's elements from on-heap (fixed) storage into a newly
// allocated external JSArrayBuffer, transitioning the array's map to the
// matching external elements kind.  Returns the new buffer.
Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer(
    Handle<JSTypedArray> typed_array) {

  Handle<Map> map(typed_array->map());
  Isolate* isolate = typed_array->GetIsolate();

  ASSERT(IsFixedTypedArrayElementsKind(map->elements_kind()));

  Handle<Map> new_map = Map::TransitionElementsTo(
      map,
      FixedToExternalElementsKind(map->elements_kind()));

  Handle<JSArrayBuffer> buffer = isolate->factory()->NewJSArrayBuffer();
  Handle<FixedTypedArrayBase> fixed_typed_array(
      FixedTypedArrayBase::cast(typed_array->elements()));
  Runtime::SetupArrayBufferAllocatingData(isolate, buffer,
      fixed_typed_array->DataSize(), false);
  // Copy the existing element data into the fresh backing store.
  memcpy(buffer->backing_store(),
         fixed_typed_array->DataPtr(),
         fixed_typed_array->DataSize());
  Handle<ExternalArray> new_elements =
      isolate->factory()->NewExternalArray(
          fixed_typed_array->length(), typed_array->type(),
          static_cast<uint8_t*>(buffer->backing_store()));

  // Link buffer and typed array together before swapping in the new map
  // and elements.
  buffer->set_weak_first_view(*typed_array);
  ASSERT(typed_array->weak_next() == isolate->heap()->undefined_value());
  typed_array->set_buffer(*buffer);
  JSObject::SetMapAndElements(typed_array, new_map, new_elements);

  return buffer;
}


// Return the array's backing JSArrayBuffer, materializing one on first use.
// buffer() holds Smi 0 while the elements still live on-heap.
Handle<JSArrayBuffer> JSTypedArray::GetBuffer() {
  Handle<Object> result(buffer(), GetIsolate());
  if (*result != Smi::FromInt(0)) {
    ASSERT(IsExternalArrayElementsKind(map()->elements_kind()));
    return Handle<JSArrayBuffer>::cast(result);
  }
  Handle<JSTypedArray> self(this);
  return MaterializeArrayBuffer(self);
}


HeapType* PropertyCell::type() {
  return static_cast<HeapType*>(type_raw());
}


void PropertyCell::set_type(HeapType* type, WriteBarrierMode ignored) {
  ASSERT(IsPropertyCell());
  set_type_raw(type, ignored);
}


// Compute the cell's new tracked type after |value| is stored: keep the old
// type if it already covers the value; otherwise deoptimize code that
// depends on this cell and widen the type (to the new constant type if the
// old type was trivial, else to Any).
Handle<HeapType> PropertyCell::UpdatedType(Handle<PropertyCell> cell,
                                           Handle<Object> value) {
  Isolate* isolate = cell->GetIsolate();
  Handle<HeapType> old_type(cell->type(), isolate);
  // TODO(2803): Do not track ConsString as constant because they cannot be
  // embedded into code.
  Handle<HeapType> new_type = value->IsConsString() || value->IsTheHole()
      ? HeapType::Any(isolate) : HeapType::Constant(value, isolate);

  if (new_type->Is(old_type)) {
    return old_type;
  }

  cell->dependent_code()->DeoptimizeDependentCodeGroup(
      isolate, DependentCode::kPropertyCellChangedGroup);

  if (old_type->Is(HeapType::None()) || old_type->Is(HeapType::Undefined())) {
    return new_type;
  }

  return HeapType::Any(isolate);
}


// Store |value| in the cell and update the tracked type unless the type has
// already degenerated to Any (no further narrowing possible).
void PropertyCell::SetValueInferType(Handle<PropertyCell> cell,
                                     Handle<Object> value) {
  cell->set_value(*value);
  if (!HeapType::Any()->Is(cell->type())) {
    Handle<HeapType> new_type = UpdatedType(cell, value);
    cell->set_type(*new_type);
  }
}


// static
// Record that |info|'s compilation depends on this cell's value, so the
// generated code is deoptimized when the cell changes.
void PropertyCell::AddDependentCompilationInfo(Handle<PropertyCell> cell,
                                               CompilationInfo* info) {
  Handle<DependentCode> codes =
      DependentCode::Insert(handle(cell->dependent_code(), info->isolate()),
                            DependentCode::kPropertyCellChangedGroup,
                            info->object_wrapper());
  if (*codes != cell->dependent_code()) cell->set_dependent_code(*codes);
  info->dependencies(DependentCode::kPropertyCellChangedGroup)->Add(
      cell, info->zone());
}


// Translate a BailoutReason enum value into its human-readable message via
// a macro-generated static string table.
const char* GetBailoutReason(BailoutReason reason) {
  ASSERT(reason < kLastErrorMessage);
#define ERROR_MESSAGES_TEXTS(C, T) T,
  static const char* error_messages_[] = {
      ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS)
  };
#undef ERROR_MESSAGES_TEXTS
  return error_messages_[reason];
}


} } // namespace v8::internal