// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/accessors.h"
#include "src/allocation-site-scopes.h"
#include "src/api.h"
#include "src/arguments.h"
#include "src/base/bits.h"
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/cpu-profiler.h"
#include "src/date.h"
#include "src/debug.h"
#include "src/deoptimizer.h"
#include "src/elements.h"
#include "src/execution.h"
#include "src/field-index-inl.h"
#include "src/field-index.h"
#include "src/full-codegen.h"
#include "src/heap/mark-compact.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/hydrogen.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"
#include "src/log.h"
#include "src/lookup.h"
#include "src/macro-assembler.h"
#include "src/objects-inl.h"
#include "src/prototype.h"
#include "src/safepoint-table.h"
#include "src/string-search.h"
#include "src/string-stream.h"
#include "src/utils.h"

#ifdef ENABLE_DISASSEMBLER
#include "src/disasm.h"
#include "src/disassembler.h"
#endif

namespace v8 {
namespace internal {

// Returns the narrowest HeapType usable for field-type tracking of this
// value: None for a None representation, a Class type when the value is a
// JS object with a stable map (and --track-field-types is on), Any otherwise.
Handle<HeapType> Object::OptimalType(Isolate* isolate,
                                     Representation representation) {
  if (representation.IsNone()) return HeapType::None(isolate);
  if (FLAG_track_field_types) {
    if (representation.IsHeapObject() && IsHeapObject()) {
      // We can track only JavaScript objects with stable maps.
      Handle<Map> map(HeapObject::cast(this)->map(), isolate);
      if (map->is_stable() &&
          map->instance_type() >= FIRST_NONCALLABLE_SPEC_OBJECT_TYPE &&
          map->instance_type() <= LAST_NONCALLABLE_SPEC_OBJECT_TYPE) {
        return HeapType::Class(map, isolate);
      }
    }
  }
  return HeapType::Any(isolate);
}


// ES ToObject: wraps primitive numbers, booleans, strings and symbols in the
// corresponding wrapper object taken from the given native context. Returns
// an empty MaybeHandle for values without a wrapper (undefined/null).
MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
                                         Handle<Object> object,
                                         Handle<Context> native_context) {
  if (object->IsJSReceiver()) return Handle<JSReceiver>::cast(object);
  Handle<JSFunction> constructor;
  if (object->IsNumber()) {
    constructor = handle(native_context->number_function(), isolate);
  } else if (object->IsBoolean()) {
    constructor = handle(native_context->boolean_function(), isolate);
  } else if (object->IsString()) {
    constructor = handle(native_context->string_function(), isolate);
  } else if (object->IsSymbol()) {
    constructor = handle(native_context->symbol_function(), isolate);
  } else {
    return MaybeHandle<JSReceiver>();
  }
  Handle<JSObject> result = isolate->factory()->NewJSObject(constructor);
  Handle<JSValue>::cast(result)->set_value(*object);
  return result;
}


// ES ToBoolean.
bool Object::BooleanValue() {
  if (IsBoolean()) return IsTrue();
  if (IsSmi()) return Smi::cast(this)->value() != 0;
  if (IsUndefined() || IsNull()) return false;
  if (IsUndetectableObject()) return false;  // Undetectable object is false.
  if (IsString()) return String::cast(this)->length() != 0;
  if (IsHeapNumber()) return HeapNumber::cast(this)->HeapNumberBooleanValue();
  return true;
}


// An object is callable if, after following any function-proxy call traps,
// it is a JSFunction or its map carries an instance call handler.
bool Object::IsCallable() const {
  const Object* fun = this;
  while (fun->IsJSFunctionProxy()) {
    fun = JSFunctionProxy::cast(fun)->call_trap();
  }
  return fun->IsJSFunction() ||
         (fun->IsHeapObject() &&
          HeapObject::cast(fun)->map()->has_instance_call_handler());
}


// Drives a property lookup to completion, dispatching on the iterator state:
// proxy handlers, interceptors, access checks, accessors, plain data.
// Returns undefined if the property is not found anywhere on the chain.
MaybeHandle<Object> Object::GetProperty(LookupIterator* it) {
  for (; it->IsFound(); it->Next()) {
    switch (it->state()) {
      case LookupIterator::NOT_FOUND:
      case LookupIterator::TRANSITION:
        UNREACHABLE();
      case LookupIterator::JSPROXY:
        return JSProxy::GetPropertyWithHandler(it->GetHolder<JSProxy>(),
                                               it->GetReceiver(), it->name());
      case LookupIterator::INTERCEPTOR: {
        MaybeHandle<Object> maybe_result = JSObject::GetPropertyWithInterceptor(
            it->GetHolder<JSObject>(), it->GetReceiver(), it->name());
        // A non-null result means the interceptor handled the load; a pending
        // exception must also be propagated immediately.
        if (!maybe_result.is_null()) return maybe_result;
        if (it->isolate()->has_pending_exception()) return maybe_result;
        break;
      }
      case LookupIterator::ACCESS_CHECK:
        if (it->HasAccess(v8::ACCESS_GET)) break;
        return JSObject::GetPropertyWithFailedAccessCheck(it);
      case LookupIterator::ACCESSOR:
        return GetPropertyWithAccessor(it->GetReceiver(), it->name(),
                                       it->GetHolder<JSObject>(),
                                       it->GetAccessors());
      case LookupIterator::DATA:
        return it->GetDataValue();
    }
  }
  return it->factory()->undefined_value();
}


Handle<Object> JSObject::GetDataProperty(Handle<JSObject> object,
                                         Handle<Name> key) {
  LookupIterator it(object, key,
                    LookupIterator::PROTOTYPE_CHAIN_SKIP_INTERCEPTOR);
  return GetDataProperty(&it);
}


// Like GetProperty, but only reads plain data properties: accessors and
// proxies yield undefined instead of running JavaScript.
Handle<Object> JSObject::GetDataProperty(LookupIterator* it) {
  for (; it->IsFound(); it->Next()) {
    switch (it->state()) {
      case LookupIterator::INTERCEPTOR:
      case
      LookupIterator::NOT_FOUND:
      case LookupIterator::TRANSITION:
        UNREACHABLE();
      case LookupIterator::ACCESS_CHECK:
        if (it->HasAccess(v8::ACCESS_GET)) continue;
      // Fall through.
      case LookupIterator::JSPROXY:
        it->NotFound();
        return it->isolate()->factory()->undefined_value();
      case LookupIterator::ACCESSOR:
        // TODO(verwaest): For now this doesn't call into
        // ExecutableAccessorInfo, since clients don't need it. Update once
        // relevant.
        it->NotFound();
        return it->isolate()->factory()->undefined_value();
      case LookupIterator::DATA:
        return it->GetDataValue();
    }
  }
  return it->isolate()->factory()->undefined_value();
}


// Extracts an int32 if the value is a Smi, or a HeapNumber that round-trips
// through int32 exactly. Returns false otherwise (no ES ToInt32 truncation).
bool Object::ToInt32(int32_t* value) {
  if (IsSmi()) {
    *value = Smi::cast(this)->value();
    return true;
  }
  if (IsHeapNumber()) {
    double num = HeapNumber::cast(this)->value();
    if (FastI2D(FastD2I(num)) == num) {
      *value = FastD2I(num);
      return true;
    }
  }
  return false;
}


// Extracts a uint32 if the value is a non-negative Smi, or a HeapNumber that
// round-trips through uint32 exactly. Returns false otherwise.
bool Object::ToUint32(uint32_t* value) {
  if (IsSmi()) {
    int num = Smi::cast(this)->value();
    if (num >= 0) {
      *value = static_cast<uint32_t>(num);
      return true;
    }
  }
  if (IsHeapNumber()) {
    double num = HeapNumber::cast(this)->value();
    if (num >= 0 && FastUI2D(FastD2UI(num)) == num) {
      *value = FastD2UI(num);
      return true;
    }
  }
  return false;
}


// True if the object was instantiated (directly or via inheritance) from
// this function template.
bool FunctionTemplateInfo::IsTemplateFor(Object* object) {
  if (!object->IsHeapObject()) return false;
  return IsTemplateFor(HeapObject::cast(object)->map());
}


bool FunctionTemplateInfo::IsTemplateFor(Map* map) {
  // There is a constraint on the object; check.
  if (!map->IsJSObjectMap()) return false;
  // Fetch the constructor function of the object.
  Object* cons_obj = map->constructor();
  if (!cons_obj->IsJSFunction()) return false;
  JSFunction* fun = JSFunction::cast(cons_obj);
  // Iterate through the chain of inheriting function templates to
  // see if the required one occurs.
  for (Object* type = fun->shared()->function_data();
       type->IsFunctionTemplateInfo();
       type = FunctionTemplateInfo::cast(type)->parent_template()) {
    if (type == this) return true;
  }
  // Didn't find the required type in the inheritance chain.
  return false;
}


// Casts a raw pointer to To* after asserting it is suitably aligned for To.
template<typename To>
static inline To* CheckedCast(void *from) {
  uintptr_t temp = reinterpret_cast<uintptr_t>(from);
  DCHECK(temp % sizeof(To) == 0);
  return reinterpret_cast<To*>(temp);
}


// Masked comparison of a 1-, 2- or 4-byte field at |ptr| against the
// descriptor's compare value; returns a boolean heap object.
static Handle<Object> PerformCompare(const BitmaskCompareDescriptor& descriptor,
                                     char* ptr,
                                     Isolate* isolate) {
  uint32_t bitmask = descriptor.bitmask;
  uint32_t compare_value = descriptor.compare_value;
  uint32_t value;
  switch (descriptor.size) {
    case 1:
      value = static_cast<uint32_t>(*CheckedCast<uint8_t>(ptr));
      compare_value &= 0xff;
      bitmask &= 0xff;
      break;
    case 2:
      value = static_cast<uint32_t>(*CheckedCast<uint16_t>(ptr));
      compare_value &= 0xffff;
      bitmask &= 0xffff;
      break;
    case 4:
      value = *CheckedCast<uint32_t>(ptr);
      break;
    default:
      UNREACHABLE();
      return isolate->factory()->undefined_value();
  }
  return isolate->factory()->ToBoolean(
      (bitmask & value) == (bitmask & compare_value));
}


// Pointer-equality comparison of the word at |ptr| against the descriptor's
// compare value; returns a boolean heap object.
static Handle<Object> PerformCompare(const PointerCompareDescriptor& descriptor,
                                     char* ptr,
                                     Isolate* isolate) {
  uintptr_t compare_value =
      reinterpret_cast<uintptr_t>(descriptor.compare_value);
  uintptr_t value = *CheckedCast<uintptr_t>(ptr);
  return isolate->factory()->ToBoolean(compare_value == value);
}


// Reads a primitive of the descriptor's data type from |ptr| and boxes it as
// a number or boolean heap object.
static Handle<Object> GetPrimitiveValue(
    const PrimitiveValueDescriptor&
        descriptor,
    char* ptr,
    Isolate* isolate) {
  int32_t int32_value = 0;
  switch (descriptor.data_type) {
    case kDescriptorInt8Type:
      int32_value = *CheckedCast<int8_t>(ptr);
      break;
    case kDescriptorUint8Type:
      int32_value = *CheckedCast<uint8_t>(ptr);
      break;
    case kDescriptorInt16Type:
      int32_value = *CheckedCast<int16_t>(ptr);
      break;
    case kDescriptorUint16Type:
      int32_value = *CheckedCast<uint16_t>(ptr);
      break;
    case kDescriptorInt32Type:
      int32_value = *CheckedCast<int32_t>(ptr);
      break;
    case kDescriptorUint32Type: {
      uint32_t value = *CheckedCast<uint32_t>(ptr);
      // Boxing may allocate; re-enable GC just for the factory call.
      AllowHeapAllocation allow_gc;
      return isolate->factory()->NewNumberFromUint(value);
    }
    case kDescriptorBoolType: {
      uint8_t byte = *CheckedCast<uint8_t>(ptr);
      return isolate->factory()->ToBoolean(
          byte & (0x1 << descriptor.bool_offset));
    }
    case kDescriptorFloatType: {
      float value = *CheckedCast<float>(ptr);
      AllowHeapAllocation allow_gc;
      return isolate->factory()->NewNumber(value);
    }
    case kDescriptorDoubleType: {
      double value = *CheckedCast<double>(ptr);
      AllowHeapAllocation allow_gc;
      return isolate->factory()->NewNumber(value);
    }
  }
  AllowHeapAllocation allow_gc;
  return isolate->factory()->NewNumberFromInt(int32_value);
}


// Evaluates a declared-accessor descriptor program against |receiver|:
// follows pointer/field dereferences and shifts as instructed by the
// descriptor stream until a terminal op produces the result value.
static Handle<Object> GetDeclaredAccessorProperty(
    Handle<Object> receiver,
    Handle<DeclaredAccessorInfo> info,
    Isolate* isolate) {
  DisallowHeapAllocation no_gc;
  char* current = reinterpret_cast<char*>(*receiver);
  DeclaredAccessorDescriptorIterator iterator(info->descriptor());
  while (true) {
    const DeclaredAccessorDescriptorData* data = iterator.Next();
    switch (data->type) {
      case kDescriptorReturnObject: {
        DCHECK(iterator.Complete());
        current = *CheckedCast<char*>(current);
        return handle(*CheckedCast<Object*>(current), isolate);
      }
      case kDescriptorPointerDereference:
        DCHECK(!iterator.Complete());
        current = *reinterpret_cast<char**>(current);
        break;
      case kDescriptorPointerShift:
        DCHECK(!iterator.Complete());
        current += data->pointer_shift_descriptor.byte_offset;
        break;
      case kDescriptorObjectDereference: {
        DCHECK(!iterator.Complete());
        Object* object = CheckedCast<Object>(current);
        int field = data->object_dereference_descriptor.internal_field;
        Object* smi = JSObject::cast(object)->GetInternalField(field);
        DCHECK(smi->IsSmi());
        current = reinterpret_cast<char*>(smi);
        break;
      }
      case kDescriptorBitmaskCompare:
        DCHECK(iterator.Complete());
        return PerformCompare(data->bitmask_compare_descriptor,
                              current,
                              isolate);
      case kDescriptorPointerCompare:
        DCHECK(iterator.Complete());
        return PerformCompare(data->pointer_compare_descriptor,
                              current,
                              isolate);
      case kDescriptorPrimitiveValue:
        DCHECK(iterator.Complete());
        return GetPrimitiveValue(data->primitive_value_descriptor,
                                 current,
                                 isolate);
    }
  }
  UNREACHABLE();
  return isolate->factory()->undefined_value();
}


// Returns the object's elements as a writable FixedArray, copying them out
// of the shared copy-on-write array first when necessary.
Handle<FixedArray> JSObject::EnsureWritableFastElements(
    Handle<JSObject> object) {
  DCHECK(object->HasFastSmiOrObjectElements());
  Isolate* isolate = object->GetIsolate();
  Handle<FixedArray> elems(FixedArray::cast(object->elements()), isolate);
  if (elems->map() != isolate->heap()->fixed_cow_array_map()) return elems;
  Handle<FixedArray> writable_elems = isolate->factory()->CopyFixedArrayWithMap(
      elems, isolate->factory()->fixed_array_map());
  object->set_elements(*writable_elems);
  isolate->counters()->cow_arrays_converted()->Increment();
  return writable_elems;
}


// Loads a named property through the proxy's "get" trap (or the derived
// default trap).
MaybeHandle<Object> JSProxy::GetPropertyWithHandler(Handle<JSProxy> proxy,
                                                    Handle<Object> receiver,
                                                    Handle<Name> name) {
  Isolate* isolate = proxy->GetIsolate();

  // TODO(rossberg): adjust once there is a story
  // for symbols vs proxies.
  if (name->IsSymbol()) return isolate->factory()->undefined_value();

  Handle<Object> args[] = { receiver, name };
  return CallTrap(
      proxy, "get", isolate->derived_get_trap(), arraysize(args), args);
}


// Loads a property through an accessor: API-style AccessorInfo callbacks
// (including declared accessors), or a JS getter from an AccessorPair.
MaybeHandle<Object> Object::GetPropertyWithAccessor(Handle<Object> receiver,
                                                    Handle<Name> name,
                                                    Handle<JSObject> holder,
                                                    Handle<Object> structure) {
  Isolate* isolate = name->GetIsolate();
  DCHECK(!structure->IsForeign());
  // api style callbacks.
  if (structure->IsAccessorInfo()) {
    Handle<AccessorInfo> info = Handle<AccessorInfo>::cast(structure);
    if (!info->IsCompatibleReceiver(*receiver)) {
      Handle<Object> args[2] = { name, receiver };
      THROW_NEW_ERROR(isolate,
                      NewTypeError("incompatible_method_receiver",
                                   HandleVector(args, arraysize(args))),
                      Object);
    }
    if (structure->IsDeclaredAccessorInfo()) {
      return GetDeclaredAccessorProperty(
          receiver,
          Handle<DeclaredAccessorInfo>::cast(structure),
          isolate);
    }

    Handle<ExecutableAccessorInfo> data =
        Handle<ExecutableAccessorInfo>::cast(structure);
    v8::AccessorNameGetterCallback call_fun =
        v8::ToCData<v8::AccessorNameGetterCallback>(data->getter());
    // A missing getter callback behaves like an absent property.
    if (call_fun == NULL) return isolate->factory()->undefined_value();

    LOG(isolate, ApiNamedPropertyAccess("load", *holder, *name));
    PropertyCallbackArguments args(isolate, data->data(), *receiver, *holder);
    v8::Handle<v8::Value> result =
        args.Call(call_fun, v8::Utils::ToLocal(name));
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    if (result.IsEmpty()) {
      return isolate->factory()->undefined_value();
    }
    Handle<Object> return_value = v8::Utils::OpenHandle(*result);
    return_value->VerifyApiCallResultType();
    // Rebox handle before return.
    return handle(*return_value, isolate);
  }

  // __defineGetter__ callback
  Handle<Object> getter(Handle<AccessorPair>::cast(structure)->getter(),
                        isolate);
  if (getter->IsSpecFunction()) {
    // TODO(rossberg): nicer would be to cast to some JSCallable here...
    return Object::GetPropertyWithDefinedGetter(
        receiver, Handle<JSReceiver>::cast(getter));
  }
  // Getter is not a function.
  return isolate->factory()->undefined_value();
}


// True if the receiver type expected by the AccessorInfo (if any) is a
// function template that the given type's map was instantiated from.
bool AccessorInfo::IsCompatibleReceiverType(Isolate* isolate,
                                            Handle<AccessorInfo> info,
                                            Handle<HeapType> type) {
  if (!info->HasExpectedReceiverType()) return true;
  Handle<Map> map = IC::TypeToMap(*type, isolate);
  if (!map->IsJSObjectMap()) return false;
  return FunctionTemplateInfo::cast(info->expected_receiver_type())
      ->IsTemplateFor(*map);
}


// Stores a property through an accessor: API setter callbacks or a JS setter
// from an AccessorPair. Returns |value| on success; throws a TypeError for
// an incompatible receiver or (in strict mode) a missing setter.
MaybeHandle<Object> Object::SetPropertyWithAccessor(
    Handle<Object> receiver, Handle<Name> name, Handle<Object> value,
    Handle<JSObject> holder, Handle<Object> structure, StrictMode strict_mode) {
  Isolate* isolate = name->GetIsolate();

  // We should never get here to initialize a const with the hole
  // value since a const declaration would conflict with the setter.
  DCHECK(!structure->IsForeign());
  if (structure->IsExecutableAccessorInfo()) {
    // Don't call executable accessor setters with non-JSObject receivers.
    if (!receiver->IsJSObject()) return value;
    // api style callbacks
    ExecutableAccessorInfo* info = ExecutableAccessorInfo::cast(*structure);
    if (!info->IsCompatibleReceiver(*receiver)) {
      Handle<Object> args[2] = { name, receiver };
      THROW_NEW_ERROR(isolate,
                      NewTypeError("incompatible_method_receiver",
                                   HandleVector(args, arraysize(args))),
                      Object);
    }
    Object* call_obj = info->setter();
    v8::AccessorNameSetterCallback call_fun =
        v8::ToCData<v8::AccessorNameSetterCallback>(call_obj);
    // A missing setter callback silently succeeds.
    if (call_fun == NULL) return value;
    LOG(isolate, ApiNamedPropertyAccess("store", *holder, *name));
    PropertyCallbackArguments args(isolate, info->data(), *receiver, *holder);
    args.Call(call_fun,
              v8::Utils::ToLocal(name),
              v8::Utils::ToLocal(value));
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return value;
  }

  if (structure->IsAccessorPair()) {
    Handle<Object> setter(AccessorPair::cast(*structure)->setter(), isolate);
    if (setter->IsSpecFunction()) {
      // TODO(rossberg): nicer would be to cast to some JSCallable here...
      return SetPropertyWithDefinedSetter(
          receiver, Handle<JSReceiver>::cast(setter), value);
    } else {
      if (strict_mode == SLOPPY) return value;
      Handle<Object> args[2] = { name, holder };
      THROW_NEW_ERROR(
          isolate, NewTypeError("no_setter_in_callback", HandleVector(args, 2)),
          Object);
    }
  }

  // TODO(dcarney): Handle correctly.
  if (structure->IsDeclaredAccessorInfo()) {
    return value;
  }

  UNREACHABLE();
  return MaybeHandle<Object>();
}


// Invokes a JS getter on |receiver|, notifying the debugger when step-into
// is active.
MaybeHandle<Object> Object::GetPropertyWithDefinedGetter(
    Handle<Object> receiver,
    Handle<JSReceiver> getter) {
  Isolate* isolate = getter->GetIsolate();
  Debug* debug = isolate->debug();
  // Handle stepping into a getter if step into is active.
  // TODO(rossberg): should this apply to getters that are function proxies?
  if (debug->StepInActive() && getter->IsJSFunction()) {
    debug->HandleStepIn(
        Handle<JSFunction>::cast(getter), Handle<Object>::null(), 0, false);
  }

  return Execution::Call(isolate, getter, receiver, 0, NULL, true);
}


// Invokes a JS setter on |receiver| with |value|; returns |value| unless the
// call throws.
MaybeHandle<Object> Object::SetPropertyWithDefinedSetter(
    Handle<Object> receiver,
    Handle<JSReceiver> setter,
    Handle<Object> value) {
  Isolate* isolate = setter->GetIsolate();

  Debug* debug = isolate->debug();
  // Handle stepping into a setter if step into is active.
  // TODO(rossberg): should this apply to getters that are function proxies?
  if (debug->StepInActive() && setter->IsJSFunction()) {
    debug->HandleStepIn(
        Handle<JSFunction>::cast(setter), Handle<Object>::null(), 0, false);
  }

  Handle<Object> argv[] = { value };
  RETURN_ON_EXCEPTION(isolate, Execution::Call(isolate, setter, receiver,
                                               arraysize(argv), argv, true),
                      Object);
  return value;
}


// Advances the iterator to an AccessorInfo property marked all_can_read,
// returning true if one is found.
static bool FindAllCanReadHolder(LookupIterator* it) {
  for (; it->IsFound(); it->Next()) {
    if (it->state() == LookupIterator::ACCESSOR) {
      Handle<Object> accessors = it->GetAccessors();
      if (accessors->IsAccessorInfo()) {
        if (AccessorInfo::cast(*accessors)->all_can_read()) return true;
      }
    }
  }
  return false;
}


// After a failed access check, still honors accessors marked all_can_read;
// otherwise reports the failure and returns undefined.
MaybeHandle<Object> JSObject::GetPropertyWithFailedAccessCheck(
    LookupIterator* it) {
  Handle<JSObject> checked = it->GetHolder<JSObject>();
  if (FindAllCanReadHolder(it)) {
    return GetPropertyWithAccessor(it->GetReceiver(), it->name(),
                                   it->GetHolder<JSObject>(),
                                   it->GetAccessors());
  }
  it->isolate()->ReportFailedAccessCheck(checked, v8::ACCESS_GET);
  RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(it->isolate(), Object);
  return it->factory()->undefined_value();
}


// As above, but for attribute queries: all_can_read accessors still report
// their attributes; otherwise the property appears ABSENT.
Maybe<PropertyAttributes> JSObject::GetPropertyAttributesWithFailedAccessCheck(
    LookupIterator* it) {
  Handle<JSObject> checked =
      it->GetHolder<JSObject>();
  if (FindAllCanReadHolder(it))
    return maybe(it->property_details().attributes());
  it->isolate()->ReportFailedAccessCheck(checked, v8::ACCESS_HAS);
  RETURN_VALUE_IF_SCHEDULED_EXCEPTION(it->isolate(),
                                      Maybe<PropertyAttributes>());
  return maybe(ABSENT);
}


// Advances the iterator to an AccessorInfo property marked all_can_write,
// returning true if one is found.
static bool FindAllCanWriteHolder(LookupIterator* it) {
  for (; it->IsFound(); it->Next()) {
    if (it->state() == LookupIterator::ACCESSOR) {
      Handle<Object> accessors = it->GetAccessors();
      if (accessors->IsAccessorInfo()) {
        if (AccessorInfo::cast(*accessors)->all_can_write()) return true;
      }
    }
  }
  return false;
}


// After a failed access check, still honors accessors marked all_can_write;
// otherwise reports the failure and returns |value| unchanged.
MaybeHandle<Object> JSObject::SetPropertyWithFailedAccessCheck(
    LookupIterator* it, Handle<Object> value, StrictMode strict_mode) {
  Handle<JSObject> checked = it->GetHolder<JSObject>();
  if (FindAllCanWriteHolder(it)) {
    return SetPropertyWithAccessor(it->GetReceiver(), it->name(), value,
                                   it->GetHolder<JSObject>(),
                                   it->GetAccessors(), strict_mode);
  }

  it->isolate()->ReportFailedAccessCheck(checked, v8::ACCESS_SET);
  RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(it->isolate(), Object);
  return value;
}


// Stores a property on a dictionary-mode (slow-properties) object, adding a
// new dictionary entry if needed. Global objects store their values boxed in
// PropertyCells.
void JSObject::SetNormalizedProperty(Handle<JSObject> object,
                                     Handle<Name> name,
                                     Handle<Object> value,
                                     PropertyDetails details) {
  DCHECK(!object->HasFastProperties());
  Handle<NameDictionary> property_dictionary(object->property_dictionary());

  // Dictionary keys must be unique names.
  if (!name->IsUniqueName()) {
    name = object->GetIsolate()->factory()->InternalizeString(
        Handle<String>::cast(name));
  }

  int entry = property_dictionary->FindEntry(name);
  if (entry == NameDictionary::kNotFound) {
    Handle<Object> store_value = value;
    if (object->IsGlobalObject()) {
      store_value = object->GetIsolate()->factory()->NewPropertyCell(value);
    }

    property_dictionary = NameDictionary::Add(
        property_dictionary, name, store_value,
        details);
    object->set_properties(*property_dictionary);
    return;
  }

  PropertyDetails original_details = property_dictionary->DetailsAt(entry);
  int enumeration_index;
  // Preserve the enumeration index unless the property was deleted.
  if (original_details.IsDeleted()) {
    enumeration_index = property_dictionary->NextEnumerationIndex();
    property_dictionary->SetNextEnumerationIndex(enumeration_index + 1);
  } else {
    enumeration_index = original_details.dictionary_index();
    DCHECK(enumeration_index > 0);
  }

  details = PropertyDetails(
      details.attributes(), details.type(), enumeration_index);

  if (object->IsGlobalObject()) {
    Handle<PropertyCell> cell(
        PropertyCell::cast(property_dictionary->ValueAt(entry)));
    PropertyCell::SetValueInferType(cell, value);
    // Please note we have to update the property details.
    property_dictionary->DetailsAtPut(entry, details);
  } else {
    property_dictionary->SetEntry(entry, name, value, details);
  }
}


// Removes a property from a dictionary-mode object. On global objects the
// cell is set to the hole rather than removed; FORCE_DELETION additionally
// swaps in a fresh map so ICs caching the old cell are invalidated.
Handle<Object> JSObject::DeleteNormalizedProperty(Handle<JSObject> object,
                                                  Handle<Name> name,
                                                  DeleteMode mode) {
  DCHECK(!object->HasFastProperties());
  Isolate* isolate = object->GetIsolate();
  Handle<NameDictionary> dictionary(object->property_dictionary());
  int entry = dictionary->FindEntry(name);
  if (entry != NameDictionary::kNotFound) {
    // If we have a global object set the cell to the hole.
    if (object->IsGlobalObject()) {
      PropertyDetails details = dictionary->DetailsAt(entry);
      if (!details.IsConfigurable()) {
        if (mode != FORCE_DELETION) return isolate->factory()->false_value();
        // When forced to delete global properties, we have to make a
        // map change to invalidate any ICs that think they can load
        // from the non-configurable cell without checking if it contains
        // the hole value.
        Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
        DCHECK(new_map->is_dictionary_map());
        JSObject::MigrateToMap(object, new_map);
      }
      Handle<PropertyCell> cell(PropertyCell::cast(dictionary->ValueAt(entry)));
      Handle<Object> value = isolate->factory()->the_hole_value();
      PropertyCell::SetValueInferType(cell, value);
      dictionary->DetailsAtPut(entry, details.AsDeleted());
    } else {
      Handle<Object> deleted(
          NameDictionary::DeleteProperty(dictionary, entry, mode));
      if (*deleted == isolate->heap()->true_value()) {
        // Give back unused backing-store space after a successful delete.
        Handle<NameDictionary> new_properties =
            NameDictionary::Shrink(dictionary, name);
        object->set_properties(*new_properties);
      }
      return deleted;
    }
  }
  return isolate->factory()->true_value();
}


// True if the object may have been modified after it was created from an API
// function template: different constructor/map, or slow elements/properties.
bool JSObject::IsDirty() {
  Object* cons_obj = map()->constructor();
  if (!cons_obj->IsJSFunction())
    return true;
  JSFunction* fun = JSFunction::cast(cons_obj);
  if (!fun->shared()->IsApiFunction())
    return true;
  // If the object is fully fast case and has the same map it was
  // created with then no changes can have been made to it.
  return map() != fun->initial_map()
      || !HasFastObjectElements()
      || !HasFastProperties();
}


// Looks up an indexed element along the prototype chain, handling proxies,
// access checks and indexed interceptors; returns undefined if not found.
MaybeHandle<Object> Object::GetElementWithReceiver(Isolate* isolate,
                                                   Handle<Object> object,
                                                   Handle<Object> receiver,
                                                   uint32_t index) {
  if (object->IsUndefined()) {
    // TODO(verwaest): Why is this check here?
    UNREACHABLE();
    return isolate->factory()->undefined_value();
  }

  // Iterate up the prototype chain until an element is found or the null
  // prototype is encountered.
  for (PrototypeIterator iter(isolate, object,
                              object->IsJSProxy() || object->IsJSObject()
                                  ?
                                  PrototypeIterator::START_AT_RECEIVER
                                  : PrototypeIterator::START_AT_PROTOTYPE);
       !iter.IsAtEnd(); iter.Advance()) {
    if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
      return JSProxy::GetElementWithHandler(
          Handle<JSProxy>::cast(PrototypeIterator::GetCurrent(iter)), receiver,
          index);
    }

    // Inline the case for JSObjects. Doing so significantly improves the
    // performance of fetching elements where checking the prototype chain is
    // necessary.
    Handle<JSObject> js_object =
        Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter));

    // Check access rights if needed.
    if (js_object->IsAccessCheckNeeded()) {
      if (!isolate->MayIndexedAccess(js_object, index, v8::ACCESS_GET)) {
        isolate->ReportFailedAccessCheck(js_object, v8::ACCESS_GET);
        RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
        return isolate->factory()->undefined_value();
      }
    }

    if (js_object->HasIndexedInterceptor()) {
      return JSObject::GetElementWithInterceptor(js_object, receiver, index);
    }

    if (js_object->elements() != isolate->heap()->empty_fixed_array()) {
      Handle<Object> result;
      ASSIGN_RETURN_ON_EXCEPTION(
          isolate, result,
          js_object->GetElementsAccessor()->Get(receiver, js_object, index),
          Object);
      // A hole means "not present here"; keep walking the prototype chain.
      if (!result->IsTheHole()) return result;
    }
  }

  return isolate->factory()->undefined_value();
}


// Returns the "root map" for this value: the receiver's own map for JS
// receivers, otherwise the initial map of the matching primitive wrapper
// function from the current native context (null map for everything else).
Map* Object::GetRootMap(Isolate* isolate) {
  DisallowHeapAllocation no_alloc;
  if (IsSmi()) {
    Context* context = isolate->context()->native_context();
    return context->number_function()->initial_map();
  }

  HeapObject* heap_object = HeapObject::cast(this);

  // The object is either a number, a string, a boolean,
  // a real JS object, or a Harmony proxy.
  if (heap_object->IsJSReceiver()) {
    return heap_object->map();
  }
  Context* context = isolate->context()->native_context();

  if (heap_object->IsHeapNumber()) {
    return context->number_function()->initial_map();
  }
  if (heap_object->IsString()) {
    return context->string_function()->initial_map();
  }
  if (heap_object->IsSymbol()) {
    return context->symbol_function()->initial_map();
  }
  if (heap_object->IsBoolean()) {
    return context->boolean_function()->initial_map();
  }
  return isolate->heap()->null_value()->map();
}


// Returns a Smi hash for numbers, names and oddballs; for JS receivers the
// identity hash is returned (which may not be a Smi if not yet created).
Object* Object::GetHash() {
  // The object is either a number, a name, an odd-ball,
  // a real JS object, or a Harmony proxy.
  if (IsNumber()) {
    uint32_t hash = ComputeLongHash(double_to_uint64(Number()));
    return Smi::FromInt(hash & Smi::kMaxValue);
  }
  if (IsName()) {
    uint32_t hash = Name::cast(this)->Hash();
    return Smi::FromInt(hash);
  }
  if (IsOddball()) {
    uint32_t hash = Oddball::cast(this)->to_string()->Hash();
    return Smi::FromInt(hash);
  }

  DCHECK(IsJSReceiver());
  return JSReceiver::cast(this)->GetIdentityHash();
}


// Like GetHash, but lazily creates the identity hash for JS receivers so the
// result is always a Smi.
Handle<Smi> Object::GetOrCreateHash(Isolate* isolate, Handle<Object> object) {
  Handle<Object> hash(object->GetHash(), isolate);
  if (hash->IsSmi()) return Handle<Smi>::cast(hash);

  DCHECK(object->IsJSReceiver());
  return JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver>::cast(object));
}


// ES6 SameValue: like strict equality except NaN equals NaN and +0 does not
// equal -0.
bool Object::SameValue(Object* other) {
  if (other == this) return true;

  // The object is either a number, a name, an odd-ball,
  // a real JS object, or a Harmony proxy.
  if (IsNumber() && other->IsNumber()) {
    double this_value = Number();
    double other_value = other->Number();
    bool equal = this_value == other_value;
    // SameValue(NaN, NaN) is true.
    if (!equal) return std::isnan(this_value) && std::isnan(other_value);
    // SameValue(0.0, -0.0) is false.
    // Distinguish the zeros via the sign of their reciprocals (+inf vs -inf).
    return (this_value != 0) || ((1 / this_value) == (1 / other_value));
  }
  if (IsString() && other->IsString()) {
    return String::cast(this)->Equals(String::cast(other));
  }
  return false;
}


// ES6 SameValueZero: like SameValue except +0 equals -0.
bool Object::SameValueZero(Object* other) {
  if (other == this) return true;

  // The object is either a number, a name, an odd-ball,
  // a real JS object, or a Harmony proxy.
  if (IsNumber() && other->IsNumber()) {
    double this_value = Number();
    double other_value = other->Number();
    // +0 == -0 is true
    return this_value == other_value
        || (std::isnan(this_value) && std::isnan(other_value));
  }
  if (IsString() && other->IsString()) {
    return String::cast(this)->Equals(String::cast(other));
  }
  return false;
}


// Prints a brief one-line description of this object to |out|.
void Object::ShortPrint(FILE* out) {
  OFStream os(out);
  os << Brief(this);
}


// Prints a brief one-line description of this object into |accumulator|.
void Object::ShortPrint(StringStream* accumulator) {
  OStringStream os;
  os << Brief(this);
  accumulator->Add(os.c_str());
}


OStream& operator<<(OStream& os, const Brief& v) {
  if (v.value->IsSmi()) {
    Smi::cast(v.value)->SmiPrint(os);
  } else {
    // TODO(svenpanne) Const-correct HeapObjectShortPrint!
    HeapObject* obj = const_cast<HeapObject*>(HeapObject::cast(v.value));
    obj->HeapObjectShortPrint(os);
  }
  return os;
}


void Smi::SmiPrint(OStream& os) const {  // NOLINT
  os << value();
}


// Should a word be prefixed by 'a' or 'an' in order to read naturally in
// English? Returns false for non-ASCII or words that don't start with
// a capital letter. The a/an rule follows pronunciation in English.
// We don't use the BBC's overcorrect "an historic occasion" though if
// you speak a dialect you may well say "an 'istoric occasion".
static bool AnWord(String* str) {
  if (str->length() == 0) return false;  // A nothing.
  int c0 = str->Get(0);
  int c1 = str->length() > 1 ? str->Get(1) : 0;
  if (c0 == 'U') {
    if (c1 > 'Z') {
      return true;  // An Umpire, but a UTF8String, a U.
    }
  } else if (c0 == 'A' || c0 == 'E' || c0 == 'I' || c0 == 'O') {
    return true;  // An Ape, an ABCBook.
  } else if ((c1 == 0 || (c1 >= 'A' && c1 <= 'Z')) &&
             (c0 == 'F' || c0 == 'H' || c0 == 'M' || c0 == 'N' || c0 == 'R' ||
              c0 == 'S' || c0 == 'X')) {
    return true;  // An MP3File, an M.
  }
  return false;
}


// Flattens a ConsString by copying both halves into a fresh sequential
// string (pretenured unless the cons is in new space), then rewriting the
// cons to point at the flat result with an empty second half.
Handle<String> String::SlowFlatten(Handle<ConsString> cons,
                                   PretenureFlag pretenure) {
  DCHECK(AllowHeapAllocation::IsAllowed());
  DCHECK(cons->second()->length() != 0);
  Isolate* isolate = cons->GetIsolate();
  int length = cons->length();
  PretenureFlag tenure = isolate->heap()->InNewSpace(*cons) ? pretenure
                                                            : TENURED;
  Handle<SeqString> result;
  if (cons->IsOneByteRepresentation()) {
    Handle<SeqOneByteString> flat = isolate->factory()->NewRawOneByteString(
        length, tenure).ToHandleChecked();
    DisallowHeapAllocation no_gc;
    WriteToFlat(*cons, flat->GetChars(), 0, length);
    result = flat;
  } else {
    Handle<SeqTwoByteString> flat = isolate->factory()->NewRawTwoByteString(
        length, tenure).ToHandleChecked();
    DisallowHeapAllocation no_gc;
    WriteToFlat(*cons, flat->GetChars(), 0, length);
    result = flat;
  }
  cons->set_first(*result);
  cons->set_second(isolate->heap()->empty_string());
  DCHECK(result->IsFlat());
  return result;
}



// Morphs this string in place into an external two-byte string backed by
// |resource|; returns false if the string is too small for the conversion.
bool String::MakeExternal(v8::String::ExternalStringResource* resource) {
  // Externalizing twice leaks the external resource, so it's
  // prohibited by the API.
  DCHECK(!this->IsExternalString());
#ifdef ENABLE_SLOW_DCHECKS
  if (FLAG_enable_slow_asserts) {
    // Assert that the resource and the string are equivalent.
    DCHECK(static_cast<size_t>(this->length()) == resource->length());
    ScopedVector<uc16> smart_chars(this->length());
    String::WriteToFlat(this, smart_chars.start(), 0, this->length());
    DCHECK(memcmp(smart_chars.start(),
                  resource->data(),
                  resource->length() * sizeof(smart_chars[0])) == 0);
  }
#endif  // ENABLE_SLOW_DCHECKS
  int size = this->Size();  // Byte size of the original string.
  // Abort if size does not allow in-place conversion.
  if (size < ExternalString::kShortSize) return false;
  Heap* heap = GetHeap();
  bool is_one_byte = this->IsOneByteRepresentation();
  bool is_internalized = this->IsInternalizedString();

  // Morph the string to an external string by replacing the map and
  // reinitializing the fields. This won't work if the space the existing
  // string occupies is too small for a regular external string.
  // Instead, we resort to a short external string instead, omitting
  // the field caching the address of the backing store. When we encounter
  // short external strings in generated code, we need to bailout to runtime.
  Map* new_map;
  if (size < ExternalString::kSize) {
    new_map = is_internalized
        ? (is_one_byte
           ? heap->short_external_internalized_string_with_one_byte_data_map()
           : heap->short_external_internalized_string_map())
        : (is_one_byte ? heap->short_external_string_with_one_byte_data_map()
                       : heap->short_external_string_map());
  } else {
    new_map = is_internalized
        ? (is_one_byte
           ? heap->external_internalized_string_with_one_byte_data_map()
           : heap->external_internalized_string_map())
        : (is_one_byte ? heap->external_string_with_one_byte_data_map()
                       : heap->external_string_map());
  }

  // Byte size of the external String object.
  int new_size = this->SizeFromMap(new_map);
  // External strings are smaller; fill the freed tail with a filler object
  // so the heap stays iterable.
  heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);

  // We are storing the new map using release store after creating a filler for
  // the left-over space to avoid races with the sweeper thread.
  this->synchronized_set_map(new_map);

  ExternalTwoByteString* self = ExternalTwoByteString::cast(this);
  self->set_resource(resource);
  if (is_internalized) self->Hash();  // Force regeneration of the hash value.

  heap->AdjustLiveBytes(this->address(), new_size - size, Heap::FROM_MUTATOR);
  return true;
}


// One-byte counterpart of the function above: converts this string in
// place into an external one-byte string backed by |resource|. Returns
// false if the object is too small for the conversion.
bool String::MakeExternal(v8::String::ExternalOneByteStringResource* resource) {
  // Externalizing twice leaks the external resource, so it's
  // prohibited by the API.
  DCHECK(!this->IsExternalString());
#ifdef ENABLE_SLOW_DCHECKS
  if (FLAG_enable_slow_asserts) {
    // Assert that the resource and the string are equivalent.
    DCHECK(static_cast<size_t>(this->length()) == resource->length());
    if (this->IsTwoByteRepresentation()) {
      // A two-byte string may still be externalized to one byte if all its
      // characters fit; verify that here.
      ScopedVector<uint16_t> smart_chars(this->length());
      String::WriteToFlat(this, smart_chars.start(), 0, this->length());
      DCHECK(String::IsOneByte(smart_chars.start(), this->length()));
    }
    ScopedVector<char> smart_chars(this->length());
    String::WriteToFlat(this, smart_chars.start(), 0, this->length());
    DCHECK(memcmp(smart_chars.start(),
                  resource->data(),
                  resource->length() * sizeof(smart_chars[0])) == 0);
  }
#endif  // ENABLE_SLOW_DCHECKS
  int size = this->Size();  // Byte size of the original string.
  // Abort if size does not allow in-place conversion.
  if (size < ExternalString::kShortSize) return false;
  Heap* heap = GetHeap();
  bool is_internalized = this->IsInternalizedString();

  // Morph the string to an external string by replacing the map and
  // reinitializing the fields.
  // This won't work if the space the existing
  // string occupies is too small for a regular external string.
  // Instead, we resort to a short external string instead, omitting
  // the field caching the address of the backing store. When we encounter
  // short external strings in generated code, we need to bailout to runtime.
  Map* new_map;
  if (size < ExternalString::kSize) {
    new_map = is_internalized
        ? heap->short_external_one_byte_internalized_string_map()
        : heap->short_external_one_byte_string_map();
  } else {
    new_map = is_internalized
        ? heap->external_one_byte_internalized_string_map()
        : heap->external_one_byte_string_map();
  }

  // Byte size of the external String object.
  int new_size = this->SizeFromMap(new_map);
  // Fill the freed tail of the object so the heap stays iterable.
  heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);

  // We are storing the new map using release store after creating a filler for
  // the left-over space to avoid races with the sweeper thread.
  this->synchronized_set_map(new_map);

  ExternalOneByteString* self = ExternalOneByteString::cast(this);
  self->set_resource(resource);
  if (is_internalized) self->Hash();  // Force regeneration of the hash value.

  heap->AdjustLiveBytes(this->address(), new_size - size, Heap::FROM_MUTATOR);
  return true;
}


// Writes an abbreviated, human-readable rendering of this string to the
// accumulator, escaping non-printable characters.
void String::StringShortPrint(StringStream* accumulator) {
  int len = length();
  if (len > kMaxShortPrintLength) {
    accumulator->Add("<Very long string[%u]>", len);
    return;
  }

  if (!LooksValid()) {
    accumulator->Add("<Invalid String>");
    return;
  }

  ConsStringIteratorOp op;
  StringCharacterStream stream(this, &op);

  bool truncated = false;
  // NOTE(review): this branch is unreachable -- strings longer than
  // kMaxShortPrintLength already returned above, so |truncated| is always
  // false below.
  if (len > kMaxShortPrintLength) {
    len = kMaxShortPrintLength;
    truncated = true;
  }
  bool one_byte = true;
  // First pass: decide whether the string can be emitted verbatim, i.e.
  // contains only printable ASCII.
  for (int i = 0; i < len; i++) {
    uint16_t c = stream.GetNext();

    if (c < 32 || c >= 127) {
      one_byte = false;
    }
  }
  stream.Reset(this);
  if (one_byte) {
    accumulator->Add("<String[%u]: ", length());
    for (int i = 0; i < len; i++) {
      accumulator->Put(static_cast<char>(stream.GetNext()));
    }
    accumulator->Put('>');
  } else {
    // Backslash indicates that the string contains control
    // characters and that backslashes are therefore escaped.
    accumulator->Add("<String[%u]\\: ", length());
    // Second pass: emit with C-style escapes for newlines, backslashes and
    // other non-printable characters.
    for (int i = 0; i < len; i++) {
      uint16_t c = stream.GetNext();
      if (c == '\n') {
        accumulator->Add("\\n");
      } else if (c == '\r') {
        accumulator->Add("\\r");
      } else if (c == '\\') {
        accumulator->Add("\\\\");
      } else if (c < 32 || c > 126) {
        accumulator->Add("\\x%02x", c);
      } else {
        accumulator->Put(static_cast<char>(c));
      }
    }
    if (truncated) {
      accumulator->Put('.');
      accumulator->Put('.');
      accumulator->Put('.');
    }
    accumulator->Put('>');
  }
  return;
}


// Prints characters [start, end) of this string to |os| as UC16 values.
// A negative |end| means "up to the end of the string".
void String::PrintUC16(OStream& os, int start, int end) {  // NOLINT
  if (end < 0) end = length();
  ConsStringIteratorOp op;
  StringCharacterStream stream(this, &op, start);
  for (int i = start; i < end && stream.HasMore(); i++) {
    os << AsUC16(stream.GetNext());
  }
}


// Writes a one-line description of this JSObject to the accumulator,
// specialized per instance type.
void JSObject::JSObjectShortPrint(StringStream* accumulator) {
  switch (map()->instance_type()) {
    case JS_ARRAY_TYPE: {
      double length = JSArray::cast(this)->length()->IsUndefined()
          ?
          0
          : JSArray::cast(this)->length()->Number();
      accumulator->Add("<JS Array[%u]>", static_cast<uint32_t>(length));
      break;
    }
    case JS_WEAK_MAP_TYPE: {
      accumulator->Add("<JS WeakMap>");
      break;
    }
    case JS_WEAK_SET_TYPE: {
      accumulator->Add("<JS WeakSet>");
      break;
    }
    case JS_REGEXP_TYPE: {
      accumulator->Add("<JS RegExp>");
      break;
    }
    case JS_FUNCTION_TYPE: {
      JSFunction* function = JSFunction::cast(this);
      Object* fun_name = function->shared()->DebugName();
      bool printed = false;
      if (fun_name->IsString()) {
        String* str = String::cast(fun_name);
        if (str->length() > 0) {
          accumulator->Add("<JS Function ");
          accumulator->Put(str);
          printed = true;
        }
      }
      if (!printed) {
        // Anonymous function: no name to show.
        accumulator->Add("<JS Function");
      }
      accumulator->Add(" (SharedFunctionInfo %p)",
                       reinterpret_cast<void*>(function->shared()));
      accumulator->Put('>');
      break;
    }
    case JS_GENERATOR_OBJECT_TYPE: {
      accumulator->Add("<JS Generator>");
      break;
    }
    case JS_MODULE_TYPE: {
      accumulator->Add("<JS Module>");
      break;
    }
    // All other JSObjects are rather similar to each other (JSObject,
    // JSGlobalProxy, JSGlobalObject, JSUndetectableObject, JSValue).
    default: {
      Map* map_of_this = map();
      Heap* heap = GetHeap();
      Object* constructor = map_of_this->constructor();
      bool printed = false;
      // Defensive: the constructor slot may point outside the heap for
      // corrupted or partially-initialized objects.
      if (constructor->IsHeapObject() &&
          !heap->Contains(HeapObject::cast(constructor))) {
        accumulator->Add("!!!INVALID CONSTRUCTOR!!!");
      } else {
        bool global_object = IsJSGlobalProxy();
        if (constructor->IsJSFunction()) {
          if (!heap->Contains(JSFunction::cast(constructor)->shared())) {
            accumulator->Add("!!!INVALID SHARED ON CONSTRUCTOR!!!");
          } else {
            Object* constructor_name =
                JSFunction::cast(constructor)->shared()->name();
            if (constructor_name->IsString()) {
              String* str = String::cast(constructor_name);
              if (str->length() > 0) {
                bool vowel = AnWord(str);
                accumulator->Add("<%sa%s ",
                                 global_object ? "Global Object: " : "",
                                 vowel ? "n" : "");
                accumulator->Put(str);
                accumulator->Add(" with %smap %p",
                                 map_of_this->is_deprecated() ? "deprecated " : "",
                                 map_of_this);
                printed = true;
              }
            }
          }
        }
        if (!printed) {
          accumulator->Add("<JS %sObject", global_object ? "Global " : "");
        }
      }
      if (IsJSValue()) {
        accumulator->Add(" value = ");
        JSValue::cast(this)->value()->ShortPrint(accumulator);
      }
      accumulator->Put('>');
      break;
    }
  }
}


// Logs an elements-kind transition of |object| to |file| (no-op when the
// kinds are equal), including the top JavaScript frame and the old and new
// backing stores.
void JSObject::PrintElementsTransition(
    FILE* file, Handle<JSObject> object,
    ElementsKind from_kind, Handle<FixedArrayBase> from_elements,
    ElementsKind to_kind, Handle<FixedArrayBase> to_elements) {
  if (from_kind != to_kind) {
    OFStream os(file);
    os << "elements transition [" << ElementsKindToString(from_kind) << " -> "
       << ElementsKindToString(to_kind) << "] in ";
    JavaScriptFrame::PrintTop(object->GetIsolate(), file, false, true);
    PrintF(file, " for ");
    object->ShortPrint(file);
    PrintF(file, " from ");
    from_elements->ShortPrint(file);
    PrintF(file, " to ");
    to_elements->ShortPrint(file);
    PrintF(file, "\n");
  }
}


// Logs a field generalization on this map: which descriptor was modified
// and the old/new representation and field type.
void Map::PrintGeneralization(FILE* file,
                              const char* reason,
                              int modify_index,
                              int split,
                              int descriptors,
                              bool constant_to_field,
                              Representation old_representation,
                              Representation new_representation,
                              HeapType* old_field_type,
                              HeapType* new_field_type) {
  OFStream os(file);
  os << "[generalizing ";
  constructor_name()->PrintOn(file);
  os << "] ";
  Name* name = instance_descriptors()->GetKey(modify_index);
  if (name->IsString()) {
    String::cast(name)->PrintOn(file);
  } else {
    os << "{symbol " << static_cast<void*>(name) << "}";
  }
  os << ":";
  if (constant_to_field) {
    os << "c";
  } else {
    os << old_representation.Mnemonic() << "{";
    old_field_type->PrintTo(os, HeapType::SEMANTIC_DIM);
    os << "}";
  }
  os << "->" << new_representation.Mnemonic() << "{";
  new_field_type->PrintTo(os, HeapType::SEMANTIC_DIM);
  os << "} (";
  if (strlen(reason) > 0) {
    os << reason;
  } else {
    os << "+" <<
        (descriptors - split) << " maps";
  }
  os << ") [";
  JavaScriptFrame::PrintTop(GetIsolate(), file, false, true);
  os << "]\n";
}


// Logs which own properties changed representation or type when an
// instance migrated from |original_map| to |new_map|.
void JSObject::PrintInstanceMigration(FILE* file,
                                      Map* original_map,
                                      Map* new_map) {
  PrintF(file, "[migrating ");
  map()->constructor_name()->PrintOn(file);
  PrintF(file, "] ");
  DescriptorArray* o = original_map->instance_descriptors();
  DescriptorArray* n = new_map->instance_descriptors();
  for (int i = 0; i < original_map->NumberOfOwnDescriptors(); i++) {
    Representation o_r = o->GetDetails(i).representation();
    Representation n_r = n->GetDetails(i).representation();
    if (!o_r.Equals(n_r)) {
      // Representation changed, e.g. smi -> double.
      String::cast(o->GetKey(i))->PrintOn(file);
      PrintF(file, ":%s->%s ", o_r.Mnemonic(), n_r.Mnemonic());
    } else if (o->GetDetails(i).type() == CONSTANT &&
               n->GetDetails(i).type() == FIELD) {
      // Property was demoted from a constant to a regular field.
      Name* name = o->GetKey(i);
      if (name->IsString()) {
        String::cast(name)->PrintOn(file);
      } else {
        PrintF(file, "{symbol %p}", static_cast<void*>(name));
      }
      PrintF(file, " ");
    }
  }
  PrintF(file, "\n");
}


// Writes a one-line description of this heap object to |os|, dispatching
// on instance type. Defensively verifies that the object and its map lie
// inside the heap before touching them.
void HeapObject::HeapObjectShortPrint(OStream& os) {  // NOLINT
  Heap* heap = GetHeap();
  if (!heap->Contains(this)) {
    os << "!!!INVALID POINTER!!!";
    return;
  }
  if (!heap->Contains(map())) {
    os << "!!!INVALID MAP!!!";
    return;
  }

  os << this << " ";

  if (IsString()) {
    HeapStringAllocator allocator;
    StringStream accumulator(&allocator);
    String::cast(this)->StringShortPrint(&accumulator);
    os << accumulator.ToCString().get();
    return;
  }
  if (IsJSObject()) {
    HeapStringAllocator allocator;
    StringStream accumulator(&allocator);
    JSObject::cast(this)->JSObjectShortPrint(&accumulator);
    os << accumulator.ToCString().get();
    return;
  }
  switch (map()->instance_type()) {
    case MAP_TYPE:
      os << "<Map(elements=" << Map::cast(this)->elements_kind() << ")>";
      break;
    case FIXED_ARRAY_TYPE:
      os << "<FixedArray[" << FixedArray::cast(this)->length() << "]>";
      break;
    case FIXED_DOUBLE_ARRAY_TYPE:
      os << "<FixedDoubleArray[" << FixedDoubleArray::cast(this)->length()
         << "]>";
      break;
    case BYTE_ARRAY_TYPE:
      os << "<ByteArray[" << ByteArray::cast(this)->length() << "]>";
      break;
    case FREE_SPACE_TYPE:
      os << "<FreeSpace[" << FreeSpace::cast(this)->Size() << "]>";
      break;
// Emits a case pair (external and fixed variants) per typed-array
// element type.
#define TYPED_ARRAY_SHORT_PRINT(Type, type, TYPE, ctype, size)                \
  case EXTERNAL_##TYPE##_ARRAY_TYPE:                                          \
    os << "<External" #Type "Array["                                          \
       << External##Type##Array::cast(this)->length() << "]>";                \
    break;                                                                    \
  case FIXED_##TYPE##_ARRAY_TYPE:                                             \
    os << "<Fixed" #Type "Array[" << Fixed##Type##Array::cast(this)->length() \
       << "]>";                                                               \
    break;

    TYPED_ARRAYS(TYPED_ARRAY_SHORT_PRINT)
#undef TYPED_ARRAY_SHORT_PRINT

    case SHARED_FUNCTION_INFO_TYPE: {
      SharedFunctionInfo* shared = SharedFunctionInfo::cast(this);
      SmartArrayPointer<char> debug_name =
          shared->DebugName()->ToCString();
      if (debug_name[0] != 0) {
        os << "<SharedFunctionInfo " << debug_name.get() << ">";
      } else {
        os << "<SharedFunctionInfo>";
      }
      break;
    }
    case JS_MESSAGE_OBJECT_TYPE:
      os << "<JSMessageObject>";
      break;
#define MAKE_STRUCT_CASE(NAME, Name, name) \
  case NAME##_TYPE:                        \
    os << "<" #Name ">";                   \
    break;
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    case CODE_TYPE: {
      Code* code = Code::cast(this);
      os << "<Code: " << Code::Kind2String(code->kind()) << ">";
      break;
    }
    case ODDBALL_TYPE: {
      if (IsUndefined()) {
        os << "<undefined>";
      } else if (IsTheHole()) {
        os << "<the hole>";
      } else if (IsNull()) {
        os << "<null>";
      } else if (IsTrue()) {
        os << "<true>";
      } else if (IsFalse()) {
        os << "<false>";
      } else {
        os << "<Odd Oddball>";
      }
      break;
    }
    case SYMBOL_TYPE: {
      Symbol* symbol = Symbol::cast(this);
      os << "<Symbol: " << symbol->Hash();
      if (!symbol->name()->IsUndefined()) {
        os << " ";
        HeapStringAllocator allocator;
        StringStream accumulator(&allocator);
        String::cast(symbol->name())->StringShortPrint(&accumulator);
        os << accumulator.ToCString().get();
      }
      os << ">";
      break;
    }
    case HEAP_NUMBER_TYPE: {
      os << "<Number: ";
      HeapNumber::cast(this)->HeapNumberPrint(os);
      os << ">";
      break;
    }
    case MUTABLE_HEAP_NUMBER_TYPE: {
      os << "<MutableNumber: ";
      HeapNumber::cast(this)->HeapNumberPrint(os);
      os << '>';
      break;
    }
    case JS_PROXY_TYPE:
      os << "<JSProxy>";
      break;
    case JS_FUNCTION_PROXY_TYPE:
      os << "<JSFunctionProxy>";
      break;
    case FOREIGN_TYPE:
      os << "<Foreign>";
      break;
    case CELL_TYPE: {
      os << "Cell for ";
      HeapStringAllocator allocator;
      StringStream accumulator(&allocator);
      Cell::cast(this)->value()->ShortPrint(&accumulator);
      os << accumulator.ToCString().get();
      break;
    }
    case PROPERTY_CELL_TYPE: {
      os << "PropertyCell for ";
      HeapStringAllocator allocator;
      StringStream accumulator(&allocator);
      PropertyCell::cast(this)->value()->ShortPrint(&accumulator);
      os << accumulator.ToCString().get();
      break;
    }
    default:
      os << "<Other heap object (" << map()->instance_type() << ")>";
      break;
  }
}


// Visits the map pointer, then the object's body.
void HeapObject::Iterate(ObjectVisitor* v) {
  // Handle header
  IteratePointer(v, kMapOffset);
  // Handle object body
  Map* m = map();
  IterateBody(m->instance_type(), SizeFromMap(m), v);
}


// Visits the pointer fields of an object's body, dispatching on the
// (pre-fetched) instance type and object size.
void HeapObject::IterateBody(InstanceType type, int object_size,
                             ObjectVisitor* v) {
  // Avoiding <Type>::cast(this)
  // because it accesses the map pointer field.
  // During GC, the map pointer field is encoded.
  if (type < FIRST_NONSTRING_TYPE) {
    // Strings dispatch on their representation tag rather than on the full
    // instance type; sequential strings have no pointer fields to visit.
    switch (type & kStringRepresentationMask) {
      case kSeqStringTag:
        break;
      case kConsStringTag:
        ConsString::BodyDescriptor::IterateBody(this, v);
        break;
      case kSlicedStringTag:
        SlicedString::BodyDescriptor::IterateBody(this, v);
        break;
      case kExternalStringTag:
        if ((type & kStringEncodingMask) == kOneByteStringTag) {
          reinterpret_cast<ExternalOneByteString*>(this)
              ->ExternalOneByteStringIterateBody(v);
        } else {
          reinterpret_cast<ExternalTwoByteString*>(this)->
              ExternalTwoByteStringIterateBody(v);
        }
        break;
    }
    return;
  }

  switch (type) {
    case FIXED_ARRAY_TYPE:
      FixedArray::BodyDescriptor::IterateBody(this, object_size, v);
      break;
    case CONSTANT_POOL_ARRAY_TYPE:
      reinterpret_cast<ConstantPoolArray*>(this)->ConstantPoolIterateBody(v);
      break;
    case FIXED_DOUBLE_ARRAY_TYPE:
      break;
    case JS_OBJECT_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_GENERATOR_OBJECT_TYPE:
    case JS_MODULE_TYPE:
    case JS_VALUE_TYPE:
    case JS_DATE_TYPE:
    case JS_ARRAY_TYPE:
    case JS_ARRAY_BUFFER_TYPE:
    case JS_TYPED_ARRAY_TYPE:
    case JS_DATA_VIEW_TYPE:
    case JS_SET_TYPE:
    case JS_MAP_TYPE:
    case JS_SET_ITERATOR_TYPE:
    case JS_MAP_ITERATOR_TYPE:
    case JS_WEAK_MAP_TYPE:
    case JS_WEAK_SET_TYPE:
    case JS_REGEXP_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_BUILTINS_OBJECT_TYPE:
    case JS_MESSAGE_OBJECT_TYPE:
      JSObject::BodyDescriptor::IterateBody(this, object_size, v);
      break;
    case JS_FUNCTION_TYPE:
      reinterpret_cast<JSFunction*>(this)
          ->JSFunctionIterateBody(object_size, v);
      break;
    case ODDBALL_TYPE:
      Oddball::BodyDescriptor::IterateBody(this, v);
      break;
    case JS_PROXY_TYPE:
      JSProxy::BodyDescriptor::IterateBody(this, v);
      break;
    case JS_FUNCTION_PROXY_TYPE:
      JSFunctionProxy::BodyDescriptor::IterateBody(this, v);
      break;
    case FOREIGN_TYPE:
      reinterpret_cast<Foreign*>(this)->ForeignIterateBody(v);
      break;
    case MAP_TYPE:
      Map::BodyDescriptor::IterateBody(this, v);
      break;
    case CODE_TYPE:
      reinterpret_cast<Code*>(this)->CodeIterateBody(v);
      break;
    case CELL_TYPE:
      Cell::BodyDescriptor::IterateBody(this, v);
      break;
    case PROPERTY_CELL_TYPE:
      PropertyCell::BodyDescriptor::IterateBody(this, v);
      break;
    case SYMBOL_TYPE:
      Symbol::BodyDescriptor::IterateBody(this, v);
      break;

    // Types whose bodies contain no tagged pointers.
    case HEAP_NUMBER_TYPE:
    case MUTABLE_HEAP_NUMBER_TYPE:
    case FILLER_TYPE:
    case BYTE_ARRAY_TYPE:
    case FREE_SPACE_TYPE:
      break;

// Typed-array bodies contain no tagged pointers either.
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
  case EXTERNAL_##TYPE##_ARRAY_TYPE:                    \
  case FIXED_##TYPE##_ARRAY_TYPE:                       \
    break;

      TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE

    case SHARED_FUNCTION_INFO_TYPE: {
      SharedFunctionInfo::BodyDescriptor::IterateBody(this, v);
      break;
    }

#define MAKE_STRUCT_CASE(NAME, Name, name) \
  case NAME##_TYPE:
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      // All struct types share a body descriptor, except allocation sites.
      if (type == ALLOCATION_SITE_TYPE) {
        AllocationSite::BodyDescriptor::IterateBody(this, v);
      } else {
        StructBodyDescriptor::IterateBody(this, object_size, v);
      }
      break;
    default:
      PrintF("Unknown type: %d\n", type);
      UNREACHABLE();
  }
}


// ToBoolean on the stored double; delegates to DoubleToBoolean.
bool HeapNumber::HeapNumberBooleanValue() {
  return DoubleToBoolean(value());
}


void HeapNumber::HeapNumberPrint(OStream& os) {  // NOLINT
  os << value();
}


// Returns this receiver's class name: "Function" for (proxied) functions,
// the constructor's instance_class_name otherwise, or "Object" as a
// fallback.
String* JSReceiver::class_name() {
  if (IsJSFunction() || IsJSFunctionProxy()) {
    return
        GetHeap()->Function_string();
  }
  if (map()->constructor()->IsJSFunction()) {
    JSFunction* constructor = JSFunction::cast(map()->constructor());
    return String::cast(constructor->shared()->instance_class_name());
  }
  // If the constructor is not present, return "Object".
  return GetHeap()->Object_string();
}


// Returns a printable name for objects created by this map: the
// constructor's name, its inferred name, or (recursively) the prototype's
// constructor name, falling back to "Object".
String* Map::constructor_name() {
  if (constructor()->IsJSFunction()) {
    JSFunction* constructor = JSFunction::cast(this->constructor());
    String* name = String::cast(constructor->shared()->name());
    if (name->length() > 0) return name;
    String* inferred_name = constructor->shared()->inferred_name();
    if (inferred_name->length() > 0) return inferred_name;
    Object* proto = prototype();
    if (proto->IsJSObject()) return JSObject::cast(proto)->constructor_name();
  }
  // TODO(rossberg): what about proxies?
  // If the constructor is not present, return "Object".
  return GetHeap()->Object_string();
}


// Convenience forwarder to the map's constructor_name().
String* JSReceiver::constructor_name() {
  return map()->constructor_name();
}


// Copies |map| and adds a field descriptor for |name|. Returns an empty
// MaybeHandle if the descriptor array is already at capacity.
MaybeHandle<Map> Map::CopyWithField(Handle<Map> map,
                                    Handle<Name> name,
                                    Handle<HeapType> type,
                                    PropertyAttributes attributes,
                                    Representation representation,
                                    TransitionFlag flag) {
  // The property must not already exist on the map.
  DCHECK(DescriptorArray::kNotFound ==
         map->instance_descriptors()->Search(
             *name, map->NumberOfOwnDescriptors()));

  // Ensure the descriptor array does not get too big.
  if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) {
    return MaybeHandle<Map>();
  }

  Isolate* isolate = map->GetIsolate();

  // Compute the new index for new field.
  int index = map->NextFreePropertyIndex();

  if (map->instance_type() == JS_CONTEXT_EXTENSION_OBJECT_TYPE) {
    // Context extension objects get the most general representation and
    // field type: no representation/type tracking for them.
    representation = Representation::Tagged();
    type = HeapType::Any(isolate);
  }

  FieldDescriptor new_field_desc(name, index, type, attributes, representation);
  Handle<Map> new_map = Map::CopyAddDescriptor(map, &new_field_desc, flag);
  int unused_property_fields = new_map->unused_property_fields() - 1;
  if (unused_property_fields < 0) {
    // Ran out of slack: account for the chunk of kFieldsAdded property
    // slots that will be allocated in the backing store.
    unused_property_fields += JSObject::kFieldsAdded;
  }
  new_map->set_unused_property_fields(unused_property_fields);
  return new_map;
}


// Copies |map| and adds a constant-value descriptor for |name|. Returns an
// empty MaybeHandle if the descriptor array is already at capacity.
MaybeHandle<Map> Map::CopyWithConstant(Handle<Map> map,
                                       Handle<Name> name,
                                       Handle<Object> constant,
                                       PropertyAttributes attributes,
                                       TransitionFlag flag) {
  // Ensure the descriptor array does not get too big.
  if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors) {
    return MaybeHandle<Map>();
  }

  // Allocate new instance descriptors with (name, constant) added.
  ConstantDescriptor new_constant_desc(name, constant, attributes);
  return Map::CopyAddDescriptor(map, &new_constant_desc, flag);
}


// Adds a property to a dictionary-mode (slow) object. For global objects
// the value is stored in a PropertyCell, reusing an orphaned cell when one
// exists for |name|.
void JSObject::AddSlowProperty(Handle<JSObject> object,
                               Handle<Name> name,
                               Handle<Object> value,
                               PropertyAttributes attributes) {
  DCHECK(!object->HasFastProperties());
  Isolate* isolate = object->GetIsolate();
  Handle<NameDictionary> dict(object->property_dictionary());
  if (object->IsGlobalObject()) {
    // In case name is an orphaned property reuse the cell.
    int entry = dict->FindEntry(name);
    if (entry != NameDictionary::kNotFound) {
      Handle<PropertyCell> cell(PropertyCell::cast(dict->ValueAt(entry)));
      PropertyCell::SetValueInferType(cell, value);
      // Assign an enumeration index to the property and update
      // SetNextEnumerationIndex.
      int index = dict->NextEnumerationIndex();
      PropertyDetails details = PropertyDetails(attributes, NORMAL, index);
      dict->SetNextEnumerationIndex(index + 1);
      dict->SetEntry(entry, name, cell, details);
      return;
    }
    Handle<PropertyCell> cell = isolate->factory()->NewPropertyCell(value);
    PropertyCell::SetValueInferType(cell, value);
    // The dictionary stores the cell, not the raw value.
    value = cell;
  }
  PropertyDetails details = PropertyDetails(attributes, NORMAL, 0);
  Handle<NameDictionary> result =
      NameDictionary::Add(dict, name, value, details);
  // Add may have reallocated the dictionary; install the new one.
  if (*dict != *result) object->set_properties(*result);
}


// Returns the native context of the function that created this object.
Context* JSObject::GetCreationContext() {
  Object* constructor = this->map()->constructor();
  JSFunction* function;
  if (!constructor->IsJSFunction()) {
    // Functions have null as a constructor,
    // but any JSFunction knows its context immediately.
    function = JSFunction::cast(this);
  } else {
    function = JSFunction::cast(constructor);
  }

  return function->context()->native_context();
}


// Delivers an Object.observe change record of kind |type_str| for |name|
// on |object| to the observer notification machinery.
void JSObject::EnqueueChangeRecord(Handle<JSObject> object,
                                   const char* type_str,
                                   Handle<Name> name,
                                   Handle<Object> old_value) {
  DCHECK(!object->IsJSGlobalProxy());
  DCHECK(!object->IsJSGlobalObject());
  Isolate* isolate = object->GetIsolate();
  HandleScope scope(isolate);
  Handle<String> type = isolate->factory()->InternalizeUtf8String(type_str);
  Handle<Object> args[] = { type, object, name, old_value };
  // Pass only the arguments that are present: a null name means just
  // (type, object); a hole old_value means there was no previous value.
  int argc = name.is_null() ? 2 : old_value->IsTheHole() ?
      3 : 4;

  Execution::Call(isolate,
                  Handle<JSFunction>(isolate->observers_notify_change()),
                  isolate->factory()->undefined_value(),
                  argc, args).Assert();
}


// Single-character mnemonic for this representation, used in debug and
// trace output.
const char* Representation::Mnemonic() const {
  switch (kind_) {
    case kNone: return "v";
    case kTagged: return "t";
    case kSmi: return "s";
    case kDouble: return "d";
    case kInteger32: return "i";
    case kHeapObject: return "h";
    case kExternal: return "x";
    default:
      UNREACHABLE();
      return NULL;
  }
}


// Returns true if instances of this map must be rewritten (rather than
// merely having their map pointer swapped) to migrate to |target|. Also
// reports the current field count through |old_number_of_fields|.
bool Map::InstancesNeedRewriting(Map* target, int target_number_of_fields,
                                 int target_inobject, int target_unused,
                                 int* old_number_of_fields) {
  // If fields were added (or removed), rewrite the instance.
  *old_number_of_fields = NumberOfFields();
  DCHECK(target_number_of_fields >= *old_number_of_fields);
  if (target_number_of_fields != *old_number_of_fields) return true;

  // If smi descriptors were replaced by double descriptors, rewrite.
  DescriptorArray* old_desc = instance_descriptors();
  DescriptorArray* new_desc = target->instance_descriptors();
  int limit = NumberOfOwnDescriptors();
  for (int i = 0; i < limit; i++) {
    if (new_desc->GetDetails(i).representation().IsDouble() !=
        old_desc->GetDetails(i).representation().IsDouble()) {
      return true;
    }
  }

  // If no fields were added, and no inobject properties were removed, setting
  // the map is sufficient.
  if (target_inobject == inobject_properties()) return false;
  // In-object slack tracking may have reduced the object size of the new map.
  // In that case, succeed if all existing fields were inobject, and they still
  // fit within the new inobject size.
  DCHECK(target_inobject < inobject_properties());
  if (target_number_of_fields <= target_inobject) {
    DCHECK(target_number_of_fields + target_unused == target_inobject);
    return false;
  }
  // Otherwise, properties will need to be moved to the backing store.
  return true;
}


// Registers |child| as the elements-kind transition target of |parent|,
// keyed by the elements transition symbol.
void Map::ConnectElementsTransition(Handle<Map> parent, Handle<Map> child) {
  Isolate* isolate = parent->GetIsolate();
  Handle<Name> name = isolate->factory()->elements_transition_symbol();
  ConnectTransition(parent, child, name, FULL_TRANSITION);
}


// Migrates |object| to |new_map|, choosing between the fast->fast,
// fast->slow, and slow->slow paths based on the two maps' property
// encodings. Slow->fast is explicitly not supported here.
void JSObject::MigrateToMap(Handle<JSObject> object, Handle<Map> new_map) {
  if (object->map() == *new_map) return;
  if (object->HasFastProperties()) {
    if (!new_map->is_dictionary_map()) {
      Handle<Map> old_map(object->map());
      MigrateFastToFast(object, new_map);
      if (old_map->is_prototype_map()) {
        // Clear out the old descriptor array to avoid problems to sharing
        // the descriptor array without using an explicit.
        old_map->InitializeDescriptors(
            old_map->GetHeap()->empty_descriptor_array());
        // Ensure that no transition was inserted for prototype migrations.
        DCHECK(!old_map->HasTransitionArray());
        DCHECK(new_map->GetBackPointer()->IsUndefined());
      }
    } else {
      MigrateFastToSlow(object, new_map, 0);
    }
  } else {
    // For slow-to-fast migrations JSObject::TransformToFastProperties()
    // must be used instead.
    CHECK(new_map->is_dictionary_map());

    // Slow-to-slow migration is trivial.
    object->set_map(*new_map);
  }
}


// To migrate a fast instance to a fast map:
// - First check whether the instance needs to be rewritten. If not, simply
//   change the map.
// - Otherwise, allocate a fixed array large enough to hold all fields, in
//   addition to unused space.
// - Copy all existing properties in, in the following order: backing store
//   properties, unused fields, inobject properties.
// - If all allocation succeeded, commit the state atomically:
//   * Copy inobject properties from the backing store back into the object.
//   * Trim the difference in instance size of the object. This also cleanly
//     frees inobject properties that moved to the backing store.
//   * If there are properties left in the backing store, trim off the space
//     used to temporarily store the inobject properties.
//   * If there are properties left in the backing store, install the backing
//     store.
void JSObject::MigrateFastToFast(Handle<JSObject> object, Handle<Map> new_map) {
  Isolate* isolate = object->GetIsolate();
  Handle<Map> old_map(object->map());
  int old_number_of_fields;
  int number_of_fields = new_map->NumberOfFields();
  int inobject = new_map->inobject_properties();
  int unused = new_map->unused_property_fields();

  // Nothing to do if no functions were converted to fields and no smis were
  // converted to doubles.
  if (!old_map->InstancesNeedRewriting(*new_map, number_of_fields, inobject,
                                       unused, &old_number_of_fields)) {
    object->synchronized_set_map(*new_map);
    return;
  }

  int total_size = number_of_fields + unused;
  int external = total_size - inobject;

  // Fast path: |new_map| adds exactly one field on top of |old_map|
  // (|old_map| is the direct back pointer of |new_map|).
  if (number_of_fields != old_number_of_fields &&
      new_map->GetBackPointer() == *old_map) {
    PropertyDetails details = new_map->GetLastDescriptorDetails();

    if (old_map->unused_property_fields() > 0) {
      // The new field fits into pre-allocated slack; only a double needs a
      // freshly allocated mutable box.
      if (details.representation().IsDouble()) {
        Handle<Object> value = isolate->factory()->NewHeapNumber(0, MUTABLE);
        FieldIndex index =
            FieldIndex::ForDescriptor(*new_map, new_map->LastAdded());
        object->FastPropertyAtPut(index, *value);
      }
      object->synchronized_set_map(*new_map);
      return;
    }

    DCHECK(number_of_fields == old_number_of_fields + 1);
    // This migration is a transition from a map that has run out of property
    // space. Therefore it could be done by extending the backing store.
    Handle<FixedArray> old_storage = handle(object->properties(), isolate);
    Handle<FixedArray> new_storage =
        FixedArray::CopySize(old_storage, external);

    // Properly initialize newly added property.
    Handle<Object> value;
    if (details.representation().IsDouble()) {
      value = isolate->factory()->NewHeapNumber(0, MUTABLE);
    } else {
      value = isolate->factory()->uninitialized_value();
    }
    DCHECK(details.type() == FIELD);
    int target_index = details.field_index() - inobject;
    DCHECK(target_index >= 0);  // Must be a backing store index.
    new_storage->set(target_index, *value);

    // From here on we cannot fail and we shouldn't GC anymore.
    DisallowHeapAllocation no_allocation;

    // Set the new property value and do the map transition.
    object->set_properties(*new_storage);
    object->synchronized_set_map(*new_map);
    return;
  }

  // General path: gather all fields into a temporary array.
  // |array| layout: [0, external) holds backing-store fields,
  // [external, total_size) temporarily holds the inobject fields.
  Handle<FixedArray> array = isolate->factory()->NewFixedArray(total_size);

  Handle<DescriptorArray> old_descriptors(old_map->instance_descriptors());
  Handle<DescriptorArray> new_descriptors(new_map->instance_descriptors());
  int old_nof = old_map->NumberOfOwnDescriptors();
  int new_nof = new_map->NumberOfOwnDescriptors();

  // This method only supports generalizing instances to at least the same
  // number of properties.
  DCHECK(old_nof <= new_nof);

  // Copy over existing properties, boxing/unboxing values whose
  // representation changed between the old and new map.
  for (int i = 0; i < old_nof; i++) {
    PropertyDetails details = new_descriptors->GetDetails(i);
    if (details.type() != FIELD) continue;
    PropertyDetails old_details = old_descriptors->GetDetails(i);
    if (old_details.type() == CALLBACKS) {
      DCHECK(details.representation().IsTagged());
      continue;
    }
    DCHECK(old_details.type() == CONSTANT ||
           old_details.type() == FIELD);
    Object* raw_value = old_details.type() == CONSTANT
        ? old_descriptors->GetValue(i)
        : object->RawFastPropertyAt(FieldIndex::ForDescriptor(*old_map, i));
    Handle<Object> value(raw_value, isolate);
    if (!old_details.representation().IsDouble() &&
        details.representation().IsDouble()) {
      if (old_details.representation().IsNone()) {
        value = handle(Smi::FromInt(0), isolate);
      }
      // Tagged/smi -> double: allocate a heap-number box for the value.
      value = Object::NewStorageFor(isolate, value, details.representation());
    } else if (old_details.representation().IsDouble() &&
               !details.representation().IsDouble()) {
      // Double -> tagged: unwrap the boxed double for reading.
      value = Object::WrapForRead(isolate, value, old_details.representation());
    }
    DCHECK(!(details.representation().IsDouble() && value->IsSmi()));
    int target_index = new_descriptors->GetFieldIndex(i) - inobject;
    // A negative index denotes an inobject field; it is stored at the tail
    // of |array| (see the layout comment above).
    if (target_index < 0) target_index += total_size;
    array->set(target_index, *value);
  }

  // Initialize properties that are new in |new_map|.
  for (int i = old_nof; i < new_nof; i++) {
    PropertyDetails details = new_descriptors->GetDetails(i);
    if (details.type() != FIELD) continue;
    Handle<Object> value;
    if (details.representation().IsDouble()) {
      value = isolate->factory()->NewHeapNumber(0, MUTABLE);
    } else {
      value = isolate->factory()->uninitialized_value();
    }
    int target_index = new_descriptors->GetFieldIndex(i) - inobject;
    if (target_index < 0) target_index += total_size;
    array->set(target_index, *value);
  }

  // From here on we cannot fail and we shouldn't GC anymore.
  DisallowHeapAllocation no_allocation;

  // Copy (real) inobject properties. If necessary, stop at number_of_fields to
  // avoid overwriting |one_pointer_filler_map|.
  int limit = Min(inobject, number_of_fields);
  for (int i = 0; i < limit; i++) {
    FieldIndex index = FieldIndex::ForPropertyIndex(*new_map, i);
    object->FastPropertyAtPut(index, array->get(external + i));
  }

  Heap* heap = isolate->heap();

  // If there are properties in the new backing store, trim it to the correct
  // size and install the backing store into the object.
  if (external > 0) {
    heap->RightTrimFixedArray<Heap::FROM_MUTATOR>(*array, inobject);
    object->set_properties(*array);
  }

  // Create filler object past the new instance size.
  int new_instance_size = new_map->instance_size();
  int instance_size_delta = old_map->instance_size() - new_instance_size;
  DCHECK(instance_size_delta >= 0);

  if (instance_size_delta > 0) {
    Address address = object->address();
    heap->CreateFillerObjectAt(
        address + new_instance_size, instance_size_delta);
    heap->AdjustLiveBytes(address, -instance_size_delta, Heap::FROM_MUTATOR);
  }

  // We are storing the new map using release store after creating a filler for
  // the left-over space to avoid races with the sweeper thread.
  object->synchronized_set_map(*new_map);
}


// Generalizes the representation of the field at |modify_index| and migrates
// |object| to the resulting map.
void JSObject::GeneralizeFieldRepresentation(Handle<JSObject> object,
                                             int modify_index,
                                             Representation new_representation,
                                             Handle<HeapType> new_field_type) {
  Handle<Map> new_map = Map::GeneralizeRepresentation(
      handle(object->map()), modify_index, new_representation, new_field_type,
      FORCE_FIELD);
  MigrateToMap(object, new_map);
}


// Counts the own descriptors of this map whose type is FIELD.
int Map::NumberOfFields() {
  DescriptorArray* descriptors = instance_descriptors();
  int result = 0;
  for (int i = 0; i < NumberOfOwnDescriptors(); i++) {
    if (descriptors->GetDetails(i).type() == FIELD) result++;
  }
  return result;
}


// Returns a copy of |map| in which every descriptor is generalized to Tagged
// representation, and every FIELD descriptor gets field type Any. With
// FORCE_FIELD, additionally turns the descriptor at |modify_index| into a
// FIELD with the given |attributes|. |reason| is only used for tracing.
Handle<Map> Map::CopyGeneralizeAllRepresentations(Handle<Map> map,
                                                  int modify_index,
                                                  StoreMode store_mode,
                                                  PropertyAttributes attributes,
                                                  const char* reason) {
  Isolate* isolate = map->GetIsolate();
  Handle<Map> new_map = Copy(map);

  DescriptorArray* descriptors = new_map->instance_descriptors();
  int length = descriptors->number_of_descriptors();
  for (int i = 0; i < length; i++) {
    descriptors->SetRepresentation(i, Representation::Tagged());
    if (descriptors->GetDetails(i).type() == FIELD) {
      descriptors->SetValue(i, HeapType::Any());
    }
  }

  // Unless the instance is being migrated, ensure that modify_index is a field.
  PropertyDetails details = descriptors->GetDetails(modify_index);
  if (store_mode == FORCE_FIELD &&
      (details.type() != FIELD || details.attributes() != attributes)) {
    int field_index = details.type() == FIELD ? details.field_index()
                                              : new_map->NumberOfFields();
    FieldDescriptor d(handle(descriptors->GetKey(modify_index), isolate),
                      field_index, attributes, Representation::Tagged());
    descriptors->Replace(modify_index, &d);
    if (details.type() != FIELD) {
      // A non-field descriptor became a field: consume one unused property
      // slot, growing by kFieldsAdded when slack is exhausted.
      int unused_property_fields = new_map->unused_property_fields() - 1;
      if (unused_property_fields < 0) {
        unused_property_fields += JSObject::kFieldsAdded;
      }
      new_map->set_unused_property_fields(unused_property_fields);
    }
  } else {
    DCHECK(details.attributes() == attributes);
  }

  if (FLAG_trace_generalization) {
    HeapType* field_type = (details.type() == FIELD)
        ? map->instance_descriptors()->GetFieldType(modify_index)
        : NULL;
    map->PrintGeneralization(stdout, reason, modify_index,
                             new_map->NumberOfOwnDescriptors(),
                             new_map->NumberOfOwnDescriptors(),
                             details.type() == CONSTANT && store_mode == FORCE_FIELD,
                             details.representation(), Representation::Tagged(),
                             field_type, HeapType::Any());
  }
  return new_map;
}


// static
// Convenience overload that reuses the attributes of the descriptor at
// |modify_index|.
Handle<Map> Map::CopyGeneralizeAllRepresentations(Handle<Map> map,
                                                  int modify_index,
                                                  StoreMode store_mode,
                                                  const char* reason) {
  PropertyDetails details =
      map->instance_descriptors()->GetDetails(modify_index);
  return CopyGeneralizeAllRepresentations(map, modify_index, store_mode,
                                          details.attributes(), reason);
}


// Recursively deprecates this map and all transition targets reachable from
// it, and deoptimizes code depending on the transition group.
void Map::DeprecateTransitionTree() {
  if (is_deprecated()) return;
  if (HasTransitionArray()) {
    TransitionArray* transitions = this->transitions();
    for (int i = 0; i < transitions->number_of_transitions(); i++) {
      transitions->GetTarget(i)->DeprecateTransitionTree();
    }
  }
  deprecate();
  dependent_code()->DeoptimizeDependentCodeGroup(
      GetIsolate(), DependentCode::kTransitionGroup);
  NotifyLeafMapLayoutChange();
}


// Invalidates a transition target at |key|, and installs |new_descriptors|
// over the current instance_descriptors to ensure proper sharing of descriptor
// arrays.
void Map::DeprecateTarget(Name* key, DescriptorArray* new_descriptors) {
  if (HasTransitionArray()) {
    TransitionArray* transitions = this->transitions();
    int transition = transitions->Search(key);
    if (transition != TransitionArray::kNotFound) {
      transitions->GetTarget(transition)->DeprecateTransitionTree();
    }
  }

  // Don't overwrite the empty descriptor array.
  if (NumberOfOwnDescriptors() == 0) return;

  // Replace the shared descriptor array on this map and on every ancestor
  // (via back pointers) that shares the same array.
  DescriptorArray* to_replace = instance_descriptors();
  Map* current = this;
  GetHeap()->incremental_marking()->RecordWrites(to_replace);
  while (current->instance_descriptors() == to_replace) {
    current->SetEnumLength(kInvalidEnumCacheSentinel);
    current->set_instance_descriptors(new_descriptors);
    Object* next = current->GetBackPointer();
    if (next->IsUndefined()) break;
    current = Map::cast(next);
  }

  set_owns_descriptors(false);
}


// Follows back pointers until reaching the map that has none (the root of
// this transition tree).
Map* Map::FindRootMap() {
  Map* result = this;
  while (true) {
    Object* back = result->GetBackPointer();
    if (back->IsUndefined()) return result;
    result = Map::cast(back);
  }
}


// Starting from this root map, walks the transition tree along the keys of
// |descriptors| (from |verbatim| up to |length|) and returns the last map
// whose descriptor details still match |descriptors|.
Map* Map::FindLastMatchMap(int verbatim,
                           int length,
                           DescriptorArray* descriptors) {
  DisallowHeapAllocation no_allocation;

  // This can only be called on roots of transition trees.
  DCHECK(GetBackPointer()->IsUndefined());

  Map* current = this;

  for (int i = verbatim; i < length; i++) {
    if (!current->HasTransitionArray()) break;
    Name* name = descriptors->GetKey(i);
    TransitionArray* transitions = current->transitions();
    int transition = transitions->Search(name);
    if (transition == TransitionArray::kNotFound) break;

    Map* next = transitions->GetTarget(transition);
    DescriptorArray* next_descriptors = next->instance_descriptors();

    // Stop at the first descriptor that no longer matches the expected
    // type, attributes, representation or field type/value.
    PropertyDetails details = descriptors->GetDetails(i);
    PropertyDetails next_details = next_descriptors->GetDetails(i);
    if (details.type() != next_details.type()) break;
    if (details.attributes() != next_details.attributes()) break;
    if (!details.representation().Equals(next_details.representation())) break;
    if (next_details.type() == FIELD) {
      if (!descriptors->GetFieldType(i)->NowIs(
              next_descriptors->GetFieldType(i))) break;
    } else {
      if (descriptors->GetValue(i) != next_descriptors->GetValue(i)) break;
    }

    current = next;
  }
  return current;
}


// Returns the oldest map in the back-pointer chain that still owns at least
// |descriptor| + 1 descriptors, i.e. the map that introduced the field.
Map* Map::FindFieldOwner(int descriptor) {
  DisallowHeapAllocation no_allocation;
  DCHECK_EQ(FIELD, instance_descriptors()->GetDetails(descriptor).type());
  Map* result = this;
  while (true) {
    Object* back = result->GetBackPointer();
    if (back->IsUndefined()) break;
    Map* parent = Map::cast(back);
    if (parent->NumberOfOwnDescriptors() <= descriptor) break;
    result = parent;
  }
  return result;
}


// Installs |new_type| for the FIELD descriptor at |descriptor| on this map
// and, recursively, on all transition targets (which may share the
// descriptor array).
void Map::UpdateFieldType(int descriptor, Handle<Name> name,
                          Handle<HeapType> new_type) {
  DisallowHeapAllocation no_allocation;
  PropertyDetails details = instance_descriptors()->GetDetails(descriptor);
  if (details.type() != FIELD) return;
  if (HasTransitionArray()) {
    TransitionArray* transitions = this->transitions();
    for (int i = 0; i < transitions->number_of_transitions(); ++i) {
      transitions->GetTarget(i)->UpdateFieldType(descriptor, name, new_type);
    }
  }
  // Skip if already updated the shared descriptor.
  if (instance_descriptors()->GetFieldType(descriptor) == *new_type) return;
  FieldDescriptor d(name, instance_descriptors()->GetFieldIndex(descriptor),
                    new_type, details.attributes(), details.representation());
  instance_descriptors()->Replace(descriptor, &d);
}


// static
// Returns the most specific type that includes both |type1| and |type2|:
// one of the inputs if one subsumes the other, their union while it stays
// stable and small enough, otherwise Any.
Handle<HeapType> Map::GeneralizeFieldType(Handle<HeapType> type1,
                                          Handle<HeapType> type2,
                                          Isolate* isolate) {
  static const int kMaxClassesPerFieldType = 5;
  if (type1->NowIs(type2)) return type2;
  if (type2->NowIs(type1)) return type1;
  if (type1->NowStable() && type2->NowStable()) {
    Handle<HeapType> type = HeapType::Union(type1, type2, isolate);
    if (type->NumClasses() <= kMaxClassesPerFieldType) {
      DCHECK(type->NowStable());
      DCHECK(type1->NowIs(type));
      DCHECK(type2->NowIs(type));
      return type;
    }
  }
  return HeapType::Any(isolate);
}


// static
// Widens the field type at |modify_index| of |map| to cover |new_field_type|,
// updating the descriptor on the field's owner map and deoptimizing code that
// depends on the old field type.
void Map::GeneralizeFieldType(Handle<Map> map,
                              int modify_index,
                              Handle<HeapType> new_field_type) {
  Isolate* isolate = map->GetIsolate();

  // Check if we actually need to generalize the field type at all.
  Handle<HeapType> old_field_type(
      map->instance_descriptors()->GetFieldType(modify_index), isolate);
  if (new_field_type->NowIs(old_field_type)) {
    DCHECK(Map::GeneralizeFieldType(old_field_type,
                                    new_field_type,
                                    isolate)->NowIs(old_field_type));
    return;
  }

  // Determine the field owner.
  Handle<Map> field_owner(map->FindFieldOwner(modify_index), isolate);
  Handle<DescriptorArray> descriptors(
      field_owner->instance_descriptors(), isolate);
  DCHECK_EQ(*old_field_type, descriptors->GetFieldType(modify_index));

  // Determine the generalized new field type.
  new_field_type = Map::GeneralizeFieldType(
      old_field_type, new_field_type, isolate);

  PropertyDetails details = descriptors->GetDetails(modify_index);
  Handle<Name> name(descriptors->GetKey(modify_index));
  field_owner->UpdateFieldType(modify_index, name, new_field_type);
  field_owner->dependent_code()->DeoptimizeDependentCodeGroup(
      isolate, DependentCode::kFieldTypeGroup);

  if (FLAG_trace_generalization) {
    map->PrintGeneralization(
        stdout, "field type generalization",
        modify_index, map->NumberOfOwnDescriptors(),
        map->NumberOfOwnDescriptors(), false,
        details.representation(), details.representation(),
        *old_field_type, *new_field_type);
  }
}


// Generalize the representation of the descriptor at |modify_index|.
// This method rewrites the transition tree to reflect the new change. To avoid
// high degrees of polymorphism, and to stabilize quickly, on every rewrite
// the new type is deduced by merging the current type with any potential new
// (partial) version of the type in the transition tree.
// To do this, on each rewrite:
// - Search the root of the transition tree using FindRootMap.
// - Find |target_map|, the newest matching version of this map using the keys
//   in the |old_map|'s descriptor array to walk the transition tree.
// - Merge/generalize the descriptor array of the |old_map| and |target_map|.
// - Generalize the |modify_index| descriptor using |new_representation| and
//   |new_field_type|.
// - Walk the tree again starting from the root towards |target_map|.
//   Stop at |split_map|, the first map whose descriptor array does not match
//   the merged descriptor array.
// - If |target_map| == |split_map|, |target_map| is in the expected state.
//   Return it.
// - Otherwise, invalidate the outdated transition target from |target_map|, and
//   replace its transition tree with a new branch for the updated descriptors.
Handle<Map> Map::GeneralizeRepresentation(Handle<Map> old_map,
                                          int modify_index,
                                          Representation new_representation,
                                          Handle<HeapType> new_field_type,
                                          StoreMode store_mode) {
  Isolate* isolate = old_map->GetIsolate();

  Handle<DescriptorArray> old_descriptors(
      old_map->instance_descriptors(), isolate);
  int old_nof = old_map->NumberOfOwnDescriptors();
  PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
  Representation old_representation = old_details.representation();

  // It's fine to transition from None to anything but double without any
  // modification to the object, because the default uninitialized value for
  // representation None can be overwritten by both smi and tagged values.
  // Doubles, however, would require a box allocation.
  if (old_representation.IsNone() &&
      !new_representation.IsNone() &&
      !new_representation.IsDouble()) {
    DCHECK(old_details.type() == FIELD);
    DCHECK(old_descriptors->GetFieldType(modify_index)->NowIs(
            HeapType::None()));
    if (FLAG_trace_generalization) {
      old_map->PrintGeneralization(
          stdout, "uninitialized field",
          modify_index, old_map->NumberOfOwnDescriptors(),
          old_map->NumberOfOwnDescriptors(), false,
          old_representation, new_representation,
          old_descriptors->GetFieldType(modify_index), *new_field_type);
    }
    // Update the existing descriptor array in place; no tree rewrite needed.
    old_descriptors->SetRepresentation(modify_index, new_representation);
    old_descriptors->SetValue(modify_index, *new_field_type);
    return old_map;
  }

  // Check the state of the root map.
  Handle<Map> root_map(old_map->FindRootMap(), isolate);
  if (!old_map->EquivalentToForTransition(*root_map)) {
    return CopyGeneralizeAllRepresentations(
        old_map, modify_index, store_mode, "not equivalent");
  }
  int root_nof = root_map->NumberOfOwnDescriptors();
  if (modify_index < root_nof) {
    // The modified descriptor belongs to the root map, which cannot be
    // rewritten; bail out to full generalization unless the change fits.
    PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
    if ((old_details.type() != FIELD && store_mode == FORCE_FIELD) ||
        (old_details.type() == FIELD &&
         (!new_field_type->NowIs(old_descriptors->GetFieldType(modify_index)) ||
          !new_representation.fits_into(old_details.representation())))) {
      return CopyGeneralizeAllRepresentations(
          old_map, modify_index, store_mode, "root modification");
    }
  }

  // Walk the transition tree to find the newest compatible version of this
  // map, generalizing field types along the way.
  Handle<Map> target_map = root_map;
  for (int i = root_nof; i < old_nof; ++i) {
    int j = target_map->SearchTransition(old_descriptors->GetKey(i));
    if (j == TransitionArray::kNotFound) break;
    Handle<Map> tmp_map(target_map->GetTransition(j), isolate);
    Handle<DescriptorArray> tmp_descriptors = handle(
        tmp_map->instance_descriptors(), isolate);

    // Check if target map is incompatible.
    PropertyDetails old_details = old_descriptors->GetDetails(i);
    PropertyDetails tmp_details = tmp_descriptors->GetDetails(i);
    PropertyType old_type = old_details.type();
    PropertyType tmp_type = tmp_details.type();
    if (tmp_details.attributes() != old_details.attributes() ||
        ((tmp_type == CALLBACKS || old_type == CALLBACKS) &&
         (tmp_type != old_type ||
          tmp_descriptors->GetValue(i) != old_descriptors->GetValue(i)))) {
      return CopyGeneralizeAllRepresentations(
          old_map, modify_index, store_mode, "incompatible");
    }
    Representation old_representation = old_details.representation();
    Representation tmp_representation = tmp_details.representation();
    if (!old_representation.fits_into(tmp_representation) ||
        (!new_representation.fits_into(tmp_representation) &&
         modify_index == i)) {
      break;
    }
    if (tmp_type == FIELD) {
      // Generalize the field type as necessary.
      Handle<HeapType> old_field_type = (old_type == FIELD)
          ? handle(old_descriptors->GetFieldType(i), isolate)
          : old_descriptors->GetValue(i)->OptimalType(
              isolate, tmp_representation);
      if (modify_index == i) {
        old_field_type = GeneralizeFieldType(
            new_field_type, old_field_type, isolate);
      }
      GeneralizeFieldType(tmp_map, i, old_field_type);
    } else if (tmp_type == CONSTANT) {
      if (old_type != CONSTANT ||
          old_descriptors->GetConstant(i) != tmp_descriptors->GetConstant(i)) {
        break;
      }
    } else {
      DCHECK_EQ(tmp_type, old_type);
      DCHECK_EQ(tmp_descriptors->GetValue(i), old_descriptors->GetValue(i));
    }
    target_map = tmp_map;
  }

  // Directly change the map if the target map is more general.
  Handle<DescriptorArray> target_descriptors(
      target_map->instance_descriptors(), isolate);
  int target_nof = target_map->NumberOfOwnDescriptors();
  if (target_nof == old_nof &&
      (store_mode != FORCE_FIELD ||
       target_descriptors->GetDetails(modify_index).type() == FIELD)) {
    DCHECK(modify_index < target_nof);
    DCHECK(new_representation.fits_into(
        target_descriptors->GetDetails(modify_index).representation()));
    DCHECK(target_descriptors->GetDetails(modify_index).type() != FIELD ||
           new_field_type->NowIs(
               target_descriptors->GetFieldType(modify_index)));
    return target_map;
  }

  // Find the last compatible target map in the transition tree.
  for (int i = target_nof; i < old_nof; ++i) {
    int j = target_map->SearchTransition(old_descriptors->GetKey(i));
    if (j == TransitionArray::kNotFound) break;
    Handle<Map> tmp_map(target_map->GetTransition(j), isolate);
    Handle<DescriptorArray> tmp_descriptors(
        tmp_map->instance_descriptors(), isolate);

    // Check if target map is compatible.
    PropertyDetails old_details = old_descriptors->GetDetails(i);
    PropertyDetails tmp_details = tmp_descriptors->GetDetails(i);
    if (tmp_details.attributes() != old_details.attributes() ||
        ((tmp_details.type() == CALLBACKS || old_details.type() == CALLBACKS) &&
         (tmp_details.type() != old_details.type() ||
          tmp_descriptors->GetValue(i) != old_descriptors->GetValue(i)))) {
      return CopyGeneralizeAllRepresentations(
          old_map, modify_index, store_mode, "incompatible");
    }
    target_map = tmp_map;
  }
  target_nof = target_map->NumberOfOwnDescriptors();
  target_descriptors = handle(target_map->instance_descriptors(), isolate);

  // Allocate a new descriptor array large enough to hold the required
  // descriptors, with minimally the exact same size as the old descriptor
  // array.
  int new_slack = Max(
      old_nof, old_descriptors->number_of_descriptors()) - old_nof;
  Handle<DescriptorArray> new_descriptors = DescriptorArray::Allocate(
      isolate, old_nof, new_slack);
  DCHECK(new_descriptors->length() > target_descriptors->length() ||
         new_descriptors->NumberOfSlackDescriptors() > 0 ||
         new_descriptors->number_of_descriptors() ==
         old_descriptors->number_of_descriptors());
  DCHECK(new_descriptors->number_of_descriptors() == old_nof);

  // 0 -> |root_nof|: copy root descriptors verbatim.
  int current_offset = 0;
  for (int i = 0; i < root_nof; ++i) {
    PropertyDetails old_details = old_descriptors->GetDetails(i);
    if (old_details.type() == FIELD) current_offset++;
    Descriptor d(handle(old_descriptors->GetKey(i), isolate),
                 handle(old_descriptors->GetValue(i), isolate),
                 old_details);
    new_descriptors->Set(i, &d);
  }

  // |root_nof| -> |target_nof|: merge old and target descriptors.
  for (int i = root_nof; i < target_nof; ++i) {
    Handle<Name> target_key(target_descriptors->GetKey(i), isolate);
    PropertyDetails old_details = old_descriptors->GetDetails(i);
    PropertyDetails target_details = target_descriptors->GetDetails(i);
    target_details = target_details.CopyWithRepresentation(
        old_details.representation().generalize(
            target_details.representation()));
    if (modify_index == i) {
      target_details = target_details.CopyWithRepresentation(
          new_representation.generalize(target_details.representation()));
    }
    DCHECK_EQ(old_details.attributes(), target_details.attributes());
    if (old_details.type() == FIELD ||
        target_details.type() == FIELD ||
        (modify_index == i && store_mode == FORCE_FIELD) ||
        (target_descriptors->GetValue(i) != old_descriptors->GetValue(i))) {
      Handle<HeapType> old_field_type = (old_details.type() == FIELD)
          ? handle(old_descriptors->GetFieldType(i), isolate)
          : old_descriptors->GetValue(i)->OptimalType(
              isolate, target_details.representation());
      Handle<HeapType> target_field_type = (target_details.type() == FIELD)
          ? handle(target_descriptors->GetFieldType(i), isolate)
          : target_descriptors->GetValue(i)->OptimalType(
              isolate, target_details.representation());
      target_field_type = GeneralizeFieldType(
          target_field_type, old_field_type, isolate);
      if (modify_index == i) {
        target_field_type = GeneralizeFieldType(
            target_field_type, new_field_type, isolate);
      }
      FieldDescriptor d(target_key,
                        current_offset++,
                        target_field_type,
                        target_details.attributes(),
                        target_details.representation());
      new_descriptors->Set(i, &d);
    } else {
      DCHECK_NE(FIELD, target_details.type());
      Descriptor d(target_key,
                   handle(target_descriptors->GetValue(i), isolate),
                   target_details);
      new_descriptors->Set(i, &d);
    }
  }

  // |target_nof| -> |old_nof|: copy remaining old descriptors, generalizing
  // the one at |modify_index|.
  for (int i = target_nof; i < old_nof; ++i) {
    PropertyDetails old_details = old_descriptors->GetDetails(i);
    Handle<Name> old_key(old_descriptors->GetKey(i), isolate);
    if (modify_index == i) {
      old_details = old_details.CopyWithRepresentation(
          new_representation.generalize(old_details.representation()));
    }
    if (old_details.type() == FIELD) {
      Handle<HeapType> old_field_type(
          old_descriptors->GetFieldType(i), isolate);
      if (modify_index == i) {
        old_field_type = GeneralizeFieldType(
            old_field_type, new_field_type, isolate);
      }
      FieldDescriptor d(old_key,
                        current_offset++,
                        old_field_type,
                        old_details.attributes(),
                        old_details.representation());
      new_descriptors->Set(i, &d);
    } else {
      DCHECK(old_details.type() == CONSTANT || old_details.type() == CALLBACKS);
      if (modify_index == i && store_mode == FORCE_FIELD) {
        FieldDescriptor d(old_key,
                          current_offset++,
                          GeneralizeFieldType(
                              old_descriptors->GetValue(i)->OptimalType(
                                  isolate, old_details.representation()),
                              new_field_type, isolate),
                          old_details.attributes(),
                          old_details.representation());
        new_descriptors->Set(i, &d);
      } else {
        DCHECK_NE(FIELD, old_details.type());
        Descriptor d(old_key,
                     handle(old_descriptors->GetValue(i), isolate),
                     old_details);
        new_descriptors->Set(i, &d);
      }
    }
  }

  new_descriptors->Sort();

  DCHECK(store_mode != FORCE_FIELD ||
         new_descriptors->GetDetails(modify_index).type() == FIELD);

  Handle<Map> split_map(root_map->FindLastMatchMap(
      root_nof, old_nof, *new_descriptors), isolate);
  int split_nof = split_map->NumberOfOwnDescriptors();
  DCHECK_NE(old_nof, split_nof);

  split_map->DeprecateTarget(
      old_descriptors->GetKey(split_nof), *new_descriptors);

  if (FLAG_trace_generalization) {
    PropertyDetails old_details = old_descriptors->GetDetails(modify_index);
    PropertyDetails new_details = new_descriptors->GetDetails(modify_index);
    Handle<HeapType> old_field_type = (old_details.type() == FIELD)
        ? handle(old_descriptors->GetFieldType(modify_index), isolate)
        : HeapType::Constant(handle(old_descriptors->GetValue(modify_index),
                                    isolate), isolate);
    Handle<HeapType> new_field_type = (new_details.type() == FIELD)
        ? handle(new_descriptors->GetFieldType(modify_index), isolate)
        : HeapType::Constant(handle(new_descriptors->GetValue(modify_index),
                                    isolate), isolate);
    old_map->PrintGeneralization(
        stdout, "", modify_index, split_nof, old_nof,
        old_details.type() == CONSTANT && store_mode == FORCE_FIELD,
        old_details.representation(), new_details.representation(),
        *old_field_type, *new_field_type);
  }

  // Add missing transitions.
  Handle<Map> new_map = split_map;
  for (int i = split_nof; i < old_nof; ++i) {
    new_map = CopyInstallDescriptors(new_map, i, new_descriptors);
  }
  new_map->set_owns_descriptors(true);
  return new_map;
}


// Generalize the representation of all FIELD descriptors.
Handle<Map> Map::GeneralizeAllFieldRepresentations(
    Handle<Map> map) {
  Handle<DescriptorArray> descriptors(map->instance_descriptors());
  for (int i = 0; i < map->NumberOfOwnDescriptors(); ++i) {
    if (descriptors->GetDetails(i).type() == FIELD) {
      map = GeneralizeRepresentation(map, i, Representation::Tagged(),
                                     HeapType::Any(map->GetIsolate()),
                                     FORCE_FIELD);
    }
  }
  return map;
}


// static
// Migrates deprecated JSObjects found along the prototype chain, then tries
// to find an up-to-date version of |map| without allocating.
MaybeHandle<Map> Map::TryUpdate(Handle<Map> map) {
  Handle<Map> proto_map(map);
  while (proto_map->prototype()->IsJSObject()) {
    Handle<JSObject> holder(JSObject::cast(proto_map->prototype()));
    proto_map = Handle<Map>(holder->map());
    if (proto_map->is_deprecated() && JSObject::TryMigrateInstance(holder)) {
      proto_map = Handle<Map>(holder->map());
    }
  }
  return TryUpdateInternal(map);
}


// static
// Returns |map| itself if it is not deprecated; otherwise computes the
// up-to-date version via GeneralizeRepresentation (which may allocate).
Handle<Map> Map::Update(Handle<Map> map) {
  if (!map->is_deprecated()) return map;
  return GeneralizeRepresentation(map, 0, Representation::None(),
                                  HeapType::None(map->GetIsolate()),
                                  ALLOW_AS_CONSTANT);
}


// static
// Allocation-free attempt to find the live transition-tree map that
// corresponds to the deprecated |old_map|. Returns an empty handle if no
// compatible map exists.
MaybeHandle<Map> Map::TryUpdateInternal(Handle<Map> old_map) {
  DisallowHeapAllocation no_allocation;
  DisallowDeoptimization no_deoptimization(old_map->GetIsolate());

  if (!old_map->is_deprecated()) return old_map;

  // Check the state of the root map.
  Map* root_map = old_map->FindRootMap();
  if (!old_map->EquivalentToForTransition(root_map)) return MaybeHandle<Map>();
  int root_nof = root_map->NumberOfOwnDescriptors();

  int old_nof = old_map->NumberOfOwnDescriptors();
  DescriptorArray* old_descriptors = old_map->instance_descriptors();

  // Re-walk the transition tree along the old keys; bail out as soon as a
  // transition is missing or a descriptor is no longer compatible.
  Map* new_map = root_map;
  for (int i = root_nof; i < old_nof; ++i) {
    int j = new_map->SearchTransition(old_descriptors->GetKey(i));
    if (j == TransitionArray::kNotFound) return MaybeHandle<Map>();
    new_map = new_map->GetTransition(j);
    DescriptorArray* new_descriptors = new_map->instance_descriptors();

    PropertyDetails new_details = new_descriptors->GetDetails(i);
    PropertyDetails old_details = old_descriptors->GetDetails(i);
    if (old_details.attributes() != new_details.attributes() ||
        !old_details.representation().fits_into(new_details.representation())) {
      return MaybeHandle<Map>();
    }
    PropertyType new_type = new_details.type();
    PropertyType old_type = old_details.type();
    Object* new_value = new_descriptors->GetValue(i);
    Object* old_value = old_descriptors->GetValue(i);
    switch (new_type) {
      case FIELD:
        if ((old_type == FIELD &&
             !HeapType::cast(old_value)->NowIs(HeapType::cast(new_value))) ||
            (old_type == CONSTANT &&
             !HeapType::cast(new_value)->NowContains(old_value)) ||
            (old_type == CALLBACKS &&
             !HeapType::Any()->Is(HeapType::cast(new_value)))) {
          return MaybeHandle<Map>();
        }
        break;

      case CONSTANT:
      case CALLBACKS:
        if (old_type != new_type || old_value != new_value) {
          return MaybeHandle<Map>();
        }
        break;

      case NORMAL:
        UNREACHABLE();
    }
  }
  if (new_map->NumberOfOwnDescriptors() != old_nof) return MaybeHandle<Map>();
  return handle(new_map);
}


MaybeHandle<Object> JSObject::SetPropertyWithInterceptor(LookupIterator* it,
                                                         Handle<Object>
value) { 2775 // TODO(rossberg): Support symbols in the API. 2776 if (it->name()->IsSymbol()) return value; 2777 2778 Handle<String> name_string = Handle<String>::cast(it->name()); 2779 Handle<JSObject> holder = it->GetHolder<JSObject>(); 2780 Handle<InterceptorInfo> interceptor(holder->GetNamedInterceptor()); 2781 if (interceptor->setter()->IsUndefined()) return MaybeHandle<Object>(); 2782 2783 LOG(it->isolate(), 2784 ApiNamedPropertyAccess("interceptor-named-set", *holder, *name_string)); 2785 PropertyCallbackArguments args(it->isolate(), interceptor->data(), *holder, 2786 *holder); 2787 v8::NamedPropertySetterCallback setter = 2788 v8::ToCData<v8::NamedPropertySetterCallback>(interceptor->setter()); 2789 v8::Handle<v8::Value> result = args.Call( 2790 setter, v8::Utils::ToLocal(name_string), v8::Utils::ToLocal(value)); 2791 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(it->isolate(), Object); 2792 if (!result.IsEmpty()) return value; 2793 2794 return MaybeHandle<Object>(); 2795 } 2796 2797 2798 MaybeHandle<Object> Object::SetProperty(Handle<Object> object, 2799 Handle<Name> name, Handle<Object> value, 2800 StrictMode strict_mode, 2801 StoreFromKeyed store_mode) { 2802 LookupIterator it(object, name); 2803 return SetProperty(&it, value, strict_mode, store_mode); 2804 } 2805 2806 2807 MaybeHandle<Object> Object::SetProperty(LookupIterator* it, 2808 Handle<Object> value, 2809 StrictMode strict_mode, 2810 StoreFromKeyed store_mode) { 2811 // Make sure that the top context does not change when doing callbacks or 2812 // interceptor calls. 2813 AssertNoContextChange ncc(it->isolate()); 2814 2815 bool done = false; 2816 for (; it->IsFound(); it->Next()) { 2817 switch (it->state()) { 2818 case LookupIterator::NOT_FOUND: 2819 UNREACHABLE(); 2820 2821 case LookupIterator::ACCESS_CHECK: 2822 // TODO(verwaest): Remove the distinction. This is mostly bogus since we 2823 // don't know whether we'll want to fetch attributes or call a setter 2824 // until we find the property. 
        if (it->HasAccess(v8::ACCESS_SET)) break;
        return JSObject::SetPropertyWithFailedAccessCheck(it, value,
                                                          strict_mode);

      case LookupIterator::JSPROXY:
        if (it->HolderIsReceiverOrHiddenPrototype()) {
          return JSProxy::SetPropertyWithHandler(it->GetHolder<JSProxy>(),
                                                 it->GetReceiver(), it->name(),
                                                 value, strict_mode);
        } else {
          // TODO(verwaest): Use the MaybeHandle to indicate result.
          bool has_result = false;
          MaybeHandle<Object> maybe_result =
              JSProxy::SetPropertyViaPrototypesWithHandler(
                  it->GetHolder<JSProxy>(), it->GetReceiver(), it->name(),
                  value, strict_mode, &has_result);
          if (has_result) return maybe_result;
          // The proxy on the prototype chain did not handle the store; stop
          // walking and add the property on the receiver below.
          done = true;
        }
        break;

      case LookupIterator::INTERCEPTOR:
        if (it->HolderIsReceiverOrHiddenPrototype()) {
          MaybeHandle<Object> maybe_result =
              JSObject::SetPropertyWithInterceptor(it, value);
          if (!maybe_result.is_null()) return maybe_result;
          // An empty result plus a pending exception means the interceptor
          // threw; propagate the empty handle.
          if (it->isolate()->has_pending_exception()) return maybe_result;
        } else {
          // Interceptor on a prototype: only its reported attributes matter.
          Maybe<PropertyAttributes> maybe_attributes =
              JSObject::GetPropertyAttributesWithInterceptor(
                  it->GetHolder<JSObject>(), it->GetReceiver(), it->name());
          if (!maybe_attributes.has_value) return MaybeHandle<Object>();
          done = maybe_attributes.value != ABSENT;
          if (done && (maybe_attributes.value & READ_ONLY) != 0) {
            return WriteToReadOnlyProperty(it, value, strict_mode);
          }
        }
        break;

      case LookupIterator::ACCESSOR:
        if (it->property_details().IsReadOnly()) {
          return WriteToReadOnlyProperty(it, value, strict_mode);
        }
        if (it->HolderIsReceiverOrHiddenPrototype() ||
            !it->GetAccessors()->IsDeclaredAccessorInfo()) {
          return SetPropertyWithAccessor(it->GetReceiver(), it->name(), value,
                                         it->GetHolder<JSObject>(),
                                         it->GetAccessors(), strict_mode);
        }
        done = true;
        break;

      case LookupIterator::DATA:
        if (it->property_details().IsReadOnly()) {
          return WriteToReadOnlyProperty(it, value, strict_mode);
        }
        if (it->HolderIsReceiverOrHiddenPrototype()) {
          return SetDataProperty(it, value);
        }
        // Data property found on a prototype: shadow it with an own property.
        done = true;
        break;

      case LookupIterator::TRANSITION:
        done = true;
        break;
    }

    if (done) break;
  }

  // If the receiver is the JSGlobalObject, the store was contextual. In case
  // the property did not exist yet on the global object itself, we have to
  // throw a reference error in strict mode.
  if (it->GetReceiver()->IsJSGlobalObject() && strict_mode == STRICT) {
    Handle<Object> args[1] = {it->name()};
    THROW_NEW_ERROR(it->isolate(),
                    NewReferenceError("not_defined", HandleVector(args, 1)),
                    Object);
  }

  return AddDataProperty(it, value, NONE, strict_mode, store_mode);
}


// Store to a read-only property: silently ignored in sloppy mode (the
// unchanged |value| is returned), TypeError in strict mode. Note that
// THROW_NEW_ERROR returns, so there is no fall-through.
MaybeHandle<Object> Object::WriteToReadOnlyProperty(LookupIterator* it,
                                                    Handle<Object> value,
                                                    StrictMode strict_mode) {
  if (strict_mode != STRICT) return value;

  Handle<Object> args[] = {it->name(), it->GetReceiver()};
  THROW_NEW_ERROR(it->isolate(),
                  NewTypeError("strict_read_only_property",
                               HandleVector(args, arraysize(args))),
                  Object);
}


// Overwrites an existing own data property with |value|, migrating the
// holder's map first if needed, and emits an Object.observe "update" change
// record when the stored value actually changed.
Handle<Object> Object::SetDataProperty(LookupIterator* it,
                                       Handle<Object> value) {
  // Proxies are handled on the WithHandler path. Other non-JSObjects cannot
  // have own properties.
  Handle<JSObject> receiver = Handle<JSObject>::cast(it->GetReceiver());

  // Store on the holder which may be hidden behind the receiver.
  DCHECK(it->HolderIsReceiverOrHiddenPrototype());

  // Old value for the observation change record.
  // Fetch before transforming the object since the encoding may become
  // incompatible with what's cached in |it|.
  // Stores to the hidden-string property are never observable.
  bool is_observed =
      receiver->map()->is_observed() &&
      !it->name().is_identical_to(it->factory()->hidden_string());
  MaybeHandle<Object> maybe_old;
  if (is_observed) maybe_old = it->GetDataValue();

  // Possibly migrate to the most up-to-date map that will be able to store
  // |value| under it->name().
  it->PrepareForDataProperty(value);

  // Write the property value.
  it->WriteDataValue(value);

  // Send the change record if there are observers.
  if (is_observed && !value->SameValue(*maybe_old.ToHandleChecked())) {
    JSObject::EnqueueChangeRecord(receiver, "update", it->name(),
                                  maybe_old.ToHandleChecked());
  }

  return value;
}


// Adds a new own data property to the receiver. Throws
// "object_not_extensible" in strict mode (returns |value| silently in sloppy
// mode) when the receiver cannot be extended, and emits an Object.observe
// "add" change record for observers.
MaybeHandle<Object> Object::AddDataProperty(LookupIterator* it,
                                            Handle<Object> value,
                                            PropertyAttributes attributes,
                                            StrictMode strict_mode,
                                            StoreFromKeyed store_mode) {
  DCHECK(!it->GetReceiver()->IsJSProxy());
  if (!it->GetReceiver()->IsJSObject()) {
    // TODO(verwaest): Throw a TypeError with a more specific message.
    return WriteToReadOnlyProperty(it, value, strict_mode);
  }

  Handle<JSObject> receiver = it->GetStoreTarget();

  // If the receiver is a JSGlobalProxy, store on the prototype (JSGlobalObject)
  // instead. If the prototype is Null, the proxy is detached.
  if (receiver->IsJSGlobalProxy()) return value;

  // Possibly migrate to the most up-to-date map that will be able to store
  // |value| under it->name() with |attributes|.
  it->PrepareTransitionToDataProperty(value, attributes, store_mode);
  // No TRANSITION state here means the property cannot be added (receiver is
  // not extensible).
  if (it->state() != LookupIterator::TRANSITION) {
    if (strict_mode == SLOPPY) return value;

    Handle<Object> args[1] = {it->name()};
    THROW_NEW_ERROR(it->isolate(),
                    NewTypeError("object_not_extensible",
                                 HandleVector(args, arraysize(args))),
                    Object);
  }
  it->ApplyTransitionToDataProperty();

  // TODO(verwaest): Encapsulate dictionary handling better.
  if (receiver->map()->is_dictionary_map()) {
    // TODO(verwaest): Probably should ensure this is done beforehand.
    it->InternalizeName();
    JSObject::AddSlowProperty(receiver, it->name(), value, attributes);
  } else {
    // Write the property value.
    it->WriteDataValue(value);
  }

  // Send the change record if there are observers.
  if (receiver->map()->is_observed() &&
      !it->name().is_identical_to(it->factory()->hidden_string())) {
    JSObject::EnqueueChangeRecord(receiver, "add", it->name(),
                                  it->factory()->the_hole_value());
  }

  return value;
}


// Walks the prototype chain of |object| looking for an element accessor
// (CALLBACKS entry) or a proxy that should intercept a store to |index|.
// Sets *found accordingly; returns the hole when no prototype handled it.
MaybeHandle<Object> JSObject::SetElementWithCallbackSetterInPrototypes(
    Handle<JSObject> object,
    uint32_t index,
    Handle<Object> value,
    bool* found,
    StrictMode strict_mode) {
  Isolate *isolate = object->GetIsolate();
  for (PrototypeIterator iter(isolate, object); !iter.IsAtEnd();
       iter.Advance()) {
    if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
      return JSProxy::SetPropertyViaPrototypesWithHandler(
          Handle<JSProxy>::cast(PrototypeIterator::GetCurrent(iter)), object,
          isolate->factory()->Uint32ToString(index),  // name
          value, strict_mode, found);
    }
    Handle<JSObject> js_proto =
        Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter));
    // Only dictionary-mode elements can hold CALLBACKS entries.
    if (!js_proto->HasDictionaryElements()) {
      continue;
    }
    Handle<SeededNumberDictionary> dictionary(js_proto->element_dictionary());
    int entry
        = dictionary->FindEntry(index);
    if (entry != SeededNumberDictionary::kNotFound) {
      PropertyDetails details = dictionary->DetailsAt(entry);
      if (details.type() == CALLBACKS) {
        *found = true;
        Handle<Object> structure(dictionary->ValueAt(entry), isolate);
        return SetElementWithCallback(object, structure, index, value, js_proto,
                                      strict_mode);
      }
    }
  }
  *found = false;
  return isolate->factory()->the_hole_value();
}


// Ensures |map|'s descriptor array has room for at least |slack| additional
// descriptors, reallocating it and pushing the new array to all maps that
// share it (found via the back-pointer chain) so they stay in sync.
void Map::EnsureDescriptorSlack(Handle<Map> map, int slack) {
  // Only supports adding slack to owned descriptors.
  DCHECK(map->owns_descriptors());

  Handle<DescriptorArray> descriptors(map->instance_descriptors());
  int old_size = map->NumberOfOwnDescriptors();
  // Fast path: existing slack already suffices.
  if (slack <= descriptors->NumberOfSlackDescriptors()) return;

  Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
      descriptors, old_size, slack);

  if (old_size == 0) {
    map->set_instance_descriptors(*new_descriptors);
    return;
  }

  // If the source descriptors had an enum cache we copy it. This ensures
  // that the maps to which we push the new descriptor array back can rely
  // on a cache always being available once it is set. If the map has more
  // enumerated descriptors than available in the original cache, the cache
  // will be lazily replaced by the extended cache when needed.
  if (descriptors->HasEnumCache()) {
    new_descriptors->CopyEnumCacheFrom(*descriptors);
  }

  // Replace descriptors by new_descriptors in all maps that share it.
  // NOTE(review): notifies incremental marking about the old array before it
  // is unlinked below — presumably to keep marking invariants; confirm.
  map->GetHeap()->incremental_marking()->RecordWrites(*descriptors);

  // Walk up the back-pointer chain and swap the array in every map that
  // still shares the old descriptors.
  Map* walk_map;
  for (Object* current = map->GetBackPointer();
       !current->IsUndefined();
       current = walk_map->GetBackPointer()) {
    walk_map = Map::cast(current);
    if (walk_map->instance_descriptors() != *descriptors) break;
    walk_map->set_instance_descriptors(*new_descriptors);
  }

  map->set_instance_descriptors(*new_descriptors);
}


// Appends the AccessorInfo entries of |callbacks| to |array| (a
// DescriptorArray or FixedArray, selected by the appender policy T),
// skipping names already present. Returns the new number of valid entries.
template<class T>
static int AppendUniqueCallbacks(NeanderArray* callbacks,
                                 Handle<typename T::Array> array,
                                 int valid_descriptors) {
  int nof_callbacks = callbacks->length();

  Isolate* isolate = array->GetIsolate();
  // Ensure the keys are unique names before writing them into the
  // instance descriptor. Since it may cause a GC, it has to be done before we
  // temporarily put the heap in an invalid state while appending descriptors.
  for (int i = 0; i < nof_callbacks; ++i) {
    Handle<AccessorInfo> entry(AccessorInfo::cast(callbacks->get(i)));
    if (entry->name()->IsUniqueName()) continue;
    Handle<String> key =
        isolate->factory()->InternalizeString(
            Handle<String>(String::cast(entry->name())));
    entry->set_name(*key);
  }

  // Fill in new callback descriptors.  Process the callbacks from
  // back to front so that the last callback with a given name takes
  // precedence over previously added callbacks with that name.
  for (int i = nof_callbacks - 1; i >= 0; i--) {
    Handle<AccessorInfo> entry(AccessorInfo::cast(callbacks->get(i)));
    Handle<Name> key(Name::cast(entry->name()));
    // Check if a descriptor with this name already exists before writing.
    if (!T::Contains(key, entry, valid_descriptors, array)) {
      T::Insert(key, entry, valid_descriptors, array);
      valid_descriptors++;
    }
  }

  return valid_descriptors;
}

// Appender policy for AppendUniqueCallbacks that writes CallbacksDescriptors
// into a map's DescriptorArray.
struct DescriptorArrayAppender {
  typedef DescriptorArray Array;
  static bool Contains(Handle<Name> key,
                       Handle<AccessorInfo> entry,
                       int valid_descriptors,
                       Handle<DescriptorArray> array) {
    DisallowHeapAllocation no_gc;
    return array->Search(*key, valid_descriptors) != DescriptorArray::kNotFound;
  }
  static void Insert(Handle<Name> key,
                     Handle<AccessorInfo> entry,
                     int valid_descriptors,
                     Handle<DescriptorArray> array) {
    DisallowHeapAllocation no_gc;
    CallbacksDescriptor desc(key, entry, entry->property_attributes());
    array->Append(&desc);
  }
};


// Appender policy for AppendUniqueCallbacks that stores raw AccessorInfo
// objects into a plain FixedArray.
struct FixedArrayAppender {
  typedef FixedArray Array;
  static bool Contains(Handle<Name> key,
                       Handle<AccessorInfo> entry,
                       int valid_descriptors,
                       Handle<FixedArray> array) {
    // Linear scan: the array holds AccessorInfo objects, not descriptors.
    for (int i = 0; i < valid_descriptors; i++) {
      if (*key == AccessorInfo::cast(array->get(i))->name()) return true;
    }
    return false;
  }
  static void Insert(Handle<Name> key,
                     Handle<AccessorInfo> entry,
                     int valid_descriptors,
                     Handle<FixedArray> array) {
    DisallowHeapAllocation no_gc;
    array->set(valid_descriptors, *entry);
  }
};


// Appends the accessor callbacks in |descriptors| (a NeanderArray) to |map|'s
// own descriptors; the map must already have enough descriptor slack.
void Map::AppendCallbackDescriptors(Handle<Map> map,
                                    Handle<Object> descriptors) {
  int nof = map->NumberOfOwnDescriptors();
  Handle<DescriptorArray> array(map->instance_descriptors());
  NeanderArray callbacks(descriptors);
  DCHECK(array->NumberOfSlackDescriptors() >= callbacks.length());
  nof = AppendUniqueCallbacks<DescriptorArrayAppender>(&callbacks, array, nof);
  map->SetNumberOfOwnDescriptors(nof);
}


// Appends unique accessor callbacks to |array|; returns the new entry count.
int AccessorInfo::AppendUnique(Handle<Object> descriptors,
                               Handle<FixedArray> array,
                               int valid_descriptors) {
  NeanderArray callbacks(descriptors);
  DCHECK(array->length() >= callbacks.length() + valid_descriptors);
  return AppendUniqueCallbacks<FixedArrayAppender>(&callbacks,
                                                   array,
                                                   valid_descriptors);
}


// Returns true if |map| occurs (by handle identity) in |maps|.
static bool ContainsMap(MapHandleList* maps, Handle<Map> map) {
  DCHECK(!map.is_null());
  for (int i = 0; i < maps->length(); ++i) {
    if (!maps->at(i).is_null() && maps->at(i).is_identical_to(map)) return true;
  }
  return false;
}


// Wraps a raw pointer in a Handle, mapping NULL to the null handle.
template <class T>
static Handle<T> MaybeNull(T* p) {
  if (p == NULL) return Handle<T>::null();
  return Handle<T>(p);
}


// Finds the most general elements-kind transition of this map that is also
// present in |candidates|, only accepting packed->holey style generalization
// consistent with the starting kind. Returns the null handle if none match.
Handle<Map> Map::FindTransitionedMap(MapHandleList* candidates) {
  ElementsKind kind = elements_kind();
  Handle<Map> transitioned_map = Handle<Map>::null();
  Handle<Map> current_map(this);
  bool packed = IsFastPackedElementsKind(kind);
  if (IsTransitionableFastElementsKind(kind)) {
    while (CanTransitionToMoreGeneralFastElementsKind(kind, false)) {
      kind = GetNextMoreGeneralFastElementsKind(kind, false);
      Handle<Map> maybe_transitioned_map =
          MaybeNull(current_map->LookupElementsTransitionMap(kind));
      if (maybe_transitioned_map.is_null()) break;
      if (ContainsMap(candidates, maybe_transitioned_map) &&
          (packed || !IsFastPackedElementsKind(kind))) {
        transitioned_map = maybe_transitioned_map;
        if (!IsFastPackedElementsKind(kind)) packed = false;
      }
      current_map = maybe_transitioned_map;
    }
  }
  return transitioned_map;
}


// Follows existing elements-kind transitions from |map| as far as possible
// towards |to_kind| without creating new maps; returns the closest map found.
static Map* FindClosestElementsTransition(Map* map, ElementsKind to_kind) {
  Map* current_map = map;
  int target_kind =
      IsFastElementsKind(to_kind) || IsExternalArrayElementsKind(to_kind)
          ?
              to_kind
          : TERMINAL_FAST_ELEMENTS_KIND;

  // Support for legacy API: SetIndexedPropertiesTo{External,Pixel}Data
  // allows to change elements from arbitrary kind to any ExternalArray
  // elements kind. Satisfy its requirements, checking whether we already
  // have the cached transition.
  if (IsExternalArrayElementsKind(to_kind) &&
      !IsFixedTypedArrayElementsKind(map->elements_kind())) {
    if (map->HasElementsTransition()) {
      Map* next_map = map->elements_transition_map();
      if (next_map->elements_kind() == to_kind) return next_map;
    }
    return map;
  }

  // Follow the existing transition chain towards the target kind, stopping
  // at the last map that exists.
  ElementsKind kind = map->elements_kind();
  while (kind != target_kind) {
    kind = GetNextTransitionElementsKind(kind);
    if (!current_map->HasElementsTransition()) return current_map;
    current_map = current_map->elements_transition_map();
  }

  // One final hop is allowed when leaving the fast-kind system entirely.
  if (to_kind != kind && current_map->HasElementsTransition()) {
    DCHECK(to_kind == DICTIONARY_ELEMENTS);
    Map* next_map = current_map->elements_transition_map();
    if (next_map->elements_kind() == to_kind) return next_map;
  }

  DCHECK(current_map->elements_kind() == target_kind);
  return current_map;
}


// Returns the existing transition target with exactly |to_kind| elements, or
// NULL if no such transition map has been created yet.
Map* Map::LookupElementsTransitionMap(ElementsKind to_kind) {
  Map* to_map = FindClosestElementsTransition(this, to_kind);
  if (to_map->elements_kind() == to_kind) return to_map;
  return NULL;
}


// True when this map is the map of the isolate's initial Array.prototype or
// initial Object.prototype.
bool Map::IsMapInArrayPrototypeChain() {
  Isolate* isolate = GetIsolate();
  if (isolate->initial_array_prototype()->map() == this) {
    return true;
  }

  if (isolate->initial_object_prototype()->map() == this) {
    return true;
  }

  return false;
}


// Creates (and inserts) all elements-kind transition maps needed to get from
// |map| to |to_kind|, returning the final map whose kind is |to_kind|.
static Handle<Map> AddMissingElementsTransitions(Handle<Map> map,
                                                 ElementsKind to_kind) {
  DCHECK(IsTransitionElementsKind(map->elements_kind()));

  Handle<Map> current_map = map;

  ElementsKind kind = map->elements_kind();
  // Prototype maps skip the intermediate fast-kind transition steps.
  if (!map->is_prototype_map()) {
    while (kind != to_kind && !IsTerminalElementsKind(kind)) {
      kind = GetNextTransitionElementsKind(kind);
      current_map =
          Map::CopyAsElementsKind(current_map, kind, INSERT_TRANSITION);
    }
  }

  // In case we are exiting the fast elements kind system, just add the map in
  // the end.
  if (kind != to_kind) {
    current_map = Map::CopyAsElementsKind(
        current_map, to_kind, INSERT_TRANSITION);
  }

  DCHECK(current_map->elements_kind() == to_kind);
  return current_map;
}


// Returns a map like |map| but with elements kind |to_kind|, consulting the
// per-native-context cache of JSArray maps as a fast path before falling
// back to TransitionElementsToSlow.
Handle<Map> Map::TransitionElementsTo(Handle<Map> map,
                                      ElementsKind to_kind) {
  ElementsKind from_kind = map->elements_kind();
  if (from_kind == to_kind) return map;

  Isolate* isolate = map->GetIsolate();
  Context* native_context = isolate->context()->native_context();
  Object* maybe_array_maps = native_context->js_array_maps();
  if (maybe_array_maps->IsFixedArray()) {
    DisallowHeapAllocation no_gc;
    FixedArray* array_maps = FixedArray::cast(maybe_array_maps);
    if (array_maps->get(from_kind) == *map) {
      Object* maybe_transitioned_map = array_maps->get(to_kind);
      if (maybe_transitioned_map->IsMap()) {
        return handle(Map::cast(maybe_transitioned_map));
      }
    }
  }

  return TransitionElementsToSlow(map, to_kind);
}


// Slow path of TransitionElementsTo: decides whether the new map should be
// remembered as a transition of |map| or created as a free-standing copy.
Handle<Map> Map::TransitionElementsToSlow(Handle<Map> map,
                                          ElementsKind to_kind) {
  ElementsKind from_kind = map->elements_kind();

  if (from_kind == to_kind) {
    return map;
  }

  bool allow_store_transition =
      // Only remember the map transition if there is not an already existing
      // non-matching element transition.
      !map->IsUndefined() && !map->is_dictionary_map() &&
      IsTransitionElementsKind(from_kind);

  // Only store fast element maps in ascending generality.
  if (IsFastElementsKind(to_kind)) {
    allow_store_transition &=
        IsTransitionableFastElementsKind(from_kind) &&
        IsMoreGeneralElementsKindTransition(from_kind, to_kind);
  }

  if (!allow_store_transition) {
    return Map::CopyAsElementsKind(map, to_kind, OMIT_TRANSITION);
  }

  return Map::AsElementsKind(map, to_kind);
}


// static
// Returns a map with elements kind |kind|, reusing existing transitions
// where possible and creating the missing ones otherwise.
Handle<Map> Map::AsElementsKind(Handle<Map> map, ElementsKind kind) {
  Handle<Map> closest_map(FindClosestElementsTransition(*map, kind));

  if (closest_map->elements_kind() == kind) {
    return closest_map;
  }

  return AddMissingElementsTransitions(closest_map, kind);
}


// Returns the map |object| should use after transitioning its elements to
// |to_kind|.
Handle<Map> JSObject::GetElementsTransitionMap(Handle<JSObject> object,
                                               ElementsKind to_kind) {
  Handle<Map> map(object->map());
  return Map::TransitionElementsTo(map, to_kind);
}


// Implements the proxy "has" trap; symbols always report false. Returns an
// empty Maybe when the trap throws.
Maybe<bool> JSProxy::HasPropertyWithHandler(Handle<JSProxy> proxy,
                                            Handle<Name> name) {
  Isolate* isolate = proxy->GetIsolate();

  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
  if (name->IsSymbol()) return maybe(false);

  Handle<Object> args[] = { name };
  Handle<Object> result;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
      isolate, result, CallTrap(proxy, "has", isolate->derived_has_trap(),
                                arraysize(args), args),
      Maybe<bool>());

  return maybe(result->BooleanValue());
}


// Implements the proxy "set" trap for stores where the proxy is the receiver
// (or its hidden prototype). Returns |value| on success.
MaybeHandle<Object> JSProxy::SetPropertyWithHandler(Handle<JSProxy> proxy,
                                                    Handle<Object> receiver,
                                                    Handle<Name> name,
                                                    Handle<Object> value,
                                                    StrictMode strict_mode) {
  Isolate* isolate = proxy->GetIsolate();

  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
  if (name->IsSymbol()) return value;

  Handle<Object> args[] = { receiver, name, value };
  RETURN_ON_EXCEPTION(
      isolate,
      CallTrap(proxy,
               "set",
               isolate->derived_set_trap(),
               arraysize(args),
               args),
      Object);

  return value;
}


// Handles a store where a proxy occurs on the receiver's prototype chain:
// consults the proxy's "getPropertyDescriptor" trap to decide whether the
// store is handled here (*done == true) or should continue on the receiver
// (*done == false; the hole is returned in that case).
MaybeHandle<Object> JSProxy::SetPropertyViaPrototypesWithHandler(
    Handle<JSProxy> proxy, Handle<Object> receiver, Handle<Name> name,
    Handle<Object> value, StrictMode strict_mode, bool* done) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<Object> handler(proxy->handler(), isolate);  // Trap might morph proxy.

  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
  if (name->IsSymbol()) {
    *done = false;
    return isolate->factory()->the_hole_value();
  }

  *done = true;  // except where redefined...
  Handle<Object> args[] = { name };
  Handle<Object> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate, result,
      CallTrap(proxy,
               "getPropertyDescriptor",
               Handle<Object>(),
               arraysize(args),
               args),
      Object);

  // No descriptor reported: the store continues on the receiver.
  if (result->IsUndefined()) {
    *done = false;
    return isolate->factory()->the_hole_value();
  }

  // Emulate [[GetProperty]] semantics for proxies.
  Handle<Object> argv[] = { result };
  Handle<Object> desc;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate, desc,
      Execution::Call(isolate,
                      isolate->to_complete_property_descriptor(),
                      result,
                      arraysize(argv),
                      argv),
      Object);

  // [[GetProperty]] requires to check that all properties are configurable.
  Handle<String> configurable_name =
      isolate->factory()->InternalizeOneByteString(
          STATIC_CHAR_VECTOR("configurable_"));
  Handle<Object> configurable =
      Object::GetProperty(desc, configurable_name).ToHandleChecked();
  DCHECK(configurable->IsBoolean());
  // Proxies must report all properties as configurable.
  if (configurable->IsFalse()) {
    Handle<String> trap = isolate->factory()->InternalizeOneByteString(
        STATIC_CHAR_VECTOR("getPropertyDescriptor"));
    Handle<Object> args[] = { handler, trap, name };
    THROW_NEW_ERROR(isolate, NewTypeError("proxy_prop_not_configurable",
                                          HandleVector(args, arraysize(args))),
                    Object);
  }
  DCHECK(configurable->IsTrue());

  // Check for DataDescriptor.
  Handle<String> hasWritable_name =
      isolate->factory()->InternalizeOneByteString(
          STATIC_CHAR_VECTOR("hasWritable_"));
  Handle<Object> hasWritable =
      Object::GetProperty(desc, hasWritable_name).ToHandleChecked();
  DCHECK(hasWritable->IsBoolean());
  if (hasWritable->IsTrue()) {
    Handle<String> writable_name = isolate->factory()->InternalizeOneByteString(
        STATIC_CHAR_VECTOR("writable_"));
    Handle<Object> writable =
        Object::GetProperty(desc, writable_name).ToHandleChecked();
    DCHECK(writable->IsBoolean());
    // A writable data property on the prototype never handles the store; a
    // read-only one is a strict-mode error.
    *done = writable->IsFalse();
    if (!*done) return isolate->factory()->the_hole_value();
    if (strict_mode == SLOPPY) return value;
    Handle<Object> args[] = { name, receiver };
    THROW_NEW_ERROR(isolate, NewTypeError("strict_read_only_property",
                                          HandleVector(args, arraysize(args))),
                    Object);
  }

  // We have an AccessorDescriptor.
  Handle<String> set_name =
      isolate->factory()->InternalizeOneByteString(STATIC_CHAR_VECTOR("set_"));
  Handle<Object> setter = Object::GetProperty(desc, set_name).ToHandleChecked();
  if (!setter->IsUndefined()) {
    // TODO(rossberg): nicer would be to cast to some JSCallable here...
    return SetPropertyWithDefinedSetter(
        receiver, Handle<JSReceiver>::cast(setter), value);
  }

  if (strict_mode == SLOPPY) return value;
  Handle<Object> args2[] = { name, proxy };
  THROW_NEW_ERROR(isolate, NewTypeError("no_setter_in_callback",
                                        HandleVector(args2, arraysize(args2))),
                  Object);
}


// Implements the proxy "delete" trap; throws "handler_failed" when a strict
// deletion is rejected by the handler. Symbols always report false.
MaybeHandle<Object> JSProxy::DeletePropertyWithHandler(
    Handle<JSProxy> proxy, Handle<Name> name, DeleteMode mode) {
  Isolate* isolate = proxy->GetIsolate();

  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
  if (name->IsSymbol()) return isolate->factory()->false_value();

  Handle<Object> args[] = { name };
  Handle<Object> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate, result,
      CallTrap(proxy,
               "delete",
               Handle<Object>(),
               arraysize(args),
               args),
      Object);

  bool result_bool = result->BooleanValue();
  if (mode == STRICT_DELETION && !result_bool) {
    Handle<Object> handler(proxy->handler(), isolate);
    Handle<String> trap_name = isolate->factory()->InternalizeOneByteString(
        STATIC_CHAR_VECTOR("delete"));
    Handle<Object> args[] = { handler, trap_name };
    THROW_NEW_ERROR(isolate, NewTypeError("handler_failed",
                                          HandleVector(args, arraysize(args))),
                    Object);
  }
  return isolate->factory()->ToBoolean(result_bool);
}


// Element variant of DeletePropertyWithHandler: stringifies the index.
MaybeHandle<Object> JSProxy::DeleteElementWithHandler(
    Handle<JSProxy> proxy, uint32_t index, DeleteMode mode) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<String> name = isolate->factory()->Uint32ToString(index);
  return JSProxy::DeletePropertyWithHandler(proxy, name, mode);
}


// Derives the property attributes of |name| from the descriptor returned by
// the proxy's "getPropertyDescriptor" trap.
Maybe<PropertyAttributes> JSProxy::GetPropertyAttributesWithHandler(
    Handle<JSProxy> proxy, Handle<Object> receiver, Handle<Name> name) {
  Isolate* isolate = proxy->GetIsolate();
  HandleScope scope(isolate);
  // TODO(rossberg): adjust once there is a story for symbols vs proxies.
  if (name->IsSymbol()) return maybe(ABSENT);

  Handle<Object> args[] = { name };
  Handle<Object> result;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
      isolate, result,
      proxy->CallTrap(proxy, "getPropertyDescriptor", Handle<Object>(),
                      arraysize(args), args),
      Maybe<PropertyAttributes>());

  if (result->IsUndefined()) return maybe(ABSENT);

  // Normalize the trap result into a complete property descriptor.
  Handle<Object> argv[] = { result };
  Handle<Object> desc;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
      isolate, desc,
      Execution::Call(isolate, isolate->to_complete_property_descriptor(),
                      result, arraysize(argv), argv),
      Maybe<PropertyAttributes>());

  // Convert result to PropertyAttributes.
  Handle<String> enum_n = isolate->factory()->InternalizeOneByteString(
      STATIC_CHAR_VECTOR("enumerable_"));
  Handle<Object> enumerable;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, enumerable,
                                   Object::GetProperty(desc, enum_n),
                                   Maybe<PropertyAttributes>());
  Handle<String> conf_n = isolate->factory()->InternalizeOneByteString(
      STATIC_CHAR_VECTOR("configurable_"));
  Handle<Object> configurable;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, configurable,
                                   Object::GetProperty(desc, conf_n),
                                   Maybe<PropertyAttributes>());
  Handle<String> writ_n = isolate->factory()->InternalizeOneByteString(
      STATIC_CHAR_VECTOR("writable_"));
  Handle<Object> writable;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, writable,
                                   Object::GetProperty(desc, writ_n),
                                   Maybe<PropertyAttributes>());
  if (!writable->BooleanValue()) {
    // An accessor descriptor with a setter still counts as writable.
    Handle<String> set_n = isolate->factory()->InternalizeOneByteString(
        STATIC_CHAR_VECTOR("set_"));
    Handle<Object> setter;
    ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, setter,
                                     Object::GetProperty(desc, set_n),
                                     Maybe<PropertyAttributes>());
    writable = isolate->factory()->ToBoolean(!setter->IsUndefined());
  }

  // Proxies must report all properties as configurable.
  if (configurable->IsFalse()) {
    Handle<Object> handler(proxy->handler(), isolate);
    Handle<String> trap = isolate->factory()->InternalizeOneByteString(
        STATIC_CHAR_VECTOR("getPropertyDescriptor"));
    Handle<Object> args[] = { handler, trap, name };
    Handle<Object> error;
    MaybeHandle<Object> maybe_error = isolate->factory()->NewTypeError(
        "proxy_prop_not_configurable", HandleVector(args, arraysize(args)));
    if (maybe_error.ToHandle(&error)) isolate->Throw(*error);
    return maybe(NONE);
  }

  int attributes = NONE;
  if (!enumerable->BooleanValue()) attributes |= DONT_ENUM;
  if (!configurable->BooleanValue()) attributes |= DONT_DELETE;
  if (!writable->BooleanValue()) attributes |= READ_ONLY;
  return maybe(static_cast<PropertyAttributes>(attributes));
}


// Element variant of GetPropertyAttributesWithHandler.
Maybe<PropertyAttributes> JSProxy::GetElementAttributeWithHandler(
    Handle<JSProxy> proxy, Handle<JSReceiver> receiver, uint32_t index) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<String> name = isolate->factory()->Uint32ToString(index);
  return GetPropertyAttributesWithHandler(proxy, receiver, name);
}


// Turns a proxy into a regular JSFunction or JSObject in place, preserving
// its identity hash.
void JSProxy::Fix(Handle<JSProxy> proxy) {
  Isolate* isolate = proxy->GetIsolate();

  // Save identity hash.
  Handle<Object> hash(proxy->GetIdentityHash(), isolate);

  if (proxy->IsJSFunctionProxy()) {
    isolate->factory()->BecomeJSFunction(proxy);
    // Code will be set on the JavaScript side.
  } else {
    isolate->factory()->BecomeJSObject(proxy);
  }
  DCHECK(proxy->IsJSObject());

  // Inherit identity, if it was present.
  if (hash->IsSmi()) {
    JSObject::SetIdentityHash(Handle<JSObject>::cast(proxy),
                              Handle<Smi>::cast(hash));
  }
}


// Fetches trap |name| from the proxy's handler and calls it with |argv|.
// Falls back to |derived| when the handler has no such trap; throws
// "handler_trap_missing" when neither exists.
MaybeHandle<Object> JSProxy::CallTrap(Handle<JSProxy> proxy,
                                      const char* name,
                                      Handle<Object> derived,
                                      int argc,
                                      Handle<Object> argv[]) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<Object> handler(proxy->handler(), isolate);

  Handle<String> trap_name = isolate->factory()->InternalizeUtf8String(name);
  Handle<Object> trap;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate, trap,
      Object::GetPropertyOrElement(handler, trap_name),
      Object);

  if (trap->IsUndefined()) {
    if (derived.is_null()) {
      Handle<Object> args[] = { handler, trap_name };
      THROW_NEW_ERROR(isolate,
                      NewTypeError("handler_trap_missing",
                                   HandleVector(args, arraysize(args))),
                      Object);
    }
    trap = Handle<Object>(derived);
  }

  return Execution::Call(isolate, trap, handler, argc, argv);
}


// Prepares |object| so that |map| can be installed on it: reconciles the
// elements kinds of the two maps first, then migrates the object.
void JSObject::AllocateStorageForMap(Handle<JSObject> object, Handle<Map> map) {
  DCHECK(object->map()->inobject_properties() == map->inobject_properties());
  ElementsKind obj_kind = object->map()->elements_kind();
  ElementsKind map_kind = map->elements_kind();
  if (map_kind != obj_kind) {
    // Keep the more general of the two kinds.
    ElementsKind to_kind = map_kind;
    if (IsMoreGeneralElementsKindTransition(map_kind, obj_kind) ||
        IsDictionaryElementsKind(obj_kind)) {
      to_kind = obj_kind;
    }
    if (IsDictionaryElementsKind(to_kind)) {
      NormalizeElements(object);
    } else {
      TransitionElementsKind(object, to_kind);
    }
    map = Map::AsElementsKind(map, to_kind);
  }
  JSObject::MigrateToMap(object, map);
}


// Eagerly migrates |object| to the most up-to-date version of its map.
void JSObject::MigrateInstance(Handle<JSObject> object) {
  Handle<Map> original_map(object->map());
  Handle<Map> map = Map::Update(original_map);
  map->set_migration_target(true);
  MigrateToMap(object, map);
  if (FLAG_trace_migration) {
    object->PrintInstanceMigration(stdout, *original_map, *map);
  }
}


// static
// Attempts the same migration as MigrateInstance via Map::TryUpdate; returns
// false when no updated map could be found (no migration happens then).
bool JSObject::TryMigrateInstance(Handle<JSObject> object) {
  Isolate* isolate = object->GetIsolate();
  DisallowDeoptimization no_deoptimization(isolate);
  Handle<Map> original_map(object->map(), isolate);
  Handle<Map> new_map;
  if (!Map::TryUpdate(original_map).ToHandle(&new_map)) {
    return false;
  }
  JSObject::MigrateToMap(object, new_map);
  if (FLAG_trace_migration) {
    object->PrintInstanceMigration(stdout, *original_map, object->map());
  }
  return true;
}


// Migrates |object| to |map|, then writes |value| into the field backing the
// map's last descriptor (no-op for non-FIELD descriptors).
void JSObject::MigrateToNewProperty(Handle<JSObject> object,
                                    Handle<Map> map,
                                    Handle<Object> value) {
  JSObject::MigrateToMap(object, map);
  if (map->GetLastDescriptorDetails().type() != FIELD) return;
  object->WriteToField(map->LastAdded(), *value);
}


// Writes |value| into the field backing |descriptor|. Double fields are
// stored in a mutable HeapNumber box, which is updated in place.
void JSObject::WriteToField(int descriptor, Object* value) {
  DisallowHeapAllocation no_gc;

  DescriptorArray* desc = map()->instance_descriptors();
  PropertyDetails details = desc->GetDetails(descriptor);

  DCHECK(details.type() == FIELD);

  FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor);
  if (details.representation().IsDouble()) {
    // Nothing more to be done.
3765 if (value->IsUninitialized()) return; 3766 HeapNumber* box = HeapNumber::cast(RawFastPropertyAt(index)); 3767 DCHECK(box->IsMutableHeapNumber()); 3768 box->set_value(value->Number()); 3769 } else { 3770 FastPropertyAtPut(index, value); 3771 } 3772 } 3773 3774 3775 void JSObject::AddProperty(Handle<JSObject> object, Handle<Name> name, 3776 Handle<Object> value, 3777 PropertyAttributes attributes) { 3778 LookupIterator it(object, name, LookupIterator::OWN_SKIP_INTERCEPTOR); 3779 CHECK_NE(LookupIterator::ACCESS_CHECK, it.state()); 3780 #ifdef DEBUG 3781 uint32_t index; 3782 DCHECK(!object->IsJSProxy()); 3783 DCHECK(!name->AsArrayIndex(&index)); 3784 Maybe<PropertyAttributes> maybe = GetPropertyAttributes(&it); 3785 DCHECK(maybe.has_value); 3786 DCHECK(!it.IsFound()); 3787 DCHECK(object->map()->is_extensible() || 3788 name.is_identical_to(it.isolate()->factory()->hidden_string())); 3789 #endif 3790 AddDataProperty(&it, value, attributes, STRICT, 3791 CERTAINLY_NOT_STORE_FROM_KEYED).Check(); 3792 } 3793 3794 3795 // Reconfigures a property to a data property with attributes, even if it is not 3796 // reconfigurable. 
// Walks the object's own properties and either updates or reconfigures an
// existing property to a data property with |attributes|, or adds a new
// data property when none is found.  Object.observe change records are
// enqueued for observed maps.
MaybeHandle<Object> JSObject::SetOwnPropertyIgnoreAttributes(
    Handle<JSObject> object,
    Handle<Name> name,
    Handle<Object> value,
    PropertyAttributes attributes,
    ExecutableAccessorInfoHandling handling) {
  DCHECK(!value->IsTheHole());
  LookupIterator it(object, name, LookupIterator::OWN_SKIP_INTERCEPTOR);
  // Writes to the hidden string are never observable.
  bool is_observed = object->map()->is_observed() &&
                     *name != it.isolate()->heap()->hidden_string();
  for (; it.IsFound(); it.Next()) {
    switch (it.state()) {
      case LookupIterator::INTERCEPTOR:
      case LookupIterator::JSPROXY:
      case LookupIterator::NOT_FOUND:
      case LookupIterator::TRANSITION:
        UNREACHABLE();

      case LookupIterator::ACCESS_CHECK:
        if (!it.isolate()->MayNamedAccess(object, name, v8::ACCESS_SET)) {
          return SetPropertyWithFailedAccessCheck(&it, value, SLOPPY);
        }
        break;

      case LookupIterator::ACCESSOR: {
        PropertyDetails details = it.property_details();
        Handle<Object> old_value = it.isolate()->factory()->the_hole_value();
        // Ensure the context isn't changed after calling into accessors.
        AssertNoContextChange ncc(it.isolate());

        Handle<Object> accessors = it.GetAccessors();

        // Record the old value for the change record before overwriting it.
        if (is_observed && accessors->IsAccessorInfo()) {
          ASSIGN_RETURN_ON_EXCEPTION(
              it.isolate(), old_value,
              GetPropertyWithAccessor(it.GetReceiver(), it.name(),
                                      it.GetHolder<JSObject>(), accessors),
              Object);
        }

        // Special handling for ExecutableAccessorInfo, which behaves like a
        // data property.
        if (handling == DONT_FORCE_FIELD &&
            accessors->IsExecutableAccessorInfo()) {
          Handle<Object> result;
          ASSIGN_RETURN_ON_EXCEPTION(
              it.isolate(), result,
              JSObject::SetPropertyWithAccessor(it.GetReceiver(), it.name(),
                                                value, it.GetHolder<JSObject>(),
                                                accessors, STRICT),
              Object);
          DCHECK(result->SameValue(*value));

          if (details.attributes() == attributes) {
            // Regular property update if the attributes match.
            if (is_observed && !old_value->SameValue(*value)) {
              // If we are setting the prototype of a function and are
              // observed, don't send change records because the prototype
              // handles that itself.
              if (!object->IsJSFunction() ||
                  !Name::Equals(it.isolate()->factory()->prototype_string(),
                                name) ||
                  !Handle<JSFunction>::cast(object)->should_have_prototype()) {
                EnqueueChangeRecord(object, "update", name, old_value);
              }
            }
            return value;
          }

          // Reconfigure the accessor if attributes mismatch.
          Handle<ExecutableAccessorInfo> new_data = Accessors::CloneAccessor(
              it.isolate(), Handle<ExecutableAccessorInfo>::cast(accessors));
          new_data->set_property_attributes(attributes);
          // By clearing the setter we don't have to introduce a lookup to
          // the setter, simply make it unavailable to reflect the
          // attributes.
          if (attributes & READ_ONLY) new_data->clear_setter();
          SetPropertyCallback(object, name, new_data, attributes);
          if (is_observed) {
            if (old_value->SameValue(*value)) {
              old_value = it.isolate()->factory()->the_hole_value();
            }
            EnqueueChangeRecord(object, "reconfigure", name, old_value);
          }
          return value;
        }

        // Generic accessor: replace it with a data property.
        it.ReconfigureDataProperty(value, attributes);
        it.PrepareForDataProperty(value);
        it.WriteDataValue(value);

        if (is_observed) {
          if (old_value->SameValue(*value)) {
            old_value = it.isolate()->factory()->the_hole_value();
          }
          EnqueueChangeRecord(object, "reconfigure", name, old_value);
        }

        return value;
      }

      case LookupIterator::DATA: {
        PropertyDetails details = it.property_details();
        Handle<Object> old_value = it.isolate()->factory()->the_hole_value();
        // Regular property update if the attributes match.
        if (details.attributes() == attributes) {
          return SetDataProperty(&it, value);
        }
        // Reconfigure the data property if the attributes mismatch.
        if (is_observed) old_value = it.GetDataValue();

        it.ReconfigureDataProperty(value, attributes);
        it.PrepareForDataProperty(value);
        it.WriteDataValue(value);

        if (is_observed) {
          if (old_value->SameValue(*value)) {
            old_value = it.isolate()->factory()->the_hole_value();
          }
          EnqueueChangeRecord(object, "reconfigure", name, old_value);
        }

        return value;
      }
    }
  }

  // Property not found: add it as a fresh data property.
  return AddDataProperty(&it, value, attributes, STRICT,
                         CERTAINLY_NOT_STORE_FROM_KEYED);
}


// Queries a named interceptor for the attributes of |name|.  Prefers the
// interceptor's query callback; otherwise falls back to the getter
// callback, reporting DONT_ENUM for any property it intercepts.  Returns
// ABSENT when the interceptor does not handle the property.
Maybe<PropertyAttributes> JSObject::GetPropertyAttributesWithInterceptor(
    Handle<JSObject> holder,
    Handle<Object> receiver,
    Handle<Name> name) {
  // TODO(rossberg): Support symbols in the API.
  if (name->IsSymbol()) return maybe(ABSENT);

  Isolate* isolate = holder->GetIsolate();
  HandleScope scope(isolate);

  // Make sure that the top context does not change when doing
  // callbacks or interceptor calls.
  AssertNoContextChange ncc(isolate);

  Handle<InterceptorInfo> interceptor(holder->GetNamedInterceptor());
  PropertyCallbackArguments args(
      isolate, interceptor->data(), *receiver, *holder);
  if (!interceptor->query()->IsUndefined()) {
    v8::NamedPropertyQueryCallback query =
        v8::ToCData<v8::NamedPropertyQueryCallback>(interceptor->query());
    LOG(isolate,
        ApiNamedPropertyAccess("interceptor-named-has", *holder, *name));
    v8::Handle<v8::Integer> result =
        args.Call(query, v8::Utils::ToLocal(Handle<String>::cast(name)));
    if (!result.IsEmpty()) {
      DCHECK(result->IsInt32());
      return maybe(static_cast<PropertyAttributes>(result->Int32Value()));
    }
  } else if (!interceptor->getter()->IsUndefined()) {
    v8::NamedPropertyGetterCallback getter =
        v8::ToCData<v8::NamedPropertyGetterCallback>(interceptor->getter());
    LOG(isolate,
        ApiNamedPropertyAccess("interceptor-named-get-has", *holder, *name));
    v8::Handle<v8::Value> result =
        args.Call(getter, v8::Utils::ToLocal(Handle<String>::cast(name)));
    if (!result.IsEmpty()) return maybe(DONT_ENUM);
  }

  RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, Maybe<PropertyAttributes>());
  return maybe(ABSENT);
}


// Returns the attributes of the own property |name|, routing names that
// parse as array indices through the element-attribute path.
Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
    Handle<JSReceiver> object, Handle<Name> name) {
  // Check whether the name is an array index.
  uint32_t index = 0;
  if (object->IsJSObject() && name->AsArrayIndex(&index)) {
    return GetOwnElementAttribute(object, index);
  }
  LookupIterator it(object, name, LookupIterator::HIDDEN);
  return GetPropertyAttributes(&it);
}


// Walks the lookup chain described by |it| and returns the attributes of
// the first property found, dispatching to proxy handlers, interceptors and
// access checks along the way.  Returns ABSENT if nothing is found.
Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
    LookupIterator* it) {
  for (; it->IsFound(); it->Next()) {
    switch (it->state()) {
      case LookupIterator::NOT_FOUND:
      case LookupIterator::TRANSITION:
        UNREACHABLE();
      case LookupIterator::JSPROXY:
        return JSProxy::GetPropertyAttributesWithHandler(
            it->GetHolder<JSProxy>(), it->GetReceiver(), it->name());
      case LookupIterator::INTERCEPTOR: {
        Maybe<PropertyAttributes> result =
            JSObject::GetPropertyAttributesWithInterceptor(
                it->GetHolder<JSObject>(), it->GetReceiver(), it->name());
        // Propagate exceptions; keep walking when the interceptor passes.
        if (!result.has_value) return result;
        if (result.value != ABSENT) return result;
        break;
      }
      case LookupIterator::ACCESS_CHECK:
        if (it->HasAccess(v8::ACCESS_HAS)) break;
        return JSObject::GetPropertyAttributesWithFailedAccessCheck(it);
      case LookupIterator::ACCESSOR:
      case LookupIterator::DATA:
        return maybe(it->property_details().attributes());
    }
  }
  return maybe(ABSENT);
}


// Returns the attributes of element |index| of |object| as seen from
// |receiver|, applying access checks, following a detached-able global
// proxy, and consulting an indexed interceptor when present.
Maybe<PropertyAttributes> JSObject::GetElementAttributeWithReceiver(
    Handle<JSObject> object, Handle<JSReceiver> receiver, uint32_t index,
    bool check_prototype) {
  Isolate* isolate = object->GetIsolate();

  // Check access rights if needed.
  if (object->IsAccessCheckNeeded()) {
    if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_HAS)) {
      isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS);
      RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, Maybe<PropertyAttributes>());
      return maybe(ABSENT);
    }
  }

  if (object->IsJSGlobalProxy()) {
    PrototypeIterator iter(isolate, object);
    // A detached global proxy has no properties.
    if (iter.IsAtEnd()) return maybe(ABSENT);
    DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
    return JSObject::GetElementAttributeWithReceiver(
        Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), receiver,
        index, check_prototype);
  }

  // Check for lookup interceptor except when bootstrapping.
  if (object->HasIndexedInterceptor() && !isolate->bootstrapper()->IsActive()) {
    return JSObject::GetElementAttributeWithInterceptor(
        object, receiver, index, check_prototype);
  }

  return GetElementAttributeWithoutInterceptor(
      object, receiver, index, check_prototype);
}


// Queries an indexed interceptor for the attributes of element |index|.
// Prefers the query callback; otherwise a getter callback hit reports NONE.
// Falls through to the interceptor-free path when nothing is intercepted.
Maybe<PropertyAttributes> JSObject::GetElementAttributeWithInterceptor(
    Handle<JSObject> object, Handle<JSReceiver> receiver, uint32_t index,
    bool check_prototype) {
  Isolate* isolate = object->GetIsolate();
  HandleScope scope(isolate);

  // Make sure that the top context does not change when doing
  // callbacks or interceptor calls.
  AssertNoContextChange ncc(isolate);

  Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
  PropertyCallbackArguments args(
      isolate, interceptor->data(), *receiver, *object);
  if (!interceptor->query()->IsUndefined()) {
    v8::IndexedPropertyQueryCallback query =
        v8::ToCData<v8::IndexedPropertyQueryCallback>(interceptor->query());
    LOG(isolate,
        ApiIndexedPropertyAccess("interceptor-indexed-has", *object, index));
    v8::Handle<v8::Integer> result = args.Call(query, index);
    if (!result.IsEmpty())
      return maybe(static_cast<PropertyAttributes>(result->Int32Value()));
  } else if (!interceptor->getter()->IsUndefined()) {
    v8::IndexedPropertyGetterCallback getter =
        v8::ToCData<v8::IndexedPropertyGetterCallback>(interceptor->getter());
    LOG(isolate,
        ApiIndexedPropertyAccess(
            "interceptor-indexed-get-has", *object, index));
    v8::Handle<v8::Value> result = args.Call(getter, index);
    if (!result.IsEmpty()) return maybe(NONE);
  }

  return GetElementAttributeWithoutInterceptor(
      object, receiver, index, check_prototype);
}


// Attribute lookup for element |index| that ignores interceptors: asks the
// elements accessor, special-cases String wrapper characters, and
// optionally continues up the prototype chain (including proxies).
Maybe<PropertyAttributes> JSObject::GetElementAttributeWithoutInterceptor(
    Handle<JSObject> object, Handle<JSReceiver> receiver, uint32_t index,
    bool check_prototype) {
  PropertyAttributes attr = object->GetElementsAccessor()->GetAttributes(
      receiver, object, index);
  if (attr != ABSENT) return maybe(attr);

  // Handle [] on String objects.
  if (object->IsStringObjectWithCharacterAt(index)) {
    return maybe(static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE));
  }

  if (!check_prototype) return maybe(ABSENT);

  PrototypeIterator iter(object->GetIsolate(), object);
  if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
    // We need to follow the spec and simulate a call to [[GetOwnProperty]].
    return JSProxy::GetElementAttributeWithHandler(
        Handle<JSProxy>::cast(PrototypeIterator::GetCurrent(iter)), receiver,
        index);
  }
  if (iter.IsAtEnd()) return maybe(ABSENT);
  return GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), receiver,
      index, true);
}


// Allocates a fresh, tenured cache of kEntries slots for normalized maps.
Handle<NormalizedMapCache> NormalizedMapCache::New(Isolate* isolate) {
  Handle<FixedArray> array(
      isolate->factory()->NewFixedArray(kEntries, TENURED));
  return Handle<NormalizedMapCache>::cast(array);
}


// Returns the cached normalized map equivalent to |fast_map| under |mode|,
// or an empty handle on a cache miss.
MaybeHandle<Map> NormalizedMapCache::Get(Handle<Map> fast_map,
                                         PropertyNormalizationMode mode) {
  DisallowHeapAllocation no_gc;
  Object* value = FixedArray::get(GetIndex(fast_map));
  if (!value->IsMap() ||
      !Map::cast(value)->EquivalentToForNormalization(*fast_map, mode)) {
    return MaybeHandle<Map>();
  }
  return handle(Map::cast(value));
}


// Stores |normalized_map| in the slot keyed by |fast_map|.
void NormalizedMapCache::Set(Handle<Map> fast_map,
                             Handle<Map> normalized_map) {
  DisallowHeapAllocation no_gc;
  DCHECK(normalized_map->is_dictionary_map());
  FixedArray::set(GetIndex(fast_map), *normalized_map);
}


// Drops all cache entries by overwriting them with undefined.
void NormalizedMapCache::Clear() {
  int entries = length();
  for (int i = 0; i != entries; i++) {
    set_undefined(i);
  }
}


// Records |code| under |name| in the code cache of the object's map.
void HeapObject::UpdateMapCodeCache(Handle<HeapObject> object,
                                    Handle<Name> name,
                                    Handle<Code> code) {
  Handle<Map> map(object->map());
  Map::UpdateCodeCache(map, name, code);
}


// Converts |object| from fast (descriptor-based) to slow (dictionary)
// properties.  No-op when the object is already in dictionary mode.
void JSObject::NormalizeProperties(Handle<JSObject> object,
                                   PropertyNormalizationMode mode,
                                   int expected_additional_properties) {
  if (!object->HasFastProperties()) return;

  Handle<Map> map(object->map());
  Handle<Map> new_map = Map::Normalize(map, mode);

  MigrateFastToSlow(object, new_map, expected_additional_properties);
}
void JSObject::MigrateFastToSlow(Handle<JSObject> object, 4168 Handle<Map> new_map, 4169 int expected_additional_properties) { 4170 // The global object is always normalized. 4171 DCHECK(!object->IsGlobalObject()); 4172 // JSGlobalProxy must never be normalized 4173 DCHECK(!object->IsJSGlobalProxy()); 4174 4175 Isolate* isolate = object->GetIsolate(); 4176 HandleScope scope(isolate); 4177 Handle<Map> map(object->map()); 4178 4179 // Allocate new content. 4180 int real_size = map->NumberOfOwnDescriptors(); 4181 int property_count = real_size; 4182 if (expected_additional_properties > 0) { 4183 property_count += expected_additional_properties; 4184 } else { 4185 property_count += 2; // Make space for two more properties. 4186 } 4187 Handle<NameDictionary> dictionary = 4188 NameDictionary::New(isolate, property_count); 4189 4190 Handle<DescriptorArray> descs(map->instance_descriptors()); 4191 for (int i = 0; i < real_size; i++) { 4192 PropertyDetails details = descs->GetDetails(i); 4193 switch (details.type()) { 4194 case CONSTANT: { 4195 Handle<Name> key(descs->GetKey(i)); 4196 Handle<Object> value(descs->GetConstant(i), isolate); 4197 PropertyDetails d = PropertyDetails( 4198 details.attributes(), NORMAL, i + 1); 4199 dictionary = NameDictionary::Add(dictionary, key, value, d); 4200 break; 4201 } 4202 case FIELD: { 4203 Handle<Name> key(descs->GetKey(i)); 4204 FieldIndex index = FieldIndex::ForDescriptor(*map, i); 4205 Handle<Object> value( 4206 object->RawFastPropertyAt(index), isolate); 4207 if (details.representation().IsDouble()) { 4208 DCHECK(value->IsMutableHeapNumber()); 4209 Handle<HeapNumber> old = Handle<HeapNumber>::cast(value); 4210 value = isolate->factory()->NewHeapNumber(old->value()); 4211 } 4212 PropertyDetails d = 4213 PropertyDetails(details.attributes(), NORMAL, i + 1); 4214 dictionary = NameDictionary::Add(dictionary, key, value, d); 4215 break; 4216 } 4217 case CALLBACKS: { 4218 Handle<Name> key(descs->GetKey(i)); 4219 Handle<Object> 
value(descs->GetCallbacksObject(i), isolate); 4220 PropertyDetails d = PropertyDetails( 4221 details.attributes(), CALLBACKS, i + 1); 4222 dictionary = NameDictionary::Add(dictionary, key, value, d); 4223 break; 4224 } 4225 case NORMAL: 4226 UNREACHABLE(); 4227 break; 4228 } 4229 } 4230 4231 // Copy the next enumeration index from instance descriptor. 4232 dictionary->SetNextEnumerationIndex(real_size + 1); 4233 4234 // From here on we cannot fail and we shouldn't GC anymore. 4235 DisallowHeapAllocation no_allocation; 4236 4237 // Resize the object in the heap if necessary. 4238 int new_instance_size = new_map->instance_size(); 4239 int instance_size_delta = map->instance_size() - new_instance_size; 4240 DCHECK(instance_size_delta >= 0); 4241 4242 if (instance_size_delta > 0) { 4243 Heap* heap = isolate->heap(); 4244 heap->CreateFillerObjectAt(object->address() + new_instance_size, 4245 instance_size_delta); 4246 heap->AdjustLiveBytes(object->address(), -instance_size_delta, 4247 Heap::FROM_MUTATOR); 4248 } 4249 4250 // We are storing the new map using release store after creating a filler for 4251 // the left-over space to avoid races with the sweeper thread. 4252 object->synchronized_set_map(*new_map); 4253 4254 object->set_properties(*dictionary); 4255 4256 isolate->counters()->props_to_dictionary()->Increment(); 4257 4258 #ifdef DEBUG 4259 if (FLAG_trace_normalization) { 4260 OFStream os(stdout); 4261 os << "Object properties have been normalized:\n"; 4262 object->Print(os); 4263 } 4264 #endif 4265 } 4266 4267 4268 void JSObject::MigrateSlowToFast(Handle<JSObject> object, 4269 int unused_property_fields) { 4270 if (object->HasFastProperties()) return; 4271 DCHECK(!object->IsGlobalObject()); 4272 Isolate* isolate = object->GetIsolate(); 4273 Factory* factory = isolate->factory(); 4274 Handle<NameDictionary> dictionary(object->property_dictionary()); 4275 4276 // Make sure we preserve dictionary representation if there are too many 4277 // descriptors. 
4278 int number_of_elements = dictionary->NumberOfElements(); 4279 if (number_of_elements > kMaxNumberOfDescriptors) return; 4280 4281 if (number_of_elements != dictionary->NextEnumerationIndex()) { 4282 NameDictionary::DoGenerateNewEnumerationIndices(dictionary); 4283 } 4284 4285 int instance_descriptor_length = 0; 4286 int number_of_fields = 0; 4287 4288 // Compute the length of the instance descriptor. 4289 int capacity = dictionary->Capacity(); 4290 for (int i = 0; i < capacity; i++) { 4291 Object* k = dictionary->KeyAt(i); 4292 if (dictionary->IsKey(k)) { 4293 Object* value = dictionary->ValueAt(i); 4294 PropertyType type = dictionary->DetailsAt(i).type(); 4295 DCHECK(type != FIELD); 4296 instance_descriptor_length++; 4297 if (type == NORMAL && !value->IsJSFunction()) { 4298 number_of_fields += 1; 4299 } 4300 } 4301 } 4302 4303 int inobject_props = object->map()->inobject_properties(); 4304 4305 // Allocate new map. 4306 Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map())); 4307 new_map->set_dictionary_map(false); 4308 4309 if (instance_descriptor_length == 0) { 4310 DisallowHeapAllocation no_gc; 4311 DCHECK_LE(unused_property_fields, inobject_props); 4312 // Transform the object. 4313 new_map->set_unused_property_fields(inobject_props); 4314 object->synchronized_set_map(*new_map); 4315 object->set_properties(isolate->heap()->empty_fixed_array()); 4316 // Check that it really works. 4317 DCHECK(object->HasFastProperties()); 4318 return; 4319 } 4320 4321 // Allocate the instance descriptor. 4322 Handle<DescriptorArray> descriptors = DescriptorArray::Allocate( 4323 isolate, instance_descriptor_length); 4324 4325 int number_of_allocated_fields = 4326 number_of_fields + unused_property_fields - inobject_props; 4327 if (number_of_allocated_fields < 0) { 4328 // There is enough inobject space for all fields (including unused). 
4329 number_of_allocated_fields = 0; 4330 unused_property_fields = inobject_props - number_of_fields; 4331 } 4332 4333 // Allocate the fixed array for the fields. 4334 Handle<FixedArray> fields = factory->NewFixedArray( 4335 number_of_allocated_fields); 4336 4337 // Fill in the instance descriptor and the fields. 4338 int current_offset = 0; 4339 for (int i = 0; i < capacity; i++) { 4340 Object* k = dictionary->KeyAt(i); 4341 if (dictionary->IsKey(k)) { 4342 Object* value = dictionary->ValueAt(i); 4343 Handle<Name> key; 4344 if (k->IsSymbol()) { 4345 key = handle(Symbol::cast(k)); 4346 } else { 4347 // Ensure the key is a unique name before writing into the 4348 // instance descriptor. 4349 key = factory->InternalizeString(handle(String::cast(k))); 4350 } 4351 4352 PropertyDetails details = dictionary->DetailsAt(i); 4353 int enumeration_index = details.dictionary_index(); 4354 PropertyType type = details.type(); 4355 4356 if (value->IsJSFunction()) { 4357 ConstantDescriptor d(key, 4358 handle(value, isolate), 4359 details.attributes()); 4360 descriptors->Set(enumeration_index - 1, &d); 4361 } else if (type == NORMAL) { 4362 if (current_offset < inobject_props) { 4363 object->InObjectPropertyAtPut(current_offset, 4364 value, 4365 UPDATE_WRITE_BARRIER); 4366 } else { 4367 int offset = current_offset - inobject_props; 4368 fields->set(offset, value); 4369 } 4370 FieldDescriptor d(key, 4371 current_offset++, 4372 details.attributes(), 4373 // TODO(verwaest): value->OptimalRepresentation(); 4374 Representation::Tagged()); 4375 descriptors->Set(enumeration_index - 1, &d); 4376 } else if (type == CALLBACKS) { 4377 CallbacksDescriptor d(key, 4378 handle(value, isolate), 4379 details.attributes()); 4380 descriptors->Set(enumeration_index - 1, &d); 4381 } else { 4382 UNREACHABLE(); 4383 } 4384 } 4385 } 4386 DCHECK(current_offset == number_of_fields); 4387 4388 descriptors->Sort(); 4389 4390 DisallowHeapAllocation no_gc; 4391 new_map->InitializeDescriptors(*descriptors); 4392 
new_map->set_unused_property_fields(unused_property_fields); 4393 4394 // Transform the object. 4395 object->synchronized_set_map(*new_map); 4396 4397 object->set_properties(*fields); 4398 DCHECK(object->IsJSObject()); 4399 4400 // Check that it really works. 4401 DCHECK(object->HasFastProperties()); 4402 } 4403 4404 4405 void JSObject::ResetElements(Handle<JSObject> object) { 4406 Isolate* isolate = object->GetIsolate(); 4407 CHECK(object->map() != isolate->heap()->sloppy_arguments_elements_map()); 4408 if (object->map()->has_dictionary_elements()) { 4409 Handle<SeededNumberDictionary> new_elements = 4410 SeededNumberDictionary::New(isolate, 0); 4411 object->set_elements(*new_elements); 4412 } else { 4413 object->set_elements(object->map()->GetInitialElements()); 4414 } 4415 } 4416 4417 4418 static Handle<SeededNumberDictionary> CopyFastElementsToDictionary( 4419 Handle<FixedArrayBase> array, 4420 int length, 4421 Handle<SeededNumberDictionary> dictionary) { 4422 Isolate* isolate = array->GetIsolate(); 4423 Factory* factory = isolate->factory(); 4424 bool has_double_elements = array->IsFixedDoubleArray(); 4425 for (int i = 0; i < length; i++) { 4426 Handle<Object> value; 4427 if (has_double_elements) { 4428 Handle<FixedDoubleArray> double_array = 4429 Handle<FixedDoubleArray>::cast(array); 4430 if (double_array->is_the_hole(i)) { 4431 value = factory->the_hole_value(); 4432 } else { 4433 value = factory->NewHeapNumber(double_array->get_scalar(i)); 4434 } 4435 } else { 4436 value = handle(Handle<FixedArray>::cast(array)->get(i), isolate); 4437 } 4438 if (!value->IsTheHole()) { 4439 PropertyDetails details = PropertyDetails(NONE, NORMAL, 0); 4440 dictionary = 4441 SeededNumberDictionary::AddNumberEntry(dictionary, i, value, details); 4442 } 4443 } 4444 return dictionary; 4445 } 4446 4447 4448 Handle<SeededNumberDictionary> JSObject::NormalizeElements( 4449 Handle<JSObject> object) { 4450 DCHECK(!object->HasExternalArrayElements() && 4451 
!object->HasFixedTypedArrayElements()); 4452 Isolate* isolate = object->GetIsolate(); 4453 4454 // Find the backing store. 4455 Handle<FixedArrayBase> array(FixedArrayBase::cast(object->elements())); 4456 bool is_arguments = 4457 (array->map() == isolate->heap()->sloppy_arguments_elements_map()); 4458 if (is_arguments) { 4459 array = handle(FixedArrayBase::cast( 4460 Handle<FixedArray>::cast(array)->get(1))); 4461 } 4462 if (array->IsDictionary()) return Handle<SeededNumberDictionary>::cast(array); 4463 4464 DCHECK(object->HasFastSmiOrObjectElements() || 4465 object->HasFastDoubleElements() || 4466 object->HasFastArgumentsElements()); 4467 // Compute the effective length and allocate a new backing store. 4468 int length = object->IsJSArray() 4469 ? Smi::cast(Handle<JSArray>::cast(object)->length())->value() 4470 : array->length(); 4471 int old_capacity = 0; 4472 int used_elements = 0; 4473 object->GetElementsCapacityAndUsage(&old_capacity, &used_elements); 4474 Handle<SeededNumberDictionary> dictionary = 4475 SeededNumberDictionary::New(isolate, used_elements); 4476 4477 dictionary = CopyFastElementsToDictionary(array, length, dictionary); 4478 4479 // Switch to using the dictionary as the backing storage for elements. 4480 if (is_arguments) { 4481 FixedArray::cast(object->elements())->set(1, *dictionary); 4482 } else { 4483 // Set the new map first to satify the elements type assert in 4484 // set_elements(). 
4485 Handle<Map> new_map = 4486 JSObject::GetElementsTransitionMap(object, DICTIONARY_ELEMENTS); 4487 4488 JSObject::MigrateToMap(object, new_map); 4489 object->set_elements(*dictionary); 4490 } 4491 4492 isolate->counters()->elements_to_dictionary()->Increment(); 4493 4494 #ifdef DEBUG 4495 if (FLAG_trace_normalization) { 4496 OFStream os(stdout); 4497 os << "Object elements have been normalized:\n"; 4498 object->Print(os); 4499 } 4500 #endif 4501 4502 DCHECK(object->HasDictionaryElements() || 4503 object->HasDictionaryArgumentsElements()); 4504 return dictionary; 4505 } 4506 4507 4508 static Smi* GenerateIdentityHash(Isolate* isolate) { 4509 int hash_value; 4510 int attempts = 0; 4511 do { 4512 // Generate a random 32-bit hash value but limit range to fit 4513 // within a smi. 4514 hash_value = isolate->random_number_generator()->NextInt() & Smi::kMaxValue; 4515 attempts++; 4516 } while (hash_value == 0 && attempts < 30); 4517 hash_value = hash_value != 0 ? hash_value : 1; // never return 0 4518 4519 return Smi::FromInt(hash_value); 4520 } 4521 4522 4523 void JSObject::SetIdentityHash(Handle<JSObject> object, Handle<Smi> hash) { 4524 DCHECK(!object->IsJSGlobalProxy()); 4525 Isolate* isolate = object->GetIsolate(); 4526 SetHiddenProperty(object, isolate->factory()->identity_hash_string(), hash); 4527 } 4528 4529 4530 template<typename ProxyType> 4531 static Handle<Smi> GetOrCreateIdentityHashHelper(Handle<ProxyType> proxy) { 4532 Isolate* isolate = proxy->GetIsolate(); 4533 4534 Handle<Object> maybe_hash(proxy->hash(), isolate); 4535 if (maybe_hash->IsSmi()) return Handle<Smi>::cast(maybe_hash); 4536 4537 Handle<Smi> hash(GenerateIdentityHash(isolate), isolate); 4538 proxy->set_hash(*hash); 4539 return hash; 4540 } 4541 4542 4543 Object* JSObject::GetIdentityHash() { 4544 DisallowHeapAllocation no_gc; 4545 Isolate* isolate = GetIsolate(); 4546 if (IsJSGlobalProxy()) { 4547 return JSGlobalProxy::cast(this)->hash(); 4548 } 4549 Object* stored_value = 4550 
GetHiddenProperty(isolate->factory()->identity_hash_string()); 4551 return stored_value->IsSmi() 4552 ? stored_value 4553 : isolate->heap()->undefined_value(); 4554 } 4555 4556 4557 Handle<Smi> JSObject::GetOrCreateIdentityHash(Handle<JSObject> object) { 4558 if (object->IsJSGlobalProxy()) { 4559 return GetOrCreateIdentityHashHelper(Handle<JSGlobalProxy>::cast(object)); 4560 } 4561 4562 Isolate* isolate = object->GetIsolate(); 4563 4564 Handle<Object> maybe_hash(object->GetIdentityHash(), isolate); 4565 if (maybe_hash->IsSmi()) return Handle<Smi>::cast(maybe_hash); 4566 4567 Handle<Smi> hash(GenerateIdentityHash(isolate), isolate); 4568 SetHiddenProperty(object, isolate->factory()->identity_hash_string(), hash); 4569 return hash; 4570 } 4571 4572 4573 Object* JSProxy::GetIdentityHash() { 4574 return this->hash(); 4575 } 4576 4577 4578 Handle<Smi> JSProxy::GetOrCreateIdentityHash(Handle<JSProxy> proxy) { 4579 return GetOrCreateIdentityHashHelper(proxy); 4580 } 4581 4582 4583 Object* JSObject::GetHiddenProperty(Handle<Name> key) { 4584 DisallowHeapAllocation no_gc; 4585 DCHECK(key->IsUniqueName()); 4586 if (IsJSGlobalProxy()) { 4587 // JSGlobalProxies store their hash internally. 4588 DCHECK(*key != GetHeap()->identity_hash_string()); 4589 // For a proxy, use the prototype as target object. 4590 PrototypeIterator iter(GetIsolate(), this); 4591 // If the proxy is detached, return undefined. 4592 if (iter.IsAtEnd()) return GetHeap()->the_hole_value(); 4593 DCHECK(iter.GetCurrent()->IsJSGlobalObject()); 4594 return JSObject::cast(iter.GetCurrent())->GetHiddenProperty(key); 4595 } 4596 DCHECK(!IsJSGlobalProxy()); 4597 Object* inline_value = GetHiddenPropertiesHashTable(); 4598 4599 if (inline_value->IsSmi()) { 4600 // Handle inline-stored identity hash. 
4601 if (*key == GetHeap()->identity_hash_string()) { 4602 return inline_value; 4603 } else { 4604 return GetHeap()->the_hole_value(); 4605 } 4606 } 4607 4608 if (inline_value->IsUndefined()) return GetHeap()->the_hole_value(); 4609 4610 ObjectHashTable* hashtable = ObjectHashTable::cast(inline_value); 4611 Object* entry = hashtable->Lookup(key); 4612 return entry; 4613 } 4614 4615 4616 Handle<Object> JSObject::SetHiddenProperty(Handle<JSObject> object, 4617 Handle<Name> key, 4618 Handle<Object> value) { 4619 Isolate* isolate = object->GetIsolate(); 4620 4621 DCHECK(key->IsUniqueName()); 4622 if (object->IsJSGlobalProxy()) { 4623 // JSGlobalProxies store their hash internally. 4624 DCHECK(*key != *isolate->factory()->identity_hash_string()); 4625 // For a proxy, use the prototype as target object. 4626 PrototypeIterator iter(isolate, object); 4627 // If the proxy is detached, return undefined. 4628 if (iter.IsAtEnd()) return isolate->factory()->undefined_value(); 4629 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); 4630 return SetHiddenProperty( 4631 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), key, 4632 value); 4633 } 4634 DCHECK(!object->IsJSGlobalProxy()); 4635 4636 Handle<Object> inline_value(object->GetHiddenPropertiesHashTable(), isolate); 4637 4638 // If there is no backing store yet, store the identity hash inline. 4639 if (value->IsSmi() && 4640 *key == *isolate->factory()->identity_hash_string() && 4641 (inline_value->IsUndefined() || inline_value->IsSmi())) { 4642 return JSObject::SetHiddenPropertiesHashTable(object, value); 4643 } 4644 4645 Handle<ObjectHashTable> hashtable = 4646 GetOrCreateHiddenPropertiesHashtable(object); 4647 4648 // If it was found, check if the key is already in the dictionary. 
4649 Handle<ObjectHashTable> new_table = ObjectHashTable::Put(hashtable, key, 4650 value); 4651 if (*new_table != *hashtable) { 4652 // If adding the key expanded the dictionary (i.e., Add returned a new 4653 // dictionary), store it back to the object. 4654 SetHiddenPropertiesHashTable(object, new_table); 4655 } 4656 4657 // Return this to mark success. 4658 return object; 4659 } 4660 4661 4662 void JSObject::DeleteHiddenProperty(Handle<JSObject> object, Handle<Name> key) { 4663 Isolate* isolate = object->GetIsolate(); 4664 DCHECK(key->IsUniqueName()); 4665 4666 if (object->IsJSGlobalProxy()) { 4667 PrototypeIterator iter(isolate, object); 4668 if (iter.IsAtEnd()) return; 4669 DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject()); 4670 return DeleteHiddenProperty( 4671 Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), key); 4672 } 4673 4674 Object* inline_value = object->GetHiddenPropertiesHashTable(); 4675 4676 // We never delete (inline-stored) identity hashes. 4677 DCHECK(*key != *isolate->factory()->identity_hash_string()); 4678 if (inline_value->IsUndefined() || inline_value->IsSmi()) return; 4679 4680 Handle<ObjectHashTable> hashtable(ObjectHashTable::cast(inline_value)); 4681 bool was_present = false; 4682 ObjectHashTable::Remove(hashtable, key, &was_present); 4683 } 4684 4685 4686 bool JSObject::HasHiddenProperties(Handle<JSObject> object) { 4687 Handle<Name> hidden = object->GetIsolate()->factory()->hidden_string(); 4688 LookupIterator it(object, hidden, LookupIterator::OWN_SKIP_INTERCEPTOR); 4689 Maybe<PropertyAttributes> maybe = GetPropertyAttributes(&it); 4690 // Cannot get an exception since the hidden_string isn't accessible to JS. 
  DCHECK(maybe.has_value);
  return maybe.value != ABSENT;
}


// Returns the raw value stored under the hidden string on this object:
// either an ObjectHashTable, an inline identity-hash Smi, or undefined when
// no hidden properties exist. Fast-properties objects are probed through the
// descriptor array; slow-mode objects go through a lookup.
Object* JSObject::GetHiddenPropertiesHashTable() {
  DCHECK(!IsJSGlobalProxy());
  if (HasFastProperties()) {
    // If the object has fast properties, check whether the first slot
    // in the descriptor array matches the hidden string. Since the
    // hidden strings hash code is zero (and no other name has hash
    // code zero) it will always occupy the first entry if present.
    DescriptorArray* descriptors = this->map()->instance_descriptors();
    if (descriptors->number_of_descriptors() > 0) {
      int sorted_index = descriptors->GetSortedKeyIndex(0);
      if (descriptors->GetKey(sorted_index) == GetHeap()->hidden_string() &&
          sorted_index < map()->NumberOfOwnDescriptors()) {
        DCHECK(descriptors->GetType(sorted_index) == FIELD);
        DCHECK(descriptors->GetDetails(sorted_index).representation().
               IsCompatibleForLoad(Representation::Tagged()));
        FieldIndex index = FieldIndex::ForDescriptor(this->map(),
                                                     sorted_index);
        return this->RawFastPropertyAt(index);
      } else {
        return GetHeap()->undefined_value();
      }
    } else {
      return GetHeap()->undefined_value();
    }
  } else {
    Isolate* isolate = GetIsolate();
    LookupIterator it(handle(this), isolate->factory()->hidden_string(),
                      LookupIterator::OWN_SKIP_INTERCEPTOR);
    // Access check is always skipped for the hidden string anyways.
    return *GetDataProperty(&it);
  }
}

// Returns the hidden-properties hash table for |object|, allocating one (with
// a small custom initial capacity) when it does not yet exist. A previously
// inline-stored identity hash is migrated into the new table.
Handle<ObjectHashTable> JSObject::GetOrCreateHiddenPropertiesHashtable(
    Handle<JSObject> object) {
  Isolate* isolate = object->GetIsolate();

  static const int kInitialCapacity = 4;
  Handle<Object> inline_value(object->GetHiddenPropertiesHashTable(), isolate);
  if (inline_value->IsHashTable()) {
    return Handle<ObjectHashTable>::cast(inline_value);
  }

  Handle<ObjectHashTable> hashtable = ObjectHashTable::New(
      isolate, kInitialCapacity, USE_CUSTOM_MINIMUM_CAPACITY);

  if (inline_value->IsSmi()) {
    // We were storing the identity hash inline and now allocated an actual
    // dictionary. Put the identity hash into the new dictionary.
    hashtable = ObjectHashTable::Put(hashtable,
                                     isolate->factory()->identity_hash_string(),
                                     inline_value);
  }

  SetHiddenPropertiesHashTable(object, hashtable);
  return hashtable;
}


// Installs |value| (a hash table or an inline identity-hash Smi) as the
// hidden-properties backing store, stored under the hidden string as a
// non-enumerable own property. Returns |object|.
Handle<Object> JSObject::SetHiddenPropertiesHashTable(Handle<JSObject> object,
                                                      Handle<Object> value) {
  DCHECK(!object->IsJSGlobalProxy());
  Isolate* isolate = object->GetIsolate();
  Handle<Name> name = isolate->factory()->hidden_string();
  SetOwnPropertyIgnoreAttributes(object, name, value, DONT_ENUM).Assert();
  return object;
}


// Invokes |holder|'s named-property deleter interceptor for |name|.
// Returns an empty MaybeHandle when the interceptor did not handle the
// deletion (no deleter, symbol key, or empty callback result); a scheduled
// exception also yields an empty result via the macro below.
MaybeHandle<Object> JSObject::DeletePropertyWithInterceptor(
    Handle<JSObject> holder, Handle<JSObject> receiver, Handle<Name> name) {
  Isolate* isolate = holder->GetIsolate();

  // TODO(rossberg): Support symbols in the API.
  if (name->IsSymbol()) return MaybeHandle<Object>();

  Handle<InterceptorInfo> interceptor(holder->GetNamedInterceptor());
  if (interceptor->deleter()->IsUndefined()) return MaybeHandle<Object>();

  v8::NamedPropertyDeleterCallback deleter =
      v8::ToCData<v8::NamedPropertyDeleterCallback>(interceptor->deleter());
  LOG(isolate,
      ApiNamedPropertyAccess("interceptor-named-delete", *holder, *name));
  PropertyCallbackArguments args(isolate, interceptor->data(), *receiver,
                                 *holder);
  v8::Handle<v8::Boolean> result =
      args.Call(deleter, v8::Utils::ToLocal(Handle<String>::cast(name)));
  RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
  if (result.IsEmpty()) return MaybeHandle<Object>();

  DCHECK(result->IsBoolean());
  Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
  result_internal->VerifyApiCallResultType();
  // Rebox CustomArguments::kReturnValueOffset before returning.
  return handle(*result_internal, isolate);
}


// Deletes element |index| via the indexed-property deleter interceptor when
// one is installed; falls back to the elements accessor (NORMAL_DELETION)
// when the interceptor declines (empty callback result).
MaybeHandle<Object> JSObject::DeleteElementWithInterceptor(
    Handle<JSObject> object,
    uint32_t index) {
  Isolate* isolate = object->GetIsolate();
  Factory* factory = isolate->factory();

  // Make sure that the top context does not change when doing
  // callbacks or interceptor calls.
  AssertNoContextChange ncc(isolate);

  Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
  if (interceptor->deleter()->IsUndefined()) return factory->false_value();
  v8::IndexedPropertyDeleterCallback deleter =
      v8::ToCData<v8::IndexedPropertyDeleterCallback>(interceptor->deleter());
  LOG(isolate,
      ApiIndexedPropertyAccess("interceptor-indexed-delete", *object, index));
  PropertyCallbackArguments args(
      isolate, interceptor->data(), *object, *object);
  v8::Handle<v8::Boolean> result = args.Call(deleter, index);
  RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
  if (!result.IsEmpty()) {
    DCHECK(result->IsBoolean());
    Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
    result_internal->VerifyApiCallResultType();
    // Rebox CustomArguments::kReturnValueOffset before returning.
    return handle(*result_internal, isolate);
  }
  // Interceptor did not handle it: perform the ordinary element deletion.
  MaybeHandle<Object> delete_result = object->GetElementsAccessor()->Delete(
      object, index, NORMAL_DELETION);
  return delete_result;
}


// Deletes element |index| from |object| honoring |mode| (normal / strict /
// force). Handles access checks, String wrapper objects, detached global
// proxies, and Object.observe change records. Returns a boolean-like result
// handle, or an empty handle when an exception is pending.
MaybeHandle<Object> JSObject::DeleteElement(Handle<JSObject> object,
                                            uint32_t index,
                                            DeleteMode mode) {
  Isolate* isolate = object->GetIsolate();
  Factory* factory = isolate->factory();

  // Check access rights if needed.
  if (object->IsAccessCheckNeeded() &&
      !isolate->MayIndexedAccess(object, index, v8::ACCESS_DELETE)) {
    isolate->ReportFailedAccessCheck(object, v8::ACCESS_DELETE);
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return factory->false_value();
  }

  if (object->IsStringObjectWithCharacterAt(index)) {
    if (mode == STRICT_DELETION) {
      // Deleting a non-configurable property in strict mode.
      Handle<Object> name = factory->NewNumberFromUint(index);
      Handle<Object> args[2] = { name, object };
      THROW_NEW_ERROR(isolate, NewTypeError("strict_delete_property",
                                            HandleVector(args, 2)),
                      Object);
    }
    return factory->false_value();
  }

  if (object->IsJSGlobalProxy()) {
    // Forward the deletion to the global object behind the proxy.
    PrototypeIterator iter(isolate, object);
    if (iter.IsAtEnd()) return factory->false_value();
    DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
    return DeleteElement(
        Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), index,
        mode);
  }

  // For observed objects, capture the old value (or the-hole for accessor
  // elements) so a change record can be enqueued after the deletion.
  Handle<Object> old_value;
  bool should_enqueue_change_record = false;
  if (object->map()->is_observed()) {
    Maybe<bool> maybe = HasOwnElement(object, index);
    if (!maybe.has_value) return MaybeHandle<Object>();
    should_enqueue_change_record = maybe.value;
    if (should_enqueue_change_record) {
      if (!GetOwnElementAccessorPair(object, index).is_null()) {
        old_value = Handle<Object>::cast(factory->the_hole_value());
      } else {
        old_value = Object::GetElement(
            isolate, object, index).ToHandleChecked();
      }
    }
  }

  // Skip interceptor if forcing deletion.
  MaybeHandle<Object> maybe_result;
  if (object->HasIndexedInterceptor() && mode != FORCE_DELETION) {
    maybe_result = DeleteElementWithInterceptor(object, index);
  } else {
    maybe_result = object->GetElementsAccessor()->Delete(object, index, mode);
  }
  Handle<Object> result;
  ASSIGN_RETURN_ON_EXCEPTION(isolate, result, maybe_result, Object);

  // Enqueue an Object.observe "delete" record only if the element is in fact
  // gone after the deletion attempt.
  if (should_enqueue_change_record) {
    Maybe<bool> maybe = HasOwnElement(object, index);
    if (!maybe.has_value) return MaybeHandle<Object>();
    if (!maybe.value) {
      Handle<String> name = factory->Uint32ToString(index);
      EnqueueChangeRecord(object, "delete", name, old_value);
    }
  }

  return result;
}


// Deletes named property |name| from |object| per [[Delete]] semantics
// (ECMA-262 8.6.2.5). Array-index names are routed to DeleteElement. Walks
// the lookup chain handling access checks, interceptors, and data/accessor
// properties; normalizes the holder before removing the entry.
MaybeHandle<Object> JSObject::DeleteProperty(Handle<JSObject> object,
                                             Handle<Name> name,
                                             DeleteMode delete_mode) {
  // ECMA-262, 3rd, 8.6.2.5
  DCHECK(name->IsName());

  uint32_t index = 0;
  if (name->AsArrayIndex(&index)) {
    return DeleteElement(object, index, delete_mode);
  }

  // Skip interceptors on FORCE_DELETION.
  LookupIterator::Configuration config =
      delete_mode == FORCE_DELETION ? LookupIterator::HIDDEN_SKIP_INTERCEPTOR
                                    : LookupIterator::HIDDEN;

  LookupIterator it(object, name, config);

  // Deleting the hidden-properties slot itself is never observable.
  bool is_observed = object->map()->is_observed() &&
                     *name != it.isolate()->heap()->hidden_string();
  Handle<Object> old_value = it.isolate()->factory()->the_hole_value();

  for (; it.IsFound(); it.Next()) {
    switch (it.state()) {
      case LookupIterator::JSPROXY:
      case LookupIterator::NOT_FOUND:
      case LookupIterator::TRANSITION:
        UNREACHABLE();
      case LookupIterator::ACCESS_CHECK:
        if (it.HasAccess(v8::ACCESS_DELETE)) break;
        it.isolate()->ReportFailedAccessCheck(it.GetHolder<JSObject>(),
                                              v8::ACCESS_DELETE);
        RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(it.isolate(), Object);
        return it.isolate()->factory()->false_value();
      case LookupIterator::INTERCEPTOR: {
        MaybeHandle<Object> maybe_result =
            JSObject::DeletePropertyWithInterceptor(it.GetHolder<JSObject>(),
                                                    object, it.name());
        // Delete with interceptor succeeded. Return result.
        if (!maybe_result.is_null()) return maybe_result;
        // An exception was thrown in the interceptor. Propagate.
        if (it.isolate()->has_pending_exception()) return maybe_result;
        break;
      }
      case LookupIterator::DATA:
        if (is_observed) {
          old_value = it.GetDataValue();
        }
      // Fall through.
      case LookupIterator::ACCESSOR: {
        if (delete_mode != FORCE_DELETION && !it.IsConfigurable()) {
          // Fail if the property is not configurable.
          if (delete_mode == STRICT_DELETION) {
            Handle<Object> args[2] = {name, object};
            THROW_NEW_ERROR(it.isolate(),
                            NewTypeError("strict_delete_property",
                                         HandleVector(args, arraysize(args))),
                            Object);
          }
          return it.isolate()->factory()->false_value();
        }

        PropertyNormalizationMode mode = object->map()->is_prototype_map()
                                             ? KEEP_INOBJECT_PROPERTIES
                                             : CLEAR_INOBJECT_PROPERTIES;
        Handle<JSObject> holder = it.GetHolder<JSObject>();
        // TODO(verwaest): Remove this temporary compatibility hack when blink
        // tests are updated.
        if (!holder.is_identical_to(object) &&
            !(object->IsJSGlobalProxy() && holder->IsJSGlobalObject())) {
          return it.isolate()->factory()->true_value();
        }
        // Removal only happens on dictionary-mode properties; normalize first.
        NormalizeProperties(holder, mode, 0);
        Handle<Object> result =
            DeleteNormalizedProperty(holder, name, delete_mode);
        ReoptimizeIfPrototype(holder);

        if (is_observed) {
          EnqueueChangeRecord(object, "delete", name, old_value);
        }

        return result;
      }
    }
  }

  // Property was not found: deletion trivially succeeds.
  return it.isolate()->factory()->true_value();
}


// Receiver-level dispatch: proxies go through their handler trap, everything
// else through JSObject::DeleteElement.
MaybeHandle<Object> JSReceiver::DeleteElement(Handle<JSReceiver> object,
                                              uint32_t index,
                                              DeleteMode mode) {
  if (object->IsJSProxy()) {
    return JSProxy::DeleteElementWithHandler(
        Handle<JSProxy>::cast(object), index, mode);
  }
  return JSObject::DeleteElement(Handle<JSObject>::cast(object), index, mode);
}


// Receiver-level dispatch: proxies go through their handler trap, everything
// else through JSObject::DeleteProperty.
MaybeHandle<Object> JSReceiver::DeleteProperty(Handle<JSReceiver> object,
                                              Handle<Name> name,
                                              DeleteMode mode) {
  if (object->IsJSProxy()) {
    return JSProxy::DeletePropertyWithHandler(
        Handle<JSProxy>::cast(object), name, mode);
  }
  return JSObject::DeleteProperty(Handle<JSObject>::cast(object), name, mode);
}


// Returns true when |object| occurs in the given elements backing store
// (fast object elements or a dictionary). Raw-pointer identity comparison;
// no allocation may happen here.
bool JSObject::ReferencesObjectFromElements(FixedArray* elements,
                                            ElementsKind kind,
                                            Object* object) {
  DCHECK(IsFastObjectElementsKind(kind) ||
         kind == DICTIONARY_ELEMENTS);
  if (IsFastObjectElementsKind(kind)) {
    // For JSArrays only the slots below length are live.
    int length = IsJSArray()
        ? Smi::cast(JSArray::cast(this)->length())->value()
        : elements->length();
    for (int i = 0; i < length; ++i) {
      Object* element = elements->get(i);
      if (!element->IsTheHole() && element == object) return true;
    }
  } else {
    Object* key =
        SeededNumberDictionary::cast(elements)->SlowReverseLookup(object);
    if (!key->IsUndefined()) return true;
  }
  return false;
}


// Check whether this object references another object.
// Scans the constructor, the prototype, named properties, elements, and (for
// functions) the context. Operates on raw pointers under a
// DisallowHeapAllocation scope — no allocation may occur during the scan.
bool JSObject::ReferencesObject(Object* obj) {
  Map* map_of_this = map();
  Heap* heap = GetHeap();
  DisallowHeapAllocation no_allocation;

  // Is the object the constructor for this object?
  if (map_of_this->constructor() == obj) {
    return true;
  }

  // Is the object the prototype for this object?
  if (map_of_this->prototype() == obj) {
    return true;
  }

  // Check if the object is among the named properties.
  Object* key = SlowReverseLookup(obj);
  if (!key->IsUndefined()) {
    return true;
  }

  // Check if the object is among the indexed properties.
  ElementsKind kind = GetElementsKind();
  switch (kind) {
    // Raw pixels and external arrays do not reference other
    // objects.
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                        \
    case EXTERNAL_##TYPE##_ELEMENTS:                                           \
    case TYPE##_ELEMENTS:                                                      \
      break;

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE

    case FAST_DOUBLE_ELEMENTS:
    case FAST_HOLEY_DOUBLE_ELEMENTS:
      break;
    case FAST_SMI_ELEMENTS:
    case FAST_HOLEY_SMI_ELEMENTS:
      break;
    case FAST_ELEMENTS:
    case FAST_HOLEY_ELEMENTS:
    case DICTIONARY_ELEMENTS: {
      FixedArray* elements = FixedArray::cast(this->elements());
      if (ReferencesObjectFromElements(elements, kind, obj)) return true;
      break;
    }
    case SLOPPY_ARGUMENTS_ELEMENTS: {
      FixedArray* parameter_map = FixedArray::cast(elements());
      // Check the mapped parameters.
      // Note: slots 0 and 1 of the parameter map hold context/arguments,
      // hence the scan starts at index 2.
      int length = parameter_map->length();
      for (int i = 2; i < length; ++i) {
        Object* value = parameter_map->get(i);
        if (!value->IsTheHole() && value == obj) return true;
      }
      // Check the arguments.
      FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
      kind = arguments->IsDictionary() ? DICTIONARY_ELEMENTS :
          FAST_HOLEY_ELEMENTS;
      if (ReferencesObjectFromElements(arguments, kind, obj)) return true;
      break;
    }
  }

  // For functions check the context.
  if (IsJSFunction()) {
    // Get the constructor function for arguments array.
    Map* arguments_map =
        heap->isolate()->context()->native_context()->sloppy_arguments_map();
    JSFunction* arguments_function =
        JSFunction::cast(arguments_map->constructor());

    // Get the context and don't check if it is the native context.
    JSFunction* f = JSFunction::cast(this);
    Context* context = f->context();
    if (context->IsNativeContext()) {
      return false;
    }

    // Check the non-special context slots.
    for (int i = Context::MIN_CONTEXT_SLOTS; i < context->length(); i++) {
      // Only check JS objects.
      if (context->get(i)->IsJSObject()) {
        JSObject* ctxobj = JSObject::cast(context->get(i));
        // If it is an arguments array check the content.
        if (ctxobj->map()->constructor() == arguments_function) {
          if (ctxobj->ReferencesObject(obj)) {
            return true;
          }
        } else if (ctxobj == obj) {
          return true;
        }
      }
    }

    // Check the context extension (if any) if it can have references.
    if (context->has_extension() && !context->IsCatchContext()) {
      // With harmony scoping, a JSFunction may have a global context.
      // TODO(mvstanton): walk into the ScopeInfo.
      if (FLAG_harmony_scoping && context->IsGlobalContext()) {
        return false;
      }

      return JSObject::cast(context->extension())->ReferencesObject(obj);
    }
  }

  // No references to object.
  return false;
}


// Implements Object.preventExtensions: normalizes elements to dictionary
// mode, marks them slow forever, and installs a non-extensible map copy.
// Fails (TypeError) on objects with external/typed array elements; returns
// false on a failed access check, |object| on success.
MaybeHandle<Object> JSObject::PreventExtensions(Handle<JSObject> object) {
  Isolate* isolate = object->GetIsolate();

  if (!object->map()->is_extensible()) return object;

  if (object->IsAccessCheckNeeded() &&
      !isolate->MayNamedAccess(
          object, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) {
    isolate->ReportFailedAccessCheck(object, v8::ACCESS_KEYS);
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return isolate->factory()->false_value();
  }

  if (object->IsJSGlobalProxy()) {
    PrototypeIterator iter(isolate, object);
    if (iter.IsAtEnd()) return object;
    DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
    return PreventExtensions(
        Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)));
  }

  // It's not possible to seal objects with external array elements
  if (object->HasExternalArrayElements() ||
      object->HasFixedTypedArrayElements()) {
    THROW_NEW_ERROR(isolate,
                    NewTypeError("cant_prevent_ext_external_array_elements",
                                 HandleVector(&object, 1)),
                    Object);
  }

  // If there are fast elements we normalize.
  Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
  DCHECK(object->HasDictionaryElements() ||
         object->HasDictionaryArgumentsElements());

  // Make sure that we never go back to fast case.
  dictionary->set_requires_slow_elements();

  // Do a map transition, other objects with this map may still
  // be extensible.
  // TODO(adamk): Extend the NormalizedMapCache to handle non-extensible maps.
  Handle<Map> new_map = Map::Copy(handle(object->map()));

  new_map->set_is_extensible(false);
  JSObject::MigrateToMap(object, new_map);
  DCHECK(!object->map()->is_extensible());

  if (object->map()->is_observed()) {
    EnqueueChangeRecord(object, "preventExtensions", Handle<Name>(),
                        isolate->factory()->the_hole_value());
  }
  return object;
}


// Marks every non-private key in |dictionary| DONT_DELETE, and READ_ONLY
// unless the entry is an accessor pair (READ_ONLY is invalid for JS
// getters/setters). Used by Freeze for both property and element
// dictionaries.
template<typename Dictionary>
static void FreezeDictionary(Dictionary* dictionary) {
  int capacity = dictionary->Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = dictionary->KeyAt(i);
    if (dictionary->IsKey(k) &&
        !(k->IsSymbol() && Symbol::cast(k)->is_private())) {
      PropertyDetails details = dictionary->DetailsAt(i);
      int attrs = DONT_DELETE;
      // READ_ONLY is an invalid attribute for JS setters/getters.
      if (details.type() == CALLBACKS) {
        Object* v = dictionary->ValueAt(i);
        if (v->IsPropertyCell()) v = PropertyCell::cast(v)->value();
        if (!v->IsAccessorPair()) attrs |= READ_ONLY;
      } else {
        attrs |= READ_ONLY;
      }
      details = details.CopyAddAttributes(
          static_cast<PropertyAttributes>(attrs));
      dictionary->DetailsAtPut(i, details);
    }
  }
}


// Implements Object.freeze: moves elements to a slow dictionary, installs a
// frozen non-extensible map (reusing an existing "frozen" map transition
// when available), and makes all properties/elements non-configurable and,
// where applicable, read-only. Fails (TypeError) on external/typed array
// elements; returns false on a failed access check, |object| on success.
MaybeHandle<Object> JSObject::Freeze(Handle<JSObject> object) {
  // Freezing sloppy arguments should be handled elsewhere.
  DCHECK(!object->HasSloppyArgumentsElements());
  DCHECK(!object->map()->is_observed());

  if (object->map()->is_frozen()) return object;

  Isolate* isolate = object->GetIsolate();
  if (object->IsAccessCheckNeeded() &&
      !isolate->MayNamedAccess(
          object, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) {
    isolate->ReportFailedAccessCheck(object, v8::ACCESS_KEYS);
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return isolate->factory()->false_value();
  }

  if (object->IsJSGlobalProxy()) {
    PrototypeIterator iter(isolate, object);
    if (iter.IsAtEnd()) return object;
    DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
    return Freeze(Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)));
  }

  // It's not possible to freeze objects with external array elements
  if (object->HasExternalArrayElements() ||
      object->HasFixedTypedArrayElements()) {
    THROW_NEW_ERROR(isolate,
                    NewTypeError("cant_prevent_ext_external_array_elements",
                                 HandleVector(&object, 1)),
                    Object);
  }

  Handle<SeededNumberDictionary> new_element_dictionary;
  if (!object->elements()->IsDictionary()) {
    int length = object->IsJSArray()
        ? Smi::cast(Handle<JSArray>::cast(object)->length())->value()
        : object->elements()->length();
    if (length > 0) {
      int capacity = 0;
      int used = 0;
      object->GetElementsCapacityAndUsage(&capacity, &used);
      new_element_dictionary = SeededNumberDictionary::New(isolate, used);

      // Move elements to a dictionary; avoid calling NormalizeElements to avoid
      // unnecessary transitions.
      new_element_dictionary = CopyFastElementsToDictionary(
          handle(object->elements()), length, new_element_dictionary);
    } else {
      // No existing elements, use a pre-allocated empty backing store
      new_element_dictionary =
          isolate->factory()->empty_slow_element_dictionary();
    }
  }

  Handle<Map> old_map(object->map(), isolate);
  int transition_index = old_map->SearchTransition(
      isolate->heap()->frozen_symbol());
  if (transition_index != TransitionArray::kNotFound) {
    // Fast path: reuse the cached frozen-map transition.
    Handle<Map> transition_map(old_map->GetTransition(transition_index));
    DCHECK(transition_map->has_dictionary_elements());
    DCHECK(transition_map->is_frozen());
    DCHECK(!transition_map->is_extensible());
    JSObject::MigrateToMap(object, transition_map);
  } else if (object->HasFastProperties() && old_map->CanHaveMoreTransitions()) {
    // Create a new descriptor array with fully-frozen properties
    Handle<Map> new_map = Map::CopyForFreeze(old_map);
    JSObject::MigrateToMap(object, new_map);
  } else {
    DCHECK(old_map->is_dictionary_map() || !old_map->is_prototype_map());
    // Slow path: need to normalize properties for safety
    NormalizeProperties(object, CLEAR_INOBJECT_PROPERTIES, 0);

    // Create a new map, since other objects with this map may be extensible.
    // TODO(adamk): Extend the NormalizedMapCache to handle non-extensible maps.
    Handle<Map> new_map = Map::Copy(handle(object->map()));
    new_map->freeze();
    new_map->set_is_extensible(false);
    new_map->set_elements_kind(DICTIONARY_ELEMENTS);
    JSObject::MigrateToMap(object, new_map);

    // Freeze dictionary-mode properties
    FreezeDictionary(object->property_dictionary());
  }

  DCHECK(object->map()->has_dictionary_elements());
  if (!new_element_dictionary.is_null()) {
    object->set_elements(*new_element_dictionary);
  }

  if (object->elements() != isolate->heap()->empty_slow_element_dictionary()) {
    SeededNumberDictionary* dictionary = object->element_dictionary();
    // Make sure we never go back to the fast case
    dictionary->set_requires_slow_elements();
    // Freeze all elements in the dictionary
    FreezeDictionary(dictionary);
  }

  return object;
}


// Transitions |object| to an "observed" map for Object.observe, reusing an
// existing observed-map transition when one is cached on the old map.
void JSObject::SetObserved(Handle<JSObject> object) {
  DCHECK(!object->IsJSGlobalProxy());
  DCHECK(!object->IsJSGlobalObject());
  Isolate* isolate = object->GetIsolate();
  Handle<Map> new_map;
  Handle<Map> old_map(object->map(), isolate);
  DCHECK(!old_map->is_observed());
  int transition_index = old_map->SearchTransition(
      isolate->heap()->observed_symbol());
  if (transition_index != TransitionArray::kNotFound) {
    new_map = handle(old_map->GetTransition(transition_index), isolate);
    DCHECK(new_map->is_observed());
  } else if (object->HasFastProperties() && old_map->CanHaveMoreTransitions()) {
    new_map = Map::CopyForObserved(old_map);
  } else {
    new_map = Map::Copy(old_map);
    new_map->set_is_observed();
  }
  JSObject::MigrateToMap(object, new_map);
}


// Reads the fast (in-object or properties-array) field at |index| and wraps
// it for reading according to |representation| (e.g. boxing raw doubles).
Handle<Object> JSObject::FastPropertyAt(Handle<JSObject> object,
                                        Representation representation,
                                        FieldIndex index) {
  Isolate* isolate = object->GetIsolate();
  Handle<Object> raw_value(object->RawFastPropertyAt(index), isolate);
  return Object::WrapForRead(isolate, raw_value, representation);
}


// Walks (and optionally deep-copies) a JSObject graph rooted at a literal
// boilerplate, tracking AllocationSites via the ContextObject (creation or
// usage context). When |copying| is false the walk only records sites.
template<class ContextObject>
class JSObjectWalkVisitor {
 public:
  JSObjectWalkVisitor(ContextObject* site_context, bool copying,
                      JSObject::DeepCopyHints hints)
      : site_context_(site_context),
        copying_(copying),
        hints_(hints) {}

  MUST_USE_RESULT MaybeHandle<JSObject> StructureWalk(Handle<JSObject> object);

 protected:
  // Recurses into a nested object value inside a fresh allocation-site scope.
  MUST_USE_RESULT inline MaybeHandle<JSObject> VisitElementOrProperty(
      Handle<JSObject> object,
      Handle<JSObject> value) {
    Handle<AllocationSite> current_site = site_context()->EnterNewScope();
    MaybeHandle<JSObject> copy_of_value = StructureWalk(value);
    site_context()->ExitScope(current_site, value);
    return copy_of_value;
  }

  inline ContextObject* site_context() { return site_context_; }
  inline Isolate* isolate() { return site_context()->isolate(); }

  inline bool copying() const { return copying_; }

 private:
  ContextObject* site_context_;
  const bool copying_;
  const JSObject::DeepCopyHints hints_;
};


// Core of DeepWalk/DeepCopy: visits |object|'s own properties and elements,
// recursing into nested JSObjects (unless kObjectIsShallow), and when
// |copying| produces a structural copy with allocation mementos. Returns an
// empty handle on stack overflow or propagated exception.
template <class ContextObject>
MaybeHandle<JSObject> JSObjectWalkVisitor<ContextObject>::StructureWalk(
    Handle<JSObject> object) {
  Isolate* isolate = this->isolate();
  bool copying = this->copying();
  bool shallow = hints_ == JSObject::kObjectIsShallow;

  if (!shallow) {
    // The walk recurses per nesting level; guard against runaway depth.
    StackLimitCheck check(isolate);

    if (check.HasOverflowed()) {
      isolate->StackOverflow();
      return MaybeHandle<JSObject>();
    }
  }

  if (object->map()->is_deprecated()) {
    JSObject::MigrateInstance(object);
  }

  Handle<JSObject> copy;
  if (copying) {
    Handle<AllocationSite> site_to_pass;
    if (site_context()->ShouldCreateMemento(object)) {
      site_to_pass = site_context()->current();
    }
    copy = isolate->factory()->CopyJSObjectWithAllocationSite(
        object, site_to_pass);
  } else {
    copy = object;
  }

  DCHECK(copying || copy.is_identical_to(object));

  ElementsKind kind = copy->GetElementsKind();
  if (copying && IsFastSmiOrObjectElementsKind(kind) &&
      FixedArray::cast(copy->elements())->map() ==
        isolate->heap()->fixed_cow_array_map()) {
    isolate->counters()->cow_arrays_created_runtime()->Increment();
  }

  if (!shallow) {
    HandleScope scope(isolate);

    // Deep copy own properties.
    if (copy->HasFastProperties()) {
      Handle<DescriptorArray> descriptors(copy->map()->instance_descriptors());
      int limit = copy->map()->NumberOfOwnDescriptors();
      for (int i = 0; i < limit; i++) {
        PropertyDetails details = descriptors->GetDetails(i);
        if (details.type() != FIELD) continue;
        FieldIndex index = FieldIndex::ForDescriptor(copy->map(), i);
        // Read from the original object; |copy| may share the same storage
        // when not copying.
        Handle<Object> value(object->RawFastPropertyAt(index), isolate);
        if (value->IsJSObject()) {
          ASSIGN_RETURN_ON_EXCEPTION(
              isolate, value,
              VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
              JSObject);
        } else {
          Representation representation = details.representation();
          value = Object::NewStorageFor(isolate, value, representation);
        }
        if (copying) {
          copy->FastPropertyAtPut(index, *value);
        }
      }
    } else {
      Handle<FixedArray> names =
          isolate->factory()->NewFixedArray(copy->NumberOfOwnProperties());
      copy->GetOwnPropertyNames(*names, 0);
      for (int i = 0; i < names->length(); i++) {
        DCHECK(names->get(i)->IsString());
        Handle<String> key_string(String::cast(names->get(i)));
        Maybe<PropertyAttributes> maybe =
            JSReceiver::GetOwnPropertyAttributes(copy, key_string);
        DCHECK(maybe.has_value);
        PropertyAttributes attributes = maybe.value;
        // Only deep copy fields from the object literal expression.
        // In particular, don't try to copy the length attribute of
        // an array.
        if (attributes != NONE) continue;
        Handle<Object> value =
            Object::GetProperty(copy, key_string).ToHandleChecked();
        if (value->IsJSObject()) {
          Handle<JSObject> result;
          ASSIGN_RETURN_ON_EXCEPTION(
              isolate, result,
              VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
              JSObject);
          if (copying) {
            // Creating object copy for literals. No strict mode needed.
            JSObject::SetProperty(copy, key_string, result, SLOPPY).Assert();
          }
        }
      }
    }

    // Deep copy own elements.
    // Pixel elements cannot be created using an object literal.
    DCHECK(!copy->HasExternalArrayElements());
    switch (kind) {
      case FAST_SMI_ELEMENTS:
      case FAST_ELEMENTS:
      case FAST_HOLEY_SMI_ELEMENTS:
      case FAST_HOLEY_ELEMENTS: {
        Handle<FixedArray> elements(FixedArray::cast(copy->elements()));
        if (elements->map() == isolate->heap()->fixed_cow_array_map()) {
          // COW arrays from literals never contain JSObjects, so there is
          // nothing to recurse into.
#ifdef DEBUG
          for (int i = 0; i < elements->length(); i++) {
            DCHECK(!elements->get(i)->IsJSObject());
          }
#endif
        } else {
          for (int i = 0; i < elements->length(); i++) {
            Handle<Object> value(elements->get(i), isolate);
            DCHECK(value->IsSmi() ||
                   value->IsTheHole() ||
                   (IsFastObjectElementsKind(copy->GetElementsKind())));
            if (value->IsJSObject()) {
              Handle<JSObject> result;
              ASSIGN_RETURN_ON_EXCEPTION(
                  isolate, result,
                  VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
                  JSObject);
              if (copying) {
                elements->set(i, *result);
              }
            }
          }
        }
        break;
      }
      case DICTIONARY_ELEMENTS: {
        Handle<SeededNumberDictionary> element_dictionary(
            copy->element_dictionary());
        int capacity = element_dictionary->Capacity();
        for (int i = 0; i < capacity; i++) {
          Object* k = element_dictionary->KeyAt(i);
          if (element_dictionary->IsKey(k)) {
            Handle<Object> value(element_dictionary->ValueAt(i), isolate);
            if (value->IsJSObject()) {
              Handle<JSObject> result;
              ASSIGN_RETURN_ON_EXCEPTION(
                  isolate, result,
                  VisitElementOrProperty(copy, Handle<JSObject>::cast(value)),
                  JSObject);
              if (copying) {
                element_dictionary->ValueAtPut(i, *result);
              }
            }
          }
        }
        break;
      }
      case SLOPPY_ARGUMENTS_ELEMENTS:
        UNIMPLEMENTED();
        break;


#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                        \
      case EXTERNAL_##TYPE##_ELEMENTS:                                         \
      case TYPE##_ELEMENTS:                                                    \

      TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE

      case FAST_DOUBLE_ELEMENTS:
      case FAST_HOLEY_DOUBLE_ELEMENTS:
        // No contained objects, nothing to do.
        break;
    }
  }

  return copy;
}


// Walks |object| without copying, creating AllocationSites for nested
// literals. The result, when non-empty, is |object| itself.
MaybeHandle<JSObject> JSObject::DeepWalk(
    Handle<JSObject> object,
    AllocationSiteCreationContext* site_context) {
  JSObjectWalkVisitor<AllocationSiteCreationContext> v(site_context, false,
                                                       kNoHints);
  MaybeHandle<JSObject> result = v.StructureWalk(object);
  Handle<JSObject> for_assert;
  DCHECK(!result.ToHandle(&for_assert) || for_assert.is_identical_to(object));
  return result;
}


// Deep-copies |object| (subject to |hints|), consuming previously recorded
// AllocationSites. The result, when non-empty, is a fresh object.
MaybeHandle<JSObject> JSObject::DeepCopy(
    Handle<JSObject> object,
    AllocationSiteUsageContext* site_context,
    DeepCopyHints hints) {
  JSObjectWalkVisitor<AllocationSiteUsageContext> v(site_context, true, hints);
  MaybeHandle<JSObject> copy = v.StructureWalk(object);
  Handle<JSObject> for_assert;
  DCHECK(!copy.ToHandle(&for_assert) || !for_assert.is_identical_to(object));
  return copy;
}


// Tests for the fast common case for property enumeration:
// - This object and all prototypes has an enum cache (which means that
//   it is no proxy, has no interceptors and needs no access checks).
// - This object has no elements.
// - No prototype has enumerable properties/elements.
bool JSReceiver::IsSimpleEnum() {
  for (PrototypeIterator iter(GetIsolate(), this,
                              PrototypeIterator::START_AT_RECEIVER);
       !iter.IsAtEnd(); iter.Advance()) {
    if (!iter.GetCurrent()->IsJSObject()) return false;
    JSObject* curr = JSObject::cast(iter.GetCurrent());
    int enum_length = curr->map()->EnumLength();
    // A map that never populated the enum cache cannot take the fast path.
    if (enum_length == kInvalidEnumCacheSentinel) return false;
    if (curr->IsAccessCheckNeeded()) return false;
    DCHECK(!curr->HasNamedInterceptor());
    DCHECK(!curr->HasIndexedInterceptor());
    if (curr->NumberOfEnumElements() > 0) return false;
    // Only the receiver itself may have enumerable properties; every
    // prototype must contribute none.
    if (curr != this && enum_length != 0) return false;
  }
  return true;
}


// Returns true if |key| should be filtered out of a key-enumeration result
// according to |filter| (symbols, private symbols, or strings).
static bool FilterKey(Object* key, PropertyAttributes filter) {
  if ((filter & SYMBOLIC) && key->IsSymbol()) {
    return true;
  }

  if ((filter & PRIVATE_SYMBOL) &&
      key->IsSymbol() && Symbol::cast(key)->is_private()) {
    return true;
  }

  if ((filter & STRING) && !key->IsSymbol()) {
    return true;
  }

  return false;
}


// Counts the descriptors of this map (all of them, or only the own ones,
// depending on |which|) whose attributes pass |filter| and whose keys are
// not excluded by FilterKey.
int Map::NumberOfDescribedProperties(DescriptorFlag which,
                                     PropertyAttributes filter) {
  int result = 0;
  DescriptorArray* descs = instance_descriptors();
  int limit = which == ALL_DESCRIPTORS
      ? descs->number_of_descriptors()
      : NumberOfOwnDescriptors();
  for (int i = 0; i < limit; i++) {
    if ((descs->GetDetails(i).attributes() & filter) == 0 &&
        !FilterKey(descs->GetKey(i), filter)) {
      result++;
    }
  }
  return result;
}


// Returns the first property field index not yet used by an own FIELD
// descriptor (i.e. one past the maximum field index currently in use).
int Map::NextFreePropertyIndex() {
  int max_index = -1;
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DescriptorArray* descs = instance_descriptors();
  for (int i = 0; i < number_of_own_descriptors; i++) {
    if (descs->GetType(i) == FIELD) {
      int current_index = descs->GetFieldIndex(i);
      if (current_index > max_index) max_index = current_index;
    }
  }
  return max_index + 1;
}


// Debug helper: key-enumeration results may only contain strings and
// numbers. Referenced from DCHECKs in GetKeys below.
static bool ContainsOnlyValidKeys(Handle<FixedArray> array) {
  int len = array->length();
  for (int i = 0; i < len; i++) {
    Object* e = array->get(i);
    if (!(e->IsString() || e->IsNumber())) return false;
  }
  return true;
}


// Returns |array| itself if it already has exactly |length| elements,
// otherwise a freshly allocated prefix copy of the first |length| elements.
static Handle<FixedArray> ReduceFixedArrayTo(
    Handle<FixedArray> array, int length) {
  DCHECK(array->length() >= length);
  if (array->length() == length) return array;

  Handle<FixedArray> new_array =
      array->GetIsolate()->factory()->NewFixedArray(length);
  for (int i = 0; i < length; ++i) new_array->set(i, array->get(i));
  return new_array;
}


// Collects the enumerable own property keys of |object|. For fast-mode
// objects the result is served from (and, if |cache_result| is set, written
// back to) the descriptor array's enum cache; for dictionary-mode objects
// the keys are copied out of the property dictionary.
static Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object,
                                              bool cache_result) {
  Isolate* isolate = object->GetIsolate();
  if (object->HasFastProperties()) {
    int own_property_count = object->map()->EnumLength();
    // If the enum length of the given map is set to kInvalidEnumCache, this
    // means that the map itself has never used the present enum cache. The
    // first step to using the cache is to set the enum length of the map by
    // counting the number of own descriptors that are not DONT_ENUM or
    // SYMBOLIC.
    if (own_property_count == kInvalidEnumCacheSentinel) {
      own_property_count = object->map()->NumberOfDescribedProperties(
          OWN_DESCRIPTORS, DONT_SHOW);
    } else {
      DCHECK(own_property_count == object->map()->NumberOfDescribedProperties(
          OWN_DESCRIPTORS, DONT_SHOW));
    }

    if (object->map()->instance_descriptors()->HasEnumCache()) {
      DescriptorArray* desc = object->map()->instance_descriptors();
      Handle<FixedArray> keys(desc->GetEnumCache(), isolate);

      // In case the number of properties required in the enum are actually
      // present, we can reuse the enum cache. Otherwise, this means that the
      // enum cache was generated for a previous (smaller) version of the
      // Descriptor Array. In that case we regenerate the enum cache.
      if (own_property_count <= keys->length()) {
        if (cache_result) object->map()->SetEnumLength(own_property_count);
        isolate->counters()->enum_cache_hits()->Increment();
        return ReduceFixedArrayTo(keys, own_property_count);
      }
    }

    Handle<Map> map(object->map());

    if (map->instance_descriptors()->IsEmpty()) {
      isolate->counters()->enum_cache_hits()->Increment();
      if (cache_result) map->SetEnumLength(0);
      return isolate->factory()->empty_fixed_array();
    }

    isolate->counters()->enum_cache_misses()->Increment();

    // |storage| receives the keys; |indices| receives the corresponding
    // load-by-field indices, and is dropped (reset to a null handle) as soon
    // as a non-FIELD descriptor is encountered.
    Handle<FixedArray> storage = isolate->factory()->NewFixedArray(
        own_property_count);
    Handle<FixedArray> indices = isolate->factory()->NewFixedArray(
        own_property_count);

    Handle<DescriptorArray> descs =
        Handle<DescriptorArray>(object->map()->instance_descriptors(), isolate);

    int size = map->NumberOfOwnDescriptors();
    int index = 0;

    for (int i = 0; i < size; i++) {
      PropertyDetails details = descs->GetDetails(i);
      Object* key = descs->GetKey(i);
      if (!(details.IsDontEnum() || key->IsSymbol())) {
        storage->set(index, key);
        if (!indices.is_null()) {
          if (details.type() != FIELD) {
            indices = Handle<FixedArray>();
          } else {
            FieldIndex field_index = FieldIndex::ForDescriptor(*map, i);
            int load_by_field_index = field_index.GetLoadByFieldIndex();
            indices->set(index, Smi::FromInt(load_by_field_index));
          }
        }
        index++;
      }
    }
    DCHECK(index == storage->length());

    Handle<FixedArray> bridge_storage =
        isolate->factory()->NewFixedArray(
            DescriptorArray::kEnumCacheBridgeLength);
    // Re-read the descriptor array through the map only after the allocation
    // above, since the allocation may have triggered GC.
    DescriptorArray* desc = object->map()->instance_descriptors();
    desc->SetEnumCache(*bridge_storage,
                       *storage,
                       indices.is_null() ? Object::cast(Smi::FromInt(0))
                                         : Object::cast(*indices));
    if (cache_result) {
      object->map()->SetEnumLength(own_property_count);
    }
    return storage;
  } else {
    // Slow (dictionary) properties: copy the enumerable keys out of the
    // property dictionary.
    Handle<NameDictionary> dictionary(object->property_dictionary());
    int length = dictionary->NumberOfEnumElements();
    if (length == 0) {
      return Handle<FixedArray>(isolate->heap()->empty_fixed_array());
    }
    Handle<FixedArray> storage = isolate->factory()->NewFixedArray(length);
    dictionary->CopyEnumKeysTo(*storage);
    return storage;
  }
}


// Collects the enumerable keys of |object| — element keys first, then
// property keys, including interceptor- and proxy-provided keys — walking
// the prototype chain unless |type| is OWN_ONLY.
MaybeHandle<FixedArray> JSReceiver::GetKeys(Handle<JSReceiver> object,
                                            KeyCollectionType type) {
  // ContainsOnlyValidKeys is only referenced from DCHECKs below; USE() keeps
  // release builds (where DCHECK compiles away) from warning about it.
  USE(ContainsOnlyValidKeys);
  Isolate* isolate = object->GetIsolate();
  Handle<FixedArray> content = isolate->factory()->empty_fixed_array();
  Handle<JSFunction> arguments_function(
      JSFunction::cast(isolate->sloppy_arguments_map()->constructor()));

  // Only collect keys if access is permitted.
  for (PrototypeIterator iter(isolate, object,
                              PrototypeIterator::START_AT_RECEIVER);
       !iter.IsAtEnd(); iter.Advance()) {
    if (PrototypeIterator::GetCurrent(iter)->IsJSProxy()) {
      // Proxies delegate key collection to the JS-level proxy_enumerate
      // helper and terminate the prototype walk.
      Handle<JSProxy> proxy(JSProxy::cast(*PrototypeIterator::GetCurrent(iter)),
                            isolate);
      Handle<Object> args[] = { proxy };
      Handle<Object> names;
      ASSIGN_RETURN_ON_EXCEPTION(
          isolate, names,
          Execution::Call(isolate,
                          isolate->proxy_enumerate(),
                          object,
                          arraysize(args),
                          args),
          FixedArray);
      ASSIGN_RETURN_ON_EXCEPTION(
          isolate, content,
          FixedArray::AddKeysFromArrayLike(
              content, Handle<JSObject>::cast(names)),
          FixedArray);
      break;
    }

    Handle<JSObject> current =
        Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter));

    // Check access rights if required.
    if (current->IsAccessCheckNeeded() &&
        !isolate->MayNamedAccess(
            current, isolate->factory()->undefined_value(), v8::ACCESS_KEYS)) {
      isolate->ReportFailedAccessCheck(current, v8::ACCESS_KEYS);
      RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, FixedArray);
      break;
    }

    // Compute the element keys.
    Handle<FixedArray> element_keys =
        isolate->factory()->NewFixedArray(current->NumberOfEnumElements());
    current->GetEnumElementKeys(*element_keys);
    ASSIGN_RETURN_ON_EXCEPTION(
        isolate, content,
        FixedArray::UnionOfKeys(content, element_keys),
        FixedArray);
    DCHECK(ContainsOnlyValidKeys(content));

    // Add the element keys from the interceptor.
    if (current->HasIndexedInterceptor()) {
      Handle<JSObject> result;
      if (JSObject::GetKeysForIndexedInterceptor(
              current, object).ToHandle(&result)) {
        ASSIGN_RETURN_ON_EXCEPTION(
            isolate, content,
            FixedArray::AddKeysFromArrayLike(content, result),
            FixedArray);
      }
      DCHECK(ContainsOnlyValidKeys(content));
    }

    // We can cache the computed property keys if access checks are
    // not needed and no interceptors are involved.
    //
    // We do not use the cache if the object has elements and
    // therefore it does not make sense to cache the property names
    // for arguments objects.  Arguments objects will always have
    // elements.
    // Wrapped strings have elements, but don't have an elements
    // array or dictionary.  So the fast inline test for whether to
    // use the cache says yes, so we should not create a cache.
    bool cache_enum_keys =
        ((current->map()->constructor() != *arguments_function) &&
         !current->IsJSValue() &&
         !current->IsAccessCheckNeeded() &&
         !current->HasNamedInterceptor() &&
         !current->HasIndexedInterceptor());
    // Compute the property keys and cache them if possible.
    ASSIGN_RETURN_ON_EXCEPTION(
        isolate, content,
        FixedArray::UnionOfKeys(
            content, GetEnumPropertyKeys(current, cache_enum_keys)),
        FixedArray);
    DCHECK(ContainsOnlyValidKeys(content));

    // Add the property keys from the interceptor.
    if (current->HasNamedInterceptor()) {
      Handle<JSObject> result;
      if (JSObject::GetKeysForNamedInterceptor(
              current, object).ToHandle(&result)) {
        ASSIGN_RETURN_ON_EXCEPTION(
            isolate, content,
            FixedArray::AddKeysFromArrayLike(content, result),
            FixedArray);
      }
      DCHECK(ContainsOnlyValidKeys(content));
    }

    // If we only want own properties we bail out after the first
    // iteration.
    if (type == OWN_ONLY) break;
  }
  return content;
}


// Try to update an accessor in an elements dictionary. Return true if the
// update succeeded, and false otherwise.
static bool UpdateGetterSetterInDictionary(
    SeededNumberDictionary* dictionary,
    uint32_t index,
    Object* getter,
    Object* setter,
    PropertyAttributes attributes) {
  int entry = dictionary->FindEntry(index);
  if (entry != SeededNumberDictionary::kNotFound) {
    Object* result = dictionary->ValueAt(entry);
    PropertyDetails details = dictionary->DetailsAt(entry);
    if (details.type() == CALLBACKS && result->IsAccessorPair()) {
      DCHECK(details.IsConfigurable());
      // Refresh the attributes in place if they changed; the existing
      // AccessorPair is reused and only its components are replaced.
      if (details.attributes() != attributes) {
        dictionary->DetailsAtPut(
            entry,
            PropertyDetails(attributes, CALLBACKS, index));
      }
      AccessorPair::cast(result)->SetComponents(getter, setter);
      return true;
    }
  }
  return false;
}


// Installs a getter/setter pair for element |index| on |object|, reusing an
// existing AccessorPair in a dictionary backing store when possible.
void JSObject::DefineElementAccessor(Handle<JSObject> object,
                                     uint32_t index,
                                     Handle<Object> getter,
                                     Handle<Object> setter,
                                     PropertyAttributes attributes) {
  switch (object->GetElementsKind()) {
    case FAST_SMI_ELEMENTS:
    case FAST_ELEMENTS:
    case FAST_DOUBLE_ELEMENTS:
    case FAST_HOLEY_SMI_ELEMENTS:
    case FAST_HOLEY_ELEMENTS:
    case FAST_HOLEY_DOUBLE_ELEMENTS:
      break;

#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                        \
    case EXTERNAL_##TYPE##_ELEMENTS:                                           \
    case TYPE##_ELEMENTS:                                                      \

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
      // Ignore getters and setters on pixel and external array elements.
      return;

    case DICTIONARY_ELEMENTS:
      if (UpdateGetterSetterInDictionary(object->element_dictionary(),
                                         index,
                                         *getter,
                                         *setter,
                                         attributes)) {
        return;
      }
      break;
    case SLOPPY_ARGUMENTS_ELEMENTS: {
      // Ascertain whether we have read-only properties or an existing
      // getter/setter pair in an arguments elements dictionary backing
      // store.
      // NOTE(review): slot 1 of the parameter map holds the arguments
      // backing store (see get(1) below); element |index| aliases slot
      // |index + 2| — presumably slot 0 holds the context.
      FixedArray* parameter_map = FixedArray::cast(object->elements());
      uint32_t length = parameter_map->length();
      Object* probe =
          index < (length - 2) ? parameter_map->get(index + 2) : NULL;
      if (probe == NULL || probe->IsTheHole()) {
        FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
        if (arguments->IsDictionary()) {
          SeededNumberDictionary* dictionary =
              SeededNumberDictionary::cast(arguments);
          if (UpdateGetterSetterInDictionary(dictionary,
                                             index,
                                             *getter,
                                             *setter,
                                             attributes)) {
            return;
          }
        }
      }
      break;
    }
  }

  // No reusable accessor pair found: allocate a fresh one and install it via
  // the generic element-callback path (normalizes elements as needed).
  Isolate* isolate = object->GetIsolate();
  Handle<AccessorPair> accessors = isolate->factory()->NewAccessorPair();
  accessors->SetComponents(*getter, *setter);

  SetElementCallback(object, index, accessors, attributes);
}


// Returns true if this map's own elements are not dictionary-mode but some
// object on its prototype chain has dictionary-mode elements. Conservatively
// returns true when a proxy is encountered.
bool Map::DictionaryElementsInPrototypeChainOnly() {
  if (IsDictionaryElementsKind(elements_kind())) {
    return false;
  }

  for (PrototypeIterator iter(this); !iter.IsAtEnd(); iter.Advance()) {
    if (iter.GetCurrent()->IsJSProxy()) {
      // Be conservative, don't walk into proxies.
      return true;
    }

    if (IsDictionaryElementsKind(
            JSObject::cast(iter.GetCurrent())->map()->elements_kind())) {
      return true;
    }
  }

  return false;
}


// Stores |structure| (an accessor) as a CALLBACKS property for element
// |index|, normalizing the elements backing store to a dictionary first.
void JSObject::SetElementCallback(Handle<JSObject> object,
                                  uint32_t index,
                                  Handle<Object> structure,
                                  PropertyAttributes attributes) {
  Heap* heap = object->GetHeap();
  PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);

  // Normalize elements to make this operation simple.
  bool had_dictionary_elements = object->HasDictionaryElements();
  Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
  DCHECK(object->HasDictionaryElements() ||
         object->HasDictionaryArgumentsElements());
  // Update the dictionary with the new CALLBACKS property.
  dictionary = SeededNumberDictionary::Set(dictionary, index, structure,
                                           details);
  dictionary->set_requires_slow_elements();

  // Update the dictionary backing store on the object.
  if (object->elements()->map() == heap->sloppy_arguments_elements_map()) {
    // Also delete any parameter alias.
    //
    // TODO(kmillikin): when deleting the last parameter alias we could
    // switch to a direct backing store without the parameter map.  This
    // would allow GC of the context.
    FixedArray* parameter_map = FixedArray::cast(object->elements());
    if (index < static_cast<uint32_t>(parameter_map->length()) - 2) {
      parameter_map->set(index + 2, heap->the_hole_value());
    }
    parameter_map->set(1, *dictionary);
  } else {
    object->set_elements(*dictionary);

    if (!had_dictionary_elements) {
      // KeyedStoreICs (at least the non-generic ones) need a reset.
      heap->ClearAllICsByKind(Code::KEYED_STORE_IC);
    }
  }
}


// Stores |structure| (an accessor) as a CALLBACKS property named |name|,
// normalizing the object's properties to dictionary mode first.
void JSObject::SetPropertyCallback(Handle<JSObject> object,
                                   Handle<Name> name,
                                   Handle<Object> structure,
                                   PropertyAttributes attributes) {
  // Prototype maps keep their in-object properties so existing fast-path
  // assumptions about prototypes stay valid.
  PropertyNormalizationMode mode = object->map()->is_prototype_map()
                                       ? KEEP_INOBJECT_PROPERTIES
                                       : CLEAR_INOBJECT_PROPERTIES;
  // Normalize object to make this operation simple.
  NormalizeProperties(object, mode, 0);

  // For the global object allocate a new map to invalidate the global inline
  // caches which have a global property cell reference directly in the code.
  if (object->IsGlobalObject()) {
    Handle<Map> new_map = Map::CopyDropDescriptors(handle(object->map()));
    DCHECK(new_map->is_dictionary_map());
    JSObject::MigrateToMap(object, new_map);

    // When running crankshaft, changing the map is not enough. We
    // need to deoptimize all functions that rely on this global
    // object.
    Deoptimizer::DeoptimizeGlobalObject(*object);
  }

  // Update the dictionary with the new CALLBACKS property.
  PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);
  SetNormalizedProperty(object, name, structure, details);

  ReoptimizeIfPrototype(object);
}


// Defines a JavaScript accessor (getter/setter pair) for |name| on |object|,
// handling access checks, global proxies, Object.observe notifications, and
// the element-index case. Returns undefined on success and on (reported)
// access failure without a scheduled exception.
MaybeHandle<Object> JSObject::DefineAccessor(Handle<JSObject> object,
                                             Handle<Name> name,
                                             Handle<Object> getter,
                                             Handle<Object> setter,
                                             PropertyAttributes attributes) {
  Isolate* isolate = object->GetIsolate();
  // Check access rights if needed.
  if (object->IsAccessCheckNeeded() &&
      !isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) {
    isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET);
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return isolate->factory()->undefined_value();
  }

  // Global proxies forward the definition to the actual global object.
  if (object->IsJSGlobalProxy()) {
    PrototypeIterator iter(isolate, object);
    if (iter.IsAtEnd()) return isolate->factory()->undefined_value();
    DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
    DefineAccessor(Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)),
                   name, getter, setter, attributes);
    return isolate->factory()->undefined_value();
  }

  // Make sure that the top context does not change when doing callbacks or
  // interceptor calls.
  AssertNoContextChange ncc(isolate);

  // Try to flatten before operating on the string.
  if (name->IsString()) name = String::Flatten(Handle<String>::cast(name));

  uint32_t index = 0;
  bool is_element = name->AsArrayIndex(&index);

  // For observed objects, capture whether the property pre-existed and its
  // old value so a change record can be enqueued afterwards.
  Handle<Object> old_value = isolate->factory()->the_hole_value();
  bool is_observed = object->map()->is_observed() &&
                     *name != isolate->heap()->hidden_string();
  bool preexists = false;
  if (is_observed) {
    if (is_element) {
      Maybe<bool> maybe = HasOwnElement(object, index);
      // Workaround for a GCC 4.4.3 bug which leads to "preexists may be used
      // uninitialized in this function".
      if (!maybe.has_value) {
        DCHECK(false);
        return isolate->factory()->undefined_value();
      }
      preexists = maybe.value;
      if (preexists && GetOwnElementAccessorPair(object, index).is_null()) {
        old_value =
            Object::GetElement(isolate, object, index).ToHandleChecked();
      }
    } else {
      LookupIterator it(object, name, LookupIterator::HIDDEN_SKIP_INTERCEPTOR);
      CHECK(GetPropertyAttributes(&it).has_value);
      preexists = it.IsFound();
      if (preexists && (it.state() == LookupIterator::DATA ||
                        it.GetAccessors()->IsAccessorInfo())) {
        old_value = GetProperty(&it).ToHandleChecked();
      }
    }
  }

  if (is_element) {
    DefineElementAccessor(object, index, getter, setter, attributes);
  } else {
    DCHECK(getter->IsSpecFunction() || getter->IsUndefined() ||
           getter->IsNull());
    DCHECK(setter->IsSpecFunction() || setter->IsUndefined() ||
           setter->IsNull());
    // At least one of the accessors needs to be a new value.
    DCHECK(!getter->IsNull() || !setter->IsNull());
    LookupIterator it(object, name, LookupIterator::OWN_SKIP_INTERCEPTOR);
    if (it.state() == LookupIterator::ACCESS_CHECK) {
      // We already did an access check before. We do have access.
      it.Next();
    }
    // Null means "leave that component unchanged".
    if (!getter->IsNull()) {
      it.TransitionToAccessorProperty(ACCESSOR_GETTER, getter, attributes);
    }
    if (!setter->IsNull()) {
      it.TransitionToAccessorProperty(ACCESSOR_SETTER, setter, attributes);
    }
  }

  if (is_observed) {
    const char* type = preexists ? "reconfigure" : "add";
    EnqueueChangeRecord(object, type, name, old_value);
  }

  return isolate->factory()->undefined_value();
}


// Installs a native AccessorInfo callback on |object| under the name stored
// in |info|. Returns |object| on success, undefined when the accessor cannot
// be installed (e.g. on JSArray indices or non-configurable properties).
MaybeHandle<Object> JSObject::SetAccessor(Handle<JSObject> object,
                                          Handle<AccessorInfo> info) {
  Isolate* isolate = object->GetIsolate();
  Factory* factory = isolate->factory();
  Handle<Name> name(Name::cast(info->name()));

  // Check access rights if needed.
  if (object->IsAccessCheckNeeded() &&
      !isolate->MayNamedAccess(object, name, v8::ACCESS_SET)) {
    isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET);
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return factory->undefined_value();
  }

  // Global proxies forward to the actual global object.
  if (object->IsJSGlobalProxy()) {
    PrototypeIterator iter(isolate, object);
    if (iter.IsAtEnd()) return object;
    DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
    return SetAccessor(
        Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), info);
  }

  // Make sure that the top context does not change when doing callbacks or
  // interceptor calls.
  AssertNoContextChange ncc(isolate);

  // Try to flatten before operating on the string.
  if (name->IsString()) name = String::Flatten(Handle<String>::cast(name));

  uint32_t index = 0;
  bool is_element = name->AsArrayIndex(&index);

  if (is_element) {
    if (object->IsJSArray()) return factory->undefined_value();

    // Accessors overwrite previous callbacks (cf. with getters/setters).
    switch (object->GetElementsKind()) {
      case FAST_SMI_ELEMENTS:
      case FAST_ELEMENTS:
      case FAST_DOUBLE_ELEMENTS:
      case FAST_HOLEY_SMI_ELEMENTS:
      case FAST_HOLEY_ELEMENTS:
      case FAST_HOLEY_DOUBLE_ELEMENTS:
        break;

#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                        \
      case EXTERNAL_##TYPE##_ELEMENTS:                                         \
      case TYPE##_ELEMENTS:                                                    \

      TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
        // Ignore getters and setters on pixel and external array
        // elements.
        return factory->undefined_value();

      case DICTIONARY_ELEMENTS:
        break;
      case SLOPPY_ARGUMENTS_ELEMENTS:
        UNIMPLEMENTED();
        break;
    }

    SetElementCallback(object, index, info, info->property_attributes());
  } else {
    // Lookup the name.
    LookupIterator it(object, name, LookupIterator::HIDDEN_SKIP_INTERCEPTOR);
    CHECK(GetPropertyAttributes(&it).has_value);
    // ES5 forbids turning a property into an accessor if it's not
    // configurable. See 8.6.1 (Table 5).
    if (it.IsFound() && (it.IsReadOnly() || !it.IsConfigurable())) {
      return factory->undefined_value();
    }

    SetPropertyCallback(object, name, info, info->property_attributes());
  }

  return object;
}


// Looks up the getter or setter (per |component|) for |name| along the
// prototype chain of |object|. Returns undefined when no AccessorPair is
// found or when an access check fails.
MaybeHandle<Object> JSObject::GetAccessor(Handle<JSObject> object,
                                          Handle<Name> name,
                                          AccessorComponent component) {
  Isolate* isolate = object->GetIsolate();

  // Make sure that the top context does not change when doing callbacks or
  // interceptor calls.
  AssertNoContextChange ncc(isolate);

  // Make the lookup and include prototypes.
  uint32_t index = 0;
  if (name->AsArrayIndex(&index)) {
    // Element accessors only live in dictionary-mode element stores, so the
    // chain walk only inspects objects with dictionary elements.
    for (PrototypeIterator iter(isolate, object,
                                PrototypeIterator::START_AT_RECEIVER);
         !iter.IsAtEnd(); iter.Advance()) {
      Handle<Object> current = PrototypeIterator::GetCurrent(iter);
      // Check access rights if needed.
      if (current->IsAccessCheckNeeded() &&
          !isolate->MayNamedAccess(Handle<JSObject>::cast(current), name,
                                   v8::ACCESS_HAS)) {
        isolate->ReportFailedAccessCheck(Handle<JSObject>::cast(current),
                                         v8::ACCESS_HAS);
        RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
        return isolate->factory()->undefined_value();
      }

      if (current->IsJSObject() &&
          Handle<JSObject>::cast(current)->HasDictionaryElements()) {
        JSObject* js_object = JSObject::cast(*current);
        SeededNumberDictionary* dictionary = js_object->element_dictionary();
        int entry = dictionary->FindEntry(index);
        if (entry != SeededNumberDictionary::kNotFound) {
          Object* element = dictionary->ValueAt(entry);
          if (dictionary->DetailsAt(entry).type() == CALLBACKS &&
              element->IsAccessorPair()) {
            return handle(AccessorPair::cast(element)->GetComponent(component),
                          isolate);
          }
        }
      }
    }
  } else {
    LookupIterator it(object, name,
                      LookupIterator::PROTOTYPE_CHAIN_SKIP_INTERCEPTOR);
    for (; it.IsFound(); it.Next()) {
      switch (it.state()) {
        case LookupIterator::INTERCEPTOR:
        case LookupIterator::NOT_FOUND:
        case LookupIterator::TRANSITION:
          UNREACHABLE();

        case LookupIterator::ACCESS_CHECK:
          if (it.HasAccess(v8::ACCESS_HAS)) continue;
          isolate->ReportFailedAccessCheck(it.GetHolder<JSObject>(),
                                           v8::ACCESS_HAS);
          RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
          return isolate->factory()->undefined_value();

        case LookupIterator::JSPROXY:
          return isolate->factory()->undefined_value();

        case LookupIterator::DATA:
          continue;
        case LookupIterator::ACCESSOR: {
          Handle<Object> maybe_pair = it.GetAccessors();
          if (maybe_pair->IsAccessorPair()) {
            return handle(
                AccessorPair::cast(*maybe_pair)->GetComponent(component),
                isolate);
          }
        }
      }
    }
  }
  return isolate->factory()->undefined_value();
}


// Finds the name of an own property whose current value is |value|, or
// undefined if there is none. For double fields the comparison is numeric
// (the stored value is a boxed MutableHeapNumber, not the same object).
Object* JSObject::SlowReverseLookup(Object* value) {
  if (HasFastProperties()) {
    int number_of_own_descriptors = map()->NumberOfOwnDescriptors();
    DescriptorArray* descs = map()->instance_descriptors();
    for (int i = 0; i < number_of_own_descriptors; i++) {
      if (descs->GetType(i) == FIELD) {
        Object* property =
            RawFastPropertyAt(FieldIndex::ForDescriptor(map(), i));
        if (descs->GetDetails(i).representation().IsDouble()) {
          DCHECK(property->IsMutableHeapNumber());
          if (value->IsNumber() && property->Number() == value->Number()) {
            return descs->GetKey(i);
          }
        } else if (property == value) {
          return descs->GetKey(i);
        }
      } else if (descs->GetType(i) == CONSTANT) {
        if (descs->GetConstant(i) == value) {
          return descs->GetKey(i);
        }
      }
    }
    return GetHeap()->undefined_value();
  } else {
    return property_dictionary()->SlowReverseLookup(value);
  }
}


// Allocates a new map copying |map|'s type, prototype, constructor, and bit
// fields, but with descriptor-related bit_field3 state reset: the copy owns
// its (zero) descriptors, has an invalid enum cache, and is not deprecated.
Handle<Map> Map::RawCopy(Handle<Map> map, int instance_size) {
  Handle<Map> result = map->GetIsolate()->factory()->NewMap(
      map->instance_type(), instance_size);
  result->set_prototype(map->prototype());
  result->set_constructor(map->constructor());
  result->set_bit_field(map->bit_field());
  result->set_bit_field2(map->bit_field2());
  int new_bit_field3 = map->bit_field3();
  new_bit_field3 = OwnsDescriptors::update(new_bit_field3, true);
  new_bit_field3 = NumberOfOwnDescriptorsBits::update(new_bit_field3, 0);
  new_bit_field3 = EnumLengthBits::update(new_bit_field3,
                                          kInvalidEnumCacheSentinel);
  new_bit_field3 = Deprecated::update(new_bit_field3, false);
  if (!map->is_dictionary_map()) {
    new_bit_field3 = IsUnstable::update(new_bit_field3, false);
  }
  new_bit_field3 = ConstructionCount::update(new_bit_field3,
                                             JSFunction::kNoSlackTracking);
  result->set_bit_field3(new_bit_field3);
  return result;
}


// Returns a dictionary-mode (normalized) version of |fast_map|, serving it
// from the per-context NormalizedMapCache when possible.
Handle<Map> Map::Normalize(Handle<Map> fast_map,
                           PropertyNormalizationMode mode) {
  DCHECK(!fast_map->is_dictionary_map());

  Isolate* isolate = fast_map->GetIsolate();
  Handle<Object> maybe_cache(isolate->native_context()->normalized_map_cache(),
                             isolate);
  bool use_cache = !maybe_cache->IsUndefined();
  Handle<NormalizedMapCache> cache;
  if (use_cache) cache = Handle<NormalizedMapCache>::cast(maybe_cache);

  Handle<Map> new_map;
  if (use_cache && cache->Get(fast_map, mode).ToHandle(&new_map)) {
#ifdef VERIFY_HEAP
    if (FLAG_verify_heap) new_map->DictionaryMapVerify();
#endif
#ifdef ENABLE_SLOW_DCHECKS
    if (FLAG_enable_slow_asserts) {
      // The cached map should match newly created normalized map bit-by-bit,
      // except for the code cache, which can contain some ics which can be
      // applied to the shared map.
      Handle<Map> fresh = Map::CopyNormalized(fast_map, mode);

      DCHECK(memcmp(fresh->address(),
                    new_map->address(),
                    Map::kCodeCacheOffset) == 0);
      STATIC_ASSERT(Map::kDependentCodeOffset ==
                    Map::kCodeCacheOffset + kPointerSize);
      int offset = Map::kDependentCodeOffset + kPointerSize;
      DCHECK(memcmp(fresh->address() + offset,
                    new_map->address() + offset,
                    Map::kSize - offset) == 0);
    }
#endif
  } else {
    new_map = Map::CopyNormalized(fast_map, mode);
    if (use_cache) {
      cache->Set(fast_map, new_map);
      isolate->counters()->normalized_maps()->Increment();
    }
  }
  // The fast map may no longer assume it describes the only leaf layout.
  fast_map->NotifyLeafMapLayoutChange();
  return new_map;
}


// Copies |map| into a dictionary-mode map, optionally dropping the space
// reserved for in-object properties (CLEAR_INOBJECT_PROPERTIES).
Handle<Map> Map::CopyNormalized(Handle<Map> map,
                                PropertyNormalizationMode mode) {
  int new_instance_size = map->instance_size();
  if (mode == CLEAR_INOBJECT_PROPERTIES) {
    new_instance_size -= map->inobject_properties() * kPointerSize;
  }

  Handle<Map> result = RawCopy(map, new_instance_size);

  if (mode != CLEAR_INOBJECT_PROPERTIES) {
    result->set_inobject_properties(map->inobject_properties());
  }

  result->set_dictionary_map(true);
  result->set_migration_target(false);

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) result->DictionaryMapVerify();
#endif

  return result;
}


// Copies |map| including its property-field bookkeeping but without any
// descriptors; the copy starts with an empty code cache.
Handle<Map> Map::CopyDropDescriptors(Handle<Map> map) {
  Handle<Map> result = RawCopy(map, map->instance_size());

  // Please note instance_type and instance_size are set when allocated.
  result->set_inobject_properties(map->inobject_properties());
  result->set_unused_property_fields(map->unused_property_fields());

  result->set_pre_allocated_property_fields(
      map->pre_allocated_property_fields());
  result->ClearCodeCache(map->GetHeap());
  map->NotifyLeafMapLayoutChange();
  return result;
}


// Creates a child map that shares |map|'s descriptor array, appending
// |descriptor| to it, and connects the two maps with a simple transition.
Handle<Map> Map::ShareDescriptor(Handle<Map> map,
                                 Handle<DescriptorArray> descriptors,
                                 Descriptor* descriptor) {
  // Sanity check. This path is only to be taken if the map owns its descriptor
  // array, implying that its NumberOfOwnDescriptors equals the number of
  // descriptors in the descriptor array.
  DCHECK(map->NumberOfOwnDescriptors() ==
         map->instance_descriptors()->number_of_descriptors());

  Handle<Map> result = CopyDropDescriptors(map);
  Handle<Name> name = descriptor->GetKey();

  // Ensure there's space for the new descriptor in the shared descriptor array.
  if (descriptors->NumberOfSlackDescriptors() == 0) {
    int old_size = descriptors->number_of_descriptors();
    if (old_size == 0) {
      descriptors = DescriptorArray::Allocate(map->GetIsolate(), 0, 1);
    } else {
      // Grow by ~50% (at least 1) and re-fetch the (possibly replaced)
      // descriptor array from the map.
      EnsureDescriptorSlack(map, old_size < 4 ? 1 : old_size / 2);
      descriptors = handle(map->instance_descriptors());
    }
  }

  {
    // Append and install atomically with respect to GC so the new map never
    // points at a descriptor array that is missing its last descriptor.
    DisallowHeapAllocation no_gc;
    descriptors->Append(descriptor);
    result->InitializeDescriptors(*descriptors);
  }

  DCHECK(result->NumberOfOwnDescriptors() == map->NumberOfOwnDescriptors() + 1);
  ConnectTransition(map, result, name, SIMPLE_TRANSITION);

  return result;
}


// Records |child| as a transition from |parent| under |name|. The parent
// gives up descriptor ownership; prototype maps get no transition entry.
void Map::ConnectTransition(Handle<Map> parent, Handle<Map> child,
                            Handle<Name> name, SimpleTransitionFlag flag) {
  parent->set_owns_descriptors(false);
  if (parent->is_prototype_map()) {
    DCHECK(child->is_prototype_map());
  } else {
    Handle<TransitionArray> transitions =
        TransitionArray::CopyInsert(parent, name, child, flag);
    parent->set_transitions(*transitions);
    child->SetBackPointer(*parent);
  }
}


// Copies |map| and installs |descriptors| on the copy. When no transition is
// recorded, the descriptors are generalized (tagged representation, Any
// field type) since the free-floating map cannot be kept up to date.
Handle<Map> Map::CopyReplaceDescriptors(Handle<Map> map,
                                        Handle<DescriptorArray> descriptors,
                                        TransitionFlag flag,
                                        MaybeHandle<Name> maybe_name,
                                        SimpleTransitionFlag simple_flag) {
  DCHECK(descriptors->IsSortedNoDuplicates());

  Handle<Map> result = CopyDropDescriptors(map);
  result->InitializeDescriptors(*descriptors);

  if (!map->is_prototype_map()) {
    if (flag == INSERT_TRANSITION && map->CanHaveMoreTransitions()) {
      Handle<Name> name;
      CHECK(maybe_name.ToHandle(&name));
      ConnectTransition(map, result, name, simple_flag);
    } else {
      int length = descriptors->number_of_descriptors();
      for (int i = 0; i < length; i++) {
        descriptors->SetRepresentation(i, Representation::Tagged());
        if (descriptors->GetDetails(i).type() == FIELD) {
          descriptors->SetValue(i, HeapType::Any());
        }
      }
    }
  }

  return result;
}


// Since this method is used to rewrite an existing transition tree, it can
// always insert transitions without checking.
// Copies |map| and installs |descriptors| with ownership up to (and
// including) |new_descriptor|, adjusting the unused-field budget if the new
// descriptor is a FIELD. See the comment above: used for transition-tree
// rewriting, so it inserts the transition unconditionally.
Handle<Map> Map::CopyInstallDescriptors(Handle<Map> map,
                                        int new_descriptor,
                                        Handle<DescriptorArray> descriptors) {
  DCHECK(descriptors->IsSortedNoDuplicates());

  Handle<Map> result = CopyDropDescriptors(map);

  result->InitializeDescriptors(*descriptors);
  result->SetNumberOfOwnDescriptors(new_descriptor + 1);

  int unused_property_fields = map->unused_property_fields();
  if (descriptors->GetDetails(new_descriptor).type() == FIELD) {
    unused_property_fields = map->unused_property_fields() - 1;
    // Exhausted the slack: a new batch of out-of-object fields was added.
    if (unused_property_fields < 0) {
      unused_property_fields += JSObject::kFieldsAdded;
    }
  }

  result->set_unused_property_fields(unused_property_fields);

  Handle<Name> name = handle(descriptors->GetKey(new_descriptor));
  ConnectTransition(map, result, name, SIMPLE_TRANSITION);

  return result;
}


// Copies |map| with its elements kind changed to |kind|. Inserts an elements
// transition when requested and none exists yet; shares descriptors when the
// source map still owns them.
Handle<Map> Map::CopyAsElementsKind(Handle<Map> map, ElementsKind kind,
                                    TransitionFlag flag) {
  if (flag == INSERT_TRANSITION) {
    // Only one elements transition per map is supported, except that
    // dictionary/external-array kinds may coexist as targets.
    DCHECK(!map->HasElementsTransition() ||
           ((map->elements_transition_map()->elements_kind() ==
                 DICTIONARY_ELEMENTS ||
             IsExternalArrayElementsKind(
                 map->elements_transition_map()->elements_kind())) &&
            (kind == DICTIONARY_ELEMENTS ||
             IsExternalArrayElementsKind(kind))));
    DCHECK(!IsFastElementsKind(kind) ||
           IsMoreGeneralElementsKindTransition(map->elements_kind(), kind));
    DCHECK(kind != map->elements_kind());
  }

  bool insert_transition =
      flag == INSERT_TRANSITION && !map->HasElementsTransition();

  if (insert_transition && map->owns_descriptors()) {
    // In case the map owned its own descriptors, share the descriptors and
    // transfer ownership to the new map.
    Handle<Map> new_map = CopyDropDescriptors(map);

    ConnectElementsTransition(map, new_map);

    new_map->set_elements_kind(kind);
    new_map->InitializeDescriptors(map->instance_descriptors());
    return new_map;
  }

  // In case the map did not own its own descriptors, a split is forced by
  // copying the map; creating a new descriptor array cell.
  // Create a new free-floating map only if we are not allowed to store it.
  Handle<Map> new_map = Copy(map);

  new_map->set_elements_kind(kind);

  if (insert_transition) {
    ConnectElementsTransition(map, new_map);
  }

  return new_map;
}


// Copies |map| with the is_observed bit set and links the copy under the
// private observed symbol (Object.observe support).
Handle<Map> Map::CopyForObserved(Handle<Map> map) {
  DCHECK(!map->is_observed());

  Isolate* isolate = map->GetIsolate();

  // In case the map owned its own descriptors, share the descriptors and
  // transfer ownership to the new map.
  Handle<Map> new_map;
  if (map->owns_descriptors()) {
    new_map = CopyDropDescriptors(map);
  } else {
    DCHECK(!map->is_prototype_map());
    new_map = Copy(map);
  }

  new_map->set_is_observed();
  if (map->owns_descriptors()) {
    new_map->InitializeDescriptors(map->instance_descriptors());
  }

  Handle<Name> name = isolate->factory()->observed_symbol();
  ConnectTransition(map, new_map, name, FULL_TRANSITION);

  return new_map;
}


// Generic copy: duplicates the map together with a fresh copy of its own
// descriptors, without inserting a transition.
Handle<Map> Map::Copy(Handle<Map> map) {
  Handle<DescriptorArray> descriptors(map->instance_descriptors());
  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
  Handle<DescriptorArray> new_descriptors =
      DescriptorArray::CopyUpTo(descriptors, number_of_own_descriptors);
  return CopyReplaceDescriptors(
      map, new_descriptors, OMIT_TRANSITION, MaybeHandle<Name>());
}


// Creates a fresh JSObject map based on the Object function's initial map,
// with room for |inobject_properties| in-object fields (clamped to the
// maximum instance size).
Handle<Map> Map::Create(Isolate* isolate, int inobject_properties) {
  Handle<Map> copy =
      Copy(handle(isolate->object_function()->initial_map()));

  // Check that we do not overflow the instance size when adding the extra
  // inobject properties. If the instance size overflows, we allocate as many
  // properties as we can as inobject properties.
  int max_extra_properties =
      (JSObject::kMaxInstanceSize - JSObject::kHeaderSize) >> kPointerSizeLog2;

  if (inobject_properties > max_extra_properties) {
    inobject_properties = max_extra_properties;
  }

  int new_instance_size =
      JSObject::kHeaderSize + kPointerSize * inobject_properties;

  // Adjust the map with the extra inobject properties.
  copy->set_inobject_properties(inobject_properties);
  copy->set_unused_property_fields(inobject_properties);
  copy->set_instance_size(new_instance_size);
  // Instance size changed, so the visitor id must be recomputed.
  copy->set_visitor_id(StaticVisitorBase::GetVisitorId(*copy));
  return copy;
}


// Copies |map| for Object.freeze: all descriptors get FROZEN attributes, the
// map becomes non-extensible, and elements go to dictionary mode.
Handle<Map> Map::CopyForFreeze(Handle<Map> map) {
  int num_descriptors = map->NumberOfOwnDescriptors();
  Isolate* isolate = map->GetIsolate();
  Handle<DescriptorArray> new_desc = DescriptorArray::CopyUpToAddAttributes(
      handle(map->instance_descriptors(), isolate), num_descriptors, FROZEN);
  Handle<Map> new_map = CopyReplaceDescriptors(
      map, new_desc, INSERT_TRANSITION, isolate->factory()->frozen_symbol());
  new_map->freeze();
  new_map->set_is_extensible(false);
  new_map->set_elements_kind(DICTIONARY_ELEMENTS);
  return new_map;
}


// Returns true if |value| can be stored into descriptor slot |descriptor|
// without generalizing the descriptor's representation or field type.
bool DescriptorArray::CanHoldValue(int descriptor, Object* value) {
  PropertyDetails details = GetDetails(descriptor);
  switch (details.type()) {
    case FIELD:
      return value->FitsRepresentation(details.representation()) &&
             GetFieldType(descriptor)->NowContains(value);

    case CONSTANT:
      DCHECK(GetConstant(descriptor) != value ||
             value->FitsRepresentation(details.representation()));
      // A CONSTANT slot only holds the exact same object.
      return GetConstant(descriptor) ==
             value;

    case CALLBACKS:
      return false;

    case NORMAL:
      // NORMAL properties live in dictionaries, never in descriptor arrays.
      UNREACHABLE();
      break;
  }

  UNREACHABLE();
  return false;
}


// Returns a map (possibly |map| itself) whose descriptor |descriptor| can
// hold |value|, generalizing the field representation/type if needed.
Handle<Map> Map::PrepareForDataProperty(Handle<Map> map, int descriptor,
                                        Handle<Object> value) {
  // Dictionaries can store any property value.
  if (map->is_dictionary_map()) return map;

  // Migrate to the newest map before storing the property.
  map = Update(map);

  Handle<DescriptorArray> descriptors(map->instance_descriptors());

  if (descriptors->CanHoldValue(descriptor, *value)) return map;

  Isolate* isolate = map->GetIsolate();
  Representation representation = value->OptimalRepresentation();
  Handle<HeapType> type = value->OptimalType(isolate, representation);

  return GeneralizeRepresentation(map, descriptor, representation, type,
                                  FORCE_FIELD);
}


// Returns the map to use after adding a data property |name| = |value| with
// |attributes|: follows an existing transition if one matches, otherwise
// copies the map with a new field/constant descriptor, falling back to
// dictionary mode (Normalize) when neither is possible.
Handle<Map> Map::TransitionToDataProperty(Handle<Map> map, Handle<Name> name,
                                          Handle<Object> value,
                                          PropertyAttributes attributes,
                                          StoreFromKeyed store_mode) {
  // Dictionary maps can always have additional data properties.
  if (map->is_dictionary_map()) return map;

  // Migrate to the newest map before storing the property.
  map = Update(map);

  int index = map->SearchTransition(*name);
  if (index != TransitionArray::kNotFound) {
    Handle<Map> transition(map->GetTransition(index));
    int descriptor = transition->LastAdded();

    // TODO(verwaest): Handle attributes better.
    DescriptorArray* descriptors = transition->instance_descriptors();
    // Attribute mismatch on the existing transition: give up on fast mode.
    if (descriptors->GetDetails(descriptor).attributes() != attributes) {
      return Map::Normalize(map, CLEAR_INOBJECT_PROPERTIES);
    }

    return Map::PrepareForDataProperty(transition, descriptor, value);
  }

  TransitionFlag flag = INSERT_TRANSITION;
  MaybeHandle<Map> maybe_map;
  if (value->IsJSFunction()) {
    // Functions are stored as CONSTANT descriptors so calls can be inlined.
    maybe_map = Map::CopyWithConstant(map, name, value, attributes, flag);
  } else if (!map->TooManyFastProperties(store_mode)) {
    Isolate* isolate = name->GetIsolate();
    Representation representation = value->OptimalRepresentation();
    Handle<HeapType> type = value->OptimalType(isolate, representation);
    maybe_map =
        Map::CopyWithField(map, name, type, attributes, representation, flag);
  }

  Handle<Map> result;
  // Either too many fast properties or CopyWithField/CopyWithConstant
  // failed (e.g. descriptor limit reached): switch to dictionary mode.
  if (!maybe_map.ToHandle(&result)) {
    return Map::Normalize(map, CLEAR_INOBJECT_PROPERTIES);
  }

  return result;
}


// Returns a map for the same object after changing the attributes of the
// property at |descriptor|. Always produces a detached, generalized map.
Handle<Map> Map::ReconfigureDataProperty(Handle<Map> map, int descriptor,
                                         PropertyAttributes attributes) {
  // Dictionaries have to be reconfigured in-place.
  DCHECK(!map->is_dictionary_map());

  // For now, give up on transitioning and just create a unique map.
  // TODO(verwaest/ishell): Cache transitions with different attributes.
  return CopyGeneralizeAllRepresentations(map, descriptor, FORCE_FIELD,
                                          attributes, "attributes mismatch");
}


// Returns the map to use after defining an accessor |component|
// (getter/setter) for |name|: reuses a matching transition or descriptor if
// the existing AccessorPair is compatible, otherwise adds a CALLBACKS
// descriptor, normalizing to dictionary mode whenever reuse is impossible.
Handle<Map> Map::TransitionToAccessorProperty(Handle<Map> map,
                                              Handle<Name> name,
                                              AccessorComponent component,
                                              Handle<Object> accessor,
                                              PropertyAttributes attributes) {
  Isolate* isolate = name->GetIsolate();

  // Dictionary maps can always have additional data properties.
  if (map->is_dictionary_map()) {
    // For global objects, property cells are inlined. We need to change the
    // map.
    if (map->IsGlobalObjectMap()) return Copy(map);
    return map;
  }

  // Migrate to the newest map before transitioning to the new property.
  map = Update(map);

  // Prototype maps keep their in-object fields when normalized so existing
  // objects stay valid.
  PropertyNormalizationMode mode = map->is_prototype_map()
                                       ? KEEP_INOBJECT_PROPERTIES
                                       : CLEAR_INOBJECT_PROPERTIES;

  int index = map->SearchTransition(*name);
  if (index != TransitionArray::kNotFound) {
    Handle<Map> transition(map->GetTransition(index));
    DescriptorArray* descriptors = transition->instance_descriptors();
    // Fast path, assume that we're modifying the last added descriptor.
    int descriptor = transition->LastAdded();
    if (descriptors->GetKey(descriptor) != *name) {
      // If not, search for the descriptor.
      descriptor = descriptors->SearchWithCache(*name, *transition);
    }

    if (descriptors->GetDetails(descriptor).type() != CALLBACKS) {
      return Map::Normalize(map, mode);
    }

    // TODO(verwaest): Handle attributes better.
    if (descriptors->GetDetails(descriptor).attributes() != attributes) {
      return Map::Normalize(map, mode);
    }

    Handle<Object> maybe_pair(descriptors->GetValue(descriptor), isolate);
    if (!maybe_pair->IsAccessorPair()) {
      return Map::Normalize(map, mode);
    }

    Handle<AccessorPair> pair = Handle<AccessorPair>::cast(maybe_pair);
    // The transition is reusable only if it stores this exact accessor.
    if (pair->get(component) != *accessor) {
      return Map::Normalize(map, mode);
    }

    return transition;
  }

  Handle<AccessorPair> pair;
  DescriptorArray* old_descriptors = map->instance_descriptors();
  int descriptor = old_descriptors->SearchWithCache(*name, *map);
  if (descriptor != DescriptorArray::kNotFound) {
    // The property already exists on this map: reuse or extend its
    // AccessorPair if compatible, otherwise normalize.
    PropertyDetails old_details = old_descriptors->GetDetails(descriptor);
    if (old_details.type() != CALLBACKS) {
      return Map::Normalize(map, mode);
    }

    if (old_details.attributes() != attributes) {
      return Map::Normalize(map, mode);
    }

    Handle<Object> maybe_pair(old_descriptors->GetValue(descriptor), isolate);
    if (!maybe_pair->IsAccessorPair()) {
      return Map::Normalize(map, mode);
    }

    Object* current = Handle<AccessorPair>::cast(maybe_pair)->get(component);
    // Same accessor already installed: nothing to do.
    if (current == *accessor) return map;

    // The slot for this component is occupied by a different accessor.
    if (!current->IsTheHole()) {
      return Map::Normalize(map, mode);
    }

    pair = AccessorPair::Copy(Handle<AccessorPair>::cast(maybe_pair));
  } else if (map->NumberOfOwnDescriptors() >= kMaxNumberOfDescriptors ||
             map->TooManyFastProperties(CERTAINLY_NOT_STORE_FROM_KEYED)) {
    return Map::Normalize(map, CLEAR_INOBJECT_PROPERTIES);
  } else {
    pair = isolate->factory()->NewAccessorPair();
  }

  pair->set(component, *accessor);
  TransitionFlag flag = INSERT_TRANSITION;
  CallbacksDescriptor new_desc(name, pair, attributes);
  return Map::CopyInsertDescriptor(map, &new_desc, flag);
}


// Copies |map| with |descriptor| appended. Shares the descriptor array via
// ShareDescriptor when the map owns it and a transition may be inserted;
// otherwise copies the descriptors with one slot of slack.
Handle<Map> Map::CopyAddDescriptor(Handle<Map> map,
                                   Descriptor* descriptor,
                                   TransitionFlag flag) {
  Handle<DescriptorArray> descriptors(map->instance_descriptors());

  // Ensure the key is unique.
  descriptor->KeyToUniqueName();

  if (flag == INSERT_TRANSITION &&
      map->owns_descriptors() &&
      map->CanHaveMoreTransitions()) {
    return ShareDescriptor(map, descriptors, descriptor);
  }

  Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
      descriptors, map->NumberOfOwnDescriptors(), 1);
  new_descriptors->Append(descriptor);

  return CopyReplaceDescriptors(
      map, new_descriptors, flag, descriptor->GetKey(), SIMPLE_TRANSITION);
}


// Copies |map| with |descriptor| either replacing an existing descriptor of
// the same key or appended as a new one.
Handle<Map> Map::CopyInsertDescriptor(Handle<Map> map,
                                      Descriptor* descriptor,
                                      TransitionFlag flag) {
  Handle<DescriptorArray> old_descriptors(map->instance_descriptors());

  // Ensure the key is unique.
  descriptor->KeyToUniqueName();

  // We replace the key if it is already present.
  int index = old_descriptors->SearchWithCache(*descriptor->GetKey(), *map);
  if (index != DescriptorArray::kNotFound) {
    return CopyReplaceDescriptor(map, old_descriptors, descriptor, index, flag);
  }
  return CopyAddDescriptor(map, descriptor, flag);
}


// Copies the first |enumeration_index| descriptors of |desc| into a new
// array with |slack| extra capacity, without changing attributes.
Handle<DescriptorArray> DescriptorArray::CopyUpTo(
    Handle<DescriptorArray> desc,
    int enumeration_index,
    int slack) {
  return DescriptorArray::CopyUpToAddAttributes(
      desc, enumeration_index, NONE, slack);
}


// Copies the first |enumeration_index| descriptors of |desc| into a new
// array with |slack| extra capacity, OR-ing |attributes| into each copied
// descriptor (private symbols excluded).
Handle<DescriptorArray> DescriptorArray::CopyUpToAddAttributes(
    Handle<DescriptorArray> desc,
    int enumeration_index,
    PropertyAttributes attributes,
    int slack) {
  if (enumeration_index + slack == 0) {
    return desc->GetIsolate()->factory()->empty_descriptor_array();
  }

  int size = enumeration_index;

  Handle<DescriptorArray> descriptors =
      DescriptorArray::Allocate(desc->GetIsolate(), size, slack);
  // The freshly allocated array is still white, so writes below need no
  // write barrier.
  DescriptorArray::WhitenessWitness witness(*descriptors);

  if (attributes != NONE) {
    for (int i = 0; i < size; ++i) {
      Object* value = desc->GetValue(i);
      Name* key = desc->GetKey(i);
      PropertyDetails details = desc->GetDetails(i);
      // Bulk attribute changes never affect private properties.
      if (!key->IsSymbol() || !Symbol::cast(key)->is_private()) {
        int mask = DONT_DELETE | DONT_ENUM;
        // READ_ONLY is an invalid attribute for JS setters/getters.
        if (details.type() != CALLBACKS || !value->IsAccessorPair()) {
          mask |= READ_ONLY;
        }
        details = details.CopyAddAttributes(
            static_cast<PropertyAttributes>(attributes & mask));
      }
      Descriptor inner_desc(
          handle(key), handle(value, desc->GetIsolate()), details);
      descriptors->Set(i, &inner_desc, witness);
    }
  } else {
    // No attribute change requested: plain element-wise copy.
    for (int i = 0; i < size; ++i) {
      descriptors->CopyFrom(i, *desc, witness);
    }
  }

  // A partial copy may have broken enumeration order; restore it.
  if (desc->number_of_descriptors() != enumeration_index) descriptors->Sort();

  return descriptors;
}


// Copies |map| with the descriptor at |insertion_index| replaced by
// |descriptor| (same key). A replacement at the last position may still be a
// simple transition; anywhere else it is a full transition.
Handle<Map> Map::CopyReplaceDescriptor(Handle<Map> map,
                                       Handle<DescriptorArray> descriptors,
                                       Descriptor* descriptor,
                                       int insertion_index,
                                       TransitionFlag flag) {
  // Ensure the key is unique.
  descriptor->KeyToUniqueName();

  Handle<Name> key = descriptor->GetKey();
  DCHECK(*key == descriptors->GetKey(insertion_index));

  Handle<DescriptorArray> new_descriptors = DescriptorArray::CopyUpTo(
      descriptors, map->NumberOfOwnDescriptors());

  new_descriptors->Replace(insertion_index, descriptor);

  SimpleTransitionFlag simple_flag =
      (insertion_index == descriptors->number_of_descriptors() - 1)
          ? SIMPLE_TRANSITION
          : FULL_TRANSITION;
  return CopyReplaceDescriptors(map, new_descriptors, flag, key, simple_flag);
}


// Inserts (name, code) into this map's code cache, lazily allocating the
// CodeCache struct on first use.
void Map::UpdateCodeCache(Handle<Map> map,
                          Handle<Name> name,
                          Handle<Code> code) {
  Isolate* isolate = map->GetIsolate();
  HandleScope scope(isolate);
  // Allocate the code cache if not present.
  // An empty FixedArray is the sentinel for "no cache allocated yet".
  if (map->code_cache()->IsFixedArray()) {
    Handle<Object> result = isolate->factory()->NewCodeCache();
    map->set_code_cache(*result);
  }

  // Update the code cache.
  Handle<CodeCache> code_cache(CodeCache::cast(map->code_cache()), isolate);
  CodeCache::Update(code_cache, name, code);
}


// Returns the cached Code for (name, flags), or undefined if the cache has
// never been allocated or has no matching entry.
Object* Map::FindInCodeCache(Name* name, Code::Flags flags) {
  // Do a lookup if a code cache exists.
  if (!code_cache()->IsFixedArray()) {
    return CodeCache::cast(code_cache())->Lookup(name, flags);
  } else {
    return GetHeap()->undefined_value();
  }
}


// Returns the internal cache index of |code| under |name|, or -1. The index
// is only meaningful as an argument to RemoveFromCodeCache.
int Map::IndexInCodeCache(Object* name, Code* code) {
  // Get the internal index if a code cache exists.
  if (!code_cache()->IsFixedArray()) {
    return CodeCache::cast(code_cache())->GetIndex(name, code);
  }
  return -1;
}


// Removes the entry previously located via IndexInCodeCache.
void Map::RemoveFromCodeCache(Name* name, Code* code, int index) {
  // No GC is supposed to happen between a call to IndexInCodeCache and
  // RemoveFromCodeCache so the code cache must be there.
  DCHECK(!code_cache()->IsFixedArray());
  CodeCache::cast(code_cache())->RemoveByIndex(name, code, index);
}


// An iterator over all map transitions in an descriptor array, reusing the
// constructor field of the map while it is running. Negative values in
// the constructor field indicate an active map transition iteration. The
// original constructor is restored after iterating over all entries.
class IntrusiveMapTransitionIterator {
 public:
  IntrusiveMapTransitionIterator(
      Map* map, TransitionArray* transition_array, Object* constructor)
      : map_(map),
        transition_array_(transition_array),
        constructor_(constructor) { }

  // Stashes the iteration state (-1, i.e. "before index 0") into the
  // constructor slot unless an iteration is already in progress.
  void StartIfNotStarted() {
    DCHECK(!(*IteratorField())->IsSmi() || IsIterating());
    if (!(*IteratorField())->IsSmi()) {
      DCHECK(*IteratorField() == constructor_);
      *IteratorField() = Smi::FromInt(-1);
    }
  }

  // A negative Smi in the constructor slot means this iterator is active
  // (positive values belong to the prototype-transition iterator below).
  bool IsIterating() {
    return (*IteratorField())->IsSmi() &&
           Smi::cast(*IteratorField())->value() < 0;
  }

  // Returns the next transition target, or NULL when exhausted; on
  // exhaustion the original constructor is written back.
  Map* Next() {
    DCHECK(IsIterating());
    int value = Smi::cast(*IteratorField())->value();
    // Encoding: stored value -1 - index, so index = -value - 1.
    int index = -value - 1;
    int number_of_transitions = transition_array_->number_of_transitions();
    // NOTE(review): this while loop always returns on its first pass, so it
    // behaves exactly like an if.
    while (index < number_of_transitions) {
      *IteratorField() = Smi::FromInt(value - 1);
      return transition_array_->GetTarget(index);
    }

    *IteratorField() = constructor_;
    return NULL;
  }

 private:
  // The constructor slot of |map_| doubles as iteration state.
  Object** IteratorField() {
    return HeapObject::RawField(map_, Map::kConstructorOffset);
  }

  Map* map_;
  TransitionArray* transition_array_;
  Object* constructor_;
};


// An iterator over all prototype transitions, reusing the constructor field
// of the map while it is running. Positive values in the constructor field
// indicate an active prototype transition iteration. The original constructor
// is restored after iterating over all entries.
class IntrusivePrototypeTransitionIterator {
 public:
  IntrusivePrototypeTransitionIterator(
      Map* map, HeapObject* proto_trans, Object* constructor)
      : map_(map), proto_trans_(proto_trans), constructor_(constructor) { }

  // Stashes the starting index (0) into the constructor slot unless an
  // iteration is already in progress.
  void StartIfNotStarted() {
    if (!(*IteratorField())->IsSmi()) {
      DCHECK(*IteratorField() == constructor_);
      *IteratorField() = Smi::FromInt(0);
    }
  }

  // A non-negative Smi in the constructor slot means this iterator is active.
  bool IsIterating() {
    return (*IteratorField())->IsSmi() &&
           Smi::cast(*IteratorField())->value() >= 0;
  }

  // Returns the next prototype-transition target, or NULL when exhausted; on
  // exhaustion the original constructor is written back.
  Map* Next() {
    DCHECK(IsIterating());
    int transitionNumber = Smi::cast(*IteratorField())->value();
    if (transitionNumber < NumberOfTransitions()) {
      *IteratorField() = Smi::FromInt(transitionNumber + 1);
      return GetTransition(transitionNumber);
    }
    *IteratorField() = constructor_;
    return NULL;
  }

 private:
  // The constructor slot of |map_| doubles as iteration state.
  Object** IteratorField() {
    return HeapObject::RawField(map_, Map::kConstructorOffset);
  }

  int NumberOfTransitions() {
    FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
    Object* num = proto_trans->get(Map::kProtoTransitionNumberOfEntriesOffset);
    return Smi::cast(num)->value();
  }

  Map* GetTransition(int transitionNumber) {
    FixedArray* proto_trans = reinterpret_cast<FixedArray*>(proto_trans_);
    return Map::cast(proto_trans->get(IndexFor(transitionNumber)));
  }

  // Maps a transition ordinal to its slot in the prototype-transitions array.
  int IndexFor(int transitionNumber) {
    return Map::kProtoTransitionHeaderSize +
        Map::kProtoTransitionMapOffset +
        transitionNumber * Map::kProtoTransitionElementsPerEntry;
  }

  Map* map_;
  HeapObject* proto_trans_;
  Object* constructor_;
};


// To traverse the transition tree iteratively, we have to store two kinds of
// information in a map: The parent map in the traversal and which children of a
// node have already been visited. To do this without additional memory, we
// temporarily reuse two fields with known values:
//
// (1) The map of the map temporarily holds the parent, and is restored to the
//     meta map afterwards.
//
// (2) The info which children have already been visited depends on which part
//     of the map we currently iterate. We use the constructor field of the
//     map to store the current index. We can do that because the constructor
//     is the same for all involved maps.
//
//     (a) If we currently follow normal map transitions, we temporarily store
//         the current index in the constructor field, and restore it to the
//         original constructor afterwards. Note that a single descriptor can
//         have 0, 1, or 2 transitions.
//
//     (b) If we currently follow prototype transitions, we temporarily store
//         the current index in the constructor field, and restore it to the
//         original constructor afterwards.
//
// Note that the child iterator is just a concatenation of two iterators: One
// iterating over map transitions and one iterating over prototype transisitons.
class TraversableMap : public Map {
 public:
  // Record the parent in the traversal within this map. Note that this destroys
  // this map's map!
  void SetParent(TraversableMap* parent) { set_map_no_write_barrier(parent); }

  // Reset the current map's map, returning the parent previously stored in it.
  TraversableMap* GetAndResetParent() {
    TraversableMap* old_parent = static_cast<TraversableMap*>(map());
    set_map_no_write_barrier(GetHeap()->meta_map());
    return old_parent;
  }

  // If we have an unvisited child map, return that one and advance. If we have
  // none, return NULL and restore the overwritten constructor field.
  // Prototype transitions are exhausted first, then regular map transitions.
  TraversableMap* ChildIteratorNext(Object* constructor) {
    if (!HasTransitionArray()) return NULL;

    TransitionArray* transition_array = transitions();
    if (transition_array->HasPrototypeTransitions()) {
      HeapObject* proto_transitions =
          transition_array->GetPrototypeTransitions();
      IntrusivePrototypeTransitionIterator proto_iterator(this,
                                                          proto_transitions,
                                                          constructor);
      proto_iterator.StartIfNotStarted();
      if (proto_iterator.IsIterating()) {
        Map* next = proto_iterator.Next();
        if (next != NULL) return static_cast<TraversableMap*>(next);
      }
    }

    IntrusiveMapTransitionIterator transition_iterator(this,
                                                       transition_array,
                                                       constructor);
    transition_iterator.StartIfNotStarted();
    if (transition_iterator.IsIterating()) {
      Map* next = transition_iterator.Next();
      if (next != NULL) return static_cast<TraversableMap*>(next);
    }

    return NULL;
  }
};


// Traverse the transition tree in postorder without using the C++ stack by
// doing pointer reversal.
void Map::TraverseTransitionTree(TraverseCallback callback, void* data) {
  // Make sure that we do not allocate in the callback.
  DisallowHeapAllocation no_allocation;

  TraversableMap* current = static_cast<TraversableMap*>(this);
  // Get the root constructor here to restore it later when finished iterating
  // over maps.
  Object* root_constructor = constructor();
  while (true) {
    TraversableMap* child = current->ChildIteratorNext(root_constructor);
    if (child != NULL) {
      // Descend: stash the parent in the child's map slot (pointer reversal).
      child->SetParent(current);
      current = child;
    } else {
      // All children visited: restore state, visit in postorder, ascend.
      TraversableMap* parent = current->GetAndResetParent();
      callback(current, data);
      if (current == this) break;
      current = parent;
    }
  }
}


// Inserts (name, code) into the appropriate sub-cache: a hash table for
// NORMAL stubs, a flat array for everything else.
void CodeCache::Update(
    Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) {
  // The number of monomorphic stubs for normal load/store/call IC's can grow to
  // a large number and therefore they need to go into a hash table. They are
  // used to load global properties from cells.
  if (code->type() == Code::NORMAL) {
    // Make sure that a hash table is allocated for the normal load code cache.
    if (code_cache->normal_type_cache()->IsUndefined()) {
      Handle<Object> result =
          CodeCacheHashTable::New(code_cache->GetIsolate(),
                                  CodeCacheHashTable::kInitialSize);
      code_cache->set_normal_type_cache(*result);
    }
    UpdateNormalTypeCache(code_cache, name, code);
  } else {
    DCHECK(code_cache->default_cache()->IsFixedArray());
    UpdateDefaultCache(code_cache, name, code);
  }
}


// Inserts (name, code) into the flat default cache, reusing deleted (null)
// slots when possible and growing the array by ~50% otherwise.
void CodeCache::UpdateDefaultCache(
    Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) {
  // When updating the default code cache we disregard the type encoded in the
  // flags. This allows call constant stubs to overwrite call field
  // stubs, etc.
  Code::Flags flags = Code::RemoveTypeFromFlags(code->flags());

  // First check whether we can update existing code cache without
  // extending it.
  Handle<FixedArray> cache = handle(code_cache->default_cache());
  int length = cache->length();
  {
    DisallowHeapAllocation no_alloc;
    int deleted_index = -1;
    for (int i = 0; i < length; i += kCodeCacheEntrySize) {
      Object* key = cache->get(i);
      // Null marks a deleted entry; remember the first one for reuse.
      if (key->IsNull()) {
        if (deleted_index < 0) deleted_index = i;
        continue;
      }
      // Undefined marks the end of the used region.
      if (key->IsUndefined()) {
        if (deleted_index >= 0) i = deleted_index;
        cache->set(i + kCodeCacheEntryNameOffset, *name);
        cache->set(i + kCodeCacheEntryCodeOffset, *code);
        return;
      }
      if (name->Equals(Name::cast(key))) {
        Code::Flags found =
            Code::cast(cache->get(i + kCodeCacheEntryCodeOffset))->flags();
        if (Code::RemoveTypeFromFlags(found) == flags) {
          cache->set(i + kCodeCacheEntryCodeOffset, *code);
          return;
        }
      }
    }

    // Reached the end of the code cache. If there were deleted
    // elements, reuse the space for the first of them.
    if (deleted_index >= 0) {
      cache->set(deleted_index + kCodeCacheEntryNameOffset, *name);
      cache->set(deleted_index + kCodeCacheEntryCodeOffset, *code);
      return;
    }
  }

  // Extend the code cache with some new entries (at least one). Must be a
  // multiple of the entry size.
  int new_length = length + ((length >> 1)) + kCodeCacheEntrySize;
  new_length = new_length - new_length % kCodeCacheEntrySize;
  DCHECK((new_length % kCodeCacheEntrySize) == 0);
  cache = FixedArray::CopySize(cache, new_length);

  // Add the (name, code) pair to the new cache.
  cache->set(length + kCodeCacheEntryNameOffset, *name);
  cache->set(length + kCodeCacheEntryCodeOffset, *code);
  code_cache->set_default_cache(*cache);
}


// Inserts (name, code) into the hash-table cache for NORMAL stubs.
void CodeCache::UpdateNormalTypeCache(
    Handle<CodeCache> code_cache, Handle<Name> name, Handle<Code> code) {
  // Adding a new entry can cause a new cache to be allocated.
  Handle<CodeCacheHashTable> cache(
      CodeCacheHashTable::cast(code_cache->normal_type_cache()));
  Handle<Object> new_cache = CodeCacheHashTable::Put(cache, name, code);
  code_cache->set_normal_type_cache(*new_cache);
}


// Looks up |name| with |flags|: first the flat default cache (ignoring the
// type bits), then, if the default cache has no exact match, the NORMAL
// hash-table cache. Returns undefined on a miss.
Object* CodeCache::Lookup(Name* name, Code::Flags flags) {
  Object* result = LookupDefaultCache(name, Code::RemoveTypeFromFlags(flags));
  if (result->IsCode()) {
    if (Code::cast(result)->flags() == flags) return result;
    return GetHeap()->undefined_value();
  }
  return LookupNormalTypeCache(name, flags);
}


// Linear scan of the flat default cache. |flags| must already have the type
// bits removed. Returns the Code, or undefined on a miss.
Object* CodeCache::LookupDefaultCache(Name* name, Code::Flags flags) {
  FixedArray* cache = default_cache();
  int length = cache->length();
  for (int i = 0; i < length; i += kCodeCacheEntrySize) {
    Object* key = cache->get(i + kCodeCacheEntryNameOffset);
    // Skip deleted elements.
    if (key->IsNull()) continue;
    // Undefined marks the end of the used region.
    if (key->IsUndefined()) return key;
    if (name->Equals(Name::cast(key))) {
      Code* code = Code::cast(cache->get(i + kCodeCacheEntryCodeOffset));
      if (Code::RemoveTypeFromFlags(code->flags()) == flags) {
        return code;
      }
    }
  }
  return GetHeap()->undefined_value();
}


// Hash-table lookup for NORMAL stubs; undefined when the table was never
// allocated or has no entry.
Object* CodeCache::LookupNormalTypeCache(Name* name, Code::Flags flags) {
  if (!normal_type_cache()->IsUndefined()) {
    CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
    return cache->Lookup(name, flags);
  } else {
    return GetHeap()->undefined_value();
  }
}


// Returns an index usable by RemoveByIndex, or -1. Note the encodings
// differ: hash-table entries return the entry number, flat-cache entries
// return the code slot's array index (i + 1).
int CodeCache::GetIndex(Object* name, Code* code) {
  if (code->type() == Code::NORMAL) {
    if (normal_type_cache()->IsUndefined()) return -1;
    CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
    return cache->GetIndex(Name::cast(name), code->flags());
  }

  FixedArray* array = default_cache();
  int len = array->length();
  for (int i = 0; i < len; i += kCodeCacheEntrySize) {
    if (array->get(i + kCodeCacheEntryCodeOffset) == code) return i + 1;
  }
  return -1;
}


// Removes the entry at |index| as returned by GetIndex (see the index
// encoding note there).
void CodeCache::RemoveByIndex(Object* name, Code* code, int index) {
  if (code->type() == Code::NORMAL) {
    DCHECK(!normal_type_cache()->IsUndefined());
    CodeCacheHashTable* cache = CodeCacheHashTable::cast(normal_type_cache());
    DCHECK(cache->GetIndex(Name::cast(name), code->flags()) == index);
    cache->RemoveByIndex(index);
  } else {
    FixedArray* array = default_cache();
    DCHECK(array->length() >= index && array->get(index)->IsCode());
    // Use null instead of undefined for deleted elements to distinguish
    // deleted elements from unused elements. This distinction is used
    // when looking up in the cache and when updating the cache.
    DCHECK_EQ(1, kCodeCacheEntryCodeOffset - kCodeCacheEntryNameOffset);
    array->set_null(index - 1);  // Name.
    array->set_null(index);      // Code.
  }
}


// The key in the code cache hash table consists of the property name and the
// code object. The actual match is on the name and the code flags. If a key
// is created using the flags and not a code object it can only be used for
// lookup not to create a new entry.
class CodeCacheHashTableKey : public HashTableKey {
 public:
  // Lookup-only key: carries no Code object, so AsHandle() must not be
  // called on keys built with this constructor.
  CodeCacheHashTableKey(Handle<Name> name, Code::Flags flags)
      : name_(name), flags_(flags), code_() { }

  // Insertable key: the flags are derived from the code object itself.
  CodeCacheHashTableKey(Handle<Name> name, Handle<Code> code)
      : name_(name), flags_(code->flags()), code_(code) { }

  // Stored entries are (name, code) FixedArray pairs; match on the name and
  // the code's flags.
  bool IsMatch(Object* other) OVERRIDE {
    if (!other->IsFixedArray()) return false;
    FixedArray* pair = FixedArray::cast(other);
    Name* name = Name::cast(pair->get(0));
    Code::Flags flags = Code::cast(pair->get(1))->flags();
    if (flags != flags_) {
      return false;
    }
    return name_->Equals(name);
  }

  static uint32_t NameFlagsHashHelper(Name* name, Code::Flags flags) {
    return name->Hash() ^ flags;
  }

  uint32_t Hash() OVERRIDE { return NameFlagsHashHelper(*name_, flags_); }

  uint32_t HashForObject(Object* obj) OVERRIDE {
    FixedArray* pair = FixedArray::cast(obj);
    Name* name = Name::cast(pair->get(0));
    Code* code = Code::cast(pair->get(1));
    return NameFlagsHashHelper(name, code->flags());
  }

  // Materializes the (name, code) pair stored in the table. Only valid for
  // keys created with a Code object (ToHandleChecked() asserts this).
  MUST_USE_RESULT Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
    Handle<Code> code = code_.ToHandleChecked();
    Handle<FixedArray> pair = isolate->factory()->NewFixedArray(2);
    pair->set(0, *name_);
    pair->set(1, *code);
    return pair;
  }

 private:
  Handle<Name> name_;
  Code::Flags flags_;
  // TODO(jkummerow): We should be able to get by without this.
  MaybeHandle<Code> code_;
};


// Returns the cached code object for (name, flags), or undefined if absent.
Object* CodeCacheHashTable::Lookup(Name* name, Code::Flags flags) {
  DisallowHeapAllocation no_alloc;
  CodeCacheHashTableKey key(handle(name), flags);
  int entry = FindEntry(&key);
  if (entry == kNotFound) return GetHeap()->undefined_value();
  return get(EntryToIndex(entry) + 1);
}


// Inserts (name, code) into the cache, growing it if necessary, and returns
// the (possibly reallocated) table.
Handle<CodeCacheHashTable> CodeCacheHashTable::Put(
    Handle<CodeCacheHashTable> cache, Handle<Name> name, Handle<Code> code) {
  CodeCacheHashTableKey key(name, code);

  Handle<CodeCacheHashTable> new_cache = EnsureCapacity(cache, 1, &key);

  int entry = new_cache->FindInsertionEntry(key.Hash());
  Handle<Object> k = key.AsHandle(cache->GetIsolate());

  new_cache->set(EntryToIndex(entry), *k);
  new_cache->set(EntryToIndex(entry) + 1, *code);
  new_cache->ElementAdded();
  return new_cache;
}


// Returns the entry index for (name, flags), or -1 if not present.
int CodeCacheHashTable::GetIndex(Name* name, Code::Flags flags) {
  DisallowHeapAllocation no_alloc;
  CodeCacheHashTableKey key(handle(name), flags);
  int entry = FindEntry(&key);
  return (entry == kNotFound) ? -1 : entry;
}


// Clears the entry at |index|, overwriting both key and value with the hole.
void CodeCacheHashTable::RemoveByIndex(int index) {
  DCHECK(index >= 0);
  Heap* heap = GetHeap();
  set(EntryToIndex(index), heap->the_hole_value());
  set(EntryToIndex(index) + 1, heap->the_hole_value());
  ElementRemoved();
}


// Adds a (maps, flags) -> code mapping to the polymorphic code cache,
// lazily creating the backing hash table on first use.
void PolymorphicCodeCache::Update(Handle<PolymorphicCodeCache> code_cache,
                                  MapHandleList* maps,
                                  Code::Flags flags,
                                  Handle<Code> code) {
  Isolate* isolate = code_cache->GetIsolate();
  if (code_cache->cache()->IsUndefined()) {
    Handle<PolymorphicCodeCacheHashTable> result =
        PolymorphicCodeCacheHashTable::New(
            isolate,
            PolymorphicCodeCacheHashTable::kInitialSize);
    code_cache->set_cache(*result);
  } else {
    // This entry shouldn't be contained in the cache yet.
    DCHECK(PolymorphicCodeCacheHashTable::cast(code_cache->cache())
               ->Lookup(maps, flags)->IsUndefined());
  }
  Handle<PolymorphicCodeCacheHashTable> hash_table =
      handle(PolymorphicCodeCacheHashTable::cast(code_cache->cache()));
  Handle<PolymorphicCodeCacheHashTable> new_cache =
      PolymorphicCodeCacheHashTable::Put(hash_table, maps, flags, code);
  code_cache->set_cache(*new_cache);
}


// Returns the cached code for (maps, flags), or undefined if the cache has
// not been created yet or has no matching entry.
Handle<Object> PolymorphicCodeCache::Lookup(MapHandleList* maps,
                                            Code::Flags flags) {
  if (!cache()->IsUndefined()) {
    PolymorphicCodeCacheHashTable* hash_table =
        PolymorphicCodeCacheHashTable::cast(cache());
    return Handle<Object>(hash_table->Lookup(maps, flags), GetIsolate());
  } else {
    return GetIsolate()->factory()->undefined_value();
  }
}


// Despite their name, objects of this class are not stored in the actual
// hash table; instead they're temporarily used for lookups. It is therefore
// safe to have a weak (non-owning) pointer to a MapList as a member field.
class PolymorphicCodeCacheHashTableKey : public HashTableKey {
 public:
  // Callers must ensure that |maps| outlives the newly constructed object.
  PolymorphicCodeCacheHashTableKey(MapHandleList* maps, int code_flags)
      : maps_(maps),
        code_flags_(code_flags) {}

  bool IsMatch(Object* other) OVERRIDE {
    MapHandleList other_maps(kDefaultListAllocationSize);
    int other_flags;
    FromObject(other, &other_flags, &other_maps);
    if (code_flags_ != other_flags) return false;
    if (maps_->length() != other_maps.length()) return false;
    // Compare just the hashes first because it's faster.
    int this_hash = MapsHashHelper(maps_, code_flags_);
    int other_hash = MapsHashHelper(&other_maps, other_flags);
    if (this_hash != other_hash) return false;

    // Full comparison: for each map in maps_, look for an equivalent map in
    // other_maps. This implementation is slow, but probably good enough for
    // now because the lists are short (<= 4 elements currently).
    for (int i = 0; i < maps_->length(); ++i) {
      bool match_found = false;
      for (int j = 0; j < other_maps.length(); ++j) {
        if (*(maps_->at(i)) == *(other_maps.at(j))) {
          match_found = true;
          break;
        }
      }
      if (!match_found) return false;
    }
    return true;
  }

  // Order-independent combined hash: flags XORed with each map's hash, so
  // two lists with the same maps in any order hash alike.
  static uint32_t MapsHashHelper(MapHandleList* maps, int code_flags) {
    uint32_t hash = code_flags;
    for (int i = 0; i < maps->length(); ++i) {
      hash ^= maps->at(i)->Hash();
    }
    return hash;
  }

  uint32_t Hash() OVERRIDE {
    return MapsHashHelper(maps_, code_flags_);
  }

  uint32_t HashForObject(Object* obj) OVERRIDE {
    MapHandleList other_maps(kDefaultListAllocationSize);
    int other_flags;
    FromObject(obj, &other_flags, &other_maps);
    return MapsHashHelper(&other_maps, other_flags);
  }

  MUST_USE_RESULT Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
    // The maps in |maps_| must be copied to a newly allocated FixedArray,
    // both because the referenced MapList is short-lived, and because C++
    // objects can't be stored in the heap anyway.
    Handle<FixedArray> list =
        isolate->factory()->NewUninitializedFixedArray(maps_->length() + 1);
    list->set(0, Smi::FromInt(code_flags_));
    for (int i = 0; i < maps_->length(); ++i) {
      list->set(i + 1, *maps_->at(i));
    }
    return list;
  }

 private:
  // Decodes a stored key back into (code_flags, maps). The stored layout is
  // [flags, map0, map1, ...] as produced by AsHandle() above.
  static MapHandleList* FromObject(Object* obj,
                                   int* code_flags,
                                   MapHandleList* maps) {
    FixedArray* list = FixedArray::cast(obj);
    maps->Rewind(0);
    *code_flags = Smi::cast(list->get(0))->value();
    for (int i = 1; i < list->length(); ++i) {
      maps->Add(Handle<Map>(Map::cast(list->get(i))));
    }
    return maps;
  }

  MapHandleList* maps_;  // weak.
  int code_flags_;
  static const int kDefaultListAllocationSize = kMaxKeyedPolymorphism + 1;
};


// Returns the cached code for (maps, code_kind), or undefined if absent.
Object* PolymorphicCodeCacheHashTable::Lookup(MapHandleList* maps,
                                              int code_kind) {
  DisallowHeapAllocation no_alloc;
  PolymorphicCodeCacheHashTableKey key(maps, code_kind);
  int entry = FindEntry(&key);
  if (entry == kNotFound) return GetHeap()->undefined_value();
  return get(EntryToIndex(entry) + 1);
}


// Inserts a (maps, code_kind) -> code mapping, growing the table if needed,
// and returns the (possibly reallocated) table.
Handle<PolymorphicCodeCacheHashTable> PolymorphicCodeCacheHashTable::Put(
    Handle<PolymorphicCodeCacheHashTable> hash_table,
    MapHandleList* maps,
    int code_kind,
    Handle<Code> code) {
  PolymorphicCodeCacheHashTableKey key(maps, code_kind);
  Handle<PolymorphicCodeCacheHashTable> cache =
      EnsureCapacity(hash_table, 1, &key);
  int entry = cache->FindInsertionEntry(key.Hash());

  Handle<Object> obj = key.AsHandle(hash_table->GetIsolate());
  cache->set(EntryToIndex(entry), *obj);
  cache->set(EntryToIndex(entry) + 1, *code);
  cache->ElementAdded();
  return cache;
}


// Shortens this array in place to |new_length| elements by right-trimming
// the trailing storage.
void FixedArray::Shrink(int new_length) {
  DCHECK(0 <= new_length && new_length <= length());
  if (new_length < length()) {
    GetHeap()->RightTrimFixedArray<Heap::FROM_MUTATOR>(
        this, length() - new_length);
  }
}


// Returns |content| augmented with the keys of array-like |array| via the
// array's elements accessor.
MaybeHandle<FixedArray> FixedArray::AddKeysFromArrayLike(
    Handle<FixedArray> content,
    Handle<JSObject> array) {
  DCHECK(array->IsJSArray() || array->HasSloppyArgumentsElements());
  ElementsAccessor* accessor = array->GetElementsAccessor();
  Handle<FixedArray> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      array->GetIsolate(), result,
      accessor->AddElementsToFixedArray(array, array, content),
      FixedArray);

#ifdef ENABLE_SLOW_DCHECKS
  if (FLAG_enable_slow_asserts) {
    DisallowHeapAllocation no_allocation;
    // Keys must be numbers or names.
    for (int i = 0; i < result->length(); i++) {
      Object* current =
          result->get(i);
      DCHECK(current->IsNumber() || current->IsName());
    }
  }
#endif
  return result;
}


// Returns the union of the elements of |first| and |second| as a FixedArray
// of keys (numbers or names).
MaybeHandle<FixedArray> FixedArray::UnionOfKeys(Handle<FixedArray> first,
                                                Handle<FixedArray> second) {
  ElementsAccessor* accessor = ElementsAccessor::ForArray(second);
  Handle<FixedArray> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      first->GetIsolate(), result,
      accessor->AddElementsToFixedArray(
          Handle<Object>::null(),     // receiver
          Handle<JSObject>::null(),   // holder
          first,
          Handle<FixedArrayBase>::cast(second)),
      FixedArray);

#ifdef ENABLE_SLOW_DCHECKS
  if (FLAG_enable_slow_asserts) {
    DisallowHeapAllocation no_allocation;
    // Keys must be numbers or names.
    for (int i = 0; i < result->length(); i++) {
      Object* current = result->get(i);
      DCHECK(current->IsNumber() || current->IsName());
    }
  }
#endif
  return result;
}


// Returns a copy of |array| with length |new_length|, copying over the
// first min(old length, new length) elements.
Handle<FixedArray> FixedArray::CopySize(
    Handle<FixedArray> array, int new_length, PretenureFlag pretenure) {
  Isolate* isolate = array->GetIsolate();
  if (new_length == 0) return isolate->factory()->empty_fixed_array();
  Handle<FixedArray> result =
      isolate->factory()->NewFixedArray(new_length, pretenure);
  // Copy the content
  DisallowHeapAllocation no_gc;
  int len = array->length();
  if (new_length < len) len = new_length;
  // We are taking the map from the old fixed array so the map is sure to
  // be an immortal immutable object.
  result->set_map_no_write_barrier(array->map());
  WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
  for (int i = 0; i < len; i++) {
    result->set(i, array->get(i), mode);
  }
  return result;
}


// Copies |len| elements starting at |pos| into |dest| starting at
// |dest_pos|, using the destination's write barrier mode.
void FixedArray::CopyTo(int pos, FixedArray* dest, int dest_pos, int len) {
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = dest->GetWriteBarrierMode(no_gc);
  for (int index = 0; index < len; index++) {
    dest->set(dest_pos+index, get(pos+index), mode);
  }
}


#ifdef DEBUG
// Debug-only element-wise identity comparison.
bool FixedArray::IsEqualTo(FixedArray* other) {
  if (length() != other->length()) return false;
  for (int i = 0 ; i < length(); ++i) {
    if (get(i) != other->get(i)) return false;
  }
  return true;
}
#endif


// Allocates a DescriptorArray with room for |number_of_descriptors|
// descriptors plus |slack| spare slots.
Handle<DescriptorArray> DescriptorArray::Allocate(Isolate* isolate,
                                                  int number_of_descriptors,
                                                  int slack) {
  DCHECK(0 <= number_of_descriptors);
  Factory* factory = isolate->factory();
  // Do not use DescriptorArray::cast on incomplete object.
  int size = number_of_descriptors + slack;
  if (size == 0) return factory->empty_descriptor_array();
  // Allocate the array of keys.
  Handle<FixedArray> result = factory->NewFixedArray(LengthFor(size));

  result->set(kDescriptorLengthIndex, Smi::FromInt(number_of_descriptors));
  result->set(kEnumCacheIndex, Smi::FromInt(0));
  return Handle<DescriptorArray>::cast(result);
}


// Drops the enum cache by resetting its slot to the zero Smi.
void DescriptorArray::ClearEnumCache() {
  set(kEnumCacheIndex, Smi::FromInt(0));
}


// Overwrites the descriptor at |index| while preserving its position in the
// sorted-key order.
void DescriptorArray::Replace(int index, Descriptor* descriptor) {
  descriptor->SetSortedKeyIndex(GetSortedKeyIndex(index));
  Set(index, descriptor);
}


// Installs |new_cache| (and |new_index_cache|, a Smi when absent) as the
// enum cache, stored indirectly through the |bridge_storage| FixedArray.
void DescriptorArray::SetEnumCache(FixedArray* bridge_storage,
                                   FixedArray* new_cache,
                                   Object* new_index_cache) {
  DCHECK(bridge_storage->length() >= kEnumCacheBridgeLength);
  DCHECK(new_index_cache->IsSmi() || new_index_cache->IsFixedArray());
  DCHECK(!IsEmpty());
  // A new enum cache may only grow the existing one.
  DCHECK(!HasEnumCache() || new_cache->length() > GetEnumCache()->length());
  FixedArray::cast(bridge_storage)->
      set(kEnumCacheBridgeCacheIndex, new_cache);
  FixedArray::cast(bridge_storage)->
      set(kEnumCacheBridgeIndicesCacheIndex, new_index_cache);
  set(kEnumCacheIndex, bridge_storage);
}


// Copies the descriptor (key, value, details) at |index| from |src| into
// this array.
void DescriptorArray::CopyFrom(int index,
                               DescriptorArray* src,
                               const WhitenessWitness& witness) {
  Object* value = src->GetValue(index);
  PropertyDetails details = src->GetDetails(index);
  Descriptor desc(handle(src->GetKey(index)),
                  handle(value, src->GetIsolate()),
                  details);
  Set(index, &desc, witness);
}


// We need the whiteness witness since sort will reshuffle the entries in the
// descriptor array. If the descriptor array were to be black, the shuffling
// would move a slot that was already recorded as pointing into an evacuation
// candidate. This would result in missing updates upon evacuation.
void DescriptorArray::Sort() {
  // In-place heap sort over the sorted-key indices, ordered by key hash.
  int len = number_of_descriptors();
  // Reset sorting since the descriptor array might contain invalid pointers.
  for (int i = 0; i < len; ++i) SetSortedKey(i, i);
  // Bottom-up max-heap construction.
  // Index of the last node with children
  const int max_parent_index = (len / 2) - 1;
  for (int i = max_parent_index; i >= 0; --i) {
    int parent_index = i;
    const uint32_t parent_hash = GetSortedKey(i)->Hash();
    // Sift the parent down until the max-heap property holds.
    while (parent_index <= max_parent_index) {
      int child_index = 2 * parent_index + 1;
      uint32_t child_hash = GetSortedKey(child_index)->Hash();
      if (child_index + 1 < len) {
        uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
        if (right_child_hash > child_hash) {
          child_index++;
          child_hash = right_child_hash;
        }
      }
      if (child_hash <= parent_hash) break;
      SwapSortedKeys(parent_index, child_index);
      // Now element at child_index could be < its children.
      parent_index = child_index;  // parent_hash remains correct.
    }
  }

  // Extract elements and create sorted array.
  for (int i = len - 1; i > 0; --i) {
    // Put max element at the back of the array.
    SwapSortedKeys(0, i);
    // Shift down the new top element.
    int parent_index = 0;
    const uint32_t parent_hash = GetSortedKey(parent_index)->Hash();
    const int max_parent_index = (i / 2) - 1;
    while (parent_index <= max_parent_index) {
      int child_index = parent_index * 2 + 1;
      uint32_t child_hash = GetSortedKey(child_index)->Hash();
      if (child_index + 1 < i) {
        uint32_t right_child_hash = GetSortedKey(child_index + 1)->Hash();
        if (right_child_hash > child_hash) {
          child_index++;
          child_hash = right_child_hash;
        }
      }
      if (child_hash <= parent_hash) break;
      SwapSortedKeys(parent_index, child_index);
      parent_index = child_index;
    }
  }
  DCHECK(IsSortedNoDuplicates());
}


// Returns a new AccessorPair with the same getter and setter as |pair|.
Handle<AccessorPair> AccessorPair::Copy(Handle<AccessorPair> pair) {
  Handle<AccessorPair> copy = pair->GetIsolate()->factory()->NewAccessorPair();
  copy->set_getter(pair->getter());
  copy->set_setter(pair->setter());
  return copy;
}


// Returns the requested accessor, mapping an unset (hole) slot to undefined.
Object* AccessorPair::GetComponent(AccessorComponent component) {
  Object* accessor = get(component);
  return accessor->IsTheHole() ?
      GetHeap()->undefined_value() : accessor;
}


// Allocates a DeoptimizationInputData with room for |deopt_entry_count|
// entries.
Handle<DeoptimizationInputData> DeoptimizationInputData::New(
    Isolate* isolate, int deopt_entry_count, PretenureFlag pretenure) {
  DCHECK(deopt_entry_count > 0);
  return Handle<DeoptimizationInputData>::cast(
      isolate->factory()->NewFixedArray(LengthFor(deopt_entry_count),
                                        pretenure));
}


// Allocates a DeoptimizationOutputData with room for
// |number_of_deopt_points| entries; zero points share the empty array.
Handle<DeoptimizationOutputData> DeoptimizationOutputData::New(
    Isolate* isolate,
    int number_of_deopt_points,
    PretenureFlag pretenure) {
  Handle<FixedArray> result;
  if (number_of_deopt_points == 0) {
    result = isolate->factory()->empty_fixed_array();
  } else {
    result = isolate->factory()->NewFixedArray(
        LengthOfFixedArray(number_of_deopt_points), pretenure);
  }
  return Handle<DeoptimizationOutputData>::cast(result);
}


#ifdef DEBUG
// Debug-only element-wise identity comparison; empty arrays only equal
// other empty arrays.
bool DescriptorArray::IsEqualTo(DescriptorArray* other) {
  if (IsEmpty()) return other->IsEmpty();
  if (other->IsEmpty()) return false;
  if (length() != other->length()) return false;
  for (int i = 0; i < length(); ++i) {
    if (get(i) != other->get(i)) return false;
  }
  return true;
}
#endif


// Cheap sanity check used by robust traversal: the string must at least
// live inside its isolate's heap.
bool String::LooksValid() {
  if (!GetIsolate()->heap()->Contains(this)) return false;
  return true;
}


// Returns a direct view of the string's characters, or a non-flat
// FlatContent when the characters cannot be viewed directly (e.g. an
// unflattened cons string). Callers must prevent GC while using the view.
String::FlatContent String::GetFlatContent() {
  DCHECK(!AllowHeapAllocation::IsAllowed());
  int length = this->length();
  StringShape shape(this);
  String* string = this;
  int offset = 0;
  if (shape.representation_tag() == kConsStringTag) {
    ConsString* cons = ConsString::cast(string);
    // Only a flattened cons (empty second part) exposes flat content.
    if (cons->second()->length() != 0) {
      return FlatContent();
    }
    string = cons->first();
    shape = StringShape(string);
  }
  if (shape.representation_tag() == kSlicedStringTag) {
    SlicedString* slice = SlicedString::cast(string);
    offset = slice->offset();
    string = slice->parent();
    shape = StringShape(string);
    // Slice parents are always flat (never cons or slice themselves).
    DCHECK(shape.representation_tag() != kConsStringTag &&
           shape.representation_tag() != kSlicedStringTag);
  }
  if (shape.encoding_tag() == kOneByteStringTag) {
    const uint8_t* start;
    if (shape.representation_tag() == kSeqStringTag) {
      start = SeqOneByteString::cast(string)->GetChars();
    } else {
      start = ExternalOneByteString::cast(string)->GetChars();
    }
    return FlatContent(start + offset, length);
  } else {
    DCHECK(shape.encoding_tag() == kTwoByteStringTag);
    const uc16* start;
    if (shape.representation_tag() == kSeqStringTag) {
      start = SeqTwoByteString::cast(string)->GetChars();
    } else {
      start = ExternalTwoByteString::cast(string)->GetChars();
    }
    return FlatContent(start + offset, length);
  }
}


// Converts the range [offset, offset + length) of this string to a newly
// allocated, NUL-terminated UTF-8 buffer. Optionally reports the UTF-8 byte
// length through |length_return|.
SmartArrayPointer<char> String::ToCString(AllowNullsFlag allow_nulls,
                                          RobustnessFlag robust_flag,
                                          int offset,
                                          int length,
                                          int* length_return) {
  if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
    return SmartArrayPointer<char>(NULL);
  }
  Heap* heap = GetHeap();

  // Negative length means up to the end of the string.
  if (length < 0) length = kMaxInt - offset;

  // Compute the size of the UTF-8 string. Start at the specified offset.
  Access<ConsStringIteratorOp> op(
      heap->isolate()->objects_string_iterator());
  StringCharacterStream stream(this, op.value(), offset);
  int character_position = offset;
  int utf8_bytes = 0;
  int last = unibrow::Utf16::kNoPreviousCharacter;
  while (stream.HasMore() && character_position++ < offset + length) {
    uint16_t character = stream.GetNext();
    utf8_bytes += unibrow::Utf8::Length(character, last);
    last = character;
  }

  if (length_return) {
    *length_return = utf8_bytes;
  }

  char* result = NewArray<char>(utf8_bytes + 1);

  // Convert the UTF-16 string to a UTF-8 buffer. Start at the specified offset.
  stream.Reset(this, offset);
  character_position = offset;
  int utf8_byte_position = 0;
  last = unibrow::Utf16::kNoPreviousCharacter;
  while (stream.HasMore() && character_position++ < offset + length) {
    uint16_t character = stream.GetNext();
    // Replace embedded NULs with spaces when the caller disallows them.
    if (allow_nulls == DISALLOW_NULLS && character == 0) {
      character = ' ';
    }
    utf8_byte_position +=
        unibrow::Utf8::Encode(result + utf8_byte_position, character, last);
    last = character;
  }
  result[utf8_byte_position] = 0;
  return SmartArrayPointer<char>(result);
}


// Convenience overload: converts the whole string.
SmartArrayPointer<char> String::ToCString(AllowNullsFlag allow_nulls,
                                          RobustnessFlag robust_flag,
                                          int* length_return) {
  return ToCString(allow_nulls, robust_flag, 0, -1, length_return);
}


// Returns a pointer to the two-byte character data starting at |start|;
// only valid for strings whose underlying representation is two-byte.
const uc16* String::GetTwoByteData(unsigned start) {
  DCHECK(!IsOneByteRepresentationUnderneath());
  switch (StringShape(this).representation_tag()) {
    case kSeqStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGetData(start);
    case kExternalStringTag:
      return ExternalTwoByteString::cast(this)->
          ExternalTwoByteStringGetData(start);
    case kSlicedStringTag: {
      SlicedString* slice = SlicedString::cast(this);
      return
          slice->parent()->GetTwoByteData(start + slice->offset());
    }
    case kConsStringTag:
      // Cons strings have no directly addressable character data.
      UNREACHABLE();
      return NULL;
  }
  UNREACHABLE();
  return NULL;
}


// Copies this string into a freshly allocated, NUL-terminated uc16 buffer.
SmartArrayPointer<uc16> String::ToWideCString(RobustnessFlag robust_flag) {
  if (robust_flag == ROBUST_STRING_TRAVERSAL && !LooksValid()) {
    return SmartArrayPointer<uc16>();
  }
  Heap* heap = GetHeap();

  Access<ConsStringIteratorOp> op(
      heap->isolate()->objects_string_iterator());
  StringCharacterStream stream(this, op.value());

  uc16* result = NewArray<uc16>(length() + 1);

  int i = 0;
  while (stream.HasMore()) {
    uint16_t character = stream.GetNext();
    result[i++] = character;
  }
  result[i] = 0;
  return SmartArrayPointer<uc16>(result);
}


// Returns a raw pointer into this sequential string's character payload,
// skipping the object header.
const uc16* SeqTwoByteString::SeqTwoByteStringGetData(unsigned start) {
  return reinterpret_cast<uc16*>(
      reinterpret_cast<char*>(this) - kHeapObjectTag + kHeaderSize) + start;
}


// Notifies every Relocatable on this isolate's chain that a GC happened so
// it can refresh any raw pointers it caches.
void Relocatable::PostGarbageCollectionProcessing(Isolate* isolate) {
  Relocatable* current = isolate->relocatable_top();
  while (current != NULL) {
    current->PostGarbageCollection();
    current = current->prev_;
  }
}


// Reserve space for statics needing saving and restoring.
int Relocatable::ArchiveSpacePerThread() {
  return sizeof(Relocatable*);  // NOLINT
}


// Archive statics that are thread-local.
char* Relocatable::ArchiveState(Isolate* isolate, char* to) {
  *reinterpret_cast<Relocatable**>(to) = isolate->relocatable_top();
  isolate->set_relocatable_top(NULL);
  return to + ArchiveSpacePerThread();
}


// Restore statics that are thread-local.
char* Relocatable::RestoreState(Isolate* isolate, char* from) {
  isolate->set_relocatable_top(*reinterpret_cast<Relocatable**>(from));
  return from + ArchiveSpacePerThread();
}


// Visits the archived relocatable chain stored in |thread_storage|.
char* Relocatable::Iterate(ObjectVisitor* v, char* thread_storage) {
  Relocatable* top = *reinterpret_cast<Relocatable**>(thread_storage);
  Iterate(v, top);
  return thread_storage + ArchiveSpacePerThread();
}


// Visits the current isolate's relocatable chain.
void Relocatable::Iterate(Isolate* isolate, ObjectVisitor* v) {
  Iterate(v, isolate->relocatable_top());
}


// Walks the intrusive list starting at |top|, letting each entry visit its
// own state.
void Relocatable::Iterate(ObjectVisitor* v, Relocatable* top) {
  Relocatable* current = top;
  while (current != NULL) {
    current->IterateInstance(v);
    current = current->prev_;
  }
}


// Reader over a heap string; caches a raw character pointer that is
// refreshed after every GC (via Relocatable).
FlatStringReader::FlatStringReader(Isolate* isolate, Handle<String> str)
    : Relocatable(isolate),
      str_(str.location()),
      length_(str->length()) {
  PostGarbageCollection();
}


// Reader over an external char vector; nothing to refresh on GC (str_ == 0).
FlatStringReader::FlatStringReader(Isolate* isolate, Vector<const char> input)
    : Relocatable(isolate),
      str_(0),
      is_one_byte_(true),
      length_(input.length()),
      start_(input.start()) {}


// Re-caches the character pointer after a GC may have moved the string.
void FlatStringReader::PostGarbageCollection() {
  if (str_ == NULL) return;
  Handle<String> str(str_);
  DCHECK(str->IsFlat());
  DisallowHeapAllocation no_gc;
  // This does not actually prevent the vector from being relocated later.
  String::FlatContent content = str->GetFlatContent();
  DCHECK(content.IsFlat());
  is_one_byte_ = content.IsOneByte();
  if (is_one_byte_) {
    start_ = content.ToOneByteVector().start();
  } else {
    start_ = content.ToUC16Vector().start();
  }
}


// Prepares the iterator to (re)start traversal of |cons_string| at |offset|.
void ConsStringIteratorOp::Initialize(ConsString* cons_string, int offset) {
  DCHECK(cons_string != NULL);
  root_ = cons_string;
  consumed_ = offset;
  // Force stack blown condition to trigger restart.
  depth_ = 1;
  maximum_depth_ = kStackSize + depth_;
  DCHECK(StackBlown());
}


// Returns the next leaf string (and its start offset via |offset_out|),
// restarting from the root if the traversal stack was blown; returns NULL
// when the traversal is complete.
String* ConsStringIteratorOp::Continue(int* offset_out) {
  DCHECK(depth_ != 0);
  DCHECK_EQ(0, *offset_out);
  bool blew_stack = StackBlown();
  String* string = NULL;
  // Get the next leaf if there is one.
  if (!blew_stack) string = NextLeaf(&blew_stack);
  // Restart search from root.
  if (blew_stack) {
    DCHECK(string == NULL);
    string = Search(offset_out);
  }
  // Ensure future calls return null immediately.
  if (string == NULL) Reset(NULL);
  return string;
}


// Descends from the root to the leaf containing character position
// consumed_, rebuilding the traversal stack along the way.
String* ConsStringIteratorOp::Search(int* offset_out) {
  ConsString* cons_string = root_;
  // Reset the stack, pushing the root string.
  depth_ = 1;
  maximum_depth_ = 1;
  frames_[0] = cons_string;
  const int consumed = consumed_;
  int offset = 0;
  while (true) {
    // Loop until the string is found which contains the target offset.
    String* string = cons_string->first();
    int length = string->length();
    int32_t type;
    if (consumed < offset + length) {
      // Target offset is in the left branch.
      // Keep going if we're still in a ConString.
      type = string->map()->instance_type();
      if ((type & kStringRepresentationMask) == kConsStringTag) {
        cons_string = ConsString::cast(string);
        PushLeft(cons_string);
        continue;
      }
      // Tell the stack we're done descending.
      AdjustMaximumDepth();
    } else {
      // Descend right.
      // Update progress through the string.
      offset += length;
      // Keep going if we're still in a ConString.
      string = cons_string->second();
      type = string->map()->instance_type();
      if ((type & kStringRepresentationMask) == kConsStringTag) {
        cons_string = ConsString::cast(string);
        PushRight(cons_string);
        continue;
      }
      // Need this to be updated for the current string.
      length = string->length();
      // Account for the possibility of an empty right leaf.
      // This happens only if we have asked for an offset outside the string.
      if (length == 0) {
        // Reset so future operations will return null immediately.
        Reset(NULL);
        return NULL;
      }
      // Tell the stack we're done descending.
      AdjustMaximumDepth();
      // Pop stack so next iteration is in correct place.
      Pop();
    }
    DCHECK(length != 0);
    // Adjust return values and exit.
    consumed_ = offset + length;
    *offset_out = consumed - offset;
    return string;
  }
  UNREACHABLE();
  return NULL;
}


// Advances to the next leaf using the existing traversal stack; sets
// |blew_stack| when the stack no longer tracks the higher tree nodes and a
// restart via Search() is required.
String* ConsStringIteratorOp::NextLeaf(bool* blew_stack) {
  while (true) {
    // Tree traversal complete.
    if (depth_ == 0) {
      *blew_stack = false;
      return NULL;
    }
    // We've lost track of higher nodes.
    if (StackBlown()) {
      *blew_stack = true;
      return NULL;
    }
    // Go right.
    ConsString* cons_string = frames_[OffsetForDepth(depth_ - 1)];
    String* string = cons_string->second();
    int32_t type = string->map()->instance_type();
    if ((type & kStringRepresentationMask) != kConsStringTag) {
      // Pop stack so next iteration is in correct place.
      Pop();
      int length = string->length();
      // Could be a flattened ConsString.
      if (length == 0) continue;
      consumed_ += length;
      return string;
    }
    cons_string = ConsString::cast(string);
    PushRight(cons_string);
    // Need to traverse all the way left.
    while (true) {
      // Continue left.
      string = cons_string->first();
      type = string->map()->instance_type();
      if ((type & kStringRepresentationMask) != kConsStringTag) {
        AdjustMaximumDepth();
        int length = string->length();
        DCHECK(length != 0);
        consumed_ += length;
        return string;
      }
      cons_string = ConsString::cast(string);
      PushLeft(cons_string);
    }
  }
  UNREACHABLE();
  return NULL;
}


// Character access for cons strings: walks down the tree to the leaf that
// holds position |index|.
uint16_t ConsString::ConsStringGet(int index) {
  DCHECK(index >= 0 && index < this->length());

  // Check for a flattened cons string
  if (second()->length() == 0) {
    String* left = first();
    return left->Get(index);
  }

  String* string = String::cast(this);

  while (true) {
    if (StringShape(string).IsCons()) {
      ConsString* cons_string = ConsString::cast(string);
      String* left = cons_string->first();
      if (left->length() > index) {
        string = left;
      } else {
        // Index is in the right subtree; rebase it.
        index -= left->length();
        string = cons_string->second();
      }
    } else {
      return string->Get(index);
    }
  }

  UNREACHABLE();
  return 0;
}


// Character access for sliced strings: delegate to the parent at an offset.
uint16_t SlicedString::SlicedStringGet(int index) {
  return parent()->Get(offset() + index);
}


// Copies characters [f, t) of |src| into |sink|, unrolling cons and sliced
// strings; recursion is limited to the shorter side of each cons node.
template <typename sinkchar>
void String::WriteToFlat(String* src,
                         sinkchar* sink,
                         int f,
                         int t) {
  String* source = src;
  int from = f;
  int to = t;
  while (true) {
    DCHECK(0 <= from && from <= to && to <= source->length());
    switch (StringShape(source).full_representation_tag()) {
      case kOneByteStringTag | kExternalStringTag: {
        CopyChars(sink, ExternalOneByteString::cast(source)->GetChars() + from,
                  to - from);
        return;
      }
      case kTwoByteStringTag | kExternalStringTag: {
        const uc16* data =
            ExternalTwoByteString::cast(source)->GetChars();
        CopyChars(sink,
                  data + from,
                  to - from);
        return;
      }
      case kOneByteStringTag | kSeqStringTag: {
        CopyChars(sink,
                  SeqOneByteString::cast(source)->GetChars() + from,
                  to - from);
        return;
      }
      case kTwoByteStringTag | kSeqStringTag: {
        CopyChars(sink,
                  SeqTwoByteString::cast(source)->GetChars() + from,
                  to - from);
        return;
      }
      case kOneByteStringTag | kConsStringTag:
      case kTwoByteStringTag | kConsStringTag: {
        ConsString* cons_string = ConsString::cast(source);
        String* first = cons_string->first();
        int boundary = first->length();
        if (to - boundary >= boundary - from) {
          // Right hand side is longer. Recurse over left.
          if (from < boundary) {
            WriteToFlat(first, sink, from, boundary);
            sink += boundary - from;
            from = 0;
          } else {
            from -= boundary;
          }
          to -= boundary;
          source = cons_string->second();
        } else {
          // Left hand side is longer. Recurse over right.
          if (to > boundary) {
            String* second = cons_string->second();
            // When repeatedly appending to a string, we get a cons string that
            // is unbalanced to the left, a list, essentially. We inline the
            // common case of sequential one-byte right child.
            if (to - boundary == 1) {
              sink[boundary - from] = static_cast<sinkchar>(second->Get(0));
            } else if (second->IsSeqOneByteString()) {
              CopyChars(sink + boundary - from,
                        SeqOneByteString::cast(second)->GetChars(),
                        to - boundary);
            } else {
              WriteToFlat(second,
                          sink + boundary - from,
                          0,
                          to - boundary);
            }
            to = boundary;
          }
          source = first;
        }
        break;
      }
      case kOneByteStringTag | kSlicedStringTag:
      case kTwoByteStringTag | kSlicedStringTag: {
        SlicedString* slice = SlicedString::cast(source);
        unsigned offset = slice->offset();
        WriteToFlat(slice->parent(), sink, from + offset, to + offset);
        return;
      }
    }
  }
}


// Records into |line_ends| the position of each '\n' in |src|, and — when
// |include_ending_line| is set — the end of a final unterminated line.
template <typename SourceChar>
static void CalculateLineEndsImpl(Isolate* isolate,
                                  List<int>* line_ends,
                                  Vector<const SourceChar> src,
                                  bool include_ending_line) {
  const int src_len = src.length();
  StringSearch<uint8_t, SourceChar> search(isolate, STATIC_CHAR_VECTOR("\n"));

  // Find and record line ends.
  int position = 0;
  while (position != -1 && position < src_len) {
    position = search.Search(src, position);
    if (position != -1) {
      line_ends->Add(position);
      position++;
    } else if (include_ending_line) {
      // Even if the last line misses a line end, it is counted.
      line_ends->Add(src_len);
      return;
    }
  }
}


// Returns a FixedArray of Smi line-end positions for |src|.
Handle<FixedArray> String::CalculateLineEnds(Handle<String> src,
                                             bool include_ending_line) {
  src = Flatten(src);
  // Rough estimate of line count based on a roughly estimated average
  // length of (unpacked) code.
  int line_count_estimate = src->length() >> 4;
  List<int> line_ends(line_count_estimate);
  Isolate* isolate = src->GetIsolate();
  { DisallowHeapAllocation no_allocation;  // ensure vectors stay valid.
    // Dispatch on type of strings.
    String::FlatContent content = src->GetFlatContent();
    DCHECK(content.IsFlat());
    if (content.IsOneByte()) {
      CalculateLineEndsImpl(isolate,
                            &line_ends,
                            content.ToOneByteVector(),
                            include_ending_line);
    } else {
      CalculateLineEndsImpl(isolate,
                            &line_ends,
                            content.ToUC16Vector(),
                            include_ending_line);
    }
  }
  // Copy the collected offsets into a heap-allocated FixedArray of Smis.
  int line_count = line_ends.length();
  Handle<FixedArray> array = isolate->factory()->NewFixedArray(line_count);
  for (int i = 0; i < line_count; i++) {
    array->set(i, Smi::FromInt(line_ends[i]));
  }
  return array;
}


// Compares the contents of two strings by reading and comparing
// int-sized blocks of characters.
template <typename Char>
static inline bool CompareRawStringContents(const Char* const a,
                                            const Char* const b,
                                            int length) {
  return CompareChars(a, b, length) == 0;
}


// Generic mixed-width comparison: used when the two buffers have different
// character widths (one-byte vs two-byte), so a plain memcmp-style block
// compare is not applicable.
template<typename Chars1, typename Chars2>
class RawStringComparator : public AllStatic {
 public:
  static inline bool compare(const Chars1* a, const Chars2* b, int len) {
    DCHECK(sizeof(Chars1) != sizeof(Chars2));
    for (int i = 0; i < len; i++) {
      if (a[i] != b[i]) {
        return false;
      }
    }
    return true;
  }
};


// Same-width two-byte specialization: defers to the block comparator.
template<>
class RawStringComparator<uint16_t, uint16_t> {
 public:
  static inline bool compare(const uint16_t* a, const uint16_t* b, int len) {
    return CompareRawStringContents(a, b, len);
  }
};


// Same-width one-byte specialization: defers to the block comparator.
template<>
class RawStringComparator<uint8_t, uint8_t> {
 public:
  static inline bool compare(const uint8_t* a, const uint8_t* b, int len) {
    return CompareRawStringContents(a, b, len);
  }
};


// Compares two (possibly cons) strings segment by segment without
// flattening them, walking both with ConsStringIteratorOps in lockstep.
class StringComparator {
  // Cursor over the flat segments of one string.  Tracks the current
  // segment buffer (one- or two-byte) and how many characters remain in it.
  class State {
   public:
    explicit inline State(ConsStringIteratorOp* op)
        : op_(op), is_one_byte_(true), length_(0), buffer8_(NULL) {}

    // Positions the cursor at the first flat segment of |string|.
    inline void Init(String* string) {
      ConsString* cons_string = String::VisitFlat(this, string);
      op_->Reset(cons_string);
      if (cons_string != NULL) {
        int offset;
        string = op_->Next(&offset);
        String::VisitFlat(this, string, offset);
      }
    }

    // Callback from String::VisitFlat for a one-byte segment.
    inline void VisitOneByteString(const uint8_t* chars, int length) {
      is_one_byte_ = true;
      buffer8_ = chars;
      length_ = length;
    }

    // Callback from String::VisitFlat for a two-byte segment.
    inline void VisitTwoByteString(const uint16_t* chars, int length) {
      is_one_byte_ = false;
      buffer16_ = chars;
      length_ = length;
    }

    // Consumes |consumed| characters, moving to the next flat segment when
    // the current one is exhausted.
    void Advance(int consumed) {
      DCHECK(consumed <= length_);
      // Still in buffer.
      if (length_ != consumed) {
        if (is_one_byte_) {
          buffer8_ += consumed;
        } else {
          buffer16_ += consumed;
        }
        length_ -= consumed;
        return;
      }
      // Advance state.
      int offset;
      String* next = op_->Next(&offset);
      DCHECK_EQ(0, offset);
      DCHECK(next != NULL);
      String::VisitFlat(this, next);
    }

    ConsStringIteratorOp* const op_;
    bool is_one_byte_;
    int length_;
    // The two pointers alias: is_one_byte_ selects the active member.
    union {
      const uint8_t* buffer8_;
      const uint16_t* buffer16_;
    };

   private:
    DISALLOW_IMPLICIT_CONSTRUCTORS(State);
  };

 public:
  inline StringComparator(ConsStringIteratorOp* op_1,
                          ConsStringIteratorOp* op_2)
      : state_1_(op_1),
        state_2_(op_2) {
  }

  // Compares |to_check| characters of the current segments, reinterpreting
  // the union storage at the width selected by the template arguments.
  template<typename Chars1, typename Chars2>
  static inline bool Equals(State* state_1, State* state_2, int to_check) {
    const Chars1* a = reinterpret_cast<const Chars1*>(state_1->buffer8_);
    const Chars2* b = reinterpret_cast<const Chars2*>(state_2->buffer8_);
    return RawStringComparator<Chars1, Chars2>::compare(a, b, to_check);
  }

  // Returns true when the two strings have identical contents.  Callers
  // are expected to have already checked that the lengths match.
  bool Equals(String* string_1, String* string_2) {
    int length = string_1->length();
    state_1_.Init(string_1);
    state_2_.Init(string_2);
    while (true) {
      // Compare as far as both current segments allow.
      int to_check = Min(state_1_.length_, state_2_.length_);
      DCHECK(to_check > 0 && to_check <= length);
      bool is_equal;
      if (state_1_.is_one_byte_) {
        if (state_2_.is_one_byte_) {
          is_equal = Equals<uint8_t, uint8_t>(&state_1_, &state_2_, to_check);
        } else {
          is_equal = Equals<uint8_t, uint16_t>(&state_1_, &state_2_, to_check);
        }
      } else {
        if (state_2_.is_one_byte_) {
          is_equal = Equals<uint16_t, uint8_t>(&state_1_, &state_2_, to_check);
        } else {
          is_equal = Equals<uint16_t, uint16_t>(&state_1_, &state_2_, to_check);
        }
      }
      // Looping done.
      if (!is_equal) return false;
      length -= to_check;
      // Exit condition. Strings are equal.
      if (length == 0) return true;
      state_1_.Advance(to_check);
      state_2_.Advance(to_check);
    }
  }

 private:
  State state_1_;
  State state_2_;
  DISALLOW_IMPLICIT_CONSTRUCTORS(StringComparator);
};


// Full content comparison, used after the fast identity/length checks fail.
bool String::SlowEquals(String* other) {
  DisallowHeapAllocation no_gc;
  // Fast check: negative check with lengths.
  int len = length();
  if (len != other->length()) return false;
  if (len == 0) return true;

  // Fast check: if hash code is computed for both strings
  // a fast negative check can be performed.
  if (HasHashCode() && other->HasHashCode()) {
#ifdef ENABLE_SLOW_DCHECKS
    if (FLAG_enable_slow_asserts) {
      // Slow sanity check: different hashes must imply different contents.
      if (Hash() != other->Hash()) {
        bool found_difference = false;
        for (int i = 0; i < len; i++) {
          if (Get(i) != other->Get(i)) {
            found_difference = true;
            break;
          }
        }
        DCHECK(found_difference);
      }
    }
#endif
    if (Hash() != other->Hash()) return false;
  }

  // We know the strings are both non-empty. Compare the first chars
  // before we try to flatten the strings.
  if (this->Get(0) != other->Get(0)) return false;

  // Fast path: both flat one-byte — compare raw character buffers.
  if (IsSeqOneByteString() && other->IsSeqOneByteString()) {
    const uint8_t* str1 = SeqOneByteString::cast(this)->GetChars();
    const uint8_t* str2 = SeqOneByteString::cast(other)->GetChars();
    return CompareRawStringContents(str1, str2, len);
  }

  // General case: segment-wise comparison without flattening, reusing the
  // isolate's two preallocated cons-string iterators.
  Isolate* isolate = GetIsolate();
  StringComparator comparator(isolate->objects_string_compare_iterator_a(),
                              isolate->objects_string_compare_iterator_b());

  return comparator.Equals(this, other);
}


// Handlified variant of SlowEquals: may allocate (flattens both strings).
bool String::SlowEquals(Handle<String> one, Handle<String> two) {
  // Fast check: negative check with lengths.
  int one_length = one->length();
  if (one_length != two->length()) return false;
  if (one_length == 0) return true;

  // Fast check: if hash code is computed for both strings
  // a fast negative check can be performed.
  if (one->HasHashCode() && two->HasHashCode()) {
#ifdef ENABLE_SLOW_DCHECKS
    if (FLAG_enable_slow_asserts) {
      // Slow sanity check: different hashes must imply different contents.
      if (one->Hash() != two->Hash()) {
        bool found_difference = false;
        for (int i = 0; i < one_length; i++) {
          if (one->Get(i) != two->Get(i)) {
            found_difference = true;
            break;
          }
        }
        DCHECK(found_difference);
      }
    }
#endif
    if (one->Hash() != two->Hash()) return false;
  }

  // We know the strings are both non-empty. Compare the first chars
  // before we try to flatten the strings.
  if (one->Get(0) != two->Get(0)) return false;

  one = String::Flatten(one);
  two = String::Flatten(two);

  DisallowHeapAllocation no_gc;
  String::FlatContent flat1 = one->GetFlatContent();
  String::FlatContent flat2 = two->GetFlatContent();

  if (flat1.IsOneByte() && flat2.IsOneByte()) {
    // Both one-byte: raw buffer comparison.
    return CompareRawStringContents(flat1.ToOneByteVector().start(),
                                    flat2.ToOneByteVector().start(),
                                    one_length);
  } else {
    // Mixed or two-byte encodings: compare character by character.
    for (int i = 0; i < one_length; i++) {
      if (flat1.Get(i) != flat2.Get(i)) return false;
    }
    return true;
  }
}


// Switches the string's map to the undetectable variant when one exists for
// its current map.  Internalized strings are never marked.  Returns whether
// the mark was applied.
bool String::MarkAsUndetectable() {
  if (StringShape(this).IsInternalized()) return false;

  Map* map = this->map();
  Heap* heap = GetHeap();
  if (map == heap->string_map()) {
    this->set_map(heap->undetectable_string_map());
    return true;
  } else if (map == heap->one_byte_string_map()) {
    this->set_map(heap->undetectable_one_byte_string_map());
    return true;
  }
  // Rest cannot be marked as undetectable
  return false;
}


// Compares this string against the UTF-8 bytes in |str|, decoding UTF-8 on
// the fly (surrogate pairs included for supplementary-plane characters).
// With |allow_prefix_match|, |str| may match just a prefix of this string.
bool String::IsUtf8EqualTo(Vector<const char> str, bool allow_prefix_match) {
  int slen = length();
  // Can't check exact length equality, but we can check bounds.
  int str_len = str.length();
  if (!allow_prefix_match &&
      (str_len < slen ||
          str_len > slen*static_cast<int>(unibrow::Utf8::kMaxEncodedSize))) {
    return false;
  }
  int i;
  unsigned remaining_in_str = static_cast<unsigned>(str_len);
  const uint8_t* utf8_data = reinterpret_cast<const uint8_t*>(str.start());
  for (i = 0; i < slen && remaining_in_str > 0; i++) {
    unsigned cursor = 0;
    uint32_t r = unibrow::Utf8::ValueOf(utf8_data, remaining_in_str, &cursor);
    DCHECK(cursor > 0 && cursor <= remaining_in_str);
    if (r > unibrow::Utf16::kMaxNonSurrogateCharCode) {
      // Supplementary-plane character: must match a full surrogate pair.
      if (i > slen - 1) return false;
      if (Get(i++) != unibrow::Utf16::LeadSurrogate(r)) return false;
      if (Get(i) != unibrow::Utf16::TrailSurrogate(r)) return false;
    } else {
      if (Get(i) != r) return false;
    }
    utf8_data += cursor;
    remaining_in_str -= cursor;
  }
  // Equal only if both inputs were fully consumed (modulo prefix matching).
  return (allow_prefix_match || i == slen) && remaining_in_str == 0;
}


// Returns true when this string's contents equal the raw one-byte
// characters in |str|.
bool String::IsOneByteEqualTo(Vector<const uint8_t> str) {
  int slen = length();
  if (str.length() != slen) return false;
  DisallowHeapAllocation no_gc;
  FlatContent content = GetFlatContent();
  if (content.IsOneByte()) {
    // Fast path: both sides are raw one-byte buffers.
    return CompareChars(content.ToOneByteVector().start(),
                        str.start(), slen) == 0;
  }
  for (int i = 0; i < slen; i++) {
    if (Get(i) != static_cast<uint16_t>(str[i])) return false;
  }
  return true;
}


// Returns true when this string's contents equal the raw two-byte
// characters in |str|.
bool String::IsTwoByteEqualTo(Vector<const uc16> str) {
  int slen = length();
  if (str.length() != slen) return false;
  DisallowHeapAllocation no_gc;
  FlatContent content = GetFlatContent();
  if (content.IsTwoByte()) {
    return CompareChars(content.ToUC16Vector().start(), str.start(), slen) == 0;
  }
  for (int i = 0; i < slen; i++) {
    if (Get(i) != str[i]) return false;
  }
  return true;
}


uint32_t String::ComputeAndSetHash() {
  // Should only be called if hash code has not yet been computed.
  DCHECK(!HasHashCode());

  // Store the hash code in the object.
  uint32_t field = IteratingStringHasher::Hash(this, GetHeap()->HashSeed());
  set_hash_field(field);

  // Check the hash code is there.
  DCHECK(HasHashCode());
  uint32_t result = field >> kHashShift;
  DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
  return result;
}


// Parses this string as an unsigned array index into |*index|.  Fails for
// the empty string and for strings longer than the maximal index length.
bool String::ComputeArrayIndex(uint32_t* index) {
  int length = this->length();
  if (length == 0 || length > kMaxArrayIndexSize) return false;
  ConsStringIteratorOp op;
  StringCharacterStream stream(this, &op);
  return StringToArrayIndex(&stream, index);
}


// Slow path of AsArrayIndex: uses the cached index in the hash field when
// the string is short enough, otherwise re-parses the characters.
bool String::SlowAsArrayIndex(uint32_t* index) {
  if (length() <= kMaxCachedArrayIndexLength) {
    Hash();  // force computation of hash code
    uint32_t field = hash_field();
    if ((field & kIsNotArrayIndexMask) != 0) return false;
    // Isolate the array index from the full hash field.
    *index = ArrayIndexValueBits::decode(field);
    return true;
  } else {
    // Too long for a cached index in the hash field; recompute by parsing.
    return ComputeArrayIndex(index);
  }
}


// Shrinks a sequential string in place to |new_length| characters.  The
// freed tail is either returned to new-space (when the string is the most
// recently allocated object) or replaced by a filler object so the heap
// remains iterable.  Returns the string (or the canonical empty string).
Handle<String> SeqString::Truncate(Handle<SeqString> string, int new_length) {
  int new_size, old_size;
  int old_length = string->length();
  if (old_length <= new_length) return string;

  if (string->IsSeqOneByteString()) {
    old_size = SeqOneByteString::SizeFor(old_length);
    new_size = SeqOneByteString::SizeFor(new_length);
  } else {
    DCHECK(string->IsSeqTwoByteString());
    old_size = SeqTwoByteString::SizeFor(old_length);
    new_size = SeqTwoByteString::SizeFor(new_length);
  }

  int delta = old_size - new_size;

  Address start_of_string = string->address();
  DCHECK_OBJECT_ALIGNED(start_of_string);
  DCHECK_OBJECT_ALIGNED(start_of_string + new_size);

  Heap* heap = string->GetHeap();
  NewSpace* newspace = heap->new_space();
  if (newspace->Contains(start_of_string) &&
      newspace->top() == start_of_string + old_size) {
    // Last allocated object in new space. Simply lower allocation top.
    newspace->set_top(start_of_string + new_size);
  } else {
    // Sizes are pointer size aligned, so that we can use filler objects
    // that are a multiple of pointer size.
    heap->CreateFillerObjectAt(start_of_string + new_size, delta);
  }
  heap->AdjustLiveBytes(start_of_string, -delta, Heap::FROM_MUTATOR);

  // We are storing the new length using release store after creating a filler
  // for the left-over space to avoid races with the sweeper thread.
  string->synchronized_set_length(new_length);

  if (new_length == 0) return heap->isolate()->factory()->empty_string();
  return string;
}


// Packs an array-index |value| together with its decimal digit |length|
// into an untagged hash field.
uint32_t StringHasher::MakeArrayIndexHash(uint32_t value, int length) {
  // For array indexes mix the length into the hash as an array index could
  // be zero.
  DCHECK(length > 0);
  DCHECK(length <= String::kMaxArrayIndexSize);
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));

  value <<= String::ArrayIndexValueBits::kShift;
  value |= length << String::ArrayIndexLengthBits::kShift;

  DCHECK((value & String::kIsNotArrayIndexMask) == 0);
  DCHECK((length > String::kMaxCachedArrayIndexLength) ||
         (value & String::kContainsCachedArrayIndexMask) == 0);
  return value;
}


// Finalizes the hash field: array-index encoding when applicable, the
// regular running hash otherwise, or the length itself for strings too
// long to hash.
uint32_t StringHasher::GetHashField() {
  if (length_ <= String::kMaxHashCalcLength) {
    if (is_array_index_) {
      return MakeArrayIndexHash(array_index_, length_);
    }
    return (GetHashCore(raw_running_hash_) << String::kHashShift) |
           String::kIsNotArrayIndexMask;
  } else {
    return (length_ << String::kHashShift) | String::kIsNotArrayIndexMask;
  }
}


// Hashes UTF-8 input by decoding it to UTF-16 code units on the fly, and
// reports the decoded UTF-16 length through |utf16_length_out|.
uint32_t StringHasher::ComputeUtf8Hash(Vector<const char> chars,
                                       uint32_t seed,
                                       int* utf16_length_out) {
  int vector_length = chars.length();
  // Handle some edge cases
  if (vector_length <= 1) {
    // An empty or single-byte input is pure one-byte data; hash directly.
    DCHECK(vector_length == 0 ||
           static_cast<uint8_t>(chars.start()[0]) <=
               unibrow::Utf8::kMaxOneByteChar);
    *utf16_length_out = vector_length;
    return HashSequentialString(chars.start(), vector_length, seed);
  }
  // Start with a fake length which won't affect computation.
  // It will be updated later.
  StringHasher hasher(String::kMaxArrayIndexSize, seed);
  unsigned remaining = static_cast<unsigned>(vector_length);
  const uint8_t* stream = reinterpret_cast<const uint8_t*>(chars.start());
  int utf16_length = 0;
  bool is_index = true;
  DCHECK(hasher.is_array_index_);
  while (remaining > 0) {
    unsigned consumed = 0;
    uint32_t c = unibrow::Utf8::ValueOf(stream, remaining, &consumed);
    DCHECK(consumed > 0 && consumed <= remaining);
    stream += consumed;
    remaining -= consumed;
    bool is_two_characters = c > unibrow::Utf16::kMaxNonSurrogateCharCode;
    utf16_length += is_two_characters ? 2 : 1;
    // No need to keep hashing. But we do need to calculate utf16_length.
    if (utf16_length > String::kMaxHashCalcLength) continue;
    if (is_two_characters) {
      // Supplementary-plane character: hash both surrogate halves.
      uint16_t c1 = unibrow::Utf16::LeadSurrogate(c);
      uint16_t c2 = unibrow::Utf16::TrailSurrogate(c);
      hasher.AddCharacter(c1);
      hasher.AddCharacter(c2);
      if (is_index) is_index = hasher.UpdateIndex(c1);
      if (is_index) is_index = hasher.UpdateIndex(c2);
    } else {
      hasher.AddCharacter(c);
      if (is_index) is_index = hasher.UpdateIndex(c);
    }
  }
  *utf16_length_out = static_cast<int>(utf16_length);
  // Must set length here so that hash computation is correct.
  hasher.length_ = utf16_length;
  return hasher.GetHashField();
}


// Prints the string to |file| character by character (debugging aid).
void String::PrintOn(FILE* file) {
  int length = this->length();
  for (int i = 0; i < length; i++) {
    PrintF(file, "%c", Get(i));
  }
}


int Map::Hash() {
  // For performance reasons we only hash the 3 most variable fields of a map:
  // constructor, prototype and bit_field2.

  // Shift away the tag.
  int hash = (static_cast<uint32_t>(
                  reinterpret_cast<uintptr_t>(constructor())) >> 2);

  // XOR-ing the prototype and constructor directly yields too many zero bits
  // when the two pointers are close (which is fairly common).
  // To avoid this we shift the prototype 4 bits relatively to the constructor.
  hash ^= (static_cast<uint32_t>(
               reinterpret_cast<uintptr_t>(prototype())) << 2);

  return hash ^ (hash >> 16) ^ bit_field2();
}


// Returns true when two maps agree on every field relevant for
// transition/normalization equivalence.
static bool CheckEquivalent(Map* first, Map* second) {
  return
    first->constructor() == second->constructor() &&
    first->prototype() == second->prototype() &&
    first->instance_type() == second->instance_type() &&
    first->bit_field() == second->bit_field() &&
    first->bit_field2() == second->bit_field2() &&
    first->is_frozen() == second->is_frozen() &&
    first->has_instance_call_handler() == second->has_instance_call_handler();
}


bool Map::EquivalentToForTransition(Map* other) {
  return CheckEquivalent(this, other);
}


bool Map::EquivalentToForNormalization(Map* other,
                                       PropertyNormalizationMode mode) {
  // When in-object properties are cleared by normalization, equivalence
  // requires this map to have zero in-object properties as well.
  int properties = mode == CLEAR_INOBJECT_PROPERTIES
      ? 0 : other->inobject_properties();
  return CheckEquivalent(this, other) && inobject_properties() == properties;
}


void ConstantPoolArray::ConstantPoolIterateBody(ObjectVisitor* v) {
  // Unfortunately the serializer relies on pointers within an object being
  // visited in-order, so we have to iterate both the code and heap pointers in
  // the small section before doing so in the extended section.
  for (int s = 0; s <= final_section(); ++s) {
    LayoutSection section = static_cast<LayoutSection>(s);
    // Visit code-pointer entries of this section first...
    ConstantPoolArray::Iterator code_iter(this, ConstantPoolArray::CODE_PTR,
                                          section);
    while (!code_iter.is_finished()) {
      v->VisitCodeEntry(reinterpret_cast<Address>(
          RawFieldOfElementAt(code_iter.next_index())));
    }

    // ...then the heap-pointer entries of the same section.
    ConstantPoolArray::Iterator heap_iter(this, ConstantPoolArray::HEAP_PTR,
                                          section);
    while (!heap_iter.is_finished()) {
      v->VisitPointer(RawFieldOfElementAt(heap_iter.next_index()));
    }
  }
}


// Resets all code and heap pointer entries to safe defaults (the entry of
// the Illegal builtin and undefined, respectively) so no stale pointers
// survive in the pool.
void ConstantPoolArray::ClearPtrEntries(Isolate* isolate) {
  Type type[] = { CODE_PTR, HEAP_PTR };
  Address default_value[] = {
      isolate->builtins()->builtin(Builtins::kIllegal)->entry(),
      reinterpret_cast<Address>(isolate->heap()->undefined_value()) };

  for (int i = 0; i < 2; ++i) {
    for (int s = 0; s <= final_section(); ++s) {
      LayoutSection section = static_cast<LayoutSection>(s);
      if (number_of_entries(type[i], section) > 0) {
        int offset = OffsetOfElementAt(first_index(type[i], section));
        MemsetPointer(
            reinterpret_cast<Address*>(HeapObject::RawField(this, offset)),
            default_value[i],
            number_of_entries(type[i], section));
      }
    }
  }
}


void JSFunction::JSFunctionIterateBody(int object_size, ObjectVisitor* v) {
  // Iterate over all fields in the body but take care in dealing with
  // the code entry.
  IteratePointers(v, kPropertiesOffset, kCodeEntryOffset);
  // The code entry is not a tagged pointer, so it gets its own visit hook.
  v->VisitCodeEntry(this->address() + kCodeEntryOffset);
  IteratePointers(v, kCodeEntryOffset + kPointerSize, object_size);
}


// Replaces the function's code with the CompileOptimized builtin so the
// next invocation triggers (non-concurrent) optimized compilation.
void JSFunction::MarkForOptimization() {
  DCHECK(!IsOptimized());
  DCHECK(shared()->allows_lazy_compilation() ||
         code()->optimizable());
  DCHECK(!shared()->is_generator());
  set_code_no_write_barrier(
      GetIsolate()->builtins()->builtin(Builtins::kCompileOptimized));
  // No write barrier required, since the builtin is part of the root set.
}


// Like MarkForOptimization, but recompilation will happen on the
// concurrent-recompilation thread.
void JSFunction::MarkForConcurrentOptimization() {
  DCHECK(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
  DCHECK(!IsOptimized());
  DCHECK(shared()->allows_lazy_compilation() || code()->optimizable());
  DCHECK(!shared()->is_generator());
  DCHECK(GetIsolate()->concurrent_recompilation_enabled());
  if (FLAG_trace_concurrent_recompilation) {
    PrintF(" ** Marking ");
    ShortPrint();
    PrintF(" for concurrent recompilation.\n");
  }
  set_code_no_write_barrier(
      GetIsolate()->builtins()->builtin(Builtins::kCompileOptimizedConcurrent));
  // No write barrier required, since the builtin is part of the root set.
}


void JSFunction::MarkInOptimizationQueue() {
  // We can only arrive here via the concurrent-recompilation builtin.  If
  // break points were set, the code would point to the lazy-compile builtin.
  DCHECK(!GetIsolate()->DebuggerHasBreakPoints());
  DCHECK(IsMarkedForConcurrentOptimization() && !IsOptimized());
  DCHECK(shared()->allows_lazy_compilation() || code()->optimizable());
  DCHECK(GetIsolate()->concurrent_recompilation_enabled());
  if (FLAG_trace_concurrent_recompilation) {
    PrintF(" ** Queueing ");
    ShortPrint();
    PrintF(" for concurrent recompilation.\n");
  }
  set_code_no_write_barrier(
      GetIsolate()->builtins()->builtin(Builtins::kInOptimizationQueue));
  // No write barrier required, since the builtin is part of the root set.
}


// Creates a new closure over the same SharedFunctionInfo and context as
// |function|, copying bound-function bindings when present.
Handle<JSFunction> JSFunction::CloneClosure(Handle<JSFunction> function) {
  Isolate* isolate = function->GetIsolate();
  Handle<Map> map(function->map());
  Handle<SharedFunctionInfo> shared(function->shared());
  Handle<Context> context(function->context());
  Handle<JSFunction> clone =
      isolate->factory()->NewFunctionFromSharedFunctionInfo(shared, context);

  if (shared->bound()) {
    clone->set_function_bindings(function->function_bindings());
  }

  // In typical case, __proto__ of ``function`` is the default Function
  // prototype, which means that SetPrototype below is a no-op.
  // In rare cases when that is not true, we mutate the clone's __proto__.
  Handle<Object> original_prototype(map->prototype(), isolate);
  if (*original_prototype != clone->map()->prototype()) {
    JSObject::SetPrototype(clone, original_prototype, false).Assert();
  }

  return clone;
}


// Caches optimized |code| (plus its |literals|) for |shared|, keyed on the
// pair (native context, OSR ast id), in the shared function info's
// optimized code map.
void SharedFunctionInfo::AddToOptimizedCodeMap(
    Handle<SharedFunctionInfo> shared,
    Handle<Context> native_context,
    Handle<Code> code,
    Handle<FixedArray> literals,
    BailoutId osr_ast_id) {
  Isolate* isolate = shared->GetIsolate();
  DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
  DCHECK(native_context->IsNativeContext());
  STATIC_ASSERT(kEntryLength == 4);
  Handle<FixedArray> new_code_map;
  Handle<Object> value(shared->optimized_code_map(), isolate);
  int old_length;
  if (value->IsSmi()) {
    // No optimized code map.
    DCHECK_EQ(0, Smi::cast(*value)->value());
    // Create 3 entries per context {context, code, literals}.
    new_code_map = isolate->factory()->NewFixedArray(kInitialLength);
    old_length = kEntriesStart;
  } else {
    // Copy old map and append one new entry.
    Handle<FixedArray> old_code_map = Handle<FixedArray>::cast(value);
    DCHECK_EQ(-1, shared->SearchOptimizedCodeMap(*native_context, osr_ast_id));
    old_length = old_code_map->length();
    new_code_map = FixedArray::CopySize(
        old_code_map, old_length + kEntryLength);
    // Zap the old map for the sake of the heap verifier.
    if (Heap::ShouldZapGarbage()) {
      Object** data = old_code_map->data_start();
      MemsetPointer(data, isolate->heap()->the_hole_value(), old_length);
    }
  }
  // Write the new entry at the end of the (copied or fresh) map.
  new_code_map->set(old_length + kContextOffset, *native_context);
  new_code_map->set(old_length + kCachedCodeOffset, *code);
  new_code_map->set(old_length + kLiteralsOffset, *literals);
  new_code_map->set(old_length + kOsrAstIdOffset,
                    Smi::FromInt(osr_ast_id.ToInt()));

#ifdef DEBUG
  // Verify the structural invariants of every entry in the map.
  for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) {
    DCHECK(new_code_map->get(i + kContextOffset)->IsNativeContext());
    DCHECK(new_code_map->get(i + kCachedCodeOffset)->IsCode());
    DCHECK(Code::cast(new_code_map->get(i + kCachedCodeOffset))->kind() ==
           Code::OPTIMIZED_FUNCTION);
    DCHECK(new_code_map->get(i + kLiteralsOffset)->IsFixedArray());
    DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi());
  }
#endif
  shared->set_optimized_code_map(*new_code_map);
}


// Returns the cached literals array stored next to the code entry at
// |index|, or NULL for bound functions (which carry no cached literals).
FixedArray* SharedFunctionInfo::GetLiteralsFromOptimizedCodeMap(int index) {
  DCHECK(index > kEntriesStart);
  FixedArray* code_map = FixedArray::cast(optimized_code_map());
  if (!bound()) {
    // index + 1 is the literals slot relative to the cached-code slot.
    FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1));
    DCHECK_NE(NULL, cached_literals);
    return cached_literals;
  }
  return NULL;
}


// Returns the cached optimized Code object at |index| in the code map.
Code* SharedFunctionInfo::GetCodeFromOptimizedCodeMap(int index) {
  DCHECK(index > kEntriesStart);
  FixedArray* code_map = FixedArray::cast(optimized_code_map());
  Code* code = Code::cast(code_map->get(index));
  DCHECK_NE(NULL, code);
  return code;
}


// Drops the optimized code map entirely, first unhooking it from the code
// flusher if it was enqueued there.
void SharedFunctionInfo::ClearOptimizedCodeMap() {
  FixedArray* code_map = FixedArray::cast(optimized_code_map());

  // If the next map link slot is already used then the function was
  // enqueued with code flushing and we remove it now.
  if (!code_map->get(kNextMapIndex)->IsUndefined()) {
    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
    flusher->EvictOptimizedCodeMap(this);
  }

  DCHECK(code_map->get(kNextMapIndex)->IsUndefined());
  // Smi zero denotes "no optimized code map".
  set_optimized_code_map(Smi::FromInt(0));
}


// Removes every entry whose cached code is |optimized_code|, compacting the
// remaining entries in place and trimming the tail.  |reason| is only used
// for tracing output.
void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
                                                   const char* reason) {
  DisallowHeapAllocation no_gc;
  if (optimized_code_map()->IsSmi()) return;

  FixedArray* code_map = FixedArray::cast(optimized_code_map());
  int dst = kEntriesStart;
  int length = code_map->length();
  for (int src = kEntriesStart; src < length; src += kEntryLength) {
    DCHECK(code_map->get(src)->IsNativeContext());
    if (Code::cast(code_map->get(src + kCachedCodeOffset)) == optimized_code) {
      // Evict the src entry by not copying it to the dst entry.
      if (FLAG_trace_opt) {
        PrintF("[evicting entry from optimizing code map (%s) for ", reason);
        ShortPrint();
        BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value());
        if (osr.IsNone()) {
          PrintF("]\n");
        } else {
          PrintF(" (osr ast id %d)]\n", osr.ToInt());
        }
      }
    } else {
      // Keep the src entry by copying it to the dst entry.
      if (dst != src) {
        code_map->set(dst + kContextOffset,
                      code_map->get(src + kContextOffset));
        code_map->set(dst + kCachedCodeOffset,
                      code_map->get(src + kCachedCodeOffset));
        code_map->set(dst + kLiteralsOffset,
                      code_map->get(src + kLiteralsOffset));
        code_map->set(dst + kOsrAstIdOffset,
                      code_map->get(src + kOsrAstIdOffset));
      }
      dst += kEntryLength;
    }
  }
  if (dst != length) {
    // Always trim even when array is cleared because of heap verifier.
    GetHeap()->RightTrimFixedArray<Heap::FROM_MUTATOR>(code_map, length - dst);
    if (code_map->length() == kEntriesStart) ClearOptimizedCodeMap();
  }
}


// Shrinks the optimized code map by |shrink_by| slots (a whole number of
// entries); called during GC, hence FROM_GC trimming.
void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) {
  FixedArray* code_map = FixedArray::cast(optimized_code_map());
  DCHECK(shrink_by % kEntryLength == 0);
  DCHECK(shrink_by <= code_map->length() - kEntriesStart);
  // Always trim even when array is cleared because of heap verifier.
  GetHeap()->RightTrimFixedArray<Heap::FROM_GC>(code_map, shrink_by);
  if (code_map->length() == kEntriesStart) {
    ClearOptimizedCodeMap();
  }
}


// Prepares |object| for use as a prototype: normalizes then re-fastens its
// properties and, in FAST_PROTOTYPE mode, gives it a dedicated prototype
// map.  Global objects and global proxies are left untouched.
void JSObject::OptimizeAsPrototype(Handle<JSObject> object,
                                   PrototypeOptimizationMode mode) {
  if (object->IsGlobalObject()) return;
  if (object->IsJSGlobalProxy()) return;
  if (mode == FAST_PROTOTYPE && !object->map()->is_prototype_map()) {
    // First normalize to ensure all JSFunctions are CONSTANT.
    JSObject::NormalizeProperties(object, KEEP_INOBJECT_PROPERTIES, 0);
  }
  if (!object->HasFastProperties()) {
    JSObject::MigrateSlowToFast(object, 0);
  }
  if (mode == FAST_PROTOTYPE && object->HasFastProperties() &&
      !object->map()->is_prototype_map()) {
    // Give the object its own map marked as a prototype map.
    Handle<Map> new_map = Map::Copy(handle(object->map()));
    JSObject::MigrateToMap(object, new_map);
    object->map()->set_is_prototype_map(true);
  }
}


// Re-runs prototype optimization for objects already acting as prototypes.
void JSObject::ReoptimizeIfPrototype(Handle<JSObject> object) {
  if (!object->map()->is_prototype_map()) return;
  OptimizeAsPrototype(object, FAST_PROTOTYPE);
}


Handle<Object> CacheInitialJSArrayMaps(
    Handle<Context> native_context, Handle<Map> initial_map) {
  // Replace all of the cached initial array maps in the native context with
  // the appropriate transitioned elements kind maps.
9342 Factory* factory = native_context->GetIsolate()->factory(); 9343 Handle<FixedArray> maps = factory->NewFixedArrayWithHoles( 9344 kElementsKindCount, TENURED); 9345 9346 Handle<Map> current_map = initial_map; 9347 ElementsKind kind = current_map->elements_kind(); 9348 DCHECK(kind == GetInitialFastElementsKind()); 9349 maps->set(kind, *current_map); 9350 for (int i = GetSequenceIndexFromFastElementsKind(kind) + 1; 9351 i < kFastElementsKindCount; ++i) { 9352 Handle<Map> new_map; 9353 ElementsKind next_kind = GetFastElementsKindFromSequenceIndex(i); 9354 if (current_map->HasElementsTransition()) { 9355 new_map = handle(current_map->elements_transition_map()); 9356 DCHECK(new_map->elements_kind() == next_kind); 9357 } else { 9358 new_map = Map::CopyAsElementsKind( 9359 current_map, next_kind, INSERT_TRANSITION); 9360 } 9361 maps->set(next_kind, *new_map); 9362 current_map = new_map; 9363 } 9364 native_context->set_js_array_maps(*maps); 9365 return initial_map; 9366 } 9367 9368 9369 void JSFunction::SetInstancePrototype(Handle<JSFunction> function, 9370 Handle<Object> value) { 9371 Isolate* isolate = function->GetIsolate(); 9372 9373 DCHECK(value->IsJSReceiver()); 9374 9375 // Now some logic for the maps of the objects that are created by using this 9376 // function as a constructor. 9377 if (function->has_initial_map()) { 9378 // If the function has allocated the initial map replace it with a 9379 // copy containing the new prototype. Also complete any in-object 9380 // slack tracking that is in progress at this point because it is 9381 // still tracking the old copy. 9382 if (function->IsInobjectSlackTrackingInProgress()) { 9383 function->CompleteInobjectSlackTracking(); 9384 } 9385 9386 Handle<Map> initial_map(function->initial_map(), isolate); 9387 9388 if (!initial_map->GetIsolate()->bootstrapper()->IsActive() && 9389 initial_map->instance_type() == JS_OBJECT_TYPE) { 9390 // Put the value in the initial map field until an initial map is needed. 
      // At that point, a new initial map is created and the prototype is put
      // into the initial map where it belongs.
      function->set_prototype_or_initial_map(*value);
    } else {
      Handle<Map> new_map = Map::Copy(initial_map);
      JSFunction::SetInitialMap(function, new_map, value);

      // If the function is used as the global Array function, cache the
      // initial map (and transitioned versions) in the native context.
      Context* native_context = function->context()->native_context();
      Object* array_function =
          native_context->get(Context::ARRAY_FUNCTION_INDEX);
      if (array_function->IsJSFunction() &&
          *function == JSFunction::cast(array_function)) {
        CacheInitialJSArrayMaps(handle(native_context, isolate), new_map);
      }
    }

    // Deoptimize all code that embeds the previous initial map.
    initial_map->dependent_code()->DeoptimizeDependentCodeGroup(
        isolate, DependentCode::kInitialMapChangedGroup);
  } else {
    // Put the value in the initial map field until an initial map is
    // needed.  At that point, a new initial map is created and the
    // prototype is put into the initial map where it belongs.
    function->set_prototype_or_initial_map(*value);
  }
  // The instanceof cache may hold results that depend on the old prototype.
  isolate->heap()->ClearInstanceofCache();
}


void JSFunction::SetPrototype(Handle<JSFunction> function,
                              Handle<Object> value) {
  DCHECK(function->should_have_prototype());
  Handle<Object> construct_prototype = value;

  // If the value is not a JSReceiver, store the value in the map's
  // constructor field so it can be accessed.  Also, set the prototype
  // used for constructing objects to the original object prototype.
  // See ECMA-262 13.2.2.
  if (!value->IsJSReceiver()) {
    // Copy the map so this does not affect unrelated functions.
    // Remove map transitions because they point to maps with a
    // different prototype.
    Handle<Map> new_map = Map::Copy(handle(function->map()));

    JSObject::MigrateToMap(function, new_map);
    new_map->set_constructor(*value);
    new_map->set_non_instance_prototype(true);
    Isolate* isolate = new_map->GetIsolate();
    // Instances constructed from this function will use the initial object
    // prototype instead of the non-receiver value.
    construct_prototype = handle(
        isolate->context()->native_context()->initial_object_prototype(),
        isolate);
  } else {
    function->map()->set_non_instance_prototype(false);
  }

  return SetInstancePrototype(function, construct_prototype);
}


// Switches the function to the canonical no-prototype map for its strict
// mode and clears the prototype slot.  In debug builds, returns false if the
// function's current map is not one of the canonical function maps.
bool JSFunction::RemovePrototype() {
  Context* native_context = context()->native_context();
  Map* no_prototype_map = shared()->strict_mode() == SLOPPY
      ? native_context->sloppy_function_without_prototype_map()
      : native_context->strict_function_without_prototype_map();

  if (map() == no_prototype_map) return true;

#ifdef DEBUG
  if (map() != (shared()->strict_mode() == SLOPPY
                    ? native_context->sloppy_function_map()
                    : native_context->strict_function_map())) {
    return false;
  }
#endif

  set_map(no_prototype_map);
  set_prototype_or_initial_map(no_prototype_map->GetHeap()->the_hole_value());
  return true;
}


// Links |map| and |function| as initial map and constructor, installing
// |prototype| on the map.  JSObject prototypes are optimized first.
void JSFunction::SetInitialMap(Handle<JSFunction> function, Handle<Map> map,
                               Handle<Object> prototype) {
  if (prototype->IsJSObject()) {
    Handle<JSObject> js_proto = Handle<JSObject>::cast(prototype);
    JSObject::OptimizeAsPrototype(js_proto, FAST_PROTOTYPE);
  }
  map->set_prototype(*prototype);
  function->set_prototype_or_initial_map(*map);
  map->set_constructor(*function);
}


void JSFunction::EnsureHasInitialMap(Handle<JSFunction> function) {
  if (function->has_initial_map()) return;
  Isolate* isolate = function->GetIsolate();

  // First create a new map with the size and number of in-object properties
  // suggested by the function.
  InstanceType instance_type;
  int instance_size;
  int in_object_properties;
  // Generators get a fixed-size JSGeneratorObject; ordinary functions get a
  // size estimate derived from the expected number of properties.
  if (function->shared()->is_generator()) {
    instance_type = JS_GENERATOR_OBJECT_TYPE;
    instance_size = JSGeneratorObject::kSize;
    in_object_properties = 0;
  } else {
    instance_type = JS_OBJECT_TYPE;
    instance_size = function->shared()->CalculateInstanceSize();
    in_object_properties = function->shared()->CalculateInObjectProperties();
  }
  Handle<Map> map = isolate->factory()->NewMap(instance_type, instance_size);

  // Fetch or allocate prototype.
  Handle<Object> prototype;
  if (function->has_instance_prototype()) {
    prototype = handle(function->instance_prototype(), isolate);
  } else {
    prototype = isolate->factory()->NewFunctionPrototype(function);
  }
  map->set_inobject_properties(in_object_properties);
  map->set_unused_property_fields(in_object_properties);
  DCHECK(map->has_fast_object_elements());

  // Finally link initial map and constructor function.
  JSFunction::SetInitialMap(function, map, Handle<JSReceiver>::cast(prototype));

  // Generators are not subject to in-object slack tracking.
  if (!function->shared()->is_generator()) {
    function->StartInobjectSlackTracking();
  }
}


// Sets the class name reported for instances created by this function.
void JSFunction::SetInstanceClassName(String* name) {
  shared()->set_instance_class_name(name);
}


// Prints the function's debug name to |out|.
void JSFunction::PrintName(FILE* out) {
  SmartArrayPointer<char> name = shared()->DebugName()->ToCString();
  PrintF(out, "%s", name.get());
}


Context* JSFunction::NativeContextFromLiterals(FixedArray* literals) {
  return Context::cast(literals->get(JSFunction::kLiteralNativeContextIndex));
}


// The filter is a pattern that matches function names in this way:
//   "*"      all; the default
//   "-"      all but the top-level function
//   "-name"  all but the function "name"
//   ""       only the top-level function
//   "name"   only the function "name"
//   "name*"  only functions starting with "name"
//   "~"      none; the tilde is not an identifier
bool JSFunction::PassesFilter(const char* raw_filter) {
  if (*raw_filter == '*') return true;
  String* name = shared()->DebugName();
  Vector<const char> filter = CStrVector(raw_filter);
  // An empty filter matches only the (nameless) top-level function.
  if (filter.length() == 0) return name->length() == 0;
  if (filter[0] == '-') {
    // Negative filter.
    if (filter.length() == 1) {
      // Just "-": everything except the top-level function passes.
      return (name->length() != 0);
    } else if (name->IsUtf8EqualTo(filter.SubVector(1, filter.length()))) {
      // Exact negative match, e.g. "-name".
      return false;
    }
    if (filter[filter.length() - 1] == '*' &&
        name->IsUtf8EqualTo(filter.SubVector(1, filter.length() - 1), true)) {
      // Negative prefix match, e.g. "-name*".
      return false;
    }
    return true;

  } else if (name->IsUtf8EqualTo(filter)) {
    // Exact positive match.
    return true;
  }
  if (filter[filter.length() - 1] == '*' &&
      name->IsUtf8EqualTo(filter.SubVector(0, filter.length() - 1), true)) {
    // Positive prefix match, e.g. "name*".
    return true;
  }
  return false;
}


// Initializes an oddball (undefined, null, true, ...) with its string
// representation, numeric value and kind tag.
void Oddball::Initialize(Isolate* isolate,
                         Handle<Oddball> oddball,
                         const char* to_string,
                         Handle<Object> to_number,
                         byte kind) {
  Handle<String> internalized_to_string =
      isolate->factory()->InternalizeUtf8String(to_string);
  oddball->set_to_string(*internalized_to_string);
  oddball->set_to_number(*to_number);
  oddball->set_kind(kind);
}


// Lazily computes and caches the line-ends table for the script's source.
// Subsequent calls are no-ops.  Scripts without a string source get an empty
// table.
void Script::InitLineEnds(Handle<Script> script) {
  if (!script->line_ends()->IsUndefined()) return;

  Isolate* isolate = script->GetIsolate();

  if (!script->source()->IsString()) {
    DCHECK(script->source()->IsUndefined());
    Handle<FixedArray> empty = isolate->factory()->NewFixedArray(0);
    script->set_line_ends(*empty);
    DCHECK(script->line_ends()->IsFixedArray());
    return;
  }

  Handle<String> src(String::cast(script->source()), isolate);

  Handle<FixedArray> array = String::CalculateLineEnds(src, true);

  // Mark the table copy-on-write so it can be shared safely.
  if (*array != isolate->heap()->empty_fixed_array()) {
    array->set_map(isolate->heap()->fixed_cow_array_map());
  }

  script->set_line_ends(*array);
  DCHECK(script->line_ends()->IsFixedArray());
}


// Returns the column number for |code_pos|, or -1 if the position cannot be
// resolved to a line.
int Script::GetColumnNumber(Handle<Script> script, int code_pos) {
  int line_number = GetLineNumber(script, code_pos);
  if (line_number == -1) return -1;

  DisallowHeapAllocation no_allocation;
  FixedArray* line_ends_array = FixedArray::cast(script->line_ends());
  line_number = line_number - script->line_offset()->value();
  // On the script's first line the resource column offset is added as well.
  if (line_number == 0) return code_pos + script->column_offset()->value();
  int prev_line_end_pos =
      Smi::cast(line_ends_array->get(line_number - 1))->value();
  return code_pos - (prev_line_end_pos + 1);
}


// Binary-searches the cached line-ends table for the line containing
// |code_pos|.  Returns -1 when the table is empty.
int Script::GetLineNumberWithArray(int code_pos) {
  DisallowHeapAllocation no_allocation;
  DCHECK(line_ends()->IsFixedArray());
  FixedArray* line_ends_array = FixedArray::cast(line_ends());
  int line_ends_len = line_ends_array->length();
  if (line_ends_len == 0) return -1;

  if ((Smi::cast(line_ends_array->get(0)))->value() >= code_pos) {
    return line_offset()->value();
  }

  int left = 0;
  int right = line_ends_len;
  while (int half = (right - left) / 2) {
    if ((Smi::cast(line_ends_array->get(left + half)))->value() > code_pos) {
      right -= half;
    } else {
      left += half;
    }
  }
  return right + line_offset()->value();
}


int Script::GetLineNumber(Handle<Script> script, int code_pos) {
  InitLineEnds(script);
  return script->GetLineNumberWithArray(code_pos);
}


int Script::GetLineNumber(int code_pos) {
  DisallowHeapAllocation no_allocation;
  if (!line_ends()->IsUndefined()) return GetLineNumberWithArray(code_pos);

  // Slow mode: we do not have line_ends. We have to iterate through source.
  if (!source()->IsString()) return -1;

  // Count newline characters strictly before |code_pos|.
  String* source_string = String::cast(source());
  int line = 0;
  int len = source_string->length();
  for (int pos = 0; pos < len; pos++) {
    if (pos == code_pos) break;
    if (source_string->Get(pos) == '\n') line++;
  }
  return line;
}


// Calls the JS-visible nameOrSourceURL() method on the script's wrapper and
// returns its result, or undefined if the call throws.
Handle<Object> Script::GetNameOrSourceURL(Handle<Script> script) {
  Isolate* isolate = script->GetIsolate();
  Handle<String> name_or_source_url_key =
      isolate->factory()->InternalizeOneByteString(
          STATIC_CHAR_VECTOR("nameOrSourceURL"));
  Handle<JSObject> script_wrapper = Script::GetWrapper(script);
  Handle<Object> property = Object::GetProperty(
      script_wrapper, name_or_source_url_key).ToHandleChecked();
  DCHECK(property->IsJSFunction());
  Handle<JSFunction> method = Handle<JSFunction>::cast(property);
  Handle<Object> result;
  // Do not check against pending exception, since this function may be called
  // when an exception has already been pending.
  if (!Execution::TryCall(method, script_wrapper, 0, NULL).ToHandle(&result)) {
    return isolate->factory()->undefined_value();
  }
  return result;
}


// Wrappers for scripts are kept alive and cached in weak global
// handles referred from foreign objects held by the scripts as long as
// they are used. When they are not used anymore, the garbage
// collector will call the weak callback on the global handle
// associated with the wrapper and get rid of both the wrapper and the
// handle.
static void ClearWrapperCacheWeakCallback(
    const v8::WeakCallbackData<v8::Value, void>& data) {
  // The parameter is the location of the global handle created in
  // Script::GetWrapper().
  Object** location = reinterpret_cast<Object**>(data.GetParameter());
  JSValue* wrapper = JSValue::cast(*location);
  Script::cast(wrapper->value())->ClearWrapperCache();
}


// Drops the cached wrapper: resets the foreign address and destroys the
// global handle that referenced the wrapper.
void Script::ClearWrapperCache() {
  Foreign* foreign = wrapper();
  Object** location = reinterpret_cast<Object**>(foreign->foreign_address());
  DCHECK_EQ(foreign->foreign_address(), reinterpret_cast<Address>(location));
  foreign->set_foreign_address(0);
  GlobalHandles::Destroy(location);
  GetIsolate()->counters()->script_wrappers()->Decrement();
}


// Returns the cached JS wrapper object for |script|, creating and caching a
// new one if necessary.
Handle<JSObject> Script::GetWrapper(Handle<Script> script) {
  if (script->wrapper()->foreign_address() != NULL) {
    // Return a handle for the existing script wrapper from the cache.
    return Handle<JSValue>(
        *reinterpret_cast<JSValue**>(script->wrapper()->foreign_address()));
  }
  Isolate* isolate = script->GetIsolate();
  // Construct a new script wrapper.
  isolate->counters()->script_wrappers()->Increment();
  Handle<JSFunction> constructor = isolate->script_function();
  Handle<JSValue> result =
      Handle<JSValue>::cast(isolate->factory()->NewJSObject(constructor));

  result->set_value(*script);

  // Create a new weak global handle and use it to cache the wrapper
  // for future use. The cache will automatically be cleared by the
  // garbage collector when it is not used anymore.
  Handle<Object> handle = isolate->global_handles()->Create(*result);
  GlobalHandles::MakeWeak(handle.location(),
                          reinterpret_cast<void*>(handle.location()),
                          &ClearWrapperCacheWeakCallback);
  script->wrapper()->set_foreign_address(
      reinterpret_cast<Address>(handle.location()));
  return result;
}


// Returns the function's name, falling back to the inferred name when the
// name is missing or empty.
String* SharedFunctionInfo::DebugName() {
  Object* n = name();
  if (!n->IsString() || String::cast(n)->length() == 0) return inferred_name();
  return String::cast(n);
}


bool SharedFunctionInfo::HasSourceCode() const {
  return !script()->IsUndefined() &&
         !reinterpret_cast<Script*>(script())->source()->IsUndefined();
}


// Returns the substring of the script source covering this function, or
// undefined when there is no source.
Handle<Object> SharedFunctionInfo::GetSourceCode() {
  if (!HasSourceCode()) return GetIsolate()->factory()->undefined_value();
  Handle<String> source(String::cast(Script::cast(script())->source()));
  return GetIsolate()->factory()->NewSubString(
      source, start_position(), end_position());
}


bool SharedFunctionInfo::IsInlineable() {
  // Check that the function has a script associated with it.
  if (!script()->IsScript()) return false;
  if (optimization_disabled()) return false;
  // If we never ran this (unlikely) then lets try to optimize it.
9778 if (code()->kind() != Code::FUNCTION) return true; 9779 return code()->optimizable(); 9780 } 9781 9782 9783 int SharedFunctionInfo::SourceSize() { 9784 return end_position() - start_position(); 9785 } 9786 9787 9788 int SharedFunctionInfo::CalculateInstanceSize() { 9789 int instance_size = 9790 JSObject::kHeaderSize + 9791 expected_nof_properties() * kPointerSize; 9792 if (instance_size > JSObject::kMaxInstanceSize) { 9793 instance_size = JSObject::kMaxInstanceSize; 9794 } 9795 return instance_size; 9796 } 9797 9798 9799 int SharedFunctionInfo::CalculateInObjectProperties() { 9800 return (CalculateInstanceSize() - JSObject::kHeaderSize) / kPointerSize; 9801 } 9802 9803 9804 // Output the source code without any allocation in the heap. 9805 OStream& operator<<(OStream& os, const SourceCodeOf& v) { 9806 const SharedFunctionInfo* s = v.value; 9807 // For some native functions there is no source. 9808 if (!s->HasSourceCode()) return os << "<No Source>"; 9809 9810 // Get the source for the script which this function came from. 9811 // Don't use String::cast because we don't want more assertion errors while 9812 // we are already creating a stack dump. 
9813 String* script_source = 9814 reinterpret_cast<String*>(Script::cast(s->script())->source()); 9815 9816 if (!script_source->LooksValid()) return os << "<Invalid Source>"; 9817 9818 if (!s->is_toplevel()) { 9819 os << "function "; 9820 Object* name = s->name(); 9821 if (name->IsString() && String::cast(name)->length() > 0) { 9822 String::cast(name)->PrintUC16(os); 9823 } 9824 } 9825 9826 int len = s->end_position() - s->start_position(); 9827 if (len <= v.max_length || v.max_length < 0) { 9828 script_source->PrintUC16(os, s->start_position(), s->end_position()); 9829 return os; 9830 } else { 9831 script_source->PrintUC16(os, s->start_position(), 9832 s->start_position() + v.max_length); 9833 return os << "...\n"; 9834 } 9835 } 9836 9837 9838 static bool IsCodeEquivalent(Code* code, Code* recompiled) { 9839 if (code->instruction_size() != recompiled->instruction_size()) return false; 9840 ByteArray* code_relocation = code->relocation_info(); 9841 ByteArray* recompiled_relocation = recompiled->relocation_info(); 9842 int length = code_relocation->length(); 9843 if (length != recompiled_relocation->length()) return false; 9844 int compare = memcmp(code_relocation->GetDataStartAddress(), 9845 recompiled_relocation->GetDataStartAddress(), 9846 length); 9847 return compare == 0; 9848 } 9849 9850 9851 void SharedFunctionInfo::EnableDeoptimizationSupport(Code* recompiled) { 9852 DCHECK(!has_deoptimization_support()); 9853 DisallowHeapAllocation no_allocation; 9854 Code* code = this->code(); 9855 if (IsCodeEquivalent(code, recompiled)) { 9856 // Copy the deoptimization data from the recompiled code. 9857 code->set_deoptimization_data(recompiled->deoptimization_data()); 9858 code->set_has_deoptimization_support(true); 9859 } else { 9860 // TODO(3025757): In case the recompiled isn't equivalent to the 9861 // old code, we have to replace it. 
    // We should try to avoid this
    // altogether because it flushes valuable type feedback by
    // effectively resetting all IC state.
    ReplaceCode(recompiled);
  }
  DCHECK(has_deoptimization_support());
}


void SharedFunctionInfo::DisableOptimization(BailoutReason reason) {
  // Disable optimization for the shared function info and mark the
  // code as non-optimizable. The marker on the shared function info
  // is there because we flush non-optimized code thereby losing the
  // non-optimizable information for the code. When the code is
  // regenerated and set on the shared function info it is marked as
  // non-optimizable if optimization is disabled for the shared
  // function info.
  set_optimization_disabled(true);
  set_bailout_reason(reason);
  // Code should be the lazy compilation stub or else unoptimized. If the
  // latter, disable optimization for the code too.
  DCHECK(code()->kind() == Code::FUNCTION || code()->kind() == Code::BUILTIN);
  if (code()->kind() == Code::FUNCTION) {
    code()->set_optimizable(false);
  }
  PROFILE(GetIsolate(), CodeDisableOptEvent(code(), this));
  if (FLAG_trace_opt) {
    PrintF("[disabled optimization for ");
    ShortPrint();
    PrintF(", reason: %s]\n", GetBailoutReason(reason));
  }
}


// Verifies (via the checks inside Deoptimizer::GetOutputInfo) that |id| has
// an output entry in the unoptimized code's deoptimization data.
bool SharedFunctionInfo::VerifyBailoutId(BailoutId id) {
  DCHECK(!id.IsNone());
  Code* unoptimized = code();
  DeoptimizationOutputData* data =
      DeoptimizationOutputData::cast(unoptimized->deoptimization_data());
  unsigned ignore = Deoptimizer::GetOutputInfo(data, id, this);
  USE(ignore);
  return true;  // Return true if there was no DCHECK.
}


void JSFunction::StartInobjectSlackTracking() {
  DCHECK(has_initial_map() && !IsInobjectSlackTrackingInProgress());

  if (!FLAG_clever_optimizations) return;
  Map* map = initial_map();

  // Only initiate the tracking the first time.
  if (map->done_inobject_slack_tracking()) return;
  map->set_done_inobject_slack_tracking(true);

  // No tracking during the snapshot construction phase.
  Isolate* isolate = GetIsolate();
  if (isolate->serializer_enabled()) return;

  // Nothing to track if there is no slack to reclaim.
  if (map->unused_property_fields() == 0) return;

  map->set_construction_count(kGenerousAllocationCount);
}


void SharedFunctionInfo::ResetForNewContext(int new_ic_age) {
  code()->ClearInlineCaches();
  // If we clear ICs, we need to clear the type feedback vector too, since
  // CallICs are synced with a feedback vector slot.
  ClearTypeFeedbackInfo();
  set_ic_age(new_ic_age);
  if (code()->kind() == Code::FUNCTION) {
    code()->set_profiler_ticks(0);
    if (optimization_disabled() &&
        opt_count() >= FLAG_max_opt_count) {
      // Re-enable optimizations if they were disabled due to opt_count limit.
      set_optimization_disabled(false);
      code()->set_optimizable(true);
    }
    set_opt_count(0);
    set_deopt_count(0);
  }
}


// Map-tree visitor: records the minimum unused-property-field count seen
// into the int pointed to by |data|.
static void GetMinInobjectSlack(Map* map, void* data) {
  int slack = map->unused_property_fields();
  if (*reinterpret_cast<int*>(data) > slack) {
    *reinterpret_cast<int*>(data) = slack;
  }
}


// Map-tree visitor: shrinks a map's instance size by the slack (in words)
// pointed to by |data|.
static void ShrinkInstanceSize(Map* map, void* data) {
  int slack = *reinterpret_cast<int*>(data);
  map->set_inobject_properties(map->inobject_properties() - slack);
  map->set_unused_property_fields(map->unused_property_fields() - slack);
  map->set_instance_size(map->instance_size() - slack * kPointerSize);

  // Visitor id might depend on the instance size, recalculate it.
  map->set_visitor_id(StaticVisitorBase::GetVisitorId(map));
}


void JSFunction::CompleteInobjectSlackTracking() {
  DCHECK(has_initial_map());
  Map* map = initial_map();

  DCHECK(map->done_inobject_slack_tracking());
  map->set_construction_count(kNoSlackTracking);

  // Find the common slack across the initial map and all maps in its
  // transition tree, then shrink them all by that amount.
  int slack = map->unused_property_fields();
  map->TraverseTransitionTree(&GetMinInobjectSlack, &slack);
  if (slack != 0) {
    // Resize the initial map and all maps in its transition tree.
    map->TraverseTransitionTree(&ShrinkInstanceSize, &slack);
  }
}


// Returns the index of the cached code entry for (native_context, osr_ast_id)
// in the optimized code map, or -1 if there is none.
int SharedFunctionInfo::SearchOptimizedCodeMap(Context* native_context,
                                               BailoutId osr_ast_id) {
  DisallowHeapAllocation no_gc;
  DCHECK(native_context->IsNativeContext());
  if (!FLAG_cache_optimized_code) return -1;
  Object* value = optimized_code_map();
  if (!value->IsSmi()) {
    FixedArray* optimized_code_map = FixedArray::cast(value);
    int length = optimized_code_map->length();
    Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt());
    for (int i = kEntriesStart; i < length; i += kEntryLength) {
      if (optimized_code_map->get(i + kContextOffset) == native_context &&
          optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) {
        return i + kCachedCodeOffset;
      }
    }
    if (FLAG_trace_opt) {
      PrintF("[didn't find optimized code in optimized code map for ");
      ShortPrint();
      PrintF("]\n");
    }
  }
  return -1;
}


#define DECLARE_TAG(ignore1, name, ignore2) name,
const char* const VisitorSynchronization::kTags[
    VisitorSynchronization::kNumberOfSyncTags] = {
  VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
};
#undef DECLARE_TAG


#define DECLARE_TAG(ignore1, ignore2, name) name,
const char* const VisitorSynchronization::kTagNames[
    VisitorSynchronization::kNumberOfSyncTags] = {
  VISITOR_SYNCHRONIZATION_TAGS_LIST(DECLARE_TAG)
};
#undef DECLARE_TAG


void ObjectVisitor::VisitCodeTarget(RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  Object* old_target = target;
  VisitPointer(&target);
  CHECK_EQ(target, old_target);  // VisitPointer doesn't change Code* *target.
}


void ObjectVisitor::VisitCodeAgeSequence(RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Object* stub = rinfo->code_age_stub();
  if (stub) {
    VisitPointer(&stub);
  }
}


void ObjectVisitor::VisitCodeEntry(Address entry_address) {
  Object* code = Code::GetObjectFromEntryAddress(entry_address);
  Object* old_code = code;
  VisitPointer(&code);
  // If the visitor moved the code object, rewrite the stored entry address.
  if (code != old_code) {
    Memory::Address_at(entry_address) = reinterpret_cast<Code*>(code)->entry();
  }
}


void ObjectVisitor::VisitCell(RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Object* cell = rinfo->target_cell();
  Object* old_cell = cell;
  VisitPointer(&cell);
  // If the visitor moved the cell, update the relocation entry.
  if (cell != old_cell) {
    rinfo->set_target_cell(reinterpret_cast<Cell*>(cell));
  }
}


void ObjectVisitor::VisitDebugTarget(RelocInfo* rinfo) {
  DCHECK((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  Object* old_target = target;
  VisitPointer(&target);
  CHECK_EQ(target, old_target);  // VisitPointer doesn't change Code* *target.
}


void ObjectVisitor::VisitEmbeddedPointer(RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  Object* p = rinfo->target_object();
  VisitPointer(&p);
}


void ObjectVisitor::VisitExternalReference(RelocInfo* rinfo) {
  Address p = rinfo->target_reference();
  VisitExternalReference(&p);
}


// Invalidates this code object by clearing its embedded objects and dropping
// its relocation information.
void Code::InvalidateRelocation() {
  InvalidateEmbeddedObjects();
  set_relocation_info(GetHeap()->empty_byte_array());
}


// Overwrites every embedded object with undefined and every cell with the
// undefined cell, so the code no longer keeps them alive.
void Code::InvalidateEmbeddedObjects() {
  Object* undefined = GetHeap()->undefined_value();
  Cell* undefined_cell = GetHeap()->undefined_cell();
  int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL);
  for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (mode == RelocInfo::EMBEDDED_OBJECT) {
      it.rinfo()->set_target_object(undefined, SKIP_WRITE_BARRIER);
    } else if (mode == RelocInfo::CELL) {
      it.rinfo()->set_target_cell(undefined_cell, SKIP_WRITE_BARRIER);
    }
  }
}


// Applies |delta| to all relocation entries covered by kApplyMask after the
// code object has moved, then flushes the instruction cache.
void Code::Relocate(intptr_t delta) {
  for (RelocIterator it(this, RelocInfo::kApplyMask); !it.done(); it.next()) {
    it.rinfo()->apply(delta, SKIP_ICACHE_FLUSH);
  }
  CpuFeatures::FlushICache(instruction_start(), instruction_size());
}


// Copies generated instructions and relocation info from |desc| into this
// code object, unboxing embedded handles into raw object pointers.
void Code::CopyFrom(const CodeDesc& desc) {
  DCHECK(Marking::Color(this) == Marking::WHITE_OBJECT);

  // copy code
  CopyBytes(instruction_start(), desc.buffer,
            static_cast<size_t>(desc.instr_size));

  // copy reloc info
  CopyBytes(relocation_start(),
            desc.buffer + desc.buffer_size - desc.reloc_size,
            static_cast<size_t>(desc.reloc_size));

  // unbox handles and relocate
  intptr_t delta = instruction_start() - desc.buffer;
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) |
                  RelocInfo::kApplyMask;
  // Needed to find target_object and runtime_entry on X64
  Assembler* origin = desc.origin;
  AllowDeferredHandleDereference embedding_raw_address;
  for (RelocIterator it(this, mode_mask); !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (mode == RelocInfo::EMBEDDED_OBJECT) {
      Handle<Object> p = it.rinfo()->target_object_handle(origin);
      it.rinfo()->set_target_object(*p, SKIP_WRITE_BARRIER, SKIP_ICACHE_FLUSH);
    } else if (mode == RelocInfo::CELL) {
      Handle<Cell> cell = it.rinfo()->target_cell_handle();
      it.rinfo()->set_target_cell(*cell, SKIP_WRITE_BARRIER, SKIP_ICACHE_FLUSH);
    } else if (RelocInfo::IsCodeTarget(mode)) {
      // rewrite code handles in inline cache targets to direct
      // pointers to the first instruction in the code object
      Handle<Object> p = it.rinfo()->target_object_handle(origin);
      Code* code = Code::cast(*p);
      it.rinfo()->set_target_address(code->instruction_start(),
                                     SKIP_WRITE_BARRIER,
                                     SKIP_ICACHE_FLUSH);
    } else if (RelocInfo::IsRuntimeEntry(mode)) {
      Address p = it.rinfo()->target_runtime_entry(origin);
      it.rinfo()->set_target_runtime_entry(p, SKIP_WRITE_BARRIER,
                                           SKIP_ICACHE_FLUSH);
    } else if (mode == RelocInfo::CODE_AGE_SEQUENCE) {
      Handle<Object> p = it.rinfo()->code_age_stub_handle(origin);
      Code* code = Code::cast(*p);
      it.rinfo()->set_code_age_stub(code, SKIP_ICACHE_FLUSH);
    } else {
      it.rinfo()->apply(delta, SKIP_ICACHE_FLUSH);
    }
  }
  CpuFeatures::FlushICache(instruction_start(), instruction_size());
}


// Locate the source position which is closest to the address in the code.
// This is using the source position information embedded in the relocation
// info.
// The position returned is relative to the beginning of the script where the
// source for this function is found.
int Code::SourcePosition(Address pc) {
  int distance = kMaxInt;
  int position = RelocInfo::kNoPosition;  // Initially no position found.
  // Run through all the relocation info to find the best matching source
  // position. All the code needs to be considered as the sequence of the
  // instructions in the code does not necessarily follow the same order as the
  // source.
  RelocIterator it(this, RelocInfo::kPositionMask);
  while (!it.done()) {
    // Only look at positions recorded before the current pc.
    if (it.rinfo()->pc() < pc) {
      // Get position and distance.

      int dist = static_cast<int>(pc - it.rinfo()->pc());
      int pos = static_cast<int>(it.rinfo()->data());
      // If this position is closer than the current candidate or if it has the
      // same distance as the current candidate and the position is higher then
      // this position is the new candidate.
      if ((dist < distance) ||
          (dist == distance && pos > position)) {
        position = pos;
        distance = dist;
      }
    }
    it.next();
  }
  return position;
}


// Same as Code::SourcePosition above except it only looks for statement
// positions.
int Code::SourceStatementPosition(Address pc) {
  // First find the position as close as possible using all position
  // information.
  int position = SourcePosition(pc);
  // Now find the closest statement position before the position.
  int statement_position = 0;
  RelocIterator it(this, RelocInfo::kPositionMask);
  while (!it.done()) {
    if (RelocInfo::IsStatementPosition(it.rinfo()->rmode())) {
      int p = static_cast<int>(it.rinfo()->data());
      if (statement_position < p && p <= position) {
        statement_position = p;
      }
    }
    it.next();
  }
  return statement_position;
}


SafepointEntry Code::GetSafepointEntry(Address pc) {
  SafepointTable table(this);
  return table.FindEntry(pc);
}


// Returns the |n|th embedded object whose map is |match_map|, or NULL if
// there are fewer than |n| such objects.
Object* Code::FindNthObject(int n, Map* match_map) {
  DCHECK(is_inline_cache_stub());
  DisallowHeapAllocation no_allocation;
  int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
  for (RelocIterator it(this, mask); !it.done(); it.next()) {
    RelocInfo* info = it.rinfo();
    Object* object = info->target_object();
    if (object->IsHeapObject()) {
      if (HeapObject::cast(object)->map() == match_map) {
        if (--n == 0) return object;
      }
    }
  }
  return NULL;
}


AllocationSite* Code::FindFirstAllocationSite() {
  Object* result = FindNthObject(1, GetHeap()->allocation_site_map());
  return (result != NULL) ? AllocationSite::cast(result) : NULL;
}


Map* Code::FindFirstMap() {
  Object* result = FindNthObject(1, GetHeap()->meta_map());
  return (result != NULL) ?
Map::cast(result) : NULL; 10259 } 10260 10261 10262 void Code::FindAndReplace(const FindAndReplacePattern& pattern) { 10263 DCHECK(is_inline_cache_stub() || is_handler()); 10264 DisallowHeapAllocation no_allocation; 10265 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); 10266 STATIC_ASSERT(FindAndReplacePattern::kMaxCount < 32); 10267 int current_pattern = 0; 10268 for (RelocIterator it(this, mask); !it.done(); it.next()) { 10269 RelocInfo* info = it.rinfo(); 10270 Object* object = info->target_object(); 10271 if (object->IsHeapObject()) { 10272 Map* map = HeapObject::cast(object)->map(); 10273 if (map == *pattern.find_[current_pattern]) { 10274 info->set_target_object(*pattern.replace_[current_pattern]); 10275 if (++current_pattern == pattern.count_) return; 10276 } 10277 } 10278 } 10279 UNREACHABLE(); 10280 } 10281 10282 10283 void Code::FindAllMaps(MapHandleList* maps) { 10284 DCHECK(is_inline_cache_stub()); 10285 DisallowHeapAllocation no_allocation; 10286 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); 10287 for (RelocIterator it(this, mask); !it.done(); it.next()) { 10288 RelocInfo* info = it.rinfo(); 10289 Object* object = info->target_object(); 10290 if (object->IsMap()) maps->Add(handle(Map::cast(object))); 10291 } 10292 } 10293 10294 10295 Code* Code::FindFirstHandler() { 10296 DCHECK(is_inline_cache_stub()); 10297 DisallowHeapAllocation no_allocation; 10298 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET); 10299 for (RelocIterator it(this, mask); !it.done(); it.next()) { 10300 RelocInfo* info = it.rinfo(); 10301 Code* code = Code::GetCodeFromTargetAddress(info->target_address()); 10302 if (code->kind() == Code::HANDLER) return code; 10303 } 10304 return NULL; 10305 } 10306 10307 10308 bool Code::FindHandlers(CodeHandleList* code_list, int length) { 10309 DCHECK(is_inline_cache_stub()); 10310 DisallowHeapAllocation no_allocation; 10311 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET); 10312 int i = 0; 10313 for 
(RelocIterator it(this, mask); !it.done(); it.next()) { 10314 if (i == length) return true; 10315 RelocInfo* info = it.rinfo(); 10316 Code* code = Code::GetCodeFromTargetAddress(info->target_address()); 10317 // IC stubs with handlers never contain non-handler code objects before 10318 // handler targets. 10319 if (code->kind() != Code::HANDLER) break; 10320 code_list->Add(Handle<Code>(code)); 10321 i++; 10322 } 10323 return i == length; 10324 } 10325 10326 10327 MaybeHandle<Code> Code::FindHandlerForMap(Map* map) { 10328 DCHECK(is_inline_cache_stub()); 10329 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) | 10330 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); 10331 bool return_next = false; 10332 for (RelocIterator it(this, mask); !it.done(); it.next()) { 10333 RelocInfo* info = it.rinfo(); 10334 if (info->rmode() == RelocInfo::EMBEDDED_OBJECT) { 10335 Object* object = info->target_object(); 10336 if (object == map) return_next = true; 10337 } else if (return_next) { 10338 Code* code = Code::GetCodeFromTargetAddress(info->target_address()); 10339 DCHECK(code->kind() == Code::HANDLER); 10340 return handle(code); 10341 } 10342 } 10343 return MaybeHandle<Code>(); 10344 } 10345 10346 10347 Name* Code::FindFirstName() { 10348 DCHECK(is_inline_cache_stub()); 10349 DisallowHeapAllocation no_allocation; 10350 int mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); 10351 for (RelocIterator it(this, mask); !it.done(); it.next()) { 10352 RelocInfo* info = it.rinfo(); 10353 Object* object = info->target_object(); 10354 if (object->IsName()) return Name::cast(object); 10355 } 10356 return NULL; 10357 } 10358 10359 10360 void Code::ClearInlineCaches() { 10361 ClearInlineCaches(NULL); 10362 } 10363 10364 10365 void Code::ClearInlineCaches(Code::Kind kind) { 10366 ClearInlineCaches(&kind); 10367 } 10368 10369 10370 void Code::ClearInlineCaches(Code::Kind* kind) { 10371 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) | 10372 
RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) | 10373 RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID); 10374 for (RelocIterator it(this, mask); !it.done(); it.next()) { 10375 RelocInfo* info = it.rinfo(); 10376 Code* target(Code::GetCodeFromTargetAddress(info->target_address())); 10377 if (target->is_inline_cache_stub()) { 10378 if (kind == NULL || *kind == target->kind()) { 10379 IC::Clear(this->GetIsolate(), info->pc(), 10380 info->host()->constant_pool()); 10381 } 10382 } 10383 } 10384 } 10385 10386 10387 void SharedFunctionInfo::ClearTypeFeedbackInfo() { 10388 TypeFeedbackVector* vector = feedback_vector(); 10389 Heap* heap = GetHeap(); 10390 int length = vector->length(); 10391 10392 for (int i = 0; i < length; i++) { 10393 Object* obj = vector->get(i); 10394 if (obj->IsHeapObject()) { 10395 InstanceType instance_type = 10396 HeapObject::cast(obj)->map()->instance_type(); 10397 switch (instance_type) { 10398 case ALLOCATION_SITE_TYPE: 10399 // AllocationSites are not cleared because they do not store 10400 // information that leaks. 10401 break; 10402 // Fall through... 
        default:
          vector->set(i, TypeFeedbackVector::RawUninitializedSentinel(heap),
                      SKIP_WRITE_BARRIER);
      }
    }
  }
}


// Looks up the AST id recorded in this FUNCTION code's back edge table for
// the given pc offset; BailoutId::None() if no back edge matches.
BailoutId Code::TranslatePcOffsetToAstId(uint32_t pc_offset) {
  DisallowHeapAllocation no_gc;
  DCHECK(kind() == FUNCTION);
  BackEdgeTable back_edges(this, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    if (back_edges.pc_offset(i) == pc_offset) return back_edges.ast_id(i);
  }
  return BailoutId::None();
}


// Inverse of TranslatePcOffsetToAstId; the AST id must be present in the
// back edge table.
uint32_t Code::TranslateAstIdToPcOffset(BailoutId ast_id) {
  DisallowHeapAllocation no_gc;
  DCHECK(kind() == FUNCTION);
  BackEdgeTable back_edges(this, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    if (back_edges.ast_id(i) == ast_id) return back_edges.pc_offset(i);
  }
  UNREACHABLE();  // We expect to find the back edge.
  return 0;
}


// Patches the code age sequence at |sequence| back to the "young" state.
void Code::MakeCodeAgeSequenceYoung(byte* sequence, Isolate* isolate) {
  PatchPlatformCodeAge(isolate, sequence, kNoAgeCodeAge, NO_MARKING_PARITY);
}


// Patches the code age sequence to record that the code has now executed
// (at least) once.
void Code::MarkCodeAsExecuted(byte* sequence, Isolate* isolate) {
  PatchPlatformCodeAge(isolate, sequence, kExecutedOnceCodeAge,
                       NO_MARKING_PARITY);
}


// Maps the raw sentinel ages onto the ages used for flushing decisions.
static Code::Age EffectiveAge(Code::Age age) {
  if (age == Code::kNotExecutedCodeAge) {
    // Treat code that has never been executed as old immediately.
    age = Code::kIsOldCodeAge;
  } else if (age == Code::kExecutedOnceCodeAge) {
    // Pre-age code that has only been executed once.
    age = Code::kPreAgedCodeAge;
  }
  return age;
}


// Advances this code object's age by one step, alternating marking parity so
// a code object is aged at most once per GC cycle.
void Code::MakeOlder(MarkingParity current_parity) {
  byte* sequence = FindCodeAgeSequence();
  if (sequence != NULL) {
    Age age;
    MarkingParity code_parity;
    Isolate* isolate = GetIsolate();
    GetCodeAgeAndParity(isolate, sequence, &age, &code_parity);
    age = EffectiveAge(age);
    // Only age further if not already at the last age and not already aged
    // during this marking cycle (same parity).
    if (age != kLastCodeAge && code_parity != current_parity) {
      PatchPlatformCodeAge(isolate,
                           sequence,
                           static_cast<Age>(age + 1),
                           current_parity);
    }
  }
}


// True once the effective age has reached the "old" threshold.
bool Code::IsOld() {
  return GetAge() >= kIsOldCodeAge;
}


// Returns the address of the code age sequence in the prologue, or NULL if
// this code object is not subject to aging.
byte* Code::FindCodeAgeSequence() {
  return FLAG_age_code &&
         prologue_offset() != Code::kPrologueOffsetNotSet &&
         (kind() == OPTIMIZED_FUNCTION ||
          (kind() == FUNCTION && !has_debug_break_slots()))
      ? instruction_start() + prologue_offset()
      : NULL;
}


// Age after collapsing the execution-count sentinels (see EffectiveAge).
Code::Age Code::GetAge() {
  return EffectiveAge(GetRawAge());
}


// Age exactly as encoded in the code age sequence, without sentinel mapping.
Code::Age Code::GetRawAge() {
  byte* sequence = FindCodeAgeSequence();
  if (sequence == NULL) {
    return kNoAgeCodeAge;
  }
  Age age;
  MarkingParity parity;
  GetCodeAgeAndParity(GetIsolate(), sequence, &age, &parity);
  return age;
}


// Decodes age and marking parity by matching |code| against the known
// "make young again" builtin stubs and the executed-once/twice markers.
void Code::GetCodeAgeAndParity(Code* code, Age* age,
                               MarkingParity* parity) {
  Isolate* isolate = code->GetIsolate();
  Builtins* builtins = isolate->builtins();
  Code* stub = NULL;
#define HANDLE_CODE_AGE(AGE)                                            \
  stub = *builtins->Make##AGE##CodeYoungAgainEvenMarking();             \
  if (code == stub) {                                                   \
    *age = k##AGE##CodeAge;                                             \
    *parity = EVEN_MARKING_PARITY;                                      \
    return;                                                             \
  }                                                                     \
  stub = *builtins->Make##AGE##CodeYoungAgainOddMarking();              \
  if (code == stub) {                                                   \
    *age = k##AGE##CodeAge;                                             \
    *parity = ODD_MARKING_PARITY;                                       \
    return;                                                             \
  }
  CODE_AGE_LIST(HANDLE_CODE_AGE)
#undef HANDLE_CODE_AGE
  stub = *builtins->MarkCodeAsExecutedOnce();
  if (code == stub) {
    *age = kNotExecutedCodeAge;
    *parity = NO_MARKING_PARITY;
    return;
  }
  stub = *builtins->MarkCodeAsExecutedTwice();
  if (code == stub) {
    *age = kExecutedOnceCodeAge;
    *parity = NO_MARKING_PARITY;
    return;
  }
  UNREACHABLE();
}


// Inverse of GetCodeAgeAndParity: returns the builtin stub that encodes the
// given age/parity combination.
Code* Code::GetCodeAgeStub(Isolate* isolate, Age age, MarkingParity parity) {
  Builtins* builtins = isolate->builtins();
  switch (age) {
#define HANDLE_CODE_AGE(AGE)                                            \
    case k##AGE##CodeAge: {                                             \
      Code* stub = parity == EVEN_MARKING_PARITY                        \
          ? *builtins->Make##AGE##CodeYoungAgainEvenMarking()           \
          : *builtins->Make##AGE##CodeYoungAgainOddMarking();           \
      return stub;                                                      \
    }
    CODE_AGE_LIST(HANDLE_CODE_AGE)
#undef HANDLE_CODE_AGE
    case kNotExecutedCodeAge: {
      DCHECK(parity == NO_MARKING_PARITY);
      return *builtins->MarkCodeAsExecutedOnce();
    }
    case kExecutedOnceCodeAge: {
      DCHECK(parity == NO_MARKING_PARITY);
      return *builtins->MarkCodeAsExecutedTwice();
    }
    default:
      UNREACHABLE();
      break;
  }
  return NULL;
}


// Prints the comment preceding the runtime entry whose deoptimization id
// (eager, soft or lazy) matches |bailout_id|. Comments are emitted into the
// relocation info before the corresponding runtime-entry reloc entry.
void Code::PrintDeoptLocation(FILE* out, int bailout_id) {
  const char* last_comment = NULL;
  int mask = RelocInfo::ModeMask(RelocInfo::COMMENT)
      | RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
  for (RelocIterator it(this, mask); !it.done(); it.next()) {
    RelocInfo* info = it.rinfo();
    if (info->rmode() == RelocInfo::COMMENT) {
      last_comment = reinterpret_cast<const char*>(info->data());
    } else if (last_comment != NULL) {
      if ((bailout_id == Deoptimizer::GetDeoptimizationId(
              GetIsolate(), info->target_address(), Deoptimizer::EAGER)) ||
          (bailout_id == Deoptimizer::GetDeoptimizationId(
              GetIsolate(), info->target_address(), Deoptimizer::SOFT)) ||
          (bailout_id == Deoptimizer::GetDeoptimizationId(
              GetIsolate(), info->target_address(), Deoptimizer::LAZY))) {
        CHECK(RelocInfo::IsRuntimeEntry(info->rmode()));
        PrintF(out, " %s\n", last_comment);
        return;
      }
    }
  }
}


// True iff |pc| is one of the recorded deoptimization pcs of this optimized
// code object.
bool Code::CanDeoptAt(Address pc) {
  DeoptimizationInputData* deopt_data =
      DeoptimizationInputData::cast(deoptimization_data());
  Address code_start_address = instruction_start();
  for (int i = 0; i < deopt_data->DeoptCount(); i++) {
    // Entries with pc -1 carry no pc and are skipped.
    if (deopt_data->Pc(i)->value() == -1) continue;
    Address address = code_start_address + deopt_data->Pc(i)->value();
    if (address == pc) return true;
  }
  return false;
}


// Identify kind of code.
const char* Code::Kind2String(Kind kind) {
  switch (kind) {
#define CASE(name) case name: return #name;
    CODE_KIND_LIST(CASE)
#undef CASE
    case NUMBER_OF_KINDS: break;
  }
  UNREACHABLE();
  return NULL;
}


#ifdef ENABLE_DISASSEMBLER

// Dumps the deoptimization input data table, and (verbose mode) the full
// frame translation for every deopt point.
void DeoptimizationInputData::DeoptimizationInputDataPrint(
    OStream& os) {  // NOLINT
  disasm::NameConverter converter;
  int deopt_count = DeoptCount();
  os << "Deoptimization Input Data (deopt points = " << deopt_count << ")\n";
  if (0 != deopt_count) {
    os << " index ast id argc pc";
    if (FLAG_print_code_verbose) os << " commands";
    os << "\n";
  }
  for (int i = 0; i < deopt_count; i++) {
    // TODO(svenpanne) Add some basic formatting to our streams.
    Vector<char> buf1 = Vector<char>::New(128);
    SNPrintF(buf1, "%6d %6d %6d %6d", i, AstId(i).ToInt(),
             ArgumentsStackHeight(i)->value(), Pc(i)->value());
    os << buf1.start();

    if (!FLAG_print_code_verbose) {
      os << "\n";
      continue;
    }
    // Print details of the frame translation.
    int translation_index = TranslationIndex(i)->value();
    TranslationIterator iterator(TranslationByteArray(), translation_index);
    Translation::Opcode opcode =
        static_cast<Translation::Opcode>(iterator.Next());
    // Every translation starts with a BEGIN opcode carrying frame counts.
    DCHECK(Translation::BEGIN == opcode);
    int frame_count = iterator.Next();
    int jsframe_count = iterator.Next();
    os << " " << Translation::StringFor(opcode)
       << " {frame count=" << frame_count
       << ", js frame count=" << jsframe_count << "}\n";

    // Decode and print each translation command until the next BEGIN (which
    // belongs to the following deopt point) or the end of the byte array.
    while (iterator.HasNext() &&
           Translation::BEGIN !=
           (opcode = static_cast<Translation::Opcode>(iterator.Next()))) {
      Vector<char> buf2 = Vector<char>::New(128);
      SNPrintF(buf2, "%27s %s ", "", Translation::StringFor(opcode));
      os << buf2.start();

      switch (opcode) {
        case Translation::BEGIN:
          // Handled by the loop condition above.
          UNREACHABLE();
          break;

        case Translation::JS_FRAME: {
          int ast_id = iterator.Next();
          int function_id = iterator.Next();
          unsigned height = iterator.Next();
          os << "{ast_id=" << ast_id << ", function=";
          if (function_id != Translation::kSelfLiteralId) {
            Object* function = LiteralArray()->get(function_id);
            os << Brief(JSFunction::cast(function)->shared()->DebugName());
          } else {
            os << "<self>";
          }
          os << ", height=" << height << "}";
          break;
        }

        case Translation::COMPILED_STUB_FRAME: {
          Code::Kind stub_kind = static_cast<Code::Kind>(iterator.Next());
          os << "{kind=" << stub_kind << "}";
          break;
        }

        case Translation::ARGUMENTS_ADAPTOR_FRAME:
        case Translation::CONSTRUCT_STUB_FRAME: {
          int function_id = iterator.Next();
          JSFunction* function =
              JSFunction::cast(LiteralArray()->get(function_id));
          unsigned height = iterator.Next();
          os << "{function=" << Brief(function->shared()->DebugName())
             << ", height=" << height << "}";
          break;
        }

        case Translation::GETTER_STUB_FRAME:
        case Translation::SETTER_STUB_FRAME: {
          int function_id = iterator.Next();
          JSFunction* function =
              JSFunction::cast(LiteralArray()->get(function_id));
          os << "{function=" << Brief(function->shared()->DebugName()) << "}";
          break;
        }

        case Translation::REGISTER: {
          int reg_code = iterator.Next();
          os << "{input=" << converter.NameOfCPURegister(reg_code) << "}";
          break;
        }

        case Translation::INT32_REGISTER: {
          int reg_code = iterator.Next();
          os << "{input=" << converter.NameOfCPURegister(reg_code) << "}";
          break;
        }

        case Translation::UINT32_REGISTER: {
          int reg_code = iterator.Next();
          os << "{input=" << converter.NameOfCPURegister(reg_code)
             << " (unsigned)}";
          break;
        }

        case Translation::DOUBLE_REGISTER: {
          int reg_code = iterator.Next();
          os << "{input=" << DoubleRegister::AllocationIndexToString(reg_code)
             << "}";
          break;
        }

        case Translation::STACK_SLOT: {
          int input_slot_index = iterator.Next();
          os << "{input=" << input_slot_index << "}";
          break;
        }

        case Translation::INT32_STACK_SLOT: {
          int input_slot_index = iterator.Next();
          os << "{input=" << input_slot_index << "}";
          break;
        }

        case Translation::UINT32_STACK_SLOT: {
          int input_slot_index = iterator.Next();
          os << "{input=" << input_slot_index << " (unsigned)}";
          break;
        }

        case Translation::DOUBLE_STACK_SLOT: {
          int input_slot_index = iterator.Next();
          os << "{input=" << input_slot_index << "}";
          break;
        }

        case Translation::LITERAL: {
          unsigned literal_index = iterator.Next();
          os << "{literal_id=" << literal_index << "}";
          break;
        }

        case Translation::DUPLICATED_OBJECT: {
          int object_index = iterator.Next();
          os << "{object_index=" << object_index << "}";
          break;
        }

        case Translation::ARGUMENTS_OBJECT:
        case Translation::CAPTURED_OBJECT: {
          int args_length = iterator.Next();
          os << "{length=" << args_length << "}";
          break;
        }
      }
      os << "\n";
    }
  }
}


// Dumps the deoptimization output data table (full-codegen deopt points).
void DeoptimizationOutputData::DeoptimizationOutputDataPrint(
    OStream& os) {  // NOLINT
  os << "Deoptimization Output Data (deopt points = " << this->DeoptPoints()
     << ")\n";
  if (this->DeoptPoints() == 0) return;

  os << "ast id pc state\n";
  for (int i = 0; i < this->DeoptPoints(); i++) {
    int pc_and_state = this->PcAndState(i)->value();
    // TODO(svenpanne) Add some basic formatting to our streams.
    Vector<char> buf = Vector<char>::New(100);
    SNPrintF(buf, "%6d %8d %s\n", this->AstId(i).ToInt(),
             FullCodeGenerator::PcField::decode(pc_and_state),
             FullCodeGenerator::State2String(
                 FullCodeGenerator::StateField::decode(pc_and_state)));
    os << buf.start();
  }
}


// Human-readable name of an inline cache state, for disassembly output.
const char* Code::ICState2String(InlineCacheState state) {
  switch (state) {
    case UNINITIALIZED: return "UNINITIALIZED";
    case PREMONOMORPHIC: return "PREMONOMORPHIC";
    case MONOMORPHIC: return "MONOMORPHIC";
    case PROTOTYPE_FAILURE:
      return "PROTOTYPE_FAILURE";
    case POLYMORPHIC: return "POLYMORPHIC";
    case MEGAMORPHIC: return "MEGAMORPHIC";
    case GENERIC: return "GENERIC";
    case DEBUG_STUB: return "DEBUG_STUB";
    case DEFAULT:
      return "DEFAULT";
  }
  UNREACHABLE();
  return NULL;
}


// Human-readable name of a stub type, for disassembly output.
const char* Code::StubType2String(StubType type) {
  switch (type) {
    case NORMAL: return "NORMAL";
    case FAST: return "FAST";
  }
  UNREACHABLE();  // keep the compiler happy
  return NULL;
}


// Prints the extra IC state; only the strict-mode bit of store ICs has a
// symbolic name here, everything else is printed numerically.
void Code::PrintExtraICState(OStream& os,  // NOLINT
                             Kind kind,
                             ExtraICState extra) {
  os << "extra_ic_state = ";
  if ((kind == STORE_IC || kind == KEYED_STORE_IC) && (extra == STRICT)) {
    os << "STRICT\n";
  } else {
    os << extra << "\n";
  }
}


// Full disassembly dump of this code object: header info, instructions,
// deopt data, safepoints/back edges and relocation info.
void Code::Disassemble(const char* name, OStream& os) {  // NOLINT
  os << "kind = " << Kind2String(kind()) << "\n";
  if (IsCodeStubOrIC()) {
    const char* n = CodeStub::MajorName(CodeStub::GetMajorKey(this), true);
    os << "major_key = " << (n == NULL ? "null" : n) << "\n";
  }
  if (is_inline_cache_stub()) {
    os << "ic_state = " << ICState2String(ic_state()) << "\n";
    PrintExtraICState(os, kind(), extra_ic_state());
    if (ic_state() == MONOMORPHIC) {
      os << "type = " << StubType2String(type()) << "\n";
    }
    if (is_compare_ic_stub()) {
      DCHECK(CodeStub::GetMajorKey(this) == CodeStub::CompareIC);
      CompareICStub stub(stub_key(), GetIsolate());
      os << "compare_state = " << CompareICState::GetStateName(stub.left())
         << "*" << CompareICState::GetStateName(stub.right()) << " -> "
         << CompareICState::GetStateName(stub.state()) << "\n";
      os << "compare_operation = " << Token::Name(stub.op()) << "\n";
    }
  }
  if ((name != NULL) && (name[0] != '\0')) {
    os << "name = " << name << "\n";
  }
  if (kind() == OPTIMIZED_FUNCTION) {
    os << "stack_slots = " << stack_slots() << "\n";
  }

  os << "Instructions (size = " << instruction_size() << ")\n";
  // TODO(svenpanne) The Disassembler should use streams, too!
  {
    // The disassembler writes to the code tracer's FILE*, not to |os|.
    CodeTracer::Scope trace_scope(GetIsolate()->GetCodeTracer());
    Disassembler::Decode(trace_scope.file(), this);
  }
  os << "\n";

  if (kind() == FUNCTION) {
    DeoptimizationOutputData* data =
        DeoptimizationOutputData::cast(this->deoptimization_data());
    data->DeoptimizationOutputDataPrint(os);
  } else if (kind() == OPTIMIZED_FUNCTION) {
    DeoptimizationInputData* data =
        DeoptimizationInputData::cast(this->deoptimization_data());
    data->DeoptimizationInputDataPrint(os);
  }
  os << "\n";

  if (is_crankshafted()) {
    SafepointTable table(this);
    os << "Safepoints (size = " << table.size() << ")\n";
    for (unsigned i = 0; i < table.length(); i++) {
      unsigned pc_offset = table.GetPcOffset(i);
      os << (instruction_start() + pc_offset) << " ";
      // TODO(svenpanne) Add some basic formatting to our streams.
      Vector<char> buf1 = Vector<char>::New(30);
      SNPrintF(buf1, "%4d", pc_offset);
      os << buf1.start() << " ";
      table.PrintEntry(i, os);
      os << " (sp -> fp) ";
      SafepointEntry entry = table.GetEntry(i);
      if (entry.deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
        Vector<char> buf2 = Vector<char>::New(30);
        SNPrintF(buf2, "%6d", entry.deoptimization_index());
        os << buf2.start();
      } else {
        os << "<none>";
      }
      if (entry.argument_count() > 0) {
        os << " argc: " << entry.argument_count();
      }
      os << "\n";
    }
    os << "\n";
  } else if (kind() == FUNCTION) {
    unsigned offset = back_edge_table_offset();
    // If there is no back edge table, the "table start" will be at or after
    // (due to alignment) the end of the instruction stream.
    if (static_cast<int>(offset) < instruction_size()) {
      DisallowHeapAllocation no_gc;
      BackEdgeTable back_edges(this, &no_gc);

      os << "Back edges (size = " << back_edges.length() << ")\n";
      os << "ast_id pc_offset loop_depth\n";

      for (uint32_t i = 0; i < back_edges.length(); i++) {
        Vector<char> buf = Vector<char>::New(100);
        SNPrintF(buf, "%6d %9u %10u\n", back_edges.ast_id(i).ToInt(),
                 back_edges.pc_offset(i), back_edges.loop_depth(i));
        os << buf.start();
      }

      os << "\n";
    }
#ifdef OBJECT_PRINT
    if (!type_feedback_info()->IsUndefined()) {
      // NOTE: this local |os| intentionally shadows the parameter, so the
      // type feedback info always goes to stdout rather than the caller's
      // stream.
      OFStream os(stdout);
      TypeFeedbackInfo::cast(type_feedback_info())->TypeFeedbackInfoPrint(os);
      os << "\n";
    }
#endif
  }

  os << "RelocInfo (size = " << relocation_size() << ")\n";
  for (RelocIterator it(this); !it.done(); it.next()) {
    it.rinfo()->Print(GetIsolate(), os);
  }
  os << "\n";
}
#endif  // ENABLE_DISASSEMBLER


// Grows (or shrinks) the fast elements backing store of |object| to
// |capacity|, choosing the new elements kind from |smi_mode| and the current
// kind, and updates the array length for JSArrays. Returns the new backing
// store.
Handle<FixedArray> JSObject::SetFastElementsCapacityAndLength(
    Handle<JSObject> object,
    int capacity,
    int length,
    SetFastElementsCapacitySmiMode smi_mode) {
  // We should never end in here with a pixel or external array.
  DCHECK(!object->HasExternalArrayElements());

  // Allocate a new fast elements backing store.
  Handle<FixedArray> new_elements =
      object->GetIsolate()->factory()->NewUninitializedFixedArray(capacity);

  ElementsKind elements_kind = object->GetElementsKind();
  ElementsKind new_elements_kind;
  // The resized array has FAST_*_SMI_ELEMENTS if the capacity mode forces it,
  // or if it's allowed and the old elements array contained only SMIs.
  bool has_fast_smi_elements =
      (smi_mode == kForceSmiElements) ||
      ((smi_mode == kAllowSmiElements) && object->HasFastSmiElements());
  if (has_fast_smi_elements) {
    // Preserve holeyness of the original elements kind.
    if (IsHoleyElementsKind(elements_kind)) {
      new_elements_kind = FAST_HOLEY_SMI_ELEMENTS;
    } else {
      new_elements_kind = FAST_SMI_ELEMENTS;
    }
  } else {
    if (IsHoleyElementsKind(elements_kind)) {
      new_elements_kind = FAST_HOLEY_ELEMENTS;
    } else {
      new_elements_kind = FAST_ELEMENTS;
    }
  }
  Handle<FixedArrayBase> old_elements(object->elements());
  ElementsAccessor* accessor = ElementsAccessor::ForKind(new_elements_kind);
  accessor->CopyElements(object, new_elements, elements_kind);

  if (elements_kind != SLOPPY_ARGUMENTS_ELEMENTS) {
    Handle<Map> new_map = (new_elements_kind != elements_kind)
        ? GetElementsTransitionMap(object, new_elements_kind)
        : handle(object->map());
    JSObject::ValidateElements(object);
    JSObject::SetMapAndElements(object, new_map, new_elements);

    // Transition through the allocation site as well if present.
    JSObject::UpdateAllocationSite(object, new_elements_kind);
  } else {
    // Sloppy-arguments objects keep their map; the new store is installed in
    // slot 1 of the parameter map.
    Handle<FixedArray> parameter_map = Handle<FixedArray>::cast(old_elements);
    parameter_map->set(1, *new_elements);
  }

  if (FLAG_trace_elements_transitions) {
    PrintElementsTransition(stdout, object, elements_kind, old_elements,
                            object->GetElementsKind(), new_elements);
  }

  if (object->IsJSArray()) {
    Handle<JSArray>::cast(object)->set_length(Smi::FromInt(length));
  }
  return new_elements;
}


// Like SetFastElementsCapacityAndLength, but transitions to a fast double
// elements backing store of the given capacity.
void JSObject::SetFastDoubleElementsCapacityAndLength(Handle<JSObject> object,
                                                      int capacity,
                                                      int length) {
  // We should never end in here with a pixel or external array.
  DCHECK(!object->HasExternalArrayElements());

  Handle<FixedArrayBase> elems =
      object->GetIsolate()->factory()->NewFixedDoubleArray(capacity);

  ElementsKind elements_kind = object->GetElementsKind();
  CHECK(elements_kind != SLOPPY_ARGUMENTS_ELEMENTS);
  ElementsKind new_elements_kind = elements_kind;
  // Preserve holeyness of the original elements kind.
  if (IsHoleyElementsKind(elements_kind)) {
    new_elements_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
  } else {
    new_elements_kind = FAST_DOUBLE_ELEMENTS;
  }

  Handle<Map> new_map = GetElementsTransitionMap(object, new_elements_kind);

  Handle<FixedArrayBase> old_elements(object->elements());
  ElementsAccessor* accessor = ElementsAccessor::ForKind(FAST_DOUBLE_ELEMENTS);
  accessor->CopyElements(object, elems, elements_kind);

  JSObject::ValidateElements(object);
  JSObject::SetMapAndElements(object, new_map, elems);

  if (FLAG_trace_elements_transitions) {
    PrintElementsTransition(stdout, object, elements_kind, old_elements,
                            object->GetElementsKind(), elems);
  }

  if (object->IsJSArray()) {
    Handle<JSArray>::cast(object)->set_length(Smi::FromInt(length));
  }
}


// static
// Allocates the initial backing store for a JSArray, filled with holes.
void JSArray::Initialize(Handle<JSArray> array, int capacity, int length) {
  DCHECK(capacity >= 0);
  array->GetIsolate()->factory()->NewJSArrayStorage(
      array, length, capacity, INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
}


// Grows the array so it can hold at least |required_size| elements.
void JSArray::Expand(Handle<JSArray> array, int required_size) {
  ElementsAccessor* accessor = array->GetElementsAccessor();
  accessor->SetCapacityAndLength(array, required_size, required_size);
}


// Returns false if the passed-in index is marked non-configurable,
// which will cause the ES5 truncation operation to halt, and thus
// no further old values need be collected.
static bool GetOldValue(Isolate* isolate,
                        Handle<JSObject> object,
                        uint32_t index,
                        List<Handle<Object> >* old_values,
                        List<uint32_t>* indices) {
  Maybe<PropertyAttributes> maybe =
      JSReceiver::GetOwnElementAttribute(object, index);
  DCHECK(maybe.has_value);
  DCHECK(maybe.value != ABSENT);
  if (maybe.value == DONT_DELETE) return false;
  Handle<Object> value;
  if (!JSObject::GetOwnElementAccessorPair(object, index).is_null()) {
    // Accessor properties are recorded as the hole; EnqueueChangeRecord
    // later elides the "oldValue" property for them.
    value = Handle<Object>::cast(isolate->factory()->the_hole_value());
  } else {
    value = Object::GetElement(isolate, object, index).ToHandleChecked();
  }
  old_values->Add(value);
  indices->Add(index);
  return true;
}

// Delivers a "splice" change record to Object.observe observers of |object|.
static void EnqueueSpliceRecord(Handle<JSArray> object,
                                uint32_t index,
                                Handle<JSArray> deleted,
                                uint32_t add_count) {
  Isolate* isolate = object->GetIsolate();
  HandleScope scope(isolate);
  Handle<Object> index_object = isolate->factory()->NewNumberFromUint(index);
  Handle<Object> add_count_object =
      isolate->factory()->NewNumberFromUint(add_count);

  Handle<Object> args[] =
      { object, index_object, deleted, add_count_object };

  Execution::Call(isolate,
                  Handle<JSFunction>(isolate->observers_enqueue_splice()),
                  isolate->factory()->undefined_value(),
                  arraysize(args),
                  args).Assert();
}


// Marks the start of a batched splice operation for Object.observe.
static void BeginPerformSplice(Handle<JSArray> object) {
  Isolate* isolate = object->GetIsolate();
  HandleScope scope(isolate);
  Handle<Object> args[] = { object };

  Execution::Call(isolate,
                  Handle<JSFunction>(isolate->observers_begin_perform_splice()),
                  isolate->factory()->undefined_value(),
                  arraysize(args),
                  args).Assert();
}


// Marks the end of a batched splice operation for Object.observe.
static void EndPerformSplice(Handle<JSArray> object) {
  Isolate* isolate = object->GetIsolate();
  HandleScope scope(isolate);
  Handle<Object> args[] = { object };

  Execution::Call(isolate,
                  Handle<JSFunction>(isolate->observers_end_perform_splice()),
                  isolate->factory()->undefined_value(),
                  arraysize(args),
                  args).Assert();
}


// Sets the length of a JSArray, collecting old values and delivering
// Object.observe change/splice records when the array is observed.
MaybeHandle<Object> JSArray::SetElementsLength(
    Handle<JSArray> array,
    Handle<Object> new_length_handle) {
  if (array->HasFastElements()) {
    // If the new array won't fit in a some non-trivial fraction of the max old
    // space size, then force it to go dictionary mode.
    int max_fast_array_size = static_cast<int>(
        (array->GetHeap()->MaxOldGenerationSize() / kDoubleSize) / 4);
    if (new_length_handle->IsNumber() &&
        NumberToInt32(*new_length_handle) >= max_fast_array_size) {
      NormalizeElements(array);
    }
  }

  // We should never end in here with a pixel or external array.
  DCHECK(array->AllowsSetElementsLength());
  if (!array->map()->is_observed()) {
    // Fast path: no observers, just delegate to the elements accessor.
    return array->GetElementsAccessor()->SetLength(array, new_length_handle);
  }

  Isolate* isolate = array->GetIsolate();
  List<uint32_t> indices;
  List<Handle<Object> > old_values;
  Handle<Object> old_length_handle(array->length(), isolate);
  uint32_t old_length = 0;
  CHECK(old_length_handle->ToArrayIndex(&old_length));
  uint32_t new_length = 0;
  CHECK(new_length_handle->ToArrayIndex(&new_length));

  static const PropertyAttributes kNoAttrFilter = NONE;
  int num_elements = array->NumberOfOwnElements(kNoAttrFilter);
  if (num_elements > 0) {
    if (old_length == static_cast<uint32_t>(num_elements)) {
      // Simple case for arrays without holes.
      for (uint32_t i = old_length - 1; i + 1 > new_length; --i) {
        if (!GetOldValue(isolate, array, i, &old_values, &indices)) break;
      }
    } else {
      // For sparse arrays, only iterate over existing elements.
      // TODO(rafaelw): For fast, sparse arrays, we can avoid iterating over
      // the to-be-removed indices twice.
      Handle<FixedArray> keys = isolate->factory()->NewFixedArray(num_elements);
      array->GetOwnElementKeys(*keys, kNoAttrFilter);
      // Keys are visited from the end; stop at the first surviving index.
      while (num_elements-- > 0) {
        uint32_t index = NumberToUint32(keys->get(num_elements));
        if (index < new_length) break;
        if (!GetOldValue(isolate, array, index, &old_values, &indices)) break;
      }
    }
  }

  Handle<Object> hresult;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate, hresult,
      array->GetElementsAccessor()->SetLength(array, new_length_handle),
      Object);

  // Re-read the length: truncation may stop early at a non-deletable element.
  CHECK(array->length()->ToArrayIndex(&new_length));
  if (old_length == new_length) return hresult;

  BeginPerformSplice(array);

  for (int i = 0; i < indices.length(); ++i) {
    // For deletions where the property was an accessor, old_values[i]
    // will be the hole, which instructs EnqueueChangeRecord to elide
    // the "oldValue" property.
    JSObject::EnqueueChangeRecord(
        array, "delete", isolate->factory()->Uint32ToString(indices[i]),
        old_values[i]);
  }
  JSObject::EnqueueChangeRecord(
      array, "update", isolate->factory()->length_string(),
      old_length_handle);

  EndPerformSplice(array);

  // Build the "deleted" array for the splice record.
  uint32_t index = Min(old_length, new_length);
  uint32_t add_count = new_length > old_length ? new_length - old_length : 0;
  uint32_t delete_count = new_length < old_length ? old_length - new_length : 0;
  Handle<JSArray> deleted = isolate->factory()->NewJSArray(0);
  if (delete_count > 0) {
    for (int i = indices.length() - 1; i >= 0; i--) {
      // Skip deletions where the property was an accessor, leaving holes
      // in the array of old values.
11225 if (old_values[i]->IsTheHole()) continue; 11226 JSObject::SetElement( 11227 deleted, indices[i] - index, old_values[i], NONE, SLOPPY).Assert(); 11228 } 11229 11230 SetProperty(deleted, isolate->factory()->length_string(), 11231 isolate->factory()->NewNumberFromUint(delete_count), 11232 STRICT).Assert(); 11233 } 11234 11235 EnqueueSpliceRecord(array, index, deleted, add_count); 11236 11237 return hresult; 11238 } 11239 11240 11241 Handle<Map> Map::GetPrototypeTransition(Handle<Map> map, 11242 Handle<Object> prototype) { 11243 FixedArray* cache = map->GetPrototypeTransitions(); 11244 int number_of_transitions = map->NumberOfProtoTransitions(); 11245 const int proto_offset = 11246 kProtoTransitionHeaderSize + kProtoTransitionPrototypeOffset; 11247 const int map_offset = kProtoTransitionHeaderSize + kProtoTransitionMapOffset; 11248 const int step = kProtoTransitionElementsPerEntry; 11249 for (int i = 0; i < number_of_transitions; i++) { 11250 if (cache->get(proto_offset + i * step) == *prototype) { 11251 Object* result = cache->get(map_offset + i * step); 11252 return Handle<Map>(Map::cast(result)); 11253 } 11254 } 11255 return Handle<Map>(); 11256 } 11257 11258 11259 Handle<Map> Map::PutPrototypeTransition(Handle<Map> map, 11260 Handle<Object> prototype, 11261 Handle<Map> target_map) { 11262 DCHECK(target_map->IsMap()); 11263 DCHECK(HeapObject::cast(*prototype)->map()->IsMap()); 11264 // Don't cache prototype transition if this map is either shared, or a map of 11265 // a prototype. 
11266 if (map->is_prototype_map()) return map; 11267 if (map->is_dictionary_map() || !FLAG_cache_prototype_transitions) return map; 11268 11269 const int step = kProtoTransitionElementsPerEntry; 11270 const int header = kProtoTransitionHeaderSize; 11271 11272 Handle<FixedArray> cache(map->GetPrototypeTransitions()); 11273 int capacity = (cache->length() - header) / step; 11274 int transitions = map->NumberOfProtoTransitions() + 1; 11275 11276 if (transitions > capacity) { 11277 if (capacity > kMaxCachedPrototypeTransitions) return map; 11278 11279 // Grow array by factor 2 over and above what we need. 11280 cache = FixedArray::CopySize(cache, transitions * 2 * step + header); 11281 11282 SetPrototypeTransitions(map, cache); 11283 } 11284 11285 // Reload number of transitions as GC might shrink them. 11286 int last = map->NumberOfProtoTransitions(); 11287 int entry = header + last * step; 11288 11289 cache->set(entry + kProtoTransitionPrototypeOffset, *prototype); 11290 cache->set(entry + kProtoTransitionMapOffset, *target_map); 11291 map->SetNumberOfProtoTransitions(last + 1); 11292 11293 return map; 11294 } 11295 11296 11297 void Map::ZapTransitions() { 11298 TransitionArray* transition_array = transitions(); 11299 // TODO(mstarzinger): Temporarily use a slower version instead of the faster 11300 // MemsetPointer to investigate a crasher. Switch back to MemsetPointer. 
11301 Object** data = transition_array->data_start(); 11302 Object* the_hole = GetHeap()->the_hole_value(); 11303 int length = transition_array->length(); 11304 for (int i = 0; i < length; i++) { 11305 data[i] = the_hole; 11306 } 11307 } 11308 11309 11310 void Map::ZapPrototypeTransitions() { 11311 FixedArray* proto_transitions = GetPrototypeTransitions(); 11312 MemsetPointer(proto_transitions->data_start(), 11313 GetHeap()->the_hole_value(), 11314 proto_transitions->length()); 11315 } 11316 11317 11318 // static 11319 void Map::AddDependentCompilationInfo(Handle<Map> map, 11320 DependentCode::DependencyGroup group, 11321 CompilationInfo* info) { 11322 Handle<DependentCode> codes = 11323 DependentCode::Insert(handle(map->dependent_code(), info->isolate()), 11324 group, info->object_wrapper()); 11325 if (*codes != map->dependent_code()) map->set_dependent_code(*codes); 11326 info->dependencies(group)->Add(map, info->zone()); 11327 } 11328 11329 11330 // static 11331 void Map::AddDependentCode(Handle<Map> map, 11332 DependentCode::DependencyGroup group, 11333 Handle<Code> code) { 11334 Handle<DependentCode> codes = DependentCode::Insert( 11335 Handle<DependentCode>(map->dependent_code()), group, code); 11336 if (*codes != map->dependent_code()) map->set_dependent_code(*codes); 11337 } 11338 11339 11340 // static 11341 void Map::AddDependentIC(Handle<Map> map, 11342 Handle<Code> stub) { 11343 DCHECK(stub->next_code_link()->IsUndefined()); 11344 int n = map->dependent_code()->number_of_entries(DependentCode::kWeakICGroup); 11345 if (n == 0) { 11346 // Slow path: insert the head of the list with possible heap allocation. 11347 Map::AddDependentCode(map, DependentCode::kWeakICGroup, stub); 11348 } else { 11349 // Fast path: link the stub to the existing head of the list without any 11350 // heap allocation. 
11351 DCHECK(n == 1); 11352 map->dependent_code()->AddToDependentICList(stub); 11353 } 11354 } 11355 11356 11357 DependentCode::GroupStartIndexes::GroupStartIndexes(DependentCode* entries) { 11358 Recompute(entries); 11359 } 11360 11361 11362 void DependentCode::GroupStartIndexes::Recompute(DependentCode* entries) { 11363 start_indexes_[0] = 0; 11364 for (int g = 1; g <= kGroupCount; g++) { 11365 int count = entries->number_of_entries(static_cast<DependencyGroup>(g - 1)); 11366 start_indexes_[g] = start_indexes_[g - 1] + count; 11367 } 11368 } 11369 11370 11371 DependentCode* DependentCode::ForObject(Handle<HeapObject> object, 11372 DependencyGroup group) { 11373 AllowDeferredHandleDereference dependencies_are_safe; 11374 if (group == DependentCode::kPropertyCellChangedGroup) { 11375 return Handle<PropertyCell>::cast(object)->dependent_code(); 11376 } else if (group == DependentCode::kAllocationSiteTenuringChangedGroup || 11377 group == DependentCode::kAllocationSiteTransitionChangedGroup) { 11378 return Handle<AllocationSite>::cast(object)->dependent_code(); 11379 } 11380 return Handle<Map>::cast(object)->dependent_code(); 11381 } 11382 11383 11384 Handle<DependentCode> DependentCode::Insert(Handle<DependentCode> entries, 11385 DependencyGroup group, 11386 Handle<Object> object) { 11387 GroupStartIndexes starts(*entries); 11388 int start = starts.at(group); 11389 int end = starts.at(group + 1); 11390 int number_of_entries = starts.number_of_entries(); 11391 // Check for existing entry to avoid duplicates. 
  for (int i = start; i < end; i++) {
    if (entries->object_at(i) == *object) return entries;
  }
  if (entries->length() < kCodesStartIndex + number_of_entries + 1) {
    // Grow by ~25% once past a small threshold.
    int capacity = kCodesStartIndex + number_of_entries + 1;
    if (capacity > 5) capacity = capacity * 5 / 4;
    Handle<DependentCode> new_entries = Handle<DependentCode>::cast(
        FixedArray::CopySize(entries, capacity, TENURED));
    // The number of codes can change after GC.
    starts.Recompute(*entries);
    start = starts.at(group);
    end = starts.at(group + 1);
    number_of_entries = starts.number_of_entries();
    // Clear the *old* array's entries (the copies now live in new_entries);
    // presumably so the old array does not keep its referents alive — TODO
    // confirm against upstream intent.
    for (int i = 0; i < number_of_entries; i++) {
      entries->clear_at(i);
    }
    // If the old fixed array was empty, we need to reset counters of the
    // new array.
    if (number_of_entries == 0) {
      for (int g = 0; g < kGroupCount; g++) {
        new_entries->set_number_of_entries(static_cast<DependencyGroup>(g), 0);
      }
    }
    entries = new_entries;
  }
  entries->ExtendGroup(group);
  entries->set_object_at(end, *object);
  entries->set_number_of_entries(group, end + 1 - start);
  return entries;
}


// Replaces the CompilationInfo wrapper for |info| in |group| with the
// finished |code| object.
void DependentCode::UpdateToFinishedCode(DependencyGroup group,
                                         CompilationInfo* info,
                                         Code* code) {
  DisallowHeapAllocation no_gc;
  AllowDeferredHandleDereference get_object_wrapper;
  Foreign* info_wrapper = *info->object_wrapper();
  GroupStartIndexes starts(this);
  int start = starts.at(group);
  int end = starts.at(group + 1);
  for (int i = start; i < end; i++) {
    if (object_at(i) == info_wrapper) {
      set_object_at(i, code);
      break;
    }
  }

#ifdef DEBUG
  // No other entry in the group may still reference |info|.
  for (int i = start; i < end; i++) {
    DCHECK(is_code_at(i) || compilation_info_at(i) != info);
  }
#endif
}


// Removes the entry for |info| from |group|, compacting the array by moving
// the last element of each subsequent group into the resulting gap.
void DependentCode::RemoveCompilationInfo(DependentCode::DependencyGroup group,
                                          CompilationInfo* info) {
  DisallowHeapAllocation no_allocation;
  AllowDeferredHandleDereference get_object_wrapper;
  Foreign* info_wrapper = *info->object_wrapper();
  GroupStartIndexes starts(this);
  int start = starts.at(group);
  int end = starts.at(group + 1);
  // Find compilation info wrapper.
  int info_pos = -1;
  for (int i = start; i < end; i++) {
    if (object_at(i) == info_wrapper) {
      info_pos = i;
      break;
    }
  }
  if (info_pos == -1) return;  // Not found.
  int gap = info_pos;
  // Use the last of each group to fill the gap in the previous group.
  for (int i = group; i < kGroupCount; i++) {
    int last_of_group = starts.at(i + 1) - 1;
    DCHECK(last_of_group >= gap);
    if (last_of_group == gap) continue;
    copy(last_of_group, gap);
    gap = last_of_group;
  }
  DCHECK(gap == starts.number_of_entries() - 1);
  clear_at(gap);  // Clear last gap.
  set_number_of_entries(group, end - start - 1);

#ifdef DEBUG
  for (int i = start; i < end - 1; i++) {
    DCHECK(is_code_at(i) || compilation_info_at(i) != info);
  }
#endif
}


// Returns true if |code| occurs in the undefined-terminated list that starts
// at |head| and is chained through Code::next_code_link.
static bool CodeListContains(Object* head, Code* code) {
  while (!head->IsUndefined()) {
    if (head == code) return true;
    head = Code::cast(head)->next_code_link();
  }
  return false;
}


// Returns true if |code| is registered in |group|.  The weak-IC group stores
// a linked list in its single slot, so it is searched differently.
bool DependentCode::Contains(DependencyGroup group, Code* code) {
  GroupStartIndexes starts(this);
  int start = starts.at(group);
  int end = starts.at(group + 1);
  if (group == kWeakICGroup) {
    return CodeListContains(object_at(start), code);
  }
  for (int i = start; i < end; i++) {
    if (object_at(i) == code) return true;
  }
  return false;
}


// Marks every code object in |group| for deoptimization, aborts any
// still-running compilations in the group, then removes the group's entries
// and compacts the array.  Returns true if any code was newly marked.
bool DependentCode::MarkCodeForDeoptimization(
    Isolate* isolate,
    DependentCode::DependencyGroup group) {
  DisallowHeapAllocation no_allocation_scope;
  DependentCode::GroupStartIndexes starts(this);
  int start = starts.at(group);
  int end = starts.at(group + 1);
  int code_entries = starts.number_of_entries();
  if (start == end) return false;

  // Mark all the code that needs to be deoptimized.
  bool marked = false;
  for (int i = start; i < end; i++) {
    if (is_code_at(i)) {
      Code* code = code_at(i);
      if (!code->marked_for_deoptimization()) {
        SetMarkedForDeoptimization(code, group);
        marked = true;
      }
    } else {
      CompilationInfo* info = compilation_info_at(i);
      info->AbortDueToDependencyChange();
    }
  }
  // Compact the array by moving all subsequent groups to fill in the new holes.
  for (int src = end, dst = start; src < code_entries; src++, dst++) {
    copy(src, dst);
  }
  // Now the holes are at the end of the array, zap them for heap-verifier.
  int removed = end - start;
  for (int i = code_entries - removed; i < code_entries; i++) {
    clear_at(i);
  }
  set_number_of_entries(group, 0);
  return marked;
}


// Marks |group| for deoptimization and, if anything was marked, runs the
// deoptimizer over the marked code.
void DependentCode::DeoptimizeDependentCodeGroup(
    Isolate* isolate,
    DependentCode::DependencyGroup group) {
  DCHECK(AllowCodeDependencyChange::IsAllowed());
  DisallowHeapAllocation no_allocation_scope;
  bool marked = MarkCodeForDeoptimization(isolate, group);

  if (marked) Deoptimizer::DeoptimizeMarkedCode(isolate);
}


// Links |stub| into the weak-IC list headed at the group's single array slot.
void DependentCode::AddToDependentICList(Handle<Code> stub) {
  DisallowHeapAllocation no_heap_allocation;
  GroupStartIndexes starts(this);
  int i = starts.at(kWeakICGroup);
  Object* head = object_at(i);
  // Try to insert the stub after the head of the list to minimize number of
  // writes to the DependentCode array, since a write to the array can make it
  // strong if it was already marked by incremental marker.
  if (head->IsCode()) {
    stub->set_next_code_link(Code::cast(head)->next_code_link());
    Code::cast(head)->set_next_code_link(*stub);
  } else {
    stub->set_next_code_link(head);
    set_object_at(i, *stub);
  }
}


// Flags |code| for deoptimization and, under --trace-deopt, logs the reason.
void DependentCode::SetMarkedForDeoptimization(Code* code,
                                               DependencyGroup group) {
  code->set_marked_for_deoptimization(true);
  if (FLAG_trace_deopt &&
      (code->deoptimization_data() != code->GetHeap()->empty_fixed_array())) {
    DeoptimizationInputData* deopt_data =
        DeoptimizationInputData::cast(code->deoptimization_data());
    CodeTracer::Scope scope(code->GetHeap()->isolate()->GetCodeTracer());
    PrintF(scope.file(), "[marking dependent code 0x%08" V8PRIxPTR
           " (opt #%d) for deoptimization, reason: %s]\n",
           reinterpret_cast<intptr_t>(code),
           deopt_data->OptimizationId()->value(), DependencyGroupName(group));
  }
}


// Human-readable name of a dependency group, used in tracing output.
const char* DependentCode::DependencyGroupName(DependencyGroup group) {
  switch (group) {
    case kWeakICGroup:
      return "weak-ic";
    case kWeakCodeGroup:
      return "weak-code";
    case kTransitionGroup:
      return "transition";
    case kPrototypeCheckGroup:
      return "prototype-check";
    case kElementsCantBeAddedGroup:
      return "elements-cant-be-added";
    case kPropertyCellChangedGroup:
      return "property-cell-changed";
    case kFieldTypeGroup:
      return "field-type";
    case kInitialMapChangedGroup:
      return "initial-map-changed";
    case kAllocationSiteTenuringChangedGroup:
      return "allocation-site-tenuring-changed";
    case kAllocationSiteTransitionChangedGroup:
      return "allocation-site-transition-changed";
  }
  UNREACHABLE();
  return "?";
}


// Returns a map like |map| but with |prototype|, consulting (and filling)
// the prototype-transition cache so equal transitions share a map.
Handle<Map> Map::TransitionToPrototype(Handle<Map> map,
                                       Handle<Object> prototype) {
  Handle<Map> new_map =
      GetPrototypeTransition(map, prototype);
  if (new_map.is_null()) {
    new_map = Copy(map);
    PutPrototypeTransition(map, prototype, new_map);
    new_map->set_prototype(*prototype);
  }
  return new_map;
}


// Implements [[Prototype]] mutation (e.g. __proto__ assignment).  Rejects
// non-extensible receivers and prototype cycles, then migrates the receiver
// (or, for JS-initiated changes, the outermost hidden-prototype holder) to a
// map with the new prototype.
MaybeHandle<Object> JSObject::SetPrototype(Handle<JSObject> object,
                                           Handle<Object> value,
                                           bool from_javascript) {
#ifdef DEBUG
  int size = object->Size();
#endif

  Isolate* isolate = object->GetIsolate();
  Heap* heap = isolate->heap();
  // Silently ignore the change if value is not a JSObject or null.
  // SpiderMonkey behaves this way.
  if (!value->IsJSReceiver() && !value->IsNull()) return value;

  // From 8.6.2 Object Internal Methods
  // ...
  // In addition, if [[Extensible]] is false the value of the [[Class]] and
  // [[Prototype]] internal properties of the object may not be modified.
  // ...
  // Implementation specific extensions that modify [[Class]], [[Prototype]]
  // or [[Extensible]] must not violate the invariants defined in the preceding
  // paragraph.
  if (!object->map()->is_extensible()) {
    Handle<Object> args[] = { object };
    THROW_NEW_ERROR(isolate, NewTypeError("non_extensible_proto",
                                          HandleVector(args, arraysize(args))),
                    Object);
  }

  // Before we can set the prototype we need to be sure
  // prototype cycles are prevented.
  // It is sufficient to validate that the receiver is not in the new prototype
  // chain.
  for (PrototypeIterator iter(isolate, *value,
                              PrototypeIterator::START_AT_RECEIVER);
       !iter.IsAtEnd(); iter.Advance()) {
    if (JSReceiver::cast(iter.GetCurrent()) == *object) {
      // Cycle detected.
      THROW_NEW_ERROR(isolate,
                      NewError("cyclic_proto", HandleVector<Object>(NULL, 0)),
                      Object);
    }
  }

  // Remember whether element callbacks were already possible in the chain,
  // to decide below whether KeyedStoreICs must be flushed.
  bool dictionary_elements_in_chain =
      object->map()->DictionaryElementsInPrototypeChainOnly();
  Handle<JSObject> real_receiver = object;

  if (from_javascript) {
    // Find the first object in the chain whose prototype object is not
    // hidden and set the new prototype on that object.
    PrototypeIterator iter(isolate, real_receiver);
    while (!iter.IsAtEnd(PrototypeIterator::END_AT_NON_HIDDEN)) {
      real_receiver =
          Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter));
      iter.Advance();
    }
  }

  // Set the new prototype of the object.
  Handle<Map> map(real_receiver->map());

  // Nothing to do if prototype is already set.
  if (map->prototype() == *value) return value;

  if (value->IsJSObject()) {
    PrototypeOptimizationMode mode =
        from_javascript ? REGULAR_PROTOTYPE : FAST_PROTOTYPE;
    JSObject::OptimizeAsPrototype(Handle<JSObject>::cast(value), mode);
  }

  Handle<Map> new_map = Map::TransitionToPrototype(map, value);
  DCHECK(new_map->prototype() == *value);
  JSObject::MigrateToMap(real_receiver, new_map);

  if (!dictionary_elements_in_chain &&
      new_map->DictionaryElementsInPrototypeChainOnly()) {
    // If the prototype chain didn't previously have element callbacks, then
    // KeyedStoreICs need to be cleared to ensure any that involve this
    // map go generic.
    object->GetHeap()->ClearAllICsByKind(Code::KEYED_STORE_IC);
  }

  heap->ClearInstanceofCache();
  DCHECK(size == object->Size());
  return value;
}


// Arguments-array flavor of EnsureCanContainElements; translates the
// stack-order argument pointer into the forward-iterating variant.
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Arguments* args,
                                        uint32_t first_arg,
                                        uint32_t arg_count,
                                        EnsureElementsMode mode) {
  // Elements in |Arguments| are ordered backwards (because they're on the
  // stack), but the method that's called here iterates over them in forward
  // direction.
  return EnsureCanContainElements(
      object, args->arguments() - first_arg - (arg_count - 1), arg_count, mode);
}


// Returns the AccessorPair for own element |index|, looking through a global
// proxy.  Empty result when there is no accessor or an indexed interceptor
// is installed.
MaybeHandle<AccessorPair> JSObject::GetOwnElementAccessorPair(
    Handle<JSObject> object,
    uint32_t index) {
  if (object->IsJSGlobalProxy()) {
    PrototypeIterator iter(object->GetIsolate(), object);
    if (iter.IsAtEnd()) return MaybeHandle<AccessorPair>();
    DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
    return GetOwnElementAccessorPair(
        Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), index);
  }

  // Check for lookup interceptor.
  if (object->HasIndexedInterceptor()) return MaybeHandle<AccessorPair>();

  return object->GetElementsAccessor()->GetAccessorPair(object, object, index);
}


// Stores an element through the object's indexed interceptor when it defines
// a setter; otherwise delegates to SetElementWithoutInterceptor.
MaybeHandle<Object> JSObject::SetElementWithInterceptor(
    Handle<JSObject> object,
    uint32_t index,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictMode strict_mode,
    bool check_prototype,
    SetPropertyMode set_mode) {
  Isolate* isolate = object->GetIsolate();

  // Make sure that the top context does not change when doing
  // callbacks or interceptor calls.
  AssertNoContextChange ncc(isolate);

  Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
  if (!interceptor->setter()->IsUndefined()) {
    v8::IndexedPropertySetterCallback setter =
        v8::ToCData<v8::IndexedPropertySetterCallback>(interceptor->setter());
    LOG(isolate,
        ApiIndexedPropertyAccess("interceptor-indexed-set", *object, index));
    PropertyCallbackArguments args(isolate, interceptor->data(), *object,
                                   *object);
    v8::Handle<v8::Value> result =
        args.Call(setter, index, v8::Utils::ToLocal(value));
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    // A non-empty result means the interceptor intercepted the store.
    if (!result.IsEmpty()) return value;
  }

  return SetElementWithoutInterceptor(object, index, value, attributes,
                                      strict_mode,
                                      check_prototype,
                                      set_mode);
}


// Loads an element whose property is backed by callback |structure|: an
// ExecutableAccessorInfo (API getter), an AccessorPair (JS getter) or a
// DeclaredAccessorInfo.
MaybeHandle<Object> JSObject::GetElementWithCallback(
    Handle<JSObject> object,
    Handle<Object> receiver,
    Handle<Object> structure,
    uint32_t index,
    Handle<Object> holder) {
  Isolate* isolate = object->GetIsolate();
  DCHECK(!structure->IsForeign());
  // api style callbacks.
  if (structure->IsExecutableAccessorInfo()) {
    Handle<ExecutableAccessorInfo> data =
        Handle<ExecutableAccessorInfo>::cast(structure);
    Object* fun_obj = data->getter();
    v8::AccessorNameGetterCallback call_fun =
        v8::ToCData<v8::AccessorNameGetterCallback>(fun_obj);
    if (call_fun == NULL) return isolate->factory()->undefined_value();
    Handle<JSObject> holder_handle = Handle<JSObject>::cast(holder);
    Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
    Handle<String> key = isolate->factory()->NumberToString(number);
    LOG(isolate, ApiNamedPropertyAccess("load", *holder_handle, *key));
    PropertyCallbackArguments
        args(isolate, data->data(), *receiver, *holder_handle);
    v8::Handle<v8::Value> result = args.Call(call_fun, v8::Utils::ToLocal(key));
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    if (result.IsEmpty()) return isolate->factory()->undefined_value();
    Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
    result_internal->VerifyApiCallResultType();
    // Rebox handle before return.
    return handle(*result_internal, isolate);
  }

  // __defineGetter__ callback
  if (structure->IsAccessorPair()) {
    Handle<Object> getter(Handle<AccessorPair>::cast(structure)->getter(),
                          isolate);
    if (getter->IsSpecFunction()) {
      // TODO(rossberg): nicer would be to cast to some JSCallable here...
      return GetPropertyWithDefinedGetter(
          receiver, Handle<JSReceiver>::cast(getter));
    }
    // Getter is not a function.
    return isolate->factory()->undefined_value();
  }

  if (structure->IsDeclaredAccessorInfo()) {
    return GetDeclaredAccessorProperty(
        receiver, Handle<DeclaredAccessorInfo>::cast(structure), isolate);
  }

  UNREACHABLE();
  return MaybeHandle<Object>();
}


// Stores an element whose property is backed by callback |structure|;
// the store-side mirror of GetElementWithCallback.
MaybeHandle<Object> JSObject::SetElementWithCallback(Handle<JSObject> object,
                                                     Handle<Object> structure,
                                                     uint32_t index,
                                                     Handle<Object> value,
                                                     Handle<JSObject> holder,
                                                     StrictMode strict_mode) {
  Isolate* isolate = object->GetIsolate();

  // We should never get here to initialize a const with the hole
  // value since a const declaration would conflict with the setter.
  DCHECK(!value->IsTheHole());
  DCHECK(!structure->IsForeign());
  if (structure->IsExecutableAccessorInfo()) {
    // api style callbacks
    Handle<ExecutableAccessorInfo> data =
        Handle<ExecutableAccessorInfo>::cast(structure);
    Object* call_obj = data->setter();
    v8::AccessorNameSetterCallback call_fun =
        v8::ToCData<v8::AccessorNameSetterCallback>(call_obj);
    if (call_fun == NULL) return value;
    Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
    Handle<String> key(isolate->factory()->NumberToString(number));
    LOG(isolate, ApiNamedPropertyAccess("store", *object, *key));
    PropertyCallbackArguments
        args(isolate, data->data(), *object, *holder);
    args.Call(call_fun,
              v8::Utils::ToLocal(key),
              v8::Utils::ToLocal(value));
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    return value;
  }

  if (structure->IsAccessorPair()) {
    Handle<Object> setter(AccessorPair::cast(*structure)->setter(), isolate);
    if (setter->IsSpecFunction()) {
      // TODO(rossberg): nicer would be to cast to some JSCallable here...
      return SetPropertyWithDefinedSetter(
          object, Handle<JSReceiver>::cast(setter), value);
    } else {
      // Getter-only accessor: silently ignore in sloppy mode, throw in strict.
      if (strict_mode == SLOPPY) return value;
      Handle<Object> key(isolate->factory()->NewNumberFromUint(index));
      Handle<Object> args[2] = { key, holder };
      THROW_NEW_ERROR(
          isolate, NewTypeError("no_setter_in_callback", HandleVector(args, 2)),
          Object);
    }
  }

  // TODO(dcarney): Handle correctly.
  if (structure->IsDeclaredAccessorInfo()) return value;

  UNREACHABLE();
  return MaybeHandle<Object>();
}


// True if this object has non-dictionary sloppy-arguments elements.
bool JSObject::HasFastArgumentsElements() {
  Heap* heap = GetHeap();
  if (!elements()->IsFixedArray()) return false;
  FixedArray* elements = FixedArray::cast(this->elements());
  if (elements->map() != heap->sloppy_arguments_elements_map()) {
    return false;
  }
  // Slot 1 of a sloppy-arguments store holds the actual argument backing
  // store (slot 0 holds the context; see SetDictionaryElement).
  FixedArray* arguments = FixedArray::cast(elements->get(1));
  return !arguments->IsDictionary();
}


// True if this object has dictionary-mode sloppy-arguments elements.
bool JSObject::HasDictionaryArgumentsElements() {
  Heap* heap = GetHeap();
  if (!elements()->IsFixedArray()) return false;
  FixedArray* elements = FixedArray::cast(this->elements());
  if (elements->map() != heap->sloppy_arguments_elements_map()) {
    return false;
  }
  FixedArray* arguments = FixedArray::cast(elements->get(1));
  return arguments->IsDictionary();
}


// Adding n elements in fast case is O(n*n).
// Note: revisit design to have dual undefined values to capture absent
// elements.
// Stores |value| at |index| in an object with fast (non-dictionary)
// elements: consults prototype-chain setters, transitions the elements kind
// (to holey / double / generic object) as required, grows the backing store
// within kMaxGap, and otherwise falls back to dictionary elements.
MaybeHandle<Object> JSObject::SetFastElement(Handle<JSObject> object,
                                             uint32_t index,
                                             Handle<Object> value,
                                             StrictMode strict_mode,
                                             bool check_prototype) {
  DCHECK(object->HasFastSmiOrObjectElements() ||
         object->HasFastArgumentsElements());

  Isolate* isolate = object->GetIsolate();

  // Array optimizations rely on the prototype lookups of Array objects always
  // returning undefined. If there is a store to the initial prototype object,
  // make sure all of these optimizations are invalidated.
  if (isolate->is_initial_object_prototype(*object) ||
      isolate->is_initial_array_prototype(*object)) {
    object->map()->dependent_code()->DeoptimizeDependentCodeGroup(isolate,
        DependentCode::kElementsCantBeAddedGroup);
  }

  Handle<FixedArray> backing_store(FixedArray::cast(object->elements()));
  if (backing_store->map() ==
      isolate->heap()->sloppy_arguments_elements_map()) {
    // Sloppy arguments: the real store lives in slot 1.
    backing_store = handle(FixedArray::cast(backing_store->get(1)));
  } else {
    backing_store = EnsureWritableFastElements(object);
  }
  uint32_t capacity = static_cast<uint32_t>(backing_store->length());

  // A store to a hole (or past the end) may hit an accessor on the
  // prototype chain; if one handles it, we are done.
  if (check_prototype &&
      (index >= capacity || backing_store->get(index)->IsTheHole())) {
    bool found;
    MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes(
        object, index, value, &found, strict_mode);
    if (found) return result;
  }

  uint32_t new_capacity = capacity;
  // Check if the length property of this object needs to be updated.
  uint32_t array_length = 0;
  bool must_update_array_length = false;
  bool introduces_holes = true;
  if (object->IsJSArray()) {
    CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&array_length));
    introduces_holes = index > array_length;
    if (index >= array_length) {
      must_update_array_length = true;
      array_length = index + 1;
    }
  } else {
    introduces_holes = index >= capacity;
  }

  // If the array is growing, and it's not growth by a single element at the
  // end, make sure that the ElementsKind is HOLEY.
  ElementsKind elements_kind = object->GetElementsKind();
  if (introduces_holes &&
      IsFastElementsKind(elements_kind) &&
      !IsFastHoleyElementsKind(elements_kind)) {
    ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
    TransitionElementsKind(object, transitioned_kind);
  }

  // Check if the capacity of the backing store needs to be increased, or if
  // a transition to slow elements is necessary.
  if (index >= capacity) {
    bool convert_to_slow = true;
    if ((index - capacity) < kMaxGap) {
      new_capacity = NewElementsCapacity(index + 1);
      DCHECK(new_capacity > index);
      if (!object->ShouldConvertToSlowElements(new_capacity)) {
        convert_to_slow = false;
      }
    }
    if (convert_to_slow) {
      NormalizeElements(object);
      return SetDictionaryElement(object, index, value, NONE, strict_mode,
                                  check_prototype);
    }
  }
  // Convert to fast double elements if appropriate.
  if (object->HasFastSmiElements() && !value->IsSmi() && value->IsNumber()) {
    // Consider fixing the boilerplate as well if we have one.
    ElementsKind to_kind = IsHoleyElementsKind(elements_kind)
        ? FAST_HOLEY_DOUBLE_ELEMENTS
        : FAST_DOUBLE_ELEMENTS;

    UpdateAllocationSite(object, to_kind);

    SetFastDoubleElementsCapacityAndLength(object, new_capacity, array_length);
    FixedDoubleArray::cast(object->elements())->set(index, value->Number());
    JSObject::ValidateElements(object);
    return value;
  }
  // Change elements kind from Smi-only to generic FAST if necessary.
  if (object->HasFastSmiElements() && !value->IsSmi()) {
    ElementsKind kind = object->HasFastHoleyElements()
        ? FAST_HOLEY_ELEMENTS
        : FAST_ELEMENTS;

    UpdateAllocationSite(object, kind);
    Handle<Map> new_map = GetElementsTransitionMap(object, kind);
    JSObject::MigrateToMap(object, new_map);
    DCHECK(IsFastObjectElementsKind(object->GetElementsKind()));
  }
  // Increase backing store capacity if that's been decided previously.
  if (new_capacity != capacity) {
    SetFastElementsCapacitySmiMode smi_mode =
        value->IsSmi() && object->HasFastSmiElements()
            ? kAllowSmiElements
            : kDontAllowSmiElements;
    Handle<FixedArray> new_elements =
        SetFastElementsCapacityAndLength(object, new_capacity, array_length,
                                         smi_mode);
    new_elements->set(index, *value);
    JSObject::ValidateElements(object);
    return value;
  }

  // Finally, set the new element and length.
  DCHECK(object->elements()->IsFixedArray());
  backing_store->set(index, *value);
  if (must_update_array_length) {
    Handle<JSArray>::cast(object)->set_length(Smi::FromInt(array_length));
  }
  return value;
}


// Stores |value| at |index| in an object whose elements are in dictionary
// mode, honoring callbacks, read-only properties, aliased arguments entries
// and non-extensibility.
MaybeHandle<Object> JSObject::SetDictionaryElement(
    Handle<JSObject> object,
    uint32_t index,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictMode strict_mode,
    bool check_prototype,
    SetPropertyMode set_mode) {
  DCHECK(object->HasDictionaryElements() ||
         object->HasDictionaryArgumentsElements());
  Isolate* isolate = object->GetIsolate();

  // Insert element in the dictionary.
  Handle<FixedArray> elements(FixedArray::cast(object->elements()));
  bool is_arguments =
      (elements->map() == isolate->heap()->sloppy_arguments_elements_map());
  Handle<SeededNumberDictionary> dictionary(is_arguments
    ? SeededNumberDictionary::cast(elements->get(1))
    : SeededNumberDictionary::cast(*elements));

  int entry = dictionary->FindEntry(index);
  if (entry != SeededNumberDictionary::kNotFound) {
    Handle<Object> element(dictionary->ValueAt(entry), isolate);
    PropertyDetails details = dictionary->DetailsAt(entry);
    if (details.type() == CALLBACKS && set_mode == SET_PROPERTY) {
      return SetElementWithCallback(object, element, index, value, object,
                                    strict_mode);
    } else {
      dictionary->UpdateMaxNumberKey(index);
      // If a value has not been initialized we allow writing to it even if it
      // is read-only (a declared const that has not been initialized). If a
      // value is being defined we skip attribute checks completely.
12083 if (set_mode == DEFINE_PROPERTY) { 12084 details = PropertyDetails( 12085 attributes, NORMAL, details.dictionary_index()); 12086 dictionary->DetailsAtPut(entry, details); 12087 } else if (details.IsReadOnly() && !element->IsTheHole()) { 12088 if (strict_mode == SLOPPY) { 12089 return isolate->factory()->undefined_value(); 12090 } else { 12091 Handle<Object> number = isolate->factory()->NewNumberFromUint(index); 12092 Handle<Object> args[2] = { number, object }; 12093 THROW_NEW_ERROR(isolate, NewTypeError("strict_read_only_property", 12094 HandleVector(args, 2)), 12095 Object); 12096 } 12097 } 12098 // Elements of the arguments object in slow mode might be slow aliases. 12099 if (is_arguments && element->IsAliasedArgumentsEntry()) { 12100 Handle<AliasedArgumentsEntry> entry = 12101 Handle<AliasedArgumentsEntry>::cast(element); 12102 Handle<Context> context(Context::cast(elements->get(0))); 12103 int context_index = entry->aliased_context_slot(); 12104 DCHECK(!context->get(context_index)->IsTheHole()); 12105 context->set(context_index, *value); 12106 // For elements that are still writable we keep slow aliasing. 12107 if (!details.IsReadOnly()) value = element; 12108 } 12109 dictionary->ValueAtPut(entry, *value); 12110 } 12111 } else { 12112 // Index not already used. Look for an accessor in the prototype chain. 12113 // Can cause GC! 12114 if (check_prototype) { 12115 bool found; 12116 MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes( 12117 object, index, value, &found, strict_mode); 12118 if (found) return result; 12119 } 12120 12121 // When we set the is_extensible flag to false we always force the 12122 // element into dictionary mode (and force them to stay there). 
12123 if (!object->map()->is_extensible()) { 12124 if (strict_mode == SLOPPY) { 12125 return isolate->factory()->undefined_value(); 12126 } else { 12127 Handle<Object> number = isolate->factory()->NewNumberFromUint(index); 12128 Handle<String> name = isolate->factory()->NumberToString(number); 12129 Handle<Object> args[1] = { name }; 12130 THROW_NEW_ERROR(isolate, NewTypeError("object_not_extensible", 12131 HandleVector(args, 1)), 12132 Object); 12133 } 12134 } 12135 12136 PropertyDetails details = PropertyDetails(attributes, NORMAL, 0); 12137 Handle<SeededNumberDictionary> new_dictionary = 12138 SeededNumberDictionary::AddNumberEntry(dictionary, index, value, 12139 details); 12140 if (*dictionary != *new_dictionary) { 12141 if (is_arguments) { 12142 elements->set(1, *new_dictionary); 12143 } else { 12144 object->set_elements(*new_dictionary); 12145 } 12146 dictionary = new_dictionary; 12147 } 12148 } 12149 12150 // Update the array length if this JSObject is an array. 12151 if (object->IsJSArray()) { 12152 JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray>::cast(object), index, 12153 value); 12154 } 12155 12156 // Attempt to put this object back in fast case. 12157 if (object->ShouldConvertToFastElements()) { 12158 uint32_t new_length = 0; 12159 if (object->IsJSArray()) { 12160 CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&new_length)); 12161 } else { 12162 new_length = dictionary->max_number_key() + 1; 12163 } 12164 bool has_smi_only_elements = false; 12165 bool should_convert_to_fast_double_elements = 12166 object->ShouldConvertToFastDoubleElements(&has_smi_only_elements); 12167 SetFastElementsCapacitySmiMode smi_mode = 12168 has_smi_only_elements ? 
            kForceSmiElements : kAllowSmiElements;

    if (should_convert_to_fast_double_elements) {
      SetFastDoubleElementsCapacityAndLength(object, new_length, new_length);
    } else {
      SetFastElementsCapacityAndLength(object, new_length, new_length,
                                       smi_mode);
    }
    JSObject::ValidateElements(object);
#ifdef DEBUG
    if (FLAG_trace_normalization) {
      OFStream os(stdout);
      os << "Object elements are fast case again:\n";
      object->Print(os);
    }
#endif
  }
  return value;
}


// Stores value at index on an object with FAST_(HOLEY_)DOUBLE_ELEMENTS.
// Non-number values force a transition to object elements; out-of-bounds
// stores may grow the backing store, go holey, or normalize to dictionary
// mode.
MaybeHandle<Object> JSObject::SetFastDoubleElement(
    Handle<JSObject> object,
    uint32_t index,
    Handle<Object> value,
    StrictMode strict_mode,
    bool check_prototype) {
  DCHECK(object->HasFastDoubleElements());

  Handle<FixedArrayBase> base_elms(FixedArrayBase::cast(object->elements()));
  uint32_t elms_length = static_cast<uint32_t>(base_elms->length());

  // If storing to an element that isn't in the array, pass the store request
  // up the prototype chain before storing in the receiver's elements.
  if (check_prototype &&
      (index >= elms_length ||
       Handle<FixedDoubleArray>::cast(base_elms)->is_the_hole(index))) {
    bool found;
    MaybeHandle<Object> result = SetElementWithCallbackSetterInPrototypes(
        object, index, value, &found, strict_mode);
    if (found) return result;
  }

  // If the value object is not a heap number, switch to fast elements and try
  // again.
  bool value_is_smi = value->IsSmi();
  bool introduces_holes = true;
  uint32_t length = elms_length;
  if (object->IsJSArray()) {
    // For arrays, only a write strictly past the current length creates a
    // hole; writing at index == length is a dense append.
    CHECK(Handle<JSArray>::cast(object)->length()->ToArrayIndex(&length));
    introduces_holes = index > length;
  } else {
    introduces_holes = index >= elms_length;
  }

  if (!value->IsNumber()) {
    // Switch to object elements (Smis disallowed since doubles were present),
    // then retry the store through the generic fast-element path.
    SetFastElementsCapacityAndLength(object, elms_length, length,
                                     kDontAllowSmiElements);
    Handle<Object> result;
    ASSIGN_RETURN_ON_EXCEPTION(
        object->GetIsolate(), result,
        SetFastElement(object, index, value, strict_mode, check_prototype),
        Object);
    JSObject::ValidateElements(object);
    return result;
  }

  double double_value = value_is_smi
      ? static_cast<double>(Handle<Smi>::cast(value)->value())
      : Handle<HeapNumber>::cast(value)->value();

  // If the array is growing, and it's not growth by a single element at the
  // end, make sure that the ElementsKind is HOLEY.
  ElementsKind elements_kind = object->GetElementsKind();
  if (introduces_holes && !IsFastHoleyElementsKind(elements_kind)) {
    ElementsKind transitioned_kind = GetHoleyElementsKind(elements_kind);
    TransitionElementsKind(object, transitioned_kind);
  }

  // Check whether there is extra space in the fixed array.
  if (index < elms_length) {
    Handle<FixedDoubleArray> elms(FixedDoubleArray::cast(object->elements()));
    elms->set(index, double_value);
    if (object->IsJSArray()) {
      // Update the length of the array if needed.
      uint32_t array_length = 0;
      CHECK(
          Handle<JSArray>::cast(object)->length()->ToArrayIndex(&array_length));
      if (index >= array_length) {
        Handle<JSArray>::cast(object)->set_length(Smi::FromInt(index + 1));
      }
    }
    return value;
  }

  // Allow gap in fast case.
  if ((index - elms_length) < kMaxGap) {
    // Try allocating extra space.
    int new_capacity = NewElementsCapacity(index+1);
    if (!object->ShouldConvertToSlowElements(new_capacity)) {
      DCHECK(static_cast<uint32_t>(new_capacity) > index);
      SetFastDoubleElementsCapacityAndLength(object, new_capacity, index + 1);
      FixedDoubleArray::cast(object->elements())->set(index, double_value);
      JSObject::ValidateElements(object);
      return value;
    }
  }

  // Otherwise default to slow case.
  DCHECK(object->HasFastDoubleElements());
  DCHECK(object->map()->has_fast_double_elements());
  DCHECK(object->elements()->IsFixedDoubleArray() ||
         object->elements()->length() == 0);

  NormalizeElements(object);
  DCHECK(object->HasDictionaryElements());
  return SetElement(object, index, value, NONE, strict_mode, check_prototype);
}


// Receiver-level element store: routes proxies to their handler trap and
// everything else to the JSObject implementation.
MaybeHandle<Object> JSReceiver::SetElement(Handle<JSReceiver> object,
                                           uint32_t index,
                                           Handle<Object> value,
                                           PropertyAttributes attributes,
                                           StrictMode strict_mode) {
  if (object->IsJSProxy()) {
    return JSProxy::SetElementWithHandler(
        Handle<JSProxy>::cast(object), object, index, value, strict_mode);
  }
  return JSObject::SetElement(
      Handle<JSObject>::cast(object), index, value, attributes, strict_mode);
}


// Sets an element on the object itself, skipping the prototype chain
// (check_prototype == false).
MaybeHandle<Object> JSObject::SetOwnElement(Handle<JSObject> object,
                                            uint32_t index,
                                            Handle<Object> value,
                                            StrictMode strict_mode) {
  DCHECK(!object->HasExternalArrayElements());
  return JSObject::SetElement(object, index, value, NONE, strict_mode, false);
}


// Top-level element store: performs ToNumber coercion for typed-array
// targets, access checks, global-proxy forwarding, attribute normalization,
// interceptor dispatch and (when observed) change-record bookkeeping.
MaybeHandle<Object> JSObject::SetElement(Handle<JSObject> object,
                                         uint32_t index,
                                         Handle<Object> value,
                                         PropertyAttributes attributes,
                                         StrictMode strict_mode,
                                         bool check_prototype,
                                         SetPropertyMode set_mode) {
  Isolate*
      isolate = object->GetIsolate();

  // Typed-array targets coerce any non-number, non-undefined value to a
  // number before the store (can run arbitrary JS via valueOf/toString).
  if (object->HasExternalArrayElements() ||
      object->HasFixedTypedArrayElements()) {
    if (!value->IsNumber() && !value->IsUndefined()) {
      ASSIGN_RETURN_ON_EXCEPTION(
          isolate, value,
          Execution::ToNumber(isolate, value), Object);
    }
  }

  // Check access rights if needed.
  if (object->IsAccessCheckNeeded()) {
    if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_SET)) {
      isolate->ReportFailedAccessCheck(object, v8::ACCESS_SET);
      RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
      return value;
    }
  }

  // A global proxy delegates the store to the real global object behind it.
  if (object->IsJSGlobalProxy()) {
    PrototypeIterator iter(isolate, object);
    if (iter.IsAtEnd()) return value;
    DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
    return SetElement(
        Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), index,
        value, attributes, strict_mode, check_prototype, set_mode);
  }

  // Don't allow element properties to be redefined for external arrays.
  if ((object->HasExternalArrayElements() ||
       object->HasFixedTypedArrayElements()) &&
      set_mode == DEFINE_PROPERTY) {
    Handle<Object> number = isolate->factory()->NewNumberFromUint(index);
    Handle<Object> args[] = { object, number };
    THROW_NEW_ERROR(isolate, NewTypeError("redef_external_array_element",
                                          HandleVector(args, arraysize(args))),
                    Object);
  }

  // Normalize the elements to enable attributes on the property.
  if ((attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) != 0) {
    Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
    // Make sure that we never go back to fast case.
    dictionary->set_requires_slow_elements();
  }

  // Unobserved objects take the fast path with no change-record overhead.
  if (!object->map()->is_observed()) {
    return object->HasIndexedInterceptor()
      ? SetElementWithInterceptor(object, index, value, attributes,
                                  strict_mode, check_prototype, set_mode)
      : SetElementWithoutInterceptor(object, index, value, attributes,
                                     strict_mode, check_prototype, set_mode);
  }

  // Observed path: capture the pre-store state so the right change records
  // ("add", "update", "reconfigure", splice) can be enqueued afterwards.
  Maybe<PropertyAttributes> maybe =
      JSReceiver::GetOwnElementAttribute(object, index);
  if (!maybe.has_value) return MaybeHandle<Object>();
  PropertyAttributes old_attributes = maybe.value;

  Handle<Object> old_value = isolate->factory()->the_hole_value();
  Handle<Object> old_length_handle;
  Handle<Object> new_length_handle;

  if (old_attributes != ABSENT) {
    // Only record the old value for data properties; accessors are not read.
    if (GetOwnElementAccessorPair(object, index).is_null()) {
      old_value = Object::GetElement(isolate, object, index).ToHandleChecked();
    }
  } else if (object->IsJSArray()) {
    // Store old array length in case adding an element grows the array.
    old_length_handle = handle(Handle<JSArray>::cast(object)->length(),
                               isolate);
  }

  // Check for lookup interceptor
  Handle<Object> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate, result,
      object->HasIndexedInterceptor()
          ? SetElementWithInterceptor(
                object, index, value, attributes,
                strict_mode, check_prototype, set_mode)
          : SetElementWithoutInterceptor(
                object, index, value, attributes,
                strict_mode, check_prototype, set_mode),
      Object);

  Handle<String> name = isolate->factory()->Uint32ToString(index);
  maybe = GetOwnElementAttribute(object, index);
  if (!maybe.has_value) return MaybeHandle<Object>();
  PropertyAttributes new_attributes = maybe.value;

  if (old_attributes == ABSENT) {
    if (object->IsJSArray() &&
        !old_length_handle->SameValue(
            Handle<JSArray>::cast(object)->length())) {
      // The store grew the array: report the add and the length change as a
      // single splice operation.
      new_length_handle = handle(Handle<JSArray>::cast(object)->length(),
                                 isolate);
      uint32_t old_length = 0;
      uint32_t new_length = 0;
      CHECK(old_length_handle->ToArrayIndex(&old_length));
      CHECK(new_length_handle->ToArrayIndex(&new_length));

      BeginPerformSplice(Handle<JSArray>::cast(object));
      EnqueueChangeRecord(object, "add", name, old_value);
      EnqueueChangeRecord(object, "update", isolate->factory()->length_string(),
                          old_length_handle);
      EndPerformSplice(Handle<JSArray>::cast(object));
      Handle<JSArray> deleted = isolate->factory()->NewJSArray(0);
      EnqueueSpliceRecord(Handle<JSArray>::cast(object), old_length, deleted,
                          new_length - old_length);
    } else {
      EnqueueChangeRecord(object, "add", name, old_value);
    }
  } else if (old_value->IsTheHole()) {
    // Existing accessor element (old value was never read): reconfigure.
    EnqueueChangeRecord(object, "reconfigure", name, old_value);
  } else {
    Handle<Object> new_value =
        Object::GetElement(isolate, object, index).ToHandleChecked();
    bool value_changed = !old_value->SameValue(*new_value);
    if (old_attributes != new_attributes) {
      if (!value_changed) old_value = isolate->factory()->the_hole_value();
      EnqueueChangeRecord(object, "reconfigure", name, old_value);
    } else if (value_changed) {
      EnqueueChangeRecord(object, "update", name, old_value);
    }
  }

  return result;
}


// Dispatches an element store to the kind-specific implementation after
// tracing-flag bookkeeping and the read-only array-length check. Callers
// have already handled interceptors and observation.
MaybeHandle<Object> JSObject::SetElementWithoutInterceptor(
    Handle<JSObject> object,
    uint32_t index,
    Handle<Object> value,
    PropertyAttributes attributes,
    StrictMode strict_mode,
    bool check_prototype,
    SetPropertyMode set_mode) {
  // Non-default attributes are only representable in dictionary mode.
  DCHECK(object->HasDictionaryElements() ||
         object->HasDictionaryArgumentsElements() ||
         (attributes & (DONT_DELETE | DONT_ENUM | READ_ONLY)) == 0);
  Isolate* isolate = object->GetIsolate();
  if (FLAG_trace_external_array_abuse &&
      IsExternalArrayElementsKind(object->GetElementsKind())) {
    CheckArrayAbuse(object, "external elements write", index);
  }
  if (FLAG_trace_js_array_abuse &&
      !IsExternalArrayElementsKind(object->GetElementsKind())) {
    if (object->IsJSArray()) {
      CheckArrayAbuse(object, "elements write", index, true);
    }
  }
  // A store that would have to grow a read-only array length is a no-op in
  // sloppy mode and a TypeError in strict mode.
  if (object->IsJSArray() && JSArray::WouldChangeReadOnlyLength(
      Handle<JSArray>::cast(object), index)) {
    if (strict_mode == SLOPPY) {
      return value;
    } else {
      return JSArray::ReadOnlyLengthError(Handle<JSArray>::cast(object));
    }
  }
  switch (object->GetElementsKind()) {
    case FAST_SMI_ELEMENTS:
    case FAST_ELEMENTS:
    case FAST_HOLEY_SMI_ELEMENTS:
    case FAST_HOLEY_ELEMENTS:
      return SetFastElement(object, index, value, strict_mode, check_prototype);
    case FAST_DOUBLE_ELEMENTS:
    case FAST_HOLEY_DOUBLE_ELEMENTS:
      return SetFastDoubleElement(object, index, value, strict_mode,
                                  check_prototype);

#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                        \
    case EXTERNAL_##TYPE##_ELEMENTS: {                                         \
      Handle<External##Type##Array> array(                                     \
          External##Type##Array::cast(object->elements()));                    \
      return External##Type##Array::SetValue(array, index, value);             \
    }                                                                          \
    case                                                                       \
        TYPE##_ELEMENTS: {                                                     \
      Handle<Fixed##Type##Array> array(                                        \
          Fixed##Type##Array::cast(object->elements()));                       \
      return Fixed##Type##Array::SetValue(array, index, value);                \
    }

    TYPED_ARRAYS(TYPED_ARRAY_CASE)

#undef TYPED_ARRAY_CASE

    case DICTIONARY_ELEMENTS:
      return SetDictionaryElement(object, index, value, attributes, strict_mode,
                                  check_prototype,
                                  set_mode);
    case SLOPPY_ARGUMENTS_ELEMENTS: {
      // parameter_map layout: slot 0 is the context, slot 1 the backing
      // store, slots 2.. hold context indices for aliased parameters.
      Handle<FixedArray> parameter_map(FixedArray::cast(object->elements()));
      uint32_t length = parameter_map->length();
      Handle<Object> probe = index < length - 2 ?
          Handle<Object>(parameter_map->get(index + 2), isolate) :
          Handle<Object>();
      if (!probe.is_null() && !probe->IsTheHole()) {
        Handle<Context> context(Context::cast(parameter_map->get(0)));
        int context_index = Handle<Smi>::cast(probe)->value();
        DCHECK(!context->get(context_index)->IsTheHole());
        context->set(context_index, *value);
        // Redefining attributes of an aliased element destroys fast aliasing.
        if (set_mode == SET_PROPERTY || attributes == NONE) return value;
        parameter_map->set_the_hole(index + 2);
        // For elements that are still writable we re-establish slow aliasing.
        if ((attributes & READ_ONLY) == 0) {
          value = Handle<Object>::cast(
              isolate->factory()->NewAliasedArgumentsEntry(context_index));
        }
      }
      Handle<FixedArray> arguments(FixedArray::cast(parameter_map->get(1)));
      if (arguments->IsDictionary()) {
        return SetDictionaryElement(object, index, value, attributes,
                                    strict_mode,
                                    check_prototype,
                                    set_mode);
      } else {
        return SetFastElement(object, index, value, strict_mode,
                              check_prototype);
      }
    }
  }
  // All possible cases have been handled above. Add a return to avoid the
  // complaints from the compiler.
  UNREACHABLE();
  return isolate->factory()->null_value();
}


// Fraction of found mementos required before a site decides to pretenure.
const double AllocationSite::kPretenureRatio = 0.85;


// Clears all pretenuring feedback, returning the site to the undecided state.
void AllocationSite::ResetPretenureDecision() {
  set_pretenure_decision(kUndecided);
  set_memento_found_count(0);
  set_memento_create_count(0);
}


// Maps the site's decision to an allocation flag; anything other than
// kTenure (including zombies) allocates in new space.
PretenureFlag AllocationSite::GetPretenureMode() {
  PretenureDecision mode = pretenure_decision();
  // Zombie objects "decide" to be untenured.
  return mode == kTenure ? TENURED : NOT_TENURED;
}


// Returns true if this site is referenced as nested_site() by any site on
// the heap's allocation-sites list. Debug/tracing only (O(n) walk).
bool AllocationSite::IsNestedSite() {
  DCHECK(FLAG_trace_track_allocation_sites);
  Object* current = GetHeap()->allocation_sites_list();
  while (current->IsAllocationSite()) {
    AllocationSite* current_site = AllocationSite::cast(current);
    if (current_site->nested_site() == this) {
      return true;
    }
    current = current_site->weak_next();
  }
  return false;
}


// Records an observed elements-kind transition on the site. Boilerplate
// sites transition the literal itself; plain sites just widen the stored
// kind. Either way, dependent optimized code is deoptimized.
void AllocationSite::DigestTransitionFeedback(Handle<AllocationSite> site,
                                              ElementsKind to_kind) {
  Isolate* isolate = site->GetIsolate();

  if (site->SitePointsToLiteral() && site->transition_info()->IsJSArray()) {
    Handle<JSArray> transition_info =
        handle(JSArray::cast(site->transition_info()));
    ElementsKind kind = transition_info->GetElementsKind();
    // if kind is holey ensure that to_kind is as well.
    if (IsHoleyElementsKind(kind)) {
      to_kind = GetHoleyElementsKind(to_kind);
    }
    if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
      // If the array is huge, it's not likely to be defined in a local
      // function, so we shouldn't make new instances of it very often.
      uint32_t length = 0;
      CHECK(transition_info->length()->ToArrayIndex(&length));
      if (length <= kMaximumArrayBytesToPretransition) {
        if (FLAG_trace_track_allocation_sites) {
          bool is_nested = site->IsNestedSite();
          PrintF(
              "AllocationSite: JSArray %p boilerplate %s updated %s->%s\n",
              reinterpret_cast<void*>(*site),
              is_nested ? "(nested)" : "",
              ElementsKindToString(kind),
              ElementsKindToString(to_kind));
        }
        JSObject::TransitionElementsKind(transition_info, to_kind);
        site->dependent_code()->DeoptimizeDependentCodeGroup(
            isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
      }
    }
  } else {
    ElementsKind kind = site->GetElementsKind();
    // if kind is holey ensure that to_kind is as well.
    if (IsHoleyElementsKind(kind)) {
      to_kind = GetHoleyElementsKind(to_kind);
    }
    if (IsMoreGeneralElementsKindTransition(kind, to_kind)) {
      if (FLAG_trace_track_allocation_sites) {
        PrintF("AllocationSite: JSArray %p site updated %s->%s\n",
               reinterpret_cast<void*>(*site),
               ElementsKindToString(kind),
               ElementsKindToString(to_kind));
      }
      site->SetElementsKind(to_kind);
      site->dependent_code()->DeoptimizeDependentCodeGroup(
          isolate, DependentCode::kAllocationSiteTransitionChangedGroup);
    }
  }
}


// static
// Registers the compilation described by |info| as dependent on this site
// for the given reason, so it can be deoptimized if the site changes.
void AllocationSite::AddDependentCompilationInfo(Handle<AllocationSite> site,
                                                 Reason reason,
                                                 CompilationInfo* info) {
  DependentCode::DependencyGroup group = site->ToDependencyGroup(reason);
  Handle<DependentCode> dep(site->dependent_code());
  Handle<DependentCode> codes =
      DependentCode::Insert(dep, group, info->object_wrapper());
  // Insert may return a new (grown) array; only write back if it moved.
  if (*codes != site->dependent_code()) site->set_dependent_code(*codes);
  info->dependencies(group)->Add(Handle<HeapObject>(*site), info->zone());
}


// Human-readable name for a pretenure decision, used in tracing output.
const char*
    AllocationSite::PretenureDecisionName(PretenureDecision decision) {
  switch (decision) {
    case kUndecided: return "undecided";
    case kDontTenure: return "don't tenure";
    case kMaybeTenure: return "maybe tenure";
    case kTenure: return "tenure";
    case kZombie: return "zombie";
    default: UNREACHABLE();
  }
  return NULL;
}


// Feeds an elements-kind transition back to the AllocationSite of a
// new-space JSArray, if one can be found via its allocation memento.
// No-op for non-arrays and old-space objects.
void JSObject::UpdateAllocationSite(Handle<JSObject> object,
                                    ElementsKind to_kind) {
  if (!object->IsJSArray()) return;

  Heap* heap = object->GetHeap();
  if (!heap->InNewSpace(*object)) return;

  Handle<AllocationSite> site;
  {
    // The memento lookup inspects raw heap memory right behind the object,
    // so no allocation may happen until the site is safely in a handle.
    DisallowHeapAllocation no_allocation;

    AllocationMemento* memento = heap->FindAllocationMemento(*object);
    if (memento == NULL) return;

    // Walk through to the Allocation Site
    site = handle(memento->GetAllocationSite());
  }
  AllocationSite::DigestTransitionFeedback(site, to_kind);
}


// Transitions the object's elements kind to (a holey variant of) to_kind,
// migrating the map and, when crossing the smi/double/object representation
// boundary, rewriting the backing store.
void JSObject::TransitionElementsKind(Handle<JSObject> object,
                                      ElementsKind to_kind) {
  ElementsKind from_kind = object->map()->elements_kind();

  // Holeyness is sticky: never transition back to a packed kind.
  if (IsFastHoleyElementsKind(from_kind)) {
    to_kind = GetHoleyElementsKind(to_kind);
  }

  if (from_kind == to_kind) return;
  // Don't update the site if to_kind isn't fast
  if (IsFastElementsKind(to_kind)) {
    UpdateAllocationSite(object, to_kind);
  }

  Isolate* isolate = object->GetIsolate();
  if (object->elements() == isolate->heap()->empty_fixed_array() ||
      (IsFastSmiOrObjectElementsKind(from_kind) &&
       IsFastSmiOrObjectElementsKind(to_kind)) ||
      (from_kind == FAST_DOUBLE_ELEMENTS &&
       to_kind == FAST_HOLEY_DOUBLE_ELEMENTS)) {
    DCHECK(from_kind != TERMINAL_FAST_ELEMENTS_KIND);
    // No change is needed to the elements() buffer, the transition
    // only requires a map change.
    Handle<Map> new_map = GetElementsTransitionMap(object, to_kind);
    MigrateToMap(object, new_map);
    if (FLAG_trace_elements_transitions) {
      Handle<FixedArrayBase> elms(object->elements());
      PrintElementsTransition(stdout, object, from_kind, elms, to_kind, elms);
    }
    return;
  }

  Handle<FixedArrayBase> elms(object->elements());
  uint32_t capacity = static_cast<uint32_t>(elms->length());
  uint32_t length = capacity;

  if (object->IsJSArray()) {
    Object* raw_length = Handle<JSArray>::cast(object)->length();
    if (raw_length->IsUndefined()) {
      // If length is undefined, then JSArray is being initialized and has no
      // elements, assume a length of zero.
      length = 0;
    } else {
      CHECK(raw_length->ToArrayIndex(&length));
    }
  }

  // Representation changes below require copying into a new backing store.
  if (IsFastSmiElementsKind(from_kind) &&
      IsFastDoubleElementsKind(to_kind)) {
    SetFastDoubleElementsCapacityAndLength(object, capacity, length);
    JSObject::ValidateElements(object);
    return;
  }

  if (IsFastDoubleElementsKind(from_kind) &&
      IsFastObjectElementsKind(to_kind)) {
    SetFastElementsCapacityAndLength(object, capacity, length,
                                     kDontAllowSmiElements);
    JSObject::ValidateElements(object);
    return;
  }

  // This method should never be called for any other case than the ones
  // handled above.
  UNREACHABLE();
}


// static
// Checks that a transition only generalizes and never loses holeyness.
bool Map::IsValidElementsTransition(ElementsKind from_kind,
                                    ElementsKind to_kind) {
  // Transitions can't go backwards.
  if (!IsMoreGeneralElementsKindTransition(from_kind, to_kind)) {
    return false;
  }

  // Transitions from HOLEY -> PACKED are not allowed.
  return !IsFastHoleyElementsKind(from_kind) ||
      IsFastHoleyElementsKind(to_kind);
}


// Grows the array's length to index + 1 if the store at index went past the
// current length. |value| is unused here; kept for the caller's convenience.
void JSArray::JSArrayUpdateLengthFromIndex(Handle<JSArray> array,
                                           uint32_t index,
                                           Handle<Object> value) {
  uint32_t old_len = 0;
  CHECK(array->length()->ToArrayIndex(&old_len));
  // Check to see if we need to update the length. For now, we make
  // sure that the length stays within 32-bits (unsigned).
  if (index >= old_len && index != 0xffffffff) {
    Handle<Object> len = array->GetIsolate()->factory()->NewNumber(
        static_cast<double>(index) + 1);
    array->set_length(*len);
  }
}


// Returns whether the "length" descriptor in a (non-dictionary) JSArray map
// is marked read-only.
bool JSArray::IsReadOnlyLengthDescriptor(Handle<Map> jsarray_map) {
  Isolate* isolate = jsarray_map->GetIsolate();
  DCHECK(!jsarray_map->is_dictionary_map());
  LookupResult lookup(isolate);
  Handle<Name> length_string = isolate->factory()->length_string();
  jsarray_map->LookupDescriptor(NULL, *length_string, &lookup);
  return lookup.IsReadOnly();
}


// Returns true if writing at |index| would have to grow the array's length
// while that length is read-only.
bool JSArray::WouldChangeReadOnlyLength(Handle<JSArray> array,
                                        uint32_t index) {
  uint32_t length = 0;
  CHECK(array->length()->ToArrayIndex(&length));
  if (length <= index) {
    LookupIterator it(array, array->GetIsolate()->factory()->length_string(),
                      LookupIterator::OWN_SKIP_INTERCEPTOR);
    CHECK_NE(LookupIterator::ACCESS_CHECK, it.state());
    CHECK(it.IsFound());
    CHECK_EQ(LookupIterator::ACCESSOR, it.state());
    return it.IsReadOnly();
  }
  return false;
}


// Throws the strict-mode TypeError for assigning to a read-only "length".
MaybeHandle<Object> JSArray::ReadOnlyLengthError(Handle<JSArray> array) {
  Isolate* isolate = array->GetIsolate();
  Handle<Name> length = isolate->factory()->length_string();
  Handle<Object> args[2] = { length, array };
  THROW_NEW_ERROR(isolate, NewTypeError("strict_read_only_property",
                                        HandleVector(args, arraysize(args))),
                  Object);
}


// Reads element |index|, giving the object's indexed interceptor first
// chance; falls back to the elements accessor and then the prototype chain.
MaybeHandle<Object> JSObject::GetElementWithInterceptor(
    Handle<JSObject> object,
    Handle<Object> receiver,
    uint32_t index) {
  Isolate* isolate = object->GetIsolate();

  // Make sure that the top context does not change when doing
  // callbacks or interceptor calls.
  AssertNoContextChange ncc(isolate);

  Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor(), isolate);
  if (!interceptor->getter()->IsUndefined()) {
    v8::IndexedPropertyGetterCallback getter =
        v8::ToCData<v8::IndexedPropertyGetterCallback>(interceptor->getter());
    LOG(isolate,
        ApiIndexedPropertyAccess("interceptor-indexed-get", *object, index));
    PropertyCallbackArguments
        args(isolate, interceptor->data(), *receiver, *object);
    v8::Handle<v8::Value> result = args.Call(getter, index);
    RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
    // An empty result means the interceptor did not handle the access.
    if (!result.IsEmpty()) {
      Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
      result_internal->VerifyApiCallResultType();
      // Rebox handle before return.
      return handle(*result_internal, isolate);
    }
  }

  ElementsAccessor* handler = object->GetElementsAccessor();
  Handle<Object> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate, result, handler->Get(receiver, object, index),
      Object);
  if (!result->IsTheHole()) return result;

  // Not found locally: continue the lookup on the prototype chain.
  PrototypeIterator iter(isolate, object);
  if (iter.IsAtEnd()) return isolate->factory()->undefined_value();
  return Object::GetElementWithReceiver(
      isolate, PrototypeIterator::GetCurrent(iter), receiver, index);
}


// True when more than half of the backing-store capacity is in use (or the
// store is empty); used to decide about dictionary <-> fast conversions.
bool JSObject::HasDenseElements() {
  int capacity = 0;
  int used = 0;
  GetElementsCapacityAndUsage(&capacity, &used);
  return (capacity == 0) || (used > (capacity / 2));
}


// Computes the backing-store capacity and the number of slots actually
// occupied, for every elements kind.
void JSObject::GetElementsCapacityAndUsage(int* capacity, int* used) {
  *capacity = 0;
  *used = 0;

  FixedArrayBase* backing_store_base = FixedArrayBase::cast(elements());
  FixedArray* backing_store = NULL;
  switch (GetElementsKind()) {
    case SLOPPY_ARGUMENTS_ELEMENTS:
      // The real backing store sits in slot 1 of the parameter map; it is
      // either a dictionary (handled here) or a fast array (falls through).
      backing_store_base =
          FixedArray::cast(FixedArray::cast(backing_store_base)->get(1));
      backing_store = FixedArray::cast(backing_store_base);
      if (backing_store->IsDictionary()) {
        SeededNumberDictionary* dictionary =
            SeededNumberDictionary::cast(backing_store);
        *capacity = dictionary->Capacity();
        *used = dictionary->NumberOfElements();
        break;
      }
      // Fall through.
    case FAST_SMI_ELEMENTS:
    case FAST_ELEMENTS:
      if (IsJSArray()) {
        // Packed array: length == number of used slots, no scan needed.
        *capacity = backing_store_base->length();
        *used = Smi::cast(JSArray::cast(this)->length())->value();
        break;
      }
      // Fall through if packing is not guaranteed.
    case FAST_HOLEY_SMI_ELEMENTS:
    case FAST_HOLEY_ELEMENTS:
      backing_store = FixedArray::cast(backing_store_base);
      *capacity = backing_store->length();
      for (int i = 0; i < *capacity; ++i) {
        if (!backing_store->get(i)->IsTheHole()) ++(*used);
      }
      break;
    case DICTIONARY_ELEMENTS: {
      SeededNumberDictionary* dictionary = element_dictionary();
      *capacity = dictionary->Capacity();
      *used = dictionary->NumberOfElements();
      break;
    }
    case FAST_DOUBLE_ELEMENTS:
      if (IsJSArray()) {
        *capacity = backing_store_base->length();
        *used = Smi::cast(JSArray::cast(this)->length())->value();
        break;
      }
      // Fall through if packing is not guaranteed.
    case FAST_HOLEY_DOUBLE_ELEMENTS: {
      *capacity = elements()->length();
      if (*capacity == 0) break;
      FixedDoubleArray * elms = FixedDoubleArray::cast(elements());
      for (int i = 0; i < *capacity; i++) {
        if (!elms->is_the_hole(i)) ++(*used);
      }
      break;
    }

#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                      \
    case EXTERNAL_##TYPE##_ELEMENTS:                                         \
    case TYPE##_ELEMENTS:                                                    \

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
    {
      // External arrays are considered 100% used.
      FixedArrayBase* external_array = FixedArrayBase::cast(elements());
      *capacity = external_array->length();
      *used = external_array->length();
      break;
    }
  }
}


// Predicts whether storing at the index encoded in |key| would push a
// fast-elements object into dictionary mode (gap too large, or the grown
// capacity failing the fast-vs-dictionary space heuristic).
bool JSObject::WouldConvertToSlowElements(Handle<Object> key) {
  uint32_t index;
  if (HasFastElements() && key->ToArrayIndex(&index)) {
    Handle<FixedArrayBase> backing_store(FixedArrayBase::cast(elements()));
    uint32_t capacity = static_cast<uint32_t>(backing_store->length());
    if (index >= capacity) {
      if ((index - capacity) >= kMaxGap) return true;
      uint32_t new_capacity = NewElementsCapacity(index + 1);
      return ShouldConvertToSlowElements(new_capacity);
    }
  }
  return false;
}


// Decides whether growing the fast backing store to |new_capacity| would be
// wasteful enough that dictionary elements are preferable.
bool JSObject::ShouldConvertToSlowElements(int new_capacity) {
  STATIC_ASSERT(kMaxUncheckedOldFastElementsLength <=
                kMaxUncheckedFastElementsLength);
  // Small capacities (with a laxer bound for new-space objects) are always
  // kept fast without further analysis.
  if (new_capacity <= kMaxUncheckedOldFastElementsLength ||
      (new_capacity <= kMaxUncheckedFastElementsLength &&
       GetHeap()->InNewSpace(this))) {
    return false;
  }
  // If the fast-case backing storage takes up roughly three times as
  // much space (in machine words) as a dictionary backing storage
  // would, the object should have slow elements.
  int old_capacity = 0;
  int used_elements = 0;
  GetElementsCapacityAndUsage(&old_capacity, &used_elements);
  int dictionary_size = SeededNumberDictionary::ComputeCapacity(used_elements) *
      SeededNumberDictionary::kEntrySize;
  return 3 * dictionary_size <= new_capacity;
}


// Decides whether a dictionary-elements object has become dense enough to
// profitably convert back to fast elements.
bool JSObject::ShouldConvertToFastElements() {
  DCHECK(HasDictionaryElements() || HasDictionaryArgumentsElements());
  // If the elements are sparse, we should not go back to fast case.
  if (!HasDenseElements()) return false;
  // An object requiring access checks is never allowed to have fast
  // elements.  If it had fast elements we would skip security checks.
  if (IsAccessCheckNeeded()) return false;
  // Observed objects may not go to fast mode because they rely on map checks,
  // and for fast element accesses we sometimes check element kinds only.
  if (map()->is_observed()) return false;

  FixedArray* elements = FixedArray::cast(this->elements());
  SeededNumberDictionary* dictionary = NULL;
  if (elements->map() == GetHeap()->sloppy_arguments_elements_map()) {
    dictionary = SeededNumberDictionary::cast(elements->get(1));
  } else {
    dictionary = SeededNumberDictionary::cast(elements);
  }
  // If an element has been added at a very high index in the elements
  // dictionary, we cannot go back to fast case.
  if (dictionary->requires_slow_elements()) return false;
  // If the dictionary backing storage takes up roughly half as much
  // space (in machine words) as a fast-case backing storage would,
  // the object should have fast elements.
  uint32_t array_size = 0;
  if (IsJSArray()) {
    CHECK(JSArray::cast(this)->length()->ToArrayIndex(&array_size));
  } else {
    array_size = dictionary->max_number_key();
  }
  uint32_t dictionary_size = static_cast<uint32_t>(dictionary->Capacity()) *
      SeededNumberDictionary::kEntrySize;
  return 2 * dictionary_size >= array_size;
}


// Returns true when every numeric value in the element dictionary is a
// number and at least one is a non-Smi double, i.e. the object would fit
// FAST_DOUBLE_ELEMENTS. Also reports via |has_smi_only_elements| whether
// all values were Smis (requires FLAG_unbox_double_arrays).
bool JSObject::ShouldConvertToFastDoubleElements(
    bool* has_smi_only_elements) {
  *has_smi_only_elements = false;
  if (HasSloppyArgumentsElements()) return false;
  if (FLAG_unbox_double_arrays) {
    DCHECK(HasDictionaryElements());
    SeededNumberDictionary* dictionary = element_dictionary();
    bool found_double = false;
    for (int i = 0; i < dictionary->Capacity(); i++) {
      Object* key = dictionary->KeyAt(i);
      if (key->IsNumber()) {
        Object* value = dictionary->ValueAt(i);
        // A single non-number value rules out double elements entirely.
        if (!value->IsNumber()) return false;
        if (!value->IsSmi()) {
          found_double = true;
        }
      }
    }
    *has_smi_only_elements = !found_double;
    return found_double;
  } else {
    return false;
  }
}


// Certain compilers request function template instantiation when they
// see the definition of the other template functions in the
// class. This requires us to have the template functions put
// together, so even though this function belongs in objects-debug.cc,
// we keep it here instead to satisfy certain compilers.
#ifdef OBJECT_PRINT
// Debug-prints every key/value pair of the dictionary to |os|.
template <typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::Print(OStream& os) {  // NOLINT
  int capacity = DerivedHashTable::Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = DerivedHashTable::KeyAt(i);
    if (DerivedHashTable::IsKey(k)) {
      os << " ";
      if (k->IsString()) {
        String::cast(k)->StringPrint(os);
      } else {
        os << Brief(k);
      }
      os << ": " << Brief(ValueAt(i)) << "\n";
    }
  }
}
#endif


// Copies all dictionary values (in slot order) into |elements|, which must
// hold exactly the dictionary's number of elements.
template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::CopyValuesTo(FixedArray* elements) {
  int pos = 0;
  int capacity = DerivedHashTable::Capacity();
  // No allocation may happen while the cached write-barrier mode is in use.
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = elements->GetWriteBarrierMode(no_gc);
  for (int i = 0; i < capacity; i++) {
    Object* k = Dictionary::KeyAt(i);
    if (Dictionary::IsKey(k)) {
      elements->set(pos++, ValueAt(i), mode);
    }
  }
  DCHECK(pos == elements->length());
}


// Returns the named-property interceptor installed (via the API function
// data of the constructor) on this object.
InterceptorInfo* JSObject::GetNamedInterceptor() {
  DCHECK(map()->has_named_interceptor());
  JSFunction* constructor = JSFunction::cast(map()->constructor());
  DCHECK(constructor->shared()->IsApiFunction());
  Object* result =
      constructor->shared()->get_api_func_data()->named_property_handler();
  return InterceptorInfo::cast(result);
}


// Returns the indexed-property interceptor installed (via the API function
// data of the constructor) on this object.
InterceptorInfo* JSObject::GetIndexedInterceptor() {
  DCHECK(map()->has_indexed_interceptor());
  JSFunction* constructor = JSFunction::cast(map()->constructor());
  DCHECK(constructor->shared()->IsApiFunction());
  Object* result =
      constructor->shared()->get_api_func_data()->indexed_property_handler();
  return InterceptorInfo::cast(result);
}


// Invokes |holder|'s named-property getter interceptor for |name|.
// Returns an empty MaybeHandle when there is no getter or the interceptor
// declines; a scheduled exception is propagated to the caller.
MaybeHandle<Object> JSObject::GetPropertyWithInterceptor(
    Handle<JSObject> holder,
    Handle<Object> receiver,
    Handle<Name> name) {
  Isolate* isolate = holder->GetIsolate();

  // TODO(rossberg): Support symbols in the API.
  if (name->IsSymbol()) return isolate->factory()->undefined_value();

  Handle<InterceptorInfo> interceptor(holder->GetNamedInterceptor(), isolate);
  Handle<String> name_string = Handle<String>::cast(name);

  if (interceptor->getter()->IsUndefined()) return MaybeHandle<Object>();

  v8::NamedPropertyGetterCallback getter =
      v8::ToCData<v8::NamedPropertyGetterCallback>(interceptor->getter());
  LOG(isolate,
      ApiNamedPropertyAccess("interceptor-named-get", *holder, *name));
  PropertyCallbackArguments
      args(isolate, interceptor->data(), *receiver, *holder);
  v8::Handle<v8::Value> result =
      args.Call(getter, v8::Utils::ToLocal(name_string));
  RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
  if (result.IsEmpty()) return MaybeHandle<Object>();

  Handle<Object> result_internal = v8::Utils::OpenHandle(*result);
  result_internal->VerifyApiCallResultType();
  // Rebox handle before return.
  return handle(*result_internal, isolate);
}


// Compute the property keys from the interceptor.
// TODO(rossberg): support symbols in API, and filter here if needed.
13130 MaybeHandle<JSObject> JSObject::GetKeysForNamedInterceptor( 13131 Handle<JSObject> object, Handle<JSReceiver> receiver) { 13132 Isolate* isolate = receiver->GetIsolate(); 13133 Handle<InterceptorInfo> interceptor(object->GetNamedInterceptor()); 13134 PropertyCallbackArguments 13135 args(isolate, interceptor->data(), *receiver, *object); 13136 v8::Handle<v8::Object> result; 13137 if (!interceptor->enumerator()->IsUndefined()) { 13138 v8::NamedPropertyEnumeratorCallback enum_fun = 13139 v8::ToCData<v8::NamedPropertyEnumeratorCallback>( 13140 interceptor->enumerator()); 13141 LOG(isolate, ApiObjectAccess("interceptor-named-enum", *object)); 13142 result = args.Call(enum_fun); 13143 } 13144 if (result.IsEmpty()) return MaybeHandle<JSObject>(); 13145 #if ENABLE_EXTRA_CHECKS 13146 CHECK(v8::Utils::OpenHandle(*result)->IsJSArray() || 13147 v8::Utils::OpenHandle(*result)->HasSloppyArgumentsElements()); 13148 #endif 13149 // Rebox before returning. 13150 return handle(*v8::Utils::OpenHandle(*result), isolate); 13151 } 13152 13153 13154 // Compute the element keys from the interceptor. 
MaybeHandle<JSObject> JSObject::GetKeysForIndexedInterceptor(
    Handle<JSObject> object, Handle<JSReceiver> receiver) {
  Isolate* isolate = receiver->GetIsolate();
  Handle<InterceptorInfo> interceptor(object->GetIndexedInterceptor());
  PropertyCallbackArguments
      args(isolate, interceptor->data(), *receiver, *object);
  // |result| stays empty ("no keys") unless the interceptor supplies an
  // enumerator callback that produces a value.
  v8::Handle<v8::Object> result;
  if (!interceptor->enumerator()->IsUndefined()) {
    v8::IndexedPropertyEnumeratorCallback enum_fun =
        v8::ToCData<v8::IndexedPropertyEnumeratorCallback>(
            interceptor->enumerator());
    LOG(isolate, ApiObjectAccess("interceptor-indexed-enum", *object));
    result = args.Call(enum_fun);
  }
  if (result.IsEmpty()) return MaybeHandle<JSObject>();
#if ENABLE_EXTRA_CHECKS
  CHECK(v8::Utils::OpenHandle(*result)->IsJSArray() ||
        v8::Utils::OpenHandle(*result)->HasSloppyArgumentsElements());
#endif
  // Rebox before returning.
  return handle(*v8::Utils::OpenHandle(*result), isolate);
}


// Returns whether |object| itself (interceptors skipped) has a property
// named |key|; the Maybe is empty when an exception is pending.
Maybe<bool> JSObject::HasRealNamedProperty(Handle<JSObject> object,
                                           Handle<Name> key) {
  LookupIterator it(object, key, LookupIterator::OWN_SKIP_INTERCEPTOR);
  Maybe<PropertyAttributes> maybe_result = GetPropertyAttributes(&it);
  if (!maybe_result.has_value) return Maybe<bool>();
  return maybe(it.IsFound());
}


// Returns whether |object| itself has an element at |index|, performing an
// access check first and following a global proxy to its global object.
Maybe<bool> JSObject::HasRealElementProperty(Handle<JSObject> object,
                                             uint32_t index) {
  Isolate* isolate = object->GetIsolate();
  HandleScope scope(isolate);
  // Check access rights if needed.
  if (object->IsAccessCheckNeeded()) {
    if (!isolate->MayIndexedAccess(object, index, v8::ACCESS_HAS)) {
      isolate->ReportFailedAccessCheck(object, v8::ACCESS_HAS);
      RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, Maybe<bool>());
      return maybe(false);
    }
  }

  if (object->IsJSGlobalProxy()) {
    HandleScope scope(isolate);
    PrototypeIterator iter(isolate, object);
    if (iter.IsAtEnd()) return maybe(false);
    DCHECK(PrototypeIterator::GetCurrent(iter)->IsJSGlobalObject());
    // Delegate to the global object behind the proxy.
    return HasRealElementProperty(
        Handle<JSObject>::cast(PrototypeIterator::GetCurrent(iter)), index);
  }

  Maybe<PropertyAttributes> result =
      GetElementAttributeWithoutInterceptor(object, object, index, false);
  if (!result.has_value) return Maybe<bool>();
  return maybe(result.value != ABSENT);
}


// Returns whether |object| itself has an accessor (callback) property named
// |key|, skipping interceptors.
Maybe<bool> JSObject::HasRealNamedCallbackProperty(Handle<JSObject> object,
                                                   Handle<Name> key) {
  LookupIterator it(object, key, LookupIterator::OWN_SKIP_INTERCEPTOR);
  Maybe<PropertyAttributes> maybe_result = GetPropertyAttributes(&it);
  if (!maybe_result.has_value) return Maybe<bool>();
  return maybe(it.state() == LookupIterator::ACCESSOR);
}


// Counts this object's own properties that pass |filter|.
int JSObject::NumberOfOwnProperties(PropertyAttributes filter) {
  if (HasFastProperties()) {
    Map* map = this->map();
    if (filter == NONE) return map->NumberOfOwnDescriptors();
    if (filter & DONT_ENUM) {
      // A valid enum cache already holds the count of enumerable properties.
      int result = map->EnumLength();
      if (result != kInvalidEnumCacheSentinel) return result;
    }
    return map->NumberOfDescribedProperties(OWN_DESCRIPTORS, filter);
  }
  return property_dictionary()->NumberOfElementsFilterAttributes(filter);
}


// Swaps entries i and j of this array, and mirrors the swap in |numbers|
// (unless |numbers| is this very array).
void FixedArray::SwapPairs(FixedArray* numbers, int i, int j) {
  Object* temp = get(i);
  set(i, get(j));
  set(j, temp);
  if (this != numbers) {
    temp = numbers->get(i);
    numbers->set(i, Smi::cast(numbers->get(j)));
    numbers->set(j, Smi::cast(temp));
  }
}


// Sorts |content| and |numbers| in tandem by the uint32 values in |numbers|
// using insertion sort; intended for short arrays only.
static void InsertionSortPairs(FixedArray* content,
                               FixedArray* numbers,
                               int len) {
  for (int i = 1; i < len; i++) {
    int j = i;
    while (j > 0 &&
           (NumberToUint32(numbers->get(j - 1)) >
            NumberToUint32(numbers->get(j)))) {
      content->SwapPairs(numbers, j - 1, j);
      j--;
    }
  }
}


// Sorts |content| and |numbers| in tandem by the uint32 values in |numbers|.
void HeapSortPairs(FixedArray* content, FixedArray* numbers, int len) {
  // In-place heap sort.
  DCHECK(content->length() == numbers->length());

  // Bottom-up max-heap construction.
  for (int i = 1; i < len; ++i) {
    int child_index = i;
    while (child_index > 0) {
      int parent_index = ((child_index + 1) >> 1) - 1;
      uint32_t parent_value = NumberToUint32(numbers->get(parent_index));
      uint32_t child_value = NumberToUint32(numbers->get(child_index));
      if (parent_value < child_value) {
        content->SwapPairs(numbers, parent_index, child_index);
      } else {
        break;
      }
      child_index = parent_index;
    }
  }

  // Extract elements and create sorted array.
  for (int i = len - 1; i > 0; --i) {
    // Put max element at the back of the array.
    content->SwapPairs(numbers, 0, i);
    // Sift down the new top element.
    int parent_index = 0;
    while (true) {
      int child_index = ((parent_index + 1) << 1) - 1;
      if (child_index >= i) break;
      uint32_t child1_value = NumberToUint32(numbers->get(child_index));
      uint32_t child2_value = NumberToUint32(numbers->get(child_index + 1));
      uint32_t parent_value = NumberToUint32(numbers->get(parent_index));
      // Pick the larger child (or child1 when child2 is outside the heap).
      if (child_index + 1 >= i || child1_value > child2_value) {
        if (parent_value > child1_value) break;
        content->SwapPairs(numbers, parent_index, child_index);
        parent_index = child_index;
      } else {
        if (parent_value > child2_value) break;
        content->SwapPairs(numbers, parent_index, child_index + 1);
        parent_index = child_index + 1;
      }
    }
  }
}


// Sort this array and the numbers as pairs wrt. the (distinct) numbers.
void FixedArray::SortPairs(FixedArray* numbers, uint32_t len) {
  DCHECK(this->length() == numbers->length());
  // For small arrays, simply use insertion sort.
  if (len <= 10) {
    InsertionSortPairs(this, numbers, len);
    return;
  }
  // Check the range of indices.
  uint32_t min_index = NumberToUint32(numbers->get(0));
  uint32_t max_index = min_index;
  uint32_t i;
  for (i = 1; i < len; i++) {
    if (NumberToUint32(numbers->get(i)) < min_index) {
      min_index = NumberToUint32(numbers->get(i));
    } else if (NumberToUint32(numbers->get(i)) > max_index) {
      max_index = NumberToUint32(numbers->get(i));
    }
  }
  if (max_index - min_index + 1 == len) {
    // Indices form a contiguous range, unless there are duplicates.
    // Do an in-place linear time sort assuming distinct numbers, but
    // avoid hanging in case they are not.
    for (i = 0; i < len; i++) {
      uint32_t p;
      uint32_t j = 0;
      // While the current element at i is not at its correct position p,
      // swap the elements at these two positions.
      while ((p = NumberToUint32(numbers->get(i)) - min_index) != i &&
             j++ < len) {
        SwapPairs(numbers, i, p);
      }
    }
  } else {
    HeapSortPairs(this, numbers, len);
    return;
  }
}


// Fill in the names of own properties into the supplied storage. The main
// purpose of this function is to provide reflection information for the object
// mirrors.
void JSObject::GetOwnPropertyNames(
    FixedArray* storage, int index, PropertyAttributes filter) {
  DCHECK(storage->length() >= (NumberOfOwnProperties(filter) - index));
  if (HasFastProperties()) {
    int real_size = map()->NumberOfOwnDescriptors();
    DescriptorArray* descs = map()->instance_descriptors();
    for (int i = 0; i < real_size; i++) {
      // Skip descriptors excluded by |filter| or rejected by FilterKey.
      if ((descs->GetDetails(i).attributes() & filter) == 0 &&
          !FilterKey(descs->GetKey(i), filter)) {
        storage->set(index++, descs->GetKey(i));
      }
    }
  } else {
    property_dictionary()->CopyKeysTo(storage,
                                      index,
                                      filter,
                                      NameDictionary::UNSORTED);
  }
}


// Counts own elements passing |filter| (counting pass of GetOwnElementKeys).
int JSObject::NumberOfOwnElements(PropertyAttributes filter) {
  return GetOwnElementKeys(NULL, filter);
}


// Counts own enumerable elements.
int JSObject::NumberOfEnumElements() {
  // Fast case for objects with no elements.
  if (!IsJSValue() && HasFastObjectElements()) {
    uint32_t length = IsJSArray() ?
        static_cast<uint32_t>(
            Smi::cast(JSArray::cast(this)->length())->value()) :
        static_cast<uint32_t>(FixedArray::cast(elements())->length());
    if (length == 0) return 0;
  }
  // Compute the number of enumerable elements.
  return NumberOfOwnElements(static_cast<PropertyAttributes>(DONT_ENUM));
}


// Writes the indices of own elements passing |filter| into |storage| (when
// non-NULL) and returns their number. With storage == NULL this is a pure
// counting pass.
int JSObject::GetOwnElementKeys(FixedArray* storage,
                                PropertyAttributes filter) {
  int counter = 0;
  switch (GetElementsKind()) {
    case FAST_SMI_ELEMENTS:
    case FAST_ELEMENTS:
    case FAST_HOLEY_SMI_ELEMENTS:
    case FAST_HOLEY_ELEMENTS: {
      int length = IsJSArray() ?
          Smi::cast(JSArray::cast(this)->length())->value() :
          FixedArray::cast(elements())->length();
      for (int i = 0; i < length; i++) {
        if (!FixedArray::cast(elements())->get(i)->IsTheHole()) {
          if (storage != NULL) {
            storage->set(counter, Smi::FromInt(i));
          }
          counter++;
        }
      }
      DCHECK(!storage || storage->length() >= counter);
      break;
    }
    case FAST_DOUBLE_ELEMENTS:
    case FAST_HOLEY_DOUBLE_ELEMENTS: {
      int length = IsJSArray() ?
          Smi::cast(JSArray::cast(this)->length())->value() :
          FixedArrayBase::cast(elements())->length();
      for (int i = 0; i < length; i++) {
        if (!FixedDoubleArray::cast(elements())->is_the_hole(i)) {
          if (storage != NULL) {
            storage->set(counter, Smi::FromInt(i));
          }
          counter++;
        }
      }
      DCHECK(!storage || storage->length() >= counter);
      break;
    }

#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                       \
    case EXTERNAL_##TYPE##_ELEMENTS:                                          \
    case TYPE##_ELEMENTS:                                                     \

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
    {
      // Typed arrays have no holes: every index below length is a key.
      int length = FixedArrayBase::cast(elements())->length();
      while (counter < length) {
        if (storage != NULL) {
          storage->set(counter, Smi::FromInt(counter));
        }
        counter++;
      }
      DCHECK(!storage || storage->length() >= counter);
      break;
    }

    case DICTIONARY_ELEMENTS: {
      if (storage != NULL) {
        element_dictionary()->CopyKeysTo(storage,
                                         filter,
                                         SeededNumberDictionary::SORTED);
      }
      counter += element_dictionary()->NumberOfElementsFilterAttributes(filter);
      break;
    }
    case SLOPPY_ARGUMENTS_ELEMENTS: {
      FixedArray* parameter_map = FixedArray::cast(elements());
      int mapped_length = parameter_map->length() - 2;
      FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
      if (arguments->IsDictionary()) {
        // Copy the keys from arguments first, because Dictionary::CopyKeysTo
        // will insert in storage starting at index 0.
        SeededNumberDictionary* dictionary =
            SeededNumberDictionary::cast(arguments);
        if (storage != NULL) {
          dictionary->CopyKeysTo(
              storage, filter, SeededNumberDictionary::UNSORTED);
        }
        counter += dictionary->NumberOfElementsFilterAttributes(filter);
        // Add the mapped (non-hole) parameter indices.
        for (int i = 0; i < mapped_length; ++i) {
          if (!parameter_map->get(i + 2)->IsTheHole()) {
            if (storage != NULL) storage->set(counter, Smi::FromInt(i));
            ++counter;
          }
        }
        if (storage != NULL) storage->SortPairs(storage, counter);

      } else {
        int backing_length = arguments->length();
        int i = 0;
        // Indices covered by the parameter map: present when either the
        // mapped slot or the backing slot is non-hole.
        for (; i < mapped_length; ++i) {
          if (!parameter_map->get(i + 2)->IsTheHole()) {
            if (storage != NULL) storage->set(counter, Smi::FromInt(i));
            ++counter;
          } else if (i < backing_length && !arguments->get(i)->IsTheHole()) {
            if (storage != NULL) storage->set(counter, Smi::FromInt(i));
            ++counter;
          }
        }
        // Remaining backing-store indices past the mapped range.
        for (; i < backing_length; ++i) {
          if (storage != NULL) storage->set(counter, Smi::FromInt(i));
          ++counter;
        }
      }
      break;
    }
  }

  if (this->IsJSValue()) {
    // String wrappers additionally expose one element per character.
    Object* val = JSValue::cast(this)->value();
    if (val->IsString()) {
      String* str = String::cast(val);
      if (storage) {
        for (int i = 0; i < str->length(); i++) {
          storage->set(counter + i, Smi::FromInt(i));
        }
      }
      counter += str->length();
    }
  }
  DCHECK(!storage || storage->length() == counter);
  return counter;
}


// Writes the indices of own enumerable elements into |storage|; returns the
// count.
int JSObject::GetEnumElementKeys(FixedArray* storage) {
  return GetOwnElementKeys(storage, static_cast<PropertyAttributes>(DONT_ENUM));
}


// StringSharedKeys are used as keys in the eval cache.
class StringSharedKey : public HashTableKey {
 public:
  StringSharedKey(Handle<String> source,
                  Handle<SharedFunctionInfo> shared,
                  StrictMode strict_mode,
                  int scope_position)
      : source_(source),
        shared_(shared),
        strict_mode_(strict_mode),
        scope_position_(scope_position) { }

  // Matches against the 4-element FixedArray produced by AsHandle():
  // [shared, source, strict_mode, scope_position].
  bool IsMatch(Object* other) OVERRIDE {
    DisallowHeapAllocation no_allocation;
    if (!other->IsFixedArray()) return false;
    FixedArray* other_array = FixedArray::cast(other);
    SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
    if (shared != *shared_) return false;
    int strict_unchecked = Smi::cast(other_array->get(2))->value();
    DCHECK(strict_unchecked == SLOPPY || strict_unchecked == STRICT);
    StrictMode strict_mode = static_cast<StrictMode>(strict_unchecked);
    if (strict_mode != strict_mode_) return false;
    int scope_position = Smi::cast(other_array->get(3))->value();
    if (scope_position != scope_position_) return false;
    String* source = String::cast(other_array->get(1));
    return source->Equals(*source_);
  }

  static uint32_t StringSharedHashHelper(String* source,
                                         SharedFunctionInfo* shared,
                                         StrictMode strict_mode,
                                         int scope_position) {
    uint32_t hash = source->Hash();
    if (shared->HasSourceCode()) {
      // Instead of using the SharedFunctionInfo pointer in the hash
      // code computation, we use a combination of the hash of the
      // script source code and the start position of the calling scope.
      // We do this to ensure that the cache entries can survive garbage
      // collection.
      Script* script(Script::cast(shared->script()));
      hash ^= String::cast(script->source())->Hash();
      if (strict_mode == STRICT) hash ^= 0x8000;
      hash += scope_position;
    }
    return hash;
  }

  uint32_t Hash() OVERRIDE {
    return StringSharedHashHelper(*source_, *shared_, strict_mode_,
                                  scope_position_);
  }

  // Computes the hash of an already-stored key (same FixedArray layout as
  // IsMatch expects).
  uint32_t HashForObject(Object* obj) OVERRIDE {
    DisallowHeapAllocation no_allocation;
    FixedArray* other_array = FixedArray::cast(obj);
    SharedFunctionInfo* shared = SharedFunctionInfo::cast(other_array->get(0));
    String* source = String::cast(other_array->get(1));
    int strict_unchecked = Smi::cast(other_array->get(2))->value();
    DCHECK(strict_unchecked == SLOPPY || strict_unchecked == STRICT);
    StrictMode strict_mode = static_cast<StrictMode>(strict_unchecked);
    int scope_position = Smi::cast(other_array->get(3))->value();
    return StringSharedHashHelper(
        source, shared, strict_mode, scope_position);
  }


  // Materializes this key as the 4-element FixedArray stored form.
  Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
    Handle<FixedArray> array = isolate->factory()->NewFixedArray(4);
    array->set(0, *shared_);
    array->set(1, *source_);
    array->set(2, Smi::FromInt(strict_mode_));
    array->set(3, Smi::FromInt(scope_position_));
    return array;
  }

 private:
  Handle<String> source_;
  Handle<SharedFunctionInfo> shared_;
  StrictMode strict_mode_;
  int scope_position_;
};


// RegExpKey carries the source and flags of a regular expression as key.
class RegExpKey : public HashTableKey {
 public:
  RegExpKey(Handle<String> string, JSRegExp::Flags flags)
      : string_(string),
        flags_(Smi::FromInt(flags.value())) { }

  // Rather than storing the key in the hash table, a pointer to the
  // stored value is stored where the key should be. IsMatch then
  // compares the search key to the found object, rather than comparing
  // a key to a key.
  bool IsMatch(Object* obj) OVERRIDE {
    FixedArray* val = FixedArray::cast(obj);
    return string_->Equals(String::cast(val->get(JSRegExp::kSourceIndex)))
        && (flags_ == val->get(JSRegExp::kFlagsIndex));
  }

  uint32_t Hash() OVERRIDE { return RegExpHash(*string_, flags_); }

  Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
    // Plain hash maps, which is where regexp keys are used, don't
    // use this function.
    UNREACHABLE();
    return MaybeHandle<Object>().ToHandleChecked();
  }

  uint32_t HashForObject(Object* obj) OVERRIDE {
    FixedArray* val = FixedArray::cast(obj);
    return RegExpHash(String::cast(val->get(JSRegExp::kSourceIndex)),
                      Smi::cast(val->get(JSRegExp::kFlagsIndex)));
  }

  // Hash combines the source string's hash with the flag bits.
  static uint32_t RegExpHash(String* string, Smi* flags) {
    return string->Hash() + flags->value();
  }

  Handle<String> string_;
  Smi* flags_;
};


// Allocates the internalized form of a one-byte string key.
Handle<Object> OneByteStringKey::AsHandle(Isolate* isolate) {
  if (hash_field_ == 0) Hash();  // Ensure the hash field is computed.
  return isolate->factory()->NewOneByteInternalizedString(string_, hash_field_);
}


// Allocates the internalized form of a two-byte string key.
Handle<Object> TwoByteStringKey::AsHandle(Isolate* isolate) {
  if (hash_field_ == 0) Hash();  // Ensure the hash field is computed.
  return isolate->factory()->NewTwoByteInternalizedString(string_, hash_field_);
}


// Allocates the internalized form of a one-byte substring key.
Handle<Object> SeqOneByteSubStringKey::AsHandle(Isolate* isolate) {
  if (hash_field_ == 0) Hash();  // Ensure the hash field is computed.
  return isolate->factory()->NewOneByteInternalizedSubString(
      string_, from_, length_, hash_field_);
}


// Compares the [from_, from_ + length_) slice of the key's string against
// |string|.
bool SeqOneByteSubStringKey::IsMatch(Object* string) {
  Vector<const uint8_t> chars(string_->GetChars() + from_, length_);
  return String::cast(string)->IsOneByteEqualTo(chars);
}


// InternalizedStringKey carries a string/internalized-string object as key.
class InternalizedStringKey : public HashTableKey {
 public:
  explicit InternalizedStringKey(Handle<String> string)
      : string_(string) { }

  virtual bool IsMatch(Object* string) OVERRIDE {
    return String::cast(string)->Equals(*string_);
  }

  virtual uint32_t Hash() OVERRIDE { return string_->Hash(); }

  virtual uint32_t HashForObject(Object* other) OVERRIDE {
    return String::cast(other)->Hash();
  }

  virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
    // Internalize the string if possible.
    MaybeHandle<Map> maybe_map =
        isolate->factory()->InternalizedStringMapForString(string_);
    Handle<Map> map;
    if (maybe_map.ToHandle(&map)) {
      // In-place internalization: switching the map is sufficient.
      string_->set_map_no_write_barrier(*map);
      DCHECK(string_->IsInternalizedString());
      return string_;
    }
    // Otherwise allocate a new internalized string.
    return isolate->factory()->NewInternalizedStringImpl(
        string_, string_->length(), string_->hash_field());
  }

  static uint32_t StringHash(Object* obj) {
    return String::cast(obj)->Hash();
  }

  Handle<String> string_;
};


// Visits the table's prefix fields (everything before the element area).
template<typename Derived, typename Shape, typename Key>
void HashTable<Derived, Shape, Key>::IteratePrefix(ObjectVisitor* v) {
  IteratePointers(v, 0, kElementsStartOffset);
}


// Visits all element slots of the table.
template<typename Derived, typename Shape, typename Key>
void HashTable<Derived, Shape, Key>::IterateElements(ObjectVisitor* v) {
  IteratePointers(v,
                  kElementsStartOffset,
                  kHeaderSize + length() * kPointerSize);
}


// Allocates a new, empty hash table with room for |at_least_space_for|
// elements (or with exactly that capacity under USE_CUSTOM_MINIMUM_CAPACITY).
template<typename Derived, typename Shape, typename Key>
Handle<Derived> HashTable<Derived, Shape, Key>::New(
    Isolate* isolate,
    int at_least_space_for,
    MinimumCapacity capacity_option,
    PretenureFlag pretenure) {
  DCHECK(0 <= at_least_space_for);
  DCHECK(!capacity_option || base::bits::IsPowerOfTwo32(at_least_space_for));
  int capacity = (capacity_option == USE_CUSTOM_MINIMUM_CAPACITY)
                     ? at_least_space_for
                     : ComputeCapacity(at_least_space_for);
  if (capacity > HashTable::kMaxCapacity) {
    v8::internal::Heap::FatalProcessOutOfMemory("invalid table size", true);
  }

  Factory* factory = isolate->factory();
  int length = EntryToIndex(capacity);
  Handle<FixedArray> array = factory->NewFixedArray(length, pretenure);
  array->set_map_no_write_barrier(*factory->hash_table_map());
  Handle<Derived> table = Handle<Derived>::cast(array);

  table->SetNumberOfElements(0);
  table->SetNumberOfDeletedElements(0);
  table->SetCapacity(capacity);
  return table;
}


// Find entry for key otherwise return kNotFound.
int NameDictionary::FindEntry(Handle<Name> key) {
  if (!key->IsUniqueName()) {
    return DerivedHashTable::FindEntry(key);
  }

  // Optimized for unique names. Knowledge of the key type allows:
  // 1. Move the check if the key is unique out of the loop.
  // 2. Avoid comparing hash codes in unique-to-unique comparison.
  // 3. Detect a case when a dictionary key is not unique but the key is.
  //    In case of positive result the dictionary key may be replaced by the
  //    internalized string with minimal performance penalty. It gives a chance
  //    to perform further lookups in code stubs (and significant performance
  //    boost a certain style of code).

  // EnsureCapacity will guarantee the hash table is never full.
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(key->Hash(), capacity);
  uint32_t count = 1;

  while (true) {
    int index = EntryToIndex(entry);
    Object* element = get(index);
    if (element->IsUndefined()) break;  // Empty entry.
    if (*key == element) return entry;
    if (!element->IsUniqueName() &&
        !element->IsTheHole() &&
        Name::cast(element)->Equals(*key)) {
      // Replace a key that is a non-internalized string by the equivalent
      // internalized string for faster further lookups.
      set(index, *key);
      return entry;
    }
    DCHECK(element->IsTheHole() || !Name::cast(element)->Equals(*key));
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}


// Re-inserts all entries of this table into |new_table| (copying the prefix
// fields too), dropping deleted-element markers in the process.
template<typename Derived, typename Shape, typename Key>
void HashTable<Derived, Shape, Key>::Rehash(
    Handle<Derived> new_table,
    Key key) {
  DCHECK(NumberOfElements() < new_table->Capacity());

  // No allocation may happen while the cached write-barrier mode is in use.
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = new_table->GetWriteBarrierMode(no_gc);

  // Copy prefix to new array.
  for (int i = kPrefixStartIndex;
       i < kPrefixStartIndex + Shape::kPrefixSize;
       i++) {
    new_table->set(i, get(i), mode);
  }

  // Rehash the elements.
  int capacity = Capacity();
  for (int i = 0; i < capacity; i++) {
    uint32_t from_index = EntryToIndex(i);
    Object* k = get(from_index);
    if (IsKey(k)) {
      uint32_t hash = HashTable::HashForObject(key, k);
      uint32_t insertion_index =
          EntryToIndex(new_table->FindInsertionEntry(hash));
      // Copy the whole entry (key plus the remaining entry slots).
      for (int j = 0; j < Shape::kEntrySize; j++) {
        new_table->set(insertion_index + j, get(from_index + j), mode);
      }
    }
  }
  new_table->SetNumberOfElements(NumberOfElements());
  new_table->SetNumberOfDeletedElements(0);
}


// Returns the entry where key object |k| would land after at most |probe|
// probes, or |expected| as soon as the probe sequence reaches it.
template<typename Derived, typename Shape, typename Key>
uint32_t HashTable<Derived, Shape, Key>::EntryForProbe(
    Key key,
    Object* k,
    int probe,
    uint32_t expected) {
  uint32_t hash = HashTable::HashForObject(key, k);
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(hash, capacity);
  for (int i = 1; i < probe; i++) {
    if (entry == expected) return expected;
    entry = NextProbe(entry, i, capacity);
  }
  return entry;
}


// Swaps two whole entries (all of their slots) in place.
template<typename Derived, typename Shape, typename Key>
void HashTable<Derived, Shape, Key>::Swap(uint32_t entry1,
                                          uint32_t entry2,
                                          WriteBarrierMode mode) {
  int index1 = EntryToIndex(entry1);
  int index2 = EntryToIndex(entry2);
  Object* temp[Shape::kEntrySize];
  for (int j = 0; j < Shape::kEntrySize; j++) {
    temp[j] = get(index1 + j);
  }
  for (int j = 0; j < Shape::kEntrySize; j++) {
    set(index1 + j, get(index2 + j), mode);
  }
  for (int j = 0; j < Shape::kEntrySize; j++) {
    set(index2 + j, temp[j], mode);
  }
}


// In-place rehash: moves entries to their proper probe positions without
// allocating a new table.
template<typename Derived, typename Shape, typename Key>
void
HashTable<Derived, Shape, Key>::Rehash(Key key) {
  // In-place rehash: repeatedly sweeps the table, swapping elements toward
  // the entries their probe sequences dictate.  No allocation may happen
  // while raw Object* keys are held.
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = GetWriteBarrierMode(no_gc);
  uint32_t capacity = Capacity();
  bool done = false;
  for (int probe = 1; !done; probe++) {
    // All elements at entries given by one of the first _probe_ probes
    // are placed correctly. Other elements might need to be moved.
    done = true;
    for (uint32_t current = 0; current < capacity; current++) {
      Object* current_key = get(EntryToIndex(current));
      if (IsKey(current_key)) {
        uint32_t target = EntryForProbe(key, current_key, probe, current);
        if (current == target) continue;
        Object* target_key = get(EntryToIndex(target));
        if (!IsKey(target_key) ||
            EntryForProbe(key, target_key, probe, target) != target) {
          // Put the current element into the correct position.
          Swap(current, target, mode);
          // The other element will be processed on the next iteration.
          current--;
        } else {
          // The place for the current element is occupied. Leave the element
          // for the next probe.
          done = false;
        }
      }
    }
  }
}


// Grows |table| (to a capacity of at least 2 * (elements + n)) unless there
// is already enough free, non-deleted space for n more elements.  Returns
// either the original table or a freshly allocated, rehashed one.
template<typename Derived, typename Shape, typename Key>
Handle<Derived> HashTable<Derived, Shape, Key>::EnsureCapacity(
    Handle<Derived> table,
    int n,
    Key key,
    PretenureFlag pretenure) {
  Isolate* isolate = table->GetIsolate();
  int capacity = table->Capacity();
  int nof = table->NumberOfElements() + n;
  int nod = table->NumberOfDeletedElements();
  // Return if:
  //   50% is still free after adding n elements and
  //   at most 50% of the free elements are deleted elements.
  if (nod <= (capacity - nof) >> 1) {
    int needed_free = nof >> 1;
    if (nof + needed_free <= capacity) return table;
  }

  // Pretenure large tables that already live outside new space: they are
  // likely long-lived.
  const int kMinCapacityForPretenure = 256;
  bool should_pretenure = pretenure == TENURED ||
      ((capacity > kMinCapacityForPretenure) &&
      !isolate->heap()->InNewSpace(*table));
  Handle<Derived> new_table = HashTable::New(
      isolate,
      nof * 2,
      USE_DEFAULT_MINIMUM_CAPACITY,
      should_pretenure ? TENURED : NOT_TENURED);

  table->Rehash(new_table, key);
  return new_table;
}


// Shrinks |table| when it is at most 25% full, down to a capacity sized for
// the current element count.  Returns the original table when shrinking is
// not worthwhile.
template<typename Derived, typename Shape, typename Key>
Handle<Derived> HashTable<Derived, Shape, Key>::Shrink(Handle<Derived> table,
                                                       Key key) {
  int capacity = table->Capacity();
  int nof = table->NumberOfElements();

  // Shrink to fit the number of elements if only a quarter of the
  // capacity is filled with elements.
  if (nof > (capacity >> 2)) return table;
  // Allocate a new dictionary with room for at least the current
  // number of elements. The allocation method will make sure that
  // there is extra room in the dictionary for additions. Don't go
  // lower than room for 16 elements.
  int at_least_room_for = nof;
  if (at_least_room_for < 16) return table;

  Isolate* isolate = table->GetIsolate();
  const int kMinCapacityForPretenure = 256;
  bool pretenure =
      (at_least_room_for > kMinCapacityForPretenure) &&
      !isolate->heap()->InNewSpace(*table);
  Handle<Derived> new_table = HashTable::New(
      isolate,
      at_least_room_for,
      USE_DEFAULT_MINIMUM_CAPACITY,
      pretenure ? TENURED : NOT_TENURED);

  table->Rehash(new_table, key);
  return new_table;
}


// Returns the first free (undefined or hole) entry on the probe sequence
// for |hash|.  Termination relies on the table never being completely full.
template<typename Derived, typename Shape, typename Key>
uint32_t HashTable<Derived, Shape, Key>::FindInsertionEntry(uint32_t hash) {
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(hash, capacity);
  uint32_t count = 1;
  // EnsureCapacity will guarantee the hash table is never full.
  while (true) {
    Object* element = KeyAt(entry);
    if (element->IsUndefined() || element->IsTheHole()) break;
    entry = NextProbe(entry, count++, capacity);
  }
  return entry;
}


// Force instantiation of the template instances below.
// Please note this list is compiler dependent.

template class HashTable<StringTable, StringTableShape, HashTableKey*>;

template class HashTable<CompilationCacheTable,
                         CompilationCacheShape,
                         HashTableKey*>;

template class HashTable<MapCache, MapCacheShape, HashTableKey*>;

template class HashTable<ObjectHashTable,
                         ObjectHashTableShape,
                         Handle<Object> >;

template class HashTable<WeakHashTable, WeakHashTableShape<2>, Handle<Object> >;

template class Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >;

template class Dictionary<SeededNumberDictionary,
                          SeededNumberDictionaryShape,
                          uint32_t>;

template class Dictionary<UnseededNumberDictionary,
                          UnseededNumberDictionaryShape,
                          uint32_t>;

template Handle<SeededNumberDictionary>
Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
    New(Isolate*, int at_least_space_for, PretenureFlag pretenure);

template Handle<UnseededNumberDictionary>
Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
    New(Isolate*, int at_least_space_for, PretenureFlag pretenure);
template Handle<NameDictionary>
Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
    New(Isolate*, int n, PretenureFlag pretenure);

template Handle<SeededNumberDictionary>
Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
    AtPut(Handle<SeededNumberDictionary>, uint32_t, Handle<Object>);

template Handle<UnseededNumberDictionary>
Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
    AtPut(Handle<UnseededNumberDictionary>, uint32_t, Handle<Object>);

template Object*
Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
    SlowReverseLookup(Object* value);

template Object*
Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
    SlowReverseLookup(Object* value);

template void
Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
    CopyKeysTo(
        FixedArray*,
        PropertyAttributes,
        Dictionary<SeededNumberDictionary,
                   SeededNumberDictionaryShape,
                   uint32_t>::SortMode);

template Handle<Object>
Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::DeleteProperty(
    Handle<NameDictionary>, int, JSObject::DeleteMode);

template Handle<Object>
Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
    DeleteProperty(Handle<SeededNumberDictionary>, int, JSObject::DeleteMode);

template Handle<NameDictionary>
HashTable<NameDictionary, NameDictionaryShape, Handle<Name> >::
    New(Isolate*, int, MinimumCapacity, PretenureFlag);

template Handle<NameDictionary>
HashTable<NameDictionary, NameDictionaryShape, Handle<Name> >::
    Shrink(Handle<NameDictionary>, Handle<Name>);

template Handle<SeededNumberDictionary>
HashTable<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
    Shrink(Handle<SeededNumberDictionary>, uint32_t);

template void Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
    CopyKeysTo(
        FixedArray*,
        int,
        PropertyAttributes,
        Dictionary<
            NameDictionary, NameDictionaryShape, Handle<Name> >::SortMode);

template int
Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
    NumberOfElementsFilterAttributes(PropertyAttributes);

template Handle<NameDictionary>
Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::Add(
    Handle<NameDictionary>, Handle<Name>, Handle<Object>, PropertyDetails);

template void
Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
    GenerateNewEnumerationIndices(Handle<NameDictionary>);

template int
Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
    NumberOfElementsFilterAttributes(PropertyAttributes);

template Handle<SeededNumberDictionary>
Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
    Add(Handle<SeededNumberDictionary>,
        uint32_t,
        Handle<Object>,
        PropertyDetails);

template Handle<UnseededNumberDictionary>
Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
    Add(Handle<UnseededNumberDictionary>,
        uint32_t,
        Handle<Object>,
        PropertyDetails);

template Handle<SeededNumberDictionary>
Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
    EnsureCapacity(Handle<SeededNumberDictionary>, int, uint32_t);

template Handle<UnseededNumberDictionary>
Dictionary<UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t>::
    EnsureCapacity(Handle<UnseededNumberDictionary>, int, uint32_t);

template Handle<NameDictionary>
Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
    EnsureCapacity(Handle<NameDictionary>, int, Handle<Name>);

template
int Dictionary<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
    NumberOfEnumElements();

template
int Dictionary<NameDictionary, NameDictionaryShape, Handle<Name> >::
    NumberOfEnumElements();

template
int HashTable<SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t>::
    FindEntry(uint32_t);


// Compacts the dictionary elements of |object| below |limit| into a fresh
// dictionary with consecutive keys starting at 0 (defined values first,
// then undefineds), and installs it.  Returns the number of non-undefined
// values as a number, or -1 (Smi) to signal that the caller must bail out
// and sort in JS instead.
Handle<Object> JSObject::PrepareSlowElementsForSort(
    Handle<JSObject> object, uint32_t limit) {
  DCHECK(object->HasDictionaryElements());
  Isolate* isolate = object->GetIsolate();
  // Must stay in dictionary mode, either because of requires_slow_elements,
  // or because we are not going to sort (and therefore compact) all of the
  // elements.
  Handle<SeededNumberDictionary> dict(object->element_dictionary(), isolate);
  Handle<SeededNumberDictionary> new_dict =
      SeededNumberDictionary::New(isolate, dict->NumberOfElements());

  uint32_t pos = 0;
  uint32_t undefs = 0;
  int capacity = dict->Capacity();
  Handle<Smi> bailout(Smi::FromInt(-1), isolate);
  // Entry to the new dictionary does not cause it to grow, as we have
  // allocated one that is large enough for all entries.
  DisallowHeapAllocation no_gc;
  for (int i = 0; i < capacity; i++) {
    Object* k = dict->KeyAt(i);
    if (!dict->IsKey(k)) continue;

    // Element dictionary keys are non-negative numbers within uint32 range.
    DCHECK(k->IsNumber());
    DCHECK(!k->IsSmi() || Smi::cast(k)->value() >= 0);
    DCHECK(!k->IsHeapNumber() || HeapNumber::cast(k)->value() >= 0);
    DCHECK(!k->IsHeapNumber() || HeapNumber::cast(k)->value() <= kMaxUInt32);

    HandleScope scope(isolate);
    Handle<Object> value(dict->ValueAt(i), isolate);
    PropertyDetails details = dict->DetailsAt(i);
    if (details.type() == CALLBACKS || details.IsReadOnly()) {
      // Bail out and do the sorting of undefineds and array holes in JS.
      // Also bail out if the element is not supposed to be moved.
      return bailout;
    }

    uint32_t key = NumberToUint32(k);
    if (key < limit) {
      if (value->IsUndefined()) {
        // Undefineds are counted now and appended after all defined values.
        undefs++;
      } else if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
        // Adding an entry with the key beyond smi-range requires
        // allocation. Bailout.
        return bailout;
      } else {
        Handle<Object> result = SeededNumberDictionary::AddNumberEntry(
            new_dict, pos, value, details);
        DCHECK(result.is_identical_to(new_dict));
        USE(result);
        pos++;
      }
    } else if (key > static_cast<uint32_t>(Smi::kMaxValue)) {
      // Adding an entry with the key beyond smi-range requires
      // allocation. Bailout.
      return bailout;
    } else {
      // Keys at or beyond |limit| keep their original index.
      Handle<Object> result = SeededNumberDictionary::AddNumberEntry(
          new_dict, key, value, details);
      DCHECK(result.is_identical_to(new_dict));
      USE(result);
    }
  }

  uint32_t result = pos;
  PropertyDetails no_details = PropertyDetails(NONE, NORMAL, 0);
  // Append the counted undefineds directly after the defined values.
  while (undefs > 0) {
    if (pos > static_cast<uint32_t>(Smi::kMaxValue)) {
      // Adding an entry with the key beyond smi-range requires
      // allocation. Bailout.
      return bailout;
    }
    HandleScope scope(isolate);
    Handle<Object> result = SeededNumberDictionary::AddNumberEntry(
        new_dict, pos, isolate->factory()->undefined_value(), no_details);
    DCHECK(result.is_identical_to(new_dict));
    USE(result);
    pos++;
    undefs--;
  }

  object->set_elements(*new_dict);

  AllowHeapAllocation allocate_return_value;
  return isolate->factory()->NewNumberFromUint(result);
}


// Collects all defined (non-hole) and non-undefined (array) elements at
// the start of the elements array.
// If the object is in dictionary mode, it is converted to fast elements
// mode.
Handle<Object> JSObject::PrepareElementsForSort(Handle<JSObject> object,
                                                uint32_t limit) {
  Isolate* isolate = object->GetIsolate();
  if (object->HasSloppyArgumentsElements() ||
      object->map()->is_observed()) {
    // These element kinds cannot be compacted in place; signal the caller
    // with -1.
    return handle(Smi::FromInt(-1), isolate);
  }

  if (object->HasDictionaryElements()) {
    // Convert to fast elements containing only the existing properties.
    // Ordering is irrelevant, since we are going to sort anyway.
    Handle<SeededNumberDictionary> dict(object->element_dictionary());
    if (object->IsJSArray() || dict->requires_slow_elements() ||
        dict->max_number_key() >= limit) {
      return JSObject::PrepareSlowElementsForSort(object, limit);
    }
    // Convert to fast elements.

    Handle<Map> new_map =
        JSObject::GetElementsTransitionMap(object, FAST_HOLEY_ELEMENTS);

    PretenureFlag tenure = isolate->heap()->InNewSpace(*object) ?
        NOT_TENURED: TENURED;
    Handle<FixedArray> fast_elements =
        isolate->factory()->NewFixedArray(dict->NumberOfElements(), tenure);
    dict->CopyValuesTo(*fast_elements);
    JSObject::ValidateElements(object);

    JSObject::SetMapAndElements(object, new_map, fast_elements);
  } else if (object->HasExternalArrayElements() ||
             object->HasFixedTypedArrayElements()) {
    // Typed arrays cannot have holes or undefined elements.
    return handle(Smi::FromInt(
        FixedArrayBase::cast(object->elements())->length()), isolate);
  } else if (!object->HasFastDoubleElements()) {
    EnsureWritableFastElements(object);
  }
  DCHECK(object->HasFastSmiOrObjectElements() ||
         object->HasFastDoubleElements());

  // Collect holes at the end, undefined before that and the rest at the
  // start, and return the number of non-hole, non-undefined values.

  Handle<FixedArrayBase> elements_base(object->elements());
  uint32_t elements_length = static_cast<uint32_t>(elements_base->length());
  if (limit > elements_length) {
    limit = elements_length;
  }
  if (limit == 0) {
    return handle(Smi::FromInt(0), isolate);
  }

  uint32_t result = 0;
  if (elements_base->map() == isolate->heap()->fixed_double_array_map()) {
    FixedDoubleArray* elements = FixedDoubleArray::cast(*elements_base);
    // Split elements into defined and the_hole, in that order.
    unsigned int holes = limit;
    // Assume most arrays contain no holes and undefined values, so minimize the
    // number of stores of non-undefined, non-the-hole values.
    for (unsigned int i = 0; i < holes; i++) {
      if (elements->is_the_hole(i)) {
        holes--;
      } else {
        continue;
      }
      // Position i needs to be filled.
      while (holes > i) {
        if (elements->is_the_hole(holes)) {
          holes--;
        } else {
          // Move the last non-hole value below |holes| down into slot i.
          elements->set(i, elements->get_scalar(holes));
          break;
        }
      }
    }
    result = holes;
    // Fill the tail with holes.
    while (holes < limit) {
      elements->set_the_hole(holes);
      holes++;
    }
  } else {
    FixedArray* elements = FixedArray::cast(*elements_base);
    DisallowHeapAllocation no_gc;

    // Split elements into defined, undefined and the_hole, in that order. Only
    // count locations for undefined and the hole, and fill them afterwards.
    WriteBarrierMode write_barrier = elements->GetWriteBarrierMode(no_gc);
    unsigned int undefs = limit;
    unsigned int holes = limit;
    // Assume most arrays contain no holes and undefined values, so minimize the
    // number of stores of non-undefined, non-the-hole values.
    for (unsigned int i = 0; i < undefs; i++) {
      Object* current = elements->get(i);
      if (current->IsTheHole()) {
        holes--;
        undefs--;
      } else if (current->IsUndefined()) {
        undefs--;
      } else {
        continue;
      }
      // Position i needs to be filled.
      while (undefs > i) {
        current = elements->get(undefs);
        if (current->IsTheHole()) {
          holes--;
          undefs--;
        } else if (current->IsUndefined()) {
          undefs--;
        } else {
          // Move the last defined value below |undefs| down into slot i.
          elements->set(i, current, write_barrier);
          break;
        }
      }
    }
    result = undefs;
    // Fill [undefs, holes) with undefined and [holes, limit) with holes.
    while (undefs < holes) {
      elements->set_undefined(undefs);
      undefs++;
    }
    while (holes < limit) {
      elements->set_the_hole(holes);
      holes++;
    }
  }

  return isolate->factory()->NewNumberFromUint(result);
}


// Maps the elements backing-store instance type (external or fixed typed
// array) to the corresponding ExternalArrayType.
ExternalArrayType JSTypedArray::type() {
  switch (elements()->map()->instance_type()) {
#define INSTANCE_TYPE_TO_ARRAY_TYPE(Type, type, TYPE, ctype, size)            \
    case EXTERNAL_##TYPE##_ARRAY_TYPE:                                        \
    case FIXED_##TYPE##_ARRAY_TYPE:                                           \
      return kExternal##Type##Array;

    TYPED_ARRAYS(INSTANCE_TYPE_TO_ARRAY_TYPE)
#undef INSTANCE_TYPE_TO_ARRAY_TYPE

    default:
      UNREACHABLE();
      return static_cast<ExternalArrayType>(-1);
  }
}


// Returns the per-element byte size of this typed array's backing store.
// NOTE(review): only EXTERNAL_* instance types are handled here (unlike
// type() above, which also handles FIXED_* types) — confirm callers never
// reach this with a fixed typed array backing store.
size_t JSTypedArray::element_size() {
  switch (elements()->map()->instance_type()) {
#define INSTANCE_TYPE_TO_ELEMENT_SIZE(Type, type, TYPE, ctype, size)          \
    case EXTERNAL_##TYPE##_ARRAY_TYPE:                                        \
      return size;

    TYPED_ARRAYS(INSTANCE_TYPE_TO_ELEMENT_SIZE)
#undef INSTANCE_TYPE_TO_ELEMENT_SIZE

    default:
      UNREACHABLE();
      return 0;
  }
}


// Stores |value| at |index| with Uint8Clamped semantics (clamp to [0, 255],
// round doubles to nearest integer) when the index is in bounds, and
// returns the stored value as a Smi.
Handle<Object> ExternalUint8ClampedArray::SetValue(
    Handle<ExternalUint8ClampedArray> array,
    uint32_t index,
    Handle<Object> value) {
  uint8_t clamped_value = 0;
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsSmi()) {
      int int_value = Handle<Smi>::cast(value)->value();
      if (int_value < 0) {
        clamped_value = 0;
      } else if (int_value > 255) {
        clamped_value = 255;
      } else {
        clamped_value = static_cast<uint8_t>(int_value);
      }
    } else if (value->IsHeapNumber()) {
      double double_value = Handle<HeapNumber>::cast(value)->value();
      if (!(double_value > 0)) {
        // NaN and less than zero clamp to zero.
        clamped_value = 0;
      } else if (double_value > 255) {
        // Greater than 255 clamp to 255.
        clamped_value = 255;
      } else {
        // Other doubles are rounded to the nearest integer.
        clamped_value = static_cast<uint8_t>(lrint(double_value));
      }
    } else {
      // Clamp undefined to zero (default). All other types have been
      // converted to a number type further up in the call chain.
      DCHECK(value->IsUndefined());
    }
    array->set(index, clamped_value);
  }
  return handle(Smi::FromInt(clamped_value), array->GetIsolate());
}


// Shared helper for the integer external-array setters below: converts
// |value| (Smi, HeapNumber, or undefined) to ValueType and stores it at
// |index| when in bounds.  Always returns the converted value as a number.
template<typename ExternalArrayClass, typename ValueType>
static Handle<Object> ExternalArrayIntSetter(
    Isolate* isolate,
    Handle<ExternalArrayClass> receiver,
    uint32_t index,
    Handle<Object> value) {
  ValueType cast_value = 0;
  if (index < static_cast<uint32_t>(receiver->length())) {
    if (value->IsSmi()) {
      int int_value = Handle<Smi>::cast(value)->value();
      cast_value = static_cast<ValueType>(int_value);
    } else if (value->IsHeapNumber()) {
      double double_value = Handle<HeapNumber>::cast(value)->value();
      cast_value = static_cast<ValueType>(DoubleToInt32(double_value));
    } else {
      // Clamp undefined to zero (default). All other types have been
      // converted to a number type further up in the call chain.
      DCHECK(value->IsUndefined());
    }
    receiver->set(index, cast_value);
  }
  return isolate->factory()->NewNumberFromInt(cast_value);
}


Handle<Object> ExternalInt8Array::SetValue(Handle<ExternalInt8Array> array,
                                           uint32_t index,
                                           Handle<Object> value) {
  return ExternalArrayIntSetter<ExternalInt8Array, int8_t>(
      array->GetIsolate(), array, index, value);
}


Handle<Object> ExternalUint8Array::SetValue(Handle<ExternalUint8Array> array,
                                            uint32_t index,
                                            Handle<Object> value) {
  return ExternalArrayIntSetter<ExternalUint8Array, uint8_t>(
      array->GetIsolate(), array, index, value);
}


Handle<Object> ExternalInt16Array::SetValue(Handle<ExternalInt16Array> array,
                                            uint32_t index,
                                            Handle<Object> value) {
  return ExternalArrayIntSetter<ExternalInt16Array, int16_t>(
      array->GetIsolate(), array, index, value);
}


Handle<Object> ExternalUint16Array::SetValue(Handle<ExternalUint16Array> array,
                                             uint32_t index,
                                             Handle<Object> value) {
  return ExternalArrayIntSetter<ExternalUint16Array, uint16_t>(
      array->GetIsolate(), array, index, value);
}


Handle<Object> ExternalInt32Array::SetValue(Handle<ExternalInt32Array> array,
                                            uint32_t index,
                                            Handle<Object> value) {
  return ExternalArrayIntSetter<ExternalInt32Array, int32_t>(
      array->GetIsolate(), array, index, value);
}


// Uint32 needs its own setter (DoubleToUint32 instead of DoubleToInt32 and
// an unsigned result), so it does not use ExternalArrayIntSetter.
Handle<Object> ExternalUint32Array::SetValue(
    Handle<ExternalUint32Array> array,
    uint32_t index,
    Handle<Object> value) {
  uint32_t cast_value = 0;
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsSmi()) {
      int int_value = Handle<Smi>::cast(value)->value();
      cast_value = static_cast<uint32_t>(int_value);
    } else if (value->IsHeapNumber()) {
      double double_value = Handle<HeapNumber>::cast(value)->value();
      cast_value = static_cast<uint32_t>(DoubleToUint32(double_value));
    } else {
      // Clamp undefined to zero (default). All other types have been
      // converted to a number type further up in the call chain.
      DCHECK(value->IsUndefined());
    }
    array->set(index, cast_value);
  }
  return array->GetIsolate()->factory()->NewNumberFromUint(cast_value);
}


// Float arrays default to NaN (not zero) for undefined values.
Handle<Object> ExternalFloat32Array::SetValue(
    Handle<ExternalFloat32Array> array,
    uint32_t index,
    Handle<Object> value) {
  float cast_value = static_cast<float>(base::OS::nan_value());
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsSmi()) {
      int int_value = Handle<Smi>::cast(value)->value();
      cast_value = static_cast<float>(int_value);
    } else if (value->IsHeapNumber()) {
      double double_value = Handle<HeapNumber>::cast(value)->value();
      cast_value = static_cast<float>(double_value);
    } else {
      // Clamp undefined to NaN (default). All other types have been
      // converted to a number type further up in the call chain.
      DCHECK(value->IsUndefined());
    }
    array->set(index, cast_value);
  }
  return array->GetIsolate()->factory()->NewNumber(cast_value);
}


Handle<Object> ExternalFloat64Array::SetValue(
    Handle<ExternalFloat64Array> array,
    uint32_t index,
    Handle<Object> value) {
  double double_value = base::OS::nan_value();
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsNumber()) {
      double_value = value->Number();
    } else {
      // Clamp undefined to NaN (default). All other types have been
      // converted to a number type further up in the call chain.
      DCHECK(value->IsUndefined());
    }
    array->set(index, double_value);
  }
  return array->GetIsolate()->factory()->NewNumber(double_value);
}


// Returns the PropertyCell for |name| on the (slow-mode) global object,
// creating a new hole-valued cell marked as deleted and adding it to the
// property dictionary when no entry exists yet.
Handle<PropertyCell> JSGlobalObject::EnsurePropertyCell(
    Handle<JSGlobalObject> global,
    Handle<Name> name) {
  DCHECK(!global->HasFastProperties());
  int entry = global->property_dictionary()->FindEntry(name);
  if (entry == NameDictionary::kNotFound) {
    Isolate* isolate = global->GetIsolate();
    Handle<PropertyCell> cell = isolate->factory()->NewPropertyCell(
        isolate->factory()->the_hole_value());
    PropertyDetails details(NONE, NORMAL, 0);
    details = details.AsDeleted();
    Handle<NameDictionary> dictionary = NameDictionary::Add(
        handle(global->property_dictionary()), name, cell, details);
    global->set_properties(*dictionary);
    return cell;
  } else {
    Object* value = global->property_dictionary()->ValueAt(entry);
    DCHECK(value->IsPropertyCell());
    return handle(PropertyCell::cast(value));
  }
}


// This class is used for looking up two character strings in the string table.
// If we don't have a hit we don't want to waste much time so we unroll the
// string hash calculation loop here for speed. Doesn't work if the two
// characters form a decimal integer, since such strings have a different hash
// algorithm.
class TwoCharHashTableKey : public HashTableKey {
 public:
  TwoCharHashTableKey(uint16_t c1, uint16_t c2, uint32_t seed)
      : c1_(c1), c2_(c2) {
    // Char 1.
    uint32_t hash = seed;
    hash += c1;
    hash += hash << 10;
    hash ^= hash >> 6;
    // Char 2.
    hash += c2;
    hash += hash << 10;
    hash ^= hash >> 6;
    // GetHash.
    hash += hash << 3;
    hash ^= hash >> 11;
    hash += hash << 15;
    // Zero hashes are reserved; substitute the designated non-zero value.
    if ((hash & String::kHashBitMask) == 0) hash = StringHasher::kZeroHash;
    hash_ = hash;
#ifdef DEBUG
    // If this assert fails then we failed to reproduce the two-character
    // version of the string hashing algorithm above. One reason could be
    // that we were passed two digits as characters, since the hash
    // algorithm is different in that case.
    uint16_t chars[2] = {c1, c2};
    uint32_t check_hash = StringHasher::HashSequentialString(chars, 2, seed);
    hash = (hash << String::kHashShift) | String::kIsNotArrayIndexMask;
    DCHECK_EQ(static_cast<int32_t>(hash), static_cast<int32_t>(check_hash));
#endif
  }

  // Matches any two-character string with exactly our two characters.
  bool IsMatch(Object* o) OVERRIDE {
    if (!o->IsString()) return false;
    String* other = String::cast(o);
    if (other->length() != 2) return false;
    if (other->Get(0) != c1_) return false;
    return other->Get(1) == c2_;
  }

  uint32_t Hash() OVERRIDE { return hash_; }
  uint32_t HashForObject(Object* key) OVERRIDE {
    if (!key->IsString()) return 0;
    return String::cast(key)->Hash();
  }

  Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
    // The TwoCharHashTableKey is only used for looking in the string
    // table, not for adding to it.
    UNREACHABLE();
    return MaybeHandle<Object>().ToHandleChecked();
  }

 private:
  uint16_t c1_;
  uint16_t c2_;
  uint32_t hash_;
};


// Returns |string| itself when already internalized, otherwise looks for an
// existing internalized copy in the string table (never adds one).
MaybeHandle<String> StringTable::InternalizeStringIfExists(
    Isolate* isolate,
    Handle<String> string) {
  if (string->IsInternalizedString()) {
    return string;
  }
  return LookupStringIfExists(isolate, string);
}


MaybeHandle<String> StringTable::LookupStringIfExists(
    Isolate* isolate,
    Handle<String> string) {
  Handle<StringTable> string_table = isolate->factory()->string_table();
  InternalizedStringKey key(string);
  int entry = string_table->FindEntry(&key);
  if (entry == kNotFound) {
    return MaybeHandle<String>();
  } else {
    Handle<String> result(String::cast(string_table->KeyAt(entry)), isolate);
    DCHECK(StringShape(*result).IsInternalized());
    return result;
  }
}


MaybeHandle<String> StringTable::LookupTwoCharsStringIfExists(
    Isolate* isolate,
    uint16_t c1,
    uint16_t c2) {
  Handle<StringTable> string_table = isolate->factory()->string_table();
  TwoCharHashTableKey key(c1, c2, isolate->heap()->HashSeed());
  int entry = string_table->FindEntry(&key);
  if (entry == kNotFound) {
    return MaybeHandle<String>();
  } else {
    Handle<String> result(String::cast(string_table->KeyAt(entry)), isolate);
    DCHECK(StringShape(*result).IsInternalized());
    return result;
  }
}


Handle<String> StringTable::LookupString(Isolate* isolate,
                                         Handle<String> string) {
  InternalizedStringKey key(string);
  return LookupKey(isolate, &key);
}


// Looks up |key| in the string table, adding the string (growing the table
// as needed) when absent.
Handle<String> StringTable::LookupKey(Isolate* isolate, HashTableKey* key) {
  Handle<StringTable> table = isolate->factory()->string_table();
  int entry = table->FindEntry(key);

  //
String already in table.
  if (entry != kNotFound) {
    return handle(String::cast(table->KeyAt(entry)), isolate);
  }

  // Adding new string. Grow table if needed.
  table = StringTable::EnsureCapacity(table, 1, key);

  // Create string object.
  Handle<Object> string = key->AsHandle(isolate);
  // There must be no attempts to internalize strings that could throw
  // InvalidStringLength error.
  CHECK(!string.is_null());

  // Add the new string and return it along with the string table.
  entry = table->FindInsertionEntry(key->Hash());
  table->set(EntryToIndex(entry), *string);
  table->ElementAdded();

  isolate->factory()->set_string_table(table);
  return Handle<String>::cast(string);
}


// Looks up cached compiled code for a script source in |context|; the value
// lives in the slot following the key.  Returns undefined on a miss.
Handle<Object> CompilationCacheTable::Lookup(Handle<String> src,
                                             Handle<Context> context) {
  Isolate* isolate = GetIsolate();
  Handle<SharedFunctionInfo> shared(context->closure()->shared());
  StringSharedKey key(src, shared, FLAG_use_strict ? STRICT : SLOPPY,
                      RelocInfo::kNoPosition);
  int entry = FindEntry(&key);
  if (entry == kNotFound) return isolate->factory()->undefined_value();
  return Handle<Object>(get(EntryToIndex(entry) + 1), isolate);
}


Handle<Object> CompilationCacheTable::LookupEval(
    Handle<String> src, Handle<SharedFunctionInfo> outer_info,
    StrictMode strict_mode, int scope_position) {
  Isolate* isolate = GetIsolate();
  // Cache key is the tuple (source, outer shared function info, scope position)
  // to unambiguously identify the context chain the cached eval code assumes.
  StringSharedKey key(src, outer_info, strict_mode, scope_position);
  int entry = FindEntry(&key);
  if (entry == kNotFound) return isolate->factory()->undefined_value();
  return Handle<Object>(get(EntryToIndex(entry) + 1), isolate);
}


Handle<Object> CompilationCacheTable::LookupRegExp(Handle<String> src,
                                                   JSRegExp::Flags flags) {
  Isolate* isolate = GetIsolate();
  DisallowHeapAllocation no_allocation;
  RegExpKey key(src, flags);
  int entry = FindEntry(&key);
  if (entry == kNotFound) return isolate->factory()->undefined_value();
  return Handle<Object>(get(EntryToIndex(entry) + 1), isolate);
}


// Inserts (src, context) -> value into the cache, growing it if needed.
// Returns the (possibly reallocated) cache table.
Handle<CompilationCacheTable> CompilationCacheTable::Put(
    Handle<CompilationCacheTable> cache, Handle<String> src,
    Handle<Context> context, Handle<Object> value) {
  Isolate* isolate = cache->GetIsolate();
  Handle<SharedFunctionInfo> shared(context->closure()->shared());
  StringSharedKey key(src, shared, FLAG_use_strict ?
                      STRICT : SLOPPY,
                      RelocInfo::kNoPosition);
  // Grow first: EnsureCapacity may allocate, so it must happen before the
  // insertion entry is computed.
  cache = EnsureCapacity(cache, 1, &key);
  Handle<Object> k = key.AsHandle(isolate);
  int entry = cache->FindInsertionEntry(key.Hash());
  cache->set(EntryToIndex(entry), *k);
  cache->set(EntryToIndex(entry) + 1, *value);
  cache->ElementAdded();
  return cache;
}


// Inserts an eval compilation result keyed on
// (source, outer shared info, strict mode, scope position).
// Returns the possibly reallocated cache table.
Handle<CompilationCacheTable> CompilationCacheTable::PutEval(
    Handle<CompilationCacheTable> cache, Handle<String> src,
    Handle<SharedFunctionInfo> outer_info, Handle<SharedFunctionInfo> value,
    int scope_position) {
  Isolate* isolate = cache->GetIsolate();
  StringSharedKey key(src, outer_info, value->strict_mode(), scope_position);
  cache = EnsureCapacity(cache, 1, &key);
  Handle<Object> k = key.AsHandle(isolate);
  int entry = cache->FindInsertionEntry(key.Hash());
  cache->set(EntryToIndex(entry), *k);
  cache->set(EntryToIndex(entry) + 1, *value);
  cache->ElementAdded();
  return cache;
}


// Inserts compiled RegExp data.  Returns the possibly reallocated table.
Handle<CompilationCacheTable> CompilationCacheTable::PutRegExp(
    Handle<CompilationCacheTable> cache, Handle<String> src,
    JSRegExp::Flags flags, Handle<FixedArray> value) {
  RegExpKey key(src, flags);
  cache = EnsureCapacity(cache, 1, &key);
  int entry = cache->FindInsertionEntry(key.Hash());
  // We store the value in the key slot, and compare the search key
  // to the stored value with a custom IsMatch function during lookups.
  cache->set(EntryToIndex(entry), *value);
  cache->set(EntryToIndex(entry) + 1, *value);
  cache->ElementAdded();
  return cache;
}


// Removes every entry whose value slot holds the given object, replacing
// both key and value with the hole.  Runs without allocation, so the raw
// write (NoWriteBarrierSet) is safe here because the hole needs no barrier.
void CompilationCacheTable::Remove(Object* value) {
  DisallowHeapAllocation no_allocation;
  Object* the_hole_value = GetHeap()->the_hole_value();
  for (int entry = 0, size = Capacity(); entry < size; entry++) {
    int entry_index = EntryToIndex(entry);
    int value_index = entry_index + 1;
    if (get(value_index) == value) {
      NoWriteBarrierSet(this, entry_index, the_hole_value);
      NoWriteBarrierSet(this, value_index, the_hole_value);
      ElementRemoved();
    }
  }
  return;
}


// StringsKey used for HashTable where key is array of internalized strings.
class StringsKey : public HashTableKey {
 public:
  explicit StringsKey(Handle<FixedArray> strings) : strings_(strings) { }

  // Two keys match only if the arrays have identical length and identical
  // elements (pointer equality suffices: strings are internalized).
  bool IsMatch(Object* strings) OVERRIDE {
    FixedArray* o = FixedArray::cast(strings);
    int len = strings_->length();
    if (o->length() != len) return false;
    for (int i = 0; i < len; i++) {
      if (o->get(i) != strings_->get(i)) return false;
    }
    return true;
  }

  uint32_t Hash() OVERRIDE { return HashForObject(*strings_); }

  // Hash is the XOR of the element hashes; note this is order-insensitive.
  uint32_t HashForObject(Object* obj) OVERRIDE {
    FixedArray* strings = FixedArray::cast(obj);
    int len = strings->length();
    uint32_t hash = 0;
    for (int i = 0; i < len; i++) {
      hash ^= String::cast(strings->get(i))->Hash();
    }
    return hash;
  }

  Handle<Object> AsHandle(Isolate* isolate) OVERRIDE { return strings_; }

 private:
  Handle<FixedArray> strings_;
};


// Looks up the cached map for a list of internalized property names.
// Returns undefined on a miss.
Object* MapCache::Lookup(FixedArray* array) {
  DisallowHeapAllocation no_alloc;
  StringsKey key(handle(array));
  int entry = FindEntry(&key);
  if (entry == kNotFound) return
      GetHeap()->undefined_value();
  return get(EntryToIndex(entry) + 1);
}


// Inserts a (names array -> map) pair.  Returns the possibly reallocated
// cache table, which the caller must adopt.
Handle<MapCache> MapCache::Put(
    Handle<MapCache> map_cache, Handle<FixedArray> array, Handle<Map> value) {
  StringsKey key(array);

  Handle<MapCache> new_cache = EnsureCapacity(map_cache, 1, &key);
  int entry = new_cache->FindInsertionEntry(key.Hash());
  new_cache->set(EntryToIndex(entry), *array);
  new_cache->set(EntryToIndex(entry) + 1, *value);
  new_cache->ElementAdded();
  return new_cache;
}


// Allocates a new dictionary with room for at_least_space_for elements and
// the enumeration counter reset to its initial value.
template<typename Derived, typename Shape, typename Key>
Handle<Derived> Dictionary<Derived, Shape, Key>::New(
    Isolate* isolate,
    int at_least_space_for,
    PretenureFlag pretenure) {
  DCHECK(0 <= at_least_space_for);
  Handle<Derived> dict = DerivedHashTable::New(isolate,
                                               at_least_space_for,
                                               USE_DEFAULT_MINIMUM_CAPACITY,
                                               pretenure);

  // Initialize the next enumeration index.
  dict->SetNextEnumerationIndex(PropertyDetails::kInitialIndex);
  return dict;
}


// Renumbers the enumeration indices of all live entries into a dense
// 0-based (plus kInitialIndex) sequence that preserves the existing
// relative enumeration order.  Used when the index counter would overflow.
template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::GenerateNewEnumerationIndices(
    Handle<Derived> dictionary) {
  Factory* factory = dictionary->GetIsolate()->factory();
  int length = dictionary->NumberOfElements();

  // Allocate and initialize iteration order array.
  Handle<FixedArray> iteration_order = factory->NewFixedArray(length);
  for (int i = 0; i < length; i++) {
    iteration_order->set(i, Smi::FromInt(i));
  }

  // Allocate array with enumeration order.
  Handle<FixedArray> enumeration_order = factory->NewFixedArray(length);

  // Fill the enumeration order array with property details.
  int capacity = dictionary->Capacity();
  int pos = 0;
  for (int i = 0; i < capacity; i++) {
    if (dictionary->IsKey(dictionary->KeyAt(i))) {
      int index = dictionary->DetailsAt(i).dictionary_index();
      enumeration_order->set(pos++, Smi::FromInt(index));
    }
  }

  // Sort the arrays wrt. enumeration order.
  iteration_order->SortPairs(*enumeration_order, enumeration_order->length());

  // Overwrite the enumeration_order with the enumeration indices.
  for (int i = 0; i < length; i++) {
    int index = Smi::cast(iteration_order->get(i))->value();
    int enum_index = PropertyDetails::kInitialIndex + i;
    enumeration_order->set(index, Smi::FromInt(enum_index));
  }

  // Update the dictionary with new indices.
  capacity = dictionary->Capacity();
  pos = 0;
  for (int i = 0; i < capacity; i++) {
    if (dictionary->IsKey(dictionary->KeyAt(i))) {
      int enum_index = Smi::cast(enumeration_order->get(pos++))->value();
      PropertyDetails details = dictionary->DetailsAt(i);
      PropertyDetails new_details = PropertyDetails(
          details.attributes(), details.type(), enum_index);
      dictionary->DetailsAtPut(i, new_details);
    }
  }

  // Set the next enumeration index.
  dictionary->SetNextEnumerationIndex(PropertyDetails::kInitialIndex+length);
}


// Grows the dictionary so n more elements can be added; first renumbers
// enumeration indices if adding n entries would exhaust the index space.
template<typename Derived, typename Shape, typename Key>
Handle<Derived> Dictionary<Derived, Shape, Key>::EnsureCapacity(
    Handle<Derived> dictionary, int n, Key key) {
  // Check whether there are enough enumeration indices to add n elements.
  if (Shape::kIsEnumerable &&
      !PropertyDetails::IsValidIndex(dictionary->NextEnumerationIndex() + n)) {
    // If not, we generate new indices for the properties.
    GenerateNewEnumerationIndices(dictionary);
  }
  return DerivedHashTable::EnsureCapacity(dictionary, n, key);
}


// Deletes the entry unless it is non-configurable and the deletion is not
// forced.  Returns true_value on success, false_value otherwise.
template<typename Derived, typename Shape, typename Key>
Handle<Object> Dictionary<Derived, Shape, Key>::DeleteProperty(
    Handle<Derived> dictionary,
    int entry,
    JSObject::DeleteMode mode) {
  Factory* factory = dictionary->GetIsolate()->factory();
  PropertyDetails details = dictionary->DetailsAt(entry);
  // Ignore attributes if forcing a deletion.
  if (!details.IsConfigurable() && mode != JSReceiver::FORCE_DELETION) {
    return factory->false_value();
  }

  dictionary->SetEntry(
      entry, factory->the_hole_value(), factory->the_hole_value());
  dictionary->ElementRemoved();
  return factory->true_value();
}


// Sets key to value, overwriting an existing entry or adding a new one with
// default (NONE, NORMAL) details.  Returns the possibly reallocated table.
template<typename Derived, typename Shape, typename Key>
Handle<Derived> Dictionary<Derived, Shape, Key>::AtPut(
    Handle<Derived> dictionary, Key key, Handle<Object> value) {
  int entry = dictionary->FindEntry(key);

  // If the entry is present set the value.
  if (entry != Dictionary::kNotFound) {
    dictionary->ValueAtPut(entry, *value);
    return dictionary;
  }

  // Check whether the dictionary should be extended.
  dictionary = EnsureCapacity(dictionary, 1, key);
#ifdef DEBUG
  // In debug mode, flush out any allocation that key-handle creation would
  // perform, so allocation bugs surface even on the fresh-insert path.
  USE(Shape::AsHandle(dictionary->GetIsolate(), key));
#endif
  PropertyDetails details = PropertyDetails(NONE, NORMAL, 0);

  AddEntry(dictionary, key, value, details, dictionary->Hash(key));
  return dictionary;
}


// Adds a new entry; the key must not already be present.  Returns the
// possibly reallocated table.
template<typename Derived, typename Shape, typename Key>
Handle<Derived> Dictionary<Derived, Shape, Key>::Add(
    Handle<Derived> dictionary,
    Key key,
    Handle<Object> value,
    PropertyDetails details) {
  // Validate key is absent.
  SLOW_DCHECK((dictionary->FindEntry(key) == Dictionary::kNotFound));
  // Check whether the dictionary should be extended.
  dictionary = EnsureCapacity(dictionary, 1, key);

  AddEntry(dictionary, key, value, details, dictionary->Hash(key));
  return dictionary;
}


// Add a key, value pair to the dictionary.
template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::AddEntry(
    Handle<Derived> dictionary,
    Key key,
    Handle<Object> value,
    PropertyDetails details,
    uint32_t hash) {
  // Compute the key object.
  Handle<Object> k = Shape::AsHandle(dictionary->GetIsolate(), key);

  uint32_t entry = dictionary->FindInsertionEntry(hash);
  // Insert element at empty or deleted entry.
  if (!details.IsDeleted() &&
      details.dictionary_index() == 0 &&
      Shape::kIsEnumerable) {
    // Assign an enumeration index to the property and update
    // SetNextEnumerationIndex.
    int index = dictionary->NextEnumerationIndex();
    details = PropertyDetails(details.attributes(), details.type(), index);
    dictionary->SetNextEnumerationIndex(index + 1);
  }
  dictionary->SetEntry(entry, k, value, details);
  DCHECK((dictionary->KeyAt(entry)->IsNumber() ||
          dictionary->KeyAt(entry)->IsName()));
  dictionary->ElementAdded();
}


// Tracks the largest numeric key seen, or switches the backing store into
// "slow elements" mode once keys exceed kRequiresSlowElementsLimit.
void SeededNumberDictionary::UpdateMaxNumberKey(uint32_t key) {
  DisallowHeapAllocation no_allocation;
  // If the dictionary requires slow elements an element has already
  // been added at a high index.
  if (requires_slow_elements()) return;
  // Check if this index is high enough that we should require slow
  // elements.
  if (key > kRequiresSlowElementsLimit) {
    set_requires_slow_elements();
    return;
  }
  // Update max key value.
  // The max key is stored shifted by kRequiresSlowElementsTagSize so the low
  // bits remain available as a tag.
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi() || max_number_key() < key) {
    FixedArray::set(kMaxNumberKeyIndex,
                    Smi::FromInt(key << kRequiresSlowElementsTagSize));
  }
}


// Adds a fresh numeric entry (key must be absent) and keeps the
// max-number-key bookkeeping up to date.  Returns the possibly
// reallocated dictionary.
Handle<SeededNumberDictionary> SeededNumberDictionary::AddNumberEntry(
    Handle<SeededNumberDictionary> dictionary,
    uint32_t key,
    Handle<Object> value,
    PropertyDetails details) {
  dictionary->UpdateMaxNumberKey(key);
  SLOW_DCHECK(dictionary->FindEntry(key) == kNotFound);
  return Add(dictionary, key, value, details);
}


// Unseeded variant: no max-key bookkeeping, default property details.
Handle<UnseededNumberDictionary> UnseededNumberDictionary::AddNumberEntry(
    Handle<UnseededNumberDictionary> dictionary,
    uint32_t key,
    Handle<Object> value) {
  SLOW_DCHECK(dictionary->FindEntry(key) == kNotFound);
  return Add(dictionary, key, value, PropertyDetails(NONE, NORMAL, 0));
}


// Insert-or-overwrite for a numeric key, with max-key bookkeeping.
Handle<SeededNumberDictionary> SeededNumberDictionary::AtNumberPut(
    Handle<SeededNumberDictionary> dictionary,
    uint32_t key,
    Handle<Object> value) {
  dictionary->UpdateMaxNumberKey(key);
  return AtPut(dictionary, key, value);
}


// Insert-or-overwrite for a numeric key (unseeded variant).
Handle<UnseededNumberDictionary> UnseededNumberDictionary::AtNumberPut(
    Handle<UnseededNumberDictionary> dictionary,
    uint32_t key,
    Handle<Object> value) {
  return AtPut(dictionary, key, value);
}


// Sets key to value with the given details; adds the entry when absent,
// otherwise overwrites in place while keeping the old enumeration index.
Handle<SeededNumberDictionary> SeededNumberDictionary::Set(
    Handle<SeededNumberDictionary> dictionary,
    uint32_t key,
    Handle<Object> value,
    PropertyDetails details) {
  int entry = dictionary->FindEntry(key);
  if (entry == kNotFound) {
    return AddNumberEntry(dictionary, key, value, details);
  }
  // Preserve enumeration index.
  details = PropertyDetails(details.attributes(),
                            details.type(),
                            dictionary->DetailsAt(entry).dictionary_index());
  Handle<Object> object_key =
      SeededNumberDictionaryShape::AsHandle(dictionary->GetIsolate(), key);
  dictionary->SetEntry(entry, object_key, value, details);
  return dictionary;
}


// Unseeded variant of Set: no property details to preserve.
Handle<UnseededNumberDictionary> UnseededNumberDictionary::Set(
    Handle<UnseededNumberDictionary> dictionary,
    uint32_t key,
    Handle<Object> value) {
  int entry = dictionary->FindEntry(key);
  if (entry == kNotFound) return AddNumberEntry(dictionary, key, value);
  Handle<Object> object_key =
      UnseededNumberDictionaryShape::AsHandle(dictionary->GetIsolate(), key);
  dictionary->SetEntry(entry, object_key, value);
  return dictionary;
}



// Counts live (non-deleted) entries whose attributes pass the filter and
// whose keys are not filtered out by FilterKey.
template<typename Derived, typename Shape, typename Key>
int Dictionary<Derived, Shape, Key>::NumberOfElementsFilterAttributes(
    PropertyAttributes filter) {
  int capacity = DerivedHashTable::Capacity();
  int result = 0;
  for (int i = 0; i < capacity; i++) {
    Object* k = DerivedHashTable::KeyAt(i);
    if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) {
      PropertyDetails details = DetailsAt(i);
      if (details.IsDeleted()) continue;
      PropertyAttributes attr = details.attributes();
      if ((attr & filter) == 0) result++;
    }
  }
  return result;
}


// Number of entries visible to for-in: excludes DONT_ENUM and symbolic keys.
template<typename Derived, typename Shape, typename Key>
int Dictionary<Derived, Shape, Key>::NumberOfEnumElements() {
  return NumberOfElementsFilterAttributes(
      static_cast<PropertyAttributes>(DONT_ENUM | SYMBOLIC));
}


// Copies all keys passing the attribute filter into storage, starting at
// index 0, optionally sorting them afterwards.
template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::CopyKeysTo(
    FixedArray* storage,
    PropertyAttributes filter,
    typename Dictionary<Derived, Shape, Key>::SortMode
        sort_mode) {
  DCHECK(storage->length() >= NumberOfElementsFilterAttributes(filter));
  int capacity = DerivedHashTable::Capacity();
  int index = 0;
  for (int i = 0; i < capacity; i++) {
    Object* k = DerivedHashTable::KeyAt(i);
    if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) {
      PropertyDetails details = DetailsAt(i);
      if (details.IsDeleted()) continue;
      PropertyAttributes attr = details.attributes();
      if ((attr & filter) == 0) storage->set(index++, k);
    }
  }
  if (sort_mode == Dictionary::SORTED) {
    storage->SortPairs(storage, index);
  }
  DCHECK(storage->length() >= index);
}


// Orders dictionary entries (given as Smi entry indices) by their stored
// enumeration index; used to produce for-in property order.
struct EnumIndexComparator {
  explicit EnumIndexComparator(NameDictionary* dict) : dict(dict) { }
  bool operator() (Smi* a, Smi* b) {
    PropertyDetails da(dict->DetailsAt(a->value()));
    PropertyDetails db(dict->DetailsAt(b->value()));
    return da.dictionary_index() < db.dictionary_index();
  }
  NameDictionary* dict;
};


// Fills storage with the enumerable, non-symbol keys of this dictionary in
// enumeration-index order.  storage->length() must equal the number of such
// keys (CHECK'd below).
void NameDictionary::CopyEnumKeysTo(FixedArray* storage) {
  int length = storage->length();
  int capacity = Capacity();
  int properties = 0;
  // First pass: collect the raw entry indices of all enumerable entries.
  for (int i = 0; i < capacity; i++) {
    Object* k = KeyAt(i);
    if (IsKey(k) && !k->IsSymbol()) {
      PropertyDetails details = DetailsAt(i);
      if (details.IsDeleted() || details.IsDontEnum()) continue;
      storage->set(properties, Smi::FromInt(i));
      properties++;
      if (properties == length) break;
    }
  }
  CHECK_EQ(length, properties);
  // Sort the entry indices by enumeration index, then replace each index
  // with the actual key.  Sorting raw Smi slots in place is safe: no
  // allocation happens and Smis need no write barrier.
  EnumIndexComparator cmp(this);
  Smi** start = reinterpret_cast<Smi**>(storage->GetFirstElementAddress());
  std::sort(start, start + length, cmp);
  for (int i = 0; i < length; i++) {
    int index = Smi::cast(storage->get(i))->value();
    storage->set(i, KeyAt(index));
  }
}


// Same as the other CopyKeysTo overload, but appends starting at the given
// storage index instead of 0.
template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::CopyKeysTo(
    FixedArray* storage,
    int index,
    PropertyAttributes filter,
    typename Dictionary<Derived, Shape, Key>::SortMode sort_mode) {
  DCHECK(storage->length() >= NumberOfElementsFilterAttributes(filter));
  int capacity = DerivedHashTable::Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = DerivedHashTable::KeyAt(i);
    if (DerivedHashTable::IsKey(k) && !FilterKey(k, filter)) {
      PropertyDetails details = DetailsAt(i);
      if (details.IsDeleted()) continue;
      PropertyAttributes attr = details.attributes();
      if ((attr & filter) == 0) storage->set(index++, k);
    }
  }
  if (sort_mode == Dictionary::SORTED) {
    storage->SortPairs(storage, index);
  }
  DCHECK(storage->length() >= index);
}


// Backwards lookup (slow).
template<typename Derived, typename Shape, typename Key>
Object* Dictionary<Derived, Shape, Key>::SlowReverseLookup(Object* value) {
  int capacity = DerivedHashTable::Capacity();
  for (int i = 0; i < capacity; i++) {
    Object* k = DerivedHashTable::KeyAt(i);
    if (Dictionary::IsKey(k)) {
      Object* e = ValueAt(i);
      // Values may be boxed in PropertyCells (global dictionaries); compare
      // against the cell's payload.
      if (e->IsPropertyCell()) {
        e = PropertyCell::cast(e)->value();
      }
      if (e == value) return k;
    }
  }
  Heap* heap = Dictionary::GetHeap();
  return heap->undefined_value();
}


// Returns the value mapped to key, or the hole when absent.
Object* ObjectHashTable::Lookup(Handle<Object> key) {
  DisallowHeapAllocation no_gc;
  DCHECK(IsKey(*key));

  // If the object does not have an identity hash, it was never used as a key.
  Object* hash = key->GetHash();
  if (hash->IsUndefined()) {
    return GetHeap()->the_hole_value();
  }
  int entry = FindEntry(key);
  if (entry == kNotFound) return GetHeap()->the_hole_value();
  return get(EntryToIndex(entry) + 1);
}


// Maps key to value, creating the key's identity hash if necessary.
// Returns the possibly reallocated table, which the caller must adopt.
Handle<ObjectHashTable> ObjectHashTable::Put(Handle<ObjectHashTable> table,
                                             Handle<Object> key,
                                             Handle<Object> value) {
  DCHECK(table->IsKey(*key));
  // The hole marks deleted entries, so it must never be stored as a value.
  DCHECK(!value->IsTheHole());

  Isolate* isolate = table->GetIsolate();

  // Make sure the key object has an identity hash code.
  Handle<Smi> hash = Object::GetOrCreateHash(isolate, key);

  int entry = table->FindEntry(key);

  // Key is already in table, just overwrite value.
  if (entry != kNotFound) {
    table->set(EntryToIndex(entry) + 1, *value);
    return table;
  }

  // Check whether the hash table should be extended.
  table = EnsureCapacity(table, 1, key);
  table->AddEntry(table->FindInsertionEntry(hash->value()),
                  *key,
                  *value);
  return table;
}


// Removes key if present; reports via *was_present whether it was found.
// Returns the possibly shrunk table.
Handle<ObjectHashTable> ObjectHashTable::Remove(Handle<ObjectHashTable> table,
                                                Handle<Object> key,
                                                bool* was_present) {
  DCHECK(table->IsKey(*key));

  // No identity hash means the key was never inserted anywhere.
  Object* hash = key->GetHash();
  if (hash->IsUndefined()) {
    *was_present = false;
    return table;
  }

  int entry = table->FindEntry(key);
  if (entry == kNotFound) {
    *was_present = false;
    return table;
  }

  *was_present = true;
  table->RemoveEntry(entry);
  return Shrink(table, key);
}


// Writes a key/value pair into a known-free entry and bumps the count.
void ObjectHashTable::AddEntry(int entry, Object* key, Object* value) {
  set(EntryToIndex(entry), key);
  set(EntryToIndex(entry) + 1, value);
  ElementAdded();
}


// Replaces an entry's key and value with the hole and bumps the count down.
void ObjectHashTable::RemoveEntry(int entry) {
  set_the_hole(EntryToIndex(entry));
  set_the_hole(EntryToIndex(entry) + 1);
  ElementRemoved();
}


// Returns the value mapped to key, or the hole when absent.
Object* WeakHashTable::Lookup(Handle<Object> key) {
  DisallowHeapAllocation no_gc;
  DCHECK(IsKey(*key));
  int entry = FindEntry(key);
  if (entry == kNotFound) return GetHeap()->the_hole_value();
  return get(EntryToValueIndex(entry));
}


// Maps key to value in the weak table.  Returns the possibly reallocated
// (always tenured) table.
Handle<WeakHashTable> WeakHashTable::Put(Handle<WeakHashTable> table,
                                         Handle<Object> key,
                                         Handle<Object> value) {
  DCHECK(table->IsKey(*key));
  int entry = table->FindEntry(key);
  // Key is already in table, just overwrite value.
  if (entry != kNotFound) {
    // TODO(ulan): Skipping write barrier is a temporary solution to avoid
    // memory leaks. Remove this once we have special visitor for weak fixed
    // arrays.
    table->set(EntryToValueIndex(entry), *value, SKIP_WRITE_BARRIER);
    return table;
  }

  // Check whether the hash table should be extended.
  table = EnsureCapacity(table, 1, key, TENURED);

  table->AddEntry(table->FindInsertionEntry(table->Hash(key)), key, value);
  return table;
}


// Writes a key/value pair into a known-free entry of the weak table.
void WeakHashTable::AddEntry(int entry,
                             Handle<Object> key,
                             Handle<Object> value) {
  DisallowHeapAllocation no_allocation;
  // TODO(ulan): Skipping write barrier is a temporary solution to avoid
  // memory leaks. Remove this once we have special visitor for weak fixed
  // arrays.
  set(EntryToIndex(entry), *key, SKIP_WRITE_BARRIER);
  set(EntryToValueIndex(entry), *value, SKIP_WRITE_BARRIER);
  ElementAdded();
}


// Allocates an ordered hash table with the given (rounded-up) capacity.
template<class Derived, class Iterator, int entrysize>
Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Allocate(
    Isolate* isolate, int capacity, PretenureFlag pretenure) {
  // Capacity must be a power of two, since we depend on being able
  // to divide and multiply by 2 (kLoadFactor) to derive capacity
  // from number of buckets. If we decide to change kLoadFactor
  // to something other than 2, capacity should be stored as another
  // field of this object.
  capacity = base::bits::RoundUpToPowerOfTwo32(Max(kMinCapacity, capacity));
  if (capacity > kMaxCapacity) {
    v8::internal::Heap::FatalProcessOutOfMemory("invalid table size", true);
  }
  int num_buckets = capacity / kLoadFactor;
  // Layout: header, then num_buckets bucket heads, then capacity entries of
  // kEntrySize slots each.
  Handle<FixedArray> backing_store = isolate->factory()->NewFixedArray(
      kHashTableStartIndex + num_buckets + (capacity * kEntrySize), pretenure);
  backing_store->set_map_no_write_barrier(
      isolate->heap()->ordered_hash_table_map());
  Handle<Derived> table = Handle<Derived>::cast(backing_store);
  for (int i = 0; i < num_buckets; ++i) {
    table->set(kHashTableStartIndex + i, Smi::FromInt(kNotFound));
  }
  table->SetNumberOfBuckets(num_buckets);
  table->SetNumberOfElements(0);
  table->SetNumberOfDeletedElements(0);
  return table;
}


// Ensures there is room to append one more entry, rehashing (and possibly
// doubling) when live + deleted entries fill the capacity.
template<class Derived, class Iterator, int entrysize>
Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::EnsureGrowable(
    Handle<Derived> table) {
  DCHECK(!table->IsObsolete());

  int nof = table->NumberOfElements();
  int nod = table->NumberOfDeletedElements();
  int capacity = table->Capacity();
  if ((nof + nod) < capacity) return table;
  // Don't need to grow if we can simply clear out deleted entries instead.
  // Note that we can't compact in place, though, so we always allocate
  // a new table.
  return Rehash(table, (nod < (capacity >> 1)) ? capacity << 1 : capacity);
}


// Halves capacity when the table is less than a quarter full.
template<class Derived, class Iterator, int entrysize>
Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Shrink(
    Handle<Derived> table) {
  DCHECK(!table->IsObsolete());

  int nof = table->NumberOfElements();
  int capacity = table->Capacity();
  if (nof >= (capacity >> 2)) return table;
  return Rehash(table, capacity / 2);
}


// Clears by allocating a fresh minimal table and marking the old one
// obsolete; deleted-count -1 signals "cleared" to live iterators.
template<class Derived, class Iterator, int entrysize>
Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Clear(
    Handle<Derived> table) {
  DCHECK(!table->IsObsolete());

  Handle<Derived> new_table =
      Allocate(table->GetIsolate(),
               kMinCapacity,
               table->GetHeap()->InNewSpace(*table) ? NOT_TENURED : TENURED);

  table->SetNextTable(*new_table);
  table->SetNumberOfDeletedElements(-1);

  return new_table;
}


// Removes key if present; reports via *was_present.  Returns the possibly
// shrunk table.
template<class Derived, class Iterator, int entrysize>
Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Remove(
    Handle<Derived> table, Handle<Object> key, bool* was_present) {
  int entry = table->FindEntry(key);
  if (entry == kNotFound) {
    *was_present = false;
    return table;
  }
  *was_present = true;
  table->RemoveEntry(entry);
  return Shrink(table);
}


// Copies live entries into a new table of new_capacity, preserving insertion
// order, and records the removed-hole positions in the old table so that
// iterators can adjust their indices during Transition().
template<class Derived, class Iterator, int entrysize>
Handle<Derived> OrderedHashTable<Derived, Iterator, entrysize>::Rehash(
    Handle<Derived> table, int new_capacity) {
  DCHECK(!table->IsObsolete());

  Handle<Derived> new_table =
      Allocate(table->GetIsolate(),
               new_capacity,
               table->GetHeap()->InNewSpace(*table) ? NOT_TENURED : TENURED);
  int nof = table->NumberOfElements();
  int nod = table->NumberOfDeletedElements();
  int new_buckets = new_table->NumberOfBuckets();
  int new_entry = 0;
  int removed_holes_index = 0;

  for (int old_entry = 0; old_entry < (nof + nod); ++old_entry) {
    Object* key = table->KeyAt(old_entry);
    if (key->IsTheHole()) {
      table->SetRemovedIndexAt(removed_holes_index++, old_entry);
      continue;
    }

    Object* hash = key->GetHash();
    // num_buckets is a power of two, so masking is equivalent to modulo.
    int bucket = Smi::cast(hash)->value() & (new_buckets - 1);
    Object* chain_entry = new_table->get(kHashTableStartIndex + bucket);
    new_table->set(kHashTableStartIndex + bucket, Smi::FromInt(new_entry));
    int new_index = new_table->EntryToIndex(new_entry);
    int old_index = table->EntryToIndex(old_entry);
    for (int i = 0; i < entrysize; ++i) {
      Object* value = table->get(old_index + i);
      new_table->set(new_index + i, value);
    }
    new_table->set(new_index + kChainOffset, chain_entry);
    ++new_entry;
  }

  DCHECK_EQ(nod, removed_holes_index);

  new_table->SetNumberOfElements(nof);
  table->SetNextTable(*new_table);

  return new_table;
}


// Finds the entry for key given its (Smi) hash by walking the bucket chain.
// Uses SameValueZero, so e.g. +0 and -0 are considered the same key.
template <class Derived, class Iterator, int entrysize>
int OrderedHashTable<Derived, Iterator, entrysize>::FindEntry(
    Handle<Object> key, int hash) {
  DCHECK(!IsObsolete());

  DisallowHeapAllocation no_gc;
  DCHECK(!key->IsTheHole());
  for (int entry = HashToEntry(hash); entry != kNotFound;
       entry = ChainAt(entry)) {
    Object* candidate = KeyAt(entry);
    if (candidate->SameValueZero(*key))
      return entry;
  }
  return kNotFound;
}


// FindEntry variant that derives the hash from the key; a key with no
// identity hash cannot be in the table.
template <class Derived, class Iterator, int entrysize>
int OrderedHashTable<Derived, Iterator, entrysize>::FindEntry(
    Handle<Object> key) {
  DisallowHeapAllocation no_gc;
  Object* hash
      = key->GetHash();
  if (!hash->IsSmi()) return kNotFound;
  return FindEntry(key, Smi::cast(hash)->value());
}


// Appends a new entry at the end (preserving insertion order), links it into
// its bucket chain, and returns the index of the entry's first slot.  The
// caller is responsible for writing the key (and value) slots.
template <class Derived, class Iterator, int entrysize>
int OrderedHashTable<Derived, Iterator, entrysize>::AddEntry(int hash) {
  DCHECK(!IsObsolete());

  int entry = UsedCapacity();
  int bucket = HashToBucket(hash);
  int index = EntryToIndex(entry);
  Object* chain_entry = get(kHashTableStartIndex + bucket);
  set(kHashTableStartIndex + bucket, Smi::FromInt(entry));
  set(index + kChainOffset, chain_entry);
  SetNumberOfElements(NumberOfElements() + 1);
  return index;
}


// Marks an entry deleted by holing its slots; the slot itself is reclaimed
// only at the next Rehash.
template<class Derived, class Iterator, int entrysize>
void OrderedHashTable<Derived, Iterator, entrysize>::RemoveEntry(int entry) {
  DCHECK(!IsObsolete());

  int index = EntryToIndex(entry);
  for (int i = 0; i < entrysize; ++i) {
    set_the_hole(index + i);
  }
  SetNumberOfElements(NumberOfElements() - 1);
  SetNumberOfDeletedElements(NumberOfDeletedElements() + 1);
}


// Explicit instantiations for OrderedHashSet (entrysize 1).
template Handle<OrderedHashSet>
OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Allocate(
    Isolate* isolate, int capacity, PretenureFlag pretenure);

template Handle<OrderedHashSet>
OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::EnsureGrowable(
    Handle<OrderedHashSet> table);

template Handle<OrderedHashSet>
OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Shrink(
    Handle<OrderedHashSet> table);

template Handle<OrderedHashSet>
OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Clear(
    Handle<OrderedHashSet> table);

template Handle<OrderedHashSet>
OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::Remove(
    Handle<OrderedHashSet> table, Handle<Object> key, bool* was_present);

template int OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::FindEntry(
    Handle<Object> key, int hash);
template int OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::FindEntry(
    Handle<Object> key);

template int
OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::AddEntry(int hash);

template void
OrderedHashTable<OrderedHashSet, JSSetIterator, 1>::RemoveEntry(int entry);


// Explicit instantiations for OrderedHashMap (entrysize 2: key + value).
template Handle<OrderedHashMap>
OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Allocate(
    Isolate* isolate, int capacity, PretenureFlag pretenure);

template Handle<OrderedHashMap>
OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::EnsureGrowable(
    Handle<OrderedHashMap> table);

template Handle<OrderedHashMap>
OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Shrink(
    Handle<OrderedHashMap> table);

template Handle<OrderedHashMap>
OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Clear(
    Handle<OrderedHashMap> table);

template Handle<OrderedHashMap>
OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::Remove(
    Handle<OrderedHashMap> table, Handle<Object> key, bool* was_present);

template int OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::FindEntry(
    Handle<Object> key, int hash);
template int OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::FindEntry(
    Handle<Object> key);

template int
OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::AddEntry(int hash);

template void
OrderedHashTable<OrderedHashMap, JSMapIterator, 2>::RemoveEntry(int entry);


// True when key is present (per SameValueZero semantics of FindEntry).
bool OrderedHashSet::Contains(Handle<Object> key) {
  return FindEntry(key) != kNotFound;
}


// Adds key to the set (no-op when already present).  Returns the possibly
// reallocated table.
Handle<OrderedHashSet> OrderedHashSet::Add(Handle<OrderedHashSet> table,
                                           Handle<Object> key) {
  int hash = GetOrCreateHash(table->GetIsolate(), key)->value();
  if (table->FindEntry(key, hash) !=
kNotFound) return table; 15644 15645 table = EnsureGrowable(table); 15646 15647 int index = table->AddEntry(hash); 15648 table->set(index, *key); 15649 return table; 15650 } 15651 15652 15653 Object* OrderedHashMap::Lookup(Handle<Object> key) { 15654 DisallowHeapAllocation no_gc; 15655 int entry = FindEntry(key); 15656 if (entry == kNotFound) return GetHeap()->the_hole_value(); 15657 return ValueAt(entry); 15658 } 15659 15660 15661 Handle<OrderedHashMap> OrderedHashMap::Put(Handle<OrderedHashMap> table, 15662 Handle<Object> key, 15663 Handle<Object> value) { 15664 DCHECK(!key->IsTheHole()); 15665 15666 int hash = GetOrCreateHash(table->GetIsolate(), key)->value(); 15667 int entry = table->FindEntry(key, hash); 15668 15669 if (entry != kNotFound) { 15670 table->set(table->EntryToIndex(entry) + kValueOffset, *value); 15671 return table; 15672 } 15673 15674 table = EnsureGrowable(table); 15675 15676 int index = table->AddEntry(hash); 15677 table->set(index, *key); 15678 table->set(index + kValueOffset, *value); 15679 return table; 15680 } 15681 15682 15683 template<class Derived, class TableType> 15684 void OrderedHashTableIterator<Derived, TableType>::Transition() { 15685 DisallowHeapAllocation no_allocation; 15686 TableType* table = TableType::cast(this->table()); 15687 if (!table->IsObsolete()) return; 15688 15689 int index = Smi::cast(this->index())->value(); 15690 while (table->IsObsolete()) { 15691 TableType* next_table = table->NextTable(); 15692 15693 if (index > 0) { 15694 int nod = table->NumberOfDeletedElements(); 15695 15696 // When we clear the table we set the number of deleted elements to -1. 
15697 if (nod == -1) { 15698 index = 0; 15699 } else { 15700 int old_index = index; 15701 for (int i = 0; i < nod; ++i) { 15702 int removed_index = table->RemovedIndexAt(i); 15703 if (removed_index >= old_index) break; 15704 --index; 15705 } 15706 } 15707 } 15708 15709 table = next_table; 15710 } 15711 15712 set_table(table); 15713 set_index(Smi::FromInt(index)); 15714 } 15715 15716 15717 template<class Derived, class TableType> 15718 bool OrderedHashTableIterator<Derived, TableType>::HasMore() { 15719 DisallowHeapAllocation no_allocation; 15720 if (this->table()->IsUndefined()) return false; 15721 15722 Transition(); 15723 15724 TableType* table = TableType::cast(this->table()); 15725 int index = Smi::cast(this->index())->value(); 15726 int used_capacity = table->UsedCapacity(); 15727 15728 while (index < used_capacity && table->KeyAt(index)->IsTheHole()) { 15729 index++; 15730 } 15731 15732 set_index(Smi::FromInt(index)); 15733 15734 if (index < used_capacity) return true; 15735 15736 set_table(GetHeap()->undefined_value()); 15737 return false; 15738 } 15739 15740 15741 template<class Derived, class TableType> 15742 Smi* OrderedHashTableIterator<Derived, TableType>::Next(JSArray* value_array) { 15743 DisallowHeapAllocation no_allocation; 15744 if (HasMore()) { 15745 FixedArray* array = FixedArray::cast(value_array->elements()); 15746 static_cast<Derived*>(this)->PopulateValueArray(array); 15747 MoveNext(); 15748 return Smi::cast(kind()); 15749 } 15750 return Smi::FromInt(0); 15751 } 15752 15753 15754 template Smi* 15755 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::Next( 15756 JSArray* value_array); 15757 15758 template bool 15759 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::HasMore(); 15760 15761 template void 15762 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::MoveNext(); 15763 15764 template Object* 15765 OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::CurrentKey(); 15766 15767 template void 15768 
OrderedHashTableIterator<JSSetIterator, OrderedHashSet>::Transition();


// Explicit instantiations of the iterator for JSMapIterator.
template Smi*
OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::Next(
    JSArray* value_array);

template bool
OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::HasMore();

template void
OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::MoveNext();

template Object*
OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::CurrentKey();

template void
OrderedHashTableIterator<JSMapIterator, OrderedHashMap>::Transition();


// Iterates over the fixed-size records serialized into the descriptor's
// byte array, starting at the beginning of the data.
DeclaredAccessorDescriptorIterator::DeclaredAccessorDescriptorIterator(
    DeclaredAccessorDescriptor* descriptor)
    : array_(descriptor->serialized_data()->GetDataStartAddress()),
      length_(descriptor->serialized_data()->length()),
      offset_(0) {
}


// Returns a pointer to the next serialized record and advances the cursor
// by one record size. Must not be called past the end (checked by DCHECK).
const DeclaredAccessorDescriptorData*
DeclaredAccessorDescriptorIterator::Next() {
  DCHECK(offset_ < length_);
  uint8_t* ptr = &array_[offset_];
  // Records are stored word-aligned so they can be reinterpreted in place.
  DCHECK(reinterpret_cast<uintptr_t>(ptr) % sizeof(uintptr_t) == 0);
  const DeclaredAccessorDescriptorData* data =
      reinterpret_cast<const DeclaredAccessorDescriptorData*>(ptr);
  offset_ += sizeof(*data);
  DCHECK(offset_ <= length_);
  return data;
}


// Creates a new descriptor whose serialized data is the previous
// descriptor's data (if any) followed by the new record, i.e. descriptors
// form an append-only chain of records.
Handle<DeclaredAccessorDescriptor> DeclaredAccessorDescriptor::Create(
    Isolate* isolate,
    const DeclaredAccessorDescriptorData& descriptor,
    Handle<DeclaredAccessorDescriptor> previous) {
  int previous_length =
      previous.is_null() ? 0 : previous->serialized_data()->length();
  int length = sizeof(descriptor) + previous_length;
  Handle<ByteArray> serialized_descriptor =
      isolate->factory()->NewByteArray(length);
  Handle<DeclaredAccessorDescriptor> value =
      isolate->factory()->NewDeclaredAccessorDescriptor();
  value->set_serialized_data(*serialized_descriptor);
  // Copy in the data.
  {
    // Raw pointers into the byte array are held below, so GC must not move
    // anything until the copy is complete.
    DisallowHeapAllocation no_allocation;
    uint8_t* array = serialized_descriptor->GetDataStartAddress();
    if (previous_length != 0) {
      uint8_t* previous_array =
          previous->serialized_data()->GetDataStartAddress();
      MemCopy(array, previous_array, previous_length);
      array += previous_length;
    }
    DCHECK(reinterpret_cast<uintptr_t>(array) % sizeof(uintptr_t) == 0);
    DeclaredAccessorDescriptorData* data =
        reinterpret_cast<DeclaredAccessorDescriptorData*>(array);
    // Append the new record after the copied prefix.
    *data = descriptor;
  }
  return value;
}


// Check if there is a break point at this code position.
bool DebugInfo::HasBreakPoint(int code_position) {
  // Get the break point info object for this code position.
  Object* break_point_info = GetBreakPointInfo(code_position);

  // If there is no break point info object or no break points in the break
  // point info object there is no break point at this code position.
  if (break_point_info->IsUndefined()) return false;
  return BreakPointInfo::cast(break_point_info)->GetBreakPointCount() > 0;
}


// Get the break point info object for this code position.
Object* DebugInfo::GetBreakPointInfo(int code_position) {
  // Find the index of the break point info object for this code position.
  int index = GetBreakPointInfoIndex(code_position);

  // Return the break point info object if any.
  if (index == kNoBreakPointInfo) return GetHeap()->undefined_value();
  return BreakPointInfo::cast(break_points()->get(index));
}


// Clear a break point at the specified code position.
void DebugInfo::ClearBreakPoint(Handle<DebugInfo> debug_info,
                                int code_position,
                                Handle<Object> break_point_object) {
  Handle<Object> break_point_info(debug_info->GetBreakPointInfo(code_position),
                                  debug_info->GetIsolate());
  // Nothing to clear if no break point info exists at this position.
  if (break_point_info->IsUndefined()) return;
  BreakPointInfo::ClearBreakPoint(
      Handle<BreakPointInfo>::cast(break_point_info),
      break_point_object);
}


// Registers break_point_object at the given code position, creating (and,
// if necessary, growing storage for) a BreakPointInfo record when the
// position had none before.
void DebugInfo::SetBreakPoint(Handle<DebugInfo> debug_info,
                              int code_position,
                              int source_position,
                              int statement_position,
                              Handle<Object> break_point_object) {
  Isolate* isolate = debug_info->GetIsolate();
  Handle<Object> break_point_info(debug_info->GetBreakPointInfo(code_position),
                                  isolate);
  // Fast path: a record for this position already exists; just add the
  // break point object to it.
  if (!break_point_info->IsUndefined()) {
    BreakPointInfo::SetBreakPoint(
        Handle<BreakPointInfo>::cast(break_point_info),
        break_point_object);
    return;
  }

  // Adding a new break point for a code position which did not have any
  // break points before. Try to find a free slot.
  int index = kNoBreakPointInfo;
  for (int i = 0; i < debug_info->break_points()->length(); i++) {
    if (debug_info->break_points()->get(i)->IsUndefined()) {
      index = i;
      break;
    }
  }
  if (index == kNoBreakPointInfo) {
    // No free slot - extend break point info array.
    Handle<FixedArray> old_break_points =
        Handle<FixedArray>(FixedArray::cast(debug_info->break_points()));
    Handle<FixedArray> new_break_points =
        isolate->factory()->NewFixedArray(
            old_break_points->length() +
            DebugInfo::kEstimatedNofBreakPointsInFunction);

    debug_info->set_break_points(*new_break_points);
    // Carry over the existing records; the new tail slots stay undefined.
    for (int i = 0; i < old_break_points->length(); i++) {
      new_break_points->set(i, old_break_points->get(i));
    }
    // First slot of the freshly added region.
    index = old_break_points->length();
  }
  DCHECK(index != kNoBreakPointInfo);

  // Allocate new BreakPointInfo object and set the break point.
  Handle<BreakPointInfo> new_break_point_info = Handle<BreakPointInfo>::cast(
      isolate->factory()->NewStruct(BREAK_POINT_INFO_TYPE));
  new_break_point_info->set_code_position(Smi::FromInt(code_position));
  new_break_point_info->set_source_position(Smi::FromInt(source_position));
  new_break_point_info->
      set_statement_position(Smi::FromInt(statement_position));
  new_break_point_info->set_break_point_objects(
      isolate->heap()->undefined_value());
  BreakPointInfo::SetBreakPoint(new_break_point_info, break_point_object);
  debug_info->break_points()->set(index, *new_break_point_info);
}


// Get the break point objects for a code position.
Object* DebugInfo::GetBreakPointObjects(int code_position) {
  Object* break_point_info = GetBreakPointInfo(code_position);
  if (break_point_info->IsUndefined()) {
    return GetHeap()->undefined_value();
  }
  return BreakPointInfo::cast(break_point_info)->break_point_objects();
}


// Get the total number of break points.
15942 int DebugInfo::GetBreakPointCount() { 15943 if (break_points()->IsUndefined()) return 0; 15944 int count = 0; 15945 for (int i = 0; i < break_points()->length(); i++) { 15946 if (!break_points()->get(i)->IsUndefined()) { 15947 BreakPointInfo* break_point_info = 15948 BreakPointInfo::cast(break_points()->get(i)); 15949 count += break_point_info->GetBreakPointCount(); 15950 } 15951 } 15952 return count; 15953 } 15954 15955 15956 Object* DebugInfo::FindBreakPointInfo(Handle<DebugInfo> debug_info, 15957 Handle<Object> break_point_object) { 15958 Heap* heap = debug_info->GetHeap(); 15959 if (debug_info->break_points()->IsUndefined()) return heap->undefined_value(); 15960 for (int i = 0; i < debug_info->break_points()->length(); i++) { 15961 if (!debug_info->break_points()->get(i)->IsUndefined()) { 15962 Handle<BreakPointInfo> break_point_info = 15963 Handle<BreakPointInfo>(BreakPointInfo::cast( 15964 debug_info->break_points()->get(i))); 15965 if (BreakPointInfo::HasBreakPointObject(break_point_info, 15966 break_point_object)) { 15967 return *break_point_info; 15968 } 15969 } 15970 } 15971 return heap->undefined_value(); 15972 } 15973 15974 15975 // Find the index of the break point info object for the specified code 15976 // position. 15977 int DebugInfo::GetBreakPointInfoIndex(int code_position) { 15978 if (break_points()->IsUndefined()) return kNoBreakPointInfo; 15979 for (int i = 0; i < break_points()->length(); i++) { 15980 if (!break_points()->get(i)->IsUndefined()) { 15981 BreakPointInfo* break_point_info = 15982 BreakPointInfo::cast(break_points()->get(i)); 15983 if (break_point_info->code_position()->value() == code_position) { 15984 return i; 15985 } 15986 } 15987 } 15988 return kNoBreakPointInfo; 15989 } 15990 15991 15992 // Remove the specified break point object. 
// Removes break_point_object from this record's break point set. The set
// is stored as undefined (empty), a single object, or a FixedArray of
// objects; each representation is handled in turn.
void BreakPointInfo::ClearBreakPoint(Handle<BreakPointInfo> break_point_info,
                                     Handle<Object> break_point_object) {
  Isolate* isolate = break_point_info->GetIsolate();
  // If there are no break points just ignore.
  if (break_point_info->break_point_objects()->IsUndefined()) return;
  // If there is a single break point clear it if it is the same.
  if (!break_point_info->break_point_objects()->IsFixedArray()) {
    if (break_point_info->break_point_objects() == *break_point_object) {
      break_point_info->set_break_point_objects(
          isolate->heap()->undefined_value());
    }
    return;
  }
  // If there are multiple break points shrink the array
  DCHECK(break_point_info->break_point_objects()->IsFixedArray());
  Handle<FixedArray> old_array =
      Handle<FixedArray>(
          FixedArray::cast(break_point_info->break_point_objects()));
  Handle<FixedArray> new_array =
      isolate->factory()->NewFixedArray(old_array->length() - 1);
  // Copy everything except the object being removed; found_count also
  // serves as the index shift for elements after the removed one.
  int found_count = 0;
  for (int i = 0; i < old_array->length(); i++) {
    if (old_array->get(i) == *break_point_object) {
      DCHECK(found_count == 0);
      found_count++;
    } else {
      new_array->set(i - found_count, old_array->get(i));
    }
  }
  // If the break point was found in the list change it.
  if (found_count > 0) break_point_info->set_break_point_objects(*new_array);
}


// Add the specified break point object.
void BreakPointInfo::SetBreakPoint(Handle<BreakPointInfo> break_point_info,
                                   Handle<Object> break_point_object) {
  Isolate* isolate = break_point_info->GetIsolate();

  // If there was no break point objects before just set it.
  if (break_point_info->break_point_objects()->IsUndefined()) {
    break_point_info->set_break_point_objects(*break_point_object);
    return;
  }
  // If the break point object is the same as before just ignore.
  if (break_point_info->break_point_objects() == *break_point_object) return;
  // If there was one break point object before replace with array.
  if (!break_point_info->break_point_objects()->IsFixedArray()) {
    Handle<FixedArray> array = isolate->factory()->NewFixedArray(2);
    array->set(0, break_point_info->break_point_objects());
    array->set(1, *break_point_object);
    break_point_info->set_break_point_objects(*array);
    return;
  }
  // If there was more than one break point before extend array.
  Handle<FixedArray> old_array =
      Handle<FixedArray>(
          FixedArray::cast(break_point_info->break_point_objects()));
  Handle<FixedArray> new_array =
      isolate->factory()->NewFixedArray(old_array->length() + 1);
  for (int i = 0; i < old_array->length(); i++) {
    // If the break point was there before just ignore.
    if (old_array->get(i) == *break_point_object) return;
    new_array->set(i, old_array->get(i));
  }
  // Add the new break point.
  new_array->set(old_array->length(), *break_point_object);
  break_point_info->set_break_point_objects(*new_array);
}


// Returns true if break_point_object is registered on this record,
// handling all three storage representations (undefined, single object,
// FixedArray).
bool BreakPointInfo::HasBreakPointObject(
    Handle<BreakPointInfo> break_point_info,
    Handle<Object> break_point_object) {
  // No break point.
  if (break_point_info->break_point_objects()->IsUndefined()) return false;
  // Single break point.
  if (!break_point_info->break_point_objects()->IsFixedArray()) {
    return break_point_info->break_point_objects() == *break_point_object;
  }
  // Multiple break points.
  FixedArray* array = FixedArray::cast(break_point_info->break_point_objects());
  for (int i = 0; i < array->length(); i++) {
    if (array->get(i) == *break_point_object) {
      return true;
    }
  }
  return false;
}


// Get the number of break points.
int BreakPointInfo::GetBreakPointCount() {
  // No break point.
  if (break_point_objects()->IsUndefined()) return 0;
  // Single break point.
  if (!break_point_objects()->IsFixedArray()) return 1;
  // Multiple break points.
  return FixedArray::cast(break_point_objects())->length();
}


// Static entry point: dispatches to DoGetField on the receiver with the
// field index decoded from a Smi.
Object* JSDate::GetField(Object* object, Smi* index) {
  return JSDate::cast(object)->DoGetField(
      static_cast<FieldIndex>(index->value()));
}


// Returns the requested date component. Cached local-time fields are
// recomputed lazily when the isolate-wide cache stamp has changed;
// UTC fields and the remaining local fields are computed on demand.
Object* JSDate::DoGetField(FieldIndex index) {
  // The primitive time value is read via value(), not through this path.
  DCHECK(index != kDateValue);

  DateCache* date_cache = GetIsolate()->date_cache();

  if (index < kFirstUncachedField) {
    Object* stamp = cache_stamp();
    // Stale cache (stamp mismatch) with a valid (Smi, i.e. non-NaN) stamp:
    // recompute all cached fields from the time value.
    if (stamp != date_cache->stamp() && stamp->IsSmi()) {
      // Since the stamp is not NaN, the value is also not NaN.
      int64_t local_time_ms =
          date_cache->ToLocal(static_cast<int64_t>(value()->Number()));
      SetCachedFields(local_time_ms, date_cache);
    }
    switch (index) {
      case kYear: return year();
      case kMonth: return month();
      case kDay: return day();
      case kWeekday: return weekday();
      case kHour: return hour();
      case kMinute: return min();
      case kSecond: return sec();
      default: UNREACHABLE();
    }
  }

  if (index >= kFirstUTCField) {
    return GetUTCField(index, value()->Number(), date_cache);
  }

  // Remaining local-time fields that are not cached.
  double time = value()->Number();
  if (std::isnan(time)) return GetIsolate()->heap()->nan_value();

  int64_t local_time_ms = date_cache->ToLocal(static_cast<int64_t>(time));
  int days = DateCache::DaysFromTime(local_time_ms);

  if (index == kDays) return Smi::FromInt(days);

  int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
  if (index == kMillisecond) return Smi::FromInt(time_in_day_ms % 1000);
  DCHECK(index == kTimeInDay);
  return Smi::FromInt(time_in_day_ms);
}


// Computes a UTC date component directly from the given time value;
// nothing here touches the cached local-time fields.
Object* JSDate::GetUTCField(FieldIndex index,
                            double value,
                            DateCache* date_cache) {
  DCHECK(index >= kFirstUTCField);

  // An invalid date yields NaN for every component.
  if (std::isnan(value)) return GetIsolate()->heap()->nan_value();

  int64_t time_ms = static_cast<int64_t>(value);

  if (index == kTimezoneOffset) {
    return Smi::FromInt(date_cache->TimezoneOffset(time_ms));
  }

  int days = DateCache::DaysFromTime(time_ms);

  if (index == kWeekdayUTC) return Smi::FromInt(date_cache->Weekday(days));

  // Year/month/day share one decomposition of the day number.
  if (index <= kDayUTC) {
    int year, month, day;
    date_cache->YearMonthDayFromDays(days, &year, &month, &day);
    if (index == kYearUTC) return Smi::FromInt(year);
    if (index == kMonthUTC) return Smi::FromInt(month);
    DCHECK(index == kDayUTC);
    return Smi::FromInt(day);
  }

  // Time-of-day components, all derived from milliseconds within the day.
  int time_in_day_ms = DateCache::TimeInDay(time_ms, days);
  switch (index) {
    case kHourUTC: return Smi::FromInt(time_in_day_ms / (60 * 60 * 1000));
    case kMinuteUTC: return Smi::FromInt((time_in_day_ms / (60 * 1000)) % 60);
    case kSecondUTC: return Smi::FromInt((time_in_day_ms / 1000) % 60);
    case kMillisecondUTC: return Smi::FromInt(time_in_day_ms % 1000);
    case kDaysUTC: return Smi::FromInt(days);
    case kTimeInDayUTC: return Smi::FromInt(time_in_day_ms);
    default: UNREACHABLE();
  }

  // All cases above return; this only silences the compiler.
  UNREACHABLE();
  return NULL;
}


// Sets the primitive time value and resets the field cache: for NaN dates
// every cached field is pinned to the NaN heap number, otherwise the cache
// stamp is invalidated so DoGetField recomputes on next access.
void JSDate::SetValue(Object* value, bool is_value_nan) {
  set_value(value);
  if (is_value_nan) {
    HeapNumber* nan = GetIsolate()->heap()->nan_value();
    // The NaN value is an immortal immovable heap object, hence the
    // write barrier can be skipped for these stores.
    set_cache_stamp(nan, SKIP_WRITE_BARRIER);
    set_year(nan, SKIP_WRITE_BARRIER);
    set_month(nan, SKIP_WRITE_BARRIER);
    set_day(nan, SKIP_WRITE_BARRIER);
    set_hour(nan, SKIP_WRITE_BARRIER);
    set_min(nan, SKIP_WRITE_BARRIER);
    set_sec(nan, SKIP_WRITE_BARRIER);
    set_weekday(nan, SKIP_WRITE_BARRIER);
  } else {
    // Invalid stamp forces DoGetField to recompute the cached fields the
    // next time one of them is read.
    set_cache_stamp(Smi::FromInt(DateCache::kInvalidStamp), SKIP_WRITE_BARRIER);
  }
}


// Recomputes and stores all cached local-time components for the given
// local time, and stamps the cache with the date cache's current stamp.
void JSDate::SetCachedFields(int64_t local_time_ms, DateCache* date_cache) {
  int days = DateCache::DaysFromTime(local_time_ms);
  int time_in_day_ms = DateCache::TimeInDay(local_time_ms, days);
  int year, month, day;
  date_cache->YearMonthDayFromDays(days, &year, &month, &day);
  int weekday = date_cache->Weekday(days);
  int hour = time_in_day_ms / (60 * 60 * 1000);
  int min = (time_in_day_ms / (60 * 1000)) % 60;
  int sec = (time_in_day_ms / 1000) % 60;
  set_cache_stamp(date_cache->stamp());
  // All stores below are Smis, so the write barrier can be skipped.
  set_year(Smi::FromInt(year), SKIP_WRITE_BARRIER);
  set_month(Smi::FromInt(month), SKIP_WRITE_BARRIER);
  set_day(Smi::FromInt(day), SKIP_WRITE_BARRIER);
  set_weekday(Smi::FromInt(weekday), SKIP_WRITE_BARRIER);
  set_hour(Smi::FromInt(hour), SKIP_WRITE_BARRIER);
  set_min(Smi::FromInt(min), SKIP_WRITE_BARRIER);
  set_sec(Smi::FromInt(sec), SKIP_WRITE_BARRIER);
}


// Detaches the buffer from its backing store and zeroes its length.
// Only externally-backed buffers can be neutered (checked below).
void JSArrayBuffer::Neuter() {
  DCHECK(is_external());
  set_backing_store(NULL);
  set_byte_length(Smi::FromInt(0));
}


// Resets a view's window into its buffer to empty.
void JSArrayBufferView::NeuterView() {
  set_byte_offset(Smi::FromInt(0));
  set_byte_length(Smi::FromInt(0));
}


void JSDataView::Neuter() {
  NeuterView();
}


// In addition to the generic view reset, typed arrays drop their length
// and swap in the canonical empty external array for their map.
void JSTypedArray::Neuter() {
  NeuterView();
  set_length(Smi::FromInt(0));
  set_elements(GetHeap()->EmptyExternalArrayForMap(map()));
}


// Maps a fixed typed-array elements kind to the corresponding
// external-array elements kind (used when materializing a buffer).
static ElementsKind FixedToExternalElementsKind(ElementsKind elements_kind) {
  switch (elements_kind) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                       \
    case TYPE##_ELEMENTS: return EXTERNAL_##TYPE##_ELEMENTS;

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE

    default:
      UNREACHABLE();
      // Unreachable; only silences the compiler.
      return FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND;
  }
}


// Converts a typed array with on-heap (fixed) element storage into one
// backed by a real JSArrayBuffer: allocates the buffer, copies the data
// out of the heap, and transitions the array's map and elements to the
// external representation. Returns the new buffer.
Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer(
    Handle<JSTypedArray> typed_array) {

  Handle<Map> map(typed_array->map());
  Isolate* isolate = typed_array->GetIsolate();

  DCHECK(IsFixedTypedArrayElementsKind(map->elements_kind()));

  Handle<Map> new_map = Map::TransitionElementsTo(
      map,
      FixedToExternalElementsKind(map->elements_kind()));

  Handle<JSArrayBuffer> buffer = isolate->factory()->NewJSArrayBuffer();
  Handle<FixedTypedArrayBase> fixed_typed_array(
      FixedTypedArrayBase::cast(typed_array->elements()));
  Runtime::SetupArrayBufferAllocatingData(isolate, buffer,
      fixed_typed_array->DataSize(), false);
  // Move the element data out of the heap into the buffer's backing store.
  memcpy(buffer->backing_store(),
         fixed_typed_array->DataPtr(),
         fixed_typed_array->DataSize());
  Handle<ExternalArray> new_elements =
      isolate->factory()->NewExternalArray(
          fixed_typed_array->length(), typed_array->type(),
          static_cast<uint8_t*>(buffer->backing_store()));

  // Link buffer and view both ways before switching representations.
  buffer->set_weak_first_view(*typed_array);
  DCHECK(typed_array->weak_next() == isolate->heap()->undefined_value());
  typed_array->set_buffer(*buffer);
  JSObject::SetMapAndElements(typed_array, new_map, new_elements);

  return buffer;
}


// Returns the array's backing JSArrayBuffer, materializing one on first
// request (the buffer field holds Smi 0 until then).
Handle<JSArrayBuffer> JSTypedArray::GetBuffer() {
  Handle<Object> result(buffer(), GetIsolate());
  if (*result != Smi::FromInt(0)) {
    DCHECK(IsExternalArrayElementsKind(map()->elements_kind()));
    return Handle<JSArrayBuffer>::cast(result);
  }
  Handle<JSTypedArray> self(this);
  return MaterializeArrayBuffer(self);
}


// Typed accessor over the raw type slot of the cell.
HeapType* PropertyCell::type() {
  return static_cast<HeapType*>(type_raw());
}


void PropertyCell::set_type(HeapType* type, WriteBarrierMode ignored) {
  DCHECK(IsPropertyCell());
  set_type_raw(type, ignored);
}


// Computes the cell's new type after storing value. If the new constant
// type fits the old type nothing changes; otherwise dependent code is
// deoptimized and the type either becomes the new constant (when the old
// type carried no information) or widens to Any.
Handle<HeapType> PropertyCell::UpdatedType(Handle<PropertyCell> cell,
                                           Handle<Object> value) {
  Isolate* isolate = cell->GetIsolate();
  Handle<HeapType> old_type(cell->type(), isolate);
  Handle<HeapType> new_type = HeapType::Constant(value, isolate);

  if (new_type->Is(old_type)) return old_type;

  // The recorded type is about to change: invalidate optimized code that
  // depended on it.
  cell->dependent_code()->DeoptimizeDependentCodeGroup(
      isolate, DependentCode::kPropertyCellChangedGroup);

  if (old_type->Is(HeapType::None()) || old_type->Is(HeapType::Undefined())) {
    return new_type;
  }

  return HeapType::Any(isolate);
}


// Stores value into the cell and updates the tracked type, unless the
// type has already widened to Any (in which case no tracking remains).
void PropertyCell::SetValueInferType(Handle<PropertyCell> cell,
                                     Handle<Object> value) {
  cell->set_value(*value);
  if (!HeapType::Any()->Is(cell->type())) {
    Handle<HeapType> new_type = UpdatedType(cell, value);
    cell->set_type(*new_type);
  }
}


// static
// Records info's code as dependent on this cell so it can be deoptimized
// when the cell's value/type changes, and registers the reverse dependency
// in the compilation info.
void PropertyCell::AddDependentCompilationInfo(Handle<PropertyCell> cell,
                                               CompilationInfo* info) {
  Handle<DependentCode> codes =
      DependentCode::Insert(handle(cell->dependent_code(), info->isolate()),
                            DependentCode::kPropertyCellChangedGroup,
                            info->object_wrapper());
  if (*codes != cell->dependent_code()) cell->set_dependent_code(*codes);
  info->dependencies(DependentCode::kPropertyCellChangedGroup)->Add(
      cell, info->zone());
}

} }  // namespace v8::internal