// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_MAP_INL_H_
#define V8_OBJECTS_MAP_INL_H_

#include "src/objects/map.h"
#include "src/field-type.h"
#include "src/objects-inl.h"
#include "src/objects/api-callbacks-inl.h"
#include "src/objects/descriptor-array.h"
#include "src/objects/prototype-info-inl.h"
#include "src/objects/shared-function-info.h"
#include "src/objects/templates-inl.h"
#include "src/property.h"
#include "src/transitions.h"

// For pulling in heap/incremental-marking.h which is needed by
// ACCESSORS_CHECKED.
#include "src/heap/heap-inl.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

CAST_ACCESSOR(Map)

// Macro-generated tagged-pointer accessors for Map's object fields.
// |layout_descriptor| is only present/valid when double fields are unboxed
// (FLAG_unbox_double_fields), hence the ACCESSORS_CHECKED guard.
ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
ACCESSORS_CHECKED(Map, layout_descriptor, LayoutDescriptor,
                  kLayoutDescriptorOffset, FLAG_unbox_double_fields)
// |raw_transitions| shares its slot with |prototype_info| (see
// kTransitionsOrPrototypeInfoOffset): the slot holds transitions for
// non-prototype maps and a PrototypeInfo for prototype maps.
WEAK_ACCESSORS(Map, raw_transitions, kTransitionsOrPrototypeInfoOffset)

// |bit_field| fields. Each BIT_FIELD_ACCESSORS invocation generates a
// getter/setter pair over the named bit-field byte.
BIT_FIELD_ACCESSORS(Map, bit_field, has_non_instance_prototype,
                    Map::HasNonInstancePrototypeBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_callable, Map::IsCallableBit)
BIT_FIELD_ACCESSORS(Map, bit_field, has_named_interceptor,
                    Map::HasNamedInterceptorBit)
BIT_FIELD_ACCESSORS(Map, bit_field, has_indexed_interceptor,
                    Map::HasIndexedInterceptorBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_undetectable, Map::IsUndetectableBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_access_check_needed,
                    Map::IsAccessCheckNeededBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_constructor, Map::IsConstructorBit)
BIT_FIELD_ACCESSORS(Map, bit_field, has_prototype_slot,
                    Map::HasPrototypeSlotBit)

// |bit_field2| fields.
BIT_FIELD_ACCESSORS(Map, bit_field2, is_extensible, Map::IsExtensibleBit)
BIT_FIELD_ACCESSORS(Map, bit_field2, is_prototype_map, Map::IsPrototypeMapBit)
BIT_FIELD_ACCESSORS(Map, bit_field2, is_in_retained_map_list,
                    Map::IsInRetainedMapListBit)

// |bit_field3| fields.
BIT_FIELD_ACCESSORS(Map, bit_field3, owns_descriptors, Map::OwnsDescriptorsBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, has_hidden_prototype,
                    Map::HasHiddenPrototypeBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_deprecated, Map::IsDeprecatedBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_migration_target,
                    Map::IsMigrationTargetBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_immutable_proto,
                    Map::IsImmutablePrototypeBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, new_target_is_base,
                    Map::NewTargetIsBaseBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, may_have_interesting_symbols,
                    Map::MayHaveInterestingSymbolsBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, construction_counter,
                    Map::ConstructionCounterBits)

// Returns the named-property interceptor stored on this map's API
// constructor. Caller must ensure has_named_interceptor() holds.
InterceptorInfo* Map::GetNamedInterceptor() {
  DCHECK(has_named_interceptor());
  FunctionTemplateInfo* info = GetFunctionTemplateInfo();
  return InterceptorInfo::cast(info->named_property_handler());
}

// Same as above, for the indexed (element) interceptor.
InterceptorInfo* Map::GetIndexedInterceptor() {
  DCHECK(has_indexed_interceptor());
  FunctionTemplateInfo* info = GetFunctionTemplateInfo();
  return InterceptorInfo::cast(info->indexed_property_handler());
}

// Returns true if a field with the given constness/representation/type can be
// generalized without creating a new map (i.e. by mutating the existing
// descriptor in place).
bool Map::IsInplaceGeneralizableField(PropertyConstness constness,
                                      Representation representation,
                                      FieldType* field_type) {
  if (FLAG_track_constant_fields && FLAG_modify_map_inplace &&
      (constness == PropertyConstness::kConst)) {
    // VariableMode::kConst -> PropertyConstness::kMutable field generalization
    // may happen in-place.
    return true;
  }
  // A HeapObject field with a type more precise than Any can be widened to
  // Any in place.
  if (representation.IsHeapObject() && !field_type->IsAny()) {
    return true;
  }
  return false;
}

// Only JSArray, JSValue (string wrappers) and JSArguments participate in
// fast ElementsKind transitions.
bool Map::CanHaveFastTransitionableElementsKind(InstanceType instance_type) {
  return instance_type == JS_ARRAY_TYPE || instance_type == JS_VALUE_TYPE ||
         instance_type == JS_ARGUMENTS_TYPE;
}

bool Map::CanHaveFastTransitionableElementsKind() const {
  return CanHaveFastTransitionableElementsKind(instance_type());
}

// static
// Widens |constness|/|field_type| to their most general values for maps that
// can have fast elements-kind transitions, so no in-place generalization is
// ever needed for them (see the comment inside).
void Map::GeneralizeIfCanHaveTransitionableFastElementsKind(
    Isolate* isolate, InstanceType instance_type, PropertyConstness* constness,
    Representation* representation, Handle<FieldType>* field_type) {
  if (CanHaveFastTransitionableElementsKind(instance_type)) {
    // We don't support propagation of field generalization through elements
    // kind transitions because they are inserted into the transition tree
    // before field transitions. In order to avoid complexity of handling
    // such a case we ensure that all maps with transitionable elements kinds
    // do not have fields that can be generalized in-place (without creation
    // of a new map).
    if (FLAG_track_constant_fields && FLAG_modify_map_inplace) {
      // The constness is either already PropertyConstness::kMutable or should
      // become PropertyConstness::kMutable if it was VariableMode::kConst.
      *constness = PropertyConstness::kMutable;
    }
    if (representation->IsHeapObject()) {
      // The field type is either already Any or should become Any if it was
      // something else.
      *field_type = FieldType::Any(isolate);
    }
  }
}

// True if the in-object field at |index| stores a raw (unboxed) double
// rather than a tagged pointer, per the layout descriptor.
bool Map::IsUnboxedDoubleField(FieldIndex index) const {
  if (!FLAG_unbox_double_fields) return false;
  if (index.is_hidden_field() || !index.is_inobject()) return false;
  return !layout_descriptor()->IsTagged(index.property_index());
}

// Heuristic limiting how many out-of-object fast properties an object may
// accumulate before it should be normalized to dictionary mode.
bool Map::TooManyFastProperties(StoreFromKeyed store_mode) const {
  if (UnusedPropertyFields() != 0) return false;
  if (is_prototype_map()) return false;
  // Keyed stores are a strong hint of dictionary-like usage, so allow far
  // fewer external properties in that case (12 vs 128).
  int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
  int limit = Max(minimum, GetInObjectProperties());
  int external = NumberOfFields() - GetInObjectProperties();
  return external > limit;
}

PropertyDetails Map::GetLastDescriptorDetails() const {
  return instance_descriptors()->GetDetails(LastAdded());
}

// Index of the most recently added own descriptor. Requires at least one.
int Map::LastAdded() const {
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK_GT(number_of_own_descriptors, 0);
  return number_of_own_descriptors - 1;
}

int Map::NumberOfOwnDescriptors() const {
  return NumberOfOwnDescriptorsBits::decode(bit_field3());
}

void Map::SetNumberOfOwnDescriptors(int number) {
  DCHECK_LE(number, instance_descriptors()->number_of_descriptors());
  CHECK_LE(static_cast<unsigned>(number),
           static_cast<unsigned>(kMaxNumberOfDescriptors));
  set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
}

int Map::EnumLength() const { return EnumLengthBits::decode(bit_field3()); }

void Map::SetEnumLength(int length) {
  if (length != kInvalidEnumCacheSentinel) {
    DCHECK_LE(length, NumberOfOwnDescriptors());
    CHECK_LE(static_cast<unsigned>(length),
             static_cast<unsigned>(kMaxNumberOfDescriptors));
  }
  set_bit_field3(EnumLengthBits::update(bit_field3(), length));
}

// Returns the canonical read-only empty elements backing store matching this
// map's elements kind.
FixedArrayBase* Map::GetInitialElements() const {
  FixedArrayBase* result = nullptr;
  if (has_fast_elements() || has_fast_string_wrapper_elements()) {
    result = GetReadOnlyRoots().empty_fixed_array();
  } else if (has_fast_sloppy_arguments_elements()) {
    result = GetReadOnlyRoots().empty_sloppy_arguments_elements();
  } else if (has_fixed_typed_array_elements()) {
    result = GetReadOnlyRoots().EmptyFixedTypedArrayForMap(this);
  } else if (has_dictionary_elements()) {
    result = GetReadOnlyRoots().empty_slow_element_dictionary();
  } else {
    UNREACHABLE();
  }
  // Initial elements must be immortal immovable (old-space) objects.
  DCHECK(!Heap::InNewSpace(result));
  return result;
}

// The visitor id byte is read/written with relaxed atomics because the
// concurrent marker accesses it while the main thread may update it.
VisitorId Map::visitor_id() const {
  return static_cast<VisitorId>(
      RELAXED_READ_BYTE_FIELD(this, kVisitorIdOffset));
}

void Map::set_visitor_id(VisitorId id) {
  CHECK_LT(static_cast<unsigned>(id), 256);  // Must fit in one byte.
  RELAXED_WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}

// Instance size is stored as a word count in a single byte.
int Map::instance_size_in_words() const {
  return RELAXED_READ_BYTE_FIELD(this, kInstanceSizeInWordsOffset);
}

void Map::set_instance_size_in_words(int value) {
  RELAXED_WRITE_BYTE_FIELD(this, kInstanceSizeInWordsOffset,
                           static_cast<byte>(value));
}

int Map::instance_size() const {
  return instance_size_in_words() << kPointerSizeLog2;
}

void Map::set_instance_size(int value) {
  CHECK_EQ(0, value & (kPointerSize - 1));  // Must be pointer-aligned.
  value >>= kPointerSizeLog2;
  CHECK_LT(static_cast<unsigned>(value), 256);  // Must fit in one byte.
  set_instance_size_in_words(value);
}

// This byte is overloaded: for JSObject maps it is the word offset where
// in-object properties start; for primitive maps it is the index of the
// native-context slot holding the wrapper constructor.
int Map::inobject_properties_start_or_constructor_function_index() const {
  return RELAXED_READ_BYTE_FIELD(
      this, kInObjectPropertiesStartOrConstructorFunctionIndexOffset);
}

void Map::set_inobject_properties_start_or_constructor_function_index(
    int value) {
  CHECK_LT(static_cast<unsigned>(value), 256);  // Must fit in one byte.
  RELAXED_WRITE_BYTE_FIELD(
      this, kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
      static_cast<byte>(value));
}

int Map::GetInObjectPropertiesStartInWords() const {
  DCHECK(IsJSObjectMap());
  return inobject_properties_start_or_constructor_function_index();
}

void Map::SetInObjectPropertiesStartInWords(int value) {
  CHECK(IsJSObjectMap());
  set_inobject_properties_start_or_constructor_function_index(value);
}

// Number of in-object property slots: everything between the start-of-
// properties offset and the end of the instance.
int Map::GetInObjectProperties() const {
  DCHECK(IsJSObjectMap());
  return instance_size_in_words() - GetInObjectPropertiesStartInWords();
}

int Map::GetConstructorFunctionIndex() const {
  DCHECK(IsPrimitiveMap());
  return inobject_properties_start_or_constructor_function_index();
}

void Map::SetConstructorFunctionIndex(int value) {
  CHECK(IsPrimitiveMap());
  set_inobject_properties_start_or_constructor_function_index(value);
}

// Byte offset (from the object start) of in-object property |index|.
int Map::GetInObjectPropertyOffset(int index) const {
  return (GetInObjectPropertiesStartInWords() + index) * kPointerSize;
}

// Test-only wrapper exposing the private AddMissingTransitions.
Handle<Map> Map::AddMissingTransitionsForTesting(
    Isolate* isolate, Handle<Map> split_map,
    Handle<DescriptorArray> descriptors,
    Handle<LayoutDescriptor> full_layout_descriptor) {
  return AddMissingTransitions(isolate, split_map, descriptors,
                               full_layout_descriptor);
}

InstanceType Map::instance_type() const {
  return static_cast<InstanceType>(
      READ_UINT16_FIELD(this, kInstanceTypeOffset));
}

void Map::set_instance_type(InstanceType value) {
  WRITE_UINT16_FIELD(this, kInstanceTypeOffset, value);
}

// Decodes the overloaded used_or_unused byte into a slack count:
// values >= JSObject::kFieldsAdded mean "used instance size in words" (slack
// is in-object), smaller values directly encode property-array slack.
int Map::UnusedPropertyFields() const {
  int value = used_or_unused_instance_size_in_words();
  DCHECK_IMPLIES(!IsJSObjectMap(), value == 0);
  int unused;
  if (value >= JSObject::kFieldsAdded) {
    unused = instance_size_in_words() - value;
  } else {
    // For out of object properties "used_or_unused_instance_size_in_words"
    // byte encodes the slack in the property array.
    unused = value;
  }
  return unused;
}

int Map::UnusedInObjectProperties() const {
  // Like Map::UnusedPropertyFields(), but returns 0 for out of object
  // properties.
  int value = used_or_unused_instance_size_in_words();
  DCHECK_IMPLIES(!IsJSObjectMap(), value == 0);
  if (value >= JSObject::kFieldsAdded) {
    return instance_size_in_words() - value;
  }
  return 0;
}

int Map::used_or_unused_instance_size_in_words() const {
  return RELAXED_READ_BYTE_FIELD(this, kUsedOrUnusedInstanceSizeInWordsOffset);
}

void Map::set_used_or_unused_instance_size_in_words(int value) {
  CHECK_LE(static_cast<unsigned>(value), 255);  // Must fit in one byte.
  RELAXED_WRITE_BYTE_FIELD(this, kUsedOrUnusedInstanceSizeInWordsOffset,
                           static_cast<byte>(value));
}

// Instance size actually occupied by used fields (see the encoding comment
// on UnusedPropertyFields above).
int Map::UsedInstanceSize() const {
  int words = used_or_unused_instance_size_in_words();
  if (words < JSObject::kFieldsAdded) {
    // All in-object properties are used and the words is tracking the slack
    // in the property array.
    return instance_size();
  }
  return words * kPointerSize;
}

// Records |value| unused in-object property slots by storing the used
// instance size in words.
void Map::SetInObjectUnusedPropertyFields(int value) {
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kPointerSize);
  if (!IsJSObjectMap()) {
    CHECK_EQ(0, value);
    set_used_or_unused_instance_size_in_words(0);
    DCHECK_EQ(0, UnusedPropertyFields());
    return;
  }
  CHECK_LE(0, value);
  DCHECK_LE(value, GetInObjectProperties());
  int used_inobject_properties = GetInObjectProperties() - value;
  set_used_or_unused_instance_size_in_words(
      GetInObjectPropertyOffset(used_inobject_properties) / kPointerSize);
  DCHECK_EQ(value, UnusedPropertyFields());
}

// Records |value| unused slots in the (out-of-object) property array.
// |value| must be < kFieldsAdded so it cannot collide with the in-object
// "used size" encoding above.
void Map::SetOutOfObjectUnusedPropertyFields(int value) {
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kPointerSize);
  CHECK_LT(static_cast<unsigned>(value), JSObject::kFieldsAdded);
  // For out of object properties "used_instance_size_in_words" byte encodes
  // the slack in the property array.
  set_used_or_unused_instance_size_in_words(value);
  DCHECK_EQ(value, UnusedPropertyFields());
}

// Copies the slack encoding verbatim from |map| (same instance size assumed;
// the DCHECK validates the decoded slack matches).
void Map::CopyUnusedPropertyFields(Map* map) {
  set_used_or_unused_instance_size_in_words(
      map->used_or_unused_instance_size_in_words());
  DCHECK_EQ(UnusedPropertyFields(), map->UnusedPropertyFields());
}

// Like CopyUnusedPropertyFields, but compensates for a differing instance
// size so the distance to the object's end (i.e. the slack) is preserved.
void Map::CopyUnusedPropertyFieldsAdjustedForInstanceSize(Map* map) {
  int value = map->used_or_unused_instance_size_in_words();
  // NOTE(review): JSValue::kFieldsAdded is used here while every sibling
  // function uses JSObject::kFieldsAdded. JSValue presumably inherits the
  // constant from JSObject so the value is the same — but normalizing to
  // JSObject::kFieldsAdded would be more consistent; confirm and clean up.
  if (value >= JSValue::kFieldsAdded) {
    // Unused in-object fields. Adjust the offset from the objects start
    // so it matches the distance to the objects end.
    value += instance_size_in_words() - map->instance_size_in_words();
  }
  set_used_or_unused_instance_size_in_words(value);
  DCHECK_EQ(UnusedPropertyFields(), map->UnusedPropertyFields());
}

// Consumes one slot of slack after a property was added, switching to a
// fresh out-of-object chunk when the current slack is exhausted.
void Map::AccountAddedPropertyField() {
  // Update used instance size and unused property fields number.
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kPointerSize);
#ifdef DEBUG
  int new_unused = UnusedPropertyFields() - 1;
  if (new_unused < 0) new_unused += JSObject::kFieldsAdded;
#endif
  int value = used_or_unused_instance_size_in_words();
  if (value >= JSObject::kFieldsAdded) {
    if (value == instance_size_in_words()) {
      // In-object space is exhausted; the new field goes to a (new)
      // property array with kFieldsAdded - 1 remaining slack.
      AccountAddedOutOfObjectPropertyField(0);
    } else {
      // The property is added in-object, so simply increment the counter.
      set_used_or_unused_instance_size_in_words(value + 1);
    }
  } else {
    AccountAddedOutOfObjectPropertyField(value);
  }
  DCHECK_EQ(new_unused, UnusedPropertyFields());
}

// Decrements property-array slack, wrapping modulo kFieldsAdded because the
// property array grows in chunks of kFieldsAdded slots.
void Map::AccountAddedOutOfObjectPropertyField(int unused_in_property_array) {
  unused_in_property_array--;
  if (unused_in_property_array < 0) {
    unused_in_property_array += JSObject::kFieldsAdded;
  }
  CHECK_LT(static_cast<unsigned>(unused_in_property_array),
           JSObject::kFieldsAdded);
  set_used_or_unused_instance_size_in_words(unused_in_property_array);
  DCHECK_EQ(unused_in_property_array, UnusedPropertyFields());
}

byte Map::bit_field() const { return READ_BYTE_FIELD(this, kBitFieldOffset); }

void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}

byte Map::bit_field2() const { return READ_BYTE_FIELD(this, kBitField2Offset); }

void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}

// A prototype map that no longer owns its descriptors has been replaced and
// is considered abandoned.
bool Map::is_abandoned_prototype_map() const {
  return is_prototype_map() && !owns_descriptors();
}

bool Map::should_be_fast_prototype_map() const {
  // The prototype-info slot may still hold something other than a
  // PrototypeInfo (e.g. it was never allocated); treat that as "no".
  if (!prototype_info()->IsPrototypeInfo()) return false;
  return PrototypeInfo::cast(prototype_info())->should_be_fast_map();
}

void Map::set_elements_kind(ElementsKind elements_kind) {
  CHECK_LT(static_cast<int>(elements_kind), kElementsKindCount);
  set_bit_field2(Map::ElementsKindBits::update(bit_field2(), elements_kind));
}

ElementsKind Map::elements_kind() const {
  return Map::ElementsKindBits::decode(bit_field2());
}

// Convenience predicates over elements_kind().
bool Map::has_fast_smi_elements() const {
  return IsSmiElementsKind(elements_kind());
}

bool Map::has_fast_object_elements() const {
  return IsObjectElementsKind(elements_kind());
}

bool Map::has_fast_smi_or_object_elements() const {
  return IsSmiOrObjectElementsKind(elements_kind());
}

bool Map::has_fast_double_elements() const {
  return IsDoubleElementsKind(elements_kind());
}

bool Map::has_fast_elements() const {
  return IsFastElementsKind(elements_kind());
}

bool Map::has_sloppy_arguments_elements() const {
  return IsSloppyArgumentsElementsKind(elements_kind());
}

bool Map::has_fast_sloppy_arguments_elements() const {
  return elements_kind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
}

bool Map::has_fast_string_wrapper_elements() const {
  return elements_kind() == FAST_STRING_WRAPPER_ELEMENTS;
}

bool Map::has_fixed_typed_array_elements() const {
  return IsFixedTypedArrayElementsKind(elements_kind());
}

bool Map::has_dictionary_elements() const {
  return IsDictionaryElementsKind(elements_kind());
}

void Map::set_is_dictionary_map(bool value) {
  uint32_t new_bit_field3 = IsDictionaryMapBit::update(bit_field3(), value);
  // Dictionary maps are always unstable; both bits are written in one store.
  new_bit_field3 = IsUnstableBit::update(new_bit_field3, value);
  set_bit_field3(new_bit_field3);
}

bool Map::is_dictionary_map() const {
  return IsDictionaryMapBit::decode(bit_field3());
}

void Map::mark_unstable() {
  set_bit_field3(IsUnstableBit::update(bit_field3(), true));
}

bool Map::is_stable() const { return !IsUnstableBit::decode(bit_field3()); }

// A map can be deprecated if any own descriptor could be generalized:
// non-tagged or narrow representations, or kData properties stored in the
// descriptor (constants) rather than in a field.
bool Map::CanBeDeprecated() const {
  int descriptor = LastAdded();
  for (int i = 0; i <= descriptor; i++) {
    PropertyDetails details = instance_descriptors()->GetDetails(i);
    if (details.representation().IsNone()) return true;
    if (details.representation().IsSmi()) return true;
    if (details.representation().IsDouble()) return true;
    if (details.representation().IsHeapObject()) return true;
    if (details.kind() == kData && details.location() == kDescriptor) {
      return true;
    }
  }
  return false;
}

// Marks a stable leaf map unstable and deoptimizes code that depended on its
// stability (prototype-check group).
void Map::NotifyLeafMapLayoutChange(Isolate* isolate) {
  if (is_stable()) {
    mark_unstable();
    dependent_code()->DeoptimizeDependentCodeGroup(
        isolate, DependentCode::kPrototypeCheckGroup);
  }
}

bool Map::IsJSObject(InstanceType type) {
  // JSObject types occupy the top of the instance-type range, so a single
  // lower-bound comparison suffices.
  STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE);
  return type >= FIRST_JS_OBJECT_TYPE;
}

bool Map::CanTransition() const {
  // Only JSObject and subtypes have map transitions and back pointers.
  return IsJSObject(instance_type());
}

// Oddball/identity checks against canonical read-only root maps.
bool Map::IsBooleanMap() const {
  return this == GetReadOnlyRoots().boolean_map();
}

bool Map::IsNullMap() const { return this == GetReadOnlyRoots().null_map(); }

bool Map::IsUndefinedMap() const {
  return this == GetReadOnlyRoots().undefined_map();
}

bool Map::IsNullOrUndefinedMap() const {
  return IsNullMap() || IsUndefinedMap();
}

bool Map::IsPrimitiveMap() const {
  return instance_type() <= LAST_PRIMITIVE_TYPE;
}
bool Map::IsJSReceiverMap() const {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return instance_type() >= FIRST_JS_RECEIVER_TYPE;
}
bool Map::IsJSObjectMap() const { return IsJSObject(instance_type()); }
bool Map::IsJSPromiseMap() const { return instance_type() == JS_PROMISE_TYPE; }
bool Map::IsJSArrayMap() const { return instance_type() == JS_ARRAY_TYPE; }
bool Map::IsJSFunctionMap() const {
  return instance_type() == JS_FUNCTION_TYPE;
}
bool Map::IsStringMap() const { return instance_type() < FIRST_NONSTRING_TYPE; }
bool Map::IsJSProxyMap() const { return instance_type() == JS_PROXY_TYPE; }
bool Map::IsJSGlobalProxyMap() const {
  return instance_type() == JS_GLOBAL_PROXY_TYPE;
}
bool Map::IsJSGlobalObjectMap() const {
  return instance_type() == JS_GLOBAL_OBJECT_TYPE;
}
bool Map::IsJSTypedArrayMap() const {
  return instance_type() == JS_TYPED_ARRAY_TYPE;
}
bool Map::IsJSDataViewMap() const {
  return instance_type() == JS_DATA_VIEW_TYPE;
}

Object* Map::prototype() const { return READ_FIELD(this, kPrototypeOffset); }

void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  // Prototypes are always JSReceivers or null (never e.g. undefined).
  DCHECK(value->IsNull() || value->IsJSReceiver());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(this, kPrototypeOffset, value, mode);
}

// Reads the layout descriptor with a relaxed load so it is safe to call
// concurrently with the mutator (cast_gc_safe tolerates in-progress values).
LayoutDescriptor* Map::layout_descriptor_gc_safe() const {
  DCHECK(FLAG_unbox_double_fields);
  Object* layout_desc = RELAXED_READ_FIELD(this, kLayoutDescriptorOffset);
  return LayoutDescriptor::cast_gc_safe(layout_desc);
}

bool Map::HasFastPointerLayout() const {
  DCHECK(FLAG_unbox_double_fields);
  Object* layout_desc = RELAXED_READ_FIELD(this, kLayoutDescriptorOffset);
  return LayoutDescriptor::IsFastPointerLayout(layout_desc);
}

// Replaces the descriptor array (and, for slow layouts, the layout
// descriptor) without changing the number of own descriptors.
void Map::UpdateDescriptors(DescriptorArray* descriptors,
                            LayoutDescriptor* layout_desc) {
  set_instance_descriptors(descriptors);
  if (FLAG_unbox_double_fields) {
    if (layout_descriptor()->IsSlowLayout()) {
      set_layout_descriptor(layout_desc);
    }
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(this));
      CHECK_EQ(Map::GetVisitorId(this), visitor_id());
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
    DCHECK(visitor_id() == Map::GetVisitorId(this));
#endif
  }
}

// Installs descriptors and adopts all of them as own descriptors; also
// recomputes the visitor id, which depends on the field layout.
void Map::InitializeDescriptors(DescriptorArray* descriptors,
                                LayoutDescriptor* layout_desc) {
  int len = descriptors->number_of_descriptors();
  set_instance_descriptors(descriptors);
  SetNumberOfOwnDescriptors(len);

  if (FLAG_unbox_double_fields) {
    set_layout_descriptor(layout_desc);
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(this));
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
#endif
    set_visitor_id(Map::GetVisitorId(this));
  }
}

void Map::set_bit_field3(uint32_t bits) {
  // On 64-bit the bit_field3 slot is pointer-sized; zero the upper half so
  // the whole word has a deterministic value.
  if (kInt32Size != kPointerSize) {
    WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
  }
  WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
}

uint32_t Map::bit_field3() const {
  return READ_UINT32_FIELD(this, kBitField3Offset);
}

// When double unboxing is off, every field is tagged, so the shared
// fast-pointer layout applies to all maps.
LayoutDescriptor* Map::GetLayoutDescriptor() const {
  return FLAG_unbox_double_fields ? layout_descriptor()
                                  : LayoutDescriptor::FastPointerLayout();
}

// Appends |desc| to this map's (fully owned) descriptor array and updates
// dependent bookkeeping (interesting-symbol bit, field-slack accounting).
void Map::AppendDescriptor(Descriptor* desc) {
  DescriptorArray* descriptors = instance_descriptors();
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
  descriptors->Append(desc);
  SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);

  // Properly mark the map if the {desc} is an "interesting symbol".
  if (desc->GetKey()->IsInterestingSymbol()) {
    set_may_have_interesting_symbols(true);
  }
  PropertyDetails details = desc->GetDetails();
  if (details.location() == kField) {
    DCHECK_GT(UnusedPropertyFields(), 0);
    AccountAddedPropertyField();
  }

  // This function does not support appending double field descriptors and
  // it should never try to (otherwise, layout descriptor must be updated too).
  // NOTE(review): the #ifdef DEBUG guard is redundant — DCHECK already
  // compiles to nothing in release builds.
#ifdef DEBUG
  DCHECK(details.location() != kField || !details.representation().IsDouble());
#endif
}

// The constructor-or-backpointer slot holds a Map for non-initial maps
// (the back pointer); otherwise there is no back pointer and undefined is
// returned.
Object* Map::GetBackPointer() const {
  Object* object = constructor_or_backpointer();
  if (object->IsMap()) {
    return object;
  }
  return GetReadOnlyRoots().undefined_value();
}

// Looks up the special elements-kind transition, or nullptr if none exists.
Map* Map::ElementsTransitionMap() {
  DisallowHeapAllocation no_gc;
  // TODO(delphick): While it's safe to pass nullptr for Isolate* here as
  // SearchSpecial doesn't need it, this is really ugly. Perhaps factor out a
  // base class for methods not requiring an Isolate?
  return TransitionsAccessor(nullptr, this, &no_gc)
      .SearchSpecial(GetReadOnlyRoots().elements_transition_symbol());
}

// For prototype maps the transitions slot is reused to hold PrototypeInfo.
Object* Map::prototype_info() const {
  DCHECK(is_prototype_map());
  return READ_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset);
}

void Map::set_prototype_info(Object* value, WriteBarrierMode mode) {
  CHECK(is_prototype_map());
  WRITE_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset, value);
  CONDITIONAL_WRITE_BARRIER(this, Map::kTransitionsOrPrototypeInfoOffset, value,
                            mode);
}

// Installs the back pointer. May only be done once (the slot must still hold
// the constructor), and the parent map must share the same constructor.
void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
  CHECK_GE(instance_type(), FIRST_JS_RECEIVER_TYPE);
  CHECK(value->IsMap());
  CHECK(GetBackPointer()->IsUndefined());
  CHECK_IMPLIES(value->IsMap(), Map::cast(value)->GetConstructor() ==
                                    constructor_or_backpointer());
  set_constructor_or_backpointer(value, mode);
}

ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, prototype_validity_cell, Object, kPrototypeValidityCellOffset)
ACCESSORS(Map, constructor_or_backpointer, Object,
          kConstructorOrBackPointerOffset)

// The validity cell is either a Smi sentinel or a Cell whose value signals
// whether the cached prototype chain information is still valid.
bool Map::IsPrototypeValidityCellValid() const {
  Object* validity_cell = prototype_validity_cell();
  Object* value = validity_cell->IsSmi() ? Smi::cast(validity_cell)
                                         : Cell::cast(validity_cell)->value();
  return value == Smi::FromInt(Map::kPrototypeChainValid);
}

// Walks the back-pointer chain to the initial map, whose slot holds the
// actual constructor (or FunctionTemplateInfo).
Object* Map::GetConstructor() const {
  Object* maybe_constructor = constructor_or_backpointer();
  // Follow any back pointers.
  while (maybe_constructor->IsMap()) {
    maybe_constructor =
        Map::cast(maybe_constructor)->constructor_or_backpointer();
  }
  return maybe_constructor;
}

// Returns the FunctionTemplateInfo for an API object's map: either stored
// directly as the constructor, or reachable via an API JSFunction.
FunctionTemplateInfo* Map::GetFunctionTemplateInfo() const {
  Object* constructor = GetConstructor();
  if (constructor->IsJSFunction()) {
    DCHECK(JSFunction::cast(constructor)->shared()->IsApiFunction());
    return JSFunction::cast(constructor)->shared()->get_api_func_data();
  }
  DCHECK(constructor->IsFunctionTemplateInfo());
  return FunctionTemplateInfo::cast(constructor);
}

void Map::SetConstructor(Object* constructor, WriteBarrierMode mode) {
  // Never overwrite a back pointer with a constructor.
  CHECK(!constructor_or_backpointer()->IsMap());
  set_constructor_or_backpointer(constructor, mode);
}

// Copies an initial map keeping its size/properties/slack configuration.
Handle<Map> Map::CopyInitialMap(Isolate* isolate, Handle<Map> map) {
  return CopyInitialMap(isolate, map, map->instance_size(),
                        map->GetInObjectProperties(),
                        map->UnusedPropertyFields());
}

bool Map::IsInobjectSlackTrackingInProgress() const {
  return construction_counter() != Map::kNoSlackTracking;
}

// Decrements the construction counter on each constructed instance; when the
// counter reaches the end sentinel, finalizes slack tracking.
void Map::InobjectSlackTrackingStep(Isolate* isolate) {
  // Slack tracking should only be performed on an initial map.
  DCHECK(GetBackPointer()->IsUndefined());
  if (!IsInobjectSlackTrackingInProgress()) return;
  int counter = construction_counter();
  set_construction_counter(counter - 1);
  if (counter == kSlackTrackingCounterEnd) {
    CompleteInobjectSlackTracking(isolate);
  }
}

// Growth policy for arrays backing maps: at least 1 slot, at most 25% of the
// old size, clamped so the result never exceeds |size_limit|.
int Map::SlackForArraySize(int old_size, int size_limit) {
  const int max_slack = size_limit - old_size;
  CHECK_LE(0, max_slack);
  if (old_size < 4) {
    DCHECK_LE(1, max_slack);
    return 1;
  }
  return Min(max_slack, old_size / 4);
}

// Cache slot for a map, keyed by the map's hash.
int NormalizedMapCache::GetIndex(Handle<Map> map) {
  return map->Hash() % NormalizedMapCache::kEntries;
}

// Structural check: a normalized map cache is a WeakFixedArray of exactly
// kEntries elements (there is no dedicated instance type for it).
bool NormalizedMapCache::IsNormalizedMapCache(const HeapObject* obj) {
  if (!obj->IsWeakFixedArray()) return false;
  if (WeakFixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    NormalizedMapCache* cache =
        reinterpret_cast<NormalizedMapCache*>(const_cast<HeapObject*>(obj));
    cache->NormalizedMapCacheVerify(cache->GetIsolate());
  }
#endif
  return true;
}

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_MAP_INL_H_