// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/factory.h"

#include "src/accessors.h"
#include "src/allocation-site-scopes.h"
#include "src/ast/ast-source-ranges.h"
#include "src/ast/ast.h"
#include "src/base/bits.h"
#include "src/bootstrapper.h"
#include "src/builtins/constants-table-builder.h"
#include "src/compiler.h"
#include "src/conversions.h"
#include "src/interpreter/interpreter.h"
#include "src/isolate-inl.h"
#include "src/macro-assembler.h"
#include "src/objects/api-callbacks.h"
#include "src/objects/arguments-inl.h"
#include "src/objects/bigint.h"
#include "src/objects/debug-objects-inl.h"
#include "src/objects/frame-array-inl.h"
#include "src/objects/js-array-inl.h"
#include "src/objects/js-collection-inl.h"
#include "src/objects/js-generator-inl.h"
#include "src/objects/js-regexp-inl.h"
#include "src/objects/literal-objects-inl.h"
#include "src/objects/microtask-inl.h"
#include "src/objects/module-inl.h"
#include "src/objects/promise-inl.h"
#include "src/objects/scope-info.h"
#include "src/unicode-cache.h"
#include "src/unicode-decoder.h"

namespace v8 {
namespace internal {

namespace {

int ComputeCodeObjectSize(const CodeDesc& desc) {
  bool has_unwinding_info = desc.unwinding_info != nullptr;
  DCHECK((has_unwinding_info && desc.unwinding_info_size > 0) ||
         (!has_unwinding_info && desc.unwinding_info_size == 0));
  int body_size = desc.instr_size;
  int unwinding_info_size_field_size = kInt64Size;
  if (has_unwinding_info) {
    body_size = RoundUp(body_size, kInt64Size) + desc.unwinding_info_size +
                unwinding_info_size_field_size;
  }
  int object_size = Code::SizeFor(RoundUp(body_size, kObjectAlignment));
  DCHECK(IsAligned(static_cast<intptr_t>(object_size), kCodeAlignment));
  return object_size;
}

void InitializeCode(Heap* heap, Handle<Code> code, int object_size,
                    const CodeDesc& desc, Code::Kind kind,
                    Handle<Object> self_ref, int32_t builtin_index,
                    Handle<ByteArray> source_position_table,
                    Handle<DeoptimizationData> deopt_data,
                    Handle<ByteArray> reloc_info,
                    Handle<CodeDataContainer> data_container, uint32_t stub_key,
                    bool is_turbofanned, int stack_slots,
                    int safepoint_table_offset, int handler_table_offset) {
  DCHECK(IsAligned(code->address(), kCodeAlignment));
  DCHECK(!heap->memory_allocator()->code_range()->valid() ||
         heap->memory_allocator()->code_range()->contains(code->address()) ||
         object_size <= heap->code_space()->AreaSize());

  bool has_unwinding_info = desc.unwinding_info != nullptr;

  code->set_raw_instruction_size(desc.instr_size);
  code->set_relocation_info(*reloc_info);
  const bool is_off_heap_trampoline = false;
  code->initialize_flags(kind, has_unwinding_info, is_turbofanned, stack_slots,
                         is_off_heap_trampoline);
  code->set_safepoint_table_offset(safepoint_table_offset);
  code->set_handler_table_offset(handler_table_offset);
  code->set_code_data_container(*data_container);
  code->set_deoptimization_data(*deopt_data);
  code->set_stub_key(stub_key);
  code->set_source_position_table(*source_position_table);
  code->set_constant_pool_offset(desc.instr_size - desc.constant_pool_size);
  code->set_builtin_index(builtin_index);

  // Allow self references to the created code object by patching the handle
  // to point to the newly allocated Code object.
  if (!self_ref.is_null()) {
    DCHECK(self_ref->IsOddball());
    DCHECK(Oddball::cast(*self_ref)->kind() == Oddball::kSelfReferenceMarker);
    if (FLAG_embedded_builtins) {
      auto builder = heap->isolate()->builtins_constants_table_builder();
      if (builder != nullptr) builder->PatchSelfReference(self_ref, code);
    }
    *(self_ref.location()) = *code;
  }

  // Migrate generated code.
  // The generated code can contain Object** values (typically from handles)
  // that are dereferenced during the copy to point directly to the actual heap
  // objects. These pointers can include references to the code object itself,
  // through the self_reference parameter.
  code->CopyFromNoFlush(heap, desc);

  code->clear_padding();

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) code->ObjectVerify(heap->isolate());
#endif
}

}  // namespace

HeapObject* Factory::AllocateRawWithImmortalMap(int size,
                                                PretenureFlag pretenure,
                                                Map* map,
                                                AllocationAlignment alignment) {
  HeapObject* result = isolate()->heap()->AllocateRawWithRetryOrFail(
      size, Heap::SelectSpace(pretenure), alignment);
  result->set_map_after_allocation(map, SKIP_WRITE_BARRIER);
  return result;
}

HeapObject* Factory::AllocateRawWithAllocationSite(
    Handle<Map> map, PretenureFlag pretenure,
    Handle<AllocationSite> allocation_site) {
  DCHECK(map->instance_type() != MAP_TYPE);
  int size = map->instance_size();
  if (!allocation_site.is_null()) size += AllocationMemento::kSize;
  AllocationSpace space = Heap::SelectSpace(pretenure);
  HeapObject* result =
      isolate()->heap()->AllocateRawWithRetryOrFail(size, space);
  WriteBarrierMode write_barrier_mode =
      space == NEW_SPACE ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
  result->set_map_after_allocation(*map, write_barrier_mode);
  if (!allocation_site.is_null()) {
    AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
        reinterpret_cast<Address>(result) + map->instance_size());
    InitializeAllocationMemento(alloc_memento, *allocation_site);
  }
  return result;
}

void Factory::InitializeAllocationMemento(AllocationMemento* memento,
                                          AllocationSite* allocation_site) {
  memento->set_map_after_allocation(*allocation_memento_map(),
                                    SKIP_WRITE_BARRIER);
  memento->set_allocation_site(allocation_site, SKIP_WRITE_BARRIER);
  if (FLAG_allocation_site_pretenuring) {
    allocation_site->IncrementMementoCreateCount();
  }
}

HeapObject* Factory::AllocateRawArray(int size, PretenureFlag pretenure) {
  AllocationSpace space = Heap::SelectSpace(pretenure);
  HeapObject* result =
      isolate()->heap()->AllocateRawWithRetryOrFail(size, space);
  if (size > kMaxRegularHeapObjectSize && FLAG_use_marking_progress_bar) {
    MemoryChunk* chunk = MemoryChunk::FromAddress(result->address());
    chunk->SetFlag<AccessMode::ATOMIC>(MemoryChunk::HAS_PROGRESS_BAR);
  }
  return result;
}

HeapObject* Factory::AllocateRawFixedArray(int length,
                                           PretenureFlag pretenure) {
  if (length < 0 || length > FixedArray::kMaxLength) {
    isolate()->heap()->FatalProcessOutOfMemory("invalid array length");
  }
  return AllocateRawArray(FixedArray::SizeFor(length), pretenure);
}

HeapObject* Factory::AllocateRawWeakArrayList(int capacity,
                                              PretenureFlag pretenure) {
  if (capacity < 0 || capacity > WeakArrayList::kMaxCapacity) {
    isolate()->heap()->FatalProcessOutOfMemory("invalid array length");
  }
  return AllocateRawArray(WeakArrayList::SizeForCapacity(capacity), pretenure);
}

HeapObject* Factory::New(Handle<Map> map, PretenureFlag pretenure) {
  DCHECK(map->instance_type() != MAP_TYPE);
  int size = map->instance_size();
  AllocationSpace space = Heap::SelectSpace(pretenure);
  HeapObject* result =
      isolate()->heap()->AllocateRawWithRetryOrFail(size, space);
  // New space objects are allocated white.
  WriteBarrierMode write_barrier_mode =
      space == NEW_SPACE ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
  result->set_map_after_allocation(*map, write_barrier_mode);
  return result;
}

Handle<HeapObject> Factory::NewFillerObject(int size, bool double_align,
                                            AllocationSpace space) {
  AllocationAlignment alignment = double_align ? kDoubleAligned : kWordAligned;
  Heap* heap = isolate()->heap();
  HeapObject* result = heap->AllocateRawWithRetryOrFail(size, space, alignment);
#ifdef DEBUG
  MemoryChunk* chunk = MemoryChunk::FromAddress(result->address());
  DCHECK(chunk->owner()->identity() == space);
#endif
  heap->CreateFillerObjectAt(result->address(), size, ClearRecordedSlots::kNo);
  return Handle<HeapObject>(result, isolate());
}

Handle<PrototypeInfo> Factory::NewPrototypeInfo() {
  Handle<PrototypeInfo> result =
      Handle<PrototypeInfo>::cast(NewStruct(PROTOTYPE_INFO_TYPE, TENURED));
  result->set_prototype_users(*empty_weak_array_list());
  result->set_registry_slot(PrototypeInfo::UNREGISTERED);
  result->set_bit_field(0);
  result->set_module_namespace(*undefined_value());
  return result;
}

Handle<EnumCache> Factory::NewEnumCache(Handle<FixedArray> keys,
                                        Handle<FixedArray> indices) {
  return Handle<EnumCache>::cast(NewTuple2(keys, indices, TENURED));
}

Handle<Tuple2> Factory::NewTuple2(Handle<Object> value1, Handle<Object> value2,
                                  PretenureFlag pretenure) {
  Handle<Tuple2> result =
      Handle<Tuple2>::cast(NewStruct(TUPLE2_TYPE, pretenure));
  result->set_value1(*value1);
  result->set_value2(*value2);
  return result;
}

Handle<Tuple3> Factory::NewTuple3(Handle<Object> value1, Handle<Object> value2,
                                  Handle<Object> value3,
                                  PretenureFlag pretenure) {
  Handle<Tuple3> result =
      Handle<Tuple3>::cast(NewStruct(TUPLE3_TYPE, pretenure));
  result->set_value1(*value1);
  result->set_value2(*value2);
  result->set_value3(*value3);
  return result;
}

Handle<ArrayBoilerplateDescription> Factory::NewArrayBoilerplateDescription(
    ElementsKind elements_kind, Handle<FixedArrayBase> constant_values) {
  Handle<ArrayBoilerplateDescription> result =
      Handle<ArrayBoilerplateDescription>::cast(
          NewStruct(ARRAY_BOILERPLATE_DESCRIPTION_TYPE, TENURED));
  result->set_elements_kind(elements_kind);
  result->set_constant_elements(*constant_values);
  return result;
}

Handle<TemplateObjectDescription> Factory::NewTemplateObjectDescription(
    Handle<FixedArray> raw_strings, Handle<FixedArray> cooked_strings) {
  DCHECK_EQ(raw_strings->length(), cooked_strings->length());
  DCHECK_LT(0, raw_strings->length());
  Handle<TemplateObjectDescription> result =
      Handle<TemplateObjectDescription>::cast(NewStruct(TUPLE2_TYPE, TENURED));
  result->set_raw_strings(*raw_strings);
  result->set_cooked_strings(*cooked_strings);
  return result;
}

Handle<Oddball> Factory::NewOddball(Handle<Map> map, const char* to_string,
                                    Handle<Object> to_number,
                                    const char* type_of, byte kind,
                                    PretenureFlag pretenure) {
  Handle<Oddball> oddball(Oddball::cast(New(map, pretenure)), isolate());
  Oddball::Initialize(isolate(), oddball, to_string, to_number, type_of, kind);
  return oddball;
}

Handle<Oddball> Factory::NewSelfReferenceMarker(PretenureFlag pretenure) {
  return NewOddball(self_reference_marker_map(), "self_reference_marker",
                    handle(Smi::FromInt(-1), isolate()), "undefined",
                    Oddball::kSelfReferenceMarker, pretenure);
}

Handle<PropertyArray> Factory::NewPropertyArray(int length,
                                                PretenureFlag pretenure) {
  DCHECK_LE(0, length);
  if (length == 0) return empty_property_array();
  HeapObject* result = AllocateRawFixedArray(length, pretenure);
  result->set_map_after_allocation(*property_array_map(), SKIP_WRITE_BARRIER);
  Handle<PropertyArray> array(PropertyArray::cast(result), isolate());
  array->initialize_length(length);
  MemsetPointer(array->data_start(), *undefined_value(), length);
  return array;
}

Handle<FixedArray> Factory::NewFixedArrayWithFiller(
    Heap::RootListIndex map_root_index, int length, Object* filler,
    PretenureFlag pretenure) {
  HeapObject* result = AllocateRawFixedArray(length, pretenure);
  DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
  Map* map = Map::cast(isolate()->heap()->root(map_root_index));
  result->set_map_after_allocation(map, SKIP_WRITE_BARRIER);
  Handle<FixedArray> array(FixedArray::cast(result), isolate());
  array->set_length(length);
  MemsetPointer(array->data_start(), filler, length);
  return array;
}

template <typename T>
Handle<T> Factory::NewFixedArrayWithMap(Heap::RootListIndex map_root_index,
                                        int length, PretenureFlag pretenure) {
  static_assert(std::is_base_of<FixedArray, T>::value,
                "T must be a descendant of FixedArray");
  // Zero-length case must be handled outside, where the knowledge about
  // the map is.
  DCHECK_LT(0, length);
  return Handle<T>::cast(NewFixedArrayWithFiller(
      map_root_index, length, *undefined_value(), pretenure));
}

template <typename T>
Handle<T> Factory::NewWeakFixedArrayWithMap(Heap::RootListIndex map_root_index,
                                            int length,
                                            PretenureFlag pretenure) {
  static_assert(std::is_base_of<WeakFixedArray, T>::value,
                "T must be a descendant of WeakFixedArray");

  // Zero-length case must be handled outside.
  DCHECK_LT(0, length);

  HeapObject* result =
      AllocateRawArray(WeakFixedArray::SizeFor(length), pretenure);
  Map* map = Map::cast(isolate()->heap()->root(map_root_index));
  result->set_map_after_allocation(map, SKIP_WRITE_BARRIER);

  Handle<WeakFixedArray> array(WeakFixedArray::cast(result), isolate());
  array->set_length(length);
  MemsetPointer(array->data_start(),
                HeapObjectReference::Strong(*undefined_value()), length);

  return Handle<T>::cast(array);
}

template Handle<FixedArray> Factory::NewFixedArrayWithMap<FixedArray>(
    Heap::RootListIndex, int, PretenureFlag);

template Handle<DescriptorArray>
Factory::NewWeakFixedArrayWithMap<DescriptorArray>(Heap::RootListIndex, int,
                                                   PretenureFlag);

Handle<FixedArray> Factory::NewFixedArray(int length, PretenureFlag pretenure) {
  DCHECK_LE(0, length);
  if (length == 0) return empty_fixed_array();
  return NewFixedArrayWithFiller(Heap::kFixedArrayMapRootIndex, length,
                                 *undefined_value(), pretenure);
}

Handle<WeakFixedArray> Factory::NewWeakFixedArray(int length,
                                                  PretenureFlag pretenure) {
  DCHECK_LE(0, length);
  if (length == 0) return empty_weak_fixed_array();
  HeapObject* result =
      AllocateRawArray(WeakFixedArray::SizeFor(length), pretenure);
  DCHECK(Heap::RootIsImmortalImmovable(Heap::kWeakFixedArrayMapRootIndex));
  result->set_map_after_allocation(*weak_fixed_array_map(), SKIP_WRITE_BARRIER);
  Handle<WeakFixedArray> array(WeakFixedArray::cast(result), isolate());
  array->set_length(length);
  MemsetPointer(array->data_start(),
                HeapObjectReference::Strong(*undefined_value()), length);
  return array;
}

MaybeHandle<FixedArray> Factory::TryNewFixedArray(int length,
                                                  PretenureFlag pretenure) {
  DCHECK_LE(0, length);
  if (length == 0) return empty_fixed_array();

  int size = FixedArray::SizeFor(length);
  AllocationSpace space = Heap::SelectSpace(pretenure);
  Heap* heap = isolate()->heap();
  AllocationResult allocation = heap->AllocateRaw(size, space);
  HeapObject* result = nullptr;
  if (!allocation.To(&result)) return MaybeHandle<FixedArray>();
  if (size > kMaxRegularHeapObjectSize && FLAG_use_marking_progress_bar) {
    MemoryChunk* chunk = MemoryChunk::FromAddress(result->address());
    chunk->SetFlag<AccessMode::ATOMIC>(MemoryChunk::HAS_PROGRESS_BAR);
  }
  result->set_map_after_allocation(*fixed_array_map(), SKIP_WRITE_BARRIER);
  Handle<FixedArray> array(FixedArray::cast(result), isolate());
  array->set_length(length);
  MemsetPointer(array->data_start(), ReadOnlyRoots(heap).undefined_value(),
                length);
  return array;
}

Handle<FixedArray> Factory::NewFixedArrayWithHoles(int length,
                                                   PretenureFlag pretenure) {
  DCHECK_LE(0, length);
  if (length == 0) return empty_fixed_array();
  return NewFixedArrayWithFiller(Heap::kFixedArrayMapRootIndex, length,
                                 *the_hole_value(), pretenure);
}

Handle<FixedArray> Factory::NewUninitializedFixedArray(
    int length, PretenureFlag pretenure) {
  DCHECK_LE(0, length);
  if (length == 0) return empty_fixed_array();

  // TODO(ulan): As an experiment this temporarily returns an initialized fixed
  // array. After getting canary/performance coverage, either remove the
  // function or revert to returning an uninitialized array.
  return NewFixedArrayWithFiller(Heap::kFixedArrayMapRootIndex, length,
                                 *undefined_value(), pretenure);
}

Handle<FeedbackVector> Factory::NewFeedbackVector(
    Handle<SharedFunctionInfo> shared, PretenureFlag pretenure) {
  int length = shared->feedback_metadata()->slot_count();
  DCHECK_LE(0, length);
  int size = FeedbackVector::SizeFor(length);

  HeapObject* result =
      AllocateRawWithImmortalMap(size, pretenure, *feedback_vector_map());
  Handle<FeedbackVector> vector(FeedbackVector::cast(result), isolate());
  vector->set_shared_function_info(*shared);
  vector->set_optimized_code_weak_or_smi(MaybeObject::FromSmi(Smi::FromEnum(
      FLAG_log_function_events ? OptimizationMarker::kLogFirstExecution
                               : OptimizationMarker::kNone)));
  vector->set_length(length);
  vector->set_invocation_count(0);
  vector->set_profiler_ticks(0);
  vector->set_deopt_count(0);
  // TODO(leszeks): Initialize based on the feedback metadata.
  MemsetPointer(vector->slots_start(),
                MaybeObject::FromObject(*undefined_value()), length);
  return vector;
}

Handle<ObjectBoilerplateDescription> Factory::NewObjectBoilerplateDescription(
    int boilerplate, int all_properties, int index_keys, bool has_seen_proto) {
  DCHECK_GE(boilerplate, 0);
  DCHECK_GE(all_properties, index_keys);
  DCHECK_GE(index_keys, 0);

  int backing_store_size =
      all_properties - index_keys - (has_seen_proto ? 1 : 0);
  DCHECK_GE(backing_store_size, 0);
  bool has_different_size_backing_store = boilerplate != backing_store_size;

  // Space for name and value for every boilerplate property + LiteralType
  // flag.
  int size =
      2 * boilerplate + ObjectBoilerplateDescription::kDescriptionStartIndex;

  if (has_different_size_backing_store) {
    // An extra entry for the backing store size.
    size++;
  }

  Handle<ObjectBoilerplateDescription> description =
      Handle<ObjectBoilerplateDescription>::cast(NewFixedArrayWithMap(
          Heap::kObjectBoilerplateDescriptionMapRootIndex, size, TENURED));

  if (has_different_size_backing_store) {
    DCHECK_IMPLIES((boilerplate == (all_properties - index_keys)),
                   has_seen_proto);
    description->set_backing_store_size(isolate(), backing_store_size);
  }

  description->set_flags(0);

  return description;
}

Handle<FixedArrayBase> Factory::NewFixedDoubleArray(int length,
                                                    PretenureFlag pretenure) {
  DCHECK_LE(0, length);
  if (length == 0) return empty_fixed_array();
  if (length > FixedDoubleArray::kMaxLength) {
    isolate()->heap()->FatalProcessOutOfMemory("invalid array length");
  }
  int size = FixedDoubleArray::SizeFor(length);
  Map* map = *fixed_double_array_map();
  HeapObject* result =
      AllocateRawWithImmortalMap(size, pretenure, map, kDoubleAligned);
  Handle<FixedDoubleArray> array(FixedDoubleArray::cast(result), isolate());
  array->set_length(length);
  return array;
}

Handle<FixedArrayBase> Factory::NewFixedDoubleArrayWithHoles(
    int length, PretenureFlag pretenure) {
  DCHECK_LE(0, length);
  Handle<FixedArrayBase> array = NewFixedDoubleArray(length, pretenure);
  if (length > 0) {
    Handle<FixedDoubleArray>::cast(array)->FillWithHoles(0, length);
  }
  return array;
}

Handle<FeedbackMetadata> Factory::NewFeedbackMetadata(int slot_count,
                                                      PretenureFlag tenure) {
  DCHECK_LE(0, slot_count);
  int size = FeedbackMetadata::SizeFor(slot_count);
  HeapObject* result =
      AllocateRawWithImmortalMap(size, tenure, *feedback_metadata_map());
  Handle<FeedbackMetadata> data(FeedbackMetadata::cast(result), isolate());
  data->set_slot_count(slot_count);

  // Initialize the data section to 0.
  int data_size = size - FeedbackMetadata::kHeaderSize;
  Address data_start = data->address() + FeedbackMetadata::kHeaderSize;
  memset(reinterpret_cast<byte*>(data_start), 0, data_size);
  // Fields have been zeroed out but not initialized, so this object will not
  // pass object verification at this point.
  return data;
}

Handle<FrameArray> Factory::NewFrameArray(int number_of_frames,
                                          PretenureFlag pretenure) {
  DCHECK_LE(0, number_of_frames);
  Handle<FixedArray> result = NewFixedArrayWithHoles(
      FrameArray::LengthFor(number_of_frames), pretenure);
  result->set(FrameArray::kFrameCountIndex, Smi::kZero);
  return Handle<FrameArray>::cast(result);
}

Handle<SmallOrderedHashSet> Factory::NewSmallOrderedHashSet(
    int capacity, PretenureFlag pretenure) {
  DCHECK_LE(0, capacity);
  CHECK_LE(capacity, SmallOrderedHashSet::kMaxCapacity);
  DCHECK_EQ(0, capacity % SmallOrderedHashSet::kLoadFactor);

  int size = SmallOrderedHashSet::SizeFor(capacity);
  Map* map = *small_ordered_hash_set_map();
  HeapObject* result = AllocateRawWithImmortalMap(size, pretenure, map);
  Handle<SmallOrderedHashSet> table(SmallOrderedHashSet::cast(result),
                                    isolate());
  table->Initialize(isolate(), capacity);
  return table;
}

Handle<SmallOrderedHashMap> Factory::NewSmallOrderedHashMap(
    int capacity, PretenureFlag pretenure) {
  DCHECK_LE(0, capacity);
  CHECK_LE(capacity, SmallOrderedHashMap::kMaxCapacity);
  DCHECK_EQ(0, capacity % SmallOrderedHashMap::kLoadFactor);

  int size = SmallOrderedHashMap::SizeFor(capacity);
  Map* map = *small_ordered_hash_map_map();
  HeapObject* result = AllocateRawWithImmortalMap(size, pretenure, map);
  Handle<SmallOrderedHashMap> table(SmallOrderedHashMap::cast(result),
                                    isolate());
  table->Initialize(isolate(), capacity);
  return table;
}

Handle<OrderedHashSet> Factory::NewOrderedHashSet() {
  return OrderedHashSet::Allocate(isolate(), OrderedHashSet::kMinCapacity);
}

Handle<OrderedHashMap> Factory::NewOrderedHashMap() {
  return OrderedHashMap::Allocate(isolate(), OrderedHashMap::kMinCapacity);
}

Handle<AccessorPair> Factory::NewAccessorPair() {
  Handle<AccessorPair> accessors =
      Handle<AccessorPair>::cast(NewStruct(ACCESSOR_PAIR_TYPE, TENURED));
  accessors->set_getter(*null_value(), SKIP_WRITE_BARRIER);
  accessors->set_setter(*null_value(), SKIP_WRITE_BARRIER);
  return accessors;
}

// Internalized strings are created in the old generation (data space).
Handle<String> Factory::InternalizeUtf8String(Vector<const char> string) {
  Utf8StringKey key(string, isolate()->heap()->HashSeed());
  return InternalizeStringWithKey(&key);
}

Handle<String> Factory::InternalizeOneByteString(Vector<const uint8_t> string) {
  OneByteStringKey key(string, isolate()->heap()->HashSeed());
  return InternalizeStringWithKey(&key);
}

Handle<String> Factory::InternalizeOneByteString(
    Handle<SeqOneByteString> string, int from, int length) {
  SeqOneByteSubStringKey key(isolate(), string, from, length);
  return InternalizeStringWithKey(&key);
}

Handle<String> Factory::InternalizeTwoByteString(Vector<const uc16> string) {
  TwoByteStringKey key(string, isolate()->heap()->HashSeed());
  return InternalizeStringWithKey(&key);
}

template <class StringTableKey>
Handle<String> Factory::InternalizeStringWithKey(StringTableKey* key) {
  return StringTable::LookupKey(isolate(), key);
}

MaybeHandle<String> Factory::NewStringFromOneByte(Vector<const uint8_t> string,
                                                  PretenureFlag pretenure) {
  int length = string.length();
  if (length == 0) return empty_string();
  if (length == 1) return LookupSingleCharacterStringFromCode(string[0]);
  Handle<SeqOneByteString> result;
  ASSIGN_RETURN_ON_EXCEPTION(isolate(), result,
                             NewRawOneByteString(string.length(), pretenure),
                             String);

  DisallowHeapAllocation no_gc;
  // Copy the characters into the new object.
  CopyChars(SeqOneByteString::cast(*result)->GetChars(), string.start(),
            length);
  return result;
}

MaybeHandle<String> Factory::NewStringFromUtf8(Vector<const char> string,
                                               PretenureFlag pretenure) {
  // Check for ASCII first since this is the common case.
  const char* ascii_data = string.start();
  int length = string.length();
  int non_ascii_start = String::NonAsciiStart(ascii_data, length);
  if (non_ascii_start >= length) {
    // If the string is ASCII, we do not need to convert the characters
    // since UTF8 is backwards compatible with ASCII.
    return NewStringFromOneByte(Vector<const uint8_t>::cast(string), pretenure);
  }

  // Non-ASCII and we need to decode.
  auto non_ascii = string.SubVector(non_ascii_start, length);
  Access<UnicodeCache::Utf8Decoder> decoder(
      isolate()->unicode_cache()->utf8_decoder());
  decoder->Reset(non_ascii);

  int utf16_length = static_cast<int>(decoder->Utf16Length());
  DCHECK_GT(utf16_length, 0);

  // Allocate string.
  Handle<SeqTwoByteString> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate(), result,
      NewRawTwoByteString(non_ascii_start + utf16_length, pretenure), String);

  // Copy ASCII portion.
  uint16_t* data = result->GetChars();
  for (int i = 0; i < non_ascii_start; i++) {
    *data++ = *ascii_data++;
  }

  // Now write the remainder.
  decoder->WriteUtf16(data, utf16_length, non_ascii);
  return result;
}

MaybeHandle<String> Factory::NewStringFromUtf8SubString(
    Handle<SeqOneByteString> str, int begin, int length,
    PretenureFlag pretenure) {
  const char* ascii_data =
      reinterpret_cast<const char*>(str->GetChars() + begin);
  int non_ascii_start = String::NonAsciiStart(ascii_data, length);
  if (non_ascii_start >= length) {
    // If the string is ASCII, we can just make a substring.
    // TODO(v8): the pretenure flag is ignored in this case.
    return NewSubString(str, begin, begin + length);
  }

  // Non-ASCII and we need to decode.
  auto non_ascii = Vector<const char>(ascii_data + non_ascii_start,
                                      length - non_ascii_start);
  Access<UnicodeCache::Utf8Decoder> decoder(
      isolate()->unicode_cache()->utf8_decoder());
  decoder->Reset(non_ascii);

  int utf16_length = static_cast<int>(decoder->Utf16Length());
  DCHECK_GT(utf16_length, 0);

  // Allocate string.
  Handle<SeqTwoByteString> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate(), result,
      NewRawTwoByteString(non_ascii_start + utf16_length, pretenure), String);

  // Update pointer references, since the original string may have moved after
  // allocation.
  ascii_data = reinterpret_cast<const char*>(str->GetChars() + begin);
  non_ascii = Vector<const char>(ascii_data + non_ascii_start,
                                 length - non_ascii_start);

  // Copy ASCII portion.
  uint16_t* data = result->GetChars();
  for (int i = 0; i < non_ascii_start; i++) {
    *data++ = *ascii_data++;
  }

  // Now write the remainder.
  decoder->WriteUtf16(data, utf16_length, non_ascii);
  return result;
}

MaybeHandle<String> Factory::NewStringFromTwoByte(const uc16* string,
                                                  int length,
                                                  PretenureFlag pretenure) {
  if (length == 0) return empty_string();
  if (String::IsOneByte(string, length)) {
    if (length == 1) return LookupSingleCharacterStringFromCode(string[0]);
    Handle<SeqOneByteString> result;
    ASSIGN_RETURN_ON_EXCEPTION(isolate(), result,
                               NewRawOneByteString(length, pretenure), String);
    CopyChars(result->GetChars(), string, length);
    return result;
  } else {
    Handle<SeqTwoByteString> result;
    ASSIGN_RETURN_ON_EXCEPTION(isolate(), result,
                               NewRawTwoByteString(length, pretenure), String);
    CopyChars(result->GetChars(), string, length);
    return result;
  }
}

MaybeHandle<String> Factory::NewStringFromTwoByte(Vector<const uc16> string,
                                                  PretenureFlag pretenure) {
  return NewStringFromTwoByte(string.start(), string.length(), pretenure);
}

MaybeHandle<String> Factory::NewStringFromTwoByte(
    const ZoneVector<uc16>* string, PretenureFlag pretenure) {
  return NewStringFromTwoByte(string->data(), static_cast<int>(string->size()),
                              pretenure);
}

namespace {

bool inline IsOneByte(Vector<const char> str, int chars) {
  // TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported?
  return chars == str.length();
}

bool inline IsOneByte(Handle<String> str) {
  return str->IsOneByteRepresentation();
}

inline void WriteOneByteData(Vector<const char> vector, uint8_t* chars,
                             int len) {
  // Only works for one byte strings.
  DCHECK(vector.length() == len);
  MemCopy(chars, vector.start(), len);
}

inline void WriteTwoByteData(Vector<const char> vector, uint16_t* chars,
                             int len) {
  unibrow::Utf8Iterator it = unibrow::Utf8Iterator(vector);
  while (!it.Done()) {
    DCHECK_GT(len, 0);
    len -= 1;

    uint16_t c = *it;
    ++it;
    DCHECK_NE(unibrow::Utf8::kBadChar, c);
    *chars++ = c;
  }
  DCHECK_EQ(len, 0);
}

inline void WriteOneByteData(Handle<String> s, uint8_t* chars, int len) {
  DCHECK(s->length() == len);
  String::WriteToFlat(*s, chars, 0, len);
}

inline void WriteTwoByteData(Handle<String> s, uint16_t* chars, int len) {
  DCHECK(s->length() == len);
  String::WriteToFlat(*s, chars, 0, len);
}

}  // namespace

Handle<SeqOneByteString> Factory::AllocateRawOneByteInternalizedString(
    int length, uint32_t hash_field) {
  CHECK_GE(String::kMaxLength, length);
  // The canonical empty_string is the only zero-length string we allow.
  DCHECK_IMPLIES(
      length == 0,
      isolate()->heap()->roots_[Heap::kempty_stringRootIndex] == nullptr);

  Map* map = *one_byte_internalized_string_map();
  int size = SeqOneByteString::SizeFor(length);
  HeapObject* result = AllocateRawWithImmortalMap(
      size,
      isolate()->heap()->CanAllocateInReadOnlySpace() ? TENURED_READ_ONLY
                                                      : TENURED,
      map);
  Handle<SeqOneByteString> answer(SeqOneByteString::cast(result), isolate());
  answer->set_length(length);
  answer->set_hash_field(hash_field);
  DCHECK_EQ(size, answer->Size());
  return answer;
}

Handle<String> Factory::AllocateTwoByteInternalizedString(
    Vector<const uc16> str, uint32_t hash_field) {
  CHECK_GE(String::kMaxLength, str.length());
  DCHECK_NE(0, str.length());  // Use Heap::empty_string() instead.

  Map* map = *internalized_string_map();
  int size = SeqTwoByteString::SizeFor(str.length());
  HeapObject* result = AllocateRawWithImmortalMap(size, TENURED, map);
  Handle<SeqTwoByteString> answer(SeqTwoByteString::cast(result), isolate());
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);
  DCHECK_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->GetChars(), str.start(), str.length() * kUC16Size);

  return answer;
}

template <bool is_one_byte, typename T>
Handle<String> Factory::AllocateInternalizedStringImpl(T t, int chars,
                                                       uint32_t hash_field) {
  DCHECK_LE(0, chars);
  DCHECK_GE(String::kMaxLength, chars);

  // Compute map and object size.
  int size;
  Map* map;
  if (is_one_byte) {
    map = *one_byte_internalized_string_map();
    size = SeqOneByteString::SizeFor(chars);
  } else {
    map = *internalized_string_map();
    size = SeqTwoByteString::SizeFor(chars);
  }

  HeapObject* result = AllocateRawWithImmortalMap(
      size,
      isolate()->heap()->CanAllocateInReadOnlySpace() ? TENURED_READ_ONLY
                                                      : TENURED,
      map);
  Handle<String> answer(String::cast(result), isolate());
  answer->set_length(chars);
  answer->set_hash_field(hash_field);
  DCHECK_EQ(size, answer->Size());

  if (is_one_byte) {
    WriteOneByteData(t, SeqOneByteString::cast(*answer)->GetChars(), chars);
  } else {
    WriteTwoByteData(t, SeqTwoByteString::cast(*answer)->GetChars(), chars);
  }
  return answer;
}

Handle<String> Factory::NewInternalizedStringFromUtf8(Vector<const char> str,
                                                      int chars,
                                                      uint32_t hash_field) {
  if (IsOneByte(str, chars)) {
    Handle<SeqOneByteString> result =
        AllocateRawOneByteInternalizedString(str.length(), hash_field);
    MemCopy(result->GetChars(), str.start(), str.length());
    return result;
  }
  return AllocateInternalizedStringImpl<false>(str, chars, hash_field);
}

Handle<String> Factory::NewOneByteInternalizedString(Vector<const uint8_t> str,
                                                     uint32_t hash_field) {
  Handle<SeqOneByteString> result =
      AllocateRawOneByteInternalizedString(str.length(), hash_field);
  MemCopy(result->GetChars(), str.start(), str.length());
  return result;
}

Handle<String> Factory::NewOneByteInternalizedSubString(
    Handle<SeqOneByteString> string, int offset, int length,
    uint32_t hash_field) {
  Handle<SeqOneByteString> result =
      AllocateRawOneByteInternalizedString(length, hash_field);
  MemCopy(result->GetChars(), string->GetChars() + offset, length);
  return result;
}

Handle<String> Factory::NewTwoByteInternalizedString(Vector<const uc16> str,
                                                     uint32_t hash_field) {
  return AllocateTwoByteInternalizedString(str, hash_field);
}

Handle<String> Factory::NewInternalizedStringImpl(Handle<String> string,
                                                  int chars,
                                                  uint32_t hash_field) {
  if (IsOneByte(string)) {
    return AllocateInternalizedStringImpl<true>(string, chars, hash_field);
  }
  return AllocateInternalizedStringImpl<false>(string, chars, hash_field);
}

namespace {

MaybeHandle<Map> GetInternalizedStringMap(Factory* f, Handle<String> string) {
  switch (string->map()->instance_type()) {
    case STRING_TYPE:
      return f->internalized_string_map();
    case ONE_BYTE_STRING_TYPE:
      return f->one_byte_internalized_string_map();
    case EXTERNAL_STRING_TYPE:
      return f->external_internalized_string_map();
    case EXTERNAL_ONE_BYTE_STRING_TYPE:
      return f->external_one_byte_internalized_string_map();
    case EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE:
      return f->external_internalized_string_with_one_byte_data_map();
    case SHORT_EXTERNAL_STRING_TYPE:
      return f->short_external_internalized_string_map();
    case SHORT_EXTERNAL_ONE_BYTE_STRING_TYPE:
      return f->short_external_one_byte_internalized_string_map();
    case SHORT_EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE:
      return f->short_external_internalized_string_with_one_byte_data_map();
    default:
      return MaybeHandle<Map>();  // No match found.
  }
}

}  // namespace

MaybeHandle<Map> Factory::InternalizedStringMapForString(
    Handle<String> string) {
  // If the string is in new space it cannot be used as internalized.
  if (Heap::InNewSpace(*string)) return MaybeHandle<Map>();

  return GetInternalizedStringMap(this, string);
}

template <class StringClass>
Handle<StringClass> Factory::InternalizeExternalString(Handle<String> string) {
  Handle<StringClass> cast_string = Handle<StringClass>::cast(string);
  Handle<Map> map = GetInternalizedStringMap(this, string).ToHandleChecked();
  Handle<StringClass> external_string(StringClass::cast(New(map, TENURED)),
                                      isolate());
  external_string->set_length(cast_string->length());
  external_string->set_hash_field(cast_string->hash_field());
  external_string->SetResource(isolate(), nullptr);
  isolate()->heap()->RegisterExternalString(*external_string);
  return external_string;
}

template Handle<ExternalOneByteString>
Factory::InternalizeExternalString<ExternalOneByteString>(Handle<String>);
template Handle<ExternalTwoByteString>
Factory::InternalizeExternalString<ExternalTwoByteString>(Handle<String>);

MaybeHandle<SeqOneByteString> Factory::NewRawOneByteString(
    int length, PretenureFlag pretenure) {
  if (length > String::kMaxLength || length < 0) {
    THROW_NEW_ERROR(isolate(), NewInvalidStringLengthError(), SeqOneByteString);
  }
  DCHECK_GT(length, 0);  // Use Factory::empty_string() instead.
  int size = SeqOneByteString::SizeFor(length);
  DCHECK_GE(SeqOneByteString::kMaxSize, size);

  HeapObject* result =
      AllocateRawWithImmortalMap(size, pretenure, *one_byte_string_map());
  Handle<SeqOneByteString> string(SeqOneByteString::cast(result), isolate());
  string->set_length(length);
  string->set_hash_field(String::kEmptyHashField);
  DCHECK_EQ(size, string->Size());
  return string;
}

MaybeHandle<SeqTwoByteString> Factory::NewRawTwoByteString(
    int length, PretenureFlag pretenure) {
  if (length > String::kMaxLength || length < 0) {
    THROW_NEW_ERROR(isolate(), NewInvalidStringLengthError(), SeqTwoByteString);
  }
  DCHECK_GT(length, 0);  // Use Factory::empty_string() instead.
  int size = SeqTwoByteString::SizeFor(length);
  DCHECK_GE(SeqTwoByteString::kMaxSize, size);

  HeapObject* result =
      AllocateRawWithImmortalMap(size, pretenure, *string_map());
  Handle<SeqTwoByteString> string(SeqTwoByteString::cast(result), isolate());
  string->set_length(length);
  string->set_hash_field(String::kEmptyHashField);
  DCHECK_EQ(size, string->Size());
  return string;
}

Handle<String> Factory::LookupSingleCharacterStringFromCode(uint32_t code) {
  if (code <= String::kMaxOneByteCharCodeU) {
    {
      DisallowHeapAllocation no_allocation;
      Object* value = single_character_string_cache()->get(code);
      if (value != *undefined_value()) {
        return handle(String::cast(value), isolate());
      }
    }
    uint8_t buffer[1];
    buffer[0] = static_cast<uint8_t>(code);
    Handle<String> result =
        InternalizeOneByteString(Vector<const uint8_t>(buffer, 1));
    single_character_string_cache()->set(code, *result);
    return result;
  }
  DCHECK_LE(code, String::kMaxUtf16CodeUnitU);

  Handle<SeqTwoByteString> result = NewRawTwoByteString(1).ToHandleChecked();
  result->SeqTwoByteStringSet(0, static_cast<uint16_t>(code));
  return result;
}

// Returns true for a character in a range. Both limits are inclusive.
static inline bool Between(uint32_t character, uint32_t from, uint32_t to) {
  // This makes use of the unsigned wraparound.
  return character - from <= to - from;
}

static inline Handle<String> MakeOrFindTwoCharacterString(Isolate* isolate,
                                                          uint16_t c1,
                                                          uint16_t c2) {
  // Numeric strings have a different hash algorithm not known by
  // LookupTwoCharsStringIfExists, so we skip this step for such strings.
  if (!Between(c1, '0', '9') || !Between(c2, '0', '9')) {
    Handle<String> result;
    if (StringTable::LookupTwoCharsStringIfExists(isolate, c1, c2)
            .ToHandle(&result)) {
      return result;
    }
  }

  // Now we know the length is 2, we might as well make use of that fact
  // when building the new string.
  if (static_cast<unsigned>(c1 | c2) <= String::kMaxOneByteCharCodeU) {
    // We can do this.
    DCHECK(base::bits::IsPowerOfTwo(String::kMaxOneByteCharCodeU +
                                    1));  // because of this.
    Handle<SeqOneByteString> str =
        isolate->factory()->NewRawOneByteString(2).ToHandleChecked();
    uint8_t* dest = str->GetChars();
    dest[0] = static_cast<uint8_t>(c1);
    dest[1] = static_cast<uint8_t>(c2);
    return str;
  } else {
    Handle<SeqTwoByteString> str =
        isolate->factory()->NewRawTwoByteString(2).ToHandleChecked();
    uc16* dest = str->GetChars();
    dest[0] = c1;
    dest[1] = c2;
    return str;
  }
}

template <typename SinkChar, typename StringType>
Handle<String> ConcatStringContent(Handle<StringType> result,
                                   Handle<String> first,
                                   Handle<String> second) {
  DisallowHeapAllocation pointer_stays_valid;
  SinkChar* sink = result->GetChars();
  String::WriteToFlat(*first, sink, 0, first->length());
  String::WriteToFlat(*second, sink + first->length(), 0, second->length());
  return result;
}

MaybeHandle<String> Factory::NewConsString(Handle<String> left,
                                           Handle<String> right) {
  if (left->IsThinString()) {
    left = handle(Handle<ThinString>::cast(left)->actual(), isolate());
  }
  if (right->IsThinString()) {
    right = handle(Handle<ThinString>::cast(right)->actual(), isolate());
  }
  int left_length = left->length();
  if (left_length == 0) return right;
  int right_length = right->length();
  if (right_length == 0) return left;

  int length = left_length + right_length;

  if (length == 2) {
    uint16_t c1 = left->Get(0);
    uint16_t c2 = right->Get(0);
    return MakeOrFindTwoCharacterString(isolate(), c1, c2);
  }

  // Make sure that an out of memory exception is thrown if the length
  // of the new cons string is too large.
  if (length > String::kMaxLength || length < 0) {
    THROW_NEW_ERROR(isolate(), NewInvalidStringLengthError(), String);
  }

  bool left_is_one_byte = left->IsOneByteRepresentation();
  bool right_is_one_byte = right->IsOneByteRepresentation();
  bool is_one_byte = left_is_one_byte && right_is_one_byte;
  bool is_one_byte_data_in_two_byte_string = false;
  if (!is_one_byte) {
    // At least one of the strings uses two-byte representation so we
    // can't use the fast case code for short one-byte strings below, but
    // we can try to save memory if all chars actually fit in one-byte.
    is_one_byte_data_in_two_byte_string =
        left->HasOnlyOneByteChars() && right->HasOnlyOneByteChars();
    if (is_one_byte_data_in_two_byte_string) {
      isolate()->counters()->string_add_runtime_ext_to_one_byte()->Increment();
    }
  }

  // If the resulting string is small make a flat string.
  if (length < ConsString::kMinLength) {
    // Note that neither of the two inputs can be a slice because:
    STATIC_ASSERT(ConsString::kMinLength <= SlicedString::kMinLength);
    DCHECK(left->IsFlat());
    DCHECK(right->IsFlat());

    STATIC_ASSERT(ConsString::kMinLength <= String::kMaxLength);
    if (is_one_byte) {
      Handle<SeqOneByteString> result =
          NewRawOneByteString(length).ToHandleChecked();
      DisallowHeapAllocation no_gc;
      uint8_t* dest = result->GetChars();
      // Copy left part.
      const uint8_t* src =
          left->IsExternalString()
              ? Handle<ExternalOneByteString>::cast(left)->GetChars()
              : Handle<SeqOneByteString>::cast(left)->GetChars();
      for (int i = 0; i < left_length; i++) *dest++ = src[i];
      // Copy right part.
      src = right->IsExternalString()
                ? Handle<ExternalOneByteString>::cast(right)->GetChars()
                : Handle<SeqOneByteString>::cast(right)->GetChars();
      for (int i = 0; i < right_length; i++) *dest++ = src[i];
      return result;
    }

    return (is_one_byte_data_in_two_byte_string)
               ? ConcatStringContent<uint8_t>(
                     NewRawOneByteString(length).ToHandleChecked(), left, right)
               : ConcatStringContent<uc16>(
                     NewRawTwoByteString(length).ToHandleChecked(), left,
                     right);
  }

  bool one_byte = (is_one_byte || is_one_byte_data_in_two_byte_string);
  return NewConsString(left, right, length, one_byte);
}

Handle<String> Factory::NewConsString(Handle<String> left, Handle<String> right,
                                      int length, bool one_byte) {
  DCHECK(!left->IsThinString());
  DCHECK(!right->IsThinString());
  DCHECK_GE(length, ConsString::kMinLength);
  DCHECK_LE(length, String::kMaxLength);

  Handle<ConsString> result(
      ConsString::cast(one_byte ? New(cons_one_byte_string_map(), NOT_TENURED)
                                : New(cons_string_map(), NOT_TENURED)),
      isolate());

  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);

  result->set_hash_field(String::kEmptyHashField);
  result->set_length(length);
  result->set_first(isolate(), *left, mode);
  result->set_second(isolate(), *right, mode);
  return result;
}

Handle<String> Factory::NewSurrogatePairString(uint16_t lead, uint16_t trail) {
  DCHECK_GE(lead, 0xD800);
  DCHECK_LE(lead, 0xDBFF);
  DCHECK_GE(trail, 0xDC00);
  DCHECK_LE(trail, 0xDFFF);

  Handle<SeqTwoByteString> str =
      isolate()->factory()->NewRawTwoByteString(2).ToHandleChecked();
  uc16* dest = str->GetChars();
  dest[0] = lead;
  dest[1] = trail;
  return str;
}

Handle<String> Factory::NewProperSubString(Handle<String> str, int begin,
                                           int end) {
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) str->StringVerify(isolate());
#endif
  DCHECK(begin > 0 || end < str->length());

  str = String::Flatten(isolate(), str);

  int length = end - begin;
  if (length <= 0) return empty_string();
  if (length == 1) {
    return LookupSingleCharacterStringFromCode(str->Get(begin));
  }
  if (length == 2) {
    // Optimization for 2-byte strings often used as keys in a decompression
    // dictionary. Check whether we already have the string in the string
    // table to prevent creation of many unnecessary strings.
    uint16_t c1 = str->Get(begin);
    uint16_t c2 = str->Get(begin + 1);
    return MakeOrFindTwoCharacterString(isolate(), c1, c2);
  }

  if (!FLAG_string_slices || length < SlicedString::kMinLength) {
    if (str->IsOneByteRepresentation()) {
      Handle<SeqOneByteString> result =
          NewRawOneByteString(length).ToHandleChecked();
      uint8_t* dest = result->GetChars();
      DisallowHeapAllocation no_gc;
      String::WriteToFlat(*str, dest, begin, end);
      return result;
    } else {
      Handle<SeqTwoByteString> result =
          NewRawTwoByteString(length).ToHandleChecked();
      uc16* dest = result->GetChars();
      DisallowHeapAllocation no_gc;
      String::WriteToFlat(*str, dest, begin, end);
      return result;
    }
  }

  int offset = begin;

  if (str->IsSlicedString()) {
    Handle<SlicedString> slice = Handle<SlicedString>::cast(str);
    str = Handle<String>(slice->parent(), isolate());
    offset += slice->offset();
  }
  if (str->IsThinString()) {
    Handle<ThinString> thin = Handle<ThinString>::cast(str);
    str = handle(thin->actual(), isolate());
  }

  DCHECK(str->IsSeqString() || str->IsExternalString());
  Handle<Map> map = str->IsOneByteRepresentation()
                        ? sliced_one_byte_string_map()
                        : sliced_string_map();
  Handle<SlicedString> slice(SlicedString::cast(New(map, NOT_TENURED)),
                             isolate());

  slice->set_hash_field(String::kEmptyHashField);
  slice->set_length(length);
  slice->set_parent(isolate(), *str);
  slice->set_offset(offset);
  return slice;
}

MaybeHandle<String> Factory::NewExternalStringFromOneByte(
    const ExternalOneByteString::Resource* resource) {
  size_t length = resource->length();
  if (length > static_cast<size_t>(String::kMaxLength)) {
    THROW_NEW_ERROR(isolate(), NewInvalidStringLengthError(), String);
  }
  if (length == 0) return empty_string();

  Handle<Map> map;
  if (resource->IsCompressible()) {
    // TODO(hajimehoshi): Rename this to 'uncached_external_one_byte_string_map'
    map = short_external_one_byte_string_map();
  } else {
    map = external_one_byte_string_map();
  }
  Handle<ExternalOneByteString> external_string(
      ExternalOneByteString::cast(New(map, TENURED)), isolate());
  external_string->set_length(static_cast<int>(length));
  external_string->set_hash_field(String::kEmptyHashField);
  external_string->SetResource(isolate(), resource);
  isolate()->heap()->RegisterExternalString(*external_string);

  return external_string;
}

MaybeHandle<String> Factory::NewExternalStringFromTwoByte(
    const ExternalTwoByteString::Resource* resource) {
  size_t length = resource->length();
  if (length > static_cast<size_t>(String::kMaxLength)) {
    THROW_NEW_ERROR(isolate(), NewInvalidStringLengthError(), String);
  }
  if (length == 0) return empty_string();

  // For small strings we check whether the resource contains only
  // one byte characters. If yes, we use a different string map.
  static const size_t kOneByteCheckLengthLimit = 32;
  bool is_one_byte =
      length <= kOneByteCheckLengthLimit &&
      String::IsOneByte(resource->data(), static_cast<int>(length));
  Handle<Map> map;
  if (resource->IsCompressible()) {
    // TODO(hajimehoshi): Rename these to 'uncached_external_string_...'.
    map = is_one_byte ? short_external_string_with_one_byte_data_map()
                      : short_external_string_map();
  } else {
    map = is_one_byte ? external_string_with_one_byte_data_map()
                      : external_string_map();
  }
  Handle<ExternalTwoByteString> external_string(
      ExternalTwoByteString::cast(New(map, TENURED)), isolate());
  external_string->set_length(static_cast<int>(length));
  external_string->set_hash_field(String::kEmptyHashField);
  external_string->SetResource(isolate(), resource);
  isolate()->heap()->RegisterExternalString(*external_string);

  return external_string;
}

Handle<ExternalOneByteString> Factory::NewNativeSourceString(
    const ExternalOneByteString::Resource* resource) {
  size_t length = resource->length();
  DCHECK_LE(length, static_cast<size_t>(String::kMaxLength));

  Handle<Map> map = native_source_string_map();
  Handle<ExternalOneByteString> external_string(
      ExternalOneByteString::cast(New(map, TENURED)), isolate());
  external_string->set_length(static_cast<int>(length));
  external_string->set_hash_field(String::kEmptyHashField);
  external_string->SetResource(isolate(), resource);
  isolate()->heap()->RegisterExternalString(*external_string);

  return external_string;
}

Handle<JSStringIterator> Factory::NewJSStringIterator(Handle<String> string) {
  Handle<Map> map(isolate()->native_context()->string_iterator_map(),
                  isolate());
  Handle<String> flat_string = String::Flatten(isolate(), string);
  Handle<JSStringIterator> iterator =
      Handle<JSStringIterator>::cast(NewJSObjectFromMap(map));
  iterator->set_string(*flat_string);
  iterator->set_index(0);

  return iterator;
}

Handle<Symbol> Factory::NewSymbol(PretenureFlag flag) {
  DCHECK(flag != NOT_TENURED);
  // Statically ensure that it is safe to allocate symbols in paged spaces.
  STATIC_ASSERT(Symbol::kSize <= kMaxRegularHeapObjectSize);

  HeapObject* result =
      AllocateRawWithImmortalMap(Symbol::kSize, flag, *symbol_map());

  // Generate a random hash value.
  int hash = isolate()->GenerateIdentityHash(Name::kHashBitMask);

  Handle<Symbol> symbol(Symbol::cast(result), isolate());
  symbol->set_hash_field(Name::kIsNotArrayIndexMask |
                         (hash << Name::kHashShift));
  symbol->set_name(*undefined_value());
  symbol->set_flags(0);
  DCHECK(!symbol->is_private());
  return symbol;
}

Handle<Symbol> Factory::NewPrivateSymbol(PretenureFlag flag) {
  DCHECK(flag != NOT_TENURED);
  Handle<Symbol> symbol = NewSymbol(flag);
  symbol->set_is_private(true);
  return symbol;
}

Handle<Symbol> Factory::NewPrivateFieldSymbol() {
  Handle<Symbol> symbol = NewSymbol();
  symbol->set_is_private_field();
  return symbol;
}

Handle<NativeContext> Factory::NewNativeContext() {
  Handle<NativeContext> context = NewFixedArrayWithMap<NativeContext>(
      Heap::kNativeContextMapRootIndex, Context::NATIVE_CONTEXT_SLOTS, TENURED);
  context->set_native_context(*context);
  context->set_errors_thrown(Smi::kZero);
  context->set_math_random_index(Smi::kZero);
  context->set_serialized_objects(*empty_fixed_array());
  return context;
}

Handle<Context> Factory::NewScriptContext(Handle<NativeContext> outer,
                                          Handle<ScopeInfo> scope_info) {
  DCHECK_EQ(scope_info->scope_type(), SCRIPT_SCOPE);
  Handle<Context> context = NewFixedArrayWithMap<Context>(
      Heap::kScriptContextMapRootIndex, scope_info->ContextLength(), TENURED);
  context->set_scope_info(*scope_info);
  context->set_previous(*outer);
  context->set_extension(*the_hole_value());
  context->set_native_context(*outer);
  DCHECK(context->IsScriptContext());
  return context;
}

Handle<ScriptContextTable> Factory::NewScriptContextTable() {
  Handle<ScriptContextTable> context_table =
      NewFixedArrayWithMap<ScriptContextTable>(
          Heap::kScriptContextTableMapRootIndex,
          ScriptContextTable::kMinLength);
  context_table->set_used(0);
  return context_table;
}

Handle<Context> Factory::NewModuleContext(Handle<Module> module,
                                          Handle<NativeContext> outer,
                                          Handle<ScopeInfo> scope_info) {
  DCHECK_EQ(scope_info->scope_type(), MODULE_SCOPE);
  Handle<Context> context = NewFixedArrayWithMap<Context>(
      Heap::kModuleContextMapRootIndex, scope_info->ContextLength(), TENURED);
  context->set_scope_info(*scope_info);
  context->set_previous(*outer);
  context->set_extension(*module);
  context->set_native_context(*outer);
  DCHECK(context->IsModuleContext());
  return context;
}

Handle<Context> Factory::NewFunctionContext(Handle<Context> outer,
                                            Handle<ScopeInfo> scope_info) {
  int length = scope_info->ContextLength();
  DCHECK_LE(Context::MIN_CONTEXT_SLOTS, length);
  Heap::RootListIndex mapRootIndex;
  switch (scope_info->scope_type()) {
    case EVAL_SCOPE:
      mapRootIndex = Heap::kEvalContextMapRootIndex;
      break;
    case FUNCTION_SCOPE:
      mapRootIndex = Heap::kFunctionContextMapRootIndex;
      break;
    default:
      UNREACHABLE();
  }
  Handle<Context> context = NewFixedArrayWithMap<Context>(mapRootIndex, length);
  context->set_scope_info(*scope_info);
  context->set_previous(*outer);
  context->set_extension(*the_hole_value());
  context->set_native_context(outer->native_context());
  return context;
}

Handle<Context> Factory::NewCatchContext(Handle<Context> previous,
                                         Handle<ScopeInfo> scope_info,
                                         Handle<Object> thrown_object) {
  STATIC_ASSERT(Context::MIN_CONTEXT_SLOTS == Context::THROWN_OBJECT_INDEX);
  Handle<Context> context = NewFixedArrayWithMap<Context>(
      Heap::kCatchContextMapRootIndex, Context::MIN_CONTEXT_SLOTS + 1);
  context->set_scope_info(*scope_info);
  context->set_previous(*previous);
  context->set_extension(*the_hole_value());
  context->set_native_context(previous->native_context());
  context->set(Context::THROWN_OBJECT_INDEX, *thrown_object);
  return context;
}

Handle<Context> Factory::NewDebugEvaluateContext(Handle<Context> previous,
                                                 Handle<ScopeInfo> scope_info,
                                                 Handle<JSReceiver> extension,
                                                 Handle<Context> wrapped,
                                                 Handle<StringSet> whitelist) {
  STATIC_ASSERT(Context::WHITE_LIST_INDEX == Context::MIN_CONTEXT_SLOTS + 1);
  DCHECK(scope_info->IsDebugEvaluateScope());
  Handle<HeapObject> ext = extension.is_null()
                               ? Handle<HeapObject>::cast(the_hole_value())
                               : Handle<HeapObject>::cast(extension);
  Handle<Context> c = NewFixedArrayWithMap<Context>(
      Heap::kDebugEvaluateContextMapRootIndex, Context::MIN_CONTEXT_SLOTS + 2);
  c->set_scope_info(*scope_info);
  c->set_previous(*previous);
  c->set_native_context(previous->native_context());
  c->set_extension(*ext);
  if (!wrapped.is_null()) c->set(Context::WRAPPED_CONTEXT_INDEX, *wrapped);
  if (!whitelist.is_null()) c->set(Context::WHITE_LIST_INDEX, *whitelist);
  return c;
}

Handle<Context> Factory::NewWithContext(Handle<Context> previous,
                                        Handle<ScopeInfo> scope_info,
                                        Handle<JSReceiver> extension) {
  Handle<Context> context = NewFixedArrayWithMap<Context>(
      Heap::kWithContextMapRootIndex, Context::MIN_CONTEXT_SLOTS);
  context->set_scope_info(*scope_info);
  context->set_previous(*previous);
  context->set_extension(*extension);
  context->set_native_context(previous->native_context());
  return context;
}

Handle<Context> Factory::NewBlockContext(Handle<Context> previous,
                                         Handle<ScopeInfo> scope_info) {
  DCHECK_EQ(scope_info->scope_type(), BLOCK_SCOPE);
  Handle<Context> context = NewFixedArrayWithMap<Context>(
      Heap::kBlockContextMapRootIndex, scope_info->ContextLength());
  context->set_scope_info(*scope_info);
  context->set_previous(*previous);
  context->set_extension(*the_hole_value());
  context->set_native_context(previous->native_context());
  return context;
}

Handle<Context> Factory::NewBuiltinContext(Handle<NativeContext> native_context,
                                           int length) {
  DCHECK_GE(length, Context::MIN_CONTEXT_SLOTS);
  Handle<Context> context =
      NewFixedArrayWithMap<Context>(Heap::kFunctionContextMapRootIndex, length);
  context->set_scope_info(ReadOnlyRoots(isolate()).empty_scope_info());
  context->set_extension(*the_hole_value());
  context->set_native_context(*native_context);
  return context;
}

Handle<Struct> Factory::NewStruct(InstanceType type, PretenureFlag pretenure) {
  Map* map;
  switch (type) {
#define MAKE_CASE(NAME, Name, name) \
  case NAME##_TYPE:                 \
    map = *name##_map();            \
    break;
    STRUCT_LIST(MAKE_CASE)
#undef MAKE_CASE
    default:
      UNREACHABLE();
  }
  int size = map->instance_size();
  HeapObject* result = AllocateRawWithImmortalMap(size, pretenure, map);
  Handle<Struct> str(Struct::cast(result), isolate());
  str->InitializeBody(size);
  return str;
}

Handle<AliasedArgumentsEntry> Factory::NewAliasedArgumentsEntry(
    int aliased_context_slot) {
  Handle<AliasedArgumentsEntry> entry = Handle<AliasedArgumentsEntry>::cast(
      NewStruct(ALIASED_ARGUMENTS_ENTRY_TYPE, NOT_TENURED));
  entry->set_aliased_context_slot(aliased_context_slot);
  return entry;
}

Handle<AccessorInfo> Factory::NewAccessorInfo() {
  Handle<AccessorInfo> info =
      Handle<AccessorInfo>::cast(NewStruct(ACCESSOR_INFO_TYPE, TENURED));
  info->set_name(*empty_string());
  info->set_flags(0);  // Must clear the flags, it was initialized as undefined.
  info->set_is_sloppy(true);
  info->set_initial_property_attributes(NONE);
  return info;
}

Handle<Script> Factory::NewScript(Handle<String> source, PretenureFlag tenure) {
  return NewScriptWithId(source, isolate()->heap()->NextScriptId(), tenure);
}

Handle<Script> Factory::NewScriptWithId(Handle<String> source, int script_id,
                                        PretenureFlag tenure) {
  DCHECK(tenure == TENURED || tenure == TENURED_READ_ONLY);
  // Create and initialize script object.
  Heap* heap = isolate()->heap();
  ReadOnlyRoots roots(heap);
  Handle<Script> script = Handle<Script>::cast(NewStruct(SCRIPT_TYPE, tenure));
  script->set_source(*source);
  script->set_name(roots.undefined_value());
  script->set_id(script_id);
  script->set_line_offset(0);
  script->set_column_offset(0);
  script->set_context_data(roots.undefined_value());
  script->set_type(Script::TYPE_NORMAL);
  script->set_line_ends(roots.undefined_value());
  script->set_eval_from_shared_or_wrapped_arguments(roots.undefined_value());
  script->set_eval_from_position(0);
  script->set_shared_function_infos(*empty_weak_fixed_array(),
                                    SKIP_WRITE_BARRIER);
  script->set_flags(0);
  script->set_host_defined_options(*empty_fixed_array());
  Handle<WeakArrayList> scripts = script_list();
  scripts = WeakArrayList::AddToEnd(isolate(), scripts,
                                    MaybeObjectHandle::Weak(script));
  heap->set_script_list(*scripts);
  LOG(isolate(), ScriptEvent(Logger::ScriptEventType::kCreate, script_id));
  return script;
}

Handle<Script> Factory::CloneScript(Handle<Script> script) {
  Heap* heap = isolate()->heap();
  int script_id = isolate()->heap()->NextScriptId();
  Handle<Script> new_script =
      Handle<Script>::cast(NewStruct(SCRIPT_TYPE, TENURED));
  new_script->set_source(script->source());
  new_script->set_name(script->name());
  new_script->set_id(script_id);
  new_script->set_line_offset(script->line_offset());
  new_script->set_column_offset(script->column_offset());
  new_script->set_context_data(script->context_data());
  new_script->set_type(script->type());
  new_script->set_line_ends(ReadOnlyRoots(heap).undefined_value());
  new_script->set_eval_from_shared_or_wrapped_arguments(
      script->eval_from_shared_or_wrapped_arguments());
  new_script->set_shared_function_infos(*empty_weak_fixed_array(),
                                        SKIP_WRITE_BARRIER);
  new_script->set_eval_from_position(script->eval_from_position());
  new_script->set_flags(script->flags());
  new_script->set_host_defined_options(script->host_defined_options());
  Handle<WeakArrayList> scripts = script_list();
  scripts = WeakArrayList::AddToEnd(isolate(), scripts,
                                    MaybeObjectHandle::Weak(new_script));
  heap->set_script_list(*scripts);
  LOG(isolate(), ScriptEvent(Logger::ScriptEventType::kCreate, script_id));
return new_script; 1591 } 1592 1593 Handle<CallableTask> Factory::NewCallableTask(Handle<JSReceiver> callable, 1594 Handle<Context> context) { 1595 DCHECK(callable->IsCallable()); 1596 Handle<CallableTask> microtask = 1597 Handle<CallableTask>::cast(NewStruct(CALLABLE_TASK_TYPE)); 1598 microtask->set_callable(*callable); 1599 microtask->set_context(*context); 1600 return microtask; 1601 } 1602 1603 Handle<CallbackTask> Factory::NewCallbackTask(Handle<Foreign> callback, 1604 Handle<Foreign> data) { 1605 Handle<CallbackTask> microtask = 1606 Handle<CallbackTask>::cast(NewStruct(CALLBACK_TASK_TYPE)); 1607 microtask->set_callback(*callback); 1608 microtask->set_data(*data); 1609 return microtask; 1610 } 1611 1612 Handle<PromiseResolveThenableJobTask> Factory::NewPromiseResolveThenableJobTask( 1613 Handle<JSPromise> promise_to_resolve, Handle<JSReceiver> then, 1614 Handle<JSReceiver> thenable, Handle<Context> context) { 1615 DCHECK(then->IsCallable()); 1616 Handle<PromiseResolveThenableJobTask> microtask = 1617 Handle<PromiseResolveThenableJobTask>::cast( 1618 NewStruct(PROMISE_RESOLVE_THENABLE_JOB_TASK_TYPE)); 1619 microtask->set_promise_to_resolve(*promise_to_resolve); 1620 microtask->set_then(*then); 1621 microtask->set_thenable(*thenable); 1622 microtask->set_context(*context); 1623 return microtask; 1624 } 1625 1626 Handle<Foreign> Factory::NewForeign(Address addr, PretenureFlag pretenure) { 1627 // Statically ensure that it is safe to allocate foreigns in paged spaces. 1628 STATIC_ASSERT(Foreign::kSize <= kMaxRegularHeapObjectSize); 1629 Map* map = *foreign_map(); 1630 HeapObject* result = 1631 AllocateRawWithImmortalMap(map->instance_size(), pretenure, map); 1632 Handle<Foreign> foreign(Foreign::cast(result), isolate()); 1633 foreign->set_foreign_address(addr); 1634 return foreign; 1635 } 1636 1637 Handle<ByteArray> Factory::NewByteArray(int length, PretenureFlag pretenure) { 1638 DCHECK_LE(0, length); 1639 if (length > ByteArray::kMaxLength) { 1640 isolate()->heap()->FatalProcessOutOfMemory("invalid array length"); 1641 } 1642 int size = ByteArray::SizeFor(length); 1643 HeapObject* result = 1644 AllocateRawWithImmortalMap(size, pretenure, *byte_array_map()); 1645 Handle<ByteArray> array(ByteArray::cast(result), isolate()); 1646 array->set_length(length); 1647 array->clear_padding(); 1648 return array; 1649 } 1650 1651 Handle<BytecodeArray> Factory::NewBytecodeArray( 1652 int length, const byte* raw_bytecodes, int frame_size, int parameter_count, 1653 Handle<FixedArray> constant_pool) { 1654 DCHECK_LE(0, length); 1655 if (length > BytecodeArray::kMaxLength) { 1656 isolate()->heap()->FatalProcessOutOfMemory("invalid array length"); 1657 } 1658 // Bytecode array is pretenured, so constant pool array should be too. 
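// The DCHECK below merely asserts this; it is the caller's responsibility to
// allocate the constant pool outside of new space.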
1659 DCHECK(!Heap::InNewSpace(*constant_pool)); 1660 1661 int size = BytecodeArray::SizeFor(length); 1662 HeapObject* result = 1663 AllocateRawWithImmortalMap(size, TENURED, *bytecode_array_map()); 1664 Handle<BytecodeArray> instance(BytecodeArray::cast(result), isolate()); 1665 instance->set_length(length); 1666 instance->set_frame_size(frame_size); 1667 instance->set_parameter_count(parameter_count); 1668 instance->set_incoming_new_target_or_generator_register( 1669 interpreter::Register::invalid_value()); 1670 instance->set_interrupt_budget(interpreter::Interpreter::InterruptBudget()); 1671 instance->set_osr_loop_nesting_level(0); 1672 instance->set_bytecode_age(BytecodeArray::kNoAgeBytecodeAge); 1673 instance->set_constant_pool(*constant_pool); 1674 instance->set_handler_table(*empty_byte_array()); 1675 instance->set_source_position_table(*empty_byte_array()); 1676 CopyBytes(reinterpret_cast<byte*>(instance->GetFirstBytecodeAddress()), 1677 raw_bytecodes, length); 1678 instance->clear_padding(); 1679 1680 return instance; 1681 } 1682 1683 Handle<FixedTypedArrayBase> Factory::NewFixedTypedArrayWithExternalPointer( 1684 int length, ExternalArrayType array_type, void* external_pointer, 1685 PretenureFlag pretenure) { 1686 // TODO(7881): Smi length check 1687 DCHECK(0 <= length && length <= Smi::kMaxValue); 1688 int size = FixedTypedArrayBase::kHeaderSize; 1689 HeapObject* result = AllocateRawWithImmortalMap( 1690 size, pretenure, isolate()->heap()->MapForFixedTypedArray(array_type)); 1691 Handle<FixedTypedArrayBase> elements(FixedTypedArrayBase::cast(result), 1692 isolate()); 1693 elements->set_base_pointer(Smi::kZero, SKIP_WRITE_BARRIER); 1694 elements->set_external_pointer(external_pointer, SKIP_WRITE_BARRIER); 1695 elements->set_length(length); 1696 return elements; 1697 } 1698 1699 Handle<FixedTypedArrayBase> Factory::NewFixedTypedArray( 1700 size_t length, size_t byte_length, ExternalArrayType array_type, 1701 bool initialize, PretenureFlag pretenure) { 1702 // TODO(7881): Smi length check 1703 DCHECK(0 <= length && length <= Smi::kMaxValue); 1704 CHECK(byte_length <= kMaxInt - FixedTypedArrayBase::kDataOffset); 1705 size_t size = 1706 OBJECT_POINTER_ALIGN(byte_length + FixedTypedArrayBase::kDataOffset); 1707 Map* map = isolate()->heap()->MapForFixedTypedArray(array_type); 1708 AllocationAlignment alignment = 1709 array_type == kExternalFloat64Array ? 
kDoubleAligned : kWordAligned; 1710 HeapObject* object = AllocateRawWithImmortalMap(static_cast<int>(size), 1711 pretenure, map, alignment); 1712 1713 Handle<FixedTypedArrayBase> elements(FixedTypedArrayBase::cast(object), 1714 isolate()); 1715 elements->set_base_pointer(*elements, SKIP_WRITE_BARRIER); 1716 elements->set_external_pointer( 1717 reinterpret_cast<void*>( 1718 ExternalReference::fixed_typed_array_base_data_offset().address()), 1719 SKIP_WRITE_BARRIER); 1720 elements->set_length(static_cast<int>(length)); 1721 if (initialize) memset(elements->DataPtr(), 0, elements->DataSize()); 1722 return elements; 1723 } 1724 1725 Handle<Cell> Factory::NewCell(Handle<Object> value) { 1726 AllowDeferredHandleDereference convert_to_cell; 1727 STATIC_ASSERT(Cell::kSize <= kMaxRegularHeapObjectSize); 1728 HeapObject* result = 1729 AllocateRawWithImmortalMap(Cell::kSize, TENURED, *cell_map()); 1730 Handle<Cell> cell(Cell::cast(result), isolate()); 1731 cell->set_value(*value); 1732 return cell; 1733 } 1734 1735 Handle<FeedbackCell> Factory::NewNoClosuresCell(Handle<HeapObject> value) { 1736 AllowDeferredHandleDereference convert_to_cell; 1737 HeapObject* result = AllocateRawWithImmortalMap(FeedbackCell::kSize, TENURED, 1738 *no_closures_cell_map()); 1739 Handle<FeedbackCell> cell(FeedbackCell::cast(result), isolate()); 1740 cell->set_value(*value); 1741 return cell; 1742 } 1743 1744 Handle<FeedbackCell> Factory::NewOneClosureCell(Handle<HeapObject> value) { 1745 AllowDeferredHandleDereference convert_to_cell; 1746 HeapObject* result = AllocateRawWithImmortalMap(FeedbackCell::kSize, TENURED, 1747 *one_closure_cell_map()); 1748 Handle<FeedbackCell> cell(FeedbackCell::cast(result), isolate()); 1749 cell->set_value(*value); 1750 return cell; 1751 } 1752 1753 Handle<FeedbackCell> Factory::NewManyClosuresCell(Handle<HeapObject> value) { 1754 AllowDeferredHandleDereference convert_to_cell; 1755 HeapObject* result = AllocateRawWithImmortalMap(FeedbackCell::kSize, TENURED, 1756 *many_closures_cell_map()); 1757 Handle<FeedbackCell> cell(FeedbackCell::cast(result), isolate()); 1758 cell->set_value(*value); 1759 return cell; 1760 } 1761 1762 Handle<PropertyCell> Factory::NewPropertyCell(Handle<Name> name, 1763 PretenureFlag pretenure) { 1764 DCHECK(name->IsUniqueName()); 1765 STATIC_ASSERT(PropertyCell::kSize <= kMaxRegularHeapObjectSize); 1766 HeapObject* result = AllocateRawWithImmortalMap( 1767 PropertyCell::kSize, pretenure, *global_property_cell_map()); 1768 Handle<PropertyCell> cell(PropertyCell::cast(result), isolate()); 1769 cell->set_dependent_code(DependentCode::cast(*empty_weak_fixed_array()), 1770 SKIP_WRITE_BARRIER); 1771 cell->set_property_details(PropertyDetails(Smi::kZero)); 1772 cell->set_name(*name); 1773 cell->set_value(*the_hole_value()); 1774 return cell; 1775 } 1776 1777 Handle<TransitionArray> Factory::NewTransitionArray(int number_of_transitions, 1778 int slack) { 1779 int capacity = TransitionArray::LengthFor(number_of_transitions + slack); 1780 Handle<TransitionArray> array = NewWeakFixedArrayWithMap<TransitionArray>( 1781 Heap::kTransitionArrayMapRootIndex, capacity, TENURED); 1782 // Transition arrays are tenured. When black allocation is on we have to 1783 // add the transition array to the list of encountered_transition_arrays. 
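// (Objects allocated black are not revisited by the marker, so without this
// registration the collector would not process the array's slots.)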
1784 Heap* heap = isolate()->heap(); 1785 if (heap->incremental_marking()->black_allocation()) { 1786 heap->mark_compact_collector()->AddTransitionArray(*array); 1787 } 1788 array->WeakFixedArray::Set(TransitionArray::kPrototypeTransitionsIndex, 1789 MaybeObject::FromObject(Smi::kZero)); 1790 array->WeakFixedArray::Set( 1791 TransitionArray::kTransitionLengthIndex, 1792 MaybeObject::FromObject(Smi::FromInt(number_of_transitions))); 1793 return array; 1794 } 1795 1796 Handle<AllocationSite> Factory::NewAllocationSite(bool with_weak_next) { 1797 Handle<Map> map = with_weak_next ? allocation_site_map() 1798 : allocation_site_without_weaknext_map(); 1799 Handle<AllocationSite> site(AllocationSite::cast(New(map, TENURED)), 1800 isolate()); 1801 site->Initialize(); 1802 1803 if (with_weak_next) { 1804 // Link the site 1805 site->set_weak_next(isolate()->heap()->allocation_sites_list()); 1806 isolate()->heap()->set_allocation_sites_list(*site); 1807 } 1808 return site; 1809 } 1810 1811 Handle<Map> Factory::NewMap(InstanceType type, int instance_size, 1812 ElementsKind elements_kind, 1813 int inobject_properties) { 1814 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE); 1815 DCHECK_IMPLIES(Map::IsJSObject(type) && 1816 !Map::CanHaveFastTransitionableElementsKind(type), 1817 IsDictionaryElementsKind(elements_kind) || 1818 IsTerminalElementsKind(elements_kind)); 1819 HeapObject* result = 1820 isolate()->heap()->AllocateRawWithRetryOrFail(Map::kSize, MAP_SPACE); 1821 result->set_map_after_allocation(*meta_map(), SKIP_WRITE_BARRIER); 1822 return handle(InitializeMap(Map::cast(result), type, instance_size, 1823 elements_kind, inobject_properties), 1824 isolate()); 1825 } 1826 1827 Map* Factory::InitializeMap(Map* map, InstanceType type, int instance_size, 1828 ElementsKind elements_kind, 1829 int inobject_properties) { 1830 map->set_instance_type(type); 1831 map->set_prototype(*null_value(), SKIP_WRITE_BARRIER); 1832 map->set_constructor_or_backpointer(*null_value(), SKIP_WRITE_BARRIER); 1833 map->set_instance_size(instance_size); 1834 if (map->IsJSObjectMap()) { 1835 DCHECK(!isolate()->heap()->InReadOnlySpace(map)); 1836 map->SetInObjectPropertiesStartInWords(instance_size / kPointerSize - 1837 inobject_properties); 1838 DCHECK_EQ(map->GetInObjectProperties(), inobject_properties); 1839 map->set_prototype_validity_cell(*invalid_prototype_validity_cell()); 1840 } else { 1841 DCHECK_EQ(inobject_properties, 0); 1842 map->set_inobject_properties_start_or_constructor_function_index(0); 1843 map->set_prototype_validity_cell(Smi::FromInt(Map::kPrototypeChainValid)); 1844 } 1845 map->set_dependent_code(DependentCode::cast(*empty_weak_fixed_array()), 1846 SKIP_WRITE_BARRIER); 1847 map->set_raw_transitions(MaybeObject::FromSmi(Smi::kZero)); 1848 map->SetInObjectUnusedPropertyFields(inobject_properties); 1849 map->set_instance_descriptors(*empty_descriptor_array()); 1850 if (FLAG_unbox_double_fields) { 1851 map->set_layout_descriptor(LayoutDescriptor::FastPointerLayout()); 1852 } 1853 // Must be called only after |instance_type|, |instance_size| and 1854 // |layout_descriptor| are set. 
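// Map::GetVisitorId() derives the visitor id from precisely those fields.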
1855 map->set_visitor_id(Map::GetVisitorId(map)); 1856 map->set_bit_field(0); 1857 map->set_bit_field2(Map::IsExtensibleBit::kMask); 1858 DCHECK(!map->is_in_retained_map_list()); 1859 int bit_field3 = Map::EnumLengthBits::encode(kInvalidEnumCacheSentinel) | 1860 Map::OwnsDescriptorsBit::encode(true) | 1861 Map::ConstructionCounterBits::encode(Map::kNoSlackTracking); 1862 map->set_bit_field3(bit_field3); 1863 map->set_elements_kind(elements_kind); 1864 map->set_new_target_is_base(true); 1865 isolate()->counters()->maps_created()->Increment(); 1866 if (FLAG_trace_maps) LOG(isolate(), MapCreate(map)); 1867 return map; 1868 } 1869 1870 Handle<JSObject> Factory::CopyJSObject(Handle<JSObject> source) { 1871 return CopyJSObjectWithAllocationSite(source, Handle<AllocationSite>()); 1872 } 1873 1874 Handle<JSObject> Factory::CopyJSObjectWithAllocationSite( 1875 Handle<JSObject> source, Handle<AllocationSite> site) { 1876 Handle<Map> map(source->map(), isolate()); 1877 1878 // We can only clone regexps, normal objects, api objects, errors or arrays. 1879 // Copying anything else will break invariants. 1880 CHECK(map->instance_type() == JS_REGEXP_TYPE || 1881 map->instance_type() == JS_OBJECT_TYPE || 1882 map->instance_type() == JS_ERROR_TYPE || 1883 map->instance_type() == JS_ARRAY_TYPE || 1884 map->instance_type() == JS_API_OBJECT_TYPE || 1885 map->instance_type() == WASM_GLOBAL_TYPE || 1886 map->instance_type() == WASM_INSTANCE_TYPE || 1887 map->instance_type() == WASM_MEMORY_TYPE || 1888 map->instance_type() == WASM_MODULE_TYPE || 1889 map->instance_type() == WASM_TABLE_TYPE || 1890 map->instance_type() == JS_SPECIAL_API_OBJECT_TYPE); 1891 DCHECK(site.is_null() || AllocationSite::CanTrack(map->instance_type())); 1892 1893 int object_size = map->instance_size(); 1894 int adjusted_object_size = 1895 site.is_null() ? object_size : object_size + AllocationMemento::kSize; 1896 HeapObject* raw_clone = isolate()->heap()->AllocateRawWithRetryOrFail( 1897 adjusted_object_size, NEW_SPACE); 1898 1899 SLOW_DCHECK(Heap::InNewSpace(raw_clone)); 1900 // Since we know the clone is allocated in new space, we can copy 1901 // the contents without worrying about updating the write barrier. 1902 Heap::CopyBlock(raw_clone->address(), source->address(), object_size); 1903 Handle<JSObject> clone(JSObject::cast(raw_clone), isolate()); 1904 1905 if (!site.is_null()) { 1906 AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>( 1907 reinterpret_cast<Address>(raw_clone) + object_size); 1908 InitializeAllocationMemento(alloc_memento, *site); 1909 } 1910 1911 SLOW_DCHECK(clone->GetElementsKind() == source->GetElementsKind()); 1912 FixedArrayBase* elements = FixedArrayBase::cast(source->elements()); 1913 // Update elements if necessary. 1914 if (elements->length() > 0) { 1915 FixedArrayBase* elem = nullptr; 1916 if (elements->map() == *fixed_cow_array_map()) { 1917 elem = elements; 1918 } else if (source->HasDoubleElements()) { 1919 elem = *CopyFixedDoubleArray( 1920 handle(FixedDoubleArray::cast(elements), isolate())); 1921 } else { 1922 elem = *CopyFixedArray(handle(FixedArray::cast(elements), isolate())); 1923 } 1924 clone->set_elements(elem); 1925 } 1926 1927 // Update properties if necessary. 1928 if (source->HasFastProperties()) { 1929 PropertyArray* properties = source->property_array(); 1930 if (properties->length() > 0) { 1931 // TODO(gsathya): Do not copy hash code. 
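// (The hash is packed into the PropertyArray's length field, so the generic
// copy below carries it over as well.)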
1932 Handle<PropertyArray> prop = CopyArrayWithMap( 1933 handle(properties, isolate()), handle(properties->map(), isolate())); 1934 clone->set_raw_properties_or_hash(*prop); 1935 } 1936 } else { 1937 Handle<FixedArray> properties( 1938 FixedArray::cast(source->property_dictionary()), isolate()); 1939 Handle<FixedArray> prop = CopyFixedArray(properties); 1940 clone->set_raw_properties_or_hash(*prop); 1941 } 1942 return clone; 1943 } 1944 1945 namespace { 1946 template <typename T> 1947 void initialize_length(T* array, int length) { 1948 array->set_length(length); 1949 } 1950 1951 template <> 1952 void initialize_length<PropertyArray>(PropertyArray* array, int length) { 1953 array->initialize_length(length); 1954 } 1955 1956 } // namespace 1957 1958 template <typename T> 1959 Handle<T> Factory::CopyArrayWithMap(Handle<T> src, Handle<Map> map) { 1960 int len = src->length(); 1961 HeapObject* obj = AllocateRawFixedArray(len, NOT_TENURED); 1962 obj->set_map_after_allocation(*map, SKIP_WRITE_BARRIER); 1963 1964 T* result = T::cast(obj); 1965 DisallowHeapAllocation no_gc; 1966 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); 1967 1968 if (mode == SKIP_WRITE_BARRIER) { 1969 // Eliminate the write barrier if possible. 1970 Heap::CopyBlock(obj->address() + kPointerSize, 1971 src->address() + kPointerSize, 1972 T::SizeFor(len) - kPointerSize); 1973 } else { 1974 // Slow case: Just copy the content one-by-one. 1975 initialize_length(result, len); 1976 for (int i = 0; i < len; i++) result->set(i, src->get(i), mode); 1977 } 1978 return Handle<T>(result, isolate()); 1979 } 1980 1981 template <typename T> 1982 Handle<T> Factory::CopyArrayAndGrow(Handle<T> src, int grow_by, 1983 PretenureFlag pretenure) { 1984 DCHECK_LT(0, grow_by); 1985 DCHECK_LE(grow_by, kMaxInt - src->length()); 1986 int old_len = src->length(); 1987 int new_len = old_len + grow_by; 1988 HeapObject* obj = AllocateRawFixedArray(new_len, pretenure); 1989 obj->set_map_after_allocation(src->map(), SKIP_WRITE_BARRIER); 1990 1991 T* result = T::cast(obj); 1992 initialize_length(result, new_len); 1993 1994 // Copy the content. 1995 DisallowHeapAllocation no_gc; 1996 WriteBarrierMode mode = obj->GetWriteBarrierMode(no_gc); 1997 for (int i = 0; i < old_len; i++) result->set(i, src->get(i), mode); 1998 MemsetPointer(result->data_start() + old_len, *undefined_value(), grow_by); 1999 return Handle<T>(result, isolate()); 2000 } 2001 2002 Handle<FixedArray> Factory::CopyFixedArrayWithMap(Handle<FixedArray> array, 2003 Handle<Map> map) { 2004 return CopyArrayWithMap(array, map); 2005 } 2006 2007 Handle<FixedArray> Factory::CopyFixedArrayAndGrow(Handle<FixedArray> array, 2008 int grow_by, 2009 PretenureFlag pretenure) { 2010 return CopyArrayAndGrow(array, grow_by, pretenure); 2011 } 2012 2013 Handle<WeakFixedArray> Factory::CopyWeakFixedArrayAndGrow( 2014 Handle<WeakFixedArray> src, int grow_by, PretenureFlag pretenure) { 2015 DCHECK( 2016 !src->IsTransitionArray()); // Compacted by GC, this code doesn't work. 2017 int old_len = src->length(); 2018 int new_len = old_len + grow_by; 2019 DCHECK_GE(new_len, old_len); 2020 HeapObject* obj = AllocateRawFixedArray(new_len, pretenure); 2021 DCHECK_EQ(old_len, src->length()); 2022 obj->set_map_after_allocation(src->map(), SKIP_WRITE_BARRIER); 2023 2024 WeakFixedArray* result = WeakFixedArray::cast(obj); 2025 result->set_length(new_len); 2026 2027 // Copy the content. 
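// Existing entries keep their strong or weak representation; the newly added
// tail is filled with strong references to undefined below.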
2028 DisallowHeapAllocation no_gc; 2029 WriteBarrierMode mode = obj->GetWriteBarrierMode(no_gc); 2030 for (int i = 0; i < old_len; i++) result->Set(i, src->Get(i), mode); 2031 HeapObjectReference* undefined_reference = 2032 HeapObjectReference::Strong(ReadOnlyRoots(isolate()).undefined_value()); 2033 MemsetPointer(result->data_start() + old_len, undefined_reference, grow_by); 2034 return Handle<WeakFixedArray>(result, isolate()); 2035 } 2036 2037 Handle<WeakArrayList> Factory::CopyWeakArrayListAndGrow( 2038 Handle<WeakArrayList> src, int grow_by, PretenureFlag pretenure) { 2039 int old_capacity = src->capacity(); 2040 int new_capacity = old_capacity + grow_by; 2041 DCHECK_GE(new_capacity, old_capacity); 2042 HeapObject* obj = AllocateRawWeakArrayList(new_capacity, pretenure); 2043 obj->set_map_after_allocation(src->map(), SKIP_WRITE_BARRIER); 2044 2045 WeakArrayList* result = WeakArrayList::cast(obj); 2046 result->set_length(src->length()); 2047 result->set_capacity(new_capacity); 2048 2049 // Copy the content. 2050 DisallowHeapAllocation no_gc; 2051 WriteBarrierMode mode = obj->GetWriteBarrierMode(no_gc); 2052 for (int i = 0; i < old_capacity; i++) result->Set(i, src->Get(i), mode); 2053 HeapObjectReference* undefined_reference = 2054 HeapObjectReference::Strong(ReadOnlyRoots(isolate()).undefined_value()); 2055 MemsetPointer(result->data_start() + old_capacity, undefined_reference, 2056 grow_by); 2057 return Handle<WeakArrayList>(result, isolate()); 2058 } 2059 2060 Handle<PropertyArray> Factory::CopyPropertyArrayAndGrow( 2061 Handle<PropertyArray> array, int grow_by, PretenureFlag pretenure) { 2062 return CopyArrayAndGrow(array, grow_by, pretenure); 2063 } 2064 2065 Handle<FixedArray> Factory::CopyFixedArrayUpTo(Handle<FixedArray> array, 2066 int new_len, 2067 PretenureFlag pretenure) { 2068 DCHECK_LE(0, new_len); 2069 DCHECK_LE(new_len, array->length()); 2070 if (new_len == 0) return empty_fixed_array(); 2071 2072 HeapObject* obj = AllocateRawFixedArray(new_len, pretenure); 2073 obj->set_map_after_allocation(*fixed_array_map(), SKIP_WRITE_BARRIER); 2074 Handle<FixedArray> result(FixedArray::cast(obj), isolate()); 2075 result->set_length(new_len); 2076 2077 // Copy the content. 2078 DisallowHeapAllocation no_gc; 2079 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc); 2080 for (int i = 0; i < new_len; i++) result->set(i, array->get(i), mode); 2081 return result; 2082 } 2083 2084 Handle<FixedArray> Factory::CopyFixedArray(Handle<FixedArray> array) { 2085 if (array->length() == 0) return array; 2086 return CopyArrayWithMap(array, handle(array->map(), isolate())); 2087 } 2088 2089 Handle<FixedArray> Factory::CopyAndTenureFixedCOWArray( 2090 Handle<FixedArray> array) { 2091 DCHECK(Heap::InNewSpace(*array)); 2092 Handle<FixedArray> result = 2093 CopyFixedArrayUpTo(array, array->length(), TENURED); 2094 2095 // TODO(mvstanton): The map is set twice because of protection against calling 2096 // set() on a COW FixedArray. Issue v8:3221 created to track this, and 2097 // we might then be able to remove this whole method. 
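// CopyFixedArrayUpTo() above installed the regular fixed_array_map; it is
// replaced with the copy-on-write map only once the contents are in place.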
2098 result->set_map_after_allocation(*fixed_cow_array_map(), SKIP_WRITE_BARRIER);
2099 return result;
2100 }
2101
2102 Handle<FixedDoubleArray> Factory::CopyFixedDoubleArray(
2103 Handle<FixedDoubleArray> array) {
2104 int len = array->length();
2105 if (len == 0) return array;
2106 Handle<FixedDoubleArray> result =
2107 Handle<FixedDoubleArray>::cast(NewFixedDoubleArray(len, NOT_TENURED));
2108 Heap::CopyBlock(
2109 result->address() + FixedDoubleArray::kLengthOffset,
2110 array->address() + FixedDoubleArray::kLengthOffset,
2111 FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset);
2112 return result;
2113 }
2114
2115 Handle<FeedbackVector> Factory::CopyFeedbackVector(
2116 Handle<FeedbackVector> array) {
2117 int len = array->length();
2118 HeapObject* obj = AllocateRawWithImmortalMap(
2119 FeedbackVector::SizeFor(len), NOT_TENURED, *feedback_vector_map());
2120 Handle<FeedbackVector> result(FeedbackVector::cast(obj), isolate());
2121
2122 DisallowHeapAllocation no_gc;
2123 WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
2124
2125 // Eliminate the write barrier if possible. Note that the source of the
2126 // copy is the input |array|, not the freshly allocated |result|.
2126 if (mode == SKIP_WRITE_BARRIER) {
2127 Heap::CopyBlock(result->address() + kPointerSize,
2128 array->address() + kPointerSize,
2129 FeedbackVector::SizeFor(len) - kPointerSize);
2130 } else {
2131 // Slow case: Just copy the content one-by-one.
2132 result->set_shared_function_info(array->shared_function_info());
2133 result->set_optimized_code_weak_or_smi(array->optimized_code_weak_or_smi());
2134 result->set_invocation_count(array->invocation_count());
2135 result->set_profiler_ticks(array->profiler_ticks());
2136 result->set_deopt_count(array->deopt_count());
2137 for (int i = 0; i < len; i++) result->set(i, array->get(i), mode);
2138 }
2139 return result;
2140 }
2141
2142 Handle<Object> Factory::NewNumber(double value, PretenureFlag pretenure) {
2143 // Materialize as a Smi if possible.
2144 int32_t int_value;
2145 if (DoubleToSmiInteger(value, &int_value)) {
2146 return handle(Smi::FromInt(int_value), isolate());
2147 }
2148 return NewHeapNumber(value, pretenure);
2149 }
2150
2151 Handle<Object> Factory::NewNumberFromInt(int32_t value,
2152 PretenureFlag pretenure) {
2153 if (Smi::IsValid(value)) return handle(Smi::FromInt(value), isolate());
2154 // Bypass NewNumber to avoid various redundant checks.
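// At this point the value is known not to fit in a Smi, so NewNumber()'s
// DoubleToSmiInteger() fast path would be wasted work.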
2155 return NewHeapNumber(FastI2D(value), pretenure); 2156 } 2157 2158 Handle<Object> Factory::NewNumberFromUint(uint32_t value, 2159 PretenureFlag pretenure) { 2160 int32_t int32v = static_cast<int32_t>(value); 2161 if (int32v >= 0 && Smi::IsValid(int32v)) { 2162 return handle(Smi::FromInt(int32v), isolate()); 2163 } 2164 return NewHeapNumber(FastUI2D(value), pretenure); 2165 } 2166 2167 Handle<HeapNumber> Factory::NewHeapNumber(PretenureFlag pretenure) { 2168 STATIC_ASSERT(HeapNumber::kSize <= kMaxRegularHeapObjectSize); 2169 Map* map = *heap_number_map(); 2170 HeapObject* result = AllocateRawWithImmortalMap(HeapNumber::kSize, pretenure, 2171 map, kDoubleUnaligned); 2172 return handle(HeapNumber::cast(result), isolate()); 2173 } 2174 2175 Handle<MutableHeapNumber> Factory::NewMutableHeapNumber( 2176 PretenureFlag pretenure) { 2177 STATIC_ASSERT(HeapNumber::kSize <= kMaxRegularHeapObjectSize); 2178 Map* map = *mutable_heap_number_map(); 2179 HeapObject* result = AllocateRawWithImmortalMap( 2180 MutableHeapNumber::kSize, pretenure, map, kDoubleUnaligned); 2181 return handle(MutableHeapNumber::cast(result), isolate()); 2182 } 2183 2184 Handle<FreshlyAllocatedBigInt> Factory::NewBigInt(int length, 2185 PretenureFlag pretenure) { 2186 if (length < 0 || length > BigInt::kMaxLength) { 2187 isolate()->heap()->FatalProcessOutOfMemory("invalid BigInt length"); 2188 } 2189 HeapObject* result = AllocateRawWithImmortalMap(BigInt::SizeFor(length), 2190 pretenure, *bigint_map()); 2191 return handle(FreshlyAllocatedBigInt::cast(result), isolate()); 2192 } 2193 2194 Handle<Object> Factory::NewError(Handle<JSFunction> constructor, 2195 MessageTemplate::Template template_index, 2196 Handle<Object> arg0, Handle<Object> arg1, 2197 Handle<Object> arg2) { 2198 HandleScope scope(isolate()); 2199 if (isolate()->bootstrapper()->IsActive()) { 2200 // During bootstrapping we cannot construct error objects. 2201 return scope.CloseAndEscape(NewStringFromAsciiChecked( 2202 MessageTemplate::TemplateString(template_index))); 2203 } 2204 2205 if (arg0.is_null()) arg0 = undefined_value(); 2206 if (arg1.is_null()) arg1 = undefined_value(); 2207 if (arg2.is_null()) arg2 = undefined_value(); 2208 2209 Handle<Object> result; 2210 if (!ErrorUtils::MakeGenericError(isolate(), constructor, template_index, 2211 arg0, arg1, arg2, SKIP_NONE) 2212 .ToHandle(&result)) { 2213 // If an exception is thrown while 2214 // running the factory method, use the exception as the result. 2215 DCHECK(isolate()->has_pending_exception()); 2216 result = handle(isolate()->pending_exception(), isolate()); 2217 isolate()->clear_pending_exception(); 2218 } 2219 2220 return scope.CloseAndEscape(result); 2221 } 2222 2223 Handle<Object> Factory::NewError(Handle<JSFunction> constructor, 2224 Handle<String> message) { 2225 // Construct a new error object. If an exception is thrown, use the exception 2226 // as the result. 
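// This mirrors the MessageTemplate-based NewError() overload above: when
// ErrorUtils fails, the pending exception is taken over (and cleared) instead.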
2227 2228 Handle<Object> no_caller; 2229 MaybeHandle<Object> maybe_error = 2230 ErrorUtils::Construct(isolate(), constructor, constructor, message, 2231 SKIP_NONE, no_caller, false); 2232 if (maybe_error.is_null()) { 2233 DCHECK(isolate()->has_pending_exception()); 2234 maybe_error = handle(isolate()->pending_exception(), isolate()); 2235 isolate()->clear_pending_exception(); 2236 } 2237 2238 return maybe_error.ToHandleChecked(); 2239 } 2240 2241 Handle<Object> Factory::NewInvalidStringLengthError() { 2242 if (FLAG_abort_on_stack_or_string_length_overflow) { 2243 FATAL("Aborting on invalid string length"); 2244 } 2245 // Invalidate the "string length" protector. 2246 if (isolate()->IsStringLengthOverflowIntact()) { 2247 isolate()->InvalidateStringLengthOverflowProtector(); 2248 } 2249 return NewRangeError(MessageTemplate::kInvalidStringLength); 2250 } 2251 2252 #define DEFINE_ERROR(NAME, name) \ 2253 Handle<Object> Factory::New##NAME(MessageTemplate::Template template_index, \ 2254 Handle<Object> arg0, Handle<Object> arg1, \ 2255 Handle<Object> arg2) { \ 2256 return NewError(isolate()->name##_function(), template_index, arg0, arg1, \ 2257 arg2); \ 2258 } 2259 DEFINE_ERROR(Error, error) 2260 DEFINE_ERROR(EvalError, eval_error) 2261 DEFINE_ERROR(RangeError, range_error) 2262 DEFINE_ERROR(ReferenceError, reference_error) 2263 DEFINE_ERROR(SyntaxError, syntax_error) 2264 DEFINE_ERROR(TypeError, type_error) 2265 DEFINE_ERROR(WasmCompileError, wasm_compile_error) 2266 DEFINE_ERROR(WasmLinkError, wasm_link_error) 2267 DEFINE_ERROR(WasmRuntimeError, wasm_runtime_error) 2268 #undef DEFINE_ERROR 2269 2270 Handle<JSFunction> Factory::NewFunction(Handle<Map> map, 2271 Handle<SharedFunctionInfo> info, 2272 Handle<Context> context, 2273 PretenureFlag pretenure) { 2274 Handle<JSFunction> function(JSFunction::cast(New(map, pretenure)), isolate()); 2275 2276 function->initialize_properties(); 2277 function->initialize_elements(); 2278 function->set_shared(*info); 2279 function->set_code(info->GetCode()); 2280 function->set_context(*context); 2281 function->set_feedback_cell(*many_closures_cell()); 2282 int header_size; 2283 if (map->has_prototype_slot()) { 2284 header_size = JSFunction::kSizeWithPrototype; 2285 function->set_prototype_or_initial_map(*the_hole_value()); 2286 } else { 2287 header_size = JSFunction::kSizeWithoutPrototype; 2288 } 2289 InitializeJSObjectBody(function, map, header_size); 2290 return function; 2291 } 2292 2293 Handle<JSFunction> Factory::NewFunctionForTest(Handle<String> name) { 2294 NewFunctionArgs args = NewFunctionArgs::ForFunctionWithoutCode( 2295 name, isolate()->sloppy_function_map(), LanguageMode::kSloppy); 2296 Handle<JSFunction> result = NewFunction(args); 2297 DCHECK(is_sloppy(result->shared()->language_mode())); 2298 return result; 2299 } 2300 2301 Handle<JSFunction> Factory::NewFunction(const NewFunctionArgs& args) { 2302 DCHECK(!args.name_.is_null()); 2303 2304 // Create the SharedFunctionInfo. 2305 Handle<NativeContext> context(isolate()->native_context()); 2306 Handle<Map> map = args.GetMap(isolate()); 2307 Handle<SharedFunctionInfo> info = 2308 NewSharedFunctionInfo(args.name_, args.maybe_exported_function_data_, 2309 args.maybe_builtin_id_, kNormalFunction); 2310 2311 // Proper language mode in shared function info will be set later. 
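// (via args.should_set_language_mode_ below); a freshly created
// SharedFunctionInfo starts out sloppy, as the DCHECK below verifies.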
2312 DCHECK(is_sloppy(info->language_mode())); 2313 DCHECK(!map->IsUndefined(isolate())); 2314 2315 #ifdef DEBUG 2316 if (isolate()->bootstrapper()->IsActive()) { 2317 Handle<Code> code; 2318 DCHECK( 2319 // During bootstrapping some of these maps could be not created yet. 2320 (*map == context->get(Context::STRICT_FUNCTION_MAP_INDEX)) || 2321 (*map == 2322 context->get(Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX)) || 2323 (*map == 2324 context->get( 2325 Context::STRICT_FUNCTION_WITH_READONLY_PROTOTYPE_MAP_INDEX)) || 2326 // Check if it's a creation of an empty or Proxy function during 2327 // bootstrapping. 2328 (args.maybe_builtin_id_ == Builtins::kEmptyFunction || 2329 args.maybe_builtin_id_ == Builtins::kProxyConstructor)); 2330 } else { 2331 DCHECK( 2332 (*map == *isolate()->sloppy_function_map()) || 2333 (*map == *isolate()->sloppy_function_without_prototype_map()) || 2334 (*map == *isolate()->sloppy_function_with_readonly_prototype_map()) || 2335 (*map == *isolate()->strict_function_map()) || 2336 (*map == *isolate()->strict_function_without_prototype_map()) || 2337 (*map == *isolate()->native_function_map())); 2338 } 2339 #endif 2340 2341 Handle<JSFunction> result = NewFunction(map, info, context); 2342 2343 if (args.should_set_prototype_) { 2344 result->set_prototype_or_initial_map( 2345 *args.maybe_prototype_.ToHandleChecked()); 2346 } 2347 2348 if (args.should_set_language_mode_) { 2349 result->shared()->set_language_mode(args.language_mode_); 2350 } 2351 2352 if (args.should_create_and_set_initial_map_) { 2353 ElementsKind elements_kind; 2354 switch (args.type_) { 2355 case JS_ARRAY_TYPE: 2356 elements_kind = PACKED_SMI_ELEMENTS; 2357 break; 2358 case JS_ARGUMENTS_TYPE: 2359 elements_kind = PACKED_ELEMENTS; 2360 break; 2361 default: 2362 elements_kind = TERMINAL_FAST_ELEMENTS_KIND; 2363 break; 2364 } 2365 Handle<Map> initial_map = NewMap(args.type_, args.instance_size_, 2366 elements_kind, args.inobject_properties_); 2367 result->shared()->set_expected_nof_properties(args.inobject_properties_); 2368 // TODO(littledan): Why do we have this is_generator test when 2369 // NewFunctionPrototype already handles finding an appropriately 2370 // shared prototype? 2371 Handle<Object> prototype = args.maybe_prototype_.ToHandleChecked(); 2372 if (!IsResumableFunction(result->shared()->kind())) { 2373 if (prototype->IsTheHole(isolate())) { 2374 prototype = NewFunctionPrototype(result); 2375 } 2376 } 2377 JSFunction::SetInitialMap(result, initial_map, prototype); 2378 } 2379 2380 return result; 2381 } 2382 2383 Handle<JSObject> Factory::NewFunctionPrototype(Handle<JSFunction> function) { 2384 // Make sure to use globals from the function's context, since the function 2385 // can be from a different context. 2386 Handle<NativeContext> native_context(function->context()->native_context(), 2387 isolate()); 2388 Handle<Map> new_map; 2389 if (V8_UNLIKELY(IsAsyncGeneratorFunction(function->shared()->kind()))) { 2390 new_map = handle(native_context->async_generator_object_prototype_map(), 2391 isolate()); 2392 } else if (IsResumableFunction(function->shared()->kind())) { 2393 // Generator and async function prototypes can share maps since they 2394 // don't have "constructor" properties. 2395 new_map = 2396 handle(native_context->generator_object_prototype_map(), isolate()); 2397 } else { 2398 // Each function prototype gets a fresh map to avoid unwanted sharing of 2399 // maps between prototypes of different constructors. 
2400 Handle<JSFunction> object_function(native_context->object_function(), 2401 isolate()); 2402 DCHECK(object_function->has_initial_map()); 2403 new_map = handle(object_function->initial_map(), isolate()); 2404 } 2405 2406 DCHECK(!new_map->is_prototype_map()); 2407 Handle<JSObject> prototype = NewJSObjectFromMap(new_map); 2408 2409 if (!IsResumableFunction(function->shared()->kind())) { 2410 JSObject::AddProperty(isolate(), prototype, constructor_string(), function, 2411 DONT_ENUM); 2412 } 2413 2414 return prototype; 2415 } 2416 2417 Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo( 2418 Handle<SharedFunctionInfo> info, Handle<Context> context, 2419 PretenureFlag pretenure) { 2420 Handle<Map> initial_map( 2421 Map::cast(context->native_context()->get(info->function_map_index())), 2422 isolate()); 2423 return NewFunctionFromSharedFunctionInfo(initial_map, info, context, 2424 pretenure); 2425 } 2426 2427 Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo( 2428 Handle<SharedFunctionInfo> info, Handle<Context> context, 2429 Handle<FeedbackCell> feedback_cell, PretenureFlag pretenure) { 2430 Handle<Map> initial_map( 2431 Map::cast(context->native_context()->get(info->function_map_index())), 2432 isolate()); 2433 return NewFunctionFromSharedFunctionInfo(initial_map, info, context, 2434 feedback_cell, pretenure); 2435 } 2436 2437 Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo( 2438 Handle<Map> initial_map, Handle<SharedFunctionInfo> info, 2439 Handle<Context> context, PretenureFlag pretenure) { 2440 DCHECK_EQ(JS_FUNCTION_TYPE, initial_map->instance_type()); 2441 Handle<JSFunction> result = 2442 NewFunction(initial_map, info, context, pretenure); 2443 2444 // Give compiler a chance to pre-initialize. 2445 Compiler::PostInstantiation(result, pretenure); 2446 2447 return result; 2448 } 2449 2450 Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo( 2451 Handle<Map> initial_map, Handle<SharedFunctionInfo> info, 2452 Handle<Context> context, Handle<FeedbackCell> feedback_cell, 2453 PretenureFlag pretenure) { 2454 DCHECK_EQ(JS_FUNCTION_TYPE, initial_map->instance_type()); 2455 Handle<JSFunction> result = 2456 NewFunction(initial_map, info, context, pretenure); 2457 2458 // Bump the closure count that is encoded in the feedback cell's map. 2459 if (feedback_cell->map() == *no_closures_cell_map()) { 2460 feedback_cell->set_map(*one_closure_cell_map()); 2461 } else if (feedback_cell->map() == *one_closure_cell_map()) { 2462 feedback_cell->set_map(*many_closures_cell_map()); 2463 } else { 2464 DCHECK_EQ(feedback_cell->map(), *many_closures_cell_map()); 2465 } 2466 2467 // Check that the optimized code in the feedback cell wasn't marked for 2468 // deoptimization while not pointed to by any live JSFunction. 2469 if (feedback_cell->value()->IsFeedbackVector()) { 2470 FeedbackVector::cast(feedback_cell->value()) 2471 ->EvictOptimizedCodeMarkedForDeoptimization( 2472 *info, "new function from shared function info"); 2473 } 2474 result->set_feedback_cell(*feedback_cell); 2475 2476 // Give compiler a chance to pre-initialize. 
2477 Compiler::PostInstantiation(result, pretenure); 2478 2479 return result; 2480 } 2481 2482 Handle<ScopeInfo> Factory::NewScopeInfo(int length) { 2483 return NewFixedArrayWithMap<ScopeInfo>(Heap::kScopeInfoMapRootIndex, length, 2484 TENURED); 2485 } 2486 2487 Handle<ModuleInfo> Factory::NewModuleInfo() { 2488 return NewFixedArrayWithMap<ModuleInfo>(Heap::kModuleInfoMapRootIndex, 2489 ModuleInfo::kLength, TENURED); 2490 } 2491 2492 Handle<PreParsedScopeData> Factory::NewPreParsedScopeData(int length) { 2493 int size = PreParsedScopeData::SizeFor(length); 2494 Handle<PreParsedScopeData> result( 2495 PreParsedScopeData::cast(AllocateRawWithImmortalMap( 2496 size, TENURED, *pre_parsed_scope_data_map())), 2497 isolate()); 2498 result->set_scope_data(PodArray<uint8_t>::cast(*empty_byte_array())); 2499 result->set_length(length); 2500 MemsetPointer(result->child_data_start(), *null_value(), length); 2501 2502 result->clear_padding(); 2503 return result; 2504 } 2505 2506 Handle<UncompiledDataWithoutPreParsedScope> 2507 Factory::NewUncompiledDataWithoutPreParsedScope(Handle<String> inferred_name, 2508 int32_t start_position, 2509 int32_t end_position, 2510 int32_t function_literal_id) { 2511 Handle<UncompiledDataWithoutPreParsedScope> result( 2512 UncompiledDataWithoutPreParsedScope::cast( 2513 New(uncompiled_data_without_pre_parsed_scope_map(), TENURED)), 2514 isolate()); 2515 result->set_inferred_name(*inferred_name); 2516 result->set_start_position(start_position); 2517 result->set_end_position(end_position); 2518 result->set_function_literal_id(function_literal_id); 2519 2520 result->clear_padding(); 2521 return result; 2522 } 2523 2524 Handle<UncompiledDataWithPreParsedScope> 2525 Factory::NewUncompiledDataWithPreParsedScope( 2526 Handle<String> inferred_name, int32_t start_position, int32_t end_position, 2527 int32_t function_literal_id, 2528 Handle<PreParsedScopeData> pre_parsed_scope_data) { 2529 Handle<UncompiledDataWithPreParsedScope> result( 2530 UncompiledDataWithPreParsedScope::cast( 2531 New(uncompiled_data_with_pre_parsed_scope_map(), TENURED)), 2532 isolate()); 2533 result->set_inferred_name(*inferred_name); 2534 result->set_start_position(start_position); 2535 result->set_end_position(end_position); 2536 result->set_function_literal_id(function_literal_id); 2537 result->set_pre_parsed_scope_data(*pre_parsed_scope_data); 2538 2539 result->clear_padding(); 2540 return result; 2541 } 2542 2543 Handle<JSObject> Factory::NewExternal(void* value) { 2544 Handle<Foreign> foreign = NewForeign(reinterpret_cast<Address>(value)); 2545 Handle<JSObject> external = NewJSObjectFromMap(external_map()); 2546 external->SetEmbedderField(0, *foreign); 2547 return external; 2548 } 2549 2550 Handle<CodeDataContainer> Factory::NewCodeDataContainer(int flags) { 2551 Handle<CodeDataContainer> data_container( 2552 CodeDataContainer::cast(New(code_data_container_map(), TENURED)), 2553 isolate()); 2554 data_container->set_next_code_link(*undefined_value(), SKIP_WRITE_BARRIER); 2555 data_container->set_kind_specific_flags(flags); 2556 data_container->clear_padding(); 2557 return data_container; 2558 } 2559 2560 MaybeHandle<Code> Factory::TryNewCode( 2561 const CodeDesc& desc, Code::Kind kind, Handle<Object> self_ref, 2562 int32_t builtin_index, MaybeHandle<ByteArray> maybe_source_position_table, 2563 MaybeHandle<DeoptimizationData> maybe_deopt_data, Movability movability, 2564 uint32_t stub_key, bool is_turbofanned, int stack_slots, 2565 int safepoint_table_offset, int handler_table_offset) { 2566 // 
Allocate objects needed for code initialization. 2567 Handle<ByteArray> reloc_info = NewByteArray(desc.reloc_size, TENURED); 2568 Handle<CodeDataContainer> data_container = NewCodeDataContainer(0); 2569 Handle<ByteArray> source_position_table = 2570 maybe_source_position_table.is_null() 2571 ? empty_byte_array() 2572 : maybe_source_position_table.ToHandleChecked(); 2573 Handle<DeoptimizationData> deopt_data = 2574 maybe_deopt_data.is_null() ? DeoptimizationData::Empty(isolate()) 2575 : maybe_deopt_data.ToHandleChecked(); 2576 Handle<Code> code; 2577 { 2578 int object_size = ComputeCodeObjectSize(desc); 2579 2580 Heap* heap = isolate()->heap(); 2581 CodePageCollectionMemoryModificationScope code_allocation(heap); 2582 HeapObject* result = 2583 heap->AllocateRawWithLightRetry(object_size, CODE_SPACE); 2584 2585 // Return an empty handle if we cannot allocate the code object. 2586 if (!result) return MaybeHandle<Code>(); 2587 2588 if (movability == kImmovable) { 2589 result = heap->EnsureImmovableCode(result, object_size); 2590 } 2591 2592 // The code object has not been fully initialized yet. We rely on the 2593 // fact that no allocation will happen from this point on. 2594 DisallowHeapAllocation no_gc; 2595 2596 result->set_map_after_allocation(*code_map(), SKIP_WRITE_BARRIER); 2597 code = handle(Code::cast(result), isolate()); 2598 2599 InitializeCode(heap, code, object_size, desc, kind, self_ref, builtin_index, 2600 source_position_table, deopt_data, reloc_info, 2601 data_container, stub_key, is_turbofanned, stack_slots, 2602 safepoint_table_offset, handler_table_offset); 2603 } 2604 // Flush the instruction cache after changing the permissions. 2605 code->FlushICache(); 2606 2607 return code; 2608 } 2609 2610 Handle<Code> Factory::NewCode( 2611 const CodeDesc& desc, Code::Kind kind, Handle<Object> self_ref, 2612 int32_t builtin_index, MaybeHandle<ByteArray> maybe_source_position_table, 2613 MaybeHandle<DeoptimizationData> maybe_deopt_data, Movability movability, 2614 uint32_t stub_key, bool is_turbofanned, int stack_slots, 2615 int safepoint_table_offset, int handler_table_offset) { 2616 // Allocate objects needed for code initialization. 2617 Handle<ByteArray> reloc_info = NewByteArray(desc.reloc_size, TENURED); 2618 Handle<CodeDataContainer> data_container = NewCodeDataContainer(0); 2619 Handle<ByteArray> source_position_table = 2620 maybe_source_position_table.is_null() 2621 ? empty_byte_array() 2622 : maybe_source_position_table.ToHandleChecked(); 2623 Handle<DeoptimizationData> deopt_data = 2624 maybe_deopt_data.is_null() ? DeoptimizationData::Empty(isolate()) 2625 : maybe_deopt_data.ToHandleChecked(); 2626 2627 Handle<Code> code; 2628 { 2629 int object_size = ComputeCodeObjectSize(desc); 2630 2631 Heap* heap = isolate()->heap(); 2632 CodePageCollectionMemoryModificationScope code_allocation(heap); 2633 HeapObject* result = 2634 heap->AllocateRawWithRetryOrFail(object_size, CODE_SPACE); 2635 2636 if (movability == kImmovable) { 2637 result = heap->EnsureImmovableCode(result, object_size); 2638 } 2639 2640 // The code object has not been fully initialized yet. We rely on the 2641 // fact that no allocation will happen from this point on. 
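// The DisallowHeapAllocation scope below enforces that assumption in debug
// builds.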
2642 DisallowHeapAllocation no_gc; 2643 2644 result->set_map_after_allocation(*code_map(), SKIP_WRITE_BARRIER); 2645 code = handle(Code::cast(result), isolate()); 2646 2647 InitializeCode(heap, code, object_size, desc, kind, self_ref, builtin_index, 2648 source_position_table, deopt_data, reloc_info, 2649 data_container, stub_key, is_turbofanned, stack_slots, 2650 safepoint_table_offset, handler_table_offset); 2651 } 2652 // Flush the instruction cache after changing the permissions. 2653 code->FlushICache(); 2654 2655 return code; 2656 } 2657 2658 Handle<Code> Factory::NewCodeForDeserialization(uint32_t size) { 2659 DCHECK(IsAligned(static_cast<intptr_t>(size), kCodeAlignment)); 2660 Heap* heap = isolate()->heap(); 2661 HeapObject* result = heap->AllocateRawWithRetryOrFail(size, CODE_SPACE); 2662 // Unprotect the memory chunk of the object if it was not unprotected 2663 // already. 2664 heap->UnprotectAndRegisterMemoryChunk(result); 2665 heap->ZapCodeObject(result->address(), size); 2666 result->set_map_after_allocation(*code_map(), SKIP_WRITE_BARRIER); 2667 DCHECK(IsAligned(result->address(), kCodeAlignment)); 2668 DCHECK(!heap->memory_allocator()->code_range()->valid() || 2669 heap->memory_allocator()->code_range()->contains(result->address()) || 2670 static_cast<int>(size) <= heap->code_space()->AreaSize()); 2671 return handle(Code::cast(result), isolate()); 2672 } 2673 2674 Handle<Code> Factory::NewOffHeapTrampolineFor(Handle<Code> code, 2675 Address off_heap_entry) { 2676 CHECK(isolate()->serializer_enabled()); 2677 CHECK_NOT_NULL(isolate()->embedded_blob()); 2678 CHECK_NE(0, isolate()->embedded_blob_size()); 2679 CHECK(Builtins::IsIsolateIndependentBuiltin(*code)); 2680 2681 Handle<Code> result = 2682 Builtins::GenerateOffHeapTrampolineFor(isolate(), off_heap_entry); 2683 2684 // The trampoline code object must inherit specific flags from the original 2685 // builtin (e.g. the safepoint-table offset). We set them manually here. 2686 2687 const bool set_is_off_heap_trampoline = true; 2688 const int stack_slots = code->has_safepoint_info() ? code->stack_slots() : 0; 2689 result->initialize_flags(code->kind(), code->has_unwinding_info(), 2690 code->is_turbofanned(), stack_slots, 2691 set_is_off_heap_trampoline); 2692 result->set_builtin_index(code->builtin_index()); 2693 result->set_handler_table_offset(code->handler_table_offset()); 2694 result->code_data_container()->set_kind_specific_flags( 2695 code->code_data_container()->kind_specific_flags()); 2696 result->set_constant_pool_offset(code->constant_pool_offset()); 2697 if (code->has_safepoint_info()) { 2698 result->set_safepoint_table_offset(code->safepoint_table_offset()); 2699 } 2700 2701 return result; 2702 } 2703 2704 Handle<Code> Factory::CopyCode(Handle<Code> code) { 2705 Handle<CodeDataContainer> data_container = 2706 NewCodeDataContainer(code->code_data_container()->kind_specific_flags()); 2707 2708 Heap* heap = isolate()->heap(); 2709 int obj_size = code->Size(); 2710 HeapObject* result = heap->AllocateRawWithRetryOrFail(obj_size, CODE_SPACE); 2711 2712 // Copy code object. 2713 Address old_addr = code->address(); 2714 Address new_addr = result->address(); 2715 Heap::CopyBlock(new_addr, old_addr, obj_size); 2716 Handle<Code> new_code(Code::cast(result), isolate()); 2717 2718 // Set the {CodeDataContainer}, it cannot be shared. 
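// It holds mutable, per-Code state (kind-specific flags and the next-code
// link, see NewCodeDataContainer()), so the copy gets the fresh container
// allocated above.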
2719 new_code->set_code_data_container(*data_container); 2720 2721 new_code->Relocate(new_addr - old_addr); 2722 // We have to iterate over the object and process its pointers when black 2723 // allocation is on. 2724 heap->incremental_marking()->ProcessBlackAllocatedObject(*new_code); 2725 // Record all references to embedded objects in the new code object. 2726 WriteBarrierForCode(*new_code); 2727 2728 #ifdef VERIFY_HEAP 2729 if (FLAG_verify_heap) new_code->ObjectVerify(isolate()); 2730 #endif 2731 DCHECK(IsAligned(new_code->address(), kCodeAlignment)); 2732 DCHECK( 2733 !heap->memory_allocator()->code_range()->valid() || 2734 heap->memory_allocator()->code_range()->contains(new_code->address()) || 2735 obj_size <= heap->code_space()->AreaSize()); 2736 return new_code; 2737 } 2738 2739 Handle<BytecodeArray> Factory::CopyBytecodeArray( 2740 Handle<BytecodeArray> bytecode_array) { 2741 int size = BytecodeArray::SizeFor(bytecode_array->length()); 2742 HeapObject* result = 2743 AllocateRawWithImmortalMap(size, TENURED, *bytecode_array_map()); 2744 2745 Handle<BytecodeArray> copy(BytecodeArray::cast(result), isolate()); 2746 copy->set_length(bytecode_array->length()); 2747 copy->set_frame_size(bytecode_array->frame_size()); 2748 copy->set_parameter_count(bytecode_array->parameter_count()); 2749 copy->set_incoming_new_target_or_generator_register( 2750 bytecode_array->incoming_new_target_or_generator_register()); 2751 copy->set_constant_pool(bytecode_array->constant_pool()); 2752 copy->set_handler_table(bytecode_array->handler_table()); 2753 copy->set_source_position_table(bytecode_array->source_position_table()); 2754 copy->set_interrupt_budget(bytecode_array->interrupt_budget()); 2755 copy->set_osr_loop_nesting_level(bytecode_array->osr_loop_nesting_level()); 2756 copy->set_bytecode_age(bytecode_array->bytecode_age()); 2757 bytecode_array->CopyBytecodesTo(*copy); 2758 return copy; 2759 } 2760 2761 Handle<JSObject> Factory::NewJSObject(Handle<JSFunction> constructor, 2762 PretenureFlag pretenure) { 2763 JSFunction::EnsureHasInitialMap(constructor); 2764 Handle<Map> map(constructor->initial_map(), isolate()); 2765 return NewJSObjectFromMap(map, pretenure); 2766 } 2767 2768 Handle<JSObject> Factory::NewJSObjectWithNullProto(PretenureFlag pretenure) { 2769 Handle<JSObject> result = 2770 NewJSObject(isolate()->object_function(), pretenure); 2771 Handle<Map> new_map = Map::Copy( 2772 isolate(), Handle<Map>(result->map(), isolate()), "ObjectWithNullProto"); 2773 Map::SetPrototype(isolate(), new_map, null_value()); 2774 JSObject::MigrateToMap(result, new_map); 2775 return result; 2776 } 2777 2778 Handle<JSGlobalObject> Factory::NewJSGlobalObject( 2779 Handle<JSFunction> constructor) { 2780 DCHECK(constructor->has_initial_map()); 2781 Handle<Map> map(constructor->initial_map(), isolate()); 2782 DCHECK(map->is_dictionary_map()); 2783 2784 // Make sure no field properties are described in the initial map. 2785 // This guarantees us that normalizing the properties does not 2786 // require us to change property values to PropertyCells. 2787 DCHECK_EQ(map->NextFreePropertyIndex(), 0); 2788 2789 // Make sure we don't have a ton of pre-allocated slots in the 2790 // global objects. They will be unused once we normalize the object. 2791 DCHECK_EQ(map->UnusedPropertyFields(), 0); 2792 DCHECK_EQ(map->GetInObjectProperties(), 0); 2793 2794 // Initial size of the backing store to avoid resize of the storage during 2795 // bootstrapping. 
The size differs between the JS global object and the
2796 // builtins object.
2797 int initial_size = 64;
2798
2799 // Allocate a dictionary object for backing storage.
2800 int at_least_space_for = map->NumberOfOwnDescriptors() * 2 + initial_size;
2801 Handle<GlobalDictionary> dictionary =
2802 GlobalDictionary::New(isolate(), at_least_space_for);
2803
2804 // The global object might be created from an object template with accessors.
2805 // Fill these accessors into the dictionary.
2806 Handle<DescriptorArray> descs(map->instance_descriptors(), isolate());
2807 for (int i = 0; i < map->NumberOfOwnDescriptors(); i++) {
2808 PropertyDetails details = descs->GetDetails(i);
2809 // Only accessors are expected.
2810 DCHECK_EQ(kAccessor, details.kind());
2811 PropertyDetails d(kAccessor, details.attributes(),
2812 PropertyCellType::kMutable);
2813 Handle<Name> name(descs->GetKey(i), isolate());
2814 Handle<PropertyCell> cell = NewPropertyCell(name);
2815 cell->set_value(descs->GetStrongValue(i));
2816 // |dictionary| already contains enough space for all properties.
2817 USE(GlobalDictionary::Add(isolate(), dictionary, name, cell, d));
2818 }
2819
2820 // Allocate the global object and initialize it with the backing store.
2821 Handle<JSGlobalObject> global(JSGlobalObject::cast(New(map, TENURED)),
2822 isolate());
2823 InitializeJSObjectFromMap(global, dictionary, map);
2824
2825 // Create a new map for the global object.
2826 Handle<Map> new_map = Map::CopyDropDescriptors(isolate(), map);
2827 new_map->set_may_have_interesting_symbols(true);
2828 new_map->set_is_dictionary_map(true);
2829
2830 // Set up the global object as a normalized object.
2831 global->set_global_dictionary(*dictionary);
2832 global->synchronized_set_map(*new_map);
2833
2834 // Make sure result is a global object with properties in dictionary.
2835 DCHECK(global->IsJSGlobalObject() && !global->HasFastProperties());
2836 return global;
2837 }
2838
2839 void Factory::InitializeJSObjectFromMap(Handle<JSObject> obj,
2840 Handle<Object> properties,
2841 Handle<Map> map) {
2842 obj->set_raw_properties_or_hash(*properties);
2843 obj->initialize_elements();
2844 // TODO(1240798): Initialize the object's body using valid initial values
2845 // according to the object's initial map. For example, if the map's
2846 // instance type is JS_ARRAY_TYPE, the length field should be initialized
2847 // to a number (e.g. Smi::kZero) and the elements initialized to a
2848 // fixed array (e.g. Heap::empty_fixed_array()). Currently, the object
2849 // verification code has to cope with (temporarily) invalid objects. See,
2850 // for example, JSArray::JSArrayVerify().
2851 InitializeJSObjectBody(obj, map, JSObject::kHeaderSize);
2852 }
2853
2854 void Factory::InitializeJSObjectBody(Handle<JSObject> obj, Handle<Map> map,
2855 int start_offset) {
2856 if (start_offset == map->instance_size()) return;
2857 DCHECK_LT(start_offset, map->instance_size());
2858
2859 // We cannot always fill with one_pointer_filler_map because objects
2860 // created from API functions expect their embedder fields to be initialized
2861 // with undefined_value.
2862 // Pre-allocated fields need to be initialized with undefined_value as well
2863 // so that object accesses before the constructor completes (e.g. in the
2864 // debugger) will not cause a crash.
2865
2866 // In case of Array subclassing, the |map| could already be transitioned
2867 // to a different elements kind from the initial map on which we track slack.
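// That is why the slack-tracking state is read from |map| below, while the
// actual tracking step is performed on the root map.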
2868 bool in_progress = map->IsInobjectSlackTrackingInProgress(); 2869 Object* filler; 2870 if (in_progress) { 2871 filler = *one_pointer_filler_map(); 2872 } else { 2873 filler = *undefined_value(); 2874 } 2875 obj->InitializeBody(*map, start_offset, *undefined_value(), filler); 2876 if (in_progress) { 2877 map->FindRootMap(isolate())->InobjectSlackTrackingStep(isolate()); 2878 } 2879 } 2880 2881 Handle<JSObject> Factory::NewJSObjectFromMap( 2882 Handle<Map> map, PretenureFlag pretenure, 2883 Handle<AllocationSite> allocation_site) { 2884 // JSFunctions should be allocated using AllocateFunction to be 2885 // properly initialized. 2886 DCHECK(map->instance_type() != JS_FUNCTION_TYPE); 2887 2888 // Both types of global objects should be allocated using 2889 // AllocateGlobalObject to be properly initialized. 2890 DCHECK(map->instance_type() != JS_GLOBAL_OBJECT_TYPE); 2891 2892 HeapObject* obj = 2893 AllocateRawWithAllocationSite(map, pretenure, allocation_site); 2894 Handle<JSObject> js_obj(JSObject::cast(obj), isolate()); 2895 2896 InitializeJSObjectFromMap(js_obj, empty_fixed_array(), map); 2897 2898 DCHECK(js_obj->HasFastElements() || js_obj->HasFixedTypedArrayElements() || 2899 js_obj->HasFastStringWrapperElements() || 2900 js_obj->HasFastArgumentsElements()); 2901 return js_obj; 2902 } 2903 2904 Handle<JSObject> Factory::NewSlowJSObjectFromMap(Handle<Map> map, int capacity, 2905 PretenureFlag pretenure) { 2906 DCHECK(map->is_dictionary_map()); 2907 Handle<NameDictionary> object_properties = 2908 NameDictionary::New(isolate(), capacity); 2909 Handle<JSObject> js_object = NewJSObjectFromMap(map, pretenure); 2910 js_object->set_raw_properties_or_hash(*object_properties); 2911 return js_object; 2912 } 2913 2914 Handle<JSArray> Factory::NewJSArray(ElementsKind elements_kind, 2915 PretenureFlag pretenure) { 2916 NativeContext* native_context = isolate()->raw_native_context(); 2917 Map* map = native_context->GetInitialJSArrayMap(elements_kind); 2918 if (map == nullptr) { 2919 JSFunction* array_function = native_context->array_function(); 2920 map = array_function->initial_map(); 2921 } 2922 return Handle<JSArray>::cast( 2923 NewJSObjectFromMap(handle(map, isolate()), pretenure)); 2924 } 2925 2926 Handle<JSArray> Factory::NewJSArray(ElementsKind elements_kind, int length, 2927 int capacity, 2928 ArrayStorageAllocationMode mode, 2929 PretenureFlag pretenure) { 2930 Handle<JSArray> array = NewJSArray(elements_kind, pretenure); 2931 NewJSArrayStorage(array, length, capacity, mode); 2932 return array; 2933 } 2934 2935 Handle<JSArray> Factory::NewJSArrayWithElements(Handle<FixedArrayBase> elements, 2936 ElementsKind elements_kind, 2937 int length, 2938 PretenureFlag pretenure) { 2939 DCHECK(length <= elements->length()); 2940 Handle<JSArray> array = NewJSArray(elements_kind, pretenure); 2941 2942 array->set_elements(*elements); 2943 array->set_length(Smi::FromInt(length)); 2944 JSObject::ValidateElements(*array); 2945 return array; 2946 } 2947 2948 void Factory::NewJSArrayStorage(Handle<JSArray> array, int length, int capacity, 2949 ArrayStorageAllocationMode mode) { 2950 DCHECK(capacity >= length); 2951 2952 if (capacity == 0) { 2953 array->set_length(Smi::kZero); 2954 array->set_elements(*empty_fixed_array()); 2955 return; 2956 } 2957 2958 HandleScope inner_scope(isolate()); 2959 Handle<FixedArrayBase> elms; 2960 ElementsKind elements_kind = array->GetElementsKind(); 2961 if (IsDoubleElementsKind(elements_kind)) { 2962 if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) { 2963 elms = 
NewFixedDoubleArray(capacity); 2964 } else { 2965 DCHECK(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE); 2966 elms = NewFixedDoubleArrayWithHoles(capacity); 2967 } 2968 } else { 2969 DCHECK(IsSmiOrObjectElementsKind(elements_kind)); 2970 if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) { 2971 elms = NewUninitializedFixedArray(capacity); 2972 } else { 2973 DCHECK(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE); 2974 elms = NewFixedArrayWithHoles(capacity); 2975 } 2976 } 2977 2978 array->set_elements(*elms); 2979 array->set_length(Smi::FromInt(length)); 2980 } 2981 2982 Handle<JSWeakMap> Factory::NewJSWeakMap() { 2983 NativeContext* native_context = isolate()->raw_native_context(); 2984 Handle<Map> map(native_context->js_weak_map_fun()->initial_map(), isolate()); 2985 Handle<JSWeakMap> weakmap(JSWeakMap::cast(*NewJSObjectFromMap(map)), 2986 isolate()); 2987 { 2988 // Do not leak handles for the hash table, it would make entries strong. 2989 HandleScope scope(isolate()); 2990 JSWeakCollection::Initialize(weakmap, isolate()); 2991 } 2992 return weakmap; 2993 } 2994 2995 Handle<JSModuleNamespace> Factory::NewJSModuleNamespace() { 2996 Handle<Map> map = isolate()->js_module_namespace_map(); 2997 Handle<JSModuleNamespace> module_namespace( 2998 Handle<JSModuleNamespace>::cast(NewJSObjectFromMap(map))); 2999 FieldIndex index = FieldIndex::ForDescriptor( 3000 *map, JSModuleNamespace::kToStringTagFieldIndex); 3001 module_namespace->FastPropertyAtPut(index, 3002 ReadOnlyRoots(isolate()).Module_string()); 3003 return module_namespace; 3004 } 3005 3006 Handle<JSGeneratorObject> Factory::NewJSGeneratorObject( 3007 Handle<JSFunction> function) { 3008 DCHECK(IsResumableFunction(function->shared()->kind())); 3009 JSFunction::EnsureHasInitialMap(function); 3010 Handle<Map> map(function->initial_map(), isolate()); 3011 3012 DCHECK(map->instance_type() == JS_GENERATOR_OBJECT_TYPE || 3013 map->instance_type() == JS_ASYNC_GENERATOR_OBJECT_TYPE); 3014 3015 return Handle<JSGeneratorObject>::cast(NewJSObjectFromMap(map)); 3016 } 3017 3018 Handle<Module> Factory::NewModule(Handle<SharedFunctionInfo> code) { 3019 Handle<ModuleInfo> module_info(code->scope_info()->ModuleDescriptorInfo(), 3020 isolate()); 3021 Handle<ObjectHashTable> exports = 3022 ObjectHashTable::New(isolate(), module_info->RegularExportCount()); 3023 Handle<FixedArray> regular_exports = 3024 NewFixedArray(module_info->RegularExportCount()); 3025 Handle<FixedArray> regular_imports = 3026 NewFixedArray(module_info->regular_imports()->length()); 3027 int requested_modules_length = module_info->module_requests()->length(); 3028 Handle<FixedArray> requested_modules = 3029 requested_modules_length > 0 ? 
NewFixedArray(requested_modules_length) 3030 : empty_fixed_array(); 3031 3032 ReadOnlyRoots roots(isolate()); 3033 Handle<Module> module = Handle<Module>::cast(NewStruct(MODULE_TYPE, TENURED)); 3034 module->set_code(*code); 3035 module->set_exports(*exports); 3036 module->set_regular_exports(*regular_exports); 3037 module->set_regular_imports(*regular_imports); 3038 module->set_hash(isolate()->GenerateIdentityHash(Smi::kMaxValue)); 3039 module->set_module_namespace(roots.undefined_value()); 3040 module->set_requested_modules(*requested_modules); 3041 module->set_script(Script::cast(code->script())); 3042 module->set_status(Module::kUninstantiated); 3043 module->set_exception(roots.the_hole_value()); 3044 module->set_import_meta(roots.the_hole_value()); 3045 module->set_dfs_index(-1); 3046 module->set_dfs_ancestor_index(-1); 3047 return module; 3048 } 3049 3050 Handle<JSArrayBuffer> Factory::NewJSArrayBuffer(SharedFlag shared, 3051 PretenureFlag pretenure) { 3052 Handle<JSFunction> array_buffer_fun( 3053 shared == SharedFlag::kShared 3054 ? isolate()->native_context()->shared_array_buffer_fun() 3055 : isolate()->native_context()->array_buffer_fun(), 3056 isolate()); 3057 Handle<Map> map(array_buffer_fun->initial_map(), isolate()); 3058 return Handle<JSArrayBuffer>::cast(NewJSObjectFromMap(map, pretenure)); 3059 } 3060 3061 Handle<JSIteratorResult> Factory::NewJSIteratorResult(Handle<Object> value, 3062 bool done) { 3063 Handle<Map> map(isolate()->native_context()->iterator_result_map(), 3064 isolate()); 3065 Handle<JSIteratorResult> js_iter_result = 3066 Handle<JSIteratorResult>::cast(NewJSObjectFromMap(map)); 3067 js_iter_result->set_value(*value); 3068 js_iter_result->set_done(*ToBoolean(done)); 3069 return js_iter_result; 3070 } 3071 3072 Handle<JSAsyncFromSyncIterator> Factory::NewJSAsyncFromSyncIterator( 3073 Handle<JSReceiver> sync_iterator, Handle<Object> next) { 3074 Handle<Map> map(isolate()->native_context()->async_from_sync_iterator_map(), 3075 isolate()); 3076 Handle<JSAsyncFromSyncIterator> iterator = 3077 Handle<JSAsyncFromSyncIterator>::cast(NewJSObjectFromMap(map)); 3078 3079 iterator->set_sync_iterator(*sync_iterator); 3080 iterator->set_next(*next); 3081 return iterator; 3082 } 3083 3084 Handle<JSMap> Factory::NewJSMap() { 3085 Handle<Map> map(isolate()->native_context()->js_map_map(), isolate()); 3086 Handle<JSMap> js_map = Handle<JSMap>::cast(NewJSObjectFromMap(map)); 3087 JSMap::Initialize(js_map, isolate()); 3088 return js_map; 3089 } 3090 3091 Handle<JSSet> Factory::NewJSSet() { 3092 Handle<Map> map(isolate()->native_context()->js_set_map(), isolate()); 3093 Handle<JSSet> js_set = Handle<JSSet>::cast(NewJSObjectFromMap(map)); 3094 JSSet::Initialize(js_set, isolate()); 3095 return js_set; 3096 } 3097 3098 Handle<JSMapIterator> Factory::NewJSMapIterator(Handle<Map> map, 3099 Handle<OrderedHashMap> table, 3100 int index) { 3101 Handle<JSMapIterator> result = 3102 Handle<JSMapIterator>::cast(NewJSObjectFromMap(map)); 3103 result->set_table(*table); 3104 result->set_index(Smi::FromInt(index)); 3105 return result; 3106 } 3107 3108 Handle<JSSetIterator> Factory::NewJSSetIterator(Handle<Map> map, 3109 Handle<OrderedHashSet> table, 3110 int index) { 3111 Handle<JSSetIterator> result = 3112 Handle<JSSetIterator>::cast(NewJSObjectFromMap(map)); 3113 result->set_table(*table); 3114 result->set_index(Smi::FromInt(index)); 3115 return result; 3116 } 3117 3118 void Factory::TypeAndSizeForElementsKind(ElementsKind kind, 3119 ExternalArrayType* array_type, 3120 size_t* element_size) { 
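  // For readers unfamiliar with the TYPED_ARRAYS X-macro: assuming an entry of
  // the form (Uint8, uint8, UINT8, uint8_t), one expansion of TYPED_ARRAY_CASE
  // below amounts to roughly
  //
  //   case UINT8_ELEMENTS:
  //     *array_type = kExternalUint8Array;
  //     *element_size = sizeof(uint8_t);
  //     break;
  //
  // and the same pattern repeats for every typed-array element kind.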
3121 switch (kind) { 3122 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \ 3123 case TYPE##_ELEMENTS: \ 3124 *array_type = kExternal##Type##Array; \ 3125 *element_size = sizeof(ctype); \ 3126 break; 3127 TYPED_ARRAYS(TYPED_ARRAY_CASE) 3128 #undef TYPED_ARRAY_CASE 3129 3130 default: 3131 UNREACHABLE(); 3132 } 3133 } 3134 3135 namespace { 3136 3137 static void ForFixedTypedArray(ExternalArrayType array_type, 3138 size_t* element_size, 3139 ElementsKind* element_kind) { 3140 switch (array_type) { 3141 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \ 3142 case kExternal##Type##Array: \ 3143 *element_size = sizeof(ctype); \ 3144 *element_kind = TYPE##_ELEMENTS; \ 3145 return; 3146 3147 TYPED_ARRAYS(TYPED_ARRAY_CASE) 3148 #undef TYPED_ARRAY_CASE 3149 } 3150 UNREACHABLE(); 3151 } 3152 3153 JSFunction* GetTypedArrayFun(ExternalArrayType type, Isolate* isolate) { 3154 NativeContext* native_context = isolate->context()->native_context(); 3155 switch (type) { 3156 #define TYPED_ARRAY_FUN(Type, type, TYPE, ctype) \ 3157 case kExternal##Type##Array: \ 3158 return native_context->type##_array_fun(); 3159 3160 TYPED_ARRAYS(TYPED_ARRAY_FUN) 3161 #undef TYPED_ARRAY_FUN 3162 } 3163 UNREACHABLE(); 3164 } 3165 3166 JSFunction* GetTypedArrayFun(ElementsKind elements_kind, Isolate* isolate) { 3167 NativeContext* native_context = isolate->context()->native_context(); 3168 switch (elements_kind) { 3169 #define TYPED_ARRAY_FUN(Type, type, TYPE, ctype) \ 3170 case TYPE##_ELEMENTS: \ 3171 return native_context->type##_array_fun(); 3172 3173 TYPED_ARRAYS(TYPED_ARRAY_FUN) 3174 #undef TYPED_ARRAY_FUN 3175 3176 default: 3177 UNREACHABLE(); 3178 } 3179 } 3180 3181 void SetupArrayBufferView(i::Isolate* isolate, 3182 i::Handle<i::JSArrayBufferView> obj, 3183 i::Handle<i::JSArrayBuffer> buffer, 3184 size_t byte_offset, size_t byte_length, 3185 PretenureFlag pretenure = NOT_TENURED) { 3186 DCHECK(byte_offset + byte_length <= 3187 static_cast<size_t>(buffer->byte_length()->Number())); 3188 3189 DCHECK_EQ(obj->GetEmbedderFieldCount(), 3190 v8::ArrayBufferView::kEmbedderFieldCount); 3191 for (int i = 0; i < v8::ArrayBufferView::kEmbedderFieldCount; i++) { 3192 obj->SetEmbedderField(i, Smi::kZero); 3193 } 3194 3195 obj->set_buffer(*buffer); 3196 3197 i::Handle<i::Object> byte_offset_object = 3198 isolate->factory()->NewNumberFromSize(byte_offset, pretenure); 3199 obj->set_byte_offset(*byte_offset_object); 3200 3201 i::Handle<i::Object> byte_length_object = 3202 isolate->factory()->NewNumberFromSize(byte_length, pretenure); 3203 obj->set_byte_length(*byte_length_object); 3204 } 3205 3206 } // namespace 3207 3208 Handle<JSTypedArray> Factory::NewJSTypedArray(ExternalArrayType type, 3209 PretenureFlag pretenure) { 3210 Handle<JSFunction> typed_array_fun(GetTypedArrayFun(type, isolate()), 3211 isolate()); 3212 Handle<Map> map(typed_array_fun->initial_map(), isolate()); 3213 return Handle<JSTypedArray>::cast(NewJSObjectFromMap(map, pretenure)); 3214 } 3215 3216 Handle<JSTypedArray> Factory::NewJSTypedArray(ElementsKind elements_kind, 3217 PretenureFlag pretenure) { 3218 Handle<JSFunction> typed_array_fun(GetTypedArrayFun(elements_kind, isolate()), 3219 isolate()); 3220 Handle<Map> map(typed_array_fun->initial_map(), isolate()); 3221 return Handle<JSTypedArray>::cast(NewJSObjectFromMap(map, pretenure)); 3222 } 3223 3224 Handle<JSTypedArray> Factory::NewJSTypedArray(ExternalArrayType type, 3225 Handle<JSArrayBuffer> buffer, 3226 size_t byte_offset, size_t length, 3227 PretenureFlag pretenure) { 3228 Handle<JSTypedArray> obj = 
NewJSTypedArray(type, pretenure); 3229 3230 size_t element_size; 3231 ElementsKind elements_kind; 3232 ForFixedTypedArray(type, &element_size, &elements_kind); 3233 3234 CHECK_EQ(byte_offset % element_size, 0); 3235 3236 CHECK(length <= (std::numeric_limits<size_t>::max() / element_size)); 3237 // TODO(7881): Smi length check 3238 CHECK(length <= static_cast<size_t>(Smi::kMaxValue)); 3239 size_t byte_length = length * element_size; 3240 SetupArrayBufferView(isolate(), obj, buffer, byte_offset, byte_length, 3241 pretenure); 3242 3243 Handle<Object> length_object = NewNumberFromSize(length, pretenure); 3244 obj->set_length(*length_object); 3245 3246 Handle<FixedTypedArrayBase> elements = NewFixedTypedArrayWithExternalPointer( 3247 static_cast<int>(length), type, 3248 static_cast<uint8_t*>(buffer->backing_store()) + byte_offset, pretenure); 3249 Handle<Map> map = JSObject::GetElementsTransitionMap(obj, elements_kind); 3250 JSObject::SetMapAndElements(obj, map, elements); 3251 return obj; 3252 } 3253 3254 Handle<JSTypedArray> Factory::NewJSTypedArray(ElementsKind elements_kind, 3255 size_t number_of_elements, 3256 PretenureFlag pretenure) { 3257 Handle<JSTypedArray> obj = NewJSTypedArray(elements_kind, pretenure); 3258 DCHECK_EQ(obj->GetEmbedderFieldCount(), 3259 v8::ArrayBufferView::kEmbedderFieldCount); 3260 for (int i = 0; i < v8::ArrayBufferView::kEmbedderFieldCount; i++) { 3261 obj->SetEmbedderField(i, Smi::kZero); 3262 } 3263 3264 size_t element_size; 3265 ExternalArrayType array_type; 3266 TypeAndSizeForElementsKind(elements_kind, &array_type, &element_size); 3267 3268 CHECK(number_of_elements <= 3269 (std::numeric_limits<size_t>::max() / element_size)); 3270 // TODO(7881): Smi length check 3271 CHECK(number_of_elements <= static_cast<size_t>(Smi::kMaxValue)); 3272 size_t byte_length = number_of_elements * element_size; 3273 3274 obj->set_byte_offset(Smi::kZero); 3275 i::Handle<i::Object> byte_length_object = 3276 NewNumberFromSize(byte_length, pretenure); 3277 obj->set_byte_length(*byte_length_object); 3278 Handle<Object> length_object = 3279 NewNumberFromSize(number_of_elements, pretenure); 3280 obj->set_length(*length_object); 3281 3282 Handle<JSArrayBuffer> buffer = 3283 NewJSArrayBuffer(SharedFlag::kNotShared, pretenure); 3284 JSArrayBuffer::Setup(buffer, isolate(), true, nullptr, byte_length, 3285 SharedFlag::kNotShared); 3286 obj->set_buffer(*buffer); 3287 Handle<FixedTypedArrayBase> elements = NewFixedTypedArray( 3288 number_of_elements, byte_length, array_type, true, pretenure); 3289 obj->set_elements(*elements); 3290 return obj; 3291 } 3292 3293 Handle<JSDataView> Factory::NewJSDataView(Handle<JSArrayBuffer> buffer, 3294 size_t byte_offset, 3295 size_t byte_length) { 3296 Handle<Map> map(isolate()->native_context()->data_view_fun()->initial_map(), 3297 isolate()); 3298 Handle<JSDataView> obj = Handle<JSDataView>::cast(NewJSObjectFromMap(map)); 3299 SetupArrayBufferView(isolate(), obj, buffer, byte_offset, byte_length); 3300 return obj; 3301 } 3302 3303 MaybeHandle<JSBoundFunction> Factory::NewJSBoundFunction( 3304 Handle<JSReceiver> target_function, Handle<Object> bound_this, 3305 Vector<Handle<Object>> bound_args) { 3306 DCHECK(target_function->IsCallable()); 3307 STATIC_ASSERT(Code::kMaxArguments <= FixedArray::kMaxLength); 3308 if (bound_args.length() >= Code::kMaxArguments) { 3309 THROW_NEW_ERROR(isolate(), 3310 NewRangeError(MessageTemplate::kTooManyArguments), 3311 JSBoundFunction); 3312 } 3313 3314 // Determine the prototype of the {target_function}. 
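  // Per the BoundFunctionCreate semantics the result's prototype comes from
  // the target rather than from %FunctionPrototype%. Since the target may be a
  // proxy whose getPrototypeOf trap throws, the lookup below can fail, hence
  // ASSIGN_RETURN_ON_EXCEPTION.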
3315 Handle<Object> prototype; 3316 ASSIGN_RETURN_ON_EXCEPTION( 3317 isolate(), prototype, 3318 JSReceiver::GetPrototype(isolate(), target_function), JSBoundFunction); 3319 3320 SaveContext save(isolate()); 3321 isolate()->set_context(*target_function->GetCreationContext()); 3322 3323 // Create the [[BoundArguments]] for the result. 3324 Handle<FixedArray> bound_arguments; 3325 if (bound_args.length() == 0) { 3326 bound_arguments = empty_fixed_array(); 3327 } else { 3328 bound_arguments = NewFixedArray(bound_args.length()); 3329 for (int i = 0; i < bound_args.length(); ++i) { 3330 bound_arguments->set(i, *bound_args[i]); 3331 } 3332 } 3333 3334 // Setup the map for the JSBoundFunction instance. 3335 Handle<Map> map = target_function->IsConstructor() 3336 ? isolate()->bound_function_with_constructor_map() 3337 : isolate()->bound_function_without_constructor_map(); 3338 if (map->prototype() != *prototype) { 3339 map = Map::TransitionToPrototype(isolate(), map, prototype); 3340 } 3341 DCHECK_EQ(target_function->IsConstructor(), map->is_constructor()); 3342 3343 // Setup the JSBoundFunction instance. 3344 Handle<JSBoundFunction> result = 3345 Handle<JSBoundFunction>::cast(NewJSObjectFromMap(map)); 3346 result->set_bound_target_function(*target_function); 3347 result->set_bound_this(*bound_this); 3348 result->set_bound_arguments(*bound_arguments); 3349 return result; 3350 } 3351 3352 // ES6 section 9.5.15 ProxyCreate (target, handler) 3353 Handle<JSProxy> Factory::NewJSProxy(Handle<JSReceiver> target, 3354 Handle<JSReceiver> handler) { 3355 // Allocate the proxy object. 3356 Handle<Map> map; 3357 if (target->IsCallable()) { 3358 if (target->IsConstructor()) { 3359 map = Handle<Map>(isolate()->proxy_constructor_map()); 3360 } else { 3361 map = Handle<Map>(isolate()->proxy_callable_map()); 3362 } 3363 } else { 3364 map = Handle<Map>(isolate()->proxy_map()); 3365 } 3366 DCHECK(map->prototype()->IsNull(isolate())); 3367 Handle<JSProxy> result(JSProxy::cast(New(map, NOT_TENURED)), isolate()); 3368 result->initialize_properties(); 3369 result->set_target(*target); 3370 result->set_handler(*handler); 3371 return result; 3372 } 3373 3374 Handle<JSGlobalProxy> Factory::NewUninitializedJSGlobalProxy(int size) { 3375 // Create an empty shell of a JSGlobalProxy that needs to be reinitialized 3376 // via ReinitializeJSGlobalProxy later. 3377 Handle<Map> map = NewMap(JS_GLOBAL_PROXY_TYPE, size); 3378 // Maintain invariant expected from any JSGlobalProxy. 3379 map->set_is_access_check_needed(true); 3380 map->set_may_have_interesting_symbols(true); 3381 return Handle<JSGlobalProxy>::cast(NewJSObjectFromMap(map, NOT_TENURED)); 3382 } 3383 3384 void Factory::ReinitializeJSGlobalProxy(Handle<JSGlobalProxy> object, 3385 Handle<JSFunction> constructor) { 3386 DCHECK(constructor->has_initial_map()); 3387 Handle<Map> map(constructor->initial_map(), isolate()); 3388 Handle<Map> old_map(object->map(), isolate()); 3389 3390 // The proxy's hash should be retained across reinitialization. 3391 Handle<Object> raw_properties_or_hash(object->raw_properties_or_hash(), 3392 isolate()); 3393 3394 if (old_map->is_prototype_map()) { 3395 map = Map::Copy(isolate(), map, "CopyAsPrototypeForJSGlobalProxy"); 3396 map->set_is_prototype_map(true); 3397 } 3398 JSObject::NotifyMapChange(old_map, map, isolate()); 3399 old_map->NotifyLeafMapLayoutChange(isolate()); 3400 3401 // Check that the already allocated object has the same size and type as 3402 // objects allocated using the constructor. 
3403 DCHECK(map->instance_size() == old_map->instance_size()); 3404 DCHECK(map->instance_type() == old_map->instance_type()); 3405 3406 // In order to keep heap in consistent state there must be no allocations 3407 // before object re-initialization is finished. 3408 DisallowHeapAllocation no_allocation; 3409 3410 // Reset the map for the object. 3411 object->synchronized_set_map(*map); 3412 3413 // Reinitialize the object from the constructor map. 3414 InitializeJSObjectFromMap(object, raw_properties_or_hash, map); 3415 } 3416 3417 Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForLiteral( 3418 FunctionLiteral* literal, Handle<Script> script, bool is_toplevel) { 3419 FunctionKind kind = literal->kind(); 3420 Handle<SharedFunctionInfo> shared = NewSharedFunctionInfoForBuiltin( 3421 literal->name(), Builtins::kCompileLazy, kind); 3422 SharedFunctionInfo::InitFromFunctionLiteral(shared, literal, is_toplevel); 3423 SharedFunctionInfo::SetScript(shared, script, literal->function_literal_id(), 3424 false); 3425 return shared; 3426 } 3427 3428 Handle<JSMessageObject> Factory::NewJSMessageObject( 3429 MessageTemplate::Template message, Handle<Object> argument, 3430 int start_position, int end_position, Handle<Script> script, 3431 Handle<Object> stack_frames) { 3432 Handle<Map> map = message_object_map(); 3433 Handle<JSMessageObject> message_obj( 3434 JSMessageObject::cast(New(map, NOT_TENURED)), isolate()); 3435 message_obj->set_raw_properties_or_hash(*empty_fixed_array(), 3436 SKIP_WRITE_BARRIER); 3437 message_obj->initialize_elements(); 3438 message_obj->set_elements(*empty_fixed_array(), SKIP_WRITE_BARRIER); 3439 message_obj->set_type(message); 3440 message_obj->set_argument(*argument); 3441 message_obj->set_start_position(start_position); 3442 message_obj->set_end_position(end_position); 3443 message_obj->set_script(*script); 3444 message_obj->set_stack_frames(*stack_frames); 3445 message_obj->set_error_level(v8::Isolate::kMessageError); 3446 return message_obj; 3447 } 3448 3449 Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForApiFunction( 3450 MaybeHandle<String> maybe_name, 3451 Handle<FunctionTemplateInfo> function_template_info, FunctionKind kind) { 3452 Handle<SharedFunctionInfo> shared = NewSharedFunctionInfo( 3453 maybe_name, function_template_info, Builtins::kNoBuiltinId, kind); 3454 return shared; 3455 } 3456 3457 Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForBuiltin( 3458 MaybeHandle<String> maybe_name, int builtin_index, FunctionKind kind) { 3459 Handle<SharedFunctionInfo> shared = NewSharedFunctionInfo( 3460 maybe_name, MaybeHandle<Code>(), builtin_index, kind); 3461 return shared; 3462 } 3463 3464 Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfo( 3465 MaybeHandle<String> maybe_name, MaybeHandle<HeapObject> maybe_function_data, 3466 int maybe_builtin_index, FunctionKind kind) { 3467 // Function names are assumed to be flat elsewhere. Must flatten before 3468 // allocating SharedFunctionInfo to avoid GC seeing the uninitialized SFI. 3469 Handle<String> shared_name; 3470 bool has_shared_name = maybe_name.ToHandle(&shared_name); 3471 if (has_shared_name) { 3472 shared_name = String::Flatten(isolate(), shared_name, TENURED); 3473 } 3474 3475 Handle<Map> map = shared_function_info_map(); 3476 Handle<SharedFunctionInfo> share(SharedFunctionInfo::cast(New(map, TENURED)), 3477 isolate()); 3478 { 3479 DisallowHeapAllocation no_allocation; 3480 3481 // Set pointer fields. 3482 share->set_name_or_scope_info( 3483 has_shared_name ? 
*shared_name 3484 : SharedFunctionInfo::kNoSharedNameSentinel); 3485 Handle<HeapObject> function_data; 3486 if (maybe_function_data.ToHandle(&function_data)) { 3487 // If we pass function_data then we shouldn't pass a builtin index, and 3488 // the function_data should not be code with a builtin. 3489 DCHECK(!Builtins::IsBuiltinId(maybe_builtin_index)); 3490 DCHECK_IMPLIES(function_data->IsCode(), 3491 !Code::cast(*function_data)->is_builtin()); 3492 share->set_function_data(*function_data); 3493 } else if (Builtins::IsBuiltinId(maybe_builtin_index)) { 3494 DCHECK_NE(maybe_builtin_index, Builtins::kDeserializeLazy); 3495 share->set_builtin_id(maybe_builtin_index); 3496 } else { 3497 share->set_builtin_id(Builtins::kIllegal); 3498 } 3499 // Generally functions won't have feedback, unless they have been created 3500 // from a FunctionLiteral. Those can just reset this field to keep the 3501 // SharedFunctionInfo in a consistent state. 3502 if (maybe_builtin_index == Builtins::kCompileLazy) { 3503 share->set_raw_outer_scope_info_or_feedback_metadata(*the_hole_value(), 3504 SKIP_WRITE_BARRIER); 3505 } else { 3506 share->set_raw_outer_scope_info_or_feedback_metadata( 3507 *empty_feedback_metadata(), SKIP_WRITE_BARRIER); 3508 } 3509 share->set_script_or_debug_info(*undefined_value(), SKIP_WRITE_BARRIER); 3510 #if V8_SFI_HAS_UNIQUE_ID 3511 share->set_unique_id(isolate()->GetNextUniqueSharedFunctionInfoId()); 3512 #endif 3513 3514 // Set integer fields (smi or int, depending on the architecture). 3515 share->set_length(0); 3516 share->set_internal_formal_parameter_count(0); 3517 share->set_expected_nof_properties(0); 3518 share->set_builtin_function_id( 3519 BuiltinFunctionId::kInvalidBuiltinFunctionId); 3520 share->set_raw_function_token_offset(0); 3521 // All flags default to false or 0. 3522 share->set_flags(0); 3523 share->CalculateConstructAsBuiltin(); 3524 share->set_kind(kind); 3525 3526 share->clear_padding(); 3527 } 3528 // Link into the list. 3529 Handle<WeakArrayList> noscript_list = noscript_shared_function_infos(); 3530 noscript_list = WeakArrayList::AddToEnd(isolate(), noscript_list, 3531 MaybeObjectHandle::Weak(share)); 3532 isolate()->heap()->set_noscript_shared_function_infos(*noscript_list); 3533 3534 #ifdef VERIFY_HEAP 3535 share->SharedFunctionInfoVerify(isolate()); 3536 #endif 3537 return share; 3538 } 3539 3540 namespace { 3541 inline int NumberToStringCacheHash(Handle<FixedArray> cache, Smi* number) { 3542 int mask = (cache->length() >> 1) - 1; 3543 return number->value() & mask; 3544 } 3545 inline int NumberToStringCacheHash(Handle<FixedArray> cache, double number) { 3546 int mask = (cache->length() >> 1) - 1; 3547 int64_t bits = bit_cast<int64_t>(number); 3548 return (static_cast<int>(bits) ^ static_cast<int>(bits >> 32)) & mask; 3549 } 3550 } // namespace 3551 3552 Handle<String> Factory::NumberToStringCacheSet(Handle<Object> number, int hash, 3553 const char* string, 3554 bool check_cache) { 3555 // We tenure the allocated string since it is referenced from the 3556 // number-string cache which lives in the old space. 3557 Handle<String> js_string = 3558 NewStringFromAsciiChecked(string, check_cache ? 
TENURED : NOT_TENURED); 3559 if (!check_cache) return js_string; 3560 3561 if (!number_string_cache()->get(hash * 2)->IsUndefined(isolate())) { 3562 int full_size = isolate()->heap()->MaxNumberToStringCacheSize(); 3563 if (number_string_cache()->length() != full_size) { 3564 Handle<FixedArray> new_cache = NewFixedArray(full_size, TENURED); 3565 isolate()->heap()->set_number_string_cache(*new_cache); 3566 return js_string; 3567 } 3568 } 3569 number_string_cache()->set(hash * 2, *number); 3570 number_string_cache()->set(hash * 2 + 1, *js_string); 3571 return js_string; 3572 } 3573 3574 Handle<Object> Factory::NumberToStringCacheGet(Object* number, int hash) { 3575 DisallowHeapAllocation no_gc; 3576 Object* key = number_string_cache()->get(hash * 2); 3577 if (key == number || (key->IsHeapNumber() && number->IsHeapNumber() && 3578 key->Number() == number->Number())) { 3579 return Handle<String>( 3580 String::cast(number_string_cache()->get(hash * 2 + 1)), isolate()); 3581 } 3582 return undefined_value(); 3583 } 3584 3585 Handle<String> Factory::NumberToString(Handle<Object> number, 3586 bool check_cache) { 3587 if (number->IsSmi()) return NumberToString(Smi::cast(*number), check_cache); 3588 3589 double double_value = Handle<HeapNumber>::cast(number)->value(); 3590 // Try to canonicalize doubles. 3591 int smi_value; 3592 if (DoubleToSmiInteger(double_value, &smi_value)) { 3593 return NumberToString(Smi::FromInt(smi_value), check_cache); 3594 } 3595 3596 int hash = 0; 3597 if (check_cache) { 3598 hash = NumberToStringCacheHash(number_string_cache(), double_value); 3599 Handle<Object> cached = NumberToStringCacheGet(*number, hash); 3600 if (!cached->IsUndefined(isolate())) return Handle<String>::cast(cached); 3601 } 3602 3603 char arr[100]; 3604 Vector<char> buffer(arr, arraysize(arr)); 3605 const char* string = DoubleToCString(double_value, buffer); 3606 3607 return NumberToStringCacheSet(number, hash, string, check_cache); 3608 } 3609 3610 Handle<String> Factory::NumberToString(Smi* number, bool check_cache) { 3611 int hash = 0; 3612 if (check_cache) { 3613 hash = NumberToStringCacheHash(number_string_cache(), number); 3614 Handle<Object> cached = NumberToStringCacheGet(number, hash); 3615 if (!cached->IsUndefined(isolate())) return Handle<String>::cast(cached); 3616 } 3617 3618 char arr[100]; 3619 Vector<char> buffer(arr, arraysize(arr)); 3620 const char* string = IntToCString(number->value(), buffer); 3621 3622 return NumberToStringCacheSet(handle(number, isolate()), hash, string, 3623 check_cache); 3624 } 3625 3626 Handle<DebugInfo> Factory::NewDebugInfo(Handle<SharedFunctionInfo> shared) { 3627 DCHECK(!shared->HasDebugInfo()); 3628 Heap* heap = isolate()->heap(); 3629 3630 Handle<DebugInfo> debug_info = 3631 Handle<DebugInfo>::cast(NewStruct(DEBUG_INFO_TYPE, TENURED)); 3632 debug_info->set_flags(DebugInfo::kNone); 3633 debug_info->set_shared(*shared); 3634 debug_info->set_debugger_hints(0); 3635 DCHECK_EQ(DebugInfo::kNoDebuggingId, debug_info->debugging_id()); 3636 DCHECK(!shared->HasDebugInfo()); 3637 debug_info->set_script(shared->script_or_debug_info()); 3638 debug_info->set_original_bytecode_array( 3639 ReadOnlyRoots(heap).undefined_value()); 3640 debug_info->set_break_points(ReadOnlyRoots(heap).empty_fixed_array()); 3641 3642 // Link debug info to function. 
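  // From here on shared->HasDebugInfo() holds, which is why the two DCHECKs
  // above had to run before establishing the link.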
3643 shared->SetDebugInfo(*debug_info); 3644 3645 return debug_info; 3646 } 3647 3648 Handle<CoverageInfo> Factory::NewCoverageInfo( 3649 const ZoneVector<SourceRange>& slots) { 3650 const int slot_count = static_cast<int>(slots.size()); 3651 3652 const int length = CoverageInfo::FixedArrayLengthForSlotCount(slot_count); 3653 Handle<CoverageInfo> info = 3654 Handle<CoverageInfo>::cast(NewUninitializedFixedArray(length)); 3655 3656 for (int i = 0; i < slot_count; i++) { 3657 SourceRange range = slots[i]; 3658 info->InitializeSlot(i, range.start, range.end); 3659 } 3660 3661 return info; 3662 } 3663 3664 Handle<BreakPointInfo> Factory::NewBreakPointInfo(int source_position) { 3665 Handle<BreakPointInfo> new_break_point_info = 3666 Handle<BreakPointInfo>::cast(NewStruct(TUPLE2_TYPE, TENURED)); 3667 new_break_point_info->set_source_position(source_position); 3668 new_break_point_info->set_break_points(*undefined_value()); 3669 return new_break_point_info; 3670 } 3671 3672 Handle<BreakPoint> Factory::NewBreakPoint(int id, Handle<String> condition) { 3673 Handle<BreakPoint> new_break_point = 3674 Handle<BreakPoint>::cast(NewStruct(TUPLE2_TYPE, TENURED)); 3675 new_break_point->set_id(id); 3676 new_break_point->set_condition(*condition); 3677 return new_break_point; 3678 } 3679 3680 Handle<StackFrameInfo> Factory::NewStackFrameInfo() { 3681 Handle<StackFrameInfo> stack_frame_info = Handle<StackFrameInfo>::cast( 3682 NewStruct(STACK_FRAME_INFO_TYPE, NOT_TENURED)); 3683 stack_frame_info->set_line_number(0); 3684 stack_frame_info->set_column_number(0); 3685 stack_frame_info->set_script_id(0); 3686 stack_frame_info->set_script_name(Smi::kZero); 3687 stack_frame_info->set_script_name_or_source_url(Smi::kZero); 3688 stack_frame_info->set_function_name(Smi::kZero); 3689 stack_frame_info->set_flag(0); 3690 return stack_frame_info; 3691 } 3692 3693 Handle<SourcePositionTableWithFrameCache> 3694 Factory::NewSourcePositionTableWithFrameCache( 3695 Handle<ByteArray> source_position_table, 3696 Handle<SimpleNumberDictionary> stack_frame_cache) { 3697 Handle<SourcePositionTableWithFrameCache> 3698 source_position_table_with_frame_cache = 3699 Handle<SourcePositionTableWithFrameCache>::cast( 3700 NewStruct(TUPLE2_TYPE, TENURED)); 3701 source_position_table_with_frame_cache->set_source_position_table( 3702 *source_position_table); 3703 source_position_table_with_frame_cache->set_stack_frame_cache( 3704 *stack_frame_cache); 3705 return source_position_table_with_frame_cache; 3706 } 3707 3708 Handle<JSObject> Factory::NewArgumentsObject(Handle<JSFunction> callee, 3709 int length) { 3710 bool strict_mode_callee = is_strict(callee->shared()->language_mode()) || 3711 !callee->shared()->has_simple_parameters(); 3712 Handle<Map> map = strict_mode_callee ? 
isolate()->strict_arguments_map() 3713 : isolate()->sloppy_arguments_map(); 3714 AllocationSiteUsageContext context(isolate(), Handle<AllocationSite>(), 3715 false); 3716 DCHECK(!isolate()->has_pending_exception()); 3717 Handle<JSObject> result = NewJSObjectFromMap(map); 3718 Handle<Smi> value(Smi::FromInt(length), isolate()); 3719 Object::SetProperty(isolate(), result, length_string(), value, 3720 LanguageMode::kStrict) 3721 .Assert(); 3722 if (!strict_mode_callee) { 3723 Object::SetProperty(isolate(), result, callee_string(), callee, 3724 LanguageMode::kStrict) 3725 .Assert(); 3726 } 3727 return result; 3728 } 3729 3730 Handle<Map> Factory::ObjectLiteralMapFromCache(Handle<NativeContext> context, 3731 int number_of_properties) { 3732 if (number_of_properties == 0) { 3733 // Reuse the initial map of the Object function if the literal has no 3734 // predeclared properties. 3735 return handle(context->object_function()->initial_map(), isolate()); 3736 } 3737 3738 // We do not cache maps for too many properties or when running builtin code. 3739 if (isolate()->bootstrapper()->IsActive()) { 3740 return Map::Create(isolate(), number_of_properties); 3741 } 3742 3743 // Use initial slow object proto map for too many properties. 3744 const int kMapCacheSize = 128; 3745 if (number_of_properties > kMapCacheSize) { 3746 return handle(context->slow_object_with_object_prototype_map(), isolate()); 3747 } 3748 3749 int cache_index = number_of_properties - 1; 3750 Handle<Object> maybe_cache(context->map_cache(), isolate()); 3751 if (maybe_cache->IsUndefined(isolate())) { 3752 // Allocate the new map cache for the native context. 3753 maybe_cache = NewWeakFixedArray(kMapCacheSize, TENURED); 3754 context->set_map_cache(*maybe_cache); 3755 } else { 3756 // Check to see whether there is a matching element in the cache. 3757 Handle<WeakFixedArray> cache = Handle<WeakFixedArray>::cast(maybe_cache); 3758 MaybeObject* result = cache->Get(cache_index); 3759 HeapObject* heap_object; 3760 if (result->ToWeakHeapObject(&heap_object)) { 3761 Map* map = Map::cast(heap_object); 3762 DCHECK(!map->is_dictionary_map()); 3763 return handle(map, isolate()); 3764 } 3765 } 3766 3767 // Create a new map and add it to the cache. 
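  // The cache entry is stored as a weak reference (HeapObjectReference::Weak
  // below), so an otherwise unreachable literal map can still be collected; a
  // cleared slot simply fails the ToWeakHeapObject check above and a fresh map
  // is created here.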
3768 Handle<WeakFixedArray> cache = Handle<WeakFixedArray>::cast(maybe_cache); 3769 Handle<Map> map = Map::Create(isolate(), number_of_properties); 3770 DCHECK(!map->is_dictionary_map()); 3771 cache->Set(cache_index, HeapObjectReference::Weak(*map)); 3772 return map; 3773 } 3774 3775 Handle<LoadHandler> Factory::NewLoadHandler(int data_count) { 3776 Handle<Map> map; 3777 switch (data_count) { 3778 case 1: 3779 map = load_handler1_map(); 3780 break; 3781 case 2: 3782 map = load_handler2_map(); 3783 break; 3784 case 3: 3785 map = load_handler3_map(); 3786 break; 3787 default: 3788 UNREACHABLE(); 3789 break; 3790 } 3791 return handle(LoadHandler::cast(New(map, TENURED)), isolate()); 3792 } 3793 3794 Handle<StoreHandler> Factory::NewStoreHandler(int data_count) { 3795 Handle<Map> map; 3796 switch (data_count) { 3797 case 0: 3798 map = store_handler0_map(); 3799 break; 3800 case 1: 3801 map = store_handler1_map(); 3802 break; 3803 case 2: 3804 map = store_handler2_map(); 3805 break; 3806 case 3: 3807 map = store_handler3_map(); 3808 break; 3809 default: 3810 UNREACHABLE(); 3811 break; 3812 } 3813 return handle(StoreHandler::cast(New(map, TENURED)), isolate()); 3814 } 3815 3816 void Factory::SetRegExpAtomData(Handle<JSRegExp> regexp, JSRegExp::Type type, 3817 Handle<String> source, JSRegExp::Flags flags, 3818 Handle<Object> data) { 3819 Handle<FixedArray> store = NewFixedArray(JSRegExp::kAtomDataSize); 3820 3821 store->set(JSRegExp::kTagIndex, Smi::FromInt(type)); 3822 store->set(JSRegExp::kSourceIndex, *source); 3823 store->set(JSRegExp::kFlagsIndex, Smi::FromInt(flags)); 3824 store->set(JSRegExp::kAtomPatternIndex, *data); 3825 regexp->set_data(*store); 3826 } 3827 3828 void Factory::SetRegExpIrregexpData(Handle<JSRegExp> regexp, 3829 JSRegExp::Type type, Handle<String> source, 3830 JSRegExp::Flags flags, int capture_count) { 3831 Handle<FixedArray> store = NewFixedArray(JSRegExp::kIrregexpDataSize); 3832 Smi* uninitialized = Smi::FromInt(JSRegExp::kUninitializedValue); 3833 store->set(JSRegExp::kTagIndex, Smi::FromInt(type)); 3834 store->set(JSRegExp::kSourceIndex, *source); 3835 store->set(JSRegExp::kFlagsIndex, Smi::FromInt(flags)); 3836 store->set(JSRegExp::kIrregexpLatin1CodeIndex, uninitialized); 3837 store->set(JSRegExp::kIrregexpUC16CodeIndex, uninitialized); 3838 store->set(JSRegExp::kIrregexpMaxRegisterCountIndex, Smi::kZero); 3839 store->set(JSRegExp::kIrregexpCaptureCountIndex, Smi::FromInt(capture_count)); 3840 store->set(JSRegExp::kIrregexpCaptureNameMapIndex, uninitialized); 3841 regexp->set_data(*store); 3842 } 3843 3844 Handle<RegExpMatchInfo> Factory::NewRegExpMatchInfo() { 3845 // Initially, the last match info consists of all fixed fields plus space for 3846 // the match itself (i.e., 2 capture indices). 
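  // Concretely (assuming the usual RegExpMatchInfo layout of
  // number-of-capture-registers, last subject, last input, then the capture
  // array), the initial size leaves room for exactly one full match: capture
  // 0's start and end indices, both zeroed below.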
3847 static const int kInitialSize = RegExpMatchInfo::kFirstCaptureIndex + 3848 RegExpMatchInfo::kInitialCaptureIndices; 3849 3850 Handle<FixedArray> elems = NewFixedArray(kInitialSize); 3851 Handle<RegExpMatchInfo> result = Handle<RegExpMatchInfo>::cast(elems); 3852 3853 result->SetNumberOfCaptureRegisters(RegExpMatchInfo::kInitialCaptureIndices); 3854 result->SetLastSubject(*empty_string()); 3855 result->SetLastInput(*undefined_value()); 3856 result->SetCapture(0, 0); 3857 result->SetCapture(1, 0); 3858 3859 return result; 3860 } 3861 3862 Handle<Object> Factory::GlobalConstantFor(Handle<Name> name) { 3863 if (Name::Equals(isolate(), name, undefined_string())) { 3864 return undefined_value(); 3865 } 3866 if (Name::Equals(isolate(), name, NaN_string())) return nan_value(); 3867 if (Name::Equals(isolate(), name, Infinity_string())) return infinity_value(); 3868 return Handle<Object>::null(); 3869 } 3870 3871 Handle<Object> Factory::ToBoolean(bool value) { 3872 return value ? true_value() : false_value(); 3873 } 3874 3875 Handle<String> Factory::ToPrimitiveHintString(ToPrimitiveHint hint) { 3876 switch (hint) { 3877 case ToPrimitiveHint::kDefault: 3878 return default_string(); 3879 case ToPrimitiveHint::kNumber: 3880 return number_string(); 3881 case ToPrimitiveHint::kString: 3882 return string_string(); 3883 } 3884 UNREACHABLE(); 3885 } 3886 3887 Handle<Map> Factory::CreateSloppyFunctionMap( 3888 FunctionMode function_mode, MaybeHandle<JSFunction> maybe_empty_function) { 3889 bool has_prototype = IsFunctionModeWithPrototype(function_mode); 3890 int header_size = has_prototype ? JSFunction::kSizeWithPrototype 3891 : JSFunction::kSizeWithoutPrototype; 3892 int descriptors_count = has_prototype ? 5 : 4; 3893 int inobject_properties_count = 0; 3894 if (IsFunctionModeWithName(function_mode)) ++inobject_properties_count; 3895 3896 Handle<Map> map = NewMap( 3897 JS_FUNCTION_TYPE, header_size + inobject_properties_count * kPointerSize, 3898 TERMINAL_FAST_ELEMENTS_KIND, inobject_properties_count); 3899 map->set_has_prototype_slot(has_prototype); 3900 map->set_is_constructor(has_prototype); 3901 map->set_is_callable(true); 3902 Handle<JSFunction> empty_function; 3903 if (maybe_empty_function.ToHandle(&empty_function)) { 3904 Map::SetPrototype(isolate(), map, empty_function); 3905 } 3906 3907 // 3908 // Setup descriptors array. 3909 // 3910 Map::EnsureDescriptorSlack(isolate(), map, descriptors_count); 3911 3912 PropertyAttributes ro_attribs = 3913 static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY); 3914 PropertyAttributes rw_attribs = 3915 static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE); 3916 PropertyAttributes roc_attribs = 3917 static_cast<PropertyAttributes>(DONT_ENUM | READ_ONLY); 3918 3919 int field_index = 0; 3920 STATIC_ASSERT(JSFunction::kLengthDescriptorIndex == 0); 3921 { // Add length accessor. 3922 Descriptor d = Descriptor::AccessorConstant( 3923 length_string(), function_length_accessor(), roc_attribs); 3924 map->AppendDescriptor(&d); 3925 } 3926 3927 STATIC_ASSERT(JSFunction::kNameDescriptorIndex == 1); 3928 if (IsFunctionModeWithName(function_mode)) { 3929 // Add name field. 3930 Handle<Name> name = isolate()->factory()->name_string(); 3931 Descriptor d = Descriptor::DataField(isolate(), name, field_index++, 3932 roc_attribs, Representation::Tagged()); 3933 map->AppendDescriptor(&d); 3934 3935 } else { 3936 // Add name accessor. 
3937 Descriptor d = Descriptor::AccessorConstant( 3938 name_string(), function_name_accessor(), roc_attribs); 3939 map->AppendDescriptor(&d); 3940 } 3941 { // Add arguments accessor. 3942 Descriptor d = Descriptor::AccessorConstant( 3943 arguments_string(), function_arguments_accessor(), ro_attribs); 3944 map->AppendDescriptor(&d); 3945 } 3946 { // Add caller accessor. 3947 Descriptor d = Descriptor::AccessorConstant( 3948 caller_string(), function_caller_accessor(), ro_attribs); 3949 map->AppendDescriptor(&d); 3950 } 3951 if (IsFunctionModeWithPrototype(function_mode)) { 3952 // Add prototype accessor. 3953 PropertyAttributes attribs = 3954 IsFunctionModeWithWritablePrototype(function_mode) ? rw_attribs 3955 : ro_attribs; 3956 Descriptor d = Descriptor::AccessorConstant( 3957 prototype_string(), function_prototype_accessor(), attribs); 3958 map->AppendDescriptor(&d); 3959 } 3960 DCHECK_EQ(inobject_properties_count, field_index); 3961 return map; 3962 } 3963 3964 Handle<Map> Factory::CreateStrictFunctionMap( 3965 FunctionMode function_mode, Handle<JSFunction> empty_function) { 3966 bool has_prototype = IsFunctionModeWithPrototype(function_mode); 3967 int header_size = has_prototype ? JSFunction::kSizeWithPrototype 3968 : JSFunction::kSizeWithoutPrototype; 3969 int inobject_properties_count = 0; 3970 if (IsFunctionModeWithName(function_mode)) ++inobject_properties_count; 3971 if (IsFunctionModeWithHomeObject(function_mode)) ++inobject_properties_count; 3972 int descriptors_count = (IsFunctionModeWithPrototype(function_mode) ? 3 : 2) + 3973 inobject_properties_count; 3974 3975 Handle<Map> map = NewMap( 3976 JS_FUNCTION_TYPE, header_size + inobject_properties_count * kPointerSize, 3977 TERMINAL_FAST_ELEMENTS_KIND, inobject_properties_count); 3978 map->set_has_prototype_slot(has_prototype); 3979 map->set_is_constructor(has_prototype); 3980 map->set_is_callable(true); 3981 Map::SetPrototype(isolate(), map, empty_function); 3982 3983 // 3984 // Setup descriptors array. 3985 // 3986 Map::EnsureDescriptorSlack(isolate(), map, descriptors_count); 3987 3988 PropertyAttributes rw_attribs = 3989 static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE); 3990 PropertyAttributes ro_attribs = 3991 static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY); 3992 PropertyAttributes roc_attribs = 3993 static_cast<PropertyAttributes>(DONT_ENUM | READ_ONLY); 3994 3995 int field_index = 0; 3996 STATIC_ASSERT(JSFunction::kLengthDescriptorIndex == 0); 3997 { // Add length accessor. 3998 Descriptor d = Descriptor::AccessorConstant( 3999 length_string(), function_length_accessor(), roc_attribs); 4000 map->AppendDescriptor(&d); 4001 } 4002 4003 STATIC_ASSERT(JSFunction::kNameDescriptorIndex == 1); 4004 if (IsFunctionModeWithName(function_mode)) { 4005 // Add name field. 4006 Handle<Name> name = isolate()->factory()->name_string(); 4007 Descriptor d = Descriptor::DataField(isolate(), name, field_index++, 4008 roc_attribs, Representation::Tagged()); 4009 map->AppendDescriptor(&d); 4010 4011 } else { 4012 // Add name accessor. 4013 Descriptor d = Descriptor::AccessorConstant( 4014 name_string(), function_name_accessor(), roc_attribs); 4015 map->AppendDescriptor(&d); 4016 } 4017 4018 STATIC_ASSERT(JSFunction::kMaybeHomeObjectDescriptorIndex == 2); 4019 if (IsFunctionModeWithHomeObject(function_mode)) { 4020 // Add home object field. 
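    // Unlike the length and name properties above, this is a data field keyed
    // by a private symbol, so it is not observable through user-level property
    // enumeration.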
4021 Handle<Name> name = isolate()->factory()->home_object_symbol(); 4022 Descriptor d = Descriptor::DataField(isolate(), name, field_index++, 4023 DONT_ENUM, Representation::Tagged()); 4024 map->AppendDescriptor(&d); 4025 } 4026 4027 if (IsFunctionModeWithPrototype(function_mode)) { 4028 // Add prototype accessor. 4029 PropertyAttributes attribs = 4030 IsFunctionModeWithWritablePrototype(function_mode) ? rw_attribs 4031 : ro_attribs; 4032 Descriptor d = Descriptor::AccessorConstant( 4033 prototype_string(), function_prototype_accessor(), attribs); 4034 map->AppendDescriptor(&d); 4035 } 4036 DCHECK_EQ(inobject_properties_count, field_index); 4037 return map; 4038 } 4039 4040 Handle<Map> Factory::CreateClassFunctionMap(Handle<JSFunction> empty_function) { 4041 Handle<Map> map = NewMap(JS_FUNCTION_TYPE, JSFunction::kSizeWithPrototype); 4042 map->set_has_prototype_slot(true); 4043 map->set_is_constructor(true); 4044 map->set_is_prototype_map(true); 4045 map->set_is_callable(true); 4046 Map::SetPrototype(isolate(), map, empty_function); 4047 4048 // 4049 // Setup descriptors array. 4050 // 4051 Map::EnsureDescriptorSlack(isolate(), map, 2); 4052 4053 PropertyAttributes ro_attribs = 4054 static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY); 4055 PropertyAttributes roc_attribs = 4056 static_cast<PropertyAttributes>(DONT_ENUM | READ_ONLY); 4057 4058 STATIC_ASSERT(JSFunction::kLengthDescriptorIndex == 0); 4059 { // Add length accessor. 4060 Descriptor d = Descriptor::AccessorConstant( 4061 length_string(), function_length_accessor(), roc_attribs); 4062 map->AppendDescriptor(&d); 4063 } 4064 4065 { 4066 // Add prototype accessor. 4067 Descriptor d = Descriptor::AccessorConstant( 4068 prototype_string(), function_prototype_accessor(), ro_attribs); 4069 map->AppendDescriptor(&d); 4070 } 4071 return map; 4072 } 4073 4074 Handle<JSPromise> Factory::NewJSPromiseWithoutHook(PretenureFlag pretenure) { 4075 Handle<JSPromise> promise = Handle<JSPromise>::cast( 4076 NewJSObject(isolate()->promise_function(), pretenure)); 4077 promise->set_reactions_or_result(Smi::kZero); 4078 promise->set_flags(0); 4079 for (int i = 0; i < v8::Promise::kEmbedderFieldCount; i++) { 4080 promise->SetEmbedderField(i, Smi::kZero); 4081 } 4082 return promise; 4083 } 4084 4085 Handle<JSPromise> Factory::NewJSPromise(PretenureFlag pretenure) { 4086 Handle<JSPromise> promise = NewJSPromiseWithoutHook(pretenure); 4087 isolate()->RunPromiseHook(PromiseHookType::kInit, promise, undefined_value()); 4088 return promise; 4089 } 4090 4091 Handle<CallHandlerInfo> Factory::NewCallHandlerInfo(bool has_no_side_effect) { 4092 Handle<Map> map = has_no_side_effect 4093 ? 
side_effect_free_call_handler_info_map() 4094 : side_effect_call_handler_info_map(); 4095 Handle<CallHandlerInfo> info(CallHandlerInfo::cast(New(map, TENURED)), 4096 isolate()); 4097 Object* undefined_value = ReadOnlyRoots(isolate()).undefined_value(); 4098 info->set_callback(undefined_value); 4099 info->set_js_callback(undefined_value); 4100 info->set_data(undefined_value); 4101 return info; 4102 } 4103 4104 // static 4105 NewFunctionArgs NewFunctionArgs::ForWasm( 4106 Handle<String> name, 4107 Handle<WasmExportedFunctionData> exported_function_data, Handle<Map> map) { 4108 NewFunctionArgs args; 4109 args.name_ = name; 4110 args.maybe_map_ = map; 4111 args.maybe_exported_function_data_ = exported_function_data; 4112 args.language_mode_ = LanguageMode::kSloppy; 4113 args.prototype_mutability_ = MUTABLE; 4114 4115 return args; 4116 } 4117 4118 // static 4119 NewFunctionArgs NewFunctionArgs::ForBuiltin(Handle<String> name, 4120 Handle<Map> map, int builtin_id) { 4121 DCHECK(Builtins::IsBuiltinId(builtin_id)); 4122 4123 NewFunctionArgs args; 4124 args.name_ = name; 4125 args.maybe_map_ = map; 4126 args.maybe_builtin_id_ = builtin_id; 4127 args.language_mode_ = LanguageMode::kStrict; 4128 args.prototype_mutability_ = MUTABLE; 4129 4130 args.SetShouldSetLanguageMode(); 4131 4132 return args; 4133 } 4134 4135 // static 4136 NewFunctionArgs NewFunctionArgs::ForFunctionWithoutCode( 4137 Handle<String> name, Handle<Map> map, LanguageMode language_mode) { 4138 NewFunctionArgs args; 4139 args.name_ = name; 4140 args.maybe_map_ = map; 4141 args.maybe_builtin_id_ = Builtins::kIllegal; 4142 args.language_mode_ = language_mode; 4143 args.prototype_mutability_ = MUTABLE; 4144 4145 args.SetShouldSetLanguageMode(); 4146 4147 return args; 4148 } 4149 4150 // static 4151 NewFunctionArgs NewFunctionArgs::ForBuiltinWithPrototype( 4152 Handle<String> name, Handle<Object> prototype, InstanceType type, 4153 int instance_size, int inobject_properties, int builtin_id, 4154 MutableMode prototype_mutability) { 4155 DCHECK(Builtins::IsBuiltinId(builtin_id)); 4156 4157 NewFunctionArgs args; 4158 args.name_ = name; 4159 args.type_ = type; 4160 args.instance_size_ = instance_size; 4161 args.inobject_properties_ = inobject_properties; 4162 args.maybe_prototype_ = prototype; 4163 args.maybe_builtin_id_ = builtin_id; 4164 args.language_mode_ = LanguageMode::kStrict; 4165 args.prototype_mutability_ = prototype_mutability; 4166 4167 args.SetShouldCreateAndSetInitialMap(); 4168 args.SetShouldSetPrototype(); 4169 args.SetShouldSetLanguageMode(); 4170 4171 return args; 4172 } 4173 4174 // static 4175 NewFunctionArgs NewFunctionArgs::ForBuiltinWithoutPrototype( 4176 Handle<String> name, int builtin_id, LanguageMode language_mode) { 4177 DCHECK(Builtins::IsBuiltinId(builtin_id)); 4178 4179 NewFunctionArgs args; 4180 args.name_ = name; 4181 args.maybe_builtin_id_ = builtin_id; 4182 args.language_mode_ = language_mode; 4183 args.prototype_mutability_ = MUTABLE; 4184 4185 args.SetShouldSetLanguageMode(); 4186 4187 return args; 4188 } 4189 4190 void NewFunctionArgs::SetShouldCreateAndSetInitialMap() { 4191 // Needed to create the initial map. 
4192 maybe_prototype_.Assert(); 4193 DCHECK_NE(kUninitialized, instance_size_); 4194 DCHECK_NE(kUninitialized, inobject_properties_); 4195 4196 should_create_and_set_initial_map_ = true; 4197 } 4198 4199 void NewFunctionArgs::SetShouldSetPrototype() { 4200 maybe_prototype_.Assert(); 4201 should_set_prototype_ = true; 4202 } 4203 4204 void NewFunctionArgs::SetShouldSetLanguageMode() { 4205 DCHECK(language_mode_ == LanguageMode::kStrict || 4206 language_mode_ == LanguageMode::kSloppy); 4207 should_set_language_mode_ = true; 4208 } 4209 4210 Handle<Map> NewFunctionArgs::GetMap(Isolate* isolate) const { 4211 if (!maybe_map_.is_null()) { 4212 return maybe_map_.ToHandleChecked(); 4213 } else if (maybe_prototype_.is_null()) { 4214 return is_strict(language_mode_) 4215 ? isolate->strict_function_without_prototype_map() 4216 : isolate->sloppy_function_without_prototype_map(); 4217 } else { 4218 DCHECK(!maybe_prototype_.is_null()); 4219 switch (prototype_mutability_) { 4220 case MUTABLE: 4221 return is_strict(language_mode_) ? isolate->strict_function_map() 4222 : isolate->sloppy_function_map(); 4223 case IMMUTABLE: 4224 return is_strict(language_mode_) 4225 ? isolate->strict_function_with_readonly_prototype_map() 4226 : isolate->sloppy_function_with_readonly_prototype_map(); 4227 } 4228 } 4229 UNREACHABLE(); 4230 } 4231 4232 } // namespace internal 4233 } // namespace v8 4234
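// Usage sketch for the NewFunctionArgs builders above (illustrative only; a
// Factory::NewFunction overload taking a NewFunctionArgs is assumed here, as
// the actual call sites live in bootstrapper and API code):
//
//   NewFunctionArgs args = NewFunctionArgs::ForBuiltin(
//       isolate->factory()->empty_string(), isolate->sloppy_function_map(),
//       Builtins::kIllegal);
//   Handle<JSFunction> function = isolate->factory()->NewFunction(args);
//
// GetMap() then resolves the map from the explicit map, the prototype, and the
// language mode, as implemented in NewFunctionArgs::GetMap above.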