// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_GLOBALS_H_
#define V8_GLOBALS_H_

#include <stddef.h>
#include <stdint.h>

#include <limits>
#include <ostream>

#include "include/v8.h"
#include "src/base/build_config.h"
#include "src/base/flags.h"
#include "src/base/logging.h"
#include "src/base/macros.h"

#define V8_INFINITY std::numeric_limits<double>::infinity()

namespace v8 {

namespace base {
class Mutex;
class RecursiveMutex;
}

namespace internal {

// Determine whether we are running in a simulated environment.
// Setting USE_SIMULATOR explicitly from the build script will force
// the use of a simulated environment.
#if !defined(USE_SIMULATOR)
#if (V8_TARGET_ARCH_ARM64 && !V8_HOST_ARCH_ARM64)
#define USE_SIMULATOR 1
#endif
#if (V8_TARGET_ARCH_ARM && !V8_HOST_ARCH_ARM)
#define USE_SIMULATOR 1
#endif
#if (V8_TARGET_ARCH_PPC && !V8_HOST_ARCH_PPC)
#define USE_SIMULATOR 1
#endif
#if (V8_TARGET_ARCH_MIPS && !V8_HOST_ARCH_MIPS)
#define USE_SIMULATOR 1
#endif
#if (V8_TARGET_ARCH_MIPS64 && !V8_HOST_ARCH_MIPS64)
#define USE_SIMULATOR 1
#endif
#if (V8_TARGET_ARCH_S390 && !V8_HOST_ARCH_S390)
#define USE_SIMULATOR 1
#endif
#endif

// Determine whether the architecture uses an embedded constant pool
// (contiguous constant pool embedded in code object).
#if V8_TARGET_ARCH_PPC
#define V8_EMBEDDED_CONSTANT_POOL 1
#else
#define V8_EMBEDDED_CONSTANT_POOL 0
#endif

#ifdef V8_TARGET_ARCH_ARM
// Set stack limit lower for ARM than for other architectures because
// stack allocating MacroAssembler takes 120K bytes.
// See issue crbug.com/405338
#define V8_DEFAULT_STACK_SIZE_KB 864
#else
// Slightly less than 1MB, since Windows' default stack size for
// the main execution thread is 1MB for both 32 and 64-bit.
#define V8_DEFAULT_STACK_SIZE_KB 984
#endif

// Minimum stack size in KB required by compilers.
constexpr int kStackSpaceRequiredForCompilation = 40;

// Determine whether double field unboxing feature is enabled.
#if V8_TARGET_ARCH_64_BIT
#define V8_DOUBLE_FIELDS_UNBOXING 1
#else
#define V8_DOUBLE_FIELDS_UNBOXING 0
#endif

// Some types of tracing require the SFI to store a unique ID.
#if defined(V8_TRACE_MAPS) || defined(V8_TRACE_IGNITION)
#define V8_SFI_HAS_UNIQUE_ID 1
#endif

// Superclass for classes only using static method functions.
// The subclass of AllStatic cannot be instantiated at all.
class AllStatic {
#ifdef DEBUG
 public:
  AllStatic() = delete;
#endif
};
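
// Usage sketch (illustrative only, kept inside a comment so it has no effect
// on the build; the class name is made up): a grouping of static helpers.
//
//   class HypotheticalBitUtils : public AllStatic {
//    public:
//     static int CountTrailingZeros(uint32_t x);
//   };
//
//   // HypotheticalBitUtils utils;  // In DEBUG builds this does not compile:
//   //                              // the constructor is deleted.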

// DEPRECATED
// TODO(leszeks): Delete this during a quiet period
#define BASE_EMBEDDED

typedef uint8_t byte;
typedef uintptr_t Address;
static const Address kNullAddress = 0;

// -----------------------------------------------------------------------------
// Constants

constexpr int KB = 1024;
constexpr int MB = KB * KB;
constexpr int GB = KB * KB * KB;
constexpr int kMaxInt = 0x7FFFFFFF;
constexpr int kMinInt = -kMaxInt - 1;
constexpr int kMaxInt8 = (1 << 7) - 1;
constexpr int kMinInt8 = -(1 << 7);
constexpr int kMaxUInt8 = (1 << 8) - 1;
constexpr int kMinUInt8 = 0;
constexpr int kMaxInt16 = (1 << 15) - 1;
constexpr int kMinInt16 = -(1 << 15);
constexpr int kMaxUInt16 = (1 << 16) - 1;
constexpr int kMinUInt16 = 0;

constexpr uint32_t kMaxUInt32 = 0xFFFFFFFFu;
constexpr int kMinUInt32 = 0;

constexpr int kUInt8Size = sizeof(uint8_t);
constexpr int kCharSize = sizeof(char);
constexpr int kShortSize = sizeof(short);  // NOLINT
constexpr int kUInt16Size = sizeof(uint16_t);
constexpr int kIntSize = sizeof(int);
constexpr int kInt32Size = sizeof(int32_t);
constexpr int kInt64Size = sizeof(int64_t);
constexpr int kUInt32Size = sizeof(uint32_t);
constexpr int kSizetSize = sizeof(size_t);
constexpr int kFloatSize = sizeof(float);
constexpr int kDoubleSize = sizeof(double);
constexpr int kIntptrSize = sizeof(intptr_t);
constexpr int kUIntptrSize = sizeof(uintptr_t);
constexpr int kPointerSize = sizeof(void*);
constexpr int kPointerHexDigits = kPointerSize == 4 ? 8 : 12;
#if V8_TARGET_ARCH_X64 && V8_TARGET_ARCH_32_BIT
constexpr int kRegisterSize = kPointerSize + kPointerSize;
#else
constexpr int kRegisterSize = kPointerSize;
#endif
constexpr int kPCOnStackSize = kRegisterSize;
constexpr int kFPOnStackSize = kRegisterSize;

#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_IA32
constexpr int kElidedFrameSlots = kPCOnStackSize / kPointerSize;
#else
constexpr int kElidedFrameSlots = 0;
#endif

constexpr int kDoubleSizeLog2 = 3;
#if V8_TARGET_ARCH_ARM64
// ARM64 only supports direct calls within a 128 MB range.
constexpr size_t kMaxWasmCodeMemory = 128 * MB;
#else
constexpr size_t kMaxWasmCodeMemory = 1024 * MB;
#endif

#if V8_HOST_ARCH_64_BIT
constexpr int kPointerSizeLog2 = 3;
constexpr intptr_t kIntptrSignBit =
    static_cast<intptr_t>(uintptr_t{0x8000000000000000});
constexpr uintptr_t kUintptrAllBitsSet = uintptr_t{0xFFFFFFFFFFFFFFFF};
constexpr bool kRequiresCodeRange = true;
#if V8_TARGET_ARCH_MIPS64
// To use pseudo-relative jumps such as j/jal instructions which have 28-bit
// encoded immediate, the addresses have to be in range of 256MB aligned
// region. Used only for large object space.
constexpr size_t kMaximalCodeRangeSize = 256 * MB;
constexpr size_t kCodeRangeAreaAlignment = 256 * MB;
#elif V8_HOST_ARCH_PPC && V8_TARGET_ARCH_PPC && V8_OS_LINUX
constexpr size_t kMaximalCodeRangeSize = 512 * MB;
constexpr size_t kCodeRangeAreaAlignment = 64 * KB;  // OS page on PPC Linux
#elif V8_TARGET_ARCH_ARM64
constexpr size_t kMaximalCodeRangeSize = 128 * MB;
constexpr size_t kCodeRangeAreaAlignment = 4 * KB;  // OS page.
#else
constexpr size_t kMaximalCodeRangeSize = 128 * MB;
constexpr size_t kCodeRangeAreaAlignment = 4 * KB;  // OS page.
#endif
#if V8_OS_WIN
constexpr size_t kMinimumCodeRangeSize = 4 * MB;
constexpr size_t kReservedCodeRangePages = 1;
#else
constexpr size_t kMinimumCodeRangeSize = 3 * MB;
constexpr size_t kReservedCodeRangePages = 0;
#endif
#else
constexpr int kPointerSizeLog2 = 2;
constexpr intptr_t kIntptrSignBit = 0x80000000;
constexpr uintptr_t kUintptrAllBitsSet = 0xFFFFFFFFu;
#if V8_TARGET_ARCH_X64 && V8_TARGET_ARCH_32_BIT
// x32 port also requires code range.
constexpr bool kRequiresCodeRange = true;
constexpr size_t kMaximalCodeRangeSize = 256 * MB;
constexpr size_t kMinimumCodeRangeSize = 3 * MB;
constexpr size_t kCodeRangeAreaAlignment = 4 * KB;  // OS page.
#elif V8_HOST_ARCH_PPC && V8_TARGET_ARCH_PPC && V8_OS_LINUX
constexpr bool kRequiresCodeRange = false;
constexpr size_t kMaximalCodeRangeSize = 0 * MB;
constexpr size_t kMinimumCodeRangeSize = 0 * MB;
constexpr size_t kCodeRangeAreaAlignment = 64 * KB;  // OS page on PPC Linux
#else
constexpr bool kRequiresCodeRange = false;
constexpr size_t kMaximalCodeRangeSize = 0 * MB;
constexpr size_t kMinimumCodeRangeSize = 0 * MB;
constexpr size_t kCodeRangeAreaAlignment = 4 * KB;  // OS page.
#endif
constexpr size_t kReservedCodeRangePages = 0;
#endif

// Trigger an incremental GC once the external memory reaches this limit.
constexpr int kExternalAllocationSoftLimit = 64 * MB;

// Maximum object size that gets allocated into regular pages. Objects larger
// than that size are allocated in large object space and are never moved in
// memory. This also applies to new space allocation, since objects are never
// migrated from new space to large object space. Takes double alignment into
// account.
//
// Current value: Page::kAllocatableMemory (on 32-bit arch) - 512 (slack).
constexpr int kMaxRegularHeapObjectSize = 507136;

// Objects smaller than or equal to kMaxNewSpaceHeapObjectSize are allocated in
// the new large object space.
constexpr int kMaxNewSpaceHeapObjectSize = 32 * KB;

STATIC_ASSERT(kPointerSize == (1 << kPointerSizeLog2));

constexpr int kBitsPerByte = 8;
constexpr int kBitsPerByteLog2 = 3;
constexpr int kBitsPerPointer = kPointerSize * kBitsPerByte;
constexpr int kBitsPerInt = kIntSize * kBitsPerByte;

// IEEE 754 single precision floating point number bit layout.
constexpr uint32_t kBinary32SignMask = 0x80000000u;
constexpr uint32_t kBinary32ExponentMask = 0x7f800000u;
constexpr uint32_t kBinary32MantissaMask = 0x007fffffu;
constexpr int kBinary32ExponentBias = 127;
constexpr int kBinary32MaxExponent = 0xFE;
constexpr int kBinary32MinExponent = 0x01;
constexpr int kBinary32MantissaBits = 23;
constexpr int kBinary32ExponentShift = 23;
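
// As a worked example of this layout (illustrative comment only): the float
// 1.0f is encoded as the bit pattern 0x3f800000, so
//
//   (0x3f800000u & kBinary32SignMask) == 0          // positive
//   (0x3f800000u & kBinary32ExponentMask)
//       >> kBinary32ExponentShift == 127            // == kBinary32ExponentBias
//   (0x3f800000u & kBinary32MantissaMask) == 0      // implicit leading 1 only
//
// i.e. 1.0f == (-1)^0 * 2^(127 - kBinary32ExponentBias) * 1.0.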

// Quiet NaNs have bits 51 to 62 set, possibly the sign bit, and no
// other bits set.
constexpr uint64_t kQuietNaNMask = static_cast<uint64_t>(0xfff) << 51;

// Latin1/UTF-16 constants
// Code-point values in Unicode 4.0 are 21 bits wide.
// Code units in UTF-16 are 16 bits wide.
typedef uint16_t uc16;
typedef int32_t uc32;
constexpr int kOneByteSize = kCharSize;
constexpr int kUC16Size = sizeof(uc16);  // NOLINT

// 128 bit SIMD value size.
constexpr int kSimd128Size = 16;

// FUNCTION_ADDR(f) gets the address of a C function f.
#define FUNCTION_ADDR(f) (reinterpret_cast<v8::internal::Address>(f))

// FUNCTION_CAST<F>(addr) casts an address into a function
// of type F. Used to invoke generated code from within C.
template <typename F>
F FUNCTION_CAST(byte* addr) {
  return reinterpret_cast<F>(reinterpret_cast<Address>(addr));
}

template <typename F>
F FUNCTION_CAST(Address addr) {
  return reinterpret_cast<F>(addr);
}
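
// Usage sketch (illustrative comment only; the buffer and signature are made
// up): after assembling code into a buffer, the entry point can be invoked
// from C++ roughly like this:
//
//   typedef int (*HypotheticalStub)(int);
//   byte* buffer = ...;  // start of the generated code
//   HypotheticalStub stub = FUNCTION_CAST<HypotheticalStub>(buffer);
//   int result = stub(42);
//
// FUNCTION_ADDR goes the other way, turning the address of a C function into
// an Address, e.g. FUNCTION_ADDR(&HypotheticalCRuntimeFunction).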

// Determine whether the architecture uses function descriptors
// which provide a level of indirection between the function pointer
// and the function entrypoint.
#if V8_HOST_ARCH_PPC && \
    (V8_OS_AIX || (V8_TARGET_ARCH_PPC64 && V8_TARGET_BIG_ENDIAN))
#define USES_FUNCTION_DESCRIPTORS 1
#define FUNCTION_ENTRYPOINT_ADDRESS(f)       \
  (reinterpret_cast<v8::internal::Address*>( \
      &(reinterpret_cast<intptr_t*>(f)[0])))
#else
#define USES_FUNCTION_DESCRIPTORS 0
#endif


// -----------------------------------------------------------------------------
// Declarations for use in both the preparser and the rest of V8.

// The Strict Mode (ECMA-262 5th edition, 4.2.2).

enum class LanguageMode : bool { kSloppy, kStrict };
static const size_t LanguageModeSize = 2;

inline size_t hash_value(LanguageMode mode) {
  return static_cast<size_t>(mode);
}

inline std::ostream& operator<<(std::ostream& os, const LanguageMode& mode) {
  switch (mode) {
    case LanguageMode::kSloppy:
      return os << "sloppy";
    case LanguageMode::kStrict:
      return os << "strict";
  }
  UNREACHABLE();
}

inline bool is_sloppy(LanguageMode language_mode) {
  return language_mode == LanguageMode::kSloppy;
}

inline bool is_strict(LanguageMode language_mode) {
  return language_mode != LanguageMode::kSloppy;
}

inline bool is_valid_language_mode(int language_mode) {
  return language_mode == static_cast<int>(LanguageMode::kSloppy) ||
         language_mode == static_cast<int>(LanguageMode::kStrict);
}

inline LanguageMode construct_language_mode(bool strict_bit) {
  return static_cast<LanguageMode>(strict_bit);
}

// Return kStrict if either of the language modes is kStrict, or kSloppy
// otherwise.
inline LanguageMode stricter_language_mode(LanguageMode mode1,
                                           LanguageMode mode2) {
  STATIC_ASSERT(LanguageModeSize == 2);
  return static_cast<LanguageMode>(static_cast<int>(mode1) |
                                   static_cast<int>(mode2));
}
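
// The bitwise OR above works because kSloppy is 0 and kStrict is 1, so the
// result is kStrict exactly when at least one operand is kStrict. For
// instance (illustrative comment only):
//
//   stricter_language_mode(LanguageMode::kSloppy, LanguageMode::kSloppy)
//       == LanguageMode::kSloppy
//   stricter_language_mode(LanguageMode::kSloppy, LanguageMode::kStrict)
//       == LanguageMode::kStrict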

enum TypeofMode : int { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };

// Enums used by CEntry.
enum SaveFPRegsMode { kDontSaveFPRegs, kSaveFPRegs };
enum ArgvMode { kArgvOnStack, kArgvInRegister };

// This constant is used as an undefined value when passing source positions.
constexpr int kNoSourcePosition = -1;

// This constant is used to indicate missing deoptimization information.
constexpr int kNoDeoptimizationId = -1;

// Deoptimize bailout kind:
// - Eager: a check failed in the optimized code and deoptimization happens
//   immediately.
// - Lazy: the code has been marked as dependent on some assumption which
//   is checked elsewhere and can trigger deoptimization the next time the
//   code is executed.
// - Soft: similar to lazy deoptimization, but does not contribute to the
//   total deopt count which can lead to disabling optimization for a function.
enum class DeoptimizeKind : uint8_t {
  kEager,
  kSoft,
  kLazy,
  kLastDeoptimizeKind = kLazy
};
inline size_t hash_value(DeoptimizeKind kind) {
  return static_cast<size_t>(kind);
}
inline std::ostream& operator<<(std::ostream& os, DeoptimizeKind kind) {
  switch (kind) {
    case DeoptimizeKind::kEager:
      return os << "Eager";
    case DeoptimizeKind::kSoft:
      return os << "Soft";
    case DeoptimizeKind::kLazy:
      return os << "Lazy";
  }
  UNREACHABLE();
}

// Indicates whether the lookup is related to sloppy-mode block-scoped
// function hoisting, and is a synthetic assignment for that.
enum class LookupHoistingMode { kNormal, kLegacySloppy };

inline std::ostream& operator<<(std::ostream& os,
                                const LookupHoistingMode& mode) {
  switch (mode) {
    case LookupHoistingMode::kNormal:
      return os << "normal hoisting";
    case LookupHoistingMode::kLegacySloppy:
      return os << "legacy sloppy hoisting";
  }
  UNREACHABLE();
}

static_assert(kSmiValueSize <= 32, "Unsupported Smi tagging scheme");
// Smi sign bit position must be 32-bit aligned so we can use sign extension
// instructions on 64-bit architectures without additional shifts.
static_assert((kSmiValueSize + kSmiShiftSize + kSmiTagSize) % 32 == 0,
              "Unsupported Smi tagging scheme");

constexpr bool kIsSmiValueInUpper32Bits =
    (kSmiValueSize + kSmiShiftSize + kSmiTagSize) == 64;
constexpr bool kIsSmiValueInLower32Bits =
    (kSmiValueSize + kSmiShiftSize + kSmiTagSize) == 32;
static_assert(!SmiValuesAre32Bits() == SmiValuesAre31Bits(),
              "Unsupported Smi tagging scheme");
static_assert(SmiValuesAre32Bits() == kIsSmiValueInUpper32Bits,
              "Unsupported Smi tagging scheme");
static_assert(SmiValuesAre31Bits() == kIsSmiValueInLower32Bits,
              "Unsupported Smi tagging scheme");

// Mask for the sign bit in a smi.
constexpr intptr_t kSmiSignMask = static_cast<intptr_t>(
    uintptr_t{1} << (kSmiValueSize + kSmiShiftSize + kSmiTagSize - 1));
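
// Example of how these pieces fit together (illustrative comment only; the
// concrete values of kSmiValueSize/kSmiShiftSize/kSmiTagSize come from
// include/v8.h and depend on the configuration):
//
//   - 31-bit Smi values (shift 0, tag 1 bit): a Smi is stored as
//     (value << 1) | 0, and kSmiSignMask selects bit 31.
//   - 32-bit Smi values (shift 31, tag 1 bit): the payload lives in the
//     upper 32 bits and kSmiSignMask selects bit 63, so plain sign
//     extension recovers the value.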

constexpr int kObjectAlignmentBits = kPointerSizeLog2;
constexpr intptr_t kObjectAlignment = 1 << kObjectAlignmentBits;
constexpr intptr_t kObjectAlignmentMask = kObjectAlignment - 1;

// Desired alignment for pointers.
constexpr intptr_t kPointerAlignment = (1 << kPointerSizeLog2);
constexpr intptr_t kPointerAlignmentMask = kPointerAlignment - 1;

// Desired alignment for double values.
constexpr intptr_t kDoubleAlignment = 8;
constexpr intptr_t kDoubleAlignmentMask = kDoubleAlignment - 1;

// Desired alignment for generated code is 32 bytes (to improve cache line
// utilization).
constexpr int kCodeAlignmentBits = 5;
constexpr intptr_t kCodeAlignment = 1 << kCodeAlignmentBits;
constexpr intptr_t kCodeAlignmentMask = kCodeAlignment - 1;

const intptr_t kWeakHeapObjectMask = 1 << 1;
const intptr_t kClearedWeakHeapObject = 3;

// Zap-value: The value used for zapping dead objects.
// Should be a recognizable hex value tagged as a failure.
#ifdef V8_HOST_ARCH_64_BIT
constexpr uint64_t kClearedFreeMemoryValue = 0;
constexpr uint64_t kZapValue = uint64_t{0xdeadbeedbeadbeef};
constexpr uint64_t kHandleZapValue = uint64_t{0x1baddead0baddeaf};
constexpr uint64_t kGlobalHandleZapValue = uint64_t{0x1baffed00baffedf};
constexpr uint64_t kFromSpaceZapValue = uint64_t{0x1beefdad0beefdaf};
constexpr uint64_t kDebugZapValue = uint64_t{0xbadbaddbbadbaddb};
constexpr uint64_t kSlotsZapValue = uint64_t{0xbeefdeadbeefdeef};
constexpr uint64_t kFreeListZapValue = 0xfeed1eaffeed1eaf;
#else
constexpr uint32_t kClearedFreeMemoryValue = 0;
constexpr uint32_t kZapValue = 0xdeadbeef;
constexpr uint32_t kHandleZapValue = 0xbaddeaf;
constexpr uint32_t kGlobalHandleZapValue = 0xbaffedf;
constexpr uint32_t kFromSpaceZapValue = 0xbeefdaf;
constexpr uint32_t kSlotsZapValue = 0xbeefdeef;
constexpr uint32_t kDebugZapValue = 0xbadbaddb;
constexpr uint32_t kFreeListZapValue = 0xfeed1eaf;
#endif

constexpr int kCodeZapValue = 0xbadc0de;
constexpr uint32_t kPhantomReferenceZap = 0xca11bac;

// Page constants.
static const intptr_t kPageAlignmentMask = (intptr_t{1} << kPageSizeBits) - 1;

// On Intel architecture, cache line size is 64 bytes.
// On ARM it may be less (32 bytes), but as far as this constant is
// used for aligning data, it doesn't hurt to align on a greater value.
#define PROCESSOR_CACHE_LINE_SIZE 64

// Constants relevant to double precision floating point numbers.
// If looking only at the top 32 bits, the QNaN mask is bits 19 to 30.
constexpr uint32_t kQuietNaNHighBitsMask = 0xfff << (51 - 32);

// -----------------------------------------------------------------------------
// Forward declarations for frequently used classes

class AccessorInfo;
class Arguments;
class Assembler;
class Code;
class CodeSpace;
class CodeStub;
class Context;
class Debug;
class DebugInfo;
class Descriptor;
class DescriptorArray;
class TransitionArray;
class ExternalReference;
class FixedArray;
class FreeStoreAllocationPolicy;
class FunctionTemplateInfo;
class MemoryChunk;
class NumberDictionary;
class SimpleNumberDictionary;
class NameDictionary;
class GlobalDictionary;
template <typename T> class MaybeHandle;
template <typename T> class Handle;
class Heap;
class HeapObject;
class HeapObjectReference;
class IC;
class InterceptorInfo;
class Isolate;
class JSReceiver;
class JSArray;
class JSFunction;
class JSObject;
class LargeObjectSpace;
class MacroAssembler;
class Map;
class MapSpace;
class MarkCompactCollector;
class MaybeObject;
class NewSpace;
class NewLargeObjectSpace;
class Object;
class OldSpace;
class ParameterCount;
class ReadOnlySpace;
class Foreign;
class Scope;
class DeclarationScope;
class ModuleScope;
class ScopeInfo;
class Script;
class Smi;
template <typename Config, class Allocator = FreeStoreAllocationPolicy>
class SplayTree;
class String;
class Symbol;
class Name;
class Struct;
class FeedbackVector;
class Variable;
class RelocInfo;
class MessageLocation;

typedef bool (*WeakSlotCallback)(Object** pointer);

typedef bool (*WeakSlotCallbackWithHeap)(Heap* heap, Object** pointer);

// -----------------------------------------------------------------------------
// Miscellaneous

// NOTE: SpaceIterator depends on AllocationSpace enumeration values being
// consecutive.
enum AllocationSpace {
  // TODO(v8:7464): Actually map this space's memory as read-only.
  RO_SPACE,    // Immortal, immovable and immutable objects,
  NEW_SPACE,   // Young generation semispaces for regular objects collected
               // with Scavenger.
  OLD_SPACE,   // Old generation regular object space.
  CODE_SPACE,  // Old generation code object space, marked executable.
  MAP_SPACE,   // Old generation map object space, non-movable.
  LO_SPACE,    // Old generation large object space.
  NEW_LO_SPACE,  // Young generation large object space.

  FIRST_SPACE = RO_SPACE,
  LAST_SPACE = NEW_LO_SPACE,
  FIRST_GROWABLE_PAGED_SPACE = OLD_SPACE,
  LAST_GROWABLE_PAGED_SPACE = MAP_SPACE
};
constexpr int kSpaceTagSize = 3;
STATIC_ASSERT(FIRST_SPACE == 0);

enum AllocationAlignment { kWordAligned, kDoubleAligned, kDoubleUnaligned };

enum class AccessMode { ATOMIC, NON_ATOMIC };

// Supported write barrier modes.
enum WriteBarrierKind : uint8_t {
  kNoWriteBarrier,
  kMapWriteBarrier,
  kPointerWriteBarrier,
  kFullWriteBarrier
};

inline size_t hash_value(WriteBarrierKind kind) {
  return static_cast<uint8_t>(kind);
}

inline std::ostream& operator<<(std::ostream& os, WriteBarrierKind kind) {
  switch (kind) {
    case kNoWriteBarrier:
      return os << "NoWriteBarrier";
    case kMapWriteBarrier:
      return os << "MapWriteBarrier";
    case kPointerWriteBarrier:
      return os << "PointerWriteBarrier";
    case kFullWriteBarrier:
      return os << "FullWriteBarrier";
  }
  UNREACHABLE();
}

// A flag that indicates whether objects should be pretenured when
// allocated (allocated directly into either the old generation or read-only
// space), or not (allocated in the young generation if the object size and
// type allow).
enum PretenureFlag { NOT_TENURED, TENURED, TENURED_READ_ONLY };

inline std::ostream& operator<<(std::ostream& os, const PretenureFlag& flag) {
  switch (flag) {
    case NOT_TENURED:
      return os << "NotTenured";
    case TENURED:
      return os << "Tenured";
    case TENURED_READ_ONLY:
      return os << "TenuredReadOnly";
  }
  UNREACHABLE();
}

enum MinimumCapacity {
  USE_DEFAULT_MINIMUM_CAPACITY,
  USE_CUSTOM_MINIMUM_CAPACITY
};

enum GarbageCollector { SCAVENGER, MARK_COMPACTOR, MINOR_MARK_COMPACTOR };

enum Executability { NOT_EXECUTABLE, EXECUTABLE };

enum Movability { kMovable, kImmovable };

enum VisitMode {
  VISIT_ALL,
  VISIT_ALL_IN_MINOR_MC_MARK,
  VISIT_ALL_IN_MINOR_MC_UPDATE,
  VISIT_ALL_IN_SCAVENGE,
  VISIT_ALL_IN_SWEEP_NEWSPACE,
  VISIT_ONLY_STRONG,
  VISIT_FOR_SERIALIZATION,
};

// Flag indicating whether code is built into the VM (one of the natives
// files).
enum NativesFlag {
  NOT_NATIVES_CODE,
  EXTENSION_CODE,
  NATIVES_CODE,
  INSPECTOR_CODE
};

// ParseRestriction is used to restrict the set of valid statements in a
// unit of compilation. Restriction violations cause a syntax error.
enum ParseRestriction {
  NO_PARSE_RESTRICTION,         // All expressions are allowed.
  ONLY_SINGLE_FUNCTION_LITERAL  // Only a single FunctionLiteral expression.
};

// A CodeDesc describes a buffer holding instructions and relocation
// information. The instructions start at the beginning of the buffer
// and grow forward, the relocation information starts at the end of
// the buffer and grows backward. A constant pool may exist at the
// end of the instructions.
//
// |<--------------- buffer_size ----------------------------------->|
// |<------------- instr_size ---------->|        |<-- reloc_size -->|
// |               |<- const_pool_size ->|        |                  |
// +=====================================+========+==================+
// | instructions  |        data         |  free  |    reloc info    |
// +=====================================+========+==================+
//   ^
//   |
//   buffer

struct CodeDesc {
  byte* buffer;
  int buffer_size;
  int instr_size;
  int reloc_size;
  int constant_pool_size;
  byte* unwinding_info;
  int unwinding_info_size;
  Assembler* origin;
};
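
// In other words (illustrative comment only): for a filled-in CodeDesc the
// regions partition the buffer, so
//
//   desc.instr_size + free_space + desc.reloc_size == desc.buffer_size
//
// where the relocation info occupies the last desc.reloc_size bytes of the
// buffer and the (optional) constant pool forms the last
// desc.constant_pool_size bytes of the instruction area.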

// Callback function used for checking constraints when copying/relocating
// objects. Returns true if an object can be copied/relocated from its
// old_addr to a new_addr.
typedef bool (*ConstraintCallback)(Address new_addr, Address old_addr);


// Callback function on inline caches, used for iterating over inline caches
// in compiled code.
typedef void (*InlineCacheCallback)(Code* code, Address ic);


// State for inline cache call sites. Aliased as IC::State.
enum InlineCacheState {
  // Has never been executed.
  UNINITIALIZED,
  // Has been executed but monomorphic state has been delayed.
  PREMONOMORPHIC,
  // Has been executed and only one receiver type has been seen.
  MONOMORPHIC,
  // Check failed due to prototype (or map deprecation).
  RECOMPUTE_HANDLER,
  // Multiple receiver types have been seen.
  POLYMORPHIC,
  // Many receiver types have been seen.
  MEGAMORPHIC,
  // A generic handler is installed and no extra typefeedback is recorded.
  GENERIC,
};

enum WhereToStart { kStartAtReceiver, kStartAtPrototype };

enum ResultSentinel { kNotFound = -1, kUnsupported = -2 };

enum ShouldThrow { kThrowOnError, kDontThrow };

// The Store Buffer (GC).
typedef enum {
  kStoreBufferFullEvent,
  kStoreBufferStartScanningPagesEvent,
  kStoreBufferScanningPageEvent
} StoreBufferEvent;


typedef void (*StoreBufferCallback)(Heap* heap,
                                    MemoryChunk* page,
                                    StoreBufferEvent event);

// Union used for customized checking of the IEEE double types
// inlined within v8 runtime, rather than going to the underlying
// platform headers and libraries.
union IeeeDoubleLittleEndianArchType {
  double d;
  struct {
    unsigned int man_low : 32;
    unsigned int man_high : 20;
    unsigned int exp : 11;
    unsigned int sign : 1;
  } bits;
};


union IeeeDoubleBigEndianArchType {
  double d;
  struct {
    unsigned int sign : 1;
    unsigned int exp : 11;
    unsigned int man_high : 20;
    unsigned int man_low : 32;
  } bits;
};

#if V8_TARGET_LITTLE_ENDIAN
typedef IeeeDoubleLittleEndianArchType IeeeDoubleArchType;
constexpr int kIeeeDoubleMantissaWordOffset = 0;
constexpr int kIeeeDoubleExponentWordOffset = 4;
#else
typedef IeeeDoubleBigEndianArchType IeeeDoubleArchType;
constexpr int kIeeeDoubleMantissaWordOffset = 4;
constexpr int kIeeeDoubleExponentWordOffset = 0;
#endif
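
// Usage sketch (illustrative comment only): the union gives direct access to
// the IEEE 754 fields of a double, e.g.
//
//   IeeeDoubleArchType u;
//   u.d = -2.0;
//   // u.bits.sign == 1, u.bits.exp == 0x400 (unbiased exponent 1),
//   // u.bits.man_high == 0 and u.bits.man_low == 0.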

// -----------------------------------------------------------------------------
// Macros

// Testers for test.

#define HAS_SMI_TAG(value) \
  ((reinterpret_cast<intptr_t>(value) & ::i::kSmiTagMask) == ::i::kSmiTag)

#define HAS_HEAP_OBJECT_TAG(value)                                   \
  (((reinterpret_cast<intptr_t>(value) & ::i::kHeapObjectTagMask) == \
    ::i::kHeapObjectTag))

// OBJECT_POINTER_ALIGN returns the value aligned as a HeapObject pointer
#define OBJECT_POINTER_ALIGN(value) \
  (((value) + kObjectAlignmentMask) & ~kObjectAlignmentMask)

// POINTER_SIZE_ALIGN returns the value aligned as a pointer.
#define POINTER_SIZE_ALIGN(value) \
  (((value) + kPointerAlignmentMask) & ~kPointerAlignmentMask)

// CODE_POINTER_ALIGN returns the value aligned as a generated code segment.
#define CODE_POINTER_ALIGN(value) \
  (((value) + kCodeAlignmentMask) & ~kCodeAlignmentMask)

// DOUBLE_POINTER_ALIGN returns the value aligned for double pointers.
#define DOUBLE_POINTER_ALIGN(value) \
  (((value) + kDoubleAlignmentMask) & ~kDoubleAlignmentMask)
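
// Worked example (illustrative comment only), assuming a 64-bit target where
// kObjectAlignment is 8 and kCodeAlignment is 32:
//
//   OBJECT_POINTER_ALIGN(13) == 16   // (13 + 7) & ~7
//   CODE_POINTER_ALIGN(100)  == 128  // (100 + 31) & ~31
//
// i.e. each macro rounds its argument up to the next multiple of the
// corresponding alignment.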

// CPU feature flags.
enum CpuFeature {
  // x86
  SSE4_1,
  SSSE3,
  SSE3,
  SAHF,
  AVX,
  FMA3,
  BMI1,
  BMI2,
  LZCNT,
  POPCNT,
  ATOM,
  // ARM
  // - Standard configurations. The baseline is ARMv6+VFPv2.
  ARMv7,        // ARMv7-A + VFPv3-D32 + NEON
  ARMv7_SUDIV,  // ARMv7-A + VFPv4-D32 + NEON + SUDIV
  ARMv8,        // ARMv8-A (+ all of the above)
  // MIPS, MIPS64
  FPU,
  FP64FPU,
  MIPSr1,
  MIPSr2,
  MIPSr6,
  MIPS_SIMD,  // MSA instructions
  // PPC
  FPR_GPR_MOV,
  LWSYNC,
  ISELECT,
  VSX,
  MODULO,
  // S390
  DISTINCT_OPS,
  GENERAL_INSTR_EXT,
  FLOATING_POINT_EXT,
  VECTOR_FACILITY,
  MISC_INSTR_EXT2,

  NUMBER_OF_CPU_FEATURES,

  // ARM feature aliases (based on the standard configurations above).
  VFPv3 = ARMv7,
  NEON = ARMv7,
  VFP32DREGS = ARMv7,
  SUDIV = ARMv7_SUDIV
};

// Defines hints about receiver values based on structural knowledge.
enum class ConvertReceiverMode : unsigned {
  kNullOrUndefined,     // Guaranteed to be null or undefined.
  kNotNullOrUndefined,  // Guaranteed to never be null or undefined.
  kAny                  // No specific knowledge about receiver.
};

inline size_t hash_value(ConvertReceiverMode mode) {
  return bit_cast<unsigned>(mode);
}

inline std::ostream& operator<<(std::ostream& os, ConvertReceiverMode mode) {
  switch (mode) {
    case ConvertReceiverMode::kNullOrUndefined:
      return os << "NULL_OR_UNDEFINED";
    case ConvertReceiverMode::kNotNullOrUndefined:
      return os << "NOT_NULL_OR_UNDEFINED";
    case ConvertReceiverMode::kAny:
      return os << "ANY";
  }
  UNREACHABLE();
}

// Valid hints for the abstract operation OrdinaryToPrimitive,
// implemented according to ES6, section 7.1.1.
enum class OrdinaryToPrimitiveHint { kNumber, kString };

// Valid hints for the abstract operation ToPrimitive,
// implemented according to ES6, section 7.1.1.
enum class ToPrimitiveHint { kDefault, kNumber, kString };

// Defines specifics about arguments object or rest parameter creation.
enum class CreateArgumentsType : uint8_t {
  kMappedArguments,
  kUnmappedArguments,
  kRestParameter
};

inline size_t hash_value(CreateArgumentsType type) {
  return bit_cast<uint8_t>(type);
}

inline std::ostream& operator<<(std::ostream& os, CreateArgumentsType type) {
  switch (type) {
    case CreateArgumentsType::kMappedArguments:
      return os << "MAPPED_ARGUMENTS";
    case CreateArgumentsType::kUnmappedArguments:
      return os << "UNMAPPED_ARGUMENTS";
    case CreateArgumentsType::kRestParameter:
      return os << "REST_PARAMETER";
  }
  UNREACHABLE();
}

enum ScopeType : uint8_t {
  EVAL_SCOPE,      // The top-level scope for an eval source.
  FUNCTION_SCOPE,  // The top-level scope for a function.
  MODULE_SCOPE,    // The scope introduced by a module literal
  SCRIPT_SCOPE,    // The top-level scope for a script or a top-level eval.
  CATCH_SCOPE,     // The scope introduced by catch.
  BLOCK_SCOPE,     // The scope introduced by a new block.
  WITH_SCOPE       // The scope introduced by with.
};

inline std::ostream& operator<<(std::ostream& os, ScopeType type) {
  switch (type) {
    case ScopeType::EVAL_SCOPE:
      return os << "EVAL_SCOPE";
    case ScopeType::FUNCTION_SCOPE:
      return os << "FUNCTION_SCOPE";
    case ScopeType::MODULE_SCOPE:
      return os << "MODULE_SCOPE";
    case ScopeType::SCRIPT_SCOPE:
      return os << "SCRIPT_SCOPE";
    case ScopeType::CATCH_SCOPE:
      return os << "CATCH_SCOPE";
    case ScopeType::BLOCK_SCOPE:
      return os << "BLOCK_SCOPE";
    case ScopeType::WITH_SCOPE:
      return os << "WITH_SCOPE";
  }
  UNREACHABLE();
}

// AllocationSiteMode controls whether allocations are tracked by an allocation
// site.
enum AllocationSiteMode {
  DONT_TRACK_ALLOCATION_SITE,
  TRACK_ALLOCATION_SITE,
  LAST_ALLOCATION_SITE_MODE = TRACK_ALLOCATION_SITE
};

// The mips architecture prior to revision 5 has inverted encoding for sNaN.
#if (V8_TARGET_ARCH_MIPS && !defined(_MIPS_ARCH_MIPS32R6) &&           \
     (!defined(USE_SIMULATOR) || !defined(_MIPS_TARGET_SIMULATOR))) || \
    (V8_TARGET_ARCH_MIPS64 && !defined(_MIPS_ARCH_MIPS64R6) &&         \
     (!defined(USE_SIMULATOR) || !defined(_MIPS_TARGET_SIMULATOR)))
constexpr uint32_t kHoleNanUpper32 = 0xFFFF7FFF;
constexpr uint32_t kHoleNanLower32 = 0xFFFF7FFF;
#else
constexpr uint32_t kHoleNanUpper32 = 0xFFF7FFFF;
constexpr uint32_t kHoleNanLower32 = 0xFFF7FFFF;
#endif

constexpr uint64_t kHoleNanInt64 =
    (static_cast<uint64_t>(kHoleNanUpper32) << 32) | kHoleNanLower32;

// ES6 section 20.1.2.6 Number.MAX_SAFE_INTEGER
constexpr double kMaxSafeInteger = 9007199254740991.0;  // 2^53-1

// The order of this enum has to be kept in sync with the predicates below.
enum class VariableMode : uint8_t {
  // User declared variables:
  kLet,  // declared via 'let' declarations (first lexical)

  kConst,  // declared via 'const' declarations (last lexical)

  kVar,  // declared via 'var', and 'function' declarations

  // Variables introduced by the compiler:
  kTemporary,  // temporary variables (not user-visible), stack-allocated
               // unless the scope as a whole has forced context allocation

  kDynamic,  // always require dynamic lookup (we don't know
             // the declaration)

  kDynamicGlobal,  // requires dynamic lookup, but we know that the
                   // variable is global unless it has been shadowed
                   // by an eval-introduced variable

  kDynamicLocal  // requires dynamic lookup, but we know that the
                 // variable is local and where it is unless it
                 // has been shadowed by an eval-introduced
                 // variable
};

// Printing support
#ifdef DEBUG
inline const char* VariableMode2String(VariableMode mode) {
  switch (mode) {
    case VariableMode::kVar:
      return "VAR";
    case VariableMode::kLet:
      return "LET";
    case VariableMode::kConst:
      return "CONST";
    case VariableMode::kDynamic:
      return "DYNAMIC";
    case VariableMode::kDynamicGlobal:
      return "DYNAMIC_GLOBAL";
    case VariableMode::kDynamicLocal:
      return "DYNAMIC_LOCAL";
    case VariableMode::kTemporary:
      return "TEMPORARY";
  }
  UNREACHABLE();
}
#endif

enum VariableKind : uint8_t {
  NORMAL_VARIABLE,
  FUNCTION_VARIABLE,
  THIS_VARIABLE,
  SLOPPY_FUNCTION_NAME_VARIABLE
};

inline bool IsDynamicVariableMode(VariableMode mode) {
  return mode >= VariableMode::kDynamic && mode <= VariableMode::kDynamicLocal;
}

inline bool IsDeclaredVariableMode(VariableMode mode) {
  STATIC_ASSERT(static_cast<uint8_t>(VariableMode::kLet) ==
                0);  // Implies that mode >= VariableMode::kLet.
  return mode <= VariableMode::kVar;
}

inline bool IsLexicalVariableMode(VariableMode mode) {
  STATIC_ASSERT(static_cast<uint8_t>(VariableMode::kLet) ==
                0);  // Implies that mode >= VariableMode::kLet.
  return mode <= VariableMode::kConst;
}
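
// For example (illustrative comment only), with the enum order above:
//
//   IsLexicalVariableMode(VariableMode::kConst)         == true
//   IsLexicalVariableMode(VariableMode::kVar)           == false
//   IsDeclaredVariableMode(VariableMode::kVar)          == true
//   IsDeclaredVariableMode(VariableMode::kTemporary)    == false
//   IsDynamicVariableMode(VariableMode::kDynamicGlobal) == true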

enum VariableLocation : uint8_t {
  // Before and during variable allocation, a variable whose location is
  // not yet determined. After allocation, a variable looked up as a
  // property on the global object (and possibly absent). name() is the
  // variable name, index() is invalid.
  UNALLOCATED,

  // A slot in the parameter section on the stack. index() is the
  // parameter index, counting left-to-right. The receiver is index -1;
  // the first parameter is index 0.
  PARAMETER,

  // A slot in the local section on the stack. index() is the variable
  // index in the stack frame, starting at 0.
  LOCAL,

  // An indexed slot in a heap context. index() is the variable index in
  // the context object on the heap, starting at 0. scope() is the
  // corresponding scope.
  CONTEXT,

  // A named slot in a heap context. name() is the variable name in the
  // context object on the heap, with lookup starting at the current
  // context. index() is invalid.
  LOOKUP,

  // A named slot in a module's export table.
  MODULE,

  kLastVariableLocation = MODULE
};

// ES6 specifies declarative environment records with mutable and immutable
// bindings that can be in two states: initialized and uninitialized.
// When accessing a binding, it needs to be checked for initialization.
// However, in the following cases the binding is initialized immediately
// after creation so the initialization check can always be skipped:
//
// 1. Var declared local variables.
//      var foo;
// 2. A local variable introduced by a function declaration.
//      function foo() {}
// 3. Parameters
//      function x(foo) {}
// 4. Catch bound variables.
//      try {} catch (foo) {}
// 5. Function name variables of named function expressions.
//      var x = function foo() {}
// 6. Implicit binding of 'this'.
// 7. Implicit binding of 'arguments' in functions.
//
// The following enum specifies a flag that indicates if the binding needs a
// distinct initialization step (kNeedsInitialization) or if the binding is
// immediately initialized upon creation (kCreatedInitialized).
enum InitializationFlag : uint8_t { kNeedsInitialization, kCreatedInitialized };

enum MaybeAssignedFlag : uint8_t { kNotAssigned, kMaybeAssigned };

enum ParseErrorType { kSyntaxError = 0, kReferenceError = 1 };

enum FunctionKind : uint8_t {
  kNormalFunction,
  kArrowFunction,
  kGeneratorFunction,
  kConciseMethod,
  kDerivedConstructor,
  kBaseConstructor,
  kGetterFunction,
  kSetterFunction,
  kAsyncFunction,
  kModule,
  kClassFieldsInitializerFunction,

  kDefaultBaseConstructor,
  kDefaultDerivedConstructor,
  kAsyncArrowFunction,
  kAsyncConciseMethod,

  kConciseGeneratorMethod,
  kAsyncConciseGeneratorMethod,
  kAsyncGeneratorFunction,
  kLastFunctionKind = kAsyncGeneratorFunction,
};

inline bool IsArrowFunction(FunctionKind kind) {
  return kind == FunctionKind::kArrowFunction ||
         kind == FunctionKind::kAsyncArrowFunction;
}

inline bool IsModule(FunctionKind kind) {
  return kind == FunctionKind::kModule;
}

inline bool IsAsyncGeneratorFunction(FunctionKind kind) {
  return kind == FunctionKind::kAsyncGeneratorFunction ||
         kind == FunctionKind::kAsyncConciseGeneratorMethod;
}

inline bool IsGeneratorFunction(FunctionKind kind) {
  return kind == FunctionKind::kGeneratorFunction ||
         kind == FunctionKind::kConciseGeneratorMethod ||
         IsAsyncGeneratorFunction(kind);
}

inline bool IsAsyncFunction(FunctionKind kind) {
  return kind == FunctionKind::kAsyncFunction ||
         kind == FunctionKind::kAsyncArrowFunction ||
         kind == FunctionKind::kAsyncConciseMethod ||
         IsAsyncGeneratorFunction(kind);
}

inline bool IsResumableFunction(FunctionKind kind) {
  return IsGeneratorFunction(kind) || IsAsyncFunction(kind) || IsModule(kind);
}

inline bool IsConciseMethod(FunctionKind kind) {
  return kind == FunctionKind::kConciseMethod ||
         kind == FunctionKind::kConciseGeneratorMethod ||
         kind == FunctionKind::kAsyncConciseMethod ||
         kind == FunctionKind::kAsyncConciseGeneratorMethod ||
         kind == FunctionKind::kClassFieldsInitializerFunction;
}

inline bool IsGetterFunction(FunctionKind kind) {
  return kind == FunctionKind::kGetterFunction;
}

inline bool IsSetterFunction(FunctionKind kind) {
  return kind == FunctionKind::kSetterFunction;
}

inline bool IsAccessorFunction(FunctionKind kind) {
  return kind == FunctionKind::kGetterFunction ||
         kind == FunctionKind::kSetterFunction;
}

inline bool IsDefaultConstructor(FunctionKind kind) {
  return kind == FunctionKind::kDefaultBaseConstructor ||
         kind == FunctionKind::kDefaultDerivedConstructor;
}

inline bool IsBaseConstructor(FunctionKind kind) {
  return kind == FunctionKind::kBaseConstructor ||
         kind == FunctionKind::kDefaultBaseConstructor;
}

inline bool IsDerivedConstructor(FunctionKind kind) {
  return kind == FunctionKind::kDerivedConstructor ||
         kind == FunctionKind::kDefaultDerivedConstructor;
}


inline bool IsClassConstructor(FunctionKind kind) {
  return IsBaseConstructor(kind) || IsDerivedConstructor(kind);
}

inline bool IsClassFieldsInitializerFunction(FunctionKind kind) {
  return kind == FunctionKind::kClassFieldsInitializerFunction;
}

inline bool IsConstructable(FunctionKind kind) {
  if (IsAccessorFunction(kind)) return false;
  if (IsConciseMethod(kind)) return false;
  if (IsArrowFunction(kind)) return false;
  if (IsGeneratorFunction(kind)) return false;
  if (IsAsyncFunction(kind)) return false;
  return true;
}

inline std::ostream& operator<<(std::ostream& os, FunctionKind kind) {
  switch (kind) {
    case FunctionKind::kNormalFunction:
      return os << "NormalFunction";
    case FunctionKind::kArrowFunction:
      return os << "ArrowFunction";
    case FunctionKind::kGeneratorFunction:
      return os << "GeneratorFunction";
    case FunctionKind::kConciseMethod:
      return os << "ConciseMethod";
    case FunctionKind::kDerivedConstructor:
      return os << "DerivedConstructor";
    case FunctionKind::kBaseConstructor:
      return os << "BaseConstructor";
    case FunctionKind::kGetterFunction:
      return os << "GetterFunction";
    case FunctionKind::kSetterFunction:
      return os << "SetterFunction";
    case FunctionKind::kAsyncFunction:
      return os << "AsyncFunction";
    case FunctionKind::kModule:
      return os << "Module";
    case FunctionKind::kClassFieldsInitializerFunction:
      return os << "ClassFieldsInitializerFunction";
    case FunctionKind::kDefaultBaseConstructor:
      return os << "DefaultBaseConstructor";
    case FunctionKind::kDefaultDerivedConstructor:
      return os << "DefaultDerivedConstructor";
    case FunctionKind::kAsyncArrowFunction:
      return os << "AsyncArrowFunction";
    case FunctionKind::kAsyncConciseMethod:
      return os << "AsyncConciseMethod";
    case FunctionKind::kConciseGeneratorMethod:
      return os << "ConciseGeneratorMethod";
    case FunctionKind::kAsyncConciseGeneratorMethod:
      return os << "AsyncConciseGeneratorMethod";
    case FunctionKind::kAsyncGeneratorFunction:
      return os << "AsyncGeneratorFunction";
  }
  UNREACHABLE();
}

enum class InterpreterPushArgsMode : unsigned {
  kArrayFunction,
  kWithFinalSpread,
  kOther
};

inline size_t hash_value(InterpreterPushArgsMode mode) {
  return bit_cast<unsigned>(mode);
}

inline std::ostream& operator<<(std::ostream& os,
                                InterpreterPushArgsMode mode) {
  switch (mode) {
    case InterpreterPushArgsMode::kArrayFunction:
      return os << "ArrayFunction";
    case InterpreterPushArgsMode::kWithFinalSpread:
      return os << "WithFinalSpread";
    case InterpreterPushArgsMode::kOther:
      return os << "Other";
  }
  UNREACHABLE();
}

inline uint32_t ObjectHash(Address address) {
  // All objects are at least pointer aligned, so we can remove the trailing
  // zeros.
  return static_cast<uint32_t>(address >> kPointerSizeLog2);
}

// Type feedback is encoded in such a way that we can combine the feedback
// at different points by performing an 'OR' operation. Type feedback moves
// to a more generic type when we combine feedback.
//
// kSignedSmall -> kSignedSmallInputs -> kNumber -> kNumberOrOddball -> kAny
// kString -> kAny
// kBigInt -> kAny
//
// Technically we wouldn't need the separation between the kNumber and the
// kNumberOrOddball values here, since for binary operations, we always
// truncate oddballs to numbers. In practice, though, it causes TurboFan to
// generate quite a lot of unused code if we always handle numbers and
// oddballs everywhere, although in 99% of the use sites they are only used
// with numbers.
class BinaryOperationFeedback {
 public:
  enum {
    kNone = 0x0,
    kSignedSmall = 0x1,
    kSignedSmallInputs = 0x3,
    kNumber = 0x7,
    kNumberOrOddball = 0xF,
    kString = 0x10,
    kBigInt = 0x20,
    kAny = 0x7F
  };
};
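
// Combining feedback is therefore a plain bitwise OR that can only move up
// the lattice above; for instance (illustrative comment only):
//
//   kSignedSmall | kSignedSmallInputs == kSignedSmallInputs  // 0x1 | 0x3
//   kSignedSmall | kNumber            == kNumber             // 0x1 | 0x7
//   kNumber | kNumberOrOddball        == kNumberOrOddball    // 0x7 | 0xF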

// Type feedback is encoded in such a way that we can combine the feedback
// at different points by performing an 'OR' operation. Type feedback moves
// to a more generic type when we combine feedback.
//
// kSignedSmall        -> kNumber -> kNumberOrOddball -> kAny
// kInternalizedString -> kString -> kAny
// kSymbol             -> kAny
// kBigInt             -> kAny
// kReceiver           -> kAny
//
// This is distinct from BinaryOperationFeedback on purpose, because the
// feedback that matters differs greatly, as does the way it is consumed.
class CompareOperationFeedback {
 public:
  enum {
    kNone = 0x00,
    kSignedSmall = 0x01,
    kNumber = 0x3,
    kNumberOrOddball = 0x7,
    kInternalizedString = 0x8,
    kString = 0x18,
    kSymbol = 0x20,
    kBigInt = 0x30,
    kReceiver = 0x40,
    kAny = 0xff
  };
};
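
// As with BinaryOperationFeedback, combination is a bitwise OR over this
// lattice, e.g. (illustrative comment only):
//
//   kInternalizedString | kString == kString           // 0x08 | 0x18
//   kSignedSmall | kNumber        == kNumber           // 0x01 | 0x03
//   kNumber | kNumberOrOddball    == kNumberOrOddball  // 0x03 | 0x07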

enum class Operation {
  // Binary operations.
  kAdd,
  kSubtract,
  kMultiply,
  kDivide,
  kModulus,
  kExponentiate,
  kBitwiseAnd,
  kBitwiseOr,
  kBitwiseXor,
  kShiftLeft,
  kShiftRight,
  kShiftRightLogical,
  // Unary operations.
  kBitwiseNot,
  kNegate,
  kIncrement,
  kDecrement,
  // Compare operations.
  kEqual,
  kStrictEqual,
  kLessThan,
  kLessThanOrEqual,
  kGreaterThan,
  kGreaterThanOrEqual,
};

// Type feedback is encoded in such a way that we can combine the feedback
// at different points by performing an 'OR' operation. Type feedback moves
// to a more generic type when we combine feedback.
//
// kNone -> kEnumCacheKeysAndIndices -> kEnumCacheKeys -> kAny
class ForInFeedback {
 public:
  enum {
    kNone = 0x0,
    kEnumCacheKeysAndIndices = 0x1,
    kEnumCacheKeys = 0x3,
    kAny = 0x7
  };
};
STATIC_ASSERT((ForInFeedback::kNone |
               ForInFeedback::kEnumCacheKeysAndIndices) ==
              ForInFeedback::kEnumCacheKeysAndIndices);
STATIC_ASSERT((ForInFeedback::kEnumCacheKeysAndIndices |
               ForInFeedback::kEnumCacheKeys) == ForInFeedback::kEnumCacheKeys);
STATIC_ASSERT((ForInFeedback::kEnumCacheKeys | ForInFeedback::kAny) ==
              ForInFeedback::kAny);

enum class UnicodeEncoding : uint8_t {
  // Different unicode encodings in a |word32|:
  UTF16,  // hi 16bits -> trailing surrogate or 0, low 16bits -> lead surrogate
  UTF32,  // full UTF32 code unit / Unicode codepoint
};

inline size_t hash_value(UnicodeEncoding encoding) {
  return static_cast<uint8_t>(encoding);
}

inline std::ostream& operator<<(std::ostream& os, UnicodeEncoding encoding) {
  switch (encoding) {
    case UnicodeEncoding::UTF16:
      return os << "UTF16";
    case UnicodeEncoding::UTF32:
      return os << "UTF32";
  }
  UNREACHABLE();
}

enum class IterationKind { kKeys, kValues, kEntries };

inline std::ostream& operator<<(std::ostream& os, IterationKind kind) {
  switch (kind) {
    case IterationKind::kKeys:
      return os << "IterationKind::kKeys";
    case IterationKind::kValues:
      return os << "IterationKind::kValues";
    case IterationKind::kEntries:
      return os << "IterationKind::kEntries";
  }
  UNREACHABLE();
}

enum class CollectionKind { kMap, kSet };

inline std::ostream& operator<<(std::ostream& os, CollectionKind kind) {
  switch (kind) {
    case CollectionKind::kMap:
      return os << "CollectionKind::kMap";
    case CollectionKind::kSet:
      return os << "CollectionKind::kSet";
  }
  UNREACHABLE();
}

// Flags for the runtime function kDefineDataPropertyInLiteral. A property can
// be enumerable or not, and, in the case of functions, the function name
// can be set or not.
enum class DataPropertyInLiteralFlag {
  kNoFlags = 0,
  kDontEnum = 1 << 0,
  kSetFunctionName = 1 << 1
};
typedef base::Flags<DataPropertyInLiteralFlag> DataPropertyInLiteralFlags;
DEFINE_OPERATORS_FOR_FLAGS(DataPropertyInLiteralFlags)
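
// Usage sketch (illustrative comment only): DEFINE_OPERATORS_FOR_FLAGS makes
// the flag type combinable and testable in the usual base::Flags style, e.g.
//
//   DataPropertyInLiteralFlags flags =
//       DataPropertyInLiteralFlag::kDontEnum |
//       DataPropertyInLiteralFlag::kSetFunctionName;
//   if (flags & DataPropertyInLiteralFlag::kDontEnum) { ... }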

enum ExternalArrayType {
  kExternalInt8Array = 1,
  kExternalUint8Array,
  kExternalInt16Array,
  kExternalUint16Array,
  kExternalInt32Array,
  kExternalUint32Array,
  kExternalFloat32Array,
  kExternalFloat64Array,
  kExternalUint8ClampedArray,
  kExternalBigInt64Array,
  kExternalBigUint64Array,
};

struct AssemblerDebugInfo {
  AssemblerDebugInfo(const char* name, const char* file, int line)
      : name(name), file(file), line(line) {}
  const char* name;
  const char* file;
  int line;
};

inline std::ostream& operator<<(std::ostream& os,
                                const AssemblerDebugInfo& info) {
  os << "(" << info.name << ":" << info.file << ":" << info.line << ")";
  return os;
}

enum class OptimizationMarker {
  kLogFirstExecution,
  kNone,
  kCompileOptimized,
  kCompileOptimizedConcurrent,
  kInOptimizationQueue
};

inline std::ostream& operator<<(std::ostream& os,
                                const OptimizationMarker& marker) {
  switch (marker) {
    case OptimizationMarker::kLogFirstExecution:
      return os << "OptimizationMarker::kLogFirstExecution";
    case OptimizationMarker::kNone:
      return os << "OptimizationMarker::kNone";
    case OptimizationMarker::kCompileOptimized:
      return os << "OptimizationMarker::kCompileOptimized";
    case OptimizationMarker::kCompileOptimizedConcurrent:
      return os << "OptimizationMarker::kCompileOptimizedConcurrent";
    case OptimizationMarker::kInOptimizationQueue:
      return os << "OptimizationMarker::kInOptimizationQueue";
  }
  UNREACHABLE();
  return os;
}

enum class SpeculationMode { kAllowSpeculation, kDisallowSpeculation };

inline std::ostream& operator<<(std::ostream& os,
                                SpeculationMode speculation_mode) {
  switch (speculation_mode) {
    case SpeculationMode::kAllowSpeculation:
      return os << "SpeculationMode::kAllowSpeculation";
    case SpeculationMode::kDisallowSpeculation:
      return os << "SpeculationMode::kDisallowSpeculation";
  }
  UNREACHABLE();
  return os;
}

enum class BlockingBehavior { kBlock, kDontBlock };

enum class ConcurrencyMode { kNotConcurrent, kConcurrent };

#define FOR_EACH_ISOLATE_ADDRESS_NAME(C)                       \
  C(Handler, handler)                                          \
  C(CEntryFP, c_entry_fp)                                      \
  C(CFunction, c_function)                                     \
  C(Context, context)                                          \
  C(PendingException, pending_exception)                       \
  C(PendingHandlerContext, pending_handler_context)            \
  C(PendingHandlerEntrypoint, pending_handler_entrypoint)      \
  C(PendingHandlerConstantPool, pending_handler_constant_pool) \
  C(PendingHandlerFP, pending_handler_fp)                      \
  C(PendingHandlerSP, pending_handler_sp)                      \
  C(ExternalCaughtException, external_caught_exception)        \
  C(JSEntrySP, js_entry_sp)

enum IsolateAddressId {
#define DECLARE_ENUM(CamelName, hacker_name) k##CamelName##Address,
  FOR_EACH_ISOLATE_ADDRESS_NAME(DECLARE_ENUM)
#undef DECLARE_ENUM
  kIsolateAddressCount
};
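
// For reference (illustrative comment only), the X-macro above expands the
// enum to
//
//   kHandlerAddress, kCEntryFPAddress, kCFunctionAddress, kContextAddress,
//   kPendingExceptionAddress, ..., kJSEntrySPAddress, kIsolateAddressCount
//
// so kIsolateAddressCount is the number of per-isolate addresses listed in
// FOR_EACH_ISOLATE_ADDRESS_NAME.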

V8_INLINE static bool HasWeakHeapObjectTag(
    const internal::MaybeObject* value) {
  return ((reinterpret_cast<intptr_t>(value) & kHeapObjectTagMask) ==
          kWeakHeapObjectTag);
}

// Object* should never have the weak tag; this variant is for overzealous
// checking.
V8_INLINE static bool HasWeakHeapObjectTag(const Object* value) {
  return ((reinterpret_cast<intptr_t>(value) & kHeapObjectTagMask) ==
          kWeakHeapObjectTag);
}

V8_INLINE static bool IsClearedWeakHeapObject(const MaybeObject* value) {
  return reinterpret_cast<intptr_t>(value) == kClearedWeakHeapObject;
}

V8_INLINE static HeapObject* RemoveWeakHeapObjectMask(
    HeapObjectReference* value) {
  return reinterpret_cast<HeapObject*>(reinterpret_cast<intptr_t>(value) &
                                       ~kWeakHeapObjectMask);
}

V8_INLINE static HeapObjectReference* AddWeakHeapObjectMask(Object* value) {
  return reinterpret_cast<HeapObjectReference*>(
      reinterpret_cast<intptr_t>(value) | kWeakHeapObjectMask);
}

V8_INLINE static MaybeObject* AddWeakHeapObjectMask(MaybeObject* value) {
  return reinterpret_cast<MaybeObject*>(reinterpret_cast<intptr_t>(value) |
                                        kWeakHeapObjectMask);
}

enum class HeapObjectReferenceType {
  WEAK,
  STRONG,
};

enum class PoisoningMitigationLevel {
  kPoisonAll,
  kDontPoison,
  kPoisonCriticalOnly
};

enum class LoadSensitivity {
  kCritical,  // Critical loads are poisoned whenever we can run untrusted
              // code (i.e., when --untrusted-code-mitigations is on).
  kUnsafe,    // Unsafe loads are poisoned when full poisoning is on
              // (--branch-load-poisoning).
  kSafe       // Safe loads are never poisoned.
};

// The reason for a WebAssembly trap.
#define FOREACH_WASM_TRAPREASON(V) \
  V(TrapUnreachable)               \
  V(TrapMemOutOfBounds)            \
  V(TrapUnalignedAccess)           \
  V(TrapDivByZero)                 \
  V(TrapDivUnrepresentable)        \
  V(TrapRemByZero)                 \
  V(TrapFloatUnrepresentable)      \
  V(TrapFuncInvalid)               \
  V(TrapFuncSigMismatch)

}  // namespace internal
}  // namespace v8

namespace i = v8::internal;

#endif  // V8_GLOBALS_H_