// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_INSTRUCTION_H_
#define V8_COMPILER_INSTRUCTION_H_

#include <deque>
#include <iosfwd>
#include <map>
#include <set>

#include "src/base/compiler-specific.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/frame.h"
#include "src/compiler/instruction-codes.h"
#include "src/compiler/opcodes.h"
#include "src/double.h"
#include "src/globals.h"
#include "src/macro-assembler.h"
#include "src/register-configuration.h"
#include "src/source-position.h"
#include "src/zone/zone-allocator.h"

namespace v8 {
namespace internal {
namespace compiler {

class Schedule;
class SourcePositionTable;

class V8_EXPORT_PRIVATE InstructionOperand {
 public:
  static const int kInvalidVirtualRegister = -1;

  enum Kind {
    INVALID,
    UNALLOCATED,
    CONSTANT,
    IMMEDIATE,
    // Location operand kinds.
    EXPLICIT,
    ALLOCATED,
    FIRST_LOCATION_OPERAND_KIND = EXPLICIT
    // Location operand kinds must be last.
  };

  InstructionOperand() : InstructionOperand(INVALID) {}

  Kind kind() const { return KindField::decode(value_); }

#define INSTRUCTION_OPERAND_PREDICATE(name, type) \
  bool Is##name() const { return kind() == type; }
  INSTRUCTION_OPERAND_PREDICATE(Invalid, INVALID)
  // UnallocatedOperands are placeholder operands created before register
  // allocation. They are later assigned registers and become
  // AllocatedOperands.
  INSTRUCTION_OPERAND_PREDICATE(Unallocated, UNALLOCATED)
  // Constant operands participate in register allocation. They are allocated
  // to registers but have a special "spilling" behavior. When a
  // ConstantOperand value must be rematerialized, it is loaded from an
  // immediate constant rather than from a spill slot.
  INSTRUCTION_OPERAND_PREDICATE(Constant, CONSTANT)
  // ImmediateOperands do not participate in register allocation and are only
  // embedded directly in instructions, e.g. small integers and, on some
  // platforms, Objects.
  INSTRUCTION_OPERAND_PREDICATE(Immediate, IMMEDIATE)
  // ExplicitOperands do not participate in register allocation. They are
  // created by the instruction selector for direct access to registers and
  // stack slots, completely bypassing the register allocator. They are never
  // associated with a virtual register.
  INSTRUCTION_OPERAND_PREDICATE(Explicit, EXPLICIT)
  // AllocatedOperands are registers or stack slots that are assigned by the
  // register allocator and are always associated with a virtual register.
  INSTRUCTION_OPERAND_PREDICATE(Allocated, ALLOCATED)
#undef INSTRUCTION_OPERAND_PREDICATE

  inline bool IsAnyLocationOperand() const;
  inline bool IsLocationOperand() const;
  inline bool IsFPLocationOperand() const;
  inline bool IsAnyRegister() const;
  inline bool IsRegister() const;
  inline bool IsFPRegister() const;
  inline bool IsFloatRegister() const;
  inline bool IsDoubleRegister() const;
  inline bool IsSimd128Register() const;
  inline bool IsAnyStackSlot() const;
  inline bool IsStackSlot() const;
  inline bool IsFPStackSlot() const;
  inline bool IsFloatStackSlot() const;
  inline bool IsDoubleStackSlot() const;
  inline bool IsSimd128StackSlot() const;

  template <typename SubKindOperand>
  static SubKindOperand* New(Zone* zone, const SubKindOperand& op) {
    void* buffer = zone->New(sizeof(op));
    return new (buffer) SubKindOperand(op);
  }

  static void ReplaceWith(InstructionOperand* dest,
                          const InstructionOperand* src) {
    *dest = *src;
  }

  bool Equals(const InstructionOperand& that) const {
    return this->value_ == that.value_;
  }

  bool Compare(const InstructionOperand& that) const {
    return this->value_ < that.value_;
  }

  bool EqualsCanonicalized(const InstructionOperand& that) const {
    return this->GetCanonicalizedValue() == that.GetCanonicalizedValue();
  }

  bool CompareCanonicalized(const InstructionOperand& that) const {
    return this->GetCanonicalizedValue() < that.GetCanonicalizedValue();
  }

  bool InterferesWith(const InstructionOperand& other) const;

  // APIs to aid debugging. For general-stream APIs, use operator<<
  void Print(const RegisterConfiguration* config) const;
  void Print() const;

 protected:
  explicit InstructionOperand(Kind kind) : value_(KindField::encode(kind)) {}

  inline uint64_t GetCanonicalizedValue() const;

  class KindField : public BitField64<Kind, 0, 3> {};

  uint64_t value_;
};


typedef ZoneVector<InstructionOperand> InstructionOperandVector;


struct PrintableInstructionOperand {
  const RegisterConfiguration* register_configuration_;
  InstructionOperand op_;
};


std::ostream& operator<<(std::ostream& os,
                         const PrintableInstructionOperand& op);


#define INSTRUCTION_OPERAND_CASTS(OperandType, OperandKind)       \
                                                                  \
  static OperandType* cast(InstructionOperand* op) {              \
    DCHECK_EQ(OperandKind, op->kind());                           \
    return static_cast<OperandType*>(op);                         \
  }                                                               \
                                                                  \
  static const OperandType* cast(const InstructionOperand* op) {  \
    DCHECK_EQ(OperandKind, op->kind());                           \
    return static_cast<const OperandType*>(op);                   \
  }                                                               \
                                                                  \
  static OperandType cast(const InstructionOperand& op) {         \
    DCHECK_EQ(OperandKind, op.kind());                            \
    return *static_cast<const OperandType*>(&op);                 \
  }
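
// Illustrative sketch (not part of this header): the two equality helpers
// above differ in how they treat machine representation. The classes and
// constants used here (AllocatedOperand, MachineRepresentation,
// kSimpleFPAliasing) are declared later in this file or elsewhere in V8.
//
//   AllocatedOperand f32(LocationOperand::REGISTER,
//                        MachineRepresentation::kFloat32, 3);
//   AllocatedOperand f64(LocationOperand::REGISTER,
//                        MachineRepresentation::kFloat64, 3);
//   DCHECK(f32.IsFPRegister());
//   DCHECK(!f32.Equals(f64));  // Raw bit comparison: representations differ.
//   // On targets with kSimpleFPAliasing, canonicalization maps both to the
//   // same kFloat64 register, so they compare equal modulo representation:
//   DCHECK(f32.EqualsCanonicalized(f64));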

class UnallocatedOperand final : public InstructionOperand {
 public:
  enum BasicPolicy { FIXED_SLOT, EXTENDED_POLICY };

  enum ExtendedPolicy {
    NONE,
    REGISTER_OR_SLOT,
    REGISTER_OR_SLOT_OR_CONSTANT,
    FIXED_REGISTER,
    FIXED_FP_REGISTER,
    MUST_HAVE_REGISTER,
    MUST_HAVE_SLOT,
    SAME_AS_FIRST_INPUT
  };

  // Lifetime of operand inside the instruction.
  enum Lifetime {
    // USED_AT_START operand is guaranteed to be live only at instruction
    // start. The register allocator is free to assign the same register to
    // some other operand used inside the instruction (i.e. temporary or
    // output).
    USED_AT_START,

    // USED_AT_END operand is treated as live until the end of the
    // instruction. This means that the register allocator will not reuse its
    // register for any other operand inside the instruction.
    USED_AT_END
  };

  UnallocatedOperand(ExtendedPolicy policy, int virtual_register)
      : UnallocatedOperand(virtual_register) {
    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
    value_ |= ExtendedPolicyField::encode(policy);
    value_ |= LifetimeField::encode(USED_AT_END);
  }

  UnallocatedOperand(BasicPolicy policy, int index, int virtual_register)
      : UnallocatedOperand(virtual_register) {
    DCHECK(policy == FIXED_SLOT);
    value_ |= BasicPolicyField::encode(policy);
    value_ |= static_cast<int64_t>(index) << FixedSlotIndexField::kShift;
    DCHECK(this->fixed_slot_index() == index);
  }

  UnallocatedOperand(ExtendedPolicy policy, int index, int virtual_register)
      : UnallocatedOperand(virtual_register) {
    DCHECK(policy == FIXED_REGISTER || policy == FIXED_FP_REGISTER);
    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
    value_ |= ExtendedPolicyField::encode(policy);
    value_ |= LifetimeField::encode(USED_AT_END);
    value_ |= FixedRegisterField::encode(index);
  }

  UnallocatedOperand(ExtendedPolicy policy, Lifetime lifetime,
                     int virtual_register)
      : UnallocatedOperand(virtual_register) {
    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
    value_ |= ExtendedPolicyField::encode(policy);
    value_ |= LifetimeField::encode(lifetime);
  }

  UnallocatedOperand(int reg_id, int slot_id, int virtual_register)
      : UnallocatedOperand(FIXED_REGISTER, reg_id, virtual_register) {
    value_ |= HasSecondaryStorageField::encode(true);
    value_ |= SecondaryStorageField::encode(slot_id);
  }

  UnallocatedOperand(const UnallocatedOperand& other, int virtual_register) {
    DCHECK_NE(kInvalidVirtualRegister, virtual_register);
    value_ = VirtualRegisterField::update(
        other.value_, static_cast<uint32_t>(virtual_register));
  }

  // Predicates for the operand policy.
  bool HasRegisterOrSlotPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == REGISTER_OR_SLOT;
  }
  bool HasRegisterOrSlotOrConstantPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == REGISTER_OR_SLOT_OR_CONSTANT;
  }
  bool HasFixedPolicy() const {
    return basic_policy() == FIXED_SLOT ||
           extended_policy() == FIXED_REGISTER ||
           extended_policy() == FIXED_FP_REGISTER;
  }
  bool HasRegisterPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == MUST_HAVE_REGISTER;
  }
  bool HasSlotPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == MUST_HAVE_SLOT;
  }
  bool HasSameAsInputPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == SAME_AS_FIRST_INPUT;
  }
  bool HasFixedSlotPolicy() const { return basic_policy() == FIXED_SLOT; }
  bool HasFixedRegisterPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == FIXED_REGISTER;
  }
  bool HasFixedFPRegisterPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == FIXED_FP_REGISTER;
  }
  bool HasSecondaryStorage() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == FIXED_REGISTER &&
           HasSecondaryStorageField::decode(value_);
  }
  int GetSecondaryStorage() const {
    DCHECK(HasSecondaryStorage());
    return SecondaryStorageField::decode(value_);
  }

  // [basic_policy]: Distinguish between FIXED_SLOT and all other policies.
  BasicPolicy basic_policy() const { return BasicPolicyField::decode(value_); }

  // [extended_policy]: Only for non-FIXED_SLOT. The finer-grained policy.
  ExtendedPolicy extended_policy() const {
    DCHECK(basic_policy() == EXTENDED_POLICY);
    return ExtendedPolicyField::decode(value_);
  }

  // [fixed_slot_index]: Only for FIXED_SLOT.
  int fixed_slot_index() const {
    DCHECK(HasFixedSlotPolicy());
    return static_cast<int>(static_cast<int64_t>(value_) >>
                            FixedSlotIndexField::kShift);
  }

  // [fixed_register_index]: Only for FIXED_REGISTER or FIXED_FP_REGISTER.
  int fixed_register_index() const {
    DCHECK(HasFixedRegisterPolicy() || HasFixedFPRegisterPolicy());
    return FixedRegisterField::decode(value_);
  }

  // [virtual_register]: The virtual register ID for this operand.
  int32_t virtual_register() const {
    return static_cast<int32_t>(VirtualRegisterField::decode(value_));
  }

  // [lifetime]: Only for non-FIXED_SLOT.
  bool IsUsedAtStart() const {
    DCHECK(basic_policy() == EXTENDED_POLICY);
    return LifetimeField::decode(value_) == USED_AT_START;
  }

  INSTRUCTION_OPERAND_CASTS(UnallocatedOperand, UNALLOCATED);

  // The encoding used for UnallocatedOperand operands depends on the policy
  // that is stored within the operand. The FIXED_SLOT policy uses a compact
  // encoding because it accommodates a larger payload.
  //
  // For FIXED_SLOT policy:
  //     +------------------------------------------+
  //     | slot_index  | 0 | virtual_register | 001 |
  //     +------------------------------------------+
  //
  // For all other (extended) policies:
  //     +--------------------------------------------------+
  //     | reg_index | L | PPP | 1 | virtual_register | 001 |
  //     +--------------------------------------------------+
  //     L ... Lifetime
  //     P ... Policy
  //
  // The slot index is a signed value which requires us to decode it manually
  // instead of using the BitField utility class.

  STATIC_ASSERT(KindField::kSize == 3);

  class VirtualRegisterField : public BitField64<uint32_t, 3, 32> {};

  // BitFields for all unallocated operands.
  class BasicPolicyField : public BitField64<BasicPolicy, 35, 1> {};

  // BitFields specific to BasicPolicy::FIXED_SLOT.
  class FixedSlotIndexField : public BitField64<int, 36, 28> {};

  // BitFields specific to BasicPolicy::EXTENDED_POLICY.
  class ExtendedPolicyField : public BitField64<ExtendedPolicy, 36, 3> {};
  class LifetimeField : public BitField64<Lifetime, 39, 1> {};
  class HasSecondaryStorageField : public BitField64<bool, 40, 1> {};
  class FixedRegisterField : public BitField64<int, 41, 6> {};
  class SecondaryStorageField : public BitField64<int, 47, 3> {};

 private:
  explicit UnallocatedOperand(int virtual_register)
      : InstructionOperand(UNALLOCATED) {
    value_ |=
        VirtualRegisterField::encode(static_cast<uint32_t>(virtual_register));
  }
};
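
// Illustrative sketch (not part of this header): asking the register
// allocator to pin virtual register 7 to the general-purpose register with
// code 2, using the FIXED_REGISTER constructor above. The register code and
// virtual register chosen here are arbitrary.
//
//   UnallocatedOperand op(UnallocatedOperand::FIXED_REGISTER, 2, 7);
//   DCHECK(op.HasFixedPolicy());
//   DCHECK(op.HasFixedRegisterPolicy());
//   DCHECK_EQ(2, op.fixed_register_index());
//   DCHECK_EQ(7, op.virtual_register());
//   DCHECK(!op.IsUsedAtStart());  // This constructor encodes USED_AT_END.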

class ConstantOperand : public InstructionOperand {
 public:
  explicit ConstantOperand(int virtual_register)
      : InstructionOperand(CONSTANT) {
    value_ |=
        VirtualRegisterField::encode(static_cast<uint32_t>(virtual_register));
  }

  int32_t virtual_register() const {
    return static_cast<int32_t>(VirtualRegisterField::decode(value_));
  }

  static ConstantOperand* New(Zone* zone, int virtual_register) {
    return InstructionOperand::New(zone, ConstantOperand(virtual_register));
  }

  INSTRUCTION_OPERAND_CASTS(ConstantOperand, CONSTANT);

  STATIC_ASSERT(KindField::kSize == 3);
  class VirtualRegisterField : public BitField64<uint32_t, 3, 32> {};
};


class ImmediateOperand : public InstructionOperand {
 public:
  enum ImmediateType { INLINE, INDEXED };

  explicit ImmediateOperand(ImmediateType type, int32_t value)
      : InstructionOperand(IMMEDIATE) {
    value_ |= TypeField::encode(type);
    value_ |= static_cast<int64_t>(value) << ValueField::kShift;
  }

  ImmediateType type() const { return TypeField::decode(value_); }

  int32_t inline_value() const {
    DCHECK_EQ(INLINE, type());
    return static_cast<int64_t>(value_) >> ValueField::kShift;
  }

  int32_t indexed_value() const {
    DCHECK_EQ(INDEXED, type());
    return static_cast<int64_t>(value_) >> ValueField::kShift;
  }

  static ImmediateOperand* New(Zone* zone, ImmediateType type, int32_t value) {
    return InstructionOperand::New(zone, ImmediateOperand(type, value));
  }

  INSTRUCTION_OPERAND_CASTS(ImmediateOperand, IMMEDIATE);

  STATIC_ASSERT(KindField::kSize == 3);
  class TypeField : public BitField64<ImmediateType, 3, 1> {};
  class ValueField : public BitField64<int32_t, 32, 32> {};
};
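
// Illustrative sketch (not part of this header): small values travel INLINE
// in the operand word itself; anything else is stored out of line and the
// operand only carries an index into InstructionSequence::immediates(),
// declared further below.
//
//   ImmediateOperand small(ImmediateOperand::INLINE, 42);
//   DCHECK_EQ(42, small.inline_value());
//
//   ImmediateOperand big(ImmediateOperand::INDEXED, 5);  // immediates()[5]
//   DCHECK_EQ(5, big.indexed_value());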

class LocationOperand : public InstructionOperand {
 public:
  enum LocationKind { REGISTER, STACK_SLOT };

  LocationOperand(InstructionOperand::Kind operand_kind,
                  LocationOperand::LocationKind location_kind,
                  MachineRepresentation rep, int index)
      : InstructionOperand(operand_kind) {
    DCHECK_IMPLIES(location_kind == REGISTER, index >= 0);
    DCHECK(IsSupportedRepresentation(rep));
    value_ |= LocationKindField::encode(location_kind);
    value_ |= RepresentationField::encode(rep);
    value_ |= static_cast<int64_t>(index) << IndexField::kShift;
  }

  int index() const {
    DCHECK(IsStackSlot() || IsFPStackSlot());
    return static_cast<int64_t>(value_) >> IndexField::kShift;
  }

  int register_code() const {
    DCHECK(IsRegister() || IsFPRegister());
    return static_cast<int64_t>(value_) >> IndexField::kShift;
  }

  Register GetRegister() const {
    DCHECK(IsRegister());
    return Register::from_code(register_code());
  }

  FloatRegister GetFloatRegister() const {
    DCHECK(IsFloatRegister());
    return FloatRegister::from_code(register_code());
  }

  DoubleRegister GetDoubleRegister() const {
    // On platforms where FloatRegister, DoubleRegister, and Simd128Register
    // are all the same type, it's convenient to treat everything as a
    // DoubleRegister, so be lax about type checking here.
    DCHECK(IsFPRegister());
    return DoubleRegister::from_code(register_code());
  }

  Simd128Register GetSimd128Register() const {
    DCHECK(IsSimd128Register());
    return Simd128Register::from_code(register_code());
  }

  LocationKind location_kind() const {
    return LocationKindField::decode(value_);
  }

  MachineRepresentation representation() const {
    return RepresentationField::decode(value_);
  }

  static bool IsSupportedRepresentation(MachineRepresentation rep) {
    switch (rep) {
      case MachineRepresentation::kWord32:
      case MachineRepresentation::kWord64:
      case MachineRepresentation::kFloat32:
      case MachineRepresentation::kFloat64:
      case MachineRepresentation::kSimd128:
      case MachineRepresentation::kTaggedSigned:
      case MachineRepresentation::kTaggedPointer:
      case MachineRepresentation::kTagged:
        return true;
      case MachineRepresentation::kBit:
      case MachineRepresentation::kWord8:
      case MachineRepresentation::kWord16:
      case MachineRepresentation::kNone:
        return false;
    }
    UNREACHABLE();
  }

  // Return true if the locations can be moved to one another.
  bool IsCompatible(LocationOperand* op);

  static LocationOperand* cast(InstructionOperand* op) {
    DCHECK(op->IsAnyLocationOperand());
    return static_cast<LocationOperand*>(op);
  }

  static const LocationOperand* cast(const InstructionOperand* op) {
    DCHECK(op->IsAnyLocationOperand());
    return static_cast<const LocationOperand*>(op);
  }

  static LocationOperand cast(const InstructionOperand& op) {
    DCHECK(op.IsAnyLocationOperand());
    return *static_cast<const LocationOperand*>(&op);
  }

  STATIC_ASSERT(KindField::kSize == 3);
  class LocationKindField : public BitField64<LocationKind, 3, 2> {};
  class RepresentationField : public BitField64<MachineRepresentation, 5, 8> {};
  class IndexField : public BitField64<int32_t, 35, 29> {};
};

class V8_EXPORT_PRIVATE ExplicitOperand
    : public NON_EXPORTED_BASE(LocationOperand) {
 public:
  ExplicitOperand(LocationKind kind, MachineRepresentation rep, int index);

  static ExplicitOperand* New(Zone* zone, LocationKind kind,
                              MachineRepresentation rep, int index) {
    return InstructionOperand::New(zone, ExplicitOperand(kind, rep, index));
  }

  INSTRUCTION_OPERAND_CASTS(ExplicitOperand, EXPLICIT);
};


class AllocatedOperand : public LocationOperand {
 public:
  AllocatedOperand(LocationKind kind, MachineRepresentation rep, int index)
      : LocationOperand(ALLOCATED, kind, rep, index) {}

  static AllocatedOperand* New(Zone* zone, LocationKind kind,
                               MachineRepresentation rep, int index) {
    return InstructionOperand::New(zone, AllocatedOperand(kind, rep, index));
  }

  INSTRUCTION_OPERAND_CASTS(AllocatedOperand, ALLOCATED);
};


#undef INSTRUCTION_OPERAND_CASTS
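
// Illustrative sketch (not part of this header): operands pinned to concrete
// machine locations. Register operands carry a register code; stack-slot
// operands carry a (possibly negative) frame slot index. The indices used
// here are arbitrary.
//
//   AllocatedOperand reg(LocationOperand::REGISTER,
//                        MachineRepresentation::kTagged, 1);
//   DCHECK(reg.IsRegister());
//   DCHECK_EQ(1, LocationOperand::cast(reg).register_code());
//
//   AllocatedOperand slot(LocationOperand::STACK_SLOT,
//                         MachineRepresentation::kFloat64, -3);
//   DCHECK(slot.IsFPStackSlot());
//   DCHECK_EQ(-3, LocationOperand::cast(slot).index());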

bool InstructionOperand::IsAnyLocationOperand() const {
  return this->kind() >= FIRST_LOCATION_OPERAND_KIND;
}

bool InstructionOperand::IsLocationOperand() const {
  return IsAnyLocationOperand() &&
         !IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsFPLocationOperand() const {
  return IsAnyLocationOperand() &&
         IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsAnyRegister() const {
  return IsAnyLocationOperand() &&
         LocationOperand::cast(this)->location_kind() ==
             LocationOperand::REGISTER;
}


bool InstructionOperand::IsRegister() const {
  return IsAnyRegister() &&
         !IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsFPRegister() const {
  return IsAnyRegister() &&
         IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsFloatRegister() const {
  return IsAnyRegister() &&
         LocationOperand::cast(this)->representation() ==
             MachineRepresentation::kFloat32;
}

bool InstructionOperand::IsDoubleRegister() const {
  return IsAnyRegister() &&
         LocationOperand::cast(this)->representation() ==
             MachineRepresentation::kFloat64;
}

bool InstructionOperand::IsSimd128Register() const {
  return IsAnyRegister() && LocationOperand::cast(this)->representation() ==
                                MachineRepresentation::kSimd128;
}

bool InstructionOperand::IsAnyStackSlot() const {
  return IsAnyLocationOperand() &&
         LocationOperand::cast(this)->location_kind() ==
             LocationOperand::STACK_SLOT;
}

bool InstructionOperand::IsStackSlot() const {
  return IsAnyStackSlot() &&
         !IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsFPStackSlot() const {
  return IsAnyStackSlot() &&
         IsFloatingPoint(LocationOperand::cast(this)->representation());
}

bool InstructionOperand::IsFloatStackSlot() const {
  return IsAnyLocationOperand() &&
         LocationOperand::cast(this)->location_kind() ==
             LocationOperand::STACK_SLOT &&
         LocationOperand::cast(this)->representation() ==
             MachineRepresentation::kFloat32;
}

bool InstructionOperand::IsDoubleStackSlot() const {
  return IsAnyLocationOperand() &&
         LocationOperand::cast(this)->location_kind() ==
             LocationOperand::STACK_SLOT &&
         LocationOperand::cast(this)->representation() ==
             MachineRepresentation::kFloat64;
}

bool InstructionOperand::IsSimd128StackSlot() const {
  return IsAnyLocationOperand() &&
         LocationOperand::cast(this)->location_kind() ==
             LocationOperand::STACK_SLOT &&
         LocationOperand::cast(this)->representation() ==
             MachineRepresentation::kSimd128;
}

uint64_t InstructionOperand::GetCanonicalizedValue() const {
  if (IsAnyLocationOperand()) {
    MachineRepresentation canonical = MachineRepresentation::kNone;
    if (IsFPRegister()) {
      if (kSimpleFPAliasing) {
        // We treat all FP register operands the same for simple aliasing.
        canonical = MachineRepresentation::kFloat64;
      } else {
        // We need to distinguish FP register operands of different reps when
        // aliasing is not simple (e.g. ARM).
        canonical = LocationOperand::cast(this)->representation();
      }
    }
    return InstructionOperand::KindField::update(
        LocationOperand::RepresentationField::update(this->value_, canonical),
        LocationOperand::EXPLICIT);
  }
  return this->value_;
}

// Required for maps that don't care about machine type.
struct CompareOperandModuloType {
  bool operator()(const InstructionOperand& a,
                  const InstructionOperand& b) const {
    return a.CompareCanonicalized(b);
  }
};
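
// Illustrative sketch (not part of this header; the map and its use are made
// up): CompareOperandModuloType lets a standard ordered container key on the
// canonicalized location only, so FP operands that alias the same register
// collapse to one entry on targets with simple FP aliasing.
//
//   std::map<InstructionOperand, int, CompareOperandModuloType> uses;
//   uses[AllocatedOperand(LocationOperand::REGISTER,
//                         MachineRepresentation::kFloat32, 0)] = 1;
//   uses[AllocatedOperand(LocationOperand::REGISTER,
//                         MachineRepresentation::kFloat64, 0)] = 2;
//   // With kSimpleFPAliasing, both keys canonicalize alike: uses.size() == 1.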

class V8_EXPORT_PRIVATE MoveOperands final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  MoveOperands(const InstructionOperand& source,
               const InstructionOperand& destination)
      : source_(source), destination_(destination) {
    DCHECK(!source.IsInvalid() && !destination.IsInvalid());
  }

  const InstructionOperand& source() const { return source_; }
  InstructionOperand& source() { return source_; }
  void set_source(const InstructionOperand& operand) { source_ = operand; }

  const InstructionOperand& destination() const { return destination_; }
  InstructionOperand& destination() { return destination_; }
  void set_destination(const InstructionOperand& operand) {
    destination_ = operand;
  }

  // The gap resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  bool IsPending() const {
    return destination_.IsInvalid() && !source_.IsInvalid();
  }
  void SetPending() { destination_ = InstructionOperand(); }

  // A move is redundant if it's been eliminated or if its source and
  // destination are the same.
  bool IsRedundant() const {
    DCHECK_IMPLIES(!destination_.IsInvalid(), !destination_.IsConstant());
    return IsEliminated() || source_.EqualsCanonicalized(destination_);
  }

  // We clear both operands to indicate a move that's been eliminated.
  void Eliminate() { source_ = destination_ = InstructionOperand(); }
  bool IsEliminated() const {
    DCHECK_IMPLIES(source_.IsInvalid(), destination_.IsInvalid());
    return source_.IsInvalid();
  }

  // APIs to aid debugging. For general-stream APIs, use operator<<
  void Print(const RegisterConfiguration* config) const;
  void Print() const;

 private:
  InstructionOperand source_;
  InstructionOperand destination_;

  DISALLOW_COPY_AND_ASSIGN(MoveOperands);
};


struct PrintableMoveOperands {
  const RegisterConfiguration* register_configuration_;
  const MoveOperands* move_operands_;
};


std::ostream& operator<<(std::ostream& os, const PrintableMoveOperands& mo);

class V8_EXPORT_PRIVATE ParallelMove final
    : public NON_EXPORTED_BASE(ZoneVector<MoveOperands*>),
      public NON_EXPORTED_BASE(ZoneObject) {
 public:
  explicit ParallelMove(Zone* zone) : ZoneVector<MoveOperands*>(zone) {
    reserve(4);
  }

  MoveOperands* AddMove(const InstructionOperand& from,
                        const InstructionOperand& to) {
    Zone* zone = get_allocator().zone();
    return AddMove(from, to, zone);
  }

  MoveOperands* AddMove(const InstructionOperand& from,
                        const InstructionOperand& to,
                        Zone* operand_allocation_zone) {
    MoveOperands* move = new (operand_allocation_zone) MoveOperands(from, to);
    push_back(move);
    return move;
  }

  bool IsRedundant() const;

  // Prepare this ParallelMove to insert move as if it happened in a
  // subsequent ParallelMove. move->source() may be changed. Any MoveOperands
  // added to to_eliminate must be Eliminated.
  void PrepareInsertAfter(MoveOperands* move,
                          ZoneVector<MoveOperands*>* to_eliminate) const;

 private:
  DISALLOW_COPY_AND_ASSIGN(ParallelMove);
};
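
// Illustrative sketch (not part of this header): recording a register-to-slot
// move in the START gap of an instruction. `zone` and `instr` are assumed to
// exist in the surrounding code; GetOrCreateParallelMove is declared on
// Instruction further below.
//
//   ParallelMove* moves =
//       instr->GetOrCreateParallelMove(Instruction::START, zone);
//   moves->AddMove(AllocatedOperand(LocationOperand::REGISTER,
//                                   MachineRepresentation::kTagged, 0),
//                  AllocatedOperand(LocationOperand::STACK_SLOT,
//                                   MachineRepresentation::kTagged, 2));
//   DCHECK(!moves->IsRedundant());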

struct PrintableParallelMove {
  const RegisterConfiguration* register_configuration_;
  const ParallelMove* parallel_move_;
};


std::ostream& operator<<(std::ostream& os, const PrintableParallelMove& pm);


class ReferenceMap final : public ZoneObject {
 public:
  explicit ReferenceMap(Zone* zone)
      : reference_operands_(8, zone), instruction_position_(-1) {}

  const ZoneVector<InstructionOperand>& reference_operands() const {
    return reference_operands_;
  }
  int instruction_position() const { return instruction_position_; }

  void set_instruction_position(int pos) {
    DCHECK_EQ(-1, instruction_position_);
    instruction_position_ = pos;
  }

  void RecordReference(const AllocatedOperand& op);

 private:
  friend std::ostream& operator<<(std::ostream& os, const ReferenceMap& pm);

  ZoneVector<InstructionOperand> reference_operands_;
  int instruction_position_;
};

std::ostream& operator<<(std::ostream& os, const ReferenceMap& pm);

class InstructionBlock;

class V8_EXPORT_PRIVATE Instruction final {
 public:
  size_t OutputCount() const { return OutputCountField::decode(bit_field_); }
  const InstructionOperand* OutputAt(size_t i) const {
    DCHECK(i < OutputCount());
    return &operands_[i];
  }
  InstructionOperand* OutputAt(size_t i) {
    DCHECK(i < OutputCount());
    return &operands_[i];
  }

  bool HasOutput() const { return OutputCount() > 0; }
  const InstructionOperand* Output() const { return OutputAt(0); }
  InstructionOperand* Output() { return OutputAt(0); }

  size_t InputCount() const { return InputCountField::decode(bit_field_); }
  const InstructionOperand* InputAt(size_t i) const {
    DCHECK(i < InputCount());
    return &operands_[OutputCount() + i];
  }
  InstructionOperand* InputAt(size_t i) {
    DCHECK(i < InputCount());
    return &operands_[OutputCount() + i];
  }

  size_t TempCount() const { return TempCountField::decode(bit_field_); }
  const InstructionOperand* TempAt(size_t i) const {
    DCHECK(i < TempCount());
    return &operands_[OutputCount() + InputCount() + i];
  }
  InstructionOperand* TempAt(size_t i) {
    DCHECK(i < TempCount());
    return &operands_[OutputCount() + InputCount() + i];
  }

  InstructionCode opcode() const { return opcode_; }
  ArchOpcode arch_opcode() const { return ArchOpcodeField::decode(opcode()); }
  AddressingMode addressing_mode() const {
    return AddressingModeField::decode(opcode());
  }
  FlagsMode flags_mode() const { return FlagsModeField::decode(opcode()); }
  FlagsCondition flags_condition() const {
    return FlagsConditionField::decode(opcode());
  }

  static Instruction* New(Zone* zone, InstructionCode opcode) {
    return New(zone, opcode, 0, nullptr, 0, nullptr, 0, nullptr);
  }

  static Instruction* New(Zone* zone, InstructionCode opcode,
                          size_t output_count, InstructionOperand* outputs,
                          size_t input_count, InstructionOperand* inputs,
                          size_t temp_count, InstructionOperand* temps) {
    DCHECK_LE(0, opcode);
    DCHECK(output_count == 0 || outputs != nullptr);
    DCHECK(input_count == 0 || inputs != nullptr);
    DCHECK(temp_count == 0 || temps != nullptr);
    // TODO(jarin/mstarzinger): Handle this gracefully. See crbug.com/582702.
    CHECK(InputCountField::is_valid(input_count));

    size_t total_extra_ops = output_count + input_count + temp_count;
    if (total_extra_ops != 0) total_extra_ops--;
    int size = static_cast<int>(
        RoundUp(sizeof(Instruction), sizeof(InstructionOperand)) +
        total_extra_ops * sizeof(InstructionOperand));
    return new (zone->New(size)) Instruction(
        opcode, output_count, outputs, input_count, inputs, temp_count, temps);
  }

  Instruction* MarkAsCall() {
    bit_field_ = IsCallField::update(bit_field_, true);
    return this;
  }
  bool IsCall() const { return IsCallField::decode(bit_field_); }
  bool NeedsReferenceMap() const { return IsCall(); }
  bool HasReferenceMap() const { return reference_map_ != nullptr; }

  bool ClobbersRegisters() const { return IsCall(); }
  bool ClobbersTemps() const { return IsCall(); }
  bool ClobbersDoubleRegisters() const { return IsCall(); }
  ReferenceMap* reference_map() const { return reference_map_; }

  void set_reference_map(ReferenceMap* map) {
    DCHECK(NeedsReferenceMap());
    DCHECK(!reference_map_);
    reference_map_ = map;
  }

  void OverwriteWithNop() {
    opcode_ = ArchOpcodeField::encode(kArchNop);
    bit_field_ = 0;
    reference_map_ = nullptr;
  }

  bool IsNop() const { return arch_opcode() == kArchNop; }

  bool IsDeoptimizeCall() const {
    return arch_opcode() == ArchOpcode::kArchDeoptimize ||
           FlagsModeField::decode(opcode()) == kFlags_deoptimize ||
           FlagsModeField::decode(opcode()) == kFlags_deoptimize_and_poison;
  }

  bool IsTrap() const {
    return FlagsModeField::decode(opcode()) == kFlags_trap;
  }

  bool IsJump() const { return arch_opcode() == ArchOpcode::kArchJmp; }
  bool IsRet() const { return arch_opcode() == ArchOpcode::kArchRet; }
  bool IsTailCall() const {
    return arch_opcode() == ArchOpcode::kArchTailCallCodeObject ||
           arch_opcode() == ArchOpcode::kArchTailCallCodeObjectFromJSFunction ||
           arch_opcode() == ArchOpcode::kArchTailCallAddress ||
           arch_opcode() == ArchOpcode::kArchTailCallWasm;
  }
  bool IsThrow() const {
    return arch_opcode() == ArchOpcode::kArchThrowTerminator;
  }

  enum GapPosition {
    START,
    END,
    FIRST_GAP_POSITION = START,
    LAST_GAP_POSITION = END
  };

  ParallelMove* GetOrCreateParallelMove(GapPosition pos, Zone* zone) {
    if (parallel_moves_[pos] == nullptr) {
      parallel_moves_[pos] = new (zone) ParallelMove(zone);
    }
    return parallel_moves_[pos];
  }

  ParallelMove* GetParallelMove(GapPosition pos) {
    return parallel_moves_[pos];
  }

  const ParallelMove* GetParallelMove(GapPosition pos) const {
    return parallel_moves_[pos];
  }

  bool AreMovesRedundant() const;

  ParallelMove* const* parallel_moves() const { return &parallel_moves_[0]; }
  ParallelMove** parallel_moves() { return &parallel_moves_[0]; }

  // The block_id may be invalidated in JumpThreading. It is only important
  // for register allocation, to avoid searching for blocks from instruction
  // indexes.
  InstructionBlock* block() const { return block_; }
  void set_block(InstructionBlock* block) {
    DCHECK_NOT_NULL(block);
    block_ = block;
  }

  // APIs to aid debugging. For general-stream APIs, use operator<<
  void Print(const RegisterConfiguration* config) const;
  void Print() const;

  typedef BitField<size_t, 0, 8> OutputCountField;
  typedef BitField<size_t, 8, 16> InputCountField;
  typedef BitField<size_t, 24, 6> TempCountField;

  static const size_t kMaxOutputCount = OutputCountField::kMax;
  static const size_t kMaxInputCount = InputCountField::kMax;
  static const size_t kMaxTempCount = TempCountField::kMax;

 private:
  explicit Instruction(InstructionCode opcode);

  Instruction(InstructionCode opcode, size_t output_count,
              InstructionOperand* outputs, size_t input_count,
              InstructionOperand* inputs, size_t temp_count,
              InstructionOperand* temps);

  typedef BitField<bool, 30, 1> IsCallField;

  InstructionCode opcode_;
  uint32_t bit_field_;
  ParallelMove* parallel_moves_[2];
  ReferenceMap* reference_map_;
  InstructionBlock* block_;
  InstructionOperand operands_[1];

  DISALLOW_COPY_AND_ASSIGN(Instruction);
};


struct PrintableInstruction {
  const RegisterConfiguration* register_configuration_;
  const Instruction* instr_;
};
std::ostream& operator<<(std::ostream& os, const PrintableInstruction& instr);
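
// Illustrative sketch (not part of this header): building an instruction the
// way the instruction selector does. `zone` is assumed to exist in the
// surrounding code, kArchNop merely stands in for a real target opcode, and
// the operand values are elided.
//
//   InstructionOperand outputs[1] = {/* one output operand */};
//   InstructionOperand inputs[2] = {/* two input operands */};
//   Instruction* instr =
//       Instruction::New(zone, kArchNop, 1, outputs, 2, inputs, 0, nullptr);
//   DCHECK_EQ(1u, instr->OutputCount());
//   DCHECK_EQ(2u, instr->InputCount());
//   DCHECK(!instr->IsCall());  // Until MarkAsCall() is used.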

class RpoNumber final {
 public:
  static const int kInvalidRpoNumber = -1;
  int ToInt() const {
    DCHECK(IsValid());
    return index_;
  }
  size_t ToSize() const {
    DCHECK(IsValid());
    return static_cast<size_t>(index_);
  }
  bool IsValid() const { return index_ >= 0; }
  static RpoNumber FromInt(int index) { return RpoNumber(index); }
  static RpoNumber Invalid() { return RpoNumber(kInvalidRpoNumber); }

  bool IsNext(const RpoNumber other) const {
    DCHECK(IsValid());
    return other.index_ == this->index_ + 1;
  }

  // Comparison operators.
  bool operator==(RpoNumber other) const { return index_ == other.index_; }
  bool operator!=(RpoNumber other) const { return index_ != other.index_; }
  bool operator>(RpoNumber other) const { return index_ > other.index_; }
  bool operator<(RpoNumber other) const { return index_ < other.index_; }
  bool operator<=(RpoNumber other) const { return index_ <= other.index_; }
  bool operator>=(RpoNumber other) const { return index_ >= other.index_; }

 private:
  explicit RpoNumber(int32_t index) : index_(index) {}
  int32_t index_;
};


std::ostream& operator<<(std::ostream&, const RpoNumber&);

class V8_EXPORT_PRIVATE Constant final {
 public:
  enum Type {
    kInt32,
    kInt64,
    kFloat32,
    kFloat64,
    kExternalReference,
    kHeapObject,
    kRpoNumber
  };

  explicit Constant(int32_t v);
  explicit Constant(int64_t v) : type_(kInt64), value_(v) {}
  explicit Constant(float v) : type_(kFloat32), value_(bit_cast<int32_t>(v)) {}
  explicit Constant(double v)
      : type_(kFloat64), value_(bit_cast<int64_t>(v)) {}
  explicit Constant(ExternalReference ref)
      : type_(kExternalReference), value_(bit_cast<intptr_t>(ref)) {}
  explicit Constant(Handle<HeapObject> obj)
      : type_(kHeapObject), value_(bit_cast<intptr_t>(obj)) {}
  explicit Constant(RpoNumber rpo) : type_(kRpoNumber), value_(rpo.ToInt()) {}
  explicit Constant(RelocatablePtrConstantInfo info);

  Type type() const { return type_; }

  RelocInfo::Mode rmode() const { return rmode_; }

  int32_t ToInt32() const {
    DCHECK(type() == kInt32 || type() == kInt64);
    const int32_t value = static_cast<int32_t>(value_);
    DCHECK_EQ(value_, static_cast<int64_t>(value));
    return value;
  }

  int64_t ToInt64() const {
    if (type() == kInt32) return ToInt32();
    DCHECK_EQ(kInt64, type());
    return value_;
  }

  float ToFloat32() const {
    // TODO(ahaas): We should remove this function. If value_ has the bit
    // representation of a signalling NaN, then returning it as float can
    // cause the signalling bit to flip, and value_ is returned as a quiet
    // NaN.
    DCHECK_EQ(kFloat32, type());
    return bit_cast<float>(static_cast<int32_t>(value_));
  }

  uint32_t ToFloat32AsInt() const {
    DCHECK_EQ(kFloat32, type());
    return bit_cast<uint32_t>(static_cast<int32_t>(value_));
  }

  Double ToFloat64() const {
    DCHECK_EQ(kFloat64, type());
    return Double(bit_cast<uint64_t>(value_));
  }

  ExternalReference ToExternalReference() const {
    DCHECK_EQ(kExternalReference, type());
    return bit_cast<ExternalReference>(static_cast<intptr_t>(value_));
  }

  RpoNumber ToRpoNumber() const {
    DCHECK_EQ(kRpoNumber, type());
    return RpoNumber::FromInt(static_cast<int>(value_));
  }

  Handle<HeapObject> ToHeapObject() const;
  Handle<Code> ToCode() const;

 private:
  Type type_;
  RelocInfo::Mode rmode_ = RelocInfo::NONE;
  int64_t value_;
};


std::ostream& operator<<(std::ostream& os, const Constant& constant);
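
// Illustrative sketch (not part of this header): Constant wraps the payload
// kinds the backend can materialize; integer constants widen transparently.
//
//   Constant c(int32_t{42});
//   DCHECK_EQ(Constant::kInt32, c.type());
//   DCHECK_EQ(42, c.ToInt32());
//   DCHECK_EQ(42, c.ToInt64());  // kInt32 is accepted by ToInt64() as well.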

// Forward declarations.
class FrameStateDescriptor;

enum class StateValueKind : uint8_t {
  kArgumentsElements,
  kArgumentsLength,
  kPlain,
  kOptimizedOut,
  kNested,
  kDuplicate
};

class StateValueDescriptor {
 public:
  StateValueDescriptor()
      : kind_(StateValueKind::kPlain), type_(MachineType::AnyTagged()) {}

  static StateValueDescriptor ArgumentsElements(ArgumentsStateType type) {
    StateValueDescriptor descr(StateValueKind::kArgumentsElements,
                               MachineType::AnyTagged());
    descr.args_type_ = type;
    return descr;
  }
  static StateValueDescriptor ArgumentsLength(ArgumentsStateType type) {
    StateValueDescriptor descr(StateValueKind::kArgumentsLength,
                               MachineType::AnyTagged());
    descr.args_type_ = type;
    return descr;
  }
  static StateValueDescriptor Plain(MachineType type) {
    return StateValueDescriptor(StateValueKind::kPlain, type);
  }
  static StateValueDescriptor OptimizedOut() {
    return StateValueDescriptor(StateValueKind::kOptimizedOut,
                                MachineType::AnyTagged());
  }
  static StateValueDescriptor Recursive(size_t id) {
    StateValueDescriptor descr(StateValueKind::kNested,
                               MachineType::AnyTagged());
    descr.id_ = id;
    return descr;
  }
  static StateValueDescriptor Duplicate(size_t id) {
    StateValueDescriptor descr(StateValueKind::kDuplicate,
                               MachineType::AnyTagged());
    descr.id_ = id;
    return descr;
  }

  bool IsArgumentsElements() const {
    return kind_ == StateValueKind::kArgumentsElements;
  }
  bool IsArgumentsLength() const {
    return kind_ == StateValueKind::kArgumentsLength;
  }
  bool IsPlain() const { return kind_ == StateValueKind::kPlain; }
  bool IsOptimizedOut() const {
    return kind_ == StateValueKind::kOptimizedOut;
  }
  bool IsNested() const { return kind_ == StateValueKind::kNested; }
  bool IsDuplicate() const { return kind_ == StateValueKind::kDuplicate; }
  MachineType type() const { return type_; }
  size_t id() const {
    DCHECK(kind_ == StateValueKind::kDuplicate ||
           kind_ == StateValueKind::kNested);
    return id_;
  }
  ArgumentsStateType arguments_type() const {
    DCHECK(kind_ == StateValueKind::kArgumentsElements ||
           kind_ == StateValueKind::kArgumentsLength);
    return args_type_;
  }

 private:
  StateValueDescriptor(StateValueKind kind, MachineType type)
      : kind_(kind), type_(type) {}

  StateValueKind kind_;
  MachineType type_;
  union {
    size_t id_;
    ArgumentsStateType args_type_;
  };
};

class StateValueList {
 public:
  explicit StateValueList(Zone* zone) : fields_(zone), nested_(zone) {}

  size_t size() { return fields_.size(); }

  struct Value {
    StateValueDescriptor* desc;
    StateValueList* nested;

    Value(StateValueDescriptor* desc, StateValueList* nested)
        : desc(desc), nested(nested) {}
  };

  class iterator {
   public:
    // Bare minimum of operators needed for range iteration.
    bool operator!=(const iterator& other) const {
      return field_iterator != other.field_iterator;
    }
    bool operator==(const iterator& other) const {
      return field_iterator == other.field_iterator;
    }
    iterator& operator++() {
      if (field_iterator->IsNested()) {
        nested_iterator++;
      }
      ++field_iterator;
      return *this;
    }
    Value operator*() {
      StateValueDescriptor* desc = &(*field_iterator);
      StateValueList* nested = desc->IsNested() ? *nested_iterator : nullptr;
      return Value(desc, nested);
    }

   private:
    friend class StateValueList;

    iterator(ZoneVector<StateValueDescriptor>::iterator it,
             ZoneVector<StateValueList*>::iterator nested)
        : field_iterator(it), nested_iterator(nested) {}

    ZoneVector<StateValueDescriptor>::iterator field_iterator;
    ZoneVector<StateValueList*>::iterator nested_iterator;
  };

  void ReserveSize(size_t size) { fields_.reserve(size); }

  StateValueList* PushRecursiveField(Zone* zone, size_t id) {
    fields_.push_back(StateValueDescriptor::Recursive(id));
    StateValueList* nested =
        new (zone->New(sizeof(StateValueList))) StateValueList(zone);
    nested_.push_back(nested);
    return nested;
  }
  void PushArgumentsElements(ArgumentsStateType type) {
    fields_.push_back(StateValueDescriptor::ArgumentsElements(type));
  }
  void PushArgumentsLength(ArgumentsStateType type) {
    fields_.push_back(StateValueDescriptor::ArgumentsLength(type));
  }
  void PushDuplicate(size_t id) {
    fields_.push_back(StateValueDescriptor::Duplicate(id));
  }
  void PushPlain(MachineType type) {
    fields_.push_back(StateValueDescriptor::Plain(type));
  }
  void PushOptimizedOut() {
    fields_.push_back(StateValueDescriptor::OptimizedOut());
  }

  iterator begin() { return iterator(fields_.begin(), nested_.begin()); }
  iterator end() { return iterator(fields_.end(), nested_.end()); }

 private:
  ZoneVector<StateValueDescriptor> fields_;
  ZoneVector<StateValueList*> nested_;
};
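
// Illustrative sketch (not part of this header): describing a frame state
// with one tagged value, one optimized-out slot, and a nested object of two
// fields. `zone` is assumed to exist in the surrounding code and the id 0 is
// arbitrary.
//
//   StateValueList values(zone);
//   values.PushPlain(MachineType::AnyTagged());
//   values.PushOptimizedOut();
//   StateValueList* object = values.PushRecursiveField(zone, 0);
//   object->PushPlain(MachineType::AnyTagged());
//   object->PushPlain(MachineType::Int32());
//   for (StateValueList::Value entry : values) {
//     // entry.desc describes the slot; entry.nested is non-null only for
//     // the kNested entry.
//   }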

class FrameStateDescriptor : public ZoneObject {
 public:
  FrameStateDescriptor(Zone* zone, FrameStateType type, BailoutId bailout_id,
                       OutputFrameStateCombine state_combine,
                       size_t parameters_count, size_t locals_count,
                       size_t stack_count,
                       MaybeHandle<SharedFunctionInfo> shared_info,
                       FrameStateDescriptor* outer_state = nullptr);

  FrameStateType type() const { return type_; }
  BailoutId bailout_id() const { return bailout_id_; }
  OutputFrameStateCombine state_combine() const {
    return frame_state_combine_;
  }
  size_t parameters_count() const { return parameters_count_; }
  size_t locals_count() const { return locals_count_; }
  size_t stack_count() const { return stack_count_; }
  MaybeHandle<SharedFunctionInfo> shared_info() const { return shared_info_; }
  FrameStateDescriptor* outer_state() const { return outer_state_; }
  bool HasContext() const {
    return FrameStateFunctionInfo::IsJSFunctionType(type_) ||
           type_ == FrameStateType::kBuiltinContinuation;
  }

  size_t GetSize() const;
  size_t GetTotalSize() const;
  size_t GetFrameCount() const;
  size_t GetJSFrameCount() const;

  StateValueList* GetStateValueDescriptors() { return &values_; }

  static const int kImpossibleValue = 0xdead;

 private:
  FrameStateType type_;
  BailoutId bailout_id_;
  OutputFrameStateCombine frame_state_combine_;
  size_t parameters_count_;
  size_t locals_count_;
  size_t stack_count_;
  StateValueList values_;
  MaybeHandle<SharedFunctionInfo> const shared_info_;
  FrameStateDescriptor* outer_state_;
};

// A deoptimization entry is a pair of the reason why we deoptimize and the
// frame state descriptor that we have to go back to.
class DeoptimizationEntry final {
 public:
  DeoptimizationEntry() {}
  DeoptimizationEntry(FrameStateDescriptor* descriptor, DeoptimizeKind kind,
                      DeoptimizeReason reason, VectorSlotPair const& feedback)
      : descriptor_(descriptor),
        kind_(kind),
        reason_(reason),
        feedback_(feedback) {}

  FrameStateDescriptor* descriptor() const { return descriptor_; }
  DeoptimizeKind kind() const { return kind_; }
  DeoptimizeReason reason() const { return reason_; }
  VectorSlotPair const& feedback() const { return feedback_; }

 private:
  FrameStateDescriptor* descriptor_ = nullptr;
  DeoptimizeKind kind_ = DeoptimizeKind::kEager;
  DeoptimizeReason reason_ = DeoptimizeReason::kUnknown;
  VectorSlotPair feedback_ = VectorSlotPair();
};

typedef ZoneVector<DeoptimizationEntry> DeoptimizationVector;

class V8_EXPORT_PRIVATE PhiInstruction final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  typedef ZoneVector<InstructionOperand> Inputs;

  PhiInstruction(Zone* zone, int virtual_register, size_t input_count);

  void SetInput(size_t offset, int virtual_register);
  void RenameInput(size_t offset, int virtual_register);

  int virtual_register() const { return virtual_register_; }
  const IntVector& operands() const { return operands_; }

  // TODO(dcarney): this has no real business being here, since it's internal
  // to the register allocator, but putting it here was convenient.
  const InstructionOperand& output() const { return output_; }
  InstructionOperand& output() { return output_; }

 private:
  const int virtual_register_;
  InstructionOperand output_;
  IntVector operands_;
};


// Analogue of BasicBlock for Instructions instead of Nodes.
class V8_EXPORT_PRIVATE InstructionBlock final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  InstructionBlock(Zone* zone, RpoNumber rpo_number, RpoNumber loop_header,
                   RpoNumber loop_end, bool deferred, bool handler);

  // Instruction indexes (used by the register allocator).
  int first_instruction_index() const {
    DCHECK_LE(0, code_start_);
    DCHECK_LT(0, code_end_);
    DCHECK_GE(code_end_, code_start_);
    return code_start_;
  }
  int last_instruction_index() const {
    DCHECK_LE(0, code_start_);
    DCHECK_LT(0, code_end_);
    DCHECK_GE(code_end_, code_start_);
    return code_end_ - 1;
  }

  int32_t code_start() const { return code_start_; }
  void set_code_start(int32_t start) { code_start_ = start; }

  int32_t code_end() const { return code_end_; }
  void set_code_end(int32_t end) { code_end_ = end; }

  bool IsDeferred() const { return deferred_; }
  bool IsHandler() const { return handler_; }

  RpoNumber ao_number() const { return ao_number_; }
  RpoNumber rpo_number() const { return rpo_number_; }
  RpoNumber loop_header() const { return loop_header_; }
  RpoNumber loop_end() const {
    DCHECK(IsLoopHeader());
    return loop_end_;
  }
  inline bool IsLoopHeader() const { return loop_end_.IsValid(); }

  typedef ZoneVector<RpoNumber> Predecessors;
  Predecessors& predecessors() { return predecessors_; }
  const Predecessors& predecessors() const { return predecessors_; }
  size_t PredecessorCount() const { return predecessors_.size(); }
  size_t PredecessorIndexOf(RpoNumber rpo_number) const;

  typedef ZoneVector<RpoNumber> Successors;
  Successors& successors() { return successors_; }
  const Successors& successors() const { return successors_; }
  size_t SuccessorCount() const { return successors_.size(); }

  typedef ZoneVector<PhiInstruction*> PhiInstructions;
  const PhiInstructions& phis() const { return phis_; }
  PhiInstruction* PhiAt(size_t i) const { return phis_[i]; }
  void AddPhi(PhiInstruction* phi) { phis_.push_back(phi); }

  void set_ao_number(RpoNumber ao_number) { ao_number_ = ao_number; }

  bool needs_frame() const { return needs_frame_; }
  void mark_needs_frame() { needs_frame_ = true; }

  bool must_construct_frame() const { return must_construct_frame_; }
  void mark_must_construct_frame() { must_construct_frame_ = true; }

  bool must_deconstruct_frame() const { return must_deconstruct_frame_; }
  void mark_must_deconstruct_frame() { must_deconstruct_frame_ = true; }

 private:
  Successors successors_;
  Predecessors predecessors_;
  PhiInstructions phis_;
  RpoNumber ao_number_;  // Assembly order number.
  const RpoNumber rpo_number_;
  const RpoNumber loop_header_;
  const RpoNumber loop_end_;
  int32_t code_start_;   // Start index of arch-specific code.
  int32_t code_end_;     // End index of arch-specific code.
  const bool deferred_;  // Block contains deferred code.
  const bool handler_;   // Block is a handler entry point.
  bool needs_frame_;
  bool must_construct_frame_;
  bool must_deconstruct_frame_;
};
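
// Illustrative sketch (not part of this header): walking the instruction
// index range of a block the way the register allocator does. `block` and
// `sequence` are assumed to exist in the surrounding code; InstructionAt is
// declared on InstructionSequence further below.
//
//   for (int i = block->first_instruction_index();
//        i <= block->last_instruction_index(); ++i) {
//     Instruction* instr = sequence->InstructionAt(i);
//     // ... inspect or rewrite instr ...
//   }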

class InstructionSequence;

struct PrintableInstructionBlock {
  const RegisterConfiguration* register_configuration_;
  const InstructionBlock* block_;
  const InstructionSequence* code_;
};

std::ostream& operator<<(std::ostream& os,
                         const PrintableInstructionBlock& printable_block);

typedef ZoneDeque<Constant> ConstantDeque;
typedef std::map<int, Constant, std::less<int>,
                 ZoneAllocator<std::pair<const int, Constant> > >
    ConstantMap;

typedef ZoneDeque<Instruction*> InstructionDeque;
typedef ZoneDeque<ReferenceMap*> ReferenceMapDeque;
typedef ZoneVector<InstructionBlock*> InstructionBlocks;


// Forward declarations.
struct PrintableInstructionSequence;
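
// Illustrative sketch (not part of this header): typical use of the sequence
// declared below while selecting instructions. `sequence` is assumed to
// exist in the surrounding code.
//
//   // Small non-relocatable int32 constants come back INLINE, everything
//   // else as an index into immediates().
//   ImmediateOperand imm = sequence->AddImmediate(Constant(int32_t{7}));
//   DCHECK_EQ(ImmediateOperand::INLINE, imm.type());
//   DCHECK_EQ(7, sequence->GetImmediate(&imm).ToInt32());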

// Represents architecture-specific generated code before, during, and after
// register allocation.
class V8_EXPORT_PRIVATE InstructionSequence final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  static InstructionBlocks* InstructionBlocksFor(Zone* zone,
                                                 const Schedule* schedule);
  // Puts the deferred blocks last.
  static void ComputeAssemblyOrder(InstructionBlocks* blocks);

  InstructionSequence(Isolate* isolate, Zone* zone,
                      InstructionBlocks* instruction_blocks);

  int NextVirtualRegister();
  int VirtualRegisterCount() const { return next_virtual_register_; }

  const InstructionBlocks& instruction_blocks() const {
    return *instruction_blocks_;
  }

  int InstructionBlockCount() const {
    return static_cast<int>(instruction_blocks_->size());
  }

  InstructionBlock* InstructionBlockAt(RpoNumber rpo_number) {
    return instruction_blocks_->at(rpo_number.ToSize());
  }

  int LastLoopInstructionIndex(const InstructionBlock* block) {
    return instruction_blocks_->at(block->loop_end().ToSize() - 1)
        ->last_instruction_index();
  }

  const InstructionBlock* InstructionBlockAt(RpoNumber rpo_number) const {
    return instruction_blocks_->at(rpo_number.ToSize());
  }

  InstructionBlock* GetInstructionBlock(int instruction_index) const;

  static MachineRepresentation DefaultRepresentation() {
    return MachineType::PointerRepresentation();
  }
  MachineRepresentation GetRepresentation(int virtual_register) const;
  void MarkAsRepresentation(MachineRepresentation rep, int virtual_register);
  int representation_mask() const { return representation_mask_; }

  bool IsReference(int virtual_register) const {
    return CanBeTaggedPointer(GetRepresentation(virtual_register));
  }
  bool IsFP(int virtual_register) const {
    return IsFloatingPoint(GetRepresentation(virtual_register));
  }

  Instruction* GetBlockStart(RpoNumber rpo) const;

  typedef InstructionDeque::const_iterator const_iterator;
  const_iterator begin() const { return instructions_.begin(); }
  const_iterator end() const { return instructions_.end(); }
  const InstructionDeque& instructions() const { return instructions_; }
  int LastInstructionIndex() const {
    return static_cast<int>(instructions().size()) - 1;
  }

  Instruction* InstructionAt(int index) const {
    DCHECK_LE(0, index);
    DCHECK_GT(instructions_.size(), index);
    return instructions_[index];
  }

  Isolate* isolate() const { return isolate_; }
  const ReferenceMapDeque* reference_maps() const { return &reference_maps_; }
  Zone* zone() const { return zone_; }

  // Used by the instruction selector while adding instructions.
  int AddInstruction(Instruction* instr);
  void StartBlock(RpoNumber rpo);
  void EndBlock(RpoNumber rpo);

  int AddConstant(int virtual_register, Constant constant) {
    // TODO(titzer): allow RPO numbers as constants?
    DCHECK_NE(Constant::kRpoNumber, constant.type());
    DCHECK(virtual_register >= 0 && virtual_register < next_virtual_register_);
    DCHECK(constants_.find(virtual_register) == constants_.end());
    constants_.insert(std::make_pair(virtual_register, constant));
    return virtual_register;
  }
  Constant GetConstant(int virtual_register) const {
    ConstantMap::const_iterator it = constants_.find(virtual_register);
    DCHECK(it != constants_.end());
    DCHECK_EQ(virtual_register, it->first);
    return it->second;
  }

  typedef ZoneVector<Constant> Immediates;
  Immediates& immediates() { return immediates_; }

  ImmediateOperand AddImmediate(const Constant& constant) {
    if (constant.type() == Constant::kInt32 &&
        RelocInfo::IsNone(constant.rmode())) {
      return ImmediateOperand(ImmediateOperand::INLINE, constant.ToInt32());
    }
    int index = static_cast<int>(immediates_.size());
    immediates_.push_back(constant);
    return ImmediateOperand(ImmediateOperand::INDEXED, index);
  }

  Constant GetImmediate(const ImmediateOperand* op) const {
    switch (op->type()) {
      case ImmediateOperand::INLINE:
        return Constant(op->inline_value());
      case ImmediateOperand::INDEXED: {
        int index = op->indexed_value();
        DCHECK_LE(0, index);
        DCHECK_GT(immediates_.size(), index);
        return immediates_[index];
      }
    }
    UNREACHABLE();
  }

  int AddDeoptimizationEntry(FrameStateDescriptor* descriptor,
                             DeoptimizeKind kind, DeoptimizeReason reason,
                             VectorSlotPair const& feedback);
  DeoptimizationEntry const& GetDeoptimizationEntry(int deoptimization_id);
  int GetDeoptimizationEntryCount() const {
    return static_cast<int>(deoptimization_entries_.size());
  }

  RpoNumber InputRpo(Instruction* instr, size_t index);

  bool GetSourcePosition(const Instruction* instr,
                         SourcePosition* result) const;
  void SetSourcePosition(const Instruction* instr, SourcePosition value);

  bool ContainsCall() const {
    for (Instruction* instr : instructions_) {
      if (instr->IsCall()) return true;
    }
    return false;
  }

  // APIs to aid debugging. For general-stream APIs, use operator<<
  void Print(const RegisterConfiguration* config) const;
  void Print() const;

  void PrintBlock(const RegisterConfiguration* config, int block_id) const;
  void PrintBlock(int block_id) const;

  void ValidateEdgeSplitForm() const;
  void ValidateDeferredBlockExitPaths() const;
  void ValidateDeferredBlockEntryPaths() const;
  void ValidateSSA() const;

  static void SetRegisterConfigurationForTesting(
      const RegisterConfiguration* regConfig);
  static void ClearRegisterConfigurationForTesting();

 private:
  friend V8_EXPORT_PRIVATE std::ostream& operator<<(
      std::ostream& os, const PrintableInstructionSequence& code);

  typedef ZoneMap<const Instruction*, SourcePosition> SourcePositionMap;

  static const RegisterConfiguration* RegisterConfigurationForTesting();
  static const RegisterConfiguration* registerConfigurationForTesting_;

  Isolate* isolate_;
  Zone* const zone_;
  InstructionBlocks* const instruction_blocks_;
  SourcePositionMap source_positions_;
  ConstantMap constants_;
  Immediates immediates_;
  InstructionDeque instructions_;
  int next_virtual_register_;
  ReferenceMapDeque reference_maps_;
  ZoneVector<MachineRepresentation> representations_;
  int representation_mask_;
  DeoptimizationVector deoptimization_entries_;

  // Used at construction time
  InstructionBlock* current_block_;

  DISALLOW_COPY_AND_ASSIGN(InstructionSequence);
};


struct PrintableInstructionSequence {
  const RegisterConfiguration* register_configuration_;
  const InstructionSequence* sequence_;
};

V8_EXPORT_PRIVATE std::ostream& operator<<(
    std::ostream& os, const PrintableInstructionSequence& code);

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_INSTRUCTION_H_