//===- subzero/src/IceOperand.h - High-level operands -----------*- C++ -*-===//
//
// The Subzero Code Generator
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
///
/// \file
/// \brief Declares the Operand class and its target-independent subclasses.
///
/// The main classes are Variable, which represents an LLVM variable that is
/// either register- or stack-allocated, and the Constant hierarchy, which
/// represents integer, floating-point, and/or symbolic constants.
///
//===----------------------------------------------------------------------===//

#ifndef SUBZERO_SRC_ICEOPERAND_H
#define SUBZERO_SRC_ICEOPERAND_H

#include "IceDefs.h"
#include "IceCfg.h"
#include "IceGlobalContext.h"
#include "IceStringPool.h"
#include "IceTypes.h"

#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/Format.h"

#include <limits>
#include <type_traits>

namespace Ice {

/// Operand is the abstract base class of all operands (Variables and the
/// Constant hierarchy). It carries the operand's type, an LLVM-RTTI kind tag,
/// and a flat array of the Variables referenced by the operand.
class Operand {
  Operand() = delete;
  Operand(const Operand &) = delete;
  Operand &operator=(const Operand &) = delete;

public:
  /// Number of enum slots reserved after each *_Target marker for
  /// target-specific kinds.
  static constexpr size_t MaxTargetKinds = 10;
  enum OperandKind {
    kConst_Base,
    kConstInteger32,
    kConstInteger64,
    kConstFloat,
    kConstDouble,
    kConstRelocatable,
    kConstUndef,
    kConst_Target, // leave space for target-specific constant kinds
    kConst_Max = kConst_Target + MaxTargetKinds,
    kVariable,
    kVariable64On32,
    kVariableVecOn32,
    kVariableBoolean,
    kVariable_Target, // leave space for target-specific variable kinds
    kVariable_Max = kVariable_Target + MaxTargetKinds,
    // Target-specific operand classes use kTarget as the starting point for
    // their Kind enum space. Note that the value-spaces are shared across
    // targets. To avoid confusion over the definition of shared values, an
    // object specific to one target should never be passed to a different
    // target.
    kTarget,
    kTarget_Max = std::numeric_limits<uint8_t>::max(),
  };
  static_assert(kTarget <= kTarget_Max, "Must not be above max.");
  OperandKind getKind() const { return Kind; }
  Type getType() const { return Ty; }

  /// Every Operand keeps an array of the Variables referenced in the operand.
  /// This is so that the liveness operations can get quick access to the
  /// variables of interest, without having to dig so far into the operand.
  SizeT getNumVars() const { return NumVars; }
  Variable *getVar(SizeT I) const {
    assert(I < getNumVars());
    return Vars[I];
  }
  virtual void emit(const Cfg *Func) const = 0;

  /// \name Dumping functions.
  /// @{

  /// The dump(Func,Str) implementation must be sure to handle the situation
  /// where Func==nullptr.
  virtual void dump(const Cfg *Func, Ostream &Str) const = 0;
  void dump(const Cfg *Func) const {
    if (!BuildDefs::dump())
      return;
    assert(Func);
    dump(Func, Func->getContext()->getStrDump());
  }
  void dump(Ostream &Str) const {
    if (BuildDefs::dump())
      dump(nullptr, Str);
  }
  /// @}

  virtual ~Operand() = default;

  /// Returns this operand as a Variable usable as a boolean, or nullptr when
  /// the operand kind does not support that view.
  virtual Variable *asBoolean() { return nullptr; }

  /// Default hash implementation: fatal error. Subclasses that participate in
  /// hashing (Constant, Variable) override this.
  virtual SizeT hashValue() const {
    llvm::report_fatal_error("Tried to hash unsupported operand type : " +
                             std::to_string(Kind));
    return 0;
  }

  inline void* getExternalData() const { return externalData; }
  inline void setExternalData(void* data) { externalData = data; }

protected:
  Operand(OperandKind Kind, Type Ty) : Ty(Ty), Kind(Kind) {
    // It is undefined behavior to have a larger value in the enum
    assert(Kind <= kTarget_Max);
  }

  const Type Ty;
  const OperandKind Kind;
  /// Vars and NumVars are initialized by the derived class.
  SizeT NumVars = 0;
  Variable **Vars = nullptr;

  /// External data can be set by an optimizer to compute and retain any
  /// information related to the current operand. All the memory used to
  /// store this information must be managed by the optimizer.
  void* externalData = nullptr;
};

/// Stream insertion helper that forwards to Operand::dump(Str).
template <class StreamType>
inline StreamType &operator<<(StreamType &Str, const Operand &Op) {
  Op.dump(Str);
  return Str;
}

/// Constant is the abstract base class for constants. All constants are
/// allocated from a global arena and are pooled.
class Constant : public Operand {
  Constant() = delete;
  Constant(const Constant &) = delete;
  Constant &operator=(const Constant &) = delete;

public:
  // Declare the lookup counter to take minimal space in a non-DUMP build.
  using CounterType =
      std::conditional<BuildDefs::dump(), uint64_t, uint8_t>::type;
  void emit(const Cfg *Func) const override { emit(Func->getTarget()); }
  virtual void emit(TargetLowering *Target) const = 0;

  static bool classof(const Operand *Operand) {
    OperandKind Kind = Operand->getKind();
    return Kind >= kConst_Base && Kind <= kConst_Max;
  }

  const GlobalString getLabelName() const { return LabelName; }

  /// Decides whether this immediate should be randomized or pooled. Returns
  /// false by default; only constant integers should truly go through this
  /// method (see the ConstantInteger32 specialization below).
  virtual bool shouldBeRandomizedOrPooled() const { return false; }

  bool getShouldBePooled() const { return ShouldBePooled; }

  // This should be thread-safe because the constant pool lock is acquired
  // before the method is invoked.
  void updateLookupCount() {
    if (!BuildDefs::dump())
      return;
    ++LookupCount;
  }
  CounterType getLookupCount() const { return LookupCount; }
  SizeT hashValue() const override { return 0; }

protected:
  Constant(OperandKind Kind, Type Ty) : Operand(Kind, Ty) {
    // Constants reference no Variables.
    Vars = nullptr;
    NumVars = 0;
  }
  /// Set the ShouldBePooled field to the proper value after the object is fully
  /// initialized.
  void initShouldBePooled();
  GlobalString LabelName;
  /// Whether we should pool this constant. Usually Float/Double and pooled
  /// Integers should be flagged true. Ideally this field would be const, but
  /// it needs to be initialized only after the subclass is fully constructed.
  bool ShouldBePooled = false;
  /// Note: If ShouldBePooled is ever removed from the base class, we will want
  /// to completely disable LookupCount in a non-DUMP build to save space.
  CounterType LookupCount = 0;
};

/// ConstantPrimitive<> wraps a primitive type.
template <typename T, Operand::OperandKind K>
class ConstantPrimitive : public Constant {
  ConstantPrimitive() = delete;
  ConstantPrimitive(const ConstantPrimitive &) = delete;
  ConstantPrimitive &operator=(const ConstantPrimitive &) = delete;

public:
  using PrimType = T;

  /// Arena-allocates a constant, computes its pooling decision, and (only when
  /// pooled) builds its assembly label name.
  static ConstantPrimitive *create(GlobalContext *Ctx, Type Ty,
                                   PrimType Value) {
    auto *Const =
        new (Ctx->allocate<ConstantPrimitive>()) ConstantPrimitive(Ty, Value);
    Const->initShouldBePooled();
    if (Const->getShouldBePooled())
      Const->initName(Ctx);
    return Const;
  }
  PrimType getValue() const { return Value; }
  using Constant::emit;
  void emit(TargetLowering *Target) const final;
  using Constant::dump;
  void dump(const Cfg *, Ostream &Str) const override {
    if (BuildDefs::dump())
      Str << getValue();
  }

  static bool classof(const Operand *Operand) {
    return Operand->getKind() == K;
  }

  SizeT hashValue() const override { return std::hash<PrimType>()(Value); }

  virtual bool shouldBeRandomizedOrPooled() const override { return false; }

private:
  ConstantPrimitive(Type Ty, PrimType Value) : Constant(K, Ty), Value(Value) {}

  /// Builds LabelName for a pooled constant: a type-dependent prefix followed
  /// by the value's raw bytes in hex (and optionally a human-readable suffix).
  void initName(GlobalContext *Ctx) {
    std::string Buffer;
    llvm::raw_string_ostream Str(Buffer);
    // In a non-DUMP build use short prefixes to save space in the emitted
    // labels.
    constexpr bool IsCompact = !BuildDefs::dump();
    if (IsCompact) {
      switch (getType()) {
      case IceType_f32:
        Str << "$F";
        break;
      case IceType_f64:
        Str << "$D";
        break;
      default:
        // For constant pooling diversification
        Str << ".L$" << getType() << "$";
        break;
      }
    } else {
      Str << ".L$" << getType() << "$";
    }
    // Print hex characters byte by byte, starting from the most significant
    // byte. NOTE: This ordering assumes Subzero runs on a little-endian
    // platform. That means the possibility of different label names depending
    // on the endian-ness of the platform where Subzero runs.
    for (unsigned i = 0; i < sizeof(Value); ++i) {
      constexpr unsigned HexWidthChars = 2;
      unsigned Offset = sizeof(Value) - 1 - i;
      Str << llvm::format_hex_no_prefix(
          *(Offset + (const unsigned char *)&Value), HexWidthChars);
    }
    // For a floating-point value in DecorateAsm mode, also append the value in
    // human-readable sprintf form, changing '+' to 'p' and '-' to 'm' to
    // maintain valid asm labels.
    if (BuildDefs::dump() && std::is_floating_point<PrimType>::value &&
        getFlags().getDecorateAsm()) {
      char Buf[30];
      snprintf(Buf, llvm::array_lengthof(Buf), "$%g", (double)Value);
      for (unsigned i = 0; i < llvm::array_lengthof(Buf) && Buf[i]; ++i) {
        if (Buf[i] == '-')
          Buf[i] = 'm';
        else if (Buf[i] == '+')
          Buf[i] = 'p';
      }
      Str << Buf;
    }
    LabelName = GlobalString::createWithString(Ctx, Str.str());
  }

  const PrimType Value;
};

using ConstantInteger32 = ConstantPrimitive<int32_t, Operand::kConstInteger32>;
using ConstantInteger64 = ConstantPrimitive<int64_t, Operand::kConstInteger64>;
using ConstantFloat = ConstantPrimitive<float, Operand::kConstFloat>;
using ConstantDouble = ConstantPrimitive<double, Operand::kConstDouble>;

/// i1 constants dump as true/false; other 32-bit integers dump as numbers.
template <>
inline void ConstantInteger32::dump(const Cfg *, Ostream &Str) const {
  if (!BuildDefs::dump())
    return;
  if (getType() == IceType_i1)
    Str << (getValue() ? "true" : "false");
  else
    Str << static_cast<int32_t>(getValue());
}

// =========== Immediate Randomization and Pooling routines ==============
// Specialization of the template member function for ConstantInteger32
// TODO(stichnot): try to move this specialization into a target-specific file.
template <> inline bool ConstantInteger32::shouldBeRandomizedOrPooled() const {
  uint32_t Threshold = getFlags().getRandomizeAndPoolImmediatesThreshold();
  if (getFlags().getRandomizeAndPoolImmediatesOption() == RPI_None)
    return false;
  if (getType() != IceType_i32 && getType() != IceType_i16 &&
      getType() != IceType_i8)
    return false;
  // The following checks if the signed representation of Value is between
  // -Threshold/2 and +Threshold/2. The int32_t Value is converted to uint32_t
  // for the addition, so a negative Value wraps to a large unsigned number
  // (well-defined for unsigned arithmetic), making a single unsigned compare
  // suffice.
  bool largerThanThreshold = Threshold / 2 + Value >= Threshold;
  return largerThanThreshold;
}

template <>
inline void ConstantInteger64::dump(const Cfg *, Ostream &Str) const {
  if (!BuildDefs::dump())
    return;
  assert(getType() == IceType_i64);
  Str << static_cast<int64_t>(getValue());
}

/// RelocOffset allows symbolic references in ConstantRelocatables' offsets,
/// e.g., 8 + LabelOffset, where label offset is the location (code or data)
/// of a Label that is only determinable during ELF emission.
class RelocOffset final {
  RelocOffset(const RelocOffset &) = delete;
  RelocOffset &operator=(const RelocOffset &) = delete;

public:
  /// Creates an offset whose value will be supplied later via setOffset().
  template <typename T> static RelocOffset *create(T *AllocOwner) {
    return new (AllocOwner->template allocate<RelocOffset>()) RelocOffset();
  }

  /// Creates an offset with a known value.
  static RelocOffset *create(GlobalContext *Ctx, RelocOffsetT Value) {
    return new (Ctx->allocate<RelocOffset>()) RelocOffset(Value);
  }

  /// When Subtract is set, a subsequent setOffset(V) stores -V.
  void setSubtract(bool Value) { Subtract = Value; }
  bool hasOffset() const { return HasOffset; }

  RelocOffsetT getOffset() const {
    assert(HasOffset);
    return Offset;
  }

  /// Sets the offset exactly once; honors the Subtract flag by negating.
  void setOffset(const RelocOffsetT Value) {
    assert(!HasOffset);
    if (Subtract) {
      // Negating the most-negative value would overflow.
      assert(Value != std::numeric_limits<RelocOffsetT>::lowest());
      Offset = -Value;
    } else {
      Offset = Value;
    }
    HasOffset = true;
  }

private:
  RelocOffset() = default;
  explicit RelocOffset(RelocOffsetT Offset) { setOffset(Offset); }

  bool Subtract = false;
  bool HasOffset = false;
  RelocOffsetT Offset;
};

/// RelocatableTuple bundles the parameters that are used to construct an
/// ConstantRelocatable. It is done this way so that ConstantRelocatable can fit
/// into the global constant pool template mechanism.
class RelocatableTuple {
  RelocatableTuple() = delete;
  RelocatableTuple &operator=(const RelocatableTuple &) = delete;

public:
  RelocatableTuple(const RelocOffsetT Offset,
                   const RelocOffsetArray &OffsetExpr, GlobalString Name)
      : Offset(Offset), OffsetExpr(OffsetExpr), Name(Name) {}

  RelocatableTuple(const RelocOffsetT Offset,
                   const RelocOffsetArray &OffsetExpr, GlobalString Name,
                   const std::string &EmitString)
      : Offset(Offset), OffsetExpr(OffsetExpr), Name(Name),
        EmitString(EmitString) {}

  RelocatableTuple(const RelocatableTuple &) = default;

  const RelocOffsetT Offset;
  const RelocOffsetArray OffsetExpr;
  const GlobalString Name;
  const std::string EmitString;
};

bool operator==(const RelocatableTuple &A, const RelocatableTuple &B);

/// ConstantRelocatable represents a symbolic constant combined with a fixed
/// offset.
class ConstantRelocatable : public Constant {
  ConstantRelocatable() = delete;
  ConstantRelocatable(const ConstantRelocatable &) = delete;
  ConstantRelocatable &operator=(const ConstantRelocatable &) = delete;

public:
  template <typename T>
  static ConstantRelocatable *create(T *AllocOwner, Type Ty,
                                     const RelocatableTuple &Tuple) {
    return new (AllocOwner->template allocate<ConstantRelocatable>())
        ConstantRelocatable(Ty, Tuple.Offset, Tuple.OffsetExpr, Tuple.Name,
                            Tuple.EmitString);
  }

  /// Returns the fixed Offset plus the sum of all symbolic offsets; each
  /// RelocOffset in OffsetExpr must have a resolved value by this point.
  RelocOffsetT getOffset() const {
    RelocOffsetT Ret = Offset;
    for (const auto *const OffsetReloc : OffsetExpr) {
      Ret += OffsetReloc->getOffset();
    }
    return Ret;
  }

  const std::string &getEmitString() const { return EmitString; }

  GlobalString getName() const { return Name; }
  using Constant::emit;
  void emit(TargetLowering *Target) const final;
  void emitWithoutPrefix(const TargetLowering *Target,
                         const char *Suffix = "") const;
  using Constant::dump;
  void dump(const Cfg *Func, Ostream &Str) const override;

  static bool classof(const Operand *Operand) {
    OperandKind Kind = Operand->getKind();
    return Kind == kConstRelocatable;
  }

private:
  ConstantRelocatable(Type Ty, const RelocOffsetT Offset,
                      const RelocOffsetArray &OffsetExpr, GlobalString Name,
                      const std::string &EmitString)
      : Constant(kConstRelocatable, Ty), Offset(Offset), OffsetExpr(OffsetExpr),
        Name(Name), EmitString(EmitString) {}

  const RelocOffsetT Offset;         /// fixed, known offset to add
  const RelocOffsetArray OffsetExpr; /// fixed, unknown offset to add
  const GlobalString Name;           /// optional for debug/dump
  const std::string EmitString;      /// optional for textual emission
};

/// ConstantUndef represents an unspecified bit pattern. Although it is legal to
/// lower ConstantUndef to any value, backends should try to make code
/// generation deterministic by lowering ConstantUndefs to 0.
class ConstantUndef : public Constant {
  ConstantUndef() = delete;
  ConstantUndef(const ConstantUndef &) = delete;
  ConstantUndef &operator=(const ConstantUndef &) = delete;

public:
  static ConstantUndef *create(GlobalContext *Ctx, Type Ty) {
    return new (Ctx->allocate<ConstantUndef>()) ConstantUndef(Ty);
  }

  using Constant::emit;
  void emit(TargetLowering *Target) const final;
  using Constant::dump;
  void dump(const Cfg *, Ostream &Str) const override {
    if (BuildDefs::dump())
      Str << "undef";
  }

  static bool classof(const Operand *Operand) {
    return Operand->getKind() == kConstUndef;
  }

private:
  ConstantUndef(Type Ty) : Constant(kConstUndef, Ty) {}
};

/// RegNumT is for holding target-specific register numbers, plus the sentinel
/// value if no register is assigned. Its public ctor allows direct use of enum
/// values, such as RegNumT(Reg_eax), but not things like RegNumT(Reg_eax+1).
/// This is to try to prevent inappropriate assumptions about enum ordering. If
/// needed, the fromInt() method can be used, such as when a RegNumT is based
/// on a bitvector index.
class RegNumT {
public:
  using BaseType = uint32_t;
  RegNumT() = default;
  RegNumT(const RegNumT &) = default;
  // Implicit ctor restricted to enum arguments via enable_if, so plain
  // integers (e.g. Reg_eax+1) are rejected at compile time.
  template <typename AnyEnum>
  RegNumT(AnyEnum Value,
          typename std::enable_if<std::is_enum<AnyEnum>::value, int>::type = 0)
      : Value(Value) {
    validate(Value);
  }
  RegNumT &operator=(const RegNumT &) = default;
  operator unsigned() const { return Value; }
  /// Asserts that the register is valid, i.e. not NoRegisterValue. Note that
  /// the ctor already does the target-specific limit check.
  void assertIsValid() const { assert(Value != NoRegisterValue); }
  static RegNumT fromInt(BaseType Value) { return RegNumT(Value); }
  /// Marks cases that inappropriately add/subtract RegNumT values, and
  /// therefore need to be fixed because they make assumptions about register
  /// enum value ordering. TODO(stichnot): Remove fixme() as soon as all
  /// current uses are fixed/removed.
  static RegNumT fixme(BaseType Value) { return RegNumT(Value); }
  /// The target's staticInit() method should call setLimit() to register the
  /// upper bound of allowable values.
  static void setLimit(BaseType Value) {
    // Make sure it's only called once.
    assert(Limit == 0);
    assert(Value != 0);
    Limit = Value;
  }
  // Define NoRegisterValue as an enum value so that it can be used as an
  // argument for the public ctor if desired.
  enum : BaseType { NoRegisterValue = std::numeric_limits<BaseType>::max() };

  bool hasValue() const { return Value != NoRegisterValue; }
  bool hasNoValue() const { return !hasValue(); }

private:
  BaseType Value = NoRegisterValue;
  // Defined (and set via setLimit()) elsewhere in the project.
  static BaseType Limit;
  /// Private ctor called only by fromInt() and fixme().
  RegNumT(BaseType Value) : Value(Value) { validate(Value); }
  /// The ctor calls this to validate against the target-supplied limit.
  static void validate(BaseType Value) {
    (void)Value;
    assert(Value == NoRegisterValue || Value < Limit);
  }
  /// Disallow operators that inappropriately make assumptions about register
  /// enum value ordering.
  bool operator<(const RegNumT &) = delete;
  bool operator<=(const RegNumT &) = delete;
  bool operator>(const RegNumT &) = delete;
  bool operator>=(const RegNumT &) = delete;
};

/// RegNumBVIter wraps SmallBitVector so that instead of this pattern:
///
///   for (int i = V.find_first(); i != -1; i = V.find_next(i)) {
///     RegNumT RegNum = RegNumT::fromInt(i);
///     ...
///   }
///
/// this cleaner pattern can be used:
///
///   for (RegNumT RegNum : RegNumBVIter(V)) {
///     ...
///   }
template <class B> class RegNumBVIterImpl {
  using T = B;
  // Matches the bitvector's find_first()/find_next() "not found" return.
  static constexpr int Sentinel = -1;
  RegNumBVIterImpl() = delete;

public:
  class Iterator {
    Iterator() = delete;
    Iterator &operator=(const Iterator &) = delete;

  public:
    explicit Iterator(const T &V) : V(V), Current(V.find_first()) {}
    Iterator(const T &V, int Value) : V(V), Current(Value) {}
    Iterator(const Iterator &) = default;
    RegNumT operator*() {
      assert(Current != Sentinel);
      return RegNumT::fromInt(Current);
    }
    Iterator &operator++() {
      assert(Current != Sentinel);
      Current = V.find_next(Current);
      return *this;
    }
    bool operator!=(Iterator &Other) { return Current != Other.Current; }

  private:
    const T &V;
    int Current;
  };

  RegNumBVIterImpl(const RegNumBVIterImpl &) = default;
  RegNumBVIterImpl &operator=(const RegNumBVIterImpl &) = delete;
  explicit RegNumBVIterImpl(const T &V) : V(V) {}
  Iterator begin() { return Iterator(V); }
  Iterator end() { return Iterator(V, Sentinel); }

private:
  const T &V;
};

/// Factory helper so the element type B is deduced at the call site.
template <class B> RegNumBVIterImpl<B> RegNumBVIter(const B &BV) {
  return RegNumBVIterImpl<B>(BV);
}

/// RegWeight is a wrapper for a uint32_t weight value, with a special value
/// that represents infinite weight, and an addWeight() method that ensures that
/// W+infinity=infinity.
class RegWeight {
public:
  using BaseType = uint32_t;
  RegWeight() = default;
  explicit RegWeight(BaseType Weight) : Weight(Weight) {}
  RegWeight(const RegWeight &) = default;
  RegWeight &operator=(const RegWeight &) = default;
  constexpr static BaseType Inf = ~0; /// Force regalloc to give a register
  constexpr static BaseType Zero = 0; /// Force regalloc NOT to give a register
  constexpr static BaseType Max = Inf - 1; /// Max natural weight.
  /// Saturating add: Inf is absorbing, and a natural-weight sum that
  /// overflows (or lands exactly on Inf) clamps to Max.
  /// NOTE(review): assumes Utils::add_overflow returns true on unsigned
  /// overflow — confirm against IceUtils.
  void addWeight(BaseType Delta) {
    if (Delta == Inf)
      Weight = Inf;
    else if (Weight != Inf)
      if (Utils::add_overflow(Weight, Delta, &Weight) || Weight == Inf)
        Weight = Max;
  }
  void addWeight(const RegWeight &Other) { addWeight(Other.Weight); }
  void setWeight(BaseType Val) { Weight = Val; }
  BaseType getWeight() const { return Weight; }

private:
  BaseType Weight = 0;
};
Ostream &operator<<(Ostream &Str, const RegWeight &W);
bool operator<(const RegWeight &A, const RegWeight &B);
bool operator<=(const RegWeight &A, const RegWeight &B);
bool operator==(const RegWeight &A, const RegWeight &B);

/// LiveRange is a set of instruction number intervals representing a variable's
/// live range. Generally there is one interval per basic block where the
/// variable is live, but adjacent intervals get coalesced into a single
/// interval.
class LiveRange {
public:
  using RangeElementType = std::pair<InstNumberT, InstNumberT>;
  /// RangeType is arena-allocated from the Cfg's allocator.
  using RangeType = CfgVector<RangeElementType>;
  LiveRange() = default;
  /// Special constructor for building a kill set. The advantage is that we can
  /// reserve the right amount of space in advance.
  explicit LiveRange(const CfgVector<InstNumberT> &Kills) {
    Range.reserve(Kills.size());
    for (InstNumberT I : Kills)
      addSegment(I, I);
  }
  LiveRange(const LiveRange &) = default;
  LiveRange &operator=(const LiveRange &) = default;

  void reset() {
    Range.clear();
    untrim();
  }
  void addSegment(InstNumberT Start, InstNumberT End, CfgNode *Node = nullptr);
  void addSegment(RangeElementType Segment, CfgNode *Node = nullptr) {
    addSegment(Segment.first, Segment.second, Node);
  }

  bool endsBefore(const LiveRange &Other) const;
  bool overlaps(const LiveRange &Other, bool UseTrimmed = false) const;
  bool overlapsInst(InstNumberT OtherBegin, bool UseTrimmed = false) const;
  bool containsValue(InstNumberT Value, bool IsDest) const;
  bool isEmpty() const { return Range.empty(); }
  // Empty ranges report -1 for both endpoints.
  InstNumberT getStart() const {
    return Range.empty() ? -1 : Range.begin()->first;
  }
  InstNumberT getEnd() const {
    return Range.empty() ? -1 : Range.rbegin()->second;
  }

  void untrim() { TrimmedBegin = Range.begin(); }
  void trim(InstNumberT Lower);

  void dump(Ostream &Str) const;

  SizeT getNumSegments() const { return Range.size(); }

  const RangeType &getSegments() const { return Range; }
  CfgNode *getNodeForSegment(InstNumberT Begin) {
    auto Iter = NodeMap.find(Begin);
    assert(Iter != NodeMap.end());
    return Iter->second;
  }

private:
  RangeType Range;
  CfgUnorderedMap<InstNumberT, CfgNode *> NodeMap;
  /// TrimmedBegin is an optimization for the overlaps() computation.
  /// Since the linear-scan algorithm always calls it as overlaps(Cur) and Cur
  /// advances monotonically according to live range start, we can optimize
  /// overlaps() by ignoring all segments that end before the start of Cur's
  /// range. The linear-scan code enables this by calling trim() on the ranges
  /// of interest as Cur advances. Note that linear-scan also has to initialize
  /// TrimmedBegin at the beginning by calling untrim().
  RangeType::const_iterator TrimmedBegin;
};

Ostream &operator<<(Ostream &Str, const LiveRange &L);

/// Variable represents an operand that is register-allocated or
/// stack-allocated. If it is register-allocated, it will ultimately have a
/// valid RegNum field.
class Variable : public Operand {
  Variable() = delete;
  Variable(const Variable &) = delete;
  Variable &operator=(const Variable &) = delete;

  enum RegRequirement : uint8_t {
    RR_MayHaveRegister,
    RR_MustHaveRegister,
    RR_MustNotHaveRegister,
  };

public:
  static Variable *create(Cfg *Func, Type Ty, SizeT Index) {
    return new (Func->allocate<Variable>())
        Variable(Func, kVariable, Ty, Index);
  }

  SizeT getIndex() const { return Number; }
  /// Returns the explicit name if one was set, otherwise a synthetic
  /// "__<index>" name.
  std::string getName() const {
    if (Name.hasStdString())
      return Name.toString();
    return "__" + std::to_string(getIndex());
  }
  /// Sets the name; an empty NewName is ignored.
  virtual void setName(const Cfg *Func, const std::string &NewName) {
    if (NewName.empty())
      return;
    Name = VariableString::createWithString(Func, NewName);
  }

  bool getIsArg() const { return IsArgument; }
  virtual void setIsArg(bool Val = true) { IsArgument = Val; }
  bool getIsImplicitArg() const { return IsImplicitArgument; }
  void setIsImplicitArg(bool Val = true) { IsImplicitArgument = Val; }

  void setIgnoreLiveness() { IgnoreLiveness = true; }
  /// Rematerializable variables are always ignored for liveness purposes.
  bool getIgnoreLiveness() const {
    return IgnoreLiveness || IsRematerializable;
  }

  /// Returns true if the variable either has a definite stack offset, or has
  /// the UndeterminedStackOffset such that it is guaranteed to have a definite
  /// stack offset at emission time.
  bool hasStackOffset() const { return StackOffset != InvalidStackOffset; }
  /// Returns true if the variable has a stack offset that is known at this
  /// time.
  bool hasKnownStackOffset() const {
    return StackOffset != InvalidStackOffset &&
           StackOffset != UndeterminedStackOffset;
  }
  int32_t getStackOffset() const {
    assert(hasKnownStackOffset());
    return StackOffset;
  }
  void setStackOffset(int32_t Offset) { StackOffset = Offset; }
  /// Set a "placeholder" stack offset before its actual offset has been
  /// determined.
  void setHasStackOffset() {
    if (!hasStackOffset())
      StackOffset = UndeterminedStackOffset;
  }
  /// Returns the variable's stack offset in symbolic form, to improve
  /// readability in DecorateAsm mode.
  std::string getSymbolicStackOffset() const {
    if (!BuildDefs::dump())
      return "";
    return ".L$lv$" + getName();
  }

  bool hasReg() const { return getRegNum().hasValue(); }
  RegNumT getRegNum() const { return RegNum; }
  void setRegNum(RegNumT NewRegNum) {
    // Regnum shouldn't be set more than once.
    assert(!hasReg() || RegNum == NewRegNum);
    RegNum = NewRegNum;
  }
  bool hasRegTmp() const { return getRegNumTmp().hasValue(); }
  RegNumT getRegNumTmp() const { return RegNumTmp; }
  void setRegNumTmp(RegNumT NewRegNum) { RegNumTmp = NewRegNum; }

  RegWeight getWeight(const Cfg *Func) const;

  void setMustHaveReg() { RegRequirement = RR_MustHaveRegister; }
  bool mustHaveReg() const { return RegRequirement == RR_MustHaveRegister; }
  void setMustNotHaveReg() { RegRequirement = RR_MustNotHaveRegister; }
  bool mustNotHaveReg() const {
    return RegRequirement == RR_MustNotHaveRegister;
  }
  bool mayHaveReg() const { return RegRequirement == RR_MayHaveRegister; }
  /// Marks the variable as rematerializable from a base register plus a fixed
  /// offset (e.g. stack/frame pointer + offset).
  void setRematerializable(RegNumT NewRegNum, int32_t NewOffset) {
    IsRematerializable = true;
    setRegNum(NewRegNum);
    setStackOffset(NewOffset);
    setMustHaveReg();
  }
  bool isRematerializable() const { return IsRematerializable; }

  void setRegClass(uint8_t RC) { RegisterClass = static_cast<RegClass>(RC); }
  RegClass getRegClass() const { return RegisterClass; }

  LiveRange &getLiveRange() { return Live; }
  const LiveRange &getLiveRange() const { return Live; }
  void setLiveRange(const LiveRange &Range) { Live = Range; }
  void resetLiveRange() { Live.reset(); }
  void addLiveRange(InstNumberT Start, InstNumberT End,
                    CfgNode *Node = nullptr) {
    assert(!getIgnoreLiveness());
    Live.addSegment(Start, End, Node);
  }
  void trimLiveRange(InstNumberT Start) { Live.trim(Start); }
  void untrimLiveRange() { Live.untrim(); }
  bool rangeEndsBefore(const Variable *Other) const {
    return Live.endsBefore(Other->Live);
  }
  bool rangeOverlaps(const Variable *Other) const {
    constexpr bool UseTrimmed = true;
    return Live.overlaps(Other->Live, UseTrimmed);
  }
  bool rangeOverlapsStart(const Variable *Other) const {
    constexpr bool UseTrimmed = true;
    return Live.overlapsInst(Other->Live.getStart(), UseTrimmed);
  }

  /// Creates a temporary copy of the variable with a different type. Used
  /// primarily for syntactic correctness of textual assembly emission. Note
  /// that only basic information is copied, in particular not IsArgument,
  /// IsImplicitArgument, IgnoreLiveness, RegNumTmp, Live, LoVar, HiVar,
  /// VarsReal. If NewRegNum.hasValue(), then that register assignment is made
  /// instead of copying the existing assignment.
  const Variable *asType(const Cfg *Func, Type Ty, RegNumT NewRegNum) const;

  void emit(const Cfg *Func) const override;
  using Operand::dump;
  void dump(const Cfg *Func, Ostream &Str) const override;

  /// Return reg num of base register, if different from stack/frame register.
  virtual RegNumT getBaseRegNum() const { return RegNumT(); }

  /// Access the LinkedTo field.
  void setLinkedTo(Variable *Var) { LinkedTo = Var; }
  Variable *getLinkedTo() const { return LinkedTo; }
  /// Follow the LinkedTo chain up to the furthest ancestor.
  Variable *getLinkedToRoot() const {
    Variable *Root = LinkedTo;
    if (Root == nullptr)
      return nullptr;
    while (Root->LinkedTo != nullptr)
      Root = Root->LinkedTo;
    return Root;
  }
  /// Follow the LinkedTo chain up to the furthest stack-allocated ancestor.
  /// This is only certain to be accurate after register allocation and stack
  /// slot assignment have completed.
  Variable *getLinkedToStackRoot() const {
    Variable *FurthestStackVar = nullptr;
    for (Variable *Root = LinkedTo; Root != nullptr; Root = Root->LinkedTo) {
      if (!Root->hasReg() && Root->hasStackOffset()) {
        FurthestStackVar = Root;
      }
    }
    return FurthestStackVar;
  }

  static bool classof(const Operand *Operand) {
    OperandKind Kind = Operand->getKind();
    return Kind >= kVariable && Kind <= kVariable_Max;
  }

  SizeT hashValue() const override { return std::hash<SizeT>()(getIndex()); }

  // NOTE(review): these accessors and the externalData member below duplicate
  // (and shadow) identical declarations on the Operand base class — consider
  // removing one copy; verify no caller depends on the shadowed storage.
  inline void* getExternalData() const { return externalData; }
  inline void setExternalData(void* data) { externalData = data; }

protected:
  Variable(const Cfg *Func, OperandKind K, Type Ty, SizeT Index)
      : Operand(K, Ty), Number(Index),
        Name(VariableString::createWithoutString(Func)),
        RegisterClass(static_cast<RegClass>(Ty)) {
    // A Variable references exactly one Variable: itself.
    Vars = VarsReal;
    Vars[0] = this;
    NumVars = 1;
  }
  /// Number is unique across all variables, and is used as a (bit)vector index
  /// for liveness analysis.
  const SizeT Number;
  VariableString Name;
  bool IsArgument = false;
  bool IsImplicitArgument = false;
  /// IgnoreLiveness means that the variable should be ignored when constructing
  /// and validating live ranges. This is usually reserved for the stack
  /// pointer and other physical registers specifically referenced by name.
  bool IgnoreLiveness = false;
  // If IsRematerializable, RegNum keeps track of which register (stack or frame
  // pointer), and StackOffset is the known offset from that register.
  bool IsRematerializable = false;
  RegRequirement RegRequirement = RR_MayHaveRegister;
  RegClass RegisterClass;
  /// RegNum is the allocated register, (as long as RegNum.hasValue() is true).
  RegNumT RegNum;
  /// RegNumTmp is the tentative assignment during register allocation.
  RegNumT RegNumTmp;
  static constexpr int32_t InvalidStackOffset =
      std::numeric_limits<int32_t>::min();
  static constexpr int32_t UndeterminedStackOffset =
      1 + std::numeric_limits<int32_t>::min();
  /// StackOffset is the canonical location on stack (only if
  /// RegNum.hasNoValue() || IsArgument).
  int32_t StackOffset = InvalidStackOffset;
  LiveRange Live;
  /// VarsReal (and Operand::Vars) are set up such that Vars[0] == this.
  Variable *VarsReal[1];
  /// This Variable may be "linked" to another Variable, such that if neither
  /// Variable gets a register, they are guaranteed to share a stack location.
  Variable *LinkedTo = nullptr;
  /// External data can be set by an optimizer to compute and retain any
  /// information related to the current variable. All the memory used to
  /// store this information must be managed by the optimizer.
  void* externalData = nullptr;
};

// Variable64On32 represents a 64-bit variable on a 32-bit architecture. In
// this situation the variable must be split into a low and a high word.
917 class Variable64On32 : public Variable { 918 Variable64On32() = delete; 919 Variable64On32(const Variable64On32 &) = delete; 920 Variable64On32 &operator=(const Variable64On32 &) = delete; 921 922 public: 923 static Variable64On32 *create(Cfg *Func, Type Ty, SizeT Index) { 924 return new (Func->allocate<Variable64On32>()) 925 Variable64On32(Func, kVariable64On32, Ty, Index); 926 } 927 928 void setName(const Cfg *Func, const std::string &NewName) override { 929 Variable::setName(Func, NewName); 930 if (LoVar && HiVar) { 931 LoVar->setName(Func, getName() + "__lo"); 932 HiVar->setName(Func, getName() + "__hi"); 933 } 934 } 935 936 void setIsArg(bool Val = true) override { 937 Variable::setIsArg(Val); 938 if (LoVar && HiVar) { 939 LoVar->setIsArg(Val); 940 HiVar->setIsArg(Val); 941 } 942 } 943 944 Variable *getLo() const { 945 assert(LoVar != nullptr); 946 return LoVar; 947 } 948 Variable *getHi() const { 949 assert(HiVar != nullptr); 950 return HiVar; 951 } 952 953 void initHiLo(Cfg *Func) { 954 assert(LoVar == nullptr); 955 assert(HiVar == nullptr); 956 LoVar = Func->makeVariable(IceType_i32); 957 HiVar = Func->makeVariable(IceType_i32); 958 LoVar->setIsArg(getIsArg()); 959 HiVar->setIsArg(getIsArg()); 960 if (BuildDefs::dump()) { 961 LoVar->setName(Func, getName() + "__lo"); 962 HiVar->setName(Func, getName() + "__hi"); 963 } 964 } 965 966 static bool classof(const Operand *Operand) { 967 OperandKind Kind = Operand->getKind(); 968 return Kind == kVariable64On32; 969 } 970 971 protected: 972 Variable64On32(const Cfg *Func, OperandKind K, Type Ty, SizeT Index) 973 : Variable(Func, K, Ty, Index) { 974 assert(typeWidthInBytes(Ty) == 8); 975 } 976 977 Variable *LoVar = nullptr; 978 Variable *HiVar = nullptr; 979 }; 980 981 // VariableVecOn32 represents a 128-bit vector variable on a 32-bit 982 // architecture. In this case the variable must be split into 4 containers. 
983 class VariableVecOn32 : public Variable { 984 VariableVecOn32() = delete; 985 VariableVecOn32(const VariableVecOn32 &) = delete; 986 VariableVecOn32 &operator=(const VariableVecOn32 &) = delete; 987 988 public: 989 static VariableVecOn32 *create(Cfg *Func, Type Ty, SizeT Index) { 990 return new (Func->allocate<VariableVecOn32>()) 991 VariableVecOn32(Func, kVariableVecOn32, Ty, Index); 992 } 993 994 void setName(const Cfg *Func, const std::string &NewName) override { 995 Variable::setName(Func, NewName); 996 if (!Containers.empty()) { 997 for (SizeT i = 0; i < ContainersPerVector; ++i) { 998 Containers[i]->setName(Func, getName() + "__cont" + std::to_string(i)); 999 } 1000 } 1001 } 1002 1003 void setIsArg(bool Val = true) override { 1004 Variable::setIsArg(Val); 1005 for (Variable *Var : Containers) { 1006 Var->setIsArg(getIsArg()); 1007 } 1008 } 1009 1010 const VarList &getContainers() const { return Containers; } 1011 1012 void initVecElement(Cfg *Func) { 1013 for (SizeT i = 0; i < ContainersPerVector; ++i) { 1014 Variable *Var = Func->makeVariable(IceType_i32); 1015 Var->setIsArg(getIsArg()); 1016 if (BuildDefs::dump()) { 1017 Var->setName(Func, getName() + "__cont" + std::to_string(i)); 1018 } 1019 Containers.push_back(Var); 1020 } 1021 } 1022 1023 static bool classof(const Operand *Operand) { 1024 OperandKind Kind = Operand->getKind(); 1025 return Kind == kVariableVecOn32; 1026 } 1027 1028 // A 128-bit vector value is mapped onto 4 32-bit register values. 
1029 static constexpr SizeT ContainersPerVector = 4; 1030 1031 protected: 1032 VariableVecOn32(const Cfg *Func, OperandKind K, Type Ty, SizeT Index) 1033 : Variable(Func, K, Ty, Index) { 1034 assert(typeWidthInBytes(Ty) == 1035 ContainersPerVector * typeWidthInBytes(IceType_i32)); 1036 } 1037 1038 VarList Containers; 1039 }; 1040 1041 enum MetadataKind { 1042 VMK_Uses, /// Track only uses, not defs 1043 VMK_SingleDefs, /// Track uses+defs, but only record single def 1044 VMK_All /// Track uses+defs, including full def list 1045 }; 1046 using InstDefList = CfgVector<const Inst *>; 1047 1048 /// VariableTracking tracks the metadata for a single variable. It is 1049 /// only meant to be used internally by VariablesMetadata. 1050 class VariableTracking { 1051 public: 1052 enum MultiDefState { 1053 // TODO(stichnot): Consider using just a simple counter. 1054 MDS_Unknown, 1055 MDS_SingleDef, 1056 MDS_MultiDefSingleBlock, 1057 MDS_MultiDefMultiBlock 1058 }; 1059 enum MultiBlockState { 1060 MBS_Unknown, // Not yet initialized, so be conservative 1061 MBS_NoUses, // Known to have no uses 1062 MBS_SingleBlock, // All uses in are in a single block 1063 MBS_MultiBlock // Several uses across several blocks 1064 }; 1065 VariableTracking() = default; 1066 VariableTracking(const VariableTracking &) = default; 1067 VariableTracking &operator=(const VariableTracking &) = default; 1068 VariableTracking(MultiBlockState MultiBlock) : MultiBlock(MultiBlock) {} 1069 MultiDefState getMultiDef() const { return MultiDef; } 1070 MultiBlockState getMultiBlock() const { return MultiBlock; } 1071 const Inst *getFirstDefinitionSingleBlock() const; 1072 const Inst *getSingleDefinition() const; 1073 const Inst *getFirstDefinition() const; 1074 const InstDefList &getLatterDefinitions() const { return Definitions; } 1075 CfgNode *getNode() const { return SingleUseNode; } 1076 RegWeight getUseWeight() const { return UseWeight; } 1077 void markUse(MetadataKind TrackingKind, const Inst *Instr, CfgNode 
*Node, 1078 bool IsImplicit); 1079 void markDef(MetadataKind TrackingKind, const Inst *Instr, CfgNode *Node); 1080 1081 private: 1082 MultiDefState MultiDef = MDS_Unknown; 1083 MultiBlockState MultiBlock = MBS_Unknown; 1084 CfgNode *SingleUseNode = nullptr; 1085 CfgNode *SingleDefNode = nullptr; 1086 /// All definitions of the variable are collected in Definitions[] (except for 1087 /// the earliest definition), in increasing order of instruction number. 1088 InstDefList Definitions; /// Only used if Kind==VMK_All 1089 const Inst *FirstOrSingleDefinition = nullptr; 1090 RegWeight UseWeight; 1091 }; 1092 1093 /// VariablesMetadata analyzes and summarizes the metadata for the complete set 1094 /// of Variables. 1095 class VariablesMetadata { 1096 VariablesMetadata() = delete; 1097 VariablesMetadata(const VariablesMetadata &) = delete; 1098 VariablesMetadata &operator=(const VariablesMetadata &) = delete; 1099 1100 public: 1101 explicit VariablesMetadata(const Cfg *Func) : Func(Func) {} 1102 /// Initialize the state by traversing all instructions/variables in the CFG. 1103 void init(MetadataKind TrackingKind); 1104 /// Add a single node. This is called by init(), and can be called 1105 /// incrementally from elsewhere, e.g. after edge-splitting. 1106 void addNode(CfgNode *Node); 1107 MetadataKind getKind() const { return Kind; } 1108 /// Returns whether the given Variable is tracked in this object. It should 1109 /// only return false if changes were made to the CFG after running init(), in 1110 /// which case the state is stale and the results shouldn't be trusted (but it 1111 /// may be OK e.g. for dumping). 1112 bool isTracked(const Variable *Var) const { 1113 return Var->getIndex() < Metadata.size(); 1114 } 1115 1116 /// Returns whether the given Variable has multiple definitions. 1117 bool isMultiDef(const Variable *Var) const; 1118 /// Returns the first definition instruction of the given Variable. 
This is 1119 /// only valid for variables whose definitions are all within the same block, 1120 /// e.g. T after the lowered sequence "T=B; T+=C; A=T", for which 1121 /// getFirstDefinitionSingleBlock(T) would return the "T=B" instruction. For 1122 /// variables with definitions span multiple blocks, nullptr is returned. 1123 const Inst *getFirstDefinitionSingleBlock(const Variable *Var) const; 1124 /// Returns the definition instruction of the given Variable, when the 1125 /// variable has exactly one definition. Otherwise, nullptr is returned. 1126 const Inst *getSingleDefinition(const Variable *Var) const; 1127 /// getFirstDefinition() and getLatterDefinitions() are used together to 1128 /// return the complete set of instructions that define the given Variable, 1129 /// regardless of whether the definitions are within the same block (in 1130 /// contrast to getFirstDefinitionSingleBlock). 1131 const Inst *getFirstDefinition(const Variable *Var) const; 1132 const InstDefList &getLatterDefinitions(const Variable *Var) const; 1133 1134 /// Returns whether the given Variable is live across multiple blocks. Mainly, 1135 /// this is used to partition Variables into single-block versus multi-block 1136 /// sets for leveraging sparsity in liveness analysis, and for implementing 1137 /// simple stack slot coalescing. As a special case, function arguments are 1138 /// always considered multi-block because they are live coming into the entry 1139 /// block. 1140 bool isMultiBlock(const Variable *Var) const; 1141 bool isSingleBlock(const Variable *Var) const; 1142 /// Returns the node that the given Variable is used in, assuming 1143 /// isMultiBlock() returns false. Otherwise, nullptr is returned. 1144 CfgNode *getLocalUseNode(const Variable *Var) const; 1145 1146 /// Returns the total use weight computed as the sum of uses multiplied by a 1147 /// loop nest depth factor for each use. 
1148 RegWeight getUseWeight(const Variable *Var) const; 1149 1150 private: 1151 const Cfg *Func; 1152 MetadataKind Kind; 1153 CfgVector<VariableTracking> Metadata; 1154 static const InstDefList *NoDefinitions; 1155 }; 1156 1157 /// BooleanVariable represents a variable that was the zero-extended result of a 1158 /// comparison. It maintains a pointer to its original i1 source so that the 1159 /// WASM frontend can avoid adding needless comparisons. 1160 class BooleanVariable : public Variable { 1161 BooleanVariable() = delete; 1162 BooleanVariable(const BooleanVariable &) = delete; 1163 BooleanVariable &operator=(const BooleanVariable &) = delete; 1164 1165 BooleanVariable(const Cfg *Func, OperandKind K, Type Ty, SizeT Index) 1166 : Variable(Func, K, Ty, Index) {} 1167 1168 public: 1169 static BooleanVariable *create(Cfg *Func, Type Ty, SizeT Index) { 1170 return new (Func->allocate<BooleanVariable>()) 1171 BooleanVariable(Func, kVariable, Ty, Index); 1172 } 1173 1174 virtual Variable *asBoolean() { return BoolSource; } 1175 1176 void setBoolSource(Variable *Src) { BoolSource = Src; } 1177 1178 static bool classof(const Operand *Operand) { 1179 return Operand->getKind() == kVariableBoolean; 1180 } 1181 1182 private: 1183 Variable *BoolSource = nullptr; 1184 }; 1185 1186 } // end of namespace Ice 1187 1188 #endif // SUBZERO_SRC_ICEOPERAND_H 1189