//===- subzero/src/IceOperand.cpp - High-level operand implementation -----===//
//
// The Subzero Code Generator
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
///
/// \file
/// \brief Implements the Operand class and its target-independent subclasses,
/// primarily for the methods of the Variable class.
///
//===----------------------------------------------------------------------===//

#include "IceOperand.h"

#include "IceCfg.h"
#include "IceCfgNode.h"
#include "IceInst.h"
#include "IceInstVarIter.h"
#include "IceMemory.h"
#include "IceTargetLowering.h" // dumping stack/frame pointer register

namespace Ice {

// Caches the target's pooling decision for this constant so it only needs to
// be computed once.
void Constant::initShouldBePooled() {
  ShouldBePooled = TargetLowering::shouldBePooled(this);
}

// Equality for relocatable tuples (symbol name + offset expression).
bool operator==(const RelocatableTuple &A, const RelocatableTuple &B) {
  // A and B are the same if:
  // (1) they have the same name; and
  // (2) they have the same offset.
  //
  // (1) is trivial to check, but (2) requires some care.
  //
  // For (2):
  // if A and B have known offsets (i.e., no symbolic references), then
  // A == B -> A.Offset == B.Offset.
  // else each element i in A.OffsetExpr[i] must be the same (or have the same
  // value) as B.OffsetExpr[i].
  if (A.Name != B.Name) {
    return false;
  }

  // First try to resolve both offset expressions to concrete totals. If any
  // element lacks a known offset, fall through to the element-wise comparison
  // below.
  bool BothHaveKnownOffsets = true;
  RelocOffsetT OffsetA = A.Offset;
  RelocOffsetT OffsetB = B.Offset;
  for (SizeT i = 0; i < A.OffsetExpr.size() && BothHaveKnownOffsets; ++i) {
    BothHaveKnownOffsets = A.OffsetExpr[i]->hasOffset();
    if (BothHaveKnownOffsets) {
      OffsetA += A.OffsetExpr[i]->getOffset();
    }
  }
  for (SizeT i = 0; i < B.OffsetExpr.size() && BothHaveKnownOffsets; ++i) {
    BothHaveKnownOffsets = B.OffsetExpr[i]->hasOffset();
    if (BothHaveKnownOffsets) {
      OffsetB += B.OffsetExpr[i]->getOffset();
    }
  }
  if (BothHaveKnownOffsets) {
    // Both have known offsets (i.e., no unresolved symbolic references), so
    // A == B -> A.Offset == B.Offset.
    return OffsetA == OffsetB;
  }

  // Otherwise, A and B are not the same if their OffsetExpr's have different
  // sizes.
  if (A.OffsetExpr.size() != B.OffsetExpr.size()) {
    return false;
  }

  // If the OffsetExprs' sizes are the same, then
  // for each i in OffsetExprSize:
  for (SizeT i = 0; i < A.OffsetExpr.size(); ++i) {
    const auto *const RelocOffsetA = A.OffsetExpr[i];
    const auto *const RelocOffsetB = B.OffsetExpr[i];
    if (RelocOffsetA->hasOffset() && RelocOffsetB->hasOffset()) {
      // A.OffsetExpr[i].Offset == B.OffsetExpr[i].Offset iff they are both
      // defined;
      if (RelocOffsetA->getOffset() != RelocOffsetB->getOffset()) {
        return false;
      }
    } else if (RelocOffsetA != RelocOffsetB) {
      // or, if they are undefined, then the RelocOffsets must be the same
      // object (pointer identity).
      return false;
    }
  }

  return true;
}

RegNumT::BaseType RegNumT::Limit = 0;

// RegWeight ordering is defined entirely in terms of the underlying weight;
// <= and == are derived from < so there is a single source of truth.
bool operator<(const RegWeight &A, const RegWeight &B) {
  return A.getWeight() < B.getWeight();
}
bool operator<=(const RegWeight &A, const RegWeight &B) { return !(B < A); }
bool operator==(const RegWeight &A, const RegWeight &B) {
  return !(B < A) && !(A < B);
}

// Appends the segment [Start, End) to the live range, merging with the
// previous segment when they abut (Start == previous End). Segments are
// assumed to be added in increasing order of Start.
void LiveRange::addSegment(InstNumberT Start, InstNumberT End, CfgNode *Node) {
  if (getFlags().getSplitGlobalVars()) {
    // Disable merging to make sure a live range 'segment' has a single node.
    // Might be possible to enable when the target segment has the same node.
    assert(NodeMap.find(Start) == NodeMap.end());
    NodeMap[Start] = Node;
  } else {
    if (!Range.empty()) {
      // Check for merge opportunity.
      InstNumberT CurrentEnd = Range.back().second;
      assert(Start >= CurrentEnd);
      if (Start == CurrentEnd) {
        Range.back().second = End;
        return;
      }
    }
  }
  Range.push_back(RangeElementType(Start, End));
}

// Returns true if this live range ends before Other's live range starts. This
// means that the highest instruction number in this live range is less than or
// equal to the lowest instruction number of the Other live range.
bool LiveRange::endsBefore(const LiveRange &Other) const {
  // Neither range should be empty, but let's be graceful.
  if (Range.empty() || Other.Range.empty())
    return true;
  InstNumberT MyEnd = (*Range.rbegin()).second;
  InstNumberT OtherStart = (*Other.Range.begin()).first;
  return MyEnd <= OtherStart;
}

// Returns true if there is any overlap between the two live ranges.
bool LiveRange::overlaps(const LiveRange &Other, bool UseTrimmed) const {
  // Do a two-finger walk through the two sorted lists of segments.
  auto I1 = (UseTrimmed ? TrimmedBegin : Range.begin()),
       I2 = (UseTrimmed ? Other.TrimmedBegin : Other.Range.begin());
  auto E1 = Range.end(), E2 = Other.Range.end();
  while (I1 != E1 && I2 != E2) {
    // Segments are half-open [first, second), so a segment that ends at or
    // before the other's start does not overlap; advance it.
    if (I1->second <= I2->first) {
      ++I1;
      continue;
    }
    if (I2->second <= I1->first) {
      ++I2;
      continue;
    }
    return true;
  }
  return false;
}

// Returns true if the single-instruction range [OtherBegin, OtherBegin+1)
// overlaps this live range.
bool LiveRange::overlapsInst(InstNumberT OtherBegin, bool UseTrimmed) const {
  bool Result = false;
  for (auto I = (UseTrimmed ? TrimmedBegin : Range.begin()), E = Range.end();
       I != E; ++I) {
    // Segments are sorted, so once OtherBegin falls before a segment's start
    // there can be no overlap with any later segment.
    if (OtherBegin < I->first) {
      Result = false;
      break;
    }
    if (OtherBegin < I->second) {
      Result = true;
      break;
    }
  }
  // This is an equivalent but less efficient implementation. It's expensive
  // enough that we wouldn't want to run it under any build, but it could be
  // enabled if e.g. the LiveRange implementation changes and extra testing is
  // needed.
  if (BuildDefs::extraValidation()) {
    LiveRange Temp;
    Temp.addSegment(OtherBegin, OtherBegin + 1);
    bool Validation = overlaps(Temp);
    (void)Validation;
    assert(Result == Validation);
  }
  return Result;
}

// Returns true if the live range contains the given instruction number. This
// is only used for validating the live range calculation. The IsDest argument
// indicates whether the Variable being tested is used in the Dest position (as
// opposed to a Src position).
// Returns true if Value lies in some segment [first, second). A Src use is
// additionally considered contained when Value == second (the use happens "at"
// the segment end), whereas a Dest def is not.
bool LiveRange::containsValue(InstNumberT Value, bool IsDest) const {
  for (const RangeElementType &I : Range) {
    if (I.first <= Value &&
        (Value < I.second || (!IsDest && Value == I.second)))
      return true;
  }
  return false;
}

// Advances TrimmedBegin past all segments that end at or before Lower, so
// subsequent UseTrimmed queries skip segments already known to be dead.
void LiveRange::trim(InstNumberT Lower) {
  while (TrimmedBegin != Range.end() && TrimmedBegin->second <= Lower)
    ++TrimmedBegin;
}

// Returns a copy of this Variable with the given type (and optionally a new
// register number), for dump purposes only.
const Variable *Variable::asType(const Cfg *Func, Type Ty,
                                 RegNumT NewRegNum) const {
  // Note: This returns a Variable, even if the "this" object is a subclass of
  // Variable.
  if (!BuildDefs::dump() || getType() == Ty)
    return this;
  static constexpr SizeT One = 1;
  // Placement-new into Cfg-local storage; the allocator owns the lifetime.
  auto *V = new (CfgLocalAllocator<Variable>().allocate(One))
      Variable(Func, kVariable, Ty, Number);
  V->Name = Name;
  V->RegNum = NewRegNum.hasValue() ? NewRegNum : RegNum;
  V->StackOffset = StackOffset;
  V->LinkedTo = LinkedTo;
  return V;
}

// Register-allocation weight: forced-register variables are infinitely heavy,
// forced-stack variables are weightless, otherwise use the metadata's
// use-count-based weight.
RegWeight Variable::getWeight(const Cfg *Func) const {
  if (mustHaveReg())
    return RegWeight(RegWeight::Inf);
  if (mustNotHaveReg())
    return RegWeight(RegWeight::Zero);
  return Func->getVMetadata()->getUseWeight(this);
}

// Records a use of the variable in Node, updating the use weight and the
// single-block/multi-block classification.
void VariableTracking::markUse(MetadataKind TrackingKind, const Inst *Instr,
                               CfgNode *Node, bool IsImplicit) {
  (void)TrackingKind;

  // Increment the use weight depending on the loop nest depth. The weight is
  // exponential in the nest depth as inner loops are expected to be executed
  // an exponentially greater number of times.
  constexpr uint32_t LogLoopTripCountEstimate = 2; // 2^2 = 4
  // Clamp the shift so the weight computation cannot overflow uint32_t.
  constexpr SizeT MaxShift = sizeof(uint32_t) * CHAR_BIT - 1;
  constexpr SizeT MaxLoopNestDepth = MaxShift / LogLoopTripCountEstimate;
  const uint32_t LoopNestDepth =
      std::min(Node->getLoopNestDepth(), MaxLoopNestDepth);
  const uint32_t ThisUseWeight = uint32_t(1)
                                 << LoopNestDepth * LogLoopTripCountEstimate;
  UseWeight.addWeight(ThisUseWeight);

  // Already maximally conservative; nothing further to learn.
  if (MultiBlock == MBS_MultiBlock)
    return;
  // TODO(stichnot): If the use occurs as a source operand in the first
  // instruction of the block, and its definition is in this block's only
  // predecessor, we might consider not marking this as a separate use. This
  // may also apply if it's the first instruction of the block that actually
  // uses a Variable.
  assert(Node);
  bool MakeMulti = false;
  if (IsImplicit)
    MakeMulti = true;
  // A phi source variable conservatively needs to be marked as multi-block,
  // even if its definition is in the same block. This is because there can be
  // additional control flow before branching back to this node, and the
  // variable is live throughout those nodes.
  if (Instr && llvm::isa<InstPhi>(Instr))
    MakeMulti = true;

  if (!MakeMulti) {
    switch (MultiBlock) {
    case MBS_Unknown:
    case MBS_NoUses:
      // First observed use: provisionally single-block in this node.
      MultiBlock = MBS_SingleBlock;
      SingleUseNode = Node;
      break;
    case MBS_SingleBlock:
      // A use in a different node promotes to multi-block.
      if (SingleUseNode != Node)
        MakeMulti = true;
      break;
    case MBS_MultiBlock:
      break;
    }
  }

  if (MakeMulti) {
    MultiBlock = MBS_MultiBlock;
    SingleUseNode = nullptr;
  }
}

// Records a definition of the variable in Node, updating the definition list
// and the single-def/multi-def classification. Every def also counts as a use
// (via markUse) for weight/liveness purposes.
void VariableTracking::markDef(MetadataKind TrackingKind, const Inst *Instr,
                               CfgNode *Node) {
  // TODO(stichnot): If the definition occurs in the last instruction of the
  // block, consider not marking this as a separate use. But be careful not to
  // omit all uses of the variable if markDef() and markUse() both use this
  // optimization.
  assert(Node);
  // Verify that instructions are added in increasing order.
  if (BuildDefs::asserts()) {
    if (TrackingKind == VMK_All) {
      const Inst *LastInstruction =
          Definitions.empty() ? FirstOrSingleDefinition : Definitions.back();
      (void)LastInstruction;
      assert(LastInstruction == nullptr ||
             Instr->getNumber() >= LastInstruction->getNumber());
    }
  }
  constexpr bool IsImplicit = false;
  markUse(TrackingKind, Instr, Node, IsImplicit);
  if (TrackingKind == VMK_Uses)
    return;
  // FirstOrSingleDefinition holds the first def; later defs (if tracked with
  // VMK_All) accumulate in Definitions.
  if (FirstOrSingleDefinition == nullptr)
    FirstOrSingleDefinition = Instr;
  else if (TrackingKind == VMK_All)
    Definitions.push_back(Instr);
  switch (MultiDef) {
  case MDS_Unknown:
    assert(SingleDefNode == nullptr);
    MultiDef = MDS_SingleDef;
    SingleDefNode = Node;
    break;
  case MDS_SingleDef:
    assert(SingleDefNode);
    if (Node == SingleDefNode) {
      MultiDef = MDS_MultiDefSingleBlock;
    } else {
      MultiDef = MDS_MultiDefMultiBlock;
      SingleDefNode = nullptr;
    }
    break;
  case MDS_MultiDefSingleBlock:
    assert(SingleDefNode);
    if (Node != SingleDefNode) {
      MultiDef = MDS_MultiDefMultiBlock;
      SingleDefNode = nullptr;
    }
    break;
  case MDS_MultiDefMultiBlock:
    assert(SingleDefNode == nullptr);
    break;
  }
}

// Returns the first definition when all defs are confined to a single block,
// otherwise nullptr.
const Inst *VariableTracking::getFirstDefinitionSingleBlock() const {
  switch (MultiDef) {
  case MDS_Unknown:
  case MDS_MultiDefMultiBlock:
    return nullptr;
  case MDS_SingleDef:
  case MDS_MultiDefSingleBlock:
    assert(FirstOrSingleDefinition);
    return FirstOrSingleDefinition;
  }
  return nullptr;
}

// Returns the definition only when there is exactly one, otherwise nullptr.
const Inst *VariableTracking::getSingleDefinition() const {
  switch (MultiDef) {
  case MDS_Unknown:
  case MDS_MultiDefMultiBlock:
  case MDS_MultiDefSingleBlock:
    return nullptr;
  case MDS_SingleDef:
    assert(FirstOrSingleDefinition);
    return FirstOrSingleDefinition;
  }
  return nullptr;
}

// Returns the first definition regardless of how many defs exist, or nullptr
// if the variable has no recorded definition.
const Inst *VariableTracking::getFirstDefinition() const {
  switch (MultiDef) {
  case MDS_Unknown:
    return nullptr;
  case MDS_MultiDefMultiBlock:
  case MDS_SingleDef:
  case MDS_MultiDefSingleBlock:
    assert(FirstOrSingleDefinition);
    return FirstOrSingleDefinition;
  }
  return nullptr;
}

// Rebuilds all variable metadata for the function by scanning every node.
void VariablesMetadata::init(MetadataKind TrackingKind) {
  TimerMarker T(TimerStack::TT_vmetadata, Func);
  Kind = TrackingKind;
  Metadata.clear();
  Metadata.resize(Func->getNumVariables(), VariableTracking::MBS_NoUses);

  // Mark implicit args as being used in the entry node.
  for (Variable *Var : Func->getImplicitArgs()) {
    constexpr Inst *NoInst = nullptr;
    CfgNode *EntryNode = Func->getEntryNode();
    constexpr bool IsImplicit = true;
    Metadata[Var->getIndex()].markUse(Kind, NoInst, EntryNode, IsImplicit);
  }

  for (CfgNode *Node : Func->getNodes())
    addNode(Node);
}

// Incorporates one node's phis and instructions into the metadata, growing the
// Metadata table if new variables were created since init().
void VariablesMetadata::addNode(CfgNode *Node) {
  if (Func->getNumVariables() > Metadata.size())
    Metadata.resize(Func->getNumVariables());

  for (Inst &I : Node->getPhis()) {
    if (I.isDeleted())
      continue;
    if (Variable *Dest = I.getDest()) {
      SizeT DestNum = Dest->getIndex();
      assert(DestNum < Metadata.size());
      Metadata[DestNum].markDef(Kind, &I, Node);
    }
    for (SizeT SrcNum = 0; SrcNum < I.getSrcSize(); ++SrcNum) {
      if (auto *Var = llvm::dyn_cast<Variable>(I.getSrc(SrcNum))) {
        SizeT VarNum = Var->getIndex();
        assert(VarNum < Metadata.size());
        constexpr bool IsImplicit = false;
        Metadata[VarNum].markUse(Kind, &I, Node, IsImplicit);
      }
    }
  }

  for (Inst &I : Node->getInsts()) {
    if (I.isDeleted())
      continue;
    // Note: The implicit definitions (and uses) from InstFakeKill are
    // deliberately ignored.
    if (Variable *Dest = I.getDest()) {
      SizeT DestNum = Dest->getIndex();
      assert(DestNum < Metadata.size());
      Metadata[DestNum].markDef(Kind, &I, Node);
    }
    FOREACH_VAR_IN_INST(Var, I) {
      SizeT VarNum = Var->getIndex();
      assert(VarNum < Metadata.size());
      constexpr bool IsImplicit = false;
      Metadata[VarNum].markUse(Kind, &I, Node, IsImplicit);
    }
  }
}

// Returns whether Var has more than one definition. Args are considered
// single-def; untracked variables get the conservative answer (true).
bool VariablesMetadata::isMultiDef(const Variable *Var) const {
  assert(Kind != VMK_Uses);
  if (Var->getIsArg())
    return false;
  if (!isTracked(Var))
    return true; // conservative answer
  SizeT VarNum = Var->getIndex();
  // Conservatively return true if the state is unknown.
  return Metadata[VarNum].getMultiDef() != VariableTracking::MDS_SingleDef;
}

// Returns whether Var is live across multiple blocks. Args are conservatively
// multi-block; rematerializable variables never are.
bool VariablesMetadata::isMultiBlock(const Variable *Var) const {
  if (Var->getIsArg())
    return true;
  if (Var->isRematerializable())
    return false;
  if (!isTracked(Var))
    return true; // conservative answer
  SizeT VarNum = Var->getIndex();
  switch (Metadata[VarNum].getMultiBlock()) {
  case VariableTracking::MBS_NoUses:
  case VariableTracking::MBS_SingleBlock:
    return false;
  // Conservatively return true if the state is unknown.
  case VariableTracking::MBS_Unknown:
  case VariableTracking::MBS_MultiBlock:
    return true;
  }
  assert(0);
  return true;
}

// Returns whether Var is provably confined to a single block. Note this is
// not simply !isMultiBlock(): unknown/no-use states answer false here too.
bool VariablesMetadata::isSingleBlock(const Variable *Var) const {
  if (Var->getIsArg())
    return false;
  if (Var->isRematerializable())
    return false;
  if (!isTracked(Var))
    return false; // conservative answer
  SizeT VarNum = Var->getIndex();
  switch (Metadata[VarNum].getMultiBlock()) {
  case VariableTracking::MBS_SingleBlock:
    return true;
  case VariableTracking::MBS_Unknown:
  case VariableTracking::MBS_NoUses:
  case VariableTracking::MBS_MultiBlock:
    return false;
  }
  assert(0);
  return false;
}

// Thin per-variable forwarders to the VariableTracking accessors; all return
// the conservative nullptr for untracked variables.
const Inst *
VariablesMetadata::getFirstDefinitionSingleBlock(const Variable *Var) const {
  assert(Kind != VMK_Uses);
  if (!isTracked(Var))
    return nullptr; // conservative answer
  SizeT VarNum = Var->getIndex();
  return Metadata[VarNum].getFirstDefinitionSingleBlock();
}

const Inst *VariablesMetadata::getSingleDefinition(const Variable *Var) const {
  assert(Kind != VMK_Uses);
  if (!isTracked(Var))
    return nullptr; // conservative answer
  SizeT VarNum = Var->getIndex();
  return Metadata[VarNum].getSingleDefinition();
}

const Inst *VariablesMetadata::getFirstDefinition(const Variable *Var) const {
  assert(Kind != VMK_Uses);
  if (!isTracked(Var))
    return nullptr; // conservative answer
  SizeT VarNum = Var->getIndex();
  return Metadata[VarNum].getFirstDefinition();
}

// Returns the list of definitions after the first (requires VMK_All
// tracking); untracked variables share a lazily created empty list.
const InstDefList &
VariablesMetadata::getLatterDefinitions(const Variable *Var) const {
  assert(Kind == VMK_All);
  if (!isTracked(Var)) {
    // NoDefinitions has to be initialized after we've had a chance to set the
    // CfgAllocator, so it can't be a static global object. Also, while C++11
    // guarantees the initialization of static local objects to be thread-safe,
    // we use a pointer to it so we can avoid frequent mutex locking overhead.
    if (NoDefinitions == nullptr) {
      static const InstDefList NoDefinitionsInstance;
      NoDefinitions = &NoDefinitionsInstance;
    }
    return *NoDefinitions;
  }
  SizeT VarNum = Var->getIndex();
  return Metadata[VarNum].getLatterDefinitions();
}

// Returns the single node containing all of Var's uses, or nullptr when
// untracked (or when tracking recorded no such node).
CfgNode *VariablesMetadata::getLocalUseNode(const Variable *Var) const {
  if (!isTracked(Var))
    return nullptr; // conservative answer
  SizeT VarNum = Var->getIndex();
  return Metadata[VarNum].getNode();
}

// Returns the accumulated use weight, defaulting to 1 for untracked
// variables.
RegWeight VariablesMetadata::getUseWeight(const Variable *Var) const {
  if (!isTracked(Var))
    return RegWeight(1); // conservative answer
  SizeT VarNum = Var->getIndex();
  return Metadata[VarNum].getUseWeight();
}

const InstDefList *VariablesMetadata::NoDefinitions = nullptr;

// ======================== dump routines ======================== //

void Variable::emit(const Cfg *Func) const {
  if (BuildDefs::dump())
    Func->getTarget()->emitVariable(this);
}

// Dumps the variable as %name, optionally with register-origin links, the
// assigned register, or its stack slot relative to the frame/stack pointer.
void Variable::dump(const Cfg *Func, Ostream &Str) const {
  if (!BuildDefs::dump())
    return;
  if (Func == nullptr) {
    Str << "%" << getName();
    return;
  }
  if (Func->isVerbose(IceV_RegOrigins) ||
      (!hasReg() && !Func->getTarget()->hasComputedFrame())) {
    Str << "%" << getName();
    // Follow the LinkedTo chain to show where the register assignment
    // originated.
    for (Variable *Link = getLinkedTo(); Link != nullptr;
         Link = Link->getLinkedTo()) {
      Str << ":%" << Link->getName();
    }
  }
  if (hasReg()) {
    if (Func->isVerbose(IceV_RegOrigins))
      Str << ":";
    Str << Func->getTarget()->getRegName(RegNum, getType());
  } else if (Func->getTarget()->hasComputedFrame()) {
    if (Func->isVerbose(IceV_RegOrigins))
      Str << ":";
    const auto BaseRegisterNumber =
        hasReg() ? getBaseRegNum() : Func->getTarget()->getFrameOrStackReg();
    Str << "["
        << Func->getTarget()->getRegName(BaseRegisterNumber, IceType_i32);
    if (hasKnownStackOffset()) {
      int32_t Offset = getStackOffset();
      if (Offset) {
        if (Offset > 0)
          Str << "+";
        Str << Offset;
      }
    }
    Str << "]";
  }
}

// Constant emission dispatches to the target lowering, specialized per
// constant kind.
template <> void ConstantInteger32::emit(TargetLowering *Target) const {
  Target->emit(this);
}

template <> void ConstantInteger64::emit(TargetLowering *Target) const {
  Target->emit(this);
}

template <> void ConstantFloat::emit(TargetLowering *Target) const {
  Target->emit(this);
}

template <> void ConstantDouble::emit(TargetLowering *Target) const {
  Target->emit(this);
}

void ConstantRelocatable::emit(TargetLowering *Target) const {
  Target->emit(this);
}

void ConstantRelocatable::emitWithoutPrefix(const TargetLowering *Target,
                                            const char *Suffix) const {
  Target->emitWithoutPrefix(this, Suffix);
}

// Dumps as the precomputed EmitString if present, otherwise @name with an
// explicitly signed nonzero offset.
void ConstantRelocatable::dump(const Cfg *, Ostream &Str) const {
  if (!BuildDefs::dump())
    return;
  if (!EmitString.empty()) {
    Str << EmitString;
    return;
  }
  Str << "@" << Name;
  const RelocOffsetT Offset = getOffset();
  if (Offset) {
    if (Offset >= 0) {
      Str << "+";
    }
    Str << Offset;
  }
}

void ConstantUndef::emit(TargetLowering *Target) const { Target->emit(this); }

// Dumps the live range as a comma-separated list of half-open segments,
// e.g. "[1:3), [5:9)".
void LiveRange::dump(Ostream &Str) const {
  if (!BuildDefs::dump())
    return;
  bool First = true;
  for (const RangeElementType &I : Range) {
    if (!First)
      Str << ", ";
    First = false;
    Str << "[" << I.first << ":" << I.second << ")";
  }
}

Ostream &operator<<(Ostream &Str, const LiveRange &L) {
  if (!BuildDefs::dump())
    return Str;
  L.dump(Str);
  return Str;
}

Ostream &operator<<(Ostream &Str, const RegWeight &W) {
  if (!BuildDefs::dump())
    return Str;
  if (W.getWeight() == RegWeight::Inf)
    Str << "Inf";
  else
    Str << W.getWeight();
  return Str;
}

} // end of namespace Ice