//===----- AggressiveAntiDepBreaker.cpp - Anti-dep breaker ----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the AggressiveAntiDepBreaker class, which
// implements register anti-dependence breaking during post-RA
// scheduling. It attempts to break all anti-dependencies within a
// block.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "post-RA-sched"
#include "AggressiveAntiDepBreaker.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/RegisterClassInfo.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetRegisterInfo.h"
using namespace llvm;

// If DebugDiv > 0 then only break antidep with (ID % DebugDiv) == DebugMod
static cl::opt<int>
DebugDiv("agg-antidep-debugdiv",
         cl::desc("Debug control for aggressive anti-dep breaker"),
         cl::init(0), cl::Hidden);
static cl::opt<int>
DebugMod("agg-antidep-debugmod",
         cl::desc("Debug control for aggressive anti-dep breaker"),
         cl::init(0), cl::Hidden);

AggressiveAntiDepState::AggressiveAntiDepState(const unsigned TargetRegs,
                                               MachineBasicBlock *BB) :
  NumTargetRegs(TargetRegs), GroupNodes(TargetRegs, 0),
  GroupNodeIndices(TargetRegs, 0),
  KillIndices(TargetRegs, 0),
  DefIndices(TargetRegs, 0)
{
  const unsigned BBSize = BB->size();
  for (unsigned i = 0; i < NumTargetRegs; ++i) {
    // Initialize all registers to be in their own group. Initially we
    // assign the register to the same-indexed GroupNode.
    GroupNodeIndices[i] = i;
    // Initialize the indices to indicate that no registers are live.
    KillIndices[i] = ~0u;
    DefIndices[i] = BBSize;
  }
}

unsigned AggressiveAntiDepState::GetGroup(unsigned Reg) {
  unsigned Node = GroupNodeIndices[Reg];
  while (GroupNodes[Node] != Node)
    Node = GroupNodes[Node];

  return Node;
}

void AggressiveAntiDepState::GetGroupRegs(
  unsigned Group,
  std::vector<unsigned> &Regs,
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference> *RegRefs)
{
  for (unsigned Reg = 0; Reg != NumTargetRegs; ++Reg) {
    if ((GetGroup(Reg) == Group) && (RegRefs->count(Reg) > 0))
      Regs.push_back(Reg);
  }
}

unsigned AggressiveAntiDepState::UnionGroups(unsigned Reg1, unsigned Reg2)
{
  assert(GroupNodes[0] == 0 && "GroupNode 0 not parent!");
  assert(GroupNodeIndices[0] == 0 && "Reg 0 not in Group 0!");

  // find group for each register
  unsigned Group1 = GetGroup(Reg1);
  unsigned Group2 = GetGroup(Reg2);

  // if either group is 0, then that must become the parent
  unsigned Parent = (Group1 == 0) ? Group1 : Group2;
  unsigned Other = (Parent == Group1) ? Group2 : Group1;
  GroupNodes.at(Other) = Parent;
  return Parent;
}
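
// An illustrative (hypothetical) trace of the group bookkeeping above,
// assuming a freshly constructed state S:
//
//   S.LeaveGroup(3);       // register 3 gets a brand-new singleton group
//   S.UnionGroups(3, 5);   // merge with register 5's group; UnionGroups
//                          // always keeps group 0 as the parent when either
//                          // side resolves to it
//   S.GetGroup(3);         // follows GroupNodes parent links until it finds
//                          // a self-parented node
//
// Group 0 is used throughout this file as the "do not rename" group:
// registers are pinned to it with UnionGroups(Reg, 0), and anti-dependencies
// whose register resolves to group 0 are skipped in BreakAntiDependencies.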

unsigned AggressiveAntiDepState::LeaveGroup(unsigned Reg)
{
  // Create a new GroupNode for Reg. Reg's existing GroupNode must
  // stay as is because there could be other GroupNodes referring to
  // it.
  unsigned idx = GroupNodes.size();
  GroupNodes.push_back(idx);
  GroupNodeIndices[Reg] = idx;
  return idx;
}

bool AggressiveAntiDepState::IsLive(unsigned Reg)
{
  // KillIndex must be defined and DefIndex not defined for a register
  // to be live.
  return((KillIndices[Reg] != ~0u) && (DefIndices[Reg] == ~0u));
}


AggressiveAntiDepBreaker::
AggressiveAntiDepBreaker(MachineFunction& MFi,
                         const RegisterClassInfo &RCI,
                         TargetSubtargetInfo::RegClassVector& CriticalPathRCs) :
  AntiDepBreaker(), MF(MFi),
  MRI(MF.getRegInfo()),
  TII(MF.getTarget().getInstrInfo()),
  TRI(MF.getTarget().getRegisterInfo()),
  RegClassInfo(RCI),
  State(NULL) {
  /* Collect a bitset of all registers that are only broken if they
     are on the critical path. */
  for (unsigned i = 0, e = CriticalPathRCs.size(); i < e; ++i) {
    BitVector CPSet = TRI->getAllocatableSet(MF, CriticalPathRCs[i]);
    if (CriticalPathSet.none())
      CriticalPathSet = CPSet;
    else
      CriticalPathSet |= CPSet;
  }

  DEBUG(dbgs() << "AntiDep Critical-Path Registers:");
  DEBUG(for (int r = CriticalPathSet.find_first(); r != -1;
             r = CriticalPathSet.find_next(r))
          dbgs() << " " << TRI->getName(r));
  DEBUG(dbgs() << '\n');
}

AggressiveAntiDepBreaker::~AggressiveAntiDepBreaker() {
  delete State;
}

void AggressiveAntiDepBreaker::StartBlock(MachineBasicBlock *BB) {
  assert(State == NULL);
  State = new AggressiveAntiDepState(TRI->getNumRegs(), BB);

  bool IsReturnBlock = (!BB->empty() && BB->back().isReturn());
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();

  // Examine the live-in regs of all successors.
  for (MachineBasicBlock::succ_iterator SI = BB->succ_begin(),
         SE = BB->succ_end(); SI != SE; ++SI)
    for (MachineBasicBlock::livein_iterator I = (*SI)->livein_begin(),
           E = (*SI)->livein_end(); I != E; ++I) {
      for (MCRegAliasIterator AI(*I, TRI, true); AI.isValid(); ++AI) {
        unsigned Reg = *AI;
        State->UnionGroups(Reg, 0);
        KillIndices[Reg] = BB->size();
        DefIndices[Reg] = ~0u;
      }
    }
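
  // Liveness here is encoded by the (KillIndices, DefIndices) pair: the loop
  // above marks each live-in alias with KillIndices[Reg] = BB->size() and
  // DefIndices[Reg] = ~0u, i.e. "killed beyond the end of the block, no def
  // seen yet", which is exactly the pattern IsLive() tests for.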

  // Mark live-out callee-saved registers. In a return block this is
  // all callee-saved registers. In non-return this is any
  // callee-saved register that is not saved in the prolog.
  const MachineFrameInfo *MFI = MF.getFrameInfo();
  BitVector Pristine = MFI->getPristineRegs(BB);
  for (const uint16_t *I = TRI->getCalleeSavedRegs(&MF); *I; ++I) {
    unsigned Reg = *I;
    if (!IsReturnBlock && !Pristine.test(Reg)) continue;
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      unsigned AliasReg = *AI;
      State->UnionGroups(AliasReg, 0);
      KillIndices[AliasReg] = BB->size();
      DefIndices[AliasReg] = ~0u;
    }
  }
}

void AggressiveAntiDepBreaker::FinishBlock() {
  delete State;
  State = NULL;
}

void AggressiveAntiDepBreaker::Observe(MachineInstr *MI, unsigned Count,
                                       unsigned InsertPosIndex) {
  assert(Count < InsertPosIndex && "Instruction index out of expected range!");

  std::set<unsigned> PassthruRegs;
  GetPassthruRegs(MI, PassthruRegs);
  PrescanInstruction(MI, Count, PassthruRegs);
  ScanInstruction(MI, Count);

  DEBUG(dbgs() << "Observe: ");
  DEBUG(MI->dump());
  DEBUG(dbgs() << "\tRegs:");

  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  for (unsigned Reg = 0; Reg != TRI->getNumRegs(); ++Reg) {
    // If Reg is currently live, then mark that it can't be renamed as
    // we don't know the extent of its live-range anymore (now that it
    // has been scheduled). If it is not live but was defined in the
    // previous schedule region, then set its def index to the most
    // conservative location (i.e. the beginning of the previous
    // schedule region).
    if (State->IsLive(Reg)) {
      DEBUG(if (State->GetGroup(Reg) != 0)
              dbgs() << " " << TRI->getName(Reg) << "=g" <<
                State->GetGroup(Reg) << "->g0(region live-out)");
      State->UnionGroups(Reg, 0);
    } else if ((DefIndices[Reg] < InsertPosIndex)
               && (DefIndices[Reg] >= Count)) {
      DefIndices[Reg] = Count;
    }
  }
  DEBUG(dbgs() << '\n');
}

bool AggressiveAntiDepBreaker::IsImplicitDefUse(MachineInstr *MI,
                                                MachineOperand& MO)
{
  if (!MO.isReg() || !MO.isImplicit())
    return false;

  unsigned Reg = MO.getReg();
  if (Reg == 0)
    return false;

  MachineOperand *Op = NULL;
  if (MO.isDef())
    Op = MI->findRegisterUseOperand(Reg, true);
  else
    Op = MI->findRegisterDefOperand(Reg);

  return((Op != NULL) && Op->isImplicit());
}

void AggressiveAntiDepBreaker::GetPassthruRegs(MachineInstr *MI,
                                               std::set<unsigned>& PassthruRegs) {
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    if ((MO.isDef() && MI->isRegTiedToUseOperand(i)) ||
        IsImplicitDefUse(MI, MO)) {
      const unsigned Reg = MO.getReg();
      PassthruRegs.insert(Reg);
      for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs)
        PassthruRegs.insert(*SubRegs);
    }
  }
}

/// AntiDepEdges - Return in Edges the anti- and output- dependencies
/// in SU that we want to consider for breaking.
static void AntiDepEdges(const SUnit *SU, std::vector<const SDep*>& Edges) {
  SmallSet<unsigned, 4> RegSet;
  for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
       P != PE; ++P) {
    if ((P->getKind() == SDep::Anti) || (P->getKind() == SDep::Output)) {
      unsigned Reg = P->getReg();
      if (RegSet.count(Reg) == 0) {
        Edges.push_back(&*P);
        RegSet.insert(Reg);
      }
    }
  }
}
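
// A rough sketch of how the critical path is traversed (see
// BreakAntiDependencies below): the SUnit with the greatest depth plus
// latency is taken as the starting point, and CriticalPathStep repeatedly
// moves to the predecessor whose depth plus edge latency is greatest, i.e.
//
//   const SUnit *SU = /* SUnit with the largest getDepth() + Latency */;
//   while (SU) {
//     // ...SU's instruction is the current critical-path instruction...
//     SU = CriticalPathStep(SU);
//   }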

/// CriticalPathStep - Return the next SUnit after SU on the bottom-up
/// critical path.
static const SUnit *CriticalPathStep(const SUnit *SU) {
  const SDep *Next = 0;
  unsigned NextDepth = 0;
  // Find the predecessor edge with the greatest depth.
  if (SU != 0) {
    for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
         P != PE; ++P) {
      const SUnit *PredSU = P->getSUnit();
      unsigned PredLatency = P->getLatency();
      unsigned PredTotalLatency = PredSU->getDepth() + PredLatency;
      // In the case of a latency tie, prefer an anti-dependency edge over
      // other types of edges.
      if (NextDepth < PredTotalLatency ||
          (NextDepth == PredTotalLatency && P->getKind() == SDep::Anti)) {
        NextDepth = PredTotalLatency;
        Next = &*P;
      }
    }
  }

  return (Next) ? Next->getSUnit() : 0;
}

void AggressiveAntiDepBreaker::HandleLastUse(unsigned Reg, unsigned KillIdx,
                                             const char *tag,
                                             const char *header,
                                             const char *footer) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  if (!State->IsLive(Reg)) {
    KillIndices[Reg] = KillIdx;
    DefIndices[Reg] = ~0u;
    RegRefs.erase(Reg);
    State->LeaveGroup(Reg);
    DEBUG(if (header != NULL) {
        dbgs() << header << TRI->getName(Reg); header = NULL; });
    DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << tag);
  }
  // Repeat for subregisters.
  for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs) {
    unsigned SubregReg = *SubRegs;
    if (!State->IsLive(SubregReg)) {
      KillIndices[SubregReg] = KillIdx;
      DefIndices[SubregReg] = ~0u;
      RegRefs.erase(SubregReg);
      State->LeaveGroup(SubregReg);
      DEBUG(if (header != NULL) {
          dbgs() << header << TRI->getName(Reg); header = NULL; });
      DEBUG(dbgs() << " " << TRI->getName(SubregReg) << "->g" <<
            State->GetGroup(SubregReg) << tag);
    }
  }

  DEBUG(if ((header == NULL) && (footer != NULL)) dbgs() << footer);
}
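
// PrescanInstruction and ScanInstruction below share the work of updating the
// liveness and grouping state for one instruction: the prescan handles the
// instruction's defs (merging live aliases into the def's group and recording
// each def operand in RegRefs), while the scan handles the uses. Both are
// invoked once per instruction from BreakAntiDependencies (which walks the
// region bottom-up) and from Observe.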

void AggressiveAntiDepBreaker::PrescanInstruction(MachineInstr *MI,
                                                  unsigned Count,
                                                  std::set<unsigned>& PassthruRegs) {
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // Handle dead defs by simulating a last-use of the register just
  // after the def. A dead def can occur because the def is truly
  // dead, or because only a subregister is live at the def. If we
  // don't do this the dead def will be incorrectly merged into the
  // previous def.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    HandleLastUse(Reg, Count + 1, "", "\tDead Def: ", "\n");
  }

  DEBUG(dbgs() << "\tDef Groups:");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" << State->GetGroup(Reg));

    // If MI's defs have a special allocation requirement, don't allow
    // any def registers to be changed. Also assume all registers
    // defined in a call must not be changed (ABI).
    if (MI->isCall() || MI->hasExtraDefRegAllocReq() ||
        TII->isPredicated(MI)) {
      DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
      State->UnionGroups(Reg, 0);
    }

    // Any aliases that are live at this point are completely or
    // partially defined here, so group those aliases with Reg.
    for (MCRegAliasIterator AI(Reg, TRI, false); AI.isValid(); ++AI) {
      unsigned AliasReg = *AI;
      if (State->IsLive(AliasReg)) {
        State->UnionGroups(Reg, AliasReg);
        DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << "(via " <<
              TRI->getName(AliasReg) << ")");
      }
    }

    // Note register reference...
    const TargetRegisterClass *RC = NULL;
    if (i < MI->getDesc().getNumOperands())
      RC = TII->getRegClass(MI->getDesc(), i, TRI, MF);
    AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  DEBUG(dbgs() << '\n');

  // Scan the register defs for this instruction and update
  // live-ranges.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;
    // Ignore KILLs and passthru registers for liveness...
    if (MI->isKill() || (PassthruRegs.count(Reg) != 0))
      continue;

    // Update def for Reg and aliases.
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI)
      DefIndices[*AI] = Count;
  }
}

void AggressiveAntiDepBreaker::ScanInstruction(MachineInstr *MI,
                                               unsigned Count) {
  DEBUG(dbgs() << "\tUse Groups:");
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // If MI's uses have special allocation requirements, don't allow
  // any use registers to be changed. Also assume all registers
  // used in a call must not be changed (ABI).
  // FIXME: The issue with predicated instructions is more complex. We are
  // being conservative here because the kill markers cannot be trusted after
  // if-conversion:
  // %R6<def> = LDR %SP, %reg0, 92, pred:14, pred:%reg0; mem:LD4[FixedStack14]
  // ...
  // STR %R0, %R6<kill>, %reg0, 0, pred:0, pred:%CPSR; mem:ST4[%395]
  // %R6<def> = LDR %SP, %reg0, 100, pred:0, pred:%CPSR; mem:LD4[FixedStack12]
  // STR %R0, %R6<kill>, %reg0, 0, pred:14, pred:%reg0; mem:ST4[%396](align=8)
  //
  // The first R6 kill is not really a kill since it's killed by a predicated
  // instruction which may not be executed. The second R6 def may or may not
  // re-define R6 so it's not safe to change it since the last R6 use cannot be
  // changed.
  bool Special = MI->isCall() ||
    MI->hasExtraSrcRegAllocReq() ||
    TII->isPredicated(MI);

  // Scan the register uses for this instruction and update
  // live-ranges, groups and RegRefs.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isUse()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" <<
          State->GetGroup(Reg));

    // It wasn't previously live but now it is; this is a kill. Forget
    // the previous live-range information and start a new live-range
    // for the register.
    HandleLastUse(Reg, Count, "(last-use)");

    if (Special) {
      DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
      State->UnionGroups(Reg, 0);
    }

    // Note register reference...
    const TargetRegisterClass *RC = NULL;
    if (i < MI->getDesc().getNumOperands())
      RC = TII->getRegClass(MI->getDesc(), i, TRI, MF);
    AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  DEBUG(dbgs() << '\n');

  // Form a group of all defs and uses of a KILL instruction to ensure
  // that all registers are renamed as a group.
  if (MI->isKill()) {
    DEBUG(dbgs() << "\tKill Group:");

    unsigned FirstReg = 0;
    for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (Reg == 0) continue;

      if (FirstReg != 0) {
        DEBUG(dbgs() << "=" << TRI->getName(Reg));
        State->UnionGroups(FirstReg, Reg);
      } else {
        DEBUG(dbgs() << " " << TRI->getName(Reg));
        FirstReg = Reg;
      }
    }

    DEBUG(dbgs() << "->g" << State->GetGroup(FirstReg) << '\n');
  }
}

BitVector AggressiveAntiDepBreaker::GetRenameRegisters(unsigned Reg) {
  BitVector BV(TRI->getNumRegs(), false);
  bool first = true;

  // Check all references that need rewriting for Reg. For each, use
  // the corresponding register class to narrow the set of registers
  // that are appropriate for renaming.
  std::pair<std::multimap<unsigned,
                          AggressiveAntiDepState::RegisterReference>::iterator,
            std::multimap<unsigned,
                          AggressiveAntiDepState::RegisterReference>::iterator>
    Range = State->GetRegRefs().equal_range(Reg);
  for (std::multimap<unsigned,
         AggressiveAntiDepState::RegisterReference>::iterator Q = Range.first,
         QE = Range.second; Q != QE; ++Q) {
    const TargetRegisterClass *RC = Q->second.RC;
    if (RC == NULL) continue;

    BitVector RCBV = TRI->getAllocatableSet(MF, RC);
    if (first) {
      BV |= RCBV;
      first = false;
    } else {
      BV &= RCBV;
    }

    DEBUG(dbgs() << " " << RC->getName());
  }

  return BV;
}
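
// GetRenameRegisters computes, for a single register, the set of candidate
// rename targets: the first reference seeds the set with the allocatable
// registers of its class, and every further reference intersects its own
// class's allocatable set into it. A hypothetical example: if one operand
// constrains Reg to a class A and another operand to a smaller class B, the
// result is allocatable(A) & allocatable(B), so any rename satisfies both
// operands.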

bool AggressiveAntiDepBreaker::FindSuitableFreeRegisters(
                                unsigned AntiDepGroupIndex,
                                RenameOrderType& RenameOrder,
                                std::map<unsigned, unsigned> &RenameMap) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // Collect all referenced registers in the same group as
  // AntiDepReg. These all need to be renamed together if we are to
  // break the anti-dependence.
  std::vector<unsigned> Regs;
  State->GetGroupRegs(AntiDepGroupIndex, Regs, &RegRefs);
  assert(Regs.size() > 0 && "Empty register group!");
  if (Regs.size() == 0)
    return false;

  // Find the "superest" register in the group. At the same time,
  // collect the BitVector of registers that can be used to rename
  // each register.
  DEBUG(dbgs() << "\tRename Candidates for Group g" << AntiDepGroupIndex
        << ":\n");
  std::map<unsigned, BitVector> RenameRegisterMap;
  unsigned SuperReg = 0;
  for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
    unsigned Reg = Regs[i];
    if ((SuperReg == 0) || TRI->isSuperRegister(SuperReg, Reg))
      SuperReg = Reg;

    // If Reg has any references, then collect possible rename regs
    if (RegRefs.count(Reg) > 0) {
      DEBUG(dbgs() << "\t\t" << TRI->getName(Reg) << ":");

      BitVector BV = GetRenameRegisters(Reg);
      RenameRegisterMap.insert(std::pair<unsigned, BitVector>(Reg, BV));

      DEBUG(dbgs() << " ::");
      DEBUG(for (int r = BV.find_first(); r != -1; r = BV.find_next(r))
              dbgs() << " " << TRI->getName(r));
      DEBUG(dbgs() << "\n");
    }
  }

  // All group registers should be a subreg of SuperReg.
  for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
    unsigned Reg = Regs[i];
    if (Reg == SuperReg) continue;
    bool IsSub = TRI->isSubRegister(SuperReg, Reg);
    assert(IsSub && "Expecting group subregister");
    if (!IsSub)
      return false;
  }

#ifndef NDEBUG
  // If DebugDiv > 0 then only rename (renamecnt % DebugDiv) == DebugMod
  if (DebugDiv > 0) {
    static int renamecnt = 0;
    if (renamecnt++ % DebugDiv != DebugMod)
      return false;

    dbgs() << "*** Performing rename " << TRI->getName(SuperReg) <<
      " for debug ***\n";
  }
#endif

  // Check each possible rename register for SuperReg in round-robin
  // order. If that register is available, and the corresponding
  // registers are available for the other group subregisters, then we
  // can use those registers to rename.

  // FIXME: Using getMinimalPhysRegClass is very conservative. We should
  // check every use of the register and find the largest register class
  // that can be used in all of them.
  const TargetRegisterClass *SuperRC =
    TRI->getMinimalPhysRegClass(SuperReg, MVT::Other);

  ArrayRef<MCPhysReg> Order = RegClassInfo.getOrder(SuperRC);
  if (Order.empty()) {
    DEBUG(dbgs() << "\tEmpty Super Regclass!!\n");
    return false;
  }

  DEBUG(dbgs() << "\tFind Registers:");

  if (RenameOrder.count(SuperRC) == 0)
    RenameOrder.insert(RenameOrderType::value_type(SuperRC, Order.size()));

  unsigned OrigR = RenameOrder[SuperRC];
  unsigned EndR = ((OrigR == Order.size()) ? 0 : OrigR);
  unsigned R = OrigR;
  do {
    if (R == 0) R = Order.size();
    --R;
    const unsigned NewSuperReg = Order[R];
    // Don't consider non-allocatable registers
    if (!MRI.isAllocatable(NewSuperReg)) continue;
    // Don't replace a register with itself.
    if (NewSuperReg == SuperReg) continue;

    DEBUG(dbgs() << " [" << TRI->getName(NewSuperReg) << ':');
    RenameMap.clear();

    // For each referenced group register (which must be a SuperReg or
    // a subregister of SuperReg), find the corresponding subregister
    // of NewSuperReg and make sure it is free to be renamed.
    for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
      unsigned Reg = Regs[i];
      unsigned NewReg = 0;
      if (Reg == SuperReg) {
        NewReg = NewSuperReg;
      } else {
        unsigned NewSubRegIdx = TRI->getSubRegIndex(SuperReg, Reg);
        if (NewSubRegIdx != 0)
          NewReg = TRI->getSubReg(NewSuperReg, NewSubRegIdx);
      }

      DEBUG(dbgs() << " " << TRI->getName(NewReg));

      // Check if Reg can be renamed to NewReg.
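      // (If no matching subregister exists in NewSuperReg, NewReg is still 0
      // here; register 0 is not an allocatable register and so never appears
      // in the rename set, which means the test below rejects this candidate
      // and we move on to the next super-register.)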
      BitVector BV = RenameRegisterMap[Reg];
      if (!BV.test(NewReg)) {
        DEBUG(dbgs() << "(no rename)");
        goto next_super_reg;
      }

      // If NewReg is dead and NewReg's most recent def is not before
      // Reg's kill, it's safe to replace Reg with NewReg. We
      // must also check all aliases of NewReg, because we can't define a
      // register when any sub or super is already live.
      if (State->IsLive(NewReg) || (KillIndices[Reg] > DefIndices[NewReg])) {
        DEBUG(dbgs() << "(live)");
        goto next_super_reg;
      } else {
        bool found = false;
        for (MCRegAliasIterator AI(NewReg, TRI, false); AI.isValid(); ++AI) {
          unsigned AliasReg = *AI;
          if (State->IsLive(AliasReg) ||
              (KillIndices[Reg] > DefIndices[AliasReg])) {
            DEBUG(dbgs() << "(alias " << TRI->getName(AliasReg) << " live)");
            found = true;
            break;
          }
        }
        if (found)
          goto next_super_reg;
      }

      // Record that 'Reg' can be renamed to 'NewReg'.
      RenameMap.insert(std::pair<unsigned, unsigned>(Reg, NewReg));
    }

    // If we fall-out here, then every register in the group can be
    // renamed, as recorded in RenameMap.
    RenameOrder.erase(SuperRC);
    RenameOrder.insert(RenameOrderType::value_type(SuperRC, R));
    DEBUG(dbgs() << "]\n");
    return true;

  next_super_reg:
    DEBUG(dbgs() << ']');
  } while (R != EndR);

  DEBUG(dbgs() << '\n');

  // No registers are free and available!
  return false;
}

/// BreakAntiDependencies - Identify anti-dependencies within the
/// ScheduleDAG and break them by renaming registers.
///
unsigned AggressiveAntiDepBreaker::BreakAntiDependencies(
                              const std::vector<SUnit>& SUnits,
                              MachineBasicBlock::iterator Begin,
                              MachineBasicBlock::iterator End,
                              unsigned InsertPosIndex,
                              DbgValueVector &DbgValues) {

  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // The code below assumes that there is at least one instruction,
  // so just duck out immediately if the block is empty.
  if (SUnits.empty()) return 0;

  // For each regclass the next register to use for renaming.
  RenameOrderType RenameOrder;

  // ...need a map from MI to SUnit.
  std::map<MachineInstr *, const SUnit *> MISUnitMap;
  for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
    const SUnit *SU = &SUnits[i];
    MISUnitMap.insert(std::pair<MachineInstr *, const SUnit *>(SU->getInstr(),
                                                               SU));
  }
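
  // In the loop below, an anti-dependence is broken by renaming the later
  // def (and every other reference in its register group) to a free
  // register. An illustrative example with hypothetical registers R1/R2:
  //
  //     Before:              After renaming the second live range to R2:
  //       R1 = op ...          R1 = op ...
  //       ... = use R1         ... = use R1
  //       R1 = op ...          R2 = op ...    <- anti-dependence removed
  //       ... = use R1         ... = use R2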

  // Track progress along the critical path through the SUnit graph as
  // we walk the instructions. This is needed for regclasses that only
  // break critical-path anti-dependencies.
  const SUnit *CriticalPathSU = 0;
  MachineInstr *CriticalPathMI = 0;
  if (CriticalPathSet.any()) {
    for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
      const SUnit *SU = &SUnits[i];
      if (!CriticalPathSU ||
          ((SU->getDepth() + SU->Latency) >
           (CriticalPathSU->getDepth() + CriticalPathSU->Latency))) {
        CriticalPathSU = SU;
      }
    }

    CriticalPathMI = CriticalPathSU->getInstr();
  }

#ifndef NDEBUG
  DEBUG(dbgs() << "\n===== Aggressive anti-dependency breaking\n");
  DEBUG(dbgs() << "Available regs:");
  for (unsigned Reg = 0; Reg < TRI->getNumRegs(); ++Reg) {
    if (!State->IsLive(Reg))
      DEBUG(dbgs() << " " << TRI->getName(Reg));
  }
  DEBUG(dbgs() << '\n');
#endif

  // Attempt to break anti-dependence edges. Walk the instructions
  // from the bottom up, tracking information about liveness as we go
  // to help determine which registers are available.
  unsigned Broken = 0;
  unsigned Count = InsertPosIndex - 1;
  for (MachineBasicBlock::iterator I = End, E = Begin;
       I != E; --Count) {
    MachineInstr *MI = --I;

    if (MI->isDebugValue())
      continue;

    DEBUG(dbgs() << "Anti: ");
    DEBUG(MI->dump());

    std::set<unsigned> PassthruRegs;
    GetPassthruRegs(MI, PassthruRegs);

    // Process the defs in MI...
    PrescanInstruction(MI, Count, PassthruRegs);

    // The dependence edges that represent anti- and output-
    // dependencies that are candidates for breaking.
    std::vector<const SDep *> Edges;
    const SUnit *PathSU = MISUnitMap[MI];
    AntiDepEdges(PathSU, Edges);

    // If MI is not on the critical path, then we don't rename
    // registers in the CriticalPathSet.
    BitVector *ExcludeRegs = NULL;
    if (MI == CriticalPathMI) {
      CriticalPathSU = CriticalPathStep(CriticalPathSU);
      CriticalPathMI = (CriticalPathSU) ? CriticalPathSU->getInstr() : 0;
    } else {
      ExcludeRegs = &CriticalPathSet;
    }

    // Ignore KILL instructions (they form a group in ScanInstruction
    // but don't cause any anti-dependence breaking themselves)
    if (!MI->isKill()) {
      // Attempt to break each anti-dependency...
      for (unsigned i = 0, e = Edges.size(); i != e; ++i) {
        const SDep *Edge = Edges[i];
        SUnit *NextSU = Edge->getSUnit();

        if ((Edge->getKind() != SDep::Anti) &&
            (Edge->getKind() != SDep::Output)) continue;

        unsigned AntiDepReg = Edge->getReg();
        DEBUG(dbgs() << "\tAntidep reg: " << TRI->getName(AntiDepReg));
        assert(AntiDepReg != 0 && "Anti-dependence on reg0?");

        if (!MRI.isAllocatable(AntiDepReg)) {
          // Don't break anti-dependencies on non-allocatable registers.
          DEBUG(dbgs() << " (non-allocatable)\n");
          continue;
        } else if ((ExcludeRegs != NULL) && ExcludeRegs->test(AntiDepReg)) {
          // Don't break anti-dependencies for critical path registers
          // if not on the critical path
          DEBUG(dbgs() << " (not critical-path)\n");
          continue;
        } else if (PassthruRegs.count(AntiDepReg) != 0) {
          // If the anti-dep register liveness "passes-thru", then
          // don't try to change it. It will be changed along with
          // the use if required to break an earlier antidep.
          DEBUG(dbgs() << " (passthru)\n");
          continue;
        } else {
          // No anti-dep breaking for implicit deps
          MachineOperand *AntiDepOp = MI->findRegisterDefOperand(AntiDepReg);
          assert(AntiDepOp != NULL &&
                 "Can't find index for defined register operand");
          if ((AntiDepOp == NULL) || AntiDepOp->isImplicit()) {
            DEBUG(dbgs() << " (implicit)\n");
            continue;
          }

          // If the SUnit has other dependencies on the SUnit that
          // it anti-depends on, don't bother breaking the
          // anti-dependency since those edges would prevent such
          // units from being scheduled past each other
          // regardless.
          //
          // Also, if there are dependencies on other SUnits with the
          // same register as the anti-dependency, don't attempt to
          // break it.
          for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
                 PE = PathSU->Preds.end(); P != PE; ++P) {
            if (P->getSUnit() == NextSU ?
                (P->getKind() != SDep::Anti || P->getReg() != AntiDepReg) :
                (P->getKind() == SDep::Data && P->getReg() == AntiDepReg)) {
              AntiDepReg = 0;
              break;
            }
          }
          for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
                 PE = PathSU->Preds.end(); P != PE; ++P) {
            if ((P->getSUnit() == NextSU) && (P->getKind() != SDep::Anti) &&
                (P->getKind() != SDep::Output)) {
              DEBUG(dbgs() << " (real dependency)\n");
              AntiDepReg = 0;
              break;
            } else if ((P->getSUnit() != NextSU) &&
                       (P->getKind() == SDep::Data) &&
                       (P->getReg() == AntiDepReg)) {
              DEBUG(dbgs() << " (other dependency)\n");
              AntiDepReg = 0;
              break;
            }
          }

          if (AntiDepReg == 0) continue;
        }

        assert(AntiDepReg != 0);
        if (AntiDepReg == 0) continue;

        // Determine AntiDepReg's register group.
        const unsigned GroupIndex = State->GetGroup(AntiDepReg);
        if (GroupIndex == 0) {
          DEBUG(dbgs() << " (zero group)\n");
          continue;
        }

        DEBUG(dbgs() << '\n');

        // Look for a suitable register to use to break the anti-dependence.
        std::map<unsigned, unsigned> RenameMap;
        if (FindSuitableFreeRegisters(GroupIndex, RenameOrder, RenameMap)) {
          DEBUG(dbgs() << "\tBreaking anti-dependence edge on "
                << TRI->getName(AntiDepReg) << ":");

          // Handle each group register...
          for (std::map<unsigned, unsigned>::iterator
                 S = RenameMap.begin(), E = RenameMap.end(); S != E; ++S) {
            unsigned CurrReg = S->first;
            unsigned NewReg = S->second;

            DEBUG(dbgs() << " " << TRI->getName(CurrReg) << "->" <<
                  TRI->getName(NewReg) << "(" <<
                  RegRefs.count(CurrReg) << " refs)");

            // Update the references to the old register CurrReg to
            // refer to the new register NewReg.
            std::pair<std::multimap<unsigned,
                          AggressiveAntiDepState::RegisterReference>::iterator,
                      std::multimap<unsigned,
                          AggressiveAntiDepState::RegisterReference>::iterator>
              Range = RegRefs.equal_range(CurrReg);
            for (std::multimap<unsigned,
                   AggressiveAntiDepState::RegisterReference>::iterator
                   Q = Range.first, QE = Range.second; Q != QE; ++Q) {
              Q->second.Operand->setReg(NewReg);
              // If the SU for the instruction being updated has debug
              // information related to the anti-dependency register, make
              // sure to update that as well.
              const SUnit *SU = MISUnitMap[Q->second.Operand->getParent()];
              if (!SU) continue;
              for (DbgValueVector::iterator DVI = DbgValues.begin(),
                     DVE = DbgValues.end(); DVI != DVE; ++DVI)
                if (DVI->second == Q->second.Operand->getParent())
                  UpdateDbgValue(DVI->first, AntiDepReg, NewReg);
            }

            // We just went back in time and modified history; the
            // liveness information for CurrReg is now inconsistent. Set
            // the state as if it were dead.
            State->UnionGroups(NewReg, 0);
            RegRefs.erase(NewReg);
            DefIndices[NewReg] = DefIndices[CurrReg];
            KillIndices[NewReg] = KillIndices[CurrReg];

            State->UnionGroups(CurrReg, 0);
            RegRefs.erase(CurrReg);
            DefIndices[CurrReg] = KillIndices[CurrReg];
            KillIndices[CurrReg] = ~0u;
            assert(((KillIndices[CurrReg] == ~0u) !=
                    (DefIndices[CurrReg] == ~0u)) &&
                   "Kill and Def maps aren't consistent for AntiDepReg!");
          }

          ++Broken;
          DEBUG(dbgs() << '\n');
        }
      }
    }

    ScanInstruction(MI, Count);
  }

  return Broken;
}
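
// A rough usage sketch (illustrative only; the actual driver is the post-RA
// scheduler, which owns the SUnits, the region iterators, and DbgValues):
//
//   AggressiveAntiDepBreaker ADB(MF, RCI, CriticalPathRCs);
//   ADB.StartBlock(MBB);
//   // for each scheduling region [Begin, End) in MBB:
//   unsigned Broken = ADB.BreakAntiDependencies(SUnits, Begin, End,
//                                               InsertPosIndex, DbgValues);
//   // for instructions that are not part of any scheduling region:
//   ADB.Observe(MI, Count, InsertPosIndex);
//   ADB.FinishBlock();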