//===-- InterferenceCache.cpp - Caching per-block interference ---------*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// InterferenceCache remembers per-block interference in LiveIntervalUnions.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "InterferenceCache.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Target/TargetRegisterInfo.h"

using namespace llvm;

// Static member used for null interference cursors.
InterferenceCache::BlockInterference InterferenceCache::Cursor::NoInterference;

void InterferenceCache::init(MachineFunction *mf,
                             LiveIntervalUnion *liuarray,
                             SlotIndexes *indexes,
                             LiveIntervals *lis,
                             const TargetRegisterInfo *tri) {
  MF = mf;
  LIUArray = liuarray;
  TRI = tri;
  PhysRegEntries.assign(TRI->getNumRegs(), 0);
  for (unsigned i = 0; i != CacheEntries; ++i)
    Entries[i].clear(mf, indexes, lis);
}

InterferenceCache::Entry *InterferenceCache::get(unsigned PhysReg) {
  unsigned E = PhysRegEntries[PhysReg];
  if (E < CacheEntries && Entries[E].getPhysReg() == PhysReg) {
    if (!Entries[E].valid(LIUArray, TRI))
      Entries[E].revalidate(LIUArray, TRI);
    return &Entries[E];
  }
  // No valid entry exists, pick the next round-robin entry.
  E = RoundRobin;
  if (++RoundRobin == CacheEntries)
    RoundRobin = 0;
  for (unsigned i = 0; i != CacheEntries; ++i) {
    // Skip entries that are in use.
    if (Entries[E].hasRefs()) {
      if (++E == CacheEntries)
        E = 0;
      continue;
    }
    Entries[E].reset(PhysReg, LIUArray, TRI, MF);
    PhysRegEntries[PhysReg] = E;
    return &Entries[E];
  }
  llvm_unreachable("Ran out of interference cache entries.");
}

/// revalidate - LIU contents have changed, update tags.
void InterferenceCache::Entry::revalidate(LiveIntervalUnion *LIUArray,
                                          const TargetRegisterInfo *TRI) {
  // Invalidate all block entries.
  ++Tag;
  // Invalidate all iterators.
  PrevPos = SlotIndex();
  unsigned i = 0;
  for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units, ++i)
    RegUnits[i].VirtTag = LIUArray[*Units].getTag();
}

void InterferenceCache::Entry::reset(unsigned physReg,
                                     LiveIntervalUnion *LIUArray,
                                     const TargetRegisterInfo *TRI,
                                     const MachineFunction *MF) {
  assert(!hasRefs() && "Cannot reset cache entry with references");
  // LIU's changed, invalidate cache.
  ++Tag;
  PhysReg = physReg;
  Blocks.resize(MF->getNumBlockIDs());

  // Reset iterators.
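  // The RegUnits array is rebuilt below: each register unit of PhysReg gets a
  // fresh LiveIntervalUnion query plus a pointer to the fixed live range that
  // LiveIntervals maintains for that unit.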
  PrevPos = SlotIndex();
  RegUnits.clear();
  for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units) {
    RegUnits.push_back(LIUArray[*Units]);
    RegUnits.back().Fixed = &LIS->getRegUnit(*Units);
  }
}

bool InterferenceCache::Entry::valid(LiveIntervalUnion *LIUArray,
                                     const TargetRegisterInfo *TRI) {
  unsigned i = 0, e = RegUnits.size();
  for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units, ++i) {
    if (i == e)
      return false;
    if (LIUArray[*Units].changedSince(RegUnits[i].VirtTag))
      return false;
  }
  return i == e;
}

void InterferenceCache::Entry::update(unsigned MBBNum) {
  SlotIndex Start, Stop;
  tie(Start, Stop) = Indexes->getMBBRange(MBBNum);

  // Use advanceTo only when possible.
  if (PrevPos != Start) {
    if (!PrevPos.isValid() || Start < PrevPos) {
      for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
        RegUnitInfo &RUI = RegUnits[i];
        RUI.VirtI.find(Start);
        RUI.FixedI = RUI.Fixed->find(Start);
      }
    } else {
      for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
        RegUnitInfo &RUI = RegUnits[i];
        RUI.VirtI.advanceTo(Start);
        if (RUI.FixedI != RUI.Fixed->end())
          RUI.FixedI = RUI.Fixed->advanceTo(RUI.FixedI, Start);
      }
    }
    PrevPos = Start;
  }

  MachineFunction::const_iterator MFI = MF->getBlockNumbered(MBBNum);
  BlockInterference *BI = &Blocks[MBBNum];
  ArrayRef<SlotIndex> RegMaskSlots;
  ArrayRef<const uint32_t*> RegMaskBits;
  for (;;) {
    BI->Tag = Tag;
    BI->First = BI->Last = SlotIndex();

    // Check for first interference from virtregs.
    for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
      LiveIntervalUnion::SegmentIter &I = RegUnits[i].VirtI;
      if (!I.valid())
        continue;
      SlotIndex StartI = I.start();
      if (StartI >= Stop)
        continue;
      if (!BI->First.isValid() || StartI < BI->First)
        BI->First = StartI;
    }

    // Same thing for fixed interference.
    for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
      LiveInterval::const_iterator I = RegUnits[i].FixedI;
      LiveInterval::const_iterator E = RegUnits[i].Fixed->end();
      if (I == E)
        continue;
      SlotIndex StartI = I->start;
      if (StartI >= Stop)
        continue;
      if (!BI->First.isValid() || StartI < BI->First)
        BI->First = StartI;
    }

    // Also check for register mask interference.
    RegMaskSlots = LIS->getRegMaskSlotsInBlock(MBBNum);
    RegMaskBits = LIS->getRegMaskBitsInBlock(MBBNum);
    SlotIndex Limit = BI->First.isValid() ? BI->First : Stop;
    for (unsigned i = 0, e = RegMaskSlots.size();
         i != e && RegMaskSlots[i] < Limit; ++i)
      if (MachineOperand::clobbersPhysReg(RegMaskBits[i], PhysReg)) {
        // Register mask i clobbers PhysReg before the LIU interference.
        BI->First = RegMaskSlots[i];
        break;
      }

    PrevPos = Stop;
    if (BI->First.isValid())
      break;

    // No interference in this block? Go ahead and precompute the next block.
    if (++MFI == MF->end())
      return;
    MBBNum = MFI->getNumber();
    BI = &Blocks[MBBNum];
    if (BI->Tag == Tag)
      return;
    tie(Start, Stop) = Indexes->getMBBRange(MBBNum);
  }

  // Check for last interference in block.
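  // Each iterator is advanced to the end of the block; if it overshoots the
  // block (or runs off the end of its range), it is temporarily backed up one
  // segment to read the end of the last in-block segment, then moved forward
  // again so it stays at the position matching PrevPos.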
  for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
    LiveIntervalUnion::SegmentIter &I = RegUnits[i].VirtI;
    if (!I.valid() || I.start() >= Stop)
      continue;
    I.advanceTo(Stop);
    bool Backup = !I.valid() || I.start() >= Stop;
    if (Backup)
      --I;
    SlotIndex StopI = I.stop();
    if (!BI->Last.isValid() || StopI > BI->Last)
      BI->Last = StopI;
    if (Backup)
      ++I;
  }

  // Fixed interference.
  for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
    LiveInterval::iterator &I = RegUnits[i].FixedI;
    LiveInterval *LI = RegUnits[i].Fixed;
    if (I == LI->end() || I->start >= Stop)
      continue;
    I = LI->advanceTo(I, Stop);
    bool Backup = I == LI->end() || I->start >= Stop;
    if (Backup)
      --I;
    SlotIndex StopI = I->end;
    if (!BI->Last.isValid() || StopI > BI->Last)
      BI->Last = StopI;
    if (Backup)
      ++I;
  }

  // Also check for register mask interference.
  SlotIndex Limit = BI->Last.isValid() ? BI->Last : Start;
  for (unsigned i = RegMaskSlots.size();
       i && RegMaskSlots[i-1].getDeadSlot() > Limit; --i)
    if (MachineOperand::clobbersPhysReg(RegMaskBits[i-1], PhysReg)) {
      // Register mask i-1 clobbers PhysReg after the LIU interference.
      // Model the regmask clobber as a dead def.
      BI->Last = RegMaskSlots[i-1].getDeadSlot();
      break;
    }
}