Home | History | Annotate | Download | only in CodeGen
      1 //===- LiveIntervals.cpp - Live Interval Analysis -------------------------===//
      2 //
      3 //                     The LLVM Compiler Infrastructure
      4 //
      5 // This file is distributed under the University of Illinois Open Source
      6 // License. See LICENSE.TXT for details.
      7 //
      8 //===----------------------------------------------------------------------===//
      9 //
     10 /// \file This file implements the LiveInterval analysis pass which is used
     11 /// by the Linear Scan Register allocator. This pass linearizes the
     12 /// basic blocks of the function in DFS order and computes live intervals for
     13 /// each virtual and physical register.
     14 //
     15 //===----------------------------------------------------------------------===//
     16 
     17 #include "llvm/CodeGen/LiveIntervals.h"
     18 #include "LiveRangeCalc.h"
     19 #include "llvm/ADT/ArrayRef.h"
     20 #include "llvm/ADT/DepthFirstIterator.h"
     21 #include "llvm/ADT/SmallPtrSet.h"
     22 #include "llvm/ADT/SmallVector.h"
     23 #include "llvm/ADT/iterator_range.h"
     24 #include "llvm/Analysis/AliasAnalysis.h"
     25 #include "llvm/CodeGen/LiveInterval.h"
     26 #include "llvm/CodeGen/LiveVariables.h"
     27 #include "llvm/CodeGen/MachineBasicBlock.h"
     28 #include "llvm/CodeGen/MachineBlockFrequencyInfo.h"
     29 #include "llvm/CodeGen/MachineDominators.h"
     30 #include "llvm/CodeGen/MachineFunction.h"
     31 #include "llvm/CodeGen/MachineInstr.h"
     32 #include "llvm/CodeGen/MachineInstrBundle.h"
     33 #include "llvm/CodeGen/MachineOperand.h"
     34 #include "llvm/CodeGen/MachineRegisterInfo.h"
     35 #include "llvm/CodeGen/Passes.h"
     36 #include "llvm/CodeGen/SlotIndexes.h"
     37 #include "llvm/CodeGen/TargetRegisterInfo.h"
     38 #include "llvm/CodeGen/TargetSubtargetInfo.h"
     39 #include "llvm/CodeGen/VirtRegMap.h"
     40 #include "llvm/Config/llvm-config.h"
     41 #include "llvm/MC/LaneBitmask.h"
     42 #include "llvm/MC/MCRegisterInfo.h"
     43 #include "llvm/Pass.h"
     44 #include "llvm/Support/BlockFrequency.h"
     45 #include "llvm/Support/CommandLine.h"
     46 #include "llvm/Support/Compiler.h"
     47 #include "llvm/Support/Debug.h"
     48 #include "llvm/Support/MathExtras.h"
     49 #include "llvm/Support/raw_ostream.h"
     50 #include <algorithm>
     51 #include <cassert>
     52 #include <cstdint>
     53 #include <iterator>
     54 #include <tuple>
     55 #include <utility>
     56 
     57 using namespace llvm;
     58 
     59 #define DEBUG_TYPE "regalloc"
     60 
     61 char LiveIntervals::ID = 0;
     62 char &llvm::LiveIntervalsID = LiveIntervals::ID;
     63 INITIALIZE_PASS_BEGIN(LiveIntervals, "liveintervals",
     64                 "Live Interval Analysis", false, false)
     65 INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
     66 INITIALIZE_PASS_DEPENDENCY(MachineDominatorTree)
     67 INITIALIZE_PASS_DEPENDENCY(SlotIndexes)
     68 INITIALIZE_PASS_END(LiveIntervals, "liveintervals",
     69                 "Live Interval Analysis", false, false)
     70 
     71 #ifndef NDEBUG
     72 static cl::opt<bool> EnablePrecomputePhysRegs(
     73   "precompute-phys-liveness", cl::Hidden,
     74   cl::desc("Eagerly compute live intervals for all physreg units."));
     75 #else
     76 static bool EnablePrecomputePhysRegs = false;
     77 #endif // NDEBUG
     78 
     79 namespace llvm {
     80 
     81 cl::opt<bool> UseSegmentSetForPhysRegs(
     82     "use-segment-set-for-physregs", cl::Hidden, cl::init(true),
     83     cl::desc(
     84         "Use segment set for the computation of the live ranges of physregs."));
     85 
     86 } // end namespace llvm
     87 
     88 void LiveIntervals::getAnalysisUsage(AnalysisUsage &AU) const {
     89   AU.setPreservesCFG();
     90   AU.addRequired<AAResultsWrapperPass>();
     91   AU.addPreserved<AAResultsWrapperPass>();
     92   AU.addPreserved<LiveVariables>();
     93   AU.addPreservedID(MachineLoopInfoID);
     94   AU.addRequiredTransitiveID(MachineDominatorsID);
     95   AU.addPreservedID(MachineDominatorsID);
     96   AU.addPreserved<SlotIndexes>();
     97   AU.addRequiredTransitive<SlotIndexes>();
     98   MachineFunctionPass::getAnalysisUsage(AU);
     99 }
    100 
    101 LiveIntervals::LiveIntervals() : MachineFunctionPass(ID) {
    102   initializeLiveIntervalsPass(*PassRegistry::getPassRegistry());
    103 }
    104 
    105 LiveIntervals::~LiveIntervals() {
    106   delete LRCalc;
    107 }
    108 
    109 void LiveIntervals::releaseMemory() {
    110   // Free the live intervals themselves.
    111   for (unsigned i = 0, e = VirtRegIntervals.size(); i != e; ++i)
    112     delete VirtRegIntervals[TargetRegisterInfo::index2VirtReg(i)];
    113   VirtRegIntervals.clear();
    114   RegMaskSlots.clear();
    115   RegMaskBits.clear();
    116   RegMaskBlocks.clear();
    117 
    118   for (LiveRange *LR : RegUnitRanges)
    119     delete LR;
    120   RegUnitRanges.clear();
    121 
    122   // Release VNInfo memory regions, VNInfo objects don't need to be dtor'd.
    123   VNInfoAllocator.Reset();
    124 }
    125 
    126 bool LiveIntervals::runOnMachineFunction(MachineFunction &fn) {
    127   MF = &fn;
    128   MRI = &MF->getRegInfo();
    129   TRI = MF->getSubtarget().getRegisterInfo();
    130   TII = MF->getSubtarget().getInstrInfo();
    131   AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();
    132   Indexes = &getAnalysis<SlotIndexes>();
    133   DomTree = &getAnalysis<MachineDominatorTree>();
    134 
    135   if (!LRCalc)
    136     LRCalc = new LiveRangeCalc();
    137 
    138   // Allocate space for all virtual registers.
    139   VirtRegIntervals.resize(MRI->getNumVirtRegs());
    140 
    141   computeVirtRegs();
    142   computeRegMasks();
    143   computeLiveInRegUnits();
    144 
    145   if (EnablePrecomputePhysRegs) {
    146     // For stress testing, precompute live ranges of all physical register
    147     // units, including reserved registers.
    148     for (unsigned i = 0, e = TRI->getNumRegUnits(); i != e; ++i)
    149       getRegUnit(i);
    150   }
    151   LLVM_DEBUG(dump());
    152   return true;
    153 }
    154 
    155 void LiveIntervals::print(raw_ostream &OS, const Module* ) const {
    156   OS << "********** INTERVALS **********\n";
    157 
    158   // Dump the regunits.
    159   for (unsigned Unit = 0, UnitE = RegUnitRanges.size(); Unit != UnitE; ++Unit)
    160     if (LiveRange *LR = RegUnitRanges[Unit])
    161       OS << printRegUnit(Unit, TRI) << ' ' << *LR << '\n';
    162 
    163   // Dump the virtregs.
    164   for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
    165     unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
    166     if (hasInterval(Reg))
    167       OS << getInterval(Reg) << '\n';
    168   }
    169 
    170   OS << "RegMasks:";
    171   for (SlotIndex Idx : RegMaskSlots)
    172     OS << ' ' << Idx;
    173   OS << '\n';
    174 
    175   printInstrs(OS);
    176 }
    177 
    178 void LiveIntervals::printInstrs(raw_ostream &OS) const {
    179   OS << "********** MACHINEINSTRS **********\n";
    180   MF->print(OS, Indexes);
    181 }
    182 
    183 #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
    184 LLVM_DUMP_METHOD void LiveIntervals::dumpInstrs() const {
    185   printInstrs(dbgs());
    186 }
    187 #endif
    188 
    189 LiveInterval* LiveIntervals::createInterval(unsigned reg) {
    190   float Weight = TargetRegisterInfo::isPhysicalRegister(reg) ? huge_valf : 0.0F;
    191   return new LiveInterval(reg, Weight);
    192 }
    193 
    194 /// Compute the live interval of a virtual register, based on defs and uses.
    195 void LiveIntervals::computeVirtRegInterval(LiveInterval &LI) {
    196   assert(LRCalc && "LRCalc not initialized.");
    197   assert(LI.empty() && "Should only compute empty intervals.");
    198   LRCalc->reset(MF, getSlotIndexes(), DomTree, &getVNInfoAllocator());
    199   LRCalc->calculate(LI, MRI->shouldTrackSubRegLiveness(LI.reg));
    200   computeDeadValues(LI, nullptr);
    201 }
    202 
    203 void LiveIntervals::computeVirtRegs() {
    204   for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
    205     unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
    206     if (MRI->reg_nodbg_empty(Reg))
    207       continue;
    208     createAndComputeVirtRegInterval(Reg);
    209   }
    210 }
    211 
    212 void LiveIntervals::computeRegMasks() {
    213   RegMaskBlocks.resize(MF->getNumBlockIDs());
    214 
    215   // Find all instructions with regmask operands.
    216   for (const MachineBasicBlock &MBB : *MF) {
    217     std::pair<unsigned, unsigned> &RMB = RegMaskBlocks[MBB.getNumber()];
    218     RMB.first = RegMaskSlots.size();
    219 
    220     // Some block starts, such as EH funclets, create masks.
    221     if (const uint32_t *Mask = MBB.getBeginClobberMask(TRI)) {
    222       RegMaskSlots.push_back(Indexes->getMBBStartIdx(&MBB));
    223       RegMaskBits.push_back(Mask);
    224     }
    225 
    226     for (const MachineInstr &MI : MBB) {
    227       for (const MachineOperand &MO : MI.operands()) {
    228         if (!MO.isRegMask())
    229           continue;
    230         RegMaskSlots.push_back(Indexes->getInstructionIndex(MI).getRegSlot());
    231         RegMaskBits.push_back(MO.getRegMask());
    232       }
    233     }
    234 
    235     // Some block ends, such as funclet returns, create masks. Put the mask on
    236     // the last instruction of the block, because MBB slot index intervals are
    237     // half-open.
    238     if (const uint32_t *Mask = MBB.getEndClobberMask(TRI)) {
    239       assert(!MBB.empty() && "empty return block?");
    240       RegMaskSlots.push_back(
    241           Indexes->getInstructionIndex(MBB.back()).getRegSlot());
    242       RegMaskBits.push_back(Mask);
    243     }
    244 
    245     // Compute the number of register mask instructions in this block.
    246     RMB.second = RegMaskSlots.size() - RMB.first;
    247   }
    248 }
    249 
    250 //===----------------------------------------------------------------------===//
    251 //                           Register Unit Liveness
    252 //===----------------------------------------------------------------------===//
    253 //
    254 // Fixed interference typically comes from ABI boundaries: Function arguments
    255 // and return values are passed in fixed registers, and so are exception
    256 // pointers entering landing pads. Certain instructions require values to be
    257 // present in specific registers. That is also represented through fixed
    258 // interference.
    259 //
    260 
    261 /// Compute the live range of a register unit, based on the uses and defs of
    262 /// aliasing registers.  The range should be empty, or contain only dead
    263 /// phi-defs from ABI blocks.
    264 void LiveIntervals::computeRegUnitRange(LiveRange &LR, unsigned Unit) {
    265   assert(LRCalc && "LRCalc not initialized.");
    266   LRCalc->reset(MF, getSlotIndexes(), DomTree, &getVNInfoAllocator());
    267 
    268   // The physregs aliasing Unit are the roots and their super-registers.
    269   // Create all values as dead defs before extending to uses. Note that roots
    270   // may share super-registers. That's OK because createDeadDefs() is
    271   // idempotent. It is very rare for a register unit to have multiple roots, so
    272   // uniquing super-registers is probably not worthwhile.
    273   bool IsReserved = false;
    274   for (MCRegUnitRootIterator Root(Unit, TRI); Root.isValid(); ++Root) {
    275     bool IsRootReserved = true;
    276     for (MCSuperRegIterator Super(*Root, TRI, /*IncludeSelf=*/true);
    277          Super.isValid(); ++Super) {
    278       unsigned Reg = *Super;
    279       if (!MRI->reg_empty(Reg))
    280         LRCalc->createDeadDefs(LR, Reg);
    281       // A register unit is considered reserved if all its roots and all their
    282       // super registers are reserved.
    283       if (!MRI->isReserved(Reg))
    284         IsRootReserved = false;
    285     }
    286     IsReserved |= IsRootReserved;
    287   }
    288   assert(IsReserved == MRI->isReservedRegUnit(Unit) &&
    289          "reserved computation mismatch");
    290 
    291   // Now extend LR to reach all uses.
    292   // Ignore uses of reserved registers. We only track defs of those.
    293   if (!IsReserved) {
    294     for (MCRegUnitRootIterator Root(Unit, TRI); Root.isValid(); ++Root) {
    295       for (MCSuperRegIterator Super(*Root, TRI, /*IncludeSelf=*/true);
    296            Super.isValid(); ++Super) {
    297         unsigned Reg = *Super;
    298         if (!MRI->reg_empty(Reg))
    299           LRCalc->extendToUses(LR, Reg);
    300       }
    301     }
    302   }
    303 
    304   // Flush the segment set to the segment vector.
    305   if (UseSegmentSetForPhysRegs)
    306     LR.flushSegmentSet();
    307 }
    308 
    309 /// Precompute the live ranges of any register units that are live-in to an ABI
    310 /// block somewhere. Register values can appear without a corresponding def when
    311 /// entering the entry block or a landing pad.
    312 void LiveIntervals::computeLiveInRegUnits() {
    313   RegUnitRanges.resize(TRI->getNumRegUnits());
    314   LLVM_DEBUG(dbgs() << "Computing live-in reg-units in ABI blocks.\n");
    315 
    316   // Keep track of the live range sets allocated.
    317   SmallVector<unsigned, 8> NewRanges;
    318 
    319   // Check all basic blocks for live-ins.
    320   for (const MachineBasicBlock &MBB : *MF) {
    321     // We only care about ABI blocks: Entry + landing pads.
    322     if ((&MBB != &MF->front() && !MBB.isEHPad()) || MBB.livein_empty())
    323       continue;
    324 
    325     // Create phi-defs at Begin for all live-in registers.
    326     SlotIndex Begin = Indexes->getMBBStartIdx(&MBB);
    327     LLVM_DEBUG(dbgs() << Begin << "\t" << printMBBReference(MBB));
    328     for (const auto &LI : MBB.liveins()) {
    329       for (MCRegUnitIterator Units(LI.PhysReg, TRI); Units.isValid(); ++Units) {
    330         unsigned Unit = *Units;
    331         LiveRange *LR = RegUnitRanges[Unit];
    332         if (!LR) {
    333           // Use segment set to speed-up initial computation of the live range.
    334           LR = RegUnitRanges[Unit] = new LiveRange(UseSegmentSetForPhysRegs);
    335           NewRanges.push_back(Unit);
    336         }
    337         VNInfo *VNI = LR->createDeadDef(Begin, getVNInfoAllocator());
    338         (void)VNI;
    339         LLVM_DEBUG(dbgs() << ' ' << printRegUnit(Unit, TRI) << '#' << VNI->id);
    340       }
    341     }
    342     LLVM_DEBUG(dbgs() << '\n');
    343   }
    344   LLVM_DEBUG(dbgs() << "Created " << NewRanges.size() << " new intervals.\n");
    345 
    346   // Compute the 'normal' part of the ranges.
    347   for (unsigned Unit : NewRanges)
    348     computeRegUnitRange(*RegUnitRanges[Unit], Unit);
    349 }
    350 
    351 static void createSegmentsForValues(LiveRange &LR,
    352     iterator_range<LiveInterval::vni_iterator> VNIs) {
    353   for (VNInfo *VNI : VNIs) {
    354     if (VNI->isUnused())
    355       continue;
    356     SlotIndex Def = VNI->def;
    357     LR.addSegment(LiveRange::Segment(Def, Def.getDeadSlot(), VNI));
    358   }
    359 }
    360 
    361 void LiveIntervals::extendSegmentsToUses(LiveRange &Segments,
    362                                          ShrinkToUsesWorkList &WorkList,
    363                                          unsigned Reg, LaneBitmask LaneMask) {
    364   // Keep track of the PHIs that are in use.
    365   SmallPtrSet<VNInfo*, 8> UsedPHIs;
    366   // Blocks that have already been added to WorkList as live-out.
    367   SmallPtrSet<const MachineBasicBlock*, 16> LiveOut;
    368 
    369   auto getSubRange = [](const LiveInterval &I, LaneBitmask M)
    370         -> const LiveRange& {
    371     if (M.none())
    372       return I;
    373     for (const LiveInterval::SubRange &SR : I.subranges()) {
    374       if ((SR.LaneMask & M).any()) {
    375         assert(SR.LaneMask == M && "Expecting lane masks to match exactly");
    376         return SR;
    377       }
    378     }
    379     llvm_unreachable("Subrange for mask not found");
    380   };
    381 
    382   const LiveInterval &LI = getInterval(Reg);
    383   const LiveRange &OldRange = getSubRange(LI, LaneMask);
    384 
    385   // Extend intervals to reach all uses in WorkList.
    386   while (!WorkList.empty()) {
    387     SlotIndex Idx = WorkList.back().first;
    388     VNInfo *VNI = WorkList.back().second;
    389     WorkList.pop_back();
    390     const MachineBasicBlock *MBB = Indexes->getMBBFromIndex(Idx.getPrevSlot());
    391     SlotIndex BlockStart = Indexes->getMBBStartIdx(MBB);
    392 
    393     // Extend the live range for VNI to be live at Idx.
    394     if (VNInfo *ExtVNI = Segments.extendInBlock(BlockStart, Idx)) {
    395       assert(ExtVNI == VNI && "Unexpected existing value number");
    396       (void)ExtVNI;
    397       // Is this a PHIDef we haven't seen before?
    398       if (!VNI->isPHIDef() || VNI->def != BlockStart ||
    399           !UsedPHIs.insert(VNI).second)
    400         continue;
    401       // The PHI is live, make sure the predecessors are live-out.
    402       for (const MachineBasicBlock *Pred : MBB->predecessors()) {
    403         if (!LiveOut.insert(Pred).second)
    404           continue;
    405         SlotIndex Stop = Indexes->getMBBEndIdx(Pred);
    406         // A predecessor is not required to have a live-out value for a PHI.
    407         if (VNInfo *PVNI = OldRange.getVNInfoBefore(Stop))
    408           WorkList.push_back(std::make_pair(Stop, PVNI));
    409       }
    410       continue;
    411     }
    412 
    413     // VNI is live-in to MBB.
    414     LLVM_DEBUG(dbgs() << " live-in at " << BlockStart << '\n');
    415     Segments.addSegment(LiveRange::Segment(BlockStart, Idx, VNI));
    416 
    417     // Make sure VNI is live-out from the predecessors.
    418     for (const MachineBasicBlock *Pred : MBB->predecessors()) {
    419       if (!LiveOut.insert(Pred).second)
    420         continue;
    421       SlotIndex Stop = Indexes->getMBBEndIdx(Pred);
    422       if (VNInfo *OldVNI = OldRange.getVNInfoBefore(Stop)) {
    423         assert(OldVNI == VNI && "Wrong value out of predecessor");
    424         (void)OldVNI;
    425         WorkList.push_back(std::make_pair(Stop, VNI));
    426       } else {
    427 #ifndef NDEBUG
    428         // There was no old VNI. Verify that Stop is jointly dominated
    429         // by <undef>s for this live range.
    430         assert(LaneMask.any() &&
    431                "Missing value out of predecessor for main range");
    432         SmallVector<SlotIndex,8> Undefs;
    433         LI.computeSubRangeUndefs(Undefs, LaneMask, *MRI, *Indexes);
    434         assert(LiveRangeCalc::isJointlyDominated(Pred, Undefs, *Indexes) &&
    435                "Missing value out of predecessor for subrange");
    436 #endif
    437       }
    438     }
    439   }
    440 }
    441 
    442 bool LiveIntervals::shrinkToUses(LiveInterval *li,
    443                                  SmallVectorImpl<MachineInstr*> *dead) {
    444   LLVM_DEBUG(dbgs() << "Shrink: " << *li << '\n');
    445   assert(TargetRegisterInfo::isVirtualRegister(li->reg)
    446          && "Can only shrink virtual registers");
    447 
    448   // Shrink subregister live ranges.
    449   bool NeedsCleanup = false;
    450   for (LiveInterval::SubRange &S : li->subranges()) {
    451     shrinkToUses(S, li->reg);
    452     if (S.empty())
    453       NeedsCleanup = true;
    454   }
    455   if (NeedsCleanup)
    456     li->removeEmptySubRanges();
    457 
    458   // Find all the values used, including PHI kills.
    459   ShrinkToUsesWorkList WorkList;
    460 
    461   // Visit all instructions reading li->reg.
    462   unsigned Reg = li->reg;
    463   for (MachineInstr &UseMI : MRI->reg_instructions(Reg)) {
    464     if (UseMI.isDebugValue() || !UseMI.readsVirtualRegister(Reg))
    465       continue;
    466     SlotIndex Idx = getInstructionIndex(UseMI).getRegSlot();
    467     LiveQueryResult LRQ = li->Query(Idx);
    468     VNInfo *VNI = LRQ.valueIn();
    469     if (!VNI) {
    470       // This shouldn't happen: readsVirtualRegister returns true, but there is
    471       // no live value. It is likely caused by a target getting <undef> flags
    472       // wrong.
    473       LLVM_DEBUG(
    474           dbgs() << Idx << '\t' << UseMI
    475                  << "Warning: Instr claims to read non-existent value in "
    476                  << *li << '\n');
    477       continue;
    478     }
    479     // Special case: An early-clobber tied operand reads and writes the
    480     // register one slot early.
    481     if (VNInfo *DefVNI = LRQ.valueDefined())
    482       Idx = DefVNI->def;
    483 
    484     WorkList.push_back(std::make_pair(Idx, VNI));
    485   }
    486 
    487   // Create new live ranges with only minimal live segments per def.
    488   LiveRange NewLR;
    489   createSegmentsForValues(NewLR, make_range(li->vni_begin(), li->vni_end()));
    490   extendSegmentsToUses(NewLR, WorkList, Reg, LaneBitmask::getNone());
    491 
    492   // Move the trimmed segments back.
    493   li->segments.swap(NewLR.segments);
    494 
    495   // Handle dead values.
    496   bool CanSeparate = computeDeadValues(*li, dead);
    497   LLVM_DEBUG(dbgs() << "Shrunk: " << *li << '\n');
    498   return CanSeparate;
    499 }
    500 
    501 bool LiveIntervals::computeDeadValues(LiveInterval &LI,
    502                                       SmallVectorImpl<MachineInstr*> *dead) {
    503   bool MayHaveSplitComponents = false;
    504   for (VNInfo *VNI : LI.valnos) {
    505     if (VNI->isUnused())
    506       continue;
    507     SlotIndex Def = VNI->def;
    508     LiveRange::iterator I = LI.FindSegmentContaining(Def);
    509     assert(I != LI.end() && "Missing segment for VNI");
    510 
    511     // Is the register live before? Otherwise we may have to add a read-undef
    512     // flag for subregister defs.
    513     unsigned VReg = LI.reg;
    514     if (MRI->shouldTrackSubRegLiveness(VReg)) {
    515       if ((I == LI.begin() || std::prev(I)->end < Def) && !VNI->isPHIDef()) {
    516         MachineInstr *MI = getInstructionFromIndex(Def);
    517         MI->setRegisterDefReadUndef(VReg);
    518       }
    519     }
    520 
    521     if (I->end != Def.getDeadSlot())
    522       continue;
    523     if (VNI->isPHIDef()) {
    524       // This is a dead PHI. Remove it.
    525       VNI->markUnused();
    526       LI.removeSegment(I);
    527       LLVM_DEBUG(dbgs() << "Dead PHI at " << Def << " may separate interval\n");
    528       MayHaveSplitComponents = true;
    529     } else {
    530       // This is a dead def. Make sure the instruction knows.
    531       MachineInstr *MI = getInstructionFromIndex(Def);
    532       assert(MI && "No instruction defining live value");
    533       MI->addRegisterDead(LI.reg, TRI);
    534       if (dead && MI->allDefsAreDead()) {
    535         LLVM_DEBUG(dbgs() << "All defs dead: " << Def << '\t' << *MI);
    536         dead->push_back(MI);
    537       }
    538     }
    539   }
    540   return MayHaveSplitComponents;
    541 }
    542 
    543 void LiveIntervals::shrinkToUses(LiveInterval::SubRange &SR, unsigned Reg) {
    544   LLVM_DEBUG(dbgs() << "Shrink: " << SR << '\n');
    545   assert(TargetRegisterInfo::isVirtualRegister(Reg)
    546          && "Can only shrink virtual registers");
    547   // Find all the values used, including PHI kills.
    548   ShrinkToUsesWorkList WorkList;
    549 
    550   // Visit all instructions reading Reg.
    551   SlotIndex LastIdx;
    552   for (MachineOperand &MO : MRI->use_nodbg_operands(Reg)) {
    553     // Skip "undef" uses.
    554     if (!MO.readsReg())
    555       continue;
    556     // Maybe the operand is for a subregister we don't care about.
    557     unsigned SubReg = MO.getSubReg();
    558     if (SubReg != 0) {
    559       LaneBitmask LaneMask = TRI->getSubRegIndexLaneMask(SubReg);
    560       if ((LaneMask & SR.LaneMask).none())
    561         continue;
    562     }
    563     // We only need to visit each instruction once.
    564     MachineInstr *UseMI = MO.getParent();
    565     SlotIndex Idx = getInstructionIndex(*UseMI).getRegSlot();
    566     if (Idx == LastIdx)
    567       continue;
    568     LastIdx = Idx;
    569 
    570     LiveQueryResult LRQ = SR.Query(Idx);
    571     VNInfo *VNI = LRQ.valueIn();
    572     // For Subranges it is possible that only undef values are left in that
    573     // part of the subregister, so there is no real liverange at the use
    574     if (!VNI)
    575       continue;
    576 
    577     // Special case: An early-clobber tied operand reads and writes the
    578     // register one slot early.
    579     if (VNInfo *DefVNI = LRQ.valueDefined())
    580       Idx = DefVNI->def;
    581 
    582     WorkList.push_back(std::make_pair(Idx, VNI));
    583   }
    584 
    585   // Create a new live ranges with only minimal live segments per def.
    586   LiveRange NewLR;
    587   createSegmentsForValues(NewLR, make_range(SR.vni_begin(), SR.vni_end()));
    588   extendSegmentsToUses(NewLR, WorkList, Reg, SR.LaneMask);
    589 
    590   // Move the trimmed ranges back.
    591   SR.segments.swap(NewLR.segments);
    592 
    593   // Remove dead PHI value numbers
    594   for (VNInfo *VNI : SR.valnos) {
    595     if (VNI->isUnused())
    596       continue;
    597     const LiveRange::Segment *Segment = SR.getSegmentContaining(VNI->def);
    598     assert(Segment != nullptr && "Missing segment for VNI");
    599     if (Segment->end != VNI->def.getDeadSlot())
    600       continue;
    601     if (VNI->isPHIDef()) {
    602       // This is a dead PHI. Remove it.
    603       LLVM_DEBUG(dbgs() << "Dead PHI at " << VNI->def
    604                         << " may separate interval\n");
    605       VNI->markUnused();
    606       SR.removeSegment(*Segment);
    607     }
    608   }
    609 
    610   LLVM_DEBUG(dbgs() << "Shrunk: " << SR << '\n');
    611 }
    612 
    613 void LiveIntervals::extendToIndices(LiveRange &LR,
    614                                     ArrayRef<SlotIndex> Indices,
    615                                     ArrayRef<SlotIndex> Undefs) {
    616   assert(LRCalc && "LRCalc not initialized.");
    617   LRCalc->reset(MF, getSlotIndexes(), DomTree, &getVNInfoAllocator());
    618   for (SlotIndex Idx : Indices)
    619     LRCalc->extend(LR, Idx, /*PhysReg=*/0, Undefs);
    620 }
    621 
    622 void LiveIntervals::pruneValue(LiveRange &LR, SlotIndex Kill,
    623                                SmallVectorImpl<SlotIndex> *EndPoints) {
    624   LiveQueryResult LRQ = LR.Query(Kill);
    625   VNInfo *VNI = LRQ.valueOutOrDead();
    626   if (!VNI)
    627     return;
    628 
    629   MachineBasicBlock *KillMBB = Indexes->getMBBFromIndex(Kill);
    630   SlotIndex MBBEnd = Indexes->getMBBEndIdx(KillMBB);
    631 
    632   // If VNI isn't live out from KillMBB, the value is trivially pruned.
    633   if (LRQ.endPoint() < MBBEnd) {
    634     LR.removeSegment(Kill, LRQ.endPoint());
    635     if (EndPoints) EndPoints->push_back(LRQ.endPoint());
    636     return;
    637   }
    638 
    639   // VNI is live out of KillMBB.
    640   LR.removeSegment(Kill, MBBEnd);
    641   if (EndPoints) EndPoints->push_back(MBBEnd);
    642 
    643   // Find all blocks that are reachable from KillMBB without leaving VNI's live
    644   // range. It is possible that KillMBB itself is reachable, so start a DFS
    645   // from each successor.
    646   using VisitedTy = df_iterator_default_set<MachineBasicBlock*,9>;
    647   VisitedTy Visited;
    648   for (MachineBasicBlock *Succ : KillMBB->successors()) {
    649     for (df_ext_iterator<MachineBasicBlock*, VisitedTy>
    650          I = df_ext_begin(Succ, Visited), E = df_ext_end(Succ, Visited);
    651          I != E;) {
    652       MachineBasicBlock *MBB = *I;
    653 
    654       // Check if VNI is live in to MBB.
    655       SlotIndex MBBStart, MBBEnd;
    656       std::tie(MBBStart, MBBEnd) = Indexes->getMBBRange(MBB);
    657       LiveQueryResult LRQ = LR.Query(MBBStart);
    658       if (LRQ.valueIn() != VNI) {
    659         // This block isn't part of the VNI segment. Prune the search.
    660         I.skipChildren();
    661         continue;
    662       }
    663 
    664       // Prune the search if VNI is killed in MBB.
    665       if (LRQ.endPoint() < MBBEnd) {
    666         LR.removeSegment(MBBStart, LRQ.endPoint());
    667         if (EndPoints) EndPoints->push_back(LRQ.endPoint());
    668         I.skipChildren();
    669         continue;
    670       }
    671 
    672       // VNI is live through MBB.
    673       LR.removeSegment(MBBStart, MBBEnd);
    674       if (EndPoints) EndPoints->push_back(MBBEnd);
    675       ++I;
    676     }
    677   }
    678 }
    679 
    680 //===----------------------------------------------------------------------===//
    681 // Register allocator hooks.
    682 //
    683 
    684 void LiveIntervals::addKillFlags(const VirtRegMap *VRM) {
    685   // Keep track of regunit ranges.
    686   SmallVector<std::pair<const LiveRange*, LiveRange::const_iterator>, 8> RU;
    687   // Keep track of subregister ranges.
    688   SmallVector<std::pair<const LiveInterval::SubRange*,
    689                         LiveRange::const_iterator>, 4> SRs;
    690 
    691   for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
    692     unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
    693     if (MRI->reg_nodbg_empty(Reg))
    694       continue;
    695     const LiveInterval &LI = getInterval(Reg);
    696     if (LI.empty())
    697       continue;
    698 
    699     // Find the regunit intervals for the assigned register. They may overlap
    700     // the virtual register live range, cancelling any kills.
    701     RU.clear();
    702     for (MCRegUnitIterator Unit(VRM->getPhys(Reg), TRI); Unit.isValid();
    703          ++Unit) {
    704       const LiveRange &RURange = getRegUnit(*Unit);
    705       if (RURange.empty())
    706         continue;
    707       RU.push_back(std::make_pair(&RURange, RURange.find(LI.begin()->end)));
    708     }
    709 
    710     if (MRI->subRegLivenessEnabled()) {
    711       SRs.clear();
    712       for (const LiveInterval::SubRange &SR : LI.subranges()) {
    713         SRs.push_back(std::make_pair(&SR, SR.find(LI.begin()->end)));
    714       }
    715     }
    716 
    717     // Every instruction that kills Reg corresponds to a segment range end
    718     // point.
    719     for (LiveInterval::const_iterator RI = LI.begin(), RE = LI.end(); RI != RE;
    720          ++RI) {
    721       // A block index indicates an MBB edge.
    722       if (RI->end.isBlock())
    723         continue;
    724       MachineInstr *MI = getInstructionFromIndex(RI->end);
    725       if (!MI)
    726         continue;
    727 
    728       // Check if any of the regunits are live beyond the end of RI. That could
    729       // happen when a physreg is defined as a copy of a virtreg:
    730       //
    731       //   %eax = COPY %5
    732       //   FOO %5             <--- MI, cancel kill because %eax is live.
    733       //   BAR killed %eax
    734       //
    735       // There should be no kill flag on FOO when %5 is rewritten as %eax.
    736       for (auto &RUP : RU) {
    737         const LiveRange &RURange = *RUP.first;
    738         LiveRange::const_iterator &I = RUP.second;
    739         if (I == RURange.end())
    740           continue;
    741         I = RURange.advanceTo(I, RI->end);
    742         if (I == RURange.end() || I->start >= RI->end)
    743           continue;
    744         // I is overlapping RI.
    745         goto CancelKill;
    746       }
    747 
    748       if (MRI->subRegLivenessEnabled()) {
    749         // When reading a partial undefined value we must not add a kill flag.
    750         // The regalloc might have used the undef lane for something else.
    751         // Example:
    752         //     %1 = ...                  ; R32: %1
    753         //     %2:high16 = ...           ; R64: %2
    754         //        = read killed %2        ; R64: %2
    755         //        = read %1              ; R32: %1
    756         // The <kill> flag is correct for %2, but the register allocator may
    757         // assign R0L to %1, and R0 to %2 because the low 32bits of R0
    758         // are actually never written by %2. After assignment the <kill>
    759         // flag at the read instruction is invalid.
    760         LaneBitmask DefinedLanesMask;
    761         if (!SRs.empty()) {
    762           // Compute a mask of lanes that are defined.
    763           DefinedLanesMask = LaneBitmask::getNone();
    764           for (auto &SRP : SRs) {
    765             const LiveInterval::SubRange &SR = *SRP.first;
    766             LiveRange::const_iterator &I = SRP.second;
    767             if (I == SR.end())
    768               continue;
    769             I = SR.advanceTo(I, RI->end);
    770             if (I == SR.end() || I->start >= RI->end)
    771               continue;
    772             // I is overlapping RI
    773             DefinedLanesMask |= SR.LaneMask;
    774           }
    775         } else
    776           DefinedLanesMask = LaneBitmask::getAll();
    777 
    778         bool IsFullWrite = false;
    779         for (const MachineOperand &MO : MI->operands()) {
    780           if (!MO.isReg() || MO.getReg() != Reg)
    781             continue;
    782           if (MO.isUse()) {
    783             // Reading any undefined lanes?
    784             LaneBitmask UseMask = TRI->getSubRegIndexLaneMask(MO.getSubReg());
    785             if ((UseMask & ~DefinedLanesMask).any())
    786               goto CancelKill;
    787           } else if (MO.getSubReg() == 0) {
    788             // Writing to the full register?
    789             assert(MO.isDef());
    790             IsFullWrite = true;
    791           }
    792         }
    793 
    794         // If an instruction writes to a subregister, a new segment starts in
    795         // the LiveInterval. But as this is only overriding part of the register
    796         // adding kill-flags is not correct here after registers have been
    797         // assigned.
    798         if (!IsFullWrite) {
    799           // Next segment has to be adjacent in the subregister write case.
    800           LiveRange::const_iterator N = std::next(RI);
    801           if (N != LI.end() && N->start == RI->end)
    802             goto CancelKill;
    803         }
    804       }
    805 
    806       MI->addRegisterKilled(Reg, nullptr);
    807       continue;
    808 CancelKill:
    809       MI->clearRegisterKills(Reg, nullptr);
    810     }
    811   }
    812 }
    813 
    814 MachineBasicBlock*
    815 LiveIntervals::intervalIsInOneMBB(const LiveInterval &LI) const {
    816   // A local live range must be fully contained inside the block, meaning it is
    817   // defined and killed at instructions, not at block boundaries. It is not
    818   // live in or out of any block.
    819   //
    820   // It is technically possible to have a PHI-defined live range identical to a
    821   // single block, but we are going to return false in that case.
    822 
    823   SlotIndex Start = LI.beginIndex();
    824   if (Start.isBlock())
    825     return nullptr;
    826 
    827   SlotIndex Stop = LI.endIndex();
    828   if (Stop.isBlock())
    829     return nullptr;
    830 
    831   // getMBBFromIndex doesn't need to search the MBB table when both indexes
    832   // belong to proper instructions.
    833   MachineBasicBlock *MBB1 = Indexes->getMBBFromIndex(Start);
    834   MachineBasicBlock *MBB2 = Indexes->getMBBFromIndex(Stop);
    835   return MBB1 == MBB2 ? MBB1 : nullptr;
    836 }
    837 
    838 bool
    839 LiveIntervals::hasPHIKill(const LiveInterval &LI, const VNInfo *VNI) const {
    840   for (const VNInfo *PHI : LI.valnos) {
    841     if (PHI->isUnused() || !PHI->isPHIDef())
    842       continue;
    843     const MachineBasicBlock *PHIMBB = getMBBFromIndex(PHI->def);
    844     // Conservatively return true instead of scanning huge predecessor lists.
    845     if (PHIMBB->pred_size() > 100)
    846       return true;
    847     for (const MachineBasicBlock *Pred : PHIMBB->predecessors())
    848       if (VNI == LI.getVNInfoBefore(Indexes->getMBBEndIdx(Pred)))
    849         return true;
    850   }
    851   return false;
    852 }
    853 
    854 float LiveIntervals::getSpillWeight(bool isDef, bool isUse,
    855                                     const MachineBlockFrequencyInfo *MBFI,
    856                                     const MachineInstr &MI) {
    857   return getSpillWeight(isDef, isUse, MBFI, MI.getParent());
    858 }
    859 
    860 float LiveIntervals::getSpillWeight(bool isDef, bool isUse,
    861                                     const MachineBlockFrequencyInfo *MBFI,
    862                                     const MachineBasicBlock *MBB) {
    863   BlockFrequency Freq = MBFI->getBlockFreq(MBB);
    864   const float Scale = 1.0f / MBFI->getEntryFreq();
    865   return (isDef + isUse) * (Freq.getFrequency() * Scale);
    866 }
    867 
    868 LiveRange::Segment
    869 LiveIntervals::addSegmentToEndOfBlock(unsigned reg, MachineInstr &startInst) {
    870   LiveInterval& Interval = createEmptyInterval(reg);
    871   VNInfo *VN = Interval.getNextValue(
    872       SlotIndex(getInstructionIndex(startInst).getRegSlot()),
    873       getVNInfoAllocator());
    874   LiveRange::Segment S(SlotIndex(getInstructionIndex(startInst).getRegSlot()),
    875                        getMBBEndIdx(startInst.getParent()), VN);
    876   Interval.addSegment(S);
    877 
    878   return S;
    879 }
    880 
    881 //===----------------------------------------------------------------------===//
    882 //                          Register mask functions
    883 //===----------------------------------------------------------------------===//
    884 
    885 bool LiveIntervals::checkRegMaskInterference(LiveInterval &LI,
    886                                              BitVector &UsableRegs) {
    887   if (LI.empty())
    888     return false;
    889   LiveInterval::iterator LiveI = LI.begin(), LiveE = LI.end();
    890 
    891   // Use a smaller arrays for local live ranges.
    892   ArrayRef<SlotIndex> Slots;
    893   ArrayRef<const uint32_t*> Bits;
    894   if (MachineBasicBlock *MBB = intervalIsInOneMBB(LI)) {
    895     Slots = getRegMaskSlotsInBlock(MBB->getNumber());
    896     Bits = getRegMaskBitsInBlock(MBB->getNumber());
    897   } else {
    898     Slots = getRegMaskSlots();
    899     Bits = getRegMaskBits();
    900   }
    901 
    902   // We are going to enumerate all the register mask slots contained in LI.
    903   // Start with a binary search of RegMaskSlots to find a starting point.
    904   ArrayRef<SlotIndex>::iterator SlotI =
    905     std::lower_bound(Slots.begin(), Slots.end(), LiveI->start);
    906   ArrayRef<SlotIndex>::iterator SlotE = Slots.end();
    907 
    908   // No slots in range, LI begins after the last call.
    909   if (SlotI == SlotE)
    910     return false;
    911 
    912   bool Found = false;
    913   while (true) {
    914     assert(*SlotI >= LiveI->start);
    915     // Loop over all slots overlapping this segment.
    916     while (*SlotI < LiveI->end) {
    917       // *SlotI overlaps LI. Collect mask bits.
    918       if (!Found) {
    919         // This is the first overlap. Initialize UsableRegs to all ones.
    920         UsableRegs.clear();
    921         UsableRegs.resize(TRI->getNumRegs(), true);
    922         Found = true;
    923       }
    924       // Remove usable registers clobbered by this mask.
    925       UsableRegs.clearBitsNotInMask(Bits[SlotI-Slots.begin()]);
    926       if (++SlotI == SlotE)
    927         return Found;
    928     }
    929     // *SlotI is beyond the current LI segment.
    930     LiveI = LI.advanceTo(LiveI, *SlotI);
    931     if (LiveI == LiveE)
    932       return Found;
    933     // Advance SlotI until it overlaps.
    934     while (*SlotI < LiveI->start)
    935       if (++SlotI == SlotE)
    936         return Found;
    937   }
    938 }
    939 
    940 //===----------------------------------------------------------------------===//
    941 //                         IntervalUpdate class.
    942 //===----------------------------------------------------------------------===//
    943 
    944 /// Toolkit used by handleMove to trim or extend live intervals.
    945 class LiveIntervals::HMEditor {
    946 private:
    947   LiveIntervals& LIS;
    948   const MachineRegisterInfo& MRI;
    949   const TargetRegisterInfo& TRI;
    950   SlotIndex OldIdx;
    951   SlotIndex NewIdx;
    952   SmallPtrSet<LiveRange*, 8> Updated;
    953   bool UpdateFlags;
    954 
    955 public:
    956   HMEditor(LiveIntervals& LIS, const MachineRegisterInfo& MRI,
    957            const TargetRegisterInfo& TRI,
    958            SlotIndex OldIdx, SlotIndex NewIdx, bool UpdateFlags)
    959     : LIS(LIS), MRI(MRI), TRI(TRI), OldIdx(OldIdx), NewIdx(NewIdx),
    960       UpdateFlags(UpdateFlags) {}
    961 
    962   // FIXME: UpdateFlags is a workaround that creates live intervals for all
    963   // physregs, even those that aren't needed for regalloc, in order to update
    964   // kill flags. This is wasteful. Eventually, LiveVariables will strip all kill
    965   // flags, and postRA passes will use a live register utility instead.
    966   LiveRange *getRegUnitLI(unsigned Unit) {
    967     if (UpdateFlags && !MRI.isReservedRegUnit(Unit))
    968       return &LIS.getRegUnit(Unit);
    969     return LIS.getCachedRegUnit(Unit);
    970   }
    971 
    972   /// Update all live ranges touched by MI, assuming a move from OldIdx to
    973   /// NewIdx.
    974   void updateAllRanges(MachineInstr *MI) {
    975     LLVM_DEBUG(dbgs() << "handleMove " << OldIdx << " -> " << NewIdx << ": "
    976                       << *MI);
    977     bool hasRegMask = false;
    978     for (MachineOperand &MO : MI->operands()) {
    979       if (MO.isRegMask())
    980         hasRegMask = true;
    981       if (!MO.isReg())
    982         continue;
    983       if (MO.isUse()) {
    984         if (!MO.readsReg())
    985           continue;
    986         // Aggressively clear all kill flags.
    987         // They are reinserted by VirtRegRewriter.
    988         MO.setIsKill(false);
    989       }
    990 
    991       unsigned Reg = MO.getReg();
    992       if (!Reg)
    993         continue;
    994       if (TargetRegisterInfo::isVirtualRegister(Reg)) {
    995         LiveInterval &LI = LIS.getInterval(Reg);
    996         if (LI.hasSubRanges()) {
    997           unsigned SubReg = MO.getSubReg();
    998           LaneBitmask LaneMask = SubReg ? TRI.getSubRegIndexLaneMask(SubReg)
    999                                         : MRI.getMaxLaneMaskForVReg(Reg);
   1000           for (LiveInterval::SubRange &S : LI.subranges()) {
   1001             if ((S.LaneMask & LaneMask).none())
   1002               continue;
   1003             updateRange(S, Reg, S.LaneMask);
   1004           }
   1005         }
   1006         updateRange(LI, Reg, LaneBitmask::getNone());
   1007         continue;
   1008       }
   1009 
   1010       // For physregs, only update the regunits that actually have a
   1011       // precomputed live range.
   1012       for (MCRegUnitIterator Units(Reg, &TRI); Units.isValid(); ++Units)
   1013         if (LiveRange *LR = getRegUnitLI(*Units))
   1014           updateRange(*LR, *Units, LaneBitmask::getNone());
   1015     }
   1016     if (hasRegMask)
   1017       updateRegMaskSlots();
   1018   }
   1019 
   1020 private:
   1021   /// Update a single live range, assuming an instruction has been moved from
   1022   /// OldIdx to NewIdx.
   1023   void updateRange(LiveRange &LR, unsigned Reg, LaneBitmask LaneMask) {
   1024     if (!Updated.insert(&LR).second)
   1025       return;
   1026     LLVM_DEBUG({
   1027       dbgs() << "     ";
   1028       if (TargetRegisterInfo::isVirtualRegister(Reg)) {
   1029         dbgs() << printReg(Reg);
   1030         if (LaneMask.any())
   1031           dbgs() << " L" << PrintLaneMask(LaneMask);
   1032       } else {
   1033         dbgs() << printRegUnit(Reg, &TRI);
   1034       }
   1035       dbgs() << ":\t" << LR << '\n';
   1036     });
   1037     if (SlotIndex::isEarlierInstr(OldIdx, NewIdx))
   1038       handleMoveDown(LR);
   1039     else
   1040       handleMoveUp(LR, Reg, LaneMask);
   1041     LLVM_DEBUG(dbgs() << "        -->\t" << LR << '\n');
   1042     LR.verify();
   1043   }
   1044 
   1045   /// Update LR to reflect an instruction has been moved downwards from OldIdx
   1046   /// to NewIdx (OldIdx < NewIdx).
   1047   void handleMoveDown(LiveRange &LR) {
   1048     LiveRange::iterator E = LR.end();
   1049     // Segment going into OldIdx.
   1050     LiveRange::iterator OldIdxIn = LR.find(OldIdx.getBaseIndex());
   1051 
   1052     // No value live before or after OldIdx? Nothing to do.
   1053     if (OldIdxIn == E || SlotIndex::isEarlierInstr(OldIdx, OldIdxIn->start))
   1054       return;
   1055 
   1056     LiveRange::iterator OldIdxOut;
   1057     // Do we have a value live-in to OldIdx?
   1058     if (SlotIndex::isEarlierInstr(OldIdxIn->start, OldIdx)) {
   1059       // If the live-in value already extends to NewIdx, there is nothing to do.
   1060       if (SlotIndex::isEarlierEqualInstr(NewIdx, OldIdxIn->end))
   1061         return;
   1062       // Aggressively remove all kill flags from the old kill point.
   1063       // Kill flags shouldn't be used while live intervals exist, they will be
   1064       // reinserted by VirtRegRewriter.
   1065       if (MachineInstr *KillMI = LIS.getInstructionFromIndex(OldIdxIn->end))
   1066         for (MIBundleOperands MO(*KillMI); MO.isValid(); ++MO)
   1067           if (MO->isReg() && MO->isUse())
   1068             MO->setIsKill(false);
   1069 
   1070       // Is there a def before NewIdx which is not OldIdx?
   1071       LiveRange::iterator Next = std::next(OldIdxIn);
   1072       if (Next != E && !SlotIndex::isSameInstr(OldIdx, Next->start) &&
   1073           SlotIndex::isEarlierInstr(Next->start, NewIdx)) {
   1074         // If we are here then OldIdx was just a use but not a def. We only have
   1075         // to ensure liveness extends to NewIdx.
   1076         LiveRange::iterator NewIdxIn =
   1077           LR.advanceTo(Next, NewIdx.getBaseIndex());
   1078         // Extend the segment before NewIdx if necessary.
   1079         if (NewIdxIn == E ||
   1080             !SlotIndex::isEarlierInstr(NewIdxIn->start, NewIdx)) {
   1081           LiveRange::iterator Prev = std::prev(NewIdxIn);
   1082           Prev->end = NewIdx.getRegSlot();
   1083         }
   1084         // Extend OldIdxIn.
   1085         OldIdxIn->end = Next->start;
   1086         return;
   1087       }
   1088 
   1089       // Adjust OldIdxIn->end to reach NewIdx. This may temporarily make LR
   1090       // invalid by overlapping ranges.
   1091       bool isKill = SlotIndex::isSameInstr(OldIdx, OldIdxIn->end);
   1092       OldIdxIn->end = NewIdx.getRegSlot(OldIdxIn->end.isEarlyClobber());
   1093       // If this was not a kill, then there was no def and we're done.
   1094       if (!isKill)
   1095         return;
   1096 
   1097       // Did we have a Def at OldIdx?
   1098       OldIdxOut = Next;
   1099       if (OldIdxOut == E || !SlotIndex::isSameInstr(OldIdx, OldIdxOut->start))
   1100         return;
   1101     } else {
   1102       OldIdxOut = OldIdxIn;
   1103     }
   1104 
   1105     // If we are here then there is a Definition at OldIdx. OldIdxOut points
   1106     // to the segment starting there.
   1107     assert(OldIdxOut != E && SlotIndex::isSameInstr(OldIdx, OldIdxOut->start) &&
   1108            "No def?");
   1109     VNInfo *OldIdxVNI = OldIdxOut->valno;
   1110     assert(OldIdxVNI->def == OldIdxOut->start && "Inconsistent def");
   1111 
   1112     // If the defined value extends beyond NewIdx, just move the beginning
   1113     // of the segment to NewIdx.
   1114     SlotIndex NewIdxDef = NewIdx.getRegSlot(OldIdxOut->start.isEarlyClobber());
   1115     if (SlotIndex::isEarlierInstr(NewIdxDef, OldIdxOut->end)) {
   1116       OldIdxVNI->def = NewIdxDef;
   1117       OldIdxOut->start = OldIdxVNI->def;
   1118       return;
   1119     }
   1120 
   1121     // If we are here then we have a Definition at OldIdx which ends before
   1122     // NewIdx.
   1123 
   1124     // Is there an existing Def at NewIdx?
   1125     LiveRange::iterator AfterNewIdx
   1126       = LR.advanceTo(OldIdxOut, NewIdx.getRegSlot());
   1127     bool OldIdxDefIsDead = OldIdxOut->end.isDead();
   1128     if (!OldIdxDefIsDead &&
   1129         SlotIndex::isEarlierInstr(OldIdxOut->end, NewIdxDef)) {
   1130       // OldIdx is not a dead def, and NewIdxDef is inside a new interval.
   1131       VNInfo *DefVNI;
   1132       if (OldIdxOut != LR.begin() &&
   1133           !SlotIndex::isEarlierInstr(std::prev(OldIdxOut)->end,
   1134                                      OldIdxOut->start)) {
   1135         // There is no gap between OldIdxOut and its predecessor anymore,
   1136         // merge them.
   1137         LiveRange::iterator IPrev = std::prev(OldIdxOut);
   1138         DefVNI = OldIdxVNI;
   1139         IPrev->end = OldIdxOut->end;
   1140       } else {
   1141         // The value is live in to OldIdx
   1142         LiveRange::iterator INext = std::next(OldIdxOut);
   1143         assert(INext != E && "Must have following segment");
   1144         // We merge OldIdxOut and its successor. As we're dealing with subreg
   1145         // reordering, there is always a successor to OldIdxOut in the same BB
   1146         // We don't need INext->valno anymore and will reuse for the new segment
   1147         // we create later.
   1148         DefVNI = OldIdxVNI;
   1149         INext->start = OldIdxOut->end;
   1150         INext->valno->def = INext->start;
   1151       }
   1152       // If NewIdx is behind the last segment, extend that and append a new one.
   1153       if (AfterNewIdx == E) {
   1154         // OldIdxOut is undef at this point, Slide (OldIdxOut;AfterNewIdx] up
   1155         // one position.
   1156         //    |-  ?/OldIdxOut -| |- X0 -| ... |- Xn -| end
   1157         // => |- X0/OldIdxOut -| ... |- Xn -| |- undef/NewS -| end
   1158         std::copy(std::next(OldIdxOut), E, OldIdxOut);
   1159         // The last segment is undefined now, reuse it for a dead def.
   1160         LiveRange::iterator NewSegment = std::prev(E);
   1161         *NewSegment = LiveRange::Segment(NewIdxDef, NewIdxDef.getDeadSlot(),
   1162                                          DefVNI);
   1163         DefVNI->def = NewIdxDef;
   1164 
   1165         LiveRange::iterator Prev = std::prev(NewSegment);
   1166         Prev->end = NewIdxDef;
   1167       } else {
   1168         // OldIdxOut is undef at this point, Slide (OldIdxOut;AfterNewIdx] up
   1169         // one position.
   1170         //    |-  ?/OldIdxOut -| |- X0 -| ... |- Xn/AfterNewIdx -| |- Next -|
   1171         // => |- X0/OldIdxOut -| ... |- Xn -| |- Xn/AfterNewIdx -| |- Next -|
   1172         std::copy(std::next(OldIdxOut), std::next(AfterNewIdx), OldIdxOut);
   1173         LiveRange::iterator Prev = std::prev(AfterNewIdx);
   1174         // We have two cases:
   1175         if (SlotIndex::isEarlierInstr(Prev->start, NewIdxDef)) {
   1176           // Case 1: NewIdx is inside a liverange. Split this liverange at
   1177           // NewIdxDef into the segment "Prev" followed by "NewSegment".
   1178           LiveRange::iterator NewSegment = AfterNewIdx;
   1179           *NewSegment = LiveRange::Segment(NewIdxDef, Prev->end, Prev->valno);
   1180           Prev->valno->def = NewIdxDef;
   1181 
   1182           *Prev = LiveRange::Segment(Prev->start, NewIdxDef, DefVNI);
   1183           DefVNI->def = Prev->start;
   1184         } else {
   1185           // Case 2: NewIdx is in a lifetime hole. Keep AfterNewIdx as is and
   1186           // turn Prev into a segment from NewIdx to AfterNewIdx->start.
   1187           *Prev = LiveRange::Segment(NewIdxDef, AfterNewIdx->start, DefVNI);
   1188           DefVNI->def = NewIdxDef;
   1189           assert(DefVNI != AfterNewIdx->valno);
   1190         }
   1191       }
   1192       return;
   1193     }
   1194 
   1195     if (AfterNewIdx != E &&
   1196         SlotIndex::isSameInstr(AfterNewIdx->start, NewIdxDef)) {
   1197       // There is an existing def at NewIdx. The def at OldIdx is coalesced into
   1198       // that value.
   1199       assert(AfterNewIdx->valno != OldIdxVNI && "Multiple defs of value?");
   1200       LR.removeValNo(OldIdxVNI);
   1201     } else {
   1202       // There was no existing def at NewIdx. We need to create a dead def
   1203       // at NewIdx. Shift segments over the old OldIdxOut segment, this frees
   1204       // a new segment at the place where we want to construct the dead def.
   1205       //    |- OldIdxOut -| |- X0 -| ... |- Xn -| |- AfterNewIdx -|
   1206       // => |- X0/OldIdxOut -| ... |- Xn -| |- undef/NewS. -| |- AfterNewIdx -|
   1207       assert(AfterNewIdx != OldIdxOut && "Inconsistent iterators");
   1208       std::copy(std::next(OldIdxOut), AfterNewIdx, OldIdxOut);
   1209       // We can reuse OldIdxVNI now.
   1210       LiveRange::iterator NewSegment = std::prev(AfterNewIdx);
   1211       VNInfo *NewSegmentVNI = OldIdxVNI;
   1212       NewSegmentVNI->def = NewIdxDef;
   1213       *NewSegment = LiveRange::Segment(NewIdxDef, NewIdxDef.getDeadSlot(),
   1214                                        NewSegmentVNI);
   1215     }
   1216   }
   1217 
   1218   /// Update LR to reflect an instruction has been moved upwards from OldIdx
   1219   /// to NewIdx (NewIdx < OldIdx).
   1220   void handleMoveUp(LiveRange &LR, unsigned Reg, LaneBitmask LaneMask) {
   1221     LiveRange::iterator E = LR.end();
   1222     // Segment going into OldIdx.
   1223     LiveRange::iterator OldIdxIn = LR.find(OldIdx.getBaseIndex());
   1224 
   1225     // No value live before or after OldIdx? Nothing to do.
   1226     if (OldIdxIn == E || SlotIndex::isEarlierInstr(OldIdx, OldIdxIn->start))
   1227       return;
   1228 
   1229     LiveRange::iterator OldIdxOut;
   1230     // Do we have a value live-in to OldIdx?
   1231     if (SlotIndex::isEarlierInstr(OldIdxIn->start, OldIdx)) {
   1232       // If the live-in value isn't killed here, then we have no Def at
   1233       // OldIdx, moreover the value must be live at NewIdx so there is nothing
   1234       // to do.
   1235       bool isKill = SlotIndex::isSameInstr(OldIdx, OldIdxIn->end);
   1236       if (!isKill)
   1237         return;
   1238 
   1239       // At this point we have to move OldIdxIn->end back to the nearest
   1240       // previous use or (dead-)def but no further than NewIdx.
   1241       SlotIndex DefBeforeOldIdx
   1242         = std::max(OldIdxIn->start.getDeadSlot(),
   1243                    NewIdx.getRegSlot(OldIdxIn->end.isEarlyClobber()));
   1244       OldIdxIn->end = findLastUseBefore(DefBeforeOldIdx, Reg, LaneMask);
   1245 
   1246       // Did we have a Def at OldIdx? If not we are done now.
   1247       OldIdxOut = std::next(OldIdxIn);
   1248       if (OldIdxOut == E || !SlotIndex::isSameInstr(OldIdx, OldIdxOut->start))
   1249         return;
   1250     } else {
   1251       OldIdxOut = OldIdxIn;
   1252       OldIdxIn = OldIdxOut != LR.begin() ? std::prev(OldIdxOut) : E;
   1253     }
   1254 
   1255     // If we are here then there is a Definition at OldIdx. OldIdxOut points
   1256     // to the segment starting there.
   1257     assert(OldIdxOut != E && SlotIndex::isSameInstr(OldIdx, OldIdxOut->start) &&
   1258            "No def?");
   1259     VNInfo *OldIdxVNI = OldIdxOut->valno;
   1260     assert(OldIdxVNI->def == OldIdxOut->start && "Inconsistent def");
   1261     bool OldIdxDefIsDead = OldIdxOut->end.isDead();
   1262 
   1263     // Is there an existing def at NewIdx?
   1264     SlotIndex NewIdxDef = NewIdx.getRegSlot(OldIdxOut->start.isEarlyClobber());
   1265     LiveRange::iterator NewIdxOut = LR.find(NewIdx.getRegSlot());
   1266     if (SlotIndex::isSameInstr(NewIdxOut->start, NewIdx)) {
   1267       assert(NewIdxOut->valno != OldIdxVNI &&
   1268              "Same value defined more than once?");
   1269       // If OldIdx was a dead def remove it.
   1270       if (!OldIdxDefIsDead) {
   1271         // Remove segment starting at NewIdx and move begin of OldIdxOut to
   1272         // NewIdx so it can take its place.
   1273         OldIdxVNI->def = NewIdxDef;
   1274         OldIdxOut->start = NewIdxDef;
   1275         LR.removeValNo(NewIdxOut->valno);
   1276       } else {
   1277         // Simply remove the dead def at OldIdx.
   1278         LR.removeValNo(OldIdxVNI);
   1279       }
   1280     } else {
   1281       // Previously nothing was live after NewIdx, so all we have to do now is
   1282       // move the begin of OldIdxOut to NewIdx.
   1283       if (!OldIdxDefIsDead) {
   1284         // Do we have any intermediate Defs between OldIdx and NewIdx?
   1285         if (OldIdxIn != E &&
   1286             SlotIndex::isEarlierInstr(NewIdxDef, OldIdxIn->start)) {
   1287           // OldIdx is not a dead def and NewIdx is before predecessor start.
   1288           LiveRange::iterator NewIdxIn = NewIdxOut;
   1289           assert(NewIdxIn == LR.find(NewIdx.getBaseIndex()));
   1290           const SlotIndex SplitPos = NewIdxDef;
   1291           OldIdxVNI = OldIdxIn->valno;
   1292 
   1293           // Merge the OldIdxIn and OldIdxOut segments into OldIdxOut.
   1294           OldIdxOut->valno->def = OldIdxIn->start;
   1295           *OldIdxOut = LiveRange::Segment(OldIdxIn->start, OldIdxOut->end,
   1296                                           OldIdxOut->valno);
   1297           // OldIdxIn and OldIdxVNI are now undef and can be overridden.
   1298           // We Slide [NewIdxIn, OldIdxIn) down one position.
   1299           //    |- X0/NewIdxIn -| ... |- Xn-1 -||- Xn/OldIdxIn -||- OldIdxOut -|
   1300           // => |- undef/NexIdxIn -| |- X0 -| ... |- Xn-1 -| |- Xn/OldIdxOut -|
   1301           std::copy_backward(NewIdxIn, OldIdxIn, OldIdxOut);
   1302           // NewIdxIn is now considered undef so we can reuse it for the moved
   1303           // value.
   1304           LiveRange::iterator NewSegment = NewIdxIn;
   1305           LiveRange::iterator Next = std::next(NewSegment);
   1306           if (SlotIndex::isEarlierInstr(Next->start, NewIdx)) {
   1307             // There is no gap between NewSegment and its predecessor.
   1308             *NewSegment = LiveRange::Segment(Next->start, SplitPos,
   1309                                              Next->valno);
   1310             *Next = LiveRange::Segment(SplitPos, Next->end, OldIdxVNI);
   1311             Next->valno->def = SplitPos;
   1312           } else {
   1313             // There is a gap between NewSegment and its predecessor
   1314             // Value becomes live in.
   1315             *NewSegment = LiveRange::Segment(SplitPos, Next->start, OldIdxVNI);
   1316             NewSegment->valno->def = SplitPos;
   1317           }
   1318         } else {
   1319           // Leave the end point of a live def.
   1320           OldIdxOut->start = NewIdxDef;
   1321           OldIdxVNI->def = NewIdxDef;
   1322           if (OldIdxIn != E && SlotIndex::isEarlierInstr(NewIdx, OldIdxIn->end))
   1323             OldIdxIn->end = NewIdx.getRegSlot();
   1324         }
   1325       } else if (OldIdxIn != E
   1326           && SlotIndex::isEarlierInstr(NewIdxOut->start, NewIdx)
   1327           && SlotIndex::isEarlierInstr(NewIdx, NewIdxOut->end)) {
   1328         // OldIdxVNI is a dead def that has been moved into the middle of
   1329         // another value in LR. That can happen when LR is a whole register,
   1330         // but the dead def is a write to a subreg that is dead at NewIdx.
   1331         // The dead def may have been moved across other values
   1332         // in LR, so move OldIdxOut up to NewIdxOut. Slide [NewIdxOut;OldIdxOut)
   1333         // down one position.
   1334         //    |- X0/NewIdxOut -| ... |- Xn-1 -| |- Xn/OldIdxOut -| |- next - |
   1335         // => |- X0/NewIdxOut -| |- X0 -| ... |- Xn-1 -| |- next -|
   1336         std::copy_backward(NewIdxOut, OldIdxOut, std::next(OldIdxOut));
   1337         // Modify the segment at NewIdxOut and the following segment to meet at
   1338         // the point of the dead def, with the following segment getting
   1339         // OldIdxVNI as its value number.
   1340         *NewIdxOut = LiveRange::Segment(
   1341             NewIdxOut->start, NewIdxDef.getRegSlot(), NewIdxOut->valno);
   1342         *(NewIdxOut + 1) = LiveRange::Segment(
   1343             NewIdxDef.getRegSlot(), (NewIdxOut + 1)->end, OldIdxVNI);
   1344         OldIdxVNI->def = NewIdxDef;
   1345         // Modify subsequent segments to be defined by the moved def OldIdxVNI.
   1346         for (auto Idx = NewIdxOut + 2; Idx <= OldIdxOut; ++Idx)
   1347           Idx->valno = OldIdxVNI;
   1348         // Aggressively remove all dead flags from the former dead definition.
   1349         // Kill/dead flags shouldn't be used while live intervals exist; they
   1350         // will be reinserted by VirtRegRewriter.
   1351         if (MachineInstr *KillMI = LIS.getInstructionFromIndex(NewIdx))
   1352           for (MIBundleOperands MO(*KillMI); MO.isValid(); ++MO)
   1353             if (MO->isReg() && !MO->isUse())
   1354               MO->setIsDead(false);
   1355       } else {
   1356         // OldIdxVNI is a dead def. It may have been moved across other values
   1357         // in LR, so move OldIdxOut up to NewIdxOut. Slide [NewIdxOut;OldIdxOut)
   1358         // down one position.
   1359         //    |- X0/NewIdxOut -| ... |- Xn-1 -| |- Xn/OldIdxOut -| |- next - |
   1360         // => |- undef/NewIdxOut -| |- X0 -| ... |- Xn-1 -| |- next -|
   1361         std::copy_backward(NewIdxOut, OldIdxOut, std::next(OldIdxOut));
   1362         // OldIdxVNI can be reused now to build a new dead def segment.
   1363         LiveRange::iterator NewSegment = NewIdxOut;
   1364         VNInfo *NewSegmentVNI = OldIdxVNI;
   1365         *NewSegment = LiveRange::Segment(NewIdxDef, NewIdxDef.getDeadSlot(),
   1366                                          NewSegmentVNI);
   1367         NewSegmentVNI->def = NewIdxDef;
   1368       }
   1369     }
   1370   }
   1371 
   1372   void updateRegMaskSlots() {
   1373     SmallVectorImpl<SlotIndex>::iterator RI =
   1374       std::lower_bound(LIS.RegMaskSlots.begin(), LIS.RegMaskSlots.end(),
   1375                        OldIdx);
   1376     assert(RI != LIS.RegMaskSlots.end() && *RI == OldIdx.getRegSlot() &&
   1377            "No RegMask at OldIdx.");
   1378     *RI = NewIdx.getRegSlot();
   1379     assert((RI == LIS.RegMaskSlots.begin() ||
   1380             SlotIndex::isEarlierInstr(*std::prev(RI), *RI)) &&
   1381            "Cannot move regmask instruction above another call");
   1382     assert((std::next(RI) == LIS.RegMaskSlots.end() ||
   1383             SlotIndex::isEarlierInstr(*RI, *std::next(RI))) &&
   1384            "Cannot move regmask instruction below another call");
   1385   }
   1386 
   1387   // Return the last use of reg between NewIdx and OldIdx.
   1388   SlotIndex findLastUseBefore(SlotIndex Before, unsigned Reg,
   1389                               LaneBitmask LaneMask) {
   1390     if (TargetRegisterInfo::isVirtualRegister(Reg)) {
   1391       SlotIndex LastUse = Before;
   1392       for (MachineOperand &MO : MRI.use_nodbg_operands(Reg)) {
   1393         if (MO.isUndef())
   1394           continue;
   1395         unsigned SubReg = MO.getSubReg();
   1396         if (SubReg != 0 && LaneMask.any()
   1397             && (TRI.getSubRegIndexLaneMask(SubReg) & LaneMask).none())
   1398           continue;
   1399 
   1400         const MachineInstr &MI = *MO.getParent();
   1401         SlotIndex InstSlot = LIS.getSlotIndexes()->getInstructionIndex(MI);
   1402         if (InstSlot > LastUse && InstSlot < OldIdx)
   1403           LastUse = InstSlot.getRegSlot();
   1404       }
   1405       return LastUse;
   1406     }
   1407 
   1408     // This is a regunit interval, so scanning the use list could be very
   1409     // expensive. Scan upwards from OldIdx instead.
   1410     assert(Before < OldIdx && "Expected upwards move");
   1411     SlotIndexes *Indexes = LIS.getSlotIndexes();
   1412     MachineBasicBlock *MBB = Indexes->getMBBFromIndex(Before);
   1413 
   1414     // OldIdx may not correspond to an instruction any longer, so set MII to
   1415     // point to the next instruction after OldIdx, or MBB->end().
   1416     MachineBasicBlock::iterator MII = MBB->end();
   1417     if (MachineInstr *MI = Indexes->getInstructionFromIndex(
   1418                            Indexes->getNextNonNullIndex(OldIdx)))
   1419       if (MI->getParent() == MBB)
   1420         MII = MI;
   1421 
   1422     MachineBasicBlock::iterator Begin = MBB->begin();
   1423     while (MII != Begin) {
   1424       if ((--MII)->isDebugInstr())
   1425         continue;
   1426       SlotIndex Idx = Indexes->getInstructionIndex(*MII);
   1427 
   1428       // Stop searching when Before is reached.
   1429       if (!SlotIndex::isEarlierInstr(Before, Idx))
   1430         return Before;
   1431 
   1432       // Check if MII uses Reg.
   1433       for (MIBundleOperands MO(*MII); MO.isValid(); ++MO)
   1434         if (MO->isReg() && !MO->isUndef() &&
   1435             TargetRegisterInfo::isPhysicalRegister(MO->getReg()) &&
   1436             TRI.hasRegUnit(MO->getReg(), Reg))
   1437           return Idx.getRegSlot();
   1438     }
   1439     // Didn't reach Before. It must be the first instruction in the block.
   1440     return Before;
   1441   }
   1442 };
   1443 
   1444 void LiveIntervals::handleMove(MachineInstr &MI, bool UpdateFlags) {
   1445   assert(!MI.isBundled() && "Can't handle bundled instructions yet.");
   1446   SlotIndex OldIndex = Indexes->getInstructionIndex(MI);
   1447   Indexes->removeMachineInstrFromMaps(MI);
   1448   SlotIndex NewIndex = Indexes->insertMachineInstrInMaps(MI);
   1449   assert(getMBBStartIdx(MI.getParent()) <= OldIndex &&
   1450          OldIndex < getMBBEndIdx(MI.getParent()) &&
   1451          "Cannot handle moves across basic block boundaries.");
   1452 
   1453   HMEditor HME(*this, *MRI, *TRI, OldIndex, NewIndex, UpdateFlags);
   1454   HME.updateAllRanges(&MI);
   1455 }
   1456 
   1457 void LiveIntervals::handleMoveIntoBundle(MachineInstr &MI,
   1458                                          MachineInstr &BundleStart,
   1459                                          bool UpdateFlags) {
   1460   SlotIndex OldIndex = Indexes->getInstructionIndex(MI);
   1461   SlotIndex NewIndex = Indexes->getInstructionIndex(BundleStart);
   1462   HMEditor HME(*this, *MRI, *TRI, OldIndex, NewIndex, UpdateFlags);
   1463   HME.updateAllRanges(&MI);
   1464 }
   1465 
   1466 void LiveIntervals::repairOldRegInRange(const MachineBasicBlock::iterator Begin,
   1467                                         const MachineBasicBlock::iterator End,
   1468                                         const SlotIndex endIdx,
   1469                                         LiveRange &LR, const unsigned Reg,
   1470                                         LaneBitmask LaneMask) {
   1471   LiveInterval::iterator LII = LR.find(endIdx);
   1472   SlotIndex lastUseIdx;
   1473   if (LII == LR.begin()) {
   1474     // This happens when the function is called for a subregister that only
   1475     // occurs _after_ the range that is to be repaired.
   1476     return;
   1477   }
   1478   if (LII != LR.end() && LII->start < endIdx)
   1479     lastUseIdx = LII->end;
   1480   else
   1481     --LII;
   1482 
   1483   for (MachineBasicBlock::iterator I = End; I != Begin;) {
   1484     --I;
   1485     MachineInstr &MI = *I;
   1486     if (MI.isDebugInstr())
   1487       continue;
   1488 
   1489     SlotIndex instrIdx = getInstructionIndex(MI);
   1490     bool isStartValid = getInstructionFromIndex(LII->start);
   1491     bool isEndValid = getInstructionFromIndex(LII->end);
   1492 
   1493     // FIXME: This doesn't currently handle early-clobber or multiple removed
   1494     // defs inside of the region to repair.
   1495     for (MachineInstr::mop_iterator OI = MI.operands_begin(),
   1496                                     OE = MI.operands_end();
   1497          OI != OE; ++OI) {
   1498       const MachineOperand &MO = *OI;
   1499       if (!MO.isReg() || MO.getReg() != Reg)
   1500         continue;
   1501 
   1502       unsigned SubReg = MO.getSubReg();
   1503       LaneBitmask Mask = TRI->getSubRegIndexLaneMask(SubReg);
   1504       if ((Mask & LaneMask).none())
   1505         continue;
   1506 
   1507       if (MO.isDef()) {
   1508         if (!isStartValid) {
   1509           if (LII->end.isDead()) {
   1510             SlotIndex prevStart;
   1511             if (LII != LR.begin())
   1512               prevStart = std::prev(LII)->start;
   1513 
   1514             // FIXME: This could be more efficient if there was a
   1515             // removeSegment method that returned an iterator.
   1516             LR.removeSegment(*LII, true);
   1517             if (prevStart.isValid())
   1518               LII = LR.find(prevStart);
   1519             else
   1520               LII = LR.begin();
   1521           } else {
   1522             LII->start = instrIdx.getRegSlot();
   1523             LII->valno->def = instrIdx.getRegSlot();
   1524             if (MO.getSubReg() && !MO.isUndef())
   1525               lastUseIdx = instrIdx.getRegSlot();
   1526             else
   1527               lastUseIdx = SlotIndex();
   1528             continue;
   1529           }
   1530         }
   1531 
   1532         if (!lastUseIdx.isValid()) {
   1533           VNInfo *VNI = LR.getNextValue(instrIdx.getRegSlot(), VNInfoAllocator);
   1534           LiveRange::Segment S(instrIdx.getRegSlot(),
   1535                                instrIdx.getDeadSlot(), VNI);
   1536           LII = LR.addSegment(S);
   1537         } else if (LII->start != instrIdx.getRegSlot()) {
   1538           VNInfo *VNI = LR.getNextValue(instrIdx.getRegSlot(), VNInfoAllocator);
   1539           LiveRange::Segment S(instrIdx.getRegSlot(), lastUseIdx, VNI);
   1540           LII = LR.addSegment(S);
   1541         }
   1542 
   1543         if (MO.getSubReg() && !MO.isUndef())
   1544           lastUseIdx = instrIdx.getRegSlot();
   1545         else
   1546           lastUseIdx = SlotIndex();
   1547       } else if (MO.isUse()) {
   1548         // FIXME: This should probably be handled outside of this branch,
   1549         // either as part of the def case (for defs inside of the region) or
   1550         // after the loop over the region.
   1551         if (!isEndValid && !LII->end.isBlock())
   1552           LII->end = instrIdx.getRegSlot();
   1553         if (!lastUseIdx.isValid())
   1554           lastUseIdx = instrIdx.getRegSlot();
   1555       }
   1556     }
   1557   }
   1558 }
   1559 
   1560 void
   1561 LiveIntervals::repairIntervalsInRange(MachineBasicBlock *MBB,
   1562                                       MachineBasicBlock::iterator Begin,
   1563                                       MachineBasicBlock::iterator End,
   1564                                       ArrayRef<unsigned> OrigRegs) {
   1565   // Find anchor points, which are at the beginning/end of blocks or at
   1566   // instructions that already have indexes.
   1567   while (Begin != MBB->begin() && !Indexes->hasIndex(*Begin))
   1568     --Begin;
   1569   while (End != MBB->end() && !Indexes->hasIndex(*End))
   1570     ++End;
   1571 
   1572   SlotIndex endIdx;
   1573   if (End == MBB->end())
   1574     endIdx = getMBBEndIdx(MBB).getPrevSlot();
   1575   else
   1576     endIdx = getInstructionIndex(*End);
   1577 
   1578   Indexes->repairIndexesInRange(MBB, Begin, End);
   1579 
   1580   for (MachineBasicBlock::iterator I = End; I != Begin;) {
   1581     --I;
   1582     MachineInstr &MI = *I;
   1583     if (MI.isDebugInstr())
   1584       continue;
   1585     for (MachineInstr::const_mop_iterator MOI = MI.operands_begin(),
   1586                                           MOE = MI.operands_end();
   1587          MOI != MOE; ++MOI) {
   1588       if (MOI->isReg() &&
   1589           TargetRegisterInfo::isVirtualRegister(MOI->getReg()) &&
   1590           !hasInterval(MOI->getReg())) {
   1591         createAndComputeVirtRegInterval(MOI->getReg());
   1592       }
   1593     }
   1594   }
   1595 
   1596   for (unsigned Reg : OrigRegs) {
   1597     if (!TargetRegisterInfo::isVirtualRegister(Reg))
   1598       continue;
   1599 
   1600     LiveInterval &LI = getInterval(Reg);
   1601     // FIXME: Should we support undefs that gain defs?
   1602     if (!LI.hasAtLeastOneValue())
   1603       continue;
   1604 
   1605     for (LiveInterval::SubRange &S : LI.subranges())
   1606       repairOldRegInRange(Begin, End, endIdx, S, Reg, S.LaneMask);
   1607 
   1608     repairOldRegInRange(Begin, End, endIdx, LI, Reg);
   1609   }
   1610 }
   1611 
   1612 void LiveIntervals::removePhysRegDefAt(unsigned Reg, SlotIndex Pos) {
   1613   for (MCRegUnitIterator Unit(Reg, TRI); Unit.isValid(); ++Unit) {
   1614     if (LiveRange *LR = getCachedRegUnit(*Unit))
   1615       if (VNInfo *VNI = LR->getVNInfoAt(Pos))
   1616         LR->removeValNo(VNI);
   1617   }
   1618 }
   1619 
   1620 void LiveIntervals::removeVRegDefAt(LiveInterval &LI, SlotIndex Pos) {
   1621   // LI may not have the main range computed yet, but its subranges may
   1622   // be present.
   1623   VNInfo *VNI = LI.getVNInfoAt(Pos);
   1624   if (VNI != nullptr) {
   1625     assert(VNI->def.getBaseIndex() == Pos.getBaseIndex());
   1626     LI.removeValNo(VNI);
   1627   }
   1628 
   1629   // Also remove the value defined in subranges.
   1630   for (LiveInterval::SubRange &S : LI.subranges()) {
   1631     if (VNInfo *SVNI = S.getVNInfoAt(Pos))
   1632       if (SVNI->def.getBaseIndex() == Pos.getBaseIndex())
   1633         S.removeValNo(SVNI);
   1634   }
   1635   LI.removeEmptySubRanges();
   1636 }
   1637 
   1638 void LiveIntervals::splitSeparateComponents(LiveInterval &LI,
   1639     SmallVectorImpl<LiveInterval*> &SplitLIs) {
   1640   ConnectedVNInfoEqClasses ConEQ(*this);
   1641   unsigned NumComp = ConEQ.Classify(LI);
   1642   if (NumComp <= 1)
   1643     return;
   1644   LLVM_DEBUG(dbgs() << "  Split " << NumComp << " components: " << LI << '\n');
   1645   unsigned Reg = LI.reg;
   1646   const TargetRegisterClass *RegClass = MRI->getRegClass(Reg);
   1647   for (unsigned I = 1; I < NumComp; ++I) {
   1648     unsigned NewVReg = MRI->createVirtualRegister(RegClass);
   1649     LiveInterval &NewLI = createEmptyInterval(NewVReg);
   1650     SplitLIs.push_back(&NewLI);
   1651   }
   1652   ConEQ.Distribute(LI, SplitLIs.data(), *MRI);
   1653 }
   1654 
   1655 void LiveIntervals::constructMainRangeFromSubranges(LiveInterval &LI) {
   1656   assert(LRCalc && "LRCalc not initialized.");
   1657   LRCalc->reset(MF, getSlotIndexes(), DomTree, &getVNInfoAllocator());
   1658   LRCalc->constructMainRangeFromSubranges(LI);
   1659 }
   1660