Home | History | Annotate | Download | only in CodeGen
      1 //===-- LiveIntervalAnalysis.cpp - Live Interval Analysis -----------------===//
      2 //
      3 //                     The LLVM Compiler Infrastructure
      4 //
      5 // This file is distributed under the University of Illinois Open Source
      6 // License. See LICENSE.TXT for details.
      7 //
      8 //===----------------------------------------------------------------------===//
      9 //
     10 // This file implements the LiveInterval analysis pass which is used
     11 // by the Linear Scan Register allocator. This pass linearizes the
     12 // basic blocks of the function in DFS order and uses the
     13 // LiveVariables pass to conservatively compute live intervals for
     14 // each virtual and physical register.
     15 //
     16 //===----------------------------------------------------------------------===//
     17 
     18 #define DEBUG_TYPE "regalloc"
     19 #include "llvm/CodeGen/LiveIntervalAnalysis.h"
     20 #include "llvm/Value.h"
     21 #include "llvm/Analysis/AliasAnalysis.h"
     22 #include "llvm/CodeGen/LiveVariables.h"
     23 #include "llvm/CodeGen/MachineInstr.h"
     24 #include "llvm/CodeGen/MachineRegisterInfo.h"
     25 #include "llvm/CodeGen/Passes.h"
     26 #include "llvm/Target/TargetRegisterInfo.h"
     27 #include "llvm/Target/TargetInstrInfo.h"
     28 #include "llvm/Target/TargetMachine.h"
     29 #include "llvm/Support/CommandLine.h"
     30 #include "llvm/Support/Debug.h"
     31 #include "llvm/Support/ErrorHandling.h"
     32 #include "llvm/Support/raw_ostream.h"
     33 #include "llvm/ADT/DenseSet.h"
     34 #include "llvm/ADT/Statistic.h"
     35 #include "llvm/ADT/STLExtras.h"
     36 #include <algorithm>
     37 #include <limits>
     38 #include <cmath>
     39 using namespace llvm;
     40 
     41 // Hidden options for help debugging.
     42 static cl::opt<bool> DisableReMat("disable-rematerialization",
     43                                   cl::init(false), cl::Hidden);
     44 
     45 STATISTIC(numIntervals , "Number of original intervals");
     46 
     47 char LiveIntervals::ID = 0;
     48 INITIALIZE_PASS_BEGIN(LiveIntervals, "liveintervals",
     49                 "Live Interval Analysis", false, false)
     50 INITIALIZE_AG_DEPENDENCY(AliasAnalysis)
     51 INITIALIZE_PASS_DEPENDENCY(LiveVariables)
     52 INITIALIZE_PASS_DEPENDENCY(MachineDominatorTree)
     53 INITIALIZE_PASS_DEPENDENCY(SlotIndexes)
     54 INITIALIZE_PASS_END(LiveIntervals, "liveintervals",
     55                 "Live Interval Analysis", false, false)
     56 
/// getAnalysisUsage - Declare required and preserved analyses.  This pass
/// does not modify the CFG, and it keeps most function-level analyses valid.
void LiveIntervals::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
  AU.addRequired<AliasAnalysis>();
  AU.addPreserved<AliasAnalysis>();
  AU.addRequired<LiveVariables>();
  AU.addPreserved<LiveVariables>();
  AU.addPreservedID(MachineLoopInfoID);
  AU.addPreservedID(MachineDominatorsID);
  AU.addPreserved<SlotIndexes>();
  // Transitive: SlotIndexes must stay alive for as long as the live
  // intervals computed here are used, since all ranges reference its slots.
  AU.addRequiredTransitive<SlotIndexes>();
  MachineFunctionPass::getAnalysisUsage(AU);
}
     69 
     70 void LiveIntervals::releaseMemory() {
     71   // Free the live intervals themselves.
     72   for (DenseMap<unsigned, LiveInterval*>::iterator I = r2iMap_.begin(),
     73        E = r2iMap_.end(); I != E; ++I)
     74     delete I->second;
     75 
     76   r2iMap_.clear();
     77   RegMaskSlots.clear();
     78   RegMaskBits.clear();
     79   RegMaskBlocks.clear();
     80 
     81   // Release VNInfo memory regions, VNInfo objects don't need to be dtor'd.
     82   VNInfoAllocator.Reset();
     83 }
     84 
/// runOnMachineFunction - Compute live intervals for every virtual and
/// physical register in the function.  Always returns true because the
/// interval data is (re)built.
bool LiveIntervals::runOnMachineFunction(MachineFunction &fn) {
  // Cache the function, target hooks, and sibling analyses used throughout.
  mf_ = &fn;
  mri_ = &mf_->getRegInfo();
  tm_ = &fn.getTarget();
  tri_ = tm_->getRegisterInfo();
  tii_ = tm_->getInstrInfo();
  aa_ = &getAnalysis<AliasAnalysis>();
  lv_ = &getAnalysis<LiveVariables>();
  indexes_ = &getAnalysis<SlotIndexes>();
  allocatableRegs_ = tri_->getAllocatableSet(fn);
  reservedRegs_ = tri_->getReservedRegs(fn);

  computeIntervals();

  // Statistics: count every interval created for this function.
  numIntervals += getNumIntervals();

  DEBUG(dump());
  return true;
}
    106 
    107 /// print - Implement the dump method.
    108 void LiveIntervals::print(raw_ostream &OS, const Module* ) const {
    109   OS << "********** INTERVALS **********\n";
    110 
    111   // Dump the physregs.
    112   for (unsigned Reg = 1, RegE = tri_->getNumRegs(); Reg != RegE; ++Reg)
    113     if (const LiveInterval *LI = r2iMap_.lookup(Reg)) {
    114       LI->print(OS, tri_);
    115       OS << '\n';
    116     }
    117 
    118   // Dump the virtregs.
    119   for (unsigned Reg = 0, RegE = mri_->getNumVirtRegs(); Reg != RegE; ++Reg)
    120     if (const LiveInterval *LI =
    121         r2iMap_.lookup(TargetRegisterInfo::index2VirtReg(Reg))) {
    122       LI->print(OS, tri_);
    123       OS << '\n';
    124     }
    125 
    126   printInstrs(OS);
    127 }
    128 
/// printInstrs - Print the machine function, annotated with slot indexes,
/// to the given stream.
void LiveIntervals::printInstrs(raw_ostream &OS) const {
  OS << "********** MACHINEINSTRS **********\n";
  mf_->print(OS, indexes_);
}
    133 
/// dumpInstrs - Convenience wrapper: print the annotated machine function
/// to the debug output stream.
void LiveIntervals::dumpInstrs() const {
  printInstrs(dbgs());
}
    137 
    138 static
    139 bool MultipleDefsBySameMI(const MachineInstr &MI, unsigned MOIdx) {
    140   unsigned Reg = MI.getOperand(MOIdx).getReg();
    141   for (unsigned i = MOIdx+1, e = MI.getNumOperands(); i < e; ++i) {
    142     const MachineOperand &MO = MI.getOperand(i);
    143     if (!MO.isReg())
    144       continue;
    145     if (MO.getReg() == Reg && MO.isDef()) {
    146       assert(MI.getOperand(MOIdx).getSubReg() != MO.getSubReg() &&
    147              MI.getOperand(MOIdx).getSubReg() &&
    148              (MO.getSubReg() || MO.isImplicit()));
    149       return true;
    150     }
    151   }
    152   return false;
    153 }
    154 
    155 /// isPartialRedef - Return true if the specified def at the specific index is
    156 /// partially re-defining the specified live interval. A common case of this is
    157 /// a definition of the sub-register.
    158 bool LiveIntervals::isPartialRedef(SlotIndex MIIdx, MachineOperand &MO,
    159                                    LiveInterval &interval) {
    160   if (!MO.getSubReg() || MO.isEarlyClobber())
    161     return false;
    162 
    163   SlotIndex RedefIndex = MIIdx.getRegSlot();
    164   const LiveRange *OldLR =
    165     interval.getLiveRangeContaining(RedefIndex.getRegSlot(true));
    166   MachineInstr *DefMI = getInstructionFromIndex(OldLR->valno->def);
    167   if (DefMI != 0) {
    168     return DefMI->findRegisterDefOperandIdx(interval.reg) != -1;
    169   }
    170   return false;
    171 }
    172 
/// handleVirtualRegisterDef - Update interval for a def of a virtual
/// register at operand MOIdx of instruction mi (whose slot is MIIdx).  The
/// first def of a vreg builds the entire live range from LiveVariables' kill
/// info; subsequent defs (phi elimination, two-address rewriting, or partial
/// sub-register redefs) split or extend the already-built interval.
void LiveIntervals::handleVirtualRegisterDef(MachineBasicBlock *mbb,
                                             MachineBasicBlock::iterator mi,
                                             SlotIndex MIIdx,
                                             MachineOperand& MO,
                                             unsigned MOIdx,
                                             LiveInterval &interval) {
  DEBUG(dbgs() << "\t\tregister: " << PrintReg(interval.reg, tri_));

  // Virtual registers may be defined multiple times (due to phi
  // elimination and 2-addr elimination).  Much of what we do only has to be
  // done once for the vreg.  We use an empty interval to detect the first
  // time we see a vreg.
  LiveVariables::VarInfo& vi = lv_->getVarInfo(interval.reg);
  if (interval.empty()) {
    // Get the Idx of the defining instructions.
    SlotIndex defIndex = MIIdx.getRegSlot(MO.isEarlyClobber());

    // Make sure the first definition is not a partial redefinition.
    assert(!MO.readsReg() && "First def cannot also read virtual register "
           "missing <undef> flag?");

    VNInfo *ValNo = interval.getNextValue(defIndex, VNInfoAllocator);
    assert(ValNo->id == 0 && "First value in interval is not 0?");

    // Loop over all of the blocks that the vreg is defined in.  There are
    // two cases we have to handle here.  The most common case is a vreg
    // whose lifetime is contained within a basic block.  In this case there
    // will be a single kill, in MBB, which comes after the definition.
    if (vi.Kills.size() == 1 && vi.Kills[0]->getParent() == mbb) {
      // FIXME: what about dead vars?
      SlotIndex killIdx;
      if (vi.Kills[0] != mi)
        killIdx = getInstructionIndex(vi.Kills[0]).getRegSlot();
      else
        // The defining instruction is also the kill: the def is dead.
        killIdx = defIndex.getDeadSlot();

      // If the kill happens after the definition, we have an intra-block
      // live range.
      if (killIdx > defIndex) {
        assert(vi.AliveBlocks.empty() &&
               "Shouldn't be alive across any blocks!");
        LiveRange LR(defIndex, killIdx, ValNo);
        interval.addRange(LR);
        DEBUG(dbgs() << " +" << LR << "\n");
        return;
      }
    }

    // The other case we handle is when a virtual register lives to the end
    // of the defining block, potentially live across some blocks, then is
    // live into some number of blocks, but gets killed.  Start by adding a
    // range that goes from this definition to the end of the defining block.
    LiveRange NewLR(defIndex, getMBBEndIdx(mbb), ValNo);
    DEBUG(dbgs() << " +" << NewLR);
    interval.addRange(NewLR);

    bool PHIJoin = lv_->isPHIJoin(interval.reg);

    if (PHIJoin) {
      // A phi join register is killed at the end of the MBB and revived as a new
      // valno in the killing blocks.
      assert(vi.AliveBlocks.empty() && "Phi join can't pass through blocks");
      DEBUG(dbgs() << " phi-join");
      ValNo->setHasPHIKill(true);
    } else {
      // Iterate over all of the blocks that the variable is completely
      // live in, adding [insrtIndex(begin), instrIndex(end)+4) to the
      // live interval.
      for (SparseBitVector<>::iterator I = vi.AliveBlocks.begin(),
               E = vi.AliveBlocks.end(); I != E; ++I) {
        MachineBasicBlock *aliveBlock = mf_->getBlockNumbered(*I);
        LiveRange LR(getMBBStartIdx(aliveBlock), getMBBEndIdx(aliveBlock), ValNo);
        interval.addRange(LR);
        DEBUG(dbgs() << " +" << LR);
      }
    }

    // Finally, this virtual register is live from the start of any killing
    // block to the 'use' slot of the killing instruction.
    for (unsigned i = 0, e = vi.Kills.size(); i != e; ++i) {
      MachineInstr *Kill = vi.Kills[i];
      SlotIndex Start = getMBBStartIdx(Kill->getParent());
      SlotIndex killIdx = getInstructionIndex(Kill).getRegSlot();

      // Create interval with one of a NEW value number.  Note that this value
      // number isn't actually defined by an instruction, weird huh? :)
      if (PHIJoin) {
        assert(getInstructionFromIndex(Start) == 0 &&
               "PHI def index points at actual instruction.");
        ValNo = interval.getNextValue(Start, VNInfoAllocator);
        ValNo->setIsPHIDef(true);
      }
      LiveRange LR(Start, killIdx, ValNo);
      interval.addRange(LR);
      DEBUG(dbgs() << " +" << LR);
    }

  } else {
    if (MultipleDefsBySameMI(*mi, MOIdx))
      // Multiple defs of the same virtual register by the same instruction.
      // e.g. %reg1031:5<def>, %reg1031:6<def> = VLD1q16 %reg1024<kill>, ...
      // This is likely due to elimination of REG_SEQUENCE instructions. Return
      // here since there is nothing to do.
      return;

    // If this is the second time we see a virtual register definition, it
    // must be due to phi elimination or two addr elimination.  If this is
    // the result of two address elimination, then the vreg is one of the
    // def-and-use register operand.

    // It may also be partial redef like this:
    // 80  %reg1041:6<def> = VSHRNv4i16 %reg1034<kill>, 12, pred:14, pred:%reg0
    // 120 %reg1041:5<def> = VSHRNv4i16 %reg1039<kill>, 12, pred:14, pred:%reg0
    bool PartReDef = isPartialRedef(MIIdx, MO, interval);
    if (PartReDef || mi->isRegTiedToUseOperand(MOIdx)) {
      // If this is a two-address definition, then we have already processed
      // the live range.  The only problem is that we didn't realize there
      // are actually two values in the live interval.  Because of this we
      // need to take the LiveRegion that defines this register and split it
      // into two values.
      SlotIndex RedefIndex = MIIdx.getRegSlot(MO.isEarlyClobber());

      const LiveRange *OldLR =
        interval.getLiveRangeContaining(RedefIndex.getRegSlot(true));
      VNInfo *OldValNo = OldLR->valno;
      SlotIndex DefIndex = OldValNo->def.getRegSlot();

      // Delete the previous value, which should be short and continuous,
      // because the 2-addr copy must be in the same MBB as the redef.
      interval.removeRange(DefIndex, RedefIndex);

      // The new value number (#1) is defined by the instruction we claimed
      // defined value #0.
      VNInfo *ValNo = interval.createValueCopy(OldValNo, VNInfoAllocator);

      // Value#0 is now defined by the 2-addr instruction.
      OldValNo->def = RedefIndex;

      // Add the new live interval which replaces the range for the input copy.
      LiveRange LR(DefIndex, RedefIndex, ValNo);
      DEBUG(dbgs() << " replace range with " << LR);
      interval.addRange(LR);

      // If this redefinition is dead, we need to add a dummy unit live
      // range covering the def slot.
      if (MO.isDead())
        interval.addRange(LiveRange(RedefIndex, RedefIndex.getDeadSlot(),
                                    OldValNo));

      DEBUG({
          dbgs() << " RESULT: ";
          interval.print(dbgs(), tri_);
        });
    } else if (lv_->isPHIJoin(interval.reg)) {
      // In the case of PHI elimination, each variable definition is only
      // live until the end of the block.  We've already taken care of the
      // rest of the live range.

      SlotIndex defIndex = MIIdx.getRegSlot();
      if (MO.isEarlyClobber())
        defIndex = MIIdx.getRegSlot(true);

      VNInfo *ValNo = interval.getNextValue(defIndex, VNInfoAllocator);

      SlotIndex killIndex = getMBBEndIdx(mbb);
      LiveRange LR(defIndex, killIndex, ValNo);
      interval.addRange(LR);
      ValNo->setHasPHIKill(true);
      DEBUG(dbgs() << " phi-join +" << LR);
    } else {
      llvm_unreachable("Multiply defined register");
    }
  }

  DEBUG(dbgs() << '\n');
}
    349 
    350 static bool isRegLiveIntoSuccessor(const MachineBasicBlock *MBB, unsigned Reg) {
    351   for (MachineBasicBlock::const_succ_iterator SI = MBB->succ_begin(),
    352                                               SE = MBB->succ_end();
    353        SI != SE; ++SI) {
    354     const MachineBasicBlock* succ = *SI;
    355     if (succ->isLiveIn(Reg))
    356       return true;
    357   }
    358   return false;
    359 }
    360 
/// handlePhysicalRegisterDef - Update interval for a physical register def
/// at MIIdx.  The range runs from the def slot to the first kill or redef
/// found in the same block, to the dead slot when the def is dead, or (for
/// unreserved, unallocatable registers such as flags registers) to the end
/// of the block when it is live-out.
void LiveIntervals::handlePhysicalRegisterDef(MachineBasicBlock *MBB,
                                              MachineBasicBlock::iterator mi,
                                              SlotIndex MIIdx,
                                              MachineOperand& MO,
                                              LiveInterval &interval) {
  DEBUG(dbgs() << "\t\tregister: " << PrintReg(interval.reg, tri_));

  SlotIndex baseIndex = MIIdx;
  SlotIndex start = baseIndex.getRegSlot(MO.isEarlyClobber());
  SlotIndex end = start;

  // If it is not used after definition, it is considered dead at
  // the instruction defining it. Hence its interval is:
  // [defSlot(def), defSlot(def)+1)
  // For earlyclobbers, the defSlot was pushed back one; the extra
  // advance below compensates.
  if (MO.isDead()) {
    DEBUG(dbgs() << " dead");
    end = start.getDeadSlot();
    goto exit;
  }

  // If it is not dead on definition, it must be killed by a
  // subsequent instruction. Hence its interval is:
  // [defSlot(def), useSlot(kill)+1)
  baseIndex = baseIndex.getNextIndex();
  while (++mi != MBB->end()) {

    // DBG_VALUE instructions have no slot index; skip them entirely.
    if (mi->isDebugValue())
      continue;
    if (getInstructionFromIndex(baseIndex) == 0)
      baseIndex = indexes_->getNextNonNullIndex(baseIndex);

    if (mi->killsRegister(interval.reg, tri_)) {
      DEBUG(dbgs() << " killed");
      end = baseIndex.getRegSlot();
      goto exit;
    } else {
      int DefIdx = mi->findRegisterDefOperandIdx(interval.reg,false,false,tri_);
      if (DefIdx != -1) {
        if (mi->isRegTiedToUseOperand(DefIdx)) {
          // Two-address instruction.
          end = baseIndex.getRegSlot(mi->getOperand(DefIdx).isEarlyClobber());
        } else {
          // Another instruction redefines the register before it is ever read.
          // Then the register is essentially dead at the instruction that
          // defines it. Hence its interval is:
          // [defSlot(def), defSlot(def)+1)
          DEBUG(dbgs() << " dead");
          end = start.getDeadSlot();
        }
        goto exit;
      }
    }

    baseIndex = baseIndex.getNextIndex();
  }

  // If we get here the register *should* be live out.
  assert(!isAllocatable(interval.reg) && "Physregs shouldn't be live out!");

  // FIXME: We need saner rules for reserved regs.
  if (isReserved(interval.reg)) {
    end = start.getDeadSlot();
  } else {
    // Unreserved, unallocable registers like EFLAGS can be live across basic
    // block boundaries.
    assert(isRegLiveIntoSuccessor(MBB, interval.reg) &&
           "Unreserved reg not live-out?");
    end = getMBBEndIdx(MBB);
  }
exit:
  assert(start < end && "did not find end of interval?");

  // Already exists? Extend old live interval.
  VNInfo *ValNo = interval.getVNInfoAt(start);
  bool Extend = ValNo != 0;
  if (!Extend)
    ValNo = interval.getNextValue(start, VNInfoAllocator);
  LiveRange LR(start, end, ValNo);
  interval.addRange(LR);
  DEBUG(dbgs() << " +" << LR << '\n');
}
    444 
    445 void LiveIntervals::handleRegisterDef(MachineBasicBlock *MBB,
    446                                       MachineBasicBlock::iterator MI,
    447                                       SlotIndex MIIdx,
    448                                       MachineOperand& MO,
    449                                       unsigned MOIdx) {
    450   if (TargetRegisterInfo::isVirtualRegister(MO.getReg()))
    451     handleVirtualRegisterDef(MBB, MI, MIIdx, MO, MOIdx,
    452                              getOrCreateInterval(MO.getReg()));
    453   else
    454     handlePhysicalRegisterDef(MBB, MI, MIIdx, MO,
    455                               getOrCreateInterval(MO.getReg()));
    456 }
    457 
    458 void LiveIntervals::handleLiveInRegister(MachineBasicBlock *MBB,
    459                                          SlotIndex MIIdx,
    460                                          LiveInterval &interval) {
    461   assert(TargetRegisterInfo::isPhysicalRegister(interval.reg) &&
    462          "Only physical registers can be live in.");
    463   assert((!isAllocatable(interval.reg) || MBB->getParent()->begin() ||
    464           MBB->isLandingPad()) &&
    465           "Allocatable live-ins only valid for entry blocks and landing pads.");
    466 
    467   DEBUG(dbgs() << "\t\tlivein register: " << PrintReg(interval.reg, tri_));
    468 
    469   // Look for kills, if it reaches a def before it's killed, then it shouldn't
    470   // be considered a livein.
    471   MachineBasicBlock::iterator mi = MBB->begin();
    472   MachineBasicBlock::iterator E = MBB->end();
    473   // Skip over DBG_VALUE at the start of the MBB.
    474   if (mi != E && mi->isDebugValue()) {
    475     while (++mi != E && mi->isDebugValue())
    476       ;
    477     if (mi == E)
    478       // MBB is empty except for DBG_VALUE's.
    479       return;
    480   }
    481 
    482   SlotIndex baseIndex = MIIdx;
    483   SlotIndex start = baseIndex;
    484   if (getInstructionFromIndex(baseIndex) == 0)
    485     baseIndex = indexes_->getNextNonNullIndex(baseIndex);
    486 
    487   SlotIndex end = baseIndex;
    488   bool SeenDefUse = false;
    489 
    490   while (mi != E) {
    491     if (mi->killsRegister(interval.reg, tri_)) {
    492       DEBUG(dbgs() << " killed");
    493       end = baseIndex.getRegSlot();
    494       SeenDefUse = true;
    495       break;
    496     } else if (mi->modifiesRegister(interval.reg, tri_)) {
    497       // Another instruction redefines the register before it is ever read.
    498       // Then the register is essentially dead at the instruction that defines
    499       // it. Hence its interval is:
    500       // [defSlot(def), defSlot(def)+1)
    501       DEBUG(dbgs() << " dead");
    502       end = start.getDeadSlot();
    503       SeenDefUse = true;
    504       break;
    505     }
    506 
    507     while (++mi != E && mi->isDebugValue())
    508       // Skip over DBG_VALUE.
    509       ;
    510     if (mi != E)
    511       baseIndex = indexes_->getNextNonNullIndex(baseIndex);
    512   }
    513 
    514   // Live-in register might not be used at all.
    515   if (!SeenDefUse) {
    516     if (isAllocatable(interval.reg) ||
    517         !isRegLiveIntoSuccessor(MBB, interval.reg)) {
    518       // Allocatable registers are never live through.
    519       // Non-allocatable registers that aren't live into any successors also
    520       // aren't live through.
    521       DEBUG(dbgs() << " dead");
    522       return;
    523     } else {
    524       // If we get here the register is non-allocatable and live into some
    525       // successor. We'll conservatively assume it's live-through.
    526       DEBUG(dbgs() << " live through");
    527       end = getMBBEndIdx(MBB);
    528     }
    529   }
    530 
    531   SlotIndex defIdx = getMBBStartIdx(MBB);
    532   assert(getInstructionFromIndex(defIdx) == 0 &&
    533          "PHI def index points at actual instruction.");
    534   VNInfo *vni = interval.getNextValue(defIdx, VNInfoAllocator);
    535   vni->setIsPHIDef(true);
    536   LiveRange LR(start, end, vni);
    537 
    538   interval.addRange(LR);
    539   DEBUG(dbgs() << " +" << LR << '\n');
    540 }
    541 
    542 /// computeIntervals - computes the live intervals for virtual
    543 /// registers. for some ordering of the machine instructions [1,N] a
    544 /// live interval is an interval [i, j) where 1 <= i <= j < N for
    545 /// which a variable is live
    546 void LiveIntervals::computeIntervals() {
    547   DEBUG(dbgs() << "********** COMPUTING LIVE INTERVALS **********\n"
    548                << "********** Function: "
    549                << ((Value*)mf_->getFunction())->getName() << '\n');
    550 
    551   RegMaskBlocks.resize(mf_->getNumBlockIDs());
    552 
    553   SmallVector<unsigned, 8> UndefUses;
    554   for (MachineFunction::iterator MBBI = mf_->begin(), E = mf_->end();
    555        MBBI != E; ++MBBI) {
    556     MachineBasicBlock *MBB = MBBI;
    557     RegMaskBlocks[MBB->getNumber()].first = RegMaskSlots.size();
    558 
    559     if (MBB->empty())
    560       continue;
    561 
    562     // Track the index of the current machine instr.
    563     SlotIndex MIIndex = getMBBStartIdx(MBB);
    564     DEBUG(dbgs() << "BB#" << MBB->getNumber()
    565           << ":\t\t# derived from " << MBB->getName() << "\n");
    566 
    567     // Create intervals for live-ins to this BB first.
    568     for (MachineBasicBlock::livein_iterator LI = MBB->livein_begin(),
    569            LE = MBB->livein_end(); LI != LE; ++LI) {
    570       handleLiveInRegister(MBB, MIIndex, getOrCreateInterval(*LI));
    571     }
    572 
    573     // Skip over empty initial indices.
    574     if (getInstructionFromIndex(MIIndex) == 0)
    575       MIIndex = indexes_->getNextNonNullIndex(MIIndex);
    576 
    577     for (MachineBasicBlock::iterator MI = MBB->begin(), miEnd = MBB->end();
    578          MI != miEnd; ++MI) {
    579       DEBUG(dbgs() << MIIndex << "\t" << *MI);
    580       if (MI->isDebugValue())
    581         continue;
    582       assert(indexes_->getInstructionFromIndex(MIIndex) == MI &&
    583              "Lost SlotIndex synchronization");
    584 
    585       // Handle defs.
    586       for (int i = MI->getNumOperands() - 1; i >= 0; --i) {
    587         MachineOperand &MO = MI->getOperand(i);
    588 
    589         // Collect register masks.
    590         if (MO.isRegMask()) {
    591           RegMaskSlots.push_back(MIIndex.getRegSlot());
    592           RegMaskBits.push_back(MO.getRegMask());
    593           continue;
    594         }
    595 
    596         if (!MO.isReg() || !MO.getReg())
    597           continue;
    598 
    599         // handle register defs - build intervals
    600         if (MO.isDef())
    601           handleRegisterDef(MBB, MI, MIIndex, MO, i);
    602         else if (MO.isUndef())
    603           UndefUses.push_back(MO.getReg());
    604       }
    605 
    606       // Move to the next instr slot.
    607       MIIndex = indexes_->getNextNonNullIndex(MIIndex);
    608     }
    609 
    610     // Compute the number of register mask instructions in this block.
    611     std::pair<unsigned, unsigned> &RMB = RegMaskBlocks[MBB->getNumber()];
    612     RMB.second = RegMaskSlots.size() - RMB.first;;
    613   }
    614 
    615   // Create empty intervals for registers defined by implicit_def's (except
    616   // for those implicit_def that define values which are liveout of their
    617   // blocks.
    618   for (unsigned i = 0, e = UndefUses.size(); i != e; ++i) {
    619     unsigned UndefReg = UndefUses[i];
    620     (void)getOrCreateInterval(UndefReg);
    621   }
    622 }
    623 
    624 LiveInterval* LiveIntervals::createInterval(unsigned reg) {
    625   float Weight = TargetRegisterInfo::isPhysicalRegister(reg) ? HUGE_VALF : 0.0F;
    626   return new LiveInterval(reg, Weight);
    627 }
    628 
    629 /// dupInterval - Duplicate a live interval. The caller is responsible for
    630 /// managing the allocated memory.
    631 LiveInterval* LiveIntervals::dupInterval(LiveInterval *li) {
    632   LiveInterval *NewLI = createInterval(li->reg);
    633   NewLI->Copy(*li, mri_, getVNInfoAllocator());
    634   return NewLI;
    635 }
    636 
    637 /// shrinkToUses - After removing some uses of a register, shrink its live
    638 /// range to just the remaining uses. This method does not compute reaching
    639 /// defs for new uses, and it doesn't remove dead defs.
    640 bool LiveIntervals::shrinkToUses(LiveInterval *li,
    641                                  SmallVectorImpl<MachineInstr*> *dead) {
    642   DEBUG(dbgs() << "Shrink: " << *li << '\n');
    643   assert(TargetRegisterInfo::isVirtualRegister(li->reg)
    644          && "Can only shrink virtual registers");
    645   // Find all the values used, including PHI kills.
    646   SmallVector<std::pair<SlotIndex, VNInfo*>, 16> WorkList;
    647 
    648   // Blocks that have already been added to WorkList as live-out.
    649   SmallPtrSet<MachineBasicBlock*, 16> LiveOut;
    650 
    651   // Visit all instructions reading li->reg.
    652   for (MachineRegisterInfo::reg_iterator I = mri_->reg_begin(li->reg);
    653        MachineInstr *UseMI = I.skipInstruction();) {
    654     if (UseMI->isDebugValue() || !UseMI->readsVirtualRegister(li->reg))
    655       continue;
    656     SlotIndex Idx = getInstructionIndex(UseMI).getRegSlot();
    657     // Note: This intentionally picks up the wrong VNI in case of an EC redef.
    658     // See below.
    659     VNInfo *VNI = li->getVNInfoBefore(Idx);
    660     if (!VNI) {
    661       // This shouldn't happen: readsVirtualRegister returns true, but there is
    662       // no live value. It is likely caused by a target getting <undef> flags
    663       // wrong.
    664       DEBUG(dbgs() << Idx << '\t' << *UseMI
    665                    << "Warning: Instr claims to read non-existent value in "
    666                     << *li << '\n');
    667       continue;
    668     }
    669     // Special case: An early-clobber tied operand reads and writes the
    670     // register one slot early.  The getVNInfoBefore call above would have
    671     // picked up the value defined by UseMI.  Adjust the kill slot and value.
    672     if (SlotIndex::isSameInstr(VNI->def, Idx)) {
    673       Idx = VNI->def;
    674       VNI = li->getVNInfoBefore(Idx);
    675       assert(VNI && "Early-clobber tied value not available");
    676     }
    677     WorkList.push_back(std::make_pair(Idx, VNI));
    678   }
    679 
    680   // Create a new live interval with only minimal live segments per def.
    681   LiveInterval NewLI(li->reg, 0);
    682   for (LiveInterval::vni_iterator I = li->vni_begin(), E = li->vni_end();
    683        I != E; ++I) {
    684     VNInfo *VNI = *I;
    685     if (VNI->isUnused())
    686       continue;
    687     NewLI.addRange(LiveRange(VNI->def, VNI->def.getDeadSlot(), VNI));
    688   }
    689 
    690   // Keep track of the PHIs that are in use.
    691   SmallPtrSet<VNInfo*, 8> UsedPHIs;
    692 
    693   // Extend intervals to reach all uses in WorkList.
    694   while (!WorkList.empty()) {
    695     SlotIndex Idx = WorkList.back().first;
    696     VNInfo *VNI = WorkList.back().second;
    697     WorkList.pop_back();
    698     const MachineBasicBlock *MBB = getMBBFromIndex(Idx.getPrevSlot());
    699     SlotIndex BlockStart = getMBBStartIdx(MBB);
    700 
    701     // Extend the live range for VNI to be live at Idx.
    702     if (VNInfo *ExtVNI = NewLI.extendInBlock(BlockStart, Idx)) {
    703       (void)ExtVNI;
    704       assert(ExtVNI == VNI && "Unexpected existing value number");
    705       // Is this a PHIDef we haven't seen before?
    706       if (!VNI->isPHIDef() || VNI->def != BlockStart || !UsedPHIs.insert(VNI))
    707         continue;
    708       // The PHI is live, make sure the predecessors are live-out.
    709       for (MachineBasicBlock::const_pred_iterator PI = MBB->pred_begin(),
    710            PE = MBB->pred_end(); PI != PE; ++PI) {
    711         if (!LiveOut.insert(*PI))
    712           continue;
    713         SlotIndex Stop = getMBBEndIdx(*PI);
    714         // A predecessor is not required to have a live-out value for a PHI.
    715         if (VNInfo *PVNI = li->getVNInfoBefore(Stop))
    716           WorkList.push_back(std::make_pair(Stop, PVNI));
    717       }
    718       continue;
    719     }
    720 
    721     // VNI is live-in to MBB.
    722     DEBUG(dbgs() << " live-in at " << BlockStart << '\n');
    723     NewLI.addRange(LiveRange(BlockStart, Idx, VNI));
    724 
    725     // Make sure VNI is live-out from the predecessors.
    726     for (MachineBasicBlock::const_pred_iterator PI = MBB->pred_begin(),
    727          PE = MBB->pred_end(); PI != PE; ++PI) {
    728       if (!LiveOut.insert(*PI))
    729         continue;
    730       SlotIndex Stop = getMBBEndIdx(*PI);
    731       assert(li->getVNInfoBefore(Stop) == VNI &&
    732              "Wrong value out of predecessor");
    733       WorkList.push_back(std::make_pair(Stop, VNI));
    734     }
    735   }
    736 
    737   // Handle dead values.
    738   bool CanSeparate = false;
    739   for (LiveInterval::vni_iterator I = li->vni_begin(), E = li->vni_end();
    740        I != E; ++I) {
    741     VNInfo *VNI = *I;
    742     if (VNI->isUnused())
    743       continue;
    744     LiveInterval::iterator LII = NewLI.FindLiveRangeContaining(VNI->def);
    745     assert(LII != NewLI.end() && "Missing live range for PHI");
    746     if (LII->end != VNI->def.getDeadSlot())
    747       continue;
    748     if (VNI->isPHIDef()) {
    749       // This is a dead PHI. Remove it.
    750       VNI->setIsUnused(true);
    751       NewLI.removeRange(*LII);
    752       DEBUG(dbgs() << "Dead PHI at " << VNI->def << " may separate interval\n");
    753       CanSeparate = true;
    754     } else {
    755       // This is a dead def. Make sure the instruction knows.
    756       MachineInstr *MI = getInstructionFromIndex(VNI->def);
    757       assert(MI && "No instruction defining live value");
    758       MI->addRegisterDead(li->reg, tri_);
    759       if (dead && MI->allDefsAreDead()) {
    760         DEBUG(dbgs() << "All defs dead: " << VNI->def << '\t' << *MI);
    761         dead->push_back(MI);
    762       }
    763     }
    764   }
    765 
    766   // Move the trimmed ranges back.
    767   li->ranges.swap(NewLI.ranges);
    768   DEBUG(dbgs() << "Shrunk: " << *li << '\n');
    769   return CanSeparate;
    770 }
    771 
    772 
    773 //===----------------------------------------------------------------------===//
    774 // Register allocator hooks.
    775 //
    776 
    777 void LiveIntervals::addKillFlags() {
    778   for (iterator I = begin(), E = end(); I != E; ++I) {
    779     unsigned Reg = I->first;
    780     if (TargetRegisterInfo::isPhysicalRegister(Reg))
    781       continue;
    782     if (mri_->reg_nodbg_empty(Reg))
    783       continue;
    784     LiveInterval *LI = I->second;
    785 
    786     // Every instruction that kills Reg corresponds to a live range end point.
    787     for (LiveInterval::iterator RI = LI->begin(), RE = LI->end(); RI != RE;
    788          ++RI) {
    789       // A block index indicates an MBB edge.
    790       if (RI->end.isBlock())
    791         continue;
    792       MachineInstr *MI = getInstructionFromIndex(RI->end);
    793       if (!MI)
    794         continue;
    795       MI->addRegisterKilled(Reg, NULL);
    796     }
    797   }
    798 }
    799 
    800 /// getReMatImplicitUse - If the remat definition MI has one (for now, we only
    801 /// allow one) virtual register operand, then its uses are implicitly using
    802 /// the register. Returns the virtual register.
    803 unsigned LiveIntervals::getReMatImplicitUse(const LiveInterval &li,
    804                                             MachineInstr *MI) const {
    805   unsigned RegOp = 0;
    806   for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    807     MachineOperand &MO = MI->getOperand(i);
    808     if (!MO.isReg() || !MO.isUse())
    809       continue;
    810     unsigned Reg = MO.getReg();
    811     if (Reg == 0 || Reg == li.reg)
    812       continue;
    813 
    814     if (TargetRegisterInfo::isPhysicalRegister(Reg) && !isAllocatable(Reg))
    815       continue;
    816     RegOp = MO.getReg();
    817     break; // Found vreg operand - leave the loop.
    818   }
    819   return RegOp;
    820 }
    821 
    822 /// isValNoAvailableAt - Return true if the val# of the specified interval
    823 /// which reaches the given instruction also reaches the specified use index.
    824 bool LiveIntervals::isValNoAvailableAt(const LiveInterval &li, MachineInstr *MI,
    825                                        SlotIndex UseIdx) const {
    826   VNInfo *UValNo = li.getVNInfoAt(UseIdx);
    827   return UValNo && UValNo == li.getVNInfoAt(getInstructionIndex(MI));
    828 }
    829 
/// isReMaterializable - Returns true if the definition MI of the specified
/// val# of the specified interval is re-materializable.
///
/// NOTE(review): this overload never writes the isLoad out-parameter; callers
/// appear to rely on their own initialization -- confirm before depending on
/// it being set here.
bool
LiveIntervals::isReMaterializable(const LiveInterval &li,
                                  const VNInfo *ValNo, MachineInstr *MI,
                                  const SmallVectorImpl<LiveInterval*> *SpillIs,
                                  bool &isLoad) {
  // Rematerialization can be disabled from the command line for debugging.
  if (DisableReMat)
    return false;

  // The target decides whether MI is trivially rematerializable at all.
  if (!tii_->isTriviallyReMaterializable(MI, aa_))
    return false;

  // Target-specific code can mark an instruction as being rematerializable
  // if it has one virtual reg use, though it had better be something like
  // a PIC base register which is likely to be live everywhere.
  unsigned ImpUse = getReMatImplicitUse(li, MI);
  if (ImpUse) {
    const LiveInterval &ImpLi = getInterval(ImpUse);
    // Every use of li that sees ValNo must also have ImpUse's value
    // available there; otherwise rematerializing at that use would read a
    // different (or no) value of the implicit operand.
    for (MachineRegisterInfo::use_nodbg_iterator
           ri = mri_->use_nodbg_begin(li.reg), re = mri_->use_nodbg_end();
         ri != re; ++ri) {
      MachineInstr *UseMI = &*ri;
      SlotIndex UseIdx = getInstructionIndex(UseMI);
      // Only uses reached by this particular value number matter.
      if (li.getVNInfoAt(UseIdx) != ValNo)
        continue;
      if (!isValNoAvailableAt(ImpLi, MI, UseIdx))
        return false;
    }

    // If a register operand of the re-materialized instruction is going to
    // be spilled next, then it's not legal to re-materialize this instruction.
    if (SpillIs)
      for (unsigned i = 0, e = SpillIs->size(); i != e; ++i)
        if (ImpUse == (*SpillIs)[i]->reg)
          return false;
  }
  return true;
}
    869 
    870 /// isReMaterializable - Returns true if every definition of MI of every
    871 /// val# of the specified interval is re-materializable.
    872 bool
    873 LiveIntervals::isReMaterializable(const LiveInterval &li,
    874                                   const SmallVectorImpl<LiveInterval*> *SpillIs,
    875                                   bool &isLoad) {
    876   isLoad = false;
    877   for (LiveInterval::const_vni_iterator i = li.vni_begin(), e = li.vni_end();
    878        i != e; ++i) {
    879     const VNInfo *VNI = *i;
    880     if (VNI->isUnused())
    881       continue; // Dead val#.
    882     // Is the def for the val# rematerializable?
    883     MachineInstr *ReMatDefMI = getInstructionFromIndex(VNI->def);
    884     if (!ReMatDefMI)
    885       return false;
    886     bool DefIsLoad = false;
    887     if (!ReMatDefMI ||
    888         !isReMaterializable(li, VNI, ReMatDefMI, SpillIs, DefIsLoad))
    889       return false;
    890     isLoad |= DefIsLoad;
    891   }
    892   return true;
    893 }
    894 
    895 MachineBasicBlock*
    896 LiveIntervals::intervalIsInOneMBB(const LiveInterval &LI) const {
    897   // A local live range must be fully contained inside the block, meaning it is
    898   // defined and killed at instructions, not at block boundaries. It is not
    899   // live in or or out of any block.
    900   //
    901   // It is technically possible to have a PHI-defined live range identical to a
    902   // single block, but we are going to return false in that case.
    903 
    904   SlotIndex Start = LI.beginIndex();
    905   if (Start.isBlock())
    906     return NULL;
    907 
    908   SlotIndex Stop = LI.endIndex();
    909   if (Stop.isBlock())
    910     return NULL;
    911 
    912   // getMBBFromIndex doesn't need to search the MBB table when both indexes
    913   // belong to proper instructions.
    914   MachineBasicBlock *MBB1 = indexes_->getMBBFromIndex(Start);
    915   MachineBasicBlock *MBB2 = indexes_->getMBBFromIndex(Stop);
    916   return MBB1 == MBB2 ? MBB1 : NULL;
    917 }
    918 
    919 float
    920 LiveIntervals::getSpillWeight(bool isDef, bool isUse, unsigned loopDepth) {
    921   // Limit the loop depth ridiculousness.
    922   if (loopDepth > 200)
    923     loopDepth = 200;
    924 
    925   // The loop depth is used to roughly estimate the number of times the
    926   // instruction is executed. Something like 10^d is simple, but will quickly
    927   // overflow a float. This expression behaves like 10^d for small d, but is
    928   // more tempered for large d. At d=200 we get 6.7e33 which leaves a bit of
    929   // headroom before overflow.
    930   // By the way, powf() might be unavailable here. For consistency,
    931   // We may take pow(double,double).
    932   float lc = std::pow(1 + (100.0 / (loopDepth + 10)), (double)loopDepth);
    933 
    934   return (isDef + isUse) * lc;
    935 }
    936 
    937 LiveRange LiveIntervals::addLiveRangeToEndOfBlock(unsigned reg,
    938                                                   MachineInstr* startInst) {
    939   LiveInterval& Interval = getOrCreateInterval(reg);
    940   VNInfo* VN = Interval.getNextValue(
    941     SlotIndex(getInstructionIndex(startInst).getRegSlot()),
    942     getVNInfoAllocator());
    943   VN->setHasPHIKill(true);
    944   LiveRange LR(
    945      SlotIndex(getInstructionIndex(startInst).getRegSlot()),
    946      getMBBEndIdx(startInst->getParent()), VN);
    947   Interval.addRange(LR);
    948 
    949   return LR;
    950 }
    951 
    952 
    953 //===----------------------------------------------------------------------===//
    954 //                          Register mask functions
    955 //===----------------------------------------------------------------------===//
    956 
// Enumerate all register mask slots (typically calls) overlapping LI.
// Returns true if any overlap was found; in that case UsableRegs holds the
// registers not clobbered by any overlapping mask.
bool LiveIntervals::checkRegMaskInterference(LiveInterval &LI,
                                             BitVector &UsableRegs) {
  if (LI.empty())
    return false;
  LiveInterval::iterator LiveI = LI.begin(), LiveE = LI.end();

  // Use smaller arrays when LI is local to a single basic block.
  ArrayRef<SlotIndex> Slots;
  ArrayRef<const uint32_t*> Bits;
  if (MachineBasicBlock *MBB = intervalIsInOneMBB(LI)) {
    Slots = getRegMaskSlotsInBlock(MBB->getNumber());
    Bits = getRegMaskBitsInBlock(MBB->getNumber());
  } else {
    Slots = getRegMaskSlots();
    Bits = getRegMaskBits();
  }

  // We are going to enumerate all the register mask slots contained in LI.
  // Start with a binary search of RegMaskSlots to find a starting point.
  ArrayRef<SlotIndex>::iterator SlotI =
    std::lower_bound(Slots.begin(), Slots.end(), LiveI->start);
  ArrayRef<SlotIndex>::iterator SlotE = Slots.end();

  // No slots in range, LI begins after the last call.
  if (SlotI == SlotE)
    return false;

  // Dual-cursor walk: advance SlotI over mask slots and LiveI over LI's
  // segments in lock-step until either runs out.
  bool Found = false;
  for (;;) {
    assert(*SlotI >= LiveI->start);
    // Loop over all slots overlapping this segment.
    while (*SlotI < LiveI->end) {
      // *SlotI overlaps LI. Collect mask bits.
      if (!Found) {
        // This is the first overlap. Initialize UsableRegs to all ones.
        UsableRegs.clear();
        UsableRegs.resize(tri_->getNumRegs(), true);
        Found = true;
      }
      // Remove usable registers clobbered by this mask.
      UsableRegs.clearBitsNotInMask(Bits[SlotI-Slots.begin()]);
      if (++SlotI == SlotE)
        return Found; // Ran out of mask slots.
    }
    // *SlotI is beyond the current LI segment.
    LiveI = LI.advanceTo(LiveI, *SlotI);
    if (LiveI == LiveE)
      return Found; // Ran out of live segments.
    // Advance SlotI until it overlaps.
    while (*SlotI < LiveI->start)
      if (++SlotI == SlotE)
        return Found;
  }
}
   1011 
   1012 //===----------------------------------------------------------------------===//
   1013 //                         IntervalUpdate class.
   1014 //===----------------------------------------------------------------------===//
   1015 
   1016 // HMEditor is a toolkit used by handleMove to trim or extend live intervals.
   1017 class LiveIntervals::HMEditor {
   1018 private:
   1019   LiveIntervals& LIS;
   1020   const MachineRegisterInfo& MRI;
   1021   const TargetRegisterInfo& TRI;
   1022   SlotIndex NewIdx;
   1023 
   1024   typedef std::pair<LiveInterval*, LiveRange*> IntRangePair;
   1025   typedef DenseSet<IntRangePair> RangeSet;
   1026 
  // Per-register bucket of live ranges collected for a bundle: at most one
  // incoming use range, one early-clobber range, one dead range (end slot is
  // dead), and one live-out def range per register.
  struct RegRanges {
    LiveRange* Use;
    LiveRange* EC;
    LiveRange* Dead;
    LiveRange* Def;
    RegRanges() : Use(0), EC(0), Dead(0), Def(0) {}
  };
   1034   typedef DenseMap<unsigned, RegRanges> BundleRanges;
   1035 
   1036 public:
  // Construct an editor that will move/merge live ranges to NewIdx.
  HMEditor(LiveIntervals& LIS, const MachineRegisterInfo& MRI,
           const TargetRegisterInfo& TRI, SlotIndex NewIdx)
    : LIS(LIS), MRI(MRI), TRI(TRI), NewIdx(NewIdx) {}
   1040 
  // Update intervals for all operands of MI from OldIdx to NewIdx.
  // This assumes that MI used to be at OldIdx, and now resides at
  // NewIdx.
  void moveAllRangesFrom(MachineInstr* MI, SlotIndex OldIdx) {
    assert(NewIdx != OldIdx && "No-op move? That's a bit strange.");

    // Collect the operands, bucketed as entering/internal/exiting ranges
    // relative to OldIdx.
    RangeSet Entering, Internal, Exiting;
    bool hasRegMaskOp = false;
    collectRanges(MI, Entering, Internal, Exiting, hasRegMaskOp, OldIdx);

    // To keep the LiveRanges valid within an interval, move the ranges closest
    // to the destination first. This prevents ranges from overlapping, so that
    // APIs like removeRange still work.
    if (NewIdx < OldIdx) {
      // Moving up: entering ranges are closest to the destination.
      moveAllEnteringFrom(OldIdx, Entering);
      moveAllInternalFrom(OldIdx, Internal);
      moveAllExitingFrom(OldIdx, Exiting);
    }
    else {
      // Moving down: exiting ranges are closest to the destination.
      moveAllExitingFrom(OldIdx, Exiting);
      moveAllInternalFrom(OldIdx, Internal);
      moveAllEnteringFrom(OldIdx, Entering);
    }

    // A moved regmask operand must keep RegMaskSlots sorted.
    if (hasRegMaskOp)
      updateRegMaskSlots(OldIdx);

#ifndef NDEBUG
    // Sanity-check that all touched intervals still have sorted,
    // non-overlapping, non-empty ranges.
    LIValidator validator;
    validator = std::for_each(Entering.begin(), Entering.end(), validator);
    validator = std::for_each(Internal.begin(), Internal.end(), validator);
    validator = std::for_each(Exiting.begin(), Exiting.end(), validator);
    assert(validator.rangesOk() && "moveAllOperandsFrom broke liveness.");
#endif

  }
   1078 
  // Update intervals for all operands of MI to refer to BundleStart's
  // SlotIndex. Used when MI is folded into an existing bundle headed by
  // BundleStart.
  void moveAllRangesInto(MachineInstr* MI, MachineInstr* BundleStart) {
    if (MI == BundleStart)
      return; // Bundling instr with itself - nothing to do.

    SlotIndex OldIdx = LIS.getSlotIndexes()->getInstructionIndex(MI);
    assert(LIS.getSlotIndexes()->getInstructionFromIndex(OldIdx) == MI &&
           "SlotIndex <-> Instruction mapping broken for MI");

    // Collect all ranges already in the bundle (the bundle head plus every
    // bundled instruction except MI itself), keyed at NewIdx.
    MachineBasicBlock::instr_iterator BII(BundleStart);
    RangeSet Entering, Internal, Exiting;
    bool hasRegMaskOp = false;
    collectRanges(BII, Entering, Internal, Exiting, hasRegMaskOp, NewIdx);
    assert(!hasRegMaskOp && "Can't have RegMask operand in bundle.");
    for (++BII; &*BII == MI || BII->isInsideBundle(); ++BII) {
      if (&*BII == MI)
        continue;
      collectRanges(BII, Entering, Internal, Exiting, hasRegMaskOp, NewIdx);
      assert(!hasRegMaskOp && "Can't have RegMask operand in bundle.");
    }

    // Snapshot the bundle's existing per-register ranges before adding MI's.
    BundleRanges BR = createBundleRanges(Entering, Internal, Exiting);

    // Now add MI's own ranges, keyed at its old position.
    collectRanges(MI, Entering, Internal, Exiting, hasRegMaskOp, OldIdx);
    assert(!hasRegMaskOp && "Can't have RegMask operand in bundle.");

    DEBUG(dbgs() << "Entering: " << Entering.size() << "\n");
    DEBUG(dbgs() << "Internal: " << Internal.size() << "\n");
    DEBUG(dbgs() << "Exiting: " << Exiting.size() << "\n");

    moveAllEnteringFromInto(OldIdx, Entering, BR);
    moveAllInternalFromInto(OldIdx, Internal, BR);
    moveAllExitingFromInto(OldIdx, Exiting, BR);


#ifndef NDEBUG
    // Verify all touched intervals are still well-formed.
    LIValidator validator;
    validator = std::for_each(Entering.begin(), Entering.end(), validator);
    validator = std::for_each(Internal.begin(), Internal.end(), validator);
    validator = std::for_each(Exiting.begin(), Exiting.end(), validator);
    assert(validator.rangesOk() && "moveAllOperandsInto broke liveness.");
#endif
  }
   1124 
   1125 private:
   1126 
   1127 #ifndef NDEBUG
   1128   class LIValidator {
   1129   private:
   1130     DenseSet<const LiveInterval*> Checked, Bogus;
   1131   public:
   1132     void operator()(const IntRangePair& P) {
   1133       const LiveInterval* LI = P.first;
   1134       if (Checked.count(LI))
   1135         return;
   1136       Checked.insert(LI);
   1137       if (LI->empty())
   1138         return;
   1139       SlotIndex LastEnd = LI->begin()->start;
   1140       for (LiveInterval::const_iterator LRI = LI->begin(), LRE = LI->end();
   1141            LRI != LRE; ++LRI) {
   1142         const LiveRange& LR = *LRI;
   1143         if (LastEnd > LR.start || LR.start >= LR.end)
   1144           Bogus.insert(LI);
   1145         LastEnd = LR.end;
   1146       }
   1147     }
   1148 
   1149     bool rangesOk() const {
   1150       return Bogus.empty();
   1151     }
   1152   };
   1153 #endif
   1154 
  // Collect IntRangePairs for all operands of MI that may need fixing.
  // Treats MI's index as OldIdx (regardless of what it is in SlotIndexes'
  // maps). Ranges are bucketed by their relation to OldIdx:
  //  - Entering: ranges read by MI (live across or into OldIdx).
  //  - Internal: ranges wholly contained in OldIdx's slots (dead defs and
  //    early clobbers that do not extend past the instruction).
  //  - Exiting:  ranges defined at OldIdx that extend beyond it.
  void collectRanges(MachineInstr* MI, RangeSet& Entering, RangeSet& Internal,
                     RangeSet& Exiting, bool& hasRegMaskOp, SlotIndex OldIdx) {
    hasRegMaskOp = false;
    for (MachineInstr::mop_iterator MOI = MI->operands_begin(),
                                    MOE = MI->operands_end();
         MOI != MOE; ++MOI) {
      const MachineOperand& MO = *MOI;

      // RegMask operands have no interval; just report their presence.
      if (MO.isRegMask()) {
        hasRegMaskOp = true;
        continue;
      }

      if (!MO.isReg() || MO.getReg() == 0)
        continue;

      unsigned Reg = MO.getReg();

      // TODO: Currently we're skipping uses that are reserved or have no
      // interval, but we're not updating their kills. This should be
      // fixed.
      if (!LIS.hasInterval(Reg) ||
          (TargetRegisterInfo::isPhysicalRegister(Reg) && LIS.isReserved(Reg)))
        continue;

      LiveInterval* LI = &LIS.getInterval(Reg);

      // Reads: the range containing OldIdx enters the instruction.
      if (MO.readsReg()) {
        LiveRange* LR = LI->getLiveRangeContaining(OldIdx);
        if (LR != 0)
          Entering.insert(std::make_pair(LI, LR));
      }
      if (MO.isDef()) {
        if (MO.isEarlyClobber()) {
          // Early clobbers define at the early-clobber slot; the range is
          // internal unless it extends past the instruction's dead slot.
          LiveRange* LR = LI->getLiveRangeContaining(OldIdx.getRegSlot(true));
          assert(LR != 0 && "No EC range?");
          if (LR->end > OldIdx.getDeadSlot())
            Exiting.insert(std::make_pair(LI, LR));
          else
            Internal.insert(std::make_pair(LI, LR));
        } else if (MO.isDead()) {
          // Dead defs live only from the register slot to the dead slot.
          LiveRange* LR = LI->getLiveRangeContaining(OldIdx.getRegSlot());
          assert(LR != 0 && "No dead-def range?");
          Internal.insert(std::make_pair(LI, LR));
        } else {
          // Live defs must extend beyond the instruction.
          LiveRange* LR = LI->getLiveRangeContaining(OldIdx.getDeadSlot());
          assert(LR && LR->end > OldIdx.getDeadSlot() &&
                 "Non-dead-def should have live range exiting.");
          Exiting.insert(std::make_pair(LI, LR));
        }
      }
    }
  }
   1211 
  // Collect IntRangePairs for all operands of MI that may need fixing.
  // Like collectRanges, but for an instruction inside a bundle spanning
  // [MIStartIdx, MIEndIdx]: reads are looked up at the bundle start, defs at
  // the bundle end. Internal ranges, dead defs, early clobbers, and regmasks
  // are not allowed in bundles.
  void collectRangesInBundle(MachineInstr* MI, RangeSet& Entering,
                             RangeSet& Exiting, SlotIndex MIStartIdx,
                             SlotIndex MIEndIdx) {
    for (MachineInstr::mop_iterator MOI = MI->operands_begin(),
                                    MOE = MI->operands_end();
         MOI != MOE; ++MOI) {
      const MachineOperand& MO = *MOI;
      assert(!MO.isRegMask() && "Can't have RegMasks in bundles.");
      if (!MO.isReg() || MO.getReg() == 0)
        continue;

      unsigned Reg = MO.getReg();

      // TODO: Currently we're skipping uses that are reserved or have no
      // interval, but we're not updating their kills. This should be
      // fixed.
      if (!LIS.hasInterval(Reg) ||
          (TargetRegisterInfo::isPhysicalRegister(Reg) && LIS.isReserved(Reg)))
        continue;

      LiveInterval* LI = &LIS.getInterval(Reg);

      // Reads: the range must reach the bundle start to enter it.
      if (MO.readsReg()) {
        LiveRange* LR = LI->getLiveRangeContaining(MIStartIdx);
        if (LR != 0)
          Entering.insert(std::make_pair(LI, LR));
      }
      // Defs: must be plain live-out defs; look up at the bundle end.
      if (MO.isDef()) {
        assert(!MO.isEarlyClobber() && "Early clobbers not allowed in bundles.");
        assert(!MO.isDead() && "Dead-defs not allowed in bundles.");
        LiveRange* LR = LI->getLiveRangeContaining(MIEndIdx.getDeadSlot());
        assert(LR != 0 && "Internal ranges not allowed in bundles.");
        Exiting.insert(std::make_pair(LI, LR));
      }
    }
  }
   1249 
   1250   BundleRanges createBundleRanges(RangeSet& Entering, RangeSet& Internal, RangeSet& Exiting) {
   1251     BundleRanges BR;
   1252 
   1253     for (RangeSet::iterator EI = Entering.begin(), EE = Entering.end();
   1254          EI != EE; ++EI) {
   1255       LiveInterval* LI = EI->first;
   1256       LiveRange* LR = EI->second;
   1257       BR[LI->reg].Use = LR;
   1258     }
   1259 
   1260     for (RangeSet::iterator II = Internal.begin(), IE = Internal.end();
   1261          II != IE; ++II) {
   1262       LiveInterval* LI = II->first;
   1263       LiveRange* LR = II->second;
   1264       if (LR->end.isDead()) {
   1265         BR[LI->reg].Dead = LR;
   1266       } else {
   1267         BR[LI->reg].EC = LR;
   1268       }
   1269     }
   1270 
   1271     for (RangeSet::iterator EI = Exiting.begin(), EE = Exiting.end();
   1272          EI != EE; ++EI) {
   1273       LiveInterval* LI = EI->first;
   1274       LiveRange* LR = EI->second;
   1275       BR[LI->reg].Def = LR;
   1276     }
   1277 
   1278     return BR;
   1279   }
   1280 
   1281   void moveKillFlags(unsigned reg, SlotIndex OldIdx, SlotIndex newKillIdx) {
   1282     MachineInstr* OldKillMI = LIS.getInstructionFromIndex(OldIdx);
   1283     if (!OldKillMI->killsRegister(reg))
   1284       return; // Bail out if we don't have kill flags on the old register.
   1285     MachineInstr* NewKillMI = LIS.getInstructionFromIndex(newKillIdx);
   1286     assert(OldKillMI->killsRegister(reg) && "Old 'kill' instr isn't a kill.");
   1287     assert(!NewKillMI->killsRegister(reg) && "New kill instr is already a kill.");
   1288     OldKillMI->clearRegisterKills(reg, &TRI);
   1289     NewKillMI->addRegisterKilled(reg, &TRI);
   1290   }
   1291 
  // Rewrite the RegMaskSlots entry for a regmask operand that moved from
  // OldIdx to NewIdx, keeping the slot array sorted.
  void updateRegMaskSlots(SlotIndex OldIdx) {
    SmallVectorImpl<SlotIndex>::iterator RI =
      std::lower_bound(LIS.RegMaskSlots.begin(), LIS.RegMaskSlots.end(),
                       OldIdx);
    assert(*RI == OldIdx && "No RegMask at OldIdx.");
    *RI = NewIdx;
    // NOTE(review): prior(RI)/next(RI) are dereferenced unconditionally, so
    // this assert assumes OldIdx is neither the first nor the last regmask
    // slot -- confirm callers never move a boundary regmask instruction.
    assert(*prior(RI) < *RI && *RI < *next(RI) &&
           "RegSlots out of order. Did you move one call across another?");
  }
   1301 
   1302   // Return the last use of reg between NewIdx and OldIdx.
   1303   SlotIndex findLastUseBefore(unsigned Reg, SlotIndex OldIdx) {
   1304     SlotIndex LastUse = NewIdx;
   1305     for (MachineRegisterInfo::use_nodbg_iterator
   1306            UI = MRI.use_nodbg_begin(Reg),
   1307            UE = MRI.use_nodbg_end();
   1308          UI != UE; UI.skipInstruction()) {
   1309       const MachineInstr* MI = &*UI;
   1310       SlotIndex InstSlot = LIS.getSlotIndexes()->getInstructionIndex(MI);
   1311       if (InstSlot > LastUse && InstSlot < OldIdx)
   1312         LastUse = InstSlot;
   1313     }
   1314     return LastUse;
   1315   }
   1316 
   1317   void moveEnteringUpFrom(SlotIndex OldIdx, IntRangePair& P) {
   1318     LiveInterval* LI = P.first;
   1319     LiveRange* LR = P.second;
   1320     bool LiveThrough = LR->end > OldIdx.getRegSlot();
   1321     if (LiveThrough)
   1322       return;
   1323     SlotIndex LastUse = findLastUseBefore(LI->reg, OldIdx);
   1324     if (LastUse != NewIdx)
   1325       moveKillFlags(LI->reg, NewIdx, LastUse);
   1326     LR->end = LastUse.getRegSlot();
   1327   }
   1328 
   1329   void moveEnteringDownFrom(SlotIndex OldIdx, IntRangePair& P) {
   1330     LiveInterval* LI = P.first;
   1331     LiveRange* LR = P.second;
   1332     // Extend the LiveRange if NewIdx is past the end.
   1333     if (NewIdx > LR->end) {
   1334       // Move kill flags if OldIdx was not originally the end
   1335       // (otherwise LR->end points to an invalid slot).
   1336       if (LR->end.getRegSlot() != OldIdx.getRegSlot()) {
   1337         assert(LR->end > OldIdx && "LiveRange does not cover original slot");
   1338         moveKillFlags(LI->reg, LR->end, NewIdx);
   1339       }
   1340       LR->end = NewIdx.getRegSlot();
   1341     }
   1342   }
   1343 
   1344   void moveAllEnteringFrom(SlotIndex OldIdx, RangeSet& Entering) {
   1345     bool GoingUp = NewIdx < OldIdx;
   1346 
   1347     if (GoingUp) {
   1348       for (RangeSet::iterator EI = Entering.begin(), EE = Entering.end();
   1349            EI != EE; ++EI)
   1350         moveEnteringUpFrom(OldIdx, *EI);
   1351     } else {
   1352       for (RangeSet::iterator EI = Entering.begin(), EE = Entering.end();
   1353            EI != EE; ++EI)
   1354         moveEnteringDownFrom(OldIdx, *EI);
   1355     }
   1356   }
   1357 
   1358   void moveInternalFrom(SlotIndex OldIdx, IntRangePair& P) {
   1359     LiveInterval* LI = P.first;
   1360     LiveRange* LR = P.second;
   1361     assert(OldIdx < LR->start && LR->start < OldIdx.getDeadSlot() &&
   1362            LR->end <= OldIdx.getDeadSlot() &&
   1363            "Range should be internal to OldIdx.");
   1364     LiveRange Tmp(*LR);
   1365     Tmp.start = NewIdx.getRegSlot(LR->start.isEarlyClobber());
   1366     Tmp.valno->def = Tmp.start;
   1367     Tmp.end = LR->end.isDead() ? NewIdx.getDeadSlot() : NewIdx.getRegSlot();
   1368     LI->removeRange(*LR);
   1369     LI->addRange(Tmp);
   1370   }
   1371 
   1372   void moveAllInternalFrom(SlotIndex OldIdx, RangeSet& Internal) {
   1373     for (RangeSet::iterator II = Internal.begin(), IE = Internal.end();
   1374          II != IE; ++II)
   1375       moveInternalFrom(OldIdx, *II);
   1376   }
   1377 
   1378   void moveExitingFrom(SlotIndex OldIdx, IntRangePair& P) {
   1379     LiveRange* LR = P.second;
   1380     assert(OldIdx < LR->start && LR->start < OldIdx.getDeadSlot() &&
   1381            "Range should start in OldIdx.");
   1382     assert(LR->end > OldIdx.getDeadSlot() && "Range should exit OldIdx.");
   1383     SlotIndex NewStart = NewIdx.getRegSlot(LR->start.isEarlyClobber());
   1384     LR->start = NewStart;
   1385     LR->valno->def = NewStart;
   1386   }
   1387 
   1388   void moveAllExitingFrom(SlotIndex OldIdx, RangeSet& Exiting) {
   1389     for (RangeSet::iterator EI = Exiting.begin(), EE = Exiting.end();
   1390          EI != EE; ++EI)
   1391       moveExitingFrom(OldIdx, *EI);
   1392   }
   1393 
  // Merge an entering range into a bundle when the instruction moves up
  // (NewIdx < OldIdx): either the range is live-through and simply becomes
  // the bundle's use range, or it ends inside the bundle and must be trimmed
  // to the last use (possibly turning a def into a dead def).
  void moveEnteringUpFromInto(SlotIndex OldIdx, IntRangePair& P,
                              BundleRanges& BR) {
    LiveInterval* LI = P.first;
    LiveRange* LR = P.second;
    bool LiveThrough = LR->end > OldIdx.getRegSlot();
    if (LiveThrough) {
      // Live-through: record LR as the bundle's use range and we're done.
      assert((LR->start < NewIdx || BR[LI->reg].Def == LR) &&
             "Def in bundle should be def range.");
      assert((BR[LI->reg].Use == 0 || BR[LI->reg].Use == LR) &&
             "If bundle has use for this reg it should be LR.");
      BR[LI->reg].Use = LR;
      return;
    }

    SlotIndex LastUse = findLastUseBefore(LI->reg, OldIdx);
    moveKillFlags(LI->reg, OldIdx, LastUse);

    if (LR->start < NewIdx) {
      // Becoming a new entering range.
      assert(BR[LI->reg].Dead == 0 && BR[LI->reg].Def == 0 &&
             "Bundle shouldn't be re-defining reg mid-range.");
      assert((BR[LI->reg].Use == 0 || BR[LI->reg].Use == LR) &&
             "Bundle shouldn't have different use range for same reg.");
      LR->end = LastUse.getRegSlot();
      BR[LI->reg].Use = LR;
    } else {
      // Becoming a new Dead-def.
      assert(LR->start == NewIdx.getRegSlot(LR->start.isEarlyClobber()) &&
             "Live range starting at unexpected slot.");
      assert(BR[LI->reg].Def == LR && "Reg should have def range.");
      assert(BR[LI->reg].Dead == 0 &&
               "Can't have def and dead def of same reg in a bundle.");
      LR->end = LastUse.getDeadSlot();
      // Reclassify the bundle's def range as a dead range.
      BR[LI->reg].Dead = BR[LI->reg].Def;
      BR[LI->reg].Def = 0;
    }
  }
   1431 
   1432   void moveEnteringDownFromInto(SlotIndex OldIdx, IntRangePair& P,
   1433                                 BundleRanges& BR) {
   1434     LiveInterval* LI = P.first;
   1435     LiveRange* LR = P.second;
   1436     if (NewIdx > LR->end) {
   1437       // Range extended to bundle. Add to bundle uses.
   1438       // Note: Currently adds kill flags to bundle start.
   1439       assert(BR[LI->reg].Use == 0 &&
   1440              "Bundle already has use range for reg.");
   1441       moveKillFlags(LI->reg, LR->end, NewIdx);
   1442       LR->end = NewIdx.getRegSlot();
   1443       BR[LI->reg].Use = LR;
   1444     } else {
   1445       assert(BR[LI->reg].Use != 0 &&
   1446              "Bundle should already have a use range for reg.");
   1447     }
   1448   }
   1449 
   1450   void moveAllEnteringFromInto(SlotIndex OldIdx, RangeSet& Entering,
   1451                                BundleRanges& BR) {
   1452     bool GoingUp = NewIdx < OldIdx;
   1453 
   1454     if (GoingUp) {
   1455       for (RangeSet::iterator EI = Entering.begin(), EE = Entering.end();
   1456            EI != EE; ++EI)
   1457         moveEnteringUpFromInto(OldIdx, *EI, BR);
   1458     } else {
   1459       for (RangeSet::iterator EI = Entering.begin(), EE = Entering.end();
   1460            EI != EE; ++EI)
   1461         moveEnteringDownFromInto(OldIdx, *EI, BR);
   1462     }
   1463   }
   1464 
  // Placeholder: ranges entirely internal to the moved instruction are
  // not yet handled when moving into a bundle; both parameters are
  // currently unused.
  void moveInternalFromInto(SlotIndex OldIdx, IntRangePair& P,
                            BundleRanges& BR) {
    // TODO: Sane rules for moving ranges into bundles.
  }
   1469 
   1470   void moveAllInternalFromInto(SlotIndex OldIdx, RangeSet& Internal,
   1471                                BundleRanges& BR) {
   1472     for (RangeSet::iterator II = Internal.begin(), IE = Internal.end();
   1473          II != IE; ++II)
   1474       moveInternalFromInto(OldIdx, *II, BR);
   1475   }
   1476 
  // Update a live range that exits the instruction at OldIdx (i.e. is
  // defined there and lives past it) when that instruction is moved into
  // the bundle whose per-register ranges are tracked in BR.
  void moveExitingFromInto(SlotIndex OldIdx, IntRangePair& P,
                           BundleRanges& BR) {
    LiveInterval* LI = P.first;
    LiveRange* LR = P.second;

    assert(LR->start.isRegister() &&
           "Don't know how to merge exiting ECs into bundles yet.");

    if (LR->end > NewIdx.getDeadSlot()) {
      // This range is becoming an exiting range on the bundle.
      // If there was an old dead-def of this reg, delete it.
      if (BR[LI->reg].Dead != 0) {
        LI->removeRange(*BR[LI->reg].Dead);
        BR[LI->reg].Dead = 0;
      }
      assert(BR[LI->reg].Def == 0 &&
             "Can't have two defs for the same variable exiting a bundle.");
      // Re-anchor the def at the bundle's slot and keep the value number
      // in sync with the new start.
      LR->start = NewIdx.getRegSlot();
      LR->valno->def = LR->start;
      BR[LI->reg].Def = LR;
    } else {
      // This range is becoming internal to the bundle.
      assert(LR->end == NewIdx.getRegSlot() &&
             "Can't bundle def whose kill is before the bundle");
      if (BR[LI->reg].Dead || BR[LI->reg].Def) {
        // Already have a def for this. Just delete range.
        LI->removeRange(*LR);
      } else {
        // Make range dead, record.
        // Shrink the range to a dead def at the bundle and record it so a
        // later exiting range for the same reg can delete it (see above).
        LR->end = NewIdx.getDeadSlot();
        BR[LI->reg].Dead = LR;
        assert(BR[LI->reg].Use == LR &&
               "Range becoming dead should currently be use.");
      }
      // In both cases the range is no longer a use on the bundle.
      BR[LI->reg].Use = 0;
    }
  }
   1515 
   1516   void moveAllExitingFromInto(SlotIndex OldIdx, RangeSet& Exiting,
   1517                               BundleRanges& BR) {
   1518     for (RangeSet::iterator EI = Exiting.begin(), EE = Exiting.end();
   1519          EI != EE; ++EI)
   1520       moveExitingFromInto(OldIdx, *EI, BR);
   1521   }
   1522 
   1523 };
   1524 
void LiveIntervals::handleMove(MachineInstr* MI) {
  // Capture the instruction's slot before unlinking it from the index
  // maps; the removal must happen before NewIndex is computed.
  SlotIndex OldIndex = indexes_->getInstructionIndex(MI);
  indexes_->removeMachineInstrFromMaps(MI);
  // NOTE(review): for an instruction inside a bundle this re-queries the
  // index (presumably resolving to the bundle's slot — confirm against
  // SlotIndexes); otherwise the instruction is re-inserted at its new
  // position in the maps.
  SlotIndex NewIndex = MI->isInsideBundle() ?
                        indexes_->getInstructionIndex(MI) :
                        indexes_->insertMachineInstrInMaps(MI);
  assert(getMBBStartIdx(MI->getParent()) <= OldIndex &&
         OldIndex < getMBBEndIdx(MI->getParent()) &&
         "Cannot handle moves across basic block boundaries.");
  assert(!MI->isBundled() && "Can't handle bundled instructions yet.");

  // Rewrite all live ranges touching OldIndex to refer to NewIndex.
  HMEditor HME(*this, *mri_, *tri_, NewIndex);
  HME.moveAllRangesFrom(MI, OldIndex);
}
   1539 
   1540 void LiveIntervals::handleMoveIntoBundle(MachineInstr* MI, MachineInstr* BundleStart) {
   1541   SlotIndex NewIndex = indexes_->getInstructionIndex(BundleStart);
   1542   HMEditor HME(*this, *mri_, *tri_, NewIndex);
   1543   HME.moveAllRangesInto(MI, BundleStart);
   1544 }
   1545