Home | History | Annotate | Download | only in Sparc
      1 //===-- SparcInstrInfo.cpp - Sparc Instruction Information ----------------===//
      2 //
      3 //                     The LLVM Compiler Infrastructure
      4 //
      5 // This file is distributed under the University of Illinois Open Source
      6 // License. See LICENSE.TXT for details.
      7 //
      8 //===----------------------------------------------------------------------===//
      9 //
     10 // This file contains the Sparc implementation of the TargetInstrInfo class.
     11 //
     12 //===----------------------------------------------------------------------===//
     13 
     14 #include "SparcInstrInfo.h"
     15 #include "Sparc.h"
     16 #include "SparcMachineFunctionInfo.h"
     17 #include "SparcSubtarget.h"
     18 #include "llvm/ADT/STLExtras.h"
     19 #include "llvm/ADT/SmallVector.h"
     20 #include "llvm/CodeGen/MachineFrameInfo.h"
     21 #include "llvm/CodeGen/MachineInstrBuilder.h"
     22 #include "llvm/CodeGen/MachineMemOperand.h"
     23 #include "llvm/CodeGen/MachineRegisterInfo.h"
     24 #include "llvm/Support/ErrorHandling.h"
     25 #include "llvm/Support/TargetRegistry.h"
     26 
     27 using namespace llvm;
     28 
     29 #define GET_INSTRINFO_CTOR_DTOR
     30 #include "SparcGenInstrInfo.inc"
     31 
// Pin the vtable to this file.  An out-of-line definition of one virtual
// method forces the compiler to emit the vtable (and type info) in exactly
// this translation unit instead of in every user of the header.
void SparcInstrInfo::anchor() {}
     34 
// Construct the Sparc instruction info.  The base-class arguments register
// the call-frame setup/teardown pseudo opcodes so generic frame lowering can
// recognize them.
SparcInstrInfo::SparcInstrInfo(SparcSubtarget &ST)
    : SparcGenInstrInfo(SP::ADJCALLSTACKDOWN, SP::ADJCALLSTACKUP), RI(),
      Subtarget(ST) {}
     38 
     39 /// isLoadFromStackSlot - If the specified machine instruction is a direct
     40 /// load from a stack slot, return the virtual or physical register number of
     41 /// the destination along with the FrameIndex of the loaded stack slot.  If
     42 /// not, return 0.  This predicate must return 0 if the instruction has
     43 /// any side effects other than loading from the stack slot.
     44 unsigned SparcInstrInfo::isLoadFromStackSlot(const MachineInstr &MI,
     45                                              int &FrameIndex) const {
     46   if (MI.getOpcode() == SP::LDri || MI.getOpcode() == SP::LDXri ||
     47       MI.getOpcode() == SP::LDFri || MI.getOpcode() == SP::LDDFri ||
     48       MI.getOpcode() == SP::LDQFri) {
     49     if (MI.getOperand(1).isFI() && MI.getOperand(2).isImm() &&
     50         MI.getOperand(2).getImm() == 0) {
     51       FrameIndex = MI.getOperand(1).getIndex();
     52       return MI.getOperand(0).getReg();
     53     }
     54   }
     55   return 0;
     56 }
     57 
     58 /// isStoreToStackSlot - If the specified machine instruction is a direct
     59 /// store to a stack slot, return the virtual or physical register number of
     60 /// the source reg along with the FrameIndex of the loaded stack slot.  If
     61 /// not, return 0.  This predicate must return 0 if the instruction has
     62 /// any side effects other than storing to the stack slot.
     63 unsigned SparcInstrInfo::isStoreToStackSlot(const MachineInstr &MI,
     64                                             int &FrameIndex) const {
     65   if (MI.getOpcode() == SP::STri || MI.getOpcode() == SP::STXri ||
     66       MI.getOpcode() == SP::STFri || MI.getOpcode() == SP::STDFri ||
     67       MI.getOpcode() == SP::STQFri) {
     68     if (MI.getOperand(0).isFI() && MI.getOperand(1).isImm() &&
     69         MI.getOperand(1).getImm() == 0) {
     70       FrameIndex = MI.getOperand(0).getIndex();
     71       return MI.getOperand(2).getReg();
     72     }
     73   }
     74   return 0;
     75 }
     76 
     77 static bool IsIntegerCC(unsigned CC)
     78 {
     79   return  (CC <= SPCC::ICC_VC);
     80 }
     81 
// Return the condition code that tests the exact inverse of CC, used when a
// conditional branch must be reversed.  Integer (ICC_*) and floating-point
// (FCC_*) codes all have well-defined inverses; coprocessor (CPCC_*) codes
// other than always/never do not, and inverting one is a hard error.
static SPCC::CondCodes GetOppositeBranchCondition(SPCC::CondCodes CC)
{
  switch(CC) {
  case SPCC::ICC_A:    return SPCC::ICC_N;
  case SPCC::ICC_N:    return SPCC::ICC_A;
  case SPCC::ICC_NE:   return SPCC::ICC_E;
  case SPCC::ICC_E:    return SPCC::ICC_NE;
  case SPCC::ICC_G:    return SPCC::ICC_LE;
  case SPCC::ICC_LE:   return SPCC::ICC_G;
  case SPCC::ICC_GE:   return SPCC::ICC_L;
  case SPCC::ICC_L:    return SPCC::ICC_GE;
  case SPCC::ICC_GU:   return SPCC::ICC_LEU;
  case SPCC::ICC_LEU:  return SPCC::ICC_GU;
  case SPCC::ICC_CC:   return SPCC::ICC_CS;
  case SPCC::ICC_CS:   return SPCC::ICC_CC;
  case SPCC::ICC_POS:  return SPCC::ICC_NEG;
  case SPCC::ICC_NEG:  return SPCC::ICC_POS;
  case SPCC::ICC_VC:   return SPCC::ICC_VS;
  case SPCC::ICC_VS:   return SPCC::ICC_VC;

  // Note that for the unordered-aware FP codes the inverse pairs "greater"
  // with "unordered or less-or-equal", etc., so that exactly one of a
  // condition and its inverse is true even when operands are unordered.
  case SPCC::FCC_A:    return SPCC::FCC_N;
  case SPCC::FCC_N:    return SPCC::FCC_A;
  case SPCC::FCC_U:    return SPCC::FCC_O;
  case SPCC::FCC_O:    return SPCC::FCC_U;
  case SPCC::FCC_G:    return SPCC::FCC_ULE;
  case SPCC::FCC_LE:   return SPCC::FCC_UG;
  case SPCC::FCC_UG:   return SPCC::FCC_LE;
  case SPCC::FCC_ULE:  return SPCC::FCC_G;
  case SPCC::FCC_L:    return SPCC::FCC_UGE;
  case SPCC::FCC_GE:   return SPCC::FCC_UL;
  case SPCC::FCC_UL:   return SPCC::FCC_GE;
  case SPCC::FCC_UGE:  return SPCC::FCC_L;
  case SPCC::FCC_LG:   return SPCC::FCC_UE;
  case SPCC::FCC_UE:   return SPCC::FCC_LG;
  case SPCC::FCC_NE:   return SPCC::FCC_E;
  case SPCC::FCC_E:    return SPCC::FCC_NE;

  case SPCC::CPCC_A:   return SPCC::CPCC_N;
  case SPCC::CPCC_N:   return SPCC::CPCC_A;
  case SPCC::CPCC_3:   // Fall through
  case SPCC::CPCC_2:   // Fall through
  case SPCC::CPCC_23:  // Fall through
  case SPCC::CPCC_1:   // Fall through
  case SPCC::CPCC_13:  // Fall through
  case SPCC::CPCC_12:  // Fall through
  case SPCC::CPCC_123: // Fall through
  case SPCC::CPCC_0:   // Fall through
  case SPCC::CPCC_03:  // Fall through
  case SPCC::CPCC_02:  // Fall through
  case SPCC::CPCC_023: // Fall through
  case SPCC::CPCC_01:  // Fall through
  case SPCC::CPCC_013: // Fall through
  case SPCC::CPCC_012:
      // "Opposite" code is not meaningful, as we don't know
      // what the CoProc condition means here. The cond-code will
      // only be used in inline assembler, so this code should
      // not be reached in a normal compilation pass.
      llvm_unreachable("Meaningless inversion of co-processor cond code");
  }
  llvm_unreachable("Invalid cond code");
}
    143 
    144 static bool isUncondBranchOpcode(int Opc) { return Opc == SP::BA; }
    145 
    146 static bool isCondBranchOpcode(int Opc) {
    147   return Opc == SP::FBCOND || Opc == SP::BCOND;
    148 }
    149 
    150 static bool isIndirectBranchOpcode(int Opc) {
    151   return Opc == SP::BINDrr || Opc == SP::BINDri;
    152 }
    153 
    154 static void parseCondBranch(MachineInstr *LastInst, MachineBasicBlock *&Target,
    155                             SmallVectorImpl<MachineOperand> &Cond) {
    156   Cond.push_back(MachineOperand::CreateImm(LastInst->getOperand(1).getImm()));
    157   Target = LastInst->getOperand(0).getMBB();
    158 }
    159 
// Analyze the terminators of MBB for generic branch folding.  On success
// (return false) TBB/FBB/Cond describe the block's control flow; returning
// true means "can't analyze".  With AllowModify set, redundant trailing
// branches may be erased as a side effect.
bool SparcInstrInfo::analyzeBranch(MachineBasicBlock &MBB,
                                   MachineBasicBlock *&TBB,
                                   MachineBasicBlock *&FBB,
                                   SmallVectorImpl<MachineOperand> &Cond,
                                   bool AllowModify) const {
  MachineBasicBlock::iterator I = MBB.getLastNonDebugInstr();
  if (I == MBB.end())
    return false;

  if (!isUnpredicatedTerminator(*I))
    return false;

  // Get the last instruction in the block.
  MachineInstr *LastInst = &*I;
  unsigned LastOpc = LastInst->getOpcode();

  // If there is only one terminator instruction, process it.
  if (I == MBB.begin() || !isUnpredicatedTerminator(*--I)) {
    if (isUncondBranchOpcode(LastOpc)) {
      TBB = LastInst->getOperand(0).getMBB();
      return false;
    }
    if (isCondBranchOpcode(LastOpc)) {
      // Block ends with fall-through condbranch.
      parseCondBranch(LastInst, TBB, Cond);
      return false;
    }
    return true; // Can't handle indirect branch.
  }

  // Get the instruction before it if it is a terminator.
  MachineInstr *SecondLastInst = &*I;
  unsigned SecondLastOpc = SecondLastInst->getOpcode();

  // If AllowModify is true and the block ends with two or more unconditional
  // branches, delete all but the first unconditional branch.
  if (AllowModify && isUncondBranchOpcode(LastOpc)) {
    while (isUncondBranchOpcode(SecondLastOpc)) {
      LastInst->eraseFromParent();
      LastInst = SecondLastInst;
      LastOpc = LastInst->getOpcode();
      if (I == MBB.begin() || !isUnpredicatedTerminator(*--I)) {
        // Return now the only terminator is an unconditional branch.
        TBB = LastInst->getOperand(0).getMBB();
        return false;
      } else {
        SecondLastInst = &*I;
        SecondLastOpc = SecondLastInst->getOpcode();
      }
    }
  }

  // If there are three terminators, we don't know what sort of block this is.
  if (SecondLastInst && I != MBB.begin() && isUnpredicatedTerminator(*--I))
    return true;

  // If the block ends with a B and a Bcc, handle it.
  if (isCondBranchOpcode(SecondLastOpc) && isUncondBranchOpcode(LastOpc)) {
    parseCondBranch(SecondLastInst, TBB, Cond);
    FBB = LastInst->getOperand(0).getMBB();
    return false;
  }

  // If the block ends with two unconditional branches, handle it.  The second
  // one is not executed.
  if (isUncondBranchOpcode(SecondLastOpc) && isUncondBranchOpcode(LastOpc)) {
    TBB = SecondLastInst->getOperand(0).getMBB();
    return false;
  }

  // ...likewise if it ends with an indirect branch followed by an unconditional
  // branch.
  if (isIndirectBranchOpcode(SecondLastOpc) && isUncondBranchOpcode(LastOpc)) {
    I = LastInst;
    if (AllowModify)
      I->eraseFromParent();
    return true;
  }

  // Otherwise, can't handle this.
  return true;
}
    242 
    243 unsigned SparcInstrInfo::InsertBranch(MachineBasicBlock &MBB,
    244                                       MachineBasicBlock *TBB,
    245                                       MachineBasicBlock *FBB,
    246                                       ArrayRef<MachineOperand> Cond,
    247                                       const DebugLoc &DL) const {
    248   assert(TBB && "InsertBranch must not be told to insert a fallthrough");
    249   assert((Cond.size() == 1 || Cond.size() == 0) &&
    250          "Sparc branch conditions should have one component!");
    251 
    252   if (Cond.empty()) {
    253     assert(!FBB && "Unconditional branch with multiple successors!");
    254     BuildMI(&MBB, DL, get(SP::BA)).addMBB(TBB);
    255     return 1;
    256   }
    257 
    258   // Conditional branch
    259   unsigned CC = Cond[0].getImm();
    260 
    261   if (IsIntegerCC(CC))
    262     BuildMI(&MBB, DL, get(SP::BCOND)).addMBB(TBB).addImm(CC);
    263   else
    264     BuildMI(&MBB, DL, get(SP::FBCOND)).addMBB(TBB).addImm(CC);
    265   if (!FBB)
    266     return 1;
    267 
    268   BuildMI(&MBB, DL, get(SP::BA)).addMBB(FBB);
    269   return 2;
    270 }
    271 
// Remove the branch instructions at the end of MBB, skipping over any debug
// values, and return how many branches were erased.
unsigned SparcInstrInfo::RemoveBranch(MachineBasicBlock &MBB) const
{
  MachineBasicBlock::iterator I = MBB.end();
  unsigned Count = 0;
  while (I != MBB.begin()) {
    --I;

    // Debug values may be interleaved with the terminators; ignore them.
    if (I->isDebugValue())
      continue;

    if (I->getOpcode() != SP::BA
        && I->getOpcode() != SP::BCOND
        && I->getOpcode() != SP::FBCOND)
      break; // Not a branch

    I->eraseFromParent();
    // Erasing invalidated I; restart the scan from the end of the block.
    I = MBB.end();
    ++Count;
  }
  return Count;
}
    293 
    294 bool SparcInstrInfo::ReverseBranchCondition(
    295     SmallVectorImpl<MachineOperand> &Cond) const {
    296   assert(Cond.size() == 1);
    297   SPCC::CondCodes CC = static_cast<SPCC::CondCodes>(Cond[0].getImm());
    298   Cond[0].setImm(GetOppositeBranchCondition(CC));
    299   return false;
    300 }
    301 
// Emit a register-to-register copy before I.  Simple classes copy with a
// single instruction; wide classes (integer pairs, double/quad FP on targets
// lacking the wide move) are expanded into per-subregister moves, after which
// the last move is annotated with an implicit def/kill of the full register.
void SparcInstrInfo::copyPhysReg(MachineBasicBlock &MBB,
                                 MachineBasicBlock::iterator I,
                                 const DebugLoc &DL, unsigned DestReg,
                                 unsigned SrcReg, bool KillSrc) const {
  unsigned numSubRegs = 0;
  unsigned movOpc     = 0;
  const unsigned *subRegIdx = nullptr;
  // ExtraG0: the chosen move opcode is ORrr, which needs %g0 as its first
  // source operand ("or %g0, src, dst" is the canonical integer move).
  bool ExtraG0 = false;

  // Sub-register index tables for the multi-instruction expansions below.
  const unsigned DW_SubRegsIdx[]  = { SP::sub_even, SP::sub_odd };
  const unsigned DFP_FP_SubRegsIdx[]  = { SP::sub_even, SP::sub_odd };
  const unsigned QFP_DFP_SubRegsIdx[] = { SP::sub_even64, SP::sub_odd64 };
  const unsigned QFP_FP_SubRegsIdx[]  = { SP::sub_even, SP::sub_odd,
                                          SP::sub_odd64_then_sub_even,
                                          SP::sub_odd64_then_sub_odd };

  if (SP::IntRegsRegClass.contains(DestReg, SrcReg))
    BuildMI(MBB, I, DL, get(SP::ORrr), DestReg).addReg(SP::G0)
      .addReg(SrcReg, getKillRegState(KillSrc));
  else if (SP::IntPairRegClass.contains(DestReg, SrcReg)) {
    // 64-bit integer pair: two OR moves, one per 32-bit half.
    subRegIdx  = DW_SubRegsIdx;
    numSubRegs = 2;
    movOpc     = SP::ORrr;
    ExtraG0 = true;
  } else if (SP::FPRegsRegClass.contains(DestReg, SrcReg))
    BuildMI(MBB, I, DL, get(SP::FMOVS), DestReg)
      .addReg(SrcReg, getKillRegState(KillSrc));
  else if (SP::DFPRegsRegClass.contains(DestReg, SrcReg)) {
    if (Subtarget.isV9()) {
      // V9 has a native double-precision move.
      BuildMI(MBB, I, DL, get(SP::FMOVD), DestReg)
        .addReg(SrcReg, getKillRegState(KillSrc));
    } else {
      // Use two FMOVS instructions.
      subRegIdx  = DFP_FP_SubRegsIdx;
      numSubRegs = 2;
      movOpc     = SP::FMOVS;
    }
  } else if (SP::QFPRegsRegClass.contains(DestReg, SrcReg)) {
    if (Subtarget.isV9()) {
      if (Subtarget.hasHardQuad()) {
        // Native quad-precision move.
        BuildMI(MBB, I, DL, get(SP::FMOVQ), DestReg)
          .addReg(SrcReg, getKillRegState(KillSrc));
      } else {
        // Use two FMOVD instructions.
        subRegIdx  = QFP_DFP_SubRegsIdx;
        numSubRegs = 2;
        movOpc     = SP::FMOVD;
      }
    } else {
      // Use four FMOVS instructions.
      subRegIdx  = QFP_FP_SubRegsIdx;
      numSubRegs = 4;
      movOpc     = SP::FMOVS;
    }
  } else if (SP::ASRRegsRegClass.contains(DestReg) &&
             SP::IntRegsRegClass.contains(SrcReg)) {
    // Integer register -> ancillary state register.
    BuildMI(MBB, I, DL, get(SP::WRASRrr), DestReg)
        .addReg(SP::G0)
        .addReg(SrcReg, getKillRegState(KillSrc));
  } else if (SP::IntRegsRegClass.contains(DestReg) &&
             SP::ASRRegsRegClass.contains(SrcReg)) {
    // Ancillary state register -> integer register.
    BuildMI(MBB, I, DL, get(SP::RDASR), DestReg)
        .addReg(SrcReg, getKillRegState(KillSrc));
  } else
    llvm_unreachable("Impossible reg-to-reg copy");

  // Single-instruction cases were emitted above; only the sub-register
  // expansions fall through to here.
  if (numSubRegs == 0 || subRegIdx == nullptr || movOpc == 0)
    return;

  const TargetRegisterInfo *TRI = &getRegisterInfo();
  MachineInstr *MovMI = nullptr;

  for (unsigned i = 0; i != numSubRegs; ++i) {
    unsigned Dst = TRI->getSubReg(DestReg, subRegIdx[i]);
    unsigned Src = TRI->getSubReg(SrcReg,  subRegIdx[i]);
    assert(Dst && Src && "Bad sub-register");

    MachineInstrBuilder MIB = BuildMI(MBB, I, DL, get(movOpc), Dst);
    if (ExtraG0)
      MIB.addReg(SP::G0);
    MIB.addReg(Src);
    MovMI = MIB.getInstr();
  }
  // Add implicit super-register defs and kills to the last MovMI.
  MovMI->addRegisterDefined(DestReg, TRI);
  if (KillSrc)
    MovMI->addRegisterKilled(SrcReg, TRI);
}
    390 
// Spill SrcReg to stack slot FI, selecting the store opcode by register
// class and attaching a memory operand describing the stack access.
void SparcInstrInfo::
storeRegToStackSlot(MachineBasicBlock &MBB, MachineBasicBlock::iterator I,
                    unsigned SrcReg, bool isKill, int FI,
                    const TargetRegisterClass *RC,
                    const TargetRegisterInfo *TRI) const {
  DebugLoc DL;
  if (I != MBB.end()) DL = I->getDebugLoc();

  MachineFunction *MF = MBB.getParent();
  const MachineFrameInfo &MFI = *MF->getFrameInfo();
  MachineMemOperand *MMO = MF->getMachineMemOperand(
      MachinePointerInfo::getFixedStack(*MF, FI), MachineMemOperand::MOStore,
      MFI.getObjectSize(FI), MFI.getObjectAlignment(FI));

  // On the order of operands here: think "[FrameIdx + 0] = SrcReg".
  if (RC == &SP::I64RegsRegClass)
    BuildMI(MBB, I, DL, get(SP::STXri)).addFrameIndex(FI).addImm(0)
      .addReg(SrcReg, getKillRegState(isKill)).addMemOperand(MMO);
  else if (RC == &SP::IntRegsRegClass)
    BuildMI(MBB, I, DL, get(SP::STri)).addFrameIndex(FI).addImm(0)
      .addReg(SrcReg, getKillRegState(isKill)).addMemOperand(MMO);
  else if (RC == &SP::IntPairRegClass)
    BuildMI(MBB, I, DL, get(SP::STDri)).addFrameIndex(FI).addImm(0)
      .addReg(SrcReg, getKillRegState(isKill)).addMemOperand(MMO);
  else if (RC == &SP::FPRegsRegClass)
    BuildMI(MBB, I, DL, get(SP::STFri)).addFrameIndex(FI).addImm(0)
      .addReg(SrcReg,  getKillRegState(isKill)).addMemOperand(MMO);
  else if (SP::DFPRegsRegClass.hasSubClassEq(RC))
    BuildMI(MBB, I, DL, get(SP::STDFri)).addFrameIndex(FI).addImm(0)
      .addReg(SrcReg,  getKillRegState(isKill)).addMemOperand(MMO);
  else if (SP::QFPRegsRegClass.hasSubClassEq(RC))
    // Use STQFri irrespective of its legality. If STQ is not legal, it will be
    // lowered into two STDs in eliminateFrameIndex.
    BuildMI(MBB, I, DL, get(SP::STQFri)).addFrameIndex(FI).addImm(0)
      .addReg(SrcReg,  getKillRegState(isKill)).addMemOperand(MMO);
  else
    llvm_unreachable("Can't store this register to stack slot");
}
    429 
// Reload DestReg from stack slot FI, selecting the load opcode by register
// class and attaching a memory operand describing the stack access.
void SparcInstrInfo::
loadRegFromStackSlot(MachineBasicBlock &MBB, MachineBasicBlock::iterator I,
                     unsigned DestReg, int FI,
                     const TargetRegisterClass *RC,
                     const TargetRegisterInfo *TRI) const {
  DebugLoc DL;
  if (I != MBB.end()) DL = I->getDebugLoc();

  MachineFunction *MF = MBB.getParent();
  const MachineFrameInfo &MFI = *MF->getFrameInfo();
  MachineMemOperand *MMO = MF->getMachineMemOperand(
      MachinePointerInfo::getFixedStack(*MF, FI), MachineMemOperand::MOLoad,
      MFI.getObjectSize(FI), MFI.getObjectAlignment(FI));

  // Each load has the form "DestReg = [FrameIdx + 0]".
  if (RC == &SP::I64RegsRegClass)
    BuildMI(MBB, I, DL, get(SP::LDXri), DestReg).addFrameIndex(FI).addImm(0)
      .addMemOperand(MMO);
  else if (RC == &SP::IntRegsRegClass)
    BuildMI(MBB, I, DL, get(SP::LDri), DestReg).addFrameIndex(FI).addImm(0)
      .addMemOperand(MMO);
  else if (RC == &SP::IntPairRegClass)
    BuildMI(MBB, I, DL, get(SP::LDDri), DestReg).addFrameIndex(FI).addImm(0)
      .addMemOperand(MMO);
  else if (RC == &SP::FPRegsRegClass)
    BuildMI(MBB, I, DL, get(SP::LDFri), DestReg).addFrameIndex(FI).addImm(0)
      .addMemOperand(MMO);
  else if (SP::DFPRegsRegClass.hasSubClassEq(RC))
    BuildMI(MBB, I, DL, get(SP::LDDFri), DestReg).addFrameIndex(FI).addImm(0)
      .addMemOperand(MMO);
  else if (SP::QFPRegsRegClass.hasSubClassEq(RC))
    // Use LDQFri irrespective of its legality. If LDQ is not legal, it will be
    // lowered into two LDDs in eliminateFrameIndex.
    BuildMI(MBB, I, DL, get(SP::LDQFri), DestReg).addFrameIndex(FI).addImm(0)
      .addMemOperand(MMO);
  else
    llvm_unreachable("Can't load this register from stack slot");
}
    467 
    468 unsigned SparcInstrInfo::getGlobalBaseReg(MachineFunction *MF) const
    469 {
    470   SparcMachineFunctionInfo *SparcFI = MF->getInfo<SparcMachineFunctionInfo>();
    471   unsigned GlobalBaseReg = SparcFI->getGlobalBaseReg();
    472   if (GlobalBaseReg != 0)
    473     return GlobalBaseReg;
    474 
    475   // Insert the set of GlobalBaseReg into the first MBB of the function
    476   MachineBasicBlock &FirstMBB = MF->front();
    477   MachineBasicBlock::iterator MBBI = FirstMBB.begin();
    478   MachineRegisterInfo &RegInfo = MF->getRegInfo();
    479 
    480   const TargetRegisterClass *PtrRC =
    481     Subtarget.is64Bit() ? &SP::I64RegsRegClass : &SP::IntRegsRegClass;
    482   GlobalBaseReg = RegInfo.createVirtualRegister(PtrRC);
    483 
    484   DebugLoc dl;
    485 
    486   BuildMI(FirstMBB, MBBI, dl, get(SP::GETPCX), GlobalBaseReg);
    487   SparcFI->setGlobalBaseReg(GlobalBaseReg);
    488   return GlobalBaseReg;
    489 }
    490 
    491 bool SparcInstrInfo::expandPostRAPseudo(MachineInstr &MI) const {
    492   switch (MI.getOpcode()) {
    493   case TargetOpcode::LOAD_STACK_GUARD: {
    494     assert(Subtarget.isTargetLinux() &&
    495            "Only Linux target is expected to contain LOAD_STACK_GUARD");
    496     // offsetof(tcbhead_t, stack_guard) from sysdeps/sparc/nptl/tls.h in glibc.
    497     const int64_t Offset = Subtarget.is64Bit() ? 0x28 : 0x14;
    498     MI.setDesc(get(Subtarget.is64Bit() ? SP::LDXri : SP::LDri));
    499     MachineInstrBuilder(*MI.getParent()->getParent(), MI)
    500         .addReg(SP::G7)
    501         .addImm(Offset);
    502     return true;
    503   }
    504   }
    505   return false;
    506 }
    507