
Lines Matching full:SrcReg (search restricted to the AArch64 target directory)

445     unsigned SrcReg = Cond[2].getReg();
448 MRI.constrainRegClass(SrcReg, &AArch64::GPR64spRegClass);
450 .addReg(SrcReg)
454 MRI.constrainRegClass(SrcReg, &AArch64::GPR32spRegClass);
456 .addReg(SrcReg)
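
The first group of matches (file lines 445-456) constrains SrcReg to an sp-capable register class (GPR64sp or GPR32sp) before adding it as an operand of a newly built instruction. MachineRegisterInfo::constrainRegClass narrows a virtual register's class to one compatible with both its existing uses and the new one, and reports failure when no common subclass exists. The following is a small, LLVM-free model of that idea; the RegClass alias and the register numbers are invented for illustration, the real API works on TargetRegisterClass objects.

// Simplified, hypothetical model of MachineRegisterInfo::constrainRegClass:
// a register class is treated as a set of allowed physical registers, and
// constraining a virtual register keeps only the registers that are legal
// for both its current class and the class demanded by the new use.
#include <algorithm>
#include <cstdio>
#include <iterator>
#include <set>

using RegClass = std::set<unsigned>;

RegClass constrainRegClass(const RegClass &Current, const RegClass &Required) {
  RegClass Result;
  std::set_intersection(Current.begin(), Current.end(),
                        Required.begin(), Required.end(),
                        std::inserter(Result, Result.begin()));
  return Result; // empty means the constraint could not be satisfied
}

int main() {
  RegClass GPR64   = {0, 1, 2, 3, 4};  // pretend X0-X4
  RegClass GPR64sp = {0, 1, 2, 3, 31}; // pretend X0-X3 plus SP
  std::printf("constrained class has %zu registers\n",
              constrainRegClass(GPR64, GPR64sp).size());
}
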
638 unsigned &SrcReg, unsigned &DstReg,
650 SrcReg = MI.getOperand(1).getReg();
690 /// in SrcReg and SrcReg2, and the value it compares against in CmpValue.
692 bool AArch64InstrInfo::analyzeCompare(const MachineInstr &MI, unsigned &SrcReg,
711 SrcReg = MI.getOperand(1).getReg();
720 SrcReg = MI.getOperand(1).getReg();
730 SrcReg = MI.getOperand(1).getReg();
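
analyzeCompare (file lines 690-730) reports what a flag-setting compare actually compares: the first source operand goes into SrcReg, and depending on whether the instruction is a register-register or register-immediate form, either SrcReg2 or CmpValue is filled in (the real pass also collapses the immediate to zero versus non-zero and uses CmpMask for TST-style compares). The sketch below is a hypothetical, self-contained model of that contract; the MI struct and Opc enum are invented, only the opcode names correspond to real AArch64 instructions.

// Hypothetical model of the analyzeCompare() contract: report the register
// being compared (SrcReg) and what it is compared against (SrcReg2 for
// register forms, CmpValue for immediate forms). The MI struct and Opc enum
// are invented; the real routine inspects MachineInstr operands instead.
#include <cstdio>

enum class Opc { SUBSWrr, SUBSXrr, SUBSWri, SUBSXri, Other };

struct MI {
  Opc Opcode;
  unsigned Op1Reg;  // first source register
  unsigned Op2Reg;  // second source register (register forms)
  long long Op2Imm; // immediate (immediate forms)
};

bool analyzeCompareSketch(const MI &I, unsigned &SrcReg, unsigned &SrcReg2,
                          int &CmpMask, long long &CmpValue) {
  switch (I.Opcode) {
  case Opc::SUBSWrr:
  case Opc::SUBSXrr:
    SrcReg = I.Op1Reg;   // compare of two registers
    SrcReg2 = I.Op2Reg;
    CmpMask = ~0;
    CmpValue = 0;
    return true;
  case Opc::SUBSWri:
  case Opc::SUBSXri:
    SrcReg = I.Op1Reg;   // compare of a register against an immediate
    SrcReg2 = 0;
    CmpMask = ~0;
    CmpValue = I.Op2Imm; // the real pass only records zero vs. non-zero here
    return true;
  case Opc::Other:
    break;
  }
  return false;          // not a compare this optimization understands
}

int main() {
  MI Cmp{Opc::SUBSXri, /*Op1Reg=*/5, /*Op2Reg=*/0, /*Op2Imm=*/42};
  unsigned SrcReg = 0, SrcReg2 = 0;
  int CmpMask = 0;
  long long CmpValue = 0;
  if (analyzeCompareSketch(Cmp, SrcReg, SrcReg2, CmpMask, CmpValue))
    std::printf("compares reg %u against %lld\n", SrcReg, CmpValue);
}
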
883 MachineInstr &CmpInstr, unsigned SrcReg, unsigned SrcReg2, int CmpMask,
920 return substituteCmpToZero(CmpInstr, SrcReg, MRI);
1119 MachineInstr &CmpInstr, unsigned SrcReg,
1122 // Get the unique definition of SrcReg.
1123 MachineInstr *MI = MRI->getUniqueVRegDef(SrcReg);
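
The matches at file lines 883-1123 are the follow-on optimization: optimizeCompareInstr fetches the unique definition of SrcReg (MRI->getUniqueVRegDef) and, for a compare against zero, substituteCmpToZero tries to rewrite that defining instruction to its flag-setting "S" form (ADD to ADDS, SUB to SUBS, and so on) so the explicit compare can be deleted. Here is a standalone sketch of the opcode rewrite; the strings stand in for the real AArch64:: opcode enum and the helper name is invented.

// Standalone sketch of the opcode rewrite behind substituteCmpToZero:
// the defining instruction of SrcReg is switched to its flag-setting
// "S" form so the explicit compare-with-zero can be removed.
#include <cstdio>
#include <map>
#include <string>

static const std::map<std::string, std::string> SFormTable = {
    {"ADDWrr", "ADDSWrr"}, {"ADDXrr", "ADDSXrr"},
    {"ADDWri", "ADDSWri"}, {"ADDXri", "ADDSXri"},
    {"SUBWrr", "SUBSWrr"}, {"SUBXrr", "SUBSXrr"},
    {"SUBWri", "SUBSWri"}, {"SUBXri", "SUBSXri"},
    {"ANDWrr", "ANDSWrr"}, {"ANDXrr", "ANDSXrr"},
};

// Returns the flag-setting form, or an empty string if there is none
// (in which case the compare has to stay).
std::string sFormSketch(const std::string &Opc) {
  auto It = SFormTable.find(Opc);
  return It == SFormTable.end() ? std::string() : It->second;
}

int main() {
  // A "SUBS wzr, w8, #0" whose operand w8 is defined by "SUBWri w8, w9, #1"
  // can be folded by rewriting the definition to SUBSWri, provided NZCV is
  // neither clobbered nor read in between.
  std::printf("SUBWri -> %s\n", sFormSketch("SUBWri").c_str());
}
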
1863 static bool forwardCopyWillClobberTuple(unsigned DestReg, unsigned SrcReg,
1867 return ((DestReg - SrcReg) & 0x1f) < NumRegs;
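
forwardCopyWillClobberTuple (file line 1863) answers a purely arithmetic question: register encodings are 5 bits, so a tuple of NumRegs consecutive registers wraps around modulo 32, and a forward element-by-element copy is unsafe exactly when the destination's first register falls inside the window [SrcReg, SrcReg + NumRegs). The check can be lifted out verbatim and exercised on a wrap-around case:

// Standalone copy of the overlap test used by the tuple copy code.
// A forward copy clobbers the source iff the destination encoding lands
// inside the NumRegs-long window starting at the source encoding (mod 32).
#include <cstdio>

static bool forwardCopyWillClobberTuple(unsigned DestReg, unsigned SrcReg,
                                        unsigned NumRegs) {
  return ((DestReg - SrcReg) & 0x1f) < NumRegs;
}

int main() {
  // Copying Q30_Q31 into Q31_Q0 forward would overwrite Q31 before it is
  // read, so the copy has to be emitted in reverse sub-register order.
  std::printf("%d\n", forwardCopyWillClobberTuple(31, 30, 2)); // 1 (clobbers)
  std::printf("%d\n", forwardCopyWillClobberTuple(28, 30, 2)); // 0 (safe)
}
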
1872 unsigned DestReg, unsigned SrcReg, bool KillSrc, unsigned Opcode,
1878 uint16_t SrcEncoding = TRI->getEncodingValue(SrcReg);
1891 AddSubReg(MIB, SrcReg, Indices[SubReg], 0, TRI);
1892 AddSubReg(MIB, SrcReg, Indices[SubReg], getKillRegState(KillSrc), TRI);
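
copyPhysRegTuple (around file line 1872) then copies the tuple one sub-register at a time, emitting one vector ORR per element (which is why SrcReg is added twice per instruction: the vector ORR takes two source operands), and it walks the elements in reverse whenever the helper above says a forward walk would clobber the source. A simplified, LLVM-free sketch of that direction choice, with the instruction build reduced to a printf and the helper repeated so the sketch stands alone:

// Sketch of how the tuple copy orders its per-sub-register copies: the loop
// runs backwards whenever the forward order would clobber source elements.
// Types and the "emit" step are invented for illustration.
#include <cstdio>

static bool forwardCopyWillClobberTuple(unsigned DestReg, unsigned SrcReg,
                                        unsigned NumRegs) {
  return ((DestReg - SrcReg) & 0x1f) < NumRegs;
}

void copyTupleSketch(unsigned DestEncoding, unsigned SrcEncoding,
                     unsigned NumRegs) {
  int SubReg = 0, End = (int)NumRegs, Incr = 1;
  if (forwardCopyWillClobberTuple(DestEncoding, SrcEncoding, NumRegs)) {
    SubReg = (int)NumRegs - 1; // start from the last element instead
    End = -1;
    Incr = -1;
  }
  for (; SubReg != End; SubReg += Incr)
    std::printf("copy element %d: reg %u <- reg %u\n", SubReg,
                (DestEncoding + SubReg) & 0x1f, (SrcEncoding + SubReg) & 0x1f);
}

int main() {
  copyTupleSketch(/*Dest=*/31, /*Src=*/30, /*NumRegs=*/2); // overlap: reversed
}
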
1899 unsigned SrcReg, bool KillSrc) const {
1901 (AArch64::GPR32spRegClass.contains(SrcReg) || SrcReg == AArch64::WZR)) {
1904 if (DestReg == AArch64::WSP || SrcReg == AArch64::WSP) {
1910 unsigned SrcRegX = TRI->getMatchingSuperReg(SrcReg, AArch64::sub_32,
1915 // value from SrcReg.
1920 .addReg(SrcReg, RegState::Implicit | getKillRegState(KillSrc));
1923 .addReg(SrcReg, getKillRegState(KillSrc))
1927 } else if (SrcReg == AArch64::WZR && Subtarget.hasZeroCycleZeroing()) {
1935 unsigned SrcRegX = TRI->getMatchingSuperReg(SrcReg, AArch64::sub_32,
1940 // value from SrcReg.
1944 .addReg(SrcReg, RegState::Implicit | getKillRegState(KillSrc));
1949 .addReg(SrcReg, getKillRegState(KillSrc));
1956 (AArch64::GPR64spRegClass.contains(SrcReg) || SrcReg == AArch64::XZR)) {
1957 if (DestReg == AArch64::SP || SrcReg == AArch64::SP) {
1960 .addReg(SrcReg, getKillRegState(KillSrc))
1963 } else if (SrcReg == AArch64::XZR && Subtarget.hasZeroCycleZeroing()) {
1970 .addReg(SrcReg, getKillRegState(KillSrc));
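
The GPR32/GPR64 cases of the copy code (file lines 1899-1970) pick different instructions for what is nominally the same register move: if either side is SP/WSP the copy must be an ADD #0, because the ORR form cannot encode the stack pointer; if the source is WZR/XZR and the core has zero-cycle zeroing, a MOVZ #0 is preferred; otherwise the copy is an ORR with the zero register. (The zero-cycle register-move path, which widens 32-bit copies to the X super-registers via getMatchingSuperReg, is omitted from the sketch.) The helper below is a hypothetical summary of that decision, producing assembly text only:

// Hypothetical summary of the opcode choice for a plain GPR-to-GPR copy.
// Register names are plain strings; the mnemonics mirror the LLVM opcodes
// used in the matches above (ADDWri/ADDXri, MOVZWi/MOVZXi, ORRWrs/ORRXrs).
#include <cstdio>
#include <string>

std::string pickCopyOpcode(const std::string &Dest, const std::string &Src,
                           bool Is64Bit, bool HasZeroCycleZeroing) {
  const std::string SP = Is64Bit ? "sp" : "wsp";
  const std::string Zero = Is64Bit ? "xzr" : "wzr";
  if (Dest == SP || Src == SP) // ORR cannot name SP, so use ADD #0
    return (Is64Bit ? "ADDXri " : "ADDWri ") + Dest + ", " + Src + ", #0";
  if (Src == Zero && HasZeroCycleZeroing) // zeroing idiom the core recognizes
    return (Is64Bit ? "MOVZXi " : "MOVZWi ") + Dest + ", #0";
  return (Is64Bit ? "ORRXrs " : "ORRWrs ") + Dest + ", " + Zero + ", " + Src;
}

int main() {
  std::printf("%s\n", pickCopyOpcode("x0", "sp", true, false).c_str());
  std::printf("%s\n", pickCopyOpcode("w1", "wzr", false, true).c_str());
  std::printf("%s\n", pickCopyOpcode("x2", "x3", true, false).c_str());
}
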
1977 AArch64::DDDDRegClass.contains(SrcReg)) {
1980 copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORRv8i8,
1987 AArch64::DDDRegClass.contains(SrcReg)) {
1990 copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORRv8i8,
1997 AArch64::DDRegClass.contains(SrcReg)) {
1999 copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORRv8i8,
2006 AArch64::QQQQRegClass.contains(SrcReg)) {
2009 copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORRv16i8,
2016 AArch64::QQQRegClass.contains(SrcReg)) {
2019 copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORRv16i8,
2026 AArch64::QQRegClass.contains(SrcReg)) {
2028 copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORRv16i8,
2034 AArch64::FPR128RegClass.contains(SrcReg)) {
2037 .addReg(SrcReg)
2038 .addReg(SrcReg, getKillRegState(KillSrc));
2042 .addReg(SrcReg, getKillRegState(KillSrc))
2055 AArch64::FPR64RegClass.contains(SrcReg)) {
2059 SrcReg = RI.getMatchingSuperReg(SrcReg, AArch64::dsub,
2062 .addReg(SrcReg)
2063 .addReg(SrcReg, getKillRegState(KillSrc));
2066 .addReg(SrcReg, getKillRegState(KillSrc));
2072 AArch64::FPR32RegClass.contains(SrcReg)) {
2076 SrcReg = RI.getMatchingSuperReg(SrcReg, AArch64::ssub,
2079 .addReg(SrcReg)
2080 .addReg(SrcReg, getKillRegState(KillSrc));
2083 .addReg(SrcReg, getKillRegState(KillSrc));
2089 AArch64::FPR16RegClass.contains(SrcReg)) {
2093 SrcReg = RI.getMatchingSuperReg(SrcReg, AArch64::hsub,
2096 .addReg(SrcReg)
2097 .addReg(SrcReg, getKillRegState(KillSrc));
2101 SrcReg = RI.getMatchingSuperReg(SrcReg, AArch64::hsub,
2104 .addReg(SrcReg, getKillRegState(KillSrc));
2110 AArch64::FPR8RegClass.contains(SrcReg)) {
2114 SrcReg = RI.getMatchingSuperReg(SrcReg, AArch64::bsub,
2117 .addReg(SrcReg)
2118 .addReg(SrcReg, getKillRegState(KillSrc));
2122 SrcReg = RI.getMatchingSuperReg(SrcReg, AArch64::bsub,
2125 .addReg(SrcReg, getKillRegState(KillSrc));
2132 AArch64::GPR64RegClass.contains(SrcReg)) {
2134 .addReg(SrcReg, getKillRegState(KillSrc));
2138 AArch64::FPR64RegClass.contains(SrcReg)) {
2140 .addReg(SrcReg, getKillRegState(KillSrc));
2145 AArch64::GPR32RegClass.contains(SrcReg)) {
2147 .addReg(SrcReg, getKillRegState(KillSrc));
2151 AArch64::FPR32RegClass.contains(SrcReg)) {
2153 .addReg(SrcReg, getKillRegState(KillSrc));
2158 assert(AArch64::GPR64RegClass.contains(SrcReg) && "Invalid NZCV copy");
2161 .addReg(SrcReg, getKillRegState(KillSrc))
2166 if (SrcReg == AArch64::NZCV) {
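
The final special cases (file lines 2158-2166) deal with NZCV: the flags register cannot be moved with ORR, so a copy into NZCV is emitted as an MSR NZCV write from a GPR and a copy out of NZCV as an MRS read into a GPR, with the asserts checking that the other side really is a 64-bit GPR. A tiny, hypothetical illustration of that selection:

// Hypothetical illustration of the NZCV copy cases: flags move through the
// system-register interface (MSR to write NZCV, MRS to read it), not ORR.
#include <cstdio>
#include <string>

std::string pickNZCVCopy(const std::string &Dest, const std::string &Src) {
  if (Dest == "nzcv")
    return "msr nzcv, " + Src;       // GPR -> flags
  if (Src == "nzcv")
    return "mrs " + Dest + ", nzcv"; // flags -> GPR
  return "<not an NZCV copy>";
}

int main() {
  std::printf("%s\n", pickNZCVCopy("nzcv", "x8").c_str());
  std::printf("%s\n", pickNZCVCopy("x9", "nzcv").c_str());
}
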
2178 MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI, unsigned SrcReg,
2205 if (TargetRegisterInfo::isVirtualRegister(SrcReg))
2206 MF.getRegInfo().constrainRegClass(SrcReg, &AArch64::GPR32RegClass);
2208 assert(SrcReg != AArch64::WSP);
2215 if (TargetRegisterInfo::isVirtualRegister(SrcReg))
2216 MF.getRegInfo().constrainRegClass(SrcReg, &AArch64::GPR64RegClass);
2218 assert(SrcReg != AArch64::SP);
2273 .addReg(SrcReg, getKillRegState(isKill))
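
The spill code (file lines 2178-2273, which looks like storeRegToStackSlot) first constrains virtual registers to plain GPR32/GPR64 and asserts that SrcReg is not WSP/SP: the register field of a store encodes the zero register rather than the stack pointer, so SP can never be the stored value. The store opcode itself is chosen from the spill size of the register class; the sketch below covers only the scalar cases, uses strings for the real AArch64:: opcodes, and leaves out the vector tuples that the real function handles with ST1 variants.

// Hypothetical sketch of the scalar part of the spill-opcode selection:
// pick the store instruction from the spill size of the register class and
// from whether the value lives in a general-purpose or FP/SIMD register.
#include <cstdio>
#include <string>

std::string pickSpillStore(unsigned SpillSizeInBytes, bool IsGPR) {
  switch (SpillSizeInBytes) {
  case 1:  return "STRBui";                     // FPR8
  case 2:  return "STRHui";                     // FPR16
  case 4:  return IsGPR ? "STRWui" : "STRSui";  // GPR32 / FPR32
  case 8:  return IsGPR ? "STRXui" : "STRDui";  // GPR64 / FPR64
  case 16: return "STRQui";                     // FPR128
  default: return "<tuple or unsupported>";
  }
}

int main() {
  std::printf("%s\n", pickSpillStore(8, /*IsGPR=*/true).c_str());  // STRXui
  std::printf("%s\n", pickSpillStore(4, /*IsGPR=*/false).c_str()); // STRSui
}
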
2386 unsigned DestReg, unsigned SrcReg, int Offset,
2389 if (DestReg == SrcReg && Offset == 0)
2428 .addReg(SrcReg)
2433 SrcReg = DestReg;
2439 .addReg(SrcReg)
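
The matches at file lines 2386-2439 look like emitFrameOffset: nothing is emitted when DestReg equals SrcReg and the offset is zero, and large offsets are split into ADD/SUB immediates (12 bits, optionally shifted left by 12), with SrcReg reassigned to DestReg so the next chunk chains off the partial result. A standalone sketch of that splitting, with the instruction build reduced to printing assembly text; negative offsets, which the real code handles with SUB, are omitted.

// Standalone sketch of the offset splitting behind emitFrameOffset:
// AArch64 ADD/SUB immediates hold 12 bits, optionally shifted left by 12,
// so a large frame offset becomes a chain of adds, each further add using
// the previous result ("SrcReg = DestReg" in the matches above).
#include <cstdio>
#include <string>

void emitFrameOffsetSketch(const std::string &DestReg, std::string SrcReg,
                           unsigned Offset) {
  const unsigned MaxEncoding = 0xfff;                 // 12-bit immediate
  const unsigned ShiftSize = 12;                      // optional LSL #12
  const unsigned MaxEncodable = MaxEncoding << ShiftSize;
  while (Offset > MaxEncoding) {
    unsigned Chunk = Offset > MaxEncodable ? MaxEncodable
                                           : (Offset & MaxEncodable);
    std::printf("add %s, %s, #%u, lsl #12\n", DestReg.c_str(), SrcReg.c_str(),
                Chunk >> ShiftSize);
    SrcReg = DestReg;                                 // chain the next chunk
    Offset -= Chunk;
  }
  if (Offset != 0 || SrcReg != DestReg)
    std::printf("add %s, %s, #%u\n", DestReg.c_str(), SrcReg.c_str(), Offset);
}

int main() {
  emitFrameOffsetSketch("x9", "sp", 0x12345);         // 74565-byte offset
}
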
2464 unsigned SrcReg = MI.getOperand(1).getReg();
2465 if (SrcReg == AArch64::SP &&
2471 TargetRegisterInfo::isVirtualRegister(SrcReg)) {
2472 MF.getRegInfo().constrainRegClass(SrcReg, &AArch64::GPR64RegClass);