Lines matching full:aarch64 in AArch64InstrInfo.cpp
1 //===- AArch64InstrInfo.cpp - AArch64 Instruction Information -------------===//
10 // This file contains the AArch64 implementation of the TargetInstrInfo class.
32 : AArch64GenInstrInfo(AArch64::ADJCALLSTACKDOWN, AArch64::ADJCALLSTACKUP),
42 if (MI->getOpcode() == AArch64::INLINEASM)
66 case AArch64::Bcc:
70 case AArch64::CBZW:
71 case AArch64::CBZX:
72 case AArch64::CBNZW:
73 case AArch64::CBNZX:
79 case AArch64::TBZW:
80 case AArch64::TBZX:
81 case AArch64::TBNZW:
82 case AArch64::TBNZX:
190 case AArch64::CBZW:
191 Cond[1].setImm(AArch64::CBNZW);
193 case AArch64::CBNZW:
194 Cond[1].setImm(AArch64::CBZW);
196 case AArch64::CBZX:
197 Cond[1].setImm(AArch64::CBNZX);
199 case AArch64::CBNZX:
200 Cond[1].setImm(AArch64::CBZX);
202 case AArch64::TBZW:
203 Cond[1].setImm(AArch64::TBNZW);
205 case AArch64::TBNZW:
206 Cond[1].setImm(AArch64::TBZW);
208 case AArch64::TBZX:
209 Cond[1].setImm(AArch64::TBNZX);
211 case AArch64::TBNZX:
212 Cond[1].setImm(AArch64::TBZX);
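
The CBZ/CBNZ and TBZ/TBNZ cases above are reversed by swapping each opcode with its twin. A minimal standalone sketch of that pairing follows; the enum and function names are hypothetical and are not the LLVM interface.

    // Standalone sketch: the opcode pairing behind the condition reversal above.
    #include <cassert>

    enum BranchOpc { CBZW, CBZX, CBNZW, CBNZX, TBZW, TBZX, TBNZW, TBNZX };

    // Reversing a compare-and-branch or test-and-branch just flips Z <-> NZ.
    static BranchOpc invertBranch(BranchOpc Opc) {
      switch (Opc) {
      case CBZW:  return CBNZW;
      case CBNZW: return CBZW;
      case CBZX:  return CBNZX;
      case CBNZX: return CBZX;
      case TBZW:  return TBNZW;
      case TBNZW: return TBZW;
      case TBZX:  return TBNZX;
      case TBNZX: return TBZX;
      }
      assert(false && "unknown branch opcode");
      return Opc;
    }

    int main() {
      assert(invertBranch(invertBranch(TBZX)) == TBZX); // reversal is an involution
      return 0;
    }
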
250 BuildMI(&MBB, DL, get(AArch64::Bcc)).addImm(Cond[0].getImm()).addMBB(TBB);
270 BuildMI(&MBB, DL, get(AArch64::B)).addMBB(TBB);
278 BuildMI(&MBB, DL, get(AArch64::B)).addMBB(FBB);
302 bool Is64Bit = AArch64::GPR64allRegClass.hasSubClassEq(MRI.getRegClass(VReg));
307 case AArch64::ADDSXri:
308 case AArch64::ADDSWri:
310 if (DefMI->findRegisterDefOperandIdx(AArch64::NZCV, true) == -1)
313 case AArch64::ADDXri:
314 case AArch64::ADDWri:
320 Opc = Is64Bit ? AArch64::CSINCXr : AArch64::CSINCWr;
323 case AArch64::ORNXrr:
324 case AArch64::ORNWrr: {
327 if (ZReg != AArch64::XZR && ZReg != AArch64::WZR)
330 Opc = Is64Bit ? AArch64::CSINVXr : AArch64::CSINVWr;
334 case AArch64::SUBSXrr:
335 case AArch64::SUBSWrr:
337 if (DefMI->findRegisterDefOperandIdx(AArch64::NZCV, true) == -1)
340 case AArch64::SUBXrr:
341 case AArch64::SUBWrr: {
344 if (ZReg != AArch64::XZR && ZReg != AArch64::WZR)
347 Opc = Is64Bit ? AArch64::CSNEGXr : AArch64::CSNEGWr;
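
The folds checked above turn an add of one, an ORN with the zero register, or a subtract from the zero register in one select arm into CSINC, CSINV, or CSNEG. A standalone sketch of those conditional-select semantics, with hypothetical helper names rather than LLVM code:

    // Standalone sketch: semantics of the conditional-select family targeted above.
    #include <cassert>
    #include <cstdint>

    static uint64_t csel (bool c, uint64_t n, uint64_t m) { return c ? n : m;     }
    static uint64_t csinc(bool c, uint64_t n, uint64_t m) { return c ? n : m + 1; }
    static uint64_t csinv(bool c, uint64_t n, uint64_t m) { return c ? n : ~m;    }
    static uint64_t csneg(bool c, uint64_t n, uint64_t m) { return c ? n : 0 - m; }

    int main() {
      uint64_t a = 7, b = 42;
      assert(csel (true,  a, b) == a);
      assert(csinc(false, a, b) == b + 1);   // absorbs "add b, b, #1"
      assert(csinv(false, a, b) == ~b);      // absorbs "orn b, wzr, b"
      assert(csneg(false, a, b) == ~b + 1);  // absorbs "sub b, wzr, b"
      return 0;
    }
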
376 if (AArch64::GPR64allRegClass.hasSubClassEq(RC) ||
377 AArch64::GPR32allRegClass.hasSubClassEq(RC)) {
390 if (AArch64::FPR64RegClass.hasSubClassEq(RC) ||
391 AArch64::FPR32RegClass.hasSubClassEq(RC)) {
422 case AArch64::CBZW:
426 case AArch64::CBZX:
430 case AArch64::CBNZW:
434 case AArch64::CBNZX:
442 MRI.constrainRegClass(SrcReg, &AArch64::GPR64spRegClass);
443 BuildMI(MBB, I, DL, get(AArch64::SUBSXri), AArch64::XZR)
448 MRI.constrainRegClass(SrcReg, &AArch64::GPR32spRegClass);
449 BuildMI(MBB, I, DL, get(AArch64::SUBSWri), AArch64::WZR)
461 case AArch64::TBZW:
462 case AArch64::TBZX:
465 case AArch64::TBNZW:
466 case AArch64::TBNZX:
471 if (Cond[1].getImm() == AArch64::TBZW || Cond[1].getImm() == AArch64::TBNZW)
472 BuildMI(MBB, I, DL, get(AArch64::ANDSWri), AArch64::WZR)
477 BuildMI(MBB, I, DL, get(AArch64::ANDSXri), AArch64::XZR)
488 if (MRI.constrainRegClass(DstReg, &AArch64::GPR64RegClass)) {
489 RC = &AArch64::GPR64RegClass;
490 Opc = AArch64::CSELXr;
492 } else if (MRI.constrainRegClass(DstReg, &AArch64::GPR32RegClass)) {
493 RC = &AArch64::GPR32RegClass;
494 Opc = AArch64::CSELWr;
496 } else if (MRI.constrainRegClass(DstReg, &AArch64::FPR64RegClass)) {
497 RC = &AArch64::FPR64RegClass;
498 Opc = AArch64::FCSELDrrr;
499 } else if (MRI.constrainRegClass(DstReg, &AArch64::FPR32RegClass)) {
500 RC = &AArch64::FPR32RegClass;
501 Opc = AArch64::FCSELSrrr;
554 case AArch64::ADDWri:
555 case AArch64::ADDXri:
556 case AArch64::SUBWri:
557 case AArch64::SUBXri:
561 case AArch64::ANDWri:
562 case AArch64::ANDXri:
563 case AArch64::EORWri:
564 case AArch64::EORXri:
565 case AArch64::ORRWri:
566 case AArch64::ORRXri:
570 case AArch64::ANDWrr:
571 case AArch64::ANDXrr:
572 case AArch64::BICWrr:
573 case AArch64::BICXrr:
574 case AArch64::EONWrr:
575 case AArch64::EONXrr:
576 case AArch64::EORWrr:
577 case AArch64::EORXrr:
578 case AArch64::ORNWrr:
579 case AArch64::ORNXrr:
580 case AArch64::ORRWrr:
581 case AArch64::ORRXrr:
585 case AArch64::MOVi32imm:
587 case AArch64::MOVi64imm:
600 case AArch64::SBFMXri: // aka sxtw
601 case AArch64::UBFMXri: // aka uxtw
609 SubIdx = AArch64::sub_32;
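
The SBFM and UBFM forms matched above are the sxtw and uxtw aliases, i.e. plain 32-to-64-bit sign and zero extension of the low sub-register, which is why they can be treated as coalescable sub-register copies. A standalone illustration (not LLVM code):

    // Standalone sketch: what sxtw / uxtw compute.
    #include <cassert>
    #include <cstdint>

    static int64_t  sxtw(uint64_t x) { return (int64_t)(int32_t)(uint32_t)x; }
    static uint64_t uxtw(uint64_t x) { return (uint64_t)(uint32_t)x; }

    int main() {
      assert(sxtw(0xFFFFFFFFu) == -1);           // bit 31 is replicated upward
      assert(uxtw(0xFFFFFFFFu) == 0xFFFFFFFFu);  // upper 32 bits cleared
      assert(sxtw(0x7FFFFFFFu) == 0x7FFFFFFF);
      return 0;
    }
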
657 case AArch64::SUBSWrr:
658 case AArch64::SUBSWrs:
659 case AArch64::SUBSWrx:
660 case AArch64::SUBSXrr:
661 case AArch64::SUBSXrs:
662 case AArch64::SUBSXrx:
663 case AArch64::ADDSWrr:
664 case AArch64::ADDSWrs:
665 case AArch64::ADDSWrx:
666 case AArch64::ADDSXrr:
667 case AArch64::ADDSXrs:
668 case AArch64::ADDSXrx:
675 case AArch64::SUBSWri:
676 case AArch64::ADDSWri:
677 case AArch64::SUBSXri:
678 case AArch64::ADDSXri:
685 case AArch64::ANDSWri:
686 case AArch64::ANDSXri:
699 MI->getOpcode() == AArch64::ANDSWri ? 32 : 64) != 0);
751 if (MI->definesRegister(AArch64::WZR) || MI->definesRegister(AArch64::XZR))
757 case AArch64::ADDSWrr:
758 return AArch64::ADDWrr;
759 case AArch64::ADDSWri:
760 return MIDefinesZeroReg ? AArch64::ADDSWri : AArch64::ADDWri;
761 case AArch64::ADDSWrs:
762 return MIDefinesZeroReg ? AArch64::ADDSWrs : AArch64::ADDWrs;
763 case AArch64::ADDSWrx:
764 return AArch64::ADDWrx;
765 case AArch64::ADDSXrr:
766 return AArch64::ADDXrr;
767 case AArch64::ADDSXri:
768 return MIDefinesZeroReg ? AArch64::ADDSXri : AArch64::ADDXri;
769 case AArch64::ADDSXrs:
770 return MIDefinesZeroReg ? AArch64::ADDSXrs : AArch64::ADDXrs;
771 case AArch64::ADDSXrx:
772 return AArch64::ADDXrx;
773 case AArch64::SUBSWrr:
774 return AArch64::SUBWrr;
775 case AArch64::SUBSWri:
776 return MIDefinesZeroReg ? AArch64::SUBSWri : AArch64::SUBWri;
777 case AArch64::SUBSWrs:
778 return MIDefinesZeroReg ? AArch64::SUBSWrs : AArch64::SUBWrs;
779 case AArch64::SUBSWrx:
780 return AArch64::SUBWrx;
781 case AArch64::SUBSXrr:
782 return AArch64::SUBXrr;
783 case AArch64::SUBSXri:
784 return MIDefinesZeroReg ? AArch64::SUBSXri : AArch64::SUBXri;
785 case AArch64::SUBSXrs:
786 return MIDefinesZeroReg ? AArch64::SUBSXrs : AArch64::SUBXrs;
787 case AArch64::SUBSXrx:
788 return AArch64::SUBXrx;
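
The table above pairs each flag-setting ADDS/SUBS form with its plain twin; the conversion applies when the NZCV result turns out not to be needed. A minimal standalone sketch of that idea, with hypothetical names:

    // Standalone sketch: drop the flag-setting variant once NZCV is known dead.
    #include <cassert>

    enum Opc { ADDSWrr, ADDWrr, SUBSXri, SUBXri };

    static Opc dropFlagSetting(Opc O, bool FlagsAreDead) {
      if (!FlagsAreDead)
        return O;                    // NZCV still has a reader: keep the S form
      switch (O) {
      case ADDSWrr: return ADDWrr;   // adds w0, w1, w2  ->  add w0, w1, w2
      case SUBSXri: return SUBXri;   // subs x0, x1, #4  ->  sub x0, x1, #4
      default:      return O;
      }
    }

    int main() {
      assert(dropFlagSetting(ADDSWrr, /*FlagsAreDead=*/true)  == ADDWrr);
      assert(dropFlagSetting(ADDSWrr, /*FlagsAreDead=*/false) == ADDSWrr);
      return 0;
    }
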
813 if (Instr.modifiesRegister(AArch64::NZCV, TRI) ||
814 (!CheckOnlyCCWrites && Instr.readsRegister(AArch64::NZCV, TRI)))
832 int Cmp_NZCV = CmpInstr->findRegisterDefOperandIdx(AArch64::NZCV, true);
834 if (CmpInstr->definesRegister(AArch64::WZR) ||
835 CmpInstr->definesRegister(AArch64::XZR)) {
877 case AArch64::ADDSWrr:
878 case AArch64::ADDSWri:
879 case AArch64::ADDSXrr:
880 case AArch64::ADDSXri:
881 case AArch64::SUBSWrr:
882 case AArch64::SUBSWri:
883 case AArch64::SUBSXrr:
884 case AArch64::SUBSXri:
886 case AArch64::ADDWrr: NewOpc = AArch64::ADDSWrr; break;
887 case AArch64::ADDWri: NewOpc = AArch64::ADDSWri; break;
888 case AArch64::ADDXrr: NewOpc = AArch64::ADDSXrr; break;
889 case AArch64::ADDXri: NewOpc = AArch64::ADDSXri; break;
890 case AArch64::ADCWr: NewOpc = AArch64::ADCSWr; break;
891 case AArch64::ADCXr: NewOpc = AArch64::ADCSXr; break;
892 case AArch64::SUBWrr: NewOpc = AArch64::SUBSWrr; break;
893 case AArch64::SUBWri: NewOpc = AArch64::SUBSWri; break;
894 case AArch64::SUBXrr: NewOpc = AArch64::SUBSXrr; break;
895 case AArch64::SUBXri: NewOpc = AArch64::SUBSXri; break;
896 case AArch64::SBCWr: NewOpc = AArch64::SBCSWr; break;
897 case AArch64::SBCXr: NewOpc = AArch64::SBCSXr; break;
898 case AArch64::ANDWri: NewOpc = AArch64::ANDSWri; break;
899 case AArch64::ANDXri: NewOpc = AArch64::ANDSXri; break;
916 if (MO.isRegMask() && MO.clobbersPhysReg(AArch64::NZCV)) {
920 if (!MO.isReg() || MO.getReg() != AArch64::NZCV)
933 case AArch64::Bcc:
936 case AArch64::CSINVWr:
937 case AArch64::CSINVXr:
938 case AArch64::CSINCWr:
939 case AArch64::CSINCXr:
940 case AArch64::CSELWr:
941 case AArch64::CSELXr:
942 case AArch64::CSNEGWr:
943 case AArch64::CSNEGXr:
944 case AArch64::FCSELSrrr:
945 case AArch64::FCSELDrrr:
971 AArch64::NZCV))
981 MI->addRegisterDefined(AArch64::NZCV, TRI);
1000 BuildMI(MBB, MI, DL, get(AArch64::LOADgot), Reg)
1002 BuildMI(MBB, MI, DL, get(AArch64::LDRXui), Reg)
1006 BuildMI(MBB, MI, DL, get(AArch64::MOVZXi), Reg)
1008 BuildMI(MBB, MI, DL, get(AArch64::MOVKXi), Reg)
1011 BuildMI(MBB, MI, DL, get(AArch64::MOVKXi), Reg)
1014 BuildMI(MBB, MI, DL, get(AArch64::MOVKXi), Reg)
1017 BuildMI(MBB, MI, DL, get(AArch64::LDRXui), Reg)
1021 BuildMI(MBB, MI, DL, get(AArch64::ADRP), Reg)
1024 BuildMI(MBB, MI, DL, get(AArch64::LDRXui), Reg)
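
In the MOVZ-plus-three-MOVK sequence above (the large-code-model path), a full 64-bit address is assembled 16 bits at a time before the final load. A standalone sketch of that arithmetic, using hypothetical helper names:

    // Standalone sketch: movz writes one 16-bit chunk and zeroes the rest;
    // each movk patches one 16-bit chunk and leaves the others untouched.
    #include <cassert>
    #include <cstdint>

    static uint64_t movz(uint16_t imm, unsigned shift) {
      return (uint64_t)imm << shift;
    }
    static uint64_t movk(uint64_t reg, uint16_t imm, unsigned shift) {
      uint64_t mask = 0xFFFFull << shift;
      return (reg & ~mask) | ((uint64_t)imm << shift);
    }

    int main() {
      uint64_t addr = 0x123456789ABCDEF0ull;
      uint64_t r = movz((uint16_t)(addr >>  0),  0);
      r = movk(r, (uint16_t)(addr >> 16), 16);
      r = movk(r, (uint16_t)(addr >> 32), 32);
      r = movk(r, (uint16_t)(addr >> 48), 48);
      assert(r == addr);
      return 0;
    }
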
1040 case AArch64::ADDSWrs:
1041 case AArch64::ADDSXrs:
1042 case AArch64::ADDWrs:
1043 case AArch64::ADDXrs:
1044 case AArch64::ANDSWrs:
1045 case AArch64::ANDSXrs:
1046 case AArch64::ANDWrs:
1047 case AArch64::ANDXrs:
1048 case AArch64::BICSWrs:
1049 case AArch64::BICSXrs:
1050 case AArch64::BICWrs:
1051 case AArch64::BICXrs:
1052 case AArch64::CRC32Brr:
1053 case AArch64::CRC32CBrr:
1054 case AArch64::CRC32CHrr:
1055 case AArch64::CRC32CWrr:
1056 case AArch64::CRC32CXrr:
1057 case AArch64::CRC32Hrr:
1058 case AArch64::CRC32Wrr:
1059 case AArch64::CRC32Xrr:
1060 case AArch64::EONWrs:
1061 case AArch64::EONXrs:
1062 case AArch64::EORWrs:
1063 case AArch64::EORXrs:
1064 case AArch64::ORNWrs:
1065 case AArch64::ORNXrs:
1066 case AArch64::ORRWrs:
1067 case AArch64::ORRXrs:
1068 case AArch64::SUBSWrs:
1069 case AArch64::SUBSXrs:
1070 case AArch64::SUBWrs:
1071 case AArch64::SUBXrs:
1086 case AArch64::ADDSWrx:
1087 case AArch64::ADDSXrx:
1088 case AArch64::ADDSXrx64:
1089 case AArch64::ADDWrx:
1090 case AArch64::ADDXrx:
1091 case AArch64::ADDXrx64:
1092 case AArch64::SUBSWrx:
1093 case AArch64::SUBSXrx:
1094 case AArch64::SUBSXrx64:
1095 case AArch64::SUBWrx:
1096 case AArch64::SUBXrx:
1097 case AArch64::SUBXrx64:
1114 case AArch64::MOVZWi:
1115 case AArch64::MOVZXi: // movz Rd, #0 (LSL #0)
1122 case AArch64::ANDWri: // and Rd, Rzr, #imm
1123 return MI->getOperand(1).getReg() == AArch64::WZR;
1124 case AArch64::ANDXri:
1125 return MI->getOperand(1).getReg() == AArch64::XZR;
1127 return MI->getOperand(1).getReg() == AArch64::WZR;
1141 return (AArch64::GPR32RegClass.contains(DstReg) ||
1142 AArch64::GPR64RegClass.contains(DstReg));
1144 case AArch64::ORRXrs: // orr Xd, Xzr, Xm (LSL #0)
1145 if (MI->getOperand(1).getReg() == AArch64::XZR) {
1151 case AArch64::ADDXri: // add Xd, Xn, #0 (LSL #0)
1171 return (AArch64::FPR64RegClass.contains(DstReg) ||
1172 AArch64::FPR128RegClass.contains(DstReg));
1174 case AArch64::ORRv16i8:
1190 case AArch64::LDRWui:
1191 case AArch64::LDRXui:
1192 case AArch64::LDRBui:
1193 case AArch64::LDRHui:
1194 case AArch64::LDRSui:
1195 case AArch64::LDRDui:
1196 case AArch64::LDRQui:
1213 case AArch64::STRWui:
1214 case AArch64::STRXui:
1215 case AArch64::STRBui:
1216 case AArch64::STRHui:
1217 case AArch64::STRSui:
1218 case AArch64::STRDui:
1219 case AArch64::STRQui:
1237 case AArch64::LDRBBroW:
1238 case AArch64::LDRBroW:
1239 case AArch64::LDRDroW:
1240 case AArch64::LDRHHroW:
1241 case AArch64::LDRHroW:
1242 case AArch64::LDRQroW:
1243 case AArch64::LDRSBWroW:
1244 case AArch64::LDRSBXroW:
1245 case AArch64::LDRSHWroW:
1246 case AArch64::LDRSHXroW:
1247 case AArch64::LDRSWroW:
1248 case AArch64::LDRSroW:
1249 case AArch64::LDRWroW:
1250 case AArch64::LDRXroW:
1251 case AArch64::STRBBroW:
1252 case AArch64::STRBroW:
1253 case AArch64::STRDroW:
1254 case AArch64::STRHHroW:
1255 case AArch64::STRHroW:
1256 case AArch64::STRQroW:
1257 case AArch64::STRSroW:
1258 case AArch64::STRWroW:
1259 case AArch64::STRXroW:
1260 case AArch64::LDRBBroX:
1261 case AArch64::LDRBroX:
1262 case AArch64::LDRDroX:
1263 case AArch64::LDRHHroX:
1264 case AArch64::LDRHroX:
1265 case AArch64::LDRQroX:
1266 case AArch64::LDRSBWroX:
1267 case AArch64::LDRSBXroX:
1268 case AArch64::LDRSHWroX:
1269 case AArch64::LDRSHXroX:
1270 case AArch64::LDRSWroX:
1271 case AArch64::LDRSroX:
1272 case AArch64::LDRWroX:
1273 case AArch64::LDRXroX:
1274 case AArch64::STRBBroX:
1275 case AArch64::STRBroX:
1276 case AArch64::STRDroX:
1277 case AArch64::STRHHroX:
1278 case AArch64::STRHroX:
1279 case AArch64::STRQroX:
1280 case AArch64::STRSroX:
1281 case AArch64::STRWroX:
1282 case AArch64::STRXroX:
1322 case AArch64::STRSui:
1323 case AArch64::STRDui:
1324 case AArch64::STRQui:
1325 case AArch64::STRXui:
1326 case AArch64::STRWui:
1327 case AArch64::LDRSui:
1328 case AArch64::LDRDui:
1329 case AArch64::LDRQui:
1330 case AArch64::LDRXui:
1331 case AArch64::LDRWui:
1357 case AArch64::LDURQi:
1358 case AArch64::STURQi:
1362 case AArch64::LDURXi:
1363 case AArch64::LDURDi:
1364 case AArch64::STURXi:
1365 case AArch64::STURDi:
1369 case AArch64::LDURWi:
1370 case AArch64::LDURSi:
1371 case AArch64::LDURSWi:
1372 case AArch64::STURWi:
1373 case AArch64::STURSi:
1377 case AArch64::LDURHi:
1378 case AArch64::LDURHHi:
1379 case AArch64::LDURSHXi:
1380 case AArch64::LDURSHWi:
1381 case AArch64::STURHi:
1382 case AArch64::STURHHi:
1386 case AArch64::LDURBi:
1387 case AArch64::LDURBBi:
1388 case AArch64::LDURSBXi:
1389 case AArch64::LDURSBWi:
1390 case AArch64::STURBi:
1391 case AArch64::STURBBi:
1395 case AArch64::LDRQui:
1396 case AArch64::STRQui:
1399 case AArch64::LDRXui:
1400 case AArch64::LDRDui:
1401 case AArch64::STRXui:
1402 case AArch64::STRDui:
1405 case AArch64::LDRWui:
1406 case AArch64::LDRSui:
1407 case AArch64::STRWui:
1408 case AArch64::STRSui:
1411 case AArch64::LDRHui:
1412 case AArch64::LDRHHui:
1413 case AArch64::STRHui:
1414 case AArch64::STRHHui:
1417 case AArch64::LDRBui:
1418 case AArch64::LDRBBui:
1419 case AArch64::STRBui:
1420 case AArch64::STRBBui:
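
The switch above groups the load/store opcodes by access size: Q forms move 16 bytes, X/D forms 8, W/S forms 4, H forms 2, B forms 1; for the scaled "ui" variants this size is also the unit of the encoded immediate. A standalone illustration with hypothetical names:

    // Standalone sketch: opcode -> access size, and how the scaled immediate uses it.
    #include <cassert>

    enum LdStOpc { LDRBui, LDRHui, LDRWui, LDRXui, LDRQui };

    static int accessSizeBytes(LdStOpc Opc) {
      switch (Opc) {
      case LDRBui: return 1;   // byte
      case LDRHui: return 2;   // halfword
      case LDRWui: return 4;   // word   (and 32-bit S registers)
      case LDRXui: return 8;   // dword  (and 64-bit D registers)
      case LDRQui: return 16;  // qword  (128-bit Q registers)
      }
      return 0;
    }

    int main() {
      // ldr x0, [x1, #24] stores 24 / 8 = 3 in the immediate field.
      int ByteOffset = 24;
      assert(ByteOffset % accessSizeBytes(LDRXui) == 0);
      assert(ByteOffset / accessSizeBytes(LDRXui) == 3);
      return 0;
    }
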
1456 if (SecondOpcode == AArch64::Bcc) {
1460 case AArch64::SUBSWri:
1461 case AArch64::ADDSWri:
1462 case AArch64::ANDSWri:
1463 case AArch64::SUBSXri:
1464 case AArch64::ADDSXri:
1465 case AArch64::ANDSXri:
1470 if (SecondOpcode == AArch64::CBNZW || SecondOpcode == AArch64::CBNZX ||
1471 SecondOpcode == AArch64::CBZW || SecondOpcode == AArch64::CBZX) {
1475 case AArch64::ADDWri:
1476 case AArch64::ADDXri:
1477 case AArch64::ANDWri:
1478 case AArch64::ANDXri:
1479 case AArch64::EORWri:
1480 case AArch64::EORXri:
1481 case AArch64::ORRWri:
1482 case AArch64::ORRXri:
1483 case AArch64::SUBWri:
1484 case AArch64::SUBXri:
1495 MachineInstrBuilder MIB = BuildMI(MF, DL, get(AArch64::DBG_VALUE))
1553 if (AArch64::GPR32spRegClass.contains(DestReg) &&
1554 (AArch64::GPR32spRegClass.contains(SrcReg) || SrcReg == AArch64::WZR)) {
1557 if (DestReg == AArch64::WSP || SrcReg == AArch64::WSP) {
1561 unsigned DestRegX = TRI->getMatchingSuperReg(DestReg, AArch64::sub_32,
1562 &AArch64::GPR64spRegClass);
1563 unsigned SrcRegX = TRI->getMatchingSuperReg(SrcReg, AArch64::sub_32,
1564 &AArch64::GPR64spRegClass);
1569 BuildMI(MBB, I, DL, get(AArch64::ADDXri), DestRegX)
1575 BuildMI(MBB, I, DL, get(AArch64::ADDWri), DestReg)
1580 } else if (SrcReg == AArch64::WZR && Subtarget.hasZeroCycleZeroing()) {
1581 BuildMI(MBB, I, DL, get(AArch64::MOVZWi), DestReg).addImm(0).addImm(
1586 unsigned DestRegX = TRI->getMatchingSuperReg(DestReg, AArch64::sub_32,
1587 &AArch64::GPR64spRegClass);
1588 unsigned SrcRegX = TRI->getMatchingSuperReg(SrcReg, AArch64::sub_32,
1589 &AArch64::GPR64spRegClass);
1594 BuildMI(MBB, I, DL, get(AArch64::ORRXrr), DestRegX)
1595 .addReg(AArch64::XZR)
1600 BuildMI(MBB, I, DL, get(AArch64::ORRWrr), DestReg)
1601 .addReg(AArch64::WZR)
1608 if (AArch64::GPR64spRegClass.contains(DestReg) &&
1609 (AArch64::GPR64spRegClass.contains(SrcReg) || SrcReg == AArch64::XZR)) {
1610 if (DestReg == AArch64::SP || SrcReg == AArch64::SP) {
1612 BuildMI(MBB, I, DL, get(AArch64::ADDXri), DestReg)
1616 } else if (SrcReg == AArch64::XZR && Subtarget.hasZeroCycleZeroing()) {
1617 BuildMI(MBB, I, DL, get(AArch64::MOVZXi), DestReg).addImm(0).addImm(
1621 BuildMI(MBB, I, DL, get(AArch64::ORRXrr), DestReg)
1622 .addReg(AArch64::XZR)
1629 if (AArch64::DDDDRegClass.contains(DestReg) &&
1630 AArch64::DDDDRegClass.contains(SrcReg)) {
1631 static const unsigned Indices[] = { AArch64::dsub0, AArch64::dsub1,
1632 AArch64::dsub2, AArch64::dsub3 };
1633 copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORRv8i8,
1639 if (AArch64::DDDRegClass.contains(DestReg) &&
1640 AArch64::DDDRegClass.contains(SrcReg)) {
1641 static const unsigned Indices[] = { AArch64::dsub0, AArch64::dsub1,
1642 AArch64::dsub2 };
1643 copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORRv8i8,
1649 if (AArch64::DDRegClass.contains(DestReg) &&
1650 AArch64::DDRegClass.contains(SrcReg)) {
1651 static const unsigned Indices[] = { AArch64::dsub0, AArch64::dsub1 };
1652 copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORRv8i8,
1658 if (AArch64::QQQQRegClass.contains(DestReg) &&
1659 AArch64::QQQQRegClass.contains(SrcReg)) {
1660 static const unsigned Indices[] = { AArch64::qsub0, AArch64::qsub1,
1661 AArch64::qsub2, AArch64::qsub3 };
1662 copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORRv16i8,
1668 if (AArch64::QQQRegClass.contains(DestReg) &&
1669 AArch64::QQQRegClass.contains(SrcReg)) {
1670 static const unsigned Indices[] = { AArch64::qsub0, AArch64::qsub1,
1671 AArch64::qsub2 };
1672 copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORRv16i8,
1678 if (AArch64::QQRegClass.contains(DestReg) &&
1679 AArch64::QQRegClass.contains(SrcReg)) {
1680 static const unsigned Indices[] = { AArch64::qsub0, AArch64::qsub1 };
1681 copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORRv16i8,
1686 if (AArch64::FPR128RegClass.contains(DestReg) &&
1687 AArch64::FPR128RegClass.contains(SrcReg)) {
1689 BuildMI(MBB, I, DL, get(AArch64::ORRv16i8), DestReg)
1693 BuildMI(MBB, I, DL, get(AArch64::STRQpre))
1694 .addReg(AArch64::SP, RegState::Define)
1696 .addReg(AArch64::SP)
1698 BuildMI(MBB, I, DL, get(AArch64::LDRQpre))
1699 .addReg(AArch64::SP, RegState::Define)
1701 .addReg(AArch64::SP)
1707 if (AArch64::FPR64RegClass.contains(DestReg) &&
1708 AArch64::FPR64RegClass.contains(SrcReg)) {
1710 DestReg = RI.getMatchingSuperReg(DestReg, AArch64::dsub,
1711 &AArch64::FPR128RegClass);
1712 SrcReg = RI.getMatchingSuperReg(SrcReg, AArch64::dsub,
1713 &AArch64::FPR128RegClass);
1714 BuildMI(MBB, I, DL, get(AArch64::ORRv16i8), DestReg)
1718 BuildMI(MBB, I, DL, get(AArch64::FMOVDr), DestReg)
1724 if (AArch64::FPR32RegClass.contains(DestReg) &&
1725 AArch64::FPR32RegClass.contains(SrcReg)) {
1727 DestReg = RI.getMatchingSuperReg(DestReg, AArch64::ssub,
1728 &AArch64::FPR128RegClass);
1729 SrcReg = RI.getMatchingSuperReg(SrcReg, AArch64::ssub,
1730 &AArch64::FPR128RegClass);
1731 BuildMI(MBB, I, DL, get(AArch64::ORRv16i8), DestReg)
1735 BuildMI(MBB, I, DL, get(AArch64::FMOVSr), DestReg)
1741 if (AArch64::FPR16RegClass.contains(DestReg) &&
1742 AArch64::FPR16RegClass.contains(SrcReg)) {
1744 DestReg = RI.getMatchingSuperReg(DestReg, AArch64::hsub,
1745 &AArch64::FPR128RegClass);
1746 SrcReg = RI.getMatchingSuperReg(SrcReg, AArch64::hsub,
1747 &AArch64::FPR128RegClass);
1748 BuildMI(MBB, I, DL, get(AArch64::ORRv16i8), DestReg)
1752 DestReg = RI.getMatchingSuperReg(DestReg, AArch64::hsub,
1753 &AArch64::FPR32RegClass);
1754 SrcReg = RI.getMatchingSuperReg(SrcReg, AArch64::hsub,
1755 &AArch64::FPR32RegClass);
1756 BuildMI(MBB, I, DL, get(AArch64::FMOVSr), DestReg)
1762 if (AArch64::FPR8RegClass.contains(DestReg) &&
1763 AArch64::FPR8RegClass.contains(SrcReg)) {
1765 DestReg = RI.getMatchingSuperReg(DestReg, AArch64::bsub,
1766 &AArch64::FPR128RegClass);
1767 SrcReg = RI.getMatchingSuperReg(SrcReg, AArch64::bsub,
1768 &AArch64::FPR128RegClass);
1769 BuildMI(MBB, I, DL, get(AArch64::ORRv16i8), DestReg)
1773 DestReg = RI.getMatchingSuperReg(DestReg, AArch64::bsub,
1774 &AArch64::FPR32RegClass);
1775 SrcReg = RI.getMatchingSuperReg(SrcReg, AArch64::bsub,
1776 &AArch64::FPR32RegClass);
1777 BuildMI(MBB, I, DL, get(AArch64::FMOVSr), DestReg)
1784 if (AArch64::FPR64RegClass.contains(DestReg) &&
1785 AArch64::GPR64RegClass.contains(SrcReg)) {
1786 BuildMI(MBB, I, DL, get(AArch64::FMOVXDr), DestReg)
1790 if (AArch64::GPR64RegClass.contains(DestReg) &&
1791 AArch64::FPR64RegClass.contains(SrcReg)) {
1792 BuildMI(MBB, I, DL, get(AArch64::FMOVDXr), DestReg)
1797 if (AArch64::FPR32RegClass.contains(DestReg) &&
1798 AArch64::GPR32RegClass.contains(SrcReg)) {
1799 BuildMI(MBB, I, DL, get(AArch64::FMOVWSr), DestReg)
1803 if (AArch64::GPR32RegClass.contains(DestReg) &&
1804 AArch64::FPR32RegClass.contains(SrcReg)) {
1805 BuildMI(MBB, I, DL, get(AArch64::FMOVSWr), DestReg)
1810 if (DestReg == AArch64::NZCV) {
1811 assert(AArch64::GPR64RegClass.contains(SrcReg) && "Invalid NZCV copy");
1812 BuildMI(MBB, I, DL, get(AArch64::MSR))
1815 .addReg(AArch64::NZCV, RegState::Implicit | RegState::Define);
1819 if (SrcReg == AArch64::NZCV) {
1820 assert(AArch64::GPR64RegClass.contains(DestReg) && "Invalid NZCV copy");
1821 BuildMI(MBB, I, DL, get(AArch64::MRS))
1824 .addReg(AArch64::NZCV, RegState::Implicit | getKillRegState(KillSrc));
1849 if (AArch64::FPR8RegClass.hasSubClassEq(RC))
1850 Opc = AArch64::STRBui;
1853 if (AArch64::FPR16RegClass.hasSubClassEq(RC))
1854 Opc = AArch64::STRHui;
1857 if (AArch64::GPR32allRegClass.hasSubClassEq(RC)) {
1858 Opc = AArch64::STRWui;
1860 MF.getRegInfo().constrainRegClass(SrcReg, &AArch64::GPR32RegClass);
1862 assert(SrcReg != AArch64::WSP);
1863 } else if (AArch64::FPR32RegClass.hasSubClassEq(RC))
1864 Opc = AArch64::STRSui;
1867 if (AArch64::GPR64allRegClass.hasSubClassEq(RC)) {
1868 Opc = AArch64::STRXui;
1870 MF.getRegInfo().constrainRegClass(SrcReg, &AArch64::GPR64RegClass);
1872 assert(SrcReg != AArch64::SP);
1873 } else if (AArch64::FPR64RegClass.hasSubClassEq(RC))
1874 Opc = AArch64::STRDui;
1877 if (AArch64::FPR128RegClass.hasSubClassEq(RC))
1878 Opc = AArch64::STRQui;
1879 else if (AArch64::DDRegClass.hasSubClassEq(RC)) {
1882 Opc = AArch64::ST1Twov1d, Offset = false;
1886 if (AArch64::DDDRegClass.hasSubClassEq(RC)) {
1889 Opc = AArch64::ST1Threev1d, Offset = false;
1893 if (AArch64::DDDDRegClass.hasSubClassEq(RC)) {
1896 Opc = AArch64::ST1Fourv1d, Offset = false;
1897 } else if (AArch64::QQRegClass.hasSubClassEq(RC)) {
1900 Opc = AArch64::ST1Twov2d, Offset = false;
1904 if (AArch64::QQQRegClass.hasSubClassEq(RC)) {
1907 Opc = AArch64::ST1Threev2d, Offset = false;
1911 if (AArch64::QQQQRegClass.hasSubClassEq(RC)) {
1914 Opc = AArch64::ST1Fourv2d, Offset = false;
1947 if (AArch64::FPR8RegClass.hasSubClassEq(RC))
1948 Opc = AArch64::LDRBui;
1951 if (AArch64::FPR16RegClass.hasSubClassEq(RC))
1952 Opc = AArch64::LDRHui;
1955 if (AArch64::GPR32allRegClass.hasSubClassEq(RC)) {
1956 Opc = AArch64::LDRWui;
1958 MF.getRegInfo().constrainRegClass(DestReg, &AArch64::GPR32RegClass);
1960 assert(DestReg != AArch64::WSP);
1961 } else if (AArch64::FPR32RegClass.hasSubClassEq(RC))
1962 Opc = AArch64::LDRSui;
1965 if (AArch64::GPR64allRegClass.hasSubClassEq(RC)) {
1966 Opc = AArch64::LDRXui;
1968 MF.getRegInfo().constrainRegClass(DestReg, &AArch64::GPR64RegClass);
1970 assert(DestReg != AArch64::SP);
1971 } else if (AArch64::FPR64RegClass.hasSubClassEq(RC))
1972 Opc = AArch64::LDRDui;
1975 if (AArch64::FPR128RegClass.hasSubClassEq(RC))
1976 Opc = AArch64::LDRQui;
1977 else if (AArch64::DDRegClass.hasSubClassEq(RC)) {
1980 Opc = AArch64::LD1Twov1d, Offset = false;
1984 if (AArch64::DDDRegClass.hasSubClassEq(RC)) {
1987 Opc = AArch64::LD1Threev1d, Offset = false;
1991 if (AArch64::DDDDRegClass.hasSubClassEq(RC)) {
1994 Opc = AArch64::LD1Fourv1d, Offset = false;
1995 } else if (AArch64::QQRegClass.hasSubClassEq(RC)) {
1998 Opc = AArch64::LD1Twov2d, Offset = false;
2002 if (AArch64::QQQRegClass.hasSubClassEq(RC)) {
2005 Opc = AArch64::LD1Threev2d, Offset = false;
2009 if (AArch64::QQQQRegClass.hasSubClassEq(RC)) {
2012 Opc = AArch64::LD1Fourv2d, Offset = false;
2051 Opc = isSub ? AArch64::SUBSXri : AArch64::ADDSXri;
2053 Opc = isSub ? AArch64::SUBXri : AArch64::ADDXri;
2103 if (SrcReg == AArch64::SP &&
2105 MF.getRegInfo().constrainRegClass(DstReg, &AArch64::GPR64RegClass);
2108 if (DstReg == AArch64::SP &&
2110 MF.getRegInfo().constrainRegClass(SrcReg, &AArch64::GPR64RegClass);
2139 case AArch64::LD1Twov2d:
2140 case AArch64::LD1Threev2d:
2141 case AArch64::LD1Fourv2d:
2142 case AArch64::LD1Twov1d:
2143 case AArch64::LD1Threev1d:
2144 case AArch64::LD1Fourv1d:
2145 case AArch64::ST1Twov2d:
2146 case AArch64::ST1Threev2d:
2147 case AArch64::ST1Fourv2d:
2148 case AArch64::ST1Twov1d:
2149 case AArch64::ST1Threev1d:
2150 case AArch64::ST1Fourv1d:
2152 case AArch64::PRFMui:
2154 UnscaledOp = AArch64::PRFUMi;
2156 case AArch64::LDRXui:
2158 UnscaledOp = AArch64::LDURXi;
2160 case AArch64::LDRWui:
2162 UnscaledOp = AArch64::LDURWi;
2164 case AArch64::LDRBui:
2166 UnscaledOp = AArch64::LDURBi;
2168 case AArch64::LDRHui:
2170 UnscaledOp = AArch64::LDURHi;
2172 case AArch64::LDRSui:
2174 UnscaledOp = AArch64::LDURSi;
2176 case AArch64::LDRDui:
2178 UnscaledOp = AArch64::LDURDi;
2180 case AArch64::LDRQui:
2182 UnscaledOp = AArch64::LDURQi;
2184 case AArch64::LDRBBui:
2186 UnscaledOp = AArch64::LDURBBi;
2188 case AArch64::LDRHHui:
2190 UnscaledOp = AArch64::LDURHHi;
2192 case AArch64::LDRSBXui:
2194 UnscaledOp = AArch64::LDURSBXi;
2196 case AArch64::LDRSBWui:
2198 UnscaledOp = AArch64::LDURSBWi;
2200 case AArch64::LDRSHXui:
2202 UnscaledOp = AArch64::LDURSHXi;
2204 case AArch64::LDRSHWui:
2206 UnscaledOp = AArch64::LDURSHWi;
2208 case AArch64::LDRSWui:
2210 UnscaledOp = AArch64::LDURSWi;
2213 case AArch64::STRXui:
2215 UnscaledOp = AArch64::STURXi;
2217 case AArch64::STRWui:
2219 UnscaledOp = AArch64::STURWi;
2221 case AArch64::STRBui:
2223 UnscaledOp = AArch64::STURBi;
2225 case AArch64::STRHui:
2227 UnscaledOp = AArch64::STURHi;
2229 case AArch64::STRSui:
2231 UnscaledOp = AArch64::STURSi;
2233 case AArch64::STRDui:
2235 UnscaledOp = AArch64::STURDi;
2237 case AArch64::STRQui:
2239 UnscaledOp = AArch64::STURQi;
2241 case AArch64::STRBBui:
2243 UnscaledOp = AArch64::STURBBi;
2245 case AArch64::STRHHui:
2247 UnscaledOp = AArch64::STURHHi;
2250 case AArch64::LDPXi:
2251 case AArch64::LDPDi:
2252 case AArch64::STPXi:
2253 case AArch64::STPDi:
2254 case AArch64::LDNPXi:
2255 case AArch64::LDNPDi:
2256 case AArch64::STNPXi:
2257 case AArch64::STNPDi:
2262 case AArch64::LDPQi:
2263 case AArch64::STPQi:
2264 case AArch64::LDNPQi:
2265 case AArch64::STNPQi:
2270 case AArch64::LDPWi:
2271 case AArch64::LDPSi:
2272 case AArch64::STPWi:
2273 case AArch64::STPSi:
2274 case AArch64::LDNPWi:
2275 case AArch64::LDNPSi:
2276 case AArch64::STNPWi:
2277 case AArch64::STNPSi:
2283 case AArch64::LDURXi:
2284 case AArch64::LDURWi:
2285 case AArch64::LDURBi:
2286 case AArch64::LDURHi:
2287 case AArch64::LDURSi:
2288 case AArch64::LDURDi:
2289 case AArch64::LDURQi:
2290 case AArch64::LDURHHi:
2291 case AArch64::LDURBBi:
2292 case AArch64::LDURSBXi:
2293 case AArch64::LDURSBWi:
2294 case AArch64::LDURSHXi:
2295 case AArch64::LDURSHWi:
2296 case AArch64::LDURSWi:
2297 case AArch64::STURXi:
2298 case AArch64::STURWi:
2299 case AArch64::STURBi:
2300 case AArch64::STURHi:
2301 case AArch64::STURSi:
2302 case AArch64::STURDi:
2303 case AArch64::STURQi:
2304 case AArch64::STURBBi:
2305 case AArch64::STURHHi:
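
Each scaled "ui" opcode above is paired with an unscaled LDUR/STUR twin: the scaled form encodes an unsigned 12-bit immediate in units of the access size, the unscaled form a signed 9-bit byte offset, so offsets that are negative or misaligned fall back to the unscaled opcode. A standalone sketch of those ranges (helper names are hypothetical):

    // Standalone sketch: which immediate encoding a byte offset fits.
    #include <cassert>

    // Scaled LDR/STR ...ui: unsigned 12-bit immediate, multiplied by the access size.
    static bool fitsScaled(long Offset, int AccessSize) {
      return Offset % AccessSize == 0 && Offset / AccessSize >= 0 &&
             Offset / AccessSize <= 4095;
    }

    // Unscaled LDUR/STUR: signed 9-bit byte offset.
    static bool fitsUnscaled(long Offset) { return Offset >= -256 && Offset <= 255; }

    int main() {
      assert(fitsScaled(24, 8));    // ldr x0, [sp, #24]
      assert(!fitsScaled(-8, 8));   // negative offset: needs ldur x0, [sp, #-8]
      assert(fitsUnscaled(-8));
      assert(!fitsScaled(12, 8));   // not a multiple of the 8-byte access size
      assert(fitsUnscaled(12));     //   ...but ldur can still encode it
      return 0;
    }
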
2363 if (Opcode == AArch64::ADDSXri || Opcode == AArch64::ADDXri) {
2367 MachineInstr::NoFlags, (Opcode == AArch64::ADDSXri));
2393 NopInst.setOpcode(AArch64::HINT);
2398 // AArch64 supports the combiner
2405 case AArch64::ADDSWrr:
2406 case AArch64::ADDSWri:
2407 case AArch64::ADDSXrr:
2408 case AArch64::ADDSXri:
2409 case AArch64::SUBSWrr:
2410 case AArch64::SUBSXrr:
2412 case AArch64::SUBSWri:
2413 case AArch64::SUBSXri:
2424 case AArch64::ADDWrr:
2425 case AArch64::ADDWri:
2426 case AArch64::SUBWrr:
2427 case AArch64::ADDSWrr:
2428 case AArch64::ADDSWri:
2429 case AArch64::SUBSWrr:
2431 case AArch64::SUBWri:
2432 case AArch64::SUBSWri:
2443 case AArch64::ADDXrr:
2444 case AArch64::ADDXri:
2445 case AArch64::SUBXrr:
2446 case AArch64::ADDSXrr:
2447 case AArch64::ADDSXri:
2448 case AArch64::SUBSXrr:
2450 case AArch64::SUBXri:
2451 case AArch64::SUBSXri:
2506 int Cmp_NZCV = Root.findRegisterDefOperandIdx(AArch64::NZCV, true);
2521 case AArch64::ADDWrr:
2524 if (canCombineWithMUL(MBB, Root.getOperand(1), AArch64::MADDWrrr,
2525 AArch64::WZR)) {
2529 if (canCombineWithMUL(MBB, Root.getOperand(2), AArch64::MADDWrrr,
2530 AArch64::WZR)) {
2535 case AArch64::ADDXrr:
2536 if (canCombineWithMUL(MBB, Root.getOperand(1), AArch64::MADDXrrr,
2537 AArch64::XZR)) {
2541 if (canCombineWithMUL(MBB, Root.getOperand(2), AArch64::MADDXrrr,
2542 AArch64::XZR)) {
2547 case AArch64::SUBWrr:
2548 if (canCombineWithMUL(MBB, Root.getOperand(1), AArch64::MADDWrrr,
2549 AArch64::WZR)) {
2553 if (canCombineWithMUL(MBB, Root.getOperand(2), AArch64::MADDWrrr,
2554 AArch64::WZR)) {
2559 case AArch64::SUBXrr:
2560 if (canCombineWithMUL(MBB, Root.getOperand(1), AArch64::MADDXrrr,
2561 AArch64::XZR)) {
2565 if (canCombineWithMUL(MBB, Root.getOperand(2), AArch64::MADDXrrr,
2566 AArch64::XZR)) {
2571 case AArch64::ADDWri:
2572 if (canCombineWithMUL(MBB, Root.getOperand(1), AArch64::MADDWrrr,
2573 AArch64::WZR)) {
2578 case AArch64::ADDXri:
2579 if (canCombineWithMUL(MBB, Root.getOperand(1), AArch64::MADDXrrr,
2580 AArch64::XZR)) {
2585 case AArch64::SUBWri:
2586 if (canCombineWithMUL(MBB, Root.getOperand(1), AArch64::MADDWrrr,
2587 AArch64::WZR)) {
2592 case AArch64::SUBXri:
2593 if (canCombineWithMUL(MBB, Root.getOperand(1), AArch64::MADDXrrr,
2594 AArch64::XZR)) {
2725 Opc = AArch64::MADDWrrr;
2726 RC = &AArch64::GPR32RegClass;
2728 Opc = AArch64::MADDXrrr;
2729 RC = &AArch64::GPR64RegClass;
2740 Opc = AArch64::MADDWrrr;
2741 RC = &AArch64::GPR32RegClass;
2743 Opc = AArch64::MADDXrrr;
2744 RC = &AArch64::GPR64RegClass;
2758 OrrOpc = AArch64::ORRWri;
2759 OrrRC = &AArch64::GPR32spRegClass;
2761 ZeroReg = AArch64::WZR;
2762 Opc = AArch64::MADDWrrr;
2763 RC = &AArch64::GPR32RegClass;
2765 OrrOpc = AArch64::ORRXri;
2766 OrrRC = &AArch64::GPR64spRegClass;
2768 ZeroReg = AArch64::XZR;
2769 Opc = AArch64::MADDXrrr;
2770 RC = &AArch64::GPR64RegClass;
2802 SubOpc = AArch64::SUBWrr;
2803 SubRC = &AArch64::GPR32spRegClass;
2804 ZeroReg = AArch64::WZR;
2805 Opc = AArch64::MADDWrrr;
2806 RC = &AArch64::GPR32RegClass;
2808 SubOpc = AArch64::SUBXrr;
2809 SubRC = &AArch64::GPR64spRegClass;
2810 ZeroReg = AArch64::XZR;
2811 Opc = AArch64::MADDXrrr;
2812 RC = &AArch64::GPR64RegClass;
2832 Opc = AArch64::MSUBWrrr;
2833 RC = &AArch64::GPR32RegClass;
2835 Opc = AArch64::MSUBXrrr;
2836 RC = &AArch64::GPR64RegClass;
2850 OrrOpc = AArch64::ORRWri;
2851 OrrRC = &AArch64::GPR32spRegClass;
2853 ZeroReg = AArch64::WZR;
2854 Opc = AArch64::MADDWrrr;
2855 RC = &AArch64::GPR32RegClass;
2857 OrrOpc = AArch64::ORRXri;
2858 OrrRC = &AArch64::GPR64spRegClass;
2860 ZeroReg = AArch64::XZR;
2861 Opc = AArch64::MADDXrrr;
2862 RC = &AArch64::GPR64RegClass;
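
The alternative sequences above replace a multiply feeding an add or subtract with a single MADD or MSUB. The underlying identities, as a standalone sketch (not LLVM code):

    // Standalone sketch: madd/msub fold the addend into the multiply.
    #include <cassert>
    #include <cstdint>

    // madd rd, rn, rm, ra  computes  ra + rn * rm
    static uint64_t madd(uint64_t n, uint64_t m, uint64_t a) { return a + n * m; }
    // msub rd, rn, rm, ra  computes  ra - rn * rm
    static uint64_t msub(uint64_t n, uint64_t m, uint64_t a) { return a - n * m; }

    int main() {
      uint64_t n = 3, m = 5, a = 100;
      // mul t, n, m ; add d, a, t   ==>   madd d, n, m, a
      assert(a + n * m == madd(n, m, a));
      // mul t, n, m ; sub d, a, t   ==>   msub d, n, m, a
      assert(a - n * m == msub(n, m, a));
      return 0;
    }
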
2916 case AArch64::Bcc:
2918 case AArch64::CBZW:
2919 case AArch64::CBZX:
2922 case AArch64::CBNZW:
2923 case AArch64::CBNZX:
2927 case AArch64::TBZW:
2928 case AArch64::TBZX:
2932 case AArch64::TBNZW:
2933 case AArch64::TBNZX:
2957 if (!(DefMI->getOpcode() == AArch64::CSINCWr &&
2958 DefMI->getOperand(1).getReg() == AArch64::WZR &&
2959 DefMI->getOperand(2).getReg() == AArch64::WZR) &&
2960 !(DefMI->getOpcode() == AArch64::CSINCXr &&
2961 DefMI->getOperand(1).getReg() == AArch64::XZR &&
2962 DefMI->getOperand(2).getReg() == AArch64::XZR))
2965 if (DefMI->findRegisterDefOperandIdx(AArch64::NZCV, true) != -1)
2981 BuildMI(RefToMBB, MI, DL, get(AArch64::Bcc)).addImm(CC).addMBB(TBB);
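
The check above looks for a compare-and-branch whose operand is produced by csinc wN, wzr, wzr, cc; that instruction materializes the negation of cc as 0 or 1, so the csinc plus CBZ/CBNZ can collapse into one conditional branch. A standalone sketch of that equivalence:

    // Standalone sketch: csinc wd, wzr, wzr, cc yields !cc, so branching on the
    // result being (non)zero is the same as branching on the condition itself.
    #include <cassert>

    static unsigned csincZeroZero(bool cc) { return cc ? 0u : 1u; } // wzr vs. wzr+1

    int main() {
      for (int i = 0; i < 2; ++i) {
        bool cc = (i != 0);
        bool cbnzTaken = csincZeroZero(cc) != 0;  // cbnz wN, target
        assert(cbnzTaken == !cc);                 // same as b.<inverse of cc>
        bool cbzTaken = csincZeroZero(cc) == 0;   // cbz wN, target
        assert(cbzTaken == cc);                   // same as b.cc
      }
      return 0;
    }
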
2996 {MO_PAGE, "aarch64-page"},
2997 {MO_PAGEOFF, "aarch64-pageoff"},
2998 {MO_G3, "aarch64-g3"},
2999 {MO_G2, "aarch64-g2"},
3000 {MO_G1, "aarch64-g1"},
3001 {MO_G0, "aarch64-g0"},
3002 {MO_HI12, "aarch64-hi12"}};
3010 {MO_GOT, "aarch64-got"},
3011 {MO_NC, "aarch64-nc"},
3012 {MO_TLS, "aarch64-tls"},
3013 {MO_CONSTPOOL, "aarch64-constant-pool"}};