
Lines Matching refs:X86

1 //===-- X86FrameLowering.cpp - X86 Frame Information ----------------------===//
10 // This file contains the X86 implementation of the TargetFrameLowering class.
62 return X86::SUB64ri8;
63 return X86::SUB64ri32;
66 return X86::SUB32ri8;
67 return X86::SUB32ri;
74 return X86::ADD64ri8;
75 return X86::ADD64ri32;
78 return X86::ADD32ri8;
79 return X86::ADD32ri;
84 return IsLP64 ? X86::LEA64r : X86::LEA32r;
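
The matches at 62-84 are the bodies of the static helpers that pick the narrowest immediate encoding for a stack adjustment. A sketch of how they plausibly fit together, with the signatures and isInt<8> guards assumed from upstream LLVM of this vintage (only the return statements appear in the matches):

    static unsigned getSUBriOpcode(unsigned IsLP64, int64_t Imm) {
      if (IsLP64) {
        if (isInt<8>(Imm))        // the ri8 form saves three immediate bytes
          return X86::SUB64ri8;
        return X86::SUB64ri32;
      }
      if (isInt<8>(Imm))
        return X86::SUB32ri8;
      return X86::SUB32ri;
    }

    // getADDriOpcode is symmetric, returning the ADD64ri8/ADD64ri32 or
    // ADD32ri8/ADD32ri forms matched at 74-79.

    static unsigned getLEArOpcode(unsigned IsLP64) {
      return IsLP64 ? X86::LEA64r : X86::LEA32r;
    }
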
100 X86::EAX, X86::EDX, X86::ECX, 0
104 X86::RAX, X86::RDX, X86::RCX, X86::RSI, X86::RDI,
105 X86::R8, X86::R9, X86::R10, X86::R11, 0
111 case X86::RETL:
112 case X86::RETQ:
113 case X86::RETIL:
114 case X86::RETIQ:
115 case X86::TCRETURNdi:
116 case X86::TCRETURNri:
117 case X86::TCRETURNmi:
118 case X86::TCRETURNdi64:
119 case X86::TCRETURNri64:
120 case X86::TCRETURNmi64:
121 case X86::EH_RETURN:
122 case X86::EH_RETURN64: {
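
Lines 100-122 belong to findDeadCallerSavedReg: the two zero-terminated tables list candidate scratch registers, and the switch limits the search to return-like terminators where a caller-saved register can safely be clobbered. The selection loop, condensed from upstream (array and helper names assumed):

    // Collect every register (including aliases) read by the return
    // instruction, then pick the first caller-saved register not in the set.
    SmallSet<uint16_t, 8> Uses;
    for (unsigned i = 0, e = MBBI->getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MBBI->getOperand(i);
      if (!MO.isReg() || MO.isDef())
        continue;
      unsigned Reg = MO.getReg();
      if (!Reg)
        continue;
      for (MCRegAliasIterator AI(Reg, &TRI, true); AI.isValid(); ++AI)
        Uses.insert(*AI);
    }

    const uint16_t *CS = Is64Bit ? CallerSavedRegs64Bit : CallerSavedRegs32Bit;
    for (; *CS; ++CS)
      if (!Uses.count(*CS))
        return *CS;
    return 0; // no dead caller-saved register at this point
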
171 ? (unsigned)(Is64Bit ? X86::RAX : X86::EAX)
175 ? (Is64Bit ? X86::PUSH64r : X86::PUSH32r)
176 : (Is64Bit ? X86::POP64r : X86::POP32r);
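
Lines 171-176 come from emitSPUpdate: when a chunk of the adjustment is exactly one slot (8 or 4 bytes), a push or pop is smaller than a sub/add of the stack pointer; a pop additionally needs a dead register to land in, hence the call into findDeadCallerSavedReg. Roughly, per upstream (the surrounding chunking loop is omitted):

    if (ThisVal == (Is64Bit ? 8 : 4)) {
      // Subtracting: push any register. Adding: pop only into a dead one.
      unsigned Reg = isSub
        ? (unsigned)(Is64Bit ? X86::RAX : X86::EAX)
        : findDeadCallerSavedReg(MBB, MBBI, TRI, Is64Bit);
      if (Reg) {
        unsigned Opc = isSub
          ? (Is64Bit ? X86::PUSH64r : X86::PUSH32r)
          : (Is64Bit ? X86::POP64r : X86::POP32r);
        BuildMI(MBB, MBBI, DL, TII.get(Opc))
          .addReg(Reg, getDefRegState(!isSub) | getUndefRegState(isSub));
        Offset -= ThisVal;
        continue;
      }
    }
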
213 if ((Opc == X86::ADD64ri32 || Opc == X86::ADD64ri8 ||
214 Opc == X86::ADD32ri || Opc == X86::ADD32ri8 ||
215 Opc == X86::LEA32r || Opc == X86::LEA64_32r) &&
220 } else if ((Opc == X86::SUB64ri32 || Opc == X86::SUB64ri8 ||
221 Opc == X86::SUB32ri || Opc == X86::SUB32ri8) &&
244 if ((Opc == X86::ADD64ri32 || Opc == X86::ADD64ri8 ||
245 Opc == X86::ADD32ri || Opc == X86::ADD32ri8) &&
251 } else if ((Opc == X86::SUB64ri32 || Opc == X86::SUB64ri8 ||
252 Opc == X86::SUB32ri || Opc == X86::SUB32ri8) &&
278 if ((Opc == X86::ADD64ri32 || Opc == X86::ADD64ri8 ||
279 Opc == X86::ADD32ri || Opc == X86::ADD32ri8 ||
280 Opc == X86::LEA32r || Opc == X86::LEA64_32r) &&
285 } else if ((Opc == X86::SUB64ri32 || Opc == X86::SUB64ri8 ||
286 Opc == X86::SUB32ri || Opc == X86::SUB32ri8) &&
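
The three near-identical opcode lists at 213-286 come from mergeSPUpdatesUp, mergeSPUpdatesDown, and mergeSPUpdates, which fold an adjacent add/sub/lea on the stack pointer into the pending frame adjustment instead of emitting two updates. The upward variant, sketched from upstream (the LEA32r/LEA64_32r case is matched too, but its displacement sits at a different operand index and is omitted here):

    static void mergeSPUpdatesUp(MachineBasicBlock &MBB,
                                 MachineBasicBlock::iterator &MBBI,
                                 unsigned StackPtr, uint64_t *NumBytes) {
      if (MBBI == MBB.begin()) return;

      MachineBasicBlock::iterator PI = std::prev(MBBI);
      unsigned Opc = PI->getOpcode();
      if ((Opc == X86::ADD64ri32 || Opc == X86::ADD64ri8 ||
           Opc == X86::ADD32ri   || Opc == X86::ADD32ri8) &&
          PI->getOperand(0).getReg() == StackPtr) {
        if (NumBytes)
          *NumBytes += PI->getOperand(2).getImm();   // absorb the add
        MBB.erase(PI);
      } else if ((Opc == X86::SUB64ri32 || Opc == X86::SUB64ri8 ||
                  Opc == X86::SUB32ri   || Opc == X86::SUB32ri8) &&
                 PI->getOperand(0).getReg() == StackPtr) {
        if (NumBytes)
          *NumBytes -= PI->getOperand(2).getImm();   // absorb the sub
        MBB.erase(PI);
      }
    }
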
301 if (Reg == X86::EAX || Reg == X86::AX ||
302 Reg == X86::AH || Reg == X86::AL)
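
Lines 301-302 are the heart of isEAXLiveIn, which guards the stack-probe path further down: EAX can only carry the allocation size to the probe function if no sub-register of it holds an incoming argument. Plausibly, for this era's MachineRegisterInfo API:

    static bool isEAXLiveIn(MachineFunction &MF) {
      for (MachineRegisterInfo::livein_iterator
             II = MF.getRegInfo().livein_begin(),
             EE = MF.getRegInfo().livein_end(); II != EE; ++II) {
        unsigned Reg = II->first;
        if (Reg == X86::EAX || Reg == X86::AX ||
            Reg == X86::AH || Reg == X86::AL)
          return true;
      }
      return false;
    }
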
347 ri = MRI.reg_instr_begin(X86::EFLAGS), re = MRI.reg_instr_end();
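
The scan at 347 walks every instruction touching EFLAGS; in this revision it implements usesTheStack: lowering a COPY of EFLAGS expands to pushf/pop, so any such copy forces the prologue to treat the function as one that uses the stack. Roughly:

    static bool usesTheStack(const MachineFunction &MF) {
      const MachineRegisterInfo &MRI = MF.getRegInfo();
      // A COPY of EFLAGS is lowered via pushf/pop, which touches the stack.
      for (MachineRegisterInfo::reg_instr_iterator
             ri = MRI.reg_instr_begin(X86::EFLAGS), re = MRI.reg_instr_end();
           ri != re; ++ri)
        if (ri->isCopy())
          return true;
      return false;
    }
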
485 // If this is x86-64 and the Red Zone is not disabled, if we are a leaf
553 BuildMI(MBB, MBBI, DL, TII.get(Is64Bit ? X86::PUSH64r : X86::PUSH32r))
576 BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_PushReg))
583 TII.get(Is64Bit ? X86::MOV64rr : X86::MOV32rr), FramePtr)
609 (MBBI->getOpcode() == X86::PUSH32r ||
610 MBBI->getOpcode() == X86::PUSH64r)) {
627 BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_PushReg)).addImm(Reg).setMIFlag(
638 TII.get(Is64Bit ? X86::AND64ri32 : X86::AND32ri), StackPtr)
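
The prologue fragments at 553-638 save the caller's frame pointer, establish the new one, walk past the callee-saved pushes (the PUSH32r/PUSH64r test at 609-610) so later adjustments land below them, and realign the stack when over-aligned objects exist. In outline, with FramePtr, StackPtr, and MaxAlign taken from the surrounding function:

    // push %rbp / %ebp
    BuildMI(MBB, MBBI, DL, TII.get(Is64Bit ? X86::PUSH64r : X86::PUSH32r))
        .addReg(FramePtr, RegState::Kill)
        .setMIFlag(MachineInstr::FrameSetup);

    // mov %rsp, %rbp
    BuildMI(MBB, MBBI, DL,
            TII.get(Is64Bit ? X86::MOV64rr : X86::MOV32rr), FramePtr)
        .addReg(StackPtr)
        .setMIFlag(MachineInstr::FrameSetup);

    // and $-MaxAlign, %rsp  (only when extra alignment is required)
    BuildMI(MBB, MBBI, DL,
            TII.get(Is64Bit ? X86::AND64ri32 : X86::AND32ri), StackPtr)
        .addReg(StackPtr)
        .addImm(-MaxAlign)
        .setMIFlag(MachineInstr::FrameSetup);
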
689 BuildMI(MBB, MBBI, DL, TII.get(X86::PUSH32r))
690 .addReg(X86::EAX, RegState::Kill)
697 BuildMI(MBB, MBBI, DL, TII.get(X86::MOV64ri), X86::RAX)
703 BuildMI(MBB, MBBI, DL, TII.get(X86::MOV32ri), X86::EAX)
709 TII.get(Is64Bit ? X86::W64ALLOCA : X86::CALLpcrel32))
712 .addReg(X86::EFLAGS, RegState::Define | RegState::Implicit)
719 BuildMI(MBB, MBBI, DL, TII.get(X86::SUB64rr), StackPtr)
721 .addReg(X86::RAX)
726 MachineInstr *MI = addRegOffset(BuildMI(MF, DL, TII.get(X86::MOV32rm),
727 X86::EAX),
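
Lines 689-727 are the Windows large-frame path: allocations of a page (4096 bytes) or more must touch each page in order, so the size goes into EAX/RAX and a probe routine is called (_chkstk, __chkstk, or _alloca depending on target; W64ALLOCA encodes the Win64 call). MSVC's 64-bit __chkstk only probes and does not move RSP, hence the trailing SUB64rr; a live-in EAX is pushed first and reloaded by the MOV32rm at 726-727. Condensed, with names per upstream:

    if (isEAXAlive)                    // preserve an incoming value in EAX
      BuildMI(MBB, MBBI, DL, TII.get(X86::PUSH32r))
          .addReg(X86::EAX, RegState::Kill)
          .setMIFlag(MachineInstr::FrameSetup);

    // The allocation size is passed in the accumulator.
    BuildMI(MBB, MBBI, DL, TII.get(Is64Bit ? X86::MOV64ri : X86::MOV32ri),
            Is64Bit ? X86::RAX : X86::EAX)
        .addImm(NumBytes)
        .setMIFlag(MachineInstr::FrameSetup);

    BuildMI(MBB, MBBI, DL,
            TII.get(Is64Bit ? X86::W64ALLOCA : X86::CALLpcrel32))
        .addExternalSymbol(StackProbeSymbol)   // e.g. "__chkstk"
        .addReg(StackPtr,    RegState::Define | RegState::Implicit)
        .addReg(X86::EFLAGS, RegState::Define | RegState::Implicit)
        .setMIFlag(MachineInstr::FrameSetup);

    if (Is64Bit)                       // __chkstk probed; now move RSP
      BuildMI(MBB, MBBI, DL, TII.get(X86::SUB64rr), StackPtr)
          .addReg(StackPtr)
          .addReg(X86::RAX)
          .setMIFlag(MachineInstr::FrameSetup);
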
757 BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_StackAlloc))
762 BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_SetFrame))
769 BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_StackAlloc))
784 if (X86::GR64RegClass.contains(Reg) || X86::GR32RegClass.contains(Reg))
786 assert(X86::FR64RegClass.contains(Reg) && "Unexpected register class");
791 BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_SaveXMM))
797 BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_EndPrologue))
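
The SEH_* opcodes scattered through 576-797 are pseudo-instructions rather than real machine instructions: each records one Win64 unwind fact (a pushed register, a stack allocation, the frame register, a saved XMM slot) so the assembler can emit the matching .seh_* directive, and SEH_EndPrologue marks the point after which the unwinder may assume the frame is fully established. For example:

    BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_PushReg))
        .addImm(Reg)                           // becomes .seh_pushreg
        .setMIFlag(MachineInstr::FrameSetup);

    BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_EndPrologue))
        .setMIFlag(MachineInstr::FrameSetup);  // becomes .seh_endprologue
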
807 unsigned Opc = Is64Bit ? X86::MOV64rr : X86::MOV32rr;
855 case X86::RETQ:
856 case X86::RETL:
857 case X86::RETIL:
858 case X86::RETIQ:
859 case X86::TCRETURNdi:
860 case X86::TCRETURNri:
861 case X86::TCRETURNmi:
862 case X86::TCRETURNdi64:
863 case X86::TCRETURNri64:
864 case X86::TCRETURNmi64:
865 case X86::EH_RETURN:
866 case X86::EH_RETURN64:
901 TII.get(Is64Bit ? X86::POP64r : X86::POP32r), FramePtr);
911 if (Opc != X86::POP32r && Opc != X86::POP64r && Opc != X86::DBG_VALUE &&
937 unsigned Opc = (Is64Bit ? X86::MOV64rr : X86::MOV32rr);
948 if (RetOpcode == X86::EH_RETURN || RetOpcode == X86::EH_RETURN64) {
953 TII.get(Is64Bit ? X86::MOV64rr : X86::MOV32rr),
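
Lines 948-953: EH_RETURN carries the unwinder's target stack pointer in its first register operand, and the epilogue simply copies it into ESP/RSP before returning. Per upstream:

    if (RetOpcode == X86::EH_RETURN || RetOpcode == X86::EH_RETURN64) {
      MachineOperand &DestAddr = MBBI->getOperand(0);
      assert(DestAddr.isReg() && "Offset should be in register!");
      BuildMI(MBB, MBBI, DL,
              TII.get(Is64Bit ? X86::MOV64rr : X86::MOV32rr),
              StackPtr).addReg(DestAddr.getReg());
    }
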
955 } else if (RetOpcode == X86::TCRETURNri || RetOpcode == X86::TCRETURNdi ||
956 RetOpcode == X86::TCRETURNmi ||
957 RetOpcode == X86::TCRETURNri64 || RetOpcode == X86::TCRETURNdi64 ||
958 RetOpcode == X86::TCRETURNmi64) {
959 bool isMem = RetOpcode == X86::TCRETURNmi || RetOpcode == X86::TCRETURNmi64;
984 if (RetOpcode == X86::TCRETURNdi || RetOpcode == X86::TCRETURNdi64) {
986 BuildMI(MBB, MBBI, DL, TII.get((RetOpcode == X86::TCRETURNdi)
987 ? X86::TAILJMPd : X86::TAILJMPd64));
996 } else if (RetOpcode == X86::TCRETURNmi || RetOpcode == X86::TCRETURNmi64) {
998 BuildMI(MBB, MBBI, DL, TII.get((RetOpcode == X86::TCRETURNmi)
999 ? X86::TAILJMPm : X86::TAILJMPm64));
1002 } else if (RetOpcode == X86::TCRETURNri64) {
1003 BuildMI(MBB, MBBI, DL, TII.get(X86::TAILJMPr64)).
1006 BuildMI(MBB, MBBI, DL, TII.get(X86::TAILJMPr)).
1015 } else if ((RetOpcode == X86::RETQ || RetOpcode == X86::RETL ||
1016 RetOpcode == X86::RETIQ || RetOpcode == X86::RETIL) &&
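
Lines 955-1016 retarget tail calls: once the frame is torn down (and any callee stack adjustment folded into the final update), each TCRETURN* pseudo is rewritten to the matching TAILJMP*, carrying the callee operand across. The direct form, plausibly:

    MachineOperand &JumpTarget = MBBI->getOperand(0);
    if (RetOpcode == X86::TCRETURNdi || RetOpcode == X86::TCRETURNdi64) {
      MachineInstrBuilder MIB =
        BuildMI(MBB, MBBI, DL, TII.get((RetOpcode == X86::TCRETURNdi)
                                       ? X86::TAILJMPd : X86::TAILJMPd64));
      if (JumpTarget.isGlobal())
        MIB.addGlobalAddress(JumpTarget.getGlobal(), JumpTarget.getOffset(),
                             JumpTarget.getTargetFlags());
      else
        MIB.addExternalSymbol(JumpTarget.getSymbolName(),
                              JumpTarget.getTargetFlags());
    }
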
1121 if (!X86::GR64RegClass.contains(Reg) && !X86::GR32RegClass.contains(Reg))
1136 if (X86::GR64RegClass.contains(Reg) || X86::GR32RegClass.contains(Reg))
1164 unsigned Opc = STI.is64Bit() ? X86::PUSH64r : X86::PUSH32r;
1168 if (!X86::GR64RegClass.contains(Reg) && !X86::GR32RegClass.contains(Reg))
1177 // Spill the XMM regs to stack slots; X86 has no push/pop for XMM registers.
1181 if (X86::GR64RegClass.contains(Reg) ||
1182 X86::GR32RegClass.contains(Reg))
1214 if (X86::GR64RegClass.contains(Reg) ||
1215 X86::GR32RegClass.contains(Reg))
1223 unsigned Opc = STI.is64Bit() ? X86::POP64r : X86::POP32r;
1226 if (!X86::GR64RegClass.contains(Reg) &&
1227 !X86::GR32RegClass.contains(Reg))
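
Lines 1121-1227 are the two halves of callee-saved handling: GPRs are pushed and popped, while XMM registers get real spill slots because x86 has no push/pop for them (1177); the restore side at 1214-1227 mirrors the spill in reverse, reloading XMM first and then popping the GPRs. The spill side, in outline:

    // Push GR64/GR32 callee-saved registers, highest CSI index first.
    unsigned Opc = STI.is64Bit() ? X86::PUSH64r : X86::PUSH32r;
    for (unsigned i = CSI.size(); i != 0; --i) {
      unsigned Reg = CSI[i - 1].getReg();
      if (!X86::GR64RegClass.contains(Reg) && !X86::GR32RegClass.contains(Reg))
        continue;
      BuildMI(MBB, MI, DL, TII.get(Opc)).addReg(Reg, RegState::Kill)
          .setMIFlag(MachineInstr::FrameSetup);
    }

    // Everything else (the XMM registers) goes through ordinary spill code.
    for (unsigned i = CSI.size(); i != 0; --i) {
      unsigned Reg = CSI[i - 1].getReg();
      if (X86::GR64RegClass.contains(Reg) || X86::GR32RegClass.contains(Reg))
        continue;
      TII.storeRegToStackSlot(MBB, MI, Reg, true, CSI[i - 1].getFrameIdx(),
                              TRI->getMinimalPhysRegClass(Reg), TRI);
    }
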
1287 return Primary ? X86::R14 : X86::R13;
1289 return Primary ? X86::EBX : X86::EDI;
1293 return Primary ? X86::R11 : X86::R12;
1302 return Primary ? X86::EAX : X86::ECX;
1305 return Primary ? X86::EDX : X86::EAX;
1306 return Primary ? X86::ECX : X86::EAX;
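
Lines 1287-1306 are GetScratchRegister for the segmented-stack check: it must hand out two registers that are free at function entry, which depends on word size and calling convention (HiPE reserves its own registers, fastcall/regparm conventions use EAX/ECX/EDX for arguments, and a nested function's static chain occupies a register). Approximately, per upstream:

    static unsigned GetScratchRegister(bool Is64Bit, const MachineFunction &MF,
                                       bool Primary) {
      CallingConv::ID CC = MF.getFunction()->getCallingConv();
      if (CC == CallingConv::HiPE)
        return Is64Bit ? (Primary ? X86::R14 : X86::R13)
                       : (Primary ? X86::EBX : X86::EDI);
      if (Is64Bit)
        return Primary ? X86::R11 : X86::R12;

      bool IsNested = HasNestArgument(&MF);
      if (CC == CallingConv::X86_FastCall || CC == CallingConv::Fast)
        return Primary ? X86::EAX : X86::ECX;  // upstream also rejects the
                                               // fastcall + nested combination
      if (IsNested)
        return Primary ? X86::EDX : X86::EAX;
      return Primary ? X86::ECX : X86::EAX;
    }
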
1362 allocMBB->addLiveIn(X86::R10);
1374 TlsReg = X86::FS;
1377 TlsReg = X86::GS;
1380 TlsReg = X86::GS;
1383 TlsReg = X86::FS;
1390 ScratchReg = X86::RSP;
1392 BuildMI(checkMBB, DL, TII.get(X86::LEA64r), ScratchReg).addReg(X86::RSP)
1395 BuildMI(checkMBB, DL, TII.get(X86::CMP64rm)).addReg(ScratchReg)
1399 TlsReg = X86::GS;
1402 TlsReg = X86::GS;
1405 TlsReg = X86::FS;
1414 ScratchReg = X86::ESP;
1416 BuildMI(checkMBB, DL, TII.get(X86::LEA32r), ScratchReg).addReg(X86::ESP)
1420 BuildMI(checkMBB, DL, TII.get(X86::CMP32rm)).addReg(ScratchReg)
1445 BuildMI(checkMBB, DL, TII.get(X86::PUSH32r))
1448 BuildMI(checkMBB, DL, TII.get(X86::MOV32ri), ScratchReg2)
1450 BuildMI(checkMBB, DL, TII.get(X86::CMP32rm))
1457 BuildMI(checkMBB, DL, TII.get(X86::POP32r), ScratchReg2);
1463 BuildMI(checkMBB, DL, TII.get(X86::JA_4)).addMBB(&prologueMBB);
1472 BuildMI(allocMBB, DL, TII.get(X86::MOV64rr), X86::RAX).addReg(X86::R10);
1474 BuildMI(allocMBB, DL, TII.get(X86::MOV64ri), X86::R10)
1476 BuildMI(allocMBB, DL, TII.get(X86::MOV64ri), X86::R11)
1478 MF.getRegInfo().setPhysRegUsed(X86::R10);
1479 MF.getRegInfo().setPhysRegUsed(X86::R11);
1481 BuildMI(allocMBB, DL, TII.get(X86::PUSHi32))
1483 BuildMI(allocMBB, DL, TII.get(X86::PUSHi32))
1489 BuildMI(allocMBB, DL, TII.get(X86::CALL64pcrel32))
1492 BuildMI(allocMBB, DL, TII.get(X86::CALLpcrel32))
1496 BuildMI(allocMBB, DL, TII.get(X86::MORESTACK_RET_RESTORE_R10));
1498 BuildMI(allocMBB, DL, TII.get(X86::MORESTACK_RET));
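
adjustForSegmentedStacks (1362-1498) splits the entry into checkMBB and allocMBB: checkMBB compares the would-be stack pointer against the limit the runtime keeps in thread-local storage (the TlsReg/TlsOffset pairs above, e.g. %fs:0x70 on x86-64 Linux) and jumps straight to the real prologue when there is room; allocMBB loads the frame size into R10 and the incoming-argument size into R11 (parking a live R10, the static-chain register, in RAX first), calls __morestack, and returns. The 64-bit compare-and-branch, per upstream:

    // scratch = RSP - StackSize, computed with LEA so RSP is not modified.
    BuildMI(checkMBB, DL, TII.get(X86::LEA64r), ScratchReg)
        .addReg(X86::RSP).addImm(1).addReg(0).addImm(-StackSize).addReg(0);

    // cmp scratch, %fs:TlsOffset   ; the stack limit lives in TLS
    BuildMI(checkMBB, DL, TII.get(X86::CMP64rm))
        .addReg(ScratchReg)
        .addReg(0).addImm(1).addReg(0).addImm(TlsOffset).addReg(TlsReg);

    // Enough room: skip the __morestack call.
    BuildMI(checkMBB, DL, TII.get(X86::JA_4)).addMBB(&prologueMBB);
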
1611 SPReg = X86::RSP;
1612 PReg = X86::RBP;
1613 LEAop = X86::LEA64r;
1614 CMPop = X86::CMP64rm;
1615 CALLop = X86::CALL64pcrel32;
1618 SPReg = X86::ESP;
1619 PReg = X86::EBP;
1620 LEAop = X86::LEA32r;
1621 CMPop = X86::CMP32rm;
1622 CALLop = X86::CALLpcrel32;
1636 BuildMI(stackCheckMBB, DL, TII.get(X86::JAE_4)).addMBB(&prologueMBB);
1645 BuildMI(incStackMBB, DL, TII.get(X86::JLE_4)).addMBB(incStackMBB);
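
adjustForHiPEPrologue (1611-1645) is the Erlang/HiPE analogue: the per-process stack limit lives at a fixed offset (SPLimitOffset) off the process pointer in EBP/RBP, so stackCheckMBB compares SP - MaxStack against that field and branches to the real prologue when the frame fits; otherwise incStackMBB calls the runtime's inc_stack_0 and re-tests, looping via the JLE_4 back edge at 1645 until the stack is large enough. In outline, with addRegOffset folding the base-plus-displacement addressing:

    addRegOffset(BuildMI(stackCheckMBB, DL, TII.get(LEAop), ScratchReg),
                 SPReg, false, -MaxStack);
    addRegOffset(BuildMI(stackCheckMBB, DL, TII.get(CMPop)).addReg(ScratchReg),
                 PReg, false, SPLimitOffset);
    BuildMI(stackCheckMBB, DL, TII.get(X86::JAE_4)).addMBB(&prologueMBB);

    // Grow the stack and retry until it fits.
    BuildMI(incStackMBB, DL, TII.get(CALLop)).addExternalSymbol("inc_stack_0");
    addRegOffset(BuildMI(incStackMBB, DL, TII.get(LEAop), ScratchReg),
                 SPReg, false, -MaxStack);
    addRegOffset(BuildMI(incStackMBB, DL, TII.get(CMPop)).addReg(ScratchReg),
                 PReg, false, SPLimitOffset);
    BuildMI(incStackMBB, DL, TII.get(X86::JLE_4)).addMBB(incStackMBB);
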