
Lines Matching defs:Opc

2540                                   unsigned Opc, bool AllowSP,
2546 RC = Opc != X86::LEA32r ? &X86::GR64RegClass : &X86::GR32RegClass;
2548 RC = Opc != X86::LEA32r ?
2555 if (Opc != X86::LEA64_32r) {
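The 2540-2555 matches are from classifyLEAReg, which checks that a source register is usable as an LEA address component and picks a register class accordingly. A minimal sketch of that selection, assuming the surrounding X86InstrInfo member context (Opc and AllowSP as in the signature at 2540):

    const TargetRegisterClass *RC;
    if (AllowSP) {
      // LEA64r and LEA64_32r address with 64-bit registers; only LEA32r
      // takes 32-bit ones.
      RC = Opc != X86::LEA32r ? &X86::GR64RegClass : &X86::GR32RegClass;
    } else {
      // The _NOSP classes keep the stack pointer out of slots where it
      // is not encodable (e.g. the index register).
      RC = Opc != X86::LEA32r ?
        &X86::GR64_NOSPRegClass : &X86::GR32_NOSPRegClass;
    }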
2626 unsigned Opc, leaInReg;
2628 Opc = X86::LEA64_32r;
2631 Opc = X86::LEA32r;
2649 get(Opc), leaOutReg);
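The 2626-2649 group is the helper that widens a 16-bit two-address instruction into a 32-bit LEA. A sketch of the opcode and scratch-register choice, assuming RegInfo is the function's MachineRegisterInfo; the LEA result in leaOutReg is copied back to the 16-bit destination afterwards:

    unsigned Opc, leaInReg;
    if (Subtarget.is64Bit()) {
      Opc = X86::LEA64_32r;   // 32-bit result from 64-bit address operands
      leaInReg = RegInfo.createVirtualRegister(&X86::GR64_NOSPRegClass);
    } else {
      Opc = X86::LEA32r;
      leaInReg = RegInfo.createVirtualRegister(&X86::GR32_NOSPRegClass);
    }
    // ... the 16-bit source is inserted into leaInReg, then the LEA is
    // built with get(Opc) into leaOutReg as at 2649.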
2777 unsigned Opc = is64Bit ? X86::LEA64_32r : X86::LEA32r;
2783 if (!classifyLEAReg(MI, Src, Opc, /*AllowSP=*/ false,
2787 MachineInstrBuilder MIB = BuildMI(MF, MI->getDebugLoc(), get(Opc))
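At 2777-2787, convertToThreeAddress rewrites a shift-left-by-small-immediate as a scaled LEA. A sketch under the same assumptions (ShAmt in 1..3, Dest/Src the operands of the original MI):

    unsigned Opc = is64Bit ? X86::LEA64_32r : X86::LEA32r;
    unsigned SrcReg; bool isKill, isUndef;
    if (!classifyLEAReg(MI, Src, Opc, /*AllowSP=*/false,
                        SrcReg, isKill, isUndef, ImplicitOp))
      return nullptr;
    // shl $N, %r  ==>  lea (,%r,1<<N), %dst
    MachineInstrBuilder MIB =
        BuildMI(MF, MI->getDebugLoc(), get(Opc))
            .addOperand(Dest)
            .addReg(0)                                // no base
            .addImm(1ULL << ShAmt)                    // scale
            .addReg(SrcReg, getKillRegState(isKill))  // index
            .addImm(0)                                // displacement
            .addReg(0);                               // no segment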
2813 unsigned Opc = MIOpc == X86::INC64r ? X86::LEA64r
2818 if (!classifyLEAReg(MI, Src, Opc, /*AllowSP=*/ false,
2822 MachineInstrBuilder MIB = BuildMI(MF, MI->getDebugLoc(), get(Opc))
2842 unsigned Opc = MIOpc == X86::DEC64r ? X86::LEA64r
2848 if (!classifyLEAReg(MI, Src, Opc, /*AllowSP=*/ false,
2852 MachineInstrBuilder MIB = BuildMI(MF, MI->getDebugLoc(), get(Opc))
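The 2813-2852 cases convert INC and DEC the same way: an LEA whose displacement is +1 or -1. A condensed sketch for the increment (the DEC case at 2842 mirrors it with -1; addOffset is the X86InstrBuilder.h helper that appends the remaining address operands):

    unsigned Opc = MIOpc == X86::INC64r ? X86::LEA64r
                 : (is64Bit ? X86::LEA64_32r : X86::LEA32r);
    // inc %r  ==>  lea 1(%r), %dst
    MachineInstrBuilder MIB =
        addOffset(BuildMI(MF, MI->getDebugLoc(), get(Opc))
                      .addOperand(Dest)
                      .addReg(SrcReg, getKillRegState(isKill)),
                  1);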
2875 unsigned Opc;
2877 Opc = X86::LEA64r;
2879 Opc = is64Bit ? X86::LEA64_32r : X86::LEA32r;
2884 if (!classifyLEAReg(MI, Src, Opc, /*AllowSP=*/ true,
2892 if (!classifyLEAReg(MI, Src2, Opc, /*AllowSP=*/ false,
2896 MachineInstrBuilder MIB = BuildMI(MF, MI->getDebugLoc(), get(Opc))
2949 unsigned Opc = is64Bit ? X86::LEA64_32r : X86::LEA32r;
2954 if (!classifyLEAReg(MI, Src, Opc, /*AllowSP=*/ true,
2958 MachineInstrBuilder MIB = BuildMI(MF, MI->getDebugLoc(), get(Opc))
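Lines 2875-2958 finish the ADD forms: register-register ADD becomes an LEA with base plus index (only the first source, classified at 2884 with AllowSP=true, may be the stack pointer), and register-immediate ADD becomes an LEA with a displacement. Sketch of the rr case, using the addRegReg helper from X86InstrBuilder.h:

    // add %r2, %r1  ==>  lea (%r1,%r2), %dst
    MachineInstrBuilder MIB =
        addRegReg(BuildMI(MF, MI->getDebugLoc(), get(Opc)).addOperand(Dest),
                  SrcReg, isKill, SrcReg2, isKill2);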
3157 unsigned Opc;
3161 case X86::SHRD16rri8: Size = 16; Opc = X86::SHLD16rri8; break;
3162 case X86::SHLD16rri8: Size = 16; Opc = X86::SHRD16rri8; break;
3163 case X86::SHRD32rri8: Size = 32; Opc = X86::SHLD32rri8; break;
3164 case X86::SHLD32rri8: Size = 32; Opc = X86::SHRD32rri8; break;
3165 case X86::SHRD64rri8: Size = 64; Opc = X86::SHLD64rri8; break;
3166 case X86::SHLD64rri8: Size = 64; Opc = X86::SHRD64rri8; break;
3174 MI->setDesc(get(Opc));
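The 3157-3174 switch serves commuteInstruction for double shifts: swapping the two register operands of SHLD/SHRD is only valid if the opcode flips direction and the shift amount becomes Size minus the original. A sketch of what follows the switch:

    // shld $imm, %b, %a  <=>  shrd $(Size-imm), %a, %b
    unsigned Amt = MI->getOperand(3).getImm();
    MI->setDesc(get(Opc));                 // SHLD <-> SHRD
    MI->getOperand(3).setImm(Size - Amt);  // complementary amount
    // The register operands are then swapped by the generic commute path.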
3295 unsigned Opc;
3298 case X86::CMOVB16rr: Opc = X86::CMOVAE16rr; break;
3299 case X86::CMOVB32rr: Opc = X86::CMOVAE32rr; break;
3300 case X86::CMOVB64rr: Opc = X86::CMOVAE64rr; break;
3301 case X86::CMOVAE16rr: Opc = X86::CMOVB16rr; break;
3302 case X86::CMOVAE32rr: Opc = X86::CMOVB32rr; break;
3303 case X86::CMOVAE64rr: Opc = X86::CMOVB64rr; break;
3304 case X86::CMOVE16rr: Opc = X86::CMOVNE16rr; break;
3305 case X86::CMOVE32rr: Opc = X86::CMOVNE32rr; break;
3306 case X86::CMOVE64rr: Opc = X86::CMOVNE64rr; break;
3307 case X86::CMOVNE16rr: Opc = X86::CMOVE16rr; break;
3308 case X86::CMOVNE32rr: Opc = X86::CMOVE32rr; break;
3309 case X86::CMOVNE64rr: Opc = X86::CMOVE64rr; break;
3310 case X86::CMOVBE16rr: Opc = X86::CMOVA16rr; break;
3311 case X86::CMOVBE32rr: Opc = X86::CMOVA32rr; break;
3312 case X86::CMOVBE64rr: Opc = X86::CMOVA64rr; break;
3313 case X86::CMOVA16rr: Opc = X86::CMOVBE16rr; break;
3314 case X86::CMOVA32rr: Opc = X86::CMOVBE32rr; break;
3315 case X86::CMOVA64rr: Opc = X86::CMOVBE64rr; break;
3316 case X86::CMOVL16rr: Opc = X86::CMOVGE16rr; break;
3317 case X86::CMOVL32rr: Opc = X86::CMOVGE32rr; break;
3318 case X86::CMOVL64rr: Opc = X86::CMOVGE64rr; break;
3319 case X86::CMOVGE16rr: Opc = X86::CMOVL16rr; break;
3320 case X86::CMOVGE32rr: Opc = X86::CMOVL32rr; break;
3321 case X86::CMOVGE64rr: Opc = X86::CMOVL64rr; break;
3322 case X86::CMOVLE16rr: Opc = X86::CMOVG16rr; break;
3323 case X86::CMOVLE32rr: Opc = X86::CMOVG32rr; break;
3324 case X86::CMOVLE64rr: Opc = X86::CMOVG64rr; break;
3325 case X86::CMOVG16rr: Opc = X86::CMOVLE16rr; break;
3326 case X86::CMOVG32rr: Opc = X86::CMOVLE32rr; break;
3327 case X86::CMOVG64rr: Opc = X86::CMOVLE64rr; break;
3328 case X86::CMOVS16rr: Opc = X86::CMOVNS16rr; break;
3329 case X86::CMOVS32rr: Opc = X86::CMOVNS32rr; break;
3330 case X86::CMOVS64rr: Opc = X86::CMOVNS64rr; break;
3331 case X86::CMOVNS16rr: Opc = X86::CMOVS16rr; break;
3332 case X86::CMOVNS32rr: Opc = X86::CMOVS32rr; break;
3333 case X86::CMOVNS64rr: Opc = X86::CMOVS64rr; break;
3334 case X86::CMOVP16rr: Opc = X86::CMOVNP16rr; break;
3335 case X86::CMOVP32rr: Opc = X86::CMOVNP32rr; break;
3336 case X86::CMOVP64rr: Opc = X86::CMOVNP64rr; break;
3337 case X86::CMOVNP16rr: Opc = X86::CMOVP16rr; break;
3338 case X86::CMOVNP32rr: Opc = X86::CMOVP32rr; break;
3339 case X86::CMOVNP64rr: Opc = X86::CMOVP64rr; break;
3340 case X86::CMOVO16rr: Opc = X86::CMOVNO16rr; break;
3341 case X86::CMOVO32rr: Opc = X86::CMOVNO32rr; break;
3342 case X86::CMOVO64rr: Opc = X86::CMOVNO64rr; break;
3343 case X86::CMOVNO16rr: Opc = X86::CMOVO16rr; break;
3344 case X86::CMOVNO32rr: Opc = X86::CMOVO32rr; break;
3345 case X86::CMOVNO64rr: Opc = X86::CMOVO64rr; break;
3352 MI->setDesc(get(Opc));
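The 3295-3345 switch commutes CMOV: exchanging the true and false operands requires inverting the condition, so each opcode maps to its opposite (B<->AE, E<->NE, L<->GE, and so on), and 3352 installs the result with setDesc. A more compact, table-free equivalent (hypothetical here, built from helpers that do appear in this file; RegBytes is an assumed size parameter) would be:

    X86::CondCode CC  = X86::getCondFromCMovOpc(MI->getOpcode());
    X86::CondCode OCC = X86::GetOppositeBranchCondition(CC);
    unsigned Opc = getCMovFromCond(OCC, RegBytes);  // see table at 3846
    MI->setDesc(get(Opc));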
3357 unsigned Opc = getFMA3OpcodeToCommuteOperands(MI, OpIdx1, OpIdx2);
3358 if (Opc == 0)
3365 MI->setDesc(get(Opc));
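At 3357-3365 the FMA3 commute maps the opcode through getFMA3OpcodeToCommuteOperands, since swapping operands of an FMA changes which 132/213/231 form encodes the same computation. Sketch:

    unsigned Opc = getFMA3OpcodeToCommuteOperands(MI, OpIdx1, OpIdx2);
    if (Opc == 0)
      return nullptr;        // no equivalent form; cannot commute
    MI->setDesc(get(Opc));   // e.g. VFMADD213PSr -> VFMADD231PSr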
3434 unsigned Opc = MI->getOpcode();
3540 isFMA3(Opc, &IsIntrinOpcode);
3559 if (OpcodeGroups[GroupIndex][FormIndex] == Opc) {
3675 static X86::CondCode getCondFromSETOpc(unsigned Opc) {
3676 switch (Opc) {
3698 X86::CondCode X86::getCondFromCMovOpc(unsigned Opc) {
3699 switch (Opc) {
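3675-3699 go the other direction: from a SETcc or CMOVcc opcode back to its X86::CondCode, one case pair per condition. The shape of the first switch:

    static X86::CondCode getCondFromSETOpc(unsigned Opc) {
      switch (Opc) {
      default:                          return X86::COND_INVALID;
      case X86::SETAr: case X86::SETAm: return X86::COND_A;
      case X86::SETBr: case X86::SETBm: return X86::COND_B;
      // ... and likewise for the remaining condition codes ...
      }
    }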
3819 static const uint16_t Opc[16][2] = {
3839 return Opc[CC][HasMemoryOperand ? 1 : 0];
3846 static const uint16_t Opc[32][3] = {
3885 case 2: return Opc[Idx][0];
3886 case 4: return Opc[Idx][1];
3887 case 8: return Opc[Idx][2];
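The static tables at 3819 and 3846 are the forward maps: Opc[16][2] takes a condition code to the SETcc register/memory forms, and Opc[32][3] takes a condition (with the has-memory-operand bit folded into Idx) and an operand size to a CMOV opcode. The 3885-3887 dispatch picks the column by register size in bytes:

    switch (RegBytes) {
    default: llvm_unreachable("Illegal register size!");
    case 2: return Opc[Idx][0];   // CMOVcc16
    case 4: return Opc[Idx][1];   // CMOVcc32
    case 8: return Opc[Idx][2];   // CMOVcc64
    }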
4181 unsigned Opc = GetCondBranchFromCond(CC);
4182 BuildMI(&MBB, DL, get(Opc)).addMBB(TBB);
4237 unsigned Opc = getCMovFromCond((X86::CondCode)Cond[0].getImm(),
4240 BuildMI(MBB, I, DL, get(Opc), DstReg).addReg(FalseReg).addReg(TrueReg);
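4181-4240 are the consumers of those maps: InsertBranch turns each analyzed condition into the matching conditional jump, and insertSelect builds a CMOV. Sketch of the two call sites, assuming RC is the destination's register class:

    // Conditional branch: CondCode -> JCC opcode.
    unsigned JOpc = GetCondBranchFromCond(CC);
    BuildMI(&MBB, DL, get(JOpc)).addMBB(TBB);

    // Select: CondCode + register size -> CMOV opcode.
    unsigned COpc = getCMovFromCond((X86::CondCode)Cond[0].getImm(),
                                    RC->getSize());
    BuildMI(MBB, I, DL, get(COpc), DstReg).addReg(FalseReg).addReg(TrueReg);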
4337 if (auto Opc = copyPhysRegOpcode_AVX512_DQ(DestReg, SrcReg))
4338 return Opc;
4340 if (auto Opc = copyPhysRegOpcode_AVX512_BW(DestReg, SrcReg))
4341 return Opc;
4369 unsigned Opc = 0;
4371 Opc = X86::MOV64rr;
4373 Opc = X86::MOV32rr;
4375 Opc = X86::MOV16rr;
4381 Opc = X86::MOV8rr_NOREX;
4386 Opc = X86::MOV8rr;
4389 Opc = X86::MMX_MOVQ64rr;
4391 Opc = copyPhysRegOpcode_AVX512(DestReg, SrcReg, Subtarget);
4393 Opc = HasAVX ? X86::VMOVAPSrr : X86::MOVAPSrr;
4395 Opc = X86::VMOVAPSYrr;
4396 if (!Opc)
4397 Opc = CopyToFromAsymmetricReg(DestReg, SrcReg, Subtarget);
4399 if (Opc) {
4400 BuildMI(MBB, MI, DL, get(Opc), DestReg)
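The 4337-4400 run is copyPhysReg's opcode selection: try the AVX-512 DQ/BW helpers, fall back through the GPR widths (including the NOREX byte-register case), MMX, the XMM/YMM moves, and finally the asymmetric GPR<->XMM copies. Once Opc is nonzero the copy itself is a single instruction, as at 4400:

    if (Opc) {
      BuildMI(MBB, MI, DL, get(Opc), DestReg)
          .addReg(SrcReg, getKillRegState(KillSrc));
      return;
    }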
4636 unsigned Opc = getStoreRegOpcode(SrcReg, RC, isAligned, Subtarget);
4638 addFrameReference(BuildMI(MBB, MI, DL, get(Opc)), FrameIdx)
4652 unsigned Opc = getStoreRegOpcode(SrcReg, RC, isAligned, Subtarget);
4654 MachineInstrBuilder MIB = BuildMI(MF, DL, get(Opc));
4673 unsigned Opc = getLoadRegOpcode(DestReg, RC, isAligned, Subtarget);
4675 addFrameReference(BuildMI(MBB, MI, DL, get(Opc), DestReg), FrameIdx);
4687 unsigned Opc = getLoadRegOpcode(DestReg, RC, isAligned, Subtarget);
4689 MachineInstrBuilder MIB = BuildMI(MF, DL, get(Opc), DestReg);
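4636-4689 pair the spill and reload helpers: getStoreRegOpcode and getLoadRegOpcode pick a MOV variant from the register class, stack alignment, and subtarget, and addFrameReference expands the frame index into the full memory-operand quintet. Stitched-together sketch of the spill side:

    unsigned Opc = getStoreRegOpcode(SrcReg, RC, isAligned, Subtarget);
    addFrameReference(BuildMI(MBB, MI, DL, get(Opc)), FrameIdx)
        .addReg(SrcReg, getKillRegState(isKill));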
5898 unsigned Opc = HasAVX ? X86::VXORPSrr : X86::XORPSrr;
5899 BuildMI(*MI->getParent(), MI, MI->getDebugLoc(), get(Opc), Reg)
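5898-5899 belong to the partial-register-dependency breaker: XORPS of a register with itself is a recognized zero idiom, so issuing it with both operands marked undef severs the false dependency on the register's stale upper bits at essentially no cost:

    unsigned Opc = HasAVX ? X86::VXORPSrr : X86::XORPSrr;
    BuildMI(*MI->getParent(), MI, MI->getDebugLoc(), get(Opc), Reg)
        .addReg(Reg, RegState::Undef)
        .addReg(Reg, RegState::Undef);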
5974 unsigned Opc = LoadMI.getOpcode();
5979 if ((Opc == X86::MOVSSrm || Opc == X86::VMOVSSrm) && RegSize > 4) {
6000 if ((Opc == X86::MOVSDrm || Opc == X86::VMOVSDrm) && RegSize > 8) {
6118 unsigned Opc = LoadMI->getOpcode();
6119 if (Opc == X86::FsFLD0SS)
6121 else if (Opc == X86::FsFLD0SD)
6123 else if (Opc == X86::AVX2_SETALLONES || Opc == X86::AVX_SET0)
6128 bool IsAllOnes = (Opc == X86::V_SETALLONES || Opc == X86::AVX2_SETALLONES);
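5974-6128 guard memory-operand folding: a scalar MOVSS/MOVSD load supplies only 4/8 bytes, so it must not be folded into a user that reads a full XMM register, and the constant pseudos (FsFLD0SS, AVX_SET0, V_SETALLONES, ...) fold by materializing a constant-pool load instead. Simplified sketch of the size guard; the real code additionally allows users that provably touch only the low element:

    unsigned Opc = LoadMI.getOpcode();
    if ((Opc == X86::MOVSSrm || Opc == X86::VMOVSSrm) && RegSize > 4)
      return nullptr;   // user reads more than the 4 loaded bytes
    if ((Opc == X86::MOVSDrm || Opc == X86::VMOVSDrm) && RegSize > 8)
      return nullptr;   // user reads more than the 8 loaded bytes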
6162 unsigned Opc = I->second.first;
6173 const MCInstrDesc &MCID = get(Opc);
6291 unsigned Opc = I->second.first;
6295 const MCInstrDesc &MCID = get(Opc);
6357 SDNode *NewNode = DAG.getMachineNode(Opc, dl, VTs, BeforeOps);
6391 unsigned X86InstrInfo::getOpcodeAfterMemoryUnfold(unsigned Opc,
6395 MemOp2RegOpTable.find(Opc);
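The final matches (6162-6395) are the unfold path: MemOp2RegOpTable maps a folded memory-form opcode back to its register form plus flag bits, and getOpcodeAfterMemoryUnfold is the public query over the same table. Sketch of the lookup:

    auto I = MemOp2RegOpTable.find(Opc);
    if (I == MemOp2RegOpTable.end())
      return 0;                           // not an unfoldable opcode
    unsigned RegOpc = I->second.first;    // register-form opcode
    unsigned Flags  = I->second.second;   // operand index + load/store bits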
6906 bool X86InstrInfo::isHighLatencyDef(int opc) const {
6907 switch (opc) {