
Lines Matching defs:m32

5820 case 1: /* FISTTPL m32 (SSE3) */
5827 case 2: /* FIST m32 */
5833 case 3: /* FISTP m32 */
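
These three x87 forms all store st(0) to a 32-bit signed integer in memory;
FIST/FISTP convert using the current rounding mode, while the SSE3 FISTTP
always truncates toward zero. A minimal plain-C sketch of the two behaviours
(helper names here are illustrative only):

    #include <math.h>
    #include <stdint.h>

    /* FIST/FISTP m32: convert using the current rounding mode. */
    static void fist_m32(int32_t *m32, double st0)   { *m32 = (int32_t)lrint(st0); }

    /* FISTTP m32 (SSE3): always truncate toward zero. */
    static void fisttp_m32(int32_t *m32, double st0) { *m32 = (int32_t)st0; }
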
12512 /* 66 0F 6E = MOVD from ireg32/m32 to xmm lo 1/4,
12802 /* 66 0F 7E = MOVD from xmm low 1/4 to ireg32 or m32. */
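
A quick sketch of the two MOVD directions noted above, using the SSE2
intrinsics they correspond to (function names are illustrative): 66 0F 6E
zero-extends a 32-bit value into xmm bits 31:0, and 66 0F 7E copies xmm
bits 31:0 back out to a register or m32.

    #include <emmintrin.h>  /* SSE2 */
    #include <stdint.h>

    __m128i movd_to_xmm(int32_t v)   { return _mm_cvtsi32_si128(v); }  /* upper 96 bits zeroed */
    int32_t movd_from_xmm(__m128i x) { return _mm_cvtsi128_si32(x); }
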
12931 /* 0F AE /3 = STMXCSR m32 -- store %mxcsr */
12938 /* 0F AE /2 = LDMXCSR m32 -- load %mxcsr */
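
Both instructions operate on a 32-bit memory word holding %mxcsr. The
standard intrinsics round-trip it; as an illustration, this sketch reads the
control/status register, sets the flush-to-zero bit and writes it back:

    #include <xmmintrin.h>  /* SSE */

    void enable_ftz(void)
    {
        unsigned int csr = _mm_getcsr();      /* STMXCSR m32 */
        _mm_setcsr(csr | _MM_FLUSH_ZERO_ON);  /* LDMXCSR m32 */
    }
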
16308 /* 66 0F 38 21 /r = PMOVSXBD xmm1, xmm2/m32
16337 /* 66 0F 38 24 /r = PMOVSXWQ xmm1, xmm2/m32
16441 /* 66 0F 38 31 /r = PMOVZXBD xmm1, xmm2/m32
16470 /* 66 0F 38 34 /r = PMOVZXWQ xmm1, xmm2/m32
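
For these PMOVSX/PMOVZX forms the m32 source supplies only 32 bits: four
bytes (BD) or two words (WQ), sign- or zero-extended to fill the xmm
destination. A sketch of the memory forms with SSE4.1 intrinsics, loading
exactly 32 bits first (helper names are illustrative):

    #include <smmintrin.h>  /* SSE4.1 */
    #include <string.h>
    #include <stdint.h>

    __m128i pmovsxbd_m32(const void *m32)
    {
        uint32_t bits; memcpy(&bits, m32, 4);                        /* read just 32 bits */
        return _mm_cvtepi8_epi32(_mm_cvtsi32_si128((int32_t)bits));  /* 4 x i8  -> 4 x i32, signed  */
    }

    __m128i pmovzxwq_m32(const void *m32)
    {
        uint32_t bits; memcpy(&bits, m32, 4);
        return _mm_cvtepu16_epi64(_mm_cvtsi32_si128((int32_t)bits)); /* 2 x u16 -> 2 x u64, unsigned */
    }
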
17503 /* 66 0F 3A 0A /r ib = ROUNDSS imm8, xmm2/m32, xmm1
17750 /* 66 0F 3A 21 /r ib = INSERTPS imm8, xmm2/m32, xmm1
17789 /* 66 no-REX.W 0F 3A 22 /r ib = PINSRD xmm1, r/m32, imm8
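
Sketches of these three SSE4.1 forms; each consumes a 32-bit source
(xmm2/m32 or r/m32) plus an imm8. The imm8 values below are examples only:

    #include <smmintrin.h>  /* SSE4.1 */

    __m128 roundss_nearest(__m128 dst, __m128 src)
    {
        return _mm_round_ss(dst, src, _MM_FROUND_TO_NEAREST_INT | _MM_FROUND_NO_EXC);
    }

    __m128 insertps_example(__m128 dst, __m128 src)
    {
        return _mm_insert_ps(dst, src, 0x10);   /* imm8 0x10: put src lane 0 into dst lane 1 */
    }

    __m128i pinsrd_example(__m128i dst, int v)
    {
        return _mm_insert_epi32(dst, v, 2);     /* overwrite dword 2 of dst with v */
    }
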
18340 /* movsx r/m32 to r64 */
19579 IRTemp m32 = newTemp(Ity_I64);
19604 assign( m32, mkU64(0xFFFFFFFF00000000ULL) );
19608 binop(Iop_And64,mkexpr(s16),mkexpr(m32)),
19612 mkexpr(m32))
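
In the fragment above, m32 is not a memory operand but a 64-bit IR constant
used as a lane mask. A plain-C sketch of the merge such a mask supports,
assuming the intent is to keep the upper 32 bits of s16 and splice in a new
low dword (names are illustrative):

    #include <stdint.h>

    uint64_t merge_low32(uint64_t s16_bits, uint32_t new_low)
    {
        const uint64_t m32 = 0xFFFFFFFF00000000ULL;   /* keep bits 63:32 */
        return (s16_bits & m32) | (uint64_t)new_low;  /* replace bits 31:0 */
    }
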
21489 /* VMOVSS m32, xmm1 = VEX.LIG.F3.0F.WIG 10 /r */
22153 /* VCVTSI2SD r/m32, xmm2, xmm1 = VEX.NDS.LIG.F2.0F.W0 2A /r */
22237 /* VCVTSI2SS r/m32, xmm2, xmm1 = VEX.NDS.LIG.F3.0F.W0 2A /r */
22305 /* VCVTTSD2SI xmm1/m32, r32 = VEX.LIG.F2.0F.W0 2C /r */
22315 /* VCVTTSS2SI xmm1/m32, r32 = VEX.LIG.F3.0F.W0 2C /r */
22328 /* VCVTSD2SI xmm1/m32, r32 = VEX.LIG.F2.0F.W0 2D /r */
22338 /* VCVTSS2SI xmm1/m32, r32 = VEX.LIG.F3.0F.W0 2D /r */
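
These scalar conversions differ only in direction and in whether the TT
variants truncate or the MXCSR rounding mode applies. The corresponding
intrinsics (which compile to the VEX encodings when AVX is enabled):

    #include <emmintrin.h>  /* SSE2 (pulls in SSE) */

    __m128d cvtsi2sd(__m128d dst, int v) { return _mm_cvtsi32_sd(dst, v); }
    __m128  cvtsi2ss(__m128  dst, int v) { return _mm_cvtsi32_ss(dst, v); }
    int cvttsd2si(__m128d x) { return _mm_cvttsd_si32(x); }  /* truncate toward zero */
    int cvttss2si(__m128  x) { return _mm_cvttss_si32(x); }  /* truncate toward zero */
    int cvtsd2si (__m128d x) { return _mm_cvtsd_si32(x);  }  /* round per MXCSR      */
    int cvtss2si (__m128  x) { return _mm_cvtss_si32(x);  }  /* round per MXCSR      */
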
22358 /* VUCOMISS xmm2/m32, xmm1 = VEX.LIG.0F.WIG 2E /r */
22359 /* VCOMISS xmm2/m32, xmm1 = VEX.LIG.0F.WIG 2F /r */
22600 /* VADDSS xmm3/m32, xmm2, xmm1 = VEX.NDS.LIG.F3.0F.WIG 58 /r */
22639 /* VMULSS xmm3/m32, xmm2, xmm1 = VEX.NDS.LIG.F3.0F.WIG 59 /r */
22722 /* VCVTSS2SD xmm3/m32, xmm2, xmm1 = VEX.NDS.LIG.F3.0F.WIG 5A /r */
22794 /* VSUBSS xmm3/m32, xmm2, xmm1 = VEX.NDS.LIG.F3.0F.WIG 5C /r */
22833 /* VMINSS xmm3/m32, xmm2, xmm1 = VEX.NDS.LIG.F3.0F.WIG 5D /r */
22872 /* VDIVSS xmm3/m32, xmm2, xmm1 = VEX.NDS.LIG.F3.0F.WIG 5E /r */
22911 /* VMAXSS xmm3/m32, xmm2, xmm1 = VEX.NDS.LIG.F3.0F.WIG 5F /r */
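
The scalar-SS arithmetic forms above read only 32 bits from xmm3/m32,
combine the low lanes, and take the remaining lanes from the first source.
Illustrated with the classic intrinsics (VEX-encoded when built with AVX):

    #include <xmmintrin.h>  /* SSE */

    __m128 addss(__m128 a, __m128 b) { return _mm_add_ss(a, b); }
    __m128 mulss(__m128 a, __m128 b) { return _mm_mul_ss(a, b); }
    __m128 subss(__m128 a, __m128 b) { return _mm_sub_ss(a, b); }
    __m128 divss(__m128 a, __m128 b) { return _mm_div_ss(a, b); }
    __m128 minss(__m128 a, __m128 b) { return _mm_min_ss(a, b); }
    __m128 maxss(__m128 a, __m128 b) { return _mm_max_ss(a, b); }
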
23106 /* VMOVD r32/m32, xmm1 = VEX.128.66.0F.W0 6E */
23563 /* VMOVD xmm1, m32/r32 = VEX.128.66.0F.W0 7E /r (reg case only) */
23638 /* VSTMXCSR m32 = VEX.LZ.0F.WIG AE /3 */
23647 /* VLDMXCSR m32 = VEX.LZ.0F.WIG AE /2 */
23669 /* VCMPSS xmm3/m32(E=argL), xmm2(V=argR), xmm1(G) */
24840 /* VBROADCASTSS m32, xmm1 = VEX.128.66.0F38.WIG 18 /r */
24857 /* VBROADCASTSS m32, ymm1 = VEX.256.66.0F38.WIG 18 /r */
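
VBROADCASTSS reads a single float (m32) and replicates it into every lane
of the 128- or 256-bit destination:

    #include <immintrin.h>  /* AVX */

    __m128 bcast128(const float *m32) { return _mm_broadcast_ss(m32); }     /* 4 copies */
    __m256 bcast256(const float *m32) { return _mm256_broadcast_ss(m32); }  /* 8 copies */
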
24954 /* VPMOVSXBD xmm2/m32, xmm1 */
24982 /* VPMOVSXWQ xmm2/m32, xmm1 = VEX.128.66.0F38.WIG 24 /r */
25058 /* VPMOVZXBD xmm2/m32, xmm1 */
25087 /* VPMOVZXWQ xmm2/m32, xmm1 = VEX.128.66.0F38.WIG 34 /r */
25661 /* VROUNDSS imm8, xmm3/m32, xmm2, xmm1 */
25937 /* VPEXTRD imm8, r32/m32, xmm2 */
25953 /* VEXTRACTPS imm8, xmm1, r32/m32 = VEX.128.66.0F3A.WIG 17 /r ib */
26064 /* VINSERTPS imm8, xmm3/m32, xmm2, xmm1
26104 /* VPINSRD r32/m32, xmm2, xmm1 = VEX.NDS.128.66.0F3A.W0 22 /r ib */
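
The extract forms in this last group go the other way: a 32-bit lane
selected by imm8 is written to r32 or m32. A short sketch with the SSE4.1
intrinsics (imm8 values are examples):

    #include <smmintrin.h>  /* SSE4.1 */

    int pextrd_lane1(__m128i x)   { return _mm_extract_epi32(x, 1); }  /* dword lane 1          */
    int extractps_lane2(__m128 x) { return _mm_extract_ps(x, 2); }     /* bit pattern of lane 2 */
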