
Lines Matching refs:pfx

683 static Bool IS_VALID_PFX ( Prefix pfx ) {
684 return toBool((pfx & 0xFF000000) == PFX_EMPTY);
687 static Bool haveREX ( Prefix pfx ) {
688 return toBool(pfx & PFX_REX);
691 static Int getRexW ( Prefix pfx ) {
692 return (pfx & PFX_REXW) ? 1 : 0;
694 static Int getRexR ( Prefix pfx ) {
695 return (pfx & PFX_REXR) ? 1 : 0;
697 static Int getRexX ( Prefix pfx ) {
698 return (pfx & PFX_REXX) ? 1 : 0;
700 static Int getRexB ( Prefix pfx ) {
701 return (pfx & PFX_REXB) ? 1 : 0;
706 static Bool haveF2orF3 ( Prefix pfx ) {
707 return toBool((pfx & (PFX_F2|PFX_F3)) > 0);
709 static Bool haveF2andF3 ( Prefix pfx ) {
710 return toBool((pfx & (PFX_F2|PFX_F3)) == (PFX_F2|PFX_F3));
712 static Bool haveF2 ( Prefix pfx ) {
713 return toBool((pfx & PFX_F2) > 0);
715 static Bool haveF3 ( Prefix pfx ) {
716 return toBool((pfx & PFX_F3) > 0);
719 static Bool have66 ( Prefix pfx ) {
720 return toBool((pfx & PFX_66) > 0);
722 static Bool haveASO ( Prefix pfx ) {
723 return toBool((pfx & PFX_ASO) > 0);
725 static Bool haveLOCK ( Prefix pfx ) {
726 return toBool((pfx & PFX_LOCK) > 0);
729 /* Return True iff pfx has 66 set and F2 and F3 clear */
730 static Bool have66noF2noF3 ( Prefix pfx )
733 toBool((pfx & (PFX_66|PFX_F2|PFX_F3)) == PFX_66);
736 /* Return True iff pfx has F2 set and 66 and F3 clear */
737 static Bool haveF2no66noF3 ( Prefix pfx )
740 toBool((pfx & (PFX_66|PFX_F2|PFX_F3)) == PFX_F2);
743 /* Return True iff pfx has F3 set and 66 and F2 clear */
744 static Bool haveF3no66noF2 ( Prefix pfx )
747 toBool((pfx & (PFX_66|PFX_F2|PFX_F3)) == PFX_F3);
750 /* Return True iff pfx has F3 set and F2 clear */
751 static Bool haveF3noF2 ( Prefix pfx )
754 toBool((pfx & (PFX_F2|PFX_F3)) == PFX_F3);
757 /* Return True iff pfx has F2 set and F3 clear */
758 static Bool haveF2noF3 ( Prefix pfx )
761 toBool((pfx & (PFX_F2|PFX_F3)) == PFX_F2);
764 /* Return True iff pfx has 66, F2 and F3 clear */
765 static Bool haveNo66noF2noF3 ( Prefix pfx )
768 toBool((pfx & (PFX_66|PFX_F2|PFX_F3)) == 0);
771 /* Return True iff pfx has any of 66, F2 and F3 set */
772 static Bool have66orF2orF3 ( Prefix pfx )
774 return toBool( ! haveNo66noF2noF3(pfx) );
777 /* Return True iff pfx has 66 or F3 set */
778 static Bool have66orF3 ( Prefix pfx )
780 return toBool((pfx & (PFX_66|PFX_F3)) > 0);
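The predicates above all follow one idiom: the decoded prefixes are packed into a single Prefix word as bit flags, and each query is a mask-and-compare. A minimal self-contained sketch of that idiom follows; the PFX_* values here are illustrative stand-ins and need not match the real constants defined earlier in this file.

#include <stdio.h>

typedef unsigned int Prefix;

/* stand-in values for illustration only */
#define PFX_EMPTY 0x55000000u   /* "this Prefix word is valid" signature */
#define PFX_66    (1u << 1)
#define PFX_F2    (1u << 8)
#define PFX_F3    (1u << 9)

static int have66noF2noF3_sketch ( Prefix pfx ) {
   return (pfx & (PFX_66|PFX_F2|PFX_F3)) == PFX_66;
}

int main ( void ) {
   Prefix pfx = PFX_EMPTY | PFX_66;                 /* e.g. a 66 0F xx insn */
   printf("66 only : %d\n", have66noF2noF3_sketch(pfx));        /* 1 */
   printf("valid   : %d\n", (pfx & 0xFF000000u) == PFX_EMPTY);  /* 1 */
   return 0;
}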
791 static UInt getVexNvvvv ( Prefix pfx ) {
792 UInt r = (UInt)pfx;
797 static Bool haveVEX ( Prefix pfx ) {
798 return toBool(pfx & PFX_VEX);
801 static Int getVexL ( Prefix pfx ) {
802 return (pfx & PFX_VEXL) ? 1 : 0;
1164 static IRExpr* getIReg64rexX ( Prefix pfx, UInt lo3bits )
1167 vassert(IS_VALID_PFX(pfx));
1168 return getIReg64( lo3bits | (getRexX(pfx) << 3) );
1171 static const HChar* nameIReg64rexX ( Prefix pfx, UInt lo3bits )
1174 vassert(IS_VALID_PFX(pfx));
1175 return nameIReg( 8, lo3bits | (getRexX(pfx) << 3), False );
1178 static const HChar* nameIRegRexB ( Int sz, Prefix pfx, UInt lo3bits )
1181 vassert(IS_VALID_PFX(pfx));
1183 return nameIReg( sz, lo3bits | (getRexB(pfx) << 3),
1184 toBool(sz==1 && !haveREX(pfx)) );
1187 static IRExpr* getIRegRexB ( Int sz, Prefix pfx, UInt lo3bits )
1190 vassert(IS_VALID_PFX(pfx));
1196 offsetIReg( sz, lo3bits | (getRexB(pfx) << 3),
1203 offsetIReg( sz, lo3bits | (getRexB(pfx) << 3),
1204 toBool(sz==1 && !haveREX(pfx)) ),
1210 static void putIRegRexB ( Int sz, Prefix pfx, UInt lo3bits, IRExpr* e )
1213 vassert(IS_VALID_PFX(pfx));
1217 offsetIReg( sz, lo3bits | (getRexB(pfx) << 3),
1218 toBool(sz==1 && !haveREX(pfx)) ),
1232 static UInt gregOfRexRM ( Prefix pfx, UChar mod_reg_rm )
1235 reg += (pfx & PFX_REXR) ? 8 : 0;
1244 static UInt eregOfRexRM ( Prefix pfx, UChar mod_reg_rm )
1249 rm += (pfx & PFX_REXB) ? 8 : 0;
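For reference, gregOfRexRM and eregOfRexRM above widen the 3-bit reg and rm fields of the ModRM byte to full 4-bit amd64 register numbers by pulling in REX.R and REX.B respectively. A plain-C sketch of that widening, with rexR/rexB standing in for the getRexR/getRexB results:

static unsigned gregOfRexRM_sketch ( int rexR, unsigned char modrm ) {
   unsigned reg = (modrm >> 3) & 7;   /* ModRM bits 5..3 */
   return reg + (rexR ? 8u : 0u);     /* REX.R supplies bit 3 */
}

static unsigned eregOfRexRM_sketch ( int rexB, unsigned char modrm ) {
   unsigned rm = modrm & 7;           /* ModRM bits 2..0 */
   return rm + (rexB ? 8u : 0u);      /* REX.B supplies bit 3 */
}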
1260 static UInt offsetIRegG ( Int sz, Prefix pfx, UChar mod_reg_rm )
1264 vassert(IS_VALID_PFX(pfx));
1266 reg = gregOfRexRM( pfx, mod_reg_rm );
1267 return offsetIReg( sz, reg, toBool(sz == 1 && !haveREX(pfx)) );
1271 IRExpr* getIRegG ( Int sz, Prefix pfx, UChar mod_reg_rm )
1276 IRExpr_Get( offsetIRegG( sz, pfx, mod_reg_rm ),
1279 return IRExpr_Get( offsetIRegG( sz, pfx, mod_reg_rm ),
1285 void putIRegG ( Int sz, Prefix pfx, UChar mod_reg_rm, IRExpr* e )
1291 stmt( IRStmt_Put( offsetIRegG( sz, pfx, mod_reg_rm ), e ) );
1295 const HChar* nameIRegG ( Int sz, Prefix pfx, UChar mod_reg_rm )
1297 return nameIReg( sz, gregOfRexRM(pfx,mod_reg_rm),
1298 toBool(sz==1 && !haveREX(pfx)) );
1303 IRExpr* getIRegV ( Int sz, Prefix pfx )
1308 IRExpr_Get( offsetIReg( sz, getVexNvvvv(pfx), False ),
1311 return IRExpr_Get( offsetIReg( sz, getVexNvvvv(pfx), False ),
1317 void putIRegV ( Int sz, Prefix pfx, IRExpr* e )
1323 stmt( IRStmt_Put( offsetIReg( sz, getVexNvvvv(pfx), False ), e ) );
1327 const HChar* nameIRegV ( Int sz, Prefix pfx )
1329 return nameIReg( sz, getVexNvvvv(pfx), False );
1339 static UInt offsetIRegE ( Int sz, Prefix pfx, UChar mod_reg_rm )
1343 vassert(IS_VALID_PFX(pfx));
1345 reg = eregOfRexRM( pfx, mod_reg_rm );
1346 return offsetIReg( sz, reg, toBool(sz == 1 && !haveREX(pfx)) );
1350 IRExpr* getIRegE ( Int sz, Prefix pfx, UChar mod_reg_rm )
1355 IRExpr_Get( offsetIRegE( sz, pfx, mod_reg_rm ),
1358 return IRExpr_Get( offsetIRegE( sz, pfx, mod_reg_rm ),
1364 void putIRegE ( Int sz, Prefix pfx, UChar mod_reg_rm, IRExpr* e )
1370 stmt( IRStmt_Put( offsetIRegE( sz, pfx, mod_reg_rm ), e ) );
1374 const HChar* nameIRegE ( Int sz, Prefix pfx, UChar mod_reg_rm )
1376 return nameIReg( sz, eregOfRexRM(pfx,mod_reg_rm),
1377 toBool(sz==1 && !haveREX(pfx)) );
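The recurring toBool(sz==1 && !haveREX(pfx)) argument above flags the legacy byte-register quirk: with no REX prefix, 8-bit register encodings 4..7 name %ah/%ch/%dh/%bh, while with any REX prefix present they name %spl/%bpl/%sil/%dil. A sketch of that naming rule; the strings here are illustrative, not the exact output of nameIReg.

static const char* name8_sketch ( unsigned regno,
                                  int irregular /* sz==1 && no REX */ ) {
   static const char* rexstyle[16] =
      { "al","cl","dl","bl","spl","bpl","sil","dil",
        "r8b","r9b","r10b","r11b","r12b","r13b","r14b","r15b" };
   static const char* legacy[8] =
      { "al","cl","dl","bl","ah","ch","dh","bh" };
   return irregular ? legacy[regno & 7] : rexstyle[regno & 15];
}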
2371 const HChar* segRegTxt ( Prefix pfx )
2373 if (pfx & PFX_CS) return "%cs:";
2374 if (pfx & PFX_DS) return "%ds:";
2375 if (pfx & PFX_ES) return "%es:";
2376 if (pfx & PFX_FS) return "%fs:";
2377 if (pfx & PFX_GS) return "%gs:";
2378 if (pfx & PFX_SS) return "%ss:";
2389 Prefix pfx, IRExpr* virtual )
2392 if (haveASO(pfx))
2400 if (pfx & PFX_FS) {
2410 if (pfx & PFX_GS) {
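handleAddrOverrides (lines 2389..2410 above) post-processes a computed effective address: a 0x67 address-size override truncates it to 32 bits, and an %fs/%gs segment override adds the corresponding segment base taken from the guest state. A rough stand-alone sketch of that shape, with fs_base/gs_base standing in for the guest-state fields the real code reads:

#include <stdint.h>

static uint64_t handleAddrOverrides_sketch ( uint64_t ea,
                                             int haveASO, int haveFS, int haveGS,
                                             uint64_t fs_base, uint64_t gs_base ) {
   if (haveASO) ea &= 0xFFFFFFFFull;   /* 32-bit address-size override */
   if (haveFS)  ea += fs_base;         /* %fs: override */
   if (haveGS)  ea += gs_base;         /* %gs: override */
   return ea;
}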
2513 const VexAbiInfo* vbi, Prefix pfx, Long delta,
2537 DIS(buf, "%s(%s)", segRegTxt(pfx), nameIRegRexB(8,pfx,rm));
2540 handleAddrOverrides(vbi, pfx, getIRegRexB(8,pfx,rm)));
2551 DIS(buf, "%s(%s)", segRegTxt(pfx), nameIRegRexB(8,pfx,rm));
2553 DIS(buf, "%s%lld(%s)", segRegTxt(pfx), d, nameIRegRexB(8,pfx,rm));
2557 handleAddrOverrides(vbi, pfx,
2558 binop(Iop_Add64,getIRegRexB(8,pfx,rm),mkU64(d))));
2568 DIS(buf, "%s%lld(%s)", segRegTxt(pfx), d, nameIRegRexB(8,pfx,rm));
2571 handleAddrOverrides(vbi, pfx,
2572 binop(Iop_Add64,getIRegRexB(8,pfx,rm),mkU64(d))));
2586 DIS(buf, "%s%lld(%%rip)", segRegTxt(pfx), d);
2596 handleAddrOverrides(vbi, pfx,
2624 Bool index_is_SP = toBool(index_r == R_RSP && 0==getRexX(pfx));
2629 DIS(buf, "%s(%s,%s)", segRegTxt(pfx),
2630 nameIRegRexB(8,pfx,base_r),
2631 nameIReg64rexX(pfx,index_r));
2633 DIS(buf, "%s(%s,%s,%d)", segRegTxt(pfx),
2634 nameIRegRexB(8,pfx,base_r),
2635 nameIReg64rexX(pfx,index_r), 1<<scale);
2640 handleAddrOverrides(vbi, pfx,
2642 getIRegRexB(8,pfx,base_r),
2643 binop(Iop_Shl64, getIReg64rexX(pfx,index_r),
2649 DIS(buf, "%s%lld(,%s,%d)", segRegTxt(pfx), d,
2650 nameIReg64rexX(pfx,index_r), 1<<scale);
2654 handleAddrOverrides(vbi, pfx,
2656 binop(Iop_Shl64, getIReg64rexX(pfx,index_r),
2662 DIS(buf, "%s(%s)", segRegTxt(pfx), nameIRegRexB(8,pfx,base_r));
2665 handleAddrOverrides(vbi, pfx, getIRegRexB(8,pfx,base_r)));
2670 DIS(buf, "%s%lld", segRegTxt(pfx), d);
2673 handleAddrOverrides(vbi, pfx, mkU64(d)));
2695 if (index_r == R_RSP && 0==getRexX(pfx)) {
2696 DIS(buf, "%s%lld(%s)", segRegTxt(pfx),
2697 d, nameIRegRexB(8,pfx,base_r));
2700 handleAddrOverrides(vbi, pfx,
2701 binop(Iop_Add64, getIRegRexB(8,pfx,base_r), mkU64(d)) ));
2704 DIS(buf, "%s%lld(%s,%s)", segRegTxt(pfx), d,
2705 nameIRegRexB(8,pfx,base_r),
2706 nameIReg64rexX(pfx,index_r));
2708 DIS(buf, "%s%lld(%s,%s,%d)", segRegTxt(pfx), d,
2709 nameIRegRexB(8,pfx,base_r),
2710 nameIReg64rexX(pfx,index_r), 1<<scale);
2715 handleAddrOverrides(vbi, pfx,
2718 getIRegRexB(8,pfx,base_r),
2720 getIReg64rexX(pfx,index_r), mkU8(scale))),
2742 if (index_r == R_RSP && 0==getRexX(pfx)) {
2743 DIS(buf, "%s%lld(%s)", segRegTxt(pfx),
2744 d, nameIRegRexB(8,pfx,base_r));
2747 handleAddrOverrides(vbi, pfx,
2748 binop(Iop_Add64, getIRegRexB(8,pfx,base_r), mkU64(d)) ));
2751 DIS(buf, "%s%lld(%s,%s)", segRegTxt(pfx), d,
2752 nameIRegRexB(8,pfx,base_r),
2753 nameIReg64rexX(pfx,index_r));
2755 DIS(buf, "%s%lld(%s,%s,%d)", segRegTxt(pfx), d,
2756 nameIRegRexB(8,pfx,base_r),
2757 nameIReg64rexX(pfx,index_r), 1<<scale);
2762 handleAddrOverrides(vbi, pfx,
2765 getIRegRexB(8,pfx,base_r),
2767 getIReg64rexX(pfx,index_r), mkU8(scale))),
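The SIB cases of disAMode above all generate IR for the same effective-address formula, ea = base + (index << scale) + disp, with the twist that an index encoding of %rsp with REX.X clear means "no index". A plain-C sketch of the arithmetic, taking register values directly rather than as IRExprs:

#include <stdint.h>

#define R_RSP 4

static uint64_t sib_ea_sketch ( uint64_t baseval, uint64_t indexval,
                                unsigned index_r, int rexX,
                                unsigned scale, int64_t disp ) {
   uint64_t ea = baseval + (uint64_t)disp;
   if (!(index_r == R_RSP && rexX == 0))   /* %rsp index + REX.X==0: no index */
      ea += indexval << scale;             /* scale is 0..3, i.e. x1/x2/x4/x8 */
   return ea;
}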
2785 const VexAbiInfo* vbi, Prefix pfx, Long delta,
2809 *rI = index_r | (getRexX(pfx) << 3);
2822 DIS(buf, "%s%lld(,%s)", segRegTxt(pfx), d, vindex);
2824 DIS(buf, "%s%lld(,%s,%d)", segRegTxt(pfx), d, vindex, 1<<scale);
2829 DIS(buf, "%s(%s,%s)", segRegTxt(pfx),
2830 nameIRegRexB(8,pfx,base_r), vindex);
2832 DIS(buf, "%s(%s,%s,%d)", segRegTxt(pfx),
2833 nameIRegRexB(8,pfx,base_r), vindex, 1<<scale);
2846 DIS(buf, "%s%lld(%s,%s)", segRegTxt(pfx), d,
2847 nameIRegRexB(8,pfx,base_r), vindex);
2849 DIS(buf, "%s%lld(%s,%s,%d)", segRegTxt(pfx), d,
2850 nameIRegRexB(8,pfx,base_r), vindex, 1<<scale);
2856 return disAMode_copy2tmp( getIRegRexB(8,pfx,base_r) );
2857 return disAMode_copy2tmp( binop(Iop_Add64, getIRegRexB(8,pfx,base_r),
2866 static UInt lengthAMode ( Prefix pfx, Long delta )
2978 Prefix pfx,
3028 && offsetIRegG(size,pfx,rm) == offsetIRegE(size,pfx,rm)) {
3029 putIRegG(size,pfx,rm, mkU(ty,0));
3032 assign( dst0, getIRegG(size,pfx,rm) );
3033 assign( src, getIRegE(size,pfx,rm) );
3038 putIRegG(size, pfx, rm, mkexpr(dst1));
3043 putIRegG(size, pfx, rm, mkexpr(dst1));
3047 putIRegG(size, pfx, rm, mkexpr(dst1));
3051 putIRegG(size, pfx, rm, mkexpr(dst1));
3059 putIRegG(size, pfx, rm, mkexpr(dst1));
3063 nameIRegE(size,pfx,rm),
3064 nameIRegG(size,pfx,rm));
3068 addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 );
3069 assign( dst0, getIRegG(size,pfx,rm) );
3075 putIRegG(size, pfx, rm, mkexpr(dst1));
3080 putIRegG(size, pfx, rm, mkexpr(dst1));
3096 putIRegG(size, pfx, rm, mkexpr(dst1));
3100 dis_buf, nameIRegG(size, pfx, rm));
3128 Prefix pfx,
3172 && offsetIRegG(size,pfx,rm) == offsetIRegE(size,pfx,rm)) {
3173 putIRegE(size,pfx,rm, mkU(ty,0));
3176 assign(dst0, getIRegE(size,pfx,rm));
3177 assign(src, getIRegG(size,pfx,rm));
3182 putIRegE(size, pfx, rm, mkexpr(dst1));
3187 putIRegE(size, pfx, rm, mkexpr(dst1));
3195 putIRegE(size, pfx, rm, mkexpr(dst1));
3199 nameIRegG(size,pfx,rm),
3200 nameIRegE(size,pfx,rm));
3206 addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 );
3208 assign(src, getIRegG(size,pfx,rm));
3211 if (haveLOCK(pfx)) {
3222 if (haveLOCK(pfx)) {
3234 if (haveLOCK(pfx)) {
3251 nameIRegG(size,pfx,rm), dis_buf);
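The haveLOCK(pfx) tests above (lines 3211/3222/3234) split the memory-destination case in two: an unLOCKed read-modify-write is a plain load, op, store, while a LOCKed one is expressed as an atomic compare-and-swap against the old value, with the instruction restarted if memory changed underneath. As a rough illustration of that shape only (the real code emits an IR CAS statement, not C11 atomics):

#include <stdatomic.h>
#include <stdint.h>

static void locked_add_sketch ( _Atomic uint64_t* mem, uint64_t src ) {
   uint64_t expected = atomic_load(mem);
   /* retry until the add is applied to an unchanged old value */
   while (!atomic_compare_exchange_weak(mem, &expected, expected + src))
      ;  /* 'expected' was refreshed with the current value; recompute and retry */
}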
3275 Prefix pfx,
3284 putIRegG(size, pfx, rm, getIRegE(size, pfx, rm));
3286 nameIRegE(size,pfx,rm),
3287 nameIRegG(size,pfx,rm));
3293 IRTemp addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 );
3294 putIRegG(size, pfx, rm, loadLE(szToITy(size), mkexpr(addr)));
3297 nameIRegG(size,pfx,rm));
3322 Prefix pfx,
3334 if (haveF2orF3(pfx)) { *ok = False; return delta0; }
3335 putIRegE(size, pfx, rm, getIRegG(size, pfx, rm));
3337 nameIRegG(size,pfx,rm),
3338 nameIRegE(size,pfx,rm));
3344 if (haveF2(pfx)) { *ok = False; return delta0; }
3346 IRTemp addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 );
3347 storeLE( mkexpr(addr), getIRegG(size, pfx, rm) );
3349 nameIRegG(size,pfx,rm),
3409 Prefix pfx,
3414 putIRegG(szd, pfx, rm,
3417 getIRegE(szs,pfx,rm)));
3421 nameIRegE(szs,pfx,rm),
3422 nameIRegG(szd,pfx,rm));
3430 IRTemp addr = disAMode ( &len, vbi, pfx, delta, dis_buf, 0 );
3431 putIRegG(szd, pfx, rm,
3439 nameIRegG(szd,pfx,rm));
3513 Prefix pfx,
3540 assign(dst0, getIRegE(sz,pfx,modrm));
3559 putIRegE(sz, pfx, modrm, mkexpr(dst1));
3564 nameIRegE(sz,pfx,modrm));
3566 addr = disAMode ( &len, vbi, pfx, delta, dis_buf, /*xtra*/d_sz );
3572 if (haveLOCK(pfx)) {
3583 if (haveLOCK(pfx)) {
3595 if (haveLOCK(pfx)) {
3623 Prefix pfx,
3643 assign(dst0, getIRegE(sz, pfx, modrm));
3646 addr = disAMode ( &len, vbi, pfx, delta, dis_buf, /*xtra*/d_sz );
3868 putIRegE(sz, pfx, modrm, mkexpr(dst1));
3876 vex_printf(", %s\n", nameIRegE(sz,pfx,modrm));
3897 Prefix pfx,
3918 if (haveF2orF3(pfx)) {
3925 if (haveF2orF3(pfx)) {
3926 if (haveF2andF3(pfx) || !haveLOCK(pfx)) {
3960 assign( t2, widenUto64(getIRegE(sz, pfx, modrm)) );
3964 src_val, nameIRegE(sz,pfx,modrm));
3967 t_addr = disAMode ( &len, vbi, pfx, delta, dis_buf, 1 );
3996 putIRegE(sz, pfx, modrm, narrowTo(ty, mkexpr(t2m)));
3998 if (haveLOCK(pfx)) {
4107 Prefix pfx, Int sz, Long delta, Bool* decode_OK )
4121 if (haveF2orF3(pfx)) goto unhandled;
4129 getIRegE(sz,pfx,modrm),
4134 nameIRegE(sz, pfx, modrm));
4142 putIRegE(sz, pfx, modrm,
4144 getIRegE(sz, pfx, modrm)));
4146 nameIRegE(sz, pfx, modrm));
4154 assign(src, getIRegE(sz, pfx, modrm));
4158 putIRegE(sz, pfx, modrm, mkexpr(dst1));
4159 DIP("neg%c %s\n", nameISize(sz), nameIRegE(sz, pfx, modrm));
4164 assign(src, getIRegE(sz,pfx,modrm));
4166 nameIRegE(sz,pfx,modrm) );
4171 assign(src, getIRegE(sz,pfx,modrm));
4173 nameIRegE(sz,pfx,modrm) );
4177 assign( t1, getIRegE(sz, pfx, modrm) );
4180 nameIRegE(sz, pfx, modrm));
4184 assign( t1, getIRegE(sz, pfx, modrm) );
4187 nameIRegE(sz, pfx, modrm));
4195 Bool validF2orF3 = haveF2orF3(pfx) ? False : True;
4197 && haveF2orF3(pfx) && !haveF2andF3(pfx) && haveLOCK(pfx)) {
4202 addr = disAMode ( &len, vbi, pfx, delta, dis_buf,
4230 if (haveLOCK(pfx)) {
4246 if (haveLOCK(pfx)) {
4285 Prefix pfx, Long delta, Bool* decode_OK )
4299 if (haveF2orF3(pfx)) goto unhandled;
4300 assign(t1, getIRegE(1, pfx, modrm));
4304 putIRegE(1, pfx, modrm, mkexpr(t2));
4309 putIRegE(1, pfx, modrm, mkexpr(t2));
4318 nameIRegE(1, pfx, modrm));
4321 Bool validF2orF3 = haveF2orF3(pfx) ? False : True;
4323 && haveF2orF3(pfx) && !haveF2andF3(pfx) && haveLOCK(pfx)) {
4328 IRTemp addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
4333 if (haveLOCK(pfx)) {
4343 if (haveLOCK(pfx)) {
4369 Prefix pfx, Int sz, Long delta,
4388 if (haveF2orF3(pfx)
4389 && ! (haveF2(pfx)
4392 assign(t1, getIRegE(sz,pfx,modrm));
4399 putIRegE(sz,pfx,modrm, mkexpr(t2));
4406 putIRegE(sz,pfx,modrm, mkexpr(t2));
4411 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */
4414 assign(t3, getIRegE(sz,pfx,modrm));
4427 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */
4430 assign(t3, getIRegE(sz,pfx,modrm));
4441 assign(t3, getIRegE(sz,pfx,modrm));
4458 nameIRegE(sz, pfx, modrm));
4461 Bool validF2orF3 = haveF2orF3(pfx) ? False : True;
4463 && haveF2orF3(pfx) && !haveF2andF3(pfx) && haveLOCK(pfx)) {
4466 && (haveF2(pfx) && !haveF3(pfx))) {
4471 addr = disAMode ( &len, vbi, pfx, delta, dis_buf, 0 );
4481 if (haveLOCK(pfx)) {
4493 if (haveLOCK(pfx)) {
4504 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */
4520 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */
4580 void dis_string_op( void (*dis_OP)( Int, IRTemp, Prefix pfx ),
4581 Int sz, const HChar* name, Prefix pfx )
4586 vassert(pfx == clearSegBits(pfx));
4588 dis_OP( sz, t_inc, pfx );
4593 void dis_MOVS ( Int sz, IRTemp t_inc, Prefix pfx )
4600 if (haveASO(pfx)) {
4612 if (haveASO(pfx)) {
4621 void dis_LODS ( Int sz, IRTemp t_inc, Prefix pfx )
4627 if (haveASO(pfx))
4635 if (haveASO(pfx))
4641 void dis_STOS ( Int sz, IRTemp t_inc, Prefix pfx )
4650 if (haveASO(pfx))
4658 if (haveASO(pfx))
4664 void dis_CMPS ( Int sz, IRTemp t_inc, Prefix pfx )
4673 if (haveASO(pfx)) {
4689 if (haveASO(pfx)) {
4698 void dis_SCAS ( Int sz, IRTemp t_inc, Prefix pfx )
4708 if (haveASO(pfx))
4718 if (haveASO(pfx))
4732 Prefix pfx )
4740 vassert(pfx == clearSegBits(pfx));
4742 if (haveASO(pfx)) {
4755 if (haveASO(pfx))
4761 dis_OP (sz, t_inc, pfx);
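The string-op fragments above (dis_MOVS and the dis_REP_op wrapper) express the usual semantics in IR: one MOVS step copies sz bytes from [%rsi] to [%rdi] and advances both pointers by +/-sz depending on DF, and REP repeats that while %rcx is non-zero, decrementing it each time. A plain-C sketch with the direction fixed to DF=0:

#include <stdint.h>
#include <string.h>

static void rep_movs_sketch ( uint8_t** rdi, uint8_t** rsi,
                              uint64_t* rcx, int sz ) {
   while (*rcx != 0) {
      memcpy(*rdi, *rsi, (size_t)sz);   /* one element of sz bytes */
      *rdi += sz;                       /* DF=0: addresses ascend */
      *rsi += sz;
      (*rcx)--;
   }
}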
4785 Prefix pfx,
4797 assign( tg, getIRegG(size, pfx, rm) );
4799 assign( te, getIRegE(size, pfx, rm) );
4801 IRTemp addr = disAMode( &alen, vbi, pfx, delta0, dis_buf, 0 );
4809 putIRegG(size, pfx, rm, mkexpr(resLo) );
4813 nameIRegE(size,pfx,rm),
4814 nameIRegG(size,pfx,rm));
4819 nameIRegG(size,pfx,rm));
4828 Prefix pfx,
4845 assign(te, getIRegE(size, pfx, rm));
4848 IRTemp addr = disAMode( &alen, vbi, pfx, delta, dis_buf,
4863 putIRegG(size, pfx, rm, mkexpr(resLo));
4867 ( epartIsReg(rm) ? nameIRegE(size,pfx,rm) : dis_buf ),
4868 nameIRegG(size,pfx,rm) );
5500 const VexAbiInfo* vbi, Prefix pfx, Long delta )
5519 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
5663 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
6193 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
6322 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
6501 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
6612 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
6645 if ( have66(pfx) ) {
6716 if ( have66(pfx) ) {
6726 if ( have66(pfx) ) {
6781 if ( have66(pfx) ) {
6876 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
6997 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
7175 Prefix pfx,
7281 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
7325 Prefix pfx, Long delta,
7345 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
7457 const VexAbiInfo* vbi, Prefix pfx, Int sz, Long delta )
7480 getIReg32(eregOfRexRM(pfx,modrm)) ) );
7482 nameIReg32(eregOfRexRM(pfx,modrm)),
7485 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
7502 getIReg64(eregOfRexRM(pfx,modrm)) );
7504 nameIReg64(eregOfRexRM(pfx,modrm)),
7507 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
7525 putIReg32( eregOfRexRM(pfx,modrm),
7529 nameIReg32(eregOfRexRM(pfx,modrm)));
7531 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
7544 putIReg64( eregOfRexRM(pfx,modrm),
7548 nameIReg64(eregOfRexRM(pfx,modrm)));
7550 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
7564 && /*ignore redundant REX.W*/!(sz==8 && haveNo66noF2noF3(pfx)))
7574 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
7585 && /*ignore redundant REX.W*/!(sz==8 && haveNo66noF2noF3(pfx)))
7595 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
7608 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "padd", True );
7614 && /*ignore redundant REX.W*/!(sz==8 && haveNo66noF2noF3(pfx)))
7616 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "padds", True );
7623 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "paddus", True );
7631 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "psub", True );
7638 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "psubs", True );
7645 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "psubus", True );
7651 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pmulhw", False );
7657 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pmullw", False );
7662 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pmaddwd", False );
7670 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pcmpeq", True );
7678 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pcmpgt", True );
7684 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "packssdw", False );
7690 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "packsswb", False );
7696 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "packuswb", False );
7703 && /*ignore redundant REX.W*/!(sz==8 && haveNo66noF2noF3(pfx)))
7705 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "punpckh", True );
7712 && /*ignore redundant REX.W*/!(sz==8 && haveNo66noF2noF3(pfx)))
7714 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "punpckl", True );
7720 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pand", False );
7726 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pandn", False );
7732 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "por", False );
7738 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pxor", False );
7742 delta = dis_MMX_shiftG_byE(vbi, pfx, delta, _name, _op); \
7813 assign( addr, handleAddrOverrides( vbi, pfx, getIReg64(R_RDI) ));
7895 Prefix pfx,
7937 assign( gsrc, getIRegG(sz, pfx, modrm) );
7941 assign( esrc, getIRegE(sz, pfx, modrm) );
7945 nameIRegG(sz, pfx, modrm), nameIRegE(sz, pfx, modrm));
7947 addr = disAMode ( &len, vbi, pfx, delta, dis_buf,
7955 nameIRegG(sz, pfx, modrm), dis_buf);
8051 putIRegE(sz, pfx, modrm, mkexpr(resTy));
8080 Prefix pfx, Int sz, Long delta, BtOp op,
8106 if (haveF2orF3(pfx)) {
8113 if (haveF2orF3(pfx)) {
8114 if (haveF2andF3(pfx) || !haveLOCK(pfx) || op == BtOpNone) {
8121 assign( t_bitno0, widenSto64(getIRegG(sz, pfx, modrm)) );
8146 storeLE( mkexpr(t_rsp), getIRegE(sz, pfx, modrm) );
8158 t_addr0 = disAMode ( &len, vbi, pfx, delta, dis_buf, 0 );
8209 if ((haveLOCK(pfx)) && !epartIsReg(modrm)) {
8254 putIRegE(sz, pfx, modrm, loadLE(szToITy(sz), mkexpr(t_rsp)) );
8259 nameBtOp(op), nameISize(sz), nameIRegG(sz, pfx, modrm),
8260 ( epartIsReg(modrm) ? nameIRegE(sz, pfx, modrm) : dis_buf ) );
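dis_bt_G_E above handles BT/BTS/BTR/BTC. For a memory operand the bit number taken from G is not reduced modulo the operand size: it selects a byte at addr + (bitno >> 3) and a bit within it, so the access can land well away from addr. The register form is reduced modulo the operand width, and the stores and reloads through the %rsp temporary visible above appear to be how this code funnels the register case through the same memory path. A sketch of the memory-form bit test only:

#include <stdint.h>

static int bt_mem_sketch ( const uint8_t* addr, int64_t bitno ) {
   const uint8_t* p = addr + (bitno >> 3);   /* byte holding the selected bit */
   return (*p >> (bitno & 7)) & 1;           /* this value becomes CF */
}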
8270 Prefix pfx, Int sz, Long delta, Bool fwds )
8289 assign( src, getIRegE(sz, pfx, modrm) );
8292 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
8299 ( isReg ? nameIRegE(sz, pfx, modrm) : dis_buf ),
8300 nameIRegG(sz, pfx, modrm));
8366 widenUto64( getIRegG( sz, pfx, modrm ) )
8379 putIRegG( sz, pfx, modrm, mkexpr(dst) );
8387 void codegen_xchg_rAX_Reg ( Prefix pfx, Int sz, UInt regLo3 )
8396 assign( t2, getIRegRexB(8, pfx, regLo3) );
8398 putIRegRexB(8, pfx, regLo3, mkexpr(t1) );
8401 assign( t2, getIRegRexB(4, pfx, regLo3) );
8403 putIRegRexB(4, pfx, regLo3, mkexpr(t1) );
8406 assign( t2, getIRegRexB(2, pfx, regLo3) );
8408 putIRegRexB(2, pfx, regLo3, mkexpr(t1) );
8412 nameIRegRexB(sz,pfx, regLo3));
8471 Prefix pfx,
8503 if (haveF2orF3(pfx)) {
8508 if (haveF2orF3(pfx)) {
8509 if (haveF2andF3(pfx) || !haveLOCK(pfx)) {
8518 assign( dest, getIRegE(size, pfx, rm) );
8520 assign( src, getIRegG(size, pfx, rm) );
8527 putIRegE(size, pfx, rm, mkexpr(dest2));
8529 nameIRegG(size,pfx,rm),
8530 nameIRegE(size,pfx,rm) );
8532 else if (!epartIsReg(rm) && !haveLOCK(pfx)) {
8534 addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 );
8537 assign( src, getIRegG(size, pfx, rm) );
8546 nameIRegG(size,pfx,rm), dis_buf);
8548 else if (!epartIsReg(rm) && haveLOCK(pfx)) {
8554 addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 );
8556 assign( src, getIRegG(size, pfx, rm) );
8567 nameIRegG(size,pfx,rm), dis_buf);
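dis_cmpxchg_G_E above implements CMPXCHG: the accumulator is compared with the destination; on a match ZF is set and the destination receives G, otherwise ZF is cleared and the accumulator receives the destination. The three-way split visible above (register destination / unLOCKed memory / LOCKed memory) only changes how that update is made atomic. The core semantics, as a sketch:

#include <stdint.h>

static int cmpxchg_sketch ( uint64_t* dest, uint64_t* acc, uint64_t src ) {
   if (*dest == *acc) {       /* ZF := 1, dest := src */
      *dest = src;
      return 1;
   } else {                   /* ZF := 0, acc := dest */
      *acc = *dest;
      return 0;
   }
}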
8595 Prefix pfx,
8609 assign( tmps, getIRegE(sz, pfx, rm) );
8610 assign( tmpd, getIRegG(sz, pfx, rm) );
8612 putIRegG( sz, pfx, rm,
8618 nameIRegE(sz,pfx,rm),
8619 nameIRegG(sz,pfx,rm));
8625 IRTemp addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 );
8627 assign( tmpd, getIRegG(sz, pfx, rm) );
8629 putIRegG( sz, pfx, rm,
8637 nameIRegG(sz,pfx,rm));
8646 Prefix pfx, Int sz, Long delta0 )
8670 assign( tmpd, getIRegE(sz, pfx, rm) );
8671 assign( tmpt0, getIRegG(sz, pfx, rm) );
8675 putIRegG(sz, pfx, rm, mkexpr(tmpd));
8676 putIRegE(sz, pfx, rm, mkexpr(tmpt1));
8678 nameISize(sz), nameIRegG(sz,pfx,rm), nameIRegE(sz,pfx,rm));
8682 else if (!epartIsReg(rm) && !haveLOCK(pfx)) {
8684 IRTemp addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 );
8686 assign( tmpt0, getIRegG(sz, pfx, rm) );
8691 putIRegG(sz, pfx, rm, mkexpr(tmpd));
8693 nameISize(sz), nameIRegG(sz,pfx,rm), dis_buf);
8697 else if (!epartIsReg(rm) && haveLOCK(pfx)) {
8699 IRTemp addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 );
8701 assign( tmpt0, getIRegG(sz, pfx, rm) );
8707 putIRegG(sz, pfx, rm, mkexpr(tmpd));
8709 nameISize(sz), nameIRegG(sz,pfx,rm), dis_buf);
8795 Prefix pfx,
8804 putIRegE(size, pfx, rm, mkU(szToITy(size), 0));
8805 DIP("mov %s,%s\n", nameSReg(gregOfRexRM(pfx, rm)),
8806 nameIRegE(size, pfx, rm));
8812 IRTemp addr = disAMode(&len, vbi, pfx, delta0, dis_buf, 0);
8814 DIP("mov %s,%s\n", nameSReg(gregOfRexRM(pfx, rm)),
8902 Prefix pfx, Long delta,
8913 = invertG ? unop(Iop_NotV128, getXMMReg(gregOfRexRM(pfx,rm)))
8914 : getXMMReg(gregOfRexRM(pfx,rm));
8917 gregOfRexRM(pfx,rm),
8921 getXMMReg(eregOfRexRM(pfx,rm)))
8923 getXMMReg(eregOfRexRM(pfx,rm)))
8926 nameXMMReg(eregOfRexRM(pfx,rm)),
8927 nameXMMReg(gregOfRexRM(pfx,rm)) );
8930 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
8932 gregOfRexRM(pfx,rm),
8942 nameXMMReg(gregOfRexRM(pfx,rm)) );
8952 Prefix pfx, Long delta,
8955 return dis_SSE_E_to_G_all_wrk( vbi, pfx, delta, opname, op, False );
8962 Prefix pfx, Long delta,
8965 return dis_SSE_E_to_G_all_wrk( vbi, pfx, delta, opname, op, True );
8972 Prefix pfx, Long delta,
8979 IRExpr* gpart = getXMMReg(gregOfRexRM(pfx,rm));
8981 putXMMReg( gregOfRexRM(pfx,rm),
8983 getXMMReg(eregOfRexRM(pfx,rm))) );
8985 nameXMMReg(eregOfRexRM(pfx,rm)),
8986 nameXMMReg(gregOfRexRM(pfx,rm)) );
8992 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
8995 putXMMReg( gregOfRexRM(pfx,rm),
8999 nameXMMReg(gregOfRexRM(pfx,rm)) );
9008 Prefix pfx, Long delta,
9015 IRExpr* gpart = getXMMReg(gregOfRexRM(pfx,rm));
9017 putXMMReg( gregOfRexRM(pfx,rm),
9019 getXMMReg(eregOfRexRM(pfx,rm))) );
9021 nameXMMReg(eregOfRexRM(pfx,rm)),
9022 nameXMMReg(gregOfRexRM(pfx,rm)) );
9028 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
9031 putXMMReg( gregOfRexRM(pfx,rm),
9035 nameXMMReg(gregOfRexRM(pfx,rm)) );
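The _lo32/_lo64 helpers above differ from the _all forms only in lane coverage: the binary op is applied to the low 32- or 64-bit lane of G and E, and the remaining lanes of G pass through unchanged. For example, ADDSS behaves like this sketch:

typedef struct { float f[4]; } XMMf32x4;

static void addss_sketch ( XMMf32x4* g, const XMMf32x4* e ) {
   g->f[0] = g->f[0] + e->f[0];   /* low lane only */
   /* g->f[1], g->f[2], g->f[3] are left unchanged */
}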
9045 Prefix pfx, Long delta,
9057 IRExpr* src = getXMMReg(eregOfRexRM(pfx,rm));
9061 putXMMReg( gregOfRexRM(pfx,rm), res );
9063 nameXMMReg(eregOfRexRM(pfx,rm)),
9064 nameXMMReg(gregOfRexRM(pfx,rm)) );
9067 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
9072 putXMMReg( gregOfRexRM(pfx,rm), res );
9075 nameXMMReg(gregOfRexRM(pfx,rm)) );
9085 Prefix pfx, Long delta,
9098 assign( oldG0, getXMMReg(gregOfRexRM(pfx,rm)) );
9104 getXMMRegLane32(eregOfRexRM(pfx,rm), 0)) );
9105 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) );
9107 nameXMMReg(eregOfRexRM(pfx,rm)),
9108 nameXMMReg(gregOfRexRM(pfx,rm)) );
9111 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
9116 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) );
9119 nameXMMReg(gregOfRexRM(pfx,rm)) );
9129 Prefix pfx, Long delta,
9142 assign( oldG0, getXMMReg(gregOfRexRM(pfx,rm)) );
9148 getXMMRegLane64(eregOfRexRM(pfx,rm), 0)) );
9149 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) );
9151 nameXMMReg(eregOfRexRM(pfx,rm)),
9152 nameXMMReg(gregOfRexRM(pfx,rm)) );
9155 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
9160 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) );
9163 nameXMMReg(gregOfRexRM(pfx,rm)) );
9175 Prefix pfx, Long delta,
9184 IRExpr* gpart = getXMMReg(gregOfRexRM(pfx,rm));
9187 epart = getXMMReg(eregOfRexRM(pfx,rm));
9189 nameXMMReg(eregOfRexRM(pfx,rm)),
9190 nameXMMReg(gregOfRexRM(pfx,rm)) );
9193 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
9197 nameXMMReg(gregOfRexRM(pfx,rm)) );
9200 putXMMReg( gregOfRexRM(pfx,rm),
9343 Prefix pfx, Long delta,
9364 assign( plain, binop(op, getXMMReg(gregOfRexRM(pfx,rm)),
9365 getXMMReg(eregOfRexRM(pfx,rm))) );
9369 nameXMMReg(eregOfRexRM(pfx,rm)),
9370 nameXMMReg(gregOfRexRM(pfx,rm)) );
9372 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
9381 getXMMReg(gregOfRexRM(pfx,rm)),
9394 nameXMMReg(gregOfRexRM(pfx,rm)) );
9398 putXMMReg( gregOfRexRM(pfx,rm),
9404 putXMMReg( gregOfRexRM(pfx,rm),
9408 putXMMReg( gregOfRexRM(pfx,rm), mkexpr(plain) );
9419 Prefix pfx, Long delta,
9432 assign( amt, getXMMRegLane64(eregOfRexRM(pfx,rm), 0) );
9434 nameXMMReg(eregOfRexRM(pfx,rm)),
9435 nameXMMReg(gregOfRexRM(pfx,rm)) );
9438 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
9442 nameXMMReg(gregOfRexRM(pfx,rm)) );
9445 assign( g0, getXMMReg(gregOfRexRM(pfx,rm)) );
9485 putXMMReg( gregOfRexRM(pfx,rm), mkexpr(g1) );
9493 ULong dis_SSE_shiftE_imm ( Prefix pfx,
9508 nameXMMReg(eregOfRexRM(pfx,rm)) );
9509 assign( e0, getXMMReg(eregOfRexRM(pfx,rm)) );
9540 putXMMReg( eregOfRexRM(pfx,rm), mkexpr(e1) );
10174 static Long dis_COMISD ( const VexAbiInfo* vbi, Prefix pfx,
10185 assign( argR, getXMMRegLane64F( eregOfRexRM(pfx,modrm),
10190 nameXMMReg(eregOfRexRM(pfx,modrm)),
10191 nameXMMReg(gregOfRexRM(pfx,modrm)) );
10193 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10199 nameXMMReg(gregOfRexRM(pfx,modrm)) );
10201 assign( argL, getXMMRegLane64F( gregOfRexRM(pfx,modrm),
10217 static Long dis_COMISS ( const VexAbiInfo* vbi, Prefix pfx,
10228 assign( argR, getXMMRegLane32F( eregOfRexRM(pfx,modrm),
10233 nameXMMReg(eregOfRexRM(pfx,modrm)),
10234 nameXMMReg(gregOfRexRM(pfx,modrm)) );
10236 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10242 nameXMMReg(gregOfRexRM(pfx,modrm)) );
10244 assign( argL, getXMMRegLane32F( gregOfRexRM(pfx,modrm),
10262 static Long dis_PSHUFD_32x4 ( const VexAbiInfo* vbi, Prefix pfx,
10273 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) );
10277 nameXMMReg(eregOfRexRM(pfx,modrm)),
10278 nameXMMReg(gregOfRexRM(pfx,modrm)));
10280 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf,
10287 nameXMMReg(gregOfRexRM(pfx,modrm)));
10303 (gregOfRexRM(pfx,modrm), mkexpr(dV));
10308 static Long dis_PSHUFD_32x8 ( const VexAbiInfo* vbi, Prefix pfx, Long delta )
10316 UInt rG = gregOfRexRM(pfx,modrm);
10318 UInt rE = eregOfRexRM(pfx,modrm);
10324 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf,
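dis_PSHUFD_32x4/_32x8 above decode PSHUFD, where the imm8 is four 2-bit lane selectors: destination lane i receives source lane (imm8 >> (2*i)) & 3, and the 256-bit form applies the same selection to each 128-bit half. A sketch of the 128-bit selection:

#include <stdint.h>

static void pshufd_sketch ( uint32_t dst[4], const uint32_t src[4], uint8_t imm8 ) {
   for (int i = 0; i < 4; i++)
      dst[i] = src[(imm8 >> (2*i)) & 3];
}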
10443 static Long dis_CVTxSD2SI ( const VexAbiInfo* vbi, Prefix pfx,
10457 assign(f64lo, getXMMRegLane64F(eregOfRexRM(pfx,modrm), 0));
10459 nameXMMReg(eregOfRexRM(pfx,modrm)),
10460 nameIReg(sz, gregOfRexRM(pfx,modrm),
10463 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10468 nameIReg(sz, gregOfRexRM(pfx,modrm),
10479 putIReg32( gregOfRexRM(pfx,modrm),
10483 putIReg64( gregOfRexRM(pfx,modrm),
10491 static Long dis_CVTxSS2SI ( const VexAbiInfo* vbi, Prefix pfx,
10505 assign(f32lo, getXMMRegLane32F(eregOfRexRM(pfx,modrm), 0));
10507 nameXMMReg(eregOfRexRM(pfx,modrm)),
10508 nameIReg(sz, gregOfRexRM(pfx,modrm),
10511 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10516 nameIReg(sz, gregOfRexRM(pfx,modrm),
10527 putIReg32( gregOfRexRM(pfx,modrm),
10533 putIReg64( gregOfRexRM(pfx,modrm),
10543 static Long dis_CVTPS2PD_128 ( const VexAbiInfo* vbi, Prefix pfx,
10552 UInt rG = gregOfRexRM(pfx,modrm);
10554 UInt rE = eregOfRexRM(pfx,modrm);
10561 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10578 static Long dis_CVTPS2PD_256 ( const VexAbiInfo* vbi, Prefix pfx,
10589 UInt rG = gregOfRexRM(pfx,modrm);
10591 UInt rE = eregOfRexRM(pfx,modrm);
10599 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10619 static Long dis_CVTPD2PS_128 ( const VexAbiInfo* vbi, Prefix pfx,
10626 UInt rG = gregOfRexRM(pfx,modrm);
10630 UInt rE = eregOfRexRM(pfx,modrm);
10636 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10664 static Long dis_CVTxPS2DQ_128 ( const VexAbiInfo* vbi, Prefix pfx,
10673 UInt rG = gregOfRexRM(pfx,modrm);
10677 UInt rE = eregOfRexRM(pfx,modrm);
10683 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10714 static Long dis_CVTxPS2DQ_256 ( const VexAbiInfo* vbi, Prefix pfx,
10723 UInt rG = gregOfRexRM(pfx,modrm);
10727 UInt rE = eregOfRexRM(pfx,modrm);
10733 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10766 static Long dis_CVTxPD2DQ_128 ( const VexAbiInfo* vbi, Prefix pfx,
10775 UInt rG = gregOfRexRM(pfx,modrm);
10779 UInt rE = eregOfRexRM(pfx,modrm);
10785 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10821 static Long dis_CVTxPD2DQ_256 ( const VexAbiInfo* vbi, Prefix pfx,
10830 UInt rG = gregOfRexRM(pfx,modrm);
10834 UInt rE = eregOfRexRM(pfx,modrm);
10840 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10875 static Long dis_CVTDQ2PS_128 ( const VexAbiInfo* vbi, Prefix pfx,
10884 UInt rG = gregOfRexRM(pfx,modrm);
10888 UInt rE = eregOfRexRM(pfx,modrm);
10894 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10923 static Long dis_CVTDQ2PS_256 ( const VexAbiInfo* vbi, Prefix pfx,
10932 UInt rG = gregOfRexRM(pfx,modrm);
10936 UInt rE = eregOfRexRM(pfx,modrm);
10941 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10976 static Long dis_PMOVMSKB_128 ( const VexAbiInfo* vbi, Prefix pfx,
10981 UInt rE = eregOfRexRM(pfx,modrm);
10982 UInt rG = gregOfRexRM(pfx,modrm);
10995 static Long dis_PMOVMSKB_256 ( const VexAbiInfo* vbi, Prefix pfx,
11000 UInt rE = eregOfRexRM(pfx,modrm);
11001 UInt rG = gregOfRexRM(pfx,modrm);
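dis_PMOVMSKB_128/_256 above decode PMOVMSKB, which packs the most-significant bit of every byte lane of the source vector into the low bits of a general-purpose register. A sketch of the 16-byte form:

#include <stdint.h>

static uint32_t pmovmskb_sketch ( const uint8_t src[16] ) {
   uint32_t mask = 0;
   for (int i = 0; i < 16; i++)
      mask |= (uint32_t)(src[i] >> 7) << i;   /* bit i := sign bit of byte i */
   return mask;
}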
11450 static Long dis_PSHUFxW_128 ( const VexAbiInfo* vbi, Prefix pfx,
11457 UInt rG = gregOfRexRM(pfx,modrm);
11467 UInt rE = eregOfRexRM(pfx,modrm);
11475 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
11505 static Long dis_PSHUFxW_256 ( const VexAbiInfo* vbi, Prefix pfx,
11512 UInt rG = gregOfRexRM(pfx,modrm);
11521 UInt rE = eregOfRexRM(pfx,modrm);
11528 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
11552 static Long dis_PEXTRW_128_EregOnly_toG ( const VexAbiInfo* vbi, Prefix pfx,
11557 UInt rG = gregOfRexRM(pfx,modrm);
11563 UInt rE = eregOfRexRM(pfx,modrm);
11591 static Long dis_CVTDQ2PD_128 ( const VexAbiInfo* vbi, Prefix pfx,
11599 UInt rG = gregOfRexRM(pfx,modrm);
11602 UInt rE = eregOfRexRM(pfx,modrm);
11607 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
11626 static Long dis_STMXCSR ( const VexAbiInfo* vbi, Prefix pfx,
11634 vassert(gregOfRexRM(pfx,modrm) == 3); /* ditto */
11636 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
11658 static Long dis_LDMXCSR ( const VexAbiInfo* vbi, Prefix pfx,
11666 vassert(gregOfRexRM(pfx,modrm) == 2); /* ditto */
11671 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
11843 Prefix pfx, Long delta, Int sz )
11858 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
11892 Prefix pfx, Long delta, Int sz )
11902 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12102 Prefix pfx, Long delta, Int sz )
12113 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12172 Prefix pfx, Long delta, Int sz )
12183 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12265 static Long dis_MASKMOVDQU ( const VexAbiInfo* vbi, Prefix pfx,
12274 UInt rG = gregOfRexRM(pfx,modrm);
12275 UInt rE = eregOfRexRM(pfx,modrm);
12277 assign( addr, handleAddrOverrides( vbi, pfx, getIReg64(R_RDI) ));
12286 getXMMRegLane64( eregOfRexRM(pfx,modrm), 1 ),
12289 getXMMRegLane64( eregOfRexRM(pfx,modrm), 0 ),
12308 static Long dis_MOVMSKPS_128 ( const VexAbiInfo* vbi, Prefix pfx,
12312 UInt rG = gregOfRexRM(pfx,modrm);
12313 UInt rE = eregOfRexRM(pfx,modrm);
12340 static Long dis_MOVMSKPS_256 ( const VexAbiInfo* vbi, Prefix pfx, Long delta )
12343 UInt rG = gregOfRexRM(pfx,modrm);
12344 UInt rE = eregOfRexRM(pfx,modrm);
12390 static Long dis_MOVMSKPD_128 ( const VexAbiInfo* vbi, Prefix pfx,
12394 UInt rG = gregOfRexRM(pfx,modrm);
12395 UInt rE = eregOfRexRM(pfx,modrm);
12412 static Long dis_MOVMSKPD_256 ( const VexAbiInfo* vbi, Prefix pfx, Long delta )
12415 UInt rG = gregOfRexRM(pfx,modrm);
12416 UInt rE = eregOfRexRM(pfx,modrm);
12448 Prefix pfx, Int sz, Long deltaIN,
12471 if (have66noF2noF3(pfx)
12476 putXMMReg( gregOfRexRM(pfx,modrm),
12477 getXMMReg( eregOfRexRM(pfx,modrm) ));
12478 DIP("movupd %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
12479 nameXMMReg(gregOfRexRM(pfx,modrm)));
12482 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12483 putXMMReg( gregOfRexRM(pfx,modrm),
12486 nameXMMReg(gregOfRexRM(pfx,modrm)));
12494 if (haveF2no66noF3(pfx)
12498 putXMMRegLane64( gregOfRexRM(pfx,modrm), 0,
12499 getXMMRegLane64( eregOfRexRM(pfx,modrm), 0 ));
12500 DIP("movsd %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
12501 nameXMMReg(gregOfRexRM(pfx,modrm)));
12504 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12505 putXMMReg( gregOfRexRM(pfx,modrm), mkV128(0) );
12506 putXMMRegLane64( gregOfRexRM(pfx,modrm), 0,
12509 nameXMMReg(gregOfRexRM(pfx,modrm)));
12516 if (haveF3no66noF2(pfx)
12520 putXMMRegLane32( gregOfRexRM(pfx,modrm), 0,
12521 getXMMRegLane32( eregOfRexRM(pfx,modrm), 0 ));
12522 DIP("movss %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
12523 nameXMMReg(gregOfRexRM(pfx,modrm)));
12526 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12527 putXMMReg( gregOfRexRM(pfx,modrm), mkV128(0) );
12528 putXMMRegLane32( gregOfRexRM(pfx,modrm), 0,
12531 nameXMMReg(gregOfRexRM(pfx,modrm)));
12537 if (haveNo66noF2noF3(pfx)
12541 putXMMReg( gregOfRexRM(pfx,modrm),
12542 getXMMReg( eregOfRexRM(pfx,modrm) ));
12543 DIP("movups %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
12544 nameXMMReg(gregOfRexRM(pfx,modrm)));
12547 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12548 putXMMReg( gregOfRexRM(pfx,modrm),
12551 nameXMMReg(gregOfRexRM(pfx,modrm)));
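From here on the listing sits inside the big 0F-opcode dispatch, where the 66/F2/F3 tests select between different instructions sharing one opcode byte. The 0F 10 case just above is typical: the same opcode decodes as movupd, movsd, movss or movups depending on which single prefix (or none) is present. A sketch of that selection:

static const char* opc_0F10_name_sketch ( int have66, int haveF2, int haveF3 ) {
   if ( have66 && !haveF2 && !haveF3) return "movupd";
   if (!have66 &&  haveF2 && !haveF3) return "movsd";
   if (!have66 && !haveF2 &&  haveF3) return "movss";
   if (!have66 && !haveF2 && !haveF3) return "movups";
   return "(other prefix combinations fail to decode)";
}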
12561 if (haveF2no66noF3(pfx)
12565 putXMMRegLane64( eregOfRexRM(pfx,modrm), 0,
12566 getXMMRegLane64( gregOfRexRM(pfx,modrm), 0 ));
12567 DIP("movsd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
12568 nameXMMReg(eregOfRexRM(pfx,modrm)));
12571 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12573 getXMMRegLane64(gregOfRexRM(pfx,modrm), 0) );
12574 DIP("movsd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
12582 if (haveF3no66noF2(pfx) && sz == 4) {
12587 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12589 getXMMRegLane32(gregOfRexRM(pfx,modrm), 0) );
12590 DIP("movss %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
12597 if (have66noF2noF3(pfx)
12601 putXMMReg( eregOfRexRM(pfx,modrm),
12602 getXMMReg( gregOfRexRM(pfx,modrm) ) );
12603 DIP("movupd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
12604 nameXMMReg(eregOfRexRM(pfx,modrm)));
12607 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12608 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) );
12609 DIP("movupd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
12616 if (haveNo66noF2noF3(pfx)
12622 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12623 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) );
12624 DIP("movups %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
12635 if (have66noF2noF3(pfx)
12641 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12643 putXMMRegLane64( gregOfRexRM(pfx,modrm),
12647 dis_buf, nameXMMReg( gregOfRexRM(pfx,modrm) ));
12653 if (haveNo66noF2noF3(pfx)
12658 putXMMRegLane64( gregOfRexRM(pfx,modrm),
12660 getXMMRegLane64( eregOfRexRM(pfx,modrm), 1 ));
12661 DIP("movhlps %s, %s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
12662 nameXMMReg(gregOfRexRM(pfx,modrm)));
12664 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12666 putXMMRegLane64( gregOfRexRM(pfx,modrm), 0/*lower lane*/,
12669 dis_buf, nameXMMReg( gregOfRexRM(pfx,modrm) ));
12677 if (haveNo66noF2noF3(pfx)
12681 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12684 getXMMRegLane64( gregOfRexRM(pfx,modrm),
12686 DIP("movlps %s, %s\n", nameXMMReg( gregOfRexRM(pfx,modrm) ),
12694 if (have66noF2noF3(pfx)
12698 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12701 getXMMRegLane64( gregOfRexRM(pfx,modrm),
12703 DIP("movlpd %s, %s\n", nameXMMReg( gregOfRexRM(pfx,modrm) ),
12716 if (haveNo66noF2noF3(pfx) && sz == 4) {
12721 UInt rG = gregOfRexRM(pfx,modrm);
12724 UInt rE = eregOfRexRM(pfx,modrm);
12730 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12743 if (have66noF2noF3(pfx)
12749 UInt rG = gregOfRexRM(pfx,modrm);
12752 UInt rE = eregOfRexRM(pfx,modrm);
12758 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12774 if (have66noF2noF3(pfx)
12780 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12782 putXMMRegLane64( gregOfRexRM(pfx,modrm), 1/*upper lane*/,
12785 nameXMMReg( gregOfRexRM(pfx,modrm) ));
12791 if (haveNo66noF2noF3(pfx)
12796 putXMMRegLane64( gregOfRexRM(pfx,modrm), 1/*upper lane*/,
12797 getXMMRegLane64( eregOfRexRM(pfx,modrm), 0 ) );
12798 DIP("movhps %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
12799 nameXMMReg(gregOfRexRM(pfx,modrm)));
12801 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12803 putXMMRegLane64( gregOfRexRM(pfx,modrm), 1/*upper lane*/,
12806 nameXMMReg( gregOfRexRM(pfx,modrm) ));
12814 if (haveNo66noF2noF3(pfx)
12818 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12821 getXMMRegLane64( gregOfRexRM(pfx,modrm),
12823 DIP("movhps %s,%s\n", nameXMMReg( gregOfRexRM(pfx,modrm) ),
12831 if (have66noF2noF3(pfx)
12835 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12838 getXMMRegLane64( gregOfRexRM(pfx,modrm),
12840 DIP("movhpd %s,%s\n", nameXMMReg( gregOfRexRM(pfx,modrm) ),
12853 if (haveNo66noF2noF3(pfx)
12862 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12880 if (have66noF2noF3(pfx)
12884 putXMMReg( gregOfRexRM(pfx,modrm),
12885 getXMMReg( eregOfRexRM(pfx,modrm) ));
12886 DIP("movapd %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
12887 nameXMMReg(gregOfRexRM(pfx,modrm)));
12890 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12892 putXMMReg( gregOfRexRM(pfx,modrm),
12895 nameXMMReg(gregOfRexRM(pfx,modrm)));
12901 if (haveNo66noF2noF3(pfx)
12905 putXMMReg( gregOfRexRM(pfx,modrm),
12906 getXMMReg( eregOfRexRM(pfx,modrm) ));
12907 DIP("movaps %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
12908 nameXMMReg(gregOfRexRM(pfx,modrm)));
12911 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12913 putXMMReg( gregOfRexRM(pfx,modrm),
12916 nameXMMReg(gregOfRexRM(pfx,modrm)));
12925 if (haveNo66noF2noF3(pfx)
12929 putXMMReg( eregOfRexRM(pfx,modrm),
12930 getXMMReg( gregOfRexRM(pfx,modrm) ));
12931 DIP("movaps %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
12932 nameXMMReg(eregOfRexRM(pfx,modrm)));
12935 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12937 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) );
12938 DIP("movaps %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
12945 if (have66noF2noF3(pfx)
12949 putXMMReg( eregOfRexRM(pfx,modrm),
12950 getXMMReg( gregOfRexRM(pfx,modrm) ) );
12951 DIP("movapd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
12952 nameXMMReg(eregOfRexRM(pfx,modrm)));
12955 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12957 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) );
12958 DIP("movapd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
12969 if (haveNo66noF2noF3(pfx) && sz == 4) {
12981 pfx,modrm)));
12983 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12987 nameXMMReg(gregOfRexRM(pfx,modrm)) );
12993 gregOfRexRM(pfx,modrm), 0,
13000 gregOfRexRM(pfx,modrm), 1,
13011 if (haveF3no66noF2(pfx) && (sz == 4 || sz == 8)) {
13018 assign( arg32, getIReg32(eregOfRexRM(pfx,modrm)) );
13020 DIP("cvtsi2ss %s,%s\n", nameIReg32(eregOfRexRM(pfx,modrm)),
13021 nameXMMReg(gregOfRexRM(pfx,modrm)));
13023 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
13027 nameXMMReg(gregOfRexRM(pfx,modrm)) );
13030 gregOfRexRM(pfx,modrm), 0,
13038 assign( arg64, getIReg64(eregOfRexRM(pfx,modrm)) );
13040 DIP("cvtsi2ssq %s,%s\n", nameIReg64(eregOfRexRM(pfx,modrm)),
13041 nameXMMReg(gregOfRexRM(pfx,modrm)));
13043 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
13047 nameXMMReg(gregOfRexRM(pfx,modrm)) );
13050 gregOfRexRM(pfx,modrm), 0,
13061 if (haveF2no66noF3(pfx) && (sz == 4 || sz == 8)) {
13066 assign( arg32, getIReg32(eregOfRexRM(pfx,modrm)) );
13068 DIP("cvtsi2sdl %s,%s\n", nameIReg32(eregOfRexRM(pfx,modrm)),
13069 nameXMMReg(gregOfRexRM(pfx,modrm)));
13071 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
13075 nameXMMReg(gregOfRexRM(pfx,modrm)) );
13077 putXMMRegLane64F( gregOfRexRM(pfx,modrm), 0,
13084 assign( arg64, getIReg64(eregOfRexRM(pfx,modrm)) );
13086 DIP("cvtsi2sdq %s,%s\n", nameIReg64(eregOfRexRM(pfx,modrm)),
13087 nameXMMReg(gregOfRexRM(pfx,modrm)));
13089 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
13093 nameXMMReg(gregOfRexRM(pfx,modrm)) );
13096 gregOfRexRM(pfx,modrm),
13108 if (have66noF2noF3(pfx) && sz == 2) {
13123 nameXMMReg(gregOfRexRM(pfx,modrm)));
13125 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
13129 nameXMMReg(gregOfRexRM(pfx,modrm)) );
13133 gregOfRexRM(pfx,modrm), 0,
13138 gregOfRexRM(pfx,modrm), 1,
13149 if ( (haveNo66noF2noF3(pfx) && sz == 4)
13150 || (have66noF2noF3(pfx) && sz == 2) ) {
13153 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
13155 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) );
13158 nameXMMReg(gregOfRexRM(pfx,modrm)));
13172 if (haveNo66noF2noF3(pfx) && sz == 4) {
13184 assign(f32lo, getXMMRegLane32F(eregOfRexRM(pfx,modrm), 0));
13185 assign(f32hi, getXMMRegLane32F(eregOfRexRM(pfx,modrm), 1));
13187 nameXMMReg(eregOfRexRM(pfx,modrm)),
13190 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
13234 if (haveF3no66noF2(pfx) && (sz == 4 || sz == 8)) {
13235 delta = dis_CVTxSS2SI( vbi, pfx, delta, False/*!isAvx*/, opc, sz);
13250 if (haveF2no66noF3(pfx) && (sz == 4 || sz == 8)) {
13251 delta = dis_CVTxSD2SI( vbi, pfx, delta, False/*!isAvx*/, opc, sz);
13258 if (have66noF2noF3(pfx) && sz == 2) {
13270 assign(f64lo, getXMMRegLane64F(eregOfRexRM(pfx,modrm), 0));
13271 assign(f64hi, getXMMRegLane64F(eregOfRexRM(pfx,modrm), 1));
13273 nameXMMReg(eregOfRexRM(pfx,modrm)),
13276 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
13310 if (have66noF2noF3(pfx) && sz == 2) {
13311 delta = dis_COMISD( vbi, pfx, delta, False/*!isAvx*/, opc );
13316 if (haveNo66noF2noF3(pfx) && sz == 4) {
13317 delta = dis_COMISS( vbi, pfx, delta, False/*!isAvx*/, opc );
13325 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8)
13345 delta = dis_MOVMSKPS_128( vbi, pfx, delta, False/*!isAvx*/ );
13350 if (have66noF2noF3(pfx) && (sz == 2 || sz == 8)) {
13356 delta = dis_MOVMSKPD_128( vbi, pfx, delta, False/*!isAvx*/ );
13363 if (haveF3no66noF2(pfx) && sz == 4) {
13364 delta = dis_SSE_E_to_G_unary_lo32( vbi, pfx, delta,
13369 if (haveNo66noF2noF3(pfx) && sz == 4) {
13370 delta = dis_SSE_E_to_G_unary_all( vbi, pfx, delta,
13375 if (haveF2no66noF3(pfx) && sz == 4) {
13376 delta = dis_SSE_E_to_G_unary_lo64( vbi, pfx, delta,
13381 if (have66noF2noF3(pfx) && sz == 2) {
13382 delta = dis_SSE_E_to_G_unary_all( vbi, pfx, delta,
13390 if (haveF3no66noF2(pfx) && sz == 4) {
13391 delta = dis_SSE_E_to_G_unary_lo32( vbi, pfx, delta,
13396 if (haveNo66noF2noF3(pfx) && sz == 4) {
13397 delta = dis_SSE_E_to_G_unary_all( vbi, pfx, delta,
13405 if (haveF3no66noF2(pfx) && sz == 4) {
13406 delta = dis_SSE_E_to_G_unary_lo32( vbi, pfx, delta,
13411 if (haveNo66noF2noF3(pfx) && sz == 4) {
13412 delta = dis_SSE_E_to_G_unary_all( vbi, pfx, delta,
13420 if (haveNo66noF2noF3(pfx) && sz == 4) {
13421 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "andps", Iop_AndV128 );
13425 if (have66noF2noF3(pfx) && sz == 2) {
13426 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "andpd", Iop_AndV128 );
13433 if (haveNo66noF2noF3(pfx) && sz == 4) {
13434 delta = dis_SSE_E_to_G_all_invG( vbi, pfx, delta, "andnps",
13439 if (have66noF2noF3(pfx) && sz == 2) {
13440 delta = dis_SSE_E_to_G_all_invG( vbi, pfx, delta, "andnpd",
13448 if (haveNo66noF2noF3(pfx) && sz == 4) {
13449 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "orps", Iop_OrV128 );
13453 if (have66noF2noF3(pfx) && sz == 2) {
13454 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "orpd", Iop_OrV128 );
13461 if (have66noF2noF3(pfx) && sz == 2) {
13462 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "xorpd", Iop_XorV128 );
13466 if (haveNo66noF2noF3(pfx) && sz == 4) {
13467 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "xorps", Iop_XorV128 );
13474 if (haveNo66noF2noF3(pfx) && sz == 4) {
13475 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "addps", Iop_Add32Fx4 );
13479 if (haveF3no66noF2(pfx) && sz == 4) {
13480 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "addss", Iop_Add32F0x4 );
13484 if (haveF2no66noF3(pfx)
13486 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "addsd", Iop_Add64F0x2 );
13490 if (have66noF2noF3(pfx)
13492 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "addpd", Iop_Add64Fx2 );
13499 if (haveF2no66noF3(pfx)
13501 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "mulsd", Iop_Mul64F0x2 );
13505 if (haveF3no66noF2(pfx) && sz == 4) {
13506 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "mulss", Iop_Mul32F0x4 );
13510 if (haveNo66noF2noF3(pfx) && sz == 4) {
13511 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "mulps", Iop_Mul32Fx4 );
13515 if (have66noF2noF3(pfx)
13517 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "mulpd", Iop_Mul64Fx2 );
13525 if (haveNo66noF2noF3(pfx)
13527 delta = dis_CVTPS2PD_128( vbi, pfx, delta, False/*!isAvx*/ );
13532 if (haveF3no66noF2(pfx) && sz == 4) {
13538 assign(f32lo, getXMMRegLane32F(eregOfRexRM(pfx,modrm), 0));
13539 DIP("cvtss2sd %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
13540 nameXMMReg(gregOfRexRM(pfx,modrm)));
13542 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
13546 nameXMMReg(gregOfRexRM(pfx,modrm)));
13549 putXMMRegLane64F( gregOfRexRM(pfx,modrm), 0,
13556 if (haveF2no66noF3(pfx) && sz == 4) {
13563 assign(f64lo, getXMMRegLane64F(eregOfRexRM(pfx,modrm), 0));
13564 DIP("cvtsd2ss %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
13565 nameXMMReg(gregOfRexRM(pfx,modrm)));
13567 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
13571 nameXMMReg(gregOfRexRM(pfx,modrm)));
13576 gregOfRexRM(pfx,modrm), 0,
13587 if (have66noF2noF3(pfx) && sz == 2) {
13588 delta = dis_CVTPD2PS_128( vbi, pfx, delta, False/*!isAvx*/ );
13598 if ( (have66noF2noF3(pfx) && sz == 2)
13599 || (haveF3no66noF2(pfx) && sz == 4) ) {
13601 delta = dis_CVTxPS2DQ_128( vbi, pfx, delta, False/*!isAvx*/, r2zero );
13606 if (haveNo66noF2noF3(pfx) && sz == 4) {
13607 delta = dis_CVTDQ2PS_128( vbi, pfx, delta, False/*!isAvx*/ );
13614 if (haveF3no66noF2(pfx) && sz == 4) {
13615 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "subss", Iop_Sub32F0x4 );
13619 if (haveF2no66noF3(pfx)
13621 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "subsd", Iop_Sub64F0x2 );
13625 if (haveNo66noF2noF3(pfx) && sz == 4) {
13626 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "subps", Iop_Sub32Fx4 );
13630 if (have66noF2noF3(pfx) && sz == 2) {
13631 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "subpd", Iop_Sub64Fx2 );
13638 if (haveNo66noF2noF3(pfx) && sz == 4) {
13639 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "minps", Iop_Min32Fx4 );
13643 if (haveF3no66noF2(pfx) && sz == 4) {
13644 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "minss", Iop_Min32F0x4 );
13648 if (haveF2no66noF3(pfx)
13650 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "minsd", Iop_Min64F0x2 );
13654 if (have66noF2noF3(pfx) && sz == 2) {
13655 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "minpd", Iop_Min64Fx2 );
13662 if (haveF2no66noF3(pfx) && sz == 4) {
13663 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "divsd", Iop_Div64F0x2 );
13667 if (haveNo66noF2noF3(pfx) && sz == 4) {
13668 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "divps", Iop_Div32Fx4 );
13672 if (haveF3no66noF2(pfx) && sz == 4) {
13673 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "divss", Iop_Div32F0x4 );
13677 if (have66noF2noF3(pfx) && sz == 2) {
13678 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "divpd", Iop_Div64Fx2 );
13685 if (haveNo66noF2noF3(pfx) && sz == 4) {
13686 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "maxps", Iop_Max32Fx4 );
13690 if (haveF3no66noF2(pfx) && sz == 4) {
13691 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "maxss", Iop_Max32F0x4 );
13695 if (haveF2no66noF3(pfx)
13697 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "maxsd", Iop_Max64F0x2 );
13701 if (have66noF2noF3(pfx) && sz == 2) {
13702 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "maxpd", Iop_Max64Fx2 );
13709 if (have66noF2noF3(pfx) && sz == 2) {
13710 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13719 if (have66noF2noF3(pfx) && sz == 2) {
13720 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13729 if (have66noF2noF3(pfx) && sz == 2) {
13730 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13739 if (have66noF2noF3(pfx) && sz == 2) {
13740 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13749 if (have66noF2noF3(pfx) && sz == 2) {
13750 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13758 if (have66noF2noF3(pfx) && sz == 2) {
13759 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13767 if (have66noF2noF3(pfx) && sz == 2) {
13768 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13776 if (have66noF2noF3(pfx) && sz == 2) {
13777 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13786 if (have66noF2noF3(pfx) && sz == 2) {
13787 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13796 if (have66noF2noF3(pfx) && sz == 2) {
13797 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13806 if (have66noF2noF3(pfx) && sz == 2) {
13807 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13816 if (have66noF2noF3(pfx) && sz == 2) {
13817 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13826 if (have66noF2noF3(pfx) && sz == 2) {
13827 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13836 if (have66noF2noF3(pfx) && sz == 2) {
13837 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13849 if (have66noF2noF3(pfx)) {
13857 gregOfRexRM(pfx,modrm),
13858 unop( Iop_32UtoV128, getIReg32(eregOfRexRM(pfx,modrm)) )
13860 DIP("movd %s, %s\n", nameIReg32(eregOfRexRM(pfx,modrm)),
13861 nameXMMReg(gregOfRexRM(pfx,modrm)));
13864 gregOfRexRM(pfx,modrm),
13865 unop( Iop_64UtoV128, getIReg64(eregOfRexRM(pfx,modrm)) )
13867 DIP("movq %s, %s\n", nameIReg64(eregOfRexRM(pfx,modrm)),
13868 nameXMMReg(gregOfRexRM(pfx,modrm)));
13871 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
13874 gregOfRexRM(pfx,modrm),
13880 nameXMMReg(gregOfRexRM(pfx,modrm)));
13887 if (have66noF2noF3(pfx)
13892 putXMMReg( gregOfRexRM(pfx,modrm),
13893 getXMMReg( eregOfRexRM(pfx,modrm) ));
13894 DIP("movdqa %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
13895 nameXMMReg(gregOfRexRM(pfx,modrm)));
13898 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
13900 putXMMReg( gregOfRexRM(pfx,modrm),
13903 nameXMMReg(gregOfRexRM(pfx,modrm)));
13908 if (haveF3no66noF2(pfx) && sz == 4) {
13912 putXMMReg( gregOfRexRM(pfx,modrm),
13913 getXMMReg( eregOfRexRM(pfx,modrm) ));
13914 DIP("movdqu %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
13915 nameXMMReg(gregOfRexRM(pfx,modrm)));
13918 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
13919 putXMMReg( gregOfRexRM(pfx,modrm),
13922 nameXMMReg(gregOfRexRM(pfx,modrm)));
13931 if (have66noF2noF3(pfx) && sz == 2) {
13932 delta = dis_PSHUFD_32x4( vbi, pfx, delta, False/*!writesYmm*/);
13937 if (haveNo66noF2noF3(pfx) && sz == 4) {
13953 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf,
13975 if (haveF2no66noF3(pfx) && sz == 4) {
13976 delta = dis_PSHUFxW_128( vbi, pfx, delta,
13982 if (haveF3no66noF2(pfx) && sz == 4) {
13983 delta = dis_PSHUFxW_128( vbi, pfx, delta,
13991 if (have66noF2noF3(pfx) && sz == 2
13994 delta = dis_SSE_shiftE_imm( pfx, delta, "psrlw", Iop_ShrN16x8 );
13998 if (have66noF2noF3(pfx) && sz == 2
14001 delta = dis_SSE_shiftE_imm( pfx, delta, "psraw", Iop_SarN16x8 );
14005 if (have66noF2noF3(pfx) && sz == 2
14008 delta = dis_SSE_shiftE_imm( pfx, delta, "psllw", Iop_ShlN16x8 );
14015 if (have66noF2noF3(pfx) && sz == 2
14018 delta = dis_SSE_shiftE_imm( pfx, delta, "psrld", Iop_ShrN32x4 );
14022 if (have66noF2noF3(pfx) && sz == 2
14025 delta = dis_SSE_shiftE_imm( pfx, delta, "psrad", Iop_SarN32x4 );
14029 if (have66noF2noF3(pfx) && sz == 2
14032 delta = dis_SSE_shiftE_imm( pfx, delta, "pslld", Iop_ShlN32x4 );
14040 if (have66noF2noF3(pfx) && sz == 2
14044 Int reg = eregOfRexRM(pfx,getUChar(delta));
14054 if (have66noF2noF3(pfx) && sz == 2
14058 Int reg = eregOfRexRM(pfx,getUChar(delta));
14068 if (have66noF2noF3(pfx) && sz == 2
14071 delta = dis_SSE_shiftE_imm( pfx, delta, "psrlq", Iop_ShrN64x2 );
14075 if (have66noF2noF3(pfx) && sz == 2
14078 delta = dis_SSE_shiftE_imm( pfx, delta, "psllq", Iop_ShlN64x2 );
14085 if (have66noF2noF3(pfx) && sz == 2) {
14086 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14094 if (have66noF2noF3(pfx) && sz == 2) {
14095 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14103 if (have66noF2noF3(pfx) && sz == 2) {
14104 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14113 if (haveF3no66noF2(pfx)
14117 putXMMRegLane64( gregOfRexRM(pfx,modrm), 0,
14118 getXMMRegLane64( eregOfRexRM(pfx,modrm), 0 ));
14120 putXMMRegLane64( gregOfRexRM(pfx,modrm), 1, mkU64(0) );
14121 DIP("movsd %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
14122 nameXMMReg(gregOfRexRM(pfx,modrm)));
14125 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
14126 putXMMReg( gregOfRexRM(pfx,modrm), mkV128(0) );
14127 putXMMRegLane64( gregOfRexRM(pfx,modrm), 0,
14130 nameXMMReg(gregOfRexRM(pfx,modrm)));
14137 if (have66noF2noF3(pfx) && (sz == 2 || sz == 8)) {
14143 putIReg32( eregOfRexRM(pfx,modrm),
14144 getXMMRegLane32(gregOfRexRM(pfx,modrm), 0) );
14145 DIP("movd %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
14146 nameIReg32(eregOfRexRM(pfx,modrm)));
14148 putIReg64( eregOfRexRM(pfx,modrm),
14149 getXMMRegLane64(gregOfRexRM(pfx,modrm), 0) );
14150 DIP("movq %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
14151 nameIReg64(eregOfRexRM(pfx,modrm)));
14154 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
14158 ? getXMMRegLane32(gregOfRexRM(pfx,modrm),0)
14159 : getXMMRegLane64(gregOfRexRM(pfx,modrm),0) );
14161 nameXMMReg(gregOfRexRM(pfx,modrm)), dis_buf);
14169 if (haveF3no66noF2(pfx) && sz == 4) {
14174 putXMMReg( eregOfRexRM(pfx,modrm),
14175 getXMMReg(gregOfRexRM(pfx,modrm)) );
14176 DIP("movdqu %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
14177 nameXMMReg(eregOfRexRM(pfx,modrm)));
14179 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
14181 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) );
14182 DIP("movdqu %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), dis_buf);
14187 if (have66noF2noF3(pfx) && sz == 2) {
14191 putXMMReg( eregOfRexRM(pfx,modrm),
14192 getXMMReg(gregOfRexRM(pfx,modrm)) );
14193 DIP("movdqa %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
14194 nameXMMReg(eregOfRexRM(pfx,modrm)));
14196 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
14199 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) );
14200 DIP("movdqa %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), dis_buf);
14208 if (haveNo66noF2noF3(pfx)
14221 if (haveNo66noF2noF3(pfx)
14235 if (haveNo66noF2noF3(pfx)
14246 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
14265 if (haveNo66noF2noF3(pfx)
14268 delta = dis_STMXCSR(vbi, pfx, delta, False/*!isAvx*/);
14272 if (haveNo66noF2noF3(pfx)
14275 delta = dis_LDMXCSR(vbi, pfx, delta, False/*!isAvx*/);
14279 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8)
14281 && gregOfRexRM(pfx,getUChar(delta)) == 0) {
14282 delta = dis_FXSAVE(vbi, pfx, delta, sz);
14286 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8)
14288 && gregOfRexRM(pfx,getUChar(delta)) == 1) {
14289 delta = dis_FXRSTOR(vbi, pfx, delta, sz);
14293 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8)
14295 && gregOfRexRM(pfx,getUChar(delta)) == 4
14297 delta = dis_XSAVE(vbi, pfx, delta, sz);
14301 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8)
14303 && gregOfRexRM(pfx,getUChar(delta)) == 5
14305 delta = dis_XRSTOR(vbi, pfx, delta, sz);
14312 if (haveNo66noF2noF3(pfx) && sz == 4) {
14314 delta = dis_SSE_cmp_E_to_G( vbi, pfx, delta, "cmpps", True, 4 );
14318 if (haveF3no66noF2(pfx) && sz == 4) {
14320 delta = dis_SSE_cmp_E_to_G( vbi, pfx, delta, "cmpss", False, 4 );
14324 if (haveF2no66noF3(pfx) && sz == 4) {
14326 delta = dis_SSE_cmp_E_to_G( vbi, pfx, delta, "cmpsd", False, 8 );
14330 if (have66noF2noF3(pfx) && sz == 2) {
14332 delta = dis_SSE_cmp_E_to_G( vbi, pfx, delta, "cmppd", True, 8 );
14339 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8)) {
14342 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
14343 storeLE( mkexpr(addr), getIRegG(sz, pfx, modrm) );
14345 nameIRegG(sz, pfx, modrm));
14357 if (haveNo66noF2noF3(pfx)
14373 assign(t4, getIReg16(eregOfRexRM(pfx,modrm)));
14377 nameIReg16(eregOfRexRM(pfx,modrm)),
14380 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
14401 if (have66noF2noF3(pfx)
14406 UInt rG = gregOfRexRM(pfx,modrm);
14408 UInt rE = eregOfRexRM(pfx,modrm);
14415 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf,
14435 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8)) {
14451 putIReg64(gregOfRexRM(pfx,modrm), unop(Iop_16Uto64, mkexpr(t5)));
14453 putIReg32(gregOfRexRM(pfx,modrm), unop(Iop_16Uto32, mkexpr(t5)));
14457 sz==8 ? nameIReg64(gregOfRexRM(pfx,modrm))
14458 : nameIReg32(gregOfRexRM(pfx,modrm))
14470 if (have66noF2noF3(pfx)
14473 delta = dis_PEXTRW_128_EregOnly_toG( vbi, pfx, delta,
14482 if (haveNo66noF2noF3(pfx) && sz == 4) {
14487 UInt rG = gregOfRexRM(pfx,modrm);
14490 UInt rE = eregOfRexRM(pfx,modrm);
14496 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
14503 putXMMReg( gregOfRexRM(pfx,modrm), mkexpr(res) );
14507 if (have66noF2noF3(pfx) && sz == 2) {
14513 assign( dV, getXMMReg(gregOfRexRM(pfx,modrm)) );
14516 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) );
14520 nameXMMReg(eregOfRexRM(pfx,modrm)),
14521 nameXMMReg(gregOfRexRM(pfx,modrm)));
14523 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
14529 nameXMMReg(gregOfRexRM(pfx,modrm)));
14533 putXMMReg( gregOfRexRM(pfx,modrm), mkexpr(res) );
14540 if (have66noF2noF3(pfx) && sz == 2) {
14541 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psrlw", Iop_ShrN16x8 );
14548 if (have66noF2noF3(pfx) && sz == 2) {
14549 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psrld", Iop_ShrN32x4 );
14556 if (have66noF2noF3(pfx) && sz == 2) {
14557 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psrlq", Iop_ShrN64x2 );
14564 if (have66noF2noF3(pfx) && sz == 2) {
14565 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14571 if (haveNo66noF2noF3(pfx) && sz == 4) {
14574 vbi, pfx, delta, opc, "paddq", False );
14581 if (have66noF2noF3(pfx) && sz == 2) {
14582 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14591 if (haveF3no66noF2(pfx) && sz == 4) {
14595 putXMMReg( gregOfRexRM(pfx,modrm),
14598 nameXMMReg(gregOfRexRM(pfx,modrm)));
14606 if (have66noF2noF3(pfx)
14613 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
14615 getXMMRegLane64( gregOfRexRM(pfx,modrm), 0 ));
14616 DIP("movq %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), dis_buf );
14622 if (haveF2no66noF3(pfx) && sz == 4) {
14627 getXMMRegLane64( eregOfRexRM(pfx,modrm), 0 ));
14628 DIP("movdq2q %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
14642 if (have66noF2noF3(pfx)
14645 delta = dis_PMOVMSKB_128( vbi, pfx, delta, False/*!isAvx*/ );
14652 if (haveNo66noF2noF3(pfx)
14661 putIReg32(gregOfRexRM(pfx,modrm), mkexpr(t1));
14663 nameIReg32(gregOfRexRM(pfx,modrm)));
14673 if (have66noF2noF3(pfx) && sz == 2) {
14674 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14682 if (have66noF2noF3(pfx) && sz == 2) {
14683 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14692 if (haveNo66noF2noF3(pfx) && sz == 4) {
14695 vbi, pfx, delta, opc, "pminub", False );
14699 if (have66noF2noF3(pfx) && sz == 2) {
14700 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14708 if (have66noF2noF3(pfx) && sz == 2) {
14709 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "pand", Iop_AndV128 );
14716 if (have66noF2noF3(pfx) && sz == 2) {
14717 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14725 if (have66noF2noF3(pfx) && sz == 2) {
14726 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14735 if (haveNo66noF2noF3(pfx) && sz == 4) {
14738 vbi, pfx, delta, opc, "pmaxub", False );
14742 if (have66noF2noF3(pfx) && sz == 2) {
14743 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14751 if (have66noF2noF3(pfx) && sz == 2) {
14752 delta = dis_SSE_E_to_G_all_invG( vbi, pfx, delta, "pandn", Iop_AndV128 );
14760 if (haveNo66noF2noF3(pfx) && sz == 4) {
14763 vbi, pfx, delta, opc, "pavgb", False );
14767 if (have66noF2noF3(pfx) && sz == 2) {
14768 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14776 if (have66noF2noF3(pfx) && sz == 2) {
14777 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psraw", Iop_SarN16x8 );
14784 if (have66noF2noF3(pfx) && sz == 2) {
14785 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psrad", Iop_SarN32x4 );
14793 if (haveNo66noF2noF3(pfx) && sz == 4) {
14796 vbi, pfx, delta, opc, "pavgw", False );
14800 if (have66noF2noF3(pfx) && sz == 2) {
14801 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14810 if (haveNo66noF2noF3(pfx) && sz == 4) {
14813 vbi, pfx, delta, opc, "pmuluh", False );
14817 if (have66noF2noF3(pfx) && sz == 2) {
14818 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14826 if (have66noF2noF3(pfx) && sz == 2) {
14827 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14839 if ( (haveF2no66noF3(pfx) && sz == 4)
14840 || (have66noF2noF3(pfx) && sz == 2) ) {
14841 delta = dis_CVTxPD2DQ_128( vbi, pfx, delta, False/*!isAvx*/,
14847 if (haveF3no66noF2(pfx) && sz == 4) {
14848 delta = dis_CVTDQ2PD_128(vbi, pfx, delta, False/*!isAvx*/);
14860 if (haveNo66noF2noF3(pfx) && sz == 4) {
14864 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
14874 if (have66noF2noF3(pfx) && sz == 2) {
14877 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
14879 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) );
14881 nameXMMReg(gregOfRexRM(pfx,modrm)));
14891 if (have66noF2noF3(pfx) && sz == 2) {
14892 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14900 if (have66noF2noF3(pfx) && sz == 2) {
14901 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14910 if (haveNo66noF2noF3(pfx) && sz == 4) {
14913 vbi, pfx, delta, opc, "pminsw", False );
14917 if (have66noF2noF3(pfx) && sz == 2) {
14918 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14926 if (have66noF2noF3(pfx) && sz == 2) {
14927 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "por", Iop_OrV128 );
14934 if (have66noF2noF3(pfx) && sz == 2) {
14935 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14943 if (have66noF2noF3(pfx) && sz == 2) {
14944 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14953 if (haveNo66noF2noF3(pfx) && sz == 4) {
14956 vbi, pfx, delta, opc, "pmaxsw", False );
14960 if (have66noF2noF3(pfx) && sz == 2) {
14961 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14969 if (have66noF2noF3(pfx) && sz == 2) {
14970 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "pxor", Iop_XorV128 );
14977 if (have66noF2noF3(pfx) && sz == 2) {
14978 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psllw", Iop_ShlN16x8 );
14985 if (have66noF2noF3(pfx) && sz == 2) {
14986 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "pslld", Iop_ShlN32x4 );
14993 if (have66noF2noF3(pfx) && sz == 2) {
14994 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psllq", Iop_ShlN64x2 );
15003 if (have66noF2noF3(pfx) && sz == 2) {
15007 UInt rG = gregOfRexRM(pfx,modrm);
15010 UInt rE = eregOfRexRM(pfx,modrm);
15015 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15026 if (haveNo66noF2noF3(pfx) && sz == 4) {
15042 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15060 if (have66noF2noF3(pfx) && sz == 2) {
15064 UInt rG = gregOfRexRM(pfx,modrm);
15066 UInt rE = eregOfRexRM(pfx,modrm);
15071 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15085 if (haveNo66noF2noF3(pfx) && sz == 4) {
15088 vbi, pfx, delta, opc, "psadbw", False );
15093 if (have66noF2noF3(pfx) && sz == 2) {
15097 UInt rG = gregOfRexRM(pfx,modrm);
15099 pfx,modrm);
15104 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15119 if (haveNo66noF2noF3(pfx) && sz == 4) {
15121 delta = dis_MMX( &ok, vbi, pfx, sz, delta-1 );
15125 if (have66noF2noF3(pfx) && sz == 2 && epartIsReg(getUChar(delta))) {
15126 delta = dis_MASKMOVDQU( vbi, pfx, delta, False/*!isAvx*/ );
15133 if (have66noF2noF3(pfx) && sz == 2) {
15134 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
15142 if (have66noF2noF3(pfx) && sz == 2) {
15143 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
15151 if (have66noF2noF3(pfx) && sz == 2) {
15152 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
15160 if (have66noF2noF3(pfx) && sz == 2) {
15161 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
15167 if (haveNo66noF2noF3(pfx) && sz == 4) {
15170 vbi, pfx, delta, opc, "psubq", False );
15177 if (have66noF2noF3(pfx) && sz == 2) {
15178 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
15186 if (have66noF2noF3(pfx) && sz == 2) {
15187 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
15195 if (have66noF2noF3(pfx) && sz == 2) {
15196 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
15223 static Long dis_MOVDDUP_128 ( const VexAbiInfo* vbi, Prefix pfx,
15232 UInt rG = gregOfRexRM(pfx,modrm);
15234 UInt rE = eregOfRexRM(pfx,modrm);
15241 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15253 static Long dis_MOVDDUP_256 ( const VexAbiInfo* vbi, Prefix pfx,
15262 UInt rG = gregOfRexRM(pfx,modrm);
15264 UInt rE = eregOfRexRM(pfx,modrm);
15270 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15285 static Long dis_MOVSxDUP_128 ( const VexAbiInfo* vbi, Prefix pfx,
15293 UInt rG = gregOfRexRM(pfx,modrm);
15297 UInt rE = eregOfRexRM(pfx,modrm);
15303 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15319 static Long dis_MOVSxDUP_256 ( const VexAbiInfo* vbi, Prefix pfx,
15327 UInt rG = gregOfRexRM(pfx,modrm);
15331 UInt rE = eregOfRexRM(pfx,modrm);
15337 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15400 Prefix pfx, Int sz, Long deltaIN )
15417 if (haveF3no66noF2(pfx) && sz == 4) {
15418 delta = dis_MOVSxDUP_128( vbi, pfx, delta, False/*!isAvx*/,
15424 if (haveF2no66noF3(pfx)
15426 delta = dis_MOVDDUP_128( vbi, pfx, delta, False/*!isAvx*/ );
15434 if (haveF3no66noF2(pfx) && sz == 4) {
15435 delta = dis_MOVSxDUP_128( vbi, pfx, delta, False/*!isAvx*/,
15445 if (haveF2no66noF3(pfx) && sz == 4) {
15451 UInt rG = gregOfRexRM(pfx,modrm);
15453 UInt rE = eregOfRexRM(pfx,modrm);
15458 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15470 if (have66noF2noF3(pfx) && sz == 2) {
15476 UInt rG = gregOfRexRM(pfx,modrm);
15478 UInt rE = eregOfRexRM(pfx,modrm);
15483 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15497 if (have66noF2noF3(pfx) && sz == 2) {
15501 UInt rG = gregOfRexRM(pfx,modrm);
15503 UInt rE = eregOfRexRM(pfx,modrm);
15508 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15519 if (haveF2no66noF3(pfx) && sz == 4) {
15523 UInt rG = gregOfRexRM(pfx,modrm);
15527 UInt rE = eregOfRexRM(pfx,modrm);
15532 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15546 if (haveF2no66noF3(pfx) && sz == 4) {
15551 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15552 putXMMReg( gregOfRexRM(pfx,modrm),
15555 nameXMMReg(gregOfRexRM(pfx,modrm)));
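/* Illustrative sketch, not part of the decoder above: the dis_MOVDDUP_* and
   dis_MOVSxDUP_* helpers matched earlier in this listing implement the SSE3
   duplication patterns.  The following standalone code shows those patterns
   on plain arrays; all names here are hypothetical. */
#include <stdint.h>

/* movddup: copy the low 64-bit lane into both lanes of the result. */
static void sketch_movddup ( uint64_t dst[2], const uint64_t src[2] )
{
   dst[0] = src[0];
   dst[1] = src[0];
}

/* movshdup duplicates the odd 32-bit lanes (1,1,3,3);
   movsldup duplicates the even ones (0,0,2,2). */
static void sketch_movsxdup ( uint32_t dst[4], const uint32_t src[4],
                              int isHighHalves )
{
   int base = isHighHalves ? 1 : 0;
   dst[0] = src[base];
   dst[1] = src[base];
   dst[2] = src[base + 2];
   dst[3] = src[base + 2];
}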
15689 static Long dis_PHADD_128 ( const VexAbiInfo* vbi, Prefix pfx, Long delta,
15706 UInt rG = gregOfRexRM(pfx,modrm);
15707 UInt rV = isAvx ? getVexNvvvv(pfx) : rG;
15726 UInt rE = eregOfRexRM(pfx,modrm);
15732 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15763 static Long dis_PHADD_256 ( const VexAbiInfo* vbi, Prefix pfx, Long delta,
15778 UInt rG = gregOfRexRM(pfx,modrm);
15779 UInt rV = getVexNvvvv(pfx);
15798 UInt rE = eregOfRexRM(pfx,modrm);
15803 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15881 Prefix pfx, Int sz, Long deltaIN )
15897 if (have66noF2noF3(pfx)
15903 assign( dV, getXMMReg(gregOfRexRM(pfx,modrm)) );
15906 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) );
15908 DIP("pshufb %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
15909 nameXMMReg(gregOfRexRM(pfx,modrm)));
15911 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15916 nameXMMReg(gregOfRexRM(pfx,modrm)));
15920 putXMMReg(gregOfRexRM(pfx,modrm), mkexpr(res));
15924 if (haveNo66noF2noF3(pfx) && sz == 4) {
15938 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15981 if (have66noF2noF3(pfx)
15983 delta = dis_PHADD_128( vbi, pfx, delta, False/*!isAvx*/, opc );
15999 if (haveNo66noF2noF3(pfx) && sz == 4) {
16032 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
16053 if (have66noF2noF3(pfx)
16058 UInt rG = gregOfRexRM(pfx,modrm);
16063 UInt rE = eregOfRexRM(pfx,modrm);
16068 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
16080 if (haveNo66noF2noF3(pfx) && sz == 4) {
16098 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
16136 if (have66noF2noF3(pfx)
16155 assign( dV, getXMMReg(gregOfRexRM(pfx,modrm)) );
16158 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) );
16160 DIP("psign%s %s,%s\n", str, nameXMMReg(eregOfRexRM(pfx,modrm)),
16161 nameXMMReg(gregOfRexRM(pfx,modrm)));
16163 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
16168 nameXMMReg(gregOfRexRM(pfx,modrm)));
16177 gregOfRexRM(pfx,modrm),
16188 if (haveNo66noF2noF3(pfx) && sz == 4) {
16211 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
16229 if (have66noF2noF3(pfx)
16239 assign( dV, getXMMReg(gregOfRexRM(pfx,modrm)) );
16242 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) );
16244 DIP("pmulhrsw %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
16245 nameXMMReg(gregOfRexRM(pfx,modrm)));
16247 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
16252 nameXMMReg(gregOfRexRM(pfx,modrm)));
16261 gregOfRexRM(pfx,modrm),
16271 if (haveNo66noF2noF3(pfx) && sz == 4) {
16285 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
16306 if (have66noF2noF3(pfx)
16321 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) );
16323 DIP("pabs%s %s,%s\n", str, nameXMMReg(eregOfRexRM(pfx,modrm)),
16324 nameXMMReg(gregOfRexRM(pfx,modrm)));
16326 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
16331 nameXMMReg(gregOfRexRM(pfx,modrm)));
16334 putXMMReg( gregOfRexRM(pfx,modrm),
16341 if (haveNo66noF2noF3(pfx) && sz == 4) {
16362 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
16400 Prefix pfx, Int sz, Long deltaIN )
16417 if (have66noF2noF3(pfx)
16423 assign( dV, getXMMReg(gregOfRexRM(pfx,modrm)) );
16426 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) );
16430 nameXMMReg(eregOfRexRM(pfx,modrm)),
16431 nameXMMReg(gregOfRexRM(pfx,modrm)));
16433 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
16440 nameXMMReg(gregOfRexRM(pfx,modrm)));
16444 putXMMReg( gregOfRexRM(pfx,modrm), mkexpr(res) );
16448 if (haveNo66noF2noF3(pfx) && sz == 4) {
16465 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
16527 Prefix pfx, Int sz, Long deltaIN )
16546 if (haveF3noF2(pfx) /* so both 66 and REX.W are possibilities */
16552 assign(src, getIRegE(sz, pfx, modrm));
16554 DIP("popcnt%c %s, %s\n", nameISize(sz), nameIRegE(sz, pfx, modrm),
16555 nameIRegG(sz, pfx, modrm));
16557 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0);
16561 nameIRegG(sz, pfx, modrm));
16565 putIRegG(sz, pfx, modrm, mkexpr(result));
16590 if (haveF3noF2(pfx) /* so both 66 and REX.W are possibilities */
16597 assign(src, getIRegE(sz, pfx, modrm));
16599 DIP("tzcnt%c %s, %s\n", nameISize(sz), nameIRegE(sz, pfx, modrm),
16600 nameIRegG(sz, pfx, modrm));
16602 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0);
16606 nameIRegG(sz, pfx, modrm));
16610 putIRegG(sz, pfx, modrm, mkexpr(res));
16651 if (haveF3noF2(pfx) /* so both 66 and REX.W are possibilities */
16658 assign(src, getIRegE(sz, pfx, modrm));
16660 DIP("lzcnt%c %s, %s\n", nameISize(sz), nameIRegE(sz, pfx, modrm),
16661 nameIRegG(sz, pfx, modrm));
16663 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0);
16667 nameIRegG(sz, pfx, modrm));
16671 putIRegG(sz, pfx, modrm, mkexpr(res));
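/* Illustrative sketch, not part of the decoder above: the POPCNT, TZCNT and
   LZCNT cases just matched all follow the convention that a zero source
   yields the operand width in bits.  A standalone 32-bit sketch of those
   semantics, with hypothetical names; this is not the IR the decoder emits. */
#include <stdint.h>

static unsigned sketch_popcnt32 ( uint32_t x )
{
   unsigned n = 0;
   while (x) { n += x & 1; x >>= 1; }
   return n;
}

/* tzcnt/lzcnt return the operand width (32 here) for a zero source. */
static unsigned sketch_tzcnt32 ( uint32_t x )
{
   unsigned n = 0;
   if (x == 0) return 32;
   while ((x & 1) == 0) { n++; x >>= 1; }
   return n;
}

static unsigned sketch_lzcnt32 ( uint32_t x )
{
   unsigned n = 0;
   if (x == 0) return 32;
   while ((x & 0x80000000u) == 0) { n++; x <<= 1; }
   return n;
}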
16779 static Long dis_VBLENDV_128 ( const VexAbiInfo* vbi, Prefix pfx, Long delta,
16786 UInt rG = gregOfRexRM(pfx, modrm);
16787 UInt rV = getVexNvvvv(pfx);
16794 UInt rE = eregOfRexRM(pfx, modrm);
16802 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
16818 static Long dis_VBLENDV_256 ( const VexAbiInfo* vbi, Prefix pfx, Long delta,
16825 UInt rG = gregOfRexRM(pfx, modrm);
16826 UInt rV = getVexNvvvv(pfx);
16833 UInt rE = eregOfRexRM(pfx, modrm);
16841 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
16957 static Long dis_xTESTy_128 ( const VexAbiInfo* vbi, Prefix pfx,
16964 UInt rG = gregOfRexRM(pfx, modrm);
16969 UInt rE = eregOfRexRM(pfx, modrm);
16977 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17010 static Long dis_xTESTy_256 ( const VexAbiInfo* vbi, Prefix pfx,
17017 UInt rG = gregOfRexRM(pfx, modrm);
17022 UInt rE = eregOfRexRM(pfx, modrm);
17029 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17068 static Long dis_PMOVxXBW_128 ( const VexAbiInfo* vbi, Prefix pfx,
17078 UInt rG = gregOfRexRM(pfx, modrm);
17080 UInt rE = eregOfRexRM(pfx, modrm);
17085 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17111 static Long dis_PMOVxXBW_256 ( const VexAbiInfo* vbi, Prefix pfx,
17120 UInt rG = gregOfRexRM(pfx, modrm);
17122 UInt rE = eregOfRexRM(pfx, modrm);
17127 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17151 static Long dis_PMOVxXWD_128 ( const VexAbiInfo* vbi, Prefix pfx,
17161 UInt rG = gregOfRexRM(pfx, modrm);
17164 UInt rE = eregOfRexRM(pfx, modrm);
17169 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17184 ( gregOfRexRM(pfx, modrm), res );
17190 static Long dis_PMOVxXWD_256 ( const VexAbiInfo* vbi, Prefix pfx,
17199 UInt rG = gregOfRexRM(pfx, modrm);
17202 UInt rE = eregOfRexRM(pfx, modrm);
17207 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17229 static Long dis_PMOVSXWQ_128 ( const VexAbiInfo* vbi, Prefix pfx,
17238 UInt rG = gregOfRexRM(pfx, modrm);
17241 UInt rE = eregOfRexRM(pfx, modrm);
17246 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17262 static Long dis_PMOVSXWQ_256 ( const VexAbiInfo* vbi, Prefix pfx, Long delta )
17269 UInt rG = gregOfRexRM(pfx, modrm);
17274 UInt rE = eregOfRexRM(pfx, modrm);
17279 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17297 static Long dis_PMOVZXWQ_128 ( const VexAbiInfo* vbi, Prefix pfx,
17306 UInt rG = gregOfRexRM(pfx, modrm);
17309 UInt rE = eregOfRexRM(pfx, modrm);
17314 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17333 static Long dis_PMOVZXWQ_256 ( const VexAbiInfo* vbi, Prefix pfx,
17341 UInt rG = gregOfRexRM(pfx, modrm);
17344 UInt rE = eregOfRexRM(pfx, modrm);
17349 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17373 static Long dis_PMOVxXDQ_128 ( const VexAbiInfo* vbi, Prefix pfx,
17384 UInt rG = gregOfRexRM(pfx, modrm);
17390 UInt rE = eregOfRexRM(pfx, modrm);
17396 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17420 static Long dis_PMOVxXDQ_256 ( const VexAbiInfo* vbi, Prefix pfx,
17429 UInt rG = gregOfRexRM(pfx, modrm);
17435 UInt rE = eregOfRexRM(pfx, modrm);
17440 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17473 static Long dis_PMOVxXBD_128 ( const VexAbiInfo* vbi, Prefix pfx,
17483 UInt rG = gregOfRexRM(pfx, modrm);
17485 UInt rE = eregOfRexRM(pfx, modrm);
17490 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17516 static Long dis_PMOVxXBD_256 ( const VexAbiInfo* vbi, Prefix pfx,
17525 UInt rG = gregOfRexRM(pfx, modrm);
17527 UInt rE = eregOfRexRM(pfx, modrm);
17532 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17563 static Long dis_PMOVSXBQ_128 ( const VexAbiInfo* vbi, Prefix pfx,
17572 UInt rG = gregOfRexRM(pfx, modrm);
17574 UInt rE = eregOfRexRM(pfx, modrm);
17579 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17596 static Long dis_PMOVSXBQ_256 ( const VexAbiInfo* vbi, Prefix pfx,
17604 UInt rG = gregOfRexRM(pfx, modrm);
17606 UInt rE = eregOfRexRM(pfx, modrm);
17611 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17642 static Long dis_PMOVZXBQ_128 ( const VexAbiInfo* vbi, Prefix pfx,
17651 UInt rG = gregOfRexRM(pfx, modrm);
17653 UInt rE = eregOfRexRM(pfx, modrm);
17658 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17681 static Long dis_PMOVZXBQ_256 ( const VexAbiInfo* vbi, Prefix pfx,
17689 UInt rG = gregOfRexRM(pfx, modrm);
17691 UInt rE = eregOfRexRM(pfx, modrm);
17696 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17725 static Long dis_PHMINPOSUW_128 ( const VexAbiInfo* vbi, Prefix pfx,
17737 UInt rG = gregOfRexRM(pfx,modrm);
17739 UInt rE = eregOfRexRM(pfx,modrm);
17744 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
17765 static Long dis_AESx ( const VexAbiInfo* vbi, Prefix pfx,
17772 UInt rG = gregOfRexRM(pfx, modrm);
17774 UInt regNoR = (isAvx && opc != 0xDB) ? getVexNvvvv(pfx) : rG;
17783 regNoL = eregOfRexRM(pfx, modrm);
17787 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17857 static Long dis_AESKEYGENASSIST ( const VexAbiInfo* vbi, Prefix pfx,
17865 UInt regNoR = gregOfRexRM(pfx, modrm);
17871 regNoL = eregOfRexRM(pfx, modrm);
17876 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
17926 Prefix pfx, Int sz, Long deltaIN )
17949 if (have66noF2noF3(pfx) && sz == 2) {
17973 assign(vecE, getXMMReg(eregOfRexRM(pfx, modrm)));
17976 nameXMMReg( eregOfRexRM(pfx, modrm) ),
17977 nameXMMReg( gregOfRexRM(pfx, modrm) ) );
17979 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17984 dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) );
17987 assign(vecG, getXMMReg(gregOfRexRM(pfx, modrm)));
17991 putXMMReg(gregOfRexRM(pfx, modrm), mkexpr(res));
18000 if (have66noF2noF3(pfx)
18002 delta = dis_xTESTy_128( vbi, pfx, delta, False/*!isAvx*/, 0 );
18010 if (have66noF2noF3(pfx) && sz == 2) {
18011 delta = dis_PMOVxXBW_128( vbi, pfx, delta,
18020 if (have66noF2noF3(pfx) && sz == 2) {
18021 delta = dis_PMOVxXBD_128( vbi, pfx, delta,
18030 if (have66noF2noF3(pfx) && sz == 2) {
18031 delta = dis_PMOVSXBQ_128( vbi, pfx, delta, False/*!isAvx*/ );
18039 if (have66noF2noF3(pfx) && sz == 2) {
18040 delta = dis_PMOVxXWD_128(vbi, pfx, delta,
18049 if (have66noF2noF3(pfx) && sz == 2) {
18050 delta = dis_PMOVSXWQ_128( vbi, pfx, delta, False/*!isAvx*/ );
18058 if (have66noF2noF3(pfx) && sz == 2) {
18059 delta = dis_PMOVxXDQ_128( vbi, pfx, delta,
18071 if (have66noF2noF3(pfx) && sz == 2) {
18075 UInt rG = gregOfRexRM(pfx,modrm);
18078 UInt rE = eregOfRexRM(pfx,modrm);
18083 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
18097 if (have66noF2noF3(pfx) && sz == 2) {
18099 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
18109 if (have66noF2noF3(pfx) && sz == 2) {
18112 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
18114 putXMMReg( gregOfRexRM(pfx,modrm),
18117 nameXMMReg(gregOfRexRM(pfx,modrm)));
18127 if (have66noF2noF3(pfx) && sz == 2) {
18135 assign( argL, getXMMReg( eregOfRexRM(pfx, modrm) ) );
18138 nameXMMReg( eregOfRexRM(pfx, modrm) ),
18139 nameXMMReg( gregOfRexRM(pfx, modrm) ) );
18141 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
18146 dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) );
18149 assign(argR, getXMMReg( gregOfRexRM(pfx, modrm) ));
18151 putXMMReg( gregOfRexRM(pfx, modrm),
18162 if (have66noF2noF3(pfx) && sz == 2) {
18163 delta = dis_PMOVxXBW_128( vbi, pfx, delta,
18172 if (have66noF2noF3(pfx) && sz == 2) {
18173 delta = dis_PMOVxXBD_128( vbi, pfx, delta,
18182 if (have66noF2noF3(pfx) && sz == 2) {
18183 delta = dis_PMOVZXBQ_128( vbi, pfx, delta, False/*!isAvx*/ );
18191 if (have66noF2noF3(pfx) && sz == 2) {
18192 delta = dis_PMOVxXWD_128( vbi, pfx, delta,
18201 if (have66noF2noF3(pfx) && sz == 2) {
18202 delta = dis_PMOVZXWQ_128( vbi, pfx, delta, False/*!isAvx*/ );
18210 if (have66noF2noF3(pfx) && sz == 2) {
18211 delta = dis_PMOVxXDQ_128( vbi, pfx, delta,
18221 if (have66noF2noF3(pfx) && sz == 2) {
18223 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
18234 if (have66noF2noF3(pfx) && sz == 2) {
18238 vbi, pfx, delta,
18254 if (have66noF2noF3(pfx) && sz == 2) {
18258 vbi, pfx, delta,
18274 if (have66noF2noF3(pfx) && sz == 2) {
18278 vbi, pfx, delta,
18294 if (have66noF2noF3(pfx) && sz == 2) {
18298 vbi, pfx, delta,
18310 if (have66noF2noF3(pfx) && sz == 2) {
18318 assign( argL, getXMMReg( eregOfRexRM(pfx, modrm) ) );
18321 nameXMMReg( eregOfRexRM(pfx, modrm) ),
18322 nameXMMReg( gregOfRexRM(pfx, modrm) ) );
18324 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
18329 dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) );
18332 assign(argR, getXMMReg( gregOfRexRM(pfx, modrm) ));
18334 putXMMReg( gregOfRexRM(pfx, modrm),
18344 if (have66noF2noF3(pfx) && sz == 2) {
18345 delta = dis_PHMINPOSUW_128( vbi, pfx, delta, False/*!isAvx*/ );
18361 if (have66noF2noF3(pfx) && sz == 2) {
18362 delta = dis_AESx( vbi, pfx, delta, False/*!isAvx*/, opc );
18373 if (haveF2noF3(pfx)
18374 && (opc == 0xF1 || (opc == 0xF0 && !have66(pfx)))) {
18386 assign(valE, getIRegE(sz, pfx, modrm));
18388 DIP("crc32b %s,%s\n", nameIRegE(sz, pfx, modrm),
18389 nameIRegG(1==getRexW(pfx) ? 8 : 4, pfx, modrm));
18391 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
18395 nameIRegG(1==getRexW(pfx) ? 8 : 4, pfx, modrm));
18403 assign(valG0, binop(Iop_And64, getIRegG(8, pfx, modrm),
18425 putIRegG(4, pfx, modrm, unop(Iop_64to32, mkexpr(valG1)));
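/* Illustrative sketch, not part of the decoder above: the 0xF0/0xF1 cases
   just matched decode the SSE4.2 CRC32 instruction, which accumulates with
   the CRC-32C (Castagnoli) polynomial, reflected constant 0x82F63B78,
   least-significant bit first.  A plain bitwise sketch of one byte of that
   accumulation (hypothetical name; not the helper-call sequence the decoder
   actually generates): */
#include <stdint.h>

static uint32_t sketch_crc32c_byte ( uint32_t crc, uint8_t b )
{
   int i;
   crc ^= b;
   for (i = 0; i < 8; i++)
      crc = (crc >> 1) ^ (0x82F63B78u & (0u - (crc & 1)));
   return crc;
}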
18451 static Long dis_PEXTRW ( const VexAbiInfo* vbi, Prefix pfx,
18462 UInt rG = gregOfRexRM(pfx,modrm);
18468 vassert(0==getRexW(pfx)); /* ensured by caller */
18475 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
18492 UInt rE = eregOfRexRM(pfx,modrm);
18506 static Long dis_PEXTRD ( const VexAbiInfo* vbi, Prefix pfx,
18523 vassert(0==getRexW(pfx)); /* ensured by caller */
18525 assign( xmm_vec, getXMMReg( gregOfRexRM(pfx,modrm) ) );
18531 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
18544 putIReg32( eregOfRexRM(pfx,modrm), mkexpr(src_dword) );
18547 nameXMMReg( gregOfRexRM(pfx, modrm) ),
18548 nameIReg32( eregOfRexRM(pfx, modrm) ) );
18553 imm8_10, nameXMMReg( gregOfRexRM(pfx, modrm) ), dis_buf );
18559 static Long dis_PEXTRQ ( const VexAbiInfo* vbi, Prefix pfx,
18572 vassert(1==getRexW(pfx)); /* ensured by caller */
18574 assign( xmm_vec, getXMMReg( gregOfRexRM(pfx,modrm) ) );
18579 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
18592 putIReg64( eregOfRexRM(pfx,modrm), mkexpr(src_qword) );
18595 nameXMMReg( gregOfRexRM(pfx, modrm) ),
18596 nameIReg64( eregOfRexRM(pfx, modrm) ) );
18601 imm8_0, nameXMMReg( gregOfRexRM(pfx, modrm) ), dis_buf );
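/* Illustrative sketch, not part of the decoder above: dis_PEXTRD and
   dis_PEXTRQ both come down to selecting one lane of the XMM source by the
   low bits of imm8 (imm8[1:0] for 32-bit lanes, imm8[0] for 64-bit lanes).
   A standalone little-endian sketch of that selection; V128 and the function
   names are hypothetical. */
#include <stdint.h>

typedef struct { uint32_t w32[4]; } V128;  /* w32[0] is the least significant lane */

static uint32_t sketch_pextrd ( const V128* src, uint8_t imm8 )
{
   return src->w32[imm8 & 3];               /* PEXTRD uses imm8[1:0] */
}

static uint64_t sketch_pextrq ( const V128* src, uint8_t imm8 )
{
   unsigned lane = imm8 & 1;                /* PEXTRQ uses imm8[0] */
   return ((uint64_t)src->w32[2*lane + 1] << 32) | src->w32[2*lane];
}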
18740 static Long dis_PCMPxSTRx ( const VexAbiInfo* vbi, Prefix pfx,
18760 regNoL = eregOfRexRM(pfx, modrm);
18761 regNoR = gregOfRexRM(pfx, modrm);
18766 regNoR = gregOfRexRM(pfx, modrm);
18767 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
18992 static Long dis_PEXTRB_128_GtoE ( const VexAbiInfo* vbi, Prefix pfx,
19005 assign( xmm_vec, getXMMReg( gregOfRexRM(pfx,modrm) ) );
19012 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
19026 putIReg64( eregOfRexRM(pfx,modrm),
19031 nameXMMReg( gregOfRexRM(pfx, modrm) ),
19032 nameIReg64( eregOfRexRM(pfx, modrm) ) );
19037 imm8, nameXMMReg( gregOfRexRM(pfx, modrm) ), dis_buf );
19168 static Long dis_EXTRACTPS ( const VexAbiInfo* vbi, Prefix pfx,
19178 UInt rG = gregOfRexRM(pfx,modrm);
19188 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
19201 UInt rE = eregOfRexRM(pfx,modrm);
19248 Prefix pfx, Int sz, Long deltaIN )
19264 if (have66noF2noF3(pfx) && sz == 2) {
19281 getXMMRegLane32F( eregOfRexRM(pfx, modrm), 0 ) );
19283 getXMMRegLane32F( eregOfRexRM(pfx, modrm), 1 ) );
19285 getXMMRegLane32F( eregOfRexRM(pfx, modrm), 2 ) );
19287 getXMMRegLane32F( eregOfRexRM(pfx, modrm), 3 ) );
19292 imm, nameXMMReg( eregOfRexRM(pfx, modrm) ),
19293 nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19295 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
19309 imm, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19323 putXMMRegLane32F( gregOfRexRM(pfx, modrm), 0, mkexpr(res0) );
19324 putXMMRegLane32F( gregOfRexRM(pfx, modrm), 1, mkexpr(res1) );
19325 putXMMRegLane32F( gregOfRexRM(pfx, modrm), 2, mkexpr(res2) );
19326 putXMMRegLane32F( gregOfRexRM(pfx, modrm), 3, mkexpr(res3) );
19334 if (have66noF2noF3(pfx) && sz == 2) {
19347 getXMMRegLane64F( eregOfRexRM(pfx, modrm), 0 ) );
19349 getXMMRegLane64F( eregOfRexRM(pfx, modrm), 1 ) );
19354 imm, nameXMMReg( eregOfRexRM(pfx, modrm) ),
19355 nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19357 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
19367 imm, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19379 putXMMRegLane64F( gregOfRexRM(pfx, modrm), 0, mkexpr(res0) );
19380 putXMMRegLane64F( gregOfRexRM(pfx, modrm), 1, mkexpr(res1) );
19391 if (have66noF2noF3(pfx) && sz == 2) {
19402 isD ? getXMMRegLane64F( eregOfRexRM(pfx, modrm), 0 )
19403 : getXMMRegLane32F( eregOfRexRM(pfx, modrm), 0 ) );
19409 imm, nameXMMReg( eregOfRexRM(pfx, modrm) ),
19410 nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19412 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
19419 imm, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19432 putXMMRegLane64F( gregOfRexRM(pfx, modrm), 0, mkexpr(res) );
19434 putXMMRegLane32F( gregOfRexRM(pfx, modrm), 0, mkexpr(res) );
19443 if (have66noF2noF3(pfx) && sz == 2) {
19451 assign( dst_vec, getXMMReg( gregOfRexRM(pfx, modrm) ) );
19455 assign( src_vec, getXMMReg( eregOfRexRM(pfx, modrm) ) );
19458 nameXMMReg( eregOfRexRM(pfx, modrm) ),
19459 nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19461 addr = disAMode( &alen, vbi, pfx, delta, dis_buf,
19468 imm8, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19471 putXMMReg( gregOfRexRM(pfx, modrm),
19480 if (have66noF2noF3(pfx) && sz == 2) {
19487 assign( dst_vec, getXMMReg( gregOfRexRM(pfx, modrm) ) );
19491 assign( src_vec, getXMMReg( eregOfRexRM(pfx, modrm) ) );
19494 nameXMMReg( eregOfRexRM(pfx, modrm) ),
19495 nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19497 addr = disAMode( &alen, vbi, pfx, delta, dis_buf,
19504 imm8, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19507 putXMMReg( gregOfRexRM(pfx, modrm),
19516 if (have66noF2noF3(pfx) && sz == 2) {
19524 assign( dst_vec, getXMMReg( gregOfRexRM(pfx, modrm) ) );
19528 assign( src_vec, getXMMReg( eregOfRexRM(pfx, modrm) ) );
19531 nameXMMReg( eregOfRexRM(pfx, modrm) ),
19532 nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19534 addr = disAMode( &alen, vbi, pfx, delta, dis_buf,
19541 imm8, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19544 putXMMReg( gregOfRexRM(pfx, modrm),
19554 if (have66noF2noF3(pfx) && sz == 2) {
19555 delta = dis_PEXTRB_128_GtoE( vbi, pfx, delta, False/*!isAvx*/ );
19564 if (have66noF2noF3(pfx) && sz == 2) {
19565 delta = dis_PEXTRW( vbi, pfx, delta, False/*!isAvx*/ );
19575 if (have66noF2noF3(pfx)
19577 delta = dis_PEXTRD( vbi, pfx, delta, False/*!isAvx*/ );
19584 if (have66noF2noF3(pfx)
19586 delta = dis_PEXTRQ( vbi, pfx, delta, False/*!isAvx*/);
19596 if (have66noF2noF3(pfx)
19598 delta = dis_EXTRACTPS( vbi, pfx, delta, False/*!isAvx*/ );
19606 if (have66noF2noF3(pfx) && sz == 2) {
19610 UInt rG = gregOfRexRM(pfx, modrm);
19612 UInt rE = eregOfRexRM(pfx,modrm);
19619 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
19627 assign(src_vec, getXMMReg( gregOfRexRM(pfx, modrm) ));
19637 if (have66noF2noF3(pfx) && sz == 2) {
19643 UInt rG = gregOfRexRM(pfx, modrm);
19646 UInt rE = eregOfRexRM(pfx, modrm);
19657 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
19676 if (have66noF2noF3(pfx)
19681 UInt rG = gregOfRexRM(pfx, modrm);
19684 UInt rE = eregOfRexRM(pfx,modrm);
19691 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
19707 if (have66noF2noF3(pfx)
19712 UInt rG = gregOfRexRM(pfx, modrm);
19715 UInt rE = eregOfRexRM(pfx,modrm);
19722 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
19741 if (have66noF2noF3(pfx) && sz == 2) {
19746 UInt rG = gregOfRexRM(pfx, modrm);
19749 UInt rE = eregOfRexRM(pfx, modrm);
19756 addr = disAMode( &alen, vbi, pfx, delta, dis_buf,
19774 if (have66noF2noF3(pfx) && sz == 2) {
19779 UInt rG = gregOfRexRM(pfx, modrm);
19782 UInt rE = eregOfRexRM(pfx, modrm);
19789 addr = disAMode( &alen, vbi, pfx, delta, dis_buf,
19807 if (have66noF2noF3(pfx) && sz == 2) {
19812 UInt rG = gregOfRexRM(pfx, modrm);
19817 UInt rE = eregOfRexRM(pfx, modrm);
19825 addr = disAMode( &alen, vbi, pfx, delta, dis_buf,
19844 if (have66noF2noF3(pfx) && sz == 2) {
19850 UInt rG = gregOfRexRM(pfx, modrm);
19855 UInt rE = eregOfRexRM(pfx, modrm);
19862 addr = disAMode( &alen, vbi, pfx, delta, dis_buf,
19888 if (have66noF2noF3(pfx) && sz == 2) {
19890 delta = dis_PCMPxSTRx( vbi, pfx, delta, False/*!isAvx*/, opc );
19898 if (have66noF2noF3(pfx) && sz == 2) {
19899 delta = dis_AESKEYGENASSIST( vbi, pfx, delta, False/*!isAvx*/ );
19935 Prefix pfx, Int sz, Long deltaIN
19964 Bool validF2orF3 = toBool( ! haveF2orF3(pfx) );
19975 && haveF2orF3(pfx) && !haveF2andF3(pfx) && haveLOCK(pfx)) {
19986 switch above, use validF2orF3 rather than looking at pfx
19992 delta = dis_op2_G_E ( vbi, pfx, Iop_Add8, WithFlagNone, True, 1, delta, "add" );
19996 delta = dis_op2_G_E ( vbi, pfx, Iop_Add8, WithFlagNone, True, sz, delta, "add" );
20000 if (haveF2orF3(pfx)) goto decode_failure;
20001 delta = dis_op2_E_G ( vbi, pfx, Iop_Add8, WithFlagNone, True, 1, delta, "add" );
20004 if (haveF2orF3(pfx)) goto decode_failure;
20005 delta = dis_op2_E_G ( vbi, pfx, Iop_Add8, WithFlagNone, True, sz, delta, "add" );
20009 if (haveF2orF3(pfx)) goto decode_failure;
20013 if (haveF2orF3(pfx)) goto decode_failure;
20019 delta = dis_op2_G_E ( vbi, pfx, Iop_Or8, WithFlagNone, True, 1, delta, "or" );
20023 delta = dis_op2_G_E ( vbi, pfx, Iop_Or8, WithFlagNone, True, sz, delta, "or" );
20027 if (haveF2orF3(pfx)) goto decode_failure;
20028 delta = dis_op2_E_G ( vbi, pfx, Iop_Or8, WithFlagNone, True, 1, delta, "or" );
20031 if (haveF2orF3(pfx)) goto decode_failure;
20032 delta = dis_op2_E_G ( vbi, pfx, Iop_Or8, WithFlagNone, True, sz, delta, "or" );
20036 if (haveF2orF3(pfx)) goto decode_failure;
20040 if (haveF2orF3(pfx)) goto decode_failure;
20046 delta = dis_op2_G_E ( vbi, pfx, Iop_Add8, WithFlagCarry, True, 1, delta, "adc" );
20050 delta = dis_op2_G_E ( vbi, pfx, Iop_Add8, WithFlagCarry, True, sz, delta, "adc" );
20054 if (haveF2orF3(pfx)) goto decode_failure;
20055 delta = dis_op2_E_G ( vbi, pfx, Iop_Add8, WithFlagCarry, True, 1, delta, "adc" );
20058 if (haveF2orF3(pfx)) goto decode_failure;
20059 delta = dis_op2_E_G ( vbi, pfx, Iop_Add8, WithFlagCarry, True, sz, delta, "adc" );
20063 if (haveF2orF3(pfx)) goto decode_failure;
20067 if (haveF2orF3(pfx)) goto decode_failure;
20073 delta = dis_op2_G_E ( vbi, pfx, Iop_Sub8, WithFlagCarry, True, 1, delta, "sbb" );
20077 delta = dis_op2_G_E ( vbi, pfx, Iop_Sub8, WithFlagCarry, True, sz, delta, "sbb" );
20081 if (haveF2orF3(pfx)) goto decode_failure;
20082 delta = dis_op2_E_G ( vbi, pfx, Iop_Sub8, WithFlagCarry, True, 1, delta, "sbb" );
20085 if (haveF2orF3(pfx)) goto decode_failure;
20086 delta = dis_op2_E_G ( vbi, pfx, Iop_Sub8, WithFlagCarry, True, sz, delta, "sbb" );
20090 if (haveF2orF3(pfx)) goto decode_failure;
20094 if (haveF2orF3(pfx)) goto decode_failure;
20100 delta = dis_op2_G_E ( vbi, pfx, Iop_And8, WithFlagNone, True, 1, delta, "and" );
20104 delta = dis_op2_G_E ( vbi, pfx, Iop_And8, WithFlagNone, True, sz, delta, "and" );
20108 if (haveF2orF3(pfx)) goto decode_failure;
20109 delta = dis_op2_E_G ( vbi, pfx, Iop_And8, WithFlagNone, True, 1, delta, "and" );
20112 if (haveF2orF3(pfx)) goto decode_failure;
20113 delta = dis_op2_E_G ( vbi, pfx, Iop_And8, WithFlagNone, True, sz, delta, "and" );
20117 if (haveF2orF3(pfx)) goto decode_failure;
20121 if (haveF2orF3(pfx)) goto decode_failure;
20127 delta = dis_op2_G_E ( vbi, pfx, Iop_Sub8, WithFlagNone, True, 1, delta, "sub" );
20131 delta = dis_op2_G_E ( vbi, pfx, Iop_Sub8, WithFlagNone, True, sz, delta, "sub" );
20135 if (haveF2orF3(pfx)) goto decode_failure;
20136 delta = dis_op2_E_G ( vbi, pfx, Iop_Sub8, WithFlagNone, True, 1, delta, "sub" );
20139 if (haveF2orF3(pfx)) goto decode_failure;
20140 delta = dis_op2_E_G ( vbi, pfx, Iop_Sub8, WithFlagNone, True, sz, delta, "sub" );
20144 if (haveF2orF3(pfx)) goto decode_failure;
20148 if (haveF2orF3(pfx)) goto decode_failure;
20154 delta = dis_op2_G_E ( vbi, pfx, Iop_Xor8, WithFlagNone, True, 1, delta, "xor" );
20158 delta = dis_op2_G_E ( vbi, pfx, Iop_Xor8, WithFlagNone, True, sz, delta, "xor" );
20162 if (haveF2orF3(pfx)) goto decode_failure;
20163 delta = dis_op2_E_G ( vbi, pfx, Iop_Xor8, WithFlagNone, True, 1, delta, "xor" );
20166 if (haveF2orF3(pfx)) goto decode_failure;
20167 delta = dis_op2_E_G ( vbi, pfx, Iop_Xor8, WithFlagNone, True, sz, delta, "xor" );
20171 if (haveF2orF3(pfx)) goto decode_failure;
20175 if (haveF2orF3(pfx)) goto decode_failure;
20180 if (haveF2orF3(pfx)) goto decode_failure;
20181 delta = dis_op2_G_E ( vbi, pfx, Iop_Sub8, WithFlagNone, False, 1, delta, "cmp" );
20184 if (haveF2orF3(pfx)) goto decode_failure;
20185 delta = dis_op2_G_E ( vbi, pfx, Iop_Sub8, WithFlagNone, False, sz, delta, "cmp" );
20189 if (haveF2orF3(pfx)) goto decode_failure;
20190 delta = dis_op2_E_G ( vbi, pfx, Iop_Sub8, WithFlagNone, False, 1, delta, "cmp" );
20193 if (haveF2orF3(pfx)) goto decode_failure;
20194 delta = dis_op2_E_G ( vbi, pfx, Iop_Sub8, WithFlagNone, False, sz, delta, "cmp" );
20198 if (haveF2orF3(pfx)) goto decode_failure;
20202 if (haveF2orF3(pfx)) goto decode_failure;
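/* Illustrative sketch, not part of the decoder above: the 0x00..0x3B cases
   just matched follow the classic one-byte ALU encoding, which is why the
   listing alternates dis_op2_G_E and dis_op2_E_G with sizes 1 and sz: opcode
   bit 0 selects byte vs. full operand size, and opcode bit 1 selects which
   operand is written.  The accumulator-immediate forms (0x04/0x05 etc.) are
   encoded separately and not covered here.  Hypothetical names throughout. */
typedef struct { int widthIsByte; int regIsDestination; } SketchAluForm;

static SketchAluForm sketch_alu_form ( unsigned char opc )
{
   SketchAluForm f;
   f.widthIsByte      = (opc & 1) == 0;  /* even opcode => 8-bit form            */
   f.regIsDestination = (opc & 2) != 0;  /* bit 1 set => E -> G, clear => G -> E */
   return f;
}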
20217 if (haveF2orF3(pfx)) goto decode_failure;
20224 assign(t1, getIRegRexB(sz, pfx, opc-0x50));
20228 DIP("push%c %s\n", nameISize(sz), nameIRegRexB(sz,pfx,opc-0x50));
20239 if (haveF2orF3(pfx)) goto decode_failure;
20248 putIRegRexB(sz, pfx, opc-0x58, mkexpr(t1));
20249 DIP("pop%c %s\n", nameISize(sz), nameIRegRexB(sz,pfx,opc-0x58));
20253 if (haveF2orF3(pfx)) goto decode_failure;
20254 if (haveREX(pfx) && 1==getRexW(pfx)) {
20260 putIRegG(8, pfx, modrm,
20262 getIRegE(4, pfx, modrm)));
20264 nameIRegE(4, pfx, modrm),
20265 nameIRegG(8, pfx, modrm));
20268 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
20270 putIRegG(8, pfx, modrm,
20274 nameIRegG(8, pfx, modrm));
20282 if (haveF2orF3(pfx)) goto decode_failure;
20290 if (haveF2orF3(pfx)) goto decode_failure;
20291 delta = dis_imul_I_E_G ( vbi, pfx, sz, delta, sz );
20295 if (haveF2orF3(pfx)) goto decode_failure;
20315 delta = dis_imul_I_E_G ( vbi, pfx, sz, delta, 1 );
20336 if (haveF3(pfx)) goto decode_failure;
20337 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */
20400 if (epartIsReg(modrm) && haveF2orF3(pfx))
20402 if (!epartIsReg(modrm) && haveF2andF3(pfx))
20404 if (!epartIsReg(modrm) && haveF2orF3(pfx) && !haveLOCK(pfx))
20406 am_sz = lengthAMode(pfx,delta);
20410 delta = dis_Grp1 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, d64 );
20416 if (epartIsReg(modrm) && haveF2orF3(pfx))
20418 if (!epartIsReg(modrm) && haveF2andF3(pfx))
20420 if (!epartIsReg(modrm) && haveF2orF3(pfx) && !haveLOCK(pfx))
20422 am_sz = lengthAMode(pfx,delta);
20425 delta = dis_Grp1 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, d64 );
20429 if (haveF2orF3(pfx)) goto decode_failure;
20431 am_sz = lengthAMode(pfx,delta);
20434 delta = dis_Grp1 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, d64 );
20438 if (haveF2orF3(pfx)) goto decode_failure;
20439 delta = dis_op2_E_G ( vbi, pfx, Iop_And8, WithFlagNone, False,
20444 if (haveF2orF3(pfx)) goto decode_failure;
20445 delta = dis_op2_E_G ( vbi, pfx, Iop_And8, WithFlagNone, False,
20461 if (haveF2orF3(pfx)) {
20465 if (haveF2andF3(pfx))
20472 assign(t1, getIRegE(sz, pfx, modrm));
20473 assign(t2, getIRegG(sz, pfx, modrm));
20474 putIRegG(sz, pfx, modrm, mkexpr(t1));
20475 putIRegE(sz, pfx, modrm, mkexpr(t2));
20478 nameISize(sz), nameIRegG(sz, pfx, modrm),
20479 nameIRegE(sz, pfx, modrm));
20482 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
20484 assign( t2, getIRegG(sz, pfx, modrm) );
20487 putIRegG( sz, pfx, modrm, mkexpr(t1) );
20490 nameIRegG(sz, pfx, modrm), dis_buf);
20497 delta = dis_mov_G_E(vbi, pfx, 1, delta, &ok);
20505 delta = dis_mov_G_E(vbi, pfx, sz, delta, &ok);
20511 if (haveF2orF3(pfx)) goto decode_failure;
20512 delta = dis_mov_E_G(vbi, pfx, 1, delta);
20516 if (haveF2orF3(pfx)) goto decode_failure;
20517 delta = dis_mov_E_G(vbi, pfx, sz, delta);
20521 if (haveF2orF3(pfx)) goto decode_failure;
20522 delta = dis_mov_S_E(vbi, pfx, sz, delta);
20526 if (haveF2orF3(pfx)) goto decode_failure;
20534 any segment override bits in pfx. */
20535 addr = disAMode ( &alen, vbi, clearSegBits(pfx), delta, dis_buf, 0 );
20540 putIRegG( sz, pfx, modrm,
20546 nameIRegG(sz,pfx,modrm));
20554 if (haveF2orF3(pfx)) goto decode_failure;
20580 addr = disAMode ( &len, vbi, pfx, delta, dis_buf, 0 );
20591 if (!have66(pfx) && !haveF2(pfx) && haveF3(pfx)) {
20601 !haveF2orF3(pfx)
20603 && getRexB(pfx)==0 ) {
20616 if (haveF2orF3(pfx)) goto decode_failure;
20617 codegen_xchg_rAX_Reg ( pfx, sz, opc - 0x90 );
20621 if (haveF2orF3(pfx)) goto decode_failure;
20640 if (haveF2orF3(pfx)) goto decode_failure;
20662 if (haveF2orF3(pfx)) goto decode_failure;
20718 if (haveF2orF3(pfx)) goto decode_failure;
20791 if (have66orF2orF3(pfx)) goto decode_failure;
20801 assign( addr, handleAddrOverrides(vbi, pfx, mkU64(d64)) );
20804 segRegTxt(pfx), (ULong)d64,
20809 if (have66orF2orF3(pfx)) goto decode_failure;
20819 assign( addr, handleAddrOverrides(vbi, pfx, mkU64(d64)) );
20822 segRegTxt(pfx), (ULong)d64);
20828 if (haveF3(pfx) && !haveF2(pfx)) {
20833 guest_RIP_bbstart+delta, "rep movs", pfx );
20838 if (!haveF3(pfx) && !haveF2(pfx)) {
20841 dis_string_op( dis_MOVS, sz, "movs", pfx );
20849 if (haveF3(pfx) && !haveF2(pfx)) {
20854 guest_RIP_bbstart+delta, "repe cmps", pfx );
20863 if (haveF3(pfx) && !haveF2(pfx)) {
20868 guest_RIP_bbstart+delta, "rep stos", pfx );
20873 if (!haveF3(pfx) && !haveF2(pfx)) {
20876 dis_string_op( dis_STOS, sz, "stos", pfx );
20882 if (haveF2orF3(pfx)) goto decode_failure;
20886 if (haveF2orF3(pfx)) goto decode_failure;
20892 dis_string_op( dis_LODS, ( opc == 0xAC ? 1 : sz ), "lods", pfx );
20898 if (haveF2(pfx) && !haveF3(pfx)) {
20903 guest_RIP_bbstart+delta, "repne scas", pfx );
20908 if (!haveF2(pfx) && haveF3(pfx)) {
20913 guest_RIP_bbstart+delta, "repe scas", pfx );
20918 if (!haveF2(pfx) && !haveF3(pfx)) {
20921 dis_string_op( dis_SCAS, sz, "scas", pfx );
20935 if (haveF2orF3(pfx)) goto decode_failure;
20938 putIRegRexB(1, pfx, opc-0xB0, mkU8(d64));
20939 DIP("movb $%lld,%s\n", d64, nameIRegRexB(1,pfx,opc-0xB0));
20952 if (haveF2orF3(pfx)) goto decode_failure;
20956 putIRegRexB(8, pfx, opc-0xB8, mkU64(d64));
20958 nameIRegRexB(8,pfx,opc-0xB8));
20962 putIRegRexB(sz, pfx, opc-0xB8,
20966 nameIRegRexB(sz,pfx,opc-0xB8));
20972 if (haveF2orF3(pfx)) goto decode_failure;
20974 am_sz = lengthAMode(pfx,delta);
20978 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz,
20986 if (haveF2orF3(pfx)) goto decode_failure;
20988 am_sz = lengthAMode(pfx,delta);
20991 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz,
20998 if (have66orF3(pfx)) goto decode_failure;
20999 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */
21007 if (have66(pfx)) goto decode_failure;
21009 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */
21011 DIP(haveF3(pfx) ? "rep ; ret\n" : "ret\n");
21024 if (haveF2orF3(pfx)) goto decode_failure;
21028 putIRegE(sz, pfx, modrm,
21032 nameIRegE(sz,pfx,modrm));
21034 if (haveF2(pfx)) goto decode_failure;
21036 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf,
21048 if (opc == 0xC7 && modrm == 0xF8 && !have66orF2orF3(pfx) && sz == 4
21068 if (opc == 0xC6 && modrm == 0xF8 && !have66orF2orF3(pfx) && sz == 1
21148 if (haveF2orF3(pfx)) goto decode_failure;
21150 am_sz = lengthAMode(pfx,delta);
21154 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz,
21162 if (haveF2orF3(pfx)) goto decode_failure;
21164 am_sz = lengthAMode(pfx,delta);
21167 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz,
21175 if (haveF2orF3(pfx)) goto decode_failure;
21177 am_sz = lengthAMode(pfx,delta);
21180 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz,
21188 if (haveF2orF3(pfx)) goto decode_failure;
21190 am_sz = lengthAMode(pfx,delta);
21192 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz,
21208 if (haveF2orF3(pfx))
21237 delta = dis_FPU ( &decode_OK, vbi, pfx, delta );
21254 if (have66orF2orF3(pfx) || 1==getRexW(pfx)) goto decode_failure;
21261 if (haveASO(pfx)) {
21296 DIP("loop%s%s 0x%llx\n", xtra, haveASO(pfx) ? "l" : "", (ULong)d64);
21302 if (have66orF2orF3(pfx)) goto decode_failure;
21305 if (haveASO(pfx)) {
21360 if (haveF2orF3(pfx)) goto decode_failure;
21409 if (haveF2orF3(pfx)) goto decode_failure;
21425 if (haveF3(pfx)) goto decode_failure;
21426 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */
21449 if (haveF3(pfx)) goto decode_failure;
21452 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */
21466 if (haveF3(pfx)) goto decode_failure;
21469 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */
21517 /* RM'd: if (haveF2orF3(pfx)) goto decode_failure; */
21519 delta = dis_Grp3 ( vbi, pfx, 1, delta, &decode_OK );
21526 /* RM'd: if (haveF2orF3(pfx)) goto decode_failure; */
21528 pfx, sz, delta, &decode_OK );
21534 if (haveF2orF3(pfx)) goto decode_failure;
21540 if (haveF2orF3(pfx)) goto decode_failure;
21547 /* RM'd: if (haveF2orF3(pfx)) goto decode_failure; */
21549 delta = dis_Grp4 ( vbi, pfx, delta, &decode_OK );
21556 /* RM'd: if (haveF2orF3(pfx)) goto decode_failure; */
21558 delta = dis_Grp5 ( vbi, pfx, sz, delta, dres, &decode_OK );
21666 Prefix pfx, Int sz, Long deltaIN
21695 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
21819 if (have66orF2orF3(pfx)) goto decode_failure;
21824 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
21845 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
21861 if (have66orF2orF3(pfx)) goto decode_failure;
21886 if (haveF2orF3(pfx)) goto decode_failure;
21887 delta = dis_cmov_E_G(vbi, pfx, sz, (AMD64Condcode)(opc - 0x40), delta);
21908 if (haveF3(pfx)) goto decode_failure;
21909 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */
21984 if (haveF2orF3(pfx)) goto decode_failure;
21990 putIRegE(1, pfx, modrm, mkexpr(t1));
21992 nameIRegE(1,pfx,modrm));
21994 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
22021 int bnd = gregOfRexRM(pfx,modrm);
22024 oper = nameIReg64 (eregOfRexRM(pfx,modrm));
22027 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
22032 if (haveF3no66noF2(pfx)) {
22038 } else if (haveF2no66noF3(pfx)) {
22044 } else if (have66noF2noF3(pfx)) {
22050 } else if (haveNo66noF2noF3(pfx)) {
22070 if (haveF2orF3(pfx)) goto decode_failure;
22132 delta = dis_bt_G_E ( vbi, pfx, sz, delta, BtOpNone, &ok );
22139 d64 = delta + lengthAMode(pfx, delta);
22142 vbi, pfx, delta, modrm, sz,
22150 vbi, pfx, delta, modrm, sz,
22159 delta = dis_bt_G_E ( vbi, pfx, sz, delta, BtOpSet, &ok );
22166 d64 = delta + lengthAMode(pfx, delta);
22169 vbi, pfx, delta, modrm, sz,
22177 vbi, pfx, delta, modrm, sz,
22183 if (haveF2orF3(pfx)) goto decode_failure;
22184 delta = dis_mul_E_G ( vbi, pfx, sz, delta );
22190 delta = dis_cmpxchg_G_E ( &ok, vbi, pfx, 1, delta );
22199 delta = dis_cmpxchg_G_E ( &ok, vbi, pfx, sz, delta );
22208 delta = dis_bt_G_E ( vbi, pfx, sz, delta, BtOpReset, &ok );
22214 if (haveF2orF3(pfx)) goto decode_failure;
22217 delta = dis_movx_E_G ( vbi, pfx, delta, 1, sz, False );
22221 if (haveF2orF3(pfx)) goto decode_failure;
22224 delta = dis_movx_E_G ( vbi, pfx, delta, 2, sz, False );
22231 am_sz = lengthAMode(pfx,delta);
22233 delta = dis_Grp8_Imm ( vbi, pfx, delta, modrm, am_sz, sz, d64,
22244 delta = dis_bt_G_E ( vbi, pfx, sz, delta, BtOpComp, &ok );
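/* Illustrative sketch, not part of the decoder above: the four dis_bt_G_E
   calls just matched (BtOpNone/Set/Reset/Comp) correspond to BT, BTS, BTR
   and BTC.  For the register form the bit index is reduced modulo the
   operand width, the tested bit lands in CF, and the destination is then
   left alone, set, cleared or complemented.  A 64-bit register-form sketch
   with hypothetical names; the memory forms, which can index outside the
   operand, are not shown. */
#include <stdint.h>

typedef enum { SK_BtNone, SK_BtSet, SK_BtReset, SK_BtComp } SketchBtKind;

/* Returns the new destination value; *cf receives the tested bit. */
static uint64_t sketch_bt64_reg ( uint64_t dst, uint64_t bitNo,
                                  SketchBtKind kind, int* cf )
{
   uint64_t mask = 1ULL << (bitNo & 63);
   *cf = (dst & mask) != 0;
   switch (kind) {
      case SK_BtSet:   return dst | mask;
      case SK_BtReset: return dst & ~mask;
      case SK_BtComp:  return dst ^ mask;
      default:         return dst;          /* BT leaves the destination alone */
   }
}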
22250 if (!haveF2orF3(pfx)
22251 || (haveF3noF2(pfx)
22255 delta = dis_bs_E_G ( vbi, pfx, sz, delta, True );
22263 if (!haveF2orF3(pfx)
22264 || (haveF3noF2(pfx)
22268 delta = dis_bs_E_G ( vbi, pfx, sz, delta, False );
22276 if (haveF2orF3(pfx)) goto decode_failure;
22279 delta = dis_movx_E_G ( vbi, pfx, delta, 1, sz, True );
22283 if (haveF2orF3(pfx)) goto decode_failure;
22286 delta = dis_movx_E_G ( vbi, pfx, delta, 2, sz, True );
22291 delta = dis_xadd_G_E ( &decode_OK, vbi, pfx, 1, delta );
22299 delta = dis_xadd_G_E ( &decode_OK, vbi, pfx, sz, delta );
22330 if (have66(pfx)) goto decode_failure;
22337 if (haveF2orF3(pfx)) {
22342 if (haveF2andF3(pfx) || !haveLOCK(pfx)) goto decode_failure;
22345 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
22450 if (haveF2orF3(pfx)) goto decode_failure;
22455 assign( t1, getIRegRexB(4, pfx, opc-0xC8) );
22457 putIRegRexB(4, pfx, opc-0xC8, mkexpr(t2));
22458 DIP("bswapl %s\n", nameIRegRexB(4, pfx, opc-0xC8));
22464 assign( t1, getIRegRexB(8, pfx, opc-0xC8) );
22466 putIRegRexB(8, pfx, opc-0xC8, mkexpr(t2));
22467 DIP("bswapq %s\n", nameIRegRexB(8, pfx, opc-0xC8));
22481 if (!have66orF2orF3(pfx)) {
22558 delta = dis_MMX ( &decode_OK, vbi, pfx, sz, deltaIN );
22586 archinfo, vbi, pfx, sz, deltaIN, dres );
22596 delta = dis_ESC_0F__SSE3 ( &decode_OK, vbi, pfx, sz, deltaIN );
22607 archinfo, vbi, pfx, sz, deltaIN );
22632 Prefix pfx, Int sz, Long deltaIN
22642 if (!haveF2orF3(pfx) && !haveVEX(pfx)
22650 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
22657 putIRegG(sz, pfx, modrm, mkexpr(dst));
22658 DIP("movbe %s,%s\n", dis_buf, nameIRegG(sz, pfx, modrm));
22660 assign(src, getIRegG(sz, pfx, modrm));
22663 DIP("movbe %s,%s\n", nameIRegG(sz, pfx, modrm), dis_buf);
22681 delta = dis_ESC_0F38__SupSSE3 ( &decode_OK, vbi, pfx, sz, deltaIN );
22691 delta = dis_ESC_0F38__SSE4 ( &decode_OK, vbi, pfx, sz, deltaIN );
22709 if (have66noF2noF3(pfx) && (archinfo->hwcaps & VEX_HWCAPS_AMD64_AVX)) {
22713 delta = dis_op2_E_G ( vbi, pfx, Iop_Add8, WithFlagCarryX, True,
22717 if (haveF3no66noF2(pfx) && (archinfo->hwcaps & VEX_HWCAPS_AMD64_AVX)) {
22718 delta = dis_op2_E_G ( vbi, pfx, Iop_Add8, WithFlagOverX, True,
22750 Prefix pfx, Int sz, Long deltaIN
22768 delta = dis_ESC_0F3A__SupSSE3 ( &decode_OK, vbi, pfx, sz, deltaIN );
22778 delta = dis_ESC_0F3A__SSE4 ( &decode_OK, vbi, pfx, sz, deltaIN );
22797 Prefix pfx, Long delta, const HChar* name,
22806 UInt rD = gregOfRexRM(pfx, modrm);
22807 UInt rSL = getVexNvvvv(pfx);
22813 vassert(0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*WIG?*/);
22819 UInt rSR = eregOfRexRM(pfx, modrm);
22825 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
22865 Prefix pfx, Long delta, const HChar* name,
22870 uses_vvvv, vbi, pfx, delta, name, op, NULL, False, False);
22880 Prefix pfx, Long delta, const HChar* name,
22885 uses_vvvv, vbi, pfx, delta, name,
22893 Prefix pfx, Long delta,
22901 UInt rG = gregOfRexRM(pfx,modrm);
22902 UInt rV = getVexNvvvv(pfx);
22908 UInt rE = eregOfRexRM(pfx,modrm);
22914 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
22967 Prefix pfx, Long delta,
22975 UInt rG = gregOfRexRM(pfx,modrm);
22976 UInt rV = getVexNvvvv(pfx);
22982 UInt rE = eregOfRexRM(pfx,modrm);
22988 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23043 Prefix pfx, Long delta,
23050 UInt rG = gregOfRexRM(pfx,modrm);
23051 UInt rV = getVexNvvvv(pfx);
23056 UInt rE = eregOfRexRM(pfx,modrm);
23067 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23153 Long dis_AVX128_shiftE_to_V_imm( Prefix pfx,
23160 UInt rD = getVexNvvvv(pfx);
23169 nameXMMReg(eregOfRexRM(pfx,rm)),
23171 assign( e0, getXMMReg(eregOfRexRM(pfx,rm)) );
23210 Long dis_AVX256_shiftE_to_V_imm( Prefix pfx,
23217 UInt rD = getVexNvvvv(pfx);
23226 nameYMMReg(eregOfRexRM(pfx,rm)),
23228 assign( e0, getYMMReg(eregOfRexRM(pfx,rm)) );
23274 Prefix pfx, Long delta,
23281 UInt rG = gregOfRexRM(pfx,rm);
23282 UInt rV = getVexNvvvv(pfx);
23285 UInt rE = eregOfRexRM(pfx,rm);
23294 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23317 Prefix pfx, Long delta,
23324 UInt rG = gregOfRexRM(pfx,rm);
23325 UInt rV = getVexNvvvv(pfx);
23330 UInt rE = eregOfRexRM(pfx,rm);
23336 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23364 Prefix pfx, Long delta,
23371 UInt rG = gregOfRexRM(pfx,rm);
23372 UInt rV = getVexNvvvv(pfx);
23377 UInt rE = eregOfRexRM(pfx,rm);
23383 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23411 Prefix pfx, Long delta,
23418 UInt rG = gregOfRexRM(pfx,rm);
23419 UInt rV = getVexNvvvv(pfx);
23422 UInt rE = eregOfRexRM(pfx,rm);
23431 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23451 Prefix pfx, Long delta,
23455 uses_vvvv, vbi, pfx, delta, opname, op,
23467 Prefix pfx, Long delta,
23481 UInt rG = gregOfRexRM(pfx, rm);
23482 UInt rV = getVexNvvvv(pfx);
23491 UInt rE = eregOfRexRM(pfx,rm);
23498 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
23577 Prefix pfx, Long delta,
23591 UInt rG = gregOfRexRM(pfx, rm);
23592 UInt rV = getVexNvvvv(pfx);
23606 UInt rE = eregOfRexRM(pfx,rm);
23613 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
23647 Prefix pfx, Long delta,
23657 UInt rG = gregOfRexRM(pfx, rm);
23659 UInt rE = eregOfRexRM(pfx,rm);
23664 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23680 Prefix pfx, Long delta,
23688 UInt rG = gregOfRexRM(pfx, rm);
23690 UInt rE = eregOfRexRM(pfx,rm);
23695 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23716 Prefix pfx, Long delta, const HChar* name,
23725 UInt rD = gregOfRexRM(pfx, modrm);
23726 UInt rSL = getVexNvvvv(pfx);
23732 vassert(1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*WIG?*/);
23738 UInt rSR = eregOfRexRM(pfx, modrm);
23744 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
23783 Prefix pfx, Long delta,
23787 uses_vvvv, vbi, pfx, delta, opname, op,
23799 Prefix pfx, Long delta, const HChar* name,
23804 uses_vvvv, vbi, pfx, delta, name, op, NULL, False, False);
23814 Prefix pfx, Long delta, const HChar* name,
23819 uses_vvvv, vbi, pfx, delta, name,
23828 Prefix pfx, Long delta,
23838 UInt rG = gregOfRexRM(pfx, rm);
23840 UInt rE = eregOfRexRM(pfx,rm);
23845 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23861 Prefix pfx, Long delta,
23869 UInt rG = gregOfRexRM(pfx, rm);
23871 UInt rE = eregOfRexRM(pfx,rm);
23876 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23889 static Long dis_CVTDQ2PD_256 ( const VexAbiInfo* vbi, Prefix pfx,
23897 UInt rG = gregOfRexRM(pfx,modrm);
23899 UInt rE = eregOfRexRM(pfx,modrm);
23904 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23925 static Long dis_CVTPD2PS_256 ( const VexAbiInfo* vbi, Prefix pfx,
23932 UInt rG = gregOfRexRM(pfx,modrm);
23936 UInt rE = eregOfRexRM(pfx,modrm);
23941 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
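/* Illustrative sketch, not part of the decoder: the getVexNvvvv/getVexL uses
   in the AVX helpers through this part of the listing rely on fields that
   were recovered when the VEX prefix was parsed.  As a reminder of the
   encoding only (this is not how the file stores them): in a two-byte VEX
   prefix C5 xx, R and vvvv are held inverted, L selects 128 vs 256 bit, and
   pp encodes an implied 66/F3/F2.  Hypothetical names throughout. */
#include <stdint.h>

typedef struct { unsigned rexR, vvvv, L, pp; } SketchVex2;

/* b is the byte that follows the 0xC5 escape. */
static SketchVex2 sketch_vex2_fields ( uint8_t b )
{
   SketchVex2 f;
   f.rexR = (~(unsigned)b >> 7) & 1;   /* stored inverted                  */
   f.vvvv = (~(unsigned)b >> 3) & 0xF; /* second source register, inverted */
   f.L    = (b >> 2) & 1;              /* 0 = 128-bit, 1 = 256-bit         */
   f.pp   = b & 3;                     /* 0 = none, 1 = 66, 2 = F3, 3 = F2 */
   return f;
}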
24059 Prefix pfx, Int sz, Long deltaIN
24076 if (haveF2no66noF3(pfx) && !epartIsReg(getUChar(delta))) {
24078 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24079 UInt rG = gregOfRexRM(pfx,modrm);
24092 if (haveF2no66noF3(pfx) && epartIsReg(getUChar(delta))) {
24094 UInt rG = gregOfRexRM(pfx, modrm);
24095 UInt rE = eregOfRexRM(pfx, modrm);
24096 UInt rV = getVexNvvvv(pfx);
24111 if (haveF3no66noF2(pfx) && !epartIsReg(getUChar(delta))) {
24113 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24114 UInt rG = gregOfRexRM(pfx,modrm);
24127 if (haveF3no66noF2(pfx) && epartIsReg(getUChar(delta))) {
24129 UInt rG = gregOfRexRM(pfx, modrm);
24130 UInt rE = eregOfRexRM(pfx, modrm);
24131 UInt rV = getVexNvvvv(pfx);
24146 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24148 UInt rG = gregOfRexRM(pfx, modrm);
24150 UInt rE = eregOfRexRM(pfx,modrm);
24155 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24163 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24165 UInt rG = gregOfRexRM(pfx, modrm);
24167 UInt rE = eregOfRexRM(pfx,modrm);
24172 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24180 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24182 UInt rG = gregOfRexRM(pfx, modrm);
24184 UInt rE = eregOfRexRM(pfx,modrm);
24189 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24197 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24199 UInt rG = gregOfRexRM(pfx, modrm);
24201 UInt rE = eregOfRexRM(pfx,modrm);
24206 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24218 if (haveF2no66noF3(pfx) && !epartIsReg(getUChar(delta))) {
24220 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24221 UInt rG = gregOfRexRM(pfx,modrm);
24230 if (haveF2no66noF3(pfx) && epartIsReg(getUChar(delta))) {
24232 UInt rG = gregOfRexRM(pfx, modrm);
24233 UInt rE = eregOfRexRM(pfx, modrm);
24234 UInt rV = getVexNvvvv(pfx);
24248 if (haveF3no66noF2(pfx) && !epartIsReg(getUChar(delta))) {
24250 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24251 UInt rG = gregOfRexRM(pfx,modrm);
24260 if (haveF3no66noF2(pfx) && epartIsReg(getUChar(delta))) {
24262 UInt rG = gregOfRexRM(pfx, modrm);
24263 UInt rE = eregOfRexRM(pfx, modrm);
24264 UInt rV = getVexNvvvv(pfx);
24279 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24281 UInt rG = gregOfRexRM(pfx,modrm);
24283 UInt rE = eregOfRexRM(pfx,modrm);
24288 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24296 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24298 UInt rG = gregOfRexRM(pfx,modrm);
24300 UInt rE = eregOfRexRM(pfx,modrm);
24305 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24313 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24315 UInt rG = gregOfRexRM(pfx,modrm);
24317 UInt rE = eregOfRexRM(pfx,modrm);
24322 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24330 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24332 UInt rG = gregOfRexRM(pfx,modrm);
24334 UInt rE = eregOfRexRM(pfx,modrm);
24339 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24350 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24351 delta = dis_MOVDDUP_128( vbi, pfx, delta, True/*isAvx*/ );
24355 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24356 delta = dis_MOVDDUP_256( vbi, pfx, delta );
24361 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
24364 UInt rG = gregOfRexRM(pfx, modrm);
24365 UInt rE = eregOfRexRM(pfx, modrm);
24366 UInt rV = getVexNvvvv(pfx);
24382 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx))
24383 && 0==getVexL(pfx)/*128*/ && !epartIsReg(getUChar(delta))) {
24385 UInt rG = gregOfRexRM(pfx, modrm);
24386 UInt rV = getVexNvvvv(pfx);
24387 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24400 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*128*/) {
24401 delta = dis_MOVSxDUP_128( vbi, pfx, delta, True/*isAvx*/,
24406 if (haveF3no66noF2(pfx) && 1==getVexL(pfx)/*256*/) {
24407 delta = dis_MOVSxDUP_256( vbi, pfx, delta, True/*isL*/ );
24417 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx))
24418 && 0==getVexL(pfx)/*128*/ && !epartIsReg(getUChar(delta))) {
24420 UInt rG = gregOfRexRM(pfx, modrm);
24421 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24433 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24436 UInt rG = gregOfRexRM(pfx,modrm);
24437 UInt rV = getVexNvvvv(pfx);
24442 UInt rE = eregOfRexRM(pfx,modrm);
24448 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24461 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24464 UInt rG = gregOfRexRM(pfx,modrm);
24465 UInt rV = getVexNvvvv(pfx);
24470 UInt rE = eregOfRexRM(pfx,modrm);
24476 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24489 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24492 UInt rG = gregOfRexRM(pfx,modrm);
24493 UInt rV = getVexNvvvv(pfx);
24498 UInt rE = eregOfRexRM(pfx,modrm);
24504 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24517 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24520 UInt rG = gregOfRexRM(pfx,modrm);
24521 UInt rV = getVexNvvvv(pfx);
24526 UInt rE = eregOfRexRM(pfx,modrm);
24532 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24548 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
24551 UInt rG = gregOfRexRM(pfx, modrm);
24552 UInt rE = eregOfRexRM(pfx, modrm);
24553 UInt rV = getVexNvvvv(pfx);
24569 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx))
24570 && 0==getVexL(pfx)/*128*/ && !epartIsReg(getUChar(delta))) {
24572 UInt rG = gregOfRexRM(pfx, modrm);
24573 UInt rV = getVexNvvvv(pfx);
24574 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24576 DIP("vmovhp%c %s,%s,%s\n", have66(pfx) ? 'd' : 's',
24587 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*128*/) {
24588 delta = dis_MOVSxDUP_128( vbi, pfx, delta, True/*isAvx*/,
24593 if (haveF3no66noF2(pfx) && 1==getVexL(pfx)/*256*/) {
24594 delta = dis_MOVSxDUP_256( vbi, pfx, delta, False/*!isL*/ );
24604 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx))
24605 && 0==getVexL(pfx)/*128*/ && !epartIsReg(getUChar(delta))) {
24607 UInt rG = gregOfRexRM(pfx, modrm);
24608 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24611 DIP("vmovhp%c %s,%s\n", have66(pfx) ? 'd' : 's',
24619 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24621 UInt rG = gregOfRexRM(pfx, modrm);
24623 UInt rE = eregOfRexRM(pfx,modrm);
24628 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24637 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24639 UInt rG = gregOfRexRM(pfx, modrm);
24641 UInt rE = eregOfRexRM(pfx,modrm);
24646 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24655 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24657 UInt rG = gregOfRexRM(pfx, modrm);
24659 UInt rE = eregOfRexRM(pfx,modrm);
24664 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24673 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24675 UInt rG = gregOfRexRM(pfx, modrm);
24677 UInt rE = eregOfRexRM(pfx,modrm);
24682 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24694 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24696 UInt rG = gregOfRexRM(pfx,modrm);
24698 UInt rE = eregOfRexRM(pfx,modrm);
24703 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24712 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24714 UInt rG = gregOfRexRM(pfx,modrm);
24716 UInt rE = eregOfRexRM(pfx,modrm);
24721 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24730 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24732 UInt rG = gregOfRexRM(pfx,modrm);
24734 UInt rE = eregOfRexRM(pfx,modrm);
24740 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24749 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24751 UInt rG = gregOfRexRM(pfx,modrm);
24753 UInt rE = eregOfRexRM(pfx,modrm);
24759 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24773 if (haveF2no66noF3(pfx) && 0==getRexW(pfx)/*W0*/) {
24775 UInt rV = getVexNvvvv(pfx);
24776 UInt rD = gregOfRexRM(pfx, modrm);
24779 UInt rS = eregOfRexRM(pfx,modrm);
24785 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24799 if (haveF2no66noF3(pfx) && 1==getRexW(pfx)/*W1*/) {
24801 UInt rV = getVexNvvvv(pfx);
24802 UInt rD = gregOfRexRM(pfx, modrm);
24805 UInt rS = eregOfRexRM(pfx,modrm);
24811 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24827 if (haveF3no66noF2(pfx) && 1==getRexW(pfx)/*W1*/) {
24829 UInt rV = getVexNvvvv(pfx);
24830 UInt rD = gregOfRexRM(pfx, modrm);
24833 UInt rS = eregOfRexRM(pfx,modrm);
24839 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24857 if (haveF3no66noF2(pfx) && 0==getRexW(pfx)/*W0*/) {
24859 UInt rV = getVexNvvvv(pfx);
24860 UInt rD = gregOfRexRM(pfx, modrm);
24863 UInt rS = eregOfRexRM(pfx,modrm);
24869 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24891 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx))
24892 && 0==getVexL(pfx)/*128*/ && !epartIsReg(getUChar(delta))) {
24894 UInt rS = gregOfRexRM(pfx, modrm);
24897 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
24901 DIP("vmovntp%c %s,%s\n", have66(pfx) ? 'd' : 's',
24907 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx))
24908 && 1==getVexL(pfx)/*256*/ && !epartIsReg(getUChar(delta))) {
24910 UInt rS = gregOfRexRM(pfx, modrm);
24913 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
24917 DIP("vmovntp%c %s,%s\n", have66(pfx) ? 'd' : 's',
24925 if (haveF2no66noF3(pfx) && 0==getRexW(pfx)/*W0*/) {
24926 delta = dis_CVTxSD2SI( vbi, pfx, delta, True/*isAvx*/, opc, 4);
24930 if (haveF2no66noF3(pfx) && 1==getRexW(pfx)/*W1*/) {
24931 delta = dis_CVTxSD2SI( vbi, pfx, delta, True/*isAvx*/, opc, 8);
24935 if (haveF3no66noF2(pfx) && 0==getRexW(pfx)/*W0*/) {
24936 delta = dis_CVTxSS2SI( vbi, pfx, delta, True/*isAvx*/, opc, 4);
24940 if (haveF3no66noF2(pfx) && 1==getRexW(pfx)/*W1*/) {
24941 delta = dis_CVTxSS2SI( vbi, pfx, delta, True/*isAvx*/, opc, 8);
24948 if (haveF2no66noF3(pfx) && 0==getRexW(pfx)/*W0*/) {
24949 delta = dis_CVTxSD2SI( vbi, pfx, delta, True/*isAvx*/, opc, 4);
24953 if (haveF2no66noF3(pfx) && 1==getRexW(pfx)/*W1*/) {
24954 delta = dis_CVTxSD2SI( vbi, pfx, delta, True/*isAvx*/, opc, 8);
24958 if (haveF3no66noF2(pfx) && 0==getRexW(pfx)/*W0*/) {
24959 delta = dis_CVTxSS2SI( vbi, pfx, delta, True/*isAvx*/, opc, 4);
24963 if (haveF3no66noF2(pfx) && 1==getRexW(pfx)/*W1*/) {
24964 delta = dis_CVTxSS2SI( vbi, pfx, delta, True/*isAvx*/, opc, 8);
24973 if (have66noF2noF3(pfx)) {
24974 delta = dis_COMISD( vbi, pfx, delta, True/*isAvx*/, opc );
24979 if (haveNo66noF2noF3(pfx)) {
24980 delta = dis_COMISS( vbi, pfx, delta, True/*isAvx*/, opc );
24987 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24988 delta = dis_MOVMSKPD_128( vbi, pfx, delta, True/*isAvx*/ );
24992 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24993 delta = dis_MOVMSKPD_256( vbi, pfx, delta );
24997 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24998 delta = dis_MOVMSKPS_128( vbi, pfx, delta, True/*isAvx*/ );
25002 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25003 delta = dis_MOVMSKPS_256( vbi, pfx, delta );
25010 if (haveF3no66noF2(pfx)) {
25012 uses_vvvv, vbi, pfx, delta, "vsqrtss", Iop_Sqrt32F0x4 );
25016 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25018 uses_vvvv, vbi, pfx, delta, "vsqrtps", Iop_Sqrt32Fx4 );
25022 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25024 uses_vvvv, vbi, pfx, delta, "vsqrtps", Iop_Sqrt32Fx8 );
25028 if (haveF2no66noF3(pfx)) {
25030 uses_vvvv, vbi, pfx, delta, "vsqrtsd", Iop_Sqrt64F0x2 );
25034 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25036 uses_vvvv, vbi, pfx, delta, "vsqrtpd", Iop_Sqrt64Fx2 );
25040 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25042 uses_vvvv, vbi, pfx, delta, "vsqrtpd", Iop_Sqrt64Fx4 );
25049 if (haveF3no66noF2(pfx)) {
25051 uses_vvvv, vbi, pfx, delta, "vrsqrtss",
25056 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25058 uses_vvvv, vbi, pfx, delta, "vrsqrtps", Iop_RSqrtEst32Fx4 );
25062 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25064 uses_vvvv, vbi, pfx, delta, "vrsqrtps", Iop_RSqrtEst32Fx8 );
25071 if (haveF3no66noF2(pfx)) {
25073 uses_vvvv, vbi, pfx, delta, "vrcpss", Iop_RecipEst32F0x4 );
25077 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25079 uses_vvvv, vbi, pfx, delta, "vrcpps", Iop_RecipEst32Fx4 );
25083 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25085 uses_vvvv, vbi, pfx, delta, "vrcpps", Iop_RecipEst32Fx8 );
25093 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25095 uses_vvvv, vbi, pfx, delta, "vandpd", Iop_AndV128 );
25100 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25102 uses_vvvv, vbi, pfx, delta, "vandpd", Iop_AndV256 );
25106 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25108 uses_vvvv, vbi, pfx, delta, "vandps", Iop_AndV128 );
25112 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25114 uses_vvvv, vbi, pfx, delta, "vandps", Iop_AndV256 );
25122 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25124 uses_vvvv, vbi, pfx, delta, "vandpd", Iop_AndV128,
25129 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25131 uses_vvvv, vbi, pfx, delta, "vandpd", Iop_AndV256,
25136 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25138 uses_vvvv, vbi, pfx, delta, "vandps", Iop_AndV128,
25143 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25145 uses_vvvv, vbi, pfx, delta, "vandps", Iop_AndV256,
25154 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25156 uses_vvvv, vbi, pfx, delta, "vorpd", Iop_OrV128 );
25161 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25163 uses_vvvv, vbi, pfx, delta, "vorpd", Iop_OrV256 );
25168 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25170 uses_vvvv, vbi, pfx, delta, "vorps", Iop_OrV128 );
25175 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25177 uses_vvvv, vbi, pfx, delta, "vorps", Iop_OrV256 );
25185 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25187 uses_vvvv, vbi, pfx, delta, "vxorpd", Iop_XorV128 );
25192 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25194 uses_vvvv, vbi, pfx, delta, "vxorpd", Iop_XorV256 );
25199 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25201 uses_vvvv, vbi, pfx, delta, "vxorps", Iop_XorV128 );
25206 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25208 uses_vvvv, vbi, pfx, delta, "vxorps", Iop_XorV256 );
25215 if (haveF2no66noF3(pfx)) {
25217 uses_vvvv, vbi, pfx, delta, "vaddsd", Iop_Add64F0x2 );
25221 if (haveF3no66noF2(pfx)) {
25223 uses_vvvv, vbi, pfx, delta, "vaddss", Iop_Add32F0x4 );
25227 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25229 uses_vvvv, vbi, pfx, delta, "vaddps", Iop_Add32Fx4 );
25233 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25235 uses_vvvv, vbi, pfx, delta, "vaddps", Iop_Add32Fx8 );
25239 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25241 uses_vvvv, vbi, pfx, delta, "vaddpd", Iop_Add64Fx2 );
25245 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25247 uses_vvvv, vbi, pfx, delta, "vaddpd", Iop_Add64Fx4 );
25254 if (haveF2no66noF3(pfx)) {
25256 uses_vvvv, vbi, pfx, delta, "vmulsd", Iop_Mul64F0x2 );
25260 if (haveF3no66noF2(pfx)) {
25262 uses_vvvv, vbi, pfx, delta, "vmulss", Iop_Mul32F0x4 );
25266 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25268 uses_vvvv, vbi, pfx, delta, "vmulps", Iop_Mul32Fx4 );
25272 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25274 uses_vvvv, vbi, pfx, delta, "vmulps", Iop_Mul32Fx8 );
25278 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25280 uses_vvvv, vbi, pfx, delta, "vmulpd", Iop_Mul64Fx2 );
25284 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25286 uses_vvvv, vbi, pfx, delta, "vmulpd", Iop_Mul64Fx4 );
25293 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25294 delta = dis_CVTPS2PD_128( vbi, pfx, delta, True/*isAvx*/ );
25298 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25299 delta = dis_CVTPS2PD_256( vbi, pfx, delta );
25303 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25304 delta = dis_CVTPD2PS_128( vbi, pfx, delta, True/*isAvx*/ );
25308 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25309 delta = dis_CVTPD2PS_256( vbi, pfx, delta );
25313 if (haveF2no66noF3(pfx)) {
25315 UInt rV = getVexNvvvv(pfx);
25316 UInt rD = gregOfRexRM(pfx, modrm);
25321 UInt rS = eregOfRexRM(pfx,modrm);
25327 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
25343 if (haveF3no66noF2(pfx)) {
25345 UInt rV = getVexNvvvv(pfx);
25346 UInt rD = gregOfRexRM(pfx, modrm);
25349 UInt rS = eregOfRexRM(pfx,modrm);
25355 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
25372 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25373 delta = dis_CVTxPS2DQ_128( vbi, pfx, delta,
25378 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25379 delta = dis_CVTxPS2DQ_256( vbi, pfx, delta,
25384 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*128*/) {
25385 delta = dis_CVTxPS2DQ_128( vbi, pfx, delta,
25390 if (haveF3no66noF2(pfx) && 1==getVexL(pfx)/*256*/) {
25391 delta = dis_CVTxPS2DQ_256( vbi, pfx, delta,
25396 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25397 delta = dis_CVTDQ2PS_128 ( vbi, pfx, delta, True/*isAvx*/ );
25401 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25402 delta = dis_CVTDQ2PS_256 ( vbi, pfx, delta );
25409 if (haveF2no66noF3(pfx)) {
25411 uses_vvvv, vbi, pfx, delta, "vsubsd", Iop_Sub64F0x2 );
25415 if (haveF3no66noF2(pfx)) {
25417 uses_vvvv, vbi, pfx, delta, "vsubss", Iop_Sub32F0x4 );
25421 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25423 uses_vvvv, vbi, pfx, delta, "vsubps", Iop_Sub32Fx4 );
25427 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25429 uses_vvvv, vbi, pfx, delta, "vsubps", Iop_Sub32Fx8 );
25433 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25435 uses_vvvv, vbi, pfx, delta, "vsubpd", Iop_Sub64Fx2 );
25439 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25441 uses_vvvv, vbi, pfx, delta, "vsubpd", Iop_Sub64Fx4 );
25448 if (haveF2no66noF3(pfx)) {
25450 uses_vvvv, vbi, pfx, delta, "vminsd", Iop_Min64F0x2 );
25454 if (haveF3no66noF2(pfx)) {
25456 uses_vvvv, vbi, pfx, delta, "vminss", Iop_Min32F0x4 );
25460 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25462 uses_vvvv, vbi, pfx, delta, "vminps", Iop_Min32Fx4 );
25466 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25468 uses_vvvv, vbi, pfx, delta, "vminps", Iop_Min32Fx8 );
25472 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25474 uses_vvvv, vbi, pfx, delta, "vminpd", Iop_Min64Fx2 );
25478 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25480 uses_vvvv, vbi, pfx, delta, "vminpd", Iop_Min64Fx4 );
25487 if (haveF2no66noF3(pfx)) {
25489 uses_vvvv, vbi, pfx, delta, "vdivsd", Iop_Div64F0x2 );
25493 if (haveF3no66noF2(pfx)) {
25495 uses_vvvv, vbi, pfx, delta, "vdivss", Iop_Div32F0x4 );
25499 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25501 uses_vvvv, vbi, pfx, delta, "vdivps", Iop_Div32Fx4 );
25505 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25507 uses_vvvv, vbi, pfx, delta, "vdivps", Iop_Div32Fx8 );
25511 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25513 uses_vvvv, vbi, pfx, delta, "vdivpd", Iop_Div64Fx2 );
25517 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25519 uses_vvvv, vbi, pfx, delta, "vdivpd", Iop_Div64Fx4 );
25526 if (haveF2no66noF3(pfx)) {
25528 uses_vvvv, vbi, pfx, delta, "vmaxsd", Iop_Max64F0x2 );
25532 if (haveF3no66noF2(pfx)) {
25534 uses_vvvv, vbi, pfx, delta, "vmaxss", Iop_Max32F0x4 );
25538 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25540 uses_vvvv, vbi, pfx, delta, "vmaxps", Iop_Max32Fx4 );
25544 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25546 uses_vvvv, vbi, pfx, delta, "vmaxps", Iop_Max32Fx8 );
25550 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25552 uses_vvvv, vbi, pfx, delta, "vmaxpd", Iop_Max64Fx2 );
25556 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25558 uses_vvvv, vbi, pfx, delta, "vmaxpd", Iop_Max64Fx4 );
25566 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25568 uses_vvvv, vbi, pfx, delta, "vpunpcklbw",
25575 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25577 uses_vvvv, vbi, pfx, delta, "vpunpcklbw",
25586 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25588 uses_vvvv, vbi, pfx, delta, "vpunpcklwd",
25595 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25597 uses_vvvv, vbi, pfx, delta, "vpunpcklwd",
25606 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25608 uses_vvvv, vbi, pfx, delta, "vpunpckldq",
25615 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25617 uses_vvvv, vbi, pfx, delta, "vpunpckldq",
25626 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25628 uses_vvvv, vbi, pfx, delta, "vpacksswb",
25635 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25637 uses_vvvv, vbi, pfx, delta, "vpacksswb",
25646 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25648 uses_vvvv, vbi, pfx, delta, "vpcmpgtb", Iop_CmpGT8Sx16 );
25653 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25655 uses_vvvv, vbi, pfx, delta, "vpcmpgtb", Iop_CmpGT8Sx32 );
25663 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25665 uses_vvvv, vbi, pfx, delta, "vpcmpgtw", Iop_CmpGT16Sx8 );
25670 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25672 uses_vvvv, vbi, pfx, delta, "vpcmpgtw", Iop_CmpGT16Sx16 );
25680 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25682 uses_vvvv, vbi, pfx, delta, "vpcmpgtd", Iop_CmpGT32Sx4 );
25687 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25689 uses_vvvv, vbi, pfx, delta, "vpcmpgtd", Iop_CmpGT32Sx8 );
25697 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25699 uses_vvvv, vbi, pfx, delta, "vpackuswb",
25706 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25708 uses_vvvv, vbi, pfx, delta, "vpackuswb",
25717 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25719 uses_vvvv, vbi, pfx, delta, "vpunpckhbw",
25726 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25728 uses_vvvv, vbi, pfx, delta, "vpunpckhbw",
25737 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25739 uses_vvvv, vbi, pfx, delta, "vpunpckhwd",
25746 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25748 uses_vvvv, vbi, pfx, delta, "vpunpckhwd",
25757 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25759 uses_vvvv, vbi, pfx, delta, "vpunpckhdq",
25766 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25768 uses_vvvv, vbi, pfx, delta, "vpunpckhdq",
25777 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25779 uses_vvvv, vbi, pfx, delta, "vpackssdw",
25786 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25788 uses_vvvv, vbi, pfx, delta, "vpackssdw",
25797 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25799 uses_vvvv, vbi, pfx, delta, "vpunpcklqdq",
25806 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25808 uses_vvvv, vbi, pfx, delta, "vpunpcklqdq",
25817 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25819 uses_vvvv, vbi, pfx, delta, "vpunpckhqdq",
25826 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25828 uses_vvvv, vbi, pfx, delta, "vpunpckhqdq",
25836 if (have66noF2noF3(pfx)
25837 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
25843 gregOfRexRM(pfx,modrm),
25844 unop( Iop_32UtoV128, getIReg32(eregOfRexRM(pfx,modrm)) )
25846 DIP("vmovd %s, %s\n", nameIReg32(eregOfRexRM(pfx,modrm)),
25847 nameXMMReg(gregOfRexRM(pfx,modrm)));
25849 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
25852 gregOfRexRM(pfx,modrm),
25856 nameXMMReg(gregOfRexRM(pfx,modrm)));
25861 if (have66noF2noF3(pfx)
25862 && 0==getVexL(pfx)/*128*/ && 1==getRexW(pfx)/*W1*/) {
25868 gregOfRexRM(pfx,modrm),
25869 unop( Iop_64UtoV128, getIReg64(eregOfRexRM(pfx,modrm)) )
25871 DIP("vmovq %s, %s\n", nameIReg64(eregOfRexRM(pfx,modrm)),
25872 nameXMMReg(gregOfRexRM(pfx,modrm)));
25874 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
25877 gregOfRexRM(pfx,modrm),
25881 nameXMMReg(gregOfRexRM(pfx,modrm)));
25890 if ((have66noF2noF3(pfx) || haveF3no66noF2(pfx))
25891 && 1==getVexL(pfx)/*256*/) {
25893 UInt rD = gregOfRexRM(pfx, modrm);
25895 Bool isA = have66noF2noF3(pfx);
25898 UInt rS = eregOfRexRM(pfx, modrm);
25903 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
25915 if ((have66noF2noF3(pfx) || haveF3no66noF2(pfx))

25916 && 0==getVexL(pfx)/*128*/) {
25918 UInt rD = gregOfRexRM(pfx, modrm);
25920 Bool isA = have66noF2noF3(pfx);
25923 UInt rS = eregOfRexRM(pfx, modrm);
25928 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
25942 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25943 delta = dis_PSHUFD_32x4( vbi, pfx, delta, True/*writesYmm*/);
25947 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25948 delta = dis_PSHUFD_32x8( vbi, pfx, delta);
25952 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25953 delta = dis_PSHUFxW_128( vbi, pfx, delta,
25958 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25959 delta = dis_PSHUFxW_256( vbi, pfx, delta, False/*!xIsH*/ );
25963 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*128*/) {
25964 delta = dis_PSHUFxW_128( vbi, pfx, delta,
25969 if (haveF3no66noF2(pfx) && 1==getVexL(pfx)/*256*/) {
25970 delta = dis_PSHUFxW_256( vbi, pfx, delta, True/*xIsH*/ );
25979 if (have66noF2noF3(pfx)
25980 && 0==getVexL(pfx)/*128*/
25983 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta,
25989 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta,
25995 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta,
26005 if (have66noF2noF3(pfx)
26006 && 1==getVexL(pfx)/*256*/
26009 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta,
26015 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta,
26021 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta,
26034 if (have66noF2noF3(pfx)
26035 && 0==getVexL(pfx)/*128*/
26038 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta,
26044 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta,
26050 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta,
26060 if (have66noF2noF3(pfx)
26061 && 1==getVexL(pfx)/*256*/
26064 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta,
26070 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta,
26076 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta,
26090 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
26092 Int rS = eregOfRexRM(pfx,getUChar(delta));
26093 Int rD = getVexNvvvv(pfx);
26114 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta,
26120 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta,
26131 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
26133 Int rS = eregOfRexRM(pfx,getUChar(delta));
26134 Int rD = getVexNvvvv(pfx);
26162 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta,
26168 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta,
26180 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26182 uses_vvvv, vbi, pfx, delta, "vpcmpeqb", Iop_CmpEQ8x16 );
26187 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26189 uses_vvvv, vbi, pfx, delta, "vpcmpeqb", Iop_CmpEQ8x32 );
26197 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26199 uses_vvvv, vbi, pfx, delta, "vpcmpeqw", Iop_CmpEQ16x8 );
26204 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26206 uses_vvvv, vbi, pfx, delta, "vpcmpeqw", Iop_CmpEQ16x16 );
26214 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26216 uses_vvvv, vbi, pfx, delta, "vpcmpeqd", Iop_CmpEQ32x4 );
26221 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26223 uses_vvvv, vbi, pfx, delta, "vpcmpeqd", Iop_CmpEQ32x8 );
26230 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26241 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26257 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26263 UInt rG = gregOfRexRM(pfx,modrm);
26264 UInt rV = getVexNvvvv(pfx);
26266 UInt rE = eregOfRexRM(pfx,modrm);
26272 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
26285 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26292 UInt rG = gregOfRexRM(pfx,modrm);
26293 UInt rV = getVexNvvvv(pfx);
26296 UInt rE = eregOfRexRM(pfx,modrm);
26302 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
26319 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26325 UInt rG = gregOfRexRM(pfx,modrm);
26326 UInt rV = getVexNvvvv(pfx);
26328 UInt rE = eregOfRexRM(pfx,modrm);
26334 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
26347 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26354 UInt rG = gregOfRexRM(pfx,modrm);
26355 UInt rV = getVexNvvvv(pfx);
26358 UInt rE = eregOfRexRM(pfx,modrm);
26364 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
26387 if (haveF3no66noF2(pfx)
26388 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
26391 UInt rG = gregOfRexRM(pfx,modrm);
26393 UInt rE = eregOfRexRM(pfx,modrm);
26398 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
26411 if (have66noF2noF3(pfx)
26412 && 0==getVexL(pfx)/*128*/ && 1==getRexW(pfx)/*W1*/) {
26414 UInt rG = gregOfRexRM(pfx,modrm);
26416 UInt rE = eregOfRexRM(pfx,modrm);
26421 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
26430 if (have66noF2noF3(pfx)
26431 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
26433 UInt rG = gregOfRexRM(pfx,modrm);
26435 UInt rE = eregOfRexRM(pfx,modrm);
26440 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
26452 if ((have66noF2noF3(pfx) || haveF3no66noF2(pfx))
26453 && 1==getVexL(pfx)/*256*/) {
26455 UInt rS = gregOfRexRM(pfx, modrm);
26457 Bool isA = have66noF2noF3(pfx);
26461 UInt rD = eregOfRexRM(pfx, modrm);
26466 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
26477 if ((have66noF2noF3(pfx) || haveF3no66noF2(pfx))
26478 && 0==getVexL(pfx)/*128*/) {
26480 UInt rS = gregOfRexRM(pfx, modrm);
26482 Bool isA = have66noF2noF3(pfx);
26486 UInt rD = eregOfRexRM(pfx, modrm);
26491 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
26504 if (haveNo66noF2noF3(pfx)
26505 && 0==getVexL(pfx)/*LZ*/
26506 && 0==getRexW(pfx) /* be paranoid -- Intel docs don't require this */
26509 delta = dis_STMXCSR(vbi, pfx, delta, True/*isAvx*/);
26513 if (haveNo66noF2noF3(pfx)
26514 && 0==getVexL(pfx)/*LZ*/
26515 && 0==getRexW(pfx) /* be paranoid -- Intel docs don't require this */
26518 delta = dis_LDMXCSR(vbi, pfx, delta, True/*isAvx*/);
26526 if (haveF2no66noF3(pfx)) {
26528 delta = dis_AVX128_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta,
26536 if (haveF3no66noF2(pfx)) {
26538 delta = dis_AVX128_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta,
26546 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26548 delta = dis_AVX128_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta,
26556 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26558 delta = dis_AVX256_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta,
26565 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26567 delta = dis_AVX128_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta,
26575 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26577 delta = dis_AVX256_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta,
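For the 0xC2 compares dispatched above, a trailing imm8 selects one of 32 predicates. A one-lane plain-C sketch of a few of them (the rest omitted), not the IR the decoder builds:

static int cmp_lane_f64(double a, double b, int imm8)
{
   switch (imm8 & 0x1F) {
      case 0x0: return a == b;              /* EQ_OQ: false if either is NaN */
      case 0x1: return a <  b;              /* LT_OS */
      case 0x2: return a <= b;              /* LE_OS */
      case 0x3: return a != a || b != b;    /* UNORD_Q */
      case 0x4: return !(a == b);           /* NEQ_UQ: true if either is NaN */
      default:  return 0;                   /* remaining predicates omitted */
   }
}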
26586 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26588 UInt rG = gregOfRexRM(pfx, modrm);
26589 UInt rV = getVexNvvvv(pfx);
26596 getIReg32(eregOfRexRM(pfx,modrm))) );
26599 nameIReg32( eregOfRexRM(pfx, modrm) ), nameXMMReg(rG) );
26601 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
26620 if (have66noF2noF3(pfx)
26621 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
26623 delta = dis_PEXTRW_128_EregOnly_toG( vbi, pfx, delta,
26633 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26638 UInt rG = gregOfRexRM(pfx,modrm);
26639 UInt rV = getVexNvvvv(pfx);
26642 UInt rE = eregOfRexRM(pfx,modrm);
26649 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
26663 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26668 UInt rG = gregOfRexRM(pfx,modrm);
26669 UInt rV = getVexNvvvv(pfx);
26672 UInt rE = eregOfRexRM(pfx,modrm);
26679 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
26693 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26698 UInt rG = gregOfRexRM(pfx,modrm);
26699 UInt rV = getVexNvvvv(pfx);
26702 UInt rE = eregOfRexRM(pfx,modrm);
26709 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
26723 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26728 UInt rG = gregOfRexRM(pfx,modrm);
26729 UInt rV = getVexNvvvv(pfx);
26732 UInt rE = eregOfRexRM(pfx,modrm);
26739 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
26755 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26757 uses_vvvv, vbi, pfx, delta,
26762 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26764 uses_vvvv, vbi, pfx, delta,
26769 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26771 uses_vvvv, vbi, pfx, delta,
26776 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26778 uses_vvvv, vbi, pfx, delta,
26786 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26787 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta,
26794 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26795 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta,
26805 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26806 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta,
26812 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26813 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta,
26822 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26823 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta,
26829 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26830 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta,
26840 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26842 uses_vvvv, vbi, pfx, delta, "vpaddq", Iop_Add64x2 );
26847 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26849 uses_vvvv, vbi, pfx, delta, "vpaddq", Iop_Add64x4 );
26856 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26858 uses_vvvv, vbi, pfx, delta, "vpmullw", Iop_Mul16x8 );
26862 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26864 uses_vvvv, vbi, pfx, delta, "vpmullw", Iop_Mul16x16 );
26874 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
26875 && 0==getRexW(pfx)/*this might be redundant, dunno*/) {
26877 UInt rG = gregOfRexRM(pfx,modrm);
26882 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
26893 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26894 delta = dis_PMOVMSKB_128( vbi, pfx, delta, True/*isAvx*/ );
26898 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26899 delta = dis_PMOVMSKB_256( vbi, pfx, delta );
26906 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26908 uses_vvvv, vbi, pfx, delta, "vpsubusb", Iop_QSub8Ux16 );
26912 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26914 uses_vvvv, vbi, pfx, delta, "vpsubusb", Iop_QSub8Ux32 );
26921 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26923 uses_vvvv, vbi, pfx, delta, "vpsubusw", Iop_QSub16Ux8 );
26927 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26929 uses_vvvv, vbi, pfx, delta, "vpsubusw", Iop_QSub16Ux16 );
26936 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26938 uses_vvvv, vbi, pfx, delta, "vpminub", Iop_Min8Ux16 );
26942 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26944 uses_vvvv, vbi, pfx, delta, "vpminub", Iop_Min8Ux32 );
26952 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26954 uses_vvvv, vbi, pfx, delta, "vpand", Iop_AndV128 );
26959 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26961 uses_vvvv, vbi, pfx, delta, "vpand", Iop_AndV256 );
26968 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26970 uses_vvvv, vbi, pfx, delta, "vpaddusb", Iop_QAdd8Ux16 );
26974 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26976 uses_vvvv, vbi, pfx, delta, "vpaddusb", Iop_QAdd8Ux32 );
26983 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26985 uses_vvvv, vbi, pfx, delta, "vpaddusw", Iop_QAdd16Ux8 );
26989 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26991 uses_vvvv, vbi, pfx, delta, "vpaddusw", Iop_QAdd16Ux16 );
26998 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27000 uses_vvvv, vbi, pfx, delta, "vpmaxub", Iop_Max8Ux16 );
27004 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27006 uses_vvvv, vbi, pfx, delta, "vpmaxub", Iop_Max8Ux32 );
27014 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27016 uses_vvvv, vbi, pfx, delta, "vpandn", Iop_AndV128,
27022 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27024 uses_vvvv, vbi, pfx, delta, "vpandn", Iop_AndV256,
27032 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27034 uses_vvvv, vbi, pfx, delta, "vpavgb", Iop_Avg8Ux16 );
27038 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27040 uses_vvvv, vbi, pfx, delta, "vpavgb", Iop_Avg8Ux32 );
27047 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27048 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta,
27054 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27055 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta,
27064 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27065 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta,
27071 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27072 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta,
27081 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27083 uses_vvvv, vbi, pfx, delta, "vpavgw", Iop_Avg16Ux8 );
27087 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27089 uses_vvvv, vbi, pfx, delta, "vpavgw", Iop_Avg16Ux16 );
27096 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27098 uses_vvvv, vbi, pfx, delta, "vpmulhuw", Iop_MulHi16Ux8 );
27102 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27104 uses_vvvv, vbi, pfx, delta, "vpmulhuw", Iop_MulHi16Ux16 );
27111 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27113 uses_vvvv, vbi, pfx, delta, "vpmulhw", Iop_MulHi16Sx8 );
27117 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27119 uses_vvvv, vbi, pfx, delta, "vpmulhw", Iop_MulHi16Sx16 );
27126 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*128*/) {
27127 delta = dis_CVTDQ2PD_128(vbi, pfx, delta, True/*isAvx*/);
27131 if (haveF3no66noF2(pfx) && 1==getVexL(pfx)/*256*/) {
27132 delta = dis_CVTDQ2PD_256(vbi, pfx, delta);
27136 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27137 delta = dis_CVTxPD2DQ_128(vbi, pfx, delta, True/*isAvx*/,
27142 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27143 delta = dis_CVTxPD2DQ_256(vbi, pfx, delta, True/*r2zero*/);
27147 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27148 delta = dis_CVTxPD2DQ_128(vbi, pfx, delta, True/*isAvx*/,
27153 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27154 delta = dis_CVTxPD2DQ_256(vbi, pfx, delta, False/*!r2zero*/);
27161 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27163 UInt rG = gregOfRexRM(pfx,modrm);
27165 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
27175 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27177 UInt rG = gregOfRexRM(pfx,modrm);
27179 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
27192 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27194 uses_vvvv, vbi, pfx, delta, "vpsubsb", Iop_QSub8Sx16 );
27198 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27200 uses_vvvv, vbi, pfx, delta, "vpsubsb", Iop_QSub8Sx32 );
27207 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27209 uses_vvvv, vbi, pfx, delta, "vpsubsw", Iop_QSub16Sx8 );
27213 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27215 uses_vvvv, vbi, pfx, delta, "vpsubsw", Iop_QSub16Sx16 );
27223 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27225 uses_vvvv, vbi, pfx, delta, "vpminsw", Iop_Min16Sx8 );
27230 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27232 uses_vvvv, vbi, pfx, delta, "vpminsw", Iop_Min16Sx16 );
27240 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27242 uses_vvvv, vbi, pfx, delta, "vpor", Iop_OrV128 );
27247 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27249 uses_vvvv, vbi, pfx, delta, "vpor", Iop_OrV256 );
27256 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27258 uses_vvvv, vbi, pfx, delta, "vpaddsb", Iop_QAdd8Sx16 );
27262 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27264 uses_vvvv, vbi, pfx, delta, "vpaddsb", Iop_QAdd8Sx32 );
27271 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27273 uses_vvvv, vbi, pfx, delta, "vpaddsw", Iop_QAdd16Sx8 );
27277 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27279 uses_vvvv, vbi, pfx, delta, "vpaddsw", Iop_QAdd16Sx16 );
27287 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27289 uses_vvvv, vbi, pfx, delta, "vpmaxsw", Iop_Max16Sx8 );
27294 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27296 uses_vvvv, vbi, pfx, delta, "vpmaxsw", Iop_Max16Sx16 );
27304 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27306 uses_vvvv, vbi, pfx, delta, "vpxor", Iop_XorV128 );
27311 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27313 uses_vvvv, vbi, pfx, delta, "vpxor", Iop_XorV256 );
27320 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27322 UInt rD = gregOfRexRM(pfx, modrm);
27325 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
27333 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27335 UInt rD = gregOfRexRM(pfx, modrm);
27338 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
27349 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27350 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta,
27357 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27358 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta,
27368 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27369 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta,
27375 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27376 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta,
27385 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27386 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta,
27392 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27393 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta,
27402 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27404 uses_vvvv, vbi, pfx, delta,
27409 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27411 uses_vvvv, vbi, pfx, delta,
27419 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27421 uses_vvvv, vbi, pfx, delta,
27426 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27428 uses_vvvv, vbi, pfx, delta,
27436 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27438 uses_vvvv, vbi, pfx, delta,
27443 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27445 uses_vvvv, vbi, pfx, delta,
27453 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
27455 delta = dis_MASKMOVDQU( vbi, pfx, delta, True/*isAvx*/ );
27463 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27465 uses_vvvv, vbi, pfx, delta, "vpsubb", Iop_Sub8x16 );
27470 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27472 uses_vvvv, vbi, pfx, delta, "vpsubb", Iop_Sub8x32 );
27480 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27482 uses_vvvv, vbi, pfx, delta, "vpsubw", Iop_Sub16x8 );
27487 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27489 uses_vvvv, vbi, pfx, delta, "vpsubw", Iop_Sub16x16 );
27497 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27499 uses_vvvv, vbi, pfx, delta, "vpsubd", Iop_Sub32x4 );
27504 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27506 uses_vvvv, vbi, pfx, delta, "vpsubd", Iop_Sub32x8 );
27514 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27516 uses_vvvv, vbi, pfx, delta, "vpsubq", Iop_Sub64x2 );
27521 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27523 uses_vvvv, vbi, pfx, delta, "vpsubq", Iop_Sub64x4 );
27531 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27533 uses_vvvv, vbi, pfx, delta, "vpaddb", Iop_Add8x16 );
27538 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27540 uses_vvvv, vbi, pfx, delta, "vpaddb", Iop_Add8x32 );
27548 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27550 uses_vvvv, vbi, pfx, delta, "vpaddw", Iop_Add16x8 );
27555 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27557 uses_vvvv, vbi, pfx, delta, "vpaddw", Iop_Add16x16 );
27565 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27567 uses_vvvv, vbi, pfx, delta, "vpaddd", Iop_Add32x4 );
27572 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27574 uses_vvvv, vbi, pfx, delta, "vpaddd", Iop_Add32x8 );
27673 const VexAbiInfo* vbi, Prefix pfx, Long delta,
27678 Int size = getRexW(pfx) ? 8 : 4;
27684 assign( amt, getIRegV(size,pfx) );
27686 assign( src, getIRegE(size,pfx,rm) );
27687 DIP("%s %s,%s,%s\n", opname, nameIRegV(size,pfx),
27688 nameIRegE(size,pfx,rm), nameIRegG(size,pfx,rm));
27691 IRTemp addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
27693 DIP("%s %s,%s,%s\n", opname, nameIRegV(size,pfx), dis_buf,
27694 nameIRegG(size,pfx,rm));
27698 putIRegG( size, pfx, rm,
27708 static Long dis_FMA ( const VexAbiInfo* vbi, Prefix pfx, Long delta, UChar opc )
27711 UInt rG = gregOfRexRM(pfx, modrm);
27712 UInt rV = getVexNvvvv(pfx);
27714 IRType ty = getRexW(pfx) ? Ity_F64 : Ity_F32;
27715 IRType vty = scalar ? ty : (getVexL(pfx) ? Ity_V256 : Ity_V128);
27774 UInt rE = eregOfRexRM(pfx, modrm);
27789 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
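In the FMA helper above, getRexW picks the element type (Ity_F64 vs Ity_F32) and getVexL the vector type for the packed forms; the underlying scalar operation is a fused multiply-add, a*b+c with a single rounding. A minimal scalar sketch using the C library:

#include <math.h>

static double fmadd64(double a, double b, double c) { return fma (a, b, c); }
static float  fmadd32(float  a, float  b, float  c) { return fmaf(a, b, c); }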
27857 Prefix pfx, Long delta,
27865 UInt rG = gregOfRexRM(pfx,modrm);
27866 UInt rV = getVexNvvvv(pfx);
27868 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
27927 Prefix pfx, Long delta,
27935 UInt rG = gregOfRexRM(pfx,modrm);
27936 UInt rV = getVexNvvvv(pfx);
27941 addr = disAVSIBMode ( &alen, vbi, pfx, delta, dis_buf, &rI,
27993 addr_expr = handleAddrOverrides(vbi, pfx, addr_expr);
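The gather helper above uses disAVSIBMode and handleAddrOverrides to form one effective address per lane: base + (index[i] << scale) + disp. A plain-C model of a 32-bit gather, with mask handling and fault suppression omitted and all names made up:

#include <stdint.h>

static void gather32(uint32_t* dst, const uint8_t* base, const int32_t* index,
                     int scale, int32_t disp, int lanes)
{
   for (int i = 0; i < lanes; i++) {
      const uint8_t* ea = base + ((int64_t)index[i] << scale) + disp;
      dst[i] = *(const uint32_t*)ea;
   }
}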
28030 Prefix pfx, Int sz, Long deltaIN
28046 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28048 uses_vvvv, vbi, pfx, delta, "vpshufb", math_PSHUFB_XMM );
28053 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28055 uses_vvvv, vbi, pfx, delta, "vpshufb", math_PSHUFB_YMM );
28066 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28067 delta = dis_PHADD_128( vbi, pfx, delta, True/*isAvx*/, opc );
28074 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28075 delta = dis_PHADD_256( vbi, pfx, delta, opc );
28083 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28085 uses_vvvv, vbi, pfx, delta, "vpmaddubsw",
28090 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28092 uses_vvvv, vbi, pfx, delta, "vpmaddubsw",
28104 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28105 delta = dis_PHADD_128( vbi, pfx, delta, True/*isAvx*/, opc );
28112 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28113 delta = dis_PHADD_256( vbi, pfx, delta, opc );
28125 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28133 UInt rG = gregOfRexRM(pfx,modrm);
28134 UInt rV = getVexNvvvv(pfx);
28146 UInt rE = eregOfRexRM(pfx,modrm);
28152 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
28175 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28184 UInt rG = gregOfRexRM(pfx,modrm);
28185 UInt rV = getVexNvvvv(pfx);
28197 UInt rE = eregOfRexRM(pfx,modrm);
28203 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
28233 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28239 UInt rG = gregOfRexRM(pfx,modrm);
28240 UInt rV = getVexNvvvv(pfx);
28245 UInt rE = eregOfRexRM(pfx,modrm);
28251 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
28272 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28278 UInt rG = gregOfRexRM(pfx,modrm);
28279 UInt rV = getVexNvvvv(pfx);
28284 UInt rE = eregOfRexRM(pfx,modrm);
28290 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
28319 if (have66noF2noF3(pfx)
28320 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
28322 UInt rG = gregOfRexRM(pfx, modrm);
28323 UInt rV = getVexNvvvv(pfx);
28326 UInt rE = eregOfRexRM(pfx, modrm);
28332 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
28346 if (have66noF2noF3(pfx)
28347 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) {
28349 UInt rG = gregOfRexRM(pfx, modrm);
28350 UInt rV = getVexNvvvv(pfx);
28353 UInt rE = eregOfRexRM(pfx, modrm);
28359 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
28376 if (have66noF2noF3(pfx)
28377 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
28379 UInt rG = gregOfRexRM(pfx, modrm);
28380 UInt rV = getVexNvvvv(pfx);
28383 UInt rE = eregOfRexRM(pfx, modrm);
28389 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
28403 if (have66noF2noF3(pfx)
28404 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) {
28406 UInt rG = gregOfRexRM(pfx, modrm);
28407 UInt rV = getVexNvvvv(pfx);
28410 UInt rE = eregOfRexRM(pfx, modrm);
28416 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
28433 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28434 delta = dis_xTESTy_128( vbi, pfx, delta, True/*isAvx*/, 32 );
28438 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28439 delta = dis_xTESTy_256( vbi, pfx, delta, 32 );
28446 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28447 delta = dis_xTESTy_128( vbi, pfx, delta, True/*isAvx*/, 64 );
28451 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28452 delta = dis_xTESTy_256( vbi, pfx, delta, 64 );
28459 if (have66noF2noF3(pfx)
28460 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) {
28462 uses_vvvv, vbi, pfx, delta, "vpermps", math_VPERMD );
28469 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28470 delta = dis_xTESTy_128( vbi, pfx, delta, True/*isAvx*/, 0 );
28474 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28475 delta = dis_xTESTy_256( vbi, pfx, delta, 0 );
28482 if (have66noF2noF3(pfx)
28483 && 0==getVexL(pfx)/*128*/
28486 UInt rG = gregOfRexRM(pfx, modrm);
28487 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
28499 if (have66noF2noF3(pfx)
28500 && 1==getVexL(pfx)/*256*/
28503 UInt rG = gregOfRexRM(pfx, modrm);
28504 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
28517 if (have66noF2noF3(pfx)
28518 && 0==getVexL(pfx)/*128*/
28521 UInt rG = gregOfRexRM(pfx, modrm);
28522 UInt rE = eregOfRexRM(pfx, modrm);
28534 if (have66noF2noF3(pfx)
28535 && 1==getVexL(pfx)/*256*/
28538 UInt rG = gregOfRexRM(pfx, modrm);
28539 UInt rE = eregOfRexRM(pfx, modrm);
28555 if (have66noF2noF3(pfx)
28556 && 1==getVexL(pfx)/*256*/
28559 UInt rG = gregOfRexRM(pfx, modrm);
28560 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
28571 if (have66noF2noF3(pfx)
28572 && 1==getVexL(pfx)/*256*/
28575 UInt rG = gregOfRexRM(pfx, modrm);
28576 UInt rE = eregOfRexRM(pfx, modrm);
28590 if (have66noF2noF3(pfx)
28591 && 1==getVexL(pfx)/*256*/
28594 UInt rG = gregOfRexRM(pfx, modrm);
28595 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
28607 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28609 uses_vvvv, vbi, pfx, delta,
28614 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28616 uses_vvvv, vbi, pfx, delta,
28624 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28626 uses_vvvv, vbi, pfx, delta,
28631 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28633 uses_vvvv, vbi, pfx, delta,
28641 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28643 uses_vvvv, vbi, pfx, delta,
28648 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28650 uses_vvvv, vbi, pfx, delta,
28659 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28660 delta = dis_PMOVxXBW_128( vbi, pfx, delta,
28666 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28667 delta = dis_PMOVxXBW_256( vbi, pfx, delta, False/*!xIsZ*/ );
28675 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28676 delta = dis_PMOVxXBD_128( vbi, pfx, delta,
28682 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28683 delta = dis_PMOVxXBD_256( vbi, pfx, delta, False/*!xIsZ*/ );
28691 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28692 delta = dis_PMOVSXBQ_128( vbi, pfx, delta, True/*isAvx*/ );
28697 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28698 delta = dis_PMOVSXBQ_256( vbi, pfx, delta );
28705 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28706 delta = dis_PMOVxXWD_128( vbi, pfx, delta,
28711 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28712 delta = dis_PMOVxXWD_256( vbi, pfx, delta, False/*!xIsZ*/ );
28719 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28720 delta = dis_PMOVSXWQ_128( vbi, pfx, delta, True/*isAvx*/ );
28724 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28725 delta = dis_PMOVSXWQ_256( vbi, pfx, delta );
28732 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28733 delta = dis_PMOVxXDQ_128( vbi, pfx, delta,
28738 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28739 delta = dis_PMOVxXDQ_256( vbi, pfx, delta, False/*!xIsZ*/ );
28746 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28748 uses_vvvv, vbi, pfx, delta,
28753 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28755 uses_vvvv, vbi, pfx, delta,
28764 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28766 uses_vvvv, vbi, pfx, delta, "vpcmpeqq", Iop_CmpEQ64x2 );
28771 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28773 uses_vvvv, vbi, pfx, delta, "vpcmpeqq", Iop_CmpEQ64x4 );
28780 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
28783 UInt rD = gregOfRexRM(pfx, modrm);
28785 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
28794 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
28797 UInt rD = gregOfRexRM(pfx, modrm);
28799 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
28812 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28814 uses_vvvv, vbi, pfx, delta, "vpackusdw",
28821 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28823 uses_vvvv, vbi, pfx, delta, "vpackusdw",
28831 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
28832 && 0==getRexW(pfx)/*W0*/
28834 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovps",
28839 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
28840 && 0==getRexW(pfx)/*W0*/
28842 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovps",
28850 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
28851 && 0==getRexW(pfx)/*W0*/
28853 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovpd",
28858 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
28859 && 0==getRexW(pfx)/*W0*/
28861 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovpd",
28869 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
28870 && 0==getRexW(pfx)/*W0*/
28872 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovps",
28877 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
28878 && 0==getRexW(pfx)/*W0*/
28880 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovps",
28888 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
28889 && 0==getRexW(pfx)/*W0*/
28891 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovpd",
28896 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
28897 && 0==getRexW(pfx)/*W0*/
28899 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovpd",
28908 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28909 delta = dis_PMOVxXBW_128( vbi, pfx, delta,
28915 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28916 delta = dis_PMOVxXBW_256( vbi, pfx, delta, True/*xIsZ*/ );
28924 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28925 delta = dis_PMOVxXBD_128( vbi, pfx, delta,
28931 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28932 delta = dis_PMOVxXBD_256( vbi, pfx, delta, True/*xIsZ*/ );
28940 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28941 delta = dis_PMOVZXBQ_128( vbi, pfx, delta, True/*isAvx*/ );
28946 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28947 delta = dis_PMOVZXBQ_256( vbi, pfx, delta );
28955 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28956 delta = dis_PMOVxXWD_128( vbi, pfx, delta,
28962 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28963 delta = dis_PMOVxXWD_256( vbi, pfx, delta, True/*xIsZ*/ );
28970 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28971 delta = dis_PMOVZXWQ_128( vbi, pfx, delta, True/*isAvx*/ );
28975 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28976 delta = dis_PMOVZXWQ_256( vbi, pfx, delta );
28983 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28984 delta = dis_PMOVxXDQ_128( vbi, pfx, delta,
28989 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28990 delta = dis_PMOVxXDQ_256( vbi, pfx, delta, True/*xIsZ*/ );
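The PMOVSX/PMOVZX families dispatched above widen each source lane, sign-extending for the SX forms and zero-extending for the ZX forms. One byte-to-dword lane in plain C:

#include <stdint.h>

static int32_t  pmovsxbd_lane(int8_t  b) { return (int32_t)b;  }  /* sign-extend */
static uint32_t pmovzxbd_lane(uint8_t b) { return (uint32_t)b; }  /* zero-extend */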
28997 if (have66noF2noF3(pfx)
28998 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) {
29000 uses_vvvv, vbi, pfx, delta, "vpermd", math_VPERMD );
29008 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
29010 uses_vvvv, vbi, pfx, delta, "vpcmpgtq", Iop_CmpGT64Sx2 );
29015 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
29017 uses_vvvv, vbi, pfx, delta, "vpcmpgtq", Iop_CmpGT64Sx4 );
29025 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
29027 uses_vvvv, vbi, pfx, delta, "vpminsb", Iop_Min8Sx16 );
29032 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
29034 uses_vvvv, vbi, pfx, delta, "vpminsb", Iop_Min8Sx32 );
29042 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
29044 uses_vvvv, vbi, pfx, delta, "vpminsd", Iop_Min32Sx4 );
29049 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
29051 uses_vvvv, vbi, pfx, delta, "vpminsd", Iop_Min32Sx8 );
29059 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
29061 uses_vvvv, vbi, pfx, delta, "vpminuw", Iop_Min16Ux8 );
29066 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
29068 uses_vvvv, vbi, pfx, delta, "vpminuw", Iop_Min16Ux16 );
29076 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
29078 uses_vvvv, vbi, pfx, delta, "vpminud", Iop_Min32Ux4 );
29083 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
29085 uses_vvvv, vbi, pfx, delta, "vpminud", Iop_Min32Ux8 );
29093 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
29095 uses_vvvv, vbi, pfx, delta, "vpmaxsb", Iop_Max8Sx16 );
29100 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
29102 uses_vvvv, vbi, pfx, delta, "vpmaxsb", Iop_Max8Sx32 );
29110 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
29112 uses_vvvv, vbi, pfx, delta, "vpmaxsd", Iop_Max32Sx4 );
29117 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
29119 uses_vvvv, vbi, pfx, delta, "vpmaxsd", Iop_Max32Sx8 );
29127 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
29129 uses_vvvv, vbi, pfx, delta, "vpmaxuw", Iop_Max16Ux8 );
29134 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
29136 uses_vvvv, vbi, pfx, delta, "vpmaxuw", Iop_Max16Ux16 );
29144 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
29146 uses_vvvv, vbi, pfx, delta, "vpmaxud", Iop_Max32Ux4 );
29151 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
29153 uses_vvvv, vbi, pfx, delta, "vpmaxud", Iop_Max32Ux8 );
29161 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
29163 uses_vvvv, vbi, pfx, delta, "vpmulld", Iop_Mul32x4 );
29168 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
29170 uses_vvvv, vbi, pfx, delta, "vpmulld", Iop_Mul32x8 );
29177 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
29178 delta = dis_PHMINPOSUW_128( vbi, pfx, delta, True/*isAvx*/ );
29186 if (have66noF2noF3(pfx) && 0==getRexW(pfx)/*W0*/) {
29187 delta = dis_AVX_var_shiftV_byE( vbi, pfx, delta, "vpsrlvd",
29188 Iop_Shr32, 1==getVexL(pfx) );
29194 if (have66noF2noF3(pfx) && 1==getRexW(pfx)/*W1*/) {
29195 delta = dis_AVX_var_shiftV_byE( vbi, pfx, delta, "vpsrlvq",
29196 Iop_Shr64, 1==getVexL(pfx) );
29205 if (have66noF2noF3(pfx) && 0==getRexW(pfx)/*W0*/) {
29206 delta = dis_AVX_var_shiftV_byE( vbi, pfx, delta, "vpsravd",
29207 Iop_Sar32, 1==getVexL(pfx) );
29216 if (have66noF2noF3(pfx) && 0==getRexW(pfx)/*W0*/) {
29217 delta = dis_AVX_var_shiftV_byE( vbi, pfx, delta, "vpsllvd",
29218 Iop_Shl32, 1==getVexL(pfx) );
29224 if (have66noF2noF3(pfx) && 1==getRexW(pfx)/*W1*/) {
29225 delta = dis_AVX_var_shiftV_byE( vbi, pfx, delta, "vpsllvq",
29226 Iop_Shl64, 1==getVexL(pfx) );
29234 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29235 && 0==getRexW(pfx)/*W0*/) {
29237 UInt rG = gregOfRexRM(pfx, modrm);
29240 UInt rE = eregOfRexRM(pfx, modrm);
29245 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
29257 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29258 && 0==getRexW(pfx)/*W0*/) {
29260 UInt rG = gregOfRexRM(pfx, modrm);
29263 UInt rE = eregOfRexRM(pfx, modrm);
29268 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
29284 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29285 && 0==getRexW(pfx)/*W0*/) {
29287 UInt rG = gregOfRexRM(pfx, modrm);
29290 UInt rE = eregOfRexRM(pfx, modrm);
29295 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
29305 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29306 && 0==getRexW(pfx)/*W0*/) {
29308 UInt rG = gregOfRexRM(pfx, modrm);
29311 UInt rE = eregOfRexRM(pfx, modrm);
29316 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
29330 if (have66noF2noF3(pfx)
29331 && 1==getVexL(pfx)/*256*/
29334 UInt rG = gregOfRexRM(pfx, modrm);
29335 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
29347 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29348 && 0==getRexW(pfx)/*W0*/) {
29350 UInt rG = gregOfRexRM(pfx, modrm);
29353 UInt rE = eregOfRexRM(pfx, modrm);
29358 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
29374 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29375 && 0==getRexW(pfx)/*W0*/) {
29377 UInt rG = gregOfRexRM(pfx, modrm);
29380 UInt rE = eregOfRexRM(pfx, modrm);
29385 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
29405 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29406 && 0==getRexW(pfx)/*W0*/) {
29408 UInt rG = gregOfRexRM(pfx, modrm);
29411 UInt rE = eregOfRexRM(pfx, modrm);
29416 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
29430 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29431 && 0==getRexW(pfx)/*W0*/) {
29433 UInt rG = gregOfRexRM(pfx, modrm);
29436 UInt rE = eregOfRexRM(pfx, modrm);
29441 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
29459 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29460 && 0==getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29461 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovd",
29466 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29467 && 0==getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29468 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovd",
29473 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29474 && 1==getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29475 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovq",
29480 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29481 && 1==getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29482 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovq",
29490 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29491 && 0==getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29492 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovd",
29497 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29498 && 0==getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29499 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovd",
29504 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29505 && 1==getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29506 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovq",
29511 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29512 && 1==getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29513 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovq",
29521 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29522 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29524 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherdd",
29530 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29531 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29533 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherdd",
29539 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29540 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29542 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherdq",
29548 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29549 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29551 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherdq",
29560 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29561 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29563 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherqd",
29569 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29570 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29572 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherqd",
29578 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29579 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29581 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherqq",
29587 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29588 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29590 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherqq",
29599 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29600 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29602 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherdps",
29608 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29609 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29611 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherdps",
29617 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29618 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29620 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherdpd",
29626 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29627 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29629 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherdpd",
29638 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29639 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29641 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherqps",
29647 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29648 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29650 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherqps",
29656 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29657 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29659 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherqpd",
29665 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29666 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29668 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherqpd",
29774 if (have66noF2noF3(pfx)) {
29775 delta = dis_FMA( vbi, pfx, delta, opc );
29792 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
29793 delta = dis_AESx( vbi, pfx, delta, True/*isAvx*/, opc );
29802 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) {
29803 Int size = getRexW(pfx) ? 8 : 4;
29810 assign( src1, getIRegV(size,pfx) );
29812 assign( src2, getIRegE(size,pfx,rm) );
29813 DIP("andn %s,%s,%s\n", nameIRegE(size,pfx,rm),
29814 nameIRegV(size,pfx), nameIRegG(size,pfx,rm));
29817 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
29819 DIP("andn %s,%s,%s\n", dis_buf, nameIRegV(size,pfx),
29820 nameIRegG(size,pfx,rm));
29827 putIRegG( size, pfx, rm, mkexpr(dst) );
29841 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/
29842 && !haveREX(pfx) && gregLO3ofRM(getUChar(delta)) == 3) {
29843 Int size = getRexW(pfx) ? 8 : 4;
29850 assign( src, getIRegE(size,pfx,rm) );
29851 DIP("blsi %s,%s\n", nameIRegE(size,pfx,rm),
29852 nameIRegV(size,pfx));
29855 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
29857 DIP("blsi %s,%s\n", dis_buf, nameIRegV(size,pfx));
29864 putIRegV( size, pfx, mkexpr(dst) );
29875 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/
29876 && !haveREX(pfx) && gregLO3ofRM(getUChar(delta)) == 2) {
29877 Int size = getRexW(pfx) ? 8 : 4;
29884 assign( src, getIRegE(size,pfx,rm) );
29885 DIP("blsmsk %s,%s\n", nameIRegE(size,pfx,rm),
29886 nameIRegV(size,pfx));
29889 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
29891 DIP("blsmsk %s,%s\n", dis_buf, nameIRegV(size,pfx));
29898 putIRegV( size, pfx, mkexpr(dst) );
29909 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/
29910 && !haveREX(pfx) && gregLO3ofRM(getUChar(delta)) == 1) {
29911 Int size = getRexW(pfx) ? 8 : 4;
29918 assign( src, getIRegE(size,pfx,rm) );
29919 DIP("blsr %s,%s\n", nameIRegE(size,pfx,rm),
29920 nameIRegV(size,pfx));
29923 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
29925 DIP("blsr %s,%s\n", dis_buf, nameIRegV(size,pfx));
29932 putIRegV( size, pfx, mkexpr(dst) );
29946 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) {
29947 Int size = getRexW(pfx) ? 8 : 4;
29956 assign( src2, getIRegV(size,pfx) );
29958 assign( src1, getIRegE(size,pfx,rm) );
29959 DIP("bzhi %s,%s,%s\n", nameIRegV(size,pfx),
29960 nameIRegE(size,pfx,rm), nameIRegG(size,pfx,rm));
29963 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
29965 DIP("bzhi %s,%s,%s\n", nameIRegV(size,pfx), dis_buf,
29966 nameIRegG(size,pfx,rm));
30001 putIRegG( size, pfx, rm, mkexpr(dst) );
30012 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) {
30013 Int size = getRexW(pfx) ? 8 : 4;
30019 assign( src, getIRegV(size,pfx) );
30021 assign( mask, getIRegE(size,pfx,rm) );
30022 DIP("pdep %s,%s,%s\n", nameIRegE(size,pfx,rm),
30023 nameIRegV(size,pfx), nameIRegG(size,pfx,rm));
30026 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
30028 DIP("pdep %s,%s,%s\n", dis_buf, nameIRegV(size,pfx),
30029 nameIRegG(size,pfx,rm));
30035 putIRegG( size, pfx, rm,
30045 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) {
30046 Int size = getRexW(pfx) ? 8 : 4;
30052 assign( src, getIRegV(size,pfx) );
30054 assign( mask, getIRegE(size,pfx,rm) );
30055 DIP("pext %s,%s,%s\n", nameIRegE(size,pfx,rm),
30056 nameIRegV(size,pfx), nameIRegG(size,pfx,rm));
30059 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
30061 DIP("pext %s,%s,%s\n", dis_buf, nameIRegV(size,pfx),
30062 nameIRegG(size,pfx,rm));
30072 putIRegG( size, pfx, rm,
30085 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) {
30086 Int size = getRexW(pfx) ? 8 : 4;
30095 assign( src2, getIRegE(size,pfx,rm) );
30096 DIP("mulx %s,%s,%s\n", nameIRegE(size,pfx,rm),
30097 nameIRegV(size,pfx), nameIRegG(size,pfx,rm));
30100 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
30102 DIP("mulx %s,%s,%s\n", dis_buf, nameIRegV(size,pfx),
30103 nameIRegG(size,pfx,rm));
30109 putIRegV( size, pfx,
30111 putIRegG( size, pfx, rm,
30123 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) {
30124 delta = dis_SHIFTX( uses_vvvv, vbi, pfx, delta, "sarx", Iop_Sar8 );
30129 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) {
30130 delta = dis_SHIFTX( uses_vvvv, vbi, pfx, delta, "shlx", Iop_Shl8 );
30135 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) {
30136 delta = dis_SHIFTX( uses_vvvv, vbi, pfx, delta, "shrx", Iop_Shr8 );
30141 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) {
30142 Int size = getRexW(pfx) ? 8 : 4;
30152 assign( src2, getIRegV(size,pfx) );
30154 assign( src1, getIRegE(size,pfx,rm) );
30155 DIP("bextr %s,%s,%s\n", nameIRegV(size,pfx),
30156 nameIRegE(size,pfx,rm), nameIRegG(size,pfx,rm));
30159 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
30161 DIP("bextr %s,%s,%s\n", nameIRegV(size,pfx), dis_buf,
30162 nameIRegG(size,pfx,rm));
30209 putIRegG( size, pfx, rm, mkexpr(dst) );
30236 static Long decode_vregW(Int count, Long delta, UChar modrm, Prefix pfx,
30247 *dst = gregOfRexRM(pfx, modrm);
30251 UInt ereg = eregOfRexRM(pfx, modrm);
30256 addr = disAMode(&alen, vbi, pfx, delta, dis_buf, extra_byte);
30261 UInt vvvv = getVexNvvvv(pfx);
30283 static Long dis_FMA4 (Prefix pfx, Long delta, UChar opc,
30311 delta = decode_vregW(4, delta, modrm, pfx, vbi, operand, &dst, getRexW(pfx));
30396 Prefix pfx, Int sz, Long deltaIN
30413 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
30414 && 1==getRexW(pfx)/*W1*/) {
30417 UInt rG = gregOfRexRM(pfx, modrm);
30421 UInt rE = eregOfRexRM(pfx, modrm);
30428 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30452 if (have66noF2noF3(pfx)
30453 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
30456 UInt rG = gregOfRexRM(pfx, modrm);
30457 UInt rV = getVexNvvvv(pfx);
30464 UInt rE = eregOfRexRM(pfx, modrm);
30471 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30492 if (have66noF2noF3(pfx)
30493 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) {
30496 UInt rG = gregOfRexRM(pfx, modrm);
30497 UInt rV = getVexNvvvv(pfx);
30504 UInt rE = eregOfRexRM(pfx, modrm);
30511 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30536 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
30539 UInt rG = gregOfRexRM(pfx, modrm);
30542 UInt rE = eregOfRexRM(pfx, modrm);
30549 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30566 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
30569 UInt rG = gregOfRexRM(pfx, modrm);
30572 UInt rE = eregOfRexRM(pfx, modrm);
30579 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30594 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
30597 UInt rG = gregOfRexRM(pfx, modrm);
30600 UInt rE = eregOfRexRM(pfx, modrm);
30607 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30627 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
30630 UInt rG = gregOfRexRM(pfx, modrm);
30633 UInt rE = eregOfRexRM(pfx, modrm);
30640 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30664 if (have66noF2noF3(pfx)
30665 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) {
30668 UInt rG = gregOfRexRM(pfx, modrm);
30669 UInt rV = getVexNvvvv(pfx);
30677 UInt rE = eregOfRexRM(pfx, modrm);
30685 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30711 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
30713 UInt rG = gregOfRexRM(pfx, modrm);
30725 UInt rE = eregOfRexRM(pfx, modrm);
30732 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30759 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
30761 UInt rG = gregOfRexRM(pfx, modrm);
30777 UInt rE = eregOfRexRM(pfx, modrm);
30784 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30816 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
30818 UInt rG = gregOfRexRM(pfx, modrm);
30828 UInt rE = eregOfRexRM(pfx, modrm);
30835 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30860 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
30862 UInt rG = gregOfRexRM(pfx, modrm);
30874 UInt rE = eregOfRexRM(pfx, modrm);
30881 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30912 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
30914 UInt rG = gregOfRexRM(pfx, modrm);
30915 UInt rV = getVexNvvvv(pfx);
30922 UInt rE = eregOfRexRM(pfx, modrm);
30932 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30967 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
30970 UInt rG = gregOfRexRM(pfx, modrm);
30971 UInt rV = getVexNvvvv(pfx);
30976 UInt rE = eregOfRexRM(pfx, modrm);
30983 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30998 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31001 UInt rG = gregOfRexRM(pfx, modrm);
31002 UInt rV = getVexNvvvv(pfx);
31007 UInt rE = eregOfRexRM(pfx, modrm);
31014 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31032 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
31035 UInt rG = gregOfRexRM(pfx, modrm);
31036 UInt rV = getVexNvvvv(pfx);
31041 UInt rE = eregOfRexRM(pfx, modrm);
31048 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31063 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31066 UInt rG = gregOfRexRM(pfx, modrm);
31067 UInt rV = getVexNvvvv(pfx);
31072 UInt rE = eregOfRexRM(pfx, modrm);
31079 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31097 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31100 UInt rG = gregOfRexRM(pfx, modrm);
31101 UInt rV = getVexNvvvv(pfx);
31106 UInt rE = eregOfRexRM(pfx, modrm);
31113 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31128 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
31131 UInt rG = gregOfRexRM(pfx, modrm);
31132 UInt rV = getVexNvvvv(pfx);
31139 UInt rE = eregOfRexRM(pfx, modrm);
31146 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31167 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31169 UInt rG = gregOfRexRM(pfx, modrm);
31170 UInt rV = getVexNvvvv(pfx);
31178 UInt rE = eregOfRexRM(pfx, modrm);
31185 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31200 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
31202 UInt rG = gregOfRexRM(pfx, modrm);
31203 UInt rV = getVexNvvvv(pfx);
31213 UInt rE = eregOfRexRM(pfx, modrm);
31220 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31241 if (have66noF2noF3(pfx)
31242 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
31243 delta = dis_PEXTRB_128_GtoE( vbi, pfx, delta, False/*!isAvx*/ );
31251 if (have66noF2noF3(pfx)
31252 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
31253 delta = dis_PEXTRW( vbi, pfx, delta, True/*isAvx*/ );
31261 if (have66noF2noF3(pfx)
31262 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
31263 delta = dis_PEXTRD( vbi, pfx, delta, True/*isAvx*/ );
31267 if (have66noF2noF3(pfx)
31268 && 0==getVexL(pfx)/*128*/ && 1==getRexW(pfx)/*W1*/) {
31269 delta = dis_PEXTRQ( vbi, pfx, delta, True/*isAvx*/ );
31276 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31277 delta = dis_EXTRACTPS( vbi, pfx, delta, True/*isAvx*/ );
31286 if (have66noF2noF3(pfx)
31287 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) {
31290 UInt rG = gregOfRexRM(pfx, modrm);
31291 UInt rV = getVexNvvvv(pfx);
31294 UInt rE = eregOfRexRM(pfx, modrm);
31301 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31321 if (have66noF2noF3(pfx)
31322 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) {
31325 UInt rS = gregOfRexRM(pfx, modrm);
31328 UInt rD = eregOfRexRM(pfx, modrm);
31336 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31352 if (have66noF2noF3(pfx)
31353 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
31355 UInt rG = gregOfRexRM(pfx, modrm);
31356 UInt rV = getVexNvvvv(pfx);
31361 UInt rE = eregOfRexRM(pfx,modrm);
31368 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31388 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31390 UInt rG = gregOfRexRM(pfx, modrm);
31391 UInt rV = getVexNvvvv(pfx);
31397 UInt rE = eregOfRexRM(pfx, modrm);
31408 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31427 if (have66noF2noF3(pfx)
31428 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
31430 UInt rG = gregOfRexRM(pfx, modrm);
31431 UInt rV = getVexNvvvv(pfx);
31436 UInt rE = eregOfRexRM(pfx,modrm);
31443 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31459 if (have66noF2noF3(pfx)
31460 && 0==getVexL(pfx)/*128*/ && 1==getRexW(pfx)/*W1*/) {
31462 UInt rG = gregOfRexRM(pfx, modrm);
31463 UInt rV = getVexNvvvv(pfx);
31468 UInt rE = eregOfRexRM(pfx,modrm);
31475 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31496 if (have66noF2noF3(pfx)
31497 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) {
31500 UInt rG = gregOfRexRM(pfx, modrm);
31501 UInt rV = getVexNvvvv(pfx);
31504 UInt rE = eregOfRexRM(pfx, modrm);
31511 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31531 if (have66noF2noF3(pfx)
31532 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) {
31535 UInt rS = gregOfRexRM(pfx, modrm);
31538 UInt rD = eregOfRexRM(pfx, modrm);
31546 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31562 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31564 UInt rG = gregOfRexRM(pfx, modrm);
31565 UInt rV = getVexNvvvv(pfx);
31569 UInt rE = eregOfRexRM(pfx,modrm);
31576 pfx, delta, dis_buf, 1 );
31592 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
31594 UInt rG = gregOfRexRM(pfx, modrm);
31595 UInt rV = getVexNvvvv(pfx);
31599 UInt rE = eregOfRexRM(pfx,modrm);
31606 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31630 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31632 UInt rG = gregOfRexRM(pfx, modrm);
31633 UInt rV = getVexNvvvv(pfx);
31637 UInt rE = eregOfRexRM(pfx,modrm);
31644 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31664 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31669 UInt rG = gregOfRexRM(pfx, modrm);
31670 UInt rV = getVexNvvvv(pfx);
31675 UInt rE = eregOfRexRM(pfx, modrm);
31683 addr = disAMode( &alen, vbi, pfx, delta, dis_buf,
31699 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
31704 UInt rG = gregOfRexRM(pfx, modrm);
31705 UInt rV = getVexNvvvv(pfx);
31712 UInt rE = eregOfRexRM(pfx, modrm);
31720 addr = disAMode( &alen, vbi, pfx, delta, dis_buf,
31746 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31751 UInt rG = gregOfRexRM(pfx, modrm);
31752 UInt rV = getVexNvvvv(pfx);
31757 UInt rE = eregOfRexRM(pfx, modrm);
31764 addr = disAMode( &alen, vbi, pfx, delta, dis_buf,
31781 if (have66noF2noF3(pfx)
31782 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) {
31785 UInt rG = gregOfRexRM(pfx, modrm);
31786 UInt rV = getVexNvvvv(pfx);
31794 UInt rE = eregOfRexRM(pfx, modrm);
31802 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31829 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31830 delta = dis_VBLENDV_128 ( vbi, pfx, delta,
31838 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
31839 delta = dis_VBLENDV_256 ( vbi, pfx, delta,
31850 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31851 delta = dis_VBLENDV_128 ( vbi, pfx, delta,
31859 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
31860 delta = dis_VBLENDV_256 ( vbi, pfx, delta,
31871 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31872 delta = dis_VBLENDV_128 ( vbi, pfx, delta,
31880 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
31881 delta = dis_VBLENDV_256 ( vbi, pfx, delta,
31899 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31901 delta = dis_PCMPxSTRx( vbi, pfx, delta, True/*isAvx*/, opc );
31911 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31913 delta = dis_FMA4( pfx, delta, opc, uses_vvvv, vbi );
31924 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31925 delta = dis_AESKEYGENASSIST( vbi, pfx, delta, True/*isAvx*/ );
31933 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) {
31934 Int size = getRexW(pfx) ? 8 : 4;
31942 assign( src, getIRegE(size,pfx,rm) );
31943 DIP("rorx %d,%s,%s\n", imm8, nameIRegE(size,pfx,rm),
31944 nameIRegG(size,pfx,rm));
31947 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
31950 DIP("rorx %d,%s,%s\n", imm8, dis_buf, nameIRegG(size,pfx,rm));
31956 putIRegG( size, pfx, rm,
32019 /* pfx holds the summary of prefixes. */
32020 Prefix pfx = PFX_EMPTY;
32119 /* Eat prefixes, summarising the result in pfx and sz, and rejecting
32126 case 0x66: pfx |= PFX_66; break;
32127 case 0x67: pfx |= PFX_ASO; break;
32128 case 0xF2: pfx |= PFX_F2; break;
32129 case 0xF3: pfx |= PFX_F3; break;
32130 case 0xF0: pfx |= PFX_LOCK; *expect_CAS = True; break;
32131 case 0x2E: pfx |= PFX_CS; break;
32132 case 0x3E: pfx |= PFX_DS; break;
32133 case 0x26: pfx |= PFX_ES; break;
32134 case 0x64: pfx |= PFX_FS; break;
32135 case 0x65: pfx |= PFX_GS; break;
32136 case 0x36: pfx |= PFX_SS; break;
32138 pfx |= PFX_REX;
32139 if (pre & (1<<3)) pfx |= PFX_REXW;
32140 if (pre & (1<<2)) pfx |= PFX_REXR;
32141 if (pre & (1<<1)) pfx |= PFX_REXX;
32142 if (pre & (1<<0)) pfx |= PFX_REXB;
32162 pfx |= PFX_VEX;
32164 /* R */ pfx |= (vex1 & (1<<7)) ? 0 : PFX_REXR;
32165 /* X */ pfx |= (vex1 & (1<<6)) ? 0 : PFX_REXX;
32166 /* B */ pfx |= (vex1 & (1<<5)) ? 0 : PFX_REXB;
32176 /* W */ pfx |= (vex2 & (1<<7)) ? PFX_REXW : 0;
32177 /* ~v3 */ pfx |= (vex2 & (1<<6)) ? 0 : PFX_VEXnV3;
32178 /* ~v2 */ pfx |= (vex2 & (1<<5)) ? 0 : PFX_VEXnV2;
32179 /* ~v1 */ pfx |= (vex2 & (1<<4)) ? 0 : PFX_VEXnV1;
32180 /* ~v0 */ pfx |= (vex2 & (1<<3)) ? 0 : PFX_VEXnV0;
32181 /* L */ pfx |= (vex2 & (1<<2)) ? PFX_VEXL : 0;
32185 case 1: pfx |= PFX_66; break;
32186 case 2: pfx |= PFX_F3; break;
32187 case 3: pfx |= PFX_F2; break;
32195 pfx |= PFX_VEX;
32197 /* R */ pfx |= (vex1 & (1<<7)) ? 0 : PFX_REXR;
32198 /* ~v3 */ pfx |= (vex1 & (1<<6)) ? 0 : PFX_VEXnV3;
32199 /* ~v2 */ pfx |= (vex1 & (1<<5)) ? 0 : PFX_VEXnV2;
32200 /* ~v1 */ pfx |= (vex1 & (1<<4)) ? 0 : PFX_VEXnV1;
32201 /* ~v0 */ pfx |= (vex1 & (1<<3)) ? 0 : PFX_VEXnV0;
32202 /* L */ pfx |= (vex1 & (1<<2)) ? PFX_VEXL : 0;
32206 case 1: pfx |= PFX_66; break;
32207 case 2: pfx |= PFX_F3; break;
32208 case 3: pfx |= PFX_F2; break;
32215 if ((pfx & PFX_VEX) && (pfx & PFX_REX))
32221 if (pfx & PFX_F2) n++;
32222 if (pfx & PFX_F3) n++;
32227 if (pfx & PFX_CS) n++;
32228 if (pfx & PFX_DS) n++;
32229 if (pfx & PFX_ES) n++;
32230 if (pfx & PFX_FS) n++;
32231 if (pfx & PFX_GS) n++;
32232 if (pfx & PFX_SS) n++;
32238 if ((pfx & PFX_FS) && !vbi->guest_amd64_assume_fs_is_const)
32242 if ((pfx & PFX_GS) && !vbi->guest_amd64_assume_gs_is_const)
32247 if (pfx & PFX_66) sz = 2;
32248 if ((pfx & PFX_REX) && (pfx & PFX_REXW)) sz = 8;
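[Editor's note] The two lines above resolve the integer operand size: an operand-size prefix (0x66) narrows it to 2, and REX.W, tested last, forces 8 even when 0x66 is also present. A hedged sketch of that ordering follows; it assumes sz is initialised to the 4-byte default earlier in the function, and again uses placeholder PFX_* bits rather than the real definitions.

    /* Sketch of operand-size resolution; assumes sz defaults to 4. */
    #include <stdio.h>

    typedef unsigned int Prefix;
    #define PFX_66   (1u << 0)   /* placeholder bits */
    #define PFX_REX  (1u << 1)
    #define PFX_REXW (1u << 2)

    static int operandSize ( Prefix pfx )
    {
       int sz = 4;                                        /* assumed default  */
       if (pfx & PFX_66) sz = 2;                          /* 0x66: 16-bit ops */
       if ((pfx & PFX_REX) && (pfx & PFX_REXW)) sz = 8;   /* REX.W wins       */
       return sz;
    }

    int main ( void )
    {
       printf("%d\n", operandSize(0));                         /* 4 */
       printf("%d\n", operandSize(PFX_66));                    /* 2 */
       printf("%d\n", operandSize(PFX_66|PFX_REX|PFX_REXW));   /* 8 */
       return 0;
    }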
32253 if (haveLOCK(pfx)) {
32264 if (!(pfx & PFX_VEX)) {
32282 if (!(pfx & PFX_VEX)) {
32291 archinfo, vbi, pfx, sz, delta );
32296 archinfo, vbi, pfx, sz, delta );
32301 archinfo, vbi, pfx, sz, delta );
32306 archinfo, vbi, pfx, sz, delta );
32322 archinfo, vbi, pfx, sz, delta );
32328 archinfo, vbi, pfx, sz, delta );
32334 archinfo, vbi, pfx, sz, delta );
32346 if (getVexNvvvv(pfx) != 0)
32379 haveREX(pfx) ? 1 : 0, getRexW(pfx), getRexR(pfx),
32380 getRexX(pfx), getRexB(pfx));
32382 haveVEX(pfx) ? 1 : 0, getVexL(pfx),
32383 getVexNvvvv(pfx),
32388 vex_printf("vex amd64->IR: PFX.66=%d PFX.F2=%d PFX.F3=%d\n",
32389 have66(pfx) ? 1 : 0, haveF2(pfx) ? 1 : 0,
32390 haveF3(pfx) ? 1 : 0);