Lines Matching refs:pfx
683 static Bool IS_VALID_PFX ( Prefix pfx ) {
684 return toBool((pfx & 0xFF000000) == PFX_EMPTY);
687 static Bool haveREX ( Prefix pfx ) {
688 return toBool(pfx & PFX_REX);
691 static Int getRexW ( Prefix pfx ) {
692 return (pfx & PFX_REXW) ? 1 : 0;
694 static Int getRexR ( Prefix pfx ) {
695 return (pfx & PFX_REXR) ? 1 : 0;
697 static Int getRexX ( Prefix pfx ) {
698 return (pfx & PFX_REXX) ? 1 : 0;
700 static Int getRexB ( Prefix pfx ) {
701 return (pfx & PFX_REXB) ? 1 : 0;
706 static Bool haveF2orF3 ( Prefix pfx ) {
707 return toBool((pfx & (PFX_F2|PFX_F3)) > 0);
709 static Bool haveF2andF3 ( Prefix pfx ) {
710 return toBool((pfx & (PFX_F2|PFX_F3)) == (PFX_F2|PFX_F3));
712 static Bool haveF2 ( Prefix pfx ) {
713 return toBool((pfx & PFX_F2) > 0);
715 static Bool haveF3 ( Prefix pfx ) {
716 return toBool((pfx & PFX_F3) > 0);
719 static Bool have66 ( Prefix pfx ) {
720 return toBool((pfx & PFX_66) > 0);
722 static Bool haveASO ( Prefix pfx ) {
723 return toBool((pfx & PFX_ASO) > 0);
725 static Bool haveLOCK ( Prefix pfx ) {
726 return toBool((pfx & PFX_LOCK) > 0);
729 /* Return True iff pfx has 66 set and F2 and F3 clear */
730 static Bool have66noF2noF3 ( Prefix pfx )
733 toBool((pfx & (PFX_66|PFX_F2|PFX_F3)) == PFX_66);
736 /* Return True iff pfx has F2 set and 66 and F3 clear */
737 static Bool haveF2no66noF3 ( Prefix pfx )
740 toBool((pfx & (PFX_66|PFX_F2|PFX_F3)) == PFX_F2);
743 /* Return True iff pfx has F3 set and 66 and F2 clear */
744 static Bool haveF3no66noF2 ( Prefix pfx )
747 toBool((pfx & (PFX_66|PFX_F2|PFX_F3)) == PFX_F3);
750 /* Return True iff pfx has F3 set and F2 clear */
751 static Bool haveF3noF2 ( Prefix pfx )
754 toBool((pfx & (PFX_F2|PFX_F3)) == PFX_F3);
757 /* Return True iff pfx has F2 set and F3 clear */
758 static Bool haveF2noF3 ( Prefix pfx )
761 toBool((pfx & (PFX_F2|PFX_F3)) == PFX_F2);
764 /* Return True iff pfx has 66, F2 and F3 clear */
765 static Bool haveNo66noF2noF3 ( Prefix pfx )
768 toBool((pfx & (PFX_66|PFX_F2|PFX_F3)) == 0);
771 /* Return True iff pfx has any of 66, F2 and F3 set */
772 static Bool have66orF2orF3 ( Prefix pfx )
774 return toBool( ! haveNo66noF2noF3(pfx) );
777 /* Return True iff pfx has 66 or F3 set */
778 static Bool have66orF3 ( Prefix pfx )
780 return toBool((pfx & (PFX_66|PFX_F3)) > 0);
791 static UInt getVexNvvvv ( Prefix pfx ) {
792 UInt r = (UInt)pfx;
797 static Bool haveVEX ( Prefix pfx ) {
798 return toBool(pfx & PFX_VEX);
801 static Int getVexL ( Prefix pfx ) {
802 return (pfx & PFX_VEXL) ? 1 : 0;
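
A minimal sketch of how these predicates are combined during decode, assuming made-up PFX_66/PFX_F2/PFX_F3 bit positions and a trimmed-down Prefix typedef (the real definitions are not shown in this listing): the 0x0F 0x58 opcode group, for instance, is steered to addps/addpd/addss/addsd purely by which mandatory prefix is present, mirroring the tests around source lines 13348-13366 further down.

   #include <stdio.h>

   typedef unsigned int Prefix;
   #define PFX_66 (1u << 0)   /* assumed bit positions, illustration only */
   #define PFX_F2 (1u << 1)
   #define PFX_F3 (1u << 2)

   static int have66noF2noF3 (Prefix pfx) {
      return (pfx & (PFX_66|PFX_F2|PFX_F3)) == PFX_66;
   }
   static int haveF2no66noF3 (Prefix pfx) {
      return (pfx & (PFX_66|PFX_F2|PFX_F3)) == PFX_F2;
   }
   static int haveF3no66noF2 (Prefix pfx) {
      return (pfx & (PFX_66|PFX_F2|PFX_F3)) == PFX_F3;
   }
   static int haveNo66noF2noF3 (Prefix pfx) {
      return (pfx & (PFX_66|PFX_F2|PFX_F3)) == 0;
   }

   int main (void) {
      Prefix pfx = PFX_F3;                           /* e.g. F3 0F 58 ... */
      if      (haveNo66noF2noF3(pfx)) printf("addps\n");
      else if (have66noF2noF3(pfx))   printf("addpd\n");
      else if (haveF3no66noF2(pfx))   printf("addss\n");
      else if (haveF2no66noF3(pfx))   printf("addsd\n");
      return 0;
   }
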
1164 static IRExpr* getIReg64rexX ( Prefix pfx, UInt lo3bits )
1167 vassert(IS_VALID_PFX(pfx));
1168 return getIReg64( lo3bits | (getRexX(pfx) << 3) );
1171 static const HChar* nameIReg64rexX ( Prefix pfx, UInt lo3bits )
1174 vassert(IS_VALID_PFX(pfx));
1175 return nameIReg( 8, lo3bits | (getRexX(pfx) << 3), False );
1178 static const HChar* nameIRegRexB ( Int sz, Prefix pfx, UInt lo3bits )
1181 vassert(IS_VALID_PFX(pfx));
1183 return nameIReg( sz, lo3bits | (getRexB(pfx) << 3),
1184 toBool(sz==1 && !haveREX(pfx)) );
1187 static IRExpr* getIRegRexB ( Int sz, Prefix pfx, UInt lo3bits )
1190 vassert(IS_VALID_PFX(pfx));
1196 offsetIReg( sz, lo3bits | (getRexB(pfx) << 3),
1203 offsetIReg( sz, lo3bits | (getRexB(pfx) << 3),
1204 toBool(sz==1 && !haveREX(pfx)) ),
1210 static void putIRegRexB ( Int sz, Prefix pfx, UInt lo3bits, IRExpr* e )
1213 vassert(IS_VALID_PFX(pfx));
1217 offsetIReg( sz, lo3bits | (getRexB(pfx) << 3),
1218 toBool(sz==1 && !haveREX(pfx)) ),
1232 static UInt gregOfRexRM ( Prefix pfx, UChar mod_reg_rm )
1235 reg += (pfx & PFX_REXR) ? 8 : 0;
1244 static UInt eregOfRexRM ( Prefix pfx, UChar mod_reg_rm )
1249 rm += (pfx & PFX_REXB) ? 8 : 0;
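
A minimal sketch of the field arithmetic behind gregOfRexRM and eregOfRexRM above, using plain unsigned types rather than the VEX ones: the 3-bit reg and rm fields of the ModRM byte get bit 3 supplied by REX.R and REX.B respectively.

   #include <stdio.h>

   /* reg = ModRM bits 5..3, extended by REX.R */
   static unsigned gregOfRexRM (unsigned rexR, unsigned char mod_reg_rm) {
      unsigned reg = (mod_reg_rm >> 3) & 7;
      return reg + (rexR ? 8 : 0);
   }
   /* rm = ModRM bits 2..0, extended by REX.B */
   static unsigned eregOfRexRM (unsigned rexB, unsigned char mod_reg_rm) {
      unsigned rm = mod_reg_rm & 7;
      return rm + (rexB ? 8 : 0);
   }

   int main (void) {
      /* ModRM 0xC1 = 11 000 001: reg field 0, rm field 1.
         With REX.R = REX.B = 1 these name %r8 and %r9. */
      printf("greg=%u ereg=%u\n",
             gregOfRexRM(1, 0xC1), eregOfRexRM(1, 0xC1));
      return 0;
   }
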
1260 static UInt offsetIRegG ( Int sz, Prefix pfx, UChar mod_reg_rm )
1264 vassert(IS_VALID_PFX(pfx));
1266 reg = gregOfRexRM( pfx, mod_reg_rm );
1267 return offsetIReg( sz, reg, toBool(sz == 1 && !haveREX(pfx)) );
1271 IRExpr* getIRegG ( Int sz, Prefix pfx, UChar mod_reg_rm )
1276 IRExpr_Get( offsetIRegG( sz, pfx, mod_reg_rm ),
1279 return IRExpr_Get( offsetIRegG( sz, pfx, mod_reg_rm ),
1285 void putIRegG ( Int sz, Prefix pfx, UChar mod_reg_rm, IRExpr* e )
1291 stmt( IRStmt_Put( offsetIRegG( sz, pfx, mod_reg_rm ), e ) );
1295 const HChar* nameIRegG ( Int sz, Prefix pfx, UChar mod_reg_rm )
1297 return nameIReg( sz, gregOfRexRM(pfx,mod_reg_rm),
1298 toBool(sz==1 && !haveREX(pfx)) );
1303 IRExpr* getIRegV ( Int sz, Prefix pfx )
1308 IRExpr_Get( offsetIReg( sz, getVexNvvvv(pfx), False ),
1311 return IRExpr_Get( offsetIReg( sz, getVexNvvvv(pfx), False ),
1317 void putIRegV ( Int sz, Prefix pfx, IRExpr* e )
1323 stmt( IRStmt_Put( offsetIReg( sz, getVexNvvvv(pfx), False ), e ) );
1327 const HChar* nameIRegV ( Int sz, Prefix pfx )
1329 return nameIReg( sz, getVexNvvvv(pfx), False );
1339 static UInt offsetIRegE ( Int sz, Prefix pfx, UChar mod_reg_rm )
1343 vassert(IS_VALID_PFX(pfx));
1345 reg = eregOfRexRM( pfx, mod_reg_rm );
1346 return offsetIReg( sz, reg, toBool(sz == 1 && !haveREX(pfx)) );
1350 IRExpr* getIRegE ( Int sz, Prefix pfx, UChar mod_reg_rm )
1355 IRExpr_Get( offsetIRegE( sz, pfx, mod_reg_rm ),
1358 return IRExpr_Get( offsetIRegE( sz, pfx, mod_reg_rm ),
1364 void putIRegE ( Int sz, Prefix pfx, UChar mod_reg_rm, IRExpr* e )
1370 stmt( IRStmt_Put( offsetIRegE( sz, pfx, mod_reg_rm ), e ) );
1374 const HChar* nameIRegE ( Int sz, Prefix pfx, UChar mod_reg_rm )
1376 return nameIReg( sz, eregOfRexRM(pfx,mod_reg_rm),
1377 toBool(sz==1 && !haveREX(pfx)) );
2313 const HChar* segRegTxt ( Prefix pfx )
2315 if (pfx & PFX_CS) return "%cs:";
2316 if (pfx & PFX_DS) return "%ds:";
2317 if (pfx & PFX_ES) return "%es:";
2318 if (pfx & PFX_FS) return "%fs:";
2319 if (pfx & PFX_GS) return "%gs:";
2320 if (pfx & PFX_SS) return "%ss:";
2331 Prefix pfx, IRExpr* virtual )
2338 if (pfx & PFX_FS) {
2348 if (pfx & PFX_GS) {
2361 if (haveASO(pfx))
2455 const VexAbiInfo* vbi, Prefix pfx, Long delta,
2479 DIS(buf, "%s(%s)", segRegTxt(pfx), nameIRegRexB(8,pfx,rm));
2482 handleAddrOverrides(vbi, pfx, getIRegRexB(8,pfx,rm)));
2493 DIS(buf, "%s(%s)", segRegTxt(pfx), nameIRegRexB(8,pfx,rm));
2495 DIS(buf, "%s%lld(%s)", segRegTxt(pfx), d, nameIRegRexB(8,pfx,rm));
2499 handleAddrOverrides(vbi, pfx,
2500 binop(Iop_Add64,getIRegRexB(8,pfx,rm),mkU64(d))));
2510 DIS(buf, "%s%lld(%s)", segRegTxt(pfx), d, nameIRegRexB(8,pfx,rm));
2513 handleAddrOverrides(vbi, pfx,
2514 binop(Iop_Add64,getIRegRexB(8,pfx,rm),mkU64(d))));
2528 DIS(buf, "%s%lld(%%rip)", segRegTxt(pfx), d);
2538 handleAddrOverrides(vbi, pfx,
2566 Bool index_is_SP = toBool(index_r == R_RSP && 0==getRexX(pfx));
2571 DIS(buf, "%s(%s,%s)", segRegTxt(pfx),
2572 nameIRegRexB(8,pfx,base_r),
2573 nameIReg64rexX(pfx,index_r));
2575 DIS(buf, "%s(%s,%s,%d)", segRegTxt(pfx),
2576 nameIRegRexB(8,pfx,base_r),
2577 nameIReg64rexX(pfx,index_r), 1<<scale);
2582 handleAddrOverrides(vbi, pfx,
2584 getIRegRexB(8,pfx,base_r),
2585 binop(Iop_Shl64, getIReg64rexX(pfx,index_r),
2591 DIS(buf, "%s%lld(,%s,%d)", segRegTxt(pfx), d,
2592 nameIReg64rexX(pfx,index_r), 1<<scale);
2596 handleAddrOverrides(vbi, pfx,
2598 binop(Iop_Shl64, getIReg64rexX(pfx,index_r),
2604 DIS(buf, "%s(%s)", segRegTxt(pfx), nameIRegRexB(8,pfx,base_r));
2607 handleAddrOverrides(vbi, pfx, getIRegRexB(8,pfx,base_r)));
2612 DIS(buf, "%s%lld", segRegTxt(pfx), d);
2615 handleAddrOverrides(vbi, pfx, mkU64(d)));
2637 if (index_r == R_RSP && 0==getRexX(pfx)) {
2638 DIS(buf, "%s%lld(%s)", segRegTxt(pfx),
2639 d, nameIRegRexB(8,pfx,base_r));
2642 handleAddrOverrides(vbi, pfx,
2643 binop(Iop_Add64, getIRegRexB(8,pfx,base_r), mkU64(d)) ));
2646 DIS(buf, "%s%lld(%s,%s)", segRegTxt(pfx), d,
2647 nameIRegRexB(8,pfx,base_r),
2648 nameIReg64rexX(pfx,index_r));
2650 DIS(buf, "%s%lld(%s,%s,%d)", segRegTxt(pfx), d,
2651 nameIRegRexB(8,pfx,base_r),
2652 nameIReg64rexX(pfx,index_r), 1<<scale);
2657 handleAddrOverrides(vbi, pfx,
2660 getIRegRexB(8,pfx,base_r),
2662 getIReg64rexX(pfx,index_r), mkU8(scale))),
2684 if (index_r == R_RSP && 0==getRexX(pfx)) {
2685 DIS(buf, "%s%lld(%s)", segRegTxt(pfx),
2686 d, nameIRegRexB(8,pfx,base_r));
2689 handleAddrOverrides(vbi, pfx,
2690 binop(Iop_Add64, getIRegRexB(8,pfx,base_r), mkU64(d)) ));
2693 DIS(buf, "%s%lld(%s,%s)", segRegTxt(pfx), d,
2694 nameIRegRexB(8,pfx,base_r),
2695 nameIReg64rexX(pfx,index_r));
2697 DIS(buf, "%s%lld(%s,%s,%d)", segRegTxt(pfx), d,
2698 nameIRegRexB(8,pfx,base_r),
2699 nameIReg64rexX(pfx,index_r), 1<<scale);
2704 handleAddrOverrides(vbi, pfx,
2707 getIRegRexB(8,pfx,base_r),
2709 getIReg64rexX(pfx,index_r), mkU8(scale))),
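
The SIB cases above build IR for the usual base + (index << scale) + disp effective address; as a plain-C sketch of the same arithmetic (ea_sib is a hypothetical helper, not part of the decoder):

   #include <stdio.h>

   static unsigned long long ea_sib (unsigned long long base,
                                     unsigned long long index,
                                     unsigned scale,      /* log2: 0..3 */
                                     long long disp) {
      return base + (index << scale) + (unsigned long long)disp;
   }

   int main (void) {
      /* 8(%rax,%rcx,4) with %rax = 0x1000, %rcx = 3:
         0x1000 + 3*4 + 8 = 0x1014 */
      printf("0x%llx\n", ea_sib(0x1000, 3, 2, 8));
      return 0;
   }
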
2727 const VexAbiInfo* vbi, Prefix pfx, Long delta,
2751 *rI = index_r | (getRexX(pfx) << 3);
2764 DIS(buf, "%s%lld(,%s)", segRegTxt(pfx), d, vindex);
2766 DIS(buf, "%s%lld(,%s,%d)", segRegTxt(pfx), d, vindex, 1<<scale);
2771 DIS(buf, "%s(%s,%s)", segRegTxt(pfx),
2772 nameIRegRexB(8,pfx,base_r), vindex);
2774 DIS(buf, "%s(%s,%s,%d)", segRegTxt(pfx),
2775 nameIRegRexB(8,pfx,base_r), vindex, 1<<scale);
2788 DIS(buf, "%s%lld(%s,%s)", segRegTxt(pfx), d,
2789 nameIRegRexB(8,pfx,base_r), vindex);
2791 DIS(buf, "%s%lld(%s,%s,%d)", segRegTxt(pfx), d,
2792 nameIRegRexB(8,pfx,base_r), vindex, 1<<scale);
2798 return disAMode_copy2tmp( getIRegRexB(8,pfx,base_r) );
2799 return disAMode_copy2tmp( binop(Iop_Add64, getIRegRexB(8,pfx,base_r),
2808 static UInt lengthAMode ( Prefix pfx, Long delta )
2916 Prefix pfx,
2946 && offsetIRegG(size,pfx,rm) == offsetIRegE(size,pfx,rm)) {
2949 putIRegG(size,pfx,rm, mkU(ty,0));
2952 assign( dst0, getIRegG(size,pfx,rm) );
2953 assign( src, getIRegE(size,pfx,rm) );
2958 putIRegG(size, pfx, rm, mkexpr(dst1));
2963 putIRegG(size, pfx, rm, mkexpr(dst1));
2971 putIRegG(size, pfx, rm, mkexpr(dst1));
2975 nameIRegE(size,pfx,rm),
2976 nameIRegG(size,pfx,rm));
2980 addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 );
2981 assign( dst0, getIRegG(size,pfx,rm) );
2987 putIRegG(size, pfx, rm, mkexpr(dst1));
2992 putIRegG(size, pfx, rm, mkexpr(dst1));
3000 putIRegG(size, pfx, rm, mkexpr(dst1));
3004 dis_buf, nameIRegG(size, pfx, rm));
3032 Prefix pfx,
3062 && offsetIRegG(size,pfx,rm) == offsetIRegE(size,pfx,rm)) {
3063 putIRegE(size,pfx,rm, mkU(ty,0));
3066 assign(dst0, getIRegE(size,pfx,rm));
3067 assign(src, getIRegG(size,pfx,rm));
3072 putIRegE(size, pfx, rm, mkexpr(dst1));
3077 putIRegE(size, pfx, rm, mkexpr(dst1));
3085 putIRegE(size, pfx, rm, mkexpr(dst1));
3089 nameIRegG(size,pfx,rm),
3090 nameIRegE(size,pfx,rm));
3096 addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 );
3098 assign(src, getIRegG(size,pfx,rm));
3101 if (haveLOCK(pfx)) {
3112 if (haveLOCK(pfx)) {
3124 if (haveLOCK(pfx)) {
3141 nameIRegG(size,pfx,rm), dis_buf);
3165 Prefix pfx,
3174 putIRegG(size, pfx, rm, getIRegE(size, pfx, rm));
3176 nameIRegE(size,pfx,rm),
3177 nameIRegG(size,pfx,rm));
3183 IRTemp addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 );
3184 putIRegG(size, pfx, rm, loadLE(szToITy(size), mkexpr(addr)));
3187 nameIRegG(size,pfx,rm));
3212 Prefix pfx,
3224 if (haveF2orF3(pfx)) { *ok = False; return delta0; }
3225 putIRegE(size, pfx, rm, getIRegG(size, pfx, rm));
3227 nameIRegG(size,pfx,rm),
3228 nameIRegE(size,pfx,rm));
3234 if (haveF2(pfx)) { *ok = False; return delta0; }
3236 IRTemp addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 );
3237 storeLE( mkexpr(addr), getIRegG(size, pfx, rm) );
3239 nameIRegG(size,pfx,rm),
3299 Prefix pfx,
3304 putIRegG(szd, pfx, rm,
3307 getIRegE(szs,pfx,rm)));
3311 nameIRegE(szs,pfx,rm),
3312 nameIRegG(szd,pfx,rm));
3320 IRTemp addr = disAMode ( &len, vbi, pfx, delta, dis_buf, 0 );
3321 putIRegG(szd, pfx, rm,
3329 nameIRegG(szd,pfx,rm));
3403 Prefix pfx,
3430 assign(dst0, getIRegE(sz,pfx,modrm));
3449 putIRegE(sz, pfx, modrm, mkexpr(dst1));
3454 nameIRegE(sz,pfx,modrm));
3456 addr = disAMode ( &len, vbi, pfx, delta, dis_buf, /*xtra*/d_sz );
3462 if (haveLOCK(pfx)) {
3473 if (haveLOCK(pfx)) {
3485 if (haveLOCK(pfx)) {
3513 pfx,
3533 assign(dst0, getIRegE(sz, pfx, modrm));
3536 addr = disAMode ( &len, vbi, pfx, delta, dis_buf, /*xtra*/d_sz );
3758 putIRegE(sz, pfx, modrm, mkexpr(dst1));
3766 vex_printf(", %s\n", nameIRegE(sz,pfx,modrm));
3787 Prefix pfx,
3808 if (haveF2orF3(pfx)) {
3815 if (haveF2orF3(pfx)) {
3816 if (haveF2andF3(pfx) || !haveLOCK(pfx)) {
3850 assign( t2, widenUto64(getIRegE(sz, pfx, modrm)) );
3854 src_val, nameIRegE(sz,pfx,modrm));
3857 t_addr = disAMode ( &len, vbi, pfx, delta, dis_buf, 1 );
3886 putIRegE(sz, pfx, modrm, narrowTo(ty, mkexpr(t2m)));
3888 if (haveLOCK(pfx)) {
3997 Prefix pfx, Int sz, Long delta, Bool* decode_OK )
4011 if (haveF2orF3(pfx)) goto unhandled;
4019 getIRegE(sz,pfx,modrm),
4024 nameIRegE(sz, pfx, modrm));
4032 putIRegE(sz, pfx, modrm,
4034 getIRegE(sz, pfx, modrm)));
4036 nameIRegE(sz, pfx, modrm));
4044 assign(src, getIRegE(sz, pfx, modrm));
4048 putIRegE(sz, pfx, modrm, mkexpr(dst1));
4049 DIP("neg%c %s\n", nameISize(sz), nameIRegE(sz, pfx, modrm));
4054 assign(src, getIRegE(sz,pfx,modrm));
4056 nameIRegE(sz,pfx,modrm) );
4061 assign(src, getIRegE(sz,pfx,modrm));
4063 nameIRegE(sz,pfx,modrm) );
4067 assign( t1, getIRegE(sz, pfx, modrm) );
4070 nameIRegE(sz, pfx, modrm));
4074 assign( t1, getIRegE(sz, pfx, modrm) );
4077 nameIRegE(sz, pfx, modrm));
4085 Bool validF2orF3 = haveF2orF3(pfx) ? False : True;
4087 && haveF2orF3(pfx) && !haveF2andF3(pfx) && haveLOCK(pfx)) {
4092 addr = disAMode ( &len, vbi, pfx, delta, dis_buf,
4120 if (haveLOCK(pfx)) {
4136 if (haveLOCK(pfx)) {
4175 Prefix pfx, Long delta, Bool* decode_OK )
4189 if (haveF2orF3(pfx)) goto unhandled;
4190 assign(t1, getIRegE(1, pfx, modrm));
4194 putIRegE(1, pfx, modrm, mkexpr(t2));
4199 putIRegE(1, pfx, modrm, mkexpr(t2));
4208 nameIRegE(1, pfx, modrm));
4211 Bool validF2orF3 = haveF2orF3(pfx) ? False : True;
4213 && haveF2orF3(pfx) && !haveF2andF3(pfx) && haveLOCK(pfx)) {
4218 IRTemp addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
4223 if (haveLOCK(pfx)) {
4233 if (haveLOCK(pfx)) {
4259 Prefix pfx, Int sz, Long delta,
4278 if (haveF2orF3(pfx)
4279 && ! (haveF2(pfx)
4282 assign(t1, getIRegE(sz,pfx,modrm));
4289 putIRegE(sz,pfx,modrm, mkexpr(t2));
4296 putIRegE(sz,pfx,modrm, mkexpr(t2));
4301 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */
4304 assign(t3, getIRegE(sz,pfx,modrm));
4317 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */
4320 assign(t3, getIRegE(sz,pfx,modrm));
4331 assign(t3, getIRegE(sz,pfx,modrm));
4348 nameIRegE(sz, pfx, modrm));
4351 Bool validF2orF3 = haveF2orF3(pfx) ? False : True;
4353 && haveF2orF3(pfx) && !haveF2andF3(pfx) && haveLOCK(pfx)) {
4356 && (haveF2(pfx) && !haveF3(pfx))) {
4361 addr = disAMode ( &len, vbi, pfx, delta, dis_buf, 0 );
4371 if (haveLOCK(pfx)) {
4383 if (haveLOCK(pfx)) {
4394 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */
4410 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */
4470 void dis_string_op( void (*dis_OP)( Int, IRTemp, Prefix pfx ),
4471 Int sz, const HChar* name, Prefix pfx )
4476 vassert(pfx == clearSegBits(pfx));
4478 dis_OP( sz, t_inc, pfx );
4483 void dis_MOVS ( Int sz, IRTemp t_inc, Prefix pfx )
4490 if (haveASO(pfx)) {
4502 if (haveASO(pfx)) {
4511 void dis_LODS ( Int sz, IRTemp t_inc, Prefix pfx )
4517 if (haveASO(pfx))
4525 if (haveASO(pfx))
4531 void dis_STOS ( Int sz, IRTemp t_inc, Prefix pfx )
4540 if (haveASO(pfx))
4548 if (haveASO(pfx))
4554 void dis_CMPS ( Int sz, IRTemp t_inc, Prefix pfx )
4563 if (haveASO(pfx)) {
4579 if (haveASO(pfx)) {
4588 void dis_SCAS ( Int sz, IRTemp t_inc, Prefix pfx )
4598 if (haveASO(pfx))
4608 if (haveASO(pfx))
4622 Prefix pfx )
4630 vassert(pfx == clearSegBits(pfx));
4632 if (haveASO(pfx)) {
4645 if (haveASO(pfx))
4651 dis_OP (sz, t_inc, pfx);
4675 Prefix pfx,
4687 assign( tg, getIRegG(size, pfx, rm) );
4689 assign( te, getIRegE(size, pfx, rm) );
4691 IRTemp addr = disAMode( &alen, vbi, pfx, delta0, dis_buf, 0 );
4699 putIRegG(size, pfx, rm, mkexpr(resLo) );
4703 nameIRegE(size,pfx,rm),
4704 nameIRegG(size,pfx,rm));
4709 nameIRegG(size,pfx,rm));
4718 Prefix pfx,
4735 assign(te, getIRegE(size, pfx, rm));
4738 IRTemp addr = disAMode( &alen, vbi, pfx, delta, dis_buf,
4753 putIRegG(size, pfx, rm, mkexpr(resLo));
4757 ( epartIsReg(rm) ? nameIRegE(size,pfx,rm) : dis_buf ),
4758 nameIRegG(size,pfx,rm) );
5390 const VexAbiInfo* vbi, Prefix pfx, Long delta )
5409 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
5553 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
6083 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
6212 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
6391 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
6501 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
6534 if ( have66(pfx) ) {
6605 if ( have66(pfx) ) {
6615 if ( have66(pfx) ) {
6670 if ( have66(pfx) ) {
6765 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
6886 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
7064 Prefix pfx,
7170 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
7214 Prefix pfx, Long delta,
7234 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
7346 const VexAbiInfo* vbi, Prefix pfx, Int sz, Long delta )
7369 getIReg32(eregOfRexRM(pfx,modrm)) ) );
7371 nameIReg32(eregOfRexRM(pfx,modrm)),
7374 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
7391 getIReg64(eregOfRexRM(pfx,modrm)) );
7393 nameIReg64(eregOfRexRM(pfx,modrm)),
7396 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
7414 putIReg32( eregOfRexRM(pfx,modrm),
7418 nameIReg32(eregOfRexRM(pfx,modrm)));
7420 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
7433 putIReg64( eregOfRexRM(pfx,modrm),
7437 nameIReg64(eregOfRexRM(pfx,modrm)));
7439 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
7453 && /*ignore redundant REX.W*/!(sz==8 && haveNo66noF2noF3(pfx)))
7463 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
7474 && /*ignore redundant REX.W*/!(sz==8 && haveNo66noF2noF3(pfx)))
7484 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
7497 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "padd", True );
7503 && /*ignore redundant REX.W*/!(sz==8 && haveNo66noF2noF3(pfx)))
7505 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "padds", True );
7512 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "paddus", True );
7520 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "psub", True );
7527 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "psubs", True );
7534 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "psubus", True );
7540 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pmulhw", False );
7546 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pmullw", False );
7551 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pmaddwd", False );
7559 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pcmpeq", True );
7567 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pcmpgt", True );
7573 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "packssdw", False );
7579 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "packsswb", False );
7585 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "packuswb", False );
7592 && /*ignore redundant REX.W*/!(sz==8 && haveNo66noF2noF3(pfx)))
7594 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "punpckh", True );
7601 && /*ignore redundant REX.W*/!(sz==8 && haveNo66noF2noF3(pfx)))
7603 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "punpckl", True );
7609 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pand", False );
7615 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pandn", False );
7621 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "por", False );
7627 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pxor", False );
7631 delta = dis_MMX_shiftG_byE(vbi, pfx, delta, _name, _op); \
7702 assign( addr, handleAddrOverrides( vbi, pfx, getIReg64(R_RDI) ));
7784 Prefix pfx,
7826 assign( gsrc, getIRegG(sz, pfx, modrm) );
7830 assign( esrc, getIRegE(sz, pfx, modrm) );
7834 nameIRegG(sz, pfx, modrm), nameIRegE(sz, pfx, modrm));
7836 addr = disAMode ( &len, vbi, pfx, delta, dis_buf,
7844 nameIRegG(sz, pfx, modrm), dis_buf);
7940 putIRegE(sz, pfx, modrm, mkexpr(resTy));
7969 Prefix pfx, Int sz, Long delta, BtOp op,
7995 if (haveF2orF3(pfx)) {
8002 if (haveF2orF3(pfx)) {
8003 if (haveF2andF3(pfx) || !haveLOCK(pfx) || op == BtOpNone) {
8010 assign( t_bitno0, widenSto64(getIRegG(sz, pfx, modrm)) );
8035 storeLE( mkexpr(t_rsp), getIRegE(sz, pfx, modrm) );
8047 t_addr0 = disAMode ( &len, vbi, pfx, delta, dis_buf, 0 );
8098 if ((haveLOCK(pfx)) && !epartIsReg(modrm)) {
8130 putIRegE(sz, pfx, modrm, loadLE(szToITy(sz), mkexpr(t_rsp)) );
8135 nameBtOp(op), nameISize(sz), nameIRegG(sz, pfx, modrm),
8136 ( epartIsReg(modrm) ? nameIRegE(sz, pfx, modrm) : dis_buf ) );
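
The haveF2orF3/haveF2andF3/haveLOCK checks around source lines 8002-8003 appear to encode the rule that an F2 or F3 byte on these read-modify-write forms is only tolerated as an xacquire/xrelease-style hint: exactly one of F2/F3, and only together with LOCK (the BtOpNone clause additionally rejects the hint on the non-writing BT form). A sketch of that acceptance test, with assumed bit positions:

   #include <stdbool.h>
   #include <stdio.h>

   typedef unsigned int Prefix;
   #define PFX_F2   (1u << 0)   /* assumed bit positions, illustration only */
   #define PFX_F3   (1u << 1)
   #define PFX_LOCK (1u << 2)

   static bool hint_prefix_ok (Prefix pfx) {
      bool f2   = (pfx & PFX_F2)   != 0;
      bool f3   = (pfx & PFX_F3)   != 0;
      bool lock = (pfx & PFX_LOCK) != 0;
      if (!f2 && !f3) return true;    /* no hint at all: always fine */
      return (f2 != f3) && lock;      /* one hint, and only with LOCK */
   }

   int main (void) {
      printf("%d %d %d\n",
             hint_prefix_ok(PFX_F2 | PFX_LOCK),           /* accepted */
             hint_prefix_ok(PFX_F2),                      /* rejected */
             hint_prefix_ok(PFX_F2 | PFX_F3 | PFX_LOCK)); /* rejected */
      return 0;
   }
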
8146 Prefix pfx, Int sz, Long delta, Bool fwds )
8165 assign( src, getIRegE(sz, pfx, modrm) );
8168 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 );
8175 ( isReg ? nameIRegE(sz, pfx, modrm) : dis_buf ),
8176 nameIRegG(sz, pfx, modrm));
8242 widenUto64( getIRegG( sz, pfx, modrm ) )
8255 putIRegG( sz, pfx, modrm, mkexpr(dst) );
8263 void codegen_xchg_rAX_Reg ( Prefix pfx, Int sz, UInt regLo3 )
8272 assign( t2, getIRegRexB(8, pfx, regLo3) );
8274 putIRegRexB(8, pfx, regLo3, mkexpr(t1) );
8277 assign( t2, getIRegRexB(4, pfx, regLo3) );
8279 putIRegRexB(4, pfx, regLo3, mkexpr(t1) );
8282 assign( t2, getIRegRexB(2, pfx, regLo3) );
8284 pfx, regLo3, mkexpr(t1) );
8288 nameIRegRexB(sz,pfx, regLo3));
8347 Prefix pfx,
8379 if (haveF2orF3(pfx)) {
8384 if (haveF2orF3(pfx)) {
8385 if (haveF2andF3(pfx) || !haveLOCK(pfx)) {
8394 assign( dest, getIRegE(size, pfx, rm) );
8396 assign( src, getIRegG(size, pfx, rm) );
8403 putIRegE(size, pfx, rm, mkexpr(dest2));
8405 nameIRegG(size,pfx,rm),
8406 nameIRegE(size,pfx,rm) );
8408 else if (!epartIsReg(rm) && !haveLOCK(pfx)) {
8410 addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 );
8413 assign( src, getIRegG(size, pfx, rm) );
8422 nameIRegG(size,pfx,rm), dis_buf);
8424 else if (!epartIsReg(rm) && haveLOCK(pfx)) {
8430 addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 );
8432 assign( src, getIRegG(size, pfx, rm) );
8443 nameIRegG(size,pfx,rm), dis_buf);
8471 Prefix pfx,
8485 assign( tmps, getIRegE(sz, pfx, rm) );
8486 assign( tmpd, getIRegG(sz, pfx, rm) );
8488 putIRegG( sz, pfx, rm,
8494 nameIRegE(sz,pfx,rm),
8495 nameIRegG(sz,pfx,rm));
8501 IRTemp addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 );
8503 assign( tmpd, getIRegG(sz, pfx, rm) );
8505 putIRegG( sz, pfx, rm,
8513 nameIRegG(sz,pfx,rm));
8522 Prefix pfx, Int sz, Long delta0 )
8546 assign( tmpd, getIRegE(sz, pfx, rm) );
8547 assign( tmpt0, getIRegG(sz, pfx, rm) );
8551 putIRegG(sz, pfx, rm, mkexpr(tmpd));
8552 putIRegE(sz, pfx, rm, mkexpr(tmpt1));
8554 nameISize(sz), nameIRegG(sz,pfx,rm), nameIRegE(sz,pfx,rm));
8558 else if (!epartIsReg(rm) && !haveLOCK(pfx)) {
8560 IRTemp addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 );
8562 assign( tmpt0, getIRegG(sz, pfx, rm) );
8567 putIRegG(sz, pfx, rm, mkexpr(tmpd));
8569 nameISize(sz), nameIRegG(sz,pfx,rm), dis_buf);
8573 else if (!epartIsReg(rm) && haveLOCK(pfx)) {
8575 IRTemp addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 );
8577 assign( tmpt0, getIRegG(sz, pfx, rm) );
8583 putIRegG(sz, pfx, rm, mkexpr(tmpd));
8585 nameISize(sz), nameIRegG(sz,pfx,rm), dis_buf);
8671 Prefix pfx,
8680 putIRegE(size, pfx, rm, mkU(szToITy(size), 0));
8681 DIP("mov %s,%s\n", nameSReg(gregOfRexRM(pfx, rm)),
8682 nameIRegE(size, pfx, rm));
8688 IRTemp addr = disAMode(&len, vbi, pfx, delta0, dis_buf, 0);
8690 DIP("mov %s,%s\n", nameSReg(gregOfRexRM(pfx, rm)),
8778 Prefix pfx, Long delta,
8789 = invertG ? unop(Iop_NotV128, getXMMReg(gregOfRexRM(pfx,rm)))
8790 : getXMMReg(gregOfRexRM(pfx,rm));
8793 gregOfRexRM(pfx,rm),
8797 getXMMReg(eregOfRexRM(pfx,rm)))
8799 getXMMReg(eregOfRexRM(pfx,rm)))
8802 nameXMMReg(eregOfRexRM(pfx,rm)),
8803 nameXMMReg(gregOfRexRM(pfx,rm)) );
8806 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
8808 gregOfRexRM(pfx,rm),
8818 nameXMMReg(gregOfRexRM(pfx,rm)) );
8828 Prefix pfx, Long delta,
8831 return dis_SSE_E_to_G_all_wrk( vbi, pfx, delta, opname, op, False );
8838 Prefix pfx, Long delta,
8841 return dis_SSE_E_to_G_all_wrk( vbi, pfx, delta, opname, op, True );
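
A minimal sketch of the combine performed by dis_SSE_E_to_G_all_wrk, operating on two 64-bit halves in plain C instead of V128 IR values; the invertG flag corresponds to the optional Iop_NotV128 on the G operand at source lines 8789-8790 above (used for andnps/andnpd):

   #include <stdint.h>
   #include <stdio.h>

   typedef struct { uint64_t lo, hi; } V128;

   static V128 and_v128 (V128 a, V128 b) {
      return (V128){ a.lo & b.lo, a.hi & b.hi };
   }
   static V128 not_v128 (V128 a) {
      return (V128){ ~a.lo, ~a.hi };
   }

   /* result is written back to G, as putXMMReg(greg, ...) does above */
   static V128 e_to_g_all (V128 g, V128 e, int invertG,
                           V128 (*op)(V128, V128)) {
      V128 gpart = invertG ? not_v128(g) : g;
      return op(gpart, e);
   }

   int main (void) {
      V128 g = { 0xff00ff00ff00ff00ULL, 0x0123456789abcdefULL };
      V128 e = { 0x0ff00ff00ff00ff0ULL, 0xffffffffffffffffULL };
      V128 r = e_to_g_all(g, e, 1, and_v128);   /* andn-style: (~G) & E */
      printf("%016llx %016llx\n",
             (unsigned long long) r.hi, (unsigned long long) r.lo);
      return 0;
   }
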
8848 Prefix pfx, Long delta,
8855 IRExpr* gpart = getXMMReg(gregOfRexRM(pfx,rm));
8857 putXMMReg( gregOfRexRM(pfx,rm),
8859 getXMMReg(eregOfRexRM(pfx,rm))) );
8861 nameXMMReg(eregOfRexRM(pfx,rm)),
8862 nameXMMReg(gregOfRexRM(pfx,rm)) );
8868 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
8871 putXMMReg( gregOfRexRM(pfx,rm),
8875 nameXMMReg(gregOfRexRM(pfx,rm)) );
8884 Prefix pfx, Long delta,
8891 IRExpr* gpart = getXMMReg(gregOfRexRM(pfx,rm));
8893 putXMMReg( gregOfRexRM(pfx,rm),
8895 getXMMReg(eregOfRexRM(pfx,rm))) );
8897 nameXMMReg(eregOfRexRM(pfx,rm)),
8898 nameXMMReg(gregOfRexRM(pfx,rm)) );
8904 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
8907 putXMMReg( gregOfRexRM(pfx,rm),
8911 nameXMMReg(gregOfRexRM(pfx,rm)) );
8921 Prefix pfx, Long delta,
8933 IRExpr* src = getXMMReg(eregOfRexRM(pfx,rm));
8937 putXMMReg( gregOfRexRM(pfx,rm), res );
8939 nameXMMReg(eregOfRexRM(pfx,rm)),
8940 nameXMMReg(gregOfRexRM(pfx,rm)) );
8943 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
8948 putXMMReg( gregOfRexRM(pfx,rm), res );
8951 nameXMMReg(gregOfRexRM(pfx,rm)) );
8961 Prefix pfx, Long delta,
8974 assign( oldG0, getXMMReg(gregOfRexRM(pfx,rm)) );
8980 getXMMRegLane32(eregOfRexRM(pfx,rm), 0)) );
8981 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) );
8983 nameXMMReg(eregOfRexRM(pfx,rm)),
8984 nameXMMReg(gregOfRexRM(pfx,rm)) );
8987 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
8992 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) );
8995 nameXMMReg(gregOfRexRM(pfx,rm)) );
9005 Prefix pfx, Long delta,
9018 assign( oldG0, getXMMReg(gregOfRexRM(pfx,rm)) );
9024 getXMMRegLane64(eregOfRexRM(pfx,rm), 0)) );
9025 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) );
9027 nameXMMReg(eregOfRexRM(pfx,rm)),
9028 nameXMMReg(gregOfRexRM(pfx,rm)) );
9031 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
9036 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) );
9039 nameXMMReg(gregOfRexRM(pfx,rm)) );
9051 Prefix pfx, Long delta,
9060 IRExpr* gpart = getXMMReg(gregOfRexRM(pfx,rm));
9063 epart = getXMMReg(eregOfRexRM(pfx,rm));
9065 nameXMMReg(eregOfRexRM(pfx,rm)),
9066 nameXMMReg(gregOfRexRM(pfx,rm)) );
9069 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
9073 nameXMMReg(gregOfRexRM(pfx,rm)) );
9076 putXMMReg( gregOfRexRM(pfx,rm),
9219 Prefix pfx, Long delta,
9240 assign( plain, binop(op, getXMMReg(gregOfRexRM(pfx,rm)),
9241 getXMMReg(eregOfRexRM(pfx,rm))) );
9245 nameXMMReg(eregOfRexRM(pfx,rm)),
9246 nameXMMReg(gregOfRexRM(pfx,rm)) );
9248 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
9257 getXMMReg(gregOfRexRM(pfx,rm)),
9270 nameXMMReg(gregOfRexRM(pfx,rm)) );
9274 putXMMReg( gregOfRexRM(pfx,rm),
9280 putXMMReg( gregOfRexRM(pfx,rm),
9284 putXMMReg( gregOfRexRM(pfx,rm), mkexpr(plain) );
9295 Prefix pfx, Long delta,
9308 assign( amt, getXMMRegLane64(eregOfRexRM(pfx,rm), 0) );
9310 nameXMMReg(eregOfRexRM(pfx,rm)),
9311 nameXMMReg(gregOfRexRM(pfx,rm)) );
9314 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
9318 nameXMMReg(gregOfRexRM(pfx,rm)) );
9321 assign( g0, getXMMReg(gregOfRexRM(pfx,rm)) );
9361 putXMMReg( gregOfRexRM(pfx,rm), mkexpr(g1) );
9369 ULong dis_SSE_shiftE_imm ( Prefix pfx,
9384 nameXMMReg(eregOfRexRM(pfx,rm)) );
9385 assign( e0, getXMMReg(eregOfRexRM(pfx,rm)) );
9416 putXMMReg( eregOfRexRM(pfx,rm), mkexpr(e1) );
10050 static Long dis_COMISD ( const VexAbiInfo* vbi, Prefix pfx,
10061 assign( argR, getXMMRegLane64F( eregOfRexRM(pfx,modrm),
10066 nameXMMReg(eregOfRexRM(pfx,modrm)),
10067 nameXMMReg(gregOfRexRM(pfx,modrm)) );
10069 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10075 nameXMMReg(gregOfRexRM(pfx,modrm)) );
10077 assign( argL, getXMMRegLane64F( gregOfRexRM(pfx,modrm),
10093 static Long dis_COMISS ( const VexAbiInfo* vbi, Prefix pfx,
10104 assign( argR, getXMMRegLane32F( eregOfRexRM(pfx,modrm),
10109 nameXMMReg(eregOfRexRM(pfx,modrm)),
10110 nameXMMReg(gregOfRexRM(pfx,modrm)) );
10112 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10118 nameXMMReg(gregOfRexRM(pfx,modrm)) );
10120 assign( argL, getXMMRegLane32F( gregOfRexRM(pfx,modrm),
10138 static Long dis_PSHUFD_32x4 ( const VexAbiInfo* vbi, Prefix pfx,
10149 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) );
10153 nameXMMReg(eregOfRexRM(pfx,modrm)),
10154 nameXMMReg(gregOfRexRM(pfx,modrm)));
10156 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf,
10163 nameXMMReg(gregOfRexRM(pfx,modrm)));
10179 (gregOfRexRM(pfx,modrm), mkexpr(dV));
10184 static Long dis_PSHUFD_32x8 ( const VexAbiInfo* vbi, Prefix pfx, Long delta )
10192 UInt rG = gregOfRexRM(pfx,modrm);
10194 UInt rE = eregOfRexRM(pfx,modrm);
10200 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf,
10319 static Long dis_CVTxSD2SI ( const VexAbiInfo* vbi, Prefix pfx,
10333 assign(f64lo, getXMMRegLane64F(eregOfRexRM(pfx,modrm), 0));
10335 nameXMMReg(eregOfRexRM(pfx,modrm)),
10336 nameIReg(sz, gregOfRexRM(pfx,modrm),
10339 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10344 nameIReg(sz, gregOfRexRM(pfx,modrm),
10355 putIReg32( gregOfRexRM(pfx,modrm),
10359 putIReg64( gregOfRexRM(pfx,modrm),
10367 static Long dis_CVTxSS2SI ( const VexAbiInfo* vbi, Prefix pfx,
10381 assign(f32lo, getXMMRegLane32F(eregOfRexRM(pfx,modrm), 0));
10383 nameXMMReg(eregOfRexRM(pfx,modrm)),
10384 nameIReg(sz, gregOfRexRM(pfx,modrm),
10387 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10392 nameIReg(sz, gregOfRexRM(pfx,modrm),
10403 putIReg32( gregOfRexRM(pfx,modrm),
10409 putIReg64( gregOfRexRM(pfx,modrm),
10419 static Long dis_CVTPS2PD_128 ( const VexAbiInfo* vbi, Prefix pfx,
10428 UInt rG = gregOfRexRM(pfx,modrm);
10430 UInt rE = eregOfRexRM(pfx,modrm);
10437 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10454 static Long dis_CVTPS2PD_256 ( const VexAbiInfo* vbi, Prefix pfx,
10465 UInt rG = gregOfRexRM(pfx,modrm);
10467 UInt rE = eregOfRexRM(pfx,modrm);
10475 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10495 static Long dis_CVTPD2PS_128 ( const VexAbiInfo* vbi, Prefix pfx,
10502 UInt rG = gregOfRexRM(pfx,modrm);
10506 UInt rE = eregOfRexRM(pfx,modrm);
10512 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10540 static Long dis_CVTxPS2DQ_128 ( const VexAbiInfo* vbi, Prefix pfx,
10549 UInt rG = gregOfRexRM(pfx,modrm);
10553 UInt rE = eregOfRexRM(pfx,modrm);
10559 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10590 static Long dis_CVTxPS2DQ_256 ( const VexAbiInfo* vbi, Prefix pfx,
10599 UInt rG = gregOfRexRM(pfx,modrm);
10603 UInt rE = eregOfRexRM(pfx,modrm);
10609 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10642 static Long dis_CVTxPD2DQ_128 ( const VexAbiInfo* vbi, Prefix pfx,
10651 UInt rG = gregOfRexRM(pfx,modrm);
10655 UInt rE = eregOfRexRM(pfx,modrm);
10661 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10697 static Long dis_CVTxPD2DQ_256 ( const VexAbiInfo* vbi, Prefix pfx,
10706 UInt rG = gregOfRexRM(pfx,modrm);
10710 UInt rE = eregOfRexRM(pfx,modrm);
10716 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10751 static Long dis_CVTDQ2PS_128 ( const VexAbiInfo* vbi, Prefix pfx,
10760 UInt rG = gregOfRexRM(pfx,modrm);
10764 UInt rE = eregOfRexRM(pfx,modrm);
10770 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10799 static Long dis_CVTDQ2PS_256 ( const VexAbiInfo* vbi, Prefix pfx,
10808 UInt rG = gregOfRexRM(pfx,modrm);
10812 UInt rE = eregOfRexRM(pfx,modrm);
10817 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
10852 static Long dis_PMOVMSKB_128 ( const VexAbiInfo* vbi, Prefix pfx,
10857 UInt rE = eregOfRexRM(pfx,modrm);
10858 UInt rG = gregOfRexRM(pfx,modrm);
10871 static Long dis_PMOVMSKB_256 ( const VexAbiInfo* vbi, Prefix pfx,
10876 UInt rE = eregOfRexRM(pfx,modrm);
10877 UInt rG = gregOfRexRM(pfx,modrm);
11326 static Long dis_PSHUFxW_128 ( const VexAbiInfo* vbi, Prefix pfx,
11333 UInt rG = gregOfRexRM(pfx,modrm);
11343 UInt rE = eregOfRexRM(pfx,modrm);
11351 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
11381 static Long dis_PSHUFxW_256 ( const VexAbiInfo* vbi, Prefix pfx,
11388 UInt rG = gregOfRexRM(pfx,modrm);
11397 UInt rE = eregOfRexRM(pfx,modrm);
11404 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
11428 static Long dis_PEXTRW_128_EregOnly_toG ( const VexAbiInfo* vbi, Prefix pfx,
11433 UInt rG = gregOfRexRM(pfx,modrm);
11439 UInt rE = eregOfRexRM(pfx,modrm);
11467 static Long dis_CVTDQ2PD_128 ( const VexAbiInfo* vbi, Prefix pfx,
11475 UInt rG = gregOfRexRM(pfx,modrm);
11478 UInt rE = eregOfRexRM(pfx,modrm);
11483 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
11502 static Long dis_STMXCSR ( const VexAbiInfo* vbi, Prefix pfx,
11510 vassert(gregOfRexRM(pfx,modrm) == 3); /* ditto */
11512 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
11534 static Long dis_LDMXCSR ( const VexAbiInfo* vbi, Prefix pfx,
11542 vassert(gregOfRexRM(pfx,modrm) == 2); /* ditto */
11547 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
11719 Prefix pfx, Long delta, Int sz )
11734 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
11768 Prefix pfx, Long delta, Int sz )
11778 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
11978 Prefix pfx, Long delta, Int sz )
11989 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12048 Prefix pfx, Long delta, Int sz )
12059 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12141 static Long dis_MASKMOVDQU ( const VexAbiInfo* vbi, Prefix pfx,
12150 UInt rG = gregOfRexRM(pfx,modrm);
12151 UInt rE = eregOfRexRM(pfx,modrm);
12153 assign( addr, handleAddrOverrides( vbi, pfx, getIReg64(R_RDI) ));
12162 pfx,modrm), 1 ),
12165 getXMMRegLane64( eregOfRexRM(pfx,modrm), 0 ),
12184 static Long dis_MOVMSKPS_128 ( const VexAbiInfo* vbi, Prefix pfx,
12188 UInt rG = gregOfRexRM(pfx,modrm);
12189 UInt rE = eregOfRexRM(pfx,modrm);
12216 static Long dis_MOVMSKPS_256 ( const VexAbiInfo* vbi, Prefix pfx, Long delta )
12219 UInt rG = gregOfRexRM(pfx,modrm);
12220 UInt rE = eregOfRexRM(pfx,modrm);
12266 static Long dis_MOVMSKPD_128 ( const VexAbiInfo* vbi, Prefix pfx,
12270 UInt rG = gregOfRexRM(pfx,modrm);
12271 UInt rE = eregOfRexRM(pfx,modrm);
12288 static Long dis_MOVMSKPD_256 ( const VexAbiInfo* vbi, Prefix pfx, Long delta )
12291 UInt rG = gregOfRexRM(pfx,modrm);
12292 UInt rE = eregOfRexRM(pfx,modrm);
12324 Prefix pfx, Int sz, Long deltaIN,
12347 if (have66noF2noF3(pfx)
12352 putXMMReg( gregOfRexRM(pfx,modrm),
12353 getXMMReg( eregOfRexRM(pfx,modrm) ));
12354 DIP("movupd %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
12355 nameXMMReg(gregOfRexRM(pfx,modrm)));
12358 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12359 putXMMReg( gregOfRexRM(pfx,modrm),
12362 nameXMMReg(gregOfRexRM(pfx,modrm)));
12370 if (haveF2no66noF3(pfx)
12374 putXMMRegLane64( gregOfRexRM(pfx,modrm), 0,
12375 getXMMRegLane64( eregOfRexRM(pfx,modrm), 0 ));
12376 DIP("movsd %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
12377 nameXMMReg(gregOfRexRM(pfx,modrm)));
12380 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12381 putXMMReg( gregOfRexRM(pfx,modrm), mkV128(0) );
12382 putXMMRegLane64( gregOfRexRM(pfx,modrm), 0,
12385 nameXMMReg(gregOfRexRM(pfx,modrm)));
12392 if (haveF3no66noF2(pfx)
12396 putXMMRegLane32( gregOfRexRM(pfx,modrm), 0,
12397 getXMMRegLane32( eregOfRexRM(pfx,modrm), 0 ));
12398 DIP("movss %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
12399 nameXMMReg(gregOfRexRM(pfx,modrm)));
12402 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12403 putXMMReg( gregOfRexRM(pfx,modrm), mkV128(0) );
12404 putXMMRegLane32( gregOfRexRM(pfx,modrm), 0,
12407 nameXMMReg(gregOfRexRM(pfx,modrm)));
12413 if (haveNo66noF2noF3(pfx)
12417 putXMMReg( gregOfRexRM(pfx,modrm),
12418 getXMMReg( eregOfRexRM(pfx,modrm) ));
12419 DIP("movups %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
12420 nameXMMReg(gregOfRexRM(pfx,modrm)));
12423 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12424 putXMMReg( gregOfRexRM(pfx,modrm),
12427 nameXMMReg(gregOfRexRM(pfx,modrm)));
12437 if (haveF2no66noF3(pfx)
12441 putXMMRegLane64( eregOfRexRM(pfx,modrm), 0,
12442 getXMMRegLane64( gregOfRexRM(pfx,modrm), 0 ));
12443 DIP("movsd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
12444 nameXMMReg(eregOfRexRM(pfx,modrm)));
12447 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12449 getXMMRegLane64(gregOfRexRM(pfx,modrm), 0) );
12450 DIP("movsd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
12458 if (haveF3no66noF2(pfx) && sz == 4) {
12463 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12465 getXMMRegLane32(gregOfRexRM(pfx,modrm), 0) );
12466 DIP("movss %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
12473 if (have66noF2noF3(pfx)
12477 putXMMReg( eregOfRexRM(pfx,modrm),
12478 getXMMReg( gregOfRexRM(pfx,modrm) ) );
12479 DIP("movupd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
12480 nameXMMReg(eregOfRexRM(pfx,modrm)));
12483 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12484 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) );
12485 DIP("movupd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
12492 if (haveNo66noF2noF3(pfx)
12498 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12499 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) );
12500 DIP("movups %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
12511 if (have66noF2noF3(pfx)
12517 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12519 putXMMRegLane64( gregOfRexRM(pfx,modrm),
12523 dis_buf, nameXMMReg( gregOfRexRM(pfx,modrm) ));
12529 if (haveNo66noF2noF3(pfx)
12534 putXMMRegLane64( gregOfRexRM(pfx,modrm),
12536 getXMMRegLane64( eregOfRexRM(pfx,modrm), 1 ));
12537 DIP("movhlps %s, %s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
12538 nameXMMReg(gregOfRexRM(pfx,modrm)));
12540 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12542 putXMMRegLane64( gregOfRexRM(pfx,modrm), 0/*lower lane*/,
12545 dis_buf, nameXMMReg( gregOfRexRM(pfx,modrm) ));
12553 if (haveNo66noF2noF3(pfx)
12557 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12560 getXMMRegLane64( gregOfRexRM(pfx,modrm),
12562 DIP("movlps %s, %s\n", nameXMMReg( gregOfRexRM(pfx,modrm) ),
12570 if (have66noF2noF3(pfx)
12574 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12577 getXMMRegLane64( gregOfRexRM(pfx,modrm),
12579 DIP("movlpd %s, %s\n", nameXMMReg( gregOfRexRM(pfx,modrm) ),
12592 if (haveNo66noF2noF3(pfx) && sz == 4) {
12597 UInt rG = gregOfRexRM(pfx,modrm);
12600 UInt rE = eregOfRexRM(pfx,modrm);
12606 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12619 if (have66noF2noF3(pfx)
12625 UInt rG = gregOfRexRM(pfx,modrm);
12628 UInt rE = eregOfRexRM(pfx,modrm);
12634 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12650 if (have66noF2noF3(pfx)
12656 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12658 putXMMRegLane64( gregOfRexRM(pfx,modrm), 1/*upper lane*/,
12661 nameXMMReg( gregOfRexRM(pfx,modrm) ));
12667 if (haveNo66noF2noF3(pfx)
12672 putXMMRegLane64( gregOfRexRM(pfx,modrm), 1/*upper lane*/,
12673 getXMMRegLane64( eregOfRexRM(pfx,modrm), 0 ) );
12674 DIP("movhps %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
12675 nameXMMReg(gregOfRexRM(pfx,modrm)));
12677 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12679 putXMMRegLane64( gregOfRexRM(pfx,modrm), 1/*upper lane*/,
12682 nameXMMReg( gregOfRexRM(pfx,modrm) ));
12690 if (haveNo66noF2noF3(pfx)
12694 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12697 getXMMRegLane64( gregOfRexRM(pfx,modrm),
12699 DIP("movhps %s,%s\n", nameXMMReg( gregOfRexRM(pfx,modrm) ),
12707 if (have66noF2noF3(pfx)
12711 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12714 getXMMRegLane64( gregOfRexRM(pfx,modrm),
12716 DIP("movhpd %s,%s\n", nameXMMReg( gregOfRexRM(pfx,modrm) ),
12729 if (haveNo66noF2noF3(pfx)
12738 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12756 if (have66noF2noF3(pfx)
12760 putXMMReg( gregOfRexRM(pfx,modrm),
12761 getXMMReg( eregOfRexRM(pfx,modrm) ));
12762 DIP("movapd %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
12763 nameXMMReg(gregOfRexRM(pfx,modrm)));
12766 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12768 putXMMReg( gregOfRexRM(pfx,modrm),
12771 nameXMMReg(gregOfRexRM(pfx,modrm)));
12777 if (haveNo66noF2noF3(pfx)
12781 putXMMReg( gregOfRexRM(pfx,modrm),
12782 getXMMReg( eregOfRexRM(pfx,modrm) ));
12783 DIP("movaps %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
12784 nameXMMReg(gregOfRexRM(pfx,modrm)));
12787 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12789 putXMMReg( gregOfRexRM(pfx,modrm),
12792 nameXMMReg(gregOfRexRM(pfx,modrm)));
12801 if (haveNo66noF2noF3(pfx)
12805 putXMMReg( eregOfRexRM(pfx,modrm),
12806 getXMMReg( gregOfRexRM(pfx,modrm) ));
12807 DIP("movaps %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
12808 nameXMMReg(eregOfRexRM(pfx,modrm)));
12811 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12813 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) );
12814 DIP("movaps %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
12821 if (have66noF2noF3(pfx)
12825 putXMMReg( eregOfRexRM(pfx,modrm),
12826 getXMMReg( gregOfRexRM(pfx,modrm) ) );
12827 DIP("movapd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
12828 nameXMMReg(eregOfRexRM(pfx,modrm)));
12831 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12833 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) );
12834 DIP("movapd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
12845 if (haveNo66noF2noF3(pfx) && sz == 4) {
12855 nameXMMReg(gregOfRexRM(pfx,modrm)));
12857 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12861 nameXMMReg(gregOfRexRM(pfx,modrm)) );
12867 gregOfRexRM(pfx,modrm), 0,
12874 gregOfRexRM(pfx,modrm), 1,
12885 if (haveF3no66noF2(pfx) && (sz == 4 || sz == 8)) {
12892 assign( arg32, getIReg32(eregOfRexRM(pfx,modrm)) );
12894 DIP("cvtsi2ss %s,%s\n", nameIReg32(eregOfRexRM(pfx,modrm)),
12895 nameXMMReg(gregOfRexRM(pfx,modrm)));
12897 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12901 nameXMMReg(gregOfRexRM(pfx,modrm)) );
12904 gregOfRexRM(pfx,modrm), 0,
12912 assign( arg64, getIReg64(eregOfRexRM(pfx,modrm)) );
12914 DIP("cvtsi2ssq %s,%s\n", nameIReg64(eregOfRexRM(pfx,modrm)),
12915 nameXMMReg(gregOfRexRM(pfx,modrm)));
12917 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12921 nameXMMReg(gregOfRexRM(pfx,modrm)) );
12924 gregOfRexRM(pfx,modrm), 0,
12935 if (haveF2no66noF3(pfx) && (sz == 4 || sz == 8)) {
12940 assign( arg32, getIReg32(eregOfRexRM(pfx,modrm)) );
12942 DIP("cvtsi2sdl %s,%s\n", nameIReg32(eregOfRexRM(pfx,modrm)),
12943 nameXMMReg(gregOfRexRM(pfx,modrm)));
12945 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12949 nameXMMReg(gregOfRexRM(pfx,modrm)) );
12951 putXMMRegLane64F( gregOfRexRM(pfx,modrm), 0,
12958 assign( arg64, getIReg64(eregOfRexRM(pfx,modrm)) );
12960 DIP("cvtsi2sdq %s,%s\n", nameIReg64(eregOfRexRM(pfx,modrm)),
12961 nameXMMReg(gregOfRexRM(pfx,modrm)));
12963 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
12967 nameXMMReg(gregOfRexRM(pfx,modrm)) );
12970 gregOfRexRM(pfx,modrm),
12982 if (have66noF2noF3(pfx) && sz == 2) {
12997 nameXMMReg(gregOfRexRM(pfx,modrm)));
12999 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
13003 nameXMMReg(gregOfRexRM(pfx,modrm)) );
13007 gregOfRexRM(pfx,modrm), 0,
13012 gregOfRexRM(pfx,modrm), 1,
13023 if ( (haveNo66noF2noF3(pfx) && sz == 4)
13024 || (have66noF2noF3(pfx) && sz == 2) ) {
13027 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
13029 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) );
13032 nameXMMReg(gregOfRexRM(pfx,modrm)));
13046 if (haveNo66noF2noF3(pfx) && sz == 4) {
13058 assign(f32lo, getXMMRegLane32F(eregOfRexRM(pfx,modrm), 0));
13059 assign(f32hi, getXMMRegLane32F(eregOfRexRM(pfx,modrm), 1));
13061 nameXMMReg(eregOfRexRM(pfx,modrm)),
13064 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
13108 if (haveF3no66noF2(pfx) && (sz == 4 || sz == 8)) {
13109 delta = dis_CVTxSS2SI( vbi, pfx, delta, False/*!isAvx*/, opc, sz);
13124 if (haveF2no66noF3(pfx) && (sz == 4 || sz == 8)) {
13125 delta = dis_CVTxSD2SI( vbi, pfx, delta, False/*!isAvx*/, opc, sz);
13132 if (have66noF2noF3(pfx) && sz == 2) {
13144 assign(f64lo, getXMMRegLane64F(eregOfRexRM(pfx,modrm), 0));
13145 assign(f64hi, getXMMRegLane64F(eregOfRexRM(pfx,modrm), 1));
13147 nameXMMReg(eregOfRexRM(pfx,modrm)),
13150 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
13184 if (have66noF2noF3(pfx) && sz == 2) {
13185 delta = dis_COMISD( vbi, pfx, delta, False/*!isAvx*/, opc );
13190 if (haveNo66noF2noF3(pfx) && sz == 4) {
13191 delta = dis_COMISS( vbi, pfx, delta, False/*!isAvx*/, opc );
13199 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8)
13219 delta = dis_MOVMSKPS_128( vbi, pfx, delta, False/*!isAvx*/ );
13224 if (have66noF2noF3(pfx) && (sz == 2 || sz == 8)) {
13230 delta = dis_MOVMSKPD_128( vbi, pfx, delta, False/*!isAvx*/ );
13237 if (haveF3no66noF2(pfx) && sz == 4) {
13238 delta = dis_SSE_E_to_G_unary_lo32( vbi, pfx, delta,
13243 if (haveNo66noF2noF3(pfx) && sz == 4) {
13244 delta = dis_SSE_E_to_G_unary_all( vbi, pfx, delta,
13249 if (haveF2no66noF3(pfx) && sz == 4) {
13250 delta = dis_SSE_E_to_G_unary_lo64( vbi, pfx, delta,
13255 if (have66noF2noF3(pfx) && sz == 2) {
13256 delta = dis_SSE_E_to_G_unary_all( vbi, pfx, delta,
13264 if (haveF3no66noF2(pfx) && sz == 4) {
13265 delta = dis_SSE_E_to_G_unary_lo32( vbi, pfx, delta,
13270 if (haveNo66noF2noF3(pfx) && sz == 4) {
13271 delta = dis_SSE_E_to_G_unary_all( vbi, pfx, delta,
13279 if (haveF3no66noF2(pfx) && sz == 4) {
13280 delta = dis_SSE_E_to_G_unary_lo32( vbi, pfx, delta,
13285 if (haveNo66noF2noF3(pfx) && sz == 4) {
13286 delta = dis_SSE_E_to_G_unary_all( vbi, pfx, delta,
13294 if (haveNo66noF2noF3(pfx) && sz == 4) {
13295 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "andps", Iop_AndV128 );
13299 if (have66noF2noF3(pfx) && sz == 2) {
13300 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "andpd", Iop_AndV128 );
13307 if (haveNo66noF2noF3(pfx) && sz == 4) {
13308 delta = dis_SSE_E_to_G_all_invG( vbi, pfx, delta, "andnps",
13313 if (have66noF2noF3(pfx) && sz == 2) {
13314 delta = dis_SSE_E_to_G_all_invG( vbi, pfx, delta, "andnpd",
13322 if (haveNo66noF2noF3(pfx) && sz == 4) {
13323 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "orps", Iop_OrV128 );
13327 if (have66noF2noF3(pfx) && sz == 2) {
13328 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "orpd", Iop_OrV128 );
13335 if (have66noF2noF3(pfx) && sz == 2) {
13336 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "xorpd", Iop_XorV128 );
13340 if (haveNo66noF2noF3(pfx) && sz == 4) {
13341 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "xorps", Iop_XorV128 );
13348 if (haveNo66noF2noF3(pfx) && sz == 4) {
13349 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "addps", Iop_Add32Fx4 );
13353 if (haveF3no66noF2(pfx) && sz == 4) {
13354 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "addss", Iop_Add32F0x4 );
13358 if (haveF2no66noF3(pfx)
13360 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "addsd", Iop_Add64F0x2 );
13364 if (have66noF2noF3(pfx)
13366 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "addpd", Iop_Add64Fx2 );
13373 if (haveF2no66noF3(pfx)
13375 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "mulsd", Iop_Mul64F0x2 );
13379 if (haveF3no66noF2(pfx) && sz == 4) {
13380 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "mulss", Iop_Mul32F0x4 );
13384 if (haveNo66noF2noF3(pfx) && sz == 4) {
13385 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "mulps", Iop_Mul32Fx4 );
13389 if (have66noF2noF3(pfx)
13391 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "mulpd", Iop_Mul64Fx2 );
13399 if (haveNo66noF2noF3(pfx)
13401 delta = dis_CVTPS2PD_128( vbi, pfx, delta, False/*!isAvx*/ );
13406 if (haveF3no66noF2(pfx) && sz == 4) {
13412 assign(f32lo, getXMMRegLane32F(eregOfRexRM(pfx,modrm), 0));
13413 DIP("cvtss2sd %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
13414 nameXMMReg(gregOfRexRM(pfx,modrm)));
13416 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
13420 nameXMMReg(gregOfRexRM(pfx,modrm)));
13423 putXMMRegLane64F( gregOfRexRM(pfx,modrm), 0,
13430 if (haveF2no66noF3(pfx) && sz == 4) {
13437 assign(f64lo, getXMMRegLane64F(eregOfRexRM(pfx,modrm), 0));
13438 DIP("cvtsd2ss %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
13439 nameXMMReg(gregOfRexRM(pfx,modrm)));
13441 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
13445 nameXMMReg(gregOfRexRM(pfx,modrm)));
13450 gregOfRexRM(pfx,modrm), 0,
13461 if (have66noF2noF3(pfx) && sz == 2) {
13462 delta = dis_CVTPD2PS_128( vbi, pfx, delta, False/*!isAvx*/ );
13472 if ( (have66noF2noF3(pfx) && sz == 2)
13473 || (haveF3no66noF2(pfx) && sz == 4) ) {
13475 delta = dis_CVTxPS2DQ_128( vbi, pfx, delta, False/*!isAvx*/, r2zero );
13480 if (haveNo66noF2noF3(pfx) && sz == 4) {
13481 delta = dis_CVTDQ2PS_128( vbi, pfx, delta, False/*!isAvx*/ );
13488 if (haveF3no66noF2(pfx) && sz == 4) {
13489 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "subss", Iop_Sub32F0x4 );
13493 if (haveF2no66noF3(pfx)
13495 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "subsd", Iop_Sub64F0x2 );
13499 if (haveNo66noF2noF3(pfx) && sz == 4) {
13500 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "subps", Iop_Sub32Fx4 );
13504 if (have66noF2noF3(pfx) && sz == 2) {
13505 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "subpd", Iop_Sub64Fx2 );
13512 if (haveNo66noF2noF3(pfx) && sz == 4) {
13513 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "minps", Iop_Min32Fx4 );
13517 if (haveF3no66noF2(pfx) && sz == 4) {
13518 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "minss", Iop_Min32F0x4 );
13522 if (haveF2no66noF3(pfx) && sz == 4) {
13523 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "minsd", Iop_Min64F0x2 );
13527 if (have66noF2noF3(pfx) && sz == 2) {
13528 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "minpd", Iop_Min64Fx2 );
13535 if (haveF2no66noF3(pfx) && sz == 4) {
13536 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "divsd", Iop_Div64F0x2 );
13540 if (haveNo66noF2noF3(pfx) && sz == 4) {
13541 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "divps", Iop_Div32Fx4 );
13545 if (haveF3no66noF2(pfx) && sz == 4) {
13546 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "divss", Iop_Div32F0x4 );
13550 if (have66noF2noF3(pfx) && sz == 2) {
13551 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "divpd", Iop_Div64Fx2 );
13558 if (haveNo66noF2noF3(pfx) && sz == 4) {
13559 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "maxps", Iop_Max32Fx4 );
13563 if (haveF3no66noF2(pfx) && sz == 4) {
13564 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "maxss", Iop_Max32F0x4 );
13568 if (haveF2no66noF3(pfx) && sz == 4) {
13569 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "maxsd", Iop_Max64F0x2 );
13573 if (have66noF2noF3(pfx) && sz == 2) {
13574 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "maxpd", Iop_Max64Fx2 );
13581 if (have66noF2noF3(pfx) && sz == 2) {
13582 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13591 if (have66noF2noF3(pfx) && sz == 2) {
13592 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13601 if (have66noF2noF3(pfx) && sz == 2) {
13602 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13611 if (have66noF2noF3(pfx) && sz == 2) {
13612 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13621 if (have66noF2noF3(pfx) && sz == 2) {
13622 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13630 if (have66noF2noF3(pfx) && sz == 2) {
13631 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13639 if (have66noF2noF3(pfx) && sz == 2) {
13640 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13648 if (have66noF2noF3(pfx) && sz == 2) {
13649 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13658 if (have66noF2noF3(pfx) && sz == 2) {
13659 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13668 if (have66noF2noF3(pfx) && sz == 2) {
13669 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13678 if (have66noF2noF3(pfx) && sz == 2) {
13679 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13688 if (have66noF2noF3(pfx) && sz == 2) {
13689 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13698 if (have66noF2noF3(pfx) && sz == 2) {
13699 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13708 if (have66noF2noF3(pfx) && sz == 2) {
13709 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13721 if (have66noF2noF3(pfx)) {
13729 gregOfRexRM(pfx,modrm),
13730 unop( Iop_32UtoV128, getIReg32(eregOfRexRM(pfx,modrm)) )
13732 DIP("movd %s, %s\n", nameIReg32(eregOfRexRM(pfx,modrm)),
13733 nameXMMReg(gregOfRexRM(pfx,modrm)));
13736 gregOfRexRM(pfx,modrm),
13737 unop( Iop_64UtoV128, getIReg64(eregOfRexRM(pfx,modrm)) )
13739 DIP("movq %s, %s\n", nameIReg64(eregOfRexRM(pfx,modrm)),
13740 nameXMMReg(gregOfRexRM(pfx,modrm)));
13743 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
13746 gregOfRexRM(pfx,modrm),
13752 nameXMMReg(gregOfRexRM(pfx,modrm)));
13759 if (have66noF2noF3(pfx)
13764 putXMMReg( gregOfRexRM(pfx,modrm),
13765 getXMMReg( eregOfRexRM(pfx,modrm) ));
13766 DIP("movdqa %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
13767 nameXMMReg(gregOfRexRM(pfx,modrm)));
13770 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
13772 putXMMReg( gregOfRexRM(pfx,modrm),
13775 nameXMMReg(gregOfRexRM(pfx,modrm)));
13780 if (haveF3no66noF2(pfx) && sz == 4) {
13784 putXMMReg( gregOfRexRM(pfx,modrm),
13785 getXMMReg( eregOfRexRM(pfx,modrm) ));
13786 DIP("movdqu %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
13787 nameXMMReg(gregOfRexRM(pfx,modrm)));
13790 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
13791 putXMMReg( gregOfRexRM(pfx,modrm),
13794 nameXMMReg(gregOfRexRM(pfx,modrm)));
13803 if (have66noF2noF3(pfx) && sz == 2) {
13804 delta = dis_PSHUFD_32x4( vbi, pfx, delta, False/*!writesYmm*/);
13809 if (haveNo66noF2noF3(pfx) && sz == 4) {
13825 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf,
13847 if (haveF2no66noF3(pfx) && sz == 4) {
13848 delta = dis_PSHUFxW_128( vbi, pfx, delta,
13854 if (haveF3no66noF2(pfx) && sz == 4) {
13855 delta = dis_PSHUFxW_128( vbi, pfx, delta,
13863 if (have66noF2noF3(pfx) && sz == 2
13866 delta = dis_SSE_shiftE_imm( pfx, delta, "psrlw", Iop_ShrN16x8 );
13870 if (have66noF2noF3(pfx) && sz == 2
13873 delta = dis_SSE_shiftE_imm( pfx, delta, "psraw", Iop_SarN16x8 );
13877 if (have66noF2noF3(pfx) && sz == 2
13880 delta = dis_SSE_shiftE_imm( pfx, delta, "psllw", Iop_ShlN16x8 );
13887 if (have66noF2noF3(pfx) && sz == 2
13890 delta = dis_SSE_shiftE_imm( pfx, delta, "psrld", Iop_ShrN32x4 );
13894 if (have66noF2noF3(pfx) && sz == 2
13897 delta = dis_SSE_shiftE_imm( pfx, delta, "psrad", Iop_SarN32x4 );
13901 if (have66noF2noF3(pfx) && sz == 2
13904 delta = dis_SSE_shiftE_imm( pfx, delta, "pslld", Iop_ShlN32x4 );
13912 if (have66noF2noF3(pfx) && sz == 2
13916 Int reg = eregOfRexRM(pfx,getUChar(delta));
13926 if (have66noF2noF3(pfx) && sz == 2
13930 Int reg = eregOfRexRM(pfx,getUChar(delta));
13940 if (have66noF2noF3(pfx) && sz == 2
13943 delta = dis_SSE_shiftE_imm( pfx, delta, "psrlq", Iop_ShrN64x2 );
13947 if (have66noF2noF3(pfx) && sz == 2
13950 delta = dis_SSE_shiftE_imm( pfx, delta, "psllq", Iop_ShlN64x2 );
13957 if (have66noF2noF3(pfx) && sz == 2) {
13958 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13966 if (have66noF2noF3(pfx) && sz == 2) {
13967 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13975 if (have66noF2noF3(pfx) && sz == 2) {
13976 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
13985 if (haveF3no66noF2(pfx)
13989 putXMMRegLane64( gregOfRexRM(pfx,modrm), 0,
13990 getXMMRegLane64( eregOfRexRM(pfx,modrm), 0 ));
13992 putXMMRegLane64( gregOfRexRM(pfx,modrm), 1, mkU64(0) );
13993 DIP("movsd %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
13994 nameXMMReg(gregOfRexRM(pfx,modrm)));
13997 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
13998 putXMMReg( gregOfRexRM(pfx,modrm), mkV128(0) );
13999 putXMMRegLane64( gregOfRexRM(pfx,modrm), 0,
14002 nameXMMReg(gregOfRexRM(pfx,modrm)));
14009 if (have66noF2noF3(pfx) && (sz == 2 || sz == 8)) {
14015 putIReg32( eregOfRexRM(pfx,modrm),
14016 getXMMRegLane32(gregOfRexRM(pfx,modrm), 0) );
14017 DIP("movd %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
14018 nameIReg32(eregOfRexRM(pfx,modrm)));
14020 putIReg64( eregOfRexRM(pfx,modrm),
14021 getXMMRegLane64(gregOfRexRM(pfx,modrm), 0) );
14022 DIP("movq %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
14023 nameIReg64(eregOfRexRM(pfx,modrm)));
14026 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
14030 ? getXMMRegLane32(gregOfRexRM(pfx,modrm),0)
14031 : getXMMRegLane64(gregOfRexRM(pfx,modrm),0) );
14033 nameXMMReg(gregOfRexRM(pfx,modrm)), dis_buf);
14041 if (haveF3no66noF2(pfx) && sz == 4) {
14046 putXMMReg( eregOfRexRM(pfx,modrm),
14047 getXMMReg(gregOfRexRM(pfx,modrm)) );
14048 DIP("movdqu %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
14049 nameXMMReg(eregOfRexRM(pfx,modrm)));
14051 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
14053 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) );
14054 DIP("movdqu %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), dis_buf);
14059 if (have66noF2noF3(pfx) && sz == 2) {
14063 putXMMReg( eregOfRexRM(pfx,modrm),
14064 getXMMReg(gregOfRexRM(pfx,modrm)) );
14065 DIP("movdqa %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)),
14066 nameXMMReg(eregOfRexRM(pfx,modrm)));
14068 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
14071 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) );
14072 DIP("movdqa %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), dis_buf);
14080 if (haveNo66noF2noF3(pfx)
14093 if (haveNo66noF2noF3(pfx)
14107 if (haveNo66noF2noF3(pfx)
14118 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
14137 if (haveNo66noF2noF3(pfx)
14140 delta = dis_STMXCSR(vbi, pfx, delta, False/*!isAvx*/);
14144 if (haveNo66noF2noF3(pfx)
14147 delta = dis_LDMXCSR(vbi, pfx, delta, False/*!isAvx*/);
14151 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8)
14153 && gregOfRexRM(pfx,getUChar(delta)) == 0) {
14154 delta = dis_FXSAVE(vbi, pfx, delta, sz);
14158 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8)
14160 && gregOfRexRM(pfx,getUChar(delta)) == 1) {
14161 delta = dis_FXRSTOR(vbi, pfx, delta, sz);
14165 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8)
14167 && gregOfRexRM(pfx,getUChar(delta)) == 4
14169 delta = dis_XSAVE(vbi, pfx, delta, sz);
14173 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8)
14175 && gregOfRexRM(pfx,getUChar(delta)) == 5
14177 delta = dis_XRSTOR(vbi, pfx, delta, sz);
14184 if (haveNo66noF2noF3(pfx) && sz == 4) {
14186 delta = dis_SSE_cmp_E_to_G( vbi, pfx, delta, "cmpps", True, 4 );
14190 if (haveF3no66noF2(pfx) && sz == 4) {
14192 delta = dis_SSE_cmp_E_to_G( vbi, pfx, delta, "cmpss", False, 4 );
14196 if (haveF2no66noF3(pfx) && sz == 4) {
14198 delta = dis_SSE_cmp_E_to_G( vbi, pfx, delta, "cmpsd", False, 8 );
14202 if (have66noF2noF3(pfx) && sz == 2) {
14204 delta = dis_SSE_cmp_E_to_G( vbi, pfx, delta, "cmppd", True, 8 );
14211 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8)) {
14214 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
14215 storeLE( mkexpr(addr), getIRegG(sz, pfx, modrm) );
14217 nameIRegG(sz, pfx, modrm));
14229 if (haveNo66noF2noF3(pfx)
14245 assign(t4, getIReg16(eregOfRexRM(pfx,modrm)));
14249 nameIReg16(eregOfRexRM(pfx,modrm)),
14252 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
14273 if (have66noF2noF3(pfx)
14278 UInt rG = gregOfRexRM(pfx,modrm);
14280 UInt rE = eregOfRexRM(pfx,modrm);
14287 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf,
14307 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8)) {
14323 putIReg64(gregOfRexRM(pfx,modrm), unop(Iop_16Uto64, mkexpr(t5)));
14325 putIReg32(gregOfRexRM(pfx,modrm), unop(Iop_16Uto32, mkexpr(t5)));
14329 sz==8 ? nameIReg64(gregOfRexRM(pfx,modrm))
14330 : nameIReg32(gregOfRexRM(pfx,modrm))
14342 if (have66noF2noF3(pfx)
14345 delta = dis_PEXTRW_128_EregOnly_toG( vbi, pfx, delta,
14354 if (haveNo66noF2noF3(pfx) && sz == 4) {
14359 UInt rG = gregOfRexRM(pfx,modrm);
14362 UInt rE = eregOfRexRM(pfx,modrm);
14368 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
14375 putXMMReg( gregOfRexRM(pfx,modrm), mkexpr(res) );
14379 if (have66noF2noF3(pfx) && sz == 2) {
14385 assign( dV, getXMMReg(gregOfRexRM(pfx,modrm)) );
14388 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) );
14392 nameXMMReg(eregOfRexRM(pfx,modrm)),
14393 nameXMMReg(gregOfRexRM(pfx,modrm)));
14395 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
14401 nameXMMReg(gregOfRexRM(pfx,modrm)));
14405 putXMMReg( gregOfRexRM(pfx,modrm), mkexpr(res) );
14412 if (have66noF2noF3(pfx) && sz == 2) {
14413 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psrlw", Iop_ShrN16x8 );
14420 if (have66noF2noF3(pfx) && sz == 2) {
14421 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psrld", Iop_ShrN32x4 );
14428 if (have66noF2noF3(pfx) && sz == 2) {
14429 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psrlq", Iop_ShrN64x2 );
14436 if (have66noF2noF3(pfx) && sz == 2) {
14437 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14443 if (haveNo66noF2noF3(pfx) && sz == 4) {
14446 vbi, pfx, delta, opc, "paddq", False );
14453 if (have66noF2noF3(pfx) && sz == 2) {
14454 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14463 if (haveF3no66noF2(pfx) && sz == 4) {
14467 putXMMReg( gregOfRexRM(pfx,modrm),
14470 nameXMMReg(gregOfRexRM(pfx,modrm)));
14478 if (have66noF2noF3(pfx)
14485 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
14487 getXMMRegLane64( gregOfRexRM(pfx,modrm), 0 ));
14488 DIP("movq %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), dis_buf );
14494 if (haveF2no66noF3(pfx) && sz == 4) {
14499 getXMMRegLane64( eregOfRexRM(pfx,modrm), 0 ));
14500 DIP("movdq2q %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
14514 if (have66noF2noF3(pfx)
14517 delta = dis_PMOVMSKB_128( vbi, pfx, delta, False/*!isAvx*/ );
14524 if (haveNo66noF2noF3(pfx)
14533 putIReg32(gregOfRexRM(pfx,modrm), mkexpr(t1));
14535 nameIReg32(gregOfRexRM(pfx,modrm)));
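[Editor's note: the pmovmskb handling above (dis_PMOVMSKB_128 and the no-prefix fallback) collects the sign bit of every byte lane into an integer register. The sketch below is a behavioural model only, with an invented model_* name; it does not mirror the IR the decoder actually emits.]

#include <stdint.h>

/* Sketch: bit i of the result is the most significant bit of source
   byte i; the untouched upper bits of the destination are zero. */
static uint32_t model_pmovmskb_128 ( const uint8_t src[16] )
{
   uint32_t mask = 0;
   for (int i = 0; i < 16; i++)
      mask |= (uint32_t)(src[i] >> 7) << i;
   return mask;
}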
14545 if (have66noF2noF3(pfx) && sz == 2) {
14546 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14554 if (have66noF2noF3(pfx) && sz == 2) {
14555 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14564 if (haveNo66noF2noF3(pfx) && sz == 4) {
14567 vbi, pfx, delta, opc, "pminub", False );
14571 if (have66noF2noF3(pfx) && sz == 2) {
14572 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14580 if (have66noF2noF3(pfx) && sz == 2) {
14581 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "pand", Iop_AndV128 );
14588 if (have66noF2noF3(pfx) && sz == 2) {
14589 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14597 if (have66noF2noF3(pfx) && sz == 2) {
14598 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14607 if (haveNo66noF2noF3(pfx) && sz == 4) {
14610 vbi, pfx, delta, opc, "pmaxub", False );
14614 if (have66noF2noF3(pfx) && sz == 2) {
14615 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14623 if (have66noF2noF3(pfx) && sz == 2) {
14624 delta = dis_SSE_E_to_G_all_invG( vbi, pfx, delta, "pandn", Iop_AndV128 );
14632 if (haveNo66noF2noF3(pfx) && sz == 4) {
14635 vbi, pfx, delta, opc, "pavgb", False );
14639 if (have66noF2noF3(pfx) && sz == 2) {
14640 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14648 if (have66noF2noF3(pfx) && sz == 2) {
14649 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psraw", Iop_SarN16x8 );
14656 if (have66noF2noF3(pfx) && sz == 2) {
14657 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psrad", Iop_SarN32x4 );
14665 if (haveNo66noF2noF3(pfx) && sz == 4) {
14668 vbi, pfx, delta, opc, "pavgw", False );
14672 if (have66noF2noF3(pfx) && sz == 2) {
14673 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14682 if (haveNo66noF2noF3(pfx) && sz == 4) {
14685 vbi, pfx, delta, opc, "pmuluh", False );
14689 if (have66noF2noF3(pfx) && sz == 2) {
14690 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14698 if (have66noF2noF3(pfx) && sz == 2) {
14699 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14711 if ( (haveF2no66noF3(pfx) && sz == 4)
14712 || (have66noF2noF3(pfx) && sz == 2) ) {
14713 delta = dis_CVTxPD2DQ_128( vbi, pfx, delta, False/*!isAvx*/,
14719 if (haveF3no66noF2(pfx) && sz == 4) {
14720 delta = dis_CVTDQ2PD_128(vbi, pfx, delta, False/*!isAvx*/);
14732 if (haveNo66noF2noF3(pfx) && sz == 4) {
14736 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
14746 if (have66noF2noF3(pfx) && sz == 2) {
14749 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
14751 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) );
14753 nameXMMReg(gregOfRexRM(pfx,modrm)));
14763 if (have66noF2noF3(pfx) && sz == 2) {
14764 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14772 if (have66noF2noF3(pfx) && sz == 2) {
14773 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14782 if (haveNo66noF2noF3(pfx) && sz == 4) {
14785 vbi, pfx, delta, opc, "pminsw", False );
14789 if (have66noF2noF3(pfx) && sz == 2) {
14790 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14798 if (have66noF2noF3(pfx) && sz == 2) {
14799 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "por", Iop_OrV128 );
14806 if (have66noF2noF3(pfx) && sz == 2) {
14807 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14815 if (have66noF2noF3(pfx) && sz == 2) {
14816 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14825 if (haveNo66noF2noF3(pfx) && sz == 4) {
14828 vbi, pfx, delta, opc, "pmaxsw", False );
14832 if (have66noF2noF3(pfx) && sz == 2) {
14833 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
14841 if (have66noF2noF3(pfx) && sz == 2) {
14842 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "pxor", Iop_XorV128 );
14849 if (have66noF2noF3(pfx) && sz == 2) {
14850 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psllw", Iop_ShlN16x8 );
14857 if (have66noF2noF3(pfx) && sz == 2) {
14858 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "pslld", Iop_ShlN32x4 );
14865 if (have66noF2noF3(pfx) && sz == 2) {
14866 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psllq", Iop_ShlN64x2 );
14875 if (have66noF2noF3(pfx) && sz == 2) {
14879 UInt rG = gregOfRexRM(pfx,modrm);
14882 UInt rE = eregOfRexRM(pfx,modrm);
14887 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
14898 if (haveNo66noF2noF3(pfx) && sz == 4) {
14914 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
14932 if (have66noF2noF3(pfx) && sz == 2) {
14936 UInt rG = gregOfRexRM(pfx,modrm);
14938 UInt rE = eregOfRexRM(pfx,modrm);
14943 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
14957 if (haveNo66noF2noF3(pfx) && sz == 4) {
14960 vbi, pfx, delta, opc, "psadbw", False );
14965 if (have66noF2noF3(pfx) && sz == 2) {
14969 UInt rG = gregOfRexRM(pfx,modrm);
14971 UInt rE = eregOfRexRM(pfx,modrm);
14976 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
14991 if (haveNo66noF2noF3(pfx) && sz == 4) {
14993 delta = dis_MMX( &ok, vbi, pfx, sz, delta-1 );
14997 if (have66noF2noF3(pfx) && sz == 2 && epartIsReg(getUChar(delta))) {
14998 delta = dis_MASKMOVDQU( vbi, pfx, delta, False/*!isAvx*/ );
15005 if (have66noF2noF3(pfx) && sz == 2) {
15006 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
15014 if (have66noF2noF3(pfx) && sz == 2) {
15015 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
15023 if (have66noF2noF3(pfx) && sz == 2) {
15024 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
15032 if (have66noF2noF3(pfx) && sz == 2) {
15033 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
15039 if (haveNo66noF2noF3(pfx) && sz == 4) {
15042 vbi, pfx, delta, opc, "psubq", False );
15049 if (have66noF2noF3(pfx) && sz == 2) {
15050 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
15058 if (have66noF2noF3(pfx) && sz == 2) {
15059 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
15067 if (have66noF2noF3(pfx) && sz == 2) {
15068 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
15095 static Long dis_MOVDDUP_128 ( const VexAbiInfo* vbi, Prefix pfx,
15104 UInt rG = gregOfRexRM(pfx,modrm);
15106 UInt rE = eregOfRexRM(pfx,modrm);
15113 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15125 pfx,
15134 UInt rG = gregOfRexRM(pfx,modrm);
15136 UInt rE = eregOfRexRM(pfx,modrm);
15142 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15157 static Long dis_MOVSxDUP_128 ( const VexAbiInfo* vbi, Prefix pfx,
15165 UInt rG = gregOfRexRM(pfx,modrm);
15169 UInt rE = eregOfRexRM(pfx,modrm);
15175 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15191 static Long dis_MOVSxDUP_256 ( const VexAbiInfo* vbi, Prefix pfx,
15199 UInt rG = gregOfRexRM(pfx,modrm);
15203 UInt rE = eregOfRexRM(pfx,modrm);
15209 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
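[Editor's note: dis_MOVDDUP_128 and dis_MOVSxDUP_128/256 above implement the SSE3 duplication moves. As a rough behavioural sketch (the model_* helpers are invented for illustration, not the decoder's IR): MOVDDUP copies the low 64-bit lane into both halves, while MOVSLDUP/MOVSHDUP duplicate the even or odd 32-bit lanes.]

#include <stdint.h>

/* MOVDDUP: both 64-bit lanes of the result are the source's low lane. */
static void model_movddup_128 ( uint64_t res[2], const uint64_t src[2] )
{
   res[0] = src[0];
   res[1] = src[0];
}

/* MOVSLDUP (isL != 0) duplicates lanes 0 and 2 into each pair;
   MOVSHDUP (isL == 0) duplicates lanes 1 and 3. */
static void model_movsxdup_128 ( uint32_t res[4], const uint32_t src[4],
                                 int isL )
{
   for (int i = 0; i < 4; i += 2) {
      uint32_t v = isL ? src[i] : src[i+1];
      res[i]   = v;
      res[i+1] = v;
   }
}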
15272 Prefix pfx, Int sz, Long deltaIN )
15289 if (haveF3no66noF2(pfx) && sz == 4) {
15290 delta = dis_MOVSxDUP_128( vbi, pfx, delta, False/*!isAvx*/,
15296 if (haveF2no66noF3(pfx)
15298 delta = dis_MOVDDUP_128( vbi, pfx, delta, False/*!isAvx*/ );
15306 if (haveF3no66noF2(pfx) && sz == 4) {
15307 delta = dis_MOVSxDUP_128( vbi, pfx, delta, False/*!isAvx*/,
15317 if (haveF2no66noF3(pfx) && sz == 4) {
15323 UInt rG = gregOfRexRM(pfx,modrm);
15325 UInt rE = eregOfRexRM(pfx,modrm);
15330 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15342 if (have66noF2noF3(pfx) && sz == 2) {
15348 UInt rG = gregOfRexRM(pfx,modrm);
15350 UInt rE = eregOfRexRM(pfx,modrm);
15355 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15369 if (have66noF2noF3(pfx) && sz == 2) {
15373 UInt rG = gregOfRexRM(pfx,modrm);
15375 UInt rE = eregOfRexRM(pfx,modrm);
15380 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15391 if (haveF2no66noF3(pfx) && sz == 4) {
15395 UInt rG = gregOfRexRM(pfx,modrm);
15399 UInt rE = eregOfRexRM(pfx,modrm);
15404 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15418 if (haveF2no66noF3(pfx) && sz == 4) {
15423 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15424 putXMMReg( gregOfRexRM(pfx,modrm),
15427 nameXMMReg(gregOfRexRM(pfx,modrm)));
15561 static Long dis_PHADD_128 ( const VexAbiInfo* vbi, Prefix pfx, Long delta,
15578 UInt rG = gregOfRexRM(pfx,modrm);
15579 UInt rV = isAvx ? getVexNvvvv(pfx) : rG;
15598 UInt rE = eregOfRexRM(pfx,modrm);
15604 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15635 static Long dis_PHADD_256 ( const VexAbiInfo* vbi, Prefix pfx, Long delta,
15650 UInt rG = gregOfRexRM(pfx,modrm);
15651 UInt rV = getVexNvvvv(pfx);
15670 UInt rE = eregOfRexRM(pfx,modrm);
15675 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
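[Editor's note: dis_PHADD_128/dis_PHADD_256 above handle the SSSE3 horizontal adds. A behavioural sketch of the 16-bit, 128-bit case follows; the name is invented, and the decoder builds the equivalent result out of IR primitives rather than a loop.]

#include <stdint.h>

/* PHADDW: the low four result lanes are pairwise sums of the destination
   operand's lanes, the high four are pairwise sums of the source's. */
static void model_phaddw_128 ( uint16_t res[8],
                               const uint16_t dV[8], const uint16_t sV[8] )
{
   for (int i = 0; i < 4; i++) {
      res[i]     = (uint16_t)(dV[2*i] + dV[2*i + 1]);
      res[i + 4] = (uint16_t)(sV[2*i] + sV[2*i + 1]);
   }
}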
15753 Prefix pfx, Int sz, Long deltaIN )
15769 if (have66noF2noF3(pfx)
15775 assign( dV, getXMMReg(gregOfRexRM(pfx,modrm)) );
15778 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) );
15780 DIP("pshufb %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
15781 nameXMMReg(gregOfRexRM(pfx,modrm)));
15783 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15788 nameXMMReg(gregOfRexRM(pfx,modrm)));
15792 putXMMReg(gregOfRexRM(pfx,modrm), mkexpr(res));
15796 if (haveNo66noF2noF3(pfx) && sz == 4) {
15810 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
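[Editor's note: the pshufb handling above reads the data from dV (the G register) and the byte-shuffle control from sV (the E operand). A behavioural sketch, with an invented model_* name:]

#include <stdint.h>

/* Each result byte is zero when bit 7 of the control byte is set,
   otherwise it is the data byte selected by the low four control bits. */
static void model_pshufb_128 ( uint8_t res[16],
                               const uint8_t data[16],  /* dV */
                               const uint8_t ctl[16] )  /* sV */
{
   for (int i = 0; i < 16; i++)
      res[i] = (ctl[i] & 0x80) ? 0 : data[ctl[i] & 0x0F];
}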
15853 if (have66noF2noF3(pfx)
15855 delta = dis_PHADD_128( vbi, pfx, delta, False/*isAvx*/, opc );
15871 if (haveNo66noF2noF3(pfx) && sz == 4) {
15904 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15925 if (have66noF2noF3(pfx)
15930 UInt rG = gregOfRexRM(pfx,modrm);
15935 UInt rE = eregOfRexRM(pfx,modrm);
15940 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
15952 if (haveNo66noF2noF3(pfx) && sz == 4) {
15970 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
16008 if (have66noF2noF3(pfx)
16027 assign( dV, getXMMReg(gregOfRexRM(pfx,modrm)) );
16030 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) );
16032 DIP("psign%s %s,%s\n", str, nameXMMReg(eregOfRexRM(pfx,modrm)),
16033 nameXMMReg(gregOfRexRM(pfx,modrm)));
16035 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
16040 nameXMMReg(gregOfRexRM(pfx,modrm)));
16049 gregOfRexRM(pfx,modrm),
16060 if (haveNo66noF2noF3(pfx) && sz == 4) {
16083 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
16101 if (have66noF2noF3(pfx)
16111 assign( dV, getXMMReg(gregOfRexRM(pfx,modrm)) );
16114 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) );
16116 DIP("pmulhrsw %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)),
16117 nameXMMReg(gregOfRexRM(pfx,modrm)));
16119 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
16124 nameXMMReg(gregOfRexRM(pfx,modrm)));
16133 gregOfRexRM(pfx,modrm),
16143 if (haveNo66noF2noF3(pfx) && sz == 4) {
16157 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
16178 if (have66noF2noF3(pfx)
16193 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) );
16195 DIP("pabs%s %s,%s\n", str, nameXMMReg(eregOfRexRM(pfx,modrm)),
16196 nameXMMReg(gregOfRexRM(pfx,modrm)));
16198 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
16203 nameXMMReg(gregOfRexRM(pfx,modrm)));
16206 putXMMReg( gregOfRexRM(pfx,modrm),
16213 if (haveNo66noF2noF3(pfx) && sz == 4) {
16234 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
16272 Prefix pfx, Int sz, Long deltaIN )
16289 if (have66noF2noF3(pfx)
16295 assign( dV, getXMMReg(gregOfRexRM(pfx,modrm)) );
16298 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) );
16302 nameXMMReg(eregOfRexRM(pfx,modrm)),
16303 nameXMMReg(gregOfRexRM(pfx,modrm)));
16305 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
16312 nameXMMReg(gregOfRexRM(pfx,modrm)));
16316 putXMMReg( gregOfRexRM(pfx,modrm), mkexpr(res) );
16320 if (haveNo66noF2noF3(pfx) && sz == 4) {
16337 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
16399 Prefix pfx, Int sz, Long deltaIN )
16418 if (haveF3noF2(pfx) /* so both 66 and REX.W are possibilities */
16424 assign(src, getIRegE(sz, pfx, modrm));
16426 DIP("popcnt%c %s, %s\n", nameISize(sz), nameIRegE(sz, pfx, modrm),
16427 nameIRegG(sz, pfx, modrm));
16429 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0);
16433 nameIRegG(sz, pfx, modrm));
16437 putIRegG(sz, pfx, modrm, mkexpr(result));
16462 if (haveF3noF2(pfx) /* so both 66 and 48 are possibilities */
16469 assign(src, getIRegE(sz, pfx, modrm));
16471 DIP("tzcnt%c %s, %s\n", nameISize(sz), nameIRegE(sz, pfx, modrm),
16472 nameIRegG(sz, pfx, modrm));
16474 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0);
16478 nameIRegG(sz, pfx, modrm));
16482 putIRegG(sz, pfx, modrm, mkexpr(res));
16523 if (haveF3noF2(pfx) /* so both 66 and 48 are possibilities */
16530 assign(src, getIRegE(sz, pfx, modrm));
16532 DIP("lzcnt%c %s, %s\n", nameISize(sz), nameIRegE(sz, pfx, modrm),
16533 nameIRegG(sz, pfx, modrm));
16535 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0);
16539 nameIRegG(sz, pfx, modrm));
16543 putIRegG(sz, pfx, modrm, mkexpr(res));
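[Editor's note: the three F3-prefixed cases above decode POPCNT, TZCNT and LZCNT. The sketches below model what the emitted IR computes at 64-bit width (names invented; for narrower widths the zero-input result is the operand width, which is what separates TZCNT/LZCNT from plain BSF/BSR).]

#include <stdint.h>

static unsigned model_popcnt64 ( uint64_t x )
{
   unsigned n = 0;
   while (x) { n += (unsigned)(x & 1); x >>= 1; }
   return n;
}

static unsigned model_tzcnt64 ( uint64_t x )
{
   if (x == 0) return 64;          /* zero input yields the operand width */
   unsigned n = 0;
   while (!(x & 1)) { n++; x >>= 1; }
   return n;
}

static unsigned model_lzcnt64 ( uint64_t x )
{
   if (x == 0) return 64;          /* zero input yields the operand width */
   unsigned n = 0;
   while (!(x & 0x8000000000000000ULL)) { n++; x <<= 1; }
   return n;
}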
16651 static Long dis_VBLENDV_128 ( const VexAbiInfo* vbi, Prefix pfx, Long delta,
16658 UInt rG = gregOfRexRM(pfx, modrm);
16659 UInt rV = getVexNvvvv(pfx);
16666 UInt rE = eregOfRexRM(pfx, modrm);
16674 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
16690 static Long dis_VBLENDV_256 ( const VexAbiInfo* vbi, Prefix pfx, Long delta,
16697 UInt rG = gregOfRexRM(pfx, modrm);
16698 UInt rV = getVexNvvvv(pfx);
16705 UInt rE = eregOfRexRM(pfx, modrm);
16713 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
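[Editor's note: dis_VBLENDV_128/256 above implement the variable blends. A behavioural sketch at byte granularity; BLENDVPS/BLENDVPD select whole 32-/64-bit lanes the same way, keyed on each mask lane's top bit. All names below are invented for the sketch.]

#include <stdint.h>

/* Each result byte comes from src_true when the corresponding mask
   byte's MSB is set, otherwise from src_false. */
static void model_vpblendvb_128 ( uint8_t res[16],
                                  const uint8_t src_false[16],
                                  const uint8_t src_true[16],
                                  const uint8_t mask[16] )
{
   for (int i = 0; i < 16; i++)
      res[i] = (mask[i] & 0x80) ? src_true[i] : src_false[i];
}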
16829 static Long dis_xTESTy_128 ( const VexAbiInfo* vbi, Prefix pfx,
16836 UInt rG = gregOfRexRM(pfx, modrm);
16841 UInt rE = eregOfRexRM(pfx, modrm);
16849 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
16882 static Long dis_xTESTy_256 ( const VexAbiInfo* vbi, Prefix pfx,
16889 UInt rG = gregOfRexRM(pfx, modrm);
16894 UInt rE = eregOfRexRM(pfx, modrm);
16901 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
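[Editor's note: dis_xTESTy_128/256 above compute flag results from two vector operands without writing a vector destination. The sketch models the PTEST flag rules for the 128-bit case, reusing the vecE/vecG naming from the lines above; the function name is invented.]

#include <stdint.h>

/* ZF is set when (E AND G) is all zeroes, CF when (E AND NOT G) is all
   zeroes; no register operand is modified. */
static void model_ptest_128 ( const uint64_t vecE[2], const uint64_t vecG[2],
                              int* zf, int* cf )
{
   uint64_t and0 = vecE[0] &  vecG[0], and1 = vecE[1] &  vecG[1];
   uint64_t anc0 = vecE[0] & ~vecG[0], anc1 = vecE[1] & ~vecG[1];
   *zf = (and0 | and1) == 0;
   *cf = (anc0 | anc1) == 0;
}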
16940 static Long dis_PMOVxXBW_128 ( const VexAbiInfo* vbi, Prefix pfx,
16950 UInt rG = gregOfRexRM(pfx, modrm);
16952 UInt rE = eregOfRexRM(pfx, modrm);
16957 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
16983 static Long dis_PMOVxXBW_256 ( const VexAbiInfo* vbi, Prefix pfx,
16992 UInt rG = gregOfRexRM(pfx, modrm);
16994 UInt rE = eregOfRexRM(pfx, modrm);
16999 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17023 static Long dis_PMOVxXWD_128 ( const VexAbiInfo* vbi, Prefix pfx,
17033 UInt rG = gregOfRexRM(pfx, modrm);
17036 UInt rE = eregOfRexRM(pfx, modrm);
17041 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17056 ( gregOfRexRM(pfx, modrm), res );
17062 static Long dis_PMOVxXWD_256 ( const VexAbiInfo* vbi, Prefix pfx,
17071 UInt rG = gregOfRexRM(pfx, modrm);
17074 UInt rE = eregOfRexRM(pfx, modrm);
17079 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17101 static Long dis_PMOVSXWQ_128 ( const VexAbiInfo* vbi, Prefix pfx,
17110 UInt rG = gregOfRexRM(pfx, modrm);
17113 UInt rE = eregOfRexRM(pfx, modrm);
17118 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17134 static Long dis_PMOVSXWQ_256 ( const VexAbiInfo* vbi, Prefix pfx, Long delta )
17141 UInt rG = gregOfRexRM(pfx, modrm);
17146 UInt rE = eregOfRexRM(pfx, modrm);
17151 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17169 static Long dis_PMOVZXWQ_128 ( const VexAbiInfo* vbi, Prefix pfx,
17178 UInt rG = gregOfRexRM(pfx, modrm);
17181 UInt rE = eregOfRexRM(pfx, modrm);
17186 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17205 static Long dis_PMOVZXWQ_256 ( const VexAbiInfo* vbi, Prefix pfx,
17213 UInt rG = gregOfRexRM(pfx, modrm);
17216 UInt rE = eregOfRexRM(pfx, modrm);
17221 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17245 static Long dis_PMOVxXDQ_128 ( const VexAbiInfo* vbi, Prefix pfx,
17256 UInt rG = gregOfRexRM(pfx, modrm);
17262 UInt rE = eregOfRexRM(pfx, modrm);
17268 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17292 static Long dis_PMOVxXDQ_256 ( const VexAbiInfo* vbi, Prefix pfx,
17301 UInt rG = gregOfRexRM(pfx, modrm);
17307 UInt rE = eregOfRexRM(pfx, modrm);
17312 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17345 static Long dis_PMOVxXBD_128 ( const VexAbiInfo* vbi, Prefix pfx,
17355 UInt rG = gregOfRexRM(pfx, modrm);
17357 UInt rE = eregOfRexRM(pfx, modrm);
17362 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17388 static Long dis_PMOVxXBD_256 ( const VexAbiInfo* vbi, Prefix pfx,
17397 UInt rG = gregOfRexRM(pfx, modrm);
17399 UInt rE = eregOfRexRM(pfx, modrm);
17404 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17435 static Long dis_PMOVSXBQ_128 ( const VexAbiInfo* vbi, Prefix pfx,
17444 UInt rG = gregOfRexRM(pfx, modrm);
17446 UInt rE = eregOfRexRM(pfx, modrm);
17451 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17468 static Long dis_PMOVSXBQ_256 ( const VexAbiInfo* vbi, Prefix pfx,
17476 UInt rG = gregOfRexRM(pfx, modrm);
17478 UInt rE = eregOfRexRM(pfx, modrm);
17483 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17514 static Long dis_PMOVZXBQ_128 ( const VexAbiInfo* vbi, Prefix pfx,
17523 UInt rG = gregOfRexRM(pfx, modrm);
17525 UInt rE = eregOfRexRM(pfx, modrm);
17530 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17553 static Long dis_PMOVZXBQ_256 ( const VexAbiInfo* vbi, Prefix pfx,
17561 UInt rG = gregOfRexRM(pfx, modrm);
17563 UInt rE = eregOfRexRM(pfx, modrm);
17568 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
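[Editor's note: the dis_PMOVxX* helpers above all follow the same shape: fetch the relevant low fragment of the source, widen each narrow lane, and write the wider result. Below is a behavioural sketch of the byte-to-word case; the name is invented and the xIsZ flag is simply the sketch's way of choosing zero- versus sign-extension.]

#include <stdint.h>

/* The low eight source bytes become eight 16-bit lanes, zero-extended
   when xIsZ is nonzero and sign-extended otherwise. */
static void model_pmovxxbw_128 ( uint16_t res[8], const uint8_t src[8],
                                 int xIsZ )
{
   for (int i = 0; i < 8; i++)
      res[i] = xIsZ ? (uint16_t)src[i]
                    : (uint16_t)(int16_t)(int8_t)src[i];
}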
17597 static Long dis_PHMINPOSUW_128 ( const VexAbiInfo* vbi, Prefix pfx,
17609 UInt rG = gregOfRexRM(pfx,modrm);
17611 UInt rE = eregOfRexRM(pfx,modrm);
17616 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
17637 static Long dis_AESx ( const VexAbiInfo* vbi, Prefix pfx,
17644 UInt rG = gregOfRexRM(pfx, modrm);
17646 UInt regNoR = (isAvx && opc != 0xDB) ? getVexNvvvv(pfx) : rG;
17655 regNoL = eregOfRexRM(pfx, modrm);
17659 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17729 static Long dis_AESKEYGENASSIST ( const VexAbiInfo* vbi, Prefix pfx,
17737 UInt regNoR = gregOfRexRM(pfx, modrm);
17743 regNoL = eregOfRexRM(pfx, modrm);
17748 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
17798 Prefix pfx, Int sz, Long deltaIN )
17821 if (have66noF2noF3(pfx) && sz == 2) {
17845 assign(vecE, getXMMReg(eregOfRexRM(pfx, modrm)));
17848 nameXMMReg( eregOfRexRM(pfx, modrm) ),
17849 nameXMMReg( gregOfRexRM(pfx, modrm) ) );
17851 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
17856 dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) );
17859 assign(vecG, getXMMReg(gregOfRexRM(pfx, modrm)));
17863 putXMMReg(gregOfRexRM(pfx, modrm), mkexpr(res));
17872 if (have66noF2noF3(pfx)
17874 delta = dis_xTESTy_128( vbi, pfx, delta, False/*!isAvx*/, 0 );
17882 if (have66noF2noF3(pfx) && sz == 2) {
17883 delta = dis_PMOVxXBW_128( vbi, pfx, delta,
17892 if (have66noF2noF3(pfx) && sz == 2) {
17893 delta = dis_PMOVxXBD_128( vbi, pfx, delta,
17902 if (have66noF2noF3(pfx) && sz == 2) {
17903 delta = dis_PMOVSXBQ_128( vbi, pfx, delta, False/*!isAvx*/ );
17911 if (have66noF2noF3(pfx) && sz == 2) {
17912 delta = dis_PMOVxXWD_128(vbi, pfx, delta,
17921 if (have66noF2noF3(pfx) && sz == 2) {
17922 delta = dis_PMOVSXWQ_128( vbi, pfx, delta, False/*!isAvx*/ );
17930 if (have66noF2noF3(pfx) && sz == 2) {
17931 delta = dis_PMOVxXDQ_128( vbi, pfx, delta,
17943 if (have66noF2noF3(pfx) && sz == 2) {
17947 UInt rG = gregOfRexRM(pfx,modrm);
17950 UInt rE = eregOfRexRM(pfx,modrm);
17955 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
17969 if (have66noF2noF3(pfx) && sz == 2) {
17971 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
17981 if (have66noF2noF3(pfx) && sz == 2) {
17984 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
17986 putXMMReg( gregOfRexRM(pfx,modrm),
17989 nameXMMReg(gregOfRexRM(pfx,modrm)));
17999 if (have66noF2noF3(pfx) && sz == 2) {
18007 assign( argL, getXMMReg( eregOfRexRM(pfx, modrm) ) );
18010 nameXMMReg( eregOfRexRM(pfx, modrm) ),
18011 nameXMMReg( gregOfRexRM(pfx, modrm) ) );
18013 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
18018 dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) );
18021 assign(argR, getXMMReg( gregOfRexRM(pfx, modrm) ));
18023 putXMMReg( gregOfRexRM(pfx, modrm),
18034 if (have66noF2noF3(pfx) && sz == 2) {
18035 delta = dis_PMOVxXBW_128( vbi, pfx, delta,
18044 if (have66noF2noF3(pfx) && sz == 2) {
18045 delta = dis_PMOVxXBD_128( vbi, pfx, delta,
18054 if (have66noF2noF3(pfx) && sz == 2) {
18055 delta = dis_PMOVZXBQ_128( vbi, pfx, delta, False/*!isAvx*/ );
18063 if (have66noF2noF3(pfx) && sz == 2) {
18064 delta = dis_PMOVxXWD_128( vbi, pfx, delta,
18073 if (have66noF2noF3(pfx) && sz == 2) {
18074 delta = dis_PMOVZXWQ_128( vbi, pfx, delta, False/*!isAvx*/ );
18082 if (have66noF2noF3(pfx) && sz == 2) {
18083 delta = dis_PMOVxXDQ_128( vbi, pfx, delta,
18093 if (have66noF2noF3(pfx) && sz == 2) {
18095 delta = dis_SSEint_E_to_G( vbi, pfx, delta,
18106 if (have66noF2noF3(pfx) && sz == 2) {
18110 vbi, pfx, delta,
18126 if (have66noF2noF3(pfx) && sz == 2) {
18130 vbi, pfx, delta,
18146 if (have66noF2noF3(pfx) && sz == 2) {
18150 vbi, pfx, delta,
18166 if (have66noF2noF3(pfx) && sz == 2) {
18170 vbi, pfx, delta,
18182 if (have66noF2noF3(pfx) && sz == 2) {
18190 assign( argL, getXMMReg( eregOfRexRM(pfx, modrm) ) );
18193 nameXMMReg( eregOfRexRM(pfx, modrm) ),
18194 nameXMMReg( gregOfRexRM(pfx, modrm) ) );
18196 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
18201 dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) );
18204 assign(argR, getXMMReg( gregOfRexRM(pfx, modrm) ));
18206 putXMMReg( gregOfRexRM(pfx, modrm),
18216 if (have66noF2noF3(pfx) && sz == 2) {
18217 delta = dis_PHMINPOSUW_128( vbi, pfx, delta, False/*!isAvx*/ );
18233 if (have66noF2noF3(pfx) && sz == 2) {
18234 delta = dis_AESx( vbi, pfx, delta, False/*!isAvx*/, opc );
18245 if (haveF2noF3(pfx)
18246 && (opc == 0xF1 || (opc == 0xF0 && !have66(pfx)))) {
18258 assign(valE, getIRegE(sz, pfx, modrm));
18260 DIP("crc32b %s,%s\n", nameIRegE(sz, pfx, modrm),
18261 nameIRegG(1==getRexW(pfx) ? 8 : 4, pfx, modrm));
18263 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
18267 nameIRegG(1==getRexW(pfx) ? 8 : 4, pfx, modrm));
18275 assign(valG0, binop(Iop_And64, getIRegG(8, pfx, modrm),
18297 putIRegG(4, pfx, modrm, unop(Iop_64to32, mkexpr(valG1)));
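[Editor's note: the F2-prefixed 0xF0/0xF1 cases above decode CRC32, which accumulates a CRC-32C (Castagnoli) value. The lines shown only cover the operand plumbing; the reference model below, with an invented name, shows the one-byte update using the reflected polynomial constant 0x82F63B78.]

#include <stdint.h>

/* Fold one byte into a CRC-32C accumulator, LSB-first (reflected form
   of polynomial 0x1EDC6F41); no initial or final inversion is applied. */
static uint32_t model_crc32b ( uint32_t crc, uint8_t byte )
{
   crc ^= byte;
   for (int k = 0; k < 8; k++)
      crc = (crc >> 1) ^ (0x82F63B78u & (0u - (crc & 1u)));
   return crc;
}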
18323 static Long dis_PEXTRW ( const VexAbiInfo* vbi, Prefix pfx,
18334 UInt rG = gregOfRexRM(pfx,modrm);
18340 vassert(0==getRexW(pfx)); /* ensured by caller */
18347 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
18364 UInt rE = eregOfRexRM(pfx,modrm);
18378 static Long dis_PEXTRD ( const VexAbiInfo* vbi, Prefix pfx,
18395 vassert(0==getRexW(pfx)); /* ensured by caller */
18397 assign( xmm_vec, getXMMReg( gregOfRexRM(pfx,modrm) ) );
18403 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
18416 putIReg32( eregOfRexRM(pfx,modrm), mkexpr(src_dword) );
18419 nameXMMReg( gregOfRexRM(pfx, modrm) ),
18420 nameIReg32( eregOfRexRM(pfx, modrm) ) );
18425 imm8_10, nameXMMReg( gregOfRexRM(pfx, modrm) ), dis_buf );
18431 static Long dis_PEXTRQ ( const VexAbiInfo* vbi, Prefix pfx,
18444 vassert(1==getRexW(pfx)); /* ensured by caller */
18446 assign( xmm_vec, getXMMReg( gregOfRexRM(pfx,modrm) ) );
18451 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
18464 putIReg64( eregOfRexRM(pfx,modrm), mkexpr(src_qword) );
18467 nameXMMReg( gregOfRexRM(pfx, modrm) ),
18468 nameIReg64( eregOfRexRM(pfx, modrm) ) );
18473 imm8_0, nameXMMReg( gregOfRexRM(pfx, modrm) ), dis_buf );
18612 static Long dis_PCMPxSTRx ( const VexAbiInfo* vbi, Prefix pfx,
18632 regNoL = eregOfRexRM(pfx, modrm);
18633 regNoR = gregOfRexRM(pfx, modrm);
18638 regNoR = gregOfRexRM(pfx, modrm);
18639 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
18855 pfx,
18868 assign( xmm_vec, getXMMReg( gregOfRexRM(pfx,modrm) ) );
18875 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
18889 putIReg64( eregOfRexRM(pfx,modrm),
18894 nameXMMReg( gregOfRexRM(pfx, modrm) ),
18895 nameIReg64( eregOfRexRM(pfx, modrm) ) );
18900 imm8, nameXMMReg( gregOfRexRM(pfx, modrm) ), dis_buf );
19031 static Long dis_EXTRACTPS ( const VexAbiInfo* vbi, Prefix pfx,
19041 UInt rG = gregOfRexRM(pfx,modrm);
19051 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
19064 UInt rE = eregOfRexRM(pfx,modrm);
19111 Prefix pfx, Int sz, Long deltaIN )
19127 if (have66noF2noF3(pfx) && sz == 2) {
19144 getXMMRegLane32F( eregOfRexRM(pfx, modrm), 0 ) );
19146 getXMMRegLane32F( eregOfRexRM(pfx, modrm), 1 ) );
19148 getXMMRegLane32F( eregOfRexRM(pfx, modrm), 2 ) );
19150 getXMMRegLane32F( eregOfRexRM(pfx, modrm), 3 ) );
19155 imm, nameXMMReg( eregOfRexRM(pfx, modrm) ),
19156 nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19158 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
19172 imm, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19186 putXMMRegLane32F( gregOfRexRM(pfx, modrm), 0, mkexpr(res0) );
19187 putXMMRegLane32F( gregOfRexRM(pfx, modrm), 1, mkexpr(res1) );
19188 putXMMRegLane32F( gregOfRexRM(pfx, modrm), 2, mkexpr(res2) );
19189 putXMMRegLane32F( gregOfRexRM(pfx, modrm), 3, mkexpr(res3) );
19197 if (have66noF2noF3(pfx) && sz == 2) {
19210 getXMMRegLane64F( eregOfRexRM(pfx, modrm), 0 ) );
19212 getXMMRegLane64F( eregOfRexRM(pfx, modrm), 1 ) );
19217 imm, nameXMMReg( eregOfRexRM(pfx, modrm) ),
19218 nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19220 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
19230 imm, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19242 putXMMRegLane64F( gregOfRexRM(pfx, modrm), 0, mkexpr(res0) );
19243 putXMMRegLane64F( gregOfRexRM(pfx, modrm), 1, mkexpr(res1) );
19254 if (have66noF2noF3(pfx) && sz == 2) {
19265 isD ? getXMMRegLane64F( eregOfRexRM(pfx, modrm), 0 )
19266 : getXMMRegLane32F( eregOfRexRM(pfx, modrm), 0 ) );
19272 imm, nameXMMReg( eregOfRexRM(pfx, modrm) ),
19273 nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19275 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
19282 imm, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19295 putXMMRegLane64F( gregOfRexRM(pfx, modrm), 0, mkexpr(res) );
19297 putXMMRegLane32F( gregOfRexRM(pfx, modrm), 0, mkexpr(res) );
19306 if (have66noF2noF3(pfx) && sz == 2) {
19314 assign( dst_vec, getXMMReg( gregOfRexRM(pfx, modrm) ) );
19318 assign( src_vec, getXMMReg( eregOfRexRM(pfx, modrm) ) );
19321 nameXMMReg( eregOfRexRM(pfx, modrm) ),
19322 nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19324 addr = disAMode( &alen, vbi, pfx, delta, dis_buf,
19331 imm8, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19334 putXMMReg( gregOfRexRM(pfx, modrm),
19343 if (have66noF2noF3(pfx) && sz == 2) {
19350 assign( dst_vec, getXMMReg( gregOfRexRM(pfx, modrm) ) );
19354 assign( src_vec, getXMMReg( eregOfRexRM(pfx, modrm) ) );
19357 nameXMMReg( eregOfRexRM(pfx, modrm) ),
19358 nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19360 addr = disAMode( &alen, vbi, pfx, delta, dis_buf,
19367 imm8, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19370 putXMMReg( gregOfRexRM(pfx, modrm),
19379 if (have66noF2noF3(pfx) && sz == 2) {
19387 assign( dst_vec, getXMMReg( gregOfRexRM(pfx, modrm) ) );
19391 assign( src_vec, getXMMReg( eregOfRexRM(pfx, modrm) ) );
19394 nameXMMReg( eregOfRexRM(pfx, modrm) ),
19395 nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19397 addr = disAMode( &alen, vbi, pfx, delta, dis_buf,
19404 imm8, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) );
19407 putXMMReg( gregOfRexRM(pfx, modrm),
19417 if (have66noF2noF3(pfx) && sz == 2) {
19418 delta = dis_PEXTRB_128_GtoE( vbi, pfx, delta, False/*!isAvx*/ );
19427 if (have66noF2noF3(pfx) && sz == 2) {
19428 delta = dis_PEXTRW( vbi, pfx, delta, False/*!isAvx*/ );
19438 if (have66noF2noF3(pfx)
19440 delta = dis_PEXTRD( vbi, pfx, delta, False/*!isAvx*/ );
19447 if (have66noF2noF3(pfx)
19449 delta = dis_PEXTRQ( vbi, pfx, delta, False/*!isAvx*/);
19459 if (have66noF2noF3(pfx)
19461 delta = dis_EXTRACTPS( vbi, pfx, delta, False/*!isAvx*/ );
19469 if (have66noF2noF3(pfx) && sz == 2) {
19473 UInt rG = gregOfRexRM(pfx, modrm);
19475 UInt rE = eregOfRexRM(pfx,modrm);
19482 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
19490 assign(src_vec, getXMMReg( gregOfRexRM(pfx, modrm) ));
19500 if (have66noF2noF3(pfx) && sz == 2) {
19506 UInt rG = gregOfRexRM(pfx, modrm);
19509 UInt rE = eregOfRexRM(pfx, modrm);
19520 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
19539 if (have66noF2noF3(pfx)
19544 UInt rG = gregOfRexRM(pfx, modrm);
19547 UInt rE = eregOfRexRM(pfx,modrm);
19554 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
19570 if (have66noF2noF3(pfx)
19575 UInt rG = gregOfRexRM(pfx, modrm);
19578 UInt rE = eregOfRexRM(pfx,modrm);
19585 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
19604 if (have66noF2noF3(pfx) && sz == 2) {
19609 UInt rG = gregOfRexRM(pfx, modrm);
19612 UInt rE = eregOfRexRM(pfx, modrm);
19619 addr = disAMode( &alen, vbi, pfx, delta, dis_buf,
19637 if (have66noF2noF3(pfx) && sz == 2) {
19642 UInt rG = gregOfRexRM(pfx, modrm);
19645 UInt rE = eregOfRexRM(pfx, modrm);
19652 addr = disAMode( &alen, vbi, pfx, delta, dis_buf,
19670 if (have66noF2noF3(pfx) && sz == 2) {
19675 UInt rG = gregOfRexRM(pfx, modrm);
19680 UInt rE = eregOfRexRM(pfx, modrm);
19688 addr = disAMode( &alen, vbi, pfx, delta, dis_buf,
19707 if (have66noF2noF3(pfx) && sz == 2) {
19713 UInt rG = gregOfRexRM(pfx, modrm);
19718 UInt rE = eregOfRexRM(pfx, modrm);
19725 addr = disAMode( &alen, vbi, pfx, delta, dis_buf,
19751 if (have66noF2noF3(pfx) && sz == 2) {
19753 delta = dis_PCMPxSTRx( vbi, pfx, delta, False/*!isAvx*/, opc );
19761 if (have66noF2noF3(pfx) && sz == 2) {
19762 delta = dis_AESKEYGENASSIST( vbi, pfx, delta, False/*!isAvx*/ );
19798 Prefix pfx, Int sz, Long deltaIN
19827 Bool validF2orF3 = haveF2orF3(pfx) ? False : True;
19838 && haveF2orF3(pfx) && !haveF2andF3(pfx) && haveLOCK(pfx)) {
19849 switch above, use validF2orF3 rather than looking at pfx
19855 delta = dis_op2_G_E ( vbi, pfx, False, Iop_Add8, True, 1, delta, "add" );
19859 delta = dis_op2_G_E ( vbi, pfx, False, Iop_Add8, True, sz, delta, "add" );
19863 if (haveF2orF3(pfx)) goto decode_failure;
19864 delta = dis_op2_E_G ( vbi, pfx, False, Iop_Add8, True, 1, delta, "add" );
19867 if (haveF2orF3(pfx)) goto decode_failure;
19868 delta = dis_op2_E_G ( vbi, pfx, False, Iop_Add8, True, sz, delta, "add" );
19872 if (haveF2orF3(pfx)) goto decode_failure;
19876 if (haveF2orF3(pfx)) goto decode_failure;
19882 delta = dis_op2_G_E ( vbi, pfx, False, Iop_Or8, True, 1, delta, "or" );
19886 delta = dis_op2_G_E ( vbi, pfx, False, Iop_Or8, True, sz, delta, "or" );
19890 if (haveF2orF3(pfx)) goto decode_failure;
19891 delta = dis_op2_E_G ( vbi, pfx, False, Iop_Or8, True, 1, delta, "or" );
19894 if (haveF2orF3(pfx)) goto decode_failure;
19895 delta = dis_op2_E_G ( vbi, pfx, False, Iop_Or8, True, sz, delta, "or" );
19899 if (haveF2orF3(pfx)) goto decode_failure;
19903 if (haveF2orF3(pfx)) goto decode_failure;
19909 delta = dis_op2_G_E ( vbi, pfx, True, Iop_Add8, True, 1, delta, "adc" );
19913 delta = dis_op2_G_E ( vbi, pfx, True, Iop_Add8, True, sz, delta, "adc" );
19917 if (haveF2orF3(pfx)) goto decode_failure;
19918 delta = dis_op2_E_G ( vbi, pfx, True, Iop_Add8, True, 1, delta, "adc" );
19921 if (haveF2orF3(pfx)) goto decode_failure;
19922 delta = dis_op2_E_G ( vbi, pfx, True, Iop_Add8, True, sz, delta, "adc" );
19926 if (haveF2orF3(pfx)) goto decode_failure;
19930 if (haveF2orF3(pfx)) goto decode_failure;
19936 delta = dis_op2_G_E ( vbi, pfx, True, Iop_Sub8, True, 1, delta, "sbb" );
19940 delta = dis_op2_G_E ( vbi, pfx, True, Iop_Sub8, True, sz, delta, "sbb" );
19944 if (haveF2orF3(pfx)) goto decode_failure;
19945 delta = dis_op2_E_G ( vbi, pfx, True, Iop_Sub8, True, 1, delta, "sbb" );
19948 if (haveF2orF3(pfx)) goto decode_failure;
19949 delta = dis_op2_E_G ( vbi, pfx, True, Iop_Sub8, True, sz, delta, "sbb" );
19953 if (haveF2orF3(pfx)) goto decode_failure;
19957 if (haveF2orF3(pfx)) goto decode_failure;
19963 delta = dis_op2_G_E ( vbi, pfx, False, Iop_And8, True, 1, delta, "and" );
19967 delta = dis_op2_G_E ( vbi, pfx, False, Iop_And8, True, sz, delta, "and" );
19971 if (haveF2orF3(pfx)) goto decode_failure;
19972 delta = dis_op2_E_G ( vbi, pfx, False, Iop_And8, True, 1, delta, "and" );
19975 if (haveF2orF3(pfx)) goto decode_failure;
19976 delta = dis_op2_E_G ( vbi, pfx, False, Iop_And8, True, sz, delta, "and" );
19980 if (haveF2orF3(pfx)) goto decode_failure;
19984 if (haveF2orF3(pfx)) goto decode_failure;
19990 delta = dis_op2_G_E ( vbi, pfx, False, Iop_Sub8, True, 1, delta, "sub" );
19994 delta = dis_op2_G_E ( vbi, pfx, False, Iop_Sub8, True, sz, delta, "sub" );
19998 if (haveF2orF3(pfx)) goto decode_failure;
19999 delta = dis_op2_E_G ( vbi, pfx, False, Iop_Sub8, True, 1, delta, "sub" );
20002 if (haveF2orF3(pfx)) goto decode_failure;
20003 delta = dis_op2_E_G ( vbi, pfx, False, Iop_Sub8, True, sz, delta, "sub" );
20007 if (haveF2orF3(pfx)) goto decode_failure;
20011 if (haveF2orF3(pfx)) goto decode_failure;
20017 delta = dis_op2_G_E ( vbi, pfx, False, Iop_Xor8, True, 1, delta, "xor" );
20021 delta = dis_op2_G_E ( vbi, pfx, False, Iop_Xor8, True, sz, delta, "xor" );
20025 if (haveF2orF3(pfx)) goto decode_failure;
20026 delta = dis_op2_E_G ( vbi, pfx, False, Iop_Xor8, True, 1, delta, "xor" );
20029 if (haveF2orF3(pfx)) goto decode_failure;
20030 delta = dis_op2_E_G ( vbi, pfx, False, Iop_Xor8, True, sz, delta, "xor" );
20034 if (haveF2orF3(pfx)) goto decode_failure;
20038 if (haveF2orF3(pfx)) goto decode_failure;
20043 if (haveF2orF3(pfx)) goto decode_failure;
20044 delta = dis_op2_G_E ( vbi, pfx, False, Iop_Sub8, False, 1, delta, "cmp" );
20047 if (haveF2orF3(pfx)) goto decode_failure;
20048 delta = dis_op2_G_E ( vbi, pfx, False, Iop_Sub8, False, sz, delta, "cmp" );
20052 if (haveF2orF3(pfx)) goto decode_failure;
20053 delta = dis_op2_E_G ( vbi, pfx, False, Iop_Sub8, False, 1, delta, "cmp" );
20056 if (haveF2orF3(pfx)) goto decode_failure;
20057 delta = dis_op2_E_G ( vbi, pfx, False, Iop_Sub8, False, sz, delta, "cmp" );
20061 if (haveF2orF3(pfx)) goto decode_failure;
20065 if (haveF2orF3(pfx)) goto decode_failure;
20080 if (haveF2orF3(pfx)) goto decode_failure;
20087 assign(t1, getIRegRexB(sz, pfx, opc-0x50));
20091 DIP("push%c %s\n", nameISize(sz), nameIRegRexB(sz,pfx,opc-0x50));
20102 if (haveF2orF3(pfx)) goto decode_failure;
20111 putIRegRexB(sz, pfx, opc-0x58, mkexpr(t1));
20112 DIP("pop%c %s\n", nameISize(sz), nameIRegRexB(sz,pfx,opc-0x58));
20116 if (haveF2orF3(pfx)) goto decode_failure;
20117 if (haveREX(pfx) && 1==getRexW(pfx)) {
20123 putIRegG(8, pfx, modrm,
20125 getIRegE(4, pfx, modrm)));
20127 nameIRegE(4, pfx, modrm),
20128 nameIRegG(8, pfx, modrm));
20131 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
20133 putIRegG(8, pfx, modrm,
20137 nameIRegG(8, pfx, modrm));
20145 if (haveF2orF3(pfx)) goto decode_failure;
20153 if (haveF2orF3(pfx)) goto decode_failure;
20154 delta = dis_imul_I_E_G ( vbi, pfx, sz, delta, sz );
20158 if (haveF2orF3(pfx)) goto decode_failure;
20178 delta = dis_imul_I_E_G ( vbi, pfx, sz, delta, 1 );
20199 if (haveF3(pfx)) goto decode_failure;
20200 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */
20263 if (epartIsReg(modrm) && haveF2orF3(pfx))
20265 if (!epartIsReg(modrm) && haveF2andF3(pfx))
20267 if (!epartIsReg(modrm) && haveF2orF3(pfx) && !haveLOCK(pfx))
20269 am_sz = lengthAMode(pfx,delta);
20273 delta = dis_Grp1 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, d64 );
20279 if (epartIsReg(modrm) && haveF2orF3(pfx))
20281 if (!epartIsReg(modrm) && haveF2andF3(pfx))
20283 if (!epartIsReg(modrm) && haveF2orF3(pfx) && !haveLOCK(pfx))
20285 am_sz = lengthAMode(pfx,delta);
20288 delta = dis_Grp1 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, d64 );
20292 if (haveF2orF3(pfx)) goto decode_failure;
20294 am_sz = lengthAMode(pfx,delta);
20297 delta = dis_Grp1 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, d64 );
20301 if (haveF2orF3(pfx)) goto decode_failure;
20302 delta = dis_op2_E_G ( vbi, pfx, False, Iop_And8, False, 1, delta, "test" );
20306 if (haveF2orF3(pfx)) goto decode_failure;
20307 delta = dis_op2_E_G ( vbi, pfx, False, Iop_And8, False, sz, delta, "test" );
20322 if (haveF2orF3(pfx)) {
20326 if (haveF2andF3(pfx))
20333 assign(t1, getIRegE(sz, pfx, modrm));
20334 assign(t2, getIRegG(sz, pfx, modrm));
20335 putIRegG(sz, pfx, modrm, mkexpr(t1));
20336 putIRegE(sz, pfx, modrm, mkexpr(t2));
20339 nameISize(sz), nameIRegG(sz, pfx, modrm),
20340 nameIRegE(sz, pfx, modrm));
20343 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
20345 assign( t2, getIRegG(sz, pfx, modrm) );
20348 putIRegG( sz, pfx, modrm, mkexpr(t1) );
20351 nameIRegG(sz, pfx, modrm), dis_buf);
20358 delta = dis_mov_G_E(vbi, pfx, 1, delta, &ok);
20366 delta = dis_mov_G_E(vbi, pfx, sz, delta, &ok);
20372 if (haveF2orF3(pfx)) goto decode_failure;
20373 delta = dis_mov_E_G(vbi, pfx, 1, delta);
20377 if (haveF2orF3(pfx)) goto decode_failure;
20378 delta = dis_mov_E_G(vbi, pfx, sz, delta);
20382 if (haveF2orF3(pfx)) goto decode_failure;
20383 delta = dis_mov_S_E(vbi, pfx, sz, delta);
20387 if (haveF2orF3(pfx)) goto decode_failure;
20395 any segment override bits in pfx. */
20396 addr = disAMode ( &alen, vbi, clearSegBits(pfx), delta, dis_buf, 0 );
20401 putIRegG( sz, pfx, modrm,
20407 nameIRegG(sz,pfx,modrm));
20415 if (haveF2orF3(pfx)) goto decode_failure;
20441 addr = disAMode ( &len, vbi, pfx, delta, dis_buf, 0 );
20452 if (!have66(pfx) && !haveF2(pfx) && haveF3(pfx)) {
20462 !haveF2orF3(pfx)
20464 && getRexB(pfx)==0 ) {
20477 if (haveF2orF3(pfx)) goto decode_failure;
20478 codegen_xchg_rAX_Reg ( pfx, sz, opc - 0x90 );
20482 if (haveF2orF3(pfx)) goto decode_failure;
20501 if (haveF2orF3(pfx)) goto decode_failure;
20523 if (haveF2orF3(pfx)) goto decode_failure;
20579 if (haveF2orF3(pfx)) goto decode_failure;
20652 if (have66orF2orF3(pfx)) goto decode_failure;
20662 assign( addr, handleAddrOverrides(vbi, pfx, mkU64(d64)) );
20665 segRegTxt(pfx), (ULong)d64,
20670 if (have66orF2orF3(pfx)) goto decode_failure;
20680 assign( addr, handleAddrOverrides(vbi, pfx, mkU64(d64)) );
20683 segRegTxt(pfx), (ULong)d64);
20689 if (haveF3(pfx) && !haveF2(pfx)) {
20694 guest_RIP_bbstart+delta, "rep movs", pfx );
20699 if (!haveF3(pfx) && !haveF2(pfx)) {
20702 dis_string_op( dis_MOVS, sz, "movs", pfx );
20710 if (haveF3(pfx) && !haveF2(pfx)) {
20715 guest_RIP_bbstart+delta, "repe cmps", pfx );
20724 if (haveF3(pfx) && !haveF2(pfx)) {
20729 guest_RIP_bbstart+delta, "rep stos", pfx );
20734 if (!haveF3(pfx) && !haveF2(pfx)) {
20737 dis_string_op( dis_STOS, sz, "stos", pfx );
20743 if (haveF2orF3(pfx)) goto decode_failure;
20747 if (haveF2orF3(pfx)) goto decode_failure;
20753 dis_string_op( dis_LODS, ( opc == 0xAC ? 1 : sz ), "lods", pfx );
20759 if (haveF2(pfx) && !haveF3(pfx)) {
20764 guest_RIP_bbstart+delta, "repne scas", pfx );
20769 if (!haveF2(pfx) && haveF3(pfx)) {
20774 guest_RIP_bbstart+delta, "repe scas", pfx );
20779 if (!haveF2(pfx) && !haveF3(pfx)) {
20782 dis_string_op( dis_SCAS, sz, "scas", pfx );
20796 if (haveF2orF3(pfx)) goto decode_failure;
20799 putIRegRexB(1, pfx, opc-0xB0, mkU8(d64));
20800 DIP("movb $%lld,%s\n", d64, nameIRegRexB(1,pfx,opc-0xB0));
20813 if (haveF2orF3(pfx)) goto decode_failure;
20817 putIRegRexB(8, pfx, opc-0xB8, mkU64(d64));
20819 nameIRegRexB(8,pfx,opc-0xB8));
20823 putIRegRexB(sz, pfx, opc-0xB8,
20827 nameIRegRexB(sz,pfx,opc-0xB8));
20833 if (haveF2orF3(pfx)) goto decode_failure;
20835 am_sz = lengthAMode(pfx,delta);
20839 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz,
20847 if (haveF2orF3(pfx)) goto decode_failure;
20849 am_sz = lengthAMode(pfx,delta);
20852 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz,
20859 if (have66orF3(pfx)) goto decode_failure;
20860 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */
20868 if (have66(pfx)) goto decode_failure;
20870 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */
20872 DIP(haveF3(pfx) ? "rep ; ret\n" : "ret\n");
20885 if (haveF2orF3(pfx)) goto decode_failure;
20889 putIRegE(sz, pfx, modrm,
20893 nameIRegE(sz,pfx,modrm));
20895 if (haveF2(pfx)) goto decode_failure;
20897 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf,
20909 if (opc == 0xC7 && modrm == 0xF8 && !have66orF2orF3(pfx) && sz == 4
20929 if (opc == 0xC6 && modrm == 0xF8 && !have66orF2orF3(pfx) && sz == 1
21009 if (haveF2orF3(pfx)) goto decode_failure;
21011 am_sz = lengthAMode(pfx,delta);
21015 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz,
21023 if (haveF2orF3(pfx)) goto decode_failure;
21025 am_sz = lengthAMode(pfx,delta);
21028 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz,
21036 if (haveF2orF3(pfx)) goto decode_failure;
21038 am_sz = lengthAMode(pfx,delta);
21041 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz,
21049 if (haveF2orF3(pfx)) goto decode_failure;
21051 am_sz = lengthAMode(pfx,delta);
21053 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz,
21069 if (haveF2orF3(pfx))
21098 delta = dis_FPU ( &decode_OK, vbi, pfx, delta );
21115 if (have66orF2orF3(pfx) || 1==getRexW(pfx)) goto decode_failure;
21122 if (haveASO(pfx)) {
21157 DIP("loop%s%s 0x%llx\n", xtra, haveASO(pfx) ? "l" : "", (ULong)d64);
21163 if (have66orF2orF3(pfx)) goto decode_failure;
21166 if (haveASO(pfx)) {
21221 if (haveF2orF3(pfx)) goto decode_failure;
21270 if (haveF2orF3(pfx)) goto decode_failure;
21286 if (haveF3(pfx)) goto decode_failure;
21287 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */
21310 if (haveF3(pfx)) goto decode_failure;
21313 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */
21327 if (haveF3(pfx)) goto decode_failure;
21330 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */
21378 /* RM'd: if (haveF2orF3(pfx)) goto decode_failure; */
21380 delta = dis_Grp3 ( vbi, pfx, 1, delta, &decode_OK );
21387 /* RM'd: if (haveF2orF3(pfx)) goto decode_failure; */
21389 delta = dis_Grp3 ( vbi, pfx, sz, delta, &decode_OK );
21395 if (haveF2orF3(pfx)) goto decode_failure;
21401 if (haveF2orF3(pfx)) goto decode_failure;
21408 /* RM'd: if (haveF2orF3(pfx)) goto decode_failure; */
21410 delta = dis_Grp4 ( vbi, pfx, delta, &decode_OK );
21417 /* RM'd: if (haveF2orF3(pfx)) goto decode_failure; */
21419 delta = dis_Grp5 ( vbi, pfx, sz, delta, dres, &decode_OK );
21527 Prefix pfx, Int sz, Long deltaIN
21556 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
21680 if (have66orF2orF3(pfx)) goto decode_failure;
21685 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
21695 if (haveF2orF3(pfx)) goto decode_failure;
21698 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
21713 if (have66orF2orF3(pfx)) goto decode_failure;
21738 if (haveF2orF3(pfx)) goto decode_failure;
21739 delta = dis_cmov_E_G(vbi, pfx, sz, (AMD64Condcode)(opc - 0x40), delta);
21760 if (haveF3(pfx)) goto decode_failure;
21761 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */
21836 if (haveF2orF3(pfx)) goto decode_failure;
21842 putIRegE(1, pfx, modrm, mkexpr(t1));
21844 nameIRegE(1,pfx,modrm));
21846 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
21873 int bnd = gregOfRexRM(pfx,modrm);
21876 oper = nameIReg64 (eregOfRexRM(pfx,modrm));
21879 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
21884 if (haveF3no66noF2 (pfx)) {
21890 } else if (haveF2no66noF3 (pfx)) {
21896 } else if (have66noF2noF3 (pfx)) {
21902 } else if (haveNo66noF2noF3 (pfx)) {
21922 if (haveF2orF3(pfx)) goto decode_failure;
21984 delta = dis_bt_G_E ( vbi, pfx, sz, delta, BtOpNone, &ok );
21991 d64 = delta + lengthAMode(pfx, delta);
21994 vbi, pfx, delta, modrm, sz,
22002 vbi, pfx, delta, modrm, sz,
22011 delta = dis_bt_G_E ( vbi, pfx, sz, delta, BtOpSet, &ok );
22018 d64 = delta + lengthAMode(pfx, delta);
22021 vbi, pfx, delta, modrm, sz,
22029 vbi, pfx, delta, modrm, sz,
22035 if (haveF2orF3(pfx)) goto decode_failure;
22036 delta = dis_mul_E_G ( vbi, pfx, sz, delta );
22042 delta = dis_cmpxchg_G_E ( &ok, vbi, pfx, 1, delta );
22051 delta = dis_cmpxchg_G_E ( &ok, vbi, pfx, sz, delta );
22060 delta = dis_bt_G_E ( vbi, pfx, sz, delta, BtOpReset, &ok );
22066 if (haveF2orF3(pfx)) goto decode_failure;
22069 delta = dis_movx_E_G ( vbi, pfx, delta, 1, sz, False );
22073 if (haveF2orF3(pfx)) goto decode_failure;
22076 delta = dis_movx_E_G ( vbi, pfx, delta, 2, sz, False );
22083 am_sz = lengthAMode(pfx,delta);
22085 delta = dis_Grp8_Imm ( vbi, pfx, delta, modrm, am_sz, sz, d64,
22096 delta = dis_bt_G_E ( vbi, pfx, sz, delta, BtOpComp, &ok );
22102 if (!haveF2orF3(pfx)
22103 || (haveF3noF2(pfx)
22107 delta = dis_bs_E_G ( vbi, pfx, sz, delta, True );
22115 if (!haveF2orF3(pfx)
22116 || (haveF3noF2(pfx)
22120 delta = dis_bs_E_G ( vbi, pfx, sz, delta, False );
22128 if (haveF2orF3(pfx)) goto decode_failure;
22131 delta = dis_movx_E_G ( vbi, pfx, delta, 1, sz, True );
22135 if (haveF2orF3(pfx)) goto decode_failure;
22138 delta = dis_movx_E_G ( vbi, pfx, delta, 2, sz, True );
22143 delta = dis_xadd_G_E ( &decode_OK, vbi, pfx, 1, delta );
22151 delta = dis_xadd_G_E ( &decode_OK, vbi, pfx, sz, delta );
22182 if (have66(pfx)) goto decode_failure;
22189 if (haveF2orF3(pfx)) {
22194 if (haveF2andF3(pfx) || !haveLOCK(pfx)) goto decode_failure;
22197 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
22302 if (haveF2orF3(pfx)) goto decode_failure;
22307 assign( t1, getIRegRexB(4, pfx, opc-0xC8) );
22309 putIRegRexB(4, pfx, opc-0xC8, mkexpr(t2));
22310 DIP("bswapl %s\n", nameIRegRexB(4, pfx, opc-0xC8));
22316 assign( t1, getIRegRexB(8, pfx, opc-0xC8) );
22318 putIRegRexB(8, pfx, opc-0xC8, mkexpr(t2));
22319 DIP("bswapq %s\n", nameIRegRexB(8, pfx, opc-0xC8));
22333 if (!have66orF2orF3(pfx)) {
22410 delta = dis_MMX ( &decode_OK, vbi, pfx, sz, deltaIN );
22438 archinfo, vbi, pfx, sz, deltaIN, dres );
22448 delta = dis_ESC_0F__SSE3 ( &decode_OK, vbi, pfx, sz, deltaIN );
22459 archinfo, vbi, pfx, sz, deltaIN );
22484 Prefix pfx, Int sz, Long deltaIN
22494 if (!haveF2orF3(pfx) && !haveVEX(pfx)
22502 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
22509 putIRegG(sz, pfx, modrm, mkexpr(dst));
22510 DIP("movbe %s,%s\n", dis_buf, nameIRegG(sz, pfx, modrm));
22512 assign(src, getIRegG(sz, pfx, modrm));
22515 DIP("movbe %s,%s\n", nameIRegG(sz, pfx, modrm), dis_buf);
22534 delta = dis_ESC_0F38__SupSSE3 ( &decode_OK, vbi, pfx, sz, deltaIN );
22544 delta = dis_ESC_0F38__SSE4 ( &decode_OK, vbi, pfx, sz, deltaIN );
22569 Prefix pfx, Int sz, Long deltaIN
22587 delta = dis_ESC_0F3A__SupSSE3 ( &decode_OK, vbi, pfx, sz, deltaIN );
22597 delta = dis_ESC_0F3A__SSE4 ( &decode_OK, vbi, pfx, sz, deltaIN );
22616 Prefix pfx, Long delta, const HChar* name,
22625 UInt rD = gregOfRexRM(pfx, modrm);
22626 UInt rSL = getVexNvvvv(pfx);
22632 vassert(0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*WIG?*/);
22638 UInt rSR = eregOfRexRM(pfx, modrm);
22644 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
22684 Prefix pfx, Long delta, const HChar* name,
22689 uses_vvvv, vbi, pfx, delta, name, op, NULL, False, False);
22699 Prefix pfx, Long delta, const HChar* name,
22704 uses_vvvv, vbi, pfx, delta, name,
22712 Prefix pfx, Long delta,
22720 UInt rG = gregOfRexRM(pfx,modrm);
22721 UInt rV = getVexNvvvv(pfx);
22727 UInt rE = eregOfRexRM(pfx,modrm);
22733 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
22786 Prefix pfx, Long delta,
22794 UInt rG = gregOfRexRM(pfx,modrm);
22795 UInt rV = getVexNvvvv(pfx);
22801 UInt rE = eregOfRexRM(pfx,modrm);
22807 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
22862 Prefix pfx, Long delta,
22869 UInt rG = gregOfRexRM(pfx,modrm);
22870 UInt rV = getVexNvvvv(pfx);
22875 UInt rE = eregOfRexRM(pfx,modrm);
22886 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
22972 Long dis_AVX128_shiftE_to_V_imm( Prefix pfx,
22979 UInt rD = getVexNvvvv(pfx);
22988 nameXMMReg(eregOfRexRM(pfx,rm)),
22990 assign( e0, getXMMReg(eregOfRexRM(pfx,rm)) );
23029 Long dis_AVX256_shiftE_to_V_imm( Prefix pfx,
23036 UInt rD = getVexNvvvv(pfx);
23045 nameYMMReg(eregOfRexRM(pfx,rm)),
23047 assign( e0, getYMMReg(eregOfRexRM(pfx,rm)) );
23093 Prefix pfx, Long delta,
23100 UInt rG = gregOfRexRM(pfx,rm);
23101 UInt rV = getVexNvvvv(pfx);
23104 UInt rE = eregOfRexRM(pfx,rm);
23113 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23136 Prefix pfx, Long delta,
23143 UInt rG = gregOfRexRM(pfx,rm);
23144 UInt rV = getVexNvvvv(pfx);
23149 UInt rE = eregOfRexRM(pfx,rm);
23155 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23183 Prefix pfx, Long delta,
23190 UInt rG = gregOfRexRM(pfx,rm);
23191 UInt rV = getVexNvvvv(pfx);
23196 UInt rE = eregOfRexRM(pfx,rm);
23202 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23230 Prefix pfx, Long delta,
23237 UInt rG = gregOfRexRM(pfx,rm);
23238 UInt rV = getVexNvvvv(pfx);
23241 UInt rE = eregOfRexRM(pfx,rm);
23250 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23270 Prefix pfx, Long delta,
23274 uses_vvvv, vbi, pfx, delta, opname, op,
23286 Prefix pfx, Long delta,
23300 UInt rG = gregOfRexRM(pfx, rm);
23301 UInt rV = getVexNvvvv(pfx);
23310 UInt rE = eregOfRexRM(pfx,rm);
23317 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
23396 Prefix pfx, Long delta,
23410 UInt rG = gregOfRexRM(pfx, rm);
23411 UInt rV = getVexNvvvv(pfx);
23425 UInt rE = eregOfRexRM(pfx,rm);
23432 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
23466 Prefix pfx, Long delta,
23476 UInt rG = gregOfRexRM(pfx, rm);
23478 UInt rE = eregOfRexRM(pfx,rm);
23483 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23499 Prefix pfx, Long delta,
23507 UInt rG = gregOfRexRM(pfx, rm);
23509 UInt rE = eregOfRexRM(pfx,rm);
23514 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23535 Prefix pfx, Long delta, const HChar* name,
23544 UInt rD = gregOfRexRM(pfx, modrm);
23545 UInt rSL = getVexNvvvv(pfx);
23551 vassert(1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*WIG?*/);
23557 UInt rSR = eregOfRexRM(pfx, modrm);
23563 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
23602 Prefix pfx, Long delta,
23606 uses_vvvv, vbi, pfx, delta, opname, op,
23618 Prefix pfx, Long delta, const HChar* name,
23623 uses_vvvv, vbi, pfx, delta, name, op, NULL, False, False);
23633 Prefix pfx, Long delta, const HChar* name,
23638 uses_vvvv, vbi, pfx, delta, name,
23647 Prefix pfx, Long delta,
23657 UInt rG = gregOfRexRM(pfx, rm);
23659 UInt rE = eregOfRexRM(pfx,rm);
23664 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23680 Prefix pfx, Long delta,
23688 UInt rG = gregOfRexRM(pfx, rm);
23690 UInt rE = eregOfRexRM(pfx,rm);
23695 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23708 static Long dis_CVTDQ2PD_256 ( const VexAbiInfo* vbi, Prefix pfx,
23716 UInt rG = gregOfRexRM(pfx,modrm);
23718 UInt rE = eregOfRexRM(pfx,modrm);
23723 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23744 static Long dis_CVTPD2PS_256 ( const VexAbiInfo* vbi, Prefix pfx,
23751 UInt rG = gregOfRexRM(pfx,modrm);
23755 UInt rE = eregOfRexRM(pfx,modrm);
23760 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23878 Prefix pfx, Int sz, Long deltaIN
23895 if (haveF2no66noF3(pfx) && !epartIsReg(getUChar(delta))) {
23897 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23898 UInt rG = gregOfRexRM(pfx,modrm);
23911 if (haveF2no66noF3(pfx) && epartIsReg(getUChar(delta))) {
23913 UInt rG = gregOfRexRM(pfx, modrm);
23914 UInt rE = eregOfRexRM(pfx, modrm);
23915 UInt rV = getVexNvvvv(pfx);
23930 if (haveF3no66noF2(pfx) && !epartIsReg(getUChar(delta))) {
23932 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23933 UInt rG = gregOfRexRM(pfx,modrm);
23946 if (haveF3no66noF2(pfx) && epartIsReg(getUChar(delta))) {
23948 UInt rG = gregOfRexRM(pfx, modrm);
23949 UInt rE = eregOfRexRM(pfx, modrm);
23950 UInt rV = getVexNvvvv(pfx);
23965 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
23967 UInt rG = gregOfRexRM(pfx, modrm);
23969 UInt rE = eregOfRexRM(pfx,modrm);
23974 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23982 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
23984 UInt rG = gregOfRexRM(pfx, modrm);
23986 UInt rE = eregOfRexRM(pfx,modrm);
23991 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
23999 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24001 UInt rG = gregOfRexRM(pfx, modrm);
24003 UInt rE = eregOfRexRM(pfx,modrm);
24008 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24016 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24018 UInt rG = gregOfRexRM(pfx, modrm);
24020 UInt rE = eregOfRexRM(pfx,modrm);
24025 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24037 if (haveF2no66noF3(pfx) && !epartIsReg(getUChar(delta))) {
24039 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24040 UInt rG = gregOfRexRM(pfx,modrm);
24049 if (haveF2no66noF3(pfx) && epartIsReg(getUChar(delta))) {
24051 UInt rG = gregOfRexRM(pfx, modrm);
24052 UInt rE = eregOfRexRM(pfx, modrm);
24053 UInt rV = getVexNvvvv(pfx);
24067 if (haveF3no66noF2(pfx) && !epartIsReg(getUChar(delta))) {
24069 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24070 UInt rG = gregOfRexRM(pfx,modrm);
24079 if (haveF3no66noF2(pfx) && epartIsReg(getUChar(delta))) {
24081 UInt rG = gregOfRexRM(pfx, modrm);
24082 UInt rE = eregOfRexRM(pfx, modrm);
24083 UInt rV = getVexNvvvv(pfx);
24098 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24100 UInt rG = gregOfRexRM(pfx,modrm);
24102 UInt rE = eregOfRexRM(pfx,modrm);
24107 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24115 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24117 UInt rG = gregOfRexRM(pfx,modrm);
24119 UInt rE = eregOfRexRM(pfx,modrm);
24124 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24132 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24134 UInt rG = gregOfRexRM(pfx,modrm);
24136 UInt rE = eregOfRexRM(pfx,modrm);
24141 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24149 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24151 UInt rG = gregOfRexRM(pfx,modrm);
24153 UInt rE = eregOfRexRM(pfx,modrm);
24158 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24169 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24170 delta = dis_MOVDDUP_128( vbi, pfx, delta, True/*isAvx*/ );
24174 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24175 delta = dis_MOVDDUP_256( vbi, pfx, delta );
24180 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
24183 UInt rG = gregOfRexRM(pfx, modrm);
24184 UInt rE = eregOfRexRM(pfx, modrm);
24185 UInt rV = getVexNvvvv(pfx);
24201 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx))
24202 && 0==getVexL(pfx)/*128*/ && !epartIsReg(getUChar(delta))) {
24204 UInt rG = gregOfRexRM(pfx, modrm);
24205 UInt rV = getVexNvvvv(pfx);
24206 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24219 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*128*/) {
24220 delta = dis_MOVSxDUP_128( vbi, pfx, delta, True/*isAvx*/,
24225 if (haveF3no66noF2(pfx) && 1==getVexL(pfx)/*256*/) {
24226 delta = dis_MOVSxDUP_256( vbi, pfx, delta, True/*isL*/ );
24236 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx))
24237 && 0==getVexL(pfx)/*128*/ && !epartIsReg(getUChar(delta))) {
24239 UInt rG = gregOfRexRM(pfx, modrm);
24240 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24252 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24255 UInt rG = gregOfRexRM(pfx,modrm);
24256 UInt rV = getVexNvvvv(pfx);
24261 UInt rE = eregOfRexRM(pfx,modrm);
24267 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24280 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24283 UInt rG = gregOfRexRM(pfx,modrm);
24284 UInt rV = getVexNvvvv(pfx);
24289 UInt rE = eregOfRexRM(pfx,modrm);
24295 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24308 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24311 UInt rG = gregOfRexRM(pfx,modrm);
24312 UInt rV = getVexNvvvv(pfx);
24317 UInt rE = eregOfRexRM(pfx,modrm);
24323 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24336 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24339 UInt rG = gregOfRexRM(pfx,modrm);
24340 UInt rV = getVexNvvvv(pfx);
24345 UInt rE = eregOfRexRM(pfx,modrm);
24351 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24367 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
24370 UInt rG = gregOfRexRM(pfx, modrm);
24371 UInt rE = eregOfRexRM(pfx, modrm);
24372 UInt rV = getVexNvvvv(pfx);
24388 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx))
24389 && 0==getVexL(pfx)/*128*/ && !epartIsReg(getUChar(delta))) {
24391 UInt rG = gregOfRexRM(pfx, modrm);
24392 UInt rV = getVexNvvvv(pfx);
24393 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24395 DIP("vmovhp%c %s,%s,%s\n", have66(pfx) ? 'd' : 's',
24406 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*128*/) {
24407 delta = dis_MOVSxDUP_128( vbi, pfx, delta, True/*isAvx*/,
24412 if (haveF3no66noF2(pfx) && 1==getVexL(pfx)/*256*/) {
24413 delta = dis_MOVSxDUP_256( vbi, pfx, delta, False/*!isL*/ );
24423 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx))
24424 && 0==getVexL(pfx)/*128*/ && !epartIsReg(getUChar(delta))) {
24426 UInt rG = gregOfRexRM(pfx, modrm);
24427 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24430 DIP("vmovhp%c %s,%s\n", have66(pfx) ? 'd' : 's',
24438 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24440 UInt rG = gregOfRexRM(pfx, modrm);
24442 UInt rE = eregOfRexRM(pfx,modrm);
24447 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24456 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24458 UInt rG = gregOfRexRM(pfx, modrm);
24460 UInt rE = eregOfRexRM(pfx,modrm);
24465 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24474 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24476 UInt rG = gregOfRexRM(pfx, modrm);
24478 UInt rE = eregOfRexRM(pfx,modrm);
24483 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24492 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24494 UInt rG = gregOfRexRM(pfx, modrm);
24496 UInt rE = eregOfRexRM(pfx,modrm);
24501 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24513 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24515 UInt rG = gregOfRexRM(pfx,modrm);
24517 UInt rE = eregOfRexRM(pfx,modrm);
24522 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24531 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24533 UInt rG = gregOfRexRM(pfx,modrm);
24535 UInt rE = eregOfRexRM(pfx,modrm);
24540 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24549 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24551 UInt rG = gregOfRexRM(pfx,modrm);
24553 UInt rE = eregOfRexRM(pfx,modrm);
24559 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24568 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24570 UInt rG = gregOfRexRM(pfx,modrm);
24572 UInt rE = eregOfRexRM(pfx,modrm);
24578 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24592 if (haveF2no66noF3(pfx) && 0==getRexW(pfx)/*W0*/) {
24594 UInt rV = getVexNvvvv(pfx);
24595 UInt rD = gregOfRexRM(pfx, modrm);
24598 UInt rS = eregOfRexRM(pfx,modrm);
24604 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24618 if (haveF2no66noF3(pfx) && 1==getRexW(pfx)/*W1*/) {
24620 UInt rV = getVexNvvvv(pfx);
24621 UInt rD = gregOfRexRM(pfx, modrm);
24624 UInt rS = eregOfRexRM(pfx,modrm);
24630 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24646 if (haveF3no66noF2(pfx) && 1==getRexW(pfx)/*W1*/) {
24648 UInt rV = getVexNvvvv(pfx);
24649 UInt rD = gregOfRexRM(pfx, modrm);
24652 UInt rS = eregOfRexRM(pfx,modrm);
24658 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24676 if (haveF3no66noF2(pfx) && 0==getRexW(pfx)/*W0*/) {
24678 UInt rV = getVexNvvvv(pfx);
24679 UInt rD = gregOfRexRM(pfx, modrm);
24682 UInt rS = eregOfRexRM(pfx,modrm);
24688 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
24710 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx))
24711 && 0==getVexL(pfx)/*128*/ && !epartIsReg(getUChar(delta))) {
24713 UInt rS = gregOfRexRM(pfx, modrm);
24716 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
24720 DIP("vmovntp%c %s,%s\n", have66(pfx) ? 'd' : 's',
24726 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx))
24727 && 1==getVexL(pfx)/*256*/ && !epartIsReg(getUChar(delta))) {
24729 UInt rS = gregOfRexRM(pfx, modrm);
24732 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
24736 DIP("vmovntp%c %s,%s\n", have66(pfx) ? 'd' : 's',
24744 if (haveF2no66noF3(pfx) && 0==getRexW(pfx)/*W0*/) {
24745 delta = dis_CVTxSD2SI( vbi, pfx, delta, True/*isAvx*/, opc, 4);
24749 if (haveF2no66noF3(pfx) && 1==getRexW(pfx)/*W1*/) {
24750 delta = dis_CVTxSD2SI( vbi, pfx, delta, True/*isAvx*/, opc, 8);
24754 if (haveF3no66noF2(pfx) && 0==getRexW(pfx)/*W0*/) {
24755 delta = dis_CVTxSS2SI( vbi, pfx, delta, True/*isAvx*/, opc, 4);
24759 if (haveF3no66noF2(pfx) && 1==getRexW(pfx)/*W1*/) {
24760 delta = dis_CVTxSS2SI( vbi, pfx, delta, True/*isAvx*/, opc, 8);
24767 if (haveF2no66noF3(pfx) && 0==getRexW(pfx)/*W0*/) {
24768 delta = dis_CVTxSD2SI( vbi, pfx, delta, True/*isAvx*/, opc, 4);
24772 if (haveF2no66noF3(pfx) && 1==getRexW(pfx)/*W1*/) {
24773 delta = dis_CVTxSD2SI( vbi, pfx, delta, True/*isAvx*/, opc, 8);
24777 if (haveF3no66noF2(pfx) && 0==getRexW(pfx)/*W0*/) {
24778 delta = dis_CVTxSS2SI( vbi, pfx, delta, True/*isAvx*/, opc, 4);
24782 if (haveF3no66noF2(pfx) && 1==getRexW(pfx)/*W1*/) {
24783 delta = dis_CVTxSS2SI( vbi, pfx, delta, True/*isAvx*/, opc, 8);
24792 if (have66noF2noF3(pfx)) {
24793 delta = dis_COMISD( vbi, pfx, delta, True/*isAvx*/, opc );
24798 if (haveNo66noF2noF3(pfx)) {
24799 delta = dis_COMISS( vbi, pfx, delta, True/*isAvx*/, opc );
24806 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24807 delta = dis_MOVMSKPD_128( vbi, pfx, delta, True/*isAvx*/ );
24811 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24812 delta = dis_MOVMSKPD_256( vbi, pfx, delta );
24816 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24817 delta = dis_MOVMSKPS_128( vbi, pfx, delta, True/*isAvx*/ );
24821 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24822 delta = dis_MOVMSKPS_256( vbi, pfx, delta );
24829 if (haveF3no66noF2(pfx)) {
24831 uses_vvvv, vbi, pfx, delta, "vsqrtss", Iop_Sqrt32F0x4 );
24835 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24837 uses_vvvv, vbi, pfx, delta, "vsqrtps", Iop_Sqrt32Fx4 );
24841 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24843 uses_vvvv, vbi, pfx, delta, "vsqrtps", Iop_Sqrt32Fx8 );
24847 if (haveF2no66noF3(pfx)) {
24849 uses_vvvv, vbi, pfx, delta, "vsqrtsd", Iop_Sqrt64F0x2 );
24853 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24855 uses_vvvv, vbi, pfx, delta, "vsqrtpd", Iop_Sqrt64Fx2 );
24859 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24861 uses_vvvv, vbi, pfx, delta, "vsqrtpd", Iop_Sqrt64Fx4 );
24868 if (haveF3no66noF2(pfx)) {
24870 uses_vvvv, vbi, pfx, delta, "vrsqrtss",
24875 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24877 uses_vvvv, vbi, pfx, delta, "vrsqrtps", Iop_RSqrtEst32Fx4 );
24881 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24883 uses_vvvv, vbi, pfx, delta, "vrsqrtps", Iop_RSqrtEst32Fx8 );
24890 if (haveF3no66noF2(pfx)) {
24892 uses_vvvv, vbi, pfx, delta, "vrcpss", Iop_RecipEst32F0x4 );
24896 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24898 uses_vvvv, vbi, pfx, delta, "vrcpps", Iop_RecipEst32Fx4 );
24902 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24904 uses_vvvv, vbi, pfx, delta, "vrcpps", Iop_RecipEst32Fx8 );
24912 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24914 uses_vvvv, vbi, pfx, delta, "vandpd", Iop_AndV128 );
24919 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24921 uses_vvvv, vbi, pfx, delta, "vandpd", Iop_AndV256 );
24925 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24927 uses_vvvv, vbi, pfx, delta, "vandps", Iop_AndV128 );
24931 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24933 uses_vvvv, vbi, pfx, delta, "vandps", Iop_AndV256 );
24941 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24943 uses_vvvv, vbi, pfx, delta, "vandpd", Iop_AndV128,
24948 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24950 uses_vvvv, vbi, pfx, delta, "vandpd", Iop_AndV256,
24955 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24957 uses_vvvv, vbi, pfx, delta, "vandps", Iop_AndV128,
24962 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24964 uses_vvvv, vbi, pfx, delta, "vandps", Iop_AndV256,
24973 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24975 uses_vvvv, vbi, pfx, delta, "vorpd", Iop_OrV128 );
24980 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24982 uses_vvvv, vbi, pfx, delta, "vorpd", Iop_OrV256 );
24987 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
24989 uses_vvvv, vbi, pfx, delta, "vorps", Iop_OrV128 );
24994 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
24996 uses_vvvv, vbi, pfx, delta, "vorps", Iop_OrV256 );
25004 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25006 uses_vvvv, vbi, pfx, delta, "vxorpd", Iop_XorV128 );
25011 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25013 uses_vvvv, vbi, pfx, delta, "vxorpd", Iop_XorV256 );
25018 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25020 uses_vvvv, vbi, pfx, delta, "vxorps", Iop_XorV128 );
25025 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25027 uses_vvvv, vbi, pfx, delta, "vxorps", Iop_XorV256 );
25034 if (haveF2no66noF3(pfx)) {
25036 uses_vvvv, vbi, pfx, delta, "vaddsd", Iop_Add64F0x2 );
25040 if (haveF3no66noF2(pfx)) {
25042 uses_vvvv, vbi, pfx, delta, "vaddss", Iop_Add32F0x4 );
25046 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25048 uses_vvvv, vbi, pfx, delta, "vaddps", Iop_Add32Fx4 );
25052 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25054 uses_vvvv, vbi, pfx, delta, "vaddps", Iop_Add32Fx8 );
25058 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25060 uses_vvvv, vbi, pfx, delta, "vaddpd", Iop_Add64Fx2 );
25064 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25066 uses_vvvv, vbi, pfx, delta, "vaddpd", Iop_Add64Fx4 );
25073 if (haveF2no66noF3(pfx)) {
25075 uses_vvvv, vbi, pfx, delta, "vmulsd", Iop_Mul64F0x2 );
25079 if (haveF3no66noF2(pfx)) {
25081 uses_vvvv, vbi, pfx, delta, "vmulss", Iop_Mul32F0x4 );
25085 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25087 uses_vvvv, vbi, pfx, delta, "vmulps", Iop_Mul32Fx4 );
25091 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25093 uses_vvvv, vbi, pfx, delta, "vmulps", Iop_Mul32Fx8 );
25097 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25099 uses_vvvv, vbi, pfx, delta, "vmulpd", Iop_Mul64Fx2 );
25103 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25105 uses_vvvv, vbi, pfx, delta, "vmulpd", Iop_Mul64Fx4 );
25112 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25113 delta = dis_CVTPS2PD_128( vbi, pfx, delta, True/*isAvx*/ );
25117 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25118 delta = dis_CVTPS2PD_256( vbi, pfx, delta );
25122 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25123 delta = dis_CVTPD2PS_128( vbi, pfx, delta, True/*isAvx*/ );
25127 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25128 delta = dis_CVTPD2PS_256( vbi, pfx, delta );
25132 if (haveF2no66noF3(pfx)) {
25134 UInt rV = getVexNvvvv(pfx);
25135 UInt rD = gregOfRexRM(pfx, modrm);
25140 UInt rS = eregOfRexRM(pfx,modrm);
25146 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
25162 if (haveF3no66noF2(pfx)) {
25164 UInt rV = getVexNvvvv(pfx);
25165 UInt rD = gregOfRexRM(pfx, modrm);
25168 UInt rS = eregOfRexRM(pfx,modrm);
25174 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
25191 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25192 delta = dis_CVTxPS2DQ_128( vbi, pfx, delta,
25197 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25198 delta = dis_CVTxPS2DQ_256( vbi, pfx, delta,
25203 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*128*/) {
25204 delta = dis_CVTxPS2DQ_128( vbi, pfx, delta,
25209 if (haveF3no66noF2(pfx) && 1==getVexL(pfx)/*256*/) {
25210 delta = dis_CVTxPS2DQ_256( vbi, pfx, delta,
25215 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25216 delta = dis_CVTDQ2PS_128 ( vbi, pfx, delta, True/*isAvx*/ );
25220 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25221 delta = dis_CVTDQ2PS_256 ( vbi, pfx, delta );
25228 if (haveF2no66noF3(pfx)) {
25230 uses_vvvv, vbi, pfx, delta, "vsubsd", Iop_Sub64F0x2 );
25234 if (haveF3no66noF2(pfx)) {
25236 uses_vvvv, vbi, pfx, delta, "vsubss", Iop_Sub32F0x4 );
25240 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25242 uses_vvvv, vbi, pfx, delta, "vsubps", Iop_Sub32Fx4 );
25246 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25248 uses_vvvv, vbi, pfx, delta, "vsubps", Iop_Sub32Fx8 );
25252 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25254 uses_vvvv, vbi, pfx, delta, "vsubpd", Iop_Sub64Fx2 );
25258 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25260 uses_vvvv, vbi, pfx, delta, "vsubpd", Iop_Sub64Fx4 );
25267 if (haveF2no66noF3(pfx)) {
25269 uses_vvvv, vbi, pfx, delta, "vminsd", Iop_Min64F0x2 );
25273 if (haveF3no66noF2(pfx)) {
25275 uses_vvvv, vbi, pfx, delta, "vminss", Iop_Min32F0x4 );
25279 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25281 uses_vvvv, vbi, pfx, delta, "vminps", Iop_Min32Fx4 );
25285 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25287 uses_vvvv, vbi, pfx, delta, "vminps", Iop_Min32Fx8 );
25291 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25293 uses_vvvv, vbi, pfx, delta, "vminpd", Iop_Min64Fx2 );
25297 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25299 uses_vvvv, vbi, pfx, delta, "vminpd", Iop_Min64Fx4 );
25306 if (haveF2no66noF3(pfx)) {
25308 uses_vvvv, vbi, pfx, delta, "vdivsd", Iop_Div64F0x2 );
25312 if (haveF3no66noF2(pfx)) {
25314 uses_vvvv, vbi, pfx, delta, "vdivss", Iop_Div32F0x4 );
25318 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25320 uses_vvvv, vbi, pfx, delta, "vdivps", Iop_Div32Fx4 );
25324 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25326 uses_vvvv, vbi, pfx, delta, "vdivps", Iop_Div32Fx8 );
25330 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25332 uses_vvvv, vbi, pfx, delta, "vdivpd", Iop_Div64Fx2 );
25336 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25338 uses_vvvv, vbi, pfx, delta, "vdivpd", Iop_Div64Fx4 );
25345 if (haveF2no66noF3(pfx)) {
25347 uses_vvvv, vbi, pfx, delta, "vmaxsd", Iop_Max64F0x2 );
25351 if (haveF3no66noF2(pfx)) {
25353 uses_vvvv, vbi, pfx, delta, "vmaxss", Iop_Max32F0x4 );
25357 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25359 uses_vvvv, vbi, pfx, delta, "vmaxps", Iop_Max32Fx4 );
25363 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25365 uses_vvvv, vbi, pfx, delta, "vmaxps", Iop_Max32Fx8 );
25369 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25371 uses_vvvv, vbi, pfx, delta, "vmaxpd", Iop_Max64Fx2 );
25375 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25377 uses_vvvv, vbi, pfx, delta, "vmaxpd", Iop_Max64Fx4 );
25385 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25387 uses_vvvv, vbi, pfx, delta, "vpunpcklbw",
25394 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25396 uses_vvvv, vbi, pfx, delta, "vpunpcklbw",
25405 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25407 uses_vvvv, vbi, pfx, delta, "vpunpcklwd",
25414 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25416 uses_vvvv, vbi, pfx, delta, "vpunpcklwd",
25425 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25427 uses_vvvv, vbi, pfx, delta, "vpunpckldq",
25434 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25436 uses_vvvv, vbi, pfx, delta, "vpunpckldq",
25445 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25447 uses_vvvv, vbi, pfx, delta, "vpacksswb",
25454 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25456 uses_vvvv, vbi, pfx, delta, "vpacksswb",
25465 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25467 uses_vvvv, vbi, pfx, delta, "vpcmpgtb", Iop_CmpGT8Sx16 );
25472 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25474 uses_vvvv, vbi, pfx, delta, "vpcmpgtb", Iop_CmpGT8Sx32 );
25482 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25484 uses_vvvv, vbi, pfx, delta, "vpcmpgtw", Iop_CmpGT16Sx8 );
25489 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25491 uses_vvvv, vbi, pfx, delta, "vpcmpgtw", Iop_CmpGT16Sx16 );
25499 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25501 uses_vvvv, vbi, pfx, delta, "vpcmpgtd", Iop_CmpGT32Sx4 );
25506 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25508 uses_vvvv, vbi, pfx, delta, "vpcmpgtd", Iop_CmpGT32Sx8 );
25516 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25518 uses_vvvv, vbi, pfx, delta, "vpackuswb",
25525 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25527 uses_vvvv, vbi, pfx, delta, "vpackuswb",
25536 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25538 uses_vvvv, vbi, pfx, delta, "vpunpckhbw",
25545 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25547 uses_vvvv, vbi, pfx, delta, "vpunpckhbw",
25556 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25558 uses_vvvv, vbi, pfx, delta, "vpunpckhwd",
25565 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25567 uses_vvvv, vbi, pfx, delta, "vpunpckhwd",
25576 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25578 uses_vvvv, vbi, pfx, delta, "vpunpckhdq",
25585 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25587 uses_vvvv, vbi, pfx, delta, "vpunpckhdq",
25596 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25598 uses_vvvv, vbi, pfx, delta, "vpackssdw",
25605 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25607 uses_vvvv, vbi, pfx, delta, "vpackssdw",
25616 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25618 uses_vvvv, vbi, pfx, delta, "vpunpcklqdq",
25625 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25627 uses_vvvv, vbi, pfx, delta, "vpunpcklqdq",
25636 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25638 uses_vvvv, vbi, pfx, delta, "vpunpckhqdq",
25645 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25647 uses_vvvv, vbi, pfx, delta, "vpunpckhqdq",
25655 if (have66noF2noF3(pfx)
25656 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
25662 gregOfRexRM(pfx,modrm),
25663 unop( Iop_32UtoV128, getIReg32(eregOfRexRM(pfx,modrm)) )
25665 DIP("vmovd %s, %s\n", nameIReg32(eregOfRexRM(pfx,modrm)),
25666 nameXMMReg(gregOfRexRM(pfx,modrm)));
25668 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
25671 gregOfRexRM(pfx,modrm),
25675 nameXMMReg(gregOfRexRM(pfx,modrm)));
25680 if (have66noF2noF3(pfx)
25681 && 0==getVexL(pfx)/*128*/ && 1==getRexW(pfx)/*W1*/) {
25687 gregOfRexRM(pfx,modrm),
25688 unop( Iop_64UtoV128, getIReg64(eregOfRexRM(pfx,modrm)) )
25690 DIP("vmovq %s, %s\n", nameIReg64(eregOfRexRM(pfx,modrm)),
25691 nameXMMReg(gregOfRexRM(pfx,modrm)));
25693 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
25696 gregOfRexRM(pfx,modrm),
25700 nameXMMReg(gregOfRexRM(pfx,modrm)));
25709 if ((have66noF2noF3(pfx) || haveF3no66noF2(pfx))
25710 && 1==getVexL(pfx)/*256*/) {
25712 UInt rD = gregOfRexRM(pfx, modrm);
25714 Bool isA = have66noF2noF3(pfx);
25717 UInt rS = eregOfRexRM(pfx, modrm);
25722 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
25734 if ((have66noF2noF3(pfx) || haveF3no66noF2(pfx))
25735 && 0==getVexL(pfx)/*128*/) {
25737 UInt rD = gregOfRexRM(pfx, modrm);
25739 Bool isA = have66noF2noF3(pfx);
25742 UInt rS = eregOfRexRM(pfx, modrm);
25747 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
25761 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25762 delta = dis_PSHUFD_32x4( vbi, pfx, delta, True/*writesYmm*/);
25766 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25767 delta = dis_PSHUFD_32x8( vbi, pfx, delta);
25771 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) {
25772 delta = dis_PSHUFxW_128( vbi, pfx, delta,
25777 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) {
25778 delta = dis_PSHUFxW_256( vbi, pfx, delta, False/*!xIsH*/ );
25782 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*128*/) {
25783 delta = dis_PSHUFxW_128( vbi, pfx, delta,
25788 if (haveF3no66noF2(pfx) && 1==getVexL(pfx)/*256*/) {
25789 delta = dis_PSHUFxW_256( vbi, pfx, delta, True/*xIsH*/ );
25798 if (have66noF2noF3(pfx)
25799 && 0==getVexL(pfx)/*128*/
25802 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta,
25808 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta,
25814 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta,
25824 if (have66noF2noF3(pfx)
25825 && 1==getVexL(pfx)/*256*/
25828 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta,
25834 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta,
25840 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta,
25853 if (have66noF2noF3(pfx)
25854 && 0==getVexL(pfx)/*128*/
25857 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta,
25863 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta,
25869 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta,
25879 if (have66noF2noF3(pfx)
25880 && 1==getVexL(pfx)/*256*/
25883 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta,
25889 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta,
25895 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta,
25909 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
25911 Int rS = eregOfRexRM(pfx,getUChar(delta));
25912 Int rD = getVexNvvvv(pfx);
25933 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta,
25939 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta,
25950 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
25952 Int rS = eregOfRexRM(pfx,getUChar(delta));
25953 Int rD = getVexNvvvv(pfx);
25981 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta,
25987 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta,
25999 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26001 uses_vvvv, vbi, pfx, delta, "vpcmpeqb", Iop_CmpEQ8x16 );
26006 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26008 uses_vvvv, vbi, pfx, delta, "vpcmpeqb", Iop_CmpEQ8x32 );
26016 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26018 uses_vvvv, vbi, pfx, delta, "vpcmpeqw", Iop_CmpEQ16x8 );
26023 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26025 uses_vvvv, vbi, pfx, delta, "vpcmpeqw", Iop_CmpEQ16x16 );
26033 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26035 uses_vvvv, vbi, pfx, delta, "vpcmpeqd", Iop_CmpEQ32x4 );
26040 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26042 uses_vvvv, vbi, pfx, delta, "vpcmpeqd", Iop_CmpEQ32x8 );
26049 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26060 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26076 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26082 UInt rG = gregOfRexRM(pfx,modrm);
26083 UInt rV = getVexNvvvv(pfx);
26085 UInt rE = eregOfRexRM(pfx,modrm);
26091 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
26104 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26111 UInt rG = gregOfRexRM(pfx,modrm);
26112 UInt rV = getVexNvvvv(pfx);
26115 UInt rE = eregOfRexRM(pfx,modrm);
26121 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
26138 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26144 UInt rG = gregOfRexRM(pfx,modrm);
26145 UInt rV = getVexNvvvv(pfx);
26147 UInt rE = eregOfRexRM(pfx,modrm);
26153 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
26166 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26173 UInt rG = gregOfRexRM(pfx,modrm);
26174 UInt rV = getVexNvvvv(pfx);
26177 UInt rE = eregOfRexRM(pfx,modrm);
26183 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
26206 if (haveF3no66noF2(pfx)
26207 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
26210 UInt rG = gregOfRexRM(pfx,modrm);
26212 UInt rE = eregOfRexRM(pfx,modrm);
26217 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
26230 if (have66noF2noF3(pfx)
26231 && 0==getVexL(pfx)/*128*/ && 1==getRexW(pfx)/*W1*/) {
26233 UInt rG = gregOfRexRM(pfx,modrm);
26235 UInt rE = eregOfRexRM(pfx,modrm);
26240 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
26249 if (have66noF2noF3(pfx)
26250 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
26252 UInt rG = gregOfRexRM(pfx,modrm);
26254 UInt rE = eregOfRexRM(pfx,modrm);
26259 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
26271 if ((have66noF2noF3(pfx) || haveF3no66noF2(pfx))
26272 && 1==getVexL(pfx)/*256*/) {
26274 UInt rS = gregOfRexRM(pfx, modrm);
26276 Bool isA = have66noF2noF3(pfx);
26280 UInt rD = eregOfRexRM(pfx, modrm);
26285 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
26296 if ((have66noF2noF3(pfx) || haveF3no66noF2(pfx))
26297 && 0==getVexL(pfx)/*128*/) {
26299 UInt rS = gregOfRexRM(pfx, modrm);
26301 Bool isA = have66noF2noF3(pfx);
26305 UInt rD = eregOfRexRM(pfx, modrm);
26310 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
26323 if (haveNo66noF2noF3(pfx)
26324 && 0==getVexL(pfx)/*LZ*/
26325 && 0==getRexW(pfx) /* be paranoid -- Intel docs don't require this */
26328 delta = dis_STMXCSR(vbi, pfx, delta, True/*isAvx*/);
26332 if (haveNo66noF2noF3(pfx)
26333 && 0==getVexL(pfx)/*LZ*/
26334 && 0==getRexW(pfx) /* be paranoid -- Intel docs don't require this */
26337 delta = dis_LDMXCSR(vbi, pfx, delta, True/*isAvx*/);
26345 if (haveF2no66noF3(pfx)) {
26347 delta = dis_AVX128_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta,
26355 if (haveF3no66noF2(pfx)) {
26357 delta = dis_AVX128_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta,
26365 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26367 delta = dis_AVX128_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta,
26375 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26377 delta = dis_AVX256_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta,
26384 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26386 delta = dis_AVX128_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta,
26394 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26396 delta = dis_AVX256_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta,
26405 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26407 UInt rG = gregOfRexRM(pfx, modrm);
26408 UInt rV = getVexNvvvv(pfx);
26415 getIReg32(eregOfRexRM(pfx,modrm))) );
26418 nameIReg32( eregOfRexRM(pfx, modrm) ), nameXMMReg(rG) );
26420 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
26439 if (have66noF2noF3(pfx)
26440 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
26442 delta = dis_PEXTRW_128_EregOnly_toG( vbi, pfx, delta,
26452 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26457 UInt rG = gregOfRexRM(pfx,modrm);
26458 UInt rV = getVexNvvvv(pfx);
26461 UInt rE = eregOfRexRM(pfx,modrm);
26468 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
26482 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26487 UInt rG = gregOfRexRM(pfx,modrm);
26488 UInt rV = getVexNvvvv(pfx);
26491 UInt rE = eregOfRexRM(pfx,modrm);
26498 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
26512 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26517 UInt rG = gregOfRexRM(pfx,modrm);
26518 UInt rV = getVexNvvvv(pfx);
26521 UInt rE = eregOfRexRM(pfx,modrm);
26528 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
26542 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26547 UInt rG = gregOfRexRM(pfx,modrm);
26548 UInt rV = getVexNvvvv(pfx);
26551 UInt rE = eregOfRexRM(pfx,modrm);
26558 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 );
26574 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26576 uses_vvvv, vbi, pfx, delta,
26581 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26583 uses_vvvv, vbi, pfx, delta,
26588 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26590 uses_vvvv, vbi, pfx, delta,
26595 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26597 uses_vvvv, vbi, pfx, delta,
26605 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26606 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta,
26613 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26614 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta,
26624 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26625 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta,
26631 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26632 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta,
26641 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26642 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta,
26648 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26649 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta,
26659 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26661 uses_vvvv, vbi, pfx, delta, "vpaddq", Iop_Add64x2 );
26666 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26668 uses_vvvv, vbi, pfx, delta, "vpaddq", Iop_Add64x4 );
26675 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26677 uses_vvvv, vbi, pfx, delta, "vpmullw", Iop_Mul16x8 );
26681 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26683 uses_vvvv, vbi, pfx, delta, "vpmullw", Iop_Mul16x16 );
26693 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
26694 && 0==getRexW(pfx)/*this might be redundant, dunno*/) {
26696 UInt rG = gregOfRexRM(pfx,modrm);
26701 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
26712 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26713 delta = dis_PMOVMSKB_128( vbi, pfx, delta, True/*isAvx*/ );
26717 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26718 delta = dis_PMOVMSKB_256( vbi, pfx, delta );
26725 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26727 uses_vvvv, vbi, pfx, delta, "vpsubusb", Iop_QSub8Ux16 );
26731 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26733 uses_vvvv, vbi, pfx, delta, "vpsubusb", Iop_QSub8Ux32 );
26740 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26742 uses_vvvv, vbi, pfx, delta, "vpsubusw", Iop_QSub16Ux8 );
26746 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26748 uses_vvvv, vbi, pfx, delta, "vpsubusw", Iop_QSub16Ux16 );
26755 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26757 uses_vvvv, vbi, pfx, delta, "vpminub", Iop_Min8Ux16 );
26761 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26763 uses_vvvv, vbi, pfx, delta, "vpminub", Iop_Min8Ux32 );
26771 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26773 uses_vvvv, vbi, pfx, delta, "vpand", Iop_AndV128 );
26778 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26780 uses_vvvv, vbi, pfx, delta, "vpand", Iop_AndV256 );
26787 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26789 uses_vvvv, vbi, pfx, delta, "vpaddusb", Iop_QAdd8Ux16 );
26793 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26795 uses_vvvv, vbi, pfx, delta, "vpaddusb", Iop_QAdd8Ux32 );
26802 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26804 uses_vvvv, vbi, pfx, delta, "vpaddusw", Iop_QAdd16Ux8 );
26808 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26810 uses_vvvv, vbi, pfx, delta, "vpaddusw", Iop_QAdd16Ux16 );
26817 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26819 uses_vvvv, vbi, pfx, delta, "vpmaxub", Iop_Max8Ux16 );
26823 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26825 uses_vvvv, vbi, pfx, delta, "vpmaxub", Iop_Max8Ux32 );
26833 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26835 uses_vvvv, vbi, pfx, delta, "vpandn", Iop_AndV128,
26841 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26843 uses_vvvv, vbi, pfx, delta, "vpandn", Iop_AndV256,
26851 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26853 uses_vvvv, vbi, pfx, delta, "vpavgb", Iop_Avg8Ux16 );
26857 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26859 uses_vvvv, vbi, pfx, delta, "vpavgb", Iop_Avg8Ux32 );
26866 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26867 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta,
26873 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26874 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta,
26883 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26884 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta,
26890 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26891 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta,
26900 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26902 uses_vvvv, vbi, pfx, delta, "vpavgw", Iop_Avg16Ux8 );
26906 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26908 uses_vvvv, vbi, pfx, delta, "vpavgw", Iop_Avg16Ux16 );
26915 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26917 uses_vvvv, vbi, pfx, delta, "vpmulhuw", Iop_MulHi16Ux8 );
26921 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26923 uses_vvvv, vbi, pfx, delta, "vpmulhuw", Iop_MulHi16Ux16 );
26930 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26932 uses_vvvv, vbi, pfx, delta, "vpmulhw", Iop_MulHi16Sx8 );
26936 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26938 uses_vvvv, vbi, pfx, delta, "vpmulhw", Iop_MulHi16Sx16 );
26945 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*128*/) {
26946 delta = dis_CVTDQ2PD_128(vbi, pfx, delta, True/*isAvx*/);
26950 if (haveF3no66noF2(pfx) && 1==getVexL(pfx)/*256*/) {
26951 delta = dis_CVTDQ2PD_256(vbi, pfx, delta);
26955 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26956 delta = dis_CVTxPD2DQ_128(vbi, pfx, delta, True/*isAvx*/,
26961 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26962 delta = dis_CVTxPD2DQ_256(vbi, pfx, delta, True/*r2zero*/);
26966 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26967 delta = dis_CVTxPD2DQ_128(vbi, pfx, delta, True/*isAvx*/,
26972 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26973 delta = dis_CVTxPD2DQ_256(vbi, pfx, delta, False/*!r2zero*/);
26980 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
26982 UInt rG = gregOfRexRM(pfx,modrm);
26984 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
26994 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
26996 UInt rG = gregOfRexRM(pfx,modrm);
26998 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
27011 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27013 uses_vvvv, vbi, pfx, delta, "vpsubsb", Iop_QSub8Sx16 );
27017 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27019 uses_vvvv, vbi, pfx, delta, "vpsubsb", Iop_QSub8Sx32 );
27026 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27028 uses_vvvv, vbi, pfx, delta, "vpsubsw", Iop_QSub16Sx8 );
27032 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27034 uses_vvvv, vbi, pfx, delta, "vpsubsw", Iop_QSub16Sx16 );
27042 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27044 uses_vvvv, vbi, pfx, delta, "vpminsw", Iop_Min16Sx8 );
27049 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27051 uses_vvvv, vbi, pfx, delta, "vpminsw", Iop_Min16Sx16 );
27059 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27061 uses_vvvv, vbi, pfx, delta, "vpor", Iop_OrV128 );
27066 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27068 uses_vvvv, vbi, pfx, delta, "vpor", Iop_OrV256 );
27075 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27077 uses_vvvv, vbi, pfx, delta, "vpaddsb", Iop_QAdd8Sx16 );
27081 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27083 uses_vvvv, vbi, pfx, delta, "vpaddsb", Iop_QAdd8Sx32 );
27090 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27092 uses_vvvv, vbi, pfx, delta, "vpaddsw", Iop_QAdd16Sx8 );
27096 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27098 uses_vvvv, vbi, pfx, delta, "vpaddsw", Iop_QAdd16Sx16 );
27106 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27108 uses_vvvv, vbi, pfx, delta, "vpmaxsw", Iop_Max16Sx8 );
27113 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27115 uses_vvvv, vbi, pfx, delta, "vpmaxsw", Iop_Max16Sx16 );
27123 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27125 uses_vvvv, vbi, pfx, delta, "vpxor", Iop_XorV128 );
27130 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27132 uses_vvvv, vbi, pfx, delta, "vpxor", Iop_XorV256 );
27139 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27141 UInt rD = gregOfRexRM(pfx, modrm);
27144 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
27152 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27154 UInt rD = gregOfRexRM(pfx, modrm);
27157 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
27168 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27169 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta,
27176 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27177 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta,
27187 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27188 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta,
27194 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27195 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta,
27204 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27205 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta,
27211 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27212 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta,
27221 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27223 uses_vvvv, vbi, pfx, delta,
27228 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27230 uses_vvvv, vbi, pfx, delta,
27238 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27240 uses_vvvv, vbi, pfx, delta,
27245 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27247 uses_vvvv, vbi, pfx, delta,
27255 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27257 uses_vvvv, vbi, pfx, delta,
27262 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27264 uses_vvvv, vbi, pfx, delta,
27272 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
27274 delta = dis_MASKMOVDQU( vbi, pfx, delta, True/*isAvx*/ );
27282 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27284 uses_vvvv, vbi, pfx, delta, "vpsubb", Iop_Sub8x16 );
27289 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27291 uses_vvvv, vbi, pfx, delta, "vpsubb", Iop_Sub8x32 );
27299 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27301 uses_vvvv, vbi, pfx, delta, "vpsubw", Iop_Sub16x8 );
27306 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27308 uses_vvvv, vbi, pfx, delta, "vpsubw", Iop_Sub16x16 );
27316 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27318 uses_vvvv, vbi, pfx, delta, "vpsubd", Iop_Sub32x4 );
27323 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27325 uses_vvvv, vbi, pfx, delta, "vpsubd", Iop_Sub32x8 );
27333 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27335 uses_vvvv, vbi, pfx, delta, "vpsubq", Iop_Sub64x2 );
27340 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27342 uses_vvvv, vbi, pfx, delta, "vpsubq", Iop_Sub64x4 );
27350 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27352 uses_vvvv, vbi, pfx, delta, "vpaddb", Iop_Add8x16 );
27357 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27359 uses_vvvv, vbi, pfx, delta, "vpaddb", Iop_Add8x32 );
27367 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27369 uses_vvvv, vbi, pfx, delta, "vpaddw", Iop_Add16x8 );
27374 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27376 uses_vvvv, vbi, pfx, delta, "vpaddw", Iop_Add16x16 );
27384 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27386 uses_vvvv, vbi, pfx, delta, "vpaddd", Iop_Add32x4 );
27391 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27393 uses_vvvv, vbi, pfx, delta, "vpaddd", Iop_Add32x8 );
27492 const VexAbiInfo* vbi, Prefix pfx, Long delta,
27497 Int size = getRexW(pfx) ? 8 : 4;
27503 assign( amt, getIRegV(size,pfx) );
27505 assign( src, getIRegE(size,pfx,rm) );
27506 DIP("%s %s,%s,%s\n", opname, nameIRegV(size,pfx),
27507 nameIRegE(size,pfx,rm), nameIRegG(size,pfx,rm));
27510 IRTemp addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
27512 DIP("%s %s,%s,%s\n", opname, nameIRegV(size,pfx), dis_buf,
27513 nameIRegG(size,pfx,rm));
27517 putIRegG( size, pfx, rm,
27527 static Long dis_FMA ( const VexAbiInfo* vbi, Prefix pfx, Long delta, UChar opc )
27530 UInt rG = gregOfRexRM(pfx, modrm);
27531 UInt rV = getVexNvvvv(pfx);
27533 IRType ty = getRexW(pfx) ? Ity_F64 : Ity_F32;
27534 IRType vty = scalar ? ty : getVexL(pfx) ? Ity_V256 : Ity_V128;
27611 UInt rE = eregOfRexRM(pfx, modrm);
27628 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
27716 Prefix pfx, Long delta,
27724 UInt rG = gregOfRexRM(pfx,modrm);
27725 UInt rV = getVexNvvvv(pfx);
27727 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
27786 Prefix pfx, Long delta,
27794 UInt rG = gregOfRexRM(pfx,modrm);
27795 UInt rV = getVexNvvvv(pfx);
27800 addr = disAVSIBMode ( &alen, vbi, pfx, delta, dis_buf, &rI,
27852 addr_expr = handleAddrOverrides(vbi, pfx, addr_expr);
27889 Prefix pfx, Int sz, Long deltaIN
27905 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27907 uses_vvvv, vbi, pfx, delta, "vpshufb", math_PSHUFB_XMM );
27912 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27914 uses_vvvv, vbi, pfx, delta, "vpshufb", math_PSHUFB_YMM );
27925 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27926 delta = dis_PHADD_128( vbi, pfx, delta, True/*isAvx*/, opc );
27933 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27934 delta = dis_PHADD_256( vbi, pfx, delta, opc );
27942 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27944 uses_vvvv, vbi, pfx, delta, "vpmaddubsw",
27949 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27951 uses_vvvv, vbi, pfx, delta, "vpmaddubsw",
27963 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27964 delta = dis_PHADD_128( vbi, pfx, delta, True/*isAvx*/, opc );
27971 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
27972 delta = dis_PHADD_256( vbi, pfx, delta, opc );
27984 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
27992 UInt rG = gregOfRexRM(pfx,modrm);
27993 UInt rV = getVexNvvvv(pfx);
28005 UInt rE = eregOfRexRM(pfx,modrm);
28011 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
28034 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28043 UInt rG = gregOfRexRM(pfx,modrm);
28044 UInt rV = getVexNvvvv(pfx);
28056 UInt rE = eregOfRexRM(pfx,modrm);
28062 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
28092 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28098 UInt rG = gregOfRexRM(pfx,modrm);
28099 UInt rV = getVexNvvvv(pfx);
28104 UInt rE = eregOfRexRM(pfx,modrm);
28110 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
28131 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28137 UInt rG = gregOfRexRM(pfx,modrm);
28138 UInt rV = getVexNvvvv(pfx);
28143 UInt rE = eregOfRexRM(pfx,modrm);
28149 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
28177 if (have66noF2noF3(pfx)
28178 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
28180 UInt rG = gregOfRexRM(pfx, modrm);
28181 UInt rV = getVexNvvvv(pfx);
28184 UInt rE = eregOfRexRM(pfx, modrm);
28190 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
28204 if (have66noF2noF3(pfx)
28205 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) {
28207 UInt rG = gregOfRexRM(pfx, modrm);
28208 UInt rV = getVexNvvvv(pfx);
28211 UInt rE = eregOfRexRM(pfx, modrm);
28217 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
28234 if (have66noF2noF3(pfx)
28235 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
28237 UInt rG = gregOfRexRM(pfx, modrm);
28238 UInt rV = getVexNvvvv(pfx);
28241 UInt rE = eregOfRexRM(pfx, modrm);
28247 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
28261 if (have66noF2noF3(pfx)
28262 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) {
28264 UInt rG = gregOfRexRM(pfx, modrm);
28265 UInt rV = getVexNvvvv(pfx);
28268 UInt rE = eregOfRexRM(pfx, modrm);
28274 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
28291 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28292 delta = dis_xTESTy_128( vbi, pfx, delta, True/*isAvx*/, 32 );
28296 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28297 delta = dis_xTESTy_256( vbi, pfx, delta, 32 );
28304 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28305 delta = dis_xTESTy_128( vbi, pfx, delta, True/*isAvx*/, 64 );
28309 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28310 delta = dis_xTESTy_256( vbi, pfx, delta, 64 );
28317 if (have66noF2noF3(pfx)
28318 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) {
28320 uses_vvvv, vbi, pfx, delta, "vpermps", math_VPERMD );
28327 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28328 delta = dis_xTESTy_128( vbi, pfx, delta, True/*isAvx*/, 0 );
28332 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28333 delta = dis_xTESTy_256( vbi, pfx, delta, 0 );
28340 if (have66noF2noF3(pfx)
28341 && 0==getVexL(pfx)/*128*/
28344 UInt rG = gregOfRexRM(pfx, modrm);
28345 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
28357 if (have66noF2noF3(pfx)
28358 && 1==getVexL(pfx)/*256*/
28361 UInt rG = gregOfRexRM(pfx, modrm);
28362 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
28375 if (have66noF2noF3(pfx)
28376 && 0==getVexL(pfx)/*128*/
28379 UInt rG = gregOfRexRM(pfx, modrm);
28380 UInt rE = eregOfRexRM(pfx, modrm);
28392 if (have66noF2noF3(pfx)
28393 && 1==getVexL(pfx)/*256*/
28396 UInt rG = gregOfRexRM(pfx, modrm);
28397 UInt rE = eregOfRexRM(pfx, modrm);
28413 if (have66noF2noF3(pfx)
28414 && 1==getVexL(pfx)/*256*/
28417 UInt rG = gregOfRexRM(pfx, modrm);
28418 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
28429 if (have66noF2noF3(pfx)
28430 && 1==getVexL(pfx)/*256*/
28433 UInt rG = gregOfRexRM(pfx, modrm);
28434 UInt rE = eregOfRexRM(pfx, modrm);
28448 if (have66noF2noF3(pfx)
28449 && 1==getVexL(pfx)/*256*/
28452 UInt rG = gregOfRexRM(pfx, modrm);
28453 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
28465 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28467 uses_vvvv, vbi, pfx, delta,
28472 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28474 uses_vvvv, vbi, pfx, delta,
28482 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28484 uses_vvvv, vbi, pfx, delta,
28489 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28491 uses_vvvv, vbi, pfx, delta,
28499 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28501 uses_vvvv, vbi, pfx, delta,
28506 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28508 uses_vvvv, vbi, pfx, delta,
28517 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28518 delta = dis_PMOVxXBW_128( vbi, pfx, delta,
28524 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28525 delta = dis_PMOVxXBW_256( vbi, pfx, delta, False/*!xIsZ*/ );
28533 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28534 delta = dis_PMOVxXBD_128( vbi, pfx, delta,
28540 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28541 delta = dis_PMOVxXBD_256( vbi, pfx, delta, False/*!xIsZ*/ );
28549 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28550 delta = dis_PMOVSXBQ_128( vbi, pfx, delta, True/*isAvx*/ );
28555 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28556 delta = dis_PMOVSXBQ_256( vbi, pfx, delta );
28563 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28564 delta = dis_PMOVxXWD_128( vbi, pfx, delta,
28569 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28570 delta = dis_PMOVxXWD_256( vbi, pfx, delta, False/*!xIsZ*/ );
28577 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28578 delta = dis_PMOVSXWQ_128( vbi, pfx, delta, True/*isAvx*/ );
28582 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28583 delta = dis_PMOVSXWQ_256( vbi, pfx, delta );
28590 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28591 delta = dis_PMOVxXDQ_128( vbi, pfx, delta,
28596 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28597 delta = dis_PMOVxXDQ_256( vbi, pfx, delta, False/*!xIsZ*/ );
28604 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28606 uses_vvvv, vbi, pfx, delta,
28611 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28613 uses_vvvv, vbi, pfx, delta,
28622 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28624 uses_vvvv, vbi, pfx, delta, "vpcmpeqq", Iop_CmpEQ64x2 );
28629 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28631 uses_vvvv, vbi, pfx, delta, "vpcmpeqq", Iop_CmpEQ64x4 );
28638 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
28641 UInt rD = gregOfRexRM(pfx, modrm);
28643 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
28652 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
28655 UInt rD = gregOfRexRM(pfx, modrm);
28657 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
28670 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28672 uses_vvvv, vbi, pfx, delta, "vpackusdw",
28679 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28681 uses_vvvv, vbi, pfx, delta, "vpackusdw",
28689 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
28690 && 0==getRexW(pfx)/*W0*/
28692 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovps",
28697 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
28698 && 0==getRexW(pfx)/*W0*/
28700 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovps",
28708 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
28709 && 0==getRexW(pfx)/*W0*/
28711 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovpd",
28716 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
28717 && 0==getRexW(pfx)/*W0*/
28719 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovpd",
28727 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
28728 && 0==getRexW(pfx)/*W0*/
28730 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovps",
28735 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
28736 && 0==getRexW(pfx)/*W0*/
28738 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovps",
28746 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
28747 && 0==getRexW(pfx)/*W0*/
28749 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovpd",
28754 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
28755 && 0==getRexW(pfx)/*W0*/
28757 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovpd",
28766 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28767 delta = dis_PMOVxXBW_128( vbi, pfx, delta,
28773 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28774 delta = dis_PMOVxXBW_256( vbi, pfx, delta, True/*xIsZ*/ );
28782 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28783 delta = dis_PMOVxXBD_128( vbi, pfx, delta,
28789 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28790 delta = dis_PMOVxXBD_256( vbi, pfx, delta, True/*xIsZ*/ );
28798 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28799 delta = dis_PMOVZXBQ_128( vbi, pfx, delta, True/*isAvx*/ );
28804 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28805 delta = dis_PMOVZXBQ_256( vbi, pfx, delta );
28813 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28814 delta = dis_PMOVxXWD_128( vbi, pfx, delta,
28820 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28821 delta = dis_PMOVxXWD_256( vbi, pfx, delta, True/*xIsZ*/ );
28828 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28829 delta = dis_PMOVZXWQ_128( vbi, pfx, delta, True/*isAvx*/ );
28833 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28834 delta = dis_PMOVZXWQ_256( vbi, pfx, delta );
28841 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28842 delta = dis_PMOVxXDQ_128( vbi, pfx, delta,
28847 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28848 delta = dis_PMOVxXDQ_256( vbi, pfx, delta, True/*xIsZ*/ );
28855 if (have66noF2noF3(pfx)
28856 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) {
28858 uses_vvvv, vbi, pfx, delta, "vpermd", math_VPERMD );
28866 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28868 uses_vvvv, vbi, pfx, delta, "vpcmpgtq", Iop_CmpGT64Sx2 );
28873 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28875 uses_vvvv, vbi, pfx, delta, "vpcmpgtq", Iop_CmpGT64Sx4 );
28883 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28885 uses_vvvv, vbi, pfx, delta, "vpminsb", Iop_Min8Sx16 );
28890 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28892 uses_vvvv, vbi, pfx, delta, "vpminsb", Iop_Min8Sx32 );
28900 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28902 uses_vvvv, vbi, pfx, delta, "vpminsd", Iop_Min32Sx4 );
28907 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28909 uses_vvvv, vbi, pfx, delta, "vpminsd", Iop_Min32Sx8 );
28917 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28919 uses_vvvv, vbi, pfx, delta, "vpminuw", Iop_Min16Ux8 );
28924 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28926 uses_vvvv, vbi, pfx, delta, "vpminuw", Iop_Min16Ux16 );
28934 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28936 uses_vvvv, vbi, pfx, delta, "vpminud", Iop_Min32Ux4 );
28941 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28943 uses_vvvv, vbi, pfx, delta, "vpminud", Iop_Min32Ux8 );
28951 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28953 uses_vvvv, vbi, pfx, delta, "vpmaxsb", Iop_Max8Sx16 );
28958 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28960 uses_vvvv, vbi, pfx, delta, "vpmaxsb", Iop_Max8Sx32 );
28968 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28970 uses_vvvv, vbi, pfx, delta, "vpmaxsd", Iop_Max32Sx4 );
28975 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28977 uses_vvvv, vbi, pfx, delta, "vpmaxsd", Iop_Max32Sx8 );
28985 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
28987 uses_vvvv, vbi, pfx, delta, "vpmaxuw", Iop_Max16Ux8 );
28992 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
28994 uses_vvvv, vbi, pfx, delta, "vpmaxuw", Iop_Max16Ux16 );
29002 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
29004 uses_vvvv, vbi, pfx, delta, "vpmaxud", Iop_Max32Ux4 );
29009 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
29011 uses_vvvv, vbi, pfx, delta, "vpmaxud", Iop_Max32Ux8 );
29019 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
29021 uses_vvvv, vbi, pfx, delta, "vpmulld", Iop_Mul32x4 );
29026 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
29028 uses_vvvv, vbi, pfx, delta, "vpmulld", Iop_Mul32x8 );
29035 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
29036 delta = dis_PHMINPOSUW_128( vbi, pfx, delta, True/*isAvx*/ );
29044 if (have66noF2noF3(pfx) && 0==getRexW(pfx)/*W0*/) {
29045 delta = dis_AVX_var_shiftV_byE( vbi, pfx, delta, "vpsrlvd",
29046 Iop_Shr32, 1==getVexL(pfx) );
29052 if (have66noF2noF3(pfx) && 1==getRexW(pfx)/*W1*/) {
29053 delta = dis_AVX_var_shiftV_byE( vbi, pfx, delta, "vpsrlvq",
29054 Iop_Shr64, 1==getVexL(pfx) );
29063 if (have66noF2noF3(pfx) && 0==getRexW(pfx)/*W0*/) {
29064 delta = dis_AVX_var_shiftV_byE( vbi, pfx, delta, "vpsravd",
29065 Iop_Sar32, 1==getVexL(pfx) );
29074 if (have66noF2noF3(pfx) && 0==getRexW(pfx)/*W0*/) {
29075 delta = dis_AVX_var_shiftV_byE( vbi, pfx, delta, "vpsllvd",
29076 Iop_Shl32, 1==getVexL(pfx) );
29082 if (have66noF2noF3(pfx) && 1==getRexW(pfx)/*W1*/) {
29083 delta = dis_AVX_var_shiftV_byE( vbi, pfx, delta, "vpsllvq",
29084 Iop_Shl64, 1==getVexL(pfx) );
29092 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29093 && 0==getRexW(pfx)/*W0*/) {
29095 UInt rG = gregOfRexRM(pfx, modrm);
29098 UInt rE = eregOfRexRM(pfx, modrm);
29103 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
29115 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29116 && 0==getRexW(pfx)/*W0*/) {
29118 UInt rG = gregOfRexRM(pfx, modrm);
29121 UInt rE = eregOfRexRM(pfx, modrm);
29126 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
29142 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29143 && 0==getRexW(pfx)/*W0*/) {
29145 UInt rG = gregOfRexRM(pfx, modrm);
29148 UInt rE = eregOfRexRM(pfx, modrm);
29153 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
29163 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29164 && 0==getRexW(pfx)/*W0*/) {
29166 UInt rG = gregOfRexRM(pfx, modrm);
29169 UInt rE = eregOfRexRM(pfx, modrm);
29174 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
29188 if (have66noF2noF3(pfx)
29189 && 1==getVexL(pfx)/*256*/
29192 UInt rG = gregOfRexRM(pfx, modrm);
29193 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
29205 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29206 && 0==getRexW(pfx)/*W0*/) {
29208 UInt rG = gregOfRexRM(pfx, modrm);
29211 UInt rE = eregOfRexRM(pfx, modrm);
29216 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
29232 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29233 && 0==getRexW(pfx)/*W0*/) {
29235 UInt rG = gregOfRexRM(pfx, modrm);
29238 UInt rE = eregOfRexRM(pfx, modrm);
29243 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
29263 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29264 && 0==getRexW(pfx)/*W0*/) {
29266 UInt rG = gregOfRexRM(pfx, modrm);
29269 UInt rE = eregOfRexRM(pfx, modrm);
29274 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
29288 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29289 && 0==getRexW(pfx)/*W0*/) {
29291 UInt rG = gregOfRexRM(pfx, modrm);
29294 UInt rE = eregOfRexRM(pfx, modrm);
29299 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 );
29317 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29318 && 0==getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29319 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovd",
29324 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29325 && 0==getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29326 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovd",
29331 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29332 && 1==getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29333 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovq",
29338 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29339 && 1==getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29340 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovq",
29348 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29349 && 0==getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29350 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovd",
29355 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29356 && 0==getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29357 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovd",
29362 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29363 && 1==getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29364 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovq",
29369 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29370 && 1==getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29371 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovq",
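The !epartIsReg(getUChar(delta)) guards above reject the register form of the ModRM byte, since the masked-move instructions only exist with a memory operand. A minimal stand-in for that test, assuming the standard x86 ModRM layout (mod field in bits 7:6, value 3 meaning the r/m operand names a register); the helper here is illustrative, not the decoder's own:

   #include <stdbool.h>
   #include <stdint.h>

   /* True when the ModRM byte's r/m field names a register (mod == 3),
      i.e. the form the vmaskmov decodes above must reject. */
   static bool modrm_names_register ( uint8_t modrm )
   {
      return (modrm & 0xC0) == 0xC0;
   }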
29379 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29380 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29382 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherdd",
29388 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29389 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29391 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherdd",
29397 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29398 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29400 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherdq",
29406 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29407 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29409 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherdq",
29418 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29419 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29421 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherqd",
29427 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29428 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29430 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherqd",
29436 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29437 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29439 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherqq",
29445 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29446 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29448 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherqq",
29457 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29458 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29460 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherdps",
29466 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29467 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29469 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherdps",
29475 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29476 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29478 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherdpd",
29484 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29485 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29487 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherdpd",
29496 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29497 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29499 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherqps",
29505 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29506 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) {
29508 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherqps",
29514 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/
29515 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29517 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherqpd",
29523 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
29524 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) {
29526 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherqpd",
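Across the gather cases above, the same two prefix bits pick the variant: VEX.L selects XMM versus YMM width and VEX.W selects 32- versus 64-bit data elements, with the register form rejected because gathers are memory-only. A small illustrative mapping, not decoder code:

   /* Shape of a VGATHER/VPGATHER variant from the L and W bits. */
   static void gather_shape ( int vexL, int rexW,
                              int* vectorBits, int* elemBits )
   {
      *vectorBits = vexL ? 256 : 128;   /* VEX.L: XMM vs YMM            */
      *elemBits   = rexW ? 64  : 32;    /* VEX.W: dword vs qword data   */
   }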
29632 if (have66noF2noF3(pfx)) {
29633 delta = dis_FMA( vbi, pfx, delta, opc );
29649 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
29650 delta = dis_AESx( vbi, pfx, delta, True/*isAvx*/, opc );
29659 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) {
29660 Int size = getRexW(pfx) ? 8 : 4;
29667 assign( src1, getIRegV(size,pfx) );
29669 assign( src2, getIRegE(size,pfx,rm) );
29670 DIP("andn %s,%s,%s\n", nameIRegE(size,pfx,rm),
29671 nameIRegV(size,pfx), nameIRegG(size,pfx,rm));
29674 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
29676 DIP("andn %s,%s,%s\n", dis_buf, nameIRegV(size,pfx),
29677 nameIRegG(size,pfx,rm));
29684 putIRegG( size, pfx, rm, mkexpr(dst) );
29698 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/
29699 && !haveREX(pfx) && gregLO3ofRM(getUChar(delta)) == 3) {
29700 Int size = getRexW(pfx) ? 8 : 4;
29707 assign( src, getIRegE(size,pfx,rm) );
29708 DIP("blsi %s,%s\n", nameIRegE(size,pfx,rm),
29709 nameIRegV(size,pfx));
29712 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
29714 DIP("blsi %s,%s\n", dis_buf, nameIRegV(size,pfx));
29721 putIRegV( size, pfx, mkexpr(dst) );
29732 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/
29733 && !haveREX(pfx) && gregLO3ofRM(getUChar(delta)) == 2) {
29734 Int size = getRexW(pfx) ? 8 : 4;
29741 assign( src, getIRegE(size,pfx,rm) );
29742 DIP("blsmsk %s,%s\n", nameIRegE(size,pfx,rm),
29743 nameIRegV(size,pfx));
29746 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
29748 DIP("blsmsk %s,%s\n", dis_buf, nameIRegV(size,pfx));
29755 putIRegV( size, pfx, mkexpr(dst) );
29766 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/
29767 && !haveREX(pfx) && gregLO3ofRM(getUChar(delta)) == 1) {
29768 Int size = getRexW(pfx) ? 8 : 4;
29775 assign( src, getIRegE(size,pfx,rm) );
29776 DIP("blsr %s,%s\n", nameIRegE(size,pfx,rm),
29777 nameIRegV(size,pfx));
29780 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
29782 DIP("blsr %s,%s\n", dis_buf, nameIRegV(size,pfx));
29789 putIRegV( size, pfx, mkexpr(dst) );
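The three guarded cases above dispatch on the ModRM reg field (/3 for BLSI, /2 for BLSMSK, /1 for BLSR). Their BMI1 definitions have simple closed forms; a hedged sketch of the value each computes, flag effects aside:

   #include <stdint.h>

   static uint64_t blsi_ref   ( uint64_t x ) { return x & (0 - x); } /* isolate lowest set bit    */
   static uint64_t blsmsk_ref ( uint64_t x ) { return x ^ (x - 1); } /* mask up to lowest set bit */
   static uint64_t blsr_ref   ( uint64_t x ) { return x & (x - 1); } /* clear lowest set bit      */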
29803 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) {
29804 Int size = getRexW(pfx) ? 8 : 4;
29813 assign( src2, getIRegV(size,pfx) );
29815 assign( src1, getIRegE(size,pfx,rm) );
29816 DIP("bzhi %s,%s,%s\n", nameIRegV(size,pfx),
29817 nameIRegE(size,pfx,rm), nameIRegG(size,pfx,rm));
29820 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
29822 DIP("bzhi %s,%s,%s\n", nameIRegV(size,pfx), dis_buf,
29823 nameIRegG(size,pfx,rm));
29858 putIRegG( size, pfx, rm, mkexpr(dst) );
29869 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) {
29870 Int size = getRexW(pfx) ? 8 : 4;
29876 assign( src, getIRegV(size,pfx) );
29878 assign( mask, getIRegE(size,pfx,rm) );
29879 DIP("pdep %s,%s,%s\n", nameIRegE(size,pfx,rm),
29880 nameIRegV(size,pfx), nameIRegG(size,pfx,rm));
29883 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
29885 DIP("pdep %s,%s,%s\n", dis_buf, nameIRegV(size,pfx),
29886 nameIRegG(size,pfx,rm));
29892 pfx, rm,
29902 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) {
29903 Int size = getRexW(pfx) ? 8 : 4;
29909 assign( src, getIRegV(size,pfx) );
29911 assign( mask, getIRegE(size,pfx,rm) );
29912 DIP("pext %s,%s,%s\n", nameIRegE(size,pfx,rm),
29913 nameIRegV(size,pfx), nameIRegG(size,pfx,rm));
29916 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
29918 DIP("pext %s,%s,%s\n", dis_buf, nameIRegV(size,pfx),
29919 nameIRegG(size,pfx,rm));
29929 putIRegG( size, pfx, rm,
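PDEP and PEXT, decoded above under the F2 and F3 prefixes respectively, scatter and gather bits under a mask. Hedged reference loops for the BMI2 definitions (illustrative C, not the IR the decoder actually builds):

   #include <stdint.h>

   /* PDEP: deposit the low bits of src into the bit positions set in mask. */
   static uint64_t pdep_ref ( uint64_t src, uint64_t mask )
   {
      uint64_t res = 0;
      for (uint64_t bit = 1; mask != 0; bit <<= 1) {
         uint64_t lowest = mask & (0 - mask);
         if (src & bit) res |= lowest;
         mask &= mask - 1;
      }
      return res;
   }

   /* PEXT: extract the bits of src selected by mask into the low bits. */
   static uint64_t pext_ref ( uint64_t src, uint64_t mask )
   {
      uint64_t res = 0;
      for (uint64_t bit = 1; mask != 0; bit <<= 1) {
         uint64_t lowest = mask & (0 - mask);
         if (src & lowest) res |= bit;
         mask &= mask - 1;
      }
      return res;
   }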
29942 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) {
29943 Int size = getRexW(pfx) ? 8 : 4;
29952 assign( src2, getIRegE(size,pfx,rm) );
29953 DIP("mulx %s,%s,%s\n", nameIRegE(size,pfx,rm),
29954 nameIRegV(size,pfx), nameIRegG(size,pfx,rm));
29957 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
29959 DIP("mulx %s,%s,%s\n", dis_buf, nameIRegV(size,pfx),
29960 nameIRegG(size,pfx,rm));
29966 putIRegV( size, pfx,
29968 putIRegG( size, pfx, rm,
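MULX, decoded above, is an unsigned widening multiply of r/m by the implicit rDX source that leaves the flags alone; per the BMI2 definition the vvvv register (putIRegV) receives the low half and the ModRM reg register (putIRegG) the high half. A sketch of the 64-bit form, assuming a compiler that provides __uint128_t:

   #include <stdint.h>

   static void mulx64_ref ( uint64_t rdx, uint64_t src2,
                            uint64_t* lo /* -> vvvv reg  */,
                            uint64_t* hi /* -> ModRM reg */ )
   {
      __uint128_t p = (__uint128_t)rdx * (__uint128_t)src2;
      *lo = (uint64_t)p;
      *hi = (uint64_t)(p >> 64);
   }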
29980 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) {
29981 delta = dis_SHIFTX( uses_vvvv, vbi, pfx, delta, "sarx", Iop_Sar8 );
29986 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) {
29987 delta = dis_SHIFTX( uses_vvvv, vbi, pfx, delta, "shlx", Iop_Shl8 );
29992 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) {
29993 delta = dis_SHIFTX( uses_vvvv, vbi, pfx, delta, "shrx", Iop_Shr8 );
29998 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) {
29999 Int size = getRexW(pfx) ? 8 : 4;
30009 assign( src2, getIRegV(size,pfx) );
30011 assign( src1, getIRegE(size,pfx,rm) );
30012 DIP("bextr %s,%s,%s\n", nameIRegV(size,pfx),
30013 nameIRegE(size,pfx,rm), nameIRegG(size,pfx,rm));
30016 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
30018 DIP("bextr %s,%s,%s\n", nameIRegV(size,pfx), dis_buf,
30019 nameIRegG(size,pfx,rm));
30066 putIRegG( size, pfx, rm, mkexpr(dst) );
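BEXTR, decoded just above, and BZHI, decoded a little earlier in this run, both strip high bits under control of a second operand. A hedged 64-bit sketch of their value semantics per the usual BMI1/BMI2 definitions, flag effects omitted:

   #include <stdint.h>

   /* BZHI: zero the bits at index n and above; n saturates at the width. */
   static uint64_t bzhi64_ref ( uint64_t src, uint64_t ctl )
   {
      unsigned n = ctl & 0xFF;
      if (n >= 64) return src;
      return src & (((uint64_t)1 << n) - 1);
   }

   /* BEXTR: extract 'len' bits starting at 'start', both taken from the
      control register (start in bits 7:0, len in bits 15:8). */
   static uint64_t bextr64_ref ( uint64_t src, uint64_t ctl )
   {
      unsigned start = ctl & 0xFF;
      unsigned len   = (ctl >> 8) & 0xFF;
      if (start >= 64 || len == 0) return 0;
      src >>= start;
      if (len < 64) src &= ((uint64_t)1 << len) - 1;
      return src;
   }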
30123 Prefix pfx, Int sz, Long deltaIN
30140 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/
30141 && 1==getRexW(pfx)/*W1*/) {
30144 UInt rG = gregOfRexRM(pfx, modrm);
30148 UInt rE = eregOfRexRM(pfx, modrm);
30155 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30179 if (have66noF2noF3(pfx)
30180 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
30183 UInt rG = gregOfRexRM(pfx, modrm);
30184 UInt rV = getVexNvvvv(pfx);
30191 UInt rE = eregOfRexRM(pfx, modrm);
30198 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30219 if (have66noF2noF3(pfx)
30220 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) {
30223 UInt rG = gregOfRexRM(pfx, modrm);
30224 UInt rV = getVexNvvvv(pfx);
30231 UInt rE = eregOfRexRM(pfx, modrm);
30238 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30263 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
30266 UInt rG = gregOfRexRM(pfx, modrm);
30269 UInt rE = eregOfRexRM(pfx, modrm);
30276 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30293 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
30296 UInt rG = gregOfRexRM(pfx, modrm);
30299 UInt rE = eregOfRexRM(pfx, modrm);
30306 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30321 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
30324 UInt rG = gregOfRexRM(pfx, modrm);
30327 UInt rE = eregOfRexRM(pfx, modrm);
30334 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30354 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
30357 UInt rG = gregOfRexRM(pfx, modrm);
30360 UInt rE = eregOfRexRM(pfx, modrm);
30367 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30391 if (have66noF2noF3(pfx)
30392 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) {
30395 UInt rG = gregOfRexRM(pfx, modrm);
30396 UInt rV = getVexNvvvv(pfx);
30404 UInt rE = eregOfRexRM(pfx, modrm);
30412 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30438 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
30440 UInt rG = gregOfRexRM(pfx, modrm);
30452 UInt rE = eregOfRexRM(pfx, modrm);
30459 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30486 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
30488 UInt rG = gregOfRexRM(pfx, modrm);
30504 UInt rE = eregOfRexRM(pfx, modrm);
30511 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30543 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
30545 UInt rG = gregOfRexRM(pfx, modrm);
30555 UInt rE = eregOfRexRM(pfx, modrm);
30562 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30587 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
30589 UInt rG = gregOfRexRM(pfx, modrm);
30601 UInt rE = eregOfRexRM(pfx, modrm);
30608 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30639 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
30641 UInt rG = gregOfRexRM(pfx, modrm);
30642 UInt rV = getVexNvvvv(pfx);
30649 UInt rE = eregOfRexRM(pfx, modrm);
30659 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30694 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
30697 UInt rG = gregOfRexRM(pfx, modrm);
30698 UInt rV = getVexNvvvv(pfx);
30703 UInt rE = eregOfRexRM(pfx, modrm);
30710 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30725 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
30728 UInt rG = gregOfRexRM(pfx, modrm);
30729 UInt rV = getVexNvvvv(pfx);
30734 UInt rE = eregOfRexRM(pfx, modrm);
30741 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30759 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
30762 UInt rG = gregOfRexRM(pfx, modrm);
30763 UInt rV = getVexNvvvv(pfx);
30768 UInt rE = eregOfRexRM(pfx, modrm);
30775 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30790 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
30793 UInt rG = gregOfRexRM(pfx, modrm);
30794 UInt rV = getVexNvvvv(pfx);
30799 UInt rE = eregOfRexRM(pfx, modrm);
30806 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30824 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
30827 UInt rG = gregOfRexRM(pfx, modrm);
30828 UInt rV = getVexNvvvv(pfx);
30833 UInt rE = eregOfRexRM(pfx, modrm);
30840 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30855 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
30858 UInt rG = gregOfRexRM(pfx, modrm);
30859 UInt rV = getVexNvvvv(pfx);
30866 UInt rE = eregOfRexRM(pfx, modrm);
30873 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30894 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
30896 UInt rG = gregOfRexRM(pfx, modrm);
30897 UInt rV = getVexNvvvv(pfx);
30905 UInt rE = eregOfRexRM(pfx, modrm);
30912 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30927 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
30929 UInt rG = gregOfRexRM(pfx, modrm);
30930 UInt rV = getVexNvvvv(pfx);
30940 UInt rE = eregOfRexRM(pfx, modrm);
30947 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
30968 if (have66noF2noF3(pfx)
30969 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
30970 delta = dis_PEXTRB_128_GtoE( vbi, pfx, delta, True/*isAvx*/ );
30978 if (have66noF2noF3(pfx)
30979 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
30980 delta = dis_PEXTRW( vbi, pfx, delta, True/*isAvx*/ );
30988 if (have66noF2noF3(pfx)
30989 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
30990 delta = dis_PEXTRD( vbi, pfx, delta, True/*isAvx*/ );
30994 if (have66noF2noF3(pfx)
30995 && 0==getVexL(pfx)/*128*/ && 1==getRexW(pfx)/*W1*/) {
30996 delta = dis_PEXTRQ( vbi, pfx, delta, True/*isAvx*/ );
31003 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31004 delta = dis_EXTRACTPS( vbi, pfx, delta, True/*isAvx*/ );
31013 if (have66noF2noF3(pfx)
31014 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) {
31017 UInt rG = gregOfRexRM(pfx, modrm);
31018 UInt rV = getVexNvvvv(pfx);
31021 UInt rE = eregOfRexRM(pfx, modrm);
31028 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31048 if (have66noF2noF3(pfx)
31049 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) {
31052 UInt rS = gregOfRexRM(pfx, modrm);
31055 UInt rD = eregOfRexRM(pfx, modrm);
31063 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31079 if (have66noF2noF3(pfx)
31080 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
31082 UInt rG = gregOfRexRM(pfx, modrm);
31083 UInt rV = getVexNvvvv(pfx);
31088 UInt rE = eregOfRexRM(pfx,modrm);
31095 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31115 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31117 UInt rG = gregOfRexRM(pfx, modrm);
31118 UInt rV = getVexNvvvv(pfx);
31124 UInt rE = eregOfRexRM(pfx, modrm);
31135 pfx, delta, dis_buf, 1 );
31154 if (have66noF2noF3(pfx)
31155 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) {
31157 UInt rG = gregOfRexRM(pfx, modrm);
31158 UInt rV = getVexNvvvv(pfx);
31163 UInt rE = eregOfRexRM(pfx,modrm);
31170 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31186 if (have66noF2noF3(pfx)
31187 && 0==getVexL(pfx)/*128*/ && 1==getRexW(pfx)/*W1*/) {
31189 UInt rG = gregOfRexRM(pfx, modrm);
31190 UInt rV = getVexNvvvv(pfx);
31195 UInt rE = eregOfRexRM(pfx,modrm);
31202 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31223 if (have66noF2noF3(pfx)
31224 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) {
31227 UInt rG = gregOfRexRM(pfx, modrm);
31228 UInt rV = getVexNvvvv(pfx);
31231 UInt rE = eregOfRexRM(pfx, modrm);
31238 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31258 if (have66noF2noF3(pfx)
31259 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) {
31262 UInt rS = gregOfRexRM(pfx, modrm);
31265 UInt rD = eregOfRexRM(pfx, modrm);
31273 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31289 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31291 UInt rG = gregOfRexRM(pfx, modrm);
31292 UInt rV = getVexNvvvv(pfx);
31296 UInt rE = eregOfRexRM(pfx,modrm);
31303 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31319 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
31321 UInt rG = gregOfRexRM(pfx, modrm);
31322 UInt rV = getVexNvvvv(pfx);
31326 UInt rE = eregOfRexRM(pfx,modrm);
31333 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31357 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31359 UInt rG = gregOfRexRM(pfx, modrm);
31360 UInt rV = getVexNvvvv(pfx);
31364 UInt rE = eregOfRexRM(pfx,modrm);
31371 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31391 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31396 UInt rG = gregOfRexRM(pfx, modrm);
31397 UInt rV = getVexNvvvv(pfx);
31402 UInt rE = eregOfRexRM(pfx, modrm);
31410 addr = disAMode( &alen, vbi, pfx, delta, dis_buf,
31426 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
31431 UInt rG = gregOfRexRM(pfx, modrm);
31432 UInt rV = getVexNvvvv(pfx);
31439 UInt rE = eregOfRexRM(pfx, modrm);
31447 addr = disAMode( &alen, vbi, pfx, delta, dis_buf,
31473 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31478 UInt rG = gregOfRexRM(pfx, modrm);
31479 UInt rV = getVexNvvvv(pfx);
31484 UInt rE = eregOfRexRM(pfx, modrm);
31491 addr = disAMode( &alen, vbi, pfx, delta, dis_buf,
31508 if (have66noF2noF3(pfx)
31509 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) {
31512 UInt rG = gregOfRexRM(pfx, modrm);
31513 UInt rV = getVexNvvvv(pfx);
31521 UInt rE = eregOfRexRM(pfx, modrm);
31529 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
31556 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31557 delta = dis_VBLENDV_128 ( vbi, pfx, delta,
31565 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
31566 delta = dis_VBLENDV_256 ( vbi, pfx, delta,
31577 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31578 delta = dis_VBLENDV_128 ( vbi, pfx, delta,
31586 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
31587 delta = dis_VBLENDV_256 ( vbi, pfx, delta,
31598 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31599 delta = dis_VBLENDV_128 ( vbi, pfx, delta,
31607 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) {
31608 delta = dis_VBLENDV_256 ( vbi, pfx, delta,
31626 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31628 delta = dis_PCMPxSTRx( vbi, pfx, delta, True/*isAvx*/, opc );
31636 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) {
31637 delta = dis_AESKEYGENASSIST( vbi, pfx, delta, True/*isAvx*/ );
31645 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) {
31646 Int size = getRexW(pfx) ? 8 : 4;
31654 assign( src, getIRegE(size,pfx,rm) );
31655 DIP("rorx %d,%s,%s\n", imm8, nameIRegE(size,pfx,rm),
31656 nameIRegG(size,pfx,rm));
31659 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
31662 DIP("rorx %d,%s,%s\n", imm8, dis_buf, nameIRegG(size,pfx,rm));
31668 putIRegG( size, pfx, rm,
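RORX, decoded above, rotates right by an immediate count without updating the flags. A minimal 64-bit sketch (the 32-bit form masks the count with 31 instead); illustrative only:

   #include <stdint.h>

   static uint64_t rorx64_ref ( uint64_t x, unsigned imm8 )
   {
      unsigned n = imm8 & 63;            /* count is taken mod the width */
      if (n == 0) return x;
      return (x >> n) | (x << (64 - n));
   }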
31731 /* pfx holds the summary of prefixes. */
31732 Prefix pfx = PFX_EMPTY;
31830 /* Eat prefixes, summarising the result in pfx and sz, and rejecting
31837 case 0x66: pfx |= PFX_66; break;
31838 case 0x67: pfx |= PFX_ASO; break;
31839 case 0xF2: pfx |= PFX_F2; break;
31840 case 0xF3: pfx |= PFX_F3; break;
31841 case 0xF0: pfx |= PFX_LOCK; *expect_CAS = True; break;
31842 case 0x2E: pfx |= PFX_CS; break;
31843 case 0x3E: pfx |= PFX_DS; break;
31844 case 0x26: pfx |= PFX_ES; break;
31845 case 0x64: pfx |= PFX_FS; break;
31846 case 0x65: pfx |= PFX_GS; break;
31847 case 0x36: pfx |= PFX_SS; break;
31849 pfx |= PFX_REX;
31850 if (pre & (1<<3)) pfx |= PFX_REXW;
31851 if (pre & (1<<2)) pfx |= PFX_REXR;
31852 if (pre & (1<<1)) pfx |= PFX_REXX;
31853 if (pre & (1<<0)) pfx |= PFX_REXB;
31873 pfx |= PFX_VEX;
31875 /* R */ pfx |= (vex1 & (1<<7)) ? 0 : PFX_REXR;
31876 /* X */ pfx |= (vex1 & (1<<6)) ? 0 : PFX_REXX;
31877 /* B */ pfx |= (vex1 & (1<<5)) ? 0 : PFX_REXB;
31887 /* W */ pfx |= (vex2 & (1<<7)) ? PFX_REXW : 0;
31888 /* ~v3 */ pfx |= (vex2 & (1<<6)) ? 0 : PFX_VEXnV3;
31889 /* ~v2 */ pfx |= (vex2 & (1<<5)) ? 0 : PFX_VEXnV2;
31890 /* ~v1 */ pfx |= (vex2 & (1<<4)) ? 0 : PFX_VEXnV1;
31891 /* ~v0 */ pfx |= (vex2 & (1<<3)) ? 0 : PFX_VEXnV0;
31892 /* L */ pfx |= (vex2 & (1<<2)) ? PFX_VEXL : 0;
31896 case 1: pfx |= PFX_66; break;
31897 case 2: pfx |= PFX_F3; break;
31898 case 3: pfx |= PFX_F2; break;
31906 pfx |= PFX_VEX;
31908 /* R */ pfx |= (vex1 & (1<<7)) ? 0 : PFX_REXR;
31909 /* ~v3 */ pfx |= (vex1 & (1<<6)) ? 0 : PFX_VEXnV3;
31910 /* ~v2 */ pfx |= (vex1 & (1<<5)) ? 0 : PFX_VEXnV2;
31911 /* ~v1 */ pfx |= (vex1 & (1<<4)) ? 0 : PFX_VEXnV1;
31912 /* ~v0 */ pfx |= (vex1 & (1<<3)) ? 0 : PFX_VEXnV0;
31913 /* L */ pfx |= (vex1 & (1<<2)) ? PFX_VEXL : 0;
31917 case 1: pfx |= PFX_66; break;
31918 case 2: pfx |= PFX_F3; break;
31919 case 3: pfx |= PFX_F2; break;
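In the VEX parsing above, R, X, B and the vvvv field are stored complemented in the prefix bytes, which is why those lines OR in the flag when the corresponding bit is clear (W and L are taken directly). A standalone sketch of the same extraction for the 3-byte (0xC4) form, with byte1 and byte2 being the two payload bytes; the struct and names here are illustrative, not decoder types:

   #include <stdint.h>

   typedef struct { int R, X, B, W, L, vvvv, pp; } VexFields;

   static VexFields decode_vex3_payload ( uint8_t byte1, uint8_t byte2 )
   {
      VexFields f;
      f.R    = !(byte1 & 0x80);         /* stored inverted           */
      f.X    = !(byte1 & 0x40);         /* stored inverted           */
      f.B    = !(byte1 & 0x20);         /* stored inverted           */
      f.W    =  (byte2 & 0x80) ? 1 : 0;
      f.vvvv = (~(byte2 >> 3)) & 0xF;   /* bits 6:3, stored inverted */
      f.L    =  (byte2 & 0x04) ? 1 : 0;
      f.pp   =   byte2 & 0x03;          /* 0=none, 1=66, 2=F3, 3=F2  */
      return f;
   }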
31926 if ((pfx & PFX_VEX) && (pfx & PFX_REX))
31932 if (pfx & PFX_F2) n++;
31933 if (pfx & PFX_F3) n++;
31938 if (pfx & PFX_CS) n++;
31939 if (pfx & PFX_DS) n++;
31940 if (pfx & PFX_ES) n++;
31941 if (pfx & PFX_FS) n++;
31942 if (pfx & PFX_GS) n++;
31943 if (pfx & PFX_SS) n++;
31949 if ((pfx & PFX_FS) && !vbi->guest_amd64_assume_fs_is_const)
31953 if ((pfx & PFX_GS) && !vbi->guest_amd64_assume_gs_is_const)
31958 if (pfx & PFX_66) sz = 2;
31959 if ((pfx & PFX_REX) && (pfx & PFX_REXW)) sz = 8;
31964 if (haveLOCK(pfx)) {
31975 if (!(pfx & PFX_VEX)) {
31993 if (!(pfx & PFX_VEX)) {
32002 archinfo, vbi, pfx, sz, delta );
32007 archinfo, vbi, pfx, sz, delta );
32012 archinfo, vbi, pfx, sz, delta );
32017 archinfo, vbi, pfx, sz, delta );
32033 archinfo, vbi, pfx, sz, delta );
32039 archinfo, vbi, pfx, sz, delta );
32045 archinfo, vbi, pfx, sz, delta );
32057 if (getVexNvvvv(pfx) != 0)
32088 haveREX(pfx) ? 1 : 0, getRexW(pfx), getRexR(pfx),
32089 getRexX(pfx), getRexB(pfx));
32091 haveVEX(pfx) ? 1 : 0, getVexL(pfx),
32092 getVexNvvvv(pfx),
32097 vex_printf("vex amd64->IR: PFX.66=%d PFX.F2=%d PFX.F3=%d\n",
32098 have66(pfx) ? 1 : 0, haveF2(pfx) ? 1 : 0,
32099 haveF3(pfx) ? 1 : 0);