Lines matching defs:ib (source lines in the amd64 instruction decoder that define or document the immediate byte ib)
12657 /* 66 0F 71 /2 ib = PSRLW by immediate */
12664 /* 66 0F 71 /4 ib = PSRAW by immediate */
12671 /* 66 0F 71 /6 ib = PSLLW by immediate */
12681 /* 66 0F 72 /2 ib = PSRLD by immediate */
12688 /* 66 0F 72 /4 ib = PSRAD by immediate */
12695 /* 66 0F 72 /6 ib = PSLLD by immediate */
12705 /* 66 0F 73 /3 ib = PSRLDQ by immediate */
12719 /* 66 0F 73 /7 ib = PSLLDQ by immediate */
12734 /* 66 0F 73 /2 ib = PSRLQ by immediate */
12741 /* 66 0F 73 /6 ib = PSLLQ by immediate */
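The three opcodes above (66 0F 71/72/73) are group encodings: the /2, /4, /6, /3 and /7 digits are the reg field of the ModRM byte, which selects the operation, and the trailing ib supplies the shift count. Below is a minimal standalone sketch of that dispatch, assuming nothing beyond the digit assignments listed above; the function name and table are invented for illustration and this is not the decoder's own code.

   #include <stdio.h>

   /* Hypothetical helper: map (opcode, ModRM.reg) for the 66 0F 71/72/73
      immediate-shift groups to a mnemonic.  Returns NULL for undefined
      combinations. */
   static const char *imm_shift_name(unsigned char opc, unsigned reg)
   {
      switch (opc) {
         case 0x71: /* word shifts */
            if (reg == 2) return "psrlw";
            if (reg == 4) return "psraw";
            if (reg == 6) return "psllw";
            break;
         case 0x72: /* dword shifts */
            if (reg == 2) return "psrld";
            if (reg == 4) return "psrad";
            if (reg == 6) return "pslld";
            break;
         case 0x73: /* qword and whole-register byte shifts */
            if (reg == 2) return "psrlq";
            if (reg == 3) return "psrldq";
            if (reg == 6) return "psllq";
            if (reg == 7) return "pslldq";
            break;
      }
      return NULL;
   }

   int main(void)
   {
      /* 66 0F 73 /3 ib with ib = 4: shift the whole xmm right by 4 bytes. */
      unsigned char modrm = 0xDA;              /* mod=11, reg=3, rm=2 */
      unsigned reg = (modrm >> 3) & 7;
      printf("%s $4, %%xmm2\n", imm_shift_name(0x73, reg));
      return 0;
   }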
13277 /* 0F C6 /r ib = SHUFPS -- shuffle packed F32s */
13302 /* 66 0F C6 /r ib = SHUFPD -- shuffle packed F64s */
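For SHUFPS the immediate is read as four 2-bit lane selectors: the low two pick destination lanes 0 and 1 from the destination's own value (the first source), the high two pick lanes 2 and 3 from the second source. SHUFPD uses only bits 0 and 1, one per F64 lane. An illustrative-only C model of the F32 case, following the Intel SDM description rather than the IR actually generated here:

   #include <stdio.h>

   /* Illustrative-only model of SHUFPS dst, src, imm8 (dst is also the
      first source).  Each 2-bit field of imm8 indexes one F32 lane. */
   static void shufps(float dst[4], const float src[4], unsigned imm8)
   {
      float a[4] = { dst[0], dst[1], dst[2], dst[3] };
      dst[0] = a[(imm8 >> 0) & 3];
      dst[1] = a[(imm8 >> 2) & 3];
      dst[2] = src[(imm8 >> 4) & 3];
      dst[3] = src[(imm8 >> 6) & 3];
   }

   int main(void)
   {
      float x[4] = { 0.0f, 1.0f, 2.0f, 3.0f };
      float y[4] = { 4.0f, 5.0f, 6.0f, 7.0f };
      shufps(x, y, 0x1B);                       /* 0x1B = 00 01 10 11 */
      printf("%g %g %g %g\n", x[0], x[1], x[2], x[3]);   /* 3 2 5 4 */
      return 0;
   }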
15427 UChar ib = getUChar(delta);
15428 rIS4 = (ib >> 4) & 0xF;
15436 UChar ib = getUChar(delta);
15437 rIS4 = (ib >> 4) & 0xF;
15466 UChar ib = getUChar(delta);
15467 rIS4 = (ib >> 4) & 0xF;
15475 UChar ib = getUChar(delta);
15476 rIS4 = (ib >> 4) & 0xF;
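These definitions extract a fourth register operand from the top nibble of a trailing immediate byte, the "is4" encoding used by four-operand instructions such as the VBLENDV family; the low nibble is not used as a register specifier. A small sketch of the same extraction, assuming only what the lines above show:

   #include <stdio.h>

   /* Given the trailing immediate byte of an is4-encoded instruction,
      recover the fourth register operand from the top nibble, as the
      lines above do with rIS4 = (ib >> 4) & 0xF. */
   static unsigned is4_register(unsigned char ib)
   {
      return (ib >> 4) & 0xF;
   }

   int main(void)
   {
      unsigned char ib = 0xC0;   /* top nibble 0xC -> xmm12/ymm12 */
      printf("fourth operand: register %u\n", is4_register(ib));
      return 0;
   }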
17378 /* 66 0F 3A 08 /r ib = ROUNDPS imm8, xmm2/m128, xmm1 */
17448 /* 66 0F 3A 09 /r ib = ROUNDPD imm8, xmm2/m128, xmm1 */
17503 /* 66 0F 3A 0A /r ib = ROUNDSS imm8, xmm2/m32, xmm1
17504 66 0F 3A 0B /r ib = ROUNDSD imm8, xmm2/m64, xmm1
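For all four ROUND* forms the immediate is a rounding-control byte: per the Intel SDM, bits 1:0 give the rounding mode, bit 2 says to use MXCSR.RC instead, and bit 3 suppresses the precision (inexact) exception. A hedged sketch of that decoding, illustrative only and not a helper from this file:

   #include <stdio.h>

   /* Decode the ROUNDSS/ROUNDSD/ROUNDPS/ROUNDPD control byte (per the
      Intel SDM): bit 2 chooses between MXCSR.RC and the mode in bits 1:0,
      bit 3 suppresses the precision (inexact) exception. */
   static void describe_round_imm8(unsigned imm8)
   {
      static const char *mode[4] =
         { "nearest-even", "toward -inf", "toward +inf", "toward zero" };
      if (imm8 & 4)
         printf("rounding mode: taken from MXCSR.RC\n");
      else
         printf("rounding mode: %s\n", mode[imm8 & 3]);
      printf("precision exception: %s\n",
             (imm8 & 8) ? "suppressed" : "reported");
   }

   int main(void)
   {
      describe_round_imm8(0xB);   /* truncate, inexact suppressed */
      return 0;
   }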
17556 /* 66 0F 3A 0C /r ib = BLENDPS xmm1, xmm2/m128, imm8
17593 /* 66 0F 3A 0D /r ib = BLENDPD xmm1, xmm2/m128, imm8
17629 /* 66 0F 3A 0E /r ib = PBLENDW xmm1, xmm2/m128, imm8
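In the three BLEND forms above the immediate acts as a per-lane select mask: bit i of imm8 chooses whether destination lane i is kept or replaced by the corresponding source lane. An illustrative-only model of BLENDPS under that (Intel SDM) description; BLENDPD and PBLENDW follow the same pattern with 2 and 8 lanes:

   #include <stdio.h>

   /* Illustrative-only model of BLENDPS dst, src, imm8: bit i of the
      immediate picks dst lane i from src (1) or leaves it alone (0). */
   static void blendps(float dst[4], const float src[4], unsigned imm8)
   {
      for (int i = 0; i < 4; i++)
         if ((imm8 >> i) & 1)
            dst[i] = src[i];
   }

   int main(void)
   {
      float x[4] = { 0, 1, 2, 3 };
      float y[4] = { 9, 9, 9, 9 };
      blendps(x, y, 0x5);                               /* lanes 0 and 2 */
      printf("%g %g %g %g\n", x[0], x[1], x[2], x[3]);  /* 9 1 9 3 */
      return 0;
   }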
17666 /* 66 0F 3A 14 /r ib = PEXTRB reg/m8, xmm, imm8
17676 /* 66 0F 3A 15 /r ib = PEXTRW r/m16, xmm, imm8
17686 /* 66 no-REX.W 0F 3A 16 /r ib = PEXTRD reg/mem32, xmm2, imm8
17695 /* 66 REX.W 0F 3A 16 /r ib = PEXTRQ reg/mem64, xmm2, imm8
17707 /* 66 0F 3A 17 /r ib = EXTRACTPS reg/mem32, xmm2, imm8 Extract
17719 /* 66 0F 3A 20 /r ib = PINSRB xmm1, r32/m8, imm8
17750 /* 66 0F 3A 21 /r ib = INSERTPS imm8, xmm2/m32, xmm1
17789 /* 66 no-REX.W 0F 3A 22 /r ib = PINSRD xmm1, r/m32, imm8
17820 /* 66 REX.W 0F 3A 22 /r ib = PINSRQ xmm1, r/m64, imm8
17854 /* 66 0F 3A 40 /r ib = DPPS xmm1, xmm2/m128, imm8
17887 /* 66 0F 3A 41 /r ib = DPPD xmm1, xmm2/m128, imm8
17920 /* 66 0F 3A 42 /r ib = MPSADBW xmm1, xmm2/m128, imm8
17955 /* 66 0F 3A 44 /r ib = PCLMULQDQ xmm1, xmm2/m128, imm8
17996 /* 66 0F 3A 63 /r ib = PCMPISTRI imm8, xmm2/m128, xmm1
17997 66 0F 3A 62 /r ib = PCMPISTRM imm8, xmm2/m128, xmm1
17998 66 0F 3A 61 /r ib = PCMPESTRI imm8, xmm2/m128, xmm1
17999 66 0F 3A 60 /r ib = PCMPESTRM imm8, xmm2/m128, xmm1
18012 /* 66 0F 3A DF /r ib = AESKEYGENASSIST imm8, xmm2/m128, xmm1 */
18091 case 0x04: /* ADD Ib, AL */
18118 case 0x0C: /* OR Ib, AL */
18145 case 0x14: /* ADC Ib, AL */
18172 case 0x1C: /* SBB Ib, AL */
18199 case 0x24: /* AND Ib, AL */
18226 case 0x2C: /* SUB Ib, AL */
18254 case 0x34: /* XOR Ib, AL */
18281 case 0x3C: /* CMP Ib, AL */
18378 case 0x6A: /* PUSH Ib, sign-extended to sz */
18398 case 0x6B: /* IMUL Ib, Ev, Gv */
18476 case 0x80: /* Grp1 Ib,Eb */
18495 case 0x83: /* Grp1 Ib,Ev */
18930 case 0xA8: /* TEST Ib, AL */
19019 case 0xC0: { /* Grp2 Ib,Eb */
19033 case 0xC1: { /* Grp2 Ib,Ev */
19061 case 0xC6: /* MOV Ib,Eb */
20069 case 0xBA: { /* Grp8 Ib,Ev */
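Several of these cases (PUSH 6A, IMUL 6B, Grp1 0x83) read a one-byte immediate and sign-extend it to the full operand size before use. A minimal sketch of that widening step, assuming nothing beyond two's-complement sign extension; this is not a helper from this file:

   #include <stdio.h>
   #include <stdint.h>

   /* Grp1 0x83 (Ib,Ev) carries a one-byte immediate that is sign-extended
      to the operand size before use; a sketch of that widening step. */
   static int64_t sext8(uint8_t ib)
   {
      return (int64_t)(int8_t)ib;
   }

   int main(void)
   {
      /* 48 83 C0 FF  =  add $-1, %rax: the 0xFF byte becomes -1, i.e.
         0xFFFFFFFFFFFFFFFF at 64-bit operand size. */
      printf("0x%016llx\n", (unsigned long long)sext8(0xFF));
      return 0;
   }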
23212 /* VPSHUFD imm8, xmm2/m128, xmm1 = VEX.128.66.0F.WIG 70 /r ib */
23217 /* VPSHUFLW imm8, xmm2/m128, xmm1 = VEX.128.F2.0F.WIG 70 /r ib */
23223 /* VPSHUFHW imm8, xmm2/m128, xmm1 = VEX.128.F3.0F.WIG 70 /r ib */
23232 /* VPSRLW imm8, xmm2, xmm1 = VEX.NDD.128.66.0F.WIG 71 /2 ib */
23233 /* VPSRAW imm8, xmm2, xmm1 = VEX.NDD.128.66.0F.WIG 71 /4 ib */
23234 /* VPSLLW imm8, xmm2, xmm1 = VEX.NDD.128.66.0F.WIG 71 /6 ib */
23261 /* VPSRLD imm8, xmm2, xmm1 = VEX.NDD.128.66.0F.WIG 72 /2 ib */
23262 /* VPSRAD imm8, xmm2, xmm1 = VEX.NDD.128.66.0F.WIG 72 /4 ib */
23263 /* VPSLLD imm8, xmm2, xmm1 = VEX.NDD.128.66.0F.WIG 72 /6 ib */
23290 /* VPSRLDQ imm8, xmm2, xmm1 = VEX.NDD.128.66.0F.WIG 73 /3 ib */
23291 /* VPSLLDQ imm8, xmm2, xmm1 = VEX.NDD.128.66.0F.WIG 73 /7 ib */
23292 /* VPSRLQ imm8, xmm2, xmm1 = VEX.NDD.128.66.0F.WIG 73 /2 ib */
23293 /* VPSLLQ imm8, xmm2, xmm1 = VEX.NDD.128.66.0F.WIG 73 /6 ib */
23660 /* = VEX.NDS.LIG.F2.0F.WIG C2 /r ib */
23670 /* = VEX.NDS.LIG.F3.0F.WIG C2 /r ib */
23680 /* = VEX.NDS.128.66.0F.WIG C2 /r ib */
23690 /* = VEX.NDS.256.66.0F.WIG C2 /r ib */
23699 /* = VEX.NDS.128.0F.WIG C2 /r ib */
23709 /* = VEX.NDS.256.0F.WIG C2 /r ib */
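The six C2 /r ib entries above are the VCMPSD/VCMPSS/VCMPPD/VCMPPS forms (F2, F3, 66 and no prefix respectively): the trailing byte selects the comparison predicate. A sketch of the eight original SSE predicate codes; AVX widens the field to 32 values, and the table contents come from the Intel SDM, not from this file:

   #include <stdio.h>

   /* The trailing byte of CMPPS/CMPPD/CMPSS/CMPSD (0F C2 /r ib and the
      VEX forms above) selects the comparison predicate. */
   static const char *cmp_predicate(unsigned imm8)
   {
      static const char *name[8] = {
         "eq", "lt", "le", "unord", "neq", "nlt", "nle", "ord"
      };
      return (imm8 < 8) ? name[imm8] : "avx-extended";
   }

   int main(void)
   {
      /* 0F C2 /r 01  ->  cmpltps */
      printf("cmp%sps\n", cmp_predicate(1));
      return 0;
   }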
23720 /* VPINSRW r32/m16, xmm2, xmm1 = VEX.NDS.128.66.0F.WIG C4 /r ib */
23754 /* VPEXTRW imm8, xmm1, reg32 = VEX.128.66.0F.W0 C5 /r ib */
23767 /* = VEX.NDS.128.0F.WIG C6 /r ib */
23797 /* = VEX.NDS.256.0F.WIG C6 /r ib */
23827 /* = VEX.NDS.128.66.0F.WIG C6 /r ib */
23857 /* = VEX.NDS.256.66.0F.WIG C6 /r ib */
25288 /* VPERMILPS imm8, ymm2/m256, ymm1 = VEX.256.66.0F3A.WIG 04 /r ib */
25318 /* VPERMILPS imm8, xmm2/m128, xmm1 = VEX.128.66.0F3A.WIG 04 /r ib */
25346 /* VPERMILPD imm8, xmm2/m128, xmm1 = VEX.128.66.0F3A.WIG 05 /r ib */
25379 /* VPERMILPD imm8, ymm2/m256, ymm1 = VEX.256.66.0F3A.WIG 05 /r ib */
25416 /* VPERM2F128 imm8, ymm3/m256, ymm2, ymm1 = VEX.NDS.256.66.0F3A.W0 06 /r ib */
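VPERM2F128's immediate is read as two 4-bit halves, one per destination 128-bit lane: bits 1:0 (or 5:4) pick one of the four source lanes, and bit 3 (or 7) zeroes the lane instead. An illustrative-only model under that (Intel SDM) description, not the IR built here:

   #include <stdio.h>
   #include <string.h>

   /* Illustrative-only model of VPERM2F128 dst, src1, src2, imm8 over
      arrays of four floats per 128-bit lane.  Each half of the immediate
      selects one of the four source lanes (or zero) for one dest lane. */
   static void perm2f128(float dst[8], const float s1[8], const float s2[8],
                         unsigned imm8)
   {
      const float zero[4] = { 0, 0, 0, 0 };
      const float *lane[4] = { s1, s1 + 4, s2, s2 + 4 };
      for (int half = 0; half < 2; half++) {
         unsigned ctl = (imm8 >> (4 * half)) & 0xF;
         const float *src = (ctl & 8) ? zero : lane[ctl & 3];
         memcpy(dst + 4 * half, src, 4 * sizeof(float));
      }
   }

   int main(void)
   {
      float a[8] = { 0, 1, 2, 3, 4, 5, 6, 7 };
      float b[8] = { 8, 9, 10, 11, 12, 13, 14, 15 };
      float d[8];
      perm2f128(d, a, b, 0x31);   /* low <- a's high lane, high <- b's high lane */
      printf("%g %g %g %g | %g %g %g %g\n",
             d[0], d[1], d[2], d[3], d[4], d[5], d[6], d[7]);
      return 0;
   }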
25463 /* VROUNDPS = VEX.NDS.128.66.0F3A.WIG 08 ib */
25511 /* VROUNDPS = VEX.NDS.256.66.0F3A.WIG 08 ib */
25568 /* VROUNDPD = VEX.NDS.128.66.0F3A.WIG 09 ib */
25612 /* VROUNDPD = VEX.NDS.256.66.0F3A.WIG 09 ib */
25662 /* VROUNDSS = VEX.NDS.128.66.0F3A.WIG 0A ib */
25664 /* VROUNDSD = VEX.NDS.128.66.0F3A.WIG 0B ib */
25719 /* VBLENDPS = VEX.NDS.256.66.0F3A.WIG 0C /r ib */
25750 /* VBLENDPS = VEX.NDS.128.66.0F3A.WIG 0C /r ib */
25784 /* VBLENDPD = VEX.NDS.256.66.0F3A.WIG 0D /r ib */
25815 /* VBLENDPD = VEX.NDS.128.66.0F3A.WIG 0D /r ib */
25849 /* VPBLENDW = VEX.NDS.128.66.0F3A.WIG 0E /r ib */
25883 /* VPALIGNR = VEX.NDS.128.66.0F3A.WIG 0F /r ib */
25918 /* VPEXTRB imm8, xmm2, reg/m8 = VEX.128.66.0F3A.W0 14 /r ib */
25928 /* VPEXTRW = VEX.128.66.0F3A.W0 15 /r ib */
25938 /* VPEXTRD = VEX.128.66.0F3A.W0 16 /r ib */
25944 /* VPEXTRQ = VEX.128.66.0F3A.W1 16 /r ib */
25953 /* VEXTRACTPS imm8, xmm1, r32/m32 = VEX.128.66.0F3A.WIG 17 /r ib */
25963 /* VINSERTF128 = VEX.NDS.256.66.0F3A.W0 18 /r ib */
25967 UInt ib = 0;
25975 ib = getUChar(delta);
25977 ib, nameXMMReg(rE), nameYMMReg(rV), nameYMMReg(rG));
25982 ib = getUChar(delta);
25984 ib, dis_buf, nameYMMReg(rV), nameYMMReg(rG));
25989 putYMMRegLane128(rG, ib & 1, mkexpr(t128));
25998 /* VEXTRACTF128 = VEX.256.66.0F3A.W0 19 /r ib */
26002 UInt ib = 0;
26008 ib = getUChar(delta);
26009 assign(t128, getYMMRegLane128(rS, ib & 1));
26012 ib, nameXMMReg(rS), nameYMMReg(rD));
26016 ib = getUChar(delta);
26017 assign(t128, getYMMRegLane128(rS, ib & 1));
26020 ib, nameYMMReg(rS), dis_buf);
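The ib & 1 in the two blocks above is the whole story for VINSERTF128/VEXTRACTF128: only bit 0 of the immediate is significant, and it selects which 128-bit half of the ymm register is written or read. A small standalone model of that lane selection; the struct and function names are invented for illustration:

   #include <stdio.h>
   #include <string.h>
   #include <stdint.h>

   /* Illustrative-only model of the lane selection done by
      putYMMRegLane128/getYMMRegLane128 above: a 256-bit register as two
      128-bit halves, indexed by ib & 1. */
   typedef struct { uint64_t lane[2][2]; } Ymm;

   static void insertf128(Ymm *dst, const uint64_t src[2], unsigned ib)
   {
      memcpy(dst->lane[ib & 1], src, 16);
   }

   static void extractf128(uint64_t dst[2], const Ymm *src, unsigned ib)
   {
      memcpy(dst, src->lane[ib & 1], 16);
   }

   int main(void)
   {
      Ymm y = { { { 1, 2 }, { 3, 4 } } };
      uint64_t x[2] = { 0xAA, 0xBB };
      insertf128(&y, x, 1);          /* overwrite the high 128 bits */
      extractf128(x, &y, 0);         /* read back the low 128 bits  */
      printf("%llu %llu\n", (unsigned long long)x[0], (unsigned long long)x[1]);
      return 0;
   }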
26029 /* VPINSRB r32/m8, xmm2, xmm1 = VEX.NDS.128.66.0F3A.W0 20 /r ib */
26065 = VEX.NDS.128.66.0F3A.WIG 21 /r ib */
26104 /* VPINSRD r32/m32, xmm2, xmm1 = VEX.NDS.128.66.0F3A.W0 22 /r ib */
26136 /* VPINSRQ r64/m64, xmm2, xmm1 = VEX.NDS.128.66.0F3A.W1 22 /r ib */
26171 /* VDPPS imm8, xmm3/m128,xmm2,xmm1 = VEX.NDS.128.66.0F3A.WIG 40 /r ib */
26201 /* VDPPS imm8, ymm3/m256,ymm2,ymm1 = VEX.NDS.256.66.0F3A.WIG 40 /r ib */
26239 /* VDPPD imm8, xmm3/m128,xmm2,xmm1 = VEX.NDS.128.66.0F3A.WIG 41 /r ib */
26273 /* VMPSADBW = VEX.NDS.128.66.0F3A.WIG 42 /r ib */
26311 /* VPCLMULQDQ = VEX.NDS.128.66.0F3A.WIG 44 /r ib */
26312 /* 66 0F 3A 44 /r ib = PCLMULQDQ xmm1, xmm2/m128, imm8
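For PCLMULQDQ/VPCLMULQDQ the immediate picks operand halves rather than lanes: per the Intel SDM, bit 0 selects the 64-bit half of the first source and bit 4 the half of the second, and the selected halves are multiplied carry-lessly into a 128-bit result. A hedged sketch of that selection plus a naive carry-less multiply, illustrative only:

   #include <stdio.h>
   #include <stdint.h>

   /* Illustrative-only model of PCLMULQDQ's immediate: bit 0 picks the
      64-bit half of the first source, bit 4 the half of the second; the
      selected halves are multiplied carry-lessly (XOR instead of add). */
   static void clmul(uint64_t hi_lo[2], const uint64_t a[2],
                     const uint64_t b[2], unsigned imm8)
   {
      uint64_t x = a[imm8 & 1];
      uint64_t y = b[(imm8 >> 4) & 1];
      uint64_t lo = 0, hi = 0;
      for (int i = 0; i < 64; i++) {
         if ((y >> i) & 1) {
            lo ^= x << i;
            if (i) hi ^= x >> (64 - i);
         }
      }
      hi_lo[0] = lo;
      hi_lo[1] = hi;
   }

   int main(void)
   {
      uint64_t a[2] = { 0x3, 0x0 };            /* low half  = x + 1   (GF(2)) */
      uint64_t b[2] = { 0x0, 0x5 };            /* high half = x^2 + 1         */
      uint64_t r[2];
      clmul(r, a, b, 0x10);                    /* a's low half, b's high half */
      printf("0x%llx 0x%llx\n",
             (unsigned long long)r[1], (unsigned long long)r[0]);  /* 0x0 0xf */
      return 0;
   }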
26407 /* VEX.128.66.0F3A.WIG 63 /r ib = VPCMPISTRI imm8, xmm2/m128, xmm1
26408 VEX.128.66.0F3A.WIG 62 /r ib = VPCMPISTRM imm8, xmm2/m128, xmm1
26409 VEX.128.66.0F3A.WIG 61 /r ib = VPCMPESTRI imm8, xmm2/m128, xmm1
26410 VEX.128.66.0F3A.WIG 60 /r ib = VPCMPESTRM imm8, xmm2/m128, xmm1