
Lines Matching full:xmm3

915 ; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm3
917 ; AVX1-NEXT:    vpblendvb %xmm4, %xmm2, %xmm3, %xmm2
935 ; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm3
937 ; AVX1-NEXT:    vpblendvb %xmm4, %xmm2, %xmm3, %xmm2
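The vpblendvb pattern at lines 915-937 is AVX1's workaround for the missing 256-bit integer byte blend: each ymm operand is split into xmm halves with vextractf128 and blended per half. Below is a minimal C-intrinsics sketch of that shape; the function name and operand roles are illustrative, not taken from the test (compile with -mavx).

#include <immintrin.h>

/* Sketch of the AVX1 pattern above: no single AVX1 instruction does a
   256-bit variable byte blend, so each 128-bit half is extracted and
   blended with vpblendvb (_mm_blendv_epi8). Names are illustrative. */
static __m256i blendv_epi8_avx1(__m256i a, __m256i b, __m256i mask) {
    __m128i a_hi = _mm256_extractf128_si256(a, 1);     /* vextractf128 $1 */
    __m128i b_hi = _mm256_extractf128_si256(b, 1);
    __m128i m_hi = _mm256_extractf128_si256(mask, 1);
    __m128i lo = _mm_blendv_epi8(_mm256_castsi256_si128(a),
                                 _mm256_castsi256_si128(b),
                                 _mm256_castsi256_si128(mask)); /* vpblendvb */
    __m128i hi = _mm_blendv_epi8(a_hi, b_hi, m_hi);             /* vpblendvb */
    return _mm256_insertf128_si256(_mm256_castsi128_si256(lo), hi, 1);
}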
992 ; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm3
993 ; AVX1-NEXT:    vpunpcklbw {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3],xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
994 ; AVX1-NEXT:    vmovdqa {{.*#+}} xmm3 = [0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1]
995 ; AVX1-NEXT:    vpshufb %xmm3, %xmm2, %xmm2
997 ; AVX1-NEXT:    vpshufb %xmm3, %xmm0, %xmm0
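The vpshufb control loaded at line 994, [0,1,0,1,...], re-reads bytes 0 and 1 into every word slot, i.e. it broadcasts the low 16-bit element; lines 995 and 997 reuse the same control register on two sources. A hedged intrinsics sketch (our naming, assuming SSSE3):

#include <immintrin.h>

/* Sketch of the vpshufb step at lines 994-995: the control [0,1,0,1,...]
   broadcasts the low 16-bit element to all eight word positions. */
static __m128i broadcast_low_word(__m128i v) {
    const __m128i ctl = _mm_set1_epi16(0x0100);   /* byte pattern 0,1,0,1,... */
    return _mm_shuffle_epi8(v, ctl);              /* vpshufb */
}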
1018 ; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm3
1019 ; AVX1-NEXT:    vpunpcklbw {{.*#+}} xmm3 = xmm3[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1020 ; AVX1-NEXT:    vpshuflw {{.*#+}} xmm3 = xmm3[0,0,0,0,4,5,6,7]
1021 ; AVX1-NEXT:    vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
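Lines 1018-1021 build a 16-bit splat without a shuffle table: interleaving a register with itself (vpunpcklbw) widens each byte to a word, vpshuflw with [0,0,0,0,4,5,6,7] splats word 0 across the low half, and vpunpcklqdq pastes that 64-bit result in front of another register's low qword. Roughly, in intrinsics (names are illustrative):

#include <immintrin.h>

/* Sketch of lines 1018-1021: self-interleave doubles each byte into a word,
   vpshuflw splats word 0 across the low half (words 4-7 untouched), then
   vpunpcklqdq merges two low qwords: result = {splat, other_lo[0]}. */
static __m128i splat_byte0_then_merge(__m128i v, __m128i other_lo) {
    __m128i w = _mm_unpacklo_epi8(v, v);                    /* vpunpcklbw */
    w = _mm_shufflelo_epi16(w, _MM_SHUFFLE(0, 0, 0, 0));    /* vpshuflw */
    return _mm_unpacklo_epi64(w, other_lo);                 /* vpunpcklqdq */
}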
1043 ; AVX1-NEXT:    vmovdqa {{.*#+}} xmm3 = <15,14,13,12,11,10,9,8,u,u,u,u,u,u,u,u>
1044 ; AVX1-NEXT:    vpshufb %xmm3, %xmm2, %xmm2
1049 ; AVX1-NEXT:    vpshufb %xmm3, %xmm0, %xmm0
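The vmovdqa constant at line 1043 is a vpshufb control: <15,14,...,8,u,...> reverses a register's high eight bytes into its low half, leaving the upper lanes undef; loading it once and applying it twice (lines 1044 and 1049) avoids a reload. A sketch, with the undef lanes pinned to zero since C has no undef:

#include <immintrin.h>

/* Sketch of lines 1043-1049: control bytes 15..8 reverse the high eight
   bytes into positions 0..7; the test's `u` lanes are zero here. */
static __m128i reverse_high8(__m128i v) {
    const __m128i ctl = _mm_set_epi8(0, 0, 0, 0, 0, 0, 0, 0,   /* u lanes */
                                     8, 9, 10, 11, 12, 13, 14, 15);
    return _mm_shuffle_epi8(v, ctl);                           /* vpshufb */
}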
1068 ; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm3
1069 ; AVX1-NEXT:    vpunpcklbw {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3],xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
1070 ; AVX1-NEXT:    vmovdqa {{.*#+}} xmm3 = [14,12,10,8,6,4,2,0,15,13,11,9,7,5,3,1]
1071 ; AVX1-NEXT:    vpshufb %xmm3, %xmm2, %xmm2
1073 ; AVX1-NEXT:    vpshufb %xmm3, %xmm0, %xmm0
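At lines 1068-1073, after the byte interleave, a single vpshufb control [14,12,...,0,15,13,...,1] de-interleaves the result: even-indexed bytes land (reversed) in the low half, odd-indexed bytes (reversed) in the high half, and line 1073 reuses the same control on a second register. As an intrinsics sketch:

#include <immintrin.h>

/* Sketch of the vpshufb at lines 1070-1073: gathers even-indexed bytes
   14,12,...,0 into the low half and odd-indexed bytes 15,13,...,1 into
   the high half. _mm_set_epi8 lists bytes from lane 15 down to lane 0. */
static __m128i deinterleave_reversed(__m128i v) {
    const __m128i ctl = _mm_set_epi8(1, 3, 5, 7, 9, 11, 13, 15,
                                     0, 2, 4, 6, 8, 10, 12, 14);
    return _mm_shuffle_epi8(v, ctl);    /* vpshufb */
}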
1200 ; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm3
1201 ; AVX1-NEXT:    vpunpcklbw {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3],xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
1218 ; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm3
1219 ; AVX1-NEXT:    vpunpckhbw {{.*#+}} xmm2 = xmm3[8],xmm2[8],xmm3[9],xmm2[9],xmm3[10],xmm2[10],xmm3[11],xmm2[11],xmm3[12],xmm2[12],xmm3[13],xmm2[13],xmm3[14],xmm2[14],xmm3[15],xmm2[15]
1236 ; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm3
1237 ; AVX1-NEXT:    vpunpckhbw {{.*#+}} xmm2 = xmm3[8],xmm2[8],xmm3[9],xmm2[9],xmm3[10],xmm2[10],xmm3[11],xmm2[11],xmm3[12],xmm2[12],xmm3[13],xmm2[13],xmm3[14],xmm2[14],xmm3[15],xmm2[15]
1257 ; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm3
1258 ; AVX1-NEXT:    vpunpcklbw {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3],xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
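Lines 1200-1258 repeat one shape: AVX1 byte unpacks exist only at 128 bits, so the compiler extracts the upper ymm lane and interleaves with vpunpcklbw/vpunpckhbw on xmm registers. A sketch covering both halves of the pattern (the function name and signature are ours):

#include <immintrin.h>

/* Sketch of lines 1200-1258: extract the upper 128-bit lane, then
   interleave its low and high bytes with a second xmm register. */
static void interleave_upper_lane(__m256i a, __m128i b,
                                  __m128i *lo, __m128i *hi) {
    __m128i a_hi = _mm256_extractf128_si256(a, 1);  /* vextractf128 $1 */
    *lo = _mm_unpacklo_epi8(a_hi, b);               /* vpunpcklbw */
    *hi = _mm_unpackhi_epi8(a_hi, b);               /* vpunpckhbw */
}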
1484 ; AVX1-NEXT:    vpshufb {{.*#+}} xmm3 = xmm2[u,u,4,u,1,6],zero,zero,xmm2[0],zero,xmm2[11,u],zero,zero,zero,zero
1486 ; AVX1-NEXT:    vpor %xmm3, %xmm4, %xmm3
1491 ; AVX1-NEXT:    vpblendvb %xmm6, %xmm3, %xmm5, %xmm3
1500 ; AVX1-NEXT:    vinsertf128 $1, %xmm3, %ymm0, %ymm0
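Lines 1484-1500 lean on vpshufb's zeroing behavior: a control byte with its high bit set (the `zero` entries at line 1484) clears that lane, so two partial shuffles can be OR-merged with vpor, refined with vpblendvb, and the xmm result reinserted into the upper ymm lane. The sketch below keeps the controls and mask as parameters, since the test's exact constants aren't reproduced here:

#include <immintrin.h>

/* Sketch of the shape at lines 1484-1500. ctl_a/ctl_b are vpshufb controls
   whose unused lanes carry 0x80 (high bit set => lane zeroed), so the two
   shuffle results are disjoint and can be merged with vpor. */
static __m256i merge_two_shuffles(__m128i a, __m128i b, __m128i ctl_a,
                                  __m128i ctl_b, __m128i sel,
                                  __m128i fallback, __m256i dst) {
    __m128i sa = _mm_shuffle_epi8(a, ctl_a);        /* vpshufb (zeroing) */
    __m128i sb = _mm_shuffle_epi8(b, ctl_b);        /* vpshufb (zeroing) */
    __m128i m  = _mm_or_si128(sa, sb);              /* vpor */
    m = _mm_blendv_epi8(fallback, m, sel);          /* vpblendvb */
    return _mm256_insertf128_si256(dst, m, 1);      /* vinsertf128 $1 */
}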
1810 ; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm3
1811 ; AVX1-NEXT:    vpalignr {{.*#+}} xmm2 = xmm2[15],xmm3[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14]
1897 ; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm3
1898 ; AVX1-NEXT:    vpalignr {{.*#+}} xmm2 = xmm2[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15],xmm3[0]
1915 ; AVX1-NEXT:    vextractf128 $1, %ymm0, %xmm3
1916 ; AVX1-NEXT:    vpalignr {{.*#+}} xmm2 = xmm2[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15],xmm3[0]
1933 ; AVX1-NEXT:    vextractf128 $1, %ymm1, %xmm3
1934 ; AVX1-NEXT:    vpalignr {{.*#+}} xmm2 = xmm2[15],xmm3[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14]
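The vpalignr lines from 1810 to 1934 are byte rotations: palignr shifts the concatenation hi:lo right by N bytes, so N=1 yields lo[1..15],hi[0] (lines 1898, 1916) and N=15 yields lo[15],hi[0..14] (lines 1811, 1934). In intrinsics (assuming SSSE3; names are ours):

#include <immintrin.h>

/* Sketch of the two vpalignr shapes above: _mm_alignr_epi8(hi, lo, N)
   returns bytes of (hi:lo) >> 8*N, i.e. lo[N..15] then hi[0..N-1]. */
static __m128i rotate_pair_by1(__m128i lo, __m128i hi) {
    return _mm_alignr_epi8(hi, lo, 1);    /* vpalignr $1: lo[1..15],hi[0] */
}
static __m128i rotate_pair_by15(__m128i lo, __m128i hi) {
    return _mm_alignr_epi8(hi, lo, 15);   /* vpalignr $15: lo[15],hi[0..14] */
}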