Lines matching xmm3 (each match shown with its line number in the source test file):
914 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
916 ; AVX1-NEXT: vpblendvb %xmm4, %xmm2, %xmm3, %xmm2
934 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
936 ; AVX1-NEXT: vpblendvb %xmm4, %xmm2, %xmm3, %xmm2
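
The two snippets above (lines 914-916 and 934-936) are the AVX1 lowering of a 256-bit per-byte select: AVX1 has no 256-bit integer vpblendvb, so each 128-bit half is blended separately after a vextractf128. A minimal C sketch of the same computation, using only real <immintrin.h> intrinsics (the function name is invented for illustration, and this is a sketch of the pattern, not the compiler's exact output):

    #include <immintrin.h>

    /* Split a 256-bit byte select into two 128-bit blends, as AVX1 must. */
    static __m256i select_bytes_avx1(__m256i a, __m256i b, __m256i mask)
    {
        __m128i a_hi = _mm256_extractf128_si256(a, 1);    /* vextractf128 $1 */
        __m128i b_hi = _mm256_extractf128_si256(b, 1);
        __m128i m_hi = _mm256_extractf128_si256(mask, 1);
        /* vpblendvb: byte i comes from b where mask byte i has its MSB set */
        __m128i lo = _mm_blendv_epi8(_mm256_castsi256_si128(a),
                                     _mm256_castsi256_si128(b),
                                     _mm256_castsi256_si128(mask));
        __m128i hi = _mm_blendv_epi8(a_hi, b_hi, m_hi);
        return _mm256_insertf128_si256(_mm256_castsi128_si256(lo), hi, 1);
    }
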
973 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
974 ; AVX1-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3],xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
975 ; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,0,1,0,1,0,1,0,1,0,1,0,1,0,1]
976 ; AVX1-NEXT: vpshufb %xmm3, %xmm2, %xmm2
978 ; AVX1-NEXT: vpshufb %xmm3, %xmm0, %xmm0
999 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
1000 ; AVX1-NEXT: vpunpcklbw {{.*#+}} xmm3 = xmm3[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
1001 ; AVX1-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,0,0,0,4,5,6,7]
1002 ; AVX1-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
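
Lines 973-978 interleave the two halves with vpunpcklbw and then replicate the first 16-bit element to every position with the constant [0,1,0,1,...] vpshufb control mask; lines 999-1002 reach a similar splat through vpshuflw plus vpunpcklqdq instead. A sketch of the first variant (hypothetical function name, real intrinsics):

    #include <immintrin.h>

    /* Interleave two byte vectors, then splat the first resulting word. */
    static __m128i splat_first_word(__m128i a, __m128i b)
    {
        __m128i interleaved = _mm_unpacklo_epi8(a, b);   /* vpunpcklbw */
        /* 0x0100 stored little-endian gives control bytes 0,1,0,1,... */
        const __m128i ctl = _mm_set1_epi16(0x0100);
        return _mm_shuffle_epi8(interleaved, ctl);       /* vpshufb */
    }
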
1024 ; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = <15,14,13,12,11,10,9,8,u,u,u,u,u,u,u,u>
1025 ; AVX1-NEXT: vpshufb %xmm3, %xmm2, %xmm2
1030 ; AVX1-NEXT: vpshufb %xmm3, %xmm0, %xmm0
1049 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
1050 ; AVX1-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3],xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
1051 ; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [14,12,10,8,6,4,2,0,15,13,11,9,7,5,3,1]
1052 ; AVX1-NEXT: vpshufb %xmm3, %xmm2, %xmm2
1054 ; AVX1-NEXT: vpshufb %xmm3, %xmm0, %xmm0
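
The vmovdqa constants at lines 1024 and 1051 are vpshufb control vectors: each control byte names the source byte for that lane, so an arbitrary byte permutation (a reversal of bytes 15..8 at line 1024, a reverse-and-deinterleave at line 1051) costs one shuffle per 128-bit half. A sketch of the basic reversal form, as a standalone helper with an invented name:

    #include <immintrin.h>

    /* One vpshufb with a constant mask performs any in-lane byte permute;
     * here the mask reverses all 16 bytes. */
    static __m128i reverse_bytes(__m128i v)
    {
        const __m128i ctl = _mm_setr_epi8(15, 14, 13, 12, 11, 10, 9, 8,
                                           7,  6,  5,  4,  3,  2, 1, 0);
        return _mm_shuffle_epi8(v, ctl);
    }
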
1181 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
1182 ; AVX1-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3],xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
1199 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
1200 ; AVX1-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm3[8],xmm2[8],xmm3[9],xmm2[9],xmm3[10],xmm2[10],xmm3[11],xmm2[11],xmm3[12],xmm2[12],xmm3[13],xmm2[13],xmm3[14],xmm2[14],xmm3[15],xmm2[15]
1217 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
1218 ; AVX1-NEXT: vpunpckhbw {{.*#+}} xmm2 = xmm3[8],xmm2[8],xmm3[9],xmm2[9],xmm3[10],xmm2[10],xmm3[11],xmm2[11],xmm3[12],xmm2[12],xmm3[13],xmm2[13],xmm3[14],xmm2[14],xmm3[15],xmm2[15]
1238 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
1239 ; AVX1-NEXT: vpunpcklbw {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3],xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
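
Lines 1181-1239 pair vpunpcklbw with vpunpckhbw on the extracted high half: the low unpack interleaves bytes 0-7 of its two sources and the high unpack interleaves bytes 8-15, so together they spread one 16-byte interleave across two registers. A sketch under that reading (names invented, intrinsics real):

    #include <immintrin.h>

    /* Produce both halves of a full byte interleave of a and b. */
    static void interleave_bytes(__m128i a, __m128i b,
                                 __m128i *lo, __m128i *hi)
    {
        *lo = _mm_unpacklo_epi8(a, b);   /* a[0],b[0],a[1],b[1],... */
        *hi = _mm_unpackhi_epi8(a, b);   /* a[8],b[8],a[9],b[9],... */
    }
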
1465 ; AVX1-NEXT: vpshufb {{.*#+}} xmm3 = xmm2[u,u,4,u,1,6],zero,zero,xmm2[0],zero,xmm2[11,u],zero,zero,zero,zero
1467 ; AVX1-NEXT: vpor %xmm3, %xmm4, %xmm3
1472 ; AVX1-NEXT: vpblendvb %xmm6, %xmm3, %xmm5, %xmm3
1481 ; AVX1-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm0
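
The sequence at lines 1465-1481 builds a two-source byte shuffle out of single-source vpshufb operations: a control byte with its MSB set (rendered as `zero` in the asm comment) zeroes that lane, so two shuffles with complementary zero masks can be vpor-ed together, with vpblendvb patching the remaining lanes and vinsertf128 reassembling the ymm result. A sketch of the shuffle-and-OR core, with control values made up purely for illustration:

    #include <immintrin.h>

    /* Merge bytes from two sources: each control uses -1 (0x80 MSB set)
     * to zero the lanes the other source will supply, so the vpor is a
     * lossless combine. */
    static __m128i merge_two_sources(__m128i a, __m128i b)
    {
        const __m128i ctl_a = _mm_setr_epi8( 0,  1,  2,  3, -1, -1, -1, -1,
                                             4,  5,  6,  7, -1, -1, -1, -1);
        const __m128i ctl_b = _mm_setr_epi8(-1, -1, -1, -1,  8,  9, 10, 11,
                                            -1, -1, -1, -1, 12, 13, 14, 15);
        __m128i part_a = _mm_shuffle_epi8(a, ctl_a);
        __m128i part_b = _mm_shuffle_epi8(b, ctl_b);
        return _mm_or_si128(part_a, part_b);   /* vpor */
    }
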
1789 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
1790 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[15],xmm3[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14]
1876 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
1877 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15],xmm3[0]
1894 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
1895 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15],xmm3[0]
1912 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
1913 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[15],xmm3[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14]
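
The vpalignr lines (1789-1913) stitch a lane-crossing byte rotation of a 256-bit vector out of 128-bit pieces, since AVX1 vpalignr only operates within a 128-bit lane: `xmm2[15],xmm3[0,...,14]` is the concatenated pair shifted right by 15 bytes, and `xmm2[1,...,15],xmm3[0]` the same pair shifted by 1. Both forms map directly onto _mm_alignr_epi8; a sketch with invented helper names:

    #include <immintrin.h>

    /* _mm_alignr_epi8(hi, lo, n) returns the 32-byte concatenation hi:lo
     * shifted right by n bytes, truncated to 16 bytes. */
    static __m128i shift_in_one(__m128i lo, __m128i hi)
    {
        return _mm_alignr_epi8(hi, lo, 1);   /* lo[1..15], hi[0] */
    }

    static __m128i shift_in_fifteen(__m128i lo, __m128i hi)
    {
        return _mm_alignr_epi8(hi, lo, 15);  /* lo[15], hi[0..14] */
    }
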