Lines Matching full:xmm3
544 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
545 ; AVX1-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0],xmm2[1],xmm3[2],xmm2[3],xmm3[4],xmm2[5],xmm3[6],xmm2[7]
562 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
563 ; AVX1-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0],xmm2[1],xmm3[2],xmm2[3],xmm3[4],xmm2[5],xmm3[6],xmm2[7]
643 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
644 ; AVX1-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0],xmm2[1],xmm3[2],xmm2[3],xmm3[4],xmm2[5],xmm3[6],xmm2[7]
662 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
663 ; AVX1-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0],xmm2[1],xmm3[2],xmm2[3],xmm3[4],xmm2[5],xmm3[6],xmm2[7]
736 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
737 ; AVX1-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[0,0,0,0,4,5,6,7]
738 ; AVX1-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm3[0],xmm2[0]
759 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
760 ; AVX1-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,3,2,3]
761 ; AVX1-NEXT: vpshuflw {{.*#+}} xmm3 = xmm3[3,2,1,0,4,5,6,7]
762 ; AVX1-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm3[0]
784 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
785 ; AVX1-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
786 ; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [12,13,8,9,4,5,0,1,14,15,10,11,6,7,2,3]
787 ; AVX1-NEXT: vpshufb %xmm3, %xmm2, %xmm2
789 ; AVX1-NEXT: vpshufb %xmm3, %xmm0, %xmm0
934 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
935 ; AVX1-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
952 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
953 ; AVX1-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
970 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
971 ; AVX1-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
990 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
991 ; AVX1-NEXT: vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
1478 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
1479 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[14,15],xmm3[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
1496 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
1497 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[2,3,4,5,6,7,8,9,10,11,12,13,14,15],xmm3[0,1]
1514 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
1515 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[2,3,4,5,6,7,8,9,10,11,12,13,14,15],xmm3[0,1]
1532 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
1533 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[14,15],xmm3[0,1,2,3,4,5,6,7,8,9,10,11,12,13]
1630 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
1631 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[8,9,10,11,12,13,14,15],xmm3[0,1,2,3,4,5,6,7]
1632 ; AVX1-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
1876 ; AVX1-NEXT: vpshufb %xmm2, %xmm1, %xmm3
1879 ; AVX1-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm0
1886 ; AVX2-NEXT: vpshufb %xmm2, %xmm1, %xmm3
1889 ; AVX2-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm0
2020 ; AVX1-NEXT: vpshufb %xmm2, %xmm1, %xmm3
2023 ; AVX1-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm0
2030 ; AVX2-NEXT: vpshufb %xmm2, %xmm1, %xmm3
2033 ; AVX2-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm0
2044 ; AVX1-NEXT: vpshufb %xmm2, %xmm1, %xmm3
2047 ; AVX1-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm0
2054 ; AVX2-NEXT: vpshufb %xmm2, %xmm1, %xmm3
2057 ; AVX2-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm0
2068 ; AVX1-NEXT: vpshufb %xmm2, %xmm1, %xmm3
2071 ; AVX1-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm0
2078 ; AVX2-NEXT: vpshufb %xmm2, %xmm1, %xmm3
2081 ; AVX2-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm0
2092 ; AVX1-NEXT: vpshufb %xmm2, %xmm1, %xmm3
2095 ; AVX1-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm0
2102 ; AVX2-NEXT: vpshufb %xmm2, %xmm1, %xmm3
2105 ; AVX2-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm0
2368 ; AVX1-NEXT: vpshufb %xmm2, %xmm1, %xmm3
2371 ; AVX1-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm0
2378 ; AVX2-NEXT: vpshufb %xmm2, %xmm1, %xmm3
2381 ; AVX2-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm0
2414 ; AVX1-NEXT: vpshufb %xmm2, %xmm1, %xmm3
2417 ; AVX1-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm0
2424 ; AVX2-NEXT: vpshufb %xmm2, %xmm1, %xmm3
2427 ; AVX2-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm0
2437 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
2438 ; AVX1-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
2442 ; AVX1-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm0
2464 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
2465 ; AVX1-NEXT: vpshufd {{.*#+}} xmm4 = xmm3[2,3,0,1]
2467 ; AVX1-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm1[4],xmm3[4],xmm1[5],xmm3[5],xmm1[6],xmm3[6],xmm1[7],xmm3[7]
2491 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
2492 ; AVX1-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
2496 ; AVX1-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm0
2518 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
2519 ; AVX1-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,3,0,1]
2520 ; AVX1-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
2524 ; AVX1-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm0
2545 ; AVX1-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[0,3,2,3]
2548 ; AVX1-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
2555 ; AVX1-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm0
2579 ; AVX1-NEXT: vpshufd {{.*#+}} xmm3 = xmm2[2,0,2,3]
2582 ; AVX1-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
2587 ; AVX1-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm0
2593 ; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [8,9,8,9,4,5,10,11,0,1,0,1,12,13,2,3]
2594 ; AVX2-NEXT: vpshufb %xmm3, %xmm2, %xmm4
2596 ; AVX2-NEXT: vpshufb %xmm3, %xmm1, %xmm1
2612 ; AVX1-NEXT: vpshuflw {{.*#+}} xmm3 = xmm0[1,0,3,2,4,5,6,7]
2613 ; AVX1-NEXT: vpunpckldq {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
2640 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
2641 ; AVX1-NEXT: vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
2645 ; AVX1-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm0
2667 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
2668 ; AVX1-NEXT: vpunpckhwd {{.*#+}} xmm3 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
2672 ; AVX1-NEXT: vinsertf128 $1, %xmm3, %ymm0, %ymm0
2695 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
2696 ; AVX1-NEXT: vpshufd {{.*#+}} xmm4 = xmm3[2,3,2,3]
2699 ; AVX1-NEXT: vpunpckhwd {{.*#+}} xmm1 = xmm1[4],xmm3[4],xmm1[5],xmm3[5],xmm1[6],xmm3[6],xmm1[7],xmm3[7]
2749 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
2750 ; AVX1-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1,2,3],xmm2[4,5,6,7]
2751 ; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,6,7,4,5,10,11,0,1,10,11,0,1,2,3]
2752 ; AVX1-NEXT: vpshufb %xmm3, %xmm2, %xmm2
2754 ; AVX1-NEXT: vpshufb %xmm3, %xmm0, %xmm0
2785 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
2786 ; AVX1-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
2787 ; AVX1-NEXT: vpblendw {{.*#+}} xmm2 = xmm2[0,1,2],xmm3[3],xmm2[4,5,6,7]
2826 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
2827 ; AVX1-NEXT: vpshufd {{.*#+}} xmm3 = xmm3[2,3,0,1]
2828 ; AVX1-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0,1,2],xmm2[3],xmm3[4,5,6,7]
2848 ; AVX1-NEXT: vpshufd {{.*#+}} xmm3 = xmm1[0,2,2,3]
2849 ; AVX1-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2],xmm3[3,4,5,6],xmm0[7]
2874 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
2875 ; AVX1-NEXT: vpblendw {{.*#+}} xmm2 = xmm3[0],xmm2[1],xmm3[2,3],xmm2[4,5,6],xmm3[7]
2876 ; AVX1-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3],xmm3[4,5,6,7]
2921 ; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,4,5,4,5,6,7,0,1,4,5,8,9,4,5]
2922 ; AVX1-NEXT: vpshufb %xmm3, %xmm2, %xmm2
2926 ; AVX1-NEXT: vpshufb %xmm3, %xmm1, %xmm1
2946 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
2947 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[10,11,12,13,14,15],xmm3[0,1,2,3,4,5,6,7,8,9]
2948 ; AVX1-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3],xmm3[4,5,6,7]
2972 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
2973 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[10,11,12,13,14,15],xmm3[0,1,2,3,4,5,6,7,8,9]
3046 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
3047 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[6,7,8,9,10,11,12,13,14,15],xmm3[0,1,2,3,4,5]
3048 ; AVX1-NEXT: vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
3072 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm3
3073 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[6,7,8,9,10,11,12,13,14,15],xmm3[0,1,2,3,4,5]
3146 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
3147 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[6,7,8,9,10,11,12,13,14,15],xmm3[0,1,2,3,4,5]
3148 ; AVX1-NEXT: vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
3172 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
3173 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[6,7,8,9,10,11,12,13,14,15],xmm3[0,1,2,3,4,5]
3190 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
3191 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[10,11,12,13,14,15],xmm3[0,1,2,3,4,5,6,7,8,9]
3192 ; AVX1-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0,1,2,3],xmm3[4,5,6,7]
3216 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
3217 ; AVX1-NEXT: vpalignr {{.*#+}} xmm2 = xmm2[10,11,12,13,14,15],xmm3[0,1,2,3,4,5,6,7,8,9]
3235 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
3236 ; AVX1-NEXT: vpshufhw {{.*#+}} xmm3 = xmm3[0,1,2,3,7,5,4,4]
3237 ; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm3[2],xmm2[2],xmm3[3],xmm2[3]
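The matches above appear to be auto-generated FileCheck lines (update_llc_test_checks.py style) from an LLVM x86 vector-shuffle test: AVX1 has no 256-bit integer shuffle instructions, so lane-crossing <16 x i16> shuffles are generally split into 128-bit halves with vextractf128, handled with vpblendw/vpshufb/vpalignr/vpunpck*, and rejoined with vinsertf128, while AVX2 can use vinserti128 and wider shuffles. As a minimal sketch only (the function name and shuffle mask below are assumptions, not taken from the test file), IR of roughly this shape is what such checks are attached to:

; Hypothetical example: select elements 1..15 of %a followed by element 0 of %b,
; i.e. the 32-element concatenation shifted down by one word. Lane-crossing masks
; like this are typically lowered on AVX1 by splitting the ymm operands into xmm
; halves and recombining them, as in the sequences listed above.
define <16 x i16> @shift_by_one_v16i16(<16 x i16> %a, <16 x i16> %b) {
  %s = shufflevector <16 x i16> %a, <16 x i16> %b,
       <16 x i32> <i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8,
                   i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16>
  ret <16 x i16> %s
}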