; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx512f | FileCheck %s --check-prefix=AVX-32 --check-prefix=AVX512F-32
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f | FileCheck %s --check-prefix=AVX-64 --check-prefix=AVX512F-64
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx512f,+avx512bw | FileCheck %s --check-prefix=AVX-32 --check-prefix=AVX512BW-32
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f,+avx512bw | FileCheck %s --check-prefix=AVX-64 --check-prefix=AVX512BW-64

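; On i686 all eight double arguments are passed on the stack, so the build
; vector folds into a single unaligned 512-bit load. On x86_64 they arrive in
; %xmm0-%xmm7 and are merged with vmovlhps/vinsertf128 before the final
; vinsertf64x4 into %zmm0.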
define <8 x double> @test_buildvector_v8f64(double %a0, double %a1, double %a2, double %a3, double %a4, double %a5, double %a6, double %a7) {
; AVX-32-LABEL: test_buildvector_v8f64:
; AVX-32:       # %bb.0:
; AVX-32-NEXT:    vmovups {{[0-9]+}}(%esp), %zmm0
; AVX-32-NEXT:    retl
;
; AVX-64-LABEL: test_buildvector_v8f64:
; AVX-64:       # %bb.0:
; AVX-64-NEXT:    vmovlhps {{.*#+}} xmm6 = xmm6[0],xmm7[0]
; AVX-64-NEXT:    vmovlhps {{.*#+}} xmm4 = xmm4[0],xmm5[0]
; AVX-64-NEXT:    vinsertf128 $1, %xmm6, %ymm4, %ymm4
; AVX-64-NEXT:    vmovlhps {{.*#+}} xmm2 = xmm2[0],xmm3[0]
; AVX-64-NEXT:    vmovlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; AVX-64-NEXT:    vinsertf128 $1, %xmm2, %ymm0, %ymm0
; AVX-64-NEXT:    vinsertf64x4 $1, %ymm4, %zmm0, %zmm0
; AVX-64-NEXT:    retq
  %ins0 = insertelement <8 x double> undef, double %a0, i32 0
  %ins1 = insertelement <8 x double> %ins0, double %a1, i32 1
  %ins2 = insertelement <8 x double> %ins1, double %a2, i32 2
  %ins3 = insertelement <8 x double> %ins2, double %a3, i32 3
  %ins4 = insertelement <8 x double> %ins3, double %a4, i32 4
  %ins5 = insertelement <8 x double> %ins4, double %a5, i32 5
  %ins6 = insertelement <8 x double> %ins5, double %a6, i32 6
  %ins7 = insertelement <8 x double> %ins6, double %a7, i32 7
  ret <8 x double> %ins7
}

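; Only the first eight float arguments fit in %xmm0-%xmm7; %a8-%a15 spill to
; the stack, so the x86_64 lowering mixes register and memory operands in its
; vinsertps chain.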
define <16 x float> @test_buildvector_v16f32(float %a0, float %a1, float %a2, float %a3, float %a4, float %a5, float %a6, float %a7, float %a8, float %a9, float %a10, float %a11, float %a12, float %a13, float %a14, float %a15) {
; AVX-32-LABEL: test_buildvector_v16f32:
; AVX-32:       # %bb.0:
; AVX-32-NEXT:    vmovups {{[0-9]+}}(%esp), %zmm0
; AVX-32-NEXT:    retl
;
; AVX-64-LABEL: test_buildvector_v16f32:
; AVX-64:       # %bb.0:
; AVX-64-NEXT:    vmovss {{.*#+}} xmm8 = mem[0],zero,zero,zero
; AVX-64-NEXT:    vmovss {{.*#+}} xmm9 = mem[0],zero,zero,zero
; AVX-64-NEXT:    vinsertps {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[2,3]
; AVX-64-NEXT:    vinsertps {{.*#+}} xmm4 = xmm4[0,1],xmm6[0],xmm4[3]
; AVX-64-NEXT:    vinsertps {{.*#+}} xmm4 = xmm4[0,1,2],xmm7[0]
; AVX-64-NEXT:    vinsertps {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[2,3]
; AVX-64-NEXT:    vinsertps {{.*#+}} xmm0 = xmm0[0,1],xmm2[0],xmm0[3]
; AVX-64-NEXT:    vinsertps {{.*#+}} xmm0 = xmm0[0,1,2],xmm3[0]
; AVX-64-NEXT:    vinsertf128 $1, %xmm4, %ymm0, %ymm0
; AVX-64-NEXT:    vinsertps {{.*#+}} xmm1 = xmm9[0],mem[0],xmm9[2,3]
; AVX-64-NEXT:    vinsertps {{.*#+}} xmm1 = xmm1[0,1],mem[0],xmm1[3]
; AVX-64-NEXT:    vinsertps {{.*#+}} xmm1 = xmm1[0,1,2],mem[0]
; AVX-64-NEXT:    vinsertps {{.*#+}} xmm2 = xmm8[0],mem[0],xmm8[2,3]
; AVX-64-NEXT:    vinsertps {{.*#+}} xmm2 = xmm2[0,1],mem[0],xmm2[3]
; AVX-64-NEXT:    vinsertps {{.*#+}} xmm2 = xmm2[0,1,2],mem[0]
; AVX-64-NEXT:    vinsertf128 $1, %xmm2, %ymm1, %ymm1
; AVX-64-NEXT:    vinsertf64x4 $1, %ymm1, %zmm0, %zmm0
; AVX-64-NEXT:    retq
  %ins0  = insertelement <16 x float> undef,  float %a0,  i32 0
  %ins1  = insertelement <16 x float> %ins0,  float %a1,  i32 1
  %ins2  = insertelement <16 x float> %ins1,  float %a2,  i32 2
  %ins3  = insertelement <16 x float> %ins2,  float %a3,  i32 3
  %ins4  = insertelement <16 x float> %ins3,  float %a4,  i32 4
  %ins5  = insertelement <16 x float> %ins4,  float %a5,  i32 5
  %ins6  = insertelement <16 x float> %ins5,  float %a6,  i32 6
  %ins7  = insertelement <16 x float> %ins6,  float %a7,  i32 7
  %ins8  = insertelement <16 x float> %ins7,  float %a8,  i32 8
  %ins9  = insertelement <16 x float> %ins8,  float %a9,  i32 9
  %ins10 = insertelement <16 x float> %ins9,  float %a10, i32 10
  %ins11 = insertelement <16 x float> %ins10, float %a11, i32 11
  %ins12 = insertelement <16 x float> %ins11, float %a12, i32 12
  %ins13 = insertelement <16 x float> %ins12, float %a13, i32 13
  %ins14 = insertelement <16 x float> %ins13, float %a14, i32 14
  %ins15 = insertelement <16 x float> %ins14, float %a15, i32 15
  ret <16 x float> %ins15
}

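; The x86-64 SysV ABI passes the first six i64 arguments in %rdi, %rsi, %rdx,
; %rcx, %r8 and %r9; %a6/%a7 come from the stack via the memory-folded
; vinserti128.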
define <8 x i64> @test_buildvector_v8i64(i64 %a0, i64 %a1, i64 %a2, i64 %a3, i64 %a4, i64 %a5, i64 %a6, i64 %a7) {
; AVX-32-LABEL: test_buildvector_v8i64:
; AVX-32:       # %bb.0:
; AVX-32-NEXT:    vmovups {{[0-9]+}}(%esp), %zmm0
; AVX-32-NEXT:    retl
;
; AVX-64-LABEL: test_buildvector_v8i64:
; AVX-64:       # %bb.0:
; AVX-64-NEXT:    vmovq %rcx, %xmm0
; AVX-64-NEXT:    vmovq %rdx, %xmm1
; AVX-64-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm1[0],xmm0[0]
; AVX-64-NEXT:    vmovq %rsi, %xmm1
; AVX-64-NEXT:    vmovq %rdi, %xmm2
; AVX-64-NEXT:    vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX-64-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
; AVX-64-NEXT:    vmovq %r9, %xmm1
; AVX-64-NEXT:    vmovq %r8, %xmm2
; AVX-64-NEXT:    vpunpcklqdq {{.*#+}} xmm1 = xmm2[0],xmm1[0]
; AVX-64-NEXT:    vinserti128 $1, {{[0-9]+}}(%rsp), %ymm1, %ymm1
; AVX-64-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX-64-NEXT:    retq
  %ins0 = insertelement <8 x i64> undef, i64 %a0, i32 0
  %ins1 = insertelement <8 x i64> %ins0, i64 %a1, i32 1
  %ins2 = insertelement <8 x i64> %ins1, i64 %a2, i32 2
  %ins3 = insertelement <8 x i64> %ins2, i64 %a3, i32 3
  %ins4 = insertelement <8 x i64> %ins3, i64 %a4, i32 4
  %ins5 = insertelement <8 x i64> %ins4, i64 %a5, i32 5
  %ins6 = insertelement <8 x i64> %ins5, i64 %a6, i32 6
  %ins7 = insertelement <8 x i64> %ins6, i64 %a7, i32 7
  ret <8 x i64> %ins7
}

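; Same register/stack split for i32: six arguments arrive in GPRs
; (vmovd/vpinsrd), the remaining ten are loaded from the stack.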
define <16 x i32> @test_buildvector_v16i32(i32 %a0, i32 %a1, i32 %a2, i32 %a3, i32 %a4, i32 %a5, i32 %a6, i32 %a7, i32 %a8, i32 %a9, i32 %a10, i32 %a11, i32 %a12, i32 %a13, i32 %a14, i32 %a15) {
; AVX-32-LABEL: test_buildvector_v16i32:
; AVX-32:       # %bb.0:
; AVX-32-NEXT:    vmovups {{[0-9]+}}(%esp), %zmm0
; AVX-32-NEXT:    retl
;
; AVX-64-LABEL: test_buildvector_v16i32:
; AVX-64:       # %bb.0:
; AVX-64-NEXT:    vmovd %edi, %xmm0
; AVX-64-NEXT:    vpinsrd $1, %esi, %xmm0, %xmm0
; AVX-64-NEXT:    vpinsrd $2, %edx, %xmm0, %xmm0
; AVX-64-NEXT:    vpinsrd $3, %ecx, %xmm0, %xmm0
; AVX-64-NEXT:    vmovd %r8d, %xmm1
; AVX-64-NEXT:    vpinsrd $1, %r9d, %xmm1, %xmm1
; AVX-64-NEXT:    vpinsrd $2, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX-64-NEXT:    vpinsrd $3, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX-64-NEXT:    vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX-64-NEXT:    vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; AVX-64-NEXT:    vpinsrd $1, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX-64-NEXT:    vpinsrd $2, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX-64-NEXT:    vpinsrd $3, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX-64-NEXT:    vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; AVX-64-NEXT:    vpinsrd $1, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX-64-NEXT:    vpinsrd $2, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX-64-NEXT:    vpinsrd $3, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX-64-NEXT:    vinserti128 $1, %xmm2, %ymm1, %ymm1
; AVX-64-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX-64-NEXT:    retq
  %ins0  = insertelement <16 x i32> undef,  i32 %a0,  i32 0
  %ins1  = insertelement <16 x i32> %ins0,  i32 %a1,  i32 1
  %ins2  = insertelement <16 x i32> %ins1,  i32 %a2,  i32 2
  %ins3  = insertelement <16 x i32> %ins2,  i32 %a3,  i32 3
  %ins4  = insertelement <16 x i32> %ins3,  i32 %a4,  i32 4
  %ins5  = insertelement <16 x i32> %ins4,  i32 %a5,  i32 5
  %ins6  = insertelement <16 x i32> %ins5,  i32 %a6,  i32 6
  %ins7  = insertelement <16 x i32> %ins6,  i32 %a7,  i32 7
  %ins8  = insertelement <16 x i32> %ins7,  i32 %a8,  i32 8
  %ins9  = insertelement <16 x i32> %ins8,  i32 %a9,  i32 9
  %ins10 = insertelement <16 x i32> %ins9,  i32 %a10, i32 10
  %ins11 = insertelement <16 x i32> %ins10, i32 %a11, i32 11
  %ins12 = insertelement <16 x i32> %ins11, i32 %a12, i32 12
  %ins13 = insertelement <16 x i32> %ins12, i32 %a13, i32 13
  %ins14 = insertelement <16 x i32> %ins13, i32 %a14, i32 14
  %ins15 = insertelement <16 x i32> %ins14, i32 %a15, i32 15
  ret <16 x i32> %ins15
}

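; v32i16 is only a legal 512-bit type with AVX512BW. With plain AVX512F the
; vector is split and returned in %ymm0/%ymm1; with AVX512BW the two halves
; are joined by vinserti64x4 into %zmm0.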
define <32 x i16> @test_buildvector_v32i16(i16 %a0, i16 %a1, i16 %a2, i16 %a3, i16 %a4, i16 %a5, i16 %a6, i16 %a7, i16 %a8, i16 %a9, i16 %a10, i16 %a11, i16 %a12, i16 %a13, i16 %a14, i16 %a15, i16 %a16, i16 %a17, i16 %a18, i16 %a19, i16 %a20, i16 %a21, i16 %a22, i16 %a23, i16 %a24, i16 %a25, i16 %a26, i16 %a27, i16 %a28, i16 %a29, i16 %a30, i16 %a31) {
; AVX512F-32-LABEL: test_buildvector_v32i16:
; AVX512F-32:       # %bb.0:
; AVX512F-32-NEXT:    vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; AVX512F-32-NEXT:    vpinsrw $1, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512F-32-NEXT:    vpinsrw $2, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512F-32-NEXT:    vpinsrw $3, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512F-32-NEXT:    vpinsrw $4, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512F-32-NEXT:    vpinsrw $5, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512F-32-NEXT:    vpinsrw $6, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512F-32-NEXT:    vpinsrw $7, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512F-32-NEXT:    vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; AVX512F-32-NEXT:    vpinsrw $1, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrw $2, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrw $3, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrw $4, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrw $5, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrw $6, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrw $7, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512F-32-NEXT:    vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; AVX512F-32-NEXT:    vpinsrw $1, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrw $2, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrw $3, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrw $4, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrw $5, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrw $6, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrw $7, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; AVX512F-32-NEXT:    vpinsrw $1, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512F-32-NEXT:    vpinsrw $2, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512F-32-NEXT:    vpinsrw $3, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512F-32-NEXT:    vpinsrw $4, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512F-32-NEXT:    vpinsrw $5, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512F-32-NEXT:    vpinsrw $6, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512F-32-NEXT:    vpinsrw $7, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512F-32-NEXT:    vinserti128 $1, %xmm2, %ymm1, %ymm1
; AVX512F-32-NEXT:    retl
;
; AVX512F-64-LABEL: test_buildvector_v32i16:
; AVX512F-64:       # %bb.0:
; AVX512F-64-NEXT:    vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; AVX512F-64-NEXT:    vpinsrw $1, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrw $2, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrw $3, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrw $4, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrw $5, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrw $6, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrw $7, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; AVX512F-64-NEXT:    vpinsrw $1, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512F-64-NEXT:    vpinsrw $2, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512F-64-NEXT:    vpinsrw $3, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512F-64-NEXT:    vpinsrw $4, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512F-64-NEXT:    vpinsrw $5, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512F-64-NEXT:    vpinsrw $6, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512F-64-NEXT:    vpinsrw $7, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512F-64-NEXT:    vinserti128 $1, %xmm1, %ymm0, %ymm1
; AVX512F-64-NEXT:    vmovd %edi, %xmm0
; AVX512F-64-NEXT:    vpinsrw $1, %esi, %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrw $2, %edx, %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrw $3, %ecx, %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrw $4, %r8d, %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrw $5, %r9d, %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrw $6, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrw $7, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; AVX512F-64-NEXT:    vpinsrw $1, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512F-64-NEXT:    vpinsrw $2, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512F-64-NEXT:    vpinsrw $3, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512F-64-NEXT:    vpinsrw $4, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512F-64-NEXT:    vpinsrw $5, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512F-64-NEXT:    vpinsrw $6, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512F-64-NEXT:    vpinsrw $7, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512F-64-NEXT:    vinserti128 $1, %xmm2, %ymm0, %ymm0
; AVX512F-64-NEXT:    retq
;
; AVX512BW-32-LABEL: test_buildvector_v32i16:
; AVX512BW-32:       # %bb.0:
; AVX512BW-32-NEXT:    vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; AVX512BW-32-NEXT:    vpinsrw $1, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512BW-32-NEXT:    vpinsrw $2, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512BW-32-NEXT:    vpinsrw $3, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512BW-32-NEXT:    vpinsrw $4, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512BW-32-NEXT:    vpinsrw $5, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512BW-32-NEXT:    vpinsrw $6, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512BW-32-NEXT:    vpinsrw $7, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512BW-32-NEXT:    vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; AVX512BW-32-NEXT:    vpinsrw $1, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrw $2, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrw $3, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrw $4, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrw $5, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrw $6, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrw $7, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512BW-32-NEXT:    vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; AVX512BW-32-NEXT:    vpinsrw $1, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrw $2, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrw $3, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrw $4, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrw $5, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrw $6, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrw $7, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; AVX512BW-32-NEXT:    vpinsrw $1, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512BW-32-NEXT:    vpinsrw $2, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512BW-32-NEXT:    vpinsrw $3, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512BW-32-NEXT:    vpinsrw $4, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512BW-32-NEXT:    vpinsrw $5, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512BW-32-NEXT:    vpinsrw $6, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512BW-32-NEXT:    vpinsrw $7, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512BW-32-NEXT:    vinserti128 $1, %xmm2, %ymm1, %ymm1
; AVX512BW-32-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
; AVX512BW-32-NEXT:    retl
;
; AVX512BW-64-LABEL: test_buildvector_v32i16:
; AVX512BW-64:       # %bb.0:
; AVX512BW-64-NEXT:    vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; AVX512BW-64-NEXT:    vpinsrw $1, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512BW-64-NEXT:    vpinsrw $2, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512BW-64-NEXT:    vpinsrw $3, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512BW-64-NEXT:    vpinsrw $4, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512BW-64-NEXT:    vpinsrw $5, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512BW-64-NEXT:    vpinsrw $6, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512BW-64-NEXT:    vpinsrw $7, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512BW-64-NEXT:    vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; AVX512BW-64-NEXT:    vpinsrw $1, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrw $2, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrw $3, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrw $4, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrw $5, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrw $6, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrw $7, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512BW-64-NEXT:    vmovd %edi, %xmm1
; AVX512BW-64-NEXT:    vpinsrw $1, %esi, %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrw $2, %edx, %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrw $3, %ecx, %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrw $4, %r8d, %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrw $5, %r9d, %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrw $6, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrw $7, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; AVX512BW-64-NEXT:    vpinsrw $1, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512BW-64-NEXT:    vpinsrw $2, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512BW-64-NEXT:    vpinsrw $3, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512BW-64-NEXT:    vpinsrw $4, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512BW-64-NEXT:    vpinsrw $5, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512BW-64-NEXT:    vpinsrw $6, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512BW-64-NEXT:    vpinsrw $7, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512BW-64-NEXT:    vinserti128 $1, %xmm2, %ymm1, %ymm1
; AVX512BW-64-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
; AVX512BW-64-NEXT:    retq
  %ins0  = insertelement <32 x i16> undef,  i16 %a0,  i32 0
  %ins1  = insertelement <32 x i16> %ins0,  i16 %a1,  i32 1
  %ins2  = insertelement <32 x i16> %ins1,  i16 %a2,  i32 2
  %ins3  = insertelement <32 x i16> %ins2,  i16 %a3,  i32 3
  %ins4  = insertelement <32 x i16> %ins3,  i16 %a4,  i32 4
  %ins5  = insertelement <32 x i16> %ins4,  i16 %a5,  i32 5
  %ins6  = insertelement <32 x i16> %ins5,  i16 %a6,  i32 6
  %ins7  = insertelement <32 x i16> %ins6,  i16 %a7,  i32 7
  %ins8  = insertelement <32 x i16> %ins7,  i16 %a8,  i32 8
  %ins9  = insertelement <32 x i16> %ins8,  i16 %a9,  i32 9
  %ins10 = insertelement <32 x i16> %ins9,  i16 %a10, i32 10
  %ins11 = insertelement <32 x i16> %ins10, i16 %a11, i32 11
  %ins12 = insertelement <32 x i16> %ins11, i16 %a12, i32 12
  %ins13 = insertelement <32 x i16> %ins12, i16 %a13, i32 13
  %ins14 = insertelement <32 x i16> %ins13, i16 %a14, i32 14
  %ins15 = insertelement <32 x i16> %ins14, i16 %a15, i32 15
  %ins16 = insertelement <32 x i16> %ins15, i16 %a16, i32 16
  %ins17 = insertelement <32 x i16> %ins16, i16 %a17, i32 17
  %ins18 = insertelement <32 x i16> %ins17, i16 %a18, i32 18
  %ins19 = insertelement <32 x i16> %ins18, i16 %a19, i32 19
  %ins20 = insertelement <32 x i16> %ins19, i16 %a20, i32 20
  %ins21 = insertelement <32 x i16> %ins20, i16 %a21, i32 21
  %ins22 = insertelement <32 x i16> %ins21, i16 %a22, i32 22
  %ins23 = insertelement <32 x i16> %ins22, i16 %a23, i32 23
  %ins24 = insertelement <32 x i16> %ins23, i16 %a24, i32 24
  %ins25 = insertelement <32 x i16> %ins24, i16 %a25, i32 25
  %ins26 = insertelement <32 x i16> %ins25, i16 %a26, i32 26
  %ins27 = insertelement <32 x i16> %ins26, i16 %a27, i32 27
  %ins28 = insertelement <32 x i16> %ins27, i16 %a28, i32 28
  %ins29 = insertelement <32 x i16> %ins28, i16 %a29, i32 29
  %ins30 = insertelement <32 x i16> %ins29, i16 %a30, i32 30
  %ins31 = insertelement <32 x i16> %ins30, i16 %a31, i32 31
  ret <32 x i16> %ins31
}

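; Likewise, v64i8 needs AVX512BW to be legal as a single 512-bit value;
; without it the two 256-bit halves are returned in %ymm0/%ymm1.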
define <64 x i8> @test_buildvector_v64i8(i8 %a0, i8 %a1, i8 %a2, i8 %a3, i8 %a4, i8 %a5, i8 %a6, i8 %a7, i8 %a8, i8 %a9, i8 %a10, i8 %a11, i8 %a12, i8 %a13, i8 %a14, i8 %a15, i8 %a16, i8 %a17, i8 %a18, i8 %a19, i8 %a20, i8 %a21, i8 %a22, i8 %a23, i8 %a24, i8 %a25, i8 %a26, i8 %a27, i8 %a28, i8 %a29, i8 %a30, i8 %a31, i8 %a32, i8 %a33, i8 %a34, i8 %a35, i8 %a36, i8 %a37, i8 %a38, i8 %a39, i8 %a40, i8 %a41, i8 %a42, i8 %a43, i8 %a44, i8 %a45, i8 %a46, i8 %a47, i8 %a48, i8 %a49, i8 %a50, i8 %a51, i8 %a52, i8 %a53, i8 %a54, i8 %a55, i8 %a56, i8 %a57, i8 %a58, i8 %a59, i8 %a60, i8 %a61, i8 %a62, i8 %a63) {
; AVX512F-32-LABEL: test_buildvector_v64i8:
; AVX512F-32:       # %bb.0:
; AVX512F-32-NEXT:    vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; AVX512F-32-NEXT:    vpinsrb $1, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512F-32-NEXT:    vpinsrb $2, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512F-32-NEXT:    vpinsrb $3, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512F-32-NEXT:    vpinsrb $4, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512F-32-NEXT:    vpinsrb $5, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512F-32-NEXT:    vpinsrb $6, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512F-32-NEXT:    vpinsrb $7, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512F-32-NEXT:    vpinsrb $8, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512F-32-NEXT:    vpinsrb $9, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512F-32-NEXT:    vpinsrb $10, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512F-32-NEXT:    vpinsrb $11, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512F-32-NEXT:    vpinsrb $12, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512F-32-NEXT:    vpinsrb $13, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512F-32-NEXT:    vpinsrb $14, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512F-32-NEXT:    vpinsrb $15, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512F-32-NEXT:    vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; AVX512F-32-NEXT:    vpinsrb $1, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $2, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $3, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $4, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $5, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $6, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $7, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $8, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $9, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $10, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $11, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $12, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $13, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $14, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $15, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512F-32-NEXT:    vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; AVX512F-32-NEXT:    vpinsrb $1, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $2, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $3, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $4, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $5, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $6, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $7, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $8, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $9, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $10, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $11, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $12, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $13, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $14, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vpinsrb $15, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512F-32-NEXT:    vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; AVX512F-32-NEXT:    vpinsrb $1, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512F-32-NEXT:    vpinsrb $2, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512F-32-NEXT:    vpinsrb $3, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512F-32-NEXT:    vpinsrb $4, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512F-32-NEXT:    vpinsrb $5, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512F-32-NEXT:    vpinsrb $6, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512F-32-NEXT:    vpinsrb $7, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512F-32-NEXT:    vpinsrb $8, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512F-32-NEXT:    vpinsrb $9, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512F-32-NEXT:    vpinsrb $10, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512F-32-NEXT:    vpinsrb $11, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512F-32-NEXT:    vpinsrb $12, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512F-32-NEXT:    vpinsrb $13, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512F-32-NEXT:    vpinsrb $14, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512F-32-NEXT:    vpinsrb $15, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512F-32-NEXT:    vinserti128 $1, %xmm2, %ymm1, %ymm1
; AVX512F-32-NEXT:    retl
;
; AVX512F-64-LABEL: test_buildvector_v64i8:
; AVX512F-64:       # %bb.0:
; AVX512F-64-NEXT:    vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; AVX512F-64-NEXT:    vpinsrb $1, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $2, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $3, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $4, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $5, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $6, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $7, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $8, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $9, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $10, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $11, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $12, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $13, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $14, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $15, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; AVX512F-64-NEXT:    vpinsrb $1, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512F-64-NEXT:    vpinsrb $2, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512F-64-NEXT:    vpinsrb $3, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512F-64-NEXT:    vpinsrb $4, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512F-64-NEXT:    vpinsrb $5, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512F-64-NEXT:    vpinsrb $6, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512F-64-NEXT:    vpinsrb $7, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512F-64-NEXT:    vpinsrb $8, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512F-64-NEXT:    vpinsrb $9, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512F-64-NEXT:    vpinsrb $10, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512F-64-NEXT:    vpinsrb $11, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512F-64-NEXT:    vpinsrb $12, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512F-64-NEXT:    vpinsrb $13, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512F-64-NEXT:    vpinsrb $14, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512F-64-NEXT:    vpinsrb $15, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512F-64-NEXT:    vinserti128 $1, %xmm1, %ymm0, %ymm1
; AVX512F-64-NEXT:    vmovd %edi, %xmm0
; AVX512F-64-NEXT:    vpinsrb $1, %esi, %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $2, %edx, %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $3, %ecx, %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $4, %r8d, %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $5, %r9d, %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $6, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $7, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $8, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $9, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $10, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $11, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $12, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $13, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $14, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vpinsrb $15, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512F-64-NEXT:    vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; AVX512F-64-NEXT:    vpinsrb $1, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512F-64-NEXT:    vpinsrb $2, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512F-64-NEXT:    vpinsrb $3, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512F-64-NEXT:    vpinsrb $4, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512F-64-NEXT:    vpinsrb $5, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512F-64-NEXT:    vpinsrb $6, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512F-64-NEXT:    vpinsrb $7, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512F-64-NEXT:    vpinsrb $8, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512F-64-NEXT:    vpinsrb $9, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512F-64-NEXT:    vpinsrb $10, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512F-64-NEXT:    vpinsrb $11, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512F-64-NEXT:    vpinsrb $12, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512F-64-NEXT:    vpinsrb $13, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512F-64-NEXT:    vpinsrb $14, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512F-64-NEXT:    vpinsrb $15, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512F-64-NEXT:    vinserti128 $1, %xmm2, %ymm0, %ymm0
; AVX512F-64-NEXT:    retq
;
; AVX512BW-32-LABEL: test_buildvector_v64i8:
; AVX512BW-32:       # %bb.0:
; AVX512BW-32-NEXT:    vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; AVX512BW-32-NEXT:    vpinsrb $1, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512BW-32-NEXT:    vpinsrb $2, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512BW-32-NEXT:    vpinsrb $3, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512BW-32-NEXT:    vpinsrb $4, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512BW-32-NEXT:    vpinsrb $5, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512BW-32-NEXT:    vpinsrb $6, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512BW-32-NEXT:    vpinsrb $7, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512BW-32-NEXT:    vpinsrb $8, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512BW-32-NEXT:    vpinsrb $9, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512BW-32-NEXT:    vpinsrb $10, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512BW-32-NEXT:    vpinsrb $11, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512BW-32-NEXT:    vpinsrb $12, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512BW-32-NEXT:    vpinsrb $13, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512BW-32-NEXT:    vpinsrb $14, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512BW-32-NEXT:    vpinsrb $15, {{[0-9]+}}(%esp), %xmm0, %xmm0
; AVX512BW-32-NEXT:    vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; AVX512BW-32-NEXT:    vpinsrb $1, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $2, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $3, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $4, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $5, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $6, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $7, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $8, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $9, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $10, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $11, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $12, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $13, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $14, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $15, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512BW-32-NEXT:    vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; AVX512BW-32-NEXT:    vpinsrb $1, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $2, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $3, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $4, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $5, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $6, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $7, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $8, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $9, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $10, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $11, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $12, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $13, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $14, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vpinsrb $15, {{[0-9]+}}(%esp), %xmm1, %xmm1
; AVX512BW-32-NEXT:    vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; AVX512BW-32-NEXT:    vpinsrb $1, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512BW-32-NEXT:    vpinsrb $2, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512BW-32-NEXT:    vpinsrb $3, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512BW-32-NEXT:    vpinsrb $4, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512BW-32-NEXT:    vpinsrb $5, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512BW-32-NEXT:    vpinsrb $6, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512BW-32-NEXT:    vpinsrb $7, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512BW-32-NEXT:    vpinsrb $8, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512BW-32-NEXT:    vpinsrb $9, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512BW-32-NEXT:    vpinsrb $10, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512BW-32-NEXT:    vpinsrb $11, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512BW-32-NEXT:    vpinsrb $12, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512BW-32-NEXT:    vpinsrb $13, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512BW-32-NEXT:    vpinsrb $14, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512BW-32-NEXT:    vpinsrb $15, {{[0-9]+}}(%esp), %xmm2, %xmm2
; AVX512BW-32-NEXT:    vinserti128 $1, %xmm2, %ymm1, %ymm1
; AVX512BW-32-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
; AVX512BW-32-NEXT:    retl
;
; AVX512BW-64-LABEL: test_buildvector_v64i8:
; AVX512BW-64:       # %bb.0:
; AVX512BW-64-NEXT:    vmovd {{.*#+}} xmm0 = mem[0],zero,zero,zero
; AVX512BW-64-NEXT:    vpinsrb $1, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512BW-64-NEXT:    vpinsrb $2, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512BW-64-NEXT:    vpinsrb $3, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512BW-64-NEXT:    vpinsrb $4, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512BW-64-NEXT:    vpinsrb $5, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512BW-64-NEXT:    vpinsrb $6, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512BW-64-NEXT:    vpinsrb $7, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512BW-64-NEXT:    vpinsrb $8, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512BW-64-NEXT:    vpinsrb $9, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512BW-64-NEXT:    vpinsrb $10, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512BW-64-NEXT:    vpinsrb $11, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512BW-64-NEXT:    vpinsrb $12, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512BW-64-NEXT:    vpinsrb $13, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512BW-64-NEXT:    vpinsrb $14, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512BW-64-NEXT:    vpinsrb $15, {{[0-9]+}}(%rsp), %xmm0, %xmm0
; AVX512BW-64-NEXT:    vmovd {{.*#+}} xmm1 = mem[0],zero,zero,zero
; AVX512BW-64-NEXT:    vpinsrb $1, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $2, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $3, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $4, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $5, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $6, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $7, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $8, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $9, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $10, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $11, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $12, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $13, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $14, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $15, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vinserti128 $1, %xmm1, %ymm0, %ymm0
; AVX512BW-64-NEXT:    vmovd %edi, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $1, %esi, %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $2, %edx, %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $3, %ecx, %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $4, %r8d, %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $5, %r9d, %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $6, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $7, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $8, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $9, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $10, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $11, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $12, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $13, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $14, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vpinsrb $15, {{[0-9]+}}(%rsp), %xmm1, %xmm1
; AVX512BW-64-NEXT:    vmovd {{.*#+}} xmm2 = mem[0],zero,zero,zero
; AVX512BW-64-NEXT:    vpinsrb $1, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512BW-64-NEXT:    vpinsrb $2, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512BW-64-NEXT:    vpinsrb $3, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512BW-64-NEXT:    vpinsrb $4, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512BW-64-NEXT:    vpinsrb $5, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512BW-64-NEXT:    vpinsrb $6, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512BW-64-NEXT:    vpinsrb $7, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512BW-64-NEXT:    vpinsrb $8, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512BW-64-NEXT:    vpinsrb $9, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512BW-64-NEXT:    vpinsrb $10, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512BW-64-NEXT:    vpinsrb $11, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512BW-64-NEXT:    vpinsrb $12, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512BW-64-NEXT:    vpinsrb $13, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512BW-64-NEXT:    vpinsrb $14, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512BW-64-NEXT:    vpinsrb $15, {{[0-9]+}}(%rsp), %xmm2, %xmm2
; AVX512BW-64-NEXT:    vinserti128 $1, %xmm2, %ymm1, %ymm1
; AVX512BW-64-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
; AVX512BW-64-NEXT:    retq
  %ins0  = insertelement <64 x i8> undef,  i8 %a0,  i32 0
  %ins1  = insertelement <64 x i8> %ins0,  i8 %a1,  i32 1
  %ins2  = insertelement <64 x i8> %ins1,  i8 %a2,  i32 2
  %ins3  = insertelement <64 x i8> %ins2,  i8 %a3,  i32 3
  %ins4  = insertelement <64 x i8> %ins3,  i8 %a4,  i32 4
  %ins5  = insertelement <64 x i8> %ins4,  i8 %a5,  i32 5
  %ins6  = insertelement <64 x i8> %ins5,  i8 %a6,  i32 6
  %ins7  = insertelement <64 x i8> %ins6,  i8 %a7,  i32 7
  %ins8  = insertelement <64 x i8> %ins7,  i8 %a8,  i32 8
  %ins9  = insertelement <64 x i8> %ins8,  i8 %a9,  i32 9
  %ins10 = insertelement <64 x i8> %ins9,  i8 %a10, i32 10
  %ins11 = insertelement <64 x i8> %ins10, i8 %a11, i32 11
  %ins12 = insertelement <64 x i8> %ins11, i8 %a12, i32 12
  %ins13 = insertelement <64 x i8> %ins12, i8 %a13, i32 13
  %ins14 = insertelement <64 x i8> %ins13, i8 %a14, i32 14
  %ins15 = insertelement <64 x i8> %ins14, i8 %a15, i32 15
  %ins16 = insertelement <64 x i8> %ins15, i8 %a16, i32 16
  %ins17 = insertelement <64 x i8> %ins16, i8 %a17, i32 17
  %ins18 = insertelement <64 x i8> %ins17, i8 %a18, i32 18
  %ins19 = insertelement <64 x i8> %ins18, i8 %a19, i32 19
  %ins20 = insertelement <64 x i8> %ins19, i8 %a20, i32 20
  %ins21 = insertelement <64 x i8> %ins20, i8 %a21, i32 21
  %ins22 = insertelement <64 x i8> %ins21, i8 %a22, i32 22
  %ins23 = insertelement <64 x i8> %ins22, i8 %a23, i32 23
  %ins24 = insertelement <64 x i8> %ins23, i8 %a24, i32 24
  %ins25 = insertelement <64 x i8> %ins24, i8 %a25, i32 25
  %ins26 = insertelement <64 x i8> %ins25, i8 %a26, i32 26
  %ins27 = insertelement <64 x i8> %ins26, i8 %a27, i32 27
  %ins28 = insertelement <64 x i8> %ins27, i8 %a28, i32 28
  %ins29 = insertelement <64 x i8> %ins28, i8 %a29, i32 29
  %ins30 = insertelement <64 x i8> %ins29, i8 %a30, i32 30
  %ins31 = insertelement <64 x i8> %ins30, i8 %a31, i32 31
  %ins32 = insertelement <64 x i8> %ins31, i8 %a32, i32 32
  %ins33 = insertelement <64 x i8> %ins32, i8 %a33, i32 33
  %ins34 = insertelement <64 x i8> %ins33, i8 %a34, i32 34
  %ins35 = insertelement <64 x i8> %ins34, i8 %a35, i32 35
  %ins36 = insertelement <64 x i8> %ins35, i8 %a36, i32 36
  %ins37 = insertelement <64 x i8> %ins36, i8 %a37, i32 37
  %ins38 = insertelement <64 x i8> %ins37, i8 %a38, i32 38
  %ins39 = insertelement <64 x i8> %ins38, i8 %a39, i32 39
  %ins40 = insertelement <64 x i8> %ins39, i8 %a40, i32 40
  %ins41 = insertelement <64 x i8> %ins40, i8 %a41, i32 41
  %ins42 = insertelement <64 x i8> %ins41, i8 %a42, i32 42
  %ins43 = insertelement <64 x i8> %ins42, i8 %a43, i32 43
  %ins44 = insertelement <64 x i8> %ins43, i8 %a44, i32 44
  %ins45 = insertelement <64 x i8> %ins44, i8 %a45, i32 45
  %ins46 = insertelement <64 x i8> %ins45, i8 %a46, i32 46
  %ins47 = insertelement <64 x i8> %ins46, i8 %a47, i32 47
  %ins48 = insertelement <64 x i8> %ins47, i8 %a48, i32 48
  %ins49 = insertelement <64 x i8> %ins48, i8 %a49, i32 49
  %ins50 = insertelement <64 x i8> %ins49, i8 %a50, i32 50
  %ins51 = insertelement <64 x i8> %ins50, i8 %a51, i32 51
  %ins52 = insertelement <64 x i8> %ins51, i8 %a52, i32 52
  %ins53 = insertelement <64 x i8> %ins52, i8 %a53, i32 53
  %ins54 = insertelement <64 x i8> %ins53, i8 %a54, i32 54
  %ins55 = insertelement <64 x i8> %ins54, i8 %a55, i32 55
  %ins56 = insertelement <64 x i8> %ins55, i8 %a56, i32 56
  %ins57 = insertelement <64 x i8> %ins56, i8 %a57, i32 57
  %ins58 = insertelement <64 x i8> %ins57, i8 %a58, i32 58
  %ins59 = insertelement <64 x i8> %ins58, i8 %a59, i32 59
  %ins60 = insertelement <64 x i8> %ins59, i8 %a60, i32 60
  %ins61 = insertelement <64 x i8> %ins60, i8 %a61, i32 61
  %ins62 = insertelement <64 x i8> %ins61, i8 %a62, i32 62
  %ins63 = insertelement <64 x i8> %ins62, i8 %a63, i32 63
  ret <64 x i8> %ins63
}