; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-apple-darwin -mcpu=knl -mattr=+avx512f | FileCheck %s --check-prefix=ALL --check-prefix=AVX512 --check-prefix=AVX512F
; RUN: llc < %s -mtriple=x86_64-apple-darwin -mcpu=knl -mattr=+avx512bw | FileCheck %s --check-prefix=ALL --check-prefix=AVX512 --check-prefix=AVX512BW

; ctpop of <8 x i64>: AVX512F splits into two 256-bit halves (nibble LUT via
; vpshufb, then vpsadbw to sum bytes per qword); AVX512BW does it in one
; 512-bit sequence.
define <8 x i64> @testv8i64(<8 x i64> %in) nounwind {
; AVX512F-LABEL: testv8i64:
; AVX512F:       ## BB#0:
; AVX512F-NEXT:    vextracti64x4 $1, %zmm0, %ymm1
; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX512F-NEXT:    vpand %ymm2, %ymm1, %ymm3
; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX512F-NEXT:    vpshufb %ymm3, %ymm4, %ymm3
; AVX512F-NEXT:    vpsrlw $4, %ymm1, %ymm1
; AVX512F-NEXT:    vpand %ymm2, %ymm1, %ymm1
; AVX512F-NEXT:    vpshufb %ymm1, %ymm4, %ymm1
; AVX512F-NEXT:    vpaddb %ymm3, %ymm1, %ymm1
; AVX512F-NEXT:    vpxor %ymm3, %ymm3, %ymm3
; AVX512F-NEXT:    vpsadbw %ymm3, %ymm1, %ymm1
; AVX512F-NEXT:    vpand %ymm2, %ymm0, %ymm5
; AVX512F-NEXT:    vpshufb %ymm5, %ymm4, %ymm5
; AVX512F-NEXT:    vpsrlw $4, %ymm0, %ymm0
; AVX512F-NEXT:    vpand %ymm2, %ymm0, %ymm0
; AVX512F-NEXT:    vpshufb %ymm0, %ymm4, %ymm0
; AVX512F-NEXT:    vpaddb %ymm5, %ymm0, %ymm0
; AVX512F-NEXT:    vpsadbw %ymm3, %ymm0, %ymm0
; AVX512F-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512F-NEXT:    retq
;
; AVX512BW-LABEL: testv8i64:
; AVX512BW:       ## BB#0:
; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX512BW-NEXT:    vpandq %zmm1, %zmm0, %zmm2
; AVX512BW-NEXT:    vmovdqu8 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX512BW-NEXT:    vpshufb %zmm2, %zmm3, %zmm2
; AVX512BW-NEXT:    vpsrlw $4, %zmm0, %zmm0
; AVX512BW-NEXT:    vpandq %zmm1, %zmm0, %zmm0
; AVX512BW-NEXT:    vpshufb %zmm0, %zmm3, %zmm0
; AVX512BW-NEXT:    vpaddb %zmm2, %zmm0, %zmm0
; AVX512BW-NEXT:    vpxord %zmm1, %zmm1, %zmm1
; AVX512BW-NEXT:    vpsadbw %zmm1, %zmm0, %zmm0
; AVX512BW-NEXT:    retq
  %out = call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %in)
  ret <8 x i64> %out
}

; ctpop of <16 x i32>: byte counts are widened to dwords with a
; vpunpckhdq/vpunpckldq + vpsadbw + vpackuswb sequence on top of the LUT.
define <16 x i32> @testv16i32(<16 x i32> %in) nounwind {
; AVX512F-LABEL: testv16i32:
; AVX512F:       ## BB#0:
; AVX512F-NEXT:    vextracti64x4 $1, %zmm0, %ymm1
; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX512F-NEXT:    vpand %ymm2, %ymm1, %ymm3
; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX512F-NEXT:    vpshufb %ymm3, %ymm4, %ymm3
; AVX512F-NEXT:    vpsrlw $4, %ymm1, %ymm1
; AVX512F-NEXT:    vpand %ymm2, %ymm1, %ymm1
; AVX512F-NEXT:    vpshufb %ymm1, %ymm4, %ymm1
; AVX512F-NEXT:    vpaddb %ymm3, %ymm1, %ymm1
; AVX512F-NEXT:    vpxor %ymm3, %ymm3, %ymm3
; AVX512F-NEXT:    vpunpckhdq {{.*#+}} ymm5 = ymm1[2],ymm3[2],ymm1[3],ymm3[3],ymm1[6],ymm3[6],ymm1[7],ymm3[7]
; AVX512F-NEXT:    vpsadbw %ymm3, %ymm5, %ymm5
; AVX512F-NEXT:    vpunpckldq {{.*#+}} ymm1 = ymm1[0],ymm3[0],ymm1[1],ymm3[1],ymm1[4],ymm3[4],ymm1[5],ymm3[5]
; AVX512F-NEXT:    vpsadbw %ymm3, %ymm1, %ymm1
; AVX512F-NEXT:    vpackuswb %ymm5, %ymm1, %ymm1
; AVX512F-NEXT:    vpand %ymm2, %ymm0, %ymm5
; AVX512F-NEXT:    vpshufb %ymm5, %ymm4, %ymm5
; AVX512F-NEXT:    vpsrlw $4, %ymm0, %ymm0
; AVX512F-NEXT:    vpand %ymm2, %ymm0, %ymm0
; AVX512F-NEXT:    vpshufb %ymm0, %ymm4, %ymm0
; AVX512F-NEXT:    vpaddb %ymm5, %ymm0, %ymm0
; AVX512F-NEXT:    vpunpckhdq {{.*#+}} ymm2 = ymm0[2],ymm3[2],ymm0[3],ymm3[3],ymm0[6],ymm3[6],ymm0[7],ymm3[7]
; AVX512F-NEXT:    vpsadbw %ymm3, %ymm2, %ymm2
; AVX512F-NEXT:    vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm3[0],ymm0[1],ymm3[1],ymm0[4],ymm3[4],ymm0[5],ymm3[5]
; AVX512F-NEXT:    vpsadbw %ymm3, %ymm0, %ymm0
; AVX512F-NEXT:    vpackuswb %ymm2, %ymm0, %ymm0
; AVX512F-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512F-NEXT:    retq
;
; AVX512BW-LABEL: testv16i32:
; AVX512BW:       ## BB#0:
; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX512BW-NEXT:    vpandq %zmm1, %zmm0, %zmm2
; AVX512BW-NEXT:    vmovdqu8 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX512BW-NEXT:    vpshufb %zmm2, %zmm3, %zmm2
; AVX512BW-NEXT:    vpsrlw $4, %zmm0, %zmm0
; AVX512BW-NEXT:    vpandq %zmm1, %zmm0, %zmm0
; AVX512BW-NEXT:    vpshufb %zmm0, %zmm3, %zmm0
; AVX512BW-NEXT:    vpaddb %zmm2, %zmm0, %zmm0
; AVX512BW-NEXT:    vpxord %zmm1, %zmm1, %zmm1
; AVX512BW-NEXT:    vpunpckhdq {{.*#+}} zmm2 = zmm0[2],zmm1[2],zmm0[3],zmm1[3],zmm0[6],zmm1[6],zmm0[7],zmm1[7],zmm0[10],zmm1[10],zmm0[11],zmm1[11],zmm0[14],zmm1[14],zmm0[15],zmm1[15]
; AVX512BW-NEXT:    vpsadbw %zmm1, %zmm2, %zmm2
; AVX512BW-NEXT:    vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
; AVX512BW-NEXT:    vpsadbw %zmm1, %zmm0, %zmm0
; AVX512BW-NEXT:    vpackuswb %zmm2, %zmm0, %zmm0
; AVX512BW-NEXT:    retq
  %out = call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %in)
  ret <16 x i32> %out
}

; ctpop of <32 x i16>: byte counts are folded into words with a
; vpsllw $8 / vpaddb / vpsrlw $8 sequence on top of the LUT.
define <32 x i16> @testv32i16(<32 x i16> %in) nounwind {
; AVX512F-LABEL: testv32i16:
; AVX512F:       ## BB#0:
; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX512F-NEXT:    vpand %ymm2, %ymm0, %ymm3
; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX512F-NEXT:    vpshufb %ymm3, %ymm4, %ymm3
; AVX512F-NEXT:    vpsrlw $4, %ymm0, %ymm0
; AVX512F-NEXT:    vpand %ymm2, %ymm0, %ymm0
; AVX512F-NEXT:    vpshufb %ymm0, %ymm4, %ymm0
; AVX512F-NEXT:    vpaddb %ymm3, %ymm0, %ymm0
; AVX512F-NEXT:    vpsllw $8, %ymm0, %ymm3
; AVX512F-NEXT:    vpaddb %ymm0, %ymm3, %ymm0
; AVX512F-NEXT:    vpsrlw $8, %ymm0, %ymm0
; AVX512F-NEXT:    vpand %ymm2, %ymm1, %ymm3
; AVX512F-NEXT:    vpshufb %ymm3, %ymm4, %ymm3
; AVX512F-NEXT:    vpsrlw $4, %ymm1, %ymm1
; AVX512F-NEXT:    vpand %ymm2, %ymm1, %ymm1
; AVX512F-NEXT:    vpshufb %ymm1, %ymm4, %ymm1
; AVX512F-NEXT:    vpaddb %ymm3, %ymm1, %ymm1
; AVX512F-NEXT:    vpsllw $8, %ymm1, %ymm2
; AVX512F-NEXT:    vpaddb %ymm1, %ymm2, %ymm1
; AVX512F-NEXT:    vpsrlw $8, %ymm1, %ymm1
; AVX512F-NEXT:    retq
;
; AVX512BW-LABEL: testv32i16:
; AVX512BW:       ## BB#0:
; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX512BW-NEXT:    vpandq %zmm1, %zmm0, %zmm2
; AVX512BW-NEXT:    vmovdqu8 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX512BW-NEXT:    vpshufb %zmm2, %zmm3, %zmm2
; AVX512BW-NEXT:    vpsrlw $4, %zmm0, %zmm0
; AVX512BW-NEXT:    vpandq %zmm1, %zmm0, %zmm0
; AVX512BW-NEXT:    vpshufb %zmm0, %zmm3, %zmm0
; AVX512BW-NEXT:    vpaddb %zmm2, %zmm0, %zmm0
; AVX512BW-NEXT:    vpsllw $8, %zmm0, %zmm1
; AVX512BW-NEXT:    vpaddb %zmm0, %zmm1, %zmm0
; AVX512BW-NEXT:    vpsrlw $8, %zmm0, %zmm0
; AVX512BW-NEXT:    retq
  %out = call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %in)
  ret <32 x i16> %out
}

; ctpop of <64 x i8>: the nibble LUT alone produces per-byte counts,
; so no widening step is needed.
define <64 x i8> @testv64i8(<64 x i8> %in) nounwind {
; AVX512F-LABEL: testv64i8:
; AVX512F:       ## BB#0:
; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX512F-NEXT:    vpand %ymm2, %ymm0, %ymm3
; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX512F-NEXT:    vpshufb %ymm3, %ymm4, %ymm3
; AVX512F-NEXT:    vpsrlw $4, %ymm0, %ymm0
; AVX512F-NEXT:    vpand %ymm2, %ymm0, %ymm0
; AVX512F-NEXT:    vpshufb %ymm0, %ymm4, %ymm0
; AVX512F-NEXT:    vpaddb %ymm3, %ymm0, %ymm0
; AVX512F-NEXT:    vpand %ymm2, %ymm1, %ymm3
; AVX512F-NEXT:    vpshufb %ymm3, %ymm4, %ymm3
; AVX512F-NEXT:    vpsrlw $4, %ymm1, %ymm1
; AVX512F-NEXT:    vpand %ymm2, %ymm1, %ymm1
; AVX512F-NEXT:    vpshufb %ymm1, %ymm4, %ymm1
; AVX512F-NEXT:    vpaddb %ymm3, %ymm1, %ymm1
; AVX512F-NEXT:    retq
;
; AVX512BW-LABEL: testv64i8:
; AVX512BW:       ## BB#0:
; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX512BW-NEXT:    vpandq %zmm1, %zmm0, %zmm2
; AVX512BW-NEXT:    vmovdqu8 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
; AVX512BW-NEXT:    vpshufb %zmm2, %zmm3, %zmm2
; AVX512BW-NEXT:    vpsrlw $4, %zmm0, %zmm0
; AVX512BW-NEXT:    vpandq %zmm1, %zmm0, %zmm0
; AVX512BW-NEXT:    vpshufb %zmm0, %zmm3, %zmm0
; AVX512BW-NEXT:    vpaddb %zmm2, %zmm0, %zmm0
; AVX512BW-NEXT:    retq
  %out = call <64 x i8> @llvm.ctpop.v64i8(<64 x i8> %in)
  ret <64 x i8> %out
}

declare <8 x i64> @llvm.ctpop.v8i64(<8 x i64>)
declare <16 x i32> @llvm.ctpop.v16i32(<16 x i32>)
declare <32 x i16> @llvm.ctpop.v32i16(<32 x i16>)
declare <64 x i8> @llvm.ctpop.v64i8(<64 x i8>)