Lines matching NEXT: SSE-NEXT and AVX-NEXT FileCheck assertions from an x86-64 shuffle-lowering test exercising insertps/blendps zero-masking and load folding. Each block below collects the matching lines for one test function; the SSE run targets at least SSE4.1 (insertps and blendps require it), the AVX run the VEX-encoded forms.
; SSE-NEXT: insertps {{.*#+}} xmm0 = xmm0[0],zero,xmm0[2],xmm1[2]
; SSE-NEXT: retq
; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm0[0],zero,xmm0[2],xmm1[2]
; AVX-NEXT: retq

; SSE-NEXT: insertps {{.*#+}} xmm0 = xmm0[0],zero,zero,xmm1[0]
; SSE-NEXT: retq
; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm0[0],zero,zero,xmm1[0]
; AVX-NEXT: retq

; SSE-NEXT: insertps {{.*#+}} xmm0 = xmm0[0],zero,xmm0[2],xmm1[0]
; SSE-NEXT: retq
; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm0[0],zero,xmm0[2],xmm1[0]
; AVX-NEXT: retq

; SSE-NEXT: insertps {{.*#+}} xmm0 = xmm0[0],zero,zero,xmm0[0]
; SSE-NEXT: retq
; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm0[0],zero,zero,xmm0[0]
; AVX-NEXT: retq

; SSE-NEXT: insertps {{.*#+}} xmm0 = xmm0[0],zero,xmm1[2],zero
; SSE-NEXT: retq
; AVX-NEXT: vinsertps {{.*#+}} xmm0 = xmm0[0],zero,xmm1[2],zero
; AVX-NEXT: retq

; SSE-NEXT: insertps {{.*#+}} xmm0 = zero,xmm1[0],zero,zero
; SSE-NEXT: retq
; AVX-NEXT: vinsertps {{.*#+}} xmm0 = zero,xmm1[0],zero,zero
; AVX-NEXT: retq
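The six blocks above each fold a shuffle with zeroed lanes into a single insertps. The IR functions that drive them are not part of this excerpt; a minimal sketch of the kind of input that would produce the second pattern (xmm0[0],zero,zero,xmm1[0]) might look like the following, with the function name and shuffle masks being assumptions:

define <4 x float> @hypothetical_insertps_0zz4(<4 x float> %x, <4 x float> %y) {
  ; Zero lanes 1-3 of %x, then pull lane 0 of %y into lane 3.
  %zeroed = shufflevector <4 x float> %x, <4 x float> zeroinitializer,
                          <4 x i32> <i32 0, i32 4, i32 4, i32 4>
  %result = shufflevector <4 x float> %zeroed, <4 x float> %y,
                          <4 x i32> <i32 0, i32 1, i32 2, i32 4>
  ret <4 x float> %result
}

With SSE4.1 available, the shuffle combiner can merge the two shuffles and select one insertps whose zero mask clears lanes 1 and 2, which is exactly the sequence the SSE-NEXT/AVX-NEXT lines above pin down.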
; SSE-NEXT: xorps %xmm1, %xmm1
; SSE-NEXT: blendps {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[3]
; SSE-NEXT: retq
; AVX-NEXT: vxorps %xmm1, %xmm1, %xmm1
; AVX-NEXT: vblendps {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[3]
; AVX-NEXT: retq
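Here the combine keeps only the last lane and zeroes the rest, and the expected lowering is a zeroed register plus a blend rather than an insertps. A hypothetical single-shuffle input of that shape (name and mask assumed) would be:

define <4 x float> @hypothetical_blend_zzz3(<4 x float> %x) {
  ; Keep only lane 3 of %x; lanes 0-2 come from the zero vector.
  %result = shufflevector <4 x float> %x, <4 x float> zeroinitializer,
                          <4 x i32> <i32 4, i32 4, i32 4, i32 3>
  ret <4 x float> %result
}

On SSE4.1 this is typically selected as an xorps to materialize the zero register followed by a blendps that takes lanes 0-2 from it, matching the checks above.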
; SSE-NEXT: movapd (%rdi), %xmm1
; SSE-NEXT: addpd {{.*}}(%rip), %xmm1
; SSE-NEXT: insertps {{.*#+}} xmm0 = zero,xmm0[2,2,3]
; SSE-NEXT: movapd %xmm1, (%rdi)
; SSE-NEXT: retq
; AVX-NEXT: vmovapd (%rdi), %xmm1
; AVX-NEXT: vaddpd {{.*}}(%rip), %xmm1, %xmm1
; AVX-NEXT: vinsertps {{.*#+}} xmm0 = zero,xmm0[2,2,3]
; AVX-NEXT: vmovapd %xmm1, (%rdi)
; AVX-NEXT: retq

; SSE-NEXT: movdqa (%rdi), %xmm1
; SSE-NEXT: paddq {{.*}}(%rip), %xmm1
; SSE-NEXT: insertps {{.*#+}} xmm0 = zero,xmm0[2,2,3]
; SSE-NEXT: movdqa %xmm1, (%rdi)
; SSE-NEXT: retq
; AVX-NEXT: vmovdqa (%rdi), %xmm1
; AVX-NEXT: vpaddq {{.*}}(%rip), %xmm1, %xmm1
; AVX-NEXT: vinsertps {{.*#+}} xmm0 = zero,xmm0[2,2,3]
; AVX-NEXT: vmovdqa %xmm1, (%rdi)
; AVX-NEXT: retq

; SSE-NEXT: movdqa (%rdi), %xmm1
; SSE-NEXT: paddw {{.*}}(%rip), %xmm1
; SSE-NEXT: insertps {{.*#+}} xmm0 = zero,xmm0[2,2,3]
; SSE-NEXT: movdqa %xmm1, (%rdi)
; SSE-NEXT: retq
; AVX-NEXT: vmovdqa (%rdi), %xmm1
; AVX-NEXT: vpaddw {{.*}}(%rip), %xmm1, %xmm1
; AVX-NEXT: vinsertps {{.*#+}} xmm0 = zero,xmm0[2,2,3]
; AVX-NEXT: vmovdqa %xmm1, (%rdi)
; AVX-NEXT: retq
; SSE-NEXT: movsd {{.*#+}} xmm0 = mem[0],zero
; SSE-NEXT: retq
; AVX-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
; AVX-NEXT: retq
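The movsd checks cover a case where the whole shuffle collapses into a single 64-bit scalar load with a zeroed upper half. The actual IR is not in this excerpt; one hypothetical input that lowers this way (typed-pointer syntax, matching the vintage of these checks) is a scalar double load inserted into a zero vector:

define <2 x double> @hypothetical_load_lo_zero_hi(double* %p) {
  ; Load one double into lane 0; lane 1 stays zero.
  %d = load double, double* %p
  %v = insertelement <2 x double> zeroinitializer, double %d, i32 0
  ret <2 x double> %v
}

Both runs are expected to fold this to a single (v)movsd load, which is what the mem[0],zero comments assert.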
; SSE-NEXT: xorps %xmm0, %xmm0
; SSE-NEXT: retq
; AVX-NEXT: vxorps %xmm0, %xmm0, %xmm0
; AVX-NEXT: retq

; SSE-NEXT: movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
; SSE-NEXT: retq
; AVX-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
; AVX-NEXT: retq
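The RUN lines that define the SSE and AVX prefixes are not part of this excerpt; a typical setup for a two-prefix x86-64 codegen test looks like the following sketch (triple and feature strings are assumptions):

; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+sse4.1 | FileCheck %s --check-prefix=SSE
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx | FileCheck %s --check-prefix=AVX

FileCheck's -NEXT suffix requires each assertion to match the line immediately after the previous match, so every block above pins an exact instruction sequence for its function rather than a loose subsequence.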