
Lines Matching full:next

14 ; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
15 ; X32-NEXT:    vmovaps %xmm0, (%eax)
16 ; X32-NEXT:    vzeroupper
17 ; X32-NEXT:    retl
21 ; X64-NEXT:    vmovaps %xmm0, (%rdi)
22 ; X64-NEXT:    vzeroupper
23 ; X64-NEXT:    retq
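
The vmovaps %xmm0 store above is the usual lowering for writing out the low 128-bit half of a 256-bit vector argument: the low half of %ymm0 already aliases %xmm0, so no extract instruction is needed and only vzeroupper is added before returning. A minimal, hypothetical LLVM IR function of that shape (the function name and element type are assumptions, not taken from the test file):

    define void @store_low_half(<8 x float> %v, <4 x float>* %p) {
      ; Select elements 0-3, i.e. the low 128 bits of the 256-bit argument.
      %low = shufflevector <8 x float> %v, <8 x float> undef, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
      ; Aligned 16-byte store of the extracted half.
      store <4 x float> %low, <4 x float>* %p, align 16
      ret void
    }
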
40 ; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
41 ; X32-NEXT:    vextractf128 $1, %ymm0, (%eax)
42 ; X32-NEXT:    vzeroupper
43 ; X32-NEXT:    retl
47 ; X64-NEXT:    vextractf128 $1, %ymm0, (%rdi)
48 ; X64-NEXT:    vzeroupper
49 ; X64-NEXT:    retq
68 ; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
69 ; X32-NEXT:    vextractf128 $1, %ymm0, (%eax)
70 ; X32-NEXT:    vzeroupper
71 ; X32-NEXT:    retl
75 ; X64-NEXT:    vextractf128 $1, %ymm0, (%rdi)
76 ; X64-NEXT:    vzeroupper
77 ; X64-NEXT:    retq
94 ; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
95 ; X32-NEXT:    vextractf128 $1, %ymm0, (%eax)
96 ; X32-NEXT:    vzeroupper
97 ; X32-NEXT:    retl
101 ; X64-NEXT:    vextractf128 $1, %ymm0, (%rdi)
102 ; X64-NEXT:    vzeroupper
103 ; X64-NEXT:    retq
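
The three vextractf128 $1 groups above all store the upper 128-bit half of a 256-bit register directly to memory. A hedged sketch of IR that lowers to this pattern under AVX (the function name and element type are assumptions; the test file may use a different type per function):

    define void @store_high_half(<8 x float> %v, <4 x float>* %p) {
      ; Select elements 4-7, i.e. the upper 128 bits of the 256-bit argument.
      %high = shufflevector <8 x float> %v, <8 x float> undef, <4 x i32> <i32 4, i32 5, i32 6, i32 7>
      store <4 x float> %high, <4 x float>* %p, align 16
      ret void
    }
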
118 ; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
119 ; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
120 ; X32-NEXT:    vpmovzxdq {{.*#+}} xmm0 = mem[0],zero,mem[1],zero
121 ; X32-NEXT:    vxorps %ymm1, %ymm1, %ymm1
122 ; X32-NEXT:    vblendps {{.*#+}} ymm0 = ymm0[0],ymm1[1,2,3,4,5,6,7]
123 ; X32-NEXT:    vmovaps %ymm0, (%eax)
124 ; X32-NEXT:    vzeroupper
125 ; X32-NEXT:    retl
129 ; X64-NEXT:    vpmovzxdq {{.*#+}} xmm0 = mem[0],zero,mem[1],zero
130 ; X64-NEXT:    vxorps %ymm1, %ymm1, %ymm1
131 ; X64-NEXT:    vblendps {{.*#+}} ymm0 = ymm0[0],ymm1[1,2,3,4,5,6,7]
132 ; X64-NEXT:    vmovaps %ymm0, (%rsi)
133 ; X64-NEXT:    vzeroupper
134 ; X64-NEXT:    retq
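
In this group the 64-bit memory operand is zero-extended element-wise (vpmovzxdq) and then blended with a zeroed %ymm1 so that only the first lane survives before the aligned 256-bit store. One hypothetical IR shape with that effect (names, types, and alignments are assumptions, and the exact instruction selection depends on the LLVM revision):

    define void @zext_insert_into_zero(<2 x i32>* %src, <4 x i64>* %dst) {
      ; Load two i32 elements and zero-extend them to i64.
      %ld = load <2 x i32>, <2 x i32>* %src, align 4
      %ext = zext <2 x i32> %ld to <2 x i64>
      ; Keep element 0; the remaining lanes of the 256-bit result come from the zero vector.
      %v = shufflevector <2 x i64> %ext, <2 x i64> zeroinitializer, <4 x i32> <i32 0, i32 2, i32 3, i32 2>
      store <4 x i64> %v, <4 x i64>* %dst, align 32
      ret void
    }
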
145 ; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
146 ; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
147 ; X32-NEXT:    vmovupd (%ecx), %xmm0
148 ; X32-NEXT:    vxorpd %ymm1, %ymm1, %ymm1
149 ; X32-NEXT:    vblendpd {{.*#+}} ymm0 = ymm0[0],ymm1[1,2,3]
150 ; X32-NEXT:    vmovapd %ymm0, (%eax)
151 ; X32-NEXT:    vzeroupper
152 ; X32-NEXT:    retl
156 ; X64-NEXT:    vmovupd (%rdi), %xmm0
157 ; X64-NEXT:    vxorpd %ymm1, %ymm1, %ymm1
158 ; X64-NEXT:    vblendpd {{.*#+}} ymm0 = ymm0[0],ymm1[1,2,3]
159 ; X64-NEXT:    vmovapd %ymm0, (%rsi)
160 ; X64-NEXT:    vzeroupper
161 ; X64-NEXT:    retq
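
This block loads an unaligned 128-bit value (vmovupd), blends its first 64-bit lane into an all-zero register, and stores the 256-bit result aligned. A hedged IR sketch with the same meaning (names are invented here; a newer LLVM may fold the pattern into a single scalar load instead):

    define void @insert_elt0_zero_rest(<2 x double>* %src, <4 x double>* %dst) {
      ; Unaligned 16-byte load of two doubles.
      %ld = load <2 x double>, <2 x double>* %src, align 1
      ; Keep element 0 of the load; the other three lanes come from the zero vector.
      %v = shufflevector <2 x double> %ld, <2 x double> zeroinitializer, <4 x i32> <i32 0, i32 2, i32 3, i32 2>
      store <4 x double> %v, <4 x double>* %dst, align 32
      ret void
    }
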
172 ; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
173 ; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
174 ; X32-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
175 ; X32-NEXT:    vmovaps %ymm0, (%eax)
176 ; X32-NEXT:    vzeroupper
177 ; X32-NEXT:    retl
181 ; X64-NEXT:    vmovq {{.*#+}} xmm0 = mem[0],zero
182 ; X64-NEXT:    vxorps %ymm1, %ymm1, %ymm1
183 ; X64-NEXT:    vblendps {{.*#+}} ymm0 = ymm0[0],ymm1[1,2,3,4,5,6,7]
184 ; X64-NEXT:    vmovaps %ymm0, (%rsi)
185 ; X64-NEXT:    vzeroupper
186 ; X64-NEXT:    retq
197 ; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
198 ; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
199 ; X32-NEXT:    vmovupd (%ecx), %xmm0
200 ; X32-NEXT:    vxorpd %ymm1, %ymm1, %ymm1
201 ; X32-NEXT:    vblendpd {{.*#+}} ymm0 = ymm0[0],ymm1[1,2,3]
202 ; X32-NEXT:    vmovapd %ymm0, (%eax)
203 ; X32-NEXT:    vzeroupper
204 ; X32-NEXT:    retl
208 ; X64-NEXT:    vmovupd (%rdi), %xmm0
209 ; X64-NEXT:    vxorpd %ymm1, %ymm1, %ymm1
210 ; X64-NEXT:    vblendpd {{.*#+}} ymm0 = ymm0[0],ymm1[1,2,3]
211 ; X64-NEXT:    vmovapd %ymm0, (%rsi)
212 ; X64-NEXT:    vzeroupper
213 ; X64-NEXT:    retq