    Searched full:xmm8 (Results 1 - 25 of 169)

  /toolchain/binutils/binutils-2.25/gas/testsuite/gas/i386/
x86-64-avx-swap.s 16 movapd %xmm8,%xmm6
17 movaps %xmm8,%xmm6
18 movdqa %xmm8,%xmm6
19 movdqu %xmm8,%xmm6
20 movq %xmm8,%xmm6
21 movsd %xmm8,%xmm6
22 movss %xmm8,%xmm6
23 movupd %xmm8,%xmm6
24 movups %xmm8,%xmm6
25 vmovapd %xmm8,%xmm
    [all...]
x86-64-avx-swap-intel.d 18 [ ]*[a-f0-9]+: c5 79 29 c6 vmovapd xmm6,xmm8
19 [ ]*[a-f0-9]+: c5 78 29 c6 vmovaps xmm6,xmm8
20 [ ]*[a-f0-9]+: c5 79 7f c6 vmovdqa xmm6,xmm8
21 [ ]*[a-f0-9]+: c5 7a 7f c6 vmovdqu xmm6,xmm8
22 [ ]*[a-f0-9]+: c5 79 d6 c6 vmovq xmm6,xmm8
23 [ ]*[a-f0-9]+: c5 4b 11 c6 vmovsd xmm6,xmm6,xmm8
24 [ ]*[a-f0-9]+: c5 4a 11 c6 vmovss xmm6,xmm6,xmm8
25 [ ]*[a-f0-9]+: c5 79 11 c6 vmovupd xmm6,xmm8
26 [ ]*[a-f0-9]+: c5 78 11 c6 vmovups xmm6,xmm8
27 [ ]*[a-f0-9]+: c5 79 29 c6 vmovapd xmm6,xmm8
    [all...]
x86-64-avx-swap.d 17 [ ]*[a-f0-9]+: c5 79 29 c6 vmovapd %xmm8,%xmm6
18 [ ]*[a-f0-9]+: c5 78 29 c6 vmovaps %xmm8,%xmm6
19 [ ]*[a-f0-9]+: c5 79 7f c6 vmovdqa %xmm8,%xmm6
20 [ ]*[a-f0-9]+: c5 7a 7f c6 vmovdqu %xmm8,%xmm6
21 [ ]*[a-f0-9]+: c5 79 d6 c6 vmovq %xmm8,%xmm6
22 [ ]*[a-f0-9]+: c5 4b 11 c6 vmovsd %xmm8,%xmm6,%xmm6
23 [ ]*[a-f0-9]+: c5 4a 11 c6 vmovss %xmm8,%xmm6,%xmm6
24 [ ]*[a-f0-9]+: c5 79 11 c6 vmovupd %xmm8,%xmm6
25 [ ]*[a-f0-9]+: c5 78 11 c6 vmovups %xmm8,%xmm6
26 [ ]*[a-f0-9]+: c5 79 29 c6 vmovapd %xmm8,%xmm
    [all...]
x86-64-avx-scalar.s 271 vmovd %xmm8,0x12345678
272 vcvtsi2sdl 0x12345678,%xmm8,%xmm15
273 vmovd %xmm8,(%rbp)
274 vcvtsi2sdl (%rbp),%xmm8,%xmm15
275 vmovd %xmm8,(%rsp)
276 vcvtsi2sdl (%rsp),%xmm8,%xmm15
277 vmovd %xmm8,0x99(%rbp)
278 vcvtsi2sdl 0x99(%rbp),%xmm8,%xmm15
279 vmovd %xmm8,0x99(%r15)
280 vcvtsi2sdl 0x99(%r15),%xmm8,%xmm1
    [all...]
x86-64-avx.s     [all...]
  /external/valgrind/none/tests/amd64/
avx-1.c 84 : /*TRASH*/"xmm0","xmm7","xmm8","xmm6","xmm9","r14","memory","cc" \
104 "xmm0","xmm8","xmm7","xmm9","r14","rax","memory","cc" \
118 "vpor %%xmm6, %%xmm8, %%xmm7",
119 "vpor (%%rax), %%xmm8, %%xmm7")
122 "vpxor %%xmm6, %%xmm8, %%xmm7",
123 "vpxor (%%rax), %%xmm8, %%xmm7")
126 "vpsubb %%xmm6, %%xmm8, %%xmm7",
127 "vpsubb (%%rax), %%xmm8, %%xmm7")
130 "vpsubd %%xmm6, %%xmm8, %%xmm7",
131 "vpsubd (%%rax), %%xmm8, %%xmm7"
    [all...]
fma.c 103 __asm __volatile__ ("vmovaps (%1), %%xmm9; vmovaps (%2), %%xmm7; vmovaps (%3), %%xmm8;"
104 "vfmadd132ps %%xmm7, %%xmm8, %%xmm9;"
106 "r" (&ft.y[i]), "r" (&ft.z[i]) : "xmm7", "xmm8", "xmm9");
109 __asm __volatile__ ("vmovaps (%1), %%xmm9; vmovaps (%3), %%xmm8;"
110 "vfmadd132ps (%2), %%xmm8, %%xmm9;"
112 "r" (&ft.y[i]), "r" (&ft.z[i]) : "xmm7", "xmm8", "xmm9");
115 __asm __volatile__ ("vmovaps (%1), %%xmm9; vmovaps (%3), %%xmm7; vmovaps (%2), %%xmm8;"
116 "vfmadd213ps %%xmm7, %%xmm8, %%xmm9;"
118 "r" (&ft.y[i]), "r" (&ft.z[i]) : "xmm7", "xmm8", "xmm9");
121 __asm __volatile__ ("vmovaps (%1), %%xmm9; vmovaps (%2), %%xmm8;"
    [all...]
avx2-1.c 90 : /*TRASH*/"xmm0","xmm7","xmm8","xmm6","xmm9","r14","memory","cc" \
110 "xmm0","xmm8","xmm7","xmm9","r14","rax","memory","cc" \
762 "vpblendd $0x00, %%xmm6, %%xmm8, %%xmm7",
763 "vpblendd $0x01, (%%rax), %%xmm8, %%xmm7")
765 "vpblendd $0x02, %%xmm6, %%xmm8, %%xmm7",
766 "vpblendd $0x03, (%%rax), %%xmm8, %%xmm7")
768 "vpblendd $0x04, %%xmm6, %%xmm8, %%xmm7",
769 "vpblendd $0x05, (%%rax), %%xmm8, %%xmm7")
771 "vpblendd $0x06, %%xmm6, %%xmm8, %%xmm7",
772 "vpblendd $0x07, (%%rax), %%xmm8, %%xmm7"
    [all...]
  /toolchain/binutils/binutils-2.25/gas/testsuite/gas/i386/ilp32/
x86-64-avx-swap-intel.d 18 [ ]*[a-f0-9]+: c5 79 29 c6 vmovapd xmm6,xmm8
19 [ ]*[a-f0-9]+: c5 78 29 c6 vmovaps xmm6,xmm8
20 [ ]*[a-f0-9]+: c5 79 7f c6 vmovdqa xmm6,xmm8
21 [ ]*[a-f0-9]+: c5 7a 7f c6 vmovdqu xmm6,xmm8
22 [ ]*[a-f0-9]+: c5 79 d6 c6 vmovq xmm6,xmm8
23 [ ]*[a-f0-9]+: c5 4b 11 c6 vmovsd xmm6,xmm6,xmm8
24 [ ]*[a-f0-9]+: c5 4a 11 c6 vmovss xmm6,xmm6,xmm8
25 [ ]*[a-f0-9]+: c5 79 11 c6 vmovupd xmm6,xmm8
26 [ ]*[a-f0-9]+: c5 78 11 c6 vmovups xmm6,xmm8
27 [ ]*[a-f0-9]+: c5 79 29 c6 vmovapd xmm6,xmm8
    [all...]
x86-64-avx-swap.d 18 [ ]*[a-f0-9]+: c5 79 29 c6 vmovapd %xmm8,%xmm6
19 [ ]*[a-f0-9]+: c5 78 29 c6 vmovaps %xmm8,%xmm6
20 [ ]*[a-f0-9]+: c5 79 7f c6 vmovdqa %xmm8,%xmm6
21 [ ]*[a-f0-9]+: c5 7a 7f c6 vmovdqu %xmm8,%xmm6
22 [ ]*[a-f0-9]+: c5 79 d6 c6 vmovq %xmm8,%xmm6
23 [ ]*[a-f0-9]+: c5 4b 11 c6 vmovsd %xmm8,%xmm6,%xmm6
24 [ ]*[a-f0-9]+: c5 4a 11 c6 vmovss %xmm8,%xmm6,%xmm6
25 [ ]*[a-f0-9]+: c5 79 11 c6 vmovupd %xmm8,%xmm6
26 [ ]*[a-f0-9]+: c5 78 11 c6 vmovups %xmm8,%xmm6
27 [ ]*[a-f0-9]+: c5 79 29 c6 vmovapd %xmm8,%xmm
    [all...]
  /external/llvm/test/CodeGen/X86/
2009-06-03-Win64SpillXMM.ll 3 ; CHECK: movaps %xmm8, 16(%rsp)
8 tail call void asm sideeffect "", "~{xmm7},~{xmm8},~{dirflag},~{fpsr},~{flags}"() nounwind
  /external/valgrind/memcheck/tests/amd64/
xor-undef-amd64.c 67 "movups 16(%0), %%xmm8\n\t"
68 "xorps %%xmm8, %%xmm0\n\t"
73 : : "r"(junk) : "rax", "xmm8", "xmm0", "cc", "memory"
79 "movups 16(%0), %%xmm8\n\t"
85 : : "r"(junk) : "rax", "xmm8", "xmm0", "cc", "memory"
94 "movups 16(%0), %%xmm8\n\t"
95 "pxor %%xmm8, %%xmm0\n\t"
100 : : "r"(junk) : "rax", "xmm8", "xmm0", "cc", "memory"
106 "movups 16(%0), %%xmm8\n\t"
112 : : "r"(junk) : "rax", "xmm8", "xmm0", "cc", "memory
    [all...]
  /external/boringssl/win-x86_64/crypto/aes/
bsaes-x86_64.asm 16 movdqa xmm8,XMMWORD[rax]
19 pxor xmm15,xmm8
20 pxor xmm0,xmm8
21 pxor xmm1,xmm8
22 pxor xmm2,xmm8
25 pxor xmm3,xmm8
26 pxor xmm4,xmm8
29 pxor xmm5,xmm8
30 pxor xmm6,xmm8
37 movdqa xmm8,XMMWORD[16+r11
    [all...]
  /external/v8/test/cctest/
test-assembler-x64.cc 773 __ movaps(xmm8, xmm0);
774 __ vfmadd132sd(xmm8, xmm2, xmm1);
775 __ ucomisd(xmm8, xmm3);
779 __ movaps(xmm8, xmm1);
780 __ vfmadd213sd(xmm8, xmm0, xmm2);
781 __ ucomisd(xmm8, xmm3);
785 __ movaps(xmm8, xmm2);
786 __ vfmadd231sd(xmm8, xmm0, xmm1);
787 __ ucomisd(xmm8, xmm3);
792 __ movaps(xmm8, xmm0)
1393 __ vmovss(Operand(rsp, 0), xmm8);
    [all...]
  /external/libvpx/libvpx/vp8/common/x86/
loopfilter_block_sse2_x86_64.asm 204 movdqa xmm8, i5
207 LF_FILTER_HEV_MASK xmm0, xmm1, xmm2, xmm3, xmm4, xmm8, xmm9, xmm10
212 movdqa xmm8, i5
213 LF_FILTER xmm1, xmm2, xmm3, xmm8, xmm0, xmm4
219 movdqa i5, xmm8
227 LF_FILTER_HEV_MASK xmm3, xmm8, xmm0, xmm1, xmm2, xmm4, xmm10, xmm11, xmm9
232 movdqa xmm8, i9
233 LF_FILTER xmm0, xmm1, xmm4, xmm8, xmm3, xmm2
239 movdqa i9, xmm8
247 LF_FILTER_HEV_MASK xmm4, xmm8, xmm0, xmm1, xmm2, xmm3, xmm9, xmm11, xmm1
    [all...]
  /hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/vp8/common/x86/
loopfilter_block_sse2.asm 204 movdqa xmm8, i5
207 LF_FILTER_HEV_MASK xmm0, xmm1, xmm2, xmm3, xmm4, xmm8, xmm9, xmm10
212 movdqa xmm8, i5
213 LF_FILTER xmm1, xmm2, xmm3, xmm8, xmm0, xmm4
219 movdqa i5, xmm8
227 LF_FILTER_HEV_MASK xmm3, xmm8, xmm0, xmm1, xmm2, xmm4, xmm10, xmm11, xmm9
232 movdqa xmm8, i9
233 LF_FILTER xmm0, xmm1, xmm4, xmm8, xmm3, xmm2
239 movdqa i9, xmm8
247 LF_FILTER_HEV_MASK xmm4, xmm8, xmm0, xmm1, xmm2, xmm3, xmm9, xmm11, xmm1
    [all...]
  /external/libvpx/libvpx/third_party/libyuv/source/
rotate_gcc.cc 249 "movdqa %%xmm0,%%xmm8 \n"
251 "punpckhbw %%xmm1,%%xmm8 \n"
254 "movdqa %%xmm8,%%xmm9 \n"
302 "punpcklwd %%xmm10,%%xmm8 \n"
304 "movdqa %%xmm8,%%xmm10 \n"
340 "punpckldq %%xmm12,%%xmm8 \n"
341 "movq %%xmm8,(%1) \n"
342 "movdqa %%xmm8,%%xmm12 \n"
373 "xmm8", "xmm9", "xmm10", "xmm11", "xmm12", "xmm13", "xmm14", "xmm15"
388 "movdqa %%xmm0,%%xmm8 \n
    [all...]
  /external/libyuv/files/source/
rotate_gcc.cc 123 "movdqa %%xmm0,%%xmm8 \n"
125 "punpckhbw %%xmm1,%%xmm8 \n"
128 "movdqa %%xmm8,%%xmm9 \n"
176 "punpcklwd %%xmm10,%%xmm8 \n"
178 "movdqa %%xmm8,%%xmm10 \n"
214 "punpckldq %%xmm12,%%xmm8 \n"
215 "movq %%xmm8,(%1) \n"
216 "movdqa %%xmm8,%%xmm12 \n"
247 "xmm8", "xmm9", "xmm10", "xmm11", "xmm12", "xmm13", "xmm14", "xmm15"
265 "movdqa %%xmm0,%%xmm8 \n
    [all...]
  /hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/vp8/encoder/x86/
quantize_sse4.asm 127 pxor xmm8, xmm8
163 ZIGZAG_LOOP 8, 0, xmm3, xmm7, xmm8
168 ZIGZAG_LOOP 9, 1, xmm3, xmm7, xmm8
169 ZIGZAG_LOOP 12, 4, xmm3, xmm7, xmm8
170 ZIGZAG_LOOP 13, 5, xmm3, xmm7, xmm8
171 ZIGZAG_LOOP 10, 2, xmm3, xmm7, xmm8
173 ZIGZAG_LOOP 11, 3, xmm3, xmm7, xmm8
174 ZIGZAG_LOOP 14, 6, xmm3, xmm7, xmm8
175 ZIGZAG_LOOP 15, 7, xmm3, xmm7, xmm8
    [all...]
  /external/boringssl/win-x86_64/crypto/sha/
sha512-x86_64.asm 1845 movaps XMMWORD[(128+64)+rsp],xmm8
1879 vpaddq xmm8,xmm0,XMMWORD[((-128))+rbp]
1885 vmovdqa XMMWORD[rsp],xmm8
1886 vpaddq xmm8,xmm4,XMMWORD[rbp]
1893 vmovdqa XMMWORD[64+rsp],xmm8
1906 vpalignr xmm8,xmm1,xmm0,8
1915 vpsrlq xmm8,xmm8,7
1926 vpxor xmm8,xmm8,xmm
    [all...]
sha1-x86_64.asm 1275 movaps XMMWORD[(-40-64)+rax],xmm8
1326 movdqa xmm8,xmm3
1334 psrldq xmm8,4
1340 pxor xmm8,xmm2
1344 pxor xmm4,xmm8
1354 movdqa xmm8,xmm4
1360 psrld xmm8,31
1370 por xmm4,xmm8
1411 movdqa xmm8,xmm5
1417 pslldq xmm8,1
    [all...]
  /external/boringssl/linux-x86_64/crypto/aes/
bsaes-x86_64.S 14 movdqa (%rax),%xmm8
17 pxor %xmm8,%xmm15
18 pxor %xmm8,%xmm0
19 pxor %xmm8,%xmm1
20 pxor %xmm8,%xmm2
23 pxor %xmm8,%xmm3
24 pxor %xmm8,%xmm4
27 pxor %xmm8,%xmm5
28 pxor %xmm8,%xmm6
35 movdqa 16(%r11),%xmm8
    [all...]
  /external/boringssl/mac-x86_64/crypto/aes/
bsaes-x86_64.S 12 movdqa (%rax),%xmm8
15 pxor %xmm8,%xmm15
16 pxor %xmm8,%xmm0
17 pxor %xmm8,%xmm1
18 pxor %xmm8,%xmm2
21 pxor %xmm8,%xmm3
22 pxor %xmm8,%xmm4
25 pxor %xmm8,%xmm5
26 pxor %xmm8,%xmm6
33 movdqa 16(%r11),%xmm8
    [all...]
  /external/boringssl/linux-x86_64/crypto/sha/
sha512-x86_64.S 1847 vpaddq -128(%rbp),%xmm0,%xmm8
1853 vmovdqa %xmm8,0(%rsp)
1854 vpaddq 0(%rbp),%xmm4,%xmm8
1861 vmovdqa %xmm8,64(%rsp)
1874 vpalignr $8,%xmm0,%xmm1,%xmm8
1883 vpsrlq $7,%xmm8,%xmm8
1894 vpxor %xmm9,%xmm8,%xmm8
1902 vpxor %xmm10,%xmm8,%xmm
    [all...]
  /external/boringssl/mac-x86_64/crypto/sha/
sha512-x86_64.S 1846 vpaddq -128(%rbp),%xmm0,%xmm8
1852 vmovdqa %xmm8,0(%rsp)
1853 vpaddq 0(%rbp),%xmm4,%xmm8
1860 vmovdqa %xmm8,64(%rsp)
1873 vpalignr $8,%xmm0,%xmm1,%xmm8
1882 vpsrlq $7,%xmm8,%xmm8
1893 vpxor %xmm9,%xmm8,%xmm8
1901 vpxor %xmm10,%xmm8,%xmm
    [all...]
