/toolchain/binutils/binutils-2.27/gas/testsuite/gas/i386/ |
x86-64-avx-swap.s |
  16 movapd %xmm8,%xmm6
  17 movaps %xmm8,%xmm6
  18 movdqa %xmm8,%xmm6
  19 movdqu %xmm8,%xmm6
  20 movq %xmm8,%xmm6
  21 movsd %xmm8,%xmm6
  22 movss %xmm8,%xmm6
  23 movupd %xmm8,%xmm6
  24 movups %xmm8,%xmm6
  25 vmovapd %xmm8,%xmm [all...]
x86-64-avx-swap-intel.d |
  18 [ ]*[a-f0-9]+: c5 79 29 c6 vmovapd xmm6,xmm8
  19 [ ]*[a-f0-9]+: c5 78 29 c6 vmovaps xmm6,xmm8
  20 [ ]*[a-f0-9]+: c5 79 7f c6 vmovdqa xmm6,xmm8
  21 [ ]*[a-f0-9]+: c5 7a 7f c6 vmovdqu xmm6,xmm8
  22 [ ]*[a-f0-9]+: c5 79 d6 c6 vmovq xmm6,xmm8
  23 [ ]*[a-f0-9]+: c5 4b 11 c6 vmovsd xmm6,xmm6,xmm8
  24 [ ]*[a-f0-9]+: c5 4a 11 c6 vmovss xmm6,xmm6,xmm8
  25 [ ]*[a-f0-9]+: c5 79 11 c6 vmovupd xmm6,xmm8
  26 [ ]*[a-f0-9]+: c5 78 11 c6 vmovups xmm6,xmm8
  27 [ ]*[a-f0-9]+: c5 79 29 c6 vmovapd xmm6,xmm8 [all...]
x86-64-avx-swap.d |
  17 [ ]*[a-f0-9]+: c5 79 29 c6 vmovapd %xmm8,%xmm6
  18 [ ]*[a-f0-9]+: c5 78 29 c6 vmovaps %xmm8,%xmm6
  19 [ ]*[a-f0-9]+: c5 79 7f c6 vmovdqa %xmm8,%xmm6
  20 [ ]*[a-f0-9]+: c5 7a 7f c6 vmovdqu %xmm8,%xmm6
  21 [ ]*[a-f0-9]+: c5 79 d6 c6 vmovq %xmm8,%xmm6
  22 [ ]*[a-f0-9]+: c5 4b 11 c6 vmovsd %xmm8,%xmm6,%xmm6
  23 [ ]*[a-f0-9]+: c5 4a 11 c6 vmovss %xmm8,%xmm6,%xmm6
  24 [ ]*[a-f0-9]+: c5 79 11 c6 vmovupd %xmm8,%xmm6
  25 [ ]*[a-f0-9]+: c5 78 11 c6 vmovups %xmm8,%xmm6
  26 [ ]*[a-f0-9]+: c5 79 29 c6 vmovapd %xmm8,%xmm [all...]
x86-64-avx-scalar.s |
  271 vmovd %xmm8,0x12345678
  272 vcvtsi2sdl 0x12345678,%xmm8,%xmm15
  273 vmovd %xmm8,(%rbp)
  274 vcvtsi2sdl (%rbp),%xmm8,%xmm15
  275 vmovd %xmm8,(%rsp)
  276 vcvtsi2sdl (%rsp),%xmm8,%xmm15
  277 vmovd %xmm8,0x99(%rbp)
  278 vcvtsi2sdl 0x99(%rbp),%xmm8,%xmm15
  279 vmovd %xmm8,0x99(%r15)
  280 vcvtsi2sdl 0x99(%r15),%xmm8,%xmm1 [all...]
x86-64-avx.s | [all...] |
x86-64-arch-3.s | 14 sha1nexte (%rax), %xmm8
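
Note on the avx-swap entries above: in every expected dump the register-to-register moves from %xmm8 to %xmm6 come out as the "store" opcode forms (0F 29, 0F 11, 0F 7F, 0F D6) behind the 2-byte C5 VEX prefix. That operand direction puts the extended register xmm8 in ModRM.reg, which VEX.R can reach; the "load" forms would need xmm8 in ModRM.rm and therefore the longer 3-byte C4 prefix. A minimal sketch (my own, not part of the testsuite) for reproducing this with any GCC/Clang toolchain plus objdump; the byte values in the comments are expectations, the first one matching the x86-64-avx-swap.d dump above:

    /* encoding_demo.c -- build and inspect with:
     *   cc -c encoding_demo.c && objdump -d encoding_demo.o */
    void encoding_demo(void)
    {
        __asm__ volatile(
            "vmovaps %%xmm8, %%xmm6\n\t"   /* expected c5 78 29 c6: store form, 2-byte VEX   */
            "vmovaps %%xmm6, %%xmm8\n\t"   /* expected c5 78 28 c6: load form also fits,     */
                                           /* since xmm8 is again the ModRM.reg operand      */
            ::: "xmm6", "xmm8");
    }
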
/external/valgrind/none/tests/amd64/ |
avx-1.c |
  84 : /*TRASH*/"xmm0","xmm7","xmm8","xmm6","xmm9","r14","memory","cc" \
  104 "xmm0","xmm8","xmm7","xmm9","r14","rax","memory","cc" \
  118 "vpor %%xmm6, %%xmm8, %%xmm7",
  119 "vpor (%%rax), %%xmm8, %%xmm7")
  122 "vpxor %%xmm6, %%xmm8, %%xmm7",
  123 "vpxor (%%rax), %%xmm8, %%xmm7")
  126 "vpsubb %%xmm6, %%xmm8, %%xmm7",
  127 "vpsubb (%%rax), %%xmm8, %%xmm7")
  130 "vpsubd %%xmm6, %%xmm8, %%xmm7",
  131 "vpsubd (%%rax), %%xmm8, %%xmm7" [all...]
fma4.c |
  219 : /*TRASH*/"xmm7","xmm8","xmm6","xmm9","r14","memory","cc" \
  235 "vfmaddpd %%xmm7,%%xmm8,%%xmm6,%%xmm9", 1); \
  237 "vfmaddpd %%xmm7,%%xmm8,%%xmm9,%%xmm9", 1); \
  239 "vfmaddpd (%%r14),%%xmm8,%%xmm6,%%xmm9", 1); \
  241 "vfmaddpd %%xmm8,(%%r14),%%xmm6,%%xmm9", 1);
  257 "vfmaddps %%xmm7,%%xmm8,%%xmm6,%%xmm9", 0); \
  259 "vfmaddps %%xmm7,%%xmm8,%%xmm9,%%xmm9", 0); \
  261 "vfmaddps (%%r14),%%xmm8,%%xmm6,%%xmm9", 0); \
  263 "vfmaddps %%xmm8,(%%r14),%%xmm6,%%xmm9", 0);
  279 "vfmaddsd %%xmm7,%%xmm8,%%xmm6,%%xmm9", 1); [all...]
fma.c |
  103 __asm __volatile__ ("vmovaps (%1), %%xmm9; vmovaps (%2), %%xmm7; vmovaps (%3), %%xmm8;"
  104 "vfmadd132ps %%xmm7, %%xmm8, %%xmm9;"
  106 "r" (&ft.y[i]), "r" (&ft.z[i]) : "xmm7", "xmm8", "xmm9");
  109 __asm __volatile__ ("vmovaps (%1), %%xmm9; vmovaps (%3), %%xmm8;"
  110 "vfmadd132ps (%2), %%xmm8, %%xmm9;"
  112 "r" (&ft.y[i]), "r" (&ft.z[i]) : "xmm7", "xmm8", "xmm9");
  115 __asm __volatile__ ("vmovaps (%1), %%xmm9; vmovaps (%3), %%xmm7; vmovaps (%2), %%xmm8;"
  116 "vfmadd213ps %%xmm7, %%xmm8, %%xmm9;"
  118 "r" (&ft.y[i]), "r" (&ft.z[i]) : "xmm7", "xmm8", "xmm9");
  121 __asm __volatile__ ("vmovaps (%1), %%xmm9; vmovaps (%2), %%xmm8;" [all...]
avx2-1.c |
  90 : /*TRASH*/"xmm0","xmm7","xmm8","xmm6","xmm9","r14","memory","cc" \
  110 "xmm0","xmm8","xmm7","xmm9","r14","rax","memory","cc" \
  762 "vpblendd $0x00, %%xmm6, %%xmm8, %%xmm7",
  763 "vpblendd $0x01, (%%rax), %%xmm8, %%xmm7")
  765 "vpblendd $0x02, %%xmm6, %%xmm8, %%xmm7",
  766 "vpblendd $0x03, (%%rax), %%xmm8, %%xmm7")
  768 "vpblendd $0x04, %%xmm6, %%xmm8, %%xmm7",
  769 "vpblendd $0x05, (%%rax), %%xmm8, %%xmm7")
  771 "vpblendd $0x06, %%xmm6, %%xmm8, %%xmm7",
  772 "vpblendd $0x07, (%%rax), %%xmm8, %%xmm7" [all...]
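
The valgrind files above (avx-1.c, fma4.c, fma.c, avx2-1.c) all follow the same pattern: each AVX/FMA instruction is embedded in GCC extended asm with every touched register, including extended ones such as xmm8, named in the clobber list. A stripped-down, hypothetical version of that pattern (not valgrind's GEN_* harness; array names and values are mine, and it needs an AVX-capable CPU to run):

    #include <stdio.h>

    int main(void)
    {
        float a[4]   __attribute__((aligned(16))) = {1, 2, 3, 4};
        float b[4]   __attribute__((aligned(16))) = {5, 6, 7, 8};
        float out[4] __attribute__((aligned(16)));

        __asm__ __volatile__(
            "vmovaps (%1), %%xmm8\n\t"          /* load a into an extended register    */
            "vaddps  (%2), %%xmm8, %%xmm8\n\t"  /* add b using the memory operand form */
            "vmovaps %%xmm8, (%0)\n\t"          /* store the result                    */
            : /* no register outputs; out is written through memory */
            : "r"(out), "r"(a), "r"(b)
            : "xmm8", "memory", "cc");

        printf("%g %g %g %g\n", out[0], out[1], out[2], out[3]);
        return 0;
    }

Expected output is 6 8 10 12.
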
/toolchain/binutils/binutils-2.27/gas/testsuite/gas/i386/ilp32/ |
x86-64-avx-swap-intel.d |
  18 [ ]*[a-f0-9]+: c5 79 29 c6 vmovapd xmm6,xmm8
  19 [ ]*[a-f0-9]+: c5 78 29 c6 vmovaps xmm6,xmm8
  20 [ ]*[a-f0-9]+: c5 79 7f c6 vmovdqa xmm6,xmm8
  21 [ ]*[a-f0-9]+: c5 7a 7f c6 vmovdqu xmm6,xmm8
  22 [ ]*[a-f0-9]+: c5 79 d6 c6 vmovq xmm6,xmm8
  23 [ ]*[a-f0-9]+: c5 4b 11 c6 vmovsd xmm6,xmm6,xmm8
  24 [ ]*[a-f0-9]+: c5 4a 11 c6 vmovss xmm6,xmm6,xmm8
  25 [ ]*[a-f0-9]+: c5 79 11 c6 vmovupd xmm6,xmm8
  26 [ ]*[a-f0-9]+: c5 78 11 c6 vmovups xmm6,xmm8
  27 [ ]*[a-f0-9]+: c5 79 29 c6 vmovapd xmm6,xmm8 [all...]
x86-64-avx-swap.d |
  18 [ ]*[a-f0-9]+: c5 79 29 c6 vmovapd %xmm8,%xmm6
  19 [ ]*[a-f0-9]+: c5 78 29 c6 vmovaps %xmm8,%xmm6
  20 [ ]*[a-f0-9]+: c5 79 7f c6 vmovdqa %xmm8,%xmm6
  21 [ ]*[a-f0-9]+: c5 7a 7f c6 vmovdqu %xmm8,%xmm6
  22 [ ]*[a-f0-9]+: c5 79 d6 c6 vmovq %xmm8,%xmm6
  23 [ ]*[a-f0-9]+: c5 4b 11 c6 vmovsd %xmm8,%xmm6,%xmm6
  24 [ ]*[a-f0-9]+: c5 4a 11 c6 vmovss %xmm8,%xmm6,%xmm6
  25 [ ]*[a-f0-9]+: c5 79 11 c6 vmovupd %xmm8,%xmm6
  26 [ ]*[a-f0-9]+: c5 78 11 c6 vmovups %xmm8,%xmm6
  27 [ ]*[a-f0-9]+: c5 79 29 c6 vmovapd %xmm8,%xmm [all...]
/external/llvm/test/CodeGen/X86/ |
2009-06-03-Win64SpillXMM.ll |
  3 ; CHECK: movaps %xmm8, 16(%rsp)
  8 tail call void asm sideeffect "", "~{xmm7},~{xmm8},~{dirflag},~{fpsr},~{flags}"() nounwind
sad.ll |
  169 ; SSE2-NEXT: pshufd {{.*#+}} xmm8 = xmm1[2,3,0,1]
  170 ; SSE2-NEXT: punpcklbw {{.*#+}} xmm8 = xmm8[0],xmm12[0],xmm8[1],xmm12[1],xmm8[2],xmm12[2],xmm8[3],xmm12[3],xmm8[4],xmm12[4],xmm8[5],xmm12[5],xmm8[6],xmm12[6],xmm8[7],xmm12[7 [all...]
/external/swiftshader/third_party/LLVM/test/CodeGen/X86/ |
2009-06-03-Win64SpillXMM.ll |
  3 ; CHECK: movaps %xmm8, (%rsp)
  8 tail call void asm sideeffect "", "~{xmm7},~{xmm8},~{dirflag},~{fpsr},~{flags}"() nounwind
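
Both copies of 2009-06-03-Win64SpillXMM.ll above check the same behavior: under the Windows x64 calling convention xmm6-xmm15 are callee-saved, so inline asm that clobbers xmm8 forces a movaps spill to the stack (the two trees simply expect different offsets). A hypothetical C analogue of that IR test, not part of either tree; compiled for an x86_64 Windows target the prologue/epilogue should save and restore xmm8:

    /* The empty asm claims to clobber xmm7 and xmm8; a Win64 build must
     * preserve the callee-saved xmm8 around it, typically with a movaps
     * spill like the CHECK lines above. */
    void clobber_nonvolatile_xmm(void)
    {
        __asm__ __volatile__("" : : : "xmm7", "xmm8");
    }
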
/external/llvm/test/MC/X86/ |
intel-syntax-x86-64-avx.s |
  19 // CHECK: vgatherdps xmm10, xmmword ptr [r15 + 2*xmm9], xmm8
  21 vgatherdps xmm10, xmmword ptr [r15 + 2*xmm9], xmm8
  23 // CHECK: vgatherqps xmm10, qword ptr [r15 + 2*xmm9], xmm8
  25 vgatherqps xmm10, qword ptr [r15 + 2*xmm9], xmm8
  31 // CHECK: vgatherqps xmm10, xmmword ptr [r15 + 2*ymm9], xmm8
  33 vgatherqps xmm10, xmmword ptr [r15 + 2*ymm9], xmm8
  51 // CHECK: vpgatherdd xmm10, xmmword ptr [r15 + 2*xmm9], xmm8
  53 vpgatherdd xmm10, xmmword ptr [r15 + 2*xmm9], xmm8
  55 // CHECK: vpgatherqd xmm10, qword ptr [r15 + 2*xmm9], xmm8
  57 vpgatherqd xmm10, qword ptr [r15 + 2*xmm9], xmm8 [all...]
/external/boringssl/win-x86_64/crypto/cipher_extra/ |
aes128gcmsiv-x86_64.asm |
  843 vmovdqa xmm8,xmm3
  848 vpxor xmm8,xmm8,XMMWORD[rcx]
  854 vaesenc xmm8,xmm8,xmm12
  861 vaesenc xmm8,xmm8,xmm12
  868 vaesenc xmm8,xmm8,xmm12
  875 vaesenc xmm8,xmm8,xmm1 [all...]
/external/valgrind/memcheck/tests/amd64/ |
xor-undef-amd64.c |
  67 "movups 16(%0), %%xmm8\n\t"
  68 "xorps %%xmm8, %%xmm0\n\t"
  73 : : "r"(junk) : "rax", "xmm8", "xmm0", "cc", "memory"
  79 "movups 16(%0), %%xmm8\n\t"
  85 : : "r"(junk) : "rax", "xmm8", "xmm0", "cc", "memory"
  94 "movups 16(%0), %%xmm8\n\t"
  95 "pxor %%xmm8, %%xmm0\n\t"
  100 : : "r"(junk) : "rax", "xmm8", "xmm0", "cc", "memory"
  106 "movups 16(%0), %%xmm8\n\t"
  112 : : "r"(junk) : "rax", "xmm8", "xmm0", "cc", "memory [all...]
/external/boringssl/win-x86_64/crypto/fipsmodule/ |
aesni-gcm-x86_64.asm |
  74 vpxor xmm8,xmm8,xmm4
  85 vpxor xmm8,xmm8,XMMWORD[((16+8))+rsp]
  149 vpxor xmm8,xmm8,XMMWORD[((112+8))+rsp]
  163 vpclmulqdq xmm5,xmm8,xmm3,0x10
  166 vpclmulqdq xmm1,xmm8,xmm3,0x01
  170 vpclmulqdq xmm2,xmm8,xmm3,0x00
  173 vpclmulqdq xmm8,xmm8,xmm3,0x1 [all...]
bsaes-x86_64.asm |
  16 movdqa xmm8,XMMWORD[rax]
  19 pxor xmm15,xmm8
  20 pxor xmm0,xmm8
  21 pxor xmm1,xmm8
  22 pxor xmm2,xmm8
  25 pxor xmm3,xmm8
  26 pxor xmm4,xmm8
  29 pxor xmm5,xmm8
  30 pxor xmm6,xmm8
  37 movdqa xmm8,XMMWORD[16+r11 [all...]
/external/boringssl/linux-x86_64/crypto/fipsmodule/ |
aesni-gcm-x86_64.S |
  70 vpxor %xmm4,%xmm8,%xmm8
  81 vpxor 16+8(%rsp),%xmm8,%xmm8
  145 vpxor 112+8(%rsp),%xmm8,%xmm8
  159 vpclmulqdq $0x10,%xmm3,%xmm8,%xmm5
  162 vpclmulqdq $0x01,%xmm3,%xmm8,%xmm1
  166 vpclmulqdq $0x00,%xmm3,%xmm8,%xmm2
  169 vpclmulqdq $0x11,%xmm3,%xmm8,%xmm [all...]
/external/boringssl/mac-x86_64/crypto/fipsmodule/ |
aesni-gcm-x86_64.S |
  70 vpxor %xmm4,%xmm8,%xmm8
  81 vpxor 16+8(%rsp),%xmm8,%xmm8
  145 vpxor 112+8(%rsp),%xmm8,%xmm8
  159 vpclmulqdq $0x10,%xmm3,%xmm8,%xmm5
  162 vpclmulqdq $0x01,%xmm3,%xmm8,%xmm1
  166 vpclmulqdq $0x00,%xmm3,%xmm8,%xmm2
  169 vpclmulqdq $0x11,%xmm3,%xmm8,%xmm [all...]
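
The aesni-gcm listings above keep one GHASH operand in xmm8 and combine it with the hash key via vpclmulqdq, a carry-less (GF(2)[x]) multiply. A minimal intrinsics sketch of that primitive in isolation, not boringssl's GHASH code (the operand values are arbitrary demo inputs; build with -mpclmul and run on a CPU with PCLMULQDQ):

    #include <stdio.h>
    #include <wmmintrin.h>               /* PCLMUL intrinsics */

    int main(void)
    {
        /* Carry-less multiply of the low 64-bit halves: 0x87 * x = 0x10e
         * in GF(2)[x], with no carries between bit positions. */
        __m128i a = _mm_set_epi64x(0, 0x87);
        __m128i b = _mm_set_epi64x(0, 0x02);
        __m128i p = _mm_clmulepi64_si128(a, b, 0x00);

        printf("0x%llx\n", (unsigned long long)_mm_cvtsi128_si64(p));
        return 0;
    }
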
/external/libvpx/libvpx/vp8/common/x86/ |
loopfilter_block_sse2_x86_64.asm |
  206 movdqa xmm8, i5
  209 LF_FILTER_HEV_MASK xmm0, xmm1, xmm2, xmm3, xmm4, xmm8, xmm9, xmm10
  214 movdqa xmm8, i5
  215 LF_FILTER xmm1, xmm2, xmm3, xmm8, xmm0, xmm4
  221 movdqa i5, xmm8
  229 LF_FILTER_HEV_MASK xmm3, xmm8, xmm0, xmm1, xmm2, xmm4, xmm10, xmm11, xmm9
  234 movdqa xmm8, i9
  235 LF_FILTER xmm0, xmm1, xmm4, xmm8, xmm3, xmm2
  241 movdqa i9, xmm8
  249 LF_FILTER_HEV_MASK xmm4, xmm8, xmm0, xmm1, xmm2, xmm3, xmm9, xmm11, xmm1 [all...]
/external/boringssl/linux-x86_64/crypto/cipher_extra/ |
aes128gcmsiv-x86_64.S |
  745 vmovdqa %xmm3,%xmm8
  750 vpxor (%rcx),%xmm8,%xmm8
  756 vaesenc %xmm12,%xmm8,%xmm8
  763 vaesenc %xmm12,%xmm8,%xmm8
  770 vaesenc %xmm12,%xmm8,%xmm8
  777 vaesenc %xmm12,%xmm8,%xmm [all...]
/external/boringssl/mac-x86_64/crypto/cipher_extra/ |
aes128gcmsiv-x86_64.S |
  743 vmovdqa %xmm3,%xmm8
  748 vpxor (%rcx),%xmm8,%xmm8
  754 vaesenc %xmm12,%xmm8,%xmm8
  761 vaesenc %xmm12,%xmm8,%xmm8
  768 vaesenc %xmm12,%xmm8,%xmm8
  775 vaesenc %xmm12,%xmm8,%xmm [all...]
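
The aes128gcmsiv-x86_64 listings above XOR the first round key into xmm8 (vpxor) and then run it through repeated vaesenc rounds. A small intrinsics sketch of one such round, not the boringssl code itself (the all-zero block and key are demo values only; build with -maes and run on a CPU with AES-NI):

    #include <stdio.h>
    #include <wmmintrin.h>                /* AES-NI intrinsics */

    int main(void)
    {
        __m128i state = _mm_setzero_si128();   /* demo block     */
        __m128i rk    = _mm_setzero_si128();   /* demo round key */

        /* One AES round: ShiftRows, SubBytes, MixColumns, then XOR with the
         * round key -- the operation each vaesenc line above applies to xmm8. */
        state = _mm_aesenc_si128(state, rk);

        unsigned char out[16];
        _mm_storeu_si128((__m128i *)out, state);
        for (int i = 0; i < 16; i++)
            printf("%02x", out[i]);
        printf("\n");
        return 0;
    }
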