/external/libvpx/libvpx/vp9/encoder/x86/ |
vp9_ssim_opt.asm | 17 movdqa xmm1, xmm3 20 movdqa xmm2, xmm4 29 movdqa xmm2,%1 33 movdqa xmm2,%1 41 movdqa xmm1, %1 92 movdqa xmm3, xmm5 93 movdqa xmm4, xmm6 99 movdqa xmm3, xmm5 100 movdqa xmm4, xmm6
|
vp9_variance_impl_sse2.asm | 35 movdqa xmm0, [rax] 36 movdqa xmm1, [rax+16] 37 movdqa xmm2, [rax+32] 38 movdqa xmm3, [rax+48] 53 movdqa xmm3,xmm4 56 movdqa xmm3,xmm4 134 movdqa xmm3, xmm1 135 movdqa xmm4, xmm2 164 movdqa xmm1, xmm6 176 movdqa xmm2, xmm [all...] |
/external/llvm/test/CodeGen/X86/ |
4char-promote.ll | 8 ; CHECK-NOT: movdqa
|
pmulld.ll | 9 ; WIN64-NEXT: movdqa (%rcx), %xmm0 20 ; WIN64-NEXT: movdqa (%rcx), %xmm0
|
machine-cp.ll | 28 ; Two movdqa (from phi-elimination) in the entry BB cancels out. 33 ; CHECK-NOT: movdqa
|
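Note on the machine-cp.ll hit above: the test is about redundant register copies rather than memory traffic. Phi-elimination can leave a pair of register-to-register movdqa copies that undo each other, and the test's CHECK-NOT: movdqa asserts that LLVM's machine copy propagation deletes both. A schematic, hand-written sketch of such a canceling pair (not the test's actual code):

    /* Not from machine-cp.ll: a back-and-forth movdqa copy pair with no
     * net effect, the kind of copies the test expects machine copy
     * propagation to remove. */
    void redundant_copy_pair(void)
    {
        __asm__ volatile(
            "movdqa %%xmm0, %%xmm1\n\t"  /* copy xmm0 into a scratch register    */
            "movdqa %%xmm1, %%xmm0\n\t"  /* copy it straight back: no net effect */
            ::: "xmm0", "xmm1");
    }
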
vec_setcc.ll | 45 ; SSE2: movdqa {{.*}}(%rip), %xmm2 66 ; SSE2: movdqa {{.*}}(%rip), %xmm2 72 ; SSE2: movdqa %xmm1, %xmm0 89 ; SSE2: movdqa {{.*}}(%rip), %xmm2 110 ; SSE2: movdqa {{.*}}(%rip), %xmm2 116 ; SSE2: movdqa %xmm1, %xmm0
|
/bionic/libc/arch-x86/string/ |
ssse3-memcmp-atom.S | 270 movdqa (%esi), %xmm0 274 movdqa 16(%esi), %xmm2 280 movdqa %xmm0, %xmm1 281 movdqa 32(%esi), %xmm0 282 movdqa 48(%esi), %xmm2 300 movdqa %xmm0, %xmm1 321 movdqa 16(%esi), %xmm1 322 movdqa %xmm1, %xmm2 326 movdqa 32(%esi), %xmm3 347 movdqa 16(%esi), %xmm [all...] |
sse2-memchr-atom.S | 127 movdqa (%edi), %xmm0 143 movdqa (%edi), %xmm0 150 movdqa 16(%edi), %xmm2 157 movdqa 32(%edi), %xmm3 164 movdqa 48(%edi), %xmm4 175 movdqa (%edi), %xmm0 182 movdqa 16(%edi), %xmm2 189 movdqa 32(%edi), %xmm3 196 movdqa 48(%edi), %xmm4 213 movdqa (%edi), %xmm [all...] |
sse2-wcschr-atom.S | 119 movdqa (%edi), %xmm0 160 movdqa (%ecx), %xmm0 169 movdqa (%ecx), %xmm0 178 movdqa (%ecx), %xmm0 187 movdqa (%ecx), %xmm0
|
/external/chromium_org/third_party/openssl/openssl/crypto/aes/asm/ |
bsaes-x86_64.pl | 57 # SIMD "domain" (movaps instead of movdqa, xorps instead of 230 movdqa $y0, $t0 245 movdqa $y0, $t0 261 movdqa $y0, $t0 262 movdqa $y2, $t1 284 movdqa @x[0], @t[0] 285 movdqa @x[1], @t[1] 302 movdqa @x[4], @t[0] 303 movdqa @x[5], @t[1] 330 movdqa @x[4], @t[3 [all...] |
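Note on the first bsaes-x86_64.pl hit (line 57): movaps and movdqa perform the same aligned 128-bit move. movaps lacks the 0x66 operand-size prefix, so it encodes one byte shorter, and on some microarchitectures keeping neighbouring instructions in one forwarding "domain" avoids a bypass-latency penalty; that is the general trade-off behind the comment's "SIMD domain" conversion, whatever the author's exact motivation. A minimal illustration of the two interchangeable spellings (not code from bsaes-x86_64.pl):

    /* Not from bsaes-x86_64.pl: the two spellings of an aligned 128-bit
     * register copy.  movdqa (66 0F 6F) executes in the integer domain,
     * movaps (0F 28) in the floating-point domain and encodes one byte
     * shorter; the architectural result is identical. */
    void domain_copy_forms(void)
    {
        __asm__ volatile(
            "pxor   %%xmm1, %%xmm1\n\t"  /* zero the source register            */
            "movdqa %%xmm1, %%xmm0\n\t"  /* integer-domain aligned 128-bit copy */
            "movaps %%xmm1, %%xmm0\n\t"  /* same effect, floating-point domain  */
            ::: "xmm0", "xmm1");
    }
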
aesni-sha1-x86_64.S | 48 movdqa 64(%r11),%xmm6 49 movdqa 0(%r11),%xmm9 62 movdqa %xmm0,0(%rsp) 64 movdqa %xmm1,16(%rsp) 66 movdqa %xmm2,32(%rsp) 73 movdqa %xmm1,%xmm4 81 movdqa %xmm3,%xmm8 102 movdqa %xmm9,48(%rsp) 107 movdqa %xmm4,%xmm10 108 movdqa %xmm4,%xmm [all...] |
/external/openssl/crypto/aes/asm/ |
bsaes-x86_64.pl | 57 # SIMD "domain" (movaps instead of movdqa, xorps instead of 230 movdqa $y0, $t0 245 movdqa $y0, $t0 261 movdqa $y0, $t0 262 movdqa $y2, $t1 284 movdqa @x[0], @t[0] 285 movdqa @x[1], @t[1] 302 movdqa @x[4], @t[0] 303 movdqa @x[5], @t[1] 330 movdqa @x[4], @t[3 [all...] |
aesni-sha1-x86_64.S | 48 movdqa 64(%r11),%xmm6 49 movdqa 0(%r11),%xmm9 62 movdqa %xmm0,0(%rsp) 64 movdqa %xmm1,16(%rsp) 66 movdqa %xmm2,32(%rsp) 73 movdqa %xmm1,%xmm4 81 movdqa %xmm3,%xmm8 102 movdqa %xmm9,48(%rsp) 107 movdqa %xmm4,%xmm10 108 movdqa %xmm4,%xmm [all...] |
/external/libvpx/libvpx/vp8/common/x86/ |
postproc_sse2.asm | 16 movdqa xmm4, xmm0 17 movdqa xmm6, xmm0 18 movdqa xmm5, xmm1 30 movdqa xmm2, flimit 32 movdqa xmm7, xmm2 43 movdqa xmm6, xmm0 44 movdqa xmm4, xmm0 45 movdqa xmm2, xmm1 59 movdqa xmm2, flimit 61 movdqa xmm3, xmm [all...] |
sad_sse2.asm | 370 movdqa XMMWORD PTR [rdi], xmm0 371 movdqa XMMWORD PTR [rdi + 16], xmm1 372 movdqa XMMWORD PTR [rdi + rdx], xmm2 373 movdqa XMMWORD PTR [rdi + rdx + 16], xmm3 377 movdqa XMMWORD PTR [rdi], xmm4 378 movdqa XMMWORD PTR [rdi + 16], xmm5 379 movdqa XMMWORD PTR [rdi + rdx], xmm6 380 movdqa XMMWORD PTR [rdi + rdx + 16], xmm7 396 movdqa XMMWORD PTR [rdi], xmm0 397 movdqa XMMWORD PTR [rdi + 16], xmm [all...] |
sad_sse3.asm | 169 movdqa xmm0, XMMWORD PTR [%2] 178 movdqa xmm0, XMMWORD PTR [%2] 191 movdqa xmm0, XMMWORD PTR [%2+%4] 263 movdqa xmm0, XMMWORD PTR [%2] 274 movdqa xmm0, XMMWORD PTR [%2] 291 movdqa xmm0, XMMWORD PTR [%2+%7] 601 movdqa xmm0, XMMWORD PTR [src_ptr] 603 movdqa xmm2, XMMWORD PTR [src_ptr+src_stride] 609 movdqa xmm4, XMMWORD PTR [src_ptr] 611 movdqa xmm6, XMMWORD PTR [src_ptr+src_stride [all...] |
/external/chromium_org/third_party/yasm/source/patched-yasm/modules/objfmts/win64/tests/ |
sce1.asm | 9 movdqa [rbp], xmm7 35 movdqa xmm7, [rbp]
|
sce3.asm | 9 movdqa [rbp], xmm7 35 movdqa xmm7, [rbp]
|
sce3.masm | 12 movdqa [rbp], xmm7 38 movdqa xmm7, [rbp]
|
/external/libvpx/libvpx/vp9/common/x86/ |
vp9_postproc_sse2.asm | 39 movdqa xmm0, [GLOBAL(rd42)] 41 movdqa [rsp], xmm0 66 movdqa xmm1, xmm3 ; mm1 = p0..p3 74 movdqa xmm7, xmm1 ; mm7 = r0 p0..p3 85 movdqa xmm6, xmm1 ; mm6 = r0 p0..p3 99 movdqa xmm6, xmm1 ; mm6 = r0 p0..p3 111 movdqa xmm6, xmm1 ; mm6 = r0 p0..p3 152 movdqa xmm3, xmm4 155 movdqa xmm1, xmm3 ; mm1 = p0..p3 159 movdqa xmm5, xmm [all...] |
/external/libffi/src/x86/ |
darwin64.S | 190 movdqa 48(%r10), %xmm0 191 movdqa 64(%r10), %xmm1 192 movdqa 80(%r10), %xmm2 193 movdqa 96(%r10), %xmm3 194 movdqa 112(%r10), %xmm4 195 movdqa 128(%r10), %xmm5 196 movdqa 144(%r10), %xmm6 197 movdqa 160(%r10), %xmm7 307 movdqa %xmm0, 48(%rsp) 308 movdqa %xmm1, 64(%rsp [all...] |
unix64.S | 194 movdqa 48(%r10), %xmm0 195 movdqa 64(%r10), %xmm1 196 movdqa 80(%r10), %xmm2 197 movdqa 96(%r10), %xmm3 198 movdqa 112(%r10), %xmm4 199 movdqa 128(%r10), %xmm5 200 movdqa 144(%r10), %xmm6 201 movdqa 160(%r10), %xmm7 318 movdqa %xmm0, 48(%rsp) 319 movdqa %xmm1, 64(%rsp [all...] |
/external/dropbear/libtomcrypt/src/encauth/gcm/ |
gcm_mult_h.c | 30 asm("movdqa (%0),%%xmm0"::"r"(&gcm->PC[0][I[0]][0])); 34 asm("movdqa %%xmm0,(%0)"::"r"(&T));
|
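Note on the gcm_mult_h.c hits: the inline assembly moves one 16-byte precomputed table entry through xmm0 with movdqa, which raises a #GP fault if the memory operand is not 16-byte aligned, so this path depends on the PC table being 16-byte aligned. An intrinsics sketch of the same aligned-versus-unaligned distinction (illustrative names, not libtomcrypt code):

    /* Illustrative only, not libtomcrypt code.  _mm_load_si128 and
     * _mm_store_si128 correspond to the aligned movdqa forms and require
     * 16-byte-aligned pointers; the *_u variants correspond to movdqu and
     * accept any alignment. */
    #include <emmintrin.h>

    /* copy one 16-byte block; src and dst must be 16-byte aligned */
    static void copy_block_aligned(const void *src, void *dst)
    {
        __m128i v = _mm_load_si128((const __m128i *)src);    /* movdqa load  */
        _mm_store_si128((__m128i *)dst, v);                   /* movdqa store */
    }

    /* the same copy without any alignment requirement */
    static void copy_block_unaligned(const void *src, void *dst)
    {
        __m128i v = _mm_loadu_si128((const __m128i *)src);   /* movdqu load  */
        _mm_storeu_si128((__m128i *)dst, v);                  /* movdqu store */
    }
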
/external/chromium_org/third_party/openssl/openssl/crypto/sha/asm/ |
sha1-x86_64.S | 1314 movdqa 64(%r11),%xmm6 1315 movdqa 0(%r11),%xmm9 1328 movdqa %xmm0,0(%rsp) 1330 movdqa %xmm1,16(%rsp) 1332 movdqa %xmm2,32(%rsp) 1337 movdqa %xmm1,%xmm4 1340 movdqa %xmm3,%xmm8 1361 movdqa %xmm9,48(%rsp) 1364 movdqa %xmm4,%xmm10 1365 movdqa %xmm4,%xmm [all...] |
/external/openssl/crypto/sha/asm/ |
sha1-x86_64.S | 1314 movdqa 64(%r11),%xmm6 1315 movdqa 0(%r11),%xmm9 1328 movdqa %xmm0,0(%rsp) 1330 movdqa %xmm1,16(%rsp) 1332 movdqa %xmm2,32(%rsp) 1337 movdqa %xmm1,%xmm4 1340 movdqa %xmm3,%xmm8 1361 movdqa %xmm9,48(%rsp) 1364 movdqa %xmm4,%xmm10 1365 movdqa %xmm4,%xmm [all...] |