/hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/vp8/encoder/x86/
    ssim_opt.asm
        17   movdqa xmm1, xmm3
        20   movdqa xmm2, xmm4
        29   movdqa xmm2,%1
        33   movdqa xmm2,%1
        41   movdqa xmm1, %1
        92   movdqa xmm3, xmm5
        93   movdqa xmm4, xmm6
        99   movdqa xmm3, xmm5
        100  movdqa xmm4, xmm6
/hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/vp9/encoder/x86/
    vp9_ssim_opt.asm
        17   movdqa xmm1, xmm3
        20   movdqa xmm2, xmm4
        29   movdqa xmm2,%1
        33   movdqa xmm2,%1
        41   movdqa xmm1, %1
        92   movdqa xmm3, xmm5
        93   movdqa xmm4, xmm6
        99   movdqa xmm3, xmm5
        100  movdqa xmm4, xmm6
    vp9_variance_impl_sse2.asm
        35   movdqa xmm0, [rax]
        36   movdqa xmm1, [rax+16]
        37   movdqa xmm2, [rax+32]
        38   movdqa xmm3, [rax+48]
        53   movdqa xmm3,xmm4
        56   movdqa xmm3,xmm4
        134  movdqa xmm3, xmm1
        135  movdqa xmm4, xmm2
        164  movdqa xmm1, xmm6
        176  movdqa xmm2, xmm [all...]
/bionic/libc/arch-x86/atom/string/
    ssse3-memcmp-atom.S
        270  movdqa (%esi), %xmm0
        274  movdqa 16(%esi), %xmm2
        280  movdqa %xmm0, %xmm1
        281  movdqa 32(%esi), %xmm0
        282  movdqa 48(%esi), %xmm2
        300  movdqa %xmm0, %xmm1
        321  movdqa 16(%esi), %xmm1
        322  movdqa %xmm1, %xmm2
        326  movdqa 32(%esi), %xmm3
        347  movdqa 16(%esi), %xmm [all...]
    sse2-memchr-atom.S
        127  movdqa (%edi), %xmm0
        143  movdqa (%edi), %xmm0
        150  movdqa 16(%edi), %xmm2
        157  movdqa 32(%edi), %xmm3
        164  movdqa 48(%edi), %xmm4
        175  movdqa (%edi), %xmm0
        182  movdqa 16(%edi), %xmm2
        189  movdqa 32(%edi), %xmm3
        196  movdqa 48(%edi), %xmm4
        213  movdqa (%edi), %xmm [all...]
/external/boringssl/src/crypto/aes/asm/
    bsaes-x86_64.pl
        224  movdqa $y0, $t0
        239  movdqa $y0, $t0
        255  movdqa $y0, $t0
        256  movdqa $y2, $t1
        278  movdqa @x[0], @t[0]
        279  movdqa @x[1], @t[1]
        296  movdqa @x[4], @t[0]
        297  movdqa @x[5], @t[1]
        324  movdqa @x[4], @t[3]
        325  movdqa @x[5], @t[2 [all...]
/external/libvpx/libvpx/vp8/common/x86/
    postproc_sse2.asm
        16   movdqa xmm4, xmm0
        17   movdqa xmm6, xmm0
        18   movdqa xmm5, xmm1
        30   movdqa xmm2, flimit
        32   movdqa xmm7, xmm2
        43   movdqa xmm6, xmm0
        44   movdqa xmm4, xmm0
        45   movdqa xmm2, xmm1
        59   movdqa xmm2, flimit
        61   movdqa xmm3, xmm [all...]
    sad_sse2.asm
        370  movdqa XMMWORD PTR [rdi], xmm0
        371  movdqa XMMWORD PTR [rdi + 16], xmm1
        372  movdqa XMMWORD PTR [rdi + rdx], xmm2
        373  movdqa XMMWORD PTR [rdi + rdx + 16], xmm3
        377  movdqa XMMWORD PTR [rdi], xmm4
        378  movdqa XMMWORD PTR [rdi + 16], xmm5
        379  movdqa XMMWORD PTR [rdi + rdx], xmm6
        380  movdqa XMMWORD PTR [rdi + rdx + 16], xmm7
        396  movdqa XMMWORD PTR [rdi], xmm0
        397  movdqa XMMWORD PTR [rdi + 16], xmm [all...]
/hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/vp8/common/x86/
    postproc_sse2.asm
        16   movdqa xmm4, xmm0
        17   movdqa xmm6, xmm0
        18   movdqa xmm5, xmm1
        30   movdqa xmm2, flimit
        32   movdqa xmm7, xmm2
        43   movdqa xmm6, xmm0
        44   movdqa xmm4, xmm0
        45   movdqa xmm2, xmm1
        59   movdqa xmm2, flimit
        61   movdqa xmm3, xmm [all...]
    sad_sse2.asm
        370  movdqa XMMWORD PTR [rdi], xmm0
        371  movdqa XMMWORD PTR [rdi + 16], xmm1
        372  movdqa XMMWORD PTR [rdi + rdx], xmm2
        373  movdqa XMMWORD PTR [rdi + rdx + 16], xmm3
        377  movdqa XMMWORD PTR [rdi], xmm4
        378  movdqa XMMWORD PTR [rdi + 16], xmm5
        379  movdqa XMMWORD PTR [rdi + rdx], xmm6
        380  movdqa XMMWORD PTR [rdi + rdx + 16], xmm7
        396  movdqa XMMWORD PTR [rdi], xmm0
        397  movdqa XMMWORD PTR [rdi + 16], xmm [all...]
/external/llvm/test/CodeGen/X86/
    vector-zext.ll
        10   ; SSE2-NEXT: movdqa %xmm0, %xmm1
        20   ; SSSE3-NEXT: movdqa %xmm0, %xmm1
        30   ; SSE41-NEXT: movdqa %xmm0, %xmm1
        57   ; SSE2-NEXT: movdqa {{.*#+}} xmm3 = [4294967295,4294967295]
        61   ; SSE2-NEXT: movdqa %xmm2, %xmm0
        67   ; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [4294967295,4294967295]
        71   ; SSSE3-NEXT: movdqa %xmm2, %xmm0
        77   ; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [4294967295,4294967295]
        81   ; SSE41-NEXT: movdqa %xmm2, %xmm0
        104  ; SSE2-NEXT: movdqa %xmm0, %xmm [all...]
    pmulld.ll
        9    ; WIN64-NEXT: movdqa (%rcx), %xmm0
        20   ; WIN64-NEXT: movdqa (%rcx), %xmm0
/external/boringssl/linux-x86/crypto/sha/
    sha1-586.S
        1417  movdqa 80(%ebp),%xmm3
        1433  movdqa %xmm1,(%esp)
        1436  movdqa %xmm0,16(%esp)
        1438  movdqa %xmm0,%xmm2
        1444  movdqa %xmm0,%xmm1
        1450  movdqa %xmm0,%xmm2
        1456  movdqa %xmm0,%xmm1
        1462  movdqa %xmm0,%xmm2
        1468  movdqa %xmm0,%xmm1
        1474  movdqa %xmm0,%xmm [all...]
/external/boringssl/mac-x86/crypto/sha/
    sha1-586.S
        1414  movdqa 80(%ebp),%xmm3
        1430  movdqa %xmm1,(%esp)
        1433  movdqa %xmm0,16(%esp)
        1435  movdqa %xmm0,%xmm2
        1441  movdqa %xmm0,%xmm1
        1447  movdqa %xmm0,%xmm2
        1453  movdqa %xmm0,%xmm1
        1459  movdqa %xmm0,%xmm2
        1465  movdqa %xmm0,%xmm1
        1471  movdqa %xmm0,%xmm [all...]
/external/boringssl/win-x86/crypto/sha/
    sha1-586.asm
        1426  movdqa xmm3,[80+ebp]
        1442  movdqa [esp],xmm1
        1445  movdqa [16+esp],xmm0
        1447  movdqa xmm2,xmm0
        1453  movdqa xmm1,xmm0
        1459  movdqa xmm2,xmm0
        1465  movdqa xmm1,xmm0
        1471  movdqa xmm2,xmm0
        1477  movdqa xmm1,xmm0
        1483  movdqa xmm2,xmm [all...]
/external/libvpx/libvpx/vp9/common/x86/
    vp9_subpixel_bilinear_ssse3.asm
        19   movdqa xmm3, [rdx] ;load filters
        56   movdqa xmm7, [rdx] ;load filters
        176  movdqa xmm2, xmm0
        253  movdqa xmm2, xmm0
        278  movdqa xmm1, xmm0
        304  movdqa xmm1, xmm0
        332  movdqa xmm2, xmm0
        357  movdqa xmm1, xmm0
        383  movdqa xmm1, xmm0
        411  movdqa xmm2, xmm [all...]
    vp9_postproc_sse2.asm
        39   movdqa xmm0, [GLOBAL(rd42)]
        41   movdqa [rsp], xmm0
        66   movdqa xmm1, xmm3 ; mm1 = p0..p3
        74   movdqa xmm7, xmm1 ; mm7 = r0 p0..p3
        85   movdqa xmm6, xmm1 ; mm6 = r0 p0..p3
        99   movdqa xmm6, xmm1 ; mm6 = r0 p0..p3
        111  movdqa xmm6, xmm1 ; mm6 = r0 p0..p3
        152  movdqa xmm3, xmm4
        155  movdqa xmm1, xmm3 ; mm1 = p0..p3
        159  movdqa xmm5, xmm [all...]
/hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/vp9/common/x86/
    vp9_subpixel_bilinear_ssse3.asm
        19   movdqa xmm3, [rdx] ;load filters
        56   movdqa xmm7, [rdx] ;load filters
        176  movdqa xmm2, xmm0
        253  movdqa xmm2, xmm0
        278  movdqa xmm1, xmm0
        304  movdqa xmm1, xmm0
        332  movdqa xmm2, xmm0
        357  movdqa xmm1, xmm0
        383  movdqa xmm1, xmm0
        411  movdqa xmm2, xmm [all...]
    vp9_postproc_sse2.asm
        39   movdqa xmm0, [GLOBAL(rd42)]
        41   movdqa [rsp], xmm0
        66   movdqa xmm1, xmm3 ; mm1 = p0..p3
        74   movdqa xmm7, xmm1 ; mm7 = r0 p0..p3
        85   movdqa xmm6, xmm1 ; mm6 = r0 p0..p3
        99   movdqa xmm6, xmm1 ; mm6 = r0 p0..p3
        111  movdqa xmm6, xmm1 ; mm6 = r0 p0..p3
        152  movdqa xmm3, xmm4
        155  movdqa xmm1, xmm3 ; mm1 = p0..p3
        159  movdqa xmm5, xmm [all...]
/external/boringssl/linux-x86_64/crypto/aes/
    aesni-x86_64.S
        846   movdqa .Lincrement64(%rip),%xmm9
        847   movdqa .Lbswap_mask(%rip),%xmm7
        853   movdqa %xmm6,%xmm2
        888   movdqa %xmm6,%xmm2
        911   movdqa .Lincrement64(%rip),%xmm9
        912   movdqa .Lbswap_mask(%rip),%xmm7
        941   movdqa %xmm6,%xmm2
        1049  movdqa %xmm2,0(%rsp)
        1051  movdqa %xmm2,%xmm3
        1052  movdqa %xmm2,%xmm [all...]
/external/boringssl/mac-x86_64/crypto/aes/
    aesni-x86_64.S
        845   movdqa L$increment64(%rip),%xmm9
        846   movdqa L$bswap_mask(%rip),%xmm7
        852   movdqa %xmm6,%xmm2
        887   movdqa %xmm6,%xmm2
        910   movdqa L$increment64(%rip),%xmm9
        911   movdqa L$bswap_mask(%rip),%xmm7
        940   movdqa %xmm6,%xmm2
        1048  movdqa %xmm2,0(%rsp)
        1050  movdqa %xmm2,%xmm3
        1051  movdqa %xmm2,%xmm [all...]
/external/boringssl/win-x86_64/crypto/aes/
    aesni-x86_64.asm
        892   movdqa xmm9,XMMWORD[$L$increment64]
        893   movdqa xmm7,XMMWORD[$L$bswap_mask]
        899   movdqa xmm2,xmm6
        934   movdqa xmm2,xmm6
        986   movdqa xmm9,XMMWORD[$L$increment64]
        987   movdqa xmm7,XMMWORD[$L$bswap_mask]
        1016  movdqa xmm2,xmm6
        1157  movdqa XMMWORD[rsp],xmm2
        1159  movdqa xmm3,xmm2
        1160  movdqa xmm4,xmm [all...]
/external/boringssl/win-x86_64/crypto/sha/
    sha1-x86_64.asm
        1295  movdqa xmm6,XMMWORD[64+r11]
        1296  movdqa xmm9,XMMWORD[((-64))+r11]
        1309  movdqa XMMWORD[rsp],xmm0
        1311  movdqa XMMWORD[16+rsp],xmm1
        1313  movdqa XMMWORD[32+rsp],xmm2
        1321  movdqa xmm8,xmm3
        1342  movdqa XMMWORD[48+rsp],xmm9
        1345  movdqa xmm10,xmm4
        1349  movdqa xmm8,xmm4
        1359  movdqa xmm9,xmm1 [all...]
/external/boringssl/linux-x86_64/crypto/modes/
    ghash-x86_64.S
        673  movdqa %xmm2,%xmm3
        687  movdqa %xmm2,%xmm0
        689  movdqa %xmm0,%xmm1
        698  movdqa %xmm3,%xmm4
        704  movdqa %xmm0,%xmm4
        705  movdqa %xmm0,%xmm3
        711  movdqa %xmm0,%xmm3
        718  movdqa %xmm0,%xmm4
        734  movdqa %xmm0,%xmm1
        743  movdqa %xmm3,%xmm [all...]
/external/boringssl/mac-x86_64/crypto/modes/
    ghash-x86_64.S
        672  movdqa %xmm2,%xmm3
        686  movdqa %xmm2,%xmm0
        688  movdqa %xmm0,%xmm1
        697  movdqa %xmm3,%xmm4
        703  movdqa %xmm0,%xmm4
        704  movdqa %xmm0,%xmm3
        710  movdqa %xmm0,%xmm3
        717  movdqa %xmm0,%xmm4
        733  movdqa %xmm0,%xmm1
        742  movdqa %xmm3,%xmm [all...]
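
Every hit above is the SSE2 instruction movdqa (move aligned double quadword), which copies 128 bits between XMM registers or between an XMM register and 16-byte-aligned memory; a misaligned memory operand faults, which is why these hand-written routines keep their buffers and stack slots 16-byte aligned. For orientation only, here is a minimal C sketch (not taken from any of the files listed) using the SSE2 intrinsics that compilers typically lower to movdqa and paddd:

    /*
     * Minimal sketch, for illustration only (not from any file listed above).
     * _mm_load_si128/_mm_store_si128 require 16-byte-aligned addresses and
     * typically compile to movdqa; _mm_add_epi32 maps to paddd.
     */
    #include <stdint.h>
    #include <stdio.h>
    #include <emmintrin.h>                      /* SSE2 intrinsics */

    int main(void) {
        /* 16-byte-aligned buffers, as movdqa requires. */
        _Alignas(16) int32_t a[4]   = {1, 2, 3, 4};
        _Alignas(16) int32_t b[4]   = {10, 20, 30, 40};
        _Alignas(16) int32_t out[4];

        __m128i va   = _mm_load_si128((const __m128i *)a);   /* aligned load  */
        __m128i vb   = _mm_load_si128((const __m128i *)b);   /* aligned load  */
        __m128i vsum = _mm_add_epi32(va, vb);                 /* packed add    */
        _mm_store_si128((__m128i *)out, vsum);                /* aligned store */

        printf("%d %d %d %d\n", out[0], out[1], out[2], out[3]);  /* 11 22 33 44 */
        return 0;
    }

Built with something like gcc -O2 -msse2 on x86, the aligned loads and the store normally show up as movdqa in the generated assembly (a load may also be folded into the paddd), i.e. the same register-to-register and aligned-memory forms that appear throughout the listing.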