/external/chromium_org/third_party/yasm/source/patched-yasm/modules/arch/x86/tests/
ssse3.asm
    19  TEST_GENERIC pabsb, movdqu, xmm0, xmm, xmm1
    20  TEST_GENERIC pabsw, movdqu, xmm0, xmm, xmm1
    21  TEST_GENERIC pabsd, movdqu, xmm0, xmm, xmm1
    27  TEST_GENERIC psignb, movdqu, xmm0, xmm, xmm1
    28  TEST_GENERIC psignw, movdqu, xmm0, xmm, xmm1
    29  TEST_GENERIC psignd, movdqu, xmm0, xmm, xmm1
    35  TEST_GENERIC phaddw, movdqu, xmm0, xmm, xmm1
    36  TEST_GENERIC phaddsw, movdqu, xmm0, xmm, xmm1
    37  TEST_GENERIC phaddd, movdqu, xmm0, xmm, xmm1
    43  TEST_GENERIC phsubw, movdqu, xmm0, xmm, xmm  [all...]
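The TEST_GENERIC lines above exercise each SSSE3 instruction's memory form through movdqu. A minimal C intrinsics sketch of the same operations (buffer and variable names are illustrative, not taken from the test; build with -mssse3):

    #include <stdio.h>
    #include <tmmintrin.h>   /* SSSE3: _mm_abs_epi8, _mm_sign_epi8 */

    int main(void) {
        /* 17 bytes so that &buf[1] is a deliberately misaligned source. */
        signed char buf[17] = {9, -1, 2, -3, 4, -5, 6, -7, 8,
                               -9, 10, -11, 12, -13, 14, -15, 16};
        __m128i v  = _mm_loadu_si128((const __m128i *)&buf[1]); /* movdqu */
        __m128i av = _mm_abs_epi8(v);                           /* pabsb  */
        __m128i sv = _mm_sign_epi8(av, v);   /* psignb: sign of v applied to |v|, i.e. v again */
        signed char out[16];
        _mm_storeu_si128((__m128i *)out, sv);                   /* movdqu */
        printf("%d %d\n", out[0], out[1]);                      /* prints -1 2 */
        return 0;
    }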
/external/chromium_org/third_party/openssl/openssl/crypto/aes/asm/
bsaes-x86_64.pl
    911   movdqu ($inp), %xmm7        # load round 0 key
    920   movdqu ($inp), %xmm6        # load round 1 key
    973   movdqu ($inp), %xmm6        # load next round key
    1013  movdqu 0x00($inp), @XMM[0]  # load input
    1014  movdqu 0x10($inp), @XMM[1]
    1015  movdqu 0x20($inp), @XMM[2]
    1016  movdqu 0x30($inp), @XMM[3]
    1017  movdqu 0x40($inp), @XMM[4]
    1018  movdqu 0x50($inp), @XMM[5]
    1019  movdqu 0x60($inp), @XMM[6  [all...]
bsaes-x86_64.S
    1050  movdqu (%rcx),%xmm7
    1059  movdqu (%rcx),%xmm6
    1112  movdqu (%rcx),%xmm6
    1169  movdqu (%rbx),%xmm14
    1172  movdqu 0(%r12),%xmm15
    1173  movdqu 16(%r12),%xmm0
    1174  movdqu 32(%r12),%xmm1
    1175  movdqu 48(%r12),%xmm2
    1176  movdqu 64(%r12),%xmm3
    1177  movdqu 80(%r12),%xmm  [all...]
vpaes-x86.S
    76   movdqu (%edx),%xmm5
    131  movdqu (%edx),%xmm5
    155  movdqu (%edx),%xmm5
    221  movdqu (%edx),%xmm0
    237  movdqu (%esi),%xmm0
    246  movdqu %xmm0,(%edx)
    251  movdqu %xmm3,(%edx)
    267  movdqu 8(%esi),%xmm0
    287  movdqu 16(%esi),%xmm0
    316  movdqu %xmm0,(%edx  [all...]
vpaes-x86_64.S
    27   movdqu (%r9),%xmm5
    86   movdqu (%r9),%xmm5
    118  movdqu (%r9),%xmm5
    195  movdqu (%r9),%xmm0
    225  movdqu (%rdi),%xmm0
    238  movdqu %xmm0,(%rdx)
    245  movdqu %xmm3,(%rdx)
    289  movdqu 8(%rdi),%xmm0
    321  movdqu 16(%rdi),%xmm0
    372  movdqu %xmm0,(%rdx  [all...]
aesni-x86_64.S
    456  movdqu (%rdi),%xmm2
    457  movdqu 16(%rdi),%xmm3
    458  movdqu 32(%rdi),%xmm4
    459  movdqu 48(%rdi),%xmm5
    460  movdqu 64(%rdi),%xmm6
    461  movdqu 80(%rdi),%xmm7
    462  movdqu 96(%rdi),%xmm8
    463  movdqu 112(%rdi),%xmm9
    471  movdqu (%rdi),%xmm2
    474  movdqu 16(%rdi),%xmm  [all...]
vpaes-x86.pl
    186  &movdqu ("xmm5",&QWP(0,$key));
    245  &movdqu ("xmm5",&QWP(0,$key));
    275  &movdqu ("xmm5",&QWP(0,$key));
    350  &movdqu ("xmm0",&QWP(0,$key));
    372  &movdqu ("xmm0",&QWP(0,$inp));   # load key (unaligned)
    386  &movdqu (&QWP(0,$key),"xmm0");
    393  &movdqu (&QWP(0,$key),"xmm3");
    436  &movdqu ("xmm0",&QWP(8,$inp));   # load key part 2 (very unaligned)
    467  &movdqu ("xmm0",&QWP(16,$inp));  # load key part 2 (unaligned)
    516  &movdqu (&QWP(0,$key),"xmm0");   # save last ke  [all...]
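Across the bsaes/vpaes/aesni listings above, the pattern is the same: round keys and data blocks arrive through pointers with no 16-byte alignment guarantee, so every 128-bit access uses movdqu rather than movdqa. A minimal C intrinsics sketch of the round-0 step these loads feed (function and parameter names are illustrative, not OpenSSL's):

    #include <emmintrin.h>  /* SSE2: unaligned 128-bit load/store */

    /* XOR the round-0 key into nblk consecutive 16-byte blocks, in place.
     * Neither pointer needs 16-byte alignment, hence loadu/storeu (movdqu). */
    void add_round0_key(unsigned char *blocks,
                        const unsigned char *round_key, int nblk) {
        __m128i k = _mm_loadu_si128((const __m128i *)round_key); /* load round 0 key */
        for (int i = 0; i < nblk; i++) {
            __m128i b = _mm_loadu_si128((const __m128i *)(blocks + 16 * i));
            _mm_storeu_si128((__m128i *)(blocks + 16 * i), _mm_xor_si128(b, k));
        }
    }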
/external/openssl/crypto/aes/asm/
bsaes-x86_64.pl
    911   movdqu ($inp), %xmm7        # load round 0 key
    920   movdqu ($inp), %xmm6        # load round 1 key
    973   movdqu ($inp), %xmm6        # load next round key
    1013  movdqu 0x00($inp), @XMM[0]  # load input
    1014  movdqu 0x10($inp), @XMM[1]
    1015  movdqu 0x20($inp), @XMM[2]
    1016  movdqu 0x30($inp), @XMM[3]
    1017  movdqu 0x40($inp), @XMM[4]
    1018  movdqu 0x50($inp), @XMM[5]
    1019  movdqu 0x60($inp), @XMM[6  [all...]
bsaes-x86_64.S
    1050  movdqu (%rcx),%xmm7
    1059  movdqu (%rcx),%xmm6
    1112  movdqu (%rcx),%xmm6
    1169  movdqu (%rbx),%xmm14
    1172  movdqu 0(%r12),%xmm15
    1173  movdqu 16(%r12),%xmm0
    1174  movdqu 32(%r12),%xmm1
    1175  movdqu 48(%r12),%xmm2
    1176  movdqu 64(%r12),%xmm3
    1177  movdqu 80(%r12),%xmm  [all...]
vpaes-x86.S
    76   movdqu (%edx),%xmm5
    131  movdqu (%edx),%xmm5
    155  movdqu (%edx),%xmm5
    221  movdqu (%edx),%xmm0
    237  movdqu (%esi),%xmm0
    246  movdqu %xmm0,(%edx)
    251  movdqu %xmm3,(%edx)
    267  movdqu 8(%esi),%xmm0
    287  movdqu 16(%esi),%xmm0
    316  movdqu %xmm0,(%edx  [all...]
vpaes-x86_64.S
    27   movdqu (%r9),%xmm5
    86   movdqu (%r9),%xmm5
    118  movdqu (%r9),%xmm5
    195  movdqu (%r9),%xmm0
    225  movdqu (%rdi),%xmm0
    238  movdqu %xmm0,(%rdx)
    245  movdqu %xmm3,(%rdx)
    289  movdqu 8(%rdi),%xmm0
    321  movdqu 16(%rdi),%xmm0
    372  movdqu %xmm0,(%rdx  [all...]
aesni-x86_64.S
    456  movdqu (%rdi),%xmm2
    457  movdqu 16(%rdi),%xmm3
    458  movdqu 32(%rdi),%xmm4
    459  movdqu 48(%rdi),%xmm5
    460  movdqu 64(%rdi),%xmm6
    461  movdqu 80(%rdi),%xmm7
    462  movdqu 96(%rdi),%xmm8
    463  movdqu 112(%rdi),%xmm9
    471  movdqu (%rdi),%xmm2
    474  movdqu 16(%rdi),%xmm  [all...]
vpaes-x86.pl
    186  &movdqu ("xmm5",&QWP(0,$key));
    245  &movdqu ("xmm5",&QWP(0,$key));
    275  &movdqu ("xmm5",&QWP(0,$key));
    350  &movdqu ("xmm0",&QWP(0,$key));
    372  &movdqu ("xmm0",&QWP(0,$inp));   # load key (unaligned)
    386  &movdqu (&QWP(0,$key),"xmm0");
    393  &movdqu (&QWP(0,$key),"xmm3");
    436  &movdqu ("xmm0",&QWP(8,$inp));   # load key part 2 (very unaligned)
    467  &movdqu ("xmm0",&QWP(16,$inp));  # load key part 2 (unaligned)
    516  &movdqu (&QWP(0,$key),"xmm0");   # save last ke  [all...]
/external/llvm/test/CodeGen/X86/
sse-align-6.ll
    1  ; RUN: llc < %s -march=x86-64 | grep movdqu | count 1
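The test asserts that an unaligned 128-bit load is lowered to exactly one movdqu. A C analogue of the kind of source that produces such IR (a sketch, not the contents of the .ll file):

    #include <emmintrin.h>

    /* With no alignment guarantee on p, the compiler must emit movdqu;
     * an aligned movdqa could fault if p is not 16-byte aligned. */
    __m128i load16(const void *p) {
        return _mm_loadu_si128((const __m128i *)p);
    }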
pmovext.ll
    7  ;CHECK-NEXT: movdqu
/external/valgrind/main/memcheck/tests/amd64/
bug279698.c
    11  ("movdqu (%0), %%xmm0    \n"
    13  "movdqu %%xmm0, 16(%0)   \n"
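Those two lines are fragments of a GCC inline-asm statement; filled out, the construct looks roughly like this (a sketch that reconstructs the surrounding boilerplate the search result omits):

    #include <stdlib.h>
    #include <string.h>

    int main(void) {
        unsigned char *buf = malloc(32);
        memset(buf, 0xab, 16);
        /* Copy 16 unaligned bytes from buf to buf+16 through xmm0. */
        __asm__ __volatile__(
            "movdqu (%0), %%xmm0    \n"
            "movdqu %%xmm0, 16(%0)  \n"
            : : "r"(buf) : "xmm0", "memory");
        free(buf);
        return 0;
    }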
/bionic/libc/arch-x86/string/
sse2-wcscmp-atom.S
    175  movdqu 16(%edi), %xmm1
    176  movdqu 16(%esi), %xmm2
    184  movdqu 32(%edi), %xmm1
    185  movdqu 32(%esi), %xmm2
    193  movdqu 48(%edi), %xmm1
    194  movdqu 48(%esi), %xmm2
    242  movdqu 16(%edi), %xmm1
    243  movdqu 16(%esi), %xmm2
    251  movdqu 32(%edi), %xmm1
    252  movdqu 32(%esi), %xmm  [all...]
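sse2-wcscmp-atom.S walks both strings in 16-byte steps (four 32-bit wchar_t values on bionic), loading each side with movdqu because neither argument is guaranteed to be aligned. The comparison step, sketched with intrinsics (illustrative only; the bionic routine is hand-scheduled assembly, and the function name here is made up):

    #include <wchar.h>
    #include <emmintrin.h>

    /* Compare four 32-bit wchar_t values from each string at byte offset off;
     * returns nonzero if any lane differs. */
    int chunk_differs(const wchar_t *a, const wchar_t *b, int off) {
        __m128i va = _mm_loadu_si128((const __m128i *)((const char *)a + off));
        __m128i vb = _mm_loadu_si128((const __m128i *)((const char *)b + off));
        __m128i eq = _mm_cmpeq_epi32(va, vb);    /* all-ones where lanes match */
        return _mm_movemask_epi8(eq) != 0xFFFF;  /* any byte not all-ones? */
    }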
/external/valgrind/main/none/tests/amd64/
aes.c
    80   "movdqu %1, %%xmm1"  "\n\t"
    81   "movdqu %2, %%xmm2"  "\n\t"
    83   "movdqu %%xmm1, %0"  "\n\t"
    112  "movdqu %1, %%xmm1"  "\n\t"
    113  "movdqu %2, %%xmm2"  "\n\t"
    115  "movdqu %%xmm1, %0"  "\n\t"
    144  "movdqu %1, %%xmm1"  "\n\t"
    145  "movdqu %2, %%xmm2"  "\n\t"
    147  "movdqu %%xmm1, %0"  "\n\t"
    176  "movdqu %1, %%xmm1"  "\n\t  [all...]
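Each test in aes.c wraps a single AES-NI instruction in the same frame: movdqu the operands into xmm1/xmm2, execute, movdqu the result back out. Filled out for aesenc (a sketch along the same lines; requires AES-NI hardware, and the V128 wrapper type is an assumption, not copied from the test):

    #include <stdio.h>
    #include <string.h>

    typedef struct { unsigned char b[16]; } V128;

    int main(void) {
        V128 state, rkey, out;
        memset(&state, 0x11, sizeof state);
        memset(&rkey,  0x22, sizeof rkey);
        __asm__ __volatile__(
            "movdqu %1, %%xmm1     \n\t"  /* load state (alignment unknown) */
            "movdqu %2, %%xmm2     \n\t"  /* load round key                 */
            "aesenc %%xmm2, %%xmm1 \n\t"  /* one AES encryption round       */
            "movdqu %%xmm1, %0     \n\t"  /* store result                   */
            : "=m"(out) : "m"(state), "m"(rkey) : "xmm1", "xmm2");
        printf("%02x %02x ...\n", out.b[0], out.b[1]);
        return 0;
    }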
/external/libvpx/libvpx/vp8/common/x86/
sad_sse2.asm
    356  movdqu xmm0, XMMWORD PTR [rsi]
    357  movdqu xmm1, XMMWORD PTR [rsi + 16]
    358  movdqu xmm2, XMMWORD PTR [rsi + rax]
    359  movdqu xmm3, XMMWORD PTR [rsi + rax + 16]
    363  movdqu xmm4, XMMWORD PTR [rsi]
    364  movdqu xmm5, XMMWORD PTR [rsi + 16]
    365  movdqu xmm6, XMMWORD PTR [rsi + rax]
    366  movdqu xmm7, XMMWORD PTR [rsi + rax + 16]
    392  movdqu xmm0, XMMWORD PTR [rsi]
    393  movdqu xmm1, XMMWORD PTR [rsi + 16  [all...]
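These loads feed a sum-of-absolute-differences over a pixel block; movdqu is required because the candidate block in the reference frame can start at any byte offset. The core step in intrinsics (a sketch; the real routine is unrolled and hand-scheduled, and the function name here is made up):

    #include <emmintrin.h>

    /* SAD between a 16x16 source block and a reference block at an
     * arbitrary byte offset; neither pointer needs to be aligned. */
    unsigned sad16x16(const unsigned char *src, int src_stride,
                      const unsigned char *ref, int ref_stride) {
        __m128i acc = _mm_setzero_si128();
        for (int row = 0; row < 16; row++) {
            __m128i s = _mm_loadu_si128((const __m128i *)(src + row * src_stride));
            __m128i r = _mm_loadu_si128((const __m128i *)(ref + row * ref_stride));
            acc = _mm_add_epi64(acc, _mm_sad_epu8(s, r)); /* psadbw: two 64-bit partial sums */
        }
        /* psadbw leaves partial sums in the low and high 64-bit halves. */
        return (unsigned)(_mm_cvtsi128_si32(acc) +
                          _mm_cvtsi128_si32(_mm_srli_si128(acc, 8)));
    }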
postproc_sse2.asm
    124  movdqu xmm0, XMMWORD PTR [rsi]
    125  movdqu xmm1, XMMWORD PTR [rsi + rax]
    126  movdqu xmm3, XMMWORD PTR [rsi + 2*rax]
    132  movdqu xmm1, XMMWORD PTR [rsi + 2*rax]
    133  movdqu xmm3, XMMWORD PTR [rsi + rax]
    137  movdqu XMMWORD PTR [rdi], xmm0
    178  movdqu xmm0, XMMWORD PTR [rdi + rdx]
    179  movdqu xmm1, XMMWORD PTR [rdi + rdx -2]
    180  movdqu xmm3, XMMWORD PTR [rdi + rdx -1]
    184  movdqu xmm1, XMMWORD PTR [rdi + rdx +1  [all...]
variance_impl_ssse3.asm
    67   movdqu xmm0, XMMWORD PTR [rsi]
    68   movdqu xmm1, XMMWORD PTR [rsi+1]
    92   movdqu xmm1, XMMWORD PTR [rsi]
    93   movdqu xmm2, XMMWORD PTR [rsi+1]
    163  movdqu xmm1, XMMWORD PTR [rsi]
    173  movdqu xmm3, XMMWORD PTR [rsi]
    264  movdqu xmm1, XMMWORD PTR [rsi]
    265  movdqu xmm2, XMMWORD PTR [rsi+1]
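The paired loads at [rsi] and [rsi+1] are the telltale of horizontal sub-pixel filtering: the same row is fetched twice, one byte apart, which rules out aligned movdqa. A sketch of the idea using a simple half-pixel average (a deliberate simplification: the actual routine applies a two-tap multiply-and-round filter via pmaddubsw, not pavgb):

    #include <emmintrin.h>

    /* Half-pixel horizontal interpolation of one 16-byte row: average each
     * byte with its right neighbor. The offset-by-one second load is why
     * this cannot use aligned loads. */
    void half_pel_row(const unsigned char *src, unsigned char *dst) {
        __m128i a = _mm_loadu_si128((const __m128i *)src);        /* s0..s15 */
        __m128i b = _mm_loadu_si128((const __m128i *)(src + 1));  /* s1..s16 */
        _mm_storeu_si128((__m128i *)dst, _mm_avg_epu8(a, b));     /* pavgb   */
    }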
recon_sse2.asm
    30  movdqu xmm0, [rsi]
    35  movdqu xmm1, [rsi+rax]
    36  movdqu xmm2, [rsi+rax*2]
    48  movdqu xmm3, [rsi]
    51  movdqu xmm4, [rsi+rax]
    53  movdqu xmm5, [rsi+rax*2]
    63  movdqu xmm0, [rsi]
    66  movdqu xmm1, [rsi+rax]
    68  movdqu xmm2, [rsi+rax*2]
    77  movdqu xmm3, [rsi  [all...]
sad_sse3.asm
    591  ;%define lddqu movdqu
    602  movdqu xmm1, XMMWORD PTR [ref_ptr]
    604  movdqu xmm3, XMMWORD PTR [ref_ptr+ref_stride]
    610  movdqu xmm5, XMMWORD PTR [ref_ptr]
    615  movdqu xmm1, XMMWORD PTR [ref_ptr+ref_stride]
    653  movdqu xmm0, XMMWORD PTR [src_ptr]
    654  movdqu xmm1, XMMWORD PTR [src_ptr + 16]
    655  movdqu xmm2, XMMWORD PTR [src_ptr + src_stride]
    656  movdqu xmm3, XMMWORD PTR [src_ptr + src_stride + 16]
    657  movdqu xmm4, XMMWORD PTR [end_ptr  [all...]
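The commented-out ";%define lddqu movdqu" at hit 591 shows the two are drop-in replacements here: SSE3's lddqu has the same semantics as movdqu and may only differ in how it performs loads that straddle a cache line. The corresponding intrinsic pair, for reference (a sketch; build with -msse3):

    #include <pmmintrin.h>  /* SSE3: _mm_lddqu_si128; pulls in emmintrin.h */

    /* Both load 16 possibly-unaligned bytes; lddqu can be faster for
     * cache-line-crossing loads on some microarchitectures. */
    __m128i load_movdqu(const void *p) { return _mm_loadu_si128((const __m128i *)p); }
    __m128i load_lddqu (const void *p) { return _mm_lddqu_si128((const __m128i *)p); }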
/external/libvpx/libvpx/vp9/encoder/x86/
vp9_subpel_variance_impl_sse2.asm
    45   movdqu xmm5, XMMWORD PTR [rsi]
    46   movdqu xmm3, XMMWORD PTR [rsi+1]
    52   movdqu xmm1, XMMWORD PTR [rsi]    ;
    53   movdqu xmm2, XMMWORD PTR [rsi+1]  ;
    158  movdqu xmm5, XMMWORD PTR [rsi]
    163  movdqu xmm3, XMMWORD PTR [rsi]
    268  movdqu xmm5, XMMWORD PTR [rsi]    ; xmm5 = s0,s1,s2..s15
    269  movdqu xmm3, XMMWORD PTR [rsi+1]  ; xmm3 = s1,s2,s3..s16
/external/libyuv/files/source/
row_win.cc
    249   movdqu xmm0, [eax]
    250   movdqu xmm1, [eax + 16]
    251   movdqu xmm3, [eax + 32]
    289   movdqu xmm0, [eax]
    290   movdqu xmm1, [eax + 16]
    291   movdqu xmm3, [eax + 32]
    349   movdqu xmm0, [eax]   // fetch 8 pixels of bgr565
    399   movdqu xmm0, [eax]   // fetch 8 pixels of 1555
    445   movdqu xmm0, [eax]   // fetch 8 pixels of bgra4444
    702   movdqu xmm0, [eax]
    718   movdqu [edx], xmm0
    786   movdqu [edx], xmm0
    854   movdqu [edx], xmm0
    922   movdqu [edx], xmm0
    1851  movdqu [edx], xmm0
    1852  movdqu [edx + 16], xmm1
    1894  movdqu [edx], xmm0
    1895  movdqu [edx + 16], xmm1
    1938  movdqu [edx], xmm0
    1939  movdqu [edx + 16], xmm1
    1978  movdqu [edx], xmm0
    1979  movdqu [edx + 16], xmm1
    2016  movdqu [edx], xmm0
    2017  movdqu [edx + 16], xmm1
    2097  movdqu [edx], xmm5
    2098  movdqu [edx + 16], xmm0
    2179  movdqu [edx], xmm2
    2180  movdqu [edx + 16], xmm1
    2261  movdqu [edx], xmm5
    2262  movdqu [edx + 16], xmm0
    2372  movdqu [edx], xmm0
    2700  movdqu [edx], xmm0
    2910  movdqu [edx], xmm0
    3777  movdqu [edi], xmm0
    3884  movdqu [edx], xmm2   [all...]
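row_win.cc is MSVC inline assembly; the runs of unaligned loads from [eax] and stores to [edx] belong to libyuv's unaligned row-conversion variants. The skeleton of such a row loop, as portable intrinsics (a sketch, not libyuv code; assumes width is a multiple of 32 for brevity):

    #include <emmintrin.h>

    /* Copy one row of width bytes, 32 at a time, with no alignment
     * assumption on src or dst. */
    void copy_row_unaligned(const unsigned char *src, unsigned char *dst,
                            int width) {
        for (int x = 0; x < width; x += 32) {
            __m128i a = _mm_loadu_si128((const __m128i *)(src + x));      /* movdqu */
            __m128i b = _mm_loadu_si128((const __m128i *)(src + x + 16));
            _mm_storeu_si128((__m128i *)(dst + x), a);
            _mm_storeu_si128((__m128i *)(dst + x + 16), b);
        }
    }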