/bionic/libc/arch-x86/silvermont/string/ |
sse2-memmove-slm.S | 123 movdqu (%eax), %xmm0 124 movdqu -16(%eax, %ecx), %xmm1 125 movdqu %xmm0, (%edx) 126 movdqu %xmm1, -16(%edx, %ecx) 134 movdqu (%eax), %xmm0 135 movdqu 16(%eax), %xmm1 136 movdqu -16(%eax, %ecx), %xmm2 137 movdqu -32(%eax, %ecx), %xmm3 138 movdqu %xmm0, (%edx) 139 movdqu %xmm1, 16(%edx [all...] |
sse2-memcpy-slm.S | 112 movdqu (%eax), %xmm0 113 movdqu -16(%eax, %ecx), %xmm1 115 movdqu %xmm0, (%edx) 116 movdqu %xmm1, -16(%edx, %ecx) 119 movdqu 16(%eax), %xmm0 120 movdqu -32(%eax, %ecx), %xmm1 122 movdqu %xmm0, 16(%edx) 123 movdqu %xmm1, -32(%edx, %ecx) 126 movdqu 32(%eax), %xmm0 127 movdqu 48(%eax), %xmm [all...] |
sse4-memcmp-slm.S | 272 movdqu (%eax), %xmm1 273 movdqu (%edx), %xmm2 278 movdqu 16(%eax), %xmm1 279 movdqu 16(%edx), %xmm2 284 movdqu 32(%eax), %xmm1 285 movdqu 32(%edx), %xmm2 290 movdqu 48(%eax), %xmm1 291 movdqu 48(%edx), %xmm2 371 movdqu -49(%eax), %xmm1 372 movdqu -49(%edx), %xmm [all...] |
sse2-strcpy-slm.S | 187 movdqu (%esi, %ecx), %xmm1 /* copy 16 bytes */ 188 movdqu %xmm1, (%edi) 201 movdqu %xmm1, (%edi, %ecx) 216 movdqu %xmm2, (%edi, %ecx) 231 movdqu %xmm3, (%edi, %ecx) 246 movdqu %xmm4, (%edi, %ecx) 261 movdqu %xmm1, (%edi, %ecx) 276 movdqu %xmm2, (%edi, %ecx) 290 movdqu %xmm3, (%edi, %ecx) 321 movdqu %xmm4, -64(%edi [all...] |
/bionic/libc/arch-x86_64/string/ |
sse2-memmove-slm.S | 113 movdqu (%rsi), %xmm0 114 movdqu -16(%rsi, %rdx), %xmm1 115 movdqu %xmm0, (%rdi) 116 movdqu %xmm1, -16(%rdi, %rdx) 124 movdqu (%rsi), %xmm0 125 movdqu 16(%rsi), %xmm1 126 movdqu -16(%rsi, %rdx), %xmm2 127 movdqu -32(%rsi, %rdx), %xmm3 128 movdqu %xmm0, (%rdi) 129 movdqu %xmm1, 16(%rdi [all...] |
sse2-memcpy-slm.S | 103 movdqu (%rsi), %xmm0 104 movdqu -16(%rsi, %rdx), %xmm1 106 movdqu %xmm0, (%rdi) 107 movdqu %xmm1, -16(%rdi, %rdx) 110 movdqu 16(%rsi), %xmm0 111 movdqu -32(%rsi, %rdx), %xmm1 113 movdqu %xmm0, 16(%rdi) 114 movdqu %xmm1, -32(%rdi, %rdx) 117 movdqu 32(%rsi), %xmm0 118 movdqu 48(%rsi), %xmm [all...] |
sse4-memcmp-slm.S | 108 movdqu (%rsi), %xmm1 109 movdqu (%rdi), %xmm2 128 movdqu (%rdi), %xmm2 133 movdqu 16(%rdi), %xmm2 138 movdqu 32(%rdi), %xmm2 143 movdqu 48(%rdi), %xmm2 150 movdqu 64(%rdi), %xmm2 155 movdqu 80(%rdi), %xmm2 177 movdqu (%rdi), %xmm2 182 movdqu 16(%rdi), %xmm [all...] |
sse2-strcpy-slm.S | 127 movdqu (%rsi, %rcx), %xmm1 /* copy 16 bytes */ 128 movdqu %xmm1, (%rdi) 140 movdqu %xmm1, (%rdi, %rcx) 156 movdqu %xmm2, (%rdi, %rcx) 172 movdqu %xmm3, (%rdi, %rcx) 188 movdqu %xmm4, (%rdi, %rcx) 204 movdqu %xmm1, (%rdi, %rcx) 220 movdqu %xmm2, (%rdi, %rcx) 235 movdqu %xmm3, (%rdi, %rcx) 266 movdqu %xmm4, -64(%rdi [all...] |
sse2-memset-slm.S | 115 movdqu %xmm0, (%rdi) 116 movdqu %xmm0, -16(%rdi, %rdx) 119 movdqu %xmm0, 16(%rdi) 120 movdqu %xmm0, -32(%rdi, %rdx) 123 movdqu %xmm0, 32(%rdi) 124 movdqu %xmm0, 48(%rdi) 125 movdqu %xmm0, -64(%rdi, %rdx) 126 movdqu %xmm0, -48(%rdi, %rdx) |
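
The bionic SLM memcpy/memmove snippets above all rely on one small-size trick: the first and last 16 bytes of the buffer are fetched with unaligned movdqu loads and written back the same way, so one pair of possibly overlapping 16-byte accesses covers any length from 16 to 32 bytes (and two pairs cover 32 to 64). Below is a minimal C sketch of that head/tail pattern using the intrinsic equivalents of movdqu, not the actual assembly; the function name is made up for illustration.

    #include <emmintrin.h>   /* SSE2: _mm_loadu_si128 / _mm_storeu_si128 compile to movdqu */
    #include <stddef.h>

    /* Hypothetical helper (not bionic's API): copy n bytes, 16 <= n <= 32, with two
     * possibly overlapping unaligned 16-byte accesses, mirroring the head/tail
     * pattern in the snippets above. Performing both loads before either store also
     * keeps it correct for memmove-style overlapping src/dst in this size class. */
    static void copy_16_to_32(void *dst, const void *src, size_t n)
    {
        const char *s = (const char *)src;
        char *d = (char *)dst;
        __m128i head = _mm_loadu_si128((const __m128i *)s);            /* movdqu (src)          */
        __m128i tail = _mm_loadu_si128((const __m128i *)(s + n - 16)); /* movdqu -16(src, n)    */
        _mm_storeu_si128((__m128i *)d, head);                          /* movdqu -> (dst)        */
        _mm_storeu_si128((__m128i *)(d + n - 16), tail);               /* movdqu -> -16(dst, n)  */
    }
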
/external/chromium_org/third_party/yasm/source/patched-yasm/modules/arch/x86/tests/ |
ssse3.asm | 19 TEST_GENERIC pabsb, movdqu, xmm0, xmm, xmm1 20 TEST_GENERIC pabsw, movdqu, xmm0, xmm, xmm1 21 TEST_GENERIC pabsd, movdqu, xmm0, xmm, xmm1 27 TEST_GENERIC psignb, movdqu, xmm0, xmm, xmm1 28 TEST_GENERIC psignw, movdqu, xmm0, xmm, xmm1 29 TEST_GENERIC psignd, movdqu, xmm0, xmm, xmm1 35 TEST_GENERIC phaddw, movdqu, xmm0, xmm, xmm1 36 TEST_GENERIC phaddsw, movdqu, xmm0, xmm, xmm1 37 TEST_GENERIC phaddd, movdqu, xmm0, xmm, xmm1 43 TEST_GENERIC phsubw, movdqu, xmm0, xmm, xmm [all...] |
/external/chromium_org/third_party/openssl/openssl/crypto/aes/asm/ |
bsaes-x86_64.pl | 911 movdqu ($inp), %xmm7 # load round 0 key 920 movdqu ($inp), %xmm6 # load round 1 key 973 movdqu ($inp), %xmm6 # load next round key 1013 movdqu 0x00($inp), @XMM[0] # load input 1014 movdqu 0x10($inp), @XMM[1] 1015 movdqu 0x20($inp), @XMM[2] 1016 movdqu 0x30($inp), @XMM[3] 1017 movdqu 0x40($inp), @XMM[4] 1018 movdqu 0x50($inp), @XMM[5] 1019 movdqu 0x60($inp), @XMM[6 [all...] |
bsaes-x86_64.S | 1050 movdqu (%rcx),%xmm7 1059 movdqu (%rcx),%xmm6 1112 movdqu (%rcx),%xmm6 1169 movdqu (%rbx),%xmm14 1172 movdqu 0(%r12),%xmm15 1173 movdqu 16(%r12),%xmm0 1174 movdqu 32(%r12),%xmm1 1175 movdqu 48(%r12),%xmm2 1176 movdqu 64(%r12),%xmm3 1177 movdqu 80(%r12),%xmm [all...] |
/external/openssl/crypto/aes/asm/ |
bsaes-x86_64.pl | 972 movdqu ($inp), %xmm7 # load round 0 key 981 movdqu ($inp), %xmm6 # load round 1 key 1034 movdqu ($inp), %xmm6 # load next round key 1074 movdqu 0x00($inp), @XMM[0] # load input 1075 movdqu 0x10($inp), @XMM[1] 1076 movdqu 0x20($inp), @XMM[2] 1077 movdqu 0x30($inp), @XMM[3] 1078 movdqu 0x40($inp), @XMM[4] 1079 movdqu 0x50($inp), @XMM[5] 1080 movdqu 0x60($inp), @XMM[6 [all...] |
bsaes-x86_64.S | 987 movdqu (%rcx),%xmm7 996 movdqu (%rcx),%xmm6 1049 movdqu (%rcx),%xmm6 1106 movdqu (%rbx),%xmm14 1109 movdqu 0(%r12),%xmm15 1110 movdqu 16(%r12),%xmm0 1111 movdqu 32(%r12),%xmm1 1112 movdqu 48(%r12),%xmm2 1113 movdqu 64(%r12),%xmm3 1114 movdqu 80(%r12),%xmm [all...] |
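
In both copies of bsaes-x86_64 the movdqu runs serve the same purpose: the round keys and the batch of input blocks handed to the bit-sliced kernel are not guaranteed to be 16-byte aligned, so every 16-byte fetch goes through movdqu. A hedged intrinsics sketch of the input-gather step only; the function name and block count are illustrative, not OpenSSL's internals.

    #include <emmintrin.h>

    /* Illustrative only: fetch nblocks possibly unaligned 16-byte blocks, as the
     * run of movdqu loads at successive 16-byte offsets above does before
     * bit-slicing. */
    static void load_blocks(__m128i *blk, const unsigned char *inp, int nblocks)
    {
        for (int i = 0; i < nblocks; i++)
            blk[i] = _mm_loadu_si128((const __m128i *)(inp + 16 * i));
    }
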
/external/llvm/test/CodeGen/X86/ |
sse-align-6.ll | 1 ; RUN: llc < %s -march=x86-64 | grep movdqu | count 1 |
/art/runtime/arch/x86_64/ |
memcmp16_x86_64.S | 56 movdqu (%rsi), %xmm1 57 movdqu (%rdi), %xmm2 76 movdqu (%rdi), %xmm2 81 movdqu 16(%rdi), %xmm2 86 movdqu 32(%rdi), %xmm2 91 movdqu 48(%rdi), %xmm2 98 movdqu 64(%rdi), %xmm2 103 movdqu 80(%rdi), %xmm2 125 movdqu (%rdi), %xmm2 130 movdqu 16(%rdi), %xmm [all...] |
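
The SSE4 memcmp variants and ART's memcmp16 above walk both buffers in 16-byte chunks, pulling each chunk in with movdqu before comparing. Here is a rough C sketch of one 16-byte step, using SSE2 compare/movemask intrinsics rather than whatever compare instructions follow in the real code; the helper name is invented.

    #include <emmintrin.h>

    /* Invented helper: one 16-byte comparison step. Both chunks are fetched with
     * unaligned loads (movdqu), compared byte-wise, and reduced to a mask; any
     * mask other than 0xFFFF means the chunks differ somewhere. */
    static int chunks_differ(const unsigned char *a, const unsigned char *b)
    {
        __m128i va = _mm_loadu_si128((const __m128i *)a);  /* movdqu (a), %xmm1 */
        __m128i vb = _mm_loadu_si128((const __m128i *)b);  /* movdqu (b), %xmm2 */
        __m128i eq = _mm_cmpeq_epi8(va, vb);               /* pcmpeqb           */
        return _mm_movemask_epi8(eq) != 0xFFFF;            /* pmovmskb          */
    }
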
/external/valgrind/main/memcheck/tests/amd64/ |
bug279698.c | 11 ("movdqu (%0), %%xmm0 \n" 13 "movdqu %%xmm0, 16(%0) \n" |
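
bug279698.c exercises movdqu from C through inline assembly. The following is a self-contained reconstruction of that pattern, not the test's exact code: copy 16 bytes from a buffer to buffer+16 through %xmm0 with unaligned accesses.

    #include <stdio.h>

    /* Hedged reconstruction of the inline-asm pattern above: an unaligned 16-byte
     * load into %xmm0 followed by an unaligned store 16 bytes further on. */
    static void copy16_through_xmm0(unsigned char *buf)
    {
        __asm__ __volatile__(
            "movdqu (%0), %%xmm0   \n\t"
            "movdqu %%xmm0, 16(%0) \n\t"
            :
            : "r"(buf)
            : "xmm0", "memory");
    }

    int main(void)
    {
        unsigned char buf[32] = { 1, 2, 3, 4 };
        copy16_through_xmm0(buf);
        printf("%d %d\n", buf[16], buf[19]);   /* prints "1 4" */
        return 0;
    }
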
/bionic/libc/arch-x86/atom/string/ |
sse2-wcscmp-atom.S | 175 movdqu 16(%edi), %xmm1 176 movdqu 16(%esi), %xmm2 184 movdqu 32(%edi), %xmm1 185 movdqu 32(%esi), %xmm2 193 movdqu 48(%edi), %xmm1 194 movdqu 48(%esi), %xmm2 242 movdqu 16(%edi), %xmm1 243 movdqu 16(%esi), %xmm2 251 movdqu 32(%edi), %xmm1 252 movdqu 32(%esi), %xmm [all...] |
/external/chromium_org/third_party/libvpx/source/libvpx/vp9/common/x86/ |
vp9_subpixel_bilinear_sse2.asm | 124 movdqu xmm1, [rdi] 127 movdqu [rdi], xmm0 ;store the result 196 movdqu xmm0, [rsi] ;0 197 movdqu xmm1, [rsi + rax] ;1 274 movdqu xmm0, [rsi] ;0 275 movdqu xmm1, [rsi + rax] ;1 301 movdqu xmm0, [rsi] ;load src 327 movdqu xmm0, [rsi] ;load src 354 movdqu xmm0, [rsi] ;load src 355 movdqu xmm1, [rsi + 1 [all...] |
vp9_subpixel_bilinear_ssse3.asm | 102 movdqu xmm1, [rdi] 105 movdqu [rdi], xmm0 ;store the result 174 movdqu xmm0, [rsi] ;0 175 movdqu xmm1, [rsi + rax] ;1 251 movdqu xmm0, [rsi] ;0 252 movdqu xmm1, [rsi + rax] ;1 277 movdqu xmm0, [rsi] ;load src 303 movdqu xmm0, [rsi] ;load src 330 movdqu xmm0, [rsi] ;load src 331 movdqu xmm1, [rsi + 1 [all...] |
/external/libvpx/libvpx/vp9/common/x86/ |
vp9_subpixel_bilinear_sse2.asm | 124 movdqu xmm1, [rdi] 127 movdqu [rdi], xmm0 ;store the result 196 movdqu xmm0, [rsi] ;0 197 movdqu xmm1, [rsi + rax] ;1 274 movdqu xmm0, [rsi] ;0 275 movdqu xmm1, [rsi + rax] ;1 301 movdqu xmm0, [rsi] ;load src 327 movdqu xmm0, [rsi] ;load src 354 movdqu xmm0, [rsi] ;load src 355 movdqu xmm1, [rsi + 1 [all...] |
vp9_subpixel_bilinear_ssse3.asm | 102 movdqu xmm1, [rdi] 105 movdqu [rdi], xmm0 ;store the result 174 movdqu xmm0, [rsi] ;0 175 movdqu xmm1, [rsi + rax] ;1 251 movdqu xmm0, [rsi] ;0 252 movdqu xmm1, [rsi + rax] ;1 277 movdqu xmm0, [rsi] ;load src 303 movdqu xmm0, [rsi] ;load src 330 movdqu xmm0, [rsi] ;load src 331 movdqu xmm1, [rsi + 1 [all...] |
/hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/vp9/common/x86/ |
vp9_subpixel_bilinear_sse2.asm | 124 movdqu xmm1, [rdi] 127 movdqu [rdi], xmm0 ;store the result 196 movdqu xmm0, [rsi] ;0 197 movdqu xmm1, [rsi + rax] ;1 274 movdqu xmm0, [rsi] ;0 275 movdqu xmm1, [rsi + rax] ;1 301 movdqu xmm0, [rsi] ;load src 327 movdqu xmm0, [rsi] ;load src 354 movdqu xmm0, [rsi] ;load src 355 movdqu xmm1, [rsi + 1 [all...] |
vp9_subpixel_bilinear_ssse3.asm | 102 movdqu xmm1, [rdi] 105 movdqu [rdi], xmm0 ;store the result 174 movdqu xmm0, [rsi] ;0 175 movdqu xmm1, [rsi + rax] ;1 251 movdqu xmm0, [rsi] ;0 252 movdqu xmm1, [rsi + rax] ;1 277 movdqu xmm0, [rsi] ;load src 303 movdqu xmm0, [rsi] ;load src 330 movdqu xmm0, [rsi] ;load src 331 movdqu xmm1, [rsi + 1 [all...] |
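
All three copies of the vp9_subpixel_bilinear kernels show the same access pattern: the source row ([rsi]), the neighbouring row one stride away ([rsi + rax]), and, on the averaging path, the destination row ([rdi]) are all fetched with movdqu because none of those addresses is guaranteed to be 16-byte aligned. Below is a deliberately simplified stand-in for the vertical case, substituting a plain rounded byte average for the real tap-weighted filter just to show the unaligned load/store shape; the function and parameter names are made up.

    #include <emmintrin.h>
    #include <stddef.h>

    /* Simplified stand-in for the bilinear kernels above: fetch two neighbouring
     * rows with unaligned loads, combine them (here a plain rounded average
     * instead of the real filter taps), and write the result with an unaligned
     * store. */
    static void blend_rows(unsigned char *dst, const unsigned char *src,
                           ptrdiff_t src_stride)
    {
        __m128i row0 = _mm_loadu_si128((const __m128i *)src);                /* movdqu xmm0, [rsi]       */
        __m128i row1 = _mm_loadu_si128((const __m128i *)(src + src_stride)); /* movdqu xmm1, [rsi + rax] */
        __m128i out  = _mm_avg_epu8(row0, row1);                             /* simplified filter        */
        _mm_storeu_si128((__m128i *)dst, out);                               /* movdqu [rdi], xmm0       */
    }
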
/external/valgrind/main/none/tests/amd64/ |
aes.c | 80 "movdqu %1, %%xmm1" "\n\t" 81 "movdqu %2, %%xmm2" "\n\t" 83 "movdqu %%xmm1, %0" "\n\t" 112 "movdqu %1, %%xmm1" "\n\t" 113 "movdqu %2, %%xmm2" "\n\t" 115 "movdqu %%xmm1, %0" "\n\t" 144 "movdqu %1, %%xmm1" "\n\t" 145 "movdqu %2, %%xmm2" "\n\t" 147 "movdqu %%xmm1, %0" "\n\t" 176 "movdqu %1, %%xmm1" "\n\t [all...] |
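
aes.c follows the same template for every instruction it tests: the operands are loaded from memory into %xmm1/%xmm2 with movdqu, the instruction under test runs on those registers, and the result in %xmm1 is written back with movdqu. An intrinsics sketch of that template, using aesenc as the example round; the names are illustrative and this is not the test's actual code (build with -maes).

    #include <emmintrin.h>
    #include <wmmintrin.h>   /* AES-NI intrinsics; compile with -maes */

    /* Illustrative sketch of the test template above: unaligned loads of the
     * state and round key, one AES round in registers, unaligned store of the
     * result. */
    static void one_aesenc_round(unsigned char out[16],
                                 const unsigned char in[16],
                                 const unsigned char rk[16])
    {
        __m128i state = _mm_loadu_si128((const __m128i *)in);  /* movdqu %1, %%xmm1     */
        __m128i key   = _mm_loadu_si128((const __m128i *)rk);  /* movdqu %2, %%xmm2     */
        state = _mm_aesenc_si128(state, key);                  /* aesenc %%xmm2, %%xmm1 */
        _mm_storeu_si128((__m128i *)out, state);               /* movdqu %%xmm1, %0     */
    }
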