    Searched refs: movdqu (Results 1 - 25 of 156)

  /bionic/libc/arch-x86/silvermont/string/
sse2-memcpy-slm.S 112 movdqu (%eax), %xmm0
113 movdqu -16(%eax, %ecx), %xmm1
115 movdqu %xmm0, (%edx)
116 movdqu %xmm1, -16(%edx, %ecx)
119 movdqu 16(%eax), %xmm0
120 movdqu -32(%eax, %ecx), %xmm1
122 movdqu %xmm0, 16(%edx)
123 movdqu %xmm1, -32(%edx, %ecx)
126 movdqu 32(%eax), %xmm0
127 movdqu 48(%eax), %xmm
    [all...]
sse2-memmove-slm.S 123 movdqu (%eax), %xmm0
124 movdqu -16(%eax, %ecx), %xmm1
125 movdqu %xmm0, (%edx)
126 movdqu %xmm1, -16(%edx, %ecx)
134 movdqu (%eax), %xmm0
135 movdqu 16(%eax), %xmm1
136 movdqu -16(%eax, %ecx), %xmm2
137 movdqu -32(%eax, %ecx), %xmm3
138 movdqu %xmm0, (%edx)
139 movdqu %xmm1, 16(%edx
    [all...]
sse4-memcmp-slm.S 272 movdqu (%eax), %xmm1
273 movdqu (%edx), %xmm2
278 movdqu 16(%eax), %xmm1
279 movdqu 16(%edx), %xmm2
284 movdqu 32(%eax), %xmm1
285 movdqu 32(%edx), %xmm2
290 movdqu 48(%eax), %xmm1
291 movdqu 48(%edx), %xmm2
371 movdqu -49(%eax), %xmm1
372 movdqu -49(%edx), %xmm
    [all...]
sse2-strcpy-slm.S 187 movdqu (%esi, %ecx), %xmm1 /* copy 16 bytes */
188 movdqu %xmm1, (%edi)
201 movdqu %xmm1, (%edi, %ecx)
216 movdqu %xmm2, (%edi, %ecx)
231 movdqu %xmm3, (%edi, %ecx)
246 movdqu %xmm4, (%edi, %ecx)
261 movdqu %xmm1, (%edi, %ecx)
276 movdqu %xmm2, (%edi, %ecx)
290 movdqu %xmm3, (%edi, %ecx)
321 movdqu %xmm4, -64(%edi
    [all...]
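
  The silvermont memcpy/memmove hits above all use the same unaligned head/tail trick: load the first and the last 16 bytes of the region with movdqu and store them back, letting the two chunks overlap for lengths between 16 and 32. A minimal sketch of that pattern in GNU assembler (AT&T) syntax, with the register roles the hits suggest (%eax = src, %edx = dst, %ecx = length, assumed to be in the 16..32 range here):

        movdqu  (%eax), %xmm0              /* first 16 bytes, no alignment required */
        movdqu  -16(%eax, %ecx), %xmm1     /* last 16 bytes; overlaps xmm0 when len < 32 */
        movdqu  %xmm0, (%edx)
        movdqu  %xmm1, -16(%edx, %ecx)
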
  /bionic/libc/arch-x86_64/string/
sse2-memcpy-slm.S 103 movdqu (%rsi), %xmm0
104 movdqu -16(%rsi, %rdx), %xmm1
106 movdqu %xmm0, (%rdi)
107 movdqu %xmm1, -16(%rdi, %rdx)
110 movdqu 16(%rsi), %xmm0
111 movdqu -32(%rsi, %rdx), %xmm1
113 movdqu %xmm0, 16(%rdi)
114 movdqu %xmm1, -32(%rdi, %rdx)
117 movdqu 32(%rsi), %xmm0
118 movdqu 48(%rsi), %xmm
    [all...]
sse2-memmove-slm.S 113 movdqu (%rsi), %xmm0
114 movdqu -16(%rsi, %rdx), %xmm1
115 movdqu %xmm0, (%rdi)
116 movdqu %xmm1, -16(%rdi, %rdx)
124 movdqu (%rsi), %xmm0
125 movdqu 16(%rsi), %xmm1
126 movdqu -16(%rsi, %rdx), %xmm2
127 movdqu -32(%rsi, %rdx), %xmm3
128 movdqu %xmm0, (%rdi)
129 movdqu %xmm1, 16(%rdi
    [all...]
sse2-strlcpy-slm.S 86 movdqu (%rdi), %xmm1
96 movdqu 16(%rdi), %xmm1
200 movdqu (%rsi, %rcx), %xmm1
201 movdqu %xmm1, (%rdi)
214 movdqu (%rsi), %xmm1
223 movdqu 16(%rsi), %xmm2
225 movdqu %xmm1, (%rdi)
252 movdqu %xmm1, (%rdi, %rax)
529 movdqu (%rsi), %xmm0
530 movdqu %xmm0, (%rdi
    [all...]
sse4-memcmp-slm.S 108 movdqu (%rsi), %xmm1
109 movdqu (%rdi), %xmm2
128 movdqu (%rdi), %xmm2
133 movdqu 16(%rdi), %xmm2
138 movdqu 32(%rdi), %xmm2
143 movdqu 48(%rdi), %xmm2
150 movdqu 64(%rdi), %xmm2
155 movdqu 80(%rdi), %xmm2
177 movdqu (%rdi), %xmm2
182 movdqu 16(%rdi), %xmm
    [all...]
sse2-strcpy-slm.S 127 movdqu (%rsi, %rcx), %xmm1 /* copy 16 bytes */
128 movdqu %xmm1, (%rdi)
140 movdqu %xmm1, (%rdi, %rcx)
156 movdqu %xmm2, (%rdi, %rcx)
172 movdqu %xmm3, (%rdi, %rcx)
188 movdqu %xmm4, (%rdi, %rcx)
204 movdqu %xmm1, (%rdi, %rcx)
220 movdqu %xmm2, (%rdi, %rcx)
235 movdqu %xmm3, (%rdi, %rcx)
266 movdqu %xmm4, -64(%rdi
    [all...]
sse2-memset-slm.S 115 movdqu %xmm0, (%rdi)
116 movdqu %xmm0, -16(%rdi, %rdx)
119 movdqu %xmm0, 16(%rdi)
120 movdqu %xmm0, -32(%rdi, %rdx)
123 movdqu %xmm0, 32(%rdi)
124 movdqu %xmm0, 48(%rdi)
125 movdqu %xmm0, -64(%rdi, %rdx)
126 movdqu %xmm0, -48(%rdi, %rdx)
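
  The sse2-memset-slm.S hits show the store-only version of the same idea: one xmm register already holds the fill byte replicated 16 times, and movdqu stores it at offsets from both ends so short lengths are covered without any alignment fix-up. A minimal sketch, assuming %xmm0 holds the replicated byte, %rdi the destination, and %rdx a length in the 16..32 byte range:

        movdqu  %xmm0, (%rdi)              /* first 16 bytes */
        movdqu  %xmm0, -16(%rdi, %rdx)     /* last 16 bytes, may overlap the first store */
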
  /external/boringssl/src/crypto/aes/asm/
bsaes-x86_64.pl 966 movdqu ($inp), %xmm7 # load round 0 key
975 movdqu ($inp), %xmm6 # load round 1 key
1028 movdqu ($inp), %xmm6 # load next round key
1068 movdqu 0x00($inp), @XMM[0] # load input
1069 movdqu 0x10($inp), @XMM[1]
1070 movdqu 0x20($inp), @XMM[2]
1071 movdqu 0x30($inp), @XMM[3]
1072 movdqu 0x40($inp), @XMM[4]
1073 movdqu 0x50($inp), @XMM[5]
1074 movdqu 0x60($inp), @XMM[6
    [all...]
aesni-x86_64.pl 611 movdqu ($inp),$inout0 # load 8 input blocks
612 movdqu 0x10($inp),$inout1
613 movdqu 0x20($inp),$inout2
614 movdqu 0x30($inp),$inout3
615 movdqu 0x40($inp),$inout4
616 movdqu 0x50($inp),$inout5
617 movdqu 0x60($inp),$inout6
618 movdqu 0x70($inp),$inout7
626 movdqu ($inp),$inout0 # load 8 input blocks
629 movdqu 0x10($inp),$inout
    [all...]
vpaes-x86.pl 188 &movdqu ("xmm5",&QWP(0,$key));
247 &movdqu ("xmm5",&QWP(0,$key));
276 &movdqu ("xmm5",&QWP(0,$key));
349 &movdqu ("xmm0",&QWP(0,$key));
372 &movdqu ("xmm0",&QWP(0,$inp)); # load key (unaligned)
386 &movdqu (&QWP(0,$key),"xmm0");
393 &movdqu (&QWP(0,$key),"xmm3");
436 &movdqu ("xmm0",&QWP(8,$inp)); # load key part 2 (very unaligned)
467 &movdqu ("xmm0",&QWP(16,$inp)); # load key part 2 (unaligned)
516 &movdqu (&QWP(0,$key),"xmm0"); # save last ke
    [all...]
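
  In the BoringSSL AES code the pattern is different: movdqu pulls round keys and whole batches of 16-byte input blocks into registers before the actual AES work, because callers give no alignment guarantee for keys or data. A minimal sketch of the block-load step only (not taken from the perlasm above; %rsi stands in for the input pointer):

        movdqu  0x00(%rsi), %xmm0          /* block 0, unaligned load */
        movdqu  0x10(%rsi), %xmm1          /* block 1 */
        movdqu  0x20(%rsi), %xmm2          /* block 2 */
        movdqu  0x30(%rsi), %xmm3          /* block 3 */
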
  /art/runtime/arch/x86_64/
memcmp16_x86_64.S 56 movdqu (%rsi), %xmm1
57 movdqu (%rdi), %xmm2
76 movdqu (%rdi), %xmm2
81 movdqu 16(%rdi), %xmm2
86 movdqu 32(%rdi), %xmm2
91 movdqu 48(%rdi), %xmm2
98 movdqu 64(%rdi), %xmm2
103 movdqu 80(%rdi), %xmm2
125 movdqu (%rdi), %xmm2
130 movdqu 16(%rdi), %xmm
    [all...]
  /external/boringssl/linux-x86_64/crypto/aes/
bsaes-x86_64.S 990 movdqu (%rcx),%xmm7
999 movdqu (%rcx),%xmm6
1052 movdqu (%rcx),%xmm6
1111 movdqu (%rbx),%xmm14
1114 movdqu 0(%r12),%xmm15
1115 movdqu 16(%r12),%xmm0
1116 movdqu 32(%r12),%xmm1
1117 movdqu 48(%r12),%xmm2
1118 movdqu 64(%r12),%xmm3
1119 movdqu 80(%r12),%xmm
    [all...]
aesni-x86_64.S 514 movdqu (%rdi),%xmm2
515 movdqu 16(%rdi),%xmm3
516 movdqu 32(%rdi),%xmm4
517 movdqu 48(%rdi),%xmm5
518 movdqu 64(%rdi),%xmm6
519 movdqu 80(%rdi),%xmm7
520 movdqu 96(%rdi),%xmm8
521 movdqu 112(%rdi),%xmm9
529 movdqu (%rdi),%xmm2
532 movdqu 16(%rdi),%xmm
    [all...]
  /external/boringssl/mac-x86_64/crypto/aes/
bsaes-x86_64.S 988 movdqu (%rcx),%xmm7
997 movdqu (%rcx),%xmm6
1050 movdqu (%rcx),%xmm6
1108 movdqu (%rbx),%xmm14
1111 movdqu 0(%r12),%xmm15
1112 movdqu 16(%r12),%xmm0
1113 movdqu 32(%r12),%xmm1
1114 movdqu 48(%r12),%xmm2
1115 movdqu 64(%r12),%xmm3
1116 movdqu 80(%r12),%xmm
    [all...]
aesni-x86_64.S 513 movdqu (%rdi),%xmm2
514 movdqu 16(%rdi),%xmm3
515 movdqu 32(%rdi),%xmm4
516 movdqu 48(%rdi),%xmm5
517 movdqu 64(%rdi),%xmm6
518 movdqu 80(%rdi),%xmm7
519 movdqu 96(%rdi),%xmm8
520 movdqu 112(%rdi),%xmm9
528 movdqu (%rdi),%xmm2
531 movdqu 16(%rdi),%xmm
    [all...]
  /external/boringssl/win-x86_64/crypto/aes/
bsaes-x86_64.asm 992 movdqu xmm7,XMMWORD[rcx]
1001 movdqu xmm6,XMMWORD[rcx]
1054 movdqu xmm6,XMMWORD[rcx]
1125 movdqu xmm14,XMMWORD[rbx]
1128 movdqu xmm15,XMMWORD[r12]
1129 movdqu xmm0,XMMWORD[16+r12]
1130 movdqu xmm1,XMMWORD[32+r12]
1131 movdqu xmm2,XMMWORD[48+r12]
1132 movdqu xmm3,XMMWORD[64+r12]
1133 movdqu xmm4,XMMWORD[80+r12
    [all...]
aesni-x86_64.asm 531 movdqu xmm2,XMMWORD[rdi]
532 movdqu xmm3,XMMWORD[16+rdi]
533 movdqu xmm4,XMMWORD[32+rdi]
534 movdqu xmm5,XMMWORD[48+rdi]
535 movdqu xmm6,XMMWORD[64+rdi]
536 movdqu xmm7,XMMWORD[80+rdi]
537 movdqu xmm8,XMMWORD[96+rdi]
538 movdqu xmm9,XMMWORD[112+rdi]
546 movdqu xmm2,XMMWORD[rdi]
549 movdqu xmm3,XMMWORD[16+rdi
    [all...]
  /bionic/libc/arch-x86/atom/string/
sse2-wcscmp-atom.S 175 movdqu 16(%edi), %xmm1
176 movdqu 16(%esi), %xmm2
184 movdqu 32(%edi), %xmm1
185 movdqu 32(%esi), %xmm2
193 movdqu 48(%edi), %xmm1
194 movdqu 48(%esi), %xmm2
242 movdqu 16(%edi), %xmm1
243 movdqu 16(%esi), %xmm2
251 movdqu 32(%edi), %xmm1
252 movdqu 32(%esi), %xmm
    [all...]
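
  The sse2-wcscmp-atom.S hits pair each movdqu load from one string with a load from the other and then compare the two registers lane by lane. A minimal sketch of one such 16-byte step, assuming 32-bit wchar_t (hence pcmpeqd), %edi and %esi as the two string pointers, and a hypothetical .L_mismatch label for the not-equal path:

        movdqu  16(%edi), %xmm1
        movdqu  16(%esi), %xmm2
        pcmpeqd %xmm1, %xmm2               /* compare four 32-bit characters */
        pmovmskb %xmm2, %eax
        cmp     $0xffff, %eax              /* all 16 byte lanes equal? */
        jne     .L_mismatch                /* hypothetical mismatch handler */
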
  /external/libvpx/libvpx/vp9/common/x86/
vp9_subpixel_bilinear_sse2.asm 124 movdqu xmm1, [rdi]
127 movdqu [rdi], xmm0 ;store the result
196 movdqu xmm0, [rsi] ;0
197 movdqu xmm1, [rsi + rax] ;1
274 movdqu xmm0, [rsi] ;0
275 movdqu xmm1, [rsi + rax] ;1
301 movdqu xmm0, [rsi] ;load src
327 movdqu xmm0, [rsi] ;load src
354 movdqu xmm0, [rsi] ;load src
355 movdqu xmm1, [rsi + 1
    [all...]
vp9_subpixel_bilinear_ssse3.asm 102 movdqu xmm1, [rdi]
105 movdqu [rdi], xmm0 ;store the result
174 movdqu xmm0, [rsi] ;0
175 movdqu xmm1, [rsi + rax] ;1
251 movdqu xmm0, [rsi] ;0
252 movdqu xmm1, [rsi + rax] ;1
277 movdqu xmm0, [rsi] ;load src
303 movdqu xmm0, [rsi] ;load src
330 movdqu xmm0, [rsi] ;load src
331 movdqu xmm1, [rsi + 1
    [all...]
  /hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/vp9/common/x86/
vp9_subpixel_bilinear_sse2.asm 124 movdqu xmm1, [rdi]
127 movdqu [rdi], xmm0 ;store the result
196 movdqu xmm0, [rsi] ;0
197 movdqu xmm1, [rsi + rax] ;1
274 movdqu xmm0, [rsi] ;0
275 movdqu xmm1, [rsi + rax] ;1
301 movdqu xmm0, [rsi] ;load src
327 movdqu xmm0, [rsi] ;load src
354 movdqu xmm0, [rsi] ;load src
355 movdqu xmm1, [rsi + 1
    [all...]
vp9_subpixel_bilinear_ssse3.asm 102 movdqu xmm1, [rdi]
105 movdqu [rdi], xmm0 ;store the result
174 movdqu xmm0, [rsi] ;0
175 movdqu xmm1, [rsi + rax] ;1
251 movdqu xmm0, [rsi] ;0
252 movdqu xmm1, [rsi + rax] ;1
277 movdqu xmm0, [rsi] ;load src
303 movdqu xmm0, [rsi] ;load src
330 movdqu xmm0, [rsi] ;load src
331 movdqu xmm1, [rsi + 1
    [all...]
