HomeSort by relevance Sort by last modified time
    Searched full:movdqu (Results 26 - 50 of 186) sorted by null

12 3 4 5 6 7 8

  /bionic/libc/arch-x86_64/string/
sse2-memset-slm.S 115 movdqu %xmm0, (%rdi)
116 movdqu %xmm0, -16(%rdi, %rdx)
119 movdqu %xmm0, 16(%rdi)
120 movdqu %xmm0, -32(%rdi, %rdx)
123 movdqu %xmm0, 32(%rdi)
124 movdqu %xmm0, 48(%rdi)
125 movdqu %xmm0, -64(%rdi, %rdx)
126 movdqu %xmm0, -48(%rdi, %rdx)
  /external/libvpx/libvpx/vp8/common/x86/
sad_sse2.asm 356 movdqu xmm0, XMMWORD PTR [rsi]
357 movdqu xmm1, XMMWORD PTR [rsi + 16]
358 movdqu xmm2, XMMWORD PTR [rsi + rax]
359 movdqu xmm3, XMMWORD PTR [rsi + rax + 16]
363 movdqu xmm4, XMMWORD PTR [rsi]
364 movdqu xmm5, XMMWORD PTR [rsi + 16]
365 movdqu xmm6, XMMWORD PTR [rsi + rax]
366 movdqu xmm7, XMMWORD PTR [rsi + rax + 16]
392 movdqu xmm0, XMMWORD PTR [rsi]
393 movdqu xmm1, XMMWORD PTR [rsi + 16
    [all...]
postproc_sse2.asm 124 movdqu xmm0, XMMWORD PTR [rsi]
125 movdqu xmm1, XMMWORD PTR [rsi + rax]
126 movdqu xmm3, XMMWORD PTR [rsi + 2*rax]
132 movdqu xmm1, XMMWORD PTR [rsi + 2*rax]
133 movdqu xmm3, XMMWORD PTR [rsi + rax]
137 movdqu XMMWORD PTR [rdi], xmm0
178 movdqu xmm0, XMMWORD PTR [rdi + rdx]
179 movdqu xmm1, XMMWORD PTR [rdi + rdx -2]
180 movdqu xmm3, XMMWORD PTR [rdi + rdx -1]
184 movdqu xmm1, XMMWORD PTR [rdi + rdx +1
    [all...]
variance_impl_ssse3.asm 67 movdqu xmm0, XMMWORD PTR [rsi]
68 movdqu xmm1, XMMWORD PTR [rsi+1]
92 movdqu xmm1, XMMWORD PTR [rsi]
93 movdqu xmm2, XMMWORD PTR [rsi+1]
163 movdqu xmm1, XMMWORD PTR [rsi]
173 movdqu xmm3, XMMWORD PTR [rsi]
264 movdqu xmm1, XMMWORD PTR [rsi]
265 movdqu xmm2, XMMWORD PTR [rsi+1]
  /hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/vp8/common/x86/
sad_sse2.asm 356 movdqu xmm0, XMMWORD PTR [rsi]
357 movdqu xmm1, XMMWORD PTR [rsi + 16]
358 movdqu xmm2, XMMWORD PTR [rsi + rax]
359 movdqu xmm3, XMMWORD PTR [rsi + rax + 16]
363 movdqu xmm4, XMMWORD PTR [rsi]
364 movdqu xmm5, XMMWORD PTR [rsi + 16]
365 movdqu xmm6, XMMWORD PTR [rsi + rax]
366 movdqu xmm7, XMMWORD PTR [rsi + rax + 16]
392 movdqu xmm0, XMMWORD PTR [rsi]
393 movdqu xmm1, XMMWORD PTR [rsi + 16
    [all...]
postproc_sse2.asm 124 movdqu xmm0, XMMWORD PTR [rsi]
125 movdqu xmm1, XMMWORD PTR [rsi + rax]
126 movdqu xmm3, XMMWORD PTR [rsi + 2*rax]
132 movdqu xmm1, XMMWORD PTR [rsi + 2*rax]
133 movdqu xmm3, XMMWORD PTR [rsi + rax]
137 movdqu XMMWORD PTR [rdi], xmm0
178 movdqu xmm0, XMMWORD PTR [rdi + rdx]
179 movdqu xmm1, XMMWORD PTR [rdi + rdx -2]
180 movdqu xmm3, XMMWORD PTR [rdi + rdx -1]
184 movdqu xmm1, XMMWORD PTR [rdi + rdx +1
    [all...]
variance_impl_ssse3.asm 67 movdqu xmm0, XMMWORD PTR [rsi]
68 movdqu xmm1, XMMWORD PTR [rsi+1]
92 movdqu xmm1, XMMWORD PTR [rsi]
93 movdqu xmm2, XMMWORD PTR [rsi+1]
163 movdqu xmm1, XMMWORD PTR [rsi]
173 movdqu xmm3, XMMWORD PTR [rsi]
264 movdqu xmm1, XMMWORD PTR [rsi]
265 movdqu xmm2, XMMWORD PTR [rsi+1]
  /external/libvpx/libvpx/third_party/libyuv/source/
row_posix.cc 286 "movdqu %%xmm0," MEMACCESS(1) " \n"
287 "movdqu %%xmm1," MEMACCESS2(0x10,1) " \n"
311 "movdqu " MEMACCESS(0) ",%%xmm0 \n"
312 "movdqu " MEMACCESS2(0x10,0) ",%%xmm1 \n"
313 "movdqu " MEMACCESS2(0x20,0) ",%%xmm3 \n"
352 "movdqu " MEMACCESS(0) ",%%xmm0 \n"
353 "movdqu " MEMACCESS2(0x10,0) ",%%xmm1 \n"
354 "movdqu " MEMACCESS2(0x20,0) ",%%xmm3 \n"
405 "movdqu " MEMACCESS(0) ",%%xmm0 \n"
458 "movdqu " MEMACCESS(0) ",%%xmm0 \n
    [all...]
row_win.cc 359 movdqu [edx], xmm0 local
360 movdqu [edx + 16], xmm1 local
380 movdqu xmm0, [eax]
381 movdqu xmm1, [eax + 16]
382 movdqu xmm3, [eax + 32]
420 movdqu xmm0, [eax]
421 movdqu xmm1, [eax + 16]
422 movdqu xmm3, [eax + 32]
480 movdqu xmm0, [eax] // fetch 8 pixels of bgr565
530 movdqu xmm0, [eax] // fetch 8 pixels of 155
623 movdqu [edx], xmm0 // store 0 local
628 movdqu [edx + 16], xmm1 // store 1 local
629 movdqu [edx + 32], xmm2 // store 2 local
662 movdqu [edx], xmm0 // store 0 local
667 movdqu [edx + 16], xmm1 // store 1 local
668 movdqu [edx + 32], xmm2 // store 2 local
967 movdqu [edx], xmm0 local
1002 movdqu [edx], xmm0 local
1070 movdqu [edx], xmm0 local
1138 movdqu [edx], xmm0 local
1206 movdqu [edx], xmm0 local
1644 movdqu [edx], xmm0 local
1661 movdqu [edx + edi], xmm0 local
2475 movdqu [edx + 8], xmm1 // Last 16 bytes. = 24 bytes, 8 RGB pixels. local
2522 movdqu [edx + 8], xmm1 // Last 16 bytes. = 24 bytes, 8 RGB pixels. local
2596 movdqu [edx], xmm0 // store 8 pixels of RGB565 local
2802 movdqu [edx], xmm0 local
2803 movdqu [edx + 16], xmm1 local
2845 movdqu [edx], xmm0 local
2846 movdqu [edx + 16], xmm1 local
2890 movdqu [edx], xmm0 local
2891 movdqu [edx + 16], xmm1 local
2930 movdqu [edx], xmm0 local
2931 movdqu [edx + 16], xmm1 local
2968 movdqu [edx], xmm0 local
2969 movdqu [edx + 16], xmm1 local
3049 movdqu [edx], xmm5 local
3050 movdqu [edx + 16], xmm0 local
3131 movdqu [edx], xmm2 local
3132 movdqu [edx + 16], xmm1 local
3213 movdqu [edx], xmm5 local
3214 movdqu [edx + 16], xmm0 local
3356 movdqu [edx], xmm0 local
3520 movdqu [edx], xmm0 local
3521 movdqu [edx + edi], xmm2 local
3621 movdqu [edi], xmm0 local
3622 movdqu [edi + 16], xmm2 local
4264 movdqu [edx], xmm0 local
4474 movdqu [edx], xmm0 local
4928 movdqu [edx], xmm0 local
5021 movdqu [edx], xmm0 local
5433 movdqu [edx], xmm0 local
5467 movdqu [edx], xmm0 local
5513 movdqu [edx], xmm0 local
5942 movdqu [edi], xmm0 local
5993 movdqu [edi], xmm0 local
6101 movdqu [edx], xmm2 local
6565 movdqu [esi + edi], xmm0 local
6578 movdqu [esi + edi], xmm0 local
6590 movdqu [esi + edi], xmm0 local
6603 movdqu [esi + edi], xmm0 local
6613 movdqu [esi + edi], xmm0 local
6677 movdqu [esi + edi], xmm0 local
6690 movdqu [esi + edi], xmm0 local
6702 movdqu [esi + edi], xmm0 local
6715 movdqu [esi + edi], xmm0 local
6725 movdqu [esi + edi], xmm0 local
6891 movdqu [edx], xmm0 local
6892 movdqu [edx + 16], xmm1 local
6984 movdqu [edx], xmm0 local
7002 movdqu [edx], xmm0 local
7020 movdqu [edx], xmm0 local
7038 movdqu [edx], xmm0 local
7081 movdqu [edi], xmm0 local
7082 movdqu [edi + 16], xmm1 local
7119 movdqu [edi], xmm1 local
7120 movdqu [edi + 16], xmm2 local
    [all...]
  /external/boringssl/win-x86/crypto/aes/
aesni-x86.asm 376 movdqu xmm2,[esi]
377 movdqu xmm3,[16+esi]
378 movdqu xmm4,[32+esi]
379 movdqu xmm5,[48+esi]
380 movdqu xmm6,[64+esi]
381 movdqu xmm7,[80+esi]
388 movdqu xmm2,[esi]
390 movdqu xmm3,[16+esi]
392 movdqu xmm4,[32+esi]
394 movdqu xmm5,[48+esi
    [all...]
vpaes-x86.asm 88 movdqu xmm5,[edx]
143 movdqu xmm5,[edx]
164 movdqu xmm5,[edx]
229 movdqu xmm0,[edx]
244 movdqu xmm0,[esi]
253 movdqu [edx],xmm0
258 movdqu [edx],xmm3
274 movdqu xmm0,[8+esi]
294 movdqu xmm0,[16+esi]
323 movdqu [edx],xmm
    [all...]
  /hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/vp9/encoder/x86/
vp9_subpel_variance_impl_sse2.asm 45 movdqu xmm5, XMMWORD PTR [rsi]
46 movdqu xmm3, XMMWORD PTR [rsi+1]
52 movdqu xmm1, XMMWORD PTR [rsi] ;
53 movdqu xmm2, XMMWORD PTR [rsi+1] ;
158 movdqu xmm5, XMMWORD PTR [rsi]
163 movdqu xmm3, XMMWORD PTR [rsi]
268 movdqu xmm5, XMMWORD PTR [rsi] ; xmm5 = s0,s1,s2..s15
269 movdqu xmm3, XMMWORD PTR [rsi+1] ; xmm3 = s1,s2,s3..s16
  /external/boringssl/src/crypto/aes/asm/
aesni-x86_64.pl 611 movdqu ($inp),$inout0 # load 8 input blocks
612 movdqu 0x10($inp),$inout1
613 movdqu 0x20($inp),$inout2
614 movdqu 0x30($inp),$inout3
615 movdqu 0x40($inp),$inout4
616 movdqu 0x50($inp),$inout5
617 movdqu 0x60($inp),$inout6
618 movdqu 0x70($inp),$inout7
626 movdqu ($inp),$inout0 # load 8 input blocks
629 movdqu 0x10($inp),$inout
    [all...]
aesni-x86.pl 434 &movdqu ($inout0,&QWP(0,$inp));
435 &movdqu ($inout1,&QWP(0x10,$inp));
436 &movdqu ($inout2,&QWP(0x20,$inp));
437 &movdqu ($inout3,&QWP(0x30,$inp));
438 &movdqu ($inout4,&QWP(0x40,$inp));
439 &movdqu ($inout5,&QWP(0x50,$inp));
446 &movdqu ($inout0,&QWP(0,$inp));
448 &movdqu ($inout1,&QWP(0x10,$inp));
450 &movdqu ($inout2,&QWP(0x20,$inp));
452 &movdqu ($inout3,&QWP(0x30,$inp))
    [all...]
vpaes-x86.pl 188 &movdqu ("xmm5",&QWP(0,$key));
247 &movdqu ("xmm5",&QWP(0,$key));
276 &movdqu ("xmm5",&QWP(0,$key));
349 &movdqu ("xmm0",&QWP(0,$key));
372 &movdqu ("xmm0",&QWP(0,$inp)); # load key (unaligned)
386 &movdqu (&QWP(0,$key),"xmm0");
393 &movdqu (&QWP(0,$key),"xmm3");
436 &movdqu ("xmm0",&QWP(8,$inp)); # load key part 2 (very unaligned)
467 &movdqu ("xmm0",&QWP(16,$inp)); # load key part 2 (unaligned)
516 &movdqu (&QWP(0,$key),"xmm0"); # save last ke
    [all...]
  /external/boringssl/linux-x86/crypto/aes/
vpaes-x86.S 80 movdqu (%edx),%xmm5
135 movdqu (%edx),%xmm5
159 movdqu (%edx),%xmm5
224 movdqu (%edx),%xmm0
242 movdqu (%esi),%xmm0
251 movdqu %xmm0,(%edx)
256 movdqu %xmm3,(%edx)
272 movdqu 8(%esi),%xmm0
292 movdqu 16(%esi),%xmm0
321 movdqu %xmm0,(%edx
    [all...]
aesni-x86.S 394 movdqu (%esi),%xmm2
395 movdqu 16(%esi),%xmm3
396 movdqu 32(%esi),%xmm4
397 movdqu 48(%esi),%xmm5
398 movdqu 64(%esi),%xmm6
399 movdqu 80(%esi),%xmm7
406 movdqu (%esi),%xmm2
408 movdqu 16(%esi),%xmm3
410 movdqu 32(%esi),%xmm4
412 movdqu 48(%esi),%xmm
    [all...]
  /external/boringssl/linux-x86_64/crypto/aes/
vpaes-x86_64.S 28 movdqu (%r9),%xmm5
88 movdqu (%r9),%xmm5
119 movdqu (%r9),%xmm5
194 movdqu (%r9),%xmm0
225 movdqu (%rdi),%xmm0
238 movdqu %xmm0,(%rdx)
245 movdqu %xmm3,(%rdx)
289 movdqu 8(%rdi),%xmm0
321 movdqu 16(%rdi),%xmm0
372 movdqu %xmm0,(%rdx
    [all...]
  /external/boringssl/mac-x86/crypto/aes/
vpaes-x86.S 77 movdqu (%edx),%xmm5
132 movdqu (%edx),%xmm5
154 movdqu (%edx),%xmm5
219 movdqu (%edx),%xmm0
235 movdqu (%esi),%xmm0
244 movdqu %xmm0,(%edx)
249 movdqu %xmm3,(%edx)
265 movdqu 8(%esi),%xmm0
285 movdqu 16(%esi),%xmm0
314 movdqu %xmm0,(%edx
    [all...]
aesni-x86.S 373 movdqu (%esi),%xmm2
374 movdqu 16(%esi),%xmm3
375 movdqu 32(%esi),%xmm4
376 movdqu 48(%esi),%xmm5
377 movdqu 64(%esi),%xmm6
378 movdqu 80(%esi),%xmm7
385 movdqu (%esi),%xmm2
387 movdqu 16(%esi),%xmm3
389 movdqu 32(%esi),%xmm4
391 movdqu 48(%esi),%xmm
    [all...]
  /external/boringssl/mac-x86_64/crypto/aes/
vpaes-x86_64.S 28 movdqu (%r9),%xmm5
88 movdqu (%r9),%xmm5
119 movdqu (%r9),%xmm5
194 movdqu (%r9),%xmm0
225 movdqu (%rdi),%xmm0
238 movdqu %xmm0,(%rdx)
245 movdqu %xmm3,(%rdx)
289 movdqu 8(%rdi),%xmm0
321 movdqu 16(%rdi),%xmm0
372 movdqu %xmm0,(%rdx
    [all...]
  /external/boringssl/win-x86_64/crypto/aes/
vpaes-x86_64.asm 32 movdqu xmm5,XMMWORD[r9]
92 movdqu xmm5,XMMWORD[r9]
123 movdqu xmm5,XMMWORD[r9]
198 movdqu xmm0,XMMWORD[r9]
229 movdqu xmm0,XMMWORD[rdi]
242 movdqu XMMWORD[rdx],xmm0
249 movdqu XMMWORD[rdx],xmm3
293 movdqu xmm0,XMMWORD[8+rdi]
325 movdqu xmm0,XMMWORD[16+rdi]
376 movdqu XMMWORD[rdx],xmm
    [all...]
  /external/libyuv/files/source/
row_win.cc 249 movdqu xmm0, [eax]
250 movdqu xmm1, [eax + 16]
251 movdqu xmm3, [eax + 32]
289 movdqu xmm0, [eax]
290 movdqu xmm1, [eax + 16]
291 movdqu xmm3, [eax + 32]
349 movdqu xmm0, [eax] // fetch 8 pixels of bgr565
399 movdqu xmm0, [eax] // fetch 8 pixels of 1555
445 movdqu xmm0, [eax] // fetch 8 pixels of bgra4444
702 movdqu xmm0, [eax
718 movdqu [edx], xmm0 local
786 movdqu [edx], xmm0 local
854 movdqu [edx], xmm0 local
922 movdqu [edx], xmm0 local
1851 movdqu [edx], xmm0 local
1852 movdqu [edx + 16], xmm1 local
1894 movdqu [edx], xmm0 local
1895 movdqu [edx + 16], xmm1 local
1938 movdqu [edx], xmm0 local
1939 movdqu [edx + 16], xmm1 local
1978 movdqu [edx], xmm0 local
1979 movdqu [edx + 16], xmm1 local
2016 movdqu [edx], xmm0 local
2017 movdqu [edx + 16], xmm1 local
2097 movdqu [edx], xmm5 local
2098 movdqu [edx + 16], xmm0 local
2179 movdqu [edx], xmm2 local
2180 movdqu [edx + 16], xmm1 local
2261 movdqu [edx], xmm5 local
2262 movdqu [edx + 16], xmm0 local
2372 movdqu [edx], xmm0 local
2700 movdqu [edx], xmm0 local
2910 movdqu [edx], xmm0 local
3777 movdqu [edi], xmm0 local
3884 movdqu [edx], xmm2 local
    [all...]
  /external/v8/src/ia32/
codegen-ia32.cc 258 __ movdqu(xmm0, Operand(src, 0));
259 __ movdqu(Operand(dst, 0), xmm0);
280 __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
281 __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);
326 __ movdqu(xmm0, Operand(src, -0x10));
327 __ movdqu(Operand(dst, -0x10), xmm0);
348 __ movdqu(xmm0, Operand(src, 0));
349 __ movdqu(Operand(dst, 0), xmm0);
398 __ movdqu(xmm0, Operand(src, 0));
399 __ movdqu(xmm1, Operand(src, count, times_1, -0x10))
    [all...]
  /external/boringssl/win-x86_64/crypto/modes/
ghash-x86_64.asm 694 movdqu xmm2,XMMWORD[rdx]
755 movdqu XMMWORD[rcx],xmm2
757 movdqu XMMWORD[16+rcx],xmm0
759 movdqu XMMWORD[32+rcx],xmm4
838 movdqu XMMWORD[48+rcx],xmm5
840 movdqu XMMWORD[64+rcx],xmm0
842 movdqu XMMWORD[80+rcx],xmm4
853 movdqu xmm0,XMMWORD[rcx]
855 movdqu xmm2,XMMWORD[rdx]
856 movdqu xmm4,XMMWORD[32+rdx
    [all...]

Completed in 754 milliseconds

12 3 4 5 6 7 8