    Searched refs: movdqu (Results 1 - 25 of 118)
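
movdqu is the SSE2 instruction that moves 128 bits (one XMM register) to or from memory with no alignment requirement, which is why it shows up wherever code touches buffers that are not guaranteed to be 16-byte aligned. As a minimal sketch of the same operation from C, using the SSE2 intrinsics that compile to movdqu (the function and buffer names here are illustrative, not taken from any of the files below):

    #include <emmintrin.h>   /* SSE2 intrinsics */
    #include <stddef.h>
    #include <stdint.h>

    /* Copy n bytes (n a multiple of 16) between possibly unaligned buffers.
     * _mm_loadu_si128/_mm_storeu_si128 emit movdqu; the aligned variants
     * _mm_load_si128/_mm_store_si128 emit movdqa and fault if the address
     * is not 16-byte aligned. */
    void copy_unaligned(uint8_t *dst, const uint8_t *src, size_t n)
    {
        for (size_t i = 0; i < n; i += 16) {
            __m128i v = _mm_loadu_si128((const __m128i *)(src + i)); /* movdqu load  */
            _mm_storeu_si128((__m128i *)(dst + i), v);               /* movdqu store */
        }
    }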

  /external/chromium_org/third_party/yasm/source/patched-yasm/modules/arch/x86/tests/
ssse3.asm 19 TEST_GENERIC pabsb, movdqu, xmm0, xmm, xmm1
20 TEST_GENERIC pabsw, movdqu, xmm0, xmm, xmm1
21 TEST_GENERIC pabsd, movdqu, xmm0, xmm, xmm1
27 TEST_GENERIC psignb, movdqu, xmm0, xmm, xmm1
28 TEST_GENERIC psignw, movdqu, xmm0, xmm, xmm1
29 TEST_GENERIC psignd, movdqu, xmm0, xmm, xmm1
35 TEST_GENERIC phaddw, movdqu, xmm0, xmm, xmm1
36 TEST_GENERIC phaddsw, movdqu, xmm0, xmm, xmm1
37 TEST_GENERIC phaddd, movdqu, xmm0, xmm, xmm1
43 TEST_GENERIC phsubw, movdqu, xmm0, xmm, xmm
    [all...]
  /external/chromium_org/third_party/openssl/openssl/crypto/aes/asm/
bsaes-x86_64.pl 911 movdqu ($inp), %xmm7 # load round 0 key
920 movdqu ($inp), %xmm6 # load round 1 key
973 movdqu ($inp), %xmm6 # load next round key
1013 movdqu 0x00($inp), @XMM[0] # load input
1014 movdqu 0x10($inp), @XMM[1]
1015 movdqu 0x20($inp), @XMM[2]
1016 movdqu 0x30($inp), @XMM[3]
1017 movdqu 0x40($inp), @XMM[4]
1018 movdqu 0x50($inp), @XMM[5]
1019 movdqu 0x60($inp), @XMM[6
    [all...]
bsaes-x86_64.S 1050 movdqu (%rcx),%xmm7
1059 movdqu (%rcx),%xmm6
1112 movdqu (%rcx),%xmm6
1169 movdqu (%rbx),%xmm14
1172 movdqu 0(%r12),%xmm15
1173 movdqu 16(%r12),%xmm0
1174 movdqu 32(%r12),%xmm1
1175 movdqu 48(%r12),%xmm2
1176 movdqu 64(%r12),%xmm3
1177 movdqu 80(%r12),%xmm
    [all...]
vpaes-x86.pl 186 &movdqu ("xmm5",&QWP(0,$key));
245 &movdqu ("xmm5",&QWP(0,$key));
275 &movdqu ("xmm5",&QWP(0,$key));
350 &movdqu ("xmm0",&QWP(0,$key));
372 &movdqu ("xmm0",&QWP(0,$inp)); # load key (unaligned)
386 &movdqu (&QWP(0,$key),"xmm0");
393 &movdqu (&QWP(0,$key),"xmm3");
436 &movdqu ("xmm0",&QWP(8,$inp)); # load key part 2 (very unaligned)
467 &movdqu ("xmm0",&QWP(16,$inp)); # load key part 2 (unaligned)
516 &movdqu (&QWP(0,$key),"xmm0"); # save last ke
    [all...]
aesni-x86.S 322 movdqu (%esi),%xmm2
323 movdqu 16(%esi),%xmm3
324 movdqu 32(%esi),%xmm4
325 movdqu 48(%esi),%xmm5
326 movdqu 64(%esi),%xmm6
327 movdqu 80(%esi),%xmm7
334 movdqu (%esi),%xmm2
336 movdqu 16(%esi),%xmm3
338 movdqu 32(%esi),%xmm4
340 movdqu 48(%esi),%xmm
    [all...]
aesni-x86.pl 378 &movdqu ($inout0,&QWP(0,$inp));
379 &movdqu ($inout1,&QWP(0x10,$inp));
380 &movdqu ($inout2,&QWP(0x20,$inp));
381 &movdqu ($inout3,&QWP(0x30,$inp));
382 &movdqu ($inout4,&QWP(0x40,$inp));
383 &movdqu ($inout5,&QWP(0x50,$inp));
390 &movdqu ($inout0,&QWP(0,$inp));
392 &movdqu ($inout1,&QWP(0x10,$inp));
394 &movdqu ($inout2,&QWP(0x20,$inp));
396 &movdqu ($inout3,&QWP(0x30,$inp))
    [all...]
vpaes-x86.S 76 movdqu (%edx),%xmm5
131 movdqu (%edx),%xmm5
155 movdqu (%edx),%xmm5
221 movdqu (%edx),%xmm0
237 movdqu (%esi),%xmm0
246 movdqu %xmm0,(%edx)
251 movdqu %xmm3,(%edx)
267 movdqu 8(%esi),%xmm0
287 movdqu 16(%esi),%xmm0
316 movdqu %xmm0,(%edx
    [all...]
vpaes-x86_64.S 27 movdqu (%r9),%xmm5
86 movdqu (%r9),%xmm5
118 movdqu (%r9),%xmm5
195 movdqu (%r9),%xmm0
225 movdqu (%rdi),%xmm0
238 movdqu %xmm0,(%rdx)
245 movdqu %xmm3,(%rdx)
289 movdqu 8(%rdi),%xmm0
321 movdqu 16(%rdi),%xmm0
372 movdqu %xmm0,(%rdx
    [all...]
aesni-x86_64.S 456 movdqu (%rdi),%xmm2
457 movdqu 16(%rdi),%xmm3
458 movdqu 32(%rdi),%xmm4
459 movdqu 48(%rdi),%xmm5
460 movdqu 64(%rdi),%xmm6
461 movdqu 80(%rdi),%xmm7
462 movdqu 96(%rdi),%xmm8
463 movdqu 112(%rdi),%xmm9
471 movdqu (%rdi),%xmm2
474 movdqu 16(%rdi),%xmm
    [all...]
aesni-x86_64.pl 544 movdqu ($inp),$inout0
545 movdqu 0x10($inp),$inout1
546 movdqu 0x20($inp),$inout2
547 movdqu 0x30($inp),$inout3
548 movdqu 0x40($inp),$inout4
549 movdqu 0x50($inp),$inout5
550 movdqu 0x60($inp),$inout6
551 movdqu 0x70($inp),$inout7
559 movdqu ($inp),$inout0
562 movdqu 0x10($inp),$inout
    [all...]
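
In the aesni-x86.pl / aesni-x86_64.pl hits above, movdqu pulls up to eight 16-byte input blocks into XMM registers before the AES round instructions run on them, since callers may pass arbitrarily aligned buffers. A hedged single-block sketch of that load / encrypt / store pattern with AES-NI intrinsics (the expanded key schedule rk and the round count nr are assumed to be prepared elsewhere; this illustrates the pattern and is not the OpenSSL code):

    #include <emmintrin.h>
    #include <wmmintrin.h>   /* AES-NI intrinsics, compile with -maes */
    #include <stdint.h>

    /* Encrypt one 16-byte block with an already expanded key schedule.
     * rk[0..nr] holds the round keys; nr is 10, 12 or 14 for AES-128/192/256. */
    void aes_encrypt_block(const uint8_t *in, uint8_t *out,
                           const __m128i *rk, int nr)
    {
        __m128i b = _mm_loadu_si128((const __m128i *)in); /* movdqu: input may be unaligned */
        b = _mm_xor_si128(b, rk[0]);                      /* initial AddRoundKey */
        for (int r = 1; r < nr; r++)
            b = _mm_aesenc_si128(b, rk[r]);               /* one full AES round */
        b = _mm_aesenclast_si128(b, rk[nr]);              /* final round */
        _mm_storeu_si128((__m128i *)out, b);              /* movdqu store */
    }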
  /external/openssl/crypto/aes/asm/
bsaes-x86_64.pl 911 movdqu ($inp), %xmm7 # load round 0 key
920 movdqu ($inp), %xmm6 # load round 1 key
973 movdqu ($inp), %xmm6 # load next round key
1013 movdqu 0x00($inp), @XMM[0] # load input
1014 movdqu 0x10($inp), @XMM[1]
1015 movdqu 0x20($inp), @XMM[2]
1016 movdqu 0x30($inp), @XMM[3]
1017 movdqu 0x40($inp), @XMM[4]
1018 movdqu 0x50($inp), @XMM[5]
1019 movdqu 0x60($inp), @XMM[6
    [all...]
bsaes-x86_64.S 1050 movdqu (%rcx),%xmm7
1059 movdqu (%rcx),%xmm6
1112 movdqu (%rcx),%xmm6
1169 movdqu (%rbx),%xmm14
1172 movdqu 0(%r12),%xmm15
1173 movdqu 16(%r12),%xmm0
1174 movdqu 32(%r12),%xmm1
1175 movdqu 48(%r12),%xmm2
1176 movdqu 64(%r12),%xmm3
1177 movdqu 80(%r12),%xmm
    [all...]
vpaes-x86.pl 186 &movdqu ("xmm5",&QWP(0,$key));
245 &movdqu ("xmm5",&QWP(0,$key));
275 &movdqu ("xmm5",&QWP(0,$key));
350 &movdqu ("xmm0",&QWP(0,$key));
372 &movdqu ("xmm0",&QWP(0,$inp)); # load key (unaligned)
386 &movdqu (&QWP(0,$key),"xmm0");
393 &movdqu (&QWP(0,$key),"xmm3");
436 &movdqu ("xmm0",&QWP(8,$inp)); # load key part 2 (very unaligned)
467 &movdqu ("xmm0",&QWP(16,$inp)); # load key part 2 (unaligned)
516 &movdqu (&QWP(0,$key),"xmm0"); # save last ke
    [all...]
aesni-x86.S 322 movdqu (%esi),%xmm2
323 movdqu 16(%esi),%xmm3
324 movdqu 32(%esi),%xmm4
325 movdqu 48(%esi),%xmm5
326 movdqu 64(%esi),%xmm6
327 movdqu 80(%esi),%xmm7
334 movdqu (%esi),%xmm2
336 movdqu 16(%esi),%xmm3
338 movdqu 32(%esi),%xmm4
340 movdqu 48(%esi),%xmm
    [all...]
aesni-x86.pl 378 &movdqu ($inout0,&QWP(0,$inp));
379 &movdqu ($inout1,&QWP(0x10,$inp));
380 &movdqu ($inout2,&QWP(0x20,$inp));
381 &movdqu ($inout3,&QWP(0x30,$inp));
382 &movdqu ($inout4,&QWP(0x40,$inp));
383 &movdqu ($inout5,&QWP(0x50,$inp));
390 &movdqu ($inout0,&QWP(0,$inp));
392 &movdqu ($inout1,&QWP(0x10,$inp));
394 &movdqu ($inout2,&QWP(0x20,$inp));
396 &movdqu ($inout3,&QWP(0x30,$inp))
    [all...]
vpaes-x86.S 76 movdqu (%edx),%xmm5
131 movdqu (%edx),%xmm5
155 movdqu (%edx),%xmm5
221 movdqu (%edx),%xmm0
237 movdqu (%esi),%xmm0
246 movdqu %xmm0,(%edx)
251 movdqu %xmm3,(%edx)
267 movdqu 8(%esi),%xmm0
287 movdqu 16(%esi),%xmm0
316 movdqu %xmm0,(%edx
    [all...]
vpaes-x86_64.S 27 movdqu (%r9),%xmm5
86 movdqu (%r9),%xmm5
118 movdqu (%r9),%xmm5
195 movdqu (%r9),%xmm0
225 movdqu (%rdi),%xmm0
238 movdqu %xmm0,(%rdx)
245 movdqu %xmm3,(%rdx)
289 movdqu 8(%rdi),%xmm0
321 movdqu 16(%rdi),%xmm0
372 movdqu %xmm0,(%rdx
    [all...]
aesni-x86_64.S 456 movdqu (%rdi),%xmm2
457 movdqu 16(%rdi),%xmm3
458 movdqu 32(%rdi),%xmm4
459 movdqu 48(%rdi),%xmm5
460 movdqu 64(%rdi),%xmm6
461 movdqu 80(%rdi),%xmm7
462 movdqu 96(%rdi),%xmm8
463 movdqu 112(%rdi),%xmm9
471 movdqu (%rdi),%xmm2
474 movdqu 16(%rdi),%xmm
    [all...]
aesni-x86_64.pl 544 movdqu ($inp),$inout0
545 movdqu 0x10($inp),$inout1
546 movdqu 0x20($inp),$inout2
547 movdqu 0x30($inp),$inout3
548 movdqu 0x40($inp),$inout4
549 movdqu 0x50($inp),$inout5
550 movdqu 0x60($inp),$inout6
551 movdqu 0x70($inp),$inout7
559 movdqu ($inp),$inout0
562 movdqu 0x10($inp),$inout
    [all...]
  /bionic/libc/arch-x86/string/
sse2-wcscmp-atom.S 175 movdqu 16(%edi), %xmm1
176 movdqu 16(%esi), %xmm2
184 movdqu 32(%edi), %xmm1
185 movdqu 32(%esi), %xmm2
193 movdqu 48(%edi), %xmm1
194 movdqu 48(%esi), %xmm2
242 movdqu 16(%edi), %xmm1
243 movdqu 16(%esi), %xmm2
251 movdqu 32(%edi), %xmm1
252 movdqu 32(%esi), %xmm
    [all...]
  /external/zlib/src/contrib/amd64/
amd64-match.S 293 movdqu (%windowbestlen, %rdx), %xmm1
294 movdqu (%prev, %rdx), %xmm2
296 movdqu 16(%windowbestlen, %rdx), %xmm3
297 movdqu 16(%prev, %rdx), %xmm4
299 movdqu 32(%windowbestlen, %rdx), %xmm5
300 movdqu 32(%prev, %rdx), %xmm6
302 movdqu 48(%windowbestlen, %rdx), %xmm7
303 movdqu 48(%prev, %rdx), %xmm8
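
The amd64-match.S hits above load 16-byte chunks of the current window and of the previous match candidate in pairs (offsets 0, 16, 32, 48) so each pair can be compared in a single SIMD step. A sketch of that compare idiom with SSE2 intrinsics (the buffer names a and b are illustrative; zlib's actual loop unrolls to 64 bytes per iteration and feeds the result into its match-length bookkeeping):

    #include <emmintrin.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Return the index of the first differing byte in [0, n), or n if equal.
     * n must be a multiple of 16; neither pointer needs to be aligned. */
    size_t first_mismatch(const uint8_t *a, const uint8_t *b, size_t n)
    {
        for (size_t i = 0; i < n; i += 16) {
            __m128i va = _mm_loadu_si128((const __m128i *)(a + i)); /* movdqu */
            __m128i vb = _mm_loadu_si128((const __m128i *)(b + i)); /* movdqu */
            __m128i eq = _mm_cmpeq_epi8(va, vb);  /* 0xFF where the bytes match */
            int mask = _mm_movemask_epi8(eq);     /* one bit per byte lane */
            if (mask != 0xFFFF)                   /* some lane differed */
                return i + __builtin_ctz(~mask & 0xFFFF); /* GCC/Clang builtin */
        }
        return n;
    }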
  /external/libyuv/files/source/
row_win.cc 249 movdqu xmm0, [eax]
250 movdqu xmm1, [eax + 16]
251 movdqu xmm3, [eax + 32]
289 movdqu xmm0, [eax]
290 movdqu xmm1, [eax + 16]
291 movdqu xmm3, [eax + 32]
349 movdqu xmm0, [eax] // fetch 8 pixels of bgr565
399 movdqu xmm0, [eax] // fetch 8 pixels of 1555
445 movdqu xmm0, [eax] // fetch 8 pixels of bgra4444
702 movdqu xmm0, [eax
718 movdqu [edx], xmm0
786 movdqu [edx], xmm0
854 movdqu [edx], xmm0
922 movdqu [edx], xmm0
1851 movdqu [edx], xmm0
1852 movdqu [edx + 16], xmm1
1894 movdqu [edx], xmm0
1895 movdqu [edx + 16], xmm1
1938 movdqu [edx], xmm0
1939 movdqu [edx + 16], xmm1
1978 movdqu [edx], xmm0
1979 movdqu [edx + 16], xmm1
2016 movdqu [edx], xmm0
2017 movdqu [edx + 16], xmm1
2097 movdqu [edx], xmm5
2098 movdqu [edx + 16], xmm0
2179 movdqu [edx], xmm2
2180 movdqu [edx + 16], xmm1
2261 movdqu [edx], xmm5
2262 movdqu [edx + 16], xmm0
2372 movdqu [edx], xmm0
2700 movdqu [edx], xmm0
2910 movdqu [edx], xmm0
3777 movdqu [edi], xmm0
3884 movdqu [edx], xmm2
    [all...]
  /external/libvpx/libvpx/vp8/common/x86/
sad_sse2.asm 356 movdqu xmm0, XMMWORD PTR [rsi]
357 movdqu xmm1, XMMWORD PTR [rsi + 16]
358 movdqu xmm2, XMMWORD PTR [rsi + rax]
359 movdqu xmm3, XMMWORD PTR [rsi + rax + 16]
363 movdqu xmm4, XMMWORD PTR [rsi]
364 movdqu xmm5, XMMWORD PTR [rsi + 16]
365 movdqu xmm6, XMMWORD PTR [rsi + rax]
366 movdqu xmm7, XMMWORD PTR [rsi + rax + 16]
392 movdqu xmm0, XMMWORD PTR [rsi]
393 movdqu xmm1, XMMWORD PTR [rsi + 16
    [all...]
postproc_sse2.asm 124 movdqu xmm0, XMMWORD PTR [rsi]
125 movdqu xmm1, XMMWORD PTR [rsi + rax]
126 movdqu xmm3, XMMWORD PTR [rsi + 2*rax]
132 movdqu xmm1, XMMWORD PTR [rsi + 2*rax]
133 movdqu xmm3, XMMWORD PTR [rsi + rax]
137 movdqu XMMWORD PTR [rdi], xmm0
178 movdqu xmm0, XMMWORD PTR [rdi + rdx]
179 movdqu xmm1, XMMWORD PTR [rdi + rdx -2]
180 movdqu xmm3, XMMWORD PTR [rdi + rdx -1]
184 movdqu xmm1, XMMWORD PTR [rdi + rdx +1
    [all...]
  /external/chromium_org/third_party/openssl/openssl/crypto/modes/asm/
ghash-x86.pl 926 &movdqu ($Hkey,&QWP(0,$Xip));
948 &movdqu (&QWP(0,$Htbl),$Hkey); # save H
949 &movdqu (&QWP(16,$Htbl),$Xi); # save H^2
963 &movdqu ($Xi,&QWP(0,$Xip));
972 &movdqu (&QWP(0,$Xip),$Xi);
988 &movdqu ($Xi,&QWP(0,$Xip));
990 &movdqu ($Hkey,&QWP(0,$Htbl));
1001 &movdqu ($T1,&QWP(0,$inp)); # Ii
1002 &movdqu ($Xn,&QWP(16,$inp)); # Ii+1
1016 &movdqu ($T1,&QWP(0,$inp)); # I
    [all...]
