    Searched full:movdqu (Results 26 - 50 of 239)
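For context, movdqu is the SSE2 instruction for unaligned 128-bit (XMM) loads and stores, which is why it appears wherever the files below read or write 16-byte blocks that are not guaranteed to be 16-byte aligned. A minimal C sketch of the same operation via the SSE2 intrinsics _mm_loadu_si128/_mm_storeu_si128 (illustrative only, not taken from any of the files listed) is:

#include <emmintrin.h>  /* SSE2 intrinsics */
#include <stdint.h>

/* Copy 16 bytes between possibly unaligned buffers; compilers
 * typically lower these intrinsics to movdqu (or vmovdqu with AVX). */
static void copy16_unaligned(const uint8_t *src, uint8_t *dst) {
    __m128i v = _mm_loadu_si128((const __m128i *)src);  /* movdqu load  */
    _mm_storeu_si128((__m128i *)dst, v);                 /* movdqu store */
}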


  /external/libvpx/libvpx/vpx_dsp/x86/
vpx_subpixel_bilinear_sse2.asm 124 movdqu xmm1, [rdi]
127 movdqu [rdi], xmm0 ;store the result
196 movdqu xmm0, [rsi] ;0
197 movdqu xmm1, [rsi + rax] ;1
274 movdqu xmm0, [rsi] ;0
275 movdqu xmm1, [rsi + rax] ;1
301 movdqu xmm0, [rsi] ;load src
327 movdqu xmm0, [rsi] ;load src
354 movdqu xmm0, [rsi] ;load src
355 movdqu xmm1, [rsi + 1
    [all...]
vpx_subpixel_bilinear_ssse3.asm 102 movdqu xmm1, [rdi]
105 movdqu [rdi], xmm0 ;store the result
174 movdqu xmm0, [rsi] ;0
175 movdqu xmm1, [rsi + rax] ;1
251 movdqu xmm0, [rsi] ;0
252 movdqu xmm1, [rsi + rax] ;1
277 movdqu xmm0, [rsi] ;load src
303 movdqu xmm0, [rsi] ;load src
330 movdqu xmm0, [rsi] ;load src
331 movdqu xmm1, [rsi + 1
    [all...]
halfpix_variance_impl_sse2.asm 42 movdqu xmm5, XMMWORD PTR [rsi]
43 movdqu xmm3, XMMWORD PTR [rsi+1]
49 movdqu xmm1, XMMWORD PTR [rsi] ;
50 movdqu xmm2, XMMWORD PTR [rsi+1] ;
153 movdqu xmm5, XMMWORD PTR [rsi]
158 movdqu xmm3, XMMWORD PTR [rsi]
261 movdqu xmm5, XMMWORD PTR [rsi] ; xmm5 = s0,s1,s2..s15
262 movdqu xmm3, XMMWORD PTR [rsi+1] ; xmm3 = s1,s2,s3..s16
  /hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/vp9/common/x86/
vp9_subpixel_bilinear_sse2.asm 124 movdqu xmm1, [rdi]
127 movdqu [rdi], xmm0 ;store the result
196 movdqu xmm0, [rsi] ;0
197 movdqu xmm1, [rsi + rax] ;1
274 movdqu xmm0, [rsi] ;0
275 movdqu xmm1, [rsi + rax] ;1
301 movdqu xmm0, [rsi] ;load src
327 movdqu xmm0, [rsi] ;load src
354 movdqu xmm0, [rsi] ;load src
355 movdqu xmm1, [rsi + 1
    [all...]
vp9_subpixel_bilinear_ssse3.asm 102 movdqu xmm1, [rdi]
105 movdqu [rdi], xmm0 ;store the result
174 movdqu xmm0, [rsi] ;0
175 movdqu xmm1, [rsi + rax] ;1
251 movdqu xmm0, [rsi] ;0
252 movdqu xmm1, [rsi + rax] ;1
277 movdqu xmm0, [rsi] ;load src
303 movdqu xmm0, [rsi] ;load src
330 movdqu xmm0, [rsi] ;load src
331 movdqu xmm1, [rsi + 1
    [all...]
  /external/valgrind/none/tests/amd64/
aes.c 80 "movdqu %1, %%xmm1" "\n\t"
81 "movdqu %2, %%xmm2" "\n\t"
83 "movdqu %%xmm1, %0" "\n\t"
112 "movdqu %1, %%xmm1" "\n\t"
113 "movdqu %2, %%xmm2" "\n\t"
115 "movdqu %%xmm1, %0" "\n\t"
144 "movdqu %1, %%xmm1" "\n\t"
145 "movdqu %2, %%xmm2" "\n\t"
147 "movdqu %%xmm1, %0" "\n\t"
176 "movdqu %1, %%xmm1" "\n\t
    [all...]
  /external/libvpx/libvpx/third_party/libyuv/source/
scale_win.cc 108 movdqu xmm0, [eax]
109 movdqu xmm1, [eax + 16]
114 movdqu [edx], xmm0 local
136 movdqu xmm0, [eax]
137 movdqu xmm1, [eax + 16]
150 movdqu [edx], xmm0 local
173 movdqu xmm0, [eax]
174 movdqu xmm1, [eax + 16]
175 movdqu xmm2, [eax + esi]
176 movdqu xmm3, [eax + esi + 16
191 movdqu [edx], xmm0 local
820 movdqu [edx], xmm0 // write 16 words to destination local
821 movdqu [edx + 16], xmm1 local
952 movdqu [edx], xmm0 local
953 movdqu [edx + 16], xmm1 local
978 movdqu [edx], xmm0 local
1006 movdqu [edx], xmm0 local
1039 movdqu [edx], xmm0 local
1074 movdqu [edx], xmm0 local
1121 movdqu [edx], xmm0 local
1178 movdqu [edi], xmm0 local
1310 movdqu [edx], xmm0 local
1311 movdqu [edx + 16], xmm1 local
    [all...]
row_gcc.cc 226 "movdqu %%xmm0," MEMACCESS(1) " \n"
254 "movdqu %%xmm0," MEMACCESS(1) " \n"
255 "movdqu %%xmm1," MEMACCESS2(0x10,1) " \n"
275 "movdqu " MEMACCESS(0) ",%%xmm0 \n"
276 "movdqu " MEMACCESS2(0x10,0) ",%%xmm1 \n"
277 "movdqu " MEMACCESS2(0x20,0) ",%%xmm3 \n"
285 "movdqu %%xmm2," MEMACCESS2(0x20,1) " \n"
288 "movdqu %%xmm0," MEMACCESS(1) " \n"
292 "movdqu %%xmm1," MEMACCESS2(0x10,1) " \n"
294 "movdqu %%xmm3," MEMACCESS2(0x30,1) " \n
    [all...]
rotate_win.cc 140 movdqu xmm0, [eax]
141 movdqu xmm1, [eax + edi]
147 movdqu xmm2, [eax]
148 movdqu xmm3, [eax + edi]
154 movdqu xmm4, [eax]
155 movdqu xmm5, [eax + edi]
161 movdqu xmm6, [eax]
162 movdqu xmm7, [eax + edi]
164 movdqu [esp], xmm5 // backup xmm5 local
185 movdqu xmm5, [esp] // restore xmm
186 movdqu [esp], xmm6 // backup xmm6 local
    [all...]
rotate_gcc.cc 132 "movdqu (%eax),%xmm0 \n"
133 "movdqu (%eax,%edi,1),%xmm1 \n"
139 "movdqu (%eax),%xmm2 \n"
140 "movdqu (%eax,%edi,1),%xmm3 \n"
146 "movdqu (%eax),%xmm4 \n"
147 "movdqu (%eax,%edi,1),%xmm5 \n"
153 "movdqu (%eax),%xmm6 \n"
154 "movdqu (%eax,%edi,1),%xmm7 \n"
156 "movdqu %xmm5,(%esp) \n"
176 "movdqu (%esp),%xmm5 \n
    [all...]
  /external/boringssl/linux-x86_64/crypto/aes/
aesni-x86_64.S 514 movdqu (%rdi),%xmm2
515 movdqu 16(%rdi),%xmm3
516 movdqu 32(%rdi),%xmm4
517 movdqu 48(%rdi),%xmm5
518 movdqu 64(%rdi),%xmm6
519 movdqu 80(%rdi),%xmm7
520 movdqu 96(%rdi),%xmm8
521 movdqu 112(%rdi),%xmm9
529 movdqu (%rdi),%xmm2
532 movdqu 16(%rdi),%xmm
    [all...]
  /external/boringssl/mac-x86_64/crypto/aes/
aesni-x86_64.S 513 movdqu (%rdi),%xmm2
514 movdqu 16(%rdi),%xmm3
515 movdqu 32(%rdi),%xmm4
516 movdqu 48(%rdi),%xmm5
517 movdqu 64(%rdi),%xmm6
518 movdqu 80(%rdi),%xmm7
519 movdqu 96(%rdi),%xmm8
520 movdqu 112(%rdi),%xmm9
528 movdqu (%rdi),%xmm2
531 movdqu 16(%rdi),%xmm
    [all...]
  /external/boringssl/win-x86_64/crypto/aes/
aesni-x86_64.asm 531 movdqu xmm2,XMMWORD[rdi]
532 movdqu xmm3,XMMWORD[16+rdi]
533 movdqu xmm4,XMMWORD[32+rdi]
534 movdqu xmm5,XMMWORD[48+rdi]
535 movdqu xmm6,XMMWORD[64+rdi]
536 movdqu xmm7,XMMWORD[80+rdi]
537 movdqu xmm8,XMMWORD[96+rdi]
538 movdqu xmm9,XMMWORD[112+rdi]
546 movdqu xmm2,XMMWORD[rdi]
549 movdqu xmm3,XMMWORD[16+rdi
    [all...]
  /bionic/libc/arch-x86_64/string/
sse2-memset-slm.S 115 movdqu %xmm0, (%rdi)
116 movdqu %xmm0, -16(%rdi, %rdx)
119 movdqu %xmm0, 16(%rdi)
120 movdqu %xmm0, -32(%rdi, %rdx)
123 movdqu %xmm0, 32(%rdi)
124 movdqu %xmm0, 48(%rdi)
125 movdqu %xmm0, -64(%rdi, %rdx)
126 movdqu %xmm0, -48(%rdi, %rdx)
  /toolchain/binutils/binutils-2.25/gas/testsuite/gas/i386/
x86-64-avx-swap.s 19 movdqu %xmm8,%xmm6
51 movdqu xmm6,xmm8
  /hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/vp8/common/x86/
sad_sse2.asm 356 movdqu xmm0, XMMWORD PTR [rsi]
357 movdqu xmm1, XMMWORD PTR [rsi + 16]
358 movdqu xmm2, XMMWORD PTR [rsi + rax]
359 movdqu xmm3, XMMWORD PTR [rsi + rax + 16]
363 movdqu xmm4, XMMWORD PTR [rsi]
364 movdqu xmm5, XMMWORD PTR [rsi + 16]
365 movdqu xmm6, XMMWORD PTR [rsi + rax]
366 movdqu xmm7, XMMWORD PTR [rsi + rax + 16]
392 movdqu xmm0, XMMWORD PTR [rsi]
393 movdqu xmm1, XMMWORD PTR [rsi + 16
    [all...]
postproc_sse2.asm 124 movdqu xmm0, XMMWORD PTR [rsi]
125 movdqu xmm1, XMMWORD PTR [rsi + rax]
126 movdqu xmm3, XMMWORD PTR [rsi + 2*rax]
132 movdqu xmm1, XMMWORD PTR [rsi + 2*rax]
133 movdqu xmm3, XMMWORD PTR [rsi + rax]
137 movdqu XMMWORD PTR [rdi], xmm0
178 movdqu xmm0, XMMWORD PTR [rdi + rdx]
179 movdqu xmm1, XMMWORD PTR [rdi + rdx -2]
180 movdqu xmm3, XMMWORD PTR [rdi + rdx -1]
184 movdqu xmm1, XMMWORD PTR [rdi + rdx +1
    [all...]
variance_impl_ssse3.asm 67 movdqu xmm0, XMMWORD PTR [rsi]
68 movdqu xmm1, XMMWORD PTR [rsi+1]
92 movdqu xmm1, XMMWORD PTR [rsi]
93 movdqu xmm2, XMMWORD PTR [rsi+1]
163 movdqu xmm1, XMMWORD PTR [rsi]
173 movdqu xmm3, XMMWORD PTR [rsi]
264 movdqu xmm1, XMMWORD PTR [rsi]
265 movdqu xmm2, XMMWORD PTR [rsi+1]
  /external/boringssl/win-x86_64/crypto/ec/
p256-x86_64-asm.asm 762 movdqu XMMWORD[rcx],xmm2
763 movdqu XMMWORD[16+rcx],xmm3
764 movdqu XMMWORD[32+rcx],xmm4
765 movdqu XMMWORD[48+rcx],xmm5
766 movdqu XMMWORD[64+rcx],xmm6
767 movdqu XMMWORD[80+rcx],xmm7
837 movdqu XMMWORD[rcx],xmm2
838 movdqu XMMWORD[16+rcx],xmm3
839 movdqu XMMWORD[32+rcx],xmm4
840 movdqu XMMWORD[48+rcx],xmm
    [all...]
  /external/llvm/test/CodeGen/X86/
psubus.ll 9 ; SSE-NEXT: movdqu (%rdi), %xmm0
11 ; SSE-NEXT: movdqu %xmm0, (%rdi)
34 ; SSE-NEXT: movdqu (%rdi), %xmm0
36 ; SSE-NEXT: movdqu %xmm0, (%rdi)
63 ; SSE2-NEXT: movdqu (%rdi), %xmm1
65 ; SSE2-NEXT: movdqu %xmm1, (%rdi)
72 ; SSSE3-NEXT: movdqu (%rdi), %xmm1
74 ; SSSE3-NEXT: movdqu %xmm1, (%rdi)
110 ; SSE-NEXT: movdqu (%rdi), %xmm0
112 ; SSE-NEXT: movdqu %xmm0, (%rdi
    [all...]
  /external/libvpx/libvpx/vp8/common/x86/
postproc_sse2.asm 124 movdqu xmm0, XMMWORD PTR [rsi]
125 movdqu xmm1, XMMWORD PTR [rsi + rax]
126 movdqu xmm3, XMMWORD PTR [rsi + 2*rax]
132 movdqu xmm1, XMMWORD PTR [rsi + 2*rax]
133 movdqu xmm3, XMMWORD PTR [rsi + rax]
137 movdqu XMMWORD PTR [rdi], xmm0
178 movdqu xmm0, XMMWORD PTR [rdi + rdx]
179 movdqu xmm1, XMMWORD PTR [rdi + rdx -2]
180 movdqu xmm3, XMMWORD PTR [rdi + rdx -1]
184 movdqu xmm1, XMMWORD PTR [rdi + rdx +1
    [all...]
  /external/boringssl/win-x86/crypto/aes/
aesni-x86.asm 376 movdqu xmm2,[esi]
377 movdqu xmm3,[16+esi]
378 movdqu xmm4,[32+esi]
379 movdqu xmm5,[48+esi]
380 movdqu xmm6,[64+esi]
381 movdqu xmm7,[80+esi]
388 movdqu xmm2,[esi]
390 movdqu xmm3,[16+esi]
392 movdqu xmm4,[32+esi]
394 movdqu xmm5,[48+esi
    [all...]
  /hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/vp9/encoder/x86/
vp9_subpel_variance_impl_sse2.asm 45 movdqu xmm5, XMMWORD PTR [rsi]
46 movdqu xmm3, XMMWORD PTR [rsi+1]
52 movdqu xmm1, XMMWORD PTR [rsi] ;
53 movdqu xmm2, XMMWORD PTR [rsi+1] ;
158 movdqu xmm5, XMMWORD PTR [rsi]
163 movdqu xmm3, XMMWORD PTR [rsi]
268 movdqu xmm5, XMMWORD PTR [rsi] ; xmm5 = s0,s1,s2..s15
269 movdqu xmm3, XMMWORD PTR [rsi+1] ; xmm3 = s1,s2,s3..s16
  /external/boringssl/src/crypto/aes/asm/
aesni-x86_64.pl 611 movdqu ($inp),$inout0 # load 8 input blocks
612 movdqu 0x10($inp),$inout1
613 movdqu 0x20($inp),$inout2
614 movdqu 0x30($inp),$inout3
615 movdqu 0x40($inp),$inout4
616 movdqu 0x50($inp),$inout5
617 movdqu 0x60($inp),$inout6
618 movdqu 0x70($inp),$inout7
626 movdqu ($inp),$inout0 # load 8 input blocks
629 movdqu 0x10($inp),$inout
    [all...]
  /external/boringssl/linux-x86/crypto/aes/
vpaes-x86.S 80 movdqu (%edx),%xmm5
135 movdqu (%edx),%xmm5
159 movdqu (%edx),%xmm5
224 movdqu (%edx),%xmm0
242 movdqu (%esi),%xmm0
251 movdqu %xmm0,(%edx)
256 movdqu %xmm3,(%edx)
272 movdqu 8(%esi),%xmm0
292 movdqu 16(%esi),%xmm0
321 movdqu %xmm0,(%edx
    [all...]

