    Searched full:movdqu (Results 1 - 25 of 46)
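
For context on the hits below: movdqu is the SSE2 "move unaligned double quadword" instruction, a 128-bit load/store between memory and an XMM register that does not require 16-byte alignment, which is why it appears in the memcpy/memset, video-codec, and pixel-matching sources listed here. As a rough orientation only (this code is not taken from any of the files below), here is a minimal C sketch using the standard SSE2 intrinsics, which compilers typically lower to movdqu (they may instead pick movups or vmovdqu depending on target flags); the helper name copy16_unaligned is illustrative:

#include <emmintrin.h>  /* SSE2 intrinsics: __m128i, _mm_loadu_si128, _mm_storeu_si128 */

/* Copy 16 bytes between possibly unaligned buffers.
 * Each unaligned intrinsic below is normally emitted as a movdqu. */
static void copy16_unaligned(void *dst, const void *src)
{
    __m128i v = _mm_loadu_si128((const __m128i *)src);  /* movdqu xmm, [src] */
    _mm_storeu_si128((__m128i *)dst, v);                 /* movdqu [dst], xmm */
}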


  /external/libvpx/vp8/common/x86/
recon_sse2.asm 145 movdqu xmm0, [rsi]
150 movdqu xmm1, [rsi+rax]
151 movdqu xmm2, [rsi+rax*2]
163 movdqu xmm3, [rsi]
166 movdqu xmm4, [rsi+rax]
168 movdqu xmm5, [rsi+rax*2]
178 movdqu xmm0, [rsi]
181 movdqu xmm1, [rsi+rax]
183 movdqu xmm2, [rsi+rax*2]
192 movdqu xmm3, [rsi
    [all...]
subpixel_sse2.asm     [all...]
iwalsh_sse2.asm 51 ;; movdqu [rdi + 0], xmm4
52 ;; movdqu [rdi + 16], xmm3
subpixel_ssse3.asm 288 movdqu xmm1, XMMWORD PTR [rsi - 2]
295 movdqu xmm3, XMMWORD PTR [rsi + 6]
376 movdqu xmm0, XMMWORD PTR [rsi - 2]
427 movdqu xmm1, XMMWORD PTR [rsi - 2]
    [all...]
loopfilter_sse2.asm     [all...]
postproc_sse2.asm 397 movdqu xmm4, [rax + rcx*2] ;vp8_rv[rcx*2]
400 movdqu xmm4, [r8 + rcx*2] ;vp8_rv[rcx*2]
402 movdqu xmm4, [sym(vp8_rv) + rcx*2]
661 movdqu xmm1,[rsi+rax] ; get the source
667 movdqu xmm2,[rdi+rax] ; get the noise for this line
669 movdqu [rsi+rax],xmm1 ; store the result
postproc_mmx.c 1002 movdqu xmm4, vp8_rv[ecx*2]
1128 movdqu xmm1, [esi+eax] // get the source
1134 movdqu xmm2, [edi+eax] // get the noise for this line
1136 movdqu [esi+eax], xmm1 // store the result
1498 movdqu [esi+eax], xmm1 // store the result local
    [all...]
  /external/llvm/test/CodeGen/X86/
sse-align-6.ll 1 ; RUN: llc < %s -march=x86-64 | grep movdqu | count 1
sse-align-12.ll 4 ; CHECK: movdqu
  /external/libvpx/vp8/encoder/x86/
variance_impl_ssse3.asm 67 movdqu xmm0, XMMWORD PTR [rsi]
68 movdqu xmm1, XMMWORD PTR [rsi+1]
92 movdqu xmm1, XMMWORD PTR [rsi]
93 movdqu xmm2, XMMWORD PTR [rsi+1]
163 movdqu xmm1, XMMWORD PTR [rsi]
173 movdqu xmm3, XMMWORD PTR [rsi]
264 movdqu xmm1, XMMWORD PTR [rsi]
265 movdqu xmm2, XMMWORD PTR [rsi+1]
variance_impl_sse2.asm 129 movdqu xmm1, XMMWORD PTR [rsi]
130 movdqu xmm2, XMMWORD PTR [rdi]
245 movdqu xmm1, XMMWORD PTR [rsi]
246 movdqu xmm2, XMMWORD PTR [rdi]
945 movdqu xmm5, XMMWORD PTR [rsi]
946 movdqu xmm3, XMMWORD PTR [rsi+1]
952 movdqu xmm1, XMMWORD PTR [rsi] ;
953 movdqu xmm2, XMMWORD PTR [rsi+1] ;
    [all...]
ssim_opt.asm 89 movdqu xmm5, [rsi]
90 movdqu xmm6, [rdi]
sad_sse3.asm 583 ;%define lddqu movdqu
594 movdqu xmm1, XMMWORD PTR [ref_ptr]
596 movdqu xmm3, XMMWORD PTR [ref_ptr+ref_stride]
602 movdqu xmm5, XMMWORD PTR [ref_ptr]
607 movdqu xmm1, XMMWORD PTR [ref_ptr+ref_stride]
  /external/v8/src/ia32/
codegen-ia32.cc 106 __ movdqu(xmm0, Operand(src, 0));
107 __ movdqu(Operand(dst, 0), xmm0);
155 __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
156 __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);
177 __ movdqu(xmm0, Operand(src, 0x00));
178 __ movdqu(xmm1, Operand(src, 0x10));
193 __ movdqu(xmm0, Operand(src, 0));
201 __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
202 __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);
  /bionic/libc/arch-x86/string/
ssse3-memcpy5.S 204 movdqu (%eax), %xmm0
239 movdqu %xmm0, (%esi)
443 movdqu %xmm0, (%esi)
488 movdqu %xmm0, (%esi)
533 movdqu %xmm0, (%esi)
578 movdqu %xmm0, (%esi)
623 movdqu %xmm0, (%esi)
668 movdqu %xmm0, (%esi)
713 movdqu %xmm0, (%esi)
758 movdqu %xmm0, (%esi
    [all...]
  /external/zlib/contrib/amd64/
amd64-match.S 293 movdqu (%windowbestlen, %rdx), %xmm1
294 movdqu (%prev, %rdx), %xmm2
296 movdqu 16(%windowbestlen, %rdx), %xmm3
297 movdqu 16(%prev, %rdx), %xmm4
299 movdqu 32(%windowbestlen, %rdx), %xmm5
300 movdqu 32(%prev, %rdx), %xmm6
302 movdqu 48(%windowbestlen, %rdx), %xmm7
303 movdqu 48(%prev, %rdx), %xmm8
  /external/valgrind/main/exp-bbv/tests/amd64-linux/
rep_prefix.S 22 movdqu %xmm1,%xmm2
23 movdqu %xmm2,%xmm1
  /external/valgrind/main/memcheck/tests/amd64/
sse_memory.stdout.exp     [all...]
sse_memory.c 301 TEST_INSN( &AllMask, 16,movdqu)
  /external/valgrind/main/memcheck/tests/x86/
sse2_memory.stdout.exp     [all...]
sse_memory.c 301 TEST_INSN( &AllMask, 16,movdqu)
  /external/v8/test/cctest/
test-disasm-ia32.cc 393 __ movdqu(xmm0, Operand(ebx, ecx, times_4, 10000));
394 __ movdqu(Operand(ebx, ecx, times_4, 10000), xmm0);
  /packages/apps/Camera/jni/feature_stab/db_vlvm/
db_feature_matching.cpp 893 movdqu [ecx+1*22],xmm7 /* move short values to patch */ local
894 movdqu [ecx+1*22+16],xmm6 /* move short values to patch */ local
914 movdqu [ecx+2*22],xmm7 /* move short values to patch */ local
915 movdqu [ecx+2*22+16],xmm6 /* move short values to patch */ local
935 movdqu [ecx+3*22],xmm7 /* move short values to patch */ local
936 movdqu [ecx+3*22+16],xmm6 /* move short values to patch */ local
956 movdqu [ecx+4*22],xmm7 /* move short values to patch */ local
957 movdqu [ecx+4*22+16],xmm6 /* move short values to patch */ local
977 movdqu [ecx+5*22],xmm7 /* move short values to patch */ local
978 movdqu [ecx+5*22+16],xmm6 /* move short values to patch */ local
998 movdqu [ecx+6*22],xmm7 /* move short values to patch */ local
999 movdqu [ecx+6*22+16],xmm6 /* move short values to patch */ local
1019 movdqu [ecx+7*22],xmm7 /* move short values to patch */ local
1020 movdqu [ecx+7*22+16],xmm6 /* move short values to patch */ local
1061 movdqu [ecx+9*22],xmm7 /* move short values to patch */ local
1062 movdqu [ecx+9*22+16],xmm6 /* move short values to patch */ local
1082 movdqu [ecx+10*22],xmm7 /* move short values to patch */ local
1083 movdqu [ecx+10*22+16],xmm6 /* move short values to patch */ local
    [all...]
  /system/core/libcutils/arch-x86/
sse2-memset32-atom.S 239 movdqu %xmm0, (%edx)
  /external/valgrind/main/none/tests/amd64/
insn_sse2.def 102 movdqu xmm.uq[0x012345678abcdef,0xfedcba9876543210] xmm.uq[0x1212121234343434,0x5656565678787878] => 1.uq[0x012345678abcdef,0xfedcba9876543210]
103 movdqu m128.uq[0x012345678abcdef,0xfedcba9876543210] xmm.uq[0x1212121234343434,0x5656565678787878] => 1.uq[0x012345678abcdef,0xfedcba9876543210]
104 movdqu xmm.uq[0x012345678abcdef,0xfedcba9876543210] m128.uq[0x1212121234343434,0x5656565678787878] => 1.uq[0x012345678abcdef,0xfedcba9876543210]
    [all...]
