    Searched full:movdqa (Results 126 - 150 of 440)


  /external/llvm/test/CodeGen/X86/
pmulld.ll 9 ; WIN64-NEXT: movdqa (%rcx), %xmm0
20 ; WIN64-NEXT: movdqa (%rcx), %xmm0
vec_shift7.ll 10 ; X32-NEXT: movdqa %xmm0, %xmm1
20 ; X64-NEXT: movdqa %xmm0, %xmm1
vector-shift-shl-128.ll 21 ; SSE2-NEXT: movdqa %xmm0, %xmm2
30 ; SSE41-NEXT: movdqa %xmm0, %xmm2
68 ; X32-SSE-NEXT: movdqa %xmm0, %xmm2
92 ; SSE2-NEXT: movdqa %xmm1, %xmm0
143 ; X32-SSE-NEXT: movdqa %xmm1, %xmm0
153 ; SSE2-NEXT: movdqa %xmm1, %xmm2
155 ; SSE2-NEXT: movdqa %xmm2, %xmm3
161 ; SSE2-NEXT: movdqa %xmm1, %xmm2
163 ; SSE2-NEXT: movdqa %xmm2, %xmm3
169 ; SSE2-NEXT: movdqa %xmm1, %xmm
    [all...]
vec_minmax_sint.ll 17 ; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
18 ; SSE2-NEXT: movdqa %xmm1, %xmm3
21 ; SSE2-NEXT: movdqa %xmm2, %xmm4
36 ; SSE41-NEXT: movdqa %xmm0, %xmm2
37 ; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,0,2147483648,0]
38 ; SSE41-NEXT: movdqa %xmm1, %xmm3
41 ; SSE41-NEXT: movdqa %xmm0, %xmm4
55 ; SSE42-NEXT: movdqa %xmm0, %xmm2
74 ; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
75 ; SSE2-NEXT: movdqa %xmm3, %xmm
    [all...]
vector-shift-ashr-128.ll 22 ; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372036854775808,9223372036854775808]
23 ; SSE2-NEXT: movdqa %xmm2, %xmm4
27 ; SSE2-NEXT: movdqa %xmm0, %xmm2
33 ; SSE2-NEXT: movdqa %xmm2, %xmm0
38 ; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [9223372036854775808,9223372036854775808]
39 ; SSE41-NEXT: movdqa %xmm2, %xmm3
44 ; SSE41-NEXT: movdqa %xmm0, %xmm3
94 ; X32-SSE-NEXT: movdqa {{.*#+}} xmm3 = [0,2147483648,0,2147483648]
95 ; X32-SSE-NEXT: movdqa %xmm3, %xmm4
100 ; X32-SSE-NEXT: movdqa %xmm0, %xmm
    [all...]
vector-shift-lshr-128.ll 21 ; SSE2-NEXT: movdqa %xmm0, %xmm2
30 ; SSE41-NEXT: movdqa %xmm0, %xmm2
70 ; X32-SSE-NEXT: movdqa %xmm0, %xmm2
84 ; SSE2-NEXT: movdqa %xmm1, %xmm2
86 ; SSE2-NEXT: movdqa %xmm0, %xmm3
88 ; SSE2-NEXT: movdqa %xmm1, %xmm2
90 ; SSE2-NEXT: movdqa %xmm0, %xmm4
95 ; SSE2-NEXT: movdqa %xmm1, %xmm4
97 ; SSE2-NEXT: movdqa %xmm0, %xmm5
108 ; SSE41-NEXT: movdqa %xmm1, %xmm
    [all...]
vec_sdiv_to_shift.ll 9 ; SSE-NEXT: movdqa %xmm0, %xmm1
14 ; SSE-NEXT: movdqa %xmm1, %xmm0
32 ; SSE-NEXT: movdqa %xmm0, %xmm1
37 ; SSE-NEXT: movdqa %xmm1, %xmm0
73 ; SSE-NEXT: movdqa %xmm1, %xmm0
105 ; SSE-NEXT: movdqa %xmm0, %xmm1
110 ; SSE-NEXT: movdqa %xmm1, %xmm0
128 ; SSE-NEXT: movdqa %xmm0, %xmm1
154 ; SSE-NEXT: movdqa %xmm0, %xmm2
159 ; SSE-NEXT: movdqa %xmm1, %xmm
    [all...]
vector-bitreverse.ll     [all...]
vec_compare-sse4.ll 9 ; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
12 ; SSE2-NEXT: movdqa %xmm0, %xmm2
24 ; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
27 ; SSE41-NEXT: movdqa %xmm0, %xmm2
  /external/swiftshader/third_party/LLVM/test/CodeGen/X86/
pmulld.ll 9 ; WIN64-NEXT: movdqa (%rcx), %xmm0
20 ; WIN64-NEXT: movdqa (%rcx), %xmm0
  /toolchain/binutils/binutils-2.27/gas/testsuite/gas/i386/
x86-64-avx-swap.s 18 movdqa %xmm8,%xmm6
50 movdqa xmm6,xmm8
  /external/libvpx/libvpx/vpx_dsp/x86/
deblock_sse2.asm 16 movdqa xmm4, xmm0
17 movdqa xmm6, xmm0
18 movdqa xmm5, xmm1
30 movdqa xmm2, flimit
32 movdqa xmm7, xmm2
43 movdqa xmm6, xmm0
44 movdqa xmm4, xmm0
45 movdqa xmm2, xmm1
59 movdqa xmm2, flimit
61 movdqa xmm3, xmm
    [all...]
vpx_subpixel_bilinear_ssse3.asm 19 movdqa xmm3, [rdx] ;load filters
55 movdqa xmm7, [rdx] ;load filters
174 movdqa xmm2, xmm0
251 movdqa xmm2, xmm0
276 movdqa xmm1, xmm0
302 movdqa xmm1, xmm0
330 movdqa xmm2, xmm0
355 movdqa xmm1, xmm0
381 movdqa xmm1, xmm0
409 movdqa xmm2, xmm
    [all...]
vpx_high_subpixel_bilinear_sse2.asm 19 movdqa xmm3, [rdx] ;load filters
33 movdqa xmm1, xmm5
74 movdqa xmm6, [rdx] ;load filters
89 movdqa xmm1, xmm8
100 movdqa xmm6, xmm0
128 movdqa xmm9, xmm0
129 movdqa xmm6, xmm2
348 movdqa xmm1, xmm0
429 movdqa xmm1, xmm0
  /external/boringssl/win-x86/crypto/fipsmodule/
ghash-x86.asm 729 movdqa xmm3,xmm2
738 movdqa xmm0,xmm2
739 movdqa xmm1,xmm0
749 movdqa xmm4,xmm3
754 movdqa xmm4,xmm0
755 movdqa xmm3,xmm0
761 movdqa xmm3,xmm0
766 movdqa xmm4,xmm0
794 movdqa xmm5,[ecx]
798 movdqa xmm1,xmm
    [all...]
sha1-586.asm 1421 movdqa xmm7,[ebp]
1422 movdqa xmm0,[16+ebp]
1423 movdqa xmm1,[32+ebp]
1424 movdqa xmm2,[48+ebp]
1425 movdqa xmm6,[64+ebp]
1432 movdqa [112+esp],xmm0
1433 movdqa [128+esp],xmm1
1434 movdqa [144+esp],xmm2
1436 movdqa [160+esp],xmm7
1438 movdqa [176+esp],xmm
    [all...]
  /external/libjpeg-turbo/simd/
jdcolext-sse2-64.asm 89 movdqa xmm5, XMMWORD [rbx] ; xmm5=Cb(0123456789ABCDEF)
90 movdqa xmm1, XMMWORD [rdx] ; xmm1=Cr(0123456789ABCDEF)
96 movdqa xmm0,xmm4 ; xmm0=xmm4={0xFF 0x00 0xFF 0x00 ..}
118 movdqa xmm2,xmm4 ; xmm2=CbE
119 movdqa xmm3,xmm5 ; xmm3=CbO
122 movdqa xmm6,xmm0 ; xmm6=CrE
123 movdqa xmm7,xmm1 ; xmm7=CrO
148 movdqa XMMWORD [wk(0)], xmm4 ; wk(0)=(B-Y)E
149 movdqa XMMWORD [wk(1)], xmm5 ; wk(1)=(B-Y)O
151 movdqa xmm4,xmm
    [all...]
jdcolext-sse2.asm 101 movdqa xmm5, XMMWORD [ebx] ; xmm5=Cb(0123456789ABCDEF)
102 movdqa xmm1, XMMWORD [edx] ; xmm1=Cr(0123456789ABCDEF)
108 movdqa xmm0,xmm4 ; xmm0=xmm4={0xFF 0x00 0xFF 0x00 ..}
130 movdqa xmm2,xmm4 ; xmm2=CbE
131 movdqa xmm3,xmm5 ; xmm3=CbO
134 movdqa xmm6,xmm0 ; xmm6=CrE
135 movdqa xmm7,xmm1 ; xmm7=CrO
160 movdqa XMMWORD [wk(0)], xmm4 ; wk(0)=(B-Y)E
161 movdqa XMMWORD [wk(1)], xmm5 ; wk(1)=(B-Y)O
163 movdqa xmm4,xmm
    [all...]
  /bionic/libc/arch-x86/atom/string/
sse2-memchr-atom.S 127 movdqa (%edi), %xmm0
143 movdqa (%edi), %xmm0
150 movdqa 16(%edi), %xmm2
157 movdqa 32(%edi), %xmm3
164 movdqa 48(%edi), %xmm4
175 movdqa (%edi), %xmm0
182 movdqa 16(%edi), %xmm2
189 movdqa 32(%edi), %xmm3
196 movdqa 48(%edi), %xmm4
213 movdqa (%edi), %xmm
    [all...]
  /external/boringssl/win-x86_64/crypto/fipsmodule/
ghash-x86_64.asm 704 movdqa xmm3,xmm2
718 movdqa xmm0,xmm2
720 movdqa xmm1,xmm0
729 movdqa xmm4,xmm3
735 movdqa xmm4,xmm0
736 movdqa xmm3,xmm0
742 movdqa xmm3,xmm0
749 movdqa xmm4,xmm0
765 movdqa xmm1,xmm0
774 movdqa xmm4,xmm
    [all...]
  /external/boringssl/src/crypto/cipher_extra/asm/
chacha20_poly1305_x86_64.pl 102 $code.="movdqa $t, $tmp_store\n" if ($dir =~ /store/);
108 movdqa $b, $t
117 movdqa $b, $t
127 $code.="movdqa $tmp_store, $t\n" if ($dir =~ /load/);
196 $code.="movdqa .chacha20_consts(%rip), $A0
197 movdqa $state1_store, $B0
198 movdqa $state2_store, $C0\n";
199 $code.="movdqa $A0, $A1
200 movdqa $B0, $B1
201 movdqa $C0, $C1\n" if ($n ge 2)
    [all...]
  /external/libvpx/libvpx/third_party/libyuv/source/
scale_win.cc 355 movdqa xmm5, xmm4
505 movdqa xmm3, xmmword ptr kShuf0
506 movdqa xmm4, xmmword ptr kShuf1
507 movdqa xmm5, xmmword ptr kShuf2
513 movdqa xmm2, xmm1
543 // Note that movdqa+palign may be better than movdqu.
554 movdqa xmm2, xmmword ptr kShuf01
555 movdqa xmm3, xmmword ptr kShuf11
556 movdqa xmm4, xmmword ptr kShuf21
557 movdqa xmm5, xmmword ptr kMadd0
    [all...]
  /external/libyuv/files/source/
scale_win.cc 357 movdqa xmm5, xmm4
510 movdqa xmm3, xmmword ptr kShuf0
511 movdqa xmm4, xmmword ptr kShuf1
512 movdqa xmm5, xmmword ptr kShuf2
518 movdqa xmm2, xmm1
548 // Note that movdqa+palign may be better than movdqu.
559 movdqa xmm2, xmmword ptr kShuf01
560 movdqa xmm3, xmmword ptr kShuf11
561 movdqa xmm4, xmmword ptr kShuf21
562 movdqa xmm5, xmmword ptr kMadd0
    [all...]
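
The comment quoted in the two scale_win.cc listings above, "movdqa+palign may be better than movdqu", refers to a classic trade-off: movdqa is the SSE2 aligned 128-bit load and requires its memory operand to be 16-byte aligned (it faults otherwise), while movdqu accepts any address. On some older x86 microarchitectures an unaligned movdqu, especially one crossing a cache line, was much slower than two aligned movdqa loads stitched together with the SSSE3 palignr instruction. Purely as an illustration of that idea, and not code taken from any of the files listed here, a minimal C intrinsics sketch (function names invented for this example, offset fixed at 4 bytes because palignr needs a constant shift) could look like:

#include <emmintrin.h>   /* SSE2:  _mm_load_si128 / _mm_loadu_si128 */
#include <tmmintrin.h>   /* SSSE3: _mm_alignr_epi8 (palignr)        */
#include <stdint.h>

/* movdqu path: a single unaligned 16-byte load from any address. */
static __m128i load16_movdqu(const uint8_t *p) {
    return _mm_loadu_si128((const __m128i *)p);
}

/* movdqa+palignr path: two aligned loads from the enclosing 16-byte
 * blocks, combined so the result starts 4 bytes past `base`.
 * `base` must be 16-byte aligned and base[0..31] must be readable,
 * so this only applies where over-reading the buffer is safe. */
static __m128i load16_movdqa_palignr_off4(const uint8_t *base) {
    __m128i lo = _mm_load_si128((const __m128i *)base);        /* movdqa */
    __m128i hi = _mm_load_si128((const __m128i *)(base + 16)); /* movdqa */
    return _mm_alignr_epi8(hi, lo, 4);                         /* palignr */
}

A real routine like the ones in scale_win.cc would specialize the constant shift per source offset (or simply use movdqu on CPUs where unaligned loads are cheap), which is why the original comment hedges with "may be better".
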
  /external/libffi/src/x86/
darwin64.S 190 movdqa 48(%r10), %xmm0
191 movdqa 64(%r10), %xmm1
192 movdqa 80(%r10), %xmm2
193 movdqa 96(%r10), %xmm3
194 movdqa 112(%r10), %xmm4
195 movdqa 128(%r10), %xmm5
196 movdqa 144(%r10), %xmm6
197 movdqa 160(%r10), %xmm7
307 movdqa %xmm0, 48(%rsp)
308 movdqa %xmm1, 64(%rsp
    [all...]
unix64.S 193 movdqa 48(%r10), %xmm0
194 movdqa 64(%r10), %xmm1
195 movdqa 80(%r10), %xmm2
196 movdqa 96(%r10), %xmm3
197 movdqa 112(%r10), %xmm4
198 movdqa 128(%r10), %xmm5
199 movdqa 144(%r10), %xmm6
200 movdqa 160(%r10), %xmm7
315 movdqa %xmm0, 48(%rsp)
316 movdqa %xmm1, 64(%rsp
    [all...]

