Searched refs: movdqa (Results 1 - 25 of 148)
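
For orientation while scanning these hits: movdqa is the SSE2 "move aligned double quadword" instruction, a 16-byte register-to-register copy or load/store whose memory operand must be 16-byte aligned (an unaligned address faults, which is why these routines take care over alignment). The sketch below is a minimal illustration of the aligned load/store pattern, written with SSE2 intrinsics that compilers typically lower to movdqa; it is not taken from any of the files listed here, and the function name fill16 is made up for the example.

    /* Minimal sketch, assuming dst is 16-byte aligned and SSE2 is available. */
    #include <emmintrin.h>   /* SSE2 intrinsics */
    #include <stddef.h>
    #include <stdint.h>

    static void fill16(uint8_t *dst, uint8_t value, size_t n)
    {
        __m128i v = _mm_set1_epi8((char)value);           /* broadcast the byte into an xmm register */
        for (size_t i = 0; i + 16 <= n; i += 16)
            _mm_store_si128((__m128i *)(dst + i), v);     /* aligned 16-byte store: movdqa [dst+i], xmm */
    }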


  /external/chromium_org/third_party/yasm/source/patched-yasm/modules/arch/x86/tests/
simd64-1.asm 2 movdqa xmm10, xmm1 label
  /external/libvpx/libvpx/vp8/common/x86/
loopfilter_block_sse2.asm 18 movdqa scratch1, %2 ; v2
31 movdqa scratch2, %3 ; save p1
68 movdqa scratch2, %6 ; save hev
72 movdqa scratch1, %1
78 movdqa scratch1, %3
85 movdqa scratch2, %5
90 movdqa scratch1, zero
101 movdqa scratch1, zero
113 movdqa scratch1, zero
199 movdqa xmm0, i
    [all...]
idctllm_sse2.asm 128 movdqa xmm0, [rax]
129 movdqa xmm2, [rax+16]
130 movdqa xmm1, [rax+32]
131 movdqa xmm3, [rax+48]
134 movdqa [rax], xmm7
135 movdqa [rax+16], xmm7
136 movdqa [rax+32], xmm7
137 movdqa [rax+48], xmm7
147 movdqa xmm4, xmm0
154 movdqa xmm4, xmm
    [all...]
  /external/chromium_org/third_party/openssl/openssl/crypto/aes/asm/
vpaes-x86.pl 161 &movdqa ("xmm7",&QWP($k_inv,$const));
162 &movdqa ("xmm6",&QWP($k_s0F,$const));
183 &movdqa ("xmm1","xmm6")
184 &movdqa ("xmm2",&QWP($k_ipt,$const));
190 &movdqa ("xmm0",&QWP($k_ipt+16,$const));
201 &movdqa ("xmm4",&QWP($k_sb1,$const)); # 4 : sb1u
204 &movdqa ("xmm0",&QWP($k_sb1+16,$const));# 0 : sb1t
207 &movdqa ("xmm5",&QWP($k_sb2,$const)); # 4 : sb2u
209 &movdqa ("xmm1",&QWP(-0x40,$base,$magic));# .Lk_mc_forward[]
210 &movdqa ("xmm2",&QWP($k_sb2+16,$const));# 2 : sb2
    [all...]
vpaes-x86.S 64 movdqa -48(%ebp),%xmm7
65 movdqa -16(%ebp),%xmm6
73 movdqa %xmm6,%xmm1
74 movdqa (%ebp),%xmm2
80 movdqa 16(%ebp),%xmm0
89 movdqa 32(%ebp),%xmm4
92 movdqa 48(%ebp),%xmm0
95 movdqa 64(%ebp),%xmm5
97 movdqa -64(%ebx,%ecx,1),%xmm1
98 movdqa 80(%ebp),%xmm
    [all...]
vpaes-x86_64.S 24 movdqa %xmm9,%xmm1
25 movdqa .Lk_ipt(%rip),%xmm2
31 movdqa .Lk_ipt+16(%rip),%xmm0
42 movdqa %xmm13,%xmm4
45 movdqa %xmm12,%xmm0
48 movdqa %xmm15,%xmm5
50 movdqa -64(%r11,%r10,1),%xmm1
51 movdqa %xmm14,%xmm2
54 movdqa (%r11,%r10,1),%xmm4
55 movdqa %xmm0,%xmm
    [all...]
bsaes-x86_64.S 11 movdqa (%rax),%xmm8
13 movdqa 80(%r11),%xmm7
31 movdqa 0(%r11),%xmm7
32 movdqa 16(%r11),%xmm8
33 movdqa %xmm5,%xmm9
35 movdqa %xmm3,%xmm10
47 movdqa %xmm1,%xmm9
49 movdqa %xmm15,%xmm10
61 movdqa 32(%r11),%xmm7
62 movdqa %xmm4,%xmm
    [all...]
vpaes-x86_64.pl 88 movdqa %xmm9, %xmm1
89 movdqa .Lk_ipt(%rip), %xmm2 # iptlo
95 movdqa .Lk_ipt+16(%rip), %xmm0 # ipthi
106 movdqa %xmm13, %xmm4 # 4 : sb1u
109 movdqa %xmm12, %xmm0 # 0 : sb1t
112 movdqa %xmm15, %xmm5 # 4 : sb2u
114 movdqa -0x40(%r11,%r10), %xmm1 # .Lk_mc_forward[]
115 movdqa %xmm14, %xmm2 # 2 : sb2t
118 movdqa (%r11,%r10), %xmm4 # .Lk_mc_backward[]
119 movdqa %xmm0, %xmm3 # 3 =
    [all...]
  /external/openssl/crypto/aes/asm/
vpaes-x86.pl 161 &movdqa ("xmm7",&QWP($k_inv,$const));
162 &movdqa ("xmm6",&QWP($k_s0F,$const));
183 &movdqa ("xmm1","xmm6")
184 &movdqa ("xmm2",&QWP($k_ipt,$const));
190 &movdqa ("xmm0",&QWP($k_ipt+16,$const));
201 &movdqa ("xmm4",&QWP($k_sb1,$const)); # 4 : sb1u
204 &movdqa ("xmm0",&QWP($k_sb1+16,$const));# 0 : sb1t
207 &movdqa ("xmm5",&QWP($k_sb2,$const)); # 4 : sb2u
209 &movdqa ("xmm1",&QWP(-0x40,$base,$magic));# .Lk_mc_forward[]
210 &movdqa ("xmm2",&QWP($k_sb2+16,$const));# 2 : sb2
    [all...]
vpaes-x86.S 64 movdqa -48(%ebp),%xmm7
65 movdqa -16(%ebp),%xmm6
73 movdqa %xmm6,%xmm1
74 movdqa (%ebp),%xmm2
80 movdqa 16(%ebp),%xmm0
89 movdqa 32(%ebp),%xmm4
92 movdqa 48(%ebp),%xmm0
95 movdqa 64(%ebp),%xmm5
97 movdqa -64(%ebx,%ecx,1),%xmm1
98 movdqa 80(%ebp),%xmm
    [all...]
vpaes-x86_64.S 24 movdqa %xmm9,%xmm1
25 movdqa .Lk_ipt(%rip),%xmm2
31 movdqa .Lk_ipt+16(%rip),%xmm0
42 movdqa %xmm13,%xmm4
45 movdqa %xmm12,%xmm0
48 movdqa %xmm15,%xmm5
50 movdqa -64(%r11,%r10,1),%xmm1
51 movdqa %xmm14,%xmm2
54 movdqa (%r11,%r10,1),%xmm4
55 movdqa %xmm0,%xmm
    [all...]
bsaes-x86_64.S 11 movdqa (%rax),%xmm8
13 movdqa 80(%r11),%xmm7
31 movdqa 0(%r11),%xmm7
32 movdqa 16(%r11),%xmm8
33 movdqa %xmm5,%xmm9
35 movdqa %xmm3,%xmm10
47 movdqa %xmm1,%xmm9
49 movdqa %xmm15,%xmm10
61 movdqa 32(%r11),%xmm7
62 movdqa %xmm4,%xmm
    [all...]
vpaes-x86_64.pl 88 movdqa %xmm9, %xmm1
89 movdqa .Lk_ipt(%rip), %xmm2 # iptlo
95 movdqa .Lk_ipt+16(%rip), %xmm0 # ipthi
106 movdqa %xmm13, %xmm4 # 4 : sb1u
109 movdqa %xmm12, %xmm0 # 0 : sb1t
112 movdqa %xmm15, %xmm5 # 4 : sb2u
114 movdqa -0x40(%r11,%r10), %xmm1 # .Lk_mc_forward[]
115 movdqa %xmm14, %xmm2 # 2 : sb2t
118 movdqa (%r11,%r10), %xmm4 # .Lk_mc_backward[]
119 movdqa %xmm0, %xmm3 # 3 =
    [all...]
  /bionic/libc/arch-x86/string/
sse2-memset-atom.S 357 movdqa %xmm0, (%edx)
358 movdqa %xmm0, 0x10(%edx)
359 movdqa %xmm0, 0x20(%edx)
360 movdqa %xmm0, 0x30(%edx)
361 movdqa %xmm0, 0x40(%edx)
362 movdqa %xmm0, 0x50(%edx)
363 movdqa %xmm0, 0x60(%edx)
364 movdqa %xmm0, 0x70(%edx)
370 movdqa %xmm0, (%edx)
371 movdqa %xmm0, 0x10(%edx
    [all...]
ssse3-memcpy-atom.S 228 movdqa (%eax, %edi), %xmm0
229 movdqa 16(%eax, %edi), %xmm1
231 movdqa %xmm0, (%edx, %edi)
232 movdqa %xmm1, 16(%edx, %edi)
236 movdqa (%eax, %edi), %xmm0
237 movdqa 16(%eax, %edi), %xmm1
239 movdqa %xmm0, (%edx, %edi)
240 movdqa %xmm1, 16(%edx, %edi)
244 movdqa (%eax, %edi), %xmm0
245 movdqa 16(%eax, %edi), %xmm
    [all...]
ssse3-strcmp-atom.S 277 movdqa (%eax), %xmm1
294 movdqa (%eax, %ecx), %xmm1
295 movdqa (%edx, %ecx), %xmm2
320 movdqa (%edx), %xmm2
321 movdqa (%eax), %xmm1
335 movdqa (%edx), %xmm3
349 movdqa (%eax, %ecx), %xmm1
350 movdqa (%edx, %ecx), %xmm2
351 movdqa %xmm2, %xmm4
368 movdqa %xmm4, %xmm
    [all...]
  /external/lzma/Asm/x86/
AesOpt.asm 23 movdqa [r4 + 8], xmm6
24 movdqa [r4 + 8 + 16], xmm7
35 movdqa reg, [r1]
45 movdqa xmm6, [r4 + 8]
46 movdqa xmm7, [r4 + 8 + 16]
68 movdqa xmm7, [r1 + r3 offs]
77 movdqa xmm6, [rD + offs]
78 movdqa [rD + offs], reg
101 OP_W movdqa, [rD + i * 16]
115 movdqa xmm1, [rD]
    [all...]
  /external/libvpx/libvpx/vp8/encoder/x86/
quantize_ssse3.asm 55 movdqa xmm0, [rax]
56 movdqa xmm4, [rax + 16]
59 movdqa xmm2, [rcx]
60 movdqa xmm3, [rcx + 16]
62 movdqa xmm1, xmm0
63 movdqa xmm5, xmm4
83 movdqa xmm2, xmm1 ;store y for getting eob
84 movdqa xmm3, xmm5
91 movdqa [rax], xmm1
92 movdqa [rax + 16], xmm
    [all...]
temporal_filter_apply_sse2.asm 47 movdqa [rsp + strength], xmm6 ; where strength is used, all 16 bytes are read
54 movdqa xmm5, [GLOBAL(_const_top_bit)]
56 movdqa [rsp + rounding_bit], xmm5
67 movdqa [rsp + filter_weight], xmm0
87 movdqa xmm0, [rsi] ; src (frame1)
89 movdqa xmm1, xmm0
94 movdqa xmm2, [rdx] ; predictor (frame2)
95 movdqa xmm3, xmm2
121 movdqa xmm3, [GLOBAL(_const_16w)]
122 movdqa xmm2, [GLOBAL(_const_16w)
    [all...]
dct_sse2.asm 78 movdqa xmm2, xmm0
81 movdqa xmm1, xmm0
87 movdqa xmm3, xmm0
93 movdqa xmm1, xmm0
96 movdqa xmm4, xmm3
111 movdqa xmm2, xmm0
115 movdqa xmm3, xmm0
118 movdqa xmm2, xmm0
122 movdqa xmm5, XMMWORD PTR[GLOBAL(_7)]
124 movdqa xmm3, xmm
    [all...]
quantize_sse4.asm 61 movdqa xmm0, [rax]
62 movdqa xmm1, [rax + 16]
68 movdqa xmm2, xmm0
69 movdqa xmm3, xmm1
84 movdqa xmm4, [rcx]
85 movdqa xmm5, [rcx + 16]
91 movdqa xmm6, xmm2
92 movdqa xmm7, xmm3
99 movdqa xmm4, [rdx]
100 movdqa xmm5, [rdx + 16
    [all...]
  /system/core/libcutils/arch-x86/
sse2-memset32-atom.S 295 movdqa %xmm0, (%edx)
296 movdqa %xmm0, 0x10(%edx)
297 movdqa %xmm0, 0x20(%edx)
298 movdqa %xmm0, 0x30(%edx)
299 movdqa %xmm0, 0x40(%edx)
300 movdqa %xmm0, 0x50(%edx)
301 movdqa %xmm0, 0x60(%edx)
302 movdqa %xmm0, 0x70(%edx)
308 movdqa %xmm0, (%edx)
309 movdqa %xmm0, 0x10(%edx
    [all...]
sse2-memset16-atom.S 381 movdqa %xmm0, (%edx)
382 movdqa %xmm0, 0x10(%edx)
383 movdqa %xmm0, 0x20(%edx)
384 movdqa %xmm0, 0x30(%edx)
385 movdqa %xmm0, 0x40(%edx)
386 movdqa %xmm0, 0x50(%edx)
387 movdqa %xmm0, 0x60(%edx)
388 movdqa %xmm0, 0x70(%edx)
394 movdqa %xmm0, (%edx)
395 movdqa %xmm0, 0x10(%edx
    [all...]
  /external/libvpx/libvpx/vp9/encoder/x86/
vp9_temporal_filter_apply_sse2.asm 47 movdqa [rsp + strength], xmm6 ; where strength is used, all 16 bytes are read
54 movdqa xmm5, [GLOBAL(_const_top_bit)]
56 movdqa [rsp + rounding_bit], xmm5
67 movdqa [rsp + filter_weight], xmm0
87 movdqa xmm0, [rsi] ; src (frame1)
89 movdqa xmm1, xmm0
94 movdqa xmm2, [rdx] ; predictor (frame2)
95 movdqa xmm3, xmm2
121 movdqa xmm3, [GLOBAL(_const_16w)]
122 movdqa xmm2, [GLOBAL(_const_16w)
    [all...]
  /external/libyuv/files/source/
row_win.cc 139 movdqa xmm1, xmm0
144 movdqa [edx], xmm0 local
145 movdqa [edx + 16], xmm1 local
159 movdqa xmm5, kShuffleMaskBGRAToARGB
164 movdqa xmm0, [eax]
167 movdqa [eax + edx], xmm0 local
180 movdqa xmm5, kShuffleMaskABGRToARGB
185 movdqa xmm0, [eax]
188 movdqa [eax + edx], xmm0 local
201 movdqa xmm5, kShuffleMaskRGBAToARG
209 movdqa [eax + edx], xmm0 local
230 movdqa [eax + edx], xmm0 local
259 movdqa [edx + 32], xmm2 local
262 movdqa [edx], xmm0 local
266 movdqa [edx + 16], xmm1 local
269 movdqa [edx + 48], xmm3 local
299 movdqa [edx + 32], xmm2 local
302 movdqa [edx], xmm0 local
306 movdqa [edx + 16], xmm1 local
309 movdqa [edx + 48], xmm3 local
364 movdqa [eax * 2 + edx], xmm1 // store 4 pixels of ARGB local
365 movdqa [eax * 2 + edx + 16], xmm2 // store next 4 pixels of ARGB local
418 movdqa [eax * 2 + edx], xmm1 // store 4 pixels of ARGB local
419 movdqa [eax * 2 + edx + 16], xmm2 // store next 4 pixels of ARGB local
458 movdqa [eax * 2 + edx], xmm0 // store 4 pixels of ARGB local
459 movdqa [eax * 2 + edx + 16], xmm1 // store next 4 pixels of ARGB local
492 movdqa [edx], xmm0 // store 0 local
497 movdqa [edx + 16], xmm1 // store 1 local
498 movdqa [edx + 32], xmm2 // store 2 local
531 movdqa [edx], xmm0 // store 0 local
536 movdqa [edx + 16], xmm1 // store 1 local
537 movdqa [edx + 32], xmm2 // store 2 local
684 movdqa [edx], xmm0 local
752 movdqa [edx], xmm0 local
820 movdqa [edx], xmm0 local
888 movdqa [edx], xmm0 local
1645 movdqa [edx], xmm0 local
1646 movdqa [edx + 16], xmm1 local
1688 movdqa [edx], xmm0 local
1689 movdqa [edx + 16], xmm1 local
1732 movdqa [edx], xmm0 local
1733 movdqa [edx + 16], xmm1 local
1771 movdqa [edx], xmm0 local
1772 movdqa [edx + 16], xmm1 local
1809 movdqa [edx], xmm0 local
1810 movdqa [edx + 16], xmm1 local
2056 movdqa [edx], xmm5 local
2057 movdqa [edx + 16], xmm0 local
2138 movdqa [edx], xmm2 local
2139 movdqa [edx + 16], xmm1 local
2220 movdqa [edx], xmm5 local
2221 movdqa [edx + 16], xmm0 local
2310 movdqa [edx], xmm0 local
2311 movdqa [edx + 16], xmm1 local
2342 movdqa [edx], xmm0 local
2437 movdqa [edx], xmm0 local
2471 movdqa [edx], xmm0 local
2472 movdqa [edx + edi], xmm2 local
2497 movdqa [eax + edx], xmm0 local
2498 movdqa [eax + edx + 16], xmm1 local
2593 movdqa [edx], xmm0 local
2805 movdqa [edx], xmm0 local
3079 movdqa [edx], xmm0 local
3214 movdqa [edx], xmm0 local
3240 movdqa [edx], xmm0 local
3315 movdqa [eax + edx], xmm0 local
3364 movdqa [eax + edx], xmm0 local
3418 movdqa [eax + edx], xmm0 local
3466 movdqa [eax + edx], xmm0 local
3467 movdqa [eax + edx + 16], xmm1 local
3537 movdqa [eax], xmm0 local
3538 movdqa [eax + 16], xmm1 local
3600 movdqa [eax], xmm0 local
3601 movdqa [eax + 16], xmm1 local
3696 movdqa [eax], xmm0 local
3861 movdqa [edx], xmm2 local
3862 movdqa [edx + 16], xmm3 local
3863 movdqa [edx + 32], xmm4 local
3864 movdqa [edx + 48], xmm5 local
3921 movdqa [eax + edx], xmm0 local
4057 movdqa [esi + edi], xmm0 local
4069 movdqa [esi + edi], xmm0 local
4082 movdqa [esi + edi], xmm0 local
    [all...]
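
Many of the row_win.cc hits follow a single pattern: one movdqa loads a byte-shuffle mask into a register (for example kShuffleMaskBGRAToARGB into xmm5), then the loop performs an aligned load, a pshufb, and an aligned store per group of 4 pixels. The sketch below imitates that shape with SSSE3 intrinsics; the mask (swapping bytes 0 and 2 of each 4-byte pixel) and the function name are illustrative assumptions, not libyuv's actual values.

    #include <tmmintrin.h>   /* SSSE3: _mm_shuffle_epi8 (pshufb) */
    #include <stddef.h>
    #include <stdint.h>

    /* Illustrative channel swap over one row; src and dst are assumed 16-byte
       aligned and width_pixels a multiple of 4 (4 pixels per xmm register). */
    static void swap_channels_row(uint8_t *dst, const uint8_t *src, size_t width_pixels)
    {
        const __m128i mask = _mm_setr_epi8(2, 1, 0, 3,   6, 5, 4, 7,
                                           10, 9, 8, 11, 14, 13, 12, 15);
        for (size_t i = 0; i < width_pixels; i += 4) {
            __m128i px = _mm_load_si128((const __m128i *)(src + i * 4));  /* movdqa xmm0, [eax]       */
            px = _mm_shuffle_epi8(px, mask);                              /* pshufb xmm0, xmm5        */
            _mm_store_si128((__m128i *)(dst + i * 4), px);                /* movdqa [eax + edx], xmm0 */
        }
    }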

