/external/libvpx/libvpx/vp8/common/x86/

loopfilter_block_sse2.asm
     18: movdqa scratch1, %2 ; v2
     31: movdqa scratch2, %3 ; save p1
     68: movdqa scratch2, %6 ; save hev
     72: movdqa scratch1, %1
     78: movdqa scratch1, %3
     85: movdqa scratch2, %5
     90: movdqa scratch1, zero
    101: movdqa scratch1, zero
    113: movdqa scratch1, zero
    198: movdqa xmm0, i…
    [additional matches not shown]

loopfilter_sse2.asm
     30: movdqa xmm2, [rdi+2*rax] ; q3
     31: movdqa xmm1, [rsi+2*rax] ; q2
     32: movdqa xmm4, [rsi+rax] ; q1
     33: movdqa xmm5, [rsi] ; q0
     49: movdqa [rsp+_q2], xmm1 ; store q2
     50: movdqa [rsp+_q1], xmm4 ; store q1
     52: movdqa xmm7, [rdx] ;limit
     54: movdqa xmm6, xmm1 ; q2
     55: movdqa xmm3, xmm4 ; q1
     66: movdqa xmm0, xmm5 ; q…
    [additional matches not shown]

recon_sse2.asm
     41: movdqa [rdi], xmm0
     44: movdqa [rdi+rcx], xmm1
     45: movdqa [rdi+rcx*2],xmm2
     56: movdqa [rdi], xmm3
     59: movdqa [rdi+rcx], xmm4
     60: movdqa [rdi+rcx*2],xmm5
     71: movdqa [rdi], xmm0
     74: movdqa [rdi+rcx], xmm1
     76: movdqa [rdi+rcx*2], xmm2
     86: movdqa [rdi], xmm…
    [additional matches not shown]

idctllm_sse2.asm
    128: movdqa xmm0, [rax]
    129: movdqa xmm2, [rax+16]
    130: movdqa xmm1, [rax+32]
    131: movdqa xmm3, [rax+48]
    134: movdqa [rax], xmm7
    135: movdqa [rax+16], xmm7
    136: movdqa [rax+32], xmm7
    137: movdqa [rax+48], xmm7
    147: movdqa xmm4, xmm0
    154: movdqa xmm4, xmm…
    [additional matches not shown]

subpixel_ssse3.asm
     52: movdqa xmm7, [GLOBAL(rd)]
     61: movdqa xmm4, XMMWORD PTR [rax] ;k0_k5
     62: movdqa xmm5, XMMWORD PTR [rax+256] ;k2_k4
     63: movdqa xmm6, XMMWORD PTR [rax+128] ;k1_k3
     80: movdqa xmm1, xmm0
     83: movdqa xmm2, xmm1
    117: movdqa xmm5, XMMWORD PTR [rax+256] ;k2_k4
    118: movdqa xmm6, XMMWORD PTR [rax+128] ;k1_k3
    120: movdqa xmm3, XMMWORD PTR [GLOBAL(shuf2bfrom1)]
    121: movdqa xmm4, XMMWORD PTR [GLOBAL(shuf3bfrom1)…
    [additional matches not shown]

iwalsh_sse2.asm
     26: movdqa xmm0, [rcx + 0] ;ip[4] ip[0]
     27: movdqa xmm1, [rcx + 16] ;ip[12] ip[8]
     31: movdqa xmm3, xmm0 ;ip[4] ip[0]
     36: movdqa xmm4, xmm0
     40: movdqa xmm1, xmm4 ;c1 b1
     49: movdqa xmm3, xmm4 ; 13 12 11 10 03 02 01 00
     52: movdqa xmm1, xmm4 ; 23 03 22 02 21 01 20 00
     58: movdqa xmm3, xmm4 ;ip[4] ip[0]
     65: movdqa xmm5, xmm4
     69: movdqa xmm1, xmm5 ;c1 b…
    [additional matches not shown]
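All of the VP8 loop-filter, reconstruction, IDCT and sub-pixel routines above lean on movdqa, the aligned 128-bit SSE2 move: it transfers 16 bytes per instruction but faults if the memory operand is not 16-byte aligned, which is why these files move whole pixel rows and coefficient blocks through it. A minimal C sketch of that aligned load/store pattern, using the _mm_load_si128/_mm_store_si128 intrinsics that compile to movdqa; the function and argument names are illustrative, not taken from libvpx:

    #include <emmintrin.h>   /* SSE2 intrinsics */
    #include <stdint.h>

    /* Copy one 16-byte row, as the VP8 filters do with movdqa.
     * Both pointers must be 16-byte aligned; an unaligned address
     * would fault, exactly as movdqa does. */
    static void copy_row_aligned16(const uint8_t *src, uint8_t *dst) {
        __m128i row = _mm_load_si128((const __m128i *)src);  /* movdqa load  */
        _mm_store_si128((__m128i *)dst, row);                 /* movdqa store */
    }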
/external/libvpx/libvpx/vp8/encoder/x86/

quantize_ssse3.asm
     55: movdqa xmm0, [rax]
     56: movdqa xmm4, [rax + 16]
     59: movdqa xmm2, [rcx]
     60: movdqa xmm3, [rcx + 16]
     62: movdqa xmm1, xmm0
     63: movdqa xmm5, xmm4
     83: movdqa xmm2, xmm1 ;store y for getting eob
     84: movdqa xmm3, xmm5
     91: movdqa [rax], xmm1
     92: movdqa [rax + 16], xmm…
    [additional matches not shown]

quantize_sse2.asm
     63: movdqa xmm0, [rdx]
     64: movdqa xmm4, [rdx + 16]
     70: movdqa xmm1, xmm0
     71: movdqa xmm5, xmm4
     85: movdqa xmm2, [rcx]
     86: movdqa xmm3, [rcx + 16]
     96: movdqa [rsp + abs_minus_zbin], xmm1
     97: movdqa [rsp + abs_minus_zbin + 16], xmm5
    103: movdqa xmm2, [rdx]
    104: movdqa xmm6, [rdx + 16…
    [additional matches not shown]

temporal_filter_apply_sse2.asm
     47: movdqa [rsp + strength], xmm6 ; where strength is used, all 16 bytes are read
     54: movdqa xmm5, [GLOBAL(_const_top_bit)]
     56: movdqa [rsp + rounding_bit], xmm5
     67: movdqa [rsp + filter_weight], xmm0
     87: movdqa xmm0, [rsi] ; src (frame1)
     89: movdqa xmm1, xmm0
     94: movdqa xmm2, [rdx] ; predictor (frame2)
     95: movdqa xmm3, xmm2
    121: movdqa xmm3, [GLOBAL(_const_16w)]
    122: movdqa xmm2, [GLOBAL(_const_16w)…
    [additional matches not shown]

subtract_sse2.asm
     90: movdqa xmm4, [GLOBAL(t80)]
     96: movdqa xmm0, [rsi] ; src
     97: movdqa xmm1, [rax] ; pred
     99: movdqa xmm2, xmm0
    106: movdqa xmm2, xmm0
    110: movdqa xmm3, [rsi + rdx]
    111: movdqa xmm5, [rax + rbx]
    116: movdqa [rdi], xmm0
    117: movdqa [rdi +16], xmm2
    119: movdqa xmm1, xmm…
    [additional matches not shown]

quantize_sse4.asm
     61: movdqa xmm0, [rax]
     62: movdqa xmm1, [rax + 16]
     68: movdqa xmm2, xmm0
     69: movdqa xmm3, xmm1
     84: movdqa xmm4, [rcx]
     85: movdqa xmm5, [rcx + 16]
     91: movdqa xmm6, xmm2
     92: movdqa xmm7, xmm3
     99: movdqa xmm4, [rdx]
    100: movdqa xmm5, [rdx + 16…
    [additional matches not shown]

dct_sse2.asm
     78: movdqa xmm2, xmm0
     81: movdqa xmm1, xmm0
     87: movdqa xmm3, xmm0
     93: movdqa xmm1, xmm0
     96: movdqa xmm4, xmm3
    111: movdqa xmm2, xmm0
    115: movdqa xmm3, xmm0
    118: movdqa xmm2, xmm0
    122: movdqa xmm5, XMMWORD PTR[GLOBAL(_7)]
    124: movdqa xmm3, xmm…
    [additional matches not shown]

encodeopt.asm
     27: movdqa xmm0, [rsi]
     28: movdqa xmm1, [rdi]
     30: movdqa xmm2, [rsi+16]
     31: movdqa xmm3, [rdi+16]
     42: movdqa xmm1, xmm0
     48: movdqa xmm1, xmm0
    230: movdqa xmm0, [rsi]
    231: movdqa xmm1, [rdi]
    233: movdqa xmm2, [rsi+16]
    234: movdqa xmm3, [rdi+16…
    [additional matches not shown]

fwalsh_sse2.asm
     40: movdqa xmm1, xmm0
     44: movdqa xmm2, xmm0
     51: movdqa xmm1, xmm0
     61: movdqa xmm2, xmm0
     77: movdqa xmm2, xmm0
     80: movdqa xmm3, xmm1
     89: movdqa xmm0, xmm4
     92: movdqa xmm1, xmm6
     96: movdqa xmm2, xmm0
     99: movdqa xmm3, xmm…
    [additional matches not shown]
/bionic/libc/arch-x86/string/

sse2-memset5-atom.S
    350: movdqa %xmm0, (%edx)
    351: movdqa %xmm0, 0x10(%edx)
    352: movdqa %xmm0, 0x20(%edx)
    353: movdqa %xmm0, 0x30(%edx)
    354: movdqa %xmm0, 0x40(%edx)
    355: movdqa %xmm0, 0x50(%edx)
    356: movdqa %xmm0, 0x60(%edx)
    357: movdqa %xmm0, 0x70(%edx)
    363: movdqa %xmm0, (%edx)
    364: movdqa %xmm0, 0x10(%edx…
    [additional matches not shown]

ssse3-memcpy5.S
    225: movdqa (%eax, %edi), %xmm0
    226: movdqa 16(%eax, %edi), %xmm1
    228: movdqa %xmm0, (%edx, %edi)
    229: movdqa %xmm1, 16(%edx, %edi)
    233: movdqa (%eax, %edi), %xmm0
    234: movdqa 16(%eax, %edi), %xmm1
    236: movdqa %xmm0, (%edx, %edi)
    237: movdqa %xmm1, 16(%edx, %edi)
    241: movdqa (%eax, %edi), %xmm0
    242: movdqa 16(%eax, %edi), %xmm…
    [additional matches not shown]

ssse3-strcmp-latest.S
    273: movdqa (%eax), %xmm1
    290: movdqa (%eax, %ecx), %xmm1
    291: movdqa (%edx, %ecx), %xmm2
    316: movdqa (%edx), %xmm2
    317: movdqa (%eax), %xmm1
    331: movdqa (%edx), %xmm3
    345: movdqa (%eax, %ecx), %xmm1
    346: movdqa (%edx, %ecx), %xmm2
    347: movdqa %xmm2, %xmm4
    364: movdqa %xmm4, %xmm…
    [additional matches not shown]
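The bionic memset variant above shows the classic SSE2 fill pattern: the byte is broadcast into %xmm0, the destination is first brought to a 16-byte boundary, and the unrolled block then issues eight back-to-back movdqa stores covering 128 bytes (offsets 0x00 through 0x70, lines 350-357 above). A hedged C-intrinsics sketch of just that inner block; it assumes dst is already 16-byte aligned and n is a multiple of 128, conditions the real assembly establishes in its prologue and tail handling, and the function name is illustrative:

    #include <emmintrin.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Inner loop only: fill n bytes (n % 128 == 0, dst 16-byte aligned)
     * with byte c, mirroring the unrolled movdqa stores in
     * sse2-memset5-atom.S. */
    static void fill128_aligned(uint8_t *dst, uint8_t c, size_t n) {
        const __m128i v = _mm_set1_epi8((char)c);      /* broadcast c into an XMM register */
        for (uint8_t *end = dst + n; dst != end; dst += 128) {
            _mm_store_si128((__m128i *)(dst +   0), v);
            _mm_store_si128((__m128i *)(dst +  16), v);
            _mm_store_si128((__m128i *)(dst +  32), v);
            _mm_store_si128((__m128i *)(dst +  48), v);
            _mm_store_si128((__m128i *)(dst +  64), v);
            _mm_store_si128((__m128i *)(dst +  80), v);
            _mm_store_si128((__m128i *)(dst +  96), v);
            _mm_store_si128((__m128i *)(dst + 112), v);
        }
    }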
/external/openssl/crypto/aes/asm/

vpaes-x86.S
     64: movdqa -48(%ebp),%xmm7
     65: movdqa -16(%ebp),%xmm6
     73: movdqa %xmm6,%xmm1
     74: movdqa (%ebp),%xmm2
     80: movdqa 16(%ebp),%xmm0
     89: movdqa 32(%ebp),%xmm4
     92: movdqa 48(%ebp),%xmm0
     95: movdqa 64(%ebp),%xmm5
     97: movdqa -64(%ebx,%ecx,1),%xmm1
     98: movdqa 80(%ebp),%xmm…
    [additional matches not shown]

vpaes-x86_64.S
     24: movdqa %xmm9,%xmm1
     25: movdqa .Lk_ipt(%rip),%xmm2
     31: movdqa .Lk_ipt+16(%rip),%xmm0
     42: movdqa %xmm13,%xmm4
     45: movdqa %xmm12,%xmm0
     48: movdqa %xmm15,%xmm5
     50: movdqa -64(%r11,%r10,1),%xmm1
     51: movdqa %xmm14,%xmm2
     54: movdqa (%r11,%r10,1),%xmm4
     55: movdqa %xmm0,%xmm…
    [additional matches not shown]

bsaes-x86_64.S
     11: movdqa (%rax),%xmm8
     13: movdqa 80(%r11),%xmm7
     31: movdqa 0(%r11),%xmm7
     32: movdqa 16(%r11),%xmm8
     33: movdqa %xmm5,%xmm9
     35: movdqa %xmm3,%xmm10
     47: movdqa %xmm1,%xmm9
     49: movdqa %xmm15,%xmm10
     61: movdqa 32(%r11),%xmm7
     62: movdqa %xmm4,%xmm…
    [additional matches not shown]

vpaes-x86.pl
    161: &movdqa ("xmm7",&QWP($k_inv,$const));
    162: &movdqa ("xmm6",&QWP($k_s0F,$const));
    183: &movdqa ("xmm1","xmm6")
    184: &movdqa ("xmm2",&QWP($k_ipt,$const));
    190: &movdqa ("xmm0",&QWP($k_ipt+16,$const));
    201: &movdqa ("xmm4",&QWP($k_sb1,$const)); # 4 : sb1u
    204: &movdqa ("xmm0",&QWP($k_sb1+16,$const));# 0 : sb1t
    207: &movdqa ("xmm5",&QWP($k_sb2,$const)); # 4 : sb2u
    209: &movdqa ("xmm1",&QWP(-0x40,$base,$magic));# .Lk_mc_forward[]
    210: &movdqa ("xmm2",&QWP($k_sb2+16,$const));# 2 : sb2…
    [additional matches not shown]
/external/libyuv/files/source/

rotate.cc
     86: movdqa xmm1, xmm0
     91: movdqa xmm3, xmm2
     97: movdqa xmm5, xmm4
    103: movdqa xmm7, xmm6
    108: movdqa xmm2, xmm0
    109: movdqa xmm3, xmm1
    114: movdqa xmm6, xmm4
    115: movdqa xmm7, xmm5
    122: movdqa xmm4, xmm0
    127: movdqa xmm6, xmm…
    205: movdqa [esp], xmm5  // backup xmm5
    227: movdqa [esp], xmm6  // backup xmm6
    [additional matches not shown]

row_win.cc
    139: movdqa xmm1, xmm0
    144: movdqa [edx], xmm0
    145: movdqa [edx + 16], xmm1
    159: movdqa xmm5, kShuffleMaskBGRAToARGB
    164: movdqa xmm0, [eax]
    167: movdqa [eax + edx], xmm0
    180: movdqa xmm5, kShuffleMaskABGRToARGB
    185: movdqa xmm0, [eax]
    188: movdqa [eax + edx], xmm0
    201: movdqa xmm5, kShuffleMaskRGBAToARG…
    209: movdqa [eax + edx], xmm0
    230: movdqa [eax + edx], xmm0
    259: movdqa [edx + 32], xmm2
    262: movdqa [edx], xmm0
    266: movdqa [edx + 16], xmm1
    269: movdqa [edx + 48], xmm3
    299: movdqa [edx + 32], xmm2
    302: movdqa [edx], xmm0
    306: movdqa [edx + 16], xmm1
    309: movdqa [edx + 48], xmm3
    364: movdqa [eax * 2 + edx], xmm1       // store 4 pixels of ARGB
    365: movdqa [eax * 2 + edx + 16], xmm2  // store next 4 pixels of ARGB
    418: movdqa [eax * 2 + edx], xmm1       // store 4 pixels of ARGB
    419: movdqa [eax * 2 + edx + 16], xmm2  // store next 4 pixels of ARGB
    458: movdqa [eax * 2 + edx], xmm0       // store 4 pixels of ARGB
    459: movdqa [eax * 2 + edx + 16], xmm1  // store next 4 pixels of ARGB
    492: movdqa [edx], xmm0                 // store 0
    497: movdqa [edx + 16], xmm1            // store 1
    498: movdqa [edx + 32], xmm2            // store 2
    531: movdqa [edx], xmm0                 // store 0
    536: movdqa [edx + 16], xmm1            // store 1
    537: movdqa [edx + 32], xmm2            // store 2
    684: movdqa [edx], xmm0
    752: movdqa [edx], xmm0
    820: movdqa [edx], xmm0
    888: movdqa [edx], xmm0
   1645: movdqa [edx], xmm0
   1646: movdqa [edx + 16], xmm1
   1688: movdqa [edx], xmm0
   1689: movdqa [edx + 16], xmm1
   1732: movdqa [edx], xmm0
   1733: movdqa [edx + 16], xmm1
   1771: movdqa [edx], xmm0
   1772: movdqa [edx + 16], xmm1
   1809: movdqa [edx], xmm0
   1810: movdqa [edx + 16], xmm1
   2056: movdqa [edx], xmm5
   2057: movdqa [edx + 16], xmm0
   2138: movdqa [edx], xmm2
   2139: movdqa [edx + 16], xmm1
   2220: movdqa [edx], xmm5
   2221: movdqa [edx + 16], xmm0
   2310: movdqa [edx], xmm0
   2311: movdqa [edx + 16], xmm1
   2342: movdqa [edx], xmm0
   2437: movdqa [edx], xmm0
   2471: movdqa [edx], xmm0
   2472: movdqa [edx + edi], xmm2
   2497: movdqa [eax + edx], xmm0
   2498: movdqa [eax + edx + 16], xmm1
   2593: movdqa [edx], xmm0
   2805: movdqa [edx], xmm0
   3079: movdqa [edx], xmm0
   3214: movdqa [edx], xmm0
   3240: movdqa [edx], xmm0
   3315: movdqa [eax + edx], xmm0
   3364: movdqa [eax + edx], xmm0
   3418: movdqa [eax + edx], xmm0
   3466: movdqa [eax + edx], xmm0
   3467: movdqa [eax + edx + 16], xmm1
   3537: movdqa [eax], xmm0
   3538: movdqa [eax + 16], xmm1
   3600: movdqa [eax], xmm0
   3601: movdqa [eax + 16], xmm1
   3696: movdqa [eax], xmm0
   3861: movdqa [edx], xmm2
   3862: movdqa [edx + 16], xmm3
   3863: movdqa [edx + 32], xmm4
   3864: movdqa [edx + 48], xmm5
   3921: movdqa [eax + edx], xmm0
   4057: movdqa [esi + edi], xmm0
   4069: movdqa [esi + edi], xmm0
   4082: movdqa [esi + edi], xmm0
   [additional matches not shown]
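The row_win.cc matches above appear to come from MSVC-style __asm blocks that follow one shape: load 16 aligned bytes of pixels with movdqa, presumably reorder the channels with pshufb against a constant mask (kShuffleMaskBGRAToARGB and friends, loaded into xmm5), and write the result back with an aligned movdqa store. A rough intrinsics rendering of that per-16-byte step; the function name and the shuffle indices below are illustrative (the mask simply reverses the byte order inside each 32-bit pixel) and are not libyuv's actual constants:

    #include <tmmintrin.h>   /* SSSE3: _mm_shuffle_epi8 (pshufb); also provides SSE2 */
    #include <stdint.h>

    /* Reorder the channels of 4 packed 32-bit pixels (16 aligned bytes):
     * movdqa load -> pshufb -> movdqa store, as in the row_win.cc loops.
     * Illustrative mask: reverse the bytes within each pixel. */
    static void swap_channels_4px(const uint8_t *src, uint8_t *dst) {
        const __m128i mask = _mm_setr_epi8( 3,  2,  1,  0,  7,  6,  5,  4,
                                           11, 10,  9,  8, 15, 14, 13, 12);
        __m128i px = _mm_load_si128((const __m128i *)src);  /* movdqa load  */
        px = _mm_shuffle_epi8(px, mask);                     /* pshufb       */
        _mm_store_si128((__m128i *)dst, px);                 /* movdqa store */
    }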
/system/core/libcutils/arch-x86/

sse2-memset32-atom.S
    295: movdqa %xmm0, (%edx)
    296: movdqa %xmm0, 0x10(%edx)
    297: movdqa %xmm0, 0x20(%edx)
    298: movdqa %xmm0, 0x30(%edx)
    299: movdqa %xmm0, 0x40(%edx)
    300: movdqa %xmm0, 0x50(%edx)
    301: movdqa %xmm0, 0x60(%edx)
    302: movdqa %xmm0, 0x70(%edx)
    308: movdqa %xmm0, (%edx)
    309: movdqa %xmm0, 0x10(%edx…
    [additional matches not shown]

sse2-memset16-atom.S
    381: movdqa %xmm0, (%edx)
    382: movdqa %xmm0, 0x10(%edx)
    383: movdqa %xmm0, 0x20(%edx)
    384: movdqa %xmm0, 0x30(%edx)
    385: movdqa %xmm0, 0x40(%edx)
    386: movdqa %xmm0, 0x50(%edx)
    387: movdqa %xmm0, 0x60(%edx)
    388: movdqa %xmm0, 0x70(%edx)
    394: movdqa %xmm0, (%edx)
    395: movdqa %xmm0, 0x10(%edx…
    [additional matches not shown]