/external/libvpx/vp8/encoder/x86/ |
subtract_sse2.asm | 95 movdqa xmm0, XMMWORD PTR [rsi] ; src 96 movdqa xmm1, XMMWORD PTR [rax] ; pred 98 movdqa xmm2, xmm0 105 movdqa xmm2, xmm0 106 movdqa xmm3, xmm1 110 movdqa XMMWORD PTR [rdi], xmm0 111 movdqa XMMWORD PTR [rdi +16], xmm2 113 movdqa xmm4, XMMWORD PTR [rsi + rdx] 114 movdqa xmm5, XMMWORD PTR [rax + 16] 116 movdqa xmm6, xmm [all...] |
quantize_sse2.asm | 62 movdqa xmm0, [rdx] 63 movdqa xmm4, [rdx + 16] 69 movdqa xmm1, xmm0 70 movdqa xmm5, xmm4 84 movdqa xmm2, [rcx] 85 movdqa xmm3, [rcx + 16] 95 movdqa [rsp + abs_minus_zbin], xmm1 96 movdqa [rsp + abs_minus_zbin + 16], xmm5 102 movdqa xmm2, [rdx] 103 movdqa xmm6, [rdx + 16 [all...] |
quantize_ssse3.asm | 33 movdqa xmm0, [rdx] 34 movdqa xmm4, [rdx + 16] 36 movdqa xmm2, [rdi] ;round lo 37 movdqa xmm3, [rdi + 16] ;round hi 39 movdqa xmm1, xmm0 40 movdqa xmm5, xmm4 63 movdqa [rdi], xmm1 64 movdqa [rdi + 16], xmm5 66 movdqa xmm2, [rcx] 67 movdqa xmm3, [rcx + 16 [all...] |
temporal_filter_apply_sse2.asm | 47 movdqa [rsp + strength], xmm6 ; where strength is used, all 16 bytes are read 54 movdqa xmm5, [GLOBAL(_const_top_bit)] 56 movdqa [rsp + rounding_bit], xmm5 67 movdqa [rsp + filter_weight], xmm0 87 movdqa xmm0, [rsi] ; src (frame1) 89 movdqa xmm1, xmm0 94 movdqa xmm2, [rdx] ; predictor (frame2) 95 movdqa xmm3, xmm2 121 movdqa xmm3, [GLOBAL(_const_16w)] 122 movdqa xmm2, [GLOBAL(_const_16w) [all...] |
dct_sse2.asm | 76 movdqa xmm2, xmm0 79 movdqa xmm1, xmm0 85 movdqa xmm3, xmm0 91 movdqa xmm1, xmm0 94 movdqa xmm4, xmm3 109 movdqa xmm2, xmm0 113 movdqa xmm3, xmm0 116 movdqa xmm2, xmm0 120 movdqa xmm5, XMMWORD PTR[GLOBAL(_7)] 122 movdqa xmm3, xmm [all...] |
encodeopt.asm | 27 movdqa xmm3, [rsi] 29 movdqa xmm4, [rdi] 30 movdqa xmm5, [rsi+16] 32 movdqa xmm6, [rdi+16] 42 movdqa xmm0, xmm3 48 movdqa xmm3, xmm0 229 movdqa xmm3, [rsi] 230 movdqa xmm4, [rdi] 232 movdqa xmm5, [rsi+16] 233 movdqa xmm6, [rdi+16 [all...] |
fwalsh_sse2.asm | 40 movdqa xmm1, xmm0 44 movdqa xmm2, xmm0 51 movdqa xmm1, xmm0 61 movdqa xmm2, xmm0 77 movdqa xmm2, xmm0 80 movdqa xmm3, xmm1 89 movdqa xmm0, xmm4 92 movdqa xmm1, xmm6 96 movdqa xmm2, xmm0 99 movdqa xmm3, xmm [all...] |
sad_ssse3.asm | 16 movdqa xmm0, XMMWORD PTR [rsi] 25 movdqa xmm0, XMMWORD PTR [rsi] 38 movdqa xmm0, XMMWORD PTR [rsi+rax] 57 movdqa xmm0, XMMWORD PTR [rsi] 58 movdqa xmm4, XMMWORD PTR [rdi] 59 movdqa xmm7, XMMWORD PTR [rdi+16] 61 movdqa xmm5, xmm7 64 movdqa xmm6, xmm7 73 movdqa xmm0, XMMWORD PTR [rsi] 74 movdqa xmm4, XMMWORD PTR [rdi [all...] |
/external/libvpx/vp8/common/x86/ |
recon_sse2.asm | 81 movdqa xmm1, XMMWORD PTR [rsi] 82 movdqa xmm5, xmm1 88 movdqa XMMWORD PTR [rdi], xmm1 91 movdqa xmm2, XMMWORD PTR [rsi+16] 92 movdqa xmm6, xmm2 98 movdqa XMMWORD PTR [rdi+rax], xmm2 101 movdqa xmm3, XMMWORD PTR [rsi+32] 102 movdqa xmm7, xmm3 108 movdqa XMMWORD PTR [rdi+rax*2], xmm3 111 movdqa xmm4, XMMWORD PTR [rsi+48 [all...] |
iwalsh_sse2.asm | 29 movdqa xmm0, [rsi + 0] ;ip[4] ip[0] 30 movdqa xmm1, [rsi + 16] ;ip[12] ip[8] 36 movdqa xmm3, xmm0 ;ip[4] ip[0] 41 movdqa xmm4, xmm0 46 movdqa xmm1, xmm4 ;c1 b1 59 movdqa xmm3, xmm4 ; 13 12 11 10 03 02 01 00 62 movdqa xmm1, xmm4 ; 23 03 22 02 21 01 20 00 67 movdqa xmm3, xmm4 ;ip[4] ip[0] 74 movdqa xmm5, xmm4 78 movdqa xmm1, xmm5 ;c1 b [all...] |
loopfilter_sse2.asm | 19 movdqa xmm2, [rdi+2*rax] ; q3 20 movdqa xmm1, [rsi+2*rax] ; q2 21 movdqa xmm4, [rsi+rax] ; q1 22 movdqa xmm5, [rsi] ; q0 38 movdqa XMMWORD PTR [rsp], xmm1 ; store q2 39 movdqa XMMWORD PTR [rsp + 16], xmm4 ; store q1 42 movdqa xmm6, xmm1 ; q2 43 movdqa xmm3, xmm4 ; q1 54 movdqa xmm0, xmm5 ; q0 61 movdqa t0, xmm5 ; save to t [all...] |
subpixel_sse2.asm | 67 movdqa xmm4, xmm1 68 movdqa xmm5, xmm1 70 movdqa xmm6, xmm1 71 movdqa xmm7, xmm1 117 movdqa XMMWORD Ptr [rdi], xmm4 192 movdqa xmm4, xmm1 193 movdqa xmm5, xmm1 195 movdqa xmm6, xmm1 196 movdqa xmm7, xmm1 241 movdqa XMMWORD Ptr [rdi], xmm [all...] |
idctllm_sse2.asm | 124 movdqa xmm0, [rax] 125 movdqa xmm2, [rax+16] 126 movdqa xmm1, [rax+32] 127 movdqa xmm3, [rax+48] 130 movdqa [rax], xmm7 131 movdqa [rax+16], xmm7 132 movdqa [rax+32], xmm7 133 movdqa [rax+48], xmm7 142 movdqa xmm4, xmm0 149 movdqa xmm4, xmm [all...] |
subpixel_ssse3.asm | 51 movdqa xmm7, [GLOBAL(rd)] 60 movdqa xmm4, XMMWORD PTR [rax] ;k0_k5 61 movdqa xmm5, XMMWORD PTR [rax+256] ;k2_k4 62 movdqa xmm6, XMMWORD PTR [rax+128] ;k1_k3 79 movdqa xmm1, xmm0 82 movdqa xmm2, xmm1 115 movdqa xmm5, XMMWORD PTR [rax+256] ;k2_k4 116 movdqa xmm6, XMMWORD PTR [rax+128] ;k1_k3 118 movdqa xmm3, XMMWORD PTR [GLOBAL(shuf2bfrom1)] 119 movdqa xmm4, XMMWORD PTR [GLOBAL(shuf3bfrom1) [all...] |
/bionic/libc/arch-x86/string/ |
sse2-memset5-atom.S | 350 movdqa %xmm0, (%edx) 351 movdqa %xmm0, 0x10(%edx) 352 movdqa %xmm0, 0x20(%edx) 353 movdqa %xmm0, 0x30(%edx) 354 movdqa %xmm0, 0x40(%edx) 355 movdqa %xmm0, 0x50(%edx) 356 movdqa %xmm0, 0x60(%edx) 357 movdqa %xmm0, 0x70(%edx) 363 movdqa %xmm0, (%edx) 364 movdqa %xmm0, 0x10(%edx [all...] |
ssse3-memcpy5.S | 246 movdqa (%eax, %edi), %xmm0 247 movdqa 16(%eax, %edi), %xmm1 249 movdqa %xmm0, (%edx, %edi) 250 movdqa %xmm1, 16(%edx, %edi) 254 movdqa (%eax, %edi), %xmm0 255 movdqa 16(%eax, %edi), %xmm1 257 movdqa %xmm0, (%edx, %edi) 258 movdqa %xmm1, 16(%edx, %edi) 262 movdqa (%eax, %edi), %xmm0 263 movdqa 16(%eax, %edi), %xmm [all...] |
ssse3-strcmp-latest.S | 273 movdqa (%eax), %xmm1 290 movdqa (%eax, %ecx), %xmm1 291 movdqa (%edx, %ecx), %xmm2 316 movdqa (%edx), %xmm2 317 movdqa (%eax), %xmm1 331 movdqa (%edx), %xmm3 345 movdqa (%eax, %ecx), %xmm1 346 movdqa (%edx, %ecx), %xmm2 347 movdqa %xmm2, %xmm4 364 movdqa %xmm4, %xmm [all...] |
ssse3-memcmp3-new.S | 227 movdqa (%esi), %xmm0 231 movdqa 16(%esi), %xmm2 237 movdqa %xmm0, %xmm1 238 movdqa 32(%esi), %xmm0 239 movdqa 48(%esi), %xmm2 257 movdqa %xmm0, %xmm1 277 movdqa 16(%esi), %xmm1 278 movdqa %xmm1, %xmm2 282 movdqa 32(%esi), %xmm3 303 movdqa 16(%esi), %xmm [all...] |
/external/libyuv/files/source/ |
rotate.cc | 91 movdqa xmm1, xmm0 96 movdqa xmm3, xmm2 102 movdqa xmm5, xmm4 108 movdqa xmm7, xmm6 113 movdqa xmm2, xmm0 114 movdqa xmm3, xmm1 119 movdqa xmm6, xmm4 120 movdqa xmm7, xmm5 127 movdqa xmm4, xmm0 132 movdqa xmm6, xmm 208 movdqa [esp], xmm5 // backup xmm5 230 movdqa [esp], xmm6 // backup xmm6 874 movdqa [edx], xmm0 [all...] |
row_win.cc | 84 movdqa xmm7, _kARGBToY 85 movdqa xmm6, _kAddY16 88 movdqa xmm0, [eax] 89 movdqa xmm1, [eax + 16] 90 movdqa xmm2, [eax + 32] 91 movdqa xmm3, [eax + 48] 103 movdqa [edx], xmm0 117 movdqa xmm7, _kBGRAToY 118 movdqa xmm6, _kAddY16 121 movdqa xmm0, [eax 136 movdqa [edx], xmm0 169 movdqa [edx], xmm0 390 movdqa [edx + 32], xmm2 393 movdqa [edx], xmm0 397 movdqa [edx + 16], xmm1 399 movdqa [edx + 48], xmm3 429 movdqa [edx + 32], xmm2 432 movdqa [edx], xmm0 436 movdqa [edx + 16], xmm1 438 movdqa [edx + 48], xmm3 [all...] |
scale.cc | 281 movdqa xmm0, [eax] 282 movdqa xmm1, [eax + 16] 287 movdqa [edx], xmm0 310 movdqa xmm0, [eax] 311 movdqa xmm1, [eax + 16] 312 movdqa xmm2, [eax + esi] 313 movdqa xmm3, [eax + esi + 16] 318 movdqa xmm2, xmm0 // average columns (32 to 16 pixels) 320 movdqa xmm3, xmm1 328 movdqa [edx], xmm 885 movdqa [edi], xmm2 886 movdqa [edi + 16], xmm3 946 movdqa [edi], xmm0 960 movdqa [edi], xmm0 976 movdqa [edi], xmm0 1028 movdqa [edi], xmm0 1042 movdqa [edi], xmm0 1058 movdqa [edi], xmm0 [all...] |
/system/core/libcutils/arch-x86/ |
sse2-memset32-atom.S | 295 movdqa %xmm0, (%edx) 296 movdqa %xmm0, 0x10(%edx) 297 movdqa %xmm0, 0x20(%edx) 298 movdqa %xmm0, 0x30(%edx) 299 movdqa %xmm0, 0x40(%edx) 300 movdqa %xmm0, 0x50(%edx) 301 movdqa %xmm0, 0x60(%edx) 302 movdqa %xmm0, 0x70(%edx) 308 movdqa %xmm0, (%edx) 309 movdqa %xmm0, 0x10(%edx [all...] |
sse2-memset16-atom.S | 381 movdqa %xmm0, (%edx) 382 movdqa %xmm0, 0x10(%edx) 383 movdqa %xmm0, 0x20(%edx) 384 movdqa %xmm0, 0x30(%edx) 385 movdqa %xmm0, 0x40(%edx) 386 movdqa %xmm0, 0x50(%edx) 387 movdqa %xmm0, 0x60(%edx) 388 movdqa %xmm0, 0x70(%edx) 394 movdqa %xmm0, (%edx) 395 movdqa %xmm0, 0x10(%edx [all...] |
/external/openssl/crypto/aes/asm/ |
vpaes-x86.pl | 161 &movdqa ("xmm7",&QWP($k_inv,$const)); 162 &movdqa ("xmm6",&QWP($k_s0F,$const)); 183 &movdqa ("xmm1","xmm6") 184 &movdqa ("xmm2",&QWP($k_ipt,$const)); 190 &movdqa ("xmm0",&QWP($k_ipt+16,$const)); 201 &movdqa ("xmm4",&QWP($k_sb1,$const)); # 4 : sb1u 204 &movdqa ("xmm0",&QWP($k_sb1+16,$const));# 0 : sb1t 207 &movdqa ("xmm5",&QWP($k_sb2,$const)); # 4 : sb2u 209 &movdqa ("xmm1",&QWP(-0x40,$base,$magic));# .Lk_mc_forward[] 210 &movdqa ("xmm2",&QWP($k_sb2+16,$const));# 2 : sb2 [all...] |
/external/llvm/test/CodeGen/X86/ |
widen_load-2.ll | 9 ; CHECK: movdqa 39 ; CHECK: movdqa 40 ; CHECK: movdqa 45 ; CHECK: movdqa 56 ; CHECK: movdqa 57 ; CHECK: movdqa 58 ; CHECK: movdqa 62 ; CHECK: movdqa 63 ; CHECK: movdqa 64 ; CHECK: movdqa [all...] |