/external/boringssl/mac-x86/crypto/sha/ |
sha1-586.S |
    1417 movdqu 16(%esi),%xmm5
    1444 pxor %xmm7,%xmm5
    1456 pxor %xmm5,%xmm7
    1468 pxor %xmm7,%xmm5
    1480 pxor %xmm5,%xmm7
    1492 pxor %xmm7,%xmm5
    1504 pxor %xmm5,%xmm7
    1516 pxor %xmm7,%xmm5
    1528 pxor %xmm5,%xmm7
    1534 movdqu 16(%esi),%xmm5 [all...]
|
/external/boringssl/win-x86/crypto/sha/ |
sha1-586.asm |
    1429 movdqu xmm5,[16+esi]
    1456 pxor xmm5,xmm7
    1468 pxor xmm7,xmm5
    1480 pxor xmm5,xmm7
    1492 pxor xmm7,xmm5
    1504 pxor xmm5,xmm7
    1516 pxor xmm7,xmm5
    1528 pxor xmm5,xmm7
    1540 pxor xmm7,xmm5
    1546 movdqu xmm5,[16+esi [all...]
|
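The pxor runs in both sha1-586 listings are the vectorized SHA-1 message schedule: each chain xors earlier message words together ahead of the 1-bit rotate. A scalar C sketch of the recurrence they implement (illustrative only, not BoringSSL's code):

    #include <stdint.h>

    /* SHA-1 message expansion: w[0..15] come from the input block; every
     * later word is the xor of four earlier words rotated left by one.
     * The SSSE3 assembly computes several of these words per pxor chain. */
    static void sha1_expand(uint32_t w[80]) {
      for (int i = 16; i < 80; ++i) {
        uint32_t x = w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16];
        w[i] = (x << 1) | (x >> 31);
      }
    }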
/bionic/libm/x86_64/ |
e_sinh.S |
    98 pshufd $68, %xmm4, %xmm5
    107 mulsd %xmm5, %xmm2
    114 xorpd %xmm5, %xmm5
    121 pinsrw $3, %edx, %xmm5
    133 pshufd $68, %xmm5, %xmm5
    136 psubw %xmm0, %xmm5
    140 mulpd (%r8,%rdx,8), %xmm5
    155 shufpd $0, %xmm5, %xmm [all...]
|
s_expm1.S |
    105 movapd 64+cv(%rip), %xmm5
    121 addpd %xmm4, %xmm5
    131 mulpd %xmm5, %xmm0
    144 pshufd $238, %xmm0, %xmm5
    149 addsd %xmm5, %xmm0
    154 xorpd %xmm5, %xmm5
    156 pinsrw $3, %eax, %xmm5
    164 subsd %xmm5, %xmm3
    173 subsd %xmm5, %xmm [all...]
|
s_tanh.S |
    98 pshufd $68, %xmm4, %xmm5
    107 mulsd %xmm5, %xmm2
    123 movapd (%r8,%rcx,8), %xmm5
    131 mulpd %xmm5, %xmm0
    134 movq %xmm4, %xmm5
    150 divsd %xmm1, %xmm5
    168 andpd %xmm1, %xmm5
    170 mulsd %xmm5, %xmm4
    172 mulsd %xmm5, %xmm3
    178 mulsd %xmm5, %xmm [all...]
|
e_asin.S |
    89 xorpd %xmm5, %xmm5
    92 pinsrw $2, %ecx, %xmm5
    98 pinsrw $2, %ecx, %xmm5
    114 orpd %xmm5, %xmm2
    127 movsd cv(%rip), %xmm5
    135 mulsd %xmm3, %xmm5
    140 addsd %xmm4, %xmm5
    142 addsd %xmm5, %xmm6
    179 orpd %xmm5, %xmm [all...]
|
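A pattern that recurs in these kernels is xorpd to zero a register followed by pinsrw $3 to drop a 16-bit value into its top word (see e_sinh.S lines 114/121 and s_expm1.S lines 154/156 above). Since the top word of an IEEE double holds the sign and biased exponent, this materialises a power-of-two scale factor without a memory load. A hedged C sketch of the idea (hypothetical helper, not the libm source):

    #include <stdint.h>
    #include <string.h>

    /* Build 2^k by writing only word 3 (bits 63..48) of a zeroed double:
     * that word is sign(1) | biased exponent(11) | top 4 mantissa bits. */
    static double pow2_from_word3(int k) {
      uint64_t bits = (uint64_t)(uint16_t)((k + 1023) << 4) << 48;
      double d;
      memcpy(&d, &bits, sizeof d);
      return d;   /* pow2_from_word3(0) == 1.0, pow2_from_word3(10) == 1024.0 */
    }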
/external/libyuv/files/source/ |
format_conversion.cc |
    35 movd xmm5, [esp + 12] // selector
    37 pshufd xmm5, xmm5, 0
    43 pshufb xmm0, xmm5
    58 "movd %3,%%xmm5 \n"
    59 "pshufd $0x0,%%xmm5,%%xmm5 \n"
    64 "pshufb %%xmm5,%%xmm0 \n"
    75 , "xmm0", "xmm5"
|
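The format_conversion.cc hits show the same operation twice: an MSVC-style block and a GCC extended-asm block that broadcast a 4-byte selector into xmm5 and use it as a pshufb mask. A minimal, self-contained sketch of the extended-asm pattern (hypothetical shuffle_block helper, not libyuv's routine; requires SSSE3 at runtime):

    #include <stdint.h>

    static void shuffle_block(uint8_t* dst, const uint8_t* src, uint32_t selector) {
      asm volatile(
        "movd    %2, %%xmm5           \n\t"   /* selector -> low dword of xmm5 */
        "pshufd  $0x0, %%xmm5, %%xmm5 \n\t"   /* replicate it to all four lanes */
        "movdqu  (%0), %%xmm0         \n\t"   /* load one 16-byte block */
        "pshufb  %%xmm5, %%xmm0       \n\t"   /* shuffle bytes by the selector */
        "movdqu  %%xmm0, (%1)         \n\t"
        : /* no outputs */
        : "r"(src), "r"(dst), "r"(selector)
        : "memory", "xmm0", "xmm5");
    }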
/external/llvm/test/CodeGen/X86/ |
pr3154.ll |
    32 call void asm sideeffect "movsd $0, %xmm7 \0A\09movapd ff_pd_1, %xmm6 \0A\09movapd ff_pd_2, %xmm5 \0A\09movlhps %xmm7, %xmm7 \0A\09subpd %xmm5, %xmm7 \0A\09addsd %xmm6, %xmm7 \0A\09", "*m,~{dirflag},~{fpsr},~{flags}"(double* %c) nounwind
    40 %asmtmp = call { i32, i32 } asm sideeffect "1: \0A\09movapd %xmm7, %xmm1 \0A\09mulpd %xmm1, %xmm1 \0A\09movapd %xmm6, %xmm0 \0A\09subpd %xmm1, %xmm0 \0A\09pshufd $$0x4e, %xmm0, %xmm1 \0A\09cvtpi2pd ($3,$0), %xmm2 \0A\09cvtpi2pd -1*4($3,$1), %xmm3 \0A\09mulpd %xmm0, %xmm2 \0A\09mulpd %xmm1, %xmm3 \0A\09movapd %xmm2, ($2,$0,2) \0A\09movupd %xmm3, -1*8($2,$1,2) \0A\09subpd %xmm5, %xmm7 \0A\09sub $$8, $1 \0A\09add $$8, $0 \0A\09jl 1b \0A\09", "=&r,=&r,r,r,0,1,~{dirflag},~{fpsr},~{flags}"(double* %16, i32* %17, i32 %12, i32 %14) nounwind ; <{ i32, i32 }> [#uses=0]
    44 %asmtmp23 = call { i32, i32 } asm sideeffect "1: \0A\09movapd %xmm7, %xmm1 \0A\09mulpd %xmm1, %xmm1 \0A\09movapd %xmm6, %xmm0 \0A\09subpd %xmm1, %xmm0 \0A\09pshufd $$0x4e, %xmm0, %xmm1 \0A\09cvtpi2pd ($3,$0), %xmm2 \0A\09cvtpi2pd -2*4($3,$1), %xmm3 \0A\09mulpd %xmm0, %xmm2 \0A\09mulpd %xmm1, %xmm3 \0A\09movapd %xmm2, ($2,$0,2) \0A\09movapd %xmm3, -2*8($2,$1,2) \0A\09subpd %xmm5, %xmm7 \0A\09sub $$8, $1 \0A\09add $$8, $0 \0A\09jl 1b \0A\09", "=&r,=&r,r,r,0,1,~{dirflag},~{fpsr},~{flags}"(double* %16, i32* %17, i32 %12, i32 %14) nounwind ; <{ i32, i32 }> [#uses=0]
    84 %asmtmp32 = call i32 asm sideeffect "movsd ff_pd_1, %xmm0 \0A\09movsd ff_pd_1, %xmm1 \0A\09movsd ff_pd_1, %xmm2 \0A\091: \0A\09movapd ($4,$0), %xmm3 \0A\09movupd -8($5,$0), %xmm4 \0A\09movapd ($5,$0), %xmm5 \0A\09mulpd %xmm3, %xmm4 \0A\09mulpd %xmm3, %xmm5 \0A\09mulpd -16($5,$0), %xmm3 \0A\09addpd %xmm4, %xmm1 \0A\09addpd %xmm5, %xmm0 \0A\09addpd %xmm3, %xmm2 \0A\09add $$16, $0 \0A\09jl 1b \0A\09movhlps %xmm0, %xmm3 \0A\09movhlps %xmm1, %xmm4 \0A\09movhlps %xmm2, %xmm5 \0A\09addsd %xmm3, %xmm0 \0A\09addsd %xmm4, %xmm1 \0A\09addsd %xmm5, %xmm2 \0A\09movsd %xmm0, $1 \0A\09movsd %xmm1, $2 \0A\09movsd %xmm2, $3 \0A\09", "=&r,=*m,=*m,=*m,r,r,0,~{dirflag},~{fpsr},~{flags}"(double* %28, double* %29, double* %30, double* %21, double* %27, i32 %22) nounwind ; <i32> [#uses=0]
|
asm-reg-type-mismatch.ll | 15 call void asm sideeffect "", "{xmm0},{xmm1},{xmm2},{xmm3},{xmm4},{xmm5},{xmm6},{xmm7},~{dirflag},~{fpsr},~{flags}"( double undef, double undef, double undef, double 1.0, double undef, double 0.0, double undef, double 0.0 ) nounwind
|
/bionic/libm/x86/ |
s_expm1.S |
    123 movapd 128(%ebx), %xmm5
    138 addpd %xmm4, %xmm5
    148 mulpd %xmm5, %xmm0
    161 pshufd $238, %xmm0, %xmm5
    166 addsd %xmm5, %xmm0
    171 xorpd %xmm5, %xmm5
    173 pinsrw $3, %eax, %xmm5
    181 subsd %xmm5, %xmm3
    190 subsd %xmm5, %xmm [all...]
|
s_tanh.S |
    117 pshufd $68, %xmm4, %xmm5
    126 mulsd %xmm5, %xmm2
    141 movapd (%ebx,%ecx,8), %xmm5
    149 mulpd %xmm5, %xmm0
    152 movapd %xmm4, %xmm5
    168 divsd %xmm1, %xmm5
    186 andpd %xmm1, %xmm5
    188 mulsd %xmm5, %xmm4
    190 mulsd %xmm5, %xmm3
    196 mulsd %xmm5, %xmm [all...]
|
e_asin.S |
    107 xorpd %xmm5, %xmm5
    110 pinsrw $2, %ecx, %xmm5
    117 pinsrw $2, %ecx, %xmm5
    132 orpd %xmm5, %xmm2
    144 movsd 64(%ebx), %xmm5
    152 mulsd %xmm3, %xmm5
    157 addsd %xmm4, %xmm5
    159 addsd %xmm5, %xmm6
    198 orpd %xmm5, %xmm [all...]
|
s_cos.S |
    209 movapd 2240(%ebx), %xmm5
    212 orps %xmm4, %xmm5
    215 addpd %xmm5, %xmm1
    223 movapd 2096(%ebx), %xmm5
    235 mulpd %xmm0, %xmm5
    240 mulpd %xmm0, %xmm5
    253 addpd 2080(%ebx), %xmm5
    256 mulpd %xmm0, %xmm5
    262 addpd %xmm5, %xmm6
    263 movsd 8(%eax), %xmm5 [all...]
|
s_sin.S |
    210 movsd 2272(%ebx), %xmm5
    213 orps %xmm4, %xmm5
    216 addpd %xmm5, %xmm1
    224 movapd 2096(%ebx), %xmm5
    236 mulpd %xmm0, %xmm5
    241 mulpd %xmm0, %xmm5
    254 addpd 2080(%ebx), %xmm5
    257 mulpd %xmm0, %xmm5
    263 addpd %xmm5, %xmm6
    264 movsd 8(%eax), %xmm5 [all...]
|
/external/boringssl/win-x86_64/crypto/modes/ |
ghash-x86_64.asm |
    701 pxor xmm5,xmm5
    703 pcmpgtd xmm5,xmm4
    708 pand xmm5,XMMWORD[$L$0x1c2_polynomial]
    709 pxor xmm2,xmm5
    797 movdqa xmm5,xmm0
    835 pshufd xmm3,xmm5,78
    837 pxor xmm3,xmm5
    838 movdqu XMMWORD[48+rcx],xmm5
    854 movdqa xmm5,XMMWORD[$L$bswap_mask [all...]
|
/external/llvm/test/MC/X86/ |
x86_64-encoding.s |
    243 // CHECK: pmovmskb %xmm5, %ecx
    245 pmovmskb %xmm5,%rcx
    247 // CHECK: pinsrw $3, %ecx, %xmm5
    249 pinsrw $3, %ecx, %xmm5
    251 // CHECK: pinsrw $3, %ecx, %xmm5
    253 pinsrw $3, %rcx, %xmm5
|
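The encoding test above checks that pinsrw written with %rcx assembles to the same bytes as the %ecx form, because the instruction only consumes the low word of the source register. For reference, what pmovmskb and pinsrw compute, expressed with the SSE2 intrinsics (just an illustration, not part of the test harness):

    #include <emmintrin.h>
    #include <stdio.h>

    int main(void) {
      __m128i v = _mm_set1_epi8((char)0x80);       /* every byte has its MSB set */
      int mask = _mm_movemask_epi8(v);             /* pmovmskb: gather MSBs -> 0xffff */
      __m128i w = _mm_insert_epi16(v, 0x1234, 3);  /* pinsrw $3: replace word 3 */
      printf("mask=%#x word3=%#x\n", mask, _mm_extract_epi16(w, 3) & 0xffff);
      return 0;
    }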
/bionic/libc/arch-x86/silvermont/string/ |
sse2-memmove-slm.S |
    154 movdqu -48(%eax, %ecx), %xmm5
    162 movdqu %xmm5, -48(%edx, %ecx)
    182 movdqu 16(%eax, %edi), %xmm5
    191 movaps %xmm5, 16(%edi)
    378 movdqu -48(%eax, %ecx), %xmm5
    386 movdqu %xmm5, -48(%edx, %ecx)
    409 movdqu -32(%edi, %esi), %xmm5
    418 movdqa %xmm5, -32(%edi)
|
/bionic/libc/arch-x86_64/string/ |
sse2-memmove-slm.S |
    144 movdqu -48(%rsi, %rdx), %xmm5
    152 movdqu %xmm5, -48(%rdi, %rdx)
    170 movdqu 16(%r8, %rsi), %xmm5
    179 movaps %xmm5, 16(%r8)
    366 movdqu -48(%rsi, %rdx), %xmm5
    374 movdqu %xmm5, -48(%rdi, %rdx)
    394 movdqu -32(%r9, %r8), %xmm5
    403 movaps %xmm5, -32(%r9)
|
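Both memmove variants stream data in 16-byte chunks: movdqu loads tolerate any alignment, and the stores switch to movaps/movdqa once the destination is aligned. A simplified C sketch of the chunked copy (forward, non-overlapping case only; the real routine also handles overlap, alignment fix-up and tails):

    #include <emmintrin.h>
    #include <stddef.h>
    #include <stdint.h>

    static void copy16_forward(uint8_t* dst, const uint8_t* src, size_t n) {
      size_t i = 0;
      for (; i + 16 <= n; i += 16) {
        __m128i chunk = _mm_loadu_si128((const __m128i*)(src + i)); /* movdqu load */
        _mm_storeu_si128((__m128i*)(dst + i), chunk);               /* movdqu store */
      }
      for (; i < n; ++i)
        dst[i] = src[i];   /* byte tail */
    }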
/external/valgrind/VEX/test/ |
fxsave.c |
    42 asm __volatile__("movups vecZ, %xmm5");
    66 asm __volatile__("movaps %xmm2, %xmm5");
|
/external/valgrind/memcheck/tests/x86/ |
fxsave.c |
    43 asm __volatile__("movups " VG_SYM(vecZ) ", %xmm5");
    67 asm __volatile__("movaps %xmm2, %xmm5");
|
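Both fxsave.c tests load known values into the XMM registers with inline asm and then dump the whole FPU/SSE state with FXSAVE so the harness can inspect it. A self-contained sketch of that flow (assumes GCC/Clang built with -mfxsr; offsets follow the documented FXSAVE layout, where the XMM area starts at byte 160 with 16 bytes per register):

    #include <immintrin.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    int main(void) {
      static uint8_t image[512] __attribute__((aligned(16)));  /* FXSAVE needs 512 bytes, 16-aligned */
      asm volatile("pcmpeqd %%xmm5, %%xmm5" ::: "xmm5");       /* xmm5 = all ones */
      _fxsave(image);
      uint8_t xmm5[16];
      memcpy(xmm5, image + 160 + 5 * 16, 16);                  /* slot for xmm5 in the image */
      printf("xmm5[0]=0x%02x\n", xmm5[0]);                     /* expect 0xff */
      return 0;
    }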
/art/runtime/arch/x86_64/ |
registers_x86_64.h | 58 XMM5 = 5,
|
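The ART header's floating-point register enumeration appears to mirror the hardware encoding, so XMM5 is simply the value 5 used when assembling and disassembling instructions. A sketch of that kind of mapping (illustrative names; only the XMM5 = 5 entry is taken from the header above):

    enum FloatRegister {
      XMM0 = 0, XMM1 = 1, XMM2 = 2, XMM3 = 3,
      XMM4 = 4, XMM5 = 5, XMM6 = 6, XMM7 = 7,
    };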
/external/libvpx/libvpx/vp8/encoder/x86/ |
ssim_opt_x86_64.asm |
    90 movdqu xmm5, [rsi]
    92 movdqa xmm3, xmm5
    99 movdqa xmm3, xmm5
|
/external/libvpx/libvpx/vp9/encoder/x86/ |
vp9_ssim_opt_x86_64.asm |
    90 movdqu xmm5, [rsi]
    92 movdqa xmm3, xmm5
    99 movdqa xmm3, xmm5
|
/hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/vp8/encoder/x86/ |
ssim_opt.asm |
    90 movdqu xmm5, [rsi]
    92 movdqa xmm3, xmm5
    99 movdqa xmm3, xmm5
|
/hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/vp9/encoder/x86/ |
vp9_ssim_opt.asm |
    90 movdqu xmm5, [rsi]
    92 movdqa xmm3, xmm5
    99 movdqa xmm3, xmm5
|