/art/runtime/arch/x86_64/
  memcmp16_x86_64.S
    57: movdqu (%rdi), %xmm2
    58: pxor %xmm1, %xmm2
    59: ptest %xmm2, %xmm0
    76: movdqu (%rdi), %xmm2
    77: pxor (%rsi), %xmm2
    78: ptest %xmm2, %xmm0
    81: movdqu 16(%rdi), %xmm2
    82: pxor 16(%rsi), %xmm2
    83: ptest %xmm2, %xmm0
    86: movdqu 32(%rdi), %xmm2
    [all...]
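The hits above show the common SSE4.1 equality-check idiom used by this ART routine: load 16 bytes unaligned, XOR against the other buffer, and let ptest set ZF when every byte matched. A minimal C sketch of that idiom with intrinsics is below; chunk16_equal is a hypothetical helper name, not part of the ART code, and the sketch tests the difference against itself rather than against a separate mask register.

    #include <smmintrin.h>   /* SSE4.1: _mm_testz_si128 */
    #include <stdbool.h>

    /* Compare one 16-byte chunk of a and b.
     * movdqu -> _mm_loadu_si128 (unaligned load)
     * pxor   -> _mm_xor_si128   (byte-wise difference; all zero iff equal)
     * ptest  -> _mm_testz_si128 (ZF = 1 when the AND of its operands is zero) */
    static bool chunk16_equal(const void *a, const void *b)
    {
        __m128i va   = _mm_loadu_si128((const __m128i *)a);
        __m128i vb   = _mm_loadu_si128((const __m128i *)b);
        __m128i diff = _mm_xor_si128(va, vb);
        return _mm_testz_si128(diff, diff) != 0;   /* diff AND diff == 0 <=> equal */
    }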
/toolchain/binutils/binutils-2.25/gas/testsuite/gas/i386/
  avx.s
    414: vaddpd %xmm4,%xmm6,%xmm2
    416: vaddps %xmm4,%xmm6,%xmm2
    418: vaddsubpd %xmm4,%xmm6,%xmm2
    420: vaddsubps %xmm4,%xmm6,%xmm2
    422: vandnpd %xmm4,%xmm6,%xmm2
    424: vandnps %xmm4,%xmm6,%xmm2
    426: vandpd %xmm4,%xmm6,%xmm2
    428: vandps %xmm4,%xmm6,%xmm2
    430: vdivpd %xmm4,%xmm6,%xmm2
    432: vdivps %xmm4,%xmm6,%xmm2
    [all...]
  avx-scalar-intel.d
    24: [ ]*[a-f0-9]+: c5 cf c2 d4 07 vcmpordsd xmm2,xmm6,xmm4
    25: [ ]*[a-f0-9]+: c5 cf c2 11 07 vcmpordsd xmm2,xmm6,QWORD PTR \[ecx\]
    26: [ ]*[a-f0-9]+: c4 e3 4d 0b d4 07 vroundsd xmm2,xmm6,xmm4,0x7
    27: [ ]*[a-f0-9]+: c4 e3 4d 0b 11 07 vroundsd xmm2,xmm6,QWORD PTR \[ecx\],0x7
    28: [ ]*[a-f0-9]+: c5 cf 58 d4 vaddsd xmm2,xmm6,xmm4
    29: [ ]*[a-f0-9]+: c5 cf 58 11 vaddsd xmm2,xmm6,QWORD PTR \[ecx\]
    30: [ ]*[a-f0-9]+: c5 cf 5a d4 vcvtsd2ss xmm2,xmm6,xmm4
    31: [ ]*[a-f0-9]+: c5 cf 5a 11 vcvtsd2ss xmm2,xmm6,QWORD PTR \[ecx\]
    32: [ ]*[a-f0-9]+: c5 cf 5e d4 vdivsd xmm2,xmm6,xmm4
    33: [ ]*[a-f0-9]+: c5 cf 5e 11 vdivsd xmm2,xmm6,QWORD PTR \[ecx\ [all...]
  amdfam10.s
    13: extrq %xmm2,%xmm1
    15: insertq %xmm2,%xmm1
    16: insertq $4,$2,%xmm2,%xmm1
    29: extrq xmm1,xmm2
    31: insertq xmm1,xmm2
    32: insertq xmm1,xmm2,2,4
  sse3.s
    6: addsubpd %xmm2,%xmm1
    7: addsubps (%ebx),%xmm2
    18: hsubps %xmm2,%xmm2
    26: movshdup %xmm2,%xmm1
    27: movsldup (%ebx),%xmm2
  x86-64-sse3.s
    6: addsubpd %xmm2,%xmm1
    7: addsubps (%rbx),%xmm2
    18: hsubps %xmm2,%xmm2
    26: movshdup %xmm2,%xmm1
    27: movsldup (%rbx),%xmm2
  rexw.s
    6: movmskpd %xmm2,%rcx
    7: movmskps %xmm2,%rcx
    19: vmovmskpd %xmm2,%rcx
    20: vmovmskps %xmm2,%rcx
    29: movmskpd rcx,xmm2
    30: movmskps rcx,xmm2
    42: vmovmskpd rcx,xmm2
    43: vmovmskps rcx,xmm2
  ssemmx2.d
    12: [ ]+8: 66 0f e3 d3[ ]+pavgw[ ]+%xmm3,%xmm2
    16: [ ]+1b: 66 0f c4 d2 02[ ]+pinsrw \$0x2,%edx,%xmm2
    19: [ ]+28: 66 0f de d2[ ]+pmaxub %xmm2,%xmm2
    30: [ ]+55: 66 0f 70 da 01[ ]+pshufd \$0x1,%xmm2,%xmm3
    32: [ ]+60: f3 0f 70 da 01[ ]+pshufhw \$0x1,%xmm2,%xmm3
    34: [ ]+6b: f2 0f 70 da 01[ ]+pshuflw \$0x1,%xmm2,%xmm3
    36: [ ]+76: 66 0f e7 10[ ]+movntdq %xmm2,\(%eax\)
    37: [ ]+7a: 66 0f 60 90 90 90 90 90 punpcklbw -0x6f6f6f70\(%eax\),%xmm2
    38: [ ]+82: 66 0f 61 90 90 90 90 90 punpcklwd -0x6f6f6f70\(%eax\),%xmm2
    [all...]
  x86-64-avx.s
    414: vaddpd %xmm4,%xmm6,%xmm2
    416: vaddps %xmm4,%xmm6,%xmm2
    418: vaddsubpd %xmm4,%xmm6,%xmm2
    420: vaddsubps %xmm4,%xmm6,%xmm2
    422: vandnpd %xmm4,%xmm6,%xmm2
    424: vandnps %xmm4,%xmm6,%xmm2
    426: vandpd %xmm4,%xmm6,%xmm2
    428: vandps %xmm4,%xmm6,%xmm2
    430: vdivpd %xmm4,%xmm6,%xmm2
    432: vdivps %xmm4,%xmm6,%xmm2
    [all...]
/bionic/libc/arch-x86_64/string/
  sse4-memcmp-slm.S
    109: movdqu (%rdi), %xmm2
    110: pxor %xmm1, %xmm2
    111: ptest %xmm2, %xmm0
    128: movdqu (%rdi), %xmm2
    129: pxor (%rsi), %xmm2
    130: ptest %xmm2, %xmm0
    133: movdqu 16(%rdi), %xmm2
    134: pxor 16(%rsi), %xmm2
    135: ptest %xmm2, %xmm0
    138: movdqu 32(%rdi), %xmm2
    [all...]
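This is the same pxor/ptest chunk test as in the ART routine above; once a 16-byte chunk fails the test, a memcmp still has to locate the first differing byte to decide the sign of the result. A hedged sketch of one common way to do that with SSE2 follows; it is not necessarily the exact tail sequence used by this bionic file, and first_diff_index is an illustrative name.

    #include <emmintrin.h>   /* SSE2 */

    /* Return the index (0..15) of the first differing byte between two
     * 16-byte chunks that the caller already knows are unequal.
     * pcmpeqb  -> _mm_cmpeq_epi8    (0xFF where bytes match, 0x00 where not)
     * pmovmskb -> _mm_movemask_epi8 (one bit per byte) */
    static int first_diff_index(const void *a, const void *b)
    {
        __m128i va = _mm_loadu_si128((const __m128i *)a);
        __m128i vb = _mm_loadu_si128((const __m128i *)b);
        unsigned eq = (unsigned)_mm_movemask_epi8(_mm_cmpeq_epi8(va, vb));
        /* Zero bits mark mismatching bytes; take the lowest one (GCC/Clang builtin). */
        return __builtin_ctz(~eq & 0xffffu);
    }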
/external/llvm/test/MC/X86/
  x86-32-avx.s
    3: // CHECK: vaddss %xmm4, %xmm6, %xmm2
    5: vaddss %xmm4, %xmm6, %xmm2
    7: // CHECK: vmulss %xmm4, %xmm6, %xmm2
    9: vmulss %xmm4, %xmm6, %xmm2
    11: // CHECK: vsubss %xmm4, %xmm6, %xmm2
    13: vsubss %xmm4, %xmm6, %xmm2
    15: // CHECK: vdivss %xmm4, %xmm6, %xmm2
    17: vdivss %xmm4, %xmm6, %xmm2
    19: // CHECK: vaddsd %xmm4, %xmm6, %xmm2
    21: vaddsd %xmm4, %xmm6, %xmm2
    [all...]
/external/llvm/test/CodeGen/X86/
  fmaxnum.ll
    19: ; SSE: movaps %xmm0, %xmm2
    20: ; SSE-NEXT: cmpunordss %xmm2, %xmm2
    21: ; SSE-NEXT: movaps %xmm2, %xmm3
    24: ; SSE-NEXT: andnps %xmm1, %xmm2
    25: ; SSE-NEXT: orps %xmm3, %xmm2
    26: ; SSE-NEXT: movaps %xmm2, %xmm0
    29: ; AVX: vmaxss %xmm0, %xmm1, %xmm2
    31: ; AVX-NEXT: vblendvps %xmm0, %xmm1, %xmm2, %xmm0
    62: ; SSE: movaps %xmm0, %xmm2
    [all...]
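These CHECK lines outline how fmaxnum is lowered without a native instruction: maxss alone returns its second source whenever an input is NaN, so the lowering builds an is-NaN mask with cmpunordss and blends the other operand back in. A rough scalar equivalent with SSE intrinsics is below; it mirrors the general pattern rather than the exact register allocation of the test, and only lane 0 of the result is meaningful.

    #include <xmmintrin.h>   /* SSE */

    /* fmaxnum(a, b): if a is NaN return b, if b is NaN return a, else max(a, b). */
    static __m128 fmaxnum_ss(__m128 a, __m128 b)
    {
        /* cmpunordss a, a -> all-ones in lane 0 iff a is NaN. */
        __m128 a_is_nan = _mm_cmpunord_ss(a, a);
        /* maxss returns its second source when either input is NaN,
         * which already handles the "b is NaN" case. */
        __m128 m = _mm_max_ss(b, a);
        /* Blend: a_is_nan ? b : m (andps/andnps/orps in the SSE lowering,
         * vblendvps in the AVX lowering). */
        return _mm_or_ps(_mm_and_ps(a_is_nan, b), _mm_andnot_ps(a_is_nan, m));
    }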
  vector-zext.ll
    41: ; SSE2-NEXT: pxor %xmm2, %xmm2
    42: ; SSE2-NEXT: punpcklbw {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1],xmm0[2],xmm2[2],xmm0[3],xmm2[3],xmm0[4],xmm2[4],xmm0[5],xmm2[5],xmm0[6],xmm2[6],xmm0[7],xmm2[7 [all...]
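Pre-SSE4.1 zero extension of packed bytes works exactly as these CHECK lines show: zero a register with pxor, then interleave the source with it so every byte gains a zero high byte. A small illustrative sketch (zext_u8_to_u16_lo is a made-up name):

    #include <emmintrin.h>   /* SSE2 */

    /* Zero-extend the low 8 unsigned bytes of v to 8 unsigned 16-bit lanes.
     * pxor xmm2,xmm2    -> _mm_setzero_si128
     * punpcklbw v, zero -> _mm_unpacklo_epi8(v, zero): b0,0,b1,0,... */
    static __m128i zext_u8_to_u16_lo(__m128i v)
    {
        __m128i zero = _mm_setzero_si128();
        return _mm_unpacklo_epi8(v, zero);
    }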
  lower-vec-shift-2.ll
    16: ; AVX-NEXT: vpxor %xmm2, %xmm2, %xmm2
    17: ; AVX-NEXT: vpblendw {{.*#+}} xmm1 = xmm1[0],xmm2[1,2,3,4,5,6,7]
    29: ; SSE2-NEXT: xorps %xmm2, %xmm2
    30: ; SSE2-NEXT: movss {{.*#+}} xmm2 = xmm1[0],xmm2[1,2,3]
    31: ; SSE2-NEXT: pslld %xmm2, %xmm0
    36: ; AVX-NEXT: vpxor %xmm2, %xmm2, %xmm [all...]
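These CHECK lines show why a splatted shift count is masked before pslld: the variable-count vector shifts read the whole low 64 bits of the count register, so everything above the scalar count has to be zeroed first (the xorps+movss or vpxor+vpblendw dance against a zeroed register). With intrinsics the same thing is usually spelled as below; the helper name is illustrative.

    #include <emmintrin.h>   /* SSE2 */

    /* Shift all four 32-bit lanes of v left by the scalar count c.
     * _mm_cvtsi32_si128 zero-extends c into a register, which is the
     * intrinsics-level equivalent of zeroing the upper count bits in the test. */
    static __m128i shl_u32x4(__m128i v, int c)
    {
        __m128i cnt = _mm_cvtsi32_si128(c);   /* count in lane 0, rest zero */
        return _mm_sll_epi32(v, cnt);         /* pslld %xmm, %xmm */
    }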
  vector-shift-ashr-128.ll
    20: ; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372036854775808,9223372036854775808]
    21: ; SSE2-NEXT: movdqa %xmm2, %xmm4
    23: ; SSE2-NEXT: psrlq %xmm1, %xmm2
    24: ; SSE2-NEXT: movsd {{.*#+}} xmm4 = xmm2[0],xmm4[1]
    25: ; SSE2-NEXT: movdqa %xmm0, %xmm2
    26: ; SSE2-NEXT: psrlq %xmm3, %xmm2
    28: ; SSE2-NEXT: movsd {{.*#+}} xmm2 = xmm0[0],xmm2[1]
    29: ; SSE2-NEXT: xorpd %xmm4, %xmm2
    30: ; SSE2-NEXT: psubq %xmm4, %xmm2
    [all...]
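SSE2 has no 64-bit arithmetic right shift, so the lowering in this test shifts logically and then repairs the sign: it shifts both the value and the constant 0x8000000000000000 by the same count, XORs, and subtracts. A simplified sketch for a uniform shift count (the test itself handles per-lane counts, hence the movsd merges) could look like:

    #include <emmintrin.h>   /* SSE2 */

    /* Arithmetic right shift of two signed 64-bit lanes by the same count n,
     * emulated with logical shifts: ((x >>u n) ^ bias) - bias, where
     * bias = 0x8000000000000000 >>u n re-extends the sign bit. */
    static __m128i ashr_epi64(__m128i x, int n)
    {
        __m128i cnt  = _mm_cvtsi32_si128(n);
        __m128i sign = _mm_set1_epi64x((long long)0x8000000000000000ULL);
        __m128i bias = _mm_srl_epi64(sign, cnt);   /* psrlq on the constant */
        __m128i shr  = _mm_srl_epi64(x, cnt);      /* psrlq on the value */
        return _mm_sub_epi64(_mm_xor_si128(shr, bias), bias);   /* xorpd + psubq */
    }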
/hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/vp8/common/x86/
  variance_impl_ssse3.asm
    69: movdqa xmm2, xmm0
    72: punpckhbw xmm2, xmm1
    74: pmaddubsw xmm2, [rax]
    77: paddw xmm2, [GLOBAL(xmm_bi_rd)]
    79: psraw xmm2, xmm_filter_shift
    81: packuswb xmm0, xmm2
    93: movdqu xmm2, XMMWORD PTR [rsi+1]
    96: punpcklbw xmm1, xmm2
    97: punpckhbw xmm3, xmm2
    107: movdqa xmm2, xmm [all...]
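This SSSE3 variance helper applies a two-tap bilinear filter by interleaving each pixel with its right-hand neighbour and feeding the pairs to pmaddubsw, then rounding and shifting back down. A hedged intrinsics sketch of that filtering step; the coefficient packing and Q-format are assumptions chosen to mimic the rounding constant and psraw seen above, not the file's exact tables.

    #include <tmmintrin.h>   /* SSSE3: _mm_maddubs_epi16 */

    /* Two-tap horizontal bilinear filter over 8 pixel pairs:
     * out[i] = (f0*cur[i] + f1*next[i] + round) >> shift
     * punpcklbw interleaves cur with next; pmaddubsw multiplies the unsigned
     * pixel pairs by the signed coefficient pairs and adds each pair. */
    static __m128i bilinear_8px(__m128i cur, __m128i next, int f0, int f1, int shift)
    {
        __m128i pairs  = _mm_unpacklo_epi8(cur, next);            /* p0,q0,p1,q1,... */
        __m128i coeffs = _mm_set1_epi16((short)((f1 << 8) | (f0 & 0xff)));
        __m128i acc    = _mm_maddubs_epi16(pairs, coeffs);        /* f0*p + f1*q */
        __m128i round  = _mm_set1_epi16((short)(1 << (shift - 1)));
        return _mm_srai_epi16(_mm_add_epi16(acc, round), shift);  /* paddw + psraw */
    }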
  mfqe_sse2.asm
    49: movdqa xmm2, [rax]
    54: movdqa xmm3, xmm2
    55: punpcklbw xmm2, xmm6
    57: pmullw xmm2, xmm0
    68: paddw xmm2, xmm4
    70: paddw xmm2, [GLOBAL(tMFQE_round)]
    72: psrlw xmm2, 4
    75: packuswb xmm2, xmm3
    76: movdqa [rdx], xmm2
    126: movq xmm2, [rax [all...]
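The MFQE kernel here is a fixed-point weighted average of two frames: widen the bytes to words, multiply by a weight, add the counter-weighted other frame, add a rounding constant, shift right by 4, and pack back to bytes. A rough intrinsics equivalent follows; the Q4 weights (w + (16 - w) = 16) match the psrlw by 4, and the rounding value of 8 is an assumption about tMFQE_round. Names are illustrative.

    #include <emmintrin.h>   /* SSE2 */

    /* Blend 8 pixels of a and b as (w*a + (16-w)*b + 8) >> 4, with w in [0,16]. */
    static __m128i mfqe_blend8(__m128i a, __m128i b, int w)
    {
        __m128i zero = _mm_setzero_si128();
        __m128i wa = _mm_set1_epi16((short)w);
        __m128i wb = _mm_set1_epi16((short)(16 - w));
        __m128i aw = _mm_mullo_epi16(_mm_unpacklo_epi8(a, zero), wa); /* punpcklbw + pmullw */
        __m128i bw = _mm_mullo_epi16(_mm_unpacklo_epi8(b, zero), wb);
        __m128i sum = _mm_add_epi16(_mm_add_epi16(aw, bw),
                                    _mm_set1_epi16(8));   /* assumed rounding constant */
        return _mm_packus_epi16(_mm_srli_epi16(sum, 4), zero);   /* psrlw 4 + packuswb */
    }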
/external/libvpx/libvpx/vp8/common/x86/
  mfqe_sse2.asm
    49: movdqa xmm2, [rax]
    54: movdqa xmm3, xmm2
    55: punpcklbw xmm2, xmm6
    57: pmullw xmm2, xmm0
    68: paddw xmm2, xmm4
    70: paddw xmm2, [GLOBAL(tMFQE_round)]
    72: psrlw xmm2, 4
    75: packuswb xmm2, xmm3
    76: movdqa [rdx], xmm2
    126: movq xmm2, [rax [all...]
/external/libvpx/libvpx/vp9/common/x86/
  vp9_mfqe_sse2.asm
    50: movdqa xmm2, [rax]
    55: movdqa xmm3, xmm2
    56: punpcklbw xmm2, xmm6
    58: pmullw xmm2, xmm0
    69: paddw xmm2, xmm4
    71: paddw xmm2, [GLOBAL(tMFQE_round)]
    73: psrlw xmm2, 4
    76: packuswb xmm2, xmm3
    77: movdqa [rdx], xmm2
    127: movq xmm2, [rax [all...]
/external/libvpx/libvpx/third_party/libyuv/source/
  compare_win.cc
    35: movdqu xmm2, [edx]
    38: psubusb xmm1, xmm2
    39: psubusb xmm2, xmm3
    40: por xmm1, xmm2
    41: movdqa xmm2, xmm1
    43: punpckhbw xmm2, xmm5
    45: pmaddwd xmm2, xmm2
    47: paddd xmm0, xmm2
    133: // 72: 66 0F 38 40 D5 pmulld xmm2,xmm
    153: movdqa xmm2, xmm1
    154: punpcklbw xmm2, xmm7 // src[0-7]
    164: movdqa xmm2, xmm1
    165: punpcklwd xmm2, xmm7 // src[8-11]
    174: pshufd xmm2, xmm1, 0x0e // upper 2 dwords
    176: pshufd xmm2, xmm1, 0x01
    202: vpmovzxbd xmm2, dword ptr [eax + 8] // src[8-11]
    205: pmulld xmm2, kHashMul2
    211: pshufd xmm2, xmm1, 0x0e // upper 2 dwords
    213: pshufd xmm2, xmm1, 0x01
    [all...]
/external/libyuv/files/source/
  compare_win.cc
    36: movdqu xmm2, [edx]
    39: psubusb xmm1, xmm2
    40: psubusb xmm2, xmm3
    41: por xmm1, xmm2
    42: movdqa xmm2, xmm1
    44: punpckhbw xmm2, xmm5
    46: pmaddwd xmm2, xmm2
    48: paddd xmm0, xmm2
    145: movdqa xmm2, xmm
    146: punpcklbw xmm2, xmm7 // src[0-7]
    156: movdqa xmm2, xmm1
    157: punpcklwd xmm2, xmm7 // src[8-11]
    158: pmulld xmm2, xmm5
    166: pshufd xmm2, xmm1, 0x0e // upper 2 dwords
    168: pshufd xmm2, xmm1, 0x01
    193: vpmovzxbd xmm2, [eax + 8] // src[8-11]
    196: vpmulld xmm2, xmm2, xmmword ptr kHashMul2
    202: vpshufd xmm2, xmm1, 0x0e // upper 2 dwords
    204: vpshufd xmm2, xmm1, 0x01
    [all...]
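Both copies of compare_win.cc accumulate a sum of squared differences the classic SSE2 way: psubusb in both directions plus por yields absolute byte differences, punpck widens them to words, and pmaddwd squares and pairwise-adds them into 32-bit accumulators. An intrinsics sketch of that inner step; ssd16 is an illustrative name, not libyuv's API.

    #include <emmintrin.h>   /* SSE2 */

    /* Add the squared differences of 16 byte pairs into the four 32-bit lanes of acc. */
    static __m128i ssd16(__m128i acc, __m128i a, __m128i b)
    {
        __m128i zero = _mm_setzero_si128();
        /* |a - b| per byte: saturating subtract both ways, then OR. */
        __m128i d  = _mm_or_si128(_mm_subs_epu8(a, b), _mm_subs_epu8(b, a));
        /* Widen to 16-bit, square, and pairwise-add into 32-bit sums. */
        __m128i lo = _mm_unpacklo_epi8(d, zero);
        __m128i hi = _mm_unpackhi_epi8(d, zero);
        acc = _mm_add_epi32(acc, _mm_madd_epi16(lo, lo));
        acc = _mm_add_epi32(acc, _mm_madd_epi16(hi, hi));
        return acc;
    }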
/bionic/libc/arch-x86/atom/string/
  ssse3-strcpy-atom.S
    271: movaps 16(%ecx), %xmm2
    273: pcmpeqb %xmm2, %xmm0
    284: movaps %xmm2, (%edx, %esi)
    319: movaps 16(%ecx, %esi), %xmm2
    321: pcmpeqb %xmm2, %xmm0
    332: movaps %xmm2, (%edx, %esi)
    355: movaps (%ecx), %xmm2
    357: movaps %xmm2, %xmm4
    361: pminub %xmm5, %xmm2
    363: pminub %xmm2, %xmm [all...]
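The strcpy loop copies 16-byte blocks and has to stop at the NUL terminator, which these hits locate with pcmpeqb against zero (and fold several blocks together with pminub, since the byte-wise minimum of a group of blocks contains a zero iff any block does). A small sketch of the terminator test with intrinsics, not the bionic code itself:

    #include <emmintrin.h>   /* SSE2 */

    /* Return the index (0..15) of the first NUL byte in a 16-byte block,
     * or 16 if the block contains no NUL.
     * pcmpeqb against zero marks NUL bytes; pmovmskb turns that into a bitmask. */
    static int nul_index16(const void *p)
    {
        __m128i v    = _mm_loadu_si128((const __m128i *)p);
        __m128i zero = _mm_setzero_si128();
        int mask = _mm_movemask_epi8(_mm_cmpeq_epi8(v, zero));
        return mask ? __builtin_ctz((unsigned)mask) : 16;   /* GCC/Clang builtin */
    }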
/hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/vp9/encoder/x86/
  vp9_variance_impl_sse2.asm
    37: movdqa xmm2, [rax+32]
    41: pmaddwd xmm2, xmm2
    45: paddd xmm2, xmm3
    47: paddd xmm4, xmm2
    129: movdqu xmm2, XMMWORD PTR [rdi]
    135: movdqa xmm4, xmm2
    141: punpcklbw xmm2, xmm0
    145: psubw xmm1, xmm2
    176: movdqa xmm2, xmm [all...]
/external/boringssl/win-x86/crypto/aes/
  aesni-x86.asm
    24: movups xmm2,[eax]
    30: xorps xmm2,xmm0
    40: movups [eax],xmm2
    41: pxor xmm2,xmm2
    49: movups xmm2,[eax]
    55: xorps xmm2,xmm0
    65: movups [eax],xmm2
    66: pxor xmm2,xmm2
    [all...]
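The generated aesni-x86.asm loads a block into xmm2, XORs in the first round key, runs the aesenc/aesenclast rounds, stores the result, and then clears xmm2 so no plaintext or key material is left in registers. A minimal AES-NI single-block sketch with intrinsics, assuming an already expanded key schedule (rounds is 10/12/14 for AES-128/192/256); this is not BoringSSL's code.

    #include <wmmintrin.h>   /* AES-NI: _mm_aesenc_si128, _mm_aesenclast_si128 */

    /* Encrypt one 16-byte block with an expanded key schedule rk[0..rounds]. */
    static __m128i aes_encrypt_block(__m128i block, const __m128i *rk, int rounds)
    {
        block = _mm_xor_si128(block, rk[0]);            /* xorps: whitening key */
        for (int i = 1; i < rounds; i++)
            block = _mm_aesenc_si128(block, rk[i]);     /* aesenc: middle rounds */
        return _mm_aesenclast_si128(block, rk[rounds]); /* aesenclast: final round */
    }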
/external/libvpx/libvpx/vpx_dsp/x86/
  highbd_variance_impl_sse2.asm
    71: movdqu xmm2, XMMWORD PTR [rdi]
    86: psubw xmm1, xmm2
    90: movdqu xmm2, XMMWORD PTR [rdi+16]
    93: psubw xmm3, xmm2
    97: movdqu xmm2, XMMWORD PTR [rdi+rdx]
    100: psubw xmm1, xmm2
    104: movdqu xmm2, XMMWORD PTR [rdi+rdx+16]
    107: psubw xmm3, xmm2
    113: movdqa xmm2, xmm5
    115: pcmpeqw xmm2, xmm [all...]
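For high bit-depth data the samples are already 16-bit, so the variance kernel subtracts rows directly with psubw and can square-and-accumulate with pmaddwd of the difference against itself, alongside a running sum of the raw differences. A hedged sketch of that accumulation step; the helper name is illustrative, and the real routine also folds the accumulators down at the end.

    #include <emmintrin.h>   /* SSE2 */

    /* Accumulate the sum and sum-of-squares of the differences of 8 16-bit samples.
     * psubw   -> _mm_sub_epi16
     * pmaddwd -> _mm_madd_epi16(d, d): squares and pairwise-adds into 32-bit lanes */
    static void hbd_accumulate8(__m128i src, __m128i ref, __m128i *sum, __m128i *sse)
    {
        __m128i d = _mm_sub_epi16(src, ref);
        *sum = _mm_add_epi16(*sum, d);                     /* running sum, 16-bit lanes */
        *sse = _mm_add_epi32(*sse, _mm_madd_epi16(d, d));  /* running sum of squares */
    }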