Home | Sort by relevance | Sort by last modified time
    Searched refs:xmm0 (Results 1 - 25 of 77) sorted by null

1 2 3 4

  /bionic/libm/amd64/
e_sqrt.S 31 sqrtsd %xmm0, %xmm0
e_sqrtf.S 31 sqrtss %xmm0, %xmm0
s_lrint.S 35 cvtsd2si %xmm0, %rax
s_lrintf.S 35 cvtss2si %xmm0, %rax
s_scalbn.S 31 movsd %xmm0,-8(%rsp)
38 movsd -8(%rsp),%xmm0
s_scalbnf.S 31 movss %xmm0,-8(%rsp)
38 movss -8(%rsp),%xmm0
  /dalvik/vm/mterp/x86-atom/
OP_RETURN_WIDE.S 32 movq (rFP, rINST, 4), %xmm0 # %xmm0<- vAA
33 movq %xmm0, offGlue_retval(%edx)# glue->retval<- vAA
OP_INT_TO_FLOAT.S 34 cvtsi2ss (rFP,%eax,4), %xmm0 # %xmm0<- vB
35 movss %xmm0, (rFP, rINST, 4) # vA<- %xmm0
OP_MOVE_WIDE_16.S 34 movq (rFP, %edx, 4), %xmm0 # %xmm0<- vB
35 movq %xmm0, (rFP, %ecx, 4) # vA<- vB; %xmm0
OP_SHL_LONG.S 35 movss (rFP, %eax, 4), %xmm0 # %xmm0<- vCC
36 pand %xmm2, %xmm0 # %xmm0<- masked shift bits
38 psllq %xmm0, %xmm1 # %xmm1<- shifted vBB
OP_SHL_LONG_2ADDR.S 35 movss (rFP, %edx, 4), %xmm0 # %xmm0<- vB
38 pand %xmm2, %xmm0 # %xmm0<- masked shift bits
39 psllq %xmm0, %xmm1 # %xmm1<- shifted vA
OP_USHR_LONG.S 34 movss (rFP, %eax, 4), %xmm0 # %xmm0<- vCC
35 pand %xmm2, %xmm0 # %xmm0<- masked shift bits
37 psrlq %xmm0, %xmm1 # %xmm1<- shifted vBB
OP_USHR_LONG_2ADDR.S 36 movss (rFP, %edx, 4), %xmm0 # %xmm0<- vB
38 pand %xmm2, %xmm0 # %xmm0<- masked shift bits
39 psrlq %xmm0, %xmm1 # %xmm1<- shifted vA
binopF.S 20 * specify an instruction that performs "%xmm0 = %xmm0 op %xmm1"
35 movss (rFP, %ecx, 4), %xmm0 # %xmm0<-vBB
37 $instr # %xmm0<- vBB op vCC
38 movss %xmm0, (rFP, rINST, 4) # vAA<- %xmm0; result
binopF2addr.S 21 * "%xmm0 = %xmm0 op %xmm1".
37 movss (rFP, %ecx, 4), %xmm0 # %xmm0<- vA
39 $instr # %xmm0<- vA op vB
40 movss %xmm0, (rFP, %ecx, 4) # vA<- %xmm0; result
binopWide2addr.S 21 * "%xmm0= %xmm0 op %xmm1".
39 movq (rFP, %edx, 4), %xmm0 # %xmm0<- vA
40 $instr # %xmm0<- vA op vB
41 movq %xmm0, (rFP, %edx, 4) # vA<- %xmm0; result
OP_MOVE_RESULT_WIDE.S 35 movq offGlue_retval(%eax), %xmm0 # %xmm0<- glue->retval
36 movq %xmm0, (rFP, rINST, 4) # vA<- glue->retval
OP_MOVE_WIDE_FROM16.S 32 movq (rFP, %edx, 4), %xmm0 # %xmm0<- vB
33 movq %xmm0, (rFP, rINST, 4) # vA<- vB
OP_INT_TO_DOUBLE.S 34 cvtsi2sd (rFP, %eax, 4), %xmm0 # %xmm0<- vB
35 movq %xmm0, (rFP, rINST, 4) # vA<- %xmm0; (double) vB
binopWide.S 20 * specify an instruction that performs "%xmm0 = %xmm0 op %xmm1"
36 movq (rFP, %ecx, 4), %xmm0 # %xmm0<- vBB
38 $instr # %xmm0<- vBB op vCC
39 movq %xmm0, (rFP, rINST, 4) # vAA<- %ecx
OP_SHR_LONG.S 35 movss (rFP, %eax, 4), %xmm0 # %xmm0<- vCC
37 pand %xmm2, %xmm0 # %xmm0<- masked for the shift bits
38 psrlq %xmm0, %xmm1 # %xmm1<- shifted vBB
46 psubq %xmm0, %xmm3 # %xmm3<- 64 - shift amount
OP_AGET_WIDE.S 41 movq offArrayObject_contents(%ecx, %edx, 8), %xmm0 # %xmm0<- vBB[vCC]
42 movq %xmm0, (rFP, rINST, 4) # vAA<- %xmm0; value
  /bionic/libc/arch-x86/string/
sse2-memset5-atom.S 283 /* Fill xmm0 with the pattern. */
285 pxor %xmm0, %xmm0
287 movd %eax, %xmm0
288 punpcklbw %xmm0, %xmm0
289 pshufd $0, %xmm0, %xmm0
295 movdqu %xmm0, (%edx)
301 movd %xmm0, %eax
    [all...]
  /external/libvpx/vp8/encoder/x86/
dct_sse2.asm 30 movq xmm0, MMWORD PTR[rsi ] ;03 02 01 00
35 punpcklqdq xmm0, xmm2 ;13 12 11 10 03 02 01 00
40 movdqa xmm2, xmm0
41 punpckldq xmm0, xmm1 ;23 22 03 02 21 20 01 00
43 movdqa xmm1, xmm0
44 punpckldq xmm0, xmm2 ;31 21 30 20 11 10 01 00
49 movdqa xmm3, xmm0
50 paddw xmm0, xmm1 ;b1 a1 b1 a1 b1 a1 b1 a1
52 psllw xmm0, 3 ;b1 <<= 3 a1 <<= 3
54 movdqa xmm1, xmm0
    [all...]
fwalsh_sse2.asm 31 movq xmm0, MMWORD PTR [rsi] ; load input
37 punpcklwd xmm0, xmm1
40 movdqa xmm1, xmm0
41 punpckldq xmm0, xmm2 ; ip[1] ip[0]
44 movdqa xmm2, xmm0
45 paddw xmm0, xmm1
48 psllw xmm0, 2 ; d1 a1
51 movdqa xmm1, xmm0
52 punpcklqdq xmm0, xmm2 ; b1 a1
56 movq xmm6, xmm0
    [all...]

Completed in 258 milliseconds

1 2 3 4