Searched refs: xmm5 (Results 1 - 25 of 350)


  /external/swiftshader/third_party/LLVM/test/MC/X86/
x86-32-coverage.s 521 // CHECK: movd %ecx, %xmm5
522 movd %ecx,%xmm5
524 // CHECK: movd 3735928559(%ebx,%ecx,8), %xmm5
525 movd 0xdeadbeef(%ebx,%ecx,8),%xmm5
527 // CHECK: movd %xmm5, %ecx
528 movd %xmm5,%ecx
530 // CHECK: movd %xmm5, 3735928559(%ebx,%ecx,8)
531 movd %xmm5,0xdeadbeef(%ebx,%ecx,8)
542 // CHECK: movq %xmm5, %xmm5
    [all...]
x86-32-fma3.s 3 // CHECK: vfmadd132pd %xmm2, %xmm5, %xmm1
5 vfmadd132pd %xmm2, %xmm5, %xmm1
7 // CHECK: vfmadd132pd (%eax), %xmm5, %xmm1
9 vfmadd132pd (%eax), %xmm5, %xmm1
11 // CHECK: vfmadd132ps %xmm2, %xmm5, %xmm1
13 vfmadd132ps %xmm2, %xmm5, %xmm1
15 // CHECK: vfmadd132ps (%eax), %xmm5, %xmm1
17 vfmadd132ps (%eax), %xmm5, %xmm1
19 // CHECK: vfmadd213pd %xmm2, %xmm5, %xmm1
21 vfmadd213pd %xmm2, %xmm5, %xmm1
    [all...]
  /external/llvm/test/MC/X86/
x86-32-coverage.s     [all...]
x86-32-fma3.s 3 // CHECK: vfmadd132pd %xmm2, %xmm5, %xmm1
5 vfmadd132pd %xmm2, %xmm5, %xmm1
7 // CHECK: vfmadd132pd (%eax), %xmm5, %xmm1
9 vfmadd132pd (%eax), %xmm5, %xmm1
11 // CHECK: vfmadd132ps %xmm2, %xmm5, %xmm1
13 vfmadd132ps %xmm2, %xmm5, %xmm1
15 // CHECK: vfmadd132ps (%eax), %xmm5, %xmm1
17 vfmadd132ps (%eax), %xmm5, %xmm1
19 // CHECK: vfmadd213pd %xmm2, %xmm5, %xmm1
21 vfmadd213pd %xmm2, %xmm5, %xmm1
    [all...]
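Note: the four MC entries above (SwiftShader's bundled LLVM copy and upstream LLVM) are llvm-mc round-trip tests: each bare instruction is assembled and the assembler's printout is matched against the preceding // CHECK: comment by FileCheck. A minimal sketch of the pattern, with a representative RUN line (the exact triple and feature flags vary per file):

    // RUN: llvm-mc -triple i386-unknown-unknown --show-encoding %s | FileCheck %s

    // CHECK: vfmadd132pd %xmm2, %xmm5, %xmm1
              vfmadd132pd %xmm2, %xmm5, %xmm1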
  /toolchain/binutils/binutils-2.25/gas/testsuite/gas/i386/
evex-lig.s 7 vaddsd %xmm4, %xmm5, %xmm6{%k7} # AVX512
8 vaddsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
9 vaddsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
10 vaddsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
11 vaddsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
12 vaddsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
13 vaddsd (%ecx), %xmm5, %xmm6{%k7} # AVX512
14 vaddsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
15 vaddsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
16 vaddsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
    [all...]
evex-lig256.d 12 [ ]*[a-f0-9]+: 62 f1 d7 2f 58 f4 vaddsd %xmm4,%xmm5,%xmm6\{%k7\}
13 [ ]*[a-f0-9]+: 62 f1 d7 af 58 f4 vaddsd %xmm4,%xmm5,%xmm6\{%k7\}\{z\}
14 [ ]*[a-f0-9]+: 62 f1 d7 1f 58 f4 vaddsd \{rn-sae\},%xmm4,%xmm5,%xmm6\{%k7\}
15 [ ]*[a-f0-9]+: 62 f1 d7 5f 58 f4 vaddsd \{ru-sae\},%xmm4,%xmm5,%xmm6\{%k7\}
16 [ ]*[a-f0-9]+: 62 f1 d7 3f 58 f4 vaddsd \{rd-sae\},%xmm4,%xmm5,%xmm6\{%k7\}
17 [ ]*[a-f0-9]+: 62 f1 d7 7f 58 f4 vaddsd \{rz-sae\},%xmm4,%xmm5,%xmm6\{%k7\}
18 [ ]*[a-f0-9]+: 62 f1 d7 2f 58 31 vaddsd \(%ecx\),%xmm5,%xmm6\{%k7\}
19 [ ]*[a-f0-9]+: 62 f1 d7 2f 58 b4 f4 c0 1d fe ff vaddsd -0x1e240\(%esp,%esi,8\),%xmm5,%xmm6\{%k7\}
20 [ ]*[a-f0-9]+: 62 f1 d7 2f 58 72 7f vaddsd 0x3f8\(%edx\),%xmm5,%xmm6\{%k7\}
21 [ ]*[a-f0-9]+: 62 f1 d7 2f 58 b2 00 04 00 00 vaddsd 0x400\(%edx\),%xmm5,%xmm6\{%k7\}
    [all...]
evex-lig512.d 12 [ ]*[a-f0-9]+: 62 f1 d7 4f 58 f4 vaddsd %xmm4,%xmm5,%xmm6\{%k7\}
13 [ ]*[a-f0-9]+: 62 f1 d7 cf 58 f4 vaddsd %xmm4,%xmm5,%xmm6\{%k7\}\{z\}
14 [ ]*[a-f0-9]+: 62 f1 d7 1f 58 f4 vaddsd \{rn-sae\},%xmm4,%xmm5,%xmm6\{%k7\}
15 [ ]*[a-f0-9]+: 62 f1 d7 5f 58 f4 vaddsd \{ru-sae\},%xmm4,%xmm5,%xmm6\{%k7\}
16 [ ]*[a-f0-9]+: 62 f1 d7 3f 58 f4 vaddsd \{rd-sae\},%xmm4,%xmm5,%xmm6\{%k7\}
17 [ ]*[a-f0-9]+: 62 f1 d7 7f 58 f4 vaddsd \{rz-sae\},%xmm4,%xmm5,%xmm6\{%k7\}
18 [ ]*[a-f0-9]+: 62 f1 d7 4f 58 31 vaddsd \(%ecx\),%xmm5,%xmm6\{%k7\}
19 [ ]*[a-f0-9]+: 62 f1 d7 4f 58 b4 f4 c0 1d fe ff vaddsd -0x1e240\(%esp,%esi,8\),%xmm5,%xmm6\{%k7\}
20 [ ]*[a-f0-9]+: 62 f1 d7 4f 58 72 7f vaddsd 0x3f8\(%edx\),%xmm5,%xmm6\{%k7\}
21 [ ]*[a-f0-9]+: 62 f1 d7 4f 58 b2 00 04 00 00 vaddsd 0x400\(%edx\),%xmm5,%xmm6\{%k7\}
    [all...]
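Note: evex-lig.s is the gas input and the evex-lig256/512 .d files hold the expected objdump output (apparently the same source assembled with different default lengths for these length-ignored scalar instructions), so the hex bytes above are the EVEX encodings; the 256- and 512-bit dumps differ only in the L'L bits of the fourth prefix byte (2f vs 4f, af vs cf). A rough decoding of that byte for three of the lines above (byte values quoted from the dumps, bit layout per the Intel SDM):

    vaddsd %xmm4, %xmm5, %xmm6{%k7}            # 62 f1 d7 2f 58 f4 -> low bits aaa=111 select %k7
    vaddsd %xmm4, %xmm5, %xmm6{%k7}{z}         # 62 f1 d7 af 58 f4 -> top bit EVEX.z set (zero-masking)
    vaddsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}  # 62 f1 d7 7f 58 f4 -> EVEX.b set, L'L carry the rounding mode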
avx512er-rcig.s 10 vrcp28ss {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512ER
11 vrcp28sd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512ER
14 vrsqrt28ss {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512ER
15 vrsqrt28sd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512ER
22 vrcp28ss xmm6{k7}, xmm5, xmm4, {sae} # AVX512ER
23 vrcp28sd xmm6{k7}, xmm5, xmm4, {sae} # AVX512ER
26 vrsqrt28ss xmm6{k7}, xmm5, xmm4, {sae} # AVX512ER
27 vrsqrt28sd xmm6{k7}, xmm5, xmm4, {sae} # AVX512ER
avx512f-rcig.s 10 vcmpsd $0xab, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
11 vcmpsd $123, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
12 vcmpss $0xab, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
13 vcmpss $123, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
14 vcomisd {sae}, %xmm5, %xmm6 # AVX512F
15 vcomiss {sae}, %xmm5, %xmm6 # AVX512F
20 vcvtss2sd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
29 vgetexpsd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
30 vgetexpss {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
35 vgetmantsd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
    [all...]
avx512dq-rcig.s 10 vrangesd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
11 vrangesd $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
12 vrangess $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
13 vrangess $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
18 vreducesd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
19 vreducesd $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
20 vreducess $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
21 vreducess $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
32 vrangesd xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512DQ
33 vrangesd xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512DQ
    [all...]
x86-64-avx-gather.s 43 vgatherdps %xmm5,0x8(,%xmm4,1),%xmm6
44 vgatherdps %xmm5,-0x8(,%xmm4,1),%xmm6
45 vgatherdps %xmm5,(,%xmm4,1),%xmm6
46 vgatherdps %xmm5,0x298(,%xmm4,1),%xmm6
47 vgatherdps %xmm5,0x8(,%xmm4,8),%xmm6
48 vgatherdps %xmm5,-0x8(,%xmm4,8),%xmm6
49 vgatherdps %xmm5,(,%xmm4,8),%xmm6
50 vgatherdps %xmm5,0x298(,%xmm4,8),%xmm6
52 vgatherdps %xmm5,0x8(,%xmm14,1),%xmm6
53 vgatherdps %xmm5,-0x8(,%xmm14,1),%xmm6
    [all...]
avx-gather.s 24 vgatherdps %xmm5,0x8(,%xmm4,1),%xmm6
25 vgatherdps %xmm5,-0x8(,%xmm4,1),%xmm6
26 vgatherdps %xmm5,(,%xmm4,1),%xmm6
27 vgatherdps %xmm5,0x298(,%xmm4,1),%xmm6
28 vgatherdps %xmm5,0x8(,%xmm4,8),%xmm6
29 vgatherdps %xmm5,-0x8(,%xmm4,8),%xmm6
30 vgatherdps %xmm5,(,%xmm4,8),%xmm6
31 vgatherdps %xmm5,0x298(,%xmm4,8),%xmm6
38 vpgatherdd %xmm5,0x8(,%xmm4,1),%xmm6
39 vpgatherdd %xmm5,-0x8(,%xmm4,1),%xmm6
    [all...]
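Note on the two gather entries above (AT&T operand order): the first register is the mask, the memory operand is a base-less VSIB form whose index is a vector register, and the last register is the destination; lanes whose mask sign bit is clear are skipped, and the mask is zeroed as the gather completes. A commented sketch using the same operands:

    vgatherdps %xmm5, 0x8(,%xmm4,1), %xmm6   # %xmm5: per-lane mask (sign bits), cleared on completion
                                             # 0x8(,%xmm4,1): VSIB, dword indices taken from %xmm4
                                             # %xmm6: destination; masked-off lanes keep their old value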
avx512bw_vl-opts.s 6 vmovdqu8 %xmm5, %xmm6{%k7} # AVX512{BW,VL}
7 vmovdqu8.s %xmm5, %xmm6{%k7} # AVX512{BW,VL}
8 vmovdqu8 %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
9 vmovdqu8.s %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
10 vmovdqu8 %xmm5, %xmm6{%k7} # AVX512{BW,VL}
11 vmovdqu8.s %xmm5, %xmm6{%k7} # AVX512{BW,VL}
12 vmovdqu8 %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
13 vmovdqu8.s %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
22 vmovdqu16 %xmm5, %xmm6{%k7} # AVX512{BW,VL}
23 vmovdqu16.s %xmm5, %xmm6{%k7} # AVX512{BW,VL}
    [all...]
avx512f_vl-opts.s 6 vmovapd %xmm5, %xmm6{%k7} # AVX512{F,VL}
7 vmovapd.s %xmm5, %xmm6{%k7} # AVX512{F,VL}
8 vmovapd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
9 vmovapd.s %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
10 vmovapd %xmm5, %xmm6{%k7} # AVX512{F,VL}
11 vmovapd.s %xmm5, %xmm6{%k7} # AVX512{F,VL}
12 vmovapd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
13 vmovapd.s %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
22 vmovaps %xmm5, %xmm6{%k7} # AVX512{F,VL}
23 vmovaps.s %xmm5, %xmm6{%k7} # AVX512{F,VL}
    [all...]
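Note: the *-opts.s entries exercise gas's encoding options for register-to-register moves, which have two legal encodings; as I read these tests, the .s mnemonic suffix (an older gas spelling, since superseded by the {store} pseudo-prefix) requests the alternate, operand-swapped encoding, which is why every instruction appears in a plain and a .s form. Sketch:

    vmovapd   %xmm5, %xmm6{%k7}    # default encoding of the move
    vmovapd.s %xmm5, %xmm6{%k7}    # .s: same operation, alternate (swapped-ModRM) encoding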
evex-lig256-intel.d 12 [ ]*[a-f0-9]+: 62 f1 d7 2f 58 f4 vaddsd xmm6\{k7\},xmm5,xmm4
13 [ ]*[a-f0-9]+: 62 f1 d7 af 58 f4 vaddsd xmm6\{k7\}\{z\},xmm5,xmm4
14 [ ]*[a-f0-9]+: 62 f1 d7 1f 58 f4 vaddsd xmm6\{k7\},xmm5,xmm4,\{rn-sae\}
15 [ ]*[a-f0-9]+: 62 f1 d7 5f 58 f4 vaddsd xmm6\{k7\},xmm5,xmm4,\{ru-sae\}
16 [ ]*[a-f0-9]+: 62 f1 d7 3f 58 f4 vaddsd xmm6\{k7\},xmm5,xmm4,\{rd-sae\}
17 [ ]*[a-f0-9]+: 62 f1 d7 7f 58 f4 vaddsd xmm6\{k7\},xmm5,xmm4,\{rz-sae\}
18 [ ]*[a-f0-9]+: 62 f1 d7 2f 58 31 vaddsd xmm6\{k7\},xmm5,QWORD PTR \[ecx\]
19 [ ]*[a-f0-9]+: 62 f1 d7 2f 58 b4 f4 c0 1d fe ff vaddsd xmm6\{k7\},xmm5,QWORD PTR \[esp\+esi\*8-0x1e240\]
20 [ ]*[a-f0-9]+: 62 f1 d7 2f 58 72 7f vaddsd xmm6\{k7\},xmm5,QWORD PTR \[edx\+0x3f8\]
21 [ ]*[a-f0-9]+: 62 f1 d7 2f 58 b2 00 04 00 00 vaddsd xmm6\{k7\},xmm5,QWORD PTR \[edx\+0x400\]
    [all...]
evex-lig512-intel.d 12 [ ]*[a-f0-9]+: 62 f1 d7 4f 58 f4 vaddsd xmm6\{k7\},xmm5,xmm4
13 [ ]*[a-f0-9]+: 62 f1 d7 cf 58 f4 vaddsd xmm6\{k7\}\{z\},xmm5,xmm4
14 [ ]*[a-f0-9]+: 62 f1 d7 1f 58 f4 vaddsd xmm6\{k7\},xmm5,xmm4,\{rn-sae\}
15 [ ]*[a-f0-9]+: 62 f1 d7 5f 58 f4 vaddsd xmm6\{k7\},xmm5,xmm4,\{ru-sae\}
16 [ ]*[a-f0-9]+: 62 f1 d7 3f 58 f4 vaddsd xmm6\{k7\},xmm5,xmm4,\{rd-sae\}
17 [ ]*[a-f0-9]+: 62 f1 d7 7f 58 f4 vaddsd xmm6\{k7\},xmm5,xmm4,\{rz-sae\}
18 [ ]*[a-f0-9]+: 62 f1 d7 4f 58 31 vaddsd xmm6\{k7\},xmm5,QWORD PTR \[ecx\]
19 [ ]*[a-f0-9]+: 62 f1 d7 4f 58 b4 f4 c0 1d fe ff vaddsd xmm6\{k7\},xmm5,QWORD PTR \[esp\+esi\*8-0x1e240\]
20 [ ]*[a-f0-9]+: 62 f1 d7 4f 58 72 7f vaddsd xmm6\{k7\},xmm5,QWORD PTR \[edx\+0x3f8\]
21 [ ]*[a-f0-9]+: 62 f1 d7 4f 58 b2 00 04 00 00 vaddsd xmm6\{k7\},xmm5,QWORD PTR \[edx\+0x400\]
    [all...]
katmai.s 10 andnps %xmm6,%xmm5
17 cmpps $0x6,%xmm5,%xmm4
18 cmpps $0x7,(%esi),%xmm5
25 cmpltps %xmm5,%xmm4
26 cmpltps (%esi),%xmm5
34 cmpunordps %xmm6,%xmm5
41 cmpnltps %xmm5,%xmm4
42 cmpnltps (%esi),%xmm5
50 cmpordps %xmm6,%xmm5
58 cvtsi2ss (%esi),%xmm5
    [all...]
avx512ifma_vl.s 6 vpmadd52luq %xmm4, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
7 vpmadd52luq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{IFMA,VL}
8 vpmadd52luq (%ecx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
9 vpmadd52luq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
10 vpmadd52luq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
11 vpmadd52luq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL} Disp8
12 vpmadd52luq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
13 vpmadd52luq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL} Disp8
14 vpmadd52luq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
15 vpmadd52luq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL} Disp8
    [all...]
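Note: the Disp8 remarks in this entry mark EVEX compressed displacements: an 8-bit displacement is scaled by the memory operand size N, so it covers +/-127*N. For the full 16-byte operand N is 16 (2032 = 127*16 still fits, 2048 does not), and for the {1to2} qword broadcast N is 8 (1016 = 127*8). The boundary, using the excerpt's own operands:

    vpmadd52luq 2032(%edx), %xmm5, %xmm6{%k7}        # 2032 = 127*16 -> one-byte disp8*N encoding
    vpmadd52luq 2048(%edx), %xmm5, %xmm6{%k7}        # 2048 = 128*16 -> needs a four-byte disp32
    vpmadd52luq 1016(%edx){1to2}, %xmm5, %xmm6{%k7}  # broadcast element is 8 bytes: 1016 = 127*8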
sse2.s 11 andnpd %xmm6,%xmm5
18 cmppd $0x6,%xmm5,%xmm4
19 cmppd $0x7,(%esi),%xmm5
26 cmpltpd %xmm5,%xmm4
27 cmpltpd (%esi),%xmm5
35 cmpunordpd %xmm6,%xmm5
42 cmpnltpd %xmm5,%xmm4
43 cmpnltpd (%esi),%xmm5
51 cmpordpd %xmm6,%xmm5
59 cvtsi2sd (%esi),%xmm5
    [all...]
  /external/libvpx/libvpx/vp8/common/x86/
idctllm_sse2.asm 34 movd xmm5, [rdx]
37 pinsrw xmm5, [rdx], 4
39 pmullw xmm4, xmm5
41 ; Zero out xmm5, for use unpacking
42 pxor xmm5, xmm5
45 movd [rax], xmm5
46 movd [rax+32], xmm5
64 punpcklbw xmm0, xmm5
65 punpcklbw xmm1, xmm5
    [all...]
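Note: from the libvpx entry above onward the excerpts switch to NASM/YASM syntax (Intel operand order, destination first, no % prefixes). The pxor/punpcklbw pair in idctllm_sse2.asm is the usual zero-extend idiom, sketched with the excerpt's registers:

    pxor      xmm5, xmm5   ; xmm5 := 0 (a register XORed with itself is zero)
    punpcklbw xmm0, xmm5   ; interleave low bytes of xmm0 with zeros -> bytes zero-extended to words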
  /external/libjpeg-turbo/simd/
jidctfst-sse2-64.asm 142 pshufd xmm5,xmm0,0xAA ; xmm5=col2=(02 02 02 02 02 02 02 02)
167 movdqa xmm5,xmm1
171 paddw xmm5,xmm3 ; xmm5=tmp13
175 psubw xmm1,xmm5 ; xmm1=tmp12
179 psubw xmm4,xmm5 ; xmm4=tmp3
181 paddw xmm6,xmm5 ; xmm6=tmp0
193 movdqa xmm5, XMMWORD [XMMBLOCK(5,0,rsi,SIZEOF_JCOEF)]
195 pmullw xmm5, XMMWORD [XMMBLOCK(5,0,rdx,SIZEOF_IFAST_MULT_TYPE)]
    [all...]
jidctfst-sse2.asm 148 pshufd xmm5,xmm0,0xAA ; xmm5=col2=(02 02 02 02 02 02 02 02)
174 movdqa xmm5,xmm1
178 paddw xmm5,xmm3 ; xmm5=tmp13
182 psubw xmm1,xmm5 ; xmm1=tmp12
186 psubw xmm4,xmm5 ; xmm4=tmp3
188 paddw xmm6,xmm5 ; xmm6=tmp0
200 movdqa xmm5, XMMWORD [XMMBLOCK(5,0,esi,SIZEOF_JCOEF)]
202 pmullw xmm5, XMMWORD [XMMBLOCK(5,0,edx,SIZEOF_IFAST_MULT_TYPE)]
    [all...]
jfdctfst-sse2-64.asm 109 movdqa xmm5,xmm2 ; transpose coefficients(phase 1)
111 punpckhwd xmm5,xmm3 ; xmm5=(24 34 25 35 26 36 27 37)
122 movdqa XMMWORD [wk(1)], xmm5 ; wk(1)=(24 34 25 35 26 36 27 37)
127 movdqa xmm5,xmm1 ; transpose coefficients(phase 1)
129 punpckhwd xmm5,xmm3 ; xmm5=(64 74 65 75 66 76 67 77)
135 punpckldq xmm2,xmm5 ; xmm2=(44 54 64 74 45 55 65 75)
136 punpckhdq xmm3,xmm5 ; xmm3=(46 56 66 76 47 57 67 77)
139 movdqa xmm5, XMMWORD [wk(1)] ; xmm5=(24 34 25 35 26 36 27 37)
    [all...]
jfdctfst-sse2.asm 115 movdqa xmm5,xmm2 ; transpose coefficients(phase 1)
117 punpckhwd xmm5,xmm3 ; xmm5=(24 34 25 35 26 36 27 37)
128 movdqa XMMWORD [wk(1)], xmm5 ; wk(1)=(24 34 25 35 26 36 27 37)
133 movdqa xmm5,xmm1 ; transpose coefficients(phase 1)
135 punpckhwd xmm5,xmm3 ; xmm5=(64 74 65 75 66 76 67 77)
141 punpckldq xmm2,xmm5 ; xmm2=(44 54 64 74 45 55 65 75)
142 punpckhdq xmm3,xmm5 ; xmm3=(46 56 66 76 47 57 67 77)
145 movdqa xmm5, XMMWORD [wk(1)] ; xmm5=(24 34 25 35 26 36 27 37)
    [all...]
jidctflt-sse2-64.asm 105 movq xmm5, XMM_MMWORD [MMBLOCK(5,0,rsi,SIZEOF_JCOEF)]
110 por xmm5,xmm6
112 por xmm5,xmm7
113 por xmm1,xmm5
177 movaps xmm5,xmm1
181 addps xmm5,xmm3 ; xmm5=tmp13
184 subps xmm1,xmm5 ; xmm1=tmp12
188 subps xmm4,xmm5 ; xmm4=tmp3
190 addps xmm6,xmm5 ; xmm6=tmp0
    [all...]
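Note: the movq/por run at the top of this last entry is the usual sparse-block shortcut in these IDCTs: several coefficient rows are OR-ed into one register so a single test can tell whether the whole column is zero and a cheaper path can be taken. Sketch (NASM syntax, registers as in the excerpt):

    por xmm5, xmm6    ; accumulate non-zero bits from the loaded coefficient rows
    por xmm5, xmm7
    por xmm1, xmm5    ; xmm1 != 0  <=>  at least one row held a non-zero coefficient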
