    Searched full:pxor (Results 1 - 25 of 159)


  /external/valgrind/main/memcheck/tests/amd64/
xor-undef-amd64.stdout.exp 14 Complain sse2 pxor
16 No complain sse2 pxor
  /external/openssl/crypto/aes/asm/
bsaes-x86_64.S 14 pxor %xmm8,%xmm15
15 pxor %xmm8,%xmm0
17 pxor %xmm8,%xmm1
19 pxor %xmm8,%xmm2
21 pxor %xmm8,%xmm3
23 pxor %xmm8,%xmm4
25 pxor %xmm8,%xmm5
27 pxor %xmm8,%xmm6
37 pxor %xmm6,%xmm5
38 pxor %xmm4,%xmm
    [all...]
bsaes-x86_64.pl 58 # pxor, etc.). While it resulted in nominal 4% improvement on
134 pxor @b[6], @b[5]
135 pxor @b[1], @b[2]
136 pxor @b[0], @b[3]
137 pxor @b[2], @b[6]
138 pxor @b[0], @b[5]
140 pxor @b[3], @b[6]
141 pxor @b[7], @b[3]
142 pxor @b[5], @b[7]
143 pxor @b[4], @b[3
    [all...]
vpaes-x86.S 82 pxor %xmm5,%xmm2
83 pxor %xmm2,%xmm0
91 pxor %xmm5,%xmm4
94 pxor %xmm4,%xmm0
100 pxor %xmm5,%xmm2
105 pxor %xmm2,%xmm0
108 pxor %xmm0,%xmm3
111 pxor %xmm3,%xmm0
120 pxor %xmm1,%xmm0
123 pxor %xmm5,%xmm
    [all...]
vpaes-x86_64.S 33 pxor %xmm5,%xmm2
34 pxor %xmm2,%xmm0
44 pxor %xmm5,%xmm4
47 pxor %xmm4,%xmm0
53 pxor %xmm5,%xmm2
58 pxor %xmm2,%xmm0
61 pxor %xmm0,%xmm3
64 pxor %xmm3,%xmm0
75 pxor %xmm1,%xmm0
78 pxor %xmm5,%xmm
    [all...]
vpaes-x86.pl 192 &pxor ("xmm2","xmm5");
193 &pxor ("xmm0","xmm2");
203 &pxor ("xmm4","xmm5"); # 4 = sb1u + k
206 &pxor ("xmm0","xmm4"); # 0 = A
212 &pxor ("xmm2","xmm5"); # 2 = 2A
217 &pxor ("xmm0","xmm2"); # 0 = 2A+B
220 &pxor ("xmm3","xmm0"); # 3 = 2A+B+D
223 &pxor ("xmm0","xmm3"); # 0 = 2A+3B+C+D
234 &pxor ("xmm0","xmm1"); # 0 = j
237 &pxor ("xmm3","xmm5"); # 3 = iak = 1/i + a/
    [all...]
vpaes-x86_64.pl 97 pxor %xmm5, %xmm2
98 pxor %xmm2, %xmm0
108 pxor %xmm5, %xmm4 # 4 = sb1u + k
111 pxor %xmm4, %xmm0 # 0 = A
117 pxor %xmm5, %xmm2 # 2 = 2A
122 pxor %xmm2, %xmm0 # 0 = 2A+B
125 pxor %xmm0, %xmm3 # 3 = 2A+B+D
128 pxor %xmm3, %xmm0 # 0 = 2A+3B+C+D
139 pxor %xmm1, %xmm0 # 0 = j
142 pxor %xmm5, %xmm3 # 3 = iak = 1/i + a/
    [all...]
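
Note: the bsaes-* and vpaes-* hits above are OpenSSL's bit-sliced and vector-permutation AES implementations, where pxor XORs the 128-bit state with round-key material and folds partial terms together (the "sb1u + k" and "2A+B" comments). Purely as an illustration of that XOR step, and not OpenSSL's actual code, the SSE2 intrinsic behind pxor is _mm_xor_si128; the function and variable names below are invented:

    #include <emmintrin.h>              /* SSE2 intrinsics */

    /* Illustrative sketch only: XOR a 128-bit AES state with a round key,
     * which is the operation the pxor instructions above perform on the
     * %xmm registers holding state and key material. */
    static inline __m128i add_round_key(__m128i state, __m128i round_key)
    {
        return _mm_xor_si128(state, round_key);   /* lowered to pxor / vpxor */
    }
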
  /external/openssl/crypto/modes/asm/
ghash-x86.S 220 pxor 8(%esi,%edx,1),%mm0
224 pxor (%esi,%edx,1),%mm1
227 pxor %mm2,%mm0
232 pxor 8(%esi,%ecx,1),%mm0
235 pxor (%eax,%ebp,8),%mm1
237 pxor (%esi,%ecx,1),%mm1
239 pxor %mm2,%mm0
243 pxor 8(%esi,%edx,1),%mm0
246 pxor (%eax,%ebx,8),%mm1
248 pxor (%esi,%edx,1),%mm
    [all...]
ghash-x86_64.S 668 pxor %xmm5,%xmm5
676 pxor %xmm5,%xmm2
683 pxor %xmm0,%xmm3
684 pxor %xmm2,%xmm4
688 pxor %xmm0,%xmm3
689 pxor %xmm1,%xmm3
694 pxor %xmm3,%xmm1
695 pxor %xmm4,%xmm0
699 pxor %xmm3,%xmm0
701 pxor %xmm3,%xmm
    [all...]
ghash-x86.pl 380 &pxor ($Zlo,&QWP(8,$Htbl,$nix));
384 &pxor ($Zhi,&QWP(0,$rem_4bit,$rem[1],8)) if ($cnt<28);
386 &pxor ($Zhi,&QWP(0,$Htbl,$nix));
389 &pxor ($Zlo,$tmp);
526 &pxor ($Zlo,&QWP(8,$Htbl,$nhi));
529 &pxor ($Zhi,&QWP(0,$rem_4bit,$rem,8));
532 &pxor ($Zhi,&QWP(0,$Htbl,$nhi));
534 &pxor ($Zlo,$tmp);
543 &pxor ($Zlo,&QWP(8,$Htbl,$nlo));
545 &pxor ($Zhi,&QWP(0,$rem_4bit,$rem,8))
    [all...]
ghash-x86_64.pl 364 pxor $Xi,$T1 #
365 pxor $Hkey,$T2
371 pxor $Xi,$T1 #
372 pxor $Xhi,$T1 #
377 pxor $T1,$Xhi
378 pxor $T2,$Xi #
389 pxor $T1,$Xi #
391 pxor $T1,$Xi #
396 pxor $T1,$Xi
397 pxor $T2,$Xhi
    [all...]
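
Note: in the ghash-* hits above, pxor serves as addition in GF(2^128): adding binary polynomials is a bitwise XOR, so the folding of partial products in GHASH is built almost entirely from pxor. A minimal sketch of that field addition only, assuming the operands already sit in __m128i values; the modular reduction is not shown:

    #include <emmintrin.h>

    /* GF(2^128) addition is carry-less: every bit is added mod 2, i.e. XOR-ed.
     * This is the role pxor plays between the partial products in the GHASH
     * code above; the reduction is a separate step and not sketched here. */
    static inline __m128i gf128_add(__m128i a, __m128i b)
    {
        return _mm_xor_si128(a, b);
    }
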
  /external/openssl/crypto/
x86_64cpuid.pl 217 pxor %xmm0,%xmm0
218 pxor %xmm1,%xmm1
219 pxor %xmm2,%xmm2
220 pxor %xmm3,%xmm3
221 pxor %xmm4,%xmm4
222 pxor %xmm5,%xmm5
223 pxor %xmm6,%xmm6
224 pxor %xmm7,%xmm7
225 pxor %xmm8,%xmm8
226 pxor %xmm9,%xmm
    [all...]
x86_64cpuid.S 194 pxor %xmm0,%xmm0
195 pxor %xmm1,%xmm1
196 pxor %xmm2,%xmm2
197 pxor %xmm3,%xmm3
198 pxor %xmm4,%xmm4
199 pxor %xmm5,%xmm5
200 pxor %xmm6,%xmm6
201 pxor %xmm7,%xmm7
202 pxor %xmm8,%xmm8
203 pxor %xmm9,%xmm
    [all...]
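
Note: x86_64cpuid.pl/.S zero each xmm register with pxor reg,reg, the standard idiom for clearing a vector register without loading a constant. A hedged C-level equivalent of just that idiom:

    #include <emmintrin.h>

    /* pxor %xmmN,%xmmN sets the register to all zeros; the portable intrinsic
     * is _mm_setzero_si128(), which compilers typically lower to this kind of
     * self-XOR (pxor, or vpxor/xorps depending on target and context). */
    static inline __m128i zeroed_vector(void)
    {
        return _mm_setzero_si128();
    }
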
  /external/libvpx/libvpx/vp8/encoder/x86/
quantize_mmx.asm 37 pxor mm3, mm0
55 pxor mm3, mm0
80 pxor mm7, mm4
95 pxor mm7, mm4
121 pxor mm7, mm4
136 pxor mm7, mm4
162 pxor mm7, mm4
177 pxor mm7, mm4
198 pxor mm5, mm5
199 pxor mm7, mm
    [all...]
ssim_opt.asm 79 pxor xmm0, xmm0
80 pxor xmm15,xmm15 ;sum_s
81 pxor xmm14,xmm14 ;sum_r
82 pxor xmm13,xmm13 ;sum_sq_s
83 pxor xmm12,xmm12 ;sum_sq_r
84 pxor xmm11,xmm11 ;sum_sxr
169 pxor xmm0, xmm0
170 pxor xmm15,xmm15 ;sum_s
171 pxor xmm14,xmm14 ;sum_r
172 pxor xmm13,xmm13 ;sum_sq_
    [all...]
quantize_sse2.asm 78 pxor xmm1, xmm0
79 pxor xmm5, xmm4
124 pxor xmm6, xmm6
177 pxor xmm2, xmm0
178 pxor xmm3, xmm4
204 pxor xmm2, xmm6
205 pxor xmm3, xmm6
293 pxor xmm1, xmm0
294 pxor xmm5, xmm4
311 pxor xmm1, xmm
    [all...]
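
Note: the quantize_* hits pair pxor with a shift-derived sign mask and a subtract, the classic MMX/SSE2 way to take an absolute value and later restore the sign without branches. The sketch below shows that general idiom for 16-bit lanes; it is the pattern these pxor pairs are typically part of, not the libvpx code itself:

    #include <emmintrin.h>

    /* Branch-free absolute value for 16-bit lanes:
     *   sign = x >> 15 (arithmetic)  ->  0x0000 or 0xFFFF per lane
     *   |x|  = (x ^ sign) - sign
     * Running the same xor/subtract pair again restores the original sign,
     * which is the pattern behind the pxor instructions listed above. */
    static inline __m128i abs_epi16_sse2(__m128i x)
    {
        const __m128i sign = _mm_srai_epi16(x, 15);
        return _mm_sub_epi16(_mm_xor_si128(x, sign), sign);
    }
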
encodeopt.asm 41 pxor xmm5, xmm5
74 pxor mm7, mm7
83 pxor mm1, mm1 ; from movd mm1, dc ; dc =0
140 pxor mm7, mm7
143 pxor mm2, mm2
218 pxor xmm6, xmm6
221 pxor xmm4, xmm4
290 pxor mm7, mm7
347 pxor xmm3, xmm3
365 pxor xmm0, xmm
    [all...]
subtract_sse2.asm 32 pxor mm7, mm7
102 pxor xmm1, xmm4 ;convert to signed values
103 pxor xmm2, xmm4
122 pxor xmm5, xmm4 ;convert to signed values
123 pxor xmm1, xmm4
184 pxor xmm1, xmm4 ;convert to signed values
185 pxor xmm2, xmm4
218 pxor xmm1, xmm4 ;convert to signed values
219 pxor xmm2, xmm4
  /external/llvm/test/CodeGen/X86/
2008-02-26-AsmDirectMemOp.ll 15 tail call void asm sideeffect "movd $1, %mm6 \0A\09packssdw %mm6, %mm6 \0A\09packssdw %mm6, %mm6 \0A\09movd $2, %mm5 \0A\09pxor %mm7, %mm7 \0A\09packssdw %mm5, %mm5 \0A\09packssdw %mm5, %mm5 \0A\09psubw %mm5, %mm7 \0A\09pxor %mm4, %mm4 \0A\09.align 1<<4\0A\091: \0A\09movq ($0, $3), %mm0 \0A\09movq 8($0, $3), %mm1 \0A\09pmullw %mm6, %mm0 \0A\09pmullw %mm6, %mm1 \0A\09movq ($0, $3), %mm2 \0A\09movq 8($0, $3), %mm3 \0A\09pcmpgtw %mm4, %mm2 \0A\09pcmpgtw %mm4, %mm3 \0A\09pxor %mm2, %mm0 \0A\09pxor %mm3, %mm1 \0A\09paddw %mm7, %mm0 \0A\09paddw %mm7, %mm1 \0A\09pxor %mm0, %mm2 \0A\09pxor %mm1, %mm3 \0A\09pcmpeqw %mm7, %mm0 \0A\09pcmpeqw %mm7, %mm1 \0A\09pandn %mm2, %mm0 \0A\09pandn %mm3, %mm1 \0A\09movq %mm0, ($0, $3) \0A\09movq %mm1, 8($0, $3) \0A\09add $$16, $3 \0A\09jng 1b \0A\09", "r,imr,imr,r,~{dirflag},~{fpsr},~{flags},~{memory}"( i16* null, i32 %tmp1, i32 0, i32 0 ) nounwind
isnan2.ll 1 ; RUN: llc < %s -march=x86 -mcpu=yonah | not grep pxor
  /external/libvpx/libvpx/vp8/common/x86/
variance_impl_sse2.asm 34 pxor xmm4, xmm4
123 pxor xmm0, xmm0 ; clear xmm0 for unpack
124 pxor xmm7, xmm7 ; clear xmm7 for accumulating diffs
126 pxor xmm6, xmm6 ; clear xmm6 for accumulating sse
167 pxor xmm6, xmm6
169 pxor xmm5, xmm5
245 pxor xmm0, xmm0 ; clear xmm0 for unpack
246 pxor xmm7, xmm7 ; clear xmm7 for accumulating diffs
428 pxor xmm6, xmm6 ;
429 pxor xmm7, xmm7
    [all...]
loopfilter_mmx.asm 130 pxor mm5, mm5
145 pxor mm4, mm5
151 pxor mm2, [GLOBAL(t80)] ; p1 offset to convert to signed values
152 pxor mm7, [GLOBAL(t80)] ; q1 offset to convert to signed values
155 pxor mm6, [GLOBAL(t80)] ; offset to convert to signed values
156 pxor mm0, [GLOBAL(t80)] ; offset to convert to signed values
167 pxor mm0, mm0 ;
168 pxor mm5, mm5
176 pxor mm0, mm0 ; 0
180 pxor mm1, mm1 ;
    [all...]
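
Note: the "offset to convert to signed values" comments above XOR pixel lanes with a sign-bit constant (the GLOBAL(t80) table, by its name 0x80 per byte) so that signed SIMD compares and saturating arithmetic can be applied to unsigned pixel data. A small sketch of that re-biasing trick for byte lanes; the constant and function names here are invented:

    #include <emmintrin.h>

    /* XOR-ing each unsigned byte with 0x80 flips its sign bit, re-biasing the
     * range [0, 255] onto [-128, 127]; this mirrors the pxor with a t80-style
     * constant used to "convert to signed values" in the listings above. */
    static inline __m128i bias_to_signed_epi8(__m128i unsigned_bytes)
    {
        const __m128i sign_bias = _mm_set1_epi8((char)0x80);
        return _mm_xor_si128(unsigned_bytes, sign_bias);
    }
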
mfqe_sse2.asm 46 pxor xmm6, xmm6
123 pxor xmm4, xmm4
186 pxor xmm3, xmm3 ; SAD
187 pxor xmm4, xmm4 ; sum of src2
188 pxor xmm5, xmm5 ; sum of src2^2
203 pxor xmm2, xmm2
210 pxor xmm2, xmm2
244 pxor xmm2, xmm2
variance_impl_ssse3.asm 43 pxor xmm6, xmm6
44 pxor xmm7, xmm7
122 pxor xmm4, xmm4
188 pxor xmm4, xmm4
222 pxor xmm0, xmm0
257 pxor xmm0, xmm0
279 pxor xmm4, xmm4
306 pxor xmm0, xmm0
307 pxor xmm1, xmm1
308 pxor xmm5, xmm
    [all...]
  /external/openssl/crypto/bn/asm/
x86-gf2m.S 17 pxor %mm5,%mm5
18 pxor %mm4,%mm4
52 pxor %mm2,%mm0
58 pxor %mm1,%mm0
64 pxor %mm2,%mm0
70 pxor %mm1,%mm0
76 pxor %mm2,%mm0
82 pxor %mm1,%mm0
88 pxor %mm2,%mm0
94 pxor %mm1,%mm
    [all...]
