    Searched full:xmm13 (Results 1 - 25 of 80)


  /external/llvm/test/MC/X86/
x86_64-avx-encoding.s 275 // CHECK: vunpckhps %xmm15, %xmm12, %xmm13
277 vunpckhps %xmm15, %xmm12, %xmm13
279 // CHECK: vunpckhpd %xmm15, %xmm12, %xmm13
281 vunpckhpd %xmm15, %xmm12, %xmm13
283 // CHECK: vunpcklps %xmm15, %xmm12, %xmm13
285 vunpcklps %xmm15, %xmm12, %xmm13
287 // CHECK: vunpcklpd %xmm15, %xmm12, %xmm13
289 vunpcklpd %xmm15, %xmm12, %xmm13
331 // CHECK: vshufps $8, %xmm11, %xmm12, %xmm13
333 vshufps $8, %xmm11, %xmm12, %xmm13
    [all...]
x86_64-avx-clmul-encoding.s 7 // CHECK: vpclmulqdq $17, (%rax), %xmm10, %xmm13
9 vpclmulhqhqdq (%rax), %xmm10, %xmm13
15 // CHECK: vpclmulqdq $1, (%rax), %xmm10, %xmm13
17 vpclmulhqlqdq (%rax), %xmm10, %xmm13
23 // CHECK: vpclmulqdq $16, (%rax), %xmm10, %xmm13
25 vpclmullqhqdq (%rax), %xmm10, %xmm13
31 // CHECK: vpclmulqdq $0, (%rax), %xmm10, %xmm13
33 vpclmullqlqdq (%rax), %xmm10, %xmm13
39 // CHECK: vpclmulqdq $17, (%rax), %xmm10, %xmm13
41 vpclmulqdq $17, (%rax), %xmm10, %xmm13
    [all...]
  /external/chromium_org/third_party/openssl/openssl/crypto/aes/asm/
bsaes-x86_64.S 164 movdqa %xmm10,%xmm13
173 pand %xmm11,%xmm13
191 pxor %xmm13,%xmm9
194 pxor %xmm13,%xmm7
196 pxor %xmm13,%xmm8
197 movdqa %xmm0,%xmm13
201 pand %xmm4,%xmm13
205 pxor %xmm13,%xmm8
216 movdqa %xmm7,%xmm13
218 pxor %xmm10,%xmm13
    [all...]
vpaes-x86_64.pl 106 movdqa %xmm13, %xmm4 # 4 : sb1u
458 ## %xmm13: 0
541 movdqa %xmm13, %xmm4 # 4 : sbou
686 movaps %xmm13,0x80(%rsp)
709 movaps 0x80(%rsp),%xmm13
734 movaps %xmm13,0x80(%rsp)
762 movaps 0x80(%rsp),%xmm13
787 movaps %xmm13,0x80(%rsp)
806 movaps 0x80(%rsp),%xmm13
830 movaps %xmm13,0x80(%rsp
    [all...]
aesni-x86_64.S 901 pxor %xmm13,%xmm13
915 movdqa %xmm13,-24(%rsp)
931 pshufd $192,%xmm13,%xmm5
934 pshufd $128,%xmm13,%xmm6
937 pshufd $64,%xmm13,%xmm7
952 movdqa .Lincrement32(%rip),%xmm13
986 paddd %xmm13,%xmm12
988 paddd -24(%rsp),%xmm13
992 movdqa %xmm13,-24(%rsp
    [all...]
  /external/openssl/crypto/aes/asm/
bsaes-x86_64.S 164 movdqa %xmm10,%xmm13
173 pand %xmm11,%xmm13
191 pxor %xmm13,%xmm9
194 pxor %xmm13,%xmm7
196 pxor %xmm13,%xmm8
197 movdqa %xmm0,%xmm13
201 pand %xmm4,%xmm13
205 pxor %xmm13,%xmm8
216 movdqa %xmm7,%xmm13
218 pxor %xmm10,%xmm13
    [all...]
vpaes-x86_64.pl 106 movdqa %xmm13, %xmm4 # 4 : sb1u
458 ## %xmm13: 0
541 movdqa %xmm13, %xmm4 # 4 : sbou
686 movaps %xmm13,0x80(%rsp)
709 movaps 0x80(%rsp),%xmm13
734 movaps %xmm13,0x80(%rsp)
762 movaps 0x80(%rsp),%xmm13
787 movaps %xmm13,0x80(%rsp)
806 movaps 0x80(%rsp),%xmm13
830 movaps %xmm13,0x80(%rsp
    [all...]
aesni-x86_64.S 901 pxor %xmm13,%xmm13
915 movdqa %xmm13,-24(%rsp)
931 pshufd $192,%xmm13,%xmm5
934 pshufd $128,%xmm13,%xmm6
937 pshufd $64,%xmm13,%xmm7
952 movdqa .Lincrement32(%rip),%xmm13
986 paddd %xmm13,%xmm12
988 paddd -24(%rsp),%xmm13
992 movdqa %xmm13,-24(%rsp
    [all...]
  /external/valgrind/main/none/tests/amd64/
pcmpxstrx64.c 76 "movupd 16(%0), %%xmm13" "\n\t"
81 "pcmpistri $0x4A, %%xmm2, %%xmm13" "\n\t"
89 : /*trash*/"memory","cc","xmm2","xmm13","xmm0","rdx","rax","rcx","r15"
105 "movupd 16(%0), %%xmm13" "\n\t"
110 "pcmpistri $0x0A, %%xmm2, %%xmm13" "\n\t"
118 : /*trash*/"memory","cc","xmm2","xmm13","xmm0","rdx","rax","rcx","r15"
134 "movupd 16(%0), %%xmm13" "\n\t"
139 "pcmpistrm $0x4A, %%xmm2, %%xmm13" "\n\t"
147 : /*trash*/"memory","cc","xmm2","xmm13","xmm0","rdx","rax","rcx","r15"
163 "movupd 16(%0), %%xmm13" "\n\t
    [all...]
pcmpxstrx64w.c 77 "movupd 16(%0), %%xmm13" "\n\t"
82 "pcmpistri $0x4B, %%xmm2, %%xmm13" "\n\t"
90 : /*trash*/"memory","cc","xmm2","xmm13","xmm0","rdx","rax","rcx","r15"
106 "movupd 16(%0), %%xmm13" "\n\t"
111 "pcmpistri $0x0B, %%xmm2, %%xmm13" "\n\t"
119 : /*trash*/"memory","cc","xmm2","xmm13","xmm0","rdx","rax","rcx","r15"
135 "movupd 16(%0), %%xmm13" "\n\t"
140 "pcmpistrm $0x4B, %%xmm2, %%xmm13" "\n\t"
148 : /*trash*/"memory","cc","xmm2","xmm13","xmm0","rdx","rax","rcx","r15"
164 "movupd 16(%0), %%xmm13" "\n\t
    [all...]
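
The two Valgrind tests above load data into %xmm13 with movupd, run pcmpistri/pcmpistrm against it, and then name the register in the asm clobber list. A minimal, hypothetical C sketch of that inline-asm pattern (not taken from the tests; the function and buffer names are invented, and a plain copy stands in for the string-compare instructions):

    /* Sketch only: route 16 bytes through %xmm13 and declare the register
     * clobbered, the same discipline the pcmpxstrx64 tests follow. */
    #include <stdio.h>

    static void copy16_via_xmm13(const void *src, void *dst)
    {
        __asm__ __volatile__(
            "movupd (%0), %%xmm13    \n\t"   /* unaligned 16-byte load  */
            "movupd %%xmm13, (%1)    \n\t"   /* unaligned 16-byte store */
            : /* no outputs */
            : "r"(src), "r"(dst)
            : "memory", "xmm13");            /* tell the compiler xmm13 is trashed */
    }

    int main(void)
    {
        char in[16]  = "xmm13 clobber!!";
        char out[16] = {0};
        copy16_via_xmm13(in, out);
        printf("%s\n", out);
        return 0;
    }

Listing "xmm13" among the clobbers is what keeps the compiler from caching a live value in that register across the asm statement, which is exactly what the trash lists in the tests above are for.
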
  /external/libvpx/libvpx/vp8/encoder/x86/
ssim_opt.asm 19 paddd xmm13, xmm1 ; sum_sq_s
82 pxor xmm13,xmm13 ;sum_sq_s
114 SUM_ACROSS_Q xmm13
123 movd [rdi], xmm13;
172 pxor xmm13,xmm13 ;sum_sq_s
195 SUM_ACROSS_Q xmm13
204 movd [rdi], xmm13;
  /external/libvpx/libvpx/vp9/encoder/x86/
vp9_ssim_opt.asm 19 paddd xmm13, xmm1 ; sum_sq_s
82 pxor xmm13,xmm13 ;sum_sq_s
114 SUM_ACROSS_Q xmm13
123 movd [rdi], xmm13;
172 pxor xmm13,xmm13 ;sum_sq_s
195 SUM_ACROSS_Q xmm13
204 movd [rdi], xmm13;
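
In both ssim_opt.asm files above, xmm13 serves as the sum-of-squares accumulator: it is zeroed with pxor, updated with paddd, folded by the SUM_ACROSS_Q macro, and stored with movd. A rough C-intrinsics sketch of that accumulate-and-reduce pattern (an illustration under assumed names, not the libvpx code; it takes already-squared 32-bit values to stay short):

    /* Sketch of the zero / paddd-accumulate / horizontal-sum / movd pattern. */
    #include <emmintrin.h>
    #include <stdint.h>

    static uint32_t sum_sq_accumulate(const int32_t *sq, int n /* multiple of 4 */)
    {
        __m128i acc = _mm_setzero_si128();                 /* pxor xmm13,xmm13   */
        for (int i = 0; i < n; i += 4) {
            __m128i v = _mm_loadu_si128((const __m128i *)(sq + i));
            acc = _mm_add_epi32(acc, v);                   /* paddd xmm13, xmm1  */
        }
        /* SUM_ACROSS_Q-style reduction: fold four 32-bit lanes into one. */
        acc = _mm_add_epi32(acc, _mm_srli_si128(acc, 8));
        acc = _mm_add_epi32(acc, _mm_srli_si128(acc, 4));
        return (uint32_t)_mm_cvtsi128_si32(acc);           /* movd [rdi], xmm13  */
    }
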
  /external/llvm/test/TableGen/
Slice.td 52 def XMM13: Register<"xmm13">;
59 XMM12, XMM13, XMM14, XMM15]>;
TargetInstrSpec.td 54 def XMM13: Register<"xmm13">;
61 XMM12, XMM13, XMM14, XMM15]>;
cast.td 53 def XMM13: Register<"xmm13">;
60 XMM12, XMM13, XMM14, XMM15]>;
MultiPat.td 62 def XMM13: Register<"xmm13">;
69 XMM12, XMM13, XMM14, XMM15]>;
  /external/chromium_org/third_party/yasm/source/patched-yasm/modules/arch/x86/tests/
sse5-basic.asm 11 compd xmm10, xmm13, dqword [rbx+r9*4], 5 ; 0F 25 2D 054 213 A6 05
  /external/valgrind/main/memcheck/tests/amd64/
fxsave-amd64.c 62 asm __volatile__("movups " VG_SYM(vecZ) ", %xmm13");
79 asm __volatile__("movups " VG_SYM(vecZ) "(%rip), %xmm13");
113 asm __volatile__("movaps %xmm2, %xmm13");
  /external/chromium_org/third_party/openssl/openssl/crypto/
x86_64cpuid.S 207 pxor %xmm13,%xmm13
x86_64cpuid.pl 230 pxor %xmm13,%xmm13
  /external/libvpx/libvpx/vp8/common/x86/
loopfilter_block_sse2.asm 510 movdqa xmm13, xmm8
512 punpckhqdq xmm13, i5
533 movdqa i11, xmm13
542 movdqa xmm13, xmm7
546 LF_FILTER_HEV_MASK xmm0, xmm12, xmm13, xmm9, xmm4, xmm10, xmm3, xmm11
747 movdqa xmm13, xmm8
749 punpckhqdq xmm13, i13
770 movdqa s11, xmm13
  /external/openssl/crypto/
x86_64cpuid.S 207 pxor %xmm13,%xmm13
x86_64cpuid.pl 230 pxor %xmm13,%xmm13
  /external/valgrind/main/coregrind/m_gdbserver/
64bit-sse.xml 55 <reg name="xmm13" bitsize="128" type="vec128"/>
  /external/libyuv/files/source/
rotate.cc 531 "movdqa %%xmm12,%%xmm13 \n"
534 "palignr $0x8,%%xmm13,%%xmm13 \n"
567 "punpcklwd %%xmm15,%%xmm13 \n"
569 "movdqa %%xmm13,%%xmm15 \n"
608 "punpckldq %%xmm13,%%xmm9 \n"
611 "movdqa %%xmm9,%%xmm13 \n"
613 "palignr $0x8,%%xmm13,%%xmm13 \n"
614 "movq %%xmm13,(%1,%4) \n
    [all...]

