    Searched full:movdqa (Results 76 - 100 of 124)


  /external/openssl/crypto/bn/asm/
modexp512-x86_64.pl 1137 movdqa %xmm4, (+$tmp16_offset+16*0)(%rsp)
1138 movdqa %xmm4, (+$tmp16_offset+16*1)(%rsp)
1139 movdqa %xmm4, (+$tmp16_offset+16*6)(%rsp)
1140 movdqa %xmm4, (+$tmp16_offset+16*7)(%rsp)
1141 movdqa %xmm0, (+$tmp16_offset+16*2)(%rsp)
1142 movdqa %xmm1, (+$tmp16_offset+16*3)(%rsp)
1143 movdqa %xmm2, (+$tmp16_offset+16*4)(%rsp)
1144 movdqa %xmm3, (+$tmp16_offset+16*5)(%rsp)
1213 movdqa %xmm0, (+$exp_offset+16*0)(%rsp)
1214 movdqa %xmm1, (+$exp_offset+16*1)(%rsp)
    [all...]
x86_64-gf2m.S 212 movdqa %xmm0,%xmm4
213 movdqa %xmm1,%xmm5
221 movdqa %xmm4,%xmm5
x86_64-mont.S 577 movdqa %xmm0,(%rsp)
584 movdqa %xmm0,16(%rsp,%r14,1)
586 movdqa %xmm0,32(%rsp,%r14,1)
594 movdqa %xmm0,16(%rsp,%r14,1)
1339 movdqa %xmm0,64(%rsp)
1340 movdqa %xmm0,(%rsi)
1347 movdqa %xmm0,80(%rsp,%rbp,1)
1348 movdqa %xmm0,96(%rsp,%rbp,1)
1349 movdqa %xmm0,16(%rsi,%rbp,1)
1350 movdqa %xmm0,32(%rsi,%rbp,1)
    [all...]
x86_64-gf2m.pl 184 movdqa %xmm0,%xmm4
185 movdqa %xmm1,%xmm5
193 movdqa %xmm4,%xmm5
x86_64-mont5.S 693 movdqa %xmm0,(%rsp)
700 movdqa %xmm0,16(%rsp,%r14,1)
702 movdqa %xmm0,32(%rsp,%r14,1)
710 movdqa %xmm0,16(%rsp,%r14,1)
x86_64-mont.pl 653 movdqa %xmm0,(%rsp)
660 movdqa %xmm0,16(%rsp,$i)
662 movdqa %xmm0,32(%rsp,$i)
670 movdqa %xmm0,16(%rsp,$i)
1464 movdqa %xmm0,64(%rsp) # zap lower half of temporary vector
1465 movdqa %xmm0,($nptr) # zap upper half of temporary vector
1472 movdqa %xmm0,80(%rsp,$i) # zap lower half of temporary vector
1473 movdqa %xmm0,96(%rsp,$i) # zap lower half of temporary vector
1474 movdqa %xmm0,16($nptr,$i) # zap upper half of temporary vector
1475 movdqa %xmm0,32($nptr,$i) # zap upper half of temporary vector
    [all...]
x86_64-mont5.pl 788 movdqa %xmm0,(%rsp)
795 movdqa %xmm0,16(%rsp,$i)
797 movdqa %xmm0,32(%rsp,$i)
805 movdqa %xmm0,16(%rsp,$i)
868 .byte 0x0f,0x29,0x7c,0x24,0x10 #movdqa %xmm7,0x10(%rsp)
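
The "zap lower/upper half of temporary vector" comments in the x86_64-mont.S and x86_64-mont.pl hits above wipe a stack temporary by storing an all-zero XMM register with movdqa. A minimal standalone sketch of that pattern, using SSE2 intrinsics rather than the original perlasm (the function name, buffer name, and size are illustrative, not taken from the OpenSSL code):

    #include <emmintrin.h>
    #include <stdalign.h>
    #include <stddef.h>

    /* Zero a 16-byte-aligned scratch buffer 16 bytes at a time.
       _mm_setzero_si128 yields an all-zero register; each aligned
       _mm_store_si128 typically compiles to a movdqa store, as in
       the "zap" lines above. */
    static void zap_tmp(void *tmp, size_t len) {
        __m128i zero = _mm_setzero_si128();
        for (size_t i = 0; i < len; i += 16)
            _mm_store_si128((__m128i *)((char *)tmp + i), zero);
    }

    int main(void) {
        alignas(16) unsigned char scratch[64];  /* illustrative size */
        zap_tmp(scratch, sizeof scratch);
        return 0;
    }
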
  /external/llvm/test/CodeGen/X86/
sse3.ll 19 ; X64: movdqa (%rsi), %xmm0
21 ; X64: movdqa %xmm0, (%rdi)
32 ; X64: movdqa (%rdi), %xmm0
121 ; X64: movdqa %xmm0, (%rdi)
vec_shuffle.ll 27 ; CHECK: movdqa
widen_arith-5.ll 2 ; CHECK: movdqa
v2f32.ll 20 ; W64-NEXT: movdqa (%rcx), %xmm0
  /external/valgrind/main/memcheck/tests/amd64/
sse_memory.stdout.exp     [all...]
sse_memory.c 300 TEST_INSN( &AllMask, 16,movdqa)
  /external/valgrind/main/memcheck/tests/x86/
sse2_memory.stdout.exp     [all...]
  /external/libyuv/files/source/
format_conversion.cc 41 movdqa xmm0, [eax]
62 "movdqa (%0),%%xmm0 \n"
convert.cc 81 movdqa xmm0, [eax]
84 movdqa [eax + edi], xmm0
100 "movdqa (%0),%%xmm0 \n"
103 "movdqa %%xmm0,(%0,%1) \n"
    [all...]
  /external/v8/test/cctest/
test-disasm-ia32.cc 384 __ movdqa(xmm0, Operand(ebx, ecx, times_4, 10000));
385 __ movdqa(Operand(ebx, ecx, times_4, 10000), xmm0);
test-disasm-x64.cc 367 __ movdqa(xmm0, Operand(rbx, rcx, times_4, 10000));
368 __ movdqa(Operand(rbx, rcx, times_4, 10000), xmm0);
  /external/llvm/test/MC/Disassembler/X86/
x86-32.txt 301 # CHECK: movdqa %xmm1, %xmm0
304 # CHECK: movdqa %xmm0, %xmm1
simple-tests.txt 269 # CHECK: movdqa %xmm1, %xmm0
272 # CHECK: movdqa %xmm0, %xmm1
  /external/llvm/lib/Target/X86/
README-SSE.txt 39 movdqa %xmm2, %xmm0
53 movdqa %xmm0, %xmm2
698 movdqa LC0, %xmm0
715 movdqa .LC0(%rip), %xmm0
  /external/valgrind/main/none/tests/amd64/
insn_sse2.def 99 movdqa xmm.uq[0x012345678abcdef,0xfedcba9876543210] xmm.uq[0x1212121234343434,0x5656565678787878] => 1.uq[0x012345678abcdef,0xfedcba9876543210]
100 movdqa m128.uq[0x012345678abcdef,0xfedcba9876543210] xmm.uq[0x1212121234343434,0x5656565678787878] => 1.uq[0x012345678abcdef,0xfedcba9876543210]
101 movdqa xmm.uq[0x012345678abcdef,0xfedcba9876543210] m128.uq[0x1212121234343434,0x5656565678787878] => 1.uq[0x012345678abcdef,0xfedcba9876543210]
    [all...]
  /external/valgrind/main/none/tests/x86/
insn_sse2.def 99 movdqa xmm.uq[0x012345678abcdef,0xfedcba9876543210] xmm.uq[0x1212121234343434,0x5656565678787878] => 1.uq[0x012345678abcdef,0xfedcba9876543210]
100 movdqa m128.uq[0x012345678abcdef,0xfedcba9876543210] xmm.uq[0x1212121234343434,0x5656565678787878] => 1.uq[0x012345678abcdef,0xfedcba9876543210]
101 movdqa xmm.uq[0x012345678abcdef,0xfedcba9876543210] m128.uq[0x1212121234343434,0x5656565678787878] => 1.uq[0x012345678abcdef,0xfedcba9876543210]
    [all...]
  /external/v8/src/ia32/
assembler-ia32.h 1012 void movdqa(XMMRegister dst, const Operand& src);
1013 void movdqa(const Operand& dst, XMMRegister src);
    [all...]
  /external/v8/src/
platform-cygwin.cc 90 // that requires 16 byte alignment such as movdqa on x86.
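
The platform-cygwin.cc hit above points at the key constraint behind all of these matches: movdqa faults unless its memory operand is 16-byte aligned. A minimal sketch, not taken from any of the files listed here, contrasting the aligned and unaligned SSE2 load intrinsics (which typically compile to movdqa and movdqu respectively):

    #include <emmintrin.h>
    #include <stdalign.h>
    #include <stdio.h>

    int main(void) {
        alignas(16) unsigned char buf[32] = {0};

        /* movdqa (_mm_load_si128) requires a 16-byte-aligned address. */
        __m128i a = _mm_load_si128((const __m128i *)buf);

        /* movdqu (_mm_loadu_si128) accepts any alignment. */
        __m128i u = _mm_loadu_si128((const __m128i *)(buf + 1));

        /* Passing buf + 1 to _mm_load_si128 instead would fault at run
           time; that is the alignment requirement the comment above
           refers to. */
        _mm_storeu_si128((__m128i *)buf, _mm_add_epi8(a, u));
        printf("%d\n", buf[0]);
        return 0;
    }
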
