    Searched full:xmm8 (Results 1 - 25 of 108)


  /external/valgrind/none/tests/amd64/
avx-1.c 84 : /*TRASH*/"xmm0","xmm7","xmm8","xmm6","xmm9","r14","memory","cc" \
104 "xmm0","xmm8","xmm7","xmm9","r14","rax","memory","cc" \
118 "vpor %%xmm6, %%xmm8, %%xmm7",
119 "vpor (%%rax), %%xmm8, %%xmm7")
122 "vpxor %%xmm6, %%xmm8, %%xmm7",
123 "vpxor (%%rax), %%xmm8, %%xmm7")
126 "vpsubb %%xmm6, %%xmm8, %%xmm7",
127 "vpsubb (%%rax), %%xmm8, %%xmm7")
130 "vpsubd %%xmm6, %%xmm8, %%xmm7",
131 "vpsubd (%%rax), %%xmm8, %%xmm7"
    [all...]
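
The avx-1.c hits are Valgrind's AVX instruction tests: each instruction runs in both register and memory-operand form inside a macro whose /*TRASH*/ clobber list names every register the fragment may destroy, so the compiler keeps nothing live there. A minimal sketch of that harness shape in C (do_vpor is a hypothetical helper; assumes an AVX-capable CPU and GCC extended asm):

    #include <stdio.h>
    #include <string.h>

    /* Run one AVX instruction with xmm8 as a source, touching a
       16-byte buffer; the clobber list plays the role of TRASH. */
    static void do_vpor(unsigned char buf[16])
    {
        __asm__ __volatile__(
            "vmovdqu (%0), %%xmm8           \n\t"
            "vpor    %%xmm8, %%xmm8, %%xmm7 \n\t"
            "vmovdqu %%xmm7, (%0)           \n\t"
            : : "r"(buf)
            : "xmm7", "xmm8", "memory", "cc");
    }

    int main(void)
    {
        unsigned char buf[16];
        memset(buf, 0x5a, sizeof buf);
        do_vpor(buf);              /* x | x == x: buffer unchanged */
        printf("%02x\n", buf[0]);  /* prints 5a */
        return 0;
    }
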
fma.c 103 __asm __volatile__ ("vmovaps (%1), %%xmm9; vmovaps (%2), %%xmm7; vmovaps (%3), %%xmm8;"
104 "vfmadd132ps %%xmm7, %%xmm8, %%xmm9;"
106 "r" (&ft.y[i]), "r" (&ft.z[i]) : "xmm7", "xmm8", "xmm9");
109 __asm __volatile__ ("vmovaps (%1), %%xmm9; vmovaps (%3), %%xmm8;"
110 "vfmadd132ps (%2), %%xmm8, %%xmm9;"
112 "r" (&ft.y[i]), "r" (&ft.z[i]) : "xmm7", "xmm8", "xmm9");
115 __asm __volatile__ ("vmovaps (%1), %%xmm9; vmovaps (%3), %%xmm7; vmovaps (%2), %%xmm8;"
116 "vfmadd213ps %%xmm7, %%xmm8, %%xmm9;"
118 "r" (&ft.y[i]), "r" (&ft.z[i]) : "xmm7", "xmm8", "xmm9");
121 __asm __volatile__ ("vmovaps (%1), %%xmm9; vmovaps (%2), %%xmm8;"
    [all...]
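
fma.c pins down the three FMA operand orders: in Intel operand order, vfmadd132ps computes dst = dst*src3 + src2 and vfmadd213ps computes dst = src2*dst + src3, so with the loads shown above both sequences evaluate x*y + z, just with different roles for xmm7/xmm8/xmm9. The same computation via the portable intrinsic (a sketch; build with -mfma):

    #include <immintrin.h>
    #include <stdio.h>

    int main(void)
    {
        __m128 x = _mm_set1_ps(2.0f);
        __m128 y = _mm_set1_ps(3.0f);
        __m128 z = _mm_set1_ps(1.0f);
        /* The compiler picks the 132/213/231 encoding that suits its
           register allocation; the math is x*y + z either way. */
        __m128 r = _mm_fmadd_ps(x, y, z);
        float out[4];
        _mm_storeu_ps(out, r);
        printf("%f\n", out[0]);   /* 7.000000 in every lane */
        return 0;
    }
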
avx2-1.c 86 : /*TRASH*/"xmm0","xmm7","xmm8","xmm6","xmm9","r14","memory","cc" \
106 "xmm0","xmm8","xmm7","xmm9","r14","rax","memory","cc" \
758 "vpblendd $0x00, %%xmm6, %%xmm8, %%xmm7",
759 "vpblendd $0x01, (%%rax), %%xmm8, %%xmm7")
761 "vpblendd $0x02, %%xmm6, %%xmm8, %%xmm7",
762 "vpblendd $0x03, (%%rax), %%xmm8, %%xmm7")
764 "vpblendd $0x04, %%xmm6, %%xmm8, %%xmm7",
765 "vpblendd $0x05, (%%rax), %%xmm8, %%xmm7")
767 "vpblendd $0x06, %%xmm6, %%xmm8, %%xmm7",
768 "vpblendd $0x07, (%%rax), %%xmm8, %%xmm7"
    [all...]
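
avx2-1.c sweeps vpblendd across immediates: bit i of the immediate selects 32-bit lane i from the second source instead of the first. The same selection with the AVX2 intrinsic (a sketch; build with -mavx2):

    #include <immintrin.h>
    #include <stdio.h>

    int main(void)
    {
        __m128i a = _mm_set1_epi32(0x11111111);
        __m128i b = _mm_set1_epi32(0x22222222);
        __m128i r = _mm_blend_epi32(a, b, 0x05);  /* lanes 0,2 from b */
        int out[4];
        _mm_storeu_si128((__m128i *)out, r);
        printf("%08x %08x %08x %08x\n", out[0], out[1], out[2], out[3]);
        return 0;
    }
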
  /external/llvm/test/CodeGen/X86/
2009-06-03-Win64SpillXMM.ll 3 ; CHECK: movaps %xmm8, 16(%rsp)
8 tail call void asm sideeffect "", "~{xmm7},~{xmm8},~{dirflag},~{fpsr},~{flags}"() nounwind
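
This test pins down an ABI point: on Win64, xmm6-xmm15 are callee-saved, so a function whose asm clobbers xmm8 must spill it, which is the movaps %xmm8, 16(%rsp) the CHECK line expects. A minimal C reproduction (the spill only appears for a Windows target triple; under the SysV ABI every xmm register is caller-saved):

    void clobber_xmm8(void)
    {
        __asm__ __volatile__("" : : : "xmm7", "xmm8");
    }
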
stack-folding-int-avx1.ll 14 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
23 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
32 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
41 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
50 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
59 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
88 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
108 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
117 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
126 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
    [all...]
stack-folding-int-sse42.ll 14 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
23 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
32 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
41 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
50 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
59 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
115 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
135 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
144 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
153 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
    [all...]
stack-folding-fp-sse42.ll 14 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
22 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
30 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
38 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
47 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
55 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
64 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
73 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
82 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
96 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
    [all...]
stack-folding-fp-avx1.ll 14 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
22 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
30 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
38 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
46 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
54 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
63 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
71 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
80 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
89 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
    [all...]
stack-folding-int-avx2.ll 14 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
23 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
32 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
44 %3 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
51 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
61 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
70 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
79 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
88 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
97 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
    [all...]
stack-folding-xop.ll 14 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
23 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
32 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
41 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
50 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
59 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
68 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
75 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
84 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
91 %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
    [all...]
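
All of these stack-folding tests (int/fp crossed with sse42/avx1/avx2, plus xop) share one trick: an asm "nop" whose clobber list covers nearly every xmm register, so any vector value live across it is spilled, and the test then checks that the instruction under test folds the reload as a memory operand. The same register pressure can be reproduced from C (a hedged sketch, not the actual test driver):

    #include <immintrin.h>

    __m128i fold_paddd(__m128i a, __m128i b)
    {
        /* Clobber every xmm register: a and b must be spilled. */
        __asm__ __volatile__("nop" : : :
            "xmm0","xmm1","xmm2","xmm3","xmm4","xmm5","xmm6","xmm7",
            "xmm8","xmm9","xmm10","xmm11","xmm12","xmm13","xmm14","xmm15");
        /* The reload of one operand is typically folded, e.g.
           paddd 16(%rsp), %xmm0, rather than loaded separately. */
        return _mm_add_epi32(a, b);
    }
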
  /external/valgrind/memcheck/tests/amd64/
xor-undef-amd64.c 67 "movups 16(%0), %%xmm8\n\t"
68 "xorps %%xmm8, %%xmm0\n\t"
73 : : "r"(junk) : "rax", "xmm8", "xmm0", "cc", "memory"
79 "movups 16(%0), %%xmm8\n\t"
85 : : "r"(junk) : "rax", "xmm8", "xmm0", "cc", "memory"
94 "movups 16(%0), %%xmm8\n\t"
95 "pxor %%xmm8, %%xmm0\n\t"
100 : : "r"(junk) : "rax", "xmm8", "xmm0", "cc", "memory"
106 "movups 16(%0), %%xmm8\n\t"
112 : : "r"(junk) : "rax", "xmm8", "xmm0", "cc", "memory
    [all...]
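
The Memcheck test exercises xor with undefined inputs: x ^ x must come out fully defined (all zero) even when x's bits are uninitialised, in both the xorps and pxor forms, while xor of two different undefined values stays undefined. The idea in miniature (a sketch; junk is deliberately uninitialised, so run it under valgrind to see the effect):

    #include <stdio.h>

    int main(void)
    {
        unsigned long junk[2];   /* deliberately uninitialised */
        unsigned long out;
        __asm__ __volatile__(
            "movups (%1), %%xmm8  \n\t"  /* xmm8 <- undefined bits */
            "xorps  %%xmm8, %%xmm8\n\t"  /* x ^ x: defined zero    */
            "movq   %%xmm8, %0    \n\t"
            : "=r"(out) : "r"(junk) : "xmm8", "memory", "cc");
        printf("%lu\n", out);    /* no memcheck complaint expected */
        return 0;
    }
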
  /external/boringssl/win-x86_64/crypto/aes/
bsaes-x86_64.asm 16 movdqa xmm8,XMMWORD[rax]
19 pxor xmm15,xmm8
20 pxor xmm0,xmm8
21 pxor xmm1,xmm8
22 pxor xmm2,xmm8
25 pxor xmm3,xmm8
26 pxor xmm4,xmm8
29 pxor xmm5,xmm8
30 pxor xmm6,xmm8
37 movdqa xmm8,XMMWORD[16+r11]
    [all...]
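
The bit-sliced AES code holds the current round key in xmm8 and XORs it into all eight state registers (xmm15, xmm0..xmm6): AddRoundKey for eight blocks at once. The linux-x86_64 and mac-x86_64 bsaes-x86_64.S hits below are the same generated code in AT&T syntax. The step with SSE2 intrinsics, as a sketch:

    #include <emmintrin.h>

    /* One AddRoundKey over the eight bit-sliced state registers. */
    static void add_round_key(__m128i state[8], __m128i round_key)
    {
        for (int i = 0; i < 8; i++)
            state[i] = _mm_xor_si128(state[i], round_key);
    }
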
aesni-x86_64.asm 384 pxor xmm8,xmm0
448 pxor xmm8,xmm0
515 movaps XMMWORD[32+rsp],xmm8
537 movdqu xmm8,XMMWORD[96+rdi]
558 movups XMMWORD[96+rsi],xmm8
559 movdqu xmm8,XMMWORD[96+rdi]
579 movups XMMWORD[96+rsi],xmm8
601 movdqu xmm8,XMMWORD[96+rdi]
610 movups XMMWORD[96+rsi],xmm8
680 movdqu xmm8,XMMWORD[96+rdi]
    [all...]
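
aesni-x86_64.asm keeps up to eight blocks in flight; in the eight-block loops the state lives in xmm2..xmm9, so the block loaded from 96(%rdi) lands in xmm8. Independent chains hide the multi-cycle aesenc latency. One round over eight blocks, sketched with AES-NI intrinsics (build with -maes):

    #include <wmmintrin.h>

    /* Eight independent aesenc chains pipeline where one would stall. */
    static void aes_round_x8(__m128i blk[8], __m128i round_key)
    {
        for (int i = 0; i < 8; i++)
            blk[i] = _mm_aesenc_si128(blk[i], round_key);
    }
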
  /external/libvpx/libvpx/vp8/common/x86/
loopfilter_block_sse2_x86_64.asm 204 movdqa xmm8, i5
207 LF_FILTER_HEV_MASK xmm0, xmm1, xmm2, xmm3, xmm4, xmm8, xmm9, xmm10
212 movdqa xmm8, i5
213 LF_FILTER xmm1, xmm2, xmm3, xmm8, xmm0, xmm4
219 movdqa i5, xmm8
227 LF_FILTER_HEV_MASK xmm3, xmm8, xmm0, xmm1, xmm2, xmm4, xmm10, xmm11, xmm9
232 movdqa xmm8, i9
233 LF_FILTER xmm0, xmm1, xmm4, xmm8, xmm3, xmm2
239 movdqa i9, xmm8
247 LF_FILTER_HEV_MASK xmm4, xmm8, xmm0, xmm1, xmm2, xmm3, xmm9, xmm11, xmm1
    [all...]
  /hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/vp8/common/x86/
loopfilter_block_sse2.asm 204 movdqa xmm8, i5
207 LF_FILTER_HEV_MASK xmm0, xmm1, xmm2, xmm3, xmm4, xmm8, xmm9, xmm10
212 movdqa xmm8, i5
213 LF_FILTER xmm1, xmm2, xmm3, xmm8, xmm0, xmm4
219 movdqa i5, xmm8
227 LF_FILTER_HEV_MASK xmm3, xmm8, xmm0, xmm1, xmm2, xmm4, xmm10, xmm11, xmm9
232 movdqa xmm8, i9
233 LF_FILTER xmm0, xmm1, xmm4, xmm8, xmm3, xmm2
239 movdqa i9, xmm8
247 LF_FILTER_HEV_MASK xmm4, xmm8, xmm0, xmm1, xmm2, xmm3, xmm9, xmm11, xmm1
    [all...]
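
Both copies of this VP8 loop filter (libvpx and the Intel OMX vendored tree) are x86-64-only precisely because they lean on xmm8-xmm15: whole 16-pixel rows such as i5 and i9 stay resident in registers across LF_FILTER_HEV_MASK and LF_FILTER. The flavour of the mask arithmetic, as a hedged sketch (abs_diff_le is a hypothetical helper, not the macro itself):

    #include <emmintrin.h>

    /* 16-lane test |a - b| <= limit, SSE2 style: unsigned saturating
       subtraction in both directions gives |a - b|, and d <= limit
       exactly when (d -sat- limit) == 0. */
    static __m128i abs_diff_le(__m128i a, __m128i b, __m128i limit)
    {
        __m128i d = _mm_or_si128(_mm_subs_epu8(a, b), _mm_subs_epu8(b, a));
        return _mm_cmpeq_epi8(_mm_subs_epu8(d, limit), _mm_setzero_si128());
    }
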
  /hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/vp8/encoder/x86/
quantize_sse4.asm 127 pxor xmm8, xmm8
163 ZIGZAG_LOOP 8, 0, xmm3, xmm7, xmm8
168 ZIGZAG_LOOP 9, 1, xmm3, xmm7, xmm8
169 ZIGZAG_LOOP 12, 4, xmm3, xmm7, xmm8
170 ZIGZAG_LOOP 13, 5, xmm3, xmm7, xmm8
171 ZIGZAG_LOOP 10, 2, xmm3, xmm7, xmm8
173 ZIGZAG_LOOP 11, 3, xmm3, xmm7, xmm8
174 ZIGZAG_LOOP 14, 6, xmm3, xmm7, xmm8
175 ZIGZAG_LOOP 15, 7, xmm3, xmm7, xmm8
    [all...]
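
pxor xmm8, xmm8 is the self-XOR zero idiom: no constant load, and the CPU treats it as breaking the dependency on xmm8's previous contents; the zeroed register then rides through every ZIGZAG_LOOP step as an accumulator. The idiom from C:

    #include <emmintrin.h>

    /* _mm_setzero_si128() compiles to pxor xmmN, xmmN. */
    static inline __m128i accumulator_init(void)
    {
        return _mm_setzero_si128();
    }
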
  /external/boringssl/linux-x86_64/crypto/aes/
bsaes-x86_64.S 14 movdqa (%rax),%xmm8
17 pxor %xmm8,%xmm15
18 pxor %xmm8,%xmm0
19 pxor %xmm8,%xmm1
20 pxor %xmm8,%xmm2
23 pxor %xmm8,%xmm3
24 pxor %xmm8,%xmm4
27 pxor %xmm8,%xmm5
28 pxor %xmm8,%xmm6
35 movdqa 16(%r11),%xmm8
    [all...]
  /external/boringssl/mac-x86_64/crypto/aes/
bsaes-x86_64.S 12 movdqa (%rax),%xmm8
15 pxor %xmm8,%xmm15
16 pxor %xmm8,%xmm0
17 pxor %xmm8,%xmm1
18 pxor %xmm8,%xmm2
21 pxor %xmm8,%xmm3
22 pxor %xmm8,%xmm4
25 pxor %xmm8,%xmm5
26 pxor %xmm8,%xmm6
33 movdqa 16(%r11),%xmm8
    [all...]
  /external/llvm/test/MC/AsmParser/
directive_seh.s 17 movups %xmm8, (%rsp)
18 .seh_savexmm %xmm8, 0
  /external/llvm/test/tools/llvm-objdump/Inputs/
win64-unwind.exe.coff-x86_64.asm 11 movups %xmm8, (%rsp)
12 .seh_savexmm %xmm8, 0
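
Both SEH hits pair the actual store with its unwind annotation: movups %xmm8, (%rsp) saves the nonvolatile register, and .seh_savexmm %xmm8, 0 records the slot so the Win64 unwinder can restore xmm8 during a stack walk. C that should provoke the pair on a Windows target (the commented prologue is hypothetical compiler output, not quoted from either test):

    void uses_xmm8(void)
    {
        __asm__ __volatile__("" : : : "xmm8");
        /* expected shape of the prologue (windows target):
             .seh_proc uses_xmm8
             subq   $24, %rsp          # .seh_stackalloc 24
             movaps %xmm8, (%rsp)      # .seh_savexmm %xmm8, 0
             .seh_endprologue                                   */
    }
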
  /external/boringssl/win-x86_64/crypto/modes/
ghash-x86_64.asm 968 movdqu xmm8,XMMWORD[r8]
973 pxor xmm0,xmm8
977 pshufd xmm8,xmm0,78
978 pxor xmm8,xmm0
1005 xorps xmm8,xmm4
1009 pxor xmm8,xmm0
1011 pxor xmm8,xmm1
1013 movdqa xmm9,xmm8
1015 pslldq xmm8,8
1017 pxor xmm0,xmm8
    [all...]
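
The GHASH code multiplies in GF(2^128) with Karatsuba: pshufd xmm8, xmm0, 78 (0x4e) swaps the two 64-bit halves of the operand and the following pxor forms hi^lo, so the middle partial product needs only one pclmulqdq. That preparation step, as a sketch:

    #include <emmintrin.h>

    /* Build the Karatsuba middle operand: hi^lo in both halves. */
    static __m128i karatsuba_mid(__m128i x)
    {
        __m128i swapped = _mm_shuffle_epi32(x, 78); /* qword swap */
        return _mm_xor_si128(swapped, x);
    }
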
  /external/llvm/test/TableGen/
cast.td 54 def XMM8: Register<"xmm8">;
65 XMM8, XMM9, XMM10, XMM11,
  /external/boringssl/win-x86_64/crypto/sha/
sha1-x86_64.asm 1270 movaps XMMWORD[(-40-64)+rax],xmm8
1321 movdqa xmm8,xmm3
1329 psrldq xmm8,4
1335 pxor xmm8,xmm2
1339 pxor xmm4,xmm8
1349 movdqa xmm8,xmm4
1355 psrld xmm8,31
1365 por xmm4,xmm8
1406 movdqa xmm8,xmm5
1412 pslldq xmm8,1
    [all...]
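
The SSSE3 SHA-1 code expands four message-schedule words per step: the pslldq/psrldq byte shifts line up W[t-3], W[t-8], W[t-14] and W[t-16], and the psrld $31 / por pair above is the per-lane rotate-left-by-1 from W[t] = rol1(W[t-3] ^ W[t-8] ^ W[t-14] ^ W[t-16]). That rotate, sketched:

    #include <emmintrin.h>

    /* Rotate each 32-bit lane left by one bit. */
    static __m128i rol1_epi32(__m128i x)
    {
        return _mm_or_si128(_mm_slli_epi32(x, 1), _mm_srli_epi32(x, 31));
    }
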
  /external/libvpx/libvpx/third_party/libyuv/source/
rotate.cc 531 "movdqa %%xmm0,%%xmm8 \n"
533 "punpckhbw %%xmm1,%%xmm8 \n"
536 "movdqa %%xmm8,%%xmm9 \n"
584 "punpcklwd %%xmm10,%%xmm8 \n"
586 "movdqa %%xmm8,%%xmm10 \n"
622 "punpckldq %%xmm12,%%xmm8 \n"
623 "movq %%xmm8,(%1) \n"
624 "movdqa %%xmm8,%%xmm12 \n"
655 "xmm8", "xmm9", "xmm10", "xmm11", "xmm12", "xmm13", "xmm14", "xmm15"
672 "movdqa %%xmm0,%%xmm8 \n
    [all...]
  /external/libyuv/files/source/
rotate.cc 507 "movdqa %%xmm0,%%xmm8 \n"
509 "punpckhbw %%xmm1,%%xmm8 \n"
512 "movdqa %%xmm8,%%xmm9 \n"
560 "punpcklwd %%xmm10,%%xmm8 \n"
562 "movdqa %%xmm8,%%xmm10 \n"
598 "punpckldq %%xmm12,%%xmm8 \n"
599 "movq %%xmm8,(%1) \n"
600 "movdqa %%xmm8,%%xmm12 \n"
631 "xmm8", "xmm9", "xmm10", "xmm11", "xmm12", "xmm13", "xmm14", "xmm15"
648 "movdqa %%xmm0,%%xmm8 \n
    [all...]
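
The two rotate.cc hits are the same libyuv transpose kernel vendored twice. It interleaves rows in three stages (punpcklbw/punpckhbw on bytes, then words, then dwords), and the x86-64-only path claims xmm8-xmm15 as clobbers so a full tile of rows stays in registers. Stage one for a single row pair, as a sketch:

    #include <emmintrin.h>

    /* Byte-interleave two rows; the punpckhbw half is what lands in
       xmm8 in the snippet above. */
    static void interleave_rows(__m128i r0, __m128i r1,
                                __m128i *lo, __m128i *hi)
    {
        *lo = _mm_unpacklo_epi8(r0, r1); /* punpcklbw */
        *hi = _mm_unpackhi_epi8(r0, r1); /* punpckhbw */
    }
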

