    Searched refs: xmm8 (Results 1 - 25 of 42)


  /external/llvm/test/MC/COFF/
seh.s 75 // CHECK-NEXT: 0x0E: SAVE_XMM128 reg=XMM8, offset=0x0
133 movups %xmm8, (%rsp)
134 .seh_savexmm %xmm8, 0
  /external/libvpx/libvpx/vp8/common/x86/
loopfilter_block_sse2_x86_64.asm 204 movdqa xmm8, i5
207 LF_FILTER_HEV_MASK xmm0, xmm1, xmm2, xmm3, xmm4, xmm8, xmm9, xmm10
212 movdqa xmm8, i5
213 LF_FILTER xmm1, xmm2, xmm3, xmm8, xmm0, xmm4
219 movdqa i5, xmm8
227 LF_FILTER_HEV_MASK xmm3, xmm8, xmm0, xmm1, xmm2, xmm4, xmm10, xmm11, xmm9
232 movdqa xmm8, i9
233 LF_FILTER xmm0, xmm1, xmm4, xmm8, xmm3, xmm2
239 movdqa i9, xmm8
247 LF_FILTER_HEV_MASK xmm4, xmm8, xmm0, xmm1, xmm2, xmm3, xmm9, xmm11, xmm1
    [all...]
  /hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/vp8/common/x86/
loopfilter_block_sse2.asm 204 movdqa xmm8, i5
207 LF_FILTER_HEV_MASK xmm0, xmm1, xmm2, xmm3, xmm4, xmm8, xmm9, xmm10
212 movdqa xmm8, i5
213 LF_FILTER xmm1, xmm2, xmm3, xmm8, xmm0, xmm4
219 movdqa i5, xmm8
227 LF_FILTER_HEV_MASK xmm3, xmm8, xmm0, xmm1, xmm2, xmm4, xmm10, xmm11, xmm9
232 movdqa xmm8, i9
233 LF_FILTER xmm0, xmm1, xmm4, xmm8, xmm3, xmm2
239 movdqa i9, xmm8
247 LF_FILTER_HEV_MASK xmm4, xmm8, xmm0, xmm1, xmm2, xmm3, xmm9, xmm11, xmm1
    [all...]
  /external/llvm/test/tools/llvm-objdump/Inputs/
win64-unwind.exe.coff-x86_64.asm 11 movups %xmm8, (%rsp)
12 .seh_savexmm %xmm8, 0
  /external/boringssl/linux-x86_64/crypto/aes/
bsaes-x86_64.S 14 movdqa (%rax),%xmm8
17 pxor %xmm8,%xmm15
18 pxor %xmm8,%xmm0
19 pxor %xmm8,%xmm1
20 pxor %xmm8,%xmm2
23 pxor %xmm8,%xmm3
24 pxor %xmm8,%xmm4
27 pxor %xmm8,%xmm5
28 pxor %xmm8,%xmm6
35 movdqa 16(%r11),%xmm8
    [all...]
aesni-x86_64.S 383 pxor %xmm0,%xmm8
447 pxor %xmm0,%xmm8
520 movdqu 96(%rdi),%xmm8
541 movups %xmm8,96(%rsi)
542 movdqu 96(%rdi),%xmm8
562 movups %xmm8,96(%rsi)
584 movdqu 96(%rdi),%xmm8
593 movups %xmm8,96(%rsi)
663 movdqu 96(%rdi),%xmm8
684 movups %xmm8,96(%rsi)
    [all...]
  /external/boringssl/mac-x86_64/crypto/aes/
bsaes-x86_64.S 12 movdqa (%rax),%xmm8
15 pxor %xmm8,%xmm15
16 pxor %xmm8,%xmm0
17 pxor %xmm8,%xmm1
18 pxor %xmm8,%xmm2
21 pxor %xmm8,%xmm3
22 pxor %xmm8,%xmm4
25 pxor %xmm8,%xmm5
26 pxor %xmm8,%xmm6
33 movdqa 16(%r11),%xmm8
    [all...]
aesni-x86_64.S 382 pxor %xmm0,%xmm8
446 pxor %xmm0,%xmm8
519 movdqu 96(%rdi),%xmm8
540 movups %xmm8,96(%rsi)
541 movdqu 96(%rdi),%xmm8
561 movups %xmm8,96(%rsi)
583 movdqu 96(%rdi),%xmm8
592 movups %xmm8,96(%rsi)
662 movdqu 96(%rdi),%xmm8
683 movups %xmm8,96(%rsi)
    [all...]
  /hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/vp8/encoder/x86/
quantize_sse4.asm 127 pxor xmm8, xmm8
163 ZIGZAG_LOOP 8, 0, xmm3, xmm7, xmm8
168 ZIGZAG_LOOP 9, 1, xmm3, xmm7, xmm8
169 ZIGZAG_LOOP 12, 4, xmm3, xmm7, xmm8
170 ZIGZAG_LOOP 13, 5, xmm3, xmm7, xmm8
171 ZIGZAG_LOOP 10, 2, xmm3, xmm7, xmm8
173 ZIGZAG_LOOP 11, 3, xmm3, xmm7, xmm8
174 ZIGZAG_LOOP 14, 6, xmm3, xmm7, xmm8
175 ZIGZAG_LOOP 15, 7, xmm3, xmm7, xmm8
    [all...]
  /external/llvm/test/MC/AsmParser/
directive_seh.s 17 movups %xmm8, (%rsp)
18 .seh_savexmm %xmm8, 0
  /external/boringssl/win-x86_64/crypto/aes/
bsaes-x86_64.asm 16 movdqa xmm8,XMMWORD[rax]
19 pxor xmm15,xmm8
20 pxor xmm0,xmm8
21 pxor xmm1,xmm8
22 pxor xmm2,xmm8
25 pxor xmm3,xmm8
26 pxor xmm4,xmm8
29 pxor xmm5,xmm8
30 pxor xmm6,xmm8
37 movdqa xmm8,XMMWORD[16+r11]
    [all...]
aesni-x86_64.asm 384 pxor xmm8,xmm0
448 pxor xmm8,xmm0
515 movaps XMMWORD[32+rsp],xmm8
537 movdqu xmm8,XMMWORD[96+rdi]
558 movups XMMWORD[96+rsi],xmm8
559 movdqu xmm8,XMMWORD[96+rdi]
579 movups XMMWORD[96+rsi],xmm8
601 movdqu xmm8,XMMWORD[96+rdi]
610 movups XMMWORD[96+rsi],xmm8
680 movdqu xmm8,XMMWORD[96+rdi]
    [all...]
vpaes-x86_64.asm 228 movdqa xmm8,XMMWORD[$L$k_rcon]
628 movaps XMMWORD[48+rsp],xmm8
647 movaps xmm8,XMMWORD[48+rsp]
679 movaps XMMWORD[48+rsp],xmm8
703 movaps xmm8,XMMWORD[48+rsp]
735 movaps XMMWORD[48+rsp],xmm8
750 movaps xmm8,XMMWORD[48+rsp]
781 movaps XMMWORD[48+rsp],xmm8
796 movaps xmm8,XMMWORD[48+rsp]
832 movaps XMMWORD[48+rsp],xmm8
    [all...]
  /external/boringssl/linux-x86_64/crypto/modes/
ghash-x86_64.S 927 movdqu 0(%rdx),%xmm8
932 pxor %xmm8,%xmm0
936 pshufd $78,%xmm0,%xmm8
937 pxor %xmm0,%xmm8
964 xorps %xmm4,%xmm8
968 pxor %xmm0,%xmm8
970 pxor %xmm1,%xmm8
972 movdqa %xmm8,%xmm9
974 pslldq $8,%xmm8
976 pxor %xmm8,%xmm0
    [all...]
  /external/boringssl/mac-x86_64/crypto/modes/
ghash-x86_64.S 926 movdqu 0(%rdx),%xmm8
931 pxor %xmm8,%xmm0
935 pshufd $78,%xmm0,%xmm8
936 pxor %xmm0,%xmm8
963 xorps %xmm4,%xmm8
967 pxor %xmm0,%xmm8
969 pxor %xmm1,%xmm8
971 movdqa %xmm8,%xmm9
973 pslldq $8,%xmm8
975 pxor %xmm8,%xmm0
    [all...]
  /external/mesa3d/src/mesa/x86-64/
xform4.S 82 movups (%rdx), %xmm8 /* ox | oy | oz | ow */
85 pshufd $0x00, %xmm8, %xmm0 /* ox | ox | ox | ox */
87 pshufd $0x55, %xmm8, %xmm1 /* oy | oy | oy | oy */
89 pshufd $0xAA, %xmm8, %xmm2 /* oz | oz | oz | ox */
91 pshufd $0xFF, %xmm8, %xmm3 /* ow | ow | ow | ow */
168 movups (%rdx), %xmm8 /* ox | oy | oz | ow */
171 pshufd $0x00, %xmm8, %xmm0 /* ox | ox | ox | ox */
173 pshufd $0x55, %xmm8, %xmm1 /* oy | oy | oy | oy */
175 pshufd $0xAA, %xmm8, %xmm2 /* oz | oz | oz | ox */
177 pshufd $0xFF, %xmm8, %xmm3 /* ow | ow | ow | ow */
    [all...]
  /external/boringssl/win-x86_64/crypto/modes/
ghash-x86_64.asm 968 movdqu xmm8,XMMWORD[r8]
973 pxor xmm0,xmm8
977 pshufd xmm8,xmm0,78
978 pxor xmm8,xmm0
1005 xorps xmm8,xmm4
1009 pxor xmm8,xmm0
1011 pxor xmm8,xmm1
1013 movdqa xmm9,xmm8
1015 pslldq xmm8,8
1017 pxor xmm0,xmm8
    [all...]
  /external/boringssl/linux-x86_64/crypto/sha/
sha1-x86_64.S 1292 movdqa %xmm3,%xmm8
1300 psrldq $4,%xmm8
1306 pxor %xmm2,%xmm8
1310 pxor %xmm8,%xmm4
1320 movdqa %xmm4,%xmm8
1326 psrld $31,%xmm8
1336 por %xmm8,%xmm4
1377 movdqa %xmm5,%xmm8
1383 pslldq $12,%xmm8
1391 movdqa %xmm8,%xmm1
    [all...]
  /external/boringssl/mac-x86_64/crypto/sha/
sha1-x86_64.S 1291 movdqa %xmm3,%xmm8
1299 psrldq $4,%xmm8
1305 pxor %xmm2,%xmm8
1309 pxor %xmm8,%xmm4
1319 movdqa %xmm4,%xmm8
1325 psrld $31,%xmm8
1335 por %xmm8,%xmm4
1376 movdqa %xmm5,%xmm8
1382 pslldq $12,%xmm8
1390 movdqa %xmm8,%xmm1
    [all...]
  /external/boringssl/win-x86_64/crypto/sha/
sha1-x86_64.asm 1270 movaps XMMWORD[(-40-64)+rax],xmm8
1321 movdqa xmm8,xmm3
1329 psrldq xmm8,4
1335 pxor xmm8,xmm2
1339 pxor xmm4,xmm8
1349 movdqa xmm8,xmm4
1355 psrld xmm8,31
1365 por xmm4,xmm8
1406 movdqa xmm8,xmm5
1412 pslldq xmm8,12
    [all...]
  /external/llvm/test/MC/X86/
x86_64-avx-encoding.s 3 // CHECK: vaddss %xmm8, %xmm9, %xmm10
5 vaddss %xmm8, %xmm9, %xmm10
7 // CHECK: vmulss %xmm8, %xmm9, %xmm10
9 vmulss %xmm8, %xmm9, %xmm10
11 // CHECK: vsubss %xmm8, %xmm9, %xmm10
13 vsubss %xmm8, %xmm9, %xmm10
15 // CHECK: vdivss %xmm8, %xmm9, %xmm10
17 vdivss %xmm8, %xmm9, %xmm10
19 // CHECK: vaddsd %xmm8, %xmm9, %xmm10
21 vaddsd %xmm8, %xmm9, %xmm10
    [all...]
  /external/boringssl/src/crypto/aes/asm/
vpaes-x86_64.pl 80 ## Preserves %xmm6 - %xmm8 so you get some local vectors
288 movdqa .Lk_rcon(%rip), %xmm8 # load rcon
486 ## Adds rcon from low byte of %xmm8, then rotates %xmm8 for
498 # extract rcon from xmm8
500 palignr \$15, %xmm8, %xmm1
501 palignr \$15, %xmm8, %xmm8
681 movaps %xmm8,0x30(%rsp)
704 movaps 0x30(%rsp),%xmm8
    [all...]
  /external/google-breakpad/src/google_breakpad/common/
minidump_cpu_amd64.h 180 uint128_struct xmm8; member in struct:__anon9467::__anon9468::__anon9469
  /external/zlib/src/contrib/amd64/
amd64-match.S 303 movdqu 48(%prev, %rdx), %xmm8
304 pcmpeqb %xmm8, %xmm7
  /external/v8/test/mjsunit/regress/
regress-crbug-173907.js 54 var xmm8 = v*v*v*v*v*v*v*v*v;
