
Lines Matching full:xmm9
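The matches below come from an AVX test harness in which each quoted string is the register form (or memory form) of one instruction, spliced into an inline-asm test macro; the clobber-list fragments at source lines 84 and 104 belong to that macro. The following is a minimal, hypothetical sketch of the pattern, not the harness's actual macro: the Block type, the TEST_REG_FORM name, and the load/store choreography are illustrative assumptions (build with gcc -mavx and run on an AVX-capable machine).

#include <stdio.h>
#include <string.h>

/* 64 bytes of test state: two 16-byte inputs and a 16-byte result slot. */
typedef struct { unsigned char b[64]; } Block;

/* Wrap one register-form instruction string in inline asm: load xmm8/xmm9
   from the block, run the instruction (it writes xmm7), store xmm7 back.
   The clobber list mirrors the fragment visible at source line 84. */
#define TEST_REG_FORM(_name, _insn)                                  \
   static void test_##_name(Block* blk)                              \
   {                                                                 \
      __asm__ __volatile__(                                          \
         "leaq     0(%0), %%rax      \n\t"                           \
         "vmovupd  0(%%rax), %%xmm8  \n\t"                           \
         "vmovupd 16(%%rax), %%xmm9  \n\t"                           \
         _insn                      "\n\t"                           \
         "vmovupd %%xmm7, 32(%%rax)  \n\t"                           \
         : /*OUT*/                                                   \
         : /*IN*/ "r"(blk)                                           \
         : /*TRASH*/ "xmm7","xmm8","xmm9","rax","memory","cc");      \
   }

/* One of the matched strings, e.g. the vaddps form from source line 431. */
TEST_REG_FORM(VADDPS_128, "vaddps %%xmm9, %%xmm8, %%xmm7")

int main(void)
{
   Block blk;
   memset(&blk, 0x55, sizeof blk);        /* arbitrary input pattern */
   test_VADDPS_128(&blk);
   for (int i = 32; i < 48; i++)          /* dump the 16-byte result */
      printf("%02x", blk.b[i]);
   printf("\n");
   return 0;
}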

84           : /*TRASH*/"xmm0","xmm7","xmm8","xmm6","xmm9","r14","memory","cc" \
104 "xmm0","xmm8","xmm7","xmm9","r14","rax","memory","cc" \
146 "vpblendvb %%xmm9, %%xmm6, %%xmm8, %%xmm7",
147 "vpblendvb %%xmm9, (%%rax), %%xmm8, %%xmm7")
367 "vmovups %%xmm9, (%%rax)")
371 "vmovq (%%rax), %%xmm9")
374 "vpcmpestrm $0x45, %%xmm7, %%xmm8; movapd %%xmm0, %%xmm9",
375 "vpcmpestrm $0x45, (%%rax), %%xmm8; movapd %%xmm0, %%xmm9")
382 "vcvtsd2ss %%xmm9, %%xmm8, %%xmm7",
386 "vcvtss2sd %%xmm9, %%xmm8, %%xmm7",
390 "vpackuswb %%xmm9, %%xmm8, %%xmm7",
405 "vpand %%xmm9, %%xmm8, %%xmm7",
415 "vpcmpeqb %%xmm9, %%xmm8, %%xmm7",
419 "vshufps $0x39, %%xmm9, %%xmm8, %%xmm7",
423 "vmulps %%xmm9, %%xmm8, %%xmm7",
427 "vsubps %%xmm9, %%xmm8, %%xmm7",
431 "vaddps %%xmm9, %%xmm8, %%xmm7",
435 "vmaxps %%xmm9, %%xmm8, %%xmm7",
443 "vmaxpd %%xmm9, %%xmm8, %%xmm7",
451 "vminps %%xmm9, %%xmm8, %%xmm7",
459 "vminpd %%xmm9, %%xmm8, %%xmm7",
471 "vpshuflw $0x39, %%xmm9, %%xmm7",
475 "vpshufhw $0x39, %%xmm9, %%xmm7",
479 "vpmullw %%xmm9, %%xmm8, %%xmm7",
483 "vpaddusw %%xmm9, %%xmm8, %%xmm7",
487 "vpmulhuw %%xmm9, %%xmm8, %%xmm7",
491 "vpaddusb %%xmm9, %%xmm8, %%xmm7",
503 "vpslld $0x5, %%xmm9, %%xmm7")
506 "vpsrld $0x5, %%xmm9, %%xmm7")
509 "vpsrad $0x5, %%xmm9, %%xmm7")
512 "vpsubusb %%xmm9, %%xmm8, %%xmm7",
516 "vpsubsb %%xmm9, %%xmm8, %%xmm7",
520 "vpsrldq $0x5, %%xmm9, %%xmm7")
523 "vpslldq $0x5, %%xmm9, %%xmm7")
526 "vpandn %%xmm9, %%xmm8, %%xmm7",
542 "vpsrlw $0x5, %%xmm9, %%xmm7")
545 "vpsllw $0x5, %%xmm9, %%xmm7")
552 "vpackssdw %%xmm9, %%xmm8, %%xmm7",
572 "vmovupd %%xmm9, %%xmm6",
577 "vmovapd (%%rax), %%xmm9")
585 "vmovaps (%%rax), %%xmm9")
588 "vmovaps %%xmm9, %%xmm6",
596 "vmovapd %%xmm9, %%xmm6",
605 "vmovdqu (%%rax), %%xmm9")
609 "vmovdqa (%%rax), %%xmm9")
616 "vmovdqu %%xmm9, %%xmm6",
620 "vmovdqa %%xmm9, %%xmm6",
631 "vmovd (%%rax), %%xmm9")
635 "vmovddup (%%rax), %%xmm9")
722 "vcvtpd2psx (%%rax), %%xmm9")
725 "vextractf128 $0x0, %%ymm7, %%xmm9",
729 "vextractf128 $0x1, %%ymm7, %%xmm9",
733 "vinsertf128 $0x0, %%xmm9, %%ymm7, %%ymm8",
737 "vinsertf128 $0x1, %%xmm9, %%ymm7, %%ymm8",
753 "vpshufd $0x39, %%xmm9, %%xmm8",
837 "vmovupd %%xmm6, %%xmm9",
878 "vmovups (%%rax), %%xmm9")
909 "vpsrlq $0x5, %%xmm9, %%xmm7")
920 "vpsllq $0x5, %%xmm9, %%xmm7")
1113 "vpsubusw %%xmm9, %%xmm8, %%xmm7",
1117 "vpsubsw %%xmm9, %%xmm8, %%xmm7",
1145 "vcvtpd2psy (%%rax), %%xmm9")
1163 "vdivps %%xmm9, %%xmm8, %%xmm7",
1204 "vpmulhw %%xmm9, %%xmm8, %%xmm7",
1212 "vpsraw $0x5, %%xmm9, %%xmm7")
1271 "vdppd $0xA5, (%%rax), %%xmm9, %%xmm6")
1274 "vdppd $0xFF, (%%rax), %%xmm9, %%xmm6")
1277 "vdppd $0x37, (%%rax), %%xmm9, %%xmm6")
1280 "vdppd $0x73, (%%rax), %%xmm9, %%xmm6")
1284 "vdpps $0xA5, (%%rax), %%xmm9, %%xmm6")
1287 "vdpps $0xFF, (%%rax), %%xmm9, %%xmm6")
1290 "vdpps $0x37, (%%rax), %%xmm9, %%xmm6")
1293 "vdpps $0x73, (%%rax), %%xmm9, %%xmm6")
1321 GEN_test_Ronly(VMOVSD_REG_XMM, "vmovsd %%xmm9, %%xmm7, %%xmm8")
1323 GEN_test_Ronly(VMOVSS_REG_XMM, "vmovss %%xmm9, %%xmm7, %%xmm8")
1330 "vshufpd $0, %%xmm9, %%xmm8, %%xmm7",
1333 "vshufpd $2, %%xmm9, %%xmm8, %%xmm7",
1461 "vcvtps2pd %%xmm9, %%ymm6",
1465 "vcvttps2dq %%xmm9, %%xmm6",
1473 "vcvtdq2ps %%xmm9, %%xmm6",
1481 "vcvttpd2dqx %%xmm9, %%xmm6",
1489 "vcvtpd2dqx %%xmm9, %%xmm6",
1497 "vmovsldup %%xmm9, %%xmm6",
1505 "vmovshdup %%xmm9, %%xmm6",
1531 "vpsllw %%xmm6, %%xmm8, %%xmm9",
1533 "vpsllw 128(%%rax), %%xmm8, %%xmm9")
1538 "vpsrlw %%xmm6, %%xmm8, %%xmm9",
1540 "vpsrlw 128(%%rax), %%xmm8, %%xmm9")
1545 "vpsraw %%xmm6, %%xmm8, %%xmm9",
1547 "vpsraw 128(%%rax), %%xmm8, %%xmm9")
1552 "vpslld %%xmm6, %%xmm8, %%xmm9",
1554 "vpslld 128(%%rax), %%xmm8, %%xmm9")
1559 "vpsrld %%xmm6, %%xmm8, %%xmm9",
1561 "vpsrld 128(%%rax), %%xmm8, %%xmm9")
1566 "vpsrad %%xmm6, %%xmm8, %%xmm9",
1568 "vpsrad 128(%%rax), %%xmm8, %%xmm9")
1573 "vpsllq %%xmm6, %%xmm8, %%xmm9",
1575 "vpsllq 128(%%rax), %%xmm8, %%xmm9")
1580 "vpsrlq %%xmm6, %%xmm8, %%xmm9",
1582 "vpsrlq 128(%%rax), %%xmm8, %%xmm9")
1585 "vroundps $0x0, %%xmm8, %%xmm9",
1586 "vroundps $0x0, (%%rax), %%xmm9")
1588 "vroundps $0x1, %%xmm8, %%xmm9",
1589 "vroundps $0x1, (%%rax), %%xmm9")
1591 "vroundps $0x2, %%xmm8, %%xmm9",
1592 "vroundps $0x2, (%%rax), %%xmm9")
1594 "vroundps $0x3, %%xmm8, %%xmm9",
1595 "vroundps $0x3, (%%rax), %%xmm9")
1597 "vroundps $0x4, %%xmm8, %%xmm9",
1598 "vroundps $0x4, (%%rax), %%xmm9")
1617 "vroundpd $0x0, %%xmm8, %%xmm9",
1618 "vroundpd $0x0, (%%rax), %%xmm9")
1620 "vroundpd $0x1, %%xmm8, %%xmm9",
1621 "vroundpd $0x1, (%%rax), %%xmm9")
1623 "vroundpd $0x2, %%xmm8, %%xmm9",
1624 "vroundpd $0x2, (%%rax), %%xmm9")
1626 "vroundpd $0x3, %%xmm8, %%xmm9",
1627 "vroundpd $0x3, (%%rax), %%xmm9")
1629 "vroundpd $0x4, %%xmm8, %%xmm9",
1630 "vroundpd $0x4, (%%rax), %%xmm9")
1669 "vroundss $0x0, %%xmm8, %%xmm6, %%xmm9",
1670 "vroundss $0x0, (%%rax), %%xmm6, %%xmm9")
1672 "vroundss $0x1, %%xmm8, %%xmm6, %%xmm9",
1673 "vroundss $0x1, (%%rax), %%xmm6, %%xmm9")
1675 "vroundss $0x2, %%xmm8, %%xmm6, %%xmm9",
1676 "vroundss $0x2, (%%rax), %%xmm6, %%xmm9")
1678 "vroundss $0x3, %%xmm8, %%xmm6, %%xmm9",
1679 "vroundss $0x3, (%%rax), %%xmm6, %%xmm9")
1681 "vroundss $0x4, %%xmm8, %%xmm6, %%xmm9",
1682 "vroundss $0x4, (%%rax), %%xmm6, %%xmm9")
1684 "vroundss $0x5, %%xmm8, %%xmm6, %%xmm9",
1685 "vroundss $0x5, (%%rax), %%xmm6, %%xmm9")
1688 "vroundsd $0x0, %%xmm8, %%xmm6, %%xmm9",
1689 "vroundsd $0x0, (%%rax), %%xmm6, %%xmm9")
1691 "vroundsd $0x1, %%xmm8, %%xmm6, %%xmm9",
1692 "vroundsd $0x1, (%%rax), %%xmm6, %%xmm9")
1694 "vroundsd $0x2, %%xmm8, %%xmm6, %%xmm9",
1695 "vroundsd $0x2, (%%rax), %%xmm6, %%xmm9")
1697 "vroundsd $0x3, %%xmm8, %%xmm6, %%xmm9",
1698 "vroundsd $0x3, (%%rax), %%xmm6, %%xmm9")
1700 "vroundsd $0x4, %%xmm8, %%xmm6, %%xmm9",
1701 "vroundsd $0x4, (%%rax), %%xmm6, %%xmm9")
1703 "vroundsd $0x5, %%xmm8, %%xmm6, %%xmm9",
1704 "vroundsd $0x5, (%%rax), %%xmm6, %%xmm9")
1773 "vtestps %%xmm8, %%xmm9; "
1775 "vtestps (%%rax), %%xmm9; "
1833 "vtestpd %%xmm8, %%xmm9; "
1835 "vtestpd (%%rax), %%xmm9; "
1872 "vblendvps %%xmm9, %%xmm6, %%xmm8, %%xmm7",
1873 "vblendvps %%xmm9, (%%rax), %%xmm8, %%xmm7")
1880 "vblendvpd %%xmm9, %%xmm6, %%xmm8, %%xmm7",
1881 "vblendvpd %%xmm9, (%%rax), %%xmm8, %%xmm7")
1943 "vmovntdqa (%%rax), %%xmm9")
1947 "vmaskmovdqu %%xmm8, %%xmm9;"
1951 "vmovmskpd %%xmm9, %%r14d")
1957 "vmovmskps %%xmm9, %%r14d")
1963 "vmovntpd %%xmm9, (%%rax)")
1969 "vmovntps %%xmm9, (%%rax)")
2203 "vmaskmovps (%%rax,%%rax,4), %%xmm6, %%xmm9")
2213 "vmaskmovpd (%%rax,%%rax,4), %%xmm6, %%xmm9")
2223 "vmaskmovps %%xmm9, %%xmm6, (%%rax,%%rax,4)")
2233 "vmaskmovpd %%xmm9, %%xmm6, (%%rax,%%rax,4)")