Lines Matching full:xmm9
84 /*TRASH*/"xmm0","xmm7","xmm8","xmm6","xmm9","r14","memory","cc" \
104 "xmm0","xmm8","xmm7","xmm9","r14","rax","memory","cc" \
146 "vpblendvb %%xmm9, %%xmm6, %%xmm8, %%xmm7",
147 "vpblendvb %%xmm9, (%%rax), %%xmm8, %%xmm7")
337 "vmovups %%xmm9, (%%rax)")
341 "vmovq (%%rax), %%xmm9")
344 "vpcmpestrm $0x45, %%xmm7, %%xmm8; movapd %%xmm0, %%xmm9",
345 "vpcmpestrm $0x45, (%%rax), %%xmm8; movapd %%xmm0, %%xmm9")
352 "vcvtsd2ss %%xmm9, %%xmm8, %%xmm7",
356 "vcvtss2sd %%xmm9, %%xmm8, %%xmm7",
360 "vpackuswb %%xmm9, %%xmm8, %%xmm7",
375 "vpand %%xmm9, %%xmm8, %%xmm7",
385 "vpcmpeqb %%xmm9, %%xmm8, %%xmm7",
389 "vshufps $0x39, %%xmm9, %%xmm8, %%xmm7",
393 "vmulps %%xmm9, %%xmm8, %%xmm7",
397 "vsubps %%xmm9, %%xmm8, %%xmm7",
401 "vaddps %%xmm9, %%xmm8, %%xmm7",
405 "vmaxps %%xmm9, %%xmm8, %%xmm7",
413 "vmaxpd %%xmm9, %%xmm8, %%xmm7",
421 "vminps %%xmm9, %%xmm8, %%xmm7",
429 "vminpd %%xmm9, %%xmm8, %%xmm7",
441 "vpshuflw $0x39, %%xmm9, %%xmm7",
445 "vpshufhw $0x39, %%xmm9, %%xmm7",
449 "vpmullw %%xmm9, %%xmm8, %%xmm7",
453 "vpaddusw %%xmm9, %%xmm8, %%xmm7",
457 "vpmulhuw %%xmm9, %%xmm8, %%xmm7",
461 "vpaddusb %%xmm9, %%xmm8, %%xmm7",
473 "vpslld $0x5, %%xmm9, %%xmm7")
476 "vpsrld $0x5, %%xmm9, %%xmm7")
479 "vpsrad $0x5, %%xmm9, %%xmm7")
482 "vpsubusb %%xmm9, %%xmm8, %%xmm7",
486 "vpsubsb %%xmm9, %%xmm8, %%xmm7",
490 "vpsrldq $0x5, %%xmm9, %%xmm7")
493 "vpslldq $0x5, %%xmm9, %%xmm7")
496 "vpandn %%xmm9, %%xmm8, %%xmm7",
512 "vpsrlw $0x5, %%xmm9, %%xmm7")
515 "vpsllw $0x5, %%xmm9, %%xmm7")
522 "vpackssdw %%xmm9, %%xmm8, %%xmm7",
542 "vmovupd %%xmm9, %%xmm6",
547 "vmovapd (%%rax), %%xmm9")
555 "vmovaps (%%rax), %%xmm9")
558 "vmovaps %%xmm9, %%xmm6",
566 "vmovapd %%xmm9, %%xmm6",
575 "vmovdqu (%%rax), %%xmm9")
579 "vmovdqa (%%rax), %%xmm9")
586 "vmovdqu %%xmm9, %%xmm6",
590 "vmovdqa %%xmm9, %%xmm6",
601 "vmovd (%%rax), %%xmm9")
605 "vmovddup (%%rax), %%xmm9")
662 "vcvtpd2psx (%%rax), %%xmm9")
665 "vextractf128 $0x0, %%ymm7, %%xmm9",
669 "vextractf128 $0x1, %%ymm7, %%xmm9",
673 "vinsertf128 $0x0, %%xmm9, %%ymm7, %%ymm8",
677 "vinsertf128 $0x1, %%xmm9, %%ymm7, %%ymm8",
693 "vpshufd $0x39, %%xmm9, %%xmm8",
777 "vmovupd %%xmm6, %%xmm9",
818 "vmovups (%%rax), %%xmm9")
849 "vpsrlq $0x5, %%xmm9, %%xmm7")
860 "vpsllq $0x5, %%xmm9, %%xmm7")
1053 "vpsubusw %%xmm9, %%xmm8, %%xmm7",
1057 "vpsubsw %%xmm9, %%xmm8, %%xmm7",
1085 "vcvtpd2psy (%%rax), %%xmm9")
1103 "vdivps %%xmm9, %%xmm8, %%xmm7",
1144 "vpmulhw %%xmm9, %%xmm8, %%xmm7",
1152 "vpsraw $0x5, %%xmm9, %%xmm7")
1211 "vdppd $0xA5, (%%rax), %%xmm9, %%xmm6")
1214 "vdppd $0xFF, (%%rax), %%xmm9, %%xmm6")
1217 "vdppd $0x37, (%%rax), %%xmm9, %%xmm6")
1220 "vdppd $0x73, (%%rax), %%xmm9, %%xmm6")
1224 "vdpps $0xA5, (%%rax), %%xmm9, %%xmm6")
1227 "vdpps $0xFF, (%%rax), %%xmm9, %%xmm6")
1230 "vdpps $0x37, (%%rax), %%xmm9, %%xmm6")
1233 "vdpps $0x73, (%%rax), %%xmm9, %%xmm6")
1261 GEN_test_Ronly(VMOVSD_REG_XMM, "vmovsd %%xmm9, %%xmm7, %%xmm8")
1263 GEN_test_Ronly(VMOVSS_REG_XMM, "vmovss %%xmm9, %%xmm7, %%xmm8")
1270 "vshufpd $0, %%xmm9, %%xmm8, %%xmm7",
1273 "vshufpd $2, %%xmm9, %%xmm8, %%xmm7",
1401 "vcvtps2pd %%xmm9, %%ymm6",
1405 "vcvttps2dq %%xmm9, %%xmm6",
1413 "vcvtdq2ps %%xmm9, %%xmm6",
1421 "vcvttpd2dqx %%xmm9
1429 "vcvtpd2dqx %%xmm9, %%xmm6",
1437 "vmovsldup %%xmm9, %%xmm6",
1445 "vmovshdup %%xmm9, %%xmm6",
1471 "vpsllw %%xmm6, %%xmm8, %%xmm9",
1473 "vpsllw 128(%%rax), %%xmm8, %%xmm9")
1478 "vpsrlw %%xmm6, %%xmm8, %%xmm9",
1480 "vpsrlw 128(%%rax), %%xmm8, %%xmm9")
1485 "vpsraw %%xmm6, %%xmm8, %%xmm9",
1487 "vpsraw 128(%%rax), %%xmm8, %%xmm9")
1492 "vpslld %%xmm6, %%xmm8, %%xmm9",
1494 "vpslld 128(%%rax), %%xmm8, %%xmm9")
1499 "vpsrld %%xmm6, %%xmm8, %%xmm9",
1501 "vpsrld 128(%%rax), %%xmm8, %%xmm9")
1506 "vpsrad %%xmm6, %%xmm8, %%xmm9",
1508 "vpsrad 128(%%rax), %%xmm8, %%xmm9")
1513 "vpsllq %%xmm6, %%xmm8, %%xmm9",
1515 "vpsllq 128(%%rax), %%xmm8, %%xmm9")
1520 "vpsrlq %%xmm6, %%xmm8, %%xmm9",
1522 "vpsrlq 128(%%rax), %%xmm8, %%xmm9")
1525 "vroundps $0x0, %%xmm8, %%xmm9",
1526 "vroundps $0x0, (%%rax), %%xmm9")
1528 "vroundps $0x1, %%xmm8, %%xmm9",
1529 "vroundps $0x1, (%%rax), %%xmm9")
1531 "vroundps $0x2, %%xmm8, %%xmm9",
1532 "vroundps $0x2, (%%rax), %%xmm9")
1534 "vroundps $0x3, %%xmm8, %%xmm9",
1535 "vroundps $0x3, (%%rax), %%xmm9")
1537 "vroundps $0x4, %%xmm8, %%xmm9",
1538 "vroundps $0x4, (%%rax), %%xmm9")
1557 "vroundpd $0x0, %%xmm8, %%xmm9",
1558 "vroundpd $0x0, (%%rax), %%xmm9")
1560 "vroundpd $0x1, %%xmm8, %%xmm9",
1561 "vroundpd $0x1, (%%rax), %%xmm9")
1563 "vroundpd $0x2, %%xmm8, %%xmm9",
1564 "vroundpd $0x2, (%%rax), %%xmm9")
1566 "vroundpd $0x3, %%xmm8, %%xmm9",
1567 "vroundpd $0x3, (%%rax), %%xmm9")
1569 "vroundpd $0x4, %%xmm8, %%xmm9",
1570 "vroundpd $0x4, (%%rax), %%xmm9")
1609 "vroundss $0x0, %%xmm8, %%xmm6, %%xmm9",
1610 "vroundss $0x0, (%%rax), %%xmm6, %%xmm9")
1612 "vroundss $0x1, %%xmm8, %%xmm6, %%xmm9",
1613 "vroundss $0x1, (%%rax), %%xmm6, %%xmm9")
1615 "vroundss $0x2, %%xmm8, %%xmm6, %%xmm9",
1616 "vroundss $0x2, (%%rax), %%xmm6, %%xmm9")
1618 "vroundss $0x3, %%xmm8, %%xmm6, %%xmm9",
1619 "vroundss $0x3, (%%rax), %%xmm6, %%xmm9")
1621 "vroundss $0x4, %%xmm8, %%xmm6, %%xmm9",
1622 "vroundss $0x4, (%%rax), %%xmm6, %%xmm9")
1624 "vroundss $0x5, %%xmm8, %%xmm6, %%xmm9",
1625 "vroundss $0x5, (%%rax), %%xmm6, %%xmm9")
1628 "vroundsd $0x0, %%xmm8, %%xmm6, %%xmm9",
1629 "vroundsd $0x0, (%%rax), %%xmm6, %%xmm9")
1631 "vroundsd $0x1, %%xmm8, %%xmm6, %%xmm9",
1632 "vroundsd $0x1, (%%rax), %%xmm6, %%xmm9")
1634 "vroundsd $0x2, %%xmm8, %%xmm6, %%xmm9",
1635 "vroundsd $0x2, (%%rax), %%xmm6, %%xmm9")
1637 "vroundsd $0x3, %%xmm8, %%xmm6, %%xmm9",
1638 "vroundsd $0x3, (%%rax), %%xmm6, %%xmm9")
1640 "vroundsd $0x4, %%xmm8, %%xmm6, %%xmm9",
1641 "vroundsd $0x4, (%%rax), %%xmm6, %%xmm9")
1643 "vroundsd $0x5, %%xmm8, %%xmm6, %%xmm9",
1644 "vroundsd $0x5, (%%rax), %%xmm6, %%xmm9")
1713 "vtestps %%xmm8, %%xmm9; "
1715 "vtestps (%%rax), %%xmm9; "
1773 "vtestpd %%xmm8, %%xmm9; "
1775 "vtestpd (%%rax), %%xmm9; "
1812 "vblendvps %%xmm9, %%xmm6, %%xmm8, %%xmm7",
1813 "vblendvps %%xmm9, (%%rax), %%xmm8, %%xmm7")
1820 "vblendvpd %%xmm9, %%xmm6, %%xmm8, %%xmm7",
1821 "vblendvpd %%xmm9, (%%rax), %%xmm8, %%xmm7")
1883 "vmovntdqa (%%rax), %%xmm9")
1887 "vmaskmovdqu %%xmm8, %%xmm9;"
1891 "vmovmskpd %%xmm9, %%r14d")
1897 "vmovmskps %%xmm9, %%r14d")
1903 "vmovntpd %%xmm9, (%%rax)")
1909 "vmovntps %%xmm9, (%%rax)")
2143 "vmaskmovps (%%rax,%%rax,4), %%xmm6, %%xmm9")
2153 "vmaskmovpd (%%rax,%%rax,4), %%xmm6, %%xmm9")
2163 "vmaskmovps %%xmm9, %%xmm6, (%%rax,%%rax,4)")
2173 "vmaskmovpd %%xmm9, %%xmm6, (%%rax,%%rax,4)")