Lines Matching full:xmm8
90 : /*TRASH*/"xmm0","xmm7","xmm8","xmm6","xmm9","r14","memory","cc" \
110 "xmm0","xmm8","xmm7","xmm9","r14","rax","memory","cc" \
762 "vpblendd $0x00, %%xmm6, %%xmm8, %%xmm7",
763 "vpblendd $0x01, (%%rax), %%xmm8, %%xmm7")
765 "vpblendd $0x02, %%xmm6, %%xmm8, %%xmm7",
766 "vpblendd $0x03, (%%rax), %%xmm8, %%xmm7")
768 "vpblendd $0x04, %%xmm6, %%xmm8, %%xmm7",
769 "vpblendd $0x05, (%%rax), %%xmm8, %%xmm7")
771 "vpblendd $0x06, %%xmm6, %%xmm8, %%xmm7",
772 "vpblendd $0x07, (%%rax), %%xmm8, %%xmm7")
774 "vpblendd $0x08, %%xmm6, %%xmm8, %%xmm7",
775 "vpblendd $0x09, (%%rax), %%xmm8, %%xmm7")
777 "vpblendd $0x0A, %%xmm6, %%xmm8, %%xmm7",
778 "vpblendd $0x0B, (%%rax), %%xmm8, %%xmm7")
780 "vpblendd $0x0C, %%xmm6, %%xmm8, %%xmm7",
781 "vpblendd $0x0D, (%%rax), %%xmm8, %%xmm7")
783 "vpblendd $0x0E, %%xmm6, %%xmm8, %%xmm7",
784 "vpblendd $0x0F, (%%rax), %%xmm8, %%xmm7")
814 "vpsllvd %%xmm6, %%xmm8, %%xmm7",
818 "vpsllvd (%%rax), %%xmm8, %%xmm7")
835 "vpsllvq %%xmm6, %%xmm8, %%xmm7",
837 "vpsllvq (%%rax), %%xmm8, %%xmm7")
851 "vpsrlvd %%xmm6, %%xmm8, %%xmm7",
855 "vpsrlvd (%%rax), %%xmm8, %%xmm7")
872 "vpsrlvq %%xmm6, %%xmm8, %%xmm7",
874 "vpsrlvq (%%rax), %%xmm8, %%xmm7")
888 "vpsravd %%xmm6, %%xmm8, %%xmm7",
892 "vpsravd (%%rax), %%xmm8, %%xmm7")
939 "vpmaskmovd (%%rax), %%xmm8, %%xmm7;"
949 "vpmaskmovq (%%rax), %%xmm8, %%xmm7;"
959 "vpmaskmovd %%xmm8, %%xmm7, (%%rax);"
969 "vpmaskmovq %%xmm8, %%xmm7, (%%rax);"
979 "vpslld $25, %%xmm7, %%xmm8;"
980 "vpsrld $25, %%xmm8, %%xmm8;"
981 "vblendvps %%xmm6, %%xmm8, %%xmm7, %%xmm8;"
983 "vgatherdps %%xmm6, 3(%%r14,%%xmm8,4), %%xmm9;"
995 "vpsllq $57, %%xmm7, %%xmm8;"
996 "vpsrlq $57, %%xmm8, %%xmm8;"
998 "vblendvpd %%xmm9, %%xmm8, %%xmm7, %%xmm8;"
1001 "vgatherqps %%xmm6, 3(%%r14,%%xmm8,4), %%xmm9;"
1015 "vpsllq $57, %%xmm7, %%xmm8;"
1016 "vpsrlq $57, %%xmm8, %%xmm8;"
1018 "vblendvpd %%xmm9, %%xmm8, %%xmm7, %%xmm8;"
1022 "vpsllq $2, %%xmm8, %%xmm8;"
1024 "vpaddq %%xmm7, %%xmm8, %%xmm8;"
1025 "vgatherqps %%xmm6, 1(,%%xmm8,1), %%xmm9;"
1026 "vpsubq %%xmm7, %%xmm8, %%xmm8;"
1047 "vpslld $26, %%xmm7, %%xmm8;"
1048 "vpsrld $26, %%xmm8, %%xmm8;"
1050 "vblendvps %%xmm9, %%xmm8, %%xmm7, %%xmm8;"
1053 "vgatherdpd %%xmm6, 3(%%r14,%%xmm8,8), %%xmm9;"
1064 "vgatherdpd %%ymm6, 3(%%r14,%%xmm8,8), %%ymm9;"
1068 "vpsllq $58, %%xmm7, %%xmm8;"
1069 "vpsrlq $58, %%xmm8, %%xmm8;"
1070 "vblendvpd %%xmm6, %%xmm8, %%xmm7, %%xmm8;"
1072 "vgatherqpd %%xmm6, 3(%%r14,%%xmm8,8), %%xmm9;"
1084 "vpsllq $58, %%xmm7, %%xmm8;"
1085 "vpsrlq $58, %%xmm8, %%xmm8;"
1086 "vblendvpd %%xmm6, %%xmm8, %%xmm7, %%xmm8;"
1089 "vpsllq $2, %%xmm8, %%xmm8;"
1091 "vpaddq %%xmm7, %%xmm8, %%xmm8;"
1092 "vgatherqpd %%xmm6, 1(,%%xmm8,1), %%xmm9;"
1093 "vpsubq %%xmm7, %%xmm8, %%xmm8;"
1112 "vpslld $25, %%xmm7, %%xmm8;"
1113 "vpsrld $25, %%xmm8, %%xmm8;"
1114 "vblendvps %%xmm6, %%xmm8, %%xmm7, %%xmm8;"
1116 "vpgatherdd %%xmm6, 3(%%r14,%%xmm8,4), %%xmm9;"
1128 "vpsllq $57, %%xmm7, %%xmm8;"
1129 "vpsrlq $57, %%xmm8, %%xmm8;"
1131 "vblendvpd %%xmm9, %%xmm8, %%xmm7, %%xmm8;"
1134 "vpgatherqd %%xmm6, 3(%%r14,%%xmm8,4), %%xmm9;"
1148 "vpsllq $57, %%xmm7, %%xmm8;"
1149 "vpsrlq $57, %%xmm8, %%xmm8;"
1151 "vblendvpd %%xmm9, %%xmm8, %%xmm7, %%xmm8;"
1155 "vpsllq $2, %%xmm8, %%xmm8;"
1157 "vpaddq %%xmm7, %%xmm8, %%xmm8;"
1158 "vpgatherqd %%xmm6, 1(,%%xmm8,1), %%xmm9;"
1159 "vpsubq %%xmm7, %%xmm8, %%xmm8;"
1180 "vpslld $26, %%xmm7, %%xmm8;"
1181 "vpsrld $26, %%xmm8, %%xmm8;"
1183 "vblendvps %%xmm9, %%xmm8, %%xmm7, %%xmm8;"
1186 "vpgatherdq %%xmm6, 3(%%r14,%%xmm8,8), %%xmm9;"
1197 "vpgatherdq %%ymm6, 3(%%r14,%%xmm8,8), %%ymm9;"
1201 "vpsllq $58, %%xmm7, %%xmm8;"
1202 "vpsrlq $58, %%xmm8, %%xmm8;"
1203 "vblendvpd %%xmm6, %%xmm8, %%xmm7, %%xmm8;"
1205 "vpgatherqq %%xmm6, 3(%%r14,%%xmm8,8), %%xmm9;"
1217 "vpsllq $58, %%xmm7, %%xmm8;"
1218 "vpsrlq $58, %%xmm8, %%xmm8;"
1219 "vblendvpd %%xmm6, %%xmm8, %%xmm7, %%xmm8;"
1222 "vpsllq $2, %%xmm8, %%xmm8;"
1224 "vpaddq %%xmm7, %%xmm8, %%xmm8;"
1225 "vpgatherqq %%xmm6, 1(,%%xmm8,1), %%xmm9;"
1226 "vpsubq %%xmm7, %%xmm8, %%xmm8;"