
Lines Matching full:gst

1954         ( VexGuestAMD64State* gst, HWord addr )
1974 do_get_x87( gst, (UChar*)&tmp );
2033 ( VexGuestAMD64State* gst, HWord addr )
2040 mxcsr = amd64g_create_mxcsr( gst->guest_SSEROUND );
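The match at 2040 shows the save-side component helper rebuilding an MXCSR word from VEX's guest_SSEROUND rounding-mode field before writing it into the in-memory image. A minimal sketch of that reconstruction, assuming the standard MXCSR layout (rounding control in bits 14:13, exception masks in bits 12:7); the real body of amd64g_create_mxcsr may differ in detail:

    typedef unsigned int       UInt;    /* as in VEX's libvex_basictypes.h */
    typedef unsigned long long ULong;

    /* Sketch: synthesise an MXCSR value from the 2-bit VEX rounding mode.
       0x1F80 sets the six exception-mask bits; the rounding control
       occupies bits 14:13 of MXCSR. */
    static UInt sketch_create_mxcsr ( ULong sseround )
    {
       sseround &= 3;                       /* only the low 2 bits matter */
       return 0x1F80 | ((UInt)sseround << 13);
    }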
2060 void LibVEX_GuestAMD64_fxsave ( /*IN*/VexGuestAMD64State* gst,
2064 amd64g_dirtyhelper_XSAVE_COMPONENT_0(gst, fp_state);
2067 amd64g_dirtyhelper_XSAVE_COMPONENT_1_EXCLUDING_XMMREGS(gst, fp_state);
2080 COPY_U128( xmm[0], gst->guest_YMM0 );
2081 COPY_U128( xmm[1], gst->guest_YMM1 );
2082 COPY_U128( xmm[2], gst->guest_YMM2 );
2083 COPY_U128( xmm[3], gst->guest_YMM3 );
2084 COPY_U128( xmm[4], gst->guest_YMM4 );
2085 COPY_U128( xmm[5], gst->guest_YMM5 );
2086 COPY_U128( xmm[6], gst->guest_YMM6 );
2087 COPY_U128( xmm[7], gst->guest_YMM7 );
2088 COPY_U128( xmm[8], gst->guest_YMM8 );
2089 COPY_U128( xmm[9], gst->guest_YMM9 );
2090 COPY_U128( xmm[10], gst->guest_YMM10 );
2091 COPY_U128( xmm[11], gst->guest_YMM11 );
2092 COPY_U128( xmm[12], gst->guest_YMM12 );
2093 COPY_U128( xmm[13], gst->guest_YMM13 );
2094 COPY_U128( xmm[14], gst->guest_YMM14 );
2095 COPY_U128( xmm[15], gst->guest_YMM15 );
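Taken together, 2060-2095 show LibVEX_GuestAMD64_fxsave being built from the two XSAVE component helpers plus an explicit copy of the low 128 bits of each of the sixteen YMM registers into the XMM slots of the FXSAVE image. A hedged sketch of what the COPY_U128 step amounts to; the four-word element layout and the macro body are assumptions, not the verified definition:

    typedef unsigned int UInt;
    typedef UInt U128[4];                  /* 128 bits as four 32-bit words */

    /* Assumed expansion of COPY_U128: a word-by-word copy of one 128-bit
       value into another.  The real macro may differ. */
    #define COPY_U128(_dst,_src)                           \
       do { (_dst)[0] = (_src)[0]; (_dst)[1] = (_src)[1];  \
            (_dst)[2] = (_src)[2]; (_dst)[3] = (_src)[3];  \
       } while (0)

    /* Example use, mirroring the matched lines: copy the low 128 bits of
       guest_YMM0 into slot 0 of the FXSAVE image's XMM array:
          COPY_U128( xmm[0], gst->guest_YMM0 );  */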
2107 ( VexGuestAMD64State* gst, HWord addr )
2143 VexEmNote warnX87 = do_put_x87( True/*moveRegs*/, (UChar*)&tmp, gst );
2152 ( VexGuestAMD64State* gst, HWord addr )
2161 gst->guest_SSEROUND = w64 & 0xFFFFFFFFULL;
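Line 2161 is the restore-side counterpart: a 64-bit value w64 is split so that its low 32 bits become the new guest_SSEROUND rounding mode. A sketch of a checker that could produce such a value, assuming the convention that the upper half carries an emulation-warning code and the lower half the rounding control taken from MXCSR bits 14:13; the function name and warning codes here are hypothetical:

    typedef unsigned int       UInt;
    typedef unsigned long long ULong;

    /* Hypothetical MXCSR checker: low 32 bits = rounding mode, upper 32
       bits = nonzero warning code if the MXCSR asks for behaviour VEX
       cannot model exactly (unmasked exceptions, FZ or DAZ). */
    static ULong sketch_check_ldmxcsr ( UInt mxcsr )
    {
       ULong rmode = (mxcsr >> 13) & 3;        /* rounding control, bits 14:13 */
       ULong warn  = 0;
       if ((mxcsr & 0x1F80) != 0x1F80)         /* some exception unmasked */
          warn = 1;
       if (mxcsr & ((1u << 15) | (1u << 6)))   /* FZ or DAZ set */
          warn = 2;
       return (warn << 32) | rmode;
       /* Caller then does:  gst->guest_SSEROUND = w64 & 0xFFFFFFFFULL;  */
    }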
2176 /*MOD*/VexGuestAMD64State* gst )
2189 COPY_U128( gst->guest_YMM0, xmm[0] );
2190 COPY_U128( gst->guest_YMM1, xmm[1] );
2191 COPY_U128( gst->guest_YMM2, xmm[2] );
2192 COPY_U128( gst->guest_YMM3, xmm[3] );
2193 COPY_U128( gst->guest_YMM4, xmm[4] );
2194 COPY_U128( gst->guest_YMM5, xmm[5] );
2195 COPY_U128( gst->guest_YMM6, xmm[6] );
2196 COPY_U128( gst->guest_YMM7, xmm[7] );
2197 COPY_U128( gst->guest_YMM8, xmm[8] );
2198 COPY_U128( gst->guest_YMM9, xmm[9] );
2199 COPY_U128( gst->guest_YMM10, xmm[10] );
2200 COPY_U128( gst->guest_YMM11, xmm[11] );
2201 COPY_U128( gst->guest_YMM12, xmm[12] );
2202 COPY_U128( gst->guest_YMM13, xmm[13] );
2203 COPY_U128( gst->guest_YMM14, xmm[14] );
2204 COPY_U128( gst->guest_YMM15, xmm[15] );
2209 = amd64g_dirtyhelper_XRSTOR_COMPONENT_1_EXCLUDING_XMMREGS(gst, fp_state);
2211 = amd64g_dirtyhelper_XRSTOR_COMPONENT_0(gst, fp_state);
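Lines 2176-2211 are the reverse path: LibVEX_GuestAMD64_fxrstor copies the sixteen XMM images back into the low halves of the guest YMM slots, then invokes the two XRSTOR component helpers, each of which may return an emulation note. A sketch of one plausible way the two notes are merged; giving the x87 note priority over the SSE one is an assumption:

    /* Sketch: EmNote_NONE (assumed to be 0) means "nothing to report";
       otherwise prefer the x87 warning over the XMM/MXCSR one. */
    typedef enum { EmNote_NONE = 0 /*, ... other notes ... */ } VexEmNote;

    static VexEmNote sketch_combine_restore_warnings ( VexEmNote warnX87,
                                                       VexEmNote warnXMM )
    {
       return (warnX87 != EmNote_NONE) ? warnX87 : warnXMM;
    }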
2227 void amd64g_dirtyhelper_FINIT ( VexGuestAMD64State* gst )
2230 gst->guest_FTOP = 0;
2232 gst->guest_FPTAG[i] = 0; /* empty */
2233 gst->guest_FPREG[i] = 0; /* IEEE754 64-bit zero */
2235 gst
2236 gst->guest_FC3210 = 0;
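The cluster at 2227-2236 is the FINIT dirty helper, which puts the guest x87 unit back into its power-on state: stack top at 0, all eight registers tagged empty and zeroed, condition codes cleared. Reassembled from the matched lines into a compilable sketch; the field types, the FPROUND reset, and the struct name are assumptions standing in for the real VexGuestAMD64State:

    typedef unsigned char      UChar;
    typedef unsigned int       UInt;
    typedef unsigned long long ULong;

    /* Only the fields the matches touch are modelled. */
    typedef struct {
       UInt  guest_FTOP;
       UChar guest_FPTAG[8];
       ULong guest_FPREG[8];
       ULong guest_FPROUND;
       ULong guest_FC3210;
    } SketchGuestAMD64State;

    /* Sketch of the FINIT helper: empty the x87 stack, reset control state. */
    static void sketch_dirtyhelper_FINIT ( SketchGuestAMD64State* gst )
    {
       int i;
       gst->guest_FTOP = 0;
       for (i = 0; i < 8; i++) {
          gst->guest_FPTAG[i] = 0;    /* empty */
          gst->guest_FPREG[i] = 0;    /* IEEE754 64-bit zero */
       }
       gst->guest_FPROUND = 0;        /* round-to-nearest (assumed encoding) */
       gst->guest_FC3210  = 0;        /* C3..C0 condition bits cleared */
    }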
3801 VexGuestAMD64State* gst,
3815 V128* argL = (V128*)( ((UChar*)gst) + gstOffL );
3816 V128* argR = (V128*)( ((UChar*)gst) + gstOffR );
3889 gst->guest_YMM0[0] = resV.w32[0];
3890 gst->guest_YMM0[1] = resV.w32[1];
3891 gst->guest_YMM0[2] = resV.w32[2];
3892 gst->guest_YMM0[3] = resV.w32[3];
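The matches around 3801-3892 come from a dirty helper that is handed byte offsets into the guest state (gstOffL, gstOffR) rather than register values; it recovers V128 pointers by offsetting the state base and finally writes a 128-bit result into the low lane of guest_YMM0. A self-contained sketch of that addressing pattern; the struct layout, helper name, and placeholder operation are illustrative only:

    typedef unsigned char UChar;
    typedef unsigned int  UInt;
    typedef unsigned long HWord;

    typedef union { UInt w32[4]; UChar w8[16]; } V128;

    /* Toy guest state: only the YMM bank matters for this sketch. */
    typedef struct {
       UInt guest_YMM0[8];
       UInt guest_YMM1[8];
       /* ... further YMM registers ... */
    } SketchGuestState;

    /* Sketch: turn the two guest-state offsets into V128 pointers, compute
       a (placeholder) 128-bit result, and store it into the low half of
       YMM0. */
    static void sketch_dirtyhelper_xmm ( SketchGuestState* gst,
                                         HWord gstOffL, HWord gstOffR )
    {
       V128* argL = (V128*)( ((UChar*)gst) + gstOffL );
       V128* argR = (V128*)( ((UChar*)gst) + gstOffR );
       V128  resV;
       int   i;
       for (i = 0; i < 4; i++)
          resV.w32[i] = argL->w32[i] ^ argR->w32[i];   /* stand-in operation */
       gst->guest_YMM0[0] = resV.w32[0];
       gst->guest_YMM0[1] = resV.w32[1];
       gst->guest_YMM0[2] = resV.w32[2];
       gst->guest_YMM0[3] = resV.w32[3];
    }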
4143 VexGuestAMD64State* gst,
4149 V128* argD = (V128*)( ((UChar*)gst) + gstOffD );
4150 V128* argL = (V128*)( ((UChar*)gst) + gstOffL );
4151 V128* argR = (V128*)( ((UChar*)gst) + gstOffR );
4206 VexGuestAMD64State* gst,
4212 V128* argL = (V128*)( ((UChar*)gst) + gstOffL );
4213 V128* argR = (V128*)( ((UChar*)gst) + gstOffR );
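The later matches at 4143-4213 appear to follow the same offset-based convention, the first group adding a third offset (gstOffD) for a destination register; the sketch above carries over to them with that extra operand.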