Home | History | Annotate | Download | only in m_coredump

Lines Matching defs:arch

86    const ThreadArchState *arch = (const ThreadArchState *) &tst->arch;
91 if (YMM_NON_ZERO(arch->vex.guest_YMM0) ||
92 YMM_NON_ZERO(arch->vex.guest_YMM1) ||
93 YMM_NON_ZERO(arch->vex.guest_YMM2) ||
94 YMM_NON_ZERO(arch->vex.guest_YMM3) ||
95 YMM_NON_ZERO(arch->vex.guest_YMM4) ||
96 YMM_NON_ZERO(arch->vex.guest_YMM5) ||
97 YMM_NON_ZERO(arch->vex.guest_YMM6) ||
98 YMM_NON_ZERO(arch->vex.guest_YMM7) ||
99 YMM_NON_ZERO(arch->vex.guest_YMM8) ||
100 YMM_NON_ZERO(arch->vex.guest_YMM9) ||
101 YMM_NON_ZERO(arch->vex.guest_YMM10) ||
102 YMM_NON_ZERO(arch->vex.guest_YMM11) ||
103 YMM_NON_ZERO(arch->vex.guest_YMM12) ||
104 YMM_NON_ZERO(arch->vex.guest_YMM13) ||
105 YMM_NON_ZERO(arch->vex.guest_YMM14) ||
106 YMM_NON_ZERO(arch->vex.guest_YMM15))
273 const ThreadArchState *arch = (const ThreadArchState *) &tst->arch;
276 (*regs)[VKI_EIP] = arch->vex.guest_EIP;
277 (*regs)[VKI_EAX] = arch->vex.guest_EAX;
278 (*regs)[VKI_EBX] = arch->vex.guest_EBX;
279 (*regs)[VKI_ECX] = arch->vex.guest_ECX;
280 (*regs)[VKI_EDX] = arch->vex.guest_EDX;
281 (*regs)[VKI_ESI] = arch->vex.guest_ESI;
282 (*regs)[VKI_EDI] = arch->vex.guest_EDI;
283 (*regs)[VKI_EBP] = arch->vex.guest_EBP;
284 (*regs)[VKI_UESP] = arch->vex.guest_ESP;
285 (*regs)[VKI_SS] = arch->vex.guest_SS;
286 (*regs)[VKI_CS] = arch->vex.guest_CS;
287 (*regs)[VKI_DS] = arch->vex.guest_DS;
288 (*regs)[VKI_ES] = arch->vex.guest_ES;
289 (*regs)[VKI_FS] = arch->vex.guest_FS;
290 (*regs)[VKI_GS] = arch->vex.guest_GS;
291 (*regs)[VKI_EFL] = LibVEX_GuestX86_get_eflags(&arch->vex);
293 (*regs)[VKI_REG_RIP] = arch->vex.guest_RIP;
294 (*regs)[VKI_REG_RAX] = arch->vex.guest_RAX;
295 (*regs)[VKI_REG_RBX] = arch->vex.guest_RBX;
296 (*regs)[VKI_REG_RCX] = arch->vex.guest_RCX;
297 (*regs)[VKI_REG_RDX] = arch->vex.guest_RDX;
298 (*regs)[VKI_REG_RBP] = arch->vex.guest_RBP;
299 (*regs)[VKI_REG_RSI] = arch->vex.guest_RSI;
300 (*regs)[VKI_REG_RDI] = arch->vex.guest_RDI;
301 (*regs)[VKI_REG_R8] = arch->vex.guest_R8;
302 (*regs)[VKI_REG_R9] = arch->vex.guest_R9;
303 (*regs)[VKI_REG_R10] = arch->vex.guest_R10;
304 (*regs)[VKI_REG_R11] = arch->vex.guest_R11;
305 (*regs)[VKI_REG_R12] = arch->vex.guest_R12;
306 (*regs)[VKI_REG_R13] = arch->vex.guest_R13;
307 (*regs)[VKI_REG_R14] = arch->vex.guest_R14;
308 (*regs)[VKI_REG_R15] = arch->vex.guest_R15;
309 (*regs)[VKI_REG_RSP] = arch->vex.guest_RSP;
316 (*regs)[VKI_REG_FSBASE] = arch->vex.guest_FS_CONST;
318 (*regs)[VKI_REG_RFL] = LibVEX_GuestAMD64_get_rflags(&arch->vex);
326 const ThreadArchState *arch = (const ThreadArchState *) &tst->arch;
334 LibVEX_GuestX86_get_x87(CONST_CAST(VexGuestX86State *, &arch->vex),
339 &arch->vex));
350 COPY_OUT_XMM(fs->xmm[0], arch->vex.guest_XMM0);
351 COPY_OUT_XMM(fs->xmm[1], arch->vex.guest_XMM1);
352 COPY_OUT_XMM(fs->xmm[2], arch->vex.guest_XMM2);
353 COPY_OUT_XMM(fs->xmm[3], arch->vex.guest_XMM3);
354 COPY_OUT_XMM(fs->xmm[4], arch->vex.guest_XMM4);
355 COPY_OUT_XMM(fs->xmm[5], arch->vex.guest_XMM5);
356 COPY_OUT_XMM(fs->xmm[6], arch->vex.guest_XMM6);
357 COPY_OUT_XMM(fs->xmm[7], arch->vex.guest_XMM7);
365 LibVEX_GuestAMD64_fxsave(CONST_CAST(VexGuestAMD64State *, &arch->vex),
564 const ThreadArchState *arch = (const ThreadArchState *) &tst->arch;
572 &arch->vex));
583 COPY_OUT_XMM(xregs->pr_un.pr_xsave.pr_xmm[0], arch->vex.guest_XMM0);
584 COPY_OUT_XMM(xregs->pr_un.pr_xsave.pr_xmm[1], arch->vex.guest_XMM1);
585 COPY_OUT_XMM(xregs->pr_un.pr_xsave.pr_xmm[2], arch->vex.guest_XMM2);
586 COPY_OUT_XMM(xregs->pr_un.pr_xsave.pr_xmm[3], arch->vex.guest_XMM3);
587 COPY_OUT_XMM(xregs->pr_un.pr_xsave.pr_xmm[4], arch->vex.guest_XMM4);
588 COPY_OUT_XMM(xregs->pr_un.pr_xsave.pr_xmm[5], arch->vex.guest_XMM5);
589 COPY_OUT_XMM(xregs->pr_un.pr_xsave.pr_xmm[6], arch->vex.guest_XMM6);
590 COPY_OUT_XMM(xregs->pr_un.pr_xsave.pr_xmm[7], arch->vex.guest_XMM7);
599 LibVEX_GuestAMD64_fxsave(CONST_CAST(VexGuestAMD64State *, &arch->vex),