Lines Matching refs:threads
49 return INSTR_PTR( VG_(threads)[tid].arch );
52 return STACK_PTR( VG_(threads)[tid].arch );
55 return FRAME_PTR( VG_(threads)[tid].arch );
59 INSTR_PTR( VG_(threads)[tid].arch ) = ip;
62 STACK_PTR( VG_(threads)[tid].arch ) = sp;
69 regs->r_pc = (ULong)VG_(threads)[tid].arch.vex.guest_EIP;
70 regs->r_sp = (ULong)VG_(threads)[tid].arch.vex.guest_ESP;
72 = VG_(threads)[tid].arch.vex.guest_EBP;
74 regs->r_pc = VG_(threads)[tid].arch.vex.guest_RIP;
75 regs->r_sp = VG_(threads)[tid].arch.vex.guest_RSP;
77 = VG_(threads)[tid].arch.vex.guest_RBP;
79 regs->r_pc = (ULong)VG_(threads)[tid].arch.vex.guest_CIA;
80 regs->r_sp = (ULong)VG_(threads)[tid].arch.vex.guest_GPR1;
82 = VG_(threads)[tid].arch.vex.guest_LR;
84 regs->r_pc = VG_(threads)[tid].arch.vex.guest_CIA;
85 regs->r_sp = VG_(threads)[tid].arch.vex.guest_GPR1;
87 = VG_(threads)[tid].arch.vex.guest_LR;
89 regs->r_pc = (ULong)VG_(threads)[tid].arch.vex.guest_R15T;
90 regs->r_sp = (ULong)VG_(threads)[tid].arch.vex.guest_R13;
92 = VG_(threads)[tid].arch.vex.guest_R14;
94 = VG_(threads)[tid].arch.vex.guest_R12;
96 = VG_(threads)[tid].arch.vex.guest_R11;
98 = VG_(threads)[tid].arch.vex.guest_R7;
100 regs->r_pc = VG_(threads)[tid].arch.vex.guest_PC;
101 regs->r_sp = VG_(threads)[tid].arch.vex.guest_XSP;
102 regs->misc.ARM64.x29 = VG_(threads)[tid].arch.vex.guest_X29;
103 regs->misc.ARM64.x30 = VG_(threads)[tid].arch.vex.guest_X30;
105 regs->r_pc = (ULong)VG_(threads)[tid].arch.vex.guest_IA;
106 regs->r_sp = (ULong)VG_(threads)[tid].arch.vex.guest_SP;
108 = VG_(threads)[tid].arch.vex.guest_FP;
110 = VG_(threads)[tid].arch.vex.guest_LR;
112 regs->r_pc = VG_(threads)[tid].arch.vex.guest_PC;
113 regs->r_sp = VG_(threads)[tid].arch.vex.guest_r29;
115 = VG_(threads)[tid].arch.vex.guest_r30;
117 = VG_(threads)[tid].arch.vex.guest_r31;
119 = VG_(threads)[tid].arch.vex.guest_r28;
121 regs->r_pc = VG_(threads)[tid].arch.vex.guest_PC;
122 regs->r_sp = VG_(threads)[tid].arch.vex.guest_r29;
124 = VG_(threads)[tid].arch.vex.guest_r30;
126 = VG_(threads)[tid].arch.vex.guest_r31;
128 = VG_(threads)[tid].arch.vex.guest_r28;
130 regs->r_pc = VG_(threads)[tid].arch.vex.guest_pc;
131 regs->r_sp = VG_(threads)[tid].arch.vex.guest_r54;
133 = VG_(threads)[tid].arch.vex.guest_r52;
135 = VG_(threads)[tid].arch.vex.guest_r55;
154 tst = & VG_(threads)[tid];
178 tst = & VG_(threads)[tid];
422 || VG_(threads)[tid].exitreason == VgSrc_ExitProcess) {
443 if (VG_(threads)[i].status != VgTs_Empty) {
446 *stack_max = VG_(threads)[i].client_stack_highest_byte;
456 vg_assert(VG_(threads)[tid].status != VgTs_Empty);
457 return VG_(threads)[tid].client_stack_highest_byte;
463 vg_assert(VG_(threads)[tid].status != VgTs_Empty);
464 return VG_(threads)[tid].client_stack_szB;
470 vg_assert(VG_(threads)[tid].status != VgTs_Empty);
471 return (Addr)VG_(threads)[tid].altstack.ss_sp;
477 vg_assert(VG_(threads)[tid].status != VgTs_Empty);
478 return VG_(threads)[tid].altstack.ss_size;