/external/valgrind/callgrind/

threads.c
    62  ThreadId CLG_(current_tid);
    66  thread_info** CLG_(get_threads)()
    71  thread_info* CLG_(get_current_thread)()
    73  return thread[CLG_(current_tid)];
    76  void CLG_(init_threads)()
    84  CLG_(current_tid) = VG_INVALID_THREADID;
    88  void CLG_(forall_threads)(void (*func)(thread_info*))
    90  Int t, orig_tid = CLG_(current_tid);
    94  CLG_(switch_thread)(t);
    97  CLG_(switch_thread)(orig_tid)
    [all...]
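
The snippet above already shows the iteration pattern of CLG_(forall_threads): it remembers the active thread ID, switches to each thread in turn, invokes the callback, and finally switches back to the original thread (threads.c:88-97). A minimal caller could look like the following sketch; the callback and counter names are made up for illustration, only the void (*)(thread_info*) signature is taken from the listing.

    #include "global.h"   /* Callgrind-internal declarations (CLG_ namespace) */

    /* Hypothetical callback: just counts the threads it is handed. */
    static UInt demo_thread_count = 0;

    static void demo_count_thread(thread_info* ti)
    {
        if (ti) demo_thread_count++;
    }

    /* CLG_(forall_threads) switches to every known thread, calls the
       callback, and restores the original CLG_(current_tid) afterwards. */
    static void demo_count_all_threads(void)
    {
        demo_thread_count = 0;
        CLG_(forall_threads)(demo_count_thread);
    }
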
costs.h
    36  #define CLG_(str) VGAPPEND(vgCallgrind_,str)
    38  extern UInt CLG_(costarray_entries);
    39  extern UInt CLG_(costarray_chunks);
    55  ULong* CLG_(get_costarray)(Int size);
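
costs.h (and events.h below) open with the same namespacing macro: CLG_(str) pastes the vgCallgrind_ prefix onto str, so every symbol exported by Callgrind lands in one tool-specific namespace. A short illustration, assuming VGAPPEND is the usual two-argument token-pasting macro from the core Valgrind headers:

    /* Assumed core definition (pub_tool_basics.h): VGAPPEND(a,b) -> a##b,
       so CLG_(str) token-pastes the tool prefix onto str. */
    #define CLG_(str) VGAPPEND(vgCallgrind_,str)

    /* A declaration written as
           ULong* CLG_(get_costarray)(Int size);
       therefore preprocesses to the ordinary C declaration
           ULong* vgCallgrind_get_costarray(Int size);
       which keeps Callgrind's symbols in their own namespace. */
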
events.h
    36  #define CLG_(str) VGAPPEND(vgCallgrind_,str)
    55  EventGroup* CLG_(register_event_group) (int id, const HChar*);
    56  EventGroup* CLG_(register_event_group2)(int id, const HChar*, const HChar*);
    57  EventGroup* CLG_(register_event_group3)(int id, const HChar*, const HChar*,
    59  EventGroup* CLG_(register_event_group4)(int id, const HChar*, const HChar*,
    61  EventGroup* CLG_(get_event_group)(int id);
    75  EventSet* CLG_(get_event_set)(Int id);
    76  EventSet* CLG_(get_event_set2)(Int id1, Int id2);
    77  EventSet* CLG_(add_event_group)(EventSet*, Int id);
    78  EventSet* CLG_(add_event_group2)(EventSet*, Int id1, Int id2)
    [all...]
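
The API shape is visible from the declarations: an event group is registered once under an integer ID with one to four counter names (register_event_group .. register_event_group4), and event sets are then built by combining groups. A sketch of how a set could be assembled; the group IDs and counter names here are placeholders, only the signatures come from the header.

    #include "global.h"   /* pulls in events.h (see global.h:49 below) */

    /* Placeholder group IDs, for illustration only. */
    #define DEMO_EG_IR   10
    #define DEMO_EG_BUS  11

    static EventSet* demo_build_event_set(void)
    {
        /* Register a one-counter and a two-counter event group. */
        CLG_(register_event_group) (DEMO_EG_IR,  "Ir");
        CLG_(register_event_group2)(DEMO_EG_BUS, "Ge", "Bc");

        /* Start a set from the first group, then fold in the second. */
        EventSet* es = CLG_(get_event_set)(DEMO_EG_IR);
        es = CLG_(add_event_group)(es, DEMO_EG_BUS);
        return es;
    }
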
callstack.c
    46  call_stack CLG_(current_call_stack);
    48  void CLG_(init_call_stack)(call_stack* s)
    63  call_entry* CLG_(get_call_entry)(Int sp)
    65  CLG_ASSERT(sp <= CLG_(current_call_stack).sp);
    66  return &(CLG_(current_call_stack).entry[sp]);
    69  void CLG_(copy_current_call_stack)(call_stack* dst)
    73  dst->size = CLG_(current_call_stack).size;
    74  dst->entry = CLG_(current_call_stack).entry;
    75  dst->sp = CLG_(current_call_stack).sp;
    78  void CLG_(set_current_call_stack)(call_stack* s
    [all...]
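
Note that copy_current_call_stack copies only the three descriptor fields (size, entry, sp), so it hands the stack around by reference instead of duplicating the entry array. A small sketch of the save/restore pairing this API suggests; the wrapper and the 'other' stack are illustrative, not taken from the sources.

    #include "global.h"

    /* Park the current call-stack descriptor, install a different one,
       then put the original back. Only {size, entry, sp} are copied
       (callstack.c:69-75 above), so no call_entry array is duplicated. */
    static void demo_swap_call_stack(call_stack* other)
    {
        call_stack saved;

        CLG_(copy_current_call_stack)(&saved);  /* saved := current (by reference) */
        CLG_(set_current_call_stack)(other);    /* work with another stack */

        /* ... operate on 'other' as the current call stack ... */

        CLG_(set_current_call_stack)(&saved);   /* restore the original */
    }
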
global.h
    49  #include "events.h" // defines CLG_ macro
    291 /* filled by CLG_(get_fn_node) if debug info is available */
    299 /* filled by CLG_(instrument) if not seen before */
    666 #define fullOffset(group) (CLG_(sets).full->offset[group])
    675 void CLG_(set_clo_defaults)(void);
    676 void CLG_(update_fn_config)(fn_node*);
    677 Bool CLG_(process_cmd_line_option)(const HChar*);
    678 void CLG_(print_usage)(void);
    679 void CLG_(print_debug_usage)(void);
    682 void CLG_(init_eventsets)(void)
    [all...]

context.c
    39  fn_stack CLG_(current_fn_stack);
    41  void CLG_(init_fn_stack)(fn_stack* s)
    52  void CLG_(copy_current_fn_stack)(fn_stack* dst)
    56  dst->size = CLG_(current_fn_stack).size;
    57  dst->bottom = CLG_(current_fn_stack).bottom;
    58  dst->top = CLG_(current_fn_stack).top;
    61  void CLG_(set_current_fn_stack)(fn_stack* s)
    65  CLG_(current_fn_stack).size = s->size;
    66  CLG_(current_fn_stack).bottom = s->bottom;
    67  CLG_(current_fn_stack).top = s->top
    [all...]

costs.c
    35  UInt CLG_(costarray_entries) = 0;
    36  UInt CLG_(costarray_chunks) = 0;
    40  ULong* CLG_(get_costarray)(Int size)
    61  CLG_(costarray_chunks)++;
    67  CLG_(costarray_entries) += size;
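
get_costarray hands out stretches of ULong storage for cost arrays and maintains two counters: costarray_entries (total ULongs handed out) and costarray_chunks (number of backing allocations). A simplified sketch of a chunked allocator in that spirit; the chunk size, the static bookkeeping variables and the CLG_MALLOC call are assumptions, not the actual costs.c code.

    #include "global.h"

    #define DEMO_CHUNK_SIZE 10000          /* assumed chunk capacity, in ULongs */

    static ULong* demo_chunk = NULL;       /* current backing chunk             */
    static Int    demo_used  = 0;          /* ULongs already handed out of it   */

    /* Hand out 'size' consecutive ULongs, grabbing a fresh chunk when the
       current one cannot satisfy the request: one counter per entry handed
       out, one per chunk allocation, as the counters above suggest. */
    static ULong* demo_get_costarray(Int size)
    {
        ULong* p;

        if (!demo_chunk || demo_used + size > DEMO_CHUNK_SIZE) {
            demo_chunk = (ULong*) CLG_MALLOC("cl.demo.gca.1",   /* assumed allocator wrapper */
                                             DEMO_CHUNK_SIZE * sizeof(ULong));
            demo_used = 0;
            CLG_(costarray_chunks)++;
        }
        p = demo_chunk + demo_used;
        demo_used += size;
        CLG_(costarray_entries) += size;
        return p;
    }
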
sim.c
    109 * - Addr CLG_(bb_base) (instruction start address of original BB)
    110 * - ULong* CLG_(cost_base) (start of cost array for BB)
    113 Addr CLG_(bb_base);
    114 ULong* CLG_(cost_base);
    859 idx, CLG_(bb_base) + current_ii->instr_offset, memline);
    864 CLG_(current_state).collect, loaded->use_base);
    866 if (CLG_(current_state).collect && loaded->use_base) {
    876 loaded->iaddr = CLG_(bb_base) + current_ii->instr_offset;
    877 loaded->use_base = (CLG_(current_state).nonskipped) ?
    878 CLG_(current_state).nonskipped->skipped
    [all...]
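
The two globals are the per-BB context the simulator callbacks run against: bb_base is the guest address at which the original basic block starts, and cost_base points at the block's cost array. The guest address of the instruction being simulated is then recovered as bb_base plus the instruction's offset, as in sim.c:859 and sim.c:876. A one-line sketch of that computation; the InstrInfo type name is assumed from the ii/current_ii variables seen elsewhere in the listing.

    #include "global.h"

    /* Recover the original guest address of an instruction from the
       per-BB base address plus the instruction's offset in the block. */
    static Addr demo_instr_addr(const InstrInfo* ii)
    {
        return CLG_(bb_base) + ii->instr_offset;
    }
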
bbcc.c
    43  void CLG_(init_bbcc_hash)(bbcc_hash* bbccs)
    57  void CLG_(copy_current_bbcc_hash)(bbcc_hash* dst)
    66  bbcc_hash* CLG_(get_current_bbcc_hash)()
    71  void CLG_(set_current_bbcc_hash)(bbcc_hash* h)
    83  void CLG_(zero_bbcc)(BBCC* bbcc)
    104 CLG_(init_cost)( CLG_(sets).full, jcc->cost );
    112 void CLG_(forall_bbccs)(void (*func)(BBCC*))
    164 if (!CLG_(clo).separate_threads) {
    168 if (bbcc->tid == CLG_(current_tid)) return bbcc
    [all...]

debug.c
    47  void CLG_(print_bb)(int s, BB* bb)
    75  void CLG_(print_cxt)(Int s, Context* cxt, int rec_index)
    83  UInt *pactive = CLG_(get_fn_entry)(cxt->fn[0]->number);
    97  void CLG_(print_execstate)(int s, exec_state* es)
    115 void CLG_(print_bbcc)(int s, BBCC* bbcc)
    135 CLG_(print_cxt)(s+8, bbcc->cxt, bbcc->rec_index);
    138 void CLG_(print_eventset)(int s, EventSet* es)
    164 eg = CLG_(get_event_group)(i);
    175 void CLG_(print_cost)(int s, EventSet* es, ULong* c)
    205 eg = CLG_(get_event_group)(i)
    [all...]

clo.c
    397 void CLG_(update_fn_config)(fn_node* fn)
    409 Bool CLG_(process_cmd_line_option)(const HChar* arg)
    413 if VG_BOOL_CLO(arg, "--skip-plt", CLG_(clo).skip_plt) {}
    415 else if VG_BOOL_CLO(arg, "--collect-jumps", CLG_(clo).collect_jumps) {}
    417 else if VG_BOOL_CLO(arg, "--trace-jump", CLG_(clo).collect_jumps) {}
    419 else if VG_BOOL_CLO(arg, "--combine-dumps", CLG_(clo).combine_dumps) {}
    421 else if VG_BOOL_CLO(arg, "--collect-atstart", CLG_(clo).collect_atstart) {}
    423 else if VG_BOOL_CLO(arg, "--instr-atstart", CLG_(clo).instrument_atstart) {}
    425 else if VG_BOOL_CLO(arg, "--separate-threads", CLG_(clo).separate_threads) {}
    427 else if VG_BOOL_CLO(arg, "--compress-strings", CLG_(clo).compress_strings) {
    [all...]
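
Option handling follows the standard Valgrind pattern: each clause of process_cmd_line_option matches one --option=yes|no string against a field of CLG_(clo) with the VG_BOOL_CLO helper and falls through to the next clause otherwise. A condensed sketch of that dispatch; the clauses are the real ones from the lines above, the wrapper function is illustrative only.

    #include "global.h"

    /* Dispatch sketch: VG_BOOL_CLO matches "--name=yes|no" against arg
       and, on a match, stores the parsed Bool into the named CLG_(clo)
       field. Return False if nothing matched. */
    static Bool demo_process_option(const HChar* arg)
    {
        if      VG_BOOL_CLO(arg, "--skip-plt",      CLG_(clo).skip_plt)      {}
        else if VG_BOOL_CLO(arg, "--collect-jumps", CLG_(clo).collect_jumps) {}
        else
            return False;   /* not one of ours */

        return True;
    }
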
main.c
    49  CommandLineOptions CLG_(clo);
    50  Statistics CLG_(stat);
    51  Bool CLG_(instrument_state) = True; /* Instrumentation on ? */
    54  exec_state CLG_(current_state);
    58  Int CLG_(min_line_size) = 0;
    65  static void CLG_(init_statistics)(Statistics* s)
    115 CLG_(bb_base) + ii->instr_offset, ii->instr_size);
    117 if (!CLG_(current_state).collect) return;
    121 CLG_(current_state).cost[ fullOffset(EG_BUS) ]++;
    123 if (CLG_(current_state).nonskipped
    [all...]
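
main.c:117-121 together with the fullOffset macro from global.h:666 show how an event counter is bumped: fullOffset(group) resolves an event group to its offset inside the full cost array, and the slot at that offset in the current execution state's cost array is incremented, but only while collection is enabled. A condensed sketch of that pattern; the wrapper function name is illustrative, the guard, group and macro come from the lines above.

    #include "global.h"

    /* Count one global-bus event against the current execution state:
       skip it while collection is off, otherwise bump the counter slot
       that fullOffset(EG_BUS) selects inside the full cost array. */
    static void demo_count_bus_event(void)
    {
        if (!CLG_(current_state).collect) return;

        CLG_(current_state).cost[ fullOffset(EG_BUS) ]++;
    }
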
dump.c
    48  FullCost CLG_(total_cost) = 0;
    51  EventMapping* CLG_(dumpmap) = 0;
    53  Int CLG_(get_dump_counter)(void)
    84  dump_array_size = CLG_(stat).distinct_objs +
    85  CLG_(stat).distinct_files +
    86  CLG_(stat).distinct_fns +
    87  CLG_(stat).context_counter;
    92  file_dumped = obj_dumped + CLG_(stat).distinct_objs;
    93  fn_dumped = file_dumped + CLG_(stat).distinct_files;
    94  cxt_dumped = fn_dumped + CLG_(stat).distinct_fns
    [all...]

events.c
    71  EventGroup* CLG_(register_event_group) (int id, const HChar* n1)
    79  EventGroup* CLG_(register_event_group2)(int id, const HChar* n1,
    89  EventGroup* CLG_(register_event_group3)(int id, const HChar* n1,
    100 EventGroup* CLG_(register_event_group4)(int id, const HChar* n1,
    113 EventGroup* CLG_(get_event_group)(int id)
    152 EventSet* CLG_(get_event_set)(Int id)
    158 EventSet* CLG_(get_event_set2)(Int id1, Int id2)
    165 EventSet* CLG_(add_event_group)(EventSet* es, Int id)
    172 EventSet* CLG_(add_event_group2)(EventSet* es, Int id1, Int id2)
    180 EventSet* CLG_(add_event_set)(EventSet* es1, EventSet* es2
    [all...]

bb.c
    38  void CLG_(init_bb_hash)()
    50  bb_hash* CLG_(get_bb_hash)()
    110 CLG_(stat).bb_hash_resizes++;
    156 CLG_(stat).distinct_bbs++;
    163 CLG_(stat).distinct_bbs);
    164 CLG_(print_bb)(0, bb);
    169 CLG_(get_fn_node)(bb);
    203 obj = CLG_(get_obj_node)( di );
    231 * is called from CLG_(instrument)() and a BB already exists:
    240 BB* CLG_(get_bb)(Addr addr, IRSB* bbIn, /*OUT*/ Bool *seen_before
    [all...]

fn.c
    208 void CLG_(init_obj_table)()
    241 CLG_(stat).distinct_objs ++;
    242 obj->number = CLG_(stat).distinct_objs;
    264 obj_node* CLG_(get_obj_node)(DebugInfo* di)
    299 CLG_(stat).distinct_files++;
    300 file->number = CLG_(stat).distinct_files;
    307 file_node* CLG_(get_file_node)(obj_node* curr_obj_node,
    348 CLG_(stat).distinct_fns++;
    349 fn->number = CLG_(stat).distinct_fns;
    360 fn->pop_on_jump = CLG_(clo).pop_on_jump
    [all...]

jumps.c
    39  void CLG_(init_jcc_hash)(jcc_hash* jccs)
    56  void CLG_(copy_current_jcc_hash)(jcc_hash* dst)
    66  void CLG_(set_current_jcc_hash)(jcc_hash* h)
    128 CLG_(stat).jcc_hash_resizes++;
    174 CLG_(stat).distinct_jccs++;
    178 CLG_(stat).distinct_jccs, jcc);
    186 jCC* CLG_(get_jcc)(BBCC* from, UInt jmp, BBCC* to)
    209 CLG_(stat).jcc_lru_misses++;
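
get_jcc is the lookup-or-create entry point for jump cost centres: given the source BBCC, the index of the jump within it, and the target BBCC, it returns the jCC that accumulates costs for that arc, with a small LRU cache in front of the hash (hence the jcc_lru_misses counter). A usage sketch; the wrapper name is illustrative, the signature comes from jumps.c:186.

    #include "global.h"

    /* Find or create the jump cost centre for the arc
       'from --(jump index jmp)--> to'. A fresh arc bumps
       CLG_(stat).distinct_jccs (jumps.c:174); its per-arc cost array
       is jcc->cost, as initialised in bbcc.c:104 above. */
    static jCC* demo_lookup_jump_arc(BBCC* from, UInt jmp, BBCC* to)
    {
        return CLG_(get_jcc)(from, jmp, to);
    }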