Home | History | Annotate | Download | only in callgrind
      1 /*--------------------------------------------------------------------*/
      2 /*--- Callgrind                                                    ---*/
      3 /*---                                               ct_callstack.c ---*/
      4 /*--------------------------------------------------------------------*/
      5 
      6 /*
      7    This file is part of Callgrind, a Valgrind tool for call tracing.
      8 
      9    Copyright (C) 2002-2017, Josef Weidendorfer (Josef.Weidendorfer (at) gmx.de)
     10 
     11    This program is free software; you can redistribute it and/or
     12    modify it under the terms of the GNU General Public License as
     13    published by the Free Software Foundation; either version 2 of the
     14    License, or (at your option) any later version.
     15 
     16    This program is distributed in the hope that it will be useful, but
     17    WITHOUT ANY WARRANTY; without even the implied warranty of
     18    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
     19    General Public License for more details.
     20 
     21    You should have received a copy of the GNU General Public License
     22    along with this program; if not, write to the Free Software
     23    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
     24    02111-1307, USA.
     25 
     26    The GNU General Public License is contained in the file COPYING.
     27 */
     28 
     29 #include "global.h"
     30 
     31 /*------------------------------------------------------------*/
     32 /*--- Call stack, operations                               ---*/
     33 /*------------------------------------------------------------*/
     34 
     35 /* Stack of current thread. Gets initialized when switching to 1st thread.
     36  *
     37  * The artificial call stack is an array of call_entry's, representing
     38  * stack frames of the executing program.
     39  * Array call_stack and call_stack_esp have same size and grow on demand.
     40  * Array call_stack_esp holds SPs of corresponding stack frames.
     41  *
     42  */
     43 
     44 #define N_CALL_STACK_INITIAL_ENTRIES 500
     45 
     46 call_stack CLG_(current_call_stack);
     47 
     48 void CLG_(init_call_stack)(call_stack* s)
     49 {
     50   Int i;
     51 
     52   CLG_ASSERT(s != 0);
     53 
     54   s->size = N_CALL_STACK_INITIAL_ENTRIES;
     55   s->entry = (call_entry*) CLG_MALLOC("cl.callstack.ics.1",
     56                                       s->size * sizeof(call_entry));
     57   s->sp = 0;
     58   s->entry[0].cxt = 0; /* for assertion in push_cxt() */
     59 
     60   for(i=0; i<s->size; i++) s->entry[i].enter_cost = 0;
     61 }
     62 
     63 call_entry* CLG_(get_call_entry)(Int sp)
     64 {
     65   CLG_ASSERT(sp <= CLG_(current_call_stack).sp);
     66   return &(CLG_(current_call_stack).entry[sp]);
     67 }
     68 
     69 void CLG_(copy_current_call_stack)(call_stack* dst)
     70 {
     71   CLG_ASSERT(dst != 0);
     72 
     73   dst->size  = CLG_(current_call_stack).size;
     74   dst->entry = CLG_(current_call_stack).entry;
     75   dst->sp    = CLG_(current_call_stack).sp;
     76 }
     77 
     78 void CLG_(set_current_call_stack)(call_stack* s)
     79 {
     80   CLG_ASSERT(s != 0);
     81 
     82   CLG_(current_call_stack).size  = s->size;
     83   CLG_(current_call_stack).entry = s->entry;
     84   CLG_(current_call_stack).sp    = s->sp;
     85 }
     86 
     87 
     88 static __inline__
     89 void ensure_stack_size(Int i)
     90 {
     91   Int oldsize;
     92   call_stack *cs = &CLG_(current_call_stack);
     93 
     94   if (i < cs->size) return;
     95 
     96   oldsize = cs->size;
     97   cs->size *= 2;
     98   while (i > cs->size) cs->size *= 2;
     99 
    100   cs->entry = (call_entry*) VG_(realloc)("cl.callstack.ess.1",
    101                                          cs->entry,
    102 					 cs->size * sizeof(call_entry));
    103 
    104   for(i=oldsize; i<cs->size; i++)
    105     cs->entry[i].enter_cost = 0;
    106 
    107   CLG_(stat).call_stack_resizes++;
    108 
    109   CLG_DEBUGIF(2)
    110     VG_(printf)("        call stack enlarged to %u entries\n",
    111 		CLG_(current_call_stack).size);
    112 }
    113 
    114 
    115 
    116 /* Called when function entered nonrecursive */
    117 static void function_entered(fn_node* fn)
    118 {
    119   CLG_ASSERT(fn != 0);
    120 
    121 #if CLG_ENABLE_DEBUG
    122   if (fn->verbosity >=0) {
    123     Int old = CLG_(clo).verbose;
    124     CLG_(clo).verbose = fn->verbosity;
    125     fn->verbosity = old;
    126     VG_(message)(Vg_DebugMsg,
    127 		 "Entering %s: Verbosity set to %d\n",
    128 		 fn->name, CLG_(clo).verbose);
    129   }
    130 #endif
    131 
    132   if (fn->dump_before) {
    133     HChar trigger[VG_(strlen)(fn->name) + 20];
    134     VG_(sprintf)(trigger, "--dump-before=%s", fn->name);
    135     CLG_(dump_profile)(trigger, True);
    136   }
    137   else if (fn->zero_before) {
    138     CLG_(zero_all_cost)(True);
    139   }
    140 
    141   if (fn->toggle_collect) {
    142     CLG_(current_state).collect = !CLG_(current_state).collect;
    143     CLG_DEBUG(2,"   entering %s: toggled collection state to %s\n",
    144 	     fn->name,
    145 	     CLG_(current_state).collect ? "ON" : "OFF");
    146   }
    147 }
    148 
    149 /* Called when function left (no recursive level active) */
    150 static void function_left(fn_node* fn)
    151 {
    152   CLG_ASSERT(fn != 0);
    153 
    154   if (fn->dump_after) {
    155     HChar trigger[VG_(strlen)(fn->name) + 20];
    156     VG_(sprintf)(trigger, "--dump-after=%s", fn->name);
    157     CLG_(dump_profile)(trigger, True);
    158   }
    159   if (fn->toggle_collect) {
    160     CLG_(current_state).collect = !CLG_(current_state).collect;
    161     CLG_DEBUG(2,"   leaving %s: toggled collection state to %s\n",
    162 	     fn->name,
    163 	     CLG_(current_state).collect ? "ON" : "OFF");
    164   }
    165 
    166 #if CLG_ENABLE_DEBUG
    167   if (fn->verbosity >=0) {
    168     Int old = CLG_(clo).verbose;
    169     CLG_(clo).verbose = fn->verbosity;
    170     fn->verbosity = old;
    171     VG_(message)(Vg_DebugMsg,
    172 		 "Leaving %s: Verbosity set back to %d\n",
    173 		 fn->name, CLG_(clo).verbose);
    174   }
    175 #endif
    176 }
    177 
    178 
/* Push call on call stack.
 *
 * Increment the usage count for the function called.
 * A jump from <from> (at jump index <jmp>) to <to>, with stack
 * pointer <sp> at the time of the call.
 * If <skip> is true, this is a call to a function to be skipped;
 * for this, we set jcc = 0 (no cost is attributed to the call arc).
 *
 * Side effects: grows the call stack if needed, bumps per-function
 * recursion depth, updates global call statistics, and maintains
 * CLG_(current_state).nonskipped across skipped/nonskipped
 * transitions.
 */
void CLG_(push_call_stack)(BBCC* from, UInt jmp, BBCC* to, Addr sp, Bool skip)
{
    jCC* jcc;
    UInt* pdepth;
    call_entry* current_entry;
    Addr ret_addr;

    /* Ensure a call stack of size <current_sp>+1.
     * The +1 is needed as push_cxt will store the
     * context at [current_sp]
     */
    ensure_stack_size(CLG_(current_call_stack).sp +1);
    current_entry = &(CLG_(current_call_stack).entry[CLG_(current_call_stack).sp]);

    if (skip) {
	jcc = 0;
    }
    else {
	fn_node* to_fn = to->cxt->fn[0];

	if (CLG_(current_state).nonskipped) {
	    /* this is a jmp from skipped to nonskipped */
	    CLG_ASSERT(CLG_(current_state).nonskipped == from);
	}

	/* As push_cxt() has to be called before push_call_stack if not
	 * skipping, the old context should already be saved on the stack */
	CLG_ASSERT(current_entry->cxt != 0);
	/* snapshot current cost; pop_call_stack uses the difference to
	 * compute this call's inclusive cost */
	CLG_(copy_cost_lz)( CLG_(sets).full, &(current_entry->enter_cost),
			   CLG_(current_state).cost );

	jcc = CLG_(get_jcc)(from, jmp, to);
	CLG_ASSERT(jcc != 0);

	/* *pdepth is the number of active invocations of to_fn */
	pdepth = CLG_(get_fn_entry)(to_fn->number);
	if (CLG_(clo).skip_direct_recursion) {
	    /* only increment depth if another function is called */
	  if (jcc->from->cxt->fn[0] != to_fn) (*pdepth)++;
	}
	else (*pdepth)++;

	if (*pdepth>1)
	  CLG_(stat).rec_call_counter++;

	jcc->call_counter++;
	CLG_(stat).call_counter++;

	/* entry actions (dump/zero/toggle/verbosity) only fire on the
	 * first, nonrecursive activation */
	if (*pdepth == 1) function_entered(to_fn);
    }

    /* return address is only is useful with a real call;
     * used to detect RET w/o CALL */
    if (from->bb->jmp[jmp].jmpkind == jk_Call) {
      UInt instr = from->bb->jmp[jmp].instr;
      /* address of the instruction following the call */
      ret_addr = bb_addr(from->bb) +
	from->bb->instr[instr].instr_offset +
	from->bb->instr[instr].instr_size;
    }
    else
      ret_addr = 0;

    /* put jcc on call stack */
    current_entry->jcc = jcc;
    current_entry->sp = sp;
    current_entry->ret_addr = ret_addr;
    current_entry->nonskipped = CLG_(current_state).nonskipped;

    CLG_(current_call_stack).sp++;

    /* To allow for above assertion we set context of next frame to 0 */
    CLG_ASSERT(CLG_(current_call_stack).sp < CLG_(current_call_stack).size);
    current_entry++;
    current_entry->cxt = 0;

    if (!skip)
	CLG_(current_state).nonskipped = 0;
    else if (!CLG_(current_state).nonskipped) {
	/* a call from nonskipped to skipped */
	CLG_(current_state).nonskipped = from;
	if (!CLG_(current_state).nonskipped->skipped) {
	  /* lazily allocate the cost array where skipped cost is
	   * attributed to the last nonskipped BBCC */
	  CLG_(init_cost_lz)( CLG_(sets).full,
			     &CLG_(current_state).nonskipped->skipped);
	  CLG_(stat).distinct_skips++;
	}
    }

#if CLG_ENABLE_DEBUG
    CLG_DEBUGIF(0) {
	if (CLG_(clo).verbose<2) {
	  if (jcc && jcc->to && jcc->to->bb) {
	    /* four staggered variants give a "rolling" indent pattern */
	    const HChar spaces[][41] = {
                                  "   .   .   .   .   .   .   .   .   .   .",
				  "  .   .   .   .   .   .   .   .   .   . ",
				  " .   .   .   .   .   .   .   .   .   .  ",
				  ".   .   .   .   .   .   .   .   .   .   " };

	    int s = CLG_(current_call_stack).sp;
	    /* NOTE(review): reads pars[1]/pars[2] straight off the guest
	     * stack — assumes stack-passed arguments; debug-only output,
	     * values may be meaningless on other calling conventions */
	    UInt* pars = (UInt*) sp;

	    BB* bb = jcc->to->bb;
	    if (s>40) s=40;
	    VG_(printf)("%s> %s(0x%x, 0x%x, ...) [%s / %#lx]\n", spaces[s%4]+40-s, bb->fn->name,
                        pars ? pars[1]:0,
			pars ? pars[2]:0,
			bb->obj->name + bb->obj->last_slash_pos,
			(UWord)bb->offset);
	  }
	}
	else if (CLG_(clo).verbose<4) {
	    VG_(printf)("+ %2d ", CLG_(current_call_stack).sp);
	    CLG_(print_short_jcc)(jcc);
	    VG_(printf)(", SP %#lx, RA %#lx\n", sp, ret_addr);
	}
	else {
	    VG_(printf)("  Pushed ");
	    CLG_(print_stackentry)(3, CLG_(current_call_stack).sp-1);
	}
    }
#endif

}
    307 
    308 
/* Pop call stack and update inclusive sums.
 *
 * Adds the cost accumulated since the matching push (the difference
 * between the current cost and the frame's enter_cost snapshot) to the
 * popped jCC, decrements the callee's recursion depth, and restores the
 * caller's context and function-stack position. A frame pushed with
 * skip (jcc == 0) is popped without any cost attribution.
 */
void CLG_(pop_call_stack)()
{
    jCC* jcc;
    Int depth = 0;
    call_entry* lower_entry;

    if (CLG_(current_state).sig >0) {
	/* Check if we leave a signal handler; this can happen when
	 * calling longjmp() in the handler */
	CLG_(run_post_signal_on_call_stack_bottom)();
    }

    lower_entry =
	&(CLG_(current_call_stack).entry[CLG_(current_call_stack).sp-1]);

    CLG_DEBUG(4,"+ pop_call_stack: frame %d, jcc %p\n",
		CLG_(current_call_stack).sp, lower_entry->jcc);

    /* jCC item not any more on real stack: pop */
    jcc = lower_entry->jcc;
    CLG_(current_state).nonskipped = lower_entry->nonskipped;

    if (jcc) {
	fn_node* to_fn  = jcc->to->cxt->fn[0];
	/* *pdepth is the number of active invocations of to_fn;
	 * must mirror the increment logic in push_call_stack */
	UInt* pdepth =  CLG_(get_fn_entry)(to_fn->number);
	if (CLG_(clo).skip_direct_recursion) {
	    /* only decrement depth if another function was called */
	  if (jcc->from->cxt->fn[0] != to_fn) (*pdepth)--;
	}
	else (*pdepth)--;
	depth = *pdepth;

	/* add cost difference to sum */
	if ( CLG_(add_diff_cost_lz)( CLG_(sets).full, &(jcc->cost),
				    lower_entry->enter_cost,
				    CLG_(current_state).cost) ) {

	  /* only count this call if it attributed some cost.
	   * the ret_counter is used to check if a BBCC dump is needed.
	   */
	  jcc->from->ret_counter++;
	}
	CLG_(stat).ret_counter++;

	/* restore context */
	CLG_(current_state).cxt  = lower_entry->cxt;
	CLG_(current_fn_stack).top =
	  CLG_(current_fn_stack).bottom + lower_entry->fn_sp;
	CLG_ASSERT(CLG_(current_state).cxt != 0);

	/* leave actions (dump/toggle/verbosity) only fire when the last
	 * active invocation returns */
	if (depth == 0) function_left(to_fn);
    }

    /* To allow for an assertion in push_call_stack() */
    lower_entry->cxt = 0;

    CLG_(current_call_stack).sp--;

#if CLG_ENABLE_DEBUG
    CLG_DEBUGIF(1) {
	if (CLG_(clo).verbose<4) {
	    if (jcc) {
		/* popped JCC target first */
		VG_(printf)("- %2d %#lx => ",
			    CLG_(current_call_stack).sp,
			    bb_addr(jcc->to->bb));
		CLG_(print_addr)(bb_jmpaddr(jcc->from->bb));
		VG_(printf)(", SP %#lx\n",
			    CLG_(current_call_stack).entry[CLG_(current_call_stack).sp].sp);
		CLG_(print_cost)(10, CLG_(sets).full, jcc->cost);
	    }
	    else
		VG_(printf)("- %2d [Skipped JCC], SP %#lx\n",
			    CLG_(current_call_stack).sp,
			    CLG_(current_call_stack).entry[CLG_(current_call_stack).sp].sp);
	}
	else {
	    VG_(printf)("  Popped ");
	    CLG_(print_stackentry)(7, CLG_(current_call_stack).sp);
	    if (jcc) {
		VG_(printf)("       returned to ");
		CLG_(print_addr_ln)(bb_jmpaddr(jcc->from->bb));
	    }
	}
    }
#endif

}
    402 
    403 
    404 /* Unwind enough CallStack items to sync with current stack pointer.
    405  * Returns the number of stack frames unwinded.
    406  */
    407 Int CLG_(unwind_call_stack)(Addr sp, Int minpops)
    408 {
    409     Int csp;
    410     Int unwind_count = 0;
    411     CLG_DEBUG(4,"+ unwind_call_stack(sp %#lx, minpops %d): frame %d\n",
    412 	      sp, minpops, CLG_(current_call_stack).sp);
    413 
    414     /* We pop old stack frames.
    415      * For a call, be p the stack address with return address.
    416      *  - call_stack_esp[] has SP after the CALL: p-4
    417      *  - current sp is after a RET: >= p
    418      */
    419 
    420     while( (csp=CLG_(current_call_stack).sp) >0) {
    421 	call_entry* top_ce = &(CLG_(current_call_stack).entry[csp-1]);
    422 
    423 	if ((top_ce->sp < sp) ||
    424 	    ((top_ce->sp == sp) && minpops>0)) {
    425 
    426 	    minpops--;
    427 	    unwind_count++;
    428 	    CLG_(pop_call_stack)();
    429 	    csp=CLG_(current_call_stack).sp;
    430 	    continue;
    431 	}
    432 	break;
    433     }
    434 
    435     CLG_DEBUG(4,"- unwind_call_stack\n");
    436     return unwind_count;
    437 }
    438