# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
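"""Implements the 'cat' subcommand, which prints a series of heap profile
dumps as a single JSON document on stdout."""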
import json
import logging
import sys

from lib.bucket import BUCKET_ID, COMMITTED, ALLOC_COUNT, FREE_COUNT
from lib.ordered_dict import OrderedDict
from lib.subcommand import SubCommand
from lib.sorter import MallocUnit, MMapUnit, SorterSet, UnhookedUnit, UnitSet


LOGGER = logging.getLogger('dmprof')


class CatCommand(SubCommand):
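  """Subcommand that converts heap profile dumps into a JSON document.

  Example invocation (the dump path below is only illustrative):
    %prog cat /path/to/first-dump.heap --indent > profile.json
  """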
  def __init__(self):
    super(CatCommand, self).__init__('Usage: %prog cat <first-dump>')
    self._parser.add_option('--alternative-dirs', dest='alternative_dirs',
                            metavar='/path/on/target@/path/on/host[:...]',
                            help='Read files in /path/on/host/ instead of '
                                 'files in /path/on/target/.')
    self._parser.add_option('--indent', dest='indent', action='store_true',
                            help='Indent the output.')

  def do(self, sys_argv):
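    """Loads the dumps and writes them to stdout as a JSON document."""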
    options, args = self._parse_args(sys_argv, 1)
    dump_path = args[1]
    # TODO(dmikurube): Support shared memory.
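    # Map each /path/on/target prefix to the /path/on/host prefix given via
    # --alternative-dirs.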
    alternative_dirs_dict = {}
    if options.alternative_dirs:
      for alternative_dir_pair in options.alternative_dirs.split(':'):
        target_path, host_path = alternative_dir_pair.split('@', 1)
        alternative_dirs_dict[target_path] = host_path
    (bucket_set, dumps) = SubCommand.load_basic_files(
        dump_path, True, alternative_dirs=alternative_dirs_dict)

    # Load all sorters.
    sorters = SorterSet()

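    # Build the top-level JSON document: format metadata, sorter roots and
    # templates, per-world breakdown orders, and one snapshot per dump.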
    json_root = OrderedDict()
    json_root['version'] = 1
    json_root['run_id'] = None
    json_root['roots'] = []
    for sorter in sorters:
      if sorter.root:
        json_root['roots'].append([sorter.world, sorter.name])
    json_root['default_template'] = 'l2'
    json_root['templates'] = sorters.templates.as_dict()

    orders = OrderedDict()
    orders['worlds'] = OrderedDict()
    for world in ['vm', 'malloc']:
      orders['worlds'][world] = OrderedDict()
      orders['worlds'][world]['breakdown'] = OrderedDict()
      for sorter in sorters.iter_world(world):
        order = []
        for rule in sorter.iter_rule():
          if rule.name not in order:
            order.append(rule.name)
        orders['worlds'][world]['breakdown'][sorter.name] = order
    json_root['orders'] = orders

    json_root['snapshots'] = []

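    # All dumps are expected to share one run_id; report an error if they
    # disagree.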
    for dump in dumps:
      if json_root['run_id'] and json_root['run_id'] != dump.run_id:
        LOGGER.error('Inconsistent heap profile dumps.')
        json_root['run_id'] = ''
      else:
        json_root['run_id'] = dump.run_id

      LOGGER.info('Sorting dump %s...' % dump.path)
      json_root['snapshots'].append(
          self._fill_snapshot(dump, bucket_set, sorters))

    if options.indent:
      json.dump(json_root, sys.stdout, indent=2)
    else:
      json.dump(json_root, sys.stdout)
    print ''

  @staticmethod
  def _fill_snapshot(dump, bucket_set, sorters):
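    """Converts a dump into a snapshot with 'vm' and 'malloc' worlds."""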
    root = OrderedDict()
    root['time'] = dump.time
    root['worlds'] = OrderedDict()
    root['worlds']['vm'] = CatCommand._fill_world(
        dump, bucket_set, sorters, 'vm')
    root['worlds']['malloc'] = CatCommand._fill_world(
        dump, bucket_set, sorters, 'malloc')
    return root

  @staticmethod
  def _fill_world(dump, bucket_set, sorters, world):
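    """Builds a world ('vm' or 'malloc') with its units and the breakdowns
    produced by each sorter for that world."""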
    root = OrderedDict()

    root['name'] = world
    if world == 'vm':
      root['unit_fields'] = ['size', 'reserved']
    elif world == 'malloc':
      root['unit_fields'] = ['size', 'alloc_count', 'free_count']

    # Make { vm | malloc } units with their sizes.
    root['units'] = OrderedDict()
    unit_set = UnitSet(world)
    if world == 'vm':
      for unit in CatCommand._iterate_vm_unit(dump, None, bucket_set):
        unit_set.append(unit)
      for unit in unit_set:
        root['units'][unit.unit_id] = [unit.committed, unit.reserved]
    elif world == 'malloc':
      for unit in CatCommand._iterate_malloc_unit(dump, bucket_set):
        unit_set.append(unit)
      for unit in unit_set:
        root['units'][unit.unit_id] = [
            unit.size, unit.alloc_count, unit.free_count]

    # Iterate over { vm | malloc } sorters.
    root['breakdown'] = OrderedDict()
    for sorter in sorters.iter_world(world):
      LOGGER.info('  Sorting with %s:%s.' % (sorter.world, sorter.name))
      breakdown = OrderedDict()
      for rule in sorter.iter_rule():
        category = OrderedDict()
        category['name'] = rule.name
        subs = []
        for sub_world, sub_breakdown in rule.iter_subs():
          subs.append([sub_world, sub_breakdown])
        if subs:
          category['subs'] = subs
        if rule.hidden:
          category['hidden'] = True
        category['units'] = []
        breakdown[rule.name] = category
      for unit in unit_set:
        found = sorter.find(unit)
        if found:
          # Note that a unit which doesn't match any rule is just dropped.
          breakdown[found.name]['units'].append(unit.unit_id)
      root['breakdown'][sorter.name] = breakdown

    return root

  @staticmethod
  def _iterate_vm_unit(dump, pfn_dict, bucket_set):
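    """Yields an UnhookedUnit or MMapUnit per mapped region, or one unit per
    page frame when page frame data is available."""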
    unit_id = 0
    for _, region in dump.iter_map:
      unit_id += 1
      if region[0] == 'unhooked':
        if pfn_dict and dump.pageframe_length:
          for pageframe in region[1]['pageframe']:
            yield UnhookedUnit(unit_id, pageframe.size, pageframe.size,
                               region, pageframe, pfn_dict)
        else:
          yield UnhookedUnit(unit_id,
                             int(region[1]['committed']),
                             int(region[1]['reserved']),
                             region)
      elif region[0] == 'hooked':
        if pfn_dict and dump.pageframe_length:
          for pageframe in region[1]['pageframe']:
            yield MMapUnit(unit_id,
                           pageframe.size,
                           pageframe.size,
                           region, bucket_set, pageframe, pfn_dict)
        else:
          yield MMapUnit(unit_id,
                         int(region[1]['committed']),
                         int(region[1]['reserved']),
                         region,
                         bucket_set)
      else:
        LOGGER.error('Unrecognized mapping status: %s' % region[0])

  @staticmethod
  def _iterate_malloc_unit(dump, bucket_set):
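    """Yields a MallocUnit per stacktrace line whose bucket is a malloc bucket
    or is not found in |bucket_set|."""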
    for line in dump.iter_stacktrace:
      words = line.split()
      bucket = bucket_set.get(int(words[BUCKET_ID]))
      if bucket and bucket.allocator_type == 'malloc':
        yield MallocUnit(int(words[BUCKET_ID]),
                         int(words[COMMITTED]),
                         int(words[ALLOC_COUNT]),
                         int(words[FREE_COUNT]),
                         bucket)
      elif not bucket:
        # 'Not-found' buckets are all assumed to be malloc buckets.
        yield MallocUnit(int(words[BUCKET_ID]),
                         int(words[COMMITTED]),
                         int(words[ALLOC_COUNT]),
                         int(words[FREE_COUNT]),
                         None)
    196