/external/chromium_org/chrome_frame/crash_reporting/

crash_metrics.h
    20 enum Metric {
    35 int GetMetric(Metric metric);
    36 bool SetMetric(Metric metric, int value);
    37 int IncrementMetric(Metric metric);

crash_metrics.cc
    29 bool CrashMetricsReporter::SetMetric(Metric metric, int value) {
    30 DCHECK(metric >= NAVIGATION_COUNT && metric <= LAST_METRIC);
    36 result = metric_key.WriteValue(g_metric_names[metric], value);
    40 DLOG(ERROR) << "Failed to read ChromeFrame metric:"
    41 << g_metric_names[metric] << " error: " << result;
    50 int CrashMetricsReporter::GetMetric(Metric metric) {
    51 DCHECK(metric >= NAVIGATION_COUNT && metric <= LAST_METRIC)
    [all...]
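
The crash_reporting hits above describe an enum-keyed counter API: GetMetric, SetMetric, and IncrementMetric, bounds-checked against NAVIGATION_COUNT..LAST_METRIC and persisted per metric name. The sketch below shows that pattern only; it is not the real CrashMetricsReporter. A std::map stands in for the Windows registry storage implied by metric_key.WriteValue(g_metric_names[metric], value), the enum entries other than NAVIGATION_COUNT are invented, and whether IncrementMetric returns the new or previous count is an assumption.

#include <cassert>
#include <map>

// Illustrative metric identifiers; only NAVIGATION_COUNT comes from the
// snippets above, the rest are placeholders.
enum Metric {
  NAVIGATION_COUNT,
  CRASH_COUNT,
  LAST_METRIC = CRASH_COUNT,
};

class MetricStore {
 public:
  int GetMetric(Metric metric) {
    assert(metric >= NAVIGATION_COUNT && metric <= LAST_METRIC);
    return values_[metric];  // unseen metrics read as 0
  }

  bool SetMetric(Metric metric, int value) {
    assert(metric >= NAVIGATION_COUNT && metric <= LAST_METRIC);
    values_[metric] = value;  // the real code writes a registry value here
    return true;
  }

  // Read-modify-write; assumed to return the updated count.
  int IncrementMetric(Metric metric) {
    int next = GetMetric(metric) + 1;
    SetMetric(metric, next);
    return next;
  }

 private:
  std::map<Metric, int> values_;  // stand-in for registry-backed storage
};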

/external/chromium_org/chrome/browser/performance_monitor/

metric.h
    14 // - Place the new metric above METRIC_NUMBER_OF_METRICS.
    42 struct Metric {
    44 Metric();
    45 Metric(MetricType metric_type,
    48 Metric(MetricType metric_type,
    51 ~Metric();
    53 // Check the value in the metric to make sure that it is reasonable. Since
    54 // some metric-gathering methods will fail and return incorrect values, we
    59 // not perform any checking on the validity of the metric, and only makes
    60 // sense if the metric IsValid()
    [all...]

metric.cc
    5 #include "chrome/browser/performance_monitor/metric.h"
    26 // metric is valid if it is greater than or equal to the minimum and less than
    29 const double kMaxUndefined = 0.0; // No undefined metric is valid.
    69 Metric::Metric() : type(METRIC_UNDEFINED), value(0.0) {
    72 Metric::Metric(MetricType metric_type,
    78 Metric::Metric(MetricType metric_type,
    87 Metric::~Metric()
    [all...]
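
The metric.cc hits spell out the validity rule: a value is valid only if it is at least the per-type minimum and strictly below the per-type maximum, and kMaxUndefined = 0.0 guarantees that METRIC_UNDEFINED never passes. Below is a minimal sketch of that rule under assumed names; the bounds table, the ceilings in it, and the MetricType entries other than METRIC_UNDEFINED and METRIC_CPU_USAGE are illustrative, not the real constants from metric.cc.

// A value is valid when min <= value < max for its metric type.
enum MetricType {
  METRIC_UNDEFINED,
  METRIC_CPU_USAGE,
  METRIC_NUMBER_OF_METRICS,  // new metrics go above this entry
};

struct MetricBounds {
  double min;
  double max;
};

// Hypothetical per-type bounds, indexed by MetricType. A max of 0.0 for
// METRIC_UNDEFINED means no undefined metric can ever be valid.
const MetricBounds kBounds[METRIC_NUMBER_OF_METRICS] = {
  {0.0, 0.0},       // METRIC_UNDEFINED
  {0.0, 100000.0},  // METRIC_CPU_USAGE, made-up ceiling
};

bool IsValidMetric(MetricType type, double value) {
  if (type < 0 || type >= METRIC_NUMBER_OF_METRICS)
    return false;
  return value >= kBounds[type].min && value < kBounds[type].max;
}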

startup_timer.cc
    24 const Metric& metric) {
    25 database->AddMetric(metric);
    121 Metric(startup_type_ == STARTUP_NORMAL ? METRIC_STARTUP_TIME
    138 Metric(METRIC_SESSION_RESTORE_TIME,

performance_monitor.h
    37 struct Metric;
    143 void AddMetricOnBackgroundThread(const Metric& metric);

database.h
    19 #include "chrome/browser/performance_monitor/metric.h"
    74 // Stores the most recent metric statistics to go into the database. There is
    75 // only ever one entry per (metric, activity) pair. |recent_map_| keeps an
    77 // metric and activity to the key used in the recent db. |recent_map_| allows us
    80 // within a timerange. Without it, all the metric databases would need to be
    81 // searched to see if that metric is active.
    82 // Key: Time - Metric - Activity
    86 // Stores the max metric statistics that have been inserted into the database.
    87 // There is only ever one entry per (metric, activity) pair. |max_value_map_|
    89 // concatenation of metric and activity to the max metric
    [all...]

database_unittest.cc
    15 #include "chrome/browser/performance_monitor/metric.h"
    37 // Override the check for a metric's validity and insert it in the database.
    40 bool AddInvalidMetric(std::string activity, Metric metric) {
    43 metric.type,
    48 metric.ValueAsString());
    169 Metric(METRIC_CPU_USAGE, clock_->GetTime(), 50.5));
    171 Metric(METRIC_CPU_USAGE, clock_->GetTime(), 13.1));
    173 Metric(METRIC_PRIVATE_MEMORY_USAGE,
    177 Metric(METRIC_PRIVATE_MEMORY_USAGE
    [all...]

performance_monitor.cc
    184 Metric metric; local
    186 &metric)) {
    187 performance_data_for_io_thread_.network_bytes_read = metric.value;
    344 void PerformanceMonitor::AddMetricOnBackgroundThread(const Metric& metric) {
    348 database_->AddMetric(metric);
    520 database_->AddMetric(Metric(METRIC_CPU_USAGE, time_now, cpu_usage));
    521 database_->AddMetric(Metric(METRIC_PRIVATE_MEMORY_USAGE,
    524 database_->AddMetric(Metric(METRIC_SHARED_MEMORY_USAGE
    [all...]

database.cc
    214 const Metric& metric) {
    216 if (!metric.IsValid()) {
    217 DLOG(ERROR) << "Metric to be added is invalid. Type: " << metric.type
    218 << ", Time: " << metric.time.ToInternalValue()
    219 << ", Value: " << metric.value << ". Ignoring.";
    225 key_builder_->CreateRecentKey(metric.time, metric.type, activity);
    227 key_builder_->CreateMetricKey(metric.time, metric.type, activity)
    [all...]
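
Taken together, the database.h comments and the database.cc AddMetric() hit describe the storage layout: every valid sample is written to a time series under a Time-Metric-Activity key, the "recent" db keeps exactly one entry per (metric, activity) pair, and an in-memory map from the metric+activity concatenation to the current recent-db key lets that entry be replaced when a newer sample arrives. The sketch below mimics that flow with std::map standing in for the databases; the key format, delimiters, and the placeholder validity check are assumptions, and the real code builds keys through key_builder_->CreateRecentKey() and CreateMetricKey().

#include <cstdint>
#include <cstdio>
#include <map>
#include <string>

struct Sample {
  int type;      // metric type
  int64_t time;  // sample timestamp
  double value;
};

// Time first and zero-padded, so lexicographic key order matches time order.
std::string TimeMetricActivityKey(int64_t time, int type,
                                  const std::string& activity) {
  char prefix[64];
  std::snprintf(prefix, sizeof(prefix), "%020lld-%04d-",
                static_cast<long long>(time), type);
  return std::string(prefix) + activity;
}

class TinyMetricDatabase {
 public:
  // Mirrors the AddMetric() flow: invalid samples are ignored, valid ones
  // update both the full time series and the single "recent" entry.
  bool AddMetric(const std::string& activity, const Sample& sample) {
    if (sample.value < 0)  // placeholder for Metric::IsValid()
      return false;
    const std::string pair_key = std::to_string(sample.type) + "-" + activity;
    const std::string key =
        TimeMetricActivityKey(sample.time, sample.type, activity);
    // Replace the previous recent entry for this (metric, activity) pair.
    auto old_entry = recent_map_.find(pair_key);
    if (old_entry != recent_map_.end())
      recent_db_.erase(old_entry->second);
    recent_db_[key] = sample.value;
    recent_map_[pair_key] = key;
    metric_db_[key] = sample.value;  // full history
    return true;
  }

 private:
  std::map<std::string, double> metric_db_;        // all samples, by time
  std::map<std::string, double> recent_db_;        // newest sample per pair
  std::map<std::string, std::string> recent_map_;  // pair -> key in recent_db_
};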

/external/chromium_org/tools/perf/metrics/

loading.py
    5 from metrics import Metric
    7 class LoadingMetric(Metric):
    8 """A metric for page loading time based entirely on window.performance"""

__init__.py
    5 class Metric(object):
    8 The Metric class represents a way of measuring something. Metrics are
    10 multiple metrics; each metric should be focussed on collecting data
    16 """Add browser options that are required by this metric.
    27 """Start collecting data for this metric."""
    31 """Stop collecting data for this metric (if applicable)."""

cpu.py
    5 from metrics import Metric
    7 class CpuMetric(Metric):
    27 # Optional argument trace_name is not in base class Metric.

io.py
    5 from metrics import Metric
    7 class IOMetric(Metric):
    21 # This metric currently only returns summary results, not per-page results.

v8_object_stats.py
    8 from metrics import Metric
    150 class V8ObjectStatsMetric(Metric):

media.py
    7 from metrics import Metric
    10 class MediaMetric(Metric):
    56 def AddOneResult(metric, unit):
    59 if m.startswith(metric):
    60 special_label = m[len(metric):]
    66 chart_name=metric, data_type='default')

smoothness.py
    5 from metrics import Metric
    27 class SmoothnessMetric(Metric):

startup_metric.py
    8 from metrics import Metric
    14 class StartupMetric(Metric):
    15 "A metric for browser startup time."

memory.py
    8 from metrics import Metric
    22 class MemoryMetric(Metric):
    52 """Start the per-page preparation for this metric.
    96 metric = 'resident_set_size'
    98 metric = 'working_set'
    132 AddSummary('WorkingSetSize', 'vm_%s_final_size' % metric)
    137 AddSummary('WorkingSetSizePeak', '%s_peak_size' % metric)

speedindex.py
    8 from metrics import Metric
    11 class SpeedIndexMetric(Metric):
    12 """The speed index metric is one way of measuring page load speed.
    19 This speed index metric is based on WebPageTest.org (WPT).
    49 # Optional argument chart_name is not in base class Metric.
    62 are used in the speed index metric calculation. In general, the recording
    67 should be placed in any measurement that uses this metric, e.g.:

timeline.py
    6 from metrics import Metric
    11 class TimelineMetric(Metric):

/external/chromium_org/chrome/browser/ui/webui/performance_monitor/

performance_monitor_ui_util_unittest.cc
    9 #include "chrome/browser/performance_monitor/metric.h"
    28 Database::MetricVector::const_iterator metric = metrics->begin(); local
    29 while (metric != metrics->end() && metric->time < start)
    30 ++metric;
    48 type, &metric, metrics->end(), start, kMaxTime, resolution);
    55 metric_vector.push_back(Metric(METRIC_CPU_USAGE, data_time, 1));
    81 metric_vector.push_back(Metric(METRIC_CPU_USAGE, data_time, value));
    113 metric_vector.push_back(Metric(METRIC_CPU_USAGE,
    154 metric_vector.push_back(Metric(METRIC_CPU_USAGE, data_time1, value1))
    [all...]

performance_monitor_ui_util.cc
    10 #include "chrome/browser/performance_monitor/metric.h"
    46 Database::MetricVector::const_iterator metric = metrics->begin(); local
    47 while (metric != metrics->end() && metric->time < start)
    48 ++metric;
    50 // For each interval, advance the metric to the start of the interval, and
    51 // append a metric vector for the aggregated data within that interval,
    55 while (metric != metrics->end() && metric->time < interval->start)
    56 ++metric;
    [all...]
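
The performance_monitor_ui_util.cc hits outline the aggregation pass for the UI: skip samples that fall before the requested start, then, for each interval, advance the iterator to the interval's start and fold the samples inside it into one aggregated point. The sketch below shows that single-pass loop shape with made-up DataPoint and Interval types and a plain average; the real code operates on Database::MetricVector, and its actual aggregation strategy is not shown in these snippets.

#include <cstdint>
#include <vector>

struct DataPoint {
  int64_t time;
  double value;
};

struct Interval {
  int64_t start;
  int64_t end;
};

// One pass over time-sorted samples: average whatever lands in each interval.
std::vector<DataPoint> AggregateByInterval(
    const std::vector<DataPoint>& samples,
    const std::vector<Interval>& intervals) {
  std::vector<DataPoint> result;
  std::vector<DataPoint>::const_iterator it = samples.begin();
  for (const Interval& interval : intervals) {
    // Advance to the first sample inside this interval.
    while (it != samples.end() && it->time < interval.start)
      ++it;
    double sum = 0.0;
    int count = 0;
    while (it != samples.end() && it->time < interval.end) {
      sum += it->value;
      ++count;
      ++it;
    }
    if (count > 0)
      result.push_back({interval.end, sum / count});  // stamp with interval end
  }
  return result;
}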

/external/ceres-solver/internal/ceres/

autodiff_test.cc
    236 struct Metric {
    280 TEST(AutoDiff, Metric) {
    286 Metric b;
    305 ASSERT_TRUE((SymmetricDiff<Metric, double, 2, 4 + 3 + 3>(b, qcX, del,
    319 ASSERT_TRUE((AutoDiff<Metric, double, 4, 3, 3>::Differentiate(

/external/chromium_org/chrome/browser/resources/file_manager/foreground/js/

metrics.js
    32 * Convert a short metric name to the full format.
    34 * @param {string} name Short metric name.
    35 * @return {string} Full metric name.
    96 * @param {string} name Metric name.