// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "components/metrics/metrics_log.h"

#include <string>

#include "base/base64.h"
#include "base/basictypes.h"
#include "base/metrics/bucket_ranges.h"
#include "base/metrics/sample_vector.h"
#include "base/prefs/pref_service.h"
#include "base/prefs/testing_pref_service.h"
#include "base/strings/string_number_conversions.h"
#include "base/time/time.h"
#include "components/metrics/metrics_pref_names.h"
#include "components/metrics/metrics_state_manager.h"
#include "components/metrics/proto/chrome_user_metrics_extension.pb.h"
#include "components/metrics/test_metrics_service_client.h"
#include "components/variations/active_field_trials.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace metrics {

namespace {

const char kClientId[] = "bogus client ID";
// The expected values are the timestamps rounded down to the nearest hour.
const int64 kInstallDate = 1373051956;
const int64 kInstallDateExpected = 1373050800;
const int64 kEnabledDate = 1373001211;
const int64 kEnabledDateExpected = 1373000400;
const int kSessionId = 127;
const variations::ActiveGroupId kFieldTrialIds[] = {
  {37, 43},
  {13, 47},
  {23, 17}
};
const variations::ActiveGroupId kSyntheticTrials[] = {
  {55, 15},
  {66, 16}
};

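// A MetricsLog subclass that exposes the underlying protos for inspection and
// reports the fixed field trial IDs defined above in place of the real ones.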
class TestMetricsLog : public MetricsLog {
 public:
  TestMetricsLog(const std::string& client_id,
                 int session_id,
                 LogType log_type,
                 metrics::MetricsServiceClient* client,
                 TestingPrefServiceSimple* prefs)
      : MetricsLog(client_id, session_id, log_type, client, prefs),
        prefs_(prefs) {
    InitPrefs();
  }

  virtual ~TestMetricsLog() {}

  const metrics::ChromeUserMetricsExtension& uma_proto() const {
    return *MetricsLog::uma_proto();
  }

  const metrics::SystemProfileProto& system_profile() const {
    return uma_proto().system_profile();
  }

 private:
  void InitPrefs() {
    prefs_->SetString(metrics::prefs::kMetricsReportingEnabledTimestamp,
                      base::Int64ToString(kEnabledDate));
  }

  virtual void GetFieldTrialIds(
      std::vector<variations::ActiveGroupId>* field_trial_ids) const
      OVERRIDE {
    ASSERT_TRUE(field_trial_ids->empty());

    for (size_t i = 0; i < arraysize(kFieldTrialIds); ++i) {
      field_trial_ids->push_back(kFieldTrialIds[i]);
    }
  }

  // Weak pointer to the PrefService used by this log.
  TestingPrefServiceSimple* prefs_;

  DISALLOW_COPY_AND_ASSIGN(TestMetricsLog);
};

}  // namespace

class MetricsLogTest : public testing::Test {
 public:
  MetricsLogTest() {
    MetricsLog::RegisterPrefs(prefs_.registry());
    metrics::MetricsStateManager::RegisterPrefs(prefs_.registry());
  }

  virtual ~MetricsLogTest() {
  }

 protected:
  // Check that the values in |system_profile| correspond to the test data
  // defined at the top of this file.
  void CheckSystemProfile(const metrics::SystemProfileProto& system_profile) {
    EXPECT_EQ(kInstallDateExpected, system_profile.install_date());
    EXPECT_EQ(kEnabledDateExpected, system_profile.uma_enabled_date());

    ASSERT_EQ(arraysize(kFieldTrialIds) + arraysize(kSyntheticTrials),
              static_cast<size_t>(system_profile.field_trial_size()));
    for (size_t i = 0; i < arraysize(kFieldTrialIds); ++i) {
      const metrics::SystemProfileProto::FieldTrial& field_trial =
          system_profile.field_trial(i);
      EXPECT_EQ(kFieldTrialIds[i].name, field_trial.name_id());
      EXPECT_EQ(kFieldTrialIds[i].group, field_trial.group_id());
    }
    // Verify the right data is present for the synthetic trials.
    for (size_t i = 0; i < arraysize(kSyntheticTrials); ++i) {
      const metrics::SystemProfileProto::FieldTrial& field_trial =
          system_profile.field_trial(i + arraysize(kFieldTrialIds));
      EXPECT_EQ(kSyntheticTrials[i].name, field_trial.name_id());
      EXPECT_EQ(kSyntheticTrials[i].group, field_trial.group_id());
    }

    EXPECT_EQ(metrics::TestMetricsServiceClient::kBrandForTesting,
              system_profile.brand_code());

    const metrics::SystemProfileProto::Hardware& hardware =
        system_profile.hardware();

    EXPECT_TRUE(hardware.has_cpu());
    EXPECT_TRUE(hardware.cpu().has_vendor_name());
    EXPECT_TRUE(hardware.cpu().has_signature());

    // TODO(isherman): Verify other data written into the protobuf as a result
    // of this call.
  }

 protected:
  TestingPrefServiceSimple prefs_;

 private:
  DISALLOW_COPY_AND_ASSIGN(MetricsLogTest);
};

TEST_F(MetricsLogTest, LogType) {
  TestMetricsServiceClient client;
  TestingPrefServiceSimple prefs;

  MetricsLog log1("id", 0, MetricsLog::ONGOING_LOG, &client, &prefs);
  EXPECT_EQ(MetricsLog::ONGOING_LOG, log1.log_type());

  MetricsLog log2("id", 0, MetricsLog::INITIAL_STABILITY_LOG, &client, &prefs);
  EXPECT_EQ(MetricsLog::INITIAL_STABILITY_LOG, log2.log_type());
}

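// Verifies the serialized form of a log that has had no data recorded into
// it: only the hashed client ID, the session ID, and the basic system profile
// fields (build timestamp, app version, channel) should be present.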
TEST_F(MetricsLogTest, EmptyRecord) {
  TestMetricsServiceClient client;
  client.set_version_string("bogus version");
  TestingPrefServiceSimple prefs;
  MetricsLog log("totally bogus client ID", 137, MetricsLog::ONGOING_LOG,
                 &client, &prefs);
  log.CloseLog();

  std::string encoded;
  log.GetEncodedLog(&encoded);

  // A couple of fields are hard to mock, so these will be copied over directly
  // for the expected output.
  ChromeUserMetricsExtension parsed;
  ASSERT_TRUE(parsed.ParseFromString(encoded));

  ChromeUserMetricsExtension expected;
  expected.set_client_id(5217101509553811875);  // Hashed bogus client ID
  expected.set_session_id(137);
  expected.mutable_system_profile()->set_build_timestamp(
      parsed.system_profile().build_timestamp());
  expected.mutable_system_profile()->set_app_version("bogus version");
  expected.mutable_system_profile()->set_channel(client.GetChannel());

  EXPECT_EQ(expected.SerializeAsString(), encoded);
}

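// Verifies the bucket field omission optimization used when serializing
// histogram samples: a bucket's max is dropped when it equals the next
// recorded bucket's min, and its min is dropped when it equals max - 1.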
TEST_F(MetricsLogTest, HistogramBucketFields) {
  // Create buckets: 1-5, 5-7, 7-8, 8-9, 9-10, 10-11, 11-12.
  base::BucketRanges ranges(8);
  ranges.set_range(0, 1);
  ranges.set_range(1, 5);
  ranges.set_range(2, 7);
  ranges.set_range(3, 8);
  ranges.set_range(4, 9);
  ranges.set_range(5, 10);
  ranges.set_range(6, 11);
  ranges.set_range(7, 12);

  base::SampleVector samples(&ranges);
  samples.Accumulate(3, 1);   // Bucket 1-5.
  samples.Accumulate(6, 1);   // Bucket 5-7.
  samples.Accumulate(8, 1);   // Bucket 8-9. (7-8 skipped)
  samples.Accumulate(10, 1);  // Bucket 10-11. (9-10 skipped)
  samples.Accumulate(11, 1);  // Bucket 11-12.

  TestMetricsServiceClient client;
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
  log.RecordHistogramDelta("Test", samples);

  const ChromeUserMetricsExtension& uma_proto = log.uma_proto();
  const HistogramEventProto& histogram_proto =
      uma_proto.histogram_event(uma_proto.histogram_event_size() - 1);

  // Buckets with samples: 1-5, 5-7, 8-9, 10-11, 11-12.
  // Should become: 1-/, 5-7, /-9, 10-/, /-12.
  ASSERT_EQ(5, histogram_proto.bucket_size());

  // 1-5 becomes 1-/ (max is same as next min).
  EXPECT_TRUE(histogram_proto.bucket(0).has_min());
  EXPECT_FALSE(histogram_proto.bucket(0).has_max());
  EXPECT_EQ(1, histogram_proto.bucket(0).min());

  // 5-7 stays 5-7 (no optimization possible).
  EXPECT_TRUE(histogram_proto.bucket(1).has_min());
  EXPECT_TRUE(histogram_proto.bucket(1).has_max());
  EXPECT_EQ(5, histogram_proto.bucket(1).min());
  EXPECT_EQ(7, histogram_proto.bucket(1).max());

  // 8-9 becomes /-9 (min is same as max - 1).
  EXPECT_FALSE(histogram_proto.bucket(2).has_min());
  EXPECT_TRUE(histogram_proto.bucket(2).has_max());
  EXPECT_EQ(9, histogram_proto.bucket(2).max());

  // 10-11 becomes 10-/ (both optimizations apply; omitting the max wins).
  EXPECT_TRUE(histogram_proto.bucket(3).has_min());
  EXPECT_FALSE(histogram_proto.bucket(3).has_max());
  EXPECT_EQ(10, histogram_proto.bucket(3).min());

  // 11-12 becomes /-12 (last record must keep max, min is same as max - 1).
  EXPECT_FALSE(histogram_proto.bucket(4).has_min());
  EXPECT_TRUE(histogram_proto.bucket(4).has_max());
  EXPECT_EQ(12, histogram_proto.bucket(4).max());
}

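// Verifies that RecordEnvironment() fills in the log's system profile and
// also persists a serialized copy of it to local state prefs.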
TEST_F(MetricsLogTest, RecordEnvironment) {
  TestMetricsServiceClient client;
  client.set_install_date(kInstallDate);
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);

  std::vector<variations::ActiveGroupId> synthetic_trials;
  // Add two synthetic trials.
  synthetic_trials.push_back(kSyntheticTrials[0]);
  synthetic_trials.push_back(kSyntheticTrials[1]);

  log.RecordEnvironment(std::vector<MetricsProvider*>(),
                        synthetic_trials);
  // Check that the system profile on the log has the correct values set.
  CheckSystemProfile(log.system_profile());

  // Check that the system profile has also been written to prefs.
  const std::string base64_system_profile =
      prefs_.GetString(prefs::kStabilitySavedSystemProfile);
  EXPECT_FALSE(base64_system_profile.empty());
  std::string serialized_system_profile;
  EXPECT_TRUE(base::Base64Decode(base64_system_profile,
                                 &serialized_system_profile));
  SystemProfileProto decoded_system_profile;
  EXPECT_TRUE(
      decoded_system_profile.ParseFromString(serialized_system_profile));
  CheckSystemProfile(decoded_system_profile);
}

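// Verifies the round trip of the system profile through prefs:
// RecordEnvironment() saves it, LoadSavedEnvironmentFromPrefs() restores it
// and clears the prefs, and a mismatched hash invalidates the saved copy.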
TEST_F(MetricsLogTest, LoadSavedEnvironmentFromPrefs) {
  const char* kSystemProfilePref = prefs::kStabilitySavedSystemProfile;
  const char* kSystemProfileHashPref =
      prefs::kStabilitySavedSystemProfileHash;

  TestMetricsServiceClient client;
  client.set_install_date(kInstallDate);

  // The pref value is empty, so loading it from prefs should fail.
  {
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    EXPECT_FALSE(log.LoadSavedEnvironmentFromPrefs());
  }

  // Do a RecordEnvironment() call and check whether the pref is recorded.
  {
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    log.RecordEnvironment(std::vector<MetricsProvider*>(),
                          std::vector<variations::ActiveGroupId>());
    EXPECT_FALSE(prefs_.GetString(kSystemProfilePref).empty());
    EXPECT_FALSE(prefs_.GetString(kSystemProfileHashPref).empty());
  }

  {
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    EXPECT_TRUE(log.LoadSavedEnvironmentFromPrefs());
    // Check some values in the system profile.
    EXPECT_EQ(kInstallDateExpected, log.system_profile().install_date());
    EXPECT_EQ(kEnabledDateExpected, log.system_profile().uma_enabled_date());
    // Ensure that the call cleared the prefs.
    EXPECT_TRUE(prefs_.GetString(kSystemProfilePref).empty());
    EXPECT_TRUE(prefs_.GetString(kSystemProfileHashPref).empty());
  }

  // Ensure that a non-matching hash results in the pref being invalid.
  {
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    // Call RecordEnvironment() to record the pref again.
    log.RecordEnvironment(std::vector<MetricsProvider*>(),
                          std::vector<variations::ActiveGroupId>());
  }

  {
    // Set the hash to a bad value.
    prefs_.SetString(kSystemProfileHashPref, "deadbeef");
    TestMetricsLog log(
        kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
    EXPECT_FALSE(log.LoadSavedEnvironmentFromPrefs());
    // Ensure that the prefs are cleared, even if the call failed.
    EXPECT_TRUE(prefs_.GetString(kSystemProfilePref).empty());
    EXPECT_TRUE(prefs_.GetString(kSystemProfileHashPref).empty());
  }
}

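// Initial stability logs should contain the stability counts that are only
// reported for a previous session (incomplete shutdowns, breakpad
// registration, debugger presence) in addition to the required counts.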
TEST_F(MetricsLogTest, InitialLogStabilityMetrics) {
  TestMetricsServiceClient client;
  TestMetricsLog log(kClientId,
                     kSessionId,
                     MetricsLog::INITIAL_STABILITY_LOG,
                     &client,
                     &prefs_);
  std::vector<MetricsProvider*> metrics_providers;
  log.RecordEnvironment(metrics_providers,
                        std::vector<variations::ActiveGroupId>());
  log.RecordStabilityMetrics(metrics_providers, base::TimeDelta(),
                             base::TimeDelta());
  const SystemProfileProto_Stability& stability =
      log.system_profile().stability();
  // Required metrics:
  EXPECT_TRUE(stability.has_launch_count());
  EXPECT_TRUE(stability.has_crash_count());
  // Initial log metrics:
  EXPECT_TRUE(stability.has_incomplete_shutdown_count());
  EXPECT_TRUE(stability.has_breakpad_registration_success_count());
  EXPECT_TRUE(stability.has_breakpad_registration_failure_count());
  EXPECT_TRUE(stability.has_debugger_present_count());
  EXPECT_TRUE(stability.has_debugger_not_present_count());
}

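// Ongoing logs, by contrast, should omit the initial-stability-only counts
// and report only the required launch and crash counts.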
TEST_F(MetricsLogTest, OngoingLogStabilityMetrics) {
  TestMetricsServiceClient client;
  TestMetricsLog log(
      kClientId, kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
  std::vector<MetricsProvider*> metrics_providers;
  log.RecordEnvironment(metrics_providers,
                        std::vector<variations::ActiveGroupId>());
  log.RecordStabilityMetrics(metrics_providers, base::TimeDelta(),
                             base::TimeDelta());
  const SystemProfileProto_Stability& stability =
      log.system_profile().stability();
  // Required metrics:
  EXPECT_TRUE(stability.has_launch_count());
  EXPECT_TRUE(stability.has_crash_count());
  // Initial log metrics:
  EXPECT_FALSE(stability.has_incomplete_shutdown_count());
  EXPECT_FALSE(stability.has_breakpad_registration_success_count());
  EXPECT_FALSE(stability.has_breakpad_registration_failure_count());
  EXPECT_FALSE(stability.has_debugger_present_count());
  EXPECT_FALSE(stability.has_debugger_not_present_count());
}

TEST_F(MetricsLogTest, ChromeChannelWrittenToProtobuf) {
  TestMetricsServiceClient client;
  TestMetricsLog log(
      "user@test.com", kSessionId, MetricsLog::ONGOING_LOG, &client, &prefs_);
  EXPECT_TRUE(log.uma_proto().system_profile().has_channel());
}

}  // namespace metrics