// Source listing header (code-viewer chrome): Home | History | Annotate | Download | only in launcher
      1 // Copyright 2013 The Chromium Authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "base/test/launcher/test_results_tracker.h"
      6 
      7 #include "base/base64.h"
      8 #include "base/command_line.h"
      9 #include "base/file_util.h"
     10 #include "base/files/file_path.h"
     11 #include "base/format_macros.h"
     12 #include "base/json/json_file_value_serializer.h"
     13 #include "base/json/string_escape.h"
     14 #include "base/logging.h"
     15 #include "base/strings/string_util.h"
     16 #include "base/strings/stringprintf.h"
     17 #include "base/test/launcher/test_launcher.h"
     18 #include "base/values.h"
     19 
     20 namespace base {
     21 
     22 // See https://groups.google.com/a/chromium.org/d/msg/chromium-dev/nkdTP7sstSc/uT3FaE_sgkAJ .
     23 using ::operator<<;
     24 
     25 namespace {
     26 
     27 // The default output file for XML output.
     28 const FilePath::CharType kDefaultOutputFile[] = FILE_PATH_LITERAL(
     29     "test_detail.xml");
     30 
     31 // Utility function to print a list of test names. Uses iterator to be
     32 // compatible with different containers, like vector and set.
     33 template<typename InputIterator>
     34 void PrintTests(InputIterator first,
     35                 InputIterator last,
     36                 const std::string& description) {
     37   size_t count = std::distance(first, last);
     38   if (count == 0)
     39     return;
     40 
     41   fprintf(stdout,
     42           "%" PRIuS " test%s %s:\n",
     43           count,
     44           count != 1 ? "s" : "",
     45           description.c_str());
     46   for (InputIterator i = first; i != last; ++i)
     47     fprintf(stdout, "    %s\n", (*i).c_str());
     48   fflush(stdout);
     49 }
     50 
     51 std::string TestNameWithoutDisabledPrefix(const std::string& test_name) {
     52   std::string test_name_no_disabled(test_name);
     53   ReplaceSubstringsAfterOffset(&test_name_no_disabled, 0, "DISABLED_", "");
     54   return test_name_no_disabled;
     55 }
     56 
     57 }  // namespace
     58 
// iteration_ starts at -1 so the first OnTestIterationStarting() call
// advances it to 0, the index of the first per-iteration slot. out_ stays
// NULL until Init() opens the XML output file (if --gtest_output is given).
TestResultsTracker::TestResultsTracker() : iteration_(-1), out_(NULL) {
}
     61 
// The destructor finalizes XML output: if Init() opened an output file, a
// gtest-style XML report for the results of the last iteration is written
// and the file is closed.
TestResultsTracker::~TestResultsTracker() {
  DCHECK(thread_checker_.CalledOnValidThread());

  // No XML output was requested - nothing to write.
  if (!out_)
    return;
  // The aggregate attributes (tests/failures/disabled/errors/time) are
  // written with empty values throughout this report.
  fprintf(out_, "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
  fprintf(out_, "<testsuites name=\"AllTests\" tests=\"\" failures=\"\""
          " disabled=\"\" errors=\"\" time=\"\">\n");

  // Maps test case names to test results.
  // NOTE(review): only the last iteration (index iteration_) is reported.
  // NOTE(review): if no iteration ever ran, iteration_ is still -1 and the
  // vector index below is out of bounds - confirm callers always start an
  // iteration before destroying a tracker with XML output enabled.
  typedef std::map<std::string, std::vector<TestResult> > TestCaseMap;
  TestCaseMap test_case_map;

  for (PerIterationData::ResultsMap::iterator i =
           per_iteration_data_[iteration_].results.begin();
       i != per_iteration_data_[iteration_].results.end();
       ++i) {
    // Use the last test result as the final one.
    TestResult result = i->second.test_results.back();
    test_case_map[result.GetTestCaseName()].push_back(result);
  }
  // One <testsuite> element per test case, one <testcase> per final result.
  for (TestCaseMap::iterator i = test_case_map.begin();
       i != test_case_map.end();
       ++i) {
    fprintf(out_, "  <testsuite name=\"%s\" tests=\"%" PRIuS "\" failures=\"\""
            " disabled=\"\" errors=\"\" time=\"\">\n",
            i->first.c_str(), i->second.size());
    for (size_t j = 0; j < i->second.size(); ++j) {
      const TestResult& result = i->second[j];
      fprintf(out_, "    <testcase name=\"%s\" status=\"run\" time=\"%.3f\""
              " classname=\"%s\">\n",
              result.GetTestName().c_str(),
              result.elapsed_time.InSecondsF(),
              result.GetTestCaseName().c_str());
      // Any non-success status is reported as a generic <failure> element
      // with no message or type details.
      if (result.status != TestResult::TEST_SUCCESS)
        fprintf(out_, "      <failure message=\"\" type=\"\"></failure>\n");
      fprintf(out_, "    </testcase>\n");
    }
    fprintf(out_, "  </testsuite>\n");
  }
  fprintf(out_, "</testsuites>\n");
  fclose(out_);
}
    105 
    106 bool TestResultsTracker::Init(const CommandLine& command_line) {
    107   DCHECK(thread_checker_.CalledOnValidThread());
    108 
    109   // Prevent initializing twice.
    110   if (out_) {
    111     NOTREACHED();
    112     return false;
    113   }
    114 
    115   if (!command_line.HasSwitch(kGTestOutputFlag))
    116     return true;
    117 
    118   std::string flag = command_line.GetSwitchValueASCII(kGTestOutputFlag);
    119   size_t colon_pos = flag.find(':');
    120   FilePath path;
    121   if (colon_pos != std::string::npos) {
    122     FilePath flag_path =
    123         command_line.GetSwitchValuePath(kGTestOutputFlag);
    124     FilePath::StringType path_string = flag_path.value();
    125     path = FilePath(path_string.substr(colon_pos + 1));
    126     // If the given path ends with '/', consider it is a directory.
    127     // Note: This does NOT check that a directory (or file) actually exists
    128     // (the behavior is same as what gtest does).
    129     if (path.EndsWithSeparator()) {
    130       FilePath executable = command_line.GetProgram().BaseName();
    131       path = path.Append(executable.ReplaceExtension(
    132                              FilePath::StringType(FILE_PATH_LITERAL("xml"))));
    133     }
    134   }
    135   if (path.value().empty())
    136     path = FilePath(kDefaultOutputFile);
    137   FilePath dir_name = path.DirName();
    138   if (!DirectoryExists(dir_name)) {
    139     LOG(WARNING) << "The output directory does not exist. "
    140                  << "Creating the directory: " << dir_name.value();
    141     // Create the directory if necessary (because the gtest does the same).
    142     if (!base::CreateDirectory(dir_name)) {
    143       LOG(ERROR) << "Failed to created directory " << dir_name.value();
    144       return false;
    145     }
    146   }
    147   out_ = OpenFile(path, "w");
    148   if (!out_) {
    149     LOG(ERROR) << "Cannot open output file: "
    150                << path.value() << ".";
    151     return false;
    152   }
    153 
    154   return true;
    155 }
    156 
    157 void TestResultsTracker::OnTestIterationStarting() {
    158   DCHECK(thread_checker_.CalledOnValidThread());
    159 
    160   // Start with a fresh state for new iteration.
    161   iteration_++;
    162   per_iteration_data_.push_back(PerIterationData());
    163 }
    164 
    165 void TestResultsTracker::AddTest(const std::string& test_name) {
    166   // Record disabled test names without DISABLED_ prefix so that they are easy
    167   // to compare with regular test names, e.g. before or after disabling.
    168   all_tests_.insert(TestNameWithoutDisabledPrefix(test_name));
    169 }
    170 
    171 void TestResultsTracker::AddDisabledTest(const std::string& test_name) {
    172   // Record disabled test names without DISABLED_ prefix so that they are easy
    173   // to compare with regular test names, e.g. before or after disabling.
    174   disabled_tests_.insert(TestNameWithoutDisabledPrefix(test_name));
    175 }
    176 
    177 void TestResultsTracker::AddTestResult(const TestResult& result) {
    178   DCHECK(thread_checker_.CalledOnValidThread());
    179 
    180   per_iteration_data_[iteration_].results[
    181       result.full_name].test_results.push_back(result);
    182 }
    183 
    184 void TestResultsTracker::PrintSummaryOfCurrentIteration() const {
    185   TestStatusMap tests_by_status(GetTestStatusMapForCurrentIteration());
    186 
    187   PrintTests(tests_by_status[TestResult::TEST_FAILURE].begin(),
    188              tests_by_status[TestResult::TEST_FAILURE].end(),
    189              "failed");
    190   PrintTests(tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].begin(),
    191              tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].end(),
    192              "failed on exit");
    193   PrintTests(tests_by_status[TestResult::TEST_TIMEOUT].begin(),
    194              tests_by_status[TestResult::TEST_TIMEOUT].end(),
    195              "timed out");
    196   PrintTests(tests_by_status[TestResult::TEST_CRASH].begin(),
    197              tests_by_status[TestResult::TEST_CRASH].end(),
    198              "crashed");
    199   PrintTests(tests_by_status[TestResult::TEST_SKIPPED].begin(),
    200              tests_by_status[TestResult::TEST_SKIPPED].end(),
    201              "skipped");
    202   PrintTests(tests_by_status[TestResult::TEST_UNKNOWN].begin(),
    203              tests_by_status[TestResult::TEST_UNKNOWN].end(),
    204              "had unknown result");
    205 }
    206 
    207 void TestResultsTracker::PrintSummaryOfAllIterations() const {
    208   DCHECK(thread_checker_.CalledOnValidThread());
    209 
    210   TestStatusMap tests_by_status(GetTestStatusMapForAllIterations());
    211 
    212   fprintf(stdout, "Summary of all test iterations:\n");
    213   fflush(stdout);
    214 
    215   PrintTests(tests_by_status[TestResult::TEST_FAILURE].begin(),
    216              tests_by_status[TestResult::TEST_FAILURE].end(),
    217              "failed");
    218   PrintTests(tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].begin(),
    219              tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].end(),
    220              "failed on exit");
    221   PrintTests(tests_by_status[TestResult::TEST_TIMEOUT].begin(),
    222              tests_by_status[TestResult::TEST_TIMEOUT].end(),
    223              "timed out");
    224   PrintTests(tests_by_status[TestResult::TEST_CRASH].begin(),
    225              tests_by_status[TestResult::TEST_CRASH].end(),
    226              "crashed");
    227   PrintTests(tests_by_status[TestResult::TEST_SKIPPED].begin(),
    228              tests_by_status[TestResult::TEST_SKIPPED].end(),
    229              "skipped");
    230   PrintTests(tests_by_status[TestResult::TEST_UNKNOWN].begin(),
    231              tests_by_status[TestResult::TEST_UNKNOWN].end(),
    232              "had unknown result");
    233 
    234   fprintf(stdout, "End of the summary.\n");
    235   fflush(stdout);
    236 }
    237 
// Records a tag to be included in the "global_tags" list of the JSON
// summary written by SaveSummaryAsJSON().
void TestResultsTracker::AddGlobalTag(const std::string& tag) {
  global_tags_.insert(tag);
}
    241 
// Writes a JSON summary of the tracked results to |path|. The summary
// contains the global tags, the full and disabled test name lists, and one
// dictionary per iteration mapping full test names to their result lists.
// Returns false if serializing to disk fails.
bool TestResultsTracker::SaveSummaryAsJSON(const FilePath& path) const {
  scoped_ptr<DictionaryValue> summary_root(new DictionaryValue);

  // NOTE(review): the bare |new|s below appear to rely on Set()/Append()
  // taking ownership of the raw pointers (old Value API) - confirm;
  // otherwise these would leak.
  ListValue* global_tags = new ListValue;
  summary_root->Set("global_tags", global_tags);

  for (std::set<std::string>::const_iterator i = global_tags_.begin();
       i != global_tags_.end();
       ++i) {
    global_tags->AppendString(*i);
  }

  // All known test names; disabled names were stored without the DISABLED_
  // prefix (see AddTest()).
  ListValue* all_tests = new ListValue;
  summary_root->Set("all_tests", all_tests);

  for (std::set<std::string>::const_iterator i = all_tests_.begin();
       i != all_tests_.end();
       ++i) {
    all_tests->AppendString(*i);
  }

  ListValue* disabled_tests = new ListValue;
  summary_root->Set("disabled_tests", disabled_tests);

  for (std::set<std::string>::const_iterator i = disabled_tests_.begin();
       i != disabled_tests_.end();
       ++i) {
    disabled_tests->AppendString(*i);
  }

  // One dictionary per iteration (indices 0..iteration_), keyed by full
  // test name; each value is the list of results that test produced in
  // that iteration (a test may run multiple times, e.g. on retry).
  ListValue* per_iteration_data = new ListValue;
  summary_root->Set("per_iteration_data", per_iteration_data);

  for (int i = 0; i <= iteration_; i++) {
    DictionaryValue* current_iteration_data = new DictionaryValue;
    per_iteration_data->Append(current_iteration_data);

    for (PerIterationData::ResultsMap::const_iterator j =
             per_iteration_data_[i].results.begin();
         j != per_iteration_data_[i].results.end();
         ++j) {
      // SetWithoutPathExpansion: the full test name is used as a literal
      // key (test names contain '.', which would otherwise be treated as
      // a path separator).
      ListValue* test_results = new ListValue;
      current_iteration_data->SetWithoutPathExpansion(j->first, test_results);

      for (size_t k = 0; k < j->second.test_results.size(); k++) {
        const TestResult& test_result = j->second.test_results[k];

        DictionaryValue* test_result_value = new DictionaryValue;
        test_results->Append(test_result_value);

        test_result_value->SetString("status", test_result.StatusAsString());
        test_result_value->SetInteger(
            "elapsed_time_ms", test_result.elapsed_time.InMilliseconds());

        // There are no guarantees about character encoding of the output
        // snippet. Escape it and record whether it was lossless (the JSON
        // key keeps the historical "losless" spelling).
        // It's useful to have the output snippet as string in the summary
        // for easy viewing.
        std::string escaped_output_snippet;
        bool losless_snippet = EscapeJSONString(
            test_result.output_snippet, false, &escaped_output_snippet);
        test_result_value->SetString("output_snippet",
                                     escaped_output_snippet);
        test_result_value->SetBoolean("losless_snippet", losless_snippet);

        // Also include the raw version (base64-encoded so that it can be safely
        // JSON-serialized - there are no guarantees about character encoding
        // of the snippet). This can be very useful piece of information when
        // debugging a test failure related to character encoding.
        std::string base64_output_snippet;
        Base64Encode(test_result.output_snippet, &base64_output_snippet);
        test_result_value->SetString("output_snippet_base64",
                                     base64_output_snippet);
      }
    }
  }

  JSONFileValueSerializer serializer(path);
  return serializer.Serialize(*summary_root);
}
    322 
    323 TestResultsTracker::TestStatusMap
    324     TestResultsTracker::GetTestStatusMapForCurrentIteration() const {
    325   TestStatusMap tests_by_status;
    326   GetTestStatusForIteration(iteration_, &tests_by_status);
    327   return tests_by_status;
    328 }
    329 
    330 TestResultsTracker::TestStatusMap
    331     TestResultsTracker::GetTestStatusMapForAllIterations() const {
    332   TestStatusMap tests_by_status;
    333   for (int i = 0; i <= iteration_; i++)
    334     GetTestStatusForIteration(i, &tests_by_status);
    335   return tests_by_status;
    336 }
    337 
    338 void TestResultsTracker::GetTestStatusForIteration(
    339     int iteration, TestStatusMap* map) const {
    340   for (PerIterationData::ResultsMap::const_iterator j =
    341            per_iteration_data_[iteration].results.begin();
    342        j != per_iteration_data_[iteration].results.end();
    343        ++j) {
    344     // Use the last test result as the final one.
    345     const TestResult& result = j->second.test_results.back();
    346     (*map)[result.status].insert(result.full_name);
    347   }
    348 }
    349 
// Out-of-line empty definitions of the nested helper structs' special
// members.
TestResultsTracker::AggregateTestResult::AggregateTestResult() {
}

TestResultsTracker::AggregateTestResult::~AggregateTestResult() {
}

TestResultsTracker::PerIterationData::PerIterationData() {
}

TestResultsTracker::PerIterationData::~PerIterationData() {
}
    361 
    362 }  // namespace base
    363