// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/test/launcher/test_results_tracker.h"

#include "base/base64.h"
#include "base/command_line.h"
#include "base/files/file_path.h"
#include "base/files/file_util.h"
#include "base/format_macros.h"
#include "base/json/json_file_value_serializer.h"
#include "base/json/string_escape.h"
#include "base/logging.h"
#include "base/strings/string_util.h"
#include "base/strings/stringprintf.h"
#include "base/test/launcher/test_launcher.h"
#include "base/values.h"

namespace base {

namespace {

// The default output file for XML output.
const FilePath::CharType kDefaultOutputFile[] = FILE_PATH_LITERAL(
    "test_detail.xml");

// Utility function to print a list of test names. Uses iterators so that it
// works with different containers, such as vector and set.
template<typename InputIterator>
void PrintTests(InputIterator first,
                InputIterator last,
                const std::string& description) {
  size_t count = std::distance(first, last);
  if (count == 0)
    return;

  fprintf(stdout,
          "%" PRIuS " test%s %s:\n",
          count,
          count != 1 ? "s" : "",
          description.c_str());
  for (InputIterator i = first; i != last; ++i)
    fprintf(stdout, "    %s\n", (*i).c_str());
  fflush(stdout);
}
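
// Example output of PrintTests() above, with two failed tests (names and
// counts are illustrative only):
//
//   2 tests failed:
//       FooTest.Bar
//       FooTest.Baz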

std::string TestNameWithoutDisabledPrefix(const std::string& test_name) {
  std::string test_name_no_disabled(test_name);
  ReplaceSubstringsAfterOffset(&test_name_no_disabled, 0, "DISABLED_", "");
  return test_name_no_disabled;
}
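
// For example, TestNameWithoutDisabledPrefix("FooTest.DISABLED_Bar") returns
// "FooTest.Bar". Every occurrence of the prefix is removed, so a disabled
// suite name such as "DISABLED_FooTest.DISABLED_Bar" also maps to
// "FooTest.Bar".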

}  // namespace

TestResultsTracker::TestResultsTracker() : iteration_(-1), out_(NULL) {
}

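// Writes the results of the last iteration as gtest-style XML, if Init()
// opened an output file. A rough sketch of the emitted structure (several
// attributes are written with empty values, matching the fprintf calls
// below; test and suite names are illustrative):
//
//   <?xml version="1.0" encoding="UTF-8"?>
//   <testsuites name="AllTests" tests="" failures="" disabled="" ...>
//     <testsuite name="FooTest" tests="2" failures="" ...>
//       <testcase name="Bar" status="run" time="0.123" classname="FooTest">
//         <failure message="" type=""></failure>  <!-- non-success only -->
//       </testcase>
//     </testsuite>
//   </testsuites>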
TestResultsTracker::~TestResultsTracker() {
  DCHECK(thread_checker_.CalledOnValidThread());

  if (!out_)
    return;
  fprintf(out_, "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
  fprintf(out_, "<testsuites name=\"AllTests\" tests=\"\" failures=\"\""
          " disabled=\"\" errors=\"\" time=\"\">\n");

  // Maps test case names to test results.
  typedef std::map<std::string, std::vector<TestResult> > TestCaseMap;
  TestCaseMap test_case_map;

  for (PerIterationData::ResultsMap::iterator i =
           per_iteration_data_[iteration_].results.begin();
       i != per_iteration_data_[iteration_].results.end();
       ++i) {
    // Use the last test result as the final one.
    TestResult result = i->second.test_results.back();
    test_case_map[result.GetTestCaseName()].push_back(result);
  }
  for (TestCaseMap::iterator i = test_case_map.begin();
       i != test_case_map.end();
       ++i) {
    fprintf(out_, "  <testsuite name=\"%s\" tests=\"%" PRIuS "\" failures=\"\""
            " disabled=\"\" errors=\"\" time=\"\">\n",
            i->first.c_str(), i->second.size());
    for (size_t j = 0; j < i->second.size(); ++j) {
      const TestResult& result = i->second[j];
      fprintf(out_, "    <testcase name=\"%s\" status=\"run\" time=\"%.3f\""
              " classname=\"%s\">\n",
              result.GetTestName().c_str(),
              result.elapsed_time.InSecondsF(),
              result.GetTestCaseName().c_str());
      if (result.status != TestResult::TEST_SUCCESS)
        fprintf(out_, "      <failure message=\"\" type=\"\"></failure>\n");
      fprintf(out_, "    </testcase>\n");
    }
    fprintf(out_, "  </testsuite>\n");
  }
  fprintf(out_, "</testsuites>\n");
  fclose(out_);
}

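// Parses the gtest-compatible output flag. Typically the value has the form
// "xml:<path>", e.g. "--gtest_output=xml:results.xml" for a file or
// "--gtest_output=xml:some/dir/" for a directory (in which case the file name
// is derived from the executable name); without a path, kDefaultOutputFile is
// used. The example flag values here are illustrative, not taken from this
// file.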
bool TestResultsTracker::Init(const CommandLine& command_line) {
  DCHECK(thread_checker_.CalledOnValidThread());

  // Prevent initializing twice.
  if (out_) {
    NOTREACHED();
    return false;
  }

  if (!command_line.HasSwitch(kGTestOutputFlag))
    return true;

  std::string flag = command_line.GetSwitchValueASCII(kGTestOutputFlag);
  size_t colon_pos = flag.find(':');
  FilePath path;
  if (colon_pos != std::string::npos) {
    FilePath flag_path =
        command_line.GetSwitchValuePath(kGTestOutputFlag);
    FilePath::StringType path_string = flag_path.value();
    path = FilePath(path_string.substr(colon_pos + 1));
    // If the given path ends with '/', consider it a directory.
    // Note: this does NOT check that the directory (or file) actually exists;
    // the behavior matches what gtest does.
    if (path.EndsWithSeparator()) {
      FilePath executable = command_line.GetProgram().BaseName();
      path = path.Append(executable.ReplaceExtension(
                             FilePath::StringType(FILE_PATH_LITERAL("xml"))));
    }
  }
  if (path.value().empty())
    path = FilePath(kDefaultOutputFile);
  FilePath dir_name = path.DirName();
  if (!DirectoryExists(dir_name)) {
    LOG(WARNING) << "The output directory does not exist. "
                 << "Creating the directory: " << dir_name.value();
    // Create the directory if necessary (gtest does the same).
    if (!base::CreateDirectory(dir_name)) {
      LOG(ERROR) << "Failed to create directory " << dir_name.value();
      return false;
    }
  }
  out_ = OpenFile(path, "w");
  if (!out_) {
    LOG(ERROR) << "Cannot open output file: "
               << path.value() << ".";
    return false;
  }

  return true;
}

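// A minimal sketch of the expected call sequence for one test run (the real
// driver lives in TestLauncher; the variable names below are hypothetical):
//
//   TestResultsTracker tracker;
//   tracker.Init(*CommandLine::ForCurrentProcess());
//   tracker.OnTestIterationStarting();         // once per iteration
//   tracker.AddTestResult(result);             // once per executed test
//   tracker.PrintSummaryOfCurrentIteration();
//   tracker.SaveSummaryAsJSON(summary_path);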
void TestResultsTracker::OnTestIterationStarting() {
  DCHECK(thread_checker_.CalledOnValidThread());

  // Start with a fresh state for the new iteration.
  iteration_++;
  per_iteration_data_.push_back(PerIterationData());
}

void TestResultsTracker::AddTest(const std::string& test_name) {
  // Record test names without the DISABLED_ prefix so that they are easy
  // to compare with regular test names, e.g. before or after disabling.
  all_tests_.insert(TestNameWithoutDisabledPrefix(test_name));
}

void TestResultsTracker::AddDisabledTest(const std::string& test_name) {
  // Record disabled test names without the DISABLED_ prefix so that they are
  // easy to compare with regular test names, e.g. before or after disabling.
  disabled_tests_.insert(TestNameWithoutDisabledPrefix(test_name));
}

void TestResultsTracker::AddTestResult(const TestResult& result) {
  DCHECK(thread_checker_.CalledOnValidThread());

  per_iteration_data_[iteration_].results[
      result.full_name].test_results.push_back(result);
}

void TestResultsTracker::PrintSummaryOfCurrentIteration() const {
  TestStatusMap tests_by_status(GetTestStatusMapForCurrentIteration());

  PrintTests(tests_by_status[TestResult::TEST_FAILURE].begin(),
             tests_by_status[TestResult::TEST_FAILURE].end(),
             "failed");
  PrintTests(tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].begin(),
             tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].end(),
             "failed on exit");
  PrintTests(tests_by_status[TestResult::TEST_TIMEOUT].begin(),
             tests_by_status[TestResult::TEST_TIMEOUT].end(),
             "timed out");
  PrintTests(tests_by_status[TestResult::TEST_CRASH].begin(),
             tests_by_status[TestResult::TEST_CRASH].end(),
             "crashed");
  PrintTests(tests_by_status[TestResult::TEST_SKIPPED].begin(),
             tests_by_status[TestResult::TEST_SKIPPED].end(),
             "skipped");
  PrintTests(tests_by_status[TestResult::TEST_UNKNOWN].begin(),
             tests_by_status[TestResult::TEST_UNKNOWN].end(),
             "had unknown result");
}

void TestResultsTracker::PrintSummaryOfAllIterations() const {
  DCHECK(thread_checker_.CalledOnValidThread());

  TestStatusMap tests_by_status(GetTestStatusMapForAllIterations());

  fprintf(stdout, "Summary of all test iterations:\n");
  fflush(stdout);

  PrintTests(tests_by_status[TestResult::TEST_FAILURE].begin(),
             tests_by_status[TestResult::TEST_FAILURE].end(),
             "failed");
  PrintTests(tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].begin(),
             tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].end(),
             "failed on exit");
  PrintTests(tests_by_status[TestResult::TEST_TIMEOUT].begin(),
             tests_by_status[TestResult::TEST_TIMEOUT].end(),
             "timed out");
  PrintTests(tests_by_status[TestResult::TEST_CRASH].begin(),
             tests_by_status[TestResult::TEST_CRASH].end(),
             "crashed");
  PrintTests(tests_by_status[TestResult::TEST_SKIPPED].begin(),
             tests_by_status[TestResult::TEST_SKIPPED].end(),
             "skipped");
  PrintTests(tests_by_status[TestResult::TEST_UNKNOWN].begin(),
             tests_by_status[TestResult::TEST_UNKNOWN].end(),
             "had unknown result");

  fprintf(stdout, "End of the summary.\n");
  fflush(stdout);
}

void TestResultsTracker::AddGlobalTag(const std::string& tag) {
  global_tags_.insert(tag);
}

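// A rough sketch of the JSON summary written below. Key names match the code
// (including the "losless_snippet" spelling); the values shown are made up:
//
//   {
//     "global_tags": ["SOME_TAG"],
//     "all_tests": ["FooTest.Bar", "FooTest.Baz"],
//     "disabled_tests": ["FooTest.Qux"],
//     "per_iteration_data": [
//       {
//         "FooTest.Bar": [
//           {
//             "status": "SUCCESS",
//             "elapsed_time_ms": 123,
//             "output_snippet": "...",
//             "losless_snippet": true,
//             "output_snippet_base64": "..."
//           }
//         ]
//       }
//     ]
//   }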
bool TestResultsTracker::SaveSummaryAsJSON(const FilePath& path) const {
  scoped_ptr<DictionaryValue> summary_root(new DictionaryValue);

  ListValue* global_tags = new ListValue;
  summary_root->Set("global_tags", global_tags);

  for (std::set<std::string>::const_iterator i = global_tags_.begin();
       i != global_tags_.end();
       ++i) {
    global_tags->AppendString(*i);
  }

  ListValue* all_tests = new ListValue;
  summary_root->Set("all_tests", all_tests);

  for (std::set<std::string>::const_iterator i = all_tests_.begin();
       i != all_tests_.end();
       ++i) {
    all_tests->AppendString(*i);
  }

  ListValue* disabled_tests = new ListValue;
  summary_root->Set("disabled_tests", disabled_tests);

  for (std::set<std::string>::const_iterator i = disabled_tests_.begin();
       i != disabled_tests_.end();
       ++i) {
    disabled_tests->AppendString(*i);
  }

  ListValue* per_iteration_data = new ListValue;
  summary_root->Set("per_iteration_data", per_iteration_data);

  for (int i = 0; i <= iteration_; i++) {
    DictionaryValue* current_iteration_data = new DictionaryValue;
    per_iteration_data->Append(current_iteration_data);

    for (PerIterationData::ResultsMap::const_iterator j =
             per_iteration_data_[i].results.begin();
         j != per_iteration_data_[i].results.end();
         ++j) {
      ListValue* test_results = new ListValue;
      current_iteration_data->SetWithoutPathExpansion(j->first, test_results);

      for (size_t k = 0; k < j->second.test_results.size(); k++) {
        const TestResult& test_result = j->second.test_results[k];

        DictionaryValue* test_result_value = new DictionaryValue;
        test_results->Append(test_result_value);

        test_result_value->SetString("status", test_result.StatusAsString());
        test_result_value->SetInteger(
            "elapsed_time_ms", test_result.elapsed_time.InMilliseconds());

        // There are no guarantees about the character encoding of the output
        // snippet. Escape it and record whether the escaping was lossless.
        // It's useful to have the output snippet as a string in the summary
        // for easy viewing.
        std::string escaped_output_snippet;
        bool losless_snippet = EscapeJSONString(
            test_result.output_snippet, false, &escaped_output_snippet);
        test_result_value->SetString("output_snippet",
                                     escaped_output_snippet);
        test_result_value->SetBoolean("losless_snippet", losless_snippet);

        // Also include the raw version (base64-encoded so that it can be
        // safely JSON-serialized - there are no guarantees about the character
        // encoding of the snippet). This can be a very useful piece of
        // information when debugging a test failure related to character
        // encoding.
        std::string base64_output_snippet;
        Base64Encode(test_result.output_snippet, &base64_output_snippet);
        test_result_value->SetString("output_snippet_base64",
                                     base64_output_snippet);
      }
    }
  }

  JSONFileValueSerializer serializer(path);
  return serializer.Serialize(*summary_root);
}

TestResultsTracker::TestStatusMap
    TestResultsTracker::GetTestStatusMapForCurrentIteration() const {
  TestStatusMap tests_by_status;
  GetTestStatusForIteration(iteration_, &tests_by_status);
  return tests_by_status;
}

TestResultsTracker::TestStatusMap
    TestResultsTracker::GetTestStatusMapForAllIterations() const {
  TestStatusMap tests_by_status;
  for (int i = 0; i <= iteration_; i++)
    GetTestStatusForIteration(i, &tests_by_status);
  return tests_by_status;
}

void TestResultsTracker::GetTestStatusForIteration(
    int iteration, TestStatusMap* map) const {
  for (PerIterationData::ResultsMap::const_iterator j =
           per_iteration_data_[iteration].results.begin();
       j != per_iteration_data_[iteration].results.end();
       ++j) {
    // Use the last test result as the final one.
    const TestResult& result = j->second.test_results.back();
    (*map)[result.status].insert(result.full_name);
  }
}

TestResultsTracker::AggregateTestResult::AggregateTestResult() {
}

TestResultsTracker::AggregateTestResult::~AggregateTestResult() {
}

TestResultsTracker::PerIterationData::PerIterationData() {
}

TestResultsTracker::PerIterationData::~PerIterationData() {
}

}  // namespace base