// Code-search page artifact (not part of the original source):
// Home | History | Annotate | Download | only in launcher
      1 // Copyright 2013 The Chromium Authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "base/test/launcher/test_results_tracker.h"
      6 
      7 #include "base/base64.h"
      8 #include "base/command_line.h"
      9 #include "base/file_util.h"
     10 #include "base/files/file_path.h"
     11 #include "base/format_macros.h"
     12 #include "base/json/json_file_value_serializer.h"
     13 #include "base/json/string_escape.h"
     14 #include "base/logging.h"
     15 #include "base/strings/stringprintf.h"
     16 #include "base/test/launcher/test_launcher.h"
     17 #include "base/values.h"
     18 
     19 namespace base {
     20 
     21 // See https://groups.google.com/a/chromium.org/d/msg/chromium-dev/nkdTP7sstSc/uT3FaE_sgkAJ .
     22 using ::operator<<;
     23 
     24 namespace {
     25 
// The default output file for XML output, used when --gtest_output gives no
// explicit path (matches gtest's own default file name).
const FilePath::CharType kDefaultOutputFile[] = FILE_PATH_LITERAL(
    "test_detail.xml");
     29 
     30 // Utility function to print a list of test names. Uses iterator to be
     31 // compatible with different containers, like vector and set.
     32 template<typename InputIterator>
     33 void PrintTests(InputIterator first,
     34                 InputIterator last,
     35                 const std::string& description) {
     36   size_t count = std::distance(first, last);
     37   if (count == 0)
     38     return;
     39 
     40   fprintf(stdout,
     41           "%" PRIuS " test%s %s:\n",
     42           count,
     43           count != 1 ? "s" : "",
     44           description.c_str());
     45   for (InputIterator i = first; i != last; ++i)
     46     fprintf(stdout, "    %s\n", (*i).c_str());
     47   fflush(stdout);
     48 }
     49 
     50 }  // namespace
     51 
     52 TestResultsTracker::TestResultsTracker() : iteration_(-1), out_(NULL) {
     53 }
     54 
     55 TestResultsTracker::~TestResultsTracker() {
     56   DCHECK(thread_checker_.CalledOnValidThread());
     57 
     58   if (!out_)
     59     return;
     60   fprintf(out_, "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
     61   fprintf(out_, "<testsuites name=\"AllTests\" tests=\"\" failures=\"\""
     62           " disabled=\"\" errors=\"\" time=\"\">\n");
     63 
     64   // Maps test case names to test results.
     65   typedef std::map<std::string, std::vector<TestResult> > TestCaseMap;
     66   TestCaseMap test_case_map;
     67 
     68   for (PerIterationData::ResultsMap::iterator i =
     69            per_iteration_data_[iteration_].results.begin();
     70        i != per_iteration_data_[iteration_].results.end();
     71        ++i) {
     72     // Use the last test result as the final one.
     73     TestResult result = i->second.test_results.back();
     74     test_case_map[result.GetTestCaseName()].push_back(result);
     75   }
     76   for (TestCaseMap::iterator i = test_case_map.begin();
     77        i != test_case_map.end();
     78        ++i) {
     79     fprintf(out_, "  <testsuite name=\"%s\" tests=\"%" PRIuS "\" failures=\"\""
     80             " disabled=\"\" errors=\"\" time=\"\">\n",
     81             i->first.c_str(), i->second.size());
     82     for (size_t j = 0; j < i->second.size(); ++j) {
     83       const TestResult& result = i->second[j];
     84       fprintf(out_, "    <testcase name=\"%s\" status=\"run\" time=\"%.3f\""
     85               " classname=\"%s\">\n",
     86               result.GetTestName().c_str(),
     87               result.elapsed_time.InSecondsF(),
     88               result.GetTestCaseName().c_str());
     89       if (result.status != TestResult::TEST_SUCCESS)
     90         fprintf(out_, "      <failure message=\"\" type=\"\"></failure>\n");
     91       fprintf(out_, "    </testcase>\n");
     92     }
     93     fprintf(out_, "  </testsuite>\n");
     94   }
     95   fprintf(out_, "</testsuites>\n");
     96   fclose(out_);
     97 }
     98 
     99 bool TestResultsTracker::Init(const CommandLine& command_line) {
    100   DCHECK(thread_checker_.CalledOnValidThread());
    101 
    102   // Prevent initializing twice.
    103   if (out_) {
    104     NOTREACHED();
    105     return false;
    106   }
    107 
    108   if (!command_line.HasSwitch(kGTestOutputFlag))
    109     return true;
    110 
    111   std::string flag = command_line.GetSwitchValueASCII(kGTestOutputFlag);
    112   size_t colon_pos = flag.find(':');
    113   FilePath path;
    114   if (colon_pos != std::string::npos) {
    115     FilePath flag_path =
    116         command_line.GetSwitchValuePath(kGTestOutputFlag);
    117     FilePath::StringType path_string = flag_path.value();
    118     path = FilePath(path_string.substr(colon_pos + 1));
    119     // If the given path ends with '/', consider it is a directory.
    120     // Note: This does NOT check that a directory (or file) actually exists
    121     // (the behavior is same as what gtest does).
    122     if (path.EndsWithSeparator()) {
    123       FilePath executable = command_line.GetProgram().BaseName();
    124       path = path.Append(executable.ReplaceExtension(
    125                              FilePath::StringType(FILE_PATH_LITERAL("xml"))));
    126     }
    127   }
    128   if (path.value().empty())
    129     path = FilePath(kDefaultOutputFile);
    130   FilePath dir_name = path.DirName();
    131   if (!DirectoryExists(dir_name)) {
    132     LOG(WARNING) << "The output directory does not exist. "
    133                  << "Creating the directory: " << dir_name.value();
    134     // Create the directory if necessary (because the gtest does the same).
    135     if (!base::CreateDirectory(dir_name)) {
    136       LOG(ERROR) << "Failed to created directory " << dir_name.value();
    137       return false;
    138     }
    139   }
    140   out_ = OpenFile(path, "w");
    141   if (!out_) {
    142     LOG(ERROR) << "Cannot open output file: "
    143                << path.value() << ".";
    144     return false;
    145   }
    146 
    147   return true;
    148 }
    149 
    150 void TestResultsTracker::OnTestIterationStarting() {
    151   DCHECK(thread_checker_.CalledOnValidThread());
    152 
    153   // Start with a fresh state for new iteration.
    154   iteration_++;
    155   per_iteration_data_.push_back(PerIterationData());
    156 }
    157 
    158 void TestResultsTracker::AddTestResult(const TestResult& result) {
    159   DCHECK(thread_checker_.CalledOnValidThread());
    160 
    161   per_iteration_data_[iteration_].results[
    162       result.full_name].test_results.push_back(result);
    163 }
    164 
    165 void TestResultsTracker::PrintSummaryOfCurrentIteration() const {
    166   std::map<TestResult::Status, std::set<std::string> > tests_by_status;
    167 
    168   for (PerIterationData::ResultsMap::const_iterator j =
    169            per_iteration_data_[iteration_].results.begin();
    170        j != per_iteration_data_[iteration_].results.end();
    171        ++j) {
    172     // Use the last test result as the final one.
    173     TestResult result = j->second.test_results.back();
    174     tests_by_status[result.status].insert(result.full_name);
    175   }
    176 
    177   PrintTests(tests_by_status[TestResult::TEST_FAILURE].begin(),
    178              tests_by_status[TestResult::TEST_FAILURE].end(),
    179              "failed");
    180   PrintTests(tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].begin(),
    181              tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].end(),
    182              "failed on exit");
    183   PrintTests(tests_by_status[TestResult::TEST_TIMEOUT].begin(),
    184              tests_by_status[TestResult::TEST_TIMEOUT].end(),
    185              "timed out");
    186   PrintTests(tests_by_status[TestResult::TEST_CRASH].begin(),
    187              tests_by_status[TestResult::TEST_CRASH].end(),
    188              "crashed");
    189   PrintTests(tests_by_status[TestResult::TEST_SKIPPED].begin(),
    190              tests_by_status[TestResult::TEST_SKIPPED].end(),
    191              "skipped");
    192   PrintTests(tests_by_status[TestResult::TEST_UNKNOWN].begin(),
    193              tests_by_status[TestResult::TEST_UNKNOWN].end(),
    194              "had unknown result");
    195 }
    196 
    197 void TestResultsTracker::PrintSummaryOfAllIterations() const {
    198   DCHECK(thread_checker_.CalledOnValidThread());
    199 
    200   std::map<TestResult::Status, std::set<std::string> > tests_by_status;
    201 
    202   for (int i = 0; i <= iteration_; i++) {
    203     for (PerIterationData::ResultsMap::const_iterator j =
    204              per_iteration_data_[i].results.begin();
    205          j != per_iteration_data_[i].results.end();
    206          ++j) {
    207       // Use the last test result as the final one.
    208       TestResult result = j->second.test_results.back();
    209       tests_by_status[result.status].insert(result.full_name);
    210     }
    211   }
    212 
    213   fprintf(stdout, "Summary of all itest iterations:\n");
    214   fflush(stdout);
    215 
    216   PrintTests(tests_by_status[TestResult::TEST_FAILURE].begin(),
    217              tests_by_status[TestResult::TEST_FAILURE].end(),
    218              "failed");
    219   PrintTests(tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].begin(),
    220              tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].end(),
    221              "failed on exit");
    222   PrintTests(tests_by_status[TestResult::TEST_TIMEOUT].begin(),
    223              tests_by_status[TestResult::TEST_TIMEOUT].end(),
    224              "timed out");
    225   PrintTests(tests_by_status[TestResult::TEST_CRASH].begin(),
    226              tests_by_status[TestResult::TEST_CRASH].end(),
    227              "crashed");
    228   PrintTests(tests_by_status[TestResult::TEST_SKIPPED].begin(),
    229              tests_by_status[TestResult::TEST_SKIPPED].end(),
    230              "skipped");
    231   PrintTests(tests_by_status[TestResult::TEST_UNKNOWN].begin(),
    232              tests_by_status[TestResult::TEST_UNKNOWN].end(),
    233              "had unknown result");
    234 
    235   fprintf(stdout, "End of the summary.\n");
    236   fflush(stdout);
    237 }
    238 
    239 void TestResultsTracker::AddGlobalTag(const std::string& tag) {
    240   global_tags_.insert(tag);
    241 }
    242 
// Serializes the global tags and the full per-iteration result history to a
// JSON file at |path|. Returns true on success.
// NOTE(review): this uses raw `new` for the Value nodes - presumably this era
// of base::Value's Set()/Append() takes ownership of the passed pointer;
// confirm against base/values.h before restructuring.
bool TestResultsTracker::SaveSummaryAsJSON(const FilePath& path) const {
  scoped_ptr<DictionaryValue> summary_root(new DictionaryValue);

  ListValue* global_tags = new ListValue;
  summary_root->Set("global_tags", global_tags);

  for (std::set<std::string>::const_iterator i = global_tags_.begin();
       i != global_tags_.end();
       ++i) {
    global_tags->AppendString(*i);
  }

  // One dictionary per iteration, keyed by full test name.
  ListValue* per_iteration_data = new ListValue;
  summary_root->Set("per_iteration_data", per_iteration_data);

  for (int i = 0; i <= iteration_; i++) {
    DictionaryValue* current_iteration_data = new DictionaryValue;
    per_iteration_data->Append(current_iteration_data);

    for (PerIterationData::ResultsMap::const_iterator j =
             per_iteration_data_[i].results.begin();
         j != per_iteration_data_[i].results.end();
         ++j) {
      // SetWithoutPathExpansion: test names contain '.', which would
      // otherwise be treated as a nested-dictionary path separator.
      ListValue* test_results = new ListValue;
      current_iteration_data->SetWithoutPathExpansion(j->first, test_results);

      // Every recorded run of the test (e.g. retries), in order.
      for (size_t k = 0; k < j->second.test_results.size(); k++) {
        const TestResult& test_result = j->second.test_results[k];

        DictionaryValue* test_result_value = new DictionaryValue;
        test_results->Append(test_result_value);

        test_result_value->SetString("status", test_result.StatusAsString());
        test_result_value->SetInteger(
            "elapsed_time_ms", test_result.elapsed_time.InMilliseconds());

        // There are no guarantees about character encoding of the output
        // snippet. Escape it and record whether it was lossless.
        // It's useful to have the output snippet as string in the summary
        // for easy viewing.
        std::string escaped_output_snippet;
        bool losless_snippet = EscapeJSONString(
            test_result.output_snippet, false, &escaped_output_snippet);
        test_result_value->SetString("output_snippet",
                                     escaped_output_snippet);
        // NOTE(review): "losless" [sic] - the JSON key misspells "lossless",
        // but consumers presumably parse this exact name, so it must not be
        // changed silently.
        test_result_value->SetBoolean("losless_snippet", losless_snippet);

        // Also include the raw version (base64-encoded so that it can be safely
        // JSON-serialized - there are no guarantees about character encoding
        // of the snippet). This can be very useful piece of information when
        // debugging a test failure related to character encoding.
        std::string base64_output_snippet;
        Base64Encode(test_result.output_snippet, &base64_output_snippet);
        test_result_value->SetString("output_snippet_base64",
                                     base64_output_snippet);
      }
    }
  }

  JSONFileValueSerializer serializer(path);
  return serializer.Serialize(*summary_root);
}
    305 
// Intentionally empty; defined out of line rather than in the header.
TestResultsTracker::AggregateTestResult::AggregateTestResult() {
}

TestResultsTracker::AggregateTestResult::~AggregateTestResult() {
}
    311 
// Intentionally empty; defined out of line rather than in the header.
TestResultsTracker::PerIterationData::PerIterationData() {
}

TestResultsTracker::PerIterationData::~PerIterationData() {
}
    317 
    318 }  // namespace base
    319