/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <gtest/gtest.h>

#include <ctype.h>
#include <errno.h>
#include <fcntl.h>
#include <inttypes.h>
#include <libgen.h>
#include <limits.h>
#include <signal.h>
#include <spawn.h>
#include <stdarg.h>
#include <stdio.h>
#include <string.h>
#include <sys/wait.h>
#include <unistd.h>

#include <chrono>
#include <string>
#include <tuple>
#include <utility>
#include <vector>

#include <android-base/file.h>
#include <android-base/strings.h>
#include <android-base/unique_fd.h>

#ifndef TEMP_FAILURE_RETRY

/* Used to retry syscalls that can return EINTR. */
#define TEMP_FAILURE_RETRY(exp) ({         \
    __typeof__(exp) _rc;                   \
    do {                                   \
        _rc = (exp);                       \
    } while (_rc == -1 && errno == EINTR); \
    _rc; })

#endif

static std::string g_executable_path;
static int g_argc;
static char** g_argv;
static char** g_envp;

const std::string& get_executable_path() {
  return g_executable_path;
}

int get_argc() {
  return g_argc;
}

char** get_argv() {
  return g_argv;
}

char** get_envp() {
  return g_envp;
}

namespace testing {
namespace internal {

// Reuse of testing::internal::ColoredPrintf in gtest.
enum GTestColor {
  COLOR_DEFAULT,
  COLOR_RED,
  COLOR_GREEN,
  COLOR_YELLOW
};

void ColoredPrintf(GTestColor color, const char* fmt, ...);

}  // namespace internal
}  // namespace testing

using testing::internal::GTestColor;
using testing::internal::COLOR_RED;
using testing::internal::COLOR_GREEN;
using testing::internal::COLOR_YELLOW;
using testing::internal::ColoredPrintf;

constexpr int DEFAULT_GLOBAL_TEST_RUN_DEADLINE_MS = 90000;
constexpr int DEFAULT_GLOBAL_TEST_RUN_SLOW_THRESHOLD_MS = 2000;

// The time each test is allowed to run before it is killed for timing out.
// It takes effect only with the --isolate option.
static int global_test_run_deadline_ms = DEFAULT_GLOBAL_TEST_RUN_DEADLINE_MS;

// The time each test is allowed to run before it is reported as slow.
// It takes effect only with the --isolate option.
static int global_test_run_slow_threshold_ms = DEFAULT_GLOBAL_TEST_RUN_SLOW_THRESHOLD_MS;

// Return timeout duration for a test, in ms.
static int GetTimeoutMs(const std::string& /*test_name*/) {
  return global_test_run_deadline_ms;
}

// Return threshold for calling a test slow, in ms.
static int GetSlowThresholdMs(const std::string& /*test_name*/) {
  return global_test_run_slow_threshold_ms;
}

static void PrintHelpInfo() {
  printf("Bionic Unit Test Options:\n"
         "  -j [JOB_COUNT] or -j[JOB_COUNT]\n"
         "      Run up to JOB_COUNT tests in parallel.\n"
         "      Use isolation mode, running each test in a separate process.\n"
         "      If JOB_COUNT is not given, it is set to the count of available processors.\n"
         "  --no-isolate\n"
         "      Don't use isolation mode, run all tests in a single process.\n"
         "  --deadline=[TIME_IN_MS]\n"
         "      Run each test in no longer than [TIME_IN_MS].\n"
         "      Only valid in isolation mode. Default deadline is 90000 ms.\n"
         "  --slow-threshold=[TIME_IN_MS]\n"
         "      Tests running longer than [TIME_IN_MS] will be reported as slow.\n"
         "      Only valid in isolation mode. Default slow threshold is 2000 ms.\n"
         "  --gtest-filter=POSITIVE_PATTERNS[-NEGATIVE_PATTERNS]\n"
         "      A synonym for the --gtest_filter option in gtest.\n"
         "The default bionic unit test option is -j.\n"
         "In isolation mode, you can send SIGQUIT to the parent process to show the currently\n"
         "running tests, or send SIGINT to the parent process to stop testing and\n"
         "clean up the currently running tests.\n"
         "\n");
}
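
// Example invocation (the binary name and filter pattern here are illustrative only):
//   ./bionic-unit-tests -j8 --deadline=30000 --gtest-filter=time*
// runs the matching tests in isolation mode, up to eight at a time, with a 30 s per-test deadline.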

enum TestResult {
  TEST_SUCCESS = 0,
  TEST_FAILED,
  TEST_TIMEOUT
};

class Test {
 public:
  Test() {} // For std::vector<Test>.
  explicit Test(const char* name) : name_(name) {}

  const std::string& GetName() const { return name_; }

  void SetResult(TestResult result) {
    // Native xfails are inherently likely to actually be relying on undefined
    // behavior/uninitialized memory, and thus likely to pass from time to time
    // on CTS. Avoid that unpleasantness by just rewriting all xfail failures
    // as successes. You'll still see the actual failure details.
    if (GetName().find("xfail") == 0) result = TEST_SUCCESS;
    result_ = result;
  }

  TestResult GetResult() const { return result_; }

  void SetTestTime(int64_t elapsed_time_ns) { elapsed_time_ns_ = elapsed_time_ns; }

  int64_t GetTestTime() const { return elapsed_time_ns_; }

  void AppendTestOutput(const std::string& s) { output_ += s; }

  const std::string& GetTestOutput() const { return output_; }

 private:
  const std::string name_;
  TestResult result_;
  int64_t elapsed_time_ns_;
  std::string output_;
};

class TestCase {
 public:
  TestCase() {} // For std::vector<TestCase>.
  explicit TestCase(const char* name) : name_(name) {}

  const std::string& GetName() const { return name_; }

  void AppendTest(const char* test_name) {
    test_list_.push_back(Test(test_name));
  }

  size_t TestCount() const { return test_list_.size(); }

  std::string GetTestName(size_t test_id) const {
    VerifyTestId(test_id);
    return name_ + "." + test_list_[test_id].GetName();
  }

  Test& GetTest(size_t test_id) {
    VerifyTestId(test_id);
    return test_list_[test_id];
  }

  const Test& GetTest(size_t test_id) const {
    VerifyTestId(test_id);
    return test_list_[test_id];
  }

  void SetTestResult(size_t test_id, TestResult result) {
    VerifyTestId(test_id);
    test_list_[test_id].SetResult(result);
  }

  TestResult GetTestResult(size_t test_id) const {
    VerifyTestId(test_id);
    return test_list_[test_id].GetResult();
  }

  bool GetTestSuccess(size_t test_id) const {
    return GetTestResult(test_id) == TEST_SUCCESS;
  }

  void SetTestTime(size_t test_id, int64_t elapsed_time_ns) {
    VerifyTestId(test_id);
    test_list_[test_id].SetTestTime(elapsed_time_ns);
  }

  int64_t GetTestTime(size_t test_id) const {
    VerifyTestId(test_id);
    return test_list_[test_id].GetTestTime();
  }

 private:
  void VerifyTestId(size_t test_id) const {
    if (test_id >= test_list_.size()) {
      fprintf(stderr, "test_id %zu out of range [0, %zu)\n", test_id, test_list_.size());
      exit(1);
    }
  }

 private:
  const std::string name_;
  std::vector<Test> test_list_;
};

class TestResultPrinter : public testing::EmptyTestEventListener {
 public:
  TestResultPrinter() : pinfo_(NULL) {}
  virtual void OnTestStart(const testing::TestInfo& test_info) {
    pinfo_ = &test_info; // Record test_info for use in OnTestPartResult.
  }
  virtual void OnTestPartResult(const testing::TestPartResult& result);

 private:
  const testing::TestInfo* pinfo_;
};

// Called after an assertion failure.
void TestResultPrinter::OnTestPartResult(const testing::TestPartResult& result) {
  // If the test part succeeded, we don't need to do anything.
  if (result.type() == testing::TestPartResult::kSuccess)
    return;

  // Print failure message from the assertion (e.g. expected this and got that).
  printf("%s:(%d) Failure in test %s.%s\n%s\n", result.file_name(), result.line_number(),
         pinfo_->test_case_name(), pinfo_->name(), result.message());
  fflush(stdout);
}

static int64_t NanoTime() {
  std::chrono::nanoseconds duration(std::chrono::steady_clock::now().time_since_epoch());
  return static_cast<int64_t>(duration.count());
}

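// Enumerate all gtest tests by re-running this same binary with --gtest_list_tests appended
// and parsing its output: a line ending with '.' names a test case, and each following
// (indented) line names a test inside that case. For example, the child's output looks like:
//   MathTest.
//     Addition
//     Subtraction
// (Test case and test names here are illustrative.)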
static bool EnumerateTests(int argc, char** argv, std::vector<TestCase>& testcase_list) {
  std::vector<const char*> args(argv, argv + argc);
  args.push_back("--gtest_list_tests");
  args.push_back(nullptr);

  // We use posix_spawn(3) rather than the simpler popen(3) because we don't want an intervening
  // surprise shell invocation making quoting interesting for --gtest_filter (http://b/68949647).

  android::base::unique_fd read_fd;
  android::base::unique_fd write_fd;
  if (!android::base::Pipe(&read_fd, &write_fd)) {
    perror("pipe");
    return false;
  }

  posix_spawn_file_actions_t fa;
  posix_spawn_file_actions_init(&fa);
  posix_spawn_file_actions_addclose(&fa, read_fd);
  posix_spawn_file_actions_adddup2(&fa, write_fd, 1);
  posix_spawn_file_actions_adddup2(&fa, write_fd, 2);
  posix_spawn_file_actions_addclose(&fa, write_fd);

  pid_t pid;
  int result = posix_spawnp(&pid, argv[0], &fa, nullptr, const_cast<char**>(args.data()), nullptr);
  posix_spawn_file_actions_destroy(&fa);
  if (result != 0) {
    // posix_spawnp(3) returns an error number on failure rather than setting errno.
    errno = result;
    perror("posix_spawnp");
    return false;
  }
  write_fd.reset();

  std::string content;
  if (!android::base::ReadFdToString(read_fd, &content)) {
    perror("ReadFdToString");
    return false;
  }

  for (auto& line : android::base::Split(content, "\n")) {
    line = android::base::Split(line, "#")[0];
    line = android::base::Trim(line);
    if (line.empty()) continue;
    if (android::base::EndsWith(line, ".")) {
      line.pop_back();
      testcase_list.push_back(TestCase(line.c_str()));
    } else {
      testcase_list.back().AppendTest(line.c_str());
    }
  }

  int status;
  if (TEMP_FAILURE_RETRY(waitpid(pid, &status, 0)) != pid) {
    perror("waitpid");
    return false;
  }
  return (WIFEXITED(status) && WEXITSTATUS(status) == 0);
}

// Parts of the following *Print functions are copied from external/gtest/src/gtest.cc:
// PrettyUnitTestResultPrinter. The reason for copying is that PrettyUnitTestResultPrinter
// is defined and used only inside gtest.cc, which makes it hard to reuse.
static void OnTestIterationStartPrint(const std::vector<TestCase>& testcase_list, size_t iteration,
                                      int iteration_count, size_t job_count) {
  if (iteration_count != 1) {
    printf("\nRepeating all tests (iteration %zu) . . .\n\n", iteration);
  }
  ColoredPrintf(COLOR_GREEN,  "[==========] ");

  size_t testcase_count = testcase_list.size();
  size_t test_count = 0;
  for (const auto& testcase : testcase_list) {
    test_count += testcase.TestCount();
  }

  printf("Running %zu %s from %zu %s (%zu %s).\n",
         test_count, (test_count == 1) ? "test" : "tests",
         testcase_count, (testcase_count == 1) ? "test case" : "test cases",
         job_count, (job_count == 1) ? "job" : "jobs");
  fflush(stdout);
}

// bionic cts test needs gtest output format.
#if defined(USING_GTEST_OUTPUT_FORMAT)

static void OnTestEndPrint(const TestCase& testcase, size_t test_id) {
  ColoredPrintf(COLOR_GREEN, "[ RUN      ] ");
  printf("%s\n", testcase.GetTestName(test_id).c_str());

  const std::string& test_output = testcase.GetTest(test_id).GetTestOutput();
  printf("%s", test_output.c_str());

  TestResult result = testcase.GetTestResult(test_id);
  if (result == TEST_SUCCESS) {
    ColoredPrintf(COLOR_GREEN, "[       OK ] ");
  } else {
    ColoredPrintf(COLOR_RED, "[  FAILED  ] ");
  }
  printf("%s", testcase.GetTestName(test_id).c_str());
  if (testing::GTEST_FLAG(print_time)) {
    printf(" (%" PRId64 " ms)", testcase.GetTestTime(test_id) / 1000000);
  }
  printf("\n");
  fflush(stdout);
}

#else  // !defined(USING_GTEST_OUTPUT_FORMAT)

static void OnTestEndPrint(const TestCase& testcase, size_t test_id) {
  TestResult result = testcase.GetTestResult(test_id);
  if (result == TEST_SUCCESS) {
    ColoredPrintf(COLOR_GREEN, "[    OK    ] ");
  } else if (result == TEST_FAILED) {
    ColoredPrintf(COLOR_RED, "[  FAILED  ] ");
  } else if (result == TEST_TIMEOUT) {
    ColoredPrintf(COLOR_RED, "[ TIMEOUT  ] ");
  }

  printf("%s", testcase.GetTestName(test_id).c_str());
  if (testing::GTEST_FLAG(print_time)) {
    printf(" (%" PRId64 " ms)", testcase.GetTestTime(test_id) / 1000000);
  }
  printf("\n");

  const std::string& test_output = testcase.GetTest(test_id).GetTestOutput();
  printf("%s", test_output.c_str());
  fflush(stdout);
}

#endif  // !defined(USING_GTEST_OUTPUT_FORMAT)

static void OnTestIterationEndPrint(const std::vector<TestCase>& testcase_list, size_t /*iteration*/,
                                    int64_t elapsed_time_ns) {

  std::vector<std::string> fail_test_name_list;
  std::vector<std::pair<std::string, int64_t>> timeout_test_list;

  // For tests that were slow but didn't time out.
  std::vector<std::tuple<std::string, int64_t, int>> slow_test_list;
  size_t testcase_count = testcase_list.size();
  size_t test_count = 0;
  size_t success_test_count = 0;
  size_t expected_failure_count = 0;

  for (const auto& testcase : testcase_list) {
    test_count += testcase.TestCount();
    for (size_t i = 0; i < testcase.TestCount(); ++i) {
      TestResult result = testcase.GetTestResult(i);
      if (result == TEST_TIMEOUT) {
        timeout_test_list.push_back(
            std::make_pair(testcase.GetTestName(i), testcase.GetTestTime(i)));
      } else if (result == TEST_SUCCESS) {
        ++success_test_count;
        if (testcase.GetTestName(i).find(".xfail_") != std::string::npos) ++expected_failure_count;
      } else if (result == TEST_FAILED) {
        fail_test_name_list.push_back(testcase.GetTestName(i));
      }
      if (result != TEST_TIMEOUT &&
          testcase.GetTestTime(i) / 1000000 >= GetSlowThresholdMs(testcase.GetTestName(i))) {
        slow_test_list.push_back(std::make_tuple(testcase.GetTestName(i),
                                                 testcase.GetTestTime(i),
                                                 GetSlowThresholdMs(testcase.GetTestName(i))));
      }
    }
  }

  ColoredPrintf(COLOR_GREEN,  "[==========] ");
  printf("%zu %s from %zu %s ran.", test_count, (test_count == 1) ? "test" : "tests",
                                    testcase_count, (testcase_count == 1) ? "test case" : "test cases");
  if (testing::GTEST_FLAG(print_time)) {
    printf(" (%" PRId64 " ms total)", elapsed_time_ns / 1000000);
  }
  printf("\n");
  ColoredPrintf(COLOR_GREEN,  "[   PASS   ] ");
  printf("%zu %s.", success_test_count, (success_test_count == 1) ? "test" : "tests");
  if (expected_failure_count > 0) {
    printf(" (%zu expected failure%s.)", expected_failure_count,
           (expected_failure_count == 1) ? "" : "s");
  }
  printf("\n");

  // Print tests that timed out.
  size_t timeout_test_count = timeout_test_list.size();
  if (timeout_test_count > 0) {
    ColoredPrintf(COLOR_RED, "[ TIMEOUT  ] ");
    printf("%zu %s, listed below:\n", timeout_test_count, (timeout_test_count == 1) ? "test" : "tests");
    for (const auto& timeout_pair : timeout_test_list) {
      ColoredPrintf(COLOR_RED, "[ TIMEOUT  ] ");
      printf("%s (stopped at %" PRId64 " ms)\n", timeout_pair.first.c_str(),
                                                 timeout_pair.second / 1000000);
    }
  }

  // Print tests that were slow.
  size_t slow_test_count = slow_test_list.size();
  if (slow_test_count > 0) {
    ColoredPrintf(COLOR_YELLOW, "[   SLOW   ] ");
    printf("%zu %s, listed below:\n", slow_test_count, (slow_test_count == 1) ? "test" : "tests");
    for (const auto& slow_tuple : slow_test_list) {
      ColoredPrintf(COLOR_YELLOW, "[   SLOW   ] ");
      printf("%s (%" PRId64 " ms, exceeded %d ms)\n", std::get<0>(slow_tuple).c_str(),
             std::get<1>(slow_tuple) / 1000000, std::get<2>(slow_tuple));
    }
  }

  // Print tests that failed.
  size_t fail_test_count = fail_test_name_list.size();
  if (fail_test_count > 0) {
    ColoredPrintf(COLOR_RED,  "[   FAIL   ] ");
    printf("%zu %s, listed below:\n", fail_test_count, (fail_test_count == 1) ? "test" : "tests");
    for (const auto& name : fail_test_name_list) {
      ColoredPrintf(COLOR_RED, "[   FAIL   ] ");
      printf("%s\n", name.c_str());
    }
  }

  if (timeout_test_count > 0 || slow_test_count > 0 || fail_test_count > 0) {
    printf("\n");
  }

  if (timeout_test_count > 0) {
    printf("%2zu TIMEOUT %s\n", timeout_test_count, (timeout_test_count == 1) ? "TEST" : "TESTS");
  }
  if (slow_test_count > 0) {
    printf("%2zu SLOW %s\n", slow_test_count, (slow_test_count == 1) ? "TEST" : "TESTS");
  }
  if (fail_test_count > 0) {
    printf("%2zu FAILED %s\n", fail_test_count, (fail_test_count == 1) ? "TEST" : "TESTS");
  }

  fflush(stdout);
}

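// Escape the five XML special characters ('<', '>', '&', '\'', '"') so that raw test output
// can be embedded safely in XML attribute values below.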
std::string XmlEscape(const std::string& xml) {
  std::string escaped;
  escaped.reserve(xml.size());

  for (auto c : xml) {
    switch (c) {
    case '<':
      escaped.append("&lt;");
      break;
    case '>':
      escaped.append("&gt;");
      break;
    case '&':
      escaped.append("&amp;");
      break;
    case '\'':
      escaped.append("&apos;");
      break;
    case '"':
      escaped.append("&quot;");
      break;
    default:
      escaped.append(1, c);
      break;
    }
  }

  return escaped;
}

// Output an xml file when --gtest_output is used. We write this function ourselves because we
// can't reuse gtest.cc:XmlUnitTestResultPrinter: it is defined entirely inside gtest.cc and not
// exposed to outside code. What's more, as we don't run gtest in the parent process, we don't
// have the gtest classes that XmlUnitTestResultPrinter needs.
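// The emitted file has one <testsuite> element per test case and one <testcase> element per
// test, with a <failure> child only for tests that did not succeed, roughly:
//   <testsuites tests="..." failures="..." ...>
//     <testsuite name="..." ...>
//       <testcase name="..." ... />
//     </testsuite>
//   </testsuites>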
void OnTestIterationEndXmlPrint(const std::string& xml_output_filename,
                                const std::vector<TestCase>& testcase_list,
                                time_t epoch_iteration_start_time,
                                int64_t elapsed_time_ns) {
  FILE* fp = fopen(xml_output_filename.c_str(), "we");
  if (fp == NULL) {
    fprintf(stderr, "failed to open '%s': %s\n", xml_output_filename.c_str(), strerror(errno));
    exit(1);
  }

  size_t total_test_count = 0;
  size_t total_failed_count = 0;
  std::vector<size_t> failed_count_list(testcase_list.size(), 0);
  std::vector<int64_t> elapsed_time_list(testcase_list.size(), 0);
  for (size_t i = 0; i < testcase_list.size(); ++i) {
    auto& testcase = testcase_list[i];
    total_test_count += testcase.TestCount();
    for (size_t j = 0; j < testcase.TestCount(); ++j) {
      if (!testcase.GetTestSuccess(j)) {
        ++failed_count_list[i];
      }
      elapsed_time_list[i] += testcase.GetTestTime(j);
    }
    total_failed_count += failed_count_list[i];
  }

  const tm* time_struct = localtime(&epoch_iteration_start_time);
  char timestamp[40];
  snprintf(timestamp, sizeof(timestamp), "%4d-%02d-%02dT%02d:%02d:%02d",
           time_struct->tm_year + 1900, time_struct->tm_mon + 1, time_struct->tm_mday,
           time_struct->tm_hour, time_struct->tm_min, time_struct->tm_sec);

  fputs("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n", fp);
  fprintf(fp, "<testsuites tests=\"%zu\" failures=\"%zu\" disabled=\"0\" errors=\"0\"",
          total_test_count, total_failed_count);
  fprintf(fp, " timestamp=\"%s\" time=\"%.3lf\" name=\"AllTests\">\n", timestamp, elapsed_time_ns / 1e9);
  for (size_t i = 0; i < testcase_list.size(); ++i) {
    auto& testcase = testcase_list[i];
    fprintf(fp, "  <testsuite name=\"%s\" tests=\"%zu\" failures=\"%zu\" disabled=\"0\" errors=\"0\"",
            testcase.GetName().c_str(), testcase.TestCount(), failed_count_list[i]);
    fprintf(fp, " time=\"%.3lf\">\n", elapsed_time_list[i] / 1e9);

    for (size_t j = 0; j < testcase.TestCount(); ++j) {
      fprintf(fp, "    <testcase name=\"%s\" status=\"run\" time=\"%.3lf\" classname=\"%s\"",
              testcase.GetTest(j).GetName().c_str(), testcase.GetTestTime(j) / 1e9,
              testcase.GetName().c_str());
      if (testcase.GetTestSuccess(j)) {
        fputs(" />\n", fp);
      } else {
        fputs(">\n", fp);
        const std::string& test_output = testcase.GetTest(j).GetTestOutput();
        const std::string escaped_test_output = XmlEscape(test_output);
        fprintf(fp, "      <failure message=\"%s\" type=\"\">\n", escaped_test_output.c_str());
        fputs("      </failure>\n", fp);
        fputs("    </testcase>\n", fp);
      }
    }

    fputs("  </testsuite>\n", fp);
  }
  fputs("</testsuites>\n", fp);
  fclose(fp);
}

static bool sigint_flag;
static bool sigquit_flag;

static void signal_handler(int sig) {
  if (sig == SIGINT) {
    sigint_flag = true;
  } else if (sig == SIGQUIT) {
    sigquit_flag = true;
  }
}

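// SIGINT and SIGQUIT are caught in the parent while tests run in isolation mode. The handlers
// above only set flags; the flags are polled and acted on in HandleSignals(), which is called
// from the WaitChildProcs() loop below.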
static bool RegisterSignalHandler() {
  sigint_flag = false;
  sigquit_flag = false;
  sig_t ret = signal(SIGINT, signal_handler);
  if (ret != SIG_ERR) {
    ret = signal(SIGQUIT, signal_handler);
  }
  if (ret == SIG_ERR) {
    perror("RegisterSignalHandler");
    return false;
  }
  return true;
}

static bool UnregisterSignalHandler() {
  sig_t ret = signal(SIGINT, SIG_DFL);
  if (ret != SIG_ERR) {
    ret = signal(SIGQUIT, SIG_DFL);
  }
  if (ret == SIG_ERR) {
    perror("UnregisterSignalHandler");
    return false;
  }
  return true;
}

struct ChildProcInfo {
  pid_t pid;
  int64_t start_time_ns;
  int64_t end_time_ns;
  int64_t deadline_end_time_ns; // The time after which the test is considered to have timed out.
  size_t testcase_id, test_id;
  bool finished;
  bool timed_out;
  int exit_status;
  int child_read_fd; // File descriptor for reading the child test's output.
};

// Forked child process: run a single test.
static void ChildProcessFn(int argc, char** argv, const std::string& test_name) {
  char** new_argv = new char*[argc + 2];
  memcpy(new_argv, argv, sizeof(char*) * argc);

  char* filter_arg = new char [test_name.size() + 20];
  strcpy(filter_arg, "--gtest_filter=");
  strcat(filter_arg, test_name.c_str());
  new_argv[argc] = filter_arg;
  new_argv[argc + 1] = NULL;

  int new_argc = argc + 1;
  testing::InitGoogleTest(&new_argc, new_argv);
  int result = RUN_ALL_TESTS();
  exit(result);
}

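// Fork one child per test: the parent keeps the (non-blocking) read end of a pipe to collect
// the test's output, and the child redirects both stdout and stderr to the write end before
// running the single selected test via ChildProcessFn().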
static ChildProcInfo RunChildProcess(const std::string& test_name, int testcase_id, int test_id,
                                     int argc, char** argv) {
  int pipefd[2];
  if (pipe(pipefd) == -1) {
    perror("pipe in RunTestInSeparateProc");
    exit(1);
  }
  if (fcntl(pipefd[0], F_SETFL, O_NONBLOCK) == -1) {
    perror("fcntl in RunTestInSeparateProc");
    exit(1);
  }
  pid_t pid = fork();
  if (pid == -1) {
    perror("fork in RunTestInSeparateProc");
    exit(1);
  } else if (pid == 0) {
    // In child process, run a single test.
    close(pipefd[0]);
    close(STDOUT_FILENO);
    close(STDERR_FILENO);
    dup2(pipefd[1], STDOUT_FILENO);
    dup2(pipefd[1], STDERR_FILENO);

    if (!UnregisterSignalHandler()) {
      exit(1);
    }
    ChildProcessFn(argc, argv, test_name);
    // Unreachable.
  }
  // In parent process, initialize child process info.
  close(pipefd[1]);
  ChildProcInfo child_proc;
  child_proc.child_read_fd = pipefd[0];
  child_proc.pid = pid;
  child_proc.start_time_ns = NanoTime();
  child_proc.deadline_end_time_ns = child_proc.start_time_ns + GetTimeoutMs(test_name) * 1000000LL;
  child_proc.testcase_id = testcase_id;
  child_proc.test_id = test_id;
  child_proc.finished = false;
  return child_proc;
}

static void HandleSignals(std::vector<TestCase>& testcase_list,
                            std::vector<ChildProcInfo>& child_proc_list) {
  if (sigquit_flag) {
    sigquit_flag = false;
    // Print current running tests.
    printf("List of current running tests:\n");
    for (const auto& child_proc : child_proc_list) {
      if (child_proc.pid != 0) {
        std::string test_name = testcase_list[child_proc.testcase_id].GetTestName(child_proc.test_id);
        int64_t current_time_ns = NanoTime();
        int64_t run_time_ms = (current_time_ns - child_proc.start_time_ns) / 1000000;
        printf("  %s (%" PRId64 " ms)\n", test_name.c_str(), run_time_ms);
      }
    }
  } else if (sigint_flag) {
    sigint_flag = false;
    // Kill current running tests.
    for (const auto& child_proc : child_proc_list) {
      if (child_proc.pid != 0) {
        // Send SIGKILL to ensure the child process can be killed unconditionally.
        kill(child_proc.pid, SIGKILL);
      }
    }
    // SIGINT kills the parent process as well.
    exit(1);
  }
}

static bool CheckChildProcExit(pid_t exit_pid, int exit_status,
                               std::vector<ChildProcInfo>& child_proc_list) {
  for (size_t i = 0; i < child_proc_list.size(); ++i) {
    if (child_proc_list[i].pid == exit_pid) {
      child_proc_list[i].finished = true;
      child_proc_list[i].timed_out = false;
      child_proc_list[i].exit_status = exit_status;
      child_proc_list[i].end_time_ns = NanoTime();
      return true;
    }
  }
  return false;
}

static size_t CheckChildProcTimeout(std::vector<ChildProcInfo>& child_proc_list) {
  int64_t current_time_ns = NanoTime();
  size_t timeout_child_count = 0;
  for (size_t i = 0; i < child_proc_list.size(); ++i) {
    if (child_proc_list[i].deadline_end_time_ns <= current_time_ns) {
      child_proc_list[i].finished = true;
      child_proc_list[i].timed_out = true;
      child_proc_list[i].end_time_ns = current_time_ns;
      ++timeout_child_count;
    }
  }
  return timeout_child_count;
}

static void ReadChildProcOutput(std::vector<TestCase>& testcase_list,
                                std::vector<ChildProcInfo>& child_proc_list) {
  for (const auto& child_proc : child_proc_list) {
    TestCase& testcase = testcase_list[child_proc.testcase_id];
    int test_id = child_proc.test_id;
    while (true) {
      char buf[1024];
      ssize_t bytes_read = TEMP_FAILURE_RETRY(read(child_proc.child_read_fd, buf, sizeof(buf) - 1));
      if (bytes_read > 0) {
        buf[bytes_read] = '\0';
        testcase.GetTest(test_id).AppendTestOutput(buf);
      } else if (bytes_read == 0) {
        break; // Read end.
      } else {
        if (errno == EAGAIN) {
          break;
        }
        perror("failed to read child_read_fd");
        exit(1);
      }
    }
  }
}

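// Poll for finished or timed-out children: reap exited children with a non-blocking waitpid(),
// mark children past their deadline as timed out, drain their output pipes, handle any pending
// SIGINT/SIGQUIT, and sleep 1 ms between rounds until at least one child has finished.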
static void WaitChildProcs(std::vector<TestCase>& testcase_list,
                           std::vector<ChildProcInfo>& child_proc_list) {
  size_t finished_child_count = 0;
  while (true) {
    int status;
    pid_t result;
    while ((result = TEMP_FAILURE_RETRY(waitpid(-1, &status, WNOHANG))) > 0) {
      if (CheckChildProcExit(result, status, child_proc_list)) {
        ++finished_child_count;
      }
    }

    if (result == -1) {
      if (errno == ECHILD) {
        // This happens when we have no running child processes.
        return;
      } else {
        perror("waitpid");
        exit(1);
      }
    } else if (result == 0) {
      finished_child_count += CheckChildProcTimeout(child_proc_list);
    }

    ReadChildProcOutput(testcase_list, child_proc_list);
    if (finished_child_count > 0) {
      return;
    }

    HandleSignals(testcase_list, child_proc_list);

    // sleep 1 ms to avoid busy looping.
    timespec sleep_time;
    sleep_time.tv_sec = 0;
    sleep_time.tv_nsec = 1000000;
    nanosleep(&sleep_time, NULL);
  }
}

static TestResult WaitForOneChild(pid_t pid) {
  int exit_status;
  pid_t result = TEMP_FAILURE_RETRY(waitpid(pid, &exit_status, 0));

  TestResult test_result = TEST_SUCCESS;
  if (result != pid || WEXITSTATUS(exit_status) != 0) {
    test_result = TEST_FAILED;
  }
  return test_result;
}

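// Classify one finished child: a timed-out child is killed and recorded as TEST_TIMEOUT, a
// signal-terminated child is recorded as TEST_FAILED with the signal name, and otherwise the
// child's exit code decides between TEST_SUCCESS and TEST_FAILED.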
static void CollectChildTestResult(const ChildProcInfo& child_proc, TestCase& testcase) {
  int test_id = child_proc.test_id;
  testcase.SetTestTime(test_id, child_proc.end_time_ns - child_proc.start_time_ns);
  if (child_proc.timed_out) {
    // The child process marked as timed_out has not exited, and we should kill it manually.
    kill(child_proc.pid, SIGKILL);
    WaitForOneChild(child_proc.pid);
  }
  close(child_proc.child_read_fd);

  if (child_proc.timed_out) {
    testcase.SetTestResult(test_id, TEST_TIMEOUT);
    char buf[1024];
    snprintf(buf, sizeof(buf), "%s killed because of timeout at %" PRId64 " ms.\n",
             testcase.GetTestName(test_id).c_str(), testcase.GetTestTime(test_id) / 1000000);
    testcase.GetTest(test_id).AppendTestOutput(buf);

  } else if (WIFSIGNALED(child_proc.exit_status)) {
    // Record signal terminated test as failed.
    testcase.SetTestResult(test_id, TEST_FAILED);
    char buf[1024];
    snprintf(buf, sizeof(buf), "%s terminated by signal: %s.\n",
             testcase.GetTestName(test_id).c_str(), strsignal(WTERMSIG(child_proc.exit_status)));
    testcase.GetTest(test_id).AppendTestOutput(buf);

  } else {
    int exitcode = WEXITSTATUS(child_proc.exit_status);
    testcase.SetTestResult(test_id, exitcode == 0 ? TEST_SUCCESS : TEST_FAILED);
    if (exitcode != 0) {
      char buf[1024];
      snprintf(buf, sizeof(buf), "%s exited with exitcode %d.\n",
               testcase.GetTestName(test_id).c_str(), exitcode);
      testcase.GetTest(test_id).AppendTestOutput(buf);
    }
  }
}

// We choose to use multi-fork and multi-wait here instead of multi-thread, because using
// fork in a multi-threaded process can easily lead to deadlock.
// Returns true if all tests run successfully, otherwise returns false.
static bool RunTestInSeparateProc(int argc, char** argv, std::vector<TestCase>& testcase_list,
                                  int iteration_count, size_t job_count,
                                  const std::string& xml_output_filename) {
  // Stop default result printer to avoid environment setup/teardown information for each test.
  testing::UnitTest::GetInstance()->listeners().Release(
                        testing::UnitTest::GetInstance()->listeners().default_result_printer());
  testing::UnitTest::GetInstance()->listeners().Append(new TestResultPrinter);

  if (!RegisterSignalHandler()) {
    exit(1);
  }

  bool all_tests_passed = true;

  for (size_t iteration = 1;
       iteration_count < 0 || iteration <= static_cast<size_t>(iteration_count);
       ++iteration) {
    OnTestIterationStartPrint(testcase_list, iteration, iteration_count, job_count);
    int64_t iteration_start_time_ns = NanoTime();
    time_t epoch_iteration_start_time = time(NULL);

    // Run up to job_count tests in parallel, each test in a child process.
    std::vector<ChildProcInfo> child_proc_list;

    // Next test to run is [next_testcase_id:next_test_id].
    size_t next_testcase_id = 0;
    size_t next_test_id = 0;

    // Record how many tests are finished.
    std::vector<size_t> finished_test_count_list(testcase_list.size(), 0);
    size_t finished_testcase_count = 0;

    while (finished_testcase_count < testcase_list.size()) {
      // run up to job_count child processes.
      while (child_proc_list.size() < job_count && next_testcase_id < testcase_list.size()) {
        std::string test_name = testcase_list[next_testcase_id].GetTestName(next_test_id);
        ChildProcInfo child_proc = RunChildProcess(test_name, next_testcase_id, next_test_id,
                                                   argc, argv);
        child_proc_list.push_back(child_proc);
        if (++next_test_id == testcase_list[next_testcase_id].TestCount()) {
          next_test_id = 0;
          ++next_testcase_id;
        }
      }

      // Wait for any child process to finish or time out.
      WaitChildProcs(testcase_list, child_proc_list);

      // Collect result.
      auto it = child_proc_list.begin();
      while (it != child_proc_list.end()) {
        auto& child_proc = *it;
        if (child_proc.finished == true) {
          size_t testcase_id = child_proc.testcase_id;
          size_t test_id = child_proc.test_id;
          TestCase& testcase = testcase_list[testcase_id];

          CollectChildTestResult(child_proc, testcase);
          OnTestEndPrint(testcase, test_id);

          if (++finished_test_count_list[testcase_id] == testcase.TestCount()) {
            ++finished_testcase_count;
          }
          if (!testcase.GetTestSuccess(test_id)) {
            all_tests_passed = false;
          }

          it = child_proc_list.erase(it);
        } else {
          ++it;
        }
      }
    }

    int64_t elapsed_time_ns = NanoTime() - iteration_start_time_ns;
    OnTestIterationEndPrint(testcase_list, iteration, elapsed_time_ns);
    if (!xml_output_filename.empty()) {
      OnTestIterationEndXmlPrint(xml_output_filename, testcase_list, epoch_iteration_start_time,
                                 elapsed_time_ns);
    }
  }

  if (!UnregisterSignalHandler()) {
    exit(1);
  }

  return all_tests_passed;
}

static size_t GetDefaultJobCount() {
  return static_cast<size_t>(sysconf(_SC_NPROCESSORS_ONLN));
}

static void AddPathSeparatorInTestProgramPath(std::vector<char*>& args) {
  // To run a DeathTest in threadsafe mode, gtest requires that the test program be invoked
  // via a valid path that contains at least one path separator.
  // The reason is that gtest uses clone() + execve() to run a DeathTest in threadsafe mode,
  // and execve() doesn't search the PATH environment variable, so it will not succeed
  // unless we specify the absolute or relative path of the test program directly.
  if (strchr(args[0], '/') == nullptr) {
    args[0] = strdup(g_executable_path.c_str());
  }
}

static void AddGtestFilterSynonym(std::vector<char*>& args) {
  // Support --gtest-filter as a synonym for --gtest_filter.
  for (size_t i = 1; i < args.size(); ++i) {
    if (strncmp(args[i], "--gtest-filter", strlen("--gtest-filter")) == 0) {
      args[i][7] = '_';
    }
  }
}

struct IsolationTestOptions {
  bool isolate;
  size_t job_count;
  int test_deadline_ms;
  int test_slow_threshold_ms;
  std::string gtest_color;
  bool gtest_print_time;
  int gtest_repeat;
  std::string gtest_output;
};

// Pick out the options that are not for gtest: args has two parts, one part is used in isolation
// test mode as described in PrintHelpInfo(), and the other part is handled by
// testing::InitGoogleTest() in gtest. PickOptions() moves the first part into the
// IsolationTestOptions structure, leaving the second part in args.
// Arguments:
//   args passes in all command arguments, and passes out only the options meant for gtest.
//   options passes out the test options for isolation mode.
// Returns false if the arguments are invalid.
static bool PickOptions(std::vector<char*>& args, IsolationTestOptions& options) {
  for (size_t i = 1; i < args.size(); ++i) {
    if (strcmp(args[i], "--help") == 0 || strcmp(args[i], "-h") == 0) {
      PrintHelpInfo();
      options.isolate = false;
      return true;
    }
  }

  AddPathSeparatorInTestProgramPath(args);
  AddGtestFilterSynonym(args);

  // if --bionic-selftest argument is used, only enable self tests, otherwise remove self tests.
  bool enable_selftest = false;
  for (size_t i = 1; i < args.size(); ++i) {
    if (strcmp(args[i], "--bionic-selftest") == 0) {
      // This argument is to enable "bionic_selftest*" for self test, and is not shown in help info.
      // Don't remove this option from arguments.
      enable_selftest = true;
    }
  }
  std::string gtest_filter_str;
  for (size_t i = args.size() - 1; i >= 1; --i) {
    if (strncmp(args[i], "--gtest_filter=", strlen("--gtest_filter=")) == 0) {
      gtest_filter_str = args[i] + strlen("--gtest_filter=");
      args.erase(args.begin() + i);
      break;
    }
  }
  if (enable_selftest == true) {
    gtest_filter_str = "bionic_selftest*";
  } else {
    if (gtest_filter_str.empty()) {
      gtest_filter_str = "-bionic_selftest*";
    } else {
      // Find if '-' for NEGATIVE_PATTERNS exists.
      if (gtest_filter_str.find('-') != std::string::npos) {
        gtest_filter_str += ":bionic_selftest*";
      } else {
        gtest_filter_str += ":-bionic_selftest*";
      }
    }
  }
  gtest_filter_str = "--gtest_filter=" + gtest_filter_str;
  args.push_back(strdup(gtest_filter_str.c_str()));

  options.isolate = true;
  // Parse arguments that prevent us from running in isolation mode.
  for (size_t i = 1; i < args.size(); ++i) {
    if (strcmp(args[i], "--no-isolate") == 0) {
      options.isolate = false;
    } else if (strcmp(args[i], "--gtest_list_tests") == 0) {
      options.isolate = false;
    }
  }

  // Stop parsing if we will not run in isolation mode.
  if (options.isolate == false) {
    return true;
  }

  // Init default isolation test options.
  options.job_count = GetDefaultJobCount();
  options.test_deadline_ms = DEFAULT_GLOBAL_TEST_RUN_DEADLINE_MS;
  options.test_slow_threshold_ms = DEFAULT_GLOBAL_TEST_RUN_SLOW_THRESHOLD_MS;
  options.gtest_color = testing::GTEST_FLAG(color);
  options.gtest_print_time = testing::GTEST_FLAG(print_time);
  options.gtest_repeat = testing::GTEST_FLAG(repeat);
  options.gtest_output = testing::GTEST_FLAG(output);

  // Parse arguments specified for isolation mode.
  for (size_t i = 1; i < args.size(); ++i) {
    if (strncmp(args[i], "-j", strlen("-j")) == 0) {
      char* p = args[i] + strlen("-j");
      int count = 0;
      if (*p != '\0') {
        // Argument like -j5.
        count = atoi(p);
      } else if (args.size() > i + 1) {
        // Arguments like -j 5.
        count = atoi(args[i + 1]);
        ++i;
      }
      if (count <= 0) {
        fprintf(stderr, "invalid job count: %d\n", count);
        return false;
      }
      options.job_count = static_cast<size_t>(count);
    } else if (strncmp(args[i], "--deadline=", strlen("--deadline=")) == 0) {
      int time_ms = atoi(args[i] + strlen("--deadline="));
      if (time_ms <= 0) {
        fprintf(stderr, "invalid deadline: %d\n", time_ms);
        return false;
      }
      options.test_deadline_ms = time_ms;
    } else if (strncmp(args[i], "--slow-threshold=", strlen("--slow-threshold=")) == 0) {
      int time_ms = atoi(args[i] + strlen("--slow-threshold="));
      if (time_ms <= 0) {
        fprintf(stderr, "invalid slow test threshold: %d\n", time_ms);
        return false;
      }
      options.test_slow_threshold_ms = time_ms;
    } else if (strncmp(args[i], "--gtest_color=", strlen("--gtest_color=")) == 0) {
      options.gtest_color = args[i] + strlen("--gtest_color=");
    } else if (strcmp(args[i], "--gtest_print_time=0") == 0) {
      options.gtest_print_time = false;
    } else if (strncmp(args[i], "--gtest_repeat=", strlen("--gtest_repeat=")) == 0) {
      // If the value of gtest_repeat is < 0, then it indicates the tests
      // should be repeated forever.
      options.gtest_repeat = atoi(args[i] + strlen("--gtest_repeat="));
      // Remove --gtest_repeat=xx from the arguments, so each child process runs only one iteration for a single test.
      args.erase(args.begin() + i);
      --i;
    } else if (strncmp(args[i], "--gtest_output=", strlen("--gtest_output=")) == 0) {
      std::string output = args[i] + strlen("--gtest_output=");
      // generate output xml file path according to the strategy in gtest.
      bool success = true;
      if (strncmp(output.c_str(), "xml:", strlen("xml:")) == 0) {
        output = output.substr(strlen("xml:"));
        if (output.size() == 0) {
          success = false;
        }
        // Make absolute path.
        if (success && output[0] != '/') {
          char* cwd = getcwd(NULL, 0);
          if (cwd != NULL) {
            output = std::string(cwd) + "/" + output;
            free(cwd);
          } else {
            success = false;
          }
        }
        // Add file name if output is a directory.
        if (success && output.back() == '/') {
          output += "test_details.xml";
        }
      }
      if (success) {
        options.gtest_output = output;
      } else {
        fprintf(stderr, "invalid gtest_output file: %s\n", args[i]);
        return false;
      }

      // Remove --gtest_output=xxx from arguments, so child process will not write xml file.
      args.erase(args.begin() + i);
      --i;
    }
  }

  // Add --no-isolate in args to prevent child process from running in isolation mode again.
  // As DeathTest will try to call execve(), this argument should always be added.
  args.insert(args.begin() + 1, strdup("--no-isolate"));
  return true;
}

static std::string get_proc_self_exe() {
  char path[PATH_MAX];
  ssize_t path_len = readlink("/proc/self/exe", path, sizeof(path));
  if (path_len <= 0 || path_len >= static_cast<ssize_t>(sizeof(path))) {
    perror("readlink");
    exit(1);
  }

  return std::string(path, path_len);
}

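// main() flow: record process-wide state, split the command line into isolation options and
// gtest options via PickOptions(), then either enumerate the tests and run them in separate
// child processes (isolation mode), or fall back to running gtest in this process.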
int main(int argc, char** argv, char** envp) {
  g_executable_path = get_proc_self_exe();
  g_argc = argc;
  g_argv = argv;
  g_envp = envp;
  std::vector<char*> arg_list(argv, argv + argc);

  IsolationTestOptions options;
  if (PickOptions(arg_list, options) == false) {
    return 1;
  }

  if (options.isolate == true) {
    // Set global variables.
    global_test_run_deadline_ms = options.test_deadline_ms;
    global_test_run_slow_threshold_ms = options.test_slow_threshold_ms;
    testing::GTEST_FLAG(color) = options.gtest_color.c_str();
    testing::GTEST_FLAG(print_time) = options.gtest_print_time;
    std::vector<TestCase> testcase_list;

    argc = static_cast<int>(arg_list.size());
    arg_list.push_back(NULL);
    if (EnumerateTests(argc, arg_list.data(), testcase_list) == false) {
      return 1;
    }
    bool all_test_passed = RunTestInSeparateProc(argc, arg_list.data(), testcase_list,
                              options.gtest_repeat, options.job_count, options.gtest_output);
    return all_test_passed ? 0 : 1;
  } else {
    argc = static_cast<int>(arg_list.size());
    arg_list.push_back(NULL);
    testing::InitGoogleTest(&argc, arg_list.data());
    return RUN_ALL_TESTS();
  }
}

//################################################################################
// Bionic gtest self tests; run them with the --bionic-selftest option.

TEST(bionic_selftest, test_success) {
  ASSERT_EQ(1, 1);
}

TEST(bionic_selftest, test_fail) {
  ASSERT_EQ(0, 1);
}

TEST(bionic_selftest, test_time_warn) {
  sleep(4);
}

TEST(bionic_selftest, test_timeout) {
  while (1) {}
}

TEST(bionic_selftest, test_signal_SEGV_terminated) {
  char* p = reinterpret_cast<char*>(static_cast<intptr_t>(atoi("0")));
  *p = 3;
}

class bionic_selftest_DeathTest : public ::testing::Test {
 protected:
  virtual void SetUp() {
    ::testing::FLAGS_gtest_death_test_style = "threadsafe";
  }
};

static void deathtest_helper_success() {
  ASSERT_EQ(1, 1);
  exit(0);
}

TEST_F(bionic_selftest_DeathTest, success) {
  ASSERT_EXIT(deathtest_helper_success(), ::testing::ExitedWithCode(0), "");
}

static void deathtest_helper_fail() {
  ASSERT_EQ(1, 0);
}

TEST_F(bionic_selftest_DeathTest, fail) {
  ASSERT_EXIT(deathtest_helper_fail(), ::testing::ExitedWithCode(0), "");
}

class BionicSelfTest : public ::testing::TestWithParam<bool> {
};

TEST_P(BionicSelfTest, test_success) {
  ASSERT_EQ(GetParam(), GetParam());
}

INSTANTIATE_TEST_CASE_P(bionic_selftest, BionicSelfTest, ::testing::Values(true, false));

template <typename T>
class bionic_selftest_TestT : public ::testing::Test {
};

typedef ::testing::Types<char, int> MyTypes;

TYPED_TEST_CASE(bionic_selftest_TestT, MyTypes);

TYPED_TEST(bionic_selftest_TestT, test_success) {
  ASSERT_EQ(true, true);
}