Home | History | Annotate | Download | only in tests
      1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
      2 // Use of this source code is governed by a BSD-style license that can be
      3 // found in the LICENSE file.
      4 
      5 #include "ppapi/tests/testing_instance.h"
      6 
      7 #include <algorithm>
      8 #include <cstring>
      9 #include <iomanip>
     10 #include <sstream>
     11 #include <vector>
     12 
     13 #include "ppapi/cpp/core.h"
     14 #include "ppapi/cpp/module.h"
     15 #include "ppapi/cpp/var.h"
     16 #include "ppapi/cpp/view.h"
     17 #include "ppapi/tests/test_case.h"
     18 
// Head of the singly-linked list of test case factories that
// CaseForTestName() walks (via next_). Presumably each TestCaseFactory
// registers itself here at static-init time — see test_case.h to confirm.
TestCaseFactory* TestCaseFactory::head_ = NULL;
     20 
// Progress value we report (via ReportProgress) to signal "we're still
// working." See the comment above the class declaration for how this works.
static const char kProgressSignal[] = "...";
     24 
// Constructs the testing instance with "not yet configured" defaults: no
// current test case, unknown server ports (-1), non-NaCl mode, and the
// plugin removed from the page when the run finishes. The actual test case
// is created later, in Init(), from the "testcase" embed argument.
TestingInstance::TestingInstance(PP_Instance instance)
#if (defined __native_client__)
    : pp::Instance(instance),
#else
    // Non-NaCl builds derive from InstancePrivate so GetInstanceObject()
    // below can expose a scriptable object to the page.
    : pp::InstancePrivate(instance),
#endif
      current_case_(NULL),
      executed_tests_(false),
      number_tests_executed_(0),
      nacl_mode_(false),
      ssl_server_port_(-1),
      websocket_port_(-1),
      remove_plugin_(true) {
  callback_factory_.Initialize(this);
}
     42 
     43 TestingInstance::~TestingInstance() {
     44   if (current_case_)
     45     delete current_case_;
     46 }
     47 
     48 bool TestingInstance::Init(uint32_t argc,
     49                            const char* argn[],
     50                            const char* argv[]) {
     51   for (uint32_t i = 0; i < argc; i++) {
     52     if (std::strcmp(argn[i], "mode") == 0) {
     53       if (std::strcmp(argv[i], "nacl") == 0)
     54         nacl_mode_ = true;
     55     } else if (std::strcmp(argn[i], "protocol") == 0) {
     56       protocol_ = argv[i];
     57     } else if (std::strcmp(argn[i], "websocket_host") == 0) {
     58       websocket_host_ = argv[i];
     59     } else if (std::strcmp(argn[i], "websocket_port") == 0) {
     60       websocket_port_ = atoi(argv[i]);
     61     } else if (std::strcmp(argn[i], "ssl_server_port") == 0) {
     62       ssl_server_port_ = atoi(argv[i]);
     63     }
     64   }
     65   // Create the proper test case from the argument.
     66   for (uint32_t i = 0; i < argc; i++) {
     67     if (std::strcmp(argn[i], "testcase") == 0) {
     68       if (argv[i][0] == '\0')
     69         break;
     70       current_case_ = CaseForTestName(argv[i]);
     71       test_filter_ = argv[i];
     72       if (!current_case_)
     73         errors_.append(std::string("Unknown test case ") + argv[i]);
     74       else if (!current_case_->Init())
     75         errors_.append(" Test case could not initialize.");
     76       return true;
     77     }
     78   }
     79 
     80   // In DidChangeView, we'll dump out a list of all available tests.
     81   return true;
     82 }
     83 
     84 #if !(defined __native_client__)
     85 pp::Var TestingInstance::GetInstanceObject() {
     86   if (current_case_)
     87     return current_case_->GetTestObject();
     88 
     89   return pp::VarPrivate();
     90 }
     91 #endif
     92 
     93 void TestingInstance::HandleMessage(const pp::Var& message_data) {
     94   if (current_case_)
     95     current_case_->HandleMessage(message_data);
     96 }
     97 
     98 void TestingInstance::DidChangeView(const pp::View& view) {
     99   if (!executed_tests_) {
    100     executed_tests_ = true;
    101     pp::Module::Get()->core()->CallOnMainThread(
    102         0,
    103         callback_factory_.NewCallback(&TestingInstance::ExecuteTests));
    104   }
    105   if (current_case_)
    106     current_case_->DidChangeView(view);
    107 }
    108 
    109 bool TestingInstance::HandleInputEvent(const pp::InputEvent& event) {
    110   if (current_case_)
    111     return current_case_->HandleInputEvent(event);
    112   return false;
    113 }
    114 
    115 void TestingInstance::EvalScript(const std::string& script) {
    116   SendTestCommand("EvalScript", script);
    117 }
    118 
    119 void TestingInstance::SetCookie(const std::string& name,
    120                                 const std::string& value) {
    121   SendTestCommand("SetCookie", name + "=" + value);
    122 }
    123 
    124 void TestingInstance::LogTest(const std::string& test_name,
    125                               const std::string& error_message,
    126                               PP_TimeTicks start_time) {
    127   // Compute the time to run the test and save it in a string for logging:
    128   PP_TimeTicks end_time(pp::Module::Get()->core()->GetTimeTicks());
    129   std::ostringstream number_stream;
    130   PP_TimeTicks elapsed_time(end_time - start_time);
    131   number_stream << std::fixed << std::setprecision(3) << elapsed_time;
    132   std::string time_string(number_stream.str());
    133 
    134   // Tell the browser we're still working.
    135   ReportProgress(kProgressSignal);
    136 
    137   number_tests_executed_++;
    138 
    139   std::string html;
    140   html.append("<div class=\"test_line\"><span class=\"test_name\">");
    141   html.append(test_name);
    142   html.append("</span> ");
    143   if (error_message.empty()) {
    144     html.append("<span class=\"pass\">PASS</span>");
    145   } else {
    146     html.append("<span class=\"fail\">FAIL</span>: <span class=\"err_msg\">");
    147     html.append(error_message);
    148     html.append("</span>");
    149 
    150     if (!errors_.empty())
    151       errors_.append(", ");  // Separator for different error messages.
    152     errors_.append(test_name + " FAIL: " + error_message);
    153   }
    154   html.append(" <span class=\"time\">(");
    155   html.append(time_string);
    156   html.append("s)</span>");
    157 
    158   html.append("</div>");
    159   LogHTML(html);
    160 }
    161 
    162 void TestingInstance::AppendError(const std::string& message) {
    163   if (!errors_.empty())
    164     errors_.append(", ");
    165   errors_.append(message);
    166 }
    167 
// Runs the configured test case (or lists the available ones) and reports
// the aggregate result to the page harness. Invoked once on the main thread
// via the callback scheduled in DidChangeView(); |unused| is the completion
// callback's result code and is ignored.
void TestingInstance::ExecuteTests(int32_t unused) {
  ReportProgress(kProgressSignal);

  // Clear the console.
  SendTestCommand("ClearConsole");

  if (!errors_.empty()) {
    // Catch initialization errors and output the current error string to
    // the console.
    LogError("Plugin initialization failed: " + errors_);
  } else if (!current_case_) {
    // No "testcase" argument was given: show the menu of tests, and record
    // a failure so an automated run can't silently pass.
    LogAvailableTests();
    errors_.append("FAIL: Only listed tests");
  } else {
    current_case_->RunTests(test_filter_);

    // Zero executed tests means the filter matched nothing — surface that
    // as an error rather than reporting a vacuous PASS.
    if (number_tests_executed_ == 0) {
      errors_.append("No tests executed. The test filter might be too "
                     "restrictive: '" + test_filter_ + "'.");
      LogError(errors_);
    }
    if (current_case_->skipped_tests().size()) {
      // TODO(dmichael): Convert all TestCases to run all tests in one fixture,
      //                 and enable this check. Currently, a lot of our tests
      //                 run 1 test per fixture, which is slow.
      /*
      errors_.append("Some tests were not listed and thus were not run. Make "
                     "sure all tests are passed in the test_case URL (even if "
                     "they are marked DISABLED_). Forgotten tests: ");
      std::set<std::string>::const_iterator iter =
          current_case_->skipped_tests().begin();
      for (; iter != current_case_->skipped_tests().end(); ++iter) {
        errors_.append(*iter);
        errors_.append(" ");
      }
      LogError(errors_);
      */
    }
    // Names left in remaining_tests() never matched a real test in the
    // case — treat that as a failure so typos in the filter are caught.
    if (current_case_->remaining_tests().size()) {
      errors_.append("Some listed tests were not found in the TestCase. Check "
                     "the test names that were passed to make sure they match "
                     "tests in the TestCase. Unknown tests: ");
      std::map<std::string, bool>::const_iterator iter =
          current_case_->remaining_tests().begin();
      for (; iter != current_case_->remaining_tests().end(); ++iter) {
        errors_.append(iter->first);
        errors_.append(" ");
      }
      LogError(errors_);
    }
  }

  if (remove_plugin_)
    SendTestCommand("RemovePluginWhenFinished");
  std::string result(errors_);
  if (result.empty())
    result = "PASS";
  SendTestCommand("DidExecuteTests", result);
  // Note, DidExecuteTests may unload the plugin. We can't really do anything
  // after this point.
}
    229 
    230 TestCase* TestingInstance::CaseForTestName(const std::string& name) {
    231   std::string case_name = name.substr(0, name.find_first_of('_'));
    232   TestCaseFactory* iter = TestCaseFactory::head_;
    233   while (iter != NULL) {
    234     if (case_name == iter->name_)
    235       return iter->method_(this);
    236     iter = iter->next_;
    237   }
    238   return NULL;
    239 }
    240 
    241 void TestingInstance::SendTestCommand(const std::string& command) {
    242   std::string msg("TESTING_MESSAGE:");
    243   msg += command;
    244   PostMessage(pp::Var(msg));
    245 }
    246 
    247 void TestingInstance::SendTestCommand(const std::string& command,
    248                                       const std::string& params) {
    249   SendTestCommand(command + ":" + params);
    250 }
    251 
    252 
    253 void TestingInstance::LogAvailableTests() {
    254   // Print out a listing of all tests.
    255   std::vector<std::string> test_cases;
    256   TestCaseFactory* iter = TestCaseFactory::head_;
    257   while (iter != NULL) {
    258     test_cases.push_back(iter->name_);
    259     iter = iter->next_;
    260   }
    261   std::sort(test_cases.begin(), test_cases.end());
    262 
    263   std::string html;
    264   html.append("Available test cases: <dl>");
    265   for (size_t i = 0; i < test_cases.size(); ++i) {
    266     html.append("<dd><a href='?testcase=");
    267     html.append(test_cases[i]);
    268     if (nacl_mode_)
    269        html.append("&mode=nacl");
    270     html.append("'>");
    271     html.append(test_cases[i]);
    272     html.append("</a></dd>");
    273   }
    274   html.append("</dl>");
    275   html.append("<button onclick='RunAll()'>Run All Tests</button>");
    276 
    277   LogHTML(html);
    278 }
    279 
    280 void TestingInstance::LogError(const std::string& text) {
    281   std::string html;
    282   html.append("<span class=\"fail\">FAIL</span>: <span class=\"err_msg\">");
    283   html.append(text);
    284   html.append("</span>");
    285   LogHTML(html);
    286 }
    287 
    288 void TestingInstance::LogHTML(const std::string& html) {
    289   SendTestCommand("LogHTML", html);
    290 }
    291 
    292 void TestingInstance::ReportProgress(const std::string& progress_value) {
    293   SendTestCommand("ReportProgress", progress_value);
    294 }
    295 
    296 void TestingInstance::AddPostCondition(const std::string& script) {
    297   SendTestCommand("AddPostCondition", script);
    298 }
    299 
    300 class Module : public pp::Module {
    301  public:
    302   Module() : pp::Module() {}
    303   virtual ~Module() {}
    304 
    305   virtual pp::Instance* CreateInstance(PP_Instance instance) {
    306     return new TestingInstance(instance);
    307   }
    308 };
    309 
    310 namespace pp {
    311 
    312 Module* CreateModule() {
    313   return new ::Module();
    314 }
    315 
    316 }  // namespace pp
    317