// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/basictypes.h"
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/file_util.h"
#include "base/files/file_path.h"
#include "base/files/scoped_temp_dir.h"
#include "base/path_service.h"
#include "base/strings/utf_string_conversions.h"
#include "chrome/browser/history/history_service.h"
#include "testing/gtest/include/gtest/gtest.h"

using base::Time;
using base::TimeDelta;

// Tests the history service for querying functionality.

namespace history {

namespace {

// One page visit to seed into the history database. SetUp() converts
// |days_ago| into an absolute |time| and adds each entry via AddPage.
struct TestEntry {
  const char* url;
  const char* title;
  const int days_ago;  // Visit age relative to "now"; see SetUp().
  Time time;  // Filled by SetUp.
} test_entries[] = {
  // This one is visited super long ago so it will be in a different database
  // from the next appearance of it at the end.
  {"http://example.com/", "Other", 180},

  // These are deliberately added out of chronological order. The history
  // service should sort them by visit time when returning query results.
  // The correct index sort order is 4 2 3 1 7 6 5 0.
  {"http://www.google.com/1", "Title PAGEONE FOO some text", 10},
  {"http://www.google.com/3", "Title PAGETHREE BAR some hello world", 8},
  {"http://www.google.com/2", "Title PAGETWO FOO some more blah blah blah", 9},

  // A more recent visit of the first one.
  {"http://example.com/", "Other", 6},

  {"http://www.google.com/6", "Title I'm the second oldest", 13},
  {"http://www.google.com/4", "Title four", 12},
  {"http://www.google.com/5", "Title five", 11},
};

// Returns true if the nth result in the given results set matches the
// test_entries element at |test_entry_index| (visit time, URL, and title
// must all agree). It will return false on a non-match or if there aren't
// enough results.
bool NthResultIs(const QueryResults& results,
                 int n,  // Result index to check.
                 int test_entry_index) {  // Index of test_entries to compare.
  if (static_cast<int>(results.size()) <= n)
    return false;

  const URLResult& result = results[n];

  // Check the visit time.
  if (result.visit_time() != test_entries[test_entry_index].time)
    return false;

  // Now check the URL & title.
  return result.url() == GURL(test_entries[test_entry_index].url) &&
         result.title() == UTF8ToUTF16(test_entries[test_entry_index].title);
}

}  // namespace

// Fixture that owns a HistoryService backed by a fresh temp directory,
// pre-populated with |test_entries| (see SetUp). Queries are made
// synchronous by spinning the message loop until the completion callback
// fires.
class HistoryQueryTest : public testing::Test {
 public:
  HistoryQueryTest() : page_id_(0) {
  }

  // Acts like a synchronous call to history's QueryHistory: issues the
  // asynchronous request, then runs the message loop until
  // QueryHistoryComplete() quits it, and swaps the results out to the caller.
  void QueryHistory(const std::string& text_query,
                    const QueryOptions& options,
                    QueryResults* results) {
    history_->QueryHistory(
        UTF8ToUTF16(text_query), options, &consumer_,
        base::Bind(&HistoryQueryTest::QueryHistoryComplete,
                   base::Unretained(this)));
    // Will go until ...Complete calls Quit.
    base::MessageLoop::current()->Run();
    results->Swap(&last_query_results_);
  }

  // Test paging through results, with a fixed number of results per page.
  // Defined here so code can be shared for the text search and the non-text
  // search versions. |expected_results| holds test_entries indices in the
  // order the pages should come back; paging advances by setting
  // options.end_time to the last visit time of the previous page.
  void TestPaging(const std::string& query_text,
                  const int* expected_results,
                  int results_length) {
    ASSERT_TRUE(history_.get());

    QueryOptions options;
    QueryResults results;

    // Page one result at a time; each page should hold exactly the next
    // expected entry.
    options.max_count = 1;
    for (int i = 0; i < results_length; i++) {
      SCOPED_TRACE(testing::Message() << "i = " << i);
      QueryHistory(query_text, options, &results);
      ASSERT_EQ(1U, results.size());
      EXPECT_TRUE(NthResultIs(results, 0, expected_results[i]));
      options.end_time = results.back().visit_time();
    }
    // After the last page, a further query must come back empty.
    QueryHistory(query_text, options, &results);
    EXPECT_EQ(0U, results.size());

    // Try with a max_count > 1.
    options.max_count = 2;
    options.end_time = base::Time();
    for (int i = 0; i < results_length / 2; i++) {
      SCOPED_TRACE(testing::Message() << "i = " << i);
      QueryHistory(query_text, options, &results);
      ASSERT_EQ(2U, results.size());
      EXPECT_TRUE(NthResultIs(results, 0, expected_results[i * 2]));
      EXPECT_TRUE(NthResultIs(results, 1, expected_results[i * 2 + 1]));
      options.end_time = results.back().visit_time();
    }

    // Add a couple of entries with duplicate timestamps. Use |query_text| as
    // the title of both entries so that they match a text query.
    TestEntry duplicates[] = {
      { "http://www.google.com/x", query_text.c_str(), 1, },
      { "http://www.google.com/y", query_text.c_str(), 1, }
    };
    AddEntryToHistory(duplicates[0]);
    AddEntryToHistory(duplicates[1]);

    // Make sure that paging proceeds even if there are duplicate timestamps.
    // The ASSERT_NE guards against an infinite loop: end_time must strictly
    // advance on every page.
    options.end_time = base::Time();
    do {
      QueryHistory(query_text, options, &results);
      ASSERT_NE(options.end_time, results.back().visit_time());
      options.end_time = results.back().visit_time();
    } while (!results.reached_beginning());
  }

 protected:
  scoped_ptr<HistoryService> history_;

  // Counter used to generate a unique ID for each page added to the history.
  int32 page_id_;

  // Adds |entry| as a browsed visit (with its title) to the history service.
  void AddEntryToHistory(const TestEntry& entry) {
    // We need the ID scope and page ID so that the visit tracker can find it.
    const void* id_scope = reinterpret_cast<void*>(1);
    GURL url(entry.url);

    history_->AddPage(url, entry.time, id_scope, page_id_++, GURL(),
                      history::RedirectList(), content::PAGE_TRANSITION_LINK,
                      history::SOURCE_BROWSED, false);
    history_->SetPageTitle(url, UTF8ToUTF16(entry.title));
  }

 private:
  // Creates the temp history directory, initializes the HistoryService, and
  // seeds it with test_entries (filling in each entry's absolute time).
  virtual void SetUp() {
    ASSERT_TRUE(temp_dir_.CreateUniqueTempDir());
    history_dir_ = temp_dir_.path().AppendASCII("HistoryTest");
    ASSERT_TRUE(file_util::CreateDirectory(history_dir_));

    history_.reset(new HistoryService);
    if (!history_->Init(history_dir_, NULL)) {
      history_.reset();  // Tests should notice this NULL ptr & fail.
      return;
    }

    // Fill the test data.
    Time now = Time::Now().LocalMidnight();
    for (size_t i = 0; i < arraysize(test_entries); i++) {
      test_entries[i].time =
          now - (test_entries[i].days_ago * TimeDelta::FromDays(1));
      AddEntryToHistory(test_entries[i]);
    }
  }

  // Shuts the history service down and waits for its backend thread to
  // finish destroying before the temp dir is cleaned up.
  virtual void TearDown() {
    if (history_) {
      history_->SetOnBackendDestroyTask(base::MessageLoop::QuitClosure());
      history_->Cleanup();
      history_.reset();
      base::MessageLoop::current()->Run();  // Wait for the other thread.
    }
  }

  // Completion callback for QueryHistory(): stashes the results and quits
  // the message loop so the synchronous wrapper can return.
  void QueryHistoryComplete(HistoryService::Handle, QueryResults* results) {
    results->Swap(&last_query_results_);
    base::MessageLoop::current()->Quit();  // Will return out to QueryHistory.
  }

  base::ScopedTempDir temp_dir_;

  base::MessageLoop message_loop_;

  base::FilePath history_dir_;

  CancelableRequestConsumer consumer_;

  // The QueryHistoryComplete callback will put the results here so QueryHistory
  // can return them.
  QueryResults last_query_results_;

  DISALLOW_COPY_AND_ASSIGN(HistoryQueryTest);
};

TEST_F(HistoryQueryTest, Basic) {
  ASSERT_TRUE(history_.get());

  QueryOptions options;
  QueryResults results;

  // Test duplicate collapsing. 0 is an older duplicate of 4, and should not
  // appear in the result set.
  QueryHistory(std::string(), options, &results);
  EXPECT_EQ(7U, results.size());

  EXPECT_TRUE(NthResultIs(results, 0, 4));
  EXPECT_TRUE(NthResultIs(results, 1, 2));
  EXPECT_TRUE(NthResultIs(results, 2, 3));
  EXPECT_TRUE(NthResultIs(results, 3, 1));
  EXPECT_TRUE(NthResultIs(results, 4, 7));
  EXPECT_TRUE(NthResultIs(results, 5, 6));
  EXPECT_TRUE(NthResultIs(results, 6, 5));

  // Next query a time range. The beginning should be inclusive, the ending
  // should be exclusive.
  options.begin_time = test_entries[3].time;
  options.end_time = test_entries[2].time;
  QueryHistory(std::string(), options, &results);
  EXPECT_EQ(1U, results.size());
  EXPECT_TRUE(NthResultIs(results, 0, 3));
}

// Tests max_count feature for basic (non-Full Text Search) queries.
TEST_F(HistoryQueryTest, BasicCount) {
  ASSERT_TRUE(history_.get());

  QueryOptions options;
  QueryResults results;

  // Query all time but with a limit on the number of entries. We should
  // get the N most recent entries.
  options.max_count = 2;
  QueryHistory(std::string(), options, &results);
  EXPECT_EQ(2U, results.size());
  EXPECT_TRUE(NthResultIs(results, 0, 4));
  EXPECT_TRUE(NthResultIs(results, 1, 2));
}

// Tests that reached_beginning() is reported correctly around the boundary
// of the oldest visit, both with and without a text query and max_count.
TEST_F(HistoryQueryTest, ReachedBeginning) {
  ASSERT_TRUE(history_.get());

  QueryOptions options;
  QueryResults results;

  // Unbounded queries cover all of history.
  QueryHistory(std::string(), options, &results);
  EXPECT_TRUE(results.reached_beginning());
  QueryHistory("some", options, &results);
  EXPECT_TRUE(results.reached_beginning());

  // A begin_time newer than the oldest visit leaves history unvisited.
  options.begin_time = test_entries[1].time;
  QueryHistory(std::string(), options, &results);
  EXPECT_FALSE(results.reached_beginning());
  QueryHistory("some", options, &results);
  EXPECT_FALSE(results.reached_beginning());

  // Try |begin_time| just later than the oldest visit.
  options.begin_time = test_entries[0].time + TimeDelta::FromMicroseconds(1);
  QueryHistory(std::string(), options, &results);
  EXPECT_FALSE(results.reached_beginning());
  QueryHistory("some", options, &results);
  EXPECT_FALSE(results.reached_beginning());

  // Try |begin_time| equal to the oldest visit.
  options.begin_time = test_entries[0].time;
  QueryHistory(std::string(), options, &results);
  EXPECT_TRUE(results.reached_beginning());
  QueryHistory("some", options, &results);
  EXPECT_TRUE(results.reached_beginning());

  // Try |begin_time| just earlier than the oldest visit.
  options.begin_time = test_entries[0].time - TimeDelta::FromMicroseconds(1);
  QueryHistory(std::string(), options, &results);
  EXPECT_TRUE(results.reached_beginning());
  QueryHistory("some", options, &results);
  EXPECT_TRUE(results.reached_beginning());

  // Test with |max_count| specified.
  options.max_count = 1;
  QueryHistory(std::string(), options, &results);
  EXPECT_FALSE(results.reached_beginning());
  QueryHistory("some", options, &results);
  EXPECT_FALSE(results.reached_beginning());

  // Test with |max_count| greater than the number of results,
  // and exactly equal to the number of results.
  options.max_count = 100;
  QueryHistory(std::string(), options, &results);
  EXPECT_TRUE(results.reached_beginning());
  options.max_count = results.size();
  QueryHistory(std::string(), options, &results);
  EXPECT_TRUE(results.reached_beginning());

  options.max_count = 100;
  QueryHistory("some", options, &results);
  EXPECT_TRUE(results.reached_beginning());
  options.max_count = results.size();
  QueryHistory("some", options, &results);
  EXPECT_TRUE(results.reached_beginning());
}

// This does most of the same tests above, but performs text searches for a
// string that will match the pages in question. This will trigger a
// different code path.
TEST_F(HistoryQueryTest, TextSearch) {
  ASSERT_TRUE(history_.get());

  QueryOptions options;
  QueryResults results;

  // Query all of them to make sure they are there and in order. Only the
  // entries whose titles contain "some" (test_entries indices 1, 2, and 3)
  // should match, most recent first.
  QueryHistory("some", options, &results);
  EXPECT_EQ(3U, results.size());
  EXPECT_TRUE(NthResultIs(results, 0, 2));
  EXPECT_TRUE(NthResultIs(results, 1, 3));
  EXPECT_TRUE(NthResultIs(results, 2, 1));

  // Do a query that should only match one of them.
  QueryHistory("PAGETWO", options, &results);
  EXPECT_EQ(1U, results.size());
  EXPECT_TRUE(NthResultIs(results, 0, 3));

  // Next query a time range. The beginning should be inclusive, the ending
  // should be exclusive.
  options.begin_time = test_entries[1].time;
  options.end_time = test_entries[3].time;
  QueryHistory("some", options, &results);
  EXPECT_EQ(1U, results.size());
  EXPECT_TRUE(NthResultIs(results, 0, 1));
}

// Tests prefix searching for text search queries.
TEST_F(HistoryQueryTest, TextSearchPrefix) {
  ASSERT_TRUE(history_.get());

  QueryOptions options;
  QueryResults results;

  // Query with a prefix search. Should return matches for "PAGETWO" and
  // "PAGETHREE".
  QueryHistory("PAGET", options, &results);
  EXPECT_EQ(2U, results.size());
  EXPECT_TRUE(NthResultIs(results, 0, 2));
  EXPECT_TRUE(NthResultIs(results, 1, 3));
}

// Tests max_count feature for text search queries.
TEST_F(HistoryQueryTest, TextSearchCount) {
  ASSERT_TRUE(history_.get());

  QueryOptions options;
  QueryResults results;

  // Query all time but with a limit on the number of entries. We should
  // get the N most recent entries.
  options.max_count = 2;
  QueryHistory("some", options, &results);
  EXPECT_EQ(2U, results.size());
  EXPECT_TRUE(NthResultIs(results, 0, 2));
  EXPECT_TRUE(NthResultIs(results, 1, 3));

  // Now query a subset of the pages and limit by N items. "FOO" should match
  // the 2nd & 3rd pages, but we should only get the 3rd one because of the one
  // page max restriction.
  options.max_count = 1;
  QueryHistory("FOO", options, &results);
  EXPECT_EQ(1U, results.size());
  EXPECT_TRUE(NthResultIs(results, 0, 3));
}

// Tests that text search queries can find URLs when they exist only in the
// archived database. This also tests that imported URLs can be found, since
// we use AddPageWithDetails just like the importer.
TEST_F(HistoryQueryTest, TextSearchArchived) {
  ASSERT_TRUE(history_.get());

  URLRows urls_to_add;

  // row1: a year-old visit, destined for the archived database.
  URLRow row1(GURL("http://foo.bar/"));
  row1.set_title(UTF8ToUTF16("archived title same"));
  row1.set_last_visit(Time::Now() - TimeDelta::FromDays(365));
  urls_to_add.push_back(row1);

  // row2: a current visit of the same URL with a different title.
  URLRow row2(GURL("http://foo.bar/"));
  row2.set_title(UTF8ToUTF16("nonarchived title same"));
  row2.set_last_visit(Time::Now());
  urls_to_add.push_back(row2);

  history_->AddPagesWithDetails(urls_to_add, history::SOURCE_BROWSED);

  QueryOptions options;
  QueryResults results;

  // Query all time. The title we get should be the one in the archived and
  // not the most current title (since otherwise highlighting in
  // the title might be wrong).
  QueryHistory("archived", options, &results);
  ASSERT_EQ(1U, results.size());
  EXPECT_TRUE(row1.url() == results[0].url());
  EXPECT_TRUE(row1.title() == results[0].title());

  // Check query is ordered correctly when split between archived and
  // non-archived database.
  QueryHistory("same", options, &results);
  ASSERT_EQ(2U, results.size());
  EXPECT_TRUE(row2.url() == results[0].url());
  EXPECT_TRUE(row2.title() == results[0].title());
  EXPECT_TRUE(row1.url() == results[1].url());
  EXPECT_TRUE(row1.title() == results[1].title());
}

/* TODO(brettw) re-enable this. It is commented out because the current history
   code prohibits adding more than one indexed page with the same URL. When we
   have tiered history, there could be a dupe in the archived history which
   won't get picked up by the deletor and it can happen again. When this is the
   case, we should fix this test to duplicate that situation.

// Tests duplicate collapsing and not in text search situations.
TEST_F(HistoryQueryTest, TextSearchDupes) {
  ASSERT_TRUE(history_.get());

  QueryOptions options;
  QueryResults results;

  QueryHistory("Other", options, &results);
  EXPECT_EQ(1U, results.size());
  EXPECT_TRUE(NthResultIs(results, 0, 4));
}
*/

// Test iterating over pages of results.
TEST_F(HistoryQueryTest, Paging) {
  // Since results are fetched 1 and 2 at a time, entry #0 and #6 will not
  // be de-duplicated.
  int expected_results[] = { 4, 2, 3, 1, 7, 6, 5, 0 };
  TestPaging(std::string(), expected_results, arraysize(expected_results));
}

// Test iterating over pages of results for a text search query.
TEST_F(HistoryQueryTest, TextSearchPaging) {
  // Since results are fetched 1 and 2 at a time, entry #0 and #6 will not
  // be de-duplicated. Entry #4 does not contain the text "title", so it
  // shouldn't appear.
  int expected_results[] = { 2, 3, 1, 7, 6, 5 };
  TestPaging("title", expected_results, arraysize(expected_results));
}

}  // namespace history