/*
 * Copyright (C) 2005, 2006, 2007, 2008 Apple Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB. If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 *
 */

#ifndef WTF_Vector_h
#define WTF_Vector_h

#include "wtf/Alignment.h"
#include "wtf/FastAllocBase.h"
#include "wtf/Noncopyable.h"
#include "wtf/NotFound.h"
#include "wtf/StdLibExtras.h"
#include "wtf/UnusedParam.h"
#include "wtf/VectorTraits.h"
#include <limits>
#include <utility>
#include <string.h>

namespace WTF {

// Capacity used for the first heap allocation when a vector grows from empty.
// Kept at 1 when a memory tool (e.g. a sanitizer) replaces the allocator,
// presumably so out-of-bounds accesses are caught at the smallest size —
// see the MEMORY_TOOL_REPLACES_ALLOCATOR build flag.
#if defined(MEMORY_TOOL_REPLACES_ALLOCATOR)
static const size_t kInitialVectorSize = 1;
#else
static const size_t kInitialVectorSize = 16;
#endif

// The Vector*<bool, T> helper structs below dispatch element-wise operations
// either to per-element C++ semantics (constructor/destructor calls) or to raw
// memory primitives (memset/memcpy/memmove/memcmp), selected at compile time
// from VectorTraits<T> by VectorTypeOperations.

template <bool needsDestruction, typename T>
struct VectorDestructor;

// Trivially destructible elements: destruction is a no-op.
template<typename T>
struct VectorDestructor<false, T>
{
    static void destruct(T*, T*) {}
};

// Elements with a real destructor: destroy each one in [begin, end).
template<typename T>
struct VectorDestructor<true, T>
{
    static void destruct(T* begin, T* end)
    {
        for (T* cur = begin; cur != end; ++cur)
            cur->~T();
    }
};

template <bool needsInitialization, bool canInitializeWithMemset, typename T>
struct VectorInitializer;

// Elements that need no initialization at all.
template<bool ignore, typename T>
struct VectorInitializer<false, ignore, T>
{
    static void initialize(T*, T*) {}
};

// Elements that need per-element default construction.
template<typename T>
struct VectorInitializer<true, false, T>
{
    static void initialize(T* begin, T* end)
    {
        for (T* cur = begin; cur != end; ++cur)
            new (NotNull, cur) T;
    }
};

// Elements whose default state is all-zero bits: initialize with one memset.
template<typename T>
struct VectorInitializer<true, true, T>
{
    static void initialize(T* begin, T* end)
    {
        memset(begin, 0, reinterpret_cast<char*>(end) - reinterpret_cast<char*>(begin));
    }
};

template <bool canMoveWithMemcpy, typename T>
struct VectorMover;

// Elements that must be moved via copy-construct + destruct of the source.
template<typename T>
struct VectorMover<false, T>
{
    static void move(const T* src, const T* srcEnd, T* dst)
    {
        while (src != srcEnd) {
            new (NotNull, dst) T(*src);
            src->~T();
            ++dst;
            ++src;
        }
    }
    // Handles overlapping src/dst ranges by choosing the copy direction:
    // forward when dst is below src, backward otherwise, so source elements
    // are never overwritten before they are moved.
    static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
    {
        if (src > dst)
            move(src, srcEnd, dst);
        else {
            T* dstEnd = dst + (srcEnd - src);
            while (src != srcEnd) {
                --srcEnd;
                --dstEnd;
                new (NotNull, dstEnd) T(*srcEnd);
                srcEnd->~T();
            }
        }
    }
};

// Elements that can be relocated bitwise.
template<typename T>
struct VectorMover<true, T>
{
    static void move(const T* src, const T* srcEnd, T* dst)
    {
        memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
    }
    static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
    {
        // memmove is safe for overlapping ranges, unlike memcpy.
        memmove(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
    }
};

template <bool canCopyWithMemcpy, typename T>
struct VectorCopier;

// Copy into uninitialized storage via per-element copy construction.
template<typename T>
struct VectorCopier<false, T>
{
    static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
    {
        while (src != srcEnd) {
            new (NotNull, dst) T(*src);
            ++dst;
            ++src;
        }
    }
};

// Copy into uninitialized storage bitwise.
template<typename T>
struct VectorCopier<true, T>
{
    static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
    {
        memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
    }
};

template <bool canFillWithMemset, typename T>
struct VectorFiller;

// Fill uninitialized storage via per-element copy construction of |val|.
template<typename T>
struct VectorFiller<false, T>
{
    static void uninitializedFill(T* dst, T* dstEnd, const T& val)
    {
        while (dst != dstEnd) {
            new (NotNull, dst) T(val);
            ++dst;
        }
    }
};

// Fill uninitialized storage with memset; only valid for byte-sized T,
// which the ASSERT below enforces.
template<typename T>
struct VectorFiller<true, T>
{
    static void uninitializedFill(T* dst, T* dstEnd, const T& val)
    {
        ASSERT(sizeof(T) == sizeof(char));
        // Guard against _FORTIFY_SOURCE rejecting a compile-time-known
        // zero-length memset on GCC builds.
#if COMPILER(GCC) && defined(_FORTIFY_SOURCE)
        if (!__builtin_constant_p(dstEnd - dst) || (!(dstEnd - dst)))
#endif
            memset(dst, val, dstEnd - dst);
    }
};

template<bool canCompareWithMemcmp, typename T>
struct VectorComparer;

// Element-wise comparison using operator==.
template<typename T>
struct VectorComparer<false, T>
{
    static bool compare(const T* a, const T* b, size_t size)
    {
        for (size_t i = 0; i < size; ++i)
            if (!(a[i] == b[i]))
                return false;
        return true;
    }
};

// Bitwise comparison for types where representation equality implies
// value equality.
template<typename T>
struct VectorComparer<true, T>
{
    static bool compare(const T* a, const T* b, size_t size)
    {
        return memcmp(a, b, sizeof(T) * size) == 0;
    }
};

// Single dispatch point: selects the memset/memcpy/memmove/memcmp fast paths
// or the per-element slow paths based on VectorTraits<T>.
template<typename T>
struct VectorTypeOperations
{
    static void destruct(T* begin, T* end)
    {
        VectorDestructor<VectorTraits<T>::needsDestruction, T>::destruct(begin, end);
    }

    static void initialize(T* begin, T* end)
    {
        VectorInitializer<VectorTraits<T>::needsInitialization, VectorTraits<T>::canInitializeWithMemset, T>::initialize(begin, end);
    }

    static void move(const T* src, const T* srcEnd, T* dst)
    {
        VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::move(src, srcEnd, dst);
    }

    static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
    {
        VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::moveOverlapping(src, srcEnd, dst);
    }

    static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
    {
        VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(src, srcEnd, dst);
    }

    static void uninitializedFill(T* dst, T* dstEnd, const T& val)
    {
        VectorFiller<VectorTraits<T>::canFillWithMemset, T>::uninitializedFill(dst, dstEnd, val);
    }

    static bool compare(const T* a, const T* b, size_t size)
    {
        return VectorComparer<VectorTraits<T>::canCompareWithMemcmp, T>::compare(a, b, size);
    }
};

// Owns the heap buffer (pointer + capacity) for a Vector. Knows nothing about
// element lifetimes — callers construct/destruct elements themselves.
// m_size is declared here but maintained by Vector.
template<typename T>
class VectorBufferBase {
    WTF_MAKE_NONCOPYABLE(VectorBufferBase);
public:
    // Allocates a fresh buffer of at least |newCapacity| elements; the
    // actual capacity is rounded up to what fastMalloc will hand out.
    // Does not free any previous buffer.
    void allocateBuffer(size_t newCapacity)
    {
        ASSERT(newCapacity);
        // Using "unsigned" is not a limitation because Chromium's max malloc() is 2GB even on 64-bit.
        RELEASE_ASSERT(newCapacity <= std::numeric_limits<unsigned>::max() / sizeof(T));
        size_t sizeToAllocate = fastMallocGoodSize(newCapacity * sizeof(T));
        m_capacity = sizeToAllocate / sizeof(T);
        m_buffer = static_cast<T*>(fastMalloc(sizeToAllocate));
    }

    // realloc is only usable when elements can be relocated bitwise and
    // both the old and new buffers are non-empty.
    bool shouldReallocateBuffer(size_t newCapacity) const
    {
        return VectorTraits<T>::canMoveWithMemcpy && m_capacity && newCapacity;
    }

    void reallocateBuffer(size_t newCapacity)
    {
        ASSERT(shouldReallocateBuffer(newCapacity));
        // Using "unsigned" is not a limitation because Chromium's max malloc() is 2GB even on 64-bit.
        RELEASE_ASSERT(newCapacity <= std::numeric_limits<unsigned>::max() / sizeof(T));
        size_t sizeToAllocate = fastMallocGoodSize(newCapacity * sizeof(T));
        m_capacity = sizeToAllocate / sizeof(T);
        m_buffer = static_cast<T*>(fastRealloc(m_buffer, sizeToAllocate));
    }

    // Frees |bufferToDeallocate|; if it is the current buffer, also resets
    // the pointer/capacity so the object does not dangle.
    void deallocateBuffer(T* bufferToDeallocate)
    {
        if (!bufferToDeallocate)
            return;

        if (m_buffer == bufferToDeallocate) {
            m_buffer = 0;
            m_capacity = 0;
        }

        fastFree(bufferToDeallocate);
    }

    T* buffer() { return m_buffer; }
    const T* buffer() const { return m_buffer; }
    size_t capacity() const { return m_capacity; }

    // Transfers ownership of the heap buffer to the caller.
    T* releaseBuffer()
    {
        T* buffer = m_buffer;
        m_buffer = 0;
        m_capacity = 0;
        return buffer;
    }

protected:
    VectorBufferBase()
        : m_buffer(0)
        , m_capacity(0)
    {
    }

    VectorBufferBase(T* buffer, size_t capacity)
        : m_buffer(buffer)
        , m_capacity(capacity)
    {
    }

    ~VectorBufferBase()
    {
        // FIXME: It would be nice to find a way to ASSERT that m_buffer hasn't leaked here.
    }

    T* m_buffer;
    unsigned m_capacity;
    unsigned m_size;
};

template<typename T, size_t inlineCapacity>
class VectorBuffer;

// Buffer with no inline storage: everything lives on the heap.
template<typename T>
class VectorBuffer<T, 0> : private VectorBufferBase<T> {
private:
    typedef VectorBufferBase<T> Base;
public:
    VectorBuffer()
    {
    }

    VectorBuffer(size_t capacity)
    {
        // Calling malloc(0) might take a lock and may actually do an
        // allocation on some systems.
        if (capacity)
            allocateBuffer(capacity);
    }

    ~VectorBuffer()
    {
        deallocateBuffer(buffer());
    }

    // With no inline storage, swapping is just a pointer/capacity exchange.
    void swap(VectorBuffer<T, 0>& other)
    {
        std::swap(m_buffer, other.m_buffer);
        std::swap(m_capacity, other.m_capacity);
    }

    void restoreInlineBufferIfNeeded() { }

    using Base::allocateBuffer;
    using Base::shouldReallocateBuffer;
    using Base::reallocateBuffer;
    using Base::deallocateBuffer;

    using Base::buffer;
    using Base::capacity;

    using Base::releaseBuffer;

protected:
    using Base::m_size;

private:
    using Base::m_buffer;
    using Base::m_capacity;
};

// Buffer with small-vector optimization: the first inlineCapacity elements
// live in m_inlineBuffer inside the object; larger capacities spill to the
// heap via the base class.
template<typename T, size_t inlineCapacity>
class VectorBuffer : private VectorBufferBase<T> {
    WTF_MAKE_NONCOPYABLE(VectorBuffer);
private:
    typedef VectorBufferBase<T> Base;
public:
    VectorBuffer()
        : Base(inlineBuffer(), inlineCapacity)
    {
    }

    VectorBuffer(size_t capacity)
        : Base(inlineBuffer(), inlineCapacity)
    {
        if (capacity > inlineCapacity)
            Base::allocateBuffer(capacity);
    }

    ~VectorBuffer()
    {
        deallocateBuffer(buffer());
    }

    void allocateBuffer(size_t newCapacity)
    {
        // FIXME: This should ASSERT(!m_buffer) to catch misuse/leaks.
        if (newCapacity > inlineCapacity)
            Base::allocateBuffer(newCapacity);
        else {
            m_buffer = inlineBuffer();
            m_capacity = inlineCapacity;
        }
    }

    void deallocateBuffer(T* bufferToDeallocate)
    {
        // The inline buffer is part of this object and must never be freed.
        if (bufferToDeallocate == inlineBuffer())
            return;
        Base::deallocateBuffer(bufferToDeallocate);
    }

    bool shouldReallocateBuffer(size_t newCapacity) const
    {
        // We cannot reallocate the inline buffer.
        return Base::shouldReallocateBuffer(newCapacity) && std::min(static_cast<size_t>(m_capacity), newCapacity) > inlineCapacity;
    }

    void reallocateBuffer(size_t newCapacity)
    {
        ASSERT(shouldReallocateBuffer(newCapacity));
        Base::reallocateBuffer(newCapacity);
    }

    // Swap must handle four cases depending on which side is currently
    // using its inline buffer: inline contents are exchanged bitwise via
    // WTF::swap of the AlignedBuffer, while heap pointers are re-targeted.
    void swap(VectorBuffer<T, inlineCapacity>& other)
    {
        if (buffer() == inlineBuffer() && other.buffer() == other.inlineBuffer()) {
            WTF::swap(m_inlineBuffer, other.m_inlineBuffer);
            std::swap(m_capacity, other.m_capacity);
        } else if (buffer() == inlineBuffer()) {
            m_buffer = other.m_buffer;
            other.m_buffer = other.inlineBuffer();
            WTF::swap(m_inlineBuffer, other.m_inlineBuffer);
            std::swap(m_capacity, other.m_capacity);
        } else if (other.buffer() == other.inlineBuffer()) {
            other.m_buffer = m_buffer;
            m_buffer = inlineBuffer();
            WTF::swap(m_inlineBuffer, other.m_inlineBuffer);
            std::swap(m_capacity, other.m_capacity);
        } else {
            std::swap(m_buffer, other.m_buffer);
            std::swap(m_capacity, other.m_capacity);
        }
    }

    // After the heap buffer has been released, fall back to the inline
    // buffer so the object remains usable.
    void restoreInlineBufferIfNeeded()
    {
        if (m_buffer)
            return;
        m_buffer = inlineBuffer();
        m_capacity = inlineCapacity;
    }

    using Base::buffer;
    using Base::capacity;

    // Returns 0 when the data is in the inline buffer, since the inline
    // buffer cannot be handed out.
    T* releaseBuffer()
    {
        if (buffer() == inlineBuffer())
            return 0;
        return Base::releaseBuffer();
    }

protected:
    using Base::m_size;

private:
    using Base::m_buffer;
    using Base::m_capacity;

    static const size_t m_inlineBufferSize = inlineCapacity * sizeof(T);
    T* inlineBuffer() { return reinterpret_cast_ptr<T*>(m_inlineBuffer.buffer); }
    const T* inlineBuffer() const { return reinterpret_cast_ptr<const T*>(m_inlineBuffer.buffer); }

    AlignedBuffer<m_inlineBufferSize, WTF_ALIGN_OF(T)> m_inlineBuffer;
};

// WTF's growable array. Like std::vector, but with an optional inline
// capacity (small-vector optimization) and trait-driven memcpy fast paths.
// Indexing via at()/operator[] is bounds-checked with RELEASE_ASSERT.
template<typename T, size_t inlineCapacity = 0>
class Vector : private VectorBuffer<T, inlineCapacity> {
    WTF_MAKE_FAST_ALLOCATED;
private:
    typedef VectorBuffer<T, inlineCapacity> Base;
    typedef VectorTypeOperations<T> TypeOperations;

public:
    typedef T ValueType;

    typedef T* iterator;
    typedef const T* const_iterator;
    typedef std::reverse_iterator<iterator> reverse_iterator;
    typedef std::reverse_iterator<const_iterator> const_reverse_iterator;

    Vector()
    {
        m_size = 0;
    }

    // Constructs a vector of |size| default-initialized elements.
    explicit Vector(size_t size)
        : Base(size)
    {
        m_size = size;
        if (begin())
            TypeOperations::initialize(begin(), end());
    }

    ~Vector()
    {
        if (m_size)
            shrink(0);
    }

    Vector(const Vector&);
    template<size_t otherCapacity>
    Vector(const Vector<T, otherCapacity>&);

    Vector& operator=(const Vector&);
    template<size_t otherCapacity>
    Vector& operator=(const Vector<T, otherCapacity>&);

#if COMPILER_SUPPORTS(CXX_RVALUE_REFERENCES)
    Vector(Vector&&);
    Vector& operator=(Vector&&);
#endif

    size_t size() const { return m_size; }
    size_t capacity() const { return Base::capacity(); }
    bool isEmpty() const { return !size(); }

    // Bounds-checked access: out-of-range indices crash even in release builds.
    T& at(size_t i)
    {
        RELEASE_ASSERT(i < size());
        return Base::buffer()[i];
    }
    const T& at(size_t i) const
    {
        RELEASE_ASSERT(i < size());
        return Base::buffer()[i];
    }

    T& operator[](size_t i) { return at(i); }
    const T& operator[](size_t i) const { return at(i); }

    T* data() { return Base::buffer(); }
    const T* data() const { return Base::buffer(); }

    iterator begin() { return data(); }
    iterator end() { return begin() + m_size; }
    const_iterator begin() const { return data(); }
    const_iterator end() const { return begin() + m_size; }

    reverse_iterator rbegin() { return reverse_iterator(end()); }
    reverse_iterator rend() { return reverse_iterator(begin()); }
    const_reverse_iterator rbegin() const { return const_reverse_iterator(end()); }
    const_reverse_iterator rend() const { return const_reverse_iterator(begin()); }

    T& first() { return at(0); }
    const T& first() const { return at(0); }
    T& last() { return at(size() - 1); }
    const T& last() const { return at(size() - 1); }

    template<typename U> bool contains(const U&) const;
    template<typename U> size_t find(const U&) const;
    template<typename U> size_t reverseFind(const U&) const;

    void shrink(size_t size);
    void grow(size_t size);
    void resize(size_t size);
    void reserveCapacity(size_t newCapacity);
    void reserveInitialCapacity(size_t initialCapacity);
    void shrinkCapacity(size_t newCapacity);
    void shrinkToFit() { shrinkCapacity(size()); }

    void clear() { shrinkCapacity(0); }

    template<typename U> void append(const U*, size_t);
    template<typename U> void append(const U&);
    template<typename U> void uncheckedAppend(const U& val);
    template<size_t otherCapacity> void append(const Vector<T, otherCapacity>&);
    template<typename U, size_t otherCapacity> void appendVector(const Vector<U, otherCapacity>&);

    template<typename U> void insert(size_t position, const U*, size_t);
    template<typename U> void insert(size_t position, const U&);
    template<typename U, size_t c> void insert(size_t position, const Vector<U, c>&);

    template<typename U> void prepend(const U*, size_t);
    template<typename U> void prepend(const U&);
    template<typename U, size_t c> void prepend(const Vector<U, c>&);

    void remove(size_t position);
    void remove(size_t position, size_t length);

    void removeLast()
    {
        ASSERT(!isEmpty());
        shrink(size() - 1);
    }

    // Constructs a vector of |size| copies of |val|.
    Vector(size_t size, const T& val)
        : Base(size)
    {
        m_size = size;
        if (begin())
            TypeOperations::uninitializedFill(begin(), end(), val);
    }

    void fill(const T&, size_t);
    void fill(const T& val) { fill(val, size()); }

    template<typename Iterator> void appendRange(Iterator start, Iterator end);

    T* releaseBuffer();

    void swap(Vector<T, inlineCapacity>& other)
    {
        std::swap(m_size, other.m_size);
        Base::swap(other);
    }

    void reverse();

private:
    void expandCapacity(size_t newMinCapacity);
    const T* expandCapacity(size_t newMinCapacity, const T*);
    template<typename U> U* expandCapacity(size_t newMinCapacity, U*);
    template<typename U> void appendSlowCase(const U&);

    using Base::m_size;
    using Base::buffer;
    using Base::capacity;
    using Base::swap;
    using Base::allocateBuffer;
    using Base::deallocateBuffer;
    using Base::shouldReallocateBuffer;
    using Base::reallocateBuffer;
    using Base::restoreInlineBufferIfNeeded;
    using Base::releaseBuffer;
};

template<typename T, size_t inlineCapacity>
Vector<T, inlineCapacity>::Vector(const Vector& other)
    : Base(other.capacity())
{
    m_size = other.size();
    if (begin())
        TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
}

template<typename T, size_t inlineCapacity>
template<size_t otherCapacity>
Vector<T, inlineCapacity>::Vector(const Vector<T, otherCapacity>& other)
    : Base(other.capacity())
{
    m_size = other.size();
    if (begin())
        TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
}

// Copy assignment: overwrite the already-constructed prefix with std::copy,
// then copy-construct the remainder into uninitialized storage.
template<typename T, size_t inlineCapacity>
Vector<T, inlineCapacity>& Vector<T, inlineCapacity>::operator=(const Vector<T, inlineCapacity>& other)
{
    if (&other == this)
        return *this;

    if (size() > other.size())
        shrink(other.size());
    else if (other.size() > capacity()) {
        clear();
        reserveCapacity(other.size());
        ASSERT(begin());
    }

    // Works around an assert in VS2010. See https://connect.microsoft.com/VisualStudio/feedback/details/558044/std-copy-should-not-check-dest-when-first-last
#if COMPILER(MSVC) && defined(_ITERATOR_DEBUG_LEVEL) && _ITERATOR_DEBUG_LEVEL
    if (!begin())
        return *this;
#endif

    std::copy(other.begin(), other.begin() + size(), begin());
    TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
    m_size = other.size();

    return *this;
}

inline bool typelessPointersAreEqual(const void* a, const void* b) { return a == b; }

template<typename T, size_t inlineCapacity>
template<size_t otherCapacity>
Vector<T, inlineCapacity>& Vector<T, inlineCapacity>::operator=(const Vector<T, otherCapacity>& other)
{
    // If the inline capacities match, we should call the more specific
    // template. If the inline capacities don't match, the two objects
    // shouldn't be allocated the same address.
    ASSERT(!typelessPointersAreEqual(&other, this));

    if (size() > other.size())
        shrink(other.size());
    else if (other.size() > capacity()) {
        clear();
        reserveCapacity(other.size());
        ASSERT(begin());
    }

    // Works around an assert in VS2010. See https://connect.microsoft.com/VisualStudio/feedback/details/558044/std-copy-should-not-check-dest-when-first-last
#if COMPILER(MSVC) && defined(_ITERATOR_DEBUG_LEVEL) && _ITERATOR_DEBUG_LEVEL
    if (!begin())
        return *this;
#endif

    std::copy(other.begin(), other.begin() + size(), begin());
    TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
    m_size = other.size();

    return *this;
}

#if COMPILER_SUPPORTS(CXX_RVALUE_REFERENCES)
template<typename T, size_t inlineCapacity>
Vector<T, inlineCapacity>::Vector(Vector<T, inlineCapacity>&& other)
{
    m_size = 0;
    // It's a little weird to implement a move constructor using swap but this way we
    // don't have to add a move constructor to VectorBuffer.
    swap(other);
}

template<typename T, size_t inlineCapacity>
Vector<T, inlineCapacity>& Vector<T, inlineCapacity>::operator=(Vector<T, inlineCapacity>&& other)
{
    swap(other);
    return *this;
}
#endif

template<typename T, size_t inlineCapacity>
template<typename U>
bool Vector<T, inlineCapacity>::contains(const U& value) const
{
    return find(value) != notFound;
}

// Returns the index of the first element equal to |value|, or notFound.
template<typename T, size_t inlineCapacity>
template<typename U>
size_t Vector<T, inlineCapacity>::find(const U& value) const
{
    const T* b = begin();
    const T* e = end();
    for (const T* iter = b; iter < e; ++iter) {
        if (*iter == value)
            return iter - b;
    }
    return notFound;
}

// Returns the index of the last element equal to |value|, or notFound.
template<typename T, size_t inlineCapacity>
template<typename U>
size_t Vector<T, inlineCapacity>::reverseFind(const U& value) const
{
    const T* b = begin();
    const T* iter = end();
    while (iter > b) {
        --iter;
        if (*iter == value)
            return iter - b;
    }
    return notFound;
}

// Resizes to |newSize| elements, each a copy of |val|: existing elements are
// overwritten with std::fill, new tail elements are fill-constructed.
template<typename T, size_t inlineCapacity>
void Vector<T, inlineCapacity>::fill(const T& val, size_t newSize)
{
    if (size() > newSize)
        shrink(newSize);
    else if (newSize > capacity()) {
        clear();
        reserveCapacity(newSize);
        ASSERT(begin());
    }

    std::fill(begin(), end(), val);
    TypeOperations::uninitializedFill(end(), begin() + newSize, val);
    m_size = newSize;
}

template<typename T, size_t inlineCapacity>
template<typename Iterator>
void Vector<T, inlineCapacity>::appendRange(Iterator start, Iterator end)
{
    for (Iterator it = start; it != end; ++it)
        append(*it);
}

// Growth policy: at least kInitialVectorSize, otherwise 1.25x the current
// capacity (plus one), or newMinCapacity if that is larger.
template<typename T, size_t inlineCapacity>
void Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity)
{
    reserveCapacity(std::max(newMinCapacity, std::max(static_cast<size_t>(kInitialVectorSize), capacity() + capacity() / 4 + 1)));
}

// Variant that re-bases |ptr| if it points into this vector's own storage,
// since growing may move the buffer and invalidate the pointer.
template<typename T, size_t inlineCapacity>
const T* Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity, const T* ptr)
{
    if (ptr < begin() || ptr >= end()) {
        expandCapacity(newMinCapacity);
        return ptr;
    }
    size_t index = ptr - begin();
    expandCapacity(newMinCapacity);
    return begin() + index;
}

// Variant for pointers of unrelated type U, which cannot alias the storage.
template<typename T, size_t inlineCapacity> template<typename U>
inline U* Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity, U* ptr)
{
    expandCapacity(newMinCapacity);
    return ptr;
}

// Resize in either direction: destruct the tail when shrinking,
// default-initialize new elements when growing.
template<typename T, size_t inlineCapacity>
inline void Vector<T, inlineCapacity>::resize(size_t size)
{
    if (size <= m_size)
        TypeOperations::destruct(begin() + size, end());
    else {
        if (size > capacity())
            expandCapacity(size);
        if (begin())
            TypeOperations::initialize(end(), begin() + size);
    }

    m_size = size;
}

// Shrink only: destructs the tail; capacity is left unchanged.
template<typename T, size_t inlineCapacity>
void Vector<T, inlineCapacity>::shrink(size_t size)
{
    ASSERT(size <= m_size);
    TypeOperations::destruct(begin() + size, end());
    m_size = size;
}

// Grow only: default-initializes the new tail.
template<typename T, size_t inlineCapacity>
void Vector<T, inlineCapacity>::grow(size_t size)
{
    ASSERT(size >= m_size);
    if (size > capacity())
        expandCapacity(size);
    if (begin())
        TypeOperations::initialize(end(), begin() + size);
    m_size = size;
}

// Allocates a larger buffer and moves the existing elements into it.
template<typename T, size_t inlineCapacity>
void Vector<T, inlineCapacity>::reserveCapacity(size_t newCapacity)
{
    if (newCapacity <= capacity())
        return;
    T* oldBuffer = begin();
    T* oldEnd = end();
    Base::allocateBuffer(newCapacity);
    if (begin())
        TypeOperations::move(oldBuffer, oldEnd, begin());
    Base::deallocateBuffer(oldBuffer);
}

// Like reserveCapacity but only valid on an empty, never-grown vector,
// so no element moving is needed.
template<typename T, size_t inlineCapacity>
inline void Vector<T, inlineCapacity>::reserveInitialCapacity(size_t initialCapacity)
{
    ASSERT(!m_size);
    ASSERT(capacity() == inlineCapacity);
    if (initialCapacity > inlineCapacity)
        Base::allocateBuffer(initialCapacity);
}

// Reduces capacity (discarding elements past |newCapacity| if necessary),
// preferring realloc when the element type allows it, and falling back to
// the inline buffer when the heap buffer is dropped entirely.
template<typename T, size_t inlineCapacity>
void Vector<T, inlineCapacity>::shrinkCapacity(size_t newCapacity)
{
    if (newCapacity >= capacity())
        return;

    if (newCapacity < size())
        shrink(newCapacity);

    T* oldBuffer = begin();
    if (newCapacity > 0) {
        if (Base::shouldReallocateBuffer(newCapacity)) {
            Base::reallocateBuffer(newCapacity);
            return;
        }

        T* oldEnd = end();
        Base::allocateBuffer(newCapacity);
        if (begin() != oldBuffer)
            TypeOperations::move(oldBuffer, oldEnd, begin());
    }

    Base::deallocateBuffer(oldBuffer);
    Base::restoreInlineBufferIfNeeded();
}

// Templatizing these is better than just letting the conversion happen implicitly,
// because for instance it allows a PassRefPtr to be appended to a RefPtr vector
// without refcount thrash.

// Appends |dataSize| elements copied from |data|. |data| may point into this
// vector; expandCapacity re-bases it if the buffer moves. The RELEASE_ASSERT
// catches size_t overflow of m_size + dataSize.
template<typename T, size_t inlineCapacity> template<typename U>
void Vector<T, inlineCapacity>::append(const U* data, size_t dataSize)
{
    size_t newSize = m_size + dataSize;
    if (newSize > capacity()) {
        data = expandCapacity(newSize, data);
        ASSERT(begin());
    }
    RELEASE_ASSERT(newSize >= m_size);
    T* dest = end();
    for (size_t i = 0; i < dataSize; ++i)
        new (NotNull, &dest[i]) T(data[i]);
    m_size = newSize;
}

// Fast path for single-element append: no capacity check branching beyond
// the LIKELY test; the growth path is kept out of line in appendSlowCase.
template<typename T, size_t inlineCapacity> template<typename U>
ALWAYS_INLINE void Vector<T, inlineCapacity>::append(const U& val)
{
    if (LIKELY(size() != capacity())) {
        new (NotNull, end()) T(val);
        ++m_size;
        return;
    }

    appendSlowCase(val);
}

template<typename T, size_t inlineCapacity> template<typename U>
void Vector<T, inlineCapacity>::appendSlowCase(const U& val)
{
    ASSERT(size() == capacity());

    // |val| may live inside this vector; expandCapacity re-bases the pointer
    // if growth moves the buffer.
    const U* ptr = &val;
    ptr = expandCapacity(size() + 1, ptr);
    ASSERT(begin());

    new (NotNull, end()) T(*ptr);
    ++m_size;
}

// This version of append saves a branch in the case where you know that the
// vector's capacity is large enough for the append to succeed.

template<typename T, size_t inlineCapacity> template<typename U>
inline void Vector<T, inlineCapacity>::uncheckedAppend(const U& val)
{
    ASSERT(size() < capacity());
    const U* ptr = &val;
    new (NotNull, end()) T(*ptr);
    ++m_size;
}

// This method should not be called append, a better name would be appendElements.
// It could also be eliminated entirely, and call sites could just use
// appendRange(val.begin(), val.end()).
965 template<typename T, size_t inlineCapacity> template<size_t otherCapacity> 966 inline void Vector<T, inlineCapacity>::append(const Vector<T, otherCapacity>& val) 967 { 968 append(val.begin(), val.size()); 969 } 970 971 template<typename T, size_t inlineCapacity> template<typename U, size_t otherCapacity> 972 inline void Vector<T, inlineCapacity>::appendVector(const Vector<U, otherCapacity>& val) 973 { 974 append(val.begin(), val.size()); 975 } 976 977 template<typename T, size_t inlineCapacity> template<typename U> 978 void Vector<T, inlineCapacity>::insert(size_t position, const U* data, size_t dataSize) 979 { 980 RELEASE_ASSERT(position <= size()); 981 size_t newSize = m_size + dataSize; 982 if (newSize > capacity()) { 983 data = expandCapacity(newSize, data); 984 ASSERT(begin()); 985 } 986 RELEASE_ASSERT(newSize >= m_size); 987 T* spot = begin() + position; 988 TypeOperations::moveOverlapping(spot, end(), spot + dataSize); 989 for (size_t i = 0; i < dataSize; ++i) 990 new (NotNull, &spot[i]) T(data[i]); 991 m_size = newSize; 992 } 993 994 template<typename T, size_t inlineCapacity> template<typename U> 995 inline void Vector<T, inlineCapacity>::insert(size_t position, const U& val) 996 { 997 RELEASE_ASSERT(position <= size()); 998 const U* data = &val; 999 if (size() == capacity()) { 1000 data = expandCapacity(size() + 1, data); 1001 ASSERT(begin()); 1002 } 1003 T* spot = begin() + position; 1004 TypeOperations::moveOverlapping(spot, end(), spot + 1); 1005 new (NotNull, spot) T(*data); 1006 ++m_size; 1007 } 1008 1009 template<typename T, size_t inlineCapacity> template<typename U, size_t c> 1010 inline void Vector<T, inlineCapacity>::insert(size_t position, const Vector<U, c>& val) 1011 { 1012 insert(position, val.begin(), val.size()); 1013 } 1014 1015 template<typename T, size_t inlineCapacity> template<typename U> 1016 void Vector<T, inlineCapacity>::prepend(const U* data, size_t dataSize) 1017 { 1018 insert(0, data, dataSize); 1019 } 1020 1021 
template<typename T, size_t inlineCapacity> template<typename U> 1022 inline void Vector<T, inlineCapacity>::prepend(const U& val) 1023 { 1024 insert(0, val); 1025 } 1026 1027 template<typename T, size_t inlineCapacity> template<typename U, size_t c> 1028 inline void Vector<T, inlineCapacity>::prepend(const Vector<U, c>& val) 1029 { 1030 insert(0, val.begin(), val.size()); 1031 } 1032 1033 template<typename T, size_t inlineCapacity> 1034 inline void Vector<T, inlineCapacity>::remove(size_t position) 1035 { 1036 RELEASE_ASSERT(position < size()); 1037 T* spot = begin() + position; 1038 spot->~T(); 1039 TypeOperations::moveOverlapping(spot + 1, end(), spot); 1040 --m_size; 1041 } 1042 1043 template<typename T, size_t inlineCapacity> 1044 inline void Vector<T, inlineCapacity>::remove(size_t position, size_t length) 1045 { 1046 ASSERT_WITH_SECURITY_IMPLICATION(position <= size()); 1047 RELEASE_ASSERT(position + length <= size()); 1048 T* beginSpot = begin() + position; 1049 T* endSpot = beginSpot + length; 1050 TypeOperations::destruct(beginSpot, endSpot); 1051 TypeOperations::moveOverlapping(endSpot, end(), beginSpot); 1052 m_size -= length; 1053 } 1054 1055 template<typename T, size_t inlineCapacity> 1056 inline void Vector<T, inlineCapacity>::reverse() 1057 { 1058 for (size_t i = 0; i < m_size / 2; ++i) 1059 std::swap(at(i), at(m_size - 1 - i)); 1060 } 1061 1062 template<typename T, size_t inlineCapacity> 1063 inline T* Vector<T, inlineCapacity>::releaseBuffer() 1064 { 1065 T* buffer = Base::releaseBuffer(); 1066 if (inlineCapacity && !buffer && m_size) { 1067 // If the vector had some data, but no buffer to release, 1068 // that means it was using the inline buffer. In that case, 1069 // we create a brand new buffer so the caller always gets one. 
1070 size_t bytes = m_size * sizeof(T); 1071 buffer = static_cast<T*>(fastMalloc(bytes)); 1072 memcpy(buffer, data(), bytes); 1073 } 1074 m_size = 0; 1075 return buffer; 1076 } 1077 1078 template<typename T, size_t inlineCapacity> 1079 void deleteAllValues(const Vector<T, inlineCapacity>& collection) 1080 { 1081 typedef typename Vector<T, inlineCapacity>::const_iterator iterator; 1082 iterator end = collection.end(); 1083 for (iterator it = collection.begin(); it != end; ++it) 1084 delete *it; 1085 } 1086 1087 template<typename T, size_t inlineCapacity> 1088 inline void swap(Vector<T, inlineCapacity>& a, Vector<T, inlineCapacity>& b) 1089 { 1090 a.swap(b); 1091 } 1092 1093 template<typename T, size_t inlineCapacity> 1094 bool operator==(const Vector<T, inlineCapacity>& a, const Vector<T, inlineCapacity>& b) 1095 { 1096 if (a.size() != b.size()) 1097 return false; 1098 1099 return VectorTypeOperations<T>::compare(a.data(), b.data(), a.size()); 1100 } 1101 1102 template<typename T, size_t inlineCapacity> 1103 inline bool operator!=(const Vector<T, inlineCapacity>& a, const Vector<T, inlineCapacity>& b) 1104 { 1105 return !(a == b); 1106 } 1107 1108 } // namespace WTF 1109 1110 using WTF::Vector; 1111 1112 #endif // WTF_Vector_h 1113