/*
 * Copyright (C) 2005, 2006, 2007, 2008 Apple Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB.  If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 *
 */

#ifndef WTF_Vector_h
#define WTF_Vector_h

#include "wtf/Alignment.h"
#include "wtf/FastAllocBase.h"
#include "wtf/Noncopyable.h"
#include "wtf/NotFound.h"
#include "wtf/PartitionAlloc.h"
#include "wtf/QuantizedAllocation.h"
#include "wtf/StdLibExtras.h"
#include "wtf/VectorTraits.h"
#include "wtf/WTF.h"
#include <string.h>
#include <utility>

namespace WTF {

// Initial heap capacity used when a Vector with no inline storage first
// grows.  When a memory tool (e.g. ASan) replaces the allocator, start at 1
// so out-of-bounds accesses are caught at the smallest possible allocation.
#if defined(MEMORY_TOOL_REPLACES_ALLOCATOR)
static const size_t kInitialVectorSize = 1;
#else
#ifndef WTF_VECTOR_INITIAL_SIZE
#define WTF_VECTOR_INITIAL_SIZE 4
#endif
static const size_t kInitialVectorSize = WTF_VECTOR_INITIAL_SIZE;
#endif

// The helper structs below (VectorDestructor, VectorInitializer, VectorMover,
// VectorCopier, VectorFiller, VectorComparer) compile-time dispatch on flags
// from VectorTraits<T>: element types that are safe to handle with
// memset/memcpy/memmove/memcmp get the fast raw-memory path, all other types
// get per-element construction, destruction and copying.

template <bool needsDestruction, typename T>
struct VectorDestructor;

// Trivially destructible elements: destruction is a no-op.
template<typename T>
struct VectorDestructor<false, T>
{
    static void destruct(T*, T*) {}
};

// Elements with a non-trivial destructor: destroy each element in
// [begin, end).
template<typename T>
struct VectorDestructor<true, T>
{
    static void destruct(T* begin, T* end)
    {
        for (T* cur = begin; cur != end; ++cur)
            cur->~T();
    }
};

template <bool needsInitialization, bool canInitializeWithMemset, typename T>
struct VectorInitializer;

// Elements that need no initialization: leave the memory untouched.
template<bool ignore, typename T>
struct VectorInitializer<false, ignore, T>
{
    static void initialize(T*, T*) {}
};

// Elements that need initialization but cannot be zero-filled:
// default-construct each one in place.
template<typename T>
struct VectorInitializer<true, false, T>
{
    static void initialize(T* begin, T* end)
    {
        for (T* cur = begin; cur != end; ++cur)
            new (NotNull, cur) T;
    }
};

// Elements whose default state is all-zero bytes: initialize with one memset.
template<typename T>
struct VectorInitializer<true, true, T>
{
    static void initialize(T* begin, T* end)
    {
        memset(begin, 0, reinterpret_cast<char*>(end) - reinterpret_cast<char*>(begin));
    }
};

template <bool canMoveWithMemcpy, typename T>
struct VectorMover;

// Generic move: copy-construct into the destination, then destroy the source.
// (Pre-rvalue-reference "move"; the source range is left destructed.)
template<typename T>
struct VectorMover<false, T>
{
    static void move(const T* src, const T* srcEnd, T* dst)
    {
        while (src != srcEnd) {
            new (NotNull, dst) T(*src);
            src->~T();
            ++dst;
            ++src;
        }
    }
    // Handles overlapping ranges: iterate forward when moving down in memory,
    // backward when moving up, so source elements are never overwritten
    // before they are read.
    static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
    {
        if (src > dst)
            move(src, srcEnd, dst);
        else {
            T* dstEnd = dst + (srcEnd - src);
            while (src != srcEnd) {
                --srcEnd;
                --dstEnd;
                new (NotNull, dstEnd) T(*srcEnd);
                srcEnd->~T();
            }
        }
    }
};

// Trivially movable elements: raw byte copy; memmove for overlapping ranges.
template<typename T>
struct VectorMover<true, T>
{
    static void move(const T* src, const T* srcEnd, T* dst)
    {
        memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
    }
    static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
    {
        memmove(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
    }
};

template <bool canCopyWithMemcpy, typename T>
struct VectorCopier;

// Generic copy into uninitialized memory: placement-new each element.
template<typename T>
struct VectorCopier<false, T>
{
    static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
    {
        while (src != srcEnd) {
            new (NotNull, dst) T(*src);
            ++dst;
            ++src;
        }
    }
};

// Trivially copyable elements: one memcpy.
template<typename T>
struct VectorCopier<true, T>
{
    static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
    {
        memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
    }
};

template <bool canFillWithMemset, typename T>
struct VectorFiller;

// Generic fill into uninitialized memory: copy-construct the value repeatedly.
template<typename T>
struct VectorFiller<false, T>
{
    static void uninitializedFill(T* dst, T* dstEnd, const T& val)
    {
        while (dst != dstEnd) {
            new (NotNull, dst) T(val);
            ++dst;
        }
    }
};

// Byte-sized elements: fill with memset.  Only valid for one-byte types,
// which the COMPILE_ASSERT enforces.
template<typename T>
struct VectorFiller<true, T>
{
    static void uninitializedFill(T* dst, T* dstEnd, const T& val)
    {
        COMPILE_ASSERT(sizeof(T) == sizeof(char), Size_of_type_should_be_equal_to_one);
#if COMPILER(GCC) && defined(_FORTIFY_SOURCE)
        // GCC fortify workaround: skip the call for constant non-zero sizes
        // that would otherwise trip a spurious _FORTIFY_SOURCE warning.
        if (!__builtin_constant_p(dstEnd - dst) || (!(dstEnd - dst)))
#endif
            memset(dst, val, dstEnd - dst);
    }
};

template<bool canCompareWithMemcmp, typename T>
struct VectorComparer;

// Generic comparison: element-wise operator==.
template<typename T>
struct VectorComparer<false, T>
{
    static bool compare(const T* a, const T* b, size_t size)
    {
        for (size_t i = 0; i < size; ++i)
            if (!(a[i] == b[i]))
                return false;
        return true;
    }
};

// Bitwise-comparable elements: one memcmp.
template<typename T>
struct VectorComparer<true, T>
{
    static bool compare(const T* a, const T* b, size_t size)
    {
        return memcmp(a, b, sizeof(T) * size) == 0;
    }
};

// Facade that selects the right helper above based on VectorTraits<T>.
// Vector itself only ever calls through this struct.
template<typename T>
struct VectorTypeOperations
{
    static void destruct(T* begin, T* end)
    {
        VectorDestructor<VectorTraits<T>::needsDestruction, T>::destruct(begin, end);
    }

    static void initialize(T* begin, T* end)
    {
        VectorInitializer<VectorTraits<T>::needsInitialization, VectorTraits<T>::canInitializeWithMemset, T>::initialize(begin, end);
    }

    static void move(const T* src, const T* srcEnd, T* dst)
    {
        VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::move(src, srcEnd, dst);
    }

    static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
    {
        VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::moveOverlapping(src, srcEnd, dst);
    }

    static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
    {
        VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(src, srcEnd, dst);
    }

    static void uninitializedFill(T* dst, T* dstEnd, const T& val)
    {
        VectorFiller<VectorTraits<T>::canFillWithMemset, T>::uninitializedFill(dst, dstEnd, val);
    }

    static bool compare(const T* a, const T* b, size_t size)
    {
        return VectorComparer<VectorTraits<T>::canCompareWithMemcmp, T>::compare(a, b, size);
    }
};

// Common heap-buffer state shared by both VectorBuffer specializations.
// Allocations go through PartitionAlloc's buffer partition, and requested
// capacities are rounded up to the allocator's quantized bucket size so the
// slack becomes usable capacity.
template<typename T>
class VectorBufferBase {
    WTF_MAKE_NONCOPYABLE(VectorBufferBase);
public:
    void allocateBuffer(size_t newCapacity)
    {
        ASSERT(newCapacity);
        // Guards newCapacity * sizeof(T) against overflow / oversized
        // allocations before computing the allocation size.
        RELEASE_ASSERT(newCapacity <= QuantizedAllocation::kMaxUnquantizedAllocation / sizeof(T));
        size_t sizeToAllocate = allocationSize(newCapacity);
        // Capacity reflects the full quantized allocation, not just what was
        // asked for.
        m_capacity = sizeToAllocate / sizeof(T);
        m_buffer = static_cast<T*>(partitionAllocGeneric(Partitions::getBufferPartition(), sizeToAllocate));
    }

    size_t allocationSize(size_t capacity) const
    {
        return QuantizedAllocation::quantizedSize(capacity * sizeof(T));
    }

    T* buffer() { return m_buffer; }
    const T* buffer() const { return m_buffer; }
    size_t capacity() const { return m_capacity; }

protected:
    VectorBufferBase()
        : m_buffer(0)
        , m_capacity(0)
    {
    }

    VectorBufferBase(T* buffer, size_t capacity)
        : m_buffer(buffer)
        , m_capacity(capacity)
    {
    }

    // Deliberately does not free m_buffer; the derived buffer's destruct()
    // (called from ~Vector) owns deallocation.
    ~VectorBufferBase()
    {
    }

    T* m_buffer;
    // Stored as unsigned (not size_t) to keep the object small; m_size lives
    // here too so Vector and its buffer share one contiguous header.
    unsigned m_capacity;
    unsigned m_size;
};

template<typename T, size_t inlineCapacity>
class VectorBuffer;

// Specialization for inlineCapacity == 0: a purely heap-backed buffer.
template<typename T>
class VectorBuffer<T, 0> : private VectorBufferBase<T> {
private:
    typedef VectorBufferBase<T> Base;
public:
    VectorBuffer()
    {
    }

    VectorBuffer(size_t capacity)
    {
        // Calling malloc(0) might take a lock and may actually do an
        // allocation on some systems.
        if (capacity)
            allocateBuffer(capacity);
    }

    ~VectorBuffer()
    {
    }

    // Frees the heap buffer.  Called from ~Vector rather than from this
    // destructor so Vector controls destruction order.
    void destruct()
    {
        deallocateBuffer(m_buffer);
        m_buffer = 0;
    }

    void deallocateBuffer(T* bufferToDeallocate)
    {
        if (LIKELY(bufferToDeallocate != 0))
            partitionFreeGeneric(Partitions::getBufferPartition(), bufferToDeallocate);
    }

    // Forget the buffer without freeing it (caller has taken ownership or
    // already freed it).
    void resetBufferPointer()
    {
        m_buffer = 0;
        m_capacity = 0;
    }

    // Swaps buffer pointer and capacity only; m_size is swapped by
    // Vector::swap.
    void swap(VectorBuffer<T, 0>& other)
    {
        std::swap(m_buffer, other.m_buffer);
        std::swap(m_capacity, other.m_capacity);
    }

    using Base::allocateBuffer;
    using Base::allocationSize;

    using Base::buffer;
    using Base::capacity;

protected:
    using Base::m_size;

private:
    using Base::m_buffer;
    using Base::m_capacity;
};

// General case: a buffer with inlineCapacity elements of storage embedded in
// the object itself; falls back to the heap when that capacity is exceeded.
template<typename T, size_t inlineCapacity>
class VectorBuffer : private VectorBufferBase<T> {
    WTF_MAKE_NONCOPYABLE(VectorBuffer);
private:
    typedef VectorBufferBase<T> Base;
public:
    VectorBuffer()
        : Base(inlineBuffer(), inlineCapacity)
    {
    }

    VectorBuffer(size_t capacity)
        : Base(inlineBuffer(), inlineCapacity)
    {
        if (capacity > inlineCapacity)
            Base::allocateBuffer(capacity);
    }

    ~VectorBuffer()
    {
    }

    void destruct()
    {
        deallocateBuffer(m_buffer);
        m_buffer = 0;
    }

    // The inline buffer is part of this object and must never be freed.
    void deallocateBuffer(T* bufferToDeallocate)
    {
        if (LIKELY(bufferToDeallocate == inlineBuffer()))
            return;
        partitionFreeGeneric(Partitions::getBufferPartition(), bufferToDeallocate);
    }

    // Fall back to the inline storage.
    void resetBufferPointer()
    {
        m_buffer = inlineBuffer();
        m_capacity = inlineCapacity;
    }

    void allocateBuffer(size_t newCapacity)
    {
        // FIXME: This should ASSERT(!m_buffer) to catch misuse/leaks.
        if (newCapacity > inlineCapacity)
            Base::allocateBuffer(newCapacity);
        else
            resetBufferPointer();
    }

    size_t allocationSize(size_t capacity) const
    {
        if (capacity <= inlineCapacity)
            return m_inlineBufferSize;
        return Base::allocationSize(capacity);
    }

    // Four cases depending on whether each side currently uses its inline
    // storage.  Inline contents are exchanged bytewise via WTF::swap on the
    // AlignedBuffer, which assumes the elements tolerate a raw byte swap —
    // NOTE(review): presumably guaranteed by VectorTraits for types stored in
    // inline-capacity vectors; confirm before relying on it for new types.
    void swap(VectorBuffer<T, inlineCapacity>& other)
    {
        if (buffer() == inlineBuffer() && other.buffer() == other.inlineBuffer()) {
            WTF::swap(m_inlineBuffer, other.m_inlineBuffer);
            std::swap(m_capacity, other.m_capacity);
        } else if (buffer() == inlineBuffer()) {
            // We are inline, other is on the heap: take other's heap buffer
            // and hand our inline bytes over.
            m_buffer = other.m_buffer;
            other.m_buffer = other.inlineBuffer();
            WTF::swap(m_inlineBuffer, other.m_inlineBuffer);
            std::swap(m_capacity, other.m_capacity);
        } else if (other.buffer() == other.inlineBuffer()) {
            // Mirror image of the case above.
            other.m_buffer = m_buffer;
            m_buffer = inlineBuffer();
            WTF::swap(m_inlineBuffer, other.m_inlineBuffer);
            std::swap(m_capacity, other.m_capacity);
        } else {
            // Both on the heap: plain pointer swap.
            std::swap(m_buffer, other.m_buffer);
            std::swap(m_capacity, other.m_capacity);
        }
    }

    using Base::buffer;
    using Base::capacity;

protected:
    using Base::m_size;

private:
    using Base::m_buffer;
    using Base::m_capacity;

    static const size_t m_inlineBufferSize = inlineCapacity * sizeof(T);
    T* inlineBuffer() { return reinterpret_cast_ptr<T*>(m_inlineBuffer.buffer); }
    const T* inlineBuffer() const { return reinterpret_cast_ptr<const T*>(m_inlineBuffer.buffer); }

    // Raw, suitably aligned storage for inlineCapacity elements.
    AlignedBuffer<m_inlineBufferSize, WTF_ALIGN_OF(T)> m_inlineBuffer;
};

// WTF's growable array.  Elements live either in inline storage (when
// inlineCapacity > 0) or in a PartitionAlloc-backed heap buffer, and all
// element manipulation is dispatched through VectorTypeOperations so trivial
// types take raw-memory fast paths.
template<typename T, size_t inlineCapacity = 0>
class Vector : private VectorBuffer<T, inlineCapacity> {
    WTF_MAKE_FAST_ALLOCATED;
private:
    typedef VectorBuffer<T, inlineCapacity> Base;
    typedef VectorTypeOperations<T> TypeOperations;
public:
    typedef T ValueType;

    typedef T* iterator;
    typedef const T* const_iterator;
    typedef std::reverse_iterator<iterator> reverse_iterator;
    typedef std::reverse_iterator<const_iterator> const_reverse_iterator;

    Vector()
    {
        m_size = 0;
    }

    // Creates a vector of `size` default-initialized elements.
    explicit Vector(size_t size)
        : Base(size)
    {
        m_size = size;
        TypeOperations::initialize(begin(), end());
    }

    ~Vector()
    {
        // Fast path: a heap-only vector that never allocated has nothing to
        // destroy or free.
        if (!inlineCapacity) {
            if (LIKELY(!Base::buffer()))
                return;
        }
        if (LIKELY(m_size))
            shrink(0);

        Base::destruct();
    }

    Vector(const Vector&);
    template<size_t otherCapacity>
    explicit Vector(const Vector<T, otherCapacity>&);

    Vector& operator=(const Vector&);
    template<size_t otherCapacity>
    Vector& operator=(const Vector<T, otherCapacity>&);

#if COMPILER_SUPPORTS(CXX_RVALUE_REFERENCES)
    Vector(Vector&&);
    Vector& operator=(Vector&&);
#endif

    size_t size() const { return m_size; }
    size_t capacity() const { return Base::capacity(); }
    bool isEmpty() const { return !size(); }

    // Bounds-checked element access; the RELEASE_ASSERT fires even in
    // release builds (security hardening).
    T& at(size_t i)
    {
        RELEASE_ASSERT(i < size());
        return Base::buffer()[i];
    }
    const T& at(size_t i) const
    {
        RELEASE_ASSERT(i < size());
        return Base::buffer()[i];
    }

    T& operator[](size_t i) { return at(i); }
    const T& operator[](size_t i) const { return at(i); }

    T* data() { return Base::buffer(); }
    const T* data() const { return Base::buffer(); }

    iterator begin() { return data(); }
    iterator end() { return begin() + m_size; }
    const_iterator begin() const { return data(); }
    const_iterator end() const { return begin() + m_size; }

    reverse_iterator rbegin() { return reverse_iterator(end()); }
    reverse_iterator rend() { return reverse_iterator(begin()); }
    const_reverse_iterator rbegin() const { return const_reverse_iterator(end()); }
    const_reverse_iterator rend() const { return const_reverse_iterator(begin()); }

    T& first() { return at(0); }
    const T& first() const { return at(0); }
    T& last() { return at(size() - 1); }
    const T& last() const { return at(size() - 1); }

    template<typename U> bool contains(const U&) const;
    template<typename U> size_t find(const U&) const;
    template<typename U> size_t reverseFind(const U&) const;

    void shrink(size_t size);
    void grow(size_t size);
    void resize(size_t size);
    void reserveCapacity(size_t newCapacity);
    void reserveInitialCapacity(size_t initialCapacity);
    void shrinkToFit() { shrinkCapacity(size()); }

    void clear() { shrinkCapacity(0); }

    template<typename U> void append(const U*, size_t);
    template<typename U> void append(const U&);
    template<typename U> void uncheckedAppend(const U& val);
    template<size_t otherCapacity> void append(const Vector<T, otherCapacity>&);
    template<typename U, size_t otherCapacity> void appendVector(const Vector<U, otherCapacity>&);

    template<typename U> void insert(size_t position, const U*, size_t);
    template<typename U> void insert(size_t position, const U&);
    template<typename U, size_t c> void insert(size_t position, const Vector<U, c>&);

    template<typename U> void prepend(const U*, size_t);
    template<typename U> void prepend(const U&);
    template<typename U, size_t c> void prepend(const Vector<U, c>&);

    void remove(size_t position);
    void remove(size_t position, size_t length);

    void removeLast()
    {
        ASSERT(!isEmpty());
        shrink(size() - 1);
    }

    // Creates a vector of `size` copies of `val`.
    Vector(size_t size, const T& val)
        : Base(size)
    {
        m_size = size;
        TypeOperations::uninitializedFill(begin(), end(), val);
    }

    void fill(const T&, size_t);
    void fill(const T& val) { fill(val, size()); }

    template<typename Iterator> void appendRange(Iterator start, Iterator end);

    void swap(Vector<T, inlineCapacity>& other)
    {
        std::swap(m_size, other.m_size);
        Base::swap(other);
    }

    void reverse();

private:
    void expandCapacity(size_t newMinCapacity);
    const T* expandCapacity(size_t newMinCapacity, const T*);
    template<typename U> U* expandCapacity(size_t newMinCapacity, U*);
    void shrinkCapacity(size_t newCapacity);
    template<typename U> void appendSlowCase(const U&);

    using Base::m_size;
    using Base::buffer;
    using Base::capacity;
    using Base::swap;
    using Base::allocateBuffer;
    using Base::allocationSize;
};

// Copy constructor: allocate to the source's capacity, then copy-construct
// the elements into uninitialized storage.
template<typename T, size_t inlineCapacity>
Vector<T, inlineCapacity>::Vector(const Vector& other)
    : Base(other.capacity())
{
    m_size = other.size();
    TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
}

// Converting copy constructor from a vector with a different inline capacity.
template<typename T, size_t inlineCapacity>
template<size_t otherCapacity>
Vector<T, inlineCapacity>::Vector(const Vector<T, otherCapacity>& other)
    : Base(other.capacity())
{
    m_size = other.size();
    TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
}

// Copy assignment: copy-assign over the overlapping prefix, then
// copy-construct any remaining tail into uninitialized storage.
template<typename T, size_t inlineCapacity>
Vector<T, inlineCapacity>& Vector<T, inlineCapacity>::operator=(const Vector<T, inlineCapacity>& other)
{
    if (UNLIKELY(&other == this))
        return *this;

    if (size() > other.size())
        shrink(other.size());
    else if (other.size() > capacity()) {
        clear();
        reserveCapacity(other.size());
        ASSERT(begin());
    }

    // Works around an assert in VS2010.
    // See https://connect.microsoft.com/VisualStudio/feedback/details/558044/std-copy-should-not-check-dest-when-first-last
#if COMPILER(MSVC) && defined(_ITERATOR_DEBUG_LEVEL) && _ITERATOR_DEBUG_LEVEL
    if (!begin())
        return *this;
#endif

    std::copy(other.begin(), other.begin() + size(), begin());
    TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
    m_size = other.size();

    return *this;
}

inline bool typelessPointersAreEqual(const void* a, const void* b) { return a == b; }

// Cross-inline-capacity copy assignment; same prefix-assign / tail-construct
// strategy as the same-capacity overload above.
template<typename T, size_t inlineCapacity>
template<size_t otherCapacity>
Vector<T, inlineCapacity>& Vector<T, inlineCapacity>::operator=(const Vector<T, otherCapacity>& other)
{
    // If the inline capacities match, we should call the more specific
    // template. If the inline capacities don't match, the two objects
    // shouldn't be allocated the same address.
    ASSERT(!typelessPointersAreEqual(&other, this));

    if (size() > other.size())
        shrink(other.size());
    else if (other.size() > capacity()) {
        clear();
        reserveCapacity(other.size());
        ASSERT(begin());
    }

    // Works around an assert in VS2010.
    // See https://connect.microsoft.com/VisualStudio/feedback/details/558044/std-copy-should-not-check-dest-when-first-last
#if COMPILER(MSVC) && defined(_ITERATOR_DEBUG_LEVEL) && _ITERATOR_DEBUG_LEVEL
    if (!begin())
        return *this;
#endif

    std::copy(other.begin(), other.begin() + size(), begin());
    TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
    m_size = other.size();

    return *this;
}

#if COMPILER_SUPPORTS(CXX_RVALUE_REFERENCES)
template<typename T, size_t inlineCapacity>
Vector<T, inlineCapacity>::Vector(Vector<T, inlineCapacity>&& other)
{
    m_size = 0;
    // It's a little weird to implement a move constructor using swap but this way we
    // don't have to add a move constructor to VectorBuffer.
    swap(other);
}

// Move assignment via swap: `other` ends up holding our old contents, which
// its destructor will clean up.
template<typename T, size_t inlineCapacity>
Vector<T, inlineCapacity>& Vector<T, inlineCapacity>::operator=(Vector<T, inlineCapacity>&& other)
{
    swap(other);
    return *this;
}
#endif

template<typename T, size_t inlineCapacity>
template<typename U>
bool Vector<T, inlineCapacity>::contains(const U& value) const
{
    return find(value) != kNotFound;
}

// Linear search; returns the index of the first match or kNotFound.
template<typename T, size_t inlineCapacity>
template<typename U>
size_t Vector<T, inlineCapacity>::find(const U& value) const
{
    const T* b = begin();
    const T* e = end();
    for (const T* iter = b; iter < e; ++iter) {
        if (*iter == value)
            return iter - b;
    }
    return kNotFound;
}

// Linear search from the back; returns the index of the last match or
// kNotFound.
template<typename T, size_t inlineCapacity>
template<typename U>
size_t Vector<T, inlineCapacity>::reverseFind(const U& value) const
{
    const T* b = begin();
    const T* iter = end();
    while (iter > b) {
        --iter;
        if (*iter == value)
            return iter - b;
    }
    return kNotFound;
}

// Resizes to newSize and sets every element to val.  Existing elements are
// copy-assigned; any new tail is copy-constructed in place.
template<typename T, size_t inlineCapacity>
void Vector<T, inlineCapacity>::fill(const T& val, size_t newSize)
{
    if (size() > newSize)
        shrink(newSize);
    else if (newSize > capacity()) {
        clear();
        reserveCapacity(newSize);
        ASSERT(begin());
    }

    std::fill(begin(), end(), val);
    TypeOperations::uninitializedFill(end(), begin() + newSize, val);
    m_size = newSize;
}

template<typename T, size_t inlineCapacity>
template<typename Iterator>
void Vector<T, inlineCapacity>::appendRange(Iterator start, Iterator end)
{
    for (Iterator it = start; it != end; ++it)
        append(*it);
}

// Grows capacity geometrically to at least newMinCapacity.
template<typename T, size_t inlineCapacity>
void Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity)
{
    size_t oldCapacity = capacity();
    size_t expandedCapacity = oldCapacity;
    // We use a more aggressive expansion strategy for Vectors with inline storage.
    // This is because they are more likely to be on the stack, so the risk of heap bloat is minimized.
    // Furthermore, exceeding the inline capacity limit is not supposed to happen in the common case and may indicate a pathological condition or microbenchmark.
    if (inlineCapacity) {
        expandedCapacity *= 2;
        // Check for integer overflow, which could happen in the 32-bit build.
        RELEASE_ASSERT(expandedCapacity > oldCapacity);
    } else {
        // This cannot integer overflow.
        // On 64-bit, the "expanded" integer is 32-bit, and any encroachment above 2^32 will fail allocation in allocateBuffer().
        // On 32-bit, there's not enough address space to hold the old and new buffers.
        // In addition, our underlying allocator is supposed to always fail on > (2^31 - 1) allocations.
        expandedCapacity += (expandedCapacity / 4) + 1;
    }
    reserveCapacity(std::max(newMinCapacity, std::max(static_cast<size_t>(kInitialVectorSize), expandedCapacity)));
}

// Expansion variant used when the caller holds a pointer that may alias this
// vector's own storage (e.g. appending an element of the vector to itself):
// the pointer is re-based onto the new buffer after reallocation.
template<typename T, size_t inlineCapacity>
const T* Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity, const T* ptr)
{
    if (ptr < begin() || ptr >= end()) {
        expandCapacity(newMinCapacity);
        return ptr;
    }
    size_t index = ptr - begin();
    expandCapacity(newMinCapacity);
    return begin() + index;
}

// Non-T pointers cannot alias the buffer, so no re-basing is needed.
template<typename T, size_t inlineCapacity> template<typename U>
inline U* Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity, U* ptr)
{
    expandCapacity(newMinCapacity);
    return ptr;
}

// Resizes to `size`, destroying surplus elements or default-initializing new
// ones.  Capacity is never reduced here.
template<typename T, size_t inlineCapacity>
inline void Vector<T, inlineCapacity>::resize(size_t size)
{
    if (size <= m_size)
        TypeOperations::destruct(begin() + size, end());
    else {
        if (size > capacity())
            expandCapacity(size);
        TypeOperations::initialize(end(), begin() + size);
    }

    m_size = size;
}

// Destroys elements beyond `size`; requires size <= current size.  Does not
// release capacity (see shrinkCapacity/shrinkToFit for that).
template<typename T, size_t inlineCapacity>
void Vector<T, inlineCapacity>::shrink(size_t size)
{
    ASSERT(size <= m_size);
    TypeOperations::destruct(begin() + size, end());
    m_size = size;
}

// Grows to `size`, default-initializing the new tail; requires
// size >= current size.
template<typename T, size_t inlineCapacity>
void Vector<T, inlineCapacity>::grow(size_t size)
{
    ASSERT(size >= m_size);
    if (size > capacity())
        expandCapacity(size);
    TypeOperations::initialize(end(), begin() + size);
    m_size = size;
}

// Reallocates to at least newCapacity and moves the elements over.  No-op if
// the current capacity already suffices.
template<typename T, size_t inlineCapacity>
void Vector<T, inlineCapacity>::reserveCapacity(size_t newCapacity)
{
    if (UNLIKELY(newCapacity <= capacity()))
        return;
    T* oldBuffer = begin();
    T* oldEnd = end();
    Base::allocateBuffer(newCapacity);
    TypeOperations::move(oldBuffer, oldEnd, begin());
    Base::deallocateBuffer(oldBuffer);
}
// Like reserveCapacity but only valid on an untouched, empty vector; skips
// the move of existing elements.
template<typename T, size_t inlineCapacity>
inline void Vector<T, inlineCapacity>::reserveInitialCapacity(size_t initialCapacity)
{
    ASSERT(!m_size);
    ASSERT(capacity() == inlineCapacity);
    if (initialCapacity > inlineCapacity)
        Base::allocateBuffer(initialCapacity);
}

// Reduces capacity to newCapacity, first shrinking the size if needed.  A
// newCapacity of 0 releases the heap buffer entirely.
template<typename T, size_t inlineCapacity>
void Vector<T, inlineCapacity>::shrinkCapacity(size_t newCapacity)
{
    if (newCapacity >= capacity())
        return;

    if (newCapacity < size())
        shrink(newCapacity);

    T* oldBuffer = begin();
    if (newCapacity > 0) {
        // Optimization: if we're downsizing inside the same allocator bucket, we can exit with no additional work.
        if (Base::allocationSize(capacity()) == Base::allocationSize(newCapacity))
            return;

        T* oldEnd = end();
        Base::allocateBuffer(newCapacity);
        // allocateBuffer may have just switched us back to the inline buffer,
        // in which case begin() == oldBuffer and nothing needs to move.
        if (begin() != oldBuffer)
            TypeOperations::move(oldBuffer, oldEnd, begin());
    } else {
        Base::resetBufferPointer();
    }

    Base::deallocateBuffer(oldBuffer);
}

// Templatizing these is better than just letting the conversion happen implicitly,
// because for instance it allows a PassRefPtr to be appended to a RefPtr vector
// without refcount thrash.
// Appends dataSize elements from data.  `data` may point into this vector's
// own storage; the pointer-taking expandCapacity overload re-bases it if the
// buffer is reallocated.
template<typename T, size_t inlineCapacity> template<typename U>
void Vector<T, inlineCapacity>::append(const U* data, size_t dataSize)
{
    size_t newSize = m_size + dataSize;
    if (newSize > capacity()) {
        data = expandCapacity(newSize, data);
        ASSERT(begin());
    }
    // Guards against m_size + dataSize overflowing (release-mode check).
    RELEASE_ASSERT(newSize >= m_size);
    T* dest = end();
    for (size_t i = 0; i < dataSize; ++i)
        new (NotNull, &dest[i]) T(data[i]);
    m_size = newSize;
}

// Single-element append: inlined fast path when capacity is available, with
// the reallocation path kept out of line in appendSlowCase.
template<typename T, size_t inlineCapacity> template<typename U>
ALWAYS_INLINE void Vector<T, inlineCapacity>::append(const U& val)
{
    if (LIKELY(size() != capacity())) {
        new (NotNull, end()) T(val);
        ++m_size;
        return;
    }

    appendSlowCase(val);
}

template<typename T, size_t inlineCapacity> template<typename U>
NEVER_INLINE void Vector<T, inlineCapacity>::appendSlowCase(const U& val)
{
    ASSERT(size() == capacity());

    // `val` may live inside this vector; expandCapacity re-bases the pointer
    // across the reallocation.
    const U* ptr = &val;
    ptr = expandCapacity(size() + 1, ptr);
    ASSERT(begin());

    new (NotNull, end()) T(*ptr);
    ++m_size;
}

// This version of append saves a branch in the case where you know that the
// vector's capacity is large enough for the append to succeed.

template<typename T, size_t inlineCapacity> template<typename U>
ALWAYS_INLINE void Vector<T, inlineCapacity>::uncheckedAppend(const U& val)
{
    ASSERT(size() < capacity());
    const U* ptr = &val;
    new (NotNull, end()) T(*ptr);
    ++m_size;
}

// This method should not be called append, a better name would be appendElements.
// It could also be eliminated entirely, and call sites could just use
// appendRange(val.begin(), val.end()).
template<typename T, size_t inlineCapacity> template<size_t otherCapacity>
inline void Vector<T, inlineCapacity>::append(const Vector<T, otherCapacity>& val)
{
    append(val.begin(), val.size());
}

template<typename T, size_t inlineCapacity> template<typename U, size_t otherCapacity>
inline void Vector<T, inlineCapacity>::appendVector(const Vector<U, otherCapacity>& val)
{
    append(val.begin(), val.size());
}

// Inserts dataSize elements at `position`, shifting the tail up with an
// overlap-safe move.  `data` may alias this vector's storage; expandCapacity
// re-bases it across any reallocation.
template<typename T, size_t inlineCapacity> template<typename U>
void Vector<T, inlineCapacity>::insert(size_t position, const U* data, size_t dataSize)
{
    RELEASE_ASSERT(position <= size());
    size_t newSize = m_size + dataSize;
    if (newSize > capacity()) {
        data = expandCapacity(newSize, data);
        ASSERT(begin());
    }
    // Overflow guard for m_size + dataSize (release-mode check).
    RELEASE_ASSERT(newSize >= m_size);
    T* spot = begin() + position;
    TypeOperations::moveOverlapping(spot, end(), spot + dataSize);
    for (size_t i = 0; i < dataSize; ++i)
        new (NotNull, &spot[i]) T(data[i]);
    m_size = newSize;
}

// Single-element insert; same aliasing and shifting strategy as above.
template<typename T, size_t inlineCapacity> template<typename U>
inline void Vector<T, inlineCapacity>::insert(size_t position, const U& val)
{
    RELEASE_ASSERT(position <= size());
    const U* data = &val;
    if (size() == capacity()) {
        data = expandCapacity(size() + 1, data);
        ASSERT(begin());
    }
    T* spot = begin() + position;
    TypeOperations::moveOverlapping(spot, end(), spot + 1);
    new (NotNull, spot) T(*data);
    ++m_size;
}

template<typename T, size_t inlineCapacity> template<typename U, size_t c>
inline void Vector<T, inlineCapacity>::insert(size_t position, const Vector<U, c>& val)
{
    insert(position, val.begin(), val.size());
}

template<typename T, size_t inlineCapacity> template<typename U>
void Vector<T, inlineCapacity>::prepend(const U* data, size_t dataSize)
{
    insert(0, data, dataSize);
}

template<typename T, size_t inlineCapacity> template<typename U>
inline void Vector<T, inlineCapacity>::prepend(const U& val)
{
    insert(0, val);
}

template<typename T, size_t inlineCapacity> template<typename U, size_t c>
inline void Vector<T, inlineCapacity>::prepend(const Vector<U, c>& val)
{
    insert(0, val.begin(), val.size());
}

// Removes the element at `position`: destroy it, then slide the tail down.
template<typename T, size_t inlineCapacity>
inline void Vector<T, inlineCapacity>::remove(size_t position)
{
    RELEASE_ASSERT(position < size());
    T* spot = begin() + position;
    spot->~T();
    TypeOperations::moveOverlapping(spot + 1, end(), spot);
    --m_size;
}

// Removes `length` elements starting at `position`; the RELEASE_ASSERT also
// catches position + length overflow.
template<typename T, size_t inlineCapacity>
inline void Vector<T, inlineCapacity>::remove(size_t position, size_t length)
{
    ASSERT_WITH_SECURITY_IMPLICATION(position <= size());
    RELEASE_ASSERT(position + length <= size());
    T* beginSpot = begin() + position;
    T* endSpot = beginSpot + length;
    TypeOperations::destruct(beginSpot, endSpot);
    TypeOperations::moveOverlapping(endSpot, end(), beginSpot);
    m_size -= length;
}

// In-place reversal via pairwise swaps.
template<typename T, size_t inlineCapacity>
inline void Vector<T, inlineCapacity>::reverse()
{
    for (size_t i = 0; i < m_size / 2; ++i)
        std::swap(at(i), at(m_size - 1 - i));
}

// Deletes every pointer stored in a vector of owning raw pointers.  Does not
// clear the vector; the stored pointers are left dangling.
template<typename T, size_t inlineCapacity>
void deleteAllValues(const Vector<T, inlineCapacity>& collection)
{
    typedef typename Vector<T, inlineCapacity>::const_iterator iterator;
    iterator end = collection.end();
    for (iterator it = collection.begin(); it != end; ++it)
        delete *it;
}

// Free-function swap so generic code (and ADL) finds the efficient member
// swap instead of copy-based std::swap.
template<typename T, size_t inlineCapacity>
inline void swap(Vector<T, inlineCapacity>& a, Vector<T, inlineCapacity>& b)
{
    a.swap(b);
}

template<typename T, size_t inlineCapacity>
bool operator==(const Vector<T, inlineCapacity>& a, const Vector<T, inlineCapacity>& b)
{
    if (a.size() != b.size())
        return false;

    return VectorTypeOperations<T>::compare(a.data(), b.data(), a.size());
}

template<typename T, size_t inlineCapacity>
inline bool operator!=(const Vector<T, inlineCapacity>& a, const Vector<T, inlineCapacity>& b)
{
    return !(a == b);
}

} // namespace WTF

using WTF::Vector;

#endif // WTF_Vector_h