      1 /*
      2  *  Copyright (C) 2005, 2006, 2007, 2008 Apple Inc. All rights reserved.
      3  *
      4  *  This library is free software; you can redistribute it and/or
      5  *  modify it under the terms of the GNU Library General Public
      6  *  License as published by the Free Software Foundation; either
      7  *  version 2 of the License, or (at your option) any later version.
      8  *
      9  *  This library is distributed in the hope that it will be useful,
     10  *  but WITHOUT ANY WARRANTY; without even the implied warranty of
     11  *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
     12  *  Library General Public License for more details.
     13  *
     14  *  You should have received a copy of the GNU Library General Public License
     15  *  along with this library; see the file COPYING.LIB.  If not, write to
     16  *  the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
     17  *  Boston, MA 02110-1301, USA.
     18  *
     19  */
     20 
     21 #ifndef WTF_Vector_h
     22 #define WTF_Vector_h
     23 
     24 #include "wtf/Alignment.h"
     25 #include "wtf/DefaultAllocator.h"
     26 #include "wtf/FastAllocBase.h"
     27 #include "wtf/Noncopyable.h"
     28 #include "wtf/NotFound.h"
     29 #include "wtf/StdLibExtras.h"
     30 #include "wtf/VectorTraits.h"
     31 #include "wtf/WTF.h"
      32 #include <string.h>
         #include <algorithm> // std::copy, std::equal, std::fill, std::max, std::swap_ranges (used below)
         #include <iterator>  // std::reverse_iterator (used below)
      33 #include <utility>
     34 
     35 namespace WTF {
     36 
     37 #if defined(MEMORY_TOOL_REPLACES_ALLOCATOR)
     38 static const size_t kInitialVectorSize = 1;
     39 #else
     40 #ifndef WTF_VECTOR_INITIAL_SIZE
     41 #define WTF_VECTOR_INITIAL_SIZE 4
     42 #endif
     43 static const size_t kInitialVectorSize = WTF_VECTOR_INITIAL_SIZE;
     44 #endif
     45 
     46     template<typename T, size_t inlineBuffer, typename Allocator>
     47     class Deque;
     48 
     49     template <bool needsDestruction, typename T>
     50     struct VectorDestructor;
     51 
     52     template<typename T>
     53     struct VectorDestructor<false, T>
     54     {
     55         static void destruct(T*, T*) {}
     56     };
     57 
     58     template<typename T>
     59     struct VectorDestructor<true, T>
     60     {
     61         static void destruct(T* begin, T* end)
     62         {
     63             for (T* cur = begin; cur != end; ++cur)
     64                 cur->~T();
     65         }
     66     };
     67 
     68     template <bool unusedSlotsMustBeZeroed, typename T>
     69     struct VectorUnusedSlotClearer;
     70 
     71     template<typename T>
     72     struct VectorUnusedSlotClearer<false, T> {
     73         static void clear(T*, T*) { }
     74     };
     75 
     76     template<typename T>
     77     struct VectorUnusedSlotClearer<true, T> {
     78         static void clear(T* begin, T* end)
     79         {
     80             // We clear out unused slots so that the visitor and the finalizer
     81             // do not visit them (or at least it does not matter if they do).
     82             memset(begin, 0, sizeof(T) * (end - begin));
     83         }
     84     };
     85 
     86     template <bool canInitializeWithMemset, typename T>
     87     struct VectorInitializer;
     88 
     89     template<typename T>
     90     struct VectorInitializer<false, T>
     91     {
     92         static void initialize(T* begin, T* end)
     93         {
     94             for (T* cur = begin; cur != end; ++cur)
     95                 new (NotNull, cur) T;
     96         }
     97     };
     98 
     99     template<typename T>
    100     struct VectorInitializer<true, T>
    101     {
    102         static void initialize(T* begin, T* end)
    103         {
    104             memset(begin, 0, reinterpret_cast<char*>(end) - reinterpret_cast<char*>(begin));
    105         }
    106     };
    107 
    108     template <bool canMoveWithMemcpy, typename T>
    109     struct VectorMover;
    110 
    111     template<typename T>
    112     struct VectorMover<false, T>
    113     {
    114         static void move(const T* src, const T* srcEnd, T* dst)
    115         {
    116             while (src != srcEnd) {
    117                 new (NotNull, dst) T(*src);
    118                 src->~T();
    119                 ++dst;
    120                 ++src;
    121             }
    122         }
    123         static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
    124         {
    125             if (src > dst)
    126                 move(src, srcEnd, dst);
    127             else {
    128                 T* dstEnd = dst + (srcEnd - src);
    129                 while (src != srcEnd) {
    130                     --srcEnd;
    131                     --dstEnd;
    132                     new (NotNull, dstEnd) T(*srcEnd);
    133                     srcEnd->~T();
    134                 }
    135             }
    136         }
    137         static void swap(T* src, T* srcEnd, T* dst)
    138         {
    139             std::swap_ranges(src, srcEnd, dst);
    140         }
    141     };
    142 
    143     template<typename T>
    144     struct VectorMover<true, T>
    145     {
    146         static void move(const T* src, const T* srcEnd, T* dst)
    147         {
    148             memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
    149         }
    150         static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
    151         {
    152             memmove(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
    153         }
    154         static void swap(T* src, T* srcEnd, T* dst)
    155         {
    156             std::swap_ranges(reinterpret_cast<char*>(src), reinterpret_cast<char*>(srcEnd), reinterpret_cast<char*>(dst));
    157         }
    158     };
    159 
    160     template <bool canCopyWithMemcpy, typename T>
    161     struct VectorCopier;
    162 
    163     template<typename T>
    164     struct VectorCopier<false, T>
    165     {
    166         template<typename U>
    167         static void uninitializedCopy(const U* src, const U* srcEnd, T* dst)
    168         {
    169             while (src != srcEnd) {
    170                 new (NotNull, dst) T(*src);
    171                 ++dst;
    172                 ++src;
    173             }
    174         }
    175     };
    176 
    177     template<typename T>
    178     struct VectorCopier<true, T>
    179     {
    180         static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
    181         {
    182             memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
    183         }
    184         template<typename U>
    185         static void uninitializedCopy(const U* src, const U* srcEnd, T* dst)
    186         {
    187             VectorCopier<false, T>::uninitializedCopy(src, srcEnd, dst);
    188         }
    189     };
    190 
    191     template <bool canFillWithMemset, typename T>
    192     struct VectorFiller;
    193 
    194     template<typename T>
    195     struct VectorFiller<false, T>
    196     {
    197         static void uninitializedFill(T* dst, T* dstEnd, const T& val)
    198         {
    199             while (dst != dstEnd) {
    200                 new (NotNull, dst) T(val);
    201                 ++dst;
    202             }
    203         }
    204     };
    205 
    206     template<typename T>
    207     struct VectorFiller<true, T>
    208     {
    209         static void uninitializedFill(T* dst, T* dstEnd, const T& val)
    210         {
    211             COMPILE_ASSERT(sizeof(T) == sizeof(char), Size_of_type_should_be_equal_to_one);
    212 #if COMPILER(GCC) && defined(_FORTIFY_SOURCE)
    213             if (!__builtin_constant_p(dstEnd - dst) || (!(dstEnd - dst)))
    214 #endif
    215                 memset(dst, val, dstEnd - dst);
    216         }
    217     };
    218 
    219     template<bool canCompareWithMemcmp, typename T>
    220     struct VectorComparer;
    221 
    222     template<typename T>
    223     struct VectorComparer<false, T>
    224     {
    225         static bool compare(const T* a, const T* b, size_t size)
    226         {
    227             if (LIKELY(a && b))
    228                 return std::equal(a, a + size, b);
    229             return !a && !b;
    230         }
    231     };
    232 
    233     template<typename T>
    234     struct VectorComparer<true, T>
    235     {
    236         static bool compare(const T* a, const T* b, size_t size)
    237         {
    238             return memcmp(a, b, sizeof(T) * size) == 0;
    239         }
    240     };
    241 
    242     template<typename T>
    243     struct VectorTypeOperations
    244     {
    245         static void destruct(T* begin, T* end)
    246         {
    247             VectorDestructor<VectorTraits<T>::needsDestruction, T>::destruct(begin, end);
    248         }
    249 
    250         static void initialize(T* begin, T* end)
    251         {
    252             VectorInitializer<VectorTraits<T>::canInitializeWithMemset, T>::initialize(begin, end);
    253         }
    254 
    255         static void move(const T* src, const T* srcEnd, T* dst)
    256         {
    257             VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::move(src, srcEnd, dst);
    258         }
    259 
    260         static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
    261         {
    262             VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::moveOverlapping(src, srcEnd, dst);
    263         }
    264 
    265         static void swap(T* src, T* srcEnd, T* dst)
    266         {
    267             VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::swap(src, srcEnd, dst);
    268         }
    269 
    270         static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
    271         {
    272             VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(src, srcEnd, dst);
    273         }
    274 
    275         static void uninitializedFill(T* dst, T* dstEnd, const T& val)
    276         {
    277             VectorFiller<VectorTraits<T>::canFillWithMemset, T>::uninitializedFill(dst, dstEnd, val);
    278         }
    279 
    280         static bool compare(const T* a, const T* b, size_t size)
    281         {
    282             return VectorComparer<VectorTraits<T>::canCompareWithMemcmp, T>::compare(a, b, size);
    283         }
    284     };
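
             // Illustrative sketch (not part of the original header): VectorTypeOperations lets a
             // trait-aware operation collapse to memcpy/memset when VectorTraits<T> says that is
             // safe, and otherwise falls back to per-element construction. Assuming a POD element
             // type such as int, whose traits allow memcpy:
             //
             //     int src[3] = { 1, 2, 3 };
             //     int dst[3];
             //     VectorTypeOperations<int>::uninitializedCopy(src, src + 3, dst); // memcpy path
             //
             // A type with a nontrivial copy constructor takes the placement-new loop in
             // VectorCopier<false, T> instead.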
    285 
    286     template<typename T, typename Allocator>
    287     class VectorBufferBase {
    288         WTF_MAKE_NONCOPYABLE(VectorBufferBase);
    289     public:
    290         void allocateBuffer(size_t newCapacity)
    291         {
    292             typedef typename Allocator::template VectorBackingHelper<T, VectorTraits<T> >::Type VectorBacking;
    293             ASSERT(newCapacity);
    294             size_t sizeToAllocate = allocationSize(newCapacity);
    295             m_buffer = Allocator::template backingMalloc<T*, VectorBacking>(sizeToAllocate);
    296             m_capacity = sizeToAllocate / sizeof(T);
    297         }
    298 
    299         size_t allocationSize(size_t capacity) const
    300         {
    301             return Allocator::Quantizer::template quantizedSize<T>(capacity);
    302         }
    303 
    304         T* buffer() { return m_buffer; }
    305         const T* buffer() const { return m_buffer; }
    306         size_t capacity() const { return m_capacity; }
    307 
    308         void clearUnusedSlots(T* from, T* to)
    309         {
    310             VectorUnusedSlotClearer<Allocator::isGarbageCollected && (VectorTraits<T>::needsDestruction || ShouldBeTraced<VectorTraits<T> >::value), T>::clear(from, to);
    311         }
    312 
    313     protected:
    314         VectorBufferBase()
    315             : m_buffer(0)
    316             , m_capacity(0)
    317         {
    318         }
    319 
    320         VectorBufferBase(T* buffer, size_t capacity)
    321             : m_buffer(buffer)
    322             , m_capacity(capacity)
    323         {
    324         }
    325 
    326         T* m_buffer;
    327         unsigned m_capacity;
    328         unsigned m_size;
    329     };
    330 
    331     template<typename T, size_t inlineCapacity, typename Allocator = DefaultAllocator>
    332     class VectorBuffer;
    333 
    334     template<typename T, typename Allocator>
    335     class VectorBuffer<T, 0, Allocator> : protected VectorBufferBase<T, Allocator> {
    336     private:
    337         typedef VectorBufferBase<T, Allocator> Base;
    338     public:
    339         VectorBuffer()
    340         {
    341         }
    342 
    343         VectorBuffer(size_t capacity)
    344         {
    345             // Calling malloc(0) might take a lock and may actually do an
    346             // allocation on some systems.
    347             if (capacity)
    348                 allocateBuffer(capacity);
    349         }
    350 
    351         void destruct()
    352         {
    353             deallocateBuffer(m_buffer);
    354             m_buffer = 0;
    355         }
    356 
    357         void deallocateBuffer(T* bufferToDeallocate)
    358         {
    359             Allocator::backingFree(bufferToDeallocate);
    360         }
    361 
    362         void resetBufferPointer()
    363         {
    364             m_buffer = 0;
    365             m_capacity = 0;
    366         }
    367 
    368         void swapVectorBuffer(VectorBuffer<T, 0, Allocator>& other)
    369         {
    370             std::swap(m_buffer, other.m_buffer);
    371             std::swap(m_capacity, other.m_capacity);
    372         }
    373 
    374         using Base::allocateBuffer;
    375         using Base::allocationSize;
    376 
    377         using Base::buffer;
    378         using Base::capacity;
    379 
    380         using Base::clearUnusedSlots;
    381 
    382         bool hasOutOfLineBuffer() const
    383         {
     384             // When inlineCapacity is 0 we have an out-of-line buffer if we have a buffer.
    385             return buffer();
    386         }
    387 
    388     protected:
    389         using Base::m_size;
    390 
    391     private:
    392         using Base::m_buffer;
    393         using Base::m_capacity;
    394     };
    395 
    396     template<typename T, size_t inlineCapacity, typename Allocator>
    397     class VectorBuffer : protected VectorBufferBase<T, Allocator> {
    398         WTF_MAKE_NONCOPYABLE(VectorBuffer);
    399     private:
    400         typedef VectorBufferBase<T, Allocator> Base;
    401     public:
    402         VectorBuffer()
    403             : Base(inlineBuffer(), inlineCapacity)
    404         {
    405         }
    406 
    407         VectorBuffer(size_t capacity)
    408             : Base(inlineBuffer(), inlineCapacity)
    409         {
    410             if (capacity > inlineCapacity)
    411                 Base::allocateBuffer(capacity);
    412         }
    413 
    414         void destruct()
    415         {
    416             deallocateBuffer(m_buffer);
    417             m_buffer = 0;
    418         }
    419 
    420         NEVER_INLINE void reallyDeallocateBuffer(T* bufferToDeallocate)
    421         {
    422             Allocator::backingFree(bufferToDeallocate);
    423         }
    424 
    425         void deallocateBuffer(T* bufferToDeallocate)
    426         {
    427             if (UNLIKELY(bufferToDeallocate != inlineBuffer()))
    428                 reallyDeallocateBuffer(bufferToDeallocate);
    429         }
    430 
    431         void resetBufferPointer()
    432         {
    433             m_buffer = inlineBuffer();
    434             m_capacity = inlineCapacity;
    435         }
    436 
    437         void allocateBuffer(size_t newCapacity)
    438         {
    439             // FIXME: This should ASSERT(!m_buffer) to catch misuse/leaks.
    440             if (newCapacity > inlineCapacity)
    441                 Base::allocateBuffer(newCapacity);
    442             else
    443                 resetBufferPointer();
    444         }
    445 
    446         size_t allocationSize(size_t capacity) const
    447         {
    448             if (capacity <= inlineCapacity)
    449                 return m_inlineBufferSize;
    450             return Base::allocationSize(capacity);
    451         }
    452 
    453         void swapVectorBuffer(VectorBuffer<T, inlineCapacity, Allocator>& other)
    454         {
    455             typedef VectorTypeOperations<T> TypeOperations;
    456 
    457             if (buffer() == inlineBuffer() && other.buffer() == other.inlineBuffer()) {
    458                 ASSERT(m_capacity == other.m_capacity);
    459                 if (m_size > other.m_size) {
    460                     TypeOperations::swap(inlineBuffer(), inlineBuffer() + other.m_size, other.inlineBuffer());
    461                     TypeOperations::move(inlineBuffer() + other.m_size, inlineBuffer() + m_size, other.inlineBuffer() + other.m_size);
    462                 } else {
    463                     TypeOperations::swap(inlineBuffer(), inlineBuffer() + m_size, other.inlineBuffer());
    464                     TypeOperations::move(other.inlineBuffer() + m_size, other.inlineBuffer() + other.m_size, inlineBuffer() + m_size);
    465                 }
    466             } else if (buffer() == inlineBuffer()) {
    467                 m_buffer = other.m_buffer;
    468                 other.m_buffer = other.inlineBuffer();
    469                 TypeOperations::move(inlineBuffer(), inlineBuffer() + m_size, other.inlineBuffer());
    470                 std::swap(m_capacity, other.m_capacity);
    471             } else if (other.buffer() == other.inlineBuffer()) {
    472                 other.m_buffer = m_buffer;
    473                 m_buffer = inlineBuffer();
    474                 TypeOperations::move(other.inlineBuffer(), other.inlineBuffer() + other.m_size, inlineBuffer());
    475                 std::swap(m_capacity, other.m_capacity);
    476             } else {
    477                 std::swap(m_buffer, other.m_buffer);
    478                 std::swap(m_capacity, other.m_capacity);
    479             }
    480         }
    481 
    482         using Base::buffer;
    483         using Base::capacity;
    484 
    485         bool hasOutOfLineBuffer() const
    486         {
    487             return buffer() && buffer() != inlineBuffer();
    488         }
    489 
    490     protected:
    491         using Base::m_size;
    492 
    493     private:
    494         using Base::m_buffer;
    495         using Base::m_capacity;
    496 
    497         static const size_t m_inlineBufferSize = inlineCapacity * sizeof(T);
    498         T* inlineBuffer() { return reinterpret_cast_ptr<T*>(m_inlineBuffer.buffer); }
    499         const T* inlineBuffer() const { return reinterpret_cast_ptr<const T*>(m_inlineBuffer.buffer); }
    500 
    501         AlignedBuffer<m_inlineBufferSize, WTF_ALIGN_OF(T)> m_inlineBuffer;
    502         template<typename U, size_t inlineBuffer, typename V>
    503         friend class Deque;
    504     };
    505 
    506     template<typename T, size_t inlineCapacity, typename Allocator>
    507     class Vector;
    508 
     509     // VectorDestructorBase defines the destructor of a vector. This base is used in order to
     510     // completely avoid creating a destructor for a vector that does not need to be destructed.
     511     // By doing so, the clang compiler has correct information about whether or not a vector
     512     // has a trivial destructor, and a compiler plugin uses that information to check the
     513     // correctness of non-finalized garbage-collected classes and of VectorTraits::needsDestruction.
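             //
             // For example (illustrative; HeapAllocator is the garbage-collected allocator defined elsewhere):
             //   Vector<T>                   - destructor calls finalize() (non-GC allocator).
             //   Vector<T, 0, HeapAllocator> - no destructor; the backing store is managed by the GC.
             //   Vector<T, 4, HeapAllocator> - destructor only if VectorTraits<T>::needsDestruction.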
    514 
    515     // All non-GC managed vectors need a destructor. This destructor will simply call finalize on the actual vector type.
    516     template<typename Derived, typename Elements, bool hasInlineCapacity, bool isGarbageCollected>
    517     class VectorDestructorBase {
    518     public:
    519         ~VectorDestructorBase() { static_cast<Derived*>(this)->finalize(); }
    520     };
    521 
    522     // Heap-allocated vectors with no inlineCapacity never need a destructor.
    523     template<typename Derived, typename Elements>
    524     class VectorDestructorBase<Derived, Elements, false, true> { };
    525 
     526     // Heap-allocated vectors with inlineCapacity need a destructor if the inline elements do.
     527     // The use of VectorTraits<Elements>::needsDestruction is delayed until we know that
     528     // inlineCapacity is non-zero; this allows classes to refer to themselves recursively in
     529     // vector members, which is only meaningful when inlineCapacity is zero. When inlineCapacity
     530     // is non-zero such recursion would have undefined meaning anyway, so in that case we can use
     531     // HeapVectorWithInlineCapacityDestructorBase to define a destructor depending on the value of VectorTraits<Elements>::needsDestruction.
    532     template<typename Derived, bool elementsNeedsDestruction>
    533     class HeapVectorWithInlineCapacityDestructorBase;
    534 
    535     template<typename Derived>
    536     class HeapVectorWithInlineCapacityDestructorBase<Derived, true> {
    537     public:
    538         ~HeapVectorWithInlineCapacityDestructorBase() { static_cast<Derived*>(this)->finalize(); }
    539     };
    540 
    541     template<typename Derived>
    542     class HeapVectorWithInlineCapacityDestructorBase<Derived, false> { };
    543 
    544     template<typename Derived, typename Elements>
    545     class VectorDestructorBase<Derived, Elements, true, true> : public HeapVectorWithInlineCapacityDestructorBase<Derived, VectorTraits<Elements>::needsDestruction> { };
    546 
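             // Typical usage (illustrative only; the identifiers below are hypothetical): the
             // inlineCapacity template argument reserves storage inside the Vector object itself,
             // so small vectors avoid heap allocation until they outgrow that buffer.
             //
             //     Vector<int> heapBacked;                 // inlineCapacity == 0: storage, if any, is heap-allocated.
             //     heapBacked.append(1);
             //
             //     Vector<int, 16> mostlyInline;           // the first 16 elements live inside the object itself.
             //     mostlyInline.reserveInitialCapacity(8); // still served by the inline buffer.
             //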
    547     template<typename T, size_t inlineCapacity = 0, typename Allocator = DefaultAllocator>
    548     class Vector : private VectorBuffer<T, inlineCapacity, Allocator>, public VectorDestructorBase<Vector<T, inlineCapacity, Allocator>, T, (inlineCapacity > 0), Allocator::isGarbageCollected> {
    549         WTF_USE_ALLOCATOR(Vector, Allocator);
    550     private:
    551         typedef VectorBuffer<T, inlineCapacity, Allocator> Base;
    552         typedef VectorTypeOperations<T> TypeOperations;
    553 
    554     public:
    555         typedef T ValueType;
    556 
    557         typedef T* iterator;
    558         typedef const T* const_iterator;
    559         typedef std::reverse_iterator<iterator> reverse_iterator;
    560         typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
    561 
    562         Vector()
    563         {
    564             // Unused slots are initialized to zero so that the visitor and the
    565             // finalizer can visit them safely. canInitializeWithMemset tells us
    566             // that the class does not expect matching constructor and
    567             // destructor calls as long as the memory is zeroed.
    568             COMPILE_ASSERT(!Allocator::isGarbageCollected || !VectorTraits<T>::needsDestruction || VectorTraits<T>::canInitializeWithMemset, ClassHasProblemsWithFinalizersCalledOnClearedMemory);
    569             COMPILE_ASSERT(!WTF::IsPolymorphic<T>::value || !VectorTraits<T>::canInitializeWithMemset, CantInitializeWithMemsetIfThereIsAVtable);
    570             m_size = 0;
    571         }
    572 
    573         explicit Vector(size_t size)
    574             : Base(size)
    575         {
    576             // Unused slots are initialized to zero so that the visitor and the
    577             // finalizer can visit them safely. canInitializeWithMemset tells us
    578             // that the class does not expect matching constructor and
    579             // destructor calls as long as the memory is zeroed.
    580             COMPILE_ASSERT(!Allocator::isGarbageCollected || !VectorTraits<T>::needsDestruction || VectorTraits<T>::canInitializeWithMemset, ClassHasProblemsWithFinalizersCalledOnClearedMemory);
    581             m_size = size;
    582             TypeOperations::initialize(begin(), end());
    583         }
    584 
    585         // Off-GC-heap vectors: Destructor should be called.
    586         // On-GC-heap vectors: Destructor should be called for inline buffers
    587         // (if any) but destructor shouldn't be called for vector backing since
    588         // it is managed by the traced GC heap.
    589         void finalize()
    590         {
    591             if (!inlineCapacity) {
    592                 if (LIKELY(!Base::buffer()))
    593                     return;
    594             }
    595             if (LIKELY(m_size) && !(Allocator::isGarbageCollected && this->hasOutOfLineBuffer())) {
    596                 TypeOperations::destruct(begin(), end());
    597                 m_size = 0; // Partial protection against use-after-free.
    598             }
    599 
    600             Base::destruct();
    601         }
    602 
    603         void finalizeGarbageCollectedObject()
    604         {
    605             finalize();
    606         }
    607 
    608         Vector(const Vector&);
    609         template<size_t otherCapacity>
    610         explicit Vector(const Vector<T, otherCapacity, Allocator>&);
    611 
    612         Vector& operator=(const Vector&);
    613         template<size_t otherCapacity>
    614         Vector& operator=(const Vector<T, otherCapacity, Allocator>&);
    615 
    616 #if COMPILER_SUPPORTS(CXX_RVALUE_REFERENCES)
    617         Vector(Vector&&);
    618         Vector& operator=(Vector&&);
    619 #endif
    620 
    621         size_t size() const { return m_size; }
    622         size_t capacity() const { return Base::capacity(); }
    623         bool isEmpty() const { return !size(); }
    624 
    625         T& at(size_t i)
    626         {
    627             RELEASE_ASSERT(i < size());
    628             return Base::buffer()[i];
    629         }
    630         const T& at(size_t i) const
    631         {
    632             RELEASE_ASSERT(i < size());
    633             return Base::buffer()[i];
    634         }
    635 
    636         T& operator[](size_t i) { return at(i); }
    637         const T& operator[](size_t i) const { return at(i); }
    638 
    639         T* data() { return Base::buffer(); }
    640         const T* data() const { return Base::buffer(); }
    641 
    642         iterator begin() { return data(); }
    643         iterator end() { return begin() + m_size; }
    644         const_iterator begin() const { return data(); }
    645         const_iterator end() const { return begin() + m_size; }
    646 
    647         reverse_iterator rbegin() { return reverse_iterator(end()); }
    648         reverse_iterator rend() { return reverse_iterator(begin()); }
    649         const_reverse_iterator rbegin() const { return const_reverse_iterator(end()); }
    650         const_reverse_iterator rend() const { return const_reverse_iterator(begin()); }
    651 
    652         T& first() { return at(0); }
    653         const T& first() const { return at(0); }
    654         T& last() { return at(size() - 1); }
    655         const T& last() const { return at(size() - 1); }
    656 
    657         template<typename U> bool contains(const U&) const;
    658         template<typename U> size_t find(const U&) const;
    659         template<typename U> size_t reverseFind(const U&) const;
    660 
    661         void shrink(size_t size);
    662         void grow(size_t size);
    663         void resize(size_t size);
    664         void reserveCapacity(size_t newCapacity);
    665         void reserveInitialCapacity(size_t initialCapacity);
    666         void shrinkToFit() { shrinkCapacity(size()); }
    667         void shrinkToReasonableCapacity()
    668         {
    669             if (size() * 2 < capacity())
    670                 shrinkCapacity(size() + size() / 4 + 1);
    671         }
    672 
    673         void clear() { shrinkCapacity(0); }
    674 
    675         template<typename U> void append(const U*, size_t);
    676         template<typename U> void append(const U&);
    677         template<typename U> void uncheckedAppend(const U& val);
    678         template<typename U, size_t otherCapacity, typename V> void appendVector(const Vector<U, otherCapacity, V>&);
    679 
    680         template<typename U> void insert(size_t position, const U*, size_t);
    681         template<typename U> void insert(size_t position, const U&);
    682         template<typename U, size_t c, typename V> void insert(size_t position, const Vector<U, c, V>&);
    683 
    684         template<typename U> void prepend(const U*, size_t);
    685         template<typename U> void prepend(const U&);
    686         template<typename U, size_t c, typename V> void prepend(const Vector<U, c, V>&);
    687 
    688         void remove(size_t position);
    689         void remove(size_t position, size_t length);
    690 
    691         void removeLast()
    692         {
    693             ASSERT(!isEmpty());
    694             shrink(size() - 1);
    695         }
    696 
    697         Vector(size_t size, const T& val)
    698             : Base(size)
    699         {
    700             m_size = size;
    701             TypeOperations::uninitializedFill(begin(), end(), val);
    702         }
    703 
    704         void fill(const T&, size_t);
    705         void fill(const T& val) { fill(val, size()); }
    706 
    707         template<typename Iterator> void appendRange(Iterator start, Iterator end);
    708 
    709         void swap(Vector& other)
    710         {
    711             Base::swapVectorBuffer(other);
    712             std::swap(m_size, other.m_size);
    713         }
    714 
    715         void reverse();
    716 
    717         void trace(typename Allocator::Visitor*);
    718 
    719     private:
    720         void expandCapacity(size_t newMinCapacity);
    721         const T* expandCapacity(size_t newMinCapacity, const T*);
    722         template<typename U> U* expandCapacity(size_t newMinCapacity, U*);
    723         void shrinkCapacity(size_t newCapacity);
    724         template<typename U> void appendSlowCase(const U&);
    725 
    726         using Base::m_size;
    727         using Base::buffer;
    728         using Base::capacity;
    729         using Base::swapVectorBuffer;
    730         using Base::allocateBuffer;
    731         using Base::allocationSize;
    732         using Base::clearUnusedSlots;
    733     };
    734 
    735     template<typename T, size_t inlineCapacity, typename Allocator>
    736     Vector<T, inlineCapacity, Allocator>::Vector(const Vector& other)
    737         : Base(other.capacity())
    738     {
    739         m_size = other.size();
    740         TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
    741     }
    742 
    743     template<typename T, size_t inlineCapacity, typename Allocator>
    744     template<size_t otherCapacity>
    745     Vector<T, inlineCapacity, Allocator>::Vector(const Vector<T, otherCapacity, Allocator>& other)
    746         : Base(other.capacity())
    747     {
    748         m_size = other.size();
    749         TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
    750     }
    751 
    752     template<typename T, size_t inlineCapacity, typename Allocator>
    753     Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>::operator=(const Vector<T, inlineCapacity, Allocator>& other)
    754     {
    755         if (UNLIKELY(&other == this))
    756             return *this;
    757 
    758         if (size() > other.size())
    759             shrink(other.size());
    760         else if (other.size() > capacity()) {
    761             clear();
    762             reserveCapacity(other.size());
    763             ASSERT(begin());
    764         }
    765 
    766 // Works around an assert in VS2010. See https://connect.microsoft.com/VisualStudio/feedback/details/558044/std-copy-should-not-check-dest-when-first-last
    767 #if COMPILER(MSVC) && defined(_ITERATOR_DEBUG_LEVEL) && _ITERATOR_DEBUG_LEVEL
    768         if (!begin())
    769             return *this;
    770 #endif
    771 
    772         std::copy(other.begin(), other.begin() + size(), begin());
    773         TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
    774         m_size = other.size();
    775 
    776         return *this;
    777     }
    778 
    779     inline bool typelessPointersAreEqual(const void* a, const void* b) { return a == b; }
    780 
    781     template<typename T, size_t inlineCapacity, typename Allocator>
    782     template<size_t otherCapacity>
    783     Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>::operator=(const Vector<T, otherCapacity, Allocator>& other)
    784     {
    785         // If the inline capacities match, we should call the more specific
    786         // template.  If the inline capacities don't match, the two objects
     787         // shouldn't be allocated at the same address.
    788         ASSERT(!typelessPointersAreEqual(&other, this));
    789 
    790         if (size() > other.size())
    791             shrink(other.size());
    792         else if (other.size() > capacity()) {
    793             clear();
    794             reserveCapacity(other.size());
    795             ASSERT(begin());
    796         }
    797 
    798 // Works around an assert in VS2010. See https://connect.microsoft.com/VisualStudio/feedback/details/558044/std-copy-should-not-check-dest-when-first-last
    799 #if COMPILER(MSVC) && defined(_ITERATOR_DEBUG_LEVEL) && _ITERATOR_DEBUG_LEVEL
    800         if (!begin())
    801             return *this;
    802 #endif
    803 
    804         std::copy(other.begin(), other.begin() + size(), begin());
    805         TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
    806         m_size = other.size();
    807 
    808         return *this;
    809     }
    810 
    811 #if COMPILER_SUPPORTS(CXX_RVALUE_REFERENCES)
    812     template<typename T, size_t inlineCapacity, typename Allocator>
    813     Vector<T, inlineCapacity, Allocator>::Vector(Vector<T, inlineCapacity, Allocator>&& other)
    814     {
    815         m_size = 0;
    816         // It's a little weird to implement a move constructor using swap but this way we
    817         // don't have to add a move constructor to VectorBuffer.
    818         swap(other);
    819     }
    820 
    821     template<typename T, size_t inlineCapacity, typename Allocator>
    822     Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>::operator=(Vector<T, inlineCapacity, Allocator>&& other)
    823     {
    824         swap(other);
    825         return *this;
    826     }
    827 #endif
    828 
    829     template<typename T, size_t inlineCapacity, typename Allocator>
    830     template<typename U>
    831     bool Vector<T, inlineCapacity, Allocator>::contains(const U& value) const
    832     {
    833         return find(value) != kNotFound;
    834     }
    835 
    836     template<typename T, size_t inlineCapacity, typename Allocator>
    837     template<typename U>
    838     size_t Vector<T, inlineCapacity, Allocator>::find(const U& value) const
    839     {
    840         const T* b = begin();
    841         const T* e = end();
    842         for (const T* iter = b; iter < e; ++iter) {
    843             if (*iter == value)
    844                 return iter - b;
    845         }
    846         return kNotFound;
    847     }
    848 
    849     template<typename T, size_t inlineCapacity, typename Allocator>
    850     template<typename U>
    851     size_t Vector<T, inlineCapacity, Allocator>::reverseFind(const U& value) const
    852     {
    853         const T* b = begin();
    854         const T* iter = end();
    855         while (iter > b) {
    856             --iter;
    857             if (*iter == value)
    858                 return iter - b;
    859         }
    860         return kNotFound;
    861     }
    862 
    863     template<typename T, size_t inlineCapacity, typename Allocator>
    864     void Vector<T, inlineCapacity, Allocator>::fill(const T& val, size_t newSize)
    865     {
    866         if (size() > newSize)
    867             shrink(newSize);
    868         else if (newSize > capacity()) {
    869             clear();
    870             reserveCapacity(newSize);
    871             ASSERT(begin());
    872         }
    873 
    874         std::fill(begin(), end(), val);
    875         TypeOperations::uninitializedFill(end(), begin() + newSize, val);
    876         m_size = newSize;
    877     }
    878 
    879     template<typename T, size_t inlineCapacity, typename Allocator>
    880     template<typename Iterator>
    881     void Vector<T, inlineCapacity, Allocator>::appendRange(Iterator start, Iterator end)
    882     {
    883         for (Iterator it = start; it != end; ++it)
    884             append(*it);
    885     }
    886 
    887     template<typename T, size_t inlineCapacity, typename Allocator>
    888     void Vector<T, inlineCapacity, Allocator>::expandCapacity(size_t newMinCapacity)
    889     {
    890         size_t oldCapacity = capacity();
    891         size_t expandedCapacity = oldCapacity;
    892         // We use a more aggressive expansion strategy for Vectors with inline storage.
    893         // This is because they are more likely to be on the stack, so the risk of heap bloat is minimized.
    894         // Furthermore, exceeding the inline capacity limit is not supposed to happen in the common case and may indicate a pathological condition or microbenchmark.
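                 // For illustration, following the arithmetic below: a heap-only Vector grows its
                 // capacity by roughly 25% per expansion (e.g. 4 -> 6 -> 8 -> 11 -> 14 -> 18 -> ...),
                 // while a Vector with inline storage doubles its capacity each time it expands.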
    895         if (inlineCapacity) {
    896             expandedCapacity *= 2;
    897             // Check for integer overflow, which could happen in the 32-bit build.
    898             RELEASE_ASSERT(expandedCapacity > oldCapacity);
    899         } else {
     900             // This addition cannot overflow.
    901             // On 64-bit, the "expanded" integer is 32-bit, and any encroachment above 2^32 will fail allocation in allocateBuffer().
    902             // On 32-bit, there's not enough address space to hold the old and new buffers.
    903             // In addition, our underlying allocator is supposed to always fail on > (2^31 - 1) allocations.
    904             expandedCapacity += (expandedCapacity / 4) + 1;
    905         }
    906         reserveCapacity(std::max(newMinCapacity, std::max(static_cast<size_t>(kInitialVectorSize), expandedCapacity)));
    907     }
    908 
    909     template<typename T, size_t inlineCapacity, typename Allocator>
    910     const T* Vector<T, inlineCapacity, Allocator>::expandCapacity(size_t newMinCapacity, const T* ptr)
    911     {
    912         if (ptr < begin() || ptr >= end()) {
    913             expandCapacity(newMinCapacity);
    914             return ptr;
    915         }
    916         size_t index = ptr - begin();
    917         expandCapacity(newMinCapacity);
    918         return begin() + index;
    919     }
    920 
    921     template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
    922     inline U* Vector<T, inlineCapacity, Allocator>::expandCapacity(size_t newMinCapacity, U* ptr)
    923     {
    924         expandCapacity(newMinCapacity);
    925         return ptr;
    926     }
    927 
    928     template<typename T, size_t inlineCapacity, typename Allocator>
    929     inline void Vector<T, inlineCapacity, Allocator>::resize(size_t size)
    930     {
    931         if (size <= m_size)
    932             TypeOperations::destruct(begin() + size, end());
    933         else {
    934             if (size > capacity())
    935                 expandCapacity(size);
    936             TypeOperations::initialize(end(), begin() + size);
    937         }
    938 
    939         m_size = size;
    940     }
    941 
    942     template<typename T, size_t inlineCapacity, typename Allocator>
    943     void Vector<T, inlineCapacity, Allocator>::shrink(size_t size)
    944     {
    945         ASSERT(size <= m_size);
    946         TypeOperations::destruct(begin() + size, end());
    947         clearUnusedSlots(begin() + size, end());
    948         m_size = size;
    949     }
    950 
    951     template<typename T, size_t inlineCapacity, typename Allocator>
    952     void Vector<T, inlineCapacity, Allocator>::grow(size_t size)
    953     {
    954         ASSERT(size >= m_size);
    955         if (size > capacity())
    956             expandCapacity(size);
    957         TypeOperations::initialize(end(), begin() + size);
    958         m_size = size;
    959     }
    960 
    961     template<typename T, size_t inlineCapacity, typename Allocator>
    962     void Vector<T, inlineCapacity, Allocator>::reserveCapacity(size_t newCapacity)
    963     {
    964         if (UNLIKELY(newCapacity <= capacity()))
    965             return;
    966         T* oldBuffer = begin();
    967         T* oldEnd = end();
    968         Base::allocateBuffer(newCapacity);
    969         TypeOperations::move(oldBuffer, oldEnd, begin());
    970         Base::deallocateBuffer(oldBuffer);
    971     }
    972 
    973     template<typename T, size_t inlineCapacity, typename Allocator>
    974     inline void Vector<T, inlineCapacity, Allocator>::reserveInitialCapacity(size_t initialCapacity)
    975     {
    976         ASSERT(!m_size);
    977         ASSERT(capacity() == inlineCapacity);
    978         if (initialCapacity > inlineCapacity)
    979             Base::allocateBuffer(initialCapacity);
    980     }
    981 
    982     template<typename T, size_t inlineCapacity, typename Allocator>
    983     void Vector<T, inlineCapacity, Allocator>::shrinkCapacity(size_t newCapacity)
    984     {
    985         if (newCapacity >= capacity())
    986             return;
    987 
    988         if (newCapacity < size())
    989             shrink(newCapacity);
    990 
    991         T* oldBuffer = begin();
    992         if (newCapacity > 0) {
    993             // Optimization: if we're downsizing inside the same allocator bucket, we can exit with no additional work.
    994             if (Base::allocationSize(capacity()) == Base::allocationSize(newCapacity))
    995                 return;
    996 
    997             T* oldEnd = end();
    998             Base::allocateBuffer(newCapacity);
    999             if (begin() != oldBuffer)
   1000                 TypeOperations::move(oldBuffer, oldEnd, begin());
   1001         } else {
   1002             Base::resetBufferPointer();
   1003         }
   1004 
   1005         Base::deallocateBuffer(oldBuffer);
   1006     }
   1007 
   1008     // Templatizing these is better than just letting the conversion happen implicitly,
   1009     // because for instance it allows a PassRefPtr to be appended to a RefPtr vector
   1010     // without refcount thrash.
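             //
             // A minimal sketch of what this enables (illustrative; Foo is a hypothetical RefCounted type):
             //
             //     Vector<RefPtr<Foo> > foos;
             //     PassRefPtr<Foo> foo = Foo::create();
             //     foos.append(foo); // constructs the RefPtr<Foo> element directly from the
             //                       // PassRefPtr, avoiding an extra ref()/deref() pair.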
   1011 
   1012     template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
   1013     void Vector<T, inlineCapacity, Allocator>::append(const U* data, size_t dataSize)
   1014     {
   1015         size_t newSize = m_size + dataSize;
   1016         if (newSize > capacity()) {
   1017             data = expandCapacity(newSize, data);
   1018             ASSERT(begin());
   1019         }
   1020         RELEASE_ASSERT(newSize >= m_size);
   1021         T* dest = end();
   1022         VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(data, &data[dataSize], dest);
   1023         m_size = newSize;
   1024     }
   1025 
   1026     template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
   1027     ALWAYS_INLINE void Vector<T, inlineCapacity, Allocator>::append(const U& val)
   1028     {
   1029         if (LIKELY(size() != capacity())) {
   1030             new (NotNull, end()) T(val);
   1031             ++m_size;
   1032             return;
   1033         }
   1034 
   1035         appendSlowCase(val);
   1036     }
   1037 
   1038     template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
   1039     NEVER_INLINE void Vector<T, inlineCapacity, Allocator>::appendSlowCase(const U& val)
   1040     {
   1041         ASSERT(size() == capacity());
   1042 
   1043         const U* ptr = &val;
   1044         ptr = expandCapacity(size() + 1, ptr);
   1045         ASSERT(begin());
   1046 
   1047         new (NotNull, end()) T(*ptr);
   1048         ++m_size;
   1049     }
   1050 
   1051     // This version of append saves a branch in the case where you know that the
   1052     // vector's capacity is large enough for the append to succeed.
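             //
             // Intended pattern (illustrative; 'count' and 'source' are hypothetical):
             //
             //     Vector<int> values;
             //     values.reserveCapacity(count);
             //     for (size_t i = 0; i < count; ++i)
             //         values.uncheckedAppend(source[i]); // capacity was reserved above, so no capacity check is needed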
   1053 
   1054     template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
   1055     ALWAYS_INLINE void Vector<T, inlineCapacity, Allocator>::uncheckedAppend(const U& val)
   1056     {
   1057         ASSERT(size() < capacity());
   1058         const U* ptr = &val;
   1059         new (NotNull, end()) T(*ptr);
   1060         ++m_size;
   1061     }
   1062 
   1063     template<typename T, size_t inlineCapacity, typename Allocator> template<typename U, size_t otherCapacity, typename OtherAllocator>
   1064     inline void Vector<T, inlineCapacity, Allocator>::appendVector(const Vector<U, otherCapacity, OtherAllocator>& val)
   1065     {
   1066         append(val.begin(), val.size());
   1067     }
   1068 
   1069     template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
   1070     void Vector<T, inlineCapacity, Allocator>::insert(size_t position, const U* data, size_t dataSize)
   1071     {
   1072         RELEASE_ASSERT(position <= size());
   1073         size_t newSize = m_size + dataSize;
   1074         if (newSize > capacity()) {
   1075             data = expandCapacity(newSize, data);
   1076             ASSERT(begin());
   1077         }
   1078         RELEASE_ASSERT(newSize >= m_size);
   1079         T* spot = begin() + position;
   1080         TypeOperations::moveOverlapping(spot, end(), spot + dataSize);
   1081         VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(data, &data[dataSize], spot);
   1082         m_size = newSize;
   1083     }
   1084 
   1085     template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
   1086     inline void Vector<T, inlineCapacity, Allocator>::insert(size_t position, const U& val)
   1087     {
   1088         RELEASE_ASSERT(position <= size());
   1089         const U* data = &val;
   1090         if (size() == capacity()) {
   1091             data = expandCapacity(size() + 1, data);
   1092             ASSERT(begin());
   1093         }
   1094         T* spot = begin() + position;
   1095         TypeOperations::moveOverlapping(spot, end(), spot + 1);
   1096         new (NotNull, spot) T(*data);
   1097         ++m_size;
   1098     }
   1099 
   1100     template<typename T, size_t inlineCapacity, typename Allocator> template<typename U, size_t c, typename OtherAllocator>
   1101     inline void Vector<T, inlineCapacity, Allocator>::insert(size_t position, const Vector<U, c, OtherAllocator>& val)
   1102     {
   1103         insert(position, val.begin(), val.size());
   1104     }
   1105 
   1106     template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
   1107     void Vector<T, inlineCapacity, Allocator>::prepend(const U* data, size_t dataSize)
   1108     {
   1109         insert(0, data, dataSize);
   1110     }
   1111 
   1112     template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
   1113     inline void Vector<T, inlineCapacity, Allocator>::prepend(const U& val)
   1114     {
   1115         insert(0, val);
   1116     }
   1117 
   1118     template<typename T, size_t inlineCapacity, typename Allocator> template<typename U, size_t c, typename V>
   1119     inline void Vector<T, inlineCapacity, Allocator>::prepend(const Vector<U, c, V>& val)
   1120     {
   1121         insert(0, val.begin(), val.size());
   1122     }
   1123 
   1124     template<typename T, size_t inlineCapacity, typename Allocator>
   1125     inline void Vector<T, inlineCapacity, Allocator>::remove(size_t position)
   1126     {
   1127         RELEASE_ASSERT(position < size());
   1128         T* spot = begin() + position;
   1129         spot->~T();
   1130         TypeOperations::moveOverlapping(spot + 1, end(), spot);
   1131         clearUnusedSlots(end() - 1, end());
   1132         --m_size;
   1133     }
   1134 
   1135     template<typename T, size_t inlineCapacity, typename Allocator>
   1136     inline void Vector<T, inlineCapacity, Allocator>::remove(size_t position, size_t length)
   1137     {
   1138         ASSERT_WITH_SECURITY_IMPLICATION(position <= size());
   1139         RELEASE_ASSERT(position + length <= size());
   1140         T* beginSpot = begin() + position;
   1141         T* endSpot = beginSpot + length;
   1142         TypeOperations::destruct(beginSpot, endSpot);
   1143         TypeOperations::moveOverlapping(endSpot, end(), beginSpot);
   1144         clearUnusedSlots(end() - length, end());
   1145         m_size -= length;
   1146     }
   1147 
   1148     template<typename T, size_t inlineCapacity, typename Allocator>
   1149     inline void Vector<T, inlineCapacity, Allocator>::reverse()
   1150     {
   1151         for (size_t i = 0; i < m_size / 2; ++i)
   1152             std::swap(at(i), at(m_size - 1 - i));
   1153     }
   1154 
   1155     template<typename T, size_t inlineCapacity, typename Allocator>
   1156     void deleteAllValues(const Vector<T, inlineCapacity, Allocator>& collection)
   1157     {
   1158         typedef typename Vector<T, inlineCapacity, Allocator>::const_iterator iterator;
   1159         iterator end = collection.end();
   1160         for (iterator it = collection.begin(); it != end; ++it)
   1161             delete *it;
   1162     }
   1163 
   1164     template<typename T, size_t inlineCapacity, typename Allocator>
   1165     inline void swap(Vector<T, inlineCapacity, Allocator>& a, Vector<T, inlineCapacity, Allocator>& b)
   1166     {
   1167         a.swap(b);
   1168     }
   1169 
   1170     template<typename T, size_t inlineCapacityA, size_t inlineCapacityB, typename Allocator>
   1171     bool operator==(const Vector<T, inlineCapacityA, Allocator>& a, const Vector<T, inlineCapacityB, Allocator>& b)
   1172     {
   1173         if (a.size() != b.size())
   1174             return false;
   1175 
   1176         return VectorTypeOperations<T>::compare(a.data(), b.data(), a.size());
   1177     }
   1178 
   1179     template<typename T, size_t inlineCapacityA, size_t inlineCapacityB, typename Allocator>
   1180     inline bool operator!=(const Vector<T, inlineCapacityA, Allocator>& a, const Vector<T, inlineCapacityB, Allocator>& b)
   1181     {
   1182         return !(a == b);
   1183     }
   1184 
   1185     // This is only called if the allocator is a HeapAllocator. It is used when
   1186     // visiting during a tracing GC.
   1187     template<typename T, size_t inlineCapacity, typename Allocator>
   1188     void Vector<T, inlineCapacity, Allocator>::trace(typename Allocator::Visitor* visitor)
   1189     {
   1190         COMPILE_ASSERT(Allocator::isGarbageCollected, Garbage_collector_must_be_enabled);
   1191         const T* bufferBegin = buffer();
   1192         const T* bufferEnd = buffer() + size();
   1193         if (ShouldBeTraced<VectorTraits<T> >::value) {
   1194             for (const T* bufferEntry = bufferBegin; bufferEntry != bufferEnd; bufferEntry++)
   1195                 Allocator::template trace<T, VectorTraits<T> >(visitor, *const_cast<T*>(bufferEntry));
   1196         }
   1197         if (this->hasOutOfLineBuffer())
   1198             Allocator::markNoTracing(visitor, buffer());
   1199     }
   1200 
   1201 } // namespace WTF
   1202 
   1203 using WTF::Vector;
   1204 
   1205 #endif // WTF_Vector_h
   1206