// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008-2009 Gael Guennebaud <gael.guennebaud (at) inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_SPARSEVECTOR_H
#define EIGEN_SPARSEVECTOR_H

namespace Eigen {

/** \ingroup SparseCore_Module
  * \class SparseVector
  *
  * \brief a sparse vector class
  *
  * \tparam _Scalar the scalar type, i.e. the type of the coefficients
  *
  * See http://www.netlib.org/linalg/html_templates/node91.html for details on the storage scheme.
  *
  * This class can be extended with the help of the plugin mechanism described on the page
  * \ref TopicCustomizingEigen by defining the preprocessor symbol \c EIGEN_SPARSEVECTOR_PLUGIN.
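  *
  * A minimal usage sketch (hypothetical sizes and values):
  * \code
  * SparseVector<double> v(1000);  // sparse column vector of logical size 1000
  * v.insert(3)   = 1.5;           // sorted insertion of a new nonzero
  * v.coeffRef(3) += 0.5;          // binary search; would insert the entry if absent
  * // v.nonZeros() == 1 and v.coeff(3) == 2.0
  * \endcode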
  */

namespace internal {
template<typename _Scalar, int _Options, typename _Index>
struct traits<SparseVector<_Scalar, _Options, _Index> >
{
  typedef _Scalar Scalar;
  typedef _Index Index;
  typedef Sparse StorageKind;
  typedef MatrixXpr XprKind;
  enum {
    IsColVector = (_Options & RowMajorBit) ? 0 : 1,

    RowsAtCompileTime = IsColVector ? Dynamic : 1,
    ColsAtCompileTime = IsColVector ? 1 : Dynamic,
    MaxRowsAtCompileTime = RowsAtCompileTime,
    MaxColsAtCompileTime = ColsAtCompileTime,
    Flags = _Options | NestByRefBit | LvalueBit | (IsColVector ? 0 : RowMajorBit),
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    SupportedAccessPatterns = InnerRandomAccessPattern
  };
};

// Sparse-Vector-Assignment kinds:
enum {
  SVA_RuntimeSwitch,  // the source's vector-ness is only known at runtime
  SVA_Inner,          // all nonzeros live in a single inner vector
  SVA_Outer           // at most one nonzero per outer vector
};

template< typename Dest, typename Src,
          int AssignmentKind = !bool(Src::IsVectorAtCompileTime) ? SVA_RuntimeSwitch
                             : Src::InnerSizeAtCompileTime==1 ? SVA_Outer
                             : SVA_Inner>
struct sparse_vector_assign_selector;

} // namespace internal

template<typename _Scalar, int _Options, typename _Index>
class SparseVector
  : public SparseMatrixBase<SparseVector<_Scalar, _Options, _Index> >
{
    typedef SparseMatrixBase<SparseVector> SparseBase;

  public:
    EIGEN_SPARSE_PUBLIC_INTERFACE(SparseVector)
    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, +=)
    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, -=)

    typedef internal::CompressedStorage<Scalar,Index> Storage;
    enum { IsColVector = internal::traits<SparseVector>::IsColVector };

    enum {
      Options = _Options
    };

    EIGEN_STRONG_INLINE Index rows() const { return IsColVector ? m_size : 1; }
    EIGEN_STRONG_INLINE Index cols() const { return IsColVector ? 1 : m_size; }
    EIGEN_STRONG_INLINE Index innerSize() const { return m_size; }
    EIGEN_STRONG_INLINE Index outerSize() const { return 1; }

    EIGEN_STRONG_INLINE const Scalar* valuePtr() const { return &m_data.value(0); }
    EIGEN_STRONG_INLINE Scalar* valuePtr() { return &m_data.value(0); }

    EIGEN_STRONG_INLINE const Index* innerIndexPtr() const { return &m_data.index(0); }
    EIGEN_STRONG_INLINE Index* innerIndexPtr() { return &m_data.index(0); }

    /** \internal */
    inline Storage& data() { return m_data; }
    /** \internal */
    inline const Storage& data() const { return m_data; }

    inline Scalar coeff(Index row, Index col) const
    {
      eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
      return coeff(IsColVector ? row : col);
    }
    inline Scalar coeff(Index i) const
    {
      eigen_assert(i>=0 && i<m_size);
      return m_data.at(i);
    }

    inline Scalar& coeffRef(Index row, Index col)
    {
      eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
      return coeffRef(IsColVector ? row : col);
    }

    /** \returns a reference to the coefficient value at given index \a i.
      * This operation involves a log(rho*size) binary search. If the coefficient does not
      * exist yet, then a sorted insertion into a sequential buffer is performed.
      *
      * This insertion might be very costly if the number of nonzeros above \a i is large.
      */
    inline Scalar& coeffRef(Index i)
    {
      eigen_assert(i>=0 && i<m_size);
      return m_data.atWithInsertion(i);
    }
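
    // A usage sketch (hypothetical index/weight arrays): coeffRef is convenient
    // for random-access accumulation, at the price of one binary search (and
    // possibly a shift of the trailing entries) per call:
    //   for(Index k=0; k<m; ++k)
    //     v.coeffRef(idx[k]) += w[k];
    // For filling with strictly increasing indices, prefer reserve() + insertBack() below.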

  public:

    class InnerIterator;
    class ReverseInnerIterator;

    inline void setZero() { m_data.clear(); }

    /** \returns the number of non zero coefficients */
    inline Index nonZeros() const  { return static_cast<Index>(m_data.size()); }

    inline void startVec(Index outer)
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
    }

    inline Scalar& insertBackByOuterInner(Index outer, Index inner)
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
      return insertBack(inner);
    }
    inline Scalar& insertBack(Index i)
    {
      m_data.append(0, i);
      return m_data.value(m_data.size()-1);
    }
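
    // A sequential-fill sketch (hypothetical data; assumes the indices idx[k]
    // are visited in strictly increasing order and nnz bounds their count):
    //   SparseVector<double> v(n);
    //   v.reserve(nnz);                   // avoid reallocations
    //   for(Index k=0; k<nnz; ++k)
    //     v.insertBack(idx[k]) = val[k];  // O(1) append, no search
    //   v.finalize();                     // no-op here, kept for API consistency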

    inline Scalar& insert(Index row, Index col)
    {
      eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));

      Index inner = IsColVector ? row : col;
      Index outer = IsColVector ? col : row;
      eigen_assert(outer==0);
      return insert(inner);
    }
    Scalar& insert(Index i)
    {
      eigen_assert(i>=0 && i<m_size);

      Index startId = 0;
      Index p = Index(m_data.size()) - 1;
      // TODO smart realloc
      m_data.resize(p+2,1);

      // shift all entries with an index greater than i one position to the right
      while ( (p >= startId) && (m_data.index(p) > i) )
      {
        m_data.index(p+1) = m_data.index(p);
        m_data.value(p+1) = m_data.value(p);
        --p;
      }
      // insert the new coefficient, zero-initialized, at its sorted position
      m_data.index(p+1) = i;
      m_data.value(p+1) = 0;
      return m_data.value(p+1);
    }

    /** Preallocates \a reserveSize non zeros. */
    inline void reserve(Index reserveSize) { m_data.reserve(reserveSize); }


    inline void finalize() {}

    /** Suppresses all nonzeros which are \em much \em smaller \em than \a reference under the tolerance \a epsilon. */
    void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      m_data.prune(reference,epsilon);
    }

    void resize(Index rows, Index cols)
    {
      eigen_assert(rows==1 || cols==1);
      resize(IsColVector ? rows : cols);
    }

    void resize(Index newSize)
    {
      m_size = newSize;
      m_data.clear();
    }

    void resizeNonZeros(Index size) { m_data.resize(size); }

    inline SparseVector() : m_size(0) { check_template_parameters(); resize(0); }

    inline SparseVector(Index size) : m_size(0) { check_template_parameters(); resize(size); }

    inline SparseVector(Index rows, Index cols) : m_size(0) { check_template_parameters(); resize(rows,cols); }

    template<typename OtherDerived>
    inline SparseVector(const SparseMatrixBase<OtherDerived>& other)
      : m_size(0)
    {
      check_template_parameters();
      *this = other.derived();
    }

    inline SparseVector(const SparseVector& other)
      : SparseBase(other), m_size(0)
    {
      check_template_parameters();
      *this = other.derived();
    }

    /** Swaps the values of \c *this and \a other.
      * Overloaded for performance: this version performs a \em shallow swap by swapping pointers and attributes only.
      * \sa SparseMatrixBase::swap()
      */
    inline void swap(SparseVector& other)
    {
      std::swap(m_size, other.m_size);
      m_data.swap(other.m_data);
    }

    inline SparseVector& operator=(const SparseVector& other)
    {
      if (other.isRValue())
      {
        swap(other.const_cast_derived());
      }
      else
      {
        resize(other.size());
        m_data = other.m_data;
      }
      return *this;
    }

    template<typename OtherDerived>
    inline SparseVector& operator=(const SparseMatrixBase<OtherDerived>& other)
    {
      SparseVector tmp(other.size());
      internal::sparse_vector_assign_selector<SparseVector,OtherDerived>::run(tmp,other.derived());
      this->swap(tmp);
      return *this;
    }

    #ifndef EIGEN_PARSED_BY_DOXYGEN
    template<typename Lhs, typename Rhs>
    inline SparseVector& operator=(const SparseSparseProduct<Lhs,Rhs>& product)
    {
      return Base::operator=(product);
    }
    #endif

    friend std::ostream & operator << (std::ostream & s, const SparseVector& m)
    {
      for (Index i=0; i<m.nonZeros(); ++i)
        s << "(" << m.m_data.value(i) << "," << m.m_data.index(i) << ") ";
      s << std::endl;
      return s;
    }

    /** Destructor */
    inline ~SparseVector() {}

    /** Overloaded for performance */
    Scalar sum() const;

  public:

    /** \internal \deprecated use setZero() and reserve() */
    EIGEN_DEPRECATED void startFill(Index reserve)
    {
      setZero();
      m_data.reserve(reserve);
    }

    /** \internal \deprecated use insertBack(Index,Index) */
    EIGEN_DEPRECATED Scalar& fill(Index r, Index c)
    {
      eigen_assert(r==0 || c==0);
      return fill(IsColVector ? r : c);
    }

    /** \internal \deprecated use insertBack(Index) */
    EIGEN_DEPRECATED Scalar& fill(Index i)
    {
      m_data.append(0, i);
      return m_data.value(m_data.size()-1);
    }

    /** \internal \deprecated use insert(Index,Index) */
    EIGEN_DEPRECATED Scalar& fillrand(Index r, Index c)
    {
      eigen_assert(r==0 || c==0);
      return fillrand(IsColVector ? r : c);
    }

    /** \internal \deprecated use insert(Index) */
    EIGEN_DEPRECATED Scalar& fillrand(Index i)
    {
      return insert(i);
    }

    /** \internal \deprecated use finalize() */
    EIGEN_DEPRECATED void endFill() {}
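
    // Migration sketch from the deprecated fill API (hypothetical index i and value x):
    //   v.startFill(nnz);  v.fill(i) = x;  v.endFill();
    // maps onto the current API, per the \deprecated notes above, as
    //   v.setZero();  v.reserve(nnz);  v.insertBack(i) = x;  v.finalize();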

    // These two functions were here in the 3.1 release, so let's keep them in case some code relies on them.
    /** \internal \deprecated use data() */
    EIGEN_DEPRECATED Storage& _data() { return m_data; }
    /** \internal \deprecated use data() */
    EIGEN_DEPRECATED const Storage& _data() const { return m_data; }

#   ifdef EIGEN_SPARSEVECTOR_PLUGIN
#     include EIGEN_SPARSEVECTOR_PLUGIN
#   endif

protected:

    static void check_template_parameters()
    {
      EIGEN_STATIC_ASSERT(NumTraits<Index>::IsSigned,THE_INDEX_TYPE_MUST_BE_A_SIGNED_TYPE);
      EIGEN_STATIC_ASSERT((_Options&(ColMajor|RowMajor))==Options,INVALID_MATRIX_TEMPLATE_PARAMETERS);
    }

    Storage m_data;
    Index m_size;
};

template<typename Scalar, int _Options, typename _Index>
class SparseVector<Scalar,_Options,_Index>::InnerIterator
{
  public:
    InnerIterator(const SparseVector& vec, Index outer=0)
      : m_data(vec.m_data), m_id(0), m_end(static_cast<Index>(m_data.size()))
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
    }

    InnerIterator(const internal::CompressedStorage<Scalar,Index>& data)
      : m_data(data), m_id(0), m_end(static_cast<Index>(m_data.size()))
    {}

    inline InnerIterator& operator++() { m_id++; return *this; }

    inline Scalar value() const { return m_data.value(m_id); }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_data.value(m_id)); }

    inline Index index() const { return m_data.index(m_id); }
    inline Index row() const { return IsColVector ? index() : 0; }
    inline Index col() const { return IsColVector ? 0 : index(); }

    inline operator bool() const { return (m_id < m_end); }

  protected:
    const internal::CompressedStorage<Scalar,Index>& m_data;
    Index m_id;
    const Index m_end;
};
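
// A traversal sketch (assuming a SparseVector<double> v): InnerIterator visits
// the stored nonzeros in increasing index order.
//   for(SparseVector<double>::InnerIterator it(v); it; ++it)
//     std::cout << it.index() << " -> " << it.value() << "\n";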

template<typename Scalar, int _Options, typename _Index>
class SparseVector<Scalar,_Options,_Index>::ReverseInnerIterator
{
  public:
    ReverseInnerIterator(const SparseVector& vec, Index outer=0)
      : m_data(vec.m_data), m_id(static_cast<Index>(m_data.size())), m_start(0)
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
    }

    ReverseInnerIterator(const internal::CompressedStorage<Scalar,Index>& data)
      : m_data(data), m_id(static_cast<Index>(m_data.size())), m_start(0)
    {}

    inline ReverseInnerIterator& operator--() { m_id--; return *this; }

    inline Scalar value() const { return m_data.value(m_id-1); }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_data.value(m_id-1)); }

    inline Index index() const { return m_data.index(m_id-1); }
    inline Index row() const { return IsColVector ? index() : 0; }
    inline Index col() const { return IsColVector ? 0 : index(); }

    inline operator bool() const { return (m_id > m_start); }

  protected:
    const internal::CompressedStorage<Scalar,Index>& m_data;
    Index m_id;
    const Index m_start;
};
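
// The reverse counterpart visits the stored nonzeros in decreasing index order:
//   for(SparseVector<double>::ReverseInnerIterator it(v); it; --it)
//     std::cout << it.index() << " -> " << it.value() << "\n";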

namespace internal {

template< typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest,Src,SVA_Inner> {
  static void run(Dest& dst, const Src& src) {
    eigen_internal_assert(src.innerSize()==src.size());
    for(typename Src::InnerIterator it(src, 0); it; ++it)
      dst.insert(it.index()) = it.value();
  }
};

template< typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest,Src,SVA_Outer> {
  static void run(Dest& dst, const Src& src) {
    eigen_internal_assert(src.outerSize()==src.size());
    for(typename Dest::Index i=0; i<src.size(); ++i)
    {
      typename Src::InnerIterator it(src, i);
      if(it)
        dst.insert(i) = it.value();
    }
  }
};

template< typename Dest, typename Src>
struct sparse_vector_assign_selector<Dest,Src,SVA_RuntimeSwitch> {
  static void run(Dest& dst, const Src& src) {
    if(src.outerSize()==1)  sparse_vector_assign_selector<Dest,Src,SVA_Inner>::run(dst, src);
    else                    sparse_vector_assign_selector<Dest,Src,SVA_Outer>::run(dst, src);
  }
};

} // namespace internal

} // end namespace Eigen

#endif // EIGEN_SPARSEVECTOR_H