File indexing completed on 2025-02-22 10:34:45
0001
0002
0003
0004
0005
0006
0007
0008
0009
0010 #ifndef EIGEN_SPARSEVECTOR_H
0011 #define EIGEN_SPARSEVECTOR_H
0012
0013 namespace Eigen {
0014
0015
0016
0017
0018
0019
0020
0021
0022
0023
0024
0025
0026
0027
0028 namespace internal {
// Compile-time traits of SparseVector: one dimension is Dynamic, the other is
// fixed to 1. Setting RowMajorBit in _Options selects a row vector; the
// default is a column vector.
template<typename _Scalar, int _Options, typename _StorageIndex>
struct traits<SparseVector<_Scalar, _Options, _StorageIndex> >
{
  typedef _Scalar Scalar;
  typedef _StorageIndex StorageIndex;
  typedef Sparse StorageKind;
  typedef MatrixXpr XprKind;
  enum {
    IsColVector = (_Options & RowMajorBit) ? 0 : 1,

    RowsAtCompileTime = IsColVector ? Dynamic : 1,
    ColsAtCompileTime = IsColVector ? 1 : Dynamic,
    MaxRowsAtCompileTime = RowsAtCompileTime,
    MaxColsAtCompileTime = ColsAtCompileTime,
    // A SparseVector is always stored compressed, hence CompressedAccessBit.
    Flags = _Options | NestByRefBit | LvalueBit | (IsColVector ? 0 : RowMajorBit) | CompressedAccessBit,
    SupportedAccessPatterns = InnerRandomAccessPattern
  };
};
0047
0048
// Dispatch tags for assigning an arbitrary sparse expression to a SparseVector:
//  - SVA_Inner:         the source is a vector along its inner dimension
//                       (iterate the nonzeros of its single outer slice)
//  - SVA_Outer:         the source has inner size 1 (iterate over its outer
//                       dimension, at most one nonzero per slice)
//  - SVA_RuntimeSwitch: the source is not a vector at compile time; decide at
//                       run time from its actual outer size
enum {
  SVA_RuntimeSwitch,
  SVA_Inner,
  SVA_Outer
};

// Primary template; the three specializations below implement the strategies.
// AssignmentKind is computed from the compile-time shape of Src.
template< typename Dest, typename Src,
          int AssignmentKind = !bool(Src::IsVectorAtCompileTime) ? SVA_RuntimeSwitch
                             : Src::InnerSizeAtCompileTime==1 ? SVA_Outer
                             : SVA_Inner>
struct sparse_vector_assign_selector;
0060
0061 }
0062
/** \class SparseVector
  *
  * \brief A sparse vector: only the nonzero coefficients are stored, as
  * (value, index) pairs kept sorted by index in a compressed storage.
  *
  * \tparam _Scalar        scalar type of the coefficients
  * \tparam _Options       ColMajor (column vector, the default) or RowMajor
  *                        (row vector)
  * \tparam _StorageIndex  integer type used for stored indices (must be a
  *                        signed type, see check_template_parameters())
  */
template<typename _Scalar, int _Options, typename _StorageIndex>
class SparseVector
  : public SparseCompressedBase<SparseVector<_Scalar, _Options, _StorageIndex> >
{
    typedef SparseCompressedBase<SparseVector> Base;
    using Base::convert_index;
  public:
    EIGEN_SPARSE_PUBLIC_INTERFACE(SparseVector)
    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, +=)
    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, -=)

    // Compressed (value, index) pair storage, sorted by index.
    typedef internal::CompressedStorage<Scalar,StorageIndex> Storage;
    enum { IsColVector = internal::traits<SparseVector>::IsColVector };

    enum {
      Options = _Options
    };

    // A vector has exactly one outer "slice"; the inner dimension is its length.
    EIGEN_STRONG_INLINE Index rows() const { return IsColVector ? m_size : 1; }
    EIGEN_STRONG_INLINE Index cols() const { return IsColVector ? 1 : m_size; }
    EIGEN_STRONG_INLINE Index innerSize() const { return m_size; }
    EIGEN_STRONG_INLINE Index outerSize() const { return 1; }

    // Raw read/write access to the stored nonzero values.
    EIGEN_STRONG_INLINE const Scalar* valuePtr() const { return m_data.valuePtr(); }
    EIGEN_STRONG_INLINE Scalar* valuePtr() { return m_data.valuePtr(); }

    // Raw read/write access to the inner indices of the stored nonzeros.
    EIGEN_STRONG_INLINE const StorageIndex* innerIndexPtr() const { return m_data.indexPtr(); }
    EIGEN_STRONG_INLINE StorageIndex* innerIndexPtr() { return m_data.indexPtr(); }

    // A vector has no outer-index table and no per-slice nonzero counts
    // (it is always compressed), hence null pointers here.
    inline const StorageIndex* outerIndexPtr() const { return 0; }
    inline StorageIndex* outerIndexPtr() { return 0; }
    inline const StorageIndex* innerNonZeroPtr() const { return 0; }
    inline StorageIndex* innerNonZeroPtr() { return 0; }

    /** \internal Direct access to the underlying compressed storage. */
    inline Storage& data() { return m_data; }
    /** \internal Direct access to the underlying compressed storage. */
    inline const Storage& data() const { return m_data; }

    /** Read access by (row, col); the coordinate along the unit dimension
      * must be 0, the other one is forwarded to coeff(Index). */
    inline Scalar coeff(Index row, Index col) const
    {
      eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
      return coeff(IsColVector ? row : col);
    }
    /** Returns the value at index \a i, or an implicit zero if \a i is not
      * among the stored indices (lookup delegated to Storage::at). */
    inline Scalar coeff(Index i) const
    {
      eigen_assert(i>=0 && i<m_size);
      return m_data.at(StorageIndex(i));
    }

    /** Writable access by (row, col); same coordinate convention as
      * coeff(Index,Index). */
    inline Scalar& coeffRef(Index row, Index col)
    {
      eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
      return coeffRef(IsColVector ? row : col);
    }

    /** Returns a writable reference to the coefficient at index \a i,
      * inserting it if it was not stored yet (via Storage::atWithInsertion).
      * Insertion can shift the elements stored after \a i, so repeated
      * random-access writes can be expensive; prefer the insertBack*() API
      * when filling in increasing index order.
      */
    inline Scalar& coeffRef(Index i)
    {
      eigen_assert(i>=0 && i<m_size);

      return m_data.atWithInsertion(StorageIndex(i));
    }

  public:

    typedef typename Base::InnerIterator InnerIterator;
    typedef typename Base::ReverseInnerIterator ReverseInnerIterator;

    /** Removes all stored coefficients; the vector size is kept unchanged. */
    inline void setZero() { m_data.clear(); }

    /** Number of explicitly stored coefficients. */
    inline Index nonZeros() const { return m_data.size(); }

    // --- low-level assembly API, mirroring SparseMatrix ---
    // The only valid outer index for a vector is 0; the outer parameters
    // below exist solely for interface compatibility.

    inline void startVec(Index outer)
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
    }

    inline Scalar& insertBackByOuterInner(Index outer, Index inner)
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
      return insertBack(inner);
    }
    /** Appends a zero-initialized coefficient at index \a i and returns a
      * reference to it. The caller must append in increasing index order. */
    inline Scalar& insertBack(Index i)
    {
      m_data.append(0, i);
      return m_data.value(m_data.size()-1);
    }

    Scalar& insertBackByOuterInnerUnordered(Index outer, Index inner)
    {
      EIGEN_UNUSED_VARIABLE(outer);
      eigen_assert(outer==0);
      return insertBackUnordered(inner);
    }
    /** Appends a zero-initialized coefficient at index \a i without any
      * ordering requirement on \a i. */
    inline Scalar& insertBackUnordered(Index i)
    {
      m_data.append(0, i);
      return m_data.value(m_data.size()-1);
    }

    /** Inserts a coefficient by (row, col); the coordinate along the unit
      * dimension must be 0. */
    inline Scalar& insert(Index row, Index col)
    {
      eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));

      Index inner = IsColVector ? row : col;
      Index outer = IsColVector ? col : row;
      EIGEN_ONLY_USED_FOR_DEBUG(outer);
      eigen_assert(outer==0);
      return insert(inner);
    }
    /** Inserts a zero-initialized coefficient at index \a i and returns a
      * reference to it. Elements stored after \a i are shifted one slot to
      * the right to keep the indices sorted (O(nnz) worst case). */
    Scalar& insert(Index i)
    {
      eigen_assert(i>=0 && i<m_size);

      Index startId = 0;
      Index p = Index(m_data.size()) - 1;
      // Grow storage by one slot, then shift-right until the spot for i opens.
      m_data.resize(p+2,1);

      while ( (p >= startId) && (m_data.index(p) > i) )
      {
        m_data.index(p+1) = m_data.index(p);
        m_data.value(p+1) = m_data.value(p);
        --p;
      }
      m_data.index(p+1) = convert_index(i);
      m_data.value(p+1) = 0;
      return m_data.value(p+1);
    }

    /** Preallocates room for \a reserveSize nonzeros. */
    inline void reserve(Index reserveSize) { m_data.reserve(reserveSize); }

    /** No-op; kept for interface compatibility with SparseMatrix. */
    inline void finalize() {}

    /** Removes stored coefficients that are approximately equal to
      * \a reference within \a epsilon (delegated to Storage::prune). */
    void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      m_data.prune(reference,epsilon);
    }

    /** Resizes via (rows, cols); the dimension along the unit direction must
      * equal 1. All stored values are discarded. */
    void resize(Index rows, Index cols)
    {
      eigen_assert((IsColVector ? cols : rows)==1 && "Outer dimension must equal 1");
      resize(IsColVector ? rows : cols);
    }

    /** Resizes the vector to \a newSize and removes all stored coefficients. */
    void resize(Index newSize)
    {
      m_size = newSize;
      m_data.clear();
    }

    /** Resizes the vector to \a newSize, keeping the stored coefficients
      * whose index remains in range. When shrinking, coefficients with
      * index >= newSize are dropped. */
    void conservativeResize(Index newSize)
    {
      if (newSize < m_size)
      {
        // Indices are sorted: keep the leading run of in-range entries.
        Index i = 0;
        while (i<m_data.size() && m_data.index(i)<newSize) ++i;
        m_data.resize(i);
      }
      m_size = newSize;
    }

    /** \internal Resizes the nonzero storage itself (values and indices). */
    void resizeNonZeros(Index size) { m_data.resize(size); }

    /** Default constructor: an empty vector of size 0. */
    inline SparseVector() : m_size(0) { check_template_parameters(); resize(0); }

    /** Constructs an empty (all implicit zeros) vector of size \a size. */
    explicit inline SparseVector(Index size) : m_size(0) { check_template_parameters(); resize(size); }

    /** Constructs via (rows, cols); one of them must be 1. */
    inline SparseVector(Index rows, Index cols) : m_size(0) { check_template_parameters(); resize(rows,cols); }

    /** Constructs from any sparse expression, via operator=. */
    template<typename OtherDerived>
    inline SparseVector(const SparseMatrixBase<OtherDerived>& other)
      : m_size(0)
    {
      #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
        EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
      #endif
      check_template_parameters();
      *this = other.derived();
    }

    /** Copy constructor (delegates the copy to operator=). */
    inline SparseVector(const SparseVector& other)
      : Base(other), m_size(0)
    {
      check_template_parameters();
      *this = other.derived();
    }

    /** Swaps the content of two vectors in O(1). */
    inline void swap(SparseVector& other)
    {
      std::swap(m_size, other.m_size);
      m_data.swap(other.m_data);
    }

    /** Swaps with a one-column/one-row SparseMatrix in O(1).
      * NOTE(review): accesses other.m_innerSize / other.m_data directly, so
      * this relies on friendship granted by SparseMatrix. */
    template<int OtherOptions>
    inline void swap(SparseMatrix<Scalar,OtherOptions,StorageIndex>& other)
    {
      eigen_assert(other.outerSize()==1);
      std::swap(m_size, other.m_innerSize);
      m_data.swap(other.m_data);
    }

    /** Assignment: steals the data of an rvalue via swap, otherwise copies. */
    inline SparseVector& operator=(const SparseVector& other)
    {
      if (other.isRValue())
      {
        swap(other.const_cast_derived());
      }
      else
      {
        resize(other.size());
        m_data = other.m_data;
      }
      return *this;
    }

    /** Assignment from a generic sparse expression: evaluates into a
      * temporary (choosing the inner/outer strategy at compile time or run
      * time), then swaps — safe w.r.t. aliasing. */
    template<typename OtherDerived>
    inline SparseVector& operator=(const SparseMatrixBase<OtherDerived>& other)
    {
      SparseVector tmp(other.size());
      internal::sparse_vector_assign_selector<SparseVector,OtherDerived>::run(tmp,other.derived());
      this->swap(tmp);
      return *this;
    }

    #ifndef EIGEN_PARSED_BY_DOXYGEN
    template<typename Lhs, typename Rhs>
    inline SparseVector& operator=(const SparseSparseProduct<Lhs,Rhs>& product)
    {
      return Base::operator=(product);
    }
    #endif

    /** Prints the stored coefficients as "(value,index) " pairs. */
    friend std::ostream & operator << (std::ostream & s, const SparseVector& m)
    {
      for (Index i=0; i<m.nonZeros(); ++i)
        s << "(" << m.m_data.value(i) << "," << m.m_data.index(i) << ") ";
      s << std::endl;
      return s;
    }

    /** Destructor (storage released by m_data's own destructor). */
    inline ~SparseVector() {}

    /** Sum of all coefficients; defined out of line (not in this file). */
    Scalar sum() const;

  public:

    /** \deprecated use setZero() and reserve() */
    EIGEN_DEPRECATED void startFill(Index reserve)
    {
      setZero();
      m_data.reserve(reserve);
    }

    /** \deprecated use insertBack(Index,Index) */
    EIGEN_DEPRECATED Scalar& fill(Index r, Index c)
    {
      eigen_assert(r==0 || c==0);
      return fill(IsColVector ? r : c);
    }

    /** \deprecated use insertBack(Index) */
    EIGEN_DEPRECATED Scalar& fill(Index i)
    {
      m_data.append(0, i);
      return m_data.value(m_data.size()-1);
    }

    /** \deprecated use insert(Index,Index) */
    EIGEN_DEPRECATED Scalar& fillrand(Index r, Index c)
    {
      eigen_assert(r==0 || c==0);
      return fillrand(IsColVector ? r : c);
    }

    /** \deprecated use insert(Index) */
    EIGEN_DEPRECATED Scalar& fillrand(Index i)
    {
      return insert(i);
    }

    /** \deprecated use finalize() */
    EIGEN_DEPRECATED void endFill() {}

    /** \deprecated use data() */
    EIGEN_DEPRECATED Storage& _data() { return m_data; }
    /** \deprecated use data() */
    EIGEN_DEPRECATED const Storage& _data() const { return m_data; }

#   ifdef EIGEN_SPARSEVECTOR_PLUGIN
#     include EIGEN_SPARSEVECTOR_PLUGIN
#   endif

  protected:

    // Rejects invalid template parameters at compile time.
    static void check_template_parameters()
    {
      EIGEN_STATIC_ASSERT(NumTraits<StorageIndex>::IsSigned,THE_INDEX_TYPE_MUST_BE_A_SIGNED_TYPE);
      EIGEN_STATIC_ASSERT((_Options&(ColMajor|RowMajor))==Options,INVALID_MATRIX_TEMPLATE_PARAMETERS);
    }

    Storage m_data;  // compressed (value, index) pairs, sorted by index
    Index m_size;    // logical length of the vector (incl. implicit zeros)
};
0406
0407 namespace internal {
0408
// Evaluator for SparseVector: a thin non-owning wrapper around the vector,
// exposing its iterators, flags, and nonzero count to the expression
// evaluation machinery.
template<typename _Scalar, int _Options, typename _Index>
struct evaluator<SparseVector<_Scalar,_Options,_Index> >
  : evaluator_base<SparseVector<_Scalar,_Options,_Index> >
{
  typedef SparseVector<_Scalar,_Options,_Index> SparseVectorType;
  typedef evaluator_base<SparseVectorType> Base;
  typedef typename SparseVectorType::InnerIterator InnerIterator;
  typedef typename SparseVectorType::ReverseInnerIterator ReverseInnerIterator;

  enum {
    CoeffReadCost = NumTraits<_Scalar>::ReadCost,
    Flags = SparseVectorType::Flags
  };

  evaluator() : Base() {}

  explicit evaluator(const SparseVectorType &mat) : m_matrix(&mat)
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }

  // Exact for a stored vector: simply its number of stored coefficients.
  inline Index nonZerosEstimate() const {
    return m_matrix->nonZeros();
  }

  // Implicit conversions back to the wrapped vector.
  operator SparseVectorType&() { return m_matrix->const_cast_derived(); }
  operator const SparseVectorType&() const { return *m_matrix; }

  const SparseVectorType *m_matrix;  // non-owning pointer to the evaluated vector
};
0439
0440 template< typename Dest, typename Src>
0441 struct sparse_vector_assign_selector<Dest,Src,SVA_Inner> {
0442 static void run(Dest& dst, const Src& src) {
0443 eigen_internal_assert(src.innerSize()==src.size());
0444 typedef internal::evaluator<Src> SrcEvaluatorType;
0445 SrcEvaluatorType srcEval(src);
0446 for(typename SrcEvaluatorType::InnerIterator it(srcEval, 0); it; ++it)
0447 dst.insert(it.index()) = it.value();
0448 }
0449 };
0450
0451 template< typename Dest, typename Src>
0452 struct sparse_vector_assign_selector<Dest,Src,SVA_Outer> {
0453 static void run(Dest& dst, const Src& src) {
0454 eigen_internal_assert(src.outerSize()==src.size());
0455 typedef internal::evaluator<Src> SrcEvaluatorType;
0456 SrcEvaluatorType srcEval(src);
0457 for(Index i=0; i<src.size(); ++i)
0458 {
0459 typename SrcEvaluatorType::InnerIterator it(srcEval, i);
0460 if(it)
0461 dst.insert(i) = it.value();
0462 }
0463 }
0464 };
0465
0466 template< typename Dest, typename Src>
0467 struct sparse_vector_assign_selector<Dest,Src,SVA_RuntimeSwitch> {
0468 static void run(Dest& dst, const Src& src) {
0469 if(src.outerSize()==1) sparse_vector_assign_selector<Dest,Src,SVA_Inner>::run(dst, src);
0470 else sparse_vector_assign_selector<Dest,Src,SVA_Outer>::run(dst, src);
0471 }
0472 };
0473
0474 }
0475
0476 }
0477
0478 #endif