File indexing completed on 2025-01-18 09:57:07
0001
0002
0003
0004
0005
0006
0007
0008
0009
0010 #ifndef EIGEN_DYNAMIC_SPARSEMATRIX_H
0011 #define EIGEN_DYNAMIC_SPARSEMATRIX_H
0012
0013 namespace Eigen {
0014
0015
0016
0017
0018
0019
0020
0021
0022
0023
0024
0025
0026
0027
0028
0029
0030
0031
0032
0033
0034
namespace internal {
// Expression traits for DynamicSparseMatrix: a fully dynamic-sized, writable
// sparse matrix expression whose storage supports random access to its outer
// vectors (each inner vector is an independent compressed buffer).
template<typename _Scalar, int _Options, typename _StorageIndex>
struct traits<DynamicSparseMatrix<_Scalar, _Options, _StorageIndex> >
{
  typedef _Scalar Scalar;
  typedef _StorageIndex StorageIndex;
  typedef Sparse StorageKind;
  typedef MatrixXpr XprKind;
  enum {
    RowsAtCompileTime = Dynamic,
    ColsAtCompileTime = Dynamic,
    MaxRowsAtCompileTime = Dynamic,
    MaxColsAtCompileTime = Dynamic,
    // _Options carries the storage order (RowMajorBit); the matrix must be
    // nested by reference (owns storage) and is an lvalue (writable).
    Flags = _Options | NestByRefBit | LvalueBit,
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    SupportedAccessPatterns = OuterRandomAccessPattern
  };
};
}
0054
/** \deprecated Use a SparseMatrix in an uncompressed mode instead.
  *
  * \class DynamicSparseMatrix
  *
  * \brief A sparse matrix class designed for matrix assembly purposes.
  *
  * \tparam _Scalar the scalar type, e.g. float or double
  * \tparam _Options storage order, ColMajor (default) or RowMajor
  * \tparam _StorageIndex the type of the indices
  *
  * Unlike SparseMatrix, each inner vector (a column for a column-major matrix,
  * a row for a row-major one) is stored as its own compressed (index,value)
  * buffer, so random insertions are cheap at the cost of non-contiguous data
  * and slower coefficient access.
  */
template<typename _Scalar, int _Options, typename _StorageIndex>
class DynamicSparseMatrix
  : public SparseMatrixBase<DynamicSparseMatrix<_Scalar, _Options, _StorageIndex> >
{
    typedef SparseMatrixBase<DynamicSparseMatrix> Base;
    using Base::convert_index;
  public:
    EIGEN_SPARSE_PUBLIC_INTERFACE(DynamicSparseMatrix)
    // Non-owning map type kept for API compatibility.
    typedef MappedSparseMatrix<Scalar,Flags> Map;
    using Base::IsRowMajor;
    using Base::operator=;
    enum {
      Options = _Options
    };

  protected:

    // Same matrix type with the opposite storage order.
    typedef DynamicSparseMatrix<Scalar,(Flags&~RowMajorBit)|(IsRowMajor?RowMajorBit:0), StorageIndex> TransposedSparseMatrix;

    Index m_innerSize;  // inner dimension: #rows if column-major, #cols if row-major
    // One independent compressed (index,value) buffer per inner vector;
    // the outer dimension is m_data.size().
    std::vector<internal::CompressedStorage<Scalar,StorageIndex> > m_data;

  public:

    inline Index rows() const { return IsRowMajor ? outerSize() : m_innerSize; }
    inline Index cols() const { return IsRowMajor ? m_innerSize : outerSize(); }
    inline Index innerSize() const { return m_innerSize; }
    inline Index outerSize() const { return convert_index(m_data.size()); }
    /** \returns the number of stored coefficients in the \a j -th inner vector. */
    inline Index innerNonZeros(Index j) const { return m_data[j].size(); }

    // Low-level access to the per-vector compressed storage.
    std::vector<internal::CompressedStorage<Scalar,StorageIndex> >& _data() { return m_data; }
    const std::vector<internal::CompressedStorage<Scalar,StorageIndex> >& _data() const { return m_data; }

    /** \returns the coefficient value at given position \a row, \a col.
      * Looks the inner index up inside the target inner vector (CompressedStorage::at);
      * absent coefficients read as zero. */
    inline Scalar coeff(Index row, Index col) const
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      return m_data[outer].at(inner);
    }

    /** \returns a non-const reference to the coefficient at \a row, \a col.
      * If the coefficient is not stored yet it is inserted
      * (CompressedStorage::atWithInsertion), which may shift the tail of the
      * inner vector. */
    inline Scalar& coeffRef(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      return m_data[outer].atWithInsertion(inner);
    }

    class InnerIterator;
    class ReverseInnerIterator;

    /** Removes all stored coefficients; dimensions are preserved. */
    void setZero()
    {
      for (Index j=0; j<outerSize(); ++j)
        m_data[j].clear();
    }

    /** \returns the number of stored (non-zero) coefficients.
      * Costs O(outerSize()): sums the sizes of all inner vectors. */
    Index nonZeros() const
    {
      Index res = 0;
      for (Index j=0; j<outerSize(); ++j)
        res += m_data[j].size();
      return res;
    }

    /** Preallocates room for roughly \a reserveSize coefficients in total,
      * spreading the budget evenly over the inner vectors (at least 4 each). */
    void reserve(Index reserveSize = 1000)
    {
      if (outerSize()>0)
      {
        Index reserveSizePerVector = (std::max)(reserveSize/outerSize(),Index(4));
        for (Index j=0; j<outerSize(); ++j)
        {
          m_data[j].reserve(reserveSizePerVector);
        }
      }
    }

    /** Does nothing: provided for compatibility with the SparseMatrix assembly API. */
    inline void startVec(Index /*outer*/) {}

    /** Appends the coefficient (\a row, \a col) at the end of its inner vector
      * and returns a reference to its (zero-initialized) value.
      * The inner index must be strictly greater than the last inner index
      * already stored in that vector (sorted insertion). \sa insertBackByOuterInner */
    inline Scalar& insertBack(Index row, Index col)
    {
      return insertBackByOuterInner(IsRowMajor?row:col, IsRowMajor?col:row);
    }

    /** \sa insertBack — same operation with explicit (outer, inner) coordinates. */
    inline Scalar& insertBackByOuterInner(Index outer, Index inner)
    {
      eigen_assert(outer<Index(m_data.size()) && inner<m_innerSize && "out of range");
      eigen_assert(((m_data[outer].size()==0) || (m_data[outer].index(m_data[outer].size()-1)<inner))
                    && "wrong sorted insertion");
      m_data[outer].append(0, inner);
      return m_data[outer].value(m_data[outer].size()-1);
    }

    /** Inserts the coefficient (\a row, \a col) — which must not already exist —
      * and returns a reference to its (zero-initialized) value.
      * One insertion-sort step: larger inner indices are shifted up by one,
      * so the cost is linear in the tail of the inner vector. */
    inline Scalar& insert(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      Index startId = 0;
      Index id = static_cast<Index>(m_data[outer].size()) - 1;
      m_data[outer].resize(id+2,1);  // grow by one (id is last valid index, may be -1)

      while ( (id >= startId) && (m_data[outer].index(id) > inner) )
      {
        m_data[outer].index(id+1) = m_data[outer].index(id);
        m_data[outer].value(id+1) = m_data[outer].value(id);
        --id;
      }
      m_data[outer].index(id+1) = inner;
      m_data[outer].value(id+1) = 0;
      return m_data[outer].value(id+1);
    }

    /** Does nothing: provided for compatibility with the SparseMatrix assembly API. */
    inline void finalize() {}

    /** Suppresses all coefficients approximately equal to \a reference,
      * within the relative precision \a epsilon. */
    void prune(Scalar reference, RealScalar epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      for (Index j=0; j<outerSize(); ++j)
        m_data[j].prune(reference,epsilon);
    }

    /** Resizes the matrix to \a rows x \a cols, discarding all existing
      * coefficients (setZero() is called unconditionally). */
    void resize(Index rows, Index cols)
    {
      const Index outerSize = IsRowMajor ? rows : cols;
      m_innerSize = convert_index(IsRowMajor ? cols : rows);
      setZero();
      if (Index(m_data.size()) != outerSize)
      {
        m_data.resize(outerSize);
      }
    }

    /** Resizes to \a rows x \a cols while keeping existing coefficients.
      * NOTE(review): shrinking the inner dimension is not implemented and
      * aborts the whole process via exit(2) — an eigen_assert / exception
      * would be friendlier; confirm intended behavior before changing.
      * Shrinking the outer dimension silently drops the trailing inner vectors. */
    void resizeAndKeepData(Index rows, Index cols)
    {
      const Index outerSize = IsRowMajor ? rows : cols;
      const Index innerSize = IsRowMajor ? cols : rows;
      if (m_innerSize>innerSize)
      {
        // remove all coefficients with innerCoord>=innerSize
        // TODO: not implemented yet
        exit(2);
      }
      if (m_data.size() != outerSize)
      {
        m_data.resize(outerSize);
      }
    }

    /** The class DynamicSparseMatrix is deprecated. */
    EIGEN_DEPRECATED inline DynamicSparseMatrix()
      : m_innerSize(0), m_data(0)
    {
      #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
        EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
      #endif
      eigen_assert(innerSize()==0 && outerSize()==0);
    }

    /** The class DynamicSparseMatrix is deprecated. */
    EIGEN_DEPRECATED inline DynamicSparseMatrix(Index rows, Index cols)
      : m_innerSize(0)
    {
      #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
        EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
      #endif
      resize(rows, cols);
    }

    /** The class DynamicSparseMatrix is deprecated. */
    template<typename OtherDerived>
    EIGEN_DEPRECATED explicit inline DynamicSparseMatrix(const SparseMatrixBase<OtherDerived>& other)
      : m_innerSize(0)
    {
      #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
        EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
      #endif
      Base::operator=(other.derived());
    }

    inline DynamicSparseMatrix(const DynamicSparseMatrix& other)
      : Base(), m_innerSize(0)
    {
      #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
        EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
      #endif
      *this = other.derived();
    }

    /** Swaps the content of \c *this with \a other — cheap: only the inner
      * size and the vector of buffers are exchanged. */
    inline void swap(DynamicSparseMatrix& other)
    {
      std::swap(m_innerSize, other.m_innerSize);
      m_data.swap(other.m_data);
    }

    inline DynamicSparseMatrix& operator=(const DynamicSparseMatrix& other)
    {
      if (other.isRValue())
      {
        // steal the content of the marked-temporary right-hand side
        swap(other.const_cast_derived());
      }
      else
      {
        resize(other.rows(), other.cols());
        m_data = other.m_data;
      }
      return *this;
    }

    /** Destructor */
    inline ~DynamicSparseMatrix() {}

  public:

    /** \deprecated
      * Set the matrix to zero and reserve memory for \a reserveSize nonzero coefficients. */
    EIGEN_DEPRECATED void startFill(Index reserveSize = 1000)
    {
      setZero();
      reserve(reserveSize);
    }

    /** \deprecated use insertBack()
      * Inserts a nonzero coefficient at (\a row, \a col); coefficients must be
      * inserted in sorted order (column by column for a column-major matrix,
      * row by row within each column).
      * NOTE(review): this passes (outer, inner) into insertBack, whose
      * parameters are (row, col) and which swaps again for column-major
      * matrices — the coordinates appear to end up transposed in the
      * column-major case. Looks like a latent bug in this deprecated API;
      * confirm against callers before changing. */
    EIGEN_DEPRECATED Scalar& fill(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      return insertBack(outer,inner);
    }

    /** \deprecated use insert()
      * Like fill() but the coefficient may be inserted at an arbitrary
      * (random) position within its inner vector. */
    EIGEN_DEPRECATED Scalar& fillrand(Index row, Index col)
    {
      return insert(row,col);
    }

    /** \deprecated use finalize()
      * Does nothing; kept for compatibility with the previous assembly API. */
    EIGEN_DEPRECATED void endFill() {}

#   ifdef EIGEN_DYNAMICSPARSEMATRIX_PLUGIN
#     include EIGEN_DYNAMICSPARSEMATRIX_PLUGIN
#   endif
};
0336
0337 template<typename Scalar, int _Options, typename _StorageIndex>
0338 class DynamicSparseMatrix<Scalar,_Options,_StorageIndex>::InnerIterator : public SparseVector<Scalar,_Options,_StorageIndex>::InnerIterator
0339 {
0340 typedef typename SparseVector<Scalar,_Options,_StorageIndex>::InnerIterator Base;
0341 public:
0342 InnerIterator(const DynamicSparseMatrix& mat, Index outer)
0343 : Base(mat.m_data[outer]), m_outer(outer)
0344 {}
0345
0346 inline Index row() const { return IsRowMajor ? m_outer : Base::index(); }
0347 inline Index col() const { return IsRowMajor ? Base::index() : m_outer; }
0348 inline Index outer() const { return m_outer; }
0349
0350 protected:
0351 const Index m_outer;
0352 };
0353
0354 template<typename Scalar, int _Options, typename _StorageIndex>
0355 class DynamicSparseMatrix<Scalar,_Options,_StorageIndex>::ReverseInnerIterator : public SparseVector<Scalar,_Options,_StorageIndex>::ReverseInnerIterator
0356 {
0357 typedef typename SparseVector<Scalar,_Options,_StorageIndex>::ReverseInnerIterator Base;
0358 public:
0359 ReverseInnerIterator(const DynamicSparseMatrix& mat, Index outer)
0360 : Base(mat.m_data[outer]), m_outer(outer)
0361 {}
0362
0363 inline Index row() const { return IsRowMajor ? m_outer : Base::index(); }
0364 inline Index col() const { return IsRowMajor ? Base::index() : m_outer; }
0365 inline Index outer() const { return m_outer; }
0366
0367 protected:
0368 const Index m_outer;
0369 };
0370
namespace internal {

// Evaluator for DynamicSparseMatrix: a thin wrapper holding a pointer to the
// matrix and forwarding coefficient and iterator access to it.
template<typename _Scalar, int _Options, typename _StorageIndex>
struct evaluator<DynamicSparseMatrix<_Scalar,_Options,_StorageIndex> >
  : evaluator_base<DynamicSparseMatrix<_Scalar,_Options,_StorageIndex> >
{
  typedef _Scalar Scalar;
  typedef DynamicSparseMatrix<_Scalar,_Options,_StorageIndex> SparseMatrixType;
  typedef typename SparseMatrixType::InnerIterator InnerIterator;
  typedef typename SparseMatrixType::ReverseInnerIterator ReverseInnerIterator;

  enum {
    CoeffReadCost = NumTraits<_Scalar>::ReadCost,
    Flags = SparseMatrixType::Flags
  };

  // A default-constructed evaluator holds no matrix and must be assigned
  // before any access.
  evaluator() : m_matrix(0) {}
  evaluator(const SparseMatrixType &mat) : m_matrix(&mat) {}

  // Implicit conversions back to the wrapped matrix (const and non-const).
  operator SparseMatrixType&() { return m_matrix->const_cast_derived(); }
  operator const SparseMatrixType&() const { return *m_matrix; }

  Scalar coeff(Index row, Index col) const { return m_matrix->coeff(row,col); }

  // Exact count (O(outerSize) sum over inner vectors), used by product
  // cost heuristics despite the "estimate" name.
  Index nonZerosEstimate() const { return m_matrix->nonZeros(); }

  const SparseMatrixType *m_matrix;
};

}
0401
0402 }
0403
0404 #endif