// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2015 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_SPARSE_COMPRESSED_BASE_H
#define EIGEN_SPARSE_COMPRESSED_BASE_H

namespace Eigen {

template<typename Derived> class SparseCompressedBase;

namespace internal {

template<typename Derived>
struct traits<SparseCompressedBase<Derived> > : traits<Derived>
{};

} // end namespace internal

/** \ingroup SparseCore_Module
  * \class SparseCompressedBase
  * \brief Common base class for sparse [compressed]-{row|column}-storage format.
  *
  * This class defines the common interface for all derived classes implementing the
  * compressed sparse storage format, such as:
  *  - SparseMatrix
  *  - Ref<SparseMatrixType,Options>
  *  - Map<SparseMatrixType>
  *
  */
template<typename Derived>
class SparseCompressedBase
  : public SparseMatrixBase<Derived>
{
  public:
    typedef SparseMatrixBase<Derived> Base;
    EIGEN_SPARSE_PUBLIC_INTERFACE(SparseCompressedBase)
    using Base::operator=;
    using Base::IsRowMajor;

    class InnerIterator;
    class ReverseInnerIterator;

  protected:
    typedef typename Base::IndexVector IndexVector;
    Eigen::Map<IndexVector> innerNonZeros() { return Eigen::Map<IndexVector>(innerNonZeroPtr(), isCompressed()?0:derived().outerSize()); }
    const Eigen::Map<const IndexVector> innerNonZeros() const { return Eigen::Map<const IndexVector>(innerNonZeroPtr(), isCompressed()?0:derived().outerSize()); }

  public:

    /** \returns the number of non zero coefficients */
    inline Index nonZeros() const
    {
      if(Derived::IsVectorAtCompileTime && outerIndexPtr()==0)
        return derived().nonZeros();
      else if(isCompressed())
        return outerIndexPtr()[derived().outerSize()]-outerIndexPtr()[0];
      else if(derived().outerSize()==0)
        return 0;
      else
        return innerNonZeros().sum();
    }
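
    // Illustrative sketch (comment only, not part of the interface): for the
    // column-major 3x3 matrix
    //
    //   [ 1 0 4 ]
    //   [ 0 3 0 ]
    //   [ 2 0 5 ]
    //
    // compressed storage holds
    //
    //   valuePtr()      : 1 2 3 4 5
    //   innerIndexPtr() : 0 2 1 0 2
    //   outerIndexPtr() : 0 2 3 5
    //
    // so nonZeros() == outerIndexPtr()[3]-outerIndexPtr()[0] == 5. In uncompressed
    // mode each inner vector may reserve extra room, hence the per-vector counts
    // stored in innerNonZeroPtr() are summed instead.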

    /** \returns a const pointer to the array of values.
      * This function is aimed at interoperability with other libraries.
      * \sa innerIndexPtr(), outerIndexPtr() */
    inline const Scalar* valuePtr() const { return derived().valuePtr(); }
    /** \returns a non-const pointer to the array of values.
      * This function is aimed at interoperability with other libraries.
      * \sa innerIndexPtr(), outerIndexPtr() */
    inline Scalar* valuePtr() { return derived().valuePtr(); }

    /** \returns a const pointer to the array of inner indices.
      * This function is aimed at interoperability with other libraries.
      * \sa valuePtr(), outerIndexPtr() */
    inline const StorageIndex* innerIndexPtr() const { return derived().innerIndexPtr(); }
    /** \returns a non-const pointer to the array of inner indices.
      * This function is aimed at interoperability with other libraries.
      * \sa valuePtr(), outerIndexPtr() */
    inline StorageIndex* innerIndexPtr() { return derived().innerIndexPtr(); }

    /** \returns a const pointer to the array of the starting positions of the inner vectors.
      * This function is aimed at interoperability with other libraries.
      * \warning it returns the null pointer 0 for SparseVector
      * \sa valuePtr(), innerIndexPtr() */
    inline const StorageIndex* outerIndexPtr() const { return derived().outerIndexPtr(); }
    /** \returns a non-const pointer to the array of the starting positions of the inner vectors.
      * This function is aimed at interoperability with other libraries.
      * \warning it returns the null pointer 0 for SparseVector
      * \sa valuePtr(), innerIndexPtr() */
    inline StorageIndex* outerIndexPtr() { return derived().outerIndexPtr(); }

    /** \returns a const pointer to the array of the number of non zeros of the inner vectors.
      * This function is aimed at interoperability with other libraries.
      * \warning it returns the null pointer 0 in compressed mode */
    inline const StorageIndex* innerNonZeroPtr() const { return derived().innerNonZeroPtr(); }
    /** \returns a non-const pointer to the array of the number of non zeros of the inner vectors.
      * This function is aimed at interoperability with other libraries.
      * \warning it returns the null pointer 0 in compressed mode */
    inline StorageIndex* innerNonZeroPtr() { return derived().innerNonZeroPtr(); }

    /** \returns whether \c *this is in compressed form. \sa makeCompressed() */
    inline bool isCompressed() const { return innerNonZeroPtr()==0; }

    /** \returns a read-only view of the stored coefficients as a 1D array expression.
      *
      * \warning this method is for \b compressed \b storage \b only, and it will trigger an assertion otherwise.
      *
      * \sa valuePtr(), isCompressed() */
    const Map<const Array<Scalar,Dynamic,1> > coeffs() const { eigen_assert(isCompressed()); return Array<Scalar,Dynamic,1>::Map(valuePtr(),nonZeros()); }

    /** \returns a read-write view of the stored coefficients as a 1D array expression.
      *
      * \warning this method is for \b compressed \b storage \b only, and it will trigger an assertion otherwise.
      *
      * \sa valuePtr(), isCompressed() */
    Map<Array<Scalar,Dynamic,1> > coeffs() { eigen_assert(isCompressed()); return Array<Scalar,Dynamic,1>::Map(valuePtr(),nonZeros()); }
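
    // Usage sketch (hypothetical names, assuming a SparseMatrix<double> A):
    //
    //   A.makeCompressed();                     // coeffs() asserts isCompressed()
    //   A.coeffs() *= 2;                        // scale every stored coefficient in place
    //   double m = A.coeffs().abs().maxCoeff(); // reduce over stored values only
    //
    // Note that explicitly stored zeros are visited as well; prune() them first
    // if that matters.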

  protected:
    /** Default constructor. Do nothing. */
    SparseCompressedBase() {}

    /** \internal return the index of the coeff at (row,col) or just before if it does not exist.
      * This is an analogue of std::lower_bound.
      */
    internal::LowerBoundIndex lower_bound(Index row, Index col) const
    {
      eigen_internal_assert(row>=0 && row<this->rows() && col>=0 && col<this->cols());

      const Index outer = Derived::IsRowMajor ? row : col;
      const Index inner = Derived::IsRowMajor ? col : row;

      Index start = this->outerIndexPtr()[outer];
      Index end = this->isCompressed() ? this->outerIndexPtr()[outer+1] : this->outerIndexPtr()[outer] + this->innerNonZeroPtr()[outer];
      eigen_assert(end>=start && "you are using a non finalized sparse matrix or written coefficient does not exist");
      internal::LowerBoundIndex p;
      p.value = std::lower_bound(this->innerIndexPtr()+start, this->innerIndexPtr()+end,inner) - this->innerIndexPtr();
      p.found = (p.value<end) && (this->innerIndexPtr()[p.value]==inner);
      return p;
    }
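
    // Sketch of the intended use (mat, i, j and v are placeholders): locating the
    // storage position of coefficient (i,j), as the friended evaluator below does:
    //
    //   internal::LowerBoundIndex p = mat.lower_bound(i,j);
    //   if(p.found)
    //     v = mat.valuePtr()[p.value];   // (i,j) is explicitly stored
    //   else
    //     v = 0;                         // treat the coefficient as zero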

    friend struct internal::evaluator<SparseCompressedBase<Derived> >;

  private:
    // copy construction from a different derived type is intentionally disabled
    template<typename OtherDerived> explicit SparseCompressedBase(const SparseCompressedBase<OtherDerived>&);
};

template<typename Derived>
class SparseCompressedBase<Derived>::InnerIterator
{
  public:
    InnerIterator()
      : m_values(0), m_indices(0), m_outer(0), m_id(0), m_end(0)
    {}

    InnerIterator(const InnerIterator& other)
      : m_values(other.m_values), m_indices(other.m_indices), m_outer(other.m_outer), m_id(other.m_id), m_end(other.m_end)
    {}

    InnerIterator& operator=(const InnerIterator& other)
    {
      m_values = other.m_values;
      m_indices = other.m_indices;
      const_cast<OuterType&>(m_outer).setValue(other.m_outer.value());
      m_id = other.m_id;
      m_end = other.m_end;
      return *this;
    }

    InnerIterator(const SparseCompressedBase& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer)
    {
      if(Derived::IsVectorAtCompileTime && mat.outerIndexPtr()==0)
      {
        m_id = 0;
        m_end = mat.nonZeros();
      }
      else
      {
        m_id = mat.outerIndexPtr()[outer];
        if(mat.isCompressed())
          m_end = mat.outerIndexPtr()[outer+1];
        else
          m_end = m_id + mat.innerNonZeroPtr()[outer];
      }
    }

    explicit InnerIterator(const SparseCompressedBase& mat)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(0), m_id(0), m_end(mat.nonZeros())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }

    explicit InnerIterator(const internal::CompressedStorage<Scalar,StorageIndex>& data)
      : m_values(data.valuePtr()), m_indices(data.indexPtr()), m_outer(0), m_id(0), m_end(data.size())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }

    inline InnerIterator& operator++() { m_id++; return *this; }
    inline InnerIterator& operator+=(Index i) { m_id += i; return *this; }

    inline InnerIterator operator+(Index i)
    {
      InnerIterator result = *this;
      result += i;
      return result;
    }

    inline const Scalar& value() const { return m_values[m_id]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id]); }

    inline StorageIndex index() const { return m_indices[m_id]; }
    inline Index outer() const { return m_outer.value(); }
    inline Index row() const { return IsRowMajor ? m_outer.value() : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer.value(); }

    inline operator bool() const { return (m_id < m_end); }

  protected:
    const Scalar* m_values;
    const StorageIndex* m_indices;
    typedef internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> OuterType;
    const OuterType m_outer;
    Index m_id;
    Index m_end;
  private:
    // If you get here, then you're not using the right InnerIterator type, e.g.:
    //   SparseMatrix<double,RowMajor> A;
    //   SparseMatrix<double>::InnerIterator it(A,0);
    template<typename T> InnerIterator(const SparseMatrixBase<T>&, Index outer);
};
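
// Usage sketch: the canonical way to visit all non-zeros, one inner vector at a
// time (mat and visit are placeholders; iteration is in increasing index() order):
//
//   for (Index k = 0; k < mat.outerSize(); ++k)
//     for (SparseMatrix<double>::InnerIterator it(mat,k); it; ++it)
//       visit(it.row(), it.col(), it.value());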

template<typename Derived>
class SparseCompressedBase<Derived>::ReverseInnerIterator
{
  public:
    ReverseInnerIterator(const SparseCompressedBase& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer)
    {
      if(Derived::IsVectorAtCompileTime && mat.outerIndexPtr()==0)
      {
        m_start = 0;
        m_id = mat.nonZeros();
      }
      else
      {
        m_start = mat.outerIndexPtr()[outer];
        if(mat.isCompressed())
          m_id = mat.outerIndexPtr()[outer+1];
        else
          m_id = m_start + mat.innerNonZeroPtr()[outer];
      }
    }

    explicit ReverseInnerIterator(const SparseCompressedBase& mat)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(0), m_start(0), m_id(mat.nonZeros())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }

    explicit ReverseInnerIterator(const internal::CompressedStorage<Scalar,StorageIndex>& data)
      : m_values(data.valuePtr()), m_indices(data.indexPtr()), m_outer(0), m_start(0), m_id(data.size())
    {
      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
    }

    inline ReverseInnerIterator& operator--() { --m_id; return *this; }
    inline ReverseInnerIterator& operator-=(Index i) { m_id -= i; return *this; }

    inline ReverseInnerIterator operator-(Index i)
    {
      ReverseInnerIterator result = *this;
      result -= i;
      return result;
    }

    inline const Scalar& value() const { return m_values[m_id-1]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id-1]); }

    inline StorageIndex index() const { return m_indices[m_id-1]; }
    inline Index outer() const { return m_outer.value(); }
    inline Index row() const { return IsRowMajor ? m_outer.value() : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer.value(); }

    inline operator bool() const { return (m_id > m_start); }

  protected:
    const Scalar* m_values;
    const StorageIndex* m_indices;
    typedef internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> OuterType;
    const OuterType m_outer;
    Index m_start;
    Index m_id;
};
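
// Usage sketch, mirroring InnerIterator but walking each inner vector from the
// last stored entry down to the first (mat and visit are placeholders):
//
//   for (Index k = 0; k < mat.outerSize(); ++k)
//     for (SparseMatrix<double>::ReverseInnerIterator it(mat,k); it; --it)
//       visit(it.row(), it.col(), it.value());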

namespace internal {

template<typename Derived>
struct evaluator<SparseCompressedBase<Derived> >
  : evaluator_base<Derived>
{
  typedef typename Derived::Scalar Scalar;
  typedef typename Derived::InnerIterator InnerIterator;

  enum {
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    Flags = Derived::Flags
  };

  evaluator() : m_matrix(0), m_zero(0)
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }
  explicit evaluator(const Derived &mat) : m_matrix(&mat), m_zero(0)
  {
    EIGEN_INTERNAL_CHECK_COST_VALUE(CoeffReadCost);
  }

  inline Index nonZerosEstimate() const {
    return m_matrix->nonZeros();
  }

  operator Derived&() { return m_matrix->const_cast_derived(); }
  operator const Derived&() const { return *m_matrix; }

  typedef typename DenseCoeffsBase<Derived,ReadOnlyAccessors>::CoeffReturnType CoeffReturnType;
  const Scalar& coeff(Index row, Index col) const
  {
    Index p = find(row,col);

    if(p==Dynamic)
      return m_zero;
    else
      return m_matrix->const_cast_derived().valuePtr()[p];
  }

  Scalar& coeffRef(Index row, Index col)
  {
    Index p = find(row,col);
    eigen_assert(p!=Dynamic && "written coefficient does not exist");
    return m_matrix->const_cast_derived().valuePtr()[p];
  }

protected:

  Index find(Index row, Index col) const
  {
    internal::LowerBoundIndex p = m_matrix->lower_bound(row,col);
    return p.found ? p.value : Dynamic;
  }

  const Derived *m_matrix;
  const Scalar m_zero;
};
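
// Cost note (sketch, not part of the original header): a random read through
// this evaluator, e.g.
//
//   internal::evaluator<SparseMatrix<double> > eval(mat);
//   double v = eval.coeff(i,j);   // binary search in one inner vector,
//                                 // O(log nnz); reads as 0 if (i,j) is absent
//
// hence InnerIterator traversal is preferable when visiting many entries.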

} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_SPARSE_COMPRESSED_BASE_H