// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008-2014 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
#ifndef EIGEN_COMPRESSED_STORAGE_H
#define EIGEN_COMPRESSED_STORAGE_H

namespace Eigen {

namespace internal {

/** \internal
  * Stores a sparse set of values as a list of values and a list of indices.
  */
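// A minimal usage sketch (illustrative only: this is an internal class, and
// the scalar/index types below are chosen arbitrarily). The search helpers
// assume the stored indices are kept sorted by the caller:
//
//   internal::CompressedStorage<double,int> s;
//   s.append(2.5, 1);    // value 2.5 at index 1
//   s.append(7.0, 4);    // appended in increasing index order
//   double a = s.at(4);  // == 7.0
//   double b = s.at(3);  // == 0.0 (the defaultValue)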
template<typename _Scalar,typename _StorageIndex>
class CompressedStorage
{
  public:

    typedef _Scalar Scalar;
    typedef _StorageIndex StorageIndex;

  protected:

    typedef typename NumTraits<Scalar>::Real RealScalar;

  public:

    CompressedStorage()
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {}

    explicit CompressedStorage(Index size)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      resize(size);
    }

    CompressedStorage(const CompressedStorage& other)
      : m_values(0), m_indices(0), m_size(0), m_allocatedSize(0)
    {
      *this = other;
    }

    CompressedStorage& operator=(const CompressedStorage& other)
    {
      resize(other.size());
      if(other.size()>0)
      {
        internal::smart_copy(other.m_values,  other.m_values  + m_size, m_values);
        internal::smart_copy(other.m_indices, other.m_indices + m_size, m_indices);
      }
      return *this;
    }

    void swap(CompressedStorage& other)
    {
      std::swap(m_values, other.m_values);
      std::swap(m_indices, other.m_indices);
      std::swap(m_size, other.m_size);
      std::swap(m_allocatedSize, other.m_allocatedSize);
    }

    ~CompressedStorage()
    {
      delete[] m_values;
      delete[] m_indices;
    }

    // Reserves room for \a size additional non-zeros on top of the current size.
    void reserve(Index size)
    {
      Index newAllocatedSize = m_size + size;
      if (newAllocatedSize > m_allocatedSize)
        reallocate(newAllocatedSize);
    }

    // Shrinks the allocated buffers to fit exactly m_size elements.
    void squeeze()
    {
      if (m_allocatedSize>m_size)
        reallocate(m_size);
    }

    // Resizes to \a size elements; \a reserveSizeFactor adds relative slack,
    // capped so that indices remain representable by StorageIndex.
    void resize(Index size, double reserveSizeFactor = 0)
    {
      if (m_allocatedSize<size)
      {
        Index realloc_size = (std::min<Index>)(NumTraits<StorageIndex>::highest(), size + Index(reserveSizeFactor*double(size)));
        if(realloc_size<size)
          internal::throw_std_bad_alloc();
        reallocate(realloc_size);
      }
      m_size = size;
    }
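    // Illustrative note: append() below calls resize(m_size+1, 1), i.e. a
    // reserveSizeFactor of 1, so a reallocation roughly doubles the capacity
    // (amortized O(1) appends) as long as 2*(m_size+1) stays below
    // NumTraits<StorageIndex>::highest().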

    // Appends the pair (\a v, \a i); the caller must keep indices sorted
    // for the search helpers below to remain valid.
    void append(const Scalar& v, Index i)
    {
      Index id = m_size;
      resize(m_size+1, 1);
      m_values[id] = v;
      m_indices[id] = internal::convert_index<StorageIndex>(i);
    }

    inline Index size() const { return m_size; }
    inline Index allocatedSize() const { return m_allocatedSize; }
    inline void clear() { m_size = 0; }

    const Scalar* valuePtr() const { return m_values; }
    Scalar* valuePtr() { return m_values; }
    const StorageIndex* indexPtr() const { return m_indices; }
    StorageIndex* indexPtr() { return m_indices; }

    inline Scalar& value(Index i) { eigen_internal_assert(m_values!=0); return m_values[i]; }
    inline const Scalar& value(Index i) const { eigen_internal_assert(m_values!=0); return m_values[i]; }

    inline StorageIndex& index(Index i) { eigen_internal_assert(m_indices!=0); return m_indices[i]; }
    inline const StorageIndex& index(Index i) const { eigen_internal_assert(m_indices!=0); return m_indices[i]; }

    /** \returns the largest \c k such that for all \c j in [0,k) index[j]\<key */
    inline Index searchLowerIndex(Index key) const
    {
      return searchLowerIndex(0, m_size, key);
    }

    /** \returns the largest \c k in [start,end) such that for all \c j in [start,k) index[j]\<key */
    inline Index searchLowerIndex(Index start, Index end, Index key) const
    {
      while(end>start)
      {
        Index mid = (end+start)>>1;
        if (m_indices[mid]<key)
          start = mid+1;
        else
          end = mid;
      }
      return start;
    }
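    // Illustrative sketch (hypothetical data): with stored indices [1,4,4,7],
    //   searchLowerIndex(0,4,4) == 1   (first slot whose index is >= 4)
    //   searchLowerIndex(0,4,5) == 3   (key absent: slot of next larger index)
    //   searchLowerIndex(0,4,9) == 4   (== end, all indices are smaller)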

    /** \returns the stored value at index \a key
      * If the value does not exist, then the value \a defaultValue is returned without any insertion. */
    inline Scalar at(Index key, const Scalar& defaultValue = Scalar(0)) const
    {
      if (m_size==0)
        return defaultValue;
      else if (key==m_indices[m_size-1])
        return m_values[m_size-1];
      // ^^ optimization: check the last coefficient first, as it is
      // the most frequently accessed one in high-level algorithms
      const Index id = searchLowerIndex(0,m_size-1,key);
      return ((id<m_size) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }

    /** Like at(), but the search is performed in the range [start,end) */
    inline Scalar atInRange(Index start, Index end, Index key, const Scalar &defaultValue = Scalar(0)) const
    {
      if (start>=end)
        return defaultValue;
      else if (end>start && key==m_indices[end-1])
        return m_values[end-1];
      // ^^ optimization: check the last coefficient of the range first
      const Index id = searchLowerIndex(start,end-1,key);
      return ((id<end) && (m_indices[id]==key)) ? m_values[id] : defaultValue;
    }

    /** \returns a reference to the value at index \a key
      * If the value does not exist, then the value \a defaultValue is inserted,
      * which means that the storage grows by one element. */
    inline Scalar& atWithInsertion(Index key, const Scalar& defaultValue = Scalar(0))
    {
      Index id = searchLowerIndex(0,m_size,key);
      if (id>=m_size || m_indices[id]!=key)
      {
        if (m_allocatedSize<m_size+1)
        {
          m_allocatedSize = 2*(m_size+1);
          internal::scoped_array<Scalar> newValues(m_allocatedSize);
          internal::scoped_array<StorageIndex> newIndices(m_allocatedSize);

          // copy the [0,id) prefix into the new buffers
          internal::smart_copy(m_values,  m_values +id, newValues.ptr());
          internal::smart_copy(m_indices, m_indices+id, newIndices.ptr());

          // copy the rest, leaving slot id free for the new element
          if(m_size>id)
          {
            internal::smart_copy(m_values +id,  m_values +m_size, newValues.ptr() +id+1);
            internal::smart_copy(m_indices+id,  m_indices+m_size, newIndices.ptr()+id+1);
          }
          std::swap(m_values,newValues.ptr());
          std::swap(m_indices,newIndices.ptr());
        }
        else if(m_size>id)
        {
          // enough capacity: shift the tail one slot to the right in place
          internal::smart_memmove(m_values +id, m_values +m_size, m_values +id+1);
          internal::smart_memmove(m_indices+id, m_indices+m_size, m_indices+id+1);
        }
        m_size++;
        m_indices[id] = internal::convert_index<StorageIndex>(key);
        m_values[id] = defaultValue;
      }
      return m_values[id];
    }
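    // Illustrative sketch (hypothetical data): starting from indices [1,4] and
    // values [2.5,7.0], atWithInsertion(3) shifts the tail one slot right,
    // leaving indices [1,3,4] and values [2.5,0.0,7.0], and returns a
    // reference to the freshly inserted 0.0 at slot 1.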

    // Moves the chunk [from,from+chunkSize) onto position \a to, handling
    // the case where the source and destination ranges overlap.
    void moveChunk(Index from, Index to, Index chunkSize)
    {
      eigen_internal_assert(to+chunkSize <= m_size);
      if(to>from && from+chunkSize>to)
      {
        // overlapping ranges with to>from: a memmove-like copy is required
        internal::smart_memmove(m_values+from,  m_values+from+chunkSize,  m_values+to);
        internal::smart_memmove(m_indices+from, m_indices+from+chunkSize, m_indices+to);
      }
      else
      {
        internal::smart_copy(m_values+from,  m_values+from+chunkSize,  m_values+to);
        internal::smart_copy(m_indices+from, m_indices+from+chunkSize, m_indices+to);
      }
    }
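    // Illustrative sketch (hypothetical data): with indices [1,3,4,6] and
    // values [a,b,c,d], moveChunk(2,1,2) copies slots [2,4) onto slot 1,
    // giving [1,4,6,6] / [a,c,d,d]; the caller then shrinks the size to
    // drop the stale trailing entry (e.g. when erasing an element).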

    // Removes all coefficients that are "much smaller" than \a reference
    // (as measured by internal::isMuchSmallerThan with threshold \a epsilon),
    // compacting the remaining entries in place.
    void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      Index k = 0;
      Index n = size();
      for (Index i=0; i<n; ++i)
      {
        if (!internal::isMuchSmallerThan(value(i), reference, epsilon))
        {
          value(k) = value(i);
          index(k) = index(i);
          ++k;
        }
      }
      resize(k,0);
    }
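    // Illustrative sketch (hypothetical data): with values [1e-20, 2.0, 5.0]
    // and reference 1.0, prune(1.0) keeps [2.0, 5.0] together with their
    // indices and shrinks the size from 3 to 2.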

  protected:

    // Reallocates both buffers to \a size elements, preserving the first
    // min(size, m_size) entries.
    inline void reallocate(Index size)
    {
      #ifdef EIGEN_SPARSE_COMPRESSED_STORAGE_REALLOCATE_PLUGIN
        EIGEN_SPARSE_COMPRESSED_STORAGE_REALLOCATE_PLUGIN
      #endif
      eigen_internal_assert(size!=m_allocatedSize);
      internal::scoped_array<Scalar> newValues(size);
      internal::scoped_array<StorageIndex> newIndices(size);
      Index copySize = (std::min)(size, m_size);
      if (copySize>0) {
        internal::smart_copy(m_values, m_values+copySize, newValues.ptr());
        internal::smart_copy(m_indices, m_indices+copySize, newIndices.ptr());
      }
      std::swap(m_values,newValues.ptr());
      std::swap(m_indices,newIndices.ptr());
      m_allocatedSize = size;
    }

  protected:
    Scalar* m_values;
    StorageIndex* m_indices;
    Index m_size;
    Index m_allocatedSize;

};

} // end namespace internal

} // end namespace Eigen

#endif // EIGEN_COMPRESSED_STORAGE_H