// File indexing completed on 2025-02-22 10:34:40
0001
0002
0003
0004
0005
0006
0007
0008
0009
0010 #ifndef EIGEN_AMBIVECTOR_H
0011 #define EIGEN_AMBIVECTOR_H
0012
0013 namespace Eigen {
0014
0015 namespace internal {
0016
0017
0018
0019
0020
0021
0022 template<typename _Scalar, typename _StorageIndex>
0023 class AmbiVector
0024 {
0025 public:
0026 typedef _Scalar Scalar;
0027 typedef _StorageIndex StorageIndex;
0028 typedef typename NumTraits<Scalar>::Real RealScalar;
0029
0030 explicit AmbiVector(Index size)
0031 : m_buffer(0), m_zero(0), m_size(0), m_end(0), m_allocatedSize(0), m_allocatedElements(0), m_mode(-1)
0032 {
0033 resize(size);
0034 }
0035
0036 void init(double estimatedDensity);
0037 void init(int mode);
0038
0039 Index nonZeros() const;
0040
0041
0042 void setBounds(Index start, Index end) { m_start = convert_index(start); m_end = convert_index(end); }
0043
0044 void setZero();
0045
0046 void restart();
0047 Scalar& coeffRef(Index i);
0048 Scalar& coeff(Index i);
0049
0050 class Iterator;
0051
0052 ~AmbiVector() { delete[] m_buffer; }
0053
0054 void resize(Index size)
0055 {
0056 if (m_allocatedSize < size)
0057 reallocate(size);
0058 m_size = convert_index(size);
0059 }
0060
0061 StorageIndex size() const { return m_size; }
0062
0063 protected:
0064 StorageIndex convert_index(Index idx)
0065 {
0066 return internal::convert_index<StorageIndex>(idx);
0067 }
0068
0069 void reallocate(Index size)
0070 {
0071
0072
0073 delete[] m_buffer;
0074 if (size<1000)
0075 {
0076 Index allocSize = (size * sizeof(ListEl) + sizeof(Scalar) - 1)/sizeof(Scalar);
0077 m_allocatedElements = convert_index((allocSize*sizeof(Scalar))/sizeof(ListEl));
0078 m_buffer = new Scalar[allocSize];
0079 }
0080 else
0081 {
0082 m_allocatedElements = convert_index((size*sizeof(Scalar))/sizeof(ListEl));
0083 m_buffer = new Scalar[size];
0084 }
0085 m_size = convert_index(size);
0086 m_start = 0;
0087 m_end = m_size;
0088 }
0089
0090 void reallocateSparse()
0091 {
0092 Index copyElements = m_allocatedElements;
0093 m_allocatedElements = (std::min)(StorageIndex(m_allocatedElements*1.5),m_size);
0094 Index allocSize = m_allocatedElements * sizeof(ListEl);
0095 allocSize = (allocSize + sizeof(Scalar) - 1)/sizeof(Scalar);
0096 Scalar* newBuffer = new Scalar[allocSize];
0097 std::memcpy(newBuffer, m_buffer, copyElements * sizeof(ListEl));
0098 delete[] m_buffer;
0099 m_buffer = newBuffer;
0100 }
0101
0102 protected:
0103
0104 struct ListEl
0105 {
0106 StorageIndex next;
0107 StorageIndex index;
0108 Scalar value;
0109 };
0110
0111
0112 Scalar* m_buffer;
0113 Scalar m_zero;
0114 StorageIndex m_size;
0115 StorageIndex m_start;
0116 StorageIndex m_end;
0117 StorageIndex m_allocatedSize;
0118 StorageIndex m_allocatedElements;
0119 StorageIndex m_mode;
0120
0121
0122 StorageIndex m_llStart;
0123 StorageIndex m_llCurrent;
0124 StorageIndex m_llSize;
0125 };
0126
0127
0128 template<typename _Scalar,typename _StorageIndex>
0129 Index AmbiVector<_Scalar,_StorageIndex>::nonZeros() const
0130 {
0131 if (m_mode==IsSparse)
0132 return m_llSize;
0133 else
0134 return m_end - m_start;
0135 }
0136
0137 template<typename _Scalar,typename _StorageIndex>
0138 void AmbiVector<_Scalar,_StorageIndex>::init(double estimatedDensity)
0139 {
0140 if (estimatedDensity>0.1)
0141 init(IsDense);
0142 else
0143 init(IsSparse);
0144 }
0145
0146 template<typename _Scalar,typename _StorageIndex>
0147 void AmbiVector<_Scalar,_StorageIndex>::init(int mode)
0148 {
0149 m_mode = mode;
0150
0151
0152 {
0153 m_llSize = 0;
0154 m_llStart = -1;
0155 }
0156 }
0157
0158
0159
0160
0161
0162
0163 template<typename _Scalar,typename _StorageIndex>
// Must be called before each series of in-order insertions via coeffRef()
// in sparse mode: rewinds the cached list cursor to the head of the list.
template<typename _Scalar,typename _StorageIndex>
void AmbiVector<_Scalar,_StorageIndex>::restart()
{
  m_llCurrent = m_llStart;
}
0168
0169
0170 template<typename _Scalar,typename _StorageIndex>
0171 void AmbiVector<_Scalar,_StorageIndex>::setZero()
0172 {
0173 if (m_mode==IsDense)
0174 {
0175 for (Index i=m_start; i<m_end; ++i)
0176 m_buffer[i] = Scalar(0);
0177 }
0178 else
0179 {
0180 eigen_assert(m_mode==IsSparse);
0181 m_llSize = 0;
0182 m_llStart = -1;
0183 }
0184 }
0185
0186 template<typename _Scalar,typename _StorageIndex>
0187 _Scalar& AmbiVector<_Scalar,_StorageIndex>::coeffRef(Index i)
0188 {
0189 if (m_mode==IsDense)
0190 return m_buffer[i];
0191 else
0192 {
0193 ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_buffer);
0194
0195 eigen_assert(m_mode==IsSparse);
0196 if (m_llSize==0)
0197 {
0198
0199 m_llStart = 0;
0200 m_llCurrent = 0;
0201 ++m_llSize;
0202 llElements[0].value = Scalar(0);
0203 llElements[0].index = convert_index(i);
0204 llElements[0].next = -1;
0205 return llElements[0].value;
0206 }
0207 else if (i<llElements[m_llStart].index)
0208 {
0209
0210 ListEl& el = llElements[m_llSize];
0211 el.value = Scalar(0);
0212 el.index = convert_index(i);
0213 el.next = m_llStart;
0214 m_llStart = m_llSize;
0215 ++m_llSize;
0216 m_llCurrent = m_llStart;
0217 return el.value;
0218 }
0219 else
0220 {
0221 StorageIndex nextel = llElements[m_llCurrent].next;
0222 eigen_assert(i>=llElements[m_llCurrent].index && "you must call restart() before inserting an element with lower or equal index");
0223 while (nextel >= 0 && llElements[nextel].index<=i)
0224 {
0225 m_llCurrent = nextel;
0226 nextel = llElements[nextel].next;
0227 }
0228
0229 if (llElements[m_llCurrent].index==i)
0230 {
0231
0232 return llElements[m_llCurrent].value;
0233 }
0234 else
0235 {
0236 if (m_llSize>=m_allocatedElements)
0237 {
0238 reallocateSparse();
0239 llElements = reinterpret_cast<ListEl*>(m_buffer);
0240 }
0241 eigen_internal_assert(m_llSize<m_allocatedElements && "internal error: overflow in sparse mode");
0242
0243 ListEl& el = llElements[m_llSize];
0244 el.value = Scalar(0);
0245 el.index = convert_index(i);
0246 el.next = llElements[m_llCurrent].next;
0247 llElements[m_llCurrent].next = m_llSize;
0248 ++m_llSize;
0249 return el.value;
0250 }
0251 }
0252 }
0253 }
0254
0255 template<typename _Scalar,typename _StorageIndex>
0256 _Scalar& AmbiVector<_Scalar,_StorageIndex>::coeff(Index i)
0257 {
0258 if (m_mode==IsDense)
0259 return m_buffer[i];
0260 else
0261 {
0262 ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_buffer);
0263 eigen_assert(m_mode==IsSparse);
0264 if ((m_llSize==0) || (i<llElements[m_llStart].index))
0265 {
0266 return m_zero;
0267 }
0268 else
0269 {
0270 Index elid = m_llStart;
0271 while (elid >= 0 && llElements[elid].index<i)
0272 elid = llElements[elid].next;
0273
0274 if (llElements[elid].index==i)
0275 return llElements[m_llCurrent].value;
0276 else
0277 return m_zero;
0278 }
0279 }
0280 }
0281
0282
0283 template<typename _Scalar,typename _StorageIndex>
0284 class AmbiVector<_Scalar,_StorageIndex>::Iterator
0285 {
0286 public:
0287 typedef _Scalar Scalar;
0288 typedef typename NumTraits<Scalar>::Real RealScalar;
0289
0290
0291
0292
0293
0294
0295
0296 explicit Iterator(const AmbiVector& vec, const RealScalar& epsilon = 0)
0297 : m_vector(vec)
0298 {
0299 using std::abs;
0300 m_epsilon = epsilon;
0301 m_isDense = m_vector.m_mode==IsDense;
0302 if (m_isDense)
0303 {
0304 m_currentEl = 0;
0305 m_cachedValue = 0;
0306 m_cachedIndex = m_vector.m_start-1;
0307 ++(*this);
0308 }
0309 else
0310 {
0311 ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_vector.m_buffer);
0312 m_currentEl = m_vector.m_llStart;
0313 while (m_currentEl>=0 && abs(llElements[m_currentEl].value)<=m_epsilon)
0314 m_currentEl = llElements[m_currentEl].next;
0315 if (m_currentEl<0)
0316 {
0317 m_cachedValue = 0;
0318 m_cachedIndex = -1;
0319 }
0320 else
0321 {
0322 m_cachedIndex = llElements[m_currentEl].index;
0323 m_cachedValue = llElements[m_currentEl].value;
0324 }
0325 }
0326 }
0327
0328 StorageIndex index() const { return m_cachedIndex; }
0329 Scalar value() const { return m_cachedValue; }
0330
0331 operator bool() const { return m_cachedIndex>=0; }
0332
0333 Iterator& operator++()
0334 {
0335 using std::abs;
0336 if (m_isDense)
0337 {
0338 do {
0339 ++m_cachedIndex;
0340 } while (m_cachedIndex<m_vector.m_end && abs(m_vector.m_buffer[m_cachedIndex])<=m_epsilon);
0341 if (m_cachedIndex<m_vector.m_end)
0342 m_cachedValue = m_vector.m_buffer[m_cachedIndex];
0343 else
0344 m_cachedIndex=-1;
0345 }
0346 else
0347 {
0348 ListEl* EIGEN_RESTRICT llElements = reinterpret_cast<ListEl*>(m_vector.m_buffer);
0349 do {
0350 m_currentEl = llElements[m_currentEl].next;
0351 } while (m_currentEl>=0 && abs(llElements[m_currentEl].value)<=m_epsilon);
0352 if (m_currentEl<0)
0353 {
0354 m_cachedIndex = -1;
0355 }
0356 else
0357 {
0358 m_cachedIndex = llElements[m_currentEl].index;
0359 m_cachedValue = llElements[m_currentEl].value;
0360 }
0361 }
0362 return *this;
0363 }
0364
0365 protected:
0366 const AmbiVector& m_vector;
0367 StorageIndex m_currentEl;
0368 RealScalar m_epsilon;
0369 StorageIndex m_cachedIndex;
0370 Scalar m_cachedValue;
0371 bool m_isDense;
0372 };
0373
0374 }
0375
0376 }
0377
0378 #endif