10 #ifndef EIGEN_AMBIVECTOR_H 11 #define EIGEN_AMBIVECTOR_H 22 template<
typename _Scalar,
typename _StorageIndex>
// --- Fragment of Eigen's internal AmbiVector<_Scalar,_StorageIndex> ---
// NOTE(review): this chunk is a garbled doxygen source listing; the numbers
// fused into many lines (26, 30, ...) are the ORIGINAL file's line numbers,
// and the class head itself (original lines 24-25) is missing from this view.
// Coefficient type, storage-index type, and the matching real scalar type.
26 typedef _Scalar Scalar;
27 typedef _StorageIndex StorageIndex;
28 typedef typename NumTraits<Scalar>::Real RealScalar;
// Constructor: the init-list puts every member into an empty/unallocated
// state; m_mode(-1) presumably means "no storage mode selected yet" — TODO
// confirm against the missing init() bodies.
// NOTE(review): the constructor body (original lines 32-35, presumably a
// resize(size) call) is missing from this chunk.
30 explicit AmbiVector(
Index size)
31 : m_buffer(0), m_zero(0), m_size(0), m_end(0), m_allocatedSize(0), m_allocatedElements(0), m_mode(-1)
// Declaration: choose dense vs. sparse storage from an estimated density.
36 void init(
double estimatedDensity);
// Declaration: number of stored (structurally nonzero) coefficients.
39 Index nonZeros()
const;
42 void setBounds(
Index start,
Index end) { m_start = convert_index(start); m_end = convert_index(end); }
// Declaration: writable reference to coefficient i (the sparse-mode
// definition below inserts the element if it is not present).
47 Scalar& coeffRef(
Index i);
// Declaration: read access to coefficient i.
// NOTE(review): declared returning Scalar& (non-const), matching the
// definition further below — verify against upstream before changing.
48 Scalar& coeff(
Index i);
52 ~AmbiVector() {
delete[] m_buffer; }
// Resize the vector to `size` coefficients, growing the shared buffer when
// the current allocation is too small.
// NOTE(review): extraction artifact — the body of the `if` (original line
// 57, presumably a reallocate(size) call) is missing from this chunk;
// consult upstream Eigen AmbiVector.h before editing.
54 void resize(
Index size)
56 if (m_allocatedSize < size)
58 m_size = convert_index(size);
61 StorageIndex size()
const {
return m_size; }
64 StorageIndex convert_index(
Index idx)
66 return internal::convert_index<StorageIndex>(idx);
// Allocate the shared buffer for `size` coefficients. The arithmetic sizes a
// Scalar allocation large enough to alternatively hold ListEl nodes for
// sparse mode; the "+ sizeof(Scalar) - 1" term rounds the division up.
// NOTE(review): the branch condition selecting between the two sizing
// strategies (original lines 70-75) and the deallocation/bookkeeping lines
// (original 79-81, 84, 86-88) are missing from this chunk — do not edit this
// function from this view alone.
69 void reallocate(
Index size)
76 Index allocSize = (size *
sizeof(ListEl) +
sizeof(Scalar) - 1)/
sizeof(Scalar);
77 m_allocatedElements = convert_index((allocSize*
sizeof(Scalar))/
sizeof(ListEl));
78 m_buffer =
new Scalar[allocSize];
// Alternative sizing path (condition not visible in this chunk):
82 m_allocatedElements = convert_index((size*
sizeof(Scalar))/
sizeof(ListEl));
83 m_buffer =
new Scalar[size];
85 m_size = convert_index(size);
// Grow the sparse (linked-list) storage: capacity grows by a factor of 1.5,
// capped at the vector length, then the existing ListEl nodes are copied
// into a freshly allocated buffer.
// NOTE(review): the tail of this function (original lines 98+, presumably
// delete[] of the old buffer and the assignment of newBuffer to m_buffer) is
// missing from this chunk, so ownership transfer cannot be verified here.
90 void reallocateSparse()
92 Index copyElements = m_allocatedElements;
// 1.5x growth computed in double, then capped by m_size.
93 m_allocatedElements = (std::min)(StorageIndex(m_allocatedElements*1.5),m_size);
// Convert the element count to a Scalar-array length, rounding up.
94 Index allocSize = m_allocatedElements *
sizeof(ListEl);
95 allocSize = (allocSize +
sizeof(Scalar) - 1)/
sizeof(Scalar);
96 Scalar* newBuffer =
new Scalar[allocSize];
97 std::memcpy(newBuffer, m_buffer, copyElements *
sizeof(ListEl));
// Data members (fragment — other members referenced by the code above, e.g.
// m_buffer, m_zero, m_size, m_end, m_mode, are not visible in this chunk).
115 StorageIndex m_start;
117 StorageIndex m_allocatedSize;
118 StorageIndex m_allocatedElements;
// Sparse-mode linked-list bookkeeping (usage visible in coeffRef below):
122 StorageIndex m_llStart; // index of the list head node
123 StorageIndex m_llCurrent; // insertion/search cursor, reset by restart()
124 StorageIndex m_llSize; // number of list nodes currently in use
// Number of stored coefficients; in dense mode this is the active window
// length (m_end - m_start).
// NOTE(review): the sparse-mode return statement (original lines 132-133,
// presumably `return m_llSize;`) is missing from this chunk.
128 template<
typename _Scalar,
typename _StorageIndex>
129 Index AmbiVector<_Scalar,_StorageIndex>::nonZeros()
const 131 if (m_mode==IsSparse)
134 return m_end - m_start;
// Choose the storage mode from an estimated fill density, with 0.1 as the
// threshold — presumably densities above 10% select dense mode.
// NOTE(review): both branch bodies (original lines 141-145) are missing from
// this chunk, so the exact mode selection cannot be confirmed here.
137 template<
typename _Scalar,
typename _StorageIndex>
138 void AmbiVector<_Scalar,_StorageIndex>::init(
double estimatedDensity)
140 if (estimatedDensity>0.1)
// Overload taking the storage mode directly.
// NOTE(review): the entire body (original lines 148-162) is missing from
// this chunk — nothing about its behavior can be stated from this view.
146 template<
typename _Scalar,
typename _StorageIndex>
147 void AmbiVector<_Scalar,_StorageIndex>::init(
int mode)
163 template<
typename _Scalar,
typename _StorageIndex>
164 void AmbiVector<_Scalar,_StorageIndex>::restart()
166 m_llCurrent = m_llStart;
// Clear the vector. In dense mode every coefficient of the active window
// [m_start, m_end) is overwritten with Scalar(0); the other path asserts
// sparse mode before (presumably) resetting the list.
// NOTE(review): the mode test opening the dense branch (original lines
// 172-174) and the sparse-mode reset statements (original lines 181+) are
// missing from this chunk.
170 template<
typename _Scalar,
typename _StorageIndex>
171 void AmbiVector<_Scalar,_StorageIndex>::setZero()
175 for (
Index i=m_start; i<m_end; ++i)
176 m_buffer[i] = Scalar(0);
180 eigen_assert(m_mode==IsSparse);
// Writable access to coefficient i. In sparse mode the coefficient is
// inserted into a singly linked list of ListEl nodes stored inside m_buffer
// if not already present; accesses must use non-decreasing indices between
// restart() calls (enforced by the assert below).
// NOTE(review): the dense-mode branch (original lines 188-192) and many
// connective lines (196-201, 206-209, 213, 215, 217-220, 224, 227-228,
// 230-235, 237-238, 240, 242, 248-253), including the braces and several
// statements, are missing from this chunk — do not edit from this view.
186 template<
typename _Scalar,
typename _StorageIndex>
187 _Scalar& AmbiVector<_Scalar,_StorageIndex>::coeffRef(
Index i)
// Reinterpret the Scalar buffer as linked-list nodes.
193 ListEl* EIGEN_RESTRICT llElements =
reinterpret_cast<ListEl*
>(m_buffer);
195 eigen_assert(m_mode==IsSparse);
// First insertion: node 0 becomes the list head (next = -1 terminates).
202 llElements[0].value = Scalar(0);
203 llElements[0].index = convert_index(i);
204 llElements[0].next = -1;
205 return llElements[0].value;
// Insertion before the current head: a new node becomes the head.
207 else if (i<llElements[m_llStart].index)
210 ListEl& el = llElements[m_llSize];
211 el.value = Scalar(0);
212 el.index = convert_index(i);
214 m_llStart = m_llSize;
216 m_llCurrent = m_llStart;
// General case: advance the cursor to the last node with index <= i.
221 StorageIndex nextel = llElements[m_llCurrent].next;
222 eigen_assert(i>=llElements[m_llCurrent].index &&
"you must call restart() before inserting an element with lower or equal index");
223 while (nextel >= 0 && llElements[nextel].index<=i)
225 m_llCurrent = nextel;
226 nextel = llElements[nextel].next;
// Element already present: return it.
229 if (llElements[m_llCurrent].index==i)
232 return llElements[m_llCurrent].value;
// Otherwise insert after the cursor; grow the storage when full. The
// re-cast is presumably needed because reallocateSparse() can move
// m_buffer — TODO confirm (its tail is missing from this chunk).
236 if (m_llSize>=m_allocatedElements)
239 llElements =
reinterpret_cast<ListEl*
>(m_buffer);
241 eigen_internal_assert(m_llSize<m_allocatedElements &&
"internal error: overflow in sparse mode");
243 ListEl& el = llElements[m_llSize];
244 el.value = Scalar(0);
245 el.index = convert_index(i);
246 el.next = llElements[m_llCurrent].next;
247 llElements[m_llCurrent].next = m_llSize;
// Read access to coefficient i. In sparse mode the linked list is scanned
// from the head until a node with index >= i is reached.
// NOTE(review): the dense-mode branch (original lines 257-261) and the
// else-path returning the zero sentinel (original lines 276-280) are
// missing from this chunk.
255 template<
typename _Scalar,
typename _StorageIndex>
256 _Scalar& AmbiVector<_Scalar,_StorageIndex>::coeff(
Index i)
262 ListEl* EIGEN_RESTRICT llElements =
reinterpret_cast<ListEl*
>(m_buffer);
263 eigen_assert(m_mode==IsSparse);
264 if ((m_llSize==0) || (i<llElements[m_llStart].index))
// Linear scan of the list.
270 Index elid = m_llStart;
271 while (elid >= 0 && llElements[elid].index<i)
272 elid = llElements[elid].next;
// NOTE(review): two suspicious points to verify against upstream Eigen —
// (a) if the scan runs off the end, elid is -1 and llElements[elid] below is
// an out-of-bounds read; (b) the value returned is taken at m_llCurrent, not
// at the found position elid. Do not "fix" without checking upstream.
274 if (llElements[elid].index==i)
275 return llElements[m_llCurrent].value;
// Iterator over the "nonzero" coefficients of an AmbiVector, skipping any
// value whose absolute value is <= epsilon. Works for both storage modes:
// dense mode walks the buffer window, sparse mode follows the linked list.
// NOTE(review): this class is heavily fragmented in this chunk — the
// constructor's branch structure (original lines 297-300, 302-305, 307-310,
// 315-321, 324-327), operator++'s dense/sparse dispatch and loop heads
// (original lines 334-339, 343-347, 349, 352-357, 360-365), and several
// members (e.g. m_isDense) are missing. Do not edit from this view.
283 template<
typename _Scalar,
typename _StorageIndex>
284 class AmbiVector<_Scalar,_StorageIndex>::Iterator
287 typedef _Scalar Scalar;
288 typedef typename NumTraits<Scalar>::Real RealScalar;
// Constructor: positions the iterator on the first coefficient with
// |value| > epsilon.
296 explicit Iterator(
const AmbiVector& vec,
const RealScalar& epsilon = 0)
301 m_isDense = m_vector.m_mode==IsDense;
// Dense start: one before m_start so the first ++ lands on m_start
// (presumably — the surrounding control flow is missing).
306 m_cachedIndex = m_vector.m_start-1;
// Sparse start: skip leading below-threshold list nodes.
311 ListEl* EIGEN_RESTRICT llElements =
reinterpret_cast<ListEl*
>(m_vector.m_buffer);
312 m_currentEl = m_vector.m_llStart;
313 while (m_currentEl>=0 &&
abs(llElements[m_currentEl].value)<=m_epsilon)
314 m_currentEl = llElements[m_currentEl].next;
322 m_cachedIndex = llElements[m_currentEl].index;
323 m_cachedValue = llElements[m_currentEl].value;
// Accessors for the cached position/value; bool conversion is "not at end"
// (m_cachedIndex stays >= 0 while valid).
328 StorageIndex index()
const {
return m_cachedIndex; }
329 Scalar value()
const {
return m_cachedValue; }
331 operator bool()
const {
return m_cachedIndex>=0; }
// Advance to the next coefficient with |value| > m_epsilon.
333 Iterator& operator++()
// Dense path: scan forward through the buffer window.
340 }
while (m_cachedIndex<m_vector.m_end &&
abs(m_vector.m_buffer[m_cachedIndex])<=m_epsilon);
341 if (m_cachedIndex<m_vector.m_end)
342 m_cachedValue = m_vector.m_buffer[m_cachedIndex];
// Sparse path: follow `next` links, skipping below-threshold nodes.
348 ListEl* EIGEN_RESTRICT llElements =
reinterpret_cast<ListEl*
>(m_vector.m_buffer);
350 m_currentEl = llElements[m_currentEl].next;
351 }
while (m_currentEl>=0 &&
abs(llElements[m_currentEl].value)<=m_epsilon);
358 m_cachedIndex = llElements[m_currentEl].index;
359 m_cachedValue = llElements[m_currentEl].value;
// State: the iterated vector, sparse cursor, threshold, and cached position.
366 const AmbiVector& m_vector;
367 StorageIndex m_currentEl;
368 RealScalar m_epsilon;
369 StorageIndex m_cachedIndex;
370 Scalar m_cachedValue;
378 #endif // EIGEN_AMBIVECTOR_H Namespace containing all symbols from the Eigen library.
// --- Doxygen cross-reference tooltips captured along with the listing ---
// Eigen: Namespace containing all symbols from the Eigen library.
//   Definition: Core:141
// EIGEN_DEFAULT_DENSE_INDEX_TYPE Index — The Index type as used for the API.
//   Definition: Meta.h:74
// const Eigen::CwiseUnaryOp< Eigen::internal::scalar_abs_op< typename Derived::Scalar >, const Derived > abs(const Eigen::ArrayBase< Derived > &x)
//   Definition: Eigen_Colamd.h:50