Please, help us to better know about our user community by answering the following short survey: https://forms.gle/wpyrxWi18ox9Z5ae9
 
Loading...
Searching...
No Matches
DynamicSparseMatrix.h
1// This file is part of Eigen, a lightweight C++ template library
2// for linear algebra.
3//
4// Copyright (C) 2008-2009 Gael Guennebaud <gael.guennebaud@inria.fr>
5//
6// This Source Code Form is subject to the terms of the Mozilla
7// Public License v. 2.0. If a copy of the MPL was not distributed
8// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
9
10#ifndef EIGEN_DYNAMIC_SPARSEMATRIX_H
11#define EIGEN_DYNAMIC_SPARSEMATRIX_H
12
13namespace Eigen {
14
35namespace internal {
// Traits metadata for DynamicSparseMatrix: a fully dynamic-size sparse
// matrix whose expression flags are derived from the user-supplied
// _Options bits.
36template<typename _Scalar, int _Options, typename _StorageIndex>
37struct traits<DynamicSparseMatrix<_Scalar, _Options, _StorageIndex> >
38{
39 typedef _Scalar Scalar;
40 typedef _StorageIndex StorageIndex;
41 typedef Sparse StorageKind;
42 typedef MatrixXpr XprKind;
43 enum {
// Both dimensions (and their maxima) are determined at run time.
44 RowsAtCompileTime = Dynamic,
45 ColsAtCompileTime = Dynamic,
46 MaxRowsAtCompileTime = Dynamic,
47 MaxColsAtCompileTime = Dynamic,
// NestByRefBit: nest by reference inside expressions; LvalueBit:
// coefficients are writable through coeffRef().
48 Flags = _Options | NestByRefBit | LvalueBit,
49 CoeffReadCost = NumTraits<Scalar>::ReadCost,
// Outer vectors can be visited in any order (one storage vector per
// outer index -- see DynamicSparseMatrix::m_data).
50 SupportedAccessPatterns = OuterRandomAccessPattern
51 };
52};
53}
54
// NOTE(review): the `class DynamicSparseMatrix` declaration line itself
// (original line 56) is elided by the documentation generator between the
// two lines below.
55template<typename _Scalar, int _Options, typename _StorageIndex>
57 : public SparseMatrixBase<DynamicSparseMatrix<_Scalar, _Options, _StorageIndex> >
58{
60 using Base::convert_index;
61 public:
62 EIGEN_SPARSE_PUBLIC_INTERFACE(DynamicSparseMatrix)
63 // FIXME: why are these operators already available ???
64 // EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(DynamicSparseMatrix, +=)
65 // EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(DynamicSparseMatrix, -=)
67 using Base::IsRowMajor;
68 using Base::operator=;
69 enum {
70 Options = _Options
71 };
72
73 protected:
74
// Same matrix type with the opposite storage order (used by
// transpose-related code).
75 typedef DynamicSparseMatrix<Scalar,(Flags&~RowMajorBit)|(IsRowMajor?RowMajorBit:0), StorageIndex> TransposedSparseMatrix;
76
// Size of the inner dimension (columns if row-major, rows if column-major).
77 Index m_innerSize;
// One compressed (sorted index/value) vector per outer index; the outer
// size is m_data.size().
78 std::vector<internal::CompressedStorage<Scalar,StorageIndex> > m_data;
79
80 public:
81
// Dimension accessors: the outer dimension is the number of inner vectors
// held in m_data, the inner dimension is cached in m_innerSize.
82 inline Index rows() const { return IsRowMajor ? outerSize() : m_innerSize; }
83 inline Index cols() const { return IsRowMajor ? m_innerSize : outerSize(); }
84 inline Index innerSize() const { return m_innerSize; }
85 inline Index outerSize() const { return convert_index(m_data.size()); }
// Number of explicitly stored coefficients in the j-th inner vector.
86 inline Index innerNonZeros(Index j) const { return m_data[j].size(); }
87
// Low-level access to the per-outer-vector storage.
88 std::vector<internal::CompressedStorage<Scalar,StorageIndex> >& _data() { return m_data; }
89 const std::vector<internal::CompressedStorage<Scalar,StorageIndex> >& _data() const { return m_data; }
90
90
94 inline Scalar coeff(Index row, Index col) const
95 {
96 const Index outer = IsRowMajor ? row : col;
97 const Index inner = IsRowMajor ? col : row;
98 return m_data[outer].at(inner);
99 }
100
105 inline Scalar& coeffRef(Index row, Index col)
106 {
107 const Index outer = IsRowMajor ? row : col;
108 const Index inner = IsRowMajor ? col : row;
109 return m_data[outer].atWithInsertion(inner);
110 }
111
// Iterators over the stored coefficients of one inner vector
// (defined after the class body).
112 class InnerIterator;
113 class ReverseInnerIterator;
114
115 void setZero()
116 {
117 for (Index j=0; j<outerSize(); ++j)
118 m_data[j].clear();
119 }
120
// NOTE(review): the signature line of this member (original line 122) is
// elided by the doc generator; per the member index it is
// `Index nonZeros() const`. It returns the total number of explicitly
// stored coefficients, i.e. the sum of the sizes of all inner vectors.
123 {
124 Index res = 0;
125 for (Index j=0; j<outerSize(); ++j)
126 res += m_data[j].size();
127 return res;
128 }
129
130
131
132 void reserve(Index reserveSize = 1000)
133 {
134 if (outerSize()>0)
135 {
136 Index reserveSizePerVector = (std::max)(reserveSize/outerSize(),Index(4));
137 for (Index j=0; j<outerSize(); ++j)
138 {
139 m_data[j].reserve(reserveSizePerVector);
140 }
141 }
142 }
143
// No-op: inner vectors are created eagerly by resize(), so there is no
// per-vector setup needed before sorted insertion.
145 inline void startVec(Index /*outer*/) {}
146
// Appends a coefficient at (row,col); see insertBackByOuterInner() for the
// sorted-insertion precondition.
152 inline Scalar& insertBack(Index row, Index col)
153 {
154 return insertBackByOuterInner(IsRowMajor?row:col, IsRowMajor?col:row);
155 }
156
156
158 inline Scalar& insertBackByOuterInner(Index outer, Index inner)
159 {
160 eigen_assert(outer<Index(m_data.size()) && inner<m_innerSize && "out of range");
161 eigen_assert(((m_data[outer].size()==0) || (m_data[outer].index(m_data[outer].size()-1)<inner))
162 && "wrong sorted insertion");
163 m_data[outer].append(0, inner);
164 return m_data[outer].value(m_data[outer].size()-1);
165 }
166
// Inserts a new coefficient at (row,col) at its sorted position inside the
// target inner vector and returns a reference to its zero-initialized
// value. Unlike insertBack(), the coefficient may go anywhere, at a linear
// shifting cost. NOTE: no check is made for an already-stored coefficient
// at the same position -- inserting the same (row,col) twice creates a
// duplicate entry.
167 inline Scalar& insert(Index row, Index col)
168 {
169 const Index outer = IsRowMajor ? row : col;
170 const Index inner = IsRowMajor ? col : row;
171
// Grow the target vector by one slot at the end...
172 Index startId = 0;
173 Index id = static_cast<Index>(m_data[outer].size()) - 1;
174 m_data[outer].resize(id+2,1);
175
// ...then shift every stored entry whose inner index is larger than the
// new one a slot to the right, opening a hole at the sorted position.
176 while ( (id >= startId) && (m_data[outer].index(id) > inner) )
177 {
178 m_data[outer].index(id+1) = m_data[outer].index(id);
179 m_data[outer].value(id+1) = m_data[outer].value(id);
180 --id;
181 }
182 m_data[outer].index(id+1) = inner;
183 m_data[outer].value(id+1) = 0;
184 return m_data[outer].value(id+1);
185 }
186
// No-op: this container needs no compression/finalization pass after a
// sequence of insertions (kept for API compatibility with SparseMatrix).
188 inline void finalize() {}
189
// Per-vector pruning: delegates to CompressedStorage::prune(reference,
// epsilon) on each inner vector to drop negligible stored coefficients.
191 void prune(Scalar reference, RealScalar epsilon = NumTraits<RealScalar>::dummy_precision())
192 {
193 for (Index j=0; j<outerSize(); ++j)
194 m_data[j].prune(reference,epsilon);
195 }
196
196
199 void resize(Index rows, Index cols)
200 {
201 const Index outerSize = IsRowMajor ? rows : cols;
202 m_innerSize = convert_index(IsRowMajor ? cols : rows);
203 setZero();
204 if (Index(m_data.size()) != outerSize)
205 {
206 m_data.resize(outerSize);
207 }
208 }
209
// Resizes the matrix while keeping the currently stored coefficients.
// WARNING: shrinking the inner dimension is not implemented and hard-aborts
// the whole process via exit(2) -- see the TODO below. A library should
// not call exit(); flagged for a proper eigen_assert/implementation.
210 void resizeAndKeepData(Index rows, Index cols)
211 {
212 const Index outerSize = IsRowMajor ? rows : cols;
213 const Index innerSize = IsRowMajor ? cols : rows;
214 if (m_innerSize>innerSize)
215 {
216 // remove all coefficients with innerCoord>=innerSize
217 // TODO
218 //std::cerr << "not implemented yet\n";
219 exit(2);
220 }
221 if (m_data.size() != outerSize)
222 {
223 m_data.resize(outerSize);
224 }
225 }
226
// Default constructor: empty 0x0 matrix. Deprecated, like the whole class
// (superseded by SparseMatrix).
228 EIGEN_DEPRECATED inline DynamicSparseMatrix()
229 : m_innerSize(0), m_data(0)
230 {
231 #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
232 EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
233 #endif
234 eigen_assert(innerSize()==0 && outerSize()==0);
235 }
236
// Constructs an empty \a rows x \a cols matrix (no coefficient stored).
238 EIGEN_DEPRECATED inline DynamicSparseMatrix(Index rows, Index cols)
239 : m_innerSize(0)
240 {
241 #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
242 EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
243 #endif
244 resize(rows, cols);
245 }
246
246
// Conversion constructor from any sparse expression; evaluates \a other
// through the base-class assignment operator.
248 template<typename OtherDerived>
249 EIGEN_DEPRECATED explicit inline DynamicSparseMatrix(const SparseMatrixBase<OtherDerived>& other)
250 : m_innerSize(0)
251 {
252 #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
253 EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
254 #endif
255 Base::operator=(other.derived());
256 }
257
// Copy constructor: delegates to operator=, which deep-copies (or swaps
// the storage when \a other is flagged as an r-value).
258 inline DynamicSparseMatrix(const DynamicSparseMatrix& other)
259 : Base(), m_innerSize(0)
260 {
261 #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
262 EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
263 #endif
264 *this = other.derived();
265 }
266
266
267 inline void swap(DynamicSparseMatrix& other)
268 {
269 //EIGEN_DBG_SPARSE(std::cout << "SparseMatrix:: swap\n");
270 std::swap(m_innerSize, other.m_innerSize);
271 //std::swap(m_outerSize, other.m_outerSize);
272 m_data.swap(other.m_data);
273 }
274
275 inline DynamicSparseMatrix& operator=(const DynamicSparseMatrix& other)
276 {
277 if (other.isRValue())
278 {
279 swap(other.const_cast_derived());
280 }
281 else
282 {
283 resize(other.rows(), other.cols());
284 m_data = other.m_data;
285 }
286 return *this;
287 }
288
// NOTE(review): the destructor declaration (`~DynamicSparseMatrix()`,
// original line 290 per the member index) is elided by the doc generator
// at this point.
291
292 public:
293
// --- Deprecated filling API (use setZero()/reserve()/insert* instead) ---
// Clears the matrix and pre-allocates room for \a reserveSize coefficients.
296 EIGEN_DEPRECATED void startFill(Index reserveSize = 1000)
297 {
298 setZero();
299 reserve(reserveSize);
300 }
301
311 EIGEN_DEPRECATED Scalar& fill(Index row, Index col)
312 {
313 const Index outer = IsRowMajor ? row : col;
314 const Index inner = IsRowMajor ? col : row;
315 return insertBack(outer,inner);
316 }
317
// Like fill(), but the coefficient may be inserted at an arbitrary
// (unsorted) position: simply forwards to insert().
323 EIGEN_DEPRECATED Scalar& fillrand(Index row, Index col)
324 {
325 return insert(row,col);
326 }
327
// No-op kept for API compatibility; there is nothing to finalize.
330 EIGEN_DEPRECATED void endFill() {}
331
331
332# ifdef EIGEN_DYNAMICSPARSEMATRIX_PLUGIN
333# include EIGEN_DYNAMICSPARSEMATRIX_PLUGIN
334# endif
335 };
336
// Forward iterator over the stored coefficients of one inner vector.
// Reuses SparseVector's iterator, constructed on the outer-th
// CompressedStorage vector, and remembers the outer index so that
// row()/col() can be reported in matrix coordinates.
337template<typename Scalar, int _Options, typename _StorageIndex>
338class DynamicSparseMatrix<Scalar,_Options,_StorageIndex>::InnerIterator : public SparseVector<Scalar,_Options,_StorageIndex>::InnerIterator
339{
340 typedef typename SparseVector<Scalar,_Options,_StorageIndex>::InnerIterator Base;
341 public:
342 InnerIterator(const DynamicSparseMatrix& mat, Index outer)
343 : Base(mat.m_data[outer]), m_outer(outer)
344 {}
345
346 inline Index row() const { return IsRowMajor ? m_outer : Base::index(); }
347 inline Index col() const { return IsRowMajor ? Base::index() : m_outer; }
348 inline Index outer() const { return m_outer; }
349
350 protected:
351 const Index m_outer;
352};
353
// Reverse counterpart of InnerIterator: walks the stored coefficients of
// one inner vector from last to first, again mapping the stored inner
// index plus the remembered outer index back to row/col coordinates.
354template<typename Scalar, int _Options, typename _StorageIndex>
355class DynamicSparseMatrix<Scalar,_Options,_StorageIndex>::ReverseInnerIterator : public SparseVector<Scalar,_Options,_StorageIndex>::ReverseInnerIterator
356{
357 typedef typename SparseVector<Scalar,_Options,_StorageIndex>::ReverseInnerIterator Base;
358 public:
359 ReverseInnerIterator(const DynamicSparseMatrix& mat, Index outer)
360 : Base(mat.m_data[outer]), m_outer(outer)
361 {}
362
363 inline Index row() const { return IsRowMajor ? m_outer : Base::index(); }
364 inline Index col() const { return IsRowMajor ? Base::index() : m_outer; }
365 inline Index outer() const { return m_outer; }
366
367 protected:
368 const Index m_outer;
369};
370
371namespace internal {
372
// Evaluator for DynamicSparseMatrix: holds a (non-owning) pointer to the
// matrix and forwards coefficient access and iteration to it.
373template<typename _Scalar, int _Options, typename _StorageIndex>
374struct evaluator<DynamicSparseMatrix<_Scalar,_Options,_StorageIndex> >
375 : evaluator_base<DynamicSparseMatrix<_Scalar,_Options,_StorageIndex> >
376{
377 typedef _Scalar Scalar;
378 typedef DynamicSparseMatrix<_Scalar,_Options,_StorageIndex> SparseMatrixType;
379 typedef typename SparseMatrixType::InnerIterator InnerIterator;
380 typedef typename SparseMatrixType::ReverseInnerIterator ReverseInnerIterator;
381
382 enum {
383 CoeffReadCost = NumTraits<_Scalar>::ReadCost,
384 Flags = SparseMatrixType::Flags
385 };
386
// A default-constructed evaluator holds no matrix (null pointer); it must
// be assigned/re-bound before use.
387 evaluator() : m_matrix(0) {}
388 evaluator(const SparseMatrixType &mat) : m_matrix(&mat) {}
389
// Implicit conversions back to the wrapped matrix (mutable access goes
// through const_cast_derived()).
390 operator SparseMatrixType&() { return m_matrix->const_cast_derived(); }
391 operator const SparseMatrixType&() const { return *m_matrix; }
392
393 Scalar coeff(Index row, Index col) const { return m_matrix->coeff(row,col); }
394
// Uses the exact nonZeros() count as the estimate.
395 Index nonZerosEstimate() const { return m_matrix->nonZeros(); }
396
397 const SparseMatrixType *m_matrix;
398};
401
402} // end namespace Eigen
403
404#endif // EIGEN_DYNAMIC_SPARSEMATRIX_H
A sparse matrix class designed for matrix assembly purpose.
Definition: DynamicSparseMatrix.h:58
Scalar & coeffRef(Index row, Index col)
Definition: DynamicSparseMatrix.h:105
void startVec(Index)
Definition: DynamicSparseMatrix.h:145
void resize(Index rows, Index cols)
Definition: DynamicSparseMatrix.h:199
Scalar & insertBackByOuterInner(Index outer, Index inner)
Definition: DynamicSparseMatrix.h:158
EIGEN_DEPRECATED DynamicSparseMatrix()
Definition: DynamicSparseMatrix.h:228
Index nonZeros() const
Definition: DynamicSparseMatrix.h:122
Scalar coeff(Index row, Index col) const
Definition: DynamicSparseMatrix.h:94
EIGEN_DEPRECATED Scalar & fillrand(Index row, Index col)
Definition: DynamicSparseMatrix.h:323
void finalize()
Definition: DynamicSparseMatrix.h:188
EIGEN_DEPRECATED void endFill()
Definition: DynamicSparseMatrix.h:330
EIGEN_DEPRECATED DynamicSparseMatrix(const SparseMatrixBase< OtherDerived > &other)
Definition: DynamicSparseMatrix.h:249
EIGEN_DEPRECATED void startFill(Index reserveSize=1000)
Definition: DynamicSparseMatrix.h:296
EIGEN_DEPRECATED DynamicSparseMatrix(Index rows, Index cols)
Definition: DynamicSparseMatrix.h:238
~DynamicSparseMatrix()
Definition: DynamicSparseMatrix.h:290
Scalar & insertBack(Index row, Index col)
Definition: DynamicSparseMatrix.h:152
void prune(Scalar reference, RealScalar epsilon=NumTraits< RealScalar >::dummy_precision())
Definition: DynamicSparseMatrix.h:191
EIGEN_DEPRECATED Scalar & fill(Index row, Index col)
Definition: DynamicSparseMatrix.h:311
const unsigned int LvalueBit
const unsigned int RowMajorBit
Namespace containing all symbols from the Eigen library.
EIGEN_DEFAULT_DENSE_INDEX_TYPE Index
const int Dynamic
Eigen::Index Index
Derived & derived()