10#ifndef EIGEN_SPARSE_BLOCK_H
11#define EIGEN_SPARSE_BLOCK_H
16template<
typename XprType,
int BlockRows,
int BlockCols>
17class BlockImpl<XprType,BlockRows,BlockCols,true,Sparse>
18 :
public SparseMatrixBase<Block<XprType,BlockRows,BlockCols,true> >
20 typedef typename internal::remove_all<typename XprType::Nested>::type _MatrixTypeNested;
21 typedef Block<XprType, BlockRows, BlockCols, true> BlockType;
23 enum { IsRowMajor = internal::traits<BlockType>::IsRowMajor };
25 enum { OuterSize = IsRowMajor ? BlockRows : BlockCols };
26 typedef SparseMatrixBase<BlockType> Base;
27 using Base::convert_index;
29 EIGEN_SPARSE_PUBLIC_INTERFACE(BlockType)
31 inline BlockImpl(XprType& xpr,
Index i)
32 : m_matrix(xpr), m_outerStart(convert_index(i)), m_outerSize(OuterSize)
36 : m_matrix(xpr), m_outerStart(convert_index(IsRowMajor ? startRow : startCol)), m_outerSize(convert_index(IsRowMajor ? blockRows : blockCols))
39 EIGEN_STRONG_INLINE
Index rows()
const {
return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
40 EIGEN_STRONG_INLINE
Index cols()
const {
return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }
42 Index nonZeros()
const
44 typedef internal::evaluator<XprType> EvaluatorType;
45 EvaluatorType matEval(m_matrix);
47 Index end = m_outerStart + m_outerSize.value();
48 for(
Index j=m_outerStart; j<end; ++j)
49 for(
typename EvaluatorType::InnerIterator it(matEval, j); it; ++it)
54 inline const Scalar coeff(
Index row,
Index col)
const
56 return m_matrix.coeff(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart));
59 inline const Scalar coeff(
Index index)
const
61 return m_matrix.coeff(IsRowMajor ? m_outerStart : index, IsRowMajor ? index : m_outerStart);
64 inline const XprType& nestedExpression()
const {
return m_matrix; }
65 inline XprType& nestedExpression() {
return m_matrix; }
66 Index startRow()
const {
return IsRowMajor ? m_outerStart : 0; }
67 Index startCol()
const {
return IsRowMajor ? 0 : m_outerStart; }
68 Index blockRows()
const {
return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
69 Index blockCols()
const {
return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }
73 typename internal::ref_selector<XprType>::non_const_type m_matrix;
75 const internal::variable_if_dynamic<Index, OuterSize> m_outerSize;
81 BlockImpl& operator=(
const T&)
83 EIGEN_STATIC_ASSERT(
sizeof(T)==0, THIS_SPARSE_BLOCK_SUBEXPRESSION_IS_READ_ONLY);
95template<
typename SparseMatrixType,
int BlockRows,
int BlockCols>
96class sparse_matrix_block_impl
97 :
public SparseCompressedBase<Block<SparseMatrixType,BlockRows,BlockCols,true> >
99 typedef typename internal::remove_all<typename SparseMatrixType::Nested>::type _MatrixTypeNested;
100 typedef Block<SparseMatrixType, BlockRows, BlockCols, true> BlockType;
101 typedef SparseCompressedBase<Block<SparseMatrixType,BlockRows,BlockCols,true> > Base;
102 using Base::convert_index;
104 enum { IsRowMajor = internal::traits<BlockType>::IsRowMajor };
105 EIGEN_SPARSE_PUBLIC_INTERFACE(BlockType)
107 typedef typename Base::IndexVector IndexVector;
108 enum { OuterSize = IsRowMajor ? BlockRows : BlockCols };
111 inline sparse_matrix_block_impl(SparseMatrixType& xpr,
Index i)
112 : m_matrix(xpr), m_outerStart(convert_index(i)), m_outerSize(OuterSize)
115 inline sparse_matrix_block_impl(SparseMatrixType& xpr,
Index startRow,
Index startCol,
Index blockRows,
Index blockCols)
116 : m_matrix(xpr), m_outerStart(convert_index(IsRowMajor ? startRow : startCol)), m_outerSize(convert_index(IsRowMajor ? blockRows : blockCols))
119 template<
typename OtherDerived>
120 inline BlockType& operator=(
const SparseMatrixBase<OtherDerived>& other)
122 typedef typename internal::remove_all<typename SparseMatrixType::Nested>::type _NestedMatrixType;
123 _NestedMatrixType& matrix = m_matrix;
128 Ref<const SparseMatrix<Scalar, IsRowMajor ? RowMajor : ColMajor, StorageIndex> > tmp(other.derived());
129 eigen_internal_assert(tmp.outerSize()==m_outerSize.value());
132 Index nnz = tmp.nonZeros();
133 Index start = m_outerStart==0 ? 0 : m_matrix.outerIndexPtr()[m_outerStart];
134 Index end = m_matrix.outerIndexPtr()[m_outerStart+m_outerSize.value()];
135 Index block_size = end - start;
136 Index tail_size = m_matrix.outerIndexPtr()[m_matrix.outerSize()] - end;
138 Index free_size = m_matrix.isCompressed()
139 ?
Index(matrix.data().allocatedSize()) + block_size
142 Index tmp_start = tmp.outerIndexPtr()[0];
144 bool update_trailing_pointers =
false;
148 typename SparseMatrixType::Storage newdata(m_matrix.data().allocatedSize() - block_size + nnz);
150 internal::smart_copy(m_matrix.valuePtr(), m_matrix.valuePtr() + start, newdata.valuePtr());
151 internal::smart_copy(m_matrix.innerIndexPtr(), m_matrix.innerIndexPtr() + start, newdata.indexPtr());
153 internal::smart_copy(tmp.valuePtr() + tmp_start, tmp.valuePtr() + tmp_start + nnz, newdata.valuePtr() + start);
154 internal::smart_copy(tmp.innerIndexPtr() + tmp_start, tmp.innerIndexPtr() + tmp_start + nnz, newdata.indexPtr() + start);
156 internal::smart_copy(matrix.valuePtr()+end, matrix.valuePtr()+end + tail_size, newdata.valuePtr()+start+nnz);
157 internal::smart_copy(matrix.innerIndexPtr()+end, matrix.innerIndexPtr()+end + tail_size, newdata.indexPtr()+start+nnz);
159 newdata.resize(m_matrix.outerIndexPtr()[m_matrix.outerSize()] - block_size + nnz);
161 matrix.data().swap(newdata);
163 update_trailing_pointers =
true;
167 if(m_matrix.isCompressed() && nnz!=block_size)
170 matrix.data().resize(start + nnz + tail_size);
172 internal::smart_memmove(matrix.valuePtr()+end, matrix.valuePtr() + end+tail_size, matrix.valuePtr() + start+nnz);
173 internal::smart_memmove(matrix.innerIndexPtr()+end, matrix.innerIndexPtr() + end+tail_size, matrix.innerIndexPtr() + start+nnz);
175 update_trailing_pointers =
true;
178 internal::smart_copy(tmp.valuePtr() + tmp_start, tmp.valuePtr() + tmp_start + nnz, matrix.valuePtr() + start);
179 internal::smart_copy(tmp.innerIndexPtr() + tmp_start, tmp.innerIndexPtr() + tmp_start + nnz, matrix.innerIndexPtr() + start);
185 if(!m_matrix.isCompressed())
186 matrix.innerNonZeroPtr()[m_outerStart] =
StorageIndex(nnz);
187 matrix.outerIndexPtr()[m_outerStart] =
StorageIndex(start);
192 for(
Index k=0; k<m_outerSize.value(); ++k)
194 StorageIndex nnz_k = internal::convert_index<StorageIndex>(tmp.innerVector(k).nonZeros());
195 if(!m_matrix.isCompressed())
196 matrix.innerNonZeroPtr()[m_outerStart+k] = nnz_k;
197 matrix.outerIndexPtr()[m_outerStart+k] = p;
202 if(update_trailing_pointers)
204 StorageIndex offset = internal::convert_index<StorageIndex>(nnz - block_size);
205 for(
Index k = m_outerStart + m_outerSize.value(); k<=matrix.outerSize(); ++k)
207 matrix.outerIndexPtr()[k] += offset;
214 inline BlockType& operator=(
const BlockType& other)
216 return operator=<BlockType>(other);
219 inline const Scalar*
valuePtr()
const
220 {
return m_matrix.valuePtr(); }
222 {
return m_matrix.valuePtr(); }
225 {
return m_matrix.innerIndexPtr(); }
227 {
return m_matrix.innerIndexPtr(); }
230 {
return m_matrix.outerIndexPtr() + m_outerStart; }
232 {
return m_matrix.outerIndexPtr() + m_outerStart; }
235 {
return isCompressed() ? 0 : (m_matrix.innerNonZeroPtr()+m_outerStart); }
237 {
return isCompressed() ? 0 : (m_matrix.innerNonZeroPtr()+m_outerStart); }
239 bool isCompressed()
const {
return m_matrix.innerNonZeroPtr()==0; }
243 return m_matrix.coeffRef(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart));
246 inline const Scalar coeff(
Index row,
Index col)
const
248 return m_matrix.coeff(row + (IsRowMajor ? m_outerStart : 0), col + (IsRowMajor ? 0 : m_outerStart));
251 inline const Scalar coeff(
Index index)
const
253 return m_matrix.coeff(IsRowMajor ? m_outerStart : index, IsRowMajor ? index : m_outerStart);
256 const Scalar& lastCoeff()
const
258 EIGEN_STATIC_ASSERT_VECTOR_ONLY(sparse_matrix_block_impl);
260 if(m_matrix.isCompressed())
261 return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart+1]-1];
263 return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart]+m_matrix.innerNonZeroPtr()[m_outerStart]-1];
266 EIGEN_STRONG_INLINE
Index rows()
const {
return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
267 EIGEN_STRONG_INLINE
Index cols()
const {
return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }
269 inline const SparseMatrixType& nestedExpression()
const {
return m_matrix; }
270 inline SparseMatrixType& nestedExpression() {
return m_matrix; }
271 Index startRow()
const {
return IsRowMajor ? m_outerStart : 0; }
272 Index startCol()
const {
return IsRowMajor ? 0 : m_outerStart; }
273 Index blockRows()
const {
return IsRowMajor ? m_outerSize.value() : m_matrix.rows(); }
274 Index blockCols()
const {
return IsRowMajor ? m_matrix.cols() : m_outerSize.value(); }
278 typename internal::ref_selector<SparseMatrixType>::non_const_type m_matrix;
280 const internal::variable_if_dynamic<Index, OuterSize> m_outerSize;
// Inner-panel BlockImpl specialization for a mutable SparseMatrix: simply
// forwards everything (including writable operator=) to
// internal::sparse_matrix_block_impl above.
// NOTE(review): extraction — the first constructor's ": Base(xpr, i)"
// initializer line is among the missing lines.
286template<
typename _Scalar,
int _Options,
typename _StorageIndex,
int BlockRows,
int BlockCols>
287class BlockImpl<SparseMatrix<_Scalar, _Options, _StorageIndex>,BlockRows,BlockCols,true,Sparse>
288 :
public internal::sparse_matrix_block_impl<SparseMatrix<_Scalar, _Options, _StorageIndex>,BlockRows,BlockCols>
291 typedef _StorageIndex StorageIndex;
292 typedef SparseMatrix<_Scalar, _Options, _StorageIndex> SparseMatrixType;
293 typedef internal::sparse_matrix_block_impl<SparseMatrixType,BlockRows,BlockCols> Base;
294 inline BlockImpl(SparseMatrixType& xpr,
Index i)
298 inline BlockImpl(SparseMatrixType& xpr,
Index startRow,
Index startCol,
Index blockRows,
Index blockCols)
299 : Base(xpr, startRow, startCol, blockRows, blockCols)
// Inherit the splicing assignment operators from the base implementation.
302 using Base::operator=;
// Inner-panel BlockImpl specialization for a const SparseMatrix. Same
// forwarding as the mutable version; the trailing declared-but-undefined
// templated constructors block construction from arbitrary sparse
// expressions (presumably private — the access specifier is not visible in
// this extraction).
305template<
typename _Scalar,
int _Options,
typename _StorageIndex,
int BlockRows,
int BlockCols>
306class BlockImpl<const SparseMatrix<_Scalar, _Options, _StorageIndex>,BlockRows,BlockCols,true,Sparse>
307 :
public internal::sparse_matrix_block_impl<const SparseMatrix<_Scalar, _Options, _StorageIndex>,BlockRows,BlockCols>
310 typedef _StorageIndex StorageIndex;
311 typedef const SparseMatrix<_Scalar, _Options, _StorageIndex> SparseMatrixType;
312 typedef internal::sparse_matrix_block_impl<SparseMatrixType,BlockRows,BlockCols> Base;
313 inline BlockImpl(SparseMatrixType& xpr,
Index i)
317 inline BlockImpl(SparseMatrixType& xpr,
Index startRow,
Index startCol,
Index blockRows,
Index blockCols)
318 : Base(xpr, startRow, startCol, blockRows, blockCols)
321 using Base::operator=;
// Declared only (no definition): constructing from a generic sparse
// expression is disallowed.
323 template<
typename Derived> BlockImpl(
const SparseMatrixBase<Derived>& xpr,
Index i);
324 template<
typename Derived> BlockImpl(
const SparseMatrixBase<Derived>& xpr);
// Generic sparse block (InnerPanel may be false): stores the full geometry
// (start row/col, block rows/cols) instead of just an outer range. Read-only
// — it inherits internal::no_assignment_operator and its own templated
// operator= static-asserts.
// NOTE(review): extraction with interior lines missing — the Base/BlockType
// typedefs referenced below, the constructors' signature lines (only their
// member initializers survive), and the coeffRef(row,col) signature.
332template<
typename XprType,
int BlockRows,
int BlockCols,
bool InnerPanel>
333class BlockImpl<XprType,BlockRows,BlockCols,InnerPanel,
Sparse>
334 :
public SparseMatrixBase<Block<XprType,BlockRows,BlockCols,InnerPanel> >, internal::no_assignment_operator
338 using Base::convert_index;
340 enum { IsRowMajor = internal::traits<BlockType>::IsRowMajor };
343 typedef typename internal::remove_all<typename XprType::Nested>::type _MatrixTypeNested;
// Single row/column constructor initializers: deduce from the compile-time
// block shape whether index i selects a row or a column.
349 m_startRow( (BlockRows==1) && (BlockCols==XprType::ColsAtCompileTime) ? convert_index(i) : 0),
350 m_startCol( (BlockRows==XprType::RowsAtCompileTime) && (BlockCols==1) ? convert_index(i) : 0),
351 m_blockRows(BlockRows==1 ? 1 : xpr.rows()),
352 m_blockCols(BlockCols==1 ? 1 : xpr.cols())
// General dynamic-block constructor initializer.
358 : m_matrix(xpr), m_startRow(convert_index(startRow)), m_startCol(convert_index(startCol)), m_blockRows(convert_index(blockRows)), m_blockCols(convert_index(blockCols))
361 inline Index rows()
const {
return m_blockRows.value(); }
362 inline Index cols()
const {
return m_blockCols.value(); }
// Coefficient access: offset block coordinates by the stored start position.
366 return m_matrix.coeffRef(row + m_startRow.value(), col + m_startCol.value());
369 inline const Scalar coeff(
Index row,
Index col)
const
371 return m_matrix.coeff(row + m_startRow.value(), col + m_startCol.value());
// Linear access for vector blocks: map index onto the row or the column
// depending on the block's compile-time orientation.
374 inline Scalar& coeffRef(
Index index)
376 return m_matrix.coeffRef(m_startRow.value() + (RowsAtCompileTime == 1 ? 0 : index),
377 m_startCol.value() + (RowsAtCompileTime == 1 ? index : 0));
380 inline const Scalar coeff(
Index index)
const
382 return m_matrix.coeff(m_startRow.value() + (RowsAtCompileTime == 1 ? 0 : index),
383 m_startCol.value() + (RowsAtCompileTime == 1 ? index : 0));
386 inline const XprType& nestedExpression()
const {
return m_matrix; }
387 inline XprType& nestedExpression() {
return m_matrix; }
388 Index startRow()
const {
return m_startRow.value(); }
389 Index startCol()
const {
return m_startCol.value(); }
390 Index blockRows()
const {
return m_blockRows.value(); }
391 Index blockCols()
const {
return m_blockCols.value(); }
// The iterator-based evaluator below needs access to the private geometry.
395 friend struct internal::unary_evaluator<Block<XprType,BlockRows,BlockCols,InnerPanel>, internal::IteratorBased, Scalar >;
399 typename internal::ref_selector<XprType>::non_const_type m_matrix;
// variable_if_dynamic collapses to a zero-size constant when the dimension
// is known at compile time.
400 const internal::variable_if_dynamic<Index, XprType::RowsAtCompileTime == 1 ? 0 : Dynamic> m_startRow;
401 const internal::variable_if_dynamic<Index, XprType::ColsAtCompileTime == 1 ? 0 : Dynamic> m_startCol;
402 const internal::variable_if_dynamic<Index, RowsAtCompileTime> m_blockRows;
403 const internal::variable_if_dynamic<Index, ColsAtCompileTime> m_blockCols;
// Read-only enforcement, mirroring the inner-panel specialization above.
409 BlockImpl& operator=(
const T&)
411 EIGEN_STATIC_ASSERT(
sizeof(T)==0, THIS_SPARSE_BLOCK_SUBEXPRESSION_IS_READ_ONLY);
// Iterator-based evaluator for generic sparse blocks. Picks one of two inner
// iterator strategies at compile time: OuterVectorInnerIterator when the
// block is a one-dimensional slice that runs ACROSS the argument's storage
// order (a column of a row-major arg, or a row of a column-major arg),
// otherwise the cheaper InnerVectorInnerIterator.
// NOTE(review): extraction — the enum's opening line and the '||' joining
// the two OuterVector conditions are among the missing lines.
419template<
typename ArgType,
int BlockRows,
int BlockCols,
bool InnerPanel>
420struct unary_evaluator<Block<ArgType,BlockRows,BlockCols,InnerPanel>, IteratorBased >
421 :
public evaluator_base<Block<ArgType,BlockRows,BlockCols,InnerPanel> >
423 class InnerVectorInnerIterator;
424 class OuterVectorInnerIterator;
426 typedef Block<ArgType,BlockRows,BlockCols,InnerPanel> XprType;
427 typedef typename XprType::StorageIndex StorageIndex;
428 typedef typename XprType::Scalar Scalar;
431 IsRowMajor = XprType::IsRowMajor,
433 OuterVector = (BlockCols==1 && ArgType::IsRowMajor)
436 (BlockRows==1 && !ArgType::IsRowMajor),
438 CoeffReadCost = evaluator<ArgType>::CoeffReadCost,
439 Flags = XprType::Flags
442 typedef typename internal::conditional<OuterVector,OuterVectorInnerIterator,InnerVectorInnerIterator>::type InnerIterator;
444 explicit unary_evaluator(
const XprType& op)
445 : m_argImpl(op.nestedExpression()), m_block(op)
// Estimate of the block's nonzeros: scales the argument's estimate by the
// block/argument size ratio (guarded against an empty nested expression).
// The exact count (nnz, visible below) appears to be used by a branch whose
// condition line is missing from this extraction.
448 inline Index nonZerosEstimate()
const {
449 const Index nnz = m_block.nonZeros();
453 const Index nested_sz = m_block.nestedExpression().size();
454 return nested_sz == 0 ? 0 : m_argImpl.nonZerosEstimate() * m_block.size() / nested_sz;
460 typedef typename evaluator<ArgType>::InnerIterator EvalIterator;
462 evaluator<ArgType> m_argImpl;
463 const XprType &m_block;
// Iterator over one inner vector of the block when the block's inner
// direction matches the argument's storage order. Wraps the argument's own
// InnerIterator: starts at the block's outer offset, skips entries before
// the block's inner start, and reports end once the inner index reaches
// m_end. All reported coordinates are shifted into block-local space.
// NOTE(review): extraction — the m_end member declaration is among the
// missing lines (it is initialized in the constructor below).
466template<
typename ArgType,
int BlockRows,
int BlockCols,
bool InnerPanel>
467class unary_evaluator<Block<ArgType,BlockRows,BlockCols,InnerPanel>, IteratorBased>::InnerVectorInnerIterator
468 :
public EvalIterator
473 enum { XprIsRowMajor = unary_evaluator::IsRowMajor };
474 const XprType& m_block;
478 EIGEN_STRONG_INLINE InnerVectorInnerIterator(
const unary_evaluator& aEval,
Index outer)
479 : EvalIterator(aEval.m_argImpl, outer + (XprIsRowMajor ? aEval.m_block.startRow() : aEval.m_block.startCol())),
480 m_block(aEval.m_block),
481 m_end(XprIsRowMajor ? aEval.m_block.startCol()+aEval.m_block.blockCols() : aEval.m_block.startRow()+aEval.m_block.blockRows())
// Fast-forward past entries that precede the block's inner start.
483 while( (EvalIterator::operator
bool()) && (EvalIterator::index() < (XprIsRowMajor ? m_block.startCol() : m_block.startRow())) )
484 EvalIterator::operator++();
// Block-local coordinates: subtract the block's start from the wrapped
// iterator's absolute positions.
487 inline StorageIndex index()
const {
return EvalIterator::index() - convert_index<StorageIndex>(XprIsRowMajor ? m_block.startCol() : m_block.startRow()); }
488 inline Index outer()
const {
return EvalIterator::outer() - (XprIsRowMajor ? m_block.startRow() : m_block.startCol()); }
489 inline Index row()
const {
return EvalIterator::row() - m_block.startRow(); }
490 inline Index col()
const {
return EvalIterator::col() - m_block.startCol(); }
// Valid while the wrapped iterator is valid AND still inside the block.
492 inline operator bool()
const {
return EvalIterator::operator bool() && EvalIterator::index() < m_end; }
// Iterator for a one-dimensional block that runs ACROSS the argument's
// storage order (e.g. a column of a row-major matrix). It must probe one
// outer vector per position: for each outer position m_outerPos it scans
// the argument's inner vector for an entry at the fixed inner coordinate
// m_innerIndex, skipping outers that have no such entry.
// NOTE(review): extraction — the m_outerPos/m_end/m_it member declarations,
// the constructor's m_eval initializer, and the empty-start handling after
// the 'if' at original line 518 are among the missing lines.
495template<
typename ArgType,
int BlockRows,
int BlockCols,
bool InnerPanel>
496class unary_evaluator<Block<ArgType,BlockRows,BlockCols,InnerPanel>, IteratorBased>::OuterVectorInnerIterator
499 enum { XprIsRowMajor = unary_evaluator::IsRowMajor };
500 const unary_evaluator& m_eval;
502 const Index m_innerIndex;
507 EIGEN_STRONG_INLINE OuterVectorInnerIterator(
const unary_evaluator& aEval,
Index outer)
509 m_outerPos( (XprIsRowMajor ? aEval.m_block.startCol() : aEval.m_block.startRow()) ),
510 m_innerIndex(XprIsRowMajor ? aEval.m_block.startRow() : aEval.m_block.startCol()),
511 m_end(XprIsRowMajor ? aEval.m_block.startCol()+aEval.m_block.blockCols() : aEval.m_block.startRow()+aEval.m_block.blockRows()),
512 m_it(m_eval.m_argImpl, m_outerPos)
// A cross-order slice has exactly one outer vector, hence outer must be 0.
514 EIGEN_UNUSED_VARIABLE(outer);
515 eigen_assert(outer==0);
// Seek the fixed inner coordinate within the first outer vector; if absent,
// advance (handling not visible here).
517 while(m_it && m_it.index() < m_innerIndex) ++m_it;
518 if((!m_it) || (m_it.index()!=m_innerIndex))
// Position along the block is the outer offset from the block's start.
522 inline StorageIndex index()
const {
return convert_index<StorageIndex>(m_outerPos - (XprIsRowMajor ? m_eval.m_block.startCol() : m_eval.m_block.startRow())); }
523 inline Index outer()
const {
return 0; }
524 inline Index row()
const {
return XprIsRowMajor ? 0 : index(); }
525 inline Index col()
const {
return XprIsRowMajor ? index() : 0; }
527 inline Scalar value()
const {
return m_it.value(); }
528 inline Scalar& valueRef() {
return m_it.valueRef(); }
// Advance to the next outer position that stores an entry at m_innerIndex.
// The wrapped iterator is destroyed and placement-new-constructed for each
// candidate outer vector (EvalIterator need not be assignable).
530 inline OuterVectorInnerIterator& operator++()
533 while(++m_outerPos<m_end)
536 m_it.~EvalIterator();
537 ::new (&m_it) EvalIterator(m_eval.m_argImpl, m_outerPos);
539 while(m_it && m_it.index() < m_innerIndex) ++m_it;
540 if(m_it && m_it.index()==m_innerIndex)
break;
// Valid while the outer position has not run off the end of the block.
545 inline operator bool()
const {
return m_outerPos < m_end; }
// Evaluator for an inner-panel block of a mutable SparseMatrix: such a block
// is itself a SparseCompressedBase (see sparse_matrix_block_impl), so simply
// reuse the compressed-storage evaluator.
548template<
typename _Scalar,
int _Options,
typename _StorageIndex,
int BlockRows,
int BlockCols>
549struct unary_evaluator<Block<SparseMatrix<_Scalar, _Options, _StorageIndex>,BlockRows,BlockCols,true>, IteratorBased>
550 : evaluator<SparseCompressedBase<Block<SparseMatrix<_Scalar, _Options, _StorageIndex>,BlockRows,BlockCols,true> > >
552 typedef Block<SparseMatrix<_Scalar, _Options, _StorageIndex>,BlockRows,BlockCols,
true> XprType;
553 typedef evaluator<SparseCompressedBase<XprType> > Base;
554 explicit unary_evaluator(
const XprType &xpr) : Base(xpr) {}
// Same as the specialization above, for blocks of a const SparseMatrix.
557template<
typename _Scalar,
int _Options,
typename _StorageIndex,
int BlockRows,
int BlockCols>
558struct unary_evaluator<Block<const SparseMatrix<_Scalar, _Options, _StorageIndex>,BlockRows,BlockCols,true>, IteratorBased>
559 : evaluator<SparseCompressedBase<Block<const SparseMatrix<_Scalar, _Options, _StorageIndex>,BlockRows,BlockCols,true> > >
561 typedef Block<const SparseMatrix<_Scalar, _Options, _StorageIndex>,BlockRows,BlockCols,
true> XprType;
562 typedef evaluator<SparseCompressedBase<XprType> > Base;
563 explicit unary_evaluator(
const XprType &xpr) : Base(xpr) {}
BlockImpl(XprType &xpr, Index startRow, Index startCol, Index blockRows, Index blockCols)
Definition: SparseBlock.h:357
BlockImpl(XprType &xpr, Index i)
Definition: SparseBlock.h:347
Expression of a fixed-size or dynamic-size block.
Definition: Block.h:105
Index nonZeros() const
Definition: SparseCompressedBase.h:56
const StorageIndex * innerIndexPtr() const
Definition: SparseCompressedBase.h:80
const Scalar * valuePtr() const
Definition: SparseCompressedBase.h:71
bool isCompressed() const
Definition: SparseCompressedBase.h:107
const StorageIndex * outerIndexPtr() const
Definition: SparseCompressedBase.h:90
const StorageIndex * innerNonZeroPtr() const
Definition: SparseCompressedBase.h:100
Base class of any sparse matrices or sparse expressions.
Definition: SparseMatrixBase.h:28
internal::traits< Derived >::StorageIndex StorageIndex
Definition: SparseMatrixBase.h:43
Index rows() const
Definition: SparseMatrixBase.h:176
@ IsVectorAtCompileTime
Definition: SparseMatrixBase.h:84
Index cols() const
Definition: SparseMatrixBase.h:178
Namespace containing all symbols from the Eigen library.
Definition: Core:141
EIGEN_DEFAULT_DENSE_INDEX_TYPE Index
The Index type as used for the API.
Definition: Meta.h:74
const int Dynamic
Definition: Constants.h:22
Eigen::Index Index
The interface type of indices.
Definition: EigenBase.h:39
Derived & derived()
Definition: EigenBase.h:46
Definition: Constants.h:510