#ifndef EIGEN_CXX11_TENSOR_TENSOR_H
#define EIGEN_CXX11_TENSOR_TENSOR_H

namespace Eigen {
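
// Illustrative usage sketch (added for documentation purposes, not part of the
// original header). It assumes the module is pulled in through the public
// <unsupported/Eigen/CXX11/Tensor> include and only exercises the API declared
// below; the default layout is column-major and the default index type is
// Eigen::Index.
//
// \code
// #include <unsupported/Eigen/CXX11/Tensor>
//
// Eigen::Tensor<float, 3> t(2, 3, 4); // dense rank-3 tensor holding 2*3*4 floats
// t.setZero();                        // bulk initializer provided by TensorBase
// t(0, 1, 2) = 7.0f;                  // element write through operator()
// float v = t(0, 1, 2);               // element read
// \endcode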
template<typename Scalar_, int NumIndices_, int Options_, typename IndexType_>
class Tensor : public TensorBase<Tensor<Scalar_, NumIndices_, Options_, IndexType_> >
{
  public:
    typedef Tensor<Scalar_, NumIndices_, Options_, IndexType_> Self;
    typedef TensorBase<Tensor<Scalar_, NumIndices_, Options_, IndexType_> > Base;
    typedef typename Eigen::internal::nested<Self>::type Nested;
    typedef typename internal::traits<Self>::StorageKind StorageKind;
    typedef typename internal::traits<Self>::Index Index;
    typedef Scalar_ Scalar;
    typedef typename NumTraits<Scalar>::Real RealScalar;
    typedef typename Base::CoeffReturnType CoeffReturnType;

    enum {
      IsAligned = bool(EIGEN_MAX_ALIGN_BYTES>0) & !(Options_&DontAlign),
      Layout = Options_ & RowMajor ? RowMajor : ColMajor,
      CoordAccess = true,
      RawAccess  = true
    };

    static const int Options = Options_;
    static const int NumIndices = NumIndices_;
    typedef DSizes<Index, NumIndices_> Dimensions;

  protected:
    TensorStorage<Scalar, Dimensions, Options> m_storage;

#ifdef EIGEN_HAS_SFINAE
    template<typename CustomIndices>
    struct isOfNormalIndex{
      static const bool is_array = internal::is_base_of<array<Index, NumIndices>, CustomIndices>::value;
      static const bool is_int   = NumTraits<CustomIndices>::IsInteger;
      static const bool value    = is_array | is_int;
    };
#endif

  public:
    // Metadata
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Index rank() const { return NumIndices; }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Index dimension(std::size_t n) const { return m_storage.dimensions()[n]; }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Dimensions& dimensions() const { return m_storage.dimensions(); }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Index size() const { return m_storage.size(); }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Scalar* data() { return m_storage.data(); }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Scalar* data() const { return m_storage.data(); }
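
    // Illustrative sketch (not from the original header) of the metadata
    // accessors above, assuming a default column-major Tensor<float, 3>:
    //
    // \code
    // Eigen::Tensor<float, 3> t(2, 3, 4);
    // t.rank();         // 3, i.e. NumIndices
    // t.dimension(1);   // 3
    // t.dimensions();   // DSizes<Index, 3> holding {2, 3, 4}
    // t.size();         // 24 == 2 * 3 * 4
    // float* raw = t.data(); // pointer to the contiguous coefficient storage
    // \endcode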
    // This makes EIGEN_INITIALIZE_COEFFS_IF_THAT_OPTION_IS_ENABLED work,
    // because that uses base().coeffRef() - and we don't yet implement a
    // similar class hierarchy.
    inline Self& base() { return *this; }
    inline const Self& base() const { return *this; }

#if EIGEN_HAS_VARIADIC_TEMPLATES
    template<typename... IndexTypes>
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Scalar& coeff(Index firstIndex, Index secondIndex, IndexTypes... otherIndices) const
    {
      // The number of indices used to access a tensor coefficient must equal the rank of the tensor.
      EIGEN_STATIC_ASSERT(sizeof...(otherIndices) + 2 == NumIndices, YOU_MADE_A_PROGRAMMING_MISTAKE)
      return coeff(array<Index, NumIndices>{{firstIndex, secondIndex, otherIndices...}});
    }
#endif
    // normal indices
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Scalar& coeff(const array<Index, NumIndices>& indices) const
    {
      eigen_internal_assert(checkIndexRange(indices));
      return m_storage.data()[linearizedIndex(indices)];
    }
    // custom indices
#ifdef EIGEN_HAS_SFINAE
    template<typename CustomIndices,
             EIGEN_SFINAE_ENABLE_IF( !(isOfNormalIndex<CustomIndices>::value) )
    >
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Scalar& coeff(CustomIndices& indices) const
    {
      return coeff(internal::customIndices2Array<Index,NumIndices>(indices));
    }
#endif
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Scalar& coeff() const
    {
      EIGEN_STATIC_ASSERT(NumIndices == 0, YOU_MADE_A_PROGRAMMING_MISTAKE);
      return m_storage.data()[0];
    }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Scalar& coeff(Index index) const
    {
      eigen_internal_assert(index >= 0 && index < size());
      return m_storage.data()[index];
    }

#if EIGEN_HAS_VARIADIC_TEMPLATES
    template<typename... IndexTypes>
    inline Scalar& coeffRef(Index firstIndex, Index secondIndex, IndexTypes... otherIndices)
    {
      // The number of indices used to access a tensor coefficient must equal the rank of the tensor.
      EIGEN_STATIC_ASSERT(sizeof...(otherIndices) + 2 == NumIndices, YOU_MADE_A_PROGRAMMING_MISTAKE)
      return coeffRef(array<Index, NumIndices>{{firstIndex, secondIndex, otherIndices...}});
    }
#endif
    // normal indices
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Scalar& coeffRef(const array<Index, NumIndices>& indices)
    {
      eigen_internal_assert(checkIndexRange(indices));
      return m_storage.data()[linearizedIndex(indices)];
    }
    // custom indices
#ifdef EIGEN_HAS_SFINAE
    template<typename CustomIndices,
             EIGEN_SFINAE_ENABLE_IF( !(isOfNormalIndex<CustomIndices>::value) )
    >
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Scalar& coeffRef(CustomIndices& indices)
    {
      return coeffRef(internal::customIndices2Array<Index,NumIndices>(indices));
    }
#endif
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Scalar& coeffRef()
    {
      EIGEN_STATIC_ASSERT(NumIndices == 0, YOU_MADE_A_PROGRAMMING_MISTAKE);
      return m_storage.data()[0];
    }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Scalar& coeffRef(Index index)
    {
      eigen_internal_assert(index >= 0 && index < size());
      return m_storage.data()[index];
    }

#if EIGEN_HAS_VARIADIC_TEMPLATES
    template<typename... IndexTypes>
    inline const Scalar& operator()(Index firstIndex, Index secondIndex, IndexTypes... otherIndices) const
    {
      // The number of indices used to access a tensor coefficient must equal the rank of the tensor.
      EIGEN_STATIC_ASSERT(sizeof...(otherIndices) + 2 == NumIndices, YOU_MADE_A_PROGRAMMING_MISTAKE)
      return this->operator()(array<Index, NumIndices>{{firstIndex, secondIndex, otherIndices...}});
    }
#else
    EIGEN_STRONG_INLINE const Scalar& operator()(Index i0, Index i1) const
    {
      return coeff(array<Index, 2>(i0, i1));
    }
    EIGEN_STRONG_INLINE const Scalar& operator()(Index i0, Index i1, Index i2) const
    {
      return coeff(array<Index, 3>(i0, i1, i2));
    }
    EIGEN_STRONG_INLINE const Scalar& operator()(Index i0, Index i1, Index i2, Index i3) const
    {
      return coeff(array<Index, 4>(i0, i1, i2, i3));
    }
    EIGEN_STRONG_INLINE const Scalar& operator()(Index i0, Index i1, Index i2, Index i3, Index i4) const
    {
      return coeff(array<Index, 5>(i0, i1, i2, i3, i4));
    }
#endif

    // custom indices
#ifdef EIGEN_HAS_SFINAE
    template<typename CustomIndices,
             EIGEN_SFINAE_ENABLE_IF( !(isOfNormalIndex<CustomIndices>::value) )
    >
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Scalar& operator()(CustomIndices& indices) const
    {
      return coeff(internal::customIndices2Array<Index,NumIndices>(indices));
    }
#endif
    // normal indices
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Scalar& operator()(const array<Index, NumIndices>& indices) const
    {
      return coeff(indices);
    }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Scalar& operator()(Index index) const
    {
      eigen_internal_assert(index >= 0 && index < size());
      return coeff(index);
    }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Scalar& operator()() const
    {
      EIGEN_STATIC_ASSERT(NumIndices == 0, YOU_MADE_A_PROGRAMMING_MISTAKE);
      return coeff();
    }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Scalar& operator[](Index index) const
    {
      // The bracket operator is only for vectors, use the parenthesis operator instead.
      EIGEN_STATIC_ASSERT(NumIndices == 1, YOU_MADE_A_PROGRAMMING_MISTAKE);
      return coeff(index);
    }

#if EIGEN_HAS_VARIADIC_TEMPLATES
    template<typename... IndexTypes>
    inline Scalar& operator()(Index firstIndex, Index secondIndex, IndexTypes... otherIndices)
    {
      // The number of indices used to access a tensor coefficient must equal the rank of the tensor.
      EIGEN_STATIC_ASSERT(sizeof...(otherIndices) + 2 == NumIndices, YOU_MADE_A_PROGRAMMING_MISTAKE)
      return operator()(array<Index, NumIndices>{{firstIndex, secondIndex, otherIndices...}});
    }
#else
    EIGEN_STRONG_INLINE Scalar& operator()(Index i0, Index i1)
    {
      return coeffRef(array<Index, 2>(i0, i1));
    }
    EIGEN_STRONG_INLINE Scalar& operator()(Index i0, Index i1, Index i2)
    {
      return coeffRef(array<Index, 3>(i0, i1, i2));
    }
    EIGEN_STRONG_INLINE Scalar& operator()(Index i0, Index i1, Index i2, Index i3)
    {
      return coeffRef(array<Index, 4>(i0, i1, i2, i3));
    }
    EIGEN_STRONG_INLINE Scalar& operator()(Index i0, Index i1, Index i2, Index i3, Index i4)
    {
      return coeffRef(array<Index, 5>(i0, i1, i2, i3, i4));
    }
#endif

    // normal indices
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Scalar& operator()(const array<Index, NumIndices>& indices)
    {
      return coeffRef(indices);
    }
    // custom indices
#ifdef EIGEN_HAS_SFINAE
    template<typename CustomIndices,
             EIGEN_SFINAE_ENABLE_IF( !(isOfNormalIndex<CustomIndices>::value) )
    >
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Scalar& operator()(CustomIndices& indices)
    {
      return coeffRef(internal::customIndices2Array<Index,NumIndices>(indices));
    }
#endif
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Scalar& operator()(Index index)
    {
      eigen_assert(index >= 0 && index < size());
      return coeffRef(index);
    }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Scalar& operator()()
    {
      EIGEN_STATIC_ASSERT(NumIndices == 0, YOU_MADE_A_PROGRAMMING_MISTAKE);
      return coeffRef();
    }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Scalar& operator[](Index index)
    {
      // The bracket operator is only for vectors, use the parenthesis operator instead.
      EIGEN_STATIC_ASSERT(NumIndices == 1, YOU_MADE_A_PROGRAMMING_MISTAKE)
      return coeffRef(index);
    }
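
    // Illustrative sketch (not from the original header) of the read/write
    // accessors above; assumes C++11 variadic templates are available.
    // operator() on a non-const tensor forwards to coeffRef(), and operator[]
    // is restricted to rank-1 tensors by the static assertion above.
    //
    // \code
    // Eigen::Tensor<int, 2> m(2, 2);
    // m.setZero();
    // m(0, 0) = 1;          // variadic operator(), forwards to coeffRef()
    // m.coeffRef(1, 1) = 4; // unchecked access (debug-only internal assert)
    // Eigen::Tensor<int, 1> v(3);
    // v.setZero();
    // v[2] = 42;            // operator[] compiles only for NumIndices == 1
    // \endcode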

    // Default and copy constructors
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Tensor() : m_storage() {}
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Tensor(const Self& other) : m_storage(other.m_storage) {}

#if EIGEN_HAS_VARIADIC_TEMPLATES
    template<typename... IndexTypes>
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Tensor(Index firstDimension, IndexTypes... otherDimensions)
      : m_storage(firstDimension, otherDimensions...)
    {
      // The number of dimensions used to construct a tensor must equal the rank of the tensor.
      EIGEN_STATIC_ASSERT(sizeof...(otherDimensions) + 1 == NumIndices, YOU_MADE_A_PROGRAMMING_MISTAKE)
    }
#else
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE explicit Tensor(Index dim1)
      : m_storage(dim1, array<Index, 1>(dim1))
    {
      EIGEN_STATIC_ASSERT(1 == NumIndices, YOU_MADE_A_PROGRAMMING_MISTAKE)
    }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Tensor(Index dim1, Index dim2)
      : m_storage(dim1*dim2, array<Index, 2>(dim1, dim2))
    {
      EIGEN_STATIC_ASSERT(2 == NumIndices, YOU_MADE_A_PROGRAMMING_MISTAKE)
    }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Tensor(Index dim1, Index dim2, Index dim3)
      : m_storage(dim1*dim2*dim3, array<Index, 3>(dim1, dim2, dim3))
    {
      EIGEN_STATIC_ASSERT(3 == NumIndices, YOU_MADE_A_PROGRAMMING_MISTAKE)
    }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Tensor(Index dim1, Index dim2, Index dim3, Index dim4)
      : m_storage(dim1*dim2*dim3*dim4, array<Index, 4>(dim1, dim2, dim3, dim4))
    {
      EIGEN_STATIC_ASSERT(4 == NumIndices, YOU_MADE_A_PROGRAMMING_MISTAKE)
    }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Tensor(Index dim1, Index dim2, Index dim3, Index dim4, Index dim5)
      : m_storage(dim1*dim2*dim3*dim4*dim5, array<Index, 5>(dim1, dim2, dim3, dim4, dim5))
    {
      EIGEN_STATIC_ASSERT(5 == NumIndices, YOU_MADE_A_PROGRAMMING_MISTAKE)
    }
#endif

    // Normal dimensions
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE explicit Tensor(const array<Index, NumIndices>& dimensions)
      : m_storage(internal::array_prod(dimensions), dimensions)
    {
      EIGEN_INITIALIZE_COEFFS_IF_THAT_OPTION_IS_ENABLED
    }

    template<typename OtherDerived>
    EIGEN_DEVICE_FUNC
    EIGEN_STRONG_INLINE Tensor(const TensorBase<OtherDerived, ReadOnlyAccessors>& other)
    {
      typedef TensorAssignOp<Tensor, const OtherDerived> Assign;
      Assign assign(*this, other.derived());
      resize(TensorEvaluator<const Assign, DefaultDevice>(assign, DefaultDevice()).dimensions());
      internal::TensorExecutor<const Assign, DefaultDevice>::run(assign, DefaultDevice());
    }

    template<typename OtherDerived>
    EIGEN_DEVICE_FUNC
    EIGEN_STRONG_INLINE Tensor(const TensorBase<OtherDerived, WriteAccessors>& other)
    {
      typedef TensorAssignOp<Tensor, const OtherDerived> Assign;
      Assign assign(*this, other.derived());
      resize(TensorEvaluator<const Assign, DefaultDevice>(assign, DefaultDevice()).dimensions());
      internal::TensorExecutor<const Assign, DefaultDevice>::run(assign, DefaultDevice());
    }

#if EIGEN_HAS_RVALUE_REFERENCES
    EIGEN_DEVICE_FUNC
    EIGEN_STRONG_INLINE Tensor(Self&& other)
      : m_storage(std::move(other.m_storage))
    {
    }
    EIGEN_DEVICE_FUNC
    EIGEN_STRONG_INLINE Tensor& operator=(Self&& other)
    {
      m_storage = std::move(other.m_storage);
      return *this;
    }
#endif

    EIGEN_DEVICE_FUNC
    EIGEN_STRONG_INLINE Tensor& operator=(const Tensor& other)
    {
      typedef TensorAssignOp<Tensor, const Tensor> Assign;
      Assign assign(*this, other);
      resize(TensorEvaluator<const Assign, DefaultDevice>(assign, DefaultDevice()).dimensions());
      internal::TensorExecutor<const Assign, DefaultDevice>::run(assign, DefaultDevice());
      return *this;
    }
    template<typename OtherDerived>
    EIGEN_DEVICE_FUNC
    EIGEN_STRONG_INLINE Tensor& operator=(const OtherDerived& other)
    {
      typedef TensorAssignOp<Tensor, const OtherDerived> Assign;
      Assign assign(*this, other);
      resize(TensorEvaluator<const Assign, DefaultDevice>(assign, DefaultDevice()).dimensions());
      internal::TensorExecutor<const Assign, DefaultDevice>::run(assign, DefaultDevice());
      return *this;
    }
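
    // Illustrative sketch (not from the original header): assigning a tensor
    // expression resizes the destination to the expression's dimensions and
    // evaluates it through TensorAssignOp / TensorExecutor, exactly as the
    // assignment operators above do. The "+" below comes from TensorBase.
    //
    // \code
    // Eigen::Tensor<float, 2> a(2, 3), b(2, 3), c; // c starts out empty
    // a.setConstant(1.0f);
    // b.setConstant(2.0f);
    // c = a + b; // operator=(const OtherDerived&): resizes c to 2x3, then evaluates
    // \endcode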

#if EIGEN_HAS_VARIADIC_TEMPLATES
    template<typename... IndexTypes> EIGEN_DEVICE_FUNC
    void resize(Index firstDimension, IndexTypes... otherDimensions)
    {
      // The number of dimensions used to resize a tensor must equal the rank of the tensor.
      EIGEN_STATIC_ASSERT(sizeof...(otherDimensions) + 1 == NumIndices, YOU_MADE_A_PROGRAMMING_MISTAKE)
      resize(array<Index, NumIndices>{{firstDimension, otherDimensions...}});
    }
#endif

    EIGEN_DEVICE_FUNC
    void resize(const array<Index, NumIndices>& dimensions)
    {
      int i;
      Index size = Index(1);
      for (i = 0; i < NumIndices; i++) {
        internal::check_rows_cols_for_overflow<Dynamic>::run(size, dimensions[i]);
        size *= dimensions[i];
      }
      #ifdef EIGEN_INITIALIZE_COEFFS
        bool size_changed = size != this->size();
        m_storage.resize(size, dimensions);
        if(size_changed) EIGEN_INITIALIZE_COEFFS_IF_THAT_OPTION_IS_ENABLED
      #else
        m_storage.resize(size, dimensions);
      #endif
    }

    EIGEN_DEVICE_FUNC
    void resize(const DSizes<Index, NumIndices>& dimensions) {
      array<Index, NumIndices> dims;
      for (int i = 0; i < NumIndices; ++i) {
        dims[i] = dimensions[i];
      }
      resize(dims);
    }

    EIGEN_DEVICE_FUNC
    void resize()
    {
      EIGEN_STATIC_ASSERT(NumIndices == 0, YOU_MADE_A_PROGRAMMING_MISTAKE);
      // Nothing to do: rank-0 tensors have a fixed size.
    }

#ifdef EIGEN_HAS_INDEX_LIST
    template <typename FirstType, typename... OtherTypes>
    EIGEN_DEVICE_FUNC
    void resize(const Eigen::IndexList<FirstType, OtherTypes...>& dimensions) {
      array<Index, NumIndices> dims;
      for (int i = 0; i < NumIndices; ++i) {
        dims[i] = static_cast<Index>(dimensions[i]);
      }
      resize(dims);
    }
#endif

    // Custom dimensions
#ifdef EIGEN_HAS_SFINAE
    template<typename CustomDimension,
             EIGEN_SFINAE_ENABLE_IF( !(isOfNormalIndex<CustomDimension>::value) )
    >
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void resize(CustomDimension& dimensions)
    {
      resize(internal::customIndices2Array<Index,NumIndices>(dimensions));
    }
#endif

#ifndef EIGEN_EMULATE_CXX11_META_H
    template <typename std::ptrdiff_t... Indices>
    EIGEN_DEVICE_FUNC
    void resize(const Sizes<Indices...>& dimensions) {
      array<Index, NumIndices> dims;
      for (int i = 0; i < NumIndices; ++i) {
        dims[i] = static_cast<Index>(dimensions[i]);
      }
      resize(dims);
    }
#else
    template <std::size_t V1, std::size_t V2, std::size_t V3, std::size_t V4, std::size_t V5>
    EIGEN_DEVICE_FUNC
    void resize(const Sizes<V1, V2, V3, V4, V5>& dimensions) {
      array<Index, NumIndices> dims;
      for (int i = 0; i < NumIndices; ++i) {
        dims[i] = static_cast<Index>(dimensions[i]);
      }
      resize(dims);
    }
#endif
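
    // Illustrative sketch (not from the original header) of the resize()
    // overloads above; assumes C++11 variadic templates. Resizing does not
    // preserve coefficient values, and storage is typically reallocated only
    // when the total number of coefficients changes.
    //
    // \code
    // Eigen::Tensor<double, 3> t(2, 3, 4);
    // t.resize(4, 3, 2);                                  // same total size, new shape
    // t.resize(Eigen::array<Eigen::Index, 3>{{5, 5, 5}}); // array-of-dimensions overload
    // \endcode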

  protected:
    bool checkIndexRange(const array<Index, NumIndices>& indices) const
    {
      using internal::array_apply_and_reduce;
      using internal::array_zip_and_reduce;
      using internal::greater_equal_zero_op;
      using internal::logical_and_op;
      using internal::lesser_op;

      return
        // check whether the indices are all >= 0
        array_apply_and_reduce<logical_and_op, greater_equal_zero_op>(indices) &&
        // check whether the indices fit within the dimensions
        array_zip_and_reduce<logical_and_op, lesser_op>(indices, m_storage.dimensions());
    }

    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Index linearizedIndex(const array<Index, NumIndices>& indices) const
    {
      if (Options & RowMajor) {
        return m_storage.dimensions().IndexOfRowMajor(indices);
      } else {
        return m_storage.dimensions().IndexOfColMajor(indices);
      }
    }
};

} // end namespace Eigen

#endif // EIGEN_CXX11_TENSOR_TENSOR_H