#ifndef EIGEN_CXX11_TENSOR_TENSOR_CUSTOM_OP_H
#define EIGEN_CXX11_TENSOR_TENSOR_CUSTOM_OP_H

namespace Eigen {

/** \class TensorCustomUnaryOp
  * \ingroup CXX11_Tensor_Module
  *
  * \brief Tensor custom class.
  *
  */
namespace internal {

template<typename CustomUnaryFunc, typename XprType>
struct traits<TensorCustomUnaryOp<CustomUnaryFunc, XprType> >
{
  typedef typename XprType::Scalar Scalar;
  typedef typename XprType::StorageKind StorageKind;
  typedef typename XprType::Index Index;
  typedef typename XprType::Nested Nested;
  typedef typename remove_reference<Nested>::type _Nested;
  static const int NumDimensions = traits<XprType>::NumDimensions;
  static const int Layout = traits<XprType>::Layout;
};

template<typename CustomUnaryFunc, typename XprType>
struct eval<TensorCustomUnaryOp<CustomUnaryFunc, XprType>, Eigen::Dense>
{
  typedef const TensorCustomUnaryOp<CustomUnaryFunc, XprType>& type;
};

template<typename CustomUnaryFunc, typename XprType>
struct nested<TensorCustomUnaryOp<CustomUnaryFunc, XprType> >
{
  typedef TensorCustomUnaryOp<CustomUnaryFunc, XprType> type;
};

}  // end namespace internal

template<typename CustomUnaryFunc, typename XprType>
class TensorCustomUnaryOp : public TensorBase<TensorCustomUnaryOp<CustomUnaryFunc, XprType>, ReadOnlyAccessors>
{
  public:
    typedef typename internal::traits<TensorCustomUnaryOp>::Scalar Scalar;
    typedef typename Eigen::NumTraits<Scalar>::Real RealScalar;
    typedef typename XprType::CoeffReturnType CoeffReturnType;
    typedef typename internal::nested<TensorCustomUnaryOp>::type Nested;
    typedef typename internal::traits<TensorCustomUnaryOp>::StorageKind StorageKind;
    typedef typename internal::traits<TensorCustomUnaryOp>::Index Index;

    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE TensorCustomUnaryOp(const XprType& expr, const CustomUnaryFunc& func)
        : m_expr(expr), m_func(func) {}

    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE
    const CustomUnaryFunc& func() const { return m_func; }

    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE
    const typename internal::remove_all<typename XprType::Nested>::type&
    expression() const { return m_expr; }

  protected:
    typename XprType::Nested m_expr;
    const CustomUnaryFunc m_func;
};

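// Usage sketch (illustrative only, not part of this header's API): judging from how
// the evaluator below calls the functor, a unary custom functor is expected to expose
// a dimensions() method that reports the output shape and an eval() method that writes
// the result through the supplied output/device. DoubleRows is a hypothetical functor;
// the expression is normally built via the customOp() helper on TensorBase.
//
//   #include <unsupported/Eigen/CXX11/Tensor>
//
//   struct DoubleRows {
//     // Output has twice as many rows as the input, same number of columns.
//     Eigen::DSizes<Eigen::DenseIndex, 2> dimensions(
//         const Eigen::Tensor<float, 2>& input) const {
//       return Eigen::DSizes<Eigen::DenseIndex, 2>(2 * input.dimension(0),
//                                                  input.dimension(1));
//     }
//
//     template <typename Output, typename Device>
//     void eval(const Eigen::Tensor<float, 2>& input, Output& output,
//               const Device& device) const {
//       Eigen::DSizes<Eigen::DenseIndex, 2> extent = input.dimensions();
//       Eigen::DSizes<Eigen::DenseIndex, 2> top(0, 0);
//       Eigen::DSizes<Eigen::DenseIndex, 2> bottom(input.dimension(0), 0);
//       output.slice(top, extent).device(device) = input;
//       output.slice(bottom, extent).device(device) = input.constant(0.0f);
//     }
//   };
//
//   Eigen::Tensor<float, 2> in(3, 5);
//   in.setRandom();
//   Eigen::Tensor<float, 2> out = in.customOp(DoubleRows());  // 6 x 5 result
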
// Eval as rvalue
template<typename CustomUnaryFunc, typename XprType, typename Device>
struct TensorEvaluator<const TensorCustomUnaryOp<CustomUnaryFunc, XprType>, Device>
{
  typedef TensorCustomUnaryOp<CustomUnaryFunc, XprType> ArgType;
  typedef typename internal::traits<ArgType>::Index Index;
  static const int NumDims = internal::traits<ArgType>::NumDimensions;
  typedef DSizes<Index, NumDims> Dimensions;
  typedef typename internal::remove_const<typename ArgType::Scalar>::type Scalar;
  typedef typename internal::remove_const<typename XprType::CoeffReturnType>::type CoeffReturnType;
  typedef typename PacketType<CoeffReturnType, Device>::type PacketReturnType;
  static const int PacketSize = internal::unpacket_traits<PacketReturnType>::size;

  enum {
    IsAligned = false,
    PacketAccess = (internal::packet_traits<Scalar>::size > 1),
    BlockAccess = false,
    Layout = TensorEvaluator<XprType, Device>::Layout,
    CoordAccess = false,
    RawAccess = false
  };

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE TensorEvaluator(const ArgType& op, const Device& device)
      : m_op(op), m_device(device), m_result(NULL)
  {
    // The custom functor decides the shape of the result.
    m_dimensions = op.func().dimensions(op.expression());
  }

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Dimensions& dimensions() const { return m_dimensions; }

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE bool evalSubExprsIfNeeded(CoeffReturnType* data) {
    if (data) {
      // Evaluate directly into the destination buffer provided by the caller.
      evalTo(data);
      return false;
    } else {
      // No destination buffer: allocate one and materialize the result there.
      m_result = static_cast<CoeffReturnType*>(
          m_device.allocate(dimensions().TotalSize() * sizeof(Scalar)));
      evalTo(m_result);
      return true;
    }
  }

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void cleanup() {
    if (m_result != NULL) {
      m_device.deallocate(m_result);
      m_result = NULL;
    }
  }

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE CoeffReturnType coeff(Index index) const {
    return m_result[index];
  }

  template<int LoadMode>
  EIGEN_DEVICE_FUNC PacketReturnType packet(Index index) const {
    return internal::ploadt<PacketReturnType, LoadMode>(m_result + index);
  }

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE TensorOpCost costPerCoeff(bool vectorized) const {
    return TensorOpCost(sizeof(CoeffReturnType), 0, 0, vectorized, PacketSize);
  }

  EIGEN_DEVICE_FUNC CoeffReturnType* data() const { return m_result; }

 protected:
  EIGEN_DEVICE_FUNC void evalTo(Scalar* data) {
    // Wrap the raw buffer in a TensorMap and let the custom functor fill it in.
    TensorMap<Tensor<CoeffReturnType, NumDims, Layout, Index> > result(data, m_dimensions);
    m_op.func().eval(m_op.expression(), result, m_device);
  }

  Dimensions m_dimensions;
  const ArgType m_op;
  const Device& m_device;
  CoeffReturnType* m_result;
};

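// The Device is forwarded unchanged to func().eval(), so a custom op can be evaluated
// on any device supported by the Tensor module. A minimal sketch, assuming the
// hypothetical DoubleRows functor above and the module's thread-pool support
// (EIGEN_USE_THREADS defined before including the Tensor header):
//
//   #define EIGEN_USE_THREADS
//   #include <unsupported/Eigen/CXX11/Tensor>
//
//   Eigen::ThreadPool pool(4);                       // 4 worker threads
//   Eigen::ThreadPoolDevice device(&pool, 4);
//
//   Eigen::Tensor<float, 2> in(3, 5);
//   in.setRandom();
//   Eigen::Tensor<float, 2> out(6, 5);
//   out.device(device) = in.customOp(DoubleRows());  // eval() receives `device`
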
/** \class TensorCustomBinaryOp
  * \ingroup CXX11_Tensor_Module
  *
  * \brief Tensor custom class.
  *
  */
namespace internal {

template<typename CustomBinaryFunc, typename LhsXprType, typename RhsXprType>
struct traits<TensorCustomBinaryOp<CustomBinaryFunc, LhsXprType, RhsXprType> >
{
  typedef typename internal::promote_storage_type<typename LhsXprType::Scalar,
                                                  typename RhsXprType::Scalar>::ret Scalar;
  typedef typename internal::promote_storage_type<typename LhsXprType::CoeffReturnType,
                                                  typename RhsXprType::CoeffReturnType>::ret CoeffReturnType;
  typedef typename promote_storage_type<typename traits<LhsXprType>::StorageKind,
                                        typename traits<RhsXprType>::StorageKind>::ret StorageKind;
  typedef typename promote_index_type<typename traits<LhsXprType>::Index,
                                      typename traits<RhsXprType>::Index>::type Index;
  typedef typename LhsXprType::Nested LhsNested;
  typedef typename RhsXprType::Nested RhsNested;
  typedef typename remove_reference<LhsNested>::type _LhsNested;
  typedef typename remove_reference<RhsNested>::type _RhsNested;
  static const int NumDimensions = traits<LhsXprType>::NumDimensions;
  static const int Layout = traits<LhsXprType>::Layout;
};

template<typename CustomBinaryFunc, typename LhsXprType, typename RhsXprType>
struct eval<TensorCustomBinaryOp<CustomBinaryFunc, LhsXprType, RhsXprType>, Eigen::Dense>
{
  typedef const TensorCustomBinaryOp<CustomBinaryFunc, LhsXprType, RhsXprType>& type;
};

template<typename CustomBinaryFunc, typename LhsXprType, typename RhsXprType>
struct nested<TensorCustomBinaryOp<CustomBinaryFunc, LhsXprType, RhsXprType> >
{
  typedef TensorCustomBinaryOp<CustomBinaryFunc, LhsXprType, RhsXprType> type;
};

}  // end namespace internal

template<typename CustomBinaryFunc, typename LhsXprType, typename RhsXprType>
class TensorCustomBinaryOp : public TensorBase<TensorCustomBinaryOp<CustomBinaryFunc, LhsXprType, RhsXprType>, ReadOnlyAccessors>
{
  public:
    typedef typename internal::traits<TensorCustomBinaryOp>::Scalar Scalar;
    typedef typename Eigen::NumTraits<Scalar>::Real RealScalar;
    typedef typename internal::traits<TensorCustomBinaryOp>::CoeffReturnType CoeffReturnType;
    typedef typename internal::nested<TensorCustomBinaryOp>::type Nested;
    typedef typename internal::traits<TensorCustomBinaryOp>::StorageKind StorageKind;
    typedef typename internal::traits<TensorCustomBinaryOp>::Index Index;

    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE TensorCustomBinaryOp(const LhsXprType& lhs, const RhsXprType& rhs, const CustomBinaryFunc& func)
        : m_lhs_xpr(lhs), m_rhs_xpr(rhs), m_func(func) {}

    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE
    const CustomBinaryFunc& func() const { return m_func; }

    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE
    const typename internal::remove_all<typename LhsXprType::Nested>::type&
    lhsExpression() const { return m_lhs_xpr; }

    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE
    const typename internal::remove_all<typename RhsXprType::Nested>::type&
    rhsExpression() const { return m_rhs_xpr; }

  protected:
    typename LhsXprType::Nested m_lhs_xpr;
    typename RhsXprType::Nested m_rhs_xpr;
    const CustomBinaryFunc m_func;
};

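// Usage sketch for the binary variant (illustrative only): the functor's dimensions()
// and eval() both receive the two input expressions, and the expression is normally
// built through the two-argument customOp() overload on TensorBase. VStack is a
// hypothetical functor that stacks two matrices vertically.
//
//   struct VStack {
//     Eigen::DSizes<Eigen::DenseIndex, 2> dimensions(
//         const Eigen::Tensor<float, 2>& a, const Eigen::Tensor<float, 2>& b) const {
//       return Eigen::DSizes<Eigen::DenseIndex, 2>(a.dimension(0) + b.dimension(0),
//                                                  a.dimension(1));
//     }
//
//     template <typename Output, typename Device>
//     void eval(const Eigen::Tensor<float, 2>& a, const Eigen::Tensor<float, 2>& b,
//               Output& output, const Device& device) const {
//       Eigen::DSizes<Eigen::DenseIndex, 2> top(0, 0);
//       Eigen::DSizes<Eigen::DenseIndex, 2> bottom(a.dimension(0), 0);
//       output.slice(top, a.dimensions()).device(device) = a;
//       output.slice(bottom, b.dimensions()).device(device) = b;
//     }
//   };
//
//   Eigen::Tensor<float, 2> a(3, 5), b(2, 5);
//   a.setRandom(); b.setRandom();
//   Eigen::Tensor<float, 2> c = a.customOp(b, VStack());  // 5 x 5 result
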
// Eval as rvalue
template<typename CustomBinaryFunc, typename LhsXprType, typename RhsXprType, typename Device>
struct TensorEvaluator<const TensorCustomBinaryOp<CustomBinaryFunc, LhsXprType, RhsXprType>, Device>
{
  typedef TensorCustomBinaryOp<CustomBinaryFunc, LhsXprType, RhsXprType> XprType;
  typedef typename internal::traits<XprType>::Index Index;
  static const int NumDims = internal::traits<XprType>::NumDimensions;
  typedef DSizes<Index, NumDims> Dimensions;
  typedef typename XprType::Scalar Scalar;
  typedef typename internal::remove_const<typename XprType::CoeffReturnType>::type CoeffReturnType;
  typedef typename PacketType<CoeffReturnType, Device>::type PacketReturnType;
  static const int PacketSize = internal::unpacket_traits<PacketReturnType>::size;

  enum {
    IsAligned = false,
    PacketAccess = (internal::packet_traits<Scalar>::size > 1),
    BlockAccess = false,
    Layout = TensorEvaluator<LhsXprType, Device>::Layout,
    CoordAccess = false,
    RawAccess = false
  };

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE TensorEvaluator(const XprType& op, const Device& device)
      : m_op(op), m_device(device), m_result(NULL)
  {
    // The custom functor decides the shape of the result from both inputs.
    m_dimensions = op.func().dimensions(op.lhsExpression(), op.rhsExpression());
  }

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Dimensions& dimensions() const { return m_dimensions; }

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE bool evalSubExprsIfNeeded(CoeffReturnType* data) {
    if (data) {
      // Evaluate directly into the destination buffer provided by the caller.
      evalTo(data);
      return false;
    } else {
      // No destination buffer: allocate one and materialize the result there.
      m_result = static_cast<Scalar *>(
          m_device.allocate(dimensions().TotalSize() * sizeof(Scalar)));
      evalTo(m_result);
      return true;
    }
  }

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void cleanup() {
    if (m_result != NULL) {
      m_device.deallocate(m_result);
      m_result = NULL;
    }
  }

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE CoeffReturnType coeff(Index index) const {
    return m_result[index];
  }

  template<int LoadMode>
  EIGEN_DEVICE_FUNC PacketReturnType packet(Index index) const {
    return internal::ploadt<PacketReturnType, LoadMode>(m_result + index);
  }

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE TensorOpCost costPerCoeff(bool vectorized) const {
    return TensorOpCost(sizeof(CoeffReturnType), 0, 0, vectorized, PacketSize);
  }

  EIGEN_DEVICE_FUNC CoeffReturnType* data() const { return m_result; }

 protected:
  EIGEN_DEVICE_FUNC void evalTo(Scalar* data) {
    // Wrap the raw buffer in a TensorMap and let the custom functor fill it in.
    TensorMap<Tensor<Scalar, NumDims, Layout> > result(data, m_dimensions);
    m_op.func().eval(m_op.lhsExpression(), m_op.rhsExpression(), result, m_device);
  }

  Dimensions m_dimensions;
  const XprType m_op;
  const Device& m_device;
  CoeffReturnType* m_result;
};

} // end namespace Eigen

#endif // EIGEN_CXX11_TENSOR_TENSOR_CUSTOM_OP_H