// Logic deciding the traversal and unrolling strategy for a full reduction.
template<typename Func, typename Derived>
struct redux_traits
{
public:
  typedef typename find_best_packet<typename Derived::Scalar,Derived::SizeAtCompileTime>::type PacketType;

  enum {
    PacketSize = unpacket_traits<PacketType>::size,
    InnerMaxSize = int(Derived::IsRowMajor)
                 ? Derived::MaxColsAtCompileTime
                 : Derived::MaxRowsAtCompileTime
  };
  enum {
    MightVectorize = (int(Derived::Flags)&ActualPacketAccessBit)
                  && (functor_traits<Func>::PacketAccess),
    MayLinearVectorize = bool(MightVectorize) && (int(Derived::Flags)&LinearAccessBit),
    MaySliceVectorize  = bool(MightVectorize) && int(InnerMaxSize)>=3*PacketSize
  };
  enum {
    Traversal = int(MayLinearVectorize) ? int(LinearVectorizedTraversal)
              : int(MaySliceVectorize)  ? int(SliceVectorizedTraversal)
                                        : int(DefaultTraversal)
  };

  enum {
    Cost = Derived::SizeAtCompileTime == Dynamic ? HugeCost
         : Derived::SizeAtCompileTime * Derived::CoeffReadCost + (Derived::SizeAtCompileTime-1) * functor_traits<Func>::Cost,
    UnrollingLimit = EIGEN_UNROLLING_LIMIT * (int(Traversal) == int(DefaultTraversal) ? 1 : int(PacketSize))
  };

  enum {
    Unrolling = Cost <= UnrollingLimit ? CompleteUnrolling : NoUnrolling
  };
  static void debug()
  {
#ifdef EIGEN_DEBUG_ASSIGN
    std::cerr << "Xpr: " << typeid(typename Derived::XprType).name() << std::endl;
    std::cerr.setf(std::ios::hex, std::ios::basefield);
    EIGEN_DEBUG_VAR(Derived::Flags)
    std::cerr.unsetf(std::ios::hex);
    EIGEN_DEBUG_VAR(InnerMaxSize)
    EIGEN_DEBUG_VAR(PacketSize)
    EIGEN_DEBUG_VAR(MightVectorize)
    EIGEN_DEBUG_VAR(MayLinearVectorize)
    EIGEN_DEBUG_VAR(MaySliceVectorize)
    EIGEN_DEBUG_VAR(Traversal)
    EIGEN_DEBUG_VAR(UnrollingLimit)
    EIGEN_DEBUG_VAR(Unrolling)
    std::cerr << std::endl;
#endif
  }
};
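// Illustrative sketch (not part of Redux.h): the Func parameter used throughout this
// file is a binary reduction functor in the style of scalar_sum_op. It provides
// operator() on scalars and, when functor_traits<Func>::PacketAccess is true,
// packetOp()/predux() on packets. A hypothetical example_sum_op could look like this;
// the name and its functor_traits specialization are for exposition only.
template<typename Scalar_>
struct example_sum_op
{
  Scalar_ operator()(const Scalar_& a, const Scalar_& b) const { return a + b; }
  // packet-wise accumulation used by the vectorized paths below
  template<typename Packet>
  Packet packetOp(const Packet& a, const Packet& b) const { return internal::padd(a,b); }
  // horizontal reduction of one packet into a scalar
  template<typename Packet>
  Scalar_ predux(const Packet& a) const { return internal::predux(a); }
};
// redux_traits reads the functor's cost and packet support from functor_traits:
template<typename Scalar_>
struct functor_traits<example_sum_op<Scalar_> >
{
  enum { Cost = NumTraits<Scalar_>::AddCost, PacketAccess = packet_traits<Scalar_>::HasAdd };
};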
/*** no vectorization: recursively split the coefficient range in halves ***/

template<typename Func, typename Derived, int Start, int Length>
struct redux_novec_unroller
{
  enum {
    HalfLength = Length/2
  };

  typedef typename Derived::Scalar Scalar;

  static EIGEN_STRONG_INLINE Scalar run(const Derived &mat, const Func& func)
  {
    return func(redux_novec_unroller<Func, Derived, Start, HalfLength>::run(mat,func),
                redux_novec_unroller<Func, Derived, Start+HalfLength, Length-HalfLength>::run(mat,func));
  }
};
template<typename Func, typename Derived, int Start>
struct redux_novec_unroller<Func, Derived, Start, 1>
{
  enum {
    outer = Start / Derived::InnerSizeAtCompileTime,
    inner = Start % Derived::InnerSizeAtCompileTime
  };

  typedef typename Derived::Scalar Scalar;

  static EIGEN_STRONG_INLINE Scalar run(const Derived &mat, const Func&)
  {
    return mat.coeffByOuterInner(outer, inner);
  }
};
// Length==0: never actually called; only here to keep the instantiations well-formed.
template<typename Func, typename Derived, int Start>
struct redux_novec_unroller<Func, Derived, Start, 0>
{
  typedef typename Derived::Scalar Scalar;

  static EIGEN_STRONG_INLINE Scalar run(const Derived&, const Func&) { return Scalar(); }
};
/*** vectorization: same recursive split, but one packet per leaf ***/

template<typename Func, typename Derived, int Start, int Length>
struct redux_vec_unroller
{
  enum {
    PacketSize = redux_traits<Func, Derived>::PacketSize,
    HalfLength = Length/2
  };

  typedef typename Derived::Scalar Scalar;
  typedef typename redux_traits<Func, Derived>::PacketType PacketScalar;

  static EIGEN_STRONG_INLINE PacketScalar run(const Derived &mat, const Func& func)
  {
    return func.packetOp(
            redux_vec_unroller<Func, Derived, Start, HalfLength>::run(mat,func),
            redux_vec_unroller<Func, Derived, Start+HalfLength, Length-HalfLength>::run(mat,func) );
  }
};
template<typename Func, typename Derived, int Start>
struct redux_vec_unroller<Func, Derived, Start, 1>
{
  enum {
    index = Start * redux_traits<Func, Derived>::PacketSize,
    outer = index / int(Derived::InnerSizeAtCompileTime),
    inner = index % int(Derived::InnerSizeAtCompileTime),
    alignment = Derived::Alignment
  };

  typedef typename Derived::Scalar Scalar;
  typedef typename redux_traits<Func, Derived>::PacketType PacketScalar;

  static EIGEN_STRONG_INLINE PacketScalar run(const Derived &mat, const Func&)
  {
    return mat.template packetByOuterInner<alignment,PacketScalar>(outer, inner);
  }
};
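// Worked example (illustrative): for a fixed-size expression with Size == 8 and
// PacketSize == 4, the CompleteUnrolling implementation below instantiates
// redux_vec_unroller<Func, Derived, 0, 2>, which recurses into the Length==1
// specializations <Func, Derived, 0, 1> and <Func, Derived, 1, 1>. Those load the
// packets starting at coefficients 0 and 4, the two results are combined with
// func.packetOp(), and the caller reduces the final packet with func.predux().
// No runtime loop is emitted.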
// redux_impl dispatches on the traversal and unrolling strategy chosen by redux_traits.
template<typename Func, typename Derived,
         int Traversal = redux_traits<Func, Derived>::Traversal,
         int Unrolling = redux_traits<Func, Derived>::Unrolling
>
struct redux_impl;
template<typename Func, typename Derived>
struct redux_impl<Func, Derived, DefaultTraversal, NoUnrolling>
{
  typedef typename Derived::Scalar Scalar;

  static EIGEN_STRONG_INLINE Scalar run(const Derived &mat, const Func& func)
  {
    eigen_assert(mat.rows()>0 && mat.cols()>0 && "you are using an empty matrix");
    Scalar res;
    res = mat.coeffByOuterInner(0, 0);
    for(Index i = 1; i < mat.innerSize(); ++i)
      res = func(res, mat.coeffByOuterInner(0, i));
    for(Index i = 1; i < mat.outerSize(); ++i)
      for(Index j = 0; j < mat.innerSize(); ++j)
        res = func(res, mat.coeffByOuterInner(i, j));
    return res;
  }
};
template<typename Func, typename Derived>
struct redux_impl<Func, Derived, DefaultTraversal, CompleteUnrolling>
  : public redux_novec_unroller<Func, Derived, 0, Derived::SizeAtCompileTime>
{};
template<typename Func, typename Derived>
struct redux_impl<Func, Derived, LinearVectorizedTraversal, NoUnrolling>
{
  typedef typename Derived::Scalar Scalar;
  typedef typename redux_traits<Func, Derived>::PacketType PacketScalar;

  static Scalar run(const Derived &mat, const Func& func)
  {
    const Index size = mat.size();

    const Index packetSize = redux_traits<Func, Derived>::PacketSize;
    const int packetAlignment = unpacket_traits<PacketScalar>::alignment;
    enum {
      alignment0 = (bool(Derived::Flags & DirectAccessBit) && bool(packet_traits<Scalar>::AlignedOnScalar)) ? int(packetAlignment) : int(Unaligned),
      alignment = EIGEN_PLAIN_ENUM_MAX(alignment0, Derived::Alignment)
    };
    const Index alignedStart = internal::first_default_aligned(mat.nestedExpression());
    const Index alignedSize2 = ((size-alignedStart)/(2*packetSize))*(2*packetSize);
    const Index alignedSize  = ((size-alignedStart)/(packetSize))*(packetSize);
    const Index alignedEnd2  = alignedStart + alignedSize2;
    const Index alignedEnd   = alignedStart + alignedSize;
    Scalar res;
    if(alignedSize)
    {
      PacketScalar packet_res0 = mat.template packet<alignment,PacketScalar>(alignedStart);
      if(alignedSize>packetSize) // we have at least two packets to partly unroll the loop
      {
        PacketScalar packet_res1 = mat.template packet<alignment,PacketScalar>(alignedStart+packetSize);
        for(Index index = alignedStart + 2*packetSize; index < alignedEnd2; index += 2*packetSize)
        {
          packet_res0 = func.packetOp(packet_res0, mat.template packet<alignment,PacketScalar>(index));
          packet_res1 = func.packetOp(packet_res1, mat.template packet<alignment,PacketScalar>(index+packetSize));
        }

        packet_res0 = func.packetOp(packet_res0,packet_res1);
        if(alignedEnd>alignedEnd2)
          packet_res0 = func.packetOp(packet_res0, mat.template packet<alignment,PacketScalar>(alignedEnd2));
      }
      res = func.predux(packet_res0);

      // unaligned head and scalar tail are reduced with plain scalar calls
      for(Index index = 0; index < alignedStart; ++index)
        res = func(res,mat.coeff(index));

      for(Index index = alignedEnd; index < size; ++index)
        res = func(res,mat.coeff(index));
    }
    else // too small to vectorize anything: fall back to a plain scalar loop
    {
      res = mat.coeff(0);
      for(Index index = 1; index < size; ++index)
        res = func(res,mat.coeff(index));
    }

    return res;
  }
};
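// Worked example (illustrative): with size == 23, packetSize == 4 and alignedStart == 3,
// we get alignedSize == 20, alignedSize2 == 16, alignedEnd2 == 19 and alignedEnd == 23.
// Coefficients 0..2 are reduced as scalars, 3..18 by the two-accumulator packet loop,
// 19..22 by the single trailing packet, and there is no scalar tail (alignedEnd == size).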
// NOTE: for SliceVectorizedTraversal we simply bypass unrolling.
template<typename Func, typename Derived, int Unrolling>
struct redux_impl<Func, Derived, SliceVectorizedTraversal, Unrolling>
{
  typedef typename Derived::Scalar Scalar;
  typedef typename redux_traits<Func, Derived>::PacketType PacketType;

  EIGEN_DEVICE_FUNC static Scalar run(const Derived &mat, const Func& func)
  {
    eigen_assert(mat.rows()>0 && mat.cols()>0 && "you are using an empty matrix");
    const Index innerSize = mat.innerSize();
    const Index outerSize = mat.outerSize();
    enum {
      packetSize = redux_traits<Func, Derived>::PacketSize
    };
    const Index packetedInnerSize = ((innerSize)/packetSize)*packetSize;
    Scalar res;
    if(packetedInnerSize)
    {
      PacketType packet_res = mat.template packet<Unaligned,PacketType>(0,0);
      for(Index j=0; j<outerSize; ++j)
        for(Index i=(j==0?packetSize:0); i<packetedInnerSize; i+=Index(packetSize))
          packet_res = func.packetOp(packet_res, mat.template packetByOuterInner<Unaligned,PacketType>(j,i));

      res = func.predux(packet_res);
      // leftover coefficients at the end of each inner vector
      for(Index j=0; j<outerSize; ++j)
        for(Index i=packetedInnerSize; i<innerSize; ++i)
          res = func(res, mat.coeffByOuterInner(j,i));
    }
    else // the inner size is too small to vectorize anything
    {
      res = redux_impl<Func, Derived, DefaultTraversal, NoUnrolling>::run(mat, func);
    }

    return res;
  }
};
template<typename Func, typename Derived>
struct redux_impl<Func, Derived, LinearVectorizedTraversal, CompleteUnrolling>
{
  typedef typename Derived::Scalar Scalar;
  typedef typename redux_traits<Func, Derived>::PacketType PacketScalar;

  enum {
    PacketSize = redux_traits<Func, Derived>::PacketSize,
    Size = Derived::SizeAtCompileTime,
    VectorizedSize = (Size / PacketSize) * PacketSize
  };

  EIGEN_DEVICE_FUNC static EIGEN_STRONG_INLINE Scalar run(const Derived &mat, const Func& func)
  {
    eigen_assert(mat.rows()>0 && mat.cols()>0 && "you are using an empty matrix");
    if (VectorizedSize > 0) {
      Scalar res = func.predux(redux_vec_unroller<Func, Derived, 0, Size / PacketSize>::run(mat,func));
      if (VectorizedSize != Size)
        res = func(res,redux_novec_unroller<Func, Derived, VectorizedSize, Size-VectorizedSize>::run(mat,func));
      return res;
    }
    else {
      return redux_novec_unroller<Func, Derived, 0, Size>::run(mat,func);
    }
  }
};
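// Worked example (illustrative): for a fixed-size expression with Size == 7 and
// PacketSize == 4, VectorizedSize == 4. Coefficients 0..3 are reduced by
// redux_vec_unroller<Func, Derived, 0, 1> followed by func.predux(), and the
// remaining coefficients 4..6 by redux_novec_unroller<Func, Derived, 4, 3>.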
// Evaluator adaptor: exposes the expression through the evaluator interface
// expected by the redux_impl implementations above.
template<typename _XprType>
class redux_evaluator
{
public:
  typedef _XprType XprType;
  EIGEN_DEVICE_FUNC explicit redux_evaluator(const XprType &xpr) : m_evaluator(xpr), m_xpr(xpr) {}

  typedef typename XprType::Scalar Scalar;
  typedef typename XprType::CoeffReturnType CoeffReturnType;
  typedef typename XprType::PacketScalar PacketScalar;
  typedef typename XprType::PacketReturnType PacketReturnType;

  enum {
    MaxRowsAtCompileTime = XprType::MaxRowsAtCompileTime,
    MaxColsAtCompileTime = XprType::MaxColsAtCompileTime,
    Flags = evaluator<XprType>::Flags & ~DirectAccessBit,
    IsRowMajor = XprType::IsRowMajor,
    SizeAtCompileTime = XprType::SizeAtCompileTime,
    InnerSizeAtCompileTime = XprType::InnerSizeAtCompileTime,
    CoeffReadCost = evaluator<XprType>::CoeffReadCost,
    Alignment = evaluator<XprType>::Alignment
  };

  EIGEN_DEVICE_FUNC Index rows() const { return m_xpr.rows(); }
  EIGEN_DEVICE_FUNC Index cols() const { return m_xpr.cols(); }
  EIGEN_DEVICE_FUNC Index size() const { return m_xpr.size(); }
  EIGEN_DEVICE_FUNC Index innerSize() const { return m_xpr.innerSize(); }
  EIGEN_DEVICE_FUNC Index outerSize() const { return m_xpr.outerSize(); }

  CoeffReturnType coeff(Index row, Index col) const
  { return m_evaluator.coeff(row, col); }

  CoeffReturnType coeff(Index index) const
  { return m_evaluator.coeff(index); }

  template<int LoadMode, typename PacketType>
  PacketType packet(Index row, Index col) const
  { return m_evaluator.template packet<LoadMode,PacketType>(row, col); }

  template<int LoadMode, typename PacketType>
  PacketType packet(Index index) const
  { return m_evaluator.template packet<LoadMode,PacketType>(index); }

  CoeffReturnType coeffByOuterInner(Index outer, Index inner) const
  { return m_evaluator.coeff(IsRowMajor ? outer : inner, IsRowMajor ? inner : outer); }

  template<int LoadMode, typename PacketType>
  PacketType packetByOuterInner(Index outer, Index inner) const
  { return m_evaluator.template packet<LoadMode,PacketType>(IsRowMajor ? outer : inner, IsRowMajor ? inner : outer); }

  const XprType & nestedExpression() const { return m_xpr; }

protected:
  internal::evaluator<XprType> m_evaluator;
  const XprType &m_xpr;
};

} // end namespace internal
template<typename Derived>
template<typename Func>
typename internal::traits<Derived>::Scalar
DenseBase<Derived>::redux(const Func& func) const
{
  eigen_assert(this->rows()>0 && this->cols()>0 && "you are using an empty matrix");

  typedef typename internal::redux_evaluator<Derived> ThisEvaluator;
  ThisEvaluator thisEval(derived());

  return internal::redux_impl<Func, ThisEvaluator>::run(thisEval, func);
}
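// Usage sketch (illustrative, not part of Redux.h): redux() accepts any associative
// binary functor. With C++11 a lambda works as well; it takes the scalar
// (non-vectorized) path because the primary functor_traits template reports
// PacketAccess == false. The function below is hypothetical, for exposition only.
inline float example_redux_usage(const Matrix3f& m)
{
  float total   = m.redux([](float a, float b) { return a + b; });           // same value as m.sum()
  float largest = m.redux(internal::scalar_max_op<float,float>());           // same value as m.maxCoeff()
  return total + largest;
}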
template<typename Derived>
EIGEN_STRONG_INLINE typename internal::traits<Derived>::Scalar
DenseBase<Derived>::minCoeff() const
{
  return derived().redux(Eigen::internal::scalar_min_op<Scalar,Scalar>());
}
template<typename Derived>
EIGEN_STRONG_INLINE typename internal::traits<Derived>::Scalar
DenseBase<Derived>::maxCoeff() const
{
  return derived().redux(Eigen::internal::scalar_max_op<Scalar,Scalar>());
}
template<typename Derived>
EIGEN_STRONG_INLINE typename internal::traits<Derived>::Scalar
DenseBase<Derived>::sum() const
{
  if(SizeAtCompileTime==0 || (SizeAtCompileTime==Dynamic && size()==0))
    return Scalar(0);
  return derived().redux(Eigen::internal::scalar_sum_op<Scalar,Scalar>());
}
template<typename Derived>
EIGEN_STRONG_INLINE typename internal::traits<Derived>::Scalar
DenseBase<Derived>::mean() const
{
#ifdef __INTEL_COMPILER
  #pragma warning push
  #pragma warning ( disable : 2259 )
#endif
  return Scalar(derived().redux(Eigen::internal::scalar_sum_op<Scalar,Scalar>())) / Scalar(this->size());
#ifdef __INTEL_COMPILER
  #pragma warning pop
#endif
}

template<typename Derived>
EIGEN_STRONG_INLINE typename internal::traits<Derived>::Scalar
DenseBase<Derived>::prod() const
{
  if(SizeAtCompileTime==0 || (SizeAtCompileTime==Dynamic && size()==0))
    return Scalar(1);
  return derived().redux(Eigen::internal::scalar_product_op<Scalar>());
}
template<typename Derived>
EIGEN_STRONG_INLINE typename internal::traits<Derived>::Scalar
MatrixBase<Derived>::trace() const
{
  return derived().diagonal().sum();
}

} // end namespace Eigen

#endif // EIGEN_REDUX_H