
git.blender.org/blender.git
Diffstat (limited to 'extern/Eigen3/Eigen/src/SparseCore/SparseVector.h')
-rw-r--r--  extern/Eigen3/Eigen/src/SparseCore/SparseVector.h  159
1 file changed, 104 insertions(+), 55 deletions(-)
diff --git a/extern/Eigen3/Eigen/src/SparseCore/SparseVector.h b/extern/Eigen3/Eigen/src/SparseCore/SparseVector.h
index c952f654038..7e15c814b6f 100644
--- a/extern/Eigen3/Eigen/src/SparseCore/SparseVector.h
+++ b/extern/Eigen3/Eigen/src/SparseCore/SparseVector.h
@@ -45,35 +45,40 @@ struct traits<SparseVector<_Scalar, _Options, _Index> >
SupportedAccessPatterns = InnerRandomAccessPattern
};
};
+
+// Sparse-Vector-Assignment kinds:
+enum {
+ SVA_RuntimeSwitch,
+ SVA_Inner,
+ SVA_Outer
+};
+
+template< typename Dest, typename Src,
+ int AssignmentKind = !bool(Src::IsVectorAtCompileTime) ? SVA_RuntimeSwitch
+ : Src::InnerSizeAtCompileTime==1 ? SVA_Outer
+ : SVA_Inner>
+struct sparse_vector_assign_selector;
+
}
template<typename _Scalar, int _Options, typename _Index>
class SparseVector
: public SparseMatrixBase<SparseVector<_Scalar, _Options, _Index> >
{
+ typedef SparseMatrixBase<SparseVector> SparseBase;
+
public:
EIGEN_SPARSE_PUBLIC_INTERFACE(SparseVector)
EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, +=)
EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, -=)
-
- protected:
- public:
-
- typedef SparseMatrixBase<SparseVector> SparseBase;
+
+ typedef internal::CompressedStorage<Scalar,Index> Storage;
enum { IsColVector = internal::traits<SparseVector>::IsColVector };
enum {
Options = _Options
};
-
- internal::CompressedStorage<Scalar,Index> m_data;
- Index m_size;
-
- internal::CompressedStorage<Scalar,Index>& _data() { return m_data; }
- internal::CompressedStorage<Scalar,Index>& _data() const { return m_data; }
-
- public:
-
+
EIGEN_STRONG_INLINE Index rows() const { return IsColVector ? m_size : 1; }
EIGEN_STRONG_INLINE Index cols() const { return IsColVector ? 1 : m_size; }
EIGEN_STRONG_INLINE Index innerSize() const { return m_size; }
@@ -84,17 +89,26 @@ class SparseVector
EIGEN_STRONG_INLINE const Index* innerIndexPtr() const { return &m_data.index(0); }
EIGEN_STRONG_INLINE Index* innerIndexPtr() { return &m_data.index(0); }
+
+ /** \internal */
+ inline Storage& data() { return m_data; }
+ /** \internal */
+ inline const Storage& data() const { return m_data; }
inline Scalar coeff(Index row, Index col) const
{
- eigen_assert((IsColVector ? col : row)==0);
+ eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
return coeff(IsColVector ? row : col);
}
- inline Scalar coeff(Index i) const { return m_data.at(i); }
+ inline Scalar coeff(Index i) const
+ {
+ eigen_assert(i>=0 && i<m_size);
+ return m_data.at(i);
+ }
inline Scalar& coeffRef(Index row, Index col)
{
- eigen_assert((IsColVector ? col : row)==0);
+ eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
return coeffRef(IsColVector ? row : col);
}
@@ -106,6 +120,7 @@ class SparseVector
*/
inline Scalar& coeffRef(Index i)
{
+ eigen_assert(i>=0 && i<m_size);
return m_data.atWithInsertion(i);
}
@@ -139,6 +154,8 @@ class SparseVector
inline Scalar& insert(Index row, Index col)
{
+ eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
+
Index inner = IsColVector ? row : col;
Index outer = IsColVector ? col : row;
eigen_assert(outer==0);
@@ -146,6 +163,8 @@ class SparseVector
}
Scalar& insert(Index i)
{
+ eigen_assert(i>=0 && i<m_size);
+
Index startId = 0;
Index p = Index(m_data.size()) - 1;
// TODO smart realloc
@@ -169,7 +188,7 @@ class SparseVector
inline void finalize() {}
- void prune(Scalar reference, RealScalar epsilon = NumTraits<RealScalar>::dummy_precision())
+ void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
{
m_data.prune(reference,epsilon);
}
@@ -188,25 +207,31 @@ class SparseVector
void resizeNonZeros(Index size) { m_data.resize(size); }
- inline SparseVector() : m_size(0) { resize(0); }
+ inline SparseVector() : m_size(0) { check_template_parameters(); resize(0); }
- inline SparseVector(Index size) : m_size(0) { resize(size); }
+ inline SparseVector(Index size) : m_size(0) { check_template_parameters(); resize(size); }
- inline SparseVector(Index rows, Index cols) : m_size(0) { resize(rows,cols); }
+ inline SparseVector(Index rows, Index cols) : m_size(0) { check_template_parameters(); resize(rows,cols); }
template<typename OtherDerived>
inline SparseVector(const SparseMatrixBase<OtherDerived>& other)
: m_size(0)
{
+ check_template_parameters();
*this = other.derived();
}
inline SparseVector(const SparseVector& other)
- : m_size(0)
+ : SparseBase(other), m_size(0)
{
+ check_template_parameters();
*this = other.derived();
}
+ /** Swaps the values of \c *this and \a other.
+ * Overloaded for performance: this version performs a \em shallow swap by swapping pointers and attributes only.
+ * \sa SparseMatrixBase::swap()
+ */
inline void swap(SparseVector& other)
{
std::swap(m_size, other.m_size);
@@ -230,10 +255,10 @@ class SparseVector
template<typename OtherDerived>
inline SparseVector& operator=(const SparseMatrixBase<OtherDerived>& other)
{
- if (int(RowsAtCompileTime)!=int(OtherDerived::RowsAtCompileTime))
- return assign(other.transpose());
- else
- return assign(other);
+ SparseVector tmp(other.size());
+ internal::sparse_vector_assign_selector<SparseVector,OtherDerived>::run(tmp,other.derived());
+ this->swap(tmp);
+ return *this;
}
#ifndef EIGEN_PARSED_BY_DOXYGEN
@@ -260,73 +285,63 @@ class SparseVector
public:
- /** \deprecated use setZero() and reserve() */
+ /** \internal \deprecated use setZero() and reserve() */
EIGEN_DEPRECATED void startFill(Index reserve)
{
setZero();
m_data.reserve(reserve);
}
- /** \deprecated use insertBack(Index,Index) */
+ /** \internal \deprecated use insertBack(Index,Index) */
EIGEN_DEPRECATED Scalar& fill(Index r, Index c)
{
eigen_assert(r==0 || c==0);
return fill(IsColVector ? r : c);
}
- /** \deprecated use insertBack(Index) */
+ /** \internal \deprecated use insertBack(Index) */
EIGEN_DEPRECATED Scalar& fill(Index i)
{
m_data.append(0, i);
return m_data.value(m_data.size()-1);
}
- /** \deprecated use insert(Index,Index) */
+ /** \internal \deprecated use insert(Index,Index) */
EIGEN_DEPRECATED Scalar& fillrand(Index r, Index c)
{
eigen_assert(r==0 || c==0);
return fillrand(IsColVector ? r : c);
}
- /** \deprecated use insert(Index) */
+ /** \internal \deprecated use insert(Index) */
EIGEN_DEPRECATED Scalar& fillrand(Index i)
{
return insert(i);
}
- /** \deprecated use finalize() */
+ /** \internal \deprecated use finalize() */
EIGEN_DEPRECATED void endFill() {}
+ // These two functions were here in the 3.1 release, so let's keep them in case some code relies on them.
+ /** \internal \deprecated use data() */
+ EIGEN_DEPRECATED Storage& _data() { return m_data; }
+ /** \internal \deprecated use data() */
+ EIGEN_DEPRECATED const Storage& _data() const { return m_data; }
+
# ifdef EIGEN_SPARSEVECTOR_PLUGIN
# include EIGEN_SPARSEVECTOR_PLUGIN
# endif
protected:
- template<typename OtherDerived>
- EIGEN_DONT_INLINE SparseVector& assign(const SparseMatrixBase<OtherDerived>& _other)
+
+ static void check_template_parameters()
{
- const OtherDerived& other(_other.derived());
- const bool needToTranspose = (Flags & RowMajorBit) != (OtherDerived::Flags & RowMajorBit);
- if(needToTranspose)
- {
- Index size = other.size();
- Index nnz = other.nonZeros();
- resize(size);
- reserve(nnz);
- for(Index i=0; i<size; ++i)
- {
- typename OtherDerived::InnerIterator it(other, i);
- if(it)
- insert(i) = it.value();
- }
- return *this;
- }
- else
- {
- // there is no special optimization
- return Base::operator=(other);
- }
+ EIGEN_STATIC_ASSERT(NumTraits<Index>::IsSigned,THE_INDEX_TYPE_MUST_BE_A_SIGNED_TYPE);
+ EIGEN_STATIC_ASSERT((_Options&(ColMajor|RowMajor))==Options,INVALID_MATRIX_TEMPLATE_PARAMETERS);
}
+
+ Storage m_data;
+ Index m_size;
};
template<typename Scalar, int _Options, typename _Index>
@@ -393,6 +408,40 @@ class SparseVector<Scalar,_Options,_Index>::ReverseInnerIterator
const Index m_start;
};
+namespace internal {
+
+template< typename Dest, typename Src>
+struct sparse_vector_assign_selector<Dest,Src,SVA_Inner> {
+ static void run(Dest& dst, const Src& src) {
+ eigen_internal_assert(src.innerSize()==src.size());
+ for(typename Src::InnerIterator it(src, 0); it; ++it)
+ dst.insert(it.index()) = it.value();
+ }
+};
+
+template< typename Dest, typename Src>
+struct sparse_vector_assign_selector<Dest,Src,SVA_Outer> {
+ static void run(Dest& dst, const Src& src) {
+ eigen_internal_assert(src.outerSize()==src.size());
+ for(typename Dest::Index i=0; i<src.size(); ++i)
+ {
+ typename Src::InnerIterator it(src, i);
+ if(it)
+ dst.insert(i) = it.value();
+ }
+ }
+};
+
+template< typename Dest, typename Src>
+struct sparse_vector_assign_selector<Dest,Src,SVA_RuntimeSwitch> {
+ static void run(Dest& dst, const Src& src) {
+ if(src.outerSize()==1) sparse_vector_assign_selector<Dest,Src,SVA_Inner>::run(dst, src);
+ else sparse_vector_assign_selector<Dest,Src,SVA_Outer>::run(dst, src);
+ }
+};
+
+}
+
} // end namespace Eigen
#endif // EIGEN_SPARSEVECTOR_H
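
A minimal usage sketch of the behaviour this patch introduces — bounds-checked element access, the O(1) shallow swap, and assignment routed through the new internal::sparse_vector_assign_selector via copy-and-swap. It assumes an Eigen 3.2-era tree that already contains the patch; the variable names are illustrative only.

#include <Eigen/Sparse>
#include <iostream>

int main()
{
  // insert() and coeffRef() now assert 0 <= i < size() in debug builds.
  Eigen::SparseVector<double> v(5);
  v.insert(2)   = 3.0;   // an out-of-range index would now hit an eigen_assert
  v.coeffRef(4) = 1.0;

  // swap() is documented as shallow: only the storage pointers and the size
  // are exchanged, so this is O(1) regardless of the number of nonzeros.
  Eigen::SparseVector<double> w(5);
  v.swap(w);

  // operator= now builds a temporary, fills it through
  // internal::sparse_vector_assign_selector (SVA_RuntimeSwitch picks the
  // inner or outer iteration loop at run time) and swaps it into *this,
  // replacing the old transpose-based assign() helper.
  Eigen::SparseMatrix<double> column(5, 1);   // 5x1 column, so outerSize()==1
  column.insert(3, 0) = 2.5;
  Eigen::SparseVector<double> u(5);
  u = column;                                 // dispatches to the SVA_Inner loop

  std::cout << u.nonZeros() << " stored coefficient(s)\n";  // prints 1
  return 0;
}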