Make SparseVector derive from SparseCompressedBase, thus improving compatibility between sparse vectors and matrices

Author: Gael Guennebaud  2015-10-06 11:41:03 +02:00
parent 6100d1ae64
commit 1b43860bc1
2 changed files with 63 additions and 96 deletions

Eigen/src/SparseCore/SparseCompressedBase.h
@@ -45,13 +45,14 @@ class SparseCompressedBase
     /** \returns the number of non zero coefficients */
     inline Index nonZeros() const
     {
-      if(isCompressed())
+      if(Derived::IsVectorAtCompileTime && outerIndexPtr()==0)
+        return derived().nonZeros();
+      else if(isCompressed())
         return outerIndexPtr()[derived().outerSize()]-outerIndexPtr()[0];
       else if(derived().outerSize()==0)
         return 0;
       else
         return innerNonZeros().sum();
     }
     /** \returns a const pointer to the array of values.
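With the vector-aware dispatch above, one generic routine can query non-zeros through SparseCompressedBase for matrices and vectors alike. A minimal sketch of the benefit (countNonZeros is an illustrative helper, not part of Eigen):

    #include <Eigen/SparseCore>

    // Illustrative helper, not part of Eigen: accepts anything deriving
    // from SparseCompressedBase, which after this commit includes SparseVector.
    template<typename Derived>
    Eigen::Index countNonZeros(const Eigen::SparseCompressedBase<Derived>& xpr)
    {
      return xpr.nonZeros();
    }

    int main()
    {
      Eigen::SparseMatrix<double> A(4,4);
      A.insert(1,2) = 3.0;
      Eigen::SparseVector<double> v(4);
      v.insert(2) = 1.0;
      return int(countNonZeros(A) + countNonZeros(v)); // 1 + 1 = 2
    }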
@@ -74,10 +75,12 @@ class SparseCompressedBase
     /** \returns a const pointer to the array of the starting positions of the inner vectors.
       * This function is aimed at interoperability with other libraries.
+      * \warning it returns the null pointer 0 for SparseVector
       * \sa valuePtr(), innerIndexPtr() */
     inline const StorageIndex* outerIndexPtr() const { return derived().outerIndexPtr(); }
     /** \returns a non-const pointer to the array of the starting positions of the inner vectors.
       * This function is aimed at interoperability with other libraries.
+      * \warning it returns the null pointer 0 for SparseVector
       * \sa valuePtr(), innerIndexPtr() */
     inline StorageIndex* outerIndexPtr() { return derived().outerIndexPtr(); }
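These \warning notes matter: the null outerIndexPtr() is exactly the sentinel the new code paths test for. A hedged sketch of how generic code can tell the two storage layouts apart (hasOuterIndexArray is hypothetical):

    // Hypothetical helper: true for SparseMatrix, false for SparseVector,
    // relying on the null-pointer convention documented above.
    template<typename Derived>
    bool hasOuterIndexArray(const Eigen::SparseCompressedBase<Derived>& xpr)
    {
      return xpr.outerIndexPtr() != 0;
    }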
@@ -100,12 +103,27 @@ class SparseCompressedBase<Derived>::InnerIterator
 {
   public:
     InnerIterator(const SparseCompressedBase& mat, Index outer)
-      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer), m_id(mat.outerIndexPtr()[outer])
+      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer)
     {
-      if(mat.isCompressed())
-        m_end = mat.outerIndexPtr()[outer+1];
-      else
-        m_end = m_id + mat.innerNonZeroPtr()[outer];
+      if(Derived::IsVectorAtCompileTime && mat.outerIndexPtr()==0)
+      {
+        m_id = 0;
+        m_end = mat.nonZeros();
+      }
+      else
+      {
+        m_id = mat.outerIndexPtr()[outer];
+        if(mat.isCompressed())
+          m_end = mat.outerIndexPtr()[outer+1];
+        else
+          m_end = m_id + mat.innerNonZeroPtr()[outer];
+      }
     }
+    InnerIterator(const SparseCompressedBase& mat)
+      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(0), m_id(0), m_end(mat.nonZeros())
+    {
+      EIGEN_STATIC_ASSERT_VECTOR_ONLY(Derived);
+    }
     inline InnerIterator& operator++() { m_id++; return *this; }
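The new one-argument constructor walks a whole sparse vector, and EIGEN_STATIC_ASSERT_VECTOR_ONLY rejects matrix types at compile time. A usage sketch:

    #include <Eigen/SparseCore>
    #include <iostream>

    int main()
    {
      Eigen::SparseVector<double> v(10);
      v.insert(3) = 1.5;
      v.insert(7) = -2.0;
      // One-argument form: vectors only; a matrix here would trigger
      // the static assertion.
      for (Eigen::SparseVector<double>::InnerIterator it(v); it; ++it)
        std::cout << it.index() << " -> " << it.value() << "\n"; // 3 -> 1.5, 7 -> -2
    }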
@@ -114,16 +132,16 @@ class SparseCompressedBase<Derived>::InnerIterator
     inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id]); }
     inline StorageIndex index() const { return m_indices[m_id]; }
-    inline Index outer() const { return m_outer; }
-    inline Index row() const { return IsRowMajor ? m_outer : index(); }
-    inline Index col() const { return IsRowMajor ? index() : m_outer; }
+    inline Index outer() const { return m_outer.value(); }
+    inline Index row() const { return IsRowMajor ? m_outer.value() : index(); }
+    inline Index col() const { return IsRowMajor ? index() : m_outer.value(); }
     inline operator bool() const { return (m_id < m_end); }
   protected:
     const Scalar* m_values;
     const StorageIndex* m_indices;
-    const Index m_outer;
+    const internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> m_outer;
     Index m_id;
     Index m_end;
   private:
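internal::variable_if_dynamic is what keeps the iterator lean for vectors: when the template value is the compile-time constant 0, the member stores nothing and value() returns the constant; only the Dynamic case carries a runtime value. A simplified sketch of the idea, not Eigen's exact code (the real implementation lives in Eigen/src/Core/util/XprHelper.h):

    enum { Dynamic = -1 };

    template<typename T, int Value>
    struct variable_if_dynamic             // compile-time case: empty, costs nothing
    {
      explicit variable_if_dynamic(T) {}
      static T value() { return T(Value); }
    };

    template<typename T>
    struct variable_if_dynamic<T, Dynamic> // runtime case: stores the value
    {
      explicit variable_if_dynamic(T v) : m_value(v) {}
      T value() const { return m_value; }
      T m_value;
    };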
@@ -138,32 +156,45 @@ class SparseCompressedBase<Derived>::ReverseInnerIterator
 {
   public:
     ReverseInnerIterator(const SparseCompressedBase& mat, Index outer)
-      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer), m_start(mat.outerIndexPtr()[outer])
+      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer)
     {
-      if(mat.isCompressed())
-        m_id = mat.outerIndexPtr()[outer+1];
-      else
-        m_id = m_start + mat.innerNonZeroPtr()[outer];
+      if(Derived::IsVectorAtCompileTime && mat.outerIndexPtr()==0)
+      {
+        m_start = 0;
+        m_id = mat.nonZeros();
+      }
+      else
+      {
+        m_start.value() = mat.outerIndexPtr()[outer];
+        if(mat.isCompressed())
+          m_id = mat.outerIndexPtr()[outer+1];
+        else
+          m_id = m_start.value() + mat.innerNonZeroPtr()[outer];
+      }
     }
+    ReverseInnerIterator(const SparseCompressedBase& mat)
+      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(0), m_start(0), m_id(mat.nonZeros())
+    {}
     inline ReverseInnerIterator& operator--() { --m_id; return *this; }
     inline const Scalar& value() const { return m_values[m_id-1]; }
     inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id-1]); }
     inline StorageIndex index() const { return m_indices[m_id-1]; }
-    inline Index outer() const { return m_outer; }
-    inline Index row() const { return IsRowMajor ? m_outer : index(); }
-    inline Index col() const { return IsRowMajor ? index() : m_outer; }
-    inline operator bool() const { return (m_id > m_start); }
+    inline Index outer() const { return m_outer.value(); }
+    inline Index row() const { return IsRowMajor ? m_outer.value() : index(); }
+    inline Index col() const { return IsRowMajor ? index() : m_outer.value(); }
+    inline operator bool() const { return (m_id > m_start.value()); }
   protected:
     const Scalar* m_values;
     const StorageIndex* m_indices;
-    const Index m_outer;
+    const internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> m_outer;
     Index m_id;
-    const Index m_start;
+    const internal::variable_if_dynamic<Index,Derived::IsVectorAtCompileTime?0:Dynamic> m_start;
 };
 namespace internal {
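ReverseInnerIterator mirrors the forward iterator, so reverse traversal also works uniformly. For illustration, reusing the vector v from the earlier sketch:

    // Visits 7 -> -2 first, then 3 -> 1.5.
    for (Eigen::SparseVector<double>::ReverseInnerIterator it(v); it; --it)
      std::cout << it.index() << " -> " << it.value() << "\n";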

Eigen/src/SparseCore/SparseVector.h
@@ -40,7 +40,7 @@ struct traits<SparseVector<_Scalar, _Options, _StorageIndex> >
       ColsAtCompileTime = IsColVector ? 1 : Dynamic,
       MaxRowsAtCompileTime = RowsAtCompileTime,
       MaxColsAtCompileTime = ColsAtCompileTime,
-      Flags = _Options | NestByRefBit | LvalueBit | (IsColVector ? 0 : RowMajorBit),
+      Flags = _Options | NestByRefBit | LvalueBit | (IsColVector ? 0 : RowMajorBit) | CompressedAccessBit,
       CoeffReadCost = NumTraits<Scalar>::ReadCost,
       SupportedAccessPatterns = InnerRandomAccessPattern
     };
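Setting CompressedAccessBit is what advertises the compressed-storage API to the rest of the library, and user code can check it at compile time. A sketch (using C++11 static_assert for brevity):

    static_assert(Eigen::internal::traits<Eigen::SparseVector<double> >::Flags
                    & Eigen::CompressedAccessBit,
                  "SparseVector advertises compressed storage access");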
@@ -63,12 +63,12 @@ struct sparse_vector_assign_selector;
 template<typename _Scalar, int _Options, typename _StorageIndex>
 class SparseVector
-  : public SparseMatrixBase<SparseVector<_Scalar, _Options, _StorageIndex> >
+  : public SparseCompressedBase<SparseVector<_Scalar, _Options, _StorageIndex> >
 {
-    typedef SparseMatrixBase<SparseVector> SparseBase;
+    typedef SparseCompressedBase<SparseVector> Base;
   public:
-    EIGEN_SPARSE_PUBLIC_INTERFACE(SparseVector)
+    _EIGEN_SPARSE_PUBLIC_INTERFACE(SparseVector)
     EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, +=)
     EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, -=)
@@ -89,6 +89,11 @@ class SparseVector
     EIGEN_STRONG_INLINE const StorageIndex* innerIndexPtr() const { return &m_data.index(0); }
     EIGEN_STRONG_INLINE StorageIndex* innerIndexPtr() { return &m_data.index(0); }
+    inline const StorageIndex* outerIndexPtr() const { return 0; }
+    inline StorageIndex* outerIndexPtr() { return 0; }
+    inline const StorageIndex* innerNonZeroPtr() const { return 0; }
+    inline StorageIndex* innerNonZeroPtr() { return 0; }
     /** \internal */
     inline Storage& data() { return m_data; }
@@ -126,8 +131,8 @@ class SparseVector
   public:
-    class InnerIterator;
-    class ReverseInnerIterator;
+    typedef typename Base::InnerIterator InnerIterator;
+    typedef typename Base::ReverseInnerIterator ReverseInnerIterator;
     inline void setZero() { m_data.clear(); }
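Because the nested iterator classes become plain typedefs to the inherited ones, existing user code that names SparseVector<...>::InnerIterator or ::ReverseInnerIterator keeps compiling unchanged.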
@@ -235,7 +240,7 @@ class SparseVector
     }
     inline SparseVector(const SparseVector& other)
-      : SparseBase(other), m_size(0)
+      : Base(other), m_size(0)
     {
       check_template_parameters();
       *this = other.derived();
@@ -357,75 +362,6 @@ protected:
     Index m_size;
 };
-template<typename Scalar, int _Options, typename _StorageIndex>
-class SparseVector<Scalar,_Options,_StorageIndex>::InnerIterator
-{
-  public:
-    explicit InnerIterator(const SparseVector& vec, Index outer=0)
-      : m_data(vec.m_data), m_id(0), m_end(m_data.size())
-    {
-      EIGEN_UNUSED_VARIABLE(outer);
-      eigen_assert(outer==0);
-    }
-    explicit InnerIterator(const internal::CompressedStorage<Scalar,StorageIndex>& data)
-      : m_data(data), m_id(0), m_end(m_data.size())
-    {}
-    inline InnerIterator& operator++() { m_id++; return *this; }
-    inline Scalar value() const { return m_data.value(m_id); }
-    inline Scalar& valueRef() { return const_cast<Scalar&>(m_data.value(m_id)); }
-    inline StorageIndex index() const { return m_data.index(m_id); }
-    inline Index row() const { return IsColVector ? index() : 0; }
-    inline Index col() const { return IsColVector ? 0 : index(); }
-    inline operator bool() const { return (m_id < m_end); }
-  protected:
-    const internal::CompressedStorage<Scalar,StorageIndex>& m_data;
-    Index m_id;
-    const Index m_end;
-  private:
-    // If you get here, then you're not using the right InnerIterator type, e.g.:
-    //   SparseMatrix<double,RowMajor> A;
-    //   SparseMatrix<double>::InnerIterator it(A,0);
-    template<typename T> InnerIterator(const SparseMatrixBase<T>&, Index outer=0);
-};
-template<typename Scalar, int _Options, typename _StorageIndex>
-class SparseVector<Scalar,_Options,_StorageIndex>::ReverseInnerIterator
-{
-  public:
-    explicit ReverseInnerIterator(const SparseVector& vec, Index outer=0)
-      : m_data(vec.m_data), m_id(m_data.size()), m_start(0)
-    {
-      EIGEN_UNUSED_VARIABLE(outer);
-      eigen_assert(outer==0);
-    }
-    explicit ReverseInnerIterator(const internal::CompressedStorage<Scalar,StorageIndex>& data)
-      : m_data(data), m_id(m_data.size()), m_start(0)
-    {}
-    inline ReverseInnerIterator& operator--() { m_id--; return *this; }
-    inline Scalar value() const { return m_data.value(m_id-1); }
-    inline Scalar& valueRef() { return const_cast<Scalar&>(m_data.value(m_id-1)); }
-    inline StorageIndex index() const { return m_data.index(m_id-1); }
-    inline Index row() const { return IsColVector ? index() : 0; }
-    inline Index col() const { return IsColVector ? 0 : index(); }
-    inline operator bool() const { return (m_id > m_start); }
-  protected:
-    const internal::CompressedStorage<Scalar,StorageIndex>& m_data;
-    Index m_id;
-    const Index m_start;
-};
 namespace internal {
 template<typename _Scalar, int _Options, typename _Index>
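Taken together, raw-buffer interop no longer needs a vector special case: valuePtr(), innerIndexPtr() and nonZeros() all come through the shared base API. A hedged sketch of such interop, assuming double/int scalar and index types (exportToC is hypothetical):

    // Hypothetical interop routine: hands any compressed sparse object's
    // raw buffers to a C-style callback; works for SparseMatrix and,
    // after this commit, SparseVector through the same interface.
    template<typename Derived>
    void exportToC(const Eigen::SparseCompressedBase<Derived>& xpr,
                   void (*sink)(const double* values, const int* indices, int nnz))
    {
      sink(xpr.valuePtr(), xpr.innerIndexPtr(), int(xpr.nonZeros()));
    }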