Add a SparseCompressedBase class providing (un)compressed accessors (like data()/*Stride() for dense matrices),
and a CompressedAccessBit flag (similar to DirectAccessBit for dense matrices).
parent b1eca55328
commit 7838fda82c
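The accessors named in the commit message map directly onto the raw compressed-storage (CSC/CSR) arrays. A minimal usage sketch follows (ordinary user code, not part of this commit; it assumes an Eigen version containing this change and the default int index type):

#include <Eigen/SparseCore>
#include <iostream>

int main()
{
  // Build a small column-major (default) sparse matrix and compress it.
  Eigen::SparseMatrix<double> A(4, 4);
  A.insert(0, 0) = 1.0;
  A.insert(2, 1) = 2.0;
  A.insert(3, 3) = 3.0;
  A.makeCompressed();                        // innerNonZeroPtr() is null once compressed

  // Raw arrays exposed through the new SparseCompressedBase interface:
  const double* values = A.valuePtr();       // one entry per stored coefficient
  const int*    inner  = A.innerIndexPtr();  // row index of each stored coefficient
  const int*    outer  = A.outerIndexPtr();  // outerSize()+1 column start offsets

  for (int j = 0; j < A.outerSize(); ++j)
    for (int p = outer[j]; p < outer[j + 1]; ++p)
      std::cout << "(" << inner[p] << "," << j << ") = " << values[p] << "\n";

  // The new flag is part of the expression's compile-time Flags:
  const bool hasCompressedAccess =
      (Eigen::SparseMatrix<double>::Flags & Eigen::CompressedAccessBit) != 0;
  std::cout << "CompressedAccessBit set: " << hasCompressedAccess << "\n";
  return 0;
}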
@@ -31,6 +31,7 @@
 #include "src/SparseCore/SparseAssign.h"
 #include "src/SparseCore/CompressedStorage.h"
 #include "src/SparseCore/AmbiVector.h"
+#include "src/SparseCore/SparseCompressedBase.h"
 #include "src/SparseCore/SparseMatrix.h"
 #include "src/SparseCore/MappedSparseMatrix.h"
 #include "src/SparseCore/SparseVector.h"
@@ -163,6 +163,19 @@ const unsigned int NestByRefBit = 0x100;
   * \sa \ref RowMajorBit, \ref TopicStorageOrders */
 const unsigned int NoPreferredStorageOrderBit = 0x200;
 
+/** \ingroup flags
+  *
+  * Means that the underlying coefficients can be accessed through pointers to the sparse (un)compressed storage format,
+  * that is, the expression provides:
+  * \code
+    inline const Scalar* valuePtr() const;
+    inline const Index* innerIndexPtr() const;
+    inline const Index* outerIndexPtr() const;
+    inline const Index* innerNonZeroPtr() const;
+    \endcode
+  */
+const unsigned int CompressedAccessBit = 0x400;
+
 
 // list of flags that are inherited by default
 const unsigned int HereditaryBits = RowMajorBit
Eigen/src/SparseCore/SparseCompressedBase.h (new file, 199 lines)
@@ -0,0 +1,199 @@
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2015 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_SPARSE_COMPRESSED_BASE_H
#define EIGEN_SPARSE_COMPRESSED_BASE_H

namespace Eigen {

template<typename Derived> class SparseCompressedBase;

namespace internal {

template<typename Derived>
struct traits<SparseCompressedBase<Derived> > : traits<Derived>
{};

} // end namespace internal

template<typename Derived>
class SparseCompressedBase
  : public SparseMatrixBase<Derived>
{
  public:
    typedef SparseMatrixBase<Derived> Base;
    _EIGEN_SPARSE_PUBLIC_INTERFACE(SparseCompressedBase)
    using Base::operator=;
    using Base::IsRowMajor;

    class InnerIterator;
    class ReverseInnerIterator;

    /** \returns a const pointer to the array of values.
      * This function is aimed at interoperability with other libraries.
      * \sa innerIndexPtr(), outerIndexPtr() */
    inline const Scalar* valuePtr() const { return derived().valuePtr(); }
    /** \returns a non-const pointer to the array of values.
      * This function is aimed at interoperability with other libraries.
      * \sa innerIndexPtr(), outerIndexPtr() */
    inline Scalar* valuePtr() { return derived().valuePtr(); }

    /** \returns a const pointer to the array of inner indices.
      * This function is aimed at interoperability with other libraries.
      * \sa valuePtr(), outerIndexPtr() */
    inline const Index* innerIndexPtr() const { return derived().innerIndexPtr(); }
    /** \returns a non-const pointer to the array of inner indices.
      * This function is aimed at interoperability with other libraries.
      * \sa valuePtr(), outerIndexPtr() */
    inline Index* innerIndexPtr() { return derived().innerIndexPtr(); }

    /** \returns a const pointer to the array of the starting positions of the inner vectors.
      * This function is aimed at interoperability with other libraries.
      * \sa valuePtr(), innerIndexPtr() */
    inline const Index* outerIndexPtr() const { return derived().outerIndexPtr(); }
    /** \returns a non-const pointer to the array of the starting positions of the inner vectors.
      * This function is aimed at interoperability with other libraries.
      * \sa valuePtr(), innerIndexPtr() */
    inline Index* outerIndexPtr() { return derived().outerIndexPtr(); }

    /** \returns a const pointer to the array of the number of non zeros of the inner vectors.
      * This function is aimed at interoperability with other libraries.
      * \warning it returns the null pointer 0 in compressed mode */
    inline const Index* innerNonZeroPtr() const { return derived().innerNonZeroPtr(); }
    /** \returns a non-const pointer to the array of the number of non zeros of the inner vectors.
      * This function is aimed at interoperability with other libraries.
      * \warning it returns the null pointer 0 in compressed mode */
    inline Index* innerNonZeroPtr() { return derived().innerNonZeroPtr(); }

    /** \returns whether \c *this is in compressed form. */
    inline bool isCompressed() const { return innerNonZeroPtr()==0; }

};

template<typename Derived>
class SparseCompressedBase<Derived>::InnerIterator
{
  public:
    InnerIterator(const SparseCompressedBase& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer), m_id(mat.outerIndexPtr()[outer])
    {
      if(mat.isCompressed())
        m_end = mat.outerIndexPtr()[outer+1];
      else
        m_end = m_id + mat.innerNonZeroPtr()[outer];
    }

    inline InnerIterator& operator++() { m_id++; return *this; }

    inline const Scalar& value() const { return m_values[m_id]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id]); }

    inline Index index() const { return m_indices[m_id]; }
    inline Index outer() const { return m_outer; }
    inline Index row() const { return IsRowMajor ? m_outer : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer; }

    inline operator bool() const { return (m_id < m_end); }

  protected:
    const Scalar* m_values;
    const Index* m_indices;
    const Index m_outer;
    Index m_id;
    Index m_end;
  private:
    // If you get here, then you're not using the right InnerIterator type, e.g.:
    //   SparseMatrix<double,RowMajor> A;
    //   SparseMatrix<double>::InnerIterator it(A,0);
    template<typename T> InnerIterator(const SparseMatrixBase<T>&, Index outer);
};

template<typename Derived>
class SparseCompressedBase<Derived>::ReverseInnerIterator
{
  public:
    ReverseInnerIterator(const SparseCompressedBase& mat, Index outer)
      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer), m_start(mat.outerIndexPtr()[outer])
    {
      if(mat.isCompressed())
        m_id = mat.outerIndexPtr()[outer+1];
      else
        m_id = m_start + mat.innerNonZeroPtr()[outer];
    }

    inline ReverseInnerIterator& operator--() { --m_id; return *this; }

    inline const Scalar& value() const { return m_values[m_id-1]; }
    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id-1]); }

    inline Index index() const { return m_indices[m_id-1]; }
    inline Index outer() const { return m_outer; }
    inline Index row() const { return IsRowMajor ? m_outer : index(); }
    inline Index col() const { return IsRowMajor ? index() : m_outer; }

    inline operator bool() const { return (m_id > m_start); }

  protected:
    const Scalar* m_values;
    const Index* m_indices;
    const Index m_outer;
    Index m_id;
    const Index m_start;
};

namespace internal {

template<typename Derived>
struct evaluator<SparseCompressedBase<Derived> >
  : evaluator_base<Derived>
{
  typedef typename Derived::Scalar Scalar;
  typedef typename Derived::Index Index;
  typedef typename Derived::InnerIterator InnerIterator;
  typedef typename Derived::ReverseInnerIterator ReverseInnerIterator;

  enum {
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    Flags = Derived::Flags
  };

  evaluator() : m_matrix(0) {}
  explicit evaluator(const Derived &mat) : m_matrix(&mat) {}

  operator Derived&() { return m_matrix->const_cast_derived(); }
  operator const Derived&() const { return *m_matrix; }

  typedef typename DenseCoeffsBase<Derived,ReadOnlyAccessors>::CoeffReturnType CoeffReturnType;
  Scalar coeff(Index row, Index col) const
  { return m_matrix->coeff(row,col); }

  Scalar& coeffRef(Index row, Index col)
  {
    eigen_internal_assert(row>=0 && row<m_matrix->rows() && col>=0 && col<m_matrix->cols());

    const Index outer = Derived::IsRowMajor ? row : col;
    const Index inner = Derived::IsRowMajor ? col : row;

    Index start = m_matrix->outerIndexPtr()[outer];
    Index end = m_matrix->isCompressed() ? m_matrix->outerIndexPtr()[outer+1] : m_matrix->outerIndexPtr()[outer] + m_matrix->innerNonZeroPtr()[outer];
    eigen_assert(end>start && "you are using a non finalized sparse matrix or written coefficient does not exist");
    const Index p = std::lower_bound(m_matrix->innerIndexPtr()+start, m_matrix->innerIndexPtr()+end, inner)
                  - m_matrix->innerIndexPtr();
    eigen_assert((p<end) && (m_matrix->innerIndexPtr()[p]==inner) && "written coefficient does not exist");
    return m_matrix->const_cast_derived().valuePtr()[p];
  }

  const Derived *m_matrix;
};

}

} // end namespace Eigen

#endif // EIGEN_SPARSE_COMPRESSED_BASE_H
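For context, the InnerIterator defined above follows the long-standing Eigen iteration idiom, now implemented once on the common base so all compressed-storage expressions can share it. A short usage sketch (user code, not part of the commit):

#include <Eigen/SparseCore>
#include <iostream>

// Sum one outer vector (a column, for the default column-major storage)
// by visiting only the stored coefficients.
double sumOfColumn(const Eigen::SparseMatrix<double>& A, int col)
{
  double sum = 0.0;
  for (Eigen::SparseMatrix<double>::InnerIterator it(A, col); it; ++it)
    sum += it.value();          // it.row(), it.col(), it.index() are also available
  return sum;
}

int main()
{
  Eigen::SparseMatrix<double> A(3, 3);
  A.insert(0, 1) = 4.0;
  A.insert(2, 1) = 6.0;
  std::cout << sumOfColumn(A, 1) << "\n";   // prints 10
  return 0;
}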
@@ -51,7 +51,7 @@ struct traits<SparseMatrix<_Scalar, _Options, _Index> >
     ColsAtCompileTime = Dynamic,
     MaxRowsAtCompileTime = Dynamic,
     MaxColsAtCompileTime = Dynamic,
-    Flags = _Options | NestByRefBit | LvalueBit,
+    Flags = _Options | NestByRefBit | LvalueBit | CompressedAccessBit,
     SupportedAccessPatterns = InnerRandomAccessPattern
   };
 };
@@ -90,16 +90,20 @@ struct traits<Diagonal<const SparseMatrix<_Scalar, _Options, _Index>, DiagIndex>
 
 template<typename _Scalar, int _Options, typename _Index>
 class SparseMatrix
-  : public SparseMatrixBase<SparseMatrix<_Scalar, _Options, _Index> >
+  : public SparseCompressedBase<SparseMatrix<_Scalar, _Options, _Index> >
 {
   public:
-    EIGEN_SPARSE_PUBLIC_INTERFACE(SparseMatrix)
+    typedef SparseCompressedBase<SparseMatrix> Base;
+    using Base::isCompressed;
+    _EIGEN_SPARSE_PUBLIC_INTERFACE(SparseMatrix)
     EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseMatrix, +=)
     EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseMatrix, -=)
 
     typedef MappedSparseMatrix<Scalar,Flags> Map;
     typedef Diagonal<SparseMatrix> DiagonalReturnType;
     typedef Diagonal<const SparseMatrix> ConstDiagonalReturnType;
+    typedef typename Base::InnerIterator InnerIterator;
+    typedef typename Base::ReverseInnerIterator ReverseInnerIterator;
 
 
     using Base::IsRowMajor;
@@ -123,9 +127,6 @@ class SparseMatrix
 
   public:
 
-    /** \returns whether \c *this is in compressed form. */
-    inline bool isCompressed() const { return m_innerNonZeros==0; }
-
     /** \returns the number of rows of the matrix */
     inline Index rows() const { return IsRowMajor ? m_outerSize : m_innerSize; }
     /** \returns the number of columns of the matrix */
@@ -241,9 +242,6 @@ class SparseMatrix
 
   public:
 
-    class InnerIterator;
-    class ReverseInnerIterator;
-
     /** Removes all non zeros but keep allocated memory */
    inline void setZero()
    {
@@ -875,76 +873,6 @@ private:
   };
 };
 
-template<typename Scalar, int _Options, typename _Index>
-class SparseMatrix<Scalar,_Options,_Index>::InnerIterator
-{
-  public:
-    InnerIterator(const SparseMatrix& mat, Index outer)
-      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer), m_id(mat.m_outerIndex[outer])
-    {
-      if(mat.isCompressed())
-        m_end = mat.m_outerIndex[outer+1];
-      else
-        m_end = m_id + mat.m_innerNonZeros[outer];
-    }
-
-    inline InnerIterator& operator++() { m_id++; return *this; }
-
-    inline const Scalar& value() const { return m_values[m_id]; }
-    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id]); }
-
-    inline Index index() const { return m_indices[m_id]; }
-    inline Index outer() const { return m_outer; }
-    inline Index row() const { return IsRowMajor ? m_outer : index(); }
-    inline Index col() const { return IsRowMajor ? index() : m_outer; }
-
-    inline operator bool() const { return (m_id < m_end); }
-
-  protected:
-    const Scalar* m_values;
-    const Index* m_indices;
-    const Index m_outer;
-    Index m_id;
-    Index m_end;
-  private:
-    // If you get here, then you're not using the right InnerIterator type, e.g.:
-    //   SparseMatrix<double,RowMajor> A;
-    //   SparseMatrix<double>::InnerIterator it(A,0);
-    template<typename T> InnerIterator(const SparseMatrixBase<T>&,Index outer);
-};
-
-template<typename Scalar, int _Options, typename _Index>
-class SparseMatrix<Scalar,_Options,_Index>::ReverseInnerIterator
-{
-  public:
-    ReverseInnerIterator(const SparseMatrix& mat, Index outer)
-      : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer), m_start(mat.m_outerIndex[outer])
-    {
-      if(mat.isCompressed())
-        m_id = mat.m_outerIndex[outer+1];
-      else
-        m_id = m_start + mat.m_innerNonZeros[outer];
-    }
-
-    inline ReverseInnerIterator& operator--() { --m_id; return *this; }
-
-    inline const Scalar& value() const { return m_values[m_id-1]; }
-    inline Scalar& valueRef() { return const_cast<Scalar&>(m_values[m_id-1]); }
-
-    inline Index index() const { return m_indices[m_id-1]; }
-    inline Index outer() const { return m_outer; }
-    inline Index row() const { return IsRowMajor ? m_outer : index(); }
-    inline Index col() const { return IsRowMajor ? index() : m_outer; }
-
-    inline operator bool() const { return (m_id > m_start); }
-
-  protected:
-    const Scalar* m_values;
-    const Index* m_indices;
-    const Index m_outer;
-    Index m_id;
-    const Index m_start;
-};
-
 namespace internal {
 
@@ -1075,6 +1003,10 @@ EIGEN_DONT_INLINE SparseMatrix<Scalar,_Options,_Index>& SparseMatrix<Scalar,_Opt
   EIGEN_STATIC_ASSERT((internal::is_same<Scalar, typename OtherDerived::Scalar>::value),
         YOU_MIXED_DIFFERENT_NUMERIC_TYPES__YOU_NEED_TO_USE_THE_CAST_METHOD_OF_MATRIXBASE_TO_CAST_NUMERIC_TYPES_EXPLICITLY)
 
+  #ifdef EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
+    EIGEN_SPARSE_CREATE_TEMPORARY_PLUGIN
+  #endif
+
   const bool needToTranspose = (Flags & RowMajorBit) != (internal::evaluator<OtherDerived>::Flags & RowMajorBit);
   if (needToTranspose)
   {
@@ -1277,45 +1209,12 @@ namespace internal {
 
 template<typename _Scalar, int _Options, typename _Index>
 struct evaluator<SparseMatrix<_Scalar,_Options,_Index> >
-  : evaluator_base<SparseMatrix<_Scalar,_Options,_Index> >
+  : evaluator<SparseCompressedBase<SparseMatrix<_Scalar,_Options,_Index> > >
 {
-  typedef _Scalar Scalar;
-  typedef _Index Index;
+  typedef evaluator<SparseCompressedBase<SparseMatrix<_Scalar,_Options,_Index> > > Base;
   typedef SparseMatrix<_Scalar,_Options,_Index> SparseMatrixType;
-  typedef typename SparseMatrixType::InnerIterator InnerIterator;
-  typedef typename SparseMatrixType::ReverseInnerIterator ReverseInnerIterator;
-
-  enum {
-    CoeffReadCost = NumTraits<_Scalar>::ReadCost,
-    Flags = SparseMatrixType::Flags
-  };
-
-  evaluator() : m_matrix(0) {}
-  explicit evaluator(const SparseMatrixType &mat) : m_matrix(&mat) {}
-
-  operator SparseMatrixType&() { return m_matrix->const_cast_derived(); }
-  operator const SparseMatrixType&() const { return *m_matrix; }
-
-  typedef typename DenseCoeffsBase<SparseMatrixType,ReadOnlyAccessors>::CoeffReturnType CoeffReturnType;
-  Scalar coeff(Index row, Index col) const
-  { return m_matrix->coeff(row,col); }
-
-  Scalar& coeffRef(Index row, Index col)
-  {
-    eigen_internal_assert(row>=0 && row<m_matrix->rows() && col>=0 && col<m_matrix->cols());
-
-    const Index outer = SparseMatrixType::IsRowMajor ? row : col;
-    const Index inner = SparseMatrixType::IsRowMajor ? col : row;
-
-    Index start = m_matrix->outerIndexPtr()[outer];
-    Index end = m_matrix->isCompressed() ? m_matrix->outerIndexPtr()[outer+1] : m_matrix->outerIndexPtr()[outer] + m_matrix->innerNonZeroPtr()[outer];
-    eigen_assert(end>start && "you are using a non finalized sparse matrix or written coefficient does not exist");
-    const Index p = m_matrix->data().searchLowerIndex(start,end-1,inner);
-    eigen_assert((p<end) && (m_matrix->data().index(p)==inner) && "written coefficient does not exist");
-    return m_matrix->const_cast_derived().data().value(p);
-  }
-
-  const SparseMatrixType *m_matrix;
+  evaluator() : Base() {}
+  explicit evaluator(const SparseMatrixType &mat) : Base(mat) {}
 };
 
 }
@@ -43,8 +43,7 @@ EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(Derived, -=) \
 EIGEN_SPARSE_INHERIT_SCALAR_ASSIGNMENT_OPERATOR(Derived, *=) \
 EIGEN_SPARSE_INHERIT_SCALAR_ASSIGNMENT_OPERATOR(Derived, /=)
 
-#define _EIGEN_SPARSE_PUBLIC_INTERFACE(Derived, BaseClass) \
-  typedef BaseClass Base; \
+#define _EIGEN_SPARSE_PUBLIC_INTERFACE(Derived) \
   typedef typename Eigen::internal::traits<Derived >::Scalar Scalar; \
   typedef typename Eigen::NumTraits<Scalar>::Real RealScalar; \
   typedef typename Eigen::internal::nested<Derived >::type Nested; \
@@ -59,7 +58,8 @@ EIGEN_SPARSE_INHERIT_SCALAR_ASSIGNMENT_OPERATOR(Derived, /=)
   using Base::const_cast_derived;
 
 #define EIGEN_SPARSE_PUBLIC_INTERFACE(Derived) \
-  _EIGEN_SPARSE_PUBLIC_INTERFACE(Derived, Eigen::SparseMatrixBase<Derived >)
+  typedef Eigen::SparseMatrixBase<Derived > Base; \
+  _EIGEN_SPARSE_PUBLIC_INTERFACE(Derived)
 
 const int CoherentAccessPattern     = 0x1;
 const int InnerRandomAccessPattern  = 0x2 | CoherentAccessPattern;
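The _EIGEN_SPARSE_PUBLIC_INTERFACE macro loses its BaseClass argument because each class now declares its own Base typedef before invoking it, and SparseCompressedBase reaches the concrete storage through the usual CRTP derived() call. A standalone sketch of that forwarding idiom (illustrative code only, with hypothetical names; not Eigen source):

// A CRTP base that implements a generic interface once and forwards to the
// concrete class, mirroring how SparseCompressedBase calls derived().
template<typename Derived>
struct CompressedBaseSketch
{
  const Derived& derived() const { return static_cast<const Derived&>(*this); }

  const double* valuePtr() const { return derived().valuePtr(); }
  bool isCompressed() const { return derived().innerNonZeroPtr() == 0; }
};

struct ToyCsc : CompressedBaseSketch<ToyCsc>
{
  ToyCsc() : m_values(0) {}

  // Concrete storage the base class forwards to.
  const double* valuePtr() const { return m_values; }
  const int* innerNonZeroPtr() const { return 0; }   // 0: always "compressed" here

  const double* m_values;
};

int main()
{
  ToyCsc m;
  const CompressedBaseSketch<ToyCsc>& base = m;
  return base.isCompressed() ? 0 : 1;   // resolves to ToyCsc::innerNonZeroPtr()
}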