path: root/Eigen/src/SparseCore
author    Gael Guennebaud <g.gael@free.fr>    2011-12-04 12:19:26 +0100
committer Gael Guennebaud <g.gael@free.fr>    2011-12-04 12:19:26 +0100
commit 32917515df2fb1568d852e5727b20ecb27acbe6b (patch)
tree   c3c089a568b32513850e747b09666b5c6f16cb0b /Eigen/src/SparseCore
parent 1cdbae62db538c3fb73d5195facd1c1481b48696 (diff)
make the accessors to internal sparse storage part of the public API and remove their "_" prefix.
Diffstat (limited to 'Eigen/src/SparseCore')
-rw-r--r--  Eigen/src/SparseCore/MappedSparseMatrix.h    | 26
-rw-r--r--  Eigen/src/SparseCore/SparseBlock.h           | 42
-rw-r--r--  Eigen/src/SparseCore/SparseMatrix.h          | 68
-rw-r--r--  Eigen/src/SparseCore/SparseSelfAdjointView.h | 32
-rw-r--r--  Eigen/src/SparseCore/SparseVector.h          |  8
5 files changed, 91 insertions(+), 85 deletions(-)
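
With the "_" prefix gone, the raw compressed-column arrays of a SparseMatrix can be read directly through the public valuePtr(), innerIndexPtr() and outerIndexPtr() accessors introduced below. A minimal sketch of the intended interoperability use, assuming a column-major matrix already in compressed mode (the helper name dump_csc and the use of int as the Index type are illustrative only):

#include <Eigen/Sparse>
#include <iostream>

// Sketch: print the CSC storage of a column-major SparseMatrix through the
// accessors made public by this commit. Assumes compressed mode, so that
// outerIndexPtr()[j+1] marks the end of column j.
void dump_csc(const Eigen::SparseMatrix<double>& m)
{
  const double* values = m.valuePtr();       // nonZeros() coefficients
  const int*    inner  = m.innerIndexPtr();  // row index of each coefficient
  const int*    outer  = m.outerIndexPtr();  // outerSize()+1 column start offsets
  for (int j = 0; j < m.outerSize(); ++j)
    for (int p = outer[j]; p < outer[j+1]; ++p)
      std::cout << "(" << inner[p] << "," << j << ") = " << values[p] << "\n";
}
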
diff --git a/Eigen/src/SparseCore/MappedSparseMatrix.h b/Eigen/src/SparseCore/MappedSparseMatrix.h
index 8f23bfffa..87aab8fed 100644
--- a/Eigen/src/SparseCore/MappedSparseMatrix.h
+++ b/Eigen/src/SparseCore/MappedSparseMatrix.h
@@ -66,14 +66,14 @@ class MappedSparseMatrix
//----------------------------------------
// direct access interface
- inline const Scalar* _valuePtr() const { return m_values; }
- inline Scalar* _valuePtr() { return m_values; }
+ inline const Scalar* valuePtr() const { return m_values; }
+ inline Scalar* valuePtr() { return m_values; }
- inline const Index* _innerIndexPtr() const { return m_innerIndices; }
- inline Index* _innerIndexPtr() { return m_innerIndices; }
+ inline const Index* innerIndexPtr() const { return m_innerIndices; }
+ inline Index* innerIndexPtr() { return m_innerIndices; }
- inline const Index* _outerIndexPtr() const { return m_outerIndex; }
- inline Index* _outerIndexPtr() { return m_outerIndex; }
+ inline const Index* outerIndexPtr() const { return m_outerIndex; }
+ inline Index* outerIndexPtr() { return m_outerIndex; }
//----------------------------------------
inline Scalar coeff(Index row, Index col) const
@@ -131,23 +131,23 @@ class MappedSparseMatrix<Scalar,_Flags,_Index>::InnerIterator
InnerIterator(const MappedSparseMatrix& mat, Index outer)
: m_matrix(mat),
m_outer(outer),
- m_id(mat._outerIndexPtr()[outer]),
+ m_id(mat.outerIndexPtr()[outer]),
m_start(m_id),
- m_end(mat._outerIndexPtr()[outer+1])
+ m_end(mat.outerIndexPtr()[outer+1])
{}
template<unsigned int Added, unsigned int Removed>
InnerIterator(const Flagged<MappedSparseMatrix,Added,Removed>& mat, Index outer)
- : m_matrix(mat._expression()), m_id(m_matrix._outerIndexPtr()[outer]),
- m_start(m_id), m_end(m_matrix._outerIndexPtr()[outer+1])
+ : m_matrix(mat._expression()), m_id(m_matrix.outerIndexPtr()[outer]),
+ m_start(m_id), m_end(m_matrix.outerIndexPtr()[outer+1])
{}
inline InnerIterator& operator++() { m_id++; return *this; }
- inline Scalar value() const { return m_matrix._valuePtr()[m_id]; }
- inline Scalar& valueRef() { return const_cast<Scalar&>(m_matrix._valuePtr()[m_id]); }
+ inline Scalar value() const { return m_matrix.valuePtr()[m_id]; }
+ inline Scalar& valueRef() { return const_cast<Scalar&>(m_matrix.valuePtr()[m_id]); }
- inline Index index() const { return m_matrix._innerIndexPtr()[m_id]; }
+ inline Index index() const { return m_matrix.innerIndexPtr()[m_id]; }
inline Index row() const { return IsRowMajor ? m_outer : index(); }
inline Index col() const { return IsRowMajor ? index() : m_outer; }
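
MappedSparseMatrix wraps compressed arrays owned by the caller, so the renamed accessors simply hand back the caller's own pointers. A hedged sketch of that round trip; the constructor signature (rows, cols, nnz, outer, inner, values) is assumed for this revision and is not shown in the diff above:

#include <Eigen/Sparse>

// Sketch only: wrap externally owned CSC arrays (here a 3x3 identity) in a
// MappedSparseMatrix and read them back through the renamed accessors.
int main()
{
  int    outer[4]  = {0, 1, 2, 3};   // column start offsets (outerSize()+1 entries)
  int    inner[3]  = {0, 1, 2};      // row index of each stored coefficient
  double values[3] = {1.0, 1.0, 1.0};

  Eigen::MappedSparseMatrix<double> A(3, 3, 3, outer, inner, values);

  // No copy is made: valuePtr()/innerIndexPtr()/outerIndexPtr() return the
  // very arrays declared above.
  values[1] = 5.0;
  return A.valuePtr()[1] == 5.0 ? 0 : 1;
}
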
diff --git a/Eigen/src/SparseCore/SparseBlock.h b/Eigen/src/SparseCore/SparseBlock.h
index 777fd1b76..34c98926b 100644
--- a/Eigen/src/SparseCore/SparseBlock.h
+++ b/Eigen/src/SparseCore/SparseBlock.h
@@ -156,8 +156,8 @@ class SparseInnerVectorSet<SparseMatrix<_Scalar, _Options, _Index>, Size>
Index nnz = tmp.nonZeros();
Index nnz_previous = nonZeros();
Index free_size = matrix.data().allocatedSize() + nnz_previous;
- std::size_t nnz_head = m_outerStart==0 ? 0 : matrix._outerIndexPtr()[m_outerStart];
- std::size_t tail = m_matrix._outerIndexPtr()[m_outerStart+m_outerSize.value()];
+ std::size_t nnz_head = m_outerStart==0 ? 0 : matrix.outerIndexPtr()[m_outerStart];
+ std::size_t tail = m_matrix.outerIndexPtr()[m_outerStart+m_outerSize.value()];
std::size_t nnz_tail = matrix.nonZeros() - tail;
if(nnz>free_size)
@@ -203,13 +203,13 @@ class SparseInnerVectorSet<SparseMatrix<_Scalar, _Options, _Index>, Size>
Index p = nnz_head;
for(Index k=0; k<m_outerSize.value(); ++k)
{
- matrix._outerIndexPtr()[m_outerStart+k] = p;
+ matrix.outerIndexPtr()[m_outerStart+k] = p;
p += tmp.innerVector(k).nonZeros();
}
std::ptrdiff_t offset = nnz - nnz_previous;
for(Index k = m_outerStart + m_outerSize.value(); k<=matrix.outerSize(); ++k)
{
- matrix._outerIndexPtr()[k] += offset;
+ matrix.outerIndexPtr()[k] += offset;
}
return *this;
@@ -220,30 +220,30 @@ class SparseInnerVectorSet<SparseMatrix<_Scalar, _Options, _Index>, Size>
return operator=<SparseInnerVectorSet>(other);
}
- inline const Scalar* _valuePtr() const
- { return m_matrix._valuePtr() + m_matrix._outerIndexPtr()[m_outerStart]; }
- inline Scalar* _valuePtr()
- { return m_matrix.const_cast_derived()._valuePtr() + m_matrix._outerIndexPtr()[m_outerStart]; }
+ inline const Scalar* valuePtr() const
+ { return m_matrix.valuePtr() + m_matrix.outerIndexPtr()[m_outerStart]; }
+ inline Scalar* valuePtr()
+ { return m_matrix.const_cast_derived().valuePtr() + m_matrix.outerIndexPtr()[m_outerStart]; }
- inline const Index* _innerIndexPtr() const
- { return m_matrix._innerIndexPtr() + m_matrix._outerIndexPtr()[m_outerStart]; }
- inline Index* _innerIndexPtr()
- { return m_matrix.const_cast_derived()._innerIndexPtr() + m_matrix._outerIndexPtr()[m_outerStart]; }
+ inline const Index* innerIndexPtr() const
+ { return m_matrix.innerIndexPtr() + m_matrix.outerIndexPtr()[m_outerStart]; }
+ inline Index* innerIndexPtr()
+ { return m_matrix.const_cast_derived().innerIndexPtr() + m_matrix.outerIndexPtr()[m_outerStart]; }
- inline const Index* _outerIndexPtr() const
- { return m_matrix._outerIndexPtr() + m_outerStart; }
- inline Index* _outerIndexPtr()
- { return m_matrix.const_cast_derived()._outerIndexPtr() + m_outerStart; }
+ inline const Index* outerIndexPtr() const
+ { return m_matrix.outerIndexPtr() + m_outerStart; }
+ inline Index* outerIndexPtr()
+ { return m_matrix.const_cast_derived().outerIndexPtr() + m_outerStart; }
Index nonZeros() const
{
if(m_matrix.compressed())
- return std::size_t(m_matrix._outerIndexPtr()[m_outerStart+m_outerSize.value()])
- - std::size_t(m_matrix._outerIndexPtr()[m_outerStart]);
+ return std::size_t(m_matrix.outerIndexPtr()[m_outerStart+m_outerSize.value()])
+ - std::size_t(m_matrix.outerIndexPtr()[m_outerStart]);
else if(m_outerSize.value()==0)
return 0;
else
- return Map<const Matrix<Index,Size,1> >(m_matrix._innerNonZeroPtr(), m_outerSize.value()).sum();
+ return Map<const Matrix<Index,Size,1> >(m_matrix.innerNonZeroPtr(), m_outerSize.value()).sum();
}
const Scalar& lastCoeff() const
@@ -251,9 +251,9 @@ class SparseInnerVectorSet<SparseMatrix<_Scalar, _Options, _Index>, Size>
EIGEN_STATIC_ASSERT_VECTOR_ONLY(SparseInnerVectorSet);
eigen_assert(nonZeros()>0);
if(m_matrix.compressed())
- return m_matrix._valuePtr()[m_matrix._outerIndexPtr()[m_outerStart+1]-1];
+ return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart+1]-1];
else
- return m_matrix._valuePtr()[m_matrix._outerIndexPtr()[m_outerStart]+m_matrix._innerNonZeroPtr()[m_outerStart]-1];
+ return m_matrix.valuePtr()[m_matrix.outerIndexPtr()[m_outerStart]+m_matrix.innerNonZeroPtr()[m_outerStart]-1];
}
// template<typename Sparse>
diff --git a/Eigen/src/SparseCore/SparseMatrix.h b/Eigen/src/SparseCore/SparseMatrix.h
index cc2805a53..d4530e4ce 100644
--- a/Eigen/src/SparseCore/SparseMatrix.h
+++ b/Eigen/src/SparseCore/SparseMatrix.h
@@ -116,35 +116,41 @@ class SparseMatrix
/** \returns the number of columns (resp. rows) of the matrix if the storage order column major (resp. row major) */
inline Index outerSize() const { return m_outerSize; }
- /** \internal
- * \returns a const pointer to the array of values */
- inline const Scalar* _valuePtr() const { return &m_data.value(0); }
- /** \internal
- * \returns a non-const pointer to the array of values */
- inline Scalar* _valuePtr() { return &m_data.value(0); }
-
- /** \internal
- * \returns a const pointer to the array of inner indices */
- inline const Index* _innerIndexPtr() const { return &m_data.index(0); }
- /** \internal
- * \returns a non-const pointer to the array of inner indices */
- inline Index* _innerIndexPtr() { return &m_data.index(0); }
-
- /** \internal
- * \returns a const pointer to the array of the starting positions of the inner vectors */
- inline const Index* _outerIndexPtr() const { return m_outerIndex; }
- /** \internal
- * \returns a non-const pointer to the array of the starting positions of the inner vectors */
- inline Index* _outerIndexPtr() { return m_outerIndex; }
-
- /** \internal
- * \returns a const pointer to the array of the number of non zeros of the inner vectors
- * \warning it returns 0 in compressed mode */
- inline const Index* _innerNonZeroPtr() const { return m_innerNonZeros; }
- /** \internal
- * \returns a non-const pointer to the array of the number of non zeros of the inner vectors
- * \warning it returns 0 in compressed mode */
- inline Index* _innerNonZeroPtr() { return m_innerNonZeros; }
+ /** \returns a const pointer to the array of values.
+ * This function is aimed at interoperability with other libraries.
+ * \sa innerIndexPtr(), outerIndexPtr() */
+ inline const Scalar* valuePtr() const { return &m_data.value(0); }
+ /** \returns a non-const pointer to the array of values.
+ * This function is aimed at interoperability with other libraries.
+ * \sa innerIndexPtr(), outerIndexPtr() */
+ inline Scalar* valuePtr() { return &m_data.value(0); }
+
+ /** \returns a const pointer to the array of inner indices.
+ * This function is aimed at interoperability with other libraries.
+ * \sa valuePtr(), outerIndexPtr() */
+ inline const Index* innerIndexPtr() const { return &m_data.index(0); }
+ /** \returns a non-const pointer to the array of inner indices.
+ * This function is aimed at interoperability with other libraries.
+ * \sa valuePtr(), outerIndexPtr() */
+ inline Index* innerIndexPtr() { return &m_data.index(0); }
+
+ /** \returns a const pointer to the array of the starting positions of the inner vectors.
+ * This function is aimed at interoperability with other libraries.
+ * \sa valuePtr(), innerIndexPtr() */
+ inline const Index* outerIndexPtr() const { return m_outerIndex; }
+ /** \returns a non-const pointer to the array of the starting positions of the inner vectors.
+ * This function is aimed at interoperability with other libraries.
+ * \sa valuePtr(), innerIndexPtr() */
+ inline Index* outerIndexPtr() { return m_outerIndex; }
+
+ /** \returns a const pointer to the array of the number of non zeros of the inner vectors.
+ * This function is aimed at interoperability with other libraries.
+ * \warning it returns the null pointer 0 in compressed mode */
+ inline const Index* innerNonZeroPtr() const { return m_innerNonZeros; }
+ /** \returns a non-const pointer to the array of the number of non zeros of the inner vectors.
+ * This function is aimed at interoperability with other libraries.
+ * \warning it returns the null pointer 0 in compressed mode */
+ inline Index* innerNonZeroPtr() { return m_innerNonZeros; }
/** \internal */
inline Storage& data() { return m_data; }
@@ -862,7 +868,7 @@ class SparseMatrix<Scalar,_Options,_Index>::InnerIterator
{
public:
InnerIterator(const SparseMatrix& mat, Index outer)
- : m_values(mat._valuePtr()), m_indices(mat._innerIndexPtr()), m_outer(outer), m_id(mat.m_outerIndex[outer])
+ : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer), m_id(mat.m_outerIndex[outer])
{
if(mat.compressed())
m_end = mat.m_outerIndex[outer+1];
@@ -895,7 +901,7 @@ class SparseMatrix<Scalar,_Options,_Index>::ReverseInnerIterator
{
public:
ReverseInnerIterator(const SparseMatrix& mat, Index outer)
- : m_values(mat._valuePtr()), m_indices(mat._innerIndexPtr()), m_outer(outer), m_start(mat.m_outerIndex[outer])
+ : m_values(mat.valuePtr()), m_indices(mat.innerIndexPtr()), m_outer(outer), m_start(mat.m_outerIndex[outer])
{
if(mat.compressed())
m_id = mat.m_outerIndex[outer+1];
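
The InnerIterator and ReverseInnerIterator above are thin cursors over exactly these arrays, so an iterator traversal and a raw-pointer traversal visit the same entries. A small consistency check as a sketch, assuming a column-major matrix in compressed mode:

#include <Eigen/Sparse>
#include <cassert>

// Sketch: for each column j, walk it once with InnerIterator and once with the
// raw pointers; both views must agree entry by entry.
void check_equivalence(const Eigen::SparseMatrix<double>& m)
{
  typedef Eigen::SparseMatrix<double> SpMat;
  for (int j = 0; j < m.outerSize(); ++j)
  {
    int p = m.outerIndexPtr()[j];
    for (SpMat::InnerIterator it(m, j); it; ++it, ++p)
    {
      assert(it.row()   == m.innerIndexPtr()[p]);
      assert(it.value() == m.valuePtr()[p]);
    }
  }
}
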
diff --git a/Eigen/src/SparseCore/SparseSelfAdjointView.h b/Eigen/src/SparseCore/SparseSelfAdjointView.h
index 0becb8073..fc6f56adc 100644
--- a/Eigen/src/SparseCore/SparseSelfAdjointView.h
+++ b/Eigen/src/SparseCore/SparseSelfAdjointView.h
@@ -326,11 +326,11 @@ void permute_symm_to_fullsymm(const MatrixType& mat, SparseMatrix<typename Matri
// reserve space
dest.reserve(nnz);
- dest._outerIndexPtr()[0] = 0;
+ dest.outerIndexPtr()[0] = 0;
for(Index j=0; j<size; ++j)
- dest._outerIndexPtr()[j+1] = dest._outerIndexPtr()[j] + count[j];
+ dest.outerIndexPtr()[j+1] = dest.outerIndexPtr()[j] + count[j];
for(Index j=0; j<size; ++j)
- count[j] = dest._outerIndexPtr()[j];
+ count[j] = dest.outerIndexPtr()[j];
// copy data
for(Index j = 0; j<size; ++j)
@@ -343,17 +343,17 @@ void permute_symm_to_fullsymm(const MatrixType& mat, SparseMatrix<typename Matri
if(i==j)
{
int k = count[ip]++;
- dest._innerIndexPtr()[k] = ip;
- dest._valuePtr()[k] = it.value();
+ dest.innerIndexPtr()[k] = ip;
+ dest.valuePtr()[k] = it.value();
}
else if((UpLo==Lower && i>j) || (UpLo==Upper && i<j))
{
int k = count[jp]++;
- dest._innerIndexPtr()[k] = ip;
- dest._valuePtr()[k] = it.value();
+ dest.innerIndexPtr()[k] = ip;
+ dest.valuePtr()[k] = it.value();
k = count[ip]++;
- dest._innerIndexPtr()[k] = jp;
- dest._valuePtr()[k] = internal::conj(it.value());
+ dest.innerIndexPtr()[k] = jp;
+ dest.valuePtr()[k] = internal::conj(it.value());
}
}
}
@@ -386,12 +386,12 @@ void permute_symm_to_symm(const MatrixType& mat, SparseMatrix<typename MatrixTyp
count[DstUpLo==Lower ? (std::min)(ip,jp) : (std::max)(ip,jp)]++;
}
}
- dest._outerIndexPtr()[0] = 0;
+ dest.outerIndexPtr()[0] = 0;
for(Index j=0; j<size; ++j)
- dest._outerIndexPtr()[j+1] = dest._outerIndexPtr()[j] + count[j];
- dest.resizeNonZeros(dest._outerIndexPtr()[size]);
+ dest.outerIndexPtr()[j+1] = dest.outerIndexPtr()[j] + count[j];
+ dest.resizeNonZeros(dest.outerIndexPtr()[size]);
for(Index j=0; j<size; ++j)
- count[j] = dest._outerIndexPtr()[j];
+ count[j] = dest.outerIndexPtr()[j];
for(Index j = 0; j<size; ++j)
{
@@ -404,12 +404,12 @@ void permute_symm_to_symm(const MatrixType& mat, SparseMatrix<typename MatrixTyp
Index ip = perm? perm[i] : i;
Index k = count[DstUpLo==Lower ? (std::min)(ip,jp) : (std::max)(ip,jp)]++;
- dest._innerIndexPtr()[k] = DstUpLo==Lower ? (std::max)(ip,jp) : (std::min)(ip,jp);
+ dest.innerIndexPtr()[k] = DstUpLo==Lower ? (std::max)(ip,jp) : (std::min)(ip,jp);
if((DstUpLo==Lower && ip<jp) || (DstUpLo==Upper && ip>jp))
- dest._valuePtr()[k] = conj(it.value());
+ dest.valuePtr()[k] = conj(it.value());
else
- dest._valuePtr()[k] = it.value();
+ dest.valuePtr()[k] = it.value();
}
}
}
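
Both permutation helpers above fill dest through outerIndexPtr()/innerIndexPtr()/valuePtr() using the classic two-pass CSC assembly: count the entries per destination column, prefix-sum the counts into column start offsets, then scatter each value into its slot. The same pattern, stripped of Eigen types, looks roughly like this (all names are illustrative):

#include <vector>

struct Triple { int row, col; double value; };

// Sketch of two-pass CSC assembly: pass 1 counts entries per column and turns
// the counts into start offsets (the outer index array); pass 2 scatters each
// (row, col, value) triple into its slot, advancing a per-column cursor.
void assemble_csc(int cols, const std::vector<Triple>& triples,
                  std::vector<int>& outer, std::vector<int>& inner,
                  std::vector<double>& values)
{
  std::vector<int> count(cols, 0);
  for (std::size_t k = 0; k < triples.size(); ++k)
    ++count[triples[k].col];                        // pass 1: entries per column

  outer.assign(cols + 1, 0);
  for (int j = 0; j < cols; ++j)
    outer[j + 1] = outer[j] + count[j];             // prefix sum -> column starts

  inner.resize(triples.size());
  values.resize(triples.size());
  for (int j = 0; j < cols; ++j)
    count[j] = outer[j];                            // reset cursors to column starts
  for (std::size_t k = 0; k < triples.size(); ++k)  // pass 2: scatter
  {
    int p = count[triples[k].col]++;
    inner[p]  = triples[k].row;
    values[p] = triples[k].value;
  }
}
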
diff --git a/Eigen/src/SparseCore/SparseVector.h b/Eigen/src/SparseCore/SparseVector.h
index 363cae3ff..53647af99 100644
--- a/Eigen/src/SparseCore/SparseVector.h
+++ b/Eigen/src/SparseCore/SparseVector.h
@@ -92,11 +92,11 @@ class SparseVector
EIGEN_STRONG_INLINE Index innerSize() const { return m_size; }
EIGEN_STRONG_INLINE Index outerSize() const { return 1; }
- EIGEN_STRONG_INLINE const Scalar* _valuePtr() const { return &m_data.value(0); }
- EIGEN_STRONG_INLINE Scalar* _valuePtr() { return &m_data.value(0); }
+ EIGEN_STRONG_INLINE const Scalar* valuePtr() const { return &m_data.value(0); }
+ EIGEN_STRONG_INLINE Scalar* valuePtr() { return &m_data.value(0); }
- EIGEN_STRONG_INLINE const Index* _innerIndexPtr() const { return &m_data.index(0); }
- EIGEN_STRONG_INLINE Index* _innerIndexPtr() { return &m_data.index(0); }
+ EIGEN_STRONG_INLINE const Index* innerIndexPtr() const { return &m_data.index(0); }
+ EIGEN_STRONG_INLINE Index* innerIndexPtr() { return &m_data.index(0); }
inline Scalar coeff(Index row, Index col) const
{