From f41959ccb2d9d4c722fe8fc3351401d53bcf4900 Mon Sep 17 00:00:00 2001
From: Manjunath Kudlur <keveman@gmail.com>
Date: Fri, 6 Nov 2015 16:27:58 -0800
Subject: TensorFlow: Initial commit of TensorFlow library.

TensorFlow is an open source software library for numerical computation
using data flow graphs.

Base CL: 107276108
---
 .../eigen3/Eigen/src/SparseCore/SparseVector.h | 447 +++++++++++++++++++++
 1 file changed, 447 insertions(+)
 create mode 100644 third_party/eigen3/Eigen/src/SparseCore/SparseVector.h

diff --git a/third_party/eigen3/Eigen/src/SparseCore/SparseVector.h b/third_party/eigen3/Eigen/src/SparseCore/SparseVector.h
new file mode 100644
index 0000000000..7e15c814b6
--- /dev/null
+++ b/third_party/eigen3/Eigen/src/SparseCore/SparseVector.h
@@ -0,0 +1,447 @@
+// This file is part of Eigen, a lightweight C++ template library
+// for linear algebra.
+//
+// Copyright (C) 2008-2009 Gael Guennebaud <gael.guennebaud@inria.fr>
+//
+// This Source Code Form is subject to the terms of the Mozilla
+// Public License v. 2.0. If a copy of the MPL was not distributed
+// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#ifndef EIGEN_SPARSEVECTOR_H
+#define EIGEN_SPARSEVECTOR_H
+
+namespace Eigen {
+
+/** \ingroup SparseCore_Module
+  * \class SparseVector
+  *
+  * \brief a sparse vector class
+  *
+  * \tparam _Scalar the scalar type, i.e. the type of the coefficients
+  *
+  * See http://www.netlib.org/linalg/html_templates/node91.html for details on the storage scheme.
+  *
+  * This class can be extended with the help of the plugin mechanism described on the page
+  * \ref TopicCustomizingEigen by defining the preprocessor symbol \c EIGEN_SPARSEVECTOR_PLUGIN.
+  */
+
+namespace internal {
+template<typename _Scalar, int _Options, typename _Index>
+struct traits<SparseVector<_Scalar, _Options, _Index> >
+{
+  typedef _Scalar Scalar;
+  typedef _Index Index;
+  typedef Sparse StorageKind;
+  typedef MatrixXpr XprKind;
+  enum {
+    IsColVector = (_Options & RowMajorBit) ? 0 : 1,
+
+    RowsAtCompileTime = IsColVector ? Dynamic : 1,
+    ColsAtCompileTime = IsColVector ? 1 : Dynamic,
+    MaxRowsAtCompileTime = RowsAtCompileTime,
+    MaxColsAtCompileTime = ColsAtCompileTime,
+    Flags = _Options | NestByRefBit | LvalueBit | (IsColVector ? 0 : RowMajorBit),
+    CoeffReadCost = NumTraits<Scalar>::ReadCost,
+    SupportedAccessPatterns = InnerRandomAccessPattern
+  };
+};
+
+// Sparse-Vector-Assignment kinds:
+enum {
+  SVA_RuntimeSwitch,
+  SVA_Inner,
+  SVA_Outer
+};
+
+template< typename Dest, typename Src,
+          int AssignmentKind = !bool(Src::IsVectorAtCompileTime) ? SVA_RuntimeSwitch
+                             : Src::InnerSizeAtCompileTime==1 ? SVA_Outer
+                             : SVA_Inner>
+struct sparse_vector_assign_selector;
+
+}
+
+template<typename _Scalar, int _Options, typename _Index>
+class SparseVector
+  : public SparseMatrixBase<SparseVector<_Scalar, _Options, _Index> >
+{
+    typedef SparseMatrixBase<SparseVector> SparseBase;
+
+  public:
+    EIGEN_SPARSE_PUBLIC_INTERFACE(SparseVector)
+    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, +=)
+    EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(SparseVector, -=)
+
+    typedef internal::CompressedStorage<Scalar,Index> Storage;
+    enum { IsColVector = internal::traits<SparseVector>::IsColVector };
+
+    enum {
+      Options = _Options
+    };
+
+    EIGEN_STRONG_INLINE Index rows() const { return IsColVector ? m_size : 1; }
+    EIGEN_STRONG_INLINE Index cols() const { return IsColVector ? 1 : m_size; }
+    EIGEN_STRONG_INLINE Index innerSize() const { return m_size; }
+    EIGEN_STRONG_INLINE Index outerSize() const { return 1; }
+
+    EIGEN_STRONG_INLINE const Scalar* valuePtr() const { return &m_data.value(0); }
+    EIGEN_STRONG_INLINE Scalar* valuePtr() { return &m_data.value(0); }
+
+    EIGEN_STRONG_INLINE const Index* innerIndexPtr() const { return &m_data.index(0); }
+    EIGEN_STRONG_INLINE Index* innerIndexPtr() { return &m_data.index(0); }
+
+    /** \internal */
+    inline Storage& data() { return m_data; }
+    /** \internal */
+    inline const Storage& data() const { return m_data; }
+
+    inline Scalar coeff(Index row, Index col) const
+    {
+      eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
+      return coeff(IsColVector ? row : col);
+    }
+    inline Scalar coeff(Index i) const
+    {
+      eigen_assert(i>=0 && i<m_size);
+      return m_data.at(i);
+    }
+
+    inline Scalar& coeffRef(Index row, Index col)
+    {
+      eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
+      return coeff(IsColVector ? row : col);
+    }
+
+    /** \returns a reference to the coefficient value at given index \a i
+      * This operation involves a log(rho*size) binary search. If the coefficient does not
+      * exist yet, then a sorted insertion into a sequential buffer is performed, in which
+      * case the insertion is O(size) memory cost.
+      */
+    inline Scalar& coeffRef(Index i)
+    {
+      eigen_assert(i>=0 && i<m_size);
+      return m_data.atWithInsertion(i);
+    }
+
+  public:
+
+    class InnerIterator;
+    class ReverseInnerIterator;
+
+    inline void setZero() { m_data.clear(); }
+
+    /** \returns the number of non-zero coefficients */
+    inline Index nonZeros() const { return static_cast<Index>(m_data.size()); }
+
+    inline void startVec(Index outer)
+    {
+      EIGEN_UNUSED_VARIABLE(outer);
+      eigen_assert(outer==0);
+    }
+
+    inline Scalar& insertBackByOuterInner(Index outer, Index inner)
+    {
+      EIGEN_UNUSED_VARIABLE(outer);
+      eigen_assert(outer==0);
+      return insertBack(inner);
+    }
+    inline Scalar& insertBack(Index i)
+    {
+      m_data.append(0, i);
+      return m_data.value(m_data.size()-1);
+    }
+
+    inline Scalar& insert(Index row, Index col)
+    {
+      eigen_assert(IsColVector ? (col==0 && row>=0 && row<m_size) : (row==0 && col>=0 && col<m_size));
+
+      Index inner = IsColVector ? row : col;
+      Index outer = IsColVector ? col : row;
+      eigen_assert(outer==0);
+      return insert(inner);
+    }
+    Scalar& insert(Index i)
+    {
+      eigen_assert(i>=0 && i<m_size);
+
+      Index startId = 0;
+      Index p = Index(m_data.size()) - 1;
+      // TODO smart realloc
+      m_data.resize(p+2,1);
+
+      // Shift every stored entry with a larger inner index one slot to
+      // the right, then write the new (index, value) pair into the gap.
+      while ( (p >= startId) && (m_data.index(p) > i) )
+      {
+        m_data.index(p+1) = m_data.index(p);
+        m_data.value(p+1) = m_data.value(p);
+        --p;
+      }
+      m_data.index(p+1) = i;
+      m_data.value(p+1) = 0;
+      return m_data.value(p+1);
+    }
+
+    /** Reserves room for \a reserveSize non-zeros. */
+    inline void reserve(Index reserveSize) { m_data.reserve(reserveSize); }
+
+    inline void finalize() {}
+
+    void prune(const Scalar& reference, const RealScalar& epsilon = NumTraits<RealScalar>::dummy_precision())
+    {
+      m_data.prune(reference,epsilon);
+    }
+
+    void resize(Index rows, Index cols)
+    {
+      eigen_assert(rows==1 || cols==1);
+      resize(IsColVector ? rows : cols);
+    }
+
+    void resize(Index newSize)
+    {
+      m_size = newSize;
+      m_data.clear();
+    }
+
+    void resizeNonZeros(Index size) { m_data.resize(size); }
+
+    inline SparseVector() : m_size(0) { check_template_parameters(); resize(0); }
+
+    inline SparseVector(Index size) : m_size(0) { check_template_parameters(); resize(size); }
+
+    inline SparseVector(Index rows, Index cols) : m_size(0) { check_template_parameters(); resize(rows,cols); }
+
+    template<typename OtherDerived>
+    inline SparseVector(const SparseMatrixBase<OtherDerived>& other)
+      : m_size(0)
+    {
+      check_template_parameters();
+      *this = other.derived();
+    }
+
+    inline SparseVector(const SparseVector& other)
+      : SparseBase(other), m_size(0)
+    {
+      check_template_parameters();
+      *this = other.derived();
+    }
+
+    /** Swaps the values of \c *this and \a other.
+      * Overloaded for performance: this version performs a \em shallow swap by swapping pointers and attributes only.
+      * \sa SparseMatrixBase::swap()
+      */
+    inline void swap(SparseVector& other)
+    {
+      std::swap(m_size, other.m_size);
+      m_data.swap(other.m_data);
+    }
+
+    inline SparseVector& operator=(const SparseVector& other)
+    {
+      if (other.isRValue())
+      {
+        swap(other.const_cast_derived());
+      }
+      else
+      {
+        resize(other.size());
+        m_data = other.m_data;
+      }
+      return *this;
+    }
+
+    template<typename OtherDerived>
+    inline SparseVector& operator=(const SparseMatrixBase<OtherDerived>& other)
+    {
+      SparseVector tmp(other.size());
+      internal::sparse_vector_assign_selector<SparseVector,OtherDerived>::run(tmp,other.derived());
+      this->swap(tmp);
+      return *this;
+    }
+
+    #ifndef EIGEN_PARSED_BY_DOXYGEN
+    template<typename Lhs, typename Rhs>
+    inline SparseVector& operator=(const SparseSparseProduct<Lhs,Rhs>& product)
+    {
+      return Base::operator=(product);
+    }
+    #endif
+
+    friend std::ostream & operator << (std::ostream & s, const SparseVector& m)
+    {
+      for (Index i=0; i<m.nonZeros(); ++i)
+        s << "(" << m.m_data.value(i) << "," << m.m_data.index(i) << ") ";
+      s << std::endl;
+      return s;
+    }
+
+    /** Destructor */
+    inline ~SparseVector() {}
+
+    /** Overloaded for performance */
+    Scalar sum() const;
+
+  public:
+
+    /** \internal \deprecated use setZero() and reserve() */
+    EIGEN_DEPRECATED void startFill(Index reserve)
+    {
+      setZero();
+      m_data.reserve(reserve);
+    }
+
+    /** \internal \deprecated use insertBack(Index,Index) */
+    EIGEN_DEPRECATED Scalar& fill(Index r, Index c)
+    {
+      eigen_assert(r==0 || c==0);
+      return fill(IsColVector ? r : c);
+    }
+
+    /** \internal \deprecated use insertBack(Index) */
+    EIGEN_DEPRECATED Scalar& fill(Index i)
+    {
+      m_data.append(0, i);
+      return m_data.value(m_data.size()-1);
+    }
+
+    /** \internal \deprecated use insert(Index,Index) */
+    EIGEN_DEPRECATED Scalar& fillrand(Index r, Index c)
+    {
+      eigen_assert(r==0 || c==0);
+      return fillrand(IsColVector ? r : c);
+    }
+
+    /** \internal \deprecated use insert(Index) */
+    EIGEN_DEPRECATED Scalar& fillrand(Index i)
+    {
+      return insert(i);
+    }
+
+    /** \internal \deprecated use finalize() */
+    EIGEN_DEPRECATED void endFill() {}
+
+    // These two functions were here in the 3.1 release, so let's keep them in case some code relies on them.
+    /** \internal \deprecated use data() */
+    EIGEN_DEPRECATED Storage& _data() { return m_data; }
+    /** \internal \deprecated use data() */
+    EIGEN_DEPRECATED const Storage& _data() const { return m_data; }
+
+#   ifdef EIGEN_SPARSEVECTOR_PLUGIN
+#     include EIGEN_SPARSEVECTOR_PLUGIN
+#   endif
+
+  protected:
+
+    static void check_template_parameters()
+    {
+      EIGEN_STATIC_ASSERT(NumTraits<Index>::IsSigned,THE_INDEX_TYPE_MUST_BE_A_SIGNED_TYPE);
+      EIGEN_STATIC_ASSERT((_Options&(ColMajor|RowMajor))==Options,INVALID_MATRIX_TEMPLATE_PARAMETERS);
+    }
+
+    Storage m_data;
+    Index m_size;
+};
+
+template<typename Scalar, int _Options, typename _Index>
+class SparseVector<Scalar,_Options,_Index>::InnerIterator
+{
+  public:
+    InnerIterator(const SparseVector& vec, Index outer=0)
+      : m_data(vec.m_data), m_id(0), m_end(static_cast<Index>(m_data.size()))
+    {
+      EIGEN_UNUSED_VARIABLE(outer);
+      eigen_assert(outer==0);
+    }
+
+    InnerIterator(const internal::CompressedStorage<Scalar,Index>& data)
+      : m_data(data), m_id(0), m_end(static_cast<Index>(m_data.size()))
+    {}
+
+    inline InnerIterator& operator++() { m_id++; return *this; }
+
+    inline Scalar value() const { return m_data.value(m_id); }
+    inline Scalar& valueRef() { return const_cast<Scalar&>(m_data.value(m_id)); }
+
+    inline Index index() const { return m_data.index(m_id); }
+    inline Index row() const { return IsColVector ? index() : 0; }
+    inline Index col() const { return IsColVector ? 0 : index(); }
+
+    inline operator bool() const { return (m_id < m_end); }
+
+  protected:
+    const internal::CompressedStorage<Scalar,Index>& m_data;
+    Index m_id;
+    const Index m_end;
+};
+
+template<typename Scalar, int _Options, typename _Index>
+class SparseVector<Scalar,_Options,_Index>::ReverseInnerIterator
+{
+  public:
+    ReverseInnerIterator(const SparseVector& vec, Index outer=0)
+      : m_data(vec.m_data), m_id(static_cast<Index>(m_data.size())), m_start(0)
+    {
+      EIGEN_UNUSED_VARIABLE(outer);
+      eigen_assert(outer==0);
+    }
+
+    ReverseInnerIterator(const internal::CompressedStorage<Scalar,Index>& data)
+      : m_data(data), m_id(static_cast<Index>(m_data.size())), m_start(0)
+    {}
+
+    inline ReverseInnerIterator& operator--() { m_id--; return *this; }
+
+    inline Scalar value() const { return m_data.value(m_id-1); }
+    inline Scalar& valueRef() { return const_cast<Scalar&>(m_data.value(m_id-1)); }
+
+    inline Index index() const { return m_data.index(m_id-1); }
+    inline Index row() const { return IsColVector ? index() : 0; }
+    inline Index col() const { return IsColVector ? 0 : index(); }
+
+    inline operator bool() const { return (m_id > m_start); }
+
+  protected:
+    const internal::CompressedStorage<Scalar,Index>& m_data;
+    Index m_id;
+    const Index m_start;
+};
+
+namespace internal {
+
+template< typename Dest, typename Src>
+struct sparse_vector_assign_selector<Dest,Src,SVA_Inner> {
+  static void run(Dest& dst, const Src& src) {
+    eigen_internal_assert(src.innerSize()==src.size());
+    for(typename Src::InnerIterator it(src, 0); it; ++it)
+      dst.insert(it.index()) = it.value();
+  }
+};
+
+template< typename Dest, typename Src>
+struct sparse_vector_assign_selector<Dest,Src,SVA_Outer> {
+  static void run(Dest& dst, const Src& src) {
+    eigen_internal_assert(src.outerSize()==src.size());
+    for(typename Dest::Index i=0; i<src.size(); ++i)
+    {
+      typename Src::InnerIterator it(src, i);
+      if(it)
+        dst.insert(i) = it.value();
+    }
+  }
+};
+
+template< typename Dest, typename Src>
+struct sparse_vector_assign_selector<Dest,Src,SVA_RuntimeSwitch> {
+  static void run(Dest& dst, const Src& src) {
+    if(src.outerSize()==1)  sparse_vector_assign_selector<Dest,Src,SVA_Inner>::run(dst, src);
+    else                    sparse_vector_assign_selector<Dest,Src,SVA_Outer>::run(dst, src);
+  }
+};
+
+}
+
+} // end namespace Eigen
+
+#endif // EIGEN_SPARSEVECTOR_H
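
For context, the class vendored by this patch is used through Eigen's ordinary sparse API. The sketch below is an illustrative usage example, not part of the commit: it exercises only members visible in the diff (reserve(), insert(), coeffRef(), nonZeros(), InnerIterator) and assumes the vendored third_party/eigen3 tree is on the include path so that <Eigen/SparseCore> resolves.

    #include <Eigen/SparseCore>
    #include <iostream>

    int main() {
      // Sparse column vector with 1000 coefficients, three of them stored.
      Eigen::SparseVector<double> v(1000);
      v.reserve(3);           // preallocate the compressed storage
      v.insert(3)  = 1.0;     // sorted insertion; the entry must not already exist
      v.insert(42) = -2.5;
      v.coeffRef(7) = 0.5;    // binary search, inserting the entry if it is absent
      std::cout << "nonZeros: " << v.nonZeros() << "\n";
      // Iterate over the stored entries only, in increasing index order.
      for (Eigen::SparseVector<double>::InnerIterator it(v); it; ++it)
        std::cout << "v[" << it.index() << "] = " << it.value() << "\n";
      return 0;
    }

Because insert() keeps the single (index, value) buffer sorted, each out-of-order insertion costs O(nnz); when indices arrive in increasing order, insertBack() appends without searching and is the cheaper way to build the vector.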