Eigen-unsupported 3.2.5
DynamicSparseMatrix.h
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008-2009 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_DYNAMIC_SPARSEMATRIX_H
#define EIGEN_DYNAMIC_SPARSEMATRIX_H

namespace Eigen {

namespace internal {
template<typename _Scalar, int _Options, typename _Index>
struct traits<DynamicSparseMatrix<_Scalar, _Options, _Index> >
{
  typedef _Scalar Scalar;
  typedef _Index Index;
  typedef Sparse StorageKind;
  typedef MatrixXpr XprKind;
  enum {
    RowsAtCompileTime = Dynamic,
    ColsAtCompileTime = Dynamic,
    MaxRowsAtCompileTime = Dynamic,
    MaxColsAtCompileTime = Dynamic,
    Flags = _Options | NestByRefBit | LvalueBit,
    CoeffReadCost = NumTraits<Scalar>::ReadCost,
    SupportedAccessPatterns = OuterRandomAccessPattern
  };
};
}

template<typename _Scalar, int _Options, typename _Index>
class DynamicSparseMatrix
  : public SparseMatrixBase<DynamicSparseMatrix<_Scalar, _Options, _Index> >
{
  public:
    EIGEN_SPARSE_PUBLIC_INTERFACE(DynamicSparseMatrix)
    // FIXME: why are these operators already available?
    // EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(DynamicSparseMatrix, +=)
    // EIGEN_SPARSE_INHERIT_ASSIGNMENT_OPERATOR(DynamicSparseMatrix, -=)
    typedef MappedSparseMatrix<Scalar,Flags> Map;
    using Base::IsRowMajor;
    using Base::operator=;
    enum {
      Options = _Options
    };

  protected:

    typedef DynamicSparseMatrix<Scalar,(Flags&~RowMajorBit)|(IsRowMajor?RowMajorBit:0)> TransposedSparseMatrix;

    Index m_innerSize;
    std::vector<internal::CompressedStorage<Scalar,Index> > m_data;

  public:

    inline Index rows() const { return IsRowMajor ? outerSize() : m_innerSize; }
    inline Index cols() const { return IsRowMajor ? m_innerSize : outerSize(); }
    inline Index innerSize() const { return m_innerSize; }
    inline Index outerSize() const { return static_cast<Index>(m_data.size()); }
    inline Index innerNonZeros(Index j) const { return m_data[j].size(); }

    std::vector<internal::CompressedStorage<Scalar,Index> >& _data() { return m_data; }
    const std::vector<internal::CompressedStorage<Scalar,Index> >& _data() const { return m_data; }

    inline Scalar coeff(Index row, Index col) const
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      return m_data[outer].at(inner);
    }

    inline Scalar& coeffRef(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      return m_data[outer].atWithInsertion(inner);
    }

    class InnerIterator;
    class ReverseInnerIterator;

    void setZero()
    {
      for (Index j=0; j<outerSize(); ++j)
        m_data[j].clear();
    }

    Index nonZeros() const
    {
      Index res = 0;
      for (Index j=0; j<outerSize(); ++j)
        res += static_cast<Index>(m_data[j].size());
      return res;
    }

    void reserve(Index reserveSize = 1000)
    {
      if (outerSize()>0)
      {
        Index reserveSizePerVector = (std::max)(reserveSize/outerSize(),Index(4));
        for (Index j=0; j<outerSize(); ++j)
        {
          m_data[j].reserve(reserveSizePerVector);
        }
      }
    }

    inline void startVec(Index /*outer*/) {}

    inline Scalar& insertBack(Index row, Index col)
    {
      return insertBackByOuterInner(IsRowMajor?row:col, IsRowMajor?col:row);
    }

    inline Scalar& insertBackByOuterInner(Index outer, Index inner)
    {
      eigen_assert(outer<Index(m_data.size()) && inner<m_innerSize && "out of range");
      eigen_assert(((m_data[outer].size()==0) || (m_data[outer].index(m_data[outer].size()-1)<inner))
                && "wrong sorted insertion");
      m_data[outer].append(0, inner);
      return m_data[outer].value(m_data[outer].size()-1);
    }

    inline Scalar& insert(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;

      Index startId = 0;
      Index id = static_cast<Index>(m_data[outer].size()) - 1;
      m_data[outer].resize(id+2,1);

      while ( (id >= startId) && (m_data[outer].index(id) > inner) )
      {
        m_data[outer].index(id+1) = m_data[outer].index(id);
        m_data[outer].value(id+1) = m_data[outer].value(id);
        --id;
      }
      m_data[outer].index(id+1) = inner;
      m_data[outer].value(id+1) = 0;
      return m_data[outer].value(id+1);
    }

    inline void finalize() {}

    void prune(Scalar reference, RealScalar epsilon = NumTraits<RealScalar>::dummy_precision())
    {
      for (Index j=0; j<outerSize(); ++j)
        m_data[j].prune(reference,epsilon);
    }

    void resize(Index rows, Index cols)
    {
      const Index outerSize = IsRowMajor ? rows : cols;
      m_innerSize = IsRowMajor ? cols : rows;
      setZero();
      if (Index(m_data.size()) != outerSize)
      {
        m_data.resize(outerSize);
      }
    }

    void resizeAndKeepData(Index rows, Index cols)
    {
      const Index outerSize = IsRowMajor ? rows : cols;
      const Index innerSize = IsRowMajor ? cols : rows;
      if (m_innerSize>innerSize)
      {
        // remove all coefficients with innerCoord>=innerSize
        // TODO
        //std::cerr << "not implemented yet\n";
        exit(2);
      }
      if (m_data.size() != outerSize)
      {
        m_data.resize(outerSize);
      }
    }

    EIGEN_DEPRECATED inline DynamicSparseMatrix()
      : m_innerSize(0), m_data(0)
    {
      eigen_assert(innerSize()==0 && outerSize()==0);
    }

    EIGEN_DEPRECATED inline DynamicSparseMatrix(Index rows, Index cols)
      : m_innerSize(0)
    {
      resize(rows, cols);
    }

    template<typename OtherDerived>
    EIGEN_DEPRECATED explicit inline DynamicSparseMatrix(const SparseMatrixBase<OtherDerived>& other)
      : m_innerSize(0)
    {
      Base::operator=(other.derived());
    }

    inline DynamicSparseMatrix(const DynamicSparseMatrix& other)
      : Base(), m_innerSize(0)
    {
      *this = other.derived();
    }

    inline void swap(DynamicSparseMatrix& other)
    {
      //EIGEN_DBG_SPARSE(std::cout << "SparseMatrix:: swap\n");
      std::swap(m_innerSize, other.m_innerSize);
      //std::swap(m_outerSize, other.m_outerSize);
      m_data.swap(other.m_data);
    }

    inline DynamicSparseMatrix& operator=(const DynamicSparseMatrix& other)
    {
      if (other.isRValue())
      {
        swap(other.const_cast_derived());
      }
      else
      {
        resize(other.rows(), other.cols());
        m_data = other.m_data;
      }
      return *this;
    }

    inline ~DynamicSparseMatrix() {}

  public:

    EIGEN_DEPRECATED void startFill(Index reserveSize = 1000)
    {
      setZero();
      reserve(reserveSize);
    }

    EIGEN_DEPRECATED Scalar& fill(Index row, Index col)
    {
      const Index outer = IsRowMajor ? row : col;
      const Index inner = IsRowMajor ? col : row;
      return insertBack(outer,inner);
    }

    EIGEN_DEPRECATED Scalar& fillrand(Index row, Index col)
    {
      return insert(row,col);
    }

    EIGEN_DEPRECATED void endFill() {}

#   ifdef EIGEN_DYNAMICSPARSEMATRIX_PLUGIN
#     include EIGEN_DYNAMICSPARSEMATRIX_PLUGIN
#   endif
};

template<typename Scalar, int _Options, typename _Index>
class DynamicSparseMatrix<Scalar,_Options,_Index>::InnerIterator : public SparseVector<Scalar,_Options,_Index>::InnerIterator
{
    typedef typename SparseVector<Scalar,_Options,_Index>::InnerIterator Base;
  public:
    InnerIterator(const DynamicSparseMatrix& mat, Index outer)
      : Base(mat.m_data[outer]), m_outer(outer)
    {}

    inline Index row() const { return IsRowMajor ? m_outer : Base::index(); }
    inline Index col() const { return IsRowMajor ? Base::index() : m_outer; }

  protected:
    const Index m_outer;
};

template<typename Scalar, int _Options, typename _Index>
class DynamicSparseMatrix<Scalar,_Options,_Index>::ReverseInnerIterator : public SparseVector<Scalar,_Options,_Index>::ReverseInnerIterator
{
    typedef typename SparseVector<Scalar,_Options,_Index>::ReverseInnerIterator Base;
  public:
    ReverseInnerIterator(const DynamicSparseMatrix& mat, Index outer)
      : Base(mat.m_data[outer]), m_outer(outer)
    {}

    inline Index row() const { return IsRowMajor ? m_outer : Base::index(); }
    inline Index col() const { return IsRowMajor ? Base::index() : m_outer; }

  protected:
    const Index m_outer;
};

} // end namespace Eigen

#endif // EIGEN_DYNAMIC_SPARSEMATRIX_H
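For reference, here is a minimal usage sketch of the class listed above. It is not part of the header itself, and it assumes the class is pulled in through the unsupported SparseExtra module as in Eigen 3.2.x. It shows the pattern the header supports: each outer vector (a column for the default column-major layout) owns its own CompressedStorage, so coefficients can be written in arbitrary order with coeffRef()/insert() and later traversed with the nested InnerIterator.

// Illustrative sketch only, not part of DynamicSparseMatrix.h.
// Assumes the unsupported SparseExtra module is available (Eigen 3.2.x).
#include <iostream>
#include <unsupported/Eigen/SparseExtra>

int main()
{
  // 4x4 column-major dynamic sparse matrix; the deprecated (rows, cols)
  // constructor is still provided by the header above.
  Eigen::DynamicSparseMatrix<double> m(4, 4);
  m.reserve(16);            // spread over the per-column storage vectors

  m.coeffRef(1, 2) = 3.5;   // inserts (or updates) a coefficient in any order
  m.coeffRef(3, 0) = -1.0;
  m.insert(0, 1) = 2.0;     // sorted insertion within column 1

  // Traverse the stored coefficients of one outer vector (here: column 2).
  for (Eigen::DynamicSparseMatrix<double>::InnerIterator it(m, 2); it; ++it)
    std::cout << "(" << it.row() << "," << it.col() << ") = " << it.value() << "\n";

  std::cout << "nonZeros: " << m.nonZeros() << std::endl;
  return 0;
}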