// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2012-2013 Desire Nuentsa <desire.nuentsa_wakam@inria.fr>
// Copyright (C) 2012-2014 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.

#ifndef EIGEN_SPARSE_QR_H
#define EIGEN_SPARSE_QR_H

namespace Eigen {

template<typename MatrixType, typename OrderingType> class SparseQR;
template<typename SparseQRType> struct SparseQRMatrixQReturnType;
template<typename SparseQRType> struct SparseQRMatrixQTransposeReturnType;
template<typename SparseQRType, typename Derived> struct SparseQR_QProduct;
namespace internal {
  template <typename SparseQRType> struct traits<SparseQRMatrixQReturnType<SparseQRType> >
  {
    typedef typename SparseQRType::MatrixType ReturnType;
    typedef typename ReturnType::StorageIndex StorageIndex;
    typedef typename ReturnType::StorageKind StorageKind;
    enum {
      RowsAtCompileTime = Dynamic,
      ColsAtCompileTime = Dynamic
    };
  };
  template <typename SparseQRType> struct traits<SparseQRMatrixQTransposeReturnType<SparseQRType> >
  {
    typedef typename SparseQRType::MatrixType ReturnType;
  };
  template <typename SparseQRType, typename Derived> struct traits<SparseQR_QProduct<SparseQRType, Derived> >
  {
    typedef typename Derived::PlainObject ReturnType;
  };
} // End namespace internal

/**
  * \ingroup SparseQR_Module
  * \class SparseQR
  * \brief Sparse left-looking rank-revealing QR factorization
  * 
  * This class implements a left-looking rank-revealing QR decomposition 
  * of sparse matrices. When a column has a norm less than a given tolerance
  * it is implicitly permuted to the end. The QR factorization thus obtained is 
  * given by A*P = Q*R where R is upper triangular or trapezoidal. 
  * 
  * P is the column permutation which is the product of the fill-reducing and the
  * rank-revealing permutations. Use colsPermutation() to get it.
  * 
  * Q is the orthogonal matrix represented as products of Householder reflectors. 
  * Use matrixQ() to get an expression and matrixQ().adjoint() to get the adjoint.
  * You can then apply it to a vector.
  * 
  * R is the sparse triangular or trapezoidal matrix. The latter occurs when A is rank-deficient.
  * matrixR().topLeftCorner(rank(), rank()) always returns a triangular factor of full rank.
  * 
  * \tparam _MatrixType The type of the sparse matrix A, must be a column-major SparseMatrix<>
  * \tparam _OrderingType The fill-reducing ordering method. See the \link OrderingMethods_Module 
  *  OrderingMethods \endlink module for the list of built-in and external ordering methods.
  * 
  * \implsparsesolverconcept
  *
  * \warning The input sparse matrix A must be in compressed mode (see SparseMatrix::makeCompressed()).
  * \warning For complex matrices matrixQ().transpose() will actually return the adjoint matrix.
  * 
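  * A minimal usage sketch (assuming a compressed, column-major SparseMatrix<double> A and a
  * VectorXd b that are already filled; COLAMDOrdering is one of the built-in orderings):
  * \code
  * SparseQR<SparseMatrix<double>, COLAMDOrdering<int> > qr(A);  // analyzePattern() + factorize()
  * if(qr.info() != Success) std::cerr << qr.lastErrorMessage() << "\n";
  * VectorXd x = qr.solve(b);                                    // least-squares solution of A*x = b
  * \endcode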
  */
template<typename _MatrixType, typename _OrderingType>
class SparseQR : public SparseSolverBase<SparseQR<_MatrixType,_OrderingType> >
{
  protected:
    typedef SparseSolverBase<SparseQR<_MatrixType,_OrderingType> > Base;
    using Base::m_isInitialized;
  public:
    using Base::_solve_impl;
    typedef _MatrixType MatrixType;
    typedef _OrderingType OrderingType;
    typedef typename MatrixType::Scalar Scalar;
    typedef typename MatrixType::RealScalar RealScalar;
    typedef typename MatrixType::StorageIndex StorageIndex;
    typedef SparseMatrix<Scalar,ColMajor,StorageIndex> QRMatrixType;
    typedef Matrix<StorageIndex, Dynamic, 1> IndexVector;
    typedef Matrix<Scalar, Dynamic, 1> ScalarVector;
    typedef PermutationMatrix<Dynamic, Dynamic, StorageIndex> PermutationType;

    enum {
      ColsAtCompileTime = MatrixType::ColsAtCompileTime,
      MaxColsAtCompileTime = MatrixType::MaxColsAtCompileTime
    };
    
  public:
    SparseQR () :  m_analysisIsok(false), m_lastError(""), m_useDefaultThreshold(true),m_isQSorted(false),m_isEtreeOk(false)
    { }
    
    /** Construct a QR factorization of the matrix \a mat.
      * 
      * \warning The matrix \a mat must be in compressed mode (see SparseMatrix::makeCompressed()).
      * 
      * \sa compute()
      */
    explicit SparseQR(const MatrixType& mat) : m_analysisIsok(false), m_lastError(""), m_useDefaultThreshold(true),m_isQSorted(false),m_isEtreeOk(false)
    {
      compute(mat);
    }
    
    /** Computes the QR factorization of the sparse matrix \a mat.
      * 
      * \warning The matrix \a mat must be in compressed mode (see SparseMatrix::makeCompressed()).
      * 
      * \sa analyzePattern(), factorize()
      */
    void compute(const MatrixType& mat)
    {
      analyzePattern(mat);
      factorize(mat);
    }
    void analyzePattern(const MatrixType& mat);
    void factorize(const MatrixType& mat);
    
    /** \returns the number of rows of the represented matrix. 
      */
    inline Index rows() const { return m_pmat.rows(); }
    
    /** \returns the number of columns of the represented matrix. 
      */
    inline Index cols() const { return m_pmat.cols();}
    
    /** \returns a const reference to the \b sparse upper triangular matrix R of the QR factorization.
      * \warning The entries of the returned matrix are not sorted. This means that using it in algorithms
      *          expecting sorted entries will fail. This includes random coefficient accesses (SparseMatrix::coeff()),
      *          and coefficient-wise operations. Matrix products and triangular solves are fine though.
      *
      * To sort the entries, you can assign it to a row-major matrix, and if a column-major matrix
      * is required, you can copy it again:
      * \code
      * SparseMatrix<double>          R  = qr.matrixR();  // column-major, not sorted!
      * SparseMatrix<double,RowMajor> Rr = qr.matrixR();  // row-major, sorted
      * SparseMatrix<double>          Rc = Rr;            // column-major, sorted
      * \endcode
      */
    const QRMatrixType& matrixR() const { return m_R; }
    
    /** \returns the number of columns that are not linearly dependent (i.e. the numerical rank), as determined by the pivoting threshold.
      *
      * \sa setPivotThreshold()
      */
    Index rank() const
    {
      eigen_assert(m_isInitialized && "The factorization should be called first, use compute()");
      return m_nonzeropivots; 
    }
    
    /** \returns an expression of the matrix Q as products of sparse Householder reflectors.
    * The common usage of this function is to apply it to a dense matrix or vector
    * \code
    * VectorXd B1, B2;
    * // Initialize B1
    * B2 = matrixQ() * B1;
    * \endcode
    *
    * To get a plain SparseMatrix representation of Q:
    * \code
    * SparseMatrix<double> Q;
    * Q = SparseQR<SparseMatrix<double> >(A).matrixQ();
    * \endcode
    * Internally, this call simply performs a sparse product between the matrix Q
    * and a sparse identity matrix. However, because the sparse reflectors are
    * stored unsorted, two transpositions are needed to sort them before
    * performing the product.
    */
    SparseQRMatrixQReturnType<SparseQR> matrixQ() const 
    { return SparseQRMatrixQReturnType<SparseQR>(*this); }
    
    /** \returns a const reference to the column permutation P that was applied to A such that A*P = Q*R
      * It is the combination of the fill-in reducing permutation and numerical column pivoting.
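      *
      * For instance, a short sketch of forming the column-permuted matrix A*P explicitly
      * (assuming \c qr is this factorization object and \c A is the SparseMatrix<double> it was computed from):
      * \code
      * SparseMatrix<double> AP = A * qr.colsPermutation();  // equals Q*R up to rounding
      * \endcode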
      */
    const PermutationType& colsPermutation() const
    { 
      eigen_assert(m_isInitialized && "Decomposition is not initialized.");
      return m_outputPerm_c;
    }
    
    /** \returns A string describing the type of error.
      * This method is provided to ease debugging, not to handle errors.
      */
    std::string lastErrorMessage() const { return m_lastError; }
    
    /** \internal */
    template<typename Rhs, typename Dest>
    bool _solve_impl(const MatrixBase<Rhs> &B, MatrixBase<Dest> &dest) const
    {
      eigen_assert(m_isInitialized && "The factorization should be called first, use compute()");
      eigen_assert(this->rows() == B.rows() && "SparseQR::solve() : invalid number of rows in the right hand side matrix");

      Index rank = this->rank();
      
      // Compute Q^* * b;
      typename Dest::PlainObject y, b;
      y = this->matrixQ().adjoint() * B;
      b = y;
      
      // Solve with the triangular matrix R
      y.resize((std::max<Index>)(cols(),y.rows()),y.cols());
      y.topRows(rank) = this->matrixR().topLeftCorner(rank, rank).template triangularView<Upper>().solve(b.topRows(rank));
      y.bottomRows(y.rows()-rank).setZero();
      
      // Apply the column permutation
      if (m_perm_c.size())  dest = colsPermutation() * y.topRows(cols());
      else                  dest = y.topRows(cols());
      
      m_info = Success;
      return true;
    }

    /** Sets the threshold that is used to determine linearly dependent columns during the factorization.
      *
      * In practice, if during the factorization the norm of the column that has to be eliminated is below
      * this threshold, then the entire column is treated as zero, and it is moved to the end.
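      *
      * A short sketch (assuming \c A is already set up); the threshold must be set before
      * compute() or factorize() for it to take effect:
      * \code
      * SparseQR<SparseMatrix<double>, COLAMDOrdering<int> > qr;
      * qr.setPivotThreshold(1e-8);  // hypothetical value; by default a threshold scaled from the matrix norm is used
      * qr.compute(A);
      * \endcode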
      */
    void setPivotThreshold(const RealScalar& threshold)
    {
      m_useDefaultThreshold = false;
      m_threshold = threshold;
    }
    
    /** \returns the solution X of \f$ A X = B \f$ using the current decomposition of A.
      *
      * \sa compute()
      */
    template<typename Rhs>
    inline const Solve<SparseQR, Rhs> solve(const MatrixBase<Rhs>& B) const 
    {
      eigen_assert(m_isInitialized && "The factorization should be called first, use compute()");
      eigen_assert(this->rows() == B.rows() && "SparseQR::solve() : invalid number of rows in the right hand side matrix");
      return Solve<SparseQR, Rhs>(*this, B.derived());
    }
    template<typename Rhs>
    inline const Solve<SparseQR, Rhs> solve(const SparseMatrixBase<Rhs>& B) const
    {
      eigen_assert(m_isInitialized && "The factorization should be called first, use compute()");
      eigen_assert(this->rows() == B.rows() && "SparseQR::solve() : invalid number of rows in the right hand side matrix");
      return Solve<SparseQR, Rhs>(*this, B.derived());
    }
    
    /** \brief Reports whether previous computation was successful.
      *
      * \returns \c Success if computation was successful,
      *          \c NumericalIssue if the QR factorization reports a numerical problem
      *          \c InvalidInput if the input matrix is invalid
      *
      */
    ComputationInfo info() const
    {
      eigen_assert(m_isInitialized && "Decomposition is not initialized.");
      return m_info;
    }


    /** \internal */
    inline void _sort_matrix_Q()
    {
      if(this->m_isQSorted) return;
      // The matrix Q is sorted during the transposition
      SparseMatrix<Scalar, RowMajor, Index> mQrm(this->m_Q);
      this->m_Q = mQrm;
      this->m_isQSorted = true;
    }

    
  protected:
    bool m_analysisIsok;
    bool m_factorizationIsok;
    mutable ComputationInfo m_info;
    std::string m_lastError;
    QRMatrixType m_pmat;            // Temporary matrix
    QRMatrixType m_R;               // The triangular factor matrix
    QRMatrixType m_Q;               // The orthogonal reflectors
    ScalarVector m_hcoeffs;         // The Householder coefficients
    PermutationType m_perm_c;       // Fill-reducing  Column  permutation
    PermutationType m_pivotperm;    // The permutation for rank revealing
    PermutationType m_outputPerm_c; // The final column permutation
    RealScalar m_threshold;         // Threshold to determine null Householder reflections
    bool m_useDefaultThreshold;     // Use default threshold
    Index m_nonzeropivots;          // Number of non zero pivots found
    IndexVector m_etree;            // Column elimination tree
    IndexVector m_firstRowElt;      // First element in each row
    bool m_isQSorted;               // whether Q is sorted or not
    bool m_isEtreeOk;               // whether the elimination tree matches the initial input matrix
    
    template <typename, typename > friend struct SparseQR_QProduct;
    
};

/** \brief Preprocessing step of a QR factorization 
  * 
  * \warning The matrix \a mat must be in compressed mode (see SparseMatrix::makeCompressed()).
  * 
  * In this step, the fill-reducing permutation is computed and applied to the columns of A
  * and the column elimination tree is computed as well. Only the sparsity pattern of \a mat is exploited.
  * 
  * \note In this step it is assumed that there is no empty row in the matrix \a mat.
  */
template <typename MatrixType, typename OrderingType>
void SparseQR<MatrixType,OrderingType>::analyzePattern(const MatrixType& mat)
{
  eigen_assert(mat.isCompressed() && "SparseQR requires a sparse matrix in compressed mode. Call .makeCompressed() before passing it to SparseQR");
  // Copy to a column-major matrix if the input is row-major
  typename internal::conditional<MatrixType::IsRowMajor,QRMatrixType,const MatrixType&>::type matCpy(mat);
  // Compute the column fill reducing ordering
  OrderingType ord; 
  ord(matCpy, m_perm_c); 
  Index n = mat.cols();
  Index m = mat.rows();
  Index diagSize = (std::min)(m,n);
  
  if (!m_perm_c.size())
  {
    m_perm_c.resize(n);
    m_perm_c.indices().setLinSpaced(n, 0,StorageIndex(n-1));
  }
  
  // Compute the column elimination tree of the permuted matrix
  m_outputPerm_c = m_perm_c.inverse();
  internal::coletree(matCpy, m_etree, m_firstRowElt, m_outputPerm_c.indices().data());
  m_isEtreeOk = true;
  
  m_R.resize(m, n);
  m_Q.resize(m, diagSize);
  
  // Allocate space for nonzero elements : rough estimation
  m_R.reserve(2*mat.nonZeros()); //FIXME Get a more accurate estimation through symbolic factorization with the etree
  m_Q.reserve(2*mat.nonZeros());
  m_hcoeffs.resize(diagSize);
  m_analysisIsok = true;
}

/** \brief Performs the numerical QR factorization of the input matrix
  * 
  * The function SparseQR::analyzePattern(const MatrixType&) must have been called beforehand with
  * a matrix having the same sparsity pattern as \a mat.
  * 
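  * A sketch of reusing the symbolic analysis when only the numerical values of the matrix
  * change while its sparsity pattern stays the same:
  * \code
  * SparseQR<SparseMatrix<double>, COLAMDOrdering<int> > qr;
  * qr.analyzePattern(A);  // symbolic step
  * qr.factorize(A);       // numerical step
  * // ... update the values stored in A, keeping its pattern ...
  * qr.factorize(A);       // only the numerical step needs to be redone
  * \endcode
  *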
  * \param mat The sparse column-major matrix
  */
template <typename MatrixType, typename OrderingType>
void SparseQR<MatrixType,OrderingType>::factorize(const MatrixType& mat)
{
  using std::abs;
  
  eigen_assert(m_analysisIsok && "analyzePattern() should be called before this step");
  StorageIndex m = StorageIndex(mat.rows());
  StorageIndex n = StorageIndex(mat.cols());
  StorageIndex diagSize = (std::min)(m,n);
  IndexVector mark((std::max)(m,n)); mark.setConstant(-1);  // Record the visited nodes
  IndexVector Ridx(n), Qidx(m);                             // Store temporarily the row indexes for the current column of R and Q
  Index nzcolR, nzcolQ;                                     // Number of nonzero for the current column of R and Q
  ScalarVector tval(m);                                     // The dense vector used to compute the current column
  RealScalar pivotThreshold = m_threshold;
  
  m_R.setZero();
  m_Q.setZero();
  m_pmat = mat;
  if(!m_isEtreeOk)
  {
    m_outputPerm_c = m_perm_c.inverse();
    internal::coletree(m_pmat, m_etree, m_firstRowElt, m_outputPerm_c.indices().data());
    m_isEtreeOk = true;
  }

  m_pmat.uncompress(); // To have the innerNonZeroPtr allocated
  
  // Apply the fill-in reducing permutation lazily:
  {
    // If the input is row major, copy the original column indices,
    // otherwise directly use the input matrix
    // 
    IndexVector originalOuterIndicesCpy;
    const StorageIndex *originalOuterIndices = mat.outerIndexPtr();
    if(MatrixType::IsRowMajor)
    {
      originalOuterIndicesCpy = IndexVector::Map(m_pmat.outerIndexPtr(),n+1);
      originalOuterIndices = originalOuterIndicesCpy.data();
    }
    
    for (int i = 0; i < n; i++)
    {
      Index p = m_perm_c.size() ? m_perm_c.indices()(i) : i;
      m_pmat.outerIndexPtr()[p] = originalOuterIndices[i]; 
      m_pmat.innerNonZeroPtr()[p] = originalOuterIndices[i+1] - originalOuterIndices[i]; 
    }
  }
  
  /* Compute the default threshold as in MatLab, see:
   * Tim Davis, "Algorithm 915, SuiteSparseQR: Multifrontal Multithreaded Rank-Revealing
   * Sparse QR Factorization", ACM Trans. on Math. Soft. 38(1), 2011, Page 8:3 
   */
  if(m_useDefaultThreshold) 
  {
    RealScalar max2Norm = 0.0;
    for (int j = 0; j < n; j++) max2Norm = numext::maxi(max2Norm, m_pmat.col(j).norm());
    if(max2Norm==RealScalar(0))
      max2Norm = RealScalar(1);
    pivotThreshold = 20 * (m + n) * max2Norm * NumTraits<RealScalar>::epsilon();
  }
  
  // Initialize the numerical permutation
  m_pivotperm.setIdentity(n);
  
  StorageIndex nonzeroCol = 0; // Record the number of valid pivots
  m_Q.startVec(0);

  // Left looking rank-revealing QR factorization: compute a column of R and Q at a time
  for (StorageIndex col = 0; col < n; ++col)
  {
    mark.setConstant(-1);
    m_R.startVec(col);
    mark(nonzeroCol) = col;
    Qidx(0) = nonzeroCol;
    nzcolR = 0; nzcolQ = 1;
    bool found_diag = nonzeroCol>=m;
    tval.setZero(); 
    
    // Symbolic factorization: find the nonzero locations of the column k of the factors R and Q, i.e.,
    // all the nodes (with indexes lower than rank) reachable through the column elimination tree (etree) rooted at node k.
    // Note: if the diagonal entry does not exist, then its contribution must be explicitly added,
    // hence the found_diag trick, which allows one extra iteration on the diagonal element when it has not been found.
    for (typename QRMatrixType::InnerIterator itp(m_pmat, col); itp || !found_diag; ++itp)
    {
      StorageIndex curIdx = nonzeroCol;
      if(itp) curIdx = StorageIndex(itp.row());
      if(curIdx == nonzeroCol) found_diag = true;
      
      // Get the nonzero indices of the current column of R
      StorageIndex st = m_firstRowElt(curIdx); // The traversal of the etree starts here
      if (st < 0 )
      {
        m_lastError = "Empty row found during numerical factorization";
        m_info = InvalidInput;
        return;
      }

      // Traverse the etree 
      Index bi = nzcolR;
      for (; mark(st) != col; st = m_etree(st))
      {
        Ridx(nzcolR) = st;  // Add this row to the list,
        mark(st) = col;     // and mark this row as visited
        nzcolR++;
      }

      // Reverse the list to get the topological ordering
      Index nt = nzcolR-bi;
      for(Index i = 0; i < nt/2; i++) std::swap(Ridx(bi+i), Ridx(nzcolR-i-1));
       
      // Copy the current (curIdx, col) value of the input matrix
      if(itp) tval(curIdx) = itp.value();
      else    tval(curIdx) = Scalar(0);
      
      // Compute the pattern of Q(:,k)
      if(curIdx > nonzeroCol && mark(curIdx) != col ) 
      {
        Qidx(nzcolQ) = curIdx;  // Add this row to the pattern of Q,
        mark(curIdx) = col;     // and mark it as visited
        nzcolQ++;
      }
    }

    // Browse all the indexes of R(:,col) in reverse order
    for (Index i = nzcolR-1; i >= 0; i--)
    {
      Index curIdx = Ridx(i);
      
      // Apply the curIdx-th householder vector to the current column (temporarily stored into tval)
      Scalar tdot(0);
      
      // First compute q' * tval
      tdot = m_Q.col(curIdx).dot(tval);

      tdot *= m_hcoeffs(curIdx);
      
      // Then update tval = tval - q * tau
      // FIXME: tval -= tdot * m_Q.col(curIdx) should amount to the same (need to check/add support for efficient "dense ?= sparse")
      for (typename QRMatrixType::InnerIterator itq(m_Q, curIdx); itq; ++itq)
        tval(itq.row()) -= itq.value() * tdot;

      // Detect fill-in for the current column of Q
      if(m_etree(Ridx(i)) == nonzeroCol)
      {
        for (typename QRMatrixType::InnerIterator itq(m_Q, curIdx); itq; ++itq)
        {
          StorageIndex iQ = StorageIndex(itq.row());
          if (mark(iQ) != col)
          {
            Qidx(nzcolQ++) = iQ;  // Add this row to the pattern of Q,
            mark(iQ) = col;       // and mark it as visited
          }
        }
      }
    } // End update current column
    
    Scalar tau = RealScalar(0);
    RealScalar beta = 0;
    
    if(nonzeroCol < diagSize)
    {
      // Compute the Householder reflection that eliminates the current column
      // FIXME this step should call the Householder module.
      Scalar c0 = nzcolQ ? tval(Qidx(0)) : Scalar(0);
      
      // First, the squared norm of Q((col+1):m, col)
      RealScalar sqrNorm = 0.;
      for (Index itq = 1; itq < nzcolQ; ++itq) sqrNorm += numext::abs2(tval(Qidx(itq)));
      if(sqrNorm == RealScalar(0) && numext::imag(c0) == RealScalar(0))
      {
        beta = numext::real(c0);
        tval(Qidx(0)) = 1;
      }
      else
      {
        using std::sqrt;
        beta = sqrt(numext::abs2(c0) + sqrNorm);
        if(numext::real(c0) >= RealScalar(0))
          beta = -beta;
        tval(Qidx(0)) = 1;
        for (Index itq = 1; itq < nzcolQ; ++itq)
          tval(Qidx(itq)) /= (c0 - beta);
        tau = numext::conj((beta-c0) / beta);
          
      }
    }

    // Insert values in R
    for (Index  i = nzcolR-1; i >= 0; i--)
    {
      Index curIdx = Ridx(i);
      if(curIdx < nonzeroCol) 
      {
        m_R.insertBackByOuterInnerUnordered(col, curIdx) = tval(curIdx);
        tval(curIdx) = Scalar(0.);
      }
    }

    if(nonzeroCol < diagSize && abs(beta) >= pivotThreshold)
    {
      m_R.insertBackByOuterInner(col, nonzeroCol) = beta;
      // The householder coefficient
      m_hcoeffs(nonzeroCol) = tau;
      // Record the householder reflections
      for (Index itq = 0; itq < nzcolQ; ++itq)
      {
        Index iQ = Qidx(itq);
        m_Q.insertBackByOuterInnerUnordered(nonzeroCol,iQ) = tval(iQ);
        tval(iQ) = Scalar(0.);
      }
      nonzeroCol++;
      if(nonzeroCol<diagSize)
        m_Q.startVec(nonzeroCol);
    }
    else
    {
      // Zero pivot found: implicitly move this column to the end
      for (Index j = nonzeroCol; j < n-1; j++) 
        std::swap(m_pivotperm.indices()(j), m_pivotperm.indices()[j+1]);
      
      // Recompute the column elimination tree
      internal::coletree(m_pmat, m_etree, m_firstRowElt, m_pivotperm.indices().data());
      m_isEtreeOk = false;
    }
  }
  
  m_hcoeffs.tail(diagSize-nonzeroCol).setZero();
  
  // Finalize the column pointers of the sparse matrices R and Q
  m_Q.finalize();
  m_Q.makeCompressed();
  m_R.finalize();
  m_R.makeCompressed();
  m_isQSorted = false;

  m_nonzeropivots = nonzeroCol;
  
  if(nonzeroCol<n)
  {
    // Permute the triangular factor to put the 'dead' columns to the end
    QRMatrixType tempR(m_R);
    m_R = tempR * m_pivotperm;
    
    // Update the column permutation
    m_outputPerm_c = m_outputPerm_c * m_pivotperm;
  }
  
  m_isInitialized = true; 
  m_factorizationIsok = true;
  m_info = Success;
}

template <typename SparseQRType, typename Derived>
struct SparseQR_QProduct : ReturnByValue<SparseQR_QProduct<SparseQRType, Derived> >
{
  typedef typename SparseQRType::QRMatrixType MatrixType;
  typedef typename SparseQRType::Scalar Scalar;
  // Get the references 
  SparseQR_QProduct(const SparseQRType& qr, const Derived& other, bool transpose) : 
  m_qr(qr),m_other(other),m_transpose(transpose) {}
  inline Index rows() const { return m_qr.matrixQ().rows(); }
  inline Index cols() const { return m_other.cols(); }
  
  // Assign to a vector
  template<typename DesType>
  void evalTo(DesType& res) const
  {
    Index m = m_qr.rows();
    Index n = m_qr.cols();
    Index diagSize = (std::min)(m,n);
    res = m_other;
    if (m_transpose)
    {
      eigen_assert(m_qr.m_Q.rows() == m_other.rows() && "Non conforming object sizes");
      //Compute res = Q' * other column by column
      for(Index j = 0; j < res.cols(); j++){
        for (Index k = 0; k < diagSize; k++)
        {
          Scalar tau = Scalar(0);
          tau = m_qr.m_Q.col(k).dot(res.col(j));
          if(tau==Scalar(0)) continue;
          tau = tau * m_qr.m_hcoeffs(k);
          res.col(j) -= tau * m_qr.m_Q.col(k);
        }
      }
    }
    else
    {
      eigen_assert(m_qr.matrixQ().cols() == m_other.rows() && "Non conforming object sizes");

      res.conservativeResize(rows(), cols());

      // Compute res = Q * other column by column
      for(Index j = 0; j < res.cols(); j++)
      {
        for (Index k = diagSize-1; k >=0; k--)
        {
          Scalar tau = Scalar(0);
          tau = m_qr.m_Q.col(k).dot(res.col(j));
          if(tau==Scalar(0)) continue;
          tau = tau * numext::conj(m_qr.m_hcoeffs(k));
          res.col(j) -= tau * m_qr.m_Q.col(k);
        }
      }
    }
  }
  
  const SparseQRType& m_qr;
  const Derived& m_other;
  bool m_transpose; // TODO this actually means adjoint
};

template<typename SparseQRType>
struct SparseQRMatrixQReturnType : public EigenBase<SparseQRMatrixQReturnType<SparseQRType> >
{  
  typedef typename SparseQRType::Scalar Scalar;
  typedef Matrix<Scalar,Dynamic,Dynamic> DenseMatrix;
  enum {
    RowsAtCompileTime = Dynamic,
    ColsAtCompileTime = Dynamic
  };
  explicit SparseQRMatrixQReturnType(const SparseQRType& qr) : m_qr(qr) {}
  template<typename Derived>
  SparseQR_QProduct<SparseQRType, Derived> operator*(const MatrixBase<Derived>& other)
  {
    return SparseQR_QProduct<SparseQRType,Derived>(m_qr,other.derived(),false);
  }
  // To use for operations with the adjoint of Q
  SparseQRMatrixQTransposeReturnType<SparseQRType> adjoint() const
  {
    return SparseQRMatrixQTransposeReturnType<SparseQRType>(m_qr);
  }
  inline Index rows() const { return m_qr.rows(); }
  inline Index cols() const { return m_qr.rows(); }
  // To use for operations with the transpose of Q FIXME this is the same as adjoint at the moment
  SparseQRMatrixQTransposeReturnType<SparseQRType> transpose() const
  {
    return SparseQRMatrixQTransposeReturnType<SparseQRType>(m_qr);
  }
  const SparseQRType& m_qr;
};

// TODO this actually represents the adjoint of Q
template<typename SparseQRType>
struct SparseQRMatrixQTransposeReturnType
{
  explicit SparseQRMatrixQTransposeReturnType(const SparseQRType& qr) : m_qr(qr) {}
  template<typename Derived>
  SparseQR_QProduct<SparseQRType,Derived> operator*(const MatrixBase<Derived>& other)
  {
    return SparseQR_QProduct<SparseQRType,Derived>(m_qr,other.derived(), true);
  }
  const SparseQRType& m_qr;
};

namespace internal {
  
template<typename SparseQRType>
struct evaluator_traits<SparseQRMatrixQReturnType<SparseQRType> >
{
  typedef typename SparseQRType::MatrixType MatrixType;
  typedef typename storage_kind_to_evaluator_kind<typename MatrixType::StorageKind>::Kind Kind;
  typedef SparseShape Shape;
};

template< typename DstXprType, typename SparseQRType>
struct Assignment<DstXprType, SparseQRMatrixQReturnType<SparseQRType>, internal::assign_op<typename DstXprType::Scalar,typename DstXprType::Scalar>, Sparse2Sparse>
{
  typedef SparseQRMatrixQReturnType<SparseQRType> SrcXprType;
  typedef typename DstXprType::Scalar Scalar;
  typedef typename DstXprType::StorageIndex StorageIndex;
  static void run(DstXprType &dst, const SrcXprType &src, const internal::assign_op<Scalar,Scalar> &/*func*/)
  {
    typename DstXprType::PlainObject idMat(src.rows(), src.cols());
    idMat.setIdentity();
    // Sort the sparse householder reflectors if needed
    const_cast<SparseQRType *>(&src.m_qr)->_sort_matrix_Q();
    dst = SparseQR_QProduct<SparseQRType, DstXprType>(src.m_qr, idMat, false);
  }
};

template< typename DstXprType, typename SparseQRType>
struct Assignment<DstXprType, SparseQRMatrixQReturnType<SparseQRType>, internal::assign_op<typename DstXprType::Scalar,typename DstXprType::Scalar>, Sparse2Dense>
{
  typedef SparseQRMatrixQReturnType<SparseQRType> SrcXprType;
  typedef typename DstXprType::Scalar Scalar;
  typedef typename DstXprType::StorageIndex StorageIndex;
  static void run(DstXprType &dst, const SrcXprType &src, const internal::assign_op<Scalar,Scalar> &/*func*/)
  {
    dst = src.m_qr.matrixQ() * DstXprType::Identity(src.m_qr.rows(), src.m_qr.rows());
  }
};

} // end namespace internal

} // end namespace Eigen

#endif