path: root/Eigen/src/IterativeLinearSolvers
author     Gael Guennebaud <g.gael@free.fr>   2013-07-17 13:21:35 +0200
committer  Gael Guennebaud <g.gael@free.fr>   2013-07-17 13:21:35 +0200
commit     2f593ee67cd2ce995fcf52560daf88774c7c64f2 (patch)
tree       973b12ded629a9778d2cb05961edba799d8e908e /Eigen/src/IterativeLinearSolvers
parent     231d4a6fdae342af5f2a482104539eafe4fc5cdb (diff)
parent     20e535e1429cdb2f2dace3e2e6915e33968aa198 (diff)
merge with main branch
Diffstat (limited to 'Eigen/src/IterativeLinearSolvers')
-rw-r--r--  Eigen/src/IterativeLinearSolvers/BiCGSTAB.h           | 26
-rw-r--r--  Eigen/src/IterativeLinearSolvers/ConjugateGradient.h  |  4
-rw-r--r--  Eigen/src/IterativeLinearSolvers/IncompleteLUT.h      |  5
3 files changed, 24 insertions(+), 11 deletions(-)
diff --git a/Eigen/src/IterativeLinearSolvers/BiCGSTAB.h b/Eigen/src/IterativeLinearSolvers/BiCGSTAB.h
index fbefb696f..6fc6ab852 100644
--- a/Eigen/src/IterativeLinearSolvers/BiCGSTAB.h
+++ b/Eigen/src/IterativeLinearSolvers/BiCGSTAB.h
@@ -43,8 +43,9 @@ bool bicgstab(const MatrixType& mat, const Rhs& rhs, Dest& x,
VectorType r = rhs - mat * x;
VectorType r0 = r;
- RealScalar r0_sqnorm = rhs.squaredNorm();
- if(r0_sqnorm == 0)
+ RealScalar r0_sqnorm = r0.squaredNorm();
+ RealScalar rhs_sqnorm = rhs.squaredNorm();
+ if(rhs_sqnorm == 0)
{
x.setZero();
return true;
@@ -61,13 +62,22 @@ bool bicgstab(const MatrixType& mat, const Rhs& rhs, Dest& x,
RealScalar tol2 = tol*tol;
int i = 0;
+ int restarts = 0;
- while ( r.squaredNorm()/r0_sqnorm > tol2 && i<maxIters )
+ while ( r.squaredNorm()/rhs_sqnorm > tol2 && i<maxIters )
{
Scalar rho_old = rho;
rho = r0.dot(r);
- if (rho == Scalar(0)) return false; /* New search directions cannot be found */
+ if (internal::isMuchSmallerThan(rho,r0_sqnorm))
+ {
+ // The new residual vector became too orthogonal to the arbitrarily chosen direction r0
+ // Let's restart with a new r0:
+ r0 = r;
+ rho = r0_sqnorm = r.squaredNorm();
+ if(restarts++ == 0)
+ i = 0;
+ }
Scalar beta = (rho/rho_old) * (alpha / w);
p = r + beta * (p - w * v);
@@ -81,12 +91,16 @@ bool bicgstab(const MatrixType& mat, const Rhs& rhs, Dest& x,
z = precond.solve(s);
t.noalias() = mat * z;
- w = t.dot(s) / t.squaredNorm();
+ RealScalar tmp = t.squaredNorm();
+ if(tmp>RealScalar(0))
+ w = t.dot(s) / tmp;
+ else
+ w = Scalar(0);
x += alpha * y + w * z;
r = s - w * t;
++i;
}
- tol_error = sqrt(r.squaredNorm()/r0_sqnorm);
+ tol_error = sqrt(r.squaredNorm()/rhs_sqnorm);
iters = i;
return true;
}
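
For context, a minimal usage sketch (not part of this patch) that exercises the BiCGSTAB path modified above. It assumes a sparse double-precision system built from a hypothetical tridiagonal test matrix and only uses the public solver API (setTolerance, setMaxIterations, iterations(), error()); error() corresponds to sqrt(r.squaredNorm()/rhs_sqnorm) at the end of the hunk, i.e. the residual norm relative to the right-hand side.

#include <Eigen/Dense>
#include <Eigen/Sparse>
#include <iostream>
#include <vector>

int main()
{
  typedef Eigen::SparseMatrix<double> SpMat;

  // Hypothetical test system: a tridiagonal (1D Laplacian-like) matrix.
  const int n = 100;
  std::vector<Eigen::Triplet<double> > coeffs;
  for(int i = 0; i < n; ++i)
  {
    coeffs.push_back(Eigen::Triplet<double>(i, i, 2.0));
    if(i > 0)   coeffs.push_back(Eigen::Triplet<double>(i, i-1, -1.0));
    if(i < n-1) coeffs.push_back(Eigen::Triplet<double>(i, i+1, -1.0));
  }
  SpMat A(n, n);
  A.setFromTriplets(coeffs.begin(), coeffs.end());
  Eigen::VectorXd b = Eigen::VectorXd::Ones(n);

  Eigen::BiCGSTAB<SpMat> solver;     // default DiagonalPreconditioner
  solver.setTolerance(1e-10);        // compared against |r|/|b|, as in the loop condition above
  solver.setMaxIterations(2*n);
  solver.compute(A);
  Eigen::VectorXd x = solver.solve(b);

  std::cout << "iterations: " << solver.iterations()
            << "  estimated error: " << solver.error() << std::endl;
  return 0;
}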
diff --git a/Eigen/src/IterativeLinearSolvers/ConjugateGradient.h b/Eigen/src/IterativeLinearSolvers/ConjugateGradient.h
index 00b5647c6..a74a8155e 100644
--- a/Eigen/src/IterativeLinearSolvers/ConjugateGradient.h
+++ b/Eigen/src/IterativeLinearSolvers/ConjugateGradient.h
@@ -63,7 +63,7 @@ void conjugate_gradient(const MatrixType& mat, const Rhs& rhs, Dest& x,
p = precond.solve(residual); //initial search direction
VectorType z(n), tmp(n);
- RealScalar absNew = internal::real(residual.dot(p)); // the square of the absolute value of r scaled by invM
+ RealScalar absNew = numext::real(residual.dot(p)); // the square of the absolute value of r scaled by invM
int i = 0;
while(i < maxIters)
{
@@ -80,7 +80,7 @@ void conjugate_gradient(const MatrixType& mat, const Rhs& rhs, Dest& x,
z = precond.solve(residual); // approximately solve for "A z = residual"
RealScalar absOld = absNew;
- absNew = internal::real(residual.dot(z)); // update the absolute value of r
+ absNew = numext::real(residual.dot(z)); // update the absolute value of r
RealScalar beta = absNew / absOld; // calculate the Gram-Schmidt value used to create the new search direction
p = z + beta * p; // update search direction
i++;
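
The two hunks above only switch internal::real to numext::real, but they sit in the middle of the preconditioned conjugate-gradient recurrence. As an illustration (not the Eigen implementation), here is a self-contained sketch of that recurrence with an identity preconditioner and a hypothetical dense SPD test matrix, showing how absNew/absOld produce the Gram-Schmidt coefficient beta and the new search direction p:

#include <Eigen/Dense>
#include <iostream>

int main()
{
  using namespace Eigen;
  const int n = 50;
  // Hypothetical SPD test matrix: M^T M + n*I is symmetric positive definite.
  MatrixXd M = MatrixXd::Random(n, n);
  MatrixXd A = M.transpose() * M + n * MatrixXd::Identity(n, n);
  VectorXd b = VectorXd::Random(n);
  VectorXd x = VectorXd::Zero(n);

  VectorXd r = b - A * x;
  VectorXd p = r;                        // identity preconditioner: z = r
  double absNew = r.dot(p);              // plays the role of numext::real(residual.dot(p))
  const double tol2 = 1e-20 * b.squaredNorm();

  for(int i = 0; i < 2*n && r.squaredNorm() > tol2; ++i)
  {
    VectorXd Ap = A * p;
    double alpha = absNew / p.dot(Ap);   // step length along the search direction
    x += alpha * p;
    r -= alpha * Ap;

    double absOld = absNew;
    absNew = r.dot(r);                   // residual.dot(z), with z = r here
    double beta = absNew / absOld;       // Gram-Schmidt value, as in the hunk above
    p = r + beta * p;                    // update search direction
  }
  std::cout << "final residual norm: " << (b - A * x).norm() << std::endl;
  return 0;
}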
diff --git a/Eigen/src/IterativeLinearSolvers/IncompleteLUT.h b/Eigen/src/IterativeLinearSolvers/IncompleteLUT.h
index 17d18ef58..b55afc136 100644
--- a/Eigen/src/IterativeLinearSolvers/IncompleteLUT.h
+++ b/Eigen/src/IterativeLinearSolvers/IncompleteLUT.h
@@ -150,8 +150,7 @@ class IncompleteLUT : internal::noncopyable
{
analyzePattern(amat);
factorize(amat);
- eigen_assert(m_factorizationIsOk == true);
- m_isInitialized = true;
+ m_isInitialized = m_factorizationIsOk;
return *this;
}
@@ -310,7 +309,7 @@ void IncompleteLUT<Scalar>::factorize(const _MatrixType& amat)
jr(k) = jpos;
++sizeu;
}
- rownorm += internal::abs2(j_it.value());
+ rownorm += numext::abs2(j_it.value());
}
// 2 - detect possible zero row
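
As a usage note rather than part of the patch: with the compute() change above, m_isInitialized now mirrors m_factorizationIsOk instead of being set unconditionally after an assert. A minimal sketch, assuming a sparse double system and a hypothetical tridiagonal test matrix, of building the incomplete factorization directly, checking info(), and applying it as an approximate solve:

#include <Eigen/Dense>
#include <Eigen/Sparse>
#include <iostream>
#include <vector>

int main()
{
  typedef Eigen::SparseMatrix<double> SpMat;

  // Hypothetical test system, as before: a tridiagonal matrix.
  const int n = 200;
  std::vector<Eigen::Triplet<double> > coeffs;
  for(int i = 0; i < n; ++i)
  {
    coeffs.push_back(Eigen::Triplet<double>(i, i, 4.0));
    if(i > 0)   coeffs.push_back(Eigen::Triplet<double>(i, i-1, -1.0));
    if(i < n-1) coeffs.push_back(Eigen::Triplet<double>(i, i+1, -1.0));
  }
  SpMat A(n, n);
  A.setFromTriplets(coeffs.begin(), coeffs.end());
  Eigen::VectorXd b = Eigen::VectorXd::Ones(n);

  Eigen::IncompleteLUT<double> ilut;
  ilut.setDroptol(1e-4);                 // drop tolerance of the ILUT factorization
  ilut.compute(A);                       // analyzePattern + factorize, as in the hunk above
  if(ilut.info() != Eigen::Success)
  {
    std::cerr << "incomplete LU factorization failed" << std::endl;
    return 1;
  }
  Eigen::VectorXd y = ilut.solve(b);     // approximate solve, e.g. as a preconditioner step
  std::cout << "||A*y - b|| = " << (A * y - b).norm() << std::endl;
  return 0;
}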