From aa0db35185f7eda94eb103b6bb92630c432512e5 Mon Sep 17 00:00:00 2001
From: Jitse Niesen
Date: Sat, 18 Jan 2014 01:16:17 +0000
Subject: Add doc page on computing Least Squares.

---
 doc/LeastSquares.dox                         | 70 ++++++++++++++++++++++++++++
 doc/Manual.dox                               |  2 +
 doc/TutorialLinearAlgebra.dox                | 11 +++--
 doc/snippets/LeastSquaresNormalEquations.cpp |  4 ++
 doc/snippets/LeastSquaresQR.cpp              |  4 ++
 5 files changed, 86 insertions(+), 5 deletions(-)
 create mode 100644 doc/LeastSquares.dox
 create mode 100644 doc/snippets/LeastSquaresNormalEquations.cpp
 create mode 100644 doc/snippets/LeastSquaresQR.cpp

(limited to 'doc')

diff --git a/doc/LeastSquares.dox b/doc/LeastSquares.dox
new file mode 100644
index 000000000..e2191a22f
--- /dev/null
+++ b/doc/LeastSquares.dox
@@ -0,0 +1,70 @@
+namespace Eigen {
+
+/** \eigenManualPage LeastSquares Solving linear least squares systems
+
+This page describes how to solve linear least squares systems using %Eigen. An overdetermined system
+of equations, say \a Ax = \a b, generally has no solution. In this case, it makes sense to search for
+the vector \a x which is closest to being a solution, in the sense that the difference \a Ax - \a b
+is as small as possible. This \a x is called the least squares solution (if the Euclidean norm is used).
+
+The three methods discussed on this page are the SVD decomposition, the QR decomposition and the
+normal equations. Of these, the SVD decomposition is generally the most accurate but the slowest, the
+normal equations are the fastest but least accurate, and the QR decomposition is in between.
+
+\eigenAutoToc
+
+
+\section LeastSquaresSVD Using the SVD decomposition
+
+The \link JacobiSVD::solve() solve() \endlink method in the JacobiSVD class can be used directly to
+solve linear least squares systems. It is not enough to compute only the singular values (the default
+for this class); you also need the singular vectors, but the thin SVD decomposition suffices for
+computing least squares solutions:
+
+<table class="example">
+<tr><th>Example:</th><th>Output:</th></tr>
+<tr>
+  <td>\include TutorialLinAlgSVDSolve.cpp </td>
+  <td>\verbinclude TutorialLinAlgSVDSolve.out </td>
+</tr>
+</table>
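+
+The snippet TutorialLinAlgSVDSolve.cpp already lives in doc/snippets and is not part of this
+patch; for readers without the file at hand, its core is roughly the following sketch (assuming,
+as in all snippets, that the %Eigen and \c std namespaces are in scope):
+
+\code
+MatrixXf A = MatrixXf::Random(3, 2);
+VectorXf b = VectorXf::Random(3);
+// Thin U and V suffice for least squares; computing only singular values would not.
+VectorXf x = A.jacobiSvd(ComputeThinU | ComputeThinV).solve(b);
+\endcode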
+
+The example is taken from the page \link TutorialLinearAlgebra Linear algebra and decompositions \endlink.
+
+
+\section LeastSquaresQR Using the QR decomposition
+
+The solve() method in the QR decomposition classes also computes the least squares solution. There are
+three QR decomposition classes: HouseholderQR (no pivoting, so fast but unstable),
+ColPivHouseholderQR (column pivoting, thus a bit slower but more accurate) and FullPivHouseholderQR
+(full pivoting, so slowest and most stable). Here is an example with column pivoting:
+
+<table class="example">
+<tr><th>Example:</th><th>Output:</th></tr>
+<tr>
+  <td>\include LeastSquaresQR.cpp </td>
+  <td>\verbinclude LeastSquaresQR.out </td>
+</tr>
+</table>
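+
+When the same matrix is used with several right-hand sides, it pays to construct the decomposition
+object once and reuse it. A minimal sketch (the vectors \c b1 and \c b2 are hypothetical right-hand
+sides of matching size):
+
+\code
+ColPivHouseholderQR<MatrixXf> qr(A);  // factor A once
+VectorXf x1 = qr.solve(b1);           // reuse the factorization for each right-hand side
+VectorXf x2 = qr.solve(b2);
+\endcode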
+
+
+\section LeastSquaresNormalEquations Using normal equations
+
+Finding the least squares solution of \a Ax = \a b is equivalent to solving the normal equation
+<i>A</i><sup>T</sup><i>Ax</i> = <i>A</i><sup>T</sup><i>b</i>. This leads to the following code:
+
+<table class="example">
+<tr><th>Example:</th><th>Output:</th></tr>
+<tr>
+  <td>\include LeastSquaresNormalEquations.cpp </td>
+  <td>\verbinclude LeastSquaresNormalEquations.out </td>
+</tr>
+</table>
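+
+Since <i>A</i><sup>T</sup><i>A</i> is symmetric and positive definite whenever \a A has full column
+rank, the Cholesky decomposition llt() could be used instead of ldlt() in the snippet above; a
+sketch of that variation:
+
+\code
+// llt() assumes positive definiteness, i.e. that A has full column rank;
+// ldlt() as used in the snippet is slightly more robust.
+VectorXf x = (A.transpose() * A).llt().solve(A.transpose() * b);
+\endcode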
+
+If the matrix \a A is ill-conditioned, then this is not a good method, because the condition number
+of <i>A</i><sup>T</sup><i>A</i> is the square of the condition number of \a A. This means that you
+lose roughly twice as many digits of accuracy using the normal equations as when you use the other
+methods.
+
+*/
+
+}
\ No newline at end of file
diff --git a/doc/Manual.dox b/doc/Manual.dox
index 3367982ca..0d3787675 100644
--- a/doc/Manual.dox
+++ b/doc/Manual.dox
@@ -96,6 +96,8 @@ namespace Eigen {
     \ingroup DenseLinearSolvers_chapter */
 /** \addtogroup TopicLinearAlgebraDecompositions
     \ingroup DenseLinearSolvers_chapter */
+/** \addtogroup LeastSquares
+    \ingroup DenseLinearSolvers_chapter */
 /** \addtogroup DenseLinearSolvers_Reference
     \ingroup DenseLinearSolvers_chapter */
 
diff --git a/doc/TutorialLinearAlgebra.dox b/doc/TutorialLinearAlgebra.dox
index b09f3543e..e6c41fd70 100644
--- a/doc/TutorialLinearAlgebra.dox
+++ b/doc/TutorialLinearAlgebra.dox
@@ -167,8 +167,8 @@ Here is an example:
 
 \section TutorialLinAlgLeastsquares Least squares solving
 
-The best way to do least squares solving is with a SVD decomposition. Eigen provides one as the JacobiSVD class, and its solve()
-is doing least-squares solving.
+The most accurate method for least squares solving is an SVD decomposition. Eigen provides one
+as the JacobiSVD class, and its solve() method does least-squares solving.
 
 Here is an example:
 
@@ -179,9 +179,10 @@ Here is an example:
 </tr>
 </table>
 
-Another way, potentially faster but less reliable, is to use a LDLT decomposition
-of the normal matrix. In any case, just read any reference text on least squares, and it will be very easy for you
-to implement any linear least squares computation on top of Eigen.
+Other methods, potentially faster but less reliable, are to use a Cholesky decomposition of the
+normal matrix or a QR decomposition. Our page on \link LeastSquares least squares solving \endlink
+has more details.
+
 
 \section TutorialLinAlgSeparateComputation Separating the computation from the construction
 
diff --git a/doc/snippets/LeastSquaresNormalEquations.cpp b/doc/snippets/LeastSquaresNormalEquations.cpp
new file mode 100644
index 000000000..997cf1715
--- /dev/null
+++ b/doc/snippets/LeastSquaresNormalEquations.cpp
@@ -0,0 +1,4 @@
+MatrixXf A = MatrixXf::Random(3, 2);
+VectorXf b = VectorXf::Random(3);
+cout << "The solution using normal equations is:\n"
+     << (A.transpose() * A).ldlt().solve(A.transpose() * b) << endl;
diff --git a/doc/snippets/LeastSquaresQR.cpp b/doc/snippets/LeastSquaresQR.cpp
new file mode 100644
index 000000000..6c9704547
--- /dev/null
+++ b/doc/snippets/LeastSquaresQR.cpp
@@ -0,0 +1,4 @@
+MatrixXf A = MatrixXf::Random(3, 2);
+VectorXf b = VectorXf::Random(3);
+cout << "The solution using the QR decomposition is:\n"
+     << A.colPivHouseholderQr().solve(b) << endl;
-- 
cgit v1.2.3