LeastSquares.cc
// -*- LSST-C++ -*-

/*
 * LSST Data Management System
 * Copyright 2008, 2009, 2010, 2011 LSST Corporation.
 *
 * This product includes software developed by the
 * LSST Project (http://www.lsst.org/).
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the LSST License Statement and
 * the GNU General Public License along with this program. If not,
 * see <http://www.lsstcorp.org/LegalNotices/>.
 */

#include "Eigen/Eigenvalues"
#include "Eigen/SVD"
#include "Eigen/Cholesky"
#include "boost/format.hpp"
#include <memory>

#include "lsst/afw/math/LeastSquares.h"
#include "lsst/pex/exceptions.h"
#include "lsst/log/Log.h"

namespace {
LOG_LOGGER _log = LOG_GET("lsst.afw.math.LeastSquares");
}

namespace lsst {
namespace afw {
namespace math {

class LeastSquares::Impl {
public:
    enum StateFlags {
        LOWER_FISHER_MATRIX = 0x001,
        FULL_FISHER_MATRIX = 0x002,
        RHS_VECTOR = 0x004,
        SOLUTION_ARRAY = 0x008,
        COVARIANCE_ARRAY = 0x010,
        DIAGNOSTIC_ARRAY = 0x020,
        DESIGN_AND_DATA = 0x040
    };

    int state;
    int dimension;
    int rank;
    Factorization factorization;
    Factorization whichDiagnostic;
    double threshold;

    Eigen::MatrixXd design;
    Eigen::VectorXd data;
    Eigen::MatrixXd fisher;
    Eigen::VectorXd rhs;

    ndarray::Array<double, 1, 1> solution;
    ndarray::Array<double, 2, 2> covariance;
    ndarray::Array<double, 1, 1> diagnostic;

    template <typename D>
    void setRank(Eigen::MatrixBase<D> const& values) {
        double cond = threshold * values[0];
        if (cond <= 0.0) {
            rank = 0;
        } else {
            for (rank = dimension; (rank > 1) && (values[rank - 1] < cond); --rank)
                ;
        }
    }

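    // Illustration of the truncation rule above: setRank expects `values` sorted in decreasing
    // order and keeps the leading values that are at least threshold * values[0] (always at
    // least one, unless values[0] itself is nonpositive, in which case rank is 0). For example,
    // with threshold = 1e-12 and eigenvalues (4.0, 1.0, 1e-15), the cutoff is 4e-12, so rank
    // becomes 2 and the solvers below drop the nearly-degenerate third mode.
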
    void ensure(int desired) {
        if (state & FULL_FISHER_MATRIX) state |= LOWER_FISHER_MATRIX;
        if (desired & FULL_FISHER_MATRIX) desired |= LOWER_FISHER_MATRIX;
        int toAdd = ~state & desired;
        if (toAdd & LOWER_FISHER_MATRIX) {
            assert(state & DESIGN_AND_DATA);
            fisher = Eigen::MatrixXd::Zero(design.cols(), design.cols());
            fisher.selfadjointView<Eigen::Lower>().rankUpdate(design.adjoint());
        }
        if (toAdd & FULL_FISHER_MATRIX) {
            fisher.triangularView<Eigen::StrictlyUpper>() = fisher.adjoint();
        }
        if (toAdd & RHS_VECTOR) {
            assert(state & DESIGN_AND_DATA);
            rhs = design.adjoint() * data;
        }
        if (toAdd & SOLUTION_ARRAY) {
            if (solution.isEmpty()) solution = ndarray::allocate(dimension);
            updateSolution();
        }
        if (toAdd & COVARIANCE_ARRAY) {
            if (covariance.isEmpty()) covariance = ndarray::allocate(dimension, dimension);
            updateCovariance();
        }
        if (toAdd & DIAGNOSTIC_ARRAY) {
            if (diagnostic.isEmpty()) diagnostic = ndarray::allocate(dimension);
            updateDiagnostic();
        }
        state |= toAdd;
    }

    virtual void factor() = 0;

    virtual void updateRank() = 0;

    virtual void updateSolution() = 0;
    virtual void updateCovariance() = 0;

    virtual void updateDiagnostic() = 0;

    Impl(int dimension_, double threshold_ = std::numeric_limits<double>::epsilon())
            : state(0), dimension(dimension_), rank(dimension_), threshold(threshold_) {}

    virtual ~Impl() = default;
};

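// The state flags above let ensure() build each product lazily and at most once. As a
// standalone sketch of the two normal-equation products it forms (plain Eigen only; the
// variable names here are illustrative and not part of this file):
//
//     Eigen::MatrixXd A(3, 2);                     // design matrix: 3 data points, 2 parameters
//     A << 1, 0, 1, 1, 1, 2;
//     Eigen::VectorXd b(3);                        // data vector
//     b << 1, 2, 3;
//     Eigen::MatrixXd F = Eigen::MatrixXd::Zero(A.cols(), A.cols());
//     F.selfadjointView<Eigen::Lower>().rankUpdate(A.adjoint());   // lower triangle of F = A^T A
//     F.triangularView<Eigen::StrictlyUpper>() = F.adjoint();      // mirror to the full Fisher matrix
//     Eigen::VectorXd rhs = A.adjoint() * b;                       // right-hand side A^T b
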
namespace {

class EigensystemSolver : public LeastSquares::Impl {
public:
    explicit EigensystemSolver(int dimension) : Impl(dimension), _eig(dimension), _svd(), _tmp(dimension) {}

    void factor() override {
        ensure(LOWER_FISHER_MATRIX | RHS_VECTOR);
        _eig.compute(fisher);
        if (_eig.info() == Eigen::Success) {
            setRank(_eig.eigenvalues().reverse());
            LOGL_DEBUG(_log, "SelfAdjointEigenSolver succeeded: dimension=%d, rank=%d", dimension, rank);
        } else {
            // Note that the fallback is using SVD of the Fisher to compute the Eigensystem, because those
            // are the same for a symmetric matrix; this is very different from doing a direct SVD of
            // the design matrix.
            ensure(FULL_FISHER_MATRIX);
            _svd.compute(fisher, Eigen::ComputeFullU); // Matrix is symmetric, so V == U == eigenvectors
            setRank(_svd.singularValues());
            LOGL_DEBUG(_log,
                       "SelfAdjointEigenSolver failed; falling back to equivalent SVD: dimension=%d, rank=%d",
                       dimension, rank);
        }
    }

    void updateRank() override {
        if (_eig.info() == Eigen::Success) {
            setRank(_eig.eigenvalues().reverse());
        } else {
            setRank(_svd.singularValues());
        }
    }

    void updateDiagnostic() override {
        if (whichDiagnostic == LeastSquares::NORMAL_CHOLESKY) {
            throw LSST_EXCEPT(
                    pex::exceptions::LogicError,
                    "Cannot compute NORMAL_CHOLESKY diagnostic from NORMAL_EIGENSYSTEM factorization.");
        }
        if (_eig.info() == Eigen::Success) {
            ndarray::asEigenMatrix(diagnostic) = _eig.eigenvalues().reverse();
        } else {
            ndarray::asEigenMatrix(diagnostic) = _svd.singularValues();
        }
        if (whichDiagnostic == LeastSquares::DIRECT_SVD) {
            ndarray::asEigenArray(diagnostic) = ndarray::asEigenArray(diagnostic).sqrt();
        }
    }

    void updateSolution() override {
        if (rank == 0) {
            ndarray::asEigenMatrix(solution).setZero();
            return;
        }
        if (_eig.info() == Eigen::Success) {
            _tmp.head(rank) = _eig.eigenvectors().rightCols(rank).adjoint() * rhs;
            _tmp.head(rank).array() /= _eig.eigenvalues().tail(rank).array();
            ndarray::asEigenMatrix(solution) = _eig.eigenvectors().rightCols(rank) * _tmp.head(rank);
        } else {
            _tmp.head(rank) = _svd.matrixU().leftCols(rank).adjoint() * rhs;
            _tmp.head(rank).array() /= _svd.singularValues().head(rank).array();
            ndarray::asEigenMatrix(solution) = _svd.matrixU().leftCols(rank) * _tmp.head(rank);
        }
    }

    void updateCovariance() override {
        if (rank == 0) {
            ndarray::asEigenMatrix(covariance).setZero();
            return;
        }
        if (_eig.info() == Eigen::Success) {
            ndarray::asEigenMatrix(covariance) =
                    _eig.eigenvectors().rightCols(rank) *
                    _eig.eigenvalues().tail(rank).array().inverse().matrix().asDiagonal() *
                    _eig.eigenvectors().rightCols(rank).adjoint();
        } else {
            ndarray::asEigenMatrix(covariance) =
                    _svd.matrixU().leftCols(rank) *
                    _svd.singularValues().head(rank).array().inverse().matrix().asDiagonal() *
                    _svd.matrixU().leftCols(rank).adjoint();
        }
    }

private:
    Eigen::SelfAdjointEigenSolver<Eigen::MatrixXd> _eig;
    Eigen::JacobiSVD<Eigen::MatrixXd> _svd; // only used if Eigendecomposition fails, should be very rare
    Eigen::VectorXd _tmp;
};

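// EigensystemSolver solves the normal equations F x = rhs through the eigendecomposition of F,
// inverting only the modes that survive the threshold. A rough standalone equivalent in plain
// Eigen (illustrative names, not part of this file; SelfAdjointEigenSolver returns eigenvalues
// in increasing order, hence rightCols/tail for the largest modes):
//
//     Eigen::SelfAdjointEigenSolver<Eigen::MatrixXd> eig(F);
//     Eigen::VectorXd ev = eig.eigenvalues();
//     double cut = 1e-12 * ev[ev.size() - 1];                       // example threshold
//     int rank = int((ev.array() >= cut).count());
//     Eigen::VectorXd t = eig.eigenvectors().rightCols(rank).adjoint() * rhs;
//     t.array() /= ev.tail(rank).array();
//     Eigen::VectorXd x = eig.eigenvectors().rightCols(rank) * t;   // truncated pseudo-inverse solution
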
class CholeskySolver : public LeastSquares::Impl {
public:
    explicit CholeskySolver(int dimension) : Impl(dimension, 0.0), _ldlt(dimension) {}

    void factor() override {
        ensure(LOWER_FISHER_MATRIX | RHS_VECTOR);
        _ldlt.compute(fisher);
    }

    void updateRank() override {}

    void updateDiagnostic() override {
        if (whichDiagnostic != LeastSquares::NORMAL_CHOLESKY) {
            throw LSST_EXCEPT(
                    pex::exceptions::LogicError,
                    "Can only compute NORMAL_CHOLESKY diagnostic from NORMAL_CHOLESKY factorization.");
        }
        ndarray::asEigenMatrix(diagnostic) = _ldlt.vectorD();
    }

    void updateSolution() override { ndarray::asEigenMatrix(solution) = _ldlt.solve(rhs); }

    void updateCovariance() override {
        auto cov = ndarray::asEigenMatrix(covariance);
        cov.setIdentity();
        cov = _ldlt.solve(cov);
    }

private:
    Eigen::LDLT<Eigen::MatrixXd> _ldlt;
};

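// CholeskySolver never truncates (its threshold is fixed at zero, and updateRank is a no-op)
// and solves the full-rank normal equations with an LDL^T factorization, the cheapest of the
// three strategies. A standalone sketch of the same operations in plain Eigen (illustrative
// names only, not part of this file):
//
//     Eigen::LDLT<Eigen::MatrixXd> ldlt(F);
//     Eigen::VectorXd x = ldlt.solve(rhs);                                             // solution
//     Eigen::MatrixXd cov = ldlt.solve(Eigen::MatrixXd::Identity(F.rows(), F.cols())); // = F^{-1}
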
class SvdSolver : public LeastSquares::Impl {
public:
    explicit SvdSolver(int dimension) : Impl(dimension), _svd(), _tmp(dimension) {}

    void factor() override {
        if (!(state & DESIGN_AND_DATA)) {
            throw LSST_EXCEPT(pex::exceptions::InvalidParameterError,
                              "Cannot initialize DIRECT_SVD solver with normal equations.");
        }
        _svd.compute(design, Eigen::ComputeThinU | Eigen::ComputeThinV);
        setRank(_svd.singularValues());
        LOGL_DEBUG(_log, "Using direct SVD method; dimension=%d, rank=%d", dimension, rank);
    }

    void updateRank() override { setRank(_svd.singularValues()); }

    void updateDiagnostic() override {
        switch (whichDiagnostic) {
            case LeastSquares::NORMAL_EIGENSYSTEM:
                ndarray::asEigenArray(diagnostic) = _svd.singularValues().array().square();
                break;
            case LeastSquares::NORMAL_CHOLESKY:
                throw LSST_EXCEPT(
                        pex::exceptions::LogicError,
                        "Cannot compute NORMAL_CHOLESKY diagnostic from DIRECT_SVD factorization.");
            case LeastSquares::DIRECT_SVD:
                ndarray::asEigenMatrix(diagnostic) = _svd.singularValues();
                break;
        }
    }

    void updateSolution() override {
        if (rank == 0) {
            ndarray::asEigenMatrix(solution).setZero();
            return;
        }
        _tmp.head(rank) = _svd.matrixU().leftCols(rank).adjoint() * data;
        _tmp.head(rank).array() /= _svd.singularValues().head(rank).array();
        ndarray::asEigenMatrix(solution) = _svd.matrixV().leftCols(rank) * _tmp.head(rank);
    }

    void updateCovariance() override {
        if (rank == 0) {
            ndarray::asEigenMatrix(covariance).setZero();
            return;
        }
        ndarray::asEigenMatrix(covariance) =
                _svd.matrixV().leftCols(rank) *
                _svd.singularValues().head(rank).array().inverse().square().matrix().asDiagonal() *
                _svd.matrixV().leftCols(rank).adjoint();
    }

private:
    Eigen::JacobiSVD<Eigen::MatrixXd> _svd;
    Eigen::VectorXd _tmp;
};

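// SvdSolver works on the design matrix itself rather than on the normal equations, which is
// slower but better conditioned: the condition number of A^T A is the square of that of A, and
// the singular values of A are the square roots of the Fisher eigenvalues (hence the square()
// in the NORMAL_EIGENSYSTEM diagnostic above). A standalone sketch in plain Eigen (illustrative
// names only, not part of this file):
//
//     Eigen::JacobiSVD<Eigen::MatrixXd> svd(A, Eigen::ComputeThinU | Eigen::ComputeThinV);
//     Eigen::VectorXd sv = svd.singularValues();                  // decreasing order
//     double cut = 1e-12 * sv[0];                                 // example threshold
//     int rank = int((sv.array() >= cut).count());
//     Eigen::VectorXd t = svd.matrixU().leftCols(rank).adjoint() * b;
//     t.array() /= sv.head(rank).array();
//     Eigen::VectorXd x = svd.matrixV().leftCols(rank) * t;
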
} // namespace

void LeastSquares::setThreshold(double threshold) {
    _impl->threshold = threshold;
    _impl->state &= ~Impl::SOLUTION_ARRAY;
    _impl->state &= ~Impl::COVARIANCE_ARRAY;
    _impl->updateRank();
}

double LeastSquares::getThreshold() const { return _impl->threshold; }

ndarray::Array<double const, 1, 1> LeastSquares::getSolution() {
    _impl->ensure(Impl::SOLUTION_ARRAY);
    return _impl->solution;
}

ndarray::Array<double const, 2, 2> LeastSquares::getCovariance() {
    _impl->ensure(Impl::COVARIANCE_ARRAY);
    return _impl->covariance;
}

ndarray::Array<double const, 2, 2> LeastSquares::getFisherMatrix() {
    _impl->ensure(Impl::FULL_FISHER_MATRIX);
    // Wrap the Eigen::MatrixXd in an ndarray::Array, using _impl as the reference-counted owner.
    // Doesn't matter if we swap strides, because it's symmetric.
    return ndarray::external(_impl->fisher.data(), ndarray::makeVector(_impl->dimension, _impl->dimension),
                             ndarray::makeVector(_impl->dimension, 1), _impl);
}

ndarray::Array<double const, 1, 1> LeastSquares::getDiagnostic(Factorization factorization) {
    if (_impl->whichDiagnostic != factorization) {
        _impl->state &= ~Impl::DIAGNOSTIC_ARRAY;
        _impl->whichDiagnostic = factorization;
    }
    _impl->ensure(Impl::DIAGNOSTIC_ARRAY);
    return _impl->diagnostic;
}

int LeastSquares::getDimension() const { return _impl->dimension; }

int LeastSquares::getRank() const { return _impl->rank; }

LeastSquares::Factorization LeastSquares::getFactorization() const { return _impl->factorization; }

LeastSquares::LeastSquares(Factorization factorization, int dimension) {
    switch (factorization) {
        case NORMAL_EIGENSYSTEM:
            _impl = std::make_shared<EigensystemSolver>(dimension);
            break;
        case NORMAL_CHOLESKY:
            _impl = std::make_shared<CholeskySolver>(dimension);
            break;
        case DIRECT_SVD:
            _impl = std::make_shared<SvdSolver>(dimension);
            break;
    }
    _impl->factorization = factorization;
}

LeastSquares::LeastSquares(LeastSquares const&) = default;
LeastSquares::LeastSquares(LeastSquares&&) = default;
LeastSquares& LeastSquares::operator=(LeastSquares const&) = default;
LeastSquares& LeastSquares::operator=(LeastSquares&&) = default;

LeastSquares::~LeastSquares() = default;

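// Typical use of the public interface defined in this file (a sketch only; it assumes the
// templated fromDesignMatrix factory declared in LeastSquares.h, which does not appear here,
// and uses placeholder sizes nData and nParams):
//
//     ndarray::Array<double, 2, 2> design = ndarray::allocate(nData, nParams);
//     ndarray::Array<double, 1, 1> data = ndarray::allocate(nData);
//     // ... fill design and data ...
//     LeastSquares lsq = LeastSquares::fromDesignMatrix(design, data, LeastSquares::NORMAL_EIGENSYSTEM);
//     auto solution = lsq.getSolution();      // best-fit parameter vector
//     auto covariance = lsq.getCovariance();  // parameter covariance matrix
//     int rank = lsq.getRank();               // number of modes kept after truncation
//     lsq.setThreshold(1e-8);                 // invalidates the cached solution and covariance
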
Eigen::MatrixXd& LeastSquares::_getDesignMatrix() { return _impl->design; }
Eigen::VectorXd& LeastSquares::_getDataVector() { return _impl->data; }

Eigen::MatrixXd& LeastSquares::_getFisherMatrix() { return _impl->fisher; }
Eigen::VectorXd& LeastSquares::_getRhsVector() { return _impl->rhs; }

void LeastSquares::_factor(bool haveNormalEquations) {
    if (haveNormalEquations) {
        if (_getFisherMatrix().rows() != _impl->dimension) {
            throw LSST_EXCEPT(pex::exceptions::InvalidParameterError,
                              (boost::format("Number of rows of Fisher matrix (%d) does not match"
                                             " dimension of LeastSquares solver.") %
                               _getFisherMatrix().rows() % _impl->dimension)
                                      .str());
        }
        if (_getFisherMatrix().cols() != _impl->dimension) {
            throw LSST_EXCEPT(pex::exceptions::InvalidParameterError,
                              (boost::format("Number of columns of Fisher matrix (%d) does not match"
                                             " dimension of LeastSquares solver.") %
                               _getFisherMatrix().cols() % _impl->dimension)
                                      .str());
        }
        if (_getRhsVector().size() != _impl->dimension) {
            throw LSST_EXCEPT(pex::exceptions::InvalidParameterError,
                              (boost::format("Number of elements in RHS vector (%d) does not match"
                                             " dimension of LeastSquares solver.") %
                               _getRhsVector().size() % _impl->dimension)
                                      .str());
        }
        _impl->state = Impl::LOWER_FISHER_MATRIX | Impl::FULL_FISHER_MATRIX | Impl::RHS_VECTOR;
    } else {
        if (_getDesignMatrix().cols() != _impl->dimension) {
            throw LSST_EXCEPT(
                    pex::exceptions::InvalidParameterError,
                    "Number of columns of design matrix does not match dimension of LeastSquares solver.");
        }
        if (_getDesignMatrix().rows() != _getDataVector().size()) {
            throw LSST_EXCEPT(pex::exceptions::InvalidParameterError,
                              (boost::format("Number of rows of design matrix (%d) does not match number of "
                                             "data points (%d)") %
                               _getDesignMatrix().rows() % _getDataVector().size())
                                      .str());
        }
        if (_getDesignMatrix().cols() > _getDataVector().size()) {
            throw LSST_EXCEPT(
                    pex::exceptions::InvalidParameterError,
                    (boost::format("Number of columns of design matrix (%d) must not be greater than the "
                                   "number of data points (%d)") %
                     _getDesignMatrix().cols() % _getDataVector().size())
                            .str());
        }
        _impl->state = Impl::DESIGN_AND_DATA;
    }
    _impl->factor();
}
} // namespace math
} // namespace afw
} // namespace lsst