#include "boost/math/tools/minima.hpp"
#define LSST_ScaledPolynomialTransformFitter_TEST_IN_PLACE 0

namespace lsst {
namespace meas {
namespace astrom {
// Keys singleton: schema and keys for the fitter's internal catalog.  The
// match-based variant is a function-local static:
static Keys const it(0);

// Field definitions for the match-based schema.  Key member names and key types
// are inferred from their use later in this file; each call follows
// PointKey::addFields(schema, name, doc, unit).
output(afw::table::Point2DKey::addFields(
        schema, "src", "source positions in pixel coordinates.", "pix")),
input(afw::table::Point2DKey::addFields(
        schema, "intermediate", "reference positions in intermediate world coordinates", "deg")),
initial(afw::table::Point2DKey::addFields(
        schema, "initial", "reference positions transformed by initial WCS", "pix")),
model(afw::table::Point2DKey::addFields(
        schema, "model", "result of applying transform to reference positions", "pix")),
outputErr(afw::table::CovarianceMatrixKey<float, 2>::addFields(
        schema, "src", {"x", "y"}, "pix")),
rejected(schema.addField<afw::table::Flag>(
        "rejected", "True if the match should be rejected from the fit.")),

// Field definitions for the grid-based schema:
output(afw::table::Point2DKey::addFields(
        schema, "output", "grid output positions in intermediate world coordinates", "deg")),
input(afw::table::Point2DKey::addFields(
        schema, "input", "grid input positions in pixel coordinates.", "pix")),
model(afw::table::Point2DKey::addFields(
        schema, "model", "result of applying transform to input positions", "deg")),
"deg" 150 for (
auto const & record : data) {
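The bounding box collected here feeds an affine scaling that conditions the later polynomial fit. As a summary of the intent (the exact normalization constants are not visible in this excerpt), the scaling maps the box onto a unit square centred on the origin:

    s(p) = S\,(p - c), \qquad S = \operatorname{diag}\!\left(\tfrac{2}{w_x},\, \tfrac{2}{w_y}\right),

where c is the box centre and w_x, w_y are its width and height.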
// ScaledPolynomialTransformFitter::fromMatches: build the fit catalog from
// reference/source matches.  Only fragments of this factory appear in the
// excerpt; elided code is marked "// ...".  Among its parameters:
//     double intrinsicScatter
    // Fold the intrinsic scatter into each measured centroid covariance.
    float var2 = intrinsicScatter * intrinsicScatter;
    for (auto const &match : matches) {
        auto record = catalog.addNew();
        record->set(keys.refId, match.first->getId());
        record->set(keys.srcId, match.second->getId());
        // ...
        record->set(keys.output, match.second->getCentroid());
        record->set(keys.outputErr,
                    match.second->getCentroidErr() + var2 * Eigen::Matrix2f::Identity());
    }
    // Construct the fitter with scalings computed from the catalog.
    return ScaledPolynomialTransformFitter(
            // ...
            computeScaling(catalog, keys.input),
            computeScaling(catalog, keys.output));
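For reference, the covariance stored per match above combines the measured centroid covariance and the intrinsic scatter in quadrature:

    C_i = C_{\mathrm{centroid},i} + \sigma_{\mathrm{intrinsic}}^{2}\, I_2 .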
// ScaledPolynomialTransformFitter::fromGrid: build the fit catalog from a regular
// grid of points, pairing each grid point with toInvert(point).  Only fragments
// of this factory appear in the excerpt.  Among its parameters:
//     int nGridX,
//     int nGridY
    catalog.reserve(nGridX * nGridY);
    for (int iy = 0; iy < nGridY; ++iy) {
        for (int ix = 0; ix < nGridX; ++ix) {
            // ... (the grid point for this (ix, iy) is computed here; elided)
            auto record = catalog.addNew();
            record->set(keys.output, point);
            record->set(keys.input, toInvert(point));
        }
    }
    // Construct the fitter with scalings computed from the catalog.
    return ScaledPolynomialTransformFitter(
            // ...
            computeScaling(catalog, keys.input),
            computeScaling(catalog, keys.output));
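The computation of `point` itself is elided above. Purely as a hypothetical sketch of one way such a grid could be laid out (the function name makeGrid and its box parameters are mine, not from the source):

#include <cstddef>
#include <utility>
#include <vector>

// Hypothetical helper (not from the source): nGridX x nGridY evenly spaced points
// covering the box [xMin, xMax] x [yMin, yMax], in the same (iy outer, ix inner)
// order as the loops above.  Assumes nGridX, nGridY >= 2.
std::vector<std::pair<double, double>> makeGrid(double xMin, double xMax, double yMin, double yMax,
                                                int nGridX, int nGridY) {
    std::vector<std::pair<double, double>> points;
    points.reserve(static_cast<std::size_t>(nGridX) * nGridY);
    double const dx = (xMax - xMin) / (nGridX - 1);
    double const dy = (yMax - yMin) / (nGridY - 1);
    for (int iy = 0; iy < nGridY; ++iy) {
        for (int ix = 0; ix < nGridX; ++ix) {
            points.emplace_back(xMin + ix * dx, yMin + iy * dy);
        }
    }
    return points;
}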
ScaledPolynomialTransformFitter::ScaledPolynomialTransformFitter(
        // ...
        double intrinsicScatter,
        // ...
        : // ...
          _intrinsicScatter(intrinsicScatter),
          // ...
          _outputScaling(outputScaling),
          // ... (remaining initializers and the outer loop over records i elided)
{
    // Fill row i of the Vandermonde matrix: column j holds the monomial u^p * v^q,
    // iterating over total degree n = p + q in packed order.
    for (int n = 0, j = 0; n <= maxOrder; ++n) {
        for (int p = 0, q = n; p <= n; ++p, --q, ++j) {
            _vandermonde(i, j) = _transform._poly._u[p] * _transform._poly._v[q];
        }
    }
}
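This packed coefficient ordering reappears below when the least-squares solution is unpacked into the x and y coefficient triangles. A standalone sketch of the ordering (nothing here is LSST API; maxOrder is arbitrary):

#include <cstdio>

int main() {
    int const maxOrder = 3;
    // Enumerate monomials u^p * v^q in the packed order used above:
    // j runs over (p, q) with p + q = n, for n = 0..maxOrder.
    for (int n = 0, j = 0; n <= maxOrder; ++n) {
        for (int p = 0, q = n; p <= n; ++p, --q, ++j) {
            std::printf("j=%2d  ->  u^%d v^%d\n", j, p, q);
        }
    }
    // The total number of terms is (maxOrder + 1)(maxOrder + 2)/2 = 10 here,
    // which is what a packed-size helper would return for order 3.
    return 0;
}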
// ScaledPolynomialTransformFitter::fit(order): solve a weighted linear least
// squares problem for the polynomial coefficients up to the requested order.
    int maxOrder = _transform.getPoly().getOrder();
    // ...
    if (order > maxOrder) {
        throw LSST_EXCEPT(
                // ... (exception class elided in this excerpt)
                (boost::format("Order (%d) exceeded maximum order for the fitter (%d)") % order % maxOrder)
                        .str());
    }
    // Count the records that will participate in the fit.
    std::size_t nGood = 0;  // (declaration inferred)
    if (_keys.rejected.isValid()) {
        for (auto const &record : _data) {
            if (!record.get(_keys.rejected)) {
                ++nGood;
            }
        }
    } else {
        nGood = _data.size();
    }
    int packedSize = computePackedSize(order);  // (definition inferred; see computePackedSize)
    // Design matrix, data vectors, and per-point covariance elements, all in the
    // scaled output frame.
    Eigen::MatrixXd m = Eigen::MatrixXd::Zero(nGood, packedSize);
    Eigen::VectorXd vx = Eigen::VectorXd::Zero(nGood);
    Eigen::VectorXd vy = Eigen::VectorXd::Zero(nGood);
    Eigen::ArrayXd sxx(nGood);
    Eigen::ArrayXd syy(nGood);
    Eigen::ArrayXd sxy(nGood);
    Eigen::Matrix2d outS = _outputScaling.getLinear().getMatrix();
    for (std::size_t i1 = 0, i2 = 0; i1 < _data.size(); ++i1) {
        // Use only records that have not been rejected.
        if (!_keys.rejected.isValid() || !_data[i1].get(_keys.rejected)) {
            // ... (the scaled output point for this record is computed here; elided)
            vx[i2] = output.getX();
            vy[i2] = output.getY();
            m.row(i2) = _vandermonde.row(i1).head(packedSize);
            if (_keys.outputErr.isValid()) {
                // Transform the stored covariance into the scaled output frame.
                Eigen::Matrix2d modelErr =
                        outS * _data[i1].get(_keys.outputErr).cast<double>() * outS.adjoint();
                sxx[i2] = modelErr(0, 0);
                sxy[i2] = modelErr(0, 1);
                syy[i2] = modelErr(1, 1);
            }
            // ...
            ++i2;  // (increment inferred)
        }
    }
    // Elements of the per-point inverse covariance F = C^{-1}, computed
    // element-wise across all points at once.
    Eigen::ArrayXd fxx = 1.0 / (sxx - sxy.square() / syy);
    Eigen::ArrayXd fyy = 1.0 / (syy - sxy.square() / sxx);
    Eigen::ArrayXd fxy = -(sxy / sxx) * fyy;
#ifdef LSST_ScaledPolynomialTransformFitter_TEST_IN_PLACE
    // Check that F C = I, element by element.
    assert((sxx * fxx + sxy * fxy).isApproxToConstant(1.0));
    assert((syy * fyy + sxy * fxy).isApproxToConstant(1.0));
    assert((sxx * fxy).isApprox(-sxy * fyy));
    assert((sxy * fxx).isApprox(-syy * fxy));
#endif  // (closing directive inferred)
    // Normal equations for the joint (x, y) fit: the Hessian is a 2x2 block matrix
    // over the packed coefficient vectors, weighted by the inverse covariances.
    Eigen::MatrixXd h(2 * packedSize, 2 * packedSize);
    h.topLeftCorner(packedSize, packedSize) = m.adjoint() * fxx.matrix().asDiagonal() * m;
    h.topRightCorner(packedSize, packedSize) = m.adjoint() * fxy.matrix().asDiagonal() * m;
    h.bottomLeftCorner(packedSize, packedSize) = h.topRightCorner(packedSize, packedSize).adjoint();
    h.bottomRightCorner(packedSize, packedSize) = m.adjoint() * fyy.matrix().asDiagonal() * m;
    // Right-hand side.
    Eigen::VectorXd g(2 * packedSize);
    g.head(packedSize) = m.adjoint() * (fxx.matrix().asDiagonal() * vx + fxy.matrix().asDiagonal() * vy);
    g.tail(packedSize) = m.adjoint() * (fxy.matrix().asDiagonal() * vx + fyy.matrix().asDiagonal() * vy);
    // Solve the normal equations.  (The construction of lstsq is elided in this
    // excerpt; a LeastSquares::fromNormalEquations(h, g) solver is implied.)
    auto solution = lstsq.getSolution();
    // Unpack the solution into the x and y coefficient triangles, using the same
    // packed ordering as the Vandermonde columns.
    for (int n = 0, j = 0; n <= order; ++n) {
        for (int p = 0, q = n; p <= n; ++p, --q, ++j) {
            _transform._poly._xCoeffs(p, q) = solution[j];
            _transform._poly._yCoeffs(p, q) = solution[j + packedSize];
        }
    }
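As a summary of what the block above builds (notation introduced here, not from the source): with design matrix M, data vectors v_x, v_y, and per-point inverse covariances F_i = C_i^{-1} with elements f_xx, f_xy, f_yy, the packed coefficient vectors a_x, a_y solve

    H \begin{pmatrix} a_x \\ a_y \end{pmatrix} = g,
    \qquad
    H = \begin{pmatrix}
    M^{\top} \operatorname{diag}(f_{xx})\, M & M^{\top} \operatorname{diag}(f_{xy})\, M \\
    M^{\top} \operatorname{diag}(f_{xy})\, M & M^{\top} \operatorname{diag}(f_{yy})\, M
    \end{pmatrix},
    \qquad
    g = \begin{pmatrix}
    M^{\top} \left( \operatorname{diag}(f_{xx})\, v_x + \operatorname{diag}(f_{xy})\, v_y \right) \\
    M^{\top} \left( \operatorname{diag}(f_{xy})\, v_x + \operatorname{diag}(f_{yy})\, v_y \right)
    \end{pmatrix}.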
    // updateModel: re-evaluate the current transform at every input point and
    // store the result in the "model" field.
    for (auto &record : _data) {
        record.set(_keys.model, _transform(record.get(_keys.input)));  // (set of _keys.model inferred)
    }
// ScaledPolynomialTransformFitter::updateIntrinsicScatter: re-estimate the
// intrinsic scatter and fold the change into the stored covariances.
    if (!_keys.rejected.isValid()) {
        throw LSST_EXCEPT(
                // ... (exception class elided in this excerpt)
                "Cannot compute intrinsic scatter on fitter initialized with fromGrid.");
    }
    double newIntrinsicScatter = computeIntrinsicScatter();
    // Apply only the change in variance, which may be negative.
    float varDiff = newIntrinsicScatter * newIntrinsicScatter - _intrinsicScatter * _intrinsicScatter;
    for (auto &record : _data) {
        record.set(_keys.outputErr, record.get(_keys.outputErr) + varDiff * Eigen::Matrix2f::Identity());
    }
    _intrinsicScatter = newIntrinsicScatter;
    return _intrinsicScatter;
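The loop above keeps the stored covariances consistent with the new scatter estimate by adding only the change in variance:

    C_i \leftarrow C_i + (\sigma_{\mathrm{new}}^{2} - \sigma_{\mathrm{old}}^{2})\, I_2 .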
double ScaledPolynomialTransformFitter::computeIntrinsicScatter() const {
    // Compare the (output - model) residuals with the measurement-only part of the
    // covariances to estimate the intrinsic scatter.
    double directVariance = 0.0;          // direct estimate of the per-axis residual variance
    double maxMeasurementVariance = 0.0;  // largest eigenvalue of any measurement covariance
    double oldIntrinsicVariance = _intrinsicScatter * _intrinsicScatter;
    std::size_t nGood = 0;  // (declaration inferred)
    for (auto const &record : _data) {
        if (!_keys.rejected.isValid() || !record.get(_keys.rejected)) {
            auto delta = record.get(_keys.output) - record.get(_keys.model);
            directVariance += 0.5 * delta.computeSquaredNorm();
            // Remove the old intrinsic term to recover the measurement-only covariance.
            double cxx = _keys.outputErr.getElement(record, 0, 0) - oldIntrinsicVariance;
            double cyy = _keys.outputErr.getElement(record, 1, 1) - oldIntrinsicVariance;
            double cxy = _keys.outputErr.getElement(record, 0, 1);
            // Larger eigenvalue of the 2x2 measurement covariance.
            double ca2 = 0.5 * (cxx + cyy + std::sqrt(cxx * cxx + cyy * cyy + 4 * cxy * cxy - 2 * cxx * cyy));
            maxMeasurementVariance = std::max(maxMeasurementVariance, ca2);
            ++nGood;  // (increment inferred)
        }
    }
    directVariance /= nGood;
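The `ca2` expression above is the larger eigenvalue of the 2x2 measurement covariance,

    \lambda_{\max} = \tfrac{1}{2}\left(c_{xx} + c_{yy} + \sqrt{(c_{xx} - c_{yy})^{2} + 4\,c_{xy}^{2}}\right),

which matches the code because (c_{xx} - c_{yy})^{2} = c_{xx}^{2} + c_{yy}^{2} - 2\,c_{xx} c_{yy}.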
    // Negative log likelihood (up to an additive constant) of a candidate
    // intrinsic variance, given the current residuals and stored covariances.
    auto logLikelihood = [this](double intrinsicVariance) {
        // The stored covariances already include the old intrinsic variance, so
        // only the difference is added here.
        double varDiff = intrinsicVariance - this->_intrinsicScatter * this->_intrinsicScatter;
        double q = 0.0;  // (accumulator declaration inferred)
        for (auto &record : this->_data) {
            double x = record.get(this->_keys.output.getX()) - record.get(_keys.model.getX());
            double y = record.get(this->_keys.output.getY()) - record.get(_keys.model.getY());
            double cxx = this->_keys.outputErr.getElement(record, 0, 0) + varDiff;
            double cyy = this->_keys.outputErr.getElement(record, 1, 1) + varDiff;
            double cxy = this->_keys.outputErr.getElement(record, 0, 1);
            double det = cxx * cyy - cxy * cxy;
            q += (x * x * cyy - 2 * x * y * cxy + y * y * cxx) / det + std::log(det);
        }
        return q;
    };
    // Bracket the minimum and let Brent's method refine it.
    double minIntrinsicVariance = std::max(0.0, directVariance - maxMeasurementVariance);
    static constexpr int BITS_REQUIRED = 16;  // precision (in bits) requested from Brent's method
    boost::uintmax_t maxIterations = 20;
    auto result = boost::math::tools::brent_find_minima(
            logLikelihood,  // (first argument inferred)
            minIntrinsicVariance,
            // ... (upper bracket elided in this excerpt)
            BITS_REQUIRED, maxIterations);
    // ... (the minimizing variance in result.first is converted to a scatter and returned)
}
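The quantity minimized above is, up to an additive constant, twice the negative log likelihood of the residuals under independent per-point 2-D Gaussians:

    q(\sigma^{2}) = \sum_i \left[ \Delta_i^{\top} C_i(\sigma^{2})^{-1} \Delta_i + \ln \det C_i(\sigma^{2}) \right],
    \qquad
    C_i(\sigma^{2}) = C_{\mathrm{meas},i} + \sigma^{2} I_2,
    \qquad
    \Delta_i = p_{\mathrm{output},i} - p_{\mathrm{model},i}.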
// ScaledPolynomialTransformFitter::rejectOutliers(ctrl): flag matches whose
// residuals are too discrepant, honoring the nClipMin/nClipMax limits in the
// control object.  (Signature and a few statements are elided in this excerpt.)
    if (!_keys.rejected.isValid()) {
        throw LSST_EXCEPT(
                // ... (exception class elided)
                "Cannot reject outliers on fitter initialized with fromGrid.");
    }
    if (static_cast<std::size_t>(ctrl.nClipMin) >= _data.size()) {
        throw LSST_EXCEPT(
                // ... (exception class elided)
                (boost::format("Not enough values (%d) to clip %d.") % _data.size() % ctrl.nClipMin).str());
    }
    // Rank every record by its squared Mahalanobis distance from the model.
    // (`rankings` is an ordered map from r2 to a pointer at the record; its
    // declaration is elided in this excerpt.)
    for (auto &record : _data) {
        Eigen::Matrix2d cov = record.get(_keys.outputErr).cast<double>();
        Eigen::Vector2d d = (record.get(_keys.output) - record.get(_keys.model)).asEigen();
        double r2 = d.dot(cov.inverse() * d);
        rankings.insert(std::make_pair(r2, &record));  // (insertion inferred)
    }
    // Keep everything below the nSigma^2 cutoff, reject everything above it.
    auto cutoff = rankings.upper_bound(ctrl.nSigma * ctrl.nSigma);
    int nClip = 0, nGood = 0;
    for (auto iter = rankings.begin(); iter != cutoff; ++iter) {
        iter->second->set(_keys.rejected, false);
        ++nGood;  // (increment inferred)
    }
    for (auto iter = cutoff; iter != rankings.end(); ++iter) {
        iter->second->set(_keys.rejected, true);
        ++nClip;  // (increment inferred)
    }
    assert(static_cast<std::size_t>(nGood + nClip) == _data.size());
    // Clip additional points, working back from the cutoff, until at least
    // ctrl.nClipMin are rejected.  (Loop bounds inferred.)
    while (nClip < ctrl.nClipMin) {
        --cutoff;
        cutoff->second->set(_keys.rejected, true);
        ++nClip;
    }
    // Conversely, un-reject the least discrepant clipped points if more than
    // ctrl.nClipMax were rejected.
    while (nClip > ctrl.nClipMax && cutoff != rankings.end()) {
        cutoff->second->set(_keys.rejected, false);
        --nClip;
        ++cutoff;  // (advance inferred)
    }
    if (cutoff != rankings.end()) {
        // ... (report the effective clipping threshold at the cutoff; elided)
    }
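In summary, the rejection criterion above is a Mahalanobis-distance cut,

    r_i^{2} = \Delta_i^{\top} C_i^{-1} \Delta_i,
    \qquad
    \text{reject } i \iff r_i^{2} > n_{\sigma}^{2},

adjusted afterwards so that the number of rejected matches stays between ctrl.nClipMin and ctrl.nClipMax.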