Coverage for python / lsst / drp / tasks / fit_turbulence.py: 17%
249 statements
« prev ^ index » next coverage.py v7.13.5, created at 2026-05-05 08:35 +0000
1# This file is part of drp_tasks.
2#
3# LSST Data Management System
4# This product includes software developed by the
5# LSST Project (http://www.lsst.org/).
6# See COPYRIGHT file at the top of the source tree.
7#
8# This program is free software: you can redistribute it and/or modify
9# it under the terms of the GNU General Public License as published by
10# the Free Software Foundation, either version 3 of the License, or
11# (at your option) any later version.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the LSST License Statement and
19# the GNU General Public License along with this program. If not,
20# see <https://www.lsstcorp.org/LegalNotices/>.
21#
22import dataclasses
24import astropy.units as u
25import astshim as ast
26import matplotlib.pyplot as plt
27import numpy as np
28import treegp
29from astropy.table import Table
30from scipy.interpolate import RectBivariateSpline
32import lsst.afw.geom as afwgeom
33import lsst.afw.table
34import lsst.pex.config as pexConfig
35import lsst.pipe.base as pipeBase
# Public API of this module: the connections, config, and task classes for the
# Gaussian Processes turbulence fit.
__all__ = [
    "GaussianProcessesTurbulenceFitConnections",
    "GaussianProcessesTurbulenceFitConfig",
    "GaussianProcessesTurbulenceFitTask",
]
def plot_visit(x, y, dx, dy, predx, predy):
    """Utility function for plotting Gaussian Processes results.

    Residuals, predictions, and post-correction residuals are shown as
    scatter plots sharing one symmetric color scale, alongside the E and
    B-mode decomposition of the 2-point correlation function before and after
    the correction. Axis labels indicate coordinates in degrees and residual
    amplitudes in mas.

    Parameters
    ----------
    x : `np.ndarray`
        x-direction coordinates.
    y : `np.ndarray`
        y-direction coordinates.
    dx : `np.ndarray`
        x-direction residuals to be fit.
    dy : `np.ndarray`
        y-direction residuals to be fit.
    predx : `np.ndarray`
        x-direction prediction.
    predy : `np.ndarray`
        y-direction prediction.

    Returns
    -------
    fig : `matplotlib.pyplot.Figure`
        Figure showing input data, Gaussian Processes prediction, and E and
        B-modes.
    """
    # E/B-mode decomposition of the 2-point correlation function, before and
    # after subtracting the Gaussian Processes prediction.
    xie, xib, logr = treegp.comp_eb_treecorr(x, y, dx, dy, rmin=20 / 3600, rmax=0.6, dlogr=0.3)
    xie_resid, xib_resid, logr_resid = treegp.comp_eb_treecorr(
        x, y, dx - predx, dy - predy, rmin=20 / 3600, rmax=0.6, dlogr=0.3
    )

    # Symmetric color-scale limit set by the scatter of the x residuals
    # (the same limit is reused for the y-direction panels).
    residualLimit = np.nanstd(dx)

    fig, subs = plt.subplot_mosaic(
        [["dx", "predx", "residx", "eb"], ["dy", "predy", "residy", "eb"]],
        figsize=(15, 8),
        layout="constrained",
    )
    plt.subplots_adjust(wspace=0.3, right=0.99, left=0.05)
    im = subs["dx"].scatter(x, y, c=dx, vmin=-residualLimit, vmax=residualLimit, cmap=plt.cm.seismic, s=1)
    subs["dy"].scatter(x, y, c=dy, vmin=-residualLimit, vmax=residualLimit, cmap=plt.cm.seismic, s=1)

    subs["predx"].scatter(x, y, c=predx, vmin=-residualLimit, vmax=residualLimit, cmap=plt.cm.seismic, s=1)
    subs["predy"].scatter(x, y, c=predy, vmin=-residualLimit, vmax=residualLimit, cmap=plt.cm.seismic, s=1)

    subs["residx"].scatter(
        x, y, c=dx - predx, vmin=-residualLimit, vmax=residualLimit, cmap=plt.cm.seismic, s=1
    )
    subs["residy"].scatter(
        x, y, c=dy - predy, vmin=-residualLimit, vmax=residualLimit, cmap=plt.cm.seismic, s=1
    )

    # One shared colorbar for all six scatter panels.
    cb = fig.colorbar(
        im, ax=[subs["dx"], subs["dy"], subs["predx"], subs["predy"], subs["residx"], subs["residy"]]
    )

    # Separation axis is converted from degrees (log bins) to arcminutes.
    subs["eb"].scatter(np.exp(logr) * 60, xie, c="b", label="E-mode")
    subs["eb"].scatter(np.exp(logr) * 60, xib, c="r", label="B-mode")

    subs["eb"].scatter(
        np.exp(logr_resid) * 60, xie_resid, c="b", marker="+", label="E-mode after GP correction"
    )
    subs["eb"].scatter(
        np.exp(logr_resid) * 60, xib_resid, c="r", marker="+", label="B-mode after GP correction"
    )
    subs["eb"].legend()
    subs["eb"].grid(True)

    subs["dx"].set_aspect("equal")
    subs["dy"].set_aspect("equal")
    subs["predx"].set_aspect("equal")
    subs["predy"].set_aspect("equal")
    subs["residx"].set_aspect("equal")
    subs["residy"].set_aspect("equal")
    subs["dy"].set_xlabel("x (degree)")
    subs["predy"].set_xlabel("x (degree)")
    subs["residy"].set_xlabel("x (degree)")
    subs["dy"].set_ylabel("y (degree)")
    subs["dx"].set_ylabel("y (degree)")

    subs["dx"].set_title(r"$\delta$x")
    subs["predx"].set_title("GP prediction")
    subs["residx"].set_title("Residual")

    subs["dy"].set_title(r"$\delta$y")
    subs["predy"].set_title("GP prediction")
    subs["residy"].set_title("Residual")

    cb.set_label("mas")

    subs["eb"].set_title("E and B modes")
    subs["eb"].set_ylabel(r"$\xi_{E/B}$ (mas$^2$)")
    subs["eb"].set_xlabel(r"$\Delta \theta$ (arcmin)")

    return fig
class SingularMatrixError(pipeBase.AlgorithmError):
    """Raised if the Gaussian Processes fit raises a Singular Matrix linear
    algebra error.

    Parameters
    ----------
    nSources : `int`
        Number of sources used in the failed fit; recorded in the metadata.
    """

    def __init__(self, nSources) -> None:
        self._nSources = nSources
        super().__init__("The Gaussian Processes fit failed with a singular matrix linear algebra error.")

    @property
    def metadata(self):
        # Expose the source count so it is persisted with the error record.
        info = {"nSources": self._nSources}
        return info
class NotPositiveDefiniteMatrixError(pipeBase.AlgorithmError):
    """Raised if the Gaussian Processes fit raises a not positive definite
    linear algebra error.

    Parameters
    ----------
    nSources : `int`
        Number of sources used in the failed fit; recorded in the metadata.
    """

    def __init__(self, nSources) -> None:
        self._nSources = nSources
        super().__init__(
            "The Gaussian Processes fit failed with a not-positive-definite linear algebra error."
        )

    @property
    def metadata(self):
        # Expose the source count so it is persisted with the error record.
        info = {"nSources": self._nSources}
        return info
class GaussianProcessesTurbulenceFitConnections(
    pipeBase.PipelineTaskConnections,
    dimensions=("instrument", "visit", "healpix3"),
    defaultTemplates={
        "inputName": "gbdesHealpix3AstrometricFit",
    },
):
    """Connections for `GaussianProcessesTurbulenceFitTask`.

    By default the task runs per (instrument, visit, healpix3); when
    ``config.healpix`` is not `True` the dimensions are rewritten in
    ``__init__`` below.
    """

    inputWcs = pipeBase.connectionTypes.Input(
        doc=(
            "Per-healpix, per-visit world coordinate systems derived from the fitted model."
            " These catalogs only contain entries for detectors with an output, and use"
            " the detector id for the catalog id, sorted on id for fast lookups of a detector."
        ),
        name="{inputName}SkyWcsCatalog",
        storageClass="ExposureCatalog",
        dimensions=("instrument", "visit", "healpix3"),
    )
    inputPositions = pipeBase.connectionTypes.Input(
        doc=(
            "Catalog of sources used in fit, along with residuals in pixel coordinates and tangent "
            "plane coordinates and chisq values."
        ),
        name="{inputName}_fitStars",
        storageClass="ArrowAstropy",
        dimensions=("instrument", "healpix3", "physical_filter"),
        deferLoad=True,
    )
    outputWcs = pipeBase.connectionTypes.Output(
        doc=(
            "Per-visit world coordinate systems derived from the fitted model. These catalogs only contain "
            "entries for detectors with an output, and use the detector id for the catalog id, sorted on id "
            "for fast lookups of a detector."
        ),
        name="turbulenceCorrectedSkyWcsCatalog",
        storageClass="ExposureCatalog",
        dimensions=("instrument", "visit", "healpix3"),
    )
    hyperparameters = pipeBase.connectionTypes.Output(
        doc="Best fit hyperparameters for the Gaussian Processes fit.",
        name="turbulence_fit_hyperparameters",
        storageClass="ArrowAstropy",
        dimensions=("instrument", "visit", "healpix3"),
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        # When not using healpix-based regions, drop the healpix3 dimension
        # and rewrite the task/connection dimensions: tract/skymap-based when
        # config.healpix is False, visit-only when it is None.
        if not self.config.healpix:
            self.dimensions.remove("healpix3")
            if self.config.healpix is None:
                extra_dimensions = []
            else:
                extra_dimensions = ["tract", "skymap"]
            self.dimensions.update(extra_dimensions)
            self.inputWcs = dataclasses.replace(
                self.inputWcs, dimensions=["instrument", "visit"] + extra_dimensions
            )
            # NOTE(review): inputPositions gains a "band" dimension here while
            # the healpix-based declaration above uses "physical_filter" only
            # — confirm this asymmetry is intentional.
            self.inputPositions = dataclasses.replace(
                self.inputPositions, dimensions=["instrument", "band", "physical_filter"] + extra_dimensions
            )
            self.outputWcs = dataclasses.replace(
                self.outputWcs, dimensions=["instrument", "visit"] + extra_dimensions
            )
            self.hyperparameters = dataclasses.replace(
                self.hyperparameters, dimensions=["instrument", "visit"] + extra_dimensions
            )
class GaussianProcessesTurbulenceFitConfig(
    pipeBase.PipelineTaskConfig, pipelineConnections=GaussianProcessesTurbulenceFitConnections
):
    """Configuration for `GaussianProcessesTurbulenceFitTask`."""

    initKernel = pexConfig.Field(
        dtype=str,
        doc="The type of function that will be used to model spatial correlation.",
        default="15**2 * AnisotropicVonKarman(invLam=array([[1./0.8**2,0],[0,1./0.8**2]]))",
    )
    initAnisotropicCorrelationLength = pexConfig.ListField(
        dtype=float,
        doc=(
            "The initial parameters for fitting the anisotropic correlation length. p0[0] is equivalent of "
            "the isotropic correlation length in degrees, and p0[1]/p0[2] are ellipticity parameters and are "
            "mathematically equivalent to e1/e2 in weak-lensing. p0[1]/p0[2] must be in the range [-1,1], "
            "where 0 means the correlation is isotropic."
        ),
        default=[1, -0.2, -0.2],
    )
    correlationSeparationMin = pexConfig.Field(
        dtype=float,
        doc="Minimum distance separation in degrees in the computation of the 2-point correlation function.",
        default=0.0,
        optional=True,
    )
    correlationSeparationMax = pexConfig.Field(
        dtype=float,
        doc="Maximum distance separation in degrees in the computation of the 2-point correlation function.",
        default=0.3,
        optional=True,
    )
    maxTrainingPoints = pexConfig.Field(
        dtype=int,
        doc="Maximum number of points to use in the Gaussian Processes training.",
        default=10000,
    )
    pixelSize = pexConfig.Field(
        dtype=float,
        doc="Pixel size in arcseconds.",
        default=0.2,
    )
    healpix = pexConfig.Field(
        dtype=bool,
        doc="Use input WCS calculated over healpix-based region. If false, use tract-based WCS.",
        default=True,
        optional=True,
    )
    splineDegree = pexConfig.Field(
        dtype=int,
        doc="Degree of the spline expressing Gaussian Processes prediction.",
        default=4,
    )
    splineNNodes = pexConfig.Field(
        dtype=int,
        doc="Number of nodes to use for the spline expressing Gaussian Processes prediction.",
        default=30,
    )
    splineBuffer = pexConfig.Field(
        dtype=float,
        doc="Minimum distance in degrees to extend spline map outside the detector boundary.",
        default=0.1,
    )
class GaussianProcessesTurbulenceFitTask(pipeBase.PipelineTask):
    """Run Gaussian Processes on astrometric residuals with the assumption that
    they are due to atmospheric turbulence.

    The residuals from a previous astrometric fit are interpolated with an
    anisotropic Gaussian Processes model in tangent plane coordinates, and the
    resulting prediction is inserted into each detector's WCS as a spline
    correction between the tangent plane and IWC frames.
    """

    ConfigClass = GaussianProcessesTurbulenceFitConfig
    _DefaultName = "gaussianProcessesTurbulenceFit"

    def run(self, inputWcs, inputPositions):
        """Run Gaussian Processes on position residuals and subtract the fitted
        Gaussian Processes prediction from the WCS to account for atmospheric
        turbulence.

        Parameters
        ----------
        inputWcs : `lsst.afw.table.ExposureCatalog`
            Catalog with WCSs for each detector of the input exposure.
        inputPositions : `lsst.daf.butler.DeferredDatasetHandle`
            Deferred-load handle for the catalog of input positions with
            residuals to the current best fit (the connection is declared with
            ``deferLoad=True``); only the needed columns are read.

        Returns
        -------
        result : `lsst.pipe.base.Struct`
            ``outputWcs`` : `lsst.afw.table.ExposureCatalog`
                Catalog with WCS after inserting the correction for atmospheric
                turbulence.
            ``hyperparameters`` : `astropy.table.Table`
                Table of best-fit hyperparameters in x and y-directions.
        """
        # All rows of inputWcs belong to one visit, so the first row suffices.
        visit = inputWcs[0]["visit"]

        inputPositions = inputPositions.get(
            parameters={
                "columns": [
                    "xworld",
                    "yworld",
                    "xresw",
                    "yresw",
                    "exposureName",
                    "xpix",
                    "ypix",
                    "deviceName",
                    "clip",
                    "covTotalW_00",
                    "covTotalW_11",
                ]
            }
        )

        # Keep only unclipped sources belonging to this visit.
        visitPositions = inputPositions[
            (inputPositions["exposureName"] == str(visit)) & ~inputPositions["clip"]
        ]

        gpx, gpy, trainInd, testInd, hyperparameters = self.runGP(inputWcs, visitPositions)

        # Logs E/B-mode diagnostics; the optional figure return is unused here.
        self.evaluate(gpx, gpy, visitPositions, trainInd, testInd, inputWcs)

        wcsWithSpline = self.addGPToWcs(gpx, gpy, inputWcs)

        return pipeBase.Struct(outputWcs=wcsWithSpline, hyperparameters=hyperparameters)

    def runGP(self, inputWcs, positions):
        """Run Gaussian Processes in tangent plane coordinates.

        Parameters
        ----------
        inputWcs : `lsst.afw.table.ExposureCatalog`
            Catalog with WCSs for each detector of the input exposure.
        positions : `astropy.table.Table`
            Catalog of input positions with residuals to the current best fit.

        Returns
        -------
        gpx : `treegp.gp_interp.GPInterpolation`
            Gaussian Processes interpolator for x-direction residuals.
        gpy : `treegp.gp_interp.GPInterpolation`
            Gaussian Processes interpolator for y-direction residuals.
        trainInds : `numpy.ndarray`
            Array of indices for points used in training.
        testInds : `numpy.ndarray`
            Array of indices for points not used in training.
        hyperparameters : `astropy.table.Table`
            Table of best-fit hyperparameters in x and y-directions.

        Raises
        ------
        lsst.pipe.base.AnnotatedPartialOutputsError
            Raised (annotating `SingularMatrixError` or
            `NotPositiveDefiniteMatrixError`) if the linear algebra in the
            Gaussian Processes solve fails.
        """
        dx = positions["xresw"]
        dy = positions["yresw"]
        # Per-point uncertainties from the diagonal of the world-coordinate
        # covariance.
        dxErr = positions["covTotalW_00"] ** 0.5
        dyErr = positions["covTotalW_11"] ** 0.5

        # Get tangent plane coordinates for input points
        allTPCoords = np.zeros((len(positions), 2))
        for detector in inputWcs:
            detId = detector["id"]
            detWCS = detector.wcs
            detInd = positions["deviceName"].astype(int) == detId
            detectorSources = positions[detInd]

            # Despite the variable name, this mapping goes from pixel
            # coordinates to the tangent plane (IWC).
            tangentPlaneToSky = detWCS.getFrameDict().getMapping("PIXELS", "IWC")
            tangentPlaneCoords = tangentPlaneToSky.applyForward(
                np.array([detectorSources["xpix"], detectorSources["ypix"]])
            )
            allTPCoords[detInd] = tangentPlaneCoords.T

        # Choose a random subset for training (fixed seed for repeatability).
        rng = np.random.default_rng(1234)
        nPoints = len(allTPCoords)
        nTrain = min([nPoints, self.config.maxTrainingPoints])
        perm = rng.permutation(np.arange(nPoints))
        trainInds = perm[:nTrain]
        testInds = perm[nTrain:]

        # Solve Gaussian Processes in dx direction.
        gpx = treegp.GPInterpolation(
            kernel=self.config.initKernel,
            optimizer="anisotropic",
            normalize=True,
            nbins=21,
            min_sep=self.config.correlationSeparationMin,
            max_sep=self.config.correlationSeparationMax,
            p0=self.config.initAnisotropicCorrelationLength,
        )

        gpx.initialize(allTPCoords[trainInds], dx[trainInds], y_err=dxErr[trainInds])

        # Solve Gaussian Processes in dy direction.
        gpy = treegp.GPInterpolation(
            kernel=self.config.initKernel,
            optimizer="anisotropic",
            normalize=True,
            nbins=21,
            min_sep=self.config.correlationSeparationMin,
            max_sep=self.config.correlationSeparationMax,
            p0=self.config.initAnisotropicCorrelationLength,
        )

        gpy.initialize(allTPCoords[trainInds], dy[trainInds], y_err=dyErr[trainInds])

        # Translate known linear-algebra failure modes into task-specific
        # AlgorithmErrors so partial outputs can be annotated; anything else
        # propagates unchanged.
        try:
            gpx.solve()
            gpy.solve()
        except np.linalg.LinAlgError as e:
            if "Singular matrix" in str(e):
                error = pipeBase.AnnotatedPartialOutputsError.annotate(
                    SingularMatrixError(len(allTPCoords[trainInds])),
                    self,
                    log=self.log,
                )
                raise error from e
            elif "not positive definite" in str(e):
                error = pipeBase.AnnotatedPartialOutputsError.annotate(
                    NotPositiveDefiniteMatrixError(len(allTPCoords[trainInds])),
                    self,
                    log=self.log,
                )
                raise error from e
            else:
                raise

        # NOTE(review): relies on treegp private attributes
        # (_optimizer._results_robust); may break with treegp upgrades.
        hyperparameters = Table(
            {"x": np.array(gpx._optimizer._results_robust), "y": np.array(gpy._optimizer._results_robust)}
        )

        return gpx, gpy, trainInds, testInds, hyperparameters

    def predict(self, gpx, gpy, inputWcs, sourceCatalog):
        """Get the positions for sources after correction for atmospheric
        turbulence.

        Parameters
        ----------
        gpx : `treegp.gp_interp.GPInterpolation`
            Gaussian Processes interpolator for x-direction residuals.
        gpy : `treegp.gp_interp.GPInterpolation`
            Gaussian Processes interpolator for y-direction residuals.
        inputWcs : `lsst.afw.table.ExposureCatalog`
            Catalog with WCSs for each detector of the input exposure.
        sourceCatalog : `astropy.table.Table`
            Catalog of sources with ``detector``, ``x``, and ``y`` columns.

        Returns
        -------
        outCat : `astropy.table.Table`
            Catalog matching `sourceCatalog`, with `coord_ra` and `coord_dec`
            columns corrected for atmospheric turbulence.
        """
        correctedCoordinates = np.zeros((len(sourceCatalog), 2))
        prediction = np.zeros((len(sourceCatalog), 2))
        allTPCoords = np.zeros((len(sourceCatalog), 2))
        allCoords = np.zeros((len(sourceCatalog), 2))

        for detector in inputWcs:
            detId = detector["id"]
            detWCS = detector.wcs
            detInd = sourceCatalog["detector"] == detId
            detectorSources = sourceCatalog[detInd]

            # The Gaussian Processes is fit on the tangent plane coordinates,
            # so we must transform points to the tangent plane, then subtract
            # the effect of atmospheric turbulence, then transform the tangent
            # plane coordinates to sky coordinates.
            initialSky = detWCS.pixelToSkyArray(detectorSources["x"], detectorSources["y"])
            allCoords[detInd] = np.array(initialSky).T
            tangentPlaneToSky = detWCS.getFrameDict().getMapping("IWC", "SKY")
            tangentPlaneCoords = tangentPlaneToSky.applyInverse(np.array(initialSky)).T
            allTPCoords[detInd] = tangentPlaneCoords

            # GP predictions are in mas; convert to degrees to match the
            # tangent plane coordinates before subtracting.
            xPred = gpx.predict(tangentPlaneCoords)
            xPrediction = (xPred * u.mas).to(u.degree)
            yPred = gpy.predict(tangentPlaneCoords)
            yPrediction = (yPred * u.mas).to(u.degree)
            prediction[detInd, 0] = xPred
            prediction[detInd, 1] = yPred

            correctedTangentPlaneX = tangentPlaneCoords[:, 0] * u.degree - xPrediction
            correctedTangentPlaneY = tangentPlaneCoords[:, 1] * u.degree - yPrediction
            correctedSkyCoords = tangentPlaneToSky.applyForward(
                np.array([correctedTangentPlaneX, correctedTangentPlaneY])
            )
            # The mapping outputs radians; store degrees in the catalog.
            correctedCoordinates[detInd] = ((correctedSkyCoords.T) * u.radian).to(u.degree).value

        outCat = sourceCatalog.copy()
        outCat["coord_ra"] = correctedCoordinates[:, 0]
        outCat["coord_dec"] = correctedCoordinates[:, 1]

        return outCat

    def addGPToWcs(self, gpx, gpy, inputWcs):
        """Convert Gaussian Processes prediction to a spline, and insert it in
        the WCS for each detector.

        Parameters
        ----------
        gpx : `treegp.gp_interp.GPInterpolation`
            Gaussian Processes interpolator for x-direction residuals.
        gpy : `treegp.gp_interp.GPInterpolation`
            Gaussian Processes interpolator for y-direction residuals.
        inputWcs : `lsst.afw.table.ExposureCatalog`
            Catalog with WCSs for each detector of the input exposure.

        Returns
        -------
        catalog : `lsst.afw.table.ExposureCatalog`
            Exposure catalog with the WCS set to the existing WCS plus the
            gaussian processes fit.
        """
        pixelFrame = ast.Frame(2, "Domain=PIXELS")
        tpFrame = ast.Frame(2, "Domain=TP")
        iwcFrame = ast.Frame(2, "Domain=IWC")

        # Set up the schema for the output catalogs
        schema = lsst.afw.table.ExposureTable.makeMinimalSchema()
        schema.addField("visit", type="L", doc="Visit number")

        catalog = lsst.afw.table.ExposureCatalog(schema)
        catalog.resize(len(inputWcs))
        catalog["visit"] = inputWcs["visit"]

        for d, detectorRow in enumerate(inputWcs):
            detId = detectorRow.getId()
            catalog[d].setId(detId)

            # Make a grid of points in tangent plane coordinates.
            bbox = detectorRow.getBBox()
            catalog[d].setBBox(bbox)
            corners = np.array(
                [
                    [bbox.getBeginX(), bbox.getEndX(), bbox.getEndX(), bbox.getBeginX()],
                    [bbox.getBeginY(), bbox.getBeginY(), bbox.getEndY(), bbox.getEndY()],
                ]
            ).astype(float)

            initWcsRow = inputWcs.find(detId)
            pixToTPMap = initWcsRow.wcs.getFrameDict().getMapping("PIXELS", "IWC")
            tpToSky = initWcsRow.wcs.getFrameDict().getMapping("IWC", "SKY")
            skyFrame = initWcsRow.wcs.getFrameDict().getFrame("SKY")
            tangentPlaneX, tangentPlaneY = pixToTPMap.applyForward(corners)

            # Spline nodes cover the detector plus a configurable buffer so
            # the spline is defined slightly outside the detector boundary.
            xs = np.linspace(
                tangentPlaneX.min() - self.config.splineBuffer,
                tangentPlaneX.max() + self.config.splineBuffer,
                self.config.splineNNodes,
            )
            ys = np.linspace(
                tangentPlaneY.min() - self.config.splineBuffer,
                tangentPlaneY.max() + self.config.splineBuffer,
                self.config.splineNNodes,
            )

            xx, yy = np.meshgrid(xs, ys)
            inArray = np.array([xx.ravel(), yy.ravel()]).T

            # Get Gaussian Processes prediction on grid and fit spline to it.
            # The spline stores the corrected coordinate (grid minus the GP
            # prediction converted from mas to degrees).
            xPred = (gpx.predict(inArray) * u.mas).to(u.degree).value

            splineX = RectBivariateSpline(
                xs,
                ys,
                (xx - xPred.reshape(self.config.splineNNodes, self.config.splineNNodes)).T,
                s=0,
                kx=self.config.splineDegree - 1,
                ky=self.config.splineDegree - 1,
            )
            (tx, ty) = splineX.get_knots()
            coeffsX = splineX.get_coeffs()

            yPred = (gpy.predict(inArray) * u.mas).to(u.degree).value
            splineY = RectBivariateSpline(
                xs,
                ys,
                (yy - yPred.reshape(self.config.splineNNodes, self.config.splineNNodes)).T,
                s=0,
                kx=self.config.splineDegree - 1,
                ky=self.config.splineDegree - 1,
            )
            coeffsY = splineY.get_coeffs()

            # Turn spline into AST object and insert in new WCS.
            splineMap = ast.SplineMap(
                self.config.splineDegree,
                self.config.splineDegree,
                self.config.splineNNodes,
                self.config.splineNNodes,
                tx,
                ty,
                coeffsX,
                coeffsY,
                options="OutUnit=1",
            )

            # Chain PIXELS -> TP (original pixel-to-tangent-plane map),
            # TP -> IWC (the turbulence spline), IWC -> SKY (original map).
            newFrameDict = ast.FrameDict(pixelFrame)
            newFrameDict.addFrame("PIXELS", pixToTPMap, tpFrame)
            newFrameDict.addFrame("TP", splineMap, iwcFrame)
            newFrameDict.addFrame("IWC", tpToSky, skyFrame)
            outWcs = afwgeom.SkyWcs(newFrameDict)
            catalog[d].setWcs(outWcs)

        return catalog

    def evaluate(self, gpx, gpy, positions, trainInd, testInd, inputWcs, makeValidationPlot=False):
        """Calculate E and B-modes in the 2-point correlation function before
        and after correcting for atmospheric turbulence, and validate
        prediction on some of the test data.

        Parameters
        ----------
        gpx : `treegp.gp_interp.GPInterpolation`
            Gaussian Processes interpolator for x-direction residuals.
        gpy : `treegp.gp_interp.GPInterpolation`
            Gaussian Processes interpolator for y-direction residuals.
        positions : `astropy.table.Table`
            Catalog of input positions with residuals to the best fit.
        trainInd : `numpy.ndarray`
            Array of indices for points used in training.
        testInd : `numpy.ndarray`
            Array of indices for points not used in training.
        inputWcs : `lsst.afw.table.ExposureCatalog`
            Catalog with WCSs for each detector of the input exposure.
        makeValidationPlot : `bool`, optional
            Whether to make a plot showing the prediction on the validation
            data.

        Returns
        -------
        fig : `matplotlib.pyplot.Figure` or `None`
            Validation figure if ``makeValidationPlot`` is set; otherwise
            `None`.
        """
        dx = positions["xresw"]
        dy = positions["yresw"]

        # Get tangent plane coordinates for input points
        tpCoords = np.zeros((len(positions), 2))
        for detector in inputWcs:
            detId = detector["id"]
            detWCS = detector.wcs
            detInd = positions["deviceName"].astype(int) == detId
            detectorSources = positions[detInd]

            # Despite the variable name, this mapping goes from pixel
            # coordinates to the tangent plane (IWC).
            tangentPlaneToSky = detWCS.getFrameDict().getMapping("PIXELS", "IWC")
            tangentPlaneCoords = tangentPlaneToSky.applyForward(
                np.array([detectorSources["xpix"], detectorSources["ypix"]])
            )
            tpCoords[detInd] = tangentPlaneCoords.T

        # Calculate E/B modes before and after Gaussian Processes correction.
        xPredict = gpx.predict(tpCoords[trainInd])
        yPredict = gpy.predict(tpCoords[trainInd])
        xie, xib, logr = treegp.comp_eb_treecorr(
            tpCoords[trainInd, 0],
            tpCoords[trainInd, 1],
            dx[trainInd],
            dy[trainInd],
            rmin=20 / 3600,
            rmax=0.6,
            dlogr=0.3,
        )
        # Average the correlation over separations of 0-15 (exp(logr) units).
        start, stop = np.searchsorted(np.exp(logr), [0, 15])
        meanE = np.mean(xie[start:stop])
        meanB = np.mean(xib[start:stop])
        self.log.info(
            "Original average correlation level over 0-15 arcminutes: E-mode=%0.2f, B-mode=%0.2f",
            meanE,
            meanB,
        )

        xie_resid, xib_resid, logr = treegp.comp_eb_treecorr(
            tpCoords[trainInd, 0],
            tpCoords[trainInd, 1],
            dx[trainInd] - xPredict,
            dy[trainInd] - yPredict,
            rmin=20 / 3600,
            rmax=0.6,
            dlogr=0.3,
        )
        start, stop = np.searchsorted(np.exp(logr), [0, 15])
        meanE_resid = np.mean(xie_resid[start:stop])
        meanB_resid = np.mean(xib_resid[start:stop])
        self.log.info(
            "Correlation level after GP correction over 0-15 arcminutes: E-mode=%0.2f, B-mode=%0.2f",
            meanE_resid,
            meanB_resid,
        )

        # Predict on all test data and make a plot.
        if makeValidationPlot:
            # Was a leftover debug print; log at debug level instead.
            self.log.debug("Number of validation points: %d", len(testInd))
            # Cap the validation sample and predict in chunks so each GP
            # prediction call stays small.
            testInd = testInd[:50000]
            chunkSize = 5000
            nChunks = np.ceil(len(testInd) / chunkSize).astype(int)
            xPredict = np.zeros(len(testInd))
            yPredict = np.zeros(len(testInd))
            for i in range(nChunks):
                ind = testInd[chunkSize * i : chunkSize * (i + 1)]
                xPredict[chunkSize * i : chunkSize * (i + 1)] = gpx.predict(tpCoords[ind])
                yPredict[chunkSize * i : chunkSize * (i + 1)] = gpy.predict(tpCoords[ind])
            fig = plot_visit(
                tpCoords[testInd, 0], tpCoords[testInd, 1], dx[testInd], dy[testInd], xPredict, yPredict
            )
            return fig