Coverage for python/lsst/drp/tasks/gbdesAstrometricFit.py: 10%
783 statements
coverage.py v7.5.1, created at 2024-05-15 02:55 -0700
1# This file is part of drp_tasks.
2#
3# LSST Data Management System
4# This product includes software developed by the
5# LSST Project (http://www.lsst.org/).
6# See COPYRIGHT file at the top of the source tree.
7#
8# This program is free software: you can redistribute it and/or modify
9# it under the terms of the GNU General Public License as published by
10# the Free Software Foundation, either version 3 of the License, or
11# (at your option) any later version.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the LSST License Statement and
19# the GNU General Public License along with this program. If not,
20# see <https://www.lsstcorp.org/LegalNotices/>.
21#
22import re
24import astropy.coordinates
25import astropy.time
26import astropy.units as u
27import astshim
28import lsst.afw.geom as afwgeom
29import lsst.afw.table
30import lsst.geom
31import lsst.pex.config as pexConfig
32import lsst.pipe.base as pipeBase
33import lsst.sphgeom
34import numpy as np
35import wcsfit
36import yaml
37from lsst.meas.algorithms import (
38 LoadReferenceObjectsConfig,
39 ReferenceObjectLoader,
40 ReferenceSourceSelectorTask,
41)
42from lsst.meas.algorithms.sourceSelector import sourceSelectorRegistry
43from sklearn.cluster import AgglomerativeClustering
44from smatch.matcher import Matcher
46__all__ = [
47 "GbdesAstrometricFitConnections",
48 "GbdesAstrometricFitConfig",
49 "GbdesAstrometricFitTask",
50 "GbdesGlobalAstrometricFitConnections",
51 "GbdesGlobalAstrometricFitConfig",
52 "GbdesGlobalAstrometricFitTask",
53]
56def _make_ref_covariance_matrix(
57 refCat, inputUnit=u.radian, outputCoordUnit=u.marcsec, outputPMUnit=u.marcsec, version=1
58):
59 """Make a covariance matrix for the reference catalog including proper
60 motion and parallax.
62 The output is flattened to one dimension to match the format expected by
63 `gbdes`.
65 Parameters
66 ----------
67 refCat : `lsst.afw.table.SimpleCatalog`
68 Catalog including proper motion and parallax measurements.
69 inputUnit : `astropy.units.core.Unit`
70 Units of the input catalog.
71 outputCoordUnit : `astropy.units.core.Unit`
72 Units required for the coordinates in the covariance matrix. `gbdes`
73 expects milliarcseconds.
74 outputPMUnit : `astropy.units.core.Unit`
75 Units required for the proper motion/parallax in the covariance matrix.
76 `gbdes` expects milliarcseconds.
77 version : `int`
78 Version of the reference catalog. Version 2 includes covariance
79 measurements.
80 Returns
81 -------
82 cov : `list` [`float`]
83 Flattened output covariance matrix.
84 """
85 cov = np.zeros((len(refCat), 25))
86 if version == 1:
87 # Here is the standard ordering of components in the cov matrix,
88 # to match the PM enumeration in the C++ code of the gbdes package's Match.
89 # Each tuple gives: the array holding the 1d error, the string used in
90 # the Gaia column names for this component, and the ordering of this
91 # component in the Gaia catalog. The ordering of the tuples themselves is
92 # the order we want in our cov matrix.
93 raErr = (refCat["coord_raErr"]).to(outputCoordUnit).to_value()
94 decErr = (refCat["coord_decErr"]).to(outputCoordUnit).to_value()
95 raPMErr = (refCat["pm_raErr"]).to(outputPMUnit).to_value()
96 decPMErr = (refCat["pm_decErr"]).to(outputPMUnit).to_value()
97 parallaxErr = (refCat["parallaxErr"]).to(outputPMUnit).to_value()
98 stdOrder = (
99 (raErr, "ra", 0),
100 (decErr, "dec", 1),
101 (raPMErr, "pmra", 3),
102 (decPMErr, "pmdec", 4),
103 (parallaxErr, "parallax", 2),
104 )
106 k = 0
107 for i, pr1 in enumerate(stdOrder):
108 for j, pr2 in enumerate(stdOrder):
109 if pr1[2] < pr2[2]:
110 cov[:, k] = 0
111 elif pr1[2] > pr2[2]:
112 cov[:, k] = 0
113 else:
114 # diagonal element
115 cov[:, k] = pr1[0] * pr2[0]
116 k = k + 1
118 elif version == 2:
119 positionParameters = ["coord_ra", "coord_dec", "pm_ra", "pm_dec", "parallax"]
120 units = [outputCoordUnit, outputCoordUnit, outputPMUnit, outputPMUnit, outputPMUnit]
121 k = 0
122 for i, pi in enumerate(positionParameters):
123 for j, pj in enumerate(positionParameters):
124 if i == j:
125 cov[:, k] = ((refCat[f"{pi}Err"].value) ** 2 * inputUnit**2).to(units[j] * units[j]).value
126 elif i > j:
127 cov[:, k] = (refCat[f"{pj}_{pi}_Cov"].value * inputUnit**2).to_value(units[i] * units[j])
128 else:
129 cov[:, k] = (refCat[f"{pi}_{pj}_Cov"].value * inputUnit**2).to_value(units[i] * units[j])
130 k += 1
131 return cov
134def _nCoeffsFromDegree(degree):
135 """Get the number of coefficients for a polynomial of a certain degree with
136 two variables.
138 This uses the general formula that the number of coefficients for a
139 polynomial of degree d with n variables is (n + d) choose d, where in this
140 case n is fixed to 2.
142 Parameters
143 ----------
144 degree : `int`
145 Degree of the polynomial in question.
147 Returns
148 -------
149 nCoeffs : `int`
150 Number of coefficients for the polynomial in question.
151 """
152 nCoeffs = int((degree + 2) * (degree + 1) / 2)
153 return nCoeffs
156def _degreeFromNCoeffs(nCoeffs):
157 """Get the degree for a polynomial with two variables and a certain number
158 of coefficients.
160 This is done by applying the quadratic formula to the
161 formula for calculating the number of coefficients of the polynomial.
163 Parameters
164 ----------
165 nCoeffs : `int`
166 Number of coefficients for the polynomial in question.
168 Returns
169 -------
170 degree : `int`
171 Degree of the polynomial in question.
172 """
173 degree = int(-1.5 + 0.5 * (1 + 8 * nCoeffs) ** 0.5)
174 return degree
177def _convert_to_ast_polymap_coefficients(coefficients):
178 """Convert vector of polynomial coefficients from the format used in
179 `gbdes` into AST format (see Poly2d::vectorIndex(i, j) in
180 gbdes/gbutil/src/Poly2d.cpp). This assumes two input and two output
181 coordinates.
183 Parameters
184 ----------
185 coefficients : `list`
186 Coefficients of the polynomials.
190 Returns
191 -------
192 astPoly : `astshim.PolyMap`
193 AST polynomial mapping constructed from the input coefficients.
194 """
195 polyArray = np.zeros((len(coefficients), 4))
196 N = len(coefficients) / 2
197 degree = _degreeFromNCoeffs(N)
199 for outVar in [1, 2]:
200 for i in range(degree + 1):
201 for j in range(degree + 1):
202 if (i + j) > degree:
203 continue
204 vectorIndex = int(((i + j) * (i + j + 1)) / 2 + j + N * (outVar - 1))
205 polyArray[vectorIndex, 0] = coefficients[vectorIndex]
206 polyArray[vectorIndex, 1] = outVar
207 polyArray[vectorIndex, 2] = i
208 polyArray[vectorIndex, 3] = j
210 astPoly = astshim.PolyMap(polyArray, 2, options="IterInverse=1,NIterInverse=10,TolInverse=1e-7")
211 return astPoly
214class GbdesAstrometricFitConnections(
215 pipeBase.PipelineTaskConnections, dimensions=("skymap", "tract", "instrument", "physical_filter")
216):
217 """Middleware input/output connections for task data."""
219 inputCatalogRefs = pipeBase.connectionTypes.Input(
220 doc="Source table in parquet format, per visit.",
221 name="preSourceTable_visit",
222 storageClass="DataFrame",
223 dimensions=("instrument", "visit"),
224 deferLoad=True,
225 multiple=True,
226 )
227 inputVisitSummaries = pipeBase.connectionTypes.Input(
228 doc=(
229 "Per-visit consolidated exposure metadata built from calexps. "
230 "These catalogs use detector id for the id and must be sorted for "
231 "fast lookups of a detector."
232 ),
233 name="visitSummary",
234 storageClass="ExposureCatalog",
235 dimensions=("instrument", "visit"),
236 multiple=True,
237 )
238 referenceCatalog = pipeBase.connectionTypes.PrerequisiteInput(
239 doc="The astrometry reference catalog to match to loaded input catalog sources.",
240 name="gaia_dr3_20230707",
241 storageClass="SimpleCatalog",
242 dimensions=("skypix",),
243 deferLoad=True,
244 multiple=True,
245 )
246 inputCameraModel = pipeBase.connectionTypes.PrerequisiteInput(
247 doc="Camera parameters to use for 'device' part of model",
248 name="gbdesAstrometricFit_cameraModel",
249 storageClass="ArrowNumpyDict",
250 dimensions=("instrument", "physical_filter"),
251 )
252 outputWcs = pipeBase.connectionTypes.Output(
253 doc=(
254 "Per-tract, per-visit world coordinate systems derived from the fitted model."
255 " These catalogs only contain entries for detectors with an output, and use"
256 " the detector id for the catalog id, sorted on id for fast lookups of a detector."
257 ),
258 name="gbdesAstrometricFitSkyWcsCatalog",
259 storageClass="ExposureCatalog",
260 dimensions=("instrument", "visit", "skymap", "tract"),
261 multiple=True,
262 )
263 outputCatalog = pipeBase.connectionTypes.Output(
264 doc=(
265 "Catalog of sources used in fit, along with residuals in pixel coordinates and tangent "
266 "plane coordinates and chisq values."
267 ),
268 name="gbdesAstrometricFit_fitStars",
269 storageClass="ArrowNumpyDict",
270 dimensions=("instrument", "skymap", "tract", "physical_filter"),
271 )
272 starCatalog = pipeBase.connectionTypes.Output(
273 doc=(
274 "Catalog of best-fit object positions. Also includes the fit proper motion and parallax if "
275 "fitProperMotion is True."
276 ),
277 name="gbdesAstrometricFit_starCatalog",
278 storageClass="ArrowNumpyDict",
279 dimensions=("instrument", "skymap", "tract", "physical_filter"),
280 )
281 modelParams = pipeBase.connectionTypes.Output(
282 doc="WCS parameters and covariance.",
283 name="gbdesAstrometricFit_modelParams",
284 storageClass="ArrowNumpyDict",
285 dimensions=("instrument", "skymap", "tract", "physical_filter"),
286 )
287 outputCameraModel = pipeBase.connectionTypes.Output(
288 doc="Camera parameters to use for 'device' part of model",
289 name="gbdesAstrometricFit_cameraModel",
290 storageClass="ArrowNumpyDict",
291 dimensions=("instrument", "physical_filter"),
292 )
294 def getSpatialBoundsConnections(self):
295 return ("inputVisitSummaries",)
297 def __init__(self, *, config=None):
298 super().__init__(config=config)
300 if not self.config.saveModelParams:
301 self.outputs.remove("modelParams")
302 if not self.config.useInputCameraModel:
303 self.prerequisiteInputs.remove("inputCameraModel")
304 if not self.config.saveCameraModel:
305 self.outputs.remove("outputCameraModel")
308class GbdesAstrometricFitConfig(
309 pipeBase.PipelineTaskConfig, pipelineConnections=GbdesAstrometricFitConnections
310):
311 """Configuration for GbdesAstrometricFitTask"""
313 sourceSelector = sourceSelectorRegistry.makeField(
314 doc="How to select sources for cross-matching.", default="science"
315 )
316 referenceSelector = pexConfig.ConfigurableField(
317 target=ReferenceSourceSelectorTask,
318 doc="How to down-select the loaded astrometry reference catalog.",
319 )
320 referenceFilter = pexConfig.Field(
321 dtype=str,
322 doc="Name of filter to load from reference catalog. This is a required argument, although the values"
323 "returned are not used.",
324 default="phot_g_mean",
325 )
326 applyRefCatProperMotion = pexConfig.Field(
327 dtype=bool,
328 doc="Apply proper motion to shift reference catalog to epoch of observations.",
329 default=True,
330 )
331 matchRadius = pexConfig.Field(
332 doc="Matching tolerance between associated objects (arcseconds).", dtype=float, default=1.0
333 )
334 minMatches = pexConfig.Field(
335 doc="Number of matches required to keep a source object.", dtype=int, default=2
336 )
337 allowSelfMatches = pexConfig.Field(
338 doc="Allow multiple sources from the same visit to be associated with the same object.",
339 dtype=bool,
340 default=False,
341 )
342 sourceFluxType = pexConfig.Field(
343 dtype=str,
344 doc="Source flux field to use in source selection and to get fluxes from the catalog.",
345 default="apFlux_12_0",
346 )
347 systematicError = pexConfig.Field(
348 dtype=float,
349 doc=(
350 "Systematic error padding added in quadrature for the science catalogs (marcsec). The default"
351 "value is equivalent to 0.02 pixels for HSC."
352 ),
353 default=0.0034,
354 )
355 referenceSystematicError = pexConfig.Field(
356 dtype=float,
357 doc="Systematic error padding added in quadrature for the reference catalog (marcsec).",
358 default=0.0,
359 )
360 modelComponents = pexConfig.ListField(
361 dtype=str,
362 doc=(
363 "List of mappings to apply to transform from pixels to sky, in order of their application."
364 "Supported options are 'INSTRUMENT/DEVICE' and 'EXPOSURE'."
365 ),
366 default=["INSTRUMENT/DEVICE", "EXPOSURE"],
367 )
368 deviceModel = pexConfig.ListField(
369 dtype=str,
370 doc=(
371 "List of mappings to apply to transform from detector pixels to intermediate frame. Map names"
372 "should match the format 'BAND/DEVICE/<map name>'."
373 ),
374 default=["BAND/DEVICE/poly"],
375 )
376 exposureModel = pexConfig.ListField(
377 dtype=str,
378 doc=(
379 "List of mappings to apply to transform from intermediate frame to sky coordinates. Map names"
380 "should match the format 'EXPOSURE/<map name>'."
381 ),
382 default=["EXPOSURE/poly"],
383 )
384 devicePolyOrder = pexConfig.Field(dtype=int, doc="Order of device polynomial model.", default=4)
385 exposurePolyOrder = pexConfig.Field(dtype=int, doc="Order of exposure polynomial model.", default=6)
386 fitProperMotion = pexConfig.Field(dtype=bool, doc="Fit the proper motions of the objects.", default=False)
387 excludeNonPMObjects = pexConfig.Field(
388 dtype=bool, doc="Exclude reference objects without proper motion/parallax information.", default=True
389 )
390 fitReserveFraction = pexConfig.Field(
391 dtype=float, default=0.2, doc="Fraction of objects to reserve from fit for validation."
392 )
393 fitReserveRandomSeed = pexConfig.Field(
394 dtype=int,
395 doc="Set the random seed for selecting data points to reserve from the fit for validation.",
396 default=1234,
397 )
398 saveModelParams = pexConfig.Field(
399 dtype=bool,
400 doc=(
401 "Save the parameters and covariance of the WCS model. Default to "
402 "false because this can be very large."
403 ),
404 default=False,
405 )
406 useInputCameraModel = pexConfig.Field(
407 dtype=bool,
408 doc=(
409 "Use a preexisting model for the 'device' part of the model. When true, the device part of the"
410 " model will be held fixed in the fitting process."
411 ),
412 default=False,
413 )
414 saveCameraModel = pexConfig.Field(
415 dtype=bool,
416 doc="Save the 'device' part of the model to be used as input in future runs.",
417 default=False,
418 )
420 def setDefaults(self):
421 # Use only stars because aperture fluxes of galaxies are biased and
422 # depend on seeing.
423 self.sourceSelector["science"].doUnresolved = True
424 self.sourceSelector["science"].unresolved.name = "sizeExtendedness"
426 # Use only isolated sources.
427 self.sourceSelector["science"].doIsolated = True
428 self.sourceSelector["science"].isolated.parentName = "parentSourceId"
429 self.sourceSelector["science"].isolated.nChildName = "deblend_nChild"
430 # Do not use either flux or centroid measurements with flags,
431 # chosen from the usual QA flags for stars.
432 self.sourceSelector["science"].doFlags = True
433 badFlags = [
434 "pixelFlags_edge",
435 "pixelFlags_saturated",
436 "pixelFlags_interpolatedCenter",
437 "pixelFlags_interpolated",
438 "pixelFlags_crCenter",
439 "pixelFlags_bad",
440 "hsmPsfMoments_flag",
441 f"{self.sourceFluxType}_flag",
442 ]
443 self.sourceSelector["science"].flags.bad = badFlags
445 # Use only primary sources.
446 self.sourceSelector["science"].doRequirePrimary = True
448 def validate(self):
449 super().validate()
451 # Check if all components of the device and exposure models are
452 # supported.
453 for component in self.deviceModel:
454 if not (("poly" in component.lower()) or ("identity" in component.lower())):
455 raise pexConfig.FieldValidationError(
456 GbdesAstrometricFitConfig.deviceModel,
457 self,
458 f"deviceModel component {component} is not supported.",
459 )
461 for component in self.exposureModel:
462 if not (("poly" in component.lower()) or ("identity" in component.lower())):
463 raise pexConfig.FieldValidationError(
464 GbdesAstrometricFitConfig.exposureModel,
465 self,
466 f"exposureModel component {component} is not supported.",
467 )
469 if self.saveCameraModel and self.useInputCameraModel:
470 raise pexConfig.FieldValidationError(
471 GbdesAstrometricFitConfig.saveCameraModel,
472 self,
473 "saveCameraModel and useInputCameraModel cannot both be true.",
474 )
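# A minimal configuration-override sketch (hypothetical values) for a pipeline
# config file using the fields defined above:
#
#     config.devicePolyOrder = 5
#     config.exposurePolyOrder = 7
#     config.fitProperMotion = True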
477class GbdesAstrometricFitTask(pipeBase.PipelineTask):
478 """Calibrate the WCS across multiple visits of the same field using the
479 GBDES package.
480 """
482 ConfigClass = GbdesAstrometricFitConfig
483 _DefaultName = "gbdesAstrometricFit"
485 def __init__(self, **kwargs):
486 super().__init__(**kwargs)
487 self.makeSubtask("sourceSelector")
488 self.makeSubtask("referenceSelector")
490 def runQuantum(self, butlerQC, inputRefs, outputRefs):
491 # We override runQuantum to set up the refObjLoaders
492 inputs = butlerQC.get(inputRefs)
494 instrumentName = butlerQC.quantum.dataId["instrument"]
496 # Ensure the inputs are in a consistent and deterministic order
497 inputCatVisits = np.array([inputCat.dataId["visit"] for inputCat in inputs["inputCatalogRefs"]])
498 inputs["inputCatalogRefs"] = [inputs["inputCatalogRefs"][v] for v in inputCatVisits.argsort()]
499 inputSumVisits = np.array([inputSum[0]["visit"] for inputSum in inputs["inputVisitSummaries"]])
500 inputs["inputVisitSummaries"] = [inputs["inputVisitSummaries"][v] for v in inputSumVisits.argsort()]
501 inputRefHtm7s = np.array([inputRefCat.dataId["htm7"] for inputRefCat in inputRefs.referenceCatalog])
502 inputRefCatRefs = [inputRefs.referenceCatalog[htm7] for htm7 in inputRefHtm7s.argsort()]
503 inputRefCats = np.array([inputRefCat.dataId["htm7"] for inputRefCat in inputs["referenceCatalog"]])
504 inputs["referenceCatalog"] = [inputs["referenceCatalog"][v] for v in inputRefCats.argsort()]
506 refConfig = LoadReferenceObjectsConfig()
507 if self.config.applyRefCatProperMotion:
508 refConfig.requireProperMotion = True
509 refObjectLoader = ReferenceObjectLoader(
510 dataIds=[ref.datasetRef.dataId for ref in inputRefCatRefs],
511 refCats=inputs.pop("referenceCatalog"),
512 config=refConfig,
513 log=self.log,
514 )
516 output = self.run(**inputs, instrumentName=instrumentName, refObjectLoader=refObjectLoader)
518 wcsOutputRefDict = {outWcsRef.dataId["visit"]: outWcsRef for outWcsRef in outputRefs.outputWcs}
519 for visit, outputWcs in output.outputWcss.items():
520 butlerQC.put(outputWcs, wcsOutputRefDict[visit])
521 butlerQC.put(output.outputCatalog, outputRefs.outputCatalog)
522 butlerQC.put(output.starCatalog, outputRefs.starCatalog)
523 if self.config.saveModelParams:
524 butlerQC.put(output.modelParams, outputRefs.modelParams)
525 if self.config.saveCameraModel:
526 butlerQC.put(output.cameraModelParams, outputRefs.outputCameraModel)
528 def run(
529 self,
530 inputCatalogRefs,
531 inputVisitSummaries,
532 instrumentName="",
533 refEpoch=None,
534 refObjectLoader=None,
535 inputCameraModel=None,
536 ):
537 """Run the WCS fit for a given set of visits
539 Parameters
540 ----------
541 inputCatalogRefs : `list` [`DeferredDatasetHandle`]
542 List of handles pointing to visit-level source
543 tables.
544 inputVisitSummaries : `list` [`lsst.afw.table.ExposureCatalog`]
545 List of catalogs with per-detector summary information.
546 instrumentName : `str`, optional
547 Name of the instrument used. This is only used for labelling.
548 refEpoch : `float`, optional
549 Epoch of the reference objects in MJD.
550 refObjectLoader : instance of
551 `lsst.meas.algorithms.loadReferenceObjects.ReferenceObjectLoader`
552 Reference object loader instance.
553 inputCameraModel : `dict` [`str`, `np.ndarray`], optional
554 Parameters to use for the device part of the model.
556 Returns
557 -------
558 result : `lsst.pipe.base.Struct`
559 ``outputWcss`` : `list` [`lsst.afw.table.ExposureCatalog`]
560 List of exposure catalogs (one per visit) with the WCS for each
561 detector set by the new fitted WCS.
562 ``fitModel`` : `wcsfit.WCSFit`
563 Model-fitting object with final model parameters.
564 ``outputCatalog`` : `pyarrow.Table`
565 Catalog with fit residuals of all sources used.
566 ``starCatalog`` : `pyarrow.Table`
567 Catalog with best-fit positions of the objects fit.
568 ``modelParams`` : `dict`
569 Parameters and covariance of the best-fit WCS model.
570 ``cameraModelParams`` : `dict` [`str`, `np.ndarray`]
571 Parameters of the device part of the model, in the format
572 needed as input for future runs.
573 """
574 self.log.info("Gather instrument, exposure, and field info")
575 # Set up an instrument object
576 instrument = wcsfit.Instrument(instrumentName)
578 # Get RA, Dec, MJD, etc., for the input visits
579 exposureInfo, exposuresHelper, extensionInfo = self._get_exposure_info(
580 inputVisitSummaries, instrument
581 )
583 # Get information about the extent of the input visits
584 fields, fieldCenter, fieldRadius = self._prep_sky(inputVisitSummaries, exposureInfo.medianEpoch)
586 self.log.info("Load catalogs and associate sources")
587 # Set up class to associate sources into matches using a
588 # friends-of-friends algorithm
589 associations = wcsfit.FoFClass(
590 fields,
591 [instrument],
592 exposuresHelper,
593 [fieldRadius.asDegrees()],
594 (self.config.matchRadius * u.arcsec).to(u.degree).value,
595 )
597 # Add the reference catalog to the associator
598 medianEpoch = astropy.time.Time(exposureInfo.medianEpoch, format="decimalyear").mjd
599 refObjects, refCovariance = self._load_refcat(
600 refObjectLoader,
601 extensionInfo,
602 epoch=medianEpoch,
603 center=fieldCenter,
604 radius=fieldRadius,
605 associations=associations,
606 )
608 # Add the science catalogs and associate new sources as they are added
609 sourceIndices, usedColumns = self._load_catalogs_and_associate(
610 associations, inputCatalogRefs, extensionInfo
611 )
612 self._check_degeneracies(associations, extensionInfo)
614 self.log.info("Fit the WCSs")
615 # Set up a YAML-type string using the config variables and a sample
616 # visit
617 inputYaml, mapTemplate = self.make_yaml(
618 inputVisitSummaries[0],
619 inputCameraModel=(inputCameraModel if self.config.useInputCameraModel else None),
620 )
622 # Set the verbosity level for WCSFit from the task log level.
623 # TODO: DM-36850, Add lsst.log to gbdes so that log messages are
624 # properly propagated.
625 loglevel = self.log.getEffectiveLevel()
626 if loglevel >= self.log.WARNING:
627 verbose = 0
628 elif loglevel == self.log.INFO:
629 verbose = 1
630 else:
631 verbose = 2
633 # Set up the WCS-fitting class using the results of the FOF associator
634 fixMaps = ",".join([f"HSC/{i}/poly" for i in exposureInfo.detectors])
635 wcsf = wcsfit.WCSFit(
636 fields,
637 [instrument],
638 exposuresHelper,
639 extensionInfo.visitIndex,
640 extensionInfo.detectorIndex,
641 inputYaml,
642 extensionInfo.wcs,
643 associations.sequence,
644 associations.extn,
645 associations.obj,
646 sysErr=self.config.systematicError,
647 refSysErr=self.config.referenceSystematicError,
648 usePM=self.config.fitProperMotion,
649 verbose=verbose,
650 fixMaps=(fixMaps if self.config.useInputCameraModel else ""),
651 )
653 # Add the science and reference sources
654 self._add_objects(wcsf, inputCatalogRefs, sourceIndices, extensionInfo, usedColumns)
655 self._add_ref_objects(wcsf, refObjects, refCovariance, extensionInfo)
657 # There must be at least as many sources per visit as the number of
658 # free parameters in the per-visit mapping. Set minFitExposures to be
659 # the number of free parameters, so that visits with fewer sources are
660 # dropped.
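# For example, the default exposurePolyOrder of 6 corresponds to
# _nCoeffsFromDegree(6) = 28 free parameters per output coordinate.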
661 nCoeffVisitModel = _nCoeffsFromDegree(self.config.exposurePolyOrder)
662 # Do the WCS fit
663 wcsf.fit(
664 reserveFraction=self.config.fitReserveFraction,
665 randomNumberSeed=self.config.fitReserveRandomSeed,
666 minFitExposures=nCoeffVisitModel,
667 )
668 self.log.info("WCS fitting done")
670 outputWcss, cameraParams = self._make_outputs(
671 wcsf,
672 inputVisitSummaries,
673 exposureInfo,
674 mapTemplate,
675 inputCameraModel=(inputCameraModel if self.config.useInputCameraModel else None),
676 )
677 outputCatalog = wcsf.getOutputCatalog()
678 starCatalog = wcsf.getStarCatalog()
679 modelParams = self._compute_model_params(wcsf) if self.config.saveModelParams else None
681 return pipeBase.Struct(
682 outputWcss=outputWcss,
683 fitModel=wcsf,
684 outputCatalog=outputCatalog,
685 starCatalog=starCatalog,
686 modelParams=modelParams,
687 cameraModelParams=cameraParams,
688 )
690 def _prep_sky(self, inputVisitSummaries, epoch, fieldName="Field"):
691 """Get center and radius of the input tract. This assumes that all
692 visits will be put into the same `wcsfit.Field` and fit together.
694 Parameters
695 ----------
696 inputVisitSummaries : `list` [`lsst.afw.table.ExposureCatalog`]
697 List of catalogs with per-detector summary information.
698 epoch : `float`
699 Reference epoch.
700 fieldName : `str`
701 Name of the field, used internally.
703 Returns
704 -------
705 fields : `wcsfit.Fields`
706 Object with field information.
707 center : `lsst.geom.SpherePoint`
708 Center of the field.
709 radius : `lsst.sphgeom._sphgeom.Angle`
710 Radius of the bounding circle of the tract.
711 """
712 allDetectorCorners = []
713 for visSum in inputVisitSummaries:
714 detectorCorners = [
715 lsst.geom.SpherePoint(ra, dec, lsst.geom.degrees).getVector()
716 for (ra, dec) in zip(visSum["raCorners"].ravel(), visSum["decCorners"].ravel())
717 if (np.isfinite(ra) and (np.isfinite(dec)))
718 ]
719 allDetectorCorners.extend(detectorCorners)
720 boundingCircle = lsst.sphgeom.ConvexPolygon.convexHull(allDetectorCorners).getBoundingCircle()
721 center = lsst.geom.SpherePoint(boundingCircle.getCenter())
722 ra = center.getRa().asDegrees()
723 dec = center.getDec().asDegrees()
724 radius = boundingCircle.getOpeningAngle()
726 # wcsfit.Fields describes a list of fields, but we assume all
727 # observations will be fit together in one field.
728 fields = wcsfit.Fields([fieldName], [ra], [dec], [epoch])
730 return fields, center, radius
732 def _get_exposure_info(
733 self,
734 inputVisitSummaries,
735 instrument,
736 fieldNumber=0,
737 instrumentNumber=0,
738 refEpoch=None,
739 fieldRegions=None,
740 ):
741 """Get various information about the input visits to feed to the
742 fitting routines.
744 Parameters
745 ----------
746 inputVisitSummaries : `list` [`lsst.afw.table.ExposureCatalog`]
747 Tables for each visit with information for detectors.
748 instrument : `wcsfit.Instrument`
749 Instrument object to which detector information is added.
750 fieldNumber : `int`, optional
751 Index of the field for these visits. Should be zero if all data is
752 being fit together. This is ignored if `fieldRegions` is not None.
753 instrumentNumber : `int`, optional
754 Index of the instrument for these visits. Should be zero if all
755 data comes from the same instrument.
756 refEpoch : `float`, optional
757 Epoch of the reference objects in MJD.
758 fieldRegions : `dict` [`int`, `lsst.sphgeom.ConvexPolygon`], optional
759 Dictionary of regions encompassing each group of input visits
760 keyed by an arbitrary index.
762 Returns
763 -------
764 exposureInfo : `lsst.pipe.base.Struct`
765 Struct containing general properties for the visits:
766 ``visits`` : `list`
767 List of visit names.
768 ``detectors`` : `list`
769 List of all detectors in any visit.
770 ``ras`` : `list` [`float`]
771 List of boresight RAs for each visit.
772 ``decs`` : `list` [`float`]
773 List of boresight Decs for each visit.
774 ``medianEpoch`` : `float`
775 Median epoch of all visits in decimal-year format.
776 exposuresHelper : `wcsfit.ExposuresHelper`
777 Object containing information about the input visits.
778 extensionInfo : `lsst.pipe.base.Struct`
779 Struct containing properties for each extension (visit/detector):
780 ``visit`` : `np.ndarray`
781 Name of the visit for this extension.
782 ``detector`` : `np.ndarray`
783 Name of the detector for this extension.
784 ``visitIndex`` : `np.ndarray` [`int`]
785 Index of visit for this extension.
786 ``detectorIndex`` : `np.ndarray` [`int`]
787 Index of the detector for this extension.
788 ``wcs`` : `np.ndarray` [`lsst.afw.geom.SkyWcs`]
789 Initial WCS for this extension.
790 ``extensionType`` : `np.ndarray` [`str`]
791 "SCIENCE" or "REFERENCE".
792 """
793 exposureNames = []
794 ras = []
795 decs = []
796 visits = []
797 detectors = []
798 airmasses = []
799 exposureTimes = []
800 mjds = []
801 observatories = []
802 wcss = []
803 fieldNumbers = []
805 extensionType = []
806 extensionVisitIndices = []
807 extensionDetectorIndices = []
808 extensionVisits = []
809 extensionDetectors = []
810 # Get information for all the science visits
811 for v, visitSummary in enumerate(inputVisitSummaries):
812 visitInfo = visitSummary[0].getVisitInfo()
813 visit = visitSummary[0]["visit"]
814 visits.append(visit)
815 exposureNames.append(str(visit))
816 raDec = visitInfo.getBoresightRaDec()
817 ras.append(raDec.getRa().asRadians())
818 decs.append(raDec.getDec().asRadians())
819 if fieldRegions is not None:
820 inField = [r for r, region in fieldRegions.items() if region.contains(raDec.getVector())]
821 if len(inField) != 1:
822 raise RuntimeError(
823 f"Visit should be in one and only one field, but {visit} is contained "
824 f"in {len(inField)} fields."
825 )
826 fieldNumbers.append(inField[0])
827 else:
828 fieldNumbers.append(fieldNumber)
829 airmasses.append(visitInfo.getBoresightAirmass())
830 exposureTimes.append(visitInfo.getExposureTime())
831 obsDate = visitInfo.getDate()
832 obsMJD = obsDate.get(obsDate.MJD)
833 mjds.append(obsMJD)
834 # Get the observatory ICRS position for use in fitting parallax
835 obsLon = visitInfo.observatory.getLongitude().asDegrees()
836 obsLat = visitInfo.observatory.getLatitude().asDegrees()
837 obsElev = visitInfo.observatory.getElevation()
838 earthLocation = astropy.coordinates.EarthLocation.from_geodetic(obsLon, obsLat, obsElev)
839 observatory_gcrs = earthLocation.get_gcrs(astropy.time.Time(obsMJD, format="mjd"))
840 observatory_icrs = observatory_gcrs.transform_to(astropy.coordinates.ICRS())
841 # We want the position in AU in Cartesian coordinates
842 observatories.append(observatory_icrs.cartesian.xyz.to(u.AU).value)
844 for row in visitSummary:
845 detector = row["id"]
847 wcs = row.getWcs()
848 if wcs is None:
849 self.log.warning(
850 "WCS is None for visit %d, detector %d: this extension (visit/detector) will be "
851 "dropped.",
852 visit,
853 detector,
854 )
855 continue
856 else:
857 wcsRA = wcs.getSkyOrigin().getRa().asRadians()
858 wcsDec = wcs.getSkyOrigin().getDec().asRadians()
859 tangentPoint = wcsfit.Gnomonic(wcsRA, wcsDec)
860 mapping = wcs.getFrameDict().getMapping("PIXELS", "IWC")
861 gbdes_wcs = wcsfit.Wcs(wcsfit.ASTMap(mapping), tangentPoint)
862 wcss.append(gbdes_wcs)
864 if detector not in detectors:
865 detectors.append(detector)
866 detectorBounds = wcsfit.Bounds(
867 row["bbox_min_x"], row["bbox_max_x"], row["bbox_min_y"], row["bbox_max_y"]
868 )
869 instrument.addDevice(str(detector), detectorBounds)
871 detectorIndex = np.flatnonzero(detector == np.array(detectors))[0]
872 extensionVisitIndices.append(v)
873 extensionDetectorIndices.append(detectorIndex)
874 extensionVisits.append(visit)
875 extensionDetectors.append(detector)
876 extensionType.append("SCIENCE")
878 instrumentNumbers = list(np.ones(len(exposureNames), dtype=int) * instrumentNumber)
880 # Set the reference epoch to be the median of the science visits.
881 # The reference catalog will be shifted to this date.
882 medianMJD = np.median(mjds)
883 medianEpoch = astropy.time.Time(medianMJD, format="mjd").decimalyear
885 # Add information for the reference catalog. Most of the values are
886 # not used. There needs to be a separate catalog for each field.
887 if fieldRegions is None:
888 fieldRegions = {0: None}
889 for f in fieldRegions:
890 exposureNames.append("REFERENCE")
891 # Make the "visit" number the field * -1 to disambiguate it from
892 # any potential visit number:
893 visits.append(-1 * f)
894 fieldNumbers.append(f)
895 if self.config.fitProperMotion:
896 instrumentNumbers.append(-2)
897 else:
898 instrumentNumbers.append(-1)
899 ras.append(0.0)
900 decs.append(0.0)
901 airmasses.append(0.0)
902 exposureTimes.append(0)
903 mjds.append((refEpoch if (refEpoch is not None) else medianMJD))
904 observatories.append(np.array([0, 0, 0]))
905 identity = wcsfit.IdentityMap()
906 icrs = wcsfit.SphericalICRS()
907 refWcs = wcsfit.Wcs(identity, icrs, "Identity", np.pi / 180.0)
908 wcss.append(refWcs)
910 extensionVisitIndices.append(len(exposureNames) - 1)
911 extensionDetectorIndices.append(-1) # REFERENCE device must be -1
912 extensionVisits.append(-1 * f)
913 extensionDetectors.append(-1)
914 extensionType.append("REFERENCE")
916 # Make a table of information to use elsewhere in the class
917 extensionInfo = pipeBase.Struct(
918 visit=np.array(extensionVisits),
919 detector=np.array(extensionDetectors),
920 visitIndex=np.array(extensionVisitIndices),
921 detectorIndex=np.array(extensionDetectorIndices),
922 wcs=np.array(wcss),
923 extensionType=np.array(extensionType),
924 )
926 # Make the exposuresHelper object to use in the fitting routines
927 exposuresHelper = wcsfit.ExposuresHelper(
928 exposureNames,
929 fieldNumbers,
930 instrumentNumbers,
931 ras,
932 decs,
933 airmasses,
934 exposureTimes,
935 mjds,
936 observatories,
937 )
939 exposureInfo = pipeBase.Struct(
940 visits=visits, detectors=detectors, ras=ras, decs=decs, medianEpoch=medianEpoch
941 )
943 return exposureInfo, exposuresHelper, extensionInfo
945 def _load_refcat(
946 self,
947 refObjectLoader,
948 extensionInfo,
949 epoch=None,
950 fieldIndex=0,
951 associations=None,
952 center=None,
953 radius=None,
954 region=None,
955 ):
956 """Load the reference catalog and add reference objects to the
957 `wcsfit.FoFClass` object.
959 Parameters
960 ----------
961 refObjectLoader :
962 `lsst.meas.algorithms.loadReferenceObjects.ReferenceObjectLoader`
963 Object set up to load reference catalog objects.
964 extensionInfo : `lsst.pipe.base.Struct`
965 Struct containing properties for each extension (visit/detector).
966 ``visit`` : `np.ndarray`
967 Name of the visit for this extension.
968 ``detector`` : `np.ndarray`
969 Name of the detector for this extension.
970 ``visitIndex`` : `np.ndarray` [`int`]
971 Index of visit for this extension.
972 ``detectorIndex`` : `np.ndarray` [`int`]
973 Index of the detector for this extension.
974 ``wcs`` : `np.ndarray` [`lsst.afw.geom.SkyWcs`]
975 Initial WCS for this extension.
976 ``extensionType`` : `np.ndarray` [`str`]
977 "SCIENCE" or "REFERENCE".
978 epoch : `float`, optional
979 MJD to which to correct the object positions.
980 fieldIndex : `int`, optional
981 Index of the field. Should be zero if all the data is fit together.
982 associations : `wcsfit.FoFClass`, optional
983 Object to which to add the catalog of reference objects.
984 center : `lsst.geom.SpherePoint`, optional
985 Center of the circle in which to load reference objects. Ignored if
986 `region` is set. If used, `radius` must also be set.
987 radius : `lsst.sphgeom._sphgeom.Angle`, optional
988 Radius of the circle in which to load reference objects. Ignored if
989 `region` is set. If used, `center` must also be set.
990 region : `lsst.sphgeom.ConvexPolygon`, optional
991 Region in which to load reference objects.
993 Returns
994 -------
995 refObjects : `dict`
996 Position and error information of reference objects.
997 refCovariance : `list` [`float`]
998 Flattened output covariance matrix.
999 """
1000 if self.config.applyRefCatProperMotion:
1001 formattedEpoch = astropy.time.Time(epoch, format="mjd")
1002 else:
1003 formattedEpoch = None
1005 if region is not None:
1006 skyRegion = refObjectLoader.loadRegion(region, self.config.referenceFilter, epoch=formattedEpoch)
1007 elif (center is not None) and (radius is not None):
1008 skyRegion = refObjectLoader.loadSkyCircle(
1009 center, radius, self.config.referenceFilter, epoch=formattedEpoch
1010 )
1011 else:
1012 raise RuntimeError("Either `region` or `center` and `radius` must be set.")
1014 selected = self.referenceSelector.run(skyRegion.refCat)
1015 # Need memory contiguity to get reference filters as a vector.
1016 if not selected.sourceCat.isContiguous():
1017 refCat = selected.sourceCat.copy(deep=True)
1018 else:
1019 refCat = selected.sourceCat
1020 refCat = refCat.asAstropy()
1022 # In Gaia DR3, missing values are denoted by NaNs.
1023 finiteInd = np.isfinite(refCat["coord_ra"]) & np.isfinite(refCat["coord_dec"])
1024 refCat = refCat[finiteInd]
1026 if self.config.excludeNonPMObjects and self.config.applyRefCatProperMotion:
1027 # Gaia DR2 has zeros for missing data, while Gaia DR3 has NaNs:
1028 hasPM = (
1029 (refCat["pm_raErr"] != 0) & np.isfinite(refCat["pm_raErr"]) & np.isfinite(refCat["pm_decErr"])
1030 )
1031 refCat = refCat[hasPM]
1033 ra = (refCat["coord_ra"]).to(u.degree).to_value().tolist()
1034 dec = (refCat["coord_dec"]).to(u.degree).to_value().tolist()
1035 raCov = ((refCat["coord_raErr"]).to(u.degree).to_value() ** 2).tolist()
1036 decCov = ((refCat["coord_decErr"]).to(u.degree).to_value() ** 2).tolist()
1038 # Get refcat version from refcat metadata
1039 refCatMetadata = refObjectLoader.refCats[0].get().getMetadata()
1040 refCatVersion = refCatMetadata["REFCAT_FORMAT_VERSION"]
1041 if refCatVersion == 2:
1042 raDecCov = (refCat["coord_ra_coord_dec_Cov"]).to(u.degree**2).to_value().tolist()
1043 else:
1044 raDecCov = np.zeros(len(ra))
1046 refObjects = {"ra": ra, "dec": dec, "raCov": raCov, "decCov": decCov, "raDecCov": raDecCov}
1047 refCovariance = []
1049 if self.config.fitProperMotion:
1050 raPM = (refCat["pm_ra"]).to(u.marcsec).to_value().tolist()
1051 decPM = (refCat["pm_dec"]).to(u.marcsec).to_value().tolist()
1052 parallax = (refCat["parallax"]).to(u.marcsec).to_value().tolist()
1053 cov = _make_ref_covariance_matrix(refCat, version=refCatVersion)
1054 pmDict = {"raPM": raPM, "decPM": decPM, "parallax": parallax}
1055 refObjects.update(pmDict)
1056 refCovariance = cov
1058 if associations is not None:
1059 extensionIndex = np.flatnonzero(extensionInfo.extensionType == "REFERENCE")[0]
1060 visitIndex = extensionInfo.visitIndex[extensionIndex]
1061 detectorIndex = extensionInfo.detectorIndex[extensionIndex]
1062 instrumentIndex = -1 # -1 indicates the reference catalog
1063 refWcs = extensionInfo.wcs[extensionIndex]
1065 associations.addCatalog(
1066 refWcs,
1067 "STELLAR",
1068 visitIndex,
1069 fieldIndex,
1070 instrumentIndex,
1071 detectorIndex,
1072 extensionIndex,
1073 np.ones(len(refCat), dtype=bool),
1074 ra,
1075 dec,
1076 np.arange(len(ra)),
1077 )
1079 return refObjects, refCovariance
1081 @staticmethod
1082 def _find_extension_index(extensionInfo, visit, detector):
1083 """Find the index for a given extension from its visit and detector
1084 number.
1086 If no match is found, None is returned.
1088 Parameters
1089 ----------
1090 extensionInfo : `lsst.pipe.base.Struct`
1091 Struct containing properties for each extension.
1092 visit : `int`
1093 Visit number
1094 detector : `int`
1095 Detector number
1097 Returns
1098 -------
1099 extensionIndex : `int` or None
1100 Index of this extension
1101 """
1102 findExtension = np.flatnonzero((extensionInfo.visit == visit) & (extensionInfo.detector == detector))
1103 if len(findExtension) == 0:
1104 extensionIndex = None
1105 else:
1106 extensionIndex = findExtension[0]
1107 return extensionIndex
1109 def _load_catalogs_and_associate(
1110 self, associations, inputCatalogRefs, extensionInfo, fieldIndex=0, instrumentIndex=0
1111 ):
1112 """Load the science catalogs and add the sources to the associator
1113 class `wcsfit.FoFClass`, associating them into matches as you go.
1115 Parameters
1116 ----------
1117 associations : `wcsfit.FoFClass`
1118 Object to which to add the catalogs of sources and which performs
1119 the source association.
1120 inputCatalogRefs : `list`
1121 List of DeferredDatasetHandles pointing to visit-level source
1122 tables.
1123 extensionInfo : `lsst.pipe.base.Struct`
1124 Struct containing properties for each extension (visit/detector).
1125 ``visit`` : `np.ndarray`
1126 Name of the visit for this extension.
1127 ``detector`` : `np.ndarray`
1128 Name of the detector for this extension.
1129 ``visitIndex`` : `np.ndarray` [`int`]
1130 Index of visit for this extension.
1131 ``detectorIndex`` : `np.ndarray` [`int`]
1132 Index of the detector for this extension.
1133 ``wcs`` : `np.ndarray` [`lsst.afw.geom.SkyWcs`]
1134 Initial WCS for this extension.
1135 ``extensionType`` : `np.ndarray` [`str`]
1136 "SCIENCE" or "REFERENCE".
1137 fieldIndex : `int`
1138 Index of the field for these catalogs. Should be zero assuming all
1139 data is being fit together.
1140 instrumentIndex : `int`
1141 Index of the instrument for these catalogs. Should be zero
1142 assuming all data comes from the same instrument.
1144 Returns
1145 -------
1146 sourceIndices : `list`
1147 List of boolean arrays used to select sources.
1148 columns : `list` [`str`]
1149 List of columns needed from source tables.
1150 """
1151 columns = [
1152 "detector",
1153 "sourceId",
1154 "x",
1155 "xErr",
1156 "y",
1157 "yErr",
1158 "ixx",
1159 "iyy",
1160 "ixy",
1161 f"{self.config.sourceFluxType}_instFlux",
1162 f"{self.config.sourceFluxType}_instFluxErr",
1163 ]
1164 if self.sourceSelector.config.doFlags:
1165 columns.extend(self.sourceSelector.config.flags.bad)
1166 if self.sourceSelector.config.doUnresolved:
1167 columns.append(self.sourceSelector.config.unresolved.name)
1168 if self.sourceSelector.config.doIsolated:
1169 columns.append(self.sourceSelector.config.isolated.parentName)
1170 columns.append(self.sourceSelector.config.isolated.nChildName)
1171 if self.sourceSelector.config.doRequirePrimary:
1172 columns.append(self.sourceSelector.config.requirePrimary.primaryColName)
1174 sourceIndices = [None] * len(extensionInfo.visit)
1175 for inputCatalogRef in inputCatalogRefs:
1176 visit = inputCatalogRef.dataId["visit"]
1177 inputCatalog = inputCatalogRef.get(parameters={"columns": columns})
1178 # Get a sorted array of detector names
1179 detectors = np.unique(inputCatalog["detector"])
1181 for detector in detectors:
1182 detectorSources = inputCatalog[inputCatalog["detector"] == detector]
1183 xCov = detectorSources["xErr"] ** 2
1184 yCov = detectorSources["yErr"] ** 2
1185 xyCov = (
1186 detectorSources["ixy"] * (xCov + yCov) / (detectorSources["ixx"] + detectorSources["iyy"])
1187 )
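# The xyCov estimate above assumes the centroid covariance matrix is
# proportional to the second-moment matrix (ixx, iyy, ixy), in which case
# xyCov = ixy * (xCov + yCov) / (ixx + iyy).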
1188 # Remove sources with bad shape measurements
1189 goodShapes = xyCov**2 <= (xCov * yCov)
1190 selected = self.sourceSelector.run(detectorSources)
1191 goodInds = selected.selected & goodShapes
1193 isStar = np.ones(goodInds.sum())
1194 extensionIndex = self._find_extension_index(extensionInfo, visit, detector)
1195 if extensionIndex is None:
1196 # This extension does not have information necessary for
1197 # fit. Skip the detections from this detector for this
1198 # visit.
1199 continue
1200 detectorIndex = extensionInfo.detectorIndex[extensionIndex]
1201 visitIndex = extensionInfo.visitIndex[extensionIndex]
1203 sourceIndices[extensionIndex] = goodInds
1205 wcs = extensionInfo.wcs[extensionIndex]
1206 associations.reprojectWCS(wcs, fieldIndex)
1208 associations.addCatalog(
1209 wcs,
1210 "STELLAR",
1211 visitIndex,
1212 fieldIndex,
1213 instrumentIndex,
1214 detectorIndex,
1215 extensionIndex,
1216 isStar,
1217 detectorSources[goodInds]["x"].to_list(),
1218 detectorSources[goodInds]["y"].to_list(),
1219 np.arange(goodInds.sum()),
1220 )
1222 associations.sortMatches(
1223 fieldIndex, minMatches=self.config.minMatches, allowSelfMatches=self.config.allowSelfMatches
1224 )
1226 return sourceIndices, columns
1228 def _check_degeneracies(self, associations, extensionInfo):
1229 """Check that the minimum number of visits and sources needed to
1230 constrain the model are present.
1232 This does not guarantee that the Hessian matrix of the chi-square,
1233 which is used to fit the model, will be positive-definite, but if the
1234 checks here do not pass, the matrix is certain to not be
1235 positive-definite and the model cannot be fit.
1237 Parameters
1238 ----------
1239 associations : `wcsfit.FoFClass`
1240 Object holding the source association information.
1241 extensionInfo : `lsst.pipe.base.Struct`
1242 Struct containing properties for each extension (visit/detector):
1243 ``visit`` : `np.ndarray`
1244 Name of the visit for this extension.
1245 ``detector`` : `np.ndarray`
1246 Name of the detector for this extension.
1247 ``visitIndex`` : `np.ndarray` [`int`]
1248 Index of visit for this extension.
1249 ``detectorIndex`` : `np.ndarray` [`int`]
1250 Index of the detector for this extension.
1251 ``wcs`` : `np.ndarray` [`lsst.afw.geom.SkyWcs`]
1252 Initial WCS for this extension.
1253 ``extensionType`` : `np.ndarray` [`str`]
1254 "SCIENCE" or "REFERENCE".
1255 """
1256 # As a baseline, need to have more stars per detector than per-detector
1257 # parameters, and more stars per visit than per-visit parameters.
1258 whichExtension = np.array(associations.extn)
1259 whichDetector = np.zeros(len(whichExtension))
1260 whichVisit = np.zeros(len(whichExtension))
1262 for extension, (detector, visit) in enumerate(zip(extensionInfo.detector, extensionInfo.visit)):
1263 ex_ind = whichExtension == extension
1264 whichDetector[ex_ind] = detector
1265 whichVisit[ex_ind] = visit
1267 if (not self.config.useInputCameraModel) and ("BAND/DEVICE/poly" in self.config.deviceModel):
1268 nCoeffDetectorModel = _nCoeffsFromDegree(self.config.devicePolyOrder)
1269 unconstrainedDetectors = []
1270 for detector in np.unique(extensionInfo.detector):
1271 numSources = (whichDetector == detector).sum()
1272 if numSources < nCoeffDetectorModel:
1273 unconstrainedDetectors.append(str(detector))
1275 if unconstrainedDetectors:
1276 raise RuntimeError(
1277 "The model is not constrained. The following detectors do not have enough "
1278 f"sources ({nCoeffDetectorModel} required): ",
1279 ", ".join(unconstrainedDetectors),
1280 )
1282 def make_yaml(self, inputVisitSummary, inputFile=None, inputCameraModel=None):
1283 """Make a YAML-type object that describes the parameters of the fit
1284 model.
1286 Parameters
1287 ----------
1288 inputVisitSummary : `lsst.afw.table.ExposureCatalog`
1289 Catalog with per-detector summary information.
1290 inputFile : `str`
1291 Path to a file that contains a basic model.
1292 inputCameraModel : `dict` [`str`, `np.ndarray`], optional
1293 Parameters to use for the device part of the model.
1295 Returns
1296 -------
1297 inputYaml : `wcsfit.YAMLCollector`
1298 YAML object containing the model description.
1299 inputDict : `dict` [`str`, `str`]
1300 Dictionary containing the model description.
1301 """
1302 if inputFile is not None:
1303 inputYaml = wcsfit.YAMLCollector(inputFile, "PixelMapCollection")
1304 else:
1305 inputYaml = wcsfit.YAMLCollector("", "PixelMapCollection")
1306 inputDict = {}
1307 modelComponents = ["INSTRUMENT/DEVICE", "EXPOSURE"]
1308 baseMap = {"Type": "Composite", "Elements": modelComponents}
1309 inputDict["EXPOSURE/DEVICE/base"] = baseMap
1311 xMin = str(inputVisitSummary["bbox_min_x"].min())
1312 xMax = str(inputVisitSummary["bbox_max_x"].max())
1313 yMin = str(inputVisitSummary["bbox_min_y"].min())
1314 yMax = str(inputVisitSummary["bbox_max_y"].max())
1316 deviceModel = {"Type": "Composite", "Elements": self.config.deviceModel.list()}
1317 inputDict["INSTRUMENT/DEVICE"] = deviceModel
1318 for component in self.config.deviceModel:
1319 if "poly" in component.lower():
1320 componentDict = {
1321 "Type": "Poly",
1322 "XPoly": {"OrderX": self.config.devicePolyOrder, "SumOrder": True},
1323 "YPoly": {"OrderX": self.config.devicePolyOrder, "SumOrder": True},
1324 "XMin": xMin,
1325 "XMax": xMax,
1326 "YMin": yMin,
1327 "YMax": yMax,
1328 }
1329 elif "identity" in component.lower():
1330 componentDict = {"Type": "Identity"}
1332 inputDict[component] = componentDict
1334 if (inputCameraModel is not None) and self.config.useInputCameraModel:
1335 # This assumes that the input camera model is a 'poly' model
1336 nCoeffs = _nCoeffsFromDegree(self.config.devicePolyOrder)
1337 for key, coeffs in inputCameraModel.items():
1338 if len(coeffs) != nCoeffs * 2:
1339 raise RuntimeError(
1340 "Input camera model polynomial order does not match the devicePolyOrder"
1341 )
1342 mapDict = {
1343 "Type": "Poly",
1344 "XPoly": {
1345 "OrderX": self.config.devicePolyOrder,
1346 "SumOrder": True,
1347 "Coefficients": coeffs[:nCoeffs].tolist(),
1348 },
1349 "YPoly": {
1350 "OrderX": self.config.devicePolyOrder,
1351 "SumOrder": True,
1352 "Coefficients": coeffs[nCoeffs:].tolist(),
1353 },
1354 "XMin": xMin,
1355 "XMax": xMax,
1356 "YMin": yMin,
1357 "YMax": yMax,
1358 }
1359 inputDict[key] = mapDict
1361 exposureModel = {"Type": "Composite", "Elements": self.config.exposureModel.list()}
1362 inputDict["EXPOSURE"] = exposureModel
1363 for component in self.config.exposureModel:
1364 if "poly" in component.lower():
1365 componentDict = {
1366 "Type": "Poly",
1367 "XPoly": {"OrderX": self.config.exposurePolyOrder, "SumOrder": "true"},
1368 "YPoly": {"OrderX": self.config.exposurePolyOrder, "SumOrder": "true"},
1369 }
1370 elif "identity" in component.lower():
1371 componentDict = {"Type": "Identity"}
1373 inputDict[component] = componentDict
1375 inputYaml.addInput(yaml.dump(inputDict))
1376 inputYaml.addInput("Identity:\n Type: Identity\n")
1378 return inputYaml, inputDict
1380 def _add_objects(self, wcsf, inputCatalogRefs, sourceIndices, extensionInfo, columns):
1381 """Add science sources to the wcsfit.WCSFit object.
1383 Parameters
1384 ----------
1385 wcsf : `wcsfit.WCSFit`
1386 WCS-fitting object.
1387 inputCatalogRefs : `list`
1388 List of DeferredDatasetHandles pointing to visit-level source
1389 tables.
1390 sourceIndices : `list`
1391 List of boolean arrays used to select sources.
1392 extensionInfo : `lsst.pipe.base.Struct`
1393 Struct containing properties for each extension (visit/detector):
1394 ``visit`` : `np.ndarray`
1395 Name of the visit for this extension.
1396 ``detector`` : `np.ndarray`
1397 Name of the detector for this extension.
1398 ``visitIndex`` : `np.ndarray` [`int`]
1399 Index of visit for this extension.
1400 ``detectorIndex`` : `np.ndarray` [`int`]
1401 Index of the detector for this extension.
1402 ``wcs`` : `np.ndarray` [`lsst.afw.geom.SkyWcs`]
1403 Initial WCS for this extension.
1404 ``extensionType`` : `np.ndarray` [`str`]
1405 "SCIENCE" or "REFERENCE".
1406 columns : `list` [`str`]
1407 List of columns needed from source tables.
1408 """
1409 for inputCatalogRef in inputCatalogRefs:
1410 visit = inputCatalogRef.dataId["visit"]
1411 inputCatalog = inputCatalogRef.get(parameters={"columns": columns})
1412 detectors = np.unique(inputCatalog["detector"])
1414 for detector in detectors:
1415 detectorSources = inputCatalog[inputCatalog["detector"] == detector]
1417 extensionIndex = self._find_extension_index(extensionInfo, visit, detector)
1418 if extensionIndex is None:
1419 # This extension does not have information necessary for
1420 # fit. Skip the detections from this detector for this
1421 # visit.
1422 continue
1424 sourceCat = detectorSources[sourceIndices[extensionIndex]]
1426 xCov = sourceCat["xErr"] ** 2
1427 yCov = sourceCat["yErr"] ** 2
1428 xyCov = sourceCat["ixy"] * (xCov + yCov) / (sourceCat["ixx"] + sourceCat["iyy"])
1429 # TODO: add correct xyErr if DM-7101 is ever done.
1431 d = {
1432 "x": sourceCat["x"].to_numpy(),
1433 "y": sourceCat["y"].to_numpy(),
1434 "xCov": xCov.to_numpy(),
1435 "yCov": yCov.to_numpy(),
1436 "xyCov": xyCov.to_numpy(),
1437 }
1439 wcsf.setObjects(extensionIndex, d, "x", "y", ["xCov", "yCov", "xyCov"])
1441 def _add_ref_objects(self, wcsf, refObjects, refCovariance, extensionInfo, fieldIndex=0):
1442 """Add reference sources to the wcsfit.WCSFit object.
1444 Parameters
1445 ----------
1446 wcsf : `wcsfit.WCSFit`
1447 WCS-fitting object.
1448 refObjects : `dict`
1449 Position and error information of reference objects.
1450 refCovariance : `list` [`float`]
1451 Flattened output covariance matrix.
1452 extensionInfo : `lsst.pipe.base.Struct`
1453 Struct containing properties for each extension (visit/detector):
1454 ``visit`` : `np.ndarray`
1455 Name of the visit for this extension.
1456 ``detector`` : `np.ndarray`
1457 Name of the detector for this extension.
1458 ``visitIndex`` : `np.ndarray` [`int`]
1459 Index of visit for this extension.
1460 ``detectorIndex`` : `np.ndarray` [`int`]
1461 Index of the detector for this extension.
1462 ``wcs`` : `np.ndarray` [`lsst.afw.geom.SkyWcs`]
1463 Initial WCS for this extension.
1464 ``extensionType`` : `np.ndarray` [`str`]
1465 "SCIENCE" or "REFERENCE".
1466 fieldIndex : `int`, optional
1467 Index of the field to which these sources correspond.
1468 """
1469 extensionIndex = np.flatnonzero(
1470 (extensionInfo.extensionType == "REFERENCE") & (extensionInfo.visit == fieldIndex)
1471 )[0]
1472 if self.config.fitProperMotion:
1473 wcsf.setObjects(
1474 extensionIndex,
1475 refObjects,
1476 "ra",
1477 "dec",
1478 ["raCov", "decCov", "raDecCov"],
1479 pmDecKey="decPM",
1480 pmRaKey="raPM",
1481 parallaxKey="parallax",
1482 pmCovKey="fullCov",
1483 pmCov=refCovariance,
1484 )
1485 else:
1486 wcsf.setObjects(extensionIndex, refObjects, "ra", "dec", ["raCov", "decCov", "raDecCov"])
1488 def _make_afw_wcs(self, mapDict, centerRA, centerDec, doNormalizePixels=False, xScale=1, yScale=1):
1489 """Make an `lsst.afw.geom.SkyWcs` from a dictionary of mappings.
1491 Parameters
1492 ----------
1493 mapDict : `dict`
1494 Dictionary of mapping parameters.
1495 centerRA : `lsst.geom.Angle`
1496 RA of the tangent point.
1497 centerDec : `lsst.geom.Angle`
1498 Declination of the tangent point.
1499 doNormalizePixels : `bool`
1500 Whether to normalize pixels so that range is [-1,1].
1501 xScale : `float`
1502 Factor by which to normalize x-dimension. Corresponds to width of
1503 detector.
1504 yScale : `float`
1505 Factor by which to normalize y-dimension. Corresponds to height of
1506 detector.
1508 Returns
1509 -------
1510 outWCS : `lsst.afw.geom.SkyWcs`
1511 WCS constructed from the input mappings
1512 """
1513 # Set up pixel frames
1514 pixelFrame = astshim.Frame(2, "Domain=PIXELS")
1515 normedPixelFrame = astshim.Frame(2, "Domain=NORMEDPIXELS")
1517 if doNormalizePixels:
1518 # Pixels will need to be rescaled before going into the mappings
1519 normCoefficients = [-1.0, 2.0 / xScale, 0, -1.0, 0, 2.0 / yScale]
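# i.e. x -> 2 * x / xScale - 1 and y -> 2 * y / yScale - 1, mapping
# [0, xScale] x [0, yScale] onto [-1, 1] x [-1, 1]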
1520 normMap = _convert_to_ast_polymap_coefficients(normCoefficients)
1521 else:
1522 normMap = astshim.UnitMap(2)
1524 # All of the detectors for one visit map to the same tangent plane
1525 tangentPoint = lsst.geom.SpherePoint(centerRA, centerDec)
1526 cdMatrix = afwgeom.makeCdMatrix(1.0 * lsst.geom.degrees, 0 * lsst.geom.degrees, True)
1527 iwcToSkyWcs = afwgeom.makeSkyWcs(lsst.geom.Point2D(0, 0), tangentPoint, cdMatrix)
1528 iwcToSkyMap = iwcToSkyWcs.getFrameDict().getMapping("PIXELS", "SKY")
1529 skyFrame = iwcToSkyWcs.getFrameDict().getFrame("SKY")
1531 frameDict = astshim.FrameDict(pixelFrame)
1532 frameDict.addFrame("PIXELS", normMap, normedPixelFrame)
1534 currentFrameName = "NORMEDPIXELS"
1536 # Dictionary values are ordered according to the maps' application.
1537 for m, mapElement in enumerate(mapDict.values()):
1538 mapType = mapElement["Type"]
1540 if mapType == "Poly":
1541 mapCoefficients = mapElement["Coefficients"]
1542 astMap = _convert_to_ast_polymap_coefficients(mapCoefficients)
1543 elif mapType == "Identity":
1544 astMap = astshim.UnitMap(2)
1545 else:
1546 raise ValueError(f"Converting map type {mapType} to WCS is not supported")
1548 if m == len(mapDict) - 1:
1549 newFrameName = "IWC"
1550 else:
1551 newFrameName = "INTERMEDIATE" + str(m)
1552 newFrame = astshim.Frame(2, f"Domain={newFrameName}")
1553 frameDict.addFrame(currentFrameName, astMap, newFrame)
1554 currentFrameName = newFrameName
1555 frameDict.addFrame("IWC", iwcToSkyMap, skyFrame)
1557 outWCS = afwgeom.SkyWcs(frameDict)
1558 return outWCS
1560 def _make_outputs(self, wcsf, visitSummaryTables, exposureInfo, mapTemplate, inputCameraModel=None):
1561 """Make a WCS object out of the WCS models.
1563 Parameters
1564 ----------
1565 wcsf : `wcsfit.WCSFit`
1566 WCSFit object, assumed to have fit model.
1567 visitSummaryTables : `list` [`lsst.afw.table.ExposureCatalog`]
1568 Catalogs with per-detector summary information from which to grab
1569 detector information.
1570 exposureInfo : `lsst.pipe.base.Struct`
1571 Struct containing per-exposure information; the fields used here are:
1572 ``detectors`` : `list`
1573 Detector ids included in the fit.
1574 ``ras`` : `np.ndarray` [`float`]
1575 Right ascension of the tangent point for each visit, in radians.
1576 ``decs`` : `np.ndarray` [`float`]
1577 Declination of the tangent point for each visit, in radians.
1584 mapTemplate : `dict` [`str`, `str`]
1585 Dictionary containing the model description.
1586 inputCameraModel : `dict` [`str`, `np.ndarray`], optional
1587 Parameters to use for the device part of the model. This must be
1588 provided if an input camera model was used.
1590 Returns
1591 -------
1592 catalogs : `dict` [`str`, `lsst.afw.table.ExposureCatalog`]
1593 Dictionary of `lsst.afw.table.ExposureCatalog` objects with the WCS
1594 set to the WCS fit in wcsf, keyed by the visit name.
1595 cameraParams : `dict` [`str`, `np.ndarray`], optional
1596 Parameters for the device part of the model in the format needed
1597 when used as input for future runs.
1598 """
1599 # Get the parameters of the fit models
1600 mapParams = wcsf.mapCollection.getParamDict()
1601 cameraParams = {}
1602 if self.config.saveCameraModel:
1603 for element in mapTemplate["INSTRUMENT/DEVICE"]["Elements"]:
1604 for detector in exposureInfo.detectors:
1605 detectorTemplate = element.replace("DEVICE", str(detector))
1606 detectorTemplate = detectorTemplate.replace("BAND", ".+")
1607 for k, params in mapParams.items():
1608 if re.fullmatch(detectorTemplate, k):
1609 cameraParams[k] = params
1610 if self.config.useInputCameraModel:
1611 if inputCameraModel is None:
1612 raise RuntimeError(
1613 "inputCameraModel must be provided to _make_outputs in order to build output WCS."
1614 )
1615 mapParams.update(inputCameraModel)
1617 # Set up the schema for the output catalogs
1618 schema = lsst.afw.table.ExposureTable.makeMinimalSchema()
1619 schema.addField("visit", type="L", doc="Visit number")
1620 schema.addField(
1621 "recoveredWcs",
1622 type="Flag",
1623 doc="Input WCS missing, output recovered from other input visit/detectors.",
1624 )
1626 # Pixels will need to be rescaled before going into the mappings
1627 sampleDetector = visitSummaryTables[0][0]
1628 xscale = sampleDetector["bbox_max_x"] - sampleDetector["bbox_min_x"]
1629 yscale = sampleDetector["bbox_max_y"] - sampleDetector["bbox_min_y"]
1631 catalogs = {}
1632 for v, visitSummary in enumerate(visitSummaryTables):
1633 visit = visitSummary[0]["visit"]
1635 visitMap = wcsf.mapCollection.orderAtoms(f"{visit}")[0]
1636 visitMapType = wcsf.mapCollection.getMapType(visitMap)
1637 if (visitMap not in mapParams) and (visitMapType != "Identity"):
1638 self.log.warning("Visit %d was dropped because of an insufficient number of sources.", visit)
1639 continue
1641 catalog = lsst.afw.table.ExposureCatalog(schema)
1642 catalog.resize(len(exposureInfo.detectors))
1643 catalog["visit"] = visit
1645 for d, detector in enumerate(exposureInfo.detectors):
1646 mapName = f"{visit}/{detector}"
1647 if mapName in wcsf.mapCollection.allMapNames():
1648 mapElements = wcsf.mapCollection.orderAtoms(f"{mapName}/base")
1649 catalog[d]["recoveredWcs"] = False
1650 else:
1651 # This extension was not fit, but the WCS can be recovered
1652 # using the maps fit from sources on other visits but the
1653 # same detector and from sources on other detectors from
1654 # this visit.
1655 genericElements = mapTemplate["EXPOSURE/DEVICE/base"]["Elements"]
1656 mapElements = []
1657 instrument = visitSummary[0].getVisitInfo().instrumentLabel
1658 # Go through the generic map components to build the names
1659 # of the specific maps for this extension.
1660 for component in genericElements:
1661 elements = mapTemplate[component]["Elements"]
1662 for element in elements:
1663 # TODO: DM-42519, gbdes sets the "BAND" to the
1664 # instrument name currently. This will need to be
1665 # disambiguated if we run on multiple bands at
1666 # once.
1667 element = element.replace("BAND", str(instrument))
1668 element = element.replace("EXPOSURE", str(visit))
1669 element = element.replace("DEVICE", str(detector))
1670 mapElements.append(element)
1671 catalog[d]["recoveredWcs"] = True
1672 mapDict = {}
1673 for m, mapElement in enumerate(mapElements):
1674 mapType = wcsf.mapCollection.getMapType(mapElement)
1675 mapDict[mapElement] = {"Type": mapType}
1677 if mapType == "Poly":
1678 mapCoefficients = mapParams[mapElement]
1679 mapDict[mapElement]["Coefficients"] = mapCoefficients
1681 # The RA and Dec of the visit are needed for the last step of
1682 # the mapping from the visit tangent plane to RA and Dec
1683 outWCS = self._make_afw_wcs(
1684 mapDict,
1685 exposureInfo.ras[v] * lsst.geom.radians,
1686 exposureInfo.decs[v] * lsst.geom.radians,
1687 doNormalizePixels=True,
1688 xScale=xscale,
1689 yScale=yscale,
1690 )
1692 catalog[d].setId(detector)
1693 catalog[d].setWcs(outWCS)
1694 catalog.sort()
1695 catalogs[visit] = catalog
1697 return catalogs, cameraParams
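The camera-model bookkeeping above relies on turning a generic element name with DEVICE/BAND placeholders into a regular expression that is matched against the fitted map names; a standalone sketch of that substitution, with hypothetical element and map names, follows.

# --- Illustrative sketch (not part of the module) ---------------------------
import re

element = "BAND/DEVICE/poly"          # hypothetical template element
detector = 42
detectorTemplate = element.replace("DEVICE", str(detector)).replace("BAND", ".+")
for mapName in ["HSC-R/42/poly", "HSC-R/41/poly"]:
    if re.fullmatch(detectorTemplate, mapName):
        print("keeping", mapName)     # only the detector-42 map matches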
1699 def _compute_model_params(self, wcsf):
1700 """Get the WCS model parameters and covariance and convert to a
1701 dictionary that will be readable as a pandas dataframe or other table.
1703 Parameters
1704 ----------
1705 wcsf : `wcsfit.WCSFit`
1706 WCSFit object, assumed to have a fitted model.
1708 Returns
1709 -------
1710 modelParams : `dict`
1711 Parameters and covariance of the best-fit WCS model.
1712 """
1713 modelParamDict = wcsf.mapCollection.getParamDict()
1714 modelCovariance = wcsf.getModelCovariance()
1716 modelParams = {k: [] for k in ["mapName", "coordinate", "parameter", "coefficientNumber"]}
1717 i = 0
1718 for mapName, params in modelParamDict.items():
1719 nCoeffs = len(params)
1720 # There are equal numbers of x and y coordinate parameters
1721 nCoordCoeffs = nCoeffs // 2
1722 modelParams["mapName"].extend([mapName] * nCoeffs)
1723 modelParams["coordinate"].extend(["x"] * nCoordCoeffs)
1724 modelParams["coordinate"].extend(["y"] * nCoordCoeffs)
1725 modelParams["parameter"].extend(params)
1726 modelParams["coefficientNumber"].extend(np.arange(nCoordCoeffs))
1727 modelParams["coefficientNumber"].extend(np.arange(nCoordCoeffs))
1729 for p in range(nCoeffs):
1730 if p < nCoordCoeffs:
1731 coord = "x"
1732 else:
1733 coord = "y"
1734 modelParams[f"{mapName}_{coord}_{p}_cov"] = modelCovariance[i]
1735 i += 1
1737 # Convert the dictionary values from lists to numpy arrays.
1738 for key, value in modelParams.items():
1739 modelParams[key] = np.array(value)
1741 return modelParams
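A toy illustration of the table layout produced by _compute_model_params: one hypothetical map with four parameters (two per coordinate) gives four rows, plus one covariance column per parameter keyed by map name, coordinate, and parameter index. The parameter values are invented.

# --- Illustrative sketch (not part of the module) ---------------------------
import numpy as np

params = np.array([1.0, 0.1, -2.0, 0.05])   # made-up "exampleMap" parameters
nCoordCoeffs = len(params) // 2
table = {
    "mapName": ["exampleMap"] * len(params),
    "coordinate": ["x"] * nCoordCoeffs + ["y"] * nCoordCoeffs,
    "parameter": list(params),
    "coefficientNumber": list(range(nCoordCoeffs)) * 2,
}
# The covariance columns would be "exampleMap_x_0_cov", "exampleMap_x_1_cov",
# "exampleMap_y_2_cov", and "exampleMap_y_3_cov", each holding one row of the
# model covariance matrix.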
1744class GbdesGlobalAstrometricFitConnections(
1745 pipeBase.PipelineTaskConnections, dimensions=("instrument", "physical_filter")
1746):
1747 inputVisitSummaries = pipeBase.connectionTypes.Input(
1748 doc=(
1749 "Per-visit consolidated exposure metadata built from calexps. "
1750 "These catalogs use detector id for the id and must be sorted for "
1751 "fast lookups of a detector."
1752 ),
1753 name="visitSummary",
1754 storageClass="ExposureCatalog",
1755 dimensions=("instrument", "visit"),
1756 multiple=True,
1757 )
1758 referenceCatalog = pipeBase.connectionTypes.PrerequisiteInput(
1759 doc="The astrometry reference catalog to match to loaded input catalog sources.",
1760 name="gaia_dr3_20230707",
1761 storageClass="SimpleCatalog",
1762 dimensions=("skypix",),
1763 deferLoad=True,
1764 multiple=True,
1765 )
1766 isolatedStarSources = pipeBase.connectionTypes.Input(
1767 doc="Catalog of matched sources.",
1768 name="isolated_star_sources",
1769 storageClass="DataFrame",
1770 dimensions=(
1771 "instrument",
1772 "skymap",
1773 "tract",
1774 ),
1775 multiple=True,
1776 deferLoad=True,
1777 )
1778 isolatedStarCatalogs = pipeBase.connectionTypes.Input(
1779 doc="Catalog of objects corresponding to the isolatedStarSources.",
1780 name="isolated_star_cat",
1781 storageClass="DataFrame",
1782 dimensions=(
1783 "instrument",
1784 "skymap",
1785 "tract",
1786 ),
1787 multiple=True,
1788 deferLoad=True,
1789 )
1790 inputCameraModel = pipeBase.connectionTypes.PrerequisiteInput(
1791 doc="Camera parameters to use for 'device' part of model",
1792 name="gbdesAstrometricFit_cameraModel",
1793 storageClass="ArrowNumpyDict",
1794 dimensions=("instrument", "physical_filter"),
1795 )
1796 outputWcs = pipeBase.connectionTypes.Output(
1797 doc=(
1798 "Per-visit world coordinate systems derived from the fitted model. These catalogs only contain "
1799 "entries for detectors with an output, and use the detector id for the catalog id, sorted on id "
1800 "for fast lookups of a detector."
1801 ),
1802 name="gbdesGlobalAstrometricFitSkyWcsCatalog",
1803 storageClass="ExposureCatalog",
1804 dimensions=("instrument", "visit"),
1805 multiple=True,
1806 )
1807 outputCatalog = pipeBase.connectionTypes.Output(
1808 doc=(
1809 "Catalog of sources used in fit, along with residuals in pixel coordinates and tangent "
1810 "plane coordinates and chisq values."
1811 ),
1812 name="gbdesGlobalAstrometricFit_fitStars",
1813 storageClass="ArrowNumpyDict",
1814 dimensions=("instrument", "physical_filter"),
1815 )
1816 starCatalog = pipeBase.connectionTypes.Output(
1817 doc=(
1818 "Catalog of best-fit object positions. Also includes the fit proper motion and parallax if "
1819 "fitProperMotion is True."
1820 ),
1821 name="gbdesGlobalAstrometricFit_starCatalog",
1822 storageClass="ArrowNumpyDict",
1823 dimensions=("instrument", "physical_filter"),
1824 )
1825 modelParams = pipeBase.connectionTypes.Output(
1826 doc="WCS parameters and covariance.",
1827 name="gbdesGlobalAstrometricFit_modelParams",
1828 storageClass="ArrowNumpyDict",
1829 dimensions=("instrument", "physical_filter"),
1830 )
1831 outputCameraModel = pipeBase.connectionTypes.Output(
1832 doc="Camera parameters to use for 'device' part of model",
1833 name="gbdesAstrometricFit_cameraModel",
1834 storageClass="ArrowNumpyDict",
1835 dimensions=("instrument", "physical_filter"),
1836 )
1838 def getSpatialBoundsConnections(self):
1839 return ("inputVisitSummaries",)
1841 def __init__(self, *, config=None):
1842 super().__init__(config=config)
1844 if not self.config.saveModelParams:
1845 self.outputs.remove("modelParams")
1846 if not self.config.useInputCameraModel:
1847 self.prerequisiteInputs.remove("inputCameraModel")
1848 if not self.config.saveCameraModel:
1849 self.outputs.remove("outputCameraModel")
1852class GbdesGlobalAstrometricFitConfig(
1853 GbdesAstrometricFitConfig, pipelineConnections=GbdesGlobalAstrometricFitConnections
1854):
1855 visitOverlap = pexConfig.Field(
1856 dtype=float,
1857 default=1.0,
1858 doc=(
1859 "The linkage distance threshold above which clustered groups of visits will not be merged "
1860 "together in an agglomerative clustering algorithm. The linkage distance is calculated using the "
1861 "minimum distance between the field-of-view centers of a given visit and all other visits in a "
1862 "group, and is in units of the field-of-view radius. The resulting groups of visits define the "
1863 "fields for the astrometric fit."
1864 ),
1865 )
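As a hypothetical example of tuning the field definition, the clustering threshold can be tightened so that only visits whose field-of-view centers lie within half an FOV radius of an existing group are merged into the same field.

# --- Illustrative sketch (not part of the module) ---------------------------
config = GbdesGlobalAstrometricFitConfig()
config.visitOverlap = 0.5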
1868class GbdesGlobalAstrometricFitTask(GbdesAstrometricFitTask):
1869 """Calibrate the WCS across multiple visits and multiple fields using the
1870 GBDES package.
1872 This class assumes that the input visits can be separated into contiguous
1873 groups, each of which covers an area of less than a
1874 hemisphere.
1875 """
1877 ConfigClass = GbdesGlobalAstrometricFitConfig
1878 _DefaultName = "gbdesAstrometricFit"
1880 def runQuantum(self, butlerQC, inputRefs, outputRefs):
1881 # We override runQuantum to set up the refObjLoaders
1882 inputs = butlerQC.get(inputRefs)
1884 instrumentName = butlerQC.quantum.dataId["instrument"]
1886 # Ensure the inputs are in a consistent and deterministic order
1887 inputSumVisits = np.array([inputSum[0]["visit"] for inputSum in inputs["inputVisitSummaries"]])
1888 inputs["inputVisitSummaries"] = [inputs["inputVisitSummaries"][v] for v in inputSumVisits.argsort()]
1889 inputRefHtm7s = np.array([inputRefCat.dataId["htm7"] for inputRefCat in inputRefs.referenceCatalog])
1890 inputRefCatRefs = [inputRefs.referenceCatalog[htm7] for htm7 in inputRefHtm7s.argsort()]
1891 inputRefCats = np.array([inputRefCat.dataId["htm7"] for inputRefCat in inputs["referenceCatalog"]])
1892 inputs["referenceCatalog"] = [inputs["referenceCatalog"][v] for v in inputRefCats.argsort()]
1893 inputIsolatedStarSourceTracts = np.array(
1894 [isolatedStarSource.dataId["tract"] for isolatedStarSource in inputs["isolatedStarSources"]]
1895 )
1896 inputIsolatedStarCatalogTracts = np.array(
1897 [isolatedStarCatalog.dataId["tract"] for isolatedStarCatalog in inputs["isolatedStarCatalogs"]]
1898 )
1899 for tract in inputIsolatedStarCatalogTracts:
1900 if tract not in inputIsolatedStarSourceTracts:
1901 raise RuntimeError(f"tract {tract} in isolated_star_cats but not isolated_star_sources")
1902 inputs["isolatedStarSources"] = np.array(
1903 [inputs["isolatedStarSources"][t] for t in inputIsolatedStarSourceTracts.argsort()]
1904 )
1905 inputs["isolatedStarCatalogs"] = np.array(
1906 [inputs["isolatedStarCatalogs"][t] for t in inputIsolatedStarSourceTracts.argsort()]
1907 )
1909 refConfig = LoadReferenceObjectsConfig()
1910 if self.config.applyRefCatProperMotion:
1911 refConfig.requireProperMotion = True
1912 refObjectLoader = ReferenceObjectLoader(
1913 dataIds=[ref.datasetRef.dataId for ref in inputRefCatRefs],
1914 refCats=inputs.pop("referenceCatalog"),
1915 config=refConfig,
1916 log=self.log,
1917 )
1919 output = self.run(**inputs, instrumentName=instrumentName, refObjectLoader=refObjectLoader)
1921 for outputRef in outputRefs.outputWcs:
1922 visit = outputRef.dataId["visit"]
1923 butlerQC.put(output.outputWcss[visit], outputRef)
1924 butlerQC.put(output.outputCatalog, outputRefs.outputCatalog)
1925 butlerQC.put(output.starCatalog, outputRefs.starCatalog)
1926 if self.config.saveModelParams:
1927 butlerQC.put(output.modelParams, outputRefs.modelParams)
1928 if self.config.saveCameraModel:
1929 butlerQC.put(output.cameraModelParams, outputRefs.outputCameraModel)
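The deterministic-ordering step in runQuantum uses a small argsort idiom; here is a standalone sketch of it, with made-up visit numbers and strings standing in for the butler handles.

# --- Illustrative sketch (not part of the module) ---------------------------
import numpy as np

visits = np.array([903342, 903334, 903338])
handles = ["handle_903342", "handle_903334", "handle_903338"]
ordered = [handles[i] for i in visits.argsort()]
# ordered == ["handle_903334", "handle_903338", "handle_903342"]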
1931 def run(
1932 self,
1933 inputVisitSummaries,
1934 isolatedStarSources,
1935 isolatedStarCatalogs,
1936 instrumentName="",
1937 refEpoch=None,
1938 refObjectLoader=None,
1939 inputCameraModel=None,
1940 ):
1941 """Run the WCS fit for a given set of visits
1943 Parameters
1944 ----------
1945 inputVisitSummaries : `list` [`lsst.afw.table.ExposureCatalog`]
1946 List of catalogs with per-detector summary information.
1947 isolatedStarSources : `list` [`DeferredDatasetHandle`]
1948 List of handles pointing to isolated star sources.
1949 isolatedStarCatalogs : `list` [`DeferredDatasetHandle`]
1950 List of handles pointing to isolated star catalogs.
1951 instrumentName : `str`, optional
1952 Name of the instrument used. This is only used for labelling.
1953 refEpoch : `float`, optional
1954 Epoch of the reference objects in MJD.
1955 refObjectLoader : instance of
1956 `lsst.meas.algorithms.loadReferenceObjects.ReferenceObjectLoader`,
1957 optional
1958 Reference object loader instance.
1959 inputCameraModel : `dict` [`str`, `np.ndarray`], optional
1960 Parameters to use for the device part of the model.
1962 Returns
1963 -------
1964 result : `lsst.pipe.base.Struct`
1965 ``outputWcss`` : `list` [`lsst.afw.table.ExposureCatalog`]
1966 List of exposure catalogs (one per visit) with the WCS for each
1967 detector set by the new fitted WCS.
1968 ``fitModel`` : `wcsfit.WCSFit`
1969 Model-fitting object with final model parameters.
1970 ``outputCatalog`` : `pyarrow.Table`
1971 Catalog with fit residuals of all sources used.
1972 ``starCatalog`` : `pyarrow.Table`
1973 Catalog with best-fit positions of the objects fit.
1974 ``modelParams`` : `dict`
1975 Parameters and covariance of the best-fit WCS model.
1976 ``cameraModelParams`` : `dict` [`str`, `np.ndarray`]
1977 Parameters of the device part of the model, in the format
1978 needed as input for future runs.
1979 """
1980 self.log.info("Gather instrument, exposure, and field info")
1981 # Set up an instrument object
1982 instrument = wcsfit.Instrument(instrumentName)
1984 # Get information about the extent of the input visits
1985 fields, fieldRegions = self._prep_sky(inputVisitSummaries)
1987 # Get RA, Dec, MJD, etc., for the input visits
1988 exposureInfo, exposuresHelper, extensionInfo = self._get_exposure_info(
1989 inputVisitSummaries, instrument, fieldRegions=fieldRegions
1990 )
1992 self.log.info("Load associated sources")
1993 medianEpoch = astropy.time.Time(exposureInfo.medianEpoch, format="decimalyear").mjd
1994 allRefObjects, allRefCovariances = {}, {}
1995 for f, fieldRegion in fieldRegions.items():
1996 refObjects, refCovariance = self._load_refcat(
1997 refObjectLoader, extensionInfo, epoch=medianEpoch, region=fieldRegion
1998 )
1999 allRefObjects[f] = refObjects
2000 allRefCovariances[f] = refCovariance
2002 associations, sourceDict = self._associate_from_isolated_sources(
2003 isolatedStarSources, isolatedStarCatalogs, extensionInfo, allRefObjects
2004 )
2006 self.log.info("Fit the WCSs")
2007 # Set up a YAML-type string using the config variables and a sample
2008 # visit
2009 inputYaml, mapTemplate = self.make_yaml(
2010 inputVisitSummaries[0],
2011 inputCameraModel=(inputCameraModel if self.config.useInputCameraModel else None),
2012 )
2014 # Set the verbosity level for WCSFit from the task log level.
2015 # TODO: DM-36850, Add lsst.log to gbdes so that log messages are
2016 # properly propagated.
2017 loglevel = self.log.getEffectiveLevel()
2018 if loglevel >= self.log.WARNING:
2019 verbose = 0
2020 elif loglevel == self.log.INFO:
2021 verbose = 1
2022 else:
2023 verbose = 2
2025 # Set up the WCS-fitting class using the source matches from the
2026 # isolated star sources plus the reference catalog.
2027 wcsf = wcsfit.WCSFit(
2028 fields,
2029 [instrument],
2030 exposuresHelper,
2031 extensionInfo.visitIndex,
2032 extensionInfo.detectorIndex,
2033 inputYaml,
2034 extensionInfo.wcs,
2035 associations.sequence,
2036 associations.extn,
2037 associations.obj,
2038 sysErr=self.config.systematicError,
2039 refSysErr=self.config.referenceSystematicError,
2040 usePM=self.config.fitProperMotion,
2041 verbose=verbose,
2042 )
2044 # Add the science and reference sources
2045 self._add_objects(wcsf, sourceDict, extensionInfo)
2046 for f in fieldRegions.keys():
2047 self._add_ref_objects(
2048 wcsf, allRefObjects[f], allRefCovariances[f], extensionInfo, fieldIndex=-1 * f
2049 )
2051 # Do the WCS fit
2052 wcsf.fit(
2053 reserveFraction=self.config.fitReserveFraction, randomNumberSeed=self.config.fitReserveRandomSeed
2054 )
2055 self.log.info("WCS fitting done")
2057 outputWcss, cameraParams = self._make_outputs(
2058 wcsf,
2059 inputVisitSummaries,
2060 exposureInfo,
2061 mapTemplate,
2062 inputCameraModel=(inputCameraModel if self.config.useInputCameraModel else None),
2063 )
2064 outputCatalog = wcsf.getOutputCatalog()
2065 starCatalog = wcsf.getStarCatalog()
2066 modelParams = self._compute_model_params(wcsf) if self.config.saveModelParams else None
2068 return pipeBase.Struct(
2069 outputWcss=outputWcss,
2070 fitModel=wcsf,
2071 outputCatalog=outputCatalog,
2072 starCatalog=starCatalog,
2073 modelParams=modelParams,
2074 cameraModelParams=cameraParams,
2075 )
2077 def _prep_sky(self, inputVisitSummaries):
2078 """Cluster the input visits into disjoint groups that will define
2079 separate fields in the astrometric fit, and, for each group, get the
2080 convex hull around all of its component visits.
2082 The groups are created such that each visit overlaps with at least one
2083 other visit in the same group by the `visitOverlap` amount, which is
2084 calculated as a fraction of the field-of-view radius, and no visits
2085 from separate groups overlap by more than this amount.
2087 Parameters
2088 ----------
2089 inputVisitSummaries : `list` [`lsst.afw.table.ExposureCatalog`]
2090 List of catalogs with per-detector summary information.
2092 Returns
2093 -------
2094 fields : `wcsfit.Fields`
2095 Object with field information.
2096 fieldRegions : `dict` [`int`, `lsst.sphgeom.ConvexPolygon`]
2097 Dictionary of regions encompassing each group of input visits,
2098 keyed by an arbitrary index.
2099 """
2100 allDetectorCorners = []
2101 mjds = []
2102 radecs = []
2103 radii = []
2104 for visSum in inputVisitSummaries:
2105 detectorCorners = [
2106 lsst.geom.SpherePoint(ra, dec, lsst.geom.degrees).getVector()
2107 for (ra, dec) in zip(visSum["raCorners"].ravel(), visSum["decCorners"].ravel())
2108 if (np.isfinite(ra) and (np.isfinite(dec)))
2109 ]
2110 allDetectorCorners.append(detectorCorners)
2112 # Get center and approximate radius of field of view
2113 boundingCircle = lsst.sphgeom.ConvexPolygon.convexHull(detectorCorners).getBoundingCircle()
2114 center = lsst.geom.SpherePoint(boundingCircle.getCenter())
2115 ra = center.getRa().asDegrees()
2116 dec = center.getDec().asDegrees()
2117 radecs.append([ra, dec])
2118 radius = boundingCircle.getOpeningAngle()
2119 radii.append(radius)
2121 obsDate = visSum[0].getVisitInfo().getDate()
2122 obsMJD = obsDate.get(obsDate.MJD)
2123 mjds.append(obsMJD)
2125 # Find groups of visits where any one of the visits overlaps another by
2126 # a given fraction of the field-of-view radius.
2127 distance = self.config.visitOverlap * np.median(radii)
2128 clustering = AgglomerativeClustering(
2129 distance_threshold=distance.asDegrees(), n_clusters=None, linkage="single"
2130 )
2131 clusters = clustering.fit(np.array(radecs))
2133 medianMJD = np.median(mjds)
2134 medianEpoch = astropy.time.Time(medianMJD, format="mjd").decimalyear
2136 fieldNames = []
2137 fieldRAs = []
2138 fieldDecs = []
2139 epochs = []
2140 fieldRegions = {}
2142 for i in range(clusters.n_clusters_):
2143 fieldInd = clusters.labels_ == i
2144 # Concatenate the lists of all detector corners that are in this
2145 # field
2146 fieldDetectors = sum([allDetectorCorners[f] for f, fInd in enumerate(fieldInd) if fInd], [])
2147 hull = lsst.sphgeom.ConvexPolygon.convexHull(fieldDetectors)
2148 center = lsst.geom.SpherePoint(hull.getCentroid())
2149 ra = center.getRa().asDegrees()
2150 dec = center.getDec().asDegrees()
2152 fieldRegions[i] = hull
2153 fieldNames.append(str(i))
2154 fieldRAs.append(ra)
2155 fieldDecs.append(dec)
2156 # Use the same median epoch for all fields so that the final object
2157 # positions are calculated for the same epoch.
2158 epochs.append(medianEpoch)
2160 fields = wcsfit.Fields(fieldNames, fieldRAs, fieldDecs, epochs)
2162 return fields, fieldRegions
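A standalone sketch of the field clustering performed in _prep_sky: visit centers closer than the linkage threshold end up in the same field under single-linkage agglomerative clustering. The coordinates and threshold below are invented; the task derives the threshold from visitOverlap times the median field-of-view radius.

# --- Illustrative sketch (not part of the module) ---------------------------
import numpy as np
from sklearn.cluster import AgglomerativeClustering

radecs = np.array([[150.0, 2.2], [150.5, 2.3], [210.0, -0.5]])   # degrees
clustering = AgglomerativeClustering(
    distance_threshold=1.0, n_clusters=None, linkage="single"
)
clusters = clustering.fit(radecs)
# clusters.labels_ -> e.g. array([0, 0, 1]): the first two visits share a field.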
2164 def _associate_from_isolated_sources(
2165 self, isolatedStarSourceRefs, isolatedStarCatalogRefs, extensionInfo, refObjects
2166 ):
2167 """Match the input catalog of isolated stars with the reference catalog
2168 and transform the combined isolated star sources and reference source
2169 into the format needed for gbdes.
2171 Parameters
2172 ----------
2173 isolatedStarSourceRefs : `list` [`DeferredDatasetHandle`]
2174 List of handles pointing to isolated star sources.
2175 isolatedStarCatalogRefs : `list` [`DeferredDatasetHandle`]
2176 List of handles pointing to isolated star catalogs.
2177 extensionInfo : `lsst.pipe.base.Struct`
2178 Struct containing properties for each extension (visit/detector).
2179 ``visit`` : `np.ndarray`
2180 Name of the visit for this extension.
2181 ``detector`` : `np.ndarray`
2182 Name of the detector for this extension.
2183 ``visitIndex`` : `np.ndarray` [`int`]
2184 Index of visit for this extension.
2185 ``detectorIndex`` : `np.ndarray` [`int`]
2186 Index of the detector for this extension.
2187 ``wcss`` : `np.ndarray` [`lsst.afw.geom.SkyWcs`]
2188 Initial WCS for this extension.
2189 ``extensionType`` : `np.ndarray` [`str`]
2190 "SCIENCE" or "REFERENCE".
2191 refObjects : `dict`
2192 Dictionary of dictionaries containing the position and error
2193 information of reference objects.
2195 Returns
2196 -------
2197 associations : `lsst.pipe.base.Struct`
2198 Struct containing the associations of sources with objects.
2199 sourceDict : `dict` [`int`, `dict` [`int`, `dict` [`str`, `list` [`float`]]]]
2200 Dictionary containing the source centroids for each visit.
2201 """
2202 sequences = []
2203 extensions = []
2204 object_indices = []
2206 sourceColumns = ["x", "y", "xErr", "yErr", "ixx", "ixy", "iyy", "obj_index", "visit", "detector"]
2207 catalogColumns = ["ra", "dec"]
2209 sourceDict = dict([(visit, {}) for visit in np.unique(extensionInfo.visit)])
2210 for visit, detector in zip(extensionInfo.visit, extensionInfo.detector):
2211 sourceDict[visit][detector] = {"x": [], "y": [], "xCov": [], "yCov": [], "xyCov": []}
2213 for isolatedStarCatalogRef, isolatedStarSourceRef in zip(
2214 isolatedStarCatalogRefs, isolatedStarSourceRefs
2215 ):
2216 isolatedStarCatalog = isolatedStarCatalogRef.get(parameters={"columns": catalogColumns})
2217 isolatedStarSources = isolatedStarSourceRef.get(parameters={"columns": sourceColumns})
2218 if len(isolatedStarCatalog) == 0:
2219 # This is expected when only one visit overlaps with a given
2220 # tract, meaning that no sources can be associated.
2221 self.log.debug(
2222 "Skipping tract %d, which has no associated isolated stars",
2223 isolatedStarCatalogRef.dataId["tract"],
2224 )
2225 continue
2227 # Match the reference stars to the existing isolated stars, then
2228 # insert the reference stars into the isolated star sources.
2229 allVisits = np.copy(isolatedStarSources["visit"])
2230 allDetectors = np.copy(isolatedStarSources["detector"])
2231 allObjectIndices = np.copy(isolatedStarSources["obj_index"])
2232 issIndices = np.copy(isolatedStarSources.index)
2233 for f, regionRefObjects in refObjects.items():
2234 # Use the same matching technique that is done in
2235 # isolatedStarAssociation and fgcmBuildFromIsolatedStars.
2236 with Matcher(
2237 isolatedStarCatalog["ra"].to_numpy(), isolatedStarCatalog["dec"].to_numpy()
2238 ) as matcher:
2239 idx, i1, i2, d = matcher.query_radius(
2240 np.array(regionRefObjects["ra"]),
2241 np.array(regionRefObjects["dec"]),
2242 self.config.matchRadius / 3600.0,
2243 return_indices=True,
2244 )
2246 refSort = np.searchsorted(isolatedStarSources["obj_index"], i1)
2247 refDetector = np.ones(len(i1)) * -1
2248 # The "visit" for the reference catalogs is the field times -1.
2249 refVisit = np.ones(len(i1)) * f * -1
2251 allVisits = np.insert(allVisits, refSort, refVisit)
2252 allDetectors = np.insert(allDetectors, refSort, refDetector)
2253 allObjectIndices = np.insert(allObjectIndices, refSort, i1)
2254 issIndices = np.insert(issIndices, refSort, i2)
2256 # Loop through the associated sources to convert them to the gbdes
2257 # format, which requires the extension index, the source's index in
2258 # the input table, and a sequence number corresponding to the
2259 # object with which it is associated.
2260 sequence = 0
2261 obj_index = allObjectIndices[0]
2262 for visit, detector, row, obj_ind in zip(allVisits, allDetectors, issIndices, allObjectIndices):
2263 extensionIndex = np.flatnonzero(
2264 (extensionInfo.visit == visit) & (extensionInfo.detector == detector)
2265 )
2266 if len(extensionIndex) == 0:
2267 # This happens for runs that do not use all of the visits
2268 # overlapping a given tract that were included in the
2269 # isolated star association task.
2270 continue
2271 else:
2272 extensionIndex = extensionIndex[0]
2274 extensions.append(extensionIndex)
2275 if visit <= 0:
2276 object_indices.append(row)
2277 else:
2278 object_indices.append(len(sourceDict[visit][detector]["x"]))
2279 source = isolatedStarSources.loc[row]
2280 sourceDict[visit][detector]["x"].append(source["x"])
2281 sourceDict[visit][detector]["y"].append(source["y"])
2282 xCov = source["xErr"] ** 2
2283 yCov = source["yErr"] ** 2
2284 xyCov = source["ixy"] * (xCov + yCov) / (source["ixx"] + source["iyy"])
2285 # TODO: add correct xyErr if DM-7101 is ever done.
2286 sourceDict[visit][detector]["xCov"].append(xCov)
2287 sourceDict[visit][detector]["yCov"].append(yCov)
2288 sourceDict[visit][detector]["xyCov"].append(xyCov)
2289 if obj_ind != obj_index:
2290 sequence = 0
2291 sequences.append(sequence)
2292 obj_index = obj_ind
2293 sequence += 1
2294 else:
2295 sequences.append(sequence)
2296 sequence += 1
2298 associations = pipeBase.Struct(extn=extensions, obj=object_indices, sequence=sequences)
2299 return associations, sourceDict
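The sequence numbering built above restarts whenever a new associated object begins; the small standalone loop below, with invented obj_index values, reproduces that bookkeeping.

# --- Illustrative sketch (not part of the module) ---------------------------
objIndices = [17, 17, 17, 18, 18]     # made-up stream of matched detections
sequences = []
sequence, current = 0, objIndices[0]
for objInd in objIndices:
    if objInd != current:
        # A new associated object starts, so the counter resets.
        sequence = 0
        current = objInd
    sequences.append(sequence)
    sequence += 1
# sequences == [0, 1, 2, 0, 1]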
2301 def _add_objects(self, wcsf, sourceDict, extensionInfo):
2302 """Add science sources to the wcsfit.WCSFit object.
2304 Parameters
2305 ----------
2306 wcsf : `wcsfit.WCSFit`
2307 WCS-fitting object.
2308 sourceDict : `dict`
2309 Dictionary containing the source centroids for each visit.
2310 extensionInfo : `lsst.pipe.base.Struct`
2311 Struct containing properties for each extension (visit/detector).
2312 ``visit`` : `np.ndarray`
2313 Name of the visit for this extension.
2314 ``detector`` : `np.ndarray`
2315 Name of the detector for this extension.
2316 ``visitIndex`` : `np.ndarray` [`int`]
2317 Index of visit for this extension.
2318 ``detectorIndex`` : `np.ndarray` [`int`]
2319 Index of the detector for this extension.
2320 ``wcss`` : `np.ndarray` [`lsst.afw.geom.SkyWcs`]
2321 Initial WCS for this extension.
2322 ``extensionType`` : `np.ndarray` [`str`]
2323 "SCIENCE" or "REFERENCE".
2324 """
2325 for visit, visitSources in sourceDict.items():
2326 # Visit numbers equal to or below zero denote the reference catalog.
2327 if visit <= 0:
2328 # This "visit" number corresponds to a reference catalog.
2329 continue
2331 for detector, sourceCat in visitSources.items():
2332 extensionIndex = np.flatnonzero(
2333 (extensionInfo.visit == visit) & (extensionInfo.detector == detector)
2334 )[0]
2336 d = {
2337 "x": np.array(sourceCat["x"]),
2338 "y": np.array(sourceCat["y"]),
2339 "xCov": np.array(sourceCat["xCov"]),
2340 "yCov": np.array(sourceCat["yCov"]),
2341 "xyCov": np.array(sourceCat["xyCov"]),
2342 }
2343 wcsf.setObjects(extensionIndex, d, "x", "y", ["xCov", "yCov", "xyCov"])
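Finally, a minimal sketch (all values invented) of the per-detector payload that _add_objects hands to setObjects, including the moment-based approximation for the pixel xy covariance used when the sources were collected (a proper xy error term awaits DM-7101).

# --- Illustrative sketch (not part of the module) ---------------------------
import numpy as np

xErr, yErr = 0.02, 0.03            # centroid errors in pixels
ixx, iyy, ixy = 4.1, 3.8, 0.2      # second moments
xCov, yCov = xErr**2, yErr**2
xyCov = ixy * (xCov + yCov) / (ixx + iyy)   # approximation used above

d = {
    "x": np.array([1021.4]),
    "y": np.array([88.7]),
    "xCov": np.array([xCov]),
    "yCov": np.array([yCov]),
    "xyCov": np.array([xyCov]),
}
# wcsf.setObjects(extensionIndex, d, "x", "y", ["xCov", "yCov", "xyCov"])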