Coverage for python/lsst/drp/tasks/gbdesAstrometricFit.py: 11% of 451 statements (coverage.py v7.3.0, created at 2023-08-30 08:37 +0000)
1# This file is part of drp_tasks.
2#
3# LSST Data Management System
4# This product includes software developed by the
5# LSST Project (http://www.lsst.org/).
6# See COPYRIGHT file at the top of the source tree.
7#
8# This program is free software: you can redistribute it and/or modify
9# it under the terms of the GNU General Public License as published by
10# the Free Software Foundation, either version 3 of the License, or
11# (at your option) any later version.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the LSST License Statement and
19# the GNU General Public License along with this program. If not,
20# see <https://www.lsstcorp.org/LegalNotices/>.
21#
22import numpy as np
23import astropy.time
24import astropy.units as u
25import astropy.coordinates
26import yaml
27import wcsfit
28import astshim
30import lsst.geom
31import lsst.pex.config as pexConfig
32import lsst.pipe.base as pipeBase
33import lsst.sphgeom
34import lsst.afw.table
35import lsst.afw.geom as afwgeom
36from lsst.meas.algorithms import (LoadReferenceObjectsConfig, ReferenceObjectLoader,
37 ReferenceSourceSelectorTask)
38from lsst.meas.algorithms.sourceSelector import sourceSelectorRegistry
40__all__ = ['GbdesAstrometricFitConnections', 'GbdesAstrometricFitConfig', 'GbdesAstrometricFitTask']
43def _make_ref_covariance_matrix(refCat, inputUnit=u.radian, outputCoordUnit=u.marcsec,
44 outputPMUnit=u.marcsec, version=1):
45 """Make a covariance matrix for the reference catalog including proper
46 motion and parallax.
48 The output is flattened to one dimension to match the format expected by
49 `gbdes`.
51 Parameters
52 ----------
53 refCat : `lsst.afw.table.SimpleCatalog`
54 Catalog including proper motion and parallax measurements.
    inputUnit : `astropy.units.Unit`
        Units of the input catalog.
    outputCoordUnit : `astropy.units.Unit`
        Units required for the coordinates in the covariance matrix. `gbdes`
        expects milliarcseconds.
    outputPMUnit : `astropy.units.Unit`
        Units required for the proper motion/parallax in the covariance matrix.
        `gbdes` expects milliarcseconds.
63 version : `int`
64 Version of the reference catalog. Version 2 includes covariance
65 measurements.
66 Returns
67 -------
68 cov : `list` of `float`
69 Flattened output covariance matrix.
70 """
71 cov = np.zeros((len(refCat), 25))
72 if version == 1:
        # Here is the standard ordering of components in the covariance
        # matrix, chosen to match the PM enumeration in the C++ code of the
        # gbdes package's Match. Each tuple gives the array holding the 1-d
        # error, the corresponding Gaia column name, and that component's
        # position in the Gaia ordering; the order of the tuples themselves is
        # the order we want in our covariance matrix.
79 raErr = (refCat['coord_raErr'] * inputUnit).to(outputCoordUnit).to_value()
80 decErr = (refCat['coord_decErr'] * inputUnit).to(outputCoordUnit).to_value()
81 raPMErr = (refCat['pm_raErr'] * inputUnit).to(outputPMUnit).to_value()
82 decPMErr = (refCat['pm_decErr'] * inputUnit).to(outputPMUnit).to_value()
83 parallaxErr = (refCat['parallaxErr'] * inputUnit).to(outputPMUnit).to_value()
84 stdOrder = ((raErr, 'ra', 0),
85 (decErr, 'dec', 1),
86 (raPMErr, 'pmra', 3),
87 (decPMErr, 'pmdec', 4),
88 (parallaxErr, 'parallax', 2))
90 k = 0
91 for i, pr1 in enumerate(stdOrder):
92 for j, pr2 in enumerate(stdOrder):
                if pr1[2] == pr2[2]:
                    # Diagonal element.
                    cov[:, k] = pr1[0] * pr2[0]
                else:
                    # Off-diagonal elements are zero for version 1.
                    cov[:, k] = 0
                k += 1
102 elif version == 2:
103 positionParameters = ['coord_ra', 'coord_dec', 'pm_ra', 'pm_dec', 'parallax']
104 units = [outputCoordUnit, outputCoordUnit, outputPMUnit, outputPMUnit, outputPMUnit]
105 k = 0
106 for i, pi in enumerate(positionParameters):
107 for j, pj in enumerate(positionParameters):
108 if i == j:
109 cov[:, k] = (refCat[f'{pi}Err']**2 * inputUnit**2).to_value(units[j] * units[j])
110 elif i > j:
111 cov[:, k] = (refCat[f'{pj}_{pi}_Cov'] * inputUnit**2).to_value(units[i] * units[j])
112 else:
113 cov[:, k] = (refCat[f'{pi}_{pj}_Cov'] * inputUnit**2).to_value(units[i] * units[j])
115 k += 1
116 return cov
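
# The sketch below is illustrative only and is not used by the task: it shows
# how the flattened covariance returned by _make_ref_covariance_matrix can be
# reshaped back into a 5x5 matrix per object. The toy catalog and its values
# are assumptions for demonstration; real inputs come from the reference
# catalog loader.
def _example_flattened_ref_covariance():
    # Build a minimal structured array with the version=1 error columns
    # (values in radians, following the refcat convention assumed above).
    toyCat = np.zeros(2, dtype=[('coord_raErr', 'f8'), ('coord_decErr', 'f8'),
                                ('pm_raErr', 'f8'), ('pm_decErr', 'f8'),
                                ('parallaxErr', 'f8')])
    for column in toyCat.dtype.names:
        toyCat[column] = 1e-9
    flatCov = _make_ref_covariance_matrix(toyCat, version=1)
    # Each row holds a flattened 5x5 matrix ordered ra, dec, pmra, pmdec,
    # parallax; for version=1 only the diagonal is non-zero.
    return flatCov[0].reshape(5, 5)
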
119def _convert_to_ast_polymap_coefficients(coefficients):
120 """Convert vector of polynomial coefficients from the format used in
121 `gbdes` into AST format (see Poly2d::vectorIndex(i, j) in
122 gbdes/gbutil/src/Poly2d.cpp). This assumes two input and two output
123 coordinates.
125 Parameters
126 ----------
    coefficients : `list`
        Coefficients of the polynomials, in gbdes order. The polynomial degree
        is inferred from the number of coefficients.

    Returns
    -------
    astPoly : `astshim.PolyMap`
        Polynomial mapping in AST format.
136 """
137 polyArray = np.zeros((len(coefficients), 4))
138 N = len(coefficients) / 2
139 # Get the degree of the polynomial by applying the quadratic formula to the
140 # formula for calculating the number of coefficients of the polynomial.
141 degree = int(-1.5 + 0.5 * (1 + 8 * N)**0.5)
143 for outVar in [1, 2]:
144 for i in range(degree + 1):
145 for j in range(degree + 1):
146 if (i + j) > degree:
147 continue
148 vectorIndex = int(((i+j)*(i+j+1))/2+j + N * (outVar - 1))
149 polyArray[vectorIndex, 0] = coefficients[vectorIndex]
150 polyArray[vectorIndex, 1] = outVar
151 polyArray[vectorIndex, 2] = i
152 polyArray[vectorIndex, 3] = j
154 astPoly = astshim.PolyMap(polyArray, 2, options="IterInverse=1,NIterInverse=10,TolInverse=1e-7")
155 return astPoly
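
# Illustrative sketch (not called by the task): gbdes orders 2-d polynomial
# coefficients by Poly2d::vectorIndex(i, j) = (i+j)(i+j+1)/2 + j, with the two
# output coordinates concatenated. For a degree-1 identity transform the
# flattened vector is therefore [0, 1, 0, 0, 0, 1], and converting it should
# yield an AST PolyMap that acts like the identity. The values here are chosen
# by hand purely for demonstration.
def _example_identity_polymap():
    # x_out = x -> coefficient 1 at vectorIndex(1, 0) = 1 in the first block;
    # y_out = y -> coefficient 1 at vectorIndex(0, 1) = 2 in the second block.
    identityCoefficients = [0.0, 1.0, 0.0, 0.0, 0.0, 1.0]
    return _convert_to_ast_polymap_coefficients(identityCoefficients)
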
158def _get_wcs_from_sip(butlerWcs):
159 """Get wcsfit.Wcs in TPV format from the SIP-formatted input WCS.
161 Parameters
162 ----------
163 butlerWcs : `lsst.afw.geom.SkyWcs`
164 Input WCS from the calexp in SIP format.
166 Returns
167 -------
168 wcs : `wcsfit.Wcs`
169 WCS object in TPV format.
170 """
171 fits_metadata = butlerWcs.getFitsMetadata()
172 if not ((fits_metadata.get('CTYPE1') == 'RA---TAN-SIP')
173 and (fits_metadata.get('CTYPE2') == 'DEC--TAN-SIP')):
        raise ValueError(f"CTYPES {fits_metadata.get('CTYPE1')} and {fits_metadata.get('CTYPE2')} "
                         "do not match SIP convention")
177 # Correct CRPIX values to correspond to source table pixel indexing
178 # convention
179 crpix1 = fits_metadata.get('CRPIX1')
180 crpix2 = fits_metadata.get('CRPIX2')
181 fits_metadata.set('CRPIX1', crpix1 - 1)
182 fits_metadata.set('CRPIX2', crpix2 - 1)
184 floatDict = {k: fits_metadata[k] for k in fits_metadata if isinstance(fits_metadata[k], (int, float))}
186 wcs = wcsfit.readTPVFromSIP(floatDict, 'SIP')
188 return wcs
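
# Small illustrative helper (an assumption, not part of the task's API): apply
# _get_wcs_from_sip to every detector row of a visitSummary catalog, which is
# how the task uses it in _get_exposure_info below.
def _example_visit_tpv_wcs_list(visitSummary):
    # visitSummary is an `lsst.afw.table.ExposureCatalog`; row.getWcs() returns
    # the initial SIP-convention SkyWcs for that detector.
    return [_get_wcs_from_sip(row.getWcs()) for row in visitSummary]
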
191class GbdesAstrometricFitConnections(pipeBase.PipelineTaskConnections,
192 dimensions=('skymap', 'tract', 'instrument', 'physical_filter')):
193 """Middleware input/output connections for task data."""
194 inputCatalogRefs = pipeBase.connectionTypes.Input(
195 doc="Source table in parquet format, per visit.",
196 name='preSourceTable_visit',
197 storageClass='DataFrame',
198 dimensions=('instrument', 'visit'),
199 deferLoad=True,
200 multiple=True,
201 )
202 inputVisitSummaries = pipeBase.connectionTypes.Input(
203 doc=("Per-visit consolidated exposure metadata built from calexps. "
204 "These catalogs use detector id for the id and must be sorted for "
205 "fast lookups of a detector."),
206 name='visitSummary',
207 storageClass='ExposureCatalog',
208 dimensions=('instrument', 'visit'),
209 multiple=True,
210 )
211 referenceCatalog = pipeBase.connectionTypes.PrerequisiteInput(
212 doc="The astrometry reference catalog to match to loaded input catalog sources.",
213 name='gaia_dr3_20230707',
214 storageClass='SimpleCatalog',
215 dimensions=('skypix',),
216 deferLoad=True,
217 multiple=True,
218 )
219 outputWcs = pipeBase.connectionTypes.Output(
220 doc=("Per-tract, per-visit world coordinate systems derived from the fitted model."
221 " These catalogs only contain entries for detectors with an output, and use"
222 " the detector id for the catalog id, sorted on id for fast lookups of a detector."),
223 name='gbdesAstrometricFitSkyWcsCatalog',
224 storageClass='ExposureCatalog',
225 dimensions=('instrument', 'visit', 'skymap', 'tract'),
226 multiple=True
227 )
228 outputCatalog = pipeBase.connectionTypes.Output(
229 doc=("Source table with stars used in fit, along with residuals in pixel coordinates and tangent "
230 "plane coordinates and chisq values."),
231 name='gbdesAstrometricFit_fitStars',
232 storageClass='ArrowNumpyDict',
233 dimensions=('instrument', 'skymap', 'tract', 'physical_filter'),
234 )
235 starCatalog = pipeBase.connectionTypes.Output(
236 doc="Star catalog.",
237 name='gbdesAstrometricFit_starCatalog',
238 storageClass='ArrowNumpyDict',
239 dimensions=('instrument', 'skymap', 'tract', 'physical_filter')
240 )
242 def getSpatialBoundsConnections(self):
243 return ("inputVisitSummaries",)
246class GbdesAstrometricFitConfig(pipeBase.PipelineTaskConfig,
247 pipelineConnections=GbdesAstrometricFitConnections):
248 """Configuration for GbdesAstrometricFitTask"""
249 sourceSelector = sourceSelectorRegistry.makeField(
250 doc="How to select sources for cross-matching.",
251 default='science'
252 )
253 referenceSelector = pexConfig.ConfigurableField(
254 target=ReferenceSourceSelectorTask,
255 doc="How to down-select the loaded astrometry reference catalog.",
256 )
257 matchRadius = pexConfig.Field(
258 doc="Matching tolerance between associated objects (arcseconds).",
259 dtype=float,
260 default=1.0
261 )
262 minMatches = pexConfig.Field(
263 doc="Number of matches required to keep a source object.",
264 dtype=int,
265 default=2
266 )
267 allowSelfMatches = pexConfig.Field(
268 doc="Allow multiple sources from the same visit to be associated with the same object.",
269 dtype=bool,
270 default=False
271 )
272 sourceFluxType = pexConfig.Field(
273 dtype=str,
274 doc="Source flux field to use in source selection and to get fluxes from the catalog.",
275 default='apFlux_12_0'
276 )
277 systematicError = pexConfig.Field(
278 dtype=float,
        doc=("Systematic error padding added in quadrature for the science catalogs (marcsec). The "
             "default value is equivalent to 0.02 pixels for HSC."),
281 default=0.0034
282 )
283 referenceSystematicError = pexConfig.Field(
284 dtype=float,
285 doc="Systematic error padding added in quadrature for the reference catalog (marcsec).",
286 default=0.0
287 )
288 modelComponents = pexConfig.ListField(
289 dtype=str,
        doc=("List of mappings to apply to transform from pixels to sky, in order of their application. "
             "Supported options are 'INSTRUMENT/DEVICE' and 'EXPOSURE'."),
292 default=['INSTRUMENT/DEVICE', 'EXPOSURE']
293 )
294 deviceModel = pexConfig.ListField(
295 dtype=str,
        doc=("List of mappings to apply to transform from detector pixels to intermediate frame. Map "
             "names should match the format 'BAND/DEVICE/<map name>'."),
298 default=['BAND/DEVICE/poly']
299 )
300 exposureModel = pexConfig.ListField(
301 dtype=str,
        doc=("List of mappings to apply to transform from intermediate frame to sky coordinates. Map "
             "names should match the format 'EXPOSURE/<map name>'."),
304 default=['EXPOSURE/poly']
305 )
306 devicePolyOrder = pexConfig.Field(
307 dtype=int,
308 doc="Order of device polynomial model.",
309 default=4
310 )
311 exposurePolyOrder = pexConfig.Field(
312 dtype=int,
313 doc="Order of exposure polynomial model.",
314 default=6
315 )
316 fitProperMotion = pexConfig.Field(
317 dtype=bool,
318 doc="Fit the proper motions of the objects.",
319 default=False
320 )
321 excludeNonPMObjects = pexConfig.Field(
322 dtype=bool,
323 doc="Exclude reference objects without proper motion/parallax information.",
324 default=True
325 )
326 fitReserveFraction = pexConfig.Field(
327 dtype=float,
328 default=0.2,
329 doc="Fraction of objects to reserve from fit for validation."
330 )
331 fitReserveRandomSeed = pexConfig.Field(
332 dtype=int,
333 doc="Set the random seed for selecting data points to reserve from the fit for validation.",
334 default=1234
335 )
337 def setDefaults(self):
338 # Use only stars because aperture fluxes of galaxies are biased and
339 # depend on seeing.
340 self.sourceSelector['science'].doUnresolved = True
341 self.sourceSelector['science'].unresolved.name = 'extendedness'
343 # Use only isolated sources.
344 self.sourceSelector['science'].doIsolated = True
345 self.sourceSelector['science'].isolated.parentName = 'parentSourceId'
346 self.sourceSelector['science'].isolated.nChildName = 'deblend_nChild'
347 # Do not use either flux or centroid measurements with flags,
348 # chosen from the usual QA flags for stars.
349 self.sourceSelector['science'].doFlags = True
350 badFlags = ['pixelFlags_edge',
351 'pixelFlags_saturated',
352 'pixelFlags_interpolatedCenter',
353 'pixelFlags_interpolated',
354 'pixelFlags_crCenter',
355 'pixelFlags_bad',
356 'hsmPsfMoments_flag',
357 f'{self.sourceFluxType}_flag',
358 ]
359 self.sourceSelector['science'].flags.bad = badFlags
361 # Use only primary sources.
362 self.sourceSelector['science'].doRequirePrimary = True
364 def validate(self):
365 super().validate()
367 # Check if all components of the device and exposure models are
368 # supported.
369 for component in self.deviceModel:
370 if not (('poly' in component.lower()) or ('identity' in component.lower())):
371 raise pexConfig.FieldValidationError(GbdesAstrometricFitConfig.deviceModel, self,
372 f'deviceModel component {component} is not supported.')
374 for component in self.exposureModel:
375 if not (('poly' in component.lower()) or ('identity' in component.lower())):
376 raise pexConfig.FieldValidationError(GbdesAstrometricFitConfig.exposureModel, self,
377 f'exposureModel component {component} is not supported.')
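
# Example config override (a sketch of assumed usage, not required by the
# task): the model components are validated against the supported 'poly' and
# 'identity' map types, so a simpler exposure model can be selected like this.
def _example_simplified_config():
    config = GbdesAstrometricFitConfig()
    # Use a lower-order per-device polynomial and an identity exposure map.
    config.devicePolyOrder = 3
    config.exposureModel = ["EXPOSURE/identity"]
    # The reference catalog connection can also be retargeted through the
    # config; "gaia_dr2_20200414" is a placeholder dataset type name.
    config.connections.referenceCatalog = "gaia_dr2_20200414"
    config.validate()
    return config
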
380class GbdesAstrometricFitTask(pipeBase.PipelineTask):
381 """Calibrate the WCS across multiple visits of the same field using the
382 GBDES package.
383 """
385 ConfigClass = GbdesAstrometricFitConfig
386 _DefaultName = 'gbdesAstrometricFit'
388 def __init__(self, **kwargs):
389 super().__init__(**kwargs)
390 self.makeSubtask('sourceSelector')
391 self.makeSubtask('referenceSelector')
393 def runQuantum(self, butlerQC, inputRefs, outputRefs):
394 # We override runQuantum to set up the refObjLoaders
395 inputs = butlerQC.get(inputRefs)
397 instrumentName = butlerQC.quantum.dataId['instrument']
399 # Ensure the inputs are in a consistent order
400 inputCatVisits = np.array([inputCat.dataId['visit'] for inputCat in inputs['inputCatalogRefs']])
401 inputs['inputCatalogRefs'] = [inputs['inputCatalogRefs'][v] for v in inputCatVisits.argsort()]
402 inputSumVisits = np.array([inputSum[0]['visit'] for inputSum in inputs['inputVisitSummaries']])
403 inputs['inputVisitSummaries'] = [inputs['inputVisitSummaries'][v] for v in inputSumVisits.argsort()]
404 inputRefHtm7s = np.array([inputRefCat.dataId['htm7'] for inputRefCat in inputRefs.referenceCatalog])
405 inputRefCatRefs = [inputRefs.referenceCatalog[htm7] for htm7 in inputRefHtm7s.argsort()]
406 inputRefCats = np.array([inputRefCat.dataId['htm7'] for inputRefCat in inputs['referenceCatalog']])
407 inputs['referenceCatalog'] = [inputs['referenceCatalog'][v] for v in inputRefCats.argsort()]
409 sampleRefCat = inputs['referenceCatalog'][0].get()
410 refEpoch = sampleRefCat[0]['epoch']
412 refConfig = LoadReferenceObjectsConfig()
413 refConfig.anyFilterMapsToThis = 'phot_g_mean'
414 refConfig.requireProperMotion = True
415 refObjectLoader = ReferenceObjectLoader(dataIds=[ref.datasetRef.dataId
416 for ref in inputRefCatRefs],
417 refCats=inputs.pop('referenceCatalog'),
418 config=refConfig,
419 log=self.log)
421 output = self.run(**inputs, instrumentName=instrumentName, refEpoch=refEpoch,
422 refObjectLoader=refObjectLoader)
424 for outputRef in outputRefs.outputWcs:
425 visit = outputRef.dataId['visit']
426 butlerQC.put(output.outputWCSs[visit], outputRef)
427 butlerQC.put(output.outputCatalog, outputRefs.outputCatalog)
428 butlerQC.put(output.starCatalog, outputRefs.starCatalog)
430 def run(self, inputCatalogRefs, inputVisitSummaries, instrumentName="", refEpoch=None,
431 refObjectLoader=None):
432 """Run the WCS fit for a given set of visits
434 Parameters
435 ----------
436 inputCatalogRefs : `list`
437 List of `DeferredDatasetHandle`s pointing to visit-level source
438 tables.
439 inputVisitSummaries : `list` of `lsst.afw.table.ExposureCatalog`
440 List of catalogs with per-detector summary information.
441 instrumentName : `str`, optional
442 Name of the instrument used. This is only used for labelling.
443 refEpoch : `float`
444 Epoch of the reference objects in MJD.
445 refObjectLoader : instance of
446 `lsst.meas.algorithms.loadReferenceObjects.ReferenceObjectLoader`
            Reference object loader instance.
449 Returns
450 -------
451 result : `lsst.pipe.base.Struct`
            ``outputWCSs`` : `dict` [`int`, `lsst.afw.table.ExposureCatalog`]
                Dictionary keyed by visit id of exposure catalogs with the WCS
                for each detector set to the new fitted WCS.
455 ``fitModel`` : `wcsfit.WCSFit`
456 Model-fitting object with final model parameters.
457 ``outputCatalog`` : `pyarrow.Table`
458 Catalog with fit residuals of all sources used.
459 """
460 self.log.info("Gathering instrument, exposure, and field info")
461 # Set up an instrument object
462 instrument = wcsfit.Instrument(instrumentName)
464 # Get RA, Dec, MJD, etc., for the input visits
465 exposureInfo, exposuresHelper, extensionInfo = self._get_exposure_info(inputVisitSummaries,
466 instrument)
468 # Get information about the extent of the input visits
469 fields, fieldCenter, fieldRadius = self._prep_sky(inputVisitSummaries, exposureInfo.medianEpoch)
471 self.log.info("Load catalogs and associate sources")
472 # Set up class to associate sources into matches using a
473 # friends-of-friends algorithm
474 associations = wcsfit.FoFClass(fields, [instrument], exposuresHelper,
475 [fieldRadius.asDegrees()],
476 (self.config.matchRadius * u.arcsec).to(u.degree).value)
478 # Add the reference catalog to the associator
479 medianEpoch = astropy.time.Time(exposureInfo.medianEpoch, format='decimalyear').mjd
480 refObjects, refCovariance = self._load_refcat(associations, refObjectLoader, fieldCenter, fieldRadius,
481 extensionInfo, epoch=medianEpoch)
483 # Add the science catalogs and associate new sources as they are added
484 sourceIndices, usedColumns = self._load_catalogs_and_associate(associations, inputCatalogRefs,
485 extensionInfo)
487 self.log.info("Fit the WCSs")
488 # Set up a YAML-type string using the config variables and a sample
489 # visit
490 inputYAML = self.make_yaml(inputVisitSummaries[0])
492 # Set the verbosity level for WCSFit from the task log level.
493 # TODO: DM-36850, Add lsst.log to gbdes so that log messages are
494 # properly propagated.
495 loglevel = self.log.getEffectiveLevel()
496 if loglevel >= self.log.WARNING:
497 verbose = 0
498 elif loglevel == self.log.INFO:
499 verbose = 1
500 else:
501 verbose = 2
503 # Set up the WCS-fitting class using the results of the FOF associator
504 wcsf = wcsfit.WCSFit(fields, [instrument], exposuresHelper,
505 extensionInfo.visitIndex, extensionInfo.detectorIndex,
506 inputYAML, extensionInfo.wcs, associations.sequence, associations.extn,
507 associations.obj, sysErr=self.config.systematicError,
508 refSysErr=self.config.referenceSystematicError,
509 usePM=self.config.fitProperMotion,
510 verbose=verbose)
512 # Add the science and reference sources
513 self._add_objects(wcsf, inputCatalogRefs, sourceIndices, extensionInfo, usedColumns)
514 self._add_ref_objects(wcsf, refObjects, refCovariance, extensionInfo)
516 # Do the WCS fit
517 wcsf.fit(reserveFraction=self.config.fitReserveFraction,
518 randomNumberSeed=self.config.fitReserveRandomSeed)
519 self.log.info("WCS fitting done")
521 outputWCSs = self._make_outputs(wcsf, inputVisitSummaries, exposureInfo)
522 outputCatalog = wcsf.getOutputCatalog()
523 starCatalog = wcsf.getStarCatalog()
525 return pipeBase.Struct(outputWCSs=outputWCSs,
526 fitModel=wcsf,
527 outputCatalog=outputCatalog,
528 starCatalog=starCatalog)
530 def _prep_sky(self, inputVisitSummaries, epoch, fieldName='Field'):
531 """Get center and radius of the input tract. This assumes that all
532 visits will be put into the same `wcsfit.Field` and fit together.
        Parameters
535 ----------
536 inputVisitSummaries : `list` of `lsst.afw.table.ExposureCatalog`
537 List of catalogs with per-detector summary information.
538 epoch : float
539 Reference epoch.
540 fieldName : str
541 Name of the field, used internally.
543 Returns
544 -------
545 fields : `wcsfit.Fields`
546 Object with field information.
547 center : `lsst.geom.SpherePoint`
548 Center of the field.
549 radius : `lsst.sphgeom._sphgeom.Angle`
550 Radius of the bounding circle of the tract.
551 """
552 allDetectorCorners = []
553 for visSum in inputVisitSummaries:
554 detectorCorners = [lsst.geom.SpherePoint(ra, dec, lsst.geom.degrees).getVector() for (ra, dec)
555 in zip(visSum['raCorners'].ravel(), visSum['decCorners'].ravel())]
556 allDetectorCorners.extend(detectorCorners)
557 boundingCircle = lsst.sphgeom.ConvexPolygon.convexHull(allDetectorCorners).getBoundingCircle()
558 center = lsst.geom.SpherePoint(boundingCircle.getCenter())
559 ra = center.getRa().asDegrees()
560 dec = center.getDec().asDegrees()
561 radius = boundingCircle.getOpeningAngle()
563 # wcsfit.Fields describes a list of fields, but we assume all
564 # observations will be fit together in one field.
565 fields = wcsfit.Fields([fieldName], [ra], [dec], [epoch])
567 return fields, center, radius
569 def _get_exposure_info(self, inputVisitSummaries, instrument, fieldNumber=0, instrumentNumber=0,
570 refEpoch=None):
571 """Get various information about the input visits to feed to the
572 fitting routines.
574 Parameters
575 ----------
576 inputVisitSummaries : `list` of `lsst.afw.table.ExposureCatalog`
577 Tables for each visit with information for detectors.
578 instrument : `wcsfit.Instrument`
579 Instrument object to which detector information is added.
580 fieldNumber : `int`
581 Index of the field for these visits. Should be zero if all data is
582 being fit together.
583 instrumentNumber : `int`
584 Index of the instrument for these visits. Should be zero if all
585 data comes from the same instrument.
586 refEpoch : `float`
587 Epoch of the reference objects in MJD.
589 Returns
590 -------
591 exposureInfo : `lsst.pipe.base.Struct`
592 Struct containing general properties for the visits:
593 ``visits`` : `list`
594 List of visit names.
595 ``detectors`` : `list`
596 List of all detectors in any visit.
597 ``ras`` : `list` of float
598 List of boresight RAs for each visit.
599 ``decs`` : `list` of float
                List of boresight Decs for each visit.
601 ``medianEpoch`` : float
602 Median epoch of all visits in decimal-year format.
603 exposuresHelper : `wcsfit.ExposuresHelper`
604 Object containing information about the input visits.
605 extensionInfo : `lsst.pipe.base.Struct`
606 Struct containing properties for each extension:
            ``visit`` : `np.ndarray`
                Visit id for this extension.
            ``detector`` : `np.ndarray`
                Detector id for this extension.
            ``visitIndex`` : `np.ndarray` of `int`
                Index of the visit for this extension.
            ``detectorIndex`` : `np.ndarray` of `int`
                Index of the detector for this extension.
            ``wcs`` : `np.ndarray` of `wcsfit.Wcs`
                Initial WCS for this extension.
617 ``extensionType`` : `np.ndarray` of `str`
618 "SCIENCE" or "REFERENCE".
619 """
620 exposureNames = []
621 ras = []
622 decs = []
623 visits = []
624 detectors = []
625 airmasses = []
626 exposureTimes = []
627 mjds = []
628 observatories = []
629 wcss = []
631 extensionType = []
632 extensionVisitIndices = []
633 extensionDetectorIndices = []
634 extensionVisits = []
635 extensionDetectors = []
636 # Get information for all the science visits
637 for v, visitSummary in enumerate(inputVisitSummaries):
638 visitInfo = visitSummary[0].getVisitInfo()
639 visit = visitSummary[0]['visit']
640 visits.append(visit)
641 exposureNames.append(str(visit))
642 raDec = visitInfo.getBoresightRaDec()
643 ras.append(raDec.getRa().asRadians())
644 decs.append(raDec.getDec().asRadians())
645 airmasses.append(visitInfo.getBoresightAirmass())
646 exposureTimes.append(visitInfo.getExposureTime())
647 obsDate = visitInfo.getDate()
648 obsMJD = obsDate.get(obsDate.MJD)
649 mjds.append(obsMJD)
650 # Get the observatory ICRS position for use in fitting parallax
651 obsLon = visitInfo.observatory.getLongitude().asDegrees()
652 obsLat = visitInfo.observatory.getLatitude().asDegrees()
653 obsElev = visitInfo.observatory.getElevation()
654 earthLocation = astropy.coordinates.EarthLocation.from_geodetic(obsLon, obsLat, obsElev)
655 observatory_gcrs = earthLocation.get_gcrs(astropy.time.Time(obsMJD, format='mjd'))
656 observatory_icrs = observatory_gcrs.transform_to(astropy.coordinates.ICRS())
657 # We want the position in AU in Cartesian coordinates
658 observatories.append(observatory_icrs.cartesian.xyz.to(u.AU).value)
660 for row in visitSummary:
661 detector = row['id']
662 if detector not in detectors:
663 detectors.append(detector)
664 detectorBounds = wcsfit.Bounds(row['bbox_min_x'], row['bbox_max_x'],
665 row['bbox_min_y'], row['bbox_max_y'])
666 instrument.addDevice(str(detector), detectorBounds)
668 detectorIndex = np.flatnonzero(detector == np.array(detectors))[0]
669 extensionVisitIndices.append(v)
670 extensionDetectorIndices.append(detectorIndex)
671 extensionVisits.append(visit)
672 extensionDetectors.append(detector)
673 extensionType.append('SCIENCE')
675 wcs = row.getWcs()
676 wcss.append(_get_wcs_from_sip(wcs))
678 fieldNumbers = list(np.ones(len(exposureNames), dtype=int) * fieldNumber)
679 instrumentNumbers = list(np.ones(len(exposureNames), dtype=int) * instrumentNumber)
681 # Set the reference epoch to be the median of the science visits.
682 # The reference catalog will be shifted to this date.
683 medianMJD = np.median(mjds)
684 medianEpoch = astropy.time.Time(medianMJD, format='mjd').decimalyear
686 # Add information for the reference catalog. Most of the values are
687 # not used.
688 exposureNames.append('REFERENCE')
689 visits.append(-1)
690 fieldNumbers.append(0)
691 if self.config.fitProperMotion:
692 instrumentNumbers.append(-2)
693 else:
694 instrumentNumbers.append(-1)
695 ras.append(0.0)
696 decs.append(0.0)
697 airmasses.append(0.0)
698 exposureTimes.append(0)
699 mjds.append((refEpoch if (refEpoch is not None) else medianMJD))
700 observatories.append(np.array([0, 0, 0]))
701 identity = wcsfit.IdentityMap()
702 icrs = wcsfit.SphericalICRS()
703 refWcs = wcsfit.Wcs(identity, icrs, 'Identity', np.pi / 180.)
704 wcss.append(refWcs)
706 extensionVisitIndices.append(len(exposureNames) - 1)
707 extensionDetectorIndices.append(-1) # REFERENCE device must be -1
708 extensionVisits.append(-1)
709 extensionDetectors.append(-1)
710 extensionType.append('REFERENCE')
712 # Make a table of information to use elsewhere in the class
713 extensionInfo = pipeBase.Struct(visit=np.array(extensionVisits),
714 detector=np.array(extensionDetectors),
715 visitIndex=np.array(extensionVisitIndices),
716 detectorIndex=np.array(extensionDetectorIndices),
717 wcs=np.array(wcss),
718 extensionType=np.array(extensionType))
720 # Make the exposureHelper object to use in the fitting routines
721 exposuresHelper = wcsfit.ExposuresHelper(exposureNames,
722 fieldNumbers,
723 instrumentNumbers,
724 ras,
725 decs,
726 airmasses,
727 exposureTimes,
728 mjds,
729 observatories)
731 exposureInfo = pipeBase.Struct(visits=visits,
732 detectors=detectors,
733 ras=ras,
734 decs=decs,
735 medianEpoch=medianEpoch)
737 return exposureInfo, exposuresHelper, extensionInfo
739 def _load_refcat(self, associations, refObjectLoader, center, radius, extensionInfo, epoch=None,
740 fieldIndex=0):
741 """Load the reference catalog and add reference objects to the
742 `wcsfit.FoFClass` object.
744 Parameters
745 ----------
746 associations : `wcsfit.FoFClass`
747 Object to which to add the catalog of reference objects.
748 refObjectLoader :
749 `lsst.meas.algorithms.loadReferenceObjects.ReferenceObjectLoader`
750 Object set up to load reference catalog objects.
751 center : `lsst.geom.SpherePoint`
752 Center of the circle in which to load reference objects.
753 radius : `lsst.sphgeom._sphgeom.Angle`
754 Radius of the circle in which to load reference objects.
755 extensionInfo : `lsst.pipe.base.Struct`
756 Struct containing properties for each extension.
757 epoch : `float`
758 MJD to which to correct the object positions.
759 fieldIndex : `int`
760 Index of the field. Should be zero if all the data is fit together.
762 Returns
763 -------
764 refObjects : `dict`
765 Position and error information of reference objects.
766 refCovariance : `list` of `float`
767 Flattened output covariance matrix.
768 """
769 formattedEpoch = astropy.time.Time(epoch, format='mjd')
771 refFilter = refObjectLoader.config.anyFilterMapsToThis
772 skyCircle = refObjectLoader.loadSkyCircle(center, radius, refFilter, epoch=formattedEpoch)
774 selected = self.referenceSelector.run(skyCircle.refCat)
775 # Need memory contiguity to get reference filters as a vector.
776 if not selected.sourceCat.isContiguous():
777 refCat = selected.sourceCat.copy(deep=True)
778 else:
779 refCat = selected.sourceCat
781 # In Gaia DR3, missing values are denoted by NaNs.
782 finiteInd = np.isfinite(refCat['coord_ra']) & np.isfinite(refCat['coord_dec'])
783 refCat = refCat[finiteInd]
785 if self.config.excludeNonPMObjects:
786 # Gaia DR2 has zeros for missing data, while Gaia DR3 has NaNs:
787 hasPM = ((refCat['pm_raErr'] != 0) & np.isfinite(refCat['pm_raErr'])
788 & np.isfinite(refCat['pm_decErr']))
789 refCat = refCat[hasPM]
791 ra = (refCat['coord_ra'] * u.radian).to(u.degree).to_value().tolist()
792 dec = (refCat['coord_dec'] * u.radian).to(u.degree).to_value().tolist()
793 raCov = ((refCat['coord_raErr'] * u.radian).to(u.degree).to_value()**2).tolist()
794 decCov = ((refCat['coord_decErr'] * u.radian).to(u.degree).to_value()**2).tolist()
796 # Get refcat version from refcat metadata
797 refCatMetadata = refObjectLoader.refCats[0].get().getMetadata()
798 refCatVersion = refCatMetadata['REFCAT_FORMAT_VERSION']
799 if refCatVersion == 2:
800 raDecCov = (refCat['coord_ra_coord_dec_Cov'] * u.radian**2).to(u.degree**2).to_value().tolist()
801 else:
802 raDecCov = np.zeros(len(ra))
804 refObjects = {'ra': ra, 'dec': dec, 'raCov': raCov, 'decCov': decCov, 'raDecCov': raDecCov}
805 refCovariance = []
807 if self.config.fitProperMotion:
808 raPM = (refCat['pm_ra'] * u.radian).to(u.marcsec).to_value().tolist()
809 decPM = (refCat['pm_dec'] * u.radian).to(u.marcsec).to_value().tolist()
810 parallax = (refCat['parallax'] * u.radian).to(u.marcsec).to_value().tolist()
811 cov = _make_ref_covariance_matrix(refCat, version=refCatVersion)
812 pmDict = {'raPM': raPM, 'decPM': decPM, 'parallax': parallax}
813 refObjects.update(pmDict)
814 refCovariance = cov
816 extensionIndex = np.flatnonzero(extensionInfo.extensionType == 'REFERENCE')[0]
817 visitIndex = extensionInfo.visitIndex[extensionIndex]
818 detectorIndex = extensionInfo.detectorIndex[extensionIndex]
819 instrumentIndex = -1 # -1 indicates the reference catalog
820 refWcs = extensionInfo.wcs[extensionIndex]
822 associations.addCatalog(refWcs, 'STELLAR', visitIndex, fieldIndex, instrumentIndex, detectorIndex,
823 extensionIndex, np.ones(len(refCat), dtype=bool),
824 ra, dec, np.arange(len(ra)))
826 return refObjects, refCovariance
828 def _load_catalogs_and_associate(self, associations, inputCatalogRefs, extensionInfo,
829 fieldIndex=0, instrumentIndex=0):
830 """Load the science catalogs and add the sources to the associator
831 class `wcsfit.FoFClass`, associating them into matches as you go.
833 Parameters
834 ----------
835 associations : `wcsfit.FoFClass`
            Object to which to add the science source catalogs.
837 inputCatalogRefs : `list`
838 List of DeferredDatasetHandles pointing to visit-level source
839 tables.
840 extensionInfo : `lsst.pipe.base.Struct`
841 Struct containing properties for each extension.
842 fieldIndex : `int`
843 Index of the field for these catalogs. Should be zero assuming all
844 data is being fit together.
845 instrumentIndex : `int`
846 Index of the instrument for these catalogs. Should be zero
847 assuming all data comes from the same instrument.
849 Returns
850 -------
851 sourceIndices : `list`
852 List of boolean arrays used to select sources.
853 columns : `list` of `str`
854 List of columns needed from source tables.
855 """
856 columns = ['detector', 'sourceId', 'x', 'xErr', 'y', 'yErr', 'ixx', 'iyy', 'ixy',
857 f'{self.config.sourceFluxType}_instFlux', f'{self.config.sourceFluxType}_instFluxErr']
858 if self.sourceSelector.config.doFlags:
859 columns.extend(self.sourceSelector.config.flags.bad)
860 if self.sourceSelector.config.doUnresolved:
861 columns.append(self.sourceSelector.config.unresolved.name)
862 if self.sourceSelector.config.doIsolated:
863 columns.append(self.sourceSelector.config.isolated.parentName)
864 columns.append(self.sourceSelector.config.isolated.nChildName)
865 if self.sourceSelector.config.doRequirePrimary:
866 columns.append(self.sourceSelector.config.requirePrimary.primaryColName)
868 sourceIndices = [None] * len(extensionInfo.visit)
869 for inputCatalogRef in inputCatalogRefs:
870 visit = inputCatalogRef.dataId['visit']
871 inputCatalog = inputCatalogRef.get(parameters={'columns': columns})
872 # Get a sorted array of detector names
873 detectors = np.unique(inputCatalog['detector'])
875 for detector in detectors:
876 detectorSources = inputCatalog[inputCatalog['detector'] == detector]
877 xCov = detectorSources['xErr']**2
878 yCov = detectorSources['yErr']**2
879 xyCov = (detectorSources['ixy'] * (xCov + yCov)
880 / (detectorSources['ixx'] + detectorSources['iyy']))
881 # Remove sources with bad shape measurements
882 goodShapes = xyCov**2 <= (xCov * yCov)
883 selected = self.sourceSelector.run(detectorSources)
884 goodInds = selected.selected & goodShapes
886 isStar = np.ones(goodInds.sum())
887 extensionIndex = np.flatnonzero((extensionInfo.visit == visit)
888 & (extensionInfo.detector == detector))[0]
889 detectorIndex = extensionInfo.detectorIndex[extensionIndex]
890 visitIndex = extensionInfo.visitIndex[extensionIndex]
892 sourceIndices[extensionIndex] = goodInds
894 wcs = extensionInfo.wcs[extensionIndex]
895 associations.reprojectWCS(wcs, fieldIndex)
897 associations.addCatalog(wcs, 'STELLAR', visitIndex, fieldIndex,
898 instrumentIndex, detectorIndex, extensionIndex, isStar,
899 detectorSources[goodInds]['x'].to_list(),
900 detectorSources[goodInds]['y'].to_list(),
901 np.arange(goodInds.sum()))
903 associations.sortMatches(fieldIndex, minMatches=self.config.minMatches,
904 allowSelfMatches=self.config.allowSelfMatches)
906 return sourceIndices, columns
908 def make_yaml(self, inputVisitSummary, inputFile=None):
909 """Make a YAML-type object that describes the parameters of the fit
910 model.
912 Parameters
913 ----------
914 inputVisitSummary : `lsst.afw.table.ExposureCatalog`
915 Catalog with per-detector summary information.
916 inputFile : `str`
917 Path to a file that contains a basic model.
919 Returns
920 -------
921 inputYAML : `wcsfit.YAMLCollector`
922 YAML object containing the model description.
923 """
924 if inputFile is not None:
925 inputYAML = wcsfit.YAMLCollector(inputFile, 'PixelMapCollection')
926 else:
927 inputYAML = wcsfit.YAMLCollector('', 'PixelMapCollection')
928 inputDict = {}
929 modelComponents = ['INSTRUMENT/DEVICE', 'EXPOSURE']
930 baseMap = {'Type': 'Composite', 'Elements': modelComponents}
931 inputDict['EXPOSURE/DEVICE/base'] = baseMap
933 xMin = str(inputVisitSummary['bbox_min_x'].min())
934 xMax = str(inputVisitSummary['bbox_max_x'].max())
935 yMin = str(inputVisitSummary['bbox_min_y'].min())
936 yMax = str(inputVisitSummary['bbox_max_y'].max())
938 deviceModel = {'Type': 'Composite', 'Elements': self.config.deviceModel.list()}
939 inputDict['INSTRUMENT/DEVICE'] = deviceModel
940 for component in self.config.deviceModel:
941 if 'poly' in component.lower():
942 componentDict = {'Type': 'Poly',
943 'XPoly': {'OrderX': self.config.devicePolyOrder,
944 'SumOrder': True},
945 'YPoly': {'OrderX': self.config.devicePolyOrder,
946 'SumOrder': True},
947 'XMin': xMin, 'XMax': xMax, 'YMin': yMin, 'YMax': yMax}
948 elif 'identity' in component.lower():
949 componentDict = {'Type': 'Identity'}
951 inputDict[component] = componentDict
953 exposureModel = {'Type': 'Composite', 'Elements': self.config.exposureModel.list()}
954 inputDict['EXPOSURE'] = exposureModel
955 for component in self.config.exposureModel:
956 if 'poly' in component.lower():
957 componentDict = {'Type': 'Poly',
958 'XPoly': {'OrderX': self.config.exposurePolyOrder,
959 'SumOrder': 'true'},
960 'YPoly': {'OrderX': self.config.exposurePolyOrder,
961 'SumOrder': 'true'}}
962 elif 'identity' in component.lower():
963 componentDict = {'Type': 'Identity'}
965 inputDict[component] = componentDict
967 inputYAML.addInput(yaml.dump(inputDict))
968 inputYAML.addInput('Identity:\n Type: Identity\n')
970 return inputYAML
972 def _add_objects(self, wcsf, inputCatalogRefs, sourceIndices, extensionInfo, columns):
973 """Add science sources to the wcsfit.WCSFit object.
975 Parameters
976 ----------
977 wcsf : `wcsfit.WCSFit`
978 WCS-fitting object.
979 inputCatalogRefs : `list`
980 List of DeferredDatasetHandles pointing to visit-level source
981 tables.
982 sourceIndices : `list`
983 List of boolean arrays used to select sources.
984 extensionInfo : `lsst.pipe.base.Struct`
985 Struct containing properties for each extension.
986 columns : `list` of `str`
987 List of columns needed from source tables.
988 """
989 for inputCatalogRef in inputCatalogRefs:
990 visit = inputCatalogRef.dataId['visit']
991 inputCatalog = inputCatalogRef.get(parameters={'columns': columns})
992 detectors = np.unique(inputCatalog['detector'])
994 for detector in detectors:
995 detectorSources = inputCatalog[inputCatalog['detector'] == detector]
997 extensionIndex = np.flatnonzero((extensionInfo.visit == visit)
998 & (extensionInfo.detector == detector))[0]
999 sourceCat = detectorSources[sourceIndices[extensionIndex]]
1001 xCov = sourceCat['xErr']**2
1002 yCov = sourceCat['yErr']**2
1003 xyCov = (sourceCat['ixy'] * (xCov + yCov)
1004 / (sourceCat['ixx'] + sourceCat['iyy']))
1005 # TODO: add correct xyErr if DM-7101 is ever done.
1007 d = {'x': sourceCat['x'].to_numpy(), 'y': sourceCat['y'].to_numpy(),
1008 'xCov': xCov.to_numpy(), 'yCov': yCov.to_numpy(), 'xyCov': xyCov.to_numpy()}
1010 wcsf.setObjects(extensionIndex, d, 'x', 'y', ['xCov', 'yCov', 'xyCov'])
1012 def _add_ref_objects(self, wcsf, refObjects, refCovariance, extensionInfo):
1013 """Add reference sources to the wcsfit.WCSFit object.
1015 Parameters
1016 ----------
1017 wcsf : `wcsfit.WCSFit`
1018 WCS-fitting object.
1019 refObjects : `dict`
1020 Position and error information of reference objects.
1021 refCovariance : `list` of `float`
1022 Flattened output covariance matrix.
1023 extensionInfo : `lsst.pipe.base.Struct`
1024 Struct containing properties for each extension.
1025 """
1026 extensionIndex = np.flatnonzero(extensionInfo.extensionType == 'REFERENCE')[0]
1028 if self.config.fitProperMotion:
1029 wcsf.setObjects(extensionIndex, refObjects, 'ra', 'dec', ['raCov', 'decCov', 'raDecCov'],
1030 pmDecKey='decPM', pmRaKey='raPM', parallaxKey='parallax', pmCovKey='fullCov',
1031 pmCov=refCovariance)
1032 else:
1033 wcsf.setObjects(extensionIndex, refObjects, 'ra', 'dec', ['raCov', 'decCov', 'raDecCov'])
1035 def _make_afw_wcs(self, mapDict, centerRA, centerDec, doNormalizePixels=False, xScale=1, yScale=1):
1036 """Make an `lsst.afw.geom.SkyWcs` from a dictionary of mappings.
1038 Parameters
1039 ----------
1040 mapDict : `dict`
1041 Dictionary of mapping parameters.
1042 centerRA : `lsst.geom.Angle`
1043 RA of the tangent point.
1044 centerDec : `lsst.geom.Angle`
1045 Declination of the tangent point.
1046 doNormalizePixels : `bool`
1047 Whether to normalize pixels so that range is [-1,1].
1048 xScale : `float`
1049 Factor by which to normalize x-dimension. Corresponds to width of
1050 detector.
1051 yScale : `float`
1052 Factor by which to normalize y-dimension. Corresponds to height of
1053 detector.
1055 Returns
1056 -------
1057 outWCS : `lsst.afw.geom.SkyWcs`
1058 WCS constructed from the input mappings
1059 """
1060 # Set up pixel frames
1061 pixelFrame = astshim.Frame(2, 'Domain=PIXELS')
1062 normedPixelFrame = astshim.Frame(2, 'Domain=NORMEDPIXELS')
1064 if doNormalizePixels:
1065 # Pixels will need to be rescaled before going into the mappings
1066 normCoefficients = [-1.0, 2.0/xScale, 0,
1067 -1.0, 0, 2.0/yScale]
1068 normMap = _convert_to_ast_polymap_coefficients(normCoefficients)
1069 else:
1070 normMap = astshim.UnitMap(2)
1072 # All of the detectors for one visit map to the same tangent plane
1073 tangentPoint = lsst.geom.SpherePoint(centerRA, centerDec)
1074 cdMatrix = afwgeom.makeCdMatrix(1.0 * lsst.geom.degrees, 0 * lsst.geom.degrees, True)
1075 iwcToSkyWcs = afwgeom.makeSkyWcs(lsst.geom.Point2D(0, 0), tangentPoint, cdMatrix)
1076 iwcToSkyMap = iwcToSkyWcs.getFrameDict().getMapping('PIXELS', 'SKY')
1077 skyFrame = iwcToSkyWcs.getFrameDict().getFrame('SKY')
1079 frameDict = astshim.FrameDict(pixelFrame)
1080 frameDict.addFrame('PIXELS', normMap, normedPixelFrame)
1082 currentFrameName = 'NORMEDPIXELS'
1084 # Dictionary values are ordered according to the maps' application.
1085 for m, mapElement in enumerate(mapDict.values()):
1086 mapType = mapElement['Type']
1088 if mapType == 'Poly':
1089 mapCoefficients = mapElement['Coefficients']
1090 astMap = _convert_to_ast_polymap_coefficients(mapCoefficients)
1091 elif mapType == 'Identity':
1092 astMap = astshim.UnitMap(2)
1093 else:
1094 raise ValueError(f"Converting map type {mapType} to WCS is not supported")
1096 if m == len(mapDict) - 1:
1097 newFrameName = 'IWC'
1098 else:
1099 newFrameName = 'INTERMEDIATE' + str(m)
1100 newFrame = astshim.Frame(2, f'Domain={newFrameName}')
1101 frameDict.addFrame(currentFrameName, astMap, newFrame)
1102 currentFrameName = newFrameName
1103 frameDict.addFrame('IWC', iwcToSkyMap, skyFrame)
1105 outWCS = afwgeom.SkyWcs(frameDict)
1106 return outWCS
1108 def _make_outputs(self, wcsf, visitSummaryTables, exposureInfo):
        """Make per-visit exposure catalogs with the fitted WCS for each detector.
1111 Parameters
1112 ----------
1113 wcsf : `wcsfit.WCSFit`
1114 WCSFit object, assumed to have fit model.
1115 visitSummaryTables : `list` of `lsst.afw.table.ExposureCatalog`
1116 Catalogs with per-detector summary information from which to grab
1117 detector information.
        exposureInfo : `lsst.pipe.base.Struct`
            Struct containing general properties for the visits.
1121 Returns
1122 -------
        catalogs : `dict` [`int`, `lsst.afw.table.ExposureCatalog`]
            Dictionary of `lsst.afw.table.ExposureCatalog` objects with the WCS
            set to the WCS fit in wcsf, keyed by the visit id.
1126 """
1127 # Get the parameters of the fit models
1128 mapParams = wcsf.mapCollection.getParamDict()
1130 # Set up the schema for the output catalogs
1131 schema = lsst.afw.table.ExposureTable.makeMinimalSchema()
1132 schema.addField('visit', type='L', doc='Visit number')
1134 # Pixels will need to be rescaled before going into the mappings
1135 sampleDetector = visitSummaryTables[0][0]
1136 xscale = sampleDetector['bbox_max_x'] - sampleDetector['bbox_min_x']
1137 yscale = sampleDetector['bbox_max_y'] - sampleDetector['bbox_min_y']
1139 catalogs = {}
1140 for v, visitSummary in enumerate(visitSummaryTables):
1141 visit = visitSummary[0]['visit']
1143 catalog = lsst.afw.table.ExposureCatalog(schema)
1144 catalog.resize(len(exposureInfo.detectors))
1145 catalog['visit'] = visit
1147 for d, detector in enumerate(visitSummary['id']):
1148 mapName = f'{visit}/{detector}'
1150 mapElements = wcsf.mapCollection.orderAtoms(f'{mapName}/base')
1151 mapDict = {}
1152 for m, mapElement in enumerate(mapElements):
1153 mapType = wcsf.mapCollection.getMapType(mapElement)
1154 mapDict[mapElement] = {'Type': mapType}
1156 if mapType == 'Poly':
1157 mapCoefficients = mapParams[mapElement]
1158 mapDict[mapElement]['Coefficients'] = mapCoefficients
1160 # The RA and Dec of the visit are needed for the last step of
1161 # the mapping from the visit tangent plane to RA and Dec
1162 outWCS = self._make_afw_wcs(mapDict, exposureInfo.ras[v] * lsst.geom.radians,
1163 exposureInfo.decs[v] * lsst.geom.radians,
1164 doNormalizePixels=True,
1165 xScale=xscale, yScale=yscale)
1167 catalog[d].setId(detector)
1168 catalog[d].setWcs(outWCS)
1169 catalog.sort()
1170 catalogs[visit] = catalog
1172 return catalogs
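
# End-to-end usage sketch (illustrative assumptions throughout: the handles,
# visit summaries, and reference loader are obtained from a Butler elsewhere;
# "HSC" is a placeholder instrument label). This mirrors what runQuantum does
# when the task is executed inside a pipeline.
def _example_run_fit(inputCatalogRefs, inputVisitSummaries, refObjectLoader):
    task = GbdesAstrometricFitTask(config=GbdesAstrometricFitConfig())
    result = task.run(inputCatalogRefs, inputVisitSummaries,
                      instrumentName="HSC", refObjectLoader=refObjectLoader)
    # result.outputWCSs maps visit id to an ExposureCatalog whose rows carry
    # the fitted SkyWcs for each detector.
    return result.outputWCSs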