Coverage for python/lsst/faro/utils/matcher.py: 7%
138 statements
# This file is part of faro.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

from lsst.afw.geom import SkyWcs
from lsst.afw.image import PhotoCalib
from lsst.afw.table import (
    SchemaMapper,
    Field,
    MultiMatch,
    SimpleRecord,
    SourceCatalog,
    updateSourceCoords,
)
from lsst.faro.utils.calibrated_catalog import CalibratedCatalog
from lsst.faro.utils.prefilter import preFilter

import numpy as np
from astropy.table import join, Table
from typing import Dict, List

__all__ = (
    "matchCatalogs",
    "ellipticityFromCat",
    "ellipticity",
    "makeMatchedPhotom",
    "mergeCatalogs",
)


def matchCatalogs(
    inputs: List[SourceCatalog],
    photoCalibs: List[PhotoCalib],
    astromCalibs: List[SkyWcs],
    dataIds,
    matchRadius: float,
    config,
    logger=None,
):
    """Match per-detector source catalogs across visits.

    Each input catalog is calibrated, augmented with SNR, magnitude,
    ellipticity, and filter-code columns, pre-filtered, and fed to a
    `MultiMatch`. Inputs with a missing WCS or photometric calibration are
    skipped. Returns the concatenated source catalog and the matched catalog
    produced by `MultiMatch.finish`.
    """
    schema = inputs[0].schema
    mapper = SchemaMapper(schema)
    mapper.addMinimalSchema(schema)
    mapper.addOutputField(Field[float]("base_PsfFlux_snr", "PSF flux SNR"))
    mapper.addOutputField(Field[float]("base_PsfFlux_mag", "PSF magnitude"))
    mapper.addOutputField(
        Field[float]("base_PsfFlux_magErr", "PSF magnitude uncertainty")
    )
    # Needed because addOutputField(... 'slot_ModelFlux_mag') will add a field with that literal name.
    aliasMap = schema.getAliasMap()
    # Possibly not needed since base_GaussianFlux is the default, but this ought to be safe.
    modelName = (
        aliasMap["slot_ModelFlux"]
        if "slot_ModelFlux" in aliasMap.keys()
        else "base_GaussianFlux"
    )
    mapper.addOutputField(Field[float](f"{modelName}_mag", "Model magnitude"))
    mapper.addOutputField(
        Field[float](f"{modelName}_magErr", "Model magnitude uncertainty")
    )
    mapper.addOutputField(Field[float](f"{modelName}_snr", "Model flux SNR"))
    mapper.addOutputField(Field[float]("e1", "Source Ellipticity 1"))
    mapper.addOutputField(Field[float]("e2", "Source Ellipticity 2"))
    mapper.addOutputField(Field[float]("psf_e1", "PSF Ellipticity 1"))
    mapper.addOutputField(Field[float]("psf_e2", "PSF Ellipticity 2"))
    mapper.addOutputField(Field[np.int32]("filt", "filter code"))
    newSchema = mapper.getOutputSchema()
    newSchema.setAliasMap(schema.getAliasMap())

    # Create an object that matches multiple catalogs with the same schema.
    mmatch = MultiMatch(
        newSchema,
        dataIdFormat={"visit": np.int64, "detector": np.int32},
        radius=matchRadius,
        RecordClass=SimpleRecord,
    )

    # Create the new extended source catalog.
    srcVis = SourceCatalog(newSchema)

    filter_dict = {
        "u": 1,
        "g": 2,
        "r": 3,
        "i": 4,
        "z": 5,
        "y": 6,
        "HSC-U": 1,
        "HSC-G": 2,
        "HSC-R": 3,
        "HSC-I": 4,
        "HSC-Z": 5,
        "HSC-Y": 6,
    }

    # Sort by visit, detector, then filter.
    vislist = [v["visit"] for v in dataIds]
    ccdlist = [v["detector"] for v in dataIds]
    filtlist = [v["band"] for v in dataIds]
    tab_vids = Table([vislist, ccdlist, filtlist], names=["vis", "ccd", "filt"])
    sortinds = np.argsort(tab_vids, order=("vis", "ccd", "filt"))
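
    # Illustrative note (the dataId values below are made up, not from this
    # module): given dataIds such as
    #     [{"visit": 2, "detector": 10, "band": "r"},
    #      {"visit": 1, "detector": 30, "band": "g"}]
    # sortinds orders the inputs by visit first, then detector, then band, so
    # the second dataId above would be processed before the first.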

    for ind in sortinds:
        oldSrc = inputs[ind]
        photoCalib = photoCalibs[ind]
        wcs = astromCalibs[ind]
        dataId = dataIds[ind]
        if wcs is None:
            if logger:
                logger.info("WCS is None for dataId %s. Skipping...", dataId)
            continue
        if photoCalib is None:
            if logger:
                logger.info("photoCalib is None for dataId %s. Skipping...", dataId)
            continue
        if logger:
            logger.debug(
                "%d sources in ccd %s visit %s",
                len(oldSrc),
                dataId["detector"],
                dataId["visit"],
            )

        # Create temporary catalog.
        tmpCat = SourceCatalog(SourceCatalog(newSchema).table)
        tmpCat.extend(oldSrc, mapper=mapper)

        filtnum = filter_dict[dataId["band"]]
        tmpCat["filt"] = np.repeat(filtnum, len(oldSrc))

        tmpCat["base_PsfFlux_snr"][:] = (
            tmpCat["base_PsfFlux_instFlux"] / tmpCat["base_PsfFlux_instFluxErr"]
        )

        updateSourceCoords(wcs, tmpCat)

        photoCalib.instFluxToMagnitude(tmpCat, "base_PsfFlux", "base_PsfFlux")
        tmpCat["slot_ModelFlux_snr"][:] = (
            tmpCat["slot_ModelFlux_instFlux"] / tmpCat["slot_ModelFlux_instFluxErr"]
        )
        photoCalib.instFluxToMagnitude(tmpCat, "slot_ModelFlux", "slot_ModelFlux")

        _, psf_e1, psf_e2 = ellipticityFromCat(oldSrc, slot_shape="slot_PsfShape")
        _, star_e1, star_e2 = ellipticityFromCat(oldSrc, slot_shape="slot_Shape")
        tmpCat["e1"][:] = star_e1
        tmpCat["e2"][:] = star_e2
        tmpCat["psf_e1"][:] = psf_e1
        tmpCat["psf_e2"][:] = psf_e2

        tmpCat = preFilter(tmpCat, snrMin=config.snrMin, snrMax=config.snrMax,
                           brightMagCut=config.brightMagCut, faintMagCut=config.faintMagCut,
                           extended=config.selectExtended)

        srcVis.extend(tmpCat, False)
        mmatch.add(catalog=tmpCat, dataId=dataId)

    # Complete the match, returning a catalog that includes
    # all matched sources with object IDs that can be used to group them.
    matchCat = mmatch.finish()

    # A GroupView mapping each object ID to its catalog of matched sources
    # could be built here, but a group view cannot be persisted, so that step
    # is left to a subsequent task:
    # allMatches = GroupView.build(matchCat)

    return srcVis, matchCat
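
# Hedged usage sketch (illustration only; the object names below are assumed
# inputs, not defined in this module): given per-detector catalogs,
# calibrations, and dataIds already in hand, plus a config exposing
# snrMin/snrMax/brightMagCut/faintMagCut/selectExtended, the matcher might be
# driven like this:
#
#     srcVis, matchCat = matchCatalogs(
#         inputs=catalogs,            # list of SourceCatalog, one per detector/visit
#         photoCalibs=photoCalibs,    # list of PhotoCalib, aligned with `inputs`
#         astromCalibs=wcsList,       # list of SkyWcs, aligned with `inputs`
#         dataIds=dataIds,            # dicts with "visit", "detector", "band" keys
#         matchRadius=matchRadius,    # match radius, passed through to MultiMatch
#         config=config,
#     )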


def ellipticityFromCat(cat, slot_shape="slot_Shape"):
    """Calculate the ellipticity of the Shapes in a catalog from the 2nd moments.

    Parameters
    ----------
    cat : `lsst.afw.table.BaseCatalog`
        A catalog with 'slot_Shape' defined and '_xx', '_xy', '_yy'
        entries for the target of 'slot_Shape'.
        E.g., 'slot_Shape' aliased to 'base_SdssShape', with
        'base_SdssShape_xx', 'base_SdssShape_xy', 'base_SdssShape_yy' defined.
    slot_shape : str, optional
        The slot shape to use. The intended use is to get the PSF shape
        estimates by specifying slot_shape="slot_PsfShape"
        instead of the default slot_shape="slot_Shape".

    Returns
    -------
    e, e1, e2 : complex, float, float
        Complex ellipticity, real part, imaginary part.
    """
    i_xx, i_xy, i_yy = (
        cat.get(slot_shape + "_xx"),
        cat.get(slot_shape + "_xy"),
        cat.get(slot_shape + "_yy"),
    )
    return ellipticity(i_xx, i_xy, i_yy)
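
# For example (sketch; `cat` is assumed to be a SourceCatalog with the PSF
# shape slot defined), the PSF-model ellipticities used above in matchCatalogs
# are obtained with:
#
#     e, psf_e1, psf_e2 = ellipticityFromCat(cat, slot_shape="slot_PsfShape")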


def ellipticity(i_xx, i_xy, i_yy):
    """Calculate ellipticity from second moments.

    Parameters
    ----------
    i_xx : float or `numpy.array`
        The xx second moment.
    i_xy : float or `numpy.array`
        The xy second moment.
    i_yy : float or `numpy.array`
        The yy second moment.

    Returns
    -------
    e, e1, e2 : (complex, float, float) or (`numpy.array`, `numpy.array`, `numpy.array`)
        Complex ellipticity, real component, imaginary component.
    """
    e = (i_xx - i_yy + 2j * i_xy) / (i_xx + i_yy)
    e1 = np.real(e)
    e2 = np.imag(e)
    return e, e1, e2
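
# A minimal worked example (pure arithmetic, no afw objects needed): for
# second moments i_xx = 4.0, i_xy = 0.5, i_yy = 2.0,
#
#     e = (4.0 - 2.0 + 2j * 0.5) / (4.0 + 2.0) = (2 + 1j) / 6 ≈ 0.333 + 0.167j
#
# so ellipticity(4.0, 0.5, 2.0) returns approximately
# (0.333 + 0.167j, 0.333, 0.167).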


def makeMatchedPhotom(data: Dict[str, List[CalibratedCatalog]], logger=None):
    """Merge calibrated catalogs from multiple bands into a single joined catalog.

    The input is a dict of `CalibratedCatalog` lists keyed by band; the result
    is an `astropy.table.Table` of sources matched across bands, joined on
    "id", with the band name appended to every other column name.
    """

    cat_all = None

    for band, cat_list in data.items():
        cat_tmp = []
        calibs_photo = []
        for cat_calib in cat_list:
            cat_tmp_i = cat_calib.catalog
            qual_cuts = (
                (cat_tmp_i["base_ClassificationExtendedness_value"] < 0.5)
                & ~cat_tmp_i["base_PixelFlags_flag_saturated"]
                & ~cat_tmp_i["base_PixelFlags_flag_cr"]
                & ~cat_tmp_i["base_PixelFlags_flag_bad"]
                & ~cat_tmp_i["base_PixelFlags_flag_edge"]
            )
            cat_tmp.append(cat_tmp_i[qual_cuts])
            calibs_photo.append(cat_calib.photoCalib)

        if logger:
            logger.debug("Merging %d catalogs for band %s.", len(cat_tmp), band)
        cat_tmp = mergeCatalogs(cat_tmp, calibs_photo, models=['base_PsfFlux'],
                                logger=logger)
        if cat_tmp:
            if not cat_tmp.isContiguous():
                if logger:
                    logger.debug("Deep copying the %s band catalog to make it "
                                 "contiguous.", band)
                cat_tmp = cat_tmp.copy(deep=True)

            cat_tmp = cat_tmp.asAstropy()

            # Put the bandpass name in the column names:
            for c in cat_tmp.colnames:
                if c != "id":
                    cat_tmp[c].name = f"{c}_{band}"

            if cat_all:
                if logger:
                    logger.debug("Joining the %s band catalog with the main "
                                 "catalog.", band)
                cat_all = join(cat_all, cat_tmp, keys="id")
            else:
                cat_all = cat_tmp

    # Return the astropy table of matched catalogs:
    return cat_all
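
# Illustration (a sketch; the band keys and catalog contents below are assumed,
# not fixed by this module): with two bands, the input and output look like
#
#     data = {
#         "g": [CalibratedCatalog(...), CalibratedCatalog(...)],
#         "r": [CalibratedCatalog(...), CalibratedCatalog(...)],
#     }
#     matched = makeMatchedPhotom(data)
#     # matched columns: "id", "base_PsfFlux_mag_g", "base_PsfFlux_mag_r", ...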


def mergeCatalogs(
    catalogs,
    photoCalibs=None,
    astromCalibs=None,
    models=["slot_PsfFlux"],
    applyExternalWcs=False,
    logger=None,
):
    """Merge catalogs and optionally apply photometric and astrometric calibrations.
    """

    schema = catalogs[0].schema
    mapper = SchemaMapper(schema)
    mapper.addMinimalSchema(schema)
    aliasMap = schema.getAliasMap()
    for model in models:
        modelName = aliasMap[model] if model in aliasMap.keys() else model
        mapper.addOutputField(
            Field[float](f"{modelName}_mag", f"{modelName} magnitude")
        )
        mapper.addOutputField(
            Field[float](f"{modelName}_magErr", f"{modelName} magnitude uncertainty")
        )
    newSchema = mapper.getOutputSchema()
    newSchema.setAliasMap(schema.getAliasMap())

    size = sum([len(cat) for cat in catalogs])
    catalog = SourceCatalog(newSchema)
    catalog.reserve(size)

    for ii in range(0, len(catalogs)):
        cat = catalogs[ii]

        # Create temporary catalog. Is this step needed?
        tempCat = SourceCatalog(SourceCatalog(newSchema).table)
        tempCat.extend(cat, mapper=mapper)

        if applyExternalWcs and astromCalibs is not None:
            wcs = astromCalibs[ii]
            updateSourceCoords(wcs, tempCat)

        if photoCalibs is not None:
            photoCalib = photoCalibs[ii]
            if photoCalib is not None:
                for model in models:
                    modelName = aliasMap[model] if model in aliasMap.keys() else model
                    photoCalib.instFluxToMagnitude(tempCat, modelName, modelName)

        catalog.extend(tempCat)

        if logger:
            logger.verbose("Merged %d catalog(s) out of %d.", ii + 1, len(catalogs))

    return catalog
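
# Hedged usage sketch (the catalog and calibration lists are assumed inputs,
# not defined here): merging several single-band catalogs while applying their
# photometric calibrations might look like
#
#     merged = mergeCatalogs(
#         catalogs,                   # list of SourceCatalog sharing one schema
#         photoCalibs=photoCalibs,    # list of PhotoCalib, aligned with `catalogs`
#         models=["base_PsfFlux"],    # flux fields to convert to magnitudes
#     )
#
# which is how makeMatchedPhotom above drives this function for each band.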