Coverage for python/lsst/faro/utils/tex.py : 19%

import astropy.units as u
import numpy as np
import treecorr

from lsst.faro.utils.matcher import mergeCatalogs

__all__ = (
    "TraceSize",
    "PsfTraceSizeDiff",
    "E1",
    "E2",
    "E1Resids",
    "E2Resids",
    "RhoStatistics",
    "corrSpin0",
    "corrSpin2",
    "calculateTEx",
)


class TraceSize(object):
    """Functor to calculate trace radius size for sources."""

    def __init__(self, column):
        self.column = column

    def __call__(self, catalog):
        srcSize = np.sqrt(
            0.5 * (catalog[self.column + "_xx"] + catalog[self.column + "_yy"])
        )
        return np.array(srcSize)


class PsfTraceSizeDiff(object):
    """Functor to calculate trace radius size difference (%) between object
    and PSF model.
    """

    def __init__(self, column, psfColumn):
        self.column = column
        self.psfColumn = psfColumn

    def __call__(self, catalog):
        srcSize = np.sqrt(
            0.5 * (catalog[self.column + "_xx"] + catalog[self.column + "_yy"])
        )
        psfSize = np.sqrt(
            0.5 * (catalog[self.psfColumn + "_xx"] + catalog[self.psfColumn + "_yy"])
        )
        sizeDiff = 100 * (srcSize - psfSize) / (0.5 * (srcSize + psfSize))
        return np.array(sizeDiff)
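
# --- Illustrative usage sketch (added for this write-up; not part of the
# original faro module). A plain dict of numpy arrays stands in for a real
# source catalog; the "base_SdssShape"/"base_SdssShape_psf" column prefixes
# come from the docstrings below, and the moment values are made up.
def _demoTraceSize():
    toyCatalog = {
        "base_SdssShape_xx": np.array([4.1, 3.9]),
        "base_SdssShape_yy": np.array([3.8, 4.2]),
        "base_SdssShape_psf_xx": np.array([4.0, 4.0]),
        "base_SdssShape_psf_yy": np.array([4.0, 4.0]),
    }
    # Trace radii sqrt(0.5 * (xx + yy)) per row, roughly [1.99, 2.01].
    srcSize = TraceSize("base_SdssShape")(toyCatalog)
    # Percent size difference relative to the mean of object and PSF sizes.
    pctDiff = PsfTraceSizeDiff("base_SdssShape", "base_SdssShape_psf")(toyCatalog)
    return srcSize, pctDiff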


class E1(object):
    """Functor to calculate e1 ellipticities from a given catalog.

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("base_SdssShape", "ext_shapeHSM_HsmSourceMoments") or
        ("base_SdssShape_psf", "ext_shapeHSM_HsmPsfMoments") for corresponding
        PSF ellipticities.
    unitScale : `float`, optional
        A numerical scaling factor to multiply the ellipticity.
    shearConvention : `bool`, optional
        Option to use shear convention. When set to False, the distortion
        convention is used.

    Returns
    -------
    e1 : `numpy.array`
        A numpy array of e1 ellipticity values.
    """

    def __init__(self, column, unitScale=1.0, shearConvention=False):
        self.column = column
        self.unitScale = unitScale
        self.shearConvention = shearConvention

    def __call__(self, catalog):
        xx = catalog[self.column + "_xx"]
        yy = catalog[self.column + "_yy"]
        if self.shearConvention:
            xy = catalog[self.column + "_xy"]
            e1 = (xx - yy) / (xx + yy + 2.0 * np.sqrt(xx * yy - xy ** 2))
        else:
            e1 = (xx - yy) / (xx + yy)
        return np.array(e1) * self.unitScale


class E2(object):
    """Functor to calculate e2 ellipticities from a given catalog.

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("base_SdssShape", "ext_shapeHSM_HsmSourceMoments") or
        ("base_SdssShape_psf", "ext_shapeHSM_HsmPsfMoments") for corresponding
        PSF ellipticities.
    unitScale : `float`, optional
        A numerical scaling factor to multiply the ellipticity.
    shearConvention : `bool`, optional
        Option to use shear convention. When set to False, the distortion
        convention is used.

    Returns
    -------
    e2 : `numpy.array`
        A numpy array of e2 ellipticity values.
    """

    def __init__(self, column, unitScale=1.0, shearConvention=False):
        self.column = column
        self.unitScale = unitScale
        self.shearConvention = shearConvention

    def __call__(self, catalog):
        xx = catalog[self.column + "_xx"]
        yy = catalog[self.column + "_yy"]
        xy = catalog[self.column + "_xy"]
        if self.shearConvention:
            e2 = (2.0 * xy) / (xx + yy + 2.0 * np.sqrt(xx * yy - xy ** 2))
        else:
            e2 = (2.0 * xy) / (xx + yy)
        return np.array(e2) * self.unitScale
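
# --- Illustrative sketch (added for this write-up; not part of the original
# faro module) comparing the two ellipticity conventions on one made-up set
# of second moments. With shearConvention=False (distortion convention),
# e1 = (xx - yy) / (xx + yy); with shearConvention=True the denominator also
# includes 2 * sqrt(xx * yy - xy**2), giving a smaller |e| for the same moments.
def _demoEllipticityConventions():
    toyCatalog = {
        "base_SdssShape_xx": np.array([5.0]),
        "base_SdssShape_yy": np.array([4.0]),
        "base_SdssShape_xy": np.array([0.5]),
    }
    e1Distortion = E1("base_SdssShape")(toyCatalog)                     # ~0.111
    e1Shear = E1("base_SdssShape", shearConvention=True)(toyCatalog)    # ~0.056
    e2Distortion = E2("base_SdssShape")(toyCatalog)                     # ~0.111
    e2Shear = E2("base_SdssShape", shearConvention=True)(toyCatalog)    # ~0.056
    return e1Distortion, e1Shear, e2Distortion, e2Shear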


class E1Resids(object):
    """Functor to calculate e1 ellipticity residuals from an object catalog
    and PSF model.

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("base_SdssShape", "ext_shapeHSM_HsmSourceMoments").
    psfColumn : `str`
        The name used for PSF shape measurements from the same algorithm.
        It must be one of ("base_SdssShape_psf", "ext_shapeHSM_HsmPsfMoments")
        and correspond to the algorithm name specified for ``column``.
    unitScale : `float`, optional
        A numerical scaling factor to multiply both the object and PSF
        ellipticities.
    shearConvention : `bool`, optional
        Option to use shear convention. When set to False, the distortion
        convention is used.

    Returns
    -------
    e1Resids : `numpy.array`
        A numpy array of e1 residual ellipticity values.
    """

    def __init__(self, column, psfColumn, unitScale=1.0, shearConvention=False):
        self.column = column
        self.psfColumn = psfColumn
        self.unitScale = unitScale
        self.shearConvention = shearConvention

    def __call__(self, catalog):
        srcE1func = E1(self.column, self.unitScale, self.shearConvention)
        psfE1func = E1(self.psfColumn, self.unitScale, self.shearConvention)

        srcE1 = srcE1func(catalog)
        psfE1 = psfE1func(catalog)

        e1Resids = srcE1 - psfE1
        return e1Resids


class E2Resids(object):
    """Functor to calculate e2 ellipticity residuals from an object catalog
    and PSF model.

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("base_SdssShape", "ext_shapeHSM_HsmSourceMoments").
    psfColumn : `str`
        The name used for PSF shape measurements from the same algorithm.
        It must be one of ("base_SdssShape_psf", "ext_shapeHSM_HsmPsfMoments")
        and correspond to the algorithm name specified for ``column``.
    unitScale : `float`, optional
        A numerical scaling factor to multiply both the object and PSF
        ellipticities.
    shearConvention : `bool`, optional
        Option to use shear convention. When set to False, the distortion
        convention is used.

    Returns
    -------
    e2Resids : `numpy.array`
        A numpy array of e2 residual ellipticity values.
    """

    def __init__(self, column, psfColumn, unitScale=1.0, shearConvention=False):
        self.column = column
        self.psfColumn = psfColumn
        self.unitScale = unitScale
        self.shearConvention = shearConvention

    def __call__(self, catalog):
        srcE2func = E2(self.column, self.unitScale, self.shearConvention)
        psfE2func = E2(self.psfColumn, self.unitScale, self.shearConvention)

        srcE2 = srcE2func(catalog)
        psfE2 = psfE2func(catalog)

        e2Resids = srcE2 - psfE2
        return e2Resids


class RhoStatistics(object):
    """Functor to compute Rho statistics given a star catalog and PSF model.

    For a detailed description of the Rho statistics, refer to
    Rowe (2010) and Jarvis et al. (2016).

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("base_SdssShape", "ext_shapeHSM_HsmSourceMoments").
    psfColumn : `str`
        The name used for PSF shape measurements from the same algorithm.
        It must be one of ("base_SdssShape_psf", "ext_shapeHSM_HsmPsfMoments")
        and correspond to the algorithm name specified for ``column``.
    shearConvention : `bool`, optional
        Option to use shear convention. When set to False, the distortion
        convention is used.
    **kwargs
        Additional keyword arguments passed to treecorr. See
        https://rmjarvis.github.io/TreeCorr/_build/html/gg.html for details.

    Returns
    -------
    rhoStats : `dict` [`int`, `treecorr.KKCorrelation` or `treecorr.GGCorrelation`]
        A dictionary with keys 0..5, containing one `treecorr.KKCorrelation`
        object (key 0) and five `treecorr.GGCorrelation` objects corresponding
        to the Rho statistic indices; rho0 is the autocorrelation function of
        the fractional PSF size residuals.
    """

    def __init__(self, column, psfColumn, shearConvention=False, **kwargs):
        self.column = column
        self.psfColumn = psfColumn
        self.shearConvention = shearConvention
        self.e1Func = E1(self.psfColumn, shearConvention=self.shearConvention)
        self.e2Func = E2(self.psfColumn, shearConvention=self.shearConvention)
        self.e1ResidsFunc = E1Resids(
            self.column, self.psfColumn, shearConvention=self.shearConvention
        )
        self.e2ResidsFunc = E2Resids(
            self.column, self.psfColumn, shearConvention=self.shearConvention
        )
        self.traceSizeFunc = TraceSize(self.column)
        self.psfTraceSizeFunc = TraceSize(self.psfColumn)
        self.kwargs = kwargs

    def __call__(self, catalog):
        e1 = self.e1Func(catalog)
        e2 = self.e2Func(catalog)
        e1Res = self.e1ResidsFunc(catalog)
        e2Res = self.e2ResidsFunc(catalog)
        traceSize2 = self.traceSizeFunc(catalog) ** 2
        psfTraceSize2 = self.psfTraceSizeFunc(catalog) ** 2
        SizeRes = (traceSize2 - psfTraceSize2) / (0.5 * (traceSize2 + psfTraceSize2))

        isFinite = np.isfinite(e1Res) & np.isfinite(e2Res) & np.isfinite(SizeRes)
        e1 = e1[isFinite]
        e2 = e2[isFinite]
        e1Res = e1Res[isFinite]
        e2Res = e2Res[isFinite]
        SizeRes = SizeRes[isFinite]

        # Scale the size residuals by the ellipticities.
        e1SizeRes = e1 * SizeRes
        e2SizeRes = e2 * SizeRes

        # Package the arguments to capture auto-/cross-correlations for the
        # Rho statistics.
        args = {
            0: (SizeRes, None),
            1: (e1Res, e2Res, None, None),
            2: (e1, e2, e1Res, e2Res),
            3: (e1SizeRes, e2SizeRes, None, None),
            4: (e1Res, e2Res, e1SizeRes, e2SizeRes),
            5: (e1, e2, e1SizeRes, e2SizeRes),
        }

        ra = np.rad2deg(catalog["coord_ra"][isFinite]) * 60.0  # arcmin
        dec = np.rad2deg(catalog["coord_dec"][isFinite]) * 60.0  # arcmin

        # Pass the appropriate arguments to the correlator and build a dict.
        rhoStats = {
            rhoIndex: corrSpin2(
                ra,
                dec,
                *(args[rhoIndex]),
                raUnits="arcmin",
                decUnits="arcmin",
                **self.kwargs
            )
            for rhoIndex in range(1, 6)
        }
        rhoStats[0] = corrSpin0(
            ra, dec, *(args[0]), raUnits="arcmin", decUnits="arcmin", **self.kwargs
        )

        return rhoStats
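
# --- Illustrative sketch (added for this write-up; not part of the original
# faro module). Runs RhoStatistics on a small random star catalog: the sky
# coordinates (radians, as assumed by __call__ above) and second moments are
# fabricated, and the binning keywords are passed through to treecorr. Keys
# 1..5 of the result are treecorr.GGCorrelation objects (xip/xim attributes)
# and key 0 is a treecorr.KKCorrelation (xi attribute).
def _demoRhoStatistics(nStars=200, seed=12345):
    rng = np.random.default_rng(seed)
    toyCatalog = {
        "coord_ra": np.deg2rad(rng.uniform(10.0, 10.5, nStars)),
        "coord_dec": np.deg2rad(rng.uniform(-0.25, 0.25, nStars)),
        "base_SdssShape_xx": rng.normal(4.0, 0.05, nStars),
        "base_SdssShape_yy": rng.normal(4.0, 0.05, nStars),
        "base_SdssShape_xy": rng.normal(0.0, 0.05, nStars),
        "base_SdssShape_psf_xx": rng.normal(4.0, 0.05, nStars),
        "base_SdssShape_psf_yy": rng.normal(4.0, 0.05, nStars),
        "base_SdssShape_psf_xy": rng.normal(0.0, 0.05, nStars),
    }
    rhoFunc = RhoStatistics(
        "base_SdssShape",
        "base_SdssShape_psf",
        nbins=5,
        min_sep=0.5,
        max_sep=20.0,
        sep_units="arcmin",
    )
    rhoStats = rhoFunc(toyCatalog)
    # Separation (arcmin) and rho1 correlation amplitude per bin.
    return np.exp(rhoStats[1].meanlogr), rhoStats[1].xip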


def corrSpin0(
    ra, dec, k1, k2=None, raUnits="degrees", decUnits="degrees", **treecorrKwargs
):
    """Function to compute correlations between at most two scalar fields.

    This is used to compute Rho0 statistics, given the appropriate spin-0
    (scalar) fields, usually fractional size residuals.

    Parameters
    ----------
    ra : `numpy.array`
        The right ascension values of entries in the catalog.
    dec : `numpy.array`
        The declination values of entries in the catalog.
    k1 : `numpy.array`
        The primary scalar field.
    k2 : `numpy.array`, optional
        The secondary scalar field.
        Autocorrelation of the primary field is computed if `None` (default).
    raUnits : `str`, optional
        Unit of the right ascension values.
        Valid options are "degrees", "arcmin", "arcsec", "hours" or "radians".
    decUnits : `str`, optional
        Unit of the declination values.
        Valid options are "degrees", "arcmin", "arcsec", "hours" or "radians".
    **treecorrKwargs
        Keyword arguments to be passed to `treecorr.KKCorrelation`.

    Returns
    -------
    xy : `treecorr.KKCorrelation`
        A `treecorr.KKCorrelation` object containing the correlation function.
    """
    xy = treecorr.KKCorrelation(**treecorrKwargs)
    catA = treecorr.Catalog(ra=ra, dec=dec, k=k1, ra_units=raUnits, dec_units=decUnits)
    if k2 is None:
        # Calculate the auto-correlation.
        xy.process(catA)
    else:
        catB = treecorr.Catalog(
            ra=ra, dec=dec, k=k2, ra_units=raUnits, dec_units=decUnits
        )
        # Calculate the cross-correlation.
        xy.process(catA, catB)

    return xy
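
# --- Illustrative sketch (added for this write-up; not part of the original
# faro module). Autocorrelation of a single made-up scalar field; passing a
# second field as k2 would give the cross-correlation instead.
def _demoCorrSpin0(nPoints=500, seed=6789):
    rng = np.random.default_rng(seed)
    ra = rng.uniform(0.0, 1.0, nPoints)    # degrees (the default raUnits)
    dec = rng.uniform(0.0, 1.0, nPoints)   # degrees (the default decUnits)
    k1 = rng.normal(0.0, 0.01, nPoints)    # e.g. fractional size residuals
    kk = corrSpin0(
        ra, dec, k1, nbins=8, min_sep=0.5, max_sep=30.0, sep_units="arcmin"
    )
    return np.exp(kk.meanlogr), kk.xi, np.sqrt(kk.varxi)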


def corrSpin2(
    ra,
    dec,
    g1a,
    g2a,
    g1b=None,
    g2b=None,
    raUnits="degrees",
    decUnits="degrees",
    **treecorrKwargs
):
    """Function to compute correlations between at most two shear-like fields.

    This is used to compute Rho statistics, given the appropriate spin-2
    (shear-like) fields.

    Parameters
    ----------
    ra : `numpy.array`
        The right ascension values of entries in the catalog.
    dec : `numpy.array`
        The declination values of entries in the catalog.
    g1a : `numpy.array`
        The first component of the primary shear-like field.
    g2a : `numpy.array`
        The second component of the primary shear-like field.
    g1b : `numpy.array`, optional
        The first component of the secondary shear-like field.
        Autocorrelation of the primary field is computed if `None` (default).
    g2b : `numpy.array`, optional
        The second component of the secondary shear-like field.
        Autocorrelation of the primary field is computed if `None` (default).
    raUnits : `str`, optional
        Unit of the right ascension values.
        Valid options are "degrees", "arcmin", "arcsec", "hours" or "radians".
    decUnits : `str`, optional
        Unit of the declination values.
        Valid options are "degrees", "arcmin", "arcsec", "hours" or "radians".
    **treecorrKwargs
        Keyword arguments to be passed to `treecorr.GGCorrelation`.

    Returns
    -------
    xy : `treecorr.GGCorrelation`
        A `treecorr.GGCorrelation` object containing the correlation function.
    """
    xy = treecorr.GGCorrelation(**treecorrKwargs)
    catA = treecorr.Catalog(
        ra=ra, dec=dec, g1=g1a, g2=g2a, ra_units=raUnits, dec_units=decUnits
    )
    if g1b is None or g2b is None:
        # Calculate the auto-correlation.
        xy.process(catA)
    else:
        catB = treecorr.Catalog(
            ra=ra, dec=dec, g1=g1b, g2=g2b, ra_units=raUnits, dec_units=decUnits
        )
        # Calculate the cross-correlation.
        xy.process(catA, catB)

    return xy
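
# --- Illustrative sketch (added for this write-up; not part of the original
# faro module). Cross-correlation of two made-up shear-like fields; omitting
# g1b/g2b would instead return the autocorrelation of (g1a, g2a).
def _demoCorrSpin2(nPoints=500, seed=24680):
    rng = np.random.default_rng(seed)
    ra = rng.uniform(0.0, 1.0, nPoints)    # degrees (the default raUnits)
    dec = rng.uniform(0.0, 1.0, nPoints)   # degrees (the default decUnits)
    g1a, g2a = rng.normal(0.0, 0.02, (2, nPoints))
    g1b, g2b = rng.normal(0.0, 0.02, (2, nPoints))
    gg = corrSpin2(
        ra, dec, g1a, g2a, g1b, g2b,
        nbins=8, min_sep=0.5, max_sep=30.0, sep_units="arcmin",
    )
    return np.exp(gg.meanlogr), gg.xip, np.sqrt(gg.varxip)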


def calculateTEx(catalogs, photoCalibs, astromCalibs, config):
    """Compute ellipticity residual correlation metrics."""
    catalog = mergeCatalogs(catalogs, photoCalibs, astromCalibs)

    # TODO: filtering should be pulled out into a separate function for
    # standard quality selections.
    snrMin = 50
    selection = (
        (catalog["base_ClassificationExtendedness_value"] < 0.5)
        & (
            (catalog["slot_PsfFlux_instFlux"] / catalog["slot_PsfFlux_instFluxErr"])
            > snrMin
        )
        & (catalog["deblend_nChild"] == 0)
    )

    nMinSources = 50
    if np.sum(selection) < nMinSources:
        return {"nomeas": np.nan * u.Unit("")}

    treecorrKwargs = dict(
        nbins=config.nbins,
        min_sep=config.minSep,
        max_sep=config.maxSep,
        sep_units="arcmin",
    )
    rhoStatistics = RhoStatistics(
        config.column, config.columnPsf, config.shearConvention, **treecorrKwargs
    )
    xy = rhoStatistics(catalog[selection])[config.rhoStat]

    radius = np.exp(xy.meanlogr) * u.arcmin
    if config.rhoStat == 0:
        corr = xy.xi * u.Unit("")
        # KKCorrelation stores its variance as varxi (not varxip).
        corrErr = np.sqrt(xy.varxi) * u.Unit("")
    else:
        corr = xy.xip * u.Unit("")
        corrErr = np.sqrt(xy.varxip) * u.Unit("")

    result = dict(radius=radius, corr=corr, corrErr=corrErr)
    return result
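
# --- Illustrative sketch (added for this write-up; not part of the original
# faro module). calculateTEx reads the config attributes used above (nbins,
# minSep, maxSep, column, columnPsf, shearConvention, rhoStat) and a merged
# catalog containing the extendedness, PSF flux, deblending, shape, and
# coordinate columns named in the selection. The SimpleNamespace below is a
# hypothetical stand-in documenting those fields; the values are illustrative
# only, not the faro pipeline defaults, and the real catalogs/calibs come
# through mergeCatalogs in the pipeline.
def _demoTExConfig():
    from types import SimpleNamespace

    return SimpleNamespace(
        nbins=10,
        minSep=0.25,            # arcmin (sep_units is fixed to "arcmin" above)
        maxSep=20.0,            # arcmin
        column="base_SdssShape",
        columnPsf="base_SdssShape_psf",
        shearConvention=False,
        rhoStat=1,              # which Rho statistic to return (0..5)
    )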