Coverage for python/lsst/faro/utils/tex.py: 20%

142 statements  

« prev     ^ index     » next       coverage.py v7.2.7, created at 2023-06-01 03:52 -0700

1# This file is part of faro. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <https://www.gnu.org/licenses/>. 

21 

22import astropy.units as u 

23import numpy as np 

24import treecorr 

25from typing import List 

26 

27from lsst.faro.utils.calibrated_catalog import CalibratedCatalog 

28from lsst.faro.utils.matcher import mergeCatalogs 

29 

30 

# Public API of this module: shape/ellipticity functors, the treecorr
# correlation helpers, and the top-level TEx metric driver.
__all__ = (
    "TraceSize",
    "PsfTraceSizeDiff",
    "E1",
    "E2",
    "E1Resids",
    "E2Resids",
    "RhoStatistics",
    "corrSpin0",
    "corrSpin2",
    "calculateTEx",
)

43 

44 

class TraceSize(object):
    """Functor returning the trace radius of each source's second moments.

    Parameters
    ----------
    column : `str`
        Prefix of the moments columns; ``column + "_xx"`` and
        ``column + "_yy"`` must be present in the catalog.
    """

    def __init__(self, column):
        self.column = column

    def __call__(self, catalog):
        ixx = catalog[self.column + "_xx"]
        iyy = catalog[self.column + "_yy"]
        # Trace radius: sqrt of the mean of the two diagonal moments.
        return np.array(np.sqrt(0.5 * (ixx + iyy)))

56 

57 

class PsfTraceSizeDiff(object):
    """Functor giving the trace-radius difference (in percent) between an
    object and its PSF model, normalized by the mean of the two radii.

    Parameters
    ----------
    column : `str`
        Prefix of the object moments columns (``_xx``/``_yy`` suffixes).
    psfColumn : `str`
        Prefix of the PSF model moments columns (``_xx``/``_yy`` suffixes).
    """

    def __init__(self, column, psfColumn):
        self.column = column
        self.psfColumn = psfColumn

    def __call__(self, catalog):
        objSize = np.sqrt(
            0.5 * (catalog[self.column + "_xx"] + catalog[self.column + "_yy"])
        )
        modelSize = np.sqrt(
            0.5 * (catalog[self.psfColumn + "_xx"] + catalog[self.psfColumn + "_yy"])
        )
        # Percent difference relative to the mean of the two sizes.
        fracDiff = (objSize - modelSize) / (0.5 * (objSize + modelSize))
        return np.array(100.0 * fracDiff)

76 

77 

class E1(object):
    """Functor computing e1 ellipticities from a catalog of second moments.

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("slot_Shape", "ext_shapeHSM_HsmSourceMoments") or
        ("slot_PsfShape", "ext_shapeHSM_HsmPsfMoments") for corresponding
        PSF ellipticities.
    unitScale : `float`, optional
        A numerical scaling factor to multiply the ellipticity.
    shearConvention : `bool`, optional
        Use the shear convention; when False (default), the distortion
        convention is used.

    Returns
    -------
    e1 : `numpy.array`
        Array of e1 ellipticity values.
    """

    def __init__(self, column, unitScale=1.0, shearConvention=False):
        self.column = column
        self.unitScale = unitScale
        self.shearConvention = shearConvention

    def __call__(self, catalog):
        ixx = catalog[self.column + "_xx"]
        iyy = catalog[self.column + "_yy"]
        denom = ixx + iyy
        if self.shearConvention:
            # Shear convention adds 2*sqrt(det) to the denominator.
            ixy = catalog[self.column + "_xy"]
            denom = denom + 2.0 * np.sqrt(ixx * iyy - ixy ** 2)
        return np.array((ixx - iyy) / denom) * self.unitScale

112 

113 

class E2(object):
    """Functor computing e2 ellipticities from a catalog of second moments.

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("slot_Shape", "ext_shapeHSM_HsmSourceMoments") or
        ("slot_PsfShape", "ext_shapeHSM_HsmPsfMoments") for corresponding
        PSF ellipticities.
    unitScale : `float`, optional
        A numerical scaling factor to multiply the ellipticity.
    shearConvention : `bool`, optional
        Use the shear convention; when False (default), the distortion
        convention is used.

    Returns
    -------
    e2 : `numpy.array`
        Array of e2 ellipticity values.
    """

    def __init__(self, column, unitScale=1.0, shearConvention=False):
        self.column = column
        self.unitScale = unitScale
        self.shearConvention = shearConvention

    def __call__(self, catalog):
        ixx = catalog[self.column + "_xx"]
        iyy = catalog[self.column + "_yy"]
        ixy = catalog[self.column + "_xy"]
        denom = ixx + iyy
        if self.shearConvention:
            # Shear convention adds 2*sqrt(det) to the denominator.
            denom = denom + 2.0 * np.sqrt(ixx * iyy - ixy ** 2)
        return np.array(2.0 * ixy / denom) * self.unitScale

148 

149 

class E1Resids(object):
    """Functor for e1 ellipticity residuals (object minus PSF model).

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("slot_Shape", "ext_shapeHSM_HsmSourceMoments").
    psfColumn : `str`
        The name used for PSF shape measurements from the same algorithm.
        It must be one of ("slot_PsfShape", "ext_shapeHSM_HsmPsfMoments")
        and correspond to the algorithm name specified for ``column``.
    unitScale : `float`, optional
        A numerical scaling factor applied to both ellipticities.
    shearConvention : `bool`, optional
        Use the shear convention; when False (default), the distortion
        convention is used.

    Returns
    -------
    e1Resids : `numpy.array`
        Array of e1 residual ellipticity values.
    """

    def __init__(self, column, psfColumn, unitScale=1.0, shearConvention=False):
        self.column = column
        self.psfColumn = psfColumn
        self.unitScale = unitScale
        self.shearConvention = shearConvention

    def __call__(self, catalog):
        objE1 = E1(self.column, self.unitScale, self.shearConvention)(catalog)
        psfE1 = E1(self.psfColumn, self.unitScale, self.shearConvention)(catalog)
        return objE1 - psfE1

189 

190 

class E2Resids(object):
    """Functor for e2 ellipticity residuals (object minus PSF model).

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("slot_Shape", "ext_shapeHSM_HsmSourceMoments").
    psfColumn : `str`
        The name used for PSF shape measurements from the same algorithm.
        It must be one of ("slot_PsfShape", "ext_shapeHSM_HsmPsfMoments")
        and correspond to the algorithm name specified for ``column``.
    unitScale : `float`, optional
        A numerical scaling factor applied to both ellipticities.
    shearConvention : `bool`, optional
        Use the shear convention; when False (default), the distortion
        convention is used.

    Returns
    -------
    e2Resids : `numpy.array`
        Array of e2 residual ellipticity values.
    """

    def __init__(self, column, psfColumn, unitScale=1.0, shearConvention=False):
        self.column = column
        self.psfColumn = psfColumn
        self.unitScale = unitScale
        self.shearConvention = shearConvention

    def __call__(self, catalog):
        objE2 = E2(self.column, self.unitScale, self.shearConvention)(catalog)
        psfE2 = E2(self.psfColumn, self.unitScale, self.shearConvention)(catalog)
        return objE2 - psfE2

230 

231 

class RhoStatistics(object):
    """Functor computing the Rho statistics of a star catalog and its PSF
    model.

    For a detailed description of the Rho statistics, refer to Rowe (2010)
    and Jarvis et al. (2016).

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("slot_Shape", "ext_shapeHSM_HsmSourceMoments").
    psfColumn : `str`
        The name used for PSF shape measurements from the same algorithm.
        It must be one of ("slot_PsfShape", "ext_shapeHSM_HsmPsfMoments")
        and correspond to the algorithm name specified for ``column``.
    shearConvention : `bool`, optional
        Use the shear convention; when False (default), the distortion
        convention is used.
    **kwargs
        Additional keyword arguments passed to treecorr. See
        https://rmjarvis.github.io/TreeCorr/_build/html/gg.html for details.

    Returns
    -------
    rhoStats : `dict` [`int`, `treecorr.KKCorrelation` or
               `treecorr.GGCorrelation`]
        Keys 0 through 5. Key 0 holds a `treecorr.KKCorrelation` — the
        autocorrelation of fractional PSF size residuals — and keys 1-5
        hold the `treecorr.GGCorrelation` objects for the corresponding
        Rho statistic indices.
    """

    def __init__(self, column, psfColumn, shearConvention=False, **kwargs):
        self.column = column
        self.psfColumn = psfColumn
        self.shearConvention = shearConvention
        # Ellipticities of the PSF model.
        self.e1Func = E1(psfColumn, shearConvention=shearConvention)
        self.e2Func = E2(psfColumn, shearConvention=shearConvention)
        # Object-minus-PSF ellipticity residuals.
        self.e1ResidsFunc = E1Resids(column, psfColumn, shearConvention=shearConvention)
        self.e2ResidsFunc = E2Resids(column, psfColumn, shearConvention=shearConvention)
        self.traceSizeFunc = TraceSize(column)
        self.psfTraceSizeFunc = TraceSize(psfColumn)
        self.kwargs = kwargs

    def __call__(self, catalog):
        e1 = self.e1Func(catalog)
        e2 = self.e2Func(catalog)
        e1Res = self.e1ResidsFunc(catalog)
        e2Res = self.e2ResidsFunc(catalog)
        traceSize2 = self.traceSizeFunc(catalog) ** 2
        psfTraceSize2 = self.psfTraceSizeFunc(catalog) ** 2
        # Fractional size residual, normalized by the mean squared size.
        sizeRes = (traceSize2 - psfTraceSize2) / (0.5 * (traceSize2 + psfTraceSize2))

        # Keep only entries where all residuals are finite.
        good = np.isfinite(e1Res) & np.isfinite(e2Res) & np.isfinite(sizeRes)
        e1 = e1[good]
        e2 = e2[good]
        e1Res = e1Res[good]
        e2Res = e2Res[good]
        sizeRes = sizeRes[good]

        # Size residuals scaled by the ellipticity components.
        e1SizeRes = e1 * sizeRes
        e2SizeRes = e2 * sizeRes

        # Field (pairs) entering the auto-/cross-correlation for each index.
        fields = {
            0: (sizeRes, None),
            1: (e1Res, e2Res, None, None),
            2: (e1, e2, e1Res, e2Res),
            3: (e1SizeRes, e2SizeRes, None, None),
            4: (e1Res, e2Res, e1SizeRes, e2SizeRes),
            5: (e1, e2, e1SizeRes, e2SizeRes),
        }

        ra = np.rad2deg(catalog["coord_ra"][good]) * 60.0  # arcmin
        dec = np.rad2deg(catalog["coord_dec"][good]) * 60.0  # arcmin

        # Rho1..Rho5 are spin-2 correlations; Rho0 is the spin-0 one.
        rhoStats = {}
        for rhoIndex in range(1, 6):
            rhoStats[rhoIndex] = corrSpin2(
                ra,
                dec,
                *fields[rhoIndex],
                raUnits="arcmin",
                decUnits="arcmin",
                **self.kwargs
            )
        rhoStats[0] = corrSpin0(
            ra, dec, *fields[0], raUnits="arcmin", decUnits="arcmin", **self.kwargs
        )

        return rhoStats

328 

329 

def corrSpin0(
    ra, dec, k1, k2=None, raUnits="degrees", decUnits="degrees", **treecorrKwargs
):
    """Correlate at most two scalar (spin-0) fields on the sky.

    This is used to compute the Rho0 statistic, given the appropriate
    spin-0 fields, usually fractional size residuals.

    Parameters
    ----------
    ra : `numpy.array`
        Right ascension values of entries in the catalog.
    dec : `numpy.array`
        Declination values of entries in the catalog.
    k1 : `numpy.array`
        The primary scalar field.
    k2 : `numpy.array`, optional
        The secondary scalar field; when `None` (default) the
        autocorrelation of ``k1`` is computed.
    raUnits : `str`, optional
        Unit of ``ra``: "degrees", "arcmin", "arcsec", "hours" or "radians".
    decUnits : `str`, optional
        Unit of ``dec``: "degrees", "arcmin", "arcsec", "hours" or "radians".
    **treecorrKwargs
        Keyword arguments passed to `treecorr.KKCorrelation`.

    Returns
    -------
    corr : `treecorr.KKCorrelation`
        The processed correlation function.
    """
    corr = treecorr.KKCorrelation(**treecorrKwargs)
    primary = treecorr.Catalog(
        ra=ra, dec=dec, k=k1, ra_units=raUnits, dec_units=decUnits
    )
    if k2 is None:
        # Autocorrelation of the primary field.
        corr.process(primary)
        return corr

    secondary = treecorr.Catalog(
        ra=ra, dec=dec, k=k2, ra_units=raUnits, dec_units=decUnits
    )
    # Cross-correlation between the two fields.
    corr.process(primary, secondary)
    return corr

374 

375 

def corrSpin2(
    ra,
    dec,
    g1a,
    g2a,
    g1b=None,
    g2b=None,
    raUnits="degrees",
    decUnits="degrees",
    **treecorrKwargs
):
    """Correlate at most two shear-like (spin-2) fields on the sky.

    This is used to compute the Rho statistics, given the appropriate
    spin-2 fields.

    Parameters
    ----------
    ra : `numpy.array`
        Right ascension values of entries in the catalog.
    dec : `numpy.array`
        Declination values of entries in the catalog.
    g1a : `numpy.array`
        First component of the primary shear-like field.
    g2a : `numpy.array`
        Second component of the primary shear-like field.
    g1b : `numpy.array`, optional
        First component of the secondary shear-like field; when either
        ``g1b`` or ``g2b`` is `None` (default), the autocorrelation of the
        primary field is computed.
    g2b : `numpy.array`, optional
        Second component of the secondary shear-like field.
    raUnits : `str`, optional
        Unit of ``ra``: "degrees", "arcmin", "arcsec", "hours" or "radians".
    decUnits : `str`, optional
        Unit of ``dec``: "degrees", "arcmin", "arcsec", "hours" or "radians".
    **treecorrKwargs
        Keyword arguments passed to `treecorr.GGCorrelation`.

    Returns
    -------
    corr : `treecorr.GGCorrelation`
        The processed correlation function.
    """
    corr = treecorr.GGCorrelation(**treecorrKwargs)
    primary = treecorr.Catalog(
        ra=ra, dec=dec, g1=g1a, g2=g2a, ra_units=raUnits, dec_units=decUnits
    )
    if g1b is None or g2b is None:
        # Autocorrelation of the primary field.
        corr.process(primary)
        return corr

    secondary = treecorr.Catalog(
        ra=ra, dec=dec, g1=g1b, g2=g2b, ra_units=raUnits, dec_units=decUnits
    )
    # Cross-correlation between the two fields.
    corr.process(primary, secondary)
    return corr

434 

435 

def calculateTEx(data: List[CalibratedCatalog], config):
    """Compute ellipticity residual correlation metrics.

    Parameters
    ----------
    data : `List` [`CalibratedCatalog`]
        Calibrated catalogs (with their photometric/astrometric calibs)
        to merge and analyze.
    config
        Configuration object providing ``nbins``, ``minSep``, ``maxSep``,
        ``brute``, ``column``, ``columnPsf``, ``shearConvention`` and
        ``rhoStat``.

    Returns
    -------
    result : `dict`
        ``{"nomeas": nan}`` when fewer than 50 sources pass the selection;
        otherwise ``radius``, ``corr`` and ``corrErr`` as
        `astropy.units.Quantity` arrays for the requested Rho statistic.
    """
    catalog = mergeCatalogs(
        [x.catalog for x in data],
        [x.photoCalib for x in data],
        [x.astromCalib for x in data],
    )

    # Filtering should be pulled out into a separate function for standard
    # quality selections and only use sources that are single sources.
    # This is the same as `isPrimary`, except that it also includes
    # sources that are outside of the inner tract/patch regions.
    snrMin = 50
    selection = (
        (catalog["base_ClassificationExtendedness_value"] < 0.5)
        & (
            (catalog["slot_PsfFlux_instFlux"] / catalog["slot_PsfFlux_instFluxErr"])
            > snrMin
        )
        & catalog["detect_isDeblendedSource"]
    )

    # Too few sources gives a meaningless correlation; report no measurement.
    nMinSources = 50
    if np.sum(selection) < nMinSources:
        return {"nomeas": np.nan * u.Unit("")}

    treecorrKwargs = dict(
        nbins=config.nbins,
        min_sep=config.minSep,
        max_sep=config.maxSep,
        sep_units="arcmin",
        brute=config.brute,
    )
    rhoStatistics = RhoStatistics(
        config.column,
        config.columnPsf,
        shearConvention=config.shearConvention,
        **treecorrKwargs
    )
    xy = rhoStatistics(catalog[selection])[config.rhoStat]

    radius = np.exp(xy.meanlogr) * u.arcmin
    if config.rhoStat == 0:
        # Rho0 is a scalar (KK) correlation: treecorr.KKCorrelation exposes
        # ``xi``/``varxi``; ``varxip`` exists only on GGCorrelation, so the
        # previous ``xy.varxip`` raised AttributeError on this branch.
        corr = xy.xi * u.Unit("")
        corrErr = np.sqrt(xy.varxi) * u.Unit("")
    else:
        corr = xy.xip * u.Unit("")
        corrErr = np.sqrt(xy.varxip) * u.Unit("")

    result = dict(radius=radius, corr=corr, corrErr=corrErr)
    return result