Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1# This file is part of faro. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <https://www.gnu.org/licenses/>. 

21 

22import astropy.units as u 

23import numpy as np 

24import treecorr 

25from typing import List 

26 

27from lsst.faro.utils.calibrated_catalog import CalibratedCatalog 

28from lsst.faro.utils.matcher import mergeCatalogs 

29 

30 

# Public interface of this module.
__all__ = (
    "TraceSize", "PsfTraceSizeDiff",
    "E1", "E2", "E1Resids", "E2Resids",
    "RhoStatistics",
    "corrSpin0", "corrSpin2",
    "calculateTEx",
)

43 

44 

class TraceSize(object):
    """Functor to calculate trace radius size for sources.

    Parameters
    ----------
    column : `str`
        Prefix of the second-moment columns; ``<column>_xx`` and
        ``<column>_yy`` are read from the catalog.
    """

    def __init__(self, column):
        self.column = column

    def __call__(self, catalog):
        # Trace radius: sqrt of half the sum of the diagonal second moments.
        xx = catalog[self.column + "_xx"]
        yy = catalog[self.column + "_yy"]
        return np.array(np.sqrt(0.5 * (xx + yy)))

57 

58 

class PsfTraceSizeDiff(object):
    """Functor to calculate trace radius size difference (%) between object
    and PSF model.

    Parameters
    ----------
    column : `str`
        Prefix of the object second-moment columns.
    psfColumn : `str`
        Prefix of the PSF-model second-moment columns.
    """

    def __init__(self, column, psfColumn):
        self.column = column
        self.psfColumn = psfColumn

    def __call__(self, catalog):
        def _traceRadius(prefix):
            # sqrt of half the sum of the diagonal second moments.
            return np.sqrt(0.5 * (catalog[prefix + "_xx"] + catalog[prefix + "_yy"]))

        srcSize = _traceRadius(self.column)
        psfSize = _traceRadius(self.psfColumn)
        # Percent difference, normalized by the mean of the two sizes.
        return np.array(100.0 * (srcSize - psfSize) / (0.5 * (srcSize + psfSize)))

77 

78 

class E1(object):
    """Function to calculate e1 ellipticities from a given catalog.

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("base_SdssShape", "ext_shapeHSM_HsmSourceMoments") or
        ("base_SdssShape_psf", "ext_shapeHSM_HsmPsfMoments") for
        corresponding PSF ellipticities.
    unitScale : `float`, optional
        A numerical scaling factor to multiply the ellipticity.
    shearConvention : `bool`, optional
        Option to use shear convention. When set to False, the distortion
        convention is used.

    Returns
    -------
    e1 : `numpy.array`
        A numpy array of e1 ellipticity values.
    """

    def __init__(self, column, unitScale=1.0, shearConvention=False):
        self.column = column
        self.unitScale = unitScale
        self.shearConvention = shearConvention

    def __call__(self, catalog):
        xx = catalog[self.column + "_xx"]
        yy = catalog[self.column + "_yy"]
        if self.shearConvention:
            # Shear convention has an extra 2*sqrt(det) term in the
            # denominator and requires the off-diagonal moment.
            xy = catalog[self.column + "_xy"]
            denominator = xx + yy + 2.0 * np.sqrt(xx * yy - xy ** 2)
        else:
            denominator = xx + yy
        return np.array((xx - yy) / denominator) * self.unitScale

113 

114 

class E2(object):
    """Function to calculate e2 ellipticities from a given catalog.

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("base_SdssShape", "ext_shapeHSM_HsmSourceMoments") or
        ("base_SdssShape_psf", "ext_shapeHSM_HsmPsfMoments") for
        corresponding PSF ellipticities.
    unitScale : `float`, optional
        A numerical scaling factor to multiply the ellipticity.
    shearConvention : `bool`, optional
        Option to use shear convention. When set to False, the distortion
        convention is used.

    Returns
    -------
    e2 : `numpy.array`
        A numpy array of e2 ellipticity values.
    """

    def __init__(self, column, unitScale=1.0, shearConvention=False):
        self.column = column
        self.unitScale = unitScale
        self.shearConvention = shearConvention

    def __call__(self, catalog):
        xx = catalog[self.column + "_xx"]
        yy = catalog[self.column + "_yy"]
        # The off-diagonal moment is needed by both conventions here.
        xy = catalog[self.column + "_xy"]
        if self.shearConvention:
            denominator = xx + yy + 2.0 * np.sqrt(xx * yy - xy ** 2)
        else:
            denominator = xx + yy
        return np.array(2.0 * xy / denominator) * self.unitScale

149 

150 

class E1Resids(object):
    """Functor to calculate e1 ellipticity residuals from an object catalog
    and PSF model.

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("base_SdssShape", "ext_shapeHSM_HsmSourceMoments").
    psfColumn : `str`
        The name used for PSF shape measurements from the same algorithm.
        It must be one of ("base_SdssShape_psf", "ext_shapeHSM_HsmPsfMoments")
        and correspond to the algorithm name specified for ``column``.
    unitScale : `float`, optional
        A numerical scaling factor to multiply both the object and PSF
        ellipticities.
    shearConvention : `bool`, optional
        Option to use shear convention. When set to False, the distortion
        convention is used.

    Returns
    -------
    e1Resids : `numpy.array`
        A numpy array of e1 residual (object minus PSF) ellipticity values.
    """

    def __init__(self, column, psfColumn, unitScale=1.0, shearConvention=False):
        self.column = column
        self.psfColumn = psfColumn
        self.unitScale = unitScale
        self.shearConvention = shearConvention

    def __call__(self, catalog):
        # Evaluate e1 with identical scaling/convention for object and PSF
        # columns, then difference them.
        objectE1 = E1(self.column, self.unitScale, self.shearConvention)(catalog)
        modelE1 = E1(self.psfColumn, self.unitScale, self.shearConvention)(catalog)
        return objectE1 - modelE1

190 

191 

class E2Resids(object):
    """Functor to calculate e2 ellipticity residuals from an object catalog
    and PSF model.

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("base_SdssShape", "ext_shapeHSM_HsmSourceMoments").
    psfColumn : `str`
        The name used for PSF shape measurements from the same algorithm.
        It must be one of ("base_SdssShape_psf", "ext_shapeHSM_HsmPsfMoments")
        and correspond to the algorithm name specified for ``column``.
    unitScale : `float`, optional
        A numerical scaling factor to multiply both the object and PSF
        ellipticities.
    shearConvention : `bool`, optional
        Option to use shear convention. When set to False, the distortion
        convention is used.

    Returns
    -------
    e2Resids : `numpy.array`
        A numpy array of e2 residual (object minus PSF) ellipticity values.
    """

    def __init__(self, column, psfColumn, unitScale=1.0, shearConvention=False):
        self.column = column
        self.psfColumn = psfColumn
        self.unitScale = unitScale
        self.shearConvention = shearConvention

    def __call__(self, catalog):
        # Evaluate e2 with identical scaling/convention for object and PSF
        # columns, then difference them.
        objectE2 = E2(self.column, self.unitScale, self.shearConvention)(catalog)
        modelE2 = E2(self.psfColumn, self.unitScale, self.shearConvention)(catalog)
        return objectE2 - modelE2

231 

232 

class RhoStatistics(object):
    """Functor to compute Rho statistics given star catalog and PSF model.

    For a detailed description of Rho statistics, refer to Rowe (2010) and
    Jarvis et al. (2016).

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("base_SdssShape", "ext_shapeHSM_HsmSourceMoments").
    psfColumn : `str`
        The name used for PSF shape measurements from the same algorithm.
        It must be one of ("base_SdssShape_psf", "ext_shapeHSM_HsmPsfMoments")
        and correspond to the algorithm name specified for ``column``.
    shearConvention : `bool`, optional
        Option to use shear convention. When set to False, the distortion
        convention is used.
    **kwargs
        Additional keyword arguments passed to treecorr. See
        https://rmjarvis.github.io/TreeCorr/_build/html/gg.html for details.

    Returns
    -------
    rhoStats : `dict` [`int`, `treecorr.KKCorrelation` or
              `treecorr.GGCorrelation`]
        A dictionary with keys 0..5, containing one `treecorr.KKCorrelation`
        object (key 0) and five `treecorr.GGCorrelation` objects corresponding
        to Rho statistic indices. rho0 corresponds to the autocorrelation
        function of PSF size residuals.
    """

    def __init__(self, column, psfColumn, shearConvention=False, **kwargs):
        self.column = column
        self.psfColumn = psfColumn
        self.shearConvention = shearConvention
        # PSF ellipticities and object-minus-PSF residual functors share the
        # same convention flag.
        self.e1Func = E1(self.psfColumn, shearConvention=self.shearConvention)
        self.e2Func = E2(self.psfColumn, shearConvention=self.shearConvention)
        self.e1ResidsFunc = E1Resids(
            self.column, self.psfColumn, shearConvention=self.shearConvention
        )
        self.e2ResidsFunc = E2Resids(
            self.column, self.psfColumn, shearConvention=self.shearConvention
        )
        self.traceSizeFunc = TraceSize(self.column)
        self.psfTraceSizeFunc = TraceSize(self.psfColumn)
        self.kwargs = kwargs

    def __call__(self, catalog):
        psfE1 = self.e1Func(catalog)
        psfE2 = self.e2Func(catalog)
        resE1 = self.e1ResidsFunc(catalog)
        resE2 = self.e2ResidsFunc(catalog)
        sizeSq = self.traceSizeFunc(catalog) ** 2
        psfSizeSq = self.psfTraceSizeFunc(catalog) ** 2
        # Fractional (normalized) size residual.
        fracSizeRes = (sizeSq - psfSizeSq) / (0.5 * (sizeSq + psfSizeSq))

        # Keep only entries finite in all residual quantities.
        good = np.isfinite(resE1) & np.isfinite(resE2) & np.isfinite(fracSizeRes)
        psfE1 = psfE1[good]
        psfE2 = psfE2[good]
        resE1 = resE1[good]
        resE2 = resE2[good]
        fracSizeRes = fracSizeRes[good]

        # Scale the size residuals by the ellipticities.
        e1SizeRes = psfE1 * fracSizeRes
        e2SizeRes = psfE2 * fracSizeRes

        # Field pairs defining the auto-/cross-correlations for each Rho
        # statistic index.
        fields = {
            0: (fracSizeRes, None),
            1: (resE1, resE2, None, None),
            2: (psfE1, psfE2, resE1, resE2),
            3: (e1SizeRes, e2SizeRes, None, None),
            4: (resE1, resE2, e1SizeRes, e2SizeRes),
            5: (psfE1, psfE2, e1SizeRes, e2SizeRes),
        }

        ra = np.rad2deg(catalog["coord_ra"][good]) * 60.0  # arcmin
        dec = np.rad2deg(catalog["coord_dec"][good]) * 60.0  # arcmin

        # Rho 1..5 are spin-2 correlations; rho0 is the spin-0 size-residual
        # autocorrelation.
        rhoStats = {}
        for rhoIndex in range(1, 6):
            rhoStats[rhoIndex] = corrSpin2(
                ra,
                dec,
                *(fields[rhoIndex]),
                raUnits="arcmin",
                decUnits="arcmin",
                **self.kwargs
            )
        rhoStats[0] = corrSpin0(
            ra, dec, *(fields[0]), raUnits="arcmin", decUnits="arcmin", **self.kwargs
        )
        return rhoStats

329 

330 

def corrSpin0(
    ra, dec, k1, k2=None, raUnits="degrees", decUnits="degrees", **treecorrKwargs
):
    """Function to compute correlations between at most two scalar fields.

    This is used to compute Rho0 statistics, given the appropriate spin-0
    (scalar) fields, usually fractional size residuals.

    Parameters
    ----------
    ra : `numpy.array`
        The right ascension values of entries in the catalog.
    dec : `numpy.array`
        The declination values of entries in the catalog.
    k1 : `numpy.array`
        The primary scalar field.
    k2 : `numpy.array`, optional
        The secondary scalar field. Autocorrelation of the primary field is
        computed if `None` (default).
    raUnits : `str`, optional
        Unit of the right ascension values. Valid options are "degrees",
        "arcmin", "arcsec", "hours" or "radians".
    decUnits : `str`, optional
        Unit of the declination values. Valid options are "degrees",
        "arcmin", "arcsec", "hours" or "radians".
    **treecorrKwargs
        Keyword arguments to be passed to `treecorr.KKCorrelation`.

    Returns
    -------
    xy : `treecorr.KKCorrelation`
        A `treecorr.KKCorrelation` object containing the correlation function.
    """
    correlation = treecorr.KKCorrelation(**treecorrKwargs)
    primary = treecorr.Catalog(
        ra=ra, dec=dec, k=k1, ra_units=raUnits, dec_units=decUnits
    )
    if k2 is None:
        # Auto-correlation of the primary field.
        correlation.process(primary)
        return correlation
    # Cross-correlation against the secondary field.
    secondary = treecorr.Catalog(
        ra=ra, dec=dec, k=k2, ra_units=raUnits, dec_units=decUnits
    )
    correlation.process(primary, secondary)
    return correlation

375 

376 

def corrSpin2(
    ra,
    dec,
    g1a,
    g2a,
    g1b=None,
    g2b=None,
    raUnits="degrees",
    decUnits="degrees",
    **treecorrKwargs
):
    """Function to compute correlations between at most two shear-like fields.

    This is used to compute Rho statistics, given the appropriate spin-2
    (shear-like) fields.

    Parameters
    ----------
    ra : `numpy.array`
        The right ascension values of entries in the catalog.
    dec : `numpy.array`
        The declination values of entries in the catalog.
    g1a : `numpy.array`
        The first component of the primary shear-like field.
    g2a : `numpy.array`
        The second component of the primary shear-like field.
    g1b : `numpy.array`, optional
        The first component of the secondary shear-like field.
        Autocorrelation of the primary field is computed if `None` (default).
    g2b : `numpy.array`, optional
        The second component of the secondary shear-like field.
        Autocorrelation of the primary field is computed if `None` (default).
    raUnits : `str`, optional
        Unit of the right ascension values. Valid options are "degrees",
        "arcmin", "arcsec", "hours" or "radians".
    decUnits : `str`, optional
        Unit of the declination values. Valid options are "degrees",
        "arcmin", "arcsec", "hours" or "radians".
    **treecorrKwargs
        Keyword arguments to be passed to `treecorr.GGCorrelation`.

    Returns
    -------
    xy : `treecorr.GGCorrelation`
        A `treecorr.GGCorrelation` object containing the correlation function.
    """
    correlation = treecorr.GGCorrelation(**treecorrKwargs)
    primary = treecorr.Catalog(
        ra=ra, dec=dec, g1=g1a, g2=g2a, ra_units=raUnits, dec_units=decUnits
    )
    if g1b is None or g2b is None:
        # Auto-correlation of the primary field.
        correlation.process(primary)
        return correlation
    # Cross-correlation against the secondary field.
    secondary = treecorr.Catalog(
        ra=ra, dec=dec, g1=g1b, g2=g2b, ra_units=raUnits, dec_units=decUnits
    )
    correlation.process(primary, secondary)
    return correlation

435 

436 

def calculateTEx(data: List[CalibratedCatalog], config):
    """Compute ellipticity residual correlation metrics.

    Parameters
    ----------
    data : `list` [`CalibratedCatalog`]
        Calibrated catalogs (with photometric and astrometric calibrations)
        to be merged and analyzed.
    config
        Configuration object providing ``nbins``, ``minSep``, ``maxSep``,
        ``column``, ``columnPsf``, ``shearConvention`` and ``rhoStat``.

    Returns
    -------
    result : `dict`
        Dictionary with ``radius``, ``corr`` and ``corrErr`` entries
        (astropy quantities), or ``{"nomeas": nan}`` if fewer than the
        minimum number of sources pass the quality selection.
    """
    catalog = mergeCatalogs(
        [x.catalog for x in data],
        [x.photoCalib for x in data],
        [x.astromCalib for x in data],
    )

    # Filtering should be pulled out into a separate function for standard
    # quality selections: point-like, high-SNR, non-blended sources.
    snrMin = 50
    selection = (
        (catalog["base_ClassificationExtendedness_value"] < 0.5)
        & (
            (catalog["slot_PsfFlux_instFlux"] / catalog["slot_PsfFlux_instFluxErr"])
            > snrMin
        )
        & (catalog["deblend_nChild"] == 0)
    )

    nMinSources = 50
    if np.sum(selection) < nMinSources:
        # Not enough sources for a meaningful measurement.
        return {"nomeas": np.nan * u.Unit("")}

    treecorrKwargs = dict(
        nbins=config.nbins,
        min_sep=config.minSep,
        max_sep=config.maxSep,
        sep_units="arcmin",
    )
    rhoStatistics = RhoStatistics(
        config.column, config.columnPsf, config.shearConvention, **treecorrKwargs
    )
    xy = rhoStatistics(catalog[selection])[config.rhoStat]

    radius = np.exp(xy.meanlogr) * u.arcmin
    if config.rhoStat == 0:
        # rho0 is a treecorr.KKCorrelation, whose correlation/variance
        # attributes are ``xi``/``varxi`` (``varxip`` only exists on
        # GGCorrelation objects — using it here raised AttributeError).
        corr = xy.xi * u.Unit("")
        corrErr = np.sqrt(xy.varxi) * u.Unit("")
    else:
        # rho1..5 are treecorr.GGCorrelation objects.
        corr = xy.xip * u.Unit("")
        corrErr = np.sqrt(xy.varxip) * u.Unit("")

    result = dict(radius=radius, corr=corr, corrErr=corrErr)
    return result