# This file is part of faro.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import astropy.units as u
import numpy as np
import treecorr

from lsst.faro.utils.matcher import mergeCatalogs


__all__ = (
    "TraceSize",
    "PsfTraceSizeDiff",
    "E1",
    "E2",
    "E1Resids",
    "E2Resids",
    "RhoStatistics",
    "corrSpin0",
    "corrSpin2",
    "calculateTEx",
)


class TraceSize(object):
    """Functor to calculate the trace radius size for sources."""

    def __init__(self, column):
        self.column = column

    def __call__(self, catalog):
        srcSize = np.sqrt(
            0.5 * (catalog[self.column + "_xx"] + catalog[self.column + "_yy"])
        )
        return np.array(srcSize)


class PsfTraceSizeDiff(object):
    """Functor to calculate the trace radius size difference (%) between an
    object and its PSF model.
    """

    def __init__(self, column, psfColumn):
        self.column = column
        self.psfColumn = psfColumn

    def __call__(self, catalog):
        srcSize = np.sqrt(
            0.5 * (catalog[self.column + "_xx"] + catalog[self.column + "_yy"])
        )
        psfSize = np.sqrt(
            0.5 * (catalog[self.psfColumn + "_xx"] + catalog[self.psfColumn + "_yy"])
        )
        sizeDiff = 100 * (srcSize - psfSize) / (0.5 * (srcSize + psfSize))
        return np.array(sizeDiff)

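# Illustrative usage (a sketch, not part of the module API): the functors above
# expect a catalog-like mapping whose "<column>_xx"/"<column>_yy" entries hold
# second moments; the column names below are examples only.
#
#     traceSize = TraceSize("base_SdssShape")
#     psfTraceDiff = PsfTraceSizeDiff("base_SdssShape", "base_SdssShape_psf")
#     sizes = traceSize(catalog)           # sqrt(0.5 * (Ixx + Iyy)) per source
#     sizeDiffPct = psfTraceDiff(catalog)  # percent difference w.r.t. the PSF model
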

class E1(object):
    """Functor to calculate e1 ellipticities from a given catalog.

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("base_SdssShape", "ext_shapeHSM_HsmSourceMoments") or
        ("base_SdssShape_psf", "ext_shapeHSM_HsmPsfMoments") for the
        corresponding PSF ellipticities.
    unitScale : `float`, optional
        A numerical scaling factor to multiply the ellipticity.
    shearConvention : `bool`, optional
        Option to use the shear convention. When set to False, the distortion
        convention is used.

    Returns
    -------
    e1 : `numpy.array`
        A numpy array of e1 ellipticity values.
    """

    def __init__(self, column, unitScale=1.0, shearConvention=False):
        self.column = column
        self.unitScale = unitScale
        self.shearConvention = shearConvention

    def __call__(self, catalog):
        xx = catalog[self.column + "_xx"]
        yy = catalog[self.column + "_yy"]
        if self.shearConvention:
            xy = catalog[self.column + "_xy"]
            e1 = (xx - yy) / (xx + yy + 2.0 * np.sqrt(xx * yy - xy ** 2))
        else:
            e1 = (xx - yy) / (xx + yy)
        return np.array(e1) * self.unitScale


class E2(object):
    """Functor to calculate e2 ellipticities from a given catalog.

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("base_SdssShape", "ext_shapeHSM_HsmSourceMoments") or
        ("base_SdssShape_psf", "ext_shapeHSM_HsmPsfMoments") for the
        corresponding PSF ellipticities.
    unitScale : `float`, optional
        A numerical scaling factor to multiply the ellipticity.
    shearConvention : `bool`, optional
        Option to use the shear convention. When set to False, the distortion
        convention is used.

    Returns
    -------
    e2 : `numpy.array`
        A numpy array of e2 ellipticity values.
    """

    def __init__(self, column, unitScale=1.0, shearConvention=False):
        self.column = column
        self.unitScale = unitScale
        self.shearConvention = shearConvention

    def __call__(self, catalog):
        xx = catalog[self.column + "_xx"]
        yy = catalog[self.column + "_yy"]
        xy = catalog[self.column + "_xy"]
        if self.shearConvention:
            e2 = (2.0 * xy) / (xx + yy + 2.0 * np.sqrt(xx * yy - xy ** 2))
        else:
            e2 = (2.0 * xy) / (xx + yy)
        return np.array(e2) * self.unitScale

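# For reference (a sketch based on the second-moment formulas used above): with
# moments Ixx, Iyy, Ixy, the distortion convention gives
#     e1 = (Ixx - Iyy) / (Ixx + Iyy),    e2 = 2 * Ixy / (Ixx + Iyy)
# while the shear convention divides instead by
#     Ixx + Iyy + 2 * sqrt(Ixx * Iyy - Ixy**2).
# Example usage (the column name is illustrative only):
#
#     e1 = E1("ext_shapeHSM_HsmSourceMoments", shearConvention=True)(catalog)
#     e2 = E2("ext_shapeHSM_HsmSourceMoments", shearConvention=True)(catalog)
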

class E1Resids(object):
    """Functor to calculate e1 ellipticity residuals between an object catalog
    and a PSF model.

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("base_SdssShape", "ext_shapeHSM_HsmSourceMoments").
    psfColumn : `str`
        The name used for PSF shape measurements from the same algorithm.
        It must be one of ("base_SdssShape_psf", "ext_shapeHSM_HsmPsfMoments")
        and correspond to the algorithm name specified for ``column``.
    unitScale : `float`, optional
        A numerical scaling factor to multiply both the object and PSF
        ellipticities.
    shearConvention : `bool`, optional
        Option to use the shear convention. When set to False, the distortion
        convention is used.

    Returns
    -------
    e1Resids : `numpy.array`
        A numpy array of e1 residual ellipticity values.
    """

    def __init__(self, column, psfColumn, unitScale=1.0, shearConvention=False):
        self.column = column
        self.psfColumn = psfColumn
        self.unitScale = unitScale
        self.shearConvention = shearConvention

    def __call__(self, catalog):
        srcE1func = E1(self.column, self.unitScale, self.shearConvention)
        psfE1func = E1(self.psfColumn, self.unitScale, self.shearConvention)

        srcE1 = srcE1func(catalog)
        psfE1 = psfE1func(catalog)

        e1Resids = srcE1 - psfE1
        return e1Resids


class E2Resids(object):
    """Functor to calculate e2 ellipticity residuals between an object catalog
    and a PSF model.

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("base_SdssShape", "ext_shapeHSM_HsmSourceMoments").
    psfColumn : `str`
        The name used for PSF shape measurements from the same algorithm.
        It must be one of ("base_SdssShape_psf", "ext_shapeHSM_HsmPsfMoments")
        and correspond to the algorithm name specified for ``column``.
    unitScale : `float`, optional
        A numerical scaling factor to multiply both the object and PSF
        ellipticities.
    shearConvention : `bool`, optional
        Option to use the shear convention. When set to False, the distortion
        convention is used.

    Returns
    -------
    e2Resids : `numpy.array`
        A numpy array of e2 residual ellipticity values.
    """

    def __init__(self, column, psfColumn, unitScale=1.0, shearConvention=False):
        self.column = column
        self.psfColumn = psfColumn
        self.unitScale = unitScale
        self.shearConvention = shearConvention

    def __call__(self, catalog):
        srcE2func = E2(self.column, self.unitScale, self.shearConvention)
        psfE2func = E2(self.psfColumn, self.unitScale, self.shearConvention)

        srcE2 = srcE2func(catalog)
        psfE2 = psfE2func(catalog)

        e2Resids = srcE2 - psfE2
        return e2Resids

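# Illustrative usage (a sketch; column names are examples only): the residual
# functors simply difference the object and PSF-model ellipticities per source.
#
#     e1Res = E1Resids("base_SdssShape", "base_SdssShape_psf")(catalog)
#     e2Res = E2Resids("base_SdssShape", "base_SdssShape_psf")(catalog)
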

class RhoStatistics(object):
    """Functor to compute Rho statistics given a star catalog and PSF model.

    For a detailed description of the Rho statistics, refer to
    Rowe (2010) and Jarvis et al. (2016).

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("base_SdssShape", "ext_shapeHSM_HsmSourceMoments").
    psfColumn : `str`
        The name used for PSF shape measurements from the same algorithm.
        It must be one of ("base_SdssShape_psf", "ext_shapeHSM_HsmPsfMoments")
        and correspond to the algorithm name specified for ``column``.
    shearConvention : `bool`, optional
        Option to use the shear convention. When set to False, the distortion
        convention is used.
    **kwargs
        Additional keyword arguments passed to treecorr. See
        https://rmjarvis.github.io/TreeCorr/_build/html/gg.html for details.

    Returns
    -------
    rhoStats : `dict` [`int`, `treecorr.KKCorrelation` or
               `treecorr.GGCorrelation`]
        A dictionary with keys 0..5, containing one `treecorr.KKCorrelation`
        object (key 0) and five `treecorr.GGCorrelation` objects corresponding
        to the Rho statistic indices. rho0 is the autocorrelation function of
        the fractional PSF size residuals.
    """

    def __init__(self, column, psfColumn, shearConvention=False, **kwargs):
        self.column = column
        self.psfColumn = psfColumn
        self.shearConvention = shearConvention
        self.e1Func = E1(self.psfColumn, shearConvention=self.shearConvention)
        self.e2Func = E2(self.psfColumn, shearConvention=self.shearConvention)
        self.e1ResidsFunc = E1Resids(
            self.column, self.psfColumn, shearConvention=self.shearConvention
        )
        self.e2ResidsFunc = E2Resids(
            self.column, self.psfColumn, shearConvention=self.shearConvention
        )
        self.traceSizeFunc = TraceSize(self.column)
        self.psfTraceSizeFunc = TraceSize(self.psfColumn)
        self.kwargs = kwargs

    def __call__(self, catalog):
        e1 = self.e1Func(catalog)
        e2 = self.e2Func(catalog)
        e1Res = self.e1ResidsFunc(catalog)
        e2Res = self.e2ResidsFunc(catalog)
        traceSize2 = self.traceSizeFunc(catalog) ** 2
        psfTraceSize2 = self.psfTraceSizeFunc(catalog) ** 2
        SizeRes = (traceSize2 - psfTraceSize2) / (0.5 * (traceSize2 + psfTraceSize2))

        isFinite = np.isfinite(e1Res) & np.isfinite(e2Res) & np.isfinite(SizeRes)
        e1 = e1[isFinite]
        e2 = e2[isFinite]
        e1Res = e1Res[isFinite]
        e2Res = e2Res[isFinite]
        SizeRes = SizeRes[isFinite]

        # Scale the size residuals by the ellipticities.
        e1SizeRes = e1 * SizeRes
        e2SizeRes = e2 * SizeRes

        # Package the arguments to capture the auto-/cross-correlations for
        # the Rho statistics.
        args = {
            0: (SizeRes, None),
            1: (e1Res, e2Res, None, None),
            2: (e1, e2, e1Res, e2Res),
            3: (e1SizeRes, e2SizeRes, None, None),
            4: (e1Res, e2Res, e1SizeRes, e2SizeRes),
            5: (e1, e2, e1SizeRes, e2SizeRes),
        }

        ra = np.rad2deg(catalog["coord_ra"][isFinite]) * 60.0  # arcmin
        dec = np.rad2deg(catalog["coord_dec"][isFinite]) * 60.0  # arcmin

        # Pass the appropriate arguments to the correlator and build a dict.
        rhoStats = {
            rhoIndex: corrSpin2(
                ra,
                dec,
                *(args[rhoIndex]),
                raUnits="arcmin",
                decUnits="arcmin",
                **self.kwargs
            )
            for rhoIndex in range(1, 6)
        }
        rhoStats[0] = corrSpin0(
            ra, dec, *(args[0]), raUnits="arcmin", decUnits="arcmin", **self.kwargs
        )

        return rhoStats

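# Illustrative usage (a sketch; the column names, binning, and separation range
# are assumptions for the example, not defaults of this module):
#
#     rhoFunc = RhoStatistics(
#         "ext_shapeHSM_HsmSourceMoments",
#         "ext_shapeHSM_HsmPsfMoments",
#         shearConvention=True,
#         nbins=10, min_sep=0.5, max_sep=20.0, sep_units="arcmin",
#     )
#     rhoStats = rhoFunc(starCatalog)
#     rho1 = rhoStats[1].xip   # xi_+ of the ellipticity-residual autocorrelation
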

def corrSpin0(
    ra, dec, k1, k2=None, raUnits="degrees", decUnits="degrees", **treecorrKwargs
):
    """Function to compute correlations between at most two scalar fields.

    This is used to compute Rho0 statistics, given the appropriate spin-0
    (scalar) fields, usually fractional size residuals.

    Parameters
    ----------
    ra : `numpy.array`
        The right ascension values of entries in the catalog.
    dec : `numpy.array`
        The declination values of entries in the catalog.
    k1 : `numpy.array`
        The primary scalar field.
    k2 : `numpy.array`, optional
        The secondary scalar field.
        Autocorrelation of the primary field is computed if `None` (default).
    raUnits : `str`, optional
        Unit of the right ascension values.
        Valid options are "degrees", "arcmin", "arcsec", "hours" or "radians".
    decUnits : `str`, optional
        Unit of the declination values.
        Valid options are "degrees", "arcmin", "arcsec", "hours" or "radians".
    **treecorrKwargs
        Keyword arguments to be passed to `treecorr.KKCorrelation`.

    Returns
    -------
    xy : `treecorr.KKCorrelation`
        A `treecorr.KKCorrelation` object containing the correlation function.
    """
    xy = treecorr.KKCorrelation(**treecorrKwargs)
    catA = treecorr.Catalog(ra=ra, dec=dec, k=k1, ra_units=raUnits, dec_units=decUnits)
    if k2 is None:
        # Calculate the auto-correlation
        xy.process(catA)
    else:
        catB = treecorr.Catalog(
            ra=ra, dec=dec, k=k2, ra_units=raUnits, dec_units=decUnits
        )
        # Calculate the cross-correlation
        xy.process(catA, catB)

    return xy

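# Illustrative usage (a sketch; binning values are assumptions for the example):
#
#     kk = corrSpin0(ra, dec, sizeRes,
#                    nbins=10, min_sep=0.5, max_sep=20.0, sep_units="arcmin")
#     rho0 = kk.xi    # scalar-scalar correlation per separation bin
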

def corrSpin2(
    ra,
    dec,
    g1a,
    g2a,
    g1b=None,
    g2b=None,
    raUnits="degrees",
    decUnits="degrees",
    **treecorrKwargs
):
    """Function to compute correlations between at most two shear-like fields.

    This is used to compute Rho statistics, given the appropriate spin-2
    (shear-like) fields.

    Parameters
    ----------
    ra : `numpy.array`
        The right ascension values of entries in the catalog.
    dec : `numpy.array`
        The declination values of entries in the catalog.
    g1a : `numpy.array`
        The first component of the primary shear-like field.
    g2a : `numpy.array`
        The second component of the primary shear-like field.
    g1b : `numpy.array`, optional
        The first component of the secondary shear-like field.
        Autocorrelation of the primary field is computed if `None` (default).
    g2b : `numpy.array`, optional
        The second component of the secondary shear-like field.
        Autocorrelation of the primary field is computed if `None` (default).
    raUnits : `str`, optional
        Unit of the right ascension values.
        Valid options are "degrees", "arcmin", "arcsec", "hours" or "radians".
    decUnits : `str`, optional
        Unit of the declination values.
        Valid options are "degrees", "arcmin", "arcsec", "hours" or "radians".
    **treecorrKwargs
        Keyword arguments to be passed to `treecorr.GGCorrelation`.

    Returns
    -------
    xy : `treecorr.GGCorrelation`
        A `treecorr.GGCorrelation` object containing the correlation function.
    """
    xy = treecorr.GGCorrelation(**treecorrKwargs)
    catA = treecorr.Catalog(
        ra=ra, dec=dec, g1=g1a, g2=g2a, ra_units=raUnits, dec_units=decUnits
    )
    if g1b is None or g2b is None:
        # Calculate the auto-correlation
        xy.process(catA)
    else:
        catB = treecorr.Catalog(
            ra=ra, dec=dec, g1=g1b, g2=g2b, ra_units=raUnits, dec_units=decUnits
        )
        # Calculate the cross-correlation
        xy.process(catA, catB)

    return xy

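# Illustrative usage (a sketch): a cross-correlation between PSF ellipticities
# and ellipticity residuals, as done for rho2 above; binning values are
# assumptions for the example.
#
#     gg = corrSpin2(ra, dec, e1, e2, e1Res, e2Res,
#                    nbins=10, min_sep=0.5, max_sep=20.0, sep_units="arcmin")
#     rho2 = gg.xip
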

def calculateTEx(catalogs, photoCalibs, astromCalibs, config):
    """Compute ellipticity residual correlation metrics."""
    catalog = mergeCatalogs(catalogs, photoCalibs, astromCalibs)

    # TODO: the filtering below should be pulled out into a separate function
    # for standard quality selections.
    snrMin = 50
    selection = (
        (catalog["base_ClassificationExtendedness_value"] < 0.5)
        & (
            (catalog["slot_PsfFlux_instFlux"] / catalog["slot_PsfFlux_instFluxErr"])
            > snrMin
        )
        & (catalog["deblend_nChild"] == 0)
    )

    nMinSources = 50
    if np.sum(selection) < nMinSources:
        return {"nomeas": np.nan * u.Unit("")}

    treecorrKwargs = dict(
        nbins=config.nbins,
        min_sep=config.minSep,
        max_sep=config.maxSep,
        sep_units="arcmin",
    )
    rhoStatistics = RhoStatistics(
        config.column, config.columnPsf, config.shearConvention, **treecorrKwargs
    )
    xy = rhoStatistics(catalog[selection])[config.rhoStat]

    radius = np.exp(xy.meanlogr) * u.arcmin
    if config.rhoStat == 0:
        # rho0 is a KKCorrelation, which exposes xi/varxi (not xip/varxip).
        corr = xy.xi * u.Unit("")
        corrErr = np.sqrt(xy.varxi) * u.Unit("")
    else:
        corr = xy.xip * u.Unit("")
        corrErr = np.sqrt(xy.varxip) * u.Unit("")

    result = dict(radius=radius, corr=corr, corrErr=corrErr)
    return result
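

# Illustrative usage (a sketch): ``config`` is expected to provide the
# attributes read above (nbins, minSep, maxSep, column, columnPsf,
# shearConvention, rhoStat). The namespace below is a stand-in for the real
# faro task config, for example only.
#
#     from types import SimpleNamespace
#     config = SimpleNamespace(nbins=10, minSep=0.25, maxSep=20.0,
#                              column="ext_shapeHSM_HsmSourceMoments",
#                              columnPsf="ext_shapeHSM_HsmPsfMoments",
#                              shearConvention=True, rhoStat=1)
#     tex = calculateTEx(catalogs, photoCalibs, astromCalibs, config)
#     # tex["radius"], tex["corr"], and tex["corrErr"] are astropy Quantities.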