Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1import astropy.units as u 

2import numpy as np 

3import treecorr 

4 

5from lsst.faro.utils.matcher import mergeCatalogs 

6 

7 

# Explicit public API of this module.
__all__ = ("TraceSize", "PsfTraceSizeDiff", "E1", "E2", "E1Resids", "E2Resids",
           "RhoStatistics", "corrSpin0", "corrSpin2", "calculateTEx")

10 

11 

class TraceSize(object):
    """Functor returning the trace radius of each source's second moments.

    Parameters
    ----------
    column : `str`
        Prefix of the moment columns; ``column + "_xx"`` and
        ``column + "_yy"`` are read from the catalog.
    """
    def __init__(self, column):
        self.column = column

    def __call__(self, catalog):
        ixx = catalog[self.column + "_xx"]
        iyy = catalog[self.column + "_yy"]
        # Trace radius: sqrt((Ixx + Iyy) / 2).
        return np.array(np.sqrt(0.5*(ixx + iyy)))

21 

22 

class PsfTraceSizeDiff(object):
    """Functor returning the percentage trace-radius difference between an
    object and its PSF model.

    Parameters
    ----------
    column : `str`
        Prefix of the object moment columns (``*_xx``, ``*_yy``).
    psfColumn : `str`
        Prefix of the PSF-model moment columns (``*_xx``, ``*_yy``).
    """
    def __init__(self, column, psfColumn):
        self.column = column
        self.psfColumn = psfColumn

    def __call__(self, catalog):
        sizes = []
        for prefix in (self.column, self.psfColumn):
            sizes.append(np.sqrt(0.5*(catalog[prefix + "_xx"] + catalog[prefix + "_yy"])))
        objSize, psfSize = sizes
        # Percent difference, normalized by the mean of the two sizes.
        return np.array(100.*(objSize - psfSize)/(0.5*(objSize + psfSize)))

36 

37 

class E1(object):
    """Functor computing the e1 ellipticity component from catalog moments.

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("base_SdssShape", "ext_shapeHSM_HsmSourceMoments") or
        ("base_SdssShape_psf", "ext_shapeHSM_HsmPsfMoments") for corresponding
        PSF ellipticities.
    unitScale : `float`, optional
        A numerical scaling factor applied to the ellipticity.
    shearConvention : `bool`, optional
        If True, use the shear convention; otherwise the distortion
        convention is used.

    Returns
    -------
    e1 : `numpy.array`
        A numpy array of e1 ellipticity values.
    """
    def __init__(self, column, unitScale=1.0, shearConvention=False):
        self.column = column
        self.unitScale = unitScale
        self.shearConvention = shearConvention

    def __call__(self, catalog):
        ixx = catalog[self.column + "_xx"]
        iyy = catalog[self.column + "_yy"]
        denom = ixx + iyy
        if self.shearConvention:
            # Shear convention adds 2*sqrt(det) to the denominator.
            ixy = catalog[self.column + "_xy"]
            denom = denom + 2. * np.sqrt(ixx * iyy - ixy**2)
        return np.array((ixx - iyy) / denom) * self.unitScale

71 

72 

class E2(object):
    """Functor computing the e2 ellipticity component from catalog moments.

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("base_SdssShape", "ext_shapeHSM_HsmSourceMoments") or
        ("base_SdssShape_psf", "ext_shapeHSM_HsmPsfMoments") for corresponding
        PSF ellipticities.
    unitScale : `float`, optional
        A numerical scaling factor applied to the ellipticity.
    shearConvention : `bool`, optional
        If True, use the shear convention; otherwise the distortion
        convention is used.

    Returns
    -------
    e2 : `numpy.array`
        A numpy array of e2 ellipticity values.
    """
    def __init__(self, column, unitScale=1.0, shearConvention=False):
        self.column = column
        self.unitScale = unitScale
        self.shearConvention = shearConvention

    def __call__(self, catalog):
        ixx = catalog[self.column + "_xx"]
        iyy = catalog[self.column + "_yy"]
        # NOTE: _xy is read in both conventions (e2 always needs it).
        ixy = catalog[self.column + "_xy"]
        if self.shearConvention:
            denom = ixx + iyy + 2. * np.sqrt(ixx * iyy - ixy**2)
        else:
            denom = ixx + iyy
        return np.array((2. * ixy) / denom) * self.unitScale

106 

107 

class E1Resids(object):
    """Functor computing e1 ellipticity residuals (object minus PSF model).

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("base_SdssShape", "ext_shapeHSM_HsmSourceMoments").
    psfColumn : `str`
        The name used for PSF shape measurements from the same algorithm.
        It must be one of ("base_SdssShape_psf", "ext_shapeHSM_HsmPsfMoments")
        and correspond to the algorithm name specified for ``column``.
    unitScale : `float`, optional
        A numerical scaling factor applied to both the object and PSF
        ellipticities.
    shearConvention : `bool`, optional
        If True, use the shear convention; otherwise the distortion
        convention is used.

    Returns
    -------
    e1Resids : `numpy.array`
        A numpy array of e1 residual ellipticity values.
    """
    def __init__(self, column, psfColumn, unitScale=1.0, shearConvention=False):
        self.column = column
        self.psfColumn = psfColumn
        self.unitScale = unitScale
        self.shearConvention = shearConvention

    def __call__(self, catalog):
        # Evaluate object and PSF-model e1 with identical settings, then
        # subtract element-wise.
        srcE1 = E1(self.column, self.unitScale, self.shearConvention)(catalog)
        psfE1 = E1(self.psfColumn, self.unitScale, self.shearConvention)(catalog)
        return srcE1 - psfE1

146 

147 

class E2Resids(object):
    """Functor computing e2 ellipticity residuals (object minus PSF model).

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("base_SdssShape", "ext_shapeHSM_HsmSourceMoments").
    psfColumn : `str`
        The name used for PSF shape measurements from the same algorithm.
        It must be one of ("base_SdssShape_psf", "ext_shapeHSM_HsmPsfMoments")
        and correspond to the algorithm name specified for ``column``.
    unitScale : `float`, optional
        A numerical scaling factor applied to both the object and PSF
        ellipticities.
    shearConvention : `bool`, optional
        If True, use the shear convention; otherwise the distortion
        convention is used.

    Returns
    -------
    e2Resids : `numpy.array`
        A numpy array of e2 residual ellipticity values.
    """
    def __init__(self, column, psfColumn, unitScale=1.0, shearConvention=False):
        self.column = column
        self.psfColumn = psfColumn
        self.unitScale = unitScale
        self.shearConvention = shearConvention

    def __call__(self, catalog):
        # Evaluate object and PSF-model e2 with identical settings, then
        # subtract element-wise.
        srcE2 = E2(self.column, self.unitScale, self.shearConvention)(catalog)
        psfE2 = E2(self.psfColumn, self.unitScale, self.shearConvention)(catalog)
        return srcE2 - psfE2

186 

187 

class RhoStatistics(object):
    """Functor to compute Rho statistics given star catalog and PSF model.

    For detailed description of Rho statistics, refer to
    Rowe (2010) and Jarvis et al., (2016).

    Parameters
    ----------
    column : `str`
        The name of the shape measurement algorithm. It should be one of
        ("base_SdssShape", "ext_shapeHSM_HsmSourceMoments").
    psfColumn : `str`
        The name used for PSF shape measurements from the same algorithm.
        It must be one of ("base_SdssShape_psf", "ext_shapeHSM_HsmPsfMoments")
        and correspond to the algorithm name specified for ``column``.
    shearConvention: `bool`, optional
        Option to use shear convention. When set to False, the distortion
        convention is used.
    **kwargs
        Additional keyword arguments passed to treecorr. See
        https://rmjarvis.github.io/TreeCorr/_build/html/gg.html for details.

    Returns
    -------
    rhoStats : `dict` [`int`, `treecorr.KKCorrelation` or
               `treecorr.GGCorrelation`]
        A dictionary with keys 0..5, containing one `treecorr.KKCorrelation`
        object (key 0) and five `treecorr.GGCorrelation` objects corresponding
        to Rho statistic indices. rho0 corresponds to autocorrelation function
        of PSF size residuals.
    """
    def __init__(self, column, psfColumn, shearConvention=False, **kwargs):
        self.column = column
        self.psfColumn = psfColumn
        self.shearConvention = shearConvention
        # PSF-model ellipticity components (evaluated from psfColumn moments).
        self.e1Func = E1(self.psfColumn, shearConvention=self.shearConvention)
        self.e2Func = E2(self.psfColumn, shearConvention=self.shearConvention)
        # Object-minus-PSF ellipticity residuals.
        self.e1ResidsFunc = E1Resids(self.column, self.psfColumn, shearConvention=self.shearConvention)
        self.e2ResidsFunc = E2Resids(self.column, self.psfColumn, shearConvention=self.shearConvention)
        # Trace sizes of object and PSF-model moments.
        self.traceSizeFunc = TraceSize(self.column)
        self.psfTraceSizeFunc = TraceSize(self.psfColumn)
        # Forwarded verbatim to the treecorr correlation constructors.
        self.kwargs = kwargs

    def __call__(self, catalog):
        e1 = self.e1Func(catalog)
        e2 = self.e2Func(catalog)
        e1Res = self.e1ResidsFunc(catalog)
        e2Res = self.e2ResidsFunc(catalog)
        # Fractional size residual uses squared trace radii (T = Ixx + Iyy).
        traceSize2 = self.traceSizeFunc(catalog)**2
        psfTraceSize2 = self.psfTraceSizeFunc(catalog)**2
        SizeRes = (traceSize2 - psfTraceSize2)/(0.5*(traceSize2 + psfTraceSize2))

        # Keep only entries where all residual quantities are finite; the
        # same mask is applied to every field and to the coordinates below.
        isFinite = np.isfinite(e1Res) & np.isfinite(e2Res) & np.isfinite(SizeRes)
        e1 = e1[isFinite]
        e2 = e2[isFinite]
        e1Res = e1Res[isFinite]
        e2Res = e2Res[isFinite]
        SizeRes = SizeRes[isFinite]

        # Scale the SizeRes by ellipticities
        e1SizeRes = e1*SizeRes
        e2SizeRes = e2*SizeRes

        # Package the arguments to capture auto-/cross-correlations for the
        # Rho statistics. Key 0 holds a scalar (spin-0) field; keys 1-5 hold
        # spin-2 field pairs (None second field -> auto-correlation).
        args = {0: (SizeRes, None),
                1: (e1Res, e2Res, None, None),
                2: (e1, e2, e1Res, e2Res),
                3: (e1SizeRes, e2SizeRes, None, None),
                4: (e1Res, e2Res, e1SizeRes, e2SizeRes),
                5: (e1, e2, e1SizeRes, e2SizeRes)}

        # coord_ra/coord_dec are converted with rad2deg, so they are assumed
        # to be stored in radians; output is in arcminutes.
        ra = np.rad2deg(catalog["coord_ra"][isFinite])*60.  # arcmin
        dec = np.rad2deg(catalog["coord_dec"][isFinite])*60.  # arcmin

        # Pass the appropriate arguments to the correlator and build a dict
        rhoStats = {rhoIndex: corrSpin2(ra, dec, *(args[rhoIndex]), raUnits="arcmin", decUnits="arcmin",
                                        **self.kwargs) for rhoIndex in range(1, 6)}
        rhoStats[0] = corrSpin0(ra, dec, *(args[0]), raUnits="arcmin", decUnits="arcmin", **self.kwargs)

        return rhoStats

266 

267 

def corrSpin0(ra, dec, k1, k2=None, raUnits="degrees", decUnits="degrees", **treecorrKwargs):
    """Correlate at most two scalar (spin-0) fields with treecorr.

    This is used to compute Rho0 statistics, given the appropriate spin-0
    (scalar) fields, usually fractional size residuals.

    Parameters
    ----------
    ra : `numpy.array`
        The right ascension values of entries in the catalog.
    dec : `numpy.array`
        The declination values of entries in the catalog.
    k1 : `numpy.array`
        The primary scalar field.
    k2 : `numpy.array`, optional
        The secondary scalar field. If `None` (default), the
        auto-correlation of ``k1`` is computed.
    raUnits : `str`, optional
        Unit of the right ascension values.
        Valid options are "degrees", "arcmin", "arcsec", "hours" or "radians".
    decUnits : `str`, optional
        Unit of the declination values.
        Valid options are "degrees", "arcmin", "arcsec", "hours" or "radians".
    **treecorrKwargs
        Keyword arguments to be passed to `treecorr.KKCorrelation`.

    Returns
    -------
    xy : `treecorr.KKCorrelation`
        A `treecorr.KKCorrelation` object containing the correlation function.
    """
    # Build one catalog per supplied field; a single catalog yields the
    # auto-correlation, two yield the cross-correlation.
    cats = [treecorr.Catalog(ra=ra, dec=dec, k=k1, ra_units=raUnits,
                             dec_units=decUnits)]
    if k2 is not None:
        cats.append(treecorr.Catalog(ra=ra, dec=dec, k=k2, ra_units=raUnits,
                                     dec_units=decUnits))
    xy = treecorr.KKCorrelation(**treecorrKwargs)
    xy.process(*cats)
    return xy

310 

311 

def corrSpin2(ra, dec, g1a, g2a, g1b=None, g2b=None, raUnits="degrees", decUnits="degrees", **treecorrKwargs):
    """Correlate at most two shear-like (spin-2) fields with treecorr.

    This is used to compute Rho statistics, given the appropriate spin-2
    (shear-like) fields.

    Parameters
    ----------
    ra : `numpy.array`
        The right ascension values of entries in the catalog.
    dec : `numpy.array`
        The declination values of entries in the catalog.
    g1a : `numpy.array`
        The first component of the primary shear-like field.
    g2a : `numpy.array`
        The second component of the primary shear-like field.
    g1b : `numpy.array`, optional
        The first component of the secondary shear-like field. If either
        ``g1b`` or ``g2b`` is `None` (default), the auto-correlation of the
        primary field is computed.
    g2b : `numpy.array`, optional
        The second component of the secondary shear-like field. If either
        ``g1b`` or ``g2b`` is `None` (default), the auto-correlation of the
        primary field is computed.
    raUnits : `str`, optional
        Unit of the right ascension values.
        Valid options are "degrees", "arcmin", "arcsec", "hours" or "radians".
    decUnits : `str`, optional
        Unit of the declination values.
        Valid options are "degrees", "arcmin", "arcsec", "hours" or "radians".
    **treecorrKwargs
        Keyword arguments to be passed to `treecorr.GGCorrelation`.

    Returns
    -------
    xy : `treecorr.GGCorrelation`
        A `treecorr.GGCorrelation` object containing the correlation function.
    """
    xy = treecorr.GGCorrelation(**treecorrKwargs)
    primary = treecorr.Catalog(ra=ra, dec=dec, g1=g1a, g2=g2a, ra_units=raUnits,
                               dec_units=decUnits)
    if g1b is None or g2b is None:
        # Secondary field incomplete: fall back to the auto-correlation.
        xy.process(primary)
        return xy
    secondary = treecorr.Catalog(ra=ra, dec=dec, g1=g1b, g2=g2b, ra_units=raUnits,
                                 dec_units=decUnits)
    xy.process(primary, secondary)
    return xy

358 

359 

def calculateTEx(catalogs, photoCalibs, astromCalibs, config):
    """Compute ellipticity residual correlation metrics.

    Parameters
    ----------
    catalogs : sequence
        Source catalogs to merge (passed through to ``mergeCatalogs``).
    photoCalibs : sequence
        Photometric calibrations matching ``catalogs``.
    astromCalibs : sequence
        Astrometric calibrations matching ``catalogs``.
    config
        Task configuration; reads ``nbins``, ``minSep``, ``maxSep``,
        ``column``, ``columnPsf``, ``shearConvention`` and ``rhoStat``.

    Returns
    -------
    result : `dict`
        Keys ``radius``, ``corr`` and ``corrErr`` (astropy quantities), or
        ``{'nomeas': nan}`` when too few sources pass the selection.
    """
    catalog = mergeCatalogs(catalogs, photoCalibs, astromCalibs)

    # Filtering should be pulled out into a separate function for standard
    # quality selections: point-like, high-SNR, undeblended sources only.
    snrMin = 50
    selection = (catalog['base_ClassificationExtendedness_value'] < 0.5) \
        & ((catalog['slot_PsfFlux_instFlux'] / catalog['slot_PsfFlux_instFluxErr']) > snrMin) \
        & (catalog['deblend_nChild'] == 0)

    nMinSources = 50
    if np.sum(selection) < nMinSources:
        # Not enough stars for a meaningful correlation measurement.
        return {'nomeas': np.nan * u.Unit('')}

    treecorrKwargs = dict(nbins=config.nbins,
                          min_sep=config.minSep,
                          max_sep=config.maxSep,
                          sep_units='arcmin')
    rhoStatistics = RhoStatistics(config.column, config.columnPsf, config.shearConvention, **treecorrKwargs)
    xy = rhoStatistics(catalog[selection])[config.rhoStat]

    radius = np.exp(xy.meanlogr) * u.arcmin
    if config.rhoStat == 0:
        # rhoStat 0 is a treecorr.KKCorrelation, which exposes .xi/.varxi
        # (NOT .varxip, which only exists on GGCorrelation).
        corr = xy.xi * u.Unit('')
        corrErr = np.sqrt(xy.varxi) * u.Unit('')
    else:
        # rhoStats 1-5 are treecorr.GGCorrelation objects (.xip/.varxip).
        corr = xy.xip * u.Unit('')
        corrErr = np.sqrt(xy.varxip) * u.Unit('')

    result = dict(radius=radius, corr=corr, corrErr=corrErr)
    return result