Coverage for python/lsst/faro/utils/matcher.py: 8%

130 statements  

coverage.py v6.4.2, created at 2022-07-27 11:32 +0000

# This file is part of faro.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

from lsst.afw.geom import SkyWcs
from lsst.afw.image import PhotoCalib
from lsst.afw.table import (
    SchemaMapper,
    Field,
    MultiMatch,
    SimpleRecord,
    SourceCatalog,
    updateSourceCoords,
)
from lsst.faro.utils.calibrated_catalog import CalibratedCatalog
from lsst.faro.utils.prefilter import preFilter

import numpy as np
from astropy.table import join, Table
from typing import Dict, List

__all__ = (
    "matchCatalogs",
    "ellipticityFromCat",
    "ellipticity",
    "makeMatchedPhotom",
    "mergeCatalogs",
)


def matchCatalogs(
    inputs: List[SourceCatalog],
    photoCalibs: List[PhotoCalib],
    astromCalibs: List[SkyWcs],
    dataIds,
    matchRadius: float,
    config,
    logger=None,
):
    schema = inputs[0].schema
    mapper = SchemaMapper(schema)
    mapper.addMinimalSchema(schema)
    mapper.addOutputField(Field[float]("base_PsfFlux_snr", "PSF flux SNR"))
    mapper.addOutputField(Field[float]("base_PsfFlux_mag", "PSF magnitude"))
    mapper.addOutputField(
        Field[float]("base_PsfFlux_magErr", "PSF magnitude uncertainty")
    )
    # Resolve the ModelFlux alias explicitly: addOutputField('slot_ModelFlux_mag')
    # would add a field with that literal name rather than following the alias.
    aliasMap = schema.getAliasMap()
    # Possibly not needed since base_GaussianFlux is the default, but this ought to be safe
    modelName = (
        aliasMap["slot_ModelFlux"]
        if "slot_ModelFlux" in aliasMap.keys()
        else "base_GaussianFlux"
    )
    mapper.addOutputField(Field[float](f"{modelName}_mag", "Model magnitude"))
    mapper.addOutputField(
        Field[float](f"{modelName}_magErr", "Model magnitude uncertainty")
    )
    mapper.addOutputField(Field[float](f"{modelName}_snr", "Model flux SNR"))
    mapper.addOutputField(Field[float]("e1", "Source Ellipticity 1"))
    mapper.addOutputField(Field[float]("e2", "Source Ellipticity 2"))
    mapper.addOutputField(Field[float]("psf_e1", "PSF Ellipticity 1"))
    mapper.addOutputField(Field[float]("psf_e2", "PSF Ellipticity 2"))
    mapper.addOutputField(Field[np.int32]("filt", "filter code"))
    newSchema = mapper.getOutputSchema()
    newSchema.setAliasMap(schema.getAliasMap())

    # Create an object that matches multiple catalogs with the same schema
    mmatch = MultiMatch(
        newSchema,
        dataIdFormat={"visit": np.int64, "detector": np.int32},
        radius=matchRadius,
        RecordClass=SimpleRecord,
    )

    # Create the new extended source catalog
    srcVis = SourceCatalog(newSchema)

    filter_dict = {
        "u": 1,
        "g": 2,
        "r": 3,
        "i": 4,
        "z": 5,
        "y": 6,
        "HSC-U": 1,
        "HSC-G": 2,
        "HSC-R": 3,
        "HSC-I": 4,
        "HSC-Z": 5,
        "HSC-Y": 6,
    }

    # Sort by visit, detector, then filter
    vislist = [v["visit"] for v in dataIds]
    ccdlist = [v["detector"] for v in dataIds]
    filtlist = [v["band"] for v in dataIds]
    tab_vids = Table([vislist, ccdlist, filtlist], names=["vis", "ccd", "filt"])
    sortinds = np.argsort(tab_vids, order=("vis", "ccd", "filt"))

    for ind in sortinds:
        oldSrc = inputs[ind]
        photoCalib = photoCalibs[ind]
        wcs = astromCalibs[ind]
        dataId = dataIds[ind]

        if logger:
            logger.debug(
                "%d sources in ccd %s visit %s",
                len(oldSrc),
                dataId["detector"],
                dataId["visit"],
            )

        # Create a temporary catalog with the extended output schema
        tmpCat = SourceCatalog(SourceCatalog(newSchema).table)
        tmpCat.extend(oldSrc, mapper=mapper)

        filtnum = filter_dict[dataId["band"]]
        tmpCat["filt"] = np.repeat(filtnum, len(oldSrc))

        tmpCat["base_PsfFlux_snr"][:] = (
            tmpCat["base_PsfFlux_instFlux"] / tmpCat["base_PsfFlux_instFluxErr"]
        )

        updateSourceCoords(wcs, tmpCat)

        photoCalib.instFluxToMagnitude(tmpCat, "base_PsfFlux", "base_PsfFlux")
        tmpCat["slot_ModelFlux_snr"][:] = (
            tmpCat["slot_ModelFlux_instFlux"] / tmpCat["slot_ModelFlux_instFluxErr"]
        )
        photoCalib.instFluxToMagnitude(tmpCat, "slot_ModelFlux", "slot_ModelFlux")

        _, psf_e1, psf_e2 = ellipticityFromCat(oldSrc, slot_shape="slot_PsfShape")
        _, star_e1, star_e2 = ellipticityFromCat(oldSrc, slot_shape="slot_Shape")
        tmpCat["e1"][:] = star_e1
        tmpCat["e2"][:] = star_e2
        tmpCat["psf_e1"][:] = psf_e1
        tmpCat["psf_e2"][:] = psf_e2

        tmpCat = preFilter(tmpCat, snrMin=config.snrMin, snrMax=config.snrMax,
                           brightMagCut=config.brightMagCut, faintMagCut=config.faintMagCut,
                           extended=config.selectExtended)

        srcVis.extend(tmpCat, False)
        mmatch.add(catalog=tmpCat, dataId=dataId)

    # Complete the match, returning a catalog that includes
    # all matched sources with object IDs that can be used to group them.
    matchCat = mmatch.finish()

    # Create a mapping object that allows the matches to be manipulated
    # as a mapping of object ID to catalog of sources.

    # I don't think I can persist a group view, so this may need to be called
    # in a subsequent task:
    # allMatches = GroupView.build(matchCat)

    return srcVis, matchCat
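# Illustrative follow-up sketch (hypothetical, not part of this module): a
# downstream task could build the group view that is deliberately skipped
# above, assuming GroupView is importable from lsst.afw.table as the
# commented-out line suggests:
#
#     from lsst.afw.table import GroupView
#     srcVis, matchCat = matchCatalogs(inputs, photoCalibs, astromCalibs,
#                                      dataIds, matchRadius, config)
#     allMatches = GroupView.build(matchCat)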


def ellipticityFromCat(cat, slot_shape="slot_Shape"):
    """Calculate the ellipticity of the shapes in a catalog from the 2nd moments.

    Parameters
    ----------
    cat : `lsst.afw.table.BaseCatalog`
        A catalog with 'slot_Shape' defined and '_xx', '_xy', '_yy'
        entries for the target of 'slot_Shape'.
        E.g., 'slot_Shape' defined as 'base_SdssShape',
        with 'base_SdssShape_xx', 'base_SdssShape_xy', 'base_SdssShape_yy' defined.
    slot_shape : str, optional
        Specify which slot shape is requested. The intended use is to get the
        PSF shape estimates by specifying slot_shape='slot_PsfShape'
        instead of the default slot_shape='slot_Shape'.

    Returns
    -------
    e, e1, e2 : complex, float, float
        Complex ellipticity, real part, imaginary part.
    """
    i_xx, i_xy, i_yy = (
        cat.get(slot_shape + "_xx"),
        cat.get(slot_shape + "_xy"),
        cat.get(slot_shape + "_yy"),
    )
    return ellipticity(i_xx, i_xy, i_yy)
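# Illustrative usage (hypothetical variable name `srcCat`, not part of this
# module): extracting both the PSF-model and the measured-source ellipticities,
# as matchCatalogs() does above:
#
#     _, psf_e1, psf_e2 = ellipticityFromCat(srcCat, slot_shape="slot_PsfShape")
#     _, e1, e2 = ellipticityFromCat(srcCat, slot_shape="slot_Shape")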


def ellipticity(i_xx, i_xy, i_yy):
    """Calculate ellipticity from second moments.

    Parameters
    ----------
    i_xx : float or `numpy.array`
        Second moment along the x axis.
    i_xy : float or `numpy.array`
        Cross term of the second moments.
    i_yy : float or `numpy.array`
        Second moment along the y axis.

    Returns
    -------
    e, e1, e2 : (complex, float, float) or (numpy.array, numpy.array, numpy.array)
        Complex ellipticity, real component, imaginary component.
    """
    e = (i_xx - i_yy + 2j * i_xy) / (i_xx + i_yy)
    e1 = np.real(e)
    e2 = np.imag(e)
    return e, e1, e2
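# Worked example (illustrative only): a source elongated along the x axis with
# i_xx = 2, i_yy = 1, and no cross term has a purely real, positive ellipticity
# e = (2 - 1) / (2 + 1) = 1/3, so e1 ~ 0.333 and e2 = 0; a perfectly round
# source (i_xx == i_yy, i_xy == 0) has e = 0.
#
#     >>> e, e1, e2 = ellipticity(2.0, 0.0, 1.0)
#     >>> round(float(e1), 3), round(float(e2), 3)
#     (0.333, 0.0)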


def makeMatchedPhotom(data: Dict[str, List[CalibratedCatalog]], logger=None):
    """Merge catalogs from multiple bands into a single matched catalog,
    joined on source id.
    """

    cat_all = None

    for band, cat_list in data.items():
        cat_tmp = []
        calibs_photo = []
        for cat_calib in cat_list:
            cat_tmp_i = cat_calib.catalog
            # Select point-like sources with clean pixel flags.
            qual_cuts = (
                (cat_tmp_i["base_ClassificationExtendedness_value"] < 0.5)
                & ~cat_tmp_i["base_PixelFlags_flag_saturated"]
                & ~cat_tmp_i["base_PixelFlags_flag_cr"]
                & ~cat_tmp_i["base_PixelFlags_flag_bad"]
                & ~cat_tmp_i["base_PixelFlags_flag_edge"]
            )
            cat_tmp.append(cat_tmp_i[qual_cuts])
            calibs_photo.append(cat_calib.photoCalib)

        if logger:
            logger.debug("Merging %d catalogs for band %s.", len(cat_tmp), band)
        cat_tmp = mergeCatalogs(cat_tmp, calibs_photo, models=['base_PsfFlux'],
                                logger=logger)
        if cat_tmp:
            if not cat_tmp.isContiguous():
                if logger:
                    logger.debug("Deep copying the %s band catalog to make it "
                                 "contiguous.", band)
                cat_tmp = cat_tmp.copy(deep=True)

            cat_tmp = cat_tmp.asAstropy()

            # Put the bandpass name in the column names:
            for c in cat_tmp.colnames:
                if c != "id":
                    cat_tmp[c].name = f"{c}_{band}"

            if cat_all:
                if logger:
                    logger.debug("Joining the %s band catalog with the main "
                                 "catalog.", band)
                cat_all = join(cat_all, cat_tmp, keys="id")
            else:
                cat_all = cat_tmp

    # Return the astropy table of matched catalogs:
    return cat_all
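# Illustrative sketch (hypothetical inputs, not part of this module): given
# per-band lists of CalibratedCatalog objects, e.g.
#
#     data = {"g": calib_cats_g, "r": calib_cats_r}
#     matched = makeMatchedPhotom(data)
#
# the returned astropy table has band-suffixed columns such as
# 'base_PsfFlux_mag_g' and 'base_PsfFlux_mag_r', joined row-by-row on 'id'.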


def mergeCatalogs(
    catalogs,
    photoCalibs=None,
    astromCalibs=None,
    models=["slot_PsfFlux"],
    applyExternalWcs=False,
    logger=None,
):
    """Merge catalogs and optionally apply photometric and astrometric
    calibrations.
    """

    schema = catalogs[0].schema
    mapper = SchemaMapper(schema)
    mapper.addMinimalSchema(schema)
    aliasMap = schema.getAliasMap()
    for model in models:
        modelName = aliasMap[model] if model in aliasMap.keys() else model
        mapper.addOutputField(
            Field[float](f"{modelName}_mag", f"{modelName} magnitude")
        )
        mapper.addOutputField(
            Field[float](f"{modelName}_magErr", f"{modelName} magnitude uncertainty")
        )
    newSchema = mapper.getOutputSchema()
    newSchema.setAliasMap(schema.getAliasMap())

    size = sum([len(cat) for cat in catalogs])
    catalog = SourceCatalog(newSchema)
    catalog.reserve(size)

    for ii, cat in enumerate(catalogs):
        # Create a temporary catalog with the output schema. Is this step needed?
        tempCat = SourceCatalog(SourceCatalog(newSchema).table)
        tempCat.extend(cat, mapper=mapper)

        if applyExternalWcs and astromCalibs is not None:
            wcs = astromCalibs[ii]
            updateSourceCoords(wcs, tempCat)

        if photoCalibs is not None:
            photoCalib = photoCalibs[ii]
            if photoCalib is not None:
                for model in models:
                    modelName = aliasMap[model] if model in aliasMap.keys() else model
                    photoCalib.instFluxToMagnitude(tempCat, modelName, modelName)

        catalog.extend(tempCat)

        if logger:
            logger.verbose("Merged %d catalog(s) out of %d.", ii + 1, len(catalogs))

    return catalog
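# Illustrative sketch (hypothetical variable names, not part of this module):
# merging per-detector source catalogs while applying their photometric
# calibrations, as makeMatchedPhotom() does for each band above:
#
#     merged = mergeCatalogs(cats, photoCalibs=calibs, models=["base_PsfFlux"])
#
# With applyExternalWcs=True and a matching list of SkyWcs objects passed as
# astromCalibs, the source coordinates are also recomputed before merging.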