Coverage for tests/test_htmIndex.py: 14%

#
# LSST Data Management System
#
# Copyright 2008-2016 AURA/LSST.
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program. If not,
# see <https://www.lsstcorp.org/LegalNotices/>.
#

import os
import unittest
import unittest.mock  # used below via unittest.mock.patch.object
from collections import Counter

import astropy.time
import astropy.units
import numpy as np

import lsst.geom
import lsst.afw.table as afwTable
import lsst.afw.geom as afwGeom
import lsst.daf.persistence as dafPersist
import lsst.log  # used below via lsst.log.UsePythonLogging
from lsst.meas.algorithms import (IngestIndexedReferenceTask, LoadIndexedReferenceObjectsTask,
                                  LoadIndexedReferenceObjectsConfig, getRefFluxField)
from lsst.meas.algorithms.loadReferenceObjects import hasNanojanskyFluxUnits
import lsst.utils

from ingestIndexTestBase import (makeConvertConfig, ConvertReferenceCatalogTestBase,
                                 make_coord)


class IngestIndexTaskValidateTestCase(lsst.utils.tests.TestCase):
    """Test validation of IngestIndexedReferenceConfig."""

    def testValidateRaDecMag(self):
        config = makeConvertConfig()
        config.validate()

        for name in ("ra_name", "dec_name", "mag_column_list"):
            with self.subTest(name=name):
                config = makeConvertConfig()
                setattr(config, name, None)
                with self.assertRaises(ValueError):
                    config.validate()

    def testValidateRaDecErr(self):
        # check that a basic config validates
        config = makeConvertConfig(withRaDecErr=True)
        config.validate()

        # check that a config with any of these fields missing does not validate
        for name in ("ra_err_name", "dec_err_name", "coord_err_unit"):
            with self.subTest(name=name):
                config = makeConvertConfig(withRaDecErr=True)
                setattr(config, name, None)
                with self.assertRaises(ValueError):
                    config.validate()

        # check that coord_err_unit must be an astropy unit
        config = makeConvertConfig(withRaDecErr=True)
        config.coord_err_unit = "nonsense unit"
        with self.assertRaisesRegex(ValueError, "is not a valid astropy unit string"):
            config.validate()

    def testValidateMagErr(self):
        config = makeConvertConfig(withMagErr=True)
        config.validate()

        # test for missing names
        for name in config.mag_column_list:
            with self.subTest(name=name):
                config = makeConvertConfig(withMagErr=True)
                del config.mag_err_column_map[name]
                with self.assertRaises(ValueError):
                    config.validate()

        # test for incorrect names
        for name in config.mag_column_list:
            with self.subTest(name=name):
                config = makeConvertConfig(withMagErr=True)
                config.mag_err_column_map["badName"] = config.mag_err_column_map[name]
                del config.mag_err_column_map[name]
                with self.assertRaises(ValueError):
                    config.validate()

    def testValidatePm(self):
        basicNames = ["pm_ra_name", "pm_dec_name", "epoch_name", "epoch_format", "epoch_scale"]

        for withPmErr in (False, True):
            config = makeConvertConfig(withPm=True, withPmErr=withPmErr)
            config.validate()
            del config

            if withPmErr:
                names = basicNames + ["pm_ra_err_name", "pm_dec_err_name"]
            else:
                names = basicNames
            for name in names:
                with self.subTest(name=name, withPmErr=withPmErr):
                    config = makeConvertConfig(withPm=True, withPmErr=withPmErr)
                    setattr(config, name, None)
                    with self.assertRaises(ValueError):
                        config.validate()

    def testValidateParallax(self):
        """Validation should fail if any parallax-related fields are missing.
        """
        names = ["parallax_name", "epoch_name", "epoch_format", "epoch_scale", "parallax_err_name"]

        config = makeConvertConfig(withParallax=True)
        config.validate()
        del config

        for name in names:
            with self.subTest(name=name):
                config = makeConvertConfig(withParallax=True)
                setattr(config, name, None)
                with self.assertRaises(ValueError, msg=name):
                    config.validate()

class ReferenceCatalogIngestAndLoadTestCase(ConvertReferenceCatalogTestBase, lsst.utils.tests.TestCase):
    """Tests of converting, ingesting, loading and validating an HTM Indexed
    Reference Catalog (gen2 code path).
    """
    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.obs_test_dir = lsst.utils.getPackageDir('obs_test')
        cls.input_dir = os.path.join(cls.obs_test_dir, "data", "input")

        # Run the ingest once to create a butler repo we can compare to
        config = makeConvertConfig(withMagErr=True, withRaDecErr=True, withPm=True, withPmErr=True,
                                   withParallax=True)
        # Pregenerated gen2 test refcats have the "cal_ref_cat" name.
        config.dataset_config.ref_dataset_name = "cal_ref_cat"
        config.dataset_config.indexer.active.depth = cls.depth
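        # cls.depth is assumed to be the small HTM depth defined by the shared test base
        # (ConvertReferenceCatalogTestBase); keeping the index shallow keeps the number of
        # shard files written into this throwaway repo small.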

        config.id_name = 'id'
        config.pm_scale = 1000.0  # arcsec/yr --> mas/yr
        config.parallax_scale = 1e3  # arcsec -> milliarcsec
        # np.savetxt prepends '# ' to the header lines, so use a reader that understands that
        config.file_reader.format = 'ascii.commented_header'
        IngestIndexedReferenceTask.parseAndRun(args=[cls.input_dir, "--output", cls.testRepoPath,
                                                     cls.skyCatalogFile], config=config)
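        # parseAndRun drives the gen2 command-line task: it converts skyCatalogFile into
        # HTM-indexed shards and ingests them into a fresh butler repo at cls.testRepoPath,
        # which the tests below read back through cls.testButler.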

        cls.testButler = dafPersist.Butler(cls.testRepoPath)

    @classmethod
    def tearDownClass(cls):
        del cls.testButler

    def testSanity(self):
        """Sanity-check that compCats contains some entries with sources."""
        numWithSources = 0
        for idList in self.compCats.values():
            if len(idList) > 0:
                numWithSources += 1
        self.assertGreater(numWithSources, 0)

    def testIngestSetsVersion(self):
        """Test that newly ingested catalogs get the correct version number set.
        """
        def runTest(withRaDecErr):
            outputPath = os.path.join(self.outPath, "output_setsVersion"
                                      + ("_withRaDecErr" if withRaDecErr else ""))

            # Test with multiple files and standard config
            config = makeConvertConfig(withRaDecErr=withRaDecErr, withMagErr=True,
                                       withPm=True, withPmErr=True)
            # Pregenerated gen2 test refcats have the "cal_ref_cat" name.
            config.dataset_config.ref_dataset_name = "cal_ref_cat"
            # don't use the default depth, to avoid taking the time to create thousands of file locks
            config.dataset_config.indexer.active.depth = self.depth
            IngestIndexedReferenceTask.parseAndRun(
                args=[self.input_dir, "--output", outputPath, self.skyCatalogFile],
                config=config)
            # A newly-ingested refcat should be marked format_version=1.
            loader = LoadIndexedReferenceObjectsTask(butler=dafPersist.Butler(outputPath))
            self.assertEqual(loader.dataset_config.format_version, 1)
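            # (format_version=1 marks nJy fluxes; version 0, exercised by testLoadVersion0
            # below, stored fluxes in Jy and is converted on load.)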

        runTest(withRaDecErr=True)
        runTest(withRaDecErr=False)

    def testIngestConfigOverrides(self):
        """Test IngestIndexedReferenceTask with different configs.
        """
        config2 = makeConvertConfig(withRaDecErr=True, withMagErr=True, withPm=True, withPmErr=True,
                                    withParallax=True)
        config2.ra_name = "ra"
        config2.dec_name = "dec"
        config2.dataset_config.ref_dataset_name = 'myrefcat'
        # Change the indexing depth to prove we can.
        # Smaller is better than larger because it makes fewer files.
        config2.dataset_config.indexer.active.depth = self.depth - 1
        config2.is_photometric_name = 'is_phot'
        config2.is_resolved_name = 'is_res'
        config2.is_variable_name = 'is_var'
        config2.id_name = 'id'
        config2.extra_col_names = ['val1', 'val2', 'val3']
        config2.file_reader.header_lines = 1
        config2.file_reader.colnames = [
            'id', 'ra', 'dec', 'ra_err', 'dec_err', 'a', 'a_err', 'b', 'b_err', 'is_phot',
            'is_res', 'is_var', 'val1', 'val2', 'val3', 'pm_ra', 'pm_dec', 'pm_ra_err',
            'pm_dec_err', 'parallax', 'parallax_err', 'unixtime',
        ]
        config2.file_reader.delimiter = '|'
        # this also tests changing the delimiter
        IngestIndexedReferenceTask.parseAndRun(
            args=[self.input_dir, "--output", self.outPath+"/output_override",
                  self.skyCatalogFileDelim], config=config2)

        # Test if we can get back the catalog with a non-standard dataset name
        butler = dafPersist.Butler(self.outPath+"/output_override")
        loaderConfig = LoadIndexedReferenceObjectsConfig()
        loaderConfig.ref_dataset_name = "myrefcat"
        loader = LoadIndexedReferenceObjectsTask(butler=butler, config=loaderConfig)
        self.checkAllRowsInRefcat(loader, self.skyCatalog, config2)

    def testLoadSkyCircle(self):
        """Test LoadIndexedReferenceObjectsTask.loadSkyCircle with default config."""
        loader = LoadIndexedReferenceObjectsTask(butler=self.testButler)
        for tupl, idList in self.compCats.items():
            cent = make_coord(*tupl)
            lcat = loader.loadSkyCircle(cent, self.searchRadius, filterName='a')
            self.assertTrue(lcat.refCat.isContiguous())
            self.assertFalse("camFlux" in lcat.refCat.schema)
            self.assertEqual(Counter(lcat.refCat['id']), Counter(idList))
            if len(lcat.refCat) > 0:
                # make sure there are no duplicate ids
                self.assertEqual(len(set(Counter(lcat.refCat['id']).values())), 1)
                self.assertEqual(len(set(Counter(idList).values())), 1)
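                # (Counter(...).values() are the per-id occurrence counts; a single distinct
                # count means every id appears equally often, so a duplicated id among
                # otherwise-unique ids would make these assertions fail.)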

                # A default-loaded sky circle should not have centroids
                self.assertNotIn("centroid_x", lcat.refCat.schema)
                self.assertNotIn("centroid_y", lcat.refCat.schema)
                self.assertNotIn("hasCentroid", lcat.refCat.schema)
            else:
                self.assertEqual(len(idList), 0)

    def testLoadPixelBox(self):
        """Test LoadIndexedReferenceObjectsTask.loadPixelBox with default config."""
        loader = LoadIndexedReferenceObjectsTask(butler=self.testButler)
        numFound = 0
        for tupl, idList in self.compCats.items():
            cent = make_coord(*tupl)
            bbox = lsst.geom.Box2I(lsst.geom.Point2I(30, -5), lsst.geom.Extent2I(1000, 1004))  # arbitrary
            ctr_pix = bbox.getCenter()
            # catalog is sparse, so set pixel scale such that bbox encloses region
            # used to generate compCats
            pixel_scale = 2*self.searchRadius/max(bbox.getHeight(), bbox.getWidth())
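            # (with this scale the ~1000-pixel box spans roughly 2*searchRadius on the sky,
            # so loadPixelBox should cover the circle used to build compCats for this center)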

            cdMatrix = afwGeom.makeCdMatrix(scale=pixel_scale)
            wcs = afwGeom.makeSkyWcs(crval=cent, crpix=ctr_pix, cdMatrix=cdMatrix)
            result = loader.loadPixelBox(bbox=bbox, wcs=wcs, filterName="a")
            # The following is to ensure the reference catalog coords are
            # getting corrected for proper motion when an epoch is provided.
            # Use an extreme epoch so that differences in corrected coords
            # will be significant. Note that this simply tests that the coords
            # do indeed change when the epoch is passed. It makes no attempt
            # at assessing the correctness of the change. This is left to the
            # explicit testProperMotion() test below.
            resultWithEpoch = loader.loadPixelBox(bbox=bbox, wcs=wcs, filterName="a",
                                                  epoch=astropy.time.Time(30000, format='mjd', scale="tai"))
            self.assertFloatsNotEqual(result.refCat["coord_ra"], resultWithEpoch.refCat["coord_ra"],
                                      rtol=1.0e-4)
            self.assertFloatsNotEqual(result.refCat["coord_dec"], resultWithEpoch.refCat["coord_dec"],
                                      rtol=1.0e-4)
            self.assertFalse("camFlux" in result.refCat.schema)
            self.assertGreaterEqual(len(result.refCat), len(idList))
            numFound += len(result.refCat)
        self.assertGreater(numFound, 0)

    def testDefaultFilterAndFilterMap(self):
        """Test defaultFilter and filterMap parameters of LoadIndexedReferenceObjectsConfig."""
        config = LoadIndexedReferenceObjectsConfig()
        config.defaultFilter = "b"
        config.filterMap = {"aprime": "a"}
        loader = LoadIndexedReferenceObjectsTask(butler=self.testButler, config=config)
        for tupl, idList in self.compCats.items():
            cent = make_coord(*tupl)
            lcat = loader.loadSkyCircle(cent, self.searchRadius)
            self.assertEqual(lcat.fluxField, "camFlux")
            if len(idList) > 0:
                defFluxFieldName = getRefFluxField(lcat.refCat.schema, None)
                self.assertTrue(defFluxFieldName in lcat.refCat.schema)
                aprimeFluxFieldName = getRefFluxField(lcat.refCat.schema, "aprime")
                self.assertTrue(aprimeFluxFieldName in lcat.refCat.schema)
                break  # just need one test

    def testProperMotion(self):
        """Test proper motion correction"""
        center = make_coord(93.0, -90.0)
        loader = LoadIndexedReferenceObjectsTask(butler=self.testButler)
        references = loader.loadSkyCircle(center, self.searchRadius, filterName='a').refCat
        original = references.copy(True)

        # Zero epoch change --> no proper motion correction (except minor numerical effects)
        loader.applyProperMotions(references, self.epoch)
        self.assertFloatsAlmostEqual(references["coord_ra"], original["coord_ra"], rtol=1.0e-14)
        self.assertFloatsAlmostEqual(references["coord_dec"], original["coord_dec"], rtol=1.0e-14)
        self.assertFloatsEqual(references["coord_raErr"], original["coord_raErr"])
        self.assertFloatsEqual(references["coord_decErr"], original["coord_decErr"])

        # One year difference
        loader.applyProperMotions(references, self.epoch + 1.0*astropy.units.yr)
        self.assertFloatsEqual(references["pm_raErr"], original["pm_raErr"])
        self.assertFloatsEqual(references["pm_decErr"], original["pm_decErr"])
        for orig, ref in zip(original, references):
            self.assertAnglesAlmostEqual(orig.getCoord().separation(ref.getCoord()),
                                         self.properMotionAmt, maxDiff=1.0e-6*lsst.geom.arcseconds)
            self.assertAnglesAlmostEqual(orig.getCoord().bearingTo(ref.getCoord()),
                                         self.properMotionDir, maxDiff=1.0e-4*lsst.geom.arcseconds)
        predictedRaErr = np.hypot(original["coord_raErr"], original["pm_raErr"])
        predictedDecErr = np.hypot(original["coord_decErr"], original["pm_decErr"])
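        # After one year of proper motion the coordinate errors should grow by the
        # proper-motion errors added in quadrature, hence np.hypot for the prediction.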

        self.assertFloatsAlmostEqual(references["coord_raErr"], predictedRaErr)
        self.assertFloatsAlmostEqual(references["coord_decErr"], predictedDecErr)

    def testRequireProperMotion(self):
        """Tests of the requireProperMotion config field.

        Requiring proper motion corrections for a catalog that does not
        contain valid PM data should result in an exception.

        `data/testHtmIndex-ps1-bad-pm.fits` is a random shard taken from the
        ps1_pv3_3pi_20170110 refcat (that has the unitless PM fields),
        stripped to only 2 rows: we patch it in here to simplify test setup.
        """
        path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/testHtmIndex-ps1-bad-pm.fits')
        refcatData = lsst.afw.table.SimpleCatalog.readFits(path)
        center = make_coord(93.0, -90.0)
        epoch = self.epoch + 1.0*astropy.units.yr

        # malformed catalogs should warn and raise if we require proper motion corrections
        config = LoadIndexedReferenceObjectsConfig()
        config.requireProperMotion = True
        config.anyFilterMapsToThis = "g"  # to use a catalog not made for obs_test
        loader = LoadIndexedReferenceObjectsTask(butler=self.testButler, config=config)
        with unittest.mock.patch.object(self.testButler, 'get', return_value=refcatData):
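            # Patching Butler.get makes every dataset fetch inside loadSkyCircle return the
            # bad-PM shard above, regardless of which HTM shards the region actually needs.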

            msg = "requireProperMotion=True but refcat pm_ra field is not an Angle"
            with self.assertRaisesRegex(RuntimeError, msg):
                loader.loadSkyCircle(center, self.searchRadius, epoch=epoch)

        # not specifying `epoch` with requireProperMotion=True should raise for any catalog
        config = LoadIndexedReferenceObjectsConfig()
        config.requireProperMotion = True
        config.anyFilterMapsToThis = "g"  # to use a catalog not made for obs_test
        loader = LoadIndexedReferenceObjectsTask(butler=self.testButler, config=config)
        msg = "requireProperMotion=True but epoch not provided to loader"
        with self.assertRaisesRegex(RuntimeError, msg):
            loader.loadSkyCircle(center, self.searchRadius, epoch=None)

        # malformed catalogs should just warn if we do not require proper motion corrections
        config = LoadIndexedReferenceObjectsConfig()
        config.requireProperMotion = False
        config.anyFilterMapsToThis = "g"  # to use a catalog not made for obs_test
        loader = LoadIndexedReferenceObjectsTask(butler=self.testButler, config=config)
        with unittest.mock.patch.object(self.testButler, 'get', return_value=refcatData):
            with lsst.log.UsePythonLogging(), self.assertLogs(level="WARNING") as cm:
                loader.loadSkyCircle(center, self.searchRadius, epoch=epoch)
            warnLog1 = "Reference catalog pm_ra field is not an Angle; cannot apply proper motion."
            self.assertEqual(cm.output, [f"WARNING:LoadIndexedReferenceObjectsTask:{warnLog1}"])

    def testLoadVersion0(self):
        """Test reading a pre-written format_version=0 (Jy flux) catalog.

        It should be converted to have nJy fluxes.
        """
        path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/version0')
        loader = LoadIndexedReferenceObjectsTask(butler=dafPersist.Butler(path))
        self.assertEqual(loader.dataset_config.format_version, 0)
        result = loader.loadSkyCircle(make_coord(10, 20),
                                      5*lsst.geom.degrees, filterName='a')
        self.assertTrue(hasNanojanskyFluxUnits(result.refCat.schema))
        catalog = afwTable.SimpleCatalog.readFits(os.path.join(path, 'ref_cats/cal_ref_cat/4022.fits'))
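        # Version-0 shards store fluxes in Jy with "_fluxSigma" error columns; the loader
        # is expected to return nJy with "_fluxErr" columns, hence the 1e9 factor below.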

        self.assertFloatsEqual(catalog['a_flux']*1e9, result.refCat['a_flux'])
        self.assertFloatsEqual(catalog['a_fluxSigma']*1e9, result.refCat['a_fluxErr'])
        self.assertFloatsEqual(catalog['b_flux']*1e9, result.refCat['b_flux'])
        self.assertFloatsEqual(catalog['b_fluxSigma']*1e9, result.refCat['b_fluxErr'])

    def testLoadVersion1(self):
        """Test reading a format_version=1 catalog (fluxes unchanged)."""
        path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/version1')
        loader = LoadIndexedReferenceObjectsTask(butler=dafPersist.Butler(path))
        self.assertEqual(loader.dataset_config.format_version, 1)
        result = loader.loadSkyCircle(make_coord(10, 20),
                                      5*lsst.geom.degrees, filterName='a')
        self.assertTrue(hasNanojanskyFluxUnits(result.refCat.schema))
        catalog = afwTable.SimpleCatalog.readFits(os.path.join(path, 'ref_cats/cal_ref_cat/4022.fits'))
        self.assertFloatsEqual(catalog['a_flux'], result.refCat['a_flux'])
        self.assertFloatsEqual(catalog['a_fluxErr'], result.refCat['a_fluxErr'])
        self.assertFloatsEqual(catalog['b_flux'], result.refCat['b_flux'])
        self.assertFloatsEqual(catalog['b_fluxErr'], result.refCat['b_fluxErr'])


class TestMemory(lsst.utils.tests.MemoryTestCase):
    pass


def setup_module(module):
    lsst.utils.tests.init()


if __name__ == "__main__":
    lsst.utils.tests.init()
    unittest.main()