
#
# LSST Data Management System
#
# Copyright 2008-2016 AURA/LSST.
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program. If not,
# see <https://www.lsstcorp.org/LegalNotices/>.
#
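"""Tests for ingesting an HTM-indexed reference catalog and loading it back
with LoadIndexedReferenceObjectsTask (summary drawn from the test docstrings below).
"""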

import os
import unittest
from collections import Counter

import astropy.time
import astropy.units
import numpy as np

import lsst.geom
import lsst.afw.table as afwTable
import lsst.afw.geom as afwGeom
import lsst.daf.persistence as dafPersist
from lsst.meas.algorithms import (IngestIndexedReferenceTask, LoadIndexedReferenceObjectsTask,
                                  LoadIndexedReferenceObjectsConfig, getRefFluxField)
from lsst.meas.algorithms.loadReferenceObjects import hasNanojanskyFluxUnits
import lsst.utils

import ingestIndexTestBase

REGENERATE_COMPARISON = False  # Regenerate comparison data?


def make_coord(ra, dec):
    """Make an ICRS coord given its RA, Dec in degrees."""
    return lsst.geom.SpherePoint(ra, dec, lsst.geom.degrees)


class HtmIndexTestCase(ingestIndexTestBase.IngestIndexCatalogTestBase, lsst.utils.tests.TestCase):
    """Tests of ingesting and validating an HTM Indexed Reference Catalog.
    """

    def testSanity(self):
        """Sanity-check that compCats contains some entries with sources."""
        numWithSources = 0
        for idList in self.compCats.values():
            if len(idList) > 0:
                numWithSources += 1
        self.assertGreater(numWithSources, 0)

    def testAgainstPersisted(self):
        """Test that we can get a specific shard from a pre-persisted refcat.
        """
        shardId = 2222
        dataset_name = IngestIndexedReferenceTask.ConfigClass().dataset_config.ref_dataset_name
        dataId = self.indexer.makeDataId(shardId, dataset_name)
        self.assertTrue(self.testButler.datasetExists('ref_cat', dataId))
        refCat = self.testButler.get('ref_cat', dataId)
        if REGENERATE_COMPARISON:
            if os.path.exists(self.testCatPath):
                os.unlink(self.testCatPath)
            refCat.writeFits(self.testCatPath)
            self.fail("New comparison data written; unset REGENERATE_COMPARISON in order to proceed")

        ex1 = refCat.extract('*')
        testCat = afwTable.SimpleCatalog.readFits(self.testCatPath)

        ex2 = testCat.extract('*')
        self.assertEqual(set(ex1.keys()), set(ex2.keys()))
        for kk in ex1:
            np.testing.assert_array_almost_equal(ex1[kk], ex2[kk])

    def testValidateRaDecMag(self):
        config = self.makeConfig()
        config.validate()

        for name in ("ra_name", "dec_name", "mag_column_list"):
            with self.subTest(name=name):
                config = self.makeConfig()
                setattr(config, name, None)
                with self.assertRaises(ValueError):
                    config.validate()

    def testValidateRaDecErr(self):
        config = self.makeConfig(withRaDecErr=True)
        config.validate()

        for name in ("ra_err_name", "dec_err_name"):
            with self.subTest(name=name):
                config = self.makeConfig(withRaDecErr=True)
                setattr(config, name, None)
                with self.assertRaises(ValueError):
                    config.validate()

    def testValidateMagErr(self):
        config = self.makeConfig(withMagErr=True)
        config.validate()

        # test for missing names
        for name in config.mag_column_list:
            with self.subTest(name=name):
                config = self.makeConfig(withMagErr=True)
                del config.mag_err_column_map[name]
                with self.assertRaises(ValueError):
                    config.validate()

        # test for incorrect names
        for name in config.mag_column_list:
            with self.subTest(name=name):
                config = self.makeConfig(withMagErr=True)
                config.mag_err_column_map["badName"] = config.mag_err_column_map[name]
                del config.mag_err_column_map[name]
                with self.assertRaises(ValueError):
                    config.validate()

    def testValidatePm(self):
        basicNames = ["pm_ra_name", "pm_dec_name", "epoch_name", "epoch_format", "epoch_scale"]

        for withPmErr in (False, True):
            config = self.makeConfig(withPm=True, withPmErr=withPmErr)
            config.validate()
            del config

            if withPmErr:
                names = basicNames + ["pm_ra_err_name", "pm_dec_err_name"]
            else:
                names = basicNames
            for name in names:
                with self.subTest(name=name, withPmErr=withPmErr):
                    config = self.makeConfig(withPm=True, withPmErr=withPmErr)
                    setattr(config, name, None)
                    with self.assertRaises(ValueError):
                        config.validate()

    def testValidateParallax(self):
        """Validation should fail if any parallax-related fields are missing.
        """
        names = ["parallax_name", "epoch_name", "epoch_format", "epoch_scale", "parallax_err_name"]

        config = self.makeConfig(withParallax=True)
        config.validate()
        del config

        for name in names:
            with self.subTest(name=name):
                config = self.makeConfig(withParallax=True)
                setattr(config, name, None)
                with self.assertRaises(ValueError, msg=name):
                    config.validate()

    def testIngestSetsVersion(self):
        """Test that newly ingested catalogs get the correct version number set.
        """
        # Test with multiple files and standard config
        config = self.makeConfig(withRaDecErr=True, withMagErr=True, withPm=True, withPmErr=True)
        # don't use the default depth, to avoid taking the time to create thousands of file locks
        config.dataset_config.indexer.active.depth = self.depth
        IngestIndexedReferenceTask.parseAndRun(
            args=[self.input_dir, "--output", self.outPath + "/output_setsVersion",
                  self.skyCatalogFile],
            config=config)
        # A newly-ingested refcat should be marked format_version=1.
        loader = LoadIndexedReferenceObjectsTask(butler=dafPersist.Butler(
            self.outPath + "/output_setsVersion"))
        self.assertEqual(loader.dataset_config.format_version, 1)

    def testIngestConfigOverrides(self):
        """Test IngestIndexedReferenceTask with different configs."""
        config2 = self.makeConfig(withRaDecErr=True, withMagErr=True, withPm=True, withPmErr=True,
                                  withParallax=True)
        config2.ra_name = "ra"
        config2.dec_name = "dec"
        config2.dataset_config.ref_dataset_name = 'myrefcat'
        # Change the indexing depth to prove we can.
        # Smaller is better than larger because it makes fewer files.
        config2.dataset_config.indexer.active.depth = self.depth - 1
        config2.is_photometric_name = 'is_phot'
        config2.is_resolved_name = 'is_res'
        config2.is_variable_name = 'is_var'
        config2.id_name = 'id'
        config2.extra_col_names = ['val1', 'val2', 'val3']
        config2.file_reader.header_lines = 1
        config2.file_reader.colnames = [
            'id', 'ra', 'dec', 'ra_err', 'dec_err', 'a', 'a_err', 'b', 'b_err', 'is_phot',
            'is_res', 'is_var', 'val1', 'val2', 'val3', 'pm_ra', 'pm_dec', 'pm_ra_err',
            'pm_dec_err', 'parallax', 'parallax_err', 'unixtime',
        ]
        config2.file_reader.delimiter = '|'
        # this also tests changing the delimiter
        IngestIndexedReferenceTask.parseAndRun(
            args=[self.input_dir, "--output", self.outPath+"/output_override",
                  self.skyCatalogFileDelim], config=config2)

        # Test if we can get back the catalog with a non-standard dataset name
        butler = dafPersist.Butler(self.outPath+"/output_override")
        loaderConfig = LoadIndexedReferenceObjectsConfig()
        loaderConfig.ref_dataset_name = "myrefcat"
        loader = LoadIndexedReferenceObjectsTask(butler=butler, config=loaderConfig)
        self.checkAllRowsInRefcat(loader, self.skyCatalog, config2)

        # test that a catalog can be loaded even with a name not used for ingestion
        butler = dafPersist.Butler(self.testRepoPath)
        loaderConfig2 = LoadIndexedReferenceObjectsConfig()
        loaderConfig2.ref_dataset_name = self.testDatasetName
        loader = LoadIndexedReferenceObjectsTask(butler=butler, config=loaderConfig2)
        self.checkAllRowsInRefcat(loader, self.skyCatalog, config2)

    def testLoadIndexedReferenceConfig(self):
        """Make sure LoadIndexedReferenceConfig has the needed fields,
        including at least one from the base class LoadReferenceObjectsConfig.
        """
        config = LoadIndexedReferenceObjectsConfig()
        self.assertEqual(config.ref_dataset_name, "cal_ref_cat")
        self.assertEqual(config.defaultFilter, "")

    def testLoadSkyCircle(self):
        """Test LoadIndexedReferenceObjectsTask.loadSkyCircle with default config."""
        loader = LoadIndexedReferenceObjectsTask(butler=self.testButler)
        for tupl, idList in self.compCats.items():
            cent = ingestIndexTestBase.make_coord(*tupl)
            lcat = loader.loadSkyCircle(cent, self.searchRadius, filterName='a')
            self.assertTrue(lcat.refCat.isContiguous())
            self.assertFalse("camFlux" in lcat.refCat.schema)
            self.assertEqual(Counter(lcat.refCat['id']), Counter(idList))
            if len(lcat.refCat) > 0:
                # make sure there are no duplicate ids
                self.assertEqual(len(set(Counter(lcat.refCat['id']).values())), 1)
                self.assertEqual(len(set(Counter(idList).values())), 1)
                # A default-loaded sky circle should not have centroids
                self.assertNotIn("centroid_x", lcat.refCat.schema)
                self.assertNotIn("centroid_y", lcat.refCat.schema)
                self.assertNotIn("hasCentroid", lcat.refCat.schema)
            else:
                self.assertEqual(len(idList), 0)

    def testLoadPixelBox(self):
        """Test LoadIndexedReferenceObjectsTask.loadPixelBox with default config."""
        loader = LoadIndexedReferenceObjectsTask(butler=self.testButler)
        numFound = 0
        for tupl, idList in self.compCats.items():
            cent = ingestIndexTestBase.make_coord(*tupl)
            bbox = lsst.geom.Box2I(lsst.geom.Point2I(30, -5), lsst.geom.Extent2I(1000, 1004))  # arbitrary
            ctr_pix = bbox.getCenter()
            # catalog is sparse, so set pixel scale such that bbox encloses region
            # used to generate compCats
            pixel_scale = 2*self.searchRadius/max(bbox.getHeight(), bbox.getWidth())
            cdMatrix = afwGeom.makeCdMatrix(scale=pixel_scale)
            wcs = afwGeom.makeSkyWcs(crval=cent, crpix=ctr_pix, cdMatrix=cdMatrix)
            result = loader.loadPixelBox(bbox=bbox, wcs=wcs, filterName="a")
            self.assertFalse("camFlux" in result.refCat.schema)
            self.assertGreaterEqual(len(result.refCat), len(idList))
            numFound += len(result.refCat)
        self.assertGreater(numFound, 0)

    def testDefaultFilterAndFilterMap(self):
        """Test defaultFilter and filterMap parameters of LoadIndexedReferenceObjectsConfig."""
        config = LoadIndexedReferenceObjectsConfig()
        config.defaultFilter = "b"
        config.filterMap = {"aprime": "a"}
        loader = LoadIndexedReferenceObjectsTask(butler=self.testButler, config=config)
        for tupl, idList in self.compCats.items():
            cent = ingestIndexTestBase.make_coord(*tupl)
            lcat = loader.loadSkyCircle(cent, self.searchRadius)
            self.assertEqual(lcat.fluxField, "camFlux")
            if len(idList) > 0:
                defFluxFieldName = getRefFluxField(lcat.refCat.schema, None)
                self.assertTrue(defFluxFieldName in lcat.refCat.schema)
                aprimeFluxFieldName = getRefFluxField(lcat.refCat.schema, "aprime")
                self.assertTrue(aprimeFluxFieldName in lcat.refCat.schema)
                break  # just need one test

    def testProperMotion(self):
        """Test proper motion correction."""
        center = ingestIndexTestBase.make_coord(93.0, -90.0)
        loader = LoadIndexedReferenceObjectsTask(butler=self.testButler)
        references = loader.loadSkyCircle(center, self.searchRadius, filterName='a').refCat
        original = references.copy(True)

        # Zero epoch change --> no proper motion correction (except minor numerical effects)
        loader.applyProperMotions(references, self.epoch)
        self.assertFloatsAlmostEqual(references["coord_ra"], original["coord_ra"], rtol=1.0e-14)
        self.assertFloatsAlmostEqual(references["coord_dec"], original["coord_dec"], rtol=1.0e-14)
        self.assertFloatsEqual(references["coord_raErr"], original["coord_raErr"])
        self.assertFloatsEqual(references["coord_decErr"], original["coord_decErr"])

        # One year difference
        loader.applyProperMotions(references, self.epoch + 1.0*astropy.units.yr)
        self.assertFloatsEqual(references["pm_raErr"], original["pm_raErr"])
        self.assertFloatsEqual(references["pm_decErr"], original["pm_decErr"])
        for orig, ref in zip(original, references):
            self.assertAnglesAlmostEqual(orig.getCoord().separation(ref.getCoord()),
                                         self.properMotionAmt, maxDiff=1.0e-6*lsst.geom.arcseconds)
            self.assertAnglesAlmostEqual(orig.getCoord().bearingTo(ref.getCoord()),
                                         self.properMotionDir, maxDiff=1.0e-4*lsst.geom.arcseconds)
        # After one year of proper motion, each coordinate error should be the
        # quadrature sum of the original coordinate error and the proper-motion error.
        predictedRaErr = np.hypot(original["coord_raErr"], original["pm_raErr"])
        predictedDecErr = np.hypot(original["coord_decErr"], original["pm_decErr"])
        self.assertFloatsAlmostEqual(references["coord_raErr"], predictedRaErr)
        self.assertFloatsAlmostEqual(references["coord_decErr"], predictedDecErr)

    def testLoadVersion0(self):
        """Test reading a pre-written format_version=0 (Jy flux) catalog.
        It should be converted to have nJy fluxes.
        """
        path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/version0')
        loader = LoadIndexedReferenceObjectsTask(butler=dafPersist.Butler(path))
        self.assertEqual(loader.dataset_config.format_version, 0)
        result = loader.loadSkyCircle(ingestIndexTestBase.make_coord(10, 20),
                                      5*lsst.geom.degrees, filterName='a')
        self.assertTrue(hasNanojanskyFluxUnits(result.refCat.schema))
        catalog = afwTable.SimpleCatalog.readFits(os.path.join(path, 'ref_cats/cal_ref_cat/4022.fits'))
        self.assertFloatsEqual(catalog['a_flux']*1e9, result.refCat['a_flux'])
        self.assertFloatsEqual(catalog['a_fluxSigma']*1e9, result.refCat['a_fluxErr'])
        self.assertFloatsEqual(catalog['b_flux']*1e9, result.refCat['b_flux'])
        self.assertFloatsEqual(catalog['b_fluxSigma']*1e9, result.refCat['b_fluxErr'])

    def testLoadVersion1(self):
        """Test reading a format_version=1 catalog (fluxes unchanged)."""
        path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/version1')
        loader = LoadIndexedReferenceObjectsTask(butler=dafPersist.Butler(path))
        self.assertEqual(loader.dataset_config.format_version, 1)
        result = loader.loadSkyCircle(ingestIndexTestBase.make_coord(10, 20),
                                      5*lsst.geom.degrees, filterName='a')
        self.assertTrue(hasNanojanskyFluxUnits(result.refCat.schema))
        catalog = afwTable.SimpleCatalog.readFits(os.path.join(path, 'ref_cats/cal_ref_cat/4022.fits'))
        self.assertFloatsEqual(catalog['a_flux'], result.refCat['a_flux'])
        self.assertFloatsEqual(catalog['a_fluxErr'], result.refCat['a_fluxErr'])
        self.assertFloatsEqual(catalog['b_flux'], result.refCat['b_flux'])
        self.assertFloatsEqual(catalog['b_fluxErr'], result.refCat['b_fluxErr'])


class TestMemory(lsst.utils.tests.MemoryTestCase):
    pass


def setup_module(module):
    lsst.utils.tests.init()


if __name__ == "__main__":
    lsst.utils.tests.init()
    unittest.main()