Coverage for tests/test_htmIndex.py: 13%

#
# LSST Data Management System
#
# Copyright 2008-2016 AURA/LSST.
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program. If not,
# see <https://www.lsstcorp.org/LegalNotices/>.
#

import os
import unittest
import unittest.mock
from collections import Counter

import astropy.time
import astropy.units
import numpy as np

import lsst.geom
import lsst.afw.table as afwTable
import lsst.afw.geom as afwGeom
import lsst.daf.persistence as dafPersist
import lsst.log
from lsst.meas.algorithms import (IngestIndexedReferenceTask, LoadIndexedReferenceObjectsTask,
                                  LoadIndexedReferenceObjectsConfig, getRefFluxField)
from lsst.meas.algorithms.loadReferenceObjects import hasNanojanskyFluxUnits
import lsst.utils.tests

from ingestIndexTestBase import (makeIngestIndexConfig, IngestIndexCatalogTestBase,
                                 make_coord)

REGENERATE_COMPARISON = False  # Regenerate comparison data?
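# When True, testAgainstPersisted rewrites the on-disk comparison catalog and then
# deliberately fails, so the regenerated data cannot be silently committed as a passing run.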


class IngestIndexTaskValidateTestCase(lsst.utils.tests.TestCase):
    """Test validation of IngestIndexReferenceConfig."""
    def testValidateRaDecMag(self):
        config = makeIngestIndexConfig()
        config.validate()
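
        # a config missing any of these required fields should fail validation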
        for name in ("ra_name", "dec_name", "mag_column_list"):
            with self.subTest(name=name):
                config = makeIngestIndexConfig()
                setattr(config, name, None)
                with self.assertRaises(ValueError):
                    config.validate()

    def testValidateRaDecErr(self):
        # check that a basic config validates
        config = makeIngestIndexConfig(withRaDecErr=True)
        config.validate()

        # check that a config with any of these fields missing does not validate
        for name in ("ra_err_name", "dec_err_name", "coord_err_unit"):
            with self.subTest(name=name):
                config = makeIngestIndexConfig(withRaDecErr=True)
                setattr(config, name, None)
                with self.assertRaises(ValueError):
                    config.validate()

        # check that coord_err_unit must be an astropy unit
        config = makeIngestIndexConfig(withRaDecErr=True)
        config.coord_err_unit = "nonsense unit"
        with self.assertRaisesRegex(ValueError, "is not a valid astropy unit string"):
            config.validate()

    def testValidateMagErr(self):
        config = makeIngestIndexConfig(withMagErr=True)
        config.validate()

        # test for missing names
        for name in config.mag_column_list:
            with self.subTest(name=name):
                config = makeIngestIndexConfig(withMagErr=True)
                del config.mag_err_column_map[name]
                with self.assertRaises(ValueError):
                    config.validate()

        # test for incorrect names
        for name in config.mag_column_list:
            with self.subTest(name=name):
                config = makeIngestIndexConfig(withMagErr=True)
                config.mag_err_column_map["badName"] = config.mag_err_column_map[name]
                del config.mag_err_column_map[name]
                with self.assertRaises(ValueError):
                    config.validate()

    def testValidatePm(self):
        basicNames = ["pm_ra_name", "pm_dec_name", "epoch_name", "epoch_format", "epoch_scale"]

        for withPmErr in (False, True):
            config = makeIngestIndexConfig(withPm=True, withPmErr=withPmErr)
            config.validate()
            del config

            if withPmErr:
                names = basicNames + ["pm_ra_err_name", "pm_dec_err_name"]
            else:
                names = basicNames
            for name in names:
                with self.subTest(name=name, withPmErr=withPmErr):
                    config = makeIngestIndexConfig(withPm=True, withPmErr=withPmErr)
                    setattr(config, name, None)
                    with self.assertRaises(ValueError):
                        config.validate()

    def testValidateParallax(self):
        """Validation should fail if any parallax-related fields are missing.
        """
        names = ["parallax_name", "epoch_name", "epoch_format", "epoch_scale", "parallax_err_name"]

        config = makeIngestIndexConfig(withParallax=True)
        config.validate()
        del config

        for name in names:
            with self.subTest(name=name):
                config = makeIngestIndexConfig(withParallax=True)
                setattr(config, name, None)
                with self.assertRaises(ValueError, msg=name):
                    config.validate()


class IngestIndexReferenceTaskTestCase(IngestIndexCatalogTestBase, lsst.utils.tests.TestCase):
    """Tests of ingesting and validating an HTM Indexed Reference Catalog.
    """
    def testSanity(self):
        """Sanity-check that compCats contains some entries with sources."""
        numWithSources = 0
        for idList in self.compCats.values():
            if len(idList) > 0:
                numWithSources += 1
        self.assertGreater(numWithSources, 0)

    def testAgainstPersisted(self):
        """Test that we can get a specific shard from a pre-persisted refcat.
        """
        shardId = 2222
        dataset_name = IngestIndexedReferenceTask.ConfigClass().dataset_config.ref_dataset_name
        dataId = self.indexer.makeDataId(shardId, dataset_name)
        self.assertTrue(self.testButler.datasetExists('ref_cat', dataId))
        refCat = self.testButler.get('ref_cat', dataId)
        if REGENERATE_COMPARISON:
            if os.path.exists(self.testCatPath):
                os.unlink(self.testCatPath)
            refCat.writeFits(self.testCatPath)
            self.fail("New comparison data written; unset REGENERATE_COMPARISON in order to proceed")
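
        # extract('*') returns a mapping of field name -> column array, so the two
        # catalogs can be compared column by column below.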
        ex1 = refCat.extract('*')
        testCat = afwTable.SimpleCatalog.readFits(self.testCatPath)

        ex2 = testCat.extract('*')
        self.assertEqual(set(ex1.keys()), set(ex2.keys()))
        for key in ex1:
            np.testing.assert_array_almost_equal(ex1[key], ex2[key], err_msg=f"{key} values not equal")

    def testIngestSetsVersion(self):
        """Test that newly ingested catalogs get the correct version number set.
        """
        def runTest(withRaDecErr):
            outputPath = os.path.join(self.outPath,
                                      "output_setsVersion" + ("_withRaDecErr" if withRaDecErr else ""))
            # Test with multiple files and standard config
            config = makeIngestIndexConfig(withRaDecErr=withRaDecErr, withMagErr=True,
                                           withPm=True, withPmErr=True)
            # don't use the default depth, to avoid taking the time to create thousands of file locks
            config.dataset_config.indexer.active.depth = self.depth
            IngestIndexedReferenceTask.parseAndRun(
                args=[self.input_dir, "--output", outputPath, self.skyCatalogFile],
                config=config)
            # A newly-ingested refcat should be marked format_version=1.
            loader = LoadIndexedReferenceObjectsTask(butler=dafPersist.Butler(outputPath))
            self.assertEqual(loader.dataset_config.format_version, 1)

        runTest(withRaDecErr=True)
        runTest(withRaDecErr=False)

    def testIngestConfigOverrides(self):
        """Test IngestIndexedReferenceTask with different configs.
        """
        config2 = makeIngestIndexConfig(withRaDecErr=True, withMagErr=True, withPm=True, withPmErr=True,
                                        withParallax=True)
        config2.ra_name = "ra"
        config2.dec_name = "dec"
        config2.dataset_config.ref_dataset_name = 'myrefcat'
        # Change the indexing depth to prove we can.
        # Smaller is better than larger because it makes fewer files.
        config2.dataset_config.indexer.active.depth = self.depth - 1
        config2.is_photometric_name = 'is_phot'
        config2.is_resolved_name = 'is_res'
        config2.is_variable_name = 'is_var'
        config2.id_name = 'id'
        config2.extra_col_names = ['val1', 'val2', 'val3']
        config2.file_reader.header_lines = 1
        config2.file_reader.colnames = [
            'id', 'ra', 'dec', 'ra_err', 'dec_err', 'a', 'a_err', 'b', 'b_err', 'is_phot',
            'is_res', 'is_var', 'val1', 'val2', 'val3', 'pm_ra', 'pm_dec', 'pm_ra_err',
            'pm_dec_err', 'parallax', 'parallax_err', 'unixtime',
        ]
        config2.file_reader.delimiter = '|'
        # this also tests changing the delimiter
        IngestIndexedReferenceTask.parseAndRun(
            args=[self.input_dir, "--output", self.outPath+"/output_override",
                  self.skyCatalogFileDelim], config=config2)

        # Test if we can get back the catalog with a non-standard dataset name
        butler = dafPersist.Butler(self.outPath+"/output_override")
        loaderConfig = LoadIndexedReferenceObjectsConfig()
        loaderConfig.ref_dataset_name = "myrefcat"
        loader = LoadIndexedReferenceObjectsTask(butler=butler, config=loaderConfig)
        self.checkAllRowsInRefcat(loader, self.skyCatalog, config2)

        # test that a catalog can be loaded even with a name not used for ingestion
        butler = dafPersist.Butler(self.testRepoPath)
        loaderConfig2 = LoadIndexedReferenceObjectsConfig()
        loaderConfig2.ref_dataset_name = self.testDatasetName
        loader = LoadIndexedReferenceObjectsTask(butler=butler, config=loaderConfig2)
        self.checkAllRowsInRefcat(loader, self.skyCatalog, config2)

    def testLoadIndexedReferenceConfig(self):
        """Make sure LoadIndexedReferenceConfig has the needed fields, including
        at least one inherited from the base class LoadReferenceObjectsConfig.
        """
        config = LoadIndexedReferenceObjectsConfig()
        self.assertEqual(config.ref_dataset_name, "cal_ref_cat")
        self.assertEqual(config.defaultFilter, "")

    def testLoadSkyCircle(self):
        """Test LoadIndexedReferenceObjectsTask.loadSkyCircle with default config."""
        loader = LoadIndexedReferenceObjectsTask(butler=self.testButler)
        for tupl, idList in self.compCats.items():
            cent = make_coord(*tupl)
            lcat = loader.loadSkyCircle(cent, self.searchRadius, filterName='a')
            self.assertTrue(lcat.refCat.isContiguous())
            self.assertFalse("camFlux" in lcat.refCat.schema)
            self.assertEqual(Counter(lcat.refCat['id']), Counter(idList))
            if len(lcat.refCat) > 0:
                # make sure there are no duplicate ids
                self.assertEqual(len(set(Counter(lcat.refCat['id']).values())), 1)
                self.assertEqual(len(set(Counter(idList).values())), 1)
                # A default-loaded sky circle should not have centroids
                self.assertNotIn("centroid_x", lcat.refCat.schema)
                self.assertNotIn("centroid_y", lcat.refCat.schema)
                self.assertNotIn("hasCentroid", lcat.refCat.schema)
            else:
                self.assertEqual(len(idList), 0)

    def testLoadPixelBox(self):
        """Test LoadIndexedReferenceObjectsTask.loadPixelBox with default config."""
        loader = LoadIndexedReferenceObjectsTask(butler=self.testButler)
        numFound = 0
        for tupl, idList in self.compCats.items():
            cent = make_coord(*tupl)
            bbox = lsst.geom.Box2I(lsst.geom.Point2I(30, -5), lsst.geom.Extent2I(1000, 1004))  # arbitrary
            ctr_pix = bbox.getCenter()
            # catalog is sparse, so set pixel scale such that bbox encloses region
            # used to generate compCats
            pixel_scale = 2*self.searchRadius/max(bbox.getHeight(), bbox.getWidth())
            cdMatrix = afwGeom.makeCdMatrix(scale=pixel_scale)
            wcs = afwGeom.makeSkyWcs(crval=cent, crpix=ctr_pix, cdMatrix=cdMatrix)
            result = loader.loadPixelBox(bbox=bbox, wcs=wcs, filterName="a")
            # The following is to ensure the reference catalog coords are
            # getting corrected for proper motion when an epoch is provided.
            # Use an extreme epoch so that differences in corrected coords
            # will be significant. Note that this simply tests that the coords
            # do indeed change when the epoch is passed. It makes no attempt
            # at assessing the correctness of the change. This is left to the
            # explicit testProperMotion() test below.
            resultWithEpoch = loader.loadPixelBox(bbox=bbox, wcs=wcs, filterName="a",
                                                  epoch=astropy.time.Time(30000, format='mjd', scale="tai"))
            self.assertFloatsNotEqual(result.refCat["coord_ra"], resultWithEpoch.refCat["coord_ra"],
                                      rtol=1.0e-4)
            self.assertFloatsNotEqual(result.refCat["coord_dec"], resultWithEpoch.refCat["coord_dec"],
                                      rtol=1.0e-4)
            self.assertFalse("camFlux" in result.refCat.schema)
            self.assertGreaterEqual(len(result.refCat), len(idList))
            numFound += len(result.refCat)
        self.assertGreater(numFound, 0)

    def testDefaultFilterAndFilterMap(self):
        """Test defaultFilter and filterMap parameters of LoadIndexedReferenceObjectsConfig."""
        config = LoadIndexedReferenceObjectsConfig()
        config.defaultFilter = "b"
        config.filterMap = {"aprime": "a"}
        loader = LoadIndexedReferenceObjectsTask(butler=self.testButler, config=config)
        for tupl, idList in self.compCats.items():
            cent = make_coord(*tupl)
            lcat = loader.loadSkyCircle(cent, self.searchRadius)
            self.assertEqual(lcat.fluxField, "camFlux")
            if len(idList) > 0:
                defFluxFieldName = getRefFluxField(lcat.refCat.schema, None)
                self.assertTrue(defFluxFieldName in lcat.refCat.schema)
                aprimeFluxFieldName = getRefFluxField(lcat.refCat.schema, "aprime")
                self.assertTrue(aprimeFluxFieldName in lcat.refCat.schema)
                break  # just need one test

    def testProperMotion(self):
        """Test proper motion correction"""
        center = make_coord(93.0, -90.0)
        loader = LoadIndexedReferenceObjectsTask(butler=self.testButler)
        references = loader.loadSkyCircle(center, self.searchRadius, filterName='a').refCat
        original = references.copy(True)

        # Zero epoch change --> no proper motion correction (except minor numerical effects)
        loader.applyProperMotions(references, self.epoch)
        self.assertFloatsAlmostEqual(references["coord_ra"], original["coord_ra"], rtol=1.0e-14)
        self.assertFloatsAlmostEqual(references["coord_dec"], original["coord_dec"], rtol=1.0e-14)
        self.assertFloatsEqual(references["coord_raErr"], original["coord_raErr"])
        self.assertFloatsEqual(references["coord_decErr"], original["coord_decErr"])

        # One year difference
        loader.applyProperMotions(references, self.epoch + 1.0*astropy.units.yr)
        self.assertFloatsEqual(references["pm_raErr"], original["pm_raErr"])
        self.assertFloatsEqual(references["pm_decErr"], original["pm_decErr"])
        for orig, ref in zip(original, references):
            self.assertAnglesAlmostEqual(orig.getCoord().separation(ref.getCoord()),
                                         self.properMotionAmt, maxDiff=1.0e-6*lsst.geom.arcseconds)
            self.assertAnglesAlmostEqual(orig.getCoord().bearingTo(ref.getCoord()),
                                         self.properMotionDir, maxDiff=1.0e-4*lsst.geom.arcseconds)
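
        # After a one-year correction the coordinate uncertainties should grow by the
        # proper-motion uncertainties, added in quadrature.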
        predictedRaErr = np.hypot(original["coord_raErr"], original["pm_raErr"])
        predictedDecErr = np.hypot(original["coord_decErr"], original["pm_decErr"])
        self.assertFloatsAlmostEqual(references["coord_raErr"], predictedRaErr)
        self.assertFloatsAlmostEqual(references["coord_decErr"], predictedDecErr)

    def testRequireProperMotion(self):
        """Tests of the requireProperMotion config field.

        Requiring proper motion corrections for a catalog that does not
        contain valid PM data should result in an exception.

        `data/testHtmIndex-ps1-bad-pm.fits` is a random shard taken from the
        ps1_pv3_3pi_20170110 refcat (that has the unitless PM fields),
        stripped to only 2 rows: we patch it in here to simplify test setup.
        """
        path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/testHtmIndex-ps1-bad-pm.fits')
        refcatData = afwTable.SimpleCatalog.readFits(path)
        center = make_coord(93.0, -90.0)
        epoch = self.epoch + 1.0*astropy.units.yr

        # malformed catalogs should warn and raise if we require proper motion corrections
        config = LoadIndexedReferenceObjectsConfig()
        config.requireProperMotion = True
        config.anyFilterMapsToThis = "g"  # to use a catalog not made for obs_test
        loader = LoadIndexedReferenceObjectsTask(butler=self.testButler, config=config)
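
        # Patch Butler.get so the loader receives the bad-PM shard instead of the
        # catalog ingested into the test repo.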
        with unittest.mock.patch.object(self.testButler, 'get', return_value=refcatData):
            msg = "requireProperMotion=True but refcat pm_ra field is not an Angle"
            with self.assertRaisesRegex(RuntimeError, msg):
                loader.loadSkyCircle(center, self.searchRadius, epoch=epoch)

        # not specifying `epoch` with requireProperMotion=True should raise for any catalog
        config = LoadIndexedReferenceObjectsConfig()
        config.requireProperMotion = True
        config.anyFilterMapsToThis = "g"  # to use a catalog not made for obs_test
        loader = LoadIndexedReferenceObjectsTask(butler=self.testButler, config=config)
        msg = "requireProperMotion=True but epoch not provided to loader"
        with self.assertRaisesRegex(RuntimeError, msg):
            loader.loadSkyCircle(center, self.searchRadius, epoch=None)

        # malformed catalogs should just warn if we do not require proper motion corrections
        config = LoadIndexedReferenceObjectsConfig()
        config.requireProperMotion = False
        config.anyFilterMapsToThis = "g"  # to use a catalog not made for obs_test
        loader = LoadIndexedReferenceObjectsTask(butler=self.testButler, config=config)
        with unittest.mock.patch.object(self.testButler, 'get', return_value=refcatData):
            with lsst.log.UsePythonLogging(), self.assertLogs(level="WARNING") as cm:
                loader.loadSkyCircle(center, self.searchRadius, epoch=epoch)
        warnLog1 = "Reference catalog pm_ra field is not an Angle; cannot apply proper motion."
        self.assertEqual(cm.output, [f"WARNING:LoadIndexedReferenceObjectsTask:{warnLog1}"])

    def testLoadVersion0(self):
        """Test reading a pre-written format_version=0 (Jy flux) catalog.

        It should be converted to have nJy fluxes.
        """
        path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/version0')
        loader = LoadIndexedReferenceObjectsTask(butler=dafPersist.Butler(path))
        self.assertEqual(loader.dataset_config.format_version, 0)
        result = loader.loadSkyCircle(make_coord(10, 20),
                                      5*lsst.geom.degrees, filterName='a')
        self.assertTrue(hasNanojanskyFluxUnits(result.refCat.schema))
        catalog = afwTable.SimpleCatalog.readFits(os.path.join(path, 'ref_cats/cal_ref_cat/4022.fits'))
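        # version 0 stored fluxes in Jy; the loader should have converted them to nJy
        # (1 Jy = 1e9 nJy) and renamed the fluxSigma columns to fluxErr.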
        self.assertFloatsEqual(catalog['a_flux']*1e9, result.refCat['a_flux'])
        self.assertFloatsEqual(catalog['a_fluxSigma']*1e9, result.refCat['a_fluxErr'])
        self.assertFloatsEqual(catalog['b_flux']*1e9, result.refCat['b_flux'])
        self.assertFloatsEqual(catalog['b_fluxSigma']*1e9, result.refCat['b_fluxErr'])

    def testLoadVersion1(self):
        """Test reading a format_version=1 catalog (fluxes unchanged)."""
        path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/version1')
        loader = LoadIndexedReferenceObjectsTask(butler=dafPersist.Butler(path))
        self.assertEqual(loader.dataset_config.format_version, 1)
        result = loader.loadSkyCircle(make_coord(10, 20),
                                      5*lsst.geom.degrees, filterName='a')
        self.assertTrue(hasNanojanskyFluxUnits(result.refCat.schema))
        catalog = afwTable.SimpleCatalog.readFits(os.path.join(path, 'ref_cats/cal_ref_cat/4022.fits'))
        self.assertFloatsEqual(catalog['a_flux'], result.refCat['a_flux'])
        self.assertFloatsEqual(catalog['a_fluxErr'], result.refCat['a_fluxErr'])
        self.assertFloatsEqual(catalog['b_flux'], result.refCat['b_flux'])
        self.assertFloatsEqual(catalog['b_fluxErr'], result.refCat['b_fluxErr'])


class TestMemory(lsst.utils.tests.MemoryTestCase):
    pass


def setup_module(module):
    lsst.utils.tests.init()


if __name__ == "__main__":
    lsst.utils.tests.init()
    unittest.main()