Coverage for python/lsst/obs/base/ingest_tests.py: 33%
208 statements
coverage.py v7.2.7, created at 2023-07-14 20:02 +0000
# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

"""Base class for writing Gen3 raw data ingest tests."""

__all__ = ("IngestTestBase",)

import abc
import os
import shutil
import tempfile
import unittest

import lsst.afw.cameraGeom
import lsst.afw.cameraGeom.testUtils  # For assertDetectorsEqual
import lsst.obs.base
from lsst.daf.butler import Butler, Registry
from lsst.daf.butler.cli.butler import cli as butlerCli
from lsst.daf.butler.cli.utils import LogCliRunner
from lsst.pipe.base import Instrument
from lsst.resources import ResourcePath
from lsst.utils import doImportType

from . import script

class IngestTestBase(metaclass=abc.ABCMeta):
    """Base class for Gen3 ingest tests. Subclass from this, then from
    `unittest.TestCase`, to get a working test suite.
    """
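    # An illustrative sketch (not taken from any real package) of how a
    # concrete obs package might use this base class. The instrument class,
    # paths, data ID, and filter values below are hypothetical placeholders,
    # and the FilterLabel line assumes lsst.afw.image has been imported.
    #
    #     class MyCamIngestTestCase(IngestTestBase, unittest.TestCase):
    #         instrumentClassName = "lsst.obs.mycam.MyCam"
    #         ingestDir = os.path.dirname(__file__)
    #         file = os.path.join(ingestDir, "data", "raw-0001.fits")
    #         dataIds = [{"instrument": "MyCam", "exposure": 1, "detector": 0}]
    #         filterLabel = lsst.afw.image.FilterLabel(physical="r_01", band="r")
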
    ingestDir = ""
    """Root path to ingest files into. Typically `obs_package/tests/`; the
    actual directory will be a tempdir under this one.
    """

    ingestDatasetTypeName = "raw"
    """The DatasetType to use for the ingest.

    If this is not an Exposure dataset type, the tests will be more limited.
    """

    dataIds = []
    """List of butler data IDs of files that should have been ingested."""

    file = ""
    """Full path to a file to ingest in tests."""

    filterLabel = None
    """The lsst.afw.image.FilterLabel that should be returned by the above
    file."""

    rawIngestTask = "lsst.obs.base.RawIngestTask"
    """The task to use in the ingest test."""

    curatedCalibrationDatasetTypes = None
    """List or tuple of dataset types that should be present after calling
    writeCuratedCalibrations. If `None`, writeCuratedCalibrations will
    not be called and the test will be skipped."""

    defineVisitsTask = lsst.obs.base.DefineVisitsTask
    """The task to use to define visits from groups of exposures.
    This is ignored if ``visits`` is `None`.
    """

    visits = {}
    """A dictionary mapping visit data IDs to the lists of exposure data IDs
    that are associated with them.
    If this is empty (but not `None`), visit definition will be run but no
    visits will be expected (e.g. because no exposures are on-sky
    observations).
    """

    seed_config = None
    """Location of a seed configuration file to pass to butler create.

    Useful if additional formatters or storage classes need to be defined.
    """

    @property
    @abc.abstractmethod
    def instrumentClassName(self):
        """The fully qualified instrument class name.

        Returns
        -------
        `str`
            The fully qualified instrument class name.
        """
        pass

    @property
    def instrumentClass(self):
        """The instrument class."""
        return doImportType(self.instrumentClassName)

    @property
    def instrumentName(self):
        """The name of the instrument.

        Returns
        -------
        `str`
            The name of the instrument.
        """
        return self.instrumentClass.getName()

    @classmethod
    def setUpClass(cls):
        # Use a temporary working directory.
        cls.root = tempfile.mkdtemp(dir=cls.ingestDir)
        cls._createRepo()

        # Register the instrument and its static metadata.
        cls._registerInstrument()

        # Determine the relevant datastore root to use for testing.
        butler = Butler(cls.root)
        roots = butler.get_datastore_roots()
        assert len(roots) == 1  # Only one datastore.
        cls.datastore_root = list(roots.values())[0]

    def setUp(self):
        # Want a unique run name per test.
        self.outputRun = "raw_ingest_" + self.id()

    @classmethod
    def tearDownClass(cls):
        if os.path.exists(cls.root):
            shutil.rmtree(cls.root, ignore_errors=True)

    def verifyIngest(self, files=None, cli=False, fullCheck=False):
        """Test that RawIngestTask ingested the expected files.

        Parameters
        ----------
        files : `list` [`str`], or None
            List of files to be ingested, or None to use ``self.file``.
        fullCheck : `bool`, optional
            If `True`, read the full raw dataset and check component
            consistency. If `False`, check that a component can be read
            but do not read the entire raw exposure.

        Notes
        -----
        Reading all the ingested test data can be expensive. The code paths
        for reading the second raw are the same as for reading the first, so
        we gain nothing by doing full checks of everything. Full pixel data
        are therefore only read for the first dataset from the file, and not
        even then if the caller requests otherwise. This only really affects
        files that contain multiple datasets.
        """
        butler = Butler(self.root, run=self.outputRun)
        datasets = list(butler.registry.queryDatasets(self.ingestDatasetTypeName, collections=self.outputRun))
        self.assertEqual(len(datasets), len(self.dataIds))

        # Get the URI to the first dataset and check it is inside the
        # datastore.
        datasetUri = butler.getURI(datasets[0])
        self.assertIsNotNone(datasetUri.relative_to(self.datastore_root))

        # Get the relevant dataset type.
        datasetType = butler.registry.getDatasetType(self.ingestDatasetTypeName)

        for dataId in self.dataIds:
            # For testing we only read the entire dataset the first time
            # round if this is an Exposure. If it's not an Exposure
            # we always read it completely but we don't read components
            # because for an arbitrary dataset type we can't easily tell
            # what component to test.

            if not datasetType.storageClass.name.startswith("Exposure"):
                exposure = butler.get(self.ingestDatasetTypeName, dataId)
                # Could be anything, so there is nothing to test by default.
                continue

            # Check that we can read metadata from a raw.
            metadata = butler.get(f"{self.ingestDatasetTypeName}.metadata", dataId)
            if not fullCheck:
                continue
            fullCheck = False
            exposure = butler.get(self.ingestDatasetTypeName, dataId)

            # Comparing headers will not work directly because of header
            # fix-up provenance.
            metadata_headers = metadata.toDict()
            exposure_headers = exposure.getMetadata().toDict()
            metadata_headers.pop("HIERARCH ASTRO METADATA FIX DATE", None)
            exposure_headers.pop("HIERARCH ASTRO METADATA FIX DATE", None)
            self.assertEqual(metadata_headers, exposure_headers)

            # Since components follow a different code path we check that
            # the WCS matches and also that at least the shape of the image
            # is the same (rather than doing per-pixel equality).
            wcs = butler.get(f"{self.ingestDatasetTypeName}.wcs", dataId)
            self.assertEqual(wcs, exposure.getWcs())

            rawImage = butler.get(f"{self.ingestDatasetTypeName}.image", dataId)
            self.assertEqual(rawImage.getBBox(), exposure.getBBox())

            # Check that the filter label got the correct band.
            filterLabel = butler.get(f"{self.ingestDatasetTypeName}.filter", dataId)
            self.assertEqual(filterLabel, self.filterLabel)

            # Check that the exposure's Detector is the same as the component
            # we would read (this is tricky for LSST, which modifies its
            # detector at read time; for most other cameras it should be
            # trivially satisfied).
            detector = butler.get(f"{self.ingestDatasetTypeName}.detector", dataId)
            self.assertDetectorsEqual(detector, exposure.getDetector(), compareTransforms=False)

        self.checkRepo(files=files)

    def checkRepo(self, files=None):
        """Check the state of the repository after ingest.

        This is an optional hook provided for subclasses; by default it does
        nothing.

        Parameters
        ----------
        files : `list` [`str`], or None
            List of files to be ingested, or None to use ``self.file``.
        """
        pass

    @classmethod
    def _createRepo(cls):
        """Use the Click `testing` module to call the butler command line API
        to create a repository."""
        runner = LogCliRunner()
        args = []
        if cls.seed_config:
            args.extend(["--seed-config", cls.seed_config])
        result = runner.invoke(butlerCli, ["create", cls.root, *args])
        # Classmethod, so assertEqual does not work.
        assert result.exit_code == 0, f"output: {result.output} exception: {result.exception}"

    def _ingestRaws(self, transfer, file=None):
        """Use the Click `testing` module to call the butler command line API
        to ingest raws.

        Parameters
        ----------
        transfer : `str`
            The external data transfer type.
        file : `str`
            Path to a file to ingest instead of the default associated with
            the object.
        """
        if file is None:
            file = self.file
        runner = LogCliRunner()
        result = runner.invoke(
            butlerCli,
            [
                "ingest-raws",
                self.root,
                file,
                "--output-run",
                self.outputRun,
                "--transfer",
                transfer,
                "--ingest-task",
                self.rawIngestTask,
            ],
        )
        self.assertEqual(result.exit_code, 0, f"output: {result.output} exception: {result.exception}")

    @classmethod
    def _registerInstrument(cls):
        """Use the Click `testing` module to call the butler command line API
        to register the instrument."""
        runner = LogCliRunner()
        result = runner.invoke(butlerCli, ["register-instrument", cls.root, cls.instrumentClassName])
        # Classmethod, so assertEqual does not work.
        assert result.exit_code == 0, f"output: {result.output} exception: {result.exception}"

    def _writeCuratedCalibrations(self):
        """Use the Click `testing` module to call the butler command line API
        to write curated calibrations."""
        runner = LogCliRunner()
        result = runner.invoke(butlerCli, ["write-curated-calibrations", self.root, self.instrumentName])
        self.assertEqual(result.exit_code, 0, f"output: {result.output} exception: {result.exception}")

    def testLink(self):
        self._ingestRaws(transfer="link")
        self.verifyIngest()

    def testSymLink(self):
        self._ingestRaws(transfer="symlink")
        self.verifyIngest()

    def testDirect(self):
        self._ingestRaws(transfer="direct")

        # Check that it really did have a URI outside of datastore.
        srcUri = ResourcePath(self.file, forceAbsolute=True)
        butler = Butler(self.root, run=self.outputRun)
        datasets = list(butler.registry.queryDatasets(self.ingestDatasetTypeName, collections=self.outputRun))
        datastoreUri = butler.getURI(datasets[0])
        self.assertEqual(datastoreUri, srcUri)

    def testCopy(self):
        self._ingestRaws(transfer="copy")
        # Only test full read of raws for the copy test. No need to do it
        # in the other tests since the formatter will be the same in all
        # cases.
        self.verifyIngest(fullCheck=True)

    def testHardLink(self):
        try:
            self._ingestRaws(transfer="hardlink")
            # Running ingest through the Click testing infrastructure causes
            # the original exception indicating that we can't hard-link
            # on this filesystem to be turned into a nonzero exit code, which
            # then trips the test assertion.
        except (AssertionError, PermissionError) as err:
            raise unittest.SkipTest(
                "Skipping hard-link test because input data is on a different filesystem."
            ) from err
        self.verifyIngest()

    def testInPlace(self):
        """Test that files already in the directory can be added to the
        registry in-place.
        """
        butler = Butler(self.root, run=self.outputRun)

        # If the test uses an index file, the index file needs to also
        # appear in the datastore root along with the file to be ingested.
        # In that scenario the file name being used for ingest cannot
        # be modified and must have the same name as found in the index
        # file itself.
        source_file_uri = ResourcePath(self.file)
        index_file = source_file_uri.dirname().join("_index.json")
        pathInStore = source_file_uri.basename()
        if index_file.exists():
            os.symlink(index_file.ospath, self.datastore_root.join("_index.json").ospath)
        else:
            # No index file, so we are free to pick any name.
            pathInStore = "prefix-" + pathInStore

        # Create a symlink to the original file so that it looks like it
        # is now inside the datastore.
        newPath = self.datastore_root.join(pathInStore)
        os.symlink(os.path.abspath(self.file), newPath.ospath)

        # If there is a sidecar file it needs to be linked in as well,
        # since the ingest code does not follow symlinks.
        sidecar_uri = ResourcePath(source_file_uri).updatedExtension(".json")
        if sidecar_uri.exists():
            newSidecar = ResourcePath(newPath).updatedExtension(".json")
            os.symlink(sidecar_uri.ospath, newSidecar.ospath)

        # Run ingest with auto mode since that should automatically determine
        # that an in-place ingest is happening.
        self._ingestRaws(transfer="auto", file=newPath.ospath)
        self.verifyIngest()

        # Recreate a butler post-ingest (the earlier one won't see the
        # ingested files).
        butler = Butler(self.root, run=self.outputRun)

        # Check that the URI associated with this path is the right one.
        uri = butler.getURI(self.ingestDatasetTypeName, self.dataIds[0])
        self.assertEqual(uri.relative_to(self.datastore_root), pathInStore)

    def testFailOnConflict(self):
        """Re-ingesting the same data into the repository should fail."""
        self._ingestRaws(transfer="symlink")
        with self.assertRaises(Exception):
            self._ingestRaws(transfer="symlink")

    def testWriteCuratedCalibrations(self):
        """Test that we can ingest the curated calibrations, and read them
        with `loadCamera` both before and after.
        """
        if self.curatedCalibrationDatasetTypes is None:
            raise unittest.SkipTest("Class requests disabling of writeCuratedCalibrations test")

        butler = Butler(self.root, writeable=False)
        collection = self.instrumentClass().makeCalibrationCollectionName()

        # Trying to load a camera with a data ID not known to the registry
        # is an error, because we can't get any temporal information.
        with self.assertRaises(LookupError):
            lsst.obs.base.loadCamera(butler, {"exposure": 0}, collections=collection)

        # Ingest raws in order to get some exposure records.
        self._ingestRaws(transfer="auto")

        # loadCamera should return an unversioned camera because there's
        # nothing in the repo.
        camera, isVersioned = lsst.obs.base.loadCamera(butler, self.dataIds[0], collections=collection)
        self.assertFalse(isVersioned)
        self.assertIsInstance(camera, lsst.afw.cameraGeom.Camera)

        self._writeCuratedCalibrations()

        # Make a new butler instance to make sure we don't have any stale
        # caches (e.g. of DatasetTypes). Note that we didn't give
        # _writeCuratedCalibrations the butler instance we had, because it's
        # trying to test the CLI interface anyway.
        butler = Butler(self.root, writeable=False)

        instrumentClass = self.instrumentClass()
        calibration_names = instrumentClass.getCuratedCalibrationNames()

        for datasetTypeName in self.curatedCalibrationDatasetTypes:
            with self.subTest(dtype=datasetTypeName):
                found = list(
                    butler.registry.queryDatasetAssociations(
                        datasetTypeName,
                        collections=collection,
                    )
                )
                self.assertGreater(len(found), 0, f"Checking {datasetTypeName}")
                self.assertIn(datasetTypeName, calibration_names)

        # loadCamera should return the versioned camera from the repo.
        camera, isVersioned = lsst.obs.base.loadCamera(butler, self.dataIds[0], collections=collection)
        self.assertTrue(isVersioned)
        self.assertIsInstance(camera, lsst.afw.cameraGeom.Camera)

    def testDefineVisits(self):
        if self.visits is None:
            self.skipTest("Expected visits were not defined.")
        self._ingestRaws(transfer="link")

        # Check that the obscore table (if configured) has correct contents.
        butler = Butler(self.root, run=self.outputRun)
        self._check_obscore(butler.registry, has_visits=False)

        # Calling defineVisits tests the implementation of the butler command
        # line interface "define-visits" subcommand. Functions in the script
        # folder are generally considered protected and should not be used
        # as a public API.
        script.defineVisits(
            self.root,
            config_file=None,
            collections=self.outputRun,
            instrument=self.instrumentName,
            raw_name=self.ingestDatasetTypeName,
        )

        # Test that we got the visits we expected.
        visits = butler.registry.queryDataIds(["visit"]).expanded().toSet()
        self.assertCountEqual(visits, self.visits.keys())
        instr = Instrument.from_string(self.instrumentName, butler.registry)
        camera = instr.getCamera()
        for foundVisit, (expectedVisit, expectedExposures) in zip(visits, self.visits.items()):
            # Test that this visit is associated with the expected exposures.
            foundExposures = (
                butler.registry.queryDataIds(["exposure"], dataId=expectedVisit).expanded().toSet()
            )
            self.assertCountEqual(foundExposures, expectedExposures)
            # Test that we have a visit region, and that it contains all of the
            # detector+visit regions.
            self.assertIsNotNone(foundVisit.region)
            detectorVisitDataIds = (
                butler.registry.queryDataIds(["visit", "detector"], dataId=expectedVisit).expanded().toSet()
            )
            self.assertEqual(len(detectorVisitDataIds), len(camera))
            for dataId in detectorVisitDataIds:
                self.assertTrue(foundVisit.region.contains(dataId.region))

        # Check the obscore table again.
        self._check_obscore(butler.registry, has_visits=True)

    def _check_obscore(self, registry: Registry, has_visits: bool) -> None:
        """Verify the contents of the obscore table.

        The default implementation performs no checks.
        """
        return