Coverage for python/lsst/obs/base/ingest_tests.py: 27%

# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""Base class for writing Gen3 raw data ingest tests.
"""

__all__ = ("IngestTestBase",)

import abc
import tempfile
import unittest
import os
import shutil

import lsst.afw.cameraGeom
from lsst.daf.butler import Butler, ButlerURI
from lsst.daf.butler.cli.butler import cli as butlerCli
from lsst.daf.butler.cli.utils import LogCliRunner
import lsst.obs.base
from lsst.utils import doImport
from .utils import getInstrument
from . import script


class IngestTestBase(metaclass=abc.ABCMeta):
    """Base class for tests of Gen3 ingest.  Subclass from this, then from
    `unittest.TestCase`, to get a working test suite.
    """

    ingestDir = ""
    """Root path to ingest files into.  Typically `obs_package/tests/`; the
    actual directory will be a tempdir under this one.
    """

    ingestDatasetTypeName = "raw"
    """The DatasetType to use for the ingest.

    If this is not an Exposure dataset type, the tests will be more limited.
    """

    dataIds = []
    """List of butler data IDs of files that should have been ingested."""

    file = ""
    """Full path to a file to ingest in tests."""

    filterLabel = None
    """The lsst.afw.image.FilterLabel that should be returned by the above
    file."""

    rawIngestTask = "lsst.obs.base.RawIngestTask"
    """The task to use in the ingest test."""

    curatedCalibrationDatasetTypes = None
    """List or tuple of dataset types that should be present after calling
    writeCuratedCalibrations.  If `None`, writeCuratedCalibrations will
    not be called and the test will be skipped."""
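    # For example ("camera", "defects"); the exact set depends on what the
    # instrument's writeCuratedCalibrations actually provides.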

    defineVisitsTask = lsst.obs.base.DefineVisitsTask
    """The task to use to define visits from groups of exposures.

    This is ignored if ``visits`` is `None`.
    """

    visits = {}
    """A dictionary mapping visit data IDs to the lists of exposure data IDs
    that are associated with them.

    If this is empty (but not `None`), visit definition will be run but no
    visits will be expected (e.g. because no exposures are on-sky
    observations).
    """

    @property
    @abc.abstractmethod
    def instrumentClassName(self):
        """The fully qualified instrument class name.

        Returns
        -------
        `str`
            The fully qualified instrument class name.
        """
        pass

    @property
    def instrumentClass(self):
        """The instrument class."""
        return doImport(self.instrumentClassName)

    @property
    def instrumentName(self):
        """The name of the instrument.

        Returns
        -------
        `str`
            The name of the instrument.
        """
        return self.instrumentClass.getName()

    @classmethod
    def setUpClass(cls):
        # Use a temporary working directory.
        cls.root = tempfile.mkdtemp(dir=cls.ingestDir)
        cls._createRepo()

        # Register the instrument and its static metadata.
        cls._registerInstrument()

    def setUp(self):
        # Want a unique run name per test.
        self.outputRun = "raw_ingest_" + self.id()

    @classmethod
    def tearDownClass(cls):
        if os.path.exists(cls.root):
            shutil.rmtree(cls.root, ignore_errors=True)

    def verifyIngest(self, files=None, cli=False, fullCheck=False):
        """Test that RawIngestTask ingested the expected files.

        Parameters
        ----------
        files : `list` [`str`], or `None`
            List of files to be ingested, or `None` to use ``self.file``.
        fullCheck : `bool`, optional
            If `True`, read the full raw dataset and check component
            consistency.  If `False`, check that a component can be read
            but do not read the entire raw exposure.

        Notes
        -----
        Reading all the ingested test data can be expensive.  The code paths
        for reading the second raw are the same as for the first, so nothing
        is gained by doing full checks of everything.  Full pixel data are
        therefore only read for the first dataset from the file, and not even
        then if the caller has not requested it.  This only really affects
        files that contain multiple datasets.
        """
        butler = Butler(self.root, run=self.outputRun)
        datasets = list(butler.registry.queryDatasets(self.ingestDatasetTypeName, collections=self.outputRun))
        self.assertEqual(len(datasets), len(self.dataIds))

        # Get the URI to the first dataset and check it is inside the
        # datastore.
        datasetUri = butler.getURI(datasets[0])
        self.assertIsNotNone(datasetUri.relative_to(butler.datastore.root))

        # Get the relevant dataset type.
        datasetType = butler.registry.getDatasetType(self.ingestDatasetTypeName)

        for dataId in self.dataIds:
            # For testing we only read the entire dataset the first time
            # round if this is an Exposure.  If it is not an Exposure we
            # always read it completely, but we do not read components,
            # because for an arbitrary dataset type we cannot easily tell
            # what component to test.
            if not datasetType.storageClass.name.startswith("Exposure"):
                exposure = butler.get(self.ingestDatasetTypeName, dataId)
                # Could be anything, so nothing to test by default.
                continue

            # Check that we can read metadata from a raw.
            metadata = butler.get(f"{self.ingestDatasetTypeName}.metadata", dataId)
            if not fullCheck:
                continue
            fullCheck = False
            exposure = butler.get(self.ingestDatasetTypeName, dataId)
            self.assertEqual(metadata.toDict(), exposure.getMetadata().toDict())

            # Since components follow a different code path, we check that
            # the WCS matches and that at least the shape of the image is
            # the same (rather than doing per-pixel equality).
            wcs = butler.get(f"{self.ingestDatasetTypeName}.wcs", dataId)
            self.assertEqual(wcs, exposure.getWcs())

            rawImage = butler.get(f"{self.ingestDatasetTypeName}.image", dataId)
            self.assertEqual(rawImage.getBBox(), exposure.getBBox())

            # Check that the filter label got the correct band.
            filterLabel = butler.get(f"{self.ingestDatasetTypeName}.filterLabel", dataId)
            self.assertEqual(filterLabel, self.filterLabel)

        self.checkRepo(files=files)

    def checkRepo(self, files=None):
        """Check the state of the repository after ingest.

        This is an optional hook provided for subclasses; by default it does
        nothing.

        Parameters
        ----------
        files : `list` [`str`], or `None`
            List of files to be ingested, or `None` to use ``self.file``.
        """
        pass

    @classmethod
    def _createRepo(cls):
        """Use the Click `testing` module to call the butler command line
        API to create a repository."""
        runner = LogCliRunner()
        result = runner.invoke(butlerCli, ["create", cls.root])
        # Classmethod, so assertEqual does not work.
        assert result.exit_code == 0, f"output: {result.output} exception: {result.exception}"

    def _ingestRaws(self, transfer, file=None):
        """Use the Click `testing` module to call the butler command line
        API to ingest raws.

        Parameters
        ----------
        transfer : `str`
            The external data transfer type, e.g. ``auto``, ``link``,
            ``symlink``, ``hardlink``, ``copy``, or ``direct``.
        file : `str`, optional
            Path to a file to ingest instead of the default associated with
            the object.
        """
        if file is None:
            file = self.file
        runner = LogCliRunner()
        result = runner.invoke(butlerCli, ["ingest-raws", self.root, file,
                                           "--output-run", self.outputRun,
                                           "--transfer", transfer,
                                           "--ingest-task", self.rawIngestTask])
        self.assertEqual(result.exit_code, 0, f"output: {result.output} exception: {result.exception}")

    @classmethod
    def _registerInstrument(cls):
        """Use the Click `testing` module to call the butler command line
        API to register the instrument."""
        runner = LogCliRunner()
        result = runner.invoke(butlerCli, ["register-instrument", cls.root, cls.instrumentClassName])
        # Classmethod, so assertEqual does not work.
        assert result.exit_code == 0, f"output: {result.output} exception: {result.exception}"

    def _writeCuratedCalibrations(self):
        """Use the Click `testing` module to call the butler command line
        API to write curated calibrations."""
        runner = LogCliRunner()
        result = runner.invoke(butlerCli, ["write-curated-calibrations", self.root, self.instrumentName])
        self.assertEqual(result.exit_code, 0, f"output: {result.output} exception: {result.exception}")

    def testLink(self):
        self._ingestRaws(transfer="link")
        self.verifyIngest()

    def testSymLink(self):
        self._ingestRaws(transfer="symlink")
        self.verifyIngest()

    def testDirect(self):
        self._ingestRaws(transfer="direct")

        # Check that it really did have a URI outside of the datastore.
        srcUri = ButlerURI(self.file, forceAbsolute=True)
        butler = Butler(self.root, run=self.outputRun)
        datasets = list(butler.registry.queryDatasets(self.ingestDatasetTypeName, collections=self.outputRun))
        datastoreUri = butler.getURI(datasets[0])
        self.assertEqual(datastoreUri, srcUri)

    def testCopy(self):
        self._ingestRaws(transfer="copy")
        # Only test full read of raws for the copy test.  No need to do it
        # in the other tests since the formatter will be the same in all
        # cases.
        self.verifyIngest(fullCheck=True)

    def testHardLink(self):
        try:
            self._ingestRaws(transfer="hardlink")
            # Running ingest through the Click testing infrastructure causes
            # the original exception indicating that we can't hard-link
            # on this filesystem to be turned into a nonzero exit code, which
            # then trips the test assertion.
        except (AssertionError, PermissionError) as err:
            raise unittest.SkipTest("Skipping hard-link test because input data"
                                    " is on a different filesystem.") from err
        self.verifyIngest()

    def testInPlace(self):
        """Test that files already in the directory can be added to the
        registry in-place.
        """
        butler = Butler(self.root, run=self.outputRun)

        # If the test uses an index file, the index file needs to also
        # appear in the datastore root along with the file to be ingested.
        # In that scenario the file name being used for ingest cannot be
        # modified and must have the same name as found in the index file
        # itself.
        source_file_uri = ButlerURI(self.file)
        index_file = source_file_uri.dirname().join("_index.json")
        pathInStore = source_file_uri.basename()
        if index_file.exists():
            os.symlink(index_file.ospath, butler.datastore.root.join("_index.json").ospath)
        else:
            # No index file, so we are free to pick any name.
            pathInStore = "prefix-" + pathInStore

        # Create a symlink to the original file so that it looks like it
        # is now inside the datastore.
        newPath = butler.datastore.root.join(pathInStore)
        os.symlink(os.path.abspath(self.file), newPath.ospath)

        # If there is a sidecar file it needs to be linked in as well,
        # since the ingest code does not follow symlinks.
        sidecar_uri = ButlerURI(source_file_uri).updatedExtension(".json")
        if sidecar_uri.exists():
            newSidecar = ButlerURI(newPath).updatedExtension(".json")
            os.symlink(sidecar_uri.ospath, newSidecar.ospath)

        # Run ingest with auto mode, since that should automatically
        # determine that an in-place ingest is happening.
        self._ingestRaws(transfer="auto", file=newPath.ospath)
        self.verifyIngest()

        # Recreate a butler post-ingest (the earlier one won't see the
        # ingested files).
        butler = Butler(self.root, run=self.outputRun)

        # Check that the URI associated with this path is the right one.
        uri = butler.getURI(self.ingestDatasetTypeName, self.dataIds[0])
        self.assertEqual(uri.relative_to(butler.datastore.root), pathInStore)

    def testFailOnConflict(self):
        """Re-ingesting the same data into the repository should fail.
        """
        self._ingestRaws(transfer="symlink")
        with self.assertRaises(Exception):
            self._ingestRaws(transfer="symlink")

    def testWriteCuratedCalibrations(self):
        """Test that we can ingest the curated calibrations, and read them
        with `loadCamera` both before and after.
        """
        if self.curatedCalibrationDatasetTypes is None:
            raise unittest.SkipTest("Class requests disabling of writeCuratedCalibrations test")

        butler = Butler(self.root, writeable=False)
        collection = self.instrumentClass.makeCalibrationCollectionName()

        # Trying to load a camera with a data ID not known to the registry
        # is an error, because we can't get any temporal information.
        with self.assertRaises(LookupError):
            lsst.obs.base.loadCamera(butler, {"exposure": 0}, collections=collection)

        # Ingest raws in order to get some exposure records.
        self._ingestRaws(transfer="auto")

        # loadCamera should return an unversioned camera because there is
        # nothing in the repo yet.
        camera, isVersioned = lsst.obs.base.loadCamera(butler, self.dataIds[0], collections=collection)
        self.assertFalse(isVersioned)
        self.assertIsInstance(camera, lsst.afw.cameraGeom.Camera)

        self._writeCuratedCalibrations()

        # Make a new butler instance to make sure we don't have any stale
        # caches (e.g. of DatasetTypes).  Note that we didn't give
        # _writeCuratedCalibrations the butler instance we had, because it's
        # trying to test the CLI interface anyway.
        butler = Butler(self.root, writeable=False)

        for datasetTypeName in self.curatedCalibrationDatasetTypes:
            with self.subTest(dtype=datasetTypeName):
                found = list(
                    butler.registry.queryDatasetAssociations(
                        datasetTypeName,
                        collections=collection,
                    )
                )
                self.assertGreater(len(found), 0, f"Checking {datasetTypeName}")

        # loadCamera should now return the versioned camera from the repo.
        camera, isVersioned = lsst.obs.base.loadCamera(butler, self.dataIds[0], collections=collection)
        self.assertTrue(isVersioned)
        self.assertIsInstance(camera, lsst.afw.cameraGeom.Camera)

    def testDefineVisits(self):
        if self.visits is None:
            self.skipTest("Expected visits were not defined.")
        self._ingestRaws(transfer="link")

        # Calling defineVisits tests the implementation of the butler
        # command line interface "define-visits" subcommand.  Functions in
        # the script folder are generally considered protected and should
        # not be used as public API.
        script.defineVisits(self.root, config_file=None, collections=self.outputRun,
                            instrument=self.instrumentName)

        # Test that we got the visits we expected.
        butler = Butler(self.root, run=self.outputRun)
        visits = butler.registry.queryDataIds(["visit"]).expanded().toSet()
        self.assertCountEqual(visits, self.visits.keys())
        instr = getInstrument(self.instrumentName, butler.registry)
        camera = instr.getCamera()
        for foundVisit, (expectedVisit, expectedExposures) in zip(visits, self.visits.items()):
            # Test that this visit is associated with the expected exposures.
            foundExposures = butler.registry.queryDataIds(["exposure"],
                                                          dataId=expectedVisit).expanded().toSet()
            self.assertCountEqual(foundExposures, expectedExposures)
            # Test that we have a visit region, and that it contains all of
            # the detector+visit regions.
            self.assertIsNotNone(foundVisit.region)
            detectorVisitDataIds = butler.registry.queryDataIds(["visit", "detector"],
                                                                dataId=expectedVisit).expanded().toSet()
            self.assertEqual(len(detectorVisitDataIds), len(camera))
            for dataId in detectorVisitDataIds:
                self.assertTrue(foundVisit.region.contains(dataId.region))