# Coverage for tests/test_datastore.py : 18%
#
# Hot-keys on this page:
#   r m x p  toggle line displays
#   j k      next/prev highlighted chunk
#   0 (zero) top of page
#   1 (one)  first highlighted chunk
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
import os
import shutil
import tempfile
import unittest

import yaml

import lsst.utils
# Explicitly import the submodule: lsst.utils.tests.getTempFilePath is used
# by the ingest tests below and ``import lsst.utils`` alone does not
# guarantee the submodule is loaded.
import lsst.utils.tests
from lsst.utils import doImport

from lsst.daf.butler import StorageClassFactory, StorageClass, DimensionUniverse, FileDataset
from lsst.daf.butler import DatastoreConfig, DatasetTypeNotSupportedError, DatastoreValidationError
from lsst.daf.butler import ButlerURI
from lsst.daf.butler.formatters.yamlFormatter import YamlFormatter
from lsst.daf.butler.tests import (DatasetTestHelper, DatastoreTestHelper, BadWriteFormatter,
                                   BadNoWriteFormatter, MetricsExample, DummyRegistry)
# Directory containing this test file; used to locate test configuration
# files under config/basic.
TESTDIR = os.path.dirname(__file__)
def makeExampleMetrics():
    """Construct and return a `MetricsExample` filled with fixed test values."""
    summary = {"AM1": 5.2, "AM2": 30.6}
    output = {"a": [1, 2, 3],
              "b": {"blue": 5, "red": "green"}}
    data = [563, 234, 456.7]
    return MetricsExample(summary, output, data)
class TransactionTestError(Exception):
    """Specific error for transactions, to prevent misdiagnosing
    that might otherwise occur when a standard exception is used.
    """
class DatastoreTestsBase(DatasetTestHelper, DatastoreTestHelper):
    """Support routines for datastore testing"""

    # Working directory for datastore files; subclasses that need a real
    # directory assign one in setUp.
    root = None

    @classmethod
    def setUpClass(cls):
        # Storage classes are fixed for all datastores in these tests.
        cls.storageClassFactory = StorageClassFactory()
        cls.storageClassFactory.addFromConfig(
            os.path.join(TESTDIR, "config/basic/storageClasses.yaml"))

        # Read the datastore config so we can get the class information
        # (we should not assume the constructor name here, but rely on
        # the configuration file itself).
        cls.datastoreType = doImport(DatastoreConfig(cls.configFile)["cls"])
        cls.universe = DimensionUniverse()

    def setUp(self):
        self.setUpDatastoreTests(DummyRegistry, DatastoreConfig)

    def tearDown(self):
        # Clean up any on-disk root created by a subclass.
        if self.root is None:
            return
        if os.path.exists(self.root):
            shutil.rmtree(self.root, ignore_errors=True)
class DatastoreTests(DatastoreTestsBase):
    """Some basic tests of a simple datastore."""

    # True when the datastore can refuse a put for an unsupported dataset
    # type; subclasses whose datastores accept everything set this False.
    hasUnsupportedPut = True

    def testConfigRoot(self):
        """Check that setConfigRoot rewrites the configured root keys."""
        full = DatastoreConfig(self.configFile)
        config = DatastoreConfig(self.configFile, mergeDefaults=False)
        newroot = "/random/location"
        self.datastoreType.setConfigRoot(newroot, config, full)
        if self.rootKeys:
            for k in self.rootKeys:
                self.assertIn(newroot, config[k])

    def testConstructor(self):
        """A datastore can be constructed and reports expected ephemerality."""
        datastore = self.makeDatastore()
        self.assertIsNotNone(datastore)
        self.assertIs(datastore.isEphemeral, self.isEphemeral)

    def testConfigurationValidation(self):
        """validateConfiguration accepts known entities and rejects others."""
        datastore = self.makeDatastore()
        sc = self.storageClassFactory.getStorageClass("ThingOne")
        datastore.validateConfiguration([sc])

        sc2 = self.storageClassFactory.getStorageClass("ThingTwo")
        if self.validationCanFail:
            with self.assertRaises(DatastoreValidationError):
                datastore.validateConfiguration([sc2], logFailures=True)

        # Validation should also work when given a DatasetRef.
        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
        datastore.validateConfiguration([ref])

    def testParameterValidation(self):
        """Check that parameters are validated"""
        sc = self.storageClassFactory.getStorageClass("ThingOne")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
        datastore = self.makeDatastore()
        data = {1: 2, 3: 4}
        datastore.put(data, ref)
        newdata = datastore.get(ref)
        self.assertEqual(data, newdata)
        with self.assertRaises(KeyError):
            # An unrecognized read parameter must be rejected.
            newdata = datastore.get(ref, parameters={"missing": 5})

    def testBasicPutGet(self):
        """Round-trip a dataset and a component through the datastore."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        # Create multiple storage classes for testing different formulations
        storageClasses = [self.storageClassFactory.getStorageClass(sc)
                          for sc in ("StructuredData",
                                     "StructuredDataJson",
                                     "StructuredDataPickle")]

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}

        for sc in storageClasses:
            ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
            print("Using storageClass: {}".format(sc.name))
            datastore.put(metrics, ref)

            # Does it exist?
            self.assertTrue(datastore.exists(ref))

            # Get
            metricsOut = datastore.get(ref, parameters=None)
            self.assertEqual(metrics, metricsOut)

            uri = datastore.getUri(ref)
            self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

            # Get a component -- we need to construct new refs for them
            # with derived storage classes but with parent ID
            comp = "output"
            compRef = self.makeDatasetRef(ref.datasetType.componentTypeName(comp), dimensions,
                                          sc.components[comp], dataId, id=ref.id)
            output = datastore.get(compRef)
            self.assertEqual(output, metricsOut.output)

            uri = datastore.getUri(compRef)
            self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

            # Remember the last storage class for the failure tests below.
            storageClass = sc

        # Check that a put fails if the dataset type is not supported
        if self.hasUnsupportedPut:
            sc = StorageClass("UnsupportedSC", pytype=type(metrics))
            ref = self.makeDatasetRef("unsupportedType", dimensions, sc, dataId)
            with self.assertRaises(DatasetTypeNotSupportedError):
                datastore.put(metrics, ref)

        # These should raise
        ref = self.makeDatasetRef("metrics", dimensions, storageClass, dataId, id=10000)
        with self.assertRaises(FileNotFoundError):
            # non-existing file
            datastore.get(ref)

        # Get a URI from it
        uri = datastore.getUri(ref, predict=True)
        self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

        with self.assertRaises(FileNotFoundError):
            datastore.getUri(ref)

    def testCompositePutGet(self):
        """Disassemble composites, store each component, and reassemble."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        # Create multiple storage classes for testing different formulations
        # of composites
        storageClasses = [self.storageClassFactory.getStorageClass(sc)
                          for sc in ("StructuredComposite",
                                     "StructuredCompositeTestA",
                                     "StructuredCompositeTestB")]

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 428, "physical_filter": "R"}

        for sc in storageClasses:
            print("Using storageClass: {}".format(sc.name))
            ref = self.makeDatasetRef("metric", dimensions, sc, dataId,
                                      conform=False)

            components = sc.assembler().disassemble(metrics)
            self.assertTrue(components)

            compsRead = {}
            for compName, compInfo in components.items():
                compRef = self.makeDatasetRef(ref.datasetType.componentTypeName(compName), dimensions,
                                              components[compName].storageClass, dataId,
                                              conform=False)

                print("Writing component {} with {}".format(compName, compRef.datasetType.storageClass.name))
                datastore.put(compInfo.component, compRef)

                uri = datastore.getUri(compRef)
                self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

                compsRead[compName] = datastore.get(compRef)

                # We can generate identical files for each storage class
                # so remove the component here
                datastore.remove(compRef)

            # combine all the components we read back into a new composite
            metricsOut = sc.assembler().assemble(compsRead)
            self.assertEqual(metrics, metricsOut)

    def testRemove(self):
        """A removed dataset is gone and cannot be removed twice."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()
        # Put
        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 638, "physical_filter": "U"}

        sc = self.storageClassFactory.getStorageClass("StructuredData")
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
        datastore.put(metrics, ref)

        # Does it exist?
        self.assertTrue(datastore.exists(ref))

        # Get
        metricsOut = datastore.get(ref)
        self.assertEqual(metrics, metricsOut)
        # Remove
        datastore.remove(ref)

        # Does it exist?
        self.assertFalse(datastore.exists(ref))

        # Do we now get a predicted URI?
        uri = datastore.getUri(ref, predict=True)
        self.assertTrue(uri.endswith("#predicted"))

        # Get should now fail
        with self.assertRaises(FileNotFoundError):
            datastore.get(ref)
        # Can only delete once
        with self.assertRaises(FileNotFoundError):
            datastore.remove(ref)

    def testTransfer(self):
        """A dataset can be transferred from one datastore to another."""
        metrics = makeExampleMetrics()

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 2048, "physical_filter": "Uprime"}

        sc = self.storageClassFactory.getStorageClass("StructuredData")
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)

        inputDatastore = self.makeDatastore("test_input_datastore")
        outputDatastore = self.makeDatastore("test_output_datastore")

        inputDatastore.put(metrics, ref)
        outputDatastore.transfer(inputDatastore, ref)

        metricsOut = outputDatastore.get(ref)
        self.assertEqual(metrics, metricsOut)

    def testBasicTransaction(self):
        """Committed puts persist; a failing transaction rolls its puts back."""
        datastore = self.makeDatastore()
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        nDatasets = 6
        dataIds = [{"instrument": "dummy", "visit": i, "physical_filter": "V"} for i in range(nDatasets)]
        data = [(self.makeDatasetRef("metric", dimensions, storageClass, dataId, conform=False),
                 makeExampleMetrics(),)
                for dataId in dataIds]
        succeed = data[:nDatasets//2]
        fail = data[nDatasets//2:]
        # All datasets added in this transaction should continue to exist
        with datastore.transaction():
            for ref, metrics in succeed:
                datastore.put(metrics, ref)
        # Whereas datasets added in this transaction should not
        with self.assertRaises(TransactionTestError):
            with datastore.transaction():
                for ref, metrics in fail:
                    datastore.put(metrics, ref)
                raise TransactionTestError("This should propagate out of the context manager")
        # Check for datasets that should exist
        for ref, metrics in succeed:
            # Does it exist?
            self.assertTrue(datastore.exists(ref))
            # Get
            metricsOut = datastore.get(ref, parameters=None)
            self.assertEqual(metrics, metricsOut)
            # URI
            uri = datastore.getUri(ref)
            self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)
        # Check for datasets that should not exist
        for ref, _ in fail:
            # These should raise
            with self.assertRaises(FileNotFoundError):
                # non-existing file
                datastore.get(ref)
            with self.assertRaises(FileNotFoundError):
                datastore.getUri(ref)

    def testNestedTransaction(self):
        """A rollback undoes nested transactions but not earlier puts."""
        datastore = self.makeDatastore()
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        metrics = makeExampleMetrics()

        dataId = {"instrument": "dummy", "visit": 0, "physical_filter": "V"}
        refBefore = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                        conform=False)
        datastore.put(metrics, refBefore)
        with self.assertRaises(TransactionTestError):
            with datastore.transaction():
                dataId = {"instrument": "dummy", "visit": 1, "physical_filter": "V"}
                refOuter = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                               conform=False)
                datastore.put(metrics, refOuter)
                with datastore.transaction():
                    dataId = {"instrument": "dummy", "visit": 2, "physical_filter": "V"}
                    refInner = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                                   conform=False)
                    datastore.put(metrics, refInner)
                # All datasets should exist
                for ref in (refBefore, refOuter, refInner):
                    metricsOut = datastore.get(ref, parameters=None)
                    self.assertEqual(metrics, metricsOut)
                raise TransactionTestError("This should roll back the transaction")
        # Dataset(s) inserted before the transaction should still exist
        metricsOut = datastore.get(refBefore, parameters=None)
        self.assertEqual(metrics, metricsOut)
        # But all datasets inserted during the (rolled back) transaction
        # should be gone
        with self.assertRaises(FileNotFoundError):
            datastore.get(refOuter)
        with self.assertRaises(FileNotFoundError):
            datastore.get(refInner)

    def _prepareIngestTest(self):
        """Return example metrics and a matching DatasetRef for ingest tests."""
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        metrics = makeExampleMetrics()
        dataId = {"instrument": "dummy", "visit": 0, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, storageClass, dataId, conform=False)
        return metrics, ref

    def runIngestTest(self, func, expectOutput=True):
        """Write example metrics to a temporary YAML file and invoke
        ``func(metrics, path, ref)`` with it.
        """
        metrics, ref = self._prepareIngestTest()
        # NOTE(review): relies on lsst.utils.tests being importable --
        # confirm the submodule is loaded by the imports at the top.
        with lsst.utils.tests.getTempFilePath(".yaml", expectOutput=expectOutput) as path:
            with open(path, 'w') as fd:
                yaml.dump(metrics._asdict(), stream=fd)
            func(metrics, path, ref)

    def testIngestNoTransfer(self):
        """Test ingesting existing files with no transfer.
        """
        datastore = self.makeDatastore()

        def succeed(obj, path, ref):
            """Ingest a file already in the datastore root."""
            # first move it into the root, and adjust the path accordingly
            path = shutil.copy(path, datastore.root)
            path = os.path.relpath(path, start=datastore.root)
            datastore.ingest(FileDataset(path=path, refs=ref), transfer=None)
            self.assertEqual(obj, datastore.get(ref))

        def failInputDoesNotExist(obj, path, ref):
            """Can't ingest files if we're given a bad path."""
            with self.assertRaises(FileNotFoundError):
                datastore.ingest(FileDataset(path="this-file-does-not-exist.yaml", refs=ref), transfer=None)
            self.assertFalse(datastore.exists(ref))

        def failOutsideRoot(obj, path, ref):
            """Can't ingest files outside of datastore root."""
            with self.assertRaises(RuntimeError):
                datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=None)
            self.assertFalse(datastore.exists(ref))

        def failNotImplemented(obj, path, ref):
            # Datastores that do not support no-transfer ingest at all.
            with self.assertRaises(NotImplementedError):
                datastore.ingest(FileDataset(path=path, refs=ref), transfer=None)

        if None in self.ingestTransferModes:
            self.runIngestTest(failOutsideRoot)
            self.runIngestTest(failInputDoesNotExist)
            self.runIngestTest(succeed)
        else:
            self.runIngestTest(failNotImplemented)

    def testIngestTransfer(self):
        """Test ingesting existing files after transferring them.
        """
        for mode in ("copy", "move", "hardlink", "symlink"):
            with self.subTest(mode=mode):
                datastore = self.makeDatastore(mode)

                def succeed(obj, path, ref):
                    """Ingest a file by transferring it to the template
                    location."""
                    datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)
                    self.assertEqual(obj, datastore.get(ref))

                def failInputDoesNotExist(obj, path, ref):
                    """Can't ingest files if we're given a bad path."""
                    with self.assertRaises(FileNotFoundError):
                        datastore.ingest(FileDataset(path="this-file-does-not-exist.yaml", refs=ref),
                                         transfer=mode)
                    self.assertFalse(datastore.exists(ref))

                def failOutputExists(obj, path, ref):
                    """Can't ingest files if transfer destination already
                    exists."""
                    with self.assertRaises(FileExistsError):
                        datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)
                    self.assertFalse(datastore.exists(ref))

                def failNotImplemented(obj, path, ref):
                    # Datastores that do not support this transfer mode.
                    with self.assertRaises(NotImplementedError):
                        datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)

                if mode in self.ingestTransferModes:
                    self.runIngestTest(failInputDoesNotExist)
                    # "move" consumes the input file, so no output expected.
                    self.runIngestTest(succeed, expectOutput=(mode != "move"))
                    self.runIngestTest(failOutputExists)
                else:
                    self.runIngestTest(failNotImplemented)

    def testIngestSymlinkOfSymlink(self):
        """Special test for symlink to a symlink ingest"""
        mode = "symlink"
        if mode not in self.ingestTransferModes:
            # NOTE(review): a silent return reports this test as passed
            # rather than skipped; consider self.skipTest(...) instead.
            return
        metrics, ref = self._prepareIngestTest()
        # The aim of this test is to create a dataset on disk, then
        # create a symlink to it and finally ingest the symlink such that
        # the symlink in the datastore points to the original dataset.
        with lsst.utils.tests.getTempFilePath(".yaml") as realpath:
            with open(realpath, 'w') as fd:
                yaml.dump(metrics._asdict(), stream=fd)
            with lsst.utils.tests.getTempFilePath(".yaml") as sympath:
                os.symlink(os.path.abspath(realpath), sympath)

                datastore = self.makeDatastore()
                datastore.ingest(FileDataset(path=os.path.abspath(sympath), refs=ref), transfer=mode)

                uri = ButlerURI(datastore.getUri(ref))
                self.assertTrue(not uri.scheme or uri.scheme == "file", f"Check {uri.scheme}")
                # The ingested entry must itself be a symlink resolving to
                # the original file, not to the intermediate symlink.
                self.assertTrue(os.path.islink(uri.path))
                self.assertEqual(os.readlink(uri.path), os.path.abspath(realpath))
class PosixDatastoreTestCase(DatastoreTests, unittest.TestCase):
    """PosixDatastore specialization"""
    configFile = os.path.join(TESTDIR, "config/basic/butler.yaml")
    uriScheme = "file:"
    # All transfer modes, including no-transfer ingest, are supported.
    ingestTransferModes = (None, "copy", "move", "hardlink", "symlink")
    isEphemeral = False
    # Config key(s) that setConfigRoot should rewrite.
    rootKeys = ("root",)
    validationCanFail = True

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()
class PosixDatastoreNoChecksumsTestCase(PosixDatastoreTestCase):
    """Posix datastore tests but with checksums disabled."""
    configFile = os.path.join(TESTDIR, "config/basic/posixDatastoreNoChecksums.yaml")

    def testChecksum(self):
        """Ensure that checksums have not been calculated."""
        datastore = self.makeDatastore()
        metrics = makeExampleMetrics()
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 0, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                  conform=False)

        # The configuration disables checksum calculation, so the stored
        # record must carry no checksum after a put.
        datastore.put(metrics, ref)
        self.assertIsNone(datastore.getStoredItemInfo(ref).checksum)

        # Remove the dataset, explicitly re-enable checksums, and put it
        # back: this time a checksum must be recorded.
        datastore.remove(ref)
        datastore.useChecksum = True
        datastore.put(metrics, ref)
        self.assertIsNotNone(datastore.getStoredItemInfo(ref).checksum)
class CleanupPosixDatastoreTestCase(DatastoreTestsBase, unittest.TestCase):
    """Check that a failed formatter write leaves no partial file behind."""
    configFile = os.path.join(TESTDIR, "config/basic/butler.yaml")

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()

    def testCleanup(self):
        """Test that a failed formatter write does cleanup a partial file."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        storageClass = self.storageClassFactory.getStorageClass("StructuredData")

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}

        ref = self.makeDatasetRef("metric", dimensions, storageClass, dataId, conform=False)

        # Determine where the file will end up (we assume Formatters use
        # the same file extension)
        expectedUri = datastore.getUri(ref, predict=True)
        self.assertTrue(expectedUri.endswith(".yaml#predicted"),
                        f"Is there a file extension in {expectedUri}")

        # Convert to ButlerURI so we can extract the path component
        expectedUri = ButlerURI(expectedUri)
        expectedFile = expectedUri.path

        # Try formatter that fails and formatter that fails and leaves
        # a file behind
        for formatter in (BadWriteFormatter, BadNoWriteFormatter):
            with self.subTest(formatter=formatter):

                # Monkey patch the formatter
                datastore.formatterFactory.registerFormatter(ref.datasetType, formatter,
                                                             overwrite=True)

                # Try to put the dataset, it should fail
                with self.assertRaises(Exception):
                    datastore.put(metrics, ref)

                # Check that there is no file on disk
                self.assertFalse(os.path.exists(expectedFile), f"Check for existence of {expectedFile}")

                # Check that there is a directory
                self.assertTrue(os.path.exists(os.path.dirname(expectedFile)),
                                f"Check for existence of directory {os.path.dirname(expectedFile)}")

        # Force YamlFormatter and check that this time a file is written
        datastore.formatterFactory.registerFormatter(ref.datasetType, YamlFormatter,
                                                     overwrite=True)
        datastore.put(metrics, ref)
        self.assertTrue(os.path.exists(expectedFile), f"Check for existence of {expectedFile}")
        datastore.remove(ref)
        self.assertFalse(os.path.exists(expectedFile), f"Check for existence of now removed {expectedFile}")
class InMemoryDatastoreTestCase(DatastoreTests, unittest.TestCase):
    """InMemoryDatastore specialization"""
    configFile = os.path.join(TESTDIR, "config/basic/inMemoryDatastore.yaml")
    uriScheme = "mem:"
    # An in-memory datastore accepts any put, so skip the unsupported-put
    # part of testBasicPutGet.
    hasUnsupportedPut = False
    # File ingest is not possible for an in-memory store.
    ingestTransferModes = ()
    isEphemeral = True
    # No on-disk root, so no config keys for setConfigRoot to rewrite.
    rootKeys = None
    validationCanFail = False
class ChainedDatastoreTestCase(PosixDatastoreTestCase):
    """ChainedDatastore specialization using a POSIXDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastore.yaml")
    hasUnsupportedPut = False
    ingestTransferModes = ("copy", "hardlink", "symlink")
    isEphemeral = False
    # Roots of the individual child datastores inside the chain.
    rootKeys = (".datastores.1.root", ".datastores.2.root")
    validationCanFail = True
class ChainedDatastoreMemoryTestCase(InMemoryDatastoreTestCase):
    """ChainedDatastore specialization using all InMemoryDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastore2.yaml")
    validationCanFail = False
class DatastoreConstraintsTests(DatastoreTestsBase):
    """Basic tests of constraints model of Datastores."""

    def testConstraints(self):
        """Test constraints model. Assumes that each test class has the
        same constraints."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        sc1 = self.storageClassFactory.getStorageClass("StructuredData")
        sc2 = self.storageClassFactory.getStorageClass("StructuredDataJson")
        dimensions = self.universe.extract(("visit", "physical_filter", "instrument"))
        dataId = {"visit": 52, "physical_filter": "V", "instrument": "DummyCamComp"}

        # Write empty file suitable for ingest check
        # (keep the handle alive so the temporary file is not deleted
        # before the loop finishes).
        testfile = tempfile.NamedTemporaryFile()
        # Each tuple: dataset type name, storage class, and whether the
        # constraints configuration should accept it.
        for datasetTypeName, sc, accepted in (("metric", sc1, True), ("metric2", sc1, False),
                                              ("metric33", sc1, True), ("metric2", sc2, True)):
            with self.subTest(datasetTypeName=datasetTypeName):
                ref = self.makeDatasetRef(datasetTypeName, dimensions, sc, dataId, conform=False)
                if accepted:
                    datastore.put(metrics, ref)
                    self.assertTrue(datastore.exists(ref))
                    datastore.remove(ref)

                    # Try ingest
                    if self.canIngest:
                        datastore.ingest(FileDataset(testfile.name, [ref]), transfer="symlink")
                        self.assertTrue(datastore.exists(ref))
                        datastore.remove(ref)
                else:
                    with self.assertRaises(DatasetTypeNotSupportedError):
                        datastore.put(metrics, ref)
                    self.assertFalse(datastore.exists(ref))

                    # Again with ingest
                    if self.canIngest:
                        with self.assertRaises(DatasetTypeNotSupportedError):
                            datastore.ingest(FileDataset(testfile.name, [ref]), transfer="symlink")
                        self.assertFalse(datastore.exists(ref))
class PosixDatastoreConstraintsTestCase(DatastoreConstraintsTests, unittest.TestCase):
    """PosixDatastore specialization"""
    configFile = os.path.join(TESTDIR, "config/basic/posixDatastoreP.yaml")
    canIngest = True

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()
class InMemoryDatastoreConstraintsTestCase(DatastoreConstraintsTests, unittest.TestCase):
    """InMemoryDatastore specialization"""
    configFile = os.path.join(TESTDIR, "config/basic/inMemoryDatastoreP.yaml")
    # In-memory datastores cannot ingest files.
    canIngest = False
class ChainedDatastoreConstraintsNativeTestCase(PosixDatastoreConstraintsTestCase):
    """ChainedDatastore specialization using a POSIXDatastore and constraints
    at the ChainedDatastore """
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastorePa.yaml")
class ChainedDatastoreConstraintsTestCase(PosixDatastoreConstraintsTestCase):
    """ChainedDatastore specialization using a POSIXDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastoreP.yaml")
class ChainedDatastoreMemoryConstraintsTestCase(InMemoryDatastoreConstraintsTestCase):
    """ChainedDatastore specialization using all InMemoryDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastore2P.yaml")
    canIngest = False
class ChainedDatastorePerStoreConstraintsTests(DatastoreTestsBase, unittest.TestCase):
    """Test that a chained datastore can control constraints per-datastore
    even if child datastore would accept."""

    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastorePb.yaml")

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()

    def testConstraints(self):
        """Test chained datastore constraints model."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        sc1 = self.storageClassFactory.getStorageClass("StructuredData")
        sc2 = self.storageClassFactory.getStorageClass("StructuredDataJson")
        dimensions = self.universe.extract(("visit", "physical_filter", "instrument"))
        dataId1 = {"visit": 52, "physical_filter": "V", "instrument": "DummyCamComp"}
        dataId2 = {"visit": 52, "physical_filter": "V", "instrument": "HSC"}

        # Write empty file suitable for ingest check
        testfile = tempfile.NamedTemporaryFile()

        # Each tuple: dataset type, dataId, storage class, expected
        # acceptance per child datastore in the chain, and whether ingest
        # should succeed overall.
        for typeName, dataId, sc, accept, ingest in (("metric", dataId1, sc1, (False, True, False), True),
                                                     ("metric2", dataId1, sc1, (False, False, False), False),
                                                     ("metric2", dataId2, sc1, (True, False, False), False),
                                                     ("metric33", dataId2, sc2, (True, True, False), True),
                                                     ("metric2", dataId1, sc2, (False, True, False), True)):
            with self.subTest(datasetTypeName=typeName, dataId=dataId, sc=sc.name):
                ref = self.makeDatasetRef(typeName, dimensions, sc, dataId,
                                          conform=False)
                if any(accept):
                    datastore.put(metrics, ref)
                    self.assertTrue(datastore.exists(ref))

                    # Check each datastore inside the chained datastore
                    for childDatastore, expected in zip(datastore.datastores, accept):
                        self.assertEqual(childDatastore.exists(ref), expected,
                                         f"Testing presence of {ref} in datastore {childDatastore.name}")

                    datastore.remove(ref)

                    # Check that ingest works
                    if ingest:
                        datastore.ingest(FileDataset(testfile.name, [ref]), transfer="symlink")
                        self.assertTrue(datastore.exists(ref))

                        # Check each datastore inside the chained datastore
                        for childDatastore, expected in zip(datastore.datastores, accept):
                            # Ephemeral datastores means InMemory at the moment
                            # and that does not accept ingest of files.
                            if childDatastore.isEphemeral:
                                expected = False
                            self.assertEqual(childDatastore.exists(ref), expected,
                                             f"Testing presence of ingested {ref} in datastore"
                                             f" {childDatastore.name}")

                        datastore.remove(ref)
                    else:
                        with self.assertRaises(DatasetTypeNotSupportedError):
                            datastore.ingest(FileDataset(testfile.name, [ref]), transfer="symlink")

                else:
                    with self.assertRaises(DatasetTypeNotSupportedError):
                        datastore.put(metrics, ref)
                    self.assertFalse(datastore.exists(ref))

                    # Again with ingest
                    with self.assertRaises(DatasetTypeNotSupportedError):
                        datastore.ingest(FileDataset(testfile.name, [ref]), transfer="symlink")
                    self.assertFalse(datastore.exists(ref))
# Standard script entry point: run all test cases in this module.
if __name__ == "__main__":
    unittest.main()