# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
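
"""Tests of the daf_butler Datastore implementations."""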

import os
import unittest
import shutil
import yaml
import tempfile

import lsst.utils.tests

from lsst.utils import doImport

from lsst.daf.butler import StorageClassFactory, StorageClass, DimensionUniverse, FileDataset
from lsst.daf.butler import DatastoreConfig, DatasetTypeNotSupportedError, DatastoreValidationError
from lsst.daf.butler import ButlerURI
from lsst.daf.butler.formatters.yamlFormatter import YamlFormatter

from lsst.daf.butler.tests import (DatasetTestHelper, DatastoreTestHelper, BadWriteFormatter,
                                   BadNoWriteFormatter, MetricsExample, DummyRegistry)

TESTDIR = os.path.dirname(__file__)
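

# Build a small MetricsExample whose summary, output and data components
# match the component layout of the test storage classes used below.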
def makeExampleMetrics():
    return MetricsExample({"AM1": 5.2, "AM2": 30.6},
                          {"a": [1, 2, 3],
                           "b": {"blue": 5, "red": "green"}},
                          [563, 234, 456.7]
                          )


class TransactionTestError(Exception):
    """Specific error for transactions, to prevent misdiagnosis that
    might otherwise occur when a standard exception is used.
    """
    pass


class DatastoreTestsBase(DatasetTestHelper, DatastoreTestHelper):
    """Support routines for datastore testing."""

    root = None

    @classmethod
    def setUpClass(cls):
        # Storage Classes are fixed for all datastores in these tests
        scConfigFile = os.path.join(TESTDIR, "config/basic/storageClasses.yaml")
        cls.storageClassFactory = StorageClassFactory()
        cls.storageClassFactory.addFromConfig(scConfigFile)

        # Read the Datastore config so we can get the class
        # information (since we should not assume the constructor
        # name here, but rely on the configuration file itself)
        datastoreConfig = DatastoreConfig(cls.configFile)
        cls.datastoreType = doImport(datastoreConfig["cls"])
        cls.universe = DimensionUniverse()

    def setUp(self):
        self.setUpDatastoreTests(DummyRegistry, DatastoreConfig)

    def tearDown(self):
        if self.root is not None and os.path.exists(self.root):
            shutil.rmtree(self.root, ignore_errors=True)


class DatastoreTests(DatastoreTestsBase):
    """Some basic tests of a simple datastore."""
    hasUnsupportedPut = True

    def testConfigRoot(self):
        full = DatastoreConfig(self.configFile)
        config = DatastoreConfig(self.configFile, mergeDefaults=False)
        newroot = "/random/location"
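        # setConfigRoot should rewrite each root-dependent entry in the
        # config to point at the new location.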
        self.datastoreType.setConfigRoot(newroot, config, full)
        if self.rootKeys:
            for k in self.rootKeys:
                self.assertIn(newroot, config[k])

    def testConstructor(self):
        datastore = self.makeDatastore()
        self.assertIsNotNone(datastore)
        self.assertIs(datastore.isEphemeral, self.isEphemeral)

    def testConfigurationValidation(self):
        datastore = self.makeDatastore()
        sc = self.storageClassFactory.getStorageClass("ThingOne")
        datastore.validateConfiguration([sc])

        sc2 = self.storageClassFactory.getStorageClass("ThingTwo")
        if self.validationCanFail:
            with self.assertRaises(DatastoreValidationError):
                datastore.validateConfiguration([sc2], logFailures=True)

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
        datastore.validateConfiguration([ref])

    def testParameterValidation(self):
        """Check that parameters are validated."""
        sc = self.storageClassFactory.getStorageClass("ThingOne")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
        datastore = self.makeDatastore()
        data = {1: 2, 3: 4}
        datastore.put(data, ref)
        newdata = datastore.get(ref)
        self.assertEqual(data, newdata)
        with self.assertRaises(KeyError):
            newdata = datastore.get(ref, parameters={"missing": 5})

    def testBasicPutGet(self):
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        # Create multiple storage classes for testing different formulations
        storageClasses = [self.storageClassFactory.getStorageClass(sc)
                          for sc in ("StructuredData",
                                     "StructuredDataJson",
                                     "StructuredDataPickle")]

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}

        for sc in storageClasses:
            ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
            print("Using storageClass: {}".format(sc.name))
            datastore.put(metrics, ref)

            # Does it exist?
            self.assertTrue(datastore.exists(ref))

            # Get
            metricsOut = datastore.get(ref, parameters=None)
            self.assertEqual(metrics, metricsOut)

            uri = datastore.getUri(ref)
            self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

            # Get a component -- we need to construct new refs for them
            # with derived storage classes but with parent ID
            comp = "output"
            compRef = self.makeDatasetRef(ref.datasetType.componentTypeName(comp), dimensions,
                                          sc.components[comp], dataId, id=ref.id)
            output = datastore.get(compRef)
            self.assertEqual(output, metricsOut.output)

            uri = datastore.getUri(compRef)
            self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)
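
        # Remember the last storage class used so the failure checks
        # below can construct refs with it.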
        storageClass = sc

        # Check that a put fails if the dataset type is not supported
        if self.hasUnsupportedPut:
            sc = StorageClass("UnsupportedSC", pytype=type(metrics))
            ref = self.makeDatasetRef("unsupportedType", dimensions, sc, dataId)
            with self.assertRaises(DatasetTypeNotSupportedError):
                datastore.put(metrics, ref)

        # These should raise
        ref = self.makeDatasetRef("metrics", dimensions, storageClass, dataId, id=10000)
        with self.assertRaises(FileNotFoundError):
            # non-existing file
            datastore.get(ref)

        # Get a URI from it
        uri = datastore.getUri(ref, predict=True)
        self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

        with self.assertRaises(FileNotFoundError):
            datastore.getUri(ref)

    def testCompositePutGet(self):
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        # Create multiple storage classes for testing different formulations
        # of composites
        storageClasses = [self.storageClassFactory.getStorageClass(sc)
                          for sc in ("StructuredComposite",
                                     "StructuredCompositeTestA",
                                     "StructuredCompositeTestB")]

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 428, "physical_filter": "R"}

        for sc in storageClasses:
            print("Using storageClass: {}".format(sc.name))
            ref = self.makeDatasetRef("metric", dimensions, sc, dataId,
                                      conform=False)

            components = sc.assembler().disassemble(metrics)
            self.assertTrue(components)

            compsRead = {}
            for compName, compInfo in components.items():
                compRef = self.makeDatasetRef(ref.datasetType.componentTypeName(compName), dimensions,
                                              components[compName].storageClass, dataId,
                                              conform=False)

                print("Writing component {} with {}".format(compName, compRef.datasetType.storageClass.name))
                datastore.put(compInfo.component, compRef)

                uri = datastore.getUri(compRef)
                self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

                compsRead[compName] = datastore.get(compRef)

                # Each storage class can generate identical component files,
                # so remove the component here to avoid a clash on the
                # next iteration
                datastore.remove(compRef)

            # combine all the components we read back into a new composite
            metricsOut = sc.assembler().assemble(compsRead)
            self.assertEqual(metrics, metricsOut)

    def testRemove(self):
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()
        # Put
        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 638, "physical_filter": "U"}

        sc = self.storageClassFactory.getStorageClass("StructuredData")
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
        datastore.put(metrics, ref)

        # Does it exist?
        self.assertTrue(datastore.exists(ref))

        # Get
        metricsOut = datastore.get(ref)
        self.assertEqual(metrics, metricsOut)
        # Remove
        datastore.remove(ref)

        # Does it exist?
        self.assertFalse(datastore.exists(ref))

        # Do we now get a predicted URI?
        uri = datastore.getUri(ref, predict=True)
        self.assertTrue(uri.endswith("#predicted"))

        # Get should now fail
        with self.assertRaises(FileNotFoundError):
            datastore.get(ref)
        # Can only delete once
        with self.assertRaises(FileNotFoundError):
            datastore.remove(ref)

    def testTransfer(self):
        metrics = makeExampleMetrics()

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 2048, "physical_filter": "Uprime"}

        sc = self.storageClassFactory.getStorageClass("StructuredData")
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)

        inputDatastore = self.makeDatastore("test_input_datastore")
        outputDatastore = self.makeDatastore("test_output_datastore")
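
        # Put the dataset into one datastore, transfer it to the other,
        # and check that it reads back unchanged.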
        inputDatastore.put(metrics, ref)
        outputDatastore.transfer(inputDatastore, ref)

        metricsOut = outputDatastore.get(ref)
        self.assertEqual(metrics, metricsOut)

    def testBasicTransaction(self):
        datastore = self.makeDatastore()
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        nDatasets = 6
        dataIds = [{"instrument": "dummy", "visit": i, "physical_filter": "V"} for i in range(nDatasets)]
        data = [(self.makeDatasetRef("metric", dimensions, storageClass, dataId, conform=False),
                 makeExampleMetrics(),)
                for dataId in dataIds]
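        # The first half of the datasets is written in a transaction that
        # commits; the second half in a transaction that is rolled back.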
        succeed = data[:nDatasets//2]
        fail = data[nDatasets//2:]
        # All datasets added in this transaction should continue to exist
        with datastore.transaction():
            for ref, metrics in succeed:
                datastore.put(metrics, ref)
        # Whereas datasets added in this transaction should not
        with self.assertRaises(TransactionTestError):
            with datastore.transaction():
                for ref, metrics in fail:
                    datastore.put(metrics, ref)
                raise TransactionTestError("This should propagate out of the context manager")
        # Check for datasets that should exist
        for ref, metrics in succeed:
            # Does it exist?
            self.assertTrue(datastore.exists(ref))
            # Get
            metricsOut = datastore.get(ref, parameters=None)
            self.assertEqual(metrics, metricsOut)
            # URI
            uri = datastore.getUri(ref)
            self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)
        # Check for datasets that should not exist
        for ref, _ in fail:
            # These should raise
            with self.assertRaises(FileNotFoundError):
                # non-existing file
                datastore.get(ref)
            with self.assertRaises(FileNotFoundError):
                datastore.getUri(ref)

    def testNestedTransaction(self):
        datastore = self.makeDatastore()
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        metrics = makeExampleMetrics()

        dataId = {"instrument": "dummy", "visit": 0, "physical_filter": "V"}
        refBefore = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                        conform=False)
        datastore.put(metrics, refBefore)
        with self.assertRaises(TransactionTestError):
            with datastore.transaction():
                dataId = {"instrument": "dummy", "visit": 1, "physical_filter": "V"}
                refOuter = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                               conform=False)
                datastore.put(metrics, refOuter)
                with datastore.transaction():
                    dataId = {"instrument": "dummy", "visit": 2, "physical_filter": "V"}
                    refInner = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                                   conform=False)
                    datastore.put(metrics, refInner)
                # All datasets should exist
                for ref in (refBefore, refOuter, refInner):
                    metricsOut = datastore.get(ref, parameters=None)
                    self.assertEqual(metrics, metricsOut)
                raise TransactionTestError("This should roll back the transaction")
        # Dataset(s) inserted before the transaction should still exist
        metricsOut = datastore.get(refBefore, parameters=None)
        self.assertEqual(metrics, metricsOut)
        # But all datasets inserted during the (rolled back) transaction
        # should be gone
        with self.assertRaises(FileNotFoundError):
            datastore.get(refOuter)
        with self.assertRaises(FileNotFoundError):
            datastore.get(refInner)

    def _prepareIngestTest(self):
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        metrics = makeExampleMetrics()
        dataId = {"instrument": "dummy", "visit": 0, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, storageClass, dataId, conform=False)
        return metrics, ref
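
    # Write the example metrics to a temporary YAML file, then hand the
    # object, the file path, and the ref to the supplied test function.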
    def runIngestTest(self, func, expectOutput=True):
        metrics, ref = self._prepareIngestTest()
        with lsst.utils.tests.getTempFilePath(".yaml", expectOutput=expectOutput) as path:
            with open(path, 'w') as fd:
                yaml.dump(metrics._asdict(), stream=fd)
            func(metrics, path, ref)

    def testIngestNoTransfer(self):
        """Test ingesting existing files with no transfer.
        """
        for mode in (None, "auto"):

            # Some datastores have auto but can't do in-place transfer
            if mode == "auto" and "auto" in self.ingestTransferModes and not self.canIngestNoTransferAuto:
                continue

            with self.subTest(mode=mode):
                datastore = self.makeDatastore()

                def succeed(obj, path, ref):
                    """Ingest a file already in the datastore root."""
                    # first move it into the root, and adjust the path
                    # accordingly
                    path = shutil.copy(path, datastore.root)
                    path = os.path.relpath(path, start=datastore.root)
                    datastore.ingest(FileDataset(path=path, refs=ref), transfer=mode)
                    self.assertEqual(obj, datastore.get(ref))

                def failInputDoesNotExist(obj, path, ref):
                    """Can't ingest files if we're given a bad path."""
                    with self.assertRaises(FileNotFoundError):
                        datastore.ingest(FileDataset(path="this-file-does-not-exist.yaml", refs=ref),
                                         transfer=mode)
                    self.assertFalse(datastore.exists(ref))

                def failOutsideRoot(obj, path, ref):
                    """Can't ingest files outside of datastore root unless
                    auto."""
                    if mode == "auto":
                        datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)
                        self.assertTrue(datastore.exists(ref))
                    else:
                        with self.assertRaises(RuntimeError):
                            datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)
                        self.assertFalse(datastore.exists(ref))

                def failNotImplemented(obj, path, ref):
                    with self.assertRaises(NotImplementedError):
                        datastore.ingest(FileDataset(path=path, refs=ref), transfer=mode)

                if mode in self.ingestTransferModes:
                    self.runIngestTest(failOutsideRoot)
                    self.runIngestTest(failInputDoesNotExist)
                    self.runIngestTest(succeed)
                else:
                    self.runIngestTest(failNotImplemented)

    def testIngestTransfer(self):
        """Test ingesting existing files after transferring them.
        """
        for mode in ("copy", "move", "link", "hardlink", "symlink", "auto"):
            with self.subTest(mode=mode):
                datastore = self.makeDatastore(mode)

                def succeed(obj, path, ref):
                    """Ingest a file by transferring it to the template
                    location."""
                    datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)
                    self.assertEqual(obj, datastore.get(ref))

                def failInputDoesNotExist(obj, path, ref):
                    """Can't ingest files if we're given a bad path."""
                    with self.assertRaises(FileNotFoundError):
                        # Ensure the file does not look like it is in
                        # datastore for auto mode
                        datastore.ingest(FileDataset(path="../this-file-does-not-exist.yaml", refs=ref),
                                         transfer=mode)
                    self.assertFalse(datastore.exists(ref))

                def failOutputExists(obj, path, ref):
                    """Can't ingest files if transfer destination already
                    exists."""
                    with self.assertRaises(FileExistsError):
                        datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)
                    self.assertFalse(datastore.exists(ref))

                def failNotImplemented(obj, path, ref):
                    with self.assertRaises(NotImplementedError):
                        datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)

                if mode in self.ingestTransferModes:
                    self.runIngestTest(failInputDoesNotExist)
                    self.runIngestTest(succeed, expectOutput=(mode != "move"))
                    self.runIngestTest(failOutputExists)
                else:
                    self.runIngestTest(failNotImplemented)

    def testIngestSymlinkOfSymlink(self):
        """Special test for symlink to a symlink ingest."""
        mode = "symlink"
        if mode not in self.ingestTransferModes:
            self.skipTest(f"Datastore does not support {mode} transfer mode")
        metrics, ref = self._prepareIngestTest()
        # The aim of this test is to create a dataset on disk, then
        # create a symlink to it and finally ingest the symlink such that
        # the symlink in the datastore points to the original dataset.
        with lsst.utils.tests.getTempFilePath(".yaml") as realpath:
            with open(realpath, 'w') as fd:
                yaml.dump(metrics._asdict(), stream=fd)
            with lsst.utils.tests.getTempFilePath(".yaml") as sympath:
                os.symlink(os.path.abspath(realpath), sympath)

                datastore = self.makeDatastore()
                datastore.ingest(FileDataset(path=os.path.abspath(sympath), refs=ref), transfer=mode)

                uri = ButlerURI(datastore.getUri(ref))
                self.assertTrue(not uri.scheme or uri.scheme == "file", f"Check {uri.scheme}")
                self.assertTrue(os.path.islink(uri.path))
                self.assertEqual(os.readlink(uri.path), os.path.abspath(realpath))


class PosixDatastoreTestCase(DatastoreTests, unittest.TestCase):
    """PosixDatastore specialization"""
    configFile = os.path.join(TESTDIR, "config/basic/butler.yaml")
    uriScheme = "file:"
    canIngestNoTransferAuto = True
    ingestTransferModes = (None, "copy", "move", "link", "hardlink", "symlink", "auto")
    isEphemeral = False
    rootKeys = ("root",)
    validationCanFail = True

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()


class PosixDatastoreNoChecksumsTestCase(PosixDatastoreTestCase):
    """Posix datastore tests but with checksums disabled."""
    configFile = os.path.join(TESTDIR, "config/basic/posixDatastoreNoChecksums.yaml")

    def testChecksum(self):
        """Ensure that checksums have not been calculated."""

        datastore = self.makeDatastore()
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        metrics = makeExampleMetrics()

        dataId = {"instrument": "dummy", "visit": 0, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                  conform=False)

        # Configuration should have disabled checksum calculation
        datastore.put(metrics, ref)
        info = datastore.getStoredItemInfo(ref)
        self.assertIsNone(info.checksum)

        # Remove and put back, but this time with checksums enabled
        # explicitly
        datastore.remove(ref)
        datastore.useChecksum = True
        datastore.put(metrics, ref)

        info = datastore.getStoredItemInfo(ref)
        self.assertIsNotNone(info.checksum)


class CleanupPosixDatastoreTestCase(DatastoreTestsBase, unittest.TestCase):
    configFile = os.path.join(TESTDIR, "config/basic/butler.yaml")

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()

    def testCleanup(self):
        """Test that a failed formatter write cleans up a partial file."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        storageClass = self.storageClassFactory.getStorageClass("StructuredData")

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}

        ref = self.makeDatasetRef("metric", dimensions, storageClass, dataId, conform=False)

        # Determine where the file will end up (we assume Formatters use
        # the same file extension)
        expectedUri = datastore.getUri(ref, predict=True)
        self.assertTrue(expectedUri.endswith(".yaml#predicted"),
                        f"Is there a file extension in {expectedUri}")

        # Convert to ButlerURI so we can extract the path component
        expectedUri = ButlerURI(expectedUri)
        expectedFile = expectedUri.path

        # Try formatters that fail during write, both one that leaves a
        # partial file behind and one that does not
        for formatter in (BadWriteFormatter, BadNoWriteFormatter):
            with self.subTest(formatter=formatter):

                # Monkey patch the formatter
                datastore.formatterFactory.registerFormatter(ref.datasetType, formatter,
                                                             overwrite=True)

                # Try to put the dataset, it should fail
                with self.assertRaises(Exception):
                    datastore.put(metrics, ref)

                # Check that there is no file on disk
                self.assertFalse(os.path.exists(expectedFile), f"Check for existence of {expectedFile}")

                # Check that there is a directory
                self.assertTrue(os.path.exists(os.path.dirname(expectedFile)),
                                f"Check for existence of directory {os.path.dirname(expectedFile)}")

        # Force YamlFormatter and check that this time a file is written
        datastore.formatterFactory.registerFormatter(ref.datasetType, YamlFormatter,
                                                     overwrite=True)
        datastore.put(metrics, ref)
        self.assertTrue(os.path.exists(expectedFile), f"Check for existence of {expectedFile}")
        datastore.remove(ref)
        self.assertFalse(os.path.exists(expectedFile), f"Check for existence of now removed {expectedFile}")


class InMemoryDatastoreTestCase(DatastoreTests, unittest.TestCase):
    """InMemoryDatastore specialization"""
    configFile = os.path.join(TESTDIR, "config/basic/inMemoryDatastore.yaml")
    uriScheme = "mem:"
    hasUnsupportedPut = False
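    # An in-memory datastore cannot ingest files from disk.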
    ingestTransferModes = ()
    isEphemeral = True
    rootKeys = None
    validationCanFail = False


class ChainedDatastoreTestCase(PosixDatastoreTestCase):
    """ChainedDatastore specialization using a PosixDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastore.yaml")
    hasUnsupportedPut = False
    canIngestNoTransferAuto = False
    ingestTransferModes = ("copy", "hardlink", "symlink", "link", "auto")
    isEphemeral = False
    rootKeys = (".datastores.1.root", ".datastores.2.root")
    validationCanFail = True


class ChainedDatastoreMemoryTestCase(InMemoryDatastoreTestCase):
    """ChainedDatastore specialization using all InMemoryDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastore2.yaml")
    validationCanFail = False


class DatastoreConstraintsTests(DatastoreTestsBase):
    """Basic tests of the constraints model of Datastores."""

    def testConstraints(self):
        """Test constraints model.  Assumes that each test class has the
        same constraints."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        sc1 = self.storageClassFactory.getStorageClass("StructuredData")
        sc2 = self.storageClassFactory.getStorageClass("StructuredDataJson")
        dimensions = self.universe.extract(("visit", "physical_filter", "instrument"))
        dataId = {"visit": 52, "physical_filter": "V", "instrument": "DummyCamComp"}

        # Write empty file suitable for ingest check
        testfile = tempfile.NamedTemporaryFile()
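        # Each tuple gives a dataset type name, a storage class, and
        # whether the constraints configuration should accept it.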
        for datasetTypeName, sc, accepted in (("metric", sc1, True), ("metric2", sc1, False),
                                              ("metric33", sc1, True), ("metric2", sc2, True)):
            with self.subTest(datasetTypeName=datasetTypeName):
                ref = self.makeDatasetRef(datasetTypeName, dimensions, sc, dataId, conform=False)
                if accepted:
                    datastore.put(metrics, ref)
                    self.assertTrue(datastore.exists(ref))
                    datastore.remove(ref)

                    # Try ingest
                    if self.canIngest:
                        datastore.ingest(FileDataset(testfile.name, [ref]), transfer="link")
                        self.assertTrue(datastore.exists(ref))
                        datastore.remove(ref)
                else:
                    with self.assertRaises(DatasetTypeNotSupportedError):
                        datastore.put(metrics, ref)
                    self.assertFalse(datastore.exists(ref))

                    # Again with ingest
                    if self.canIngest:
                        with self.assertRaises(DatasetTypeNotSupportedError):
                            datastore.ingest(FileDataset(testfile.name, [ref]), transfer="link")
                        self.assertFalse(datastore.exists(ref))


class PosixDatastoreConstraintsTestCase(DatastoreConstraintsTests, unittest.TestCase):
    """PosixDatastore specialization"""
    configFile = os.path.join(TESTDIR, "config/basic/posixDatastoreP.yaml")
    canIngest = True

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()


class InMemoryDatastoreConstraintsTestCase(DatastoreConstraintsTests, unittest.TestCase):
    """InMemoryDatastore specialization"""
    configFile = os.path.join(TESTDIR, "config/basic/inMemoryDatastoreP.yaml")
    canIngest = False


class ChainedDatastoreConstraintsNativeTestCase(PosixDatastoreConstraintsTestCase):
    """ChainedDatastore specialization using a PosixDatastore and constraints
    at the ChainedDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastorePa.yaml")


class ChainedDatastoreConstraintsTestCase(PosixDatastoreConstraintsTestCase):
    """ChainedDatastore specialization using a PosixDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastoreP.yaml")


class ChainedDatastoreMemoryConstraintsTestCase(InMemoryDatastoreConstraintsTestCase):
    """ChainedDatastore specialization using all InMemoryDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastore2P.yaml")
    canIngest = False


class ChainedDatastorePerStoreConstraintsTests(DatastoreTestsBase, unittest.TestCase):
    """Test that a chained datastore can control constraints per-datastore
    even if the child datastore would accept the dataset."""

    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastorePb.yaml")

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()

    def testConstraints(self):
        """Test chained datastore constraints model."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        sc1 = self.storageClassFactory.getStorageClass("StructuredData")
        sc2 = self.storageClassFactory.getStorageClass("StructuredDataJson")
        dimensions = self.universe.extract(("visit", "physical_filter", "instrument"))
        dataId1 = {"visit": 52, "physical_filter": "V", "instrument": "DummyCamComp"}
        dataId2 = {"visit": 52, "physical_filter": "V", "instrument": "HSC"}

        # Write empty file suitable for ingest check
        testfile = tempfile.NamedTemporaryFile()
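        # accept gives, for each child datastore in the chain, whether
        # that datastore should accept the dataset; ingest says whether
        # the chain as a whole should accept it on ingest.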
        for typeName, dataId, sc, accept, ingest in (("metric", dataId1, sc1, (False, True, False), True),
                                                     ("metric2", dataId1, sc1, (False, False, False), False),
                                                     ("metric2", dataId2, sc1, (True, False, False), False),
                                                     ("metric33", dataId2, sc2, (True, True, False), True),
                                                     ("metric2", dataId1, sc2, (False, True, False), True)):
            with self.subTest(datasetTypeName=typeName, dataId=dataId, sc=sc.name):
                ref = self.makeDatasetRef(typeName, dimensions, sc, dataId,
                                          conform=False)
                if any(accept):
                    datastore.put(metrics, ref)
                    self.assertTrue(datastore.exists(ref))

                    # Check each datastore inside the chained datastore
                    for childDatastore, expected in zip(datastore.datastores, accept):
                        self.assertEqual(childDatastore.exists(ref), expected,
                                         f"Testing presence of {ref} in datastore {childDatastore.name}")

                    datastore.remove(ref)

                    # Check that ingest works
                    if ingest:
                        datastore.ingest(FileDataset(testfile.name, [ref]), transfer="link")
                        self.assertTrue(datastore.exists(ref))

                        # Check each datastore inside the chained datastore
                        for childDatastore, expected in zip(datastore.datastores, accept):
                            # At the moment an ephemeral datastore means
                            # InMemory, which does not accept ingest of files.
                            if childDatastore.isEphemeral:
                                expected = False
                            self.assertEqual(childDatastore.exists(ref), expected,
                                             f"Testing presence of ingested {ref} in datastore"
                                             f" {childDatastore.name}")

                        datastore.remove(ref)
                    else:
                        with self.assertRaises(DatasetTypeNotSupportedError):
                            datastore.ingest(FileDataset(testfile.name, [ref]), transfer="link")

                else:
                    with self.assertRaises(DatasetTypeNotSupportedError):
                        datastore.put(metrics, ref)
                    self.assertFalse(datastore.exists(ref))

                    # Again with ingest
                    with self.assertRaises(DatasetTypeNotSupportedError):
                        datastore.ingest(FileDataset(testfile.name, [ref]), transfer="link")
                    self.assertFalse(datastore.exists(ref))


if __name__ == "__main__":
    unittest.main()