# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
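
"""Tests for the Datastore implementations provided with daf_butler."""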

import os
import unittest
import shutil
import yaml
import tempfile

import lsst.utils.tests

from lsst.utils import doImport

from lsst.daf.butler import StorageClassFactory, StorageClass, DimensionUniverse, FileDataset
from lsst.daf.butler import DatastoreConfig, DatasetTypeNotSupportedError, DatastoreValidationError
from lsst.daf.butler import ButlerURI
from lsst.daf.butler.formatters.yamlFormatter import YamlFormatter

from lsst.daf.butler.tests import (DatasetTestHelper, DatastoreTestHelper, BadWriteFormatter,
                                   BadNoWriteFormatter, MetricsExample, DummyRegistry)


TESTDIR = os.path.dirname(__file__)


def makeExampleMetrics(use_none=False):
    """Return an example `MetricsExample`, optionally with the array
    component set to `None`."""
    if use_none:
        array = None
    else:
        array = [563, 234, 456.7]
    return MetricsExample({"AM1": 5.2, "AM2": 30.6},
                          {"a": [1, 2, 3],
                           "b": {"blue": 5, "red": "green"}},
                          array,
                          )


class TransactionTestError(Exception):
    """Specific error for transactions, to prevent the misdiagnosis
    that might otherwise occur when a standard exception is used.
    """
    pass


class DatastoreTestsBase(DatasetTestHelper, DatastoreTestHelper):
    """Support routines for datastore testing"""
    root = None

    @classmethod
    def setUpClass(cls):
        # Storage Classes are fixed for all datastores in these tests
        scConfigFile = os.path.join(TESTDIR, "config/basic/storageClasses.yaml")
        cls.storageClassFactory = StorageClassFactory()
        cls.storageClassFactory.addFromConfig(scConfigFile)

        # Read the Datastore config so we can get the class
        # information (since we should not assume the constructor
        # name here, but rely on the configuration file itself)
        datastoreConfig = DatastoreConfig(cls.configFile)
        cls.datastoreType = doImport(datastoreConfig["cls"])
        cls.universe = DimensionUniverse()

    def setUp(self):
        self.setUpDatastoreTests(DummyRegistry, DatastoreConfig)

    def tearDown(self):
        if self.root is not None and os.path.exists(self.root):
            shutil.rmtree(self.root, ignore_errors=True)


class DatastoreTests(DatastoreTestsBase):
    """Some basic tests of a simple datastore."""

    hasUnsupportedPut = True
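
    # Concrete subclasses are expected to define configFile, uriScheme,
    # ingestTransferModes, canIngestNoTransferAuto, isEphemeral, rootKeys
    # and validationCanFail (see the TestCase classes below).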

    def testConfigRoot(self):
        full = DatastoreConfig(self.configFile)
        config = DatastoreConfig(self.configFile, mergeDefaults=False)
        newroot = "/random/location"
        self.datastoreType.setConfigRoot(newroot, config, full)
        if self.rootKeys:
            for k in self.rootKeys:
                self.assertIn(newroot, config[k])

    def testConstructor(self):
        datastore = self.makeDatastore()
        self.assertIsNotNone(datastore)
        self.assertIs(datastore.isEphemeral, self.isEphemeral)

    def testConfigurationValidation(self):
        datastore = self.makeDatastore()
        sc = self.storageClassFactory.getStorageClass("ThingOne")
        datastore.validateConfiguration([sc])

        sc2 = self.storageClassFactory.getStorageClass("ThingTwo")
        if self.validationCanFail:
            with self.assertRaises(DatastoreValidationError):
                datastore.validateConfiguration([sc2], logFailures=True)

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
        datastore.validateConfiguration([ref])

    def testParameterValidation(self):
        """Check that parameters are validated"""
        sc = self.storageClassFactory.getStorageClass("ThingOne")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
        datastore = self.makeDatastore()
        data = {1: 2, 3: 4}
        datastore.put(data, ref)
        newdata = datastore.get(ref)
        self.assertEqual(data, newdata)
        with self.assertRaises(KeyError):
            newdata = datastore.get(ref, parameters={"missing": 5})

    def testBasicPutGet(self):
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        # Create multiple storage classes for testing different formulations
        storageClasses = [self.storageClassFactory.getStorageClass(sc)
                          for sc in ("StructuredData",
                                     "StructuredDataJson",
                                     "StructuredDataPickle")]

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}

        for sc in storageClasses:
            ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
            print("Using storageClass: {}".format(sc.name))
            datastore.put(metrics, ref)

            # Does it exist?
            self.assertTrue(datastore.exists(ref))

            # Get
            metricsOut = datastore.get(ref, parameters=None)
            self.assertEqual(metrics, metricsOut)

            uri = datastore.getUri(ref)
            self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

            # Get a component -- we need to construct new refs for them
            # with derived storage classes but with the parent ID
            for comp in ("data", "output"):
                compRef = ref.components[comp]
                output = datastore.get(compRef)
                self.assertEqual(output, getattr(metricsOut, comp))

                uri = datastore.getUri(compRef)
                self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

        storageClass = sc

        # Check that we can put a metric with None in a component and
        # get it back as None
        metricsNone = makeExampleMetrics(use_none=True)
        dataIdNone = {"instrument": "dummy", "visit": 54, "physical_filter": "V"}
        refNone = self.makeDatasetRef("metric", dimensions, sc, dataIdNone, conform=False)
        datastore.put(metricsNone, refNone)

        for comp in ("data", "output"):
            compRef = refNone.components[comp]
            output = datastore.get(compRef)
            self.assertEqual(output, getattr(metricsNone, comp))

        # Check that a put fails if the dataset type is not supported
        if self.hasUnsupportedPut:
            sc = StorageClass("UnsupportedSC", pytype=type(metrics))
            ref = self.makeDatasetRef("unsupportedType", dimensions, sc, dataId)
            with self.assertRaises(DatasetTypeNotSupportedError):
                datastore.put(metrics, ref)

        # These should raise
        ref = self.makeDatasetRef("metrics", dimensions, storageClass, dataId, id=10000)
        with self.assertRaises(FileNotFoundError):
            # non-existing file
            datastore.get(ref)

        # Get a URI from it
        uri = datastore.getUri(ref, predict=True)
        self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

        with self.assertRaises(FileNotFoundError):
            datastore.getUri(ref)

    def testCompositePutGet(self):
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        # Create multiple storage classes for testing different formulations
        # of composites
        storageClasses = [self.storageClassFactory.getStorageClass(sc)
                          for sc in ("StructuredComposite",
                                     "StructuredCompositeTestA",
                                     "StructuredCompositeTestB")]

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 428, "physical_filter": "R"}

        for sc in storageClasses:
            print("Using storageClass: {}".format(sc.name))
            ref = self.makeDatasetRef("metric", dimensions, sc, dataId,
                                      conform=False)

            components = sc.assembler().disassemble(metrics)
            self.assertTrue(components)

            compsRead = {}
            for compName, compInfo in components.items():
                compRef = self.makeDatasetRef(ref.datasetType.componentTypeName(compName), dimensions,
                                              components[compName].storageClass, dataId,
                                              conform=False)

                print("Writing component {} with {}".format(compName, compRef.datasetType.storageClass.name))
                datastore.put(compInfo.component, compRef)

                uri = datastore.getUri(compRef)
                self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

                compsRead[compName] = datastore.get(compRef)

                # Each storage class can generate an identical file, so
                # remove the component here before the next iteration
                datastore.remove(compRef)

            # combine all the components we read back into a new composite
            metricsOut = sc.assembler().assemble(compsRead)
            self.assertEqual(metrics, metricsOut)

    def testRemove(self):
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()
        # Put
        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 638, "physical_filter": "U"}

        sc = self.storageClassFactory.getStorageClass("StructuredData")
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
        datastore.put(metrics, ref)

        # Does it exist?
        self.assertTrue(datastore.exists(ref))

        # Get
        metricsOut = datastore.get(ref)
        self.assertEqual(metrics, metricsOut)
        # Remove
        datastore.remove(ref)

        # Does it exist?
        self.assertFalse(datastore.exists(ref))

        # Do we now get a predicted URI?
        uri = datastore.getUri(ref, predict=True)
        self.assertTrue(uri.endswith("#predicted"))

        # Get should now fail
        with self.assertRaises(FileNotFoundError):
            datastore.get(ref)
        # Can only delete once
        with self.assertRaises(FileNotFoundError):
            datastore.remove(ref)

    def testTransfer(self):
        metrics = makeExampleMetrics()

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 2048, "physical_filter": "Uprime"}

        sc = self.storageClassFactory.getStorageClass("StructuredData")
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)

        inputDatastore = self.makeDatastore("test_input_datastore")
        outputDatastore = self.makeDatastore("test_output_datastore")

        inputDatastore.put(metrics, ref)
        outputDatastore.transfer(inputDatastore, ref)

        metricsOut = outputDatastore.get(ref)
        self.assertEqual(metrics, metricsOut)

    def testBasicTransaction(self):
        datastore = self.makeDatastore()
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        nDatasets = 6
        dataIds = [{"instrument": "dummy", "visit": i, "physical_filter": "V"} for i in range(nDatasets)]
        data = [(self.makeDatasetRef("metric", dimensions, storageClass, dataId, conform=False),
                 makeExampleMetrics(),)
                for dataId in dataIds]
        succeed = data[:nDatasets//2]
        fail = data[nDatasets//2:]
        # All datasets added in this transaction should continue to exist
        with datastore.transaction():
            for ref, metrics in succeed:
                datastore.put(metrics, ref)
        # Whereas datasets added in this transaction should not
        with self.assertRaises(TransactionTestError):
            with datastore.transaction():
                for ref, metrics in fail:
                    datastore.put(metrics, ref)
                raise TransactionTestError("This should propagate out of the context manager")
        # Check for datasets that should exist
        for ref, metrics in succeed:
            # Does it exist?
            self.assertTrue(datastore.exists(ref))
            # Get
            metricsOut = datastore.get(ref, parameters=None)
            self.assertEqual(metrics, metricsOut)
            # URI
            uri = datastore.getUri(ref)
            self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)
        # Check for datasets that should not exist
        for ref, _ in fail:
            # These should raise
            with self.assertRaises(FileNotFoundError):
                # non-existing file
                datastore.get(ref)
            with self.assertRaises(FileNotFoundError):
                datastore.getUri(ref)

    def testNestedTransaction(self):
        datastore = self.makeDatastore()
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        metrics = makeExampleMetrics()

        dataId = {"instrument": "dummy", "visit": 0, "physical_filter": "V"}
        refBefore = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                        conform=False)
        datastore.put(metrics, refBefore)
        with self.assertRaises(TransactionTestError):
            with datastore.transaction():
                dataId = {"instrument": "dummy", "visit": 1, "physical_filter": "V"}
                refOuter = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                               conform=False)
                datastore.put(metrics, refOuter)
                with datastore.transaction():
                    dataId = {"instrument": "dummy", "visit": 2, "physical_filter": "V"}
                    refInner = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                                   conform=False)
                    datastore.put(metrics, refInner)
                # All datasets should exist
                for ref in (refBefore, refOuter, refInner):
                    metricsOut = datastore.get(ref, parameters=None)
                    self.assertEqual(metrics, metricsOut)
                raise TransactionTestError("This should roll back the transaction")
        # Dataset(s) inserted before the transaction should still exist
        metricsOut = datastore.get(refBefore, parameters=None)
        self.assertEqual(metrics, metricsOut)
        # But all datasets inserted during the (rolled back) transaction
        # should be gone
        with self.assertRaises(FileNotFoundError):
            datastore.get(refOuter)
        with self.assertRaises(FileNotFoundError):
            datastore.get(refInner)

    def _prepareIngestTest(self):
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        metrics = makeExampleMetrics()
        dataId = {"instrument": "dummy", "visit": 0, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, storageClass, dataId, conform=False)
        return metrics, ref
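
    # runIngestTest writes the example metrics to a temporary YAML file and
    # passes (metrics, path, ref) on to the supplied check function.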
    def runIngestTest(self, func, expectOutput=True):
        metrics, ref = self._prepareIngestTest()
        with lsst.utils.tests.getTempFilePath(".yaml", expectOutput=expectOutput) as path:
            with open(path, 'w') as fd:
                yaml.dump(metrics._asdict(), stream=fd)
            func(metrics, path, ref)

    def testIngestNoTransfer(self):
        """Test ingesting existing files with no transfer.
        """
        for mode in (None, "auto"):

            # Some datastores have "auto" but can't do an in-place transfer
            if mode == "auto" and "auto" in self.ingestTransferModes and not self.canIngestNoTransferAuto:
                continue

            with self.subTest(mode=mode):
                datastore = self.makeDatastore()

                def succeed(obj, path, ref):
                    """Ingest a file already in the datastore root."""
                    # first move it into the root, and adjust the path
                    # accordingly
                    path = shutil.copy(path, datastore.root)
                    path = os.path.relpath(path, start=datastore.root)
                    datastore.ingest(FileDataset(path=path, refs=ref), transfer=mode)
                    self.assertEqual(obj, datastore.get(ref))

                def failInputDoesNotExist(obj, path, ref):
                    """Can't ingest files if we're given a bad path."""
                    with self.assertRaises(FileNotFoundError):
                        datastore.ingest(FileDataset(path="this-file-does-not-exist.yaml", refs=ref),
                                         transfer=mode)
                    self.assertFalse(datastore.exists(ref))

                def failOutsideRoot(obj, path, ref):
                    """Can't ingest files outside of the datastore root
                    unless mode is "auto"."""
                    if mode == "auto":
                        datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)
                        self.assertTrue(datastore.exists(ref))
                    else:
                        with self.assertRaises(RuntimeError):
                            datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)
                        self.assertFalse(datastore.exists(ref))

                def failNotImplemented(obj, path, ref):
                    with self.assertRaises(NotImplementedError):
                        datastore.ingest(FileDataset(path=path, refs=ref), transfer=mode)

                if mode in self.ingestTransferModes:
                    self.runIngestTest(failOutsideRoot)
                    self.runIngestTest(failInputDoesNotExist)
                    self.runIngestTest(succeed)
                else:
                    self.runIngestTest(failNotImplemented)

    def testIngestTransfer(self):
        """Test ingesting existing files after transferring them.
        """
        for mode in ("copy", "move", "link", "hardlink", "symlink", "relsymlink", "auto"):
            with self.subTest(mode=mode):
                datastore = self.makeDatastore(mode)

                def succeed(obj, path, ref):
                    """Ingest a file by transferring it to the template
                    location."""
                    datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)
                    self.assertEqual(obj, datastore.get(ref))

                def failInputDoesNotExist(obj, path, ref):
                    """Can't ingest files if we're given a bad path."""
                    with self.assertRaises(FileNotFoundError):
                        # Ensure the file does not look like it is in
                        # the datastore root, for "auto" mode
                        datastore.ingest(FileDataset(path="../this-file-does-not-exist.yaml", refs=ref),
                                         transfer=mode)
                    self.assertFalse(datastore.exists(ref))

                def failOutputExists(obj, path, ref):
                    """Can't ingest files if the transfer destination already
                    exists."""
                    with self.assertRaises(FileExistsError):
                        datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)
                    self.assertFalse(datastore.exists(ref))

                def failNotImplemented(obj, path, ref):
                    with self.assertRaises(NotImplementedError):
                        datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)

                if mode in self.ingestTransferModes:
                    self.runIngestTest(failInputDoesNotExist)
                    self.runIngestTest(succeed, expectOutput=(mode != "move"))
                    self.runIngestTest(failOutputExists)
                else:
                    self.runIngestTest(failNotImplemented)

    def testIngestSymlinkOfSymlink(self):
        """Special test for ingest of a symlink to a symlink"""
        metrics, ref = self._prepareIngestTest()
        # The aim of this test is to create a dataset on disk, then
        # create a symlink to it and finally ingest the symlink such that
        # the symlink in the datastore points to the original dataset.
        for mode in ("symlink", "relsymlink"):
            if mode not in self.ingestTransferModes:
                continue

            print(f"Trying mode {mode}")
            with lsst.utils.tests.getTempFilePath(".yaml") as realpath:
                with open(realpath, 'w') as fd:
                    yaml.dump(metrics._asdict(), stream=fd)
                with lsst.utils.tests.getTempFilePath(".yaml") as sympath:
                    os.symlink(os.path.abspath(realpath), sympath)

                    datastore = self.makeDatastore()
                    datastore.ingest(FileDataset(path=os.path.abspath(sympath), refs=ref), transfer=mode)

                    uri = ButlerURI(datastore.getUri(ref))
                    self.assertTrue(not uri.scheme or uri.scheme == "file", f"Check {uri.scheme}")
                    self.assertTrue(os.path.islink(uri.path))

                    linkTarget = os.readlink(uri.path)
                    if mode == "relsymlink":
                        self.assertFalse(os.path.isabs(linkTarget))
                    else:
                        self.assertEqual(linkTarget, os.path.abspath(realpath))

                    # Check that we can get the dataset back regardless of mode
                    metric2 = datastore.get(ref)
                    self.assertEqual(metric2, metrics)

                    # Clean up the file for the next time around the loop,
                    # since it will get the same file name in the store
                    datastore.remove(ref)


class PosixDatastoreTestCase(DatastoreTests, unittest.TestCase):
    """PosixDatastore specialization"""
    configFile = os.path.join(TESTDIR, "config/basic/butler.yaml")
    uriScheme = "file:"
    canIngestNoTransferAuto = True
    ingestTransferModes = (None, "copy", "move", "link", "hardlink", "symlink", "relsymlink", "auto")
    isEphemeral = False
    rootKeys = ("root",)
    validationCanFail = True

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()


class PosixDatastoreNoChecksumsTestCase(PosixDatastoreTestCase):
    """Posix datastore tests but with checksums disabled."""
    configFile = os.path.join(TESTDIR, "config/basic/posixDatastoreNoChecksums.yaml")

    def testChecksum(self):
        """Ensure that checksums have not been calculated."""

        datastore = self.makeDatastore()
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        metrics = makeExampleMetrics()

        dataId = {"instrument": "dummy", "visit": 0, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                  conform=False)

        # Configuration should have disabled checksum calculation
        datastore.put(metrics, ref)
        info = datastore.getStoredItemInfo(ref)
        self.assertIsNone(info.checksum)

        # Remove and put back, but with checksums enabled explicitly
        datastore.remove(ref)
        datastore.useChecksum = True
        datastore.put(metrics, ref)

        info = datastore.getStoredItemInfo(ref)
        self.assertIsNotNone(info.checksum)


class CleanupPosixDatastoreTestCase(DatastoreTestsBase, unittest.TestCase):
    configFile = os.path.join(TESTDIR, "config/basic/butler.yaml")

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()

    def testCleanup(self):
        """Test that a failed formatter write does clean up a partial file."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        storageClass = self.storageClassFactory.getStorageClass("StructuredData")

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}

        ref = self.makeDatasetRef("metric", dimensions, storageClass, dataId, conform=False)

        # Determine where the file will end up (we assume Formatters use
        # the same file extension)
        expectedUri = datastore.getUri(ref, predict=True)
        self.assertTrue(expectedUri.endswith(".yaml#predicted"),
                        f"Is there a file extension in {expectedUri}")

        # Convert to ButlerURI so we can extract the path component
        expectedUri = ButlerURI(expectedUri)
        expectedFile = expectedUri.path

        # Try a formatter that fails cleanly and a formatter that fails
        # but leaves a file behind
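        # (BadNoWriteFormatter is assumed to raise before writing anything,
        # while BadWriteFormatter is assumed to leave a partial file behind
        # that the datastore has to clean up)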
        for formatter in (BadWriteFormatter, BadNoWriteFormatter):
            with self.subTest(formatter=formatter):

                # Monkey patch the formatter
                datastore.formatterFactory.registerFormatter(ref.datasetType, formatter,
                                                             overwrite=True)

                # Try to put the dataset, it should fail
                with self.assertRaises(Exception):
                    datastore.put(metrics, ref)

                # Check that there is no file on disk
                self.assertFalse(os.path.exists(expectedFile), f"Check for existence of {expectedFile}")

                # Check that there is a directory
                self.assertTrue(os.path.exists(os.path.dirname(expectedFile)),
                                f"Check for existence of directory {os.path.dirname(expectedFile)}")

        # Force YamlFormatter and check that this time a file is written
        datastore.formatterFactory.registerFormatter(ref.datasetType, YamlFormatter,
                                                     overwrite=True)
        datastore.put(metrics, ref)
        self.assertTrue(os.path.exists(expectedFile), f"Check for existence of {expectedFile}")
        datastore.remove(ref)
        self.assertFalse(os.path.exists(expectedFile), f"Check for existence of now removed {expectedFile}")


class InMemoryDatastoreTestCase(DatastoreTests, unittest.TestCase):
    """InMemoryDatastore specialization"""
    configFile = os.path.join(TESTDIR, "config/basic/inMemoryDatastore.yaml")
    uriScheme = "mem:"
    hasUnsupportedPut = False
    ingestTransferModes = ()
    isEphemeral = True
    rootKeys = None
    validationCanFail = False


class ChainedDatastoreTestCase(PosixDatastoreTestCase):
    """ChainedDatastore specialization using a PosixDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastore.yaml")
    hasUnsupportedPut = False
    canIngestNoTransferAuto = False
    ingestTransferModes = ("copy", "hardlink", "symlink", "relsymlink", "link", "auto")
    isEphemeral = False
    rootKeys = (".datastores.1.root", ".datastores.2.root")
    validationCanFail = True


class ChainedDatastoreMemoryTestCase(InMemoryDatastoreTestCase):
    """ChainedDatastore specialization using all InMemoryDatastores"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastore2.yaml")
    validationCanFail = False


class DatastoreConstraintsTests(DatastoreTestsBase):
    """Basic tests of the constraints model of Datastores."""

    def testConstraints(self):
        """Test the constraints model. Assumes that each test class has the
        same constraints."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        sc1 = self.storageClassFactory.getStorageClass("StructuredData")
        sc2 = self.storageClassFactory.getStorageClass("StructuredDataJson")
        dimensions = self.universe.extract(("visit", "physical_filter", "instrument"))
        dataId = {"visit": 52, "physical_filter": "V", "instrument": "DummyCamComp"}

        # Write empty file suitable for ingest check
        testfile = tempfile.NamedTemporaryFile()
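
        # The dataset type names and storage classes below are assumed to
        # match the constraints configured in the "P" datastore config files
        # used by the concrete test cases.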
        for datasetTypeName, sc, accepted in (("metric", sc1, True), ("metric2", sc1, False),
                                              ("metric33", sc1, True), ("metric2", sc2, True)):
            with self.subTest(datasetTypeName=datasetTypeName):
                ref = self.makeDatasetRef(datasetTypeName, dimensions, sc, dataId, conform=False)
                if accepted:
                    datastore.put(metrics, ref)
                    self.assertTrue(datastore.exists(ref))
                    datastore.remove(ref)

                    # Try ingest
                    if self.canIngest:
                        datastore.ingest(FileDataset(testfile.name, [ref]), transfer="link")
                        self.assertTrue(datastore.exists(ref))
                        datastore.remove(ref)
                else:
                    with self.assertRaises(DatasetTypeNotSupportedError):
                        datastore.put(metrics, ref)
                    self.assertFalse(datastore.exists(ref))

                    # Again with ingest
                    if self.canIngest:
                        with self.assertRaises(DatasetTypeNotSupportedError):
                            datastore.ingest(FileDataset(testfile.name, [ref]), transfer="link")
                        self.assertFalse(datastore.exists(ref))


class PosixDatastoreConstraintsTestCase(DatastoreConstraintsTests, unittest.TestCase):
    """PosixDatastore specialization"""
    configFile = os.path.join(TESTDIR, "config/basic/posixDatastoreP.yaml")
    canIngest = True

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()


class InMemoryDatastoreConstraintsTestCase(DatastoreConstraintsTests, unittest.TestCase):
    """InMemoryDatastore specialization"""
    configFile = os.path.join(TESTDIR, "config/basic/inMemoryDatastoreP.yaml")
    canIngest = False


class ChainedDatastoreConstraintsNativeTestCase(PosixDatastoreConstraintsTestCase):
    """ChainedDatastore specialization using a PosixDatastore and constraints
    at the ChainedDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastorePa.yaml")


class ChainedDatastoreConstraintsTestCase(PosixDatastoreConstraintsTestCase):
    """ChainedDatastore specialization using a PosixDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastoreP.yaml")


class ChainedDatastoreMemoryConstraintsTestCase(InMemoryDatastoreConstraintsTestCase):
    """ChainedDatastore specialization using all InMemoryDatastores"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastore2P.yaml")
    canIngest = False


class ChainedDatastorePerStoreConstraintsTests(DatastoreTestsBase, unittest.TestCase):
    """Test that a chained datastore can control constraints per-datastore
    even if a child datastore would accept the dataset."""

    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastorePb.yaml")

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()

    def testConstraints(self):
        """Test the chained datastore constraints model."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        sc1 = self.storageClassFactory.getStorageClass("StructuredData")
        sc2 = self.storageClassFactory.getStorageClass("StructuredDataJson")
        dimensions = self.universe.extract(("visit", "physical_filter", "instrument"))
        dataId1 = {"visit": 52, "physical_filter": "V", "instrument": "DummyCamComp"}
        dataId2 = {"visit": 52, "physical_filter": "V", "instrument": "HSC"}

        # Write empty file suitable for ingest check
        testfile = tempfile.NamedTemporaryFile()
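
        # Each boolean in "accept" maps, in order, onto one child datastore
        # of the chain and records whether that child should end up holding
        # the dataset.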
        for typeName, dataId, sc, accept, ingest in (("metric", dataId1, sc1, (False, True, False), True),
                                                     ("metric2", dataId1, sc1, (False, False, False), False),
                                                     ("metric2", dataId2, sc1, (True, False, False), False),
                                                     ("metric33", dataId2, sc2, (True, True, False), True),
                                                     ("metric2", dataId1, sc2, (False, True, False), True)):
            with self.subTest(datasetTypeName=typeName, dataId=dataId, sc=sc.name):
                ref = self.makeDatasetRef(typeName, dimensions, sc, dataId,
                                          conform=False)
                if any(accept):
                    datastore.put(metrics, ref)
                    self.assertTrue(datastore.exists(ref))

                    # Check each datastore inside the chained datastore
                    for childDatastore, expected in zip(datastore.datastores, accept):
                        self.assertEqual(childDatastore.exists(ref), expected,
                                         f"Testing presence of {ref} in datastore {childDatastore.name}")

                    datastore.remove(ref)

                    # Check that ingest works
                    if ingest:
                        datastore.ingest(FileDataset(testfile.name, [ref]), transfer="link")
                        self.assertTrue(datastore.exists(ref))

                        # Check each datastore inside the chained datastore
                        for childDatastore, expected in zip(datastore.datastores, accept):
                            # Ephemeral datastores currently means
                            # InMemoryDatastore, which does not accept
                            # ingest of files.
                            if childDatastore.isEphemeral:
                                expected = False
                            self.assertEqual(childDatastore.exists(ref), expected,
                                             f"Testing presence of ingested {ref} in datastore"
                                             f" {childDatastore.name}")

                        datastore.remove(ref)
                    else:
                        with self.assertRaises(DatasetTypeNotSupportedError):
                            datastore.ingest(FileDataset(testfile.name, [ref]), transfer="link")

                else:
                    with self.assertRaises(DatasetTypeNotSupportedError):
                        datastore.put(metrics, ref)
                    self.assertFalse(datastore.exists(ref))

                    # Again with ingest
                    with self.assertRaises(DatasetTypeNotSupportedError):
                        datastore.ingest(FileDataset(testfile.name, [ref]), transfer="link")
                    self.assertFalse(datastore.exists(ref))


if __name__ == "__main__":
    unittest.main()