Coverage for tests/test_datastore.py : 17%

Hot-keys on this page
r m x p toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <http://www.gnu.org/licenses/>.
22import os
23import unittest
24import shutil
25import yaml
26import tempfile
27import lsst.utils
29from lsst.utils import doImport
31from lsst.daf.butler import StorageClassFactory, StorageClass, DimensionUniverse, FileDataset
32from lsst.daf.butler import DatastoreConfig, DatasetTypeNotSupportedError, DatastoreValidationError
33from lsst.daf.butler import ButlerURI
34from lsst.daf.butler.formatters.yamlFormatter import YamlFormatter
36from lsst.daf.butler.tests import (DatasetTestHelper, DatastoreTestHelper, BadWriteFormatter,
37 BadNoWriteFormatter, MetricsExample, DummyRegistry)
# Directory containing this test file; used to locate the test config files.
TESTDIR = os.path.dirname(__file__)
def makeExampleMetrics(use_none=False):
    """Construct a `MetricsExample` with fixed test values.

    Parameters
    ----------
    use_none : `bool`, optional
        If `True` the array component is `None` rather than a list of
        numbers.

    Returns
    -------
    metrics : `MetricsExample`
        Example metrics object for use in the tests.
    """
    array = None if use_none else [563, 234, 456.7]
    summary = {"AM1": 5.2, "AM2": 30.6}
    output = {"a": [1, 2, 3],
              "b": {"blue": 5, "red": "green"}}
    return MetricsExample(summary, output, array)
class TransactionTestError(Exception):
    """Specific error for transactions, to prevent misdiagnosing
    that might otherwise occur when a standard exception is used.
    """
    # No body needed beyond the docstring; the redundant ``pass`` was removed.
class DatastoreTestsBase(DatasetTestHelper, DatastoreTestHelper):
    """Support routines for datastore testing"""

    # Root directory for on-disk datastores; subclasses that write to disk
    # override this in setUp with a temporary directory.
    root = None

    @classmethod
    def setUpClass(cls):
        """Load the shared storage class definitions and resolve the
        datastore class named in the test configuration file."""
        # Storage Classes are fixed for all datastores in these tests
        scConfigFile = os.path.join(TESTDIR, "config/basic/storageClasses.yaml")
        cls.storageClassFactory = StorageClassFactory()
        cls.storageClassFactory.addFromConfig(scConfigFile)

        # Read the Datastore config so we can get the class
        # information (since we should not assume the constructor
        # name here, but rely on the configuration file itself)
        datastoreConfig = DatastoreConfig(cls.configFile)
        cls.datastoreType = doImport(datastoreConfig["cls"])
        cls.universe = DimensionUniverse()

    def setUp(self):
        # Per-test datastore construction is delegated to the shared helper
        # mixin, using a stub registry.
        self.setUpDatastoreTests(DummyRegistry, DatastoreConfig)

    def tearDown(self):
        # Remove any on-disk root created for this test; ignore_errors so a
        # cleanup failure does not mask the real test outcome.
        if self.root is not None and os.path.exists(self.root):
            shutil.rmtree(self.root, ignore_errors=True)
class DatastoreTests(DatastoreTestsBase):
    """Some basic tests of a simple datastore."""

    # Whether this datastore is expected to reject a put of a dataset type
    # it does not support (in-memory datastores accept everything).
    hasUnsupportedPut = True

    def testConfigRoot(self):
        """Check that setConfigRoot rewrites the configured root keys."""
        full = DatastoreConfig(self.configFile)
        config = DatastoreConfig(self.configFile, mergeDefaults=False)
        newroot = "/random/location"
        self.datastoreType.setConfigRoot(newroot, config, full)
        if self.rootKeys:
            for k in self.rootKeys:
                self.assertIn(newroot, config[k])

    def testConstructor(self):
        """Check that a datastore can be constructed from configuration."""
        datastore = self.makeDatastore()
        self.assertIsNotNone(datastore)
        self.assertIs(datastore.isEphemeral, self.isEphemeral)

    def testConfigurationValidation(self):
        """Check configuration validation of storage classes and refs."""
        datastore = self.makeDatastore()
        sc = self.storageClassFactory.getStorageClass("ThingOne")
        datastore.validateConfiguration([sc])

        sc2 = self.storageClassFactory.getStorageClass("ThingTwo")
        if self.validationCanFail:
            with self.assertRaises(DatastoreValidationError):
                datastore.validateConfiguration([sc2], logFailures=True)

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
        datastore.validateConfiguration([ref])

    def testParameterValidation(self):
        """Check that parameters are validated"""
        sc = self.storageClassFactory.getStorageClass("ThingOne")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
        datastore = self.makeDatastore()
        data = {1: 2, 3: 4}
        datastore.put(data, ref)
        newdata = datastore.get(ref)
        self.assertEqual(data, newdata)
        # An unknown read parameter must be rejected.
        with self.assertRaises(KeyError):
            newdata = datastore.get(ref, parameters={"missing": 5})

    def testBasicPutGet(self):
        """Round-trip a dataset through put/get for several storage
        classes and check component retrieval and error behavior."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        # Create multiple storage classes for testing different formulations
        storageClasses = [self.storageClassFactory.getStorageClass(sc)
                          for sc in ("StructuredData",
                                     "StructuredDataJson",
                                     "StructuredDataPickle")]

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}

        for sc in storageClasses:
            ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
            print("Using storageClass: {}".format(sc.name))
            datastore.put(metrics, ref)

            # Does it exist?
            self.assertTrue(datastore.exists(ref))

            # Get
            metricsOut = datastore.get(ref, parameters=None)
            self.assertEqual(metrics, metricsOut)

            uri = datastore.getUri(ref)
            self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

            # Get a component -- we need to construct new refs for them
            # with derived storage classes but with parent ID
            for comp in ("data", "output"):
                compRef = self.makeDatasetRef(ref.datasetType.componentTypeName(comp), dimensions,
                                              sc.components[comp], dataId, id=ref.id)
                output = datastore.get(compRef)
                self.assertEqual(output, getattr(metricsOut, comp))

                uri = datastore.getUri(compRef)
                self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

            # Remember the last storage class used for the error checks below.
            storageClass = sc

        # Check that we can put a metric with None in a component and
        # get it back as None
        metricsNone = makeExampleMetrics(use_none=True)
        dataIdNone = {"instrument": "dummy", "visit": 54, "physical_filter": "V"}
        refNone = self.makeDatasetRef("metric", dimensions, sc, dataIdNone, conform=False)
        datastore.put(metricsNone, refNone)

        # NOTE: a dead ``comp = "data"`` assignment that was immediately
        # overwritten by this loop has been removed.
        for comp in ("data", "output"):
            compRef = self.makeDatasetRef(refNone.datasetType.componentTypeName(comp), dimensions,
                                          sc.components[comp], dataIdNone, id=refNone.id)
            output = datastore.get(compRef)
            self.assertEqual(output, getattr(metricsNone, comp))

        # Check that a put fails if the dataset type is not supported
        if self.hasUnsupportedPut:
            sc = StorageClass("UnsupportedSC", pytype=type(metrics))
            ref = self.makeDatasetRef("unsupportedType", dimensions, sc, dataId)
            with self.assertRaises(DatasetTypeNotSupportedError):
                datastore.put(metrics, ref)

        # These should raise
        ref = self.makeDatasetRef("metrics", dimensions, storageClass, dataId, id=10000)
        with self.assertRaises(FileNotFoundError):
            # non-existing file
            datastore.get(ref)

        # Get a URI from it
        uri = datastore.getUri(ref, predict=True)
        self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

        with self.assertRaises(FileNotFoundError):
            datastore.getUri(ref)

    def testCompositePutGet(self):
        """Disassemble composites into components, store and read them
        back, and check the reassembled composite matches."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        # Create multiple storage classes for testing different formulations
        # of composites
        storageClasses = [self.storageClassFactory.getStorageClass(sc)
                          for sc in ("StructuredComposite",
                                     "StructuredCompositeTestA",
                                     "StructuredCompositeTestB")]

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 428, "physical_filter": "R"}

        for sc in storageClasses:
            print("Using storageClass: {}".format(sc.name))
            ref = self.makeDatasetRef("metric", dimensions, sc, dataId,
                                      conform=False)

            components = sc.assembler().disassemble(metrics)
            self.assertTrue(components)

            compsRead = {}
            for compName, compInfo in components.items():
                compRef = self.makeDatasetRef(ref.datasetType.componentTypeName(compName), dimensions,
                                              components[compName].storageClass, dataId,
                                              conform=False)

                print("Writing component {} with {}".format(compName, compRef.datasetType.storageClass.name))
                datastore.put(compInfo.component, compRef)

                uri = datastore.getUri(compRef)
                self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

                compsRead[compName] = datastore.get(compRef)

                # We can generate identical files for each storage class
                # so remove the component here
                datastore.remove(compRef)

            # combine all the components we read back into a new composite
            metricsOut = sc.assembler().assemble(compsRead)
            self.assertEqual(metrics, metricsOut)

    def testRemove(self):
        """Check that removed datasets really are gone and that a second
        removal fails."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()
        # Put
        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 638, "physical_filter": "U"}

        sc = self.storageClassFactory.getStorageClass("StructuredData")
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
        datastore.put(metrics, ref)

        # Does it exist?
        self.assertTrue(datastore.exists(ref))

        # Get
        metricsOut = datastore.get(ref)
        self.assertEqual(metrics, metricsOut)
        # Remove
        datastore.remove(ref)

        # Does it exist?
        self.assertFalse(datastore.exists(ref))

        # Do we now get a predicted URI?
        uri = datastore.getUri(ref, predict=True)
        self.assertTrue(uri.endswith("#predicted"))

        # Get should now fail
        with self.assertRaises(FileNotFoundError):
            datastore.get(ref)
        # Can only delete once
        with self.assertRaises(FileNotFoundError):
            datastore.remove(ref)

    def testTransfer(self):
        """Check that a dataset can be transferred between datastores."""
        metrics = makeExampleMetrics()

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 2048, "physical_filter": "Uprime"}

        sc = self.storageClassFactory.getStorageClass("StructuredData")
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)

        inputDatastore = self.makeDatastore("test_input_datastore")
        outputDatastore = self.makeDatastore("test_output_datastore")

        inputDatastore.put(metrics, ref)
        outputDatastore.transfer(inputDatastore, ref)

        metricsOut = outputDatastore.get(ref)
        self.assertEqual(metrics, metricsOut)

    def testBasicTransaction(self):
        """Check that a rolled-back transaction removes its datasets while
        a committed transaction keeps them."""
        datastore = self.makeDatastore()
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        nDatasets = 6
        dataIds = [{"instrument": "dummy", "visit": i, "physical_filter": "V"} for i in range(nDatasets)]
        data = [(self.makeDatasetRef("metric", dimensions, storageClass, dataId, conform=False),
                 makeExampleMetrics(),)
                for dataId in dataIds]
        succeed = data[:nDatasets//2]
        fail = data[nDatasets//2:]
        # All datasets added in this transaction should continue to exist
        with datastore.transaction():
            for ref, metrics in succeed:
                datastore.put(metrics, ref)
        # Whereas datasets added in this transaction should not
        with self.assertRaises(TransactionTestError):
            with datastore.transaction():
                for ref, metrics in fail:
                    datastore.put(metrics, ref)
                raise TransactionTestError("This should propagate out of the context manager")
        # Check for datasets that should exist
        for ref, metrics in succeed:
            # Does it exist?
            self.assertTrue(datastore.exists(ref))
            # Get
            metricsOut = datastore.get(ref, parameters=None)
            self.assertEqual(metrics, metricsOut)
            # URI
            uri = datastore.getUri(ref)
            self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)
        # Check for datasets that should not exist
        for ref, _ in fail:
            # These should raise
            with self.assertRaises(FileNotFoundError):
                # non-existing file
                datastore.get(ref)
            with self.assertRaises(FileNotFoundError):
                datastore.getUri(ref)

    def testNestedTransaction(self):
        """Check that rolling back an outer transaction also rolls back a
        nested inner transaction, but not pre-existing datasets."""
        datastore = self.makeDatastore()
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        metrics = makeExampleMetrics()

        dataId = {"instrument": "dummy", "visit": 0, "physical_filter": "V"}
        refBefore = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                        conform=False)
        datastore.put(metrics, refBefore)
        with self.assertRaises(TransactionTestError):
            with datastore.transaction():
                dataId = {"instrument": "dummy", "visit": 1, "physical_filter": "V"}
                refOuter = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                               conform=False)
                datastore.put(metrics, refOuter)
                with datastore.transaction():
                    dataId = {"instrument": "dummy", "visit": 2, "physical_filter": "V"}
                    refInner = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                                   conform=False)
                    datastore.put(metrics, refInner)
                # All datasets should exist
                for ref in (refBefore, refOuter, refInner):
                    metricsOut = datastore.get(ref, parameters=None)
                    self.assertEqual(metrics, metricsOut)
                raise TransactionTestError("This should roll back the transaction")
        # Dataset(s) inserted before the transaction should still exist
        metricsOut = datastore.get(refBefore, parameters=None)
        self.assertEqual(metrics, metricsOut)
        # But all datasets inserted during the (rolled back) transaction
        # should be gone
        with self.assertRaises(FileNotFoundError):
            datastore.get(refOuter)
        with self.assertRaises(FileNotFoundError):
            datastore.get(refInner)

    def _prepareIngestTest(self):
        """Create an example metrics object and a matching dataset ref for
        the ingest tests."""
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        metrics = makeExampleMetrics()
        dataId = {"instrument": "dummy", "visit": 0, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, storageClass, dataId, conform=False)
        return metrics, ref

    def runIngestTest(self, func, expectOutput=True):
        """Write the example metrics to a temporary YAML file and invoke
        ``func(metrics, path, ref)`` with it."""
        metrics, ref = self._prepareIngestTest()
        with lsst.utils.tests.getTempFilePath(".yaml", expectOutput=expectOutput) as path:
            with open(path, 'w') as fd:
                yaml.dump(metrics._asdict(), stream=fd)
            func(metrics, path, ref)

    def testIngestNoTransfer(self):
        """Test ingesting existing files with no transfer.
        """
        for mode in (None, "auto"):

            # Some datastores have auto but can't do in place transfer
            if mode == "auto" and "auto" in self.ingestTransferModes and not self.canIngestNoTransferAuto:
                continue

            with self.subTest(mode=mode):
                datastore = self.makeDatastore()

                def succeed(obj, path, ref):
                    """Ingest a file already in the datastore root."""
                    # first move it into the root, and adjust the path
                    # accordingly
                    path = shutil.copy(path, datastore.root)
                    path = os.path.relpath(path, start=datastore.root)
                    datastore.ingest(FileDataset(path=path, refs=ref), transfer=mode)
                    self.assertEqual(obj, datastore.get(ref))

                def failInputDoesNotExist(obj, path, ref):
                    """Can't ingest files if we're given a bad path."""
                    with self.assertRaises(FileNotFoundError):
                        datastore.ingest(FileDataset(path="this-file-does-not-exist.yaml", refs=ref),
                                         transfer=mode)
                    self.assertFalse(datastore.exists(ref))

                def failOutsideRoot(obj, path, ref):
                    """Can't ingest files outside of datastore root unless
                    auto."""
                    if mode == "auto":
                        datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)
                        self.assertTrue(datastore.exists(ref))
                    else:
                        with self.assertRaises(RuntimeError):
                            datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)
                        self.assertFalse(datastore.exists(ref))

                def failNotImplemented(obj, path, ref):
                    with self.assertRaises(NotImplementedError):
                        datastore.ingest(FileDataset(path=path, refs=ref), transfer=mode)

                if mode in self.ingestTransferModes:
                    self.runIngestTest(failOutsideRoot)
                    self.runIngestTest(failInputDoesNotExist)
                    self.runIngestTest(succeed)
                else:
                    self.runIngestTest(failNotImplemented)

    def testIngestTransfer(self):
        """Test ingesting existing files after transferring them.
        """
        for mode in ("copy", "move", "link", "hardlink", "symlink", "relsymlink", "auto"):
            with self.subTest(mode=mode):
                datastore = self.makeDatastore(mode)

                def succeed(obj, path, ref):
                    """Ingest a file by transferring it to the template
                    location."""
                    datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)
                    self.assertEqual(obj, datastore.get(ref))

                def failInputDoesNotExist(obj, path, ref):
                    """Can't ingest files if we're given a bad path."""
                    with self.assertRaises(FileNotFoundError):
                        # Ensure the file does not look like it is in
                        # datastore for auto mode
                        datastore.ingest(FileDataset(path="../this-file-does-not-exist.yaml", refs=ref),
                                         transfer=mode)
                    self.assertFalse(datastore.exists(ref))

                def failOutputExists(obj, path, ref):
                    """Can't ingest files if transfer destination already
                    exists."""
                    with self.assertRaises(FileExistsError):
                        datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)
                    self.assertFalse(datastore.exists(ref))

                def failNotImplemented(obj, path, ref):
                    with self.assertRaises(NotImplementedError):
                        datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)

                if mode in self.ingestTransferModes:
                    self.runIngestTest(failInputDoesNotExist)
                    self.runIngestTest(succeed, expectOutput=(mode != "move"))
                    self.runIngestTest(failOutputExists)
                else:
                    self.runIngestTest(failNotImplemented)

    def testIngestSymlinkOfSymlink(self):
        """Special test for symlink to a symlink ingest"""
        metrics, ref = self._prepareIngestTest()
        # The aim of this test is to create a dataset on disk, then
        # create a symlink to it and finally ingest the symlink such that
        # the symlink in the datastore points to the original dataset.
        for mode in ("symlink", "relsymlink"):
            if mode not in self.ingestTransferModes:
                continue

            print(f"Trying mode {mode}")
            with lsst.utils.tests.getTempFilePath(".yaml") as realpath:
                with open(realpath, 'w') as fd:
                    yaml.dump(metrics._asdict(), stream=fd)
                with lsst.utils.tests.getTempFilePath(".yaml") as sympath:
                    os.symlink(os.path.abspath(realpath), sympath)

                    datastore = self.makeDatastore()
                    datastore.ingest(FileDataset(path=os.path.abspath(sympath), refs=ref), transfer=mode)

                    uri = ButlerURI(datastore.getUri(ref))
                    self.assertTrue(not uri.scheme or uri.scheme == "file", f"Check {uri.scheme}")
                    self.assertTrue(os.path.islink(uri.path))

                    linkTarget = os.readlink(uri.path)
                    if mode == "relsymlink":
                        self.assertFalse(os.path.isabs(linkTarget))
                    else:
                        self.assertEqual(linkTarget, os.path.abspath(realpath))

                    # Check that we can get the dataset back regardless of mode
                    metric2 = datastore.get(ref)
                    self.assertEqual(metric2, metrics)

                    # Cleanup the file for next time round loop
                    # since it will get the same file name in store
                    datastore.remove(ref)
class PosixDatastoreTestCase(DatastoreTests, unittest.TestCase):
    """Run the generic datastore tests against a POSIX datastore."""

    configFile = os.path.join(TESTDIR, "config/basic/butler.yaml")
    validationCanFail = True
    isEphemeral = False
    uriScheme = "file:"
    rootKeys = ("root",)
    canIngestNoTransferAuto = True
    ingestTransferModes = (None, "copy", "move", "link", "hardlink", "symlink", "relsymlink", "auto")

    def setUp(self):
        # Point the datastore at a scratch directory before the helper
        # machinery in the base class runs.
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()
class PosixDatastoreNoChecksumsTestCase(PosixDatastoreTestCase):
    """Posix datastore tests but with checksums disabled."""

    configFile = os.path.join(TESTDIR, "config/basic/posixDatastoreNoChecksums.yaml")

    def testChecksum(self):
        """Ensure that checksums have not been calculated."""
        store = self.makeDatastore()
        metric = makeExampleMetrics()
        dims = self.universe.extract(("visit", "physical_filter"))
        sclass = self.storageClassFactory.getStorageClass("StructuredData")
        dataId = {"instrument": "dummy", "visit": 0, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dims, sclass, dataId, conform=False)

        # The configuration disables checksum calculation, so a freshly
        # written dataset must have no checksum recorded.
        store.put(metric, ref)
        self.assertIsNone(store.getStoredItemInfo(ref).checksum)

        # Re-write the same dataset with checksums switched on explicitly;
        # this time a checksum must be present.
        store.remove(ref)
        store.useChecksum = True
        store.put(metric, ref)
        self.assertIsNotNone(store.getStoredItemInfo(ref).checksum)
class CleanupPosixDatastoreTestCase(DatastoreTestsBase, unittest.TestCase):
    """Check that a failing formatter leaves no partial file behind."""

    configFile = os.path.join(TESTDIR, "config/basic/butler.yaml")

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()

    def testCleanup(self):
        """Test that a failed formatter write does cleanup a partial file."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        storageClass = self.storageClassFactory.getStorageClass("StructuredData")

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}

        ref = self.makeDatasetRef("metric", dimensions, storageClass, dataId, conform=False)

        # Determine where the file will end up (we assume Formatters use
        # the same file extension)
        expectedUri = datastore.getUri(ref, predict=True)
        self.assertTrue(expectedUri.endswith(".yaml#predicted"),
                        f"Is there a file extension in {expectedUri}")

        # Convert to ButlerURI so we can extract the path component
        expectedUri = ButlerURI(expectedUri)
        expectedFile = expectedUri.path

        # Try formatter that fails and formatter that fails and leaves
        # a file behind
        for formatter in (BadWriteFormatter, BadNoWriteFormatter):
            with self.subTest(formatter=formatter):

                # Monkey patch the formatter
                datastore.formatterFactory.registerFormatter(ref.datasetType, formatter,
                                                             overwrite=True)

                # Try to put the dataset, it should fail
                with self.assertRaises(Exception):
                    datastore.put(metrics, ref)

                # Check that there is no file on disk
                self.assertFalse(os.path.exists(expectedFile), f"Check for existence of {expectedFile}")

                # Check that there is a directory
                self.assertTrue(os.path.exists(os.path.dirname(expectedFile)),
                                f"Check for existence of directory {os.path.dirname(expectedFile)}")

        # Force YamlFormatter and check that this time a file is written
        datastore.formatterFactory.registerFormatter(ref.datasetType, YamlFormatter,
                                                     overwrite=True)
        datastore.put(metrics, ref)
        self.assertTrue(os.path.exists(expectedFile), f"Check for existence of {expectedFile}")
        datastore.remove(ref)
        self.assertFalse(os.path.exists(expectedFile), f"Check for existence of now removed {expectedFile}")
class InMemoryDatastoreTestCase(DatastoreTests, unittest.TestCase):
    """InMemoryDatastore specialization"""
    # NOTE: the original docstring said "PosixDatastore specialization",
    # which was a copy-paste error.
    configFile = os.path.join(TESTDIR, "config/basic/inMemoryDatastore.yaml")
    uriScheme = "mem:"
    # In-memory datastore accepts every put, so the unsupported-put branch
    # of testBasicPutGet is skipped.
    hasUnsupportedPut = False
    # No file-based ingest is possible for an in-memory datastore.
    ingestTransferModes = ()
    isEphemeral = True
    rootKeys = None
    validationCanFail = False
class ChainedDatastoreTestCase(PosixDatastoreTestCase):
    """ChainedDatastore specialization using a POSIXDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastore.yaml")
    hasUnsupportedPut = False
    canIngestNoTransferAuto = False
    ingestTransferModes = ("copy", "hardlink", "symlink", "relsymlink", "link", "auto")
    isEphemeral = False
    # Config paths to the roots of the child datastores inside the chain.
    rootKeys = (".datastores.1.root", ".datastores.2.root")
    validationCanFail = True
class ChainedDatastoreMemoryTestCase(InMemoryDatastoreTestCase):
    """ChainedDatastore specialization using all InMemoryDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastore2.yaml")
    # A chain of in-memory datastores has nothing that can fail validation.
    validationCanFail = False
class DatastoreConstraintsTests(DatastoreTestsBase):
    """Basic tests of constraints model of Datastores."""

    def testConstraints(self):
        """Test constraints model. Assumes that each test class has the
        same constraints."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        sc1 = self.storageClassFactory.getStorageClass("StructuredData")
        sc2 = self.storageClassFactory.getStorageClass("StructuredDataJson")
        dimensions = self.universe.extract(("visit", "physical_filter", "instrument"))
        dataId = {"visit": 52, "physical_filter": "V", "instrument": "DummyCamComp"}

        # Write empty file suitable for ingest check.  Use a context manager
        # so the temporary file is reliably closed and removed (previously
        # the handle was never closed).
        with tempfile.NamedTemporaryFile() as testfile:
            # Each entry: dataset type name, storage class, and whether the
            # constraints configuration should accept it.
            for datasetTypeName, sc, accepted in (("metric", sc1, True), ("metric2", sc1, False),
                                                  ("metric33", sc1, True), ("metric2", sc2, True)):
                with self.subTest(datasetTypeName=datasetTypeName):
                    ref = self.makeDatasetRef(datasetTypeName, dimensions, sc, dataId, conform=False)
                    if accepted:
                        datastore.put(metrics, ref)
                        self.assertTrue(datastore.exists(ref))
                        datastore.remove(ref)

                        # Try ingest
                        if self.canIngest:
                            datastore.ingest(FileDataset(testfile.name, [ref]), transfer="link")
                            self.assertTrue(datastore.exists(ref))
                            datastore.remove(ref)
                    else:
                        with self.assertRaises(DatasetTypeNotSupportedError):
                            datastore.put(metrics, ref)
                        self.assertFalse(datastore.exists(ref))

                        # Again with ingest
                        if self.canIngest:
                            with self.assertRaises(DatasetTypeNotSupportedError):
                                datastore.ingest(FileDataset(testfile.name, [ref]), transfer="link")
                            self.assertFalse(datastore.exists(ref))
class PosixDatastoreConstraintsTestCase(DatastoreConstraintsTests, unittest.TestCase):
    """PosixDatastore specialization"""
    configFile = os.path.join(TESTDIR, "config/basic/posixDatastoreP.yaml")
    # Posix datastores support file ingest.
    canIngest = True

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()
class InMemoryDatastoreConstraintsTestCase(DatastoreConstraintsTests, unittest.TestCase):
    """InMemoryDatastore specialization"""
    configFile = os.path.join(TESTDIR, "config/basic/inMemoryDatastoreP.yaml")
    # In-memory datastores cannot ingest files.
    canIngest = False
class ChainedDatastoreConstraintsNativeTestCase(PosixDatastoreConstraintsTestCase):
    """ChainedDatastore specialization using a POSIXDatastore and constraints
    at the ChainedDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastorePa.yaml")
class ChainedDatastoreConstraintsTestCase(PosixDatastoreConstraintsTestCase):
    """ChainedDatastore specialization using a POSIXDatastore"""
    # Same tests as the parent class; only the configuration differs.
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastoreP.yaml")
class ChainedDatastoreMemoryConstraintsTestCase(InMemoryDatastoreConstraintsTestCase):
    """ChainedDatastore specialization using all InMemoryDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastore2P.yaml")
    # In-memory children cannot ingest files.
    canIngest = False
class ChainedDatastorePerStoreConstraintsTests(DatastoreTestsBase, unittest.TestCase):
    """Test that a chained datastore can control constraints per-datastore
    even if child datastore would accept."""

    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastorePb.yaml")

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()

    def testConstraints(self):
        """Test chained datastore constraints model."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        sc1 = self.storageClassFactory.getStorageClass("StructuredData")
        sc2 = self.storageClassFactory.getStorageClass("StructuredDataJson")
        dimensions = self.universe.extract(("visit", "physical_filter", "instrument"))
        dataId1 = {"visit": 52, "physical_filter": "V", "instrument": "DummyCamComp"}
        dataId2 = {"visit": 52, "physical_filter": "V", "instrument": "HSC"}

        # Write empty file suitable for ingest check.  Use a context manager
        # so the temporary file is reliably closed and removed (previously
        # the handle was never closed).
        with tempfile.NamedTemporaryFile() as testfile:
            # accept is a tuple of expectations, one per child datastore.
            for typeName, dataId, sc, accept, ingest in (("metric", dataId1, sc1, (False, True, False), True),
                                                         ("metric2", dataId1, sc1, (False, False, False), False),
                                                         ("metric2", dataId2, sc1, (True, False, False), False),
                                                         ("metric33", dataId2, sc2, (True, True, False), True),
                                                         ("metric2", dataId1, sc2, (False, True, False), True)):
                with self.subTest(datasetTypeName=typeName, dataId=dataId, sc=sc.name):
                    ref = self.makeDatasetRef(typeName, dimensions, sc, dataId,
                                              conform=False)
                    if any(accept):
                        datastore.put(metrics, ref)
                        self.assertTrue(datastore.exists(ref))

                        # Check each datastore inside the chained datastore
                        for childDatastore, expected in zip(datastore.datastores, accept):
                            self.assertEqual(childDatastore.exists(ref), expected,
                                             f"Testing presence of {ref} in datastore {childDatastore.name}")

                        datastore.remove(ref)

                        # Check that ingest works
                        if ingest:
                            datastore.ingest(FileDataset(testfile.name, [ref]), transfer="link")
                            self.assertTrue(datastore.exists(ref))

                            # Check each datastore inside the chained datastore
                            for childDatastore, expected in zip(datastore.datastores, accept):
                                # Ephemeral datastores means InMemory at the moment
                                # and that does not accept ingest of files.
                                if childDatastore.isEphemeral:
                                    expected = False
                                self.assertEqual(childDatastore.exists(ref), expected,
                                                 f"Testing presence of ingested {ref} in datastore"
                                                 f" {childDatastore.name}")

                            datastore.remove(ref)
                        else:
                            with self.assertRaises(DatasetTypeNotSupportedError):
                                datastore.ingest(FileDataset(testfile.name, [ref]), transfer="link")

                    else:
                        with self.assertRaises(DatasetTypeNotSupportedError):
                            datastore.put(metrics, ref)
                        self.assertFalse(datastore.exists(ref))

                        # Again with ingest
                        with self.assertRaises(DatasetTypeNotSupportedError):
                            datastore.ingest(FileDataset(testfile.name, [ref]), transfer="link")
                        self.assertFalse(datastore.exists(ref))
# Run the full test suite when this file is executed directly.
if __name__ == "__main__":
    unittest.main()