Coverage for tests/test_datastore.py: 18%

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import unittest
import shutil
import yaml
import tempfile
import lsst.utils.tests

from lsst.utils import doImport

from lsst.daf.butler import StorageClassFactory, StorageClass, DimensionUniverse, FileDataset
from lsst.daf.butler import DatastoreConfig, DatasetTypeNotSupportedError, DatastoreValidationError
from lsst.daf.butler import ButlerURI
from lsst.daf.butler.formatters.yamlFormatter import YamlFormatter

from lsst.daf.butler.tests import (DatasetTestHelper, DatastoreTestHelper, BadWriteFormatter,
                                   BadNoWriteFormatter, MetricsExample, DummyRegistry)


TESTDIR = os.path.dirname(__file__)


def makeExampleMetrics():
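    """Return an example MetricsExample object for the tests."""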
    return MetricsExample({"AM1": 5.2, "AM2": 30.6},
                          {"a": [1, 2, 3],
                           "b": {"blue": 5, "red": "green"}},
                          [563, 234, 456.7]
                          )


class TransactionTestError(Exception):
    """Specific error for transactions, to prevent misdiagnosis
    that might otherwise occur when a standard exception is used.
    """
    pass


class DatastoreTestsBase(DatasetTestHelper, DatastoreTestHelper):
    """Support routines for datastore testing"""
    root = None

    @classmethod
    def setUpClass(cls):
        # Storage Classes are fixed for all datastores in these tests
        scConfigFile = os.path.join(TESTDIR, "config/basic/storageClasses.yaml")
        cls.storageClassFactory = StorageClassFactory()
        cls.storageClassFactory.addFromConfig(scConfigFile)

        # Read the Datastore config so we can get the class
        # information (since we should not assume the constructor
        # name here, but rely on the configuration file itself)
        datastoreConfig = DatastoreConfig(cls.configFile)
        cls.datastoreType = doImport(datastoreConfig["cls"])
        cls.universe = DimensionUniverse()

    def setUp(self):
        self.setUpDatastoreTests(DummyRegistry, DatastoreConfig)

    def tearDown(self):
        if self.root is not None and os.path.exists(self.root):
            shutil.rmtree(self.root, ignore_errors=True)


class DatastoreTests(DatastoreTestsBase):
    """Some basic tests of a simple datastore."""

    hasUnsupportedPut = True

    def testConfigRoot(self):
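        """Check that the datastore root can be relocated in the
        configuration."""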
        full = DatastoreConfig(self.configFile)
        config = DatastoreConfig(self.configFile, mergeDefaults=False)
        newroot = "/random/location"
        self.datastoreType.setConfigRoot(newroot, config, full)
        if self.rootKeys:
            for k in self.rootKeys:
                self.assertIn(newroot, config[k])

    def testConstructor(self):
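        """Check that a datastore can be constructed and reports the
        expected ephemeral status."""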
        datastore = self.makeDatastore()
        self.assertIsNotNone(datastore)
        self.assertIs(datastore.isEphemeral, self.isEphemeral)

    def testConfigurationValidation(self):
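        """Check that configuration validation accepts supported storage
        classes and, where validation can fail, rejects unsupported ones."""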
        datastore = self.makeDatastore()
        sc = self.storageClassFactory.getStorageClass("ThingOne")
        datastore.validateConfiguration([sc])

        sc2 = self.storageClassFactory.getStorageClass("ThingTwo")
        if self.validationCanFail:
            with self.assertRaises(DatastoreValidationError):
                datastore.validateConfiguration([sc2], logFailures=True)

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
        datastore.validateConfiguration([ref])

    def testParameterValidation(self):
        """Check that parameters are validated"""
        sc = self.storageClassFactory.getStorageClass("ThingOne")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
        datastore = self.makeDatastore()
        data = {1: 2, 3: 4}
        datastore.put(data, ref)
        newdata = datastore.get(ref)
        self.assertEqual(data, newdata)
        with self.assertRaises(KeyError):
            newdata = datastore.get(ref, parameters={"missing": 5})

    def testBasicPutGet(self):
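        """Check basic put and get, including component access and
        rejection of unsupported puts."""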
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        # Create multiple storage classes for testing different formulations
        storageClasses = [self.storageClassFactory.getStorageClass(sc)
                          for sc in ("StructuredData",
                                     "StructuredDataJson",
                                     "StructuredDataPickle")]

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}

        for sc in storageClasses:
            ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
            print("Using storageClass: {}".format(sc.name))
            datastore.put(metrics, ref)

            # Does it exist?
            self.assertTrue(datastore.exists(ref))

            # Get
            metricsOut = datastore.get(ref, parameters=None)
            self.assertEqual(metrics, metricsOut)

            uri = datastore.getUri(ref)
            self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

            # Get a component -- we need to construct new refs for them
            # with derived storage classes but with parent ID
            comp = "output"
            compRef = self.makeDatasetRef(ref.datasetType.componentTypeName(comp), dimensions,
                                          sc.components[comp], dataId, id=ref.id)
            output = datastore.get(compRef)
            self.assertEqual(output, metricsOut.output)

            uri = datastore.getUri(compRef)
            self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

        storageClass = sc

        # Check that a put fails if the dataset type is not supported
        if self.hasUnsupportedPut:
            sc = StorageClass("UnsupportedSC", pytype=type(metrics))
            ref = self.makeDatasetRef("unsupportedType", dimensions, sc, dataId)
            with self.assertRaises(DatasetTypeNotSupportedError):
                datastore.put(metrics, ref)

        # These should raise
        ref = self.makeDatasetRef("metrics", dimensions, storageClass, dataId, id=10000)
        with self.assertRaises(FileNotFoundError):
            # non-existing file
            datastore.get(ref)

        # Get a URI from it
        uri = datastore.getUri(ref, predict=True)
        self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

        with self.assertRaises(FileNotFoundError):
            datastore.getUri(ref)

    def testCompositePutGet(self):
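        """Check that a composite can be disassembled, stored and read
        back component by component, then reassembled."""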
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        # Create multiple storage classes for testing different formulations
        # of composites
        storageClasses = [self.storageClassFactory.getStorageClass(sc)
                          for sc in ("StructuredComposite",
                                     "StructuredCompositeTestA",
                                     "StructuredCompositeTestB")]

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 428, "physical_filter": "R"}

        for sc in storageClasses:
            print("Using storageClass: {}".format(sc.name))
            ref = self.makeDatasetRef("metric", dimensions, sc, dataId,
                                      conform=False)

            components = sc.assembler().disassemble(metrics)
            self.assertTrue(components)

            compsRead = {}
            for compName, compInfo in components.items():
                compRef = self.makeDatasetRef(ref.datasetType.componentTypeName(compName), dimensions,
                                              components[compName].storageClass, dataId,
                                              conform=False)

                print("Writing component {} with {}".format(compName, compRef.datasetType.storageClass.name))
                datastore.put(compInfo.component, compRef)

                uri = datastore.getUri(compRef)
                self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

                compsRead[compName] = datastore.get(compRef)

                # We can generate identical files for each storage class
                # so remove the component here
                datastore.remove(compRef)

            # combine all the components we read back into a new composite
            metricsOut = sc.assembler().assemble(compsRead)
            self.assertEqual(metrics, metricsOut)

    def testRemove(self):
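        """Check that a removed dataset no longer exists and cannot be
        removed twice."""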
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()
        # Put
        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 638, "physical_filter": "U"}

        sc = self.storageClassFactory.getStorageClass("StructuredData")
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
        datastore.put(metrics, ref)

        # Does it exist?
        self.assertTrue(datastore.exists(ref))

        # Get
        metricsOut = datastore.get(ref)
        self.assertEqual(metrics, metricsOut)
        # Remove
        datastore.remove(ref)

        # Does it exist?
        self.assertFalse(datastore.exists(ref))

        # Do we now get a predicted URI?
        uri = datastore.getUri(ref, predict=True)
        self.assertTrue(uri.endswith("#predicted"))

        # Get should now fail
        with self.assertRaises(FileNotFoundError):
            datastore.get(ref)
        # Can only delete once
        with self.assertRaises(FileNotFoundError):
            datastore.remove(ref)

    def testTransfer(self):
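        """Check that a dataset can be transferred from one datastore to
        another."""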
        metrics = makeExampleMetrics()

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 2048, "physical_filter": "Uprime"}

        sc = self.storageClassFactory.getStorageClass("StructuredData")
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)

        inputDatastore = self.makeDatastore("test_input_datastore")
        outputDatastore = self.makeDatastore("test_output_datastore")

        inputDatastore.put(metrics, ref)
        outputDatastore.transfer(inputDatastore, ref)

        metricsOut = outputDatastore.get(ref)
        self.assertEqual(metrics, metricsOut)

    def testBasicTransaction(self):
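        """Check that datasets put in a committed transaction persist
        while those in a failed transaction are rolled back."""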
        datastore = self.makeDatastore()
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        nDatasets = 6
        dataIds = [{"instrument": "dummy", "visit": i, "physical_filter": "V"} for i in range(nDatasets)]
        data = [(self.makeDatasetRef("metric", dimensions, storageClass, dataId, conform=False),
                 makeExampleMetrics(),)
                for dataId in dataIds]
        succeed = data[:nDatasets//2]
        fail = data[nDatasets//2:]
        # All datasets added in this transaction should continue to exist
        with datastore.transaction():
            for ref, metrics in succeed:
                datastore.put(metrics, ref)
        # Whereas datasets added in this transaction should not
        with self.assertRaises(TransactionTestError):
            with datastore.transaction():
                for ref, metrics in fail:
                    datastore.put(metrics, ref)
                raise TransactionTestError("This should propagate out of the context manager")
        # Check for datasets that should exist
        for ref, metrics in succeed:
            # Does it exist?
            self.assertTrue(datastore.exists(ref))
            # Get
            metricsOut = datastore.get(ref, parameters=None)
            self.assertEqual(metrics, metricsOut)
            # URI
            uri = datastore.getUri(ref)
            self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)
        # Check for datasets that should not exist
        for ref, _ in fail:
            # These should raise
            with self.assertRaises(FileNotFoundError):
                # non-existing file
                datastore.get(ref)
            with self.assertRaises(FileNotFoundError):
                datastore.getUri(ref)

    def testNestedTransaction(self):
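        """Check that a failure in an outer transaction also rolls back
        datasets put within nested transactions."""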
        datastore = self.makeDatastore()
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        metrics = makeExampleMetrics()

        dataId = {"instrument": "dummy", "visit": 0, "physical_filter": "V"}
        refBefore = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                        conform=False)
        datastore.put(metrics, refBefore)
        with self.assertRaises(TransactionTestError):
            with datastore.transaction():
                dataId = {"instrument": "dummy", "visit": 1, "physical_filter": "V"}
                refOuter = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                               conform=False)
                datastore.put(metrics, refOuter)
                with datastore.transaction():
                    dataId = {"instrument": "dummy", "visit": 2, "physical_filter": "V"}
                    refInner = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                                   conform=False)
                    datastore.put(metrics, refInner)
                # All datasets should exist
                for ref in (refBefore, refOuter, refInner):
                    metricsOut = datastore.get(ref, parameters=None)
                    self.assertEqual(metrics, metricsOut)
                raise TransactionTestError("This should roll back the transaction")
        # Dataset(s) inserted before the transaction should still exist
        metricsOut = datastore.get(refBefore, parameters=None)
        self.assertEqual(metrics, metricsOut)
        # But all datasets inserted during the (rolled back) transaction
        # should be gone
        with self.assertRaises(FileNotFoundError):
            datastore.get(refOuter)
        with self.assertRaises(FileNotFoundError):
            datastore.get(refInner)

    def _prepareIngestTest(self):
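        """Return example metrics and a matching dataset ref for the
        ingest tests."""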
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        metrics = makeExampleMetrics()
        dataId = {"instrument": "dummy", "visit": 0, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, storageClass, dataId, conform=False)
        return metrics, ref

    def runIngestTest(self, func, expectOutput=True):
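        """Dump the example metrics to a temporary YAML file and pass the
        file to the supplied test function."""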
        metrics, ref = self._prepareIngestTest()
        with lsst.utils.tests.getTempFilePath(".yaml", expectOutput=expectOutput) as path:
            with open(path, 'w') as fd:
                yaml.dump(metrics._asdict(), stream=fd)
            func(metrics, path, ref)

    def testIngestNoTransfer(self):
        """Test ingesting existing files with no transfer.
        """
        for mode in (None, "auto"):

            # Some datastores have "auto" but cannot do an in-place transfer
            if mode == "auto" and "auto" in self.ingestTransferModes and not self.canIngestNoTransferAuto:
                continue

            with self.subTest(mode=mode):
                datastore = self.makeDatastore()

                def succeed(obj, path, ref):
                    """Ingest a file already in the datastore root."""
                    # First copy it into the root, and adjust the path
                    # accordingly
                    path = shutil.copy(path, datastore.root)
                    path = os.path.relpath(path, start=datastore.root)
                    datastore.ingest(FileDataset(path=path, refs=ref), transfer=mode)
                    self.assertEqual(obj, datastore.get(ref))

                def failInputDoesNotExist(obj, path, ref):
                    """Can't ingest files if we're given a bad path."""
                    with self.assertRaises(FileNotFoundError):
                        datastore.ingest(FileDataset(path="this-file-does-not-exist.yaml", refs=ref),
                                         transfer=mode)
                    self.assertFalse(datastore.exists(ref))

                def failOutsideRoot(obj, path, ref):
                    """Can't ingest files outside of datastore root unless
                    auto."""
                    if mode == "auto":
                        datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)
                        self.assertTrue(datastore.exists(ref))
                    else:
                        with self.assertRaises(RuntimeError):
                            datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)
                        self.assertFalse(datastore.exists(ref))

                def failNotImplemented(obj, path, ref):
                    with self.assertRaises(NotImplementedError):
                        datastore.ingest(FileDataset(path=path, refs=ref), transfer=mode)

                if mode in self.ingestTransferModes:
                    self.runIngestTest(failOutsideRoot)
                    self.runIngestTest(failInputDoesNotExist)
                    self.runIngestTest(succeed)
                else:
                    self.runIngestTest(failNotImplemented)

    def testIngestTransfer(self):
        """Test ingesting existing files after transferring them.
        """
        for mode in ("copy", "move", "link", "hardlink", "symlink", "relsymlink", "auto"):
            with self.subTest(mode=mode):
                datastore = self.makeDatastore(mode)

                def succeed(obj, path, ref):
                    """Ingest a file by transferring it to the template
                    location."""
                    datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)
                    self.assertEqual(obj, datastore.get(ref))

                def failInputDoesNotExist(obj, path, ref):
                    """Can't ingest files if we're given a bad path."""
                    with self.assertRaises(FileNotFoundError):
                        # Ensure the file does not look like it is in
                        # datastore for auto mode
                        datastore.ingest(FileDataset(path="../this-file-does-not-exist.yaml", refs=ref),
                                         transfer=mode)
                    self.assertFalse(datastore.exists(ref))

                def failOutputExists(obj, path, ref):
                    """Can't ingest files if transfer destination already
                    exists."""
                    with self.assertRaises(FileExistsError):
                        datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)
                    self.assertFalse(datastore.exists(ref))

                def failNotImplemented(obj, path, ref):
                    with self.assertRaises(NotImplementedError):
                        datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)

                if mode in self.ingestTransferModes:
                    self.runIngestTest(failInputDoesNotExist)
                    self.runIngestTest(succeed, expectOutput=(mode != "move"))
                    self.runIngestTest(failOutputExists)
                else:
                    self.runIngestTest(failNotImplemented)

    def testIngestSymlinkOfSymlink(self):
        """Special test for symlink to a symlink ingest"""
        metrics, ref = self._prepareIngestTest()
        # The aim of this test is to create a dataset on disk, then
        # create a symlink to it and finally ingest the symlink such that
        # the symlink in the datastore points to the original dataset.
        for mode in ("symlink", "relsymlink"):
            if mode not in self.ingestTransferModes:
                continue

            print(f"Trying mode {mode}")
            with lsst.utils.tests.getTempFilePath(".yaml") as realpath:
                with open(realpath, 'w') as fd:
                    yaml.dump(metrics._asdict(), stream=fd)
                with lsst.utils.tests.getTempFilePath(".yaml") as sympath:
                    os.symlink(os.path.abspath(realpath), sympath)

                    datastore = self.makeDatastore()
                    datastore.ingest(FileDataset(path=os.path.abspath(sympath), refs=ref), transfer=mode)

                    uri = ButlerURI(datastore.getUri(ref))
                    self.assertTrue(not uri.scheme or uri.scheme == "file", f"Check {uri.scheme}")
                    self.assertTrue(os.path.islink(uri.path))

                    linkTarget = os.readlink(uri.path)
                    if mode == "relsymlink":
                        self.assertFalse(os.path.isabs(linkTarget))
                    else:
                        self.assertEqual(linkTarget, os.path.abspath(realpath))

                    # Check that we can get the dataset back regardless of mode
                    metric2 = datastore.get(ref)
                    self.assertEqual(metric2, metrics)

                    # Clean up the file for the next loop iteration, since
                    # it will get the same file name in the store
                    datastore.remove(ref)


class PosixDatastoreTestCase(DatastoreTests, unittest.TestCase):
    """PosixDatastore specialization"""
    configFile = os.path.join(TESTDIR, "config/basic/butler.yaml")
    uriScheme = "file:"
    canIngestNoTransferAuto = True
    ingestTransferModes = (None, "copy", "move", "link", "hardlink", "symlink", "relsymlink", "auto")
    isEphemeral = False
    rootKeys = ("root",)
    validationCanFail = True

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()


class PosixDatastoreNoChecksumsTestCase(PosixDatastoreTestCase):
    """Posix datastore tests but with checksums disabled."""
    configFile = os.path.join(TESTDIR, "config/basic/posixDatastoreNoChecksums.yaml")

    def testChecksum(self):
        """Ensure that checksums have not been calculated."""

        datastore = self.makeDatastore()
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        metrics = makeExampleMetrics()

        dataId = {"instrument": "dummy", "visit": 0, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                  conform=False)

        # Configuration should have disabled checksum calculation
        datastore.put(metrics, ref)
        info = datastore.getStoredItemInfo(ref)
        self.assertIsNone(info.checksum)

        # Remove and put back, but with checksums enabled explicitly
        datastore.remove(ref)
        datastore.useChecksum = True
        datastore.put(metrics, ref)

        info = datastore.getStoredItemInfo(ref)
        self.assertIsNotNone(info.checksum)


class CleanupPosixDatastoreTestCase(DatastoreTestsBase, unittest.TestCase):
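    """Tests of datastore cleanup behaviour after failed writes."""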
    configFile = os.path.join(TESTDIR, "config/basic/butler.yaml")

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()

    def testCleanup(self):
        """Test that a failed formatter write cleans up a partial file."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        storageClass = self.storageClassFactory.getStorageClass("StructuredData")

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}

        ref = self.makeDatasetRef("metric", dimensions, storageClass, dataId, conform=False)

        # Determine where the file will end up (we assume Formatters use
        # the same file extension)
        expectedUri = datastore.getUri(ref, predict=True)
        self.assertTrue(expectedUri.endswith(".yaml#predicted"),
                        f"Is there a file extension in {expectedUri}")

        # Convert to ButlerURI so we can extract the path component
        expectedUri = ButlerURI(expectedUri)
        expectedFile = expectedUri.path

        # Try a formatter that fails, and a formatter that fails but
        # leaves a file behind
        for formatter in (BadWriteFormatter, BadNoWriteFormatter):
            with self.subTest(formatter=formatter):

                # Monkey patch the formatter
                datastore.formatterFactory.registerFormatter(ref.datasetType, formatter,
                                                             overwrite=True)

                # Try to put the dataset, it should fail
                with self.assertRaises(Exception):
                    datastore.put(metrics, ref)

                # Check that there is no file on disk
                self.assertFalse(os.path.exists(expectedFile), f"Check for existence of {expectedFile}")

                # Check that there is a directory
                self.assertTrue(os.path.exists(os.path.dirname(expectedFile)),
                                f"Check for existence of directory {os.path.dirname(expectedFile)}")

        # Force YamlFormatter and check that this time a file is written
        datastore.formatterFactory.registerFormatter(ref.datasetType, YamlFormatter,
                                                     overwrite=True)
        datastore.put(metrics, ref)
        self.assertTrue(os.path.exists(expectedFile), f"Check for existence of {expectedFile}")
        datastore.remove(ref)
        self.assertFalse(os.path.exists(expectedFile), f"Check for existence of now removed {expectedFile}")


class InMemoryDatastoreTestCase(DatastoreTests, unittest.TestCase):
    """InMemoryDatastore specialization"""
    configFile = os.path.join(TESTDIR, "config/basic/inMemoryDatastore.yaml")
    uriScheme = "mem:"
    hasUnsupportedPut = False
    ingestTransferModes = ()
    isEphemeral = True
    rootKeys = None
    validationCanFail = False


class ChainedDatastoreTestCase(PosixDatastoreTestCase):
    """ChainedDatastore specialization using a PosixDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastore.yaml")
    hasUnsupportedPut = False
    canIngestNoTransferAuto = False
    ingestTransferModes = ("copy", "hardlink", "symlink", "relsymlink", "link", "auto")
    isEphemeral = False
    rootKeys = (".datastores.1.root", ".datastores.2.root")
    validationCanFail = True


class ChainedDatastoreMemoryTestCase(InMemoryDatastoreTestCase):
    """ChainedDatastore specialization using all InMemoryDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastore2.yaml")
    validationCanFail = False


class DatastoreConstraintsTests(DatastoreTestsBase):
    """Basic tests of the constraints model of Datastores."""

    def testConstraints(self):
        """Test constraints model. Assumes that each test class has the
        same constraints."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        sc1 = self.storageClassFactory.getStorageClass("StructuredData")
        sc2 = self.storageClassFactory.getStorageClass("StructuredDataJson")
        dimensions = self.universe.extract(("visit", "physical_filter", "instrument"))
        dataId = {"visit": 52, "physical_filter": "V", "instrument": "DummyCamComp"}

        # Write empty file suitable for ingest check
        testfile = tempfile.NamedTemporaryFile()
        for datasetTypeName, sc, accepted in (("metric", sc1, True), ("metric2", sc1, False),
                                              ("metric33", sc1, True), ("metric2", sc2, True)):
            with self.subTest(datasetTypeName=datasetTypeName):
                ref = self.makeDatasetRef(datasetTypeName, dimensions, sc, dataId, conform=False)
                if accepted:
                    datastore.put(metrics, ref)
                    self.assertTrue(datastore.exists(ref))
                    datastore.remove(ref)

                    # Try ingest
                    if self.canIngest:
                        datastore.ingest(FileDataset(testfile.name, [ref]), transfer="link")
                        self.assertTrue(datastore.exists(ref))
                        datastore.remove(ref)
                else:
                    with self.assertRaises(DatasetTypeNotSupportedError):
                        datastore.put(metrics, ref)
                    self.assertFalse(datastore.exists(ref))

                    # Again with ingest
                    if self.canIngest:
                        with self.assertRaises(DatasetTypeNotSupportedError):
                            datastore.ingest(FileDataset(testfile.name, [ref]), transfer="link")
                        self.assertFalse(datastore.exists(ref))


class PosixDatastoreConstraintsTestCase(DatastoreConstraintsTests, unittest.TestCase):
    """PosixDatastore specialization"""
    configFile = os.path.join(TESTDIR, "config/basic/posixDatastoreP.yaml")
    canIngest = True

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()


class InMemoryDatastoreConstraintsTestCase(DatastoreConstraintsTests, unittest.TestCase):
    """InMemoryDatastore specialization"""
    configFile = os.path.join(TESTDIR, "config/basic/inMemoryDatastoreP.yaml")
    canIngest = False


class ChainedDatastoreConstraintsNativeTestCase(PosixDatastoreConstraintsTestCase):
    """ChainedDatastore specialization using a PosixDatastore and constraints
    at the ChainedDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastorePa.yaml")


class ChainedDatastoreConstraintsTestCase(PosixDatastoreConstraintsTestCase):
    """ChainedDatastore specialization using a PosixDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastoreP.yaml")


class ChainedDatastoreMemoryConstraintsTestCase(InMemoryDatastoreConstraintsTestCase):
    """ChainedDatastore specialization using all InMemoryDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastore2P.yaml")
    canIngest = False


class ChainedDatastorePerStoreConstraintsTests(DatastoreTestsBase, unittest.TestCase):
    """Test that a chained datastore can control constraints per-datastore
    even if a child datastore would accept."""

    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastorePb.yaml")

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()

    def testConstraints(self):
        """Test chained datastore constraints model."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        sc1 = self.storageClassFactory.getStorageClass("StructuredData")
        sc2 = self.storageClassFactory.getStorageClass("StructuredDataJson")
        dimensions = self.universe.extract(("visit", "physical_filter", "instrument"))
        dataId1 = {"visit": 52, "physical_filter": "V", "instrument": "DummyCamComp"}
        dataId2 = {"visit": 52, "physical_filter": "V", "instrument": "HSC"}

        # Write empty file suitable for ingest check
        testfile = tempfile.NamedTemporaryFile()
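
        # Each tuple below: (datasetTypeName, dataId, storageClass,
        # acceptance flag for each child datastore, whether ingest should work)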
        for typeName, dataId, sc, accept, ingest in (("metric", dataId1, sc1, (False, True, False), True),
                                                     ("metric2", dataId1, sc1, (False, False, False), False),
                                                     ("metric2", dataId2, sc1, (True, False, False), False),
                                                     ("metric33", dataId2, sc2, (True, True, False), True),
                                                     ("metric2", dataId1, sc2, (False, True, False), True)):
            with self.subTest(datasetTypeName=typeName, dataId=dataId, sc=sc.name):
                ref = self.makeDatasetRef(typeName, dimensions, sc, dataId,
                                          conform=False)
                if any(accept):
                    datastore.put(metrics, ref)
                    self.assertTrue(datastore.exists(ref))

                    # Check each datastore inside the chained datastore
                    for childDatastore, expected in zip(datastore.datastores, accept):
                        self.assertEqual(childDatastore.exists(ref), expected,
                                         f"Testing presence of {ref} in datastore {childDatastore.name}")

                    datastore.remove(ref)

                    # Check that ingest works
                    if ingest:
                        datastore.ingest(FileDataset(testfile.name, [ref]), transfer="link")
                        self.assertTrue(datastore.exists(ref))

                        # Check each datastore inside the chained datastore
                        for childDatastore, expected in zip(datastore.datastores, accept):
                            # At the moment ephemeral datastores mean InMemory,
                            # and those do not accept ingest of files.
                            if childDatastore.isEphemeral:
                                expected = False
                            self.assertEqual(childDatastore.exists(ref), expected,
                                             f"Testing presence of ingested {ref} in datastore"
                                             f" {childDatastore.name}")

                        datastore.remove(ref)
                    else:
                        with self.assertRaises(DatasetTypeNotSupportedError):
                            datastore.ingest(FileDataset(testfile.name, [ref]), transfer="link")
                else:
                    with self.assertRaises(DatasetTypeNotSupportedError):
                        datastore.put(metrics, ref)
                    self.assertFalse(datastore.exists(ref))

                    # Again with ingest
                    with self.assertRaises(DatasetTypeNotSupportedError):
                        datastore.ingest(FileDataset(testfile.name, [ref]), transfer="link")
                    self.assertFalse(datastore.exists(ref))


if __name__ == "__main__":
    unittest.main()