Coverage for tests/test_datastore.py: 19%

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import os
import unittest
import shutil
import yaml
import tempfile

# getTempFilePath lives in the tests submodule, which must be imported
# explicitly for lsst.utils.tests.getTempFilePath below to resolve
import lsst.utils.tests

from lsst.utils import doImport

from lsst.daf.butler import StorageClassFactory, StorageClass, DimensionUniverse, FileDataset
from lsst.daf.butler import DatastoreConfig, DatasetTypeNotSupportedError, DatastoreValidationError
from lsst.daf.butler import ButlerURI
from lsst.daf.butler.formatters.yamlFormatter import YamlFormatter

from lsst.daf.butler.tests import (DatasetTestHelper, DatastoreTestHelper, BadWriteFormatter,
                                   BadNoWriteFormatter, MetricsExample, DummyRegistry)

TESTDIR = os.path.dirname(__file__)


def makeExampleMetrics():
    return MetricsExample({"AM1": 5.2, "AM2": 30.6},
                          {"a": [1, 2, 3],
                           "b": {"blue": 5, "red": "green"}},
                          [563, 234, 456.7]
                          )
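
# Note (inferred from the tests below, not from the original source):
# testBasicPutGet reads ``metricsOut.output`` and runIngestTest serializes
# via ``_asdict()``, so the three positional arguments here appear to map
# to the summary, output, and data components of MetricsExample.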


class TransactionTestError(Exception):
    """Specific error for transactions, to prevent misdiagnosis that
    might otherwise occur when a standard exception is used.
    """
    pass


class DatastoreTestsBase(DatasetTestHelper, DatastoreTestHelper):
    """Support routines for datastore testing"""
    root = None

    @classmethod
    def setUpClass(cls):
        # Storage Classes are fixed for all datastores in these tests
        scConfigFile = os.path.join(TESTDIR, "config/basic/storageClasses.yaml")
        cls.storageClassFactory = StorageClassFactory()
        cls.storageClassFactory.addFromConfig(scConfigFile)

        # Read the Datastore config so we can get the class
        # information (since we should not assume the constructor
        # name here, but rely on the configuration file itself)
        datastoreConfig = DatastoreConfig(cls.configFile)
        cls.datastoreType = doImport(datastoreConfig["cls"])
        cls.universe = DimensionUniverse()

    def setUp(self):
        self.setUpDatastoreTests(DummyRegistry, DatastoreConfig)

    def tearDown(self):
        if self.root is not None and os.path.exists(self.root):
            shutil.rmtree(self.root, ignore_errors=True)
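

# The concrete test cases below configure the shared tests through a small
# set of class-level attributes. Summarizing their roles (inferred from
# their use in this file, not a comment carried over from the source):
#
#   configFile          -- datastore configuration to load
#   uriScheme           -- expected prefix of URIs returned by getUri()
#   ingestTransferModes -- transfer modes that ingest should accept
#   isEphemeral         -- whether the datastore is in-memory only
#   rootKeys            -- config keys that setConfigRoot() should update
#   validationCanFail   -- whether validateConfiguration() can raise
#   hasUnsupportedPut   -- whether put() of an unsupported type should raise
#   canIngest           -- whether the constraints tests should try ingest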


class DatastoreTests(DatastoreTestsBase):
    """Some basic tests of a simple datastore."""

    hasUnsupportedPut = True

    def testConfigRoot(self):
        full = DatastoreConfig(self.configFile)
        config = DatastoreConfig(self.configFile, mergeDefaults=False)
        newroot = "/random/location"
        self.datastoreType.setConfigRoot(newroot, config, full)
        if self.rootKeys:
            for k in self.rootKeys:
                self.assertIn(newroot, config[k])

    def testConstructor(self):
        datastore = self.makeDatastore()
        self.assertIsNotNone(datastore)
        self.assertIs(datastore.isEphemeral, self.isEphemeral)

    def testConfigurationValidation(self):
        datastore = self.makeDatastore()
        sc = self.storageClassFactory.getStorageClass("ThingOne")
        datastore.validateConfiguration([sc])

        sc2 = self.storageClassFactory.getStorageClass("ThingTwo")
        if self.validationCanFail:
            with self.assertRaises(DatastoreValidationError):
                datastore.validateConfiguration([sc2], logFailures=True)

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
        datastore.validateConfiguration([ref])

    def testParameterValidation(self):
        """Check that parameters are validated"""
        sc = self.storageClassFactory.getStorageClass("ThingOne")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
        datastore = self.makeDatastore()
        data = {1: 2, 3: 4}
        datastore.put(data, ref)
        newdata = datastore.get(ref)
        self.assertEqual(data, newdata)
        with self.assertRaises(KeyError):
            newdata = datastore.get(ref, parameters={"missing": 5})

    def testBasicPutGet(self):
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        # Create multiple storage classes for testing different formulations
        storageClasses = [self.storageClassFactory.getStorageClass(sc)
                          for sc in ("StructuredData",
                                     "StructuredDataJson",
                                     "StructuredDataPickle")]

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}

        for sc in storageClasses:
            ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
            print("Using storageClass: {}".format(sc.name))
            datastore.put(metrics, ref)

            # Does it exist?
            self.assertTrue(datastore.exists(ref))

            # Get
            metricsOut = datastore.get(ref, parameters=None)
            self.assertEqual(metrics, metricsOut)

            uri = datastore.getUri(ref)
            self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

            # Get a component -- we need to construct new refs for them
            # with derived storage classes but with parent ID
            comp = "output"
            compRef = self.makeDatasetRef(ref.datasetType.componentTypeName(comp), dimensions,
                                          sc.components[comp], dataId, id=ref.id)
            output = datastore.get(compRef)
            self.assertEqual(output, metricsOut.output)

            uri = datastore.getUri(compRef)
            self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

        storageClass = sc

        # Check that a put fails if the dataset type is not supported
        if self.hasUnsupportedPut:
            sc = StorageClass("UnsupportedSC", pytype=type(metrics))
            ref = self.makeDatasetRef("unsupportedType", dimensions, sc, dataId)
            with self.assertRaises(DatasetTypeNotSupportedError):
                datastore.put(metrics, ref)

        # These should raise
        ref = self.makeDatasetRef("metrics", dimensions, storageClass, dataId, id=10000)
        with self.assertRaises(FileNotFoundError):
            # non-existing file
            datastore.get(ref)

        # Get a URI from it
        uri = datastore.getUri(ref, predict=True)
        self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

        with self.assertRaises(FileNotFoundError):
            datastore.getUri(ref)

    def testCompositePutGet(self):
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        # Create multiple storage classes for testing different formulations
        # of composites
        storageClasses = [self.storageClassFactory.getStorageClass(sc)
                          for sc in ("StructuredComposite",
                                     "StructuredCompositeTestA",
                                     "StructuredCompositeTestB")]

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 428, "physical_filter": "R"}

        for sc in storageClasses:
            print("Using storageClass: {}".format(sc.name))
            ref = self.makeDatasetRef("metric", dimensions, sc, dataId,
                                      conform=False)

            components = sc.assembler().disassemble(metrics)
            self.assertTrue(components)

            compsRead = {}
            for compName, compInfo in components.items():
                compRef = self.makeDatasetRef(ref.datasetType.componentTypeName(compName), dimensions,
                                              components[compName].storageClass, dataId,
                                              conform=False)

                print("Writing component {} with {}".format(compName, compRef.datasetType.storageClass.name))
                datastore.put(compInfo.component, compRef)

                uri = datastore.getUri(compRef)
                self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

                compsRead[compName] = datastore.get(compRef)

                # We can generate identical files for each storage class
                # so remove the component here
                datastore.remove(compRef)

            # Combine all the components we read back into a new composite
            metricsOut = sc.assembler().assemble(compsRead)
            self.assertEqual(metrics, metricsOut)

    def testRemove(self):
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()
        # Put
        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 638, "physical_filter": "U"}

        sc = self.storageClassFactory.getStorageClass("StructuredData")
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
        datastore.put(metrics, ref)

        # Does it exist?
        self.assertTrue(datastore.exists(ref))

        # Get
        metricsOut = datastore.get(ref)
        self.assertEqual(metrics, metricsOut)
        # Remove
        datastore.remove(ref)

        # Does it exist?
        self.assertFalse(datastore.exists(ref))

        # Do we now get a predicted URI?
        uri = datastore.getUri(ref, predict=True)
        self.assertTrue(uri.endswith("#predicted"))

        # Get should now fail
        with self.assertRaises(FileNotFoundError):
            datastore.get(ref)
        # Can only delete once
        with self.assertRaises(FileNotFoundError):
            datastore.remove(ref)

    def testTransfer(self):
        metrics = makeExampleMetrics()

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 2048, "physical_filter": "Uprime"}

        sc = self.storageClassFactory.getStorageClass("StructuredData")
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)

        inputDatastore = self.makeDatastore("test_input_datastore")
        outputDatastore = self.makeDatastore("test_output_datastore")

        inputDatastore.put(metrics, ref)
        outputDatastore.transfer(inputDatastore, ref)

        metricsOut = outputDatastore.get(ref)
        self.assertEqual(metrics, metricsOut)

    def testBasicTransaction(self):
        datastore = self.makeDatastore()
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        nDatasets = 6
        dataIds = [{"instrument": "dummy", "visit": i, "physical_filter": "V"} for i in range(nDatasets)]
        data = [(self.makeDatasetRef("metric", dimensions, storageClass, dataId, conform=False),
                 makeExampleMetrics(),)
                for dataId in dataIds]
        succeed = data[:nDatasets//2]
        fail = data[nDatasets//2:]
        # All datasets added in this transaction should continue to exist
        with datastore.transaction():
            for ref, metrics in succeed:
                datastore.put(metrics, ref)
        # Whereas datasets added in this transaction should not
        with self.assertRaises(TransactionTestError):
            with datastore.transaction():
                for ref, metrics in fail:
                    datastore.put(metrics, ref)
                raise TransactionTestError("This should propagate out of the context manager")
        # Check for datasets that should exist
        for ref, metrics in succeed:
            # Does it exist?
            self.assertTrue(datastore.exists(ref))
            # Get
            metricsOut = datastore.get(ref, parameters=None)
            self.assertEqual(metrics, metricsOut)
            # URI
            uri = datastore.getUri(ref)
            self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)
        # Check for datasets that should not exist
        for ref, _ in fail:
            # These should raise
            with self.assertRaises(FileNotFoundError):
                # non-existing file
                datastore.get(ref)
            with self.assertRaises(FileNotFoundError):
                datastore.getUri(ref)

    def testNestedTransaction(self):
        datastore = self.makeDatastore()
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        metrics = makeExampleMetrics()

        dataId = {"instrument": "dummy", "visit": 0, "physical_filter": "V"}
        refBefore = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                        conform=False)
        datastore.put(metrics, refBefore)
        with self.assertRaises(TransactionTestError):
            with datastore.transaction():
                dataId = {"instrument": "dummy", "visit": 1, "physical_filter": "V"}
                refOuter = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                               conform=False)
                datastore.put(metrics, refOuter)
                with datastore.transaction():
                    dataId = {"instrument": "dummy", "visit": 2, "physical_filter": "V"}
                    refInner = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                                   conform=False)
                    datastore.put(metrics, refInner)
                # All datasets should exist
                for ref in (refBefore, refOuter, refInner):
                    metricsOut = datastore.get(ref, parameters=None)
                    self.assertEqual(metrics, metricsOut)
                raise TransactionTestError("This should roll back the transaction")
        # Dataset(s) inserted before the transaction should still exist
        metricsOut = datastore.get(refBefore, parameters=None)
        self.assertEqual(metrics, metricsOut)
        # But all datasets inserted during the (rolled back) transaction
        # should be gone
        with self.assertRaises(FileNotFoundError):
            datastore.get(refOuter)
        with self.assertRaises(FileNotFoundError):
            datastore.get(refInner)

    def runIngestTest(self, func, expectOutput=True):
        """Write an example metrics object to a temporary YAML file and
        hand it, with a matching dataset ref, to ``func``."""
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        metrics = makeExampleMetrics()
        dataId = {"instrument": "dummy", "visit": 0, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, storageClass, dataId, conform=False)
        with lsst.utils.tests.getTempFilePath(".yaml", expectOutput=expectOutput) as path:
            with open(path, 'w') as fd:
                yaml.dump(metrics._asdict(), stream=fd)
            func(metrics, path, ref)

    def testIngestNoTransfer(self):
        """Test ingesting existing files with no transfer.
        """
        datastore = self.makeDatastore()

        def succeed(obj, path, ref):
            """Ingest a file already in the datastore root."""
            # first move it into the root, and adjust the path accordingly
            path = shutil.copy(path, datastore.root)
            path = os.path.relpath(path, start=datastore.root)
            datastore.ingest(FileDataset(path=path, refs=ref), transfer=None)
            self.assertEqual(obj, datastore.get(ref))

        def failInputDoesNotExist(obj, path, ref):
            """Can't ingest files if we're given a bad path."""
            with self.assertRaises(FileNotFoundError):
                datastore.ingest(FileDataset(path="this-file-does-not-exist.yaml", refs=ref), transfer=None)
            self.assertFalse(datastore.exists(ref))

        def failOutsideRoot(obj, path, ref):
            """Can't ingest files outside of datastore root."""
            with self.assertRaises(RuntimeError):
                datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=None)
            self.assertFalse(datastore.exists(ref))

        def failNotImplemented(obj, path, ref):
            with self.assertRaises(NotImplementedError):
                datastore.ingest(FileDataset(path=path, refs=ref), transfer=None)

        if None in self.ingestTransferModes:
            self.runIngestTest(failOutsideRoot)
            self.runIngestTest(failInputDoesNotExist)
            self.runIngestTest(succeed)
        else:
            self.runIngestTest(failNotImplemented)

    def testIngestTransfer(self):
        """Test ingesting existing files after transferring them.
        """
        for mode in ("copy", "move", "hardlink", "symlink"):
            with self.subTest(mode=mode):
                datastore = self.makeDatastore(mode)

                def succeed(obj, path, ref):
                    """Ingest a file by transferring it to the template
                    location."""
                    datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)
                    self.assertEqual(obj, datastore.get(ref))

                def failInputDoesNotExist(obj, path, ref):
                    """Can't ingest files if we're given a bad path."""
                    with self.assertRaises(FileNotFoundError):
                        datastore.ingest(FileDataset(path="this-file-does-not-exist.yaml", refs=ref),
                                         transfer=mode)
                    self.assertFalse(datastore.exists(ref))

                def failOutputExists(obj, path, ref):
                    """Can't ingest files if transfer destination already
                    exists."""
                    with self.assertRaises(FileExistsError):
                        datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)
                    self.assertFalse(datastore.exists(ref))

                def failNotImplemented(obj, path, ref):
                    with self.assertRaises(NotImplementedError):
                        datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)

                if mode in self.ingestTransferModes:
                    self.runIngestTest(failInputDoesNotExist)
                    # A "move" deletes the source file, so the temp-file
                    # helper should not expect it to survive
                    self.runIngestTest(succeed, expectOutput=(mode != "move"))
                    self.runIngestTest(failOutputExists)
                else:
                    self.runIngestTest(failNotImplemented)


class PosixDatastoreTestCase(DatastoreTests, unittest.TestCase):
    """PosixDatastore specialization"""
    configFile = os.path.join(TESTDIR, "config/basic/butler.yaml")
    uriScheme = "file:"
    ingestTransferModes = (None, "copy", "move", "hardlink", "symlink")
    isEphemeral = False
    rootKeys = ("root",)
    validationCanFail = True

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()


class PosixDatastoreNoChecksumsTestCase(PosixDatastoreTestCase):
    """Posix datastore tests but with checksums disabled."""
    configFile = os.path.join(TESTDIR, "config/basic/posixDatastoreNoChecksums.yaml")

    def testChecksum(self):
        """Ensure that checksums have not been calculated."""

        datastore = self.makeDatastore()
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        metrics = makeExampleMetrics()

        dataId = {"instrument": "dummy", "visit": 0, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                  conform=False)

        # Configuration should have disabled checksum calculation
        datastore.put(metrics, ref)
        info = datastore.getStoredItemInfo(ref)
        self.assertIsNone(info.checksum)

        # Remove the dataset, then put it back with checksums explicitly
        # enabled
        datastore.remove(ref)
        datastore.useChecksum = True
        datastore.put(metrics, ref)

        info = datastore.getStoredItemInfo(ref)
        self.assertIsNotNone(info.checksum)


class CleanupPosixDatastoreTestCase(DatastoreTestsBase, unittest.TestCase):
    configFile = os.path.join(TESTDIR, "config/basic/butler.yaml")

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()

    def testCleanup(self):
        """Test that a failed formatter write cleans up a partial file."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        storageClass = self.storageClassFactory.getStorageClass("StructuredData")

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}

        ref = self.makeDatasetRef("metric", dimensions, storageClass, dataId, conform=False)

        # Determine where the file will end up (we assume Formatters use
        # the same file extension)
        expectedUri = datastore.getUri(ref, predict=True)
        self.assertTrue(expectedUri.endswith(".yaml#predicted"),
                        f"Expected a file extension in {expectedUri}")

        # Convert to ButlerURI so we can extract the path component
        expectedUri = ButlerURI(expectedUri)
        expectedFile = expectedUri.path

        # Try a formatter that fails without writing a file and a formatter
        # that fails but leaves a file behind
        for formatter in (BadWriteFormatter, BadNoWriteFormatter):
            with self.subTest(formatter=formatter):

                # Monkey patch the formatter
                datastore.formatterFactory.registerFormatter(ref.datasetType, formatter,
                                                             overwrite=True)

                # Try to put the dataset, it should fail
                with self.assertRaises(Exception):
                    datastore.put(metrics, ref)

                # Check that there is no file on disk
                self.assertFalse(os.path.exists(expectedFile), f"Check for existence of {expectedFile}")

                # Check that there is a directory
                self.assertTrue(os.path.exists(os.path.dirname(expectedFile)),
                                f"Check for existence of directory {os.path.dirname(expectedFile)}")

        # Force YamlFormatter and check that this time a file is written
        datastore.formatterFactory.registerFormatter(ref.datasetType, YamlFormatter,
                                                     overwrite=True)
        datastore.put(metrics, ref)
        self.assertTrue(os.path.exists(expectedFile), f"Check for existence of {expectedFile}")
        datastore.remove(ref)
        self.assertFalse(os.path.exists(expectedFile), f"Check for existence of now removed {expectedFile}")


class InMemoryDatastoreTestCase(DatastoreTests, unittest.TestCase):
    """InMemoryDatastore specialization"""
    configFile = os.path.join(TESTDIR, "config/basic/inMemoryDatastore.yaml")
    uriScheme = "mem:"
    hasUnsupportedPut = False
    ingestTransferModes = ()
    isEphemeral = True
    rootKeys = None
    validationCanFail = False
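
# With ingestTransferModes empty, testIngestNoTransfer and testIngestTransfer
# above exercise their failNotImplemented branches for this class: an
# in-memory datastore has no file root to ingest files into.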


class ChainedDatastoreTestCase(PosixDatastoreTestCase):
    """ChainedDatastore specialization using a PosixDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastore.yaml")
    hasUnsupportedPut = False
    ingestTransferModes = ("copy", "hardlink", "symlink")
    isEphemeral = False
    rootKeys = (".datastores.1.root", ".datastores.2.root")
    validationCanFail = True


class ChainedDatastoreMemoryTestCase(InMemoryDatastoreTestCase):
    """ChainedDatastore specialization using all InMemoryDatastores"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastore2.yaml")
    validationCanFail = False


class DatastoreConstraintsTests(DatastoreTestsBase):
    """Basic tests of the constraints model of Datastores."""

    def testConstraints(self):
        """Test constraints model. Assumes that each test class has the
        same constraints."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        sc1 = self.storageClassFactory.getStorageClass("StructuredData")
        sc2 = self.storageClassFactory.getStorageClass("StructuredDataJson")
        dimensions = self.universe.extract(("visit", "physical_filter", "instrument"))
        dataId = {"visit": 52, "physical_filter": "V", "instrument": "DummyCamComp"}

        # Write empty file suitable for ingest check
        testfile = tempfile.NamedTemporaryFile()
        for datasetTypeName, sc, accepted in (("metric", sc1, True), ("metric2", sc1, False),
                                              ("metric33", sc1, True), ("metric2", sc2, True)):
            with self.subTest(datasetTypeName=datasetTypeName):
                ref = self.makeDatasetRef(datasetTypeName, dimensions, sc, dataId, conform=False)
                if accepted:
                    datastore.put(metrics, ref)
                    self.assertTrue(datastore.exists(ref))
                    datastore.remove(ref)

                    # Try ingest
                    if self.canIngest:
                        datastore.ingest(FileDataset(testfile.name, [ref]), transfer="symlink")
                        self.assertTrue(datastore.exists(ref))
                        datastore.remove(ref)
                else:
                    with self.assertRaises(DatasetTypeNotSupportedError):
                        datastore.put(metrics, ref)
                    self.assertFalse(datastore.exists(ref))

                    # Again with ingest
                    if self.canIngest:
                        with self.assertRaises(DatasetTypeNotSupportedError):
                            datastore.ingest(FileDataset(testfile.name, [ref]), transfer="symlink")
                        self.assertFalse(datastore.exists(ref))


class PosixDatastoreConstraintsTestCase(DatastoreConstraintsTests, unittest.TestCase):
    """PosixDatastore specialization"""
    configFile = os.path.join(TESTDIR, "config/basic/posixDatastoreP.yaml")
    canIngest = True

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()


class InMemoryDatastoreConstraintsTestCase(DatastoreConstraintsTests, unittest.TestCase):
    """InMemoryDatastore specialization"""
    configFile = os.path.join(TESTDIR, "config/basic/inMemoryDatastoreP.yaml")
    canIngest = False


class ChainedDatastoreConstraintsNativeTestCase(PosixDatastoreConstraintsTestCase):
    """ChainedDatastore specialization using a PosixDatastore and constraints
    at the ChainedDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastorePa.yaml")


class ChainedDatastoreConstraintsTestCase(PosixDatastoreConstraintsTestCase):
    """ChainedDatastore specialization using a PosixDatastore"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastoreP.yaml")


class ChainedDatastoreMemoryConstraintsTestCase(InMemoryDatastoreConstraintsTestCase):
    """ChainedDatastore specialization using all InMemoryDatastores"""
    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastore2P.yaml")
    canIngest = False


class ChainedDatastorePerStoreConstraintsTests(DatastoreTestsBase, unittest.TestCase):
    """Test that a chained datastore can apply constraints per-datastore,
    even if a child datastore would accept the dataset."""

    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastorePb.yaml")

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()

    def testConstraints(self):
        """Test chained datastore constraints model."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        sc1 = self.storageClassFactory.getStorageClass("StructuredData")
        sc2 = self.storageClassFactory.getStorageClass("StructuredDataJson")
        dimensions = self.universe.extract(("visit", "physical_filter", "instrument"))
        dataId1 = {"visit": 52, "physical_filter": "V", "instrument": "DummyCamComp"}
        dataId2 = {"visit": 52, "physical_filter": "V", "instrument": "HSC"}

        # Write empty file suitable for ingest check
        testfile = tempfile.NamedTemporaryFile()

        # Each "accept" tuple has one entry per child datastore in the chain
        for typeName, dataId, sc, accept, ingest in (("metric", dataId1, sc1, (False, True, False), True),
                                                     ("metric2", dataId1, sc1, (False, False, False), False),
                                                     ("metric2", dataId2, sc1, (True, False, False), False),
                                                     ("metric33", dataId2, sc2, (True, True, False), True),
                                                     ("metric2", dataId1, sc2, (False, True, False), True)):
            with self.subTest(datasetTypeName=typeName, dataId=dataId, sc=sc.name):
                ref = self.makeDatasetRef(typeName, dimensions, sc, dataId,
                                          conform=False)
                if any(accept):
                    datastore.put(metrics, ref)
                    self.assertTrue(datastore.exists(ref))

                    # Check each datastore inside the chained datastore
                    for childDatastore, expected in zip(datastore.datastores, accept):
                        self.assertEqual(childDatastore.exists(ref), expected,
                                         f"Testing presence of {ref} in datastore {childDatastore.name}")

                    datastore.remove(ref)

                    # Check that ingest works
                    if ingest:
                        datastore.ingest(FileDataset(testfile.name, [ref]), transfer="symlink")
                        self.assertTrue(datastore.exists(ref))

                        # Check each datastore inside the chained datastore
                        for childDatastore, expected in zip(datastore.datastores, accept):
                            # Ephemeral datastores mean InMemory at the
                            # moment, and InMemory does not accept ingest
                            # of files.
                            if childDatastore.isEphemeral:
                                expected = False
                            self.assertEqual(childDatastore.exists(ref), expected,
                                             f"Testing presence of ingested {ref} in datastore"
                                             f" {childDatastore.name}")

                        datastore.remove(ref)
                    else:
                        with self.assertRaises(DatasetTypeNotSupportedError):
                            datastore.ingest(FileDataset(testfile.name, [ref]), transfer="symlink")
                else:
                    with self.assertRaises(DatasetTypeNotSupportedError):
                        datastore.put(metrics, ref)
                    self.assertFalse(datastore.exists(ref))

                    # Again with ingest
                    with self.assertRaises(DatasetTypeNotSupportedError):
                        datastore.ingest(FileDataset(testfile.name, [ref]), transfer="symlink")
                    self.assertFalse(datastore.exists(ref))


if __name__ == "__main__":
    unittest.main()
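
# unittest's command-line interface also allows running a single case from
# this module, e.g.:
#
#   python tests/test_datastore.py PosixDatastoreTestCase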