Coverage for python/lsst/daf/butler/tests/_datasetsHelper.py: 25%
56 statements
« prev ^ index » next coverage.py v6.5.0, created at 2022-12-01 19:55 +0000
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <http://www.gnu.org/licenses/>.
22__all__ = ("DatasetTestHelper", "DatastoreTestHelper",
23 "BadWriteFormatter", "BadNoWriteFormatter", "MultiDetectorFormatter")
25import os
26from lsst.daf.butler import DatasetType, DatasetRef, StorageClass
27from lsst.daf.butler.formatters.yaml import YamlFormatter
class DatasetTestHelper:
    """Helper methods for Datasets"""

    def makeDatasetRef(self, datasetTypeName, dimensions, storageClass, dataId, *, id=None, run=None,
                       conform=True):
        """Make a DatasetType and wrap it in a DatasetRef for a test"""
        return self._makeDatasetRef(datasetTypeName, dimensions, storageClass, dataId, id=id, run=run,
                                    conform=conform)

    def _makeDatasetRef(self, datasetTypeName, dimensions, storageClass, dataId, *, id=None, run=None,
                        conform=True):
        # Implementation behind makeDatasetRef.

        # A component dataset type name ("a.b") is treated as if it had a
        # composite parent, so give it a placeholder parent storage class.
        _, componentName = DatasetType.splitDatasetTypeName(datasetTypeName)
        if componentName:
            parentStorageClass = StorageClass("component")
        else:
            parentStorageClass = None

        datasetType = DatasetType(datasetTypeName, dimensions, storageClass,
                                  parentStorageClass=parentStorageClass)

        if id is None:
            # No butler is available here, so allocate the next ID from the
            # helper's own counter (initialized by the test fixture).
            self.id += 1
            id = self.id
        run = run if run is not None else "dummy"
        return DatasetRef(datasetType, dataId, id=id, run=run, conform=conform)
class DatastoreTestHelper:
    """Helper methods for Datastore tests"""

    def setUpDatastoreTests(self, registryClass, configClass):
        """Shared setUp code for all Datastore tests"""
        self.registry = registryClass()

        # There is no butler in these tests, so dataset IDs are handed out
        # from this counter instead (see DatasetTestHelper._makeDatasetRef).
        self.id = 1

        self.config = configClass(self.configFile)

        # Subclasses may override the working root directory; honor it here.
        if self.root is not None:
            self.datastoreType.setConfigRoot(self.root, self.config, self.config.copy())

    def makeDatastore(self, sub=None):
        """Make a new Datastore instance of the appropriate type.

        Parameters
        ----------
        sub : str, optional
            If not None, the returned Datastore will be distinct from any
            Datastore constructed with a different value of ``sub``. For
            PosixDatastore, for example, the converse is also true, and ``sub``
            is used as a subdirectory to form the new root.

        Returns
        -------
        datastore : `Datastore`
            Datastore constructed by this routine using the supplied
            optional subdirectory if supported.
        """
        config = self.config.copy()
        if sub is not None and self.root is not None:
            self.datastoreType.setConfigRoot(os.path.join(self.root, sub), config, self.config)

        if sub is None:
            registry = self.registry
        else:
            # Each sub-datastore must get a registry of its own.
            registry = type(self.registry)()

        return self.datastoreType(config=config, bridgeManager=registry.getDatastoreBridgeManager())
class BadWriteFormatter(YamlFormatter):
    """A formatter that never works but does leave a file behind."""

    def _readFile(self, path, pytype=None):
        # Reading is unconditionally unsupported by this formatter.
        raise NotImplementedError("This formatter can not read anything")

    def _writeFile(self, inMemoryDataset):
        """Write an empty file and then raise an exception."""
        # Touch the output path so an (empty, useless) artifact is left on
        # disk before the deliberate failure below.
        stream = open(self.fileDescriptor.location.path, "wb")
        stream.close()
        raise RuntimeError("Did not succeed in writing file")
class BadNoWriteFormatter(BadWriteFormatter):
    """A formatter that always fails without writing anything."""

    def _writeFile(self, inMemoryDataset):
        """Raise without creating any file artifact.

        Unlike the base class, no empty file is left behind.

        Raises
        ------
        RuntimeError
            Always raised before anything touches disk.
        """
        # Fixed ungrammatical error message ("Did not writing ..." ->
        # "Did not write ...").
        raise RuntimeError("Did not write anything at all")
class MultiDetectorFormatter(YamlFormatter):
    """Formatter that extracts a single detector's entry from a
    multi-detector YAML dataset, keyed on the dataId's detector value."""

    def _writeFile(self, inMemoryDataset):
        # This formatter is read-only.
        raise NotImplementedError("Can not write")

    def _fromBytes(self, serializedDataset, pytype=None):
        # Deserialize the full payload, then select this dataId's detector.
        data = super()._fromBytes(serializedDataset)
        if self.dataId is None:
            raise RuntimeError("This formatter requires a dataId")
        if "detector" not in self.dataId:
            raise RuntimeError("This formatter requires detector to be present in dataId")
        key = f"detector{self.dataId['detector']}"
        # Guard clause: fail fast if the expected per-detector key is absent.
        if key not in data:
            raise RuntimeError(f"Could not find '{key}' in data file")
        return pytype(data[key])