Coverage for python/lsst/daf/butler/tests/_datasetsHelper.py: 32%
60 statements
« prev ^ index » next coverage.py v7.2.7, created at 2023-08-05 01:26 +0000
« prev ^ index » next coverage.py v7.2.7, created at 2023-08-05 01:26 +0000
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <http://www.gnu.org/licenses/>.
22from __future__ import annotations
24__all__ = (
25 "DatasetTestHelper",
26 "DatastoreTestHelper",
27 "BadWriteFormatter",
28 "BadNoWriteFormatter",
29 "MultiDetectorFormatter",
30)
32import os
33from collections.abc import Iterable, Mapping
34from typing import TYPE_CHECKING, Any
36from lsst.daf.butler import DataCoordinate, DatasetRef, DatasetType, StorageClass
37from lsst.daf.butler.formatters.yaml import YamlFormatter
39if TYPE_CHECKING:
40 from lsst.daf.butler import Config, DatasetId, Datastore, Dimension, DimensionGraph
41 from lsst.daf.butler.registry import _ButlerRegistry
class DatasetTestHelper:
    """Helper methods for constructing Datasets in tests."""

    def makeDatasetRef(
        self,
        datasetTypeName: str,
        dimensions: DimensionGraph | Iterable[str | Dimension],
        storageClass: StorageClass | str,
        dataId: DataCoordinate | Mapping[str, Any],
        *,
        id: DatasetId | None = None,
        run: str | None = None,
        conform: bool = True,
    ) -> DatasetRef:
        """Make a DatasetType and wrap it in a DatasetRef for a test."""
        return self._makeDatasetRef(
            datasetTypeName,
            dimensions,
            storageClass,
            dataId,
            id=id,
            run=run,
            conform=conform,
        )

    def _makeDatasetRef(
        self,
        datasetTypeName: str,
        dimensions: DimensionGraph | Iterable[str | Dimension],
        storageClass: StorageClass | str,
        dataId: DataCoordinate | Mapping,
        *,
        id: DatasetId | None = None,
        run: str | None = None,
        conform: bool = True,
    ) -> DatasetRef:
        # Implementation backing makeDatasetRef.

        # A component-style name ("composite.component") implies a composite,
        # so fabricate a parent storage class in that case.
        _, componentName = DatasetType.splitDatasetTypeName(datasetTypeName)
        parentStorageClass = StorageClass("component") if componentName else None

        datasetType = DatasetType(
            datasetTypeName, dimensions, storageClass, parentStorageClass=parentStorageClass
        )

        # Default run name used when the caller does not care.
        actualRun = "dummy" if run is None else run

        coordinate = dataId
        if not isinstance(coordinate, DataCoordinate):
            coordinate = DataCoordinate.standardize(coordinate, graph=datasetType.dimensions)
        return DatasetRef(datasetType, coordinate, id=id, run=actualRun, conform=conform)
class DatastoreTestHelper:
    """Helper methods for Datastore tests."""

    root: str | None
    config: Config
    datastoreType: type[Datastore]
    configFile: str

    def setUpDatastoreTests(self, registryClass: type[_ButlerRegistry], configClass: type[Config]) -> None:
        """Shared setUp code for all Datastore tests."""
        self.registry = registryClass()
        self.config = configClass(self.configFile)

        # Subclasses may override the working root directory; when they do,
        # propagate that root into the configuration.
        if self.root is None:
            return
        self.datastoreType.setConfigRoot(self.root, self.config, self.config.copy())

    def makeDatastore(self, sub: str | None = None) -> Datastore:
        """Make a new Datastore instance of the appropriate type.

        Parameters
        ----------
        sub : str, optional
            If not None, the returned Datastore will be distinct from any
            Datastore constructed with a different value of ``sub``. For
            PosixDatastore, for example, the converse is also true, and ``sub``
            is used as a subdirectory to form the new root.

        Returns
        -------
        datastore : `Datastore`
            Datastore constructed by this routine using the supplied
            optional subdirectory if supported.
        """
        newConfig = self.config.copy()
        if sub is not None and self.root is not None:
            self.datastoreType.setConfigRoot(os.path.join(self.root, sub), newConfig, self.config)

        if sub is None:
            registry = self.registry
        else:
            # Ensure that each datastore gets its own registry.
            registry = type(self.registry)()
        return self.datastoreType(config=newConfig, bridgeManager=registry.getDatastoreBridgeManager())
class BadWriteFormatter(YamlFormatter):
    """A formatter that never works but does leave a file behind."""

    def _readFile(self, path: str, pytype: type[Any] | None = None) -> Any:
        # Reading is unconditionally unsupported for this formatter.
        raise NotImplementedError("This formatter can not read anything")

    def _writeFile(self, inMemoryDataset: Any) -> None:
        """Write an empty file and then raise an exception."""
        # Touch the target path so the failure leaves evidence behind.
        with open(self.fileDescriptor.location.path, "wb"):
            pass
        raise RuntimeError("Did not succeed in writing file")
class BadNoWriteFormatter(BadWriteFormatter):
    """A formatter that always fails without writing anything."""

    def _writeFile(self, inMemoryDataset: Any) -> None:
        """Raise without touching the filesystem.

        Parameters
        ----------
        inMemoryDataset : `object`
            Dataset that would have been written; ignored.

        Raises
        ------
        RuntimeError
            Always raised; unlike the base class, no file is created.
        """
        # Fixed the original ungrammatical message ("Did not writing ...").
        raise RuntimeError("Did not write anything at all")
class MultiDetectorFormatter(YamlFormatter):
    """A formatter that requires a detector to be specified in the dataId."""

    def _writeFile(self, inMemoryDataset: Any) -> None:
        # Writing is unconditionally unsupported for this formatter.
        raise NotImplementedError("Can not write")

    def _fromBytes(self, serializedDataset: bytes, pytype: type[Any] | None = None) -> Any:
        """Deserialize, then extract the entry matching this dataId's detector."""
        parsed = super()._fromBytes(serializedDataset)
        if self.dataId is None:
            raise RuntimeError("This formatter requires a dataId")
        if "detector" not in self.dataId:
            raise RuntimeError("This formatter requires detector to be present in dataId")
        # Entries in the file are keyed as "detector<N>".
        key = f"detector{self.dataId['detector']}"
        assert pytype is not None
        if key not in parsed:
            raise RuntimeError(f"Could not find '{key}' in data file")
        return pytype(parsed[key])