Coverage for python/lsst/daf/butler/tests/_datasetsHelper.py: 33% (61 statements)
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This software is dual licensed under the GNU General Public License and also
# under a 3-clause BSD license. Recipients may choose which of these licenses
# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
# respectively. If you choose the GPL option then the following text applies
# (but note that there is still no warranty even if you opt for BSD instead):
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = (
    "DatasetTestHelper",
    "DatastoreTestHelper",
    "BadWriteFormatter",
    "BadNoWriteFormatter",
    "MultiDetectorFormatter",
)

import os
from collections.abc import Iterable, Mapping
from typing import TYPE_CHECKING, Any

from lsst.daf.butler import DataCoordinate, DatasetRef, DatasetType, DimensionGroup, StorageClass
from lsst.daf.butler.datastore import Datastore
from lsst.daf.butler.formatters.yaml import YamlFormatter

if TYPE_CHECKING:
    from lsst.daf.butler import Config, DatasetId, Dimension, DimensionGraph


class DatasetTestHelper:
    """Helper methods for Datasets."""

    def makeDatasetRef(
        self,
        datasetTypeName: str,
        dimensions: DimensionGroup | DimensionGraph | Iterable[str | Dimension],
        storageClass: StorageClass | str,
        dataId: DataCoordinate | Mapping[str, Any],
        *,
        id: DatasetId | None = None,
        run: str | None = None,
        conform: bool = True,
    ) -> DatasetRef:
        """Make a DatasetType and wrap it in a DatasetRef for a test.

        Parameters
        ----------
        datasetTypeName : `str`
            The name of the dataset type.
        dimensions : `DimensionGroup` or `~collections.abc.Iterable` of `str` \
                or `Dimension`
            The dimensions to use for this dataset type.
        storageClass : `StorageClass` or `str`
            The relevant storage class.
        dataId : `DataCoordinate` or `~collections.abc.Mapping`
            The data ID of this ref.
        id : `DatasetId` or `None`, optional
            The ID of this ref. One will be assigned automatically if `None`.
        run : `str` or `None`, optional
            The run for this ref. Will be assigned a default value if `None`.
        conform : `bool`, optional
            Whether to force the data ID to be checked for conformity with
            the provided dimensions.

        Returns
        -------
        ref : `DatasetRef`
            The new ref.
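
        Examples
        --------
        A minimal sketch, assuming a test case that mixes in this class;
        the dataset type, dimension, and storage class names are
        illustrative stand-ins rather than values defined by this module::

            ref = self.makeDatasetRef(
                "raw",
                ["instrument", "detector"],
                "StructuredDataDict",
                {"instrument": "DummyCam", "detector": 1},
                run="test_run",
            )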
        """
        return self._makeDatasetRef(
            datasetTypeName,
            dimensions,
            storageClass,
            dataId,
            id=id,
            run=run,
            conform=conform,
        )

    def _makeDatasetRef(
        self,
        datasetTypeName: str,
        dimensions: DimensionGroup | DimensionGraph | Iterable[str | Dimension],
        storageClass: StorageClass | str,
        dataId: DataCoordinate | Mapping,
        *,
        id: DatasetId | None = None,
        run: str | None = None,
        conform: bool = True,
    ) -> DatasetRef:
        # helper for makeDatasetRef

        # Pretend we have a parent if this looks like a composite
        compositeName, componentName = DatasetType.splitDatasetTypeName(datasetTypeName)
        parentStorageClass = StorageClass("component") if componentName else None
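        # For example, "metric.summary" splits into compositeName "metric"
        # and componentName "summary", so a placeholder parent storage class
        # is attached to allow the component dataset type to be constructed.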

        datasetType = DatasetType(
            datasetTypeName, dimensions, storageClass, parentStorageClass=parentStorageClass
        )

        if run is None:
            run = "dummy"
        if not isinstance(dataId, DataCoordinate):
            dataId = DataCoordinate.standardize(dataId, dimensions=datasetType.dimensions)
        return DatasetRef(datasetType, dataId, id=id, run=run, conform=conform)


class DatastoreTestHelper:
    """Helper methods for Datastore tests."""

    root: str | None
    config: Config
    datastoreType: type[Datastore]
    configFile: str

    def setUpDatastoreTests(self, registryClass: type, configClass: type[Config]) -> None:
        """Shared setUp code for all Datastore tests.

        Parameters
        ----------
        registryClass : `type`
            Type of registry to use.
        configClass : `type`
            Type of config to use.
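
        Examples
        --------
        A sketch of typical use from a test case's ``setUp``; the registry
        class, config path, and temporary-directory handling shown here are
        assumptions for illustration::

            def setUp(self) -> None:
                self.root = tempfile.mkdtemp()
                self.configFile = "configs/datastore.yaml"
                self.setUpDatastoreTests(DummyRegistry, DatastoreConfig)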
        """
        self.registry = registryClass()
        self.config = configClass(self.configFile)

        # Some subclasses override the working root directory
        if self.root is not None:
            self.datastoreType.setConfigRoot(self.root, self.config, self.config.copy())

    def makeDatastore(self, sub: str | None = None) -> Datastore:
        """Make a new Datastore instance of the appropriate type.

        Parameters
        ----------
        sub : `str`, optional
            If not None, the returned Datastore will be distinct from any
            Datastore constructed with a different value of ``sub``. For
            PosixDatastore, for example, the converse is also true, and
            ``sub`` is used as a subdirectory to form the new root.

        Returns
        -------
        datastore : `Datastore`
            Datastore constructed by this routine using the supplied
            optional subdirectory if supported.
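
        Examples
        --------
        A brief sketch::

            datastore = self.makeDatastore()
            # A second, isolated datastore rooted in a subdirectory.
            other = self.makeDatastore(sub="other")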
        """
        config = self.config.copy()
        if sub is not None and self.root is not None:
            self.datastoreType.setConfigRoot(os.path.join(self.root, sub), config, self.config)
        if sub is not None:
            # Ensure that each datastore gets its own registry
            registryClass = type(self.registry)
            registry = registryClass()
        else:
            registry = self.registry
        return Datastore.fromConfig(config=config, bridgeManager=registry.getDatastoreBridgeManager())


class BadWriteFormatter(YamlFormatter):
    """A formatter that never works but does leave a file behind."""

    def _readFile(self, path: str, pytype: type[Any] | None = None) -> Any:
        raise NotImplementedError("This formatter can not read anything")

    def _writeFile(self, inMemoryDataset: Any) -> None:
        """Write an empty file and then raise an exception."""
        with open(self.fileDescriptor.location.path, "wb"):
            pass
        raise RuntimeError("Did not succeed in writing file")
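

# A hedged usage sketch for BadWriteFormatter: a test can configure it as the
# formatter for a dataset type and then check that a failed put() does not
# leave the partially written file behind (``datastore``, ``inMemoryDataset``,
# and ``ref`` are stand-ins):
#
#     with self.assertRaises(RuntimeError):
#         datastore.put(inMemoryDataset, ref)
#     self.assertFalse(datastore.exists(ref))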


class BadNoWriteFormatter(BadWriteFormatter):
    """A formatter that always fails without writing anything."""

    def _writeFile(self, inMemoryDataset: Any) -> None:
        raise RuntimeError("Did not write anything at all")


class MultiDetectorFormatter(YamlFormatter):
    """A formatter that requires a detector to be specified in the dataId."""

    def _writeFile(self, inMemoryDataset: Any) -> None:
        raise NotImplementedError("Can not write")

    def _fromBytes(self, serializedDataset: bytes, pytype: type[Any] | None = None) -> Any:
        data = super()._fromBytes(serializedDataset)
        if self.dataId is None:
            raise RuntimeError("This formatter requires a dataId")
        if "detector" not in self.dataId:
            raise RuntimeError("This formatter requires detector to be present in dataId")
        key = f"detector{self.dataId['detector']}"
        assert pytype is not None
        if key in data:
            return pytype(data[key])
        raise RuntimeError(f"Could not find '{key}' in data file")
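

# A hedged sketch of the per-detector YAML layout MultiDetectorFormatter
# expects when reading: top-level keys of the form "detector<N>" map to the
# payload for detector N (the contents below are illustrative):
#
#     detector0:
#         gain: 1.2
#     detector1:
#         gain: 1.3
#
# With a dataId containing detector=1 and ``dict`` as the python type,
# _fromBytes would return {"gain": 1.3}.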