Coverage for python/lsst/daf/butler/tests/_examplePythonTypes.py: 38%
105 statements
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""
Python classes that can be used to test datastores without requiring
large external dependencies on python classes such as afw or serialization
formats such as FITS or HDF5.
"""

from __future__ import annotations

__all__ = (
    "ListDelegate",
    "MetricsDelegate",
    "MetricsExample",
    "registerMetricsExample",
    "MetricsExampleModel",
    "MetricsExampleDataclass",
)

import copy
import dataclasses
import types
from collections.abc import Mapping
from typing import TYPE_CHECKING, Any

from lsst.daf.butler import StorageClass, StorageClassDelegate
from lsst.daf.butler._compat import _BaseModelCompat

if TYPE_CHECKING:
    from lsst.daf.butler import Butler, Datastore, FormatterFactory


def registerMetricsExample(butler: Butler) -> None:
    """Modify a repository to support reading and writing
    `MetricsExample` objects.

    This method allows `MetricsExample` to be used with test repositories
    in any package without needing to provide a custom configuration there.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        The repository that needs to support `MetricsExample`.

    Notes
    -----
    This method enables the following storage classes:

    ``StructuredData``
        A `MetricsExample` whose ``summary``, ``output``, and ``data`` members
        can be retrieved as dataset components.
    ``StructuredDataNoComponents``
        A monolithic write of a `MetricsExample`.
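
    Examples
    --------
    A minimal sketch of how a test might use this helper; the exact
    repository construction shown here is illustrative and depends on the
    test environment::

        butler = Butler(repo_root, writeable=True, run="example/run")
        registerMetricsExample(butler)
        # Dataset types that use the "StructuredData" or
        # "StructuredDataNoComponents" storage classes can now be
        # registered and used to put/get MetricsExample objects.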
74 """
75 yamlDict = _addFullStorageClass(
76 butler,
77 "StructuredDataDictYaml",
78 "lsst.daf.butler.formatters.yaml.YamlFormatter",
79 pytype=dict,
80 )
82 yamlList = _addFullStorageClass(
83 butler,
84 "StructuredDataListYaml",
85 "lsst.daf.butler.formatters.yaml.YamlFormatter",
86 pytype=list,
87 parameters={"slice"},
88 delegate="lsst.daf.butler.tests.ListDelegate",
89 )
91 _addFullStorageClass(
92 butler,
93 "StructuredDataNoComponents",
94 "lsst.daf.butler.formatters.pickle.PickleFormatter",
95 pytype=MetricsExample,
96 parameters={"slice"},
97 delegate="lsst.daf.butler.tests.MetricsDelegate",
98 )
100 _addFullStorageClass(
101 butler,
102 "StructuredData",
103 "lsst.daf.butler.formatters.yaml.YamlFormatter",
104 pytype=MetricsExample,
105 components={
106 "summary": yamlDict,
107 "output": yamlDict,
108 "data": yamlList,
109 },
110 delegate="lsst.daf.butler.tests.MetricsDelegate",
111 )


def _addFullStorageClass(
    butler: Butler, name: str, formatter: str, *args: Any, **kwargs: Any
) -> StorageClass:
    """Create a storage class-formatter pair in a repository if it does not
    already exist.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        The repository that needs to contain the class.
    name : `str`
        The name to use for the class.
    formatter : `str`
        The formatter to use with the storage class. Ignored if ``butler``
        does not use formatters.
    *args
    **kwargs
        Arguments, other than ``name``, to the `~lsst.daf.butler.StorageClass`
        constructor.

    Returns
    -------
    class : `lsst.daf.butler.StorageClass`
        The newly created storage class, or the class of the same name
        previously found in the repository.
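
    Examples
    --------
    For instance, to register a simple `dict`-backed storage class using the
    YAML formatter (mirroring the calls made in `registerMetricsExample`)::

        yamlDict = _addFullStorageClass(
            butler,
            "StructuredDataDictYaml",
            "lsst.daf.butler.formatters.yaml.YamlFormatter",
            pytype=dict,
        )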
139 """
140 storageRegistry = butler._datastore.storageClassFactory
142 storage = StorageClass(name, *args, **kwargs)
143 try:
144 storageRegistry.registerStorageClass(storage)
145 except ValueError:
146 storage = storageRegistry.getStorageClass(name)
148 for registry in _getAllFormatterRegistries(butler._datastore):
149 registry.registerFormatter(storage, formatter)
151 return storage


def _getAllFormatterRegistries(datastore: Datastore) -> list[FormatterFactory]:
    """Return all formatter registries used by a datastore.

    Parameters
    ----------
    datastore : `lsst.daf.butler.Datastore`
        A datastore containing zero or more formatter registries.

    Returns
    -------
    registries : `list` [`lsst.daf.butler.FormatterFactory`]
        A possibly empty list of all formatter registries used
        by ``datastore``.
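
    Notes
    -----
    The lookup is duck-typed: a datastore that exposes a ``datastores``
    attribute (such as a chained datastore) is expanded into its child
    datastores, and only children that define a ``formatterFactory``
    attribute contribute to the result. Datastores without formatters
    (for example, purely in-memory ones) are simply skipped.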
167 """
168 try:
169 datastores = datastore.datastores # type: ignore[attr-defined]
170 except AttributeError:
171 datastores = [datastore]
173 registries = []
174 for datastore in datastores:
175 try:
176 # Not all datastores have a formatterFactory
177 formatterRegistry = datastore.formatterFactory # type: ignore[attr-defined]
178 except AttributeError:
179 pass # no formatter needed
180 else:
181 registries.append(formatterRegistry)
182 return registries


class MetricsExample:
    """Smorgasbord of information that might be the result of some
    processing.

    Parameters
    ----------
    summary : `dict`
        Simple dictionary mapping key performance metrics to a scalar
        result.
    output : `dict`
        Structured nested data.
    data : `list`, optional
        Arbitrary array data.
    """

    def __init__(
        self,
        summary: dict[str, Any] | None = None,
        output: dict[str, Any] | None = None,
        data: list[Any] | None = None,
    ) -> None:
        self.summary = summary
        self.output = output
        self.data = data

    def __eq__(self, other: Any) -> bool:
        try:
            return self.summary == other.summary and self.output == other.output and self.data == other.data
        except AttributeError:
            pass
        return NotImplemented

    def __str__(self) -> str:
        return str(self.exportAsDict())

    def __repr__(self) -> str:
        return f"MetricsExample({self.exportAsDict()})"

    def exportAsDict(self) -> dict[str, list | dict | None]:
        """Convert object contents to a single python dict."""
        exportDict: dict[str, list | dict | None] = {"summary": self.summary, "output": self.output}
        if self.data is not None:
            exportDict["data"] = list(self.data)
        else:
            exportDict["data"] = None
        return exportDict

    def _asdict(self) -> dict[str, list | dict | None]:
        """Convert object contents to a single Python dict.

        This interface is used for JSON serialization.

        Returns
        -------
        exportDict : `dict`
            Object contents in the form of a dict with keys corresponding
            to object attributes.
        """
        return self.exportAsDict()

    @classmethod
    def makeFromDict(cls, exportDict: dict[str, list | dict | None]) -> MetricsExample:
        """Create a new object from a dict that is compatible with that
        created by `exportAsDict`.

        Parameters
        ----------
        exportDict : `dict`
            `dict` with keys "summary", "output", and (optionally) "data".

        Returns
        -------
        newobject : `MetricsExample`
            New `MetricsExample` object.
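
        Examples
        --------
        A simple round trip through `exportAsDict` (the values here are
        purely illustrative)::

            metrics = MetricsExample(
                summary={"answer": 42},
                output={"AM1": 5.2},
                data=[1, 2, 3],
            )
            restored = MetricsExample.makeFromDict(metrics.exportAsDict())
            assert restored == metrics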
259 """
260 data = exportDict["data"] if "data" in exportDict else None
261 assert isinstance(data, list | types.NoneType)
262 assert isinstance(exportDict["summary"], dict | types.NoneType)
263 assert isinstance(exportDict["output"], dict | types.NoneType)
264 return cls(exportDict["summary"], exportDict["output"], data)


class MetricsExampleModel(_BaseModelCompat):
    """A variant of `MetricsExample` based on a Pydantic model."""

    summary: dict[str, Any] | None = None
    output: dict[str, Any] | None = None
    data: list[Any] | None = None

    @classmethod
    def from_metrics(cls, metrics: MetricsExample) -> MetricsExampleModel:
        """Create a model based on an example."""
        return cls.parse_obj(metrics.exportAsDict())


@dataclasses.dataclass
class MetricsExampleDataclass:
    """A variant of `MetricsExample` based on a dataclass."""

    summary: dict[str, Any] | None
    output: dict[str, Any] | None
    data: list[Any] | None


class ListDelegate(StorageClassDelegate):
    """Parameter handler for list parameters."""

    def handleParameters(self, inMemoryDataset: Any, parameters: Mapping[str, Any] | None = None) -> Any:
        """Modify the in-memory dataset using the supplied parameters,
        returning a possibly new object.

        Parameters
        ----------
        inMemoryDataset : `object`
            Object to modify based on the parameters.
        parameters : `dict`
            Parameters to apply. Values are specific to the parameter.
            Supported parameters are defined in the associated
            `StorageClass`. If no relevant parameters are specified, the
            inMemoryDataset will be returned unchanged.

        Returns
        -------
        inMemoryDataset : `object`
            Updated form of supplied in-memory dataset, after parameters
            have been used.
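
        Examples
        --------
        For instance, applying a ``slice`` parameter to a plain `list`; the
        storage class shown here mirrors the one registered by
        `registerMetricsExample`::

            sc = StorageClass(
                "StructuredDataListYaml",
                pytype=list,
                parameters={"slice"},
                delegate="lsst.daf.butler.tests.ListDelegate",
            )
            delegate = ListDelegate(sc)
            delegate.handleParameters([1, 2, 3, 4], parameters={"slice": slice(1, 3)})
            # -> [2, 3]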
311 """
312 inMemoryDataset = copy.deepcopy(inMemoryDataset)
313 use = self.storageClass.filterParameters(parameters, subset={"slice"})
314 if use:
315 inMemoryDataset = inMemoryDataset[use["slice"]]
316 return inMemoryDataset


class MetricsDelegate(StorageClassDelegate):
    """Parameter handler for parameters using Metrics."""

    def handleParameters(self, inMemoryDataset: Any, parameters: Mapping[str, Any] | None = None) -> Any:
        """Modify the in-memory dataset using the supplied parameters,
        returning a possibly new object.

        Parameters
        ----------
        inMemoryDataset : `object`
            Object to modify based on the parameters.
        parameters : `dict`
            Parameters to apply. Values are specific to the parameter.
            Supported parameters are defined in the associated
            `StorageClass`. If no relevant parameters are specified, the
            inMemoryDataset will be returned unchanged.

        Returns
        -------
        inMemoryDataset : `object`
            Updated form of supplied in-memory dataset, after parameters
            have been used.
341 """
342 inMemoryDataset = copy.deepcopy(inMemoryDataset)
343 use = self.storageClass.filterParameters(parameters, subset={"slice"})
344 if use:
345 inMemoryDataset.data = inMemoryDataset.data[use["slice"]]
346 return inMemoryDataset

    def getComponent(self, composite: Any, componentName: str) -> Any:
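        """Derive a component from a composite `MetricsExample`.

        The ``counter`` component is computed on the fly as
        ``len(composite.data)``; every other component is delegated to
        `StorageClassDelegate.getComponent`.
        """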
        if componentName == "counter":
            return len(composite.data)
        return super().getComponent(composite, componentName)

    @classmethod
    def selectResponsibleComponent(cls, readComponent: str, fromComponents: set[str | None]) -> str:
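        """Select the stored component that can compute ``readComponent``.

        Only the derived ``counter`` component is currently handled; it is
        forwarded to the ``data`` component when that component is among
        ``fromComponents``, for example::

            MetricsDelegate.selectResponsibleComponent("counter", {"data"})
            # -> "data"
        """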
        forwarderMap = {
            "counter": "data",
        }
        forwarder = forwarderMap.get(readComponent)
        if forwarder is not None and forwarder in fromComponents:
            return forwarder
        raise ValueError(f"Can not calculate read component {readComponent} from {fromComponents}")