Coverage for python/lsst/daf/butler/tests/_datasetsHelper.py: 32%
60 statements
coverage.py v7.3.2, created at 2023-12-05 11:07 +0000
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This software is dual licensed under the GNU General Public License and also
# under a 3-clause BSD license. Recipients may choose which of these licenses
# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
# respectively. If you choose the GPL option then the following text applies
# (but note that there is still no warranty even if you opt for BSD instead):
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = (
    "DatasetTestHelper",
    "DatastoreTestHelper",
    "BadWriteFormatter",
    "BadNoWriteFormatter",
    "MultiDetectorFormatter",
)

import os
from collections.abc import Iterable, Mapping
from typing import TYPE_CHECKING, Any

from lsst.daf.butler import DataCoordinate, DatasetRef, DatasetType, DimensionGroup, StorageClass
from lsst.daf.butler.formatters.yaml import YamlFormatter

if TYPE_CHECKING:
    from lsst.daf.butler import Config, DatasetId, Datastore, Dimension, DimensionGraph


class DatasetTestHelper:
    """Helper methods for Datasets."""

    def makeDatasetRef(
        self,
        datasetTypeName: str,
        dimensions: DimensionGroup | DimensionGraph | Iterable[str | Dimension],
        storageClass: StorageClass | str,
        dataId: DataCoordinate | Mapping[str, Any],
        *,
        id: DatasetId | None = None,
        run: str | None = None,
        conform: bool = True,
    ) -> DatasetRef:
        """Make a DatasetType and wrap it in a DatasetRef for a test."""
        return self._makeDatasetRef(
            datasetTypeName,
            dimensions,
            storageClass,
            dataId,
            id=id,
            run=run,
            conform=conform,
        )

    def _makeDatasetRef(
        self,
        datasetTypeName: str,
        dimensions: DimensionGroup | DimensionGraph | Iterable[str | Dimension],
        storageClass: StorageClass | str,
        dataId: DataCoordinate | Mapping,
        *,
        id: DatasetId | None = None,
        run: str | None = None,
        conform: bool = True,
    ) -> DatasetRef:
        # Helper for makeDatasetRef.

        # Pretend we have a parent if this looks like a composite.
        compositeName, componentName = DatasetType.splitDatasetTypeName(datasetTypeName)
        parentStorageClass = StorageClass("component") if componentName else None

        datasetType = DatasetType(
            datasetTypeName, dimensions, storageClass, parentStorageClass=parentStorageClass
        )

        if run is None:
            run = "dummy"
        if not isinstance(dataId, DataCoordinate):
            dataId = DataCoordinate.standardize(dataId, dimensions=datasetType.dimensions)
        return DatasetRef(datasetType, dataId, id=id, run=run, conform=conform)
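
# A minimal usage sketch (illustrative only: the test class, dataset type
# name, storage class, run name, and dataId values are made up here):
#
#     class MyRefTestCase(DatasetTestHelper, unittest.TestCase):
#         def test_ref(self) -> None:
#             ref = self.makeDatasetRef(
#                 "dummy_dataset",
#                 ["instrument", "detector"],
#                 "StructuredDataDict",
#                 {"instrument": "DummyCam", "detector": 0},
#                 run="test_run",
#             )
#             self.assertEqual(ref.datasetType.name, "dummy_dataset")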


class DatastoreTestHelper:
    """Helper methods for Datastore tests."""

    root: str | None
    config: Config
    datastoreType: type[Datastore]
    configFile: str

    def setUpDatastoreTests(self, registryClass: type, configClass: type[Config]) -> None:
        """Shared setUp code for all Datastore tests."""
        self.registry = registryClass()
        self.config = configClass(self.configFile)

        # Some subclasses override the working root directory.
        if self.root is not None:
            self.datastoreType.setConfigRoot(self.root, self.config, self.config.copy())

    def makeDatastore(self, sub: str | None = None) -> Datastore:
        """Make a new Datastore instance of the appropriate type.

        Parameters
        ----------
        sub : str, optional
            If not None, the returned Datastore will be distinct from any
            Datastore constructed with a different value of ``sub``. For
            PosixDatastore, for example, the converse is also true, and
            ``sub`` is used as a subdirectory to form the new root.

        Returns
        -------
        datastore : `Datastore`
            Datastore constructed by this routine using the supplied
            optional subdirectory if supported.
        """
        config = self.config.copy()
        if sub is not None and self.root is not None:
            self.datastoreType.setConfigRoot(os.path.join(self.root, sub), config, self.config)
        if sub is not None:
            # Ensure that each datastore gets its own registry.
            registryClass = type(self.registry)
            registry = registryClass()
        else:
            registry = self.registry
        return self.datastoreType(config=config, bridgeManager=registry.getDatastoreBridgeManager())
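
# A minimal usage sketch (illustrative only: ``DummyRegistry``, the datastore
# class, and the config file name are stand-ins for whatever a concrete test
# suite provides):
#
#     class MyDatastoreTestCase(DatastoreTestHelper, unittest.TestCase):
#         configFile = "datastore.yaml"
#         root = None
#         datastoreType = InMemoryDatastore
#
#         def setUp(self) -> None:
#             self.setUpDatastoreTests(DummyRegistry, Config)
#
#         def test_constructor(self) -> None:
#             datastore = self.makeDatastore()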


class BadWriteFormatter(YamlFormatter):
    """A formatter that never works but does leave a file behind."""

    def _readFile(self, path: str, pytype: type[Any] | None = None) -> Any:
        raise NotImplementedError("This formatter cannot read anything")

    def _writeFile(self, inMemoryDataset: Any) -> None:
        """Write an empty file and then raise an exception."""
        with open(self.fileDescriptor.location.path, "wb"):
            pass
        raise RuntimeError("Did not succeed in writing file")


class BadNoWriteFormatter(BadWriteFormatter):
    """A formatter that always fails without writing anything."""

    def _writeFile(self, inMemoryDataset: Any) -> None:
        raise RuntimeError("Did not write anything at all")
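
# These deliberately broken formatters exercise datastore error paths:
# BadWriteFormatter leaves an empty file behind so a test can check that a
# failed write cleans up partial artifacts, while BadNoWriteFormatter fails
# before touching disk at all.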


class MultiDetectorFormatter(YamlFormatter):
    """A formatter that requires a detector to be specified in the dataId."""

    def _writeFile(self, inMemoryDataset: Any) -> None:
        raise NotImplementedError("Cannot write")

    def _fromBytes(self, serializedDataset: bytes, pytype: type[Any] | None = None) -> Any:
        data = super()._fromBytes(serializedDataset)
        if self.dataId is None:
            raise RuntimeError("This formatter requires a dataId")
        if "detector" not in self.dataId:
            raise RuntimeError("This formatter requires detector to be present in dataId")
        key = f"detector{self.dataId['detector']}"
        assert pytype is not None
        if key in data:
            return pytype(data[key])
        raise RuntimeError(f"Could not find '{key}' in data file")
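
# MultiDetectorFormatter._fromBytes expects the serialized YAML mapping to be
# keyed per detector, e.g. (illustrative layout):
#
#     detector0:
#       value: 1
#     detector1:
#       value: 2
#
# so a dataId with detector=1 selects the "detector1" entry and coerces it to
# ``pytype``.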