Coverage for python/lsst/daf/butler/tests/_datasetsHelper.py: 34%
69 statements
coverage.py v6.5.0, created at 2023-01-11 02:31 -0800
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations

__all__ = (
    "DatasetTestHelper",
    "DatastoreTestHelper",
    "BadWriteFormatter",
    "BadNoWriteFormatter",
    "MultiDetectorFormatter",
)

import os
from collections.abc import Iterable
from typing import TYPE_CHECKING, Any

from lsst.daf.butler import DatasetRef, DatasetType, StorageClass
from lsst.daf.butler.formatters.yaml import YamlFormatter

if TYPE_CHECKING:
    from lsst.daf.butler import (
        Config,
        DataCoordinate,
        DatasetId,
        Datastore,
        Dimension,
        DimensionGraph,
        Registry,
    )


class DatasetTestHelper:
    """Helper methods for Datasets."""

    id: int = 0
    """Counter used to generate dataset IDs; tests should reset ``self.id``
    in ``setUp``."""

    def makeDatasetRef(
        self,
        datasetTypeName: str,
        dimensions: DimensionGraph | Iterable[str | Dimension],
        storageClass: StorageClass | str,
        dataId: DataCoordinate,
        *,
        id: DatasetId | None = None,
        run: str | None = None,
        conform: bool = True,
    ) -> DatasetRef:
        """Make a DatasetType and wrap it in a DatasetRef for a test."""
        return self._makeDatasetRef(
            datasetTypeName, dimensions, storageClass, dataId, id=id, run=run, conform=conform
        )

    def _makeDatasetRef(
        self,
        datasetTypeName: str,
        dimensions: DimensionGraph | Iterable[str | Dimension],
        storageClass: StorageClass | str,
        dataId: DataCoordinate,
        *,
        id: DatasetId | None = None,
        run: str | None = None,
        conform: bool = True,
    ) -> DatasetRef:
        # Helper for makeDatasetRef.

        # Pretend we have a parent if this looks like a composite.
        compositeName, componentName = DatasetType.splitDatasetTypeName(datasetTypeName)
        parentStorageClass = StorageClass("component") if componentName else None

        datasetType = DatasetType(
            datasetTypeName, dimensions, storageClass, parentStorageClass=parentStorageClass
        )

        if id is None:
            self.id += 1
            id = self.id
        if run is None:
            run = "dummy"
        return DatasetRef(datasetType, dataId, id=id, run=run, conform=conform)
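
# Usage sketch (not part of the original module): a test case would typically
# mix DatasetTestHelper into a unittest.TestCase and build refs against a
# DimensionUniverse.  The dimension names, storage class name, data ID values,
# and the imports of unittest, DimensionUniverse, and DataCoordinate are
# illustrative assumptions, not fixed by this helper.
#
#     class ExampleRefTestCase(unittest.TestCase, DatasetTestHelper):
#         def setUp(self) -> None:
#             self.id = 0  # reset the shared counter before each test
#             self.universe = DimensionUniverse()
#
#         def testRef(self) -> None:
#             dataId = DataCoordinate.standardize(
#                 {"instrument": "DummyCam", "detector": 0}, universe=self.universe
#             )
#             ref = self.makeDatasetRef(
#                 "example_dataset",
#                 self.universe.extract(["instrument", "detector"]),
#                 "StructuredDataDict",
#                 dataId,
#                 run="example_run",
#             )
#             self.assertEqual(ref.run, "example_run")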


class DatastoreTestHelper:
    """Helper methods for Datastore tests."""

    # root, datastoreType, and configFile are expected to be set by the
    # concrete test class; id and config are populated in
    # setUpDatastoreTests().
    root: str
    id: int
    config: Config
    datastoreType: type[Datastore]
    configFile: str

    def setUpDatastoreTests(self, registryClass: type[Registry], configClass: type[Config]) -> None:
        """Shared setUp code for all Datastore tests."""
        self.registry = registryClass()

        # Need to keep an ID for each datasetRef since we have no butler
        # for these tests.
        self.id = 1

        self.config = configClass(self.configFile)

        # Some subclasses override the working root directory.
        if self.root is not None:
            self.datastoreType.setConfigRoot(self.root, self.config, self.config.copy())

    def makeDatastore(self, sub: str | None = None) -> Datastore:
        """Make a new Datastore instance of the appropriate type.

        Parameters
        ----------
        sub : str, optional
            If not None, the returned Datastore will be distinct from any
            Datastore constructed with a different value of ``sub``.  For
            PosixDatastore, for example, the converse is also true, and
            ``sub`` is used as a subdirectory to form the new root.

        Returns
        -------
        datastore : `Datastore`
            Datastore constructed by this routine using the supplied
            optional subdirectory if supported.
        """
        config = self.config.copy()
        if sub is not None and self.root is not None:
            self.datastoreType.setConfigRoot(os.path.join(self.root, sub), config, self.config)
        if sub is not None:
            # Ensure that each datastore gets its own registry.
            registryClass = type(self.registry)
            registry = registryClass()
        else:
            registry = self.registry
        return self.datastoreType(config=config, bridgeManager=registry.getDatastoreBridgeManager())
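
# Usage sketch (not part of the original module): a concrete datastore test
# would typically provide the class-level attributes and call the shared
# setUp helper.  The datastore class (InMemoryDatastore), the registry class
# (DummyRegistry), the config path, and their imports are illustrative
# assumptions.
#
#     class ExampleDatastoreTestCase(DatasetTestHelper, DatastoreTestHelper, unittest.TestCase):
#         root = None
#         configFile = os.path.join(os.path.dirname(__file__), "config/basic/butler.yaml")
#         datastoreType = InMemoryDatastore
#
#         def setUp(self) -> None:
#             self.setUpDatastoreTests(DummyRegistry, Config)
#
#         def testConstruction(self) -> None:
#             datastore = self.makeDatastore()
#             self.assertIsNotNone(datastore)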


class BadWriteFormatter(YamlFormatter):
    """A formatter that never works but does leave a file behind."""

    def _readFile(self, path: str, pytype: type[Any] | None = None) -> Any:
        raise NotImplementedError("This formatter can not read anything")

    def _writeFile(self, inMemoryDataset: Any) -> None:
        """Write an empty file and then raise an exception."""
        with open(self.fileDescriptor.location.path, "wb"):
            pass
        raise RuntimeError("Did not succeed in writing file")


class BadNoWriteFormatter(BadWriteFormatter):
    """A formatter that always fails without writing anything."""

    def _writeFile(self, inMemoryDataset: Any) -> None:
        raise RuntimeError("Did not write anything at all")
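
# Usage sketch (not part of the original module): these failing formatters are
# meant to be selected through a file datastore's formatter configuration so
# that tests can exercise error handling on write failure (BadWriteFormatter
# leaves a partial file behind; BadNoWriteFormatter writes nothing).  The
# storage class key used below is an illustrative assumption.
#
#     # In a test datastore config (YAML):
#     datastore:
#       formatters:
#         StructuredDataDict: lsst.daf.butler.tests._datasetsHelper.BadWriteFormatter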


class MultiDetectorFormatter(YamlFormatter):
    """A read-only formatter that extracts the entry for a single detector
    from a YAML mapping keyed by ``detector<N>``.
    """

    def _writeFile(self, inMemoryDataset: Any) -> None:
        raise NotImplementedError("Can not write")

    def _fromBytes(self, serializedDataset: bytes, pytype: type[Any] | None = None) -> Any:
        data = super()._fromBytes(serializedDataset)
        if self.dataId is None:
            raise RuntimeError("This formatter requires a dataId")
        if "detector" not in self.dataId:  # type: ignore[comparison-overlap]
            raise RuntimeError("This formatter requires detector to be present in dataId")
        key = f"detector{self.dataId['detector']}"
        assert pytype is not None
        if key in data:
            return pytype(data[key])
        raise RuntimeError(f"Could not find '{key}' in data file")
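
# Data layout sketch (not part of the original module): MultiDetectorFormatter
# expects the serialized YAML to be a mapping keyed by ``detector<N>``; the
# entry matching the dataId's detector is passed to ``pytype`` and returned.
# The field names and values shown are illustrative.
#
#     detector0:
#       gain: 1.5
#     detector1:
#       gain: 1.7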