Coverage for python/lsst/daf/butler/tests/_datasetsHelper.py: 33%
67 statements
« prev ^ index » next coverage.py v6.5.0, created at 2023-03-25 02:06 -0700
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations

__all__ = (
    "DatasetTestHelper",
    "DatastoreTestHelper",
    "BadWriteFormatter",
    "BadNoWriteFormatter",
    "MultiDetectorFormatter",
)

import os
import uuid
from collections.abc import Iterable
from typing import TYPE_CHECKING, Any

from lsst.daf.butler import DatasetRef, DatasetType, StorageClass
from lsst.daf.butler.formatters.yaml import YamlFormatter

if TYPE_CHECKING:
    from lsst.daf.butler import (
        Config,
        DataCoordinate,
        DatasetId,
        Datastore,
        Dimension,
        DimensionGraph,
        Registry,
    )
class DatasetTestHelper:
    """Helper methods for Datasets"""

    def makeDatasetRef(
        self,
        datasetTypeName: str,
        dimensions: DimensionGraph | Iterable[str | Dimension],
        storageClass: StorageClass | str,
        dataId: DataCoordinate,
        *,
        id: DatasetId | None = None,
        run: str | None = None,
        conform: bool = True,
    ) -> DatasetRef:
        """Make a DatasetType and wrap it in a DatasetRef for a test"""
        return self._makeDatasetRef(
            datasetTypeName, dimensions, storageClass, dataId, id=id, run=run, conform=conform
        )

    def _makeDatasetRef(
        self,
        datasetTypeName: str,
        dimensions: DimensionGraph | Iterable[str | Dimension],
        storageClass: StorageClass | str,
        dataId: DataCoordinate,
        *,
        id: DatasetId | None = None,
        run: str | None = None,
        conform: bool = True,
    ) -> DatasetRef:
        # Implementation behind makeDatasetRef.

        # A component-style name ("composite.component") gets a stand-in
        # parent storage class so the DatasetType looks like a composite.
        _, componentName = DatasetType.splitDatasetTypeName(datasetTypeName)
        if componentName:
            parentStorageClass = StorageClass("component")
        else:
            parentStorageClass = None

        datasetType = DatasetType(
            datasetTypeName, dimensions, storageClass, parentStorageClass=parentStorageClass
        )

        # Fill in defaults: a fresh UUID and a placeholder run name.
        if id is None:
            id = uuid.uuid4()
        if run is None:
            run = "dummy"
        return DatasetRef(datasetType, dataId, id=id, run=run, conform=conform)
class DatastoreTestHelper:
    """Helper methods for Datastore tests"""

    root: str
    id: DatasetId
    config: Config
    datastoreType: type[Datastore]
    configFile: str

    def setUpDatastoreTests(self, registryClass: type[Registry], configClass: type[Config]) -> None:
        """Shared setUp code for all Datastore tests"""
        self.registry = registryClass()

        # There is no butler in these tests, so an ID for each datasetRef
        # has to be tracked explicitly.
        self.id = uuid.uuid4()

        self.config = configClass(self.configFile)

        # Subclasses are allowed to override the working root directory.
        if self.root is not None:
            self.datastoreType.setConfigRoot(self.root, self.config, self.config.copy())

    def makeDatastore(self, sub: str | None = None) -> Datastore:
        """Make a new Datastore instance of the appropriate type.

        Parameters
        ----------
        sub : str, optional
            If not None, the returned Datastore will be distinct from any
            Datastore constructed with a different value of ``sub``. For
            PosixDatastore, for example, the converse is also true, and ``sub``
            is used as a subdirectory to form the new root.

        Returns
        -------
        datastore : `Datastore`
            Datastore constructed by this routine using the supplied
            optional subdirectory if supported.
        """
        config = self.config.copy()
        if sub is not None and self.root is not None:
            config_root = os.path.join(self.root, sub)
            self.datastoreType.setConfigRoot(config_root, config, self.config)

        if sub is None:
            registry = self.registry
        else:
            # Each subdirectory-scoped datastore gets its own registry.
            registry = type(self.registry)()

        return self.datastoreType(config=config, bridgeManager=registry.getDatastoreBridgeManager())
class BadWriteFormatter(YamlFormatter):
    """A formatter that never works but does leave a file behind."""

    def _readFile(self, path: str, pytype: type[Any] | None = None) -> Any:
        # Reading is unconditionally unsupported.
        raise NotImplementedError("This formatter can not read anything")

    def _writeFile(self, inMemoryDataset: Any) -> None:
        """Create an empty file at the target location and then fail."""
        target = self.fileDescriptor.location.path
        open(target, "wb").close()
        raise RuntimeError("Did not succeed in writing file")
class BadNoWriteFormatter(BadWriteFormatter):
    """A formatter that always fails without writing anything."""

    def _writeFile(self, inMemoryDataset: Any) -> None:
        # Unlike the base class, fail before touching the filesystem.
        message = "Did not writing anything at all"
        raise RuntimeError(message)
class MultiDetectorFormatter(YamlFormatter):
    """A read-only YAML formatter that extracts a single per-detector entry.

    The serialized payload is expected to be a mapping with keys of the
    form ``detector<N>``; the entry matching the dataId's ``detector``
    value is returned, converted to ``pytype``.
    """

    def _writeFile(self, inMemoryDataset: Any) -> None:
        # Writing is unconditionally unsupported.
        raise NotImplementedError("Can not write")

    def _fromBytes(self, serializedDataset: bytes, pytype: type[Any] | None = None) -> Any:
        payload = super()._fromBytes(serializedDataset)

        # A detector-bearing dataId is mandatory for selecting the entry.
        if self.dataId is None:
            raise RuntimeError("This formatter requires a dataId")
        if "detector" not in self.dataId:
            raise RuntimeError("This formatter requires detector to be present in dataId")

        key = f"detector{self.dataId['detector']}"
        assert pytype is not None
        if key not in payload:
            raise RuntimeError(f"Could not find '{key}' in data file")
        return pytype(payload[key])