Coverage for python/lsst/daf/butler/tests/testFormatters.py: 29%
96 statements
« prev ^ index » next coverage.py v6.5.0, created at 2022-10-29 02:20 -0700
« prev ^ index » next coverage.py v6.5.0, created at 2022-10-29 02:20 -0700
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <http://www.gnu.org/licenses/>.
22from __future__ import annotations
# Public API of this test-formatter module. Sorted alphabetically.
# NOTE: MetricsExampleDataFormatter was defined in this module but
# previously missing from __all__; it is now exported as well.
__all__ = (
    "DoNothingFormatter",
    "FormatterTest",
    "LenientYamlFormatter",
    "MetricsExampleDataFormatter",
    "MetricsExampleFormatter",
    "MultipleExtensionsFormatter",
    "SingleExtensionFormatter",
)
33import json
34from typing import TYPE_CHECKING, Any, Mapping, Optional
36import yaml
38from ..core import Formatter
39from ..formatters.yaml import YamlFormatter
if TYPE_CHECKING:
    from ..core import Location
class DoNothingFormatter(Formatter):
    """Test formatter that performs no I/O at all.

    Both reading and writing are unsupported; useful where a registered
    formatter is required but must never be exercised.
    """

    def read(self, component: Optional[str] = None) -> Any:
        """Raise, since this formatter cannot read anything."""
        raise NotImplementedError("Type does not support reading")

    def write(self, inMemoryDataset: Any) -> str:
        """Raise, since this formatter cannot write anything."""
        raise NotImplementedError("Type does not support writing")
class FormatterTest(Formatter):
    """Test formatter that declares write parameters but performs no I/O."""

    # Write parameters accepted (but never acted upon) by this formatter.
    supportedWriteParameters = frozenset({"min", "max", "median", "comment", "extra", "recipe"})

    def read(self, component: Optional[str] = None) -> Any:
        """Raise, since reading is unsupported."""
        raise NotImplementedError("Type does not support reading")

    def write(self, inMemoryDataset: Any) -> str:
        """Raise, since writing is unsupported."""
        raise NotImplementedError("Type does not support writing")

    @staticmethod
    def validateWriteRecipes(recipes: Optional[Mapping[str, Any]]) -> Optional[Mapping[str, Any]]:
        """Validate that every write recipe declares a "mode" entry.

        Parameters
        ----------
        recipes : `Mapping`, optional
            Named write recipes; may be `None` or empty, in which case
            they are returned unchanged.

        Returns
        -------
        recipes : `Mapping`, optional
            The validated recipes, returned as given.

        Raises
        ------
        RuntimeError
            Raised if any recipe is missing the required "mode" key.
        """
        if not recipes:
            return recipes
        for settings in recipes.values():
            if "mode" not in settings:
                raise RuntimeError("'mode' is a required write recipe parameter")
        return recipes
class SingleExtensionFormatter(DoNothingFormatter):
    """A do nothing formatter that has a single extension registered."""

    # Only ".fits" files are associated with this formatter.
    extension = ".fits"
class MultipleExtensionsFormatter(SingleExtensionFormatter):
    """A formatter that has multiple extensions registered."""

    # Extra extensions recognized in addition to the ".fits" default
    # inherited from SingleExtensionFormatter.
    supportedExtensions = frozenset({".fits.gz", ".fits.fz", ".fit"})
class LenientYamlFormatter(YamlFormatter):
    """YAML formatter variant that tolerates any file extension.

    Content is always read and written as YAML regardless of the
    file name.
    """

    extension = ".yaml"

    @classmethod
    def validateExtension(cls, location: Location) -> None:
        """Accept every extension without complaint."""
        return None
class MetricsExampleFormatter(Formatter):
    """Specialist test formatter for metrics that can serve components
    directly, without an assembler delegate.
    """

    supportedExtensions = frozenset({".yaml", ".json"})

    @property
    def extension(self) -> str:
        """Always write yaml by default."""
        return ".yaml"

    def read(self, component=None):
        """Read data from a file.

        Parameters
        ----------
        component : `str`, optional
            Component to read from the file. Only used if the
            `StorageClass` for reading differed from the `StorageClass`
            used to write the file.

        Returns
        -------
        inMemoryDataset : `object`
            The requested data as a Python object. The type of object
            is controlled by the specific formatter.

        Raises
        ------
        ValueError
            Raised when an unsupported component is requested.
        RuntimeError
            Raised when the file extension is neither ``.yaml`` nor
            ``.json``.
        """
        # Whole-file parse is unavoidable: YAML and JSON payloads
        # cannot be read partially from disk.
        path = self.fileDescriptor.location.path

        with open(path, "r") as stream:
            if path.endswith(".yaml"):
                parsed = yaml.load(stream, Loader=yaml.SafeLoader)
            elif path.endswith(".json"):
                parsed = json.load(stream)
            else:
                raise RuntimeError(f"Unsupported file extension found in path '{path}'")

        # Apply any requested slice up front, before object construction.
        parameters = self.fileDescriptor.parameters
        if parameters and "slice" in parameters and "data" in parsed:
            parsed["data"] = parsed["data"][parameters["slice"]]

        pytype = self.fileDescriptor.storageClass.pytype
        inMemoryDataset = pytype(**parsed)

        if not component:
            return inMemoryDataset

        # Dispatch table mapping each supported component to its
        # extractor on the in-memory object.
        extractors = {
            "summary": lambda dataset: dataset.summary,
            "output": lambda dataset: dataset.output,
            "data": lambda dataset: dataset.data,
            "counter": lambda dataset: len(dataset.data),
        }
        if component in extractors:
            return extractors[component](inMemoryDataset)
        raise ValueError(f"Unsupported component: {component}")

    def write(self, inMemoryDataset: Any) -> str:
        """Write a Dataset.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Dataset to store.

        Returns
        -------
        path : `str`
            The path to where the Dataset was stored within the
            datastore.
        """
        descriptor = self.fileDescriptor

        # Switch the location over to this formatter's preferred
        # extension before writing.
        descriptor.location.updateExtension(self.extension)

        with open(descriptor.location.path, "w") as stream:
            yaml.dump(inMemoryDataset._asdict(), stream)
        return descriptor.location.pathInStore
class MetricsExampleDataFormatter(Formatter):
    """Specialist test formatter for the data component of a
    MetricsExample.

    This is needed if the MetricsExample is disassembled and we want to
    support the derived component.
    """

    unsupportedParameters = None
    """Let the assembler delegate handle slice"""

    extension = ".yaml"
    """Always write YAML"""

    def read(self, component=None):
        """Read data from a file.

        Parameters
        ----------
        component : `str`, optional
            Component to read from the file. Only used if the
            `StorageClass` for reading differed from the `StorageClass`
            used to write the file.

        Returns
        -------
        inMemoryDataset : `object`
            The requested data as a Python object. The type of object
            is controlled by the specific formatter.

        Raises
        ------
        ValueError
            Raised when an unsupported component is requested.
        """
        # Whole-file parse is unavoidable: the payload is YAML and
        # cannot be read partially from disk.
        path = self.fileDescriptor.location.path
        with open(path, "r") as stream:
            payload = yaml.load(stream, Loader=yaml.SafeLoader)

        # Apply any requested slice up front.
        parameters = self.fileDescriptor.parameters
        if parameters and "slice" in parameters:
            payload = payload[parameters["slice"]]

        # The payload should already be a native list; no conversion
        # is performed here.
        if not component:
            return payload

        if component == "counter":
            return len(payload)
        raise ValueError(f"Unsupported component: {component}")

    def write(self, inMemoryDataset: Any) -> str:
        """Write a Dataset.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Dataset to store.

        Returns
        -------
        path : `str`
            The path to where the Dataset was stored within the
            datastore.
        """
        descriptor = self.fileDescriptor

        # Switch the location over to this formatter's preferred
        # extension before writing.
        descriptor.location.updateExtension(self.extension)

        with open(descriptor.location.path, "w") as stream:
            yaml.dump(inMemoryDataset, stream)
        return descriptor.location.pathInStore