Coverage report for python/lsst/daf/butler/tests/_examplePythonTypes.py: 36% of 110 statements covered (coverage.py v7.3.2, created at 2023-12-01 11:00 +0000).

1# This file is part of daf_butler. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (http://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This software is dual licensed under the GNU General Public License and also 

10# under a 3-clause BSD license. Recipients may choose which of these licenses 

11# to use; please see the files gpl-3.0.txt and/or bsd_license.txt, 

12# respectively. If you choose the GPL option then the following text applies 

13# (but note that there is still no warranty even if you opt for BSD instead): 

14# 

15# This program is free software: you can redistribute it and/or modify 

16# it under the terms of the GNU General Public License as published by 

17# the Free Software Foundation, either version 3 of the License, or 

18# (at your option) any later version. 

19# 

20# This program is distributed in the hope that it will be useful, 

21# but WITHOUT ANY WARRANTY; without even the implied warranty of 

22# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

23# GNU General Public License for more details. 

24# 

25# You should have received a copy of the GNU General Public License 

26# along with this program. If not, see <http://www.gnu.org/licenses/>. 

27 

28""" 

29Python classes that can be used to test datastores without requiring 

30large external dependencies on python classes such as afw or serialization 

31formats such as FITS or HDF5. 

32""" 

33 

34from __future__ import annotations 

35 

36__all__ = ( 

37 "ListDelegate", 

38 "MetricsDelegate", 

39 "MetricsExample", 

40 "registerMetricsExample", 

41 "MetricsExampleModel", 

42 "MetricsExampleDataclass", 

43) 

44 

45 

46import copy 

47import dataclasses 

48import types 

49from collections.abc import Mapping 

50from typing import TYPE_CHECKING, Any 

51 

52from lsst.daf.butler import StorageClass, StorageClassDelegate 

53from pydantic import BaseModel 

54 

55if TYPE_CHECKING: 

56 from lsst.daf.butler import Butler, Datastore, FormatterFactory 

57 

58 

def registerMetricsExample(butler: Butler) -> None:
    """Modify a repository to support reading and writing
    `MetricsExample` objects.

    This method allows `MetricsExample` to be used with test repositories
    in any package without needing to provide a custom configuration there.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        The repository that needs to support `MetricsExample`.

    Notes
    -----
    This method enables the following storage classes:

    ``StructuredData``
        A `MetricsExample` whose ``summary``, ``output``, and ``data`` members
        can be retrieved as dataset components.
    ``StructuredDataNoComponents``
        A monolithic write of a `MetricsExample`.

    These definitions must match the equivalent definitions in the test YAML
    files.
    """
    # Component storage classes referenced by the composite class below.
    dict_storage = _addFullStorageClass(
        butler,
        "StructuredDataDictYaml",
        "lsst.daf.butler.formatters.yaml.YamlFormatter",
        pytype=dict,
    )
    list_storage = _addFullStorageClass(
        butler,
        "StructuredDataListYaml",
        "lsst.daf.butler.formatters.yaml.YamlFormatter",
        pytype=list,
        parameters={"slice"},
        delegate="lsst.daf.butler.tests.ListDelegate",
    )

    # Monolithic (component-free) form of MetricsExample.
    _addFullStorageClass(
        butler,
        "StructuredDataNoComponents",
        "lsst.daf.butler.formatters.pickle.PickleFormatter",
        pytype=MetricsExample,
        parameters={"slice"},
        delegate="lsst.daf.butler.tests.MetricsDelegate",
        converters={"dict": "lsst.daf.butler.tests.MetricsExample.makeFromDict"},
    )

    # Composite form whose members are retrievable as components.
    _addFullStorageClass(
        butler,
        "StructuredData",
        "lsst.daf.butler.formatters.yaml.YamlFormatter",
        pytype=MetricsExample,
        components={
            "summary": dict_storage,
            "output": dict_storage,
            "data": list_storage,
        },
        delegate="lsst.daf.butler.tests.MetricsDelegate",
    )

122 

123 

def _addFullStorageClass(butler: Butler, name: str, formatter: str, **kwargs: Any) -> StorageClass:
    """Create a storage class-formatter pair in a repository if it does not
    already exist.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        The repository that needs to contain the class.
    name : `str`
        The name to use for the class.
    formatter : `str`
        The formatter to use with the storage class. Ignored if ``butler``
        does not use formatters.
    **kwargs
        Arguments, other than ``name``, to the `~lsst.daf.butler.StorageClass`
        constructor.

    Returns
    -------
    class : `lsst.daf.butler.StorageClass`
        The newly created storage class, or the class of the same name
        previously found in the repository.
    """
    factory = butler._datastore.storageClassFactory

    # makeNewStorageClass builds a StorageClass subclass rather than a plain
    # instance, which lets other test storage classes inherit from this one.
    storage = factory.makeNewStorageClass(name, None, **kwargs)()
    try:
        factory.registerStorageClass(storage)
    except ValueError:
        # Already registered; fall back to the repository's definition.
        storage = factory.getStorageClass(name)

    for formatter_registry in _getAllFormatterRegistries(butler._datastore):
        formatter_registry.registerFormatter(storage, formatter)

    return storage

163 

164 

165def _getAllFormatterRegistries(datastore: Datastore) -> list[FormatterFactory]: 

166 """Return all formatter registries used by a datastore. 

167 

168 Parameters 

169 ---------- 

170 datastore : `lsst.daf.butler.Datastore` 

171 A datastore containing zero or more formatter registries. 

172 

173 Returns 

174 ------- 

175 registries : `list` [`lsst.daf.butler.FormatterFactory`] 

176 A possibly empty list of all formatter registries used 

177 by ``datastore``. 

178 """ 

179 try: 

180 datastores = datastore.datastores # type: ignore[attr-defined] 

181 except AttributeError: 

182 datastores = [datastore] 

183 

184 registries = [] 

185 for datastore in datastores: 

186 try: 

187 # Not all datastores have a formatterFactory 

188 formatterRegistry = datastore.formatterFactory # type: ignore[attr-defined] 

189 except AttributeError: 

190 pass # no formatter needed 

191 else: 

192 registries.append(formatterRegistry) 

193 return registries 

194 

195 

196class MetricsExample: 

197 """Smorgasboard of information that might be the result of some 

198 processing. 

199 

200 Parameters 

201 ---------- 

202 summary : `dict` 

203 Simple dictionary mapping key performance metrics to a scalar 

204 result. 

205 output : `dict` 

206 Structured nested data. 

207 data : `list`, optional 

208 Arbitrary array data. 

209 """ 

210 

211 def __init__( 

212 self, 

213 summary: dict[str, Any] | None = None, 

214 output: dict[str, Any] | None = None, 

215 data: list[Any] | None = None, 

216 ) -> None: 

217 self.summary = summary 

218 self.output = output 

219 self.data = data 

220 

221 def __eq__(self, other: Any) -> bool: 

222 try: 

223 return self.summary == other.summary and self.output == other.output and self.data == other.data 

224 except AttributeError: 

225 pass 

226 return NotImplemented 

227 

228 def __str__(self) -> str: 

229 return str(self.exportAsDict()) 

230 

231 def __repr__(self) -> str: 

232 return f"MetricsExample({self.exportAsDict()})" 

233 

234 def exportAsDict(self) -> dict[str, list | dict | None]: 

235 """Convert object contents to a single python dict.""" 

236 exportDict: dict[str, list | dict | None] = {"summary": self.summary, "output": self.output} 

237 if self.data is not None: 

238 exportDict["data"] = list(self.data) 

239 else: 

240 exportDict["data"] = None 

241 return exportDict 

242 

243 def _asdict(self) -> dict[str, list | dict | None]: 

244 """Convert object contents to a single Python dict. 

245 

246 This interface is used for JSON serialization. 

247 

248 Returns 

249 ------- 

250 exportDict : `dict` 

251 Object contents in the form of a dict with keys corresponding 

252 to object attributes. 

253 """ 

254 return self.exportAsDict() 

255 

256 @classmethod 

257 def makeFromDict(cls, exportDict: dict[str, list | dict | None]) -> MetricsExample: 

258 """Create a new object from a dict that is compatible with that 

259 created by `exportAsDict`. 

260 

261 Parameters 

262 ---------- 

263 exportDict : `dict` 

264 `dict` with keys "summary", "output", and (optionally) "data". 

265 

266 Returns 

267 ------- 

268 newobject : `MetricsExample` 

269 New `MetricsExample` object. 

270 """ 

271 data = exportDict["data"] if "data" in exportDict else None 

272 assert isinstance(data, list | types.NoneType) 

273 assert isinstance(exportDict["summary"], dict | types.NoneType) 

274 assert isinstance(exportDict["output"], dict | types.NoneType) 

275 return cls(exportDict["summary"], exportDict["output"], data) 

276 

277 

class MetricsExampleModel(BaseModel):
    """A variant of `MetricsExample` based on model."""

    # All members are optional, mirroring MetricsExample's defaults.
    summary: dict[str, Any] | None = None
    output: dict[str, Any] | None = None
    data: list[Any] | None = None

    @classmethod
    def from_metrics(cls, metrics: MetricsExample) -> MetricsExampleModel:
        """Create a model based on an example."""
        exported = metrics.exportAsDict()
        try:
            # Pydantic v2 API; attribute lookup fails on v1.
            validate = cls.model_validate
        except AttributeError:
            # Fall back to the pydantic v1 API.
            return cls.parse_obj(exported)
        return validate(exported)  # type: ignore

294 

295 

@dataclasses.dataclass
class MetricsExampleDataclass:
    """A variant of `MetricsExample` based on a dataclass."""

    # Simple dictionary mapping key performance metrics to a scalar result.
    summary: dict[str, Any] | None
    # Structured nested data.
    output: dict[str, Any] | None
    # Arbitrary array data.
    data: list[Any] | None

303 

304 

class ListDelegate(StorageClassDelegate):
    """Parameter handler for list parameters."""

    def handleParameters(self, inMemoryDataset: Any, parameters: Mapping[str, Any] | None = None) -> Any:
        """Modify the in-memory dataset using the supplied parameters,
        returning a possibly new object.

        Parameters
        ----------
        inMemoryDataset : `object`
            Object to modify based on the parameters.
        parameters : `dict`
            Parameters to apply. Values are specific to the parameter.
            Supported parameters are defined in the associated
            `StorageClass`. If no relevant parameters are specified the
            inMemoryDataset will be return unchanged.

        Returns
        -------
        inMemoryDataset : `object`
            Updated form of supplied in-memory dataset, after parameters
            have been used.
        """
        # Always work on a copy so the caller's object is never mutated.
        dataset = copy.deepcopy(inMemoryDataset)
        relevant = self.storageClass.filterParameters(parameters, subset={"slice"})
        return dataset[relevant["slice"]] if relevant else dataset

333 

334 

class MetricsDelegate(StorageClassDelegate):
    """Parameter handler for parameters using Metrics."""

    def handleParameters(self, inMemoryDataset: Any, parameters: Mapping[str, Any] | None = None) -> Any:
        """Modify the in-memory dataset using the supplied parameters,
        returning a possibly new object.

        Parameters
        ----------
        inMemoryDataset : `object`
            Object to modify based on the parameters.
        parameters : `dict`
            Parameters to apply. Values are specific to the parameter.
            Supported parameters are defined in the associated
            `StorageClass`. If no relevant parameters are specified the
            inMemoryDataset will be return unchanged.

        Returns
        -------
        inMemoryDataset : `object`
            Updated form of supplied in-memory dataset, after parameters
            have been used.
        """
        # Always work on a copy so the caller's object is never mutated.
        dataset = copy.deepcopy(inMemoryDataset)
        relevant = self.storageClass.filterParameters(parameters, subset={"slice"})
        if relevant:
            dataset.data = dataset.data[relevant["slice"]]
        return dataset

    def getComponent(self, composite: Any, componentName: str) -> Any:
        # "counter" is derived rather than stored: it is the length of data.
        if componentName == "counter":
            return len(composite.data)
        return super().getComponent(composite, componentName)

    @classmethod
    def selectResponsibleComponent(cls, readComponent: str, fromComponents: set[str | None]) -> str:
        # Map each derived component to the stored component that can
        # compute it.
        forwarder = {"counter": "data"}.get(readComponent)
        if forwarder is None or forwarder not in fromComponents:
            raise ValueError(f"Can not calculate read component {readComponent} from {fromComponents}")
        return forwarder