Coverage for python/lsst/daf/butler/tests/_examplePythonTypes.py: 39% (108 statements)

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""
Python classes that can be used to test datastores without requiring
large external dependencies on Python classes such as afw or serialization
formats such as FITS or HDF5.
"""

from __future__ import annotations

__all__ = (
    "ListDelegate",
    "MetricsDelegate",
    "MetricsExample",
    "registerMetricsExample",
    "MetricsExampleModel",
    "MetricsExampleDataclass",
)


import copy
import dataclasses
import types
from collections.abc import Mapping
from typing import TYPE_CHECKING, Any

from lsst.daf.butler import StorageClass, StorageClassDelegate

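# Support either pydantic v1 or v2; pydantic v2 provides the v1 API
# under the ``pydantic.v1`` namespace.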
try:
    from pydantic.v1 import BaseModel
except ModuleNotFoundError:
    from pydantic import BaseModel  # type: ignore

if TYPE_CHECKING:
    from lsst.daf.butler import Butler, Datastore, FormatterFactory


def registerMetricsExample(butler: Butler) -> None:
    """Modify a repository to support reading and writing
    `MetricsExample` objects.

    This method allows `MetricsExample` to be used with test repositories
    in any package without needing to provide a custom configuration there.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        The repository that needs to support `MetricsExample`.

    Notes
    -----
    This method enables the following storage classes:

    ``StructuredData``
        A `MetricsExample` whose ``summary``, ``output``, and ``data`` members
        can be retrieved as dataset components.
    ``StructuredDataNoComponents``
        A monolithic write of a `MetricsExample`.
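
    Examples
    --------
    A minimal sketch of intended use (the repository path and run name
    here are illustrative):

    >>> butler = Butler("repo", run="test_run")  # doctest: +SKIP
    >>> registerMetricsExample(butler)  # doctest: +SKIP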
    """
    yamlDict = _addFullStorageClass(
        butler,
        "StructuredDataDictYaml",
        "lsst.daf.butler.formatters.yaml.YamlFormatter",
        pytype=dict,
    )

    yamlList = _addFullStorageClass(
        butler,
        "StructuredDataListYaml",
        "lsst.daf.butler.formatters.yaml.YamlFormatter",
        pytype=list,
        parameters={"slice"},
        delegate="lsst.daf.butler.tests.ListDelegate",
    )

    _addFullStorageClass(
        butler,
        "StructuredDataNoComponents",
        "lsst.daf.butler.formatters.pickle.PickleFormatter",
        pytype=MetricsExample,
        parameters={"slice"},
        delegate="lsst.daf.butler.tests.MetricsDelegate",
    )

    _addFullStorageClass(
        butler,
        "StructuredData",
        "lsst.daf.butler.formatters.yaml.YamlFormatter",
        pytype=MetricsExample,
        components={
            "summary": yamlDict,
            "output": yamlDict,
            "data": yamlList,
        },
        delegate="lsst.daf.butler.tests.MetricsDelegate",
    )


def _addFullStorageClass(
    butler: Butler, name: str, formatter: str, *args: Any, **kwargs: Any
) -> StorageClass:
    """Create a storage class-formatter pair in a repository if it does not
    already exist.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        The repository that needs to contain the class.
    name : `str`
        The name to use for the class.
    formatter : `str`
        The formatter to use with the storage class. Ignored if ``butler``
        does not use formatters.
    *args
    **kwargs
        Arguments, other than ``name``, to the `~lsst.daf.butler.StorageClass`
        constructor.

    Returns
    -------
    class : `lsst.daf.butler.StorageClass`
        The newly created storage class, or the class of the same name
        previously found in the repository.
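
    Examples
    --------
    A sketch mirroring the calls in `registerMetricsExample`:

    >>> _addFullStorageClass(  # doctest: +SKIP
    ...     butler,
    ...     "StructuredDataDictYaml",
    ...     "lsst.daf.butler.formatters.yaml.YamlFormatter",
    ...     pytype=dict,
    ... )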
    """
    storageRegistry = butler._datastore.storageClassFactory

    storage = StorageClass(name, *args, **kwargs)
    try:
        storageRegistry.registerStorageClass(storage)
    except ValueError:
        # Raised if a storage class with this name is already registered;
        # fall back to the existing definition.
        storage = storageRegistry.getStorageClass(name)

    for registry in _getAllFormatterRegistries(butler._datastore):
        registry.registerFormatter(storage, formatter)

    return storage


def _getAllFormatterRegistries(datastore: Datastore) -> list[FormatterFactory]:
    """Return all formatter registries used by a datastore.

    Parameters
    ----------
    datastore : `lsst.daf.butler.Datastore`
        A datastore containing zero or more formatter registries.

    Returns
    -------
    registries : `list` [`lsst.daf.butler.FormatterFactory`]
        A possibly empty list of all formatter registries used
        by ``datastore``.
    """
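    # A chained datastore exposes its child datastores through a
    # ``datastores`` attribute; simple datastores do not have one.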
    try:
        datastores = datastore.datastores  # type: ignore[attr-defined]
    except AttributeError:
        datastores = [datastore]

    registries = []
    for datastore in datastores:
        try:
            # Not all datastores have a formatterFactory.
            formatterRegistry = datastore.formatterFactory  # type: ignore[attr-defined]
        except AttributeError:
            pass  # No formatter is needed for this datastore.
        else:
            registries.append(formatterRegistry)
    return registries


class MetricsExample:
    """Smorgasbord of information that might be the result of some
    processing.

    Parameters
    ----------
    summary : `dict`
        Simple dictionary mapping key performance metrics to a scalar
        result.
    output : `dict`
        Structured nested data.
    data : `list`, optional
        Arbitrary array data.
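
    Examples
    --------
    The keys and values here are purely illustrative:

    >>> metrics = MetricsExample(summary={"AM1": 5.2}, data=[1, 2, 3])
    >>> metrics.exportAsDict()["data"]
    [1, 2, 3]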
    """

    def __init__(
        self,
        summary: dict[str, Any] | None = None,
        output: dict[str, Any] | None = None,
        data: list[Any] | None = None,
    ) -> None:
        self.summary = summary
        self.output = output
        self.data = data

    def __eq__(self, other: Any) -> bool:
        try:
            return self.summary == other.summary and self.output == other.output and self.data == other.data
        except AttributeError:
            pass
        return NotImplemented

    def __str__(self) -> str:
        return str(self.exportAsDict())

    def __repr__(self) -> str:
        return f"MetricsExample({self.exportAsDict()})"

    def exportAsDict(self) -> dict[str, list | dict | None]:
        """Convert object contents to a single Python dict."""
        exportDict: dict[str, list | dict | None] = {"summary": self.summary, "output": self.output}
        if self.data is not None:
            exportDict["data"] = list(self.data)
        else:
            exportDict["data"] = None
        return exportDict

    def _asdict(self) -> dict[str, list | dict | None]:
        """Convert object contents to a single Python dict.

        This interface is used for JSON serialization.

        Returns
        -------
        exportDict : `dict`
            Object contents in the form of a dict with keys corresponding
            to object attributes.
        """
        return self.exportAsDict()

    @classmethod
    def makeFromDict(cls, exportDict: dict[str, list | dict | None]) -> MetricsExample:
        """Create a new object from a dict that is compatible with that
        created by `exportAsDict`.

        Parameters
        ----------
        exportDict : `dict`
            `dict` with keys "summary", "output", and (optionally) "data".

        Returns
        -------
        newobject : `MetricsExample`
            New `MetricsExample` object.
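
        Examples
        --------
        Round-tripping through `exportAsDict` (values are illustrative):

        >>> original = MetricsExample(summary={"n": 1}, output={}, data=[1, 2])
        >>> MetricsExample.makeFromDict(original.exportAsDict()) == original
        True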
        """
        data = exportDict["data"] if "data" in exportDict else None
        assert isinstance(data, list | types.NoneType)
        assert isinstance(exportDict["summary"], dict | types.NoneType)
        assert isinstance(exportDict["output"], dict | types.NoneType)
        return cls(exportDict["summary"], exportDict["output"], data)


class MetricsExampleModel(BaseModel):
    """A variant of `MetricsExample` based on a Pydantic model."""

    summary: dict[str, Any] | None = None
    output: dict[str, Any] | None = None
    data: list[Any] | None = None

    @classmethod
    def from_metrics(cls, metrics: MetricsExample) -> MetricsExampleModel:
        """Create a model based on an example."""
        return cls.parse_obj(metrics.exportAsDict())


@dataclasses.dataclass
class MetricsExampleDataclass:
    """A variant of `MetricsExample` based on a dataclass."""

    summary: dict[str, Any] | None
    output: dict[str, Any] | None
    data: list[Any] | None


class ListDelegate(StorageClassDelegate):
    """Parameter handler for list parameters."""

    def handleParameters(self, inMemoryDataset: Any, parameters: Mapping[str, Any] | None = None) -> Any:
        """Modify the in-memory dataset using the supplied parameters,
        returning a possibly new object.

        Parameters
        ----------
        inMemoryDataset : `object`
            Object to modify based on the parameters.
        parameters : `dict`
            Parameters to apply. Values are specific to the parameter.
            Supported parameters are defined in the associated
            `StorageClass`. If no relevant parameters are specified the
            inMemoryDataset will be returned unchanged.

        Returns
        -------
        inMemoryDataset : `object`
            Updated form of supplied in-memory dataset, after parameters
            have been used.
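
        Examples
        --------
        A sketch of the ``slice`` parameter (the storage class here is
        illustrative; in practice the datastore constructs the delegate):

        >>> sc = StorageClass("example", pytype=list, parameters={"slice"})
        >>> ListDelegate(sc).handleParameters([1, 2, 3, 4], {"slice": slice(1, 3)})
        [2, 3]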
        """
        inMemoryDataset = copy.deepcopy(inMemoryDataset)
        use = self.storageClass.filterParameters(parameters, subset={"slice"})
        if use:
            inMemoryDataset = inMemoryDataset[use["slice"]]
        return inMemoryDataset


class MetricsDelegate(StorageClassDelegate):
    """Parameter handler for parameters using Metrics."""

    def handleParameters(self, inMemoryDataset: Any, parameters: Mapping[str, Any] | None = None) -> Any:
        """Modify the in-memory dataset using the supplied parameters,
        returning a possibly new object.

        Parameters
        ----------
        inMemoryDataset : `object`
            Object to modify based on the parameters.
        parameters : `dict`
            Parameters to apply. Values are specific to the parameter.
            Supported parameters are defined in the associated
            `StorageClass`. If no relevant parameters are specified the
            inMemoryDataset will be returned unchanged.

        Returns
        -------
        inMemoryDataset : `object`
            Updated form of supplied in-memory dataset, after parameters
            have been used.
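
        Examples
        --------
        Here ``slice`` applies to the ``data`` member (the storage class
        is illustrative):

        >>> sc = StorageClass("example", pytype=MetricsExample, parameters={"slice"})
        >>> metrics = MetricsExample(data=[1, 2, 3, 4])
        >>> MetricsDelegate(sc).handleParameters(metrics, {"slice": slice(0, 2)}).data
        [1, 2]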
        """
        inMemoryDataset = copy.deepcopy(inMemoryDataset)
        use = self.storageClass.filterParameters(parameters, subset={"slice"})
        if use:
            inMemoryDataset.data = inMemoryDataset.data[use["slice"]]
        return inMemoryDataset

    def getComponent(self, composite: Any, componentName: str) -> Any:
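        # "counter" is a derived component: the number of entries in the
        # ``data`` list. Other components are handled by the base class.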
        if componentName == "counter":
            return len(composite.data)
        return super().getComponent(composite, componentName)

    @classmethod
    def selectResponsibleComponent(cls, readComponent: str, fromComponents: set[str | None]) -> str:
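        # Derived components are mapped to the stored component that can
        # provide them; "counter" can be computed from "data".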
        forwarderMap = {
            "counter": "data",
        }
        forwarder = forwarderMap.get(readComponent)
        if forwarder is not None and forwarder in fromComponents:
            return forwarder
        raise ValueError(f"Cannot calculate read component {readComponent} from {fromComponents}")