# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Python classes that can be used to test datastores without requiring
large external dependencies such as afw, or serialization formats such
as FITS or HDF5.
"""

from __future__ import annotations

__all__ = (
    "ListDelegate",
    "MetricsDelegate",
    "MetricsExample",
    "registerMetricsExample",
    "MetricsExampleModel",
    "MetricsExampleDataclass",
)


import copy
import dataclasses
import types
from collections.abc import Mapping
from typing import TYPE_CHECKING, Any

from lsst.daf.butler import StorageClass, StorageClassDelegate
from pydantic import BaseModel

if TYPE_CHECKING:
    from lsst.daf.butler import Butler, Datastore, FormatterFactory


def registerMetricsExample(butler: Butler) -> None:
    """Modify a repository to support reading and writing
    `MetricsExample` objects.

    This function allows `MetricsExample` to be used with test repositories
    in any package without needing to provide a custom configuration there.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        The repository that needs to support `MetricsExample`.

    Notes
    -----
    This function enables the following storage classes:

    ``StructuredData``
        A `MetricsExample` whose ``summary``, ``output``, and ``data`` members
        can be retrieved as dataset components.
    ``StructuredDataNoComponents``
        A monolithic write of a `MetricsExample`.

    It also registers the ``StructuredDataDictYaml`` and
    ``StructuredDataListYaml`` storage classes, which back the individual
    components of ``StructuredData``.
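
    Examples
    --------
    A sketch of intended use with a test repository; the repository
    construction and the ``metrics`` dataset type shown here are
    illustrative, not provided by this function:

    >>> butler = Butler(repo_root, run="test_run")  # doctest: +SKIP
    >>> registerMetricsExample(butler)  # doctest: +SKIP
    >>> metrics = MetricsExample(summary={"answer": 42}, output={}, data=[1, 2])
    >>> butler.put(metrics, "metrics", dataId)  # doctest: +SKIP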

73 """ 

74 yamlDict = _addFullStorageClass( 

75 butler, 

76 "StructuredDataDictYaml", 

77 "lsst.daf.butler.formatters.yaml.YamlFormatter", 

78 pytype=dict, 

79 ) 

80 

81 yamlList = _addFullStorageClass( 

82 butler, 

83 "StructuredDataListYaml", 

84 "lsst.daf.butler.formatters.yaml.YamlFormatter", 

85 pytype=list, 

86 parameters={"slice"}, 

87 delegate="lsst.daf.butler.tests.ListDelegate", 

88 ) 

89 

90 _addFullStorageClass( 

91 butler, 

92 "StructuredDataNoComponents", 

93 "lsst.daf.butler.formatters.pickle.PickleFormatter", 

94 pytype=MetricsExample, 

95 parameters={"slice"}, 

96 delegate="lsst.daf.butler.tests.MetricsDelegate", 

97 ) 

98 

99 _addFullStorageClass( 

100 butler, 

101 "StructuredData", 

102 "lsst.daf.butler.formatters.yaml.YamlFormatter", 

103 pytype=MetricsExample, 

104 components={ 

105 "summary": yamlDict, 

106 "output": yamlDict, 

107 "data": yamlList, 

108 }, 

109 delegate="lsst.daf.butler.tests.MetricsDelegate", 

110 ) 

111 

112 

113def _addFullStorageClass( 

114 butler: Butler, name: str, formatter: str, *args: Any, **kwargs: Any 

115) -> StorageClass: 

116 """Create a storage class-formatter pair in a repository if it does not 

117 already exist. 

118 

119 Parameters 

120 ---------- 

121 butler : `lsst.daf.butler.Butler` 

122 The repository that needs to contain the class. 

123 name : `str` 

124 The name to use for the class. 

125 formatter : `str` 

126 The formatter to use with the storage class. Ignored if ``butler`` 

127 does not use formatters. 

128 *args 

129 **kwargs 

130 Arguments, other than ``name``, to the `~lsst.daf.butler.StorageClass` 

131 constructor. 

132 

133 Returns 

134 ------- 

135 class : `lsst.daf.butler.StorageClass` 

136 The newly created storage class, or the class of the same name 

137 previously found in the repository. 
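
    Examples
    --------
    A sketch mirroring the calls made from `registerMetricsExample`
    (illustrative; ``butler`` must be an existing test repository):

    >>> _addFullStorageClass(  # doctest: +SKIP
    ...     butler,
    ...     "StructuredDataDictYaml",
    ...     "lsst.daf.butler.formatters.yaml.YamlFormatter",
    ...     pytype=dict,
    ... )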

138 """ 

139 storageRegistry = butler.datastore.storageClassFactory 

140 

141 storage = StorageClass(name, *args, **kwargs) 

142 try: 

143 storageRegistry.registerStorageClass(storage) 

144 except ValueError: 

145 storage = storageRegistry.getStorageClass(name) 

146 

147 for registry in _getAllFormatterRegistries(butler.datastore): 

148 registry.registerFormatter(storage, formatter) 

149 

150 return storage 

151 

152 

153def _getAllFormatterRegistries(datastore: Datastore) -> list[FormatterFactory]: 

154 """Return all formatter registries used by a datastore. 

155 

156 Parameters 

157 ---------- 

158 datastore : `lsst.daf.butler.Datastore` 

159 A datastore containing zero or more formatter registries. 

160 

161 Returns 

162 ------- 

163 registries : `list` [`lsst.daf.butler.FormatterFactory`] 

164 A possibly empty list of all formatter registries used 

165 by ``datastore``. 
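
    Notes
    -----
    A chained datastore is detected by duck typing: any datastore exposing
    a ``datastores`` attribute is treated as a container of child
    datastores, and only those children that have a ``formatterFactory``
    contribute a registry. For example (illustrative only):

    >>> _getAllFormatterRegistries(chained_datastore)  # doctest: +SKIP
    [<FormatterFactory>, <FormatterFactory>]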

166 """ 

167 try: 

168 datastores = datastore.datastores # type: ignore[attr-defined] 

169 except AttributeError: 

170 datastores = [datastore] 

171 

172 registries = [] 

173 for datastore in datastores: 

174 try: 

175 # Not all datastores have a formatterFactory 

176 formatterRegistry = datastore.formatterFactory # type: ignore[attr-defined] 

177 except AttributeError: 

178 pass # no formatter needed 

179 else: 

180 registries.append(formatterRegistry) 

181 return registries 

182 

183 

184class MetricsExample: 

185 """Smorgasboard of information that might be the result of some 

186 processing. 

187 

188 Parameters 

189 ---------- 

190 summary : `dict` 

191 Simple dictionary mapping key performance metrics to a scalar 

192 result. 

193 output : `dict` 

194 Structured nested data. 

195 data : `list`, optional 

196 Arbitrary array data. 
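
    Examples
    --------
    A minimal construction example (the values are arbitrary):

    >>> metrics = MetricsExample(
    ...     summary={"answer": 42},
    ...     output={"weights": [0.1, 0.9]},
    ...     data=[563, 234, 456.7],
    ... )
    >>> metrics.summary["answer"]
    42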

197 """ 

198 

199 def __init__( 

200 self, 

201 summary: dict[str, Any] | None = None, 

202 output: dict[str, Any] | None = None, 

203 data: list[Any] | None = None, 

204 ) -> None: 

205 self.summary = summary 

206 self.output = output 

207 self.data = data 

208 

209 def __eq__(self, other: Any) -> bool: 

210 try: 

211 return self.summary == other.summary and self.output == other.output and self.data == other.data 

212 except AttributeError: 

213 pass 

214 return NotImplemented 

215 

216 def __str__(self) -> str: 

217 return str(self.exportAsDict()) 

218 

219 def __repr__(self) -> str: 

220 return f"MetricsExample({self.exportAsDict()})" 

221 

222 def exportAsDict(self) -> dict[str, list | dict | None]: 

223 """Convert object contents to a single python dict.""" 

        exportDict: dict[str, list | dict | None] = {"summary": self.summary, "output": self.output}
        if self.data is not None:
            exportDict["data"] = list(self.data)
        else:
            exportDict["data"] = None
        return exportDict

    def _asdict(self) -> dict[str, list | dict | None]:
        """Convert object contents to a single Python dict.

        This interface is used for JSON serialization.

        Returns
        -------
        exportDict : `dict`
            Object contents in the form of a dict with keys corresponding
            to object attributes.
        """
        return self.exportAsDict()

    @classmethod
    def makeFromDict(cls, exportDict: dict[str, list | dict | None]) -> MetricsExample:
        """Create a new object from a dict that is compatible with that
        created by `exportAsDict`.

        Parameters
        ----------
        exportDict : `dict`
            `dict` with keys "summary", "output", and (optionally) "data".

        Returns
        -------
        newobject : `MetricsExample`
            New `MetricsExample` object.
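
        Examples
        --------
        Round-tripping through `exportAsDict` preserves equality:

        >>> original = MetricsExample(summary={"n": 1}, output={}, data=[1, 2])
        >>> MetricsExample.makeFromDict(original.exportAsDict()) == original
        True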

258 """ 

259 data = exportDict["data"] if "data" in exportDict else None 

260 assert isinstance(data, list | types.NoneType) 

261 assert isinstance(exportDict["summary"], dict | types.NoneType) 

262 assert isinstance(exportDict["output"], dict | types.NoneType) 

263 return cls(exportDict["summary"], exportDict["output"], data) 

264 

265 

266class MetricsExampleModel(BaseModel): 

267 """A variant of `MetricsExample` based on model.""" 

268 

269 summary: dict[str, Any] | None 

270 output: dict[str, Any] | None 

271 data: list[Any] | None 

272 

273 @classmethod 

274 def from_metrics(cls, metrics: MetricsExample) -> "MetricsExampleModel": 

275 """Create a model based on an example.""" 

        return cls.parse_obj(metrics.exportAsDict())


@dataclasses.dataclass
class MetricsExampleDataclass:
    """A variant of `MetricsExample` defined as a dataclass."""

    summary: dict[str, Any] | None
    output: dict[str, Any] | None
    data: list[Any] | None


class ListDelegate(StorageClassDelegate):
    """Parameter handler for list parameters."""

    def handleParameters(self, inMemoryDataset: Any, parameters: Mapping[str, Any] | None = None) -> Any:
        """Modify the in-memory dataset using the supplied parameters,
        returning a possibly new object.

        Parameters
        ----------
        inMemoryDataset : `object`
            Object to modify based on the parameters.
        parameters : `dict`
            Parameters to apply. Values are specific to the parameter.
            Supported parameters are defined in the associated
            `StorageClass`. If no relevant parameters are specified,
            ``inMemoryDataset`` is returned unchanged.

        Returns
        -------
        inMemoryDataset : `object`
            Updated form of the supplied in-memory dataset, after parameters
            have been applied.
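
        Examples
        --------
        Applying a ``slice`` parameter (illustrative; assumes a delegate
        constructed with a storage class that defines ``slice``):

        >>> delegate = ListDelegate(storageClass)  # doctest: +SKIP
        >>> delegate.handleParameters([1, 2, 3, 4], {"slice": slice(1, 3)})  # doctest: +SKIP
        [2, 3]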

310 """ 

311 inMemoryDataset = copy.deepcopy(inMemoryDataset) 

312 use = self.storageClass.filterParameters(parameters, subset={"slice"}) 

313 if use: 

314 inMemoryDataset = inMemoryDataset[use["slice"]] 

315 return inMemoryDataset 

316 

317 

318class MetricsDelegate(StorageClassDelegate): 

319 """Parameter handler for parameters using Metrics""" 

320 

321 def handleParameters(self, inMemoryDataset: Any, parameters: Mapping[str, Any] | None = None) -> Any: 

322 """Modify the in-memory dataset using the supplied parameters, 

323 returning a possibly new object. 

324 

325 Parameters 

326 ---------- 

327 inMemoryDataset : `object` 

328 Object to modify based on the parameters. 

329 parameters : `dict` 

330 Parameters to apply. Values are specific to the parameter. 

331 Supported parameters are defined in the associated 

332 `StorageClass`. If no relevant parameters are specified the 

333 inMemoryDataset will be return unchanged. 

334 

335 Returns 

336 ------- 

337 inMemoryDataset : `object` 

338 Updated form of supplied in-memory dataset, after parameters 

339 have been used. 
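
        Examples
        --------
        Slicing the ``data`` member (illustrative; assumes a delegate
        constructed with a storage class that defines ``slice``):

        >>> delegate = MetricsDelegate(storageClass)  # doctest: +SKIP
        >>> metrics = MetricsExample(summary={}, output={}, data=[1, 2, 3])
        >>> delegate.handleParameters(metrics, {"slice": slice(0, 2)}).data  # doctest: +SKIP
        [1, 2]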

340 """ 

341 inMemoryDataset = copy.deepcopy(inMemoryDataset) 

342 use = self.storageClass.filterParameters(parameters, subset={"slice"}) 

343 if use: 

344 inMemoryDataset.data = inMemoryDataset.data[use["slice"]] 

345 return inMemoryDataset 

346 

347 def getComponent(self, composite: Any, componentName: str) -> Any: 

348 if componentName == "counter": 

349 return len(composite.data) 

350 return super().getComponent(composite, componentName) 

351 

352 @classmethod 

353 def selectResponsibleComponent(cls, readComponent: str, fromComponents: set[str | None]) -> str: 

354 forwarderMap = { 

355 "counter": "data", 

356 } 

357 forwarder = forwarderMap.get(readComponent) 

358 if forwarder is not None and forwarder in fromComponents: 

359 return forwarder 

360 raise ValueError(f"Can not calculate read component {readComponent} from {fromComponents}")