Coverage for python/lsst/daf/butler/tests/_examplePythonTypes.py: 31%

101 statements  

« prev     ^ index     » next       coverage.py v6.5.0, created at 2022-11-29 02:00 -0800

1# This file is part of daf_butler. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (http://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

21from __future__ import annotations 

22 

23""" 

24Python classes that can be used to test datastores without requiring 

25large external dependencies on python classes such as afw or serialization 

26formats such as FITS or HDF5. 

27""" 

28 

29__all__ = ( 

30 "ListDelegate", 

31 "MetricsDelegate", 

32 "MetricsExample", 

33 "registerMetricsExample", 

34 "MetricsExampleModel", 

35) 

36 

37 

38import copy 

39import types 

40from collections.abc import Mapping 

41from typing import TYPE_CHECKING, Any 

42 

43from lsst.daf.butler import StorageClass, StorageClassDelegate 

44from pydantic import BaseModel 

45 

if TYPE_CHECKING:

47 from lsst.daf.butler import Butler, Datastore, FormatterFactory 

48 

49 

def registerMetricsExample(butler: Butler) -> None:
    """Modify a repository to support reading and writing
    `MetricsExample` objects.

    This method allows `MetricsExample` to be used with test repositories
    in any package without needing to provide a custom configuration there.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        The repository that needs to support `MetricsExample`.

    Notes
    -----
    This method enables the following storage classes:

    ``StructuredData``
        A `MetricsExample` whose ``summary``, ``output``, and ``data`` members
        can be retrieved as dataset components.
    ``StructuredDataNoComponents``
        A monolithic write of a `MetricsExample`.
    """
    yamlFormatter = "lsst.daf.butler.formatters.yaml.YamlFormatter"

    # Component storage classes referenced by the composite
    # "StructuredData" class registered below.
    dictStorage = _addFullStorageClass(
        butler,
        "StructuredDataDictYaml",
        yamlFormatter,
        pytype=dict,
    )
    listStorage = _addFullStorageClass(
        butler,
        "StructuredDataListYaml",
        yamlFormatter,
        pytype=list,
        parameters={"slice"},
        delegate="lsst.daf.butler.tests.ListDelegate",
    )

    # Monolithic variant with no components, written via pickle.
    _addFullStorageClass(
        butler,
        "StructuredDataNoComponents",
        "lsst.daf.butler.formatters.pickle.PickleFormatter",
        pytype=MetricsExample,
        parameters={"slice"},
        delegate="lsst.daf.butler.tests.MetricsDelegate",
    )

    # Composite variant whose members can be read back as components.
    _addFullStorageClass(
        butler,
        "StructuredData",
        yamlFormatter,
        pytype=MetricsExample,
        components={
            "summary": dictStorage,
            "output": dictStorage,
            "data": listStorage,
        },
        delegate="lsst.daf.butler.tests.MetricsDelegate",
    )

109 

110 

def _addFullStorageClass(
    butler: Butler, name: str, formatter: str, *args: Any, **kwargs: Any
) -> StorageClass:
    """Create a storage class-formatter pair in a repository if it does not
    already exist.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        The repository that needs to contain the class.
    name : `str`
        The name to use for the class.
    formatter : `str`
        The formatter to use with the storage class. Ignored if ``butler``
        does not use formatters.
    *args
    **kwargs
        Arguments, other than ``name``, to the `~lsst.daf.butler.StorageClass`
        constructor.

    Returns
    -------
    class : `lsst.daf.butler.StorageClass`
        The newly created storage class, or the class of the same name
        previously found in the repository.
    """
    classFactory = butler.datastore.storageClassFactory

    storageClass = StorageClass(name, *args, **kwargs)
    try:
        classFactory.registerStorageClass(storageClass)
    except ValueError:
        # A class of this name already exists; use the registered one.
        storageClass = classFactory.getStorageClass(name)

    # Register the formatter with every registry the datastore exposes.
    for formatterRegistry in _getAllFormatterRegistries(butler.datastore):
        formatterRegistry.registerFormatter(storageClass, formatter)

    return storageClass

149 

150 

151def _getAllFormatterRegistries(datastore: Datastore) -> list[FormatterFactory]: 

152 """Return all formatter registries used by a datastore. 

153 

154 Parameters 

155 ---------- 

156 datastore : `lsst.daf.butler.Datastore` 

157 A datastore containing zero or more formatter registries. 

158 

159 Returns 

160 ------- 

161 registries : `list` [`lsst.daf.butler.FormatterFactory`] 

162 A possibly empty list of all formatter registries used 

163 by ``datastore``. 

164 """ 

165 try: 

166 datastores = datastore.datastores # type: ignore[attr-defined] 

167 except AttributeError: 

168 datastores = [datastore] 

169 

170 registries = [] 

171 for datastore in datastores: 

172 try: 

173 # Not all datastores have a formatterFactory 

174 formatterRegistry = datastore.formatterFactory # type: ignore[attr-defined] 

175 except AttributeError: 

176 pass # no formatter needed 

177 else: 

178 registries.append(formatterRegistry) 

179 return registries 

180 

181 

182class MetricsExample: 

183 """Smorgasboard of information that might be the result of some 

184 processing. 

185 

186 Parameters 

187 ---------- 

188 summary : `dict` 

189 Simple dictionary mapping key performance metrics to a scalar 

190 result. 

191 output : `dict` 

192 Structured nested data. 

193 data : `list`, optional 

194 Arbitrary array data. 

195 """ 

196 

197 def __init__( 

198 self, 

199 summary: dict[str, Any] | None = None, 

200 output: dict[str, Any] | None = None, 

201 data: list[Any] | None = None, 

202 ) -> None: 

203 self.summary = summary 

204 self.output = output 

205 self.data = data 

206 

207 def __eq__(self, other: Any) -> bool: 

208 try: 

209 return self.summary == other.summary and self.output == other.output and self.data == other.data 

210 except AttributeError: 

211 pass 

212 return NotImplemented 

213 

214 def __str__(self) -> str: 

215 return str(self.exportAsDict()) 

216 

217 def __repr__(self) -> str: 

218 return f"MetricsExample({self.exportAsDict()})" 

219 

220 def exportAsDict(self) -> dict[str, list | dict | None]: 

221 """Convert object contents to a single python dict.""" 

222 exportDict: dict[str, list | dict | None] = {"summary": self.summary, "output": self.output} 

223 if self.data is not None: 

224 exportDict["data"] = list(self.data) 

225 else: 

226 exportDict["data"] = None 

227 return exportDict 

228 

229 def _asdict(self) -> dict[str, list | dict | None]: 

230 """Convert object contents to a single Python dict. 

231 

232 This interface is used for JSON serialization. 

233 

234 Returns 

235 ------- 

236 exportDict : `dict` 

237 Object contents in the form of a dict with keys corresponding 

238 to object attributes. 

239 """ 

240 return self.exportAsDict() 

241 

242 @classmethod 

243 def makeFromDict(cls, exportDict: dict[str, list | dict | None]) -> MetricsExample: 

244 """Create a new object from a dict that is compatible with that 

245 created by `exportAsDict`. 

246 

247 Parameters 

248 ---------- 

249 exportDict : `dict` 

250 `dict` with keys "summary", "output", and (optionally) "data". 

251 

252 Returns 

253 ------- 

254 newobject : `MetricsExample` 

255 New `MetricsExample` object. 

256 """ 

257 data = exportDict["data"] if "data" in exportDict else None 

258 assert isinstance(data, list | types.NoneType) 

259 assert isinstance(exportDict["summary"], dict | types.NoneType) 

260 assert isinstance(exportDict["output"], dict | types.NoneType) 

261 return cls(exportDict["summary"], exportDict["output"], data) 

262 

263 

class MetricsExampleModel(BaseModel):
    """A variant of `MetricsExample` based on model."""

    # Fields mirror the attributes of `MetricsExample`.
    summary: dict[str, Any] | None
    output: dict[str, Any] | None
    data: list[Any] | None

    @classmethod
    def from_metrics(cls, metrics: MetricsExample) -> "MetricsExampleModel":
        """Create a model based on an example."""
        contents = metrics.exportAsDict()
        return cls.parse_obj(contents)

275 

276 

class ListDelegate(StorageClassDelegate):
    """Parameter handler for list parameters"""

    def handleParameters(self, inMemoryDataset: Any, parameters: Mapping[str, Any] | None = None) -> Any:
        """Modify the in-memory dataset using the supplied parameters,
        returning a possibly new object.

        Parameters
        ----------
        inMemoryDataset : `object`
            Object to modify based on the parameters.
        parameters : `dict`
            Parameters to apply. Values are specific to the parameter.
            Supported parameters are defined in the associated
            `StorageClass`. If no relevant parameters are specified the
            inMemoryDataset will be returned unchanged.

        Returns
        -------
        inMemoryDataset : `object`
            Updated form of supplied in-memory dataset, after parameters
            have been used.
        """
        # Work on a copy so the caller's object is never mutated.
        result = copy.deepcopy(inMemoryDataset)
        relevant = self.storageClass.filterParameters(parameters, subset={"slice"})
        if not relevant:
            return result
        return result[relevant["slice"]]

305 

306 

class MetricsDelegate(StorageClassDelegate):
    """Parameter handler for parameters using Metrics"""

    def handleParameters(self, inMemoryDataset: Any, parameters: Mapping[str, Any] | None = None) -> Any:
        """Modify the in-memory dataset using the supplied parameters,
        returning a possibly new object.

        Parameters
        ----------
        inMemoryDataset : `object`
            Object to modify based on the parameters.
        parameters : `dict`
            Parameters to apply. Values are specific to the parameter.
            Supported parameters are defined in the associated
            `StorageClass`. If no relevant parameters are specified the
            inMemoryDataset will be returned unchanged.

        Returns
        -------
        inMemoryDataset : `object`
            Updated form of supplied in-memory dataset, after parameters
            have been used.
        """
        # Work on a copy so the caller's object is never mutated.
        result = copy.deepcopy(inMemoryDataset)
        relevant = self.storageClass.filterParameters(parameters, subset={"slice"})
        if relevant:
            result.data = result.data[relevant["slice"]]
        return result

    def getComponent(self, composite: Any, componentName: str) -> Any:
        # "counter" is derived rather than stored: the length of the data.
        if componentName == "counter":
            return len(composite.data)
        return super().getComponent(composite, componentName)

    @classmethod
    def selectResponsibleComponent(cls, readComponent: str, fromComponents: set[str | None]) -> str:
        # Map each derived component to the stored component that can
        # compute it.
        forwarder = {"counter": "data"}.get(readComponent)
        if forwarder is None or forwarder not in fromComponents:
            raise ValueError(f"Can not calculate read component {readComponent} from {fromComponents}")
        return forwarder