Coverage for python/lsst/daf/butler/tests/_examplePythonTypes.py: 29%

91 statements  

coverage.py v6.5.0, created at 2022-11-17 02:01 -0800

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

22""" 

23Python classes that can be used to test datastores without requiring 

24large external dependencies on python classes such as afw or serialization 

25formats such as FITS or HDF5. 

26""" 

__all__ = (
    "ListDelegate",
    "MetricsDelegate",
    "MetricsExample",
    "registerMetricsExample",
    "MetricsExampleModel",
)


import copy
from typing import Any, Dict, List, Optional

from lsst.daf.butler import StorageClass, StorageClassDelegate
from pydantic import BaseModel

def registerMetricsExample(butler):
    """Modify a repository to support reading and writing
    `MetricsExample` objects.

    This function allows `MetricsExample` to be used with test repositories
    in any package without needing to provide a custom configuration there.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        The repository that needs to support `MetricsExample`.

    Notes
    -----
    This function enables the following storage classes:

    ``StructuredData``
        A `MetricsExample` whose ``summary``, ``output``, and ``data`` members
        can be retrieved as dataset components.
    ``StructuredDataNoComponents``
        A monolithic write of a `MetricsExample`.
    """
    yamlDict = _addFullStorageClass(
        butler,
        "StructuredDataDictYaml",
        "lsst.daf.butler.formatters.yaml.YamlFormatter",
        pytype=dict,
    )

    yamlList = _addFullStorageClass(
        butler,
        "StructuredDataListYaml",
        "lsst.daf.butler.formatters.yaml.YamlFormatter",
        pytype=list,
        parameters={"slice"},
        delegate="lsst.daf.butler.tests.ListDelegate",
    )

    _addFullStorageClass(
        butler,
        "StructuredDataNoComponents",
        "lsst.daf.butler.formatters.pickle.PickleFormatter",
        pytype=MetricsExample,
        parameters={"slice"},
        delegate="lsst.daf.butler.tests.MetricsDelegate",
    )

    _addFullStorageClass(
        butler,
        "StructuredData",
        "lsst.daf.butler.formatters.yaml.YamlFormatter",
        pytype=MetricsExample,
        components={
            "summary": yamlDict,
            "output": yamlDict,
            "data": yamlList,
        },
        delegate="lsst.daf.butler.tests.MetricsDelegate",
    )

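# A minimal usage sketch, not part of the original module: it assumes a
# writeable test ``Butler`` with a default run and a pre-registered dataset
# type named ``metrics`` that uses the ``StructuredDataNoComponents`` storage
# class (both names are illustrative assumptions, not requirements of this
# module).
def _exampleRegisterAndRoundTrip(butler):
    registerMetricsExample(butler)
    metrics = MetricsExample(summary={"answer": 42}, output={"lists": []}, data=[1, 2, 3])
    ref = butler.put(metrics, "metrics")
    # The ``slice`` parameter is interpreted at read time by MetricsDelegate.
    sliced = butler.get(ref, parameters={"slice": slice(2)})
    assert sliced.data == [1, 2]
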

def _addFullStorageClass(butler, name, formatter, *args, **kwargs):
    """Create a storage class-formatter pair in a repository if it does not
    already exist.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        The repository that needs to contain the class.
    name : `str`
        The name to use for the class.
    formatter : `str`
        The formatter to use with the storage class. Ignored if ``butler``
        does not use formatters.
    *args
    **kwargs
        Arguments, other than ``name``, to the `~lsst.daf.butler.StorageClass`
        constructor.

    Returns
    -------
    class : `lsst.daf.butler.StorageClass`
        The newly created storage class, or the class of the same name
        previously found in the repository.
    """
    storageRegistry = butler.datastore.storageClassFactory

    storage = StorageClass(name, *args, **kwargs)
    try:
        storageRegistry.registerStorageClass(storage)
    except ValueError:
        storage = storageRegistry.getStorageClass(name)

    for registry in _getAllFormatterRegistries(butler.datastore):
        registry.registerFormatter(storage, formatter)

    return storage

def _getAllFormatterRegistries(datastore):
    """Return all formatter registries used by a datastore.

    Parameters
    ----------
    datastore : `lsst.daf.butler.Datastore`
        A datastore containing zero or more formatter registries.

    Returns
    -------
    registries : `list` [`lsst.daf.butler.FormatterRegistry`]
        A possibly empty list of all formatter registries used
        by ``datastore``.
    """
    try:
        datastores = datastore.datastores
    except AttributeError:
        datastores = [datastore]

    registries = []
    for datastore in datastores:
        try:
            # Not all datastores have a formatterFactory
            formatterRegistry = datastore.formatterFactory
        except AttributeError:
            pass  # no formatter needed
        else:
            registries.append(formatterRegistry)
    return registries

class MetricsExample:
    """Smorgasbord of information that might be the result of some
    processing.

    Parameters
    ----------
    summary : `dict`
        Simple dictionary mapping key performance metrics to a scalar
        result.
    output : `dict`
        Structured nested data.
    data : `list`, optional
        Arbitrary array data.
    """

    def __init__(self, summary=None, output=None, data=None):
        self.summary = summary
        self.output = output
        self.data = data

    def __eq__(self, other):
        return self.summary == other.summary and self.output == other.output and self.data == other.data

    def __str__(self):
        return str(self.exportAsDict())

    def __repr__(self):
        return f"MetricsExample({self.exportAsDict()})"

    def exportAsDict(self):
        """Convert object contents to a single Python dict."""
        exportDict = {"summary": self.summary, "output": self.output}
        if self.data is not None:
            exportDict["data"] = list(self.data)
        else:
            exportDict["data"] = None
        return exportDict

    def _asdict(self):
        """Convert object contents to a single Python dict.

        This interface is used for JSON serialization.

        Returns
        -------
        exportDict : `dict`
            Object contents in the form of a dict with keys corresponding
            to object attributes.
        """
        return self.exportAsDict()

    @classmethod
    def makeFromDict(cls, exportDict):
        """Create a new object from a dict that is compatible with that
        created by `exportAsDict`.

        Parameters
        ----------
        exportDict : `dict`
            `dict` with keys "summary", "output", and (optionally) "data".

        Returns
        -------
        newobject : `MetricsExample`
            New `MetricsExample` object.
        """
        data = None
        if "data" in exportDict:
            data = exportDict["data"]
        return cls(exportDict["summary"], exportDict["output"], data)

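# A small illustrative sketch, not part of the original module, showing the
# ``exportAsDict``/``makeFromDict`` round trip that the serialization
# formatters rely on.
def _exampleDictRoundTrip():
    original = MetricsExample(summary={"answer": 42}, output={"lists": [1, 2]}, data=[3, 4, 5])
    restored = MetricsExample.makeFromDict(original.exportAsDict())
    assert restored == original
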

class MetricsExampleModel(BaseModel):
    """A variant of `MetricsExample` based on a Pydantic model."""

    summary: Optional[Dict[str, Any]]
    output: Optional[Dict[str, Any]]
    data: Optional[List[Any]]

    @classmethod
    def from_metrics(cls, metrics: MetricsExample) -> "MetricsExampleModel":
        """Create a model based on an example."""
        return cls.parse_obj(metrics.exportAsDict())

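# Illustrative sketch, not part of the original module: constructing the
# pydantic model from a plain ``MetricsExample`` via ``from_metrics``.
def _exampleModelFromMetrics():
    metrics = MetricsExample(summary={"answer": 42}, output={"lists": []}, data=[1, 2, 3])
    model = MetricsExampleModel.from_metrics(metrics)
    assert model.data == [1, 2, 3]
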

class ListDelegate(StorageClassDelegate):
    """Parameter handler for list parameters."""

    def handleParameters(self, inMemoryDataset, parameters=None):
        """Modify the in-memory dataset using the supplied parameters,
        returning a possibly new object.

        Parameters
        ----------
        inMemoryDataset : `object`
            Object to modify based on the parameters.
        parameters : `dict`
            Parameters to apply. Values are specific to the parameter.
            Supported parameters are defined in the associated
            `StorageClass`. If no relevant parameters are specified the
            inMemoryDataset will be returned unchanged.

        Returns
        -------
        inMemoryDataset : `object`
            Updated form of supplied in-memory dataset, after parameters
            have been used.
        """
        inMemoryDataset = copy.deepcopy(inMemoryDataset)
        use = self.storageClass.filterParameters(parameters, subset={"slice"})
        if use:
            inMemoryDataset = inMemoryDataset[use["slice"]]
        return inMemoryDataset

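# Illustrative sketch, not part of the original module: exercising the
# ``slice`` parameter directly through ``ListDelegate``. The storage class
# defined here is a local assumption, not one of those registered by
# ``registerMetricsExample``.
def _exampleListSlice():
    storageClass = StorageClass("SketchList", pytype=list, parameters={"slice"})
    delegate = ListDelegate(storageClass)
    subset = delegate.handleParameters(list(range(10)), parameters={"slice": slice(3)})
    assert subset == [0, 1, 2]
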

class MetricsDelegate(StorageClassDelegate):
    """Parameter handler for parameters using Metrics."""

    def handleParameters(self, inMemoryDataset, parameters=None):
        """Modify the in-memory dataset using the supplied parameters,
        returning a possibly new object.

        Parameters
        ----------
        inMemoryDataset : `object`
            Object to modify based on the parameters.
        parameters : `dict`
            Parameters to apply. Values are specific to the parameter.
            Supported parameters are defined in the associated
            `StorageClass`. If no relevant parameters are specified the
            inMemoryDataset will be returned unchanged.

        Returns
        -------
        inMemoryDataset : `object`
            Updated form of supplied in-memory dataset, after parameters
            have been used.
        """
        inMemoryDataset = copy.deepcopy(inMemoryDataset)
        use = self.storageClass.filterParameters(parameters, subset={"slice"})
        if use:
            inMemoryDataset.data = inMemoryDataset.data[use["slice"]]
        return inMemoryDataset

    def getComponent(self, composite, componentName: str):
        if componentName == "counter":
            return len(composite.data)
        return super().getComponent(composite, componentName)

    @classmethod
    def selectResponsibleComponent(cls, readComponent: str, fromComponents) -> str:
        forwarderMap = {
            "counter": "data",
        }
        forwarder = forwarderMap.get(readComponent)
        if forwarder is not None and forwarder in fromComponents:
            return forwarder
        raise ValueError(f"Cannot calculate read component {readComponent} from {fromComponents}")

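# Illustrative sketch, not part of the original module: ``MetricsDelegate``
# derives the ``counter`` read component from the in-memory composite, and
# ``selectResponsibleComponent`` reports that ``counter`` is forwarded to
# ``data``. The storage class defined here is a local assumption.
def _exampleCounterComponent():
    storageClass = StorageClass("SketchMetrics", pytype=MetricsExample, parameters={"slice"})
    delegate = MetricsDelegate(storageClass)
    metrics = MetricsExample(summary={}, output={}, data=[1, 2, 3])
    assert delegate.getComponent(metrics, "counter") == 3
    assert MetricsDelegate.selectResponsibleComponent("counter", {"data"}) == "data"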