Coverage for python/lsst/daf/butler/tests/_examplePythonTypes.py: 34% (91 statements)


# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

22""" 

23Python classes that can be used to test datastores without requiring 

24large external dependencies on python classes such as afw or serialization 

25formats such as FITS or HDF5. 

26""" 

__all__ = ("ListDelegate", "MetricsDelegate", "MetricsExample", "registerMetricsExample",
           "MetricsExampleModel")


import copy
from typing import Optional, Any, Dict, List

from pydantic import BaseModel
from lsst.daf.butler import StorageClassDelegate, StorageClass

def registerMetricsExample(butler):
    """Modify a repository to support reading and writing
    `MetricsExample` objects.

    This function allows `MetricsExample` to be used with test repositories
    in any package without needing to provide a custom configuration there.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        The repository that needs to support `MetricsExample`.

    Notes
    -----
    This function enables the following storage classes:

    ``StructuredData``
        A `MetricsExample` whose ``summary``, ``output``, and ``data`` members
        can be retrieved as dataset components.
    ``StructuredDataNoComponents``
        A monolithic write of a `MetricsExample`.
    """
    yamlDict = _addFullStorageClass(
        butler,
        "StructuredDataDictYaml",
        "lsst.daf.butler.formatters.yaml.YamlFormatter",
        pytype=dict,
    )

    yamlList = _addFullStorageClass(
        butler,
        "StructuredDataListYaml",
        "lsst.daf.butler.formatters.yaml.YamlFormatter",
        pytype=list,
        parameters={"slice"},
        delegate="lsst.daf.butler.tests.ListDelegate"
    )

    _addFullStorageClass(
        butler,
        "StructuredDataNoComponents",
        "lsst.daf.butler.formatters.pickle.PickleFormatter",
        pytype=MetricsExample,
        parameters={"slice"},
        delegate="lsst.daf.butler.tests.MetricsDelegate"
    )

    _addFullStorageClass(
        butler,
        "StructuredData",
        "lsst.daf.butler.formatters.yaml.YamlFormatter",
        pytype=MetricsExample,
        components={"summary": yamlDict,
                    "output": yamlDict,
                    "data": yamlList,
                    },
        delegate="lsst.daf.butler.tests.MetricsDelegate"
    )
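# Usage sketch (illustrative, not executed as part of this module): register
# the example storage classes with an existing test repository and round-trip
# a MetricsExample.  The repository path, run name, and "example_metrics"
# dataset type are assumptions made for this sketch, not fixtures provided
# here.
#
#     from lsst.daf.butler import Butler, DatasetType
#
#     butler = Butler("/path/to/test/repo", run="example_run")
#     registerMetricsExample(butler)
#     datasetType = DatasetType("example_metrics", dimensions=(),
#                               storageClass="StructuredDataNoComponents",
#                               universe=butler.registry.dimensions)
#     butler.registry.registerDatasetType(datasetType)
#     metrics = MetricsExample(summary={"answer": 42}, output={"npix": 5},
#                              data=[1, 2, 3])
#     butler.put(metrics, datasetType)
#     assert butler.get(datasetType) == metrics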

def _addFullStorageClass(butler, name, formatter, *args, **kwargs):
    """Create a storage class-formatter pair in a repository if it does not
    already exist.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        The repository that needs to contain the class.
    name : `str`
        The name to use for the class.
    formatter : `str`
        The formatter to use with the storage class. Ignored if ``butler``
        does not use formatters.
    *args
    **kwargs
        Arguments, other than ``name``, to the `~lsst.daf.butler.StorageClass`
        constructor.

    Returns
    -------
    class : `lsst.daf.butler.StorageClass`
        The newly created storage class, or the class of the same name
        previously found in the repository.
    """
    storageRegistry = butler.datastore.storageClassFactory

    storage = StorageClass(name, *args, **kwargs)
    try:
        storageRegistry.registerStorageClass(storage)
    except ValueError:
        # A storage class of this name already exists; reuse the
        # registered definition instead.
        storage = storageRegistry.getStorageClass(name)

    for registry in _getAllFormatterRegistries(butler.datastore):
        registry.registerFormatter(storage, formatter)

    return storage

def _getAllFormatterRegistries(datastore):
    """Return all formatter registries used by a datastore.

    Parameters
    ----------
    datastore : `lsst.daf.butler.Datastore`
        A datastore containing zero or more formatter registries.

    Returns
    -------
    registries : `list` [`lsst.daf.butler.FormatterRegistry`]
        A possibly empty list of all formatter registries used
        by ``datastore``.
    """
    try:
        # Chained datastores expose their component datastores;
        # otherwise treat ``datastore`` as a single datastore.
        datastores = datastore.datastores
    except AttributeError:
        datastores = [datastore]

    registries = []
    for datastore in datastores:
        try:
            # Not all datastores have a formatterFactory
            formatterRegistry = datastore.formatterFactory
        except AttributeError:
            pass  # no formatter needed
        else:
            registries.append(formatterRegistry)
    return registries
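# Behaviour sketch (illustrative only): ``_getAllFormatterRegistries`` relies
# on duck typing, so the stand-in classes below are hypothetical and exist
# only to show which attributes are probed.
#
#     class _FakeLeafDatastore:
#         formatterFactory = "registry-A"     # has a registry: collected
#
#     class _FakeBareDatastore:
#         pass                                # no formatterFactory: skipped
#
#     class _FakeChainedDatastore:
#         datastores = [_FakeLeafDatastore(), _FakeBareDatastore()]
#
#     assert _getAllFormatterRegistries(_FakeChainedDatastore()) == ["registry-A"]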

class MetricsExample:
    """Smorgasbord of information that might be the result of some
    processing.

    Parameters
    ----------
    summary : `dict`
        Simple dictionary mapping key performance metrics to a scalar
        result.
    output : `dict`
        Structured nested data.
    data : `list`, optional
        Arbitrary array data.
    """

    def __init__(self, summary=None, output=None, data=None):
        self.summary = summary
        self.output = output
        self.data = data

    def __eq__(self, other):
        return self.summary == other.summary and self.output == other.output and self.data == other.data

    def __str__(self):
        return str(self.exportAsDict())

    def __repr__(self):
        return f"MetricsExample({self.exportAsDict()})"

    def exportAsDict(self):
        """Convert object contents to a single python dict."""
        exportDict = {"summary": self.summary,
                      "output": self.output}
        if self.data is not None:
            exportDict["data"] = list(self.data)
        else:
            exportDict["data"] = None
        return exportDict

    def _asdict(self):
        """Convert object contents to a single Python dict.

        This interface is used for JSON serialization.

        Returns
        -------
        exportDict : `dict`
            Object contents in the form of a dict with keys corresponding
            to object attributes.
        """
        return self.exportAsDict()

    @classmethod
    def makeFromDict(cls, exportDict):
        """Create a new object from a dict that is compatible with that
        created by `exportAsDict`.

        Parameters
        ----------
        exportDict : `dict`
            `dict` with keys "summary", "output", and (optionally) "data".

        Returns
        -------
        newobject : `MetricsExample`
            New `MetricsExample` object.
        """
        data = None
        if "data" in exportDict:
            data = exportDict["data"]
        return cls(exportDict["summary"], exportDict["output"], data)
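# Round-trip sketch (illustrative only): ``exportAsDict`` and ``makeFromDict``
# are inverses for the simple values used here.
#
#     original = MetricsExample(summary={"rms": 0.3}, output={"fit": {"n": 2}},
#                               data=[1, 2, 3])
#     asDict = original.exportAsDict()
#     # {'summary': {'rms': 0.3}, 'output': {'fit': {'n': 2}}, 'data': [1, 2, 3]}
#     assert MetricsExample.makeFromDict(asDict) == original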

class MetricsExampleModel(BaseModel):
    """A variant of `MetricsExample` based on a Pydantic model."""

    summary: Optional[Dict[str, Any]]
    output: Optional[Dict[str, Any]]
    data: Optional[List[Any]]

    @classmethod
    def from_metrics(cls, metrics: MetricsExample) -> "MetricsExampleModel":
        """Create a model based on an example."""
        return cls.parse_obj(metrics.exportAsDict())
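# Serialization sketch (illustrative only, assuming the Pydantic v1 API that
# ``parse_obj`` above implies):
#
#     metrics = MetricsExample(summary={"rms": 0.3}, output={}, data=[1, 2])
#     model = MetricsExampleModel.from_metrics(metrics)
#     json_str = model.json()                    # plain JSON round trip
#     assert MetricsExampleModel.parse_raw(json_str) == model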

class ListDelegate(StorageClassDelegate):
    """Parameter handler for list parameters."""

    def handleParameters(self, inMemoryDataset, parameters=None):
        """Modify the in-memory dataset using the supplied parameters,
        returning a possibly new object.

        Parameters
        ----------
        inMemoryDataset : `object`
            Object to modify based on the parameters.
        parameters : `dict`
            Parameters to apply. Values are specific to the parameter.
            Supported parameters are defined in the associated
            `StorageClass`. If no relevant parameters are specified the
            inMemoryDataset will be returned unchanged.

        Returns
        -------
        inMemoryDataset : `object`
            Updated form of supplied in-memory dataset, after parameters
            have been used.
        """
        inMemoryDataset = copy.deepcopy(inMemoryDataset)
        use = self.storageClass.filterParameters(parameters, subset={"slice"})
        if use:
            inMemoryDataset = inMemoryDataset[use["slice"]]
        return inMemoryDataset
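# Parameter-handling sketch (illustrative only): the delegate is constructed
# directly with an ad-hoc StorageClass declaring the ``slice`` parameter, as
# "StructuredDataListYaml" does above.
#
#     sc = StorageClass("StructuredDataListYaml", pytype=list,
#                       parameters={"slice"})
#     delegate = ListDelegate(sc)
#     delegate.handleParameters([1, 2, 3, 4], parameters={"slice": slice(1, 3)})
#     # -> [2, 3]
#     delegate.handleParameters([1, 2, 3, 4])    # no parameters: unchanged copy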

class MetricsDelegate(StorageClassDelegate):
    """Parameter handler for parameters using Metrics."""

    def handleParameters(self, inMemoryDataset, parameters=None):
        """Modify the in-memory dataset using the supplied parameters,
        returning a possibly new object.

        Parameters
        ----------
        inMemoryDataset : `object`
            Object to modify based on the parameters.
        parameters : `dict`
            Parameters to apply. Values are specific to the parameter.
            Supported parameters are defined in the associated
            `StorageClass`. If no relevant parameters are specified the
            inMemoryDataset will be returned unchanged.

        Returns
        -------
        inMemoryDataset : `object`
            Updated form of supplied in-memory dataset, after parameters
            have been used.
        """
        inMemoryDataset = copy.deepcopy(inMemoryDataset)
        use = self.storageClass.filterParameters(parameters, subset={"slice"})
        if use:
            inMemoryDataset.data = inMemoryDataset.data[use["slice"]]
        return inMemoryDataset

    def getComponent(self, composite, componentName: str):
        # ``counter`` is a derived component computed from the stored data.
        if componentName == "counter":
            return len(composite.data)
        return super().getComponent(composite, componentName)

    @classmethod
    def selectResponsibleComponent(cls, readComponent: str, fromComponents) -> str:
        forwarderMap = {
            "counter": "data",
        }
        forwarder = forwarderMap.get(readComponent)
        if forwarder is not None and forwarder in fromComponents:
            return forwarder
        raise ValueError(f"Cannot calculate read component {readComponent} from {fromComponents}")
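# Component sketch (illustrative only): the derived ``counter`` component is
# served from the ``data`` component.  The StorageClass here is an ad-hoc
# stand-in for the ones registered by ``registerMetricsExample``.
#
#     sc = StorageClass("StructuredDataNoComponents", pytype=MetricsExample,
#                       parameters={"slice"})
#     delegate = MetricsDelegate(sc)
#     metrics = MetricsExample(summary={}, output={}, data=[1, 2, 3])
#     delegate.getComponent(metrics, "counter")                        # -> 3
#     MetricsDelegate.selectResponsibleComponent("counter", {"data"})  # -> "data"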