# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

22""" 

23Python classes that can be used to test datastores without requiring 

24large external dependencies on python classes such as afw or serialization 

25formats such as FITS or HDF5. 

26""" 

27 

28__all__ = ("ListDelegate", "MetricsDelegate", "MetricsExample", "registerMetricsExample") 

29 

30 

31import copy 

32from lsst.daf.butler import StorageClassDelegate, StorageClass 

33 

34 

def registerMetricsExample(butler):
    """Modify a repository to support reading and writing
    `MetricsExample` objects.

    This function allows `MetricsExample` to be used with test repositories
    in any package without needing to provide a custom configuration there.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        The repository that needs to support `MetricsExample`.

    Notes
    -----
    This function enables the following storage classes:

    ``StructuredData``
        A `MetricsExample` whose ``summary``, ``output``, and ``data`` members
        can be retrieved as dataset components.
    ``StructuredDataNoComponents``
        A monolithic write of a `MetricsExample`.
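
    Examples
    --------
    A minimal sketch of intended use; the repository path, run name, and
    dataset type name are hypothetical, and the ``metrics`` dataset type is
    assumed to use the ``StructuredDataNoComponents`` storage class with no
    dimensions:

    >>> from lsst.daf.butler import Butler
    >>> butler = Butler("some/test/repo", run="test_run")  # doctest: +SKIP
    >>> registerMetricsExample(butler)  # doctest: +SKIP
    >>> butler.put(MetricsExample(summary={"answer": 42}, output={}),
    ...            "metrics", dataId={})  # doctest: +SKIP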

56 """ 

57 yamlDict = _addFullStorageClass( 

58 butler, 

59 "StructuredDataDictYaml", 

60 "lsst.daf.butler.formatters.yaml.YamlFormatter", 

61 pytype=dict, 

62 ) 

63 

64 yamlList = _addFullStorageClass( 

65 butler, 

66 "StructuredDataListYaml", 

67 "lsst.daf.butler.formatters.yaml.YamlFormatter", 

68 pytype=list, 

69 parameters={"slice"}, 

70 delegate="lsst.daf.butler.tests.ListDelegate" 

71 ) 

72 

73 _addFullStorageClass( 

74 butler, 

75 "StructuredDataNoComponents", 

76 "lsst.daf.butler.formatters.pickle.PickleFormatter", 

77 pytype=MetricsExample, 

78 parameters={"slice"}, 

79 delegate="lsst.daf.butler.tests.MetricsDelegate" 

80 ) 

81 

82 _addFullStorageClass( 

83 butler, 

84 "StructuredData", 

85 "lsst.daf.butler.formatters.yaml.YamlFormatter", 

86 pytype=MetricsExample, 

87 components={"summary": yamlDict, 

88 "output": yamlDict, 

89 "data": yamlList, 

90 }, 

91 delegate="lsst.daf.butler.tests.MetricsDelegate" 

92 ) 

93 

94 

def _addFullStorageClass(butler, name, formatter, *args, **kwargs):
    """Create a storage class-formatter pair in a repository if it does not
    already exist.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        The repository that needs to contain the class.
    name : `str`
        The name to use for the class.
    formatter : `str`
        The formatter to use with the storage class. Ignored if ``butler``
        does not use formatters.
    *args
    **kwargs
        Arguments, other than ``name``, to the `~lsst.daf.butler.StorageClass`
        constructor.

    Returns
    -------
    class : `lsst.daf.butler.StorageClass`
        The newly created storage class, or the class of the same name
        previously found in the repository.
    """
    storageRegistry = butler.datastore.storageClassFactory

    storage = StorageClass(name, *args, **kwargs)
    try:
        storageRegistry.registerStorageClass(storage)
    except ValueError:
        # The storage class is already registered; reuse the existing
        # definition rather than creating a duplicate.
        storage = storageRegistry.getStorageClass(name)

    for registry in _getAllFormatterRegistries(butler.datastore):
        registry.registerFormatter(storage, formatter)

    return storage

def _getAllFormatterRegistries(datastore):
    """Return all formatter registries used by a datastore.

    Parameters
    ----------
    datastore : `lsst.daf.butler.Datastore`
        A datastore containing zero or more formatter registries.

    Returns
    -------
    registries : `list` [`lsst.daf.butler.FormatterRegistry`]
        A possibly empty list of all formatter registries used
        by ``datastore``.
    """
    try:
        # A chained datastore exposes its children through ``datastores``.
        datastores = datastore.datastores
    except AttributeError:
        datastores = [datastore]

    registries = []
    for datastore in datastores:
        try:
            # Not all datastores have a formatterFactory
            formatterRegistry = datastore.formatterFactory
        except AttributeError:
            pass  # no formatter needed
        else:
            registries.append(formatterRegistry)
    return registries

class MetricsExample:
    """Smorgasbord of information that might be the result of some
    processing.

    Parameters
    ----------
    summary : `dict`
        Simple dictionary mapping key performance metrics to a scalar
        result.
    output : `dict`
        Structured nested data.
    data : `list`, optional
        Arbitrary array data.
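
    Examples
    --------
    A minimal construction sketch; the metric names and values are
    illustrative only:

    >>> metrics = MetricsExample(
    ...     summary={"answer": 42, "question": "unknown"},
    ...     output={"AM1": {"value": 5.2}},
    ...     data=[563, 234, 456.7],
    ... )
    >>> metrics.summary["answer"]
    42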

177 """ 

178 

179 def __init__(self, summary=None, output=None, data=None): 

180 self.summary = summary 

181 self.output = output 

182 self.data = data 

183 

184 def __eq__(self, other): 

185 return self.summary == other.summary and self.output == other.output and self.data == other.data 

186 

187 def __str__(self): 

188 return str(self.exportAsDict()) 

189 

190 def __repr__(self): 

191 return f"MetricsExample({self.exportAsDict()})" 

192 

193 def exportAsDict(self): 

194 """Convert object contents to a single python dict.""" 

195 exportDict = {"summary": self.summary, 

196 "output": self.output} 

197 if self.data is not None: 

198 exportDict["data"] = list(self.data) 

199 else: 

200 exportDict["data"] = None 

201 return exportDict 

202 

203 def _asdict(self): 

204 """Convert object contents to a single Python dict. 

205 

206 This interface is used for JSON serialization. 

207 

208 Returns 

209 ------- 

210 exportDict : `dict` 

211 Object contents in the form of a dict with keys corresponding 

212 to object attributes. 

213 """ 

214 return self.exportAsDict() 

215 

216 @classmethod 

217 def makeFromDict(cls, exportDict): 

218 """Create a new object from a dict that is compatible with that 

219 created by `exportAsDict`. 

220 

221 Parameters 

222 ---------- 

223 exportDict : `dict` 

224 `dict` with keys "summary", "output", and (optionally) "data". 

225 

226 Returns 

227 ------- 

228 newobject : `MetricsExample` 

229 New `MetricsExample` object. 
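
        Examples
        --------
        A round-trip sketch using the sibling `exportAsDict` method; the
        contents are illustrative only:

        >>> original = MetricsExample({"answer": 42}, {"AM1": 5.2}, [1, 2, 3])
        >>> restored = MetricsExample.makeFromDict(original.exportAsDict())
        >>> restored == original
        True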

230 """ 

231 data = None 

232 if "data" in exportDict: 

233 data = exportDict["data"] 

234 return cls(exportDict["summary"], exportDict["output"], data) 

235 

236 

237class ListDelegate(StorageClassDelegate): 

238 """Parameter handler for list parameters""" 

239 

240 def handleParameters(self, inMemoryDataset, parameters=None): 

241 """Modify the in-memory dataset using the supplied parameters, 

242 returning a possibly new object. 

243 

244 Parameters 

245 ---------- 

246 inMemoryDataset : `object` 

247 Object to modify based on the parameters. 

248 parameters : `dict` 

249 Parameters to apply. Values are specific to the parameter. 

250 Supported parameters are defined in the associated 

251 `StorageClass`. If no relevant parameters are specified the 

252 inMemoryDataset will be return unchanged. 

253 

254 Returns 

255 ------- 

256 inMemoryDataset : `object` 

257 Updated form of supplied in-memory dataset, after parameters 

258 have been used. 
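
        Examples
        --------
        An illustrative sketch, assuming the delegate is constructed with a
        `StorageClass` that declares the ``slice`` parameter (as
        ``StructuredDataListYaml`` does above):

        >>> sc = StorageClass("StructuredDataListYaml", pytype=list,
        ...                   parameters={"slice"})
        >>> delegate = ListDelegate(sc)
        >>> delegate.handleParameters([1, 2, 3, 4], {"slice": slice(1, 3)})
        [2, 3]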

259 """ 

260 inMemoryDataset = copy.deepcopy(inMemoryDataset) 

261 use = self.storageClass.filterParameters(parameters, subset={"slice"}) 

262 if use: 

263 inMemoryDataset = inMemoryDataset[use["slice"]] 

264 return inMemoryDataset 

265 

266 

267class MetricsDelegate(StorageClassDelegate): 

268 """Parameter handler for parameters using Metrics""" 

269 

270 def handleParameters(self, inMemoryDataset, parameters=None): 

271 """Modify the in-memory dataset using the supplied parameters, 

272 returning a possibly new object. 

273 

274 Parameters 

275 ---------- 

276 inMemoryDataset : `object` 

277 Object to modify based on the parameters. 

278 parameters : `dict` 

279 Parameters to apply. Values are specific to the parameter. 

280 Supported parameters are defined in the associated 

281 `StorageClass`. If no relevant parameters are specified the 

282 inMemoryDataset will be return unchanged. 

283 

284 Returns 

285 ------- 

286 inMemoryDataset : `object` 

287 Updated form of supplied in-memory dataset, after parameters 

288 have been used. 
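
        Examples
        --------
        An illustrative sketch, assuming a `StorageClass` that declares the
        ``slice`` parameter (as ``StructuredDataNoComponents`` does above);
        the slice is applied to the ``data`` member:

        >>> sc = StorageClass("StructuredDataNoComponents",
        ...                   pytype=MetricsExample, parameters={"slice"})
        >>> delegate = MetricsDelegate(sc)
        >>> sliced = delegate.handleParameters(
        ...     MetricsExample(summary={}, output={}, data=[1, 2, 3, 4]),
        ...     {"slice": slice(0, 2)})
        >>> sliced.data
        [1, 2]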

289 """ 

290 inMemoryDataset = copy.deepcopy(inMemoryDataset) 

291 use = self.storageClass.filterParameters(parameters, subset={"slice"}) 

292 if use: 

293 inMemoryDataset.data = inMemoryDataset.data[use["slice"]] 

294 return inMemoryDataset 

295 

296 def getComponent(self, composite, componentName: str): 

297 if componentName == "counter": 

298 return len(composite.data) 

299 return super().getComponent(composite, componentName) 

300 

301 @classmethod 

302 def selectResponsibleComponent(cls, readComponent: str, fromComponents) -> str: 

303 forwarderMap = { 

304 "counter": "data", 

305 } 

306 forwarder = forwarderMap.get(readComponent) 

307 if forwarder is not None and forwarder in fromComponents: 

308 return forwarder 

309 raise ValueError(f"Can not calculate read component {readComponent} from {fromComponents}")