Coverage for python/lsst/pipe/base/butlerQuantumContext.py: 14%

108 statements  

coverage.py v6.5.0, created at 2023-04-14 02:16 -0700

# This file is part of pipe_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

"""Module defining a butler-like object specialized to a specific quantum.
"""

__all__ = ("ButlerQuantumContext",)

from typing import Any, List, Optional, Sequence, Union

from lsst.daf.butler import Butler, DatasetRef, DimensionUniverse, LimitedButler, Quantum
from lsst.utils.introspection import get_full_type_name
from lsst.utils.logging import PeriodicLogger, getLogger

from .connections import DeferredDatasetRef, InputQuantizedConnection, OutputQuantizedConnection
from .struct import Struct

_LOG = getLogger(__name__)


class ButlerQuantumContext:
    """A Butler-like class specialized for a single quantum.

    A `ButlerQuantumContext` wraps a standard butler interface and
    specializes it to the context of a given quantum. In practice this
    means that the only gets and puts this class allows are for dataset
    references that are contained in the quantum.

    In the future this class will also be used to record provenance of
    what was actually retrieved and stored, in contrast to what preflight
    expects to be retrieved and stored based on the graph before execution.

    Do not use the constructor directly; instead use the `from_full` or
    `from_limited` factory methods.

    Notes
    -----
    `ButlerQuantumContext` instances are backed by either
    `lsst.daf.butler.Butler` or `lsst.daf.butler.LimitedButler`. When a
    limited butler is used, the quantum has to contain dataset references
    that are completely resolved (this is usually the case when the graph
    is constructed by ``GraphBuilder``).

    When instances are backed by a full butler, the quantum graph does not
    have to resolve output or intermediate references, but the input
    references of each quantum have to be resolved before they can be used
    by this class. When executing such graphs, intermediate references used
    as input to a quantum are resolved by
    ``lsst.ctrl.mpexec.SingleQuantumExecutor``. If output references of a
    quantum are resolved, they will be unresolved when a full butler is
    used.
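
    Examples
    --------
    A minimal usage sketch, not the actual executor code; ``REPO``,
    ``quantum``, ``task``, ``inputRefs``, and ``outputRefs`` are assumed to
    already exist and are only illustrative here::

        from lsst.daf.butler import Butler

        butler = Butler(REPO, run="u/example/run")
        butlerQC = ButlerQuantumContext.from_full(butler, quantum)

        # Retrieve every input named by the quantized connections, run the
        # task, and store its outputs back through the same context.
        inputs = butlerQC.get(inputRefs)
        outputs = task.run(**inputs)
        butlerQC.put(outputs, outputRefs)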

72 """ 

73 

    def __init__(self, *, limited: LimitedButler, quantum: Quantum, butler: Butler | None = None):
        self.quantum = quantum
        # Record the (datasetType, dataId) pairs declared by the quantum so
        # that get/put can verify that a requested ref belongs to it.
        self.allInputs = set()
        self.allOutputs = set()
        for refs in quantum.inputs.values():
            for ref in refs:
                self.allInputs.add((ref.datasetType, ref.dataId))
        for refs in quantum.outputs.values():
            for ref in refs:
                self.allOutputs.add((ref.datasetType, ref.dataId))
        self.__full_butler = butler
        self.__butler = limited


    @classmethod
    def from_full(cls, butler: Butler, quantum: Quantum) -> ButlerQuantumContext:
        """Make a ButlerQuantumContext backed by `lsst.daf.butler.Butler`.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler object from/to which datasets will be retrieved/stored.
        quantum : `lsst.daf.butler.core.Quantum`
            Quantum object that describes the datasets to be retrieved and
            stored by a single execution of this node in the pipeline graph.
            All input dataset references must be resolved in this Quantum.
            Output references may be resolved, but they will be unresolved.

        Returns
        -------
        butlerQC : `ButlerQuantumContext`
            Instance of the butler wrapper.
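
        Examples
        --------
        A brief sketch; ``butler`` and ``quantum`` are assumed to be
        provided by the executing framework and are not defined here::

            butlerQC = ButlerQuantumContext.from_full(butler, quantum)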

105 """ 

106 return ButlerQuantumContext(limited=butler, butler=butler, quantum=quantum) 

107 

    @classmethod
    def from_limited(cls, butler: LimitedButler, quantum: Quantum) -> ButlerQuantumContext:
        """Make a ButlerQuantumContext backed by `lsst.daf.butler.LimitedButler`.

        Parameters
        ----------
        butler : `lsst.daf.butler.LimitedButler`
            Butler object from/to which datasets will be retrieved/stored.
        quantum : `lsst.daf.butler.core.Quantum`
            Quantum object that describes the datasets to be retrieved and
            stored by a single execution of this node in the pipeline graph.
            Both input and output dataset references must be resolved in
            this Quantum.

        Returns
        -------
        butlerQC : `ButlerQuantumContext`
            Instance of the butler wrapper.
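
        Examples
        --------
        A brief sketch; ``limited_butler`` (any
        `~lsst.daf.butler.LimitedButler` implementation) and a fully
        resolved ``quantum`` are assumed to be provided by the executing
        framework::

            butlerQC = ButlerQuantumContext.from_limited(limited_butler, quantum)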

125 """ 

126 return ButlerQuantumContext(limited=butler, quantum=quantum) 

127 

    def _get(self, ref: Optional[Union[DeferredDatasetRef, DatasetRef]]) -> Any:
        # Butler methods below will check for unresolved DatasetRefs and
        # raise appropriately, so no need for us to do that here.
        if isinstance(ref, DeferredDatasetRef):
            self._checkMembership(ref.datasetRef, self.allInputs)
            return self.__butler.getDeferred(ref.datasetRef)
        elif ref is None:
            return None
        else:
            self._checkMembership(ref, self.allInputs)
            return self.__butler.get(ref)


    def _put(self, value: Any, ref: DatasetRef) -> None:
        """Store data in the butler."""
        self._checkMembership(ref, self.allOutputs)
        if self.__full_butler is not None:
            # If the reference is resolved we need to unresolve it first.
            # It is possible that we are putting a dataset into a different
            # run than was originally expected.
            if ref.id is not None:
                ref = ref.unresolved()
            self.__full_butler.put(value, ref)
        else:
            self.__butler.put(value, ref)


    def get(
        self,
        dataset: Union[
            InputQuantizedConnection,
            List[Optional[DatasetRef]],
            List[Optional[DeferredDatasetRef]],
            DatasetRef,
            DeferredDatasetRef,
            None,
        ],
    ) -> Any:
        """Fetch data from the butler.

        Parameters
        ----------
        dataset
            This argument may either be an `InputQuantizedConnection` which
            describes all the inputs of a quantum, a list of
            `~lsst.daf.butler.DatasetRef`, or a single
            `~lsst.daf.butler.DatasetRef`. The function will get and return
            the corresponding datasets from the butler. If `None` is passed
            in place of a `~lsst.daf.butler.DatasetRef` then the
            corresponding returned object will be `None`.

        Returns
        -------
        return : `object`
            This function returns arbitrary objects fetched from the butler.
            The structure in which these objects are returned depends on the
            type of the input argument. If the input dataset argument is an
            `InputQuantizedConnection`, then the return type will be a
            dictionary with keys corresponding to the attributes of the
            `InputQuantizedConnection` (which in turn are the attribute
            identifiers of the connections). If the input argument is of
            type `list` of `~lsst.daf.butler.DatasetRef` then the return
            type will be a list of objects. If the input argument is a
            single `~lsst.daf.butler.DatasetRef` then a single object will
            be returned.

        Raises
        ------
        ValueError
            Raised if a `DatasetRef` is passed to get that is not defined
            in the quantum object.
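
        Examples
        --------
        A sketch of the three call forms; ``butlerQC``, ``inputRefs``, and
        the individual ``ref`` are assumed to exist, and the connection name
        ``exposures`` is only illustrative::

            # All inputs at once: a dict keyed by connection name.
            inputs = butlerQC.get(inputRefs)

            # A list of refs: a list of objects in the same order.
            exposures = butlerQC.get(inputRefs.exposures)

            # A single ref: the single corresponding object.
            exposure = butlerQC.get(ref)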

197 """ 

198 # Set up a periodic logger so log messages can be issued if things 

199 # are taking too long. 

200 periodic = PeriodicLogger(_LOG) 

201 

202 if isinstance(dataset, InputQuantizedConnection): 

203 retVal = {} 

204 n_connections = len(dataset) 

205 n_retrieved = 0 

206 for i, (name, ref) in enumerate(dataset): 

207 if isinstance(ref, list): 

208 val = [] 

209 n_refs = len(ref) 

210 for j, r in enumerate(ref): 

211 val.append(self._get(r)) 

212 n_retrieved += 1 

213 periodic.log( 

214 "Retrieved %d out of %d datasets for connection '%s' (%d out of %d)", 

215 j + 1, 

216 n_refs, 

217 name, 

218 i + 1, 

219 n_connections, 

220 ) 

221 else: 

222 val = self._get(ref) 

223 periodic.log( 

224 "Retrieved dataset for connection '%s' (%d out of %d)", 

225 name, 

226 i + 1, 

227 n_connections, 

228 ) 

229 n_retrieved += 1 

230 retVal[name] = val 

231 if periodic.num_issued > 0: 

232 # This took long enough that we issued some periodic log 

233 # messages, so issue a final confirmation message as well. 

234 _LOG.verbose( 

235 "Completed retrieval of %d datasets from %d connections", n_retrieved, n_connections 

236 ) 

237 return retVal 

238 elif isinstance(dataset, list): 

239 n_datasets = len(dataset) 

240 retrieved = [] 

241 for i, x in enumerate(dataset): 

242 # Mypy is not sure of the type of x because of the union 

243 # of lists so complains. Ignoring it is more efficient 

244 # than adding an isinstance assert. 

245 retrieved.append(self._get(x)) 

246 periodic.log("Retrieved %d out of %d datasets", i + 1, n_datasets) 

247 if periodic.num_issued > 0: 

248 _LOG.verbose("Completed retrieval of %d datasets", n_datasets) 

249 return retrieved 

250 elif isinstance(dataset, DatasetRef) or isinstance(dataset, DeferredDatasetRef) or dataset is None: 

251 return self._get(dataset) 

252 else: 

253 raise TypeError( 

254 f"Dataset argument ({get_full_type_name(dataset)}) is not a type that can be used to get" 

255 ) 

256 

    def put(
        self,
        values: Union[Struct, List[Any], Any],
        dataset: Union[OutputQuantizedConnection, List[DatasetRef], DatasetRef],
    ) -> None:
        """Put data into the butler.

        Parameters
        ----------
        values : `Struct` or `list` of `object` or `object`
            The data that should be put with the butler. If the type of the
            dataset is `OutputQuantizedConnection` then this argument should
            be a `Struct` with corresponding attribute names. Each attribute
            should then correspond to either a list of objects or a single
            object, depending on the type of the corresponding attribute on
            dataset. I.e. if ``dataset.calexp`` is
            ``[datasetRef1, datasetRef2]`` then ``values.calexp`` should be
            ``[calexp1, calexp2]``. Likewise if there is a single ref, then
            only a single object need be passed. The same restriction
            applies if dataset is directly a `list` of `DatasetRef` or a
            single `DatasetRef`.
        dataset
            This argument may either be an `OutputQuantizedConnection` which
            describes all the outputs of a quantum, a list of
            `lsst.daf.butler.DatasetRef`, or a single
            `lsst.daf.butler.DatasetRef`. The function will store the
            corresponding values with the butler.

        Raises
        ------
        ValueError
            Raised if a `DatasetRef` is passed to put that is not defined
            in the quantum object, or if the type of values does not match
            what is expected from the type of dataset.
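
        Examples
        --------
        A sketch using the hypothetical connection names ``calexp`` and
        ``background``; ``butlerQC``, ``outputRefs``, and the output objects
        are assumed to exist::

            from lsst.pipe.base import Struct

            # A Struct whose attribute names match the output connections.
            outputs = Struct(calexp=calexp, background=background)
            butlerQC.put(outputs, outputRefs)

            # Or put a single object directly against a single ref.
            butlerQC.put(calexp, outputRefs.calexp)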

291 """ 

292 if isinstance(dataset, OutputQuantizedConnection): 

293 if not isinstance(values, Struct): 

294 raise ValueError( 

295 "dataset is a OutputQuantizedConnection, a Struct with corresponding" 

296 " attributes must be passed as the values to put" 

297 ) 

298 for name, refs in dataset: 

299 valuesAttribute = getattr(values, name) 

300 if isinstance(refs, list): 

301 if len(refs) != len(valuesAttribute): 

302 raise ValueError(f"There must be a object to put for every Dataset ref in {name}") 

303 for i, ref in enumerate(refs): 

304 self._put(valuesAttribute[i], ref) 

305 else: 

306 self._put(valuesAttribute, refs) 

307 elif isinstance(dataset, list): 

308 if not isinstance(values, Sequence): 

309 raise ValueError("Values to put must be a sequence") 

310 if len(dataset) != len(values): 

311 raise ValueError("There must be a common number of references and values to put") 

312 for i, ref in enumerate(dataset): 

313 self._put(values[i], ref) 

314 elif isinstance(dataset, DatasetRef): 

315 self._put(values, dataset) 

316 else: 

317 raise TypeError("Dataset argument is not a type that can be used to put") 

318 

    def _checkMembership(self, ref: Union[List[DatasetRef], DatasetRef], inout: set) -> None:
        """Check whether a `DatasetRef` is part of the quantum being
        processed.

        This function will raise an exception if the ButlerQuantumContext is
        used to get/put a DatasetRef which is not defined in the quantum.

        Parameters
        ----------
        ref : `list` of `DatasetRef` or `DatasetRef`
            Either a list of or a single `DatasetRef` to check.
        inout : `set`
            The connection type to check, e.g. either an input or an output.
            This prevents both types from needing to be checked for every
            operation, which may be important for quanta with many
            `DatasetRef` objects.
        """
        if not isinstance(ref, list):
            ref = [ref]
        for r in ref:
            if (r.datasetType, r.dataId) not in inout:
                raise ValueError("DatasetRef is not part of the Quantum being processed")


    @property
    def dimensions(self) -> DimensionUniverse:
        """Structure managing all dimensions recognized by this data
        repository (`DimensionUniverse`).
        """
        return self.__butler.dimensions