Coverage for python/lsst/pipe/base/butlerQuantumContext.py: 14%

108 statements  

coverage.py v6.5.0, created at 2023-01-27 09:57 +0000

# This file is part of pipe_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

"""Module defining a butler-like object specialized to a specific quantum."""

__all__ = ("ButlerQuantumContext",)

from typing import Any, List, Optional, Sequence, Union

from lsst.daf.butler import Butler, DatasetRef, DimensionUniverse, LimitedButler, Quantum
from lsst.utils.introspection import get_full_type_name
from lsst.utils.logging import PeriodicLogger, getLogger

from .connections import DeferredDatasetRef, InputQuantizedConnection, OutputQuantizedConnection
from .struct import Struct

_LOG = getLogger(__name__)


class ButlerQuantumContext:
    """A Butler-like class specialized for a single quantum.

    A ButlerQuantumContext wraps a standard butler interface and
    specializes it to the context of a given quantum. In practice this
    means that the only gets and puts this class allows are of dataset
    references contained in the quantum.

    In the future this class will also be used to record provenance on
    what was actually read and written, in contrast to what preflight
    expects to be read and written based on the graph before execution.

    Do not use the constructor directly; instead use the `from_full` or
    `from_limited` factory methods.

    Notes
    -----
    `ButlerQuantumContext` instances are backed by either
    `lsst.daf.butler.Butler` or `lsst.daf.butler.LimitedButler`. When a
    limited butler is used, the quantum has to contain dataset references
    that are completely resolved (which is usually the case when the graph
    is constructed by GraphBuilder).

    When instances are backed by a full butler, the quantum graph does not
    have to resolve output or intermediate references, but the input
    references of each quantum have to be resolved before they can be used
    by this class. When executing such graphs, intermediate references used
    as input to a quantum are resolved by
    ``lsst.ctrl.mpexec.SingleQuantumExecutor``. If the output references of
    a quantum are resolved, they will be unresolved when a full butler is
    used.
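
    Examples
    --------
    A minimal sketch of how an executor might construct the context. The
    butler objects and the resolved ``quantum`` are assumed to already
    exist (quanta normally come from a quantum graph node), so the names
    below are purely illustrative::

        # Backed by a full butler; the input references in ``quantum``
        # must already be resolved.
        butlerQC = ButlerQuantumContext.from_full(butler, quantum)

        # Backed by a limited butler; every reference in ``quantum``
        # must already be resolved.
        butlerQC = ButlerQuantumContext.from_limited(limited_butler, quantum)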

    """

    def __init__(self, *, limited: LimitedButler, quantum: Quantum, butler: Butler | None = None):
        self.quantum = quantum
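        # Record the (datasetType, dataId) pair of every input and output
        # declared by this quantum; get/put use these sets to verify that a
        # given dataset reference actually belongs to the quantum.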
        self.allInputs = set()
        self.allOutputs = set()
        for refs in quantum.inputs.values():
            for ref in refs:
                self.allInputs.add((ref.datasetType, ref.dataId))
        for refs in quantum.outputs.values():
            for ref in refs:
                self.allOutputs.add((ref.datasetType, ref.dataId))
        self.__full_butler = butler
        self.__butler = limited

    @classmethod
    def from_full(cls, butler: Butler, quantum: Quantum) -> ButlerQuantumContext:
        """Make a ButlerQuantumContext backed by `lsst.daf.butler.Butler`.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler object from/to which datasets will be get/put.
        quantum : `lsst.daf.butler.core.Quantum`
            Quantum object that describes the datasets which will be get/put
            by a single execution of this node in the pipeline graph. All
            input dataset references must be resolved in this quantum.
            Output references may be resolved, but they will be unresolved
            before use.

        Returns
        -------
        butlerQC : `ButlerQuantumContext`
            Instance of the butler wrapper.
        """
        return ButlerQuantumContext(limited=butler, butler=butler, quantum=quantum)

    @classmethod
    def from_limited(cls, butler: LimitedButler, quantum: Quantum) -> ButlerQuantumContext:
        """Make a ButlerQuantumContext backed by `lsst.daf.butler.LimitedButler`.

        Parameters
        ----------
        butler : `lsst.daf.butler.LimitedButler`
            Butler object from/to which datasets will be get/put.
        quantum : `lsst.daf.butler.core.Quantum`
            Quantum object that describes the datasets which will be get/put
            by a single execution of this node in the pipeline graph. Both
            input and output dataset references must be resolved in this
            quantum.

        Returns
        -------
        butlerQC : `ButlerQuantumContext`
            Instance of the butler wrapper.
        """
        return ButlerQuantumContext(limited=butler, quantum=quantum)

    def _get(self, ref: Optional[Union[DeferredDatasetRef, DatasetRef]]) -> Any:
        # Butler methods below will check for unresolved DatasetRefs and
        # raise appropriately, so no need for us to do that here.
        if isinstance(ref, DeferredDatasetRef):
            self._checkMembership(ref.datasetRef, self.allInputs)
            return self.__butler.getDirectDeferred(ref.datasetRef)
        elif ref is None:
            return None
        else:
            self._checkMembership(ref, self.allInputs)
            return self.__butler.getDirect(ref)

    def _put(self, value: Any, ref: DatasetRef) -> None:
        """Store data in the butler."""
        self._checkMembership(ref, self.allOutputs)
        if self.__full_butler is not None:
            # If the reference is resolved we need to unresolve it first.
            if ref.id is not None:
                ref = ref.unresolved()
            self.__full_butler.put(value, ref)
        else:
            self.__butler.putDirect(value, ref)

    def get(
        self,
        dataset: Union[
            InputQuantizedConnection,
            List[Optional[DatasetRef]],
            List[Optional[DeferredDatasetRef]],
            DatasetRef,
            DeferredDatasetRef,
            None,
        ],
    ) -> Any:
        """Fetch data from the butler.

        Parameters
        ----------
        dataset
            This argument may either be an `InputQuantizedConnection` which
            describes all the inputs of a quantum, a list of
            `~lsst.daf.butler.DatasetRef`, or a single
            `~lsst.daf.butler.DatasetRef`. The function will get and return
            the corresponding datasets from the butler. If `None` is passed
            in place of a `~lsst.daf.butler.DatasetRef` then the
            corresponding returned object will be `None`.

        Returns
        -------
        return : `object`
            This function returns arbitrary objects fetched from the butler.
            The structure these objects are returned in depends on the type
            of the input argument. If the input dataset argument is an
            `InputQuantizedConnection`, then the return type will be a
            dictionary with keys corresponding to the attributes of the
            `InputQuantizedConnection` (which in turn are the attribute
            identifiers of the connections). If the input argument is of
            type `list` of `~lsst.daf.butler.DatasetRef` then the return
            type will be a list of objects. If the input argument is a
            single `~lsst.daf.butler.DatasetRef` then a single object will
            be returned.

        Raises
        ------
        ValueError
            Raised if a `DatasetRef` is passed to get that is not defined
            in the quantum object.
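
        Examples
        --------
        A sketch of the typical call pattern inside a task's ``runQuantum``
        method, where ``inputRefs`` is the `InputQuantizedConnection`
        provided by the execution framework (the attribute names on
        ``inputs`` are illustrative and depend on the task's connections)::

            def runQuantum(self, butlerQC, inputRefs, outputRefs):
                # Returns a dict keyed by connection name.
                inputs = butlerQC.get(inputRefs)
                outputs = self.run(**inputs)
                butlerQC.put(outputs, outputRefs)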

        """
        # Set up a periodic logger so log messages can be issued if things
        # are taking too long.
        periodic = PeriodicLogger(_LOG)

        if isinstance(dataset, InputQuantizedConnection):
            retVal = {}
            n_connections = len(dataset)
            n_retrieved = 0
            for i, (name, ref) in enumerate(dataset):
                if isinstance(ref, list):
                    val = []
                    n_refs = len(ref)
                    for j, r in enumerate(ref):
                        val.append(self._get(r))
                        n_retrieved += 1
                        periodic.log(
                            "Retrieved %d out of %d datasets for connection '%s' (%d out of %d)",
                            j + 1,
                            n_refs,
                            name,
                            i + 1,
                            n_connections,
                        )
                else:
                    val = self._get(ref)
                    periodic.log(
                        "Retrieved dataset for connection '%s' (%d out of %d)",
                        name,
                        i + 1,
                        n_connections,
                    )
                    n_retrieved += 1
                retVal[name] = val
            if periodic.num_issued > 0:
                # This took long enough that we issued some periodic log
                # messages, so issue a final confirmation message as well.
                _LOG.verbose(
                    "Completed retrieval of %d datasets from %d connections", n_retrieved, n_connections
                )
            return retVal
        elif isinstance(dataset, list):
            n_datasets = len(dataset)
            retrieved = []
            for i, x in enumerate(dataset):
                # Mypy is not sure of the type of x because of the union
                # of lists so complains. Ignoring it is more efficient
                # than adding an isinstance assert.
                retrieved.append(self._get(x))
                periodic.log("Retrieved %d out of %d datasets", i + 1, n_datasets)
            if periodic.num_issued > 0:
                _LOG.verbose("Completed retrieval of %d datasets", n_datasets)
            return retrieved
        elif isinstance(dataset, DatasetRef) or isinstance(dataset, DeferredDatasetRef) or dataset is None:
            return self._get(dataset)
        else:
            raise TypeError(
                f"Dataset argument ({get_full_type_name(dataset)}) is not a type that can be used to get"
            )

    def put(
        self,
        values: Union[Struct, List[Any], Any],
        dataset: Union[OutputQuantizedConnection, List[DatasetRef], DatasetRef],
    ) -> None:
        """Put data into the butler.

        Parameters
        ----------
        values : `Struct` or `list` of `object` or `object`
            The data that should be put with the butler. If the type of the
            dataset is `OutputQuantizedConnection` then this argument should
            be a `Struct` with corresponding attribute names. Each attribute
            should then correspond to either a list of objects or a single
            object, depending on the type of the corresponding attribute on
            dataset. I.e. if ``dataset.calexp`` is
            ``[datasetRef1, datasetRef2]`` then ``values.calexp`` should be
            ``[calexp1, calexp2]``. Likewise if there is a single ref, then
            only a single object need be passed. The same restriction
            applies if dataset is directly a `list` of `DatasetRef` or a
            single `DatasetRef`.
        dataset
            This argument may either be an `OutputQuantizedConnection` which
            describes all the outputs of a quantum, a list of
            `lsst.daf.butler.DatasetRef`, or a single
            `lsst.daf.butler.DatasetRef`. The function will put the
            corresponding datasets into the butler.

        Raises
        ------
        ValueError
            Raised if a `DatasetRef` is passed to put that is not defined
            in the quantum object, or if the type of values does not match
            what is expected from the type of dataset.
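
        Examples
        --------
        A sketch of the typical call pattern inside a task's ``runQuantum``
        method, where ``outputRefs`` is the `OutputQuantizedConnection`
        provided by the execution framework and ``self.run`` is assumed to
        return a `Struct` whose attribute names match the output
        connections::

            def runQuantum(self, butlerQC, inputRefs, outputRefs):
                inputs = butlerQC.get(inputRefs)
                outputs = self.run(**inputs)
                # Writes every attribute of ``outputs`` to the matching
                # output dataset reference.
                butlerQC.put(outputs, outputRefs)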

        """
        if isinstance(dataset, OutputQuantizedConnection):
            if not isinstance(values, Struct):
                raise ValueError(
                    "dataset is an OutputQuantizedConnection; a Struct with corresponding"
                    " attributes must be passed as the values to put"
                )
            for name, refs in dataset:
                valuesAttribute = getattr(values, name)
                if isinstance(refs, list):
                    if len(refs) != len(valuesAttribute):
                        raise ValueError(f"There must be an object to put for every DatasetRef in {name}")
                    for i, ref in enumerate(refs):
                        self._put(valuesAttribute[i], ref)
                else:
                    self._put(valuesAttribute, refs)
        elif isinstance(dataset, list):
            if not isinstance(values, Sequence):
                raise ValueError("Values to put must be a sequence")
            if len(dataset) != len(values):
                raise ValueError("There must be a common number of references and values to put")
            for i, ref in enumerate(dataset):
                self._put(values[i], ref)
        elif isinstance(dataset, DatasetRef):
            self._put(values, dataset)
        else:
            raise TypeError("Dataset argument is not a type that can be used to put")

    def _checkMembership(self, ref: Union[List[DatasetRef], DatasetRef], inout: set) -> None:
        """Check that a DatasetRef is part of the quantum being processed.

        This function will raise an exception if the ButlerQuantumContext is
        used to get/put a DatasetRef which is not defined in the quantum.

        Parameters
        ----------
        ref : `list` of `DatasetRef` or `DatasetRef`
            Either a list or a single `DatasetRef` to check.
        inout : `set`
            The connection type to check, e.g. either an input or an output.
            This prevents both types needing to be checked for every
            operation, which may be important for quanta with many
            `DatasetRef` entries.
        """
        if not isinstance(ref, list):
            ref = [ref]
        for r in ref:
            if (r.datasetType, r.dataId) not in inout:
                raise ValueError("DatasetRef is not part of the Quantum being processed")

    @property
    def dimensions(self) -> DimensionUniverse:
        """Structure managing all dimensions recognized by this data
        repository (`DimensionUniverse`).
        """
        return self.__butler.dimensions