# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

__all__ = ("Quantum",)

from lsst.utils import doImport

from .utils import NamedKeyDict
from .execution import Execution


class Quantum(Execution):
    """A discrete unit of work that may depend on one or more datasets and
    produces one or more datasets.

    Most Quanta will be executions of a particular ``PipelineTask``’s
    ``runQuantum`` method, but they can also be used to represent discrete
    units of work performed manually by human operators or other software
    agents.

    Parameters
    ----------
    taskName : `str`, optional
        Fully-qualified name of the Task class that executed or will execute
        this Quantum. If not provided, ``taskClass`` must be.
    taskClass : `type`, optional
        The Task class that executed or will execute this Quantum. If not
        provided, ``taskName`` must be. Overrides ``taskName`` if both are
        provided.
    dataId : `DataId`, optional
        The dimension values that identify this `Quantum`.
    run : `Run`, optional
        The Run this Quantum is a part of.
    initInputs : collection of `DatasetRef`, optional
        Datasets that are needed to construct an instance of the Task. May
        be a flat iterable of `DatasetRef` instances or a mapping from
        `DatasetType` to `DatasetRef`.
    predictedInputs : `~collections.abc.Mapping`, optional
        Inputs identified prior to execution, organized as a mapping from
        `DatasetType` to a list of `DatasetRef`. Must be a superset of
        ``actualInputs``.
    actualInputs : `~collections.abc.Mapping`, optional
        Inputs actually used during execution, organized as a mapping from
        `DatasetType` to a list of `DatasetRef`. Must be a subset of
        ``predictedInputs``.
    outputs : `~collections.abc.Mapping`, optional
        Outputs from executing this quantum of work, organized as a mapping
        from `DatasetType` to a list of `DatasetRef`.
    kwargs
        Additional arguments are forwarded to the base `Execution` constructor.
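
    Examples
    --------
    A minimal sketch of constructing and populating a `Quantum` by hand; the
    task name here is purely illustrative, and ``inputRef`` and ``outputRef``
    stand in for pre-existing `DatasetRef` instances::

        quantum = Quantum(taskName="lsst.example.ExampleTask")
        quantum.addPredictedInput(inputRef)
        quantum.addOutput(outputRef)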

""" 

 

__slots__ = ("_taskName", "_taskClass", "_dataId", "_run", 

"_initInputs", "_predictedInputs", "_actualInputs", "_outputs") 

 

def __init__(self, *, taskName=None, taskClass=None, dataId=None, run=None, 

initInputs=None, predictedInputs=(), actualInputs=(), outputs=(), 

**kwargs): 

super().__init__(**kwargs) 

if taskClass is not None: 

taskName = f"{taskClass.__module__}.{taskClass.__name__}" 

self._taskName = taskName 

self._taskClass = taskClass 

self._run = run 

self._dataId = dataId 

if initInputs is None: 

initInputs = {} 

elif not hasattr(initInputs, "keys"): 

initInputs = {ref.datasetType: ref for ref in initInputs} 

self._initInputs = NamedKeyDict(initInputs) 

self._predictedInputs = NamedKeyDict(predictedInputs) 

self._actualInputs = NamedKeyDict(actualInputs) 

self._outputs = NamedKeyDict(outputs) 

 

    @property
    def taskClass(self):
        """Task class associated with this `Quantum` (`type`).
        """
        if self._taskClass is None:
            self._taskClass = doImport(self._taskName)
        return self._taskClass

    @property
    def taskName(self):
        """Fully-qualified name of the task associated with `Quantum` (`str`).
        """
        return self._taskName

    @property
    def run(self):
        """The Run this Quantum is a part of (`Run`).
        """
        return self._run

    @property
    def dataId(self):
        """The dimension values of the unit of processing (`DataId`).
        """
        return self._dataId

    @property
    def initInputs(self):
        """A mapping of datasets used to construct the Task,
        with `DatasetType` instances as keys (names can also be used for
        lookups) and `DatasetRef` instances as values.
        """
        return self._initInputs

    @property
    def predictedInputs(self):
        """A mapping of input datasets that were expected to be used,
        with `DatasetType` instances as keys (names can also be used for
        lookups) and a list of `DatasetRef` instances as values.

        Notes
        -----
        We cannot use `set` instead of `list` for the nested container because
        `DatasetRef` instances cannot be compared reliably when some have
        integer IDs and others do not.
        """
        return self._predictedInputs

    @property
    def actualInputs(self):
        """A mapping of input datasets that were actually used, with the same
        form as `Quantum.predictedInputs`.

        Notes
        -----
        We cannot use `set` instead of `list` for the nested container because
        `DatasetRef` instances cannot be compared reliably when some have
        integer IDs and others do not.
        """
        return self._actualInputs

    @property
    def outputs(self):
        """A mapping of output datasets (to be) generated for this quantum,
        with the same form as `predictedInputs`.

        Notes
        -----
        We cannot use `set` instead of `list` for the nested container because
        `DatasetRef` instances cannot be compared reliably when some have
        integer IDs and others do not.
        """
        return self._outputs

    def addPredictedInput(self, ref):
        """Add an input `DatasetRef` to the `Quantum`.

        This does not automatically update a `Registry`; all `predictedInputs`
        must be present before `Registry.addQuantum()` is called.

        Parameters
        ----------
        ref : `DatasetRef`
            Reference for a Dataset to add to the Quantum's predicted inputs.
        """
        self._predictedInputs.setdefault(ref.datasetType, []).append(ref)

    def _markInputUsed(self, ref):
        """Mark an input as used.

        This does not automatically update a `Registry`.
        For that, use `Registry.markInputUsed()` instead.
        """
        # First validate against the predicted inputs.
        if ref.datasetType not in self._predictedInputs:
            raise ValueError("Dataset type {} not in predicted inputs".format(ref.datasetType.name))
        if ref not in self._predictedInputs[ref.datasetType]:
            raise ValueError("Actual input {} was not predicted".format(ref))
        # Now record it as an actual input.
        self._actualInputs.setdefault(ref.datasetType, []).append(ref)

    def addOutput(self, ref):
        """Add an output `DatasetRef` to the `Quantum`.

        This does not automatically update a `Registry`; all `outputs`
        must be present before `Registry.addQuantum()` is called.

        Parameters
        ----------
        ref : `DatasetRef`
            Reference for a Dataset to add to the Quantum's outputs.
        """
        self._outputs.setdefault(ref.datasetType, []).append(ref)