Coverage for python/lsst/daf/butler/core/quantum.py : 26%

Hot-keys on this page
r m x p toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <http://www.gnu.org/licenses/>.
22from __future__ import annotations
24__all__ = ("Quantum",)
26from typing import (
27 Iterable,
28 List,
29 Mapping,
30 Optional,
31 Type,
32 TYPE_CHECKING,
33 Union,
34 Tuple,
35 Any
36)
38from lsst.utils import doImport
40from .named import NamedKeyDict, NamedKeyMapping
if TYPE_CHECKING:
43 from .dimensions import DataCoordinate
44 from .datasets import DatasetRef, DatasetType
class Quantum:
    """A discrete unit of work that may depend on one or more datasets and
    produces one or more datasets.

    Most Quanta will be executions of a particular ``PipelineTask``'s
    ``runQuantum`` method, but they can also be used to represent discrete
    units of work performed manually by human operators or other software
    agents.

    Parameters
    ----------
    taskName : `str`, optional
        Fully-qualified name of the Task class that executed or will execute
        this Quantum.  If not provided, ``taskClass`` must be.
    taskClass : `type`, optional
        The Task class that executed or will execute this Quantum.  If not
        provided, ``taskName`` must be.  Overrides ``taskName`` if both are
        provided.
    dataId : `DataId`, optional
        The dimension values that identify this `Quantum`.
    initInputs : collection of `DatasetRef`, optional
        Datasets that are needed to construct an instance of the Task.  May
        be a flat iterable of `DatasetRef` instances or a mapping from
        `DatasetType` to `DatasetRef`.
    inputs : `~collections.abc.Mapping`, optional
        Inputs identified prior to execution, organized as a mapping from
        `DatasetType` to a list of `DatasetRef`.
    outputs : `~collections.abc.Mapping`, optional
        Outputs from executing this quantum of work, organized as a mapping
        from `DatasetType` to a list of `DatasetRef`.
    """

    # NOTE(review): ``_hash`` is never assigned in this class; it is kept in
    # ``__slots__`` for compatibility (presumably reserved for a cached hash
    # value — confirm before removing).
    __slots__ = ("_taskName", "_taskClass", "_dataId", "_initInputs", "_inputs", "_outputs", "_hash")

    def __init__(self, *, taskName: Optional[str] = None,
                 taskClass: Optional[Type] = None,
                 dataId: Optional[DataCoordinate] = None,
                 initInputs: Optional[Union[Mapping[DatasetType, DatasetRef],
                                            Iterable[DatasetRef]]] = None,
                 inputs: Optional[Mapping[DatasetType, List[DatasetRef]]] = None,
                 outputs: Optional[Mapping[DatasetType, List[DatasetRef]]] = None,
                 ):
        if taskClass is not None:
            # ``taskClass`` overrides ``taskName`` when both are given; derive
            # the fully-qualified name from the class itself.
            taskName = f"{taskClass.__module__}.{taskClass.__name__}"
        self._taskName = taskName
        self._taskClass = taskClass
        self._dataId = dataId
        if initInputs is None:
            initInputs = {}
        elif not isinstance(initInputs, Mapping):
            # Accept a flat iterable of refs by keying them on dataset type.
            initInputs = {ref.datasetType: ref for ref in initInputs}
        if inputs is None:
            inputs = {}
        if outputs is None:
            outputs = {}
        # Freeze the mappings so a Quantum is effectively immutable after
        # construction (the properties below expose them read-only).
        self._initInputs: NamedKeyMapping[DatasetType, DatasetRef] = NamedKeyDict(initInputs).freeze()
        self._inputs: NamedKeyMapping[DatasetType, List[DatasetRef]] = NamedKeyDict(inputs).freeze()
        self._outputs: NamedKeyMapping[DatasetType, List[DatasetRef]] = NamedKeyDict(outputs).freeze()

    @property
    def taskClass(self) -> Optional[Type]:
        """Task class associated with this `Quantum` (`type`).

        Importing the class is deferred until first access when only
        ``taskName`` was provided at construction.  `None` is returned when
        neither ``taskName`` nor ``taskClass`` was given.
        """
        # Guard on _taskName so a Quantum constructed with neither argument
        # returns None instead of raising from doImport(None).
        if self._taskClass is None and self._taskName is not None:
            self._taskClass = doImport(self._taskName)
        return self._taskClass

    @property
    def taskName(self) -> Optional[str]:
        """Fully-qualified name of the task associated with `Quantum` (`str`).
        """
        return self._taskName

    @property
    def dataId(self) -> Optional[DataCoordinate]:
        """The dimension values of the unit of processing (`DataId`).
        """
        return self._dataId

    @property
    def initInputs(self) -> NamedKeyMapping[DatasetType, DatasetRef]:
        """A mapping of datasets used to construct the Task,
        with `DatasetType` instances as keys (names can also be used for
        lookups) and `DatasetRef` instances as values.
        """
        return self._initInputs

    @property
    def inputs(self) -> NamedKeyMapping[DatasetType, List[DatasetRef]]:
        """A mapping of input datasets that were expected to be used,
        with `DatasetType` instances as keys (names can also be used for
        lookups) and a list of `DatasetRef` instances as values.

        Notes
        -----
        We cannot use `set` instead of `list` for the nested container because
        `DatasetRef` instances cannot be compared reliably when some have
        integers IDs and others do not.
        """
        return self._inputs

    @property
    def outputs(self) -> NamedKeyMapping[DatasetType, List[DatasetRef]]:
        """A mapping of output datasets (to be) generated for this quantum,
        with the same form as `predictedInputs`.

        Notes
        -----
        We cannot use `set` instead of `list` for the nested container because
        `DatasetRef` instances cannot be compared reliably when some have
        integers IDs and others do not.
        """
        return self._outputs

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Quantum):
            # Let Python try the reflected comparison instead of deciding
            # "unequal" ourselves (data-model convention).
            return NotImplemented
        for item in ("taskClass", "dataId", "initInputs", "inputs", "outputs"):
            if getattr(self, item) != getattr(other, item):
                return False
        return True

    def __hash__(self) -> int:
        # Consistent with __eq__: equal Quanta share taskClass and dataId.
        return hash((self.taskClass, self.dataId))

    def __reduce__(self) -> Union[str, Tuple[Any, ...]]:
        # Use the private _taskClass rather than the taskClass property so
        # that pickling does not force an import of the task class as a side
        # effect; _reduceFactory re-derives it lazily on demand.
        return (self._reduceFactory,
                (self.taskName, self._taskClass, self.dataId, dict(self.initInputs.items()),
                 dict(self.inputs), dict(self.outputs)))

    @staticmethod
    def _reduceFactory(taskName: Optional[str],
                       taskClass: Optional[Type],
                       dataId: Optional[DataCoordinate],
                       initInputs: Optional[Union[Mapping[DatasetType, DatasetRef],
                                                  Iterable[DatasetRef]]],
                       inputs: Optional[Mapping[DatasetType, List[DatasetRef]]],
                       outputs: Optional[Mapping[DatasetType, List[DatasetRef]]]
                       ) -> Quantum:
        """Reconstruct a `Quantum` from its pickled state (see `__reduce__`)."""
        return Quantum(taskName=taskName, taskClass=taskClass, dataId=dataId, initInputs=initInputs,
                       inputs=inputs, outputs=outputs)