Coverage for python/lsst/daf/butler/core/quantum.py : 27%

Hot-keys on this page
r m x p toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
22from __future__ import annotations
24__all__ = ("Quantum",)
26from typing import (
27 Any,
28 Iterable,
29 List,
30 Mapping,
31 Optional,
32 Tuple,
33 Type,
34 Union,
35)
37from lsst.utils import doImport
39from .datasets import DatasetRef, DatasetType
40from .dimensions import DataCoordinate
41from .named import NamedKeyDict, NamedKeyMapping
class Quantum:
    """A discrete unit of work that may depend on one or more datasets and
    produces one or more datasets.

    Most Quanta will be executions of a particular ``PipelineTask``'s
    ``runQuantum`` method, but they can also be used to represent discrete
    units of work performed manually by human operators or other software
    agents.

    Parameters
    ----------
    taskName : `str`, optional
        Fully-qualified name of the Task class that executed or will execute
        this Quantum.  If not provided, ``taskClass`` must be.
    taskClass : `type`, optional
        The Task class that executed or will execute this Quantum.  If not
        provided, ``taskName`` must be.  Overrides ``taskName`` if both are
        provided.
    dataId : `DataId`, optional
        The dimension values that identify this `Quantum`.
    initInputs : collection of `DatasetRef`, optional
        Datasets that are needed to construct an instance of the Task.  May
        be a flat iterable of `DatasetRef` instances or a mapping from
        `DatasetType` to `DatasetRef`.
    inputs : `~collections.abc.Mapping`, optional
        Inputs identified prior to execution, organized as a mapping from
        `DatasetType` to a list of `DatasetRef`.
    outputs : `~collections.abc.Mapping`, optional
        Outputs from executing this quantum of work, organized as a mapping
        from `DatasetType` to a list of `DatasetRef`.

    Raises
    ------
    ValueError
        Raised if neither ``taskName`` nor ``taskClass`` is provided.
    """

    # NOTE(review): the ``_hash`` slot is declared but never assigned in this
    # view of the file; it is kept to preserve the class's memory layout.
    __slots__ = ("_taskName", "_taskClass", "_dataId", "_initInputs", "_inputs", "_outputs", "_hash")

    def __init__(self, *, taskName: Optional[str] = None,
                 taskClass: Optional[Type] = None,
                 dataId: Optional[DataCoordinate] = None,
                 initInputs: Optional[Union[Mapping[DatasetType, DatasetRef],
                                            Iterable[DatasetRef]]] = None,
                 inputs: Optional[Mapping[DatasetType, List[DatasetRef]]] = None,
                 outputs: Optional[Mapping[DatasetType, List[DatasetRef]]] = None,
                 ):
        if taskClass is not None:
            # ``taskClass`` wins over ``taskName``: derive the qualified name
            # from the class itself so the two can never disagree.
            taskName = f"{taskClass.__module__}.{taskClass.__name__}"
        if taskName is None:
            # Fail fast here rather than letting the lazy ``doImport`` in the
            # ``taskClass`` property crash later with a confusing error far
            # from the construction site.
            raise ValueError("At least one of taskName and taskClass must be provided.")
        self._taskName = taskName
        self._taskClass = taskClass
        self._dataId = dataId
        if initInputs is None:
            initInputs = {}
        elif not isinstance(initInputs, Mapping):
            # Accept a flat iterable of refs by keying each on its dataset
            # type; later refs of the same type silently replace earlier ones.
            initInputs = {ref.datasetType: ref for ref in initInputs}
        if inputs is None:
            inputs = {}
        if outputs is None:
            outputs = {}
        # Freeze the mappings so callers cannot mutate them after
        # construction (the nested lists remain mutable, however).
        self._initInputs = NamedKeyDict[DatasetType, DatasetRef](initInputs).freeze()
        self._inputs = NamedKeyDict[DatasetType, List[DatasetRef]](inputs).freeze()
        self._outputs = NamedKeyDict[DatasetType, List[DatasetRef]](outputs).freeze()

    @property
    def taskClass(self) -> Optional[Type]:
        """Task class associated with this `Quantum` (`type`).

        Imported lazily from ``taskName`` on first access if the class was
        not supplied at construction.
        """
        if self._taskClass is None:
            self._taskClass = doImport(self._taskName)
        return self._taskClass

    @property
    def taskName(self) -> Optional[str]:
        """Fully-qualified name of the task associated with `Quantum` (`str`).
        """
        return self._taskName

    @property
    def dataId(self) -> Optional[DataCoordinate]:
        """The dimension values of the unit of processing (`DataId`).
        """
        return self._dataId

    @property
    def initInputs(self) -> NamedKeyMapping[DatasetType, DatasetRef]:
        """A mapping of datasets used to construct the Task,
        with `DatasetType` instances as keys (names can also be used for
        lookups) and `DatasetRef` instances as values.
        """
        return self._initInputs

    @property
    def inputs(self) -> NamedKeyMapping[DatasetType, List[DatasetRef]]:
        """A mapping of input datasets that were expected to be used,
        with `DatasetType` instances as keys (names can also be used for
        lookups) and a list of `DatasetRef` instances as values.

        Notes
        -----
        We cannot use `set` instead of `list` for the nested container because
        `DatasetRef` instances cannot be compared reliably when some have
        integers IDs and others do not.
        """
        return self._inputs

    @property
    def outputs(self) -> NamedKeyMapping[DatasetType, List[DatasetRef]]:
        """A mapping of output datasets (to be) generated for this quantum,
        with the same form as `predictedInputs`.

        Notes
        -----
        We cannot use `set` instead of `list` for the nested container because
        `DatasetRef` instances cannot be compared reliably when some have
        integers IDs and others do not.
        """
        return self._outputs

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Quantum):
            return False
        # Comparing ``taskClass`` (not ``_taskClass``) may trigger a lazy
        # import, but guarantees instances built from a name and from a class
        # compare equal.
        for item in ("taskClass", "dataId", "initInputs", "inputs", "outputs"):
            if getattr(self, item) != getattr(other, item):
                return False
        return True

    def __hash__(self) -> int:
        # Only the task and data ID participate in the hash; the dataset
        # mappings are excluded (they are not reliably hashable).
        return hash((self.taskClass, self.dataId))

    def __reduce__(self) -> Union[str, Tuple[Any, ...]]:
        # Pickle via a factory staticmethod so the frozen NamedKeyDicts are
        # rebuilt (as plain dicts here) by ``__init__`` on unpickling.
        return (self._reduceFactory,
                (self.taskName, self.taskClass, self.dataId, dict(self.initInputs.items()),
                 dict(self.inputs), dict(self.outputs)))

    @staticmethod
    def _reduceFactory(taskName: Optional[str],
                       taskClass: Optional[Type],
                       dataId: Optional[DataCoordinate],
                       initInputs: Optional[Union[Mapping[DatasetType, DatasetRef],
                                                  Iterable[DatasetRef]]],
                       inputs: Optional[Mapping[DatasetType, List[DatasetRef]]],
                       outputs: Optional[Mapping[DatasetType, List[DatasetRef]]]
                       ) -> Quantum:
        # Helper for ``__reduce__``: simply forwards to the keyword-only
        # constructor.
        return Quantum(taskName=taskName, taskClass=taskClass, dataId=dataId, initInputs=initInputs,
                       inputs=inputs, outputs=outputs)