"""Module defining connection types to be used within a
`PipelineTaskConnections` class.
"""
# Public API of this module; base helper classes that are not listed here
# (e.g. the dimensioned/input intermediates) are implementation details.
__all__ = ["InitInput",
           "InitOutput",
           "Input",
           "PrerequisiteInput",
           "Output",
           "BaseConnection"]
import dataclasses
import typing
from typing import Callable, Iterable, Optional
from collections.abc import Mapping

from lsst.daf.butler import (DatasetType, DimensionUniverse, Registry,
                             ExpandedDataCoordinate, DatasetRef)
@dataclasses.dataclass(frozen=True)
class BaseConnection:
    """Base class used for declaring PipelineTask connections.

    Parameters
    ----------
    name : `str`
        The name used to identify the dataset type.
    storageClass : `str`
        The storage class used when (un)/persisting the dataset type.
    multiple : `bool`
        Indicates if this connection should expect to contain multiple objects
        of the given dataset type.
    """
    name: str
    storageClass: str
    multiple: bool = False

    def __get__(self, inst, klass):
        """Descriptor method.

        This is a method used to turn a connection into a descriptor.
        When a connection is added to a connection class, it is a class level
        variable. This method makes accessing this connection, on the
        instance of the connection class owning this connection, return a
        result specialized for that instance. In the case of connections
        this specifically means names specified in a config instance will
        be visible instead of the default names for the connection.
        """
        # Lazily create a per-instance cache of specialized connections;
        # object.__setattr__ is used so this works even if the owning
        # instance restricts ordinary attribute assignment.
        if not hasattr(inst, '_connectionCache'):
            object.__setattr__(inst, '_connectionCache', {})
        # Connections are cached by identity: each class-level connection
        # maps to exactly one specialized copy per owning instance.
        idSelf = id(self)
        if idSelf in inst._connectionCache:
            return inst._connectionCache[idSelf]
        # Copy every dataclass field of this connection, then substitute the
        # name override supplied by the owning instance's configuration.
        # NOTE(review): ``varName`` is assumed to be set externally on this
        # connection (the attribute name it was declared under in the owning
        # connections class) — that assignment is not visible in this chunk.
        params = {}
        for field in dataclasses.fields(self):
            params[field.name] = getattr(self, field.name)
        params['name'] = inst._nameOverrides[self.varName]
        # Cache and return the specialized copy of this connection.
        return inst._connectionCache.setdefault(idSelf, self.__class__(**params))
# NOTE(review): this class's header line is not visible in this chunk; the
# name is reconstructed (it is not exported in __all__) — confirm upstream.
@dataclasses.dataclass(frozen=True)
class DimensionedConnection(BaseConnection):
    """Class used for declaring PipelineTask connections that includes
    dimensions.

    Parameters
    ----------
    name : `str`
        The name used to identify the dataset type.
    storageClass : `str`
        The storage class used when (un)/persisting the dataset type.
    multiple : `bool`
        Indicates if this connection should expect to contain multiple objects
        of the given dataset type.
    dimensions : iterable of `str`
        The `lsst.daf.butler.Butler` `lsst.daf.butler.Registry` dimensions used
        to identify the dataset type identified by the specified name.
    """
    dimensions: typing.Iterable[str] = ()

    def makeDatasetType(self, universe):
        """Construct a true `DatasetType` instance with normalized dimensions.

        Parameters
        ----------
        universe : `lsst.daf.butler.DimensionUniverse`
            Set of all known dimensions to be used to normalize the dimension
            names specified in config.

        Returns
        -------
        datasetType : `DatasetType`
            The `DatasetType` defined by this connection.
        """
        # NOTE(review): the third constructor argument is elided in this
        # chunk; self.storageClass is the only field that fits — confirm.
        return DatasetType(self.name,
                           universe.extract(self.dimensions),
                           self.storageClass)
# NOTE(review): header line not visible in this chunk; name reconstructed
# (not exported in __all__) — confirm upstream.
@dataclasses.dataclass(frozen=True)
class BaseInput(DimensionedConnection):
    """Class used for declaring PipelineTask input connections.

    Parameters
    ----------
    name : `str`
        The default name used to identify the dataset type.
    storageClass : `str`
        The storage class used when (un)/persisting the dataset type.
    multiple : `bool`
        Indicates if this connection should expect to contain multiple objects
        of the given dataset type.
    dimensions : iterable of `str`
        The `lsst.daf.butler.Butler` `lsst.daf.butler.Registry` dimensions used
        to identify the dataset type identified by the specified name.
    deferLoad : `bool`
        Indicates that this dataset type will be loaded as a
        `lsst.daf.butler.DeferredDatasetHandle`. PipelineTasks can use this
        object to load the object at a later time.
    """
    deferLoad: bool = False
@dataclasses.dataclass(frozen=True)
class Input(BaseInput):
    # NOTE(review): the class body is not visible in this chunk; it is
    # reconstructed as a plain specialization of the input base class, as
    # implied by its presence in __all__ — confirm upstream.
    pass
@dataclasses.dataclass(frozen=True)
class PrerequisiteInput(BaseInput):
    """Class used for declaring PipelineTask prerequisite connections.

    Parameters
    ----------
    name : `str`
        The default name used to identify the dataset type.
    storageClass : `str`
        The storage class used when (un)/persisting the dataset type.
    multiple : `bool`
        Indicates if this connection should expect to contain multiple objects
        of the given dataset type.
    dimensions : iterable of `str`
        The `lsst.daf.butler.Butler` `lsst.daf.butler.Registry` dimensions used
        to identify the dataset type identified by the specified name.
    deferLoad : `bool`
        Indicates that this dataset type will be loaded as a
        `lsst.daf.butler.DeferredDatasetHandle`. PipelineTasks can use this
        object to load the object at a later time.
    lookupFunction : `typing.Callable`
        An optional callable function that will look up PrerequisiteInputs
        using the DatasetType, registry, quantum dataId, and input collections
        passed to it. If no function is specified, the default temporal spatial
        lookup will be used.
    """
    lookupFunction: Optional[Callable[[DatasetType, Registry, ExpandedDataCoordinate, Mapping],
                                      Iterable[DatasetRef]]] = None
@dataclasses.dataclass(frozen=True)
class Output(DimensionedConnection):
    # NOTE(review): the class body is not visible in this chunk; it is
    # reconstructed as a plain dimensioned output connection, as implied by
    # its presence in __all__ — confirm upstream.
    pass
@dataclasses.dataclass(frozen=True)
class InitInput(BaseConnection):
    # NOTE(review): the class body is not visible in this chunk; init-input
    # connections carry no dimensions, so this is reconstructed as a plain
    # specialization of BaseConnection per __all__ — confirm upstream.
    pass
196 @dataclasses.dataclass(frozen=
True)