22 """Module defining connection types to be used within a
23 `PipelineTaskConnections` class.
__all__ = ["InitInput", "InitOutput", "Input", "PrerequisiteInput",
           "Output", "BaseConnection"]

import dataclasses
import typing
from typing import Callable, Iterable, Optional

from lsst.daf.butler import (
    CollectionSearch,
    DataCoordinate,
    DatasetRef,
    DatasetType,
    DimensionUniverse,
    Registry,
)


@dataclasses.dataclass(frozen=True)
class BaseConnection:
    """Base class used for declaring PipelineTask connections

    Parameters
    ----------
    name : `str`
        The name used to identify the dataset type
    storageClass : `str`
        The storage class used when (un)persisting the dataset type
    multiple : `bool`
        Indicates if this connection should expect to contain multiple objects
        of the given dataset type
    """
    name: str
    storageClass: str
    multiple: bool = False

    def __get__(self, inst, klass):
        """This is a method used to turn a connection into a descriptor.
        When a connection is added to a connection class, it is a class level
        variable. This method makes accessing this connection, on the
        instance of the connection class owning this connection, return a
        result specialized for that instance. In the case of connections
        this specifically means names specified in a config instance will
        be visible instead of the default names for the connection.
        """
        # When accessed through the class rather than an instance, return
        # the connection itself.
        if inst is None:
            return self
        # Create a cache on the connection-class instance the first time a
        # connection is accessed through it.
        if not hasattr(inst, '_connectionCache'):
            object.__setattr__(inst, '_connectionCache', {})
        # Return a previously specialized copy of this connection, if any.
        idSelf = id(self)
        if idSelf in inst._connectionCache:
            return inst._connectionCache[idSelf]
        # Gather the fields that define this connection, substituting the
        # name override recorded on the connection-class instance.
        params = {}
        for field in dataclasses.fields(self):
            params[field.name] = getattr(self, field.name)
        params['name'] = inst._nameOverrides[self.varName]
        # Cache and return a copy of this connection specialized for inst.
        return inst._connectionCache.setdefault(idSelf, self.__class__(**params))

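# Illustrative sketch (not executed): how the descriptor behavior above is
# typically seen from a `PipelineTaskConnections` subclass. The class and
# dataset names below are hypothetical, and the ``_nameOverrides`` and
# ``varName`` attributes referenced in ``__get__`` are assumed to be populated
# by the connections-class machinery when it is instantiated with a config.
#
#     class ExampleConnections(PipelineTaskConnections,
#                              dimensions=("instrument", "visit", "detector")):
#         calexp = Input(name="calexp",
#                        storageClass="ExposureF",
#                        dimensions=("instrument", "visit", "detector"))
#
#     connections = ExampleConnections(config=config)
#     # Accessing the connection on the instance returns a copy whose ``name``
#     # reflects any override set in ``config`` rather than the default above.
#     connections.calexp.name
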

@dataclasses.dataclass(frozen=True)
class DimensionedConnection(BaseConnection):
    """Class used for declaring PipelineTask connections that includes
    dimensions

    Parameters
    ----------
    name : `str`
        The name used to identify the dataset type
    storageClass : `str`
        The storage class used when (un)persisting the dataset type
    multiple : `bool`
        Indicates if this connection should expect to contain multiple objects
        of the given dataset type
    dimensions : iterable of `str`
        The `lsst.daf.butler.Registry` dimensions used to identify the dataset
        type given by the specified name
    """
    dimensions: typing.Iterable[str] = ()

    def __post_init__(self):
        if isinstance(self.dimensions, str):
            raise TypeError("Dimensions must be iterable of dimensions, got str, "
                            "possibly omitted trailing comma")
        if not isinstance(self.dimensions, typing.Iterable):
            raise TypeError("Dimensions must be iterable of dimensions")

124 """Construct a true `DatasetType` instance with normalized dimensions.
127 universe : `lsst.daf.butler.DimensionUniverse`
128 Set of all known dimensions to be used to normalize the dimension
129 names specified in config.
132 datasetType : `DatasetType`
133 The `DatasetType` defined by this connection.
135 return DatasetType(self.name,
136 universe.extract(self.dimensions),
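# Illustrative sketch (not executed): turning a connection into a concrete
# `DatasetType` with ``makeDatasetType``. The connection below is hypothetical;
# taking the `DimensionUniverse` from a butler registry is one assumed way to
# get a universe consistent with the data repository in use.
#
#     connection = Input(name="calexp",
#                        storageClass="ExposureF",
#                        dimensions=("instrument", "visit", "detector"))
#     datasetType = connection.makeDatasetType(butler.registry.dimensions)
#
# Note that ``dimensions="visit"`` (a bare string) would raise the `TypeError`
# from ``__post_init__`` above; use an iterable such as ``("visit",)`` instead.
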

@dataclasses.dataclass(frozen=True)
class BaseInput(DimensionedConnection):
    """Class used for declaring PipelineTask input connections

    Parameters
    ----------
    name : `str`
        The default name used to identify the dataset type
    storageClass : `str`
        The storage class used when (un)persisting the dataset type
    multiple : `bool`
        Indicates if this connection should expect to contain multiple objects
        of the given dataset type
    dimensions : iterable of `str`
        The `lsst.daf.butler.Registry` dimensions used to identify the dataset
        type given by the specified name
    deferLoad : `bool`
        Indicates that this dataset type will be loaded as a
        `lsst.daf.butler.DeferredDatasetHandle`. PipelineTasks can use this
        object to load the object at a later time.
    """
    deferLoad: bool = False

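# Illustrative sketch (not executed): declaring an input whose datasets are
# passed to the task as `lsst.daf.butler.DeferredDatasetHandle` objects
# instead of being read up front. The names used here are hypothetical.
#
#     backgrounds = Input(name="calexpBackground",
#                         storageClass="Background",
#                         dimensions=("instrument", "visit", "detector"),
#                         multiple=True,
#                         deferLoad=True)
#
# Inside ``runQuantum`` the task would then call ``get()`` on each handle only
# when (and if) the corresponding dataset is actually needed.
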

@dataclasses.dataclass(frozen=True)
class Input(BaseInput):
    pass


@dataclasses.dataclass(frozen=True)
class PrerequisiteInput(BaseInput):
    """Class used for declaring PipelineTask prerequisite connections

    Parameters
    ----------
    name : `str`
        The default name used to identify the dataset type
    storageClass : `str`
        The storage class used when (un)persisting the dataset type
    multiple : `bool`
        Indicates if this connection should expect to contain multiple objects
        of the given dataset type
    dimensions : iterable of `str`
        The `lsst.daf.butler.Registry` dimensions used to identify the dataset
        type given by the specified name
    deferLoad : `bool`
        Indicates that this dataset type will be loaded as a
        `lsst.daf.butler.DeferredDatasetHandle`. PipelineTasks can use this
        object to load the object at a later time.
    lookupFunction : `typing.Callable`, optional
        An optional callable that will look up PrerequisiteInputs using the
        DatasetType, registry, quantum dataId, and input collections passed
        to it. If no function is specified, the default temporal/spatial
        lookup will be used.
    """
    lookupFunction: Optional[Callable[[DatasetType, Registry, DataCoordinate, CollectionSearch],
                                      Iterable[DatasetRef]]] = None

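# Illustrative sketch (not executed): a custom lookup matching the
# ``lookupFunction`` signature above. Using ``registry.queryDatasets`` is an
# assumption about how such a lookup might be written; any callable returning
# an iterable of `DatasetRef` objects will do. Dataset and dimension names
# here are hypothetical.
#
#     def _lookupRefCats(datasetType, registry, quantumDataId, collections):
#         return registry.queryDatasets(datasetType,
#                                        collections=collections,
#                                        dataId=quantumDataId)
#
#     refCat = PrerequisiteInput(name="gaia_dr2",
#                                storageClass="SimpleCatalog",
#                                dimensions=("skypix",),
#                                multiple=True,
#                                lookupFunction=_lookupRefCats)
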

@dataclasses.dataclass(frozen=True)
class Output(DimensionedConnection):
    pass


@dataclasses.dataclass(frozen=True)
class InitInput(BaseConnection):
    pass


@dataclasses.dataclass(frozen=True)
class InitOutput(BaseConnection):
    pass