Coverage for python/lsst/daf/butler/transfers/_interfaces.py: 100%
26 statements
coverage.py v6.5.0, created at 2023-04-13 02:34 -0700

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = ["RepoExportBackend", "RepoImportBackend", "RepoTransferFormatConfig"]

from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, Iterable, Optional, Set

from ..core import (
    ConfigSubset,
    DatasetAssociation,
    DatasetType,
    Datastore,
    DimensionElement,
    DimensionRecord,
    FileDataset,
)
from ..registry import CollectionType
from ..registry.interfaces import CollectionRecord, DatasetIdGenEnum

if TYPE_CHECKING:
    from lsst.resources import ResourcePathExpression

class RepoTransferFormatConfig(ConfigSubset):
    """The section of butler configuration that associates repo import/export
    backends with file formats.
    """

    component = "repo_transfer_formats"
    defaultConfigFile = "repo_transfer_formats.yaml"
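
# Illustrative only: the ``repo_transfer_formats.yaml`` defaults are assumed
# to map each file-format name to the export/import backend classes that
# handle it, along the lines of
#
#     repo_transfer_formats:
#       yaml:
#         export: lsst.daf.butler.transfers.YamlRepoExportBackend
#         import: lsst.daf.butler.transfers.YamlRepoImportBackend
#
# The exact key layout is a guess; consult the shipped config file for the
# real structure.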

class RepoExportBackend(ABC):
    """An abstract interface for data repository export implementations.

    Methods are guaranteed to be called in an order that respects foreign
    key dependencies.
    """

    @abstractmethod
    def saveDimensionData(self, element: DimensionElement, *data: DimensionRecord) -> None:
        """Export one or more dimension element records.

        Parameters
        ----------
        element : `DimensionElement`
            The `DimensionElement` whose records are being exported.
        data : `DimensionRecord` (variadic)
            One or more records to export.
        """
        raise NotImplementedError()
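
    # Example (hypothetical caller, not part of this interface): export all
    # records for one element via
    #     backend.saveDimensionData(element, *records)
    # where ``records`` were fetched from the source registry, e.g. with
    # ``Registry.queryDimensionRecords`` (assumed here).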

    @abstractmethod
    def saveCollection(self, record: CollectionRecord, doc: Optional[str]) -> None:
        """Export a collection.

        This only exports the collection's own state, not its associations
        with datasets.

        Parameters
        ----------
        record : `CollectionRecord`
            Object representing the collection to export.
        doc : `str` or `None`
            Documentation string for the collection.
        """
        raise NotImplementedError()

    @abstractmethod
    def saveDatasets(self, datasetType: DatasetType, run: str, *datasets: FileDataset) -> None:
        """Export one or more datasets, including their associated
        `DatasetType` and run information (but not including associated
        dimension information).

        Parameters
        ----------
        datasetType : `DatasetType`
            Type of all datasets being exported with this call.
        run : `str`
            Run associated with all datasets being exported with this call.
        datasets : `FileDataset` (variadic)
            Per-dataset information to be exported. `FileDataset.formatter`
            attributes should be strings, not `Formatter` instances or
            classes.
        """
        raise NotImplementedError()
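
    # Example (hypothetical caller):
    #     backend.saveDatasets(flat_type, "HSC/calib", fds1, fds2)
    # where ``flat_type`` is a `DatasetType`, ``"HSC/calib"`` names a RUN
    # collection, and each `FileDataset` has had its ``formatter`` converted
    # to a fully-qualified class-name string.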

    @abstractmethod
    def saveDatasetAssociations(
        self, collection: str, collectionType: CollectionType, associations: Iterable[DatasetAssociation]
    ) -> None:
        """Export the dataset-collection associations for a single
        collection.

        Parameters
        ----------
        collection : `str`
            The name of the collection.
        collectionType : `CollectionType`
            The type of the collection; either `CollectionType.TAGGED` or
            `CollectionType.CALIBRATION` (as other collection types are
            exported in other ways).
        associations : `Iterable` [ `DatasetAssociation` ]
            Structs, each representing an association between this collection
            and one dataset.
        """
        raise NotImplementedError()
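
    # Example (hypothetical caller): for a TAGGED collection,
    #     backend.saveDatasetAssociations("best_seeing", CollectionType.TAGGED, assocs)
    # where ``assocs`` is an iterable of `DatasetAssociation` structs obtained
    # from the source registry.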

    @abstractmethod
    def finish(self) -> None:
        """Complete the export process."""
        raise NotImplementedError()
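

# A minimal sketch of a concrete export backend, illustrating the call
# sequence the interface implies: dimension records first, then collections,
# then datasets and their associations, then ``finish``. This class is
# hypothetical and not part of daf_butler; a real backend would serialize
# the objects rather than merely record the calls.
class _SketchExportBackend(RepoExportBackend):
    def __init__(self) -> None:
        # Ordered log of what was exported, for illustration.
        self.calls: list = []

    def saveDimensionData(self, element: DimensionElement, *data: DimensionRecord) -> None:
        self.calls.append(("dimensions", element.name, len(data)))

    def saveCollection(self, record: CollectionRecord, doc: Optional[str]) -> None:
        self.calls.append(("collection", record.name, doc))

    def saveDatasets(self, datasetType: DatasetType, run: str, *datasets: FileDataset) -> None:
        self.calls.append(("datasets", datasetType.name, run, len(datasets)))

    def saveDatasetAssociations(
        self, collection: str, collectionType: CollectionType, associations: Iterable[DatasetAssociation]
    ) -> None:
        self.calls.append(("associations", collection, collectionType.name))

    def finish(self) -> None:
        # A real implementation would flush buffered output here.
        self.calls.append(("finish",))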

class RepoImportBackend(ABC):
    """An abstract interface for data repository import implementations.

    Import backends are expected to be constructed with a description of
    the objects that need to be imported (from, e.g., a file written by the
    corresponding export backend), along with a `Registry`.
    """

    @abstractmethod
    def register(self) -> None:
        """Register all runs and dataset types associated with the backend
        in the `Registry` the backend was constructed with.

        These operations cannot be performed inside transactions, unlike
        those performed by `load`, and must in general be performed before
        `load`.
        """

    @abstractmethod
    def load(
        self,
        datastore: Optional[Datastore],
        *,
        directory: ResourcePathExpression | None = None,
        transfer: Optional[str] = None,
        skip_dimensions: Optional[Set] = None,
        idGenerationMode: DatasetIdGenEnum = DatasetIdGenEnum.UNIQUE,
        reuseIds: bool = False,
    ) -> None:
        """Import information associated with the backend into the given
        registry and datastore.

        This must be run after `register`, and may be performed inside a
        transaction.

        Parameters
        ----------
        datastore : `Datastore` or `None`
            Datastore to import into. If `None`, datasets will only be
            inserted into the `Registry` (primarily intended for tests).
        directory : `~lsst.resources.ResourcePathExpression`, optional
            Directory all dataset paths are relative to.
        transfer : `str`, optional
            Transfer mode forwarded to `Datastore.ingest`.
        skip_dimensions : `set`, optional
            Dimensions that should be skipped and not imported. This can
            be useful when importing into a registry that already knows
            about a specific instrument.
        idGenerationMode : `DatasetIdGenEnum`, optional
            Specifies how dataset IDs are generated when they are not
            provided or their type does not match the backend type. By
            default a unique ID is generated for each inserted dataset.
        reuseIds : `bool`, optional
            If `True`, force re-use of imported dataset IDs for integer IDs,
            which are normally generated as auto-increment values. This
            option has no effect on globally-unique IDs, which are always
            re-used (or generated if integer IDs are being imported).
        """
        raise NotImplementedError()
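

# Usage sketch (hypothetical driver code): ``register`` must run outside any
# transaction, after which ``load`` may run inside one. ``SomeImportBackend``
# and its constructor arguments are assumptions for illustration only.
#
#     backend = SomeImportBackend(stream, registry)
#     backend.register()
#     with registry.transaction():
#         backend.load(datastore, directory="exports/", transfer="copy")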