Coverage for python/lsst/daf/butler/transfers/_interfaces.py: 100%
32 statements
coverage.py v7.13.5, created at 2026-04-28 08:36 +0000
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This software is dual licensed under the GNU General Public License and also
# under a 3-clause BSD license. Recipients may choose which of these licenses
# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
# respectively. If you choose the GPL option then the following text applies
# (but note that there is still no warranty even if you opt for BSD instead):
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = ["RepoExportBackend", "RepoImportBackend", "RepoTransferFormatConfig"]

from abc import ABC, abstractmethod
from collections.abc import Iterable
from typing import TYPE_CHECKING

from .._collection_type import CollectionType
from .._config import ConfigSubset
from .._dataset_association import DatasetAssociation
from .._dataset_type import DatasetType
from .._file_dataset import FileDataset
from ..datastore import Datastore
from ..dimensions import DimensionElement, DimensionRecord
from ..registry.interfaces import CollectionRecord

if TYPE_CHECKING:
    from lsst.resources import ResourcePathExpression


class RepoTransferFormatConfig(ConfigSubset):
    """The section of butler configuration that associates repo import/export
    backends with file formats.
    """

    component = "repo_transfer_formats"
    defaultConfigFile = "repo_transfer_formats.yaml"
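
# A hedged sketch of the corresponding configuration section.  The keys below
# are an assumption about the shape of ``repo_transfer_formats.yaml`` (consult
# the packaged defaults for the authoritative schema); the class paths shown
# are the YAML backends shipped with daf_butler:
#
#     repo_transfer_formats:
#       yaml:
#         export: lsst.daf.butler.transfers.YamlRepoExportBackend
#         import: lsst.daf.butler.transfers.YamlRepoImportBackend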


class RepoExportBackend(ABC):
    """An abstract interface for data repository export implementations.

    Methods are guaranteed to be called in an order that respects foreign-key
    dependencies.
    """

    @abstractmethod
    def saveDimensionData(self, element: DimensionElement, *data: DimensionRecord) -> None:
        """Export one or more dimension element records.

        Parameters
        ----------
        element : `DimensionElement`
            The `DimensionElement` whose records are being exported.
        *data : `DimensionRecord` (variadic)
            One or more records to export.
        """
        raise NotImplementedError()
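
    # A minimal usage sketch (``backend`` and ``butler`` are assumed to exist
    # and are illustrative, not part of this interface):
    #
    #     element = butler.dimensions["detector"]
    #     records = butler.registry.queryDimensionRecords("detector", instrument="HSC")
    #     backend.saveDimensionData(element, *records)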

    @abstractmethod
    def saveCollection(self, record: CollectionRecord, doc: str | None) -> None:
        """Export a collection.

        This only exports the collection's own state, not its associations
        with datasets.

        Parameters
        ----------
        record : `CollectionRecord`
            Object representing the collection to export.
        doc : `str` or `None`
            Documentation string for the collection.
        """
        raise NotImplementedError()

    @abstractmethod
    def saveDatasets(self, datasetType: DatasetType, run: str, *datasets: FileDataset) -> None:
        """Export one or more datasets, including their associated DatasetType
        and run information (but not including associated dimension
        information).

        Parameters
        ----------
        datasetType : `DatasetType`
            Type of all datasets being exported with this call.
        run : `str`
            Run associated with all datasets being exported with this call.
        *datasets : `FileDataset`, variadic
            Per-dataset information to be exported. `FileDataset.formatter`
            attributes should be strings, not `Formatter` instances or
            classes.
        """
        raise NotImplementedError()
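
    # Sketch of a single export call; ``ref`` is assumed to be an existing
    # `DatasetRef` in run "HSC/runs/demo", and the path and formatter string
    # below are purely illustrative:
    #
    #     dataset = FileDataset(
    #         path="datasets/flat_r_001.fits",
    #         refs=[ref],
    #         formatter="lsst.obs.base.formatters.fitsExposure.FitsExposureFormatter",
    #     )
    #     backend.saveDatasets(ref.datasetType, "HSC/runs/demo", dataset)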

    @abstractmethod
    def saveDatasetAssociations(
        self, collection: str, collectionType: CollectionType, associations: Iterable[DatasetAssociation]
    ) -> None:
        """Export the dataset-collection associations for a single collection.

        Parameters
        ----------
        collection : `str`
            The name of the collection.
        collectionType : `CollectionType`
            The type of the collection; either `CollectionType.TAGGED` or
            `CollectionType.CALIBRATION` (as other collection types are
            exported in other ways).
        associations : `~collections.abc.Iterable` [ `DatasetAssociation` ]
            Structs each representing an association between this collection
            and one of its datasets.
        """
        raise NotImplementedError()
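
    # Illustrative construction of one association for a TAGGED collection
    # (the field names follow the `DatasetAssociation` struct; the `None`
    # timespan is an assumption appropriate outside CALIBRATION collections):
    #
    #     assoc = DatasetAssociation(ref=ref, collection="tagged/good-seeing", timespan=None)
    #     backend.saveDatasetAssociations("tagged/good-seeing", CollectionType.TAGGED, [assoc])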

    @abstractmethod
    def finish(self) -> None:
        """Complete the export process."""
        raise NotImplementedError()
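
# Putting the export interface together: a hedged sketch of how a caller such
# as `Butler.export` might drive a backend, in an order that respects the
# foreign-key dependencies noted above (all loop variables are hypothetical):
#
#     for element, records in dimension_data.items():
#         backend.saveDimensionData(element, *records)
#     for record, doc in collection_records:
#         backend.saveCollection(record, doc)
#     for dataset_type, run, file_datasets in datasets_by_type_and_run:
#         backend.saveDatasets(dataset_type, run, *file_datasets)
#     for name, ctype, assocs in associations_by_collection:
#         backend.saveDatasetAssociations(name, ctype, assocs)
#     backend.finish()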


class RepoImportBackend(ABC):
    """An abstract interface for data repository import implementations.

    Import backends are expected to be constructed with a description of
    the objects that need to be imported (from, e.g., a file written by the
    corresponding export backend), along with a `Butler`.
    """

    @abstractmethod
    def register(self) -> None:
        """Register all runs and dataset types associated with the backend
        in the `Butler` the backend was constructed with.

        These operations cannot be performed inside transactions, unlike those
        performed by `load`, and must in general be performed before `load`.
        """
        raise NotImplementedError()

    @abstractmethod
    def load(
        self,
        datastore: Datastore | None,
        *,
        directory: ResourcePathExpression | None = None,
        transfer: str | None = None,
        skip_dimensions: set | None = None,
        record_validation_info: bool = True,
    ) -> None:
        """Import information associated with the backend into the given
        registry and datastore.

        This must be run after `register`, and may be performed inside a
        transaction.

        Parameters
        ----------
        datastore : `Datastore` or `None`
            Datastore to import into. If `None`, datasets will only be
            inserted into the `Registry` (primarily intended for tests).
        directory : `~lsst.resources.ResourcePathExpression`, optional
            Directory all dataset paths are relative to.
        transfer : `str`, optional
            Transfer mode forwarded to `Datastore.ingest`.
        skip_dimensions : `set`, optional
            Dimensions that should be skipped and not imported. This can
            be useful when importing into a registry that already knows
            about a specific instrument.
        record_validation_info : `bool`, optional
            If `True` (the default), the datastore may record validation
            information associated with the file. If `False`, the datastore
            will not attempt to track any information such as checksums
            or file sizes. This can be useful if such information is tracked
            in an external system or if the file is to be compressed in
            place. It is up to the underlying datastore whether this
            parameter is relevant.
        """
        raise NotImplementedError()
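
# A hedged sketch of the expected calling sequence on the import side
# (``SomeImportBackend`` and its constructor signature are assumptions;
# concrete callers live in places like `Butler.import_`):
#
#     backend = SomeImportBackend(stream, butler)
#     backend.register()                      # must run outside a transaction
#     with butler.transaction():
#         backend.load(datastore, directory=root, transfer="auto")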