Coverage for python/lsst/daf/butler/transfers/_interfaces.py: 100%
27 statements
coverage.py v7.3.1, created at 2023-10-02 07:59 +0000
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This software is dual licensed under the GNU General Public License and also
# under a 3-clause BSD license. Recipients may choose which of these licenses
# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
# respectively. If you choose the GPL option then the following text applies
# (but note that there is still no warranty even if you opt for BSD instead):
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = ["RepoExportBackend", "RepoImportBackend", "RepoTransferFormatConfig"]

from abc import ABC, abstractmethod
from collections.abc import Iterable
from typing import TYPE_CHECKING

from ..core import (
    ConfigSubset,
    DatasetAssociation,
    DatasetType,
    Datastore,
    DimensionElement,
    DimensionRecord,
    FileDataset,
)
from ..registry import CollectionType
from ..registry.interfaces import CollectionRecord

if TYPE_CHECKING:
    from lsst.resources import ResourcePathExpression


class RepoTransferFormatConfig(ConfigSubset):
    """The section of butler configuration that associates repo import/export
    backends with file formats.
    """

    component = "repo_transfer_formats"
    defaultConfigFile = "repo_transfer_formats.yaml"
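
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of _interfaces.py): how a ``ConfigSubset``
# subclass such as ``RepoTransferFormatConfig`` is typically obtained.  The
# ``component`` attribute names the key under which this section lives in a
# parent butler configuration, and ``defaultConfigFile`` names the YAML file
# expected to supply defaults; the behavior described in the comments below
# is an assumption based on ``ConfigSubset``'s usual semantics, not something
# this module itself guarantees.
def _example_load_transfer_formats() -> RepoTransferFormatConfig:
    # Constructing with no arguments is expected to merge in the defaults
    # from ``repo_transfer_formats.yaml``; passing a full butler ``Config``
    # instead would extract the "repo_transfer_formats" component from it.
    return RepoTransferFormatConfig()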


class RepoExportBackend(ABC):
    """An abstract interface for data repository export implementations.

    Methods are guaranteed to be called in ways that reflect foreign key
    dependencies.
    """

    @abstractmethod
    def saveDimensionData(self, element: DimensionElement, *data: DimensionRecord) -> None:
        """Export one or more dimension element records.

        Parameters
        ----------
        element : `DimensionElement`
            The `DimensionElement` whose records are being exported.
        data : `DimensionRecord` (variadic)
            One or more records to export.
        """
        raise NotImplementedError()

    @abstractmethod
    def saveCollection(self, record: CollectionRecord, doc: str | None) -> None:
        """Export a collection.

        This only exports the collection's own state, not its associations
        with datasets.

        Parameters
        ----------
        record : `CollectionRecord`
            Object representing the collection to export.
        doc : `str` or `None`
            Documentation string for the collection.
        """
        raise NotImplementedError()

    @abstractmethod
    def saveDatasets(self, datasetType: DatasetType, run: str, *datasets: FileDataset) -> None:
        """Export one or more datasets, including their associated DatasetType
        and run information (but not including associated dimension
        information).

        Parameters
        ----------
        datasetType : `DatasetType`
            Type of all datasets being exported with this call.
        run : `str`
            Run associated with all datasets being exported with this call.
        datasets : `FileDataset`, variadic
            Per-dataset information to be exported. `FileDataset.formatter`
            attributes should be strings, not `Formatter` instances or
            classes.
        """
        raise NotImplementedError()

    @abstractmethod
    def saveDatasetAssociations(
        self, collection: str, collectionType: CollectionType, associations: Iterable[DatasetAssociation]
    ) -> None:
        """Export the dataset-collection associations for a single collection.

        Parameters
        ----------
        collection : `str`
            The name of the collection.
        collectionType : `CollectionType`
            The type of the collection; either `CollectionType.TAGGED` or
            `CollectionType.CALIBRATION` (as other collection types are
            exported in other ways).
        associations : `~collections.abc.Iterable` [ `DatasetAssociation` ]
            Structs, each representing an association between this collection
            and a single dataset.
        """
        raise NotImplementedError()

    @abstractmethod
    def finish(self) -> None:
        """Complete the export process."""
        raise NotImplementedError()
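
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of _interfaces.py): a minimal, hypothetical
# ``RepoExportBackend`` that just accumulates everything it is given and
# reports a summary in ``finish``.  Real backends serialize these objects to
# some output format instead; this skeleton only shows the call pattern the
# ABC defines, with exports arriving in an order that respects foreign-key
# dependencies (dimensions before datasets, collections before associations).
class _InMemoryExportBackendSketch(RepoExportBackend):
    def __init__(self) -> None:
        # Everything is kept in plain lists, in the order it was received.
        self.dimension_data: list[tuple[DimensionElement, tuple[DimensionRecord, ...]]] = []
        self.collections: list[tuple[CollectionRecord, str | None]] = []
        self.datasets: list[tuple[DatasetType, str, tuple[FileDataset, ...]]] = []
        self.associations: list[tuple[str, CollectionType, list[DatasetAssociation]]] = []

    def saveDimensionData(self, element: DimensionElement, *data: DimensionRecord) -> None:
        self.dimension_data.append((element, data))

    def saveCollection(self, record: CollectionRecord, doc: str | None) -> None:
        self.collections.append((record, doc))

    def saveDatasets(self, datasetType: DatasetType, run: str, *datasets: FileDataset) -> None:
        self.datasets.append((datasetType, run, datasets))

    def saveDatasetAssociations(
        self, collection: str, collectionType: CollectionType, associations: Iterable[DatasetAssociation]
    ) -> None:
        self.associations.append((collection, collectionType, list(associations)))

    def finish(self) -> None:
        # A real backend would flush its output file here.
        print(f"exported {sum(len(entry[2]) for entry in self.datasets)} datasets")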


class RepoImportBackend(ABC):
    """An abstract interface for data repository import implementations.

    Import backends are expected to be constructed with a description of
    the objects that need to be imported (from, e.g., a file written by the
    corresponding export backend), along with a `Registry`.
    """

    @abstractmethod
    def register(self) -> None:
        """Register all runs and dataset types associated with the backend
        with the `Registry` the backend was constructed with.

        These operations cannot be performed inside transactions, unlike those
        performed by `load`, and must in general be performed before `load`.
        """

    @abstractmethod
    def load(
        self,
        datastore: Datastore | None,
        *,
        directory: ResourcePathExpression | None = None,
        transfer: str | None = None,
        skip_dimensions: set | None = None,
    ) -> None:
        """Import information associated with the backend into the given
        registry and datastore.

        This must be run after `register`, and may be performed inside a
        transaction.

        Parameters
        ----------
        datastore : `Datastore` or `None`
            Datastore to import into. If `None`, datasets will only be
            inserted into the `Registry` (primarily intended for tests).
        directory : `~lsst.resources.ResourcePathExpression`, optional
            Directory all dataset paths are relative to.
        transfer : `str`, optional
            Transfer mode forwarded to `Datastore.ingest`.
        skip_dimensions : `set`, optional
            Dimensions that should be skipped and not imported. This can
            be useful when importing into a registry that already knows
            about a specific instrument.
        """
        raise NotImplementedError()
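

# ---------------------------------------------------------------------------
# Illustrative sketch (not part of _interfaces.py): the calling pattern the
# two-step ``register``/``load`` split is designed for.  ``registry`` is
# assumed to be an ``lsst.daf.butler.Registry`` with the usual
# ``transaction()`` context manager, and the ``directory`` and ``transfer``
# values are placeholders, not required settings.
def _example_import(backend: RepoImportBackend, datastore: Datastore, registry) -> None:
    # Runs and dataset types cannot be created inside a transaction, so
    # register them first.
    backend.register()
    # The remaining inserts and file ingests can then run atomically.
    with registry.transaction():
        backend.load(datastore, directory="exports/", transfer="copy")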