Coverage for python/lsst/daf/butler/transfers/_interfaces.py : 77%

Hot-keys on this page
r m x p toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <http://www.gnu.org/licenses/>.
22from __future__ import annotations
24__all__ = ["RepoExportBackend", "RepoImportBackend", "RepoTransferFormatConfig"]
26from abc import ABC, abstractmethod
27from typing import (
28 Iterable,
29 Optional,
30 Set,
31)
33from ..core import (
34 ConfigSubset,
35 DatasetAssociation,
36 DatasetType,
37 Datastore,
38 DimensionElement,
39 DimensionRecord,
40 FileDataset,
41)
43from ..registry import CollectionType
44from ..registry.interfaces import CollectionRecord
class RepoTransferFormatConfig(ConfigSubset):
    """Butler configuration section that maps file formats to the
    repository import/export backends responsible for them.
    """
    # Key under which this section appears in the butler configuration tree.
    component = "repo_transfer_formats"
    # File providing the built-in defaults for this section.
    defaultConfigFile = "repo_transfer_formats.yaml"
class RepoExportBackend(ABC):
    """An abstract interface for data repository export implementations.

    Callers are guaranteed to invoke these methods in an order consistent
    with foreign key dependencies.
    """

    @abstractmethod
    def saveDimensionData(self, element: DimensionElement, *data: DimensionRecord) -> None:
        """Export records for a single dimension element.

        Parameters
        ----------
        element : `DimensionElement`
            The `DimensionElement` whose records are being exported.
        data : `DimensionRecord` (variadic)
            Records to export.
        """
        raise NotImplementedError()

    @abstractmethod
    def saveCollection(self, record: CollectionRecord) -> None:
        """Export a single collection.

        Only the collection's own state is exported here, not its
        associations with datasets.

        Parameters
        ----------
        record: `CollectionRecord`
            Object representing the collection to export.
        """
        raise NotImplementedError()

    @abstractmethod
    def saveDatasets(self, datasetType: DatasetType, run: str, *datasets: FileDataset) -> None:
        """Export datasets together with their dataset type and run
        information (associated dimension information is not included).

        Parameters
        ----------
        datasetType : `DatasetType`
            Type shared by every dataset exported in this call.
        run : `str`
            Run shared by every dataset exported in this call.
        datasets : `FileDataset`, variadic
            Per-dataset information to be exported.  `FileDataset.formatter`
            attributes should be strings, not `Formatter` instances or
            classes.
        """
        raise NotImplementedError()

    @abstractmethod
    def saveDatasetAssociations(self, collection: str, collectionType: CollectionType,
                                associations: Iterable[DatasetAssociation]) -> None:
        """Export the dataset-collection associations for one collection.

        Parameters
        ----------
        collection : `str`
            Name of the collection.
        collectionType : `CollectionType`
            Type of the collection; either `CollectionType.TAGGED` or
            `CollectionType.CALIBRATION`, as other collection types are
            exported in other ways.
        associations : `Iterable` [ `DatasetAssociation` ]
            Structs describing each association between this collection
            and a dataset.
        """
        raise NotImplementedError()

    @abstractmethod
    def finish(self) -> None:
        """Complete the export process.
        """
        raise NotImplementedError()
class RepoImportBackend(ABC):
    """An abstract interface for data repository import implementations.

    Import backends are expected to be constructed with a description of
    the objects that need to be imported (from, e.g., a file written by the
    corresponding export backend), along with a `Registry`.
    """

    @abstractmethod
    def register(self) -> None:
        """Register all runs and dataset types associated with the backend with
        the `Registry` the backend was constructed with.

        These operations cannot be performed inside transactions, unlike those
        performed by `load`, and must in general be performed before `load`.
        """
        # Raise like every other abstract method in this module, so a
        # subclass that forgets to override and falls through to super()
        # fails loudly instead of silently doing nothing.
        raise NotImplementedError()

    @abstractmethod
    def load(self, datastore: Optional[Datastore], *,
             directory: Optional[str] = None, transfer: Optional[str] = None,
             skip_dimensions: Optional[Set] = None) -> None:
        """Import information associated with the backend into the given
        registry and datastore.

        This must be run after `register`, and may be performed inside a
        transaction.

        Parameters
        ----------
        datastore : `Datastore`, optional
            Datastore to import into.  If `None`, datasets will only be
            inserted into the `Registry` (primarily intended for tests).
        directory : `str`, optional
            Directory all dataset paths are relative to.
        transfer : `str`, optional
            Transfer mode forwarded to `Datastore.ingest`.
        skip_dimensions : `set`, optional
            Dimensions that should be skipped and not imported.  This can
            be useful when importing into a registry that already knows
            about a specific instrument.
        """
        raise NotImplementedError()