Coverage for python/lsst/daf/butler/transfers/_interfaces.py: 100%

27 statements  

« prev     ^ index     » next       coverage.py v7.2.7, created at 2023-07-21 09:54 +0000

1# This file is part of daf_butler. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (http://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

21 

22from __future__ import annotations 

23 

24__all__ = ["RepoExportBackend", "RepoImportBackend", "RepoTransferFormatConfig"] 

25 

26from abc import ABC, abstractmethod 

27from collections.abc import Iterable 

28from typing import TYPE_CHECKING 

29 

30from ..core import ( 

31 ConfigSubset, 

32 DatasetAssociation, 

33 DatasetType, 

34 Datastore, 

35 DimensionElement, 

36 DimensionRecord, 

37 FileDataset, 

38) 

39from ..registry import CollectionType 

40from ..registry.interfaces import CollectionRecord 

41 

42if TYPE_CHECKING: 

43 from lsst.resources import ResourcePathExpression 

44 

45 

class RepoTransferFormatConfig(ConfigSubset):
    """Configuration subset that maps file formats to the repo import/export
    backend responsible for each of them.
    """

    # Key of the butler configuration section this subset is read from.
    component = "repo_transfer_formats"
    # Packaged YAML resource supplying the default format-to-backend mapping.
    defaultConfigFile = "repo_transfer_formats.yaml"

53 

54 

class RepoExportBackend(ABC):
    """An abstract interface for data repository export implementations.

    Methods are guaranteed to be called in ways that reflect foreign key
    dependencies.
    """

    @abstractmethod
    def saveDimensionData(self, element: DimensionElement, *data: DimensionRecord) -> None:
        """Export one or more dimension element records.

        Parameters
        ----------
        element : `DimensionElement`
            The `DimensionElement` whose elements are being exported.
        data : `DimensionRecord`, variadic
            One or more records to export.
        """
        raise NotImplementedError()

    @abstractmethod
    def saveCollection(self, record: CollectionRecord, doc: str | None) -> None:
        """Export a collection.

        This only exports the collection's own state, not its associations
        with datasets.

        Parameters
        ----------
        record : `CollectionRecord`
            Object representing the collection to export.
        doc : `str` or `None`
            Documentation string for the collection.
        """
        raise NotImplementedError()

    @abstractmethod
    def saveDatasets(self, datasetType: DatasetType, run: str, *datasets: FileDataset) -> None:
        """Export one or more datasets, including their associated DatasetType
        and run information (but not including associated dimension
        information).

        Parameters
        ----------
        datasetType : `DatasetType`
            Type of all datasets being exported with this call.
        run : `str`
            Run associated with all datasets being exported with this call.
        datasets : `FileDataset`, variadic
            Per-dataset information to be exported. `FileDataset.formatter`
            attributes should be strings, not `Formatter` instances or
            classes.
        """
        raise NotImplementedError()

    @abstractmethod
    def saveDatasetAssociations(
        self, collection: str, collectionType: CollectionType, associations: Iterable[DatasetAssociation]
    ) -> None:
        """Export the dataset-collection associations for a single collection.

        Parameters
        ----------
        collection : `str`
            The name of the collection.
        collectionType : `CollectionType`
            The type of the collection; either `CollectionType.TAGGED` or
            `CollectionType.CALIBRATION` (as other collection types are
            exported in other ways).
        associations : `~collections.abc.Iterable` [ `DatasetAssociation` ]
            Structs representing an association between this collection and
            this dataset.
        """
        raise NotImplementedError()

    @abstractmethod
    def finish(self) -> None:
        """Complete the export process."""
        raise NotImplementedError()

133 

134 

class RepoImportBackend(ABC):
    """An abstract interface for data repository import implementations.

    Import backends are expected to be constructed with a description of
    the objects that need to be imported (from, e.g., a file written by the
    corresponding export backend), along with a `Registry`.
    """

    @abstractmethod
    def register(self) -> None:
        """Register all runs and dataset types associated with the backend
        with the `Registry` the backend was constructed with.

        These operations cannot be performed inside transactions, unlike those
        performed by `load`, and must in general be performed before `load`.
        """
        # Raise like every other abstract method in this module; previously
        # this body was empty and an un-overridden call silently returned
        # ``None`` instead of failing.
        raise NotImplementedError()

    @abstractmethod
    def load(
        self,
        datastore: Datastore | None,
        *,
        directory: ResourcePathExpression | None = None,
        transfer: str | None = None,
        skip_dimensions: set | None = None,
    ) -> None:
        """Import information associated with the backend into the given
        registry and datastore.

        This must be run after `register`, and may be performed inside a
        transaction.

        Parameters
        ----------
        datastore : `Datastore` or `None`
            Datastore to import into. If `None`, datasets will only be
            inserted into the `Registry` (primarily intended for tests).
        directory : `~lsst.resources.ResourcePathExpression`, optional
            Directory all dataset paths are relative to.
        transfer : `str`, optional
            Transfer mode forwarded to `Datastore.ingest`.
        skip_dimensions : `set`, optional
            Dimensions that should be skipped and not imported. This can
            be useful when importing into a registry that already knows
            about a specific instrument.
        """
        raise NotImplementedError()