Coverage for python/lsst/daf/butler/remote_butler/_remote_butler.py: 6%
72 statements
« prev ^ index » next coverage.py v7.3.2, created at 2023-10-27 09:43 +0000
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This software is dual licensed under the GNU General Public License and also
# under a 3-clause BSD license. Recipients may choose which of these licenses
# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
# respectively. If you choose the GPL option then the following text applies
# (but note that there is still no warranty even if you opt for BSD instead):
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
__all__ = ("RemoteButler",)

# Standard library.
from collections.abc import Collection, Iterable, Sequence
from contextlib import AbstractContextManager
from typing import Any, TextIO

# Third-party / LSST stack.
import httpx
from lsst.daf.butler import __version__
from lsst.resources import ResourcePath, ResourcePathExpression
from lsst.utils.introspection import get_full_type_name

# Package-local.
from .._butler import Butler
from .._butler_config import ButlerConfig
from .._config import Config
from .._dataset_existence import DatasetExistence
from .._dataset_ref import DatasetIdGenEnum, DatasetRef
from .._dataset_type import DatasetType
from .._deferredDatasetHandle import DeferredDatasetHandle
from .._file_dataset import FileDataset
from .._limited_butler import LimitedButler
from .._storage_class import StorageClass
from ..datastore import DatasetRefURIs
from ..dimensions import DataId, DimensionConfig, DimensionUniverse
from ..registry import Registry, RegistryDefaults
from ..transfers import RepoExportContext
from ._config import RemoteButlerConfigModel
class RemoteButler(Butler):
    """Butler implementation that proxies operations to a remote Butler
    server over HTTP.

    Most of the `Butler` interface is not yet implemented; the methods
    below that raise `NotImplementedError` are placeholders to satisfy
    the abstract base class.

    Parameters
    ----------
    config : `Config` | `ResourcePathExpression` | `None`, optional
        Configuration describing the remote repository.
    collections : `Any`, optional
        Default collections, forwarded to `RegistryDefaults`.
    run : `str` | `None`, optional
        Default run collection, forwarded to `RegistryDefaults`.
    searchPaths : `Sequence` [`ResourcePathExpression`] | `None`, optional
        Additional search paths used when resolving ``config``.
    writeable : `bool` | `None`, optional
        Accepted for interface compatibility with `Butler`; this client
        is read-only (see `isWriteable`).
    inferDefaults : `bool`, optional
        Whether to infer registry defaults, forwarded to
        `RegistryDefaults`.
    http_client : `httpx.Client` | `None`, optional
        Pre-configured HTTP client to use instead of constructing one;
        generally injected for testing.
    **kwargs : `Any`
        Additional default data ID values, forwarded to
        `RegistryDefaults`.
    """

    def __init__(
        self,
        # These parameters are inherited from the Butler() constructor
        config: Config | ResourcePathExpression | None = None,
        *,
        collections: Any = None,
        run: str | None = None,
        searchPaths: Sequence[ResourcePathExpression] | None = None,
        writeable: bool | None = None,
        inferDefaults: bool = True,
        # Parameters unique to RemoteButler
        http_client: httpx.Client | None = None,
        **kwargs: Any,
    ):
        # No local datastore exists for a remote client, so skip
        # datastore configuration entirely.
        butler_config = ButlerConfig(config, searchPaths, without_datastore=True)
        self._config = RemoteButlerConfigModel.model_validate(butler_config)
        # Dimension universe is fetched lazily from the server and cached
        # (see the ``dimensions`` property).
        self._dimensions: DimensionUniverse | None = None
        # TODO: RegistryDefaults should have finish() called on it, but this
        # requires getCollectionSummary() which is not yet implemented
        self._registry_defaults = RegistryDefaults(collections, run, inferDefaults, **kwargs)

        if http_client is not None:
            # We have injected a client explicitly in to the class.
            # This is generally done for testing.
            self._client = http_client
        else:
            # Identify this client to the server via the user-agent header.
            headers = {"user-agent": f"{get_full_type_name(self)}/{__version__}"}
            self._client = httpx.Client(headers=headers, base_url=str(self._config.remote_butler.url))

    def isWriteable(self) -> bool:
        # Docstring inherited.
        # The remote client is currently read-only.
        return False

    @property
    def dimensions(self) -> DimensionUniverse:
        # Docstring inherited.
        # Return the cached universe if we have already fetched it.
        if self._dimensions is not None:
            return self._dimensions

        response = self._client.get(self._get_url("universe"))
        response.raise_for_status()

        config = DimensionConfig.fromString(response.text, format="json")
        self._dimensions = DimensionUniverse(config)
        return self._dimensions

    def getDatasetType(self, name: str) -> DatasetType:
        # Docstring inherited.
        raise NotImplementedError()

    def transaction(self) -> AbstractContextManager[None]:
        """Will always raise NotImplementedError.

        Transactions are not supported by RemoteButler.
        """
        raise NotImplementedError()

    def put(
        self,
        obj: Any,
        datasetRefOrType: DatasetRef | DatasetType | str,
        /,
        dataId: DataId | None = None,
        *,
        run: str | None = None,
        **kwargs: Any,
    ) -> DatasetRef:
        # Docstring inherited.
        raise NotImplementedError()

    def getDeferred(
        self,
        datasetRefOrType: DatasetRef | DatasetType | str,
        /,
        dataId: DataId | None = None,
        *,
        parameters: dict | None = None,
        collections: Any = None,
        storageClass: str | StorageClass | None = None,
        **kwargs: Any,
    ) -> DeferredDatasetHandle:
        # Docstring inherited.
        raise NotImplementedError()

    def get(
        self,
        datasetRefOrType: DatasetRef | DatasetType | str,
        /,
        dataId: DataId | None = None,
        *,
        parameters: dict[str, Any] | None = None,
        collections: Any = None,
        storageClass: StorageClass | str | None = None,
        **kwargs: Any,
    ) -> Any:
        # Docstring inherited.
        raise NotImplementedError()

    def getURIs(
        self,
        datasetRefOrType: DatasetRef | DatasetType | str,
        /,
        dataId: DataId | None = None,
        *,
        predict: bool = False,
        collections: Any = None,
        run: str | None = None,
        **kwargs: Any,
    ) -> DatasetRefURIs:
        # Docstring inherited.
        raise NotImplementedError()

    def getURI(
        self,
        datasetRefOrType: DatasetRef | DatasetType | str,
        /,
        dataId: DataId | None = None,
        *,
        predict: bool = False,
        collections: Any = None,
        run: str | None = None,
        **kwargs: Any,
    ) -> ResourcePath:
        # Docstring inherited.
        raise NotImplementedError()

    def retrieveArtifacts(
        self,
        refs: Iterable[DatasetRef],
        destination: ResourcePathExpression,
        transfer: str = "auto",
        preserve_path: bool = True,
        overwrite: bool = False,
    ) -> list[ResourcePath]:
        # Docstring inherited.
        raise NotImplementedError()

    def exists(
        self,
        dataset_ref_or_type: DatasetRef | DatasetType | str,
        /,
        data_id: DataId | None = None,
        *,
        full_check: bool = True,
        collections: Any = None,
        **kwargs: Any,
    ) -> DatasetExistence:
        # Docstring inherited.
        raise NotImplementedError()

    def _exists_many(
        self,
        refs: Iterable[DatasetRef],
        /,
        *,
        full_check: bool = True,
    ) -> dict[DatasetRef, DatasetExistence]:
        # Docstring inherited.
        raise NotImplementedError()

    def removeRuns(self, names: Iterable[str], unstore: bool = True) -> None:
        # Docstring inherited.
        raise NotImplementedError()

    def ingest(
        self,
        *datasets: FileDataset,
        transfer: str | None = "auto",
        run: str | None = None,
        idGenerationMode: DatasetIdGenEnum | None = None,
        record_validation_info: bool = True,
    ) -> None:
        # Docstring inherited.
        raise NotImplementedError()

    def export(
        self,
        *,
        directory: str | None = None,
        filename: str | None = None,
        format: str | None = None,
        transfer: str | None = None,
    ) -> AbstractContextManager[RepoExportContext]:
        # Docstring inherited.
        raise NotImplementedError()

    def import_(
        self,
        *,
        directory: ResourcePathExpression | None = None,
        filename: ResourcePathExpression | TextIO | None = None,
        format: str | None = None,
        transfer: str | None = None,
        skip_dimensions: set | None = None,
    ) -> None:
        # Docstring inherited.
        raise NotImplementedError()

    def transfer_from(
        self,
        source_butler: LimitedButler,
        source_refs: Iterable[DatasetRef],
        transfer: str = "auto",
        skip_missing: bool = True,
        register_dataset_types: bool = False,
        transfer_dimensions: bool = False,
    ) -> Collection[DatasetRef]:
        # Docstring inherited.
        raise NotImplementedError()

    def validateConfiguration(
        self,
        logFailures: bool = False,
        datasetTypeNames: Iterable[str] | None = None,
        ignore: Iterable[str] | None = None,
    ) -> None:
        # Docstring inherited.
        raise NotImplementedError()

    @property
    def collections(self) -> Sequence[str]:
        # Docstring inherited.
        return self._registry_defaults.collections

    @property
    def run(self) -> str | None:
        # Docstring inherited.
        return self._registry_defaults.run

    @property
    def registry(self) -> Registry:
        # Docstring inherited.
        raise NotImplementedError()

    def pruneDatasets(
        self,
        refs: Iterable[DatasetRef],
        *,
        disassociate: bool = True,
        unstore: bool = False,
        tags: Iterable[str] = (),
        purge: bool = False,
    ) -> None:
        # Docstring inherited.
        raise NotImplementedError()

    def _get_url(self, path: str, version: str = "v1") -> str:
        """Form the complete path to an endpoint on the server.

        Parameters
        ----------
        path : `str`
            The relative path to the server endpoint. Should not include the
            "/butler" prefix.
        version : `str`, optional
            Version string to prepend to path. Defaults to "v1".

        Returns
        -------
        path : `str`
            The full path to the endpoint.
        """
        prefix = "butler"
        return f"{prefix}/{version}/{path}"