Coverage for python / lsst / daf / butler / remote_butler / _get.py: 0%
29 statements
« prev ^ index » next coverage.py v7.13.5, created at 2026-05-01 08:17 +0000
1from typing import Any
3from pydantic import AnyHttpUrl
5from lsst.resources import ResourcePath
6from lsst.resources.http import HttpResourcePath
8from .._dataset_ref import DatasetRef
9from .._location import Location
10from ..datastore.cache_manager import AbstractDatastoreCacheManager, DatastoreDisabledCacheManager
11from ..datastore.stored_file_info import StoredFileInfo
12from ..datastores.file_datastore.get import (
13 DatasetLocationInformation,
14 Mapping,
15 generate_datastore_get_information,
16 get_dataset_as_python_object_from_get_info,
17)
18from .authentication.interface import RemoteButlerAuthenticationProvider
19from .server_models import FileAuthenticationMode, FileInfoPayload, FileInfoRecord
def get_dataset_as_python_object(
    ref: DatasetRef,
    payload: FileInfoPayload,
    *,
    auth: RemoteButlerAuthenticationProvider,
    parameters: Mapping[str, Any] | None,
    cache_manager: AbstractDatastoreCacheManager | None = None,
) -> Any:
    """Fetch an artifact's files and convert them to a Python object.

    Parameters
    ----------
    ref : `DatasetRef`
        Metadata about this artifact.
    payload : `FileInfoPayload`
        Pre-processed information about each file associated with this
        artifact.
    auth : `RemoteButlerAuthenticationProvider`
        Provides authentication headers for HTTP service hosting the artifact
        files.
    parameters : `~collections.abc.Mapping` [`str`, `typing.Any`]
        `StorageClass` and `Formatter` parameters to be used when converting
        the artifact to a Python object.
    cache_manager : `AbstractDatastoreCacheManager` or `None`, optional
        Cache manager to use. If `None` the cache is disabled.

    Returns
    -------
    python_object : `typing.Any`
        The retrieved artifact, converted to a Python object.
    """
    # Attach authenticated ResourcePaths to each file record from the server.
    locations = [_to_dataset_location_information(info, auth) for info in payload.file_info]
    get_info = generate_datastore_get_information(
        locations,
        ref=ref,
        parameters=parameters,
    )
    # A disabled cache manager is the default when the caller supplies none.
    active_cache = DatastoreDisabledCacheManager() if cache_manager is None else cache_manager
    return get_dataset_as_python_object_from_get_info(
        get_info, ref=ref, parameters=parameters, cache_manager=active_cache
    )
67def convert_http_url_to_resource_path(
68 url: AnyHttpUrl, auth: RemoteButlerAuthenticationProvider, auth_mode: FileAuthenticationMode
69) -> ResourcePath:
70 """Convert an HTTP URL to a ResourcePath instance with authentication
71 headers attached.
73 Parameters
74 ----------
75 url : `AnyHttpUrl`
76 URL to convert.
77 auth : `RemoteButlerAuthenticationProvider`
78 Provides authentication headers for the URL.
79 auth_mode : `FileAuthenticationMode`
80 Specifies which authentication headers to use.
81 """
82 if auth_mode == "none":
83 headers = None
84 elif auth_mode == "gafaelfawr":
85 headers = auth.get_server_headers()
86 elif auth_mode == "datastore":
87 headers = auth.get_datastore_headers()
88 else:
89 raise ValueError(f"Unknown authentication type: '{auth_mode}'")
91 return HttpResourcePath.create_http_resource_path(str(url), extra_headers=headers)
def _to_dataset_location_information(
    file_info: FileInfoRecord, auth: RemoteButlerAuthenticationProvider
) -> DatasetLocationInformation:
    """Convert one server file record into a ``(Location, StoredFileInfo)``
    pair with an authenticated resource path attached.
    """
    resource_path = convert_http_url_to_resource_path(file_info.url, auth, auth_mode=file_info.auth)
    location = Location(None, resource_path)
    stored_info = StoredFileInfo.from_simple(file_info.datastoreRecords)
    return (location, stored_info)