# This file is part of analysis_tools.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Sasquatch datastore."""

from __future__ import annotations

__all__ = ("SasquatchDatastore",)

import logging
from collections.abc import Iterable, Mapping, Sequence
from typing import TYPE_CHECKING, Any, ClassVar

from lsst.daf.butler import DatasetRef, DatasetTypeNotSupportedError, StorageClass
from lsst.daf.butler.datastore import DatasetRefURIs, DatastoreConfig, DatastoreOpaqueTable
from lsst.daf.butler.datastore.generic_base import GenericBaseDatastore
from lsst.daf.butler.datastore.record_data import DatastoreRecordData
from lsst.daf.butler.registry.interfaces import DatastoreRegistryBridge
from lsst.resources import ResourcePath, ResourcePathExpression

from . import SasquatchDispatcher

if TYPE_CHECKING:
    from lsst.daf.butler import Config, DatasetType, LookupKey
    from lsst.daf.butler.registry.interfaces import DatasetIdRef, DatastoreRegistryBridgeManager

log = logging.getLogger(__name__)


class SasquatchDatastore(GenericBaseDatastore):
    """Basic Datastore for writing to a Sasquatch instance.

    This Datastore is currently write-only, meaning that it can dispatch data
    to a Sasquatch instance, but at present it cannot be used to retrieve
    values.

    Parameters
    ----------
    config : `DatastoreConfig` or `str`
        Configuration.
    bridgeManager : `DatastoreRegistryBridgeManager`
        Object that manages the interface between `Registry` and datastores.
    butlerRoot : `str`, optional
        Unused parameter.
    """

    defaultConfigFile: ClassVar[str | None] = "sasquatchDatastore.yaml"
    """Path to configuration defaults. Accessed within the ``configs`` resource
    or relative to a search path. Can be None if no defaults specified.
    """

    restProxyUrl: str
    """URL of the HTTP REST proxy to which datasets will be dispatched."""

    accessToken: str
    """Access token used to authenticate to the REST proxy."""

    namespace: str
    """The namespace in Sasquatch to which uploaded metrics will be
    dispatched.
    """

    def __init__(
        self,
        config: DatastoreConfig,
        bridgeManager: DatastoreRegistryBridgeManager,
        butlerRoot: str | None = None,
    ):
        super().__init__(config, bridgeManager)

        # Name ourselves either using an explicit name or a name
        # derived from the REST proxy URL.
        self.name = self.config.get("name", "{}@{}".format(type(self).__name__, self.config["restProxyUrl"]))
        log.debug("Creating datastore %s", self.name)

        self._bridge = bridgeManager.register(self.name, ephemeral=False)

        self.restProxyUrl = self.config["restProxyUrl"]
        self.accessToken = self.config.get("accessToken", "na")
        self.namespace = self.config.get("namespace", "lsst.dm")

        self._dispatcher = SasquatchDispatcher(self.restProxyUrl, self.accessToken, self.namespace)
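
        # Note: all actual communication with Sasquatch happens in the
        # dispatcher created above; this datastore only decides what to send.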

    @classmethod
    def _create_from_config(
        cls,
        config: DatastoreConfig,
        bridgeManager: DatastoreRegistryBridgeManager,
        butlerRoot: ResourcePathExpression | None,
    ) -> SasquatchDatastore:
        return SasquatchDatastore(config, bridgeManager)

    def clone(self, bridgeManager: DatastoreRegistryBridgeManager) -> SasquatchDatastore:
        return SasquatchDatastore(self.config, bridgeManager)

    @property
    def bridge(self) -> DatastoreRegistryBridge:
        return self._bridge

    def put(self, inMemoryDataset: Any, ref: DatasetRef) -> None:
        if self.constraints.isAcceptable(ref):
            self._dispatcher.dispatchRef(inMemoryDataset, ref)
        else:
            log.debug("Could not put dataset type %s with Sasquatch datastore", ref.datasetType)
            raise DatasetTypeNotSupportedError(
                f"Could not put dataset type {ref.datasetType} with Sasquatch datastore"
            )
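
    # A hedged calling sketch: ``put`` either dispatches or raises, so a
    # best-effort caller (names here are hypothetical) might do:
    #
    #     try:
    #         datastore.put(measurementBundle, ref)
    #     except DatasetTypeNotSupportedError:
    #         log.warning("Sasquatch rejected dataset type for %s", ref)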

    def put_new(self, in_memory_dataset: Any, dataset_ref: DatasetRef) -> Mapping[str, DatasetRef]:
        # Docstring inherited from the base class.
        self.put(in_memory_dataset, dataset_ref)
        # Sasquatch is effectively ephemeral: we do not store its datastore
        # records in the registry, so return an empty dict.
        return {}

    def addStoredItemInfo(self, refs: Iterable[DatasetRef], infos: Iterable[Any]) -> None:
        raise NotImplementedError()

    def getStoredItemsInfo(self, ref: DatasetRef) -> Sequence[Any]:
        raise NotImplementedError()

    def removeStoredItemInfo(self, ref: DatasetRef) -> None:
        raise NotImplementedError()

    def trash(self, ref: DatasetRef | Iterable[DatasetRef], ignore_errors: bool = True) -> None:
        log.debug("Sasquatch datastore does not support trashing, skipping %s", ref)
        raise FileNotFoundError()

    def emptyTrash(self, ignore_errors: bool = True) -> None:
        log.debug("Sasquatch datastore does not support trash, nothing to empty")

    def forget(self, ref: Iterable[DatasetRef]) -> None:
        pass

    def exists(self, datasetRef: DatasetRef) -> bool:
        # Sasquatch is not currently searchable.
        return False

    def knows(self, ref: DatasetRef) -> bool:
        # This datastore keeps no records, so it never knows about a ref.
        return False

    def get(
        self,
        datasetRef: DatasetRef,
        parameters: Mapping[str, Any] | None = None,
        storageClass: StorageClass | str | None = None,
    ) -> Any:
        raise FileNotFoundError()

    def validateConfiguration(
        self, entities: Iterable[DatasetRef | DatasetType | StorageClass], logFailures: bool = False
    ) -> None:
        """Validate some of the configuration for this datastore.

        Parameters
        ----------
        entities : iterable of `DatasetRef`, `DatasetType`, or `StorageClass`
            Entities to test against this configuration. Can be of differing
            types.
        logFailures : `bool`, optional
            If `True`, output a log message for every validation error
            detected.

        Raises
        ------
        DatastoreValidationError
            Raised if there is a validation problem with a configuration.
            All the problems are reported in a single exception.

        Notes
        -----
        This method is a no-op.
        """
        return

    def validateKey(self, lookupKey: LookupKey, entity: DatasetRef | DatasetType | StorageClass) -> None:
        # Docstring is inherited from base class.
        return

    def getLookupKeys(self) -> set[LookupKey]:
        # Docstring is inherited from base class.
        return self.constraints.getLookupKeys()

    def needs_expanded_data_ids(
        self,
        transfer: str | None,
        entity: DatasetRef | DatasetType | StorageClass | None = None,
    ) -> bool:
        # Docstring inherited.
        return False

    def import_records(self, data: Mapping[str, DatastoreRecordData]) -> None:
        # Docstring inherited from the base class.
        return

    def export_records(self, refs: Iterable[DatasetIdRef]) -> Mapping[str, DatastoreRecordData]:
        # Docstring inherited from the base class.

        # Sasquatch datastore records cannot be exported or imported.
        return {}

    def getURI(self, datasetRef: DatasetRef, predict: bool = False) -> ResourcePath:
        raise NotImplementedError()

    def getURIs(self, datasetRef: DatasetRef, predict: bool = False) -> DatasetRefURIs:
        raise NotImplementedError()

    def retrieveArtifacts(
        self,
        refs: Iterable[DatasetRef],
        destination: ResourcePath,
        transfer: str = "auto",
        preserve_path: bool = True,
        overwrite: bool = False,
    ) -> list[ResourcePath]:
        raise NotImplementedError()

    @classmethod
    def setConfigRoot(cls, root: str, config: Config, full: Config, overwrite: bool = True) -> None:
        pass

    def get_opaque_table_definitions(self) -> Mapping[str, DatastoreOpaqueTable]:
        # Docstring inherited from the base class.
        return {}