Coverage for python/lsst/analysis/tools/interfaces/datastore/_sasquatchDatastore.py: 55% of 81 statements
# This file is part of analysis_tools.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = ("SasquatchDatastore",)

"""Sasquatch datastore"""
import logging
from collections.abc import Iterable, Mapping, Sequence
from typing import TYPE_CHECKING, Any, ClassVar

from lsst.daf.butler import (
    DatasetRef,
    DatasetRefURIs,
    DatasetTypeNotSupportedError,
    DatastoreRecordData,
    StorageClass,
)
from lsst.daf.butler.datastores.genericDatastore import GenericBaseDatastore
from lsst.daf.butler.registry.interfaces import DatastoreRegistryBridge
from lsst.resources import ResourcePath

from . import SasquatchDispatcher

if TYPE_CHECKING:
    from lsst.daf.butler import Config, DatasetType, LookupKey
    from lsst.daf.butler.registry.interfaces import DatasetIdRef, DatastoreRegistryBridgeManager

log = logging.getLogger(__name__)


class SasquatchDatastore(GenericBaseDatastore):
53 """Basic Datastore for writing to an in Sasquatch instance.
55 This Datastore is currently write only, meaning that it can dispatch data
56 to a Sasquatch instance, but at the present can not be used to retrieve
57 values.
60 Parameters
61 ----------
62 config : `DatastoreConfig` or `str`
63 Configuration.
64 bridgeManager : `DatastoreRegistryBridgeManager`
65 Object that manages the interface between `Registry` and datastores.
66 butlerRoot : `str`, optional
67 Unused parameter.
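
    Examples
    --------
    A minimal datastore configuration might look like the following sketch
    (illustrative only; the URL is a placeholder and the class path assumes
    the package's usual re-export)::

        datastore:
          cls: lsst.analysis.tools.interfaces.datastore.SasquatchDatastore
          restProxyUrl: https://example.org/sasquatch-rest-proxy
          accessToken: na
          namespace: lsst.dm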
68 """

    defaultConfigFile: ClassVar[str | None] = "sasquatchDatastore.yaml"
    """Path to configuration defaults. Accessed within the ``configs``
    resource or relative to a search path. Can be `None` if no defaults are
    specified.
    """

    restProxyUrl: str
    """URL of the HTTP REST proxy to which datasets will be dispatched."""

    accessToken: str
    """Access token used to authenticate to the REST proxy."""

    namespace: str
    """The namespace in Sasquatch to which uploaded metrics will be
    dispatched.
    """

    def __init__(
        self,
        config: Config | str,
        bridgeManager: DatastoreRegistryBridgeManager,
        butlerRoot: str | None = None,
    ):
        super().__init__(config, bridgeManager)

        # Name ourselves either using an explicit name or a name
        # derived from the (unexpanded) root.
        self.name = self.config.get("name", "{}@{}".format(type(self).__name__, self.config["restProxyUrl"]))
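        # For example, with no explicit "name" key and a (placeholder)
        # restProxyUrl of https://example.org/sasquatch-rest-proxy, the
        # derived name would be
        # "SasquatchDatastore@https://example.org/sasquatch-rest-proxy".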
        log.debug("Creating datastore %s", self.name)

        self._bridge = bridgeManager.register(self.name, ephemeral=False)

        self.restProxyUrl = self.config["restProxyUrl"]

        self.accessToken = self.config.get("accessToken", "na")

        self.namespace = self.config.get("namespace", "lsst.dm")

        self._dispatcher = SasquatchDispatcher(self.restProxyUrl, self.accessToken, self.namespace)

    @property
    def bridge(self) -> DatastoreRegistryBridge:
        return self._bridge

    def put(self, inMemoryDataset: Any, ref: DatasetRef) -> None:
        if self.constraints.isAcceptable(ref):
            self._dispatcher.dispatchRef(inMemoryDataset, ref)
        else:
            log.debug("Could not put dataset type %s with Sasquatch datastore", ref.datasetType)
            raise DatasetTypeNotSupportedError(
                f"Could not put dataset type {ref.datasetType} with Sasquatch datastore"
            )
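
    # Illustrative usage sketch, not part of the class API: a put is
    # normally issued through a Butler configured with this datastore, e.g.
    #
    #     butler.put(bundle, ref)
    #
    # where ``bundle`` is a hypothetical in-memory dataset whose dataset
    # type satisfies this datastore's constraints; accepted datasets are
    # forwarded to ``SasquatchDispatcher.dispatchRef``.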

    def addStoredItemInfo(self, refs: Iterable[DatasetRef], infos: Iterable[Any]) -> None:
        raise NotImplementedError()

    def getStoredItemsInfo(self, ref: DatasetRef) -> Sequence[Any]:
        raise NotImplementedError()

    def removeStoredItemInfo(self, ref: DatasetRef) -> None:
        raise NotImplementedError()

    def trash(self, ref: DatasetRef | Iterable[DatasetRef], ignore_errors: bool = True) -> None:
        log.debug("Sasquatch datastore does not support trashing; skipping %s", ref)
        raise FileNotFoundError()

    def emptyTrash(self, ignore_errors: bool = True) -> None:
        log.debug("Sasquatch datastore does not support trash; nothing to empty")

    def forget(self, ref: Iterable[DatasetRef]) -> None:
        pass

    def exists(self, datasetRef: DatasetRef) -> bool:
        # Sasquatch is not currently searchable.
        return False

    def knows(self, ref: DatasetRef) -> bool:
        return False

    def get(
        self,
        datasetRef: DatasetRef,
        parameters: Mapping[str, Any] | None = None,
        storageClass: StorageClass | str | None = None,
    ) -> Any:
        raise FileNotFoundError()

    def validateConfiguration(
        self, entities: Iterable[DatasetRef | DatasetType | StorageClass], logFailures: bool = False
    ) -> None:
        """Validate some of the configuration for this datastore.

        Parameters
        ----------
        entities : iterable of `DatasetRef`, `DatasetType`, or `StorageClass`
            Entities to test against this configuration. Can be of differing
            types.
        logFailures : `bool`, optional
            If `True`, output a log message for every validation error
            detected.

        Raises
        ------
        DatastoreValidationError
            Raised if there is a validation problem with a configuration.
            All the problems are reported in a single exception.

        Notes
        -----
        This method is a no-op.
        """
        return

    def validateKey(self, lookupKey: LookupKey, entity: DatasetRef | DatasetType | StorageClass) -> None:
        # Docstring is inherited from base class.
        return

    def getLookupKeys(self) -> set[LookupKey]:
        # Docstring is inherited from base class.
        return self.constraints.getLookupKeys()

    def needs_expanded_data_ids(
        self,
        transfer: str | None,
        entity: DatasetRef | DatasetType | StorageClass | None = None,
    ) -> bool:
        # Docstring inherited.
        return False

    def import_records(self, data: Mapping[str, DatastoreRecordData]) -> None:
        # Docstring inherited from the base class.
        return

    def export_records(self, refs: Iterable[DatasetIdRef]) -> Mapping[str, DatastoreRecordData]:
        # Docstring inherited from the base class.

        # Sasquatch Datastore records cannot be exported or imported.
        return {}

    def getURI(self, datasetRef: DatasetRef, predict: bool = False) -> ResourcePath:
        raise NotImplementedError()

    def getURIs(self, datasetRef: DatasetRef, predict: bool = False) -> DatasetRefURIs:
        raise NotImplementedError()

    def retrieveArtifacts(
        self,
        refs: Iterable[DatasetRef],
        destination: ResourcePath,
        transfer: str = "auto",
        preserve_path: bool = True,
        overwrite: bool = False,
    ) -> list[ResourcePath]:
        raise NotImplementedError()

    @classmethod
    def setConfigRoot(cls, root: str, config: Config, full: Config, overwrite: bool = True) -> None:
        pass