Coverage for python/lsst/analysis/tools/interfaces/datastore/_sasquatchDatastore.py: 57% (88 statements)


# This file is part of analysis_tools.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Sasquatch datastore."""

from __future__ import annotations

__all__ = ("SasquatchDatastore",)

import logging
from collections.abc import Iterable, Mapping, Sequence
from typing import TYPE_CHECKING, Any, ClassVar

from lsst.daf.butler import DatasetRef, DatasetTypeNotSupportedError, StorageClass
from lsst.daf.butler.datastore import DatasetRefURIs, DatastoreOpaqueTable
from lsst.daf.butler.datastore.generic_base import GenericBaseDatastore
from lsst.daf.butler.datastore.record_data import DatastoreRecordData
from lsst.daf.butler.registry.interfaces import DatastoreRegistryBridge
from lsst.resources import ResourcePath

from . import SasquatchDispatcher

if TYPE_CHECKING:
    from lsst.daf.butler import Config, DatasetType, LookupKey
    from lsst.daf.butler.registry.interfaces import DatasetIdRef, DatastoreRegistryBridgeManager


log = logging.getLogger(__name__)


class SasquatchDatastore(GenericBaseDatastore):
    """Basic datastore for writing to a Sasquatch instance.

    This datastore is currently write-only: it can dispatch data to a
    Sasquatch instance, but it cannot yet be used to retrieve values.

    Parameters
    ----------
    config : `DatastoreConfig` or `str`
        Configuration.
    bridgeManager : `DatastoreRegistryBridgeManager`
        Object that manages the interface between `Registry` and datastores.
    butlerRoot : `str`, optional
        Unused parameter.
    """

    defaultConfigFile: ClassVar[str | None] = "sasquatchDatastore.yaml"
    """Path to configuration defaults. Accessed within the ``configs``
    resource or relative to a search path. Can be `None` if no defaults are
    specified.
    """

    restProxyUrl: str
    """URL of the HTTP REST proxy to which datasets are dispatched."""

    accessToken: str
    """Access token used to authenticate to the REST proxy."""

    namespace: str
    """The namespace in Sasquatch to which uploaded metrics are dispatched."""
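
    # A minimal configuration sketch (hypothetical values; as ``__init__``
    # below shows, ``restProxyUrl`` is the only required key, while
    # ``accessToken`` and ``namespace`` default to "na" and "lsst.dm"):
    #
    #     datastore:
    #       cls: lsst.analysis.tools.interfaces.datastore.SasquatchDatastore
    #       restProxyUrl: https://example.org/sasquatch-rest-proxy
    #       accessToken: my-token
    #       namespace: lsst.dm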

    def __init__(
        self,
        config: Config | str,
        bridgeManager: DatastoreRegistryBridgeManager,
        butlerRoot: str | None = None,
    ):
        super().__init__(config, bridgeManager)

        # Name ourselves either using an explicit name or a name
        # derived from the (unexpanded) root.
        self.name = self.config.get("name", "{}@{}".format(type(self).__name__, self.config["restProxyUrl"]))
        log.debug("Creating datastore %s", self.name)

        self._bridge = bridgeManager.register(self.name, ephemeral=False)

        self.restProxyUrl = self.config["restProxyUrl"]
        self.accessToken = self.config.get("accessToken", "na")
        self.namespace = self.config.get("namespace", "lsst.dm")

        self._dispatcher = SasquatchDispatcher(self.restProxyUrl, self.accessToken, self.namespace)

    @property
    def bridge(self) -> DatastoreRegistryBridge:
        return self._bridge

    def put(self, inMemoryDataset: Any, ref: DatasetRef) -> None:
        if self.constraints.isAcceptable(ref):
            self._dispatcher.dispatchRef(inMemoryDataset, ref)
        else:
            log.debug("Could not put dataset type %s with Sasquatch datastore", ref.datasetType)
            raise DatasetTypeNotSupportedError(
                f"Could not put dataset type {ref.datasetType} with Sasquatch datastore"
            )
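
    # Hedged usage sketch: with a Butler whose datastore chain includes this
    # datastore, putting an acceptable dataset routes through ``put`` above
    # and ends in a single dispatch call (names here are illustrative
    # assumptions, not part of this module):
    #
    #     butler.put(bundle, "metricMeasurementBundle", dataId)
    #     # ...internally: self._dispatcher.dispatchRef(bundle, ref)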

    def put_new(self, in_memory_dataset: Any, dataset_ref: DatasetRef) -> Mapping[str, DatasetRef]:
        # Docstring inherited from the base class.
        self.put(in_memory_dataset, dataset_ref)
        # Sasquatch is effectively ephemeral: we do not store its datastore
        # records in the registry, so return an empty dict.
        return {}

    def addStoredItemInfo(self, refs: Iterable[DatasetRef], infos: Iterable[Any]) -> None:
        raise NotImplementedError()

    def getStoredItemsInfo(self, ref: DatasetRef) -> Sequence[Any]:
        raise NotImplementedError()

    def removeStoredItemInfo(self, ref: DatasetRef) -> None:
        raise NotImplementedError()

    def trash(self, ref: DatasetRef | Iterable[DatasetRef], ignore_errors: bool = True) -> None:
        log.debug("Sasquatch datastore does not support trashing; skipping %s", ref)
        raise FileNotFoundError()

    def emptyTrash(self, ignore_errors: bool = True) -> None:
        log.debug("Sasquatch datastore does not support trash; nothing to empty")

    def forget(self, ref: Iterable[DatasetRef]) -> None:
        pass

    def exists(self, datasetRef: DatasetRef) -> bool:
        # Sasquatch is not currently searchable, so no dataset can be
        # confirmed to exist.
        return False

    def knows(self, ref: DatasetRef) -> bool:
        return False

    def get(
        self,
        datasetRef: DatasetRef,
        parameters: Mapping[str, Any] | None = None,
        storageClass: StorageClass | str | None = None,
    ) -> Any:
        raise FileNotFoundError()

    def validateConfiguration(
        self, entities: Iterable[DatasetRef | DatasetType | StorageClass], logFailures: bool = False
    ) -> None:
        """Validate some of the configuration for this datastore.

        Parameters
        ----------
        entities : iterable of `DatasetRef`, `DatasetType`, or `StorageClass`
            Entities to test against this configuration. Can be of differing
            types.
        logFailures : `bool`, optional
            If `True`, output a log message for every validation error
            detected.

        Raises
        ------
        DatastoreValidationError
            Raised if there is a validation problem with a configuration.
            All the problems are reported in a single exception.

        Notes
        -----
        This method is a no-op.
        """
        return

    def validateKey(self, lookupKey: LookupKey, entity: DatasetRef | DatasetType | StorageClass) -> None:
        # Docstring is inherited from base class.
        return

    def getLookupKeys(self) -> set[LookupKey]:
        # Docstring is inherited from base class.
        return self.constraints.getLookupKeys()

    def needs_expanded_data_ids(
        self,
        transfer: str | None,
        entity: DatasetRef | DatasetType | StorageClass | None = None,
    ) -> bool:
        # Docstring inherited.
        return False

    def import_records(self, data: Mapping[str, DatastoreRecordData]) -> None:
        # Docstring inherited from the base class.
        return

    def export_records(self, refs: Iterable[DatasetIdRef]) -> Mapping[str, DatastoreRecordData]:
        # Docstring inherited from the base class.

        # Sasquatch datastore records cannot be exported or imported.
        return {}

    def getURI(self, datasetRef: DatasetRef, predict: bool = False) -> ResourcePath:
        raise NotImplementedError()

    def getURIs(self, datasetRef: DatasetRef, predict: bool = False) -> DatasetRefURIs:
        raise NotImplementedError()

    def retrieveArtifacts(
        self,
        refs: Iterable[DatasetRef],
        destination: ResourcePath,
        transfer: str = "auto",
        preserve_path: bool = True,
        overwrite: bool = False,
    ) -> list[ResourcePath]:
        raise NotImplementedError()

    @classmethod
    def setConfigRoot(cls, root: str, config: Config, full: Config, overwrite: bool = True) -> None:
        pass

    def get_opaque_table_definitions(self) -> Mapping[str, DatastoreOpaqueTable]:
        # Docstring inherited from the base class.
        return {}
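

# A minimal end-to-end sketch (hedged; it mirrors the constructor call in
# ``__init__`` above, and ``bundle`` and ``ref`` are assumed to exist):
#
#     from lsst.analysis.tools.interfaces.datastore import SasquatchDispatcher
#
#     dispatcher = SasquatchDispatcher(restProxyUrl, accessToken, namespace)
#     dispatcher.dispatchRef(bundle, ref)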