Coverage for python/lsst/analysis/tools/interfaces/datastore/_sasquatchDatastore.py: 55%

81 statements  

coverage.py v7.2.5, created at 2023-05-09 03:19 -0700

# This file is part of analysis_tools.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = ("SasquatchDatastore",)

"""Sasquatch datastore"""
import logging
from collections.abc import Iterable, Mapping, Sequence
from typing import TYPE_CHECKING, Any, ClassVar

from lsst.daf.butler import (
    DatasetRef,
    DatasetRefURIs,
    DatasetTypeNotSupportedError,
    DatastoreRecordData,
    StorageClass,
)
from lsst.daf.butler.datastores.genericDatastore import GenericBaseDatastore
from lsst.daf.butler.registry.interfaces import DatastoreRegistryBridge
from lsst.resources import ResourcePath

from . import SasquatchDispatcher

if TYPE_CHECKING:
    from lsst.daf.butler import Config, DatasetType, LookupKey
    from lsst.daf.butler.registry.interfaces import DatasetIdRef, DatastoreRegistryBridgeManager


log = logging.getLogger(__name__)


class SasquatchDatastore(GenericBaseDatastore):
    """Basic Datastore for writing to a Sasquatch instance.

    This Datastore is currently write-only, meaning that it can dispatch data
    to a Sasquatch instance, but at present cannot be used to retrieve
    values.

    Parameters
    ----------
    config : `DatastoreConfig` or `str`
        Configuration.
    bridgeManager : `DatastoreRegistryBridgeManager`
        Object that manages the interface between `Registry` and datastores.
    butlerRoot : `str`, optional
        Unused parameter.
    """
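
    # A minimal, illustrative sketch (not part of this module) of how this
    # datastore might be configured. The keys mirror what ``__init__`` reads
    # from ``self.config`` below; the ``cls`` import path and the URL are
    # placeholders, and the YAML layout follows the usual butler datastore
    # config shape rather than anything defined here:
    #
    #     datastore:
    #       cls: lsst.analysis.tools.interfaces.datastore.SasquatchDatastore
    #       restProxyUrl: https://example.org/sasquatch-rest-proxy
    #       accessToken: na
    #       namespace: lsst.dm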

    defaultConfigFile: ClassVar[str | None] = "sasquatchDatastore.yaml"
    """Path to configuration defaults. Accessed within the ``configs`` resource
    or relative to a search path. Can be None if no defaults specified.
    """

    restProxyUrl: str
    """URL of the HTTP REST proxy to which datasets are dispatched.
    """

    accessToken: str
    """Access token which is used to authenticate to the restProxy.
    """

    namespace: str
    """The namespace in Sasquatch where the uploaded metrics will be
    dispatched.
    """

    def __init__(
        self,
        config: Config | str,
        bridgeManager: DatastoreRegistryBridgeManager,
        butlerRoot: str | None = None,
    ):
        super().__init__(config, bridgeManager)

        # Name ourselves either using an explicit name or a name
        # derived from the rest proxy URL.
        self.name = self.config.get("name", "{}@{}".format(type(self).__name__, self.config["restProxyUrl"]))
        log.debug("Creating datastore %s", self.name)

        self._bridge = bridgeManager.register(self.name, ephemeral=False)

        self.restProxyUrl = self.config["restProxyUrl"]

        self.accessToken = self.config.get("accessToken", "na")

        self.namespace = self.config.get("namespace", "lsst.dm")

        self._dispatcher = SasquatchDispatcher(self.restProxyUrl, self.accessToken, self.namespace)

    @property
    def bridge(self) -> DatastoreRegistryBridge:
        return self._bridge

    def put(self, inMemoryDataset: Any, ref: DatasetRef) -> None:
        if self.constraints.isAcceptable(ref):
            self._dispatcher.dispatchRef(inMemoryDataset, ref)
        else:
            log.debug("Could not put dataset type %s with Sasquatch datastore", ref.datasetType)
            raise DatasetTypeNotSupportedError(
                f"Could not put dataset type {ref.datasetType} with Sasquatch datastore"
            )
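
    # Illustrative only: with a Butler configured to use this datastore, a
    # ``put`` of an accepted dataset type is dispatched to Sasquatch via
    # ``SasquatchDispatcher`` rather than written to a file store. The dataset
    # type name and variables here are hypothetical:
    #
    #     butler.put(bundle, "metricMeasurementBundle", dataId)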

    def addStoredItemInfo(self, refs: Iterable[DatasetRef], infos: Iterable[Any]) -> None:
        raise NotImplementedError()

    def getStoredItemsInfo(self, ref: DatasetRef) -> Sequence[Any]:
        raise NotImplementedError()

    def removeStoredItemInfo(self, ref: DatasetRef) -> None:
        raise NotImplementedError()

    def trash(self, ref: DatasetRef | Iterable[DatasetRef], ignore_errors: bool = True) -> None:
        log.debug("Sasquatch datastore does not support trashing, skipping %s", ref)
        raise FileNotFoundError()

137 

138 def emptyTrash(self, ignore_errors: bool = True) -> None: 

139 log.debug("Sasquatch datastore does not support trash, nothing to empty") 

140 

141 def forget(self, ref: Iterable[DatasetRef]) -> None: 

142 pass 

143 

144 def exists(self, datasetRef: DatasetRef) -> bool: 

145 # sasquatch is not currently searchable 

146 return False 

147 

148 def knows(self, ref: DatasetRef) -> bool: 

149 return False 

150 

151 def get( 

152 self, 

153 datasetRef: DatasetRef, 

154 parameters: Mapping[str, Any] | None = None, 

155 storageClass: StorageClass | str | None = None, 

156 ) -> Any: 

157 raise FileNotFoundError() 

158 

159 def validateConfiguration( 

160 self, entities: Iterable[DatasetRef | DatasetType | StorageClass], logFailures: bool = False 

161 ) -> None: 

162 """Validate some of the configuration for this datastore. 

163 

164 Parameters 

165 ---------- 

166 entities : iterable of `DatasetRef`, `DatasetType`, or `StorageClass` 

167 Entities to test against this configuration. Can be differing 

168 types. 

169 logFailures : `bool`, optional 

170 If `True`, output a log message for every validation error 

171 detected. 

172 

173 Raises 

174 ------ 

175 DatastoreValidationError 

176 Raised if there is a validation problem with a configuration. 

177 All the problems are reported in a single exception. 

178 

179 Notes 

180 ----- 

181 This method is a no-op. 

182 """ 

183 return 

184 

185 def validateKey(self, lookupKey: LookupKey, entity: DatasetRef | DatasetType | StorageClass) -> None: 

186 # Docstring is inherited from base class. 

187 return 

188 

189 def getLookupKeys(self) -> set[LookupKey]: 

190 # Docstring is inherited from base class. 

191 return self.constraints.getLookupKeys() 

192 

193 def needs_expanded_data_ids( 

194 self, 

195 transfer: str | None, 

196 entity: DatasetRef | DatasetType | StorageClass | None = None, 

197 ) -> bool: 

198 # Docstring inherited. 

199 return False 

200 

201 def import_records(self, data: Mapping[str, DatastoreRecordData]) -> None: 

202 # Docstring inherited from the base class. 

203 return 

204 

205 def export_records(self, refs: Iterable[DatasetIdRef]) -> Mapping[str, DatastoreRecordData]: 

206 # Docstring inherited from the base class. 

207 

208 # Sasquatch Datastore records cannot be exported or imported. 

209 return {} 

210 

211 def getURI(self, datasetRef: DatasetRef, predict: bool = False) -> ResourcePath: 

212 raise NotImplementedError() 

213 

214 def getURIs(self, datasetRef: DatasetRef, predict: bool = False) -> DatasetRefURIs: 

215 raise NotImplementedError() 

216 

217 def retrieveArtifacts( 

218 self, 

219 refs: Iterable[DatasetRef], 

220 destination: ResourcePath, 

221 transfer: str = "auto", 

222 preserve_path: bool = True, 

223 overwrite: bool = False, 

224 ) -> list[ResourcePath]: 

225 raise NotImplementedError() 

226 

227 @classmethod 

228 def setConfigRoot(cls, root: str, config: Config, full: Config, overwrite: bool = True) -> None: 

229 pass