Coverage for python/lsst/analysis/tools/interfaces/datastore/_sasquatchDatastore.py: 58%

83 statements  

coverage.py v7.3.2, created at 2023-11-01 11:51 +0000

# This file is part of analysis_tools.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Sasquatch datastore."""

from __future__ import annotations

__all__ = ("SasquatchDatastore",)

import logging
from collections.abc import Iterable, Mapping, Sequence
from typing import TYPE_CHECKING, Any, ClassVar

from lsst.daf.butler import DatasetRef, DatasetTypeNotSupportedError, StorageClass
from lsst.daf.butler.datastore import DatasetRefURIs
from lsst.daf.butler.datastore.generic_base import GenericBaseDatastore
from lsst.daf.butler.datastore.record_data import DatastoreRecordData
from lsst.daf.butler.registry.interfaces import DatastoreRegistryBridge
from lsst.resources import ResourcePath

from . import SasquatchDispatcher

if TYPE_CHECKING:
    from lsst.daf.butler import Config, DatasetType, LookupKey
    from lsst.daf.butler.registry.interfaces import DatasetIdRef, DatastoreRegistryBridgeManager


log = logging.getLogger(__name__)


class SasquatchDatastore(GenericBaseDatastore):
    """Basic Datastore for writing to a Sasquatch instance.

    This Datastore is currently write-only, meaning that it can dispatch data
    to a Sasquatch instance, but at present cannot be used to retrieve values.

    Parameters
    ----------
    config : `DatastoreConfig` or `str`
        Configuration.
    bridgeManager : `DatastoreRegistryBridgeManager`
        Object that manages the interface between `Registry` and datastores.
    butlerRoot : `str`, optional
        Unused parameter.
    """
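
    # Illustrative sketch, not part of the class: when a repository's
    # datastore configuration includes this datastore, metrics reach it
    # through an ordinary ``Butler.put`` call, and dataset types rejected by
    # the configured constraints raise ``DatasetTypeNotSupportedError`` in
    # ``put`` below. The repository path, run name, dataset type, and data ID
    # are made-up placeholders, and ``bundle`` stands for whatever in-memory
    # dataset is being uploaded:
    #
    #     from lsst.daf.butler import Butler
    #
    #     butler = Butler("/path/to/repo", run="u/example/metrics-run")
    #     butler.put(bundle, "exampleMetricBundle", instrument="ExampleCam")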

    defaultConfigFile: ClassVar[str | None] = "sasquatchDatastore.yaml"
    """Path to configuration defaults. Accessed within the ``configs`` resource
    or relative to a search path. Can be `None` if no defaults are specified.
    """

    restProxyUrl: str
    """URL of the HTTP REST proxy to which datasets will be dispatched.
    """

    accessToken: str
    """Access token used to authenticate to the REST proxy.
    """

    namespace: str
    """The namespace in Sasquatch to which uploaded metrics will be
    dispatched.
    """
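
    # A minimal sketch of the configuration block this datastore reads. The
    # key names and defaults (``restProxyUrl``, ``accessToken``, ``namespace``)
    # come from ``__init__`` below; the ``cls`` line and the surrounding YAML
    # layout are assumptions about how the datastore would be declared in a
    # butler datastore config, not something defined in this file:
    #
    #     datastore:
    #       cls: lsst.analysis.tools.interfaces.datastore.SasquatchDatastore
    #       restProxyUrl: https://example.org/sasquatch-rest-proxy
    #       accessToken: na
    #       namespace: lsst.dm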

    def __init__(
        self,
        config: Config | str,
        bridgeManager: DatastoreRegistryBridgeManager,
        butlerRoot: str | None = None,
    ):
        super().__init__(config, bridgeManager)

        # Name ourselves either using an explicit name or a name
        # derived from the rest proxy URL.
        self.name = self.config.get("name", "{}@{}".format(type(self).__name__, self.config["restProxyUrl"]))
        log.debug("Creating datastore %s", self.name)

        self._bridge = bridgeManager.register(self.name, ephemeral=False)

        self.restProxyUrl = self.config["restProxyUrl"]

        self.accessToken = self.config.get("accessToken", "na")

        self.namespace = self.config.get("namespace", "lsst.dm")

        self._dispatcher = SasquatchDispatcher(self.restProxyUrl, self.accessToken, self.namespace)

    @property
    def bridge(self) -> DatastoreRegistryBridge:
        return self._bridge

    def put(self, inMemoryDataset: Any, ref: DatasetRef) -> None:
        if self.constraints.isAcceptable(ref):
            self._dispatcher.dispatchRef(inMemoryDataset, ref)
        else:
            log.debug("Could not put dataset type %s with Sasquatch datastore", ref.datasetType)
            raise DatasetTypeNotSupportedError(
                f"Could not put dataset type {ref.datasetType} with Sasquatch datastore"
            )

    def addStoredItemInfo(self, refs: Iterable[DatasetRef], infos: Iterable[Any]) -> None:
        raise NotImplementedError()

    def getStoredItemsInfo(self, ref: DatasetRef) -> Sequence[Any]:
        raise NotImplementedError()

    def removeStoredItemInfo(self, ref: DatasetRef) -> None:
        raise NotImplementedError()

    def trash(self, ref: DatasetRef | Iterable[DatasetRef], ignore_errors: bool = True) -> None:
        log.debug("Sasquatch datastore does not support trashing; skipping %s", ref)
        raise FileNotFoundError()

    def emptyTrash(self, ignore_errors: bool = True) -> None:
        log.debug("Sasquatch datastore does not support trash, nothing to empty")

    def forget(self, ref: Iterable[DatasetRef]) -> None:
        pass

    def exists(self, datasetRef: DatasetRef) -> bool:
        # Sasquatch is not currently searchable.
        return False

    def knows(self, ref: DatasetRef) -> bool:
        return False

    def get(
        self,
        datasetRef: DatasetRef,
        parameters: Mapping[str, Any] | None = None,
        storageClass: StorageClass | str | None = None,
    ) -> Any:
        raise FileNotFoundError()

    def validateConfiguration(
        self, entities: Iterable[DatasetRef | DatasetType | StorageClass], logFailures: bool = False
    ) -> None:
        """Validate some of the configuration for this datastore.

        Parameters
        ----------
        entities : iterable of `DatasetRef`, `DatasetType`, or `StorageClass`
            Entities to test against this configuration. Can be of differing
            types.
        logFailures : `bool`, optional
            If `True`, output a log message for every validation error
            detected.

        Raises
        ------
        DatastoreValidationError
            Raised if there is a validation problem with a configuration.
            All the problems are reported in a single exception.

        Notes
        -----
        This method is a no-op.
        """
        return

    def validateKey(self, lookupKey: LookupKey, entity: DatasetRef | DatasetType | StorageClass) -> None:
        # Docstring is inherited from base class.
        return

    def getLookupKeys(self) -> set[LookupKey]:
        # Docstring is inherited from base class.
        return self.constraints.getLookupKeys()

    def needs_expanded_data_ids(
        self,
        transfer: str | None,
        entity: DatasetRef | DatasetType | StorageClass | None = None,
    ) -> bool:
        # Docstring inherited.
        return False

    def import_records(self, data: Mapping[str, DatastoreRecordData]) -> None:
        # Docstring inherited from the base class.
        return

    def export_records(self, refs: Iterable[DatasetIdRef]) -> Mapping[str, DatastoreRecordData]:
        # Docstring inherited from the base class.

        # Sasquatch Datastore records cannot be exported or imported.
        return {}

    def getURI(self, datasetRef: DatasetRef, predict: bool = False) -> ResourcePath:
        raise NotImplementedError()

    def getURIs(self, datasetRef: DatasetRef, predict: bool = False) -> DatasetRefURIs:
        raise NotImplementedError()

    def retrieveArtifacts(
        self,
        refs: Iterable[DatasetRef],
        destination: ResourcePath,
        transfer: str = "auto",
        preserve_path: bool = True,
        overwrite: bool = False,
    ) -> list[ResourcePath]:
        raise NotImplementedError()

    @classmethod
    def setConfigRoot(cls, root: str, config: Config, full: Config, overwrite: bool = True) -> None:
        pass