# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Generic datastore code useful for most datastores."""

__all__ = ("GenericBaseDatastore", )

import logging
from abc import abstractmethod

from lsst.daf.butler import Datastore, DatasetTypeNotSupportedError

log = logging.getLogger(__name__)


class GenericBaseDatastore(Datastore):
    """Methods useful for most implementations of a `Datastore`.

    Should always be sub-classed since key abstract methods are missing.
    """

    @abstractmethod
    def addStoredItemInfo(self, refs, infos):
        """Record internal storage information associated with one or more
        datasets.

        Parameters
        ----------
        refs : sequence of `DatasetRef`
            The datasets that have been stored.
        infos : sequence of `StoredDatastoreItemInfo`
            Metadata associated with the stored datasets.
        """
        raise NotImplementedError()

    @abstractmethod
    def getStoredItemInfo(self, ref):
56 """Retrieve information associated with file stored in this 

57 `Datastore`. 

58 

59 Parameters 

60 ---------- 

61 ref : `DatasetRef` 

62 The dataset that is to be queried. 

63 

64 Returns 

65 ------- 

66 info : `StoredFilenfo` 

67 Stored information about this file and its formatter. 

68 

69 Raises 

70 ------ 

71 KeyError 

72 Dataset with that id can not be found. 

73 """ 

        raise NotImplementedError()

    @abstractmethod
    def removeStoredItemInfo(self, ref):
        """Remove information about the file associated with this dataset.

        Parameters
        ----------
        ref : `DatasetRef`
            The dataset that has been removed.
        """
        raise NotImplementedError()
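
    # The three abstract methods above form the record-keeping contract that
    # concrete subclasses must implement. As an illustration only (this mixin
    # is hypothetical and not part of daf_butler), an in-memory datastore
    # could satisfy the contract with a plain dict keyed by dataset ID:
    #
    #     class InMemoryRecordKeeping:
    #         def __init__(self):
    #             self._records = {}
    #
    #         def addStoredItemInfo(self, refs, infos):
    #             for ref, info in zip(refs, infos):
    #                 self._records[ref.id] = info
    #
    #         def getStoredItemInfo(self, ref):
    #             # Raises KeyError for unknown datasets, as documented.
    #             return self._records[ref.id]
    #
    #         def removeStoredItemInfo(self, ref):
    #             del self._records[ref.id]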

    def _register_datasets(self, refsAndInfos):
        """Update registry to indicate that one or more datasets have been
        stored.

        Parameters
        ----------
        refsAndInfos : sequence of `tuple` [`DatasetRef`, `StoredDatastoreItemInfo`]
            Datasets to register and the internal datastore metadata
            associated with them.
        """
        expandedRefs = []
        expandedItemInfos = []

        for ref, itemInfo in refsAndInfos:
            # Need the main dataset and the components
            expandedRefs.extend(ref.flatten([ref]))

            # Need one entry for the main ref and one for each component
            expandedItemInfos.extend([itemInfo] * (len(ref.components) + 1))

        self.registry.insertDatasetLocations(self.name, expandedRefs)
        self.addStoredItemInfo(expandedRefs, expandedItemInfos)
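
    # For example (sketch only), registering one composite ref that has two
    # components expands to three rows that share the same metadata record:
    #
    #     self._register_datasets([(compositeRef, itemInfo)])
    #     # -> insertDatasetLocations receives [compositeRef, compA, compB]
    #     # -> addStoredItemInfo receives [itemInfo, itemInfo, itemInfo]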

    def _move_to_trash_in_registry(self, ref):
        """Tell registry that this dataset and associated components
        are to be trashed.

        Parameters
        ----------
        ref : `DatasetRef`
            Dataset to mark for removal from registry.

        Notes
        -----
        Dataset is not removed from internal stored item info table.
        """

        # Note that a ref can point to component dataset refs that
        # have been deleted already from registry but are still in
        # the python object. moveDatasetLocationToTrash will deal with that.
        self.registry.moveDatasetLocationToTrash(self.name, list(ref.flatten([ref])))
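
    # A subclass ``trash`` implementation might call this helper to mark the
    # registry rows while leaving artifact deletion to ``emptyTrash``
    # (hypothetical sketch, not the actual implementation):
    #
    #     def trash(self, ref, ignore_errors=True):
    #         try:
    #             self._move_to_trash_in_registry(ref)
    #         except Exception:
    #             if not ignore_errors:
    #                 raise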

    def _post_process_get(self, inMemoryDataset, readStorageClass, assemblerParams=None):
        """Given the Python object read from the datastore, manipulate
        it based on the supplied parameters and ensure the Python
        type is correct.

        Parameters
        ----------
        inMemoryDataset : `object`
            Dataset to check.
        readStorageClass : `StorageClass`
            The `StorageClass` used to obtain the assembler and to
            check the python type.
        assemblerParams : `dict`
            Parameters to pass to the assembler. Can be `None`.
        """
        # Process any left over parameters
        if assemblerParams:
            inMemoryDataset = readStorageClass.assembler().handleParameters(inMemoryDataset, assemblerParams)

        # Validate the returned data type matches the expected data type
        pytype = readStorageClass.pytype
        if pytype and not isinstance(inMemoryDataset, pytype):
            raise TypeError("Got Python type {} from datastore but expected {}".format(type(inMemoryDataset),
                                                                                       pytype))

        return inMemoryDataset
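
    # For illustration, a concrete ``get`` implementation would typically end
    # by delegating to this helper (sketch only; ``_readArtifact`` is a
    # hypothetical subclass method):
    #
    #     def get(self, ref, parameters=None):
    #         inMemoryDataset = self._readArtifact(ref)
    #         return self._post_process_get(inMemoryDataset,
    #                                       ref.datasetType.storageClass,
    #                                       assemblerParams=parameters)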

    def _validate_put_parameters(self, inMemoryDataset, ref):
        """Validate the supplied arguments for put.

        Parameters
        ----------
        inMemoryDataset : `object`
            The dataset to store.
        ref : `DatasetRef`
            Reference to the associated Dataset.
        """
        storageClass = ref.datasetType.storageClass

        # Sanity check
        if not isinstance(inMemoryDataset, storageClass.pytype):
            raise TypeError("Inconsistency between supplied object ({}) "
                            "and storage class type ({})".format(type(inMemoryDataset),
                                                                 storageClass.pytype))

        # Confirm that we can accept this dataset
        if not self.constraints.isAcceptable(ref):
            # Raise rather than use boolean return value.
            raise DatasetTypeNotSupportedError(f"Dataset {ref} has been rejected by this datastore via"
                                               " configuration.")

        return
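
    # A concrete ``put`` would typically call this validator before writing
    # anything and register the result afterwards (sketch only;
    # ``_writeArtifact`` is a hypothetical subclass method):
    #
    #     def put(self, inMemoryDataset, ref):
    #         self._validate_put_parameters(inMemoryDataset, ref)
    #         itemInfo = self._writeArtifact(inMemoryDataset, ref)
    #         self._register_datasets([(ref, itemInfo)])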

    def remove(self, ref):
        """Indicate to the Datastore that a dataset can be removed.

        .. warning::

            This method deletes the artifact associated with this
            dataset and cannot be reversed.

        Parameters
        ----------
        ref : `DatasetRef`
            Reference to the required Dataset.

        Raises
        ------
        FileNotFoundError
            Attempt to remove a dataset that does not exist.

        Notes
        -----
        This method is used for immediate removal of a dataset and is
        generally reserved for internal testing of datastore APIs.
        It is implemented by calling `trash()` and then immediately calling
        `emptyTrash()`. This call is meant to be immediate so errors
        encountered during removal are not ignored.
        """
        self.trash(ref, ignore_errors=False)
        self.emptyTrash(ignore_errors=False)
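
    # Intended mainly for tests, where immediate deletion is convenient;
    # for example (sketch):
    #
    #     datastore.put(inMemoryDataset, ref)
    #     assert datastore.exists(ref)
    #     datastore.remove(ref)
    #     assert not datastore.exists(ref)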

    def transfer(self, inputDatastore, ref):
        """Retrieve a dataset from an input `Datastore`,
        and store the result in this `Datastore`.

        Parameters
        ----------
        inputDatastore : `Datastore`
            The external `Datastore` from which to retrieve the Dataset.
        ref : `DatasetRef`
            Reference to the required dataset in the input data store.
        """
        assert inputDatastore is not self  # unless we want it for renames?
        inMemoryDataset = inputDatastore.get(ref)
        return self.put(inMemoryDataset, ref)
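
    # Typical usage pulls one dataset out of another datastore and stores it
    # here, e.g. (sketch):
    #
    #     localDatastore.transfer(remoteDatastore, ref)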