Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

21 

"""Generic datastore code useful for most datastores."""

__all__ = ("GenericBaseDatastore", )

import logging
from abc import abstractmethod

from lsst.daf.butler import Datastore, DatasetTypeNotSupportedError

# Module-level logger shared by all datastore helpers in this file.
log = logging.getLogger(__name__)

32 

33 

class GenericBaseDatastore(Datastore):
    """Methods useful for most implementations of a `Datastore`.

    Should always be sub-classed since key abstract methods are missing.
    """

    @abstractmethod
    def addStoredItemInfo(self, refs, infos):
        """Record internal storage information associated with one or more
        datasets.

        Parameters
        ----------
        refs : sequence of `DatasetRef`
            The datasets that have been stored.
        infos : sequence of `StoredDatastoreItemInfo`
            Metadata associated with the stored datasets.
        """
        raise NotImplementedError()

    @abstractmethod
    def getStoredItemInfo(self, ref):
        """Retrieve information associated with file stored in this
        `Datastore`.

        Parameters
        ----------
        ref : `DatasetRef`
            The Dataset that is to be queried.

        Returns
        -------
        info : `StoredFileInfo`
            Stored information about this file and its formatter.

        Raises
        ------
        KeyError
            Dataset with that id can not be found.
        """
        raise NotImplementedError()

    @abstractmethod
    def removeStoredItemInfo(self, ref):
        """Remove information about the file associated with this dataset.

        Parameters
        ----------
        ref : `DatasetRef`
            The Dataset that has been removed.
        """
        raise NotImplementedError()

    def _register_datasets(self, refsAndInfos):
        """Update registry to indicate that one or more datasets have been
        stored.

        Parameters
        ----------
        refsAndInfos : sequence `tuple` [`DatasetRef`,
                `StoredDatastoreItemInfo`]
            Datasets to register and the internal datastore metadata
            associated with them.
        """
        expandedRefs = []
        expandedItemInfos = []

        for ref, itemInfo in refsAndInfos:
            # Main dataset.
            expandedRefs.append(ref)
            expandedItemInfos.append(itemInfo)
            # Components (deliberately reusing the parent's item info).
            expandedRefs.extend(ref.components.values())
            expandedItemInfos.extend([itemInfo] * len(ref.components))

        self.registry.insertDatasetLocations(self.name, expandedRefs)
        self.addStoredItemInfo(expandedRefs, expandedItemInfos)

    def _remove_from_registry(self, ref):
        """Remove rows from registry.

        Parameters
        ----------
        ref : `DatasetRef`
            Dataset to remove from registry.
        """
        # Remove the parent dataset and then each of its components,
        # clearing both the location record and the stored item info.
        self.removeStoredItemInfo(ref)
        self.registry.removeDatasetLocation(self.name, ref)
        for compRef in ref.components.values():
            self.registry.removeDatasetLocation(self.name, compRef)
            self.removeStoredItemInfo(compRef)

    def _post_process_get(self, inMemoryDataset, readStorageClass, assemblerParams=None):
        """Given the Python object read from the datastore, manipulate
        it based on the supplied parameters and ensure the Python
        type is correct.

        Parameters
        ----------
        inMemoryDataset : `object`
            Dataset to check.
        readStorageClass: `StorageClass`
            The `StorageClass` used to obtain the assembler and to
            check the python type.
        assemblerParams : `dict`, optional
            Parameters to pass to the assembler. Can be `None`.

        Returns
        -------
        inMemoryDataset : `object`
            The dataset, possibly transformed by the assembler.

        Raises
        ------
        TypeError
            The resulting Python type does not match the storage class
            expectation.
        """
        # Process any left over parameters
        if assemblerParams:
            inMemoryDataset = readStorageClass.assembler().handleParameters(inMemoryDataset,
                                                                            assemblerParams)

        # Validate the returned data type matches the expected data type
        pytype = readStorageClass.pytype
        if pytype and not isinstance(inMemoryDataset, pytype):
            raise TypeError(f"Got Python type {type(inMemoryDataset)} from datastore "
                            f"but expected {pytype}")

        return inMemoryDataset

    def _validate_put_parameters(self, inMemoryDataset, ref):
        """Validate the supplied arguments for put.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Dataset to store.
        ref : `DatasetRef`
            Reference to the associated Dataset.

        Raises
        ------
        TypeError
            The supplied object is not an instance of the storage class
            Python type.
        DatasetTypeNotSupportedError
            The dataset has been rejected by this datastore's constraints
            configuration.
        """
        storageClass = ref.datasetType.storageClass

        # Sanity check
        if not isinstance(inMemoryDataset, storageClass.pytype):
            raise TypeError(f"Inconsistency between supplied object ({type(inMemoryDataset)}) "
                            f"and storage class type ({storageClass.pytype})")

        # Confirm that we can accept this dataset
        if not self.constraints.isAcceptable(ref):
            # Raise rather than use boolean return value.
            raise DatasetTypeNotSupportedError(f"Dataset {ref} has been rejected by this datastore via"
                                               " configuration.")

    def transfer(self, inputDatastore, ref):
        """Retrieve a Dataset from an input `Datastore`,
        and store the result in this `Datastore`.

        Parameters
        ----------
        inputDatastore : `Datastore`
            The external `Datastore` from which to retrieve the Dataset.
        ref : `DatasetRef`
            Reference to the required Dataset in the input data store.
        """
        assert inputDatastore is not self  # unless we want it for renames?
        inMemoryDataset = inputDatastore.get(ref)
        return self.put(inMemoryDataset, ref)