# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Generic datastore code useful for most datastores.""" 

 

__all__ = ("GenericBaseDatastore", ) 

 

import logging 

from abc import abstractmethod 

 

from lsst.daf.butler import Datastore, DatasetTypeNotSupportedError 

 

log = logging.getLogger(__name__) 

 

 

class GenericBaseDatastore(Datastore): 

"""Methods useful for most implementations of a `Datastore`. 

 

Should always be sub-classed since key abstract methods are missing. 

""" 

 

    @abstractmethod
    def addStoredItemInfo(self, refs, infos):
        """Record internal storage information associated with one or more
        datasets.

        Parameters
        ----------
        refs : sequence of `DatasetRef`
            The datasets that have been stored.
        infos : sequence of `StoredDatastoreItemInfo`
            Metadata associated with the stored datasets.
        """
        raise NotImplementedError()

    @abstractmethod
    def getStoredItemInfo(self, ref):
        """Retrieve information associated with a file stored in this
        `Datastore`.

        Parameters
        ----------
        ref : `DatasetRef`
            The Dataset that is to be queried.

        Returns
        -------
        info : `StoredFileInfo`
            Stored information about this file and its formatter.

        Raises
        ------
        KeyError
            Dataset with that id cannot be found.
        """
        raise NotImplementedError()

    @abstractmethod
    def removeStoredItemInfo(self, ref):
        """Remove information about the file associated with this dataset.

        Parameters
        ----------
        ref : `DatasetRef`
            The Dataset that has been removed.
        """
        raise NotImplementedError()
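
    # A minimal sketch of how a concrete subclass might back these three
    # hooks with an in-memory dict (hypothetical code; assumes `ref.id` is a
    # unique, hashable identifier and that ``__init__`` sets
    # ``self._itemInfo = {}``):
    #
    #     def addStoredItemInfo(self, refs, infos):
    #         for ref, info in zip(refs, infos):
    #             self._itemInfo[ref.id] = info
    #
    #     def getStoredItemInfo(self, ref):
    #         return self._itemInfo[ref.id]  # raises KeyError as documented
    #
    #     def removeStoredItemInfo(self, ref):
    #         del self._itemInfo[ref.id]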

    def _register_datasets(self, refsAndInfos):
        """Update registry to indicate that one or more datasets have been
        stored.

        Parameters
        ----------
        refsAndInfos : sequence of `tuple` [`DatasetRef`, `StoredDatastoreItemInfo`]
            Datasets to register and the internal datastore metadata
            associated with them.
        """
        expandedRefs = []
        expandedItemInfos = []

        for ref, itemInfo in refsAndInfos:
            # Main dataset.
            expandedRefs.append(ref)
            expandedItemInfos.append(itemInfo)
            # Components (using the same item info).
            expandedRefs.extend(ref.components.values())
            expandedItemInfos.extend([itemInfo] * len(ref.components))

        self.registry.insertDatasetLocations(self.name, expandedRefs)
        self.addStoredItemInfo(expandedRefs, expandedItemInfos)
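
    # Call-site sketch (hypothetical ``info`` object): a concrete `put`
    # implementation would typically finish with
    #
    #     self._register_datasets([(ref, info)])
    #
    # which records a location for ``ref`` and for each of its components in
    # both the registry and the datastore's own item-info records.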

    def _remove_from_registry(self, ref):
        """Remove rows from registry.

        Parameters
        ----------
        ref : `DatasetRef`
            Dataset to remove from registry.
        """
        self.removeStoredItemInfo(ref)
        self.registry.removeDatasetLocation(self.name, ref)
        for compRef in ref.components.values():
            self.registry.removeDatasetLocation(self.name, compRef)
            self.removeStoredItemInfo(compRef)

    def _post_process_get(self, inMemoryDataset, readStorageClass, assemblerParams=None):
        """Given the Python object read from the datastore, manipulate
        it based on the supplied parameters and ensure the Python
        type is correct.

        Parameters
        ----------
        inMemoryDataset : `object`
            Dataset to check.
        readStorageClass : `StorageClass`
            The `StorageClass` used to obtain the assembler and to
            check the Python type.
        assemblerParams : `dict`
            Parameters to pass to the assembler. Can be `None`.

        Returns
        -------
        inMemoryDataset : `object`
            The dataset, after any assembler parameters have been applied.
        """
        # Process any leftover parameters
        if assemblerParams:
            inMemoryDataset = readStorageClass.assembler().handleParameters(inMemoryDataset, assemblerParams)

        # Validate the returned data type matches the expected data type
        pytype = readStorageClass.pytype
        if pytype and not isinstance(inMemoryDataset, pytype):
            raise TypeError("Got Python type {} from datastore but expected {}".format(type(inMemoryDataset),
                                                                                       pytype))

        return inMemoryDataset
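
    # Worked sketch of the flow above (the ``subset`` parameter is
    # hypothetical; real parameter names depend on the storage class):
    #
    #     data = self._post_process_get(data, readStorageClass,
    #                                   assemblerParams={"subset": ("a", "b")})
    #
    # Any leftover parameters are applied through the storage class assembler,
    # and `TypeError` is raised if the result is no longer an instance of
    # ``readStorageClass.pytype``.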

    def _validate_put_parameters(self, inMemoryDataset, ref):
        """Validate the supplied arguments for put.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Dataset to store.
        ref : `DatasetRef`
            Reference to the associated Dataset.
        """
        storageClass = ref.datasetType.storageClass

        # Sanity check
        if not isinstance(inMemoryDataset, storageClass.pytype):
            raise TypeError("Inconsistency between supplied object ({}) "
                            "and storage class type ({})".format(type(inMemoryDataset),
                                                                 storageClass.pytype))

        # Confirm that we can accept this dataset
        if not self.constraints.isAcceptable(ref):
            # Raise rather than use boolean return value.
            raise DatasetTypeNotSupportedError(f"Dataset {ref} has been rejected by this datastore via"
                                               " configuration.")

        return
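
    # Call-site sketch: concrete `put` implementations are expected to begin
    # with
    #
    #     self._validate_put_parameters(inMemoryDataset, ref)
    #
    # so that a type mismatch raises `TypeError` and a dataset rejected by the
    # configured constraints raises `DatasetTypeNotSupportedError` instead of
    # being silently dropped.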

    def transfer(self, inputDatastore, ref):
        """Retrieve a Dataset from an input `Datastore`,
        and store the result in this `Datastore`.

        Parameters
        ----------
        inputDatastore : `Datastore`
            The external `Datastore` from which to retrieve the Dataset.
        ref : `DatasetRef`
            Reference to the required Dataset in the input data store.
        """
        assert inputDatastore is not self  # unless we want it for renames?
        inMemoryDataset = inputDatastore.get(ref)
        return self.put(inMemoryDataset, ref)
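
# Usage sketch for ``transfer`` (hypothetical wiring; ``sourceDatastore`` and
# ``targetDatastore`` stand in for two concrete datastore instances sharing a
# registry):
#
#     targetDatastore.transfer(sourceDatastore, ref)
#
# The default implementation materializes the dataset in memory via ``get``
# and writes it back via ``put``; subclasses may override this with a more
# direct copy.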