Coverage for python/lsst/daf/persistence/mapper.py: 37% (50 statements)

#!/usr/bin/env python

#
# LSST Data Management System
# Copyright 2008, 2009, 2010 LSST Corporation.
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program. If not,
# see <http://www.lsstcorp.org/LegalNotices/>.
#
"""This module defines the Mapper base class."""

from . import Policy
from .deprecation import deprecate_class


@deprecate_class
class Mapper:
    """Mapper is a base class for all mappers.

    Subclasses may define the following methods:

    map_{datasetType}(self, dataId, write)
        Map a dataset id for the given dataset type into a ButlerLocation.
        If write=True, this mapping is for an output dataset.

    query_{datasetType}(self, format, dataId)
        Return the possible values for the format fields that would produce
        datasets in combination with the provided partial dataId.

    std_{datasetType}(self, item)
        Standardize an object of the given dataset type.

    Methods that must be overridden:

    keys(self)
        Return a list of the keys that can be used in data ids.

    Other public methods:

    __init__(self)

    getDatasetTypes(self)

    map(self, datasetType, dataId, write=False)

    queryMetadata(self, datasetType, format, dataId)

    canStandardize(self, datasetType)

    standardize(self, datasetType, item, dataId)

    validate(self, dataId)
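
    As an illustration only (not part of the API), a minimal subclass for a
    hypothetical 'raw' dataset type might look like this, with the
    ButlerLocation construction elided:

        class MinimalMapper(Mapper):
            def keys(self):
                return ['visit', 'ccd']

            def map_raw(self, dataId, write):
                # Build and return a ButlerLocation for this dataId;
                # details depend on the concrete storage layout.
                ...

            def query_raw(self, format, dataId):
                # Return possible values for the requested format fields.
                ...

            def std_raw(self, item):
                # Post-process the raw item before returning it.
                return item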

68 """ 

69 

70 @staticmethod 

71 def Mapper(cfg): 

72 '''Instantiate a Mapper from a configuration. 

73 In come cases the cfg may have already been instantiated into a Mapper, this is allowed and 

74 the input var is simply returned. 

75 

76 :param cfg: the cfg for this mapper. It is recommended this be created by calling 

77 Mapper.cfg() 

78 :return: a Mapper instance 
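
        A usage sketch (MyMapper is hypothetical, and this assumes a Policy
        can be built here from a dict whose 'cls' entry names the Mapper
        subclass to instantiate):

            cfg = Policy({'cls': MyMapper})
            mapper = Mapper.Mapper(cfg)       # calls MyMapper(cfg)
            same = Mapper.Mapper(mapper)      # already a Mapper: returned as-is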

        '''
        if isinstance(cfg, Policy):
            return cfg['cls'](cfg)
        return cfg

    def __new__(cls, *args, **kwargs):
        """Create a new Mapper, saving arguments for pickling.

        This is in __new__ instead of __init__ to save the user
        from having to save the arguments themselves (either explicitly,
        or by calling the super's __init__ with all their
        *args, **kwargs). The resulting pickling system (of __new__,
        __getstate__, and __setstate__) is similar to how __reduce__
        is usually used, except that we save the user from any
        responsibility (except when overriding __new__, but that
        is not common).
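
        A sketch of the effect, for a hypothetical subclass:

            import pickle

            class PathMapper(Mapper):
                def __init__(self, root):
                    self.root = root

            m = PathMapper('/data/repo')
            m2 = pickle.loads(pickle.dumps(m))
            # __getstate__ captured (args, kwargs) = (('/data/repo',), {})
            # and __setstate__ re-ran __init__ with them, so:
            assert m2.root == '/data/repo'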

95 """ 

96 self = super().__new__(cls) 

97 self._arguments = (args, kwargs) 

98 return self 

99 

100 def __init__(self, **kwargs): 

101 pass 

102 

103 def __getstate__(self): 

104 return self._arguments 

105 

106 def __setstate__(self, state): 

107 self._arguments = state 

108 args, kwargs = state 

109 self.__init__(*args, **kwargs) 

110 

111 def keys(self): 

112 raise NotImplementedError("keys() unimplemented") 

113 

114 def queryMetadata(self, datasetType, format, dataId): 

115 """Get possible values for keys given a partial data id. 

116 

117 :param datasetType: see documentation about the use of datasetType 

118 :param key: this is used as the 'level' parameter 

119 :param format: 

120 :param dataId: see documentation about the use of dataId 

121 :return: 
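
        Dispatch sketch (hypothetical 'raw' dataset type): a call such as

            mapper.queryMetadata('raw', ['visit'], {'ccd': 1})

        is forwarded to self.query_raw(['visit'], dataId) after the dataId
        has passed through self.validate().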

122 """ 

123 func = getattr(self, 'query_' + datasetType) 

124 

125 val = func(format, self.validate(dataId)) 

126 return val 

127 

128 def getDatasetTypes(self): 

129 """Return a list of the mappable dataset types.""" 

130 

131 list = [] 

132 for attr in dir(self): 

133 if attr.startswith("map_"): 

134 list.append(attr[4:]) 

135 return list 

136 

137 def map(self, datasetType, dataId, write=False): 

138 """Map a data id using the mapping method for its dataset type. 

139 

140 Parameters 

141 ---------- 

142 datasetType : string 

143 The datasetType to map 

144 dataId : DataId instance 

145 The dataId to use when mapping 

146 write : bool, optional 

147 Indicates if the map is being performed for a read operation 

148 (False) or a write operation (True) 

149 

150 Returns 

151 ------- 

152 ButlerLocation or a list of ButlerLocation 

153 The location(s) found for the map operation. If write is True, a 

154 list is returned. If write is False a single ButlerLocation is 

155 returned. 

156 

157 Raises 

158 ------ 

159 NoResults 

160 If no locaiton was found for this map operation, the derived mapper 

161 class may raise a lsst.daf.persistence.NoResults exception. Butler 

162 catches this and will look in the next Repository if there is one. 
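
        For example (hypothetical dataset type and keys), a read-side call

            loc = mapper.map('raw', {'visit': 1, 'ccd': 2})

        dispatches to self.map_raw({'visit': 1, 'ccd': 2}, False) after the
        dataId has passed through self.validate().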

163 """ 

164 func = getattr(self, 'map_' + datasetType) 

165 return func(self.validate(dataId), write) 

166 

167 def canStandardize(self, datasetType): 

168 """Return true if this mapper can standardize an object of the given 

169 dataset type.""" 

170 

171 return hasattr(self, 'std_' + datasetType) 

172 

173 def standardize(self, datasetType, item, dataId): 

174 """Standardize an object using the standardization method for its data 

175 set type, if it exists.""" 

176 

177 if hasattr(self, 'std_' + datasetType): 

178 func = getattr(self, 'std_' + datasetType) 

179 return func(item, self.validate(dataId)) 

180 return item 

181 

182 def validate(self, dataId): 

183 """Validate a dataId's contents. 

184 

185 If the dataId is valid, return it. If an invalid component can be 

186 transformed into a valid one, copy the dataId, fix the component, and 

187 return the copy. Otherwise, raise an exception.""" 

188 

189 return dataId 

190 

191 def backup(self, datasetType, dataId): 

192 """Rename any existing object with the given type and dataId. 

193 

194 Not implemented in the base mapper. 

195 """ 

196 raise NotImplementedError("Base-class Mapper does not implement backups") 

197 

198 def getRegistry(self): 

199 """Get the registry""" 

200 return None