Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

#!/usr/bin/env python

#
# LSST Data Management System
# Copyright 2008, 2009, 2010 LSST Corporation.
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program.  If not,
# see <http://www.lsstcorp.org/LegalNotices/>.
#

"""This module defines the Mapper base class."""

from . import Policy

27 

28 

29class Mapper: 

30 """Mapper is a base class for all mappers. 

31 

32 Subclasses may define the following methods: 

33 

34 map_{datasetType}(self, dataId, write) 

35 Map a dataset id for the given dataset type into a ButlerLocation. 

36 If write=True, this mapping is for an output dataset. 

37 

38 query_{datasetType}(self, key, format, dataId) 

39 Return the possible values for the format fields that would produce 

40 datasets at the granularity of key in combination with the provided 

41 partial dataId. 

42 

43 std_{datasetType}(self, item) 

44 Standardize an object of the given data set type. 

45 

46 Methods that must be overridden: 

47 

48 keys(self) 

49 Return a list of the keys that can be used in data ids. 

50 

51 Other public methods: 

52 

53 __init__(self) 

54 

55 getDatasetTypes(self) 

56 

57 map(self, datasetType, dataId, write=False) 

58 

59 queryMetadata(self, datasetType, key, format, dataId) 

60 

61 canStandardize(self, datasetType) 

62 

63 standardize(self, datasetType, item, dataId) 

64 

65 validate(self, dataId) 

66 """ 

67 

68 @staticmethod 

69 def Mapper(cfg): 

70 '''Instantiate a Mapper from a configuration. 

71 In come cases the cfg may have already been instantiated into a Mapper, this is allowed and 

72 the input var is simply returned. 

73 

74 :param cfg: the cfg for this mapper. It is recommended this be created by calling 

75 Mapper.cfg() 

76 :return: a Mapper instance 

77 ''' 

78 if isinstance(cfg, Policy): 

79 return cfg['cls'](cfg) 

80 return cfg 

81 

82 def __new__(cls, *args, **kwargs): 

83 """Create a new Mapper, saving arguments for pickling. 

84 

85 This is in __new__ instead of __init__ to save the user 

86 from having to save the arguments themselves (either explicitly, 

87 or by calling the super's __init__ with all their 

88 *args,**kwargs. The resulting pickling system (of __new__, 

89 __getstate__ and __setstate__ is similar to how __reduce__ 

90 is usually used, except that we save the user from any 

91 responsibility (except when overriding __new__, but that 

92 is not common). 

93 """ 

94 self = super().__new__(cls) 

95 self._arguments = (args, kwargs) 

96 return self 

97 

98 def __init__(self, **kwargs): 

99 pass 

100 

101 def __getstate__(self): 

102 return self._arguments 

103 

104 def __setstate__(self, state): 

105 self._arguments = state 

106 args, kwargs = state 

107 self.__init__(*args, **kwargs) 

108 

109 def keys(self): 

110 raise NotImplementedError("keys() unimplemented") 

111 

112 def queryMetadata(self, datasetType, format, dataId): 

113 """Get possible values for keys given a partial data id. 

114 

115 :param datasetType: see documentation about the use of datasetType 

116 :param key: this is used as the 'level' parameter 

117 :param format: 

118 :param dataId: see documentation about the use of dataId 

119 :return: 

120 """ 

121 func = getattr(self, 'query_' + datasetType) 

122 

123 val = func(format, self.validate(dataId)) 

124 return val 

125 

126 def getDatasetTypes(self): 

127 """Return a list of the mappable dataset types.""" 

128 

129 list = [] 

130 for attr in dir(self): 

131 if attr.startswith("map_"): 

132 list.append(attr[4:]) 

133 return list 

134 

135 def map(self, datasetType, dataId, write=False): 

136 """Map a data id using the mapping method for its dataset type. 

137 

138 Parameters 

139 ---------- 

140 datasetType : string 

141 The datasetType to map 

142 dataId : DataId instance 

143 The dataId to use when mapping 

144 write : bool, optional 

145 Indicates if the map is being performed for a read operation 

146 (False) or a write operation (True) 

147 

148 Returns 

149 ------- 

150 ButlerLocation or a list of ButlerLocation 

151 The location(s) found for the map operation. If write is True, a 

152 list is returned. If write is False a single ButlerLocation is 

153 returned. 

154 

155 Raises 

156 ------ 

157 NoResults 

158 If no locaiton was found for this map operation, the derived mapper 

159 class may raise a lsst.daf.persistence.NoResults exception. Butler 

160 catches this and will look in the next Repository if there is one. 

161 """ 

162 func = getattr(self, 'map_' + datasetType) 

163 return func(self.validate(dataId), write) 

164 

165 def canStandardize(self, datasetType): 

166 """Return true if this mapper can standardize an object of the given 

167 dataset type.""" 

168 

169 return hasattr(self, 'std_' + datasetType) 

170 

171 def standardize(self, datasetType, item, dataId): 

172 """Standardize an object using the standardization method for its data 

173 set type, if it exists.""" 

174 

175 if hasattr(self, 'std_' + datasetType): 

176 func = getattr(self, 'std_' + datasetType) 

177 return func(item, self.validate(dataId)) 

178 return item 

179 

180 def validate(self, dataId): 

181 """Validate a dataId's contents. 

182 

183 If the dataId is valid, return it. If an invalid component can be 

184 transformed into a valid one, copy the dataId, fix the component, and 

185 return the copy. Otherwise, raise an exception.""" 

186 

187 return dataId 

188 

189 def backup(self, datasetType, dataId): 

190 """Rename any existing object with the given type and dataId. 

191 

192 Not implemented in the base mapper. 

193 """ 

194 raise NotImplementedError("Base-class Mapper does not implement backups") 

195 

196 def getRegistry(self): 

197 """Get the registry""" 

198 return None