# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = ["DimensionUniverse"]

import math
import pickle
from typing import Optional, Iterable, List, Union, TYPE_CHECKING

from ..config import Config
from ..utils import NamedValueSet, immutable
from .elements import Dimension, DimensionElement, SkyPixDimension
from .graph import DimensionGraph
from .config import processElementsConfig, processSkyPixConfig, DimensionConfig
from .packer import DimensionPackerFactory

if TYPE_CHECKING:  # Imports needed only for type annotations; may be circular.
    from .coordinate import ExpandedDataCoordinate
    from .packer import DimensionPacker


@immutable
class DimensionUniverse(DimensionGraph):
    """A special `DimensionGraph` that constructs and manages a complete set
    of compatible dimensions.

    `DimensionUniverse` is not a class-level singleton, but all instances are
    tracked in a singleton map keyed by the version number in the
    configuration they were loaded from. Because these universes are solely
    responsible for constructing `DimensionElement` instances, those elements
    are also indirectly tracked by that singleton.

    Parameters
    ----------
    config : `Config`, optional
        Configuration describing the dimensions and their relationships. If
        not provided, the default configuration (from
        ``daf_butler/config/dimensions.yaml``) will be loaded.
    """

    _instances = {}
    """Singleton dictionary of all instances, keyed by version.

    For internal use only.
    """

    def __new__(cls, config: Optional[Config] = None) -> DimensionUniverse:
        # Normalize the config and apply defaults.
        config = DimensionConfig(config)

        # First see if an equivalent instance already exists.
        version = config["version"]
        self = cls._instances.get(version)
        if self is not None:
            return self

        # Create the universe instance and add core attributes.
        # We don't want any of what DimensionGraph.__new__ does, so we just go
        # straight to object.__new__. The C++ side of my brain is offended by
        # this, but I think it's the right approach in Python, where we don't
        # have the option of having multiple constructors with different
        # roles.
        self = object.__new__(cls)
        self.universe = self
        self._cache = {}
        self.dimensions = NamedValueSet()
        self.elements = NamedValueSet()

        # Read the skypix dimensions from config.
        skyPixDimensions, self.commonSkyPix = processSkyPixConfig(config["skypix"])
        # Add the skypix dimensions to the universe after sorting
        # lexicographically (no topological sort because skypix dimensions
        # never have any dependencies).
        for name in sorted(skyPixDimensions):
            skyPixDimensions[name]._finish(self, {})

        # Read the other dimension elements from config.
        elementsToDo = processElementsConfig(config["elements"])
        # Add elements to the universe in topological order by identifying at
        # each outer iteration which elements have already had all of their
        # dependencies added.
        while elementsToDo:
            unblocked = [name for name, element in elementsToDo.items()
                         if element._related.dependencies.isdisjoint(elementsToDo.keys())]
            unblocked.sort()  # Break ties lexicographically.
            if not unblocked:
                raise RuntimeError(f"Cycle detected in dimension elements: {elementsToDo.keys()}.")
            for name in unblocked:
                # Finish initialization of the element with steps that depend
                # on those steps already having been run for all dependencies.
                # This includes adding the element to self.elements and
                # (if appropriate) self.dimensions.
                elementsToDo.pop(name)._finish(self, elementsToDo)

        # Add attributes for special subsets of the graph.
        self.empty = DimensionGraph(self, (), conform=False)
        self._finish()

        # Set up factories for dataId packers as defined by config.
        self._packers = {}
        for name, subconfig in config.get("packers", {}).items():
            self._packers[name] = DimensionPackerFactory.fromConfig(universe=self, config=subconfig)

        # Use the version number from the config as a key in the singleton
        # dict containing all instances; that will let us transfer dimension
        # objects between processes using pickle without actually going
        # through real initialization, as long as a universe with the same
        # version has already been constructed in the receiving process.
        self._version = version
        cls._instances[self._version] = self
        return self
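    # Illustrative sketch (not executed here): because instances are cached
    # by configuration version, constructing a universe twice from the same
    # (default) configuration yields the very same object. This only assumes
    # a default configuration is available, as described in the class
    # docstring:
    #
    #     universe = DimensionUniverse()
    #     assert DimensionUniverse() is universe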

    def __repr__(self) -> str:
        return f"DimensionUniverse({self})"

    def extract(self, iterable: Iterable[Union[Dimension, str]]) -> DimensionGraph:
        """Construct a `DimensionGraph` from a possibly-heterogeneous iterable
        of `Dimension` instances and string names thereof.

        Constructing `DimensionGraph` directly from names or dimension
        instances is slightly more efficient when it is known in advance that
        the iterable is not heterogeneous.

        Parameters
        ----------
        iterable : iterable of `Dimension` or `str`
            Dimensions that must be included in the returned graph (their
            dependencies will be as well).

        Returns
        -------
        graph : `DimensionGraph`
            A `DimensionGraph` instance containing all given dimensions.
        """
        names = set()
        for item in iterable:
            try:
                names.add(item.name)
            except AttributeError:
                names.add(item)
        return DimensionGraph(universe=self, names=names)
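    # Illustrative sketch (not executed here). The dimension names are
    # assumptions about the default configuration, not guarantees:
    #
    #     universe = DimensionUniverse()
    #     graph = universe.extract(["visit", universe.dimensions["detector"]])
    #     # Required dependencies (e.g. "instrument") are pulled in as well.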

    def sorted(self, elements: Iterable[DimensionElement], *, reverse=False) -> List[DimensionElement]:
        """Return a sorted version of the given iterable of dimension
        elements.

        The universe's sort order is topological (an element's dependencies
        precede it), starting with skypix dimensions (which never have
        dependencies) and then sorting lexicographically to break ties.

        Parameters
        ----------
        elements : iterable of `DimensionElement`
            Elements to be sorted.
        reverse : `bool`, optional
            If `True`, sort in the opposite order.

        Returns
        -------
        sorted : `list` of `DimensionElement`
            A sorted list containing the same elements that were given.
        """
        s = set(elements)
        result = [element for element in self.elements if element in s or element.name in s]
        if reverse:
            result.reverse()
        return result
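    # Illustrative sketch (not executed here). Passing plain names relies on
    # the name-based membership test in the implementation above; the names
    # themselves are assumptions about the default configuration:
    #
    #     universe = DimensionUniverse()
    #     ordered = universe.sorted(["detector", "instrument"])
    #     # "instrument" comes first, because "detector" depends on it.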

    def makePacker(self, name: str, dataId: ExpandedDataCoordinate) -> DimensionPacker:
        """Construct a `DimensionPacker` that can pack data ID dictionaries
        into unique integers.

        Parameters
        ----------
        name : `str`
            Name of the packer, matching a key in the "packers" section of
            the dimension configuration.
        dataId : `ExpandedDataCoordinate`
            Fully-expanded data ID that identifies at least the "fixed"
            dimensions of the packer (i.e. those that are assumed/given,
            setting the space over which packed integer IDs are unique).

        Returns
        -------
        packer : `DimensionPacker`
            Packer constructed by the factory registered under ``name``.
        """
        return self._packers[name](dataId)
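    # Illustrative sketch (not executed here). The packer name
    # "visit_detector" and both data IDs are hypothetical; they depend
    # entirely on the "packers" section of the dimension configuration in
    # use:
    #
    #     packer = universe.makePacker("visit_detector", fixedDataId)
    #     packedId = packer.pack(fullDataId)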

    def getEncodeLength(self) -> int:
        """Return the size (in bytes) of the encoded form of `DimensionGraph`
        instances in this universe.

        See `DimensionGraph.encode` and `DimensionGraph.decode` for more
        information.
        """
        return math.ceil(len(self.dimensions)/8)
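    # Worked example of the formula above (assuming, purely for illustration,
    # a universe with 30 dimensions): the formula implies one bit per
    # dimension, so the encoding needs ceil(30 / 8) = 4 bytes.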

    @classmethod
    def _unpickle(cls, version: bytes) -> DimensionUniverse:
        """Callable used for unpickling.

        For internal use only.
        """
        try:
            return cls._instances[version]
        except KeyError as err:
            raise pickle.UnpicklingError(
                f"DimensionUniverse with version '{version}' "
                f"not found. Note that DimensionUniverse objects are not "
                f"truly serialized; when using pickle to transfer them "
                f"between processes, an equivalent instance with the same "
                f"version must already exist in the receiving process."
            ) from err

    def __reduce__(self) -> tuple:
        return (self._unpickle, (self._version,))
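    # Illustrative sketch of the pickle round trip described above (not
    # executed here). Unpickling succeeds only because a universe with the
    # same version already exists in the receiving process; here that is the
    # current process:
    #
    #     universe = DimensionUniverse()
    #     assert pickle.loads(pickle.dumps(universe)) is universe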

    # Class attributes below are shadowed by instance attributes, and are
    # present just to hold the docstrings for those instance attributes.

    empty: DimensionGraph
    """The `DimensionGraph` that contains no dimensions (`DimensionGraph`).
    """

    commonSkyPix: SkyPixDimension
    """The special skypix dimension that is used to relate all other spatial
    dimensions in the `Registry` database (`SkyPixDimension`).
    """
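# Illustrative sketch of the special attributes documented above (not
# executed here; assumes a default dimension configuration can be loaded):
#
#     universe = DimensionUniverse()
#     universe.empty          # the graph containing no dimensions
#     universe.commonSkyPix   # skypix dimension relating spatial dimensions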