Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

21 

from __future__ import annotations

__all__ = ["DimensionUniverse"]

import pickle
from typing import Optional, Iterable, List, Union, TYPE_CHECKING

from ..config import Config
from ..utils import NamedValueSet, immutable
from .elements import Dimension, DimensionElement, SkyPixDimension
from .graph import DimensionGraph
from .config import processElementsConfig, processSkyPixConfig, DimensionConfig
from .packer import DimensionPackerFactory

if TYPE_CHECKING:  # Imports needed only for type annotations; may be circular.
    from .coordinate import ExpandedDataCoordinate
    from .packer import DimensionPacker

39 

40 

@immutable
class DimensionUniverse(DimensionGraph):
    """A special `DimensionGraph` that constructs and manages a complete set of
    compatible dimensions.

    `DimensionUniverse` is not a class-level singleton, but all instances are
    tracked in a singleton map keyed by the version number in the configuration
    they were loaded from.  Because these universes are solely responsible for
    constructing `DimensionElement` instances, these are also indirectly
    tracked by that singleton as well.

    Parameters
    ----------
    config : `Config`, optional
        Configuration describing the dimensions and their relationships.  If
        not provided, default configuration (from
        ``daf_butler/config/dimensions.yaml``) will be loaded.
    """

    _instances = {}
    """Singleton dictionary of all instances, keyed by version.

    For internal use only.
    """

    def __new__(cls, config: Optional[Config] = None) -> DimensionUniverse:
        # Normalize the config and apply defaults.
        config = DimensionConfig(config)

        # First see if an equivalent instance already exists.
        version = config["version"]
        self = cls._instances.get(version)
        if self is not None:
            return self

        # Create the universe instance and add core attributes.
        # We don't want any of what DimensionGraph.__new__ does, so we just go
        # straight to object.__new__.  The C++ side of my brain is offended by
        # this, but I think it's the right approach in Python, where we don't
        # have the option of having multiple constructors with different roles.
        self = object.__new__(cls)
        self.universe = self
        self._cache = {}
        self.dimensions = NamedValueSet()
        self.elements = NamedValueSet()

        # Read the skypix dimensions from config.
        skyPixDimensions, self.commonSkyPix = processSkyPixConfig(config["skypix"])
        # Add the skypix dimensions to the universe after sorting
        # lexicographically (no topological sort because skypix dimensions
        # never have any dependencies).
        for name in sorted(skyPixDimensions):
            skyPixDimensions[name]._finish(self, {})

        # Read the other dimension elements from config.
        elementsToDo = processElementsConfig(config["elements"])
        # Add elements to the universe in topological order by identifying at
        # each outer iteration which elements have already had all of their
        # dependencies added.
        while elementsToDo:
            unblocked = [name for name, element in elementsToDo.items()
                         if element._related.dependencies.isdisjoint(elementsToDo.keys())]
            unblocked.sort()  # Break ties lexicographically.
            if not unblocked:
                raise RuntimeError(f"Cycle detected in dimension elements: {elementsToDo.keys()}.")
            for name in unblocked:
                # Finish initialization of the element with steps that
                # depend on those steps already having been run for all
                # dependencies.
                # This includes adding the element to self.elements and
                # (if appropriate) self.dimensions.
                elementsToDo.pop(name)._finish(self, elementsToDo)

        # Add attributes for special subsets of the graph.
        self.empty = DimensionGraph(self, (), conform=False)
        self._finish()

        # Set up factories for dataId packers as defined by config.
        self._packers = {}
        for name, subconfig in config.get("packers", {}).items():
            self._packers[name] = DimensionPackerFactory.fromConfig(universe=self, config=subconfig)

        # Use the version number from the config as a key in the singleton
        # dict containing all instances; that will let us transfer dimension
        # objects between processes using pickle without actually going
        # through real initialization, as long as a universe with the same
        # version has already been constructed in the receiving process.
        self._version = version
        cls._instances[self._version] = self
        return self

    def __repr__(self) -> str:
        return f"DimensionUniverse({self})"

    def extract(self, iterable: Iterable[Union[Dimension, str]]) -> DimensionGraph:
        """Construct a `DimensionGraph` from a possibly-heterogeneous iterable
        of `Dimension` instances and string names thereof.

        Constructing `DimensionGraph` directly from names or dimension
        instances is slightly more efficient when it is known in advance that
        the iterable is not heterogeneous.

        Parameters
        ----------
        iterable: iterable of `Dimension` or `str`
            Dimensions that must be included in the returned graph (their
            dependencies will be as well).

        Returns
        -------
        graph : `DimensionGraph`
            A `DimensionGraph` instance containing all given dimensions.
        """
        names = set()
        for item in iterable:
            try:
                # EAFP: treat the item as a Dimension first; fall back to
                # assuming it is already a string name.
                names.add(item.name)
            except AttributeError:
                names.add(item)
        return DimensionGraph(universe=self, names=names)

    def sorted(self, elements: Iterable[DimensionElement], *,
               reverse: bool = False) -> List[DimensionElement]:
        """Return a sorted version of the given iterable of dimension elements.

        The universe's sort order is topological (an element's dependencies
        precede it), starting with skypix dimensions (which never have
        dependencies) and then sorting lexicographically to break ties.

        Parameters
        ----------
        elements : iterable of `DimensionElement`.
            Elements to be sorted.
        reverse : `bool`, optional
            If `True`, sort in the opposite order.

        Returns
        -------
        sorted : `list` of `DimensionElement`
            A sorted list containing the same elements that were given.
        """
        s = set(elements)
        # self.elements is already in the universe's canonical order, so
        # filtering it yields the input elements in sorted order.  Accept
        # either element instances or their string names in ``elements``.
        result = [element for element in self.elements if element in s or element.name in s]
        if reverse:
            result.reverse()
        return result

    def makePacker(self, name: str, dataId: ExpandedDataCoordinate) -> DimensionPacker:
        """Construct a `DimensionPacker` that can pack data ID dictionaries
        into unique integers.

        Parameters
        ----------
        name : `str`
            Name of the packer, matching a key in the "packers" section of the
            dimension configuration.
        dataId : `ExpandedDataCoordinate`
            Fully-expanded data ID that identifies at least the "fixed"
            dimensions of the packer (i.e. those that are assumed/given,
            setting the space over which packed integer IDs are unique).

        Returns
        -------
        packer : `DimensionPacker`
            Packer instance constructed by the factory registered under
            ``name``.

        Raises
        ------
        KeyError
            Raised if ``name`` does not match a configured packer.
        """
        return self._packers[name](dataId)

    @classmethod
    def _unpickle(cls, version: bytes) -> DimensionUniverse:
        """Callable used for unpickling.

        For internal use only.
        """
        try:
            return cls._instances[version]
        except KeyError as err:
            raise pickle.UnpicklingError(
                f"DimensionUniverse with version '{version}' "
                f"not found. Note that DimensionUniverse objects are not "
                f"truly serialized; when using pickle to transfer them "
                f"between processes, an equivalent instance with the same "
                f"version must already exist in the receiving process."
            ) from err

    def __reduce__(self) -> tuple:
        # Pickle only the version key; _unpickle looks up the existing
        # singleton in the receiving process instead of reconstructing.
        return (self._unpickle, (self._version,))

    # Class attributes below are shadowed by instance attributes, and are
    # present just to hold the docstrings for those instance attributes.

    empty: DimensionGraph
    """The `DimensionGraph` that contains no dimensions (`DimensionGraph`).
    """

    commonSkyPix: SkyPixDimension
    """The special skypix dimension that is used to relate all other spatial
    dimensions in the `Registry` database (`SkyPixDimension`).
    """