Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1

2

3

4

5

6

7

8

9

10

11

12

13

14

15

16

17

18

19

20

21

22

23

24

25

26

27

28

29

30

31

32

33

34

35

36

37

38

39

40

41

42

43

44

45

46

47

48

49

50

51

52

53

54

55

56

57

58

59

60

61

62

63

64

65

66

67

68

69

70

71

72

73

74

75

76

77

78

79

80

81

82

83

84

85

86

87

88

89

90

91

92

93

94

95

96

97

98

99

100

101

102

103

104

105

106

107

108

109

110

# This file is part of skymap. 

# 

# Developed for the LSST Data Management System. 

# This product includes software developed by the LSST Project 

# (http://www.lsst.org). 

# See the COPYRIGHT file at the top-level directory of this distribution 

# for details of code ownership. 

# 

# This program is free software: you can redistribute it and/or modify 

# it under the terms of the GNU General Public License as published by 

# the Free Software Foundation, either version 3 of the License, or 

# (at your option) any later version. 

# 

# This program is distributed in the hope that it will be useful, 

# but WITHOUT ANY WARRANTY; without even the implied warranty of 

# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

# GNU General Public License for more details. 

# 

# You should have received a copy of the GNU General Public License 

# along with this program. If not, see <http://www.gnu.org/licenses/>. 

 

# Public API of this module: only the dimension packer class is exported.
__all__ = ("SkyMapDimensionPacker",)

 

from lsst.daf.butler import DimensionPacker, ExpandedDataCoordinate, DimensionGraph, DataCoordinate 

 

 

class SkyMapDimensionPacker(DimensionPacker):
    """A `DimensionPacker` for tract, patch and optionally abstract_filter,
    given a SkyMap.

    Packed IDs are laid out as ``patch + patchMax*tract`` with an optional
    high-order ``filterInt*tractPatchMax`` term when abstract_filter is part
    of the packed dimensions.

    Parameters
    ----------
    fixed : `lsst.daf.butler.ExpandedDataCoordinate`
        Expanded data ID that must include at least the skymap dimension.
    dimensions : `lsst.daf.butler.DimensionGraph`
        The dimensions of data IDs packed by this instance.  Must include
        skymap, tract, and patch, and may include abstract_filter.
    """

    SUPPORTED_FILTERS = [None] + list("ugrizyUBGVRIZYJHK")  # split string into single chars
    """abstract_filter names supported by this packer.

    New filters should be added to the end of the list to maximize
    compatibility with existing IDs.
    """

    @classmethod
    def getIntFromFilter(cls, name):
        """Return an integer that represents the abstract_filter with the given
        name.
        """
        # The integer is simply the filter's position in SUPPORTED_FILTERS.
        if name not in cls.SUPPORTED_FILTERS:
            raise NotImplementedError(f"abstract_filter '{name}' not supported by this ID packer.")
        return cls.SUPPORTED_FILTERS.index(name)

    @classmethod
    def getFilterNameFromInt(cls, num):
        """Return an abstract_filter name from its integer representation.
        """
        # Inverse of getIntFromFilter: look the name up by position.
        return cls.SUPPORTED_FILTERS[num]

    @classmethod
    def getMaxIntForFilters(cls):
        # Exclusive upper bound on the integers getIntFromFilter can return.
        return len(cls.SUPPORTED_FILTERS)

    @classmethod
    def configure(cls, dimensions):
        # Docstring inherited from DataIdPacker.configure
        assert dimensions.given == ["skymap"]
        assert dimensions.required.issuperset(["tract", "patch"])
        # Metadata fields needed from the skymap record, plus (empty) extra
        # constructor keyword arguments.
        return {"skymap": ["tract_max", "patch_nx_max", "patch_ny_max"]}, {}

    def __init__(self, fixed: ExpandedDataCoordinate, dimensions: DimensionGraph):
        super().__init__(fixed, dimensions)
        skyMapRecord = fixed.records["skymap"]
        self._skyMapName = skyMapRecord.name
        # Exclusive bounds used as the mixed-radix digits of the packed ID.
        self._patchMax = skyMapRecord.patch_nx_max * skyMapRecord.patch_ny_max
        self._tractPatchMax = self._patchMax * skyMapRecord.tract_max
        # Only reserve filter bits when abstract_filter is actually packed.
        self._filterMax = self.getMaxIntForFilters() if "abstract_filter" in dimensions else None

    @property
    def maxBits(self) -> int:
        # Docstring inherited from DataIdPacker.maxBits
        upperBound = self._tractPatchMax
        if self._filterMax is not None:
            upperBound = upperBound * self._filterMax
        return upperBound.bit_length()

    def _pack(self, dataId: DataCoordinate) -> int:
        # Docstring inherited from DataIdPacker.pack
        packedIndex = self._patchMax * dataId["tract"] + dataId["patch"]
        if self._filterMax is not None:
            packedIndex += self._tractPatchMax * self.getIntFromFilter(dataId["abstract_filter"])
        return packedIndex

    def unpack(self, packedId: int) -> DataCoordinate:
        # Docstring inherited from DataIdPacker.unpack
        values = {"skymap": self._skyMapName}
        if self._filterMax is not None:
            # Peel off the high-order filter digit first.
            filterInt, packedId = divmod(packedId, self._tractPatchMax)
            values["abstract_filter"] = self.getFilterNameFromInt(filterInt)
        values["tract"], values["patch"] = divmod(packedId, self._patchMax)
        return DataCoordinate.standardize(values, graph=self.dimensions)