Coverage for python/lsst/skymap/packers.py : 24%

Hot-keys on this page
r m x p toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
# This file is part of skymap.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
# Public API of this module.
__all__ = ("SkyMapDimensionPacker",)

from lsst.daf.butler import DimensionPacker, ExpandedDataCoordinate, DimensionGraph, DataCoordinate
class SkyMapDimensionPacker(DimensionPacker):
    """A `DimensionPacker` for tract, patch and optionally abstract_filter,
    given a SkyMap.

    Parameters
    ----------
    fixed : `lsst.daf.butler.ExpandedDataCoordinate`
        Expanded data ID that must include at least the skymap dimension.
    dimensions : `lsst.daf.butler.DimensionGraph`
        The dimensions of data IDs packed by this instance.  Must include
        skymap, tract, and patch, and may include abstract_filter.
    """

    SUPPORTED_FILTERS = [None] + list("ugrizyUBGVRIZYJHK")  # split string into single chars
    """abstract_filter names supported by this packer.

    New filters should be added to the end of the list to maximize
    compatibility with existing IDs.
    """

    @classmethod
    def getIntFromFilter(cls, name):
        """Return an integer that represents the abstract_filter with the given
        name.

        Parameters
        ----------
        name : `str` or `None`
            abstract_filter name; must be a member of `SUPPORTED_FILTERS`.

        Returns
        -------
        num : `int`
            Index of ``name`` in `SUPPORTED_FILTERS`.

        Raises
        ------
        NotImplementedError
            Raised if ``name`` is not in `SUPPORTED_FILTERS`.
        """
        try:
            return cls.SUPPORTED_FILTERS.index(name)
        except ValueError as err:
            # Chain the original lookup failure so the cause is visible in
            # tracebacks instead of being silently discarded.
            raise NotImplementedError(
                f"abstract_filter '{name}' not supported by this ID packer."
            ) from err

    @classmethod
    def getFilterNameFromInt(cls, num):
        """Return an abstract_filter name from its integer representation.

        Parameters
        ----------
        num : `int`
            Index into `SUPPORTED_FILTERS`, as produced by
            `getIntFromFilter`.

        Returns
        -------
        name : `str` or `None`
            The abstract_filter name at that index.
        """
        return cls.SUPPORTED_FILTERS[num]

    @classmethod
    def getMaxIntForFilters(cls):
        """Return the exclusive upper bound on integers used to represent
        abstract_filters (i.e. ``len(SUPPORTED_FILTERS)``).
        """
        return len(cls.SUPPORTED_FILTERS)

    @classmethod
    def configure(cls, dimensions):
        # Docstring inherited from DataIdPacker.configure
        assert dimensions.given == ["skymap"]
        assert dimensions.required.issuperset(["tract", "patch"])
        # Per-skymap metadata fields needed to size the packed ID fields.
        metadata = {"skymap": ["tract_max", "patch_nx_max", "patch_ny_max"]}
        kwds = {}
        return metadata, kwds

    def __init__(self, fixed: ExpandedDataCoordinate, dimensions: DimensionGraph):
        super().__init__(fixed, dimensions)
        record = fixed.records["skymap"]
        self._skyMapName = record.name
        # Number of distinct patch indices within one tract; the packed ID is
        # patch + patchMax*tract (+ filter*tractPatchMax when filters are in
        # the dimensions).
        self._patchMax = record.patch_nx_max * record.patch_ny_max
        self._tractPatchMax = self._patchMax * record.tract_max
        if "abstract_filter" in dimensions:
            self._filterMax = self.getMaxIntForFilters()
        else:
            # No filter dimension: pack only tract and patch.
            self._filterMax = None

    @property
    def maxBits(self) -> int:
        # Docstring inherited from DataIdPacker.maxBits
        packedMax = self._tractPatchMax
        if self._filterMax is not None:
            packedMax *= self._filterMax
        # NOTE: this uses the exclusive bound's bit length, which can be one
        # bit more than strictly necessary; kept as-is so existing packed IDs
        # remain valid.
        return packedMax.bit_length()

    def _pack(self, dataId: DataCoordinate) -> int:
        # Docstring inherited from DataIdPacker.pack
        packed = dataId["patch"] + self._patchMax * dataId["tract"]
        if self._filterMax is not None:
            packed += self.getIntFromFilter(dataId["abstract_filter"]) * self._tractPatchMax
        return packed

    def unpack(self, packedId: int) -> DataCoordinate:
        # Docstring inherited from DataIdPacker.unpack
        d = {"skymap": self._skyMapName}
        if self._filterMax is not None:
            # High-order part of the ID encodes the filter; strip it off
            # before decoding tract and patch.
            d["abstract_filter"] = self.getFilterNameFromInt(packedId // self._tractPatchMax)
            packedId %= self._tractPatchMax
        d["tract"] = packedId // self._patchMax
        d["patch"] = packedId % self._patchMax
        return DataCoordinate.standardize(d, graph=self.dimensions)