Coverage for python/lsst/obs/subaru/_instrument.py: 22%
130 statements
« prev ^ index » next coverage.py v7.2.1, created at 2023-03-12 03:48 -0700
1# This file is part of obs_subaru.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <http://www.gnu.org/licenses/>.
22"""Gen3 Butler registry declarations for Hyper Suprime-Cam.
23"""
25__all__ = ("HyperSuprimeCam",)
27import os
28import pickle
29import logging
31from functools import lru_cache
33import astropy.time
34from lsst.utils import getPackageDir
35from lsst.afw.cameraGeom import makeCameraFromPath, CameraConfig
36from lsst.daf.butler import (DatasetType, DataCoordinate, FileDataset, DatasetRef,
37 CollectionType, Timespan)
38from lsst.daf.butler.core.utils import getFullTypeName
39from lsst.obs.base import Instrument
40from lsst.obs.base.gen2to3 import TranslatorFactory, PhysicalFilterToBandKeyHandler
42from ..hsc.hscPupil import HscPupilFactory
43from ..hsc.hscFilters import HSC_FILTER_DEFINITIONS
44from ..hsc.makeTransmissionCurves import (getSensorTransmission, getOpticsTransmission,
45 getFilterTransmission, getAtmosphereTransmission)
46from .strayLight.formatter import SubaruStrayLightDataFormatter
48log = logging.getLogger(__name__)
class HyperSuprimeCam(Instrument):
    """Gen3 Butler specialization class for Subaru's Hyper Suprime-Cam.
    """

    # Name of the per-instrument config subdirectory; also used as a
    # subdirectory name when locating camera and kernel data files below.
    policyName = "hsc"
    # Name of the data package holding this instrument's curated calibrations.
    obsDataPackage = "obs_subaru_data"
    # Filter definitions shared with the rest of the HSC-specific code.
    filterDefinitions = HSC_FILTER_DEFINITIONS
    # Dataset types (beyond the standard curated set) written by
    # writeAdditionalCuratedCalibrations / ingestStrayLightData below.
    additionalCuratedDatasetTypes = ("bfKernel", "transmission_optics", "transmission_sensor",
                                     "transmission_filter", "transmission_atmosphere", "yBackground")
61 def __init__(self, **kwargs):
62 super().__init__(**kwargs)
63 packageDir = getPackageDir("obs_subaru")
64 self.configPaths = [os.path.join(packageDir, "config"),
65 os.path.join(packageDir, "config", self.policyName)]
67 @classmethod
68 def getName(cls):
69 # Docstring inherited from Instrument.getName
70 return "HSC"
72 def register(self, registry, update=False):
73 # Docstring inherited from Instrument.register
74 camera = self.getCamera()
75 # The maximum values below make Gen3's ObservationDataIdPacker produce
76 # outputs that match Gen2's ccdExposureId.
77 obsMax = 21474800
78 with registry.transaction():
79 registry.syncDimensionData(
80 "instrument",
81 {
82 "name": self.getName(),
83 "detector_max": 200,
84 "visit_max": obsMax,
85 "exposure_max": obsMax,
86 "class_name": getFullTypeName(self),
87 },
88 update=update
89 )
90 for detector in camera:
91 registry.syncDimensionData(
92 "detector",
93 {
94 "instrument": self.getName(),
95 "id": detector.getId(),
96 "full_name": detector.getName(),
97 # TODO: make sure these definitions are consistent with
98 # those extracted by astro_metadata_translator, and
99 # test that they remain consistent somehow.
100 "name_in_raft": detector.getName().split("_")[1],
101 "raft": detector.getName().split("_")[0],
102 "purpose": str(detector.getType()).split(".")[-1],
103 },
104 update=update
105 )
106 self._registerFilters(registry, update=update)
108 def getRawFormatter(self, dataId):
109 # Docstring inherited from Instrument.getRawFormatter
110 # Import the formatter here to prevent a circular dependency.
111 from .rawFormatter import HyperSuprimeCamRawFormatter, HyperSuprimeCamCornerRawFormatter
112 if dataId["detector"] in (100, 101, 102, 103):
113 return HyperSuprimeCamCornerRawFormatter
114 else:
115 return HyperSuprimeCamRawFormatter
117 def getCamera(self):
118 """Retrieve the cameraGeom representation of HSC.
120 This is a temporary API that should go away once obs_ packages have
121 a standardized approach to writing versioned cameras to a Gen3 repo.
122 """
123 path = os.path.join(getPackageDir("obs_subaru"), self.policyName, "camera")
124 return self._getCameraFromPath(path)
126 @staticmethod
127 @lru_cache()
128 def _getCameraFromPath(path):
129 """Return the camera geometry given solely the path to the location
130 of that definition."""
131 config = CameraConfig()
132 config.load(os.path.join(path, "camera.py"))
133 return makeCameraFromPath(
134 cameraConfig=config,
135 ampInfoPath=path,
136 shortNameFunc=lambda name: name.replace(" ", "_"),
137 pupilFactoryClass=HscPupilFactory
138 )
140 def getBrighterFatterKernel(self):
141 """Return the brighter-fatter kernel for HSC as a `numpy.ndarray`.
143 This is a temporary API that should go away once obs_ packages have
144 a standardized approach to writing versioned kernels to a Gen3 repo.
145 """
146 path = os.path.join(getPackageDir("obs_subaru"), self.policyName, "brighter_fatter_kernel.pkl")
147 with open(path, "rb") as fd:
148 kernel = pickle.load(fd, encoding='latin1') # encoding for pickle written with Python 2
149 return kernel
151 def writeAdditionalCuratedCalibrations(self, butler, collection=None, labels=()):
152 # Register the CALIBRATION collection that adds validity ranges.
153 # This does nothing if it is already registered.
154 if collection is None:
155 collection = self.makeCalibrationCollectionName(*labels)
156 butler.registry.registerCollection(collection, type=CollectionType.CALIBRATION)
158 # Register the RUN collection that holds these datasets directly. We
159 # only need one because all of these datasets have the same (unbounded)
160 # validity range right now.
161 run = self.makeUnboundedCalibrationRunName(*labels)
162 butler.registry.registerRun(run)
163 baseDataId = butler.registry.expandDataId(instrument=self.getName())
164 refs = []
166 # Write brighter-fatter kernel, with an infinite validity range.
167 datasetType = DatasetType("bfKernel", ("instrument",), "NumpyArray",
168 universe=butler.registry.dimensions,
169 isCalibration=True)
170 butler.registry.registerDatasetType(datasetType)
172 # Load and then put instead of just moving the file in part to ensure
173 # the version in-repo is written with Python 3 and does not need
174 # `encoding='latin1'` to be read.
175 bfKernel = self.getBrighterFatterKernel()
176 refs.append(butler.put(bfKernel, datasetType, baseDataId, run=run))
178 # The following iterate over the values of the dictionaries returned
179 # by the transmission functions and ignore the date that is supplied.
180 # This is due to the dates not being ranges but single dates,
181 # which do not give the proper notion of validity. As such unbounded
182 # calibration labels are used when inserting into the database.
183 # In the future these could and probably should be updated to
184 # properly account for what ranges are considered valid.
186 # Write optical transmissions
187 opticsTransmissions = getOpticsTransmission()
188 datasetType = DatasetType("transmission_optics",
189 ("instrument",),
190 "TransmissionCurve",
191 universe=butler.registry.dimensions,
192 isCalibration=True)
193 butler.registry.registerDatasetType(datasetType)
194 for entry in opticsTransmissions.values():
195 if entry is None:
196 continue
197 refs.append(butler.put(entry, datasetType, baseDataId, run=run))
199 # Write transmission sensor
200 sensorTransmissions = getSensorTransmission()
201 datasetType = DatasetType("transmission_sensor",
202 ("instrument", "detector",),
203 "TransmissionCurve",
204 universe=butler.registry.dimensions,
205 isCalibration=True)
206 butler.registry.registerDatasetType(datasetType)
207 for entry in sensorTransmissions.values():
208 if entry is None:
209 continue
210 for sensor, curve in entry.items():
211 dataId = DataCoordinate.standardize(baseDataId, detector=sensor)
212 refs.append(butler.put(curve, datasetType, dataId, run=run))
214 # Write filter transmissions
215 filterTransmissions = getFilterTransmission()
216 datasetType = DatasetType("transmission_filter",
217 ("instrument", "physical_filter",),
218 "TransmissionCurve",
219 universe=butler.registry.dimensions,
220 isCalibration=True)
221 butler.registry.registerDatasetType(datasetType)
222 for entry in filterTransmissions.values():
223 if entry is None:
224 continue
225 for band, curve in entry.items():
226 dataId = DataCoordinate.standardize(baseDataId, physical_filter=band)
227 refs.append(butler.put(curve, datasetType, dataId, run=run))
229 # Write atmospheric transmissions
230 atmosphericTransmissions = getAtmosphereTransmission()
231 datasetType = DatasetType("transmission_atmosphere", ("instrument",),
232 "TransmissionCurve",
233 universe=butler.registry.dimensions,
234 isCalibration=True)
235 butler.registry.registerDatasetType(datasetType)
236 for entry in atmosphericTransmissions.values():
237 if entry is None:
238 continue
239 refs.append(butler.put(entry, datasetType, {"instrument": self.getName()}, run=run))
241 # Associate all datasets with the unbounded validity range.
242 butler.registry.certify(collection, refs, Timespan(begin=None, end=None))
    def ingestStrayLightData(self, butler, directory, *, transfer=None, collection=None, labels=()):
        """Ingest externally-produced y-band stray light data files into
        a data repository.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler to write with.  Any collections associated with it are
            ignored in favor of ``collection`` and/or ``labels``.
        directory : `str`
            Directory containing yBackground-*.fits files.
        transfer : `str`, optional
            If not `None`, must be one of 'move', 'copy', 'hardlink', or
            'symlink', indicating how to transfer the files.
        collection : `str`, optional
            Name to use for the calibration collection that associates all
            datasets with a validity range.  If this collection already exists,
            it must be a `~CollectionType.CALIBRATION` collection, and it must
            not have any datasets that would conflict with those inserted by
            this method.  If `None`, a collection name is worked out
            automatically from the instrument name and other metadata by
            calling ``makeCuratedCalibrationCollectionName``, but this
            default name may not work well for long-lived repositories unless
            ``labels`` is also provided (and changed every time curated
            calibrations are ingested).
        labels : `Sequence` [ `str` ], optional
            Extra strings to include in collection names, after concatenating
            them with the standard collection name delimiter.  If provided,
            these are inserted into the names of the `~CollectionType.RUN`
            collections that datasets are inserted directly into, as well as
            the `~CollectionType.CALIBRATION` collection if it is generated
            automatically (i.e. if ``collection is None``).  Usually this is
            just the name of the ticket on which the calibration collection is
            being created.
        """
        # Register the CALIBRATION collection that adds validity ranges.
        # This does nothing if it is already registered.
        if collection is None:
            collection = self.makeCalibrationCollectionName(*labels)
        butler.registry.registerCollection(collection, type=CollectionType.CALIBRATION)

        # Register the RUN collection that holds these datasets directly. We
        # only need one because there is only one validity range and hence no
        # data ID conflicts even when there are no validity ranges.
        run = self.makeUnboundedCalibrationRunName(*labels)
        butler.registry.registerRun(run)

        # LEDs covered up around 2018-01-01, no need for correction after that
        # date.
        timespan = Timespan(begin=None, end=astropy.time.Time("2018-01-01", format="iso", scale="tai"))
        datasets = []
        # TODO: should we use a more generic name for the dataset type?
        # This is just the (rather HSC-specific) name used in Gen2, and while
        # the instances of this dataset are camera-specific, the datasetType
        # (which is used in the generic IsrTask) should not be.
        datasetType = DatasetType("yBackground",
                                  dimensions=("physical_filter", "detector",),
                                  storageClass="StrayLightData",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        for detector in self.getCamera():
            path = os.path.join(directory, f"ybackground-{detector.getId():03d}.fits")
            if not os.path.exists(path):
                # Missing per-detector files are skipped with a warning rather
                # than treated as an error.
                log.warning("No stray light data found for detector %s @ %s.", detector.getId(), path)
                continue
            # All stray light corrections apply to the HSC-Y filter only.
            ref = DatasetRef(datasetType, dataId={"instrument": self.getName(),
                                                  "detector": detector.getId(),
                                                  "physical_filter": "HSC-Y"})
            datasets.append(FileDataset(refs=ref, path=path, formatter=SubaruStrayLightDataFormatter))
        # Register the dataset type once, before ingesting any files.
        butler.registry.registerDatasetType(datasetType)
        with butler.transaction():
            butler.ingest(*datasets, transfer=transfer, run=run)
            # Certify every ingested ref into the CALIBRATION collection with
            # the (unbounded-start) validity range above.
            refs = []
            for dataset in datasets:
                refs.extend(dataset.refs)
            butler.registry.certify(collection, refs, timespan)
321 def makeDataIdTranslatorFactory(self) -> TranslatorFactory:
322 # Docstring inherited from lsst.obs.base.Instrument.
323 factory = TranslatorFactory()
324 factory.addGenericInstrumentRules(self.getName())
325 # Translate Gen2 `filter` to band if it hasn't been consumed
326 # yet and gen2keys includes tract.
327 factory.addRule(PhysicalFilterToBandKeyHandler(self.filterDefinitions),
328 instrument=self.getName(), gen2keys=("filter", "tract"), consume=("filter",))
329 return factory