Coverage for python/lsst/obs/subaru/_instrument.py: 22% (130 statements)

# This file is part of obs_subaru.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Gen3 Butler registry declarations for Hyper Suprime-Cam.
"""

__all__ = ("HyperSuprimeCam",)

import os
import pickle
import logging

from functools import lru_cache

import astropy.time
from lsst.utils import getPackageDir
from lsst.afw.cameraGeom import makeCameraFromPath, CameraConfig
from lsst.daf.butler import (DatasetType, DataCoordinate, FileDataset, DatasetRef,
                             CollectionType, Timespan)
from lsst.utils.introspection import get_full_type_name
from lsst.obs.base import Instrument, VisitSystem
from lsst.obs.base.gen2to3 import TranslatorFactory, PhysicalFilterToBandKeyHandler

from ..hsc.hscPupil import HscPupilFactory
from ..hsc.hscFilters import HSC_FILTER_DEFINITIONS
from ..hsc.makeTransmissionCurves import (getSensorTransmission, getOpticsTransmission,
                                          getFilterTransmission, getAtmosphereTransmission)
from .strayLight.formatter import SubaruStrayLightDataFormatter

log = logging.getLogger(__name__)


class HyperSuprimeCam(Instrument):
    """Gen3 Butler specialization class for Subaru's Hyper Suprime-Cam.
    """

    policyName = "hsc"
    obsDataPackage = "obs_subaru_data"
    filterDefinitions = HSC_FILTER_DEFINITIONS
    additionalCuratedDatasetTypes = ("bfKernel", "transmission_optics", "transmission_sensor",
                                     "transmission_filter", "transmission_atmosphere", "yBackground")

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        packageDir = getPackageDir("obs_subaru")
        self.configPaths = [os.path.join(packageDir, "config"),
                            os.path.join(packageDir, "config", self.policyName)]

    @classmethod
    def getName(cls):
        # Docstring inherited from Instrument.getName
        return "HSC"

    def register(self, registry, update=False):
        # Docstring inherited from Instrument.register
        camera = self.getCamera()
        # The maximum values below make Gen3's ObservationDataIdPacker produce
        # outputs that match Gen2's ccdExposureId.
        obsMax = 21474800
        with registry.transaction():
            registry.syncDimensionData(
                "instrument",
                {
                    "name": self.getName(),
                    "detector_max": 200,
                    "visit_max": obsMax,
                    "exposure_max": obsMax,
                    "class_name": get_full_type_name(self),
                    # Some schemas support default visit_system
                    "visit_system": VisitSystem.ONE_TO_ONE.value,
                },
                update=update
            )
            for detector in camera:
                registry.syncDimensionData(
                    "detector",
                    {
                        "instrument": self.getName(),
                        "id": detector.getId(),
                        "full_name": detector.getName(),
                        # TODO: make sure these definitions are consistent with
                        # those extracted by astro_metadata_translator, and
                        # test that they remain consistent somehow.
                        "name_in_raft": detector.getName().split("_")[1],
                        "raft": detector.getName().split("_")[0],
                        "purpose": str(detector.getType()).split(".")[-1],
                    },
                    update=update
                )
            self._registerFilters(registry, update=update)
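
    # Example (illustrative sketch, not part of the original module): registering
    # this instrument's "instrument" and "detector" dimension records. Because
    # register() uses syncDimensionData, re-running it against the same repository
    # is safe. The repository path below is hypothetical.
    #
    #     from lsst.daf.butler import Butler
    #     butler = Butler("/repo/main", writeable=True)
    #     HyperSuprimeCam().register(butler.registry)
    #
    # The same records are normally written with the command-line equivalent,
    # ``butler register-instrument REPO lsst.obs.subaru.HyperSuprimeCam``.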

    def getRawFormatter(self, dataId):
        # Docstring inherited from Instrument.getRawFormatter
        # Import the formatter here to prevent a circular dependency.
        from .rawFormatter import HyperSuprimeCamRawFormatter, HyperSuprimeCamCornerRawFormatter
        if dataId["detector"] in (100, 101, 102, 103):
            return HyperSuprimeCamCornerRawFormatter
        else:
            return HyperSuprimeCamRawFormatter

    def getCamera(self):
        """Retrieve the cameraGeom representation of HSC.

        This is a temporary API that should go away once obs_ packages have
        a standardized approach to writing versioned cameras to a Gen3 repo.
        """
        path = os.path.join(getPackageDir("obs_subaru"), self.policyName, "camera")
        return self._getCameraFromPath(path)

    @staticmethod
    @lru_cache()
    def _getCameraFromPath(path):
        """Return the camera geometry given solely the path to the location
        of that definition."""
        config = CameraConfig()
        config.load(os.path.join(path, "camera.py"))
        return makeCameraFromPath(
            cameraConfig=config,
            ampInfoPath=path,
            shortNameFunc=lambda name: name.replace(" ", "_"),
            pupilFactoryClass=HscPupilFactory
        )
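
    # Example (illustrative sketch, not part of the original module): obtaining the
    # cameraGeom description and inspecting one detector. Note that
    # _getCameraFromPath is wrapped in lru_cache, so repeated getCamera() calls
    # reuse the same Camera object instead of re-reading camera.py.
    #
    #     camera = HyperSuprimeCam().getCamera()
    #     detector = camera[50]  # detectors can be looked up by integer ID
    #     print(detector.getName(), detector.getBBox())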

    def getBrighterFatterKernel(self):
        """Return the brighter-fatter kernel for HSC as a `numpy.ndarray`.

        This is a temporary API that should go away once obs_ packages have
        a standardized approach to writing versioned kernels to a Gen3 repo.
        """
        path = os.path.join(getPackageDir("obs_subaru"), self.policyName, "brighter_fatter_kernel.pkl")
        with open(path, "rb") as fd:
            kernel = pickle.load(fd, encoding='latin1')  # encoding for pickle written with Python 2
        return kernel
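
    # Example (illustrative sketch, not part of the original module): loading the
    # pickled brighter-fatter kernel and checking that it is a NumPy array.
    #
    #     import numpy as np
    #     kernel = HyperSuprimeCam().getBrighterFatterKernel()
    #     assert isinstance(kernel, np.ndarray)
    #     print(kernel.shape)
    #
    # The encoding='latin1' above is only needed because the on-disk file was
    # pickled under Python 2; the copy written to a repo by
    # writeAdditionalCuratedCalibrations (via butler.put) does not need it.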

    def writeAdditionalCuratedCalibrations(self, butler, collection=None, labels=()):
        # Register the CALIBRATION collection that adds validity ranges.
        # This does nothing if it is already registered.
        if collection is None:
            collection = self.makeCalibrationCollectionName(*labels)
        butler.registry.registerCollection(collection, type=CollectionType.CALIBRATION)

        # Register the RUN collection that holds these datasets directly. We
        # only need one because all of these datasets have the same (unbounded)
        # validity range right now.
        run = self.makeUnboundedCalibrationRunName(*labels)
        butler.registry.registerRun(run)
        baseDataId = butler.registry.expandDataId(instrument=self.getName())
        refs = []

        # Write brighter-fatter kernel, with an infinite validity range.
        datasetType = DatasetType("bfKernel", ("instrument",), "NumpyArray",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)

        # Load and then put instead of just moving the file in part to ensure
        # the version in-repo is written with Python 3 and does not need
        # `encoding='latin1'` to be read.
        bfKernel = self.getBrighterFatterKernel()
        refs.append(butler.put(bfKernel, datasetType, baseDataId, run=run))

        # The loops below iterate over the values of the dictionaries returned
        # by the transmission functions and ignore the dates supplied as keys.
        # Those dates are single points rather than ranges, so they do not give
        # a proper notion of validity; instead, the unbounded calibration
        # validity range is used when inserting into the database. In the
        # future these could, and probably should, be updated to properly
        # account for the ranges over which each curve is valid.

        # Write optical transmissions
        opticsTransmissions = getOpticsTransmission()
        datasetType = DatasetType("transmission_optics",
                                  ("instrument",),
                                  "TransmissionCurve",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)
        for entry in opticsTransmissions.values():
            if entry is None:
                continue
            refs.append(butler.put(entry, datasetType, baseDataId, run=run))

        # Write sensor transmissions
        sensorTransmissions = getSensorTransmission()
        datasetType = DatasetType("transmission_sensor",
                                  ("instrument", "detector",),
                                  "TransmissionCurve",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)
        for entry in sensorTransmissions.values():
            if entry is None:
                continue
            for sensor, curve in entry.items():
                dataId = DataCoordinate.standardize(baseDataId, detector=sensor)
                refs.append(butler.put(curve, datasetType, dataId, run=run))

        # Write filter transmissions
        filterTransmissions = getFilterTransmission()
        datasetType = DatasetType("transmission_filter",
                                  ("instrument", "physical_filter",),
                                  "TransmissionCurve",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)
        for entry in filterTransmissions.values():
            if entry is None:
                continue
            for band, curve in entry.items():
                dataId = DataCoordinate.standardize(baseDataId, physical_filter=band)
                refs.append(butler.put(curve, datasetType, dataId, run=run))

        # Write atmospheric transmissions
        atmosphericTransmissions = getAtmosphereTransmission()
        datasetType = DatasetType("transmission_atmosphere", ("instrument",),
                                  "TransmissionCurve",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)
        for entry in atmosphericTransmissions.values():
            if entry is None:
                continue
            refs.append(butler.put(entry, datasetType, {"instrument": self.getName()}, run=run))

        # Associate all datasets with the unbounded validity range.
        butler.registry.certify(collection, refs, Timespan(begin=None, end=None))
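
    # Example (illustrative sketch, not part of the original module): writing these
    # additional curated calibrations. With collection=None this certifies the
    # datasets, with an unbounded validity range, into the CALIBRATION collection
    # returned by makeCalibrationCollectionName(*labels). The ticket label below
    # is hypothetical.
    #
    #     from lsst.daf.butler import Butler
    #     butler = Butler("/repo/main", writeable=True)
    #     HyperSuprimeCam().writeAdditionalCuratedCalibrations(butler, labels=("DM-12345",))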

    def ingestStrayLightData(self, butler, directory, *, transfer=None, collection=None, labels=()):
        """Ingest externally-produced y-band stray light data files into
        a data repository.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler to write with. Any collections associated with it are
            ignored in favor of ``collection`` and/or ``labels``.
        directory : `str`
            Directory containing ybackground-*.fits files.
        transfer : `str`, optional
            If not `None`, must be one of 'move', 'copy', 'hardlink', or
            'symlink', indicating how to transfer the files.
        collection : `str`, optional
            Name to use for the calibration collection that associates all
            datasets with a validity range. If this collection already exists,
            it must be a `~CollectionType.CALIBRATION` collection, and it must
            not have any datasets that would conflict with those inserted by
            this method. If `None`, a collection name is worked out
            automatically from the instrument name and other metadata by
            calling ``makeCalibrationCollectionName``, but this default name
            may not work well for long-lived repositories unless ``labels``
            is also provided (and changed every time curated calibrations
            are ingested).
        labels : `Sequence` [ `str` ], optional
            Extra strings to include in collection names, after concatenating
            them with the standard collection name delimiter. If provided,
            these are inserted into the names of the `~CollectionType.RUN`
            collections that datasets are inserted directly into, as well as
            the `~CollectionType.CALIBRATION` collection if it is generated
            automatically (i.e. if ``collection is None``). Usually this is
            just the name of the ticket on which the calibration collection is
            being created.
        """
        # Register the CALIBRATION collection that adds validity ranges.
        # This does nothing if it is already registered.
        if collection is None:
            collection = self.makeCalibrationCollectionName(*labels)
        butler.registry.registerCollection(collection, type=CollectionType.CALIBRATION)

        # Register the RUN collection that holds these datasets directly. We
        # only need one because there is only one validity range, so no data
        # ID conflicts arise even though RUN collections carry no validity
        # ranges of their own.
        run = self.makeUnboundedCalibrationRunName(*labels)
        butler.registry.registerRun(run)

        # LEDs covered up around 2018-01-01, so no correction is needed after
        # that date.
        timespan = Timespan(begin=None, end=astropy.time.Time("2018-01-01", format="iso", scale="tai"))
        datasets = []
        # TODO: should we use a more generic name for the dataset type?
        # This is just the (rather HSC-specific) name used in Gen2, and while
        # the instances of this dataset are camera-specific, the datasetType
        # (which is used in the generic IsrTask) should not be.
        datasetType = DatasetType("yBackground",
                                  dimensions=("physical_filter", "detector",),
                                  storageClass="StrayLightData",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        for detector in self.getCamera():
            path = os.path.join(directory, f"ybackground-{detector.getId():03d}.fits")
            if not os.path.exists(path):
                log.warning("No stray light data found for detector %s @ %s.", detector.getId(), path)
                continue
            ref = DatasetRef(datasetType, dataId={"instrument": self.getName(),
                                                  "detector": detector.getId(),
                                                  "physical_filter": "HSC-Y"})
            datasets.append(FileDataset(refs=ref, path=path, formatter=SubaruStrayLightDataFormatter))
        butler.registry.registerDatasetType(datasetType)
        with butler.transaction():
            butler.ingest(*datasets, transfer=transfer, run=run)
            refs = []
            for dataset in datasets:
                refs.extend(dataset.refs)
            butler.registry.certify(collection, refs, timespan)
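
    # Example (illustrative sketch, not part of the original module): ingesting a
    # directory of y-band stray light files. The paths and ticket label are
    # hypothetical.
    #
    #     from lsst.daf.butler import Butler
    #     butler = Butler("/repo/main", writeable=True)
    #     HyperSuprimeCam().ingestStrayLightData(
    #         butler, "/data/hsc/ybackground", transfer="copy", labels=("DM-12345",)
    #     )
    #
    # Files must be named ybackground-NNN.fits, where NNN is the zero-padded
    # detector ID; detectors without a matching file are skipped with a warning.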

    def makeDataIdTranslatorFactory(self) -> TranslatorFactory:
        # Docstring inherited from lsst.obs.base.Instrument.
        factory = TranslatorFactory()
        factory.addGenericInstrumentRules(self.getName())
        # Translate Gen2 `filter` to band if it hasn't been consumed
        # yet and gen2keys includes tract.
        factory.addRule(PhysicalFilterToBandKeyHandler(self.filterDefinitions),
                        instrument=self.getName(), gen2keys=("filter", "tract"), consume=("filter",))
        return factory