Coverage for python/lsst/obs/subaru/_instrument.py : 22%

# This file is part of obs_subaru.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Gen3 Butler registry declarations for Hyper Suprime-Cam.
"""

__all__ = ("HyperSuprimeCam",)

import os
import pickle
import logging

from functools import lru_cache

import astropy.time
from lsst.utils import getPackageDir
from lsst.afw.cameraGeom import makeCameraFromPath, CameraConfig
from lsst.daf.butler import (DatasetType, DataCoordinate, FileDataset, DatasetRef,
                             CollectionType, Timespan)
from lsst.daf.butler.core.utils import getFullTypeName
from lsst.obs.base import Instrument
from lsst.obs.base.gen2to3 import TranslatorFactory, PhysicalFilterToBandKeyHandler

from ..hsc.hscPupil import HscPupilFactory
from ..hsc.hscFilters import HSC_FILTER_DEFINITIONS
from ..hsc.makeTransmissionCurves import (getSensorTransmission, getOpticsTransmission,
                                          getFilterTransmission, getAtmosphereTransmission)
from .strayLight.formatter import SubaruStrayLightDataFormatter

log = logging.getLogger(__name__)


class HyperSuprimeCam(Instrument):
    """Gen3 Butler specialization class for Subaru's Hyper Suprime-Cam.
    """

    policyName = "hsc"
    obsDataPackage = "obs_subaru_data"
    filterDefinitions = HSC_FILTER_DEFINITIONS
    additionalCuratedDatasetTypes = ("bfKernel", "transmission_optics", "transmission_sensor",
                                     "transmission_filter", "transmission_atmosphere", "yBackground")

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        packageDir = getPackageDir("obs_subaru")
        self.configPaths = [os.path.join(packageDir, "config"),
                            os.path.join(packageDir, "config", self.policyName)]

    @classmethod
    def getName(cls):
        # Docstring inherited from Instrument.getName
        return "HSC"

    def register(self, registry):
        # Docstring inherited from Instrument.register
        camera = self.getCamera()
        # The maximum values below make Gen3's ObservationDataIdPacker produce
        # outputs that match Gen2's ccdExposureId.
        obsMax = 21474800
        with registry.transaction():
            registry.syncDimensionData(
                "instrument",
                {
                    "name": self.getName(),
                    "detector_max": 200,
                    "visit_max": obsMax,
                    "exposure_max": obsMax,
                    "class_name": getFullTypeName(self),
                }
            )
            for detector in camera:
                registry.syncDimensionData(
                    "detector",
                    {
                        "instrument": self.getName(),
                        "id": detector.getId(),
                        "full_name": detector.getName(),
                        # TODO: make sure these definitions are consistent with
                        # those extracted by astro_metadata_translator, and
                        # test that they remain consistent somehow.
                        "name_in_raft": detector.getName().split("_")[1],
                        "raft": detector.getName().split("_")[0],
                        "purpose": str(detector.getType()).split(".")[-1],
                    }
                )
            self._registerFilters(registry)
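
    # A minimal sketch of how registration is typically driven; the repository
    # path "/path/to/repo" below is a hypothetical placeholder, and a
    # writeable Gen3 Butler is assumed:
    #
    #     from lsst.daf.butler import Butler
    #     butler = Butler("/path/to/repo", writeable=True)
    #     HyperSuprimeCam().register(butler.registry)
    #
    # The same step can usually be performed from the command line with the
    # ``butler register-instrument`` subcommand provided by obs_base.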

    def getRawFormatter(self, dataId):
        # Docstring inherited from Instrument.getRawFormatter
        # Import the formatter here to prevent a circular dependency.
        from .rawFormatter import HyperSuprimeCamRawFormatter, HyperSuprimeCamCornerRawFormatter
        if dataId["detector"] in (100, 101, 102, 103):
            return HyperSuprimeCamCornerRawFormatter
        else:
            return HyperSuprimeCamRawFormatter

    def getCamera(self):
        """Retrieve the cameraGeom representation of HSC.

        This is a temporary API that should go away once obs_ packages have
        a standardized approach to writing versioned cameras to a Gen3 repo.
        """
        path = os.path.join(getPackageDir("obs_subaru"), self.policyName, "camera")
        return self._getCameraFromPath(path)
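
    # A hedged usage sketch: getCamera() returns an
    # `lsst.afw.cameraGeom.Camera`, which can be iterated over to inspect the
    # HSC detectors (register() above relies on the same iteration):
    #
    #     camera = HyperSuprimeCam().getCamera()
    #     for detector in camera:
    #         print(detector.getId(), detector.getName())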

    @staticmethod
    @lru_cache()
    def _getCameraFromPath(path):
        """Return the camera geometry given solely the path to the location
        of that definition."""
        config = CameraConfig()
        config.load(os.path.join(path, "camera.py"))
        return makeCameraFromPath(
            cameraConfig=config,
            ampInfoPath=path,
            shortNameFunc=lambda name: name.replace(" ", "_"),
            pupilFactoryClass=HscPupilFactory
        )
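
    # Design note: the `lru_cache` on `_getCameraFromPath` above means the
    # camera description is read from disk at most once per path for the
    # lifetime of the process; repeated getCamera() calls return the same
    # Camera object rather than re-parsing camera.py.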

    def getBrighterFatterKernel(self):
        """Return the brighter-fatter kernel for HSC as a `numpy.ndarray`.

        This is a temporary API that should go away once obs_ packages have
        a standardized approach to writing versioned kernels to a Gen3 repo.
        """
        path = os.path.join(getPackageDir("obs_subaru"), self.policyName, "brighter_fatter_kernel.pkl")
        with open(path, "rb") as fd:
            kernel = pickle.load(fd, encoding='latin1')  # encoding for pickle written with Python 2
        return kernel
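
    # A minimal usage sketch for the method above; per its docstring the
    # returned object is a `numpy.ndarray`:
    #
    #     bfKernel = HyperSuprimeCam().getBrighterFatterKernel()
    #     print(type(bfKernel), getattr(bfKernel, "shape", None))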

    def writeAdditionalCuratedCalibrations(self, butler, collection=None, labels=()):
        # Register the CALIBRATION collection that adds validity ranges.
        # This does nothing if it is already registered.
        if collection is None:
            collection = self.makeCalibrationCollectionName(*labels)
        butler.registry.registerCollection(collection, type=CollectionType.CALIBRATION)

        # Register the RUN collection that holds these datasets directly. We
        # only need one because all of these datasets have the same (unbounded)
        # validity range right now.
        run = self.makeUnboundedCalibrationRunName(*labels)
        butler.registry.registerRun(run)
        baseDataId = butler.registry.expandDataId(instrument=self.getName())
        refs = []

        # Write brighter-fatter kernel, with an infinite validity range.
        datasetType = DatasetType("bfKernel", ("instrument",), "NumpyArray",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)

        # Load and then put instead of just moving the file in part to ensure
        # the version in-repo is written with Python 3 and does not need
        # `encoding='latin1'` to be read.
        bfKernel = self.getBrighterFatterKernel()
        refs.append(butler.put(bfKernel, datasetType, baseDataId, run=run))

        # The loops below iterate over the values of the dictionaries returned
        # by the transmission functions and ignore the date that is supplied.
        # This is because the dates are single dates rather than ranges, so
        # they do not convey a proper notion of validity. As such, unbounded
        # validity ranges are used when inserting into the database.
        # In the future these could and probably should be updated to
        # properly account for what ranges are considered valid.

        # Write optical transmissions
        opticsTransmissions = getOpticsTransmission()
        datasetType = DatasetType("transmission_optics",
                                  ("instrument",),
                                  "TransmissionCurve",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)
        for entry in opticsTransmissions.values():
            if entry is None:
                continue
            refs.append(butler.put(entry, datasetType, baseDataId, run=run))

        # Write sensor transmissions
        sensorTransmissions = getSensorTransmission()
        datasetType = DatasetType("transmission_sensor",
                                  ("instrument", "detector",),
                                  "TransmissionCurve",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)
        for entry in sensorTransmissions.values():
            if entry is None:
                continue
            for sensor, curve in entry.items():
                dataId = DataCoordinate.standardize(baseDataId, detector=sensor)
                refs.append(butler.put(curve, datasetType, dataId, run=run))

        # Write filter transmissions
        filterTransmissions = getFilterTransmission()
        datasetType = DatasetType("transmission_filter",
                                  ("instrument", "physical_filter",),
                                  "TransmissionCurve",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)
        for entry in filterTransmissions.values():
            if entry is None:
                continue
            for band, curve in entry.items():
                dataId = DataCoordinate.standardize(baseDataId, physical_filter=band)
                refs.append(butler.put(curve, datasetType, dataId, run=run))

        # Write atmospheric transmissions
        atmosphericTransmissions = getAtmosphereTransmission()
        datasetType = DatasetType("transmission_atmosphere", ("instrument",),
                                  "TransmissionCurve",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)
        for entry in atmosphericTransmissions.values():
            if entry is None:
                continue
            refs.append(butler.put(entry, datasetType, {"instrument": self.getName()}, run=run))

        # Associate all datasets with the unbounded validity range.
        butler.registry.certify(collection, refs, Timespan(begin=None, end=None))
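
    # A hedged sketch of invoking the method above directly; "/path/to/repo"
    # and the "DM-XXXXX" ticket label are hypothetical placeholders. Normally
    # this step runs as part of curated-calibration ingestion (e.g. the
    # ``butler write-curated-calibrations`` subcommand from obs_base):
    #
    #     from lsst.daf.butler import Butler
    #     butler = Butler("/path/to/repo", writeable=True)
    #     HyperSuprimeCam().writeAdditionalCuratedCalibrations(butler, labels=("DM-XXXXX",))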

    def ingestStrayLightData(self, butler, directory, *, transfer=None, collection=None, labels=()):
        """Ingest externally-produced y-band stray light data files into
        a data repository.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler to write with. Any collections associated with it are
            ignored in favor of ``collection`` and/or ``labels``.
        directory : `str`
            Directory containing ybackground-*.fits files.
        transfer : `str`, optional
            If not `None`, must be one of 'move', 'copy', 'hardlink', or
            'symlink', indicating how to transfer the files.
        collection : `str`, optional
            Name to use for the calibration collection that associates all
            datasets with a validity range. If this collection already exists,
            it must be a `~CollectionType.CALIBRATION` collection, and it must
            not have any datasets that would conflict with those inserted by
            this method. If `None`, a collection name is worked out
            automatically from the instrument name and other metadata by
            calling ``makeCalibrationCollectionName``, but this default name
            may not work well for long-lived repositories unless ``labels`` is
            also provided (and changed every time curated calibrations are
            ingested).
        labels : `Sequence` [ `str` ], optional
            Extra strings to include in collection names, after concatenating
            them with the standard collection name delimiter. If provided,
            these are inserted into the names of the `~CollectionType.RUN`
            collections that datasets are inserted directly into, as well as
            the `~CollectionType.CALIBRATION` collection if it is generated
            automatically (i.e. if ``collection is None``). Usually this is
            just the name of the ticket on which the calibration collection is
            being created.
        """
        # Register the CALIBRATION collection that adds validity ranges.
        # This does nothing if it is already registered.
        if collection is None:
            collection = self.makeCalibrationCollectionName(*labels)
        butler.registry.registerCollection(collection, type=CollectionType.CALIBRATION)

        # Register the RUN collection that holds these datasets directly. We
        # only need one because there is only one validity range, so there are
        # no data ID conflicts that would require separate RUN collections.
        run = self.makeUnboundedCalibrationRunName(*labels)
        butler.registry.registerRun(run)

        # LEDs covered up around 2018-01-01, no need for correction after that
        # date.
        timespan = Timespan(begin=None, end=astropy.time.Time("2018-01-01", format="iso", scale="tai"))
        datasets = []
        # TODO: should we use a more generic name for the dataset type?
        # This is just the (rather HSC-specific) name used in Gen2, and while
        # the instances of this dataset are camera-specific, the datasetType
        # (which is used in the generic IsrTask) should not be.
        datasetType = DatasetType("yBackground",
                                  dimensions=("physical_filter", "detector",),
                                  storageClass="StrayLightData",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        for detector in self.getCamera():
            path = os.path.join(directory, f"ybackground-{detector.getId():03d}.fits")
            if not os.path.exists(path):
                log.warning("No stray light data found for detector %s @ %s.", detector.getId(), path)
                continue
            ref = DatasetRef(datasetType, dataId={"instrument": self.getName(),
                                                  "detector": detector.getId(),
                                                  "physical_filter": "HSC-Y"})
            datasets.append(FileDataset(refs=ref, path=path, formatter=SubaruStrayLightDataFormatter))
        butler.registry.registerDatasetType(datasetType)
        with butler.transaction():
            butler.ingest(*datasets, transfer=transfer, run=run)
            refs = []
            for dataset in datasets:
                refs.extend(dataset.refs)
            butler.registry.certify(collection, refs, timespan)
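
    # A hedged sketch of ingesting the y-band stray light files with the
    # method above; the directory path and ticket label are hypothetical
    # placeholders, and `butler` is assumed to be a writeable Gen3 Butler:
    #
    #     instrument = HyperSuprimeCam()
    #     instrument.ingestStrayLightData(butler, "/path/to/ybackground/files",
    #                                     transfer="copy", labels=("DM-XXXXX",))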

    def makeDataIdTranslatorFactory(self) -> TranslatorFactory:
        # Docstring inherited from lsst.obs.base.Instrument.
        factory = TranslatorFactory()
        factory.addGenericInstrumentRules(self.getName())
        # Translate Gen2 `filter` to band if it hasn't been consumed yet and
        # gen2keys includes tract.
        factory.addRule(PhysicalFilterToBandKeyHandler(self.filterDefinitions),
                        instrument=self.getName(), gen2keys=("filter", "tract"), consume=("filter",))
        return factory