# Coverage report artifact: python/lsst/obs/subaru/_instrument.py — 25% of 126 statements
# (generated by coverage.py v7.5.1 at 2024-05-14 02:35 -0700)
# This file is part of obs_subaru.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
"""Gen3 Butler registry declarations for Hyper Suprime-Cam.
"""

__all__ = ("HyperSuprimeCam",)

import os
import pickle
import logging

from functools import lru_cache

from astro_metadata_translator import HscTranslator
import astropy.time
from lsst.utils import getPackageDir
from lsst.afw.cameraGeom import makeCameraFromPath, CameraConfig
from lsst.daf.butler import (DatasetType, DataCoordinate, FileDataset, DatasetRef,
                             CollectionType, Timespan)
from lsst.utils.introspection import get_full_type_name
from lsst.obs.base import Instrument, VisitSystem

from ..hsc.hscPupil import HscPupilFactory
from ..hsc.hscFilters import HSC_FILTER_DEFINITIONS
from ..hsc.makeTransmissionCurves import (getSensorTransmission, getOpticsTransmission,
                                          getFilterTransmission, getAtmosphereTransmission)
from .strayLight.formatter import SubaruStrayLightDataFormatter

# Module-level logger, used for warnings about missing stray-light files.
log = logging.getLogger(__name__)
class HyperSuprimeCam(Instrument):
    """Gen3 Butler specialization class for Subaru's Hyper Suprime-Cam.
    """

    # Name of the per-instrument config subdirectory under obs_subaru/config.
    policyName = "hsc"
    # Package that provides this instrument's curated calibration data.
    obsDataPackage = "obs_subaru_data"
    filterDefinitions = HSC_FILTER_DEFINITIONS
    # Dataset types (beyond the standard curated calibrations) written by
    # writeAdditionalCuratedCalibrations and ingestStrayLightData below.
    additionalCuratedDatasetTypes = ("bfKernel", "transmission_optics", "transmission_sensor",
                                     "transmission_filter", "transmission_atmosphere", "yBackground")
    translatorClass = HscTranslator

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        packageDir = getPackageDir("obs_subaru")
        # Search the generic config directory first, then the HSC-specific one.
        self.configPaths = [os.path.join(packageDir, "config"),
                            os.path.join(packageDir, "config", self.policyName)]

    @classmethod
    def getName(cls):
        # Docstring inherited from Instrument.getName
        return "HSC"

    def register(self, registry, update=False):
        # Docstring inherited from Instrument.register
        camera = self.getCamera()
        # The maximum values below make Gen3's ObservationDataIdPacker produce
        # outputs that match Gen2's ccdExposureId.
        obsMax = 21474800
        # All dimension records are synced in a single transaction so a
        # partial registration is never left behind.
        with registry.transaction():
            registry.syncDimensionData(
                "instrument",
                {
                    "name": self.getName(),
                    "detector_max": 200,
                    "visit_max": obsMax,
                    "exposure_max": obsMax,
                    "class_name": get_full_type_name(self),
                    # Some schemas support default visit_system
                    "visit_system": VisitSystem.ONE_TO_ONE.value,
                },
                update=update
            )
            for detector in camera:
                registry.syncDimensionData(
                    "detector",
                    {
                        "instrument": self.getName(),
                        "id": detector.getId(),
                        "full_name": detector.getName(),
                        # TODO: make sure these definitions are consistent with
                        # those extracted by astro_metadata_translator, and
                        # test that they remain consistent somehow.
                        # Detector names are assumed to be "<raft>_<name>";
                        # splitting on "_" extracts both pieces.
                        "name_in_raft": detector.getName().split("_")[1],
                        "raft": detector.getName().split("_")[0],
                        "purpose": str(detector.getType()).split(".")[-1],
                    },
                    update=update
                )
            self._registerFilters(registry, update=update)

    def getRawFormatter(self, dataId):
        # Docstring inherited from Instrument.getRawFormatter
        # Import the formatter here to prevent a circular dependency.
        from .rawFormatter import HyperSuprimeCamRawFormatter, HyperSuprimeCamCornerRawFormatter
        # Detectors 100-103 use the corner-specific raw formatter.
        if dataId["detector"] in (100, 101, 102, 103):
            return HyperSuprimeCamCornerRawFormatter
        else:
            return HyperSuprimeCamRawFormatter

    def getCamera(self):
        """Retrieve the cameraGeom representation of HSC.

        This is a temporary API that should go away once obs_ packages have
        a standardized approach to writing versioned cameras to a Gen3 repo.
        """
        path = os.path.join(getPackageDir("obs_subaru"), self.policyName, "camera")
        return self._getCameraFromPath(path)

    @staticmethod
    @lru_cache()
    def _getCameraFromPath(path):
        """Return the camera geometry given solely the path to the location
        of that definition."""
        # Cached (lru_cache) so repeated getCamera() calls reuse one Camera.
        config = CameraConfig()
        config.load(os.path.join(path, "camera.py"))
        return makeCameraFromPath(
            cameraConfig=config,
            ampInfoPath=path,
            shortNameFunc=lambda name: name.replace(" ", "_"),
            pupilFactoryClass=HscPupilFactory
        )

    def getBrighterFatterKernel(self):
        """Return the brighter-fatter kernel for HSC as a `numpy.ndarray`.

        This is a temporary API that should go away once obs_ packages have
        a standardized approach to writing versioned kernels to a Gen3 repo.
        """
        path = os.path.join(getPackageDir("obs_subaru"), self.policyName, "brighter_fatter_kernel.pkl")
        with open(path, "rb") as fd:
            kernel = pickle.load(fd, encoding='latin1')  # encoding for pickle written with Python 2
        return kernel

    def writeAdditionalCuratedCalibrations(self, butler, collection=None, labels=()):
        """Write HSC-specific curated calibrations (brighter-fatter kernel
        and transmission curves) into a Gen3 repository.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler to write with.
        collection : `str`, optional
            Name of the `~CollectionType.CALIBRATION` collection the datasets
            are certified into.  If `None`, a name is generated by
            ``makeCalibrationCollectionName(*labels)``.
        labels : `Sequence` [ `str` ], optional
            Extra strings used when generating collection names.
        """
        # Register the CALIBRATION collection that adds validity ranges.
        # This does nothing if it is already registered.
        if collection is None:
            collection = self.makeCalibrationCollectionName(*labels)
        butler.registry.registerCollection(collection, type=CollectionType.CALIBRATION)

        # Register the RUN collection that holds these datasets directly.  We
        # only need one because all of these datasets have the same (unbounded)
        # validity range right now.
        run = self.makeUnboundedCalibrationRunName(*labels)
        butler.registry.registerRun(run)
        baseDataId = butler.registry.expandDataId(instrument=self.getName())
        refs = []

        # Write brighter-fatter kernel, with an infinite validity range.
        datasetType = DatasetType("bfKernel", ("instrument",), "NumpyArray",
                                  universe=butler.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)

        # Load and then put instead of just moving the file in part to ensure
        # the version in-repo is written with Python 3 and does not need
        # `encoding='latin1'` to be read.
        bfKernel = self.getBrighterFatterKernel()
        refs.append(butler.put(bfKernel, datasetType, baseDataId, run=run))

        # The following iterate over the values of the dictionaries returned
        # by the transmission functions and ignore the date that is supplied.
        # This is due to the dates not being ranges but single dates,
        # which do not give the proper notion of validity. As such unbounded
        # calibration labels are used when inserting into the database.
        # In the future these could and probably should be updated to
        # properly account for what ranges are considered valid.

        # Write optical transmissions
        opticsTransmissions = getOpticsTransmission()
        datasetType = DatasetType("transmission_optics",
                                  ("instrument",),
                                  "TransmissionCurve",
                                  universe=butler.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)
        for entry in opticsTransmissions.values():
            if entry is None:
                continue
            refs.append(butler.put(entry, datasetType, baseDataId, run=run))

        # Write transmission sensor
        sensorTransmissions = getSensorTransmission()
        datasetType = DatasetType("transmission_sensor",
                                  ("instrument", "detector",),
                                  "TransmissionCurve",
                                  universe=butler.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)
        for entry in sensorTransmissions.values():
            if entry is None:
                continue
            # Each entry maps detector id -> transmission curve.
            for sensor, curve in entry.items():
                dataId = DataCoordinate.standardize(baseDataId, detector=sensor)
                refs.append(butler.put(curve, datasetType, dataId, run=run))

        # Write filter transmissions
        filterTransmissions = getFilterTransmission()
        datasetType = DatasetType("transmission_filter",
                                  ("instrument", "physical_filter",),
                                  "TransmissionCurve",
                                  universe=butler.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)
        for entry in filterTransmissions.values():
            if entry is None:
                continue
            # Each entry maps physical filter name -> transmission curve.
            for band, curve in entry.items():
                dataId = DataCoordinate.standardize(baseDataId, physical_filter=band)
                refs.append(butler.put(curve, datasetType, dataId, run=run))

        # Write atmospheric transmissions
        atmosphericTransmissions = getAtmosphereTransmission()
        datasetType = DatasetType("transmission_atmosphere", ("instrument",),
                                  "TransmissionCurve",
                                  universe=butler.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)
        for entry in atmosphericTransmissions.values():
            if entry is None:
                continue
            # NOTE(review): other puts above pass ``baseDataId``; this one
            # builds a fresh dict with the same instrument value — presumably
            # equivalent, but worth unifying.  Confirm before changing.
            refs.append(butler.put(entry, datasetType, {"instrument": self.getName()}, run=run))

        # Associate all datasets with the unbounded validity range.
        butler.registry.certify(collection, refs, Timespan(begin=None, end=None))

    def ingestStrayLightData(self, butler, directory, *, transfer=None, collection=None, labels=()):
        """Ingest externally-produced y-band stray light data files into
        a data repository.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler to write with.  Any collections associated with it are
            ignored in favor of ``collection`` and/or ``labels``.
        directory : `str`
            Directory containing yBackground-*.fits files.
        transfer : `str`, optional
            If not `None`, must be one of 'move', 'copy', 'hardlink', or
            'symlink', indicating how to transfer the files.
        collection : `str`, optional
            Name to use for the calibration collection that associates all
            datasets with a validity range.  If this collection already exists,
            it must be a `~CollectionType.CALIBRATION` collection, and it must
            not have any datasets that would conflict with those inserted by
            this method.  If `None`, a collection name is worked out
            automatically from the instrument name and other metadata by
            calling ``makeCuratedCalibrationCollectionName``, but this
            default name may not work well for long-lived repositories unless
            ``labels`` is also provided (and changed every time curated
            calibrations are ingested).
        labels : `Sequence` [ `str` ], optional
            Extra strings to include in collection names, after concatenating
            them with the standard collection name delimiter.  If provided,
            these are inserted into to the names of the `~CollectionType.RUN`
            collections that datasets are inserted directly into, as well the
            `~CollectionType.CALIBRATION` collection if it is generated
            automatically (i.e. if ``collection is None``).  Usually this is
            just the name of the ticket on which the calibration collection is
            being created.
        """
        # Register the CALIBRATION collection that adds validity ranges.
        # This does nothing if it is already registered.
        if collection is None:
            collection = self.makeCalibrationCollectionName(*labels)
        butler.registry.registerCollection(collection, type=CollectionType.CALIBRATION)

        # Register the RUN collection that holds these datasets directly.  We
        # only need one because there is only one validity range and hence no
        # data ID conflicts even when there are no validity ranges.
        run = self.makeUnboundedCalibrationRunName(*labels)
        butler.registry.registerRun(run)

        # LEDs covered up around 2018-01-01, no need for correction after that
        # date.
        timespan = Timespan(begin=None, end=astropy.time.Time("2018-01-01", format="iso", scale="tai"))
        datasets = []
        # TODO: should we use a more generic name for the dataset type?
        # This is just the (rather HSC-specific) name used in Gen2, and while
        # the instances of this dataset are camera-specific, the datasetType
        # (which is used in the generic IsrTask) should not be.
        datasetType = DatasetType("yBackground",
                                  dimensions=("physical_filter", "detector",),
                                  storageClass="StrayLightData",
                                  universe=butler.dimensions,
                                  isCalibration=True)
        for detector in self.getCamera():
            # One file per detector, named by zero-padded detector id.
            path = os.path.join(directory, f"ybackground-{detector.getId():03d}.fits")
            if not os.path.exists(path):
                # Missing files are skipped with a warning, not an error.
                log.warning("No stray light data found for detector %s @ %s.", detector.getId(), path)
                continue
            ref = DatasetRef(datasetType, dataId={"instrument": self.getName(),
                                                  "detector": detector.getId(),
                                                  "physical_filter": "HSC-Y"},
                             run=run)
            datasets.append(FileDataset(refs=ref, path=path, formatter=SubaruStrayLightDataFormatter))
        butler.registry.registerDatasetType(datasetType)
        # Ingest and certify atomically so a failure leaves no partial state.
        with butler.transaction():
            butler.ingest(*datasets, transfer=transfer)
            refs = []
            for dataset in datasets:
                refs.extend(dataset.refs)
            butler.registry.certify(collection, refs, timespan)