Coverage for python/lsst/obs/subaru/_instrument.py: 21%

# This file is part of obs_subaru.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Gen3 Butler registry declarations for Hyper Suprime-Cam.
"""

__all__ = ("HyperSuprimeCam",)

import os
import pickle
import logging

from functools import lru_cache

import astropy.time
from lsst.utils import getPackageDir
from lsst.afw.cameraGeom import makeCameraFromPath, CameraConfig
from lsst.daf.butler import (DatasetType, DataCoordinate, FileDataset, DatasetRef,
                             CollectionType, Timespan)
from lsst.daf.butler.core.utils import getFullTypeName
from lsst.obs.base import Instrument
from lsst.obs.base.gen2to3 import TranslatorFactory, PhysicalFilterToBandKeyHandler

from ..hsc.hscPupil import HscPupilFactory
from ..hsc.hscFilters import HSC_FILTER_DEFINITIONS
from ..hsc.makeTransmissionCurves import (getSensorTransmission, getOpticsTransmission,
                                          getFilterTransmission, getAtmosphereTransmission)
from .strayLight.formatter import SubaruStrayLightDataFormatter

log = logging.getLogger(__name__)

class HyperSuprimeCam(Instrument):
    """Gen3 Butler specialization class for Subaru's Hyper Suprime-Cam.
    """

    policyName = "hsc"
    obsDataPackage = "obs_subaru_data"
    filterDefinitions = HSC_FILTER_DEFINITIONS
    additionalCuratedDatasetTypes = ("bfKernel", "transmission_optics", "transmission_sensor",
                                     "transmission_filter", "transmission_atmosphere", "yBackground")

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        packageDir = getPackageDir("obs_subaru")
        self.configPaths = [os.path.join(packageDir, "config"),
                            os.path.join(packageDir, "config", self.policyName)]

    @classmethod
    def getName(cls):
        # Docstring inherited from Instrument.getName
        return "HSC"

    def register(self, registry):
        # Docstring inherited from Instrument.register
        camera = self.getCamera()
        # The maximum values below make Gen3's ObservationDataIdPacker produce
        # outputs that match Gen2's ccdExposureId.
        obsMax = 21474800
        with registry.transaction():
            registry.syncDimensionData(
                "instrument",
                {
                    "name": self.getName(),
                    "detector_max": 200,
                    "visit_max": obsMax,
                    "exposure_max": obsMax,
                    "class_name": getFullTypeName(self),
                }
            )
            for detector in camera:
                registry.syncDimensionData(
                    "detector",
                    {
                        "instrument": self.getName(),
                        "id": detector.getId(),
                        "full_name": detector.getName(),
                        # TODO: make sure these definitions are consistent with
                        # those extracted by astro_metadata_translator, and
                        # test that they remain consistent somehow.
                        "name_in_raft": detector.getName().split("_")[1],
                        "raft": detector.getName().split("_")[0],
                        "purpose": str(detector.getType()).split(".")[-1],
                    }
                )
            self._registerFilters(registry)
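
    # Illustrative sketch (not part of this module): registering HSC dimension
    # records in a Gen3 repo with the method above. The repo path is
    # hypothetical; the same step is normally done via the obs_base
    # `butler register-instrument` command line.
    #
    #     from lsst.daf.butler import Butler
    #     butler = Butler("/path/to/repo", writeable=True)
    #     instrument = HyperSuprimeCam()
    #     instrument.register(butler.registry)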

    def getRawFormatter(self, dataId):
        # Docstring inherited from Instrument.getRawFormatter
        # Import the formatter here to prevent a circular dependency.
        from .rawFormatter import HyperSuprimeCamRawFormatter, HyperSuprimeCamCornerRawFormatter
        if dataId["detector"] in (100, 101, 102, 103):
            return HyperSuprimeCamCornerRawFormatter
        else:
            return HyperSuprimeCamRawFormatter
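
    # Illustrative sketch (not part of this module): formatter selection by
    # detector ID, mirroring the check above (IDs 100-103 select the corner
    # formatter).
    #
    #     instrument = HyperSuprimeCam()
    #     instrument.getRawFormatter({"detector": 101})  # corner formatter
    #     instrument.getRawFormatter({"detector": 42})   # regular formatter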

    def getCamera(self):
        """Retrieve the cameraGeom representation of HSC.

        This is a temporary API that should go away once obs_ packages have
        a standardized approach to writing versioned cameras to a Gen3 repo.
        """
        path = os.path.join(getPackageDir("obs_subaru"), self.policyName, "camera")
        return self._getCameraFromPath(path)

    @staticmethod
    @lru_cache()
    def _getCameraFromPath(path):
        """Return the camera geometry given solely the path to the location
        of that definition."""
        config = CameraConfig()
        config.load(os.path.join(path, "camera.py"))
        return makeCameraFromPath(
            cameraConfig=config,
            ampInfoPath=path,
            shortNameFunc=lambda name: name.replace(" ", "_"),
            pupilFactoryClass=HscPupilFactory
        )
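
    # Illustrative sketch (not part of this module): inspecting the camera
    # geometry returned by getCamera(); the accessors used here are the same
    # ones register() relies on above.
    #
    #     camera = HyperSuprimeCam().getCamera()
    #     for detector in camera:
    #         print(detector.getId(), detector.getName(), detector.getType())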

    def getBrighterFatterKernel(self):
        """Return the brighter-fatter kernel for HSC as a `numpy.ndarray`.

        This is a temporary API that should go away once obs_ packages have
        a standardized approach to writing versioned kernels to a Gen3 repo.
        """
        path = os.path.join(getPackageDir("obs_subaru"), self.policyName, "brighter_fatter_kernel.pkl")
        with open(path, "rb") as fd:
            kernel = pickle.load(fd, encoding='latin1')  # encoding for pickle written with Python 2
        return kernel
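
    # Illustrative sketch (not part of this module): loading the
    # brighter-fatter kernel. Its shape depends on the obs_subaru data files,
    # so it is only printed here rather than asserted.
    #
    #     kernel = HyperSuprimeCam().getBrighterFatterKernel()
    #     print(type(kernel), getattr(kernel, "shape", None))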

    def writeAdditionalCuratedCalibrations(self, butler, collection=None, suffixes=()):
        # Register the CALIBRATION collection that adds validity ranges.
        # This does nothing if it is already registered.
        if collection is None:
            collection = self.makeCalibrationCollectionName(*suffixes)
        butler.registry.registerCollection(collection, type=CollectionType.CALIBRATION)

        # Register the RUN collection that holds these datasets directly. We
        # only need one because all of these datasets have the same (unbounded)
        # validity range right now.
        run = self.makeUnboundedCalibrationRunName(*suffixes)
        butler.registry.registerRun(run)
        baseDataId = butler.registry.expandDataId(instrument=self.getName())
        refs = []

        # Write brighter-fatter kernel, with an infinite validity range.
        datasetType = DatasetType("bfKernel", ("instrument",), "NumpyArray",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)

        # Load and then put instead of just moving the file in part to ensure
        # the version in-repo is written with Python 3 and does not need
        # `encoding='latin1'` to be read.
        bfKernel = self.getBrighterFatterKernel()
        refs.append(butler.put(bfKernel, datasetType, baseDataId, run=run))

        # The following loops iterate over the values of the dictionaries
        # returned by the transmission functions and ignore the date that is
        # supplied. This is due to the dates not being ranges but single dates,
        # which do not give the proper notion of validity. As such, unbounded
        # calibration labels are used when inserting into the database.
        # In the future these could and probably should be updated to
        # properly account for what ranges are considered valid.

        # Write optical transmissions
        opticsTransmissions = getOpticsTransmission()
        datasetType = DatasetType("transmission_optics",
                                  ("instrument",),
                                  "TransmissionCurve",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)
        for entry in opticsTransmissions.values():
            if entry is None:
                continue
            refs.append(butler.put(entry, datasetType, baseDataId, run=run))

        # Write sensor transmissions
        sensorTransmissions = getSensorTransmission()
        datasetType = DatasetType("transmission_sensor",
                                  ("instrument", "detector",),
                                  "TransmissionCurve",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)
        for entry in sensorTransmissions.values():
            if entry is None:
                continue
            for sensor, curve in entry.items():
                dataId = DataCoordinate.standardize(baseDataId, detector=sensor)
                refs.append(butler.put(curve, datasetType, dataId, run=run))

        # Write filter transmissions
        filterTransmissions = getFilterTransmission()
        datasetType = DatasetType("transmission_filter",
                                  ("instrument", "physical_filter",),
                                  "TransmissionCurve",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)
        for entry in filterTransmissions.values():
            if entry is None:
                continue
            for band, curve in entry.items():
                dataId = DataCoordinate.standardize(baseDataId, physical_filter=band)
                refs.append(butler.put(curve, datasetType, dataId, run=run))

        # Write atmospheric transmissions
        atmosphericTransmissions = getAtmosphereTransmission()
        datasetType = DatasetType("transmission_atmosphere", ("instrument",),
                                  "TransmissionCurve",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        butler.registry.registerDatasetType(datasetType)
        for entry in atmosphericTransmissions.values():
            if entry is None:
                continue
            refs.append(butler.put(entry, datasetType, {"instrument": self.getName()}, run=run))

        # Associate all datasets with the unbounded validity range.
        butler.registry.certify(collection, refs, Timespan(begin=None, end=None))
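
    # Illustrative sketch (not part of this module): writing the curated
    # calibrations above into a repo. The repo path and collection suffix are
    # hypothetical.
    #
    #     from lsst.daf.butler import Butler
    #     butler = Butler("/path/to/repo", writeable=True)
    #     HyperSuprimeCam().writeAdditionalCuratedCalibrations(
    #         butler, suffixes=("20200101",))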

    def ingestStrayLightData(self, butler, directory, *, transfer=None, collection=None, suffixes=()):
        """Ingest externally-produced y-band stray light data files into
        a data repository.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler initialized with the collection to ingest into.
        directory : `str`
            Directory containing yBackground-*.fits files.
        transfer : `str`, optional
            If not `None`, must be one of 'move', 'copy', 'hardlink', or
            'symlink', indicating how to transfer the files.
        collection : `str`, optional
            Name to use for the calibration collection that associates all
            datasets with a validity range. If this collection already exists,
            it must be a `~CollectionType.CALIBRATION` collection, and it must
            not have any datasets that would conflict with those inserted by
            this method. If `None`, a collection name is worked out
            automatically from the instrument name and other metadata by
            calling ``makeCuratedCalibrationCollectionName``, but this
            default name may not work well for long-lived repositories unless
            one or more ``suffixes`` are also provided (and changed every time
            curated calibrations are ingested).
        suffixes : `Sequence` [ `str` ], optional
            Name suffixes to append to collection names, after concatenating
            them with the standard collection name delimiter. If provided,
            these are appended to the names of the `~CollectionType.RUN`
            collections that datasets are inserted directly into, as well as
            the `~CollectionType.CALIBRATION` collection if it is generated
            automatically (i.e. if ``collection is None``).
        """
        # Register the CALIBRATION collection that adds validity ranges.
        # This does nothing if it is already registered.
        if collection is None:
            collection = self.makeCalibrationCollectionName(*suffixes)
        butler.registry.registerCollection(collection, type=CollectionType.CALIBRATION)

        # Register the RUN collection that holds these datasets directly. We
        # only need one because there is only one validity range and hence no
        # data ID conflicts even when there are no validity ranges.
        run = self.makeUnboundedCalibrationRunName(*suffixes)
        butler.registry.registerRun(run)

        # LEDs covered up around 2018-01-01; no need for correction after that
        # date.
        timespan = Timespan(begin=None, end=astropy.time.Time("2018-01-01", format="iso", scale="tai"))
        datasets = []
        # TODO: should we use a more generic name for the dataset type?
        # This is just the (rather HSC-specific) name used in Gen2, and while
        # the instances of this dataset are camera-specific, the datasetType
        # (which is used in the generic IsrTask) should not be.
        datasetType = DatasetType("yBackground",
                                  dimensions=("physical_filter", "detector",),
                                  storageClass="StrayLightData",
                                  universe=butler.registry.dimensions,
                                  isCalibration=True)
        for detector in self.getCamera():
            path = os.path.join(directory, f"ybackground-{detector.getId():03d}.fits")
            if not os.path.exists(path):
                log.warning(f"No stray light data found for detector {detector.getId()} @ {path}.")
                continue
            ref = DatasetRef(datasetType, dataId={"instrument": self.getName(),
                                                  "detector": detector.getId(),
                                                  "physical_filter": "HSC-Y"})
            datasets.append(FileDataset(refs=ref, path=path, formatter=SubaruStrayLightDataFormatter))
        butler.registry.registerDatasetType(datasetType)
        with butler.transaction():
            butler.ingest(*datasets, transfer=transfer, run=run)
            refs = []
            for dataset in datasets:
                refs.extend(dataset.refs)
            butler.registry.certify(collection, refs, timespan)
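
    # Illustrative sketch (not part of this module): ingesting y-band stray
    # light files with a copy transfer. The repo and data directory paths are
    # hypothetical.
    #
    #     from lsst.daf.butler import Butler
    #     butler = Butler("/path/to/repo", writeable=True)
    #     HyperSuprimeCam().ingestStrayLightData(
    #         butler, "/path/to/ybackground/files", transfer="copy")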

    def makeDataIdTranslatorFactory(self) -> TranslatorFactory:
        # Docstring inherited from lsst.obs.base.Instrument.
        factory = TranslatorFactory()
        factory.addGenericInstrumentRules(self.getName())
        # Translate Gen2 `filter` to band if it hasn't been consumed
        # yet and gen2keys includes tract.
        factory.addRule(PhysicalFilterToBandKeyHandler(self.filterDefinitions),
                        instrument=self.getName(), gen2keys=("filter", "tract"), consume=("filter",))
        return factory