Coverage for python/lsst/obs/base/_instrument.py : 21%

# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = ("Instrument", "makeExposureRecordFromObsInfo", "addUnboundedCalibrationLabel", "loadCamera")

import os.path
from abc import ABCMeta, abstractmethod
from typing import Any, Tuple, TYPE_CHECKING
import astropy.time

from lsst.afw.cameraGeom import Camera
from lsst.daf.butler import (
    Butler,
    CollectionType,
    DataCoordinate,
    DataId,
    DatasetType,
    Timespan,
)
from lsst.utils import getPackageDir, doImport

if TYPE_CHECKING:
    from .gen2to3 import TranslatorFactory
    from lsst.daf.butler import Registry

# To be a standard text curated calibration means that we use a
# standard definition for the corresponding DatasetType.
StandardCuratedCalibrationDatasetTypes = {
    "defects": {"dimensions": ("instrument", "detector", "calibration_label"),
                "storageClass": "Defects"},
    "qe_curve": {"dimensions": ("instrument", "detector", "calibration_label"),
                 "storageClass": "QECurve"},
    "crosstalk": {"dimensions": ("instrument", "detector", "calibration_label"),
                  "storageClass": "CrosstalkCalib"},
}
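
# For illustration only (not executed here): each entry above is expanded
# into a Butler ``DatasetType`` by ``writeStandardTextCuratedCalibrations``
# below, roughly as in this sketch, which assumes an existing ``butler``:
#
#     definition = StandardCuratedCalibrationDatasetTypes["defects"]
#     datasetType = DatasetType("defects",
#                               universe=butler.registry.dimensions,
#                               **definition)
#
# i.e. the dictionary keys must match keyword arguments accepted by
# `lsst.daf.butler.DatasetType`.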


class Instrument(metaclass=ABCMeta):
    """Base class for instrument-specific logic for the Gen3 Butler.

    Concrete instrument subclasses should be directly constructable with no
    arguments.
    """

    configPaths = ()
    """Paths to config files to read for specific Tasks.

    The paths in this list should contain files of the form `task.py`, for
    each of the Tasks that requires special configuration.
    """

    policyName = None
    """Instrument-specific name to use when locating a policy or configuration
    file in the file system."""

    obsDataPackage = None
    """Name of the package containing the text curated calibration files.
    Usually an obs _data package. If `None` no curated calibration files
    will be read. (`str`)"""

    standardCuratedDatasetTypes = tuple(StandardCuratedCalibrationDatasetTypes)
    """The dataset types expected to be obtained from the obsDataPackage.
    These dataset types are all required to have standard definitions and
    must be known to the base class. Clearing this list will prevent
    any of these calibrations from being stored. If a dataset type is not
    known to a specific instrument it can still be included in this list
    since the data package is the source of truth.
    """

    @property
    @abstractmethod
    def filterDefinitions(self):
        """`~lsst.obs.base.FilterDefinitionCollection`, defining the filters
        for this instrument.
        """
        return None

    def __init__(self):
        self.filterDefinitions.reset()
        self.filterDefinitions.defineFilters()
        self._obsDataPackageDir = None

    @classmethod
    @abstractmethod
    def getName(cls):
        """Return the short (dimension) name for this instrument.

        This is not (in general) the same as the class name - it's what is used
        as the value of the "instrument" field in data IDs, and is usually an
        abbreviation of the full name.
        """
        raise NotImplementedError()

    @abstractmethod
    def getCamera(self):
        """Retrieve the cameraGeom representation of this instrument.

        This is a temporary API that should go away once ``obs_`` packages have
        a standardized approach to writing versioned cameras to a Gen3 repo.
        """
        raise NotImplementedError()

    @abstractmethod
    def register(self, registry):
        """Insert instrument, physical_filter, and detector entries into a
        `Registry`.
        """
        raise NotImplementedError()

    @property
    def obsDataPackageDir(self):
        """The root of the obs data package that provides specializations for
        this instrument (`str`).
        """
        if self.obsDataPackage is None:
            return None
        if self._obsDataPackageDir is None:
            # Defer any problems with locating the package until
            # we need to find it.
            self._obsDataPackageDir = getPackageDir(self.obsDataPackage)
        return self._obsDataPackageDir

    @staticmethod
    def fromName(name: str, registry: Registry) -> Instrument:
        """Given an instrument name and a butler registry, retrieve a
        corresponding instantiated instrument object.

        Parameters
        ----------
        name : `str`
            Name of the instrument (must match the return value of `getName`).
        registry : `lsst.daf.butler.Registry`
            Butler registry to query to find the information.

        Returns
        -------
        instrument : `Instrument`
            An instance of the relevant `Instrument`.

        Notes
        -----
        The instrument must be registered in the corresponding butler.

        Raises
        ------
        LookupError
            Raised if the instrument is not known to the supplied registry.
        ModuleNotFoundError
            Raised if the class could not be imported. This could mean
            that the relevant obs package has not been setup.
        TypeError
            Raised if the class name retrieved is not a string.
        """
        records = list(registry.queryDimensionRecords("instrument", instrument=name))
        if not records:
            raise LookupError(f"No registered instrument with name '{name}'.")
        cls = records[0].class_name
        if not isinstance(cls, str):
            raise TypeError(f"Unexpected class name retrieved from {name} instrument dimension (got {cls})")
        instrument = doImport(cls)
        return instrument()
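
    # Usage sketch (illustrative, not executed): given a Butler whose
    # registry already has the instrument registered, e.g. via
    # ``instrument.register(butler.registry)``, an instance can be
    # recovered by name. The instrument name "HSC" is only an example
    # and assumes the corresponding obs package (obs_subaru) is set up:
    #
    #     instrument = Instrument.fromName("HSC", butler.registry)
    #     camera = instrument.getCamera()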

    @staticmethod
    def importAll(registry: Registry) -> None:
        """Import all the instruments known to this registry.

        This will ensure that all metadata translators have been registered.

        Parameters
        ----------
        registry : `lsst.daf.butler.Registry`
            Butler registry to query to find the information.

        Notes
        -----
        It is allowed for a particular instrument class to fail on import.
        This might simply indicate that a particular obs package has
        not been setup.
        """
        records = list(registry.queryDimensionRecords("instrument"))
        for record in records:
            cls = record.class_name
            try:
                doImport(cls)
            except Exception:
                pass

    def _registerFilters(self, registry):
        """Register the physical and abstract filter Dimension relationships.
        This should be called in the ``register`` implementation.

        Parameters
        ----------
        registry : `lsst.daf.butler.Registry`
            The registry to add dimensions to.
        """
        for filter in self.filterDefinitions:
            # fix for undefined abstract filters causing trouble in the registry:
            if filter.abstract_filter is None:
                abstract_filter = filter.physical_filter
            else:
                abstract_filter = filter.abstract_filter

            registry.insertDimensionData("physical_filter",
                                         {"instrument": self.getName(),
                                          "name": filter.physical_filter,
                                          "abstract_filter": abstract_filter
                                          })

    @abstractmethod
    def getRawFormatter(self, dataId):
        """Return the Formatter class that should be used to read a particular
        raw file.

        Parameters
        ----------
        dataId : `DataCoordinate`
            Dimension-based ID for the raw file or files being ingested.

        Returns
        -------
        formatter : `Formatter` class
            Class to be used that reads the file into an
            `lsst.afw.image.Exposure` instance.
        """
        raise NotImplementedError()

    def writeCuratedCalibrations(self, butler, run=None):
        """Write human-curated calibration Datasets to the given Butler with
        the appropriate validity ranges.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler to use to store these calibrations.
        run : `str`, optional
            Run to use for this collection of calibrations. If `None` the
            collection name is worked out automatically from the instrument
            name and other metadata.

        Notes
        -----
        Expected to be called from subclasses. The base method calls
        ``writeCameraGeom``, ``writeStandardTextCuratedCalibrations``, and
        ``writeAdditionalCuratedCalibrations``.
        """
        # Need to determine the run for ingestion based on the instrument
        # name and eventually the data package version. The camera geom
        # is currently special in that it is not in the _data package.
        if run is None:
            run = self.makeCollectionName("calib")
        butler.registry.registerCollection(run, type=CollectionType.RUN)
        self.writeCameraGeom(butler, run=run)
        self.writeStandardTextCuratedCalibrations(butler, run=run)
        self.writeAdditionalCuratedCalibrations(butler, run=run)
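
    # Illustrative calling sequence (a sketch, assuming a writeable Gen3
    # repository and a concrete Instrument subclass ``MyInstrument``,
    # both hypothetical here):
    #
    #     butler = Butler("/path/to/repo", writeable=True)
    #     instrument = MyInstrument()
    #     instrument.register(butler.registry)
    #     instrument.writeCuratedCalibrations(butler)
    #
    # When ``run`` is omitted the calibrations land in the
    # ``<instrument>/calib`` RUN collection created above.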

    def writeAdditionalCuratedCalibrations(self, butler, run=None):
        """Write additional curated calibrations that might be instrument
        specific and are not part of the standard set.

        Default implementation does nothing.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler to use to store these calibrations.
        run : `str`, optional
            Name of the run to use to override the default run associated
            with this Butler.
        """
        return

    def applyConfigOverrides(self, name, config):
        """Apply instrument-specific overrides for a task config.

        Parameters
        ----------
        name : `str`
            Name of the object being configured; typically the _DefaultName
            of a Task.
        config : `lsst.pex.config.Config`
            Config instance to which overrides should be applied.
        """
        for root in self.configPaths:
            path = os.path.join(root, f"{name}.py")
            if os.path.exists(path):
                config.load(path)
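
    # Illustrative sketch (not executed): apply overrides from
    # ``configPaths`` to a task config. ``IsrTask`` is used here only as
    # an example and is assumed to be importable from lsst.ip.isr:
    #
    #     from lsst.ip.isr import IsrTask
    #
    #     config = IsrTask.ConfigClass()
    #     instrument.applyConfigOverrides(IsrTask._DefaultName, config)
    #
    # This loads ``<root>/isr.py`` from each entry in ``configPaths`` that
    # provides one.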

    def writeCameraGeom(self, butler, run=None):
        """Write the default camera geometry to the butler repository
        with an infinite validity range.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler to receive these calibration datasets.
        run : `str`, optional
            Name of the run to use to override the default run associated
            with this Butler.
        """

        datasetType = DatasetType("camera", ("instrument", "calibration_label"), "Camera",
                                  universe=butler.registry.dimensions)
        butler.registry.registerDatasetType(datasetType)
        unboundedDataId = addUnboundedCalibrationLabel(butler.registry, self.getName())
        camera = self.getCamera()
        butler.put(camera, datasetType, unboundedDataId, run=run)

    def writeStandardTextCuratedCalibrations(self, butler, run=None):
        """Write the set of standardized curated text calibrations to
        the repository.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler to receive these calibration datasets.
        run : `str`, optional
            Name of the run to use to override the default run associated
            with this Butler.
        """

        for datasetTypeName in self.standardCuratedDatasetTypes:
            # We need to define the dataset types.
            if datasetTypeName not in StandardCuratedCalibrationDatasetTypes:
                raise ValueError(f"DatasetType {datasetTypeName} not in understood list"
                                 f" [{', '.join(StandardCuratedCalibrationDatasetTypes)}]")
            definition = StandardCuratedCalibrationDatasetTypes[datasetTypeName]
            datasetType = DatasetType(datasetTypeName,
                                      universe=butler.registry.dimensions,
                                      **definition)
            self._writeSpecificCuratedCalibrationDatasets(butler, datasetType, run=run)

    def _writeSpecificCuratedCalibrationDatasets(self, butler, datasetType, run=None):
        """Write standardized curated calibration datasets for this specific
        dataset type from an obs data package.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Gen3 butler in which to put the calibrations.
        datasetType : `lsst.daf.butler.DatasetType`
            Dataset type to be put.
        run : `str`, optional
            Name of the run to use to override the default run associated
            with this Butler.

        Notes
        -----
        This method scans the location defined in the ``obsDataPackageDir``
        class attribute for curated calibrations corresponding to the
        supplied dataset type. The directory name in the data package must
        match the name of the dataset type. They are assumed to use the
        standard layout and can be read by
        `~lsst.pipe.tasks.read_curated_calibs.read_all` and provide standard
        metadata.
        """
        if self.obsDataPackageDir is None:
            # if there is no data package then there can't be datasets
            return

        calibPath = os.path.join(self.obsDataPackageDir, self.policyName,
                                 datasetType.name)

        if not os.path.exists(calibPath):
            return

        # Register the dataset type
        butler.registry.registerDatasetType(datasetType)

        # obs_base can't depend on pipe_tasks but concrete obs packages
        # can -- we therefore have to defer import
        from lsst.pipe.tasks.read_curated_calibs import read_all

        camera = self.getCamera()
        calibsDict = read_all(calibPath, camera)[0]  # second return is calib type
        dimensionRecords = []
        datasetRecords = []
        for det in calibsDict:
            times = sorted([k for k in calibsDict[det]])
            calibs = [calibsDict[det][time] for time in times]
            times = [astropy.time.Time(t, format="datetime", scale="utc") for t in times]
            times += [None]
            # Pair each calibration with the validity range running from its
            # own start time to the start time of the next calibration; the
            # trailing `None` leaves the final range open-ended.
            for calib, beginTime, endTime in zip(calibs, times[:-1], times[1:]):
                md = calib.getMetadata()
                calibrationLabel = f"{datasetType.name}/{md['CALIBDATE']}/{md['DETECTOR']}"
                dataId = DataCoordinate.standardize(
                    universe=butler.registry.dimensions,
                    instrument=self.getName(),
                    calibration_label=calibrationLabel,
                    detector=md["DETECTOR"],
                )
                datasetRecords.append((calib, dataId))
                dimensionRecords.append({
                    "instrument": self.getName(),
                    "name": calibrationLabel,
                    "timespan": Timespan(beginTime, endTime),
                })

        # Second loop actually does the inserts and filesystem writes.
        with butler.transaction():
            butler.registry.insertDimensionData("calibration_label", *dimensionRecords)
            # TODO: vectorize these puts, once butler APIs for that become
            # available.
            for calib, dataId in datasetRecords:
                butler.put(calib, datasetType, dataId, run=run)

    @abstractmethod
    def makeDataIdTranslatorFactory(self) -> TranslatorFactory:
        """Return a factory for creating Gen2->Gen3 data ID translators,
        specialized for this instrument.

        Derived class implementations should generally call
        `TranslatorFactory.addGenericInstrumentRules` with appropriate
        arguments, but are not required to (and may not be able to if their
        Gen2 raw data IDs are sufficiently different from the HSC/DECam/CFHT
        norm).

        Returns
        -------
        factory : `TranslatorFactory`
            Factory for `Translator` objects.
        """
        raise NotImplementedError("Must be implemented by derived classes.")

    @classmethod
    def makeDefaultRawIngestRunName(cls) -> str:
        """Make the default instrument-specific run collection string for raw
        data ingest.

        Returns
        -------
        coll : `str`
            Run collection name to be used as the default for ingestion of
            raws.
        """
        return cls.makeCollectionName("raw/all")

    @classmethod
    def makeCollectionName(cls, label: str) -> str:
        """Get the instrument-specific collection string to use as derived
        from the supplied label.

        Parameters
        ----------
        label : `str`
            String to be combined with the instrument name to form a
            collection name.

        Returns
        -------
        name : `str`
            Collection name to use that includes the instrument name.
        """
        return f"{cls.getName()}/{label}"
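

# Collection naming example (illustrative): for an instrument whose
# ``getName()`` returns "HSC" (hypothetical here), the helpers above yield
#
#     HSC/calib      <- makeCollectionName("calib")
#     HSC/raw/all    <- makeDefaultRawIngestRunName()
#
# so all instrument-specific collections share the "<instrument>/" prefix.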


def makeExposureRecordFromObsInfo(obsInfo, universe):
    """Construct an exposure DimensionRecord from
    `astro_metadata_translator.ObservationInfo`.

    Parameters
    ----------
    obsInfo : `astro_metadata_translator.ObservationInfo`
        A `~astro_metadata_translator.ObservationInfo` object corresponding to
        the exposure.
    universe : `DimensionUniverse`
        Set of all known dimensions.

    Returns
    -------
    record : `DimensionRecord`
        A record containing exposure metadata, suitable for insertion into
        a `Registry`.
    """
    dimension = universe["exposure"]

    ra, dec, sky_angle, zenith_angle = (None, None, None, None)
    if obsInfo.tracking_radec is not None:
        icrs = obsInfo.tracking_radec.icrs
        ra = icrs.ra.degree
        dec = icrs.dec.degree
        if obsInfo.boresight_rotation_coord == "sky":
            sky_angle = obsInfo.boresight_rotation_angle.degree
    if obsInfo.altaz_begin is not None:
        zenith_angle = obsInfo.altaz_begin.zen.degree

    return dimension.RecordClass(
        instrument=obsInfo.instrument,
        id=obsInfo.exposure_id,
        name=obsInfo.observation_id,
        group_name=obsInfo.exposure_group,
        group_id=obsInfo.visit_id,
        datetime_begin=obsInfo.datetime_begin,
        datetime_end=obsInfo.datetime_end,
        exposure_time=obsInfo.exposure_time.to_value("s"),
        dark_time=obsInfo.dark_time.to_value("s"),
        observation_type=obsInfo.observation_type,
        physical_filter=obsInfo.physical_filter,
        science_program=obsInfo.science_program,
        target_name=obsInfo.object,
        tracking_ra=ra,
        tracking_dec=dec,
        sky_angle=sky_angle,
        zenith_angle=zenith_angle,
    )
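

# Usage sketch (illustrative, not executed): build an exposure record from
# a raw header and insert it into a registry. ``header`` is assumed to be
# a FITS header (or other mapping) that astro_metadata_translator can
# translate, and ``butler`` an existing Gen3 Butler:
#
#     from astro_metadata_translator import ObservationInfo
#
#     obsInfo = ObservationInfo(header)
#     record = makeExposureRecordFromObsInfo(obsInfo, butler.registry.dimensions)
#     butler.registry.insertDimensionData("exposure", record)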


def addUnboundedCalibrationLabel(registry, instrumentName):
    """Add a special 'unbounded' calibration_label dimension entry for the
    given instrument that is valid for any exposure.

    If such an entry already exists, this function just returns a `DataId`
    for the existing entry.

    Parameters
    ----------
    registry : `Registry`
        Registry object in which to insert the dimension entry.
    instrumentName : `str`
        Name of the instrument this calibration label is associated with.

    Returns
    -------
    dataId : `DataId`
        New or existing data ID for the unbounded calibration.
    """
    d = dict(instrument=instrumentName, calibration_label="unbounded")
    try:
        return registry.expandDataId(d)
    except LookupError:
        pass
    entry = d.copy()
    entry["timespan"] = Timespan(None, None)
    registry.insertDimensionData("calibration_label", entry)
    return registry.expandDataId(d)


def loadCamera(butler: Butler, dataId: DataId, *, collections: Any = None) -> Tuple[Camera, bool]:
    """Attempt to load versioned camera geometry from a butler, but fall back
    to obtaining a nominal camera from the `Instrument` class if that fails.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        Butler instance to attempt to query for and load a ``camera`` dataset
        from.
    dataId : `dict` or `DataCoordinate`
        Data ID that identifies at least the ``instrument`` and ``exposure``
        dimensions.
    collections : Any, optional
        Collections to be searched, overriding ``butler.collections``.
        Can be any of the types supported by the ``collections`` argument
        to butler construction.

    Returns
    -------
    camera : `lsst.afw.cameraGeom.Camera`
        Camera object.
    versioned : `bool`
        If `True`, the camera was obtained from the butler and should represent
        a versioned camera from a calibration repository. If `False`, no
        camera datasets were found, and the returned camera was produced by
        instantiating the appropriate `Instrument` class and calling
        `Instrument.getCamera`.
    """
    if collections is None:
        collections = butler.collections
    # Registry would do data ID expansion internally if we didn't do it first,
    # but we might want an expanded data ID ourselves later, so we do it here
    # to ensure it only happens once.
    # This will also catch problems with the data ID not having keys we need.
    dataId = butler.registry.expandDataId(dataId, graph=butler.registry.dimensions["exposure"].graph)
    cameraRefs = list(butler.registry.queryDatasets("camera", dataId=dataId, collections=collections,
                                                    deduplicate=True))
    if cameraRefs:
        assert len(cameraRefs) == 1, "Should be guaranteed by deduplicate=True above."
        return butler.getDirect(cameraRefs[0]), True
    instrument = Instrument.fromName(dataId["instrument"], butler.registry)
    return instrument.getCamera(), False
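

# Usage sketch (illustrative): load the camera appropriate for a given
# exposure, falling back to the nominal camera if no versioned one exists.
# The instrument name, exposure ID, and collection name are placeholders:
#
#     camera, versioned = loadCamera(
#         butler,
#         {"instrument": "HSC", "exposure": 12345},
#         collections="HSC/calib",
#     )
#     if not versioned:
#         # Only the nominal cameraGeom from the Instrument class was found.
#         ...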