lsst.obs.base  20.0.0-54-gba713e9+a7d430d1e1
_instrument.py
1 # This file is part of obs_base.
2 #
3 # Developed for the LSST Data Management System.
4 # This product includes software developed by the LSST Project
5 # (https://www.lsst.org).
6 # See the COPYRIGHT file at the top-level directory of this distribution
7 # for details of code ownership.
8 #
9 # This program is free software: you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation, either version 3 of the License, or
12 # (at your option) any later version.
13 #
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
18 #
19 # You should have received a copy of the GNU General Public License
20 # along with this program. If not, see <http://www.gnu.org/licenses/>.
21 
22 from __future__ import annotations
23 
24 __all__ = ("Instrument", "makeExposureRecordFromObsInfo", "loadCamera")
25 
26 import os.path
27 from abc import ABCMeta, abstractmethod
28 from collections import defaultdict
29 from typing import Any, Optional, Set, Sequence, Tuple, TYPE_CHECKING
30 from functools import lru_cache
31 
32 import astropy.time
33 
34 from lsst.afw.cameraGeom import Camera
35 from lsst.daf.butler import (
36  Butler,
37  CollectionType,
38  DataCoordinate,
39  DataId,
40  DatasetType,
41  Timespan,
42 )
43 from lsst.utils import getPackageDir, doImport
44 
45 if TYPE_CHECKING:
46  from .gen2to3 import TranslatorFactory
47  from lsst.daf.butler import Registry
48 
49 # To be a standard text curated calibration means that we use a
50 # standard definition for the corresponding DatasetType.
51 StandardCuratedCalibrationDatasetTypes = {
52  "defects": {"dimensions": ("instrument", "detector"), "storageClass": "Defects"},
53  "qe_curve": {"dimensions": ("instrument", "detector"), "storageClass": "QECurve"},
54  "crosstalk": {"dimensions": ("instrument", "detector"), "storageClass": "CrosstalkCalib"},
55 }
56 
57 
58 class Instrument(metaclass=ABCMeta):
59  """Base class for instrument-specific logic for the Gen3 Butler.
60 
61  Concrete instrument subclasses should be directly constructable with no
62  arguments.
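
 Examples
 --------
 A minimal, illustrative sketch of a concrete subclass. Everything here is
 hypothetical (the names, the filter-definition object, and the elided
 method bodies); it is only meant to show which attributes and abstract
 methods a subclass is expected to provide::

     class MyCamera(Instrument):
         # A FilterDefinitionCollection describing this camera's filters.
         filterDefinitions = MY_FILTER_DEFINITIONS
         policyName = "myCamera"
         obsDataPackage = "obs_mycamera_data"

         @classmethod
         def getName(cls):
             return "MyCam"

         def getCamera(self):
             ...  # return the lsst.afw.cameraGeom.Camera for this instrument

         def register(self, registry):
             ...  # insert instrument, detector, and physical_filter records

         def getRawFormatter(self, dataId):
             ...  # return the Formatter class used to read raw files

         def makeDataIdTranslatorFactory(self):
             ...  # return a gen2to3 TranslatorFactory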
63  """
64 
65  configPaths: Sequence[str] = ()
66  """Paths to config files to read for specific Tasks.
67 
68  The paths in this list should contain files of the form `task.py`, for
69  each of the Tasks that requires special configuration.
70  """
71 
72  policyName: Optional[str] = None
73  """Instrument specific name to use when locating a policy or configuration
74  file in the file system."""
75 
76  obsDataPackage: Optional[str] = None
77  """Name of the package containing the text curated calibration files.
 78  Usually an obs _data package. If `None`, no curated calibration files
79  will be read. (`str`)"""
80 
81  standardCuratedDatasetTypes: Set[str] = frozenset(StandardCuratedCalibrationDatasetTypes)
82  """The dataset types expected to be obtained from the obsDataPackage.
83 
84  These dataset types are all required to have standard definitions and
85  must be known to the base class. Clearing this list will prevent
86  any of these calibrations from being stored. If a dataset type is not
87  known to a specific instrument it can still be included in this list
88  since the data package is the source of truth. (`set` of `str`)
89  """
90 
91  additionalCuratedDatasetTypes: Set[str] = frozenset()
92  """Curated dataset types specific to this particular instrument that do
93  not follow the standard organization found in obs data packages.
94 
95  These are the instrument-specific dataset types written by
96  `writeAdditionalCuratedCalibrations` in addition to the calibrations
97  found in obs data packages that follow the standard scheme.
98  (`set` of `str`)"""
99 
100  @property
101  @abstractmethod
102  def filterDefinitions(self):
103  """`~lsst.obs.base.FilterDefinitionCollection`, defining the filters
104  for this instrument.
105  """
106  return None
107 
108  def __init__(self):
109  self.filterDefinitions.reset()
110  self.filterDefinitions.defineFilters()
111 
112  @classmethod
113  @abstractmethod
114  def getName(cls):
115  """Return the short (dimension) name for this instrument.
116 
117  This is not (in general) the same as the class name - it's what is used
118  as the value of the "instrument" field in data IDs, and is usually an
119  abbreviation of the full name.
120  """
121  raise NotImplementedError()
122 
123  @classmethod
124  @lru_cache()
125  def getCuratedCalibrationNames(cls) -> Set[str]:
126  """Return the names of all the curated calibration dataset types.
127 
128  Returns
129  -------
130  names : `set` of `str`
131  The dataset type names of all curated calibrations. This will
132  include the standard curated calibrations even if the particular
133  instrument does not support them.
134 
135  Notes
136  -----
137  The returned list does not indicate whether a particular dataset
138  is present in the Butler repository, simply that these are the
139  dataset types that are handled by ``writeCuratedCalibrations``.
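
 Examples
 --------
 A hedged sketch; ``MyCamera`` is a hypothetical concrete subclass, and the
 exact names returned depend on what its obs data package provides::

     >>> sorted(MyCamera.getCuratedCalibrationNames())  # doctest: +SKIP
     ['camera', 'crosstalk', 'defects', 'qe_curve']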
140  """
141 
142  # Camera is a special dataset type that is also handled as a
143  # curated calibration.
144  curated = {"camera"}
145 
146  # Make a cursory attempt to filter out curated dataset types
147  # that are not present for this instrument
148  for datasetTypeName in cls.standardCuratedDatasetTypes:
149  calibPath = cls._getSpecificCuratedCalibrationPath(datasetTypeName)
150  if calibPath is not None:
151  curated.add(datasetTypeName)
152 
153  curated.update(cls.additionalCuratedDatasetTypes)
154  return frozenset(curated)
155 
156  @abstractmethod
157  def getCamera(self):
158  """Retrieve the cameraGeom representation of this instrument.
159 
160  This is a temporary API that should go away once ``obs_`` packages have
161  a standardized approach to writing versioned cameras to a Gen3 repo.
162  """
163  raise NotImplementedError()
164 
165  @abstractmethod
166  def register(self, registry):
167  """Insert instrument, physical_filter, and detector entries into a
168  `Registry`.
169  """
170  raise NotImplementedError()
171 
172  @classmethod
 173  @lru_cache()
 174  def getObsDataPackageDir(cls):
 175  """The root of the obs data package that provides specializations for
 176  this instrument.
 177 
 178  Returns
 179  -------
 180  dir : `str` or `None`
 181  The root of the relevant obs data package.
 182  """
183  if cls.obsDataPackage is None:
184  return None
185  return getPackageDir(cls.obsDataPackage)
186 
187  @staticmethod
188  def fromName(name: str, registry: Registry) -> Instrument:
189  """Given an instrument name and a butler, retrieve a corresponding
190  instantiated instrument object.
191 
192  Parameters
193  ----------
194  name : `str`
195  Name of the instrument (must match the return value of `getName`).
196  registry : `lsst.daf.butler.Registry`
197  Butler registry to query to find the information.
198 
199  Returns
200  -------
201  instrument : `Instrument`
202  An instance of the relevant `Instrument`.
203 
204  Notes
205  -----
206  The instrument must be registered in the corresponding butler.
207 
208  Raises
209  ------
210  LookupError
211  Raised if the instrument is not known to the supplied registry.
212  ModuleNotFoundError
213  Raised if the class could not be imported. This could mean
214  that the relevant obs package has not been setup.
215  TypeError
216  Raised if the class name retrieved is not a string.
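
 Examples
 --------
 A hedged sketch; the repository path and the instrument name ``"HSC"`` are
 assumptions, and the relevant obs package must be set up so the class can
 be imported::

     >>> from lsst.daf.butler import Butler
     >>> butler = Butler("/path/to/repo")  # doctest: +SKIP
     >>> instrument = Instrument.fromName("HSC", butler.registry)  # doctest: +SKIP
     >>> instrument.getName()  # doctest: +SKIP
     'HSC'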
217  """
218  records = list(registry.queryDimensionRecords("instrument", instrument=name))
219  if not records:
220  raise LookupError(f"No registered instrument with name '{name}'.")
221  cls = records[0].class_name
222  if not isinstance(cls, str):
223  raise TypeError(f"Unexpected class name retrieved from {name} instrument dimension (got {cls})")
224  instrument = doImport(cls)
225  return instrument()
226 
227  @staticmethod
228  def importAll(registry: Registry) -> None:
229  """Import all the instruments known to this registry.
230 
231  This will ensure that all metadata translators have been registered.
232 
233  Parameters
234  ----------
235  registry : `lsst.daf.butler.Registry`
236  Butler registry to query to find the information.
237 
238  Notes
239  -----
240  It is allowed for a particular instrument class to fail on import.
241  This might simply indicate that a particular obs package has
242  not been setup.
243  """
244  records = list(registry.queryDimensionRecords("instrument"))
245  for record in records:
246  cls = record.class_name
247  try:
248  doImport(cls)
249  except Exception:
250  pass
251 
252  def _registerFilters(self, registry):
253  """Register the physical and abstract filter Dimension relationships.
254  This should be called in the ``register`` implementation.
255 
256  Parameters
257  ----------
258  registry : `lsst.daf.butler.core.Registry`
259  The registry to add dimensions to.
260  """
261  for filter in self.filterDefinitions:
262  # fix for undefined abstract filters causing trouble in the registry:
263  if filter.band is None:
264  band = filter.physical_filter
265  else:
266  band = filter.band
267 
268  registry.insertDimensionData("physical_filter",
269  {"instrument": self.getName(),
270  "name": filter.physical_filter,
271  "band": band
272  })
273 
274  @abstractmethod
275  def getRawFormatter(self, dataId):
276  """Return the Formatter class that should be used to read a particular
277  raw file.
278 
279  Parameters
280  ----------
281  dataId : `DataCoordinate`
282  Dimension-based ID for the raw file or files being ingested.
283 
284  Returns
285  -------
286  formatter : `Formatter` class
287  Class to be used that reads the file into an
288  `lsst.afw.image.Exposure` instance.
289  """
290  raise NotImplementedError()
291 
292  def applyConfigOverrides(self, name, config):
293  """Apply instrument-specific overrides for a task config.
294 
295  Parameters
296  ----------
297  name : `str`
298  Name of the object being configured; typically the _DefaultName
299  of a Task.
300  config : `lsst.pex.config.Config`
301  Config instance to which overrides should be applied.
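
 Examples
 --------
 A hedged sketch; ``MyCamera`` and the use of ``IsrTask`` here are
 illustrative assumptions, not part of this module::

     >>> from lsst.ip.isr import IsrTask  # doctest: +SKIP
     >>> config = IsrTask.ConfigClass()  # doctest: +SKIP
     >>> MyCamera().applyConfigOverrides(IsrTask._DefaultName, config)  # doctest: +SKIP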
302  """
303  for root in self.configPaths:
304  path = os.path.join(root, f"{name}.py")
305  if os.path.exists(path):
306  config.load(path)
307 
308  def writeCuratedCalibrations(self, butler: Butler, collection: Optional[str] = None,
309  suffixes: Sequence[str] = ()) -> None:
310  """Write human-curated calibration Datasets to the given Butler with
311  the appropriate validity ranges.
312 
313  Parameters
314  ----------
315  butler : `lsst.daf.butler.Butler`
316  Butler to use to store these calibrations.
317  collection : `str`, optional
318  Name to use for the calibration collection that associates all
319  datasets with a validity range. If this collection already exists,
320  it must be a `~CollectionType.CALIBRATION` collection, and it must
321  not have any datasets that would conflict with those inserted by
322  this method. If `None`, a collection name is worked out
323  automatically from the instrument name and other metadata by
324  calling ``makeCalibrationCollectionName``, but this
325  default name may not work well for long-lived repositories unless
326  one or more ``suffixes`` are also provided (and changed every time
327  curated calibrations are ingested).
328  suffixes : `Sequence` [ `str` ], optional
329  Name suffixes to append to collection names, after concatenating
 330  them with the standard collection name delimiter. If provided,
331  these are appended to the names of the `~CollectionType.RUN`
 332  collections that datasets are inserted directly into, as well as the
333  `~CollectionType.CALIBRATION` collection if it is generated
334  automatically (i.e. if ``collection is None``).
335 
336  Notes
337  -----
338  Expected to be called from subclasses. The base method calls
339  ``writeCameraGeom``, ``writeStandardTextCuratedCalibrations``,
 340  and ``writeAdditionalCuratedCalibrations``.
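
 Examples
 --------
 A hedged sketch; the repository path, the ticket-style suffix, and
 ``MyCamera`` are assumptions::

     >>> from lsst.daf.butler import Butler
     >>> butler = Butler("/path/to/repo", writeable=True)  # doctest: +SKIP
     >>> MyCamera().writeCuratedCalibrations(butler, suffixes=("DM-12345",))  # doctest: +SKIP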
341  """
342  # Delegate registration of collections (and creating names for them)
343  # to other methods so they can be called independently with the same
344  # preconditions. Collection registration is idempotent, so this is
345  # safe, and while it adds a bit of overhead, as long as it's one
346  # registration attempt per method (not per dataset or dataset type),
347  # that's negligible.
 348  self.writeCameraGeom(butler, collection, suffixes=suffixes)
349  self.writeStandardTextCuratedCalibrations(butler, collection, suffixes=suffixes)
350  self.writeAdditionalCuratedCalibrations(butler, collection, suffixes=suffixes)
351 
352  def writeAdditionalCuratedCalibrations(self, butler: Butler, collection: Optional[str] = None,
353  suffixes: Sequence[str] = ()) -> None:
354  """Write additional curated calibrations that might be instrument
355  specific and are not part of the standard set.
356 
357  Default implementation does nothing.
358 
359  Parameters
360  ----------
361  butler : `lsst.daf.butler.Butler`
362  Butler to use to store these calibrations.
363  collection : `str`, optional
364  Name to use for the calibration collection that associates all
365  datasets with a validity range. If this collection already exists,
366  it must be a `~CollectionType.CALIBRATION` collection, and it must
367  not have any datasets that would conflict with those inserted by
368  this method. If `None`, a collection name is worked out
369  automatically from the instrument name and other metadata by
370  calling ``makeCalibrationCollectionName``, but this
371  default name may not work well for long-lived repositories unless
372  one or more ``suffixes`` are also provided (and changed every time
373  curated calibrations are ingested).
374  suffixes : `Sequence` [ `str` ], optional
375  Name suffixes to append to collection names, after concatenating
 376  them with the standard collection name delimiter. If provided,
377  these are appended to the names of the `~CollectionType.RUN`
 378  collections that datasets are inserted directly into, as well as the
379  `~CollectionType.CALIBRATION` collection if it is generated
380  automatically (i.e. if ``collection is None``).
381  """
382  return
383 
384  def writeCameraGeom(self, butler: Butler, collection: Optional[str] = None,
385  suffixes: Sequence[str] = ()) -> None:
386  """Write the default camera geometry to the butler repository and
387  associate it with the appropriate validity range in a calibration
388  collection.
389 
390  Parameters
391  ----------
392  butler : `lsst.daf.butler.Butler`
393  Butler to use to store these calibrations.
394  collection : `str`, optional
395  Name to use for the calibration collection that associates all
396  datasets with a validity range. If this collection already exists,
397  it must be a `~CollectionType.CALIBRATION` collection, and it must
398  not have any datasets that would conflict with those inserted by
399  this method. If `None`, a collection name is worked out
400  automatically from the instrument name and other metadata by
401  calling ``makeCalibrationCollectionName``, but this
402  default name may not work well for long-lived repositories unless
403  one or more ``suffixes`` are also provided (and changed every time
404  curated calibrations are ingested).
405  suffixes : `Sequence` [ `str` ], optional
406  Name suffixes to append to collection names, after concatenating
 407  them with the standard collection name delimiter. If provided,
408  these are appended to the names of the `~CollectionType.RUN`
 409  collections that datasets are inserted directly into, as well as the
410  `~CollectionType.CALIBRATION` collection if it is generated
411  automatically (i.e. if ``collection is None``).
412  """
413  if collection is None:
414  collection = self.makeCalibrationCollectionName(*suffixes)
415  butler.registry.registerCollection(collection, type=CollectionType.CALIBRATION)
416  run = self.makeUnboundedCalibrationRunName(*suffixes)
417  butler.registry.registerRun(run)
418  datasetType = DatasetType("camera", ("instrument",), "Camera", isCalibration=True,
419  universe=butler.registry.dimensions)
420  butler.registry.registerDatasetType(datasetType)
421  camera = self.getCamera()
422  ref = butler.put(camera, datasetType, {"instrument": self.getName()}, run=run)
423  butler.registry.certify(collection, [ref], Timespan(begin=None, end=None))
424 
425  def writeStandardTextCuratedCalibrations(self, butler: Butler, collection: Optional[str] = None,
426  suffixes: Sequence[str] = ()) -> None:
427  """Write the set of standardized curated text calibrations to
428  the repository.
429 
430  Parameters
431  ----------
432  butler : `lsst.daf.butler.Butler`
433  Butler to receive these calibration datasets.
434  collection : `str`, optional
435  Name to use for the calibration collection that associates all
436  datasets with a validity range. If this collection already exists,
437  it must be a `~CollectionType.CALIBRATION` collection, and it must
438  not have any datasets that would conflict with those inserted by
439  this method. If `None`, a collection name is worked out
440  automatically from the instrument name and other metadata by
441  calling ``makeCalibrationCollectionName``, but this
442  default name may not work well for long-lived repositories unless
443  one or more ``suffixes`` are also provided (and changed every time
444  curated calibrations are ingested).
445  suffixes : `Sequence` [ `str` ], optional
446  Name suffixes to append to collection names, after concatenating
 447  them with the standard collection name delimiter. If provided,
448  these are appended to the names of the `~CollectionType.RUN`
 449  collections that datasets are inserted directly into, as well as the
450  `~CollectionType.CALIBRATION` collection if it is generated
451  automatically (i.e. if ``collection is None``).
452  """
453  if collection is None:
454  collection = self.makeCalibrationCollectionName(*suffixes)
455  butler.registry.registerCollection(collection, type=CollectionType.CALIBRATION)
456  runs = set()
457  for datasetTypeName in self.standardCuratedDatasetTypes:
458  # We need to define the dataset types.
459  if datasetTypeName not in StandardCuratedCalibrationDatasetTypes:
460  raise ValueError(f"DatasetType {datasetTypeName} not in understood list"
461  f" [{'.'.join(StandardCuratedCalibrationDatasetTypes)}]")
462  definition = StandardCuratedCalibrationDatasetTypes[datasetTypeName]
463  datasetType = DatasetType(datasetTypeName,
464  universe=butler.registry.dimensions,
465  isCalibration=True,
466  **definition)
467  self._writeSpecificCuratedCalibrationDatasets(butler, datasetType, collection, runs=runs,
468  suffixes=suffixes)
469 
470  @classmethod
471  def _getSpecificCuratedCalibrationPath(cls, datasetTypeName):
472  """Return the path of the curated calibration directory.
473 
474  Parameters
475  ----------
476  datasetTypeName : `str`
477  The name of the standard dataset type to find.
478 
479  Returns
480  -------
481  path : `str`
482  The path to the standard curated data directory. `None` if the
483  dataset type is not found or the obs data package is not
484  available.
485  """
486  if cls.getObsDataPackageDir() is None:
487  # if there is no data package then there can't be datasets
488  return None
489 
490  calibPath = os.path.join(cls.getObsDataPackageDir(), cls.policyName,
491  datasetTypeName)
492 
493  if os.path.exists(calibPath):
494  return calibPath
495 
496  return None
497 
498  def _writeSpecificCuratedCalibrationDatasets(self, butler: Butler, datasetType: DatasetType,
499  collection: str, runs: Set[str], suffixes: Sequence[str]):
500  """Write standardized curated calibration datasets for this specific
501  dataset type from an obs data package.
502 
503  Parameters
504  ----------
505  butler : `lsst.daf.butler.Butler`
506  Gen3 butler in which to put the calibrations.
507  datasetType : `lsst.daf.butler.DatasetType`
508  Dataset type to be put.
509  collection : `str`
510  Name of the `~CollectionType.CALIBRATION` collection that
511  associates all datasets with validity ranges. Must have been
512  registered prior to this call.
513  runs : `set` [ `str` ]
514  Names of runs that have already been registered by previous calls
515  and need not be registered again. Should be updated by this
516  method as new runs are registered.
517  suffixes : `Sequence` [ `str` ]
518  Suffixes to append to run names when creating them from
519  ``CALIBDATE`` metadata, via calls to `makeCuratedCalibrationName`.
520 
521  Notes
522  -----
523  This method scans the location defined in the ``obsDataPackageDir``
524  class attribute for curated calibrations corresponding to the
525  supplied dataset type. The directory name in the data package must
526  match the name of the dataset type. They are assumed to use the
527  standard layout and can be read by
528  `~lsst.pipe.tasks.read_curated_calibs.read_all` and provide standard
529  metadata.
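
 The layout sketched below is an assumption based on the description above
 (per-detector subdirectories containing one text file per ``CALIBDATE``);
 consult the obs data package and
 `~lsst.pipe.tasks.read_curated_calibs.read_all` for the authoritative
 layout::

     <obsDataPackageDir>/<policyName>/<datasetTypeName>/
         <detector_name>/
             <calibdate>.ecsv   # text calibration file readable by read_all
             ...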
530  """
531  calibPath = self._getSpecificCuratedCalibrationPath(datasetType.name)
532  if calibPath is None:
533  return
534 
535  # Register the dataset type
536  butler.registry.registerDatasetType(datasetType)
537 
538  # obs_base can't depend on pipe_tasks but concrete obs packages
539  # can -- we therefore have to defer import
540  from lsst.pipe.tasks.read_curated_calibs import read_all
541 
542  # Read calibs, registering a new run for each CALIBDATE as needed.
543  # We try to avoid registering runs multiple times as an optimization
544  # by putting them in the ``runs`` set that was passed in.
545  camera = self.getCamera()
546  calibsDict = read_all(calibPath, camera)[0] # second return is calib type
547  datasetRecords = []
548  for det in calibsDict:
549  times = sorted([k for k in calibsDict[det]])
550  calibs = [calibsDict[det][time] for time in times]
551  times = [astropy.time.Time(t, format="datetime", scale="utc") for t in times]
552  times += [None]
553  for calib, beginTime, endTime in zip(calibs, times[:-1], times[1:]):
554  md = calib.getMetadata()
555  run = self.makeCuratedCalibrationRunName(md['CALIBDATE'], *suffixes)
556  if run not in runs:
557  butler.registry.registerRun(run)
558  runs.add(run)
559  dataId = DataCoordinate.standardize(
560  universe=butler.registry.dimensions,
561  instrument=self.getName(),
562  detector=md["DETECTOR"],
563  )
564  datasetRecords.append((calib, dataId, run, Timespan(beginTime, endTime)))
565 
566  # Second loop actually does the inserts and filesystem writes. We
567  # first do a butler.put on each dataset, inserting it into the run for
568  # its calibDate. We remember those refs and group them by timespan, so
569  # we can vectorize the certify calls as much as possible.
570  refsByTimespan = defaultdict(list)
571  with butler.transaction():
572  for calib, dataId, run, timespan in datasetRecords:
573  refsByTimespan[timespan].append(butler.put(calib, datasetType, dataId, run=run))
574  for timespan, refs in refsByTimespan.items():
575  butler.registry.certify(collection, refs, timespan)
576 
577  @abstractmethod
578  def makeDataIdTranslatorFactory(self) -> TranslatorFactory:
579  """Return a factory for creating Gen2->Gen3 data ID translators,
580  specialized for this instrument.
581 
582  Derived class implementations should generally call
583  `TranslatorFactory.addGenericInstrumentRules` with appropriate
584  arguments, but are not required to (and may not be able to if their
585  Gen2 raw data IDs are sufficiently different from the HSC/DECam/CFHT
586  norm).
587 
588  Returns
589  -------
590  factory : `TranslatorFactory`.
591  Factory for `Translator` objects.
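
 Examples
 --------
 A hedged sketch of a typical implementation; whether the generic rules are
 sufficient (and which keyword arguments they need) depends on the
 instrument::

     def makeDataIdTranslatorFactory(self):
         factory = TranslatorFactory()
         factory.addGenericInstrumentRules(self.getName())
         return factory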
592  """
593  raise NotImplementedError("Must be implemented by derived classes.")
594 
 595  @classmethod
 596  def makeDefaultRawIngestRunName(cls) -> str:
 597  """Make the default instrument-specific run collection string for raw
598  data ingest.
599 
600  Returns
601  -------
602  coll : `str`
603  Run collection name to be used as the default for ingestion of
604  raws.
605  """
606  return cls.makeCollectionName("raw", "all")
607 
608  @classmethod
609  def makeUnboundedCalibrationRunName(cls, *suffixes: str) -> str:
610  """Make a RUN collection name appropriate for inserting calibration
611  datasets whose validity ranges are unbounded.
612 
613  Parameters
614  ----------
615  *suffixes : `str`
616  Strings to be appended to the base name, using the default
617  delimiter for collection names.
618 
619  Returns
620  -------
621  name : `str`
622  Run collection name.
623  """
624  return cls.makeCollectionName("calib", "unbounded", *suffixes)
625 
626  @classmethod
627  def makeCuratedCalibrationRunName(cls, calibDate: str, *suffixes: str) -> str:
628  """Make a RUN collection name appropriate for inserting curated
629  calibration datasets with the given ``CALIBDATE`` metadata value.
630 
631  Parameters
632  ----------
633  calibDate : `str`
634  The ``CALIBDATE`` metadata value.
635  *suffixes : `str`
636  Strings to be appended to the base name, using the default
637  delimiter for collection names.
638 
639  Returns
640  -------
641  name : `str`
642  Run collection name.
643  """
644  return cls.makeCollectionName("calib", "curated", calibDate, *suffixes)
645 
646  @classmethod
647  def makeCalibrationCollectionName(cls, *suffixes: str) -> str:
648  """Make a CALIBRATION collection name appropriate for associating
649  calibration datasets with validity ranges.
650 
651  Parameters
652  ----------
653  *suffixes : `str`
654  Strings to be appended to the base name, using the default
655  delimiter for collection names.
656 
657  Returns
658  -------
659  name : `str`
660  Calibration collection name.
661  """
662  return cls.makeCollectionName("calib", *suffixes)
663 
664  @classmethod
665  def makeCollectionName(cls, *labels: str) -> str:
666  """Get the instrument-specific collection string to use as derived
667  from the supplied labels.
668 
669  Parameters
670  ----------
671  *labels : `str`
672  Strings to be combined with the instrument name to form a
673  collection name.
674 
675  Returns
676  -------
677  name : `str`
678  Collection name to use that includes the instrument name.
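
 Examples
 --------
 A hedged sketch; ``MyCamera`` is a hypothetical subclass whose ``getName``
 returns ``"MyCam"``::

     >>> MyCamera.makeCollectionName("calib", "unbounded")  # doctest: +SKIP
     'MyCam/calib/unbounded'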
679  """
680  return "/".join((cls.getName(),) + labels)
681 
682 
683 def makeExposureRecordFromObsInfo(obsInfo, universe):
684  """Construct an exposure DimensionRecord from
685  `astro_metadata_translator.ObservationInfo`.
686 
687  Parameters
688  ----------
689  obsInfo : `astro_metadata_translator.ObservationInfo`
690  A `~astro_metadata_translator.ObservationInfo` object corresponding to
691  the exposure.
692  universe : `DimensionUniverse`
693  Set of all known dimensions.
694 
695  Returns
696  -------
697  record : `DimensionRecord`
698  A record containing exposure metadata, suitable for insertion into
699  a `Registry`.
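
 Examples
 --------
 A hedged sketch; the raw file path is an assumption, and a ``butler`` with
 a writeable registry is assumed for the final insertion step::

     >>> import astropy.io.fits
     >>> from astro_metadata_translator import ObservationInfo
     >>> header = astropy.io.fits.getheader("raw.fits")  # doctest: +SKIP
     >>> obsInfo = ObservationInfo(header)  # doctest: +SKIP
     >>> record = makeExposureRecordFromObsInfo(obsInfo, butler.registry.dimensions)  # doctest: +SKIP
     >>> butler.registry.insertDimensionData("exposure", record)  # doctest: +SKIP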
700  """
701  dimension = universe["exposure"]
702 
703  ra, dec, sky_angle, zenith_angle = (None, None, None, None)
704  if obsInfo.tracking_radec is not None:
705  icrs = obsInfo.tracking_radec.icrs
706  ra = icrs.ra.degree
707  dec = icrs.dec.degree
708  if obsInfo.boresight_rotation_coord == "sky":
709  sky_angle = obsInfo.boresight_rotation_angle.degree
710  if obsInfo.altaz_begin is not None:
711  zenith_angle = obsInfo.altaz_begin.zen.degree
712 
713  return dimension.RecordClass(
714  instrument=obsInfo.instrument,
715  id=obsInfo.exposure_id,
716  name=obsInfo.observation_id,
717  group_name=obsInfo.exposure_group,
718  group_id=obsInfo.visit_id,
719  datetime_begin=obsInfo.datetime_begin,
720  datetime_end=obsInfo.datetime_end,
721  exposure_time=obsInfo.exposure_time.to_value("s"),
722  dark_time=obsInfo.dark_time.to_value("s"),
723  observation_type=obsInfo.observation_type,
724  observation_reason=obsInfo.observation_reason,
725  physical_filter=obsInfo.physical_filter,
726  science_program=obsInfo.science_program,
727  target_name=obsInfo.object,
728  tracking_ra=ra,
729  tracking_dec=dec,
730  sky_angle=sky_angle,
731  zenith_angle=zenith_angle,
732  )
733 
734 
735 def loadCamera(butler: Butler, dataId: DataId, *, collections: Any = None) -> Tuple[Camera, bool]:
736  """Attempt to load versioned camera geometry from a butler, but fall back
737  to obtaining a nominal camera from the `Instrument` class if that fails.
738 
739  Parameters
740  ----------
741  butler : `lsst.daf.butler.Butler`
742  Butler instance to attempt to query for and load a ``camera`` dataset
743  from.
744  dataId : `dict` or `DataCoordinate`
745  Data ID that identifies at least the ``instrument`` and ``exposure``
746  dimensions.
747  collections : Any, optional
748  Collections to be searched, overriding ``self.butler.collections``.
749  Can be any of the types supported by the ``collections`` argument
750  to butler construction.
751 
752  Returns
753  -------
754  camera : `lsst.afw.cameraGeom.Camera`
755  Camera object.
756  versioned : `bool`
757  If `True`, the camera was obtained from the butler and should represent
758  a versioned camera from a calibration repository. If `False`, no
759  camera datasets were found, and the returned camera was produced by
760  instantiating the appropriate `Instrument` class and calling
761  `Instrument.getCamera`.
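
 Examples
 --------
 A hedged sketch; the repository path, collection names, instrument name,
 and exposure ID are all assumptions::

     >>> from lsst.daf.butler import Butler
     >>> butler = Butler("/path/to/repo", collections=["HSC/calib", "HSC/raw/all"])  # doctest: +SKIP
     >>> camera, versioned = loadCamera(butler, {"instrument": "HSC", "exposure": 12345})  # doctest: +SKIP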
762  """
763  if collections is None:
764  collections = butler.collections
765  # Registry would do data ID expansion internally if we didn't do it first,
766  # but we might want an expanded data ID ourselves later, so we do it here
767  # to ensure it only happens once.
768  # This will also catch problems with the data ID not having keys we need.
769  dataId = butler.registry.expandDataId(dataId, graph=butler.registry.dimensions["exposure"].graph)
770  try:
771  cameraRef = butler.get("camera", dataId=dataId, collections=collections)
772  return cameraRef, True
773  except LookupError:
774  pass
775  instrument = Instrument.fromName(dataId["instrument"], butler.registry)
776  return instrument.getCamera(), False