lsst.obs.base  20.0.0-67-g32d6278+9ea4af2944
_instrument.py
Go to the documentation of this file.
1 # This file is part of obs_base.
2 #
3 # Developed for the LSST Data Management System.
4 # This product includes software developed by the LSST Project
5 # (https://www.lsst.org).
6 # See the COPYRIGHT file at the top-level directory of this distribution
7 # for details of code ownership.
8 #
9 # This program is free software: you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation, either version 3 of the License, or
12 # (at your option) any later version.
13 #
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
18 #
19 # You should have received a copy of the GNU General Public License
20 # along with this program. If not, see <http://www.gnu.org/licenses/>.
21 
22 from __future__ import annotations
23 
24 __all__ = ("Instrument", "makeExposureRecordFromObsInfo", "loadCamera")
25 
26 import os.path
27 from abc import ABCMeta, abstractmethod
28 from collections import defaultdict
29 from typing import Any, Optional, Set, Sequence, Tuple, TYPE_CHECKING
30 from functools import lru_cache
31 
32 import astropy.time
33 
34 from lsst.afw.cameraGeom import Camera
35 from lsst.daf.butler import (
36  Butler,
37  CollectionType,
38  DataCoordinate,
39  DataId,
40  DatasetType,
41  Timespan,
42 )
43 from lsst.utils import getPackageDir, doImport
44 
45 if TYPE_CHECKING:
46  from .gen2to3 import TranslatorFactory
47  from lsst.daf.butler import Registry
48 
# To be a standard text curated calibration means that we use a
# standard definition for the corresponding DatasetType.
# Maps each standard dataset type name to the keyword arguments
# (dimensions, storageClass) used to construct its DatasetType in
# writeStandardTextCuratedCalibrations.
StandardCuratedCalibrationDatasetTypes = {
    "defects": {"dimensions": ("instrument", "detector"), "storageClass": "Defects"},
    "qe_curve": {"dimensions": ("instrument", "detector"), "storageClass": "QECurve"},
    "crosstalk": {"dimensions": ("instrument", "detector"), "storageClass": "CrosstalkCalib"},
}
56 
57 
class Instrument(metaclass=ABCMeta):
    """Base class for instrument-specific logic for the Gen3 Butler.

    Concrete instrument subclasses should be directly constructable with no
    arguments.
    """

    configPaths: Sequence[str] = ()
    """Paths to config files to read for specific Tasks.

    The paths in this list should contain files of the form `task.py`, for
    each of the Tasks that requires special configuration.
    """

    policyName: Optional[str] = None
    """Instrument specific name to use when locating a policy or configuration
    file in the file system."""

    obsDataPackage: Optional[str] = None
    """Name of the package containing the text curated calibration files.
    Usually an obs_data package.  If `None` no curated calibration files
    will be read. (`str`)"""

    standardCuratedDatasetTypes: Set[str] = frozenset(StandardCuratedCalibrationDatasetTypes)
    """The dataset types expected to be obtained from the obsDataPackage.

    These dataset types are all required to have standard definitions and
    must be known to the base class.  Clearing this list will prevent
    any of these calibrations from being stored.  If a dataset type is not
    known to a specific instrument it can still be included in this list
    since the data package is the source of truth. (`set` of `str`)
    """

    additionalCuratedDatasetTypes: Set[str] = frozenset()
    """Curated dataset types specific to this particular instrument that do
    not follow the standard organization found in obs data packages.

    These are the instrument-specific dataset types written by
    `writeAdditionalCuratedCalibrations` in addition to the calibrations
    found in obs data packages that follow the standard scheme.
    (`set` of `str`)"""
99 
    @property
    @abstractmethod
    def filterDefinitions(self):
        """`~lsst.obs.base.FilterDefinitionCollection`, defining the filters
        for this instrument.

        Concrete subclasses must override this; the base implementation
        only exists to satisfy the abstract-property machinery.
        """
        return None
107 
    def __init__(self):
        # Reset any previously-defined filters before declaring this
        # instrument's filters; the order of these two calls matters.
        self.filterDefinitions.reset()
        self.filterDefinitions.defineFilters()
111 
    @classmethod
    @abstractmethod
    def getName(cls):
        """Return the short (dimension) name for this instrument.

        This is not (in general) the same as the class name - it's what is used
        as the value of the "instrument" field in data IDs, and is usually an
        abbreviation of the full name.
        """
        raise NotImplementedError()
122 
123  @classmethod
124  @lru_cache()
125  def getCuratedCalibrationNames(cls) -> Set[str]:
126  """Return the names of all the curated calibration dataset types.
127 
128  Returns
129  -------
130  names : `set` of `str`
131  The dataset type names of all curated calibrations. This will
132  include the standard curated calibrations even if the particular
133  instrument does not support them.
134 
135  Notes
136  -----
137  The returned list does not indicate whether a particular dataset
138  is present in the Butler repository, simply that these are the
139  dataset types that are handled by ``writeCuratedCalibrations``.
140  """
141 
142  # Camera is a special dataset type that is also handled as a
143  # curated calibration.
144  curated = {"camera"}
145 
146  # Make a cursory attempt to filter out curated dataset types
147  # that are not present for this instrument
148  for datasetTypeName in cls.standardCuratedDatasetTypes:
149  calibPath = cls._getSpecificCuratedCalibrationPath(datasetTypeName)
150  if calibPath is not None:
151  curated.add(datasetTypeName)
152 
153  curated.update(cls.additionalCuratedDatasetTypes)
154  return frozenset(curated)
155 
    @abstractmethod
    def getCamera(self):
        """Retrieve the cameraGeom representation of this instrument.

        This is a temporary API that should go away once ``obs`` packages have
        a standardized approach to writing versioned cameras to a Gen3 repo.
        """
        raise NotImplementedError()
164 
    @abstractmethod
    def register(self, registry):
        """Insert instrument, physical_filter, and detector entries into a
        `Registry`.

        Implementations should guarantee that registration is atomic (the
        registry should not be modified if any error occurs) and idempotent at
        the level of individual dimension entries; new detectors and filters
        should be added, but changes to any existing record should not be.
        This can generally be achieved via a block like::

            with registry.transaction():
                registry.syncDimensionData("instrument", ...)
                registry.syncDimensionData("detector", ...)
                self._registerFilters(registry)

        Raises
        ------
        lsst.daf.butler.registry.ConflictingDefinitionError
            Raised if any existing record has the same key but a different
            definition as one being registered.
        """
        raise NotImplementedError()
188 
189  @classmethod
190  @lru_cache()
192  """The root of the obs data package that provides specializations for
193  this instrument.
194 
195  returns
196  -------
197  dir : `str`
198  The root of the relevat obs data package.
199  """
200  if cls.obsDataPackage is None:
201  return None
202  return getPackageDir(cls.obsDataPackage)
203 
204  @staticmethod
205  def fromName(name: str, registry: Registry) -> Instrument:
206  """Given an instrument name and a butler, retrieve a corresponding
207  instantiated instrument object.
208 
209  Parameters
210  ----------
211  name : `str`
212  Name of the instrument (must match the return value of `getName`).
213  registry : `lsst.daf.butler.Registry`
214  Butler registry to query to find the information.
215 
216  Returns
217  -------
218  instrument : `Instrument`
219  An instance of the relevant `Instrument`.
220 
221  Notes
222  -----
223  The instrument must be registered in the corresponding butler.
224 
225  Raises
226  ------
227  LookupError
228  Raised if the instrument is not known to the supplied registry.
229  ModuleNotFoundError
230  Raised if the class could not be imported. This could mean
231  that the relevant obs package has not been setup.
232  TypeError
233  Raised if the class name retrieved is not a string.
234  """
235  records = list(registry.queryDimensionRecords("instrument", instrument=name))
236  if not records:
237  raise LookupError(f"No registered instrument with name '{name}'.")
238  cls = records[0].class_name
239  if not isinstance(cls, str):
240  raise TypeError(f"Unexpected class name retrieved from {name} instrument dimension (got {cls})")
241  instrument = doImport(cls)
242  return instrument()
243 
244  @staticmethod
245  def importAll(registry: Registry) -> None:
246  """Import all the instruments known to this registry.
247 
248  This will ensure that all metadata translators have been registered.
249 
250  Parameters
251  ----------
252  registry : `lsst.daf.butler.Registry`
253  Butler registry to query to find the information.
254 
255  Notes
256  -----
257  It is allowed for a particular instrument class to fail on import.
258  This might simply indicate that a particular obs package has
259  not been setup.
260  """
261  records = list(registry.queryDimensionRecords("instrument"))
262  for record in records:
263  cls = record.class_name
264  try:
265  doImport(cls)
266  except Exception:
267  pass
268 
269  def _registerFilters(self, registry):
270  """Register the physical and abstract filter Dimension relationships.
271  This should be called in the `register` implementation, within
272  a transaction context manager block.
273 
274  Parameters
275  ----------
276  registry : `lsst.daf.butler.core.Registry`
277  The registry to add dimensions to.
278  """
279  for filter in self.filterDefinitions:
280  # fix for undefined abstract filters causing trouble in the
281  # registry:
282  if filter.band is None:
283  band = filter.physical_filter
284  else:
285  band = filter.band
286 
287  registry.syncDimensionData("physical_filter",
288  {"instrument": self.getName(),
289  "name": filter.physical_filter,
290  "band": band
291  })
292 
    @abstractmethod
    def getRawFormatter(self, dataId):
        """Return the Formatter class that should be used to read a particular
        raw file.

        Parameters
        ----------
        dataId : `DataCoordinate`
            Dimension-based ID for the raw file or files being ingested.

        Returns
        -------
        formatter : `Formatter` class
            Class to be used that reads the file into an
            `lsst.afw.image.Exposure` instance.
        """
        raise NotImplementedError()
310 
311  def applyConfigOverrides(self, name, config):
312  """Apply instrument-specific overrides for a task config.
313 
314  Parameters
315  ----------
316  name : `str`
317  Name of the object being configured; typically the _DefaultName
318  of a Task.
319  config : `lsst.pex.config.Config`
320  Config instance to which overrides should be applied.
321  """
322  for root in self.configPaths:
323  path = os.path.join(root, f"{name}.py")
324  if os.path.exists(path):
325  config.load(path)
326 
327  def writeCuratedCalibrations(self, butler: Butler, collection: Optional[str] = None,
328  suffixes: Sequence[str] = ()) -> None:
329  """Write human-curated calibration Datasets to the given Butler with
330  the appropriate validity ranges.
331 
332  Parameters
333  ----------
334  butler : `lsst.daf.butler.Butler`
335  Butler to use to store these calibrations.
336  collection : `str`, optional
337  Name to use for the calibration collection that associates all
338  datasets with a validity range. If this collection already exists,
339  it must be a `~CollectionType.CALIBRATION` collection, and it must
340  not have any datasets that would conflict with those inserted by
341  this method. If `None`, a collection name is worked out
342  automatically from the instrument name and other metadata by
343  calling ``makeCalibrationCollectionName``, but this
344  default name may not work well for long-lived repositories unless
345  one or more ``suffixes`` are also provided (and changed every time
346  curated calibrations are ingested).
347  suffixes : `Sequence` [ `str` ], optional
348  Name suffixes to append to collection names, after concatenating
349  them with the standard collection name delimeter. If provided,
350  these are appended to the names of the `~CollectionType.RUN`
351  collections that datasets are inserted directly into, as well the
352  `~CollectionType.CALIBRATION` collection if it is generated
353  automatically (i.e. if ``collection is None``).
354 
355  Notes
356  -----
357  Expected to be called from subclasses. The base method calls
358  ``writeCameraGeom``, ``writeStandardTextCuratedCalibrations``,
359  and ``writeAdditionalCuratdCalibrations``.
360  """
361  # Delegate registration of collections (and creating names for them)
362  # to other methods so they can be called independently with the same
363  # preconditions. Collection registration is idempotent, so this is
364  # safe, and while it adds a bit of overhead, as long as it's one
365  # registration attempt per method (not per dataset or dataset type),
366  # that's negligible.
367  self.writeCameraGeom(butler, collection, *suffixes)
368  self.writeStandardTextCuratedCalibrations(butler, collection, suffixes=suffixes)
369  self.writeAdditionalCuratedCalibrations(butler, collection, suffixes=suffixes)
370 
371  def writeAdditionalCuratedCalibrations(self, butler: Butler, collection: Optional[str] = None,
372  suffixes: Sequence[str] = ()) -> None:
373  """Write additional curated calibrations that might be instrument
374  specific and are not part of the standard set.
375 
376  Default implementation does nothing.
377 
378  Parameters
379  ----------
380  butler : `lsst.daf.butler.Butler`
381  Butler to use to store these calibrations.
382  collection : `str`, optional
383  Name to use for the calibration collection that associates all
384  datasets with a validity range. If this collection already exists,
385  it must be a `~CollectionType.CALIBRATION` collection, and it must
386  not have any datasets that would conflict with those inserted by
387  this method. If `None`, a collection name is worked out
388  automatically from the instrument name and other metadata by
389  calling ``makeCalibrationCollectionName``, but this
390  default name may not work well for long-lived repositories unless
391  one or more ``suffixes`` are also provided (and changed every time
392  curated calibrations are ingested).
393  suffixes : `Sequence` [ `str` ], optional
394  Name suffixes to append to collection names, after concatenating
395  them with the standard collection name delimeter. If provided,
396  these are appended to the names of the `~CollectionType.RUN`
397  collections that datasets are inserted directly into, as well the
398  `~CollectionType.CALIBRATION` collection if it is generated
399  automatically (i.e. if ``collection is None``).
400  """
401  return
402 
    def writeCameraGeom(self, butler: Butler, collection: Optional[str] = None,
                        suffixes: Sequence[str] = ()) -> None:
        """Write the default camera geometry to the butler repository and
        associate it with the appropriate validity range in a calibration
        collection.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler to use to store these calibrations.
        collection : `str`, optional
            Name to use for the calibration collection that associates all
            datasets with a validity range.  If this collection already exists,
            it must be a `~CollectionType.CALIBRATION` collection, and it must
            not have any datasets that would conflict with those inserted by
            this method.  If `None`, a collection name is worked out
            automatically from the instrument name and other metadata by
            calling ``makeCalibrationCollectionName``, but this
            default name may not work well for long-lived repositories unless
            one or more ``suffixes`` are also provided (and changed every time
            curated calibrations are ingested).
        suffixes : `Sequence` [ `str` ], optional
            Name suffixes to append to collection names, after concatenating
            them with the standard collection name delimiter.  If provided,
            these are appended to the names of the `~CollectionType.RUN`
            collections that datasets are inserted directly into, as well the
            `~CollectionType.CALIBRATION` collection if it is generated
            automatically (i.e. if ``collection is None``).
        """
        if collection is None:
            collection = self.makeCalibrationCollectionName(*suffixes)
        # Collection/run registration is idempotent, so re-running this
        # method against an existing repository is safe.
        butler.registry.registerCollection(collection, type=CollectionType.CALIBRATION)
        run = self.makeUnboundedCalibrationRunName(*suffixes)
        butler.registry.registerRun(run)
        datasetType = DatasetType("camera", ("instrument",), "Camera", isCalibration=True,
                                  universe=butler.registry.dimensions)
        butler.registry.registerDatasetType(datasetType)
        camera = self.getCamera()
        # Store the camera in the unbounded run, then certify it into the
        # calibration collection with an unbounded validity range.
        ref = butler.put(camera, datasetType, {"instrument": self.getName()}, run=run)
        butler.registry.certify(collection, [ref], Timespan(begin=None, end=None))
443 
    def writeStandardTextCuratedCalibrations(self, butler: Butler, collection: Optional[str] = None,
                                             suffixes: Sequence[str] = ()) -> None:
        """Write the set of standardized curated text calibrations to
        the repository.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler to receive these calibration datasets.
        collection : `str`, optional
            Name to use for the calibration collection that associates all
            datasets with a validity range.  If this collection already exists,
            it must be a `~CollectionType.CALIBRATION` collection, and it must
            not have any datasets that would conflict with those inserted by
            this method.  If `None`, a collection name is worked out
            automatically from the instrument name and other metadata by
            calling ``makeCalibrationCollectionName``, but this
            default name may not work well for long-lived repositories unless
            one or more ``suffixes`` are also provided (and changed every time
            curated calibrations are ingested).
        suffixes : `Sequence` [ `str` ], optional
            Name suffixes to append to collection names, after concatenating
            them with the standard collection name delimiter.  If provided,
            these are appended to the names of the `~CollectionType.RUN`
            collections that datasets are inserted directly into, as well the
            `~CollectionType.CALIBRATION` collection if it is generated
            automatically (i.e. if ``collection is None``).
        """
        if collection is None:
            collection = self.makeCalibrationCollectionName(*suffixes)
        butler.registry.registerCollection(collection, type=CollectionType.CALIBRATION)
        # Shared across dataset types so each run is registered only once.
        runs = set()
        for datasetTypeName in self.standardCuratedDatasetTypes:
            # We need to define the dataset types.
            if datasetTypeName not in StandardCuratedCalibrationDatasetTypes:
                # NOTE(review): "." as the join separator in this message
                # reads oddly; ", " was probably intended — confirm.
                raise ValueError(f"DatasetType {datasetTypeName} not in understood list"
                                 f" [{'.'.join(StandardCuratedCalibrationDatasetTypes)}]")
            definition = StandardCuratedCalibrationDatasetTypes[datasetTypeName]
            datasetType = DatasetType(datasetTypeName,
                                      universe=butler.registry.dimensions,
                                      isCalibration=True,
                                      **definition)
            self._writeSpecificCuratedCalibrationDatasets(butler, datasetType, collection, runs=runs,
                                                          suffixes=suffixes)
488 
489  @classmethod
490  def _getSpecificCuratedCalibrationPath(cls, datasetTypeName):
491  """Return the path of the curated calibration directory.
492 
493  Parameters
494  ----------
495  datasetTypeName : `str`
496  The name of the standard dataset type to find.
497 
498  Returns
499  -------
500  path : `str`
501  The path to the standard curated data directory. `None` if the
502  dataset type is not found or the obs data package is not
503  available.
504  """
505  if cls.getObsDataPackageDir() is None:
506  # if there is no data package then there can't be datasets
507  return None
508 
509  calibPath = os.path.join(cls.getObsDataPackageDir(), cls.policyName,
510  datasetTypeName)
511 
512  if os.path.exists(calibPath):
513  return calibPath
514 
515  return None
516 
    def _writeSpecificCuratedCalibrationDatasets(self, butler: Butler, datasetType: DatasetType,
                                                 collection: str, runs: Set[str], suffixes: Sequence[str]):
        """Write standardized curated calibration datasets for this specific
        dataset type from an obs data package.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Gen3 butler in which to put the calibrations.
        datasetType : `lsst.daf.butler.DatasetType`
            Dataset type to be put.
        collection : `str`
            Name of the `~CollectionType.CALIBRATION` collection that
            associates all datasets with validity ranges.  Must have been
            registered prior to this call.
        runs : `set` [ `str` ]
            Names of runs that have already been registered by previous calls
            and need not be registered again.  Should be updated by this
            method as new runs are registered.
        suffixes : `Sequence` [ `str` ]
            Suffixes to append to run names when creating them from
            ``CALIBDATE`` metadata, via calls to `makeCuratedCalibrationName`.

        Notes
        -----
        This method scans the location defined in the ``obsDataPackageDir``
        class attribute for curated calibrations corresponding to the
        supplied dataset type.  The directory name in the data package must
        match the name of the dataset type.  They are assumed to use the
        standard layout and can be read by
        `~lsst.pipe.tasks.read_curated_calibs.read_all` and provide standard
        metadata.
        """
        calibPath = self._getSpecificCuratedCalibrationPath(datasetType.name)
        if calibPath is None:
            # Nothing to write for this dataset type.
            return

        # Register the dataset type
        butler.registry.registerDatasetType(datasetType)

        # obs_base can't depend on pipe_tasks but concrete obs packages
        # can -- we therefore have to defer import
        from lsst.pipe.tasks.read_curated_calibs import read_all

        # Read calibs, registering a new run for each CALIBDATE as needed.
        # We try to avoid registering runs multiple times as an optimization
        # by putting them in the ``runs`` set that was passed in.
        camera = self.getCamera()
        calibsDict = read_all(calibPath, camera)[0]  # second return is calib type
        datasetRecords = []
        for det in calibsDict:
            times = sorted([k for k in calibsDict[det]])
            calibs = [calibsDict[det][time] for time in times]
            times = [astropy.time.Time(t, format="datetime", scale="utc") for t in times]
            # The trailing None makes the last validity range unbounded.
            times += [None]
            for calib, beginTime, endTime in zip(calibs, times[:-1], times[1:]):
                md = calib.getMetadata()
                run = self.makeCuratedCalibrationRunName(md['CALIBDATE'], *suffixes)
                if run not in runs:
                    butler.registry.registerRun(run)
                    runs.add(run)
                dataId = DataCoordinate.standardize(
                    universe=butler.registry.dimensions,
                    instrument=self.getName(),
                    detector=md["DETECTOR"],
                )
                datasetRecords.append((calib, dataId, run, Timespan(beginTime, endTime)))

        # Second loop actually does the inserts and filesystem writes.  We
        # first do a butler.put on each dataset, inserting it into the run for
        # its calibDate.  We remember those refs and group them by timespan, so
        # we can vectorize the certify calls as much as possible.
        refsByTimespan = defaultdict(list)
        with butler.transaction():
            for calib, dataId, run, timespan in datasetRecords:
                refsByTimespan[timespan].append(butler.put(calib, datasetType, dataId, run=run))
            for timespan, refs in refsByTimespan.items():
                butler.registry.certify(collection, refs, timespan)
595 
    @abstractmethod
    def makeDataIdTranslatorFactory(self) -> TranslatorFactory:
        """Return a factory for creating Gen2->Gen3 data ID translators,
        specialized for this instrument.

        Derived class implementations should generally call
        `TranslatorFactory.addGenericInstrumentRules` with appropriate
        arguments, but are not required to (and may not be able to if their
        Gen2 raw data IDs are sufficiently different from the HSC/DECam/CFHT
        norm).

        Returns
        -------
        factory : `TranslatorFactory`.
            Factory for `Translator` objects.
        """
        raise NotImplementedError("Must be implemented by derived classes.")
613 
614  @classmethod
616  """Make the default instrument-specific run collection string for raw
617  data ingest.
618 
619  Returns
620  -------
621  coll : `str`
622  Run collection name to be used as the default for ingestion of
623  raws.
624  """
625  return cls.makeCollectionName("raw", "all")
626 
627  @classmethod
628  def makeUnboundedCalibrationRunName(cls, *suffixes: str) -> str:
629  """Make a RUN collection name appropriate for inserting calibration
630  datasets whose validity ranges are unbounded.
631 
632  Parameters
633  ----------
634  *suffixes : `str`
635  Strings to be appended to the base name, using the default
636  delimiter for collection names.
637 
638  Returns
639  -------
640  name : `str`
641  Run collection name.
642  """
643  return cls.makeCollectionName("calib", "unbounded", *suffixes)
644 
645  @classmethod
646  def makeCuratedCalibrationRunName(cls, calibDate: str, *suffixes: str) -> str:
647  """Make a RUN collection name appropriate for inserting curated
648  calibration datasets with the given ``CALIBDATE`` metadata value.
649 
650  Parameters
651  ----------
652  calibDate : `str`
653  The ``CALIBDATE`` metadata value.
654  *suffixes : `str`
655  Strings to be appended to the base name, using the default
656  delimiter for collection names.
657 
658  Returns
659  -------
660  name : `str`
661  Run collection name.
662  """
663  return cls.makeCollectionName("calib", "curated", calibDate, *suffixes)
664 
665  @classmethod
666  def makeCalibrationCollectionName(cls, *suffixes: str) -> str:
667  """Make a CALIBRATION collection name appropriate for associating
668  calibration datasets with validity ranges.
669 
670  Parameters
671  ----------
672  *suffixes : `str`
673  Strings to be appended to the base name, using the default
674  delimiter for collection names.
675 
676  Returns
677  -------
678  name : `str`
679  Calibration collection name.
680  """
681  return cls.makeCollectionName("calib", *suffixes)
682 
683  @classmethod
684  def makeCollectionName(cls, *labels: str) -> str:
685  """Get the instrument-specific collection string to use as derived
686  from the supplied labels.
687 
688  Parameters
689  ----------
690  *labels : `str`
691  Strings to be combined with the instrument name to form a
692  collection name.
693 
694  Returns
695  -------
696  name : `str`
697  Collection name to use that includes the instrument name.
698  """
699  return "/".join((cls.getName(),) + labels)
700 
701 
def makeExposureRecordFromObsInfo(obsInfo, universe):
    """Construct an exposure DimensionRecord from
    `astro_metadata_translator.ObservationInfo`.

    Parameters
    ----------
    obsInfo : `astro_metadata_translator.ObservationInfo`
        A `~astro_metadata_translator.ObservationInfo` object corresponding to
        the exposure.
    universe : `DimensionUniverse`
        Set of all known dimensions.

    Returns
    -------
    record : `DimensionRecord`
        A record containing exposure metadata, suitable for insertion into
        a `Registry`.
    """
    # Pointing-related quantities are optional; they stay None unless the
    # corresponding metadata is available.
    ra = dec = sky_angle = zenith_angle = None
    tracking = obsInfo.tracking_radec
    if tracking is not None:
        icrs = tracking.icrs
        ra = icrs.ra.degree
        dec = icrs.dec.degree
    if obsInfo.boresight_rotation_coord == "sky":
        sky_angle = obsInfo.boresight_rotation_angle.degree
    if obsInfo.altaz_begin is not None:
        zenith_angle = obsInfo.altaz_begin.zen.degree

    # We are not mandating that dark_time be calculable.
    darkTime = obsInfo.dark_time
    return universe["exposure"].RecordClass(
        instrument=obsInfo.instrument,
        id=obsInfo.exposure_id,
        obs_id=obsInfo.observation_id,
        group_name=obsInfo.exposure_group,
        group_id=obsInfo.visit_id,
        datetime_begin=obsInfo.datetime_begin,
        datetime_end=obsInfo.datetime_end,
        exposure_time=obsInfo.exposure_time.to_value("s"),
        dark_time=darkTime.to_value("s") if darkTime is not None else None,
        observation_type=obsInfo.observation_type,
        observation_reason=obsInfo.observation_reason,
        day_obs=obsInfo.observing_day,
        seq_num=obsInfo.observation_counter,
        physical_filter=obsInfo.physical_filter,
        science_program=obsInfo.science_program,
        target_name=obsInfo.object,
        tracking_ra=ra,
        tracking_dec=dec,
        sky_angle=sky_angle,
        zenith_angle=zenith_angle,
    )
755 
756 
def loadCamera(butler: Butler, dataId: DataId, *, collections: Any = None) -> Tuple[Camera, bool]:
    """Attempt to load versioned camera geometry from a butler, but fall back
    to obtaining a nominal camera from the `Instrument` class if that fails.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        Butler instance to attempt to query for and load a ``camera`` dataset
        from.
    dataId : `dict` or `DataCoordinate`
        Data ID that identifies at least the ``instrument`` and ``exposure``
        dimensions.
    collections : Any, optional
        Collections to be searched, overriding ``self.butler.collections``.
        Can be any of the types supported by the ``collections`` argument
        to butler construction.

    Returns
    -------
    camera : `lsst.afw.cameraGeom.Camera`
        Camera object.
    versioned : `bool`
        If `True`, the camera was obtained from the butler and should represent
        a versioned camera from a calibration repository.  If `False`, no
        camera datasets were found, and the returned camera was produced by
        instantiating the appropriate `Instrument` class and calling
        `Instrument.getCamera`.
    """
    if collections is None:
        collections = butler.collections
    # Registry would do data ID expansion internally if we didn't do it first,
    # but we might want an expanded data ID ourselves later, so we do it here
    # to ensure it only happens once.
    # This will also catch problems with the data ID not having keys we need.
    dataId = butler.registry.expandDataId(dataId, graph=butler.registry.dimensions["exposure"].graph)
    try:
        cameraRef = butler.get("camera", dataId=dataId, collections=collections)
    except LookupError:
        # No versioned camera in the repository; fall back below.
        pass
    else:
        return cameraRef, True
    instrument = Instrument.fromName(dataId["instrument"], butler.registry)
    return instrument.getCamera(), False
lsst.obs.base._instrument.Instrument.applyConfigOverrides
def applyConfigOverrides(self, name, config)
Definition: _instrument.py:311
lsst.obs.base._instrument.loadCamera
Tuple[Camera, bool] loadCamera(Butler butler, DataId dataId, *Any collections=None)
Definition: _instrument.py:757
lsst.obs.base._instrument.Instrument.getCuratedCalibrationNames
Set[str] getCuratedCalibrationNames(cls)
Definition: _instrument.py:125
lsst.obs.base._instrument.Instrument.getName
def getName(cls)
Definition: _instrument.py:114
lsst.obs.base._instrument.Instrument.writeAdditionalCuratedCalibrations
None writeAdditionalCuratedCalibrations(self, Butler butler, Optional[str] collection=None, Sequence[str] suffixes=())
Definition: _instrument.py:371
lsst.obs.base._instrument.Instrument.importAll
None importAll(Registry registry)
Definition: _instrument.py:245
lsst.obs.base._instrument.Instrument.makeCalibrationCollectionName
str makeCalibrationCollectionName(cls, *str suffixes)
Definition: _instrument.py:666
lsst.obs.base._instrument.Instrument.getCamera
def getCamera(self)
Definition: _instrument.py:157
lsst.obs.base._instrument.Instrument.filterDefinitions
def filterDefinitions(self)
Definition: _instrument.py:102
lsst.obs.base._instrument.Instrument.fromName
Instrument fromName(str name, Registry registry)
Definition: _instrument.py:205
lsst.obs.base._instrument.Instrument.makeUnboundedCalibrationRunName
str makeUnboundedCalibrationRunName(cls, *str suffixes)
Definition: _instrument.py:628
lsst.obs.base._instrument.Instrument.getObsDataPackageDir
def getObsDataPackageDir(cls)
Definition: _instrument.py:191
lsst.obs.base._instrument.Instrument._getSpecificCuratedCalibrationPath
def _getSpecificCuratedCalibrationPath(cls, datasetTypeName)
Definition: _instrument.py:490
lsst.obs.base._instrument.Instrument.writeCameraGeom
None writeCameraGeom(self, Butler butler, Optional[str] collection=None, Sequence[str] suffixes=())
Definition: _instrument.py:403
lsst.obs.base._instrument.Instrument.writeStandardTextCuratedCalibrations
None writeStandardTextCuratedCalibrations(self, Butler butler, Optional[str] collection=None, Sequence[str] suffixes=())
Definition: _instrument.py:444
lsst.obs.base._instrument.Instrument._writeSpecificCuratedCalibrationDatasets
def _writeSpecificCuratedCalibrationDatasets(self, Butler butler, DatasetType datasetType, str collection, Set[str] runs, Sequence[str] suffixes)
Definition: _instrument.py:517
lsst::utils
lsst.obs.base._instrument.Instrument
Definition: _instrument.py:58
lsst.obs.base._instrument.Instrument.makeCollectionName
str makeCollectionName(cls, *str labels)
Definition: _instrument.py:684
lsst.obs.base._instrument.makeExposureRecordFromObsInfo
def makeExposureRecordFromObsInfo(obsInfo, universe)
Definition: _instrument.py:702
lsst.obs.base._instrument.Instrument.register
def register(self, registry)
Definition: _instrument.py:166
lsst.obs.base._instrument.Instrument.getRawFormatter
def getRawFormatter(self, dataId)
Definition: _instrument.py:294
lsst.obs.base._instrument.Instrument.makeCuratedCalibrationRunName
str makeCuratedCalibrationRunName(cls, str calibDate, *str suffixes)
Definition: _instrument.py:646
lsst.obs.base._instrument.Instrument.makeDataIdTranslatorFactory
TranslatorFactory makeDataIdTranslatorFactory(self)
Definition: _instrument.py:597
lsst.obs.base._instrument.Instrument.makeDefaultRawIngestRunName
str makeDefaultRawIngestRunName(cls)
Definition: _instrument.py:615
lsst.obs.base._instrument.Instrument.__init__
def __init__(self)
Definition: _instrument.py:108
lsst.obs.base._instrument.Instrument.writeCuratedCalibrations
None writeCuratedCalibrations(self, Butler butler, Optional[str] collection=None, Sequence[str] suffixes=())
Definition: _instrument.py:327