# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = ("Instrument", "makeExposureRecordFromObsInfo", "loadCamera")

import datetime
import os.path
from abc import ABCMeta, abstractmethod
from collections import defaultdict
from functools import lru_cache
from typing import TYPE_CHECKING, Any, Optional, Sequence, Set, Tuple, Union

import astropy.time
from lsst.afw.cameraGeom import Camera
from lsst.daf.butler import Butler, CollectionType, DataCoordinate, DataId, DatasetType, Timespan
from lsst.daf.butler.registry import DataIdError
from lsst.utils import doImport, getPackageDir

if TYPE_CHECKING:
    from lsst.daf.butler import Registry

    from .gen2to3 import TranslatorFactory

# To be a standard text curated calibration means that we use a
# standard definition for the corresponding DatasetType.
StandardCuratedCalibrationDatasetTypes = {
    "defects": {"dimensions": ("instrument", "detector"), "storageClass": "Defects"},
    "qe_curve": {"dimensions": ("instrument", "detector"), "storageClass": "QECurve"},
    "crosstalk": {"dimensions": ("instrument", "detector"), "storageClass": "CrosstalkCalib"},
    "linearizer": {"dimensions": ("instrument", "detector"), "storageClass": "Linearizer"},
    "bfk": {"dimensions": ("instrument", "detector"), "storageClass": "BrighterFatterKernel"},
}
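
# A hedged sketch of how one of these definitions is consumed when the
# corresponding dataset type is registered (this mirrors the construction in
# ``writeStandardTextCuratedCalibrations`` below; ``butler`` is assumed to be
# an existing `lsst.daf.butler.Butler`):
#
#     definition = StandardCuratedCalibrationDatasetTypes["defects"]
#     datasetType = DatasetType(
#         "defects", universe=butler.registry.dimensions, isCalibration=True, **definition
#     )
#     butler.registry.registerDatasetType(datasetType)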


class Instrument(metaclass=ABCMeta):
    """Base class for instrument-specific logic for the Gen3 Butler.

    Parameters
    ----------
    collection_prefix : `str`, optional
        Prefix for collection names to use instead of the instrument's own
        name. This is primarily for use in simulated-data repositories, where
        the instrument name may not be necessary and/or sufficient to
        distinguish between collections.

    Notes
    -----
    Concrete instrument subclasses must have the same construction signature
    as the base class.
    """

    configPaths: Sequence[str] = ()
    """Paths to config files to read for specific Tasks.

    The paths in this list should contain files of the form `task.py`, for
    each of the Tasks that requires special configuration.
    """

    policyName: Optional[str] = None
    """Instrument-specific name to use when locating a policy or configuration
    file in the file system."""

    obsDataPackage: Optional[str] = None
    """Name of the package containing the text curated calibration files.
    Usually an obs ``_data`` package. If `None`, no curated calibration files
    will be read. (`str`)"""

    standardCuratedDatasetTypes: Set[str] = frozenset(StandardCuratedCalibrationDatasetTypes)
    """The dataset types expected to be obtained from the obsDataPackage.

    These dataset types are all required to have standard definitions and
    must be known to the base class. Clearing this list will prevent
    any of these calibrations from being stored. If a dataset type is not
    known to a specific instrument it can still be included in this list
    since the data package is the source of truth. (`set` of `str`)
    """

    additionalCuratedDatasetTypes: Set[str] = frozenset()
    """Curated dataset types specific to this particular instrument that do
    not follow the standard organization found in obs data packages.

    These are the instrument-specific dataset types written by
    `writeAdditionalCuratedCalibrations` in addition to the calibrations
    found in obs data packages that follow the standard scheme.
    (`set` of `str`)"""

    @property
    @abstractmethod
    def filterDefinitions(self):
        """`~lsst.obs.base.FilterDefinitionCollection`, defining the filters
        for this instrument.
        """
        return None

    def __init__(self, collection_prefix: Optional[str] = None):
        self.filterDefinitions.reset()
        self.filterDefinitions.defineFilters()
        if collection_prefix is None:
            collection_prefix = self.getName()
        self.collection_prefix = collection_prefix

    @classmethod
    @abstractmethod
    def getName(cls):
        """Return the short (dimension) name for this instrument.

        This is not (in general) the same as the class name - it's what is
        used as the value of the "instrument" field in data IDs, and is
        usually an abbreviation of the full name.
        """
        raise NotImplementedError()

    @classmethod
    @lru_cache()
    def getCuratedCalibrationNames(cls) -> Set[str]:
        """Return the names of all the curated calibration dataset types.

        Returns
        -------
        names : `set` of `str`
            The dataset type names of all curated calibrations. This will
            include the standard curated calibrations even if the particular
            instrument does not support them.

        Notes
        -----
        The returned list does not indicate whether a particular dataset
        is present in the Butler repository, simply that these are the
        dataset types that are handled by ``writeCuratedCalibrations``.
        """
        # Camera is a special dataset type that is also handled as a
        # curated calibration.
        curated = {"camera"}

        # Make a cursory attempt to filter out curated dataset types
        # that are not present for this instrument.
        for datasetTypeName in cls.standardCuratedDatasetTypes:
            calibPath = cls._getSpecificCuratedCalibrationPath(datasetTypeName)
            if calibPath is not None:
                curated.add(datasetTypeName)

        curated.update(cls.additionalCuratedDatasetTypes)
        return frozenset(curated)

    @abstractmethod
    def getCamera(self):
        """Retrieve the cameraGeom representation of this instrument.

        This is a temporary API that should go away once ``obs`` packages
        have a standardized approach to writing versioned cameras to a Gen3
        repo.
        """
        raise NotImplementedError()

    @abstractmethod
    def register(self, registry, *, update=False):
        """Insert instrument, physical_filter, and detector entries into a
        `Registry`.

        Parameters
        ----------
        registry : `lsst.daf.butler.Registry`
            Registry client for the data repository to modify.
        update : `bool`, optional
            If `True` (`False` is default), update existing records if they
            differ from the new ones.

        Raises
        ------
        lsst.daf.butler.registry.ConflictingDefinitionError
            Raised if any existing record has the same key but a different
            definition as one being registered.

        Notes
        -----
        New detectors and physical filters can always be added by calling this
        method multiple times, as long as no existing records have changed (if
        existing records have changed, ``update=True`` must be used). Old
        records can never be removed by this method.

        Implementations should guarantee that registration is atomic (the
        registry should not be modified if any error occurs) and idempotent at
        the level of individual dimension entries; new detectors and filters
        should be added, but changes to any existing record should not be.
        This can generally be achieved via a block like::

            with registry.transaction():
                registry.syncDimensionData("instrument", ...)
                registry.syncDimensionData("detector", ...)
                self._registerFilters(registry)

        """
        raise NotImplementedError()

    @classmethod
    @lru_cache()
    def getObsDataPackageDir(cls):
        """The root of the obs data package that provides specializations for
        this instrument.

        Returns
        -------
        dir : `str` or `None`
            The root of the relevant obs data package, or `None` if
            ``obsDataPackage`` is not set.
        """
        if cls.obsDataPackage is None:
            return None
        return getPackageDir(cls.obsDataPackage)

    @staticmethod
    def fromName(name: str, registry: Registry, collection_prefix: Optional[str] = None) -> Instrument:
        """Given an instrument name and a butler registry, retrieve a
        corresponding instantiated instrument object.

        Parameters
        ----------
        name : `str`
            Name of the instrument (must match the return value of `getName`).
        registry : `lsst.daf.butler.Registry`
            Butler registry to query to find the information.
        collection_prefix : `str`, optional
            Prefix for collection names to use instead of the instrument's own
            name. This is primarily for use in simulated-data repositories,
            where the instrument name may not be necessary and/or sufficient
            to distinguish between collections.

        Returns
        -------
        instrument : `Instrument`
            An instance of the relevant `Instrument`.

        Raises
        ------
        LookupError
            Raised if the instrument is not known to the supplied registry.
        ModuleNotFoundError
            Raised if the class could not be imported. This could mean
            that the relevant obs package has not been setup.
        TypeError
            Raised if the class name retrieved is not a string.

        Notes
        -----
        The instrument must be registered in the corresponding butler.
        """
        try:
            records = list(registry.queryDimensionRecords("instrument", instrument=name))
        except DataIdError:
            records = None
        if not records:
            raise LookupError(f"No registered instrument with name '{name}'.")
        cls = records[0].class_name
        if not isinstance(cls, str):
            raise TypeError(f"Unexpected class name retrieved from {name} instrument dimension (got {cls})")
        instrument = doImport(cls)
        return instrument(collection_prefix=collection_prefix)
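
    # A hedged usage sketch (the repository path and instrument name are
    # illustrative, not part of this API):
    #
    #     butler = Butler("/repo/main")
    #     instrument = Instrument.fromName("HSC", butler.registry)
    #     camera = instrument.getCamera()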

    @staticmethod
    def importAll(registry: Registry) -> None:
        """Import all the instruments known to this registry.

        This will ensure that all metadata translators have been registered.

        Parameters
        ----------
        registry : `lsst.daf.butler.Registry`
            Butler registry to query to find the information.

        Notes
        -----
        It is allowed for a particular instrument class to fail on import.
        This might simply indicate that a particular obs package has
        not been setup.
        """
        records = list(registry.queryDimensionRecords("instrument"))
        for record in records:
            cls = record.class_name
            try:
                doImport(cls)
            except Exception:
                pass

    def _registerFilters(self, registry, update=False):
        """Register the physical and abstract filter Dimension relationships.

        This should be called in the `register` implementation, within
        a transaction context manager block.

        Parameters
        ----------
        registry : `lsst.daf.butler.Registry`
            The registry to add dimensions to.
        update : `bool`, optional
            If `True` (`False` is default), update existing records if they
            differ from the new ones.
        """
        for filter in self.filterDefinitions:
            # Fix for undefined abstract filters causing trouble in the
            # registry: fall back to the physical filter name as the band.
            if filter.band is None:
                band = filter.physical_filter
            else:
                band = filter.band

            registry.syncDimensionData(
                "physical_filter",
                {"instrument": self.getName(), "name": filter.physical_filter, "band": band},
                update=update,
            )

    @abstractmethod
    def getRawFormatter(self, dataId):
        """Return the Formatter class that should be used to read a particular
        raw file.

        Parameters
        ----------
        dataId : `DataCoordinate`
            Dimension-based ID for the raw file or files being ingested.

        Returns
        -------
        formatter : `Formatter` class
            Class to be used to read the file into an
            `lsst.afw.image.Exposure` instance.
        """
        raise NotImplementedError()

    def applyConfigOverrides(self, name, config):
        """Apply instrument-specific overrides for a task config.

        Parameters
        ----------
        name : `str`
            Name of the object being configured; typically the ``_DefaultName``
            of a Task.
        config : `lsst.pex.config.Config`
            Config instance to which overrides should be applied.
        """
        for root in self.configPaths:
            path = os.path.join(root, f"{name}.py")
            if os.path.exists(path):
                config.load(path)
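
    # A hedged sketch of the expected override layout (the path and the task
    # name "isr" are illustrative): with ``configPaths = ("/path/to/config",)``
    # and a file ``/path/to/config/isr.py`` on disk, the call
    #
    #     instrument.applyConfigOverrides("isr", config)
    #
    # loads that file into ``config`` via `lsst.pex.config.Config.load`.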

    def writeCuratedCalibrations(
        self, butler: Butler, collection: Optional[str] = None, labels: Sequence[str] = ()
    ) -> None:
        """Write human-curated calibration Datasets to the given Butler with
        the appropriate validity ranges.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler to use to store these calibrations.
        collection : `str`, optional
            Name to use for the calibration collection that associates all
            datasets with a validity range. If this collection already exists,
            it must be a `~CollectionType.CALIBRATION` collection, and it must
            not have any datasets that would conflict with those inserted by
            this method. If `None`, a collection name is worked out
            automatically from the instrument name and other metadata by
            calling ``makeCalibrationCollectionName``, but this
            default name may not work well for long-lived repositories unless
            ``labels`` is also provided (and changed every time curated
            calibrations are ingested).
        labels : `Sequence` [ `str` ], optional
            Extra strings to include in collection names, after concatenating
            them with the standard collection name delimiter. If provided,
            these are inserted into the names of the `~CollectionType.RUN`
            collections that datasets are inserted directly into, as well as
            the `~CollectionType.CALIBRATION` collection if it is generated
            automatically (i.e. if ``collection is None``). Usually this is
            just the name of the ticket on which the calibration collection is
            being created.

        Notes
        -----
        Expected to be called from subclasses. The base method calls
        ``writeCameraGeom``, ``writeStandardTextCuratedCalibrations``,
        and ``writeAdditionalCuratedCalibrations``.
        """
        # Delegate registration of collections (and creating names for them)
        # to other methods so they can be called independently with the same
        # preconditions. Collection registration is idempotent, so this is
        # safe, and while it adds a bit of overhead, as long as it's one
        # registration attempt per method (not per dataset or dataset type),
        # that's negligible.
        self.writeCameraGeom(butler, collection, labels=labels)
        self.writeStandardTextCuratedCalibrations(butler, collection, labels=labels)
        self.writeAdditionalCuratedCalibrations(butler, collection, labels=labels)
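
    # A hedged end-to-end sketch (the repo path, concrete instrument class,
    # and ticket label are all illustrative):
    #
    #     from lsst.obs.subaru import HyperSuprimeCam  # hypothetical choice
    #     butler = Butler("/repo/main", writeable=True)
    #     instrument = HyperSuprimeCam()
    #     instrument.register(butler.registry)
    #     instrument.writeCuratedCalibrations(butler, labels=("DM-12345",))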

    def writeAdditionalCuratedCalibrations(
        self, butler: Butler, collection: Optional[str] = None, labels: Sequence[str] = ()
    ) -> None:
        """Write additional curated calibrations that might be instrument
        specific and are not part of the standard set.

        The default implementation does nothing.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler to use to store these calibrations.
        collection : `str`, optional
            Name to use for the calibration collection that associates all
            datasets with a validity range. If this collection already exists,
            it must be a `~CollectionType.CALIBRATION` collection, and it must
            not have any datasets that would conflict with those inserted by
            this method. If `None`, a collection name is worked out
            automatically from the instrument name and other metadata by
            calling ``makeCalibrationCollectionName``, but this
            default name may not work well for long-lived repositories unless
            ``labels`` is also provided (and changed every time curated
            calibrations are ingested).
        labels : `Sequence` [ `str` ], optional
            Extra strings to include in collection names, after concatenating
            them with the standard collection name delimiter. If provided,
            these are inserted into the names of the `~CollectionType.RUN`
            collections that datasets are inserted directly into, as well as
            the `~CollectionType.CALIBRATION` collection if it is generated
            automatically (i.e. if ``collection is None``). Usually this is
            just the name of the ticket on which the calibration collection is
            being created.
        """
        return

    def writeCameraGeom(
        self, butler: Butler, collection: Optional[str] = None, labels: Sequence[str] = ()
    ) -> None:
        """Write the default camera geometry to the butler repository and
        associate it with the appropriate validity range in a calibration
        collection.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler to use to store these calibrations.
        collection : `str`, optional
            Name to use for the calibration collection that associates all
            datasets with a validity range. If this collection already exists,
            it must be a `~CollectionType.CALIBRATION` collection, and it must
            not have any datasets that would conflict with those inserted by
            this method. If `None`, a collection name is worked out
            automatically from the instrument name and other metadata by
            calling ``makeCalibrationCollectionName``, but this
            default name may not work well for long-lived repositories unless
            ``labels`` is also provided (and changed every time curated
            calibrations are ingested).
        labels : `Sequence` [ `str` ], optional
            Extra strings to include in collection names, after concatenating
            them with the standard collection name delimiter. If provided,
            these are inserted into the names of the `~CollectionType.RUN`
            collections that datasets are inserted directly into, as well as
            the `~CollectionType.CALIBRATION` collection if it is generated
            automatically (i.e. if ``collection is None``). Usually this is
            just the name of the ticket on which the calibration collection is
            being created.
        """
        if collection is None:
            collection = self.makeCalibrationCollectionName(*labels)
        butler.registry.registerCollection(collection, type=CollectionType.CALIBRATION)
        run = self.makeUnboundedCalibrationRunName(*labels)
        butler.registry.registerRun(run)
        datasetType = DatasetType(
            "camera", ("instrument",), "Camera", isCalibration=True, universe=butler.registry.dimensions
        )
        butler.registry.registerDatasetType(datasetType)
        camera = self.getCamera()
        ref = butler.put(camera, datasetType, {"instrument": self.getName()}, run=run)
        butler.registry.certify(collection, [ref], Timespan(begin=None, end=None))

    def writeStandardTextCuratedCalibrations(
        self, butler: Butler, collection: Optional[str] = None, labels: Sequence[str] = ()
    ) -> None:
        """Write the set of standardized curated text calibrations to
        the repository.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler to receive these calibration datasets.
        collection : `str`, optional
            Name to use for the calibration collection that associates all
            datasets with a validity range. If this collection already exists,
            it must be a `~CollectionType.CALIBRATION` collection, and it must
            not have any datasets that would conflict with those inserted by
            this method. If `None`, a collection name is worked out
            automatically from the instrument name and other metadata by
            calling ``makeCalibrationCollectionName``, but this
            default name may not work well for long-lived repositories unless
            ``labels`` is also provided (and changed every time curated
            calibrations are ingested).
        labels : `Sequence` [ `str` ], optional
            Extra strings to include in collection names, after concatenating
            them with the standard collection name delimiter. If provided,
            these are inserted into the names of the `~CollectionType.RUN`
            collections that datasets are inserted directly into, as well as
            the `~CollectionType.CALIBRATION` collection if it is generated
            automatically (i.e. if ``collection is None``). Usually this is
            just the name of the ticket on which the calibration collection is
            being created.
        """
        if collection is None:
            collection = self.makeCalibrationCollectionName(*labels)
        butler.registry.registerCollection(collection, type=CollectionType.CALIBRATION)
        runs = set()
        for datasetTypeName in self.standardCuratedDatasetTypes:
            # We need to define the dataset types.
            if datasetTypeName not in StandardCuratedCalibrationDatasetTypes:
                raise ValueError(
                    f"DatasetType {datasetTypeName} not in understood list"
                    f" [{', '.join(StandardCuratedCalibrationDatasetTypes)}]"
                )
            definition = StandardCuratedCalibrationDatasetTypes[datasetTypeName]
            datasetType = DatasetType(
                datasetTypeName, universe=butler.registry.dimensions, isCalibration=True, **definition
            )
            self._writeSpecificCuratedCalibrationDatasets(
                butler, datasetType, collection, runs=runs, labels=labels
            )

    @classmethod
    def _getSpecificCuratedCalibrationPath(cls, datasetTypeName):
        """Return the path of the curated calibration directory.

        Parameters
        ----------
        datasetTypeName : `str`
            The name of the standard dataset type to find.

        Returns
        -------
        path : `str` or `None`
            The path to the standard curated data directory. `None` if the
            dataset type is not found or the obs data package is not
            available.
        """
        if cls.getObsDataPackageDir() is None:
            # If there is no data package then there can't be datasets.
            return None

        calibPath = os.path.join(cls.getObsDataPackageDir(), cls.policyName, datasetTypeName)

        if os.path.exists(calibPath):
            return calibPath

        return None

    def _writeSpecificCuratedCalibrationDatasets(
        self, butler: Butler, datasetType: DatasetType, collection: str, runs: Set[str], labels: Sequence[str]
    ):
        """Write standardized curated calibration datasets for this specific
        dataset type from an obs data package.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Gen3 butler in which to put the calibrations.
        datasetType : `lsst.daf.butler.DatasetType`
            Dataset type to be put.
        collection : `str`
            Name of the `~CollectionType.CALIBRATION` collection that
            associates all datasets with validity ranges. Must have been
            registered prior to this call.
        runs : `set` [ `str` ]
            Names of runs that have already been registered by previous calls
            and need not be registered again. Should be updated by this
            method as new runs are registered.
        labels : `Sequence` [ `str` ]
            Extra strings to include in run names when creating them from
            ``CALIBDATE`` metadata, via calls to
            `makeCuratedCalibrationRunName`. Usually this is the name of the
            ticket on which the calibration collection is being created.

        Notes
        -----
        This method scans the location defined in the ``obsDataPackageDir``
        class attribute for curated calibrations corresponding to the
        supplied dataset type. The directory name in the data package must
        match the name of the dataset type. They are assumed to use the
        standard layout and can be read by
        `~lsst.pipe.tasks.read_curated_calibs.read_all` and provide standard
        metadata.
        """
        calibPath = self._getSpecificCuratedCalibrationPath(datasetType.name)
        if calibPath is None:
            return

        # Register the dataset type.
        butler.registry.registerDatasetType(datasetType)

        # obs_base can't depend on pipe_tasks, but concrete obs packages can,
        # so we have to defer the import.
        from lsst.pipe.tasks.read_curated_calibs import read_all

        # Read calibs, registering a new run for each CALIBDATE as needed.
        # We try to avoid registering runs multiple times as an optimization
        # by putting them in the ``runs`` set that was passed in.
        camera = self.getCamera()
        calibsDict = read_all(calibPath, camera)[0]  # second return is calib type
        datasetRecords = []
        for det in calibsDict:
            times = sorted(calibsDict[det])
            calibs = [calibsDict[det][time] for time in times]
            times = [astropy.time.Time(t, format="datetime", scale="utc") for t in times]
            times += [None]
            for calib, beginTime, endTime in zip(calibs, times[:-1], times[1:]):
                md = calib.getMetadata()
                run = self.makeCuratedCalibrationRunName(md["CALIBDATE"], *labels)
                if run not in runs:
                    butler.registry.registerRun(run)
                    runs.add(run)
                dataId = DataCoordinate.standardize(
                    universe=butler.registry.dimensions,
                    instrument=self.getName(),
                    detector=md["DETECTOR"],
                )
                datasetRecords.append((calib, dataId, run, Timespan(beginTime, endTime)))

        # The second loop actually does the inserts and filesystem writes. We
        # first do a butler.put on each dataset, inserting it into the run for
        # its calibDate. We remember those refs and group them by timespan, so
        # we can vectorize the certify calls as much as possible.
        refsByTimespan = defaultdict(list)
        with butler.transaction():
            for calib, dataId, run, timespan in datasetRecords:
                refsByTimespan[timespan].append(butler.put(calib, datasetType, dataId, run=run))
            for timespan, refs in refsByTimespan.items():
                butler.registry.certify(collection, refs, timespan)

    @abstractmethod
    def makeDataIdTranslatorFactory(self) -> TranslatorFactory:
        """Return a factory for creating Gen2->Gen3 data ID translators,
        specialized for this instrument.

        Derived class implementations should generally call
        `TranslatorFactory.addGenericInstrumentRules` with appropriate
        arguments, but are not required to (and may not be able to if their
        Gen2 raw data IDs are sufficiently different from the HSC/DECam/CFHT
        norm).

        Returns
        -------
        factory : `TranslatorFactory`
            Factory for `Translator` objects.
        """
        raise NotImplementedError("Must be implemented by derived classes.")

    @staticmethod
    def formatCollectionTimestamp(timestamp: Union[str, datetime.datetime]) -> str:
        """Format a timestamp for use in a collection name.

        Parameters
        ----------
        timestamp : `str` or `datetime.datetime`
            Timestamp to format. May be a date or datetime string in extended
            ISO format (assumed UTC), with or without a timezone specifier, a
            datetime string in basic ISO format with a timezone specifier, a
            naive `datetime.datetime` instance (assumed UTC) or a
            timezone-aware `datetime.datetime` instance (converted to UTC).
            This is intended to cover all forms that string ``CALIBDATE``
            metadata values have taken in the past, as well as the format this
            method itself writes out (to enable round-tripping).

        Returns
        -------
        formatted : `str`
            Standardized string form for the timestamp.
        """
        if isinstance(timestamp, str):
            if "-" in timestamp:
                # Extended ISO format, with - and : delimiters.
                timestamp = datetime.datetime.fromisoformat(timestamp)
            else:
                # Basic ISO format, with no delimiters (what this method
                # returns).
                timestamp = datetime.datetime.strptime(timestamp, "%Y%m%dT%H%M%S%z")
        if not isinstance(timestamp, datetime.datetime):
            raise TypeError(f"Unexpected date/time object: {timestamp!r}.")
        if timestamp.tzinfo is not None:
            timestamp = timestamp.astimezone(datetime.timezone.utc)
        return f"{timestamp:%Y%m%dT%H%M%S}Z"
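
    # A hedged example of the round-trip behavior (values checked against the
    # logic above, but illustrative only):
    #
    #     Instrument.formatCollectionTimestamp("2020-01-17")
    #     # -> '20200117T000000Z'
    #     Instrument.formatCollectionTimestamp("20200117T000000Z")
    #     # -> '20200117T000000Z'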

    @staticmethod
    def makeCollectionTimestamp() -> str:
        """Create a timestamp string for use in a collection name from the
        current time.

        Returns
        -------
        formatted : `str`
            Standardized string form of the current time.
        """
        return Instrument.formatCollectionTimestamp(datetime.datetime.now(tz=datetime.timezone.utc))

    def makeDefaultRawIngestRunName(self) -> str:
        """Make the default instrument-specific run collection string for raw
        data ingest.

        Returns
        -------
        coll : `str`
            Run collection name to be used as the default for ingestion of
            raws.
        """
        return self.makeCollectionName("raw", "all")

    def makeUnboundedCalibrationRunName(self, *labels: str) -> str:
        """Make a RUN collection name appropriate for inserting calibration
        datasets whose validity ranges are unbounded.

        Parameters
        ----------
        *labels : `str`
            Extra strings to be included in the base name, using the default
            delimiter for collection names. Usually this is the name of the
            ticket on which the calibration collection is being created.

        Returns
        -------
        name : `str`
            Run collection name.
        """
        return self.makeCollectionName("calib", *labels, "unbounded")

    def makeCuratedCalibrationRunName(self, calibDate: str, *labels: str) -> str:
        """Make a RUN collection name appropriate for inserting curated
        calibration datasets with the given ``CALIBDATE`` metadata value.

        Parameters
        ----------
        calibDate : `str`
            The ``CALIBDATE`` metadata value.
        *labels : `str`
            Strings to be included in the collection name (before
            ``calibDate``, but after all other terms), using the default
            delimiter for collection names. Usually this is the name of the
            ticket on which the calibration collection is being created.

        Returns
        -------
        name : `str`
            Run collection name.
        """
        return self.makeCollectionName("calib", *labels, "curated", self.formatCollectionTimestamp(calibDate))
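
    # A hedged example of the resulting name (the prefix and ticket label are
    # illustrative): with ``collection_prefix = "HSC"``,
    #
    #     instrument.makeCuratedCalibrationRunName("2020-01-17", "DM-12345")
    #     # -> 'HSC/calib/DM-12345/curated/20200117T000000Z'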

    def makeCalibrationCollectionName(self, *labels: str) -> str:
        """Make a CALIBRATION collection name appropriate for associating
        calibration datasets with validity ranges.

        Parameters
        ----------
        *labels : `str`
            Strings to be appended to the base name, using the default
            delimiter for collection names. Usually this is the name of the
            ticket on which the calibration collection is being created.

        Returns
        -------
        name : `str`
            Calibration collection name.
        """
        return self.makeCollectionName("calib", *labels)

    @staticmethod
    def makeRefCatCollectionName(*labels: str) -> str:
        """Return a global (not instrument-specific) name for a collection
        that holds reference catalogs.

        With no arguments, this returns the name of the collection that holds
        all reference catalogs (usually a ``CHAINED`` collection, at least in
        long-lived repos that may contain more than one reference catalog).

        Parameters
        ----------
        *labels : `str`
            Strings to be added to the global collection name, in order to
            define a collection name for one or more reference catalogs being
            ingested at the same time.

        Returns
        -------
        name : `str`
            Collection name.

        Notes
        -----
        This is a ``staticmethod``, not a ``classmethod``, because it should
        be the same for all instruments.
        """
        return "/".join(("refcats",) + labels)
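
    # For example (the ticket label is illustrative):
    # ``makeRefCatCollectionName()`` returns 'refcats', and
    # ``makeRefCatCollectionName("DM-12345")`` returns 'refcats/DM-12345'.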

    def makeUmbrellaCollectionName(self) -> str:
        """Return the name of the umbrella ``CHAINED`` collection for this
        instrument that combines all standard recommended input collections.

        This method should almost never be overridden by derived classes.

        Returns
        -------
        name : `str`
            Name for the umbrella collection.
        """
        return self.makeCollectionName("defaults")

    def makeCollectionName(self, *labels: str) -> str:
        """Get the instrument-specific collection string to use as derived
        from the supplied labels.

        Parameters
        ----------
        *labels : `str`
            Strings to be combined with the instrument name to form a
            collection name.

        Returns
        -------
        name : `str`
            Collection name to use that includes the instrument's recommended
            prefix.
        """
        return "/".join((self.collection_prefix,) + labels)
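
    # A hedged example (the prefix is illustrative): with
    # ``collection_prefix = "HSC"``, ``makeCollectionName("calib", "unbounded")``
    # returns 'HSC/calib/unbounded'.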


def makeExposureRecordFromObsInfo(obsInfo, universe):
    """Construct an exposure DimensionRecord from
    `astro_metadata_translator.ObservationInfo`.

    Parameters
    ----------
    obsInfo : `astro_metadata_translator.ObservationInfo`
        A `~astro_metadata_translator.ObservationInfo` object corresponding to
        the exposure.
    universe : `DimensionUniverse`
        Set of all known dimensions.

    Returns
    -------
    record : `DimensionRecord`
        A record containing exposure metadata, suitable for insertion into
        a `Registry`.
    """
    dimension = universe["exposure"]

    ra, dec, sky_angle, zenith_angle = (None, None, None, None)
    if obsInfo.tracking_radec is not None:
        icrs = obsInfo.tracking_radec.icrs
        ra = icrs.ra.degree
        dec = icrs.dec.degree
        if obsInfo.boresight_rotation_coord == "sky":
            sky_angle = obsInfo.boresight_rotation_angle.degree
    if obsInfo.altaz_begin is not None:
        zenith_angle = obsInfo.altaz_begin.zen.degree

    return dimension.RecordClass(
        instrument=obsInfo.instrument,
        id=obsInfo.exposure_id,
        obs_id=obsInfo.observation_id,
        group_name=obsInfo.exposure_group,
        group_id=obsInfo.visit_id,
        datetime_begin=obsInfo.datetime_begin,
        datetime_end=obsInfo.datetime_end,
        exposure_time=obsInfo.exposure_time.to_value("s"),
        # We are not mandating that dark_time be calculable.
        dark_time=obsInfo.dark_time.to_value("s") if obsInfo.dark_time is not None else None,
        observation_type=obsInfo.observation_type,
        observation_reason=obsInfo.observation_reason,
        day_obs=obsInfo.observing_day,
        seq_num=obsInfo.observation_counter,
        physical_filter=obsInfo.physical_filter,
        science_program=obsInfo.science_program,
        target_name=obsInfo.object,
        tracking_ra=ra,
        tracking_dec=dec,
        sky_angle=sky_angle,
        zenith_angle=zenith_angle,
    )
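
# A hedged usage sketch (the header source is illustrative; ``header`` and
# ``butler`` are assumed to exist in the caller's context):
#
#     from astro_metadata_translator import ObservationInfo
#     obsInfo = ObservationInfo(header)  # header from a raw FITS file
#     record = makeExposureRecordFromObsInfo(obsInfo, butler.registry.dimensions)
#     butler.registry.insertDimensionData("exposure", record)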


def loadCamera(butler: Butler, dataId: DataId, *, collections: Any = None) -> Tuple[Camera, bool]:
    """Attempt to load versioned camera geometry from a butler, but fall back
    to obtaining a nominal camera from the `Instrument` class if that fails.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        Butler instance to attempt to query for and load a ``camera`` dataset
        from.
    dataId : `dict` or `DataCoordinate`
        Data ID that identifies at least the ``instrument`` and ``exposure``
        dimensions.
    collections : Any, optional
        Collections to be searched, overriding ``self.butler.collections``.
        Can be any of the types supported by the ``collections`` argument
        to butler construction.

    Returns
    -------
    camera : `lsst.afw.cameraGeom.Camera`
        Camera object.
    versioned : `bool`
        If `True`, the camera was obtained from the butler and should
        represent a versioned camera from a calibration repository. If
        `False`, no camera datasets were found, and the returned camera was
        produced by instantiating the appropriate `Instrument` class and
        calling `Instrument.getCamera`.

    Raises
    ------
    LookupError
        Raised when ``dataId`` does not specify a valid data ID.
    """
    if collections is None:
        collections = butler.collections
    # Registry would do data ID expansion internally if we didn't do it first,
    # but we might want an expanded data ID ourselves later, so we do it here
    # to ensure it only happens once.
    # This will also catch problems with the data ID not having keys we need.
    try:
        dataId = butler.registry.expandDataId(dataId, graph=butler.registry.dimensions["exposure"].graph)
    except DataIdError as exc:
        raise LookupError(str(exc)) from exc
    try:
        cameraRef = butler.get("camera", dataId=dataId, collections=collections)
        return cameraRef, True
    except LookupError:
        pass
    instrument = Instrument.fromName(dataId["instrument"], butler.registry)
    return instrument.getCamera(), False
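
# A hedged usage sketch (the repo path, collection, instrument, and exposure
# ID are illustrative):
#
#     butler = Butler("/repo/main", collections=["HSC/calib"])
#     camera, versioned = loadCamera(butler, {"instrument": "HSC", "exposure": 12345})
#     if not versioned:
#         print("Fell back to the nominal, unversioned camera.")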