Coverage for python/lsst/obs/lsst/translators/lsst.py: 34%
398 statements
« prev ^ index » next coverage.py v7.4.4, created at 2024-03-19 02:40 -0700
1# This file is currently part of obs_lsst but is written to allow it
2# to be migrated to the astro_metadata_translator package at a later date.
3#
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the LICENSE file in this directory for details of code ownership.
7#
8# Use of this source code is governed by a 3-clause BSD-style
9# license that can be found in the LICENSE file.
"""Metadata translation support code for LSST headers"""

# Public names exported by this module.
__all__ = ("TZERO", "SIMONYI_LOCATION", "read_detector_ids",
           "compute_detector_exposure_id_generic", "LsstBaseTranslator",
           "SIMONYI_TELESCOPE")
17import os.path
18import yaml
19import logging
20import re
21import datetime
22import hashlib
24import astropy.coordinates
25import astropy.units as u
26from astropy.time import Time, TimeDelta
27from astropy.coordinates import EarthLocation
29from lsst.utils import getPackageDir
31from astro_metadata_translator import cache_translation, FitsTranslator
32from astro_metadata_translator.translators.helpers import tracking_from_degree_headers, \
33 altaz_from_degree_headers
# Reference epoch used when deriving visit IDs from GROUPID time stamps.
TZERO = Time("2015-01-01T00:00", format="isot", scale="utc")
TZERO_DATETIME = TZERO.to_datetime()

# Delimiter to use for multiple filters/gratings
FILTER_DELIMITER = "~"

# Regex to use for parsing a GROUPID string.
# Groups: (1) ISO date-time, (2) 3-digit fraction, (3) optional counter
# following "+" or "#".
GROUP_RE = re.compile(r"^(\d\d\d\d\-\d\d\-\d\dT\d\d:\d\d:\d\d)\.(\d\d\d)(?:[\+#](\d+))?$")

# LSST Default location in the absence of headers
SIMONYI_LOCATION = EarthLocation.from_geodetic(-70.749417, -30.244639, 2663.0)

# Name of the main survey telescope
SIMONYI_TELESCOPE = "Simonyi Survey Telescope"

# Supported controller codes.
# The order here directly relates to the resulting exposure ID
# calculation. Do not reorder. Add new ones to the end.
# OCS, CCS, pHosim, P for simulated OCS, Q for simulated CCS, S for
# simulated images.
CONTROLLERS = "OCHPQS"

# Number of decimal digits allocated to the sequence number in exposure_ids.
_SEQNUM_MAXDIGITS = 5

# Number of decimal digits allocated to the day of observation (and controller
# code) in exposure_ids.
_DAYOBS_MAXDIGITS = 8

# Value added to day_obs for controllers after the default.
_CONTROLLER_INCREMENT = 1000_00_00

# Number of decimal digits used by exposure_ids.
EXPOSURE_ID_MAXDIGITS = _SEQNUM_MAXDIGITS + _DAYOBS_MAXDIGITS

# Root of the obs_lsst package; used to locate camera policy files and the
# header corrections directory.
obs_lsst_packageDir = getPackageDir("obs_lsst")

log = logging.getLogger(__name__)
def read_detector_ids(policyFile):
    """Read a camera policy file and retrieve the mapping from CCD name
    to ID.

    Parameters
    ----------
    policyFile : `str`
        Name of YAML policy file to read, relative to the obs_lsst
        package.

    Returns
    -------
    mapping : `dict` of `str` to (`int`, `str`)
        A `dict` with keys being the full names of the detectors, and the
        value is a `tuple` containing the integer detector number and the
        detector serial number.

    Raises
    ------
    ValueError
        Raised if the policy file can not be opened.

    Notes
    -----
    Reads the camera YAML definition file directly and extracts just the
    IDs and serials. This routine does not use the standard
    `~lsst.obs.base.yamlCamera.YAMLCamera` infrastructure or
    `lsst.afw.cameraGeom`. This is because the translators are intended to
    have minimal dependencies on LSST infrastructure.
    """
    file = os.path.join(obs_lsst_packageDir, policyFile)

    # Prefer the C-accelerated parser since these files are large, but the
    # CSafeLoader attribute only exists when PyYAML was built with libyaml,
    # so fall back to the pure-Python safe loader when it is absent.
    loader = getattr(yaml, "CSafeLoader", yaml.SafeLoader)
    try:
        with open(file) as fh:
            camera = yaml.load(fh, Loader=loader)
    except OSError as e:
        raise ValueError(f"Could not load camera policy file {file}") from e

    # Extract just the detector number and serial for each CCD entry.
    return {ccd: (int(value["id"]), value["serial"])
            for ccd, value in camera["CCDs"].items()}
def compute_detector_exposure_id_generic(exposure_id, detector_num, max_num):
    """Compute the detector_exposure_id from the exposure id and the
    detector number.

    Parameters
    ----------
    exposure_id : `int`
        The exposure ID.
    detector_num : `int`
        The detector number.
    max_num : `int`
        Maximum number of detectors to make space for.

    Returns
    -------
    detector_exposure_id : `int`
        Computed ID.

    Raises
    ------
    ValueError
        The detector number is out of range.
    """
    if detector_num is None:
        raise ValueError("Detector number must be defined.")
    if not 0 <= detector_num < max_num:
        raise ValueError(f"Detector number out of range 0 <= {detector_num} < {max_num}")

    # Shift the exposure ID left by the detector space and add the detector.
    return exposure_id * max_num + detector_num
class LsstBaseTranslator(FitsTranslator):
    """Translation methods useful for all LSST-style headers."""

    # No constant or trivial translations at this level; subclasses
    # provide their own maps.
    _const_map = {}
    _trivial_map = {}

    # Do not specify a name for this translator
    cameraPolicyFile = None
    """Path to policy file relative to obs_lsst root."""

    detectorMapping = None
    """Mapping of detector name to detector number and serial."""

    detectorSerials = None
    """Mapping of detector serial number to raft, number, and name."""

    DETECTOR_MAX = 1000
    """Maximum number of detectors to use when calculating the
    detector_exposure_id.

    Note that because this is the maximum number *of* detectors, for
    zero-based ``detector_num`` values this is one greater than the maximum
    ``detector_num``. It is also often rounded up to the nearest power of
    10 anyway, to allow ``detector_exposure_id`` values to be easily decoded by
    humans.
    """

    _DEFAULT_LOCATION = SIMONYI_LOCATION
    """Default telescope location in absence of relevant FITS headers."""

    _ROLLOVER_TIME = TimeDelta(12*60*60, scale="tai", format="sec")
    """Time delta for the definition of a Rubin Observatory start of day.
    Used when the header is missing. See LSE-400 or SITCOMTN-032 for details.
    """
    @classmethod
    def __init_subclass__(cls, **kwargs):
        """Ensure that subclasses clear their own detector mapping entries
        such that subclasses of translators that use detector mappings
        do not pick up the incorrect values from a parent."""
        # Reset the per-class caches; they are lazily repopulated by
        # detector_mapping() / detector_serials() on first use.
        cls.detectorMapping = None
        cls.detectorSerials = None

        super().__init_subclass__(**kwargs)
195 def search_paths(self):
196 """Search paths to use for LSST data when looking for header correction
197 files.
199 Returns
200 -------
201 path : `list`
202 List with a single element containing the full path to the
203 ``corrections`` directory within the ``obs_lsst`` package.
204 """
205 return [os.path.join(obs_lsst_packageDir, "corrections")]
    @classmethod
    def observing_date_to_offset(cls, observing_date: astropy.time.Time) -> astropy.time.TimeDelta | None:
        """Return the offset to use when calculating the observing day.

        Parameters
        ----------
        observing_date : `astropy.time.Time`
            The date of the observation. Unused.

        Returns
        -------
        offset : `astropy.time.TimeDelta`
            The offset to apply. The default implementation returns a fixed
            number but subclasses can return a different value depending
            on whether the instrument is in the instrument lab or on the
            mountain.
        """
        # Fixed 12-hour rollover for the base class (see _ROLLOVER_TIME).
        return cls._ROLLOVER_TIME
    @classmethod
    def compute_detector_exposure_id(cls, exposure_id, detector_num):
        """Compute the detector exposure ID from detector number and
        exposure ID.

        This is a helper method to allow code working outside the translator
        infrastructure to use the same algorithm.

        Parameters
        ----------
        exposure_id : `int`
            Unique exposure ID.
        detector_num : `int`
            Detector number.

        Returns
        -------
        detector_exposure_id : `int`
            The calculated ID.
        """
        # Imported here rather than at module level — presumably to avoid
        # an import cycle with the packer module; confirm before hoisting.
        from .._packer import RubinDimensionPacker

        return RubinDimensionPacker.pack_id_pair(exposure_id, detector_num)
250 @classmethod
251 def max_detector_exposure_id(cls):
252 """The maximum detector exposure ID expected to be generated by
253 this instrument.
255 Returns
256 -------
257 max_id : `int`
258 The maximum value.
259 """
260 max_exposure_id = cls.max_exposure_id()
261 # We subtract 1 from DETECTOR_MAX because LSST detector_num values are
262 # zero-based, and detector_max is the maximum number *of* detectors,
263 # while this returns the (inclusive) maximum ID value.
264 return cls.compute_detector_exposure_id(max_exposure_id, cls.DETECTOR_MAX - 1)
266 @classmethod
267 def max_exposure_id(cls):
268 """The maximum exposure ID expected from this instrument.
270 Returns
271 -------
272 max_exposure_id : `int`
273 The maximum value.
274 """
275 max_date = "2050-12-31T23:59.999"
276 max_seqnum = 99_999
277 # This controller triggers the largest numbers
278 max_controller = CONTROLLERS[-1]
279 return cls.compute_exposure_id(max_date, max_seqnum, max_controller)
281 @classmethod
282 def detector_mapping(cls):
283 """Returns the mapping of full name to detector ID and serial.
285 Returns
286 -------
287 mapping : `dict` of `str`:`tuple`
288 Returns the mapping of full detector name (group+detector)
289 to detector number and serial.
291 Raises
292 ------
293 ValueError
294 Raised if no camera policy file has been registered with this
295 translation class.
297 Notes
298 -----
299 Will construct the mapping if none has previously been constructed.
300 """
301 if cls.cameraPolicyFile is not None:
302 if cls.detectorMapping is None:
303 cls.detectorMapping = read_detector_ids(cls.cameraPolicyFile)
304 else:
305 raise ValueError(f"Translation class '{cls.__name__}' has no registered camera policy file")
307 return cls.detectorMapping
309 @classmethod
310 def detector_serials(cls):
311 """Obtain the mapping of detector serial to detector group, name,
312 and number.
314 Returns
315 -------
316 info : `dict` of `tuple` of (`str`, `str`, `int`)
317 A `dict` with the serial numbers as keys and values of detector
318 group, name, and number.
319 """
320 if cls.detectorSerials is None:
321 detector_mapping = cls.detector_mapping()
323 if detector_mapping is not None:
324 # Form mapping to go from serial number to names/numbers
325 serials = {}
326 for fullname, (id, serial) in cls.detectorMapping.items():
327 raft, detector_name = fullname.split("_")
328 if serial in serials:
329 raise RuntimeError(f"Serial {serial} is defined in multiple places")
330 serials[serial] = (raft, detector_name, id)
331 cls.detectorSerials = serials
332 else:
333 raise RuntimeError("Unable to obtain detector mapping information")
335 return cls.detectorSerials
337 @classmethod
338 def compute_detector_num_from_name(cls, detector_group, detector_name):
339 """Helper method to return the detector number from the name.
341 Parameters
342 ----------
343 detector_group : `str`
344 Name of the detector grouping. This is generally the raft name.
345 detector_name : `str`
346 Detector name.
348 Returns
349 -------
350 num : `int`
351 Detector number.
352 """
353 fullname = f"{detector_group}_{detector_name}"
355 num = None
356 detector_mapping = cls.detector_mapping()
357 if detector_mapping is None:
358 raise RuntimeError("Unable to obtain detector mapping information")
360 if fullname in detector_mapping:
361 num = detector_mapping[fullname]
362 else:
363 log.warning(f"Unable to determine detector number from detector name {fullname}")
364 return None
366 return num[0]
368 @classmethod
369 def compute_detector_info_from_serial(cls, detector_serial):
370 """Helper method to return the detector information from the serial.
372 Parameters
373 ----------
374 detector_serial : `str`
375 Detector serial ID.
377 Returns
378 -------
379 info : `tuple` of (`str`, `str`, `int`)
380 Detector group, name, and number.
381 """
382 serial_mapping = cls.detector_serials()
383 if serial_mapping is None:
384 raise RuntimeError("Unable to obtain serial mapping information")
386 if detector_serial in serial_mapping:
387 info = serial_mapping[detector_serial]
388 else:
389 raise RuntimeError("Unable to determine detector information from detector serial"
390 f" {detector_serial}")
392 return info
394 @staticmethod
395 def compute_exposure_id(dayobs, seqnum, controller=None):
396 """Helper method to calculate the exposure_id.
398 Parameters
399 ----------
400 dayobs : `str` or `int`
401 Day of observation in either YYYYMMDD or YYYY-MM-DD format.
402 If the string looks like ISO format it will be truncated before the
403 ``T`` before being handled.
404 seqnum : `int` or `str`
405 Sequence number.
406 controller : `str`, optional
407 Controller to use. If this is "O", no change is made to the
408 exposure ID. If it is "C" a 1000 is added to the year component
409 of the exposure ID. If it is "H" a 2000 is added to the year
410 component. This sequence continues with "P" and "Q" controllers.
411 `None` indicates that the controller is not relevant to the
412 exposure ID calculation (generally this is the case for test
413 stand data).
415 Returns
416 -------
417 exposure_id : `int`
418 Exposure ID in form YYYYMMDDnnnnn form.
419 """
420 # We really want an integer but the checks require a str.
421 if isinstance(dayobs, int):
422 dayobs = str(dayobs)
424 if "T" in dayobs:
425 dayobs = dayobs[:dayobs.find("T")]
427 dayobs = dayobs.replace("-", "")
429 if len(dayobs) != 8:
430 raise ValueError(f"Malformed dayobs: {dayobs}")
432 # Expect no more than 99,999 exposures in a day
433 if seqnum >= 10**_SEQNUM_MAXDIGITS:
434 raise ValueError(f"Sequence number ({seqnum}) exceeds limit")
436 dayobs = int(dayobs)
437 if dayobs > 20231004 and controller == "C":
438 # As of this date the CCS controller has a unified counter
439 # with the OCS, so there is no need to adjust the dayobs
440 # to make unique exposure IDs.
441 controller = None
443 # Camera control changes the exposure ID
444 if controller is not None:
445 index = CONTROLLERS.find(controller)
446 if index == -1:
447 raise ValueError(f"Supplied controller, '{controller}' is not "
448 f"in supported list: {CONTROLLERS}")
450 # Increment a thousand years per controller
451 dayobs += _CONTROLLER_INCREMENT * index
453 # Form the number as a string zero padding the sequence number
454 idstr = f"{dayobs}{seqnum:0{_SEQNUM_MAXDIGITS}d}"
456 # Exposure ID has to be an integer
457 return int(idstr)
459 @staticmethod
460 def unpack_exposure_id(exposure_id):
461 """Unpack an exposure ID into dayobs, seqnum, and controller.
463 Parameters
464 ----------
465 exposure_id : `int`
466 Integer exposure ID produced by `compute_exposure_id`.
468 Returns
469 -------
470 dayobs : `str`
471 Day of observation as a YYYYMMDD string.
472 seqnum : `int`
473 Sequence number.
474 controller : `str`
475 Controller code. Will be ``O`` (but should be ignored) for IDs
476 produced by calling `compute_exposure_id` with ``controller=None``.
477 """
478 dayobs, seqnum = divmod(exposure_id, 10**_SEQNUM_MAXDIGITS)
479 controller_index = dayobs // _CONTROLLER_INCREMENT - 2
480 dayobs -= controller_index * _CONTROLLER_INCREMENT
481 return (str(dayobs), seqnum, CONTROLLERS[controller_index], )
483 def _is_on_mountain(self):
484 """Indicate whether these data are coming from the instrument
485 installed on the mountain.
487 Returns
488 -------
489 is : `bool`
490 `True` if instrument is on the mountain.
491 """
492 if "TSTAND" in self._header:
493 return False
494 return True
496 def is_on_sky(self):
497 """Determine if this is an on-sky observation.
499 Returns
500 -------
501 is_on_sky : `bool`
502 Returns True if this is a observation on sky on the
503 summit.
504 """
505 # For LSST we think on sky unless tracksys is local
506 if self.is_key_ok("TRACKSYS"):
507 if self._header["TRACKSYS"].lower() == "local":
508 # not on sky
509 return False
511 # These are obviously not on sky
512 if self.to_observation_type() in ("bias", "dark", "flat"):
513 return False
515 return self._is_on_mountain()
    @cache_translation
    def to_location(self):
        # Docstring will be inherited. Property defined in properties.py
        # Test-stand data has no meaningful telescope location.
        if not self._is_on_mountain():
            return None
        try:
            # Try standard FITS headers
            return super().to_location()
        except (KeyError, TypeError):
            # Fall back to the fixed Simonyi site coordinates.
            return self._DEFAULT_LOCATION
    @cache_translation
    def to_datetime_begin(self):
        # Docstring will be inherited. Property defined in properties.py
        self._used_these_cards("MJD-OBS")
        # MJD-OBS is interpreted on the TAI scale here.
        return Time(self._header["MJD-OBS"], scale="tai", format="mjd")
534 @cache_translation
535 def to_datetime_end(self):
536 # Docstring will be inherited. Property defined in properties.py
537 if self.is_key_ok("DATE-END"):
538 return super().to_datetime_end()
540 exposure_time = self.to_exposure_time()
541 if exposure_time.value < 0.0:
542 # Some translators deliberately return -1.0s if the exposure
543 # time can not be determined. In that scenario set end time
544 # to the same value as the start time.
545 return self.to_datetime_begin()
547 return self.to_datetime_begin() + exposure_time
549 @cache_translation
550 def to_detector_num(self):
551 # Docstring will be inherited. Property defined in properties.py
552 raft = self.to_detector_group()
553 detector = self.to_detector_name()
554 return self.compute_detector_num_from_name(raft, detector)
556 @cache_translation
557 def to_detector_exposure_id(self):
558 # Docstring will be inherited. Property defined in properties.py
559 exposure_id = self.to_exposure_id()
560 num = self.to_detector_num()
561 return self.compute_detector_exposure_id(exposure_id, num)
563 @cache_translation
564 def to_observation_type(self):
565 # Docstring will be inherited. Property defined in properties.py
566 obstype = self._header["IMGTYPE"]
567 self._used_these_cards("IMGTYPE")
568 obstype = obstype.lower()
569 if obstype in ("skyexp", "object"):
570 obstype = "science"
571 return obstype
573 @cache_translation
574 def to_observation_reason(self):
575 # Docstring will be inherited. Property defined in properties.py
576 for key in ("REASON", "TESTTYPE"):
577 if self.is_key_ok(key):
578 reason = self._header[key]
579 self._used_these_cards(key)
580 return reason.lower()
581 # no specific header present so use the default translation
582 return super().to_observation_reason()
584 @cache_translation
585 def to_dark_time(self):
586 """Calculate the dark time.
588 If a DARKTIME header is not found, the value is assumed to be
589 identical to the exposure time.
591 Returns
592 -------
593 dark : `astropy.units.Quantity`
594 The dark time in seconds.
595 """
596 if self.is_key_ok("DARKTIME"):
597 darktime = self._header["DARKTIME"]*u.s
598 self._used_these_cards("DARKTIME")
599 else:
600 log.warning("%s: Unable to determine dark time. Setting from exposure time.",
601 self._log_prefix)
602 darktime = self.to_exposure_time()
603 return darktime
605 @cache_translation
606 def to_exposure_id(self):
607 """Generate a unique exposure ID number
609 This is a combination of DAYOBS and SEQNUM, and optionally
610 CONTRLLR.
612 Returns
613 -------
614 exposure_id : `int`
615 Unique exposure number.
616 """
617 if "CALIB_ID" in self._header:
618 self._used_these_cards("CALIB_ID")
619 return None
621 dayobs = self._header["DAYOBS"]
622 seqnum = self._header["SEQNUM"]
623 self._used_these_cards("DAYOBS", "SEQNUM")
625 if self.is_key_ok("CONTRLLR"):
626 controller = self._header["CONTRLLR"]
627 self._used_these_cards("CONTRLLR")
628 else:
629 controller = None
631 return self.compute_exposure_id(dayobs, seqnum, controller=controller)
    @cache_translation
    def to_visit_id(self):
        """Calculate the visit associated with this exposure.

        Returns
        -------
        visit_id : `int`
            The visit identifier.

        Notes
        -----
        For LATISS and LSSTCam the default visit is derived from the
        exposure group. For other instruments we return the exposure_id.
        """
        exposure_group = self.to_exposure_group()
        # If the group is an int we return it
        try:
            visit_id = int(exposure_group)
            return visit_id
        except ValueError:
            pass

        # A Group is defined as ISO date with an extension
        # The integer must be the same for a given group so we can never
        # use datetime_begin.
        # Nominally a GROUPID looks like "ISODATE+N" where the +N is
        # optional. This can be converted to seconds since epoch with
        # an adjustment for N.
        # For early data lacking that form we hash the group and return
        # the int.
        matches_date = GROUP_RE.match(exposure_group)
        if matches_date:
            iso_str = matches_date.group(1)
            fraction = matches_date.group(2)
            n = matches_date.group(3)
            if n is not None:
                n = int(n)
            else:
                n = 0
            iso = datetime.datetime.strptime(iso_str, "%Y-%m-%dT%H:%M:%S")

            # Seconds since the fixed TZERO reference epoch.
            tdelta = iso - TZERO_DATETIME
            epoch = int(tdelta.total_seconds())

            # Form the integer from EPOCH + 3 DIGIT FRAC + 0-pad N
            visit_id = int(f"{epoch}{fraction}{n:04d}")
        else:
            # Non-standard string so convert to numbers
            # using a hash function. Use the first N hex digits
            group_bytes = exposure_group.encode("us-ascii")
            hasher = hashlib.blake2b(group_bytes)
            # Need to be big enough it does not possibly clash with the
            # date-based version above
            digest = hasher.hexdigest()[:14]
            visit_id = int(digest, base=16)

            # To help with hash collision, append the string length
            visit_id = int(f"{visit_id}{len(exposure_group):02d}")

        return visit_id
690 @cache_translation
691 def to_physical_filter(self):
692 """Calculate the physical filter name.
694 Returns
695 -------
696 filter : `str`
697 Name of filter. Can be a combination of FILTER, FILTER1 and FILTER2
698 headers joined by a "~". Returns "unknown" if no filter is declared
699 """
700 joined = self._join_keyword_values(["FILTER", "FILTER1", "FILTER2"], delim=FILTER_DELIMITER)
701 if not joined:
702 joined = "unknown"
704 # Replace instances of "NONE" with "none".
705 joined = joined.replace("NONE", "none")
707 return joined
    @cache_translation
    def to_tracking_radec(self):
        # RA/DEC are *derived* headers and for the case where the DATE-BEG
        # is 1970 they are garbage and should not be used.
        try:
            if self._header["DATE-OBS"] == self._header["DATE"]:
                # A fixed up date -- use AZEL as source of truth
                altaz = self.to_altaz_begin()
                radec = astropy.coordinates.SkyCoord(altaz.transform_to(astropy.coordinates.ICRS()),
                                                     obstime=altaz.obstime,
                                                     location=altaz.location)
            else:
                # Prefer the start-of-exposure coordinates, falling back to
                # the plain RA/DEC pair.
                radecsys = ("RADESYS",)
                radecpairs = (("RASTART", "DECSTART"), ("RA", "DEC"))
                radec = tracking_from_degree_headers(self, radecsys, radecpairs)
        except Exception:
            # If this observation was not formally on sky then we are allowed
            # to return None.
            if self.is_on_sky():
                raise
            radec = None

        return radec
    @cache_translation
    def to_altaz_begin(self):
        # Test-stand data has no meaningful pointing information.
        if not self._is_on_mountain():
            return None

        # Always attempt to find the alt/az values regardless of observation
        # type.
        return altaz_from_degree_headers(self, (("ELSTART", "AZSTART"),),
                                         self.to_datetime_begin(), is_zd=False)
743 @cache_translation
744 def to_exposure_group(self):
745 """Calculate the exposure group string.
747 For LSSTCam and LATISS this is read from the ``GROUPID`` header.
748 If that header is missing the exposure_id is returned instead as
749 a string.
750 """
751 if self.is_key_ok("GROUPID"):
752 exposure_group = self._header["GROUPID"]
753 self._used_these_cards("GROUPID")
754 return exposure_group
755 return super().to_exposure_group()
757 @cache_translation
758 def to_focus_z(self):
759 """Return the defocal distance of the camera in units of mm.
760 If there is no ``FOCUSZ`` value in the header it will return
761 the default 0.0mm value.
763 Returns
764 -------
765 focus_z: `astropy.units.Quantity`
766 The defocal distance from header in mm or the 0.0mm default
767 """
768 if self.is_key_ok("FOCUSZ"):
769 focus_z = self._header["FOCUSZ"]
770 return focus_z * u.mm
771 return super().to_focus_z()
773 @staticmethod
774 def _is_filter_empty(filter):
775 """Return true if the supplied filter indicates an empty filter slot
777 Parameters
778 ----------
779 filter : `str`
780 The filter string to check.
782 Returns
783 -------
784 is_empty : `bool`
785 `True` if the filter string looks like it is referring to an
786 empty filter slot. For example this can be if the filter is
787 "empty" or "empty_2".
788 """
789 return bool(re.match(r"empty_?\d*$", filter.lower()))
791 def _determine_primary_filter(self):
792 """Determine the primary filter from the ``FILTER`` header.
794 Returns
795 -------
796 filter : `str`
797 The contents of the ``FILTER`` header with some appropriate
798 defaulting.
799 """
801 if self.is_key_ok("FILTER"):
802 physical_filter = self._header["FILTER"]
803 self._used_these_cards("FILTER")
805 if self._is_filter_empty(physical_filter):
806 physical_filter = "empty"
807 else:
808 # Be explicit about having no knowledge of the filter
809 # by setting it to "unknown". It should always have a value.
810 physical_filter = "unknown"
812 # Warn if the filter being unknown is important
813 obstype = self.to_observation_type()
814 if obstype not in ("bias", "dark"):
815 log.warning("%s: Unable to determine the filter",
816 self._log_prefix)
818 return physical_filter
820 @cache_translation
821 def to_observing_day(self):
822 """Return the day of observation as YYYYMMDD integer.
824 For LSSTCam and other compliant instruments this is the value
825 of the DAYOBS header.
827 Returns
828 -------
829 obs_day : `int`
830 The day of observation.
831 """
832 if self.is_key_ok("DAYOBS"):
833 self._used_these_cards("DAYOBS")
834 return int(self._header["DAYOBS"])
836 return super().to_observing_day()
838 @cache_translation
839 def to_observation_counter(self):
840 """Return the sequence number within the observing day.
842 Returns
843 -------
844 counter : `int`
845 The sequence number for this day.
846 """
847 if self.is_key_ok("SEQNUM"):
848 # Some older LATISS data may not have the header
849 # but this is corrected in fix_header for LATISS.
850 self._used_these_cards("SEQNUM")
851 return int(self._header["SEQNUM"])
853 # This indicates a problem so we warn and return a 0
854 log.warning("%s: Unable to determine the observation counter so returning 0",
855 self._log_prefix)
856 return 0
858 @cache_translation
859 def to_boresight_rotation_coord(self):
860 """Boresight rotation angle.
862 Only relevant for science observations.
863 """
864 unknown = "unknown"
865 if not self.is_on_sky():
866 return unknown
868 self._used_these_cards("ROTCOORD")
869 coord = self._header.get("ROTCOORD", unknown)
870 if coord is None:
871 coord = unknown
872 return coord
874 @cache_translation
875 def to_boresight_airmass(self):
876 """Calculate airmass at boresight at start of observation.
878 Notes
879 -----
880 Early data are missing AMSTART header so we fall back to calculating
881 it from ELSTART.
882 """
883 if not self.is_on_sky():
884 return None
886 # This observation should have AMSTART
887 amkey = "AMSTART"
888 if self.is_key_ok(amkey):
889 self._used_these_cards(amkey)
890 return self._header[amkey]
892 # Instead we need to look at azel
893 altaz = self.to_altaz_begin()
894 if altaz is not None:
895 return altaz.secz.to_value()
897 log.warning("%s: Unable to determine airmass of a science observation, returning 1.",
898 self._log_prefix)
899 return 1.0
901 @cache_translation
902 def to_group_counter_start(self):
903 # Effectively the start of the visit as determined by the headers.
904 counter = self.to_observation_counter()
905 # Older data does not have the CURINDEX header.
906 if self.is_key_ok("CURINDEX"):
907 # CURINDEX is 1-based.
908 seq_start = counter - self._header["CURINDEX"] + 1
909 self._used_these_cards("CURINDEX")
910 return seq_start
911 else:
912 # If the counter is 0 we need to pick something else
913 # that is not going to confuse the visit calculation
914 # (since setting everything to 0 will make one big visit).
915 return counter if counter != 0 else self.to_exposure_id()
917 @cache_translation
918 def to_group_counter_end(self):
919 # Effectively the end of the visit as determined by the headers.
920 counter = self.to_observation_counter()
921 # Older data does not have the CURINDEX or MAXINDEX headers.
922 if self.is_key_ok("CURINDEX") and self.is_key_ok("MAXINDEX"):
923 # CURINDEX is 1-based. CURINDEX == MAXINDEX indicates the
924 # final exposure in the sequence.
925 remaining = self._header["MAXINDEX"] - self._header["CURINDEX"]
926 seq_end = counter + remaining
927 self._used_these_cards("CURINDEX", "MAXINDEX")
928 return seq_end
929 else:
930 # If the counter is 0 we need to pick something else
931 # that is not going to confuse the visit calculation
932 # (since setting everything to 0 will make one big visit).
933 return counter if counter != 0 else self.to_exposure_id()
935 @cache_translation
936 def to_has_simulated_content(self):
937 # Check all the simulation flags.
938 # We do not know all the simulation flags that we may have so
939 # must check every header key. Ideally HIERARCH SIMULATE would
940 # be a hierarchical header so _header["SIMULATE"] would return
941 # everything. The header looks like:
942 #
943 # HIERARCH SIMULATE ATMCS = / ATMCS Simulation Mode
944 # HIERARCH SIMULATE ATHEXAPOD = 0 / ATHexapod Simulation Mode
945 # HIERARCH SIMULATE ATPNEUMATICS = / ATPneumatics Simulation Mode
946 # HIERARCH SIMULATE ATDOME = 1 / ATDome Simulation Mode
947 # HIERARCH SIMULATE ATSPECTROGRAPH = 0 / ATSpectrograph Simulation Mode
948 #
949 # So any header that includes "SIMULATE" in the key name and has a
950 # true value implies that something in the data is simulated.
951 for k, v in self._header.items():
952 if "SIMULATE" in k and v:
953 return True
955 # If the controller is H, P, or Q then the data are simulated.
956 ctrlr_key = "CONTRLLR"
957 if self.is_key_ok(ctrlr_key):
958 controller = self._header[ctrlr_key]
959 self._used_these_cards(ctrlr_key)
960 if controller in "HPQ":
961 return True
963 # No simulation flags set.
964 return False
966 @cache_translation
967 def to_relative_humidity(self) -> float | None:
968 key = "HUMIDITY"
969 if self.is_key_ok(key):
970 self._used_these_cards(key)
971 return self._header[key]
973 return None
975 @cache_translation
976 def to_pressure(self):
977 key = "PRESSURE"
978 if self.is_key_ok(key):
979 value = self._header[key]
980 # There has been an inconsistency in units for the pressure reading
981 # so we need to adjust for this.
982 if value > 10_000:
983 unit = u.Pa
984 else:
985 unit = u.hPa
986 return value * unit
988 return None
990 @cache_translation
991 def to_temperature(self):
992 key = "AIRTEMP"
993 if self.is_key_ok(key):
994 return self._header[key] * u.deg_C
995 return None