Coverage for python/lsst/obs/lsst/translators/lsst.py: 24%
355 statements

# This file is currently part of obs_lsst but is written to allow it
# to be migrated to the astro_metadata_translator package at a later date.
#
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the LICENSE file in this directory for details of code ownership.
#
# Use of this source code is governed by a 3-clause BSD-style
# license that can be found in the LICENSE file.

"""Metadata translation support code for LSST headers"""

__all__ = ("TZERO", "SIMONYI_LOCATION", "read_detector_ids",
           "compute_detector_exposure_id_generic", "LsstBaseTranslator",
           "SIMONYI_TELESCOPE")

import os.path
import yaml
import logging
import re
import datetime
import hashlib

import astropy.coordinates
import astropy.units as u
from astropy.time import Time, TimeDelta
from astropy.coordinates import EarthLocation

from lsst.utils import getPackageDir

from astro_metadata_translator import cache_translation, FitsTranslator
from astro_metadata_translator.translators.helpers import tracking_from_degree_headers, \
    altaz_from_degree_headers


TZERO = Time("2015-01-01T00:00", format="isot", scale="utc")
TZERO_DATETIME = TZERO.to_datetime()

# Delimiter to use for multiple filters/gratings
FILTER_DELIMITER = "~"

# Regex to use for parsing a GROUPID string
GROUP_RE = re.compile(r"^(\d\d\d\d\-\d\d\-\d\dT\d\d:\d\d:\d\d)\.(\d\d\d)(?:[\+#](\d+))?$")
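
# Illustrative note (added for clarity, not in the original module): the
# regex above matches GROUPID strings such as "2022-11-19T01:02:03.456+2",
# capturing the ISO timestamp "2022-11-19T01:02:03", the millisecond field
# "456", and the optional trailing counter "2".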

# LSST default location in the absence of headers
SIMONYI_LOCATION = EarthLocation.from_geodetic(-70.749417, -30.244639, 2663.0)

# Name of the main survey telescope
SIMONYI_TELESCOPE = "Simonyi Survey Telescope"

# Supported controller codes.
# The order here directly relates to the resulting exposure ID
# calculation. Do not reorder. Add new ones to the end.
# O for OCS, C for CCS, H for pHoSim, P for simulated OCS,
# Q for simulated CCS.
CONTROLLERS = "OCHPQ"
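
# For example (illustrative only): because "C" is at index 1 and "H" at
# index 2, a CCS exposure taken on day_obs 20221119 has its exposure ID
# prefix shifted to 30221119 and a pHoSim one to 40221119 (see
# compute_exposure_id below).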

obs_lsst_packageDir = getPackageDir("obs_lsst")

log = logging.getLogger(__name__)


def read_detector_ids(policyFile):
    """Read a camera policy file and retrieve the mapping from CCD name
    to ID.

    Parameters
    ----------
    policyFile : `str`
        Name of YAML policy file to read, relative to the obs_lsst
        package.

    Returns
    -------
    mapping : `dict` of `str` to (`int`, `str`)
        A `dict` with keys being the full names of the detectors, and the
        value is a `tuple` containing the integer detector number and the
        detector serial number.

    Notes
    -----
    Reads the camera YAML definition file directly and extracts just the
    IDs and serials. This routine does not use the standard
    `~lsst.obs.base.yamlCamera.YAMLCamera` infrastructure or
    `lsst.afw.cameraGeom`. This is because the translators are intended to
    have minimal dependencies on LSST infrastructure.
    """

    file = os.path.join(obs_lsst_packageDir, policyFile)
    try:
        with open(file) as fh:
            # Use the fast parser since these files are large
            camera = yaml.load(fh, Loader=yaml.CSafeLoader)
    except OSError as e:
        raise ValueError(f"Could not load camera policy file {file}") from e

    mapping = {}
    for ccd, value in camera["CCDs"].items():
        mapping[ccd] = (int(value["id"]), value["serial"])

    return mapping
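
# Minimal usage sketch (the policy file name below is hypothetical; real
# callers pass a translator's ``cameraPolicyFile``):
#
#     mapping = read_detector_ids("policy/lsstCam.yaml")
#     detector_num, serial = mapping["R22_S11"]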


def compute_detector_exposure_id_generic(exposure_id, detector_num, max_num):
    """Compute the detector_exposure_id from the exposure id and the
    detector number.

    Parameters
    ----------
    exposure_id : `int`
        The exposure ID.
    detector_num : `int`
        The detector number.
    max_num : `int`
        Maximum number of detectors to make space for.

    Returns
    -------
    detector_exposure_id : `int`
        Computed ID.

    Raises
    ------
    ValueError
        The detector number is out of range.
    """

    if detector_num is None:
        raise ValueError("Detector number must be defined.")
    if detector_num >= max_num or detector_num < 0:
        raise ValueError(f"Detector number out of range 0 <= {detector_num} < {max_num}")

    return max_num*exposure_id + detector_num
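
# Worked example (values are illustrative): with max_num=1000,
# exposure_id=2022111900123 and detector_num=42, the result is
# 1000 * 2022111900123 + 42 == 2022111900123042.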


class LsstBaseTranslator(FitsTranslator):
    """Translation methods useful for all LSST-style headers."""

    _const_map = {}
    _trivial_map = {}

    # Do not specify a name for this translator
    cameraPolicyFile = None
    """Path to policy file relative to obs_lsst root."""

    detectorMapping = None
    """Mapping of detector name to detector number and serial."""

    detectorSerials = None
    """Mapping of detector serial number to raft, number, and name."""

    DETECTOR_MAX = 1000
    """Maximum number of detectors to use when calculating the
    detector_exposure_id.

    Note that because this is the maximum number *of* detectors, for
    zero-based ``detector_num`` values this is one greater than the maximum
    ``detector_num``. It is also often rounded up to the nearest power of
    10 anyway, to allow ``detector_exposure_id`` values to be easily decoded by
    humans.
    """

    _DEFAULT_LOCATION = SIMONYI_LOCATION
    """Default telescope location in absence of relevant FITS headers."""

    _ROLLOVER_TIME = TimeDelta(12*60*60, scale="tai", format="sec")
    """Time delta for the definition of a Rubin Observatory start of day.
    Used when the header is missing. See LSE-400 for details."""

    @classmethod
    def __init_subclass__(cls, **kwargs):
        """Ensure that subclasses clear their own detector mapping entries
        so that translator subclasses that use detector mappings do not
        pick up incorrect values from a parent."""

        cls.detectorMapping = None
        cls.detectorSerials = None

        super().__init_subclass__(**kwargs)

    def search_paths(self):
        """Search paths to use for LSST data when looking for header correction
        files.

        Returns
        -------
        path : `list`
            List with a single element containing the full path to the
            ``corrections`` directory within the ``obs_lsst`` package.
        """
        return [os.path.join(obs_lsst_packageDir, "corrections")]

    @classmethod
    def compute_detector_exposure_id(cls, exposure_id, detector_num):
        """Compute the detector exposure ID from detector number and
        exposure ID.

        This is a helper method to allow code working outside the translator
        infrastructure to use the same algorithm.

        Parameters
        ----------
        exposure_id : `int`
            Unique exposure ID.
        detector_num : `int`
            Detector number.

        Returns
        -------
        detector_exposure_id : `int`
            The calculated ID.
        """
        return compute_detector_exposure_id_generic(exposure_id, detector_num, max_num=cls.DETECTOR_MAX)

    @classmethod
    def max_detector_exposure_id(cls):
        """The maximum detector exposure ID expected to be generated by
        this instrument.

        Returns
        -------
        max_id : `int`
            The maximum value.
        """
        max_exposure_id = cls.max_exposure_id()
        # We subtract 1 from DETECTOR_MAX because LSST detector_num values are
        # zero-based, and DETECTOR_MAX is the maximum number *of* detectors,
        # while this returns the (inclusive) maximum ID value.
        return cls.compute_detector_exposure_id(max_exposure_id, cls.DETECTOR_MAX - 1)

    @classmethod
    def max_exposure_id(cls):
        """The maximum exposure ID expected from this instrument.

        Returns
        -------
        max_exposure_id : `int`
            The maximum value.
        """
        max_date = "2050-12-31T23:59.999"
        max_seqnum = 99_999
        # This controller triggers the largest numbers
        max_controller = CONTROLLERS[-1]
        return cls.compute_exposure_id(max_date, max_seqnum, max_controller)
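
    # Illustrative consequence of the two limits above: with the "Q"
    # controller at index 4 in CONTROLLERS, max_exposure_id() evaluates to
    # 6050123199999 (day 20501231 shifted by 4 * 1000_00_00, followed by
    # seqnum 99999), and max_detector_exposure_id() is then
    # 1000 * 6050123199999 + 999 == 6050123199999999.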

    @classmethod
    def detector_mapping(cls):
        """Returns the mapping of full name to detector ID and serial.

        Returns
        -------
        mapping : `dict` of `str` : `tuple`
            Returns the mapping of full detector name (group+detector)
            to detector number and serial.

        Raises
        ------
        ValueError
            Raised if no camera policy file has been registered with this
            translation class.

        Notes
        -----
        Will construct the mapping if none has previously been constructed.
        """
        if cls.cameraPolicyFile is not None:
            if cls.detectorMapping is None:
                cls.detectorMapping = read_detector_ids(cls.cameraPolicyFile)
        else:
            raise ValueError(f"Translation class '{cls.__name__}' has no registered camera policy file")

        return cls.detectorMapping

    @classmethod
    def detector_serials(cls):
        """Obtain the mapping of detector serial to detector group, name,
        and number.

        Returns
        -------
        info : `dict` of `tuple` of (`str`, `str`, `int`)
            A `dict` with the serial numbers as keys and values of detector
            group, name, and number.
        """
        if cls.detectorSerials is None:
            detector_mapping = cls.detector_mapping()

            if detector_mapping is not None:
                # Form mapping to go from serial number to names/numbers
                serials = {}
                for fullname, (id, serial) in cls.detectorMapping.items():
                    raft, detector_name = fullname.split("_")
                    if serial in serials:
                        raise RuntimeError(f"Serial {serial} is defined in multiple places")
                    serials[serial] = (raft, detector_name, id)
                cls.detectorSerials = serials
            else:
                raise RuntimeError("Unable to obtain detector mapping information")

        return cls.detectorSerials

    @classmethod
    def compute_detector_num_from_name(cls, detector_group, detector_name):
        """Helper method to return the detector number from the name.

        Parameters
        ----------
        detector_group : `str`
            Name of the detector grouping. This is generally the raft name.
        detector_name : `str`
            Detector name.

        Returns
        -------
        num : `int`
            Detector number.
        """
        fullname = f"{detector_group}_{detector_name}"

        num = None
        detector_mapping = cls.detector_mapping()
        if detector_mapping is None:
            raise RuntimeError("Unable to obtain detector mapping information")

        if fullname in detector_mapping:
            num = detector_mapping[fullname]
        else:
            log.warning(f"Unable to determine detector number from detector name {fullname}")
            return None

        return num[0]
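
    # Usage sketch (``SomeLsstTranslator`` stands in for any concrete
    # subclass that registers a camera policy file; the detector name and
    # resulting number are hypothetical):
    #
    #     num = SomeLsstTranslator.compute_detector_num_from_name("R22", "S11")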

    @classmethod
    def compute_detector_info_from_serial(cls, detector_serial):
        """Helper method to return the detector information from the serial.

        Parameters
        ----------
        detector_serial : `str`
            Detector serial ID.

        Returns
        -------
        info : `tuple` of (`str`, `str`, `int`)
            Detector group, name, and number.
        """
        serial_mapping = cls.detector_serials()
        if serial_mapping is None:
            raise RuntimeError("Unable to obtain serial mapping information")

        if detector_serial in serial_mapping:
            info = serial_mapping[detector_serial]
        else:
            raise RuntimeError("Unable to determine detector information from detector serial"
                               f" {detector_serial}")

        return info

    @staticmethod
    def compute_exposure_id(dayobs, seqnum, controller=None):
        """Helper method to calculate the exposure_id.

        Parameters
        ----------
        dayobs : `str`
            Day of observation in either YYYYMMDD or YYYY-MM-DD format.
            If the string looks like ISO format it will be truncated at the
            ``T`` before being handled.
        seqnum : `int` or `str`
            Sequence number.
        controller : `str`, optional
            Controller to use. If this is "O", no change is made to the
            exposure ID. If it is "C", 1000 is added to the year component
            of the exposure ID. If it is "H", 2000 is added to the year
            component. This sequence continues with the "P" and "Q"
            controllers. `None` indicates that the controller is not
            relevant to the exposure ID calculation (generally this is the
            case for test stand data).

        Returns
        -------
        exposure_id : `int`
            Exposure ID in YYYYMMDDnnnnn form.
        """
        if "T" in dayobs:
            dayobs = dayobs[:dayobs.find("T")]

        dayobs = dayobs.replace("-", "")

        if len(dayobs) != 8:
            raise ValueError(f"Malformed dayobs: {dayobs}")

        # Expect no more than 99,999 exposures in a day
        maxdigits = 5
        if seqnum >= 10**maxdigits:
            raise ValueError(f"Sequence number ({seqnum}) exceeds limit")

        # Camera control changes the exposure ID
        if controller is not None:
            index = CONTROLLERS.find(controller)
            if index == -1:
                raise ValueError(f"Supplied controller, '{controller}', is not "
                                 f"in supported list: {CONTROLLERS}")
            dayobs = int(dayobs)
            # Increment a thousand years per controller
            dayobs += 1000_00_00 * index

        # Form the number as a string, zero padding the sequence number
        idstr = f"{dayobs}{seqnum:0{maxdigits}d}"

        # Exposure ID has to be an integer
        return int(idstr)
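
    # Worked example (illustrative): compute_exposure_id("2022-11-19", 42, "C")
    # strips the dashes to give 20221119, finds "C" at index 1 in CONTROLLERS
    # so the day becomes 30221119, and zero-padding the sequence number to
    # five digits yields the exposure ID 3022111900042.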

    def _is_on_mountain(self):
        """Indicate whether these data are coming from the instrument
        installed on the mountain.

        Returns
        -------
        is : `bool`
            `True` if the instrument is on the mountain.
        """
        if "TSTAND" in self._header:
            return False
        return True

    def is_on_sky(self):
        """Determine if this is an on-sky observation.

        Returns
        -------
        is_on_sky : `bool`
            Returns `True` if this is an on-sky observation taken on the
            summit.
        """
        # For LSST we assume on-sky unless the tracking system is local
        if self.is_key_ok("TRACKSYS"):
            if self._header["TRACKSYS"].lower() == "local":
                # not on sky
                return False

        # These are obviously not on sky
        if self.to_observation_type() in ("bias", "dark", "flat"):
            return False

        return self._is_on_mountain()

    @cache_translation
    def to_location(self):
        # Docstring will be inherited. Property defined in properties.py
        if not self._is_on_mountain():
            return None
        try:
            # Try standard FITS headers
            return super().to_location()
        except KeyError:
            return self._DEFAULT_LOCATION

    @cache_translation
    def to_datetime_begin(self):
        # Docstring will be inherited. Property defined in properties.py
        self._used_these_cards("MJD-OBS")
        return Time(self._header["MJD-OBS"], scale="tai", format="mjd")

    @cache_translation
    def to_datetime_end(self):
        # Docstring will be inherited. Property defined in properties.py
        if self.is_key_ok("DATE-END"):
            return super().to_datetime_end()

        return self.to_datetime_begin() + self.to_exposure_time()

    @cache_translation
    def to_detector_num(self):
        # Docstring will be inherited. Property defined in properties.py
        raft = self.to_detector_group()
        detector = self.to_detector_name()
        return self.compute_detector_num_from_name(raft, detector)

    @cache_translation
    def to_detector_exposure_id(self):
        # Docstring will be inherited. Property defined in properties.py
        exposure_id = self.to_exposure_id()
        num = self.to_detector_num()
        return self.compute_detector_exposure_id(exposure_id, num)

    @cache_translation
    def to_observation_type(self):
        # Docstring will be inherited. Property defined in properties.py
        obstype = self._header["IMGTYPE"]
        self._used_these_cards("IMGTYPE")
        obstype = obstype.lower()
        if obstype in ("skyexp", "object"):
            obstype = "science"
        return obstype

    @cache_translation
    def to_observation_reason(self):
        # Docstring will be inherited. Property defined in properties.py
        for key in ("REASON", "TESTTYPE"):
            if self.is_key_ok(key):
                reason = self._header[key]
                self._used_these_cards(key)
                return reason.lower()
        # no specific header present so use the default translation
        return super().to_observation_reason()

    @cache_translation
    def to_dark_time(self):
        """Calculate the dark time.

        If a DARKTIME header is not found, the value is assumed to be
        identical to the exposure time.

        Returns
        -------
        dark : `astropy.units.Quantity`
            The dark time in seconds.
        """
        if self.is_key_ok("DARKTIME"):
            darktime = self._header["DARKTIME"]*u.s
            self._used_these_cards("DARKTIME")
        else:
            log.warning("%s: Unable to determine dark time. Setting from exposure time.",
                        self._log_prefix)
            darktime = self.to_exposure_time()
        return darktime

    @cache_translation
    def to_exposure_id(self):
        """Generate a unique exposure ID number.

        This is a combination of DAYOBS and SEQNUM, and optionally
        CONTRLLR.

        Returns
        -------
        exposure_id : `int`
            Unique exposure number.
        """
        if "CALIB_ID" in self._header:
            self._used_these_cards("CALIB_ID")
            return None

        dayobs = self._header["DAYOBS"]
        seqnum = self._header["SEQNUM"]
        self._used_these_cards("DAYOBS", "SEQNUM")

        if self.is_key_ok("CONTRLLR"):
            controller = self._header["CONTRLLR"]
            self._used_these_cards("CONTRLLR")
        else:
            controller = None

        return self.compute_exposure_id(dayobs, seqnum, controller=controller)

    @cache_translation
    def to_visit_id(self):
        """Calculate the visit associated with this exposure.

        Notes
        -----
        For LATISS and LSSTCam the default visit is derived from the
        exposure group. For other instruments we return the exposure_id.
        """
        exposure_group = self.to_exposure_group()
        # If the group is an int we return it
        try:
            visit_id = int(exposure_group)
            return visit_id
        except ValueError:
            pass

        # A group is defined as an ISO date with an extension.
        # The integer must be the same for a given group, so we can never
        # use datetime_begin.
        # Nominally a GROUPID looks like "ISODATE+N" where the +N is
        # optional. This can be converted to seconds since epoch with
        # an adjustment for N.
        # For early data lacking that form we hash the group and return
        # the int.
        matches_date = GROUP_RE.match(exposure_group)
        if matches_date:
            iso_str = matches_date.group(1)
            fraction = matches_date.group(2)
            n = matches_date.group(3)
            if n is not None:
                n = int(n)
            else:
                n = 0
            iso = datetime.datetime.strptime(iso_str, "%Y-%m-%dT%H:%M:%S")

            tdelta = iso - TZERO_DATETIME
            epoch = int(tdelta.total_seconds())

            # Form the integer from EPOCH + 3-digit FRAC + zero-padded N
            visit_id = int(f"{epoch}{fraction}{n:04d}")
        else:
            # Non-standard string so convert to numbers
            # using a hash function. Use the first N hex digits.
            group_bytes = exposure_group.encode("us-ascii")
            hasher = hashlib.blake2b(group_bytes)
            # Needs to be big enough that it cannot clash with the
            # date-based version above.
            digest = hasher.hexdigest()[:14]
            visit_id = int(digest, base=16)

            # To help with hash collisions, append the string length
            visit_id = int(f"{visit_id}{len(exposure_group):02d}")

        return visit_id
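
    # Illustrative example of the GROUPID branch above: for
    # GROUPID = "2022-11-19T01:02:03.456+2" the regex yields the ISO time,
    # the fraction "456" and N = 2, so the visit ID is
    # int(f"{epoch}4560002"), where ``epoch`` is the whole number of seconds
    # between TZERO (2015-01-01T00:00) and 2022-11-19T01:02:03.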

    @cache_translation
    def to_physical_filter(self):
        """Calculate the physical filter name.

        Returns
        -------
        filter : `str`
            Name of filter. Can be a combination of FILTER, FILTER1 and FILTER2
            headers joined by a "~". Returns "unknown" if no filter is declared.
        """
        joined = self._join_keyword_values(["FILTER", "FILTER1", "FILTER2"], delim=FILTER_DELIMITER)
        if not joined:
            joined = "unknown"

        return joined
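
    # Illustrative example (filter names are hypothetical, and this assumes
    # _join_keyword_values simply skips missing headers): with
    # FILTER = "SDSSg", FILTER2 = "ronchi90lpmm" and no FILTER1, the joined
    # physical filter would be "SDSSg~ronchi90lpmm"; with none of the
    # headers set it falls back to "unknown".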

    @cache_translation
    def to_tracking_radec(self):
        if not self.is_on_sky():
            return None

        # RA/DEC are *derived* headers and for the case where the DATE-BEG
        # is 1970 they are garbage and should not be used.
        if self._header["DATE-OBS"] == self._header["DATE"]:
            # A fixed up date -- use AZEL as source of truth
            altaz = self.to_altaz_begin()
            radec = astropy.coordinates.SkyCoord(altaz.transform_to(astropy.coordinates.ICRS()),
                                                 obstime=altaz.obstime,
                                                 location=altaz.location)
        else:
            radecsys = ("RADESYS",)
            radecpairs = (("RASTART", "DECSTART"), ("RA", "DEC"))
            radec = tracking_from_degree_headers(self, radecsys, radecpairs)

        return radec

    @cache_translation
    def to_altaz_begin(self):
        if not self._is_on_mountain():
            return None

        # ALTAZ always relevant unless bias or dark
        if self.to_observation_type() in ("bias", "dark"):
            return None

        return altaz_from_degree_headers(self, (("ELSTART", "AZSTART"),),
                                         self.to_datetime_begin(), is_zd=False)

    @cache_translation
    def to_exposure_group(self):
        """Calculate the exposure group string.

        For LSSTCam and LATISS this is read from the ``GROUPID`` header.
        If that header is missing, the exposure_id is returned instead as
        a string.
        """
        if self.is_key_ok("GROUPID"):
            exposure_group = self._header["GROUPID"]
            self._used_these_cards("GROUPID")
            return exposure_group
        return super().to_exposure_group()

    @cache_translation
    def to_focus_z(self):
        """Return the defocal distance of the camera in units of mm.

        If there is no ``FOCUSZ`` value in the header the default value
        of 0.0 mm is returned.

        Returns
        -------
        focus_z : `astropy.units.Quantity`
            The defocal distance from the header in mm, or the 0.0 mm
            default.
        """
        if self.is_key_ok("FOCUSZ"):
            focus_z = self._header["FOCUSZ"]
            return focus_z * u.mm
        return super().to_focus_z()

    @staticmethod
    def _is_filter_empty(filter):
        """Return `True` if the supplied filter indicates an empty filter
        slot.

        Parameters
        ----------
        filter : `str`
            The filter string to check.

        Returns
        -------
        is_empty : `bool`
            `True` if the filter string looks like it is referring to an
            empty filter slot. For example this can be if the filter is
            "empty" or "empty_2".
        """
        return bool(re.match(r"empty_?\d*$", filter.lower()))
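
    # For example (added for clarity; follows directly from the regex above),
    # _is_filter_empty("EMPTY_1") and _is_filter_empty("empty") both return
    # True, while _is_filter_empty("SDSSg") returns False.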

    def _determine_primary_filter(self):
        """Determine the primary filter from the ``FILTER`` header.

        Returns
        -------
        filter : `str`
            The contents of the ``FILTER`` header with some appropriate
            defaulting.
        """
        if self.is_key_ok("FILTER"):
            physical_filter = self._header["FILTER"]
            self._used_these_cards("FILTER")

            if self._is_filter_empty(physical_filter):
                physical_filter = "empty"
        else:
            # Be explicit about having no knowledge of the filter
            # by setting it to "unknown". It should always have a value.
            physical_filter = "unknown"

            # Warn if an unknown filter matters for this observation type
            obstype = self.to_observation_type()
            if obstype not in ("bias", "dark"):
                log.warning("%s: Unable to determine the filter",
                            self._log_prefix)

        return physical_filter

    @cache_translation
    def to_observing_day(self):
        """Return the day of observation as a YYYYMMDD integer.

        For LSSTCam and other compliant instruments this is the value
        of the DAYOBS header.

        Returns
        -------
        obs_day : `int`
            The day of observation.
        """
        if self.is_key_ok("DAYOBS"):
            self._used_these_cards("DAYOBS")
            return int(self._header["DAYOBS"])

        # Calculate it ourselves, correcting for the Rubin offset
        date = self.to_datetime_begin().tai
        date -= self._ROLLOVER_TIME
        return int(date.strftime("%Y%m%d"))
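
    # Illustrative example: for an exposure starting at 2022-11-19T03:00:00
    # TAI with no DAYOBS header, subtracting the 12 hour rollover gives
    # 2022-11-18T15:00:00, so the observing day is 20221118.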

    @cache_translation
    def to_observation_counter(self):
        """Return the sequence number within the observing day.

        Returns
        -------
        counter : `int`
            The sequence number for this day.
        """
        if self.is_key_ok("SEQNUM"):
            # Some older LATISS data may not have the header
            # but this is corrected in fix_header for LATISS.
            self._used_these_cards("SEQNUM")
            return int(self._header["SEQNUM"])

        # This indicates a problem so we warn and return 0
        log.warning("%s: Unable to determine the observation counter so returning 0",
                    self._log_prefix)
        return 0

    @cache_translation
    def to_boresight_rotation_coord(self):
        """Boresight rotation angle.

        Only relevant for science observations.
        """
        unknown = "unknown"
        if not self.is_on_sky():
            return unknown

        self._used_these_cards("ROTCOORD")
        coord = self._header.get("ROTCOORD", unknown)
        if coord is None:
            coord = unknown
        return coord

    @cache_translation
    def to_boresight_airmass(self):
        """Calculate airmass at boresight at start of observation.

        Notes
        -----
        Early data are missing the AMSTART header so we fall back to
        calculating it from ELSTART.
        """
        if not self.is_on_sky():
            return None

        # This observation should have AMSTART
        amkey = "AMSTART"
        if self.is_key_ok(amkey):
            self._used_these_cards(amkey)
            return self._header[amkey]

        # Instead we need to look at azel
        altaz = self.to_altaz_begin()
        if altaz is not None:
            return altaz.secz.to_value()

        log.warning("%s: Unable to determine airmass of a science observation, returning 1.",
                    self._log_prefix)
        return 1.0

    @cache_translation
    def to_group_counter_start(self):
        # Effectively the start of the visit as determined by the headers.
        counter = self.to_observation_counter()
        # Older data does not have the CURINDEX header.
        if self.is_key_ok("CURINDEX"):
            # CURINDEX is 1-based.
            seq_start = counter - self._header["CURINDEX"] + 1
            self._used_these_cards("CURINDEX")
            return seq_start
        else:
            # If the counter is 0 we need to pick something else
            # that is not going to confuse the visit calculation
            # (since setting everything to 0 will make one big visit).
            return counter if counter != 0 else self.to_exposure_id()

    @cache_translation
    def to_group_counter_end(self):
        # Effectively the end of the visit as determined by the headers.
        counter = self.to_observation_counter()
        # Older data does not have the CURINDEX or MAXINDEX headers.
        if self.is_key_ok("CURINDEX") and self.is_key_ok("MAXINDEX"):
            # CURINDEX is 1-based. CURINDEX == MAXINDEX indicates the
            # final exposure in the sequence.
            remaining = self._header["MAXINDEX"] - self._header["CURINDEX"]
            seq_end = counter + remaining
            self._used_these_cards("CURINDEX", "MAXINDEX")
            return seq_end
        else:
            # If the counter is 0 we need to pick something else
            # that is not going to confuse the visit calculation
            # (since setting everything to 0 will make one big visit).
            return counter if counter != 0 else self.to_exposure_id()
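
    # Worked example (header values are illustrative): with SEQNUM = 105,
    # CURINDEX = 2 and MAXINDEX = 5, group_counter_start is 105 - 2 + 1 = 104
    # and group_counter_end is 105 + (5 - 2) = 108.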

    @cache_translation
    def to_has_simulated_content(self):
        # Check all the simulation flags.
        # We do not know all the simulation flags that we may have so
        # must check every header key. Ideally HIERARCH SIMULATE would
        # be a hierarchical header so _header["SIMULATE"] would return
        # everything. The header looks like:
        #
        # HIERARCH SIMULATE ATMCS = / ATMCS Simulation Mode
        # HIERARCH SIMULATE ATHEXAPOD = 0 / ATHexapod Simulation Mode
        # HIERARCH SIMULATE ATPNEUMATICS = / ATPneumatics Simulation Mode
        # HIERARCH SIMULATE ATDOME = 1 / ATDome Simulation Mode
        # HIERARCH SIMULATE ATSPECTROGRAPH = 0 / ATSpectrograph Simulation Mode
        #
        # So any header that includes "SIMULATE" in the key name and has a
        # true value implies that something in the data is simulated.
        for k, v in self._header.items():
            if "SIMULATE" in k and v:
                return True

        # If the controller is H, P, or Q then the data are simulated.
        ctrlr_key = "CONTRLLR"
        if self.is_key_ok(ctrlr_key):
            controller = self._header[ctrlr_key]
            self._used_these_cards(ctrlr_key)
            if controller in "HPQ":
                return True

        # No simulation flags set.
        return False