Coverage for python/lsst/obs/lsst/translators/lsst.py: 33%

394 statements  

« prev     ^ index     » next       coverage.py v7.4.0, created at 2024-01-10 14:01 +0000

1# This file is currently part of obs_lsst but is written to allow it 

2# to be migrated to the astro_metadata_translator package at a later date. 

3# 

4# This product includes software developed by the LSST Project 

5# (http://www.lsst.org). 

6# See the LICENSE file in this directory for details of code ownership. 

7# 

8# Use of this source code is governed by a 3-clause BSD-style 

9# license that can be found in the LICENSE file. 

10 

11"""Metadata translation support code for LSST headers""" 

12 

# Public API of this module.
__all__ = ("TZERO", "SIMONYI_LOCATION", "read_detector_ids",
           "compute_detector_exposure_id_generic", "LsstBaseTranslator",
           "SIMONYI_TELESCOPE")

16 

17import os.path 

18import yaml 

19import logging 

20import re 

21import datetime 

22import hashlib 

23 

24import astropy.coordinates 

25import astropy.units as u 

26from astropy.time import Time, TimeDelta 

27from astropy.coordinates import EarthLocation 

28 

29from lsst.utils import getPackageDir 

30 

31from astro_metadata_translator import cache_translation, FitsTranslator 

32from astro_metadata_translator.translators.helpers import tracking_from_degree_headers, \ 

33 altaz_from_degree_headers 

34 

35 

# Reference epoch used when deriving integer visit IDs from exposure
# group timestamps (seconds are counted from this moment).
TZERO = Time("2015-01-01T00:00", format="isot", scale="utc")
TZERO_DATETIME = TZERO.to_datetime()

# Delimiter to use for multiple filters/gratings
FILTER_DELIMITER = "~"

# Regex to use for parsing a GROUPID string
GROUP_RE = re.compile(r"^(\d\d\d\d\-\d\d\-\d\dT\d\d:\d\d:\d\d)\.(\d\d\d)(?:[\+#](\d+))?$")

# LSST Default location in the absence of headers
SIMONYI_LOCATION = EarthLocation.from_geodetic(-70.749417, -30.244639, 2663.0)

# Name of the main survey telescope
SIMONYI_TELESCOPE = "Simonyi Survey Telescope"

# Supported controller codes.
# The order here directly relates to the resulting exposure ID
# calculation. Do not reorder. Add new ones to the end.
# OCS, CCS, pHosim, P for simulated OCS, Q for simulated CCS.
CONTROLLERS = "OCHPQ"

# Number of decimal digits allocated to the sequence number in exposure_ids.
_SEQNUM_MAXDIGITS = 5

# Number of decimal digits allocated to the day of observation (and controller
# code) in exposure_ids.
_DAYOBS_MAXDIGITS = 8

# Value added to day_obs for controllers after the default.
_CONTROLLER_INCREMENT = 1000_00_00

# Number of decimal digits used by exposure_ids.
EXPOSURE_ID_MAXDIGITS = _SEQNUM_MAXDIGITS + _DAYOBS_MAXDIGITS

# Root of the obs_lsst package; used to locate camera policy files and
# header correction files.
obs_lsst_packageDir = getPackageDir("obs_lsst")

log = logging.getLogger(__name__)

73 

74 

def read_detector_ids(policyFile):
    """Read a camera policy file and retrieve the mapping from CCD name
    to ID.

    Parameters
    ----------
    policyFile : `str`
        Name of YAML policy file to read, relative to the obs_lsst
        package.

    Returns
    -------
    mapping : `dict` of `str` to (`int`, `str`)
        A `dict` with keys being the full names of the detectors, and the
        value is a `tuple` containing the integer detector number and the
        detector serial number.

    Raises
    ------
    ValueError
        Raised if the policy file can not be opened or read.

    Notes
    -----
    Reads the camera YAML definition file directly and extracts just the
    IDs and serials. This routine does not use the standard
    `~lsst.obs.base.yamlCamera.YAMLCamera` infrastructure or
    `lsst.afw.cameraGeom`. This is because the translators are intended to
    have minimal dependencies on LSST infrastructure.
    """
    file = os.path.join(obs_lsst_packageDir, policyFile)

    # Prefer the fast C parser since these files are large, but fall back
    # to the pure-Python parser when PyYAML was built without libyaml
    # (in which case the CSafeLoader attribute does not exist).
    loader = getattr(yaml, "CSafeLoader", yaml.SafeLoader)

    try:
        with open(file) as fh:
            camera = yaml.load(fh, Loader=loader)
    except OSError as e:
        raise ValueError(f"Could not load camera policy file {file}") from e

    return {ccd: (int(value["id"]), value["serial"])
            for ccd, value in camera["CCDs"].items()}

114 

115 

def compute_detector_exposure_id_generic(exposure_id, detector_num, max_num):
    """Combine an exposure ID and a detector number into a single ID.

    Parameters
    ----------
    exposure_id : `int`
        The exposure ID.
    detector_num : `int`
        The detector number.
    max_num : `int`
        Maximum number of detectors to make space for.

    Returns
    -------
    detector_exposure_id : `int`
        Computed ID: the exposure ID scaled by ``max_num``, plus the
        detector number.

    Raises
    ------
    ValueError
        The detector number is undefined or out of range.
    """
    if detector_num is None:
        raise ValueError("Detector number must be defined.")
    if not 0 <= detector_num < max_num:
        raise ValueError(f"Detector number out of range 0 <= {detector_num} < {max_num}")

    return exposure_id * max_num + detector_num

146 

147 

class LsstBaseTranslator(FitsTranslator):
    """Translation methods useful for all LSST-style headers."""

    # No constant or trivial one-to-one header mappings by default;
    # subclasses populate these as needed.
    _const_map = {}
    _trivial_map = {}

    # Do not specify a name for this translator
    cameraPolicyFile = None
    """Path to policy file relative to obs_lsst root."""

    detectorMapping = None
    """Mapping of detector name to detector number and serial."""

    detectorSerials = None
    """Mapping of detector serial number to raft, number, and name."""

    DETECTOR_MAX = 1000
    """Maximum number of detectors to use when calculating the
    detector_exposure_id.

    Note that because this is the maximum number *of* detectors, for
    zero-based ``detector_num`` values this is one greater than the maximum
    ``detector_num``. It is also often rounded up to the nearest power of
    10 anyway, to allow ``detector_exposure_id`` values to be easily decoded by
    humans.
    """

    _DEFAULT_LOCATION = SIMONYI_LOCATION
    """Default telescope location in absence of relevant FITS headers."""

    _ROLLOVER_TIME = TimeDelta(12*60*60, scale="tai", format="sec")
    """Time delta for the definition of a Rubin Observatory start of day.
    Used when the header is missing. See LSE-400 for details."""

181 

182 @classmethod 

183 def __init_subclass__(cls, **kwargs): 

184 """Ensure that subclasses clear their own detector mapping entries 

185 such that subclasses of translators that use detector mappings 

186 do not pick up the incorrect values from a parent.""" 

187 

188 cls.detectorMapping = None 

189 cls.detectorSerials = None 

190 

191 super().__init_subclass__(**kwargs) 

192 

193 def search_paths(self): 

194 """Search paths to use for LSST data when looking for header correction 

195 files. 

196 

197 Returns 

198 ------- 

199 path : `list` 

200 List with a single element containing the full path to the 

201 ``corrections`` directory within the ``obs_lsst`` package. 

202 """ 

203 return [os.path.join(obs_lsst_packageDir, "corrections")] 

204 

205 @classmethod 

206 def compute_detector_exposure_id(cls, exposure_id, detector_num): 

207 """Compute the detector exposure ID from detector number and 

208 exposure ID. 

209 

210 This is a helper method to allow code working outside the translator 

211 infrastructure to use the same algorithm. 

212 

213 Parameters 

214 ---------- 

215 exposure_id : `int` 

216 Unique exposure ID. 

217 detector_num : `int` 

218 Detector number. 

219 

220 Returns 

221 ------- 

222 detector_exposure_id : `int` 

223 The calculated ID. 

224 """ 

225 from .._packer import RubinDimensionPacker 

226 

227 return RubinDimensionPacker.pack_id_pair(exposure_id, detector_num) 

228 

229 @classmethod 

230 def max_detector_exposure_id(cls): 

231 """The maximum detector exposure ID expected to be generated by 

232 this instrument. 

233 

234 Returns 

235 ------- 

236 max_id : `int` 

237 The maximum value. 

238 """ 

239 max_exposure_id = cls.max_exposure_id() 

240 # We subtract 1 from DETECTOR_MAX because LSST detector_num values are 

241 # zero-based, and detector_max is the maximum number *of* detectors, 

242 # while this returns the (inclusive) maximum ID value. 

243 return cls.compute_detector_exposure_id(max_exposure_id, cls.DETECTOR_MAX - 1) 

244 

245 @classmethod 

246 def max_exposure_id(cls): 

247 """The maximum exposure ID expected from this instrument. 

248 

249 Returns 

250 ------- 

251 max_exposure_id : `int` 

252 The maximum value. 

253 """ 

254 max_date = "2050-12-31T23:59.999" 

255 max_seqnum = 99_999 

256 # This controller triggers the largest numbers 

257 max_controller = CONTROLLERS[-1] 

258 return cls.compute_exposure_id(max_date, max_seqnum, max_controller) 

259 

260 @classmethod 

261 def detector_mapping(cls): 

262 """Returns the mapping of full name to detector ID and serial. 

263 

264 Returns 

265 ------- 

266 mapping : `dict` of `str`:`tuple` 

267 Returns the mapping of full detector name (group+detector) 

268 to detector number and serial. 

269 

270 Raises 

271 ------ 

272 ValueError 

273 Raised if no camera policy file has been registered with this 

274 translation class. 

275 

276 Notes 

277 ----- 

278 Will construct the mapping if none has previously been constructed. 

279 """ 

280 if cls.cameraPolicyFile is not None: 

281 if cls.detectorMapping is None: 

282 cls.detectorMapping = read_detector_ids(cls.cameraPolicyFile) 

283 else: 

284 raise ValueError(f"Translation class '{cls.__name__}' has no registered camera policy file") 

285 

286 return cls.detectorMapping 

287 

288 @classmethod 

289 def detector_serials(cls): 

290 """Obtain the mapping of detector serial to detector group, name, 

291 and number. 

292 

293 Returns 

294 ------- 

295 info : `dict` of `tuple` of (`str`, `str`, `int`) 

296 A `dict` with the serial numbers as keys and values of detector 

297 group, name, and number. 

298 """ 

299 if cls.detectorSerials is None: 

300 detector_mapping = cls.detector_mapping() 

301 

302 if detector_mapping is not None: 

303 # Form mapping to go from serial number to names/numbers 

304 serials = {} 

305 for fullname, (id, serial) in cls.detectorMapping.items(): 

306 raft, detector_name = fullname.split("_") 

307 if serial in serials: 

308 raise RuntimeError(f"Serial {serial} is defined in multiple places") 

309 serials[serial] = (raft, detector_name, id) 

310 cls.detectorSerials = serials 

311 else: 

312 raise RuntimeError("Unable to obtain detector mapping information") 

313 

314 return cls.detectorSerials 

315 

316 @classmethod 

317 def compute_detector_num_from_name(cls, detector_group, detector_name): 

318 """Helper method to return the detector number from the name. 

319 

320 Parameters 

321 ---------- 

322 detector_group : `str` 

323 Name of the detector grouping. This is generally the raft name. 

324 detector_name : `str` 

325 Detector name. 

326 

327 Returns 

328 ------- 

329 num : `int` 

330 Detector number. 

331 """ 

332 fullname = f"{detector_group}_{detector_name}" 

333 

334 num = None 

335 detector_mapping = cls.detector_mapping() 

336 if detector_mapping is None: 

337 raise RuntimeError("Unable to obtain detector mapping information") 

338 

339 if fullname in detector_mapping: 

340 num = detector_mapping[fullname] 

341 else: 

342 log.warning(f"Unable to determine detector number from detector name {fullname}") 

343 return None 

344 

345 return num[0] 

346 

347 @classmethod 

348 def compute_detector_info_from_serial(cls, detector_serial): 

349 """Helper method to return the detector information from the serial. 

350 

351 Parameters 

352 ---------- 

353 detector_serial : `str` 

354 Detector serial ID. 

355 

356 Returns 

357 ------- 

358 info : `tuple` of (`str`, `str`, `int`) 

359 Detector group, name, and number. 

360 """ 

361 serial_mapping = cls.detector_serials() 

362 if serial_mapping is None: 

363 raise RuntimeError("Unable to obtain serial mapping information") 

364 

365 if detector_serial in serial_mapping: 

366 info = serial_mapping[detector_serial] 

367 else: 

368 raise RuntimeError("Unable to determine detector information from detector serial" 

369 f" {detector_serial}") 

370 

371 return info 

372 

373 @staticmethod 

374 def compute_exposure_id(dayobs, seqnum, controller=None): 

375 """Helper method to calculate the exposure_id. 

376 

377 Parameters 

378 ---------- 

379 dayobs : `str` or `int` 

380 Day of observation in either YYYYMMDD or YYYY-MM-DD format. 

381 If the string looks like ISO format it will be truncated before the 

382 ``T`` before being handled. 

383 seqnum : `int` or `str` 

384 Sequence number. 

385 controller : `str`, optional 

386 Controller to use. If this is "O", no change is made to the 

387 exposure ID. If it is "C" a 1000 is added to the year component 

388 of the exposure ID. If it is "H" a 2000 is added to the year 

389 component. This sequence continues with "P" and "Q" controllers. 

390 `None` indicates that the controller is not relevant to the 

391 exposure ID calculation (generally this is the case for test 

392 stand data). 

393 

394 Returns 

395 ------- 

396 exposure_id : `int` 

397 Exposure ID in form YYYYMMDDnnnnn form. 

398 """ 

399 # We really want an integer but the checks require a str. 

400 if isinstance(dayobs, int): 

401 dayobs = str(dayobs) 

402 

403 if "T" in dayobs: 

404 dayobs = dayobs[:dayobs.find("T")] 

405 

406 dayobs = dayobs.replace("-", "") 

407 

408 if len(dayobs) != 8: 

409 raise ValueError(f"Malformed dayobs: {dayobs}") 

410 

411 # Expect no more than 99,999 exposures in a day 

412 if seqnum >= 10**_SEQNUM_MAXDIGITS: 

413 raise ValueError(f"Sequence number ({seqnum}) exceeds limit") 

414 

415 dayobs = int(dayobs) 

416 if dayobs > 20231004 and controller == "C": 

417 # As of this date the CCS controller has a unified counter 

418 # with the OCS, so there is no need to adjust the dayobs 

419 # to make unique exposure IDs. 

420 controller = None 

421 

422 # Camera control changes the exposure ID 

423 if controller is not None: 

424 index = CONTROLLERS.find(controller) 

425 if index == -1: 

426 raise ValueError(f"Supplied controller, '{controller}' is not " 

427 f"in supported list: {CONTROLLERS}") 

428 

429 # Increment a thousand years per controller 

430 dayobs += _CONTROLLER_INCREMENT * index 

431 

432 # Form the number as a string zero padding the sequence number 

433 idstr = f"{dayobs}{seqnum:0{_SEQNUM_MAXDIGITS}d}" 

434 

435 # Exposure ID has to be an integer 

436 return int(idstr) 

437 

    @staticmethod
    def unpack_exposure_id(exposure_id):
        """Unpack an exposure ID into dayobs, seqnum, and controller.

        Parameters
        ----------
        exposure_id : `int`
            Integer exposure ID produced by `compute_exposure_id`.

        Returns
        -------
        dayobs : `str`
            Day of observation as a YYYYMMDD string.
        seqnum : `int`
            Sequence number.
        controller : `str`
            Controller code. Will be `O` (but should be ignored) for IDs
            produced by calling `compute_exposure_id` with ``controller=None`.
        """
        # Split off the low-order sequence-number digits; what remains is
        # the day of observation, possibly offset by the controller code.
        dayobs, seqnum = divmod(exposure_id, 10**_SEQNUM_MAXDIGITS)
        # Real observation years begin with "2", so an unmodified 8-digit
        # dayobs divided by the controller increment gives 2; subtracting
        # 2 therefore recovers the zero-based controller index.
        controller_index = dayobs // _CONTROLLER_INCREMENT - 2
        # Remove the controller offset to restore the original dayobs.
        dayobs -= controller_index * _CONTROLLER_INCREMENT
        return (str(dayobs), seqnum, CONTROLLERS[controller_index], )

461 

462 def _is_on_mountain(self): 

463 """Indicate whether these data are coming from the instrument 

464 installed on the mountain. 

465 

466 Returns 

467 ------- 

468 is : `bool` 

469 `True` if instrument is on the mountain. 

470 """ 

471 if "TSTAND" in self._header: 

472 return False 

473 return True 

474 

475 def is_on_sky(self): 

476 """Determine if this is an on-sky observation. 

477 

478 Returns 

479 ------- 

480 is_on_sky : `bool` 

481 Returns True if this is a observation on sky on the 

482 summit. 

483 """ 

484 # For LSST we think on sky unless tracksys is local 

485 if self.is_key_ok("TRACKSYS"): 

486 if self._header["TRACKSYS"].lower() == "local": 

487 # not on sky 

488 return False 

489 

490 # These are obviously not on sky 

491 if self.to_observation_type() in ("bias", "dark", "flat"): 

492 return False 

493 

494 return self._is_on_mountain() 

495 

496 @cache_translation 

497 def to_location(self): 

498 # Docstring will be inherited. Property defined in properties.py 

499 if not self._is_on_mountain(): 

500 return None 

501 try: 

502 # Try standard FITS headers 

503 return super().to_location() 

504 except KeyError: 

505 return self._DEFAULT_LOCATION 

506 

507 @cache_translation 

508 def to_datetime_begin(self): 

509 # Docstring will be inherited. Property defined in properties.py 

510 self._used_these_cards("MJD-OBS") 

511 return Time(self._header["MJD-OBS"], scale="tai", format="mjd") 

512 

513 @cache_translation 

514 def to_datetime_end(self): 

515 # Docstring will be inherited. Property defined in properties.py 

516 if self.is_key_ok("DATE-END"): 

517 return super().to_datetime_end() 

518 

519 return self.to_datetime_begin() + self.to_exposure_time() 

520 

521 @cache_translation 

522 def to_detector_num(self): 

523 # Docstring will be inherited. Property defined in properties.py 

524 raft = self.to_detector_group() 

525 detector = self.to_detector_name() 

526 return self.compute_detector_num_from_name(raft, detector) 

527 

528 @cache_translation 

529 def to_detector_exposure_id(self): 

530 # Docstring will be inherited. Property defined in properties.py 

531 exposure_id = self.to_exposure_id() 

532 num = self.to_detector_num() 

533 return self.compute_detector_exposure_id(exposure_id, num) 

534 

535 @cache_translation 

536 def to_observation_type(self): 

537 # Docstring will be inherited. Property defined in properties.py 

538 obstype = self._header["IMGTYPE"] 

539 self._used_these_cards("IMGTYPE") 

540 obstype = obstype.lower() 

541 if obstype in ("skyexp", "object"): 

542 obstype = "science" 

543 return obstype 

544 

545 @cache_translation 

546 def to_observation_reason(self): 

547 # Docstring will be inherited. Property defined in properties.py 

548 for key in ("REASON", "TESTTYPE"): 

549 if self.is_key_ok(key): 

550 reason = self._header[key] 

551 self._used_these_cards(key) 

552 return reason.lower() 

553 # no specific header present so use the default translation 

554 return super().to_observation_reason() 

555 

556 @cache_translation 

557 def to_dark_time(self): 

558 """Calculate the dark time. 

559 

560 If a DARKTIME header is not found, the value is assumed to be 

561 identical to the exposure time. 

562 

563 Returns 

564 ------- 

565 dark : `astropy.units.Quantity` 

566 The dark time in seconds. 

567 """ 

568 if self.is_key_ok("DARKTIME"): 

569 darktime = self._header["DARKTIME"]*u.s 

570 self._used_these_cards("DARKTIME") 

571 else: 

572 log.warning("%s: Unable to determine dark time. Setting from exposure time.", 

573 self._log_prefix) 

574 darktime = self.to_exposure_time() 

575 return darktime 

576 

577 @cache_translation 

578 def to_exposure_id(self): 

579 """Generate a unique exposure ID number 

580 

581 This is a combination of DAYOBS and SEQNUM, and optionally 

582 CONTRLLR. 

583 

584 Returns 

585 ------- 

586 exposure_id : `int` 

587 Unique exposure number. 

588 """ 

589 if "CALIB_ID" in self._header: 

590 self._used_these_cards("CALIB_ID") 

591 return None 

592 

593 dayobs = self._header["DAYOBS"] 

594 seqnum = self._header["SEQNUM"] 

595 self._used_these_cards("DAYOBS", "SEQNUM") 

596 

597 if self.is_key_ok("CONTRLLR"): 

598 controller = self._header["CONTRLLR"] 

599 self._used_these_cards("CONTRLLR") 

600 else: 

601 controller = None 

602 

603 return self.compute_exposure_id(dayobs, seqnum, controller=controller) 

604 

    @cache_translation
    def to_visit_id(self):
        """Calculate the visit associated with this exposure.

        Returns
        -------
        visit_id : `int`
            Integer derived from the exposure group string.

        Notes
        -----
        For LATISS and LSSTCam the default visit is derived from the
        exposure group. For other instruments we return the exposure_id.
        """

        exposure_group = self.to_exposure_group()
        # If the group is an int we return it
        try:
            visit_id = int(exposure_group)
            return visit_id
        except ValueError:
            pass

        # A Group is defined as ISO date with an extension
        # The integer must be the same for a given group so we can never
        # use datetime_begin.
        # Nominally a GROUPID looks like "ISODATE+N" where the +N is
        # optional. This can be converted to seconds since epoch with
        # an adjustment for N.
        # For early data lacking that form we hash the group and return
        # the int.
        matches_date = GROUP_RE.match(exposure_group)
        if matches_date:
            iso_str = matches_date.group(1)
            fraction = matches_date.group(2)
            n = matches_date.group(3)
            if n is not None:
                n = int(n)
            else:
                n = 0
            iso = datetime.datetime.strptime(iso_str, "%Y-%m-%dT%H:%M:%S")

            # Seconds since the fixed module reference epoch so that the
            # same group always maps to the same integer.
            tdelta = iso - TZERO_DATETIME
            epoch = int(tdelta.total_seconds())

            # Form the integer from EPOCH + 3 DIGIT FRAC + 0-pad N
            visit_id = int(f"{epoch}{fraction}{n:04d}")
        else:
            # Non-standard string so convert to numbers
            # using a hash function. Use the first N hex digits
            group_bytes = exposure_group.encode("us-ascii")
            hasher = hashlib.blake2b(group_bytes)
            # Need to be big enough it does not possibly clash with the
            # date-based version above
            digest = hasher.hexdigest()[:14]
            visit_id = int(digest, base=16)

            # To help with hash collision, append the string length
            visit_id = int(f"{visit_id}{len(exposure_group):02d}")

        return visit_id

661 

662 @cache_translation 

663 def to_physical_filter(self): 

664 """Calculate the physical filter name. 

665 

666 Returns 

667 ------- 

668 filter : `str` 

669 Name of filter. Can be a combination of FILTER, FILTER1 and FILTER2 

670 headers joined by a "~". Returns "unknown" if no filter is declared 

671 """ 

672 joined = self._join_keyword_values(["FILTER", "FILTER1", "FILTER2"], delim=FILTER_DELIMITER) 

673 if not joined: 

674 joined = "unknown" 

675 

676 # Replace instances of "NONE" with "none". 

677 joined = joined.replace("NONE", "none") 

678 

679 return joined 

680 

    @cache_translation
    def to_tracking_radec(self):
        # RA/DEC are *derived* headers and for the case where the DATE-BEG
        # is 1970 they are garbage and should not be used.
        try:
            if self._header["DATE-OBS"] == self._header["DATE"]:
                # A fixed up date -- use AZEL as source of truth
                altaz = self.to_altaz_begin()
                radec = astropy.coordinates.SkyCoord(altaz.transform_to(astropy.coordinates.ICRS()),
                                                     obstime=altaz.obstime,
                                                     location=altaz.location)
            else:
                radecsys = ("RADESYS",)
                radecpairs = (("RASTART", "DECSTART"), ("RA", "DEC"))
                radec = tracking_from_degree_headers(self, radecsys, radecpairs)
        except Exception:
            # Any failure above (missing headers, bad coordinates) is only
            # acceptable for data that were not formally on sky.
            # If this observation was not formally on sky then we are allowed
            # to return None.
            if self.is_on_sky():
                raise
            radec = None

        return radec

704 

705 @cache_translation 

706 def to_altaz_begin(self): 

707 if not self._is_on_mountain(): 

708 return None 

709 

710 # Always attempt to find the alt/az values regardless of observation 

711 # type. 

712 return altaz_from_degree_headers(self, (("ELSTART", "AZSTART"),), 

713 self.to_datetime_begin(), is_zd=False) 

714 

715 @cache_translation 

716 def to_exposure_group(self): 

717 """Calculate the exposure group string. 

718 

719 For LSSTCam and LATISS this is read from the ``GROUPID`` header. 

720 If that header is missing the exposure_id is returned instead as 

721 a string. 

722 """ 

723 if self.is_key_ok("GROUPID"): 

724 exposure_group = self._header["GROUPID"] 

725 self._used_these_cards("GROUPID") 

726 return exposure_group 

727 return super().to_exposure_group() 

728 

729 @cache_translation 

730 def to_focus_z(self): 

731 """Return the defocal distance of the camera in units of mm. 

732 If there is no ``FOCUSZ`` value in the header it will return 

733 the default 0.0mm value. 

734 

735 Returns 

736 ------- 

737 focus_z: `astropy.units.Quantity` 

738 The defocal distance from header in mm or the 0.0mm default 

739 """ 

740 if self.is_key_ok("FOCUSZ"): 

741 focus_z = self._header["FOCUSZ"] 

742 return focus_z * u.mm 

743 return super().to_focus_z() 

744 

745 @staticmethod 

746 def _is_filter_empty(filter): 

747 """Return true if the supplied filter indicates an empty filter slot 

748 

749 Parameters 

750 ---------- 

751 filter : `str` 

752 The filter string to check. 

753 

754 Returns 

755 ------- 

756 is_empty : `bool` 

757 `True` if the filter string looks like it is referring to an 

758 empty filter slot. For example this can be if the filter is 

759 "empty" or "empty_2". 

760 """ 

761 return bool(re.match(r"empty_?\d*$", filter.lower())) 

762 

763 def _determine_primary_filter(self): 

764 """Determine the primary filter from the ``FILTER`` header. 

765 

766 Returns 

767 ------- 

768 filter : `str` 

769 The contents of the ``FILTER`` header with some appropriate 

770 defaulting. 

771 """ 

772 

773 if self.is_key_ok("FILTER"): 

774 physical_filter = self._header["FILTER"] 

775 self._used_these_cards("FILTER") 

776 

777 if self._is_filter_empty(physical_filter): 

778 physical_filter = "empty" 

779 else: 

780 # Be explicit about having no knowledge of the filter 

781 # by setting it to "unknown". It should always have a value. 

782 physical_filter = "unknown" 

783 

784 # Warn if the filter being unknown is important 

785 obstype = self.to_observation_type() 

786 if obstype not in ("bias", "dark"): 

787 log.warning("%s: Unable to determine the filter", 

788 self._log_prefix) 

789 

790 return physical_filter 

791 

792 @cache_translation 

793 def to_observing_day(self): 

794 """Return the day of observation as YYYYMMDD integer. 

795 

796 For LSSTCam and other compliant instruments this is the value 

797 of the DAYOBS header. 

798 

799 Returns 

800 ------- 

801 obs_day : `int` 

802 The day of observation. 

803 """ 

804 if self.is_key_ok("DAYOBS"): 

805 self._used_these_cards("DAYOBS") 

806 return int(self._header["DAYOBS"]) 

807 

808 # Calculate it ourselves correcting for the Rubin offset 

809 date = self.to_datetime_begin().tai 

810 date -= self._ROLLOVER_TIME 

811 return int(date.strftime("%Y%m%d")) 

812 

813 @cache_translation 

814 def to_observation_counter(self): 

815 """Return the sequence number within the observing day. 

816 

817 Returns 

818 ------- 

819 counter : `int` 

820 The sequence number for this day. 

821 """ 

822 if self.is_key_ok("SEQNUM"): 

823 # Some older LATISS data may not have the header 

824 # but this is corrected in fix_header for LATISS. 

825 self._used_these_cards("SEQNUM") 

826 return int(self._header["SEQNUM"]) 

827 

828 # This indicates a problem so we warn and return a 0 

829 log.warning("%s: Unable to determine the observation counter so returning 0", 

830 self._log_prefix) 

831 return 0 

832 

833 @cache_translation 

834 def to_boresight_rotation_coord(self): 

835 """Boresight rotation angle. 

836 

837 Only relevant for science observations. 

838 """ 

839 unknown = "unknown" 

840 if not self.is_on_sky(): 

841 return unknown 

842 

843 self._used_these_cards("ROTCOORD") 

844 coord = self._header.get("ROTCOORD", unknown) 

845 if coord is None: 

846 coord = unknown 

847 return coord 

848 

849 @cache_translation 

850 def to_boresight_airmass(self): 

851 """Calculate airmass at boresight at start of observation. 

852 

853 Notes 

854 ----- 

855 Early data are missing AMSTART header so we fall back to calculating 

856 it from ELSTART. 

857 """ 

858 if not self.is_on_sky(): 

859 return None 

860 

861 # This observation should have AMSTART 

862 amkey = "AMSTART" 

863 if self.is_key_ok(amkey): 

864 self._used_these_cards(amkey) 

865 return self._header[amkey] 

866 

867 # Instead we need to look at azel 

868 altaz = self.to_altaz_begin() 

869 if altaz is not None: 

870 return altaz.secz.to_value() 

871 

872 log.warning("%s: Unable to determine airmass of a science observation, returning 1.", 

873 self._log_prefix) 

874 return 1.0 

875 

876 @cache_translation 

877 def to_group_counter_start(self): 

878 # Effectively the start of the visit as determined by the headers. 

879 counter = self.to_observation_counter() 

880 # Older data does not have the CURINDEX header. 

881 if self.is_key_ok("CURINDEX"): 

882 # CURINDEX is 1-based. 

883 seq_start = counter - self._header["CURINDEX"] + 1 

884 self._used_these_cards("CURINDEX") 

885 return seq_start 

886 else: 

887 # If the counter is 0 we need to pick something else 

888 # that is not going to confuse the visit calculation 

889 # (since setting everything to 0 will make one big visit). 

890 return counter if counter != 0 else self.to_exposure_id() 

891 

892 @cache_translation 

893 def to_group_counter_end(self): 

894 # Effectively the end of the visit as determined by the headers. 

895 counter = self.to_observation_counter() 

896 # Older data does not have the CURINDEX or MAXINDEX headers. 

897 if self.is_key_ok("CURINDEX") and self.is_key_ok("MAXINDEX"): 

898 # CURINDEX is 1-based. CURINDEX == MAXINDEX indicates the 

899 # final exposure in the sequence. 

900 remaining = self._header["MAXINDEX"] - self._header["CURINDEX"] 

901 seq_end = counter + remaining 

902 self._used_these_cards("CURINDEX", "MAXINDEX") 

903 return seq_end 

904 else: 

905 # If the counter is 0 we need to pick something else 

906 # that is not going to confuse the visit calculation 

907 # (since setting everything to 0 will make one big visit). 

908 return counter if counter != 0 else self.to_exposure_id() 

909 

910 @cache_translation 

911 def to_has_simulated_content(self): 

912 # Check all the simulation flags. 

913 # We do not know all the simulation flags that we may have so 

914 # must check every header key. Ideally HIERARCH SIMULATE would 

915 # be a hierarchical header so _header["SIMULATE"] would return 

916 # everything. The header looks like: 

917 # 

918 # HIERARCH SIMULATE ATMCS = / ATMCS Simulation Mode 

919 # HIERARCH SIMULATE ATHEXAPOD = 0 / ATHexapod Simulation Mode 

920 # HIERARCH SIMULATE ATPNEUMATICS = / ATPneumatics Simulation Mode 

921 # HIERARCH SIMULATE ATDOME = 1 / ATDome Simulation Mode 

922 # HIERARCH SIMULATE ATSPECTROGRAPH = 0 / ATSpectrograph Simulation Mode 

923 # 

924 # So any header that includes "SIMULATE" in the key name and has a 

925 # true value implies that something in the data is simulated. 

926 for k, v in self._header.items(): 

927 if "SIMULATE" in k and v: 

928 return True 

929 

930 # If the controller is H, P, or Q then the data are simulated. 

931 ctrlr_key = "CONTRLLR" 

932 if self.is_key_ok(ctrlr_key): 

933 controller = self._header[ctrlr_key] 

934 self._used_these_cards(ctrlr_key) 

935 if controller in "HPQ": 

936 return True 

937 

938 # No simulation flags set. 

939 return False 

940 

941 @cache_translation 

942 def to_relative_humidity(self) -> float | None: 

943 key = "HUMIDITY" 

944 if self.is_key_ok(key): 

945 self._used_these_cards(key) 

946 return self._header[key] 

947 

948 return None 

949 

950 @cache_translation 

951 def to_pressure(self): 

952 key = "PRESSURE" 

953 if self.is_key_ok(key): 

954 value = self._header[key] 

955 # There has been an inconsistency in units for the pressure reading 

956 # so we need to adjust for this. 

957 if value > 10_000: 

958 unit = u.Pa 

959 else: 

960 unit = u.hPa 

961 return value * unit 

962 

963 return None 

964 

965 @cache_translation 

966 def to_temperature(self): 

967 key = "AIRTEMP" 

968 if self.is_key_ok(key): 

969 return self._header[key] * u.deg_C 

970 return None