Coverage for python/lsst/obs/lsst/translators/lsst.py: 33%

403 statements  

coverage.py v7.5.1, created at 2024-05-12 02:12 -0700

# This file is currently part of obs_lsst but is written to allow it
# to be migrated to the astro_metadata_translator package at a later date.
#
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the LICENSE file in this directory for details of code ownership.
#
# Use of this source code is governed by a 3-clause BSD-style
# license that can be found in the LICENSE file.

"""Metadata translation support code for LSST headers"""

__all__ = ("TZERO", "SIMONYI_LOCATION", "read_detector_ids",
           "compute_detector_exposure_id_generic", "LsstBaseTranslator",
           "SIMONYI_TELESCOPE")

import os.path
import yaml
import logging
import re
import datetime
import hashlib

import astropy.coordinates
import astropy.units as u
from astropy.time import Time, TimeDelta
from astropy.coordinates import EarthLocation

from lsst.utils import getPackageDir

from astro_metadata_translator import cache_translation, FitsTranslator
from astro_metadata_translator.translators.helpers import tracking_from_degree_headers, \
    altaz_from_degree_headers


TZERO = Time("2015-01-01T00:00", format="isot", scale="utc")
TZERO_DATETIME = TZERO.to_datetime()

# Delimiter to use for multiple filters/gratings
FILTER_DELIMITER = "~"

# Regex to use for parsing a GROUPID string
GROUP_RE = re.compile(r"^(\d\d\d\d\-\d\d\-\d\dT\d\d:\d\d:\d\d)\.(\d\d\d)(?:[\+#](\d+))?$")
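
# For reference, an illustrative GROUPID matching this pattern is
# "2023-10-05T12:34:56.789+3" (a hypothetical value): group(1) is the ISO
# date/time, group(2) the millisecond fraction "789", and the optional
# group(3) the counter "3". Plain values without the "+N"/"#N" suffix also
# match, with group(3) set to None.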

# LSST Default location in the absence of headers
SIMONYI_LOCATION = EarthLocation.from_geodetic(-70.749417, -30.244639, 2663.0)

# Name of the main survey telescope
SIMONYI_TELESCOPE = "Simonyi Survey Telescope"

# Supported controller codes.
# The order here directly relates to the resulting exposure ID
# calculation. Do not reorder. Add new ones to the end.
# O for OCS, C for CCS, H for pHosim, P for simulated OCS, Q for simulated
# CCS, and S for simulated images.
SIMULATED_CONTROLLERS = "HPQS"
CONTROLLERS = "OC" + SIMULATED_CONTROLLERS

# Number of decimal digits allocated to the sequence number in exposure_ids.
_SEQNUM_MAXDIGITS = 5

# Number of decimal digits allocated to the day of observation (and controller
# code) in exposure_ids.
_DAYOBS_MAXDIGITS = 8

# Value added to day_obs for controllers after the default.
_CONTROLLER_INCREMENT = 1000_00_00

# Number of decimal digits used by exposure_ids.
EXPOSURE_ID_MAXDIGITS = _SEQNUM_MAXDIGITS + _DAYOBS_MAXDIGITS
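
# Taken together, these constants mean an exposure_id is a decimal number of
# up to 13 digits: an 8-digit day_obs (possibly shifted by the controller
# offset) followed by a zero-padded 5-digit sequence number. For example
# (illustrative), day_obs 20231225 with seqnum 123 packs to 2023122500123.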

obs_lsst_packageDir = getPackageDir("obs_lsst")

log = logging.getLogger(__name__)


def read_detector_ids(policyFile):
    """Read a camera policy file and retrieve the mapping from CCD name
    to ID.

    Parameters
    ----------
    policyFile : `str`
        Name of YAML policy file to read, relative to the obs_lsst
        package.

    Returns
    -------
    mapping : `dict` of `str` to (`int`, `str`)
        A `dict` with keys being the full names of the detectors, and the
        value is a `tuple` containing the integer detector number and the
        detector serial number.

    Notes
    -----
    Reads the camera YAML definition file directly and extracts just the
    IDs and serials. This routine does not use the standard
    `~lsst.obs.base.yamlCamera.YAMLCamera` infrastructure or
    `lsst.afw.cameraGeom`. This is because the translators are intended to
    have minimal dependencies on LSST infrastructure.
    """

    file = os.path.join(obs_lsst_packageDir, policyFile)
    try:
        with open(file) as fh:
            # Use the fast parser since these files are large
            camera = yaml.load(fh, Loader=yaml.CSafeLoader)
    except OSError as e:
        raise ValueError(f"Could not load camera policy file {file}") from e

    mapping = {}
    for ccd, value in camera["CCDs"].items():
        mapping[ccd] = (int(value["id"]), value["serial"])

    return mapping
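
# For illustration only (names and values invented), the relevant part of a
# camera policy file is expected to look like:
#
#     CCDs:
#       R22_S11:
#         id: 94
#         serial: ITL-3800C-0123
#
# which this function would return as {"R22_S11": (94, "ITL-3800C-0123")}.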


def compute_detector_exposure_id_generic(exposure_id, detector_num, max_num):
    """Compute the detector_exposure_id from the exposure id and the
    detector number.

    Parameters
    ----------
    exposure_id : `int`
        The exposure ID.
    detector_num : `int`
        The detector number.
    max_num : `int`
        Maximum number of detectors to make space for.

    Returns
    -------
    detector_exposure_id : `int`
        Computed ID.

    Raises
    ------
    ValueError
        The detector number is out of range.
    """

    if detector_num is None:
        raise ValueError("Detector number must be defined.")
    if detector_num >= max_num or detector_num < 0:
        raise ValueError(f"Detector number out of range 0 <= {detector_num} < {max_num}")

    return max_num*exposure_id + detector_num
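
# Worked example (illustrative values): with max_num=1000, exposure_id
# 2023122500123 and detector_num 42 give
# 1000 * 2023122500123 + 42 = 2023122500123042, i.e. the detector number
# occupies the trailing decimal digits.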


class LsstBaseTranslator(FitsTranslator):
    """Translation methods useful for all LSST-style headers."""

    _const_map = {}
    _trivial_map = {}

    # Do not specify a name for this translator
    cameraPolicyFile = None
    """Path to policy file relative to obs_lsst root."""

    detectorMapping = None
    """Mapping of detector name to detector number and serial."""

    detectorSerials = None
    """Mapping of detector serial number to raft, number, and name."""

    DETECTOR_MAX = 1000
    """Maximum number of detectors to use when calculating the
    detector_exposure_id.

    Note that because this is the maximum number *of* detectors, for
    zero-based ``detector_num`` values this is one greater than the maximum
    ``detector_num``. It is also often rounded up to the nearest power of
    10 anyway, to allow ``detector_exposure_id`` values to be easily decoded
    by humans.
    """

    _DEFAULT_LOCATION = SIMONYI_LOCATION
    """Default telescope location in absence of relevant FITS headers."""

    _ROLLOVER_TIME = TimeDelta(12*60*60, scale="tai", format="sec")
    """Time delta for the definition of a Rubin Observatory start of day.
    Used when the header is missing. See LSE-400 or SITCOMTN-032 for details.
    """

    @classmethod
    def __init_subclass__(cls, **kwargs):
        """Ensure that subclasses clear their own detector mapping entries
        so that subclasses of translators that use detector mappings do not
        pick up incorrect values from a parent."""

        cls.detectorMapping = None
        cls.detectorSerials = None

        super().__init_subclass__(**kwargs)

    def search_paths(self):
        """Search paths to use for LSST data when looking for header correction
        files.

        Returns
        -------
        path : `list`
            List with a single element containing the full path to the
            ``corrections`` directory within the ``obs_lsst`` package.
        """
        return [os.path.join(obs_lsst_packageDir, "corrections")]

    @classmethod
    def observing_date_to_offset(cls, observing_date: astropy.time.Time) -> astropy.time.TimeDelta | None:
        """Return the offset to use when calculating the observing day.

        Parameters
        ----------
        observing_date : `astropy.time.Time`
            The date of the observation. Unused.

        Returns
        -------
        offset : `astropy.time.TimeDelta`
            The offset to apply. The default implementation returns a fixed
            number but subclasses can return a different value depending
            on whether the instrument is in the instrument lab or on the
            mountain.
        """
        return cls._ROLLOVER_TIME
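
    # Illustrative example (assuming the caller subtracts this offset from the
    # observation time before taking the calendar date, as the base
    # astro_metadata_translator observing-day calculation does): an exposure
    # taken at 2023-10-06T03:00:00 UTC maps to observing day 20231005, since
    # 03:00 is before the 12-hour rollover.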

    @classmethod
    def compute_detector_exposure_id(cls, exposure_id, detector_num):
        """Compute the detector exposure ID from detector number and
        exposure ID.

        This is a helper method to allow code working outside the translator
        infrastructure to use the same algorithm.

        Parameters
        ----------
        exposure_id : `int`
            Unique exposure ID.
        detector_num : `int`
            Detector number.

        Returns
        -------
        detector_exposure_id : `int`
            The calculated ID.
        """
        from .._packer import RubinDimensionPacker

        return RubinDimensionPacker.pack_id_pair(exposure_id, detector_num)

    @classmethod
    def max_detector_exposure_id(cls):
        """The maximum detector exposure ID expected to be generated by
        this instrument.

        Returns
        -------
        max_id : `int`
            The maximum value.
        """
        max_exposure_id = cls.max_exposure_id()
        # We subtract 1 from DETECTOR_MAX because LSST detector_num values
        # are zero-based, and DETECTOR_MAX is the maximum number *of*
        # detectors, while this returns the (inclusive) maximum ID value.
        return cls.compute_detector_exposure_id(max_exposure_id, cls.DETECTOR_MAX - 1)

    @classmethod
    def max_exposure_id(cls):
        """The maximum exposure ID expected from this instrument.

        Returns
        -------
        max_exposure_id : `int`
            The maximum value.
        """
        max_date = "2050-12-31T23:59.999"
        max_seqnum = 99_999
        # This controller triggers the largest numbers
        max_controller = CONTROLLERS[-1]
        return cls.compute_exposure_id(max_date, max_seqnum, max_controller)
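
    # For reference, with the constants above this evaluates to
    # 7050123199999: day_obs 20501231 shifted by five controller increments
    # (controller "S") followed by the maximum sequence number 99999.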

    @classmethod
    def detector_mapping(cls):
        """Returns the mapping of full name to detector ID and serial.

        Returns
        -------
        mapping : `dict` of `str`:`tuple`
            Returns the mapping of full detector name (group+detector)
            to detector number and serial.

        Raises
        ------
        ValueError
            Raised if no camera policy file has been registered with this
            translation class.

        Notes
        -----
        Will construct the mapping if none has previously been constructed.
        """
        if cls.cameraPolicyFile is not None:
            if cls.detectorMapping is None:
                cls.detectorMapping = read_detector_ids(cls.cameraPolicyFile)
        else:
            raise ValueError(f"Translation class '{cls.__name__}' has no registered camera policy file")

        return cls.detectorMapping

    @classmethod
    def detector_serials(cls):
        """Obtain the mapping of detector serial to detector group, name,
        and number.

        Returns
        -------
        info : `dict` of `tuple` of (`str`, `str`, `int`)
            A `dict` with the serial numbers as keys and values of detector
            group, name, and number.
        """
        if cls.detectorSerials is None:
            detector_mapping = cls.detector_mapping()

            if detector_mapping is not None:
                # Form mapping to go from serial number to names/numbers
                serials = {}
                for fullname, (id, serial) in cls.detectorMapping.items():
                    raft, detector_name = fullname.split("_")
                    if serial in serials:
                        raise RuntimeError(f"Serial {serial} is defined in multiple places")
                    serials[serial] = (raft, detector_name, id)
                cls.detectorSerials = serials
            else:
                raise RuntimeError("Unable to obtain detector mapping information")

        return cls.detectorSerials

    @classmethod
    def compute_detector_num_from_name(cls, detector_group, detector_name):
        """Helper method to return the detector number from the name.

        Parameters
        ----------
        detector_group : `str`
            Name of the detector grouping. This is generally the raft name.
        detector_name : `str`
            Detector name.

        Returns
        -------
        num : `int`
            Detector number.
        """
        fullname = f"{detector_group}_{detector_name}"

        num = None
        detector_mapping = cls.detector_mapping()
        if detector_mapping is None:
            raise RuntimeError("Unable to obtain detector mapping information")

        if fullname in detector_mapping:
            num = detector_mapping[fullname]
        else:
            log.warning(f"Unable to determine detector number from detector name {fullname}")
            return None

        return num[0]

    @classmethod
    def compute_detector_info_from_serial(cls, detector_serial):
        """Helper method to return the detector information from the serial.

        Parameters
        ----------
        detector_serial : `str`
            Detector serial ID.

        Returns
        -------
        info : `tuple` of (`str`, `str`, `int`)
            Detector group, name, and number.
        """
        serial_mapping = cls.detector_serials()
        if serial_mapping is None:
            raise RuntimeError("Unable to obtain serial mapping information")

        if detector_serial in serial_mapping:
            info = serial_mapping[detector_serial]
        else:
            raise RuntimeError("Unable to determine detector information from detector serial"
                               f" {detector_serial}")

        return info

    @staticmethod
    def compute_exposure_id(dayobs, seqnum, controller=None):
        """Helper method to calculate the exposure_id.

        Parameters
        ----------
        dayobs : `str` or `int`
            Day of observation in either YYYYMMDD or YYYY-MM-DD format.
            If the string looks like ISO format it will be truncated at the
            ``T`` before being handled.
        seqnum : `int` or `str`
            Sequence number.
        controller : `str`, optional
            Controller to use. If this is "O", no change is made to the
            exposure ID. If it is "C" a 1000 is added to the year component
            of the exposure ID. If it is "H" a 2000 is added to the year
            component. This sequence continues with "P" and "Q" controllers.
            `None` indicates that the controller is not relevant to the
            exposure ID calculation (generally this is the case for test
            stand data).

        Returns
        -------
        exposure_id : `int`
            Exposure ID in YYYYMMDDnnnnn form.
        """
        # We really want an integer but the checks require a str.
        if isinstance(dayobs, int):
            dayobs = str(dayobs)

        if "T" in dayobs:
            dayobs = dayobs[:dayobs.find("T")]

        dayobs = dayobs.replace("-", "")

        if len(dayobs) != 8:
            raise ValueError(f"Malformed dayobs: {dayobs}")

        # Expect no more than 99,999 exposures in a day
        if seqnum >= 10**_SEQNUM_MAXDIGITS:
            raise ValueError(f"Sequence number ({seqnum}) exceeds limit")

        dayobs = int(dayobs)
        if dayobs > 20231004 and controller == "C":
            # As of this date the CCS controller has a unified counter
            # with the OCS, so there is no need to adjust the dayobs
            # to make unique exposure IDs.
            controller = None

        # The camera controller changes the exposure ID
        if controller is not None:
            index = CONTROLLERS.find(controller)
            if index == -1:
                raise ValueError(f"Supplied controller, '{controller}' is not "
                                 f"in supported list: {CONTROLLERS}")

            # Increment a thousand years per controller
            dayobs += _CONTROLLER_INCREMENT * index

        # Form the number as a string, zero padding the sequence number
        idstr = f"{dayobs}{seqnum:0{_SEQNUM_MAXDIGITS}d}"

        # Exposure ID has to be an integer
        return int(idstr)
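
    # Worked examples (illustrative): compute_exposure_id("2023-12-25", 123)
    # returns 2023122500123, and compute_exposure_id("2023-07-01", 123,
    # controller="C") returns 3023070100123 because "C" is the second entry
    # in CONTROLLERS and shifts day_obs by one controller increment.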

    @staticmethod
    def unpack_exposure_id(exposure_id):
        """Unpack an exposure ID into dayobs, seqnum, and controller.

        Parameters
        ----------
        exposure_id : `int`
            Integer exposure ID produced by `compute_exposure_id`.

        Returns
        -------
        dayobs : `str`
            Day of observation as a YYYYMMDD string.
        seqnum : `int`
            Sequence number.
        controller : `str`
            Controller code. Will be ``O`` (but should be ignored) for IDs
            produced by calling `compute_exposure_id` with ``controller=None``.
        """
        dayobs, seqnum = divmod(exposure_id, 10**_SEQNUM_MAXDIGITS)
        controller_index = dayobs // _CONTROLLER_INCREMENT - 2
        dayobs -= controller_index * _CONTROLLER_INCREMENT
        return (str(dayobs), seqnum, CONTROLLERS[controller_index], )
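
    # Worked example (illustrative): unpack_exposure_id(3023070100123)
    # returns ("20230701", 123, "C"), inverting the compute_exposure_id
    # example above, and unpack_exposure_id(2023122500123) returns
    # ("20231225", 123, "O").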

    def _is_on_mountain(self):
        """Indicate whether these data are coming from the instrument
        installed on the mountain.

        Returns
        -------
        is : `bool`
            `True` if the instrument is on the mountain.
        """
        if "TSTAND" in self._header:
            return False
        return True

    def is_on_sky(self):
        """Determine if this is an on-sky observation.

        Returns
        -------
        is_on_sky : `bool`
            Returns `True` if this is an on-sky observation taken on the
            summit.
        """
        # For LSST we assume on sky unless the tracking system is local
        if self.is_key_ok("TRACKSYS"):
            if self._header["TRACKSYS"].lower() == "local":
                # not on sky
                return False

        # These are obviously not on sky
        if self.to_observation_type() in ("bias", "dark", "flat"):
            return False

        return self._is_on_mountain()

    @cache_translation
    def to_location(self):
        # Docstring will be inherited. Property defined in properties.py
        if not self._is_on_mountain():
            return None
        try:
            # Try standard FITS headers
            return super().to_location()
        except (KeyError, TypeError):
            return self._DEFAULT_LOCATION

    @cache_translation
    def to_datetime_begin(self):
        # Docstring will be inherited. Property defined in properties.py
        self._used_these_cards("MJD-OBS")
        return Time(self._header["MJD-OBS"], scale="tai", format="mjd")

    @cache_translation
    def to_datetime_end(self):
        # Docstring will be inherited. Property defined in properties.py
        if self.is_key_ok("DATE-END"):
            return super().to_datetime_end()

        exposure_time = self.to_exposure_time()
        if exposure_time.value < 0.0:
            # Some translators deliberately return -1.0s if the exposure
            # time can not be determined. In that scenario set the end time
            # to the same value as the start time.
            return self.to_datetime_begin()

        return self.to_datetime_begin() + exposure_time

    @cache_translation
    def to_detector_num(self):
        # Docstring will be inherited. Property defined in properties.py
        raft = self.to_detector_group()
        detector = self.to_detector_name()
        return self.compute_detector_num_from_name(raft, detector)

    @cache_translation
    def to_detector_exposure_id(self):
        # Docstring will be inherited. Property defined in properties.py
        exposure_id = self.to_exposure_id()
        num = self.to_detector_num()
        return self.compute_detector_exposure_id(exposure_id, num)

    @cache_translation
    def to_observation_type(self):
        # Docstring will be inherited. Property defined in properties.py
        obstype = self._header["IMGTYPE"]
        self._used_these_cards("IMGTYPE")
        obstype = obstype.lower()
        if obstype in ("skyexp", "object"):
            obstype = "science"
        return obstype

    @cache_translation
    def to_observation_reason(self):
        # Docstring will be inherited. Property defined in properties.py
        for key in ("REASON", "TESTTYPE"):
            if self.is_key_ok(key):
                reason = self._header[key]
                self._used_these_cards(key)
                return reason.lower()
        # No specific header present so use the default translation
        return super().to_observation_reason()

    @cache_translation
    def to_dark_time(self):
        """Calculate the dark time.

        If a DARKTIME header is not found, the value is assumed to be
        identical to the exposure time.

        Returns
        -------
        dark : `astropy.units.Quantity`
            The dark time in seconds.
        """
        if self.is_key_ok("DARKTIME"):
            darktime = self._header["DARKTIME"]*u.s
            self._used_these_cards("DARKTIME")
        else:
            log.warning("%s: Unable to determine dark time. Setting from exposure time.",
                        self._log_prefix)
            darktime = self.to_exposure_time()
        return darktime

    def _get_controller_code(self) -> str | None:
        """Return the controller code.

        Returns
        -------
        code : `str`
            Single character code representing the controller. Returns
            `None` if no controller can be determined.
        """
        key = "CONTRLLR"
        if self.is_key_ok(key):
            controller = self._header[key]
            self._used_these_cards(key)
        else:
            controller = None
        return controller

    @cache_translation
    def to_exposure_id(self):
        """Generate a unique exposure ID number.

        This is a combination of DAYOBS and SEQNUM, and optionally
        CONTRLLR.

        Returns
        -------
        exposure_id : `int`
            Unique exposure number.
        """
        if "CALIB_ID" in self._header:
            self._used_these_cards("CALIB_ID")
            return None

        dayobs = self._header["DAYOBS"]
        seqnum = self._header["SEQNUM"]
        self._used_these_cards("DAYOBS", "SEQNUM")

        controller = self._get_controller_code()

        return self.compute_exposure_id(dayobs, seqnum, controller=controller)

    @cache_translation
    def to_visit_id(self):
        """Calculate the visit associated with this exposure.

        Notes
        -----
        For LATISS and LSSTCam the default visit is derived from the
        exposure group. For other instruments we return the exposure_id.
        """

        exposure_group = self.to_exposure_group()
        # If the group is an int we return it
        try:
            visit_id = int(exposure_group)
            return visit_id
        except ValueError:
            pass

        # A group is defined as an ISO date with an extension.
        # The integer must be the same for a given group so we can never
        # use datetime_begin.
        # Nominally a GROUPID looks like "ISODATE+N" where the +N is
        # optional. This can be converted to seconds since epoch with
        # an adjustment for N.
        # For early data lacking that form we hash the group and return
        # the int.
        matches_date = GROUP_RE.match(exposure_group)
        if matches_date:
            iso_str = matches_date.group(1)
            fraction = matches_date.group(2)
            n = matches_date.group(3)
            if n is not None:
                n = int(n)
            else:
                n = 0
            iso = datetime.datetime.strptime(iso_str, "%Y-%m-%dT%H:%M:%S")

            tdelta = iso - TZERO_DATETIME
            epoch = int(tdelta.total_seconds())

            # Form the integer from EPOCH + 3-digit fraction + zero-padded N
            visit_id = int(f"{epoch}{fraction}{n:04d}")
        else:
            # Non-standard string, so convert to a number using a hash
            # function and use the first N hex digits.
            group_bytes = exposure_group.encode("us-ascii")
            hasher = hashlib.blake2b(group_bytes)
            # Needs to be big enough that it cannot clash with the
            # date-based version above.
            digest = hasher.hexdigest()[:14]
            visit_id = int(digest, base=16)

            # To reduce the chance of a hash collision, append the string
            # length.
            visit_id = int(f"{visit_id}{len(exposure_group):02d}")

        return visit_id
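
    # Worked example (illustrative): for GROUPID "2023-10-05T12:34:56.789+3"
    # the epoch relative to TZERO (2015-01-01) is 276438896 seconds, so the
    # visit ID becomes int("276438896" + "789" + "0003") = 2764388967890003.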

    @cache_translation
    def to_physical_filter(self):
        """Calculate the physical filter name.

        Returns
        -------
        filter : `str`
            Name of filter. Can be a combination of the FILTER, FILTER1, and
            FILTER2 headers joined by a "~". Returns "unknown" if no filter
            is declared.
        """
        joined = self._join_keyword_values(["FILTER", "FILTER1", "FILTER2"], delim=FILTER_DELIMITER)
        if not joined:
            joined = "unknown"

        # Replace instances of "NONE" with "none".
        joined = joined.replace("NONE", "none")

        return joined
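
    # Illustrative example (assuming the inherited _join_keyword_values
    # helper concatenates the defined header values with the delimiter):
    # FILTER="SDSSr_65mm" and FILTER1="empty_1" would yield
    # "SDSSr_65mm~empty_1", while headers that are all missing or undefined
    # would yield "unknown".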

    @cache_translation
    def to_tracking_radec(self):
        # RA/DEC are *derived* headers and for the case where the DATE-BEG
        # is 1970 they are garbage and should not be used.
        try:
            if self._header["DATE-OBS"] == self._header["DATE"]:
                # A fixed up date -- use AZEL as the source of truth
                altaz = self.to_altaz_begin()
                radec = astropy.coordinates.SkyCoord(altaz.transform_to(astropy.coordinates.ICRS()),
                                                     obstime=altaz.obstime,
                                                     location=altaz.location)
            else:
                radecsys = ("RADESYS",)
                radecpairs = (("RASTART", "DECSTART"), ("RA", "DEC"))
                radec = tracking_from_degree_headers(self, radecsys, radecpairs)
        except Exception:
            # If this observation was not formally on sky then we are allowed
            # to return None.
            if self.is_on_sky():
                raise
            radec = None

        return radec

    @cache_translation
    def to_altaz_begin(self):
        if not self._is_on_mountain():
            return None

        # H controller data are sometimes science observations that lack an
        # AZSTART header. The code lets those return nothing.
        if self._get_controller_code() == "H" and not self.are_keys_ok(["ELSTART", "AZSTART"]):
            return None

        # Always attempt to find the alt/az values regardless of observation
        # type.
        return altaz_from_degree_headers(self, (("ELSTART", "AZSTART"),),
                                         self.to_datetime_begin(), is_zd=False)

    @cache_translation
    def to_exposure_group(self):
        """Calculate the exposure group string.

        For LSSTCam and LATISS this is read from the ``GROUPID`` header.
        If that header is missing the exposure_id is returned instead as
        a string.
        """
        if self.is_key_ok("GROUPID"):
            exposure_group = self._header["GROUPID"]
            self._used_these_cards("GROUPID")
            return exposure_group
        return super().to_exposure_group()

    @cache_translation
    def to_focus_z(self):
        """Return the defocal distance of the camera in units of mm.

        If there is no ``FOCUSZ`` value in the header the default value of
        0.0mm is returned.

        Returns
        -------
        focus_z : `astropy.units.Quantity`
            The defocal distance from the header in mm, or the 0.0mm default.
        """
        if self.is_key_ok("FOCUSZ"):
            focus_z = self._header["FOCUSZ"]
            return focus_z * u.mm
        return super().to_focus_z()

    @staticmethod
    def _is_filter_empty(filter):
        """Return true if the supplied filter indicates an empty filter slot.

        Parameters
        ----------
        filter : `str`
            The filter string to check.

        Returns
        -------
        is_empty : `bool`
            `True` if the filter string looks like it is referring to an
            empty filter slot. For example this can be if the filter is
            "empty" or "empty_2".
        """
        return bool(re.match(r"empty_?\d*$", filter.lower()))
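
    # For reference: "empty", "EMPTY", and "empty_2" all report an empty
    # slot, while names such as "empty_filter" or "SDSSr_65mm" do not.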

    def _determine_primary_filter(self):
        """Determine the primary filter from the ``FILTER`` header.

        Returns
        -------
        filter : `str`
            The contents of the ``FILTER`` header with some appropriate
            defaulting.
        """

        if self.is_key_ok("FILTER"):
            physical_filter = self._header["FILTER"]
            self._used_these_cards("FILTER")

            if self._is_filter_empty(physical_filter):
                physical_filter = "empty"
        else:
            # Be explicit about having no knowledge of the filter
            # by setting it to "unknown". It should always have a value.
            physical_filter = "unknown"

            # Warn if the filter being unknown is important
            obstype = self.to_observation_type()
            if obstype not in ("bias", "dark"):
                log.warning("%s: Unable to determine the filter",
                            self._log_prefix)

        return physical_filter

    @cache_translation
    def to_observing_day(self):
        """Return the day of observation as YYYYMMDD integer.

        For LSSTCam and other compliant instruments this is the value
        of the DAYOBS header.

        Returns
        -------
        obs_day : `int`
            The day of observation.
        """
        if self.is_key_ok("DAYOBS"):
            self._used_these_cards("DAYOBS")
            return int(self._header["DAYOBS"])

        return super().to_observing_day()

    @cache_translation
    def to_observation_counter(self):
        """Return the sequence number within the observing day.

        Returns
        -------
        counter : `int`
            The sequence number for this day.
        """
        if self.is_key_ok("SEQNUM"):
            # Some older LATISS data may not have the header
            # but this is corrected in fix_header for LATISS.
            self._used_these_cards("SEQNUM")
            return int(self._header["SEQNUM"])

        # This indicates a problem so we warn and return a 0
        log.warning("%s: Unable to determine the observation counter so returning 0",
                    self._log_prefix)
        return 0

    @cache_translation
    def to_boresight_rotation_coord(self):
        """Coordinate frame of the boresight rotation angle.

        Only relevant for science observations.
        """
        unknown = "unknown"
        if not self.is_on_sky():
            return unknown

        self._used_these_cards("ROTCOORD")
        coord = self._header.get("ROTCOORD", unknown)
        if coord is None:
            coord = unknown
        return coord

    @cache_translation
    def to_boresight_airmass(self):
        """Calculate the airmass at the boresight at the start of the
        observation.

        Notes
        -----
        Early data are missing the AMSTART header so we fall back to
        calculating it from ELSTART.
        """
        if not self.is_on_sky():
            return None

        # This observation should have AMSTART
        amkey = "AMSTART"
        if self.is_key_ok(amkey):
            self._used_these_cards(amkey)
            return self._header[amkey]

        # Instead we need to look at the az/el values
        altaz = self.to_altaz_begin()
        if altaz is not None:
            return altaz.secz.to_value()

        log.warning("%s: Unable to determine airmass of a science observation, returning 1.",
                    self._log_prefix)
        return 1.0

    @cache_translation
    def to_group_counter_start(self):
        # Effectively the start of the visit as determined by the headers.
        counter = self.to_observation_counter()
        # Older data does not have the CURINDEX header.
        if self.is_key_ok("CURINDEX"):
            # CURINDEX is 1-based.
            seq_start = counter - self._header["CURINDEX"] + 1
            self._used_these_cards("CURINDEX")
            return seq_start
        else:
            # If the counter is 0 we need to pick something else
            # that is not going to confuse the visit calculation
            # (since setting everything to 0 will make one big visit).
            return counter if counter != 0 else self.to_exposure_id()

    @cache_translation
    def to_group_counter_end(self):
        # Effectively the end of the visit as determined by the headers.
        counter = self.to_observation_counter()
        # Older data does not have the CURINDEX or MAXINDEX headers.
        if self.is_key_ok("CURINDEX") and self.is_key_ok("MAXINDEX"):
            # CURINDEX is 1-based. CURINDEX == MAXINDEX indicates the
            # final exposure in the sequence.
            remaining = self._header["MAXINDEX"] - self._header["CURINDEX"]
            seq_end = counter + remaining
            self._used_these_cards("CURINDEX", "MAXINDEX")
            return seq_end
        else:
            # If the counter is 0 we need to pick something else
            # that is not going to confuse the visit calculation
            # (since setting everything to 0 will make one big visit).
            return counter if counter != 0 else self.to_exposure_id()
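
    # Worked example (illustrative): for the third exposure of a five-exposure
    # sequence with SEQNUM=57, CURINDEX=3, and MAXINDEX=5, the group counters
    # span 55 (start) to 59 (end), bracketing the sequence within the day.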

    @cache_translation
    def to_has_simulated_content(self):
        # Check all the simulation flags.
        # We do not know all the simulation flags that we may have so
        # must check every header key. Ideally HIERARCH SIMULATE would
        # be a hierarchical header so _header["SIMULATE"] would return
        # everything. The header looks like:
        #
        # HIERARCH SIMULATE ATMCS = / ATMCS Simulation Mode
        # HIERARCH SIMULATE ATHEXAPOD = 0 / ATHexapod Simulation Mode
        # HIERARCH SIMULATE ATPNEUMATICS = / ATPneumatics Simulation Mode
        # HIERARCH SIMULATE ATDOME = 1 / ATDome Simulation Mode
        # HIERARCH SIMULATE ATSPECTROGRAPH = 0 / ATSpectrograph Simulation Mode
        #
        # So any header that includes "SIMULATE" in the key name and has a
        # true value implies that something in the data is simulated.
        for k, v in self._header.items():
            if "SIMULATE" in k and v:
                return True

        # If the controller is H, P, S, or Q then the data are simulated.
        controller = self._get_controller_code()
        if controller:
            if controller in SIMULATED_CONTROLLERS:
                return True

        # No simulation flags set.
        return False

    @cache_translation
    def to_relative_humidity(self) -> float | None:
        key = "HUMIDITY"
        if self.is_key_ok(key):
            self._used_these_cards(key)
            return self._header[key]

        return None

    @cache_translation
    def to_pressure(self):
        key = "PRESSURE"
        if self.is_key_ok(key):
            value = self._header[key]
            # There has been an inconsistency in units for the pressure reading
            # so we need to adjust for this.
            if value > 10_000:
                unit = u.Pa
            else:
                unit = u.hPa
            return value * unit

        return None
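
    # For reference (illustrative values): a PRESSURE reading of 101325 is
    # interpreted as pascals, whereas a reading of 750 is interpreted as
    # hectopascals, reflecting the historical inconsistency noted above.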

    @cache_translation
    def to_temperature(self):
        key = "AIRTEMP"
        if self.is_key_ok(key):
            return self._header[key] * u.deg_C
        return None