
# This file is currently part of obs_lsst but is written to allow it
# to be migrated to the astro_metadata_translator package at a later date.
#
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the LICENSE file in this directory for details of code ownership.
#
# Use of this source code is governed by a 3-clause BSD-style
# license that can be found in the LICENSE file.

"""Metadata translation support code for LSST headers"""

__all__ = ("TZERO", "SIMONYI_LOCATION", "read_detector_ids",
           "compute_detector_exposure_id_generic", "LsstBaseTranslator",
           "SIMONYI_TELESCOPE")

import os.path
import yaml
import logging
import re
import datetime
import hashlib

import astropy.coordinates
import astropy.units as u
from astropy.time import Time, TimeDelta
from astropy.coordinates import EarthLocation

from lsst.utils import getPackageDir

from astro_metadata_translator import cache_translation, FitsTranslator
from astro_metadata_translator.translators.helpers import tracking_from_degree_headers, \
    altaz_from_degree_headers


TZERO = Time("2015-01-01T00:00", format="isot", scale="utc")
TZERO_DATETIME = TZERO.to_datetime()

# Delimiter to use for multiple filters/gratings
FILTER_DELIMITER = "~"

# Regex to use for parsing a GROUPID string
GROUP_RE = re.compile(r"^(\d\d\d\d\-\d\d\-\d\dT\d\d:\d\d:\d\d)\.(\d\d\d)(?:[\+#](\d+))?$")

# LSST Default location in the absence of headers
SIMONYI_LOCATION = EarthLocation.from_geodetic(-70.749417, -30.244639, 2663.0)

# Name of the main survey telescope
SIMONYI_TELESCOPE = "Simonyi Survey Telescope"

# Supported controller codes.
# The order here directly relates to the resulting exposure ID
# calculation. Do not reorder. Add new ones to the end.
# OCS, CCS, pHosim, P for simulated OCS, Q for simulated CCS, S for
# simulated images.
CONTROLLERS = "OCHPQS"

# Number of decimal digits allocated to the sequence number in exposure_ids.
_SEQNUM_MAXDIGITS = 5

# Number of decimal digits allocated to the day of observation (and controller
# code) in exposure_ids.
_DAYOBS_MAXDIGITS = 8

# Value added to day_obs for controllers after the default.
_CONTROLLER_INCREMENT = 1000_00_00

# Number of decimal digits used by exposure_ids.
EXPOSURE_ID_MAXDIGITS = _SEQNUM_MAXDIGITS + _DAYOBS_MAXDIGITS
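# For illustration (values chosen here, not taken from real data): an
# exposure ID is the concatenation of the (controller-adjusted) 8-digit
# day_obs and the zero-padded 5-digit sequence number, so day_obs 20231115
# with seqnum 123 and the default "O" controller packs to 2023111500123,
# while the "H" controller (index 2 in CONTROLLERS) first adds
# 2 * _CONTROLLER_INCREMENT to day_obs, giving 4023111500123.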

obs_lsst_packageDir = getPackageDir("obs_lsst")

log = logging.getLogger(__name__)


def read_detector_ids(policyFile):
    """Read a camera policy file and retrieve the mapping from CCD name
    to ID.

    Parameters
    ----------
    policyFile : `str`
        Name of YAML policy file to read, relative to the obs_lsst
        package.

    Returns
    -------
    mapping : `dict` of `str` to (`int`, `str`)
        A `dict` with keys being the full names of the detectors, and the
        value is a `tuple` containing the integer detector number and the
        detector serial number.

    Notes
    -----
    Reads the camera YAML definition file directly and extracts just the
    IDs and serials. This routine does not use the standard
    `~lsst.obs.base.yamlCamera.YAMLCamera` infrastructure or
    `lsst.afw.cameraGeom`. This is because the translators are intended to
    have minimal dependencies on LSST infrastructure.
    """

    file = os.path.join(obs_lsst_packageDir, policyFile)
    try:
        with open(file) as fh:
            # Use the fast parser since these files are large
            camera = yaml.load(fh, Loader=yaml.CSafeLoader)
    except OSError as e:
        raise ValueError(f"Could not load camera policy file {file}") from e

    mapping = {}
    for ccd, value in camera["CCDs"].items():
        mapping[ccd] = (int(value["id"]), value["serial"])

    return mapping
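# Usage sketch (hypothetical policy path and entries, for illustration only):
#
#     mapping = read_detector_ids("policy/lsstCam.yaml")
#     detector_num, serial = mapping["R01_S01"]
#
# Each key is the full detector name and each value is the
# (detector number, serial) tuple described in the docstring above.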

def compute_detector_exposure_id_generic(exposure_id, detector_num, max_num):
    """Compute the detector_exposure_id from the exposure id and the
    detector number.

    Parameters
    ----------
    exposure_id : `int`
        The exposure ID.
    detector_num : `int`
        The detector number.
    max_num : `int`
        Maximum number of detectors to make space for.

    Returns
    -------
    detector_exposure_id : `int`
        Computed ID.

    Raises
    ------
    ValueError
        Raised if the detector number is out of range or is not defined.
    """

    if detector_num is None:
        raise ValueError("Detector number must be defined.")
    if detector_num >= max_num or detector_num < 0:
        raise ValueError(f"Detector number out of range 0 <= {detector_num} < {max_num}")

    return max_num*exposure_id + detector_num
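# Worked example (illustrative numbers): with max_num=1000,
# exposure_id=2023111500123 and detector_num=42 the packed value is
# 1000 * 2023111500123 + 42 == 2023111500123042.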

class LsstBaseTranslator(FitsTranslator):
    """Translation methods useful for all LSST-style headers."""

    _const_map = {}
    _trivial_map = {}

    # Do not specify a name for this translator
    cameraPolicyFile = None
    """Path to policy file relative to obs_lsst root."""

    detectorMapping = None
    """Mapping of detector name to detector number and serial."""

    detectorSerials = None
    """Mapping of detector serial number to raft, number, and name."""

    DETECTOR_MAX = 1000
    """Maximum number of detectors to use when calculating the
    detector_exposure_id.

    Note that because this is the maximum number *of* detectors, for
    zero-based ``detector_num`` values this is one greater than the maximum
    ``detector_num``. It is also often rounded up to the nearest power of
    10 anyway, to allow ``detector_exposure_id`` values to be easily decoded by
    humans.
    """

    _DEFAULT_LOCATION = SIMONYI_LOCATION
    """Default telescope location in absence of relevant FITS headers."""

    _ROLLOVER_TIME = TimeDelta(12*60*60, scale="tai", format="sec")
    """Time delta for the definition of a Rubin Observatory start of day.
    Used when the header is missing. See LSE-400 or SITCOMTN-032 for details.
    """

    @classmethod
    def __init_subclass__(cls, **kwargs):
        """Ensure that subclasses clear their own detector mapping entries
        such that subclasses of translators that use detector mappings
        do not pick up the incorrect values from a parent."""

        cls.detectorMapping = None
        cls.detectorSerials = None

        super().__init_subclass__(**kwargs)

    def search_paths(self):
        """Search paths to use for LSST data when looking for header correction
        files.

        Returns
        -------
        path : `list`
            List with a single element containing the full path to the
            ``corrections`` directory within the ``obs_lsst`` package.
        """
        return [os.path.join(obs_lsst_packageDir, "corrections")]

    @classmethod
    def observing_date_to_offset(cls, observing_date: astropy.time.Time) -> astropy.time.TimeDelta | None:
        """Return the offset to use when calculating the observing day.

        Parameters
        ----------
        observing_date : `astropy.time.Time`
            The date of the observation. Unused.

        Returns
        -------
        offset : `astropy.time.TimeDelta`
            The offset to apply. The default implementation returns a fixed
            number but subclasses can return a different value depending
            on whether the instrument is in the instrument lab or on the
            mountain.
        """
        return cls._ROLLOVER_TIME
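    # Illustrative note (assuming, per LSE-400/SITCOMTN-032, that the observing
    # day is the calendar date of the timestamp minus this 12-hour offset): an
    # exposure starting at 2023-11-16T03:00 TAI would be assigned day_obs
    # 20231115, while one starting at 2023-11-16T13:00 TAI would be assigned
    # day_obs 20231116.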

    @classmethod
    def compute_detector_exposure_id(cls, exposure_id, detector_num):
        """Compute the detector exposure ID from detector number and
        exposure ID.

        This is a helper method to allow code working outside the translator
        infrastructure to use the same algorithm.

        Parameters
        ----------
        exposure_id : `int`
            Unique exposure ID.
        detector_num : `int`
            Detector number.

        Returns
        -------
        detector_exposure_id : `int`
            The calculated ID.
        """
        from .._packer import RubinDimensionPacker

        return RubinDimensionPacker.pack_id_pair(exposure_id, detector_num)

    @classmethod
    def max_detector_exposure_id(cls):
        """The maximum detector exposure ID expected to be generated by
        this instrument.

        Returns
        -------
        max_id : `int`
            The maximum value.
        """
        max_exposure_id = cls.max_exposure_id()
        # We subtract 1 from DETECTOR_MAX because LSST detector_num values are
        # zero-based, and detector_max is the maximum number *of* detectors,
        # while this returns the (inclusive) maximum ID value.
        return cls.compute_detector_exposure_id(max_exposure_id, cls.DETECTOR_MAX - 1)

    @classmethod
    def max_exposure_id(cls):
        """The maximum exposure ID expected from this instrument.

        Returns
        -------
        max_exposure_id : `int`
            The maximum value.
        """
        max_date = "2050-12-31T23:59.999"
        max_seqnum = 99_999
        # This controller triggers the largest numbers
        max_controller = CONTROLLERS[-1]
        return cls.compute_exposure_id(max_date, max_seqnum, max_controller)

    @classmethod
    def detector_mapping(cls):
        """Returns the mapping of full name to detector ID and serial.

        Returns
        -------
        mapping : `dict` of `str`:`tuple`
            Returns the mapping of full detector name (group+detector)
            to detector number and serial.

        Raises
        ------
        ValueError
            Raised if no camera policy file has been registered with this
            translation class.

        Notes
        -----
        Will construct the mapping if none has previously been constructed.
        """
        if cls.cameraPolicyFile is not None:
            if cls.detectorMapping is None:
                cls.detectorMapping = read_detector_ids(cls.cameraPolicyFile)
        else:
            raise ValueError(f"Translation class '{cls.__name__}' has no registered camera policy file")

        return cls.detectorMapping

    @classmethod
    def detector_serials(cls):
        """Obtain the mapping of detector serial to detector group, name,
        and number.

        Returns
        -------
        info : `dict` of `tuple` of (`str`, `str`, `int`)
            A `dict` with the serial numbers as keys and values of detector
            group, name, and number.
        """
        if cls.detectorSerials is None:
            detector_mapping = cls.detector_mapping()

            if detector_mapping is not None:
                # Form mapping to go from serial number to names/numbers
                serials = {}
                for fullname, (id, serial) in cls.detectorMapping.items():
                    raft, detector_name = fullname.split("_")
                    if serial in serials:
                        raise RuntimeError(f"Serial {serial} is defined in multiple places")
                    serials[serial] = (raft, detector_name, id)
                cls.detectorSerials = serials
            else:
                raise RuntimeError("Unable to obtain detector mapping information")

        return cls.detectorSerials

    @classmethod
    def compute_detector_num_from_name(cls, detector_group, detector_name):
        """Helper method to return the detector number from the name.

        Parameters
        ----------
        detector_group : `str`
            Name of the detector grouping. This is generally the raft name.
        detector_name : `str`
            Detector name.

        Returns
        -------
        num : `int`
            Detector number.
        """
        fullname = f"{detector_group}_{detector_name}"

        num = None
        detector_mapping = cls.detector_mapping()
        if detector_mapping is None:
            raise RuntimeError("Unable to obtain detector mapping information")

        if fullname in detector_mapping:
            num = detector_mapping[fullname]
        else:
            log.warning(f"Unable to determine detector number from detector name {fullname}")
            return None

        return num[0]

    @classmethod
    def compute_detector_info_from_serial(cls, detector_serial):
        """Helper method to return the detector information from the serial.

        Parameters
        ----------
        detector_serial : `str`
            Detector serial ID.

        Returns
        -------
        info : `tuple` of (`str`, `str`, `int`)
            Detector group, name, and number.
        """
        serial_mapping = cls.detector_serials()
        if serial_mapping is None:
            raise RuntimeError("Unable to obtain serial mapping information")

        if detector_serial in serial_mapping:
            info = serial_mapping[detector_serial]
        else:
            raise RuntimeError("Unable to determine detector information from detector serial"
                               f" {detector_serial}")

        return info

    @staticmethod
    def compute_exposure_id(dayobs, seqnum, controller=None):
        """Helper method to calculate the exposure_id.

        Parameters
        ----------
        dayobs : `str` or `int`
            Day of observation in either YYYYMMDD or YYYY-MM-DD format.
            If the string looks like ISO format it will be truncated before the
            ``T`` before being handled.
        seqnum : `int` or `str`
            Sequence number.
        controller : `str`, optional
            Controller to use. If this is "O", no change is made to the
            exposure ID. If it is "C", 1000 is added to the year component
            of the exposure ID. If it is "H", 2000 is added to the year
            component. This sequence continues with the "P" and "Q" controllers.
            `None` indicates that the controller is not relevant to the
            exposure ID calculation (generally this is the case for test
            stand data).

        Returns
        -------
        exposure_id : `int`
            Exposure ID in YYYYMMDDnnnnn form.
        """
        # We really want an integer but the checks require a str.
        if isinstance(dayobs, int):
            dayobs = str(dayobs)

        if "T" in dayobs:
            dayobs = dayobs[:dayobs.find("T")]

        dayobs = dayobs.replace("-", "")

        if len(dayobs) != 8:
            raise ValueError(f"Malformed dayobs: {dayobs}")

        # Expect no more than 99,999 exposures in a day
        if seqnum >= 10**_SEQNUM_MAXDIGITS:
            raise ValueError(f"Sequence number ({seqnum}) exceeds limit")

        dayobs = int(dayobs)
        if dayobs > 20231004 and controller == "C":
            # As of this date the CCS controller has a unified counter
            # with the OCS, so there is no need to adjust the dayobs
            # to make unique exposure IDs.
            controller = None

        # Camera control changes the exposure ID
        if controller is not None:
            index = CONTROLLERS.find(controller)
            if index == -1:
                raise ValueError(f"Supplied controller, '{controller}' is not "
                                 f"in supported list: {CONTROLLERS}")

            # Increment a thousand years per controller
            dayobs += _CONTROLLER_INCREMENT * index

        # Form the number as a string zero padding the sequence number
        idstr = f"{dayobs}{seqnum:0{_SEQNUM_MAXDIGITS}d}"

        # Exposure ID has to be an integer
        return int(idstr)
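    # Worked example (illustrative values): compute_exposure_id("2023-11-15", 123)
    # gives 2023111500123, while compute_exposure_id("2023-11-15", 123, controller="Q")
    # adds 4 * _CONTROLLER_INCREMENT ("Q" is index 4 in CONTROLLERS) to the
    # day_obs, giving 6023111500123. A "C" controller is treated as "O" for
    # day_obs values after 20231004, as implemented above.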

    @staticmethod
    def unpack_exposure_id(exposure_id):
        """Unpack an exposure ID into dayobs, seqnum, and controller.

        Parameters
        ----------
        exposure_id : `int`
            Integer exposure ID produced by `compute_exposure_id`.

        Returns
        -------
        dayobs : `str`
            Day of observation as a YYYYMMDD string.
        seqnum : `int`
            Sequence number.
        controller : `str`
            Controller code. Will be ``O`` (but should be ignored) for IDs
            produced by calling `compute_exposure_id` with ``controller=None``.
        """
        dayobs, seqnum = divmod(exposure_id, 10**_SEQNUM_MAXDIGITS)
        controller_index = dayobs // _CONTROLLER_INCREMENT - 2
        dayobs -= controller_index * _CONTROLLER_INCREMENT
        return (str(dayobs), seqnum, CONTROLLERS[controller_index], )
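    # Round-trip example (illustrative value): unpack_exposure_id(6023111500123)
    # splits off seqnum 123, derives controller index 4 ("Q") from the leading
    # digits, and recovers dayobs "20231115".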

    def _is_on_mountain(self):
        """Indicate whether these data are coming from the instrument
        installed on the mountain.

        Returns
        -------
        is : `bool`
            `True` if instrument is on the mountain.
        """
        if "TSTAND" in self._header:
            return False
        return True

    def is_on_sky(self):
        """Determine if this is an on-sky observation.

        Returns
        -------
        is_on_sky : `bool`
            Returns `True` if this is an observation on sky on the
            summit.
        """
        # For LSST we assume we are on sky unless TRACKSYS is local
        if self.is_key_ok("TRACKSYS"):
            if self._header["TRACKSYS"].lower() == "local":
                # not on sky
                return False

        # These are obviously not on sky
        if self.to_observation_type() in ("bias", "dark", "flat"):
            return False

        return self._is_on_mountain()

    @cache_translation
    def to_location(self):
        # Docstring will be inherited. Property defined in properties.py
        if not self._is_on_mountain():
            return None
        try:
            # Try standard FITS headers
            return super().to_location()
        except KeyError:
            return self._DEFAULT_LOCATION

    @cache_translation
    def to_datetime_begin(self):
        # Docstring will be inherited. Property defined in properties.py
        self._used_these_cards("MJD-OBS")
        return Time(self._header["MJD-OBS"], scale="tai", format="mjd")

    @cache_translation
    def to_datetime_end(self):
        # Docstring will be inherited. Property defined in properties.py
        if self.is_key_ok("DATE-END"):
            return super().to_datetime_end()

        return self.to_datetime_begin() + self.to_exposure_time()

    @cache_translation
    def to_detector_num(self):
        # Docstring will be inherited. Property defined in properties.py
        raft = self.to_detector_group()
        detector = self.to_detector_name()
        return self.compute_detector_num_from_name(raft, detector)

    @cache_translation
    def to_detector_exposure_id(self):
        # Docstring will be inherited. Property defined in properties.py
        exposure_id = self.to_exposure_id()
        num = self.to_detector_num()
        return self.compute_detector_exposure_id(exposure_id, num)

    @cache_translation
    def to_observation_type(self):
        # Docstring will be inherited. Property defined in properties.py
        obstype = self._header["IMGTYPE"]
        self._used_these_cards("IMGTYPE")
        obstype = obstype.lower()
        if obstype in ("skyexp", "object"):
            obstype = "science"
        return obstype

    @cache_translation
    def to_observation_reason(self):
        # Docstring will be inherited. Property defined in properties.py
        for key in ("REASON", "TESTTYPE"):
            if self.is_key_ok(key):
                reason = self._header[key]
                self._used_these_cards(key)
                return reason.lower()
        # no specific header present so use the default translation
        return super().to_observation_reason()

    @cache_translation
    def to_dark_time(self):
        """Calculate the dark time.

        If a DARKTIME header is not found, the value is assumed to be
        identical to the exposure time.

        Returns
        -------
        dark : `astropy.units.Quantity`
            The dark time in seconds.
        """
        if self.is_key_ok("DARKTIME"):
            darktime = self._header["DARKTIME"]*u.s
            self._used_these_cards("DARKTIME")
        else:
            log.warning("%s: Unable to determine dark time. Setting from exposure time.",
                        self._log_prefix)
            darktime = self.to_exposure_time()
        return darktime

    @cache_translation
    def to_exposure_id(self):
        """Generate a unique exposure ID number.

        This is a combination of DAYOBS and SEQNUM, and optionally
        CONTRLLR.

        Returns
        -------
        exposure_id : `int`
            Unique exposure number.
        """
        if "CALIB_ID" in self._header:
            self._used_these_cards("CALIB_ID")
            return None

        dayobs = self._header["DAYOBS"]
        seqnum = self._header["SEQNUM"]
        self._used_these_cards("DAYOBS", "SEQNUM")

        if self.is_key_ok("CONTRLLR"):
            controller = self._header["CONTRLLR"]
            self._used_these_cards("CONTRLLR")
        else:
            controller = None

        return self.compute_exposure_id(dayobs, seqnum, controller=controller)

    @cache_translation
    def to_visit_id(self):
        """Calculate the visit associated with this exposure.

        Notes
        -----
        For LATISS and LSSTCam the default visit is derived from the
        exposure group. For other instruments we return the exposure_id.
        """

        exposure_group = self.to_exposure_group()
        # If the group is an int we return it
        try:
            visit_id = int(exposure_group)
            return visit_id
        except ValueError:
            pass

        # A Group is defined as ISO date with an extension
        # The integer must be the same for a given group so we can never
        # use datetime_begin.
        # Nominally a GROUPID looks like "ISODATE+N" where the +N is
        # optional. This can be converted to seconds since epoch with
        # an adjustment for N.
        # For early data lacking that form we hash the group and return
        # the int.
        matches_date = GROUP_RE.match(exposure_group)
        if matches_date:
            iso_str = matches_date.group(1)
            fraction = matches_date.group(2)
            n = matches_date.group(3)
            if n is not None:
                n = int(n)
            else:
                n = 0
            iso = datetime.datetime.strptime(iso_str, "%Y-%m-%dT%H:%M:%S")

            tdelta = iso - TZERO_DATETIME
            epoch = int(tdelta.total_seconds())

            # Form the integer from EPOCH + 3 DIGIT FRAC + 0-pad N
            visit_id = int(f"{epoch}{fraction}{n:04d}")
        else:
            # Non-standard string so convert to numbers
            # using a hash function. Use the first N hex digits
            group_bytes = exposure_group.encode("us-ascii")
            hasher = hashlib.blake2b(group_bytes)
            # Need to be big enough it does not possibly clash with the
            # date-based version above
            digest = hasher.hexdigest()[:14]
            visit_id = int(digest, base=16)

            # To guard against hash collisions, append the string length
            visit_id = int(f"{visit_id}{len(exposure_group):02d}")

        return visit_id
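    # Example of the date-based branch (illustrative GROUPID): for
    # "2023-11-15T03:22:17.123+5" the regex yields the ISO part, the fraction
    # "123" and N=5; the ISO part is 279948137 whole seconds after TZERO, so
    # the visit ID becomes int("279948137" + "123" + "0005") == 2799481371230005.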

    @cache_translation
    def to_physical_filter(self):
        """Calculate the physical filter name.

        Returns
        -------
        filter : `str`
            Name of filter. Can be a combination of FILTER, FILTER1 and FILTER2
            headers joined by a "~". Returns "unknown" if no filter is declared.
        """
        joined = self._join_keyword_values(["FILTER", "FILTER1", "FILTER2"], delim=FILTER_DELIMITER)
        if not joined:
            joined = "unknown"

        # Replace instances of "NONE" with "none".
        joined = joined.replace("NONE", "none")

        return joined

    @cache_translation
    def to_tracking_radec(self):
        # RA/DEC are *derived* headers and for the case where the DATE-BEG
        # is 1970 they are garbage and should not be used.
        try:
            if self._header["DATE-OBS"] == self._header["DATE"]:
                # A fixed up date -- use AZEL as source of truth
                altaz = self.to_altaz_begin()
                radec = astropy.coordinates.SkyCoord(altaz.transform_to(astropy.coordinates.ICRS()),
                                                     obstime=altaz.obstime,
                                                     location=altaz.location)
            else:
                radecsys = ("RADESYS",)
                radecpairs = (("RASTART", "DECSTART"), ("RA", "DEC"))
                radec = tracking_from_degree_headers(self, radecsys, radecpairs)
        except Exception:
            # If this observation was not formally on sky then we are allowed
            # to return None.
            if self.is_on_sky():
                raise
            radec = None

        return radec

    @cache_translation
    def to_altaz_begin(self):
        if not self._is_on_mountain():
            return None

        # Always attempt to find the alt/az values regardless of observation
        # type.
        return altaz_from_degree_headers(self, (("ELSTART", "AZSTART"),),
                                         self.to_datetime_begin(), is_zd=False)

    @cache_translation
    def to_exposure_group(self):
        """Calculate the exposure group string.

        For LSSTCam and LATISS this is read from the ``GROUPID`` header.
        If that header is missing the exposure_id is returned instead as
        a string.
        """
        if self.is_key_ok("GROUPID"):
            exposure_group = self._header["GROUPID"]
            self._used_these_cards("GROUPID")
            return exposure_group
        return super().to_exposure_group()

    @cache_translation
    def to_focus_z(self):
        """Return the defocal distance of the camera in units of mm.
        If there is no ``FOCUSZ`` value in the header it will return
        the default 0.0 mm value.

        Returns
        -------
        focus_z : `astropy.units.Quantity`
            The defocal distance from the header in mm, or the 0.0 mm default.
        """
        if self.is_key_ok("FOCUSZ"):
            focus_z = self._header["FOCUSZ"]
            return focus_z * u.mm
        return super().to_focus_z()

    @staticmethod
    def _is_filter_empty(filter):
        """Return `True` if the supplied filter indicates an empty filter slot.

        Parameters
        ----------
        filter : `str`
            The filter string to check.

        Returns
        -------
        is_empty : `bool`
            `True` if the filter string looks like it is referring to an
            empty filter slot. For example this can be if the filter is
            "empty" or "empty_2".
        """
        return bool(re.match(r"empty_?\d*$", filter.lower()))
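    # For example, "empty", "EMPTY_2" and "empty_03" all indicate an empty
    # slot, while a name such as "SDSSr_65mm" does not (illustrative values).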

    def _determine_primary_filter(self):
        """Determine the primary filter from the ``FILTER`` header.

        Returns
        -------
        filter : `str`
            The contents of the ``FILTER`` header with some appropriate
            defaulting.
        """

        if self.is_key_ok("FILTER"):
            physical_filter = self._header["FILTER"]
            self._used_these_cards("FILTER")

            if self._is_filter_empty(physical_filter):
                physical_filter = "empty"
        else:
            # Be explicit about having no knowledge of the filter
            # by setting it to "unknown". It should always have a value.
            physical_filter = "unknown"

            # Warn if the filter being unknown is important
            obstype = self.to_observation_type()
            if obstype not in ("bias", "dark"):
                log.warning("%s: Unable to determine the filter",
                            self._log_prefix)

        return physical_filter

    @cache_translation
    def to_observing_day(self):
        """Return the day of observation as YYYYMMDD integer.

        For LSSTCam and other compliant instruments this is the value
        of the DAYOBS header.

        Returns
        -------
        obs_day : `int`
            The day of observation.
        """
        if self.is_key_ok("DAYOBS"):
            self._used_these_cards("DAYOBS")
            return int(self._header["DAYOBS"])

        return super().to_observing_day()

    @cache_translation
    def to_observation_counter(self):
        """Return the sequence number within the observing day.

        Returns
        -------
        counter : `int`
            The sequence number for this day.
        """
        if self.is_key_ok("SEQNUM"):
            # Some older LATISS data may not have the header
            # but this is corrected in fix_header for LATISS.
            self._used_these_cards("SEQNUM")
            return int(self._header["SEQNUM"])

        # This indicates a problem so we warn and return a 0
        log.warning("%s: Unable to determine the observation counter so returning 0",
                    self._log_prefix)
        return 0

    @cache_translation
    def to_boresight_rotation_coord(self):
        """Coordinate frame of the boresight rotation angle.

        Only relevant for science observations.
        """
        unknown = "unknown"
        if not self.is_on_sky():
            return unknown

        self._used_these_cards("ROTCOORD")
        coord = self._header.get("ROTCOORD", unknown)
        if coord is None:
            coord = unknown
        return coord

    @cache_translation
    def to_boresight_airmass(self):
        """Calculate airmass at boresight at start of observation.

        Notes
        -----
        Early data are missing the AMSTART header so we fall back to
        calculating it from ELSTART.
        """
        if not self.is_on_sky():
            return None

        # This observation should have AMSTART
        amkey = "AMSTART"
        if self.is_key_ok(amkey):
            self._used_these_cards(amkey)
            return self._header[amkey]

        # Instead we need to look at azel
        altaz = self.to_altaz_begin()
        if altaz is not None:
            return altaz.secz.to_value()

        log.warning("%s: Unable to determine airmass of a science observation, returning 1.",
                    self._log_prefix)
        return 1.0

    @cache_translation
    def to_group_counter_start(self):
        # Effectively the start of the visit as determined by the headers.
        counter = self.to_observation_counter()
        # Older data does not have the CURINDEX header.
        if self.is_key_ok("CURINDEX"):
            # CURINDEX is 1-based.
            seq_start = counter - self._header["CURINDEX"] + 1
            self._used_these_cards("CURINDEX")
            return seq_start
        else:
            # If the counter is 0 we need to pick something else
            # that is not going to confuse the visit calculation
            # (since setting everything to 0 will make one big visit).
            return counter if counter != 0 else self.to_exposure_id()

    @cache_translation
    def to_group_counter_end(self):
        # Effectively the end of the visit as determined by the headers.
        counter = self.to_observation_counter()
        # Older data does not have the CURINDEX or MAXINDEX headers.
        if self.is_key_ok("CURINDEX") and self.is_key_ok("MAXINDEX"):
            # CURINDEX is 1-based. CURINDEX == MAXINDEX indicates the
            # final exposure in the sequence.
            remaining = self._header["MAXINDEX"] - self._header["CURINDEX"]
            seq_end = counter + remaining
            self._used_these_cards("CURINDEX", "MAXINDEX")
            return seq_end
        else:
            # If the counter is 0 we need to pick something else
            # that is not going to confuse the visit calculation
            # (since setting everything to 0 will make one big visit).
            return counter if counter != 0 else self.to_exposure_id()
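    # Worked example (illustrative header values): for the second exposure of
    # a three-exposure sequence with SEQNUM=105, CURINDEX=2 and MAXINDEX=3,
    # group_counter_start is 105 - 2 + 1 == 104 and group_counter_end is
    # 105 + (3 - 2) == 106.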

    @cache_translation
    def to_has_simulated_content(self):
        # Check all the simulation flags.
        # We do not know all the simulation flags that we may have so
        # must check every header key. Ideally HIERARCH SIMULATE would
        # be a hierarchical header so _header["SIMULATE"] would return
        # everything. The header looks like:
        #
        # HIERARCH SIMULATE ATMCS = / ATMCS Simulation Mode
        # HIERARCH SIMULATE ATHEXAPOD = 0 / ATHexapod Simulation Mode
        # HIERARCH SIMULATE ATPNEUMATICS = / ATPneumatics Simulation Mode
        # HIERARCH SIMULATE ATDOME = 1 / ATDome Simulation Mode
        # HIERARCH SIMULATE ATSPECTROGRAPH = 0 / ATSpectrograph Simulation Mode
        #
        # So any header that includes "SIMULATE" in the key name and has a
        # true value implies that something in the data is simulated.
        for k, v in self._header.items():
            if "SIMULATE" in k and v:
                return True

        # If the controller is H, P, or Q then the data are simulated.
        ctrlr_key = "CONTRLLR"
        if self.is_key_ok(ctrlr_key):
            controller = self._header[ctrlr_key]
            self._used_these_cards(ctrlr_key)
            if controller in "HPQ":
                return True

        # No simulation flags set.
        return False

    @cache_translation
    def to_relative_humidity(self) -> float | None:
        key = "HUMIDITY"
        if self.is_key_ok(key):
            self._used_these_cards(key)
            return self._header[key]

        return None

    @cache_translation
    def to_pressure(self):
        key = "PRESSURE"
        if self.is_key_ok(key):
            value = self._header[key]
            # There has been an inconsistency in units for the pressure reading
            # so we need to adjust for this.
            if value > 10_000:
                unit = u.Pa
            else:
                unit = u.hPa
            return value * unit

        return None
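    # For example (illustrative readings): a header value of 101325 is taken
    # to be in Pa, while a value of 750 is taken to be in hPa.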

    @cache_translation
    def to_temperature(self):
        key = "AIRTEMP"
        if self.is_key_ok(key):
            return self._header[key] * u.deg_C
        return None