# This file is currently part of obs_lsst but is written to allow it
# to be migrated to the astro_metadata_translator package at a later date.
#
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the LICENSE file in this directory for details of code ownership.
#
# Use of this source code is governed by a 3-clause BSD-style
# license that can be found in the LICENSE file.

"""Metadata translation support code for LSST headers"""

__all__ = ("TZERO", "SIMONYI_LOCATION", "read_detector_ids",
           "compute_detector_exposure_id_generic", "LsstBaseTranslator",
           "SIMONYI_TELESCOPE")

import os.path
import yaml
import logging
import re
import datetime
import hashlib

import astropy.coordinates
import astropy.units as u
from astropy.time import Time, TimeDelta
from astropy.coordinates import EarthLocation

from lsst.utils import getPackageDir

from astro_metadata_translator import cache_translation, FitsTranslator
from astro_metadata_translator.translators.helpers import tracking_from_degree_headers, \
    altaz_from_degree_headers


TZERO = Time("2015-01-01T00:00", format="isot", scale="utc")
TZERO_DATETIME = TZERO.to_datetime()

# Delimiter to use for multiple filters/gratings
FILTER_DELIMITER = "~"

# Regex to use for parsing a GROUPID string
GROUP_RE = re.compile(r"^(\d\d\d\d\-\d\d\-\d\dT\d\d:\d\d:\d\d)\.(\d\d\d)(?:[\+#](\d+))?$")
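# For illustration, a hypothetical GROUPID of "2020-01-17T13:43:02.392#2"
# would match with groups ("2020-01-17T13:43:02", "392", "2"); the trailing
# "#2" (or "+2") component is optional.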

# LSST Default location in the absence of headers
SIMONYI_LOCATION = EarthLocation.from_geodetic(-70.749417, -30.244639, 2663.0)
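# Note that EarthLocation.from_geodetic takes longitude (deg), latitude (deg),
# and height (m), in that order.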

# Name of the main survey telescope
SIMONYI_TELESCOPE = "Simonyi Survey Telescope"

obs_lsst_packageDir = getPackageDir("obs_lsst")

log = logging.getLogger(__name__)


def read_detector_ids(policyFile):
    """Read a camera policy file and retrieve the mapping from CCD name
    to ID.

    Parameters
    ----------
    policyFile : `str`
        Name of YAML policy file to read, relative to the obs_lsst
        package.

    Returns
    -------
    mapping : `dict` of `str` to (`int`, `str`)
        A `dict` keyed by the full detector name, where each value is a
        `tuple` of the integer detector number and the detector serial
        number.

    Notes
    -----
    Reads the camera YAML definition file directly and extracts just the
    IDs and serials. This routine does not use the standard
    `~lsst.obs.base.yamlCamera.YAMLCamera` infrastructure or
    `lsst.afw.cameraGeom`. This is because the translators are intended to
    have minimal dependencies on LSST infrastructure.
    """

    file = os.path.join(obs_lsst_packageDir, policyFile)
    try:
        with open(file) as fh:
            # Use the fast parser since these files are large
            camera = yaml.load(fh, Loader=yaml.CSafeLoader)
    except OSError as e:
        raise ValueError(f"Could not load camera policy file {file}") from e

    mapping = {}
    for ccd, value in camera["CCDs"].items():
        mapping[ccd] = (int(value["id"]), value["serial"])

    return mapping


def compute_detector_exposure_id_generic(exposure_id, detector_num, max_num):
    """Compute the detector_exposure_id from the exposure id and the
    detector number.

    Parameters
    ----------
    exposure_id : `int`
        The exposure ID.
    detector_num : `int`
        The detector number.
    max_num : `int`
        Maximum number of detectors to make space for.

    Returns
    -------
    detector_exposure_id : `int`
        Computed ID.

    Raises
    ------
    ValueError
        Raised if the detector number is out of range.
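
    Examples
    --------
    Purely illustrative; the exposure ID and detector number below are
    hypothetical values.

    >>> compute_detector_exposure_id_generic(2021012300456, 42, max_num=1000)
    2021012300456042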

    """

    if detector_num is None:
        raise ValueError("Detector number must be defined.")
    if detector_num >= max_num or detector_num < 0:
        raise ValueError(f"Detector number out of range 0 <= {detector_num} < {max_num}")

    return max_num*exposure_id + detector_num


class LsstBaseTranslator(FitsTranslator):
    """Translation methods useful for all LSST-style headers."""

    _const_map = {}
    _trivial_map = {}

    # Do not specify a name for this translator
    cameraPolicyFile = None
    """Path to policy file relative to obs_lsst root."""

    detectorMapping = None
    """Mapping of detector name to detector number and serial."""

    detectorSerials = None
    """Mapping of detector serial number to detector group, name, and number."""

    DETECTOR_MAX = 1000
    """Maximum number of detectors to use when calculating the
    detector_exposure_id.

    Note that because this is the maximum number *of* detectors, for
    zero-based ``detector_num`` values this is one greater than the maximum
    ``detector_num``. It is also often rounded up to the nearest power of
    10 anyway, to allow ``detector_exposure_id`` values to be easily decoded by
    humans.
    """

    _DEFAULT_LOCATION = SIMONYI_LOCATION
    """Default telescope location in absence of relevant FITS headers."""

    _ROLLOVER_TIME = TimeDelta(12*60*60, scale="tai", format="sec")
    """Time delta for the definition of a Rubin Observatory start of day.
    Used when the header is missing. See LSE-400 for details."""

    @classmethod
    def __init_subclass__(cls, **kwargs):
        """Ensure that subclasses clear their own detector mapping entries
        such that subclasses of translators that use detector mappings
        do not pick up incorrect values from a parent."""

        cls.detectorMapping = None
        cls.detectorSerials = None

        super().__init_subclass__(**kwargs)

    def search_paths(self):
        """Search paths to use for LSST data when looking for header correction
        files.

        Returns
        -------
        path : `list`
            List with a single element containing the full path to the
            ``corrections`` directory within the ``obs_lsst`` package.
        """
        return [os.path.join(obs_lsst_packageDir, "corrections")]

    @classmethod
    def compute_detector_exposure_id(cls, exposure_id, detector_num):
        """Compute the detector exposure ID from detector number and
        exposure ID.

        This is a helper method to allow code working outside the translator
        infrastructure to use the same algorithm.

        Parameters
        ----------
        exposure_id : `int`
            Unique exposure ID.
        detector_num : `int`
            Detector number.

        Returns
        -------
        detector_exposure_id : `int`
            The calculated ID.
        """
        return compute_detector_exposure_id_generic(exposure_id, detector_num, max_num=cls.DETECTOR_MAX)

    @classmethod
    def max_detector_exposure_id(cls):
        """The maximum detector exposure ID expected to be generated by
        this instrument.

        Returns
        -------
        max_id : `int`
            The maximum value.
        """
        max_exposure_id = cls.max_exposure_id()
        # We subtract 1 from DETECTOR_MAX because LSST detector_num values are
        # zero-based, and DETECTOR_MAX is the maximum number *of* detectors,
        # while this returns the (inclusive) maximum ID value.
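        # For the class defaults this works out to 3050123199999 * 1000 + 999
        # == 3050123199999999 (an illustrative value, not a guarantee).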

        return cls.compute_detector_exposure_id(max_exposure_id, cls.DETECTOR_MAX - 1)

    @classmethod
    def max_exposure_id(cls):
        """The maximum exposure ID expected from this instrument.

        Returns
        -------
        max_exposure_id : `int`
            The maximum value.
        """
        max_date = "2050-12-31T23:59.999"
        max_seqnum = 99_999
        max_controller = "C"  # This controller triggers the largest numbers
        return cls.compute_exposure_id(max_date, max_seqnum, max_controller)

    @classmethod
    def detector_mapping(cls):
        """Returns the mapping of full name to detector ID and serial.

        Returns
        -------
        mapping : `dict` of `str`:`tuple`
            Returns the mapping of full detector name (group+detector)
            to detector number and serial.

        Raises
        ------
        ValueError
            Raised if no camera policy file has been registered with this
            translation class.

        Notes
        -----
        Will construct the mapping if none has previously been constructed.
        """
        if cls.cameraPolicyFile is not None:
            if cls.detectorMapping is None:
                cls.detectorMapping = read_detector_ids(cls.cameraPolicyFile)
        else:
            raise ValueError(f"Translation class '{cls.__name__}' has no registered camera policy file")

        return cls.detectorMapping

    @classmethod
    def detector_serials(cls):
        """Obtain the mapping of detector serial to detector group, name,
        and number.

        Returns
        -------
        info : `dict` of `tuple` of (`str`, `str`, `int`)
            A `dict` keyed by serial number, with each value a `tuple` of
            detector group, name, and number.
        """
        if cls.detectorSerials is None:
            detector_mapping = cls.detector_mapping()

            if detector_mapping is not None:
                # Form mapping to go from serial number to names/numbers
                serials = {}
                for fullname, (id, serial) in cls.detectorMapping.items():
                    raft, detector_name = fullname.split("_")
                    if serial in serials:
                        raise RuntimeError(f"Serial {serial} is defined in multiple places")
                    serials[serial] = (raft, detector_name, id)
                cls.detectorSerials = serials
            else:
                raise RuntimeError("Unable to obtain detector mapping information")

        return cls.detectorSerials

    @classmethod
    def compute_detector_num_from_name(cls, detector_group, detector_name):
        """Helper method to return the detector number from the name.

        Parameters
        ----------
        detector_group : `str`
            Name of the detector grouping. This is generally the raft name.
        detector_name : `str`
            Detector name.

        Returns
        -------
        num : `int`
            Detector number.
        """
        fullname = f"{detector_group}_{detector_name}"

        num = None
        detector_mapping = cls.detector_mapping()
        if detector_mapping is None:
            raise RuntimeError("Unable to obtain detector mapping information")

        if fullname in detector_mapping:
            num = detector_mapping[fullname]
        else:
            log.warning(f"Unable to determine detector number from detector name {fullname}")
            return None

        return num[0]

    @classmethod
    def compute_detector_info_from_serial(cls, detector_serial):
        """Helper method to return the detector information from the serial.

        Parameters
        ----------
        detector_serial : `str`
            Detector serial ID.

        Returns
        -------
        info : `tuple` of (`str`, `str`, `int`)
            Detector group, name, and number.
        """
        serial_mapping = cls.detector_serials()
        if serial_mapping is None:
            raise RuntimeError("Unable to obtain serial mapping information")

        if detector_serial in serial_mapping:
            info = serial_mapping[detector_serial]
        else:
            raise RuntimeError("Unable to determine detector information from detector serial"
                               f" {detector_serial}")

        return info

    @staticmethod
    def compute_exposure_id(dayobs, seqnum, controller=None):
        """Helper method to calculate the exposure_id.

        Parameters
        ----------
        dayobs : `str`
            Day of observation in either YYYYMMDD or YYYY-MM-DD format.
            If the string looks like ISO format it will be truncated at the
            ``T`` before being handled.
        seqnum : `int` or `str`
            Sequence number.
        controller : `str`, optional
            Controller to use. If this is "O", no change is made to the
            exposure ID. If it is "C", 1000 is added to the year component
            of the exposure ID. If it is "H", 2000 is added to the year
            component.
            `None` indicates that the controller is not relevant to the
            exposure ID calculation (generally this is the case for test
            stand data).

        Returns
        -------
        exposure_id : `int`
            Exposure ID in the form YYYYMMDDnnnnn.
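
        Examples
        --------
        Purely illustrative; the observing day and sequence number below are
        hypothetical values.

        >>> LsstBaseTranslator.compute_exposure_id("2021-01-23", 456)
        2021012300456
        >>> LsstBaseTranslator.compute_exposure_id("2021-01-23", 456, controller="C")
        3021012300456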

        """
        if "T" in dayobs:
            dayobs = dayobs[:dayobs.find("T")]

        dayobs = dayobs.replace("-", "")

        if len(dayobs) != 8:
            raise ValueError(f"Malformed dayobs: {dayobs}")

        # Expect no more than 99,999 exposures in a day
        maxdigits = 5
        if seqnum >= 10**maxdigits:
            raise ValueError(f"Sequence number ({seqnum}) exceeds limit")

        # Camera control changes the exposure ID
        if controller is not None:
            if controller == "O":
                pass
            elif controller == "C":
                # Add 1000 to the year component
                dayobs = int(dayobs)
                dayobs += 1000_00_00
            elif controller == "H":
                # Add 2000 to the year component for PhoSim
                dayobs = int(dayobs)
                dayobs += 2000_00_00
            else:
                raise ValueError(f"Supplied controller, '{controller}', is neither 'O' nor 'C' nor 'H'")

        # Form the number as a string, zero-padding the sequence number
        idstr = f"{dayobs}{seqnum:0{maxdigits}d}"

        # Exposure ID has to be an integer
        return int(idstr)

    def _is_on_mountain(self):
        """Indicate whether these data are coming from the instrument
        installed on the mountain.

        Returns
        -------
        is : `bool`
            `True` if the instrument is on the mountain.
        """
        if "TSTAND" in self._header:
            return False
        return True

    def is_on_sky(self):
        """Determine if this is an on-sky observation.

        Returns
        -------
        is_on_sky : `bool`
            Returns `True` if this is an on-sky observation taken on the
            summit.
        """
        # For LSST we assume we are on sky unless TRACKSYS is local
        if self.is_key_ok("TRACKSYS"):
            if self._header["TRACKSYS"].lower() == "local":
                # not on sky
                return False

        # These are obviously not on sky
        if self.to_observation_type() in ("bias", "dark", "flat"):
            return False

        return self._is_on_mountain()

    @cache_translation
    def to_location(self):
        # Docstring will be inherited. Property defined in properties.py
        if not self._is_on_mountain():
            return None
        try:
            # Try standard FITS headers
            return super().to_location()
        except KeyError:
            return self._DEFAULT_LOCATION

    @cache_translation
    def to_datetime_begin(self):
        # Docstring will be inherited. Property defined in properties.py
        self._used_these_cards("MJD-OBS")
        return Time(self._header["MJD-OBS"], scale="tai", format="mjd")

    @cache_translation
    def to_datetime_end(self):
        # Docstring will be inherited. Property defined in properties.py
        if self.is_key_ok("DATE-END"):
            return super().to_datetime_end()

        return self.to_datetime_begin() + self.to_exposure_time()

    @cache_translation
    def to_detector_num(self):
        # Docstring will be inherited. Property defined in properties.py
        raft = self.to_detector_group()
        detector = self.to_detector_name()
        return self.compute_detector_num_from_name(raft, detector)

    @cache_translation
    def to_detector_exposure_id(self):
        # Docstring will be inherited. Property defined in properties.py
        exposure_id = self.to_exposure_id()
        num = self.to_detector_num()
        return self.compute_detector_exposure_id(exposure_id, num)

    @cache_translation
    def to_observation_type(self):
        # Docstring will be inherited. Property defined in properties.py
        obstype = self._header["IMGTYPE"]
        self._used_these_cards("IMGTYPE")
        obstype = obstype.lower()
        if obstype in ("skyexp", "object"):
            obstype = "science"
        return obstype

    @cache_translation
    def to_observation_reason(self):
        # Docstring will be inherited. Property defined in properties.py
        if self.is_key_ok("TESTTYPE"):
            reason = self._header["TESTTYPE"]
            self._used_these_cards("TESTTYPE")
            return reason.lower()
        # No specific header present so use the default translation
        return super().to_observation_reason()

    @cache_translation
    def to_dark_time(self):
        """Calculate the dark time.

        If a DARKTIME header is not found, the value is assumed to be
        identical to the exposure time.

        Returns
        -------
        dark : `astropy.units.Quantity`
            The dark time in seconds.
        """
        if self.is_key_ok("DARKTIME"):
            darktime = self._header["DARKTIME"]*u.s
            self._used_these_cards("DARKTIME")
        else:
            log.warning("%s: Unable to determine dark time. Setting from exposure time.",
                        self._log_prefix)
            darktime = self.to_exposure_time()
        return darktime

    @cache_translation
    def to_exposure_id(self):
        """Generate a unique exposure ID number.

        This is a combination of DAYOBS and SEQNUM, and optionally
        CONTRLLR.

        Returns
        -------
        exposure_id : `int`
            Unique exposure number.
        """
        if "CALIB_ID" in self._header:
            self._used_these_cards("CALIB_ID")
            return None

        dayobs = self._header["DAYOBS"]
        seqnum = self._header["SEQNUM"]
        self._used_these_cards("DAYOBS", "SEQNUM")

        if self.is_key_ok("CONTRLLR"):
            controller = self._header["CONTRLLR"]
            self._used_these_cards("CONTRLLR")
        else:
            controller = None

        return self.compute_exposure_id(dayobs, seqnum, controller=controller)

    @cache_translation
    def to_visit_id(self):
        """Calculate the visit associated with this exposure.

        Notes
        -----
        For LATISS and LSSTCam the default visit is derived from the
        exposure group. For other instruments we return the exposure_id.
        """

        exposure_group = self.to_exposure_group()
        # If the group is an int we return it
        try:
            visit_id = int(exposure_group)
            return visit_id
        except ValueError:
            pass

        # A group is defined as an ISO date with an extension.
        # The integer must be the same for a given group so we can never
        # use datetime_begin.
        # Nominally a GROUPID looks like "ISODATE+N" where the +N is
        # optional. This can be converted to seconds since epoch with
        # an adjustment for N.
        # For early data lacking that form we hash the group and return
        # the int.
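        # For illustration, a hypothetical GROUPID of "2020-01-17T13:43:02.392#2"
        # has a date part 159198182 seconds after TZERO, so the visit ID becomes
        # int("159198182" + "392" + "0002") == 1591981823920002.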

        matches_date = GROUP_RE.match(exposure_group)
        if matches_date:
            iso_str = matches_date.group(1)
            fraction = matches_date.group(2)
            n = matches_date.group(3)
            if n is not None:
                n = int(n)
            else:
                n = 0
            iso = datetime.datetime.strptime(iso_str, "%Y-%m-%dT%H:%M:%S")

            tdelta = iso - TZERO_DATETIME
            epoch = int(tdelta.total_seconds())

            # Form the integer from EPOCH + 3 DIGIT FRAC + 0-pad N
            visit_id = int(f"{epoch}{fraction}{n:04d}")
        else:
            # Non-standard string so convert to numbers
            # using a hash function. Use the first N hex digits
            group_bytes = exposure_group.encode("us-ascii")
            hasher = hashlib.blake2b(group_bytes)
            # Need to be big enough it does not possibly clash with the
            # date-based version above
            digest = hasher.hexdigest()[:14]
            visit_id = int(digest, base=16)

            # To help with hash collisions, append the string length
            visit_id = int(f"{visit_id}{len(exposure_group):02d}")

        return visit_id

    @cache_translation
    def to_physical_filter(self):
        """Calculate the physical filter name.

        Returns
        -------
        filter : `str`
            Name of filter. Can be a combination of FILTER, FILTER1, and
            FILTER2 headers joined by a "~". Returns "unknown" if no filter
            is declared.
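            For example (with hypothetical header values), FILTER="SDSSr" and
            FILTER2="ronchi90lpmm" would be combined into "SDSSr~ronchi90lpmm".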

        """
        joined = self._join_keyword_values(["FILTER", "FILTER1", "FILTER2"], delim=FILTER_DELIMITER)
        if not joined:
            joined = "unknown"

        return joined

    @cache_translation
    def to_tracking_radec(self):
        if not self.is_on_sky():
            return None

        # RA/DEC are *derived* headers and for the case where the DATE-BEG
        # is 1970 they are garbage and should not be used.
        if self._header["DATE-OBS"] == self._header["DATE"]:
            # A fixed up date -- use AZEL as source of truth
            altaz = self.to_altaz_begin()
            radec = astropy.coordinates.SkyCoord(altaz.transform_to(astropy.coordinates.ICRS()),
                                                 obstime=altaz.obstime,
                                                 location=altaz.location)
        else:
            radecsys = ("RADESYS",)
            radecpairs = (("RASTART", "DECSTART"), ("RA", "DEC"))
            radec = tracking_from_degree_headers(self, radecsys, radecpairs)

        return radec

    @cache_translation
    def to_altaz_begin(self):
        if not self._is_on_mountain():
            return None

        # ALTAZ always relevant unless bias or dark
        if self.to_observation_type() in ("bias", "dark"):
            return None

        return altaz_from_degree_headers(self, (("ELSTART", "AZSTART"),),
                                         self.to_datetime_begin(), is_zd=False)

    @cache_translation
    def to_exposure_group(self):
        """Calculate the exposure group string.

        For LSSTCam and LATISS this is read from the ``GROUPID`` header.
        If that header is missing, the exposure_id is returned instead as
        a string.
        """
        if self.is_key_ok("GROUPID"):
            exposure_group = self._header["GROUPID"]
            self._used_these_cards("GROUPID")
            return exposure_group
        return super().to_exposure_group()

    @staticmethod
    def _is_filter_empty(filter):
        """Return `True` if the supplied filter indicates an empty filter slot.

        Parameters
        ----------
        filter : `str`
            The filter string to check.

        Returns
        -------
        is_empty : `bool`
            `True` if the filter string looks like it is referring to an
            empty filter slot. For example, this can be if the filter is
            "empty" or "empty_2".
        """
        return bool(re.match(r"empty_?\d*$", filter.lower()))

    def _determine_primary_filter(self):
        """Determine the primary filter from the ``FILTER`` header.

        Returns
        -------
        filter : `str`
            The contents of the ``FILTER`` header with some appropriate
            defaulting.
        """

        if self.is_key_ok("FILTER"):
            physical_filter = self._header["FILTER"]
            self._used_these_cards("FILTER")

            if self._is_filter_empty(physical_filter):
                physical_filter = "empty"
        else:
            # Be explicit about having no knowledge of the filter
            # by setting it to "unknown". It should always have a value.
            physical_filter = "unknown"

            # Warn if the filter being unknown is important
            obstype = self.to_observation_type()
            if obstype not in ("bias", "dark"):
                log.warning("%s: Unable to determine the filter",
                            self._log_prefix)

        return physical_filter

    @cache_translation
    def to_observing_day(self):
        """Return the day of observation as a YYYYMMDD integer.

        For LSSTCam and other compliant instruments this is the value
        of the DAYOBS header.

        Returns
        -------
        obs_day : `int`
            The day of observation.
        """
        if self.is_key_ok("DAYOBS"):
            self._used_these_cards("DAYOBS")
            return int(self._header["DAYOBS"])

        # Calculate it ourselves, correcting for the Rubin day-rollover offset
        date = self.to_datetime_begin().tai
        date -= self._ROLLOVER_TIME
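        # For example (hypothetical), a TAI start time of 2022-06-01T03:00:00
        # rolls back to 2022-05-31, giving an observing day of 20220531.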

        return int(date.strftime("%Y%m%d"))

    @cache_translation
    def to_observation_counter(self):
        """Return the sequence number within the observing day.

        Returns
        -------
        counter : `int`
            The sequence number for this day.
        """
        if self.is_key_ok("SEQNUM"):
            # Some older LATISS data may not have the header
            # but this is corrected in fix_header for LATISS.
            self._used_these_cards("SEQNUM")
            return int(self._header["SEQNUM"])

        # This indicates a problem so we warn and return 0
        log.warning("%s: Unable to determine the observation counter so returning 0",
                    self._log_prefix)
        return 0

    @cache_translation
    def to_boresight_rotation_coord(self):
        """Coordinate frame of the boresight rotation angle.

        Only relevant for science observations.
        """
        unknown = "unknown"
        if not self.is_on_sky():
            return unknown

        self._used_these_cards("ROTCOORD")
        coord = self._header.get("ROTCOORD", unknown)
        if coord is None:
            coord = unknown
        return coord

    @cache_translation
    def to_boresight_airmass(self):
        """Calculate the airmass at the boresight at the start of observation.

        Notes
        -----
        Early data are missing the AMSTART header, so we fall back to
        calculating the airmass from ELSTART.
        """
        if not self.is_on_sky():
            return None

        # This observation should have AMSTART
        amkey = "AMSTART"
        if self.is_key_ok(amkey):
            self._used_these_cards(amkey)
            return self._header[amkey]

        # Instead we need to look at azel
        altaz = self.to_altaz_begin()
        if altaz is not None:
            return altaz.secz.to_value()

        log.warning("%s: Unable to determine airmass of a science observation, returning 1.",
                    self._log_prefix)
        return 1.0