# This file is currently part of obs_lsst but is written to allow it
# to be migrated to the astro_metadata_translator package at a later date.
#
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the LICENSE file in this directory for details of code ownership.
#
# Use of this source code is governed by a 3-clause BSD-style
# license that can be found in the LICENSE file.

"""Metadata translation support code for LSST headers"""

__all__ = ("ROLLOVERTIME", "TZERO", "LSST_LOCATION", "read_detector_ids",
           "compute_detector_exposure_id_generic", "LsstBaseTranslator")

import os.path
import yaml
import logging
import re
import datetime
import hashlib

import astropy.coordinates
import astropy.units as u
from astropy.time import Time, TimeDelta
from astropy.coordinates import EarthLocation

from lsst.utils import getPackageDir

from astro_metadata_translator import cache_translation, FitsTranslator
from astro_metadata_translator.translators.helpers import tracking_from_degree_headers, \
    altaz_from_degree_headers

# LSST day clock starts at UTC+8
ROLLOVERTIME = TimeDelta(8*60*60, scale="tai", format="sec")
TZERO = Time("2015-01-01T00:00", format="isot", scale="utc")
TZERO_DATETIME = TZERO.to_datetime()

# Regex to use for parsing a GROUPID string
GROUP_RE = re.compile(r"^(\d\d\d\d\-\d\d\-\d\dT\d\d:\d\d:\d\d)\.(\d\d\d)(?:[\+#](\d+))?$")
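# Illustrative note (sample string, not taken from real data): a GROUPID such
# as "2020-01-01T01:02:03.456+12" would match with groups
# ("2020-01-01T01:02:03", "456", "12"); the trailing extension is optional
# and may be introduced by either "+" or "#".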

# LSST Default location in the absence of headers
LSST_LOCATION = EarthLocation.from_geodetic(-70.749417, -30.244639, 2663.0)

obs_lsst_packageDir = getPackageDir("obs_lsst")

log = logging.getLogger(__name__)


def read_detector_ids(policyFile):
    """Read a camera policy file and retrieve the mapping from CCD name
    to ID.

    Parameters
    ----------
    policyFile : `str`
        Name of YAML policy file to read, relative to the obs_lsst
        package.

    Returns
    -------
    mapping : `dict` of `str` to (`int`, `str`)
        A `dict` with keys being the full names of the detectors, and the
        value is a `tuple` containing the integer detector number and the
        detector serial number.

    Notes
    -----
    Reads the camera YAML definition file directly and extracts just the
    IDs and serials. This routine does not use the standard
    `~lsst.obs.base.yamlCamera.YAMLCamera` infrastructure or
    `lsst.afw.cameraGeom`. This is because the translators are intended to
    have minimal dependencies on LSST infrastructure.
    """

    file = os.path.join(obs_lsst_packageDir, policyFile)
    try:
        with open(file) as fh:
            # Use the fast parser since these files are large
            camera = yaml.load(fh, Loader=yaml.CSafeLoader)
    except OSError as e:
        raise ValueError(f"Could not load camera policy file {file}") from e

    mapping = {}
    for ccd, value in camera["CCDs"].items():
        mapping[ccd] = (int(value["id"]), value["serial"])

    return mapping
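

# For orientation, an illustrative sketch (not a real policy file): the YAML
# read above only needs to provide a top-level "CCDs" mapping in which each
# detector entry carries at least "id" and "serial" keys, e.g.
#
#     CCDs:
#       R22_S11:          # full detector name used as the mapping key
#         id: 94          # made-up detector number
#         serial: ITL-XXXX  # made-up serial string
#
# Any other per-detector information in the camera definition is ignored here.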


def compute_detector_exposure_id_generic(exposure_id, detector_num, max_num=1000, mode="concat"):
    """Compute the detector_exposure_id from the exposure id and the
    detector number.

    Parameters
    ----------
    exposure_id : `int`
        The exposure ID.
    detector_num : `int`
        The detector number.
    max_num : `int`, optional
        Maximum number of detectors to make space for. Defaults to 1000.
    mode : `str`, optional
        Computation mode. Defaults to "concat".
        - concat : Concatenate the exposure ID and detector number, making
          sure that there is space for max_num and zero padding.
        - multiply : Multiply the exposure ID by the maximum detector
          number and add the detector number.

    Returns
    -------
    detector_exposure_id : `int`
        Computed ID.

    Raises
    ------
    ValueError
        The detector number is out of range.
    """

    if detector_num is None:
        raise ValueError("Detector number must be defined.")
    if detector_num > max_num or detector_num < 0:
        raise ValueError(f"Detector number out of range 0 <= {detector_num} <= {max_num}")

    if mode == "concat":
        npad = len(str(max_num))
        return int(f"{exposure_id}{detector_num:0{npad}d}")
    elif mode == "multiply":
        return max_num*exposure_id + detector_num
    else:
        raise ValueError(f"Computation mode of '{mode}' is not understood")


class LsstBaseTranslator(FitsTranslator):
    """Translation methods useful for all LSST-style headers."""

    _const_map = {}
    _trivial_map = {}

    # Do not specify a name for this translator
    cameraPolicyFile = None
    """Path to policy file relative to obs_lsst root."""

    detectorMapping = None
    """Mapping of detector name to detector number and serial."""

    detectorSerials = None
    """Mapping of detector serial number to raft, number, and name."""

    DETECTOR_MAX = 999
    """Maximum number of detectors to use when calculating the
    detector_exposure_id."""

    _DEFAULT_LOCATION = LSST_LOCATION
    """Default telescope location in absence of relevant FITS headers."""

    @classmethod
    def __init_subclass__(cls, **kwargs):
        """Ensure that subclasses clear their own detector mapping entries
        such that subclasses of translators that use detector mappings
        do not pick up the incorrect values from a parent."""

        cls.detectorMapping = None
        cls.detectorSerials = None

        super().__init_subclass__(**kwargs)

    def search_paths(self):
        """Search paths to use for LSST data when looking for header correction
        files.

        Returns
        -------
        path : `list`
            List with a single element containing the full path to the
            ``corrections`` directory within the ``obs_lsst`` package.
        """
        return [os.path.join(obs_lsst_packageDir, "corrections")]

    @classmethod
    def compute_detector_exposure_id(cls, exposure_id, detector_num):
        """Compute the detector exposure ID from detector number and
        exposure ID.

        This is a helper method to allow code working outside the translator
        infrastructure to use the same algorithm.

        Parameters
        ----------
        exposure_id : `int`
            Unique exposure ID.
        detector_num : `int`
            Detector number.

        Returns
        -------
        detector_exposure_id : `int`
            The calculated ID.
        """
        return compute_detector_exposure_id_generic(exposure_id, detector_num,
                                                    max_num=cls.DETECTOR_MAX,
                                                    mode="concat")

    @classmethod
    def max_detector_exposure_id(cls):
        """The maximum detector exposure ID expected to be generated by
        this instrument.

        Returns
        -------
        max_id : `int`
            The maximum value.
        """
        max_exposure_id = cls.max_exposure_id()
        return cls.compute_detector_exposure_id(max_exposure_id, cls.DETECTOR_MAX)

    @classmethod
    def max_exposure_id(cls):
        """The maximum exposure ID expected from this instrument.

        Returns
        -------
        max_exposure_id : `int`
            The maximum value.
        """
        max_date = "2050-12-31T23:59.999"
        max_seqnum = 99_999
        max_controller = "C"  # This controller triggers the largest numbers
        return cls.compute_exposure_id(max_date, max_seqnum, max_controller)

    @classmethod
    def detector_mapping(cls):
        """Returns the mapping of full name to detector ID and serial.

        Returns
        -------
        mapping : `dict` of `str`:`tuple`
            Returns the mapping of full detector name (group+detector)
            to detector number and serial.

        Raises
        ------
        ValueError
            Raised if no camera policy file has been registered with this
            translation class.

        Notes
        -----
        Will construct the mapping if none has previously been constructed.
        """
        if cls.cameraPolicyFile is not None:
            if cls.detectorMapping is None:
                cls.detectorMapping = read_detector_ids(cls.cameraPolicyFile)
        else:
            raise ValueError(f"Translation class '{cls.__name__}' has no registered camera policy file")

        return cls.detectorMapping

    @classmethod
    def detector_serials(cls):
        """Obtain the mapping of detector serial to detector group, name,
        and number.

        Returns
        -------
        info : `dict` of `tuple` of (`str`, `str`, `int`)
            A `dict` with the serial numbers as keys and values of detector
            group, name, and number.
        """
        if cls.detectorSerials is None:
            detector_mapping = cls.detector_mapping()

            if detector_mapping is not None:
                # Form mapping to go from serial number to names/numbers
                serials = {}
                for fullname, (id, serial) in cls.detectorMapping.items():
                    raft, detector_name = fullname.split("_")
                    if serial in serials:
                        raise RuntimeError(f"Serial {serial} is defined in multiple places")
                    serials[serial] = (raft, detector_name, id)
                cls.detectorSerials = serials
            else:
                raise RuntimeError("Unable to obtain detector mapping information")

        return cls.detectorSerials

    @classmethod
    def compute_detector_num_from_name(cls, detector_group, detector_name):
        """Helper method to return the detector number from the name.

        Parameters
        ----------
        detector_group : `str`
            Name of the detector grouping. This is generally the raft name.
        detector_name : `str`
            Detector name.

        Returns
        -------
        num : `int`
            Detector number.
        """
        fullname = f"{detector_group}_{detector_name}"

        num = None
        detector_mapping = cls.detector_mapping()
        if detector_mapping is None:
            raise RuntimeError("Unable to obtain detector mapping information")

        if fullname in detector_mapping:
            num = detector_mapping[fullname]
        else:
            log.warning(f"Unable to determine detector number from detector name {fullname}")
            return None

        return num[0]

    @classmethod
    def compute_detector_info_from_serial(cls, detector_serial):
        """Helper method to return the detector information from the serial.

        Parameters
        ----------
        detector_serial : `str`
            Detector serial ID.

        Returns
        -------
        info : `tuple` of (`str`, `str`, `int`)
            Detector group, name, and number.
        """
        serial_mapping = cls.detector_serials()
        if serial_mapping is None:
            raise RuntimeError("Unable to obtain serial mapping information")

        if detector_serial in serial_mapping:
            info = serial_mapping[detector_serial]
        else:
            raise RuntimeError("Unable to determine detector information from detector serial"
                               f" {detector_serial}")

        return info

    @staticmethod
    def compute_exposure_id(dayobs, seqnum, controller=None):
        """Helper method to calculate the exposure_id.

        Parameters
        ----------
        dayobs : `str`
            Day of observation in either YYYYMMDD or YYYY-MM-DD format.
            If the string looks like ISO format it will be truncated at the
            ``T`` before being handled.
        seqnum : `int` or `str`
            Sequence number.
        controller : `str`, optional
            Controller to use. If this is "O", no change is made to the
            exposure ID. If it is "C", 1000 is added to the year component
            of the exposure ID.
            `None` indicates that the controller is not relevant to the
            exposure ID calculation (generally this is the case for test
            stand data).

        Returns
        -------
        exposure_id : `int`
            Exposure ID in YYYYMMDDnnnnn form.
        """
        if "T" in dayobs:
            dayobs = dayobs[:dayobs.find("T")]

        dayobs = dayobs.replace("-", "")

        if len(dayobs) != 8:
            raise ValueError(f"Malformed dayobs: {dayobs}")

        # Expect no more than 99,999 exposures in a day
        maxdigits = 5
        if seqnum >= 10**maxdigits:
            raise ValueError(f"Sequence number ({seqnum}) exceeds limit")

        # Camera control changes the exposure ID
        if controller is not None:
            if controller == "O":
                pass
            elif controller == "C":
                # Add 1000 to the year component
                dayobs = int(dayobs)
                dayobs += 1000_00_00
            else:
                raise ValueError(f"Supplied controller, '{controller}' is neither 'O' nor 'C'")

        # Form the number as a string, zero padding the sequence number
        idstr = f"{dayobs}{seqnum:0{maxdigits}d}"

        # Exposure ID has to be an integer
        return int(idstr)

    def _is_on_mountain(self):
        """Indicate whether these data are coming from the instrument
        installed on the mountain.

        Returns
        -------
        is : `bool`
            `True` if the instrument is on the mountain.
        """
        if "TSTAND" in self._header:
            return False
        return True

    def is_on_sky(self):
        """Determine if this is an on-sky observation.

        Returns
        -------
        is_on_sky : `bool`
            Returns True if this is an observation on sky on the
            summit.
        """
        # For LSST we think on sky unless tracksys is local
        if self.is_key_ok("TRACKSYS"):
            if self._header["TRACKSYS"].lower() == "local":
                # not on sky
                return False

        # These are obviously not on sky
        if self.to_observation_type() in ("bias", "dark", "flat"):
            return False

        return self._is_on_mountain()

    @cache_translation
    def to_location(self):
        # Docstring will be inherited. Property defined in properties.py
        if not self._is_on_mountain():
            return None
        try:
            # Try standard FITS headers
            return super().to_location()
        except KeyError:
            return self._DEFAULT_LOCATION

    @cache_translation
    def to_datetime_begin(self):
        # Docstring will be inherited. Property defined in properties.py
        self._used_these_cards("MJD-OBS")
        return Time(self._header["MJD-OBS"], scale="tai", format="mjd")

    @cache_translation
    def to_datetime_end(self):
        # Docstring will be inherited. Property defined in properties.py
        if self.is_key_ok("DATE-END"):
            return super().to_datetime_end()

        return self.to_datetime_begin() + self.to_exposure_time()

    @cache_translation
    def to_detector_num(self):
        # Docstring will be inherited. Property defined in properties.py
        raft = self.to_detector_group()
        detector = self.to_detector_name()
        return self.compute_detector_num_from_name(raft, detector)

    @cache_translation
    def to_detector_exposure_id(self):
        # Docstring will be inherited. Property defined in properties.py
        exposure_id = self.to_exposure_id()
        num = self.to_detector_num()
        return self.compute_detector_exposure_id(exposure_id, num)

    @cache_translation
    def to_observation_type(self):
        # Docstring will be inherited. Property defined in properties.py
        obstype = self._header["IMGTYPE"]
        self._used_these_cards("IMGTYPE")
        obstype = obstype.lower()
        if obstype in ("skyexp", "object"):
            obstype = "science"
        return obstype

    @cache_translation
    def to_dark_time(self):
        """Calculate the dark time.

        If a DARKTIME header is not found, the value is assumed to be
        identical to the exposure time.

        Returns
        -------
        dark : `astropy.units.Quantity`
            The dark time in seconds.
        """
        if self.is_key_ok("DARKTIME"):
            darktime = self._header["DARKTIME"]*u.s
            self._used_these_cards("DARKTIME")
        else:
            log.warning("%s: Unable to determine dark time. Setting from exposure time.",
                        self.to_observation_id())
            darktime = self.to_exposure_time()
        return darktime

    @cache_translation
    def to_exposure_id(self):
        """Generate a unique exposure ID number.

        This is a combination of DAYOBS and SEQNUM, and optionally
        CONTRLLR.

        Returns
        -------
        exposure_id : `int`
            Unique exposure number.
        """
        if "CALIB_ID" in self._header:
            self._used_these_cards("CALIB_ID")
            return None

        dayobs = self._header["DAYOBS"]
        seqnum = self._header["SEQNUM"]
        self._used_these_cards("DAYOBS", "SEQNUM")

        if self.is_key_ok("CONTRLLR"):
            controller = self._header["CONTRLLR"]
            self._used_these_cards("CONTRLLR")
        else:
            controller = None

        return self.compute_exposure_id(dayobs, seqnum, controller=controller)

    @cache_translation
    def to_visit_id(self):
        """Calculate the visit associated with this exposure.

        Notes
        -----
        For LATISS and LSSTCam the default visit is derived from the
        exposure group. For other instruments we return the exposure_id.
        """

        exposure_group = self.to_exposure_group()
        # If the group is an int we return it
        try:
            visit_id = int(exposure_group)
            return visit_id
        except ValueError:
            pass

        # A group is defined as an ISO date with an extension.
        # The integer must be the same for a given group so we can never
        # use datetime_begin.
        # Nominally a GROUPID looks like "ISODATE+N" where the +N is
        # optional. This can be converted to seconds since epoch with
        # an adjustment for N.
        # For early data lacking that form we hash the group and return
        # the int.
        matches_date = GROUP_RE.match(exposure_group)
        if matches_date:
            iso_str = matches_date.group(1)
            fraction = matches_date.group(2)
            n = matches_date.group(3)
            if n is not None:
                n = int(n)
            else:
                n = 0
            iso = datetime.datetime.strptime(iso_str, "%Y-%m-%dT%H:%M:%S")

            tdelta = iso - TZERO_DATETIME
            epoch = int(tdelta.total_seconds())

            # Form the integer from EPOCH + 3 DIGIT FRAC + 0-pad N
            visit_id = int(f"{epoch}{fraction}{n:04d}")
        else:
            # Non-standard string so convert to numbers
            # using a hash function. Use the first N hex digits
            group_bytes = exposure_group.encode("us-ascii")
            hasher = hashlib.blake2b(group_bytes)
            # Need to be big enough it does not possibly clash with the
            # date-based version above
            digest = hasher.hexdigest()[:14]
            visit_id = int(digest, base=16)

            # To help with hash collision, append the string length
            visit_id = int(f"{visit_id}{len(exposure_group):02d}")

        return visit_id

    @cache_translation
    def to_physical_filter(self):
        """Calculate the physical filter name.

        Returns
        -------
        filter : `str`
            Name of filter. Can be a combination of FILTER, FILTER1 and
            FILTER2 headers joined by a "+". Returns "NONE" if no filter
            is declared.
        """
        joined = self._join_keyword_values(["FILTER", "FILTER1", "FILTER2"], delim="+")
        if not joined:
            joined = "NONE"

        return joined

    @cache_translation
    def to_tracking_radec(self):
        if not self.is_on_sky():
            return None

        # RA/DEC are *derived* headers and for the case where the DATE-BEG
        # is 1970 they are garbage and should not be used.
        if self._header["DATE-OBS"] == self._header["DATE"]:
            # A fixed up date -- use AZEL as source of truth
            altaz = self.to_altaz_begin()
            radec = astropy.coordinates.SkyCoord(altaz.transform_to(astropy.coordinates.ICRS),
                                                 obstime=altaz.obstime,
                                                 location=altaz.location)
        else:
            radecsys = ("RADESYS",)
            radecpairs = (("RASTART", "DECSTART"), ("RA", "DEC"))
            radec = tracking_from_degree_headers(self, radecsys, radecpairs)

        return radec

    @cache_translation
    def to_altaz_begin(self):
        if not self._is_on_mountain():
            return None

        # ALTAZ always relevant unless bias or dark
        if self.to_observation_type() in ("bias", "dark"):
            return None

        return altaz_from_degree_headers(self, (("ELSTART", "AZSTART"),),
                                         self.to_datetime_begin(), is_zd=False)

    @cache_translation
    def to_exposure_group(self):
        """Calculate the exposure group string.

        For LSSTCam and LATISS this is read from the ``GROUPID`` header.
        If that header is missing the exposure_id is returned instead as
        a string.
        """
        if self.is_key_ok("GROUPID"):
            exposure_group = self._header["GROUPID"]
            self._used_these_cards("GROUPID")
            return exposure_group
        return super().to_exposure_group()
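

# A brief illustrative sketch (not part of the original module): it shows how
# the class-level helpers compose, using a made-up day of observation and
# sequence number. It is never called by the translator code itself.
def _example_lsst_exposure_ids():
    # With the "O" controller the exposure ID is DAYOBS followed by the
    # zero-padded SEQNUM: 20200315 and 123 -> 2020031500123.
    exposure_id = LsstBaseTranslator.compute_exposure_id("2020-03-15", 123,
                                                         controller="O")
    # The "C" controller adds 1000 to the year component: 3020031500123.
    ccs_exposure_id = LsstBaseTranslator.compute_exposure_id("2020-03-15", 123,
                                                             controller="C")
    # Folding in a detector number pads to the width of DETECTOR_MAX (999),
    # giving 2020031500123042 for detector 42.
    detector_exposure_id = LsstBaseTranslator.compute_detector_exposure_id(
        exposure_id, 42)
    return exposure_id, ccs_exposure_id, detector_exposure_id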