Coverage for python/lsst/summit/utils/utils.py: 18%

353 statements  

« prev     ^ index     » next       coverage.py v7.4.1, created at 2024-02-18 12:42 +0000

1# This file is part of summit_utils. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <https://www.gnu.org/licenses/>. 

21 

22import os 

23from typing import Iterable 

24import numpy as np 

25import logging 

26from scipy.ndimage import gaussian_filter 

27import lsst.afw.image as afwImage 

28import lsst.afw.detection as afwDetect 

29from lsst.afw.detection import Footprint, FootprintSet 

30import lsst.afw.math as afwMath 

31import lsst.daf.base as dafBase 

32import lsst.geom as geom 

33import lsst.pipe.base as pipeBase 

34import lsst.utils.packages as packageUtils 

35from lsst.daf.butler.cli.cliLog import CliLog 

36import datetime 

37from dateutil.tz import gettz 

38 

39from lsst.obs.lsst.translators.lsst import FILTER_DELIMITER 

40from lsst.obs.lsst.translators.latiss import AUXTEL_LOCATION 

41 

42from astro_metadata_translator import ObservationInfo 

43from astropy.coordinates import SkyCoord, AltAz 

44from astropy.coordinates.earth import EarthLocation 

45import astropy.units as u 

46from astropy.time import Time 

47 

48from .astrometry.utils import genericCameraHeaderToWcs 

49 

# Public API of this module. NOTE(review): some module-level helpers (e.g.
# fluxesFromFootprints, fluxFromFootprint, checkStackSetup) are not listed
# here and so are excluded from star-imports — confirm this is intentional.
__all__ = ["SIGMATOFWHM",
           "FWHMTOSIGMA",
           "EFD_CLIENT_MISSING_MSG",
           "GOOGLE_CLOUD_MISSING_MSG",
           "AUXTEL_LOCATION",
           "countPixels",
           "quickSmooth",
           "argMax2d",
           "getImageStats",
           "detectObjectsInExp",
           "humanNameForCelestialObject",
           "getFocusFromHeader",
           "dayObsIntToString",
           "dayObsSeqNumToVisitId",
           "setupLogging",
           "getCurrentDayObs_datetime",
           "getCurrentDayObs_int",
           "getCurrentDayObs_humanStr",
           "getSite",
           "getExpPositionOffset",
           "starTrackerFileToExposure",
           "getAirmassSeeingCorrection",
           "getFilterSeeingCorrection",
           "getCdf",
           "getQuantiles",
           "digitizeData",
           ]

77 

78 

# Conversion between Gaussian sigma and FWHM: FWHM = 2*sqrt(2*ln(2)) * sigma
SIGMATOFWHM = 2.0*np.sqrt(2.0*np.log(2.0))
FWHMTOSIGMA = 1/SIGMATOFWHM

# User-facing hint printed when the optional lsst_efd_client package is absent
EFD_CLIENT_MISSING_MSG = ('ImportError: lsst_efd_client not found. Please install with:\n'
                          '    pip install lsst-efd-client')

# User-facing hint printed when the optional google-cloud-storage package is absent
GOOGLE_CLOUD_MISSING_MSG = ('ImportError: Google cloud storage not found. Please install with:\n'
                            '    pip install google-cloud-storage')

87 

88 

def countPixels(maskedImage, maskPlane):
    """Count how many pixels in an image have a given mask bit set.

    Parameters
    ----------
    maskedImage : `lsst.afw.image.MaskedImage`
        The masked image,
    maskPlane : `str`
        The name of the bitmask.

    Returns
    -------
    count : `int`
        The number of pixels in with the selected mask bit
    """
    bitMask = maskedImage.mask.getPlaneBitMask(maskPlane)
    # a pixel is counted when any of the selected bits are set
    return int(np.count_nonzero(np.bitwise_and(maskedImage.mask.array, bitMask)))

106 

107 

def quickSmooth(data, sigma=2):
    """Gaussian-smooth an image for display purposes.

    Not to be used for scientific purposes, but improves the stretch and
    visual rendering of low SNR against the sky background in cutouts.

    Parameters
    ----------
    data : `np.array`
        The image data to smooth
    sigma : `float`, optional
        The size of the smoothing kernel.

    Returns
    -------
    smoothData : `np.array`
        The smoothed data
    """
    # the same sigma is applied along both image axes
    return gaussian_filter(data, (sigma, sigma), mode='constant')

129 

130 

def argMax2d(array):
    """Get the index of the max value of an array and whether it's unique.

    If its not unique, returns a list of the other locations containing the
    maximum value, e.g. returns

    (12, 34), False, [(56,78), (910, 1112)]

    Parameters
    ----------
    array : `np.array`
        The data

    Returns
    -------
    maxLocation : `tuple`
        The coords of the first instance of the max value
    unique : `bool`
        Whether it's the only location
    otherLocations : `list` of `tuple`
        List of the other max values' locations, empty if False
    """
    maximum = np.max(array)
    # zip the per-axis index arrays into a list of coordinate tuples
    locations = list(zip(*np.where(array == maximum)))
    isUnique = len(locations) == 1
    return locations[0], isUnique, locations[1:]

160 

161 

def dayObsIntToString(dayObs):
    """Convert an integer dayObs to a dash-delimited string.

    e.g. convert the hard to read 20210101 to 2021-01-01

    Parameters
    ----------
    dayObs : `int`
        The dayObs.

    Returns
    -------
    dayObs : `str`
        The dayObs as a string.

    Raises
    ------
    ValueError
        Raised if dayObs is not an 8-digit integer.
    """
    # raise instead of assert so the validation survives running with
    # python -O, and to match the ValueError raised by dayObsSeqNumToVisitId
    if not isinstance(dayObs, int):
        raise ValueError(f"dayObs must be an int, got {dayObs!r}")
    dStr = str(dayObs)
    if len(dStr) != 8:
        raise ValueError(f"dayObs must be an 8-digit integer, got {dayObs}")
    return '-'.join([dStr[0:4], dStr[4:6], dStr[6:8]])

181 

182 

def dayObsSeqNumToVisitId(dayObs, seqNum):
    """Get the visit id for a given dayObs/seqNum.

    Parameters
    ----------
    dayObs : `int`
        The dayObs.
    seqNum : `int`
        The seqNum.

    Returns
    -------
    visitId : `int`
        The visitId.

    Raises
    ------
    ValueError
        Raised if the dayObs is outside the plausible range.

    Notes
    -----
    TODO: Remove this horrible hack once DM-30948 makes this possible
    programatically/via the butler.
    """
    if not 19700101 <= dayObs <= 35000101:
        raise ValueError(f'dayObs value {dayObs} outside plausible range')
    # the visit id is the dayObs with the zero-padded seqNum appended
    return int(f"{dayObs}{seqNum:05}")

206 

207 

def getImageStats(exp):
    """Calculate a grab-bag of stats for an image. Must remain fast.

    Parameters
    ----------
    exp : `lsst.afw.image.Exposure`
        The input exposure.

    Returns
    -------
    stats : `lsst.pipe.base.Struct`
        A container with attributes containing measurements and statistics
        for the image.
    """
    result = pipeBase.Struct()

    vi = exp.visitInfo
    expTime = vi.exposureTime
    md = exp.getMetadata()

    result.object = vi.object
    result.mjd = vi.getDate().get()

    # physical labels look like "<filter><delimiter><grating>"; guard against
    # a label with no delimiter rather than raising an IndexError
    filterParts = exp.filter.physicalLabel.split(FILTER_DELIMITER)
    filt = filterParts[0]
    grating = filterParts[1] if len(filterParts) > 1 else ''

    airmass = vi.getBoresightAirmass()
    rotangle = vi.getBoresightRotAngle().asDegrees()

    azAlt = vi.getBoresightAzAlt()
    az = azAlt[0].asDegrees()
    el = azAlt[1].asDegrees()

    result.expTime = expTime
    result.filter = filt
    result.grating = grating
    result.airmass = airmass
    result.rotangle = rotangle
    result.az = az
    result.el = el
    result.focus = md.get('FOCUSZ')

    data = exp.image.array
    result.maxValue = np.max(data)

    peak, uniquePeak, otherPeaks = argMax2d(data)
    result.maxPixelLocation = peak
    # argMax2d returns whether the peak *is* unique, so negate it here:
    # previously the unique flag was stored un-negated as multipleMaxPixels
    result.multipleMaxPixels = not uniquePeak

    result.nBadPixels = countPixels(exp.maskedImage, 'BAD')
    result.nSatPixels = countPixels(exp.maskedImage, 'SAT')
    result.percentile99 = np.percentile(data, 99)
    result.percentile9999 = np.percentile(data, 99.99)

    # 5-sigma, 2-iteration clipped mean/stddev of the full masked image
    sctrl = afwMath.StatisticsControl()
    sctrl.setNumSigmaClip(5)
    sctrl.setNumIter(2)
    statTypes = afwMath.MEANCLIP | afwMath.STDEVCLIP
    stats = afwMath.makeStatistics(exp.maskedImage, statTypes, sctrl)
    std, _ = stats.getResult(afwMath.STDEVCLIP)
    mean, _ = stats.getResult(afwMath.MEANCLIP)

    result.clippedMean = mean
    result.clippedStddev = std

    return result

277 

278 

def detectObjectsInExp(exp, nSigma=10, nPixMin=10, grow=0):
    """Quick and dirty object detection for an exposure.

    Return the footPrintSet for the objects in a preferably-postISR exposure.

    Parameters
    ----------
    exp : `lsst.afw.image.Exposure`
        The exposure to detect objects in.
    nSigma : `float`
        The number of sigma for detection.
    nPixMin : `int`
        The minimum number of pixels in an object for detection.
    grow : `int`
        The number of pixels to grow the footprint by after detection.

    Returns
    -------
    footPrintSet : `lsst.afw.detection.FootprintSet`
        The set of footprints in the image.
    """
    # detect against a zeroed background, restoring the image afterwards
    backgroundLevel = np.nanmedian(exp.image.array)
    exp.image -= backgroundLevel

    threshold = afwDetect.Threshold(nSigma, afwDetect.Threshold.STDEV)
    footPrintSet = afwDetect.FootprintSet(exp.getMaskedImage(), threshold, "DETECTED", nPixMin)
    if grow > 0:
        footPrintSet = afwDetect.FootprintSet(footPrintSet, grow, True)  # isotropic grow

    exp.image += backgroundLevel  # leave the input exposure unchanged
    return footPrintSet

311 

312 

def fluxesFromFootprints(footprints, parentImage, subtractImageMedian=False):
    """Calculate the flux from a set of footprints, given the parent image,
    optionally subtracting the whole-image median from each pixel as a very
    rough background subtraction.

    Parameters
    ----------
    footprints : `lsst.afw.detection.FootprintSet` or
                 `lsst.afw.detection.Footprint` or
                 `iterable` of `lsst.afw.detection.Footprint`
        The footprints to measure.
    parentImage : `lsst.afw.image.Image`
        The parent image.
    subtractImageMedian : `bool`, optional
        Subtract a whole-image median from each pixel in the footprint when
        summing as a very crude background subtraction. Does not change the
        original image.

    Returns
    -------
    fluxes : `np.array` of `float`
        The fluxes for each footprint.

    Raises
    ------
    TypeError : raise for unsupported types.
    """
    median = 0
    if subtractImageMedian:
        median = np.nanmedian(parentImage.array)

    # poor person's single dispatch
    badTypeMsg = ("This function works with FootprintSets, single Footprints, and iterables of Footprints. "
                  f"Got {type(footprints)}: {footprints}")
    if isinstance(footprints, FootprintSet):
        footprints = footprints.getFootprints()
    elif isinstance(footprints, Footprint):
        # check for a single Footprint before the generic-iterable case in
        # case Footprint itself is iterable
        footprints = [footprints]
    elif isinstance(footprints, Iterable):
        # materialize so that generators and other non-indexable iterables
        # work; an empty input yields an empty result rather than IndexError
        footprints = list(footprints)
        if footprints and not isinstance(footprints[0], Footprint):
            raise TypeError(badTypeMsg)
    else:
        raise TypeError(badTypeMsg)

    return np.array([fluxFromFootprint(fp, parentImage, backgroundValue=median) for fp in footprints])

358 

359 

def fluxFromFootprint(footprint, parentImage, backgroundValue=0):
    """Calculate the flux in a single footprint, given the parent image,
    optionally subtracting a single value from each pixel as a very rough
    background subtraction, e.g. the image median.

    Parameters
    ----------
    footprint : `lsst.afw.detection.Footprint`
        The footprint to measure.
    parentImage : `lsst.afw.image.Image`
        Image containing the footprint.
    backgroundValue : `float`, optional
        The value to subtract from each pixel in the footprint when summing
        as a very crude background subtraction. Does not change the original
        image.

    Returns
    -------
    flux : `float`
        The flux in the footprint
    """
    if not backgroundValue:
        # no subtraction requested: take the fast direct path
        return footprint.computeFluxFromImage(parentImage)

    # subtract from a temporary array so the original image is untouched
    xy0 = parentImage.getBBox().getMin()
    return footprint.computeFluxFromArray(parentImage.array - backgroundValue, xy0)

385 

386 

def humanNameForCelestialObject(objName):
    """Returns a list of all human names for obj, or [] if none are found.

    Parameters
    ----------
    objName : `str`
        The/a name of the object.

    Returns
    -------
    names : `list` of `str`
        The names found for the object
    """
    # imported here as astroquery is an optional dependency
    from astroquery.simbad import Simbad
    try:
        names = []
        tableRows = Simbad.query_objectids(objName)
        for row in tableRows:
            identifier = row['ID']
            if identifier.startswith('NAME'):
                names.append(identifier.replace('NAME ', ''))
        return names
    except Exception:
        return []  # same behavior as for found but un-named objects

410 

411 

def _getAltAzZenithsFromSeqNum(butler, dayObs, seqNumList):
    """Get the alt, az and zenith angle for the seqNums of a given dayObs.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        The butler to query.
    dayObs : `int`
        The dayObs.
    seqNumList : `list` of `int`
        The seqNums for which to return the alt, az and zenith

    Returns
    -------
    azimuths : `list` of `float`
        List of the azimuths for each seqNum
    elevations : `list` of `float`
        List of the elevations for each seqNum
    zeniths : `list` of `float`
        List of the zenith angles for each seqNum
    """
    azimuths = []
    elevations = []
    zeniths = []
    for seqNum in seqNumList:
        md = butler.get('raw.metadata', day_obs=dayObs, seq_num=seqNum, detector=0)
        altAz = ObservationInfo(md).altaz_begin
        altitude = altAz.alt.value
        elevations.append(altitude)
        zeniths.append(90 - altitude)  # zenith angle is the complement of alt
        azimuths.append(altAz.az.value)
    return azimuths, elevations, zeniths

443 

444 

def getFocusFromHeader(exp):
    """Get the raw focus value from the header.

    Parameters
    ----------
    exp : `lsst.afw.image.exposure`
        The exposure.

    Returns
    -------
    focus : `float` or `None`
        The focus value if found, else ``None``.
    """
    metadata = exp.getMetadata()
    if 'FOCUSZ' not in metadata:
        return None
    return metadata['FOCUSZ']

462 

463 

def checkStackSetup():
    """Check which weekly tag is being used and which local packages are setup.

    Designed primarily for use in notebooks/observing, this prints the weekly
    tag(s) setup for lsst_distrib, and lists any locally setup packages and
    the path to each.

    Notes
    -----
    Uses print() instead of logger messages as this should simply print them
    without being vulnerable to any log messages potentially being diverted.
    """
    packages = packageUtils.getEnvironmentPackages(include_all=True)

    lsstDistribHashAndTags = packages['lsst_distrib']  # looks something like 'g4eae7cb9+1418867f (w_2022_13)'
    # split(maxsplit=1) keeps everything after the hash: the previous
    # split()[1] dropped all but the first token, mangling multi-tag versions
    lsstDistribTags = lsstDistribHashAndTags.split(maxsplit=1)[1]
    if len(lsstDistribTags.split()) == 1:
        tag = lsstDistribTags.replace('(', '')
        tag = tag.replace(')', '')
        print(f"You are running {tag} of lsst_distrib")
    else:  # multiple weekly tags found for lsst_distrib!
        print(f'The version of lsst_distrib you have is compatible with: {lsstDistribTags}')

    localPackages = []
    localPaths = []
    for package, tags in packages.items():
        if tags.startswith('LOCAL:'):
            path = tags.split('LOCAL:')[1]
            path = path.split('@')[0]  # don't need the git SHA etc
            localPaths.append(path)
            localPackages.append(package)

    if localPackages:
        print("\nLocally setup packages:")
        print("-----------------------")
        # pad names so the paths line up in a column
        maxLen = max(len(package) for package in localPackages)
        for package, path in zip(localPackages, localPaths):
            print(f"{package:<{maxLen}s} at {path}")
    else:
        print("\nNo locally setup packages (using a vanilla stack)")

504 

505 

def setupLogging(longlog=False):
    """Setup logging in the same way as one would get from pipetask run.

    Code that isn't run through the butler CLI defaults to WARNING level
    messages and no logger names. This sets the behaviour to follow whatever
    the pipeline default is, currently
    <logger_name> <level>: <message> e.g.
    lsst.isr INFO: Masking defects.

    Parameters
    ----------
    longlog : `bool`, optional
        Passed through to ``CliLog.initLog`` to select the long log format.
    """
    CliLog.initLog(longlog=longlog)

516 

517 

def getCurrentDayObs_datetime():
    """Get the current day_obs - the observatory rolls the date over at UTC-12

    Returns
    -------
    dayObs : `datetime.date`
        The current dayObs, e.g. datetime.date(2022, 4, 28)
    """
    # use the stdlib UTC timezone instead of dateutil's gettz("UTC"):
    # identical result, one fewer third-party dependency on this path
    nowUtc = datetime.datetime.now(datetime.timezone.utc)
    offset = datetime.timedelta(hours=-12)
    return (nowUtc + offset).date()

528 

529 

def getCurrentDayObs_int():
    """Return the current dayObs as an int in the form 20220428
    """
    # build the YYYYMMDD integer arithmetically rather than via strftime
    today = getCurrentDayObs_datetime()
    return today.year*10000 + today.month*100 + today.day

534 

535 

def getCurrentDayObs_humanStr():
    """Return the current dayObs as a string in the form '2022-04-28'

    Returns
    -------
    dayObs : `str`
        The current dayObs as a dash-delimited string.
    """
    return dayObsIntToString(getCurrentDayObs_int())

540 

541 

def getExpRecordAge(expRecord):
    """Get the time, in seconds, since the end of exposure.

    Parameters
    ----------
    expRecord : `lsst.daf.butler.DimensionRecord`
        The exposure record.

    Returns
    -------
    age : `float`
        The age of the exposure, in seconds.
    """
    # now - end: positive for exposures that finished in the past
    return (Time.now() - expRecord.timespan.end).sec

556 

557 

def getSite():
    """Returns where the code is running.

    Returns
    -------
    location : `str`
        One of:
        ['tucson', 'summit', 'base', 'staff-rsp', 'rubin-devl', 'jenkins',
        'usdf-k8s']

    Raises
    ------
    ValueError
        Raised if location cannot be determined.
    """
    # All nublado instances guarantee that EXTERNAL_URL is set and uniquely
    # identifies it.
    nubladoSites = {
        "https://tucson-teststand.lsst.codes": 'tucson',
        "https://summit-lsp.lsst.codes": 'summit',
        "https://base-lsp.lsst.codes": 'base',
        "https://usdf-rsp.slac.stanford.edu": 'staff-rsp',
    }
    externalUrl = os.getenv('EXTERNAL_INSTANCE_URL', "")
    if externalUrl in nubladoSites:
        return nubladoSites[externalUrl]

    # if no EXTERNAL_URL, try HOSTNAME to see if we're on the dev nodes
    # it is expected that this will be extensible to SLAC
    if os.getenv('HOSTNAME', "").startswith('sdfrome'):
        return 'rubin-devl'

    if os.getenv('JENKINS_HOME', "") != "":
        return 'jenkins'

    # we're probably inside a k8s pod doing rapid analysis work at this point
    rapidAnalysisSites = {
        "TTS": 'tucson',
        "BTS": 'base',
        "SUMMIT": 'summit',
        "USDF": 'usdf-k8s',
    }
    rapidAnalysisLocation = os.getenv('RAPID_ANALYSIS_LOCATION', "")
    if rapidAnalysisLocation in rapidAnalysisSites:
        return rapidAnalysisSites[rapidAnalysisLocation]

    # we have failed
    raise ValueError('Location could not be determined')

608 

609 

def getAltAzFromSkyPosition(skyPos, visitInfo, doCorrectRefraction=False,
                            wavelength=500.0,
                            pressureOverride=None,
                            temperatureOverride=None,
                            relativeHumidityOverride=None,
                            ):
    """Get the alt/az from the position on the sky and the time and location
    of the observation.

    The temperature, pressure and relative humidity are taken from the
    visitInfo by default, but can be individually overridden as needed. It
    should be noted that the visitInfo never contains a nominal wavelength,
    and so this takes a default value of 500nm.

    Parameters
    ----------
    skyPos : `lsst.geom.SpherePoint`
        The position on the sky.
    visitInfo : `lsst.afw.image.VisitInfo`
        The visit info containing the time of the observation.
    doCorrectRefraction : `bool`, optional
        Correct for the atmospheric refraction?
    wavelength : `float`, optional
        The nominal wavelength in nanometers (e.g. 500.0), as a float.
    pressureOverride : `float`, optional
        The pressure, in bars (e.g. 0.770), to override the value supplied in
        the visitInfo, as a float.
    temperatureOverride : `float`, optional
        The temperature, in Celsius (e.g. 10.0), to override the value
        supplied in the visitInfo, as a float.
    relativeHumidityOverride : `float`, optional
        The relativeHumidity in the range 0..1 (i.e. not as a percentage), to
        override the value supplied in the visitInfo, as a float.

    Returns
    -------
    alt : `lsst.geom.Angle`
        The altitude.
    az : `lsst.geom.Angle`
        The azimuth.
    """
    skyLocation = SkyCoord(skyPos.getRa().asRadians(), skyPos.getDec().asRadians(), unit=u.rad)
    long = visitInfo.observatory.getLongitude()
    lat = visitInfo.observatory.getLatitude()
    ele = visitInfo.observatory.getElevation()
    earthLocation = EarthLocation.from_geodetic(long.asDegrees(), lat.asDegrees(), ele)

    refractionKwargs = {}
    if doCorrectRefraction:
        # wavelength is never supplied in the visitInfo so always take this
        wavelength = wavelength * u.nm

        if pressureOverride:
            pressure = pressureOverride
        else:
            pressure = visitInfo.weather.getAirPressure()
            # ObservationInfos (which are the "source of truth") use pascals,
            # so convert from pascals to bars
            pressure /= 100000.0
        pressure = pressure*u.bar

        if temperatureOverride:
            temperature = temperatureOverride
        else:
            temperature = visitInfo.weather.getAirTemperature()
        temperature = temperature*u.deg_C

        if relativeHumidityOverride:
            relativeHumidity = relativeHumidityOverride
        else:
            relativeHumidity = visitInfo.weather.getHumidity() / 100.0  # this is in percent
        # relative humidity is dimensionless: the previous code multiplied it
        # by u.deg_C, which gave it (incorrect) temperature units
        relativeHumidity = relativeHumidity * u.dimensionless_unscaled

        refractionKwargs = dict(pressure=pressure,
                                temperature=temperature,
                                relative_humidity=relativeHumidity,
                                obswl=wavelength)

    # must go via astropy.Time because dafBase.dateTime.DateTime contains
    # the timezone, but going straight to visitInfo.date.toPython() loses this.
    obsTime = Time(visitInfo.date.toPython(), scale='tai')
    altAz = AltAz(obstime=obsTime,
                  location=earthLocation,
                  **refractionKwargs)

    obsAltAz = skyLocation.transform_to(altAz)
    alt = geom.Angle(obsAltAz.alt.degree, geom.degrees)
    az = geom.Angle(obsAltAz.az.degree, geom.degrees)

    return alt, az

700 

701 

def getExpPositionOffset(exp1, exp2, useWcs=True, allowDifferentPlateScales=False):
    """Get the change in sky position between two exposures.

    Given two exposures, calculate the offset on the sky between the images.
    If useWcs then use the (fitted or unfitted) skyOrigin from their WCSs, and
    calculate the alt/az from the observation times, otherwise use the nominal
    values in the exposures' visitInfos. Note that if using the visitInfo
    values that for a given pointing the ra/dec will be ~identical, regardless
    of whether astrometric fitting has been performed.

    Values are given as exp1-exp2.

    Parameters
    ----------
    exp1 : `lsst.afw.image.Exposure`
        The first exposure.
    exp2 : `lsst.afw.image.Exposure`
        The second exposure.
    useWcs : `bool`
        Use the WCS for the ra/dec and alt/az if True, else use the nominal/
        boresight values from the exposures' visitInfos.
    allowDifferentPlateScales : `bool`, optional
        Use to disable checking that plate scales are the same. Generally,
        differing plate scales would indicate an error, but where
        blind-solving has been undertaken during commissioning plate scales
        can be different enough to warrant setting this to ``True``.

    Returns
    -------
    offsets : `lsst.pipe.base.Struct`
        A struct containing the offsets:
        ``deltaRa``
            The diference in ra (`lsst.geom.Angle`)
        ``deltaDec``
            The diference in dec (`lsst.geom.Angle`)
        ``deltaAlt``
            The diference in alt (`lsst.geom.Angle`)
        ``deltaAz``
            The diference in az (`lsst.geom.Angle`)
        ``deltaPixels``
            The diference in pixels (`float`)
    """

    wcs1 = exp1.getWcs()
    wcs2 = exp2.getWcs()
    # deltaPixels below is expressed in exp1's plate scale
    pixScaleArcSec = wcs1.getPixelScale().asArcseconds()
    if not allowDifferentPlateScales:
        # NOTE(review): assert is stripped under python -O; a ValueError
        # would make this check unconditional
        assert np.isclose(pixScaleArcSec, wcs2.getPixelScale().asArcseconds()), \
            "Pixel scales in the exposures differ."

    if useWcs:
        # use the (possibly fitted) WCS sky origins
        p1 = wcs1.getSkyOrigin()
        p2 = wcs2.getSkyOrigin()
        alt1, az1 = getAltAzFromSkyPosition(p1, exp1.getInfo().getVisitInfo())
        alt2, az2 = getAltAzFromSkyPosition(p2, exp2.getInfo().getVisitInfo())
        ra1 = p1[0]
        ra2 = p2[0]
        dec1 = p1[1]
        dec2 = p2[1]
    else:
        # nominal boresight values straight from the visitInfos
        az1 = exp1.visitInfo.boresightAzAlt[0]
        az2 = exp2.visitInfo.boresightAzAlt[0]
        alt1 = exp1.visitInfo.boresightAzAlt[1]
        alt2 = exp2.visitInfo.boresightAzAlt[1]

        ra1 = exp1.visitInfo.boresightRaDec[0]
        ra2 = exp2.visitInfo.boresightRaDec[0]
        dec1 = exp1.visitInfo.boresightRaDec[1]
        dec2 = exp2.visitInfo.boresightRaDec[1]

        # p1/p2 feed the angular separation below, mirroring the skyOrigin
        # points set in the useWcs branch
        p1 = exp1.visitInfo.boresightRaDec
        p2 = exp2.visitInfo.boresightRaDec

    angular_offset = p1.separation(p2).asArcseconds()
    deltaPixels = angular_offset / pixScaleArcSec

    # wrapNear(0) keeps the differences near zero so that small moves across
    # the 0/360 boundary don't show up as ~360 degree offsets
    ret = pipeBase.Struct(deltaRa=(ra1-ra2).wrapNear(geom.Angle(0.0)),
                          deltaDec=dec1-dec2,
                          deltaAlt=alt1-alt2,
                          deltaAz=(az1-az2).wrapNear(geom.Angle(0.0)),
                          deltaPixels=deltaPixels
                          )

    return ret

786 

787 

def starTrackerFileToExposure(filename, logger=None):
    """Read the exposure from the file and set the wcs from the header.

    Parameters
    ----------
    filename : `str`
        The full path to the file.
    logger : `logging.Logger`, optional
        The logger to use for errors, created if not supplied.

    Returns
    -------
    exp : `lsst.afw.image.Exposure`
        The exposure.
    """
    if not logger:
        logger = logging.getLogger(__name__)
    exp = afwImage.ExposureF(filename)
    try:
        wcs = genericCameraHeaderToWcs(exp)
        exp.setWcs(wcs)
    except Exception as e:
        # best-effort: an exposure without a wcs is still returned
        logger.warning(f"Failed to set wcs from header: {e}")

    # for some reason the date isn't being set correctly
    # DATE-OBS is present in the original header, but it's being
    # stripped out and somehow not set (plus it doesn't give the midpoint
    # of the exposure), so set it manually from the midpoint here
    try:
        md = exp.getMetadata()
        begin = datetime.datetime.fromisoformat(md['DATE-BEG'])
        end = datetime.datetime.fromisoformat(md['DATE-END'])
        duration = end - begin
        mid = begin + duration/2
        # NOTE(review): the midpoint is tagged as TAI here — confirm that
        # DATE-BEG/DATE-END in these headers really are TAI rather than UTC
        newTime = dafBase.DateTime(mid.isoformat(), dafBase.DateTime.Timescale.TAI)
        newVi = exp.visitInfo.copyWith(date=newTime)
        exp.info.setVisitInfo(newVi)
    except Exception as e:
        logger.warning(f"Failed to set date from header: {e}")

    return exp

829 

830 

def obsInfoToDict(obsInfo):
    """Convert an ObservationInfo to a dict.

    Parameters
    ----------
    obsInfo : `astro_metadata_translator.ObservationInfo`
        The ObservationInfo to convert.

    Returns
    -------
    obsInfoDict : `dict`
        The ObservationInfo as a dict.
    """
    # iterating the mapping directly yields its keys; .keys() was redundant
    return {prop: getattr(obsInfo, prop) for prop in obsInfo.all_properties}

845 

846 

def getFieldNameAndTileNumber(field, warn=True, logger=None):
    """Split an observed field name into its base name and tile number.

    The tile number is assumed to always be appended, with an underscore, to
    the rest of the field name. Returns the name and number as a tuple, or
    the name unchanged if no tile number is found.

    Parameters
    ----------
    field : `str`
        The name of the field
    warn : `bool`, optional
        Log a warning when no tile number can be determined.
    logger : `logging.Logger`, optional
        The logger to warn with, created if needed and not supplied.

    Returns
    -------
    fieldName : `str`
        The name of the field without the trailing tile number, if present.
    tileNum : `int`
        The number of the tile, as an integer, or ``None`` if not found.
    """
    if warn and not logger:
        logger = logging.getLogger('lsst.summit.utils.utils.getFieldNameAndTileNumber')

    # rpartition splits on the *last* underscore, which delimits the tile
    fieldName, separator, tail = field.rpartition('_')
    if not separator:
        if warn:
            logger.warning(f"Field {field} does not contain an underscore,"
                           " so cannot determine the tile number.")
        return field, None

    try:
        tileNum = int(tail)
    except ValueError:
        if warn:
            logger.warning(f"Field {field} does not contain only an integer after the final underscore"
                           " so cannot determine the tile number.")
        return field, None

    return fieldName, tileNum

885 

886 

def getAirmassSeeingCorrection(airmass):
    """Get the correction factor for seeing due to airmass.

    Parameters
    ----------
    airmass : `float`
        The airmass, greater than or equal to 1.

    Returns
    -------
    correctionFactor : `float`
        The correction factor to apply to the seeing.

    Raises
    ------
    ValueError
        Raised for unphysical airmasses (less than 1).
    """
    if airmass >= 1:
        # standard X^-3/5 seeing scaling with airmass
        return airmass**(-0.6)
    raise ValueError(f"Invalid airmass: {airmass}")

907 

908 

def getFilterSeeingCorrection(filterName):
    """Get the correction factor for seeing due to a filter.

    Parameters
    ----------
    filterName : `str`
        The name of the filter, e.g. 'SDSSg_65mm'.

    Returns
    -------
    correctionFactor : `float`
        The correction factor to apply to the seeing.

    Raises
    ------
    ValueError
        Raised for unknown filters.
    """
    # effective wavelengths in nm; correction scales as (lambda/500nm)^0.2
    effectiveWavelengths = {
        'SDSSg_65mm': 477.,
        'SDSSr_65mm': 623.,
        'SDSSi_65mm': 762.,
    }
    if filterName not in effectiveWavelengths:
        raise ValueError(f"Unknown filter name: {filterName}")
    return (effectiveWavelengths[filterName]/500.)**0.2

935 

936 

def getCdf(data, scale, nBinsMax=300_000):
    """Return an approximate cumulative distribution function scaled to
    the [0, scale] range.

    If the input data is all nan, then the output cdf will be nan as well as
    the min and max values.

    Parameters
    ----------
    data : `np.array`
        The input data.
    scale : `int`
        The scaling range of the output.
    nBinsMax : `int`, optional
        Maximum number of bins to use.

    Returns
    -------
    cdf : `np.array` of `int`
        A monotonically increasing sequence that represents a scaled
        cumulative distribution function, starting with the value at
        minVal, then at (minVal + 1), and so on.
    minVal : `float`
        An integer smaller than the minimum value in the input data.
    maxVal : `float`
        An integer larger than the maximum value in the input data.
    """
    flat = data.ravel()
    # normalize by the number of finite samples, not the raw array size
    nValid = flat.size - np.count_nonzero(np.isnan(flat))

    minVal = np.floor(np.nanmin(flat))
    maxVal = np.ceil(np.nanmax(flat)) + 1.0

    if np.isnan(minVal) or np.isnan(maxVal):
        # nanmin/nanmax only return nan when the input is all-nan, in which
        # case no cdf can be computed
        return np.nan, np.nan, np.nan

    nBins = np.clip(int(maxVal) - int(minVal), 1, nBinsMax)
    hist, _ = np.histogram(flat, bins=nBins, range=(int(minVal), int(maxVal)))

    cdf = (scale*np.cumsum(hist)/nValid).astype(np.int64)
    return cdf, minVal, maxVal

984 

985 

def getQuantiles(data, nColors):
    """Get a set of boundaries that equally distribute data into
    nColors intervals. The output can be used to make a colormap of nColors
    colors.

    This is equivalent to using the numpy function:
    np.nanquantile(data, np.linspace(0, 1, nColors + 1))
    but with a coarser precision, yet sufficient for our use case. This
    implementation gives a significant speed-up. In the case of large
    ranges, np.nanquantile is used because it is more memory efficient.

    If all elements of ``data`` are nan then the output ``boundaries`` will
    also all be ``nan`` to keep the interface consistent.

    Parameters
    ----------
    data : `np.array`
        The input image data.
    nColors : `int`
        The number of intervals to distribute data into.

    Returns
    -------
    boundaries: `list` of `float`
        A monotonically increasing sequence of size (nColors + 1). These are
        the edges of nColors intervals.
    """
    if (np.nanmax(data) - np.nanmin(data)) > 300_000:
        # Use slower but memory efficient nanquantile
        logger = logging.getLogger(__name__)
        logger.warning("Data range is very large; using slower quantile code.")
        boundaries = np.nanquantile(data, np.linspace(0, 1, nColors + 1))
    else:
        cdf, minVal, maxVal = getCdf(data, nColors)
        if np.isnan(minVal):  # cdf calculation has failed because all data is nan
            # Return (nColors + 1) nans so the size matches the documented
            # interface and the nanquantile branch (previously this
            # returned only nColors elements — one too few).
            return np.full(nColors + 1, np.nan)

        # Data-units width represented by each cdf bin.
        scale = (maxVal - minVal)/len(cdf)

        # For each color level i, find the first cdf bin reaching i and map
        # it back to a data value; maxVal is appended as the top edge.
        boundaries = np.asarray(
            [np.argmax(cdf >= i)*scale + minVal for i in range(nColors)] + [maxVal]
        )

    return boundaries

1030 

1031 

def digitizeData(data, nColors=256):
    """
    Scale data into nColors using its cumulative distribution function.

    Parameters
    ----------
    data : `np.array`
        The input image data.
    nColors : `int`
        The number of intervals to distribute data into.

    Returns
    -------
    data: `np.array` of `int`
        Scaled data in the [0, nColors - 1] range.
    """
    cdf, minVal, maxVal = getCdf(data, nColors - 1)
    # Data-units width represented by each cdf bin.
    scale = (maxVal - minVal)/len(cdf)
    # Map each data value onto its cdf bin index: subtract the offset, then
    # divide by the bin width. (Previously computed as data*scale - minVal,
    # which only coincides with the correct index when scale == 1, i.e.
    # when the data range is below getCdf's nBinsMax cap.)
    bins = np.floor((data - minVal)/scale).astype(np.int64)
    return cdf[bins]