Coverage for python/lsst/summit/utils/utils.py: 19%

340 statements  

coverage.py v7.3.0, created at 2023-08-29 10:24 +0000

1# This file is part of summit_utils. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <https://www.gnu.org/licenses/>. 

21 

22import os 

23from typing import Iterable 

24import numpy as np 

25import logging 

26from scipy.ndimage import gaussian_filter 

27import lsst.afw.image as afwImage 

28import lsst.afw.detection as afwDetect 

29from lsst.afw.detection import Footprint, FootprintSet 

30import lsst.afw.math as afwMath 

31import lsst.daf.base as dafBase 

32import lsst.geom as geom 

33import lsst.pipe.base as pipeBase 

34import lsst.utils.packages as packageUtils 

35from lsst.daf.butler.cli.cliLog import CliLog 

36import datetime 

37from dateutil.tz import gettz 

38 

39from lsst.obs.lsst.translators.lsst import FILTER_DELIMITER 

40from lsst.obs.lsst.translators.latiss import AUXTEL_LOCATION 

41 

42from astro_metadata_translator import ObservationInfo 

43from astropy.coordinates import SkyCoord, AltAz 

44from astropy.coordinates.earth import EarthLocation 

45import astropy.units as u 

46from astropy.time import Time 

47 

48from .astrometry.utils import genericCameraHeaderToWcs 

49 

50__all__ = ["SIGMATOFWHM", 

51 "FWHMTOSIGMA", 

52 "EFD_CLIENT_MISSING_MSG", 

53 "GOOGLE_CLOUD_MISSING_MSG", 

54 "AUXTEL_LOCATION", 

55 "countPixels", 

56 "quickSmooth", 

57 "argMax2d", 

58 "getImageStats", 

59 "detectObjectsInExp", 

60 "humanNameForCelestialObject", 

61 "getFocusFromHeader", 

62 "dayObsIntToString", 

63 "dayObsSeqNumToVisitId", 

64 "setupLogging", 

65 "getCurrentDayObs_datetime", 

66 "getCurrentDayObs_int", 

67 "getCurrentDayObs_humanStr", 

68 "getSite", 

69 "getExpPositionOffset", 

70 "starTrackerFileToExposure", 

71 "getAirmassSeeingCorrection", 

72 "getFilterSeeingCorrection", 

73 "getCdf", 

74 "getQuantiles", 

75 "digitizeData", 

76 ] 

77 

78 

79SIGMATOFWHM = 2.0*np.sqrt(2.0*np.log(2.0)) 

80FWHMTOSIGMA = 1/SIGMATOFWHM 

81 

82EFD_CLIENT_MISSING_MSG = ('ImportError: lsst_efd_client not found. Please install with:\n' 

83 ' pip install lsst-efd-client') 

84 

85GOOGLE_CLOUD_MISSING_MSG = ('ImportError: Google cloud storage not found. Please install with:\n' 

86 ' pip install google-cloud-storage') 

87 

88 

89def countPixels(maskedImage, maskPlane): 

90 """Count the number of pixels in an image with a given mask bit set. 

91 

92 Parameters 

93 ---------- 

94 maskedImage : `lsst.afw.image.MaskedImage` 

95 The masked image.

96 maskPlane : `str` 

97 The name of the bitmask. 

98 

99 Returns 

100 ------- 

101 count : `int`

102 The number of pixels with the selected mask bit set.
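Examples
--------
An illustrative sketch, assuming the default afw mask planes (which
include ``BAD``) and an otherwise empty mask:

>>> import lsst.afw.image as afwImage
>>> mi = afwImage.MaskedImageF(10, 10)
>>> countPixels(mi, 'BAD')
0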

103 """ 

104 bit = maskedImage.mask.getPlaneBitMask(maskPlane) 

105 return len(np.where(np.bitwise_and(maskedImage.mask.array, bit))[0]) 

106 

107 

108def quickSmooth(data, sigma=2): 

109 """Perform a quick smoothing of the image. 

110 

111 Not to be used for scientific purposes, but improves the stretch and

112 visual rendering of low SNR sources against the sky background in cutouts.

113 

114 Parameters 

115 ---------- 

116 data : `np.array` 

117 The image data to smooth 

118 sigma : `float`, optional 

119 The size of the smoothing kernel. 

120 

121 Returns 

122 ------- 

123 smoothData : `np.array` 

124 The smoothed data 
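Examples
--------
An illustrative sketch on random noise, purely to show the call pattern:

>>> rng = np.random.default_rng(42)
>>> noisy = rng.normal(size=(50, 50))
>>> smoothed = quickSmooth(noisy, sigma=2)
>>> smoothed.shape
(50, 50)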

125 """ 

126 kernel = [sigma, sigma] 

127 smoothData = gaussian_filter(data, kernel, mode='constant') 

128 return smoothData 

129 

130 

131def argMax2d(array): 

132 """Get the index of the max value of an array and whether it's unique. 

133 

134 If it's not unique, returns a list of the other locations containing the

135 maximum value, e.g. returns 

136 

137 (12, 34), False, [(56,78), (910, 1112)] 

138 

139 Parameters 

140 ---------- 

141 array : `np.array` 

142 The data 

143 

144 Returns 

145 ------- 

146 maxLocation : `tuple` 

147 The coords of the first instance of the max value 

148 unique : `bool` 

149 Whether it's the only location 

150 otherLocations : `list` of `tuple` 

151 List of the other locations of the maximum value; empty if the maximum is unique.
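Examples
--------
An illustrative sketch with a single, unambiguous maximum:

>>> arr = np.zeros((5, 5))
>>> arr[2, 3] = 1
>>> peak, unique, others = argMax2d(arr)
>>> unique, others
(True, [])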

152 """ 

153 uniqueMaximum = False 

154 maxCoords = np.where(array == np.max(array)) 

155 maxCoords = [coord for coord in zip(*maxCoords)] # list of coords as tuples 

156 if len(maxCoords) == 1: # single unambiguous value 

157 uniqueMaximum = True 

158 

159 return maxCoords[0], uniqueMaximum, maxCoords[1:] 

160 

161 

162def dayObsIntToString(dayObs): 

163 """Convert an integer dayObs to a dash-delimited string. 

164 

165 e.g. convert the hard-to-read 20210101 to 2021-01-01

166 

167 Parameters 

168 ---------- 

169 dayObs : `int` 

170 The dayObs. 

171 

172 Returns 

173 ------- 

174 dayObs : `str` 

175 The dayObs as a string. 
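Examples
--------
A quick example:

>>> dayObsIntToString(20210101)
'2021-01-01'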

176 """ 

177 assert isinstance(dayObs, int) 

178 dStr = str(dayObs) 

179 assert len(dStr) == 8 

180 return '-'.join([dStr[0:4], dStr[4:6], dStr[6:8]]) 

181 

182 

183def dayObsSeqNumToVisitId(dayObs, seqNum): 

184 """Get the visit id for a given dayObs/seqNum. 

185 

186 Parameters 

187 ---------- 

188 dayObs : `int` 

189 The dayObs. 

190 seqNum : `int` 

191 The seqNum. 

192 

193 Returns 

194 ------- 

195 visitId : `int` 

196 The visitId. 

197 

198 Notes 

199 ----- 

200 TODO: Remove this horrible hack once DM-30948 makes this possible 

201 programmatically/via the butler.
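Examples
--------
An illustrative example (the dayObs and seqNum values are arbitrary):

>>> dayObsSeqNumToVisitId(20210101, 123)
2021010100123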

202 """ 

203 if dayObs < 19700101 or dayObs > 35000101: 

204 raise ValueError(f'dayObs value {dayObs} outside plausible range') 

205 return int(f"{dayObs}{seqNum:05}") 

206 

207 

208def getImageStats(exp): 

209 """Calculate a grab-bag of stats for an image. Must remain fast. 

210 

211 Parameters 

212 ---------- 

213 exp : `lsst.afw.image.Exposure` 

214 The input exposure. 

215 

216 Returns 

217 ------- 

218 stats : `lsst.pipe.base.Struct` 

219 A container with attributes containing measurements and statistics 

220 for the image. 

221 """ 

222 result = pipeBase.Struct() 

223 

224 vi = exp.visitInfo 

225 expTime = vi.exposureTime 

226 md = exp.getMetadata() 

227 

228 obj = vi.object 

229 mjd = vi.getDate().get() 

230 result.object = obj 

231 result.mjd = mjd 

232 

233 fullFilterString = exp.filter.physicalLabel 

234 filt = fullFilterString.split(FILTER_DELIMITER)[0] 

235 grating = fullFilterString.split(FILTER_DELIMITER)[1] 

236 

237 airmass = vi.getBoresightAirmass() 

238 rotangle = vi.getBoresightRotAngle().asDegrees() 

239 

240 azAlt = vi.getBoresightAzAlt() 

241 az = azAlt[0].asDegrees() 

242 el = azAlt[1].asDegrees() 

243 

244 result.expTime = expTime 

245 result.filter = filt 

246 result.grating = grating 

247 result.airmass = airmass 

248 result.rotangle = rotangle 

249 result.az = az 

250 result.el = el 

251 result.focus = md.get('FOCUSZ') 

252 

253 data = exp.image.array 

254 result.maxValue = np.max(data) 

255 

256 peak, uniquePeak, otherPeaks = argMax2d(data) 

257 result.maxPixelLocation = peak 

258 result.multipleMaxPixels = not uniquePeak  # argMax2d returns whether the max is unique

259 

260 result.nBadPixels = countPixels(exp.maskedImage, 'BAD') 

261 result.nSatPixels = countPixels(exp.maskedImage, 'SAT') 

262 result.percentile99 = np.percentile(data, 99) 

263 result.percentile9999 = np.percentile(data, 99.99) 

264 

265 sctrl = afwMath.StatisticsControl() 

266 sctrl.setNumSigmaClip(5) 

267 sctrl.setNumIter(2) 

268 statTypes = afwMath.MEANCLIP | afwMath.STDEVCLIP 

269 stats = afwMath.makeStatistics(exp.maskedImage, statTypes, sctrl) 

270 std, stderr = stats.getResult(afwMath.STDEVCLIP) 

271 mean, meanerr = stats.getResult(afwMath.MEANCLIP) 

272 

273 result.clippedMean = mean 

274 result.clippedStddev = std 

275 

276 return result 

277 

278 

279def detectObjectsInExp(exp, nSigma=10, nPixMin=10, grow=0): 

280 """Quick and dirty object detection for an exposure. 

281 

282 Return the footPrintSet for the objects in a preferably-postISR exposure. 

283 

284 Parameters 

285 ---------- 

286 exp : `lsst.afw.image.Exposure` 

287 The exposure to detect objects in. 

288 nSigma : `float`, optional

289 The number of sigma for detection.

290 nPixMin : `int`, optional

291 The minimum number of pixels in an object for detection.

292 grow : `int`, optional

293 The number of pixels to grow the footprint by after detection.

294 

295 Returns 

296 ------- 

297 footPrintSet : `lsst.afw.detection.FootprintSet` 

298 The set of footprints in the image. 

299 """ 

300 median = np.nanmedian(exp.image.array) 

301 exp.image -= median 

302 

303 threshold = afwDetect.Threshold(nSigma, afwDetect.Threshold.STDEV) 

304 footPrintSet = afwDetect.FootprintSet(exp.getMaskedImage(), threshold, "DETECTED", nPixMin) 

305 if grow > 0: 

306 isotropic = True 

307 footPrintSet = afwDetect.FootprintSet(footPrintSet, grow, isotropic) 

308 

309 exp.image += median # add back in to leave background unchanged 

310 return footPrintSet 

311 

312 

313def fluxesFromFootprints(footprints, parentImage, subtractImageMedian=False): 

314 """Calculate the flux from a set of footprints, given the parent image, 

315 optionally subtracting the whole-image median from each pixel as a very 

316 rough background subtraction. 

317 

318 Parameters 

319 ---------- 

320 footprints : `lsst.afw.detection.FootprintSet` or 

321 `lsst.afw.detection.Footprint` or 

322 `iterable` of `lsst.afw.detection.Footprint` 

323 The footprints to measure. 

324 parentImage : `lsst.afw.image.Image` 

325 The parent image. 

326 subtractImageMedian : `bool`, optional 

327 Subtract a whole-image median from each pixel in the footprint when 

328 summing as a very crude background subtraction. Does not change the 

329 original image. 

330 

331 Returns 

332 ------- 

333 fluxes : `np.ndarray` of `float`

334 The fluxes for each footprint. 

335 

336 Raises 

337 ------ 

338 TypeError : Raised for unsupported types.

339 """ 

340 median = 0 

341 if subtractImageMedian: 

342 median = np.nanmedian(parentImage.array) 

343 

344 # poor person's single dispatch 

345 badTypeMsg = ("This function works with FootprintSets, single Footprints, and iterables of Footprints. " 

346 f"Got {type(footprints)}: {footprints}") 

347 if isinstance(footprints, FootprintSet): 

348 footprints = footprints.getFootprints() 

349 elif isinstance(footprints, Iterable): 

350 if not isinstance(footprints[0], Footprint): 

351 raise TypeError(badTypeMsg) 

352 elif isinstance(footprints, Footprint): 

353 footprints = [footprints] 

354 else: 

355 raise TypeError(badTypeMsg) 

356 

357 return np.array([fluxFromFootprint(fp, parentImage, backgroundValue=median) for fp in footprints]) 

358 

359 

360def fluxFromFootprint(footprint, parentImage, backgroundValue=0): 

361 """Calculate the flux from a footprint, given the parent image, optionally 

362 subtracting a single value from each pixel as a very rough background 

363 subtraction, e.g. the image median. 

364 

365 Parameters 

366 ---------- 

367 footprint : `lsst.afw.detection.Footprint` 

368 The footprint to measure. 

369 parentImage : `lsst.afw.image.Image` 

370 Image containing the footprint. 

371 backgroundValue : `float`, optional

372 The value to subtract from each pixel in the footprint when summing 

373 as a very crude background subtraction. Does not change the original 

374 image. 

375 

376 Returns 

377 ------- 

378 flux : `float` 

379 The flux in the footprint 

380 """ 

381 if backgroundValue: # only do the subtraction if non-zero for speed 

382 xy0 = parentImage.getBBox().getMin() 

383 return footprint.computeFluxFromArray(parentImage.array - backgroundValue, xy0) 

384 return footprint.computeFluxFromImage(parentImage) 

385 

386 

387def humanNameForCelestialObject(objName): 

388 """Returns a list of all human names for obj, or [] if none are found. 

389 

390 Parameters 

391 ---------- 

392 objName : `str` 

393 The/a name of the object. 

394 

395 Returns 

396 ------- 

397 names : `list` of `str` 

398 The names found for the object 

399 """ 

400 from astroquery.simbad import Simbad 

401 results = [] 

402 try: 

403 simbadResult = Simbad.query_objectids(objName) 

404 for row in simbadResult: 

405 if row['ID'].startswith('NAME'): 

406 results.append(row['ID'].replace('NAME ', '')) 

407 return results 

408 except Exception: 

409 return [] # same behavior as for found but un-named objects 

410 

411 

412def _getAltAzZenithsFromSeqNum(butler, dayObs, seqNumList): 

413 """Get the alt, az and zenith angle for the seqNums of a given dayObs. 

414 

415 Parameters 

416 ---------- 

417 butler : `lsst.daf.butler.Butler` 

418 The butler to query. 

419 dayObs : `int` 

420 The dayObs. 

421 seqNumList : `list` of `int` 

422 The seqNums for which to return the alt, az and zenith 

423 

424 Returns 

425 ------- 

426 azimuths : `list` of `float` 

427 List of the azimuths for each seqNum 

428 elevations : `list` of `float` 

429 List of the elevations for each seqNum 

430 zeniths : `list` of `float` 

431 List of the zenith angles for each seqNum 

432 """ 

433 azimuths, elevations, zeniths = [], [], [] 

434 for seqNum in seqNumList: 

435 md = butler.get('raw.metadata', day_obs=dayObs, seq_num=seqNum, detector=0) 

436 obsInfo = ObservationInfo(md) 

437 alt = obsInfo.altaz_begin.alt.value 

438 az = obsInfo.altaz_begin.az.value 

439 elevations.append(alt) 

440 zeniths.append(90-alt) 

441 azimuths.append(az) 

442 return azimuths, elevations, zeniths 

443 

444 

445def getFocusFromHeader(exp): 

446 """Get the raw focus value from the header. 

447 

448 Parameters 

449 ---------- 

450 exp : `lsst.afw.image.Exposure`

451 The exposure. 

452 

453 Returns 

454 ------- 

455 focus : `float` or `None` 

456 The focus value if found, else ``None``. 

457 """ 

458 md = exp.getMetadata() 

459 if 'FOCUSZ' in md: 

460 return md['FOCUSZ'] 

461 return None 

462 

463 

464def checkStackSetup(): 

465 """Check which weekly tag is being used and which local packages are setup. 

466 

467 Designed primarily for use in notebooks/observing, this prints which weekly

468 tag(s) are set up for lsst_distrib, and lists any locally setup packages and

469 the path to each.

470 

471 Notes 

472 ----- 

473 Uses print() instead of logger messages as this should simply print them 

474 without being vulnerable to any log messages potentially being diverted. 

475 """ 

476 packages = packageUtils.getEnvironmentPackages(include_all=True) 

477 

478 lsstDistribHashAndTags = packages['lsst_distrib'] # looks something like 'g4eae7cb9+1418867f (w_2022_13)' 

479 lsstDistribTags = lsstDistribHashAndTags.split()[1] 

480 if len(lsstDistribTags.split()) == 1: 

481 tag = lsstDistribTags.replace('(', '') 

482 tag = tag.replace(')', '') 

483 print(f"You are running {tag} of lsst_distrib") 

484 else: # multiple weekly tags found for lsst_distrib! 

485 print(f'The version of lsst_distrib you have is compatible with: {lsstDistribTags}') 

486 

487 localPackages = [] 

488 localPaths = [] 

489 for package, tags in packages.items(): 

490 if tags.startswith('LOCAL:'): 

491 path = tags.split('LOCAL:')[1] 

492 path = path.split('@')[0] # don't need the git SHA etc 

493 localPaths.append(path) 

494 localPackages.append(package) 

495 

496 if localPackages: 

497 print("\nLocally setup packages:") 

498 print("-----------------------") 

499 maxLen = max(len(package) for package in localPackages) 

500 for package, path in zip(localPackages, localPaths): 

501 print(f"{package:<{maxLen}s} at {path}") 

502 else: 

503 print("\nNo locally setup packages (using a vanilla stack)") 

504 

505 

506def setupLogging(longlog=False): 

507 """Setup logging in the same way as one would get from pipetask run. 

508 

509 Code that isn't run through the butler CLI defaults to WARNING level 

510 messages and no logger names. This sets the behaviour to follow whatever 

511 the pipeline default is, currently 

512 <logger_name> <level>: <message> e.g. 

513 lsst.isr INFO: Masking defects. 

514 """ 

515 CliLog.initLog(longlog=longlog) 

516 

517 

518def getCurrentDayObs_datetime(): 

519 """Get the current day_obs - the observatory rolls the date over at UTC-12 

520 

521 Returned as datetime.date(2022, 4, 28) 

522 """ 

523 utc = gettz("UTC") 

524 nowUtc = datetime.datetime.now().astimezone(utc) 

525 offset = datetime.timedelta(hours=-12) 

526 dayObs = (nowUtc + offset).date() 

527 return dayObs 

528 

529 

530def getCurrentDayObs_int(): 

531 """Return the current dayObs as an int in the form 20220428 

532 """ 

533 return int(getCurrentDayObs_datetime().strftime("%Y%m%d")) 

534 

535 

536def getCurrentDayObs_humanStr(): 

537 """Return the current dayObs as a string in the form '2022-04-28' 

538 """ 

539 return dayObsIntToString(getCurrentDayObs_int()) 

540 

541 

542def getExpRecordAge(expRecord): 

543 """Get the time, in seconds, since the end of exposure. 

544 

545 Parameters 

546 ---------- 

547 expRecord : `lsst.daf.butler.DimensionRecord` 

548 The exposure record. 

549 

550 Returns 

551 ------- 

552 age : `float` 

553 The age of the exposure, in seconds. 

554 """ 

555 return -1 * (expRecord.timespan.end - Time.now()).sec 

556 

557 

558def getSite(): 

559 """Returns where the code is running. 

560 

561 Returns 

562 ------- 

563 location : `str` 

564 One of: 

565 ['tucson', 'summit', 'base', 'staff-rsp', 'rubin-devl', 'jenkins', 

566 'usdf-k8s'] 

567 

568 Raises 

569 ------ 

570 ValueError 

571 Raised if location cannot be determined. 

572 """ 

573 # All nublado instances guarantee that EXTERNAL_INSTANCE_URL is set and

574 # uniquely identifies the instance.

575 location = os.getenv('EXTERNAL_INSTANCE_URL', "") 

576 if location == "https://tucson-teststand.lsst.codes":

577 return 'tucson'

578 elif location == "https://summit-lsp.lsst.codes":

579 return 'summit'

580 elif location == "https://base-lsp.lsst.codes":

581 return 'base'

582 elif location == "https://usdf-rsp.slac.stanford.edu":

583 return 'staff-rsp'

583 return 'staff-rsp' 

584 

585 # if no EXTERNAL_INSTANCE_URL, try HOSTNAME to see if we're on the dev nodes

586 # it is expected that this will be extensible to SLAC

587 hostname = os.getenv('HOSTNAME', "") 

588 if hostname.startswith('sdfrome'):

589 return 'rubin-devl' 

590 

591 jenkinsHome = os.getenv('JENKINS_HOME', "") 

592 if jenkinsHome != "":

593 return 'jenkins' 

594 

595 # we're probably inside a k8s pod doing rapid analysis work at this point 

596 location = os.getenv('RAPID_ANALYSIS_LOCATION', "") 

597 if location == "TTS":

598 return 'tucson'

599 if location == "SUMMIT":

600 return 'summit'

601 if location == "USDF":

602 return 'usdf-k8s'

602 return 'usdf-k8s' 

603 

604 # we have failed 

605 raise ValueError('Location could not be determined') 

606 

607 

608def getAltAzFromSkyPosition(skyPos, visitInfo, doCorrectRefraction=False, 

609 wavelength=500.0, 

610 pressureOverride=None, 

611 temperatureOverride=None, 

612 relativeHumidityOverride=None, 

613 ): 

614 """Get the alt/az from the position on the sky and the time and location 

615 of the observation. 

616 

617 The temperature, pressure and relative humidity are taken from the 

618 visitInfo by default, but can be individually overridden as needed. It 

619 should be noted that the visitInfo never contains a nominal wavelength, and 

620 so this takes a default value of 500nm. 

621 

622 Parameters 

623 ---------- 

624 skyPos : `lsst.geom.SpherePoint` 

625 The position on the sky. 

626 visitInfo : `lsst.afw.image.VisitInfo` 

627 The visit info containing the time of the observation. 

628 doCorrectRefraction : `bool`, optional 

629 Correct for the atmospheric refraction? 

630 wavelength : `float`, optional 

631 The nominal wavelength in nanometers (e.g. 500.0), as a float. 

632 pressureOverride : `float`, optional 

633 The pressure, in bars (e.g. 0.770), to override the value supplied in 

634 the visitInfo, as a float. 

635 temperatureOverride : `float`, optional 

636 The temperature, in Celsius (e.g. 10.0), to override the value supplied 

637 in the visitInfo, as a float. 

638 relativeHumidityOverride : `float`, optional 

639 The relativeHumidity in the range 0..1 (i.e. not as a percentage), to 

640 override the value supplied in the visitInfo, as a float. 

641 

642 Returns 

643 ------- 

644 alt : `lsst.geom.Angle` 

645 The altitude. 

646 az : `lsst.geom.Angle` 

647 The azimuth. 

648 """ 

649 skyLocation = SkyCoord(skyPos.getRa().asRadians(), skyPos.getDec().asRadians(), unit=u.rad) 

650 long = visitInfo.observatory.getLongitude() 

651 lat = visitInfo.observatory.getLatitude() 

652 ele = visitInfo.observatory.getElevation() 

653 earthLocation = EarthLocation.from_geodetic(long.asDegrees(), lat.asDegrees(), ele) 

654 

655 refractionKwargs = {} 

656 if doCorrectRefraction: 

657 # wavelength is never supplied in the visitInfo so always take this 

658 wavelength = wavelength * u.nm 

659 

660 if pressureOverride: 

661 pressure = pressureOverride 

662 else: 

663 pressure = visitInfo.weather.getAirPressure() 

664 # ObservationInfos (which are the "source of truth") use pascals, so

665 # convert from pascals to bars

666 pressure /= 100000.0

667 pressure = pressure*u.bar 

668 

669 if temperatureOverride: 

670 temperature = temperatureOverride 

671 else: 

672 temperature = visitInfo.weather.getAirTemperature() 

673 temperature = temperature*u.deg_C 

674 

675 if relativeHumidityOverride: 

676 relativeHumidity = relativeHumidityOverride 

677 else: 

678 relativeHumidity = visitInfo.weather.getHumidity() / 100.0 # this is in percent 

679 relativeHumidity = relativeHumidity*u.dimensionless_unscaled  # relative humidity is dimensionless, not deg_C

680 

681 refractionKwargs = dict(pressure=pressure, 

682 temperature=temperature, 

683 relative_humidity=relativeHumidity, 

684 obswl=wavelength) 

685 

686 # must go via astropy.Time because dafBase.dateTime.DateTime contains 

687 # the timezone, but going straight to visitInfo.date.toPython() loses this. 

688 obsTime = Time(visitInfo.date.toPython(), scale='tai') 

689 altAz = AltAz(obstime=obsTime, 

690 location=earthLocation, 

691 **refractionKwargs) 

692 

693 obsAltAz = skyLocation.transform_to(altAz) 

694 alt = geom.Angle(obsAltAz.alt.degree, geom.degrees) 

695 az = geom.Angle(obsAltAz.az.degree, geom.degrees) 

696 

697 return alt, az 

698 

699 

700def getExpPositionOffset(exp1, exp2, useWcs=True, allowDifferentPlateScales=False): 

701 """Get the change in sky position between two exposures. 

702 

703 Given two exposures, calculate the offset on the sky between the images. 

704 If useWcs then use the (fitted or unfitted) skyOrigin from their WCSs, and 

705 calculate the alt/az from the observation times, otherwise use the nominal 

706 values in the exposures' visitInfos. Note that if using the visitInfo 

707 values that for a given pointing the ra/dec will be ~identical, regardless 

708 of whether astrometric fitting has been performed. 

709 

710 Values are given as exp1-exp2. 

711 

712 Parameters 

713 ---------- 

714 exp1 : `lsst.afw.image.Exposure` 

715 The first exposure. 

716 exp2 : `lsst.afw.image.Exposure` 

717 The second exposure. 

718 useWcs : `bool`, optional

719 Use the WCS for the ra/dec and alt/az if True, else use the nominal/ 

720 boresight values from the exposures' visitInfos. 

721 allowDifferentPlateScales : `bool`, optional 

722 Use to disable checking that plate scales are the same. Generally, 

723 differing plate scales would indicate an error, but where blind-solving 

724 has been undertaken during commissioning plate scales can be different 

725 enough to warrant setting this to ``True``. 

726 

727 Returns 

728 ------- 

729 offsets : `lsst.pipe.base.Struct` 

730 A struct containing the offsets: 

731 ``deltaRa`` 

732 The difference in ra (`lsst.geom.Angle`)

733 ``deltaDec``

734 The difference in dec (`lsst.geom.Angle`)

735 ``deltaAlt``

736 The difference in alt (`lsst.geom.Angle`)

737 ``deltaAz``

738 The difference in az (`lsst.geom.Angle`)

739 ``deltaPixels``

740 The difference in pixels (`float`)

741 """ 

742 

743 wcs1 = exp1.getWcs() 

744 wcs2 = exp2.getWcs() 

745 pixScaleArcSec = wcs1.getPixelScale().asArcseconds() 

746 if not allowDifferentPlateScales: 

747 assert np.isclose(pixScaleArcSec, wcs2.getPixelScale().asArcseconds()), \ 

748 "Pixel scales in the exposures differ." 

749 

750 if useWcs: 

751 p1 = wcs1.getSkyOrigin() 

752 p2 = wcs2.getSkyOrigin() 

753 alt1, az1 = getAltAzFromSkyPosition(p1, exp1.getInfo().getVisitInfo()) 

754 alt2, az2 = getAltAzFromSkyPosition(p2, exp2.getInfo().getVisitInfo()) 

755 ra1 = p1[0] 

756 ra2 = p2[0] 

757 dec1 = p1[1] 

758 dec2 = p2[1] 

759 else: 

760 az1 = exp1.visitInfo.boresightAzAlt[0] 

761 az2 = exp2.visitInfo.boresightAzAlt[0] 

762 alt1 = exp1.visitInfo.boresightAzAlt[1] 

763 alt2 = exp2.visitInfo.boresightAzAlt[1] 

764 

765 ra1 = exp1.visitInfo.boresightRaDec[0] 

766 ra2 = exp2.visitInfo.boresightRaDec[0] 

767 dec1 = exp1.visitInfo.boresightRaDec[1] 

768 dec2 = exp2.visitInfo.boresightRaDec[1] 

769 

770 p1 = exp1.visitInfo.boresightRaDec 

771 p2 = exp2.visitInfo.boresightRaDec 

772 

773 angular_offset = p1.separation(p2).asArcseconds() 

774 deltaPixels = angular_offset / pixScaleArcSec 

775 

776 ret = pipeBase.Struct(deltaRa=(ra1-ra2).wrapNear(geom.Angle(0.0)), 

777 deltaDec=dec1-dec2, 

778 deltaAlt=alt1-alt2, 

779 deltaAz=(az1-az2).wrapNear(geom.Angle(0.0)), 

780 deltaPixels=deltaPixels 

781 ) 

782 

783 return ret 

784 

785 

786def starTrackerFileToExposure(filename, logger=None): 

787 """Read the exposure from the file and set the wcs from the header. 

788 

789 Parameters 

790 ---------- 

791 filename : `str` 

792 The full path to the file. 

793 logger : `logging.Logger`, optional 

794 The logger to use for errors, created if not supplied. 

795 

796 Returns 

797 ------- 

798 exp : `lsst.afw.image.Exposure` 

799 The exposure. 

800 """ 

801 if not logger: 

802 logger = logging.getLogger(__name__) 

803 exp = afwImage.ExposureF(filename) 

804 try: 

805 wcs = genericCameraHeaderToWcs(exp) 

806 exp.setWcs(wcs) 

807 except Exception as e: 

808 logger.warning(f"Failed to set wcs from header: {e}") 

809 

810 # for some reason the date isn't being set correctly 

811 # DATE-OBS is present in the original header, but it's being 

812 # stripped out and somehow not set (plus it doesn't give the midpoint 

813 # of the exposure), so set it manually from the midpoint here 

814 try: 

815 md = exp.getMetadata() 

816 begin = datetime.datetime.fromisoformat(md['DATE-BEG']) 

817 end = datetime.datetime.fromisoformat(md['DATE-END']) 

818 duration = end - begin 

819 mid = begin + duration/2 

820 newTime = dafBase.DateTime(mid.isoformat(), dafBase.DateTime.Timescale.TAI) 

821 newVi = exp.visitInfo.copyWith(date=newTime) 

822 exp.info.setVisitInfo(newVi) 

823 except Exception as e: 

824 logger.warning(f"Failed to set date from header: {e}") 

825 

826 return exp 

827 

828 

829def obsInfoToDict(obsInfo): 

830 """Convert an ObservationInfo to a dict. 

831 

832 Parameters 

833 ---------- 

834 obsInfo : `astro_metadata_translator.ObservationInfo` 

835 The ObservationInfo to convert. 

836 

837 Returns 

838 ------- 

839 obsInfoDict : `dict` 

840 The ObservationInfo as a dict. 

841 """ 

842 return {prop: getattr(obsInfo, prop) for prop in obsInfo.all_properties.keys()} 

843 

844 

845def getFieldNameAndTileNumber(field, warn=True, logger=None): 

846 """Get the tile name and number of an observed field. 

847 

848 It is assumed to always be appended, with an underscore, to the rest of the 

849 field name. Returns the name and number as a tuple, or the name unchanged 

850 if no tile number is found. 

851 

852 Parameters 

853 ---------- 

854 field : `str` 

855 The name of the field 
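warn : `bool`, optional
Warn if the tile number cannot be determined?
logger : `logging.Logger`, optional
The logger to use for warnings; created if needed and not supplied.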

856 

857 Returns 

858 ------- 

859 fieldName : `str` 

860 The name of the field without the trailing tile number, if present. 

861 tileNum : `int` 

862 The number of the tile, as an integer, or ``None`` if not found. 
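Examples
--------
Illustrative examples, using hypothetical field names:

>>> getFieldNameAndTileNumber('spec_phot_field_123', warn=False)
('spec_phot_field', 123)
>>> getFieldNameAndTileNumber('Prawn_Nebula', warn=False)
('Prawn_Nebula', None)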

863 """ 

864 if warn and not logger: 

865 logger = logging.getLogger('lsst.summit.utils.utils.getFieldNameAndTileNumber') 

866 

867 if '_' not in field: 

868 if warn: 

869 logger.warning(f"Field {field} does not contain an underscore," 

870 " so cannot determine the tile number.") 

871 return field, None 

872 

873 try: 

874 fieldParts = field.split("_") 

875 fieldNum = int(fieldParts[-1]) 

876 except ValueError: 

877 if warn: 

878 logger.warning(f"Field {field} does not contain only an integer after the final underscore" 

879 " so cannot determine the tile number.") 

880 return field, None 

881 

882 return "_".join(fieldParts[:-1]), fieldNum 

883 

884 

885def getAirmassSeeingCorrection(airmass): 

886 """Get the correction factor for seeing due to airmass. 

887 

888 Parameters 

889 ---------- 

890 airmass : `float` 

891 The airmass, greater than or equal to 1. 

892 

893 Returns 

894 ------- 

895 correctionFactor : `float` 

896 The correction factor to apply to the seeing. 

897 

898 Raises 

899 ------ 

900 ValueError raised for unphysical airmasses. 
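Examples
--------
An illustrative example; at airmass 1 the correction is unity:

>>> getAirmassSeeingCorrection(1.0)
1.0
>>> round(getAirmassSeeingCorrection(2.0), 3)
0.66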

901 """ 

902 if airmass < 1: 

903 raise ValueError(f"Invalid airmass: {airmass}") 

904 return airmass**(-0.6) 

905 

906 

907def getFilterSeeingCorrection(filterName): 

908 """Get the correction factor for seeing due to a filter. 

909 

910 Parameters 

911 ---------- 

912 filterName : `str` 

913 The name of the filter, e.g. 'SDSSg_65mm'. 

914 

915 Returns 

916 ------- 

917 correctionFactor : `float` 

918 The correction factor to apply to the seeing. 

919 

920 Raises 

921 ------ 

922 ValueError raised for unknown filters. 
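Examples
--------
An illustrative example:

>>> round(getFilterSeeingCorrection('SDSSg_65mm'), 4)
0.9906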

923 """ 

924 match filterName: 

925 case 'SDSSg_65mm': 

926 return (477./500.)**0.2 

927 case 'SDSSr_65mm': 

928 return (623./500.)**0.2 

929 case 'SDSSi_65mm': 

930 return (762./500.)**0.2 

931 case _: 

932 raise ValueError(f"Unknown filter name: {filterName}") 

933 

934 

935def getCdf(data, scale): 

936 """Return an approximate cumulative distribution function scaled to 

937 the [0, scale] range. 

938 

939 Parameters 

940 ---------- 

941 data : `np.array` 

942 The input data. 

943 scale : `int` 

944 The scaling range of the output. 

945 

946 Returns 

947 ------- 

948 cdf : `np.array` of `int` 

949 A monotonically increasing sequence that represents a scaled 

950 cumulative distribution function, starting with the value at 

951 minVal, then at (minVal + 1), and so on. 

952 minVal : `float`

953 An integer-valued float at or below the minimum value in the input data.

954 maxVal : `float`

955 An integer-valued float strictly above the maximum value in the input data.
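Examples
--------
An illustrative sketch on a tiny, hand-chosen array:

>>> data = np.array([[0.5, 1.5], [2.5, 3.5]])
>>> cdf, minVal, maxVal = getCdf(data, 100)
>>> int(minVal), int(maxVal)
(0, 5)
>>> cdf.tolist()
[25, 50, 75, 100, 100]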

956 """ 

957 flatData = data.ravel() 

958 size = flatData.size - np.count_nonzero(np.isnan(flatData)) 

959 

960 minVal = np.floor(np.nanmin(flatData)) 

961 maxVal = np.ceil(np.nanmax(flatData)) + 1.0 

962 

963 hist, binEdges = np.histogram( 

964 flatData, bins=int(maxVal - minVal), range=(minVal, maxVal) 

965 ) 

966 

967 cdf = (scale*np.cumsum(hist)/size).astype(np.int64) 

968 return cdf, minVal, maxVal 

969 

970 

971def getQuantiles(data, nColors): 

972 """Get a set of boundaries that equally distribute data into 

973 nColors intervals. The output can be used to make a colormap 

974 of nColors colors. 

975 

976 This is equivalent to using the numpy function: 

977 np.quantile(data, np.linspace(0, 1, nColors + 1)) 

978 but with coarser precision, which is sufficient for our use case.

979 This implementation gives a speed-up. 

980 

981 Parameters 

982 ---------- 

983 data : `np.array` 

984 The input image data. 

985 nColors : `int` 

986 The number of intervals to distribute data into. 

987 

988 Returns 

989 ------- 

990 boundaries: `list` of `float` 

991 A monotonically increasing sequence of size (nColors + 1). 

992 These are the edges of nColors intervals. 
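Examples
--------
An illustrative sketch; the result is a monotonically non-decreasing
sequence of length nColors + 1:

>>> rng = np.random.default_rng(0)
>>> data = rng.uniform(0, 100, size=(100, 100))
>>> boundaries = getQuantiles(data, 4)
>>> len(boundaries)
5
>>> bool(np.all(np.diff(boundaries) >= 0))
True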

993 """ 

994 cdf, minVal, maxVal = getCdf(data, nColors) 

995 boundaries = np.asarray( 

996 [np.argmax(cdf >= i) + minVal for i in range(nColors)] + [maxVal] 

997 ) 

998 return boundaries 

999 

1000 

1001def digitizeData(data, nColors=256): 

1002 """ 

1003 Scale data into nColors using its cumulative distribution function. 

1004 

1005 Parameters 

1006 ---------- 

1007 data : `np.array` 

1008 The input image data. 

1009 nColors : `int` 

1010 The number of intervals to distribute data into. 

1011 

1012 Returns 

1013 ------- 

1014 data: `np.array` of `int` 

1015 Scaled data in the [0, nColors - 1] range. 
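Examples
--------
An illustrative sketch:

>>> rng = np.random.default_rng(0)
>>> data = rng.normal(100, 10, size=(32, 32))
>>> scaled = digitizeData(data, nColors=256)
>>> scaled.shape
(32, 32)
>>> bool(scaled.min() >= 0 and scaled.max() <= 255)
True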

1016 """ 

1017 cdf, minVal, maxVal = getCdf(data, nColors - 1) 

1018 bins = np.floor((data - minVal)).astype(np.int64) 

1019 return cdf[bins]