# Coverage for python/lsst/summit/utils/utils.py: 18% (365 statements)
# This file is part of summit_utils.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import datetime
import logging
import os
from typing import Iterable

import astropy.units as u
import numpy as np
from astro_metadata_translator import ObservationInfo
from astropy.coordinates import AltAz, SkyCoord
from astropy.coordinates.earth import EarthLocation
from astropy.time import Time
from dateutil.tz import gettz
from scipy.ndimage import gaussian_filter

import lsst.afw.detection as afwDetect
import lsst.afw.image as afwImage
import lsst.afw.math as afwMath
import lsst.daf.base as dafBase
import lsst.geom as geom
import lsst.pipe.base as pipeBase
import lsst.utils.packages as packageUtils
from lsst.afw.coord import Weather
from lsst.afw.detection import Footprint, FootprintSet
from lsst.daf.butler.cli.cliLog import CliLog
from lsst.obs.lsst.translators.latiss import AUXTEL_LOCATION
from lsst.obs.lsst.translators.lsst import FILTER_DELIMITER

from .astrometry.utils import genericCameraHeaderToWcs

__all__ = [
    "SIGMATOFWHM",
    "FWHMTOSIGMA",
    "EFD_CLIENT_MISSING_MSG",
    "GOOGLE_CLOUD_MISSING_MSG",
    "AUXTEL_LOCATION",
    "countPixels",
    "quickSmooth",
    "argMax2d",
    "getImageStats",
    "detectObjectsInExp",
    "humanNameForCelestialObject",
    "getFocusFromHeader",
    "dayObsIntToString",
    "dayObsSeqNumToVisitId",
    "setupLogging",
    "getCurrentDayObs_datetime",
    "getCurrentDayObs_int",
    "getCurrentDayObs_humanStr",
    "getSite",
    "getExpPositionOffset",
    "starTrackerFileToExposure",
    "getAirmassSeeingCorrection",
    "getFilterSeeingCorrection",
    "getCdf",
    "getQuantiles",
    "digitizeData",
]


SIGMATOFWHM = 2.0 * np.sqrt(2.0 * np.log(2.0))
FWHMTOSIGMA = 1 / SIGMATOFWHM
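
# Example (illustrative; values follow from the definitions above): converting
# between FWHM and Gaussian sigma, e.g. for a 0.8 arcsec FWHM PSF.
#
#     >>> round(float(SIGMATOFWHM), 4)
#     2.3548
#     >>> round(0.8 * float(FWHMTOSIGMA), 3)  # 0.8 arcsec FWHM -> sigma
#     0.34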

EFD_CLIENT_MISSING_MSG = (
    "ImportError: lsst_efd_client not found. Please install with:\n" " pip install lsst-efd-client"
)

GOOGLE_CLOUD_MISSING_MSG = (
    "ImportError: Google cloud storage not found. Please install with:\n"
    " pip install google-cloud-storage"
)


def countPixels(maskedImage, maskPlane):
    """Count the number of pixels in an image with a given mask bit set.

    Parameters
    ----------
    maskedImage : `lsst.afw.image.MaskedImage`
        The masked image.
    maskPlane : `str`
        The name of the bitmask plane.

    Returns
    -------
    count : `int`
        The number of pixels with the selected mask bit set.
    """
    bit = maskedImage.mask.getPlaneBitMask(maskPlane)
    return len(np.where(np.bitwise_and(maskedImage.mask.array, bit))[0])


def quickSmooth(data, sigma=2):
    """Perform a quick smoothing of the image.

    Not to be used for scientific purposes, but improves the stretch and
    visual rendering of low SNR features against the sky background in
    cutouts.

    Parameters
    ----------
    data : `np.array`
        The image data to smooth.
    sigma : `float`, optional
        The size of the smoothing kernel.

    Returns
    -------
    smoothData : `np.array`
        The smoothed data.
    """
    kernel = [sigma, sigma]
    smoothData = gaussian_filter(data, kernel, mode="constant")
    return smoothData
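
# Example (illustrative sketch; the numbers are made up): smoothing spreads a
# single bright pixel over its neighbours, lowering the peak value.
#
#     >>> import numpy as np
#     >>> img = np.zeros((11, 11))
#     >>> img[5, 5] = 100.0
#     >>> smoothed = quickSmooth(img, sigma=2)
#     >>> smoothed.shape
#     (11, 11)
#     >>> bool(smoothed[5, 5] < img[5, 5])
#     True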


def argMax2d(array):
    """Get the index of the max value of an array and whether it's unique.

    If it's not unique, also returns a list of the other locations containing
    the maximum value, e.g. returns

    (12, 34), False, [(56, 78), (910, 1112)]

    Parameters
    ----------
    array : `np.array`
        The data.

    Returns
    -------
    maxLocation : `tuple`
        The coords of the first instance of the max value.
    unique : `bool`
        Whether it's the only location.
    otherLocations : `list` of `tuple`
        List of the other max values' locations, empty if the maximum is
        unique.
    """
    uniqueMaximum = False
    maxCoords = np.where(array == np.max(array))
    maxCoords = [coord for coord in zip(*maxCoords)]  # list of coords as tuples
    if len(maxCoords) == 1:  # single unambiguous value
        uniqueMaximum = True

    return maxCoords[0], uniqueMaximum, maxCoords[1:]
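
# Example (illustrative; array contents are made up): a single maximum is
# reported as unique, with no other locations.
#
#     >>> import numpy as np
#     >>> arr = np.zeros((5, 5))
#     >>> arr[2, 3] = 10.0
#     >>> loc, unique, others = argMax2d(arr)
#     >>> tuple(int(v) for v in loc), unique, others
#     ((2, 3), True, [])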


def dayObsIntToString(dayObs):
    """Convert an integer dayObs to a dash-delimited string.

    e.g. convert the hard-to-read 20210101 to 2021-01-01

    Parameters
    ----------
    dayObs : `int`
        The dayObs.

    Returns
    -------
    dayObs : `str`
        The dayObs as a string.
    """
    assert isinstance(dayObs, int)
    dStr = str(dayObs)
    assert len(dStr) == 8
    return "-".join([dStr[0:4], dStr[4:6], dStr[6:8]])
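
# Example (illustrative):
#
#     >>> dayObsIntToString(20210101)
#     '2021-01-01'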


def dayObsSeqNumToVisitId(dayObs, seqNum):
    """Get the visit id for a given dayObs/seqNum.

    Parameters
    ----------
    dayObs : `int`
        The dayObs.
    seqNum : `int`
        The seqNum.

    Returns
    -------
    visitId : `int`
        The visitId.

    Notes
    -----
    TODO: Remove this horrible hack once DM-30948 makes this possible
    programmatically/via the butler.
    """
    if dayObs < 19700101 or dayObs > 35000101:
        raise ValueError(f"dayObs value {dayObs} outside plausible range")
    return int(f"{dayObs}{seqNum:05}")
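
# Example (illustrative; the dayObs/seqNum values are made up):
#
#     >>> dayObsSeqNumToVisitId(20230511, 42)
#     2023051100042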


def getImageStats(exp):
    """Calculate a grab-bag of stats for an image. Must remain fast.

    Parameters
    ----------
    exp : `lsst.afw.image.Exposure`
        The input exposure.

    Returns
    -------
    stats : `lsst.pipe.base.Struct`
        A container with attributes containing measurements and statistics
        for the image.
    """
    result = pipeBase.Struct()

    vi = exp.visitInfo
    expTime = vi.exposureTime
    md = exp.getMetadata()

    obj = vi.object
    mjd = vi.getDate().get()
    result.object = obj
    result.mjd = mjd

    fullFilterString = exp.filter.physicalLabel
    filt = fullFilterString.split(FILTER_DELIMITER)[0]
    grating = fullFilterString.split(FILTER_DELIMITER)[1]

    airmass = vi.getBoresightAirmass()
    rotangle = vi.getBoresightRotAngle().asDegrees()

    azAlt = vi.getBoresightAzAlt()
    az = azAlt[0].asDegrees()
    el = azAlt[1].asDegrees()

    result.expTime = expTime
    result.filter = filt
    result.grating = grating
    result.airmass = airmass
    result.rotangle = rotangle
    result.az = az
    result.el = el
    result.focus = md.get("FOCUSZ")

    data = exp.image.array
    result.maxValue = np.max(data)

    peak, uniquePeak, otherPeaks = argMax2d(data)
    result.maxPixelLocation = peak
    result.multipleMaxPixels = uniquePeak

    result.nBadPixels = countPixels(exp.maskedImage, "BAD")
    result.nSatPixels = countPixels(exp.maskedImage, "SAT")
    result.percentile99 = np.percentile(data, 99)
    result.percentile9999 = np.percentile(data, 99.99)

    sctrl = afwMath.StatisticsControl()
    sctrl.setNumSigmaClip(5)
    sctrl.setNumIter(2)
    statTypes = afwMath.MEANCLIP | afwMath.STDEVCLIP
    stats = afwMath.makeStatistics(exp.maskedImage, statTypes, sctrl)
    std, stderr = stats.getResult(afwMath.STDEVCLIP)
    mean, meanerr = stats.getResult(afwMath.MEANCLIP)

    result.clippedMean = mean
    result.clippedStddev = std

    return result


def detectObjectsInExp(exp, nSigma=10, nPixMin=10, grow=0):
    """Quick and dirty object detection for an exposure.

    Return the footPrintSet for the objects in a preferably-postISR exposure.

    Parameters
    ----------
    exp : `lsst.afw.image.Exposure`
        The exposure to detect objects in.
    nSigma : `float`
        The number of sigma for detection.
    nPixMin : `int`
        The minimum number of pixels in an object for detection.
    grow : `int`
        The number of pixels to grow the footprint by after detection.

    Returns
    -------
    footPrintSet : `lsst.afw.detection.FootprintSet`
        The set of footprints in the image.
    """
    median = np.nanmedian(exp.image.array)
    exp.image -= median

    threshold = afwDetect.Threshold(nSigma, afwDetect.Threshold.STDEV)
    footPrintSet = afwDetect.FootprintSet(exp.getMaskedImage(), threshold, "DETECTED", nPixMin)
    if grow > 0:
        isotropic = True
        footPrintSet = afwDetect.FootprintSet(footPrintSet, grow, isotropic)

    exp.image += median  # add back in to leave background unchanged
    return footPrintSet


def fluxesFromFootprints(footprints, parentImage, subtractImageMedian=False):
    """Calculate the flux from a set of footprints, given the parent image,
    optionally subtracting the whole-image median from each pixel as a very
    rough background subtraction.

    Parameters
    ----------
    footprints : `lsst.afw.detection.FootprintSet` or
                 `lsst.afw.detection.Footprint` or
                 `iterable` of `lsst.afw.detection.Footprint`
        The footprints to measure.
    parentImage : `lsst.afw.image.Image`
        The parent image.
    subtractImageMedian : `bool`, optional
        Subtract a whole-image median from each pixel in the footprint when
        summing as a very crude background subtraction. Does not change the
        original image.

    Returns
    -------
    fluxes : `list` of `float`
        The fluxes for each footprint.

    Raises
    ------
    TypeError
        Raised for unsupported types.
    """
    median = 0
    if subtractImageMedian:
        median = np.nanmedian(parentImage.array)

    # poor person's single dispatch
    badTypeMsg = (
        "This function works with FootprintSets, single Footprints, and iterables of Footprints. "
        f"Got {type(footprints)}: {footprints}"
    )
    if isinstance(footprints, FootprintSet):
        footprints = footprints.getFootprints()
    elif isinstance(footprints, Iterable):
        if not isinstance(footprints[0], Footprint):
            raise TypeError(badTypeMsg)
    elif isinstance(footprints, Footprint):
        footprints = [footprints]
    else:
        raise TypeError(badTypeMsg)

    return np.array([fluxFromFootprint(fp, parentImage, backgroundValue=median) for fp in footprints])


def fluxFromFootprint(footprint, parentImage, backgroundValue=0):
    """Calculate the flux from a footprint, given the parent image, optionally
    subtracting a single value from each pixel as a very rough background
    subtraction, e.g. the image median.

    Parameters
    ----------
    footprint : `lsst.afw.detection.Footprint`
        The footprint to measure.
    parentImage : `lsst.afw.image.Image`
        Image containing the footprint.
    backgroundValue : `float`, optional
        The value to subtract from each pixel in the footprint when summing
        as a very crude background subtraction. Does not change the original
        image.

    Returns
    -------
    flux : `float`
        The flux in the footprint.
    """
    if backgroundValue:  # only do the subtraction if non-zero for speed
        xy0 = parentImage.getBBox().getMin()
        return footprint.computeFluxFromArray(parentImage.array - backgroundValue, xy0)
    return footprint.computeFluxFromImage(parentImage)


def humanNameForCelestialObject(objName):
    """Return a list of all human names for the object, or [] if none are
    found.

    Parameters
    ----------
    objName : `str`
        The/a name of the object.

    Returns
    -------
    names : `list` of `str`
        The names found for the object.
    """
    from astroquery.simbad import Simbad

    results = []
    try:
        simbadResult = Simbad.query_objectids(objName)
        for row in simbadResult:
            if row["ID"].startswith("NAME"):
                results.append(row["ID"].replace("NAME ", ""))
        return results
    except Exception:
        return []  # same behavior as for found but un-named objects


def _getAltAzZenithsFromSeqNum(butler, dayObs, seqNumList):
    """Get the alt, az and zenith angle for the seqNums of a given dayObs.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        The butler to query.
    dayObs : `int`
        The dayObs.
    seqNumList : `list` of `int`
        The seqNums for which to return the alt, az and zenith.

    Returns
    -------
    azimuths : `list` of `float`
        List of the azimuths for each seqNum.
    elevations : `list` of `float`
        List of the elevations for each seqNum.
    zeniths : `list` of `float`
        List of the zenith angles for each seqNum.
    """
    azimuths, elevations, zeniths = [], [], []
    for seqNum in seqNumList:
        md = butler.get("raw.metadata", day_obs=dayObs, seq_num=seqNum, detector=0)
        obsInfo = ObservationInfo(md)
        alt = obsInfo.altaz_begin.alt.value
        az = obsInfo.altaz_begin.az.value
        elevations.append(alt)
        zeniths.append(90 - alt)
        azimuths.append(az)
    return azimuths, elevations, zeniths


def getFocusFromHeader(exp):
    """Get the raw focus value from the header.

    Parameters
    ----------
    exp : `lsst.afw.image.Exposure`
        The exposure.

    Returns
    -------
    focus : `float` or `None`
        The focus value if found, else ``None``.
    """
    md = exp.getMetadata()
    if "FOCUSZ" in md:
        return md["FOCUSZ"]
    return None


def checkStackSetup():
    """Check which weekly tag is being used and which local packages are set up.

    Designed primarily for use in notebooks/observing, this prints the weekly
    tag(s) set up for lsst_distrib, and lists any locally setup packages and
    the path to each.

    Notes
    -----
    Uses print() instead of logger messages as this should simply print them
    without being vulnerable to any log messages potentially being diverted.
    """
    packages = packageUtils.getEnvironmentPackages(include_all=True)

    lsstDistribHashAndTags = packages["lsst_distrib"]  # looks something like 'g4eae7cb9+1418867f (w_2022_13)'
    lsstDistribTags = lsstDistribHashAndTags.split()[1]
    if len(lsstDistribTags.split()) == 1:
        tag = lsstDistribTags.replace("(", "")
        tag = tag.replace(")", "")
        print(f"You are running {tag} of lsst_distrib")
    else:  # multiple weekly tags found for lsst_distrib!
        print(f"The version of lsst_distrib you have is compatible with: {lsstDistribTags}")

    localPackages = []
    localPaths = []
    for package, tags in packages.items():
        if tags.startswith("LOCAL:"):
            path = tags.split("LOCAL:")[1]
            path = path.split("@")[0]  # don't need the git SHA etc
            localPaths.append(path)
            localPackages.append(package)

    if localPackages:
        print("\nLocally setup packages:")
        print("-----------------------")
        maxLen = max(len(package) for package in localPackages)
        for package, path in zip(localPackages, localPaths):
            print(f"{package:<{maxLen}s} at {path}")
    else:
        print("\nNo locally setup packages (using a vanilla stack)")


def setupLogging(longlog=False):
    """Set up logging in the same way as one would get from pipetask run.

    Code that isn't run through the butler CLI defaults to WARNING level
    messages and no logger names. This sets the behaviour to follow whatever
    the pipeline default is, currently
    <logger_name> <level>: <message>, e.g.
    lsst.isr INFO: Masking defects.
    """
    CliLog.initLog(longlog=longlog)


def getCurrentDayObs_datetime():
    """Get the current day_obs - the observatory rolls the date over at UTC-12.

    Returned as a datetime.date, e.g. datetime.date(2022, 4, 28)
    """
    utc = gettz("UTC")
    nowUtc = datetime.datetime.now().astimezone(utc)
    offset = datetime.timedelta(hours=-12)
    dayObs = (nowUtc + offset).date()
    return dayObs


def getCurrentDayObs_int():
    """Return the current dayObs as an int in the form 20220428"""
    return int(getCurrentDayObs_datetime().strftime("%Y%m%d"))


def getCurrentDayObs_humanStr():
    """Return the current dayObs as a string in the form '2022-04-28'"""
    return dayObsIntToString(getCurrentDayObs_int())


def getExpRecordAge(expRecord):
    """Get the time, in seconds, since the end of exposure.

    Parameters
    ----------
    expRecord : `lsst.daf.butler.DimensionRecord`
        The exposure record.

    Returns
    -------
    age : `float`
        The age of the exposure, in seconds.
    """
    return -1 * (expRecord.timespan.end - Time.now()).sec


def getSite():
    """Return where the code is running.

    Returns
    -------
    location : `str`
        One of:
        ['tucson', 'summit', 'base', 'staff-rsp', 'rubin-devl', 'jenkins',
         'usdf-k8s']

    Raises
    ------
    ValueError
        Raised if location cannot be determined.
    """
    # All nublado instances guarantee that EXTERNAL_INSTANCE_URL is set and
    # uniquely identifies them.
    location = os.getenv("EXTERNAL_INSTANCE_URL", "")
    if location == "https://tucson-teststand.lsst.codes":
        return "tucson"
    elif location == "https://summit-lsp.lsst.codes":
        return "summit"
    elif location == "https://base-lsp.lsst.codes":
        return "base"
    elif location == "https://usdf-rsp.slac.stanford.edu":
        return "staff-rsp"

    # if no EXTERNAL_INSTANCE_URL, try HOSTNAME to see if we're on the dev
    # nodes; it is expected that this will be extensible to SLAC
    hostname = os.getenv("HOSTNAME", "")
    if hostname.startswith("sdfrome"):
        return "rubin-devl"

    jenkinsHome = os.getenv("JENKINS_HOME", "")
    if jenkinsHome != "":
        return "jenkins"

    # we're probably inside a k8s pod doing rapid analysis work at this point
    location = os.getenv("RAPID_ANALYSIS_LOCATION", "")
    if location == "TTS":
        return "tucson"
    if location == "BTS":
        return "base"
    if location == "SUMMIT":
        return "summit"
    if location == "USDF":
        return "usdf-k8s"

    # we have failed
    raise ValueError("Location could not be determined")


def getAltAzFromSkyPosition(
    skyPos,
    visitInfo,
    doCorrectRefraction=False,
    wavelength=500.0,
    pressureOverride=None,
    temperatureOverride=None,
    relativeHumidityOverride=None,
):
    """Get the alt/az from the position on the sky and the time and location
    of the observation.

    The temperature, pressure and relative humidity are taken from the
    visitInfo by default, but can be individually overridden as needed. It
    should be noted that the visitInfo never contains a nominal wavelength,
    and so this takes a default value of 500nm.

    Parameters
    ----------
    skyPos : `lsst.geom.SpherePoint`
        The position on the sky.
    visitInfo : `lsst.afw.image.VisitInfo`
        The visit info containing the time of the observation.
    doCorrectRefraction : `bool`, optional
        Correct for the atmospheric refraction?
    wavelength : `float`, optional
        The nominal wavelength in nanometers (e.g. 500.0), as a float.
    pressureOverride : `float`, optional
        The pressure, in bars (e.g. 0.770), to override the value supplied in
        the visitInfo, as a float.
    temperatureOverride : `float`, optional
        The temperature, in Celsius (e.g. 10.0), to override the value
        supplied in the visitInfo, as a float.
    relativeHumidityOverride : `float`, optional
        The relativeHumidity in the range 0..1 (i.e. not as a percentage), to
        override the value supplied in the visitInfo, as a float.

    Returns
    -------
    alt : `lsst.geom.Angle`
        The altitude.
    az : `lsst.geom.Angle`
        The azimuth.
    """
    skyLocation = SkyCoord(skyPos.getRa().asRadians(), skyPos.getDec().asRadians(), unit=u.rad)
    long = visitInfo.observatory.getLongitude()
    lat = visitInfo.observatory.getLatitude()
    ele = visitInfo.observatory.getElevation()
    earthLocation = EarthLocation.from_geodetic(long.asDegrees(), lat.asDegrees(), ele)

    refractionKwargs = {}
    if doCorrectRefraction:
        # wavelength is never supplied in the visitInfo so always take this
        wavelength = wavelength * u.nm

        if pressureOverride:
            pressure = pressureOverride
        else:
            pressure = visitInfo.weather.getAirPressure()
            # ObservationInfos (which are the "source of truth") use pascals,
            # so convert from pascals to bars
            pressure /= 100000.0
        pressure = pressure * u.bar

        if temperatureOverride:
            temperature = temperatureOverride
        else:
            temperature = visitInfo.weather.getAirTemperature()
        temperature = temperature * u.deg_C

        if relativeHumidityOverride:
            relativeHumidity = relativeHumidityOverride
        else:
            relativeHumidity = visitInfo.weather.getHumidity() / 100.0  # this is in percent

        refractionKwargs = dict(
            pressure=pressure, temperature=temperature, relative_humidity=relativeHumidity, obswl=wavelength
        )

    # must go via astropy.Time because dafBase.dateTime.DateTime contains
    # the timezone, but going straight to visitInfo.date.toPython() loses this.
    obsTime = Time(visitInfo.date.toPython(), scale="tai")
    altAz = AltAz(obstime=obsTime, location=earthLocation, **refractionKwargs)

    obsAltAz = skyLocation.transform_to(altAz)
    alt = geom.Angle(obsAltAz.alt.degree, geom.degrees)
    az = geom.Angle(obsAltAz.az.degree, geom.degrees)

    return alt, az


def getExpPositionOffset(exp1, exp2, useWcs=True, allowDifferentPlateScales=False):
    """Get the change in sky position between two exposures.

    Given two exposures, calculate the offset on the sky between the images.
    If useWcs is True then use the (fitted or unfitted) skyOrigin from their
    WCSs, and calculate the alt/az from the observation times, otherwise use
    the nominal values in the exposures' visitInfos. Note that if using the
    visitInfo values, the ra/dec for a given pointing will be ~identical,
    regardless of whether astrometric fitting has been performed.

    Values are given as exp1-exp2.

    Parameters
    ----------
    exp1 : `lsst.afw.image.Exposure`
        The first exposure.
    exp2 : `lsst.afw.image.Exposure`
        The second exposure.
    useWcs : `bool`
        Use the WCS for the ra/dec and alt/az if True, else use the nominal/
        boresight values from the exposures' visitInfos.
    allowDifferentPlateScales : `bool`, optional
        Use to disable checking that plate scales are the same. Generally,
        differing plate scales would indicate an error, but where
        blind-solving has been undertaken during commissioning plate scales
        can be different enough to warrant setting this to ``True``.

    Returns
    -------
    offsets : `lsst.pipe.base.Struct`
        A struct containing the offsets:
        ``deltaRa``
            The difference in ra (`lsst.geom.Angle`)
        ``deltaDec``
            The difference in dec (`lsst.geom.Angle`)
        ``deltaAlt``
            The difference in alt (`lsst.geom.Angle`)
        ``deltaAz``
            The difference in az (`lsst.geom.Angle`)
        ``deltaPixels``
            The difference in pixels (`float`)
    """
    wcs1 = exp1.getWcs()
    wcs2 = exp2.getWcs()
    pixScaleArcSec = wcs1.getPixelScale().asArcseconds()
    if not allowDifferentPlateScales:
        assert np.isclose(
            pixScaleArcSec, wcs2.getPixelScale().asArcseconds()
        ), "Pixel scales in the exposures differ."

    if useWcs:
        p1 = wcs1.getSkyOrigin()
        p2 = wcs2.getSkyOrigin()
        alt1, az1 = getAltAzFromSkyPosition(p1, exp1.getInfo().getVisitInfo())
        alt2, az2 = getAltAzFromSkyPosition(p2, exp2.getInfo().getVisitInfo())
        ra1 = p1[0]
        ra2 = p2[0]
        dec1 = p1[1]
        dec2 = p2[1]
    else:
        az1 = exp1.visitInfo.boresightAzAlt[0]
        az2 = exp2.visitInfo.boresightAzAlt[0]
        alt1 = exp1.visitInfo.boresightAzAlt[1]
        alt2 = exp2.visitInfo.boresightAzAlt[1]

        ra1 = exp1.visitInfo.boresightRaDec[0]
        ra2 = exp2.visitInfo.boresightRaDec[0]
        dec1 = exp1.visitInfo.boresightRaDec[1]
        dec2 = exp2.visitInfo.boresightRaDec[1]

        p1 = exp1.visitInfo.boresightRaDec
        p2 = exp2.visitInfo.boresightRaDec

    angular_offset = p1.separation(p2).asArcseconds()
    deltaPixels = angular_offset / pixScaleArcSec

    ret = pipeBase.Struct(
        deltaRa=(ra1 - ra2).wrapNear(geom.Angle(0.0)),
        deltaDec=dec1 - dec2,
        deltaAlt=alt1 - alt2,
        deltaAz=(az1 - az2).wrapNear(geom.Angle(0.0)),
        deltaPixels=deltaPixels,
    )

    return ret


def starTrackerFileToExposure(filename, logger=None):
    """Read the exposure from the file and set the wcs from the header.

    Parameters
    ----------
    filename : `str`
        The full path to the file.
    logger : `logging.Logger`, optional
        The logger to use for errors, created if not supplied.

    Returns
    -------
    exp : `lsst.afw.image.Exposure`
        The exposure.
    """
    if not logger:
        logger = logging.getLogger(__name__)
    exp = afwImage.ExposureF(filename)
    try:
        wcs = genericCameraHeaderToWcs(exp)
        exp.setWcs(wcs)
    except Exception as e:
        logger.warning(f"Failed to set wcs from header: {e}")

    # for some reason the date isn't being set correctly
    # DATE-OBS is present in the original header, but it's being
    # stripped out and somehow not set (plus it doesn't give the midpoint
    # of the exposure), so set it manually from the midpoint here
    try:
        newArgs = {}  # dict to unpack into visitInfo.copyWith - fill it with whatever needs to be replaced
        md = exp.getMetadata()

        begin = datetime.datetime.fromisoformat(md["DATE-BEG"])
        end = datetime.datetime.fromisoformat(md["DATE-END"])
        duration = end - begin
        mid = begin + duration / 2
        newTime = dafBase.DateTime(mid.isoformat(), dafBase.DateTime.Timescale.TAI)
        newArgs["date"] = newTime

        # AIRPRESS is being set as PRESSURE so afw doesn't pick it up
        # once we're using the butler for data we will just set it to take
        # PRESSURE in the translator instead of this
        weather = exp.visitInfo.getWeather()
        oldPressure = weather.getAirPressure()
        if not np.isfinite(oldPressure):
            pressure = md.get("PRESSURE")
            if pressure is not None:
                logger.info("Patching the weather info using the PRESSURE header keyword")
                newWeather = Weather(weather.getAirTemperature(), pressure, weather.getHumidity())
                newArgs["weather"] = newWeather

        if newArgs:
            newVi = exp.visitInfo.copyWith(**newArgs)
            exp.info.setVisitInfo(newVi)
    except Exception as e:
        logger.warning(f"Failed to set date from header: {e}")

    return exp


def obsInfoToDict(obsInfo):
    """Convert an ObservationInfo to a dict.

    Parameters
    ----------
    obsInfo : `astro_metadata_translator.ObservationInfo`
        The ObservationInfo to convert.

    Returns
    -------
    obsInfoDict : `dict`
        The ObservationInfo as a dict.
    """
    return {prop: getattr(obsInfo, prop) for prop in obsInfo.all_properties.keys()}


def getFieldNameAndTileNumber(field, warn=True, logger=None):
    """Get the field name and tile number of an observed field.

    The tile number is assumed to always be appended, with an underscore, to
    the rest of the field name. Returns the name and number as a tuple, or the
    name unchanged if no tile number is found.

    Parameters
    ----------
    field : `str`
        The name of the field.
    warn : `bool`, optional
        Log a warning if the tile number cannot be determined?
    logger : `logging.Logger`, optional
        The logger to use for warnings, created if needed and not supplied.

    Returns
    -------
    fieldName : `str`
        The name of the field without the trailing tile number, if present.
    tileNum : `int`
        The number of the tile, as an integer, or ``None`` if not found.
    """
    if warn and not logger:
        logger = logging.getLogger("lsst.summit.utils.utils.getFieldNameAndTileNumber")

    if "_" not in field:
        if warn:
            logger.warning(
                f"Field {field} does not contain an underscore, so cannot determine the tile number."
            )
        return field, None

    try:
        fieldParts = field.split("_")
        fieldNum = int(fieldParts[-1])
    except ValueError:
        if warn:
            logger.warning(
                f"Field {field} does not contain only an integer after the final underscore,"
                " so cannot determine the tile number."
            )
        return field, None

    return "_".join(fieldParts[:-1]), fieldNum
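
# Example (illustrative; the field names below are made up):
#
#     >>> getFieldNameAndTileNumber("MyField_12")
#     ('MyField', 12)
#     >>> getFieldNameAndTileNumber("MyField", warn=False)
#     ('MyField', None)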


def getAirmassSeeingCorrection(airmass):
    """Get the correction factor for seeing due to airmass.

    Parameters
    ----------
    airmass : `float`
        The airmass, greater than or equal to 1.

    Returns
    -------
    correctionFactor : `float`
        The correction factor to apply to the seeing.

    Raises
    ------
    ValueError
        Raised for unphysical airmasses.
    """
    if airmass < 1:
        raise ValueError(f"Invalid airmass: {airmass}")
    return airmass ** (-0.6)
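
# Example (illustrative): the correction factor for an airmass of 1.5.
#
#     >>> round(getAirmassSeeingCorrection(1.5), 3)
#     0.784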


def getFilterSeeingCorrection(filterName):
    """Get the correction factor for seeing due to a filter.

    Parameters
    ----------
    filterName : `str`
        The name of the filter, e.g. 'SDSSg_65mm'.

    Returns
    -------
    correctionFactor : `float`
        The correction factor to apply to the seeing.

    Raises
    ------
    ValueError
        Raised for unknown filters.
    """
    match filterName:
        case "SDSSg_65mm":
            return (477.0 / 500.0) ** 0.2
        case "SDSSr_65mm":
            return (623.0 / 500.0) ** 0.2
        case "SDSSi_65mm":
            return (762.0 / 500.0) ** 0.2
        case _:
            raise ValueError(f"Unknown filter name: {filterName}")
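
# Example (illustrative): the factor follows the (wavelength / 500 nm) ** 0.2
# scaling above, e.g. for the g-band filter.
#
#     >>> round(getFilterSeeingCorrection("SDSSg_65mm"), 3)
#     0.991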


def getCdf(data, scale, nBinsMax=300_000):
    """Return an approximate cumulative distribution function scaled to
    the [0, scale] range.

    If the input data is all nan, then the output cdf will be nan, as will
    the min and max values.

    Parameters
    ----------
    data : `np.array`
        The input data.
    scale : `int`
        The scaling range of the output.
    nBinsMax : `int`, optional
        Maximum number of bins to use.

    Returns
    -------
    cdf : `np.array` of `int`
        A monotonically increasing sequence that represents a scaled
        cumulative distribution function, starting with the value at
        minVal, then at (minVal + 1), and so on.
    minVal : `float`
        An integer smaller than or equal to the minimum value in the input
        data.
    maxVal : `float`
        An integer larger than the maximum value in the input data.
    """
    flatData = data.ravel()
    size = flatData.size - np.count_nonzero(np.isnan(flatData))

    minVal = np.floor(np.nanmin(flatData))
    maxVal = np.ceil(np.nanmax(flatData)) + 1.0

    if np.isnan(minVal) or np.isnan(maxVal):
        # if either the min or max are nan, then the data is all nan as we're
        # using nanmin and nanmax. Given this, we can't calculate a cdf, so
        # return nans for all values
        return np.nan, np.nan, np.nan

    nBins = np.clip(int(maxVal) - int(minVal), 1, nBinsMax)

    hist, binEdges = np.histogram(flatData, bins=nBins, range=(int(minVal), int(maxVal)))

    cdf = (scale * np.cumsum(hist) / size).astype(np.int64)
    return cdf, minVal, maxVal
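
# Example (illustrative; the data are made up): for 100 evenly spaced values
# scaled to [0, 100], the cdf rises from ~0 up to the full scale.
#
#     >>> import numpy as np
#     >>> data = np.arange(100.0).reshape(10, 10)
#     >>> cdf, minVal, maxVal = getCdf(data, scale=100)
#     >>> float(minVal), float(maxVal)
#     (0.0, 100.0)
#     >>> int(cdf[-1])
#     100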


def getQuantiles(data, nColors):
    """Get a set of boundaries that equally distribute data into
    nColors intervals. The output can be used to make a colormap of nColors
    colors.

    This is equivalent to using the numpy function:
    np.nanquantile(data, np.linspace(0, 1, nColors + 1))
    but with a coarser precision, yet sufficient for our use case. This
    implementation gives a significant speed-up. In the case of large
    ranges, np.nanquantile is used because it is more memory efficient.

    If all elements of ``data`` are nan then the output ``boundaries`` will
    also all be ``nan`` to keep the interface consistent.

    Parameters
    ----------
    data : `np.array`
        The input image data.
    nColors : `int`
        The number of intervals to distribute data into.

    Returns
    -------
    boundaries : `list` of `float`
        A monotonically increasing sequence of size (nColors + 1). These are
        the edges of nColors intervals.
    """
    if (np.nanmax(data) - np.nanmin(data)) > 300_000:
        # Use slower but memory efficient nanquantile
        logger = logging.getLogger(__name__)
        logger.warning("Data range is very large; using slower quantile code.")
        boundaries = np.nanquantile(data, np.linspace(0, 1, nColors + 1))
    else:
        cdf, minVal, maxVal = getCdf(data, nColors)
        if np.isnan(minVal):  # cdf calculation has failed because all data is nan
            return np.asarray([np.nan for _ in range(nColors)])

        scale = (maxVal - minVal) / len(cdf)

        boundaries = np.asarray([np.argmax(cdf >= i) * scale + minVal for i in range(nColors)] + [maxVal])

    return boundaries
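
# Example (illustrative; random data, so only the shape is checked): the
# boundaries split the data into nColors roughly equally populated intervals.
#
#     >>> import numpy as np
#     >>> rng = np.random.default_rng(42)
#     >>> data = rng.normal(1000.0, 50.0, size=(100, 100))
#     >>> edges = getQuantiles(data, 8)
#     >>> len(edges)
#     9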


def digitizeData(data, nColors=256):
    """
    Scale data into nColors using its cumulative distribution function.

    Parameters
    ----------
    data : `np.array`
        The input image data.
    nColors : `int`
        The number of intervals to distribute data into.

    Returns
    -------
    data : `np.array` of `int`
        Scaled data in the [0, nColors - 1] range.
    """
    cdf, minVal, maxVal = getCdf(data, nColors - 1)
    scale = (maxVal - minVal) / len(cdf)
    # map each pixel value to its bin index in the cdf
    bins = np.floor((data - minVal) / scale).astype(np.int64)
    return cdf[bins]
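
# Example (illustrative; the ramp data are made up): a linear ramp is mapped
# onto the full [0, nColors - 1] output range.
#
#     >>> import numpy as np
#     >>> data = np.linspace(0.0, 1000.0, 10000).reshape(100, 100)
#     >>> scaled = digitizeData(data, nColors=256)
#     >>> int(scaled.min()), int(scaled.max())
#     (0, 255)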