Coverage for python/lsst/summit/utils/nightReport.py: 12% of 340 statements
# This file is part of summit_utils.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import logging
import pickle
from collections.abc import Callable
from dataclasses import dataclass
from typing import Any

import matplotlib
import matplotlib.pyplot as plt
import numpy as np
from astro_metadata_translator import ObservationInfo
from matplotlib.pyplot import cm

from lsst.utils.iteration import ensure_iterable

from .utils import getFieldNameAndTileNumber, obsInfoToDict

try:  # TODO: Remove post RFC-896: add humanize to rubin-env
    precisedelta: Callable[[Any], str]
    from humanize.time import precisedelta

    HAVE_HUMANIZE = True
except ImportError:
    # log a python warning about the lack of humanize
    logging.warning("humanize not available, install it to get better time printing")
    HAVE_HUMANIZE = False
    precisedelta = repr
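
# A rough sketch of the fallback behaviour above (output wording is humanize's and may
# vary by version; treat it as illustrative only):
#
#     precisedelta(3661)  # with humanize installed: '1 hour, 1 minute and 1 second'
#     precisedelta(3661)  # without humanize (repr fallback): '3661'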

__all__ = ["NightReport"]

CALIB_VALUES = [
    "FlatField position",
    "Park position",
    "azel_target",
    "slew_icrs",
    "DaytimeCheckout001",
    "DaytimeCheckout002",
]
N_STARS_PER_SYMBOL = 6
MARKER_SEQUENCE = [
    "*",
    "o",
    "D",
    "P",
    "v",
    "^",
    "s",
    "o",
    "v",
    "^",
    "<",
    ">",
    "1",
    "2",
    "3",
    "4",
    "8",
    "s",
    "p",
    "P",
    "*",
    "h",
    "H",
    "+",
    "x",
    "X",
    "D",
    "d",
    "|",
    "_",
]
SOUTHPOLESTAR = "HD 185975"


@dataclass
class ColorAndMarker:
    """Class for holding colors and marker symbols"""

    color: list
    marker: str = "*"


class NightReport:
    _version = 1

    def __init__(self, butler, dayObs, loadFromFile=None):
        self._supressAstroMetadataTranslatorWarnings()  # call early
        self.log = logging.getLogger("lsst.summit.utils.NightReport")
        self.butler = butler
        self.dayObs = dayObs
        self.data = dict()
        self._expRecordsLoaded = set()  # set of the expRecords loaded
        self._obsInfosLoaded = set()  # set of the seqNums loaded
        self.stars = None
        self.cMap = None
        if loadFromFile is not None:
            self._load(loadFromFile)
        self.rebuild()  # sets stars and cMap

    def _supressAstroMetadataTranslatorWarnings(self):
        """NB: must be called early"""
        logging.basicConfig()
        logger = logging.getLogger("lsst.obs.lsst.translators.latiss")
        logger.setLevel(logging.ERROR)
        logger = logging.getLogger("astro_metadata_translator.observationInfo")
        logger.setLevel(logging.ERROR)

    def save(self, filename):
        """Save the internal data to a file.

        Parameters
        ----------
        filename : `str`
            The full name and path of the file to save to.
        """
        toSave = dict(
            data=self.data,
            _expRecordsLoaded=self._expRecordsLoaded,
            _obsInfosLoaded=self._obsInfosLoaded,
            dayObs=self.dayObs,
            version=self._version,
        )
        with open(filename, "wb") as f:
            pickle.dump(toSave, f, pickle.HIGHEST_PROTOCOL)
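
    # A rough usage sketch (not from the original source) of the save/load round trip,
    # assuming a butler and a dayObs of 20230321 are available; the path is illustrative:
    #
    #     report = NightReport(butler, 20230321)
    #     report.save("/tmp/nightReport_20230321.pickle")
    #     # later, skip the slow header scraping by loading the pickled data:
    #     report = NightReport(butler, 20230321, loadFromFile="/tmp/nightReport_20230321.pickle")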

    def _load(self, filename):
        """Load the report data from a file.

        Called on init if ``loadFromFile`` is not None. Should not be called
        directly, as other attributes are populated on load in ``__init__``.

        Parameters
        ----------
        filename : `str`
            The full name and path of the file to load from.
        """
        with open(filename, "rb") as f:
            loaded = pickle.load(f)
        self.data = loaded["data"]
        self._expRecordsLoaded = loaded["_expRecordsLoaded"]
        self._obsInfosLoaded = loaded["_obsInfosLoaded"]
        dayObs = loaded["dayObs"]
        loadedVersion = loaded.get("version", 0)

        if dayObs != self.dayObs:
            raise RuntimeError(f"Loaded data is for {dayObs} but current dayObs is {self.dayObs}")
        if loadedVersion < self._version:
            self.log.critical(
                f"Loaded version is {loadedVersion} but current version is {self._version}."
                " Check carefully for compatibility issues/regenerate your saved report!"
            )
            # update to the version on the instance in case the report is
            # re-saved.
            self._version = loadedVersion
        assert len(self.data) == len(self._expRecordsLoaded)
        assert len(self.data) == len(self._obsInfosLoaded)
        self.log.info(f"Loaded {len(self.data)} records from {filename}")

    @staticmethod
    def _getSortedData(data):
        """Get a sorted copy of the internal data."""
        if list(data.keys()) == sorted(data.keys()):
            return data
        else:
            return {k: data[k] for k in sorted(data.keys())}

    def getExpRecordDictForDayObs(self, dayObs):
        """Get all the exposureRecords as dicts for the current dayObs.

        Notes
        -----
        Runs in ~0.05s for 1000 records.
        """
        expRecords = self.butler.registry.queryDimensionRecords(
            "exposure", where="exposure.day_obs=dayObs", bind={"dayObs": dayObs}, datasets="raw"
        )
        expRecords = list(expRecords)
        records = {e.seq_num: e.toDict() for e in expRecords}  # not guaranteed to be in order
        for record in records.values():
            target = record["target_name"] if record["target_name"] is not None else ""
            if target:
                shortTarget, _ = getFieldNameAndTileNumber(target, warn=False)
            else:
                shortTarget = ""
            record["target_name_short"] = shortTarget
        return self._getSortedData(records)

    def getObsInfoAndMetadataForSeqNum(self, seqNum):
        """Get the obsInfo and metadata for a given seqNum.

        TODO: Once we have a summit repo containing all this info, remove this
        method and all scraping of headers! Probably also remove the save/load
        functionality too, as the whole init will go from many minutes to
        under a second.

        Parameters
        ----------
        seqNum : `int`
            The seqNum.

        Returns
        -------
        obsInfo : `astro_metadata_translator.ObservationInfo`
            The obsInfo.
        md : `dict`
            The raw metadata.

        Notes
        -----
        Very slow, as it has to load the whole file on object store repos
        and access the file on regular filesystem repos.
        """
        dataId = {"day_obs": self.dayObs, "seq_num": seqNum, "detector": 0}
        md = self.butler.get("raw.metadata", dataId)
        return ObservationInfo(md), md.toDict()

    def rebuild(self, full=False):
        """Scrape new data if there is any; otherwise this is a no-op.

        If full is True, then all data is reloaded.

        Parameters
        ----------
        full : `bool`, optional
            Do a full reload of all the data, removing any which is pre-loaded?
        """
        if full:
            self.data = dict()
            self._expRecordsLoaded = set()
            self._obsInfosLoaded = set()

        records = self.getExpRecordDictForDayObs(self.dayObs)
        if len(records) == len(self.data):  # nothing to do
            self.log.info("No new records found")
            # NB don't return here, because we need to rebuild the
            # star maps etc if we came from a file.
        else:
            # still need to merge the new expRecordDicts into self.data
            # but only these, as the other items have obsInfos merged into them
            for seqNum in list(records.keys() - self._expRecordsLoaded):
                self.data[seqNum] = records[seqNum]
                self._expRecordsLoaded.add(seqNum)

        # now load all the obsInfos
        seqNums = list(records.keys())
        obsInfosToLoad = set(seqNums) - self._obsInfosLoaded
        if obsInfosToLoad:
            self.log.info(f"Loading {len(obsInfosToLoad)} obsInfo(s)")
        for i, seqNum in enumerate(obsInfosToLoad):
            if (i + 1) % 200 == 0:
                self.log.info(f"Loaded {i+1} obsInfos")
            obsInfo, metadata = self.getObsInfoAndMetadataForSeqNum(seqNum)
            obsInfoDict = obsInfoToDict(obsInfo)
            records[seqNum].update(obsInfoDict)
            # _raw_metadata item will hopefully not be needed in the future
            # but add it while we have it for free, as it has DIMM seeing
            records[seqNum]["_raw_metadata"] = metadata
            self._obsInfosLoaded.add(seqNum)

        self.data = self._getSortedData(self.data)  # make sure we stay sorted
        self.stars = self.getObservedObjects()
        self.cMap = self.makeStarColorAndMarkerMap(self.stars)

    def getDatesForSeqNums(self):
        """Get a dict of {seqNum: date} for the report.

        Returns
        -------
        dates : `dict`
            Dict of {seqNum: date} for the current report.
        """
        return {
            seqNum: self.data[seqNum]["timespan"].begin.to_datetime() for seqNum in sorted(self.data.keys())
        }

    def getObservedObjects(self, ignoreTileNum=True):
        """Get a list of the observed objects for the night.

        Repeated observations of individual imaging fields have _NNN appended
        to the field name. Use ``ignoreTileNum`` to remove these, collapsing
        the observations of the field to a single target name.

        Parameters
        ----------
        ignoreTileNum : `bool`, optional
            Remove the trailing _NNN tile number for imaging fields?
        """
        key = "target_name_short" if ignoreTileNum else "target_name"
        allTargets = sorted({record[key] if record[key] is not None else "" for record in self.data.values()})
        return allTargets

    def getSeqNumsMatching(self, invert=False, subset=None, **kwargs):
        """Get seqNums which match/don't match all kwargs provided, e.g.

        report.getSeqNumsMatching(exposure_time=30,
                                  target_name='ETA1 DOR')

        Set invert=True to get all seqNums which don't match the provided
        args, e.g. to find all seqNums which are not calibs

        Subset allows for repeated filtering by passing in a set of seqNums
        """
        # copy data so we can pop, and restrict to subset if provided
        local = {seqNum: rec for seqNum, rec in self.data.items() if (subset is None or seqNum in subset)}

        # for each kwarg, filter out items which match/don't
        for filtAttr, filtVal in kwargs.items():
            toPop = []  # can't pop inside inner loop so collect
            for seqNum, record in local.items():
                v = record.get(filtAttr)
                if invert:
                    if v == filtVal:
                        toPop.append(seqNum)
                else:
                    if v != filtVal:
                        toPop.append(seqNum)
            for seqNum in toPop:
                local.pop(seqNum)

        return sorted(local.keys())
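
    # A short usage sketch (example values, not from the source): filters can be chained
    # by passing a previous result back in as ``subset``, and inverted to exclude matches:
    #
    #     etaDor = report.getSeqNumsMatching(target_name="ETA1 DOR")
    #     etaDorSubset = report.getSeqNumsMatching(subset=set(etaDor), observation_type="science")
    #     nonScience = report.getSeqNumsMatching(invert=True, observation_type="science")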

    def printAvailableKeys(self, sample=False, includeRaw=False):
        """Print all the keys available to query on, optionally including the
        full set of header keys.

        Note that the values are a mix of types: some are ints/floats/strings,
        but some are astropy quantities.

        If sample is True, then a sample value for each key is printed too,
        which is useful for dealing with types and seeing what each item
        actually means.
        """
        for seqNum, recordDict in self.data.items():  # loop + break because we don't know the first seqNum
            for k, v in recordDict.items():
                if sample:
                    print(f"{k}: {v}")
                else:
                    print(k)
            if includeRaw:
                print("\nRaw header keys in _raw_metadata:")
                for k in recordDict["_raw_metadata"]:
                    print(k)
            break

    @staticmethod
    def makeStarColorAndMarkerMap(stars):
        """Create a color/marker map for a list of observed objects."""
        markerMap = {}
        colors = cm.rainbow(np.linspace(0, 1, N_STARS_PER_SYMBOL))
        for i, star in enumerate(stars):
            markerIndex = i // N_STARS_PER_SYMBOL
            colorIndex = i % N_STARS_PER_SYMBOL
            markerMap[star] = ColorAndMarker(colors[colorIndex], MARKER_SEQUENCE[markerIndex])
        return markerMap
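
    # How the map cycles (reading directly from the code above): star i gets color
    # i % N_STARS_PER_SYMBOL from a 6-step rainbow and marker
    # MARKER_SEQUENCE[i // N_STARS_PER_SYMBOL], so e.g. the 7th star reuses the first
    # color but moves on to the "o" marker.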

    def calcShutterTimes(self):
        """Calculate the total time spent on science, engineering and readout.

        Science and engineering time both include the time spent on readout,
        such that if images were taken all night with no downtime and no slews
        the efficiency would be 100%.

        Returns
        -------
        timings : `dict`
            Dictionary of the various calculated times, in seconds, and the
            seqNums of the first and last observations used in the calculation.
        """
        firstObs = self.getObservingStartSeqNum(method="heuristic")
        if not firstObs:
            self.log.warning("No on-sky observations found.")
            return None
        lastObs = max(self.data.keys())

        begin = self.data[firstObs]["datetime_begin"]
        end = self.data[lastObs]["datetime_end"]

        READOUT_TIME = 2.0
        shutterOpenTime = sum([self.data[s]["exposure_time"] for s in range(firstObs, lastObs + 1)])
        readoutTime = sum([READOUT_TIME for _ in range(firstObs, lastObs + 1)])

        sciSeqNums = self.getSeqNumsMatching(observation_type="science")
        scienceIntegration = sum([self.data[s]["exposure_time"] for s in sciSeqNums])
        scienceTimeTotal = scienceIntegration.value + (len(sciSeqNums) * READOUT_TIME)

        result = {}
        result["firstObs"] = firstObs
        result["lastObs"] = lastObs
        result["startTime"] = begin
        result["endTime"] = end
        result["nightLength"] = (end - begin).sec  # was a datetime.timedelta
        result["shutterOpenTime"] = shutterOpenTime.value  # was a Quantity
        result["readoutTime"] = readoutTime
        result["scienceIntegration"] = scienceIntegration.value  # was a Quantity
        result["scienceTimeTotal"] = scienceTimeTotal

        return result
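
    # Rough sketch (mirroring printShutterTimes below) of how the efficiencies follow
    # from the returned dict; all values are in seconds:
    #
    #     timings = report.calcShutterTimes()
    #     engEff = 100 * (timings["shutterOpenTime"] + timings["readoutTime"]) / timings["nightLength"]
    #     sciEff = 100 * timings["scienceTimeTotal"] / timings["nightLength"]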

    def printShutterTimes(self):
        """Print out the shutter efficiency stats in a human-readable
        format.
        """
        if not HAVE_HUMANIZE:
            self.log.warning("Please install humanize to make this print as intended.")
        timings = self.calcShutterTimes()
        if not timings:
            print("No on-sky observations found, so no shutter efficiency stats are available yet.")
            return

        print(
            f"Observations started at: seqNum {timings['firstObs']:>3} at"
            f" {timings['startTime'].to_datetime().strftime('%H:%M:%S')} TAI"
        )
        print(
            f"Observations ended at: seqNum {timings['lastObs']:>3} at"
            f" {timings['endTime'].to_datetime().strftime('%H:%M:%S')} TAI"
        )
        print(f"Total time on sky: {precisedelta(timings['nightLength'])}")
        print()
        print(f"Shutter open time: {precisedelta(timings['shutterOpenTime'])}")
        print(f"Readout time: {precisedelta(timings['readoutTime'])}")
        engEff = 100 * (timings["shutterOpenTime"] + timings["readoutTime"]) / timings["nightLength"]
        print(f"Engineering shutter efficiency = {engEff:.1f}%")
        print()
        print(f"Science integration: {precisedelta(timings['scienceIntegration'])}")
        sciEff = 100 * (timings["scienceTimeTotal"] / timings["nightLength"])
        print(f"Science shutter efficiency = {sciEff:.1f}%")

    def getTimeDeltas(self):
        """Return a dict, keyed by seqNum, of the time since the end of the
        previous integration. This gap includes the readout, so it is always
        greater than or equal to the readout time.

        Returns
        -------
        timeGaps : `dict`
            Dictionary of the time gaps, in seconds, keyed by seqNum.
        """
        seqNums = list(self.data.keys())  # need a list not a generator, and NB it might not be contiguous!
        dts = [0]  # first item is zero by definition
        for i, seqNum in enumerate(seqNums[1:]):
            dt = self.data[seqNum]["datetime_begin"] - self.data[seqNums[i]]["datetime_end"]
            dts.append(dt.sec)

        return {s: dt for s, dt in zip(seqNums, dts)}

    def printObsGaps(self, threshold=100, includeCalibs=False):
        """Print out the gaps between observations in a human-readable format.

        Prints the most recent gaps first.

        Parameters
        ----------
        threshold : `float`, optional
            The minimum time gap to print out, in seconds.
        includeCalibs : `bool`, optional
            If True, start at the lowest seqNum, otherwise start when the
            night's observing started.
        """
        if not HAVE_HUMANIZE:
            self.log.warning("Please install humanize to make this print as intended.")
        dts = self.getTimeDeltas()

        allSeqNums = list(self.data.keys())
        if includeCalibs:
            seqNums = allSeqNums
        else:
            firstObs = self.getObservingStartSeqNum(method="heuristic")
            if not firstObs:
                print("No on-sky observations found, so there can be no gaps in observing yet.")
                return
            # there is always a big gap before firstObs by definition so add 1
            startPoint = allSeqNums.index(firstObs) + 1
            seqNums = allSeqNums[startPoint:]

        messages = []
        for seqNum in reversed(seqNums):
            dt = dts[seqNum]
            if dt > threshold:
                messages.append(f"seqNum {seqNum:3}: {precisedelta(dt)} gap")

        if messages:
            print(f"Gaps between observations greater than {threshold}s:")
            for line in messages:
                print(line)

    def getObservingStartSeqNum(self, method="safe"):
        """Get the seqNum at which on-sky observations started.

        If no on-sky observations were taken ``None`` is returned.

        Parameters
        ----------
        method : `str`
            The calculation method to use. Options are:
            - 'safe': Use the first seqNum with an observation_type that is
              explicitly not a calibration or test. This is a safe way of
              excluding the calibs, but will include observations where we
              take some closed dome test images, or start observing too early,
              and go back to taking calibs for a while before the night starts.
            - 'heuristic': Use a heuristic to find the first seqNum. The
              current heuristic is to find the first seqNum with an observation
              type of CWFS, as we always do a CWFS focus before going on sky.
              This does not work well for old dayObs values, because this wasn't
              always the way data was taken. Note: may be updated in the future,
              at which point this will be renamed ``cwfs``.

        Returns
        -------
        startSeqNum : `int`
            The seqNum of the start of the night's observing.
        """
        allowedMethods = ["heuristic", "safe"]
        if method not in allowedMethods:
            raise ValueError(f"Method must be one of {allowedMethods}")

        if method == "safe":
            # as of 20221211, the full set of observation_types ever seen is:
            # acq, bias, cwfs, dark, engtest, flat, focus, science, stuttered,
            # test, unknown
            offSkyObsTypes = ["bias", "dark", "flat", "test", "unknown"]
            for seqNum in sorted(self.data.keys()):
                if self.data[seqNum]["observation_type"] not in offSkyObsTypes:
                    return seqNum
            return None

        if method == "heuristic":
            # take the first cwfs image and return that
            seqNums = self.getSeqNumsMatching(observation_type="cwfs")
            if not seqNums:
                self.log.warning("No cwfs images found, observing is assumed not to have started.")
                return None
            return min(seqNums)
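
    # Illustrative comparison of the two methods (seqNum values are hypothetical): on a
    # night where calibs ran up to seqNum 57 and the CWFS focus was seqNum 61,
    # method="safe" returns the first frame that is not a calib/test (possibly an early
    # closed-dome acq), while method="heuristic" returns 61.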

    def printObsTable(self, **kwargs):
        """Print a table of the day's observations.

        Parameters
        ----------
        **kwargs : `dict`
            Filter the observation table according to seqNums which match these
            {k: v} pairs. For example, to only print out science observations
            pass ``observation_type='science'``.
        """
        seqNums = self.data.keys() if not kwargs else self.getSeqNumsMatching(**kwargs)
        seqNums = sorted(seqNums)  # should always be sorted, but is a total disaster here if not

        dts = self.getTimeDeltas()
        lines = []
        for seqNum in seqNums:
            try:
                expTime = self.data[seqNum]["exposure_time"].value
                imageType = self.data[seqNum]["observation_type"]
                target = self.data[seqNum]["target_name"]
                deadtime = dts[seqNum]
                filt = self.data[seqNum]["physical_filter"]

                msg = f"{seqNum} {target} {expTime:.1f} {deadtime:.02f} {imageType} {filt}"
            except Exception:
                msg = f"Error parsing {seqNum}!"
            lines.append(msg)

        print(r"seqNum target expTime deadtime imageType filt")
        print(r"------ ------ ------- -------- --------- ----")
        for line in lines:
            print(line)

    def getExposureMidpoint(self, seqNum):
        """Return the midpoint of the exposure as a python datetime.

        Parameters
        ----------
        seqNum : `int`
            The seqNum to get the midpoint for.

        Returns
        -------
        midpoint : `datetime.datetime`
            The midpoint, as a python datetime object.
        """
        timespan = self.data[seqNum]["timespan"]
        expTime = self.data[seqNum]["exposure_time"]
        return (timespan.begin + expTime / 2).to_datetime()

    def plotPerObjectAirMass(self, objects=None, airmassOneAtTop=True, saveFig=""):
        """Plot the airmass for objects observed over the course of the night.

        Parameters
        ----------
        objects : `list` [`str`], optional
            The objects to plot. If not provided, all objects are plotted.
        airmassOneAtTop : `bool`, optional
            Put the airmass of 1 at the top of the plot, like astronomers
            expect.
        saveFig : `str`, optional
            Save the figure to this file path?
        """
        if not objects:
            objects = self.stars

        objects = ensure_iterable(objects)

        plt.figure(figsize=(10, 6))
        for star in objects:
            if star in CALIB_VALUES:
                continue
            seqNums = self.getSeqNumsMatching(target_name_short=star)
            airMasses = [self.data[seqNum]["boresight_airmass"] for seqNum in seqNums]
            obsTimes = [self.getExposureMidpoint(seqNum) for seqNum in seqNums]
            color = self.cMap[star].color
            marker = self.cMap[star].marker
            plt.plot(obsTimes, airMasses, color=color, marker=marker, label=star, ms=10, ls="")

        plt.ylabel("Airmass", fontsize=20)
        plt.xlabel("Time (UTC)", fontsize=20)
        plt.xticks(rotation=25, horizontalalignment="right")

        ax = plt.gca()
        xfmt = matplotlib.dates.DateFormatter("%m-%d %H:%M:%S")
        ax.xaxis.set_major_formatter(xfmt)

        if airmassOneAtTop:
            ax.set_ylim(ax.get_ylim()[::-1])

        plt.legend(bbox_to_anchor=(1, 1.025), prop={"size": 15}, loc="upper left")

        plt.tight_layout()
        if saveFig:
            plt.savefig(saveFig)
        plt.show()
        plt.close()
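
    # Example call (the target is the module's SOUTHPOLESTAR; the path is illustrative):
    #
    #     report.plotPerObjectAirMass(objects=["HD 185975"], saveFig="/tmp/airmass.png")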

    def _makePolarPlot(
        self, azimuthsInDegrees, zenithAngles, marker="*-", title=None, makeFig=True, color=None, objName=None
    ):
        """Private method to actually do the polar plotting.

        Parameters
        ----------
        azimuthsInDegrees : `list` [`float`]
            The azimuth values, in degrees.
        zenithAngles : `list` [`float`]
            The zenith angle values, but more generally, the values on the
            radial axis, so can be in whatever units you want.
        marker : `str`, optional
            The marker to use.
        title : `str`, optional
            The plot title.
        makeFig : `bool`, optional
            Make a new figure?
        color : `str`, optional
            The marker color.
        objName : `str`, optional
            The object name, for the legend.

        Returns
        -------
        ax : `matplotlib.axes.Axes`
            The axes on which the plot was made.
        """
        if makeFig:
            _ = plt.figure(figsize=(10, 10))
        ax = plt.subplot(111, polar=True)
        ax.plot([a * np.pi / 180 for a in azimuthsInDegrees], zenithAngles, marker, c=color, label=objName)
        if title:
            ax.set_title(title, va="bottom")
        ax.set_theta_zero_location("N")
        ax.set_theta_direction(-1)
        ax.set_rlim(0, 90)
        return ax

    def makeAltAzCoveragePlot(self, objects=None, withLines=False, saveFig=""):
        """Make a polar plot of the azimuth and zenith angle for each object.

        Plots the azimuth on the theta axis, and zenith angle (not altitude!)
        on the radius axis, such that 0 is at the centre, like you're looking
        top-down on the telescope.

        Parameters
        ----------
        objects : `list` [`str`], optional
            The objects to plot. If not provided, all objects are plotted.
        withLines : `bool`, optional
            Connect the points with lines?
        saveFig : `str`, optional
            Save the figure to this file path?
        """
        if not objects:
            objects = self.stars
        objects = ensure_iterable(objects)

        _ = plt.figure(figsize=(14, 10))

        for obj in objects:
            if obj in CALIB_VALUES:
                continue
            seqNums = self.getSeqNumsMatching(target_name_short=obj)
            altAzes = [self.data[seqNum]["altaz_begin"] for seqNum in seqNums]
            alts = [altAz.alt.deg for altAz in altAzes if altAz is not None]
            azes = [altAz.az.deg for altAz in altAzes if altAz is not None]
            assert len(alts) == len(azes)
            if len(azes) == 0:
                self.log.warning(f"Found no alt/az data for {obj}")
            zens = [90 - alt for alt in alts]
            color = self.cMap[obj].color
            marker = self.cMap[obj].marker
            if withLines:
                marker += "-"

            ax = self._makePolarPlot(
                azes, zens, marker=marker, title=None, makeFig=False, color=color, objName=obj
            )
        lgnd = ax.legend(bbox_to_anchor=(1.05, 1), prop={"size": 15}, loc="upper left")
        ax.set_title("Axial coverage - azimuth (theta, deg) vs zenith angle (r, deg)", size=20)
        for h in lgnd.legendHandles:
            size = 14
            if "-" in marker:
                size += 5
            h.set_markersize(size)

        plt.tight_layout()
        if saveFig:
            plt.savefig(saveFig)
        plt.show()
        plt.close()
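
    # Putting it together, a hedged end-to-end sketch (butler setup not shown; the
    # dayObs value and file path are illustrative):
    #
    #     report = NightReport(butler, 20230321)
    #     report.printShutterTimes()
    #     report.printObsGaps(threshold=120)
    #     report.makeAltAzCoveragePlot(withLines=True, saveFig="/tmp/altAzCoverage.png")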