Coverage for python/lsst/summit/utils/nightReport.py: 11%

307 statements  

coverage.py v6.5.0, created at 2023-01-28 11:53 +0000

# This file is part of summit_utils.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import pickle
import logging

from dataclasses import dataclass
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.pyplot import cm

from lsst.utils.iteration import ensure_iterable
from astro_metadata_translator import ObservationInfo
from .utils import obsInfoToDict, getFieldNameAndTileNumber

try:  # TODO: Remove post RFC-896: add humanize to rubin-env
    from humanize.time import precisedelta
    HAVE_HUMANIZE = True
except ImportError:
    # log a python warning about the lack of humanize
    logging.warning("humanize not available, install it to get better time printing")
    HAVE_HUMANIZE = False
    precisedelta = repr


__all__ = ['NightReport']

CALIB_VALUES = ['FlatField position', 'Park position', 'azel_target']
# TODO: add skips for calib values
N_STARS_PER_SYMBOL = 6
MARKER_SEQUENCE = ['*', 'o', "D", 'P', 'v', "^", 's', 'o', 'v', '^', '<', '>',
                   '1', '2', '3', '4', '8', 's', 'p', 'P', '*', 'h', 'H', '+',
                   'x', 'X', 'D', 'd', '|', '_']
SOUTHPOLESTAR = 'HD 185975'

@dataclass
class ColorAndMarker:
    '''Class for holding colors and marker symbols'''
    color: list
    marker: str = '*'

class NightReport():
    def __init__(self, butler, dayObs, loadFromFile=None):
        self._supressAstroMetadataTranslatorWarnings()  # call early
        self.log = logging.getLogger('lsst.summit.utils.NightReport')
        self.butler = butler
        self.dayObs = dayObs
        self.data = dict()
        self._expRecordsLoaded = set()  # set of the expRecords loaded
        self._obsInfosLoaded = set()  # set of the seqNums loaded
        self.stars = None
        self.cMap = None
        if loadFromFile is not None:
            self._load(loadFromFile)
        self.rebuild()  # sets stars and cMap

    def _supressAstroMetadataTranslatorWarnings(self):
        """NB: must be called early"""
        logging.basicConfig()
        logger = logging.getLogger("lsst.obs.lsst.translators.latiss")
        logger.setLevel(logging.ERROR)
        logger = logging.getLogger("astro_metadata_translator.observationInfo")
        logger.setLevel(logging.ERROR)

    def save(self, filename):
        """Save the internal data to a file.

        Parameters
        ----------
        filename : `str`
            The full name and path of the file to save to.
        """
        toSave = (self.data, self._expRecordsLoaded, self._obsInfosLoaded, self.dayObs)
        with open(filename, "wb") as f:
            pickle.dump(toSave, f, pickle.HIGHEST_PROTOCOL)

    def _load(self, filename):
        """Load the report data from a file.

        Called on init if ``loadFromFile`` is not None. Should not be used
        directly, as other attributes are populated on load in ``__init__``.

        Parameters
        ----------
        filename : `str`
            The full name and path of the file to load from.
        """
        with open(filename, "rb") as f:
            loaded = pickle.load(f)
        self.data, self._expRecordsLoaded, self._obsInfosLoaded, dayObs = loaded
        if dayObs != self.dayObs:
            raise RuntimeError(f"Loaded data is for {dayObs} but current dayObs is {self.dayObs}")
        assert len(self.data) == len(self._expRecordsLoaded)
        assert len(self.data) == len(self._obsInfosLoaded)
        self.log.info(f"Loaded {len(self.data)} records from {filename}")

    @staticmethod
    def _getSortedData(data):
        """Get a sorted copy of the internal data.
        """
        if list(data.keys()) == sorted(data.keys()):
            return data
        else:
            return {k: data[k] for k in sorted(data.keys())}

    def getExpRecordDictForDayObs(self, dayObs):
        """Get all the exposureRecords as dicts for the current dayObs.

        Notes
        -----
        Runs in ~0.05s for 1000 records.
        """
        expRecords = self.butler.registry.queryDimensionRecords("exposure",
                                                                where="exposure.day_obs=day_obs",
                                                                bind={'day_obs': dayObs},
                                                                datasets='raw')
        expRecords = list(expRecords)
        records = {e.seq_num: e.toDict() for e in expRecords}  # not guaranteed to be in order
        for record in records.values():
            target = record['target_name'] if record['target_name'] is not None else ''
            if target:
                shortTarget, _ = getFieldNameAndTileNumber(target, warn=False)
            else:
                shortTarget = ''
            record['target_name_short'] = shortTarget
        return self._getSortedData(records)

    def getObsInfoAndMetadataForSeqNum(self, seqNum):
        """Get the obsInfo and metadata for a given seqNum.

        TODO: Once we have a summit repo containing all this info, remove this
        method and all scraping of headers! Probably also remove the save/load
        functionality too, as the whole init will go from many minutes to
        under a second.

        Parameters
        ----------
        seqNum : `int`
            The seqNum.

        Returns
        -------
        obsInfo : `astro_metadata_translator.ObservationInfo`
            The obsInfo.
        md : `dict`
            The raw metadata.

        Notes
        -----
        Very slow, as it has to load the whole file on object store repos
        and access the file on regular filesystem repos.
        """
        dataId = {'day_obs': self.dayObs, 'seq_num': seqNum, 'detector': 0}
        md = self.butler.get('raw.metadata', dataId)
        return ObservationInfo(md), md.toDict()

    def rebuild(self, full=False):
        """Scrape new data if there is any, otherwise this is a no-op.

        If ``full`` is True, then all data is reloaded.

        Parameters
        ----------
        full : `bool`, optional
            Do a full reload of all the data, removing any which is pre-loaded?
        """
        if full:
            self.data = dict()
            self._expRecordsLoaded = set()
            self._obsInfosLoaded = set()

        records = self.getExpRecordDictForDayObs(self.dayObs)
        if len(records) == len(self.data):  # nothing to do
            self.log.info('No new records found')
            # NB don't return here, because we need to rebuild the
            # star maps etc if we came from a file.
        else:
            # still need to merge the new expRecordDicts into self.data
            # but only these, as the other items have obsInfos merged into them
            for seqNum in list(records.keys() - self._expRecordsLoaded):
                self.data[seqNum] = records[seqNum]
                self._expRecordsLoaded.add(seqNum)

            # now load all the obsInfos
            seqNums = list(records.keys())
            obsInfosToLoad = set(seqNums) - self._obsInfosLoaded
            if obsInfosToLoad:
                self.log.info(f"Loading {len(obsInfosToLoad)} obsInfo(s)")
                for i, seqNum in enumerate(obsInfosToLoad):
                    if (i + 1) % 200 == 0:
                        self.log.info(f"Loaded {i+1} obsInfos")
                    obsInfo, metadata = self.getObsInfoAndMetadataForSeqNum(seqNum)
                    obsInfoDict = obsInfoToDict(obsInfo)
                    records[seqNum].update(obsInfoDict)
                    # _raw_metadata item will hopefully not be needed in the future
                    # but add it while we have it for free, as it has DIMM seeing
                    records[seqNum]['_raw_metadata'] = metadata
                    self._obsInfosLoaded.add(seqNum)

        self.data = self._getSortedData(self.data)  # make sure we stay sorted
        self.stars = self.getObservedObjects()
        self.cMap = self.makeStarColorAndMarkerMap(self.stars)

    def getObservedObjects(self, ignoreTileNum=True):
        """Get a list of the observed objects for the night.

        Repeated observations of individual imaging fields have _NNN appended
        to the field name. Use ``ignoreTileNum`` to remove these, collapsing
        the observations of the field to a single target name.

        Parameters
        ----------
        ignoreTileNum : `bool`, optional
            Remove the trailing _NNN tile number for imaging fields?
        """
        key = 'target_name_short' if ignoreTileNum else 'target_name'
        allTargets = sorted({record[key] if record[key] is not None else ''
                             for record in self.data.values()})
        return allTargets

    def getSeqNumsMatching(self, invert=False, subset=None, **kwargs):
        """Get seqNums which match/don't match all kwargs provided, e.g.

            report.getSeqNumsMatching(exposure_time=30,
                                      target_name='ETA1 DOR')

        Set invert=True to get all seqNums which don't match the provided
        args, e.g. to find all seqNums which are not calibs.

        Subset allows for repeated filtering by passing in a set of seqNums.
        """
        # copy data so we can pop, and restrict to subset if provided
        local = {seqNum: rec for seqNum, rec in self.data.items() if (subset is None or seqNum in subset)}

        # for each kwarg, filter out items which match/don't
        for filtAttr, filtVal in kwargs.items():
            toPop = []  # can't pop inside inner loop so collect
            for seqNum, record in local.items():
                v = record.get(filtAttr)
                if invert:
                    if v == filtVal:
                        toPop.append(seqNum)
                else:
                    if v != filtVal:
                        toPop.append(seqNum)
            for seqNum in toPop:
                local.pop(seqNum)

        return sorted(local.keys())
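    # Illustrative sketch of chained filtering with ``subset`` and ``invert``
    # (assumes a ``report`` instance for a night containing science data):
    #
    #     science = report.getSeqNumsMatching(observation_type='science')
    #     notEta1Dor = report.getSeqNumsMatching(invert=True, subset=science,
    #                                            target_name='ETA1 DOR')
    #
    # The second call keeps only the science seqNums whose target_name is not
    # 'ETA1 DOR', because each call only filters within the supplied subset.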

    def printAvailableKeys(self, sample=False, includeRaw=False):
        """Print all the keys available to query on, optionally including the
        full set of header keys.

        Note that there is a big mix of types: some values are int/float/str,
        but some are astropy quantities.

        If sample is True, then a sample value for each key is printed too,
        which is useful for dealing with types and seeing what each item
        actually means.
        """
        for seqNum, recordDict in self.data.items():  # loop + break because we don't know the first seqNum
            for k, v in recordDict.items():
                if sample:
                    print(f"{k}: {v}")
                else:
                    print(k)
            if includeRaw:
                print("\nRaw header keys in _raw_metadata:")
                for k in recordDict['_raw_metadata']:
                    print(k)
            break

    @staticmethod
    def makeStarColorAndMarkerMap(stars):
        """Create a color/marker map for a list of observed objects.
        """
        markerMap = {}
        colors = cm.rainbow(np.linspace(0, 1, N_STARS_PER_SYMBOL))
        for i, star in enumerate(stars):
            markerIndex = i // N_STARS_PER_SYMBOL
            colorIndex = i % N_STARS_PER_SYMBOL
            markerMap[star] = ColorAndMarker(colors[colorIndex], MARKER_SEQUENCE[markerIndex])
        return markerMap
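    # Note on the mapping above: the color cycles every N_STARS_PER_SYMBOL
    # targets and the marker symbol advances each time the colors wrap, so
    # neighbouring targets in the (sorted) star list are distinguished by
    # color first and then by marker. More than
    # len(MARKER_SEQUENCE) * N_STARS_PER_SYMBOL targets would raise an
    # IndexError in the MARKER_SEQUENCE lookup above.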

    def calcShutterTimes(self):
        """Calculate the total time spent on science, engineering and readout.

        Science and engineering time both include the time spent on readout,
        such that if images were taken all night with no downtime and no slews
        the efficiency would be 100%.

        Returns
        -------
        timings : `dict`
            Dictionary of the various calculated times, in seconds, and the
            seqNums of the first and last observations used in the calculation.
        """
        firstObs = self.getObservingStartSeqNum(method='safe')
        lastObs = max(self.data.keys())

        begin = self.data[firstObs]['datetime_begin']
        end = self.data[lastObs]['datetime_end']

        READOUT_TIME = 2.0
        shutterOpenTime = sum([self.data[s]['exposure_time'] for s in range(firstObs, lastObs + 1)])
        readoutTime = sum([READOUT_TIME for _ in range(firstObs, lastObs + 1)])

        sciSeqNums = self.getSeqNumsMatching(observation_type='science')
        scienceIntegration = sum([self.data[s]['exposure_time'] for s in sciSeqNums])
        scienceTimeTotal = scienceIntegration.value + (len(sciSeqNums) * READOUT_TIME)

        result = {}
        result['firstObs'] = firstObs
        result['lastObs'] = lastObs
        result['startTime'] = begin
        result['endTime'] = end
        result['nightLength'] = (end - begin).sec  # was an astropy TimeDelta
        result['shutterOpenTime'] = shutterOpenTime.value  # was a Quantity
        result['readoutTime'] = readoutTime
        result['scienceIntegration'] = scienceIntegration.value  # was a Quantity
        result['scienceTimeTotal'] = scienceTimeTotal

        return result
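    # A sketch of how printShutterTimes() combines these numbers:
    #
    #     engineering efficiency = (shutterOpenTime + readoutTime) / nightLength
    #     science efficiency = scienceTimeTotal / nightLength
    #                        = (scienceIntegration + nScience * READOUT_TIME) / nightLength
    #
    # so a night of back-to-back exposures with no slews or gaps scores ~100%.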

    def printShutterTimes(self):
        """Print out the shutter efficiency stats in a human-readable format.
        """
        if not HAVE_HUMANIZE:
            self.log.warning('Please install humanize to make this print as intended.')
        timings = self.calcShutterTimes()

        print(f"Observations started at: seqNum {timings['firstObs']:>3} at"
              f" {timings['startTime'].to_datetime().strftime('%H:%M:%S')} TAI")
        print(f"Observations ended at: seqNum {timings['lastObs']:>3} at"
              f" {timings['endTime'].to_datetime().strftime('%H:%M:%S')} TAI")
        print(f"Total time on sky: {precisedelta(timings['nightLength'])}")
        print()
        print(f"Shutter open time: {precisedelta(timings['shutterOpenTime'])}")
        print(f"Readout time: {precisedelta(timings['readoutTime'])}")
        engEff = 100 * (timings['shutterOpenTime'] + timings['readoutTime']) / timings['nightLength']
        print(f"Engineering shutter efficiency = {engEff:.1f}%")
        print()
        print(f"Science integration: {precisedelta(timings['scienceIntegration'])}")
        sciEff = 100 * (timings['scienceTimeTotal'] / timings['nightLength'])
        print(f"Science shutter efficiency = {sciEff:.1f}%")

    def getTimeDeltas(self):
        """Returns a dict, keyed by seqNum, of the time since the end of the
        last integration. The gap includes the readout, so it is always
        greater than or equal to the readout time.

        Returns
        -------
        timeGaps : `dict`
            Dictionary of the time gaps, in seconds, keyed by seqNum.
        """
        seqNums = list(self.data.keys())  # need a list not a generator, and NB it might not be contiguous!
        dts = [0]  # first item is zero by definition
        for i, seqNum in enumerate(seqNums[1:]):
            dt = self.data[seqNum]['datetime_begin'] - self.data[seqNums[i]]['datetime_end']
            dts.append(dt.sec)

        return {s: dt for s, dt in zip(seqNums, dts)}

    def printObsGaps(self, threshold=100, includeCalibs=False):
        """Print out the gaps between observations in a human-readable format.

        Parameters
        ----------
        threshold : `float`, optional
            The minimum time gap to print out, in seconds.
        includeCalibs : `bool`, optional
            If True, start at the lowest seqNum, otherwise start when the
            night's observing started.
        """
        if not HAVE_HUMANIZE:
            self.log.warning('Please install humanize to make this print as intended.')
        dts = self.getTimeDeltas()

        allSeqNums = list(self.data.keys())
        if includeCalibs:
            seqNums = allSeqNums
        else:
            firstObs = self.getObservingStartSeqNum(method='safe')
            # there is always a big gap before firstObs by definition so add 1
            startPoint = allSeqNums.index(firstObs) + 1
            seqNums = allSeqNums[startPoint:]

        messages = []
        for seqNum in seqNums:
            dt = dts[seqNum]
            if dt > threshold:
                messages.append(f"seqNum {seqNum:3}: {precisedelta(dt)} gap")

        if messages:
            print(f"Gaps between observations greater than {threshold}s:")
            for line in messages:
                print(line)

    def getObservingStartSeqNum(self, method='safe'):
        """Get the seqNum at which on-sky observations started.

        If no on-sky observations were taken ``None`` is returned.

        Parameters
        ----------
        method : `str`
            The calculation method to use. Options are:
            - 'safe': Use the first seqNum with an observation_type that is
              explicitly not a calibration or test. This is a safe way of
              excluding the calibs, but will include observations where we
              take some closed-dome test images, or start observing too early
              and go back to taking calibs for a while before the night starts.
            - 'heuristic': Use a heuristic to find the first seqNum. The
              current heuristic is to find the first seqNum with an observation
              type of CWFS, as we always do a CWFS focus before going on sky.
              This does not work well for older days, because data wasn't
              always taken this way. Note: may be updated in the future, at
              which point this will be renamed ``cwfs``.

        Returns
        -------
        startSeqNum : `int`
            The seqNum of the start of the night's observing.
        """
        allowedMethods = ['heuristic', 'safe']
        if method not in allowedMethods:
            raise ValueError(f"Method must be one of {allowedMethods}")

        if method == 'safe':
            # as of 20221211, the full set of observation_types ever seen is:
            # acq, bias, cwfs, dark, engtest, flat, focus, science, stuttered,
            # test, unknown
            offSkyObsTypes = ['bias', 'dark', 'flat', 'test', 'unknown']
            for seqNum in sorted(self.data.keys()):
                if self.data[seqNum]['observation_type'] not in offSkyObsTypes:
                    return seqNum
            return None

        if method == 'heuristic':
            # take the first cwfs image and return that
            seqNums = self.getSeqNumsMatching(observation_type='cwfs')
            return min(seqNums)
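    # NB: if the night contains no 'cwfs' images, the 'heuristic' branch above
    # raises a ValueError from min() on an empty sequence, whereas the 'safe'
    # branch returns None as documented.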

    def printObsTable(self, **kwargs):
        """Print a table of the day's observations.

        Parameters
        ----------
        **kwargs : `dict`
            Filter the observation table according to seqNums which match these
            {k: v} pairs. For example, to only print out science observations
            pass ``observation_type='science'``.
        """
        seqNums = self.data.keys() if not kwargs else self.getSeqNumsMatching(**kwargs)
        seqNums = sorted(seqNums)  # should always be sorted, but is a total disaster here if not

        dts = self.getTimeDeltas()
        lines = []
        for seqNum in seqNums:
            try:
                expTime = self.data[seqNum]['exposure_time'].value
                imageType = self.data[seqNum]['observation_type']
                target = self.data[seqNum]['target_name']
                deadtime = dts[seqNum]
                filt = self.data[seqNum]['physical_filter']

                msg = f'{seqNum} {target} {expTime:.1f} {deadtime:.02f} {imageType} {filt}'
            except Exception:
                msg = f"Error parsing {seqNum}!"
            lines.append(msg)

        print("seqNum target expTime deadtime imageType filt")
        print("------ ------ ------- -------- --------- ----")
        for line in lines:
            print(line)

    def getExposureMidpoint(self, seqNum):
        """Return the midpoint of the exposure as a float in MJD.

        Parameters
        ----------
        seqNum : `int`
            The seqNum to get the midpoint for.

        Returns
        -------
        midpointMjd : `float`
            The midpoint, as an MJD float.
        """
        timespan = self.data[seqNum]['timespan']
        return (timespan.begin.mjd + timespan.end.mjd) / 2

    def plotPerObjectAirMass(self, objects=None, airmassOneAtTop=True, saveFig=''):
        """Plot the airmass for objects observed over the course of the night.

        Parameters
        ----------
        objects : `list` [`str`], optional
            The objects to plot. If not provided, all objects are plotted.
        airmassOneAtTop : `bool`, optional
            Put the airmass of 1 at the top of the plot, like astronomers
            expect.
        saveFig : `str`, optional
            Save the figure to this file path?
        """
        if not objects:
            objects = self.stars

        objects = ensure_iterable(objects)

        plt.figure(figsize=(10, 6))
        for star in objects:
            seqNums = self.getSeqNumsMatching(target_name_short=star)
            airMasses = [self.data[seqNum]['boresight_airmass'] for seqNum in seqNums]
            obsTimes = [self.getExposureMidpoint(seqNum) for seqNum in seqNums]
            color = self.cMap[star].color
            marker = self.cMap[star].marker
            plt.plot(obsTimes, airMasses, color=color, marker=marker, label=star, ms=10, ls='')

        plt.ylabel('Airmass', fontsize=20)
        plt.xlabel('MJD', fontsize=20)
        if airmassOneAtTop:
            ax = plt.gca()
            ax.set_ylim(ax.get_ylim()[::-1])
        plt.legend(bbox_to_anchor=(1, 1.025), prop={'size': 15}, loc='upper left')

        plt.tight_layout()
        if saveFig:
            plt.savefig(saveFig)
        plt.show()
        plt.close()

    def _makePolarPlot(self, azimuthsInDegrees, zenithAngles, marker="*-",
                       title=None, makeFig=True, color=None, objName=None):
        """Private method to actually do the polar plotting.

        Parameters
        ----------
        azimuthsInDegrees : `list` [`float`]
            The azimuth values, in degrees.
        zenithAngles : `list` [`float`]
            The zenith angle values, but more generally, the values on the
            radial axis, so can be in whatever units you want.
        marker : `str`, optional
            The marker to use.
        title : `str`, optional
            The plot title.
        makeFig : `bool`, optional
            Make a new figure?
        color : `str`, optional
            The marker color.
        objName : `str`, optional
            The object name, for the legend.

        Returns
        -------
        ax : `matplotlib.axes.Axes`
            The axes on which the plot was made.
        """
        if makeFig:
            _ = plt.figure(figsize=(10, 10))
        ax = plt.subplot(111, polar=True)
        ax.plot([a * np.pi / 180 for a in azimuthsInDegrees], zenithAngles, marker, c=color, label=objName)
        if title:
            ax.set_title(title, va='bottom')
        ax.set_theta_zero_location("N")
        ax.set_theta_direction(-1)
        ax.set_rlim(0, 90)
        return ax

    def makeAltAzCoveragePlot(self, objects=None, withLines=False, saveFig=''):
        """Make a polar plot of the azimuth and zenith angle for each object.

        Plots the azimuth on the theta axis, and zenith angle (not altitude!)
        on the radius axis, such that 0 is at the centre, like you're looking
        top-down on the telescope.

        Parameters
        ----------
        objects : `list` [`str`], optional
            The objects to plot. If not provided, all objects are plotted.
        withLines : `bool`, optional
            Connect the points with lines?
        saveFig : `str`, optional
            Save the figure to this file path?
        """
        if not objects:
            objects = self.stars
        objects = ensure_iterable(objects)

        _ = plt.figure(figsize=(14, 10))

        for obj in objects:
            seqNums = self.getSeqNumsMatching(target_name_short=obj)
            altAzes = [self.data[seqNum]['altaz_begin'] for seqNum in seqNums]
            alts = [altAz.alt.deg for altAz in altAzes if altAz is not None]
            azes = [altAz.az.deg for altAz in altAzes if altAz is not None]
            assert len(alts) == len(azes)
            if len(azes) == 0:
                self.log.warning(f"Found no alt/az data for {obj}")
            zens = [90 - alt for alt in alts]
            color = self.cMap[obj].color
            marker = self.cMap[obj].marker
            if withLines:
                marker += '-'

            ax = self._makePolarPlot(azes, zens, marker=marker, title=None, makeFig=False,
                                     color=color, objName=obj)
        lgnd = ax.legend(bbox_to_anchor=(1.05, 1), prop={'size': 15}, loc='upper left')
        ax.set_title("Axial coverage - azimuth (theta, deg) vs zenith angle (r, deg)", size=20)
        for h in lgnd.legendHandles:
            size = 14
            if '-' in marker:
                size += 5
            h.set_markersize(size)

        plt.tight_layout()
        if saveFig:
            plt.savefig(saveFig)
        plt.show()
        plt.close()
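

# Example usage (an illustrative sketch only, not part of the module API; the
# repo path, collection name, dayObs and output path below are assumptions):
#
#     from lsst.daf.butler import Butler
#     from lsst.summit.utils.nightReport import NightReport
#
#     butler = Butler('/repo/LATISS', collections='LATISS/raw/all')
#     report = NightReport(butler, 20230127)
#     report.printShutterTimes()
#     report.printObsGaps(threshold=120)
#     report.plotPerObjectAirMass(saveFig='/tmp/airmass.png')
#     report.makeAltAzCoveragePlot()
#
# Building the report scrapes headers for every exposure of the night, so the
# save()/_load() round-trip above in the class exists to avoid repeating that
# cost within a night.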