Coverage for python/lsst/summit/utils/tmaUtils.py: 17%

662 statements  

« prev     ^ index     » next       coverage.py v7.5.0, created at 2024-04-25 12:24 -0700

1# This file is part of summit_utils. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <https://www.gnu.org/licenses/>. 

21 

22import datetime 

23import enum 

24import itertools 

25import logging 

26import re 

27from dataclasses import dataclass, field 

28 

29import humanize 

30import matplotlib.dates as mdates 

31import matplotlib.pyplot as plt 

32import numpy as np 

33import pandas as pd 

34from astropy.time import Time 

35from matplotlib.ticker import FuncFormatter 

36 

37from lsst.utils.iteration import ensure_iterable 

38 

39from .blockUtils import BlockParser 

40from .efdUtils import ( 

41 COMMAND_ALIASES, 

42 clipDataToEvent, 

43 efdTimestampToAstropy, 

44 getCommands, 

45 getDayObsEndTime, 

46 getDayObsForTime, 

47 getDayObsStartTime, 

48 getEfdData, 

49 makeEfdClient, 

50) 

51from .enums import AxisMotionState, PowerState 

52from .utils import dayObsIntToString, getCurrentDayObs_int 

53 

54__all__ = ( 

55 "TMAStateMachine", 

56 "TMAEvent", 

57 "TMAEventMaker", 

58 "TMAState", 

59 "AxisMotionState", 

60 "PowerState", 

61 "getSlewsFromEventList", 

62 "getTracksFromEventList", 

63 "getTorqueMaxima", 

64 "filterBadValues", 

65) 

66 

67# we don't want to use `None` for a no data sentinel because dict.get('key') 

68# returns None if the key isn't present, and also we need to mark that the data 

69# was queried for and no data was found, whereas the key not being present 

70# means that we've not yet looked for the data. 

71NO_DATA_SENTINEL = "NODATA" 

72 

73# The known time difference between the TMA demand position and the TMA 

74# position when tracking. 20Hz data times three points = 150ms. 

75TRACKING_RESIDUAL_TAIL_CLIP = -0.15 # seconds 

76 

77MOUNT_IMAGE_WARNING_LEVEL = 0.01 # this determines the colouring of the cells in the table, yellow for this 

78MOUNT_IMAGE_BAD_LEVEL = 0.05 # and red for this 

79 

80 

def getSlewsFromEventList(events):
    """Select only the slewing events from a list of TMAEvents.

    Parameters
    ----------
    events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent`
        The list of events to filter.

    Returns
    -------
    events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent`
        The filtered list of events.
    """
    slews = []
    for event in events:
        if event.type == TMAState.SLEWING:
            slews.append(event)
    return slews

95 

96 

def getTracksFromEventList(events):
    """Select only the tracking events from a list of TMAEvents.

    Parameters
    ----------
    events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent`
        The list of events to filter.

    Returns
    -------
    events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent`
        The filtered list of events.
    """
    tracks = []
    for event in events:
        if event.type == TMAState.TRACKING:
            tracks.append(event)
    return tracks

111 

112 

def getTorqueMaxima(table):
    """Print the maximum positive and negative azimuth and elevation torques.

    Designed to be used with the table as downloaded from RubinTV.

    Parameters
    ----------
    table : `pd.DataFrame`
        The table of data to use, as generated by Rapid Analysis.
    """
    for axis in ["elevation", "azimuth"]:
        column = table[f"Largest {axis} torque"]
        # report the extreme value in each direction, and where it happened
        for direction, finder in (("positive", np.argmax), ("negative", np.argmin)):
            position = finder(column)
            value = column.iloc[position]
            print(f"Max {direction} {axis:9} torque during seqNum {position:>4}: {value/1000:>7.1f}kNm")

131 

132 

def getAzimuthElevationDataForEvent(
    client,
    event,
    prePadding=0,
    postPadding=0,
):
    """Get the data for the az/el telemetry topics for a given TMAEvent.

    The error between the actual and demanded positions is calculated and added
    to the dataframes in the az/elError columns. For TRACKING type events, this
    error should be extremely close to zero, whereas for SLEWING type events,
    this error represents the how far the TMA is from the demanded position,
    and is therefore arbitrarily large, and tends to zero as the TMA get closer
    to tracking the sky.

    Parameters
    ----------
    client : `lsst_efd_client.efd_helper.EfdClient`
        The EFD client to use.
    event : `lsst.summit.utils.tmaUtils.TMAEvent`
        The event to get the data for.
    prePadding : `float`, optional
        The amount of time to pad the event with before the start time, in
        seconds.
    postPadding : `float`, optional
        The amount of time to pad the event with after the end time, in
        seconds.

    Returns
    -------
    azimuthData : `pd.DataFrame`
        The azimuth data for the specified event.
    elevationData : `pd.DataFrame`
        The elevation data for the specified event.
    """
    azimuthData, elevationData = (
        getEfdData(client, topic, event=event, prePadding=prePadding, postPadding=postPadding)
        for topic in ("lsst.sal.MTMount.azimuth", "lsst.sal.MTMount.elevation")
    )

    # add the actual-minus-demand error column to each dataframe,
    # converting from degrees to arcseconds
    for dataframe, errorColumn in ((azimuthData, "azError"), (elevationData, "elError")):
        actual = dataframe["actualPosition"].values
        demand = dataframe["demandPosition"].values
        dataframe[errorColumn] = (actual - demand) * 3600

    return azimuthData, elevationData

187 

188 

def filterBadValues(values, maxDelta=0.1, maxConsecutiveValues=3):
    """Filter out bad values from a dataset, replacing them in-place.

    This function replaces non-physical points in the dataset with an
    extrapolation of the preceding two values. No more than
    ``maxConsecutiveValues`` successive data points are allowed to be
    replaced. Minimum length of the input is 3 points.

    Parameters
    ----------
    values : `list` or `np.ndarray`
        The dataset containing the values to be filtered.
    maxDelta : `float`, optional
        The maximum allowed difference between consecutive values. Values with
        a difference greater than `maxDelta` will be considered as bad values
        and replaced with an extrapolation.
    maxConsecutiveValues : `int`, optional
        The maximum number of consecutive values to replace. Defaults to 3.

    Returns
    -------
    nBadPoints : `int`
        The number of bad values that were replaced out.
    """
    # Find non-physical points and replace with extrapolation. No more than
    # maxConsecutiveValues successive data points can be replaced.
    badCounter = 0
    consecutiveCounter = 0

    log = logging.getLogger(__name__)

    median = np.nanmedian(values)
    # if either of the first two points are more than maxDelta away from
    # the median, replace them with the median
    for i in range(2):
        if abs(values[i] - median) > maxDelta:
            log.warning(f"Replacing bad value of {values[i]} at index {i} with {median=}")
            values[i] = median
            badCounter += 1

    # from the second element of the array, walk through and calculate the
    # difference between each element and the previous one. If the difference
    # is greater than maxDelta, replace the element with the average of the
    # previous two known good values, i.e. ones which have not been replaced.
    lastGoodValue1 = values[1]  # the most recent good value
    lastGoodValue2 = values[0]  # the second most recent good value
    replacementValue = (lastGoodValue1 + lastGoodValue2) / 2.0  # in case we have to replace the first value
    for i in range(2, len(values)):
        if abs(values[i] - lastGoodValue1) >= maxDelta:
            if consecutiveCounter < maxConsecutiveValues:
                consecutiveCounter += 1
                badCounter += 1
                log.warning(f"Replacing value at index {i} with {replacementValue}")
                values[i] = replacementValue
            else:
                # bug fix: the message previously hardcoded "3" regardless of
                # the maxConsecutiveValues actually in use
                log.warning(
                    f"More than {maxConsecutiveValues} consecutive replacements at index {i}."
                    " Stopping replacements until the next good value."
                )
        else:
            # a good point: rotate the good-value history and reset the run
            lastGoodValue2 = lastGoodValue1
            lastGoodValue1 = values[i]
            replacementValue = (lastGoodValue1 + lastGoodValue2) / 2.0
            consecutiveCounter = 0
    return badCounter

255 

256 

257def plotEvent( 

258 client, 

259 event, 

260 fig=None, 

261 prePadding=0, 

262 postPadding=0, 

263 commands={}, 

264 azimuthData=None, 

265 elevationData=None, 

266 doFilterResiduals=False, 

267 maxDelta=0.1, 

268 metadataWriter=None, 

269): 

270 """Plot the TMA axis positions over the course of a given TMAEvent. 

271 

272 Plots the axis motion profiles for the given event, with optional padding 

273 at the start and end of the event. If the data is provided via the 

274 azimuthData and elevationData parameters, it will be used, otherwise it 

275 will be queried from the EFD. 

276 

277 Optionally plots any commands issued during or around the event, if these 

278 are supplied. Commands are supplied as a dictionary of the command topic 

279 strings, with values as astro.time.Time objects at which the command was 

280 issued. 

281 

282 Due to a problem with the way the data is uploaded to the EFD, there are 

283 occasional points in the tracking error plots that are very much larger 

284 than the typical mount jitter. These points are unphysical, since it is not 

285 possible for the mount to move that fast. We don't want these points, which 

286 are not true mount problems, to distract from any real mount problems, and 

287 these can be filtered out via the ``doFilterResiduals`` kwarg, which 

288 replaces these non-physical points with an extrapolation of the average of 

289 the preceding two known-good points. If the first two points are bad these 

290 are replaced with the median of the dataset. The maximum difference between 

291 the model and the actual data, in arcseconds, to allow before filtering a 

292 data point can be set with the ``maxDelta`` kwarg. 

293 

294 Parameters 

295 ---------- 

296 client : `lsst_efd_client.efd_helper.EfdClient` 

297 The EFD client to use. 

298 event : `lsst.summit.utils.tmaUtils.TMAEvent` 

299 The event to plot. 

300 fig : `matplotlib.figure.Figure`, optional 

301 The figure to plot on. If not specified, a new figure will be created. 

302 prePadding : `float`, optional 

303 The amount of time to pad the event with before the start time, in 

304 seconds. 

305 postPadding : `float`, optional 

306 The amount of time to pad the event with after the end time, in 

307 seconds. 

308 commands : `dict` [`pd.Timestamp`, `str`], or 

309 `dict` [`datetime.datetime`, `str`], oroptional 

310 A dictionary of commands to plot on the figure. The keys are the times 

311 at which a command was issued, and the value is the command string, as 

312 returned by efdUtils.getCommands(). 

313 azimuthData : `pd.DataFrame`, optional 

314 The azimuth data to plot. If not specified, it will be queried from the 

315 EFD. 

316 elevationData : `pd.DataFrame`, optional 

317 The elevation data to plot. If not specified, it will be queried from 

318 the EFD. 

319 doFilterResiduals : 'bool', optional 

320 Enables filtering of unphysical data points in the tracking residuals. 

321 maxDelta : `float`, optional 

322 The maximum difference between the model and the actual data, in 

323 arcseconds, to allow before filtering the data point. Ignored if 

324 ``doFilterResiduals`` is `False`. 

325 metadataWriter : `callable`, optional 

326 Should be a callable 

327 ``lsst.rubintv.production.utils.writeMetadataShard`` function that has 

328 had the path filled in with ``functools.patrial`` so that it will just 

329 write out the data when called with the event's dayObs and a 

330 dictionary containing the row data that should be written. 

331 

332 Returns 

333 ------- 

334 fig : `matplotlib.figure.Figure` 

335 The figure on which the plot was made. 

336 """ 

337 

338 def tickFormatter(value, tick_number): 

339 # Convert the value to a string without subtracting large numbers 

340 # tick_number is unused. 

341 return f"{value:.2f}" 

342 

343 def getPlotTime(time): 

344 """Get the right time to plot a point from the various time formats.""" 

345 match time: 

346 case pd.Timestamp(): 

347 return time.to_pydatetime() 

348 case Time(): 

349 return time.utc.datetime 

350 case datetime.datetime(): 

351 return time 

352 case _: 

353 raise ValueError(f"Unknown type for commandTime: {type(time)}") 

354 

355 # plot any commands we might have 

356 if not isinstance(commands, dict): 

357 raise TypeError("commands must be a dict of command names with values as" " astropy.time.Time values") 

358 

359 if fig is None: 

360 fig = plt.figure(figsize=(10, 8)) 

361 log = logging.getLogger(__name__) 

362 log.warning( 

363 "Making new matplotlib figure - if this is in a loop you're going to have a bad time." 

364 " Pass in a figure with fig = plt.figure(figsize=(10, 8)) to avoid this warning." 

365 ) 

366 

367 fig.clear() 

368 ax1p5 = None # need to always be defined 

369 if event.type.name == "TRACKING": 

370 ax1, ax1p5, ax2 = fig.subplots( 

371 3, sharex=True, gridspec_kw={"wspace": 0, "hspace": 0, "height_ratios": [2.5, 1, 1]} 

372 ) 

373 else: 

374 ax1, ax2 = fig.subplots( 

375 2, sharex=True, gridspec_kw={"wspace": 0, "hspace": 0, "height_ratios": [2.5, 1]} 

376 ) 

377 

378 if azimuthData is None or elevationData is None: 

379 azimuthData, elevationData = getAzimuthElevationDataForEvent( 

380 client, event, prePadding=prePadding, postPadding=postPadding 

381 ) 

382 

383 # Use the native color cycle for the lines. Because they're on different 

384 # axes they don't cycle by themselves 

385 lineColors = [p["color"] for p in plt.rcParams["axes.prop_cycle"]] 

386 nColors = len(lineColors) 

387 colorCounter = 0 

388 

389 ax1.plot(azimuthData["actualPosition"], label="Azimuth position", c=lineColors[colorCounter % nColors]) 

390 colorCounter += 1 

391 ax1.yaxis.set_major_formatter(FuncFormatter(tickFormatter)) 

392 ax1.set_ylabel("Azimuth (degrees)") 

393 

394 ax1_twin = ax1.twinx() 

395 ax1_twin.plot( 

396 elevationData["actualPosition"], label="Elevation position", c=lineColors[colorCounter % nColors] 

397 ) 

398 colorCounter += 1 

399 ax1_twin.yaxis.set_major_formatter(FuncFormatter(tickFormatter)) 

400 ax1_twin.set_ylabel("Elevation (degrees)") 

401 ax1.set_xticks([]) # remove x tick labels on the hidden upper x-axis 

402 

403 ax2_twin = ax2.twinx() 

404 ax2.plot(azimuthData["actualTorque"], label="Azimuth torque", c=lineColors[colorCounter % nColors]) 

405 colorCounter += 1 

406 ax2_twin.plot( 

407 elevationData["actualTorque"], label="Elevation torque", c=lineColors[colorCounter % nColors] 

408 ) 

409 colorCounter += 1 

410 ax2.set_ylabel("Azimuth torque (Nm)") 

411 ax2_twin.set_ylabel("Elevation torque (Nm)") 

412 ax2.set_xlabel("Time (UTC)") # yes, it really is UTC, matplotlib converts this automatically! 

413 

414 # put the ticks at an angle, and right align with the tick marks 

415 ax2.set_xticks(ax2.get_xticks()) # needed to supress a user warning 

416 xlabels = ax2.get_xticks() 

417 ax2.set_xticklabels(xlabels, rotation=40, ha="right") 

418 ax2.xaxis.set_major_locator(mdates.AutoDateLocator()) 

419 ax2.xaxis.set_major_formatter(mdates.DateFormatter("%H:%M:%S")) 

420 

421 if event.type.name == "TRACKING": 

422 # returns a copy 

423 clippedAzimuthData = clipDataToEvent(azimuthData, event, postPadding=TRACKING_RESIDUAL_TAIL_CLIP) 

424 clippedElevationData = clipDataToEvent(elevationData, event, postPadding=TRACKING_RESIDUAL_TAIL_CLIP) 

425 

426 azError = clippedAzimuthData["azError"].values 

427 elError = clippedElevationData["elError"].values 

428 elVals = clippedElevationData["actualPosition"].values 

429 if doFilterResiduals: 

430 # Filtering out bad values 

431 nReplacedAz = filterBadValues(azError, maxDelta) 

432 nReplacedEl = filterBadValues(elError, maxDelta) 

433 clippedAzimuthData["azError"] = azError 

434 clippedElevationData["elError"] = elError 

435 # Calculate RMS 

436 az_rms = np.sqrt(np.mean(azError * azError)) 

437 el_rms = np.sqrt(np.mean(elError * elError)) 

438 

439 # Calculate Image impact RMS 

440 # We are less sensitive to Az errors near the zenith 

441 image_az_rms = az_rms * np.cos(elVals[0] * np.pi / 180.0) 

442 image_el_rms = el_rms 

443 image_impact_rms = np.sqrt(image_az_rms**2 + image_el_rms**2) 

444 ax1p5.plot( 

445 clippedAzimuthData["azError"], 

446 label="Azimuth tracking error", 

447 c=lineColors[colorCounter % nColors], 

448 ) 

449 colorCounter += 1 

450 ax1p5.plot( 

451 clippedElevationData["elError"], 

452 label="Elevation tracking error", 

453 c=lineColors[colorCounter % nColors], 

454 ) 

455 colorCounter += 1 

456 ax1p5.axhline(0.01, ls="-.", color="black") 

457 ax1p5.axhline(-0.01, ls="-.", color="black") 

458 ax1p5.yaxis.set_major_formatter(FuncFormatter(tickFormatter)) 

459 ax1p5.set_ylabel("Tracking error (arcsec)") 

460 ax1p5.set_xticks([]) # remove x tick labels on the hidden upper x-axis 

461 ax1p5.set_ylim(-0.05, 0.05) 

462 ax1p5.set_yticks([-0.04, -0.02, 0.0, 0.02, 0.04]) 

463 ax1p5.legend() 

464 ax1p5.text(0.1, 0.9, f"Image impact RMS = {image_impact_rms:.3f} arcsec", transform=ax1p5.transAxes) 

465 if doFilterResiduals: 

466 ax1p5.text( 

467 0.1, 

468 0.8, 

469 f"{nReplacedAz} bad azimuth values and {nReplacedEl} bad elevation values were replaced", 

470 transform=ax1p5.transAxes, 

471 ) 

472 if metadataWriter is not None: 

473 md = {"Tracking image impact": f"{image_impact_rms:.3f}"} 

474 flagKey = "_Tracking image impact" 

475 if image_impact_rms > MOUNT_IMAGE_BAD_LEVEL: 

476 md.update({flagKey: "bad"}) 

477 elif image_impact_rms > MOUNT_IMAGE_WARNING_LEVEL: 

478 md.update({flagKey: "warning"}) 

479 

480 rowData = {event.seqNum: md} 

481 metadataWriter(dayObs=event.dayObs, mdDict=rowData) 

482 

483 if prePadding or postPadding: 

484 # note the conversion to utc because the x-axis from the dataframe 

485 # already got automagically converted when plotting before, so this is 

486 # necessary for things to line up 

487 ax1_twin.axvline(event.begin.utc.datetime, c="k", ls="--", alpha=0.5, label="Event begin/end") 

488 ax1_twin.axvline(event.end.utc.datetime, c="k", ls="--", alpha=0.5) 

489 # extend lines down across lower plot, but do not re-add label 

490 ax2_twin.axvline(event.begin.utc.datetime, c="k", ls="--", alpha=0.5) 

491 ax2_twin.axvline(event.end.utc.datetime, c="k", ls="--", alpha=0.5) 

492 if ax1p5: 

493 ax1p5.axvline(event.begin.utc.datetime, c="k", ls="--", alpha=0.5) 

494 ax1p5.axvline(event.end.utc.datetime, c="k", ls="--", alpha=0.5) 

495 

496 for commandTime, command in commands.items(): 

497 plotTime = getPlotTime(commandTime) 

498 ax1_twin.axvline( 

499 plotTime, c=lineColors[colorCounter % nColors], ls="--", alpha=0.75, label=f"{command}" 

500 ) 

501 # extend lines down across lower plot, but do not re-add label 

502 ax2_twin.axvline(plotTime, c=lineColors[colorCounter % nColors], ls="--", alpha=0.75) 

503 if ax1p5: 

504 ax1p5.axvline(plotTime, c=lineColors[colorCounter % nColors], ls="--", alpha=0.75) 

505 colorCounter += 1 

506 

507 # combine the legends and put inside the plot 

508 handles1a, labels1a = ax1.get_legend_handles_labels() 

509 handles1b, labels1b = ax1_twin.get_legend_handles_labels() 

510 handles2a, labels2a = ax2.get_legend_handles_labels() 

511 handles2b, labels2b = ax2_twin.get_legend_handles_labels() 

512 

513 handles = handles1a + handles1b + handles2a + handles2b 

514 labels = labels1a + labels1b + labels2a + labels2b 

515 # ax2 is "in front" of ax1 because it has the vlines plotted on it, and 

516 # vlines are on ax2 so that they appear at the bottom of the legend, so 

517 # make sure to plot the legend on ax2, otherwise the vlines will go on top 

518 # of the otherwise-opaque legend. 

519 ax1_twin.legend(handles, labels, facecolor="white", framealpha=1) 

520 

521 # Add title with the event name, type etc 

522 dayObsStr = dayObsIntToString(event.dayObs) 

523 title = ( 

524 # top line is the event title, the details go on the line below 

525 f"{dayObsStr} - seqNum {event.seqNum} (version {event.version})" 

526 f"\nDuration = {event.duration:.2f}s" 

527 f" Event type: {event.type.name}" 

528 f" End reason: {event.endReason.name}" 

529 ) 

530 ax1_twin.set_title(title) 

531 return fig 

532 

533 

def getCommandsDuringEvent(
    client,
    event,
    commands=("raDecTarget",),
    prePadding=0,
    postPadding=0,
    timeFormat="python",
    log=None,
    doLog=True,
):
    """Get the commands issued during an event.

    Get the times at which the specified commands were issued during the event.

    Parameters
    ----------
    client : `lsst_efd_client.efd_helper.EfdClient`
        The EFD client to use.
    event : `lsst.summit.utils.tmaUtils.TMAEvent`
        The event to plot.
    commands : `list` of `str`, optional
        The commands or command aliases to look for. Defaults to
        ['raDecTarget'].
    prePadding : `float`, optional
        The amount of time to pad the event with before the start time, in
        seconds.
    postPadding : `float`, optional
        The amount of time to pad the event with after the end time, in
        seconds.
    timeFormat : `str`, optional
        One of 'pandas' or 'astropy' or 'python'. If 'pandas', the dictionary
        keys will be pandas timestamps, if 'astropy' they will be astropy times
        and if 'python' they will be python datetimes.
    log : `logging.Logger`, optional
        The logger to use. If not specified, a new logger will be created if
        needed.
    doLog : `bool`, optional
        Whether to log messages. Defaults to True.

    Returns
    -------
    commandTimes : `dict` [`time`, `str`]
        A dictionary of the times at which the commands where issued. The type
        that `time` takes is determined by the format key, and defaults to
        python datetime.
    """
    # note: the default was previously ("raDecTarget") - a plain string due to
    # a missing comma - which only worked because ensure_iterable special-cases
    # strings; it is now a real one-element tuple
    commands = list(ensure_iterable(commands))
    fullCommands = [c if c not in COMMAND_ALIASES else COMMAND_ALIASES[c] for c in commands]
    del commands  # make sure we always use their full names

    commandTimes = getCommands(
        client,
        fullCommands,
        begin=event.begin,
        end=event.end,
        prePadding=prePadding,
        postPadding=postPadding,
        timeFormat=timeFormat,
    )

    if not commandTimes and doLog:
        # bug fix: a fresh logger was previously always created here, silently
        # ignoring any logger passed in via the ``log`` parameter
        if log is None:
            log = logging.getLogger(__name__)
        log.info(f"Found no commands in {fullCommands} issued during event {event.seqNum}")

    return commandTimes

599 

600 

def _initializeTma(tma):
    """Helper function to turn a TMA into a valid state for testing.

    Do not call directly in normal usage or code, as this just arbitrarily
    sets values to make the TMA valid.

    Parameters
    ----------
    tma : `lsst.summit.utils.tmaUtils.TMAStateMachine`
        The TMA state machine model to initialize.
    """
    # arbitrary-but-valid state: both axes powered on, stopped, and not
    # in position
    validState = {
        "azimuthInPosition": False,
        "azimuthMotionState": AxisMotionState.STOPPED,
        "azimuthSystemState": PowerState.ON,
        "elevationInPosition": False,
        "elevationMotionState": AxisMotionState.STOPPED,
        "elevationSystemState": PowerState.ON,
    }
    tma._parts.update(validState)

619 

@dataclass(kw_only=True, frozen=True)
class TMAEvent:
    """A movement event for the TMA.

    Contains the dayObs on which the event occurred, using the standard
    observatory definition of the dayObs, and the sequence number of the event,
    which is unique for each event on a given dayObs.

    The event type can be either 'SLEWING' or 'TRACKING', defined as:
        - SLEWING: some part of the TMA is in motion
        - TRACKING: both axes are in position and tracking the sky

    The end reason can be 'STOPPED', 'TRACKING', 'FAULT', 'SLEWING', or 'OFF'.
        - SLEWING: The previous event was a TRACKING event, and one or more of
              the TMA components either stopped being in position, or stopped
              moving, or went into fault, or was turned off, and hence we are
              now only slewing and no longer tracking the sky.
        - TRACKING: the TMA started tracking the sky when it wasn't previously.
              Usually this would always be directly preceded by a SLEWING
              event, but this is not strictly true, as the EUI seems to be able
              to make the TMA start tracking the sky without slewing first.
        - STOPPED: the components of the TMA transitioned to the STOPPED state.
        - FAULT: the TMA went into fault.
        - OFF: the TMA components were turned off.

    Note that this class is not intended to be instantiated directly, but
    rather to be returned by the ``TMAEventMaker.getEvents()`` function.

    Parameters
    ----------
    dayObs : `int`
        The dayObs on which the event occurred.
    seqNum : `int`
        The sequence number of the event,
    type : `lsst.summit.utils.tmaUtils.TMAState`
        The type of the event, either 'SLEWING' or 'TRACKING'.
    endReason : `lsst.summit.utils.tmaUtils.TMAState`
        The reason the event ended, either 'STOPPED', 'TRACKING', 'FAULT',
        'SLEWING', or 'OFF'.
    duration : `float`
        The duration of the event, in seconds.
    begin : `astropy.time.Time`
        The time the event began.
    end : `astropy.time.Time`
        The time the event ended.
    blockInfos : `list` of `lsst.summit.utils.tmaUtils.BlockInfo`, or `None`
        The block information, if any, relating to the event. Could be `None`,
        or one or more block informations.
    version : `int`
        The version of the TMAEvent class. Equality between events is only
        valid for a given version of the class. If the class definition
        changes, the time ranges can change, and hence the equality between
        events is ``False``.
    _startRow : `int`
        The first row in the merged EFD data which is part of the event.
    _endRow : `int`
        The last row in the merged EFD data which is part of the event.
    """

    dayObs: int
    seqNum: int
    type: str  # can be 'SLEWING', 'TRACKING'
    endReason: str  # can be 'STOPPED', 'TRACKING', 'FAULT', 'SLEWING', 'OFF'
    duration: float  # seconds
    begin: Time
    end: Time
    blockInfos: list = field(default_factory=list)
    version: int = 0  # update this number any time a code change which could change event definitions is made
    _startRow: int
    _endRow: int

    def __lt__(self, other):
        # events from different class versions may have different time ranges,
        # so refuse to order them rather than give a misleading answer
        if self.version != other.version:
            raise ValueError(
                f"Cannot compare TMAEvents with different versions: {self.version} != {other.version}"
            )
        if self.dayObs < other.dayObs:
            return True
        elif self.dayObs == other.dayObs:
            return self.seqNum < other.seqNum
        return False

    def __repr__(self):
        # bug fix: the closing parenthesis was previously missing from the
        # repr string, producing unbalanced output
        return (
            f"TMAEvent(dayObs={self.dayObs}, seqNum={self.seqNum}, type={self.type!r},"
            f" endReason={self.endReason!r}, duration={self.duration}, begin={self.begin!r},"
            f" end={self.end!r})"
        )

    def __hash__(self):
        # deliberately don't hash the blockInfos here, as they are not
        # a core part of the event itself, and are listy and cause problems
        return hash(
            (
                self.dayObs,
                self.seqNum,
                self.type,
                self.endReason,
                self.duration,
                self.begin,
                self.end,
                self.version,
                self._startRow,
                self._endRow,
            )
        )

    def _ipython_display_(self):
        # show the friendly multi-line form in notebooks
        print(self.__str__())

    def __str__(self):
        def indent(string):
            return "\n" + "\n".join(["    " + s for s in string.splitlines()])

        blockInfoStr = "None"
        if self.blockInfos is not None:
            blockInfoStr = "".join(indent(str(i)) for i in self.blockInfos)

        return (
            f"dayObs: {self.dayObs}\n"
            f"seqNum: {self.seqNum}\n"
            f"type: {self.type.name}\n"
            f"endReason: {self.endReason.name}\n"
            f"duration: {self.duration}\n"
            f"begin: {self.begin!r}\n"
            f"end: {self.end!r}\n"
            f"blockInfos: {blockInfoStr}"
        )

    def associatedWith(self, block=None, blockSeqNum=None, ticket=None, salIndex=None):
        """Check whether an event is associated with a set of parameters.

        Check if an event is associated with a specific block and/or ticket
        and/or salIndex. All specified parameters must match for the function
        to return True. If checking if an event is in a block, the blockSeqNum
        can also be specified to identify events which related to a given
        running the specified block.

        Parameters
        ----------
        block : `int`, optional
            The block number to check for.
        blockSeqNum : `int`, optional
            The block sequence number to check for, if the block is specified.
        ticket : `str`, optional
            The ticket number to check for.
        salIndex : `int`, optional
            The salIndex to check for.

        Returns
        -------
        relates : `bool`
            Whether the event is associated with the specified block, ticket,
            and salIndex.
        """
        if all([block is None, ticket is None, salIndex is None]):
            raise ValueError("Must specify at least one of block, ticket, or salIndex")

        if blockSeqNum is not None and block is None:
            raise ValueError("block must be specified if blockSeqNum is specified")

        for blockInfo in self.blockInfos:
            # "X is None or" is used for each parameter to allow it to be None
            # in the kwargs
            blockMatches = False
            if block is not None:
                if blockSeqNum is None and blockInfo.blockNumber == block:
                    blockMatches = True
                elif (
                    blockSeqNum is not None
                    and blockInfo.blockNumber == block
                    and blockInfo.seqNum == blockSeqNum
                ):
                    blockMatches = True
            else:
                blockMatches = True  # no block specified at all, so it matches

            salIndexMatches = salIndex is None or salIndex in blockInfo.salIndices
            ticketMatches = ticket is None or ticket in blockInfo.tickets

            if blockMatches and salIndexMatches and ticketMatches:
                return True

        return False

804 

805 

class TMAState(enum.IntEnum):
    """The overall state of the TMA.

    The meaning of each state:

    UNINITIALIZED
        Data has not yet been received for all relevant components, so the
        overall state is undefined.
    STOPPED
        All components are on, and none are moving.
    TRACKING
        We are tracking the sky.
    SLEWING
        One or more components are moving, and one or more are not tracking
        the sky. This should probably be called MOVING, as it includes:
        slewing, MOVING_POINT_TO_POINT, and JOGGING.
    FAULT
        All (if engineeringMode) or any (if not engineeringMode) components
        are in fault.
    OFF
        All components are off.
    """

    UNINITIALIZED = -1
    STOPPED = 0
    TRACKING = 1
    SLEWING = 2
    FAULT = 3
    OFF = 4

    def __repr__(self):
        # e.g. "TMAState.SLEWING" rather than the default IntEnum repr
        return "TMAState." + self.name

838 

839 

def getAxisAndType(rowFor):
    """Get the axis the data relates to, and the type of data it contains.

    Parameters
    ----------
    rowFor : `str`
        The column in the dataframe denoting what this row is for, e.g.
        "elevationMotionState" or "azimuthInPosition", etc.

    Returns
    -------
    axis : `str`
        The axis the row is for, e.g. "azimuth", "elevation".
    rowType : `str`
        The type of the row, e.g. "MotionState", "SystemState", "InPosition".

    Raises
    ------
    ValueError
        Raised if ``rowFor`` does not end with a recognized axis/type pair.
    """
    # anchored with $ so any topic prefix is tolerated; only the suffix
    # matters
    regex = r"(azimuth|elevation)(InPosition|MotionState|SystemState)$"
    matches = re.search(regex, rowFor)
    if matches is None:
        raise ValueError(f"Could not parse axis and rowType from {rowFor=}")
    # NB: the previous assert that rowFor ends with axis + rowType was
    # removed: it was always true by construction (both groups come from a
    # match anchored at the end of the string), and asserts are stripped
    # under -O anyway.
    return matches.group(1), matches.group(2)

865 

866 

class ListViewOfDict:
    """A list-like view onto a chosen subset of a dictionary's items.

    Building an ordinary list from a dictionary's values copies the item
    references at construction time, so later reassignments in the dict are
    not visible through the list. This class instead resolves every access
    against the underlying dictionary, so the view always reflects the
    dictionary's current contents, and assignment through the view writes
    back into the dictionary. This is useful for manipulating components in
    their logical groupings.
    """

    def __init__(self, underlyingDictionary, keysToLink):
        self.dictionary = underlyingDictionary
        self.keys = keysToLink

    def __getitem__(self, index):
        key = self.keys[index]
        return self.dictionary[key]

    def __setitem__(self, index, value):
        key = self.keys[index]
        self.dictionary[key] = value

    def __len__(self):
        return len(self.keys)

889 

890 

891class TMAStateMachine: 

892 """A state machine model of the TMA. 

893 

894 Note that this is currently only implemented for the azimuth and elevation 

895 axes, but will be extended to include the rotator in the future. 

896 

897 Note that when used for event generation, changing ``engineeringMode`` to 

898 False might change the resulting list of events, and that if the TMA moves 

899 with some axis in fault, then these events will be missed. It is therefore 

900 thought that ``engineeringMode=True`` should always be used when generating 

901 events. The option, however, is there for completeness, as this will be 

902 useful for knowing is the CSC would consider the TMA to be in fault in the 

903 general case. 

904 

905 Parameters 

906 ---------- 

907 engineeringMode : `bool`, optional 

908 Whether the TMA is in engineering mode. Defaults to True. If False, 

909 then the TMA will be in fault if any component is in fault. If True, 

910 then the TMA will be in fault only if all components are in fault. 

911 debug : `bool`, optional 

912 Whether to log debug messages. Defaults to False. 

913 """ 

914 

915 _UNINITIALIZED_VALUE: int = -999 

916 

917 def __init__(self, engineeringMode=True, debug=False): 

918 self.engineeringMode = engineeringMode 

919 self.log = logging.getLogger("lsst.summit.utils.tmaUtils.TMA") 

920 if debug: 

921 self.log.level = logging.DEBUG 

922 self._mostRecentRowTime = -1 

923 

924 # the actual components of the TMA 

925 self._parts = { 

926 "azimuthInPosition": self._UNINITIALIZED_VALUE, 

927 "azimuthMotionState": self._UNINITIALIZED_VALUE, 

928 "azimuthSystemState": self._UNINITIALIZED_VALUE, 

929 "elevationInPosition": self._UNINITIALIZED_VALUE, 

930 "elevationMotionState": self._UNINITIALIZED_VALUE, 

931 "elevationSystemState": self._UNINITIALIZED_VALUE, 

932 } 

933 systemKeys = ["azimuthSystemState", "elevationSystemState"] 

934 positionKeys = ["azimuthInPosition", "elevationInPosition"] 

935 motionKeys = ["azimuthMotionState", "elevationMotionState"] 

936 

937 # references to the _parts as conceptual groupings 

938 self.system = ListViewOfDict(self._parts, systemKeys) 

939 self.motion = ListViewOfDict(self._parts, motionKeys) 

940 self.inPosition = ListViewOfDict(self._parts, positionKeys) 

941 

942 # tuples of states for state collapsing. Note that STOP_LIKE + 

943 # MOVING_LIKE must cover the full set of AxisMotionState enums 

944 self.STOP_LIKE = (AxisMotionState.STOPPING, AxisMotionState.STOPPED, AxisMotionState.TRACKING_PAUSED) 

945 self.MOVING_LIKE = ( 

946 AxisMotionState.MOVING_POINT_TO_POINT, 

947 AxisMotionState.JOGGING, 

948 AxisMotionState.TRACKING, 

949 ) 

950 # Likewise, ON_LIKE + OFF_LIKE must cover the full set of PowerState 

951 # enums 

952 self.OFF_LIKE = (PowerState.OFF, PowerState.TURNING_OFF) 

953 self.ON_LIKE = (PowerState.ON, PowerState.TURNING_ON) 

954 self.FAULT_LIKE = (PowerState.FAULT,) # note the trailing comma - this must be an iterable 

955 

956 def apply(self, row): 

957 """Apply a row of data to the TMA state. 

958 

959 Checks that the row contains data for a later time than any data 

960 previously applied, and applies the relevant column entry to the 

961 relevant component. 

962 

963 Parameters 

964 ---------- 

965 row : `pd.Series` 

966 The row of data to apply to the state machine. 

967 """ 

968 timestamp = row["private_efdStamp"] 

969 if timestamp < self._mostRecentRowTime: # NB equals is OK, technically, though it never happens 

970 raise ValueError( 

971 "TMA evolution must be monotonic increasing in time, tried to apply a row which" 

972 " predates the most previous one" 

973 ) 

974 self._mostRecentRowTime = timestamp 

975 

976 rowFor = row["rowFor"] # e.g. elevationMotionState 

977 axis, rowType = getAxisAndType(rowFor) # e.g. elevation, MotionState 

978 value = self._getRowPayload(row, rowType, rowFor) 

979 self.log.debug(f"Setting {rowFor} to {repr(value)}") 

980 self._parts[rowFor] = value 

981 try: 

982 # touch the state property as this executes the sieving, to make 

983 # sure we don't fall through the sieve at any point in time 

984 _ = self.state 

985 except RuntimeError as e: 

986 # improve error reporting, but always reraise this, as this is a 

987 # full-blown failure 

988 raise RuntimeError(f"Failed to apply {value} to {axis}{rowType} with state {self._parts}") from e 

989 

990 def _getRowPayload(self, row, rowType, rowFor): 

991 """Get the relevant value from the row. 

992 

993 Given the row, and which component it relates to, get the relevant 

994 value, as a bool or cast to the appropriate enum class. 

995 

996 Parameters 

997 ---------- 

998 row : `pd.Series` 

999 The row of data from the dataframe. 

1000 rowType : `str` 

1001 The type of the row, e.g. "MotionState", "SystemState", 

1002 "InPosition". 

1003 rowFor : `str` 

1004 The component the row is for, e.g. "azimuth", "elevation". 

1005 

1006 Returns 

1007 ------- 

1008 value : `bool` or `enum` 

1009 The value of the row, as a bool or enum, depending on the 

1010 component, cast to the appropriate enum class or bool. 

1011 """ 

1012 match rowType: 

1013 case "MotionState": 

1014 value = row[f"state_{rowFor}"] 

1015 return AxisMotionState(value) 

1016 case "SystemState": 

1017 value = row[f"powerState_{rowFor}"] 

1018 return PowerState(value) 

1019 case "InPosition": 

1020 value = row[f"inPosition_{rowFor}"] 

1021 return bool(value) 

1022 case _: 

1023 raise ValueError(f"Failed to get row payload with {rowType=} and {row=}") 

1024 

1025 @property 

1026 def _isValid(self): 

1027 """Has the TMA had a value applied to all its components? 

1028 

1029 If any component has not yet had a value applied, the TMA is not valid, 

1030 as those components will be in an unknown state. 

1031 

1032 Returns 

1033 ------- 

1034 isValid : `bool` 

1035 Whether the TMA is fully initialized. 

1036 """ 

1037 return not any([v == self._UNINITIALIZED_VALUE for v in self._parts.values()]) 

1038 

1039 # state inspection properties - a high level way of inspecting the state as 

1040 # an API 

1041 @property 

1042 def isMoving(self): 

1043 return self.state in [TMAState.TRACKING, TMAState.SLEWING] 

1044 

1045 @property 

1046 def isNotMoving(self): 

1047 return not self.isMoving 

1048 

1049 @property 

1050 def isTracking(self): 

1051 return self.state == TMAState.TRACKING 

1052 

1053 @property 

1054 def isSlewing(self): 

1055 return self.state == TMAState.SLEWING 

1056 

1057 @property 

1058 def canMove(self): 

1059 badStates = [PowerState.OFF, PowerState.TURNING_OFF, PowerState.FAULT, PowerState.UNKNOWN] 

1060 return bool( 

1061 self._isValid 

1062 and self._parts["azimuthSystemState"] not in badStates 

1063 and self._parts["elevationSystemState"] not in badStates 

1064 ) 

1065 

1066 # Axis inspection properties, designed for internal use. These return 

1067 # iterables so that they can be used in any() and all() calls, which make 

1068 # the logic much easier to read, e.g. to see if anything is moving, we can 

1069 # write `if not any(_axisInMotion):` 

1070 @property 

1071 def _axesInFault(self): 

1072 return [x in self.FAULT_LIKE for x in self.system] 

1073 

1074 @property 

1075 def _axesOff(self): 

1076 return [x in self.OFF_LIKE for x in self.system] 

1077 

1078 @property 

1079 def _axesOn(self): 

1080 return [not x for x in self._axesOn] 

1081 

1082 @property 

1083 def _axesInMotion(self): 

1084 return [x in self.MOVING_LIKE for x in self.motion] 

1085 

1086 @property 

1087 def _axesTRACKING(self): 

1088 """Note this is deliberately named _axesTRACKING and not _axesTracking 

1089 to make it clear that this is the AxisMotionState type of TRACKING and 

1090 not the normal conceptual notion of tracking (the sky, i.e. as opposed 

1091 to slewing). 

1092 """ 

1093 return [x == AxisMotionState.TRACKING for x in self.motion] 

1094 

1095 @property 

1096 def _axesInPosition(self): 

1097 return [x is True for x in self.inPosition] 

1098 

1099 @property 

1100 def state(self): 

1101 """The overall state of the TMA. 

1102 

1103 Note that this is both a property, and also the method which applies 

1104 the logic sieve to determine the state at a given point in time. 

1105 

1106 Returns 

1107 ------- 

1108 state : `lsst.summit.utils.tmaUtils.TMAState` 

1109 The overall state of the TMA. 

1110 """ 

1111 # first, check we're valid, and if not, return UNINITIALIZED state, as 

1112 # things are unknown 

1113 if not self._isValid: 

1114 return TMAState.UNINITIALIZED 

1115 

1116 # if we're not in engineering mode, i.e. we're under normal CSC 

1117 # control, then if anything is in fault, we're in fault. If we're 

1118 # engineering then some axes will move when others are in fault 

1119 if not self.engineeringMode: 

1120 if any(self._axesInFault): 

1121 return TMAState.FAULT 

1122 else: 

1123 # we're in engineering mode, so return fault state if ALL are in 

1124 # fault 

1125 if all(self._axesInFault): 

1126 return TMAState.FAULT 

1127 

1128 # if all axes are off, the TMA is OFF 

1129 if all(self._axesOff): 

1130 return TMAState.OFF 

1131 

1132 # we know we're valid and at least some axes are not off, so see if 

1133 # we're in motion if no axes are moving, we're stopped 

1134 if not any(self._axesInMotion): 

1135 return TMAState.STOPPED 

1136 

1137 # now we know we're initialized, and that at least one axis is moving 

1138 # so check axes for motion and in position. If all axes are tracking 

1139 # and all are in position, we're tracking the sky 

1140 if all(self._axesTRACKING) and all(self._axesInPosition): 

1141 return TMAState.TRACKING 

1142 

1143 # we now know explicitly that not everything is in position, so we no 

1144 # longer need to check that. We do actually know that something is in 

1145 # motion, but confirm that's the case and return SLEWING 

1146 if any(self._axesInMotion): 

1147 return TMAState.SLEWING 

1148 

1149 # if we want to differentiate between MOVING_POINT_TO_POINT moves, 

1150 # JOGGING moves and regular slews, the logic in the step above needs to 

1151 # be changed and the new steps added here. 

1152 

1153 raise RuntimeError("State error: fell through the state sieve - rewrite your logic!") 

1154 

1155 

class TMAEventMaker:
    """A class to create per-dayObs TMAEvents for the TMA's movements.

    If this class is being used in tests, make sure to pass the EFD client in,
    and create it with `makeEfdClient(testing=True)`. This ensures that the
    USDF EFD is "used" as this is the EFD which has the recorded data available
    in the test suite via `vcr`.

    Example usage:
    >>> dayObs = 20230630
    >>> eventMaker = TMAEventMaker()
    >>> events = eventMaker.getEvents(dayObs)
    >>> print(f'Found {len(events)} for {dayObs=}')

    Parameters
    ----------
    client : `lsst_efd_client.efd_helper.EfdClient`, optional
        The EFD client to use, created if not provided.
    """

    # the topics which need logical combination to determine the overall mount
    # state. Will need updating as new components are added to the system.
    # These are class-level constants, shared by all instances.

    # relevant column: 'state'
    _movingComponents = [
        "lsst.sal.MTMount.logevent_azimuthMotionState",
        "lsst.sal.MTMount.logevent_elevationMotionState",
    ]

    # relevant column: 'inPosition'
    _inPositionComponents = [
        "lsst.sal.MTMount.logevent_azimuthInPosition",
        "lsst.sal.MTMount.logevent_elevationInPosition",
    ]

    # the components which, if in fault, put the TMA into fault
    # relevant column: 'powerState'
    _stateComponents = [
        "lsst.sal.MTMount.logevent_azimuthSystemState",
        "lsst.sal.MTMount.logevent_elevationSystemState",
    ]

1197 

1198 def __init__(self, client=None): 

1199 if client is not None: 

1200 self.client = client 

1201 else: 

1202 self.client = makeEfdClient() 

1203 self.log = logging.getLogger(__name__) 

1204 self._data = {} 

1205 

1206 @dataclass(frozen=True) 

1207 class ParsedState: 

1208 eventStart: Time 

1209 eventEnd: int 

1210 previousState: TMAState 

1211 state: TMAState 

1212 

1213 @staticmethod 

1214 def isToday(dayObs): 

1215 """Find out if the specified dayObs is today, or in the past. 

1216 

1217 If the day is today, the function returns ``True``, if it is in the 

1218 past it returns ``False``. If the day is in the future, a 

1219 ``ValueError`` is raised, as this indicates there is likely an 

1220 off-by-one type error somewhere in the logic. 

1221 

1222 Parameters 

1223 ---------- 

1224 dayObs : `int` 

1225 The dayObs to check, in the format YYYYMMDD. 

1226 

1227 Returns 

1228 ------- 

1229 isToday : `bool` 

1230 ``True`` if the dayObs is today, ``False`` if it is in the past. 

1231 

1232 Raises 

1233 ValueError: if the dayObs is in the future. 

1234 """ 

1235 todayDayObs = getCurrentDayObs_int() 

1236 if dayObs == todayDayObs: 

1237 return True 

1238 if dayObs > todayDayObs: 

1239 raise ValueError("dayObs is in the future") 

1240 return False 

1241 

1242 @staticmethod 

1243 def _shortName(topic): 

1244 """Get the short name of a topic. 

1245 

1246 Parameters 

1247 ---------- 

1248 topic : `str` 

1249 The topic to get the short name of. 

1250 

1251 Returns 

1252 ------- 

1253 shortName : `str` 

1254 The short name of the topic, e.g. 'azimuthInPosition' 

1255 """ 

1256 # get, for example 'azimuthInPosition' from 

1257 # lsst.sal.MTMount.logevent_azimuthInPosition 

1258 return topic.split("_")[-1] 

1259 

    def _mergeData(self, data):
        """Merge a dict of dataframes based on private_efdStamp, recording
        where each row came from.

        Given a dict of dataframes, keyed by topic, merge them into a single
        dataframe, adding a column to record which topic each row came from.

        Note that this mutates the input dataframes in place: a ``rowFor``
        column is written into each non-empty frame in ``data``.

        Parameters
        ----------
        data : `dict` of `str` : `pd.DataFrame`
            The dataframes to merge.

        Returns
        -------
        merged : `pd.DataFrame`
            The merged dataframe.
        """
        # columns kept un-suffixed so that they align across all frames
        excludeColumns = ["private_efdStamp", "rowFor"]

        mergeArgs = {
            "how": "outer",
            "sort": True,
        }

        merged = None
        originalRowCounter = 0

        # Iterate over the keys and merge the corresponding DataFrames
        for key, df in data.items():
            if df.empty:
                # Must skip the df if it's empty, otherwise the merge will fail
                # due to lack of private_efdStamp. Because other axes might
                # still be in motion, so we still want to merge what we have
                continue

            originalRowCounter += len(df)
            component = self._shortName(key)  # Add suffix to column names to identify the source
            suffix = "_" + component

            # NB: writes into the caller's dataframe in place
            df["rowFor"] = component

            columnsToSuffix = [col for col in df.columns if col not in excludeColumns]
            df_to_suffix = df[columnsToSuffix].add_suffix(suffix)
            df = pd.concat([df[excludeColumns], df_to_suffix], axis=1)

            if merged is None:
                merged = df.copy()
            else:
                merged = pd.merge(merged, df, **mergeArgs)

        # NOTE(review): if every frame in ``data`` were empty, ``merged``
        # would still be None here and this access would raise - the caller
        # (_getEfdDataForDayObs) guards against that case before calling.
        merged = merged.loc[:, ~merged.columns.duplicated()]  # Remove duplicate columns after merge

        if len(merged) != originalRowCounter:
            self.log.warning(
                "Merged data has a different number of rows to the original data, some"
                " timestamps (rows) will contain more than one piece of actual information."
            )

        # if the index is still a DatetimeIndex here then we didn't actually
        # merge any data, so there is only data from a single component.
        # This is likely to result in no events, but not necessarily, and for
        # generality, instead we convert to a range index to ensure consistency
        # in the returned data, and allow processing to continue.
        if isinstance(merged.index, pd.DatetimeIndex):
            self.log.warning("Data was only found for a single component in the EFD.")
            merged.reset_index(drop=True, inplace=True)

        return merged

1328 

1329 def getEvent(self, dayObs, seqNum): 

1330 """Get a specific event for a given dayObs and seqNum. 

1331 

1332 Repeated calls for the same ``dayObs`` will use the cached data if the 

1333 day is in the past, and so will be much quicker. If the ``dayObs`` is 

1334 the current day then the EFD will be queried for new data for each 

1335 call, so a call which returns ``None`` on the first try might return an 

1336 event on the next, if the TMA is still moving and thus generating 

1337 events. 

1338 

1339 Parameters 

1340 ---------- 

1341 dayObs : `int` 

1342 The dayObs to get the event for. 

1343 seqNum : `int` 

1344 The sequence number of the event to get. 

1345 

1346 Returns 

1347 ------- 

1348 event : `lsst.summit.utils.tmaUtils.TMAEvent` 

1349 The event for the specified dayObs and seqNum, or `None` if the 

1350 event was not found. 

1351 """ 

1352 events = self.getEvents(dayObs) 

1353 if seqNum <= len(events): 

1354 event = events[seqNum] 

1355 if event.seqNum != seqNum: 

1356 # it's zero-indexed and contiguous so this must be true but 

1357 # a sanity check doesn't hurt. 

1358 raise AssertionError(f"Event sequence number mismatch: {event.seqNum} != {seqNum}") 

1359 return event 

1360 else: 

1361 self.log.warning(f"Event {seqNum} not found for {dayObs}") 

1362 return None 

1363 

1364 def getEvents(self, dayObs, addBlockInfo=True): 

1365 """Get the TMA events for the specified dayObs. 

1366 

1367 Gets the required mount data from the cache or the EFD as required, 

1368 handling whether we're working with live vs historical data. The 

1369 dataframes from the EFD is merged and applied to the TMAStateMachine, 

1370 and that series of state changes is used to generate a list of 

1371 TmaEvents for the day's data. 

1372 

1373 If the data is for the current day, i.e. if new events can potentially 

1374 land, then if the last event is "open" (meaning that the TMA appears to 

1375 be in motion and thus the event is growing with time), then that event 

1376 is excluded from the event list as it is expected to be changing with 

1377 time, and will likely close eventually. However, if that situation 

1378 occurs on a day in the past, then that event can never close, and the 

1379 event is therefore included, but a warning about the open event is 

1380 logged. 

1381 

1382 Parameters 

1383 ---------- 

1384 dayObs : `int` 

1385 The dayObs for which to get the events. 

1386 addBlockInfo : `bool`, optional 

1387 Whether to add block information to the events. This allows 

1388 skipping this step for speed when generating events for purposes 

1389 which don't need block information. 

1390 

1391 Returns 

1392 ------- 

1393 events : `list` of `lsst.summit.utils.tmaUtils.TMAState` 

1394 The events for the specified dayObs. 

1395 """ 

1396 workingLive = self.isToday(dayObs) 

1397 data = None 

1398 

1399 if workingLive: 

1400 # it's potentially updating data, so we must update the date 

1401 # regarless of whether we have it already or not 

1402 self.log.info(f"Updating mount data for {dayObs} from the EFD") 

1403 self._getEfdDataForDayObs(dayObs) 

1404 data = self._data[dayObs] 

1405 elif dayObs in self._data: 

1406 # data is in the cache and it's not being updated, so use it 

1407 data = self._data[dayObs] 

1408 elif dayObs not in self._data: 

1409 # we don't have the data yet, but it's not growing, so put it in 

1410 # the cache and use it from there 

1411 self.log.info(f"Retrieving mount data for {dayObs} from the EFD") 

1412 self._getEfdDataForDayObs(dayObs) 

1413 data = self._data[dayObs] 

1414 else: 

1415 raise RuntimeError("This should never happen") 

1416 

1417 # if we don't have something to work with, log a warning and return 

1418 if not self.dataFound(data): 

1419 self.log.warning(f"No EFD data found for {dayObs=}") 

1420 return [] 

1421 

1422 # applies the data to the state machine, and generates events from the 

1423 # series of states which results 

1424 events = self._calculateEventsFromMergedData( 

1425 data, dayObs, dataIsForCurrentDay=workingLive, addBlockInfo=addBlockInfo 

1426 ) 

1427 if not events: 

1428 self.log.warning(f"Failed to calculate any events for {dayObs=} despite EFD data existing!") 

1429 return events 

1430 

1431 @staticmethod 

1432 def dataFound(data): 

1433 """Check if any data was found. 

1434 

1435 Parameters 

1436 ---------- 

1437 data : `pd.DataFrame` 

1438 The merged dataframe to check. 

1439 

1440 Returns 

1441 ------- 

1442 dataFound : `bool` 

1443 Whether data was found. 

1444 """ 

1445 # You can't just compare to with data == NO_DATA_SENTINEL because 

1446 # `data` is usually a dataframe, and you can't compare a dataframe to a 

1447 # string directly. 

1448 return not (isinstance(data, str) and data == NO_DATA_SENTINEL) 

1449 

1450 def _getEfdDataForDayObs(self, dayObs): 

1451 """Get the EFD data for the specified dayObs and store it in the cache. 

1452 

1453 Gets the EFD data for all components, as a dict of dataframes keyed by 

1454 component name. These are then merged into a single dataframe in time 

1455 order, based on each row's `private_efdStamp`. This is then stored in 

1456 self._data[dayObs]. 

1457 

1458 If no data is found, the value is set to ``NO_DATA_SENTINEL`` to 

1459 differentiate this from ``None``, as this is what you'd get if you 

1460 queried the cache with `self._data.get(dayObs)`. It also marks that we 

1461 have already queried this day. 

1462 

1463 Parameters 

1464 ---------- 

1465 dayObs : `int` 

1466 The dayObs to query. 

1467 """ 

1468 data = {} 

1469 for component in itertools.chain( 

1470 self._movingComponents, self._inPositionComponents, self._stateComponents 

1471 ): 

1472 data[component] = getEfdData(self.client, component, dayObs=dayObs, warn=False) 

1473 self.log.debug(f"Found {len(data[component])} for {component}") 

1474 

1475 if all(dataframe.empty for dataframe in data.values()): 

1476 # if every single dataframe is empty, set the sentinel and don't 

1477 # try to merge anything, otherwise merge all the data we found 

1478 self.log.debug(f"No data found for {dayObs=}") 

1479 # a sentinel value that's not None 

1480 self._data[dayObs] = NO_DATA_SENTINEL 

1481 else: 

1482 merged = self._mergeData(data) 

1483 self._data[dayObs] = merged 

1484 

1485 def _calculateEventsFromMergedData(self, data, dayObs, dataIsForCurrentDay, addBlockInfo): 

1486 """Calculate the list of events from the merged data. 

1487 

1488 Runs the merged data, row by row, through the TMA state machine (with 

1489 ``tma.apply``) to get the overall TMA state at each row, building a 

1490 dict of these states, keyed by row number. 

1491 

1492 This time-series of TMA states are then looped over (in 

1493 `_statesToEventTuples`), building a list of tuples representing the 

1494 start and end of each event, the type of the event, and the reason for 

1495 the event ending. 

1496 

1497 This list of tuples is then passed to ``_makeEventsFromStateTuples``, 

1498 which actually creates the ``TMAEvent`` objects. 

1499 

1500 Parameters 

1501 ---------- 

1502 data : `pd.DataFrame` 

1503 The merged dataframe to use. 

1504 dayObs : `int` 

1505 The dayObs for the data. 

1506 dataIsForCurrentDay : `bool` 

1507 Whether the data is for the current day. Determines whether to 

1508 allow an open last event or not. 

1509 addBlockInfo : `bool` 

1510 Whether to add block information to the events. This allows 

1511 skipping this step for speed when generating events for purposes 

1512 which don't need block information. 

1513 

1514 Returns 

1515 ------- 

1516 events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent` 

1517 The events for the specified dayObs. 

1518 """ 

1519 engineeringMode = True 

1520 tma = TMAStateMachine(engineeringMode=engineeringMode) 

1521 

1522 # For now, we assume that the TMA starts each day able to move, but 

1523 # stationary. If this turns out to cause problems, we will need to 

1524 # change to loading data from the previous day(s), and looking back 

1525 # through it in time until a state change has been found for every 

1526 # axis. For now though, Bruno et. al think this is acceptable and 

1527 # preferable. 

1528 _initializeTma(tma) 

1529 

1530 tmaStates = {} 

1531 for rowNum, row in data.iterrows(): 

1532 tma.apply(row) 

1533 tmaStates[rowNum] = tma.state 

1534 

1535 stateTuples = self._statesToEventTuples(tmaStates, dataIsForCurrentDay) 

1536 events = self._makeEventsFromStateTuples(stateTuples, dayObs, data) 

1537 if addBlockInfo: 

1538 self.addBlockDataToEvents(dayObs, events) 

1539 return events 

1540 

    def _statesToEventTuples(self, states, dataIsForCurrentDay):
        """Get the event-tuples from the dictionary of TMAStates.

        Chunks the states into blocks of the same state, so that we can create
        an event for each block in `_makeEventsFromStateTuples`. Off-type
        states are skipped over, with each event starting when the telescope
        next resumes motion or changes to a different type of motion state,
        i.e. from non-tracking type movement (MOVE_POINT_TO_POINT, JOGGING,
        TRACKING-but-not-in-position, i.e. slewing) to a tracking type
        movement, or vice versa.

        Parameters
        ----------
        states : `dict` of `int` : `lsst.summit.utils.tmaUtils.TMAState`
            The states of the TMA, keyed by row number.
        dataIsForCurrentDay : `bool`
            Whether the data is for the current day. Determines whether to
            allow an open last event or not.

        Returns
        -------
        parsedStates : `list` of `ParsedState`
            The parsed states, each holding the start/end row numbers, the
            state during the event, and the state which ended it.
        """
        # Consider rewriting this with states as a list and using pop(0)?
        skipStates = (TMAState.STOPPED, TMAState.OFF, TMAState.FAULT)

        parsedStates = []
        eventStart = None
        rowNum = 0
        nRows = len(states)
        while rowNum < nRows:
            previousState = None
            state = states[rowNum]
            # if we're not in an event, fast forward through off-like rows
            # until a new event starts
            if eventStart is None and state in skipStates:
                rowNum += 1
                continue

            # we've started a new event, so walk through it and find the end
            eventStart = rowNum
            previousState = state
            rowNum += 1  # move to the next row before starting the while loop
            if rowNum == nRows:
                # we've reached the end of the data, and we're still in an
                # event, so don't return this presumably in-progress event
                self.log.warning("Reached the end of the data while starting a new event")
                break
            state = states[rowNum]
            # advance until the state changes (or the data runs out, in which
            # case state still equals previousState and eventEnd == nRows,
            # which the post-loop check below deals with)
            while state == previousState:
                rowNum += 1
                if rowNum == nRows:
                    break
                state = states[rowNum]
            parsedStates.append(
                self.ParsedState(
                    eventStart=eventStart, eventEnd=rowNum, previousState=previousState, state=state
                )
            )
            # only reset eventStart if the TMA went quiescent; otherwise the
            # next event starts immediately at this row
            if state in skipStates:
                eventStart = None

        # done parsing, just check the last event is valid
        if parsedStates:  # ensure we have at least one event
            lastEvent = parsedStates[-1]
            if lastEvent.eventEnd == nRows:
                # Generally, you *want* the timespan for an event to be the
                # first row of the next event, because you were in that state
                # right up until that state change. However, if that event is
                # a) the last one of the day and b) runs right up until the end
                # of the dataframe, then there isn't another row, so this will
                # overrun the array.
                #
                # If the data is for the current day then this isn't a worry,
                # as we're likely still taking data, and this event will likely
                # close yet, so we don't issue a warning, and simply drop the
                # event from the list.

                # However, if the data is for a past day then no new data will
                # come to close the event, so allow the event to be "open", and
                # issue a warning
                if dataIsForCurrentDay:
                    self.log.info("Discarding open (likely in-progess) final event from current day's events")
                    parsedStates = parsedStates[:-1]
                else:
                    self.log.warning("Last event ends open, forcing it to end at end of the day's data")
                    # ParsedState is a frozen dataclass, so build a replacement
                    # rather than mutating the existing one in place
                    parsedStates[-1] = self.ParsedState(
                        eventStart=lastEvent.eventStart,
                        eventEnd=lastEvent.eventEnd - 1,
                        previousState=lastEvent.previousState,
                        state=lastEvent.state,
                    )

        return parsedStates

1638 

1639 def addBlockDataToEvents(self, dayObs, events): 

1640 """Find all the block data in the EFD for the specified events. 

1641 

1642 Finds all the block data in the EFD relating to the events, parses it, 

1643 from the rows of the dataframe, and adds it to the events in place. 

1644 

1645 Parameters 

1646 ---------- 

1647 events : `lsst.summit.utils.tmaUtils.TMAEvent` or 

1648 `list` of `lsst.summit.utils.tmaUtils.TMAEvent` 

1649 One or more events to get the block data for. 

1650 """ 

1651 try: 

1652 blockParser = BlockParser(dayObs, client=self.client) 

1653 except Exception as e: 

1654 # adding the block data should never cause a failure so if we can't 

1655 # get the block data, log a warning and return. It is, however, 

1656 # never expected, so use log.exception to get the full traceback 

1657 # and scare users so it gets reported 

1658 self.log.exception(f"Failed to parse block data for {dayObs=}, {e}") 

1659 return 

1660 blocks = blockParser.getBlockNums() 

1661 blockDict = {} 

1662 for block in blocks: 

1663 blockDict[block] = blockParser.getSeqNums(block) 

1664 

1665 for block, seqNums in blockDict.items(): 

1666 for seqNum in seqNums: 

1667 blockInfo = blockParser.getBlockInfo(block=block, seqNum=seqNum) 

1668 

1669 relatedEvents = blockParser.getEventsForBlock(events, block=block, seqNum=seqNum) 

1670 for event in relatedEvents: 

1671 toSet = [blockInfo] 

1672 if event.blockInfos is not None: 

1673 existingInfo = event.blockInfos 

1674 existingInfo.append(blockInfo) 

1675 toSet = existingInfo 

1676 

1677 # Add the blockInfo to the TMAEvent. Because this is a 

1678 # frozen dataclass, use object.__setattr__ to set the 

1679 # attribute. This is the correct way to set a frozen 

1680 # dataclass attribute after creation. 

1681 object.__setattr__(event, "blockInfos", toSet) 

1682 

1683 def _makeEventsFromStateTuples(self, states, dayObs, data): 

1684 """For the list of state-tuples, create a list of ``TMAEvent`` objects. 

1685 

1686 Given the underlying data, and the start/stop points for each event, 

1687 create the TMAEvent objects for the dayObs. 

1688 

1689 Parameters 

1690 ---------- 

1691 states : `list` of `tuple` 

1692 The parsed states, as a list of tuples of the form: 

1693 ``(eventStart, eventEnd, eventType, endReason)`` 

1694 dayObs : `int` 

1695 The dayObs for the data. 

1696 data : `pd.DataFrame` 

1697 The merged dataframe. 

1698 

1699 Returns 

1700 ------- 

1701 events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent` 

1702 The events for the specified dayObs. 

1703 """ 

1704 seqNum = 0 

1705 events = [] 

1706 for parsedState in states: 

1707 begin = data.iloc[parsedState.eventStart]["private_efdStamp"] 

1708 end = data.iloc[parsedState.eventEnd]["private_efdStamp"] 

1709 beginAstropy = efdTimestampToAstropy(begin) 

1710 endAstropy = efdTimestampToAstropy(end) 

1711 duration = end - begin 

1712 event = TMAEvent( 

1713 dayObs=dayObs, 

1714 seqNum=seqNum, 

1715 type=parsedState.previousState, 

1716 endReason=parsedState.state, 

1717 duration=duration, 

1718 begin=beginAstropy, 

1719 end=endAstropy, 

1720 blockInfos=[], # this is added later 

1721 _startRow=parsedState.eventStart, 

1722 _endRow=parsedState.eventEnd, 

1723 ) 

1724 events.append(event) 

1725 seqNum += 1 

1726 return events 

1727 

1728 @staticmethod 

1729 def printTmaDetailedState(tma): 

1730 """Print the full state of all the components of the TMA. 

1731 

1732 Currently this is the azimuth and elevation axes' power and motion 

1733 states, and their respective inPosition statuses. 

1734 

1735 Parameters 

1736 ---------- 

1737 tma : `lsst.summit.utils.tmaUtils.TMAStateMachine` 

1738 The TMA state machine in the state we want to print. 

1739 """ 

1740 axes = ["azimuth", "elevation"] 

1741 p = tma._parts 

1742 axisPad = len(max(axes, key=len)) # length of the longest axis string == 9 here, but this is general 

1743 motionPad = max(len(s.name) for s in AxisMotionState) 

1744 powerPad = max(len(s.name) for s in PowerState) 

1745 

1746 # example output to show what's being done with the padding: 

1747 # azimuth - Power: ON Motion: STOPPED InPosition: True # noqa: W505 

1748 # elevation - Power: ON Motion: MOVING_POINT_TO_POINT InPosition: False # noqa: W505 

1749 for axis in axes: 

1750 print( 

1751 f"{axis:>{axisPad}} - " 

1752 f"Power: {p[f'{axis}SystemState'].name:>{powerPad}} " 

1753 f"Motion: {p[f'{axis}MotionState'].name:>{motionPad}} " 

1754 f"InPosition: {p[f'{axis}InPosition']}" 

1755 ) 

1756 print(f"Overall system state: {tma.state.name}") 

1757 

1758 def printFullDayStateEvolution(self, dayObs, taiOrUtc="utc"): 

1759 """Print the full TMA state evolution for the specified dayObs. 

1760 

1761 Replays all the data from the EFD for the specified dayObs through 

1762 the TMA state machine, and prints both the overall and detailed state 

1763 of the TMA for each row. 

1764 

1765 Parameters 

1766 ---------- 

1767 dayObs : `int` 

1768 The dayObs for which to print the state evolution. 

1769 taiOrUtc : `str`, optional 

1770 Whether to print the timestamps in TAI or UTC. Default is UTC. 

1771 """ 

1772 # create a fake event which spans the whole day, and then use 

1773 # printEventDetails code while skipping the header to print the 

1774 # evolution. 

1775 _ = self.getEvents(dayObs) # ensure the data has been retrieved from the EFD 

1776 data = self._data[dayObs] 

1777 lastRowNum = len(data) - 1 

1778 

1779 fakeEvent = TMAEvent( 

1780 dayObs=dayObs, 

1781 seqNum=-1, # anything will do 

1782 type=TMAState.OFF, # anything will do 

1783 endReason=TMAState.OFF, # anything will do 

1784 duration=-1, # anything will do 

1785 begin=efdTimestampToAstropy(data.iloc[0]["private_efdStamp"]), 

1786 end=efdTimestampToAstropy(data.iloc[-1]["private_efdStamp"]), 

1787 _startRow=0, 

1788 _endRow=lastRowNum, 

1789 ) 

1790 self.printEventDetails(fakeEvent, taiOrUtc=taiOrUtc, printHeader=False) 

1791 

1792 def printEventDetails(self, event, taiOrUtc="tai", printHeader=True): 

1793 """Print a detailed breakdown of all state transitions during an event. 

1794 

1795 Note: this is not the most efficient way to do this, but it is much the 

1796 cleanest with respect to the actual state machine application and event 

1797 generation code, and is easily fast enough for the cases it will be 

1798 used for. It is not worth complicating the normal state machine logic 

1799 to try to use this code. 

1800 

1801 Parameters 

1802 ---------- 

1803 event : `lsst.summit.utils.tmaUtils.TMAEvent` 

1804 The event to display the details of. 

1805 taiOrUtc : `str`, optional 

1806 Whether to display time strings in TAI or UTC. Defaults to TAI. 

1807 Case insensitive. 

1808 printHeader : `bool`, optional 

1809 Whether to print the event summary. Defaults to True. The primary 

1810 reason for the existence of this option is so that this same 

1811 printing function can be used to show the evolution of a whole day 

1812 by supplying a fake event which spans the whole day, but this event 

1813 necessarily has a meaningless summary, and so needs suppressing. 

1814 """ 

1815 taiOrUtc = taiOrUtc.lower() 

1816 if taiOrUtc not in ["tai", "utc"]: 

1817 raise ValueError(f"Got unsuppoted value for {taiOrUtc=}") 

1818 useUtc = taiOrUtc == "utc" 

1819 

1820 if printHeader: 

1821 print( 

1822 f"Details for {event.duration:.2f}s {event.type.name} event dayObs={event.dayObs}" 

1823 f" seqNum={event.seqNum}:" 

1824 ) 

1825 print(f"- Event began at: {event.begin.utc.isot if useUtc else event.begin.isot}") 

1826 print(f"- Event ended at: {event.end.utc.isot if useUtc else event.end.isot}") 

1827 

1828 dayObs = event.dayObs 

1829 data = self._data[dayObs] 

1830 startRow = event._startRow 

1831 endRow = event._endRow 

1832 nRowsToApply = endRow - startRow + 1 

1833 print(f"\nTotal number of rows in the merged dataframe: {len(data)}") 

1834 if printHeader: 

1835 print(f"of which rows {startRow} to {endRow} (inclusive) relate to this event.") 

1836 

1837 # reconstruct all the states 

1838 tma = TMAStateMachine(engineeringMode=True) 

1839 _initializeTma(tma) 

1840 

1841 tmaStates = {} 

1842 firstAppliedRow = True # flag to print a header on the first row that's applied 

1843 for rowNum, row in data.iterrows(): # must replay rows right from start to get full correct state 

1844 if rowNum == startRow: 

1845 # we've not yet applied this row, so this is the state just 

1846 # before event 

1847 print(f"\nBefore the event the TMA was in state {tma.state.name}:") 

1848 self.printTmaDetailedState(tma) 

1849 

1850 if rowNum >= startRow and rowNum <= endRow: 

1851 if firstAppliedRow: # only print this intro on the first row we're applying 

1852 print( 

1853 f"\nThen, applying the {nRowsToApply} rows of data for this event, the state" 

1854 " evolved as follows:\n" 

1855 ) 

1856 firstAppliedRow = False 

1857 

1858 # break the row down and print its details 

1859 rowFor = row["rowFor"] 

1860 axis, rowType = getAxisAndType(rowFor) # e.g. elevation, MotionState 

1861 value = tma._getRowPayload(row, rowType, rowFor) 

1862 valueStr = f"{str(value) if isinstance(value, bool) else value.name}" 

1863 rowTime = efdTimestampToAstropy(row["private_efdStamp"]) 

1864 print( 

1865 f"On row {rowNum} the {axis} axis had the {rowType} set to {valueStr} at" 

1866 f" {rowTime.utc.isot if useUtc else rowTime.isot}" 

1867 ) 

1868 

1869 # then apply it as usual, printing the state right afterwards 

1870 tma.apply(row) 

1871 tmaStates[rowNum] = tma.state 

1872 self.printTmaDetailedState(tma) 

1873 print() 

1874 

1875 else: 

1876 # if it's not in the range of interest then just apply it 

1877 # silently as usual 

1878 tma.apply(row) 

1879 tmaStates[rowNum] = tma.state 

1880 

    def findEvent(self, time):
        """Find the event which contains the specified time.

        If the specified time lies within an event, that event is returned. If
        it is at the exact start, that is logged, and if that start point is
        shared by the end of the previous event, that is logged too. If the
        event lies between events, the events either side are logged, but
        ``None`` is returned. If the time lies before the first event of the
        day a warning is logged, as for times after the last event of the day.

        Note that event intervals are half-open: an event's end time is
        exclusive, so a time exactly at the end of the final event of the day
        is not considered to be inside any event.

        Parameters
        ----------
        time : `astropy.time.Time`
            The time.

        Returns
        -------
        event : `lsst.summit.utils.tmaUtils.TMAEvent` or `None`
            The event which contains the specified time, or ``None`` if the
            time doesn't fall during an event.
        """
        # there are five possible cases:
        # 1) the time lies before the first event of the day
        # 2) the time lies after the last event of the day
        # 3) the time lies within an event
        # 3a) the time is exactly at the start of an event
        # 3b) if so, time can be shared by the end of the previous event if
        #     they are contiguous
        # 4) the time lies between two events
        # 5) the time is exactly at end of the last event of the day. This is
        #    an issue because event end times are exclusive, so this time is
        #    not technically in that event, it's the moment it closes (and if
        #    there *was* an event which followed contiguously, it would be in
        #    that event instead, which is what motivates this definition of
        #    lies within what event)

        dayObs = getDayObsForTime(time)
        # we know this is on the right day, and definitely before the specified
        # time, but sanity check this before continuing as this needs to be
        # true for this to give the correct answer
        assert getDayObsStartTime(dayObs) <= time
        assert getDayObsEndTime(dayObs) > time

        # command start to many log messages so define once here
        logStart = f"Specified time {time.isot} falls on {dayObs=}"

        events = self.getEvents(dayObs)
        if len(events) == 0:
            self.log.warning(f"There are no events found for {dayObs}")
            return None

        # check case 1)
        if time < events[0].begin:
            self.log.warning(f"{logStart} and is before the first event of the day")
            return None

        # check case 2)
        if time > events[-1].end:
            self.log.warning(f"{logStart} and is after the last event of the day")
            return None

        # check case 5)
        if time == events[-1].end:
            self.log.warning(
                f"{logStart} and is exactly at the end of the last event of the day"
                f" (seqnum={events[-1].seqNum}). Because event intervals are half-open, this"
                " time does not technically lie in any event"
            )
            return None

        # we are now either in an event, or between events. Walk through the
        # events, and if the end of the event is after the specified time, then
        # we're either in it or past it, so check if we're in.
        for eventNum, event in enumerate(events):
            if event.end > time:  # case 3) we are now into or past the right event
                # the event end encloses the time, so note the > and not >=,
                # this must be strictly greater, we check the overlap case
                # later
                if time >= event.begin:  # we're fully inside the event, so return it.
                    # 3a) before returning, check if we're exactly at the start
                    # of the event, and if so, log it. Then 3b) also check if
                    # we're at the exact end of the previous event, and if so,
                    # log that too.
                    if time == event.begin:
                        self.log.info(f"{logStart} and is exactly at the start of event" f" {eventNum}")
                        if eventNum == 0:  # I think this is actually impossible, but check anyway
                            return event  # can't check the previous event so return here
                        previousEvent = events[eventNum - 1]
                        if previousEvent.end == time:
                            self.log.info(
                                "Previous event is contiguous, so this time is also at the exact"
                                f" end of {eventNum - 1}"
                            )
                    return event
                else:  # case 4)
                    # the event end is past the time, but it's not inside the
                    # event, so we're between events. Log which we're between
                    # and return None
                    previousEvent = events[eventNum - 1]
                    timeAfterPrev = (time - previousEvent.end).to_datetime()
                    naturalTimeAfterPrev = humanize.naturaldelta(timeAfterPrev, minimum_unit="MICROSECONDS")
                    timeBeforeCurrent = (event.begin - time).to_datetime()
                    naturalTimeBeforeCurrent = humanize.naturaldelta(
                        timeBeforeCurrent, minimum_unit="MICROSECONDS"
                    )
                    self.log.info(
                        f"{logStart} and lies"
                        f" {naturalTimeAfterPrev} after the end of event {previousEvent.seqNum}"
                        f" and {naturalTimeBeforeCurrent} before the start of event {event.seqNum}."
                    )
                    return None

        # unreachable: cases 1, 2 and 5 were handled above, and the loop must
        # hit case 3 or 4 for any remaining time
        raise RuntimeError(
            "Event finding logic fundamentally failed, which should never happen - the code" " needs fixing"
        )