Coverage for python / lsst / summit / utils / tmaUtils.py: 14%

750 statements  

« prev     ^ index     » next       coverage.py v7.13.5, created at 2026-05-07 09:02 +0000

1# This file is part of summit_utils. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <https://www.gnu.org/licenses/>. 

21from __future__ import annotations 

22 

23import datetime 

24import enum 

25import itertools 

26import logging 

27import re 

28from collections.abc import Iterable 

29from dataclasses import dataclass, field 

30from typing import TYPE_CHECKING, Any, Callable 

31 

32import astropy 

33import humanize 

34import matplotlib 

35import matplotlib.dates as mdates 

36import matplotlib.pyplot as plt 

37import numpy as np 

38import pandas as pd 

39from astropy.time import Time 

40from matplotlib.ticker import FuncFormatter 

41from scipy.optimize import minimize 

42 

43from lsst.utils.iteration import ensure_iterable 

44from lsst.utils.plotting.figures import make_figure 

45 

46from .blockUtils import BlockParser 

47from .dateTime import ( 

48 dayObsIntToString, 

49 efdTimestampToAstropy, 

50 getCurrentDayObsInt, 

51 getDayObsEndTime, 

52 getDayObsForTime, 

53 getDayObsStartTime, 

54) 

55from .efdUtils import COMMAND_ALIASES, clipDataToEvent, getCommands, getEfdData, makeEfdClient 

56from .enums import AxisMotionState, PowerState 

57 

58if TYPE_CHECKING: 

59 try: 

60 from lsst_efd_client import EfdClient 

61 except ImportError: 

62 EfdClient = None 

63 

64__all__ = ( 

65 "TMAStateMachine", 

66 "TMAEvent", 

67 "TMAEventMaker", 

68 "TMAState", 

69 "AxisMotionState", 

70 "PowerState", 

71 "getSlewsFromEventList", 

72 "getTracksFromEventList", 

73 "getTorqueMaxima", 

74 "filterBadValues", 

75) 

76 

# We don't want to use `None` for a no-data sentinel because dict.get('key')
# returns None if the key isn't present, and also we need to mark that the data
# was queried for and no data was found, whereas the key not being present
# means that we've not yet looked for the data.
NO_DATA_SENTINEL = "NODATA"

# The known time difference between the TMA demand position and the TMA
# position when tracking. 20Hz data times three points = 150ms.
TRACKING_RESIDUAL_TAIL_CLIP = -0.15  # seconds

# Thresholds controlling cell colouring in the RubinTV table: yellow at the
# warning level, red at the bad level.
MOUNT_IMAGE_WARNING_LEVEL = 0.01  # this determines the colouring of the cells in the table, yellow for this
MOUNT_IMAGE_BAD_LEVEL = 0.05  # and red for this
HARDPOINT_FORCE_WARNING_LEVEL = 900.0  # for colouring of the cells in the table, yellow for this
HARDPOINT_FORCE_BAD_LEVEL = 1100.0  # and red for this

91 

92 

def getSlewsFromEventList(
    events: list[TMAEvent],
) -> list[TMAEvent]:
    """Return only the slewing events from a list of TMAEvents.

    Parameters
    ----------
    events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent`
        The list of events to filter.

    Returns
    -------
    events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent`
        The filtered list of events.
    """
    slews = []
    for event in events:
        if event.type == TMAState.SLEWING:
            slews.append(event)
    return slews

109 

110 

def getTracksFromEventList(
    events: list[TMAEvent],
) -> list[TMAEvent]:
    """Return only the tracking events from a list of TMAEvents.

    Parameters
    ----------
    events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent`
        The list of events to filter.

    Returns
    -------
    events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent`
        The filtered list of events.
    """
    tracking = filter(lambda event: event.type == TMAState.TRACKING, events)
    return list(tracking)

127 

128 

def getTorqueMaxima(table: pd.DataFrame) -> None:
    """Print the maximum positive and negative azimuth and elevation torques.

    Designed to be used with the table as downloaded from RubinTV.

    Parameters
    ----------
    table : `pd.DataFrame`
        The table of data to use, as generated by Rapid Analysis. Must contain
        the "Largest azimuth torque" and "Largest elevation torque" columns.
    """
    for axis in ["elevation", "azimuth"]:
        col = f"Largest {axis} torque"
        # NOTE(review): np.argmax/argmin give *positional* indices, which are
        # printed as seqNums — assumes the table rows are in seqNum order with
        # no gaps; confirm against the RubinTV table format.
        maxPos = np.argmax(table[col])
        maxVal = table[col].iloc[maxPos]
        print(f"Max positive {axis:9} torque during seqNum {maxPos:>4}: {maxVal / 1000:>7.1f}kNm")
        minPos = np.argmin(table[col])
        minVal = table[col].iloc[minPos]
        print(f"Max negative {axis:9} torque during seqNum {minPos:>4}: {minVal / 1000:>7.1f}kNm")
    # redundant `return None` removed; the function falls off the end

148 

149 

def getAzimuthElevationDataForEvent(
    client: EfdClient,
    event: TMAEvent,
    prePadding: float = 0,
    postPadding: float = 0,
    maxDeltaT: float = 1.0e-3,
) -> tuple[pd.DataFrame, pd.DataFrame]:
    """Get the data for the az/el telemetry topics for a given TMAEvent.

    The error between the actual and demanded positions is calculated and added
    to the dataframes in the az/elError columns. For TRACKING type events, this
    error should be extremely close to zero, whereas for SLEWING type events,
    this error represents how far the TMA is from the demanded position, and is
    therefore arbitrarily large, and tends to zero as the TMA gets closer to
    tracking the sky.

    Parameters
    ----------
    client : `lsst_efd_client.efd_helper.EfdClient`
        The EFD client to use.
    event : `lsst.summit.utils.tmaUtils.TMAEvent`
        The event to get the data for.
    prePadding : `float`, optional
        The amount of time to pad the event with before the start time, in
        seconds.
    postPadding : `float`, optional
        The amount of time to pad the event with after the end time, in
        seconds.
    maxDeltaT : `float`, optional
        The maximum allowed time adjustment to bring the median error to
        zero, in seconds.

    Returns
    -------
    azimuthData : `pd.DataFrame`
        The azimuth data for the specified event.
    elevationData : `pd.DataFrame`
        The elevation data for the specified event.
    """

    def calcDeltaT(params, args):
        # Objective function: the absolute median of the actual-minus-demand
        # error (in arcsec) when the demand timeline is shifted by deltaT.
        [values, valTimes, demand, demTimes] = args
        [deltaT] = params
        demandInterp = np.interp(valTimes, demTimes + deltaT, demand)
        error = (values - demandInterp) * 3600
        return abs(np.median(error))

    def fitDeltaT(values, valTimes, demand, demTimes):
        # Find the time shift, bounded to +/-maxDeltaT seconds, which drives
        # the median error to zero.
        args = [values, valTimes, demand, demTimes]
        result = minimize(calcDeltaT, [0.0], args=args, method="Powell", bounds=[(-maxDeltaT, maxDeltaT)])
        return result.x[0]

    def addErrorColumn(data, errorColumn):
        # Interpolate the demand onto the actual-position timestamps with the
        # fitted time offset applied, and store the residual in arcsec.
        values = np.asarray(data["actualPosition"])
        valTimes = np.asarray(data["actualPositionTimestamp"])
        demand = np.asarray(data["demandPosition"])
        demTimes = np.asarray(data["demandPositionTimestamp"])
        deltaT = fitDeltaT(values, valTimes, demand, demTimes)
        demandInterp = np.interp(valTimes, demTimes + deltaT, demand)
        data[errorColumn] = (values - demandInterp) * 3600

    azimuthData = getEfdData(
        client,
        "lsst.sal.MTMount.azimuth",
        event=event,
        prePadding=prePadding,
        postPadding=postPadding,
        raiseIfTopicNotInSchema=False,
    )
    elevationData = getEfdData(
        client,
        "lsst.sal.MTMount.elevation",
        event=event,
        prePadding=prePadding,
        postPadding=postPadding,
        raiseIfTopicNotInSchema=False,
    )

    # The az and el processing was previously duplicated inline; it is now
    # factored into addErrorColumn, which behaves identically per axis.
    addErrorColumn(azimuthData, "azError")
    addErrorColumn(elevationData, "elError")

    return azimuthData, elevationData

247 

248 

def getM1M3HardpointDataForEvent(
    client: EfdClient,
    event: TMAEvent,
    prePadding: float = 0,
    postPadding: float = 0,
) -> pd.DataFrame:
    """Get the data for the M1M3 hardpoint telemetry for a given TMAEvent.

    During slews, the M1M3 mirror moves in response to the accelerations. The
    hardpoint forces need to stay within accepted limits to prevent long term
    cracking on the mirror.

    Parameters
    ----------
    client : `lsst_efd_client.efd_helper.EfdClient`
        The EFD client to use.
    event : `lsst.summit.utils.tmaUtils.TMAEvent`
        The event to get the data for.
    prePadding : `float`, optional
        The amount of time to pad the event with before the start time, in
        seconds.
    postPadding : `float`, optional
        The amount of time to pad the event with after the end time, in
        seconds.

    Returns
    -------
    hardpointData : `pd.DataFrame`
        The hardpoint forces for the specified event, with an added
        ``maxForce`` column holding the per-row maximum absolute force over
        the six hardpoints.
    """
    hardpointData = getEfdData(
        client,
        "lsst.sal.MTM1M3.hardpointActuatorData",
        event=event,
        prePadding=prePadding,
        postPadding=postPadding,
        raiseIfTopicNotInSchema=False,
    )

    # Add a column with the absolute max force over the six hardpoints.
    # Vectorized: the previous implementation looped row-by-row with .iloc,
    # doing O(n) python-level work; abs().max(axis=1) skips NaNs just like
    # np.nanmax did, so the values are identical.
    forceColumns = [f"measuredForce{j}" for j in range(6)]
    hardpointData["maxForce"] = hardpointData[forceColumns].abs().max(axis=1)

    return hardpointData

296 

297 

298def filterBadValues(values: list | np.ndarray, maxDelta: float = 0.1, maxConsecutiveValues: int = 3) -> int: 

299 """Filter out bad values from a dataset, replacing them in-place. 

300 

301 This function replaces non-physical points in the dataset with an 

302 extrapolation of the preceding two values. No more than 3 successive data 

303 points are allowed to be replaced. Minimum length of the input is 3 points. 

304 

305 Parameters 

306 ---------- 

307 values : `list` or `np.ndarray` 

308 The dataset containing the values to be filtered. 

309 maxDelta : `float`, optional 

310 The maximum allowed difference between consecutive values. Values with 

311 a difference greater than `maxDelta` will be considered as bad values 

312 and replaced with an extrapolation. 

313 maxConsecutiveValues : `int`, optional 

314 The maximum number of consecutive values to replace. Defaults to 3. 

315 

316 Returns 

317 ------- 

318 nBadPoints : `int` 

319 The number of bad values that were replaced out. 

320 """ 

321 # Find non-physical points and replace with extrapolation. No more than 

322 # maxConsecutiveValues successive data points can be replaced. 

323 badCounter = 0 

324 consecutiveCounter = 0 

325 

326 log = logging.getLogger(__name__) 

327 

328 median = np.nanmedian(values) 

329 # if either of the the first two points are more than maxDelta away from 

330 # the median, replace them with the median 

331 for i in range(2): 

332 if abs(values[i] - median) > maxDelta: 

333 log.warning(f"Replacing bad value of {values[i]} at index {i} with {median=}") 

334 values[i] = median 

335 badCounter += 1 

336 

337 # from the second element of the array, walk through and calculate the 

338 # difference between each element and the previous one. If the difference 

339 # is greater than maxDelta, replace the element with the average of the 

340 # previous two known good values, i.e. ones which have not been replaced. 

341 # if the first two points differ from the median by more than maxDelta, 

342 # replace them with the median 

343 lastGoodValue1 = values[1] # the most recent good value 

344 lastGoodValue2 = values[0] # the second most recent good value 

345 replacementValue = (lastGoodValue1 + lastGoodValue2) / 2.0 # in case we have to replace the first value 

346 for i in range(2, len(values)): 

347 if abs(values[i] - lastGoodValue1) >= maxDelta: 

348 if consecutiveCounter < maxConsecutiveValues: 

349 consecutiveCounter += 1 

350 badCounter += 1 

351 log.warning(f"Replacing value at index {i} with {replacementValue}") 

352 values[i] = replacementValue 

353 else: 

354 log.warning( 

355 f"More than 3 consecutive replacements at index {i}. Stopping replacements" 

356 " until the next good value." 

357 ) 

358 else: 

359 lastGoodValue2 = lastGoodValue1 

360 lastGoodValue1 = values[i] 

361 replacementValue = (lastGoodValue1 + lastGoodValue2) / 2.0 

362 consecutiveCounter = 0 

363 return badCounter 

364 

365 

366def plotEvent( 

367 client: EfdClient, 

368 event: TMAEvent, 

369 fig: matplotlib.figure.Figure | None = None, 

370 prePadding: float = 0, 

371 postPadding: float = 0, 

372 commands: dict[pd.Timestamp | datetime.datetime, str] = {}, 

373 azimuthData: pd.DataFrame | None = None, 

374 elevationData: pd.DataFrame | None = None, 

375 hardpointData: pd.DataFrame | None = None, 

376 doFilterResiduals: bool = False, 

377 maxDelta: float = 0.1, 

378 metadataWriter: Callable | None = None, 

379) -> matplotlib.figure.Figure: 

380 """Plot the TMA axis positions over the course of a given TMAEvent. 

381 

382 Plots the axis motion profiles for the given event, with optional padding 

383 at the start and end of the event. If the data is provided via the 

384 azimuthData and elevationData parameters, it will be used, otherwise it 

385 will be queried from the EFD. For slews, the M1M3 hardpoint forces are 

386 also plotted. Similarly, it will query the EFD if the hardpoint forces are 

387 not provided. 

388 

389 Optionally plots any commands issued during or around the event, if these 

390 are supplied. Commands are supplied as a dictionary of tje times at which 

391 the commands were issued, with values as command strings. 

392 

393 Due to a problem with the way the data is uploaded to the EFD, there are 

394 occasional points in the tracking error plots that are very much larger 

395 than the typical mount jitter. These points are unphysical, since it is not 

396 possible for the mount to move that fast. We don't want these points, which 

397 are not true mount problems, to distract from any real mount problems, and 

398 these can be filtered out via the ``doFilterResiduals`` kwarg, which 

399 replaces these non-physical points with an extrapolation of the average of 

400 the preceding two known-good points. If the first two points are bad these 

401 are replaced with the median of the dataset. The maximum difference between 

402 the model and the actual data, in arcseconds, to allow before filtering a 

403 data point can be set with the ``maxDelta`` kwarg. 

404 

405 Parameters 

406 ---------- 

407 client : `lsst_efd_client.efd_helper.EfdClient` 

408 The EFD client to use. 

409 event : `lsst.summit.utils.tmaUtils.TMAEvent` 

410 The event to plot. 

411 fig : `matplotlib.figure.Figure`, optional 

412 The figure to plot on. If not specified, a new figure will be created. 

413 prePadding : `float`, optional 

414 The amount of time to pad the event with before the start time, in 

415 seconds. 

416 postPadding : `float`, optional 

417 The amount of time to pad the event with after the end time, in 

418 seconds. 

419 commands : `dict` [`pd.Timestamp`, `str`], or 

420 `dict` [`datetime.datetime`, `str`], or optional 

421 A dictionary of commands to plot on the figure. The keys are the times 

422 at which a command was issued, and the value is the command string, as 

423 returned by efdUtils.getCommands(). 

424 azimuthData : `pd.DataFrame`, optional 

425 The azimuth data to plot. If not specified, it will be queried from the 

426 EFD. 

427 elevationData : `pd.DataFrame`, optional 

428 The elevation data to plot. If not specified, it will be queried from 

429 the EFD. 

430 hardpointData : `pd.DataFrame`, optional 

431 The hardpoint forces to plot. If not specified, it will be queried from 

432 the EFD. 

433 doFilterResiduals : 'bool', optional 

434 Enables filtering of unphysical data points in the tracking residuals. 

435 maxDelta : `float`, optional 

436 The maximum difference between the model and the actual data, in 

437 arcseconds, to allow before filtering the data point. Ignored if 

438 ``doFilterResiduals`` is `False`. 

439 metadataWriter : `callable`, optional 

440 Should be a callable 

441 ``lsst.rubintv.production.utils.writeMetadataShard`` function that has 

442 had the path filled in with ``functools.patrial`` so that it will just 

443 write out the data when called with the event's dayObs and a dictionary 

444 containing the row data that should be written. 

445 

446 Returns 

447 ------- 

448 fig : `matplotlib.figure.Figure` 

449 The figure on which the plot was made. 

450 """ 

451 

452 def tickFormatter(value: float, tick_number: float) -> str: 

453 # Convert the value to a string without subtracting large numbers 

454 # tick_number is unused. 

455 return f"{value:.2f}" 

456 

457 def getPlotTime(time: pd.Timestamp | Time | datetime.datetime) -> datetime.datetime: 

458 """Get the right time to plot a point from the various time formats.""" 

459 match time: 

460 case pd.Timestamp(): 

461 return time.to_pydatetime() 

462 case astropy.time.Time(): 

463 return time.utc.datetime 

464 case datetime.datetime(): 

465 return time 

466 case _: 

467 raise ValueError(f"Unknown type for commandTime: {type(time)}") 

468 

469 # plot any commands we might have 

470 if not isinstance(commands, dict): 

471 raise TypeError("commands must be a dict of command names with values as" " astropy.time.Time values") 

472 

473 if fig is None: 

474 fig = make_figure(figsize=(10, 8)) 

475 log = logging.getLogger(__name__) 

476 log.warning( 

477 "Making new matplotlib figure - if this is in a loop you're going to have a bad time." 

478 " Pass in a figure with fig = plt.figure(figsize=(10, 8)) to avoid this warning." 

479 ) 

480 

481 fig.clear() 

482 ax1, ax1p5, ax2 = fig.subplots( 

483 3, sharex=True, gridspec_kw={"wspace": 0, "hspace": 0, "height_ratios": [2.5, 1, 1]} 

484 ) # type: ignore 

485 

486 if azimuthData is None or elevationData is None: 

487 azimuthData, elevationData = getAzimuthElevationDataForEvent( 

488 client, event, prePadding=prePadding, postPadding=postPadding 

489 ) 

490 

491 assert azimuthData is not None 

492 assert elevationData is not None 

493 

494 # Use the native color cycle for the lines. Because they're on different 

495 # axes they don't cycle by themselves 

496 lineColors = [p["color"] for p in plt.rcParams["axes.prop_cycle"]] 

497 nColors = len(lineColors) 

498 colorCounter = 0 

499 

500 ax1.plot(azimuthData["actualPosition"], label="Azimuth position", c=lineColors[colorCounter % nColors]) 

501 colorCounter += 1 

502 ax1.yaxis.set_major_formatter(FuncFormatter(tickFormatter)) 

503 ax1.set_ylabel("Azimuth (degrees)") 

504 

505 ax1_twin = ax1.twinx() 

506 ax1_twin.plot( 

507 elevationData["actualPosition"], label="Elevation position", c=lineColors[colorCounter % nColors] 

508 ) 

509 colorCounter += 1 

510 ax1_twin.yaxis.set_major_formatter(FuncFormatter(tickFormatter)) 

511 ax1_twin.set_ylabel("Elevation (degrees)") 

512 ax1.set_xticks([]) # remove x tick labels on the hidden upper x-axis 

513 

514 ax2_twin = ax2.twinx() 

515 ax2.plot(azimuthData["actualTorque"], label="Azimuth torque", c=lineColors[colorCounter % nColors]) 

516 colorCounter += 1 

517 ax2_twin.plot( 

518 elevationData["actualTorque"], label="Elevation torque", c=lineColors[colorCounter % nColors] 

519 ) 

520 colorCounter += 1 

521 ax2.set_ylabel("Azimuth torque (Nm)") 

522 ax2_twin.set_ylabel("Elevation torque (Nm)") 

523 ax2.set_xlabel("Time (UTC)") # yes, it really is UTC, matplotlib converts this automatically! 

524 

525 if event.type == TMAState.TRACKING: 

526 # returns a copy 

527 clippedAzimuthData = clipDataToEvent(azimuthData, event, postPadding=TRACKING_RESIDUAL_TAIL_CLIP) 

528 clippedElevationData = clipDataToEvent(elevationData, event, postPadding=TRACKING_RESIDUAL_TAIL_CLIP) 

529 

530 azError = clippedAzimuthData["azError"].to_numpy() 

531 elError = clippedElevationData["elError"].to_numpy() 

532 elVals = clippedElevationData["actualPosition"].to_numpy() 

533 

534 if doFilterResiduals: 

535 # Filtering out bad values 

536 nReplacedAz = filterBadValues(azError, maxDelta) 

537 nReplacedEl = filterBadValues(elError, maxDelta) 

538 clippedAzimuthData["azError"] = azError 

539 clippedElevationData["elError"] = elError 

540 replacementMsg = ( 

541 f"{nReplacedAz} bad azimuth values and {nReplacedEl} bad elevation values were replaced" 

542 ) 

543 else: 

544 # currently unused, but needed for mypy, and better than an 

545 # alternative in case someone redesigns that part later 

546 replacementMsg = "No values were replaced because doFilterResiduals is False" 

547 

548 # Calculate RMS 

549 az_rms = np.sqrt(np.mean(azError * azError)) 

550 el_rms = np.sqrt(np.mean(elError * elError)) 

551 

552 # Calculate Image impact RMS 

553 # We are less sensitive to Az errors near the zenith 

554 image_az_rms = az_rms * np.cos(elVals[0] * np.pi / 180.0) 

555 image_el_rms = el_rms 

556 image_impact_rms = np.sqrt(image_az_rms**2 + image_el_rms**2) 

557 

558 ax1p5.plot( 

559 clippedAzimuthData["azError"], 

560 label="Azimuth tracking error", 

561 c=lineColors[colorCounter % nColors], 

562 ) 

563 colorCounter += 1 

564 ax1p5.plot( 

565 clippedElevationData["elError"], 

566 label="Elevation tracking error", 

567 c=lineColors[colorCounter % nColors], 

568 ) 

569 colorCounter += 1 

570 ax1p5.axhline(0.01, ls="-.", color="black") 

571 ax1p5.axhline(-0.01, ls="-.", color="black") 

572 ax1p5.yaxis.set_major_formatter(FuncFormatter(tickFormatter)) 

573 ax1p5.set_ylabel("Tracking error (arcsec)") 

574 ax1p5.set_xticks([]) # remove x tick labels on the hidden upper x-axis 

575 ax1p5.set_ylim(-0.05, 0.05) 

576 ax1p5.set_yticks([-0.04, -0.02, 0.0, 0.02, 0.04]) 

577 ax1p5.legend() 

578 ax1p5.text(0.1, 0.9, f"Image impact RMS = {image_impact_rms:.3f} arcsec", transform=ax1p5.transAxes) 

579 if doFilterResiduals: 

580 ax1p5.text( 

581 0.1, 

582 0.8, 

583 replacementMsg, 

584 transform=ax1p5.transAxes, 

585 ) 

586 if metadataWriter is not None: 

587 md = {"Tracking image impact": f"{image_impact_rms:.3f}"} 

588 flagKey = "_Tracking image impact" 

589 if image_impact_rms > MOUNT_IMAGE_BAD_LEVEL: 

590 md.update({flagKey: "bad"}) 

591 elif image_impact_rms > MOUNT_IMAGE_WARNING_LEVEL: 

592 md.update({flagKey: "warning"}) 

593 

594 rowData = {event.seqNum: md} 

595 metadataWriter(dayObs=event.dayObs, mdDict=rowData) 

596 

597 elif event.type == TMAState.SLEWING: 

598 if hardpointData is None: 

599 hardpointData = getM1M3HardpointDataForEvent( 

600 client, event, prePadding=prePadding, postPadding=postPadding 

601 ) 

602 

603 if not hardpointData.empty: 

604 # Calculate Max hardpoint force 

605 maxForce = np.nanmax(hardpointData["maxForce"].to_numpy()) 

606 

607 for hp_index in range(6): 

608 ax1p5.plot( 

609 hardpointData[f"measuredForce{hp_index}"], 

610 label=f"HP_{hp_index}", 

611 c=lineColors[colorCounter % nColors], 

612 ) 

613 colorCounter += 1 

614 ax1p5.axhline(500.0, ls="-.", color="goldenrod") 

615 ax1p5.axhline(-500.0, ls="-.", color="goldenrod") 

616 ax1p5.axhline(1000.0, ls="-.", color="red") 

617 ax1p5.axhline(-1000.0, ls="-.", color="red") 

618 ax1p5.yaxis.set_major_formatter(FuncFormatter(tickFormatter)) 

619 ax1p5.set_ylabel("HP force (N)") 

620 ax1p5.set_xticks([]) # remove x tick labels on the hidden upper x-axis 

621 if maxForce > 2000.0: 

622 ax1p5.set_ylim(-4000.0, 4000.0) 

623 ax1p5.set_yticks([-2000, 0.0, 2000]) 

624 elif maxForce > 1000.0: 

625 ax1p5.set_ylim(-2000.0, 2000.0) 

626 ax1p5.set_yticks([-1000, 0.0, 1000]) 

627 else: 

628 ax1p5.set_ylim(-1000.0, 1000.0) 

629 ax1p5.set_yticks([-500, 0.0, 500]) 

630 ax1p5.legend() 

631 ax1p5.text(0.1, 0.9, f"Max HP force = {maxForce:.1f} N", transform=ax1p5.transAxes) 

632 

633 if metadataWriter is not None: 

634 md = {"HP Max force": f"{maxForce:.2f}"} 

635 flagKey = "_HP Max force" 

636 if maxForce > HARDPOINT_FORCE_BAD_LEVEL: 

637 md.update({flagKey: "bad"}) 

638 elif maxForce > HARDPOINT_FORCE_WARNING_LEVEL: 

639 md.update({flagKey: "warning"}) 

640 

641 rowData = {event.seqNum: md} 

642 metadataWriter(dayObs=event.dayObs, mdDict=rowData) 

643 

644 if prePadding or postPadding: 

645 # note the conversion to utc because the x-axis from the dataframe 

646 # already got automagically converted when plotting before, so this is 

647 # necessary for things to line up 

648 ax1_twin.axvline(event.begin.utc.datetime, c="k", ls="--", alpha=0.5, label="Event begin/end") 

649 ax1_twin.axvline(event.end.utc.datetime, c="k", ls="--", alpha=0.5) 

650 # extend lines down across lower plot, but do not re-add label 

651 ax2_twin.axvline(event.begin.utc.datetime, c="k", ls="--", alpha=0.5) 

652 ax2_twin.axvline(event.end.utc.datetime, c="k", ls="--", alpha=0.5) 

653 ax1p5.axvline(event.begin.utc.datetime, c="k", ls="--", alpha=0.5) 

654 ax1p5.axvline(event.end.utc.datetime, c="k", ls="--", alpha=0.5) 

655 

656 for commandTime, command in commands.items(): 

657 plotTime = getPlotTime(commandTime) 

658 ax1_twin.axvline( 

659 plotTime, c=lineColors[colorCounter % nColors], ls="--", alpha=0.75, label=f"{command}" 

660 ) 

661 # extend lines down across lower plot, but do not re-add label 

662 ax2_twin.axvline(plotTime, c=lineColors[colorCounter % nColors], ls="--", alpha=0.75) 

663 ax1p5.axvline(plotTime, c=lineColors[colorCounter % nColors], ls="--", alpha=0.75) 

664 colorCounter += 1 

665 

666 # combine the legends and put inside the plot 

667 handles1a, labels1a = ax1.get_legend_handles_labels() 

668 handles1b, labels1b = ax1_twin.get_legend_handles_labels() 

669 handles2a, labels2a = ax2.get_legend_handles_labels() 

670 handles2b, labels2b = ax2_twin.get_legend_handles_labels() 

671 

672 handles = handles1a + handles1b + handles2a + handles2b 

673 labels = labels1a + labels1b + labels2a + labels2b 

674 # ax2 is "in front" of ax1 because it has the vlines plotted on it, and 

675 # vlines are on ax2 so that they appear at the bottom of the legend, so 

676 # make sure to plot the legend on ax2, otherwise the vlines will go on top 

677 # of the otherwise-opaque legend. 

678 ax1_twin.legend(handles, labels, facecolor="white", framealpha=1) 

679 

680 # Add title with the event name, type etc 

681 dayObsStr = dayObsIntToString(event.dayObs) 

682 title = ( 

683 # top line is the event title, the details go on the line below 

684 f"{dayObsStr} - seqNum {event.seqNum} (version {event.version})" 

685 f"\nDuration = {event.duration:.2f}s" 

686 f" Event type: {event.type.name}" 

687 f" End reason: {event.endReason.name}" 

688 ) 

689 ax1_twin.set_title(title) 

690 

691 # Moving this block to the end recovers the time ticks and ticklabels. 

692 # put the ticks at an angle, and right align with the tick marks 

693 ax2.set_xticks(ax2.get_xticks()) # needed to supress a user warning 

694 xlabels = ax2.get_xticks() 

695 ax2.set_xticklabels(xlabels, rotation=40, ha="right") 

696 ax2.xaxis.set_major_locator(mdates.AutoDateLocator()) 

697 ax2.xaxis.set_major_formatter(mdates.DateFormatter("%H:%M:%S")) 

698 

699 return fig 

700 

701 

def getCommandsDuringEvent(
    client: EfdClient,
    event: TMAEvent,
    commands: Iterable[str] = ("raDecTarget",),
    prePadding: float = 0,
    postPadding: float = 0,
    timeFormat: str = "python",
    log: logging.Logger | None = None,
    doLog: bool = True,
) -> dict[datetime.datetime, str]:
    """Get the commands issued during an event.

    Get the times at which the specified commands were issued during the event.

    Parameters
    ----------
    client : `lsst_efd_client.efd_helper.EfdClient`
        The EFD client to use.
    event : `lsst.summit.utils.tmaUtils.TMAEvent`
        The event to plot.
    commands : `list` of `str`, optional
        The commands or command aliases to look for. Defaults to
        ['raDecTarget'].
    prePadding : `float`, optional
        The amount of time to pad the event with before the start time, in
        seconds.
    postPadding : `float`, optional
        The amount of time to pad the event with after the end time, in
        seconds.
    timeFormat : `str`, optional
        One of 'pandas' or 'astropy' or 'python'. If 'pandas', the dictionary
        keys will be pandas timestamps, if 'astropy' they will be astropy times
        and if 'python' they will be python datetimes.
    log : `logging.Logger`, optional
        The logger to use. If not specified, a new logger will be created if
        needed.
    doLog : `bool`, optional
        Whether to log messages. Defaults to True.

    Returns
    -------
    commandTimes : `dict` [`time`, `str`]
        A dictionary of the times at which the commands where issued. The type
        that `time` takes is determined by the format key, and defaults to
        python datetime.
    """
    commands = list(ensure_iterable(commands))
    fullCommands = [c if c not in COMMAND_ALIASES else COMMAND_ALIASES[c] for c in commands]
    del commands  # make sure we always use their full names

    commandTimes = getCommands(
        client,
        fullCommands,
        begin=event.begin,
        end=event.end,
        prePadding=prePadding,
        postPadding=postPadding,
        timeFormat=timeFormat,
    )

    if not commandTimes and doLog:
        # bug fix: the supplied ``log`` was previously ignored and
        # unconditionally replaced with a fresh logger
        if log is None:
            log = logging.getLogger(__name__)
        log.info(f"Found no commands in {fullCommands} issued during event {event.seqNum}")

    return commandTimes

767 

768 

def _initializeTma(tma: TMAStateMachine) -> None:
    """Helper function to turn a TMA into a valid state for testing.

    Do not call directly in normal usage or code, as this just arbitrarily
    sets values to make the TMA valid.

    Parameters
    ----------
    tma : `lsst.summit.utils.tmaUtils.TMAStateMachine`
        The TMA state machine model to initialize.
    """
    # Both axes get the same arbitrary-but-valid state: powered on, stopped,
    # and not in position.
    initialState = {
        "InPosition": False,
        "MotionState": AxisMotionState.STOPPED,
        "SystemState": PowerState.ON,
    }
    for axis in ("azimuth", "elevation"):
        for component, value in initialState.items():
            tma._parts[f"{axis}{component}"] = value

787 

788@dataclass(kw_only=True, frozen=True) 

789class TMAEvent: 

790 """A movement event for the TMA. 

791 

792 Contains the dayObs on which the event occured, using the standard 

793 observatory definition of the dayObs, and the sequence number of the event, 

794 which is unique for each event on a given dayObs. 

795 

796 The event type can be either 'SLEWING' or 'TRACKING', defined as: 

797 - SLEWING: some part of the TMA is in motion 

798 - TRACKING: both axes are in position and tracking the sky 

799 

800 The end reason can be 'STOPPED', 'TRACKING', 'FAULT', 'SLEWING', or 'OFF'. 

801 - SLEWING: The previous event was a TRACKING event, and one or more of 

802 the TMA components either stopped being in position, or stopped 

803 moving, or went into fault, or was turned off, and hence we are now 

804 only slewing and no longer tracking the sky. 

805 - TRACKING: the TMA started tracking the sky when it wasn't previously. 

806 Usualy this would always be preceded by directly by a SLEWING 

807 event, but this is not strictly true, as the EUI seems to be able 

808 to make the TMA start tracking the sky without slewing first. 

809 - STOPPED: the components of the TMA transitioned to the STOPPED state. 

810 - FAULT: the TMA went into fault. 

811 - OFF: the TMA components were turned off. 

812 

813 Note that this class is not intended to be instantiated directly, but 

814 rather to be returned by the ``TMAEventMaker.getEvents()`` function. 

815 

816 Parameters 

817 ---------- 

818 dayObs : `int` 

819 The dayObs on which the event occured. 

820 seqNum : `int` 

821 The sequence number of the event, 

822 type : `lsst.summit.utils.tmaUtils.TMAState` 

823 The type of the event, either 'SLEWING' or 'TRACKING'. 

824 endReason : `lsst.summit.utils.tmaUtils.TMAState` 

825 The reason the event ended, either 'STOPPED', 'TRACKING', 'FAULT', 

826 'SLEWING', or 'OFF'. 

827 duration : `float` 

828 The duration of the event, in seconds. 

829 begin : `astropy.time.Time` 

830 The time the event began. 

831 end : `astropy.time.Time` 

832 The time the event ended. 

833 blockInfos : `list` of `lsst.summit.utils.tmaUtils.BlockInfo`, or `None` 

834 The block infomation, if any, relating to the event. Could be `None`, 

835 or one or more block informations. 

836 version : `int` 

837 The version of the TMAEvent class. Equality between events is only 

838 valid for a given version of the class. If the class definition 

839 changes, the time ranges can change, and hence the equality between 

840 events is ``False``. 

841 _startRow : `int` 

842 The first row in the merged EFD data which is part of the event. 

843 _endRow : `int` 

844 The last row in the merged EFD data which is part of the event. 

845 """ 

846 

847 dayObs: int 

848 seqNum: int 

849 type: TMAState # can be 'SLEWING', 'TRACKING' 

850 endReason: TMAState # can be 'STOPPED', 'TRACKING', 'FAULT', 'SLEWING', 'OFF' 

851 duration: float # seconds 

852 begin: Time 

853 end: Time 

854 blockInfos: list = field(default_factory=list) 

855 version: int = 0 # update this number any time a code change which could change event definitions is made 

856 _startRow: int 

857 _endRow: int 

858 

859 def __lt__(self, other: TMAEvent) -> bool: 

860 if self.version != other.version: 

861 raise ValueError( 

862 f"Cannot compare TMAEvents with different versions: {self.version} != {other.version}" 

863 ) 

864 if self.dayObs < other.dayObs: 

865 return True 

866 elif self.dayObs == other.dayObs: 

867 return self.seqNum < other.seqNum 

868 return False 

869 

870 def __eq__(self, other: object) -> bool: 

871 if not isinstance(other, TMAEvent): 

872 return NotImplemented 

873 if self.version != other.version: 

874 return False 

875 return ( 

876 self.dayObs == other.dayObs 

877 and self.seqNum == other.seqNum 

878 and self.type == other.type 

879 and self.endReason == other.endReason 

880 and self.duration == other.duration 

881 and self.begin == other.begin 

882 and self.end == other.end 

883 # deliberately do not compare on blockInfos, so skip explicitly 

884 # and self.blockInfos == other.blockInfos 

885 and self._startRow == other._startRow 

886 and self._endRow == other._endRow 

887 ) 

888 

889 def __repr__(self) -> str: 

890 return ( 

891 f"TMAEvent(dayObs={self.dayObs}, seqNum={self.seqNum}, type={self.type!r}," 

892 f" endReason={self.endReason!r}, duration={self.duration}, begin={self.begin!r}," 

893 f" end={self.end!r}" 

894 ) 

895 

896 def __hash__(self) -> int: 

897 # deliberately don't hash the blockInfos here, as they are not 

898 # a core part of the event itself, and are listy and cause problems 

899 return hash( 

900 ( 

901 self.dayObs, 

902 self.seqNum, 

903 self.type, 

904 self.endReason, 

905 self.duration, 

906 self.begin, 

907 self.end, 

908 self.version, 

909 self._startRow, 

910 self._endRow, 

911 ) 

912 ) 

913 

914 def _ipython_display_(self) -> None: 

915 print(self.__str__()) 

916 

917 def __str__(self) -> str: 

918 def indent(string: str) -> str: 

919 return "\n" + "\n".join([" " + s for s in string.splitlines()]) 

920 

921 blockInfoStr = "None" 

922 if self.blockInfos is not None: 

923 blockInfoStr = "".join(indent(str(i)) for i in self.blockInfos) 

924 

925 return ( 

926 f"dayObs: {self.dayObs}\n" 

927 f"seqNum: {self.seqNum}\n" 

928 f"type: {self.type.name}\n" 

929 f"endReason: {self.endReason.name}\n" 

930 f"duration: {self.duration}\n" 

931 f"begin: {self.begin!r}\n" 

932 f"end: {self.end!r}\n" 

933 f"blockInfos: {blockInfoStr}" 

934 ) 

935 

936 def associatedWith( 

937 self, 

938 block: int | str | None = None, 

939 blockSeqNum: int | None = None, 

940 ticket: str | None = None, 

941 salIndex: int | None = None, 

942 ) -> bool: 

943 """Check whether an event is associated with a set of parameters. 

944 

945 Check if an event is associated with a specific block and/or ticket 

946 and/or salIndex. All specified parameters must match for the function 

947 to return True. If checking if an event is in a block, the blockSeqNum 

948 can also be specified to identify events which related to a given 

949 running the specified block. 

950 

951 Parameters 

952 ---------- 

953 block : `int`, optional 

954 The block number to check for. 

955 blockSeqNum : `int`, optional 

956 The block sequence number to check for, if the block is specified. 

957 ticket : `str`, optional 

958 The ticket number to check for. 

959 salIndex : `int`, optional 

960 The salIndex to check for. 

961 

962 Returns 

963 ------- 

964 relates : `bool` 

965 Whether the event is associated with the specified block, ticket, 

966 and salIndex. 

967 """ 

968 if isinstance(block, int): 

969 block = str(block) 

970 

971 if all([block is None, ticket is None, salIndex is None]): 

972 raise ValueError("Must specify at least one of block, ticket, or salIndex") 

973 

974 if blockSeqNum is not None and block is None: 

975 raise ValueError("block must be specified if blockSeqNum is specified") 

976 

977 for blockInfo in self.blockInfos: 

978 # "X is None or" is used for each parameter to allow it to be None 

979 # in the kwargs 

980 blockMatches = False 

981 if block is not None: 

982 if blockSeqNum is None and blockInfo.blockNumber == block: 

983 blockMatches = True 

984 elif ( 

985 blockSeqNum is not None 

986 and blockInfo.blockNumber == block 

987 and blockInfo.seqNum == blockSeqNum 

988 ): 

989 blockMatches = True 

990 else: 

991 blockMatches = True # no block specified at all, so it matches 

992 

993 salIndexMatches = salIndex is None or salIndex in blockInfo.salIndices 

994 ticketMatches = ticket is None or ticket in blockInfo.tickets 

995 

996 if blockMatches and salIndexMatches and ticketMatches: 

997 return True 

998 

999 return False 

1000 

1001 

class TMAState(enum.IntEnum):
    """The overall, collapsed state of the TMA.

    The meaning of each member:

    UNINITIALIZED
        Data has not yet been seen for every relevant component, so the
        overall state cannot be determined.
    STOPPED
        Every component is on, and nothing is moving.
    TRACKING
        The mount is tracking the sky.
    SLEWING
        At least one component is moving while the mount is not tracking the
        sky. This should probably be called MOVING, as it also covers
        MOVING_POINT_TO_POINT and JOGGING motion.
    FAULT
        All components (if engineeringMode) or any component (if not
        engineeringMode) are in fault.
    OFF
        Every component is off.
    """

    UNINITIALIZED = -1
    STOPPED = 0
    TRACKING = 1
    SLEWING = 2
    FAULT = 3
    OFF = 4

    def __repr__(self) -> str:
        return "TMAState." + self.name

1034 

1035 

def getAxisAndType(rowFor: str) -> tuple[str, str]:
    """Split a merged-dataframe row label into its axis and its data type.

    Parameters
    ----------
    rowFor : `str`
        The column in the dataframe denoting what this row is for, e.g.
        "elevationMotionState" or "azimuthInPosition", etc.

    Returns
    -------
    axis : `str`
        The axis the row is for, e.g. "azimuth", "elevation".
    rowType : `str`
        The type of the row, e.g. "MotionState", "SystemState", "InPosition".

    Raises
    ------
    ValueError
        If ``rowFor`` does not end with a recognized axis/type combination.
    """
    # anchored at the end of the string so any prefix is tolerated
    pattern = r"(azimuth|elevation)(InPosition|MotionState|SystemState)$"
    found = re.search(pattern, rowFor)
    if found is None:
        raise ValueError(f"Could not parse axis and rowType from {rowFor=}")
    axis, rowType = found.groups()

    assert rowFor.endswith(f"{axis}{rowType}")
    return axis, rowType

1061 

1062 

class ListViewOfDict:
    """A list-like view whose elements live in an underlying dictionary.

    Building a plain list from a dictionary copies the values out at
    construction time, whereas this view resolves each element through the
    dictionary on every access. Reads and writes through the view therefore
    see, and modify, the dictionary itself, which is useful for manipulating
    components in their logical groupings.
    """

    def __init__(self, underlyingDictionary: dict, keysToLink: list):
        self.dictionary = underlyingDictionary
        self.keys = keysToLink

    def __getitem__(self, index: Any) -> Any:
        key = self.keys[index]
        return self.dictionary[key]

    def __setitem__(self, index: int, value: Any) -> None:
        key = self.keys[index]
        self.dictionary[key] = value

    def __len__(self) -> int:
        return len(self.keys)

    def __iter__(self) -> Any:
        # a fresh generator per call, reading through to the dict each time
        return (self.dictionary[key] for key in self.keys)

1089 

1090 

1091class TMAStateMachine: 

1092 """A state machine model of the TMA. 

1093 

1094 Note that this is currently only implemented for the azimuth and elevation 

1095 axes, but will be extended to include the rotator in the future. 

1096 

1097 Note that when used for event generation, changing ``engineeringMode`` to 

1098 False might change the resulting list of events, and that if the TMA moves 

1099 with some axis in fault, then these events will be missed. It is therefore 

1100 thought that ``engineeringMode=True`` should always be used when generating 

1101 events. The option, however, is there for completeness, as this will be 

1102 useful for knowing is the CSC would consider the TMA to be in fault in the 

1103 general case. 

1104 

1105 Parameters 

1106 ---------- 

1107 engineeringMode : `bool`, optional 

1108 Whether the TMA is in engineering mode. Defaults to True. If False, 

1109 then the TMA will be in fault if any component is in fault. If True, 

1110 then the TMA will be in fault only if all components are in fault. 

1111 debug : `bool`, optional 

1112 Whether to log debug messages. Defaults to False. 

1113 """ 

1114 

1115 _UNINITIALIZED_VALUE: int = -999 

1116 

1117 def __init__(self, engineeringMode: bool = True, debug: bool = False): 

1118 self.engineeringMode = engineeringMode 

1119 self.log = logging.getLogger("lsst.summit.utils.tmaUtils.TMA") 

1120 if debug: 

1121 self.log.level = logging.DEBUG 

1122 self._mostRecentRowTime = -1 

1123 

1124 # the actual components of the TMA 

1125 self._parts = { 

1126 "azimuthInPosition": self._UNINITIALIZED_VALUE, 

1127 "azimuthMotionState": self._UNINITIALIZED_VALUE, 

1128 "azimuthSystemState": self._UNINITIALIZED_VALUE, 

1129 "elevationInPosition": self._UNINITIALIZED_VALUE, 

1130 "elevationMotionState": self._UNINITIALIZED_VALUE, 

1131 "elevationSystemState": self._UNINITIALIZED_VALUE, 

1132 } 

1133 systemKeys = ["azimuthSystemState", "elevationSystemState"] 

1134 positionKeys = ["azimuthInPosition", "elevationInPosition"] 

1135 motionKeys = ["azimuthMotionState", "elevationMotionState"] 

1136 

1137 # references to the _parts as conceptual groupings 

1138 self.system = ListViewOfDict(self._parts, systemKeys) 

1139 self.motion = ListViewOfDict(self._parts, motionKeys) 

1140 self.inPosition = ListViewOfDict(self._parts, positionKeys) 

1141 

1142 # tuples of states for state collapsing. Note that STOP_LIKE + 

1143 # MOVING_LIKE must cover the full set of AxisMotionState enums 

1144 self.STOP_LIKE = (AxisMotionState.STOPPING, AxisMotionState.STOPPED, AxisMotionState.TRACKING_PAUSED) 

1145 self.MOVING_LIKE = ( 

1146 AxisMotionState.MOVING_POINT_TO_POINT, 

1147 AxisMotionState.JOGGING, 

1148 AxisMotionState.TRACKING, 

1149 ) 

1150 # Likewise, ON_LIKE + OFF_LIKE must cover the full set of PowerState 

1151 # enums 

1152 self.OFF_LIKE = (PowerState.OFF, PowerState.TURNING_OFF) 

1153 self.ON_LIKE = (PowerState.ON, PowerState.TURNING_ON) 

1154 self.FAULT_LIKE = (PowerState.FAULT,) # note the trailing comma - this must be an iterable 

1155 

1156 def apply(self, row: pd.Series) -> None: 

1157 """Apply a row of data to the TMA state. 

1158 

1159 Checks that the row contains data for a later time than any data 

1160 previously applied, and applies the relevant column entry to the 

1161 relevant component. 

1162 

1163 Parameters 

1164 ---------- 

1165 row : `pd.Series` 

1166 The row of data to apply to the state machine. 

1167 """ 

1168 timestamp = row["private_efdStamp"] 

1169 if timestamp < self._mostRecentRowTime: # NB equals is OK, technically, though it never happens 

1170 raise ValueError( 

1171 "TMA evolution must be monotonic increasing in time, tried to apply a row which" 

1172 " predates the most previous one" 

1173 ) 

1174 self._mostRecentRowTime = timestamp 

1175 

1176 rowFor = row["rowFor"] # e.g. elevationMotionState 

1177 axis, rowType = getAxisAndType(rowFor) # e.g. elevation, MotionState 

1178 value = self._getRowPayload(row, rowType, rowFor) 

1179 self.log.debug(f"Setting {rowFor} to {repr(value)}") 

1180 self._parts[rowFor] = value 

1181 try: 

1182 # touch the state property as this executes the sieving, to make 

1183 # sure we don't fall through the sieve at any point in time 

1184 _ = self.state 

1185 except RuntimeError as e: 

1186 # improve error reporting, but always reraise this, as this is a 

1187 # full-blown failure 

1188 raise RuntimeError(f"Failed to apply {value} to {axis}{rowType} with state {self._parts}") from e 

1189 

1190 def _getRowPayload( 

1191 self, row: pd.Series, rowType: str, rowFor: str 

1192 ) -> bool | AxisMotionState | PowerState: 

1193 """Get the relevant value from the row. 

1194 

1195 Given the row, and which component it relates to, get the relevant 

1196 value, as a bool or cast to the appropriate enum class. 

1197 

1198 Parameters 

1199 ---------- 

1200 row : `pd.Series` 

1201 The row of data from the dataframe. 

1202 rowType : `str` 

1203 The type of the row, e.g. "MotionState", "SystemState", 

1204 "InPosition". 

1205 rowFor : `str` 

1206 The component the row is for, e.g. "azimuth", "elevation". 

1207 

1208 Returns 

1209 ------- 

1210 value : `bool` or `enum` 

1211 The value of the row, as a bool or enum, depending on the 

1212 component, cast to the appropriate enum class or bool. 

1213 """ 

1214 match rowType: 

1215 case "MotionState": 

1216 value = row[f"state_{rowFor}"] 

1217 return AxisMotionState(value) 

1218 case "SystemState": 

1219 value = row[f"powerState_{rowFor}"] 

1220 return PowerState(value) 

1221 case "InPosition": 

1222 value = row[f"inPosition_{rowFor}"] 

1223 return bool(value) 

1224 case _: 

1225 raise ValueError(f"Failed to get row payload with {rowType=} and {row=}") 

1226 

1227 @property 

1228 def _isValid(self) -> bool: 

1229 """Has the TMA had a value applied to all its components? 

1230 

1231 If any component has not yet had a value applied, the TMA is not valid, 

1232 as those components will be in an unknown state. 

1233 

1234 Returns 

1235 ------- 

1236 isValid : `bool` 

1237 Whether the TMA is fully initialized. 

1238 """ 

1239 return not any([v == self._UNINITIALIZED_VALUE for v in self._parts.values()]) 

1240 

1241 # state inspection properties - a high level way of inspecting the state as 

1242 # an API 

1243 @property 

1244 def isMoving(self) -> bool: 

1245 return self.state in [TMAState.TRACKING, TMAState.SLEWING] 

1246 

1247 @property 

1248 def isNotMoving(self) -> bool: 

1249 return not self.isMoving 

1250 

1251 @property 

1252 def isTracking(self) -> bool: 

1253 return self.state == TMAState.TRACKING 

1254 

1255 @property 

1256 def isSlewing(self) -> bool: 

1257 return self.state == TMAState.SLEWING 

1258 

1259 @property 

1260 def canMove(self) -> bool: 

1261 badStates = [PowerState.OFF, PowerState.TURNING_OFF, PowerState.FAULT, PowerState.UNKNOWN] 

1262 return bool( 

1263 self._isValid 

1264 and self._parts["azimuthSystemState"] not in badStates 

1265 and self._parts["elevationSystemState"] not in badStates 

1266 ) 

1267 

1268 # Axis inspection properties, designed for internal use. These return 

1269 # iterables so that they can be used in any() and all() calls, which make 

1270 # the logic much easier to read, e.g. to see if anything is moving, we can 

1271 # write `if not any(_axisInMotion):` 

1272 @property 

1273 def _axesInFault(self) -> list[bool]: 

1274 return [x in self.FAULT_LIKE for x in self.system] 

1275 

1276 @property 

1277 def _axesOff(self) -> list[bool]: 

1278 return [x in self.OFF_LIKE for x in self.system] 

1279 

1280 @property 

1281 def _axesOn(self) -> list[bool]: 

1282 return [not x for x in self._axesOn] 

1283 

1284 @property 

1285 def _axesInMotion(self) -> list[bool]: 

1286 return [x in self.MOVING_LIKE for x in self.motion] 

1287 

1288 @property 

1289 def _axesTRACKING(self) -> list[bool]: 

1290 """Note this is deliberately named _axesTRACKING and not _axesTracking 

1291 to make it clear that this is the AxisMotionState type of TRACKING and 

1292 not the normal conceptual notion of tracking (the sky, i.e. as opposed 

1293 to slewing). 

1294 """ 

1295 return [x == AxisMotionState.TRACKING for x in self.motion] 

1296 

1297 @property 

1298 def _axesInPosition(self) -> list[bool]: 

1299 return [x is True for x in self.inPosition] 

1300 

1301 @property 

1302 def state(self) -> TMAState: 

1303 """The overall state of the TMA. 

1304 

1305 Note that this is both a property, and also the method which applies 

1306 the logic sieve to determine the state at a given point in time. 

1307 

1308 Returns 

1309 ------- 

1310 state : `lsst.summit.utils.tmaUtils.TMAState` 

1311 The overall state of the TMA. 

1312 """ 

1313 # first, check we're valid, and if not, return UNINITIALIZED state, as 

1314 # things are unknown 

1315 if not self._isValid: 

1316 return TMAState.UNINITIALIZED 

1317 

1318 # if we're not in engineering mode, i.e. we're under normal CSC 

1319 # control, then if anything is in fault, we're in fault. If we're 

1320 # engineering then some axes will move when others are in fault 

1321 if not self.engineeringMode: 

1322 if any(self._axesInFault): 

1323 return TMAState.FAULT 

1324 else: 

1325 # we're in engineering mode, so return fault state if ALL are in 

1326 # fault 

1327 if all(self._axesInFault): 

1328 return TMAState.FAULT 

1329 

1330 # if all axes are off, the TMA is OFF 

1331 if all(self._axesOff): 

1332 return TMAState.OFF 

1333 

1334 # we know we're valid and at least some axes are not off, so see if 

1335 # we're in motion if no axes are moving, we're stopped 

1336 if not any(self._axesInMotion): 

1337 return TMAState.STOPPED 

1338 

1339 # now we know we're initialized, and that at least one axis is moving 

1340 # so check axes for motion and in position. If all axes are tracking 

1341 # and all are in position, we're tracking the sky 

1342 if all(self._axesTRACKING) and all(self._axesInPosition): 

1343 return TMAState.TRACKING 

1344 

1345 # we now know explicitly that not everything is in position, so we no 

1346 # longer need to check that. We do actually know that something is in 

1347 # motion, but confirm that's the case and return SLEWING 

1348 if any(self._axesInMotion): 

1349 return TMAState.SLEWING 

1350 

1351 # if we want to differentiate between MOVING_POINT_TO_POINT moves, 

1352 # JOGGING moves and regular slews, the logic in the step above needs to 

1353 # be changed and the new steps added here. 

1354 

1355 raise RuntimeError("State error: fell through the state sieve - rewrite your logic!") 

1356 

1357 

1358class TMAEventMaker: 

1359 """A class to create per-dayObs TMAEvents for the TMA's movements. 

1360 

1361 If this class is being used in tests, make sure to pass the EFD client in, 

1362 and create it with `makeEfdClient(testing=True)`. This ensures that the 

1363 USDF EFD is "used" as this is the EFD which has the recorded data available 

1364 in the test suite via `vcr`. 

1365 

1366 Example usage: 

1367 >>> dayObs = 20230630 

1368 >>> eventMaker = TMAEventMaker() 

1369 >>> events = eventMaker.getEvents(dayObs) 

1370 >>> print(f'Found {len(events)} for {dayObs=}') 

1371 

1372 Parameters 

1373 ---------- 

1374 client : `lsst_efd_client.efd_helper.EfdClient`, optional 

1375 The EFD client to use, created if not provided. 

1376 """ 

1377 

    # The topics which need logical combination to determine the overall
    # mount state. These will need updating as new components (e.g. the
    # rotator) are added to the system.

    # relevant column: 'state'
    _movingComponents = [
        "lsst.sal.MTMount.logevent_azimuthMotionState",
        "lsst.sal.MTMount.logevent_elevationMotionState",
    ]

    # relevant column: 'inPosition'
    _inPositionComponents = [
        "lsst.sal.MTMount.logevent_azimuthInPosition",
        "lsst.sal.MTMount.logevent_elevationInPosition",
    ]

    # the components which, if in fault, put the TMA into fault
    # relevant column: 'powerState'
    _stateComponents = [
        "lsst.sal.MTMount.logevent_azimuthSystemState",
        "lsst.sal.MTMount.logevent_elevationSystemState",
    ]

1399 

1400 def __init__(self, client: EfdClient | None = None): 

1401 if client is not None: 

1402 self.client = client 

1403 else: 

1404 self.client = makeEfdClient() 

1405 self.log = logging.getLogger(__name__) 

1406 self._data: dict = {} 

1407 

    @dataclass(frozen=True)
    class ParsedState:
        """An immutable record of a parsed state transition, used internally
        during event generation.

        NOTE(review): ``eventStart`` is annotated as `Time` while ``eventEnd``
        is an `int`; confirm whether this asymmetry is intentional.
        """

        eventStart: Time
        eventEnd: int
        previousState: TMAState
        state: TMAState

1414 

1415 @staticmethod 

1416 def isToday(dayObs: int) -> bool: 

1417 """Find out if the specified dayObs is today, or in the past. 

1418 

1419 If the day is today, the function returns ``True``, if it is in the 

1420 past it returns ``False``. If the day is in the future, a 

1421 ``ValueError`` is raised, as this indicates there is likely an 

1422 off-by-one type error somewhere in the logic. 

1423 

1424 Parameters 

1425 ---------- 

1426 dayObs : `int` 

1427 The dayObs to check, in the format YYYYMMDD. 

1428 

1429 Returns 

1430 ------- 

1431 isToday : `bool` 

1432 ``True`` if the dayObs is today, ``False`` if it is in the past. 

1433 

1434 Raises 

1435 ValueError: if the dayObs is in the future. 

1436 """ 

1437 todayDayObs = getCurrentDayObsInt() 

1438 if dayObs == todayDayObs: 

1439 return True 

1440 if dayObs > todayDayObs: 

1441 raise ValueError("dayObs is in the future") 

1442 return False 

1443 

1444 @staticmethod 

1445 def _shortName(topic: str) -> str: 

1446 """Get the short name of a topic. 

1447 

1448 Parameters 

1449 ---------- 

1450 topic : `str` 

1451 The topic to get the short name of. 

1452 

1453 Returns 

1454 ------- 

1455 shortName : `str` 

1456 The short name of the topic, e.g. 'azimuthInPosition' 

1457 """ 

1458 # get, for example 'azimuthInPosition' from 

1459 # lsst.sal.MTMount.logevent_azimuthInPosition 

1460 return topic.split("_")[-1] 

1461 

    def _mergeData(self, data: dict[str, pd.DataFrame]) -> pd.DataFrame:
        """Merge a dict of dataframes based on private_efdStamp, recording
        where each row came from.

        Given a dict of dataframes, keyed by topic, merge them into a single
        dataframe, adding a column to record which topic each row came from.
        Empty input dataframes are skipped; at least one must be non-empty,
        otherwise the assert below fires.

        Parameters
        ----------
        data : `dict` of `str` : `pd.DataFrame`
            The dataframes to merge.

        Returns
        -------
        merged : `pd.DataFrame`
            The merged dataframe.
        """
        # columns shared by every frame; these are the implicit merge keys
        # for pd.merge below and so must not be suffixed
        excludeColumns = ["private_efdStamp", "rowFor"]

        mergeArgs = {
            "how": "outer",
            "sort": True,
        }

        merged = None
        originalRowCounter = 0

        # Iterate over the keys and merge the corresponding DataFrames
        for key, df in data.items():
            if df.empty:
                # Must skip the df if it's empty, otherwise the merge will fail
                # due to lack of private_efdStamp. Because other axes might
                # still be in motion, so we still want to merge what we have
                continue

            originalRowCounter += len(df)
            # Add suffix to column names to identify the source topic
            component = self._shortName(key)
            suffix = "_" + component

            # NOTE(review): this writes a column into the caller's dataframe
            # in place - confirm that mutation is acceptable here
            df["rowFor"] = component

            columnsToSuffix = [col for col in df.columns if col not in excludeColumns]
            df_to_suffix = df[columnsToSuffix].add_suffix(suffix)
            df = pd.concat([df[excludeColumns], df_to_suffix], axis=1)

            if merged is None:
                merged = df.copy()
            else:
                merged = pd.merge(merged, df, **mergeArgs)
        assert merged is not None, "merged data is None"
        merged = merged.loc[:, ~merged.columns.duplicated()]  # Remove duplicate columns after merge

        if len(merged) != originalRowCounter:
            self.log.warning(
                "Merged data has a different number of rows to the original data, some"
                " timestamps (rows) will contain more than one piece of actual information."
            )

        # if the index is still a DatetimeIndex here then we didn't actually
        # merge any data, so there is only data from a single component.
        # This is likely to result in no events, but not necessarily, and for
        # generality, instead we convert to a range index to ensure consistency
        # in the returned data, and allow processing to continue.
        if isinstance(merged.index, pd.DatetimeIndex):
            self.log.warning("Data was only found for a single component in the EFD.")
            merged.reset_index(drop=True, inplace=True)

        return merged

1530 

1531 def getEvent(self, dayObs: int, seqNum: int) -> TMAEvent | None: 

1532 """Get a specific event for a given dayObs and seqNum. 

1533 

1534 Repeated calls for the same ``dayObs`` will use the cached data if the 

1535 day is in the past, and so will be much quicker. If the ``dayObs`` is 

1536 the current day then the EFD will be queried for new data for each 

1537 call, so a call which returns ``None`` on the first try might return an 

1538 event on the next, if the TMA is still moving and thus generating 

1539 events. 

1540 

1541 Parameters 

1542 ---------- 

1543 dayObs : `int` 

1544 The dayObs to get the event for. 

1545 seqNum : `int` 

1546 The sequence number of the event to get. 

1547 

1548 Returns 

1549 ------- 

1550 event : `lsst.summit.utils.tmaUtils.TMAEvent` 

1551 The event for the specified dayObs and seqNum, or `None` if the 

1552 event was not found. 

1553 """ 

1554 events = self.getEvents(dayObs) 

1555 if seqNum <= len(events): 

1556 event = events[seqNum] 

1557 if event.seqNum != seqNum: 

1558 # it's zero-indexed and contiguous so this must be true but 

1559 # a sanity check doesn't hurt. 

1560 raise AssertionError(f"Event sequence number mismatch: {event.seqNum} != {seqNum}") 

1561 return event 

1562 else: 

1563 self.log.warning(f"Event {seqNum} not found for {dayObs}") 

1564 return None 

1565 

1566 def getEvents(self, dayObs: int, addBlockInfo: bool = True) -> list[TMAEvent]: 

1567 """Get the TMA events for the specified dayObs. 

1568 

1569 Gets the required mount data from the cache or the EFD as required, 

1570 handling whether we're working with live vs historical data. The 

1571 dataframes from the EFD is merged and applied to the TMAStateMachine, 

1572 and that series of state changes is used to generate a list of 

1573 TmaEvents for the day's data. 

1574 

1575 If the data is for the current day, i.e. if new events can potentially 

1576 land, then if the last event is "open" (meaning that the TMA appears to 

1577 be in motion and thus the event is growing with time), then that event 

1578 is excluded from the event list as it is expected to be changing with 

1579 time, and will likely close eventually. However, if that situation 

1580 occurs on a day in the past, then that event can never close, and the 

1581 event is therefore included, but a warning about the open event is 

1582 logged. 

1583 

1584 Parameters 

1585 ---------- 

1586 dayObs : `int` 

1587 The dayObs for which to get the events. 

1588 addBlockInfo : `bool`, optional 

1589 Whether to add block information to the events. This allows 

1590 skipping this step for speed when generating events for purposes 

1591 which don't need block information. 

1592 

1593 Returns 

1594 ------- 

1595 events : `list` of `lsst.summit.utils.tmaUtils.TMAState` 

1596 The events for the specified dayObs. 

1597 """ 

1598 workingLive = self.isToday(dayObs) 

1599 data = None 

1600 

1601 if workingLive: 

1602 # it's potentially updating data, so we must update the date 

1603 # regarless of whether we have it already or not 

1604 self.log.info(f"Updating mount data for {dayObs} from the EFD") 

1605 self._getEfdDataForDayObs(dayObs) 

1606 data = self._data[dayObs] 

1607 elif dayObs in self._data: 

1608 # data is in the cache and it's not being updated, so use it 

1609 data = self._data[dayObs] 

1610 elif dayObs not in self._data: 

1611 # we don't have the data yet, but it's not growing, so put it in 

1612 # the cache and use it from there 

1613 self.log.info(f"Retrieving mount data for {dayObs} from the EFD") 

1614 self._getEfdDataForDayObs(dayObs) 

1615 data = self._data[dayObs] 

1616 else: 

1617 raise RuntimeError("This should never happen") 

1618 

1619 # if we don't have something to work with, log a warning and return 

1620 if not self.dataFound(data): 

1621 self.log.warning(f"No EFD data found for {dayObs=}") 

1622 return [] 

1623 

1624 # applies the data to the state machine, and generates events from the 

1625 # series of states which results 

1626 events = self._calculateEventsFromMergedData( 

1627 data, dayObs, dataIsForCurrentDay=workingLive, addBlockInfo=addBlockInfo 

1628 ) 

1629 if not events: 

1630 self.log.warning(f"Failed to calculate any events for {dayObs=} despite EFD data existing!") 

1631 return events 

1632 

1633 @staticmethod 

1634 def dataFound(data: pd.DataFrame) -> bool: 

1635 """Check if any data was found. 

1636 

1637 Parameters 

1638 ---------- 

1639 data : `pd.DataFrame` 

1640 The merged dataframe to check. 

1641 

1642 Returns 

1643 ------- 

1644 dataFound : `bool` 

1645 Whether data was found. 

1646 """ 

1647 # You can't just compare to with data == NO_DATA_SENTINEL because 

1648 # `data` is usually a dataframe, and you can't compare a dataframe to a 

1649 # string directly. 

1650 return not (isinstance(data, str) and data == NO_DATA_SENTINEL) 

1651 

1652 def _getEfdDataForDayObs(self, dayObs: int) -> None: 

1653 """Get the EFD data for the specified dayObs and store it in the cache. 

1654 

1655 Gets the EFD data for all components, as a dict of dataframes keyed by 

1656 component name. These are then merged into a single dataframe in time 

1657 order, based on each row's `private_efdStamp`. This is then stored in 

1658 self._data[dayObs]. 

1659 

1660 If no data is found, the value is set to ``NO_DATA_SENTINEL`` to 

1661 differentiate this from ``None``, as this is what you'd get if you 

1662 queried the cache with `self._data.get(dayObs)`. It also marks that we 

1663 have already queried this day. 

1664 

1665 Parameters 

1666 ---------- 

1667 dayObs : `int` 

1668 The dayObs to query. 

1669 """ 

1670 data = {} 

1671 for component in itertools.chain( 

1672 self._movingComponents, self._inPositionComponents, self._stateComponents 

1673 ): 

1674 data[component] = getEfdData( 

1675 self.client, 

1676 component, 

1677 dayObs=dayObs, 

1678 warn=False, 

1679 raiseIfTopicNotInSchema=False, 

1680 ) 

1681 self.log.debug(f"Found {len(data[component])} for {component}") 

1682 

1683 if all(dataframe.empty for dataframe in data.values()): 

1684 # if every single dataframe is empty, set the sentinel and don't 

1685 # try to merge anything, otherwise merge all the data we found 

1686 self.log.debug(f"No data found for {dayObs=}") 

1687 # a sentinel value that's not None 

1688 self._data[dayObs] = NO_DATA_SENTINEL 

1689 else: 

1690 merged = self._mergeData(data) 

1691 self._data[dayObs] = merged 

1692 

1693 def _calculateEventsFromMergedData( 

1694 self, data: pd.DataFrame, dayObs: int, dataIsForCurrentDay: bool, addBlockInfo: bool 

1695 ) -> list[TMAEvent]: 

1696 """Calculate the list of events from the merged data. 

1697 

1698 Runs the merged data, row by row, through the TMA state machine (with 

1699 ``tma.apply``) to get the overall TMA state at each row, building a 

1700 dict of these states, keyed by row number. 

1701 

1702 This time-series of TMA states are then looped over (in 

1703 `_statesToEventTuples`), building a list of tuples representing the 

1704 start and end of each event, the type of the event, and the reason for 

1705 the event ending. 

1706 

1707 This list of tuples is then passed to ``_makeEventsFromStateTuples``, 

1708 which actually creates the ``TMAEvent`` objects. 

1709 

1710 Parameters 

1711 ---------- 

1712 data : `pd.DataFrame` 

1713 The merged dataframe to use. 

1714 dayObs : `int` 

1715 The dayObs for the data. 

1716 dataIsForCurrentDay : `bool` 

1717 Whether the data is for the current day. Determines whether to 

1718 allow an open last event or not. 

1719 addBlockInfo : `bool` 

1720 Whether to add block information to the events. This allows 

1721 skipping this step for speed when generating events for purposes 

1722 which don't need block information. 

1723 

1724 Returns 

1725 ------- 

1726 events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent` 

1727 The events for the specified dayObs. 

1728 """ 

1729 engineeringMode = True 

1730 tma = TMAStateMachine(engineeringMode=engineeringMode) 

1731 

1732 # For now, we assume that the TMA starts each day able to move, but 

1733 # stationary. If this turns out to cause problems, we will need to 

1734 # change to loading data from the previous day(s), and looking back 

1735 # through it in time until a state change has been found for every 

1736 # axis. For now though, Bruno et. al think this is acceptable and 

1737 # preferable. 

1738 _initializeTma(tma) 

1739 

1740 tmaStates = {} 

1741 for rowNum, row in data.iterrows(): 

1742 assert isinstance(rowNum, int) 

1743 tma.apply(row) 

1744 tmaStates[rowNum] = tma.state 

1745 

1746 stateTuples = self._statesToEventTuples(tmaStates, dataIsForCurrentDay) 

1747 events = self._makeEventsFromStateTuples(stateTuples, dayObs, data) 

1748 if addBlockInfo: 

1749 self.addBlockDataToEvents(dayObs, events) 

1750 return events 

1751 

    def _statesToEventTuples(
        self, states: dict[int, TMAState], dataIsForCurrentDay: bool
    ) -> list[ParsedState]:
        """Get the event-tuples from the dictionary of TMAStates.

        Chunks the states into blocks of the same state, so that we can create
        an event for each block in `_makeEventsFromStateTuples`. Off-type
        states are skipped over, with each event starting when the telescope
        next resumes motion or changes to a different type of motion state,
        i.e. from non-tracking type movement (MOVE_POINT_TO_POINT, JOGGING,
        TRACKING-but-not-in-position, i.e. slewing) to a tracking type
        movement, or vice versa.

        Parameters
        ----------
        states : `dict` of `int` : `lsst.summit.utils.tmaUtils.TMAState`
            The states of the TMA, keyed by row number.
        dataIsForCurrentDay : `bool`
            Whether the data is for the current day. Determines whether to
            allow an open last event or not.

        Returns
        -------
        parsedStates : `list` of `tuple`
            The parsed states, as a list of tuples of the form:
            ``(eventStart, eventEnd, eventType, endReason)``
        """
        # Consider rewriting this with states as a list and using pop(0)?
        # States in which the TMA is not doing anything event-worthy; rows in
        # these states are skipped over when not already inside an event.
        skipStates = (TMAState.STOPPED, TMAState.OFF, TMAState.FAULT)

        parsedStates = []
        eventStart = None  # None means "not currently inside an event"
        rowNum = 0
        nRows = len(states)
        while rowNum < nRows:
            previousState = None
            state = states[rowNum]
            # if we're not in an event, fast forward through off-like rows
            # until a new event starts
            if eventStart is None and state in skipStates:
                rowNum += 1
                continue

            # we've started a new event, so walk through it and find the end
            eventStart = rowNum
            previousState = state
            rowNum += 1  # move to the next row before starting the while loop
            if rowNum == nRows:
                # we've reached the end of the data, and we're still in an
                # event, so don't return this presumably in-progress event
                self.log.warning("Reached the end of the data while starting a new event")
                break
            state = states[rowNum]
            # advance until the state changes; if we run off the end here,
            # eventEnd == nRows, which is caught and handled below
            while state == previousState:
                rowNum += 1
                if rowNum == nRows:
                    break
                state = states[rowNum]
            parsedStates.append(
                self.ParsedState(
                    eventStart=eventStart, eventEnd=rowNum, previousState=previousState, state=state
                )
            )
            # only reset to "not in an event" if we landed on an off-like
            # state; otherwise the next event starts immediately
            if state in skipStates:
                eventStart = None

        # done parsing, just check the last event is valid
        if parsedStates:  # ensure we have at least one event
            lastEvent = parsedStates[-1]
            if lastEvent.eventEnd == nRows:
                # Generally, you *want* the timespan for an event to be the
                # first row of the next event, because you were in that state
                # right up until that state change. However, if that event is
                # a) the last one of the day and b) runs right up until the end
                # of the dataframe, then there isn't another row, so this will
                # overrun the array.
                #
                # If the data is for the current day then this isn't a worry,
                # as we're likely still taking data, and this event will likely
                # close yet, so we don't issue a warning, and simply drop the
                # event from the list.

                # However, if the data is for a past day then no new data will
                # come to close the event, so allow the event to be "open", and
                # issue a warning
                if dataIsForCurrentDay:
                    self.log.info("Discarding open (likely in-progess) final event from current day's events")
                    parsedStates = parsedStates[:-1]
                else:
                    self.log.warning("Last event ends open, forcing it to end at end of the day's data")
                    # it's a tuple, so (deliberately) awkward to modify
                    parsedStates[-1] = self.ParsedState(
                        eventStart=lastEvent.eventStart,
                        eventEnd=lastEvent.eventEnd - 1,
                        previousState=lastEvent.previousState,
                        state=lastEvent.state,
                    )

        return parsedStates

1851 

1852 def addBlockDataToEvents( 

1853 self, 

1854 dayObs: int, 

1855 events: list[TMAEvent] | TMAEvent, 

1856 ) -> None: 

1857 """Find all the block data in the EFD for the specified events. 

1858 

1859 Finds all the block data in the EFD relating to the events, parses it, 

1860 from the rows of the dataframe, and adds it to the events in place. 

1861 

1862 Parameters 

1863 ---------- 

1864 events : `lsst.summit.utils.tmaUtils.TMAEvent` or 

1865 `list` of `lsst.summit.utils.tmaUtils.TMAEvent` 

1866 One or more events to get the block data for. 

1867 """ 

1868 try: 

1869 blockParser = BlockParser(dayObs, client=self.client) 

1870 except Exception as e: 

1871 # adding the block data should never cause a failure so if we can't 

1872 # get the block data, log a warning and return. It is, however, 

1873 # never expected, so use log.exception to get the full traceback 

1874 # and scare users so it gets reported 

1875 self.log.exception(f"Failed to parse block data for {dayObs=}, {e}") 

1876 return 

1877 if not isinstance(events, list): 

1878 events = [events] 

1879 blocks = blockParser.getBlockNums() 

1880 blockDict = {} 

1881 for block in blocks: 

1882 blockDict[block] = blockParser.getSeqNums(block) 

1883 

1884 for block, seqNums in blockDict.items(): 

1885 for seqNum in seqNums: 

1886 blockInfo = blockParser.getBlockInfo(block=block, seqNum=seqNum) 

1887 

1888 relatedEvents = blockParser.getEventsForBlock(events, block=block, seqNum=seqNum) 

1889 for event in relatedEvents: 

1890 toSet = [blockInfo] 

1891 if event.blockInfos is not None: 

1892 existingInfo = event.blockInfos 

1893 existingInfo.append(blockInfo) 

1894 toSet = existingInfo 

1895 

1896 # Add the blockInfo to the TMAEvent. Because this is a 

1897 # frozen dataclass, use object.__setattr__ to set the 

1898 # attribute. This is the correct way to set a frozen 

1899 # dataclass attribute after creation. 

1900 object.__setattr__(event, "blockInfos", toSet) 

1901 

1902 def _makeEventsFromStateTuples( 

1903 self, states: list[ParsedState], dayObs: int, data: pd.DataFrame 

1904 ) -> list[TMAEvent]: 

1905 """For the list of state-tuples, create a list of ``TMAEvent`` objects. 

1906 

1907 Given the underlying data, and the start/stop points for each event, 

1908 create the TMAEvent objects for the dayObs. 

1909 

1910 Parameters 

1911 ---------- 

1912 states : `list` of `tuple` 

1913 The parsed states, as a list of tuples of the form: 

1914 ``(eventStart, eventEnd, eventType, endReason)`` 

1915 dayObs : `int` 

1916 The dayObs for the data. 

1917 data : `pd.DataFrame` 

1918 The merged dataframe. 

1919 

1920 Returns 

1921 ------- 

1922 events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent` 

1923 The events for the specified dayObs. 

1924 """ 

1925 seqNum = 0 

1926 events = [] 

1927 for parsedState in states: 

1928 begin = data.iloc[parsedState.eventStart]["private_efdStamp"] 

1929 end = data.iloc[parsedState.eventEnd]["private_efdStamp"] 

1930 beginAstropy = efdTimestampToAstropy(begin) 

1931 endAstropy = efdTimestampToAstropy(end) 

1932 duration = end - begin 

1933 event = TMAEvent( 

1934 dayObs=dayObs, 

1935 seqNum=seqNum, 

1936 type=parsedState.previousState, 

1937 endReason=parsedState.state, 

1938 duration=duration, 

1939 begin=beginAstropy, 

1940 end=endAstropy, 

1941 blockInfos=[], # this is added later 

1942 _startRow=parsedState.eventStart, 

1943 _endRow=parsedState.eventEnd, 

1944 ) 

1945 events.append(event) 

1946 seqNum += 1 

1947 return events 

1948 

1949 @staticmethod 

1950 def printTmaDetailedState(tma: TMAStateMachine) -> None: 

1951 """Print the full state of all the components of the TMA. 

1952 

1953 Currently this is the azimuth and elevation axes' power and motion 

1954 states, and their respective inPosition statuses. 

1955 

1956 Parameters 

1957 ---------- 

1958 tma : `lsst.summit.utils.tmaUtils.TMAStateMachine` 

1959 The TMA state machine in the state we want to print. 

1960 """ 

1961 axes = ["azimuth", "elevation"] 

1962 p = tma._parts 

1963 axisPad = len(max(axes, key=len)) # length of the longest axis string == 9 here, but this is general 

1964 motionPad = max(len(s.name) for s in AxisMotionState) 

1965 powerPad = max(len(s.name) for s in PowerState) 

1966 

1967 # example output to show what's being done with the padding: 

1968 # azimuth - Power: ON Motion: STOPPED InPosition: True # noqa: W505 

1969 # elevation - Power: ON Motion: MOVING_POINT_TO_POINT InPosition: False # noqa: W505 

1970 for axis in axes: 

1971 powerState = p[f"{axis}SystemState"] 

1972 motionState = p[f"{axis}MotionState"] 

1973 assert isinstance(powerState, PowerState) 

1974 assert isinstance(motionState, AxisMotionState) 

1975 print( 

1976 f"{axis:>{axisPad}} - " 

1977 f"Power: {powerState.name:>{powerPad}} " 

1978 f"Motion: {motionState.name:>{motionPad}} " 

1979 f"InPosition: {p[f'{axis}InPosition']}" 

1980 ) 

1981 print(f"Overall system state: {tma.state.name}") 

1982 

1983 def printFullDayStateEvolution(self, dayObs: int, taiOrUtc: str = "utc") -> None: 

1984 """Print the full TMA state evolution for the specified dayObs. 

1985 

1986 Replays all the data from the EFD for the specified dayObs through 

1987 the TMA state machine, and prints both the overall and detailed state 

1988 of the TMA for each row. 

1989 

1990 Parameters 

1991 ---------- 

1992 dayObs : `int` 

1993 The dayObs for which to print the state evolution. 

1994 taiOrUtc : `str`, optional 

1995 Whether to print the timestamps in TAI or UTC. Default is UTC. 

1996 """ 

1997 # create a fake event which spans the whole day, and then use 

1998 # printEventDetails code while skipping the header to print the 

1999 # evolution. 

2000 _ = self.getEvents(dayObs) # ensure the data has been retrieved from the EFD 

2001 data = self._data[dayObs] 

2002 lastRowNum = len(data) - 1 

2003 

2004 fakeEvent = TMAEvent( 

2005 dayObs=dayObs, 

2006 seqNum=-1, # anything will do 

2007 type=TMAState.OFF, # anything will do 

2008 endReason=TMAState.OFF, # anything will do 

2009 duration=-1, # anything will do 

2010 begin=efdTimestampToAstropy(data.iloc[0]["private_efdStamp"]), 

2011 end=efdTimestampToAstropy(data.iloc[-1]["private_efdStamp"]), 

2012 _startRow=0, 

2013 _endRow=lastRowNum, 

2014 ) 

2015 self.printEventDetails(fakeEvent, taiOrUtc=taiOrUtc, printHeader=False) 

2016 

2017 def printEventDetails( 

2018 self, 

2019 event: TMAEvent, 

2020 taiOrUtc: str = "tai", 

2021 printHeader: bool = False, 

2022 ) -> None: 

2023 """Print a detailed breakdown of all state transitions during an event. 

2024 

2025 Note: this is not the most efficient way to do this, but it is much the 

2026 cleanest with respect to the actual state machine application and event 

2027 generation code, and is easily fast enough for the cases it will be 

2028 used for. It is not worth complicating the normal state machine logic 

2029 to try to use this code. 

2030 

2031 Parameters 

2032 ---------- 

2033 event : `lsst.summit.utils.tmaUtils.TMAEvent` 

2034 The event to display the details of. 

2035 taiOrUtc : `str`, optional 

2036 Whether to display time strings in TAI or UTC. Defaults to TAI. 

2037 Case insensitive. 

2038 printHeader : `bool`, optional 

2039 Whether to print the event summary. Defaults to True. The primary 

2040 reason for the existence of this option is so that this same 

2041 printing function can be used to show the evolution of a whole day 

2042 by supplying a fake event which spans the whole day, but this event 

2043 necessarily has a meaningless summary, and so needs suppressing. 

2044 """ 

2045 taiOrUtc = taiOrUtc.lower() 

2046 if taiOrUtc not in ["tai", "utc"]: 

2047 raise ValueError(f"Got unsuppoted value for {taiOrUtc=}") 

2048 useUtc = taiOrUtc == "utc" 

2049 

2050 if printHeader: 

2051 print( 

2052 f"Details for {event.duration:.2f}s {event.type.name} event dayObs={event.dayObs}" 

2053 f" seqNum={event.seqNum}:" 

2054 ) 

2055 print(f"- Event began at: {event.begin.utc.isot if useUtc else event.begin.isot}") 

2056 print(f"- Event ended at: {event.end.utc.isot if useUtc else event.end.isot}") 

2057 

2058 dayObs = event.dayObs 

2059 data = self._data[dayObs] 

2060 startRow = event._startRow 

2061 endRow = event._endRow 

2062 nRowsToApply = endRow - startRow + 1 

2063 print(f"\nTotal number of rows in the merged dataframe: {len(data)}") 

2064 if printHeader: 

2065 print(f"of which rows {startRow} to {endRow} (inclusive) relate to this event.") 

2066 

2067 # reconstruct all the states 

2068 tma = TMAStateMachine(engineeringMode=True) 

2069 _initializeTma(tma) 

2070 

2071 tmaStates = {} 

2072 firstAppliedRow = True # flag to print a header on the first row that's applied 

2073 for rowNum, row in data.iterrows(): # must replay rows right from start to get full correct state 

2074 if rowNum == startRow: 

2075 # we've not yet applied this row, so this is the state just 

2076 # before event 

2077 print(f"\nBefore the event the TMA was in state {tma.state.name}:") 

2078 self.printTmaDetailedState(tma) 

2079 

2080 if rowNum >= startRow and rowNum <= endRow: 

2081 if firstAppliedRow: # only print this intro on the first row we're applying 

2082 print( 

2083 f"\nThen, applying the {nRowsToApply} rows of data for this event, the state" 

2084 " evolved as follows:\n" 

2085 ) 

2086 firstAppliedRow = False 

2087 

2088 # break the row down and print its details 

2089 rowFor = row["rowFor"] 

2090 axis, rowType = getAxisAndType(rowFor) # e.g. elevation, MotionState 

2091 value = tma._getRowPayload(row, rowType, rowFor) 

2092 valueStr = f"{str(value) if isinstance(value, bool) else value.name}" 

2093 rowTime = efdTimestampToAstropy(row["private_efdStamp"]) 

2094 print( 

2095 f"On row {rowNum} the {axis} axis had the {rowType} set to {valueStr} at" 

2096 f" {rowTime.utc.isot if useUtc else rowTime.isot}" 

2097 ) 

2098 

2099 # then apply it as usual, printing the state right afterwards 

2100 tma.apply(row) 

2101 tmaStates[rowNum] = tma.state 

2102 self.printTmaDetailedState(tma) 

2103 print() 

2104 

2105 else: 

2106 # if it's not in the range of interest then just apply it 

2107 # silently as usual 

2108 tma.apply(row) 

2109 tmaStates[rowNum] = tma.state 

2110 

    def findEvent(self, time: astropy.time.Time) -> TMAEvent | None:
        """Find the event which contains the specified time.

        If the specified time lies within an event, that event is returned. If
        it is at the exact start, that is logged, and if that start point is
        shared by the end of the previous event, that is logged too. If the
        time lies between events, the events either side are logged, but
        ``None`` is returned. If the time lies before the first event of the
        day a warning is logged, as for times after the last event of the day.

        Parameters
        ----------
        time : `astropy.time.Time`
            The time.

        Returns
        -------
        event : `lsst.summit.utils.tmaUtils.TMAEvent` or `None`
            The event which contains the specified time, or ``None`` if the
            time doesn't fall during an event.
        """
        # there are five possible cases:
        # 1) the time lies before the first event of the day
        # 2) the time lies after the last event of the day
        # 3) the time lies within an event
        # 3a) the time is exactly at the start of an event
        # 3b) if so, time can be shared by the end of the previous event if
        #     they are contiguous
        # 4) the time lies between two events
        # 5) the time is exactly at end of the last event of the day. This is
        #    an issue because event end times are exclusive, so this time is
        #    not technically in that event, it's the moment it closes (and if
        #    there *was* an event which followed contiguously, it would be in
        #    that event instead, which is what motivates this definition of
        #    lies within what event)

        dayObs = getDayObsForTime(time)
        # we know this is on the right day, and definitely before the specified
        # time, but sanity check this before continuing as this needs to be
        # true for this to give the correct answer
        assert getDayObsStartTime(dayObs) <= time
        assert getDayObsEndTime(dayObs) > time

        # common start for many log messages so define once here
        logStart = f"Specified time {time.isot} falls on {dayObs=}"

        # block info isn't needed for event finding, so skip it for speed
        events = self.getEvents(dayObs, addBlockInfo=False)
        if len(events) == 0:
            self.log.warning(f"There are no events found for {dayObs}")
            return None

        # check case 1)
        if time < events[0].begin:
            self.log.warning(f"{logStart} and is before the first event of the day")
            return None

        # check case 2)
        if time > events[-1].end:
            self.log.warning(f"{logStart} and is after the last event of the day")
            return None

        # check case 5)
        if time == events[-1].end:
            self.log.warning(
                f"{logStart} and is exactly at the end of the last event of the day"
                f" (seqnum={events[-1].seqNum}). Because event intervals are half-open, this"
                " time does not technically lie in any event"
            )
            return None

        # we are now either in an event, or between events. Walk through the
        # events, and if the end of the event is after the specified time, then
        # we're either in it or past it, so check if we're in.
        for eventNum, event in enumerate(events):
            if event.end > time:  # case 3) we are now into or past the right event
                # the event end encloses the time, so note the > and not >=,
                # this must be strictly greater, we check the overlap case
                # later
                if time >= event.begin:  # we're fully inside the event, so return it.
                    # 3a) before returning, check if we're exactly at the start
                    # of the event, and if so, log it. Then 3b) also check if
                    # we're at the exact end of the previous event, and if so,
                    # log that too.
                    if time == event.begin:
                        self.log.info(f"{logStart} and is exactly at the start of event" f" {eventNum}")
                        if eventNum == 0:  # I think this is actually impossible, but check anyway
                            return event  # can't check the previous event so return here
                        previousEvent = events[eventNum - 1]
                        if previousEvent.end == time:
                            self.log.info(
                                "Previous event is contiguous, so this time is also at the exact"
                                f" end of {eventNum - 1}"
                            )
                    return event
                else:  # case 4)
                    # the event end is past the time, but it's not inside the
                    # event, so we're between events. Log which we're between
                    # and return None
                    previousEvent = events[eventNum - 1]
                    timeAfterPrev = (time - previousEvent.end).to_datetime()
                    naturalTimeAfterPrev = humanize.naturaldelta(timeAfterPrev, minimum_unit="MICROSECONDS")
                    timeBeforeCurrent = (event.begin - time).to_datetime()
                    naturalTimeBeforeCurrent = humanize.naturaldelta(
                        timeBeforeCurrent, minimum_unit="MICROSECONDS"
                    )
                    self.log.info(
                        f"{logStart} and lies"
                        f" {naturalTimeAfterPrev} after the end of event {previousEvent.seqNum}"
                        f" and {naturalTimeBeforeCurrent} before the start of event {event.seqNum}."
                    )
                    return None

        raise RuntimeError(
            "Event finding logic fundamentally failed, which should never happen - the code" " needs fixing"
        )