Coverage for python/lsst/summit/utils/tmaUtils.py: 20%

559 statements  

coverage.py v7.4.2, created at 2024-02-23 15:47 +0000

1# This file is part of summit_utils. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <https://www.gnu.org/licenses/>. 

21 

22import re 

23import enum 

24import itertools 

25import logging 

26import pandas as pd 

27import numpy as np 

28import humanize 

29from dataclasses import dataclass 

30from astropy.time import Time 

31from matplotlib.ticker import FuncFormatter 

32import matplotlib.dates as mdates 

33import matplotlib.pyplot as plt 

34from lsst.utils.iteration import ensure_iterable 

35 

36from .enums import AxisMotionState, PowerState 

37from .blockUtils import BlockParser 

38from .utils import getCurrentDayObs_int, dayObsIntToString 

39from .efdUtils import (getEfdData, 

40 makeEfdClient, 

41 efdTimestampToAstropy, 

42 COMMAND_ALIASES, 

43 getDayObsForTime, 

44 getDayObsStartTime, 

45 getDayObsEndTime, 

46 ) 

47 

48__all__ = ( 

49 'TMAStateMachine', 

50 'TMAEvent', 

51 'TMAEventMaker', 

52 'TMAState', 

53 'AxisMotionState', 

54 'PowerState', 

55 'getSlewsFromEventList', 

56 'getTracksFromEventList', 

57 'getTorqueMaxima', 

58) 

59 

60# we don't want to use `None` for a no data sentinel because dict.get('key') 

61# returns None if the key isn't present, and also we need to mark that the data 

62# was queried for and no data was found, whereas the key not being present 

63# means that we've not yet looked for the data. 

64NO_DATA_SENTINEL = "NODATA" 

65 

66 

67def getSlewsFromEventList(events): 

68 """Get the slew events from a list of TMAEvents. 

69 

70 Parameters 

71 ---------- 

72 events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent` 

73 The list of events to filter. 

74 

75 Returns 

76 ------- 

77 events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent` 

78 The filtered list of events. 

79 """ 

80 return [e for e in events if e.type == TMAState.SLEWING] 

81 

82 

83def getTracksFromEventList(events): 

84 """Get the tracking events from a list of TMAEvents. 

85 

86 Parameters 

87 ---------- 

88 events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent` 

89 The list of events to filter. 

90 

91 Returns 

92 ------- 

93 events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent` 

94 The filtered list of events. 

95 """ 

96 return [e for e in events if e.type == TMAState.TRACKING] 
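
A minimal usage sketch of these two filters, assuming an EFD connection is available and that the example dayObs actually has TMA data (both assumptions are for illustration only):

    eventMaker = TMAEventMaker()
    events = eventMaker.getEvents(20230630)  # hypothetical dayObs
    slews = getSlewsFromEventList(events)
    tracks = getTracksFromEventList(events)
    print(f"Found {len(slews)} slews and {len(tracks)} tracks")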

97 

98 

99def getTorqueMaxima(table): 

100 """Print the maximum positive and negative azimuth and elevation torques. 

101 

102 Designed to be used with the table as downloaded from RubinTV. 

103 

104 Parameters 

105 ---------- 

106 table : `pd.DataFrame` 

107 The table of data to use, as generated by Rapid Analysis. 

108 """ 

109 for axis in ['elevation', 'azimuth']: 

110 col = f'Largest {axis} torque' 

111 maxPos = np.argmax(table[col]) 

112 maxVal = table[col].iloc[maxPos] 

113 print(f"Max positive {axis:9} torque during seqNum {maxPos:>4}: {maxVal/1000:>7.1f}kNm") 

114 minPos = np.argmin(table[col]) 

115 minVal = table[col].iloc[minPos] 

116 print(f"Max negative {axis:9} torque during seqNum {minPos:>4}: {minVal/1000:>7.1f}kNm") 

117 

118 

119def getAzimuthElevationDataForEvent(client, event, prePadding=0, postPadding=0): 

120 """Get the data for the az/el telemetry topics for a given TMAEvent. 

121 

122 Parameters 

123 ---------- 

124 client : `lsst_efd_client.efd_helper.EfdClient` 

125 The EFD client to use. 

126 event : `lsst.summit.utils.tmaUtils.TMAEvent` 

127 The event to get the data for. 

128 prePadding : `float`, optional 

129 The amount of time to pad the event with before the start time, in 

130 seconds. 

131 postPadding : `float`, optional 

132 The amount of time to pad the event with after the end time, in 

133 seconds. 

134 

135 Returns 

136 ------- 

137 azimuthData : `pd.DataFrame` 

138 The azimuth data for the specified event. 

139 elevationData : `pd.DataFrame` 

140 The elevation data for the specified event. 

141 """ 

142 azimuthData = getEfdData(client, 

143 'lsst.sal.MTMount.azimuth', 

144 event=event, 

145 prePadding=prePadding, 

146 postPadding=postPadding) 

147 elevationData = getEfdData(client, 

148 'lsst.sal.MTMount.elevation', 

149 event=event, 

150 prePadding=prePadding, 

151 postPadding=postPadding) 

152 

153 return azimuthData, elevationData 
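
A hedged sketch of fetching padded azimuth/elevation telemetry for a single event; the dayObs, the seqNum, and the assumption that the day has at least one event are all illustrative:

    client = makeEfdClient()
    event = TMAEventMaker(client).getEvent(20230630, 0)  # hypothetical first event of the day
    azimuthData, elevationData = getAzimuthElevationDataForEvent(client, event,
                                                                 prePadding=5, postPadding=5)
    print(azimuthData['actualPosition'].describe())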

154 

155 

156def plotEvent(client, event, fig=None, prePadding=0, postPadding=0, commands={}, 

157 azimuthData=None, elevationData=None): 

158 """Plot the TMA axis positions over the course of a given TMAEvent. 

159 

160 Plots the axis motion profiles for the given event, with optional padding 

161 at the start and end of the event. If the data is provided via the 

162 azimuthData and elevationData parameters, it will be used, otherwise it 

163 will be queried from the EFD. 

164 

165 Optionally plots any commands issued during or around the event, if these 

166 are supplied. Commands are supplied as a dictionary of the command topic 

167 strings, with values as astropy.time.Time objects at which the command was 

168 issued. 

169 

170 Parameters 

171 ---------- 

172 client : `lsst_efd_client.efd_helper.EfdClient` 

173 The EFD client to use. 

174 event : `lsst.summit.utils.tmaUtils.TMAEvent` 

175 The event to plot. 

176 fig : `matplotlib.figure.Figure`, optional 

177 The figure to plot on. If not specified, a new figure will be created. 

178 prePadding : `float`, optional 

179 The amount of time to pad the event with before the start time, in 

180 seconds. 

181 postPadding : `float`, optional 

182 The amount of time to pad the event with after the end time, in 

183 seconds. 

184 commands : `dict` of `str` : `astropy.time.Time`, optional 

185 A dictionary of commands to plot on the figure. The keys are the topic 

186 names, and the values are the times at which the commands were sent. 

187 azimuthData : `pd.DataFrame`, optional 

188 The azimuth data to plot. If not specified, it will be queried from the 

189 EFD. 

190 elevationData : `pd.DataFrame`, optional 

191 The elevation data to plot. If not specified, it will be queried from 

192 the EFD. 

193 

194 Returns 

195 ------- 

196 fig : `matplotlib.figure.Figure` 

197 The figure on which the plot was made. 

198 """ 

199 def tickFormatter(value, tick_number): 

200 # Convert the value to a string without subtracting large numbers 

201 # tick_number is unused. 

202 return f"{value:.2f}" 

203 

204 # plot any commands we might have 

205 if not isinstance(commands, dict): 

206 raise TypeError('commands must be a dict of command names with values as' 

207 ' astropy.time.Time values') 

208 

209 if fig is None: 

210 fig = plt.figure(figsize=(10, 8)) 

211 log = logging.getLogger(__name__) 

212 log.warning("Making new matplotlib figure - if this is in a loop you're going to have a bad time." 

213 " Pass in a figure with fig = plt.figure(figsize=(10, 8)) to avoid this warning.") 

214 

215 fig.clear() 

216 ax1, ax2 = fig.subplots(2, 

217 sharex=True, 

218 gridspec_kw={'wspace': 0, 

219 'hspace': 0, 

220 'height_ratios': [2.5, 1]}) 

221 

222 if azimuthData is None or elevationData is None: 

223 azimuthData, elevationData = getAzimuthElevationDataForEvent(client, 

224 event, 

225 prePadding=prePadding, 

226 postPadding=postPadding) 

227 

228 # Use the native color cycle for the lines. Because they're on different 

229 # axes they don't cycle by themselves 

230 lineColors = [p['color'] for p in plt.rcParams['axes.prop_cycle']] 

231 colorCounter = 0 

232 

233 ax1.plot(azimuthData['actualPosition'], label='Azimuth position', c=lineColors[colorCounter]) 

234 colorCounter += 1 

235 ax1.yaxis.set_major_formatter(FuncFormatter(tickFormatter)) 

236 ax1.set_ylabel('Azimuth (degrees)') 

237 

238 ax1_twin = ax1.twinx() 

239 ax1_twin.plot(elevationData['actualPosition'], label='Elevation position', c=lineColors[colorCounter]) 

240 colorCounter += 1 

241 ax1_twin.yaxis.set_major_formatter(FuncFormatter(tickFormatter)) 

242 ax1_twin.set_ylabel('Elevation (degrees)') 

243 ax1.set_xticks([]) # remove x tick labels on the hidden upper x-axis 

244 

245 ax2_twin = ax2.twinx() 

246 ax2.plot(azimuthData['actualTorque'], label='Azimuth torque', c=lineColors[colorCounter]) 

247 colorCounter += 1 

248 ax2_twin.plot(elevationData['actualTorque'], label='Elevation torque', c=lineColors[colorCounter]) 

249 colorCounter += 1 

250 ax2.set_ylabel('Azimuth torque (Nm)') 

251 ax2_twin.set_ylabel('Elevation torque (Nm)') 

252 ax2.set_xlabel('Time (UTC)') # yes, it really is UTC, matplotlib converts this automatically! 

253 

254 # put the ticks at an angle, and right align with the tick marks 

255 ax2.set_xticks(ax2.get_xticks()) # needed to suppress a user warning 

256 xlabels = ax2.get_xticks() 

257 ax2.set_xticklabels(xlabels, rotation=40, ha='right') 

258 ax2.xaxis.set_major_locator(mdates.AutoDateLocator()) 

259 ax2.xaxis.set_major_formatter(mdates.DateFormatter('%H:%M:%S')) 

260 

261 if prePadding or postPadding: 

262 # note the conversion to utc because the x-axis from the dataframe 

263 # already got automagically converted when plotting before, so this is 

264 # necessary for things to line up 

265 ax1_twin.axvline(event.begin.utc.datetime, c='k', ls='--', alpha=0.5, label='Event begin/end') 

266 ax1_twin.axvline(event.end.utc.datetime, c='k', ls='--', alpha=0.5) 

267 # extend lines down across lower plot, but do not re-add label 

268 ax2_twin.axvline(event.begin.utc.datetime, c='k', ls='--', alpha=0.5) 

269 ax2_twin.axvline(event.end.utc.datetime, c='k', ls='--', alpha=0.5) 

270 

271 for command, commandTime in commands.items(): 

272 # if commands weren't found, the item is set to None. This is common 

273 # for events so handle it gracefully and silently. The command finding 

274 # code logs about lack of commands found so no need to mention here. 

275 if commandTime is None: 

276 continue 

277 ax1_twin.axvline(commandTime.utc.datetime, c=lineColors[colorCounter], 

278 ls='--', alpha=0.75, label=f'{command}') 

279 # extend lines down across lower plot, but do not re-add label 

280 ax2_twin.axvline(commandTime.utc.datetime, c=lineColors[colorCounter], 

281 ls='--', alpha=0.75) 

282 colorCounter += 1 

283 

284 # combine the legends and put inside the plot 

285 handles1a, labels1a = ax1.get_legend_handles_labels() 

286 handles1b, labels1b = ax1_twin.get_legend_handles_labels() 

287 handles2a, labels2a = ax2.get_legend_handles_labels() 

288 handles2b, labels2b = ax2_twin.get_legend_handles_labels() 

289 

290 handles = handles1a + handles1b + handles2a + handles2b 

291 labels = labels1a + labels1b + labels2a + labels2b 

292 # ax2 is "in front" of ax1 because it has the vlines plotted on it, and 

293 # vlines are on ax2 so that they appear at the bottom of the legend, so 

294 # make sure to plot the legend on ax2, otherwise the vlines will go on top 

295 # of the otherwise-opaque legend. 

296 ax1_twin.legend(handles, labels, facecolor='white', framealpha=1) 

297 

298 # Add title with the event name, type etc 

299 dayObsStr = dayObsIntToString(event.dayObs) 

300 title = (f"{dayObsStr} - seqNum {event.seqNum} (version {event.version})" # top line, rest below 

301 f"\nDuration = {event.duration:.2f}s" 

302 f" Event type: {event.type.name}" 

303 f" End reason: {event.endReason.name}" 

304 ) 

305 ax1_twin.set_title(title) 

306 return fig 
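
A sketch of plotting several events while reusing a single figure, as the warning above recommends; `client` and `events` are assumed to exist from earlier calls, and the output filename is hypothetical:

    fig = plt.figure(figsize=(10, 8))  # create once, outside the loop
    for event in getSlewsFromEventList(events):
        plotEvent(client, event, fig=fig, prePadding=2, postPadding=2)
        fig.savefig(f"tma_{event.dayObs}_{event.seqNum:04}.png")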

307 

308 

309def getCommandsDuringEvent(client, event, commands=('raDecTarget',), log=None, doLog=True): 

310 """Get the commands issued during an event. 

311 

312 Get the times at which the specified commands were issued during the event. 

313 

314 Parameters 

315 ---------- 

316 client : `lsst_efd_client.efd_helper.EfdClient` 

317 The EFD client to use. 

318 event : `lsst.summit.utils.tmaUtils.TMAEvent` 

319 The event to plot. 

320 commands : `list` of `str`, optional 

321 The commands or command aliases to look for. Defaults to 

322 ['raDecTarget']. 

323 log : `logging.Logger`, optional 

324 The logger to use. If not specified, a new logger will be created if 

325 needed. 

326 doLog : `bool`, optional 

327 Whether to log messages. Defaults to True. 

328 

329 Returns 

330 ------- 

331 commands : `dict` of `str` : `astropy.time.Time` 

332 A dictionary of the commands and the times at which they were issued. 

333 """ 

334 # TODO: DM-40100 Add support for padding the event here to allow looking 

335 # for triggering commands before the event 

336 

337 # TODO: DM-40100 Change this to always return a list of times, and remove 

338 # warning about finding multiple commands. Remember to update docs and 

339 # plotting code. 

340 if log is None and doLog: 

341 log = logging.getLogger(__name__) 

342 

343 commands = ensure_iterable(commands) 

344 fullCommands = [c if c not in COMMAND_ALIASES else COMMAND_ALIASES[c] for c in commands] 

345 del commands # make sure we always use their full names 

346 

347 ret = {} 

348 for command in fullCommands: 

349 data = getEfdData(client, command, event=event, warn=False) 

350 if data.empty: 

351 if doLog: 

352 log.info(f'Found no command issued for {command} during event') 

353 ret[command] = None 

354 elif len(data) > 1: 

355 if doLog: 

356 log.warning(f'Found multiple commands issued for {command} during event, returning None') 

357 ret[command] = None 

358 else: 

359 assert len(data) == 1 # this must be true now 

360 commandTime = data.private_efdStamp 

361 ret[command] = Time(commandTime, format='unix') 

362 

363 return ret 
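
A short sketch combining this with `plotEvent`, so that any issued commands appear as vertical lines on the plot; `client`, `event`, and `fig` are assumed to exist already:

    commands = getCommandsDuringEvent(client, event, commands=('raDecTarget',))
    plotEvent(client, event, fig=fig, commands=commands)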

364 

365 

366def _initializeTma(tma): 

367 """Helper function to turn a TMA into a valid state for testing. 

368 

369 Do not call directly in normal usage or code, as this just arbitrarily 

370 sets values to make the TMA valid. 

371 

372 Parameters 

373 ---------- 

374 tma : `lsst.summit.utils.tmaUtils.TMAStateMachine` 

375 The TMA state machine model to initialize. 

376 """ 

377 tma._parts['azimuthInPosition'] = False 

378 tma._parts['azimuthMotionState'] = AxisMotionState.STOPPED 

379 tma._parts['azimuthSystemState'] = PowerState.ON 

380 tma._parts['elevationInPosition'] = False 

381 tma._parts['elevationMotionState'] = AxisMotionState.STOPPED 

382 tma._parts['elevationSystemState'] = PowerState.ON 

383 

384 

385@dataclass(kw_only=True, frozen=True) 

386class TMAEvent: 

387 """A movement event for the TMA. 

388 

389 Contains the dayObs on which the event occurred, using the standard 

390 observatory definition of the dayObs, and the sequence number of the event, 

391 which is unique for each event on a given dayObs. 

392 

393 The event type can be either 'SLEWING' or 'TRACKING', defined as: 

394 - SLEWING: some part of the TMA is in motion 

395 - TRACKING: both axes are in position and tracking the sky 

396 

397 The end reason can be 'STOPPED', 'TRACKING', 'FAULT', 'SLEWING', or 'OFF'. 

398 - SLEWING: The previous event was a TRACKING event, and one or more of 

399 the TMA components either stopped being in position, or stopped 

400 moving, or went into fault, or was turned off, and hence we are now 

401 only slewing and no longer tracking the sky. 

402 - TRACKING: the TMA started tracking the sky when it wasn't previously. 

403 Usually this would be preceded directly by a SLEWING 

404 event, but this is not strictly true, as the EUI seems to be able 

405 to make the TMA start tracking the sky without slewing first. 

406 - STOPPED: the components of the TMA transitioned to the STOPPED state. 

407 - FAULT: the TMA went into fault. 

408 - OFF: the TMA components were turned off. 

409 

410 Note that this class is not intended to be instantiated directly, but 

411 rather to be returned by the ``TMAEventMaker.getEvents()`` function. 

412 

413 Parameters 

414 ---------- 

415 dayObs : `int` 

416 The dayObs on which the event occurred. 

417 seqNum : `int` 

418 The sequence number of the event. 

419 type : `lsst.summit.utils.tmaUtils.TMAState` 

420 The type of the event, either 'SLEWING' or 'TRACKING'. 

421 endReason : `lsst.summit.utils.tmaUtils.TMAState` 

422 The reason the event ended, either 'STOPPED', 'TRACKING', 'FAULT', 

423 'SLEWING', or 'OFF'. 

424 duration : `float` 

425 The duration of the event, in seconds. 

426 begin : `astropy.time.Time` 

427 The time the event began. 

428 end : `astropy.time.Time` 

429 The time the event ended. 

430 blockInfos : `list` of `lsst.summit.utils.tmaUtils.BlockInfo`, or `None` 

431 The block information, if any, relating to the event. Can be `None`, 

432 or a list of one or more `BlockInfo` objects. 

433 version : `int` 

434 The version of the TMAEvent class. Equality between events is only 

435 valid for a given version of the class. If the class definition 

436 changes, the time ranges can change, and hence the equality between 

437 events is ``False``. 

438 _startRow : `int` 

439 The first row in the merged EFD data which is part of the event. 

440 _endRow : `int` 

441 The last row in the merged EFD data which is part of the event. 

442 """ 

443 dayObs: int 

444 seqNum: int 

445 type: TMAState # can be TMAState.SLEWING or TMAState.TRACKING 

446 endReason: TMAState # can be STOPPED, TRACKING, FAULT, SLEWING, or OFF 

447 duration: float # seconds 

448 begin: Time 

449 end: Time 

450 blockInfos: list = None 

451 version: int = 0 # update this number any time a code change which could change event definitions is made 

452 _startRow: int 

453 _endRow: int 

454 

455 def __lt__(self, other): 

456 if self.version != other.version: 

457 raise ValueError( 

458 f"Cannot compare TMAEvents with different versions: {self.version} != {other.version}" 

459 ) 

460 if self.dayObs < other.dayObs: 

461 return True 

462 elif self.dayObs == other.dayObs: 

463 return self.seqNum < other.seqNum 

464 return False 

465 

466 def __repr__(self): 

467 return ( 

468 f"TMAEvent(dayObs={self.dayObs}, seqNum={self.seqNum}, type={self.type!r}," 

469 f" endReason={self.endReason!r}, duration={self.duration}, begin={self.begin!r}," 

470 f" end={self.end!r}" 

471 ) 

472 

473 def __hash__(self): 

474 # deliberately don't hash the blockInfos here, as they are not 

475 # a core part of the event itself, and are listy and cause problems 

476 return hash((self.dayObs, 

477 self.seqNum, 

478 self.type, 

479 self.endReason, 

480 self.duration, 

481 self.begin, 

482 self.end, 

483 self.version, 

484 self._startRow, 

485 self._endRow 

486 ) 

487 ) 

488 

489 def _ipython_display_(self): 

490 print(self.__str__()) 

491 

492 def __str__(self): 

493 def indent(string): 

494 return '\n' + '\n'.join([' ' + s for s in string.splitlines()]) 

495 

496 blockInfoStr = 'None' 

497 if self.blockInfos is not None: 

498 blockInfoStr = ''.join(indent(str(i)) for i in self.blockInfos) 

499 

500 return ( 

501 f"dayObs: {self.dayObs}\n" 

502 f"seqNum: {self.seqNum}\n" 

503 f"type: {self.type.name}\n" 

504 f"endReason: {self.endReason.name}\n" 

505 f"duration: {self.duration}\n" 

506 f"begin: {self.begin!r}\n" 

507 f"end: {self.end!r}\n" 

508 f"blockInfos: {blockInfoStr}" 

509 ) 
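
Because `TMAEvent` is frozen, hashable, and ordered (within a single version), event lists can be de-duplicated and sorted directly. A small sketch, assuming `events` came from `TMAEventMaker.getEvents()`:

    events = sorted(set(events))  # __hash__ and __lt__ make this valid for one version
    longest = max(events, key=lambda e: e.duration)
    print(f"Longest event: {longest.type.name}, lasting {longest.duration:.1f}s")
    print(longest)  # __str__ gives the multi-line summary, including any block info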

510 

511 

512class TMAState(enum.IntEnum): 

513 """Overall state of the TMA. 

514 

515 States are defined as follows: 

516 

517 UNINITIALIZED 

518 We have not yet got data for all relevant components, so the overall 

519 state is undefined. 

520 STOPPED 

521 All components are on, and none are moving. 

522 TRACKING 

523 We are tracking the sky. 

524 SLEWING 

525 One or more components are moving, and one or more are not tracking the 

526 sky. This should probably be called MOVING, as it includes: slewing, 

527 MOVING_POINT_TO_POINT, and JOGGING. 

528 FAULT 

529 All (if engineeringMode) or any (if not engineeringMode) components are 

530 in fault. 

531 OFF 

532 All components are off. 

533 """ 

534 UNINITIALIZED = -1 

535 STOPPED = 0 

536 TRACKING = 1 

537 SLEWING = 2 

538 FAULT = 3 

539 OFF = 4 

540 

541 def __repr__(self): 

542 return f"TMAState.{self.name}" 

543 

544 

545def getAxisAndType(rowFor): 

546 """Get the axis the data relates to, and the type of data it contains. 

547 

548 Parameters 

549 ---------- 

550 rowFor : `str` 

551 The column in the dataframe denoting what this row is for, e.g. 

552 "elevationMotionState" or "azimuthInPosition", etc. 

553 

554 Returns 

555 ------- 

556 axis : `str` 

557 The axis the row is for, e.g. "azimuth", "elevation". 

558 rowType : `str` 

559 The type of the row, e.g. "MotionState", "SystemState", "InPosition". 

560 """ 

561 regex = r'(azimuth|elevation)(InPosition|MotionState|SystemState)$' # matches the end of the line 

562 matches = re.search(regex, rowFor) 

563 if matches is None: 

564 raise ValueError(f"Could not parse axis and rowType from {rowFor=}") 

565 axis = matches.group(1) 

566 rowType = matches.group(2) 

567 

568 assert rowFor.endswith(f"{axis}{rowType}") 

569 return axis, rowType 
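
For example, a quick sketch of the expected parsing:

    axis, rowType = getAxisAndType('elevationMotionState')
    assert (axis, rowType) == ('elevation', 'MotionState')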

570 

571 

572class ListViewOfDict: 

573 """A class to allow making lists which contain references to an underlying 

574 dictionary. 

575 

576 Normally, making a list of items from a dictionary would make a copy of the 

577 items, but this class allows making a list which contains references to the 

578 underlying dictionary items themselves. This is useful for making a list of 

579 components, such that they can be manipulated in their logical sets. 

580 """ 

581 def __init__(self, underlyingDictionary, keysToLink): 

582 self.dictionary = underlyingDictionary 

583 self.keys = keysToLink 

584 

585 def __getitem__(self, index): 

586 return self.dictionary[self.keys[index]] 

587 

588 def __setitem__(self, index, value): 

589 self.dictionary[self.keys[index]] = value 

590 

591 def __len__(self): 

592 return len(self.keys) 
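
A minimal sketch showing that writes through the view reach the underlying dictionary; the dictionary contents here are purely illustrative:

    parts = {'azimuthSystemState': PowerState.ON, 'elevationSystemState': PowerState.FAULT}
    systems = ListViewOfDict(parts, ['azimuthSystemState', 'elevationSystemState'])
    systems[1] = PowerState.ON  # writes through to the underlying dict
    assert parts['elevationSystemState'] == PowerState.ON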

593 

594 

595class TMAStateMachine: 

596 """A state machine model of the TMA. 

597 

598 Note that this is currently only implemented for the azimuth and elevation 

599 axes, but will be extended to include the rotator in the future. 

600 

601 Note that when used for event generation, changing ``engineeringMode`` to 

602 False might change the resulting list of events, and that if the TMA moves 

603 with some axis in fault, then these events will be missed. It is therefore 

604 thought that ``engineeringMode=True`` should always be used when generating 

605 events. The option, however, is there for completeness, as this will be 

606 useful for knowing if the CSC would consider the TMA to be in fault in the 

607 general case. 

608 

609 Parameters 

610 ---------- 

611 engineeringMode : `bool`, optional 

612 Whether the TMA is in engineering mode. Defaults to True. If False, 

613 then the TMA will be in fault if any component is in fault. If True, 

614 then the TMA will be in fault only if all components are in fault. 

615 debug : `bool`, optional 

616 Whether to log debug messages. Defaults to False. 

617 """ 

618 _UNINITIALIZED_VALUE: int = -999 

619 

620 def __init__(self, engineeringMode=True, debug=False): 

621 self.engineeringMode = engineeringMode 

622 self.log = logging.getLogger('lsst.summit.utils.tmaUtils.TMA') 

623 if debug: 

624 self.log.level = logging.DEBUG 

625 self._mostRecentRowTime = -1 

626 

627 # the actual components of the TMA 

628 self._parts = {'azimuthInPosition': self._UNINITIALIZED_VALUE, 

629 'azimuthMotionState': self._UNINITIALIZED_VALUE, 

630 'azimuthSystemState': self._UNINITIALIZED_VALUE, 

631 'elevationInPosition': self._UNINITIALIZED_VALUE, 

632 'elevationMotionState': self._UNINITIALIZED_VALUE, 

633 'elevationSystemState': self._UNINITIALIZED_VALUE, 

634 } 

635 systemKeys = ['azimuthSystemState', 'elevationSystemState'] 

636 positionKeys = ['azimuthInPosition', 'elevationInPosition'] 

637 motionKeys = ['azimuthMotionState', 'elevationMotionState'] 

638 

639 # references to the _parts as conceptual groupings 

640 self.system = ListViewOfDict(self._parts, systemKeys) 

641 self.motion = ListViewOfDict(self._parts, motionKeys) 

642 self.inPosition = ListViewOfDict(self._parts, positionKeys) 

643 

644 # tuples of states for state collapsing. Note that STOP_LIKE + 

645 # MOVING_LIKE must cover the full set of AxisMotionState enums 

646 self.STOP_LIKE = (AxisMotionState.STOPPING, 

647 AxisMotionState.STOPPED, 

648 AxisMotionState.TRACKING_PAUSED) 

649 self.MOVING_LIKE = (AxisMotionState.MOVING_POINT_TO_POINT, 

650 AxisMotionState.JOGGING, 

651 AxisMotionState.TRACKING) 

652 # Likewise, ON_LIKE + OFF_LIKE must cover the full set of PowerState 

653 # enums 

654 self.OFF_LIKE = (PowerState.OFF, PowerState.TURNING_OFF) 

655 self.ON_LIKE = (PowerState.ON, PowerState.TURNING_ON) 

656 self.FAULT_LIKE = (PowerState.FAULT,) # note the trailing comma - this must be an iterable 

657 

658 def apply(self, row): 

659 """Apply a row of data to the TMA state. 

660 

661 Checks that the row contains data for a later time than any data 

662 previously applied, and applies the relevant column entry to the 

663 relevant component. 

664 

665 Parameters 

666 ---------- 

667 row : `pd.Series` 

668 The row of data to apply to the state machine. 

669 """ 

670 timestamp = row['private_efdStamp'] 

671 if timestamp < self._mostRecentRowTime: # NB equals is OK, technically, though it never happens 

672 raise ValueError('TMA evolution must be monotonically increasing in time, tried to apply a row which' 

673 ' predates the most recent one') 

674 self._mostRecentRowTime = timestamp 

675 

676 rowFor = row['rowFor'] # e.g. elevationMotionState 

677 axis, rowType = getAxisAndType(rowFor) # e.g. elevation, MotionState 

678 value = self._getRowPayload(row, rowType, rowFor) 

679 self.log.debug(f"Setting {rowFor} to {repr(value)}") 

680 self._parts[rowFor] = value 

681 try: 

682 # touch the state property as this executes the sieving, to make 

683 # sure we don't fall through the sieve at any point in time 

684 _ = self.state 

685 except RuntimeError as e: 

686 # improve error reporting, but always reraise this, as this is a 

687 # full-blown failure 

688 raise RuntimeError(f'Failed to apply {value} to {axis}{rowType} with state {self._parts}') from e 

689 

690 def _getRowPayload(self, row, rowType, rowFor): 

691 """Get the relevant value from the row. 

692 

693 Given the row, and which component it relates to, get the relevant 

694 value, as a bool or cast to the appropriate enum class. 

695 

696 Parameters 

697 ---------- 

698 row : `pd.Series` 

699 The row of data from the dataframe. 

700 rowType : `str` 

701 The type of the row, e.g. "MotionState", "SystemState", 

702 "InPosition". 

703 rowFor : `str` 

704 The component the row is for, e.g. "azimuthMotionState", "elevationInPosition". 

705 

706 Returns 

707 ------- 

708 value : `bool` or `enum` 

709 The value of the row, as a bool or enum, depending on the 

710 component, cast to the appropriate enum class or bool. 

711 """ 

712 match rowType: 

713 case 'MotionState': 

714 value = row[f'state_{rowFor}'] 

715 return AxisMotionState(value) 

716 case 'SystemState': 

717 value = row[f'powerState_{rowFor}'] 

718 return PowerState(value) 

719 case 'InPosition': 

720 value = row[f'inPosition_{rowFor}'] 

721 return bool(value) 

722 case _: 

723 raise ValueError(f'Failed to get row payload with {rowType=} and {row=}') 

724 

725 @property 

726 def _isValid(self): 

727 """Has the TMA had a value applied to all its components? 

728 

729 If any component has not yet had a value applied, the TMA is not valid, 

730 as those components will be in an unknown state. 

731 

732 Returns 

733 ------- 

734 isValid : `bool` 

735 Whether the TMA is fully initialized. 

736 """ 

737 return not any([v == self._UNINITIALIZED_VALUE for v in self._parts.values()]) 

738 

739 # state inspection properties - a high level way of inspecting the state as 

740 # an API 

741 @property 

742 def isMoving(self): 

743 return self.state in [TMAState.TRACKING, TMAState.SLEWING] 

744 

745 @property 

746 def isNotMoving(self): 

747 return not self.isMoving 

748 

749 @property 

750 def isTracking(self): 

751 return self.state == TMAState.TRACKING 

752 

753 @property 

754 def isSlewing(self): 

755 return self.state == TMAState.SLEWING 

756 

757 @property 

758 def canMove(self): 

759 badStates = [PowerState.OFF, PowerState.TURNING_OFF, PowerState.FAULT, PowerState.UNKNOWN] 

760 return bool( 

761 self._isValid and 

762 self._parts['azimuthSystemState'] not in badStates and 

763 self._parts['elevationSystemState'] not in badStates 

764 ) 

765 

766 # Axis inspection properties, designed for internal use. These return 

767 # iterables so that they can be used in any() and all() calls, which make 

768 # the logic much easier to read, e.g. to see if anything is moving, we can 

769 # write `if not any(_axesInMotion):` 

770 @property 

771 def _axesInFault(self): 

772 return [x in self.FAULT_LIKE for x in self.system] 

773 

774 @property 

775 def _axesOff(self): 

776 return [x in self.OFF_LIKE for x in self.system] 

777 

778 @property 

779 def _axesOn(self): 

780 return [not x for x in self._axesOff] 

781 

782 @property 

783 def _axesInMotion(self): 

784 return [x in self.MOVING_LIKE for x in self.motion] 

785 

786 @property 

787 def _axesTRACKING(self): 

788 """Note this is deliberately named _axesTRACKING and not _axesTracking 

789 to make it clear that this is the AxisMotionState type of TRACKING and 

790 not the normal conceptual notion of tracking (the sky, i.e. as opposed 

791 to slewing). 

792 """ 

793 return [x == AxisMotionState.TRACKING for x in self.motion] 

794 

795 @property 

796 def _axesInPosition(self): 

797 return [x is True for x in self.inPosition] 

798 

799 @property 

800 def state(self): 

801 """The overall state of the TMA. 

802 

803 Note that this is both a property, and also the method which applies 

804 the logic sieve to determine the state at a given point in time. 

805 

806 Returns 

807 ------- 

808 state : `lsst.summit.utils.tmaUtils.TMAState` 

809 The overall state of the TMA. 

810 """ 

811 # first, check we're valid, and if not, return UNINITIALIZED state, as 

812 # things are unknown 

813 if not self._isValid: 

814 return TMAState.UNINITIALIZED 

815 

816 # if we're not in engineering mode, i.e. we're under normal CSC 

817 # control, then if anything is in fault, we're in fault. If we're 

818 # engineering then some axes will move when others are in fault 

819 if not self.engineeringMode: 

820 if any(self._axesInFault): 

821 return TMAState.FAULT 

822 else: 

823 # we're in engineering mode, so return fault state if ALL are in 

824 # fault 

825 if all(self._axesInFault): 

826 return TMAState.FAULT 

827 

828 # if all axes are off, the TMA is OFF 

829 if all(self._axesOff): 

830 return TMAState.OFF 

831 

832 # we know we're valid and at least some axes are not off, so see if 

833 # we're in motion if no axes are moving, we're stopped 

834 if not any(self._axesInMotion): 

835 return TMAState.STOPPED 

836 

837 # now we know we're initialized, and that at least one axis is moving 

838 # so check axes for motion and in position. If all axes are tracking 

839 # and all are in position, we're tracking the sky 

840 if (all(self._axesTRACKING) and all(self._axesInPosition)): 

841 return TMAState.TRACKING 

842 

843 # we now know explicitly that not everything is in position, so we no 

844 # longer need to check that. We do actually know that something is in 

845 # motion, but confirm that's the case and return SLEWING 

846 if (any(self._axesInMotion)): 

847 return TMAState.SLEWING 

848 

849 # if we want to differentiate between MOVING_POINT_TO_POINT moves, 

850 # JOGGING moves and regular slews, the logic in the step above needs to 

851 # be changed and the new steps added here. 

852 

853 raise RuntimeError('State error: fell through the state sieve - rewrite your logic!') 
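
A sketch of driving the state machine by hand, using the test helper `_initializeTma` and a hand-built row; the timestamp and column values are illustrative only:

    tma = TMAStateMachine(engineeringMode=True)
    assert tma.state == TMAState.UNINITIALIZED  # nothing applied yet
    _initializeTma(tma)  # test helper: both axes ON and STOPPED
    assert tma.state == TMAState.STOPPED and tma.canMove

    row = pd.Series({'private_efdStamp': 1690000000.0,
                     'rowFor': 'azimuthMotionState',
                     'state_azimuthMotionState': AxisMotionState.MOVING_POINT_TO_POINT.value})
    tma.apply(row)
    assert tma.state == TMAState.SLEWING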

854 

855 

856class TMAEventMaker: 

857 """A class to create per-dayObs TMAEvents for the TMA's movements. 

858 

859 If this class is being used in tests, make sure to pass the EFD client in, 

860 and create it with `makeEfdClient(testing=True)`. This ensures that the 

861 USDF EFD is "used" as this is the EFD which has the recorded data available 

862 in the test suite via `vcr`. 

863 

864 Example usage: 

865 >>> dayObs = 20230630 

866 >>> eventMaker = TMAEventMaker() 

867 >>> events = eventMaker.getEvents(dayObs) 

868 >>> print(f'Found {len(events)} events for {dayObs=}') 

869 

870 Parameters 

871 ---------- 

872 client : `lsst_efd_client.efd_helper.EfdClient`, optional 

873 The EFD client to use, created if not provided. 

874 """ 

875 # the topics which need logical combination to determine the overall mount 

876 # state. Will need updating as new components are added to the system. 

877 

878 # relevant column: 'state' 

879 _movingComponents = [ 

880 'lsst.sal.MTMount.logevent_azimuthMotionState', 

881 'lsst.sal.MTMount.logevent_elevationMotionState', 

882 ] 

883 

884 # relevant column: 'inPosition' 

885 _inPositionComponents = [ 

886 'lsst.sal.MTMount.logevent_azimuthInPosition', 

887 'lsst.sal.MTMount.logevent_elevationInPosition', 

888 ] 

889 

890 # the components which, if in fault, put the TMA into fault 

891 # relevant column: 'powerState' 

892 _stateComponents = [ 

893 'lsst.sal.MTMount.logevent_azimuthSystemState', 

894 'lsst.sal.MTMount.logevent_elevationSystemState', 

895 ] 

896 

897 def __init__(self, client=None): 

898 if client is not None: 

899 self.client = client 

900 else: 

901 self.client = makeEfdClient() 

902 self.log = logging.getLogger(__name__) 

903 self._data = {} 

904 

905 @dataclass(frozen=True) 

906 class ParsedState: 

907 eventStart: int 

908 eventEnd: int 

909 previousState: TMAState 

910 state: TMAState 

911 

912 @staticmethod 

913 def isToday(dayObs): 

914 """Find out if the specified dayObs is today, or in the past. 

915 

916 If the day is today, the function returns ``True``, if it is in the 

917 past it returns ``False``. If the day is in the future, a 

918 ``ValueError`` is raised, as this indicates there is likely an 

919 off-by-one type error somewhere in the logic. 

920 

921 Parameters 

922 ---------- 

923 dayObs : `int` 

924 The dayObs to check, in the format YYYYMMDD. 

925 

926 Returns 

927 ------- 

928 isToday : `bool` 

929 ``True`` if the dayObs is today, ``False`` if it is in the past. 

930 

931 Raises 

932 ValueError: if the dayObs is in the future. 

933 """ 

934 todayDayObs = getCurrentDayObs_int() 

935 if dayObs == todayDayObs: 

936 return True 

937 if dayObs > todayDayObs: 

938 raise ValueError("dayObs is in the future") 

939 return False 

940 

941 @staticmethod 

942 def _shortName(topic): 

943 """Get the short name of a topic. 

944 

945 Parameters 

946 ---------- 

947 topic : `str` 

948 The topic to get the short name of. 

949 

950 Returns 

951 ------- 

952 shortName : `str` 

953 The short name of the topic, e.g. 'azimuthInPosition' 

954 """ 

955 # get, for example 'azimuthInPosition' from 

956 # lsst.sal.MTMount.logevent_azimuthInPosition 

957 return topic.split('_')[-1] 

958 

959 def _mergeData(self, data): 

960 """Merge a dict of dataframes based on private_efdStamp, recording 

961 where each row came from. 

962 

963 Given a dict of dataframes, keyed by topic, merge them into a single 

964 dataframe, adding a column to record which topic each row came from. 

965 

966 Parameters 

967 ---------- 

968 data : `dict` of `str` : `pd.DataFrame` 

969 The dataframes to merge. 

970 

971 Returns 

972 ------- 

973 merged : `pd.DataFrame` 

974 The merged dataframe. 

975 """ 

976 excludeColumns = ['private_efdStamp', 'rowFor'] 

977 

978 mergeArgs = { 

979 'how': 'outer', 

980 'sort': True, 

981 } 

982 

983 merged = None 

984 originalRowCounter = 0 

985 

986 # Iterate over the keys and merge the corresponding DataFrames 

987 for key, df in data.items(): 

988 if df.empty: 

989 # Must skip the df if it's empty, otherwise the merge will fail 

990 # due to lack of private_efdStamp. Because other axes might 

991 # still be in motion, so we still want to merge what we have 

992 continue 

993 

994 originalRowCounter += len(df) 

995 component = self._shortName(key) # Add suffix to column names to identify the source 

996 suffix = '_' + component 

997 

998 df['rowFor'] = component 

999 

1000 columnsToSuffix = [col for col in df.columns if col not in excludeColumns] 

1001 df_to_suffix = df[columnsToSuffix].add_suffix(suffix) 

1002 df = pd.concat([df[excludeColumns], df_to_suffix], axis=1) 

1003 

1004 if merged is None: 

1005 merged = df.copy() 

1006 else: 

1007 merged = pd.merge(merged, df, **mergeArgs) 

1008 

1009 merged = merged.loc[:, ~merged.columns.duplicated()] # Remove duplicate columns after merge 

1010 

1011 if len(merged) != originalRowCounter: 

1012 self.log.warning("Merged data has a different number of rows to the original data, some" 

1013 " timestamps (rows) will contain more than one piece of actual information.") 

1014 

1015 # if the index is still a DatetimeIndex here then we didn't actually 

1016 # merge any data, so there is only data from a single component. 

1017 # This is likely to result in no events, but not necessarily, so for 

1018 # generality we instead convert to a range index to ensure consistency 

1019 # in the returned data, and allow processing to continue. 

1020 if isinstance(merged.index, pd.DatetimeIndex): 

1021 self.log.warning("Data was only found for a single component in the EFD.") 

1022 merged.reset_index(drop=True, inplace=True) 

1023 

1024 return merged 

1025 

1026 def getEvent(self, dayObs, seqNum): 

1027 """Get a specific event for a given dayObs and seqNum. 

1028 

1029 Repeated calls for the same ``dayObs`` will use the cached data if the 

1030 day is in the past, and so will be much quicker. If the ``dayObs`` is 

1031 the current day then the EFD will be queried for new data for each 

1032 call, so a call which returns ``None`` on the first try might return an 

1033 event on the next, if the TMA is still moving and thus generating 

1034 events. 

1035 

1036 Parameters 

1037 ---------- 

1038 dayObs : `int` 

1039 The dayObs to get the event for. 

1040 seqNum : `int` 

1041 The sequence number of the event to get. 

1042 

1043 Returns 

1044 ------- 

1045 event : `lsst.summit.utils.tmaUtils.TMAEvent` 

1046 The event for the specified dayObs and seqNum, or `None` if the 

1047 event was not found. 

1048 """ 

1049 events = self.getEvents(dayObs) 

1050 if seqNum < len(events): 

1051 event = events[seqNum] 

1052 if event.seqNum != seqNum: 

1053 # it's zero-indexed and contiguous so this must be true but 

1054 # a sanity check doesn't hurt. 

1055 raise AssertionError(f"Event sequence number mismatch: {event.seqNum} != {seqNum}") 

1056 return event 

1057 else: 

1058 self.log.warning(f"Event {seqNum} not found for {dayObs}") 

1059 return None 
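
A small sketch of fetching a single event; the dayObs and seqNum used here are hypothetical:

    eventMaker = TMAEventMaker()
    event = eventMaker.getEvent(20230630, 12)
    if event is not None:
        print(f"{event.type.name} event lasting {event.duration:.1f}s")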

1060 

1061 def getEvents(self, dayObs): 

1062 """Get the TMA events for the specified dayObs. 

1063 

1064 Gets the required mount data from the cache or the EFD as required, 

1065 handling whether we're working with live vs historical data. The 

1066 dataframes from the EFD are merged and applied to the TMAStateMachine, 

1067 and that series of state changes is used to generate a list of 

1068 TMAEvents for the day's data. 

1069 

1070 If the data is for the current day, i.e. if new events can potentially 

1071 land, then if the last event is "open" (meaning that the TMA appears to 

1072 be in motion and thus the event is growing with time), then that event 

1073 is excluded from the event list as it is expected to be changing with 

1074 time, and will likely close eventually. However, if that situation 

1075 occurs on a day in the past, then that event can never close, and the 

1076 event is therefore included, but a warning about the open event is 

1077 logged. 

1078 

1079 Parameters 

1080 ---------- 

1081 dayObs : `int` 

1082 The dayObs for which to get the events. 

1083 

1084 Returns 

1085 ------- 

1086 events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent` 

1087 The events for the specified dayObs. 

1088 """ 

1089 workingLive = self.isToday(dayObs) 

1090 data = None 

1091 

1092 if workingLive: 

1093 # it's potentially updating data, so we must update the data 

1094 # regardless of whether we have it already or not 

1095 self.log.info(f'Updating mount data for {dayObs} from the EFD') 

1096 self._getEfdDataForDayObs(dayObs) 

1097 data = self._data[dayObs] 

1098 elif dayObs in self._data: 

1099 # data is in the cache and it's not being updated, so use it 

1100 data = self._data[dayObs] 

1101 elif dayObs not in self._data: 

1102 # we don't have the data yet, but it's not growing, so put it in 

1103 # the cache and use it from there 

1104 self.log.info(f'Retrieving mount data for {dayObs} from the EFD') 

1105 self._getEfdDataForDayObs(dayObs) 

1106 data = self._data[dayObs] 

1107 else: 

1108 raise RuntimeError("This should never happen") 

1109 

1110 # if we don't have something to work with, log a warning and return 

1111 if not self.dataFound(data): 

1112 self.log.warning(f"No EFD data found for {dayObs=}") 

1113 return [] 

1114 

1115 # applies the data to the state machine, and generates events from the 

1116 # series of states which results 

1117 events = self._calculateEventsFromMergedData(data, dayObs, dataIsForCurrentDay=workingLive) 

1118 if not events: 

1119 self.log.warning(f"Failed to calculate any events for {dayObs=} despite EFD data existing!") 

1120 return events 

1121 

1122 @staticmethod 

1123 def dataFound(data): 

1124 """Check if any data was found. 

1125 

1126 Parameters 

1127 ---------- 

1128 data : `pd.DataFrame` 

1129 The merged dataframe to check. 

1130 

1131 Returns 

1132 ------- 

1133 dataFound : `bool` 

1134 Whether data was found. 

1135 """ 

1136 # You can't just compare with data == NO_DATA_SENTINEL because 

1137 # `data` is usually a dataframe, and you can't compare a dataframe to a 

1138 # string directly. 

1139 return not (isinstance(data, str) and data == NO_DATA_SENTINEL) 

1140 

1141 def _getEfdDataForDayObs(self, dayObs): 

1142 """Get the EFD data for the specified dayObs and store it in the cache. 

1143 

1144 Gets the EFD data for all components, as a dict of dataframes keyed by 

1145 component name. These are then merged into a single dataframe in time 

1146 order, based on each row's `private_efdStamp`. This is then stored in 

1147 self._data[dayObs]. 

1148 

1149 If no data is found, the value is set to ``NO_DATA_SENTINEL`` to 

1150 differentiate this from ``None``, as this is what you'd get if you 

1151 queried the cache with `self._data.get(dayObs)`. It also marks that we 

1152 have already queried this day. 

1153 

1154 Parameters 

1155 ---------- 

1156 dayObs : `int` 

1157 The dayObs to query. 

1158 """ 

1159 data = {} 

1160 for component in itertools.chain( 

1161 self._movingComponents, 

1162 self._inPositionComponents, 

1163 self._stateComponents 

1164 ): 

1165 data[component] = getEfdData(self.client, component, dayObs=dayObs, warn=False) 

1166 self.log.debug(f"Found {len(data[component])} for {component}") 

1167 

1168 if all(dataframe.empty for dataframe in data.values()): 

1169 # if every single dataframe is empty, set the sentinel and don't 

1170 # try to merge anything, otherwise merge all the data we found 

1171 self.log.debug(f"No data found for {dayObs=}") 

1172 # a sentinel value that's not None 

1173 self._data[dayObs] = NO_DATA_SENTINEL 

1174 else: 

1175 merged = self._mergeData(data) 

1176 self._data[dayObs] = merged 

1177 

1178 def _calculateEventsFromMergedData(self, data, dayObs, dataIsForCurrentDay): 

1179 """Calculate the list of events from the merged data. 

1180 

1181 Runs the merged data, row by row, through the TMA state machine (with 

1182 ``tma.apply``) to get the overall TMA state at each row, building a 

1183 dict of these states, keyed by row number. 

1184 

1185 This time-series of TMA states is then looped over (in 

1186 `_statesToEventTuples`), building a list of tuples representing the 

1187 start and end of each event, the type of the event, and the reason for 

1188 the event ending. 

1189 

1190 This list of tuples is then passed to ``_makeEventsFromStateTuples``, 

1191 which actually creates the ``TMAEvent`` objects. 

1192 

1193 Parameters 

1194 ---------- 

1195 data : `pd.DataFrame` 

1196 The merged dataframe to use. 

1197 dayObs : `int` 

1198 The dayObs for the data. 

1199 dataIsForCurrentDay : `bool` 

1200 Whether the data is for the current day. Determines whether to 

1201 allow an open last event or not. 

1202 

1203 Returns 

1204 ------- 

1205 events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent` 

1206 The events for the specified dayObs. 

1207 """ 

1208 engineeringMode = True 

1209 tma = TMAStateMachine(engineeringMode=engineeringMode) 

1210 

1211 # For now, we assume that the TMA starts each day able to move, but 

1212 # stationary. If this turns out to cause problems, we will need to 

1213 # change to loading data from the previous day(s), and looking back 

1214 # through it in time until a state change has been found for every 

1215 # axis. For now though, Bruno et al. think this is acceptable and 

1216 # preferable. 

1217 _initializeTma(tma) 

1218 

1219 tmaStates = {} 

1220 for rowNum, row in data.iterrows(): 

1221 tma.apply(row) 

1222 tmaStates[rowNum] = tma.state 

1223 

1224 stateTuples = self._statesToEventTuples(tmaStates, dataIsForCurrentDay) 

1225 events = self._makeEventsFromStateTuples(stateTuples, dayObs, data) 

1226 self.addBlockDataToEvents(dayObs, events) 

1227 return events 

1228 

1229 def _statesToEventTuples(self, states, dataIsForCurrentDay): 

1230 """Get the event-tuples from the dictionary of TMAStates. 

1231 

1232 Chunks the states into blocks of the same state, so that we can create 

1233 an event for each block in `_makeEventsFromStateTuples`. Off-type 

1234 states are skipped over, with each event starting when the telescope 

1235 next resumes motion or changes to a different type of motion state, 

1236 i.e. from non-tracking type movement (MOVING_POINT_TO_POINT, JOGGING, 

1237 TRACKING-but-not-in-position, i.e. slewing) to a tracking type 

1238 movement, or vice versa. 

1239 

1240 Parameters 

1241 ---------- 

1242 states : `dict` of `int` : `lsst.summit.utils.tmaUtils.TMAState` 

1243 The states of the TMA, keyed by row number. 

1244 dataIsForCurrentDay : `bool` 

1245 Whether the data is for the current day. Determines whether to 

1246 allow an open last event or not. 

1247 

1248 Returns 

1249 ------- 

1250 parsedStates : `list` of `ParsedState` 

1251 The parsed states, as a list of ``ParsedState`` objects of the form: 

1252 ``(eventStart, eventEnd, previousState, state)`` 

1253 """ 

1254 # Consider rewriting this with states as a list and using pop(0)? 

1255 skipStates = (TMAState.STOPPED, TMAState.OFF, TMAState.FAULT) 

1256 

1257 parsedStates = [] 

1258 eventStart = None 

1259 rowNum = 0 

1260 nRows = len(states) 

1261 while rowNum < nRows: 

1262 previousState = None 

1263 state = states[rowNum] 

1264 # if we're not in an event, fast forward through off-like rows 

1265 # until a new event starts 

1266 if eventStart is None and state in skipStates: 

1267 rowNum += 1 

1268 continue 

1269 

1270 # we've started a new event, so walk through it and find the end 

1271 eventStart = rowNum 

1272 previousState = state 

1273 rowNum += 1 # move to the next row before starting the while loop 

1274 if rowNum == nRows: 

1275 # we've reached the end of the data, and we're still in an 

1276 # event, so don't return this presumably in-progress event 

1277 self.log.warning('Reached the end of the data while starting a new event') 

1278 break 

1279 state = states[rowNum] 

1280 while state == previousState: 

1281 rowNum += 1 

1282 if rowNum == nRows: 

1283 break 

1284 state = states[rowNum] 

1285 parsedStates.append( 

1286 self.ParsedState( 

1287 eventStart=eventStart, 

1288 eventEnd=rowNum, 

1289 previousState=previousState, 

1290 state=state 

1291 ) 

1292 ) 

1293 if state in skipStates: 

1294 eventStart = None 

1295 

1296 # done parsing, just check the last event is valid 

1297 if parsedStates: # ensure we have at least one event 

1298 lastEvent = parsedStates[-1] 

1299 if lastEvent.eventEnd == nRows: 

1300 # Generally, you *want* the timespan for an event to be the 

1301 # first row of the next event, because you were in that state 

1302 # right up until that state change. However, if that event is 

1303 # a) the last one of the day and b) runs right up until the end 

1304 # of the dataframe, then there isn't another row, so this will 

1305 # overrun the array. 

1306 # 

1307 # If the data is for the current day then this isn't a worry, 

1308 # as we're likely still taking data, and this event will likely 

1309 # close yet, so we don't issue a warning, and simply drop the 

1310 # event from the list. 

1311 

1312 # However, if the data is for a past day then no new data will 

1313 # come to close the event, so allow the event to be "open", and 

1314 # issue a warning 

1315 if dataIsForCurrentDay: 

1316 self.log.info("Discarding open (likely in-progess) final event from current day's events") 

1317 parsedStates = parsedStates[:-1] 

1318 else: 

1319 self.log.warning("Last event ends open, forcing it to end at end of the day's data") 

1320 # it's a frozen dataclass, so (deliberately) awkward to modify 

1321 parsedStates[-1] = self.ParsedState( 

1322 eventStart=lastEvent.eventStart, 

1323 eventEnd=lastEvent.eventEnd - 1, 

1324 previousState=lastEvent.previousState, 

1325 state=lastEvent.state 

1326 ) 

1327 

1328 return parsedStates 
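
To illustrate the chunking, a sketch calling the private method directly with a toy state dictionary (assuming `eventMaker` is a `TMAEventMaker` instance):

    states = {0: TMAState.STOPPED, 1: TMAState.SLEWING, 2: TMAState.SLEWING,
              3: TMAState.TRACKING, 4: TMAState.STOPPED}
    chunks = eventMaker._statesToEventTuples(states, dataIsForCurrentDay=False)
    # yields two ParsedStates: rows 1-3 (SLEWING, ending because TRACKING began)
    # and rows 3-4 (TRACKING, ending because the TMA STOPPED)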

1329 

1330 def addBlockDataToEvents(self, dayObs, events): 

1331 """Find all the block data in the EFD for the specified events. 

1332 

1333 Finds all the block data in the EFD relating to the events, parses it 

1334 from the rows of the dataframe, and adds it to the events in place. 

1335 

1336 Parameters 

1337 ---------- 

1338 events : `lsst.summit.utils.tmaUtils.TMAEvent` or 

1339 `list` of `lsst.summit.utils.tmaUtils.TMAEvent` 

1340 One or more events to get the block data for. 

1341 """ 

1342 try: 

1343 blockParser = BlockParser(dayObs, client=self.client) 

1344 except Exception as e: 

1345 # adding the block data should never cause a failure so if we can't 

1346 # get the block data, log a warning and return. It is, however, 

1347 # never expected, so use log.exception to get the full traceback 

1348 # and scare users so it gets reported 

1349 self.log.exception(f'Failed to parse block data for {dayObs=}, {e}') 

1350 return 

1351 blocks = blockParser.getBlockNums() 

1352 blockDict = {} 

1353 for block in blocks: 

1354 blockDict[block] = blockParser.getSeqNums(block) 

1355 

1356 for block, seqNums in blockDict.items(): 

1357 for seqNum in seqNums: 

1358 blockInfo = blockParser.getBlockInfo(block=block, seqNum=seqNum) 

1359 

1360 relatedEvents = blockParser.getEventsForBlock(events, block=block, seqNum=seqNum) 

1361 for event in relatedEvents: 

1362 toSet = [blockInfo] 

1363 if event.blockInfos is not None: 

1364 existingInfo = event.blockInfos 

1365 existingInfo.append(blockInfo) 

1366 toSet = existingInfo 

1367 

1368 # Add the blockInfo to the TMAEvent. Because this is a 

1369 # frozen dataclass, use object.__setattr__ to set the 

1370 # attribute. This is the correct way to set a frozen 

1371 # dataclass attribute after creation. 

1372 object.__setattr__(event, 'blockInfos', toSet) 
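
The `object.__setattr__` escape hatch is needed because normal attribute assignment on a frozen dataclass raises; a small sketch of the behaviour, assuming `event` is a `TMAEvent`:

    import dataclasses

    try:
        event.blockInfos = []  # frozen dataclass: this raises
    except dataclasses.FrozenInstanceError:
        object.__setattr__(event, 'blockInfos', [])  # the pattern used above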

1373 

1374 def _makeEventsFromStateTuples(self, states, dayObs, data): 

1375 """For the list of state-tuples, create a list of ``TMAEvent`` objects. 

1376 

1377 Given the underlying data, and the start/stop points for each event, 

1378 create the TMAEvent objects for the dayObs. 

1379 

1380 Parameters 

1381 ---------- 

1382 states : `list` of `ParsedState` 

1383 The parsed states, as a list of ``ParsedState`` objects of the form: 

1384 ``(eventStart, eventEnd, previousState, state)`` 

1385 dayObs : `int` 

1386 The dayObs for the data. 

1387 data : `pd.DataFrame` 

1388 The merged dataframe. 

1389 

1390 Returns 

1391 ------- 

1392 events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent` 

1393 The events for the specified dayObs. 

1394 """ 

1395 seqNum = 0 

1396 events = [] 

1397 for parsedState in states: 

1398 begin = data.iloc[parsedState.eventStart]['private_efdStamp'] 

1399 end = data.iloc[parsedState.eventEnd]['private_efdStamp'] 

1400 beginAstropy = efdTimestampToAstropy(begin) 

1401 endAstropy = efdTimestampToAstropy(end) 

1402 duration = end - begin 

1403 event = TMAEvent( 

1404 dayObs=dayObs, 

1405 seqNum=seqNum, 

1406 type=parsedState.previousState, 

1407 endReason=parsedState.state, 

1408 duration=duration, 

1409 begin=beginAstropy, 

1410 end=endAstropy, 

1411 blockInfos=None, # this is added later 

1412 _startRow=parsedState.eventStart, 

1413 _endRow=parsedState.eventEnd, 

1414 ) 

1415 events.append(event) 

1416 seqNum += 1 

1417 return events 

1418 

1419 @staticmethod 

1420 def printTmaDetailedState(tma): 

1421 """Print the full state of all the components of the TMA. 

1422 

1423 Currently this is the azimuth and elevation axes' power and motion 

1424 states, and their respective inPosition statuses. 

1425 

1426 Parameters 

1427 ---------- 

1428 tma : `lsst.summit.utils.tmaUtils.TMAStateMachine` 

1429 The TMA state machine in the state we want to print. 

1430 """ 

1431 axes = ['azimuth', 'elevation'] 

1432 p = tma._parts 

1433 axisPad = len(max(axes, key=len)) # length of the longest axis string == 9 here, but this is general 

1434 motionPad = max(len(s.name) for s in AxisMotionState) 

1435 powerPad = max(len(s.name) for s in PowerState) 

1436 

1437 # example output to show what's being done with the padding: 

1438 # azimuth - Power: ON Motion: STOPPED InPosition: True # noqa: W505 

1439 # elevation - Power: ON Motion: MOVING_POINT_TO_POINT InPosition: False # noqa: W505 

1440 for axis in axes: 

1441 print(f"{axis:>{axisPad}} - " 

1442 f"Power: {p[f'{axis}SystemState'].name:>{powerPad}} " 

1443 f"Motion: {p[f'{axis}MotionState'].name:>{motionPad}} " 

1444 f"InPosition: {p[f'{axis}InPosition']}") 

1445 print(f"Overall system state: {tma.state.name}") 

1446 

1447 def printFullDayStateEvolution(self, dayObs, taiOrUtc='utc'): 

1448 """Print the full TMA state evolution for the specified dayObs. 

1449 

1450 Replays all the data from the EFD for the specified dayObs through 

1451 the TMA state machine, and prints both the overall and detailed state 

1452 of the TMA for each row. 

1453 

1454 Parameters 

1455 ---------- 

1456 dayObs : `int` 

1457 The dayObs for which to print the state evolution. 

1458 taiOrUtc : `str`, optional 

1459 Whether to print the timestamps in TAI or UTC. Default is UTC. 

1460 """ 

1461 # create a fake event which spans the whole day, and then use 

1462 # printEventDetails code while skipping the header to print the 

1463 # evolution. 

1464 _ = self.getEvents(dayObs) # ensure the data has been retrieved from the EFD 

1465 data = self._data[dayObs] 

1466 lastRowNum = len(data) - 1 

1467 

1468 fakeEvent = TMAEvent( 

1469 dayObs=dayObs, 

1470 seqNum=-1, # anything will do 

1471 type=TMAState.OFF, # anything will do 

1472 endReason=TMAState.OFF, # anything will do 

1473 duration=-1, # anything will do 

1474 begin=efdTimestampToAstropy(data.iloc[0]['private_efdStamp']), 

1475 end=efdTimestampToAstropy(data.iloc[-1]['private_efdStamp']), 

1476 _startRow=0, 

1477 _endRow=lastRowNum 

1478 ) 

1479 self.printEventDetails(fakeEvent, taiOrUtc=taiOrUtc, printHeader=False) 

1480 
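# Illustrative sketch (editor-added): replaying an entire day through the state
# machine and printing every transition. The dayObs value is hypothetical, and
# the no-argument construction of the event maker is an assumption.
#
#     eventMaker = TMAEventMaker()
#     eventMaker.printFullDayStateEvolution(20231201, taiOrUtc='utc')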

1481 def printEventDetails(self, event, taiOrUtc='tai', printHeader=True): 

1482 """Print a detailed breakdown of all state transitions during an event. 

1483 

1484 Note: this is not the most efficient way to do this, but it is much the 

1485 cleanest with respect to the actual state machine application and event 

1486 generation code, and is easily fast enough for the cases it will be 

1487 used for. It is not worth complicating the normal state machine logic 

1488 to try to use this code. 

1489 

1490 Parameters 

1491 ---------- 

1492 event : `lsst.summit.utils.tmaUtils.TMAEvent` 

1493 The event to display the details of. 

1494 taiOrUtc : `str`, optional 

1495 Whether to display time strings in TAI or UTC. Defaults to TAI. 

1496 Case insensitive. 

1497 printHeader : `bool`, optional 

1498 Whether to print the event summary. Defaults to True. The primary 

1499 reason for the existence of this option is so that this same 

1500 printing function can be used to show the evolution of a whole day 

1501 by supplying a fake event which spans the whole day, but this event 

1502 necessarily has a meaningless summary, and so needs suppressing. 

1503 """ 

1504 taiOrUtc = taiOrUtc.lower() 

1505 if taiOrUtc not in ['tai', 'utc']: 

1506 raise ValueError(f'Got unsupported value for {taiOrUtc=}') 

1507 useUtc = taiOrUtc == 'utc' 

1508 

1509 if printHeader: 

1510 print(f"Details for {event.duration:.2f}s {event.type.name} event dayObs={event.dayObs}" 

1511 f" seqNum={event.seqNum}:") 

1512 print(f"- Event began at: {event.begin.utc.isot if useUtc else event.begin.isot}") 

1513 print(f"- Event ended at: {event.end.utc.isot if useUtc else event.end.isot}") 

1514 

1515 dayObs = event.dayObs 

1516 data = self._data[dayObs] 

1517 startRow = event._startRow 

1518 endRow = event._endRow 

1519 nRowsToApply = endRow - startRow + 1 

1520 print(f"\nTotal number of rows in the merged dataframe: {len(data)}") 

1521 if printHeader: 

1522 print(f"of which rows {startRow} to {endRow} (inclusive) relate to this event.") 

1523 

1524 # reconstruct all the states 

1525 tma = TMAStateMachine(engineeringMode=True) 

1526 _initializeTma(tma) 

1527 

1528 tmaStates = {} 

1529 firstAppliedRow = True # flag to print a header on the first row that's applied 

1530 for rowNum, row in data.iterrows(): # must replay rows right from start to get full correct state 

1531 if rowNum == startRow: 

1532 # we've not yet applied this row, so this is the state just 

1533 # before event 

1534 print(f"\nBefore the event the TMA was in state {tma.state.name}:") 

1535 self.printTmaDetailedState(tma) 

1536 

1537 if rowNum >= startRow and rowNum <= endRow: 

1538 if firstAppliedRow: # only print this intro on the first row we're applying 

1539 print(f"\nThen, applying the {nRowsToApply} rows of data for this event, the state" 

1540 " evolved as follows:\n") 

1541 firstAppliedRow = False 

1542 

1543 # break the row down and print its details 

1544 rowFor = row['rowFor'] 

1545 axis, rowType = getAxisAndType(rowFor) # e.g. elevation, MotionState 

1546 value = tma._getRowPayload(row, rowType, rowFor) 

1547 valueStr = f"{str(value) if isinstance(value, bool) else value.name}" 

1548 rowTime = efdTimestampToAstropy(row['private_efdStamp']) 

1549 print(f"On row {rowNum} the {axis} axis had the {rowType} set to {valueStr} at" 

1550 f" {rowTime.utc.isot if useUtc else rowTime.isot}") 

1551 

1552 # then apply it as usual, printing the state right afterwards 

1553 tma.apply(row) 

1554 tmaStates[rowNum] = tma.state 

1555 self.printTmaDetailedState(tma) 

1556 print() 

1557 

1558 else: 

1559 # if it's not in the range of interest then just apply it 

1560 # silently as usual 

1561 tma.apply(row) 

1562 tmaStates[rowNum] = tma.state 

1563 
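# Illustrative sketch (editor-added): printing the row-by-row breakdown for a
# single real event. `eventMaker` and the dayObs value are assumptions, as in
# the sketches above.
#
#     events = eventMaker.getEvents(20231201)
#     if events:
#         eventMaker.printEventDetails(events[0], taiOrUtc='utc')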

1564 def findEvent(self, time): 

1565 """Find the event which contains the specified time. 

1566 

1567 If the specified time lies within an event, that event is returned. If 

1568 it is at the exact start, that is logged, and if that start point is 

1569 shared by the end of the previous event, that is logged too. If the 

1570 time lies between events, the events on either side are logged, but 

1571 ``None`` is returned. If the time lies before the first event of the 

1572 day, a warning is logged, as is done for times after the last event of the day. 

1573 

1574 Parameters 

1575 ---------- 

1576 time : `astropy.time.Time` 

1577 The time. 

1578 

1579 Returns 

1580 ------- 

1581 event : `lsst.summit.utils.tmaUtils.TMAEvent` or `None` 

1582 The event which contains the specified time, or ``None`` if the 

1583 time doesn't fall during an event. 

1584 """ 

1585 # there are five possible cases: 

1586 # 1) the time lies before the first event of the day 

1587 # 2) the time lies after the last event of the day 

1588 # 3) the time lies within an event 

1589 # 3a) the time is exactly at the start of an event 

1590 # 3b) if so, time can be shared by the end of the previous event if 

1591 # they are contiguous 

1592 # 4) the time lies between two events 

1593 # 5) the time is exactly at the end of the last event of the day. This is 

1594 # an issue because event end times are exclusive, so this time is 

1595 # not technically in that event, it's the moment it closes (and if 

1596 # there *was* an event which followed contiguously, it would be in 

1597 # that event instead, which is what motivates this definition of 

1598 # which event a time lies within) 

1599 

1600 dayObs = getDayObsForTime(time) 

1601 # we know this is on the right day, and definitely before the specified 

1602 # time, but sanity check this before continuing as this needs to be 

1603 # true for this to give the correct answer 

1604 assert getDayObsStartTime(dayObs) <= time 

1605 assert getDayObsEndTime(dayObs) > time 

1606 

1607 # common start to many log messages, so define it once here 

1608 logStart = f"Specified time {time.isot} falls on {dayObs=}" 

1609 

1610 events = self.getEvents(dayObs) 

1611 if len(events) == 0: 

1612 self.log.warning(f'There are no events found for {dayObs}') 

1613 return None 

1614 

1615 # check case 1) 

1616 if time < events[0].begin: 

1617 self.log.warning(f'{logStart} and is before the first event of the day') 

1618 return None 

1619 

1620 # check case 2) 

1621 if time > events[-1].end: 

1622 self.log.warning(f'{logStart} and is after the last event of the day') 

1623 return None 

1624 

1625 # check case 5) 

1626 if time == events[-1].end: 

1627 self.log.warning(f'{logStart} and is exactly at the end of the last event of the day' 

1628 f' (seqnum={events[-1].seqNum}). Because event intervals are half-open, this' 

1629 ' time does not technically lie in any event') 

1630 return None 

1631 

1632 # we are now either in an event, or between events. Walk through the 

1633 # events, and if the end of the event is after the specified time, then 

1634 # we're either in it or past it, so check if we're in. 

1635 for eventNum, event in enumerate(events): 

1636 if event.end > time: # case 3) we are now into or past the right event 

1637 # the event end must be strictly after the time (note > and not >=); 

1638 # the case where the time sits exactly on an event boundary is 

1639 # handled explicitly below 

1640 if time >= event.begin: # we're fully inside the event, so return it. 

1641 # 3a) before returning, check if we're exactly at the start 

1642 # of the event, and if so, log it. Then 3b) also check if 

1643 # we're at the exact end of the previous event, and if so, 

1644 # log that too. 

1645 if time == event.begin: 

1646 self.log.info(f"{logStart} and is exactly at the start of event" 

1647 f" {eventNum}") 

1648 if eventNum == 0: # I think this is actually impossible, but check anyway 

1649 return event # can't check the previous event so return here 

1650 previousEvent = events[eventNum - 1] 

1651 if previousEvent.end == time: 

1652 self.log.info("Previous event is contiguous, so this time is also at the exact" 

1653 f" end of {eventNum - 1}") 

1654 return event 

1655 else: # case 4) 

1656 # the event end is past the time, but it's not inside the 

1657 # event, so we're between events. Log which we're between 

1658 # and return None 

1659 previousEvent = events[eventNum - 1] 

1660 timeAfterPrev = (time - previousEvent.end).to_datetime() 

1661 naturalTimeAfterPrev = humanize.naturaldelta(timeAfterPrev, minimum_unit='MICROSECONDS') 

1662 timeBeforeCurrent = (event.begin - time).to_datetime() 

1663 naturalTimeBeforeCurrent = humanize.naturaldelta(timeBeforeCurrent, 

1664 minimum_unit='MICROSECONDS') 

1665 self.log.info(f"{logStart} and lies" 

1666 f" {naturalTimeAfterPrev} after the end of event {previousEvent.seqNum}" 

1667 f" and {naturalTimeBeforeCurrent} before the start of event {event.seqNum}." 

1668 ) 

1669 return None 

1670 

1671 raise RuntimeError('Event finding logic fundamentally failed, which should never happen - the code' 

1672 ' needs fixing')
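# Illustrative sketch (editor-added): looking up which event, if any, contains
# a given time. `Time` here is astropy.time.Time; the timestamp and
# `eventMaker` are assumptions.
#
#     t = Time('2023-12-01T03:15:00', scale='utc')
#     event = eventMaker.findEvent(t)
#     if event is None:
#         print('time does not fall within any event')
#     else:
#         print(f'time falls in {event.type.name} event seqNum={event.seqNum}')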