Coverage for python/lsst/summit/utils/tmaUtils.py: 20%
559 statements
1# This file is part of summit_utils.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
22import re
23import enum
24import itertools
25import logging
26import pandas as pd
27import numpy as np
28import humanize
29from dataclasses import dataclass
30from astropy.time import Time
31from matplotlib.ticker import FuncFormatter
32import matplotlib.dates as mdates
33import matplotlib.pyplot as plt
34from lsst.utils.iteration import ensure_iterable
36from .enums import AxisMotionState, PowerState
37from .blockUtils import BlockParser
38from .utils import getCurrentDayObs_int, dayObsIntToString
39from .efdUtils import (getEfdData,
40 makeEfdClient,
41 efdTimestampToAstropy,
42 COMMAND_ALIASES,
43 getDayObsForTime,
44 getDayObsStartTime,
45 getDayObsEndTime,
46 )
48__all__ = (
49 'TMAStateMachine',
50 'TMAEvent',
51 'TMAEventMaker',
52 'TMAState',
53 'AxisMotionState',
54 'PowerState',
55 'getSlewsFromEventList',
56 'getTracksFromEventList',
57 'getTorqueMaxima',
58)
60# we don't want to use `None` for a no data sentinel because dict.get('key')
61# returns None if the key isn't present, and also we need to mark that the data
62# was queried for and no data was found, whereas the key not being present
63# means that we've not yet looked for the data.
64NO_DATA_SENTINEL = "NODATA"
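# An illustrative sketch (not executed) of the cache semantics this enables in
# TMAEventMaker below:
#     TMAEventMaker._data.get(dayObs) is None             -> this dayObs has never been queried
#     TMAEventMaker._data.get(dayObs) == NO_DATA_SENTINEL -> queried, but the EFD held no data
#     anything else                                       -> the merged dataframe for that dayObs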
67def getSlewsFromEventList(events):
68 """Get the slew events from a list of TMAEvents.
70 Parameters
71 ----------
72 events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent`
73 The list of events to filter.
75 Returns
76 -------
77 events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent`
78 The filtered list of events.
79 """
80 return [e for e in events if e.type == TMAState.SLEWING]
83def getTracksFromEventList(events):
84 """Get the tracking events from a list of TMAEvents.
86 Parameters
87 ----------
88 events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent`
89 The list of events to filter.
91 Returns
92 -------
93 events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent`
94 The filtered list of events.
95 """
96 return [e for e in events if e.type == TMAState.TRACKING]
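# A minimal usage sketch for these two filters (assumes an EFD connection is
# available to the TMAEventMaker defined later in this module):
#     eventMaker = TMAEventMaker()
#     events = eventMaker.getEvents(20230630)
#     slews = getSlewsFromEventList(events)
#     tracks = getTracksFromEventList(events)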
99def getTorqueMaxima(table):
100 """Print the maximum positive and negative azimuth and elevation torques.
102 Designed to be used with the table as downloaded from RubinTV.
104 Parameters
105 ----------
106 table : `pd.DataFrame`
107 The table of data to use, as generated by Rapid Analysis.
108 """
109 for axis in ['elevation', 'azimuth']:
110 col = f'Largest {axis} torque'
111 maxPos = np.argmax(table[col])
112 maxVal = table[col].iloc[maxPos]
113 print(f"Max positive {axis:9} torque during seqNum {maxPos:>4}: {maxVal/1000:>7.1f}kNm")
114 minPos = np.argmin(table[col])
115 minVal = table[col].iloc[minPos]
116 print(f"Max negative {axis:9} torque during seqNum {minPos:>4}: {minVal/1000:>7.1f}kNm")
119def getAzimuthElevationDataForEvent(client, event, prePadding=0, postPadding=0):
120 """Get the data for the az/el telemetry topics for a given TMAEvent.
122 Parameters
123 ----------
124 client : `lsst_efd_client.efd_helper.EfdClient`
125 The EFD client to use.
126 event : `lsst.summit.utils.tmaUtils.TMAEvent`
127 The event to get the data for.
128 prePadding : `float`, optional
129 The amount of time to pad the event with before the start time, in
130 seconds.
131 postPadding : `float`, optional
132 The amount of time to pad the event with after the end time, in
133 seconds.
135 Returns
136 -------
137 azimuthData : `pd.DataFrame`
138 The azimuth data for the specified event.
139 elevationData : `pd.DataFrame`
140 The elevation data for the specified event.
141 """
142 azimuthData = getEfdData(client,
143 'lsst.sal.MTMount.azimuth',
144 event=event,
145 prePadding=prePadding,
146 postPadding=postPadding)
147 elevationData = getEfdData(client,
148 'lsst.sal.MTMount.elevation',
149 event=event,
150 prePadding=prePadding,
151 postPadding=postPadding)
153 return azimuthData, elevationData
156def plotEvent(client, event, fig=None, prePadding=0, postPadding=0, commands={},
157 azimuthData=None, elevationData=None):
158 """Plot the TMA axis positions over the course of a given TMAEvent.
160 Plots the axis motion profiles for the given event, with optional padding
161 at the start and end of the event. If the data is provided via the
162 azimuthData and elevationData parameters, it will be used, otherwise it
163 will be queried from the EFD.
165 Optionally plots any commands issued during or around the event, if these
166 are supplied. Commands are supplied as a dictionary of the command topic
 167 strings, with values as astropy.time.Time objects at which the command was
168 issued.
170 Parameters
171 ----------
172 client : `lsst_efd_client.efd_helper.EfdClient`
173 The EFD client to use.
174 event : `lsst.summit.utils.tmaUtils.TMAEvent`
175 The event to plot.
176 fig : `matplotlib.figure.Figure`, optional
177 The figure to plot on. If not specified, a new figure will be created.
178 prePadding : `float`, optional
179 The amount of time to pad the event with before the start time, in
180 seconds.
181 postPadding : `float`, optional
182 The amount of time to pad the event with after the end time, in
183 seconds.
184 commands : `dict` of `str` : `astropy.time.Time`, optional
185 A dictionary of commands to plot on the figure. The keys are the topic
186 names, and the values are the times at which the commands were sent.
187 azimuthData : `pd.DataFrame`, optional
188 The azimuth data to plot. If not specified, it will be queried from the
189 EFD.
190 elevationData : `pd.DataFrame`, optional
191 The elevation data to plot. If not specified, it will be queried from
192 the EFD.
194 Returns
195 -------
196 fig : `matplotlib.figure.Figure`
197 The figure on which the plot was made.
198 """
199 def tickFormatter(value, tick_number):
200 # Convert the value to a string without subtracting large numbers
201 # tick_number is unused.
202 return f"{value:.2f}"
204 # plot any commands we might have
205 if not isinstance(commands, dict):
206 raise TypeError('commands must be a dict of command names with values as'
207 ' astropy.time.Time values')
209 if fig is None:
210 fig = plt.figure(figsize=(10, 8))
211 log = logging.getLogger(__name__)
212 log.warning("Making new matplotlib figure - if this is in a loop you're going to have a bad time."
213 " Pass in a figure with fig = plt.figure(figsize=(10, 8)) to avoid this warning.")
215 fig.clear()
216 ax1, ax2 = fig.subplots(2,
217 sharex=True,
218 gridspec_kw={'wspace': 0,
219 'hspace': 0,
220 'height_ratios': [2.5, 1]})
222 if azimuthData is None or elevationData is None:
223 azimuthData, elevationData = getAzimuthElevationDataForEvent(client,
224 event,
225 prePadding=prePadding,
226 postPadding=postPadding)
228 # Use the native color cycle for the lines. Because they're on different
229 # axes they don't cycle by themselves
230 lineColors = [p['color'] for p in plt.rcParams['axes.prop_cycle']]
231 colorCounter = 0
233 ax1.plot(azimuthData['actualPosition'], label='Azimuth position', c=lineColors[colorCounter])
234 colorCounter += 1
235 ax1.yaxis.set_major_formatter(FuncFormatter(tickFormatter))
236 ax1.set_ylabel('Azimuth (degrees)')
238 ax1_twin = ax1.twinx()
239 ax1_twin.plot(elevationData['actualPosition'], label='Elevation position', c=lineColors[colorCounter])
240 colorCounter += 1
241 ax1_twin.yaxis.set_major_formatter(FuncFormatter(tickFormatter))
242 ax1_twin.set_ylabel('Elevation (degrees)')
243 ax1.set_xticks([]) # remove x tick labels on the hidden upper x-axis
245 ax2_twin = ax2.twinx()
246 ax2.plot(azimuthData['actualTorque'], label='Azimuth torque', c=lineColors[colorCounter])
247 colorCounter += 1
248 ax2_twin.plot(elevationData['actualTorque'], label='Elevation torque', c=lineColors[colorCounter])
249 colorCounter += 1
250 ax2.set_ylabel('Azimuth torque (Nm)')
251 ax2_twin.set_ylabel('Elevation torque (Nm)')
252 ax2.set_xlabel('Time (UTC)') # yes, it really is UTC, matplotlib converts this automatically!
254 # put the ticks at an angle, and right align with the tick marks
 255 ax2.set_xticks(ax2.get_xticks()) # needed to suppress a user warning
256 xlabels = ax2.get_xticks()
257 ax2.set_xticklabels(xlabels, rotation=40, ha='right')
258 ax2.xaxis.set_major_locator(mdates.AutoDateLocator())
259 ax2.xaxis.set_major_formatter(mdates.DateFormatter('%H:%M:%S'))
261 if prePadding or postPadding:
262 # note the conversion to utc because the x-axis from the dataframe
263 # already got automagically converted when plotting before, so this is
264 # necessary for things to line up
265 ax1_twin.axvline(event.begin.utc.datetime, c='k', ls='--', alpha=0.5, label='Event begin/end')
266 ax1_twin.axvline(event.end.utc.datetime, c='k', ls='--', alpha=0.5)
267 # extend lines down across lower plot, but do not re-add label
268 ax2_twin.axvline(event.begin.utc.datetime, c='k', ls='--', alpha=0.5)
269 ax2_twin.axvline(event.end.utc.datetime, c='k', ls='--', alpha=0.5)
271 for command, commandTime in commands.items():
272 # if commands weren't found, the item is set to None. This is common
273 # for events so handle it gracefully and silently. The command finding
274 # code logs about lack of commands found so no need to mention here.
275 if commandTime is None:
276 continue
277 ax1_twin.axvline(commandTime.utc.datetime, c=lineColors[colorCounter],
278 ls='--', alpha=0.75, label=f'{command}')
279 # extend lines down across lower plot, but do not re-add label
280 ax2_twin.axvline(commandTime.utc.datetime, c=lineColors[colorCounter],
281 ls='--', alpha=0.75)
282 colorCounter += 1
284 # combine the legends and put inside the plot
285 handles1a, labels1a = ax1.get_legend_handles_labels()
286 handles1b, labels1b = ax1_twin.get_legend_handles_labels()
287 handles2a, labels2a = ax2.get_legend_handles_labels()
288 handles2b, labels2b = ax2_twin.get_legend_handles_labels()
290 handles = handles1a + handles1b + handles2a + handles2b
291 labels = labels1a + labels1b + labels2a + labels2b
292 # ax2 is "in front" of ax1 because it has the vlines plotted on it, and
293 # vlines are on ax2 so that they appear at the bottom of the legend, so
294 # make sure to plot the legend on ax2, otherwise the vlines will go on top
295 # of the otherwise-opaque legend.
296 ax1_twin.legend(handles, labels, facecolor='white', framealpha=1)
298 # Add title with the event name, type etc
299 dayObsStr = dayObsIntToString(event.dayObs)
300 title = (f"{dayObsStr} - seqNum {event.seqNum} (version {event.version})" # top line, rest below
301 f"\nDuration = {event.duration:.2f}s"
302 f" Event type: {event.type.name}"
303 f" End reason: {event.endReason.name}"
304 )
305 ax1_twin.set_title(title)
306 return fig
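# A hedged usage sketch for plotEvent (assumes a working EFD client; the
# dayObs/seqNum values are placeholders). Reusing a figure avoids the warning
# emitted above when fig is None:
#     client = makeEfdClient()
#     eventMaker = TMAEventMaker(client)
#     event = eventMaker.getEvent(20230630, 100)
#     fig = plt.figure(figsize=(10, 8))
#     fig = plotEvent(client, event, fig=fig, prePadding=5, postPadding=5)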
309def getCommandsDuringEvent(client, event, commands=('raDecTarget',), log=None, doLog=True):
310 """Get the commands issued during an event.
312 Get the times at which the specified commands were issued during the event.
314 Parameters
315 ----------
316 client : `lsst_efd_client.efd_helper.EfdClient`
317 The EFD client to use.
318 event : `lsst.summit.utils.tmaUtils.TMAEvent`
319 The event to plot.
320 commands : `list` of `str`, optional
321 The commands or command aliases to look for. Defaults to
322 ['raDecTarget'].
323 log : `logging.Logger`, optional
324 The logger to use. If not specified, a new logger will be created if
325 needed.
326 doLog : `bool`, optional
327 Whether to log messages. Defaults to True.
329 Returns
330 -------
331 commands : `dict` of `str` : `astropy.time.Time`
332 A dictionary of the commands and the times at which they were issued.
333 """
334 # TODO: DM-40100 Add support for padding the event here to allow looking
335 # for triggering commands before the event
337 # TODO: DM-40100 Change this to always return a list of times, and remove
338 # warning about finding multiple commands. Remember to update docs and
339 # plotting code.
340 if log is None and doLog:
341 log = logging.getLogger(__name__)
343 commands = ensure_iterable(commands)
344 fullCommands = [c if c not in COMMAND_ALIASES else COMMAND_ALIASES[c] for c in commands]
345 del commands # make sure we always use their full names
347 ret = {}
348 for command in fullCommands:
349 data = getEfdData(client, command, event=event, warn=False)
350 if data.empty:
351 if doLog:
352 log.info(f'Found no command issued for {command} during event')
353 ret[command] = None
354 elif len(data) > 1:
355 if doLog:
356 log.warning(f'Found multiple commands issued for {command} during event, returning None')
357 ret[command] = None
358 else:
359 assert len(data) == 1 # this must be true now
 360 commandTime = data.iloc[0]['private_efdStamp']
361 ret[command] = Time(commandTime, format='unix')
363 return ret
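# Sketch of feeding the found command times into plotEvent (assumes the client,
# event and fig from the plotEvent example above; 'raDecTarget' is resolved to
# its full topic name via COMMAND_ALIASES):
#     commands = getCommandsDuringEvent(client, event, commands=('raDecTarget',))
#     fig = plotEvent(client, event, fig=fig, commands=commands)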
366def _initializeTma(tma):
367 """Helper function to turn a TMA into a valid state for testing.
369 Do not call directly in normal usage or code, as this just arbitrarily
370 sets values to make the TMA valid.
372 Parameters
373 ----------
374 tma : `lsst.summit.utils.tmaUtils.TMAStateMachine`
375 The TMA state machine model to initialize.
376 """
377 tma._parts['azimuthInPosition'] = False
378 tma._parts['azimuthMotionState'] = AxisMotionState.STOPPED
379 tma._parts['azimuthSystemState'] = PowerState.ON
380 tma._parts['elevationInPosition'] = False
381 tma._parts['elevationMotionState'] = AxisMotionState.STOPPED
382 tma._parts['elevationSystemState'] = PowerState.ON
385@dataclass(kw_only=True, frozen=True)
386class TMAEvent:
387 """A movement event for the TMA.
 389 Contains the dayObs on which the event occurred, using the standard
390 observatory definition of the dayObs, and the sequence number of the event,
391 which is unique for each event on a given dayObs.
393 The event type can be either 'SLEWING' or 'TRACKING', defined as:
394 - SLEWING: some part of the TMA is in motion
395 - TRACKING: both axes are in position and tracking the sky
397 The end reason can be 'STOPPED', 'TRACKING', 'FAULT', 'SLEWING', or 'OFF'.
398 - SLEWING: The previous event was a TRACKING event, and one or more of
399 the TMA components either stopped being in position, or stopped
400 moving, or went into fault, or was turned off, and hence we are now
401 only slewing and no longer tracking the sky.
402 - TRACKING: the TMA started tracking the sky when it wasn't previously.
 403 Usually this would be directly preceded by a SLEWING
404 event, but this is not strictly true, as the EUI seems to be able
405 to make the TMA start tracking the sky without slewing first.
406 - STOPPED: the components of the TMA transitioned to the STOPPED state.
407 - FAULT: the TMA went into fault.
408 - OFF: the TMA components were turned off.
410 Note that this class is not intended to be instantiated directly, but
411 rather to be returned by the ``TMAEventMaker.getEvents()`` function.
413 Parameters
414 ----------
415 dayObs : `int`
 416 The dayObs on which the event occurred.
417 seqNum : `int`
 418 The sequence number of the event.
419 type : `lsst.summit.utils.tmaUtils.TMAState`
420 The type of the event, either 'SLEWING' or 'TRACKING'.
421 endReason : `lsst.summit.utils.tmaUtils.TMAState`
422 The reason the event ended, either 'STOPPED', 'TRACKING', 'FAULT',
423 'SLEWING', or 'OFF'.
424 duration : `float`
425 The duration of the event, in seconds.
426 begin : `astropy.time.Time`
427 The time the event began.
428 end : `astropy.time.Time`
429 The time the event ended.
430 blockInfos : `list` of `lsst.summit.utils.tmaUtils.BlockInfo`, or `None`
 431 The block information, if any, relating to the event. Could be `None`,
432 or one or more block informations.
433 version : `int`
434 The version of the TMAEvent class. Equality between events is only
435 valid for a given version of the class. If the class definition
436 changes, the time ranges can change, and hence the equality between
437 events is ``False``.
438 _startRow : `int`
439 The first row in the merged EFD data which is part of the event.
440 _endRow : `int`
441 The last row in the merged EFD data which is part of the event.
442 """
443 dayObs: int
444 seqNum: int
445 type: str # can be 'SLEWING', 'TRACKING'
446 endReason: str # can be 'STOPPED', 'TRACKING', 'FAULT', 'SLEWING', 'OFF'
447 duration: float # seconds
448 begin: Time
449 end: Time
450 blockInfos: list = None
451 version: int = 0 # update this number any time a code change which could change event definitions is made
452 _startRow: int
453 _endRow: int
455 def __lt__(self, other):
456 if self.version != other.version:
457 raise ValueError(
458 f"Cannot compare TMAEvents with different versions: {self.version} != {other.version}"
459 )
460 if self.dayObs < other.dayObs:
461 return True
462 elif self.dayObs == other.dayObs:
463 return self.seqNum < other.seqNum
464 return False
466 def __repr__(self):
467 return (
468 f"TMAEvent(dayObs={self.dayObs}, seqNum={self.seqNum}, type={self.type!r},"
469 f" endReason={self.endReason!r}, duration={self.duration}, begin={self.begin!r},"
470 f" end={self.end!r}"
471 )
473 def __hash__(self):
474 # deliberately don't hash the blockInfos here, as they are not
475 # a core part of the event itself, and are listy and cause problems
476 return hash((self.dayObs,
477 self.seqNum,
478 self.type,
479 self.endReason,
480 self.duration,
481 self.begin,
482 self.end,
483 self.version,
484 self._startRow,
485 self._endRow
486 )
487 )
489 def _ipython_display_(self):
490 print(self.__str__())
492 def __str__(self):
493 def indent(string):
494 return '\n' + '\n'.join([' ' + s for s in string.splitlines()])
496 blockInfoStr = 'None'
497 if self.blockInfos is not None:
498 blockInfoStr = ''.join(indent(str(i)) for i in self.blockInfos)
500 return (
501 f"dayObs: {self.dayObs}\n"
502 f"seqNum: {self.seqNum}\n"
503 f"type: {self.type.name}\n"
504 f"endReason: {self.endReason.name}\n"
505 f"duration: {self.duration}\n"
506 f"begin: {self.begin!r}\n"
507 f"end: {self.end!r}\n"
508 f"blockInfos: {blockInfoStr}"
509 )
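# A sketch of how these events are typically consumed (given a list of events
# from TMAEventMaker.getEvents; the 30 s threshold is an arbitrary example):
#     for event in events:
#         if event.type == TMAState.SLEWING and event.duration > 30:
#             print(f"{event.dayObs} seqNum {event.seqNum}: {event.duration:.1f}s slew")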
512class TMAState(enum.IntEnum):
513 """Overall state of the TMA.
515 States are defined as follows:
517 UNINITIALIZED
518 We have not yet got data for all relevant components, so the overall
519 state is undefined.
520 STOPPED
521 All components are on, and none are moving.
522 TRACKING
523 We are tracking the sky.
524 SLEWING
525 One or more components are moving, and one or more are not tracking the
526 sky. This should probably be called MOVING, as it includes: slewing,
527 MOVING_POINT_TO_POINT, and JOGGING.
528 FAULT
529 All (if engineeringMode) or any (if not engineeringMode) components are
530 in fault.
531 OFF
532 All components are off.
533 """
534 UNINITIALIZED = -1
535 STOPPED = 0
536 TRACKING = 1
537 SLEWING = 2
538 FAULT = 3
539 OFF = 4
541 def __repr__(self):
542 return f"TMAState.{self.name}"
545def getAxisAndType(rowFor):
546 """Get the axis the data relates to, and the type of data it contains.
548 Parameters
549 ----------
550 rowFor : `str`
551 The column in the dataframe denoting what this row is for, e.g.
552 "elevationMotionState" or "azimuthInPosition", etc.
554 Returns
555 -------
556 axis : `str`
557 The axis the row is for, e.g. "azimuth", "elevation".
558 rowType : `str`
559 The type of the row, e.g. "MotionState", "SystemState", "InPosition".
560 """
561 regex = r'(azimuth|elevation)(InPosition|MotionState|SystemState)$' # matches the end of the line
562 matches = re.search(regex, rowFor)
563 if matches is None:
564 raise ValueError(f"Could not parse axis and rowType from {rowFor=}")
565 axis = matches.group(1)
566 rowType = matches.group(2)
568 assert rowFor.endswith(f"{axis}{rowType}")
569 return axis, rowType
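# For example (doctest-style, per the docstring above):
#     >>> getAxisAndType('elevationMotionState')
#     ('elevation', 'MotionState')
#     >>> getAxisAndType('azimuthInPosition')
#     ('azimuth', 'InPosition')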
572class ListViewOfDict:
573 """A class to allow making lists which contain references to an underlying
574 dictionary.
 576 Normally, making a list of items from a dictionary just copies the current
 577 values, so assigning to the list does not update the dictionary. This class
 578 instead makes a list whose elements read from and write to the underlying
 579 dictionary items themselves. This is useful for grouping components so that
 they can be manipulated in their logical sets.
580 """
581 def __init__(self, underlyingDictionary, keysToLink):
582 self.dictionary = underlyingDictionary
583 self.keys = keysToLink
585 def __getitem__(self, index):
586 return self.dictionary[self.keys[index]]
588 def __setitem__(self, index, value):
589 self.dictionary[self.keys[index]] = value
591 def __len__(self):
592 return len(self.keys)
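# Behaviour sketch: writes through the view land in the underlying dict, which
# a plain list(d.values()) would not do:
#     >>> d = {'a': 1, 'b': 2, 'c': 3}
#     >>> view = ListViewOfDict(d, ['a', 'c'])
#     >>> view[1] = 30
#     >>> d['c']
#     30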
595class TMAStateMachine:
596 """A state machine model of the TMA.
598 Note that this is currently only implemented for the azimuth and elevation
599 axes, but will be extended to include the rotator in the future.
601 Note that when used for event generation, changing ``engineeringMode`` to
602 False might change the resulting list of events, and that if the TMA moves
603 with some axis in fault, then these events will be missed. It is therefore
604 thought that ``engineeringMode=True`` should always be used when generating
605 events. The option, however, is there for completeness, as this will be
 606 useful for knowing if the CSC would consider the TMA to be in fault in the
607 general case.
609 Parameters
610 ----------
611 engineeringMode : `bool`, optional
612 Whether the TMA is in engineering mode. Defaults to True. If False,
613 then the TMA will be in fault if any component is in fault. If True,
614 then the TMA will be in fault only if all components are in fault.
615 debug : `bool`, optional
616 Whether to log debug messages. Defaults to False.
617 """
618 _UNINITIALIZED_VALUE: int = -999
620 def __init__(self, engineeringMode=True, debug=False):
621 self.engineeringMode = engineeringMode
622 self.log = logging.getLogger('lsst.summit.utils.tmaUtils.TMA')
623 if debug:
624 self.log.level = logging.DEBUG
625 self._mostRecentRowTime = -1
627 # the actual components of the TMA
628 self._parts = {'azimuthInPosition': self._UNINITIALIZED_VALUE,
629 'azimuthMotionState': self._UNINITIALIZED_VALUE,
630 'azimuthSystemState': self._UNINITIALIZED_VALUE,
631 'elevationInPosition': self._UNINITIALIZED_VALUE,
632 'elevationMotionState': self._UNINITIALIZED_VALUE,
633 'elevationSystemState': self._UNINITIALIZED_VALUE,
634 }
635 systemKeys = ['azimuthSystemState', 'elevationSystemState']
636 positionKeys = ['azimuthInPosition', 'elevationInPosition']
637 motionKeys = ['azimuthMotionState', 'elevationMotionState']
639 # references to the _parts as conceptual groupings
640 self.system = ListViewOfDict(self._parts, systemKeys)
641 self.motion = ListViewOfDict(self._parts, motionKeys)
642 self.inPosition = ListViewOfDict(self._parts, positionKeys)
644 # tuples of states for state collapsing. Note that STOP_LIKE +
645 # MOVING_LIKE must cover the full set of AxisMotionState enums
646 self.STOP_LIKE = (AxisMotionState.STOPPING,
647 AxisMotionState.STOPPED,
648 AxisMotionState.TRACKING_PAUSED)
649 self.MOVING_LIKE = (AxisMotionState.MOVING_POINT_TO_POINT,
650 AxisMotionState.JOGGING,
651 AxisMotionState.TRACKING)
652 # Likewise, ON_LIKE + OFF_LIKE must cover the full set of PowerState
653 # enums
654 self.OFF_LIKE = (PowerState.OFF, PowerState.TURNING_OFF)
655 self.ON_LIKE = (PowerState.ON, PowerState.TURNING_ON)
656 self.FAULT_LIKE = (PowerState.FAULT,) # note the trailing comma - this must be an iterable
658 def apply(self, row):
659 """Apply a row of data to the TMA state.
661 Checks that the row contains data for a later time than any data
662 previously applied, and applies the relevant column entry to the
663 relevant component.
665 Parameters
666 ----------
667 row : `pd.Series`
668 The row of data to apply to the state machine.
669 """
670 timestamp = row['private_efdStamp']
671 if timestamp < self._mostRecentRowTime: # NB equals is OK, technically, though it never happens
672 raise ValueError('TMA evolution must be monotonic increasing in time, tried to apply a row which'
 673 ' predates the most recently applied row')
674 self._mostRecentRowTime = timestamp
676 rowFor = row['rowFor'] # e.g. elevationMotionState
677 axis, rowType = getAxisAndType(rowFor) # e.g. elevation, MotionState
678 value = self._getRowPayload(row, rowType, rowFor)
679 self.log.debug(f"Setting {rowFor} to {repr(value)}")
680 self._parts[rowFor] = value
681 try:
682 # touch the state property as this executes the sieving, to make
683 # sure we don't fall through the sieve at any point in time
684 _ = self.state
685 except RuntimeError as e:
686 # improve error reporting, but always reraise this, as this is a
687 # full-blown failure
688 raise RuntimeError(f'Failed to apply {value} to {axis}{rowType} with state {self._parts}') from e
690 def _getRowPayload(self, row, rowType, rowFor):
691 """Get the relevant value from the row.
693 Given the row, and which component it relates to, get the relevant
694 value, as a bool or cast to the appropriate enum class.
696 Parameters
697 ----------
698 row : `pd.Series`
699 The row of data from the dataframe.
700 rowType : `str`
701 The type of the row, e.g. "MotionState", "SystemState",
702 "InPosition".
703 rowFor : `str`
 704 The component the row is for, e.g. "azimuthMotionState".
706 Returns
707 -------
708 value : `bool` or `enum`
709 The value of the row, as a bool or enum, depending on the
710 component, cast to the appropriate enum class or bool.
711 """
712 match rowType:
713 case 'MotionState':
714 value = row[f'state_{rowFor}']
715 return AxisMotionState(value)
716 case 'SystemState':
717 value = row[f'powerState_{rowFor}']
718 return PowerState(value)
719 case 'InPosition':
720 value = row[f'inPosition_{rowFor}']
721 return bool(value)
722 case _:
723 raise ValueError(f'Failed to get row payload with {rowType=} and {row=}')
725 @property
726 def _isValid(self):
727 """Has the TMA had a value applied to all its components?
729 If any component has not yet had a value applied, the TMA is not valid,
730 as those components will be in an unknown state.
732 Returns
733 -------
734 isValid : `bool`
735 Whether the TMA is fully initialized.
736 """
737 return not any([v == self._UNINITIALIZED_VALUE for v in self._parts.values()])
739 # state inspection properties - a high level way of inspecting the state as
740 # an API
741 @property
742 def isMoving(self):
743 return self.state in [TMAState.TRACKING, TMAState.SLEWING]
745 @property
746 def isNotMoving(self):
747 return not self.isMoving
749 @property
750 def isTracking(self):
751 return self.state == TMAState.TRACKING
753 @property
754 def isSlewing(self):
755 return self.state == TMAState.SLEWING
757 @property
758 def canMove(self):
759 badStates = [PowerState.OFF, PowerState.TURNING_OFF, PowerState.FAULT, PowerState.UNKNOWN]
760 return bool(
761 self._isValid and
762 self._parts['azimuthSystemState'] not in badStates and
763 self._parts['elevationSystemState'] not in badStates
764 )
766 # Axis inspection properties, designed for internal use. These return
767 # iterables so that they can be used in any() and all() calls, which make
768 # the logic much easier to read, e.g. to see if anything is moving, we can
 769 # write `if not any(self._axesInMotion):`
770 @property
771 def _axesInFault(self):
772 return [x in self.FAULT_LIKE for x in self.system]
774 @property
775 def _axesOff(self):
776 return [x in self.OFF_LIKE for x in self.system]
778 @property
779 def _axesOn(self):
 780 return [not x for x in self._axesOff]
782 @property
783 def _axesInMotion(self):
784 return [x in self.MOVING_LIKE for x in self.motion]
786 @property
787 def _axesTRACKING(self):
788 """Note this is deliberately named _axesTRACKING and not _axesTracking
789 to make it clear that this is the AxisMotionState type of TRACKING and
790 not the normal conceptual notion of tracking (the sky, i.e. as opposed
791 to slewing).
792 """
793 return [x == AxisMotionState.TRACKING for x in self.motion]
795 @property
796 def _axesInPosition(self):
797 return [x is True for x in self.inPosition]
799 @property
800 def state(self):
801 """The overall state of the TMA.
803 Note that this is both a property, and also the method which applies
804 the logic sieve to determine the state at a given point in time.
806 Returns
807 -------
808 state : `lsst.summit.utils.tmaUtils.TMAState`
809 The overall state of the TMA.
810 """
811 # first, check we're valid, and if not, return UNINITIALIZED state, as
812 # things are unknown
813 if not self._isValid:
814 return TMAState.UNINITIALIZED
816 # if we're not in engineering mode, i.e. we're under normal CSC
817 # control, then if anything is in fault, we're in fault. If we're
818 # engineering then some axes will move when others are in fault
819 if not self.engineeringMode:
820 if any(self._axesInFault):
821 return TMAState.FAULT
822 else:
823 # we're in engineering mode, so return fault state if ALL are in
824 # fault
825 if all(self._axesInFault):
826 return TMAState.FAULT
828 # if all axes are off, the TMA is OFF
829 if all(self._axesOff):
830 return TMAState.OFF
832 # we know we're valid and at least some axes are not off, so see if
 833 # we're in motion. If no axes are moving, we're stopped
834 if not any(self._axesInMotion):
835 return TMAState.STOPPED
837 # now we know we're initialized, and that at least one axis is moving
838 # so check axes for motion and in position. If all axes are tracking
839 # and all are in position, we're tracking the sky
840 if (all(self._axesTRACKING) and all(self._axesInPosition)):
841 return TMAState.TRACKING
843 # we now know explicitly that not everything is in position, so we no
844 # longer need to check that. We do actually know that something is in
845 # motion, but confirm that's the case and return SLEWING
846 if (any(self._axesInMotion)):
847 return TMAState.SLEWING
849 # if we want to differentiate between MOVING_POINT_TO_POINT moves,
850 # JOGGING moves and regular slews, the logic in the step above needs to
851 # be changed and the new steps added here.
853 raise RuntimeError('State error: fell through the state sieve - rewrite your logic!')
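# How the state machine is typically driven (a condensed sketch of what
# TMAEventMaker._calculateEventsFromMergedData below does; 'mergedData' is an
# assumed merged EFD dataframe):
#     tma = TMAStateMachine(engineeringMode=True)
#     _initializeTma(tma)
#     for rowNum, row in mergedData.iterrows():
#         tma.apply(row)
#         print(rowNum, tma.state)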
856class TMAEventMaker:
857 """A class to create per-dayObs TMAEvents for the TMA's movements.
859 Example usage:
860 >>> dayObs = 20230630
861 >>> eventMaker = TMAEventMaker()
862 >>> events = eventMaker.getEvents(dayObs)
863 >>> print(f'Found {len(events)} for {dayObs=}')
865 Parameters
866 ----------
867 client : `lsst_efd_client.efd_helper.EfdClient`, optional
868 The EFD client to use, created if not provided.
869 """
870 # the topics which need logical combination to determine the overall mount
871 # state. Will need updating as new components are added to the system.
873 # relevant column: 'state'
874 _movingComponents = [
875 'lsst.sal.MTMount.logevent_azimuthMotionState',
876 'lsst.sal.MTMount.logevent_elevationMotionState',
877 ]
879 # relevant column: 'inPosition'
880 _inPositionComponents = [
881 'lsst.sal.MTMount.logevent_azimuthInPosition',
882 'lsst.sal.MTMount.logevent_elevationInPosition',
883 ]
885 # the components which, if in fault, put the TMA into fault
886 # relevant column: 'powerState'
887 _stateComponents = [
888 'lsst.sal.MTMount.logevent_azimuthSystemState',
889 'lsst.sal.MTMount.logevent_elevationSystemState',
890 ]
892 def __init__(self, client=None):
893 if client is not None:
894 self.client = client
895 else:
896 self.client = makeEfdClient()
897 self.log = logging.getLogger(__name__)
898 self._data = {}
900 @dataclass(frozen=True)
901 class ParsedState:
 902 eventStart: int
903 eventEnd: int
904 previousState: TMAState
905 state: TMAState
907 @staticmethod
908 def isToday(dayObs):
909 """Find out if the specified dayObs is today, or in the past.
911 If the day is today, the function returns ``True``, if it is in the
912 past it returns ``False``. If the day is in the future, a
913 ``ValueError`` is raised, as this indicates there is likely an
914 off-by-one type error somewhere in the logic.
916 Parameters
917 ----------
918 dayObs : `int`
919 The dayObs to check, in the format YYYYMMDD.
921 Returns
922 -------
923 isToday : `bool`
924 ``True`` if the dayObs is today, ``False`` if it is in the past.
 926 Raises
 ------
 927 ValueError
 Raised if the dayObs is in the future.
928 """
929 todayDayObs = getCurrentDayObs_int()
930 if dayObs == todayDayObs:
931 return True
932 if dayObs > todayDayObs:
933 raise ValueError("dayObs is in the future")
934 return False
936 @staticmethod
937 def _shortName(topic):
938 """Get the short name of a topic.
940 Parameters
941 ----------
942 topic : `str`
943 The topic to get the short name of.
945 Returns
946 -------
947 shortName : `str`
948 The short name of the topic, e.g. 'azimuthInPosition'
949 """
950 # get, for example 'azimuthInPosition' from
951 # lsst.sal.MTMount.logevent_azimuthInPosition
952 return topic.split('_')[-1]
954 def _mergeData(self, data):
955 """Merge a dict of dataframes based on private_efdStamp, recording
956 where each row came from.
 958 Given a dict of dataframes, keyed by topic, merge them into a single
959 dataframe, adding a column to record which topic each row came from.
961 Parameters
962 ----------
963 data : `dict` of `str` : `pd.DataFrame`
964 The dataframes to merge.
966 Returns
967 -------
968 merged : `pd.DataFrame`
969 The merged dataframe.
970 """
971 excludeColumns = ['private_efdStamp', 'rowFor']
973 mergeArgs = {
974 'how': 'outer',
975 'sort': True,
976 }
978 merged = None
979 originalRowCounter = 0
981 # Iterate over the keys and merge the corresponding DataFrames
982 for key, df in data.items():
983 if df.empty:
984 # Must skip the df if it's empty, otherwise the merge will fail
 985 # due to lack of private_efdStamp. Other axes might
986 # still be in motion, so we still want to merge what we have
987 continue
989 originalRowCounter += len(df)
990 component = self._shortName(key) # Add suffix to column names to identify the source
991 suffix = '_' + component
993 df['rowFor'] = component
995 columnsToSuffix = [col for col in df.columns if col not in excludeColumns]
996 df_to_suffix = df[columnsToSuffix].add_suffix(suffix)
997 df = pd.concat([df[excludeColumns], df_to_suffix], axis=1)
999 if merged is None:
1000 merged = df.copy()
1001 else:
1002 merged = pd.merge(merged, df, **mergeArgs)
1004 merged = merged.loc[:, ~merged.columns.duplicated()] # Remove duplicate columns after merge
1006 if len(merged) != originalRowCounter:
1007 self.log.warning("Merged data has a different number of rows to the original data, some"
1008 " timestamps (rows) will contain more than one piece of actual information.")
1010 # if the index is still a DatetimeIndex here then we didn't actually
1011 # merge any data, so there is only data from a single component.
1012 # This is likely to result in no events, but not necessarily, and for
1013 # generality, instead we convert to a range index to ensure consistency
1014 # in the returned data, and allow processing to continue.
1015 if isinstance(merged.index, pd.DatetimeIndex):
1016 self.log.warning("Data was only found for a single component in the EFD.")
1017 merged.reset_index(drop=True, inplace=True)
1019 return merged
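    # Illustrative layout of the merged frame (values omitted): each topic's
    # payload keeps its own suffixed columns, e.g. 'state_azimuthMotionState'
    # or 'inPosition_elevationInPosition', the 'rowFor' column records which
    # topic produced each row, and rows are interleaved in private_efdStamp
    # order.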
1021 def getEvent(self, dayObs, seqNum):
1022 """Get a specific event for a given dayObs and seqNum.
1024 Repeated calls for the same ``dayObs`` will use the cached data if the
1025 day is in the past, and so will be much quicker. If the ``dayObs`` is
1026 the current day then the EFD will be queried for new data for each
1027 call, so a call which returns ``None`` on the first try might return an
1028 event on the next, if the TMA is still moving and thus generating
1029 events.
1031 Parameters
1032 ----------
1033 dayObs : `int`
1034 The dayObs to get the event for.
1035 seqNum : `int`
1036 The sequence number of the event to get.
1038 Returns
1039 -------
1040 event : `lsst.summit.utils.tmaUtils.TMAEvent`
1041 The event for the specified dayObs and seqNum, or `None` if the
1042 event was not found.
1043 """
1044 events = self.getEvents(dayObs)
 1045 if seqNum < len(events):
1046 event = events[seqNum]
1047 if event.seqNum != seqNum:
1048 # it's zero-indexed and contiguous so this must be true but
1049 # a sanity check doesn't hurt.
1050 raise AssertionError(f"Event sequence number mismatch: {event.seqNum} != {seqNum}")
1051 return event
1052 else:
1053 self.log.warning(f"Event {seqNum} not found for {dayObs}")
1054 return None
1056 def getEvents(self, dayObs):
1057 """Get the TMA events for the specified dayObs.
1059 Gets the required mount data from the cache or the EFD as required,
1060 handling whether we're working with live vs historical data. The
 1061 dataframes from the EFD are merged and applied to the TMAStateMachine,
1062 and that series of state changes is used to generate a list of
 1063 TMAEvents for the day's data.
1065 If the data is for the current day, i.e. if new events can potentially
1066 land, then if the last event is "open" (meaning that the TMA appears to
1067 be in motion and thus the event is growing with time), then that event
1068 is excluded from the event list as it is expected to be changing with
1069 time, and will likely close eventually. However, if that situation
1070 occurs on a day in the past, then that event can never close, and the
1071 event is therefore included, but a warning about the open event is
1072 logged.
1074 Parameters
1075 ----------
1076 dayObs : `int`
1077 The dayObs for which to get the events.
1079 Returns
1080 -------
 1081 events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent`
1082 The events for the specified dayObs.
1083 """
1084 workingLive = self.isToday(dayObs)
1085 data = None
1087 if workingLive:
 1088 # the data is potentially still growing, so we must re-query it
 1089 # regardless of whether we already have it or not
1090 self.log.info(f'Updating mount data for {dayObs} from the EFD')
1091 self._getEfdDataForDayObs(dayObs)
1092 data = self._data[dayObs]
1093 elif dayObs in self._data:
1094 # data is in the cache and it's not being updated, so use it
1095 data = self._data[dayObs]
1096 elif dayObs not in self._data:
1097 # we don't have the data yet, but it's not growing, so put it in
1098 # the cache and use it from there
1099 self.log.info(f'Retrieving mount data for {dayObs} from the EFD')
1100 self._getEfdDataForDayObs(dayObs)
1101 data = self._data[dayObs]
1102 else:
1103 raise RuntimeError("This should never happen")
1105 # if we don't have something to work with, log a warning and return
1106 if not self.dataFound(data):
1107 self.log.warning(f"No EFD data found for {dayObs=}")
1108 return []
1110 # applies the data to the state machine, and generates events from the
1111 # series of states which results
1112 events = self._calculateEventsFromMergedData(data, dayObs, dataIsForCurrentDay=workingLive)
1113 if not events:
1114 self.log.warning(f"Failed to calculate any events for {dayObs=} despite EFD data existing!")
1115 return events
1117 @staticmethod
1118 def dataFound(data):
1119 """Check if any data was found.
1121 Parameters
1122 ----------
1123 data : `pd.DataFrame`
1124 The merged dataframe to check.
1126 Returns
1127 -------
1128 dataFound : `bool`
1129 Whether data was found.
1130 """
 1131 # You can't just compare with data == NO_DATA_SENTINEL because
1132 # `data` is usually a dataframe, and you can't compare a dataframe to a
1133 # string directly.
1134 return not (isinstance(data, str) and data == NO_DATA_SENTINEL)
1136 def _getEfdDataForDayObs(self, dayObs):
1137 """Get the EFD data for the specified dayObs and store it in the cache.
1139 Gets the EFD data for all components, as a dict of dataframes keyed by
1140 component name. These are then merged into a single dataframe in time
1141 order, based on each row's `private_efdStamp`. This is then stored in
1142 self._data[dayObs].
1144 If no data is found, the value is set to ``NO_DATA_SENTINEL`` to
1145 differentiate this from ``None``, as this is what you'd get if you
1146 queried the cache with `self._data.get(dayObs)`. It also marks that we
1147 have already queried this day.
1149 Parameters
1150 ----------
1151 dayObs : `int`
1152 The dayObs to query.
1153 """
1154 data = {}
1155 for component in itertools.chain(
1156 self._movingComponents,
1157 self._inPositionComponents,
1158 self._stateComponents
1159 ):
1160 data[component] = getEfdData(self.client, component, dayObs=dayObs, warn=False)
1161 self.log.debug(f"Found {len(data[component])} for {component}")
1163 if all(dataframe.empty for dataframe in data.values()):
1164 # if every single dataframe is empty, set the sentinel and don't
1165 # try to merge anything, otherwise merge all the data we found
1166 self.log.debug(f"No data found for {dayObs=}")
1167 # a sentinel value that's not None
1168 self._data[dayObs] = NO_DATA_SENTINEL
1169 else:
1170 merged = self._mergeData(data)
1171 self._data[dayObs] = merged
1173 def _calculateEventsFromMergedData(self, data, dayObs, dataIsForCurrentDay):
1174 """Calculate the list of events from the merged data.
1176 Runs the merged data, row by row, through the TMA state machine (with
1177 ``tma.apply``) to get the overall TMA state at each row, building a
1178 dict of these states, keyed by row number.
 1180 This time-series of TMA states is then looped over (in
1181 `_statesToEventTuples`), building a list of tuples representing the
1182 start and end of each event, the type of the event, and the reason for
1183 the event ending.
1185 This list of tuples is then passed to ``_makeEventsFromStateTuples``,
1186 which actually creates the ``TMAEvent`` objects.
1188 Parameters
1189 ----------
1190 data : `pd.DataFrame`
1191 The merged dataframe to use.
1192 dayObs : `int`
1193 The dayObs for the data.
1194 dataIsForCurrentDay : `bool`
1195 Whether the data is for the current day. Determines whether to
1196 allow an open last event or not.
1198 Returns
1199 -------
1200 events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent`
1201 The events for the specified dayObs.
1202 """
1203 engineeringMode = True
1204 tma = TMAStateMachine(engineeringMode=engineeringMode)
1206 # For now, we assume that the TMA starts each day able to move, but
1207 # stationary. If this turns out to cause problems, we will need to
1208 # change to loading data from the previous day(s), and looking back
1209 # through it in time until a state change has been found for every
 1210 # axis. For now though, Bruno et al. think this is acceptable and
1211 # preferable.
1212 _initializeTma(tma)
1214 tmaStates = {}
1215 for rowNum, row in data.iterrows():
1216 tma.apply(row)
1217 tmaStates[rowNum] = tma.state
1219 stateTuples = self._statesToEventTuples(tmaStates, dataIsForCurrentDay)
1220 events = self._makeEventsFromStateTuples(stateTuples, dayObs, data)
1221 self.addBlockDataToEvents(dayObs, events)
1222 return events
1224 def _statesToEventTuples(self, states, dataIsForCurrentDay):
1225 """Get the event-tuples from the dictionary of TMAStates.
1227 Chunks the states into blocks of the same state, so that we can create
1228 an event for each block in `_makeEventsFromStateTuples`. Off-type
1229 states are skipped over, with each event starting when the telescope
1230 next resumes motion or changes to a different type of motion state,
 1231 i.e. from non-tracking type movement (MOVING_POINT_TO_POINT, JOGGING,
1232 TRACKING-but-not-in-position, i.e. slewing) to a tracking type
1233 movement, or vice versa.
1235 Parameters
1236 ----------
1237 states : `dict` of `int` : `lsst.summit.utils.tmaUtils.TMAState`
1238 The states of the TMA, keyed by row number.
1239 dataIsForCurrentDay : `bool`
1240 Whether the data is for the current day. Determines whether to
 1241 allow an open last event or not.
1243 Returns
1244 -------
 1245 parsedStates : `list` of `ParsedState`
 1246 The parsed states, as a list of ``ParsedState`` objects with fields
 1247 ``(eventStart, eventEnd, previousState, state)``
1248 """
1249 # Consider rewriting this with states as a list and using pop(0)?
1250 skipStates = (TMAState.STOPPED, TMAState.OFF, TMAState.FAULT)
1252 parsedStates = []
1253 eventStart = None
1254 rowNum = 0
1255 nRows = len(states)
1256 while rowNum < nRows:
1257 previousState = None
1258 state = states[rowNum]
1259 # if we're not in an event, fast forward through off-like rows
1260 # until a new event starts
1261 if eventStart is None and state in skipStates:
1262 rowNum += 1
1263 continue
1265 # we've started a new event, so walk through it and find the end
1266 eventStart = rowNum
1267 previousState = state
1268 rowNum += 1 # move to the next row before starting the while loop
1269 if rowNum == nRows:
1270 # we've reached the end of the data, and we're still in an
1271 # event, so don't return this presumably in-progress event
1272 self.log.warning('Reached the end of the data while starting a new event')
1273 break
1274 state = states[rowNum]
1275 while state == previousState:
1276 rowNum += 1
1277 if rowNum == nRows:
1278 break
1279 state = states[rowNum]
1280 parsedStates.append(
1281 self.ParsedState(
1282 eventStart=eventStart,
1283 eventEnd=rowNum,
1284 previousState=previousState,
1285 state=state
1286 )
1287 )
1288 if state in skipStates:
1289 eventStart = None
1291 # done parsing, just check the last event is valid
1292 if parsedStates: # ensure we have at least one event
1293 lastEvent = parsedStates[-1]
1294 if lastEvent.eventEnd == nRows:
1295 # Generally, you *want* the timespan for an event to be the
1296 # first row of the next event, because you were in that state
1297 # right up until that state change. However, if that event is
1298 # a) the last one of the day and b) runs right up until the end
1299 # of the dataframe, then there isn't another row, so this will
1300 # overrun the array.
1301 #
1302 # If the data is for the current day then this isn't a worry,
1303 # as we're likely still taking data, and this event will likely
1304 # close yet, so we don't issue a warning, and simply drop the
1305 # event from the list.
1307 # However, if the data is for a past day then no new data will
1308 # come to close the event, so allow the event to be "open", and
1309 # issue a warning
1310 if dataIsForCurrentDay:
 1311 self.log.info("Discarding open (likely in-progress) final event from current day's events")
1312 parsedStates = parsedStates[:-1]
1313 else:
1314 self.log.warning("Last event ends open, forcing it to end at end of the day's data")
 1315 # it's a frozen dataclass, so (deliberately) awkward to modify
1316 parsedStates[-1] = self.ParsedState(
1317 eventStart=lastEvent.eventStart,
1318 eventEnd=lastEvent.eventEnd - 1,
1319 previousState=lastEvent.previousState,
1320 state=lastEvent.state
1321 )
1323 return parsedStates
1325 def addBlockDataToEvents(self, dayObs, events):
1326 """Find all the block data in the EFD for the specified events.
 1328 Finds all the block data in the EFD relating to the events, parses it
1329 from the rows of the dataframe, and adds it to the events in place.
1331 Parameters
1332 ----------
 1333 dayObs : `int`
 The dayObs to which the events belong.
 events : `lsst.summit.utils.tmaUtils.TMAEvent` or
1334 `list` of `lsst.summit.utils.tmaUtils.TMAEvent`
1335 One or more events to get the block data for.
1336 """
1337 try:
1338 blockParser = BlockParser(dayObs, client=self.client)
1339 except Exception as e:
1340 # adding the block data should never cause a failure so if we can't
1341 # get the block data, log a warning and return. It is, however,
1342 # never expected, so use log.exception to get the full traceback
1343 # and scare users so it gets reported
1344 self.log.exception(f'Failed to parse block data for {dayObs=}, {e}')
1345 return
1346 blocks = blockParser.getBlockNums()
1347 blockDict = {}
1348 for block in blocks:
1349 blockDict[block] = blockParser.getSeqNums(block)
1351 for block, seqNums in blockDict.items():
1352 for seqNum in seqNums:
1353 blockInfo = blockParser.getBlockInfo(block=block, seqNum=seqNum)
1355 relatedEvents = blockParser.getEventsForBlock(events, block=block, seqNum=seqNum)
1356 for event in relatedEvents:
1357 toSet = [blockInfo]
1358 if event.blockInfos is not None:
1359 existingInfo = event.blockInfos
1360 existingInfo.append(blockInfo)
1361 toSet = existingInfo
1363 # Add the blockInfo to the TMAEvent. Because this is a
1364 # frozen dataclass, use object.__setattr__ to set the
1365 # attribute. This is the correct way to set a frozen
1366 # dataclass attribute after creation.
1367 object.__setattr__(event, 'blockInfos', toSet)
1369 def _makeEventsFromStateTuples(self, states, dayObs, data):
1370 """For the list of state-tuples, create a list of ``TMAEvent`` objects.
1372 Given the underlying data, and the start/stop points for each event,
1373 create the TMAEvent objects for the dayObs.
1375 Parameters
1376 ----------
 1377 states : `list` of `ParsedState`
 1378 The parsed states, as a list of ``ParsedState`` objects with fields
 1379 ``(eventStart, eventEnd, previousState, state)``
1380 dayObs : `int`
1381 The dayObs for the data.
1382 data : `pd.DataFrame`
1383 The merged dataframe.
1385 Returns
1386 -------
1387 events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent`
1388 The events for the specified dayObs.
1389 """
1390 seqNum = 0
1391 events = []
1392 for parsedState in states:
1393 begin = data.iloc[parsedState.eventStart]['private_efdStamp']
1394 end = data.iloc[parsedState.eventEnd]['private_efdStamp']
1395 beginAstropy = efdTimestampToAstropy(begin)
1396 endAstropy = efdTimestampToAstropy(end)
1397 duration = end - begin
1398 event = TMAEvent(
1399 dayObs=dayObs,
1400 seqNum=seqNum,
1401 type=parsedState.previousState,
1402 endReason=parsedState.state,
1403 duration=duration,
1404 begin=beginAstropy,
1405 end=endAstropy,
1406 blockInfos=None, # this is added later
1407 _startRow=parsedState.eventStart,
1408 _endRow=parsedState.eventEnd,
1409 )
1410 events.append(event)
1411 seqNum += 1
1412 return events
1414 @staticmethod
1415 def printTmaDetailedState(tma):
1416 """Print the full state of all the components of the TMA.
1418 Currently this is the azimuth and elevation axes' power and motion
1419 states, and their respective inPosition statuses.
1421 Parameters
1422 ----------
1423 tma : `lsst.summit.utils.tmaUtils.TMAStateMachine`
1424 The TMA state machine in the state we want to print.
1425 """
1426 axes = ['azimuth', 'elevation']
1427 p = tma._parts
1428 axisPad = len(max(axes, key=len)) # length of the longest axis string == 9 here, but this is general
1429 motionPad = max(len(s.name) for s in AxisMotionState)
1430 powerPad = max(len(s.name) for s in PowerState)
1432 # example output to show what's being done with the padding:
1433 # azimuth - Power: ON Motion: STOPPED InPosition: True # noqa: W505
1434 # elevation - Power: ON Motion: MOVING_POINT_TO_POINT InPosition: False # noqa: W505
1435 for axis in axes:
1436 print(f"{axis:>{axisPad}} - "
1437 f"Power: {p[f'{axis}SystemState'].name:>{powerPad}} "
1438 f"Motion: {p[f'{axis}MotionState'].name:>{motionPad}} "
1439 f"InPosition: {p[f'{axis}InPosition']}")
1440 print(f"Overall system state: {tma.state.name}")
1442 def printFullDayStateEvolution(self, dayObs, taiOrUtc='utc'):
1443 """Print the full TMA state evolution for the specified dayObs.
1445 Replays all the data from the EFD for the specified dayObs through
1446 the TMA state machine, and prints both the overall and detailed state
1447 of the TMA for each row.
1449 Parameters
1450 ----------
1451 dayObs : `int`
1452 The dayObs for which to print the state evolution.
1453 taiOrUtc : `str`, optional
1454 Whether to print the timestamps in TAI or UTC. Default is UTC.
1455 """
1456 # create a fake event which spans the whole day, and then use
1457 # printEventDetails code while skipping the header to print the
1458 # evolution.
1459 _ = self.getEvents(dayObs) # ensure the data has been retrieved from the EFD
1460 data = self._data[dayObs]
1461 lastRowNum = len(data) - 1
1463 fakeEvent = TMAEvent(
1464 dayObs=dayObs,
1465 seqNum=-1, # anything will do
1466 type=TMAState.OFF, # anything will do
1467 endReason=TMAState.OFF, # anything will do
1468 duration=-1, # anything will do
1469 begin=efdTimestampToAstropy(data.iloc[0]['private_efdStamp']),
1470 end=efdTimestampToAstropy(data.iloc[-1]['private_efdStamp']),
1471 _startRow=0,
1472 _endRow=lastRowNum
1473 )
1474 self.printEventDetails(fakeEvent, taiOrUtc=taiOrUtc, printHeader=False)
1476 def printEventDetails(self, event, taiOrUtc='tai', printHeader=True):
1477 """Print a detailed breakdown of all state transitions during an event.
1479 Note: this is not the most efficient way to do this, but it is much the
1480 cleanest with respect to the actual state machine application and event
1481 generation code, and is easily fast enough for the cases it will be
1482 used for. It is not worth complicating the normal state machine logic
1483 to try to use this code.
1485 Parameters
1486 ----------
1487 event : `lsst.summit.utils.tmaUtils.TMAEvent`
1488 The event to display the details of.
1489 taiOrUtc : `str`, optional
1490 Whether to display time strings in TAI or UTC. Defaults to TAI.
1491 Case insensitive.
1492 printHeader : `bool`, optional
1493 Whether to print the event summary. Defaults to True. The primary
1494 reason for the existence of this option is so that this same
1495 printing function can be used to show the evolution of a whole day
1496 by supplying a fake event which spans the whole day, but this event
1497 necessarily has a meaningless summary, and so needs suppressing.
1498 """
1499 taiOrUtc = taiOrUtc.lower()
1500 if taiOrUtc not in ['tai', 'utc']:
 1501 raise ValueError(f'Got unsupported value for {taiOrUtc=}')
1502 useUtc = taiOrUtc == 'utc'
1504 if printHeader:
1505 print(f"Details for {event.duration:.2f}s {event.type.name} event dayObs={event.dayObs}"
1506 f" seqNum={event.seqNum}:")
1507 print(f"- Event began at: {event.begin.utc.isot if useUtc else event.begin.isot}")
1508 print(f"- Event ended at: {event.end.utc.isot if useUtc else event.end.isot}")
1510 dayObs = event.dayObs
1511 data = self._data[dayObs]
1512 startRow = event._startRow
1513 endRow = event._endRow
1514 nRowsToApply = endRow - startRow + 1
1515 print(f"\nTotal number of rows in the merged dataframe: {len(data)}")
1516 if printHeader:
1517 print(f"of which rows {startRow} to {endRow} (inclusive) relate to this event.")
1519 # reconstruct all the states
1520 tma = TMAStateMachine(engineeringMode=True)
1521 _initializeTma(tma)
1523 tmaStates = {}
1524 firstAppliedRow = True # flag to print a header on the first row that's applied
1525 for rowNum, row in data.iterrows(): # must replay rows right from start to get full correct state
1526 if rowNum == startRow:
1527 # we've not yet applied this row, so this is the state just
1528 # before event
1529 print(f"\nBefore the event the TMA was in state {tma.state.name}:")
1530 self.printTmaDetailedState(tma)
1532 if rowNum >= startRow and rowNum <= endRow:
1533 if firstAppliedRow: # only print this intro on the first row we're applying
1534 print(f"\nThen, applying the {nRowsToApply} rows of data for this event, the state"
1535 " evolved as follows:\n")
1536 firstAppliedRow = False
1538 # break the row down and print its details
1539 rowFor = row['rowFor']
1540 axis, rowType = getAxisAndType(rowFor) # e.g. elevation, MotionState
1541 value = tma._getRowPayload(row, rowType, rowFor)
1542 valueStr = f"{str(value) if isinstance(value, bool) else value.name}"
1543 rowTime = efdTimestampToAstropy(row['private_efdStamp'])
1544 print(f"On row {rowNum} the {axis} axis had the {rowType} set to {valueStr} at"
1545 f" {rowTime.utc.isot if useUtc else rowTime.isot}")
1547 # then apply it as usual, printing the state right afterwards
1548 tma.apply(row)
1549 tmaStates[rowNum] = tma.state
1550 self.printTmaDetailedState(tma)
1551 print()
1553 else:
1554 # if it's not in the range of interest then just apply it
1555 # silently as usual
1556 tma.apply(row)
1557 tmaStates[rowNum] = tma.state
1559 def findEvent(self, time):
1560 """Find the event which contains the specified time.
1562 If the specified time lies within an event, that event is returned. If
1563 it is at the exact start, that is logged, and if that start point is
1564 shared by the end of the previous event, that is logged too. If the
1565 event lies between events, the events either side are logged, but
1566 ``None`` is returned. If the time lies before the first event of the
1567 day a warning is logged, as for times after the last event of the day.
1569 Parameters
1570 ----------
1571 time : `astropy.time.Time`
 1572 The time for which to find the enclosing event.
1574 Returns
1575 -------
1576 event : `lsst.summit.utils.tmaUtils.TMAEvent` or `None`
1577 The event which contains the specified time, or ``None`` if the
1578 time doesn't fall during an event.
1579 """
1580 # there are five possible cases:
1581 # 1) the time lies before the first event of the day
1582 # 2) the time lies after the last event of the day
1583 # 3) the time lies within an event
1584 # 3a) the time is exactly at the start of an event
1585 # 3b) if so, time can be shared by the end of the previous event if
1586 # they are contiguous
1587 # 4) the time lies between two events
 1588 # 5) the time is exactly at the end of the last event of the day. This is
1589 # an issue because event end times are exclusive, so this time is
1590 # not technically in that event, it's the moment it closes (and if
1591 # there *was* an event which followed contiguously, it would be in
 1592 # that event instead, which is what motivates this definition of
 1593 # which event a time lies within)
1595 dayObs = getDayObsForTime(time)
1596 # we know this is on the right day, and definitely before the specified
1597 # time, but sanity check this before continuing as this needs to be
1598 # true for this to give the correct answer
1599 assert getDayObsStartTime(dayObs) <= time
1600 assert getDayObsEndTime(dayObs) > time
 1602 # common start to many log messages, so define it once here
1603 logStart = f"Specified time {time.isot} falls on {dayObs=}"
1605 events = self.getEvents(dayObs)
1606 if len(events) == 0:
1607 self.log.warning(f'There are no events found for {dayObs}')
1608 return None
1610 # check case 1)
1611 if time < events[0].begin:
1612 self.log.warning(f'{logStart} and is before the first event of the day')
1613 return None
1615 # check case 2)
1616 if time > events[-1].end:
1617 self.log.warning(f'{logStart} and is after the last event of the day')
1618 return None
1620 # check case 5)
1621 if time == events[-1].end:
1622 self.log.warning(f'{logStart} and is exactly at the end of the last event of the day'
1623 f' (seqnum={events[-1].seqNum}). Because event intervals are half-open, this'
1624 ' time does not technically lie in any event')
1625 return None
1627 # we are now either in an event, or between events. Walk through the
1628 # events, and if the end of the event is after the specified time, then
1629 # we're either in it or past it, so check if we're in.
1630 for eventNum, event in enumerate(events):
1631 if event.end > time: # case 3) we are now into or past the right event
1632 # the event end encloses the time, so note the > and not >=,
1633 # this must be strictly greater, we check the overlap case
1634 # later
1635 if time >= event.begin: # we're fully inside the event, so return it.
1636 # 3a) before returning, check if we're exactly at the start
1637 # of the event, and if so, log it. Then 3b) also check if
1638 # we're at the exact end of the previous event, and if so,
1639 # log that too.
1640 if time == event.begin:
1641 self.log.info(f"{logStart} and is exactly at the start of event"
1642 f" {eventNum}")
1643 if eventNum == 0: # I think this is actually impossible, but check anyway
1644 return event # can't check the previous event so return here
1645 previousEvent = events[eventNum - 1]
1646 if previousEvent.end == time:
1647 self.log.info("Previous event is contiguous, so this time is also at the exact"
1648 f" end of {eventNum - 1}")
1649 return event
1650 else: # case 4)
1651 # the event end is past the time, but it's not inside the
1652 # event, so we're between events. Log which we're between
1653 # and return None
1654 previousEvent = events[eventNum - 1]
1655 timeAfterPrev = (time - previousEvent.end).to_datetime()
1656 naturalTimeAfterPrev = humanize.naturaldelta(timeAfterPrev, minimum_unit='MICROSECONDS')
1657 timeBeforeCurrent = (event.begin - time).to_datetime()
1658 naturalTimeBeforeCurrent = humanize.naturaldelta(timeBeforeCurrent,
1659 minimum_unit='MICROSECONDS')
1660 self.log.info(f"{logStart} and lies"
1661 f" {naturalTimeAfterPrev} after the end of event {previousEvent.seqNum}"
1662 f" and {naturalTimeBeforeCurrent} before the start of event {event.seqNum}."
1663 )
1664 return None
1666 raise RuntimeError('Event finding logic fundamentally failed, which should never happen - the code'
1667 ' needs fixing')
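# Usage sketch for findEvent (assumes a working EFD connection; the timestamp
# is a placeholder):
#     eventMaker = TMAEventMaker()
#     event = eventMaker.findEvent(Time('2023-06-30T23:59:59', scale='utc'))
#     if event is not None:
#         print(event)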