Coverage for python/lsst/summit/utils/tmaUtils.py: 18%
672 statements
« prev ^ index » next coverage.py v7.5.0, created at 2024-05-03 04:44 -0700
1# This file is part of summit_utils.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
21from __future__ import annotations
23import datetime
24import enum
25import itertools
26import logging
27import re
28from collections.abc import Iterable
29from dataclasses import dataclass, field
30from typing import TYPE_CHECKING, Any, Union
32import astropy
33import humanize
34import matplotlib
35import matplotlib.dates as mdates
36import matplotlib.pyplot as plt
37import numpy as np
38import pandas as pd
39from astropy.time import Time
40from matplotlib.ticker import FuncFormatter
42from lsst.utils.iteration import ensure_iterable
44from .blockUtils import BlockParser
45from .efdUtils import (
46 COMMAND_ALIASES,
47 clipDataToEvent,
48 efdTimestampToAstropy,
49 getCommands,
50 getDayObsEndTime,
51 getDayObsForTime,
52 getDayObsStartTime,
53 getEfdData,
54 makeEfdClient,
55)
56from .enums import AxisMotionState, PowerState
57from .utils import dayObsIntToString, getCurrentDayObs_int
if TYPE_CHECKING:
    # Import only for type annotations: lsst_efd_client may be absent in some
    # environments, so fall back to binding EfdClient to None in that case.
    try:
        from lsst_efd_client import EfdClient
    except ImportError:
        EfdClient = None
__all__ = (
    "TMAStateMachine",
    "TMAEvent",
    "TMAEventMaker",
    "TMAState",
    "AxisMotionState",
    "PowerState",
    "getSlewsFromEventList",
    "getTracksFromEventList",
    "getTorqueMaxima",
    "filterBadValues",
)

# we don't want to use `None` for a no data sentinel because dict.get('key')
# returns None if the key isn't present, and also we need to mark that the data
# was queried for and no data was found, whereas the key not being present
# means that we've not yet looked for the data.
NO_DATA_SENTINEL = "NODATA"

# The known time difference between the TMA demand position and the TMA
# position when tracking. 20Hz data times three points = 150ms.
TRACKING_RESIDUAL_TAIL_CLIP = -0.15  # seconds

# Image-impact RMS thresholds (arcsec) used when writing RubinTV metadata:
MOUNT_IMAGE_WARNING_LEVEL = 0.01  # this determines the colouring of the cells in the table, yellow for this
MOUNT_IMAGE_BAD_LEVEL = 0.05  # and red for this
def getSlewsFromEventList(
    events: list[TMAEvent],
) -> list[TMAEvent]:
    """Select only the slewing events from a list of TMAEvents.

    Parameters
    ----------
    events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent`
        The list of events to filter.

    Returns
    -------
    events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent`
        The filtered list of events.
    """
    return list(filter(lambda event: event.type == TMAState.SLEWING, events))
def getTracksFromEventList(
    events: list[TMAEvent],
) -> list[TMAEvent]:
    """Select only the tracking events from a list of TMAEvents.

    Parameters
    ----------
    events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent`
        The list of events to filter.

    Returns
    -------
    events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent`
        The filtered list of events.
    """
    return list(filter(lambda event: event.type == TMAState.TRACKING, events))
def getTorqueMaxima(table: pd.DataFrame):
    """Print the maximum positive and negative azimuth and elevation torques.

    Designed to be used with the table as downloaded from RubinTV.

    Parameters
    ----------
    table : `pd.DataFrame`
        The table of data to use, as generated by Rapid Analysis.
    """
    for axis in ("elevation", "azimuth"):
        column = table[f"Largest {axis} torque"]
        # argmax/argmin give positional indices, so look values up with iloc
        for sign, finder in (("positive", np.argmax), ("negative", np.argmin)):
            position = finder(column)
            extremum = column.iloc[position]
            print(f"Max {sign} {axis:9} torque during seqNum {position:>4}: {extremum/1000:>7.1f}kNm")
def getAzimuthElevationDataForEvent(
    client: EfdClient,
    event: TMAEvent,
    prePadding: float = 0,
    postPadding: float = 0,
) -> tuple[pd.DataFrame, pd.DataFrame]:
    """Get the data for the az/el telemetry topics for a given TMAEvent.

    The error between the actual and demanded positions is calculated and added
    to the dataframes in the az/elError columns. For TRACKING type events, this
    error should be extremely close to zero, whereas for SLEWING type events,
    this error represents the how far the TMA is from the demanded position,
    and is therefore arbitrarily large, and tends to zero as the TMA get closer
    to tracking the sky.

    Parameters
    ----------
    client : `lsst_efd_client.efd_helper.EfdClient`
        The EFD client to use.
    event : `lsst.summit.utils.tmaUtils.TMAEvent`
        The event to get the data for.
    prePadding : `float`, optional
        The amount of time to pad the event with before the start time, in
        seconds.
    postPadding : `float`, optional
        The amount of time to pad the event with after the end time, in
        seconds.

    Returns
    -------
    azimuthData : `pd.DataFrame`
        The azimuth data for the specified event.
    elevationData : `pd.DataFrame`
        The elevation data for the specified event.
    """
    azimuthData = getEfdData(
        client, "lsst.sal.MTMount.azimuth", event=event, prePadding=prePadding, postPadding=postPadding
    )
    elevationData = getEfdData(
        client, "lsst.sal.MTMount.elevation", event=event, prePadding=prePadding, postPadding=postPadding
    )

    azValues = azimuthData["actualPosition"].values
    elValues = elevationData["actualPosition"].values
    azDemand = azimuthData["demandPosition"].values
    elDemand = elevationData["demandPosition"].values

    # actual - demand, converted from degrees to arcseconds
    azError = (azValues - azDemand) * 3600
    elError = (elValues - elDemand) * 3600

    azimuthData["azError"] = azError
    elevationData["elError"] = elError

    return azimuthData, elevationData
204def filterBadValues(values: list | np.ndarray, maxDelta: float = 0.1, maxConsecutiveValues: int = 3) -> int:
205 """Filter out bad values from a dataset, replacing them in-place.
207 This function replaces non-physical points in the dataset with an
208 extrapolation of the preceding two values. No more than 3 successive data
209 points are allowed to be replaced. Minimum length of the input is 3 points.
211 Parameters
212 ----------
213 values : `list` or `np.ndarray`
214 The dataset containing the values to be filtered.
215 maxDelta : `float`, optional
216 The maximum allowed difference between consecutive values. Values with
217 a difference greater than `maxDelta` will be considered as bad values
218 and replaced with an extrapolation.
219 maxConsecutiveValues : `int`, optional
220 The maximum number of consecutive values to replace. Defaults to 3.
222 Returns
223 -------
224 nBadPoints : `int`
225 The number of bad values that were replaced out.
226 """
227 # Find non-physical points and replace with extrapolation. No more than
228 # maxConsecutiveValues successive data points can be replaced.
229 badCounter = 0
230 consecutiveCounter = 0
232 log = logging.getLogger(__name__)
234 median = np.nanmedian(values)
235 # if either of the the first two points are more than maxDelta away from
236 # the median, replace them with the median
237 for i in range(2):
238 if abs(values[i] - median) > maxDelta:
239 log.warning(f"Replacing bad value of {values[i]} at index {i} with {median=}")
240 values[i] = median
241 badCounter += 1
243 # from the second element of the array, walk through and calculate the
244 # difference between each element and the previous one. If the difference
245 # is greater than maxDelta, replace the element with the average of the
246 # previous two known good values, i.e. ones which have not been replaced.
247 # if the first two points differ from the median by more than maxDelta,
248 # replace them with the median
249 lastGoodValue1 = values[1] # the most recent good value
250 lastGoodValue2 = values[0] # the second most recent good value
251 replacementValue = (lastGoodValue1 + lastGoodValue2) / 2.0 # in case we have to replace the first value
252 for i in range(2, len(values)):
253 if abs(values[i] - lastGoodValue1) >= maxDelta:
254 if consecutiveCounter < maxConsecutiveValues:
255 consecutiveCounter += 1
256 badCounter += 1
257 log.warning(f"Replacing value at index {i} with {replacementValue}")
258 values[i] = replacementValue
259 else:
260 log.warning(
261 f"More than 3 consecutive replacements at index {i}. Stopping replacements"
262 " until the next good value."
263 )
264 else:
265 lastGoodValue2 = lastGoodValue1
266 lastGoodValue1 = values[i]
267 replacementValue = (lastGoodValue1 + lastGoodValue2) / 2.0
268 consecutiveCounter = 0
269 return badCounter
def plotEvent(
    client: EfdClient,
    event: TMAEvent,
    fig: matplotlib.figure.Figure | None = None,
    prePadding: float = 0,
    postPadding: float = 0,
    commands: dict[pd.Timestamp | datetime.datetime, str] = {},
    azimuthData: pd.DataFrame | None = None,
    elevationData: pd.DataFrame | None = None,
    doFilterResiduals: bool = False,
    maxDelta: float = 0.1,
    metadataWriter=None,
) -> matplotlib.figure.Figure:
    """Plot the TMA axis positions over the course of a given TMAEvent.

    Plots the axis motion profiles for the given event, with optional padding
    at the start and end of the event. If the data is provided via the
    azimuthData and elevationData parameters, it will be used, otherwise it
    will be queried from the EFD.

    Optionally plots any commands issued during or around the event, if these
    are supplied. Commands are supplied as a dictionary of the command topic
    strings, with values as astro.time.Time objects at which the command was
    issued.

    Due to a problem with the way the data is uploaded to the EFD, there are
    occasional points in the tracking error plots that are very much larger
    than the typical mount jitter. These points are unphysical, since it is not
    possible for the mount to move that fast. We don't want these points, which
    are not true mount problems, to distract from any real mount problems, and
    these can be filtered out via the ``doFilterResiduals`` kwarg, which
    replaces these non-physical points with an extrapolation of the average of
    the preceding two known-good points. If the first two points are bad these
    are replaced with the median of the dataset. The maximum difference between
    the model and the actual data, in arcseconds, to allow before filtering a
    data point can be set with the ``maxDelta`` kwarg.

    Parameters
    ----------
    client : `lsst_efd_client.efd_helper.EfdClient`
        The EFD client to use.
    event : `lsst.summit.utils.tmaUtils.TMAEvent`
        The event to plot.
    fig : `matplotlib.figure.Figure`, optional
        The figure to plot on. If not specified, a new figure will be created.
    prePadding : `float`, optional
        The amount of time to pad the event with before the start time, in
        seconds.
    postPadding : `float`, optional
        The amount of time to pad the event with after the end time, in
        seconds.
    commands : `dict` [`pd.Timestamp`, `str`], or
               `dict` [`datetime.datetime`, `str`], or optional
        A dictionary of commands to plot on the figure. The keys are the times
        at which a command was issued, and the value is the command string, as
        returned by efdUtils.getCommands().
    azimuthData : `pd.DataFrame`, optional
        The azimuth data to plot. If not specified, it will be queried from the
        EFD.
    elevationData : `pd.DataFrame`, optional
        The elevation data to plot. If not specified, it will be queried from
        the EFD.
    doFilterResiduals : 'bool', optional
        Enables filtering of unphysical data points in the tracking residuals.
    maxDelta : `float`, optional
        The maximum difference between the model and the actual data, in
        arcseconds, to allow before filtering the data point. Ignored if
        ``doFilterResiduals`` is `False`.
    metadataWriter : `callable`, optional
        Should be a callable
        ``lsst.rubintv.production.utils.writeMetadataShard`` function that has
        had the path filled in with ``functools.partial`` so that it will just
        write out the data when called with the event's dayObs and a
        dictionary containing the row data that should be written.

    Returns
    -------
    fig : `matplotlib.figure.Figure`
        The figure on which the plot was made.
    """
    # NOTE(review): `commands={}` is a mutable default; it is only read
    # (iterated) here, never mutated, so this is safe as-is.

    def tickFormatter(value, tick_number: float) -> str:
        # Convert the value to a string without subtracting large numbers
        # tick_number is unused.
        return f"{value:.2f}"

    def getPlotTime(time: pd.Timestamp | Time | datetime.datetime):
        """Get the right time to plot a point from the various time formats."""
        match time:
            case pd.Timestamp():
                return time.to_pydatetime()
            case Time():
                return time.utc.datetime
            case datetime.datetime():
                return time
            case _:
                raise ValueError(f"Unknown type for commandTime: {type(time)}")

    # plot any commands we might have
    if not isinstance(commands, dict):
        raise TypeError("commands must be a dict of command names with values as" " astropy.time.Time values")

    if fig is None:
        fig = plt.figure(figsize=(10, 8))
        log = logging.getLogger(__name__)
        log.warning(
            "Making new matplotlib figure - if this is in a loop you're going to have a bad time."
            " Pass in a figure with fig = plt.figure(figsize=(10, 8)) to avoid this warning."
        )

    fig.clear()
    ax1p5 = None  # need to always be defined
    if event.type.name == "TRACKING":
        # TRACKING events get an extra middle panel for the tracking residuals
        ax1, ax1p5, ax2 = fig.subplots(
            3, sharex=True, gridspec_kw={"wspace": 0, "hspace": 0, "height_ratios": [2.5, 1, 1]}
        )
    else:
        ax1, ax2 = fig.subplots(
            2, sharex=True, gridspec_kw={"wspace": 0, "hspace": 0, "height_ratios": [2.5, 1]}
        )

    if azimuthData is None or elevationData is None:
        azimuthData, elevationData = getAzimuthElevationDataForEvent(
            client, event, prePadding=prePadding, postPadding=postPadding
        )

    # Use the native color cycle for the lines. Because they're on different
    # axes they don't cycle by themselves
    lineColors = [p["color"] for p in plt.rcParams["axes.prop_cycle"]]
    nColors = len(lineColors)
    colorCounter = 0

    ax1.plot(azimuthData["actualPosition"], label="Azimuth position", c=lineColors[colorCounter % nColors])
    colorCounter += 1
    ax1.yaxis.set_major_formatter(FuncFormatter(tickFormatter))
    ax1.set_ylabel("Azimuth (degrees)")

    ax1_twin = ax1.twinx()
    ax1_twin.plot(
        elevationData["actualPosition"], label="Elevation position", c=lineColors[colorCounter % nColors]
    )
    colorCounter += 1
    ax1_twin.yaxis.set_major_formatter(FuncFormatter(tickFormatter))
    ax1_twin.set_ylabel("Elevation (degrees)")
    ax1.set_xticks([])  # remove x tick labels on the hidden upper x-axis

    ax2_twin = ax2.twinx()
    ax2.plot(azimuthData["actualTorque"], label="Azimuth torque", c=lineColors[colorCounter % nColors])
    colorCounter += 1
    ax2_twin.plot(
        elevationData["actualTorque"], label="Elevation torque", c=lineColors[colorCounter % nColors]
    )
    colorCounter += 1
    ax2.set_ylabel("Azimuth torque (Nm)")
    ax2_twin.set_ylabel("Elevation torque (Nm)")
    ax2.set_xlabel("Time (UTC)")  # yes, it really is UTC, matplotlib converts this automatically!

    # put the ticks at an angle, and right align with the tick marks
    ax2.set_xticks(ax2.get_xticks())  # needed to supress a user warning
    xlabels = ax2.get_xticks()
    ax2.set_xticklabels(xlabels, rotation=40, ha="right")
    ax2.xaxis.set_major_locator(mdates.AutoDateLocator())
    ax2.xaxis.set_major_formatter(mdates.DateFormatter("%H:%M:%S"))

    if event.type.name == "TRACKING":
        # returns a copy
        clippedAzimuthData = clipDataToEvent(azimuthData, event, postPadding=TRACKING_RESIDUAL_TAIL_CLIP)
        clippedElevationData = clipDataToEvent(elevationData, event, postPadding=TRACKING_RESIDUAL_TAIL_CLIP)

        azError = clippedAzimuthData["azError"].values
        elError = clippedElevationData["elError"].values
        elVals = clippedElevationData["actualPosition"].values
        if doFilterResiduals:
            # Filtering out bad values (filterBadValues mutates the arrays
            # in-place and returns the number of replaced points)
            nReplacedAz = filterBadValues(azError, maxDelta)
            nReplacedEl = filterBadValues(elError, maxDelta)
            clippedAzimuthData["azError"] = azError
            clippedElevationData["elError"] = elError
        # Calculate RMS
        az_rms = np.sqrt(np.mean(azError * azError))
        el_rms = np.sqrt(np.mean(elError * elError))

        # Calculate Image impact RMS
        # We are less sensitive to Az errors near the zenith
        image_az_rms = az_rms * np.cos(elVals[0] * np.pi / 180.0)
        image_el_rms = el_rms
        image_impact_rms = np.sqrt(image_az_rms**2 + image_el_rms**2)
        ax1p5.plot(
            clippedAzimuthData["azError"],
            label="Azimuth tracking error",
            c=lineColors[colorCounter % nColors],
        )
        colorCounter += 1
        ax1p5.plot(
            clippedElevationData["elError"],
            label="Elevation tracking error",
            c=lineColors[colorCounter % nColors],
        )
        colorCounter += 1
        # +/- 0.01 arcsec reference lines for the tracking error panel
        ax1p5.axhline(0.01, ls="-.", color="black")
        ax1p5.axhline(-0.01, ls="-.", color="black")
        ax1p5.yaxis.set_major_formatter(FuncFormatter(tickFormatter))
        ax1p5.set_ylabel("Tracking error (arcsec)")
        ax1p5.set_xticks([])  # remove x tick labels on the hidden upper x-axis
        ax1p5.set_ylim(-0.05, 0.05)
        ax1p5.set_yticks([-0.04, -0.02, 0.0, 0.02, 0.04])
        ax1p5.legend()
        ax1p5.text(0.1, 0.9, f"Image impact RMS = {image_impact_rms:.3f} arcsec", transform=ax1p5.transAxes)
        if doFilterResiduals:
            ax1p5.text(
                0.1,
                0.8,
                f"{nReplacedAz} bad azimuth values and {nReplacedEl} bad elevation values were replaced",
                transform=ax1p5.transAxes,
            )
        if metadataWriter is not None:
            # write the image-impact RMS (and a warning/bad flag if above
            # threshold) as a metadata shard row keyed by seqNum
            md = {"Tracking image impact": f"{image_impact_rms:.3f}"}
            flagKey = "_Tracking image impact"
            if image_impact_rms > MOUNT_IMAGE_BAD_LEVEL:
                md.update({flagKey: "bad"})
            elif image_impact_rms > MOUNT_IMAGE_WARNING_LEVEL:
                md.update({flagKey: "warning"})

            rowData = {event.seqNum: md}
            metadataWriter(dayObs=event.dayObs, mdDict=rowData)

    if prePadding or postPadding:
        # note the conversion to utc because the x-axis from the dataframe
        # already got automagically converted when plotting before, so this is
        # necessary for things to line up
        ax1_twin.axvline(event.begin.utc.datetime, c="k", ls="--", alpha=0.5, label="Event begin/end")
        ax1_twin.axvline(event.end.utc.datetime, c="k", ls="--", alpha=0.5)
        # extend lines down across lower plot, but do not re-add label
        ax2_twin.axvline(event.begin.utc.datetime, c="k", ls="--", alpha=0.5)
        ax2_twin.axvline(event.end.utc.datetime, c="k", ls="--", alpha=0.5)
        if ax1p5:
            ax1p5.axvline(event.begin.utc.datetime, c="k", ls="--", alpha=0.5)
            ax1p5.axvline(event.end.utc.datetime, c="k", ls="--", alpha=0.5)

    for commandTime, command in commands.items():
        plotTime = getPlotTime(commandTime)
        ax1_twin.axvline(
            plotTime, c=lineColors[colorCounter % nColors], ls="--", alpha=0.75, label=f"{command}"
        )
        # extend lines down across lower plot, but do not re-add label
        ax2_twin.axvline(plotTime, c=lineColors[colorCounter % nColors], ls="--", alpha=0.75)
        if ax1p5:
            ax1p5.axvline(plotTime, c=lineColors[colorCounter % nColors], ls="--", alpha=0.75)
        colorCounter += 1

    # combine the legends and put inside the plot
    handles1a, labels1a = ax1.get_legend_handles_labels()
    handles1b, labels1b = ax1_twin.get_legend_handles_labels()
    handles2a, labels2a = ax2.get_legend_handles_labels()
    handles2b, labels2b = ax2_twin.get_legend_handles_labels()

    handles = handles1a + handles1b + handles2a + handles2b
    labels = labels1a + labels1b + labels2a + labels2b
    # ax2 is "in front" of ax1 because it has the vlines plotted on it, and
    # vlines are on ax2 so that they appear at the bottom of the legend, so
    # make sure to plot the legend on ax2, otherwise the vlines will go on top
    # of the otherwise-opaque legend.
    ax1_twin.legend(handles, labels, facecolor="white", framealpha=1)

    # Add title with the event name, type etc
    dayObsStr = dayObsIntToString(event.dayObs)
    title = (
        # top line is the event title, the details go on the line below
        f"{dayObsStr} - seqNum {event.seqNum} (version {event.version})"
        f"\nDuration = {event.duration:.2f}s"
        f" Event type: {event.type.name}"
        f" End reason: {event.endReason.name}"
    )
    ax1_twin.set_title(title)
    return fig
def getCommandsDuringEvent(
    client: EfdClient,
    event: TMAEvent,
    commands: Iterable[str] = ("raDecTarget",),  # fixed: was ("raDecTarget") - a plain str, not a tuple
    prePadding: float = 0,
    postPadding: float = 0,
    timeFormat: str = "python",
    log: logging.Logger | None = None,
    doLog: bool = True,
):
    """Get the commands issued during an event.

    Get the times at which the specified commands were issued during the event.

    Parameters
    ----------
    client : `lsst_efd_client.efd_helper.EfdClient`
        The EFD client to use.
    event : `lsst.summit.utils.tmaUtils.TMAEvent`
        The event to plot.
    commands : `list` of `str`, optional
        The commands or command aliases to look for. Defaults to
        ['raDecTarget'].
    prePadding : `float`, optional
        The amount of time to pad the event with before the start time, in
        seconds.
    postPadding : `float`, optional
        The amount of time to pad the event with after the end time, in
        seconds.
    timeFormat : `str`, optional
        One of 'pandas' or 'astropy' or 'python'. If 'pandas', the dictionary
        keys will be pandas timestamps, if 'astropy' they will be astropy times
        and if 'python' they will be python datetimes.
    log : `logging.Logger`, optional
        The logger to use. If not specified, a new logger will be created if
        needed.
    doLog : `bool`, optional
        Whether to log messages. Defaults to True.

    Returns
    -------
    commandTimes : `dict` [`time`, `str`]
        A dictionary of the times at which the commands were issued. The type
        that `time` takes is determined by the format key, and defaults to
        python datetime.
    """
    commands = list(ensure_iterable(commands))
    # translate any aliases to their full command topic names
    fullCommands = [c if c not in COMMAND_ALIASES else COMMAND_ALIASES[c] for c in commands]
    del commands  # make sure we always use their full names

    commandTimes = getCommands(
        client,
        fullCommands,
        begin=event.begin,
        end=event.end,
        prePadding=prePadding,
        postPadding=postPadding,
        timeFormat=timeFormat,
    )

    if not commandTimes and doLog:
        # fixed: previously a new logger was always created here, silently
        # ignoring any logger passed in via the ``log`` parameter
        if log is None:
            log = logging.getLogger(__name__)
        log.info(f"Found no commands in {fullCommands} issued during event {event.seqNum}")

    return commandTimes
def _initializeTma(tma: TMAStateMachine) -> None:
    """Helper function to turn a TMA into a valid state for testing.

    Do not call directly in normal usage or code, as this just arbitrarily
    sets values to make the TMA valid.

    Parameters
    ----------
    tma : `lsst.summit.utils.tmaUtils.TMAStateMachine`
        The TMA state machine model to initialize.
    """
    # Both axes powered on, stopped, and not in position - an arbitrary but
    # fully-populated set of components, making the state machine valid.
    validParts = {
        "azimuthInPosition": False,
        "azimuthMotionState": AxisMotionState.STOPPED,
        "azimuthSystemState": PowerState.ON,
        "elevationInPosition": False,
        "elevationMotionState": AxisMotionState.STOPPED,
        "elevationSystemState": PowerState.ON,
    }
    tma._parts.update(validParts)
635@dataclass(kw_only=True, frozen=True)
636class TMAEvent:
637 """A movement event for the TMA.
639 Contains the dayObs on which the event occured, using the standard
640 observatory definition of the dayObs, and the sequence number of the event,
641 which is unique for each event on a given dayObs.
643 The event type can be either 'SLEWING' or 'TRACKING', defined as:
644 - SLEWING: some part of the TMA is in motion
645 - TRACKING: both axes are in position and tracking the sky
647 The end reason can be 'STOPPED', 'TRACKING', 'FAULT', 'SLEWING', or 'OFF'.
648 - SLEWING: The previous event was a TRACKING event, and one or more of
649 the TMA components either stopped being in position, or stopped
650 moving, or went into fault, or was turned off, and hence we are now
651 only slewing and no longer tracking the sky.
652 - TRACKING: the TMA started tracking the sky when it wasn't previously.
653 Usualy this would always be preceded by directly by a SLEWING
654 event, but this is not strictly true, as the EUI seems to be able
655 to make the TMA start tracking the sky without slewing first.
656 - STOPPED: the components of the TMA transitioned to the STOPPED state.
657 - FAULT: the TMA went into fault.
658 - OFF: the TMA components were turned off.
660 Note that this class is not intended to be instantiated directly, but
661 rather to be returned by the ``TMAEventMaker.getEvents()`` function.
663 Parameters
664 ----------
665 dayObs : `int`
666 The dayObs on which the event occured.
667 seqNum : `int`
668 The sequence number of the event,
669 type : `lsst.summit.utils.tmaUtils.TMAState`
670 The type of the event, either 'SLEWING' or 'TRACKING'.
671 endReason : `lsst.summit.utils.tmaUtils.TMAState`
672 The reason the event ended, either 'STOPPED', 'TRACKING', 'FAULT',
673 'SLEWING', or 'OFF'.
674 duration : `float`
675 The duration of the event, in seconds.
676 begin : `astropy.time.Time`
677 The time the event began.
678 end : `astropy.time.Time`
679 The time the event ended.
680 blockInfos : `list` of `lsst.summit.utils.tmaUtils.BlockInfo`, or `None`
681 The block infomation, if any, relating to the event. Could be `None`,
682 or one or more block informations.
683 version : `int`
684 The version of the TMAEvent class. Equality between events is only
685 valid for a given version of the class. If the class definition
686 changes, the time ranges can change, and hence the equality between
687 events is ``False``.
688 _startRow : `int`
689 The first row in the merged EFD data which is part of the event.
690 _endRow : `int`
691 The last row in the merged EFD data which is part of the event.
692 """
694 dayObs: int
695 seqNum: int
696 type: str # can be 'SLEWING', 'TRACKING'
697 endReason: str # can be 'STOPPED', 'TRACKING', 'FAULT', 'SLEWING', 'OFF'
698 duration: float # seconds
699 begin: Time
700 end: Time
701 blockInfos: list = field(default_factory=list)
702 version: int = 0 # update this number any time a code change which could change event definitions is made
703 _startRow: int
704 _endRow: int
706 def __lt__(self, other: TMAEvent) -> bool:
707 if self.version != other.version:
708 raise ValueError(
709 f"Cannot compare TMAEvents with different versions: {self.version} != {other.version}"
710 )
711 if self.dayObs < other.dayObs:
712 return True
713 elif self.dayObs == other.dayObs:
714 return self.seqNum < other.seqNum
715 return False
717 def __repr__(self) -> str:
718 return (
719 f"TMAEvent(dayObs={self.dayObs}, seqNum={self.seqNum}, type={self.type!r},"
720 f" endReason={self.endReason!r}, duration={self.duration}, begin={self.begin!r},"
721 f" end={self.end!r}"
722 )
724 def __hash__(self) -> int:
725 # deliberately don't hash the blockInfos here, as they are not
726 # a core part of the event itself, and are listy and cause problems
727 return hash(
728 (
729 self.dayObs,
730 self.seqNum,
731 self.type,
732 self.endReason,
733 self.duration,
734 self.begin,
735 self.end,
736 self.version,
737 self._startRow,
738 self._endRow,
739 )
740 )
742 def _ipython_display_(self) -> None:
743 print(self.__str__())
745 def __str__(self) -> str:
746 def indent(string):
747 return "\n" + "\n".join([" " + s for s in string.splitlines()])
749 blockInfoStr = "None"
750 if self.blockInfos is not None:
751 blockInfoStr = "".join(indent(str(i)) for i in self.blockInfos)
753 return (
754 f"dayObs: {self.dayObs}\n"
755 f"seqNum: {self.seqNum}\n"
756 f"type: {self.type.name}\n"
757 f"endReason: {self.endReason.name}\n"
758 f"duration: {self.duration}\n"
759 f"begin: {self.begin!r}\n"
760 f"end: {self.end!r}\n"
761 f"blockInfos: {blockInfoStr}"
762 )
764 def associatedWith(
765 self,
766 block: int | None = None,
767 blockSeqNum: int | None = None,
768 ticket: str | None = None,
769 salIndex: int | None = None,
770 ) -> bool:
771 """Check whether an event is associated with a set of parameters.
773 Check if an event is associated with a specific block and/or ticket
774 and/or salIndex. All specified parameters must match for the function
775 to return True. If checking if an event is in a block, the blockSeqNum
776 can also be specified to identify events which related to a given
777 running the specified block.
779 Parameters
780 ----------
781 block : `int`, optional
782 The block number to check for.
783 blockSeqNum : `int`, optional
784 The block sequence number to check for, if the block is specified.
785 ticket : `str`, optional
786 The ticket number to check for.
787 salIndex : `int`, optional
788 The salIndex to check for.
790 Returns
791 -------
792 relates : `bool`
793 Whether the event is associated with the specified block, ticket,
794 and salIndex.
795 """
796 if all([block is None, ticket is None, salIndex is None]):
797 raise ValueError("Must specify at least one of block, ticket, or salIndex")
799 if blockSeqNum is not None and block is None:
800 raise ValueError("block must be specified if blockSeqNum is specified")
802 for blockInfo in self.blockInfos:
803 # "X is None or" is used for each parameter to allow it to be None
804 # in the kwargs
805 blockMatches = False
806 if block is not None:
807 if blockSeqNum is None and blockInfo.blockNumber == block:
808 blockMatches = True
809 elif (
810 blockSeqNum is not None
811 and blockInfo.blockNumber == block
812 and blockInfo.seqNum == blockSeqNum
813 ):
814 blockMatches = True
815 else:
816 blockMatches = True # no block specified at all, so it matches
818 salIndexMatches = salIndex is None or salIndex in blockInfo.salIndices
819 ticketMatches = ticket is None or ticket in blockInfo.tickets
821 if blockMatches and salIndexMatches and ticketMatches:
822 return True
824 return False
class TMAState(enum.IntEnum):
    """Overall state of the TMA.

    States are defined as follows:

    UNINITIALIZED
        We have not yet got data for all relevant components, so the overall
        state is undefined.
    STOPPED
        All components are on, and none are moving.
    TRACKING
        We are tracking the sky.
    SLEWING
        One or more components are moving, and one or more are not tracking the
        sky. This should probably be called MOVING, as it includes: slewing,
        MOVING_POINT_TO_POINT, and JOGGING.
    FAULT
        All (if engineeringMode) or any (if not engineeringMode) components are
        in fault.
    OFF
        All components are off.
    """

    UNINITIALIZED = -1  # sentinel: not all components have reported yet
    STOPPED = 0
    TRACKING = 1
    SLEWING = 2
    FAULT = 3
    OFF = 4

    def __repr__(self) -> str:
        return f"TMAState.{self.name}"
def getAxisAndType(rowFor: str) -> tuple[str, str]:
    """Get the axis the data relates to, and the type of data it contains.

    Parameters
    ----------
    rowFor : `str`
        The column in the dataframe denoting what this row is for, e.g.
        "elevationMotionState" or "azimuthInPosition", etc.

    Returns
    -------
    axis : `str`
        The axis the row is for, e.g. "azimuth", "elevation".
    rowType : `str`
        The type of the row, e.g. "MotionState", "SystemState", "InPosition".

    Raises
    ------
    ValueError
        If the axis and row type cannot be parsed from ``rowFor``.
    """
    # anchored at the end of the string, so any topic prefix is ignored
    pattern = r"(azimuth|elevation)(InPosition|MotionState|SystemState)$"
    match = re.search(pattern, rowFor)
    if match is None:
        raise ValueError(f"Could not parse axis and rowType from {rowFor=}")
    axis, rowType = match.groups()

    assert rowFor.endswith(f"{axis}{rowType}")
    return axis, rowType
class ListViewOfDict:
    """A list-like view onto a chosen subset of a dictionary's items.

    Building a plain list from a dictionary would copy the item references at
    construction time; this class instead resolves every access through the
    underlying dictionary itself, so the view always reflects the current
    contents. This is useful for manipulating components, such as those of the
    TMA, in their logical groupings.
    """

    def __init__(self, underlyingDictionary: dict, keysToLink: list):
        self.dictionary = underlyingDictionary
        self.keys = keysToLink

    def __getitem__(self, index: Any) -> Any:
        key = self.keys[index]
        return self.dictionary[key]

    def __setitem__(self, index: int, value: Any) -> None:
        key = self.keys[index]
        self.dictionary[key] = value

    def __len__(self) -> int:
        return len(self.keys)
912class TMAStateMachine:
913 """A state machine model of the TMA.
915 Note that this is currently only implemented for the azimuth and elevation
916 axes, but will be extended to include the rotator in the future.
918 Note that when used for event generation, changing ``engineeringMode`` to
919 False might change the resulting list of events, and that if the TMA moves
920 with some axis in fault, then these events will be missed. It is therefore
921 thought that ``engineeringMode=True`` should always be used when generating
922 events. The option, however, is there for completeness, as this will be
923 useful for knowing is the CSC would consider the TMA to be in fault in the
924 general case.
926 Parameters
927 ----------
928 engineeringMode : `bool`, optional
929 Whether the TMA is in engineering mode. Defaults to True. If False,
930 then the TMA will be in fault if any component is in fault. If True,
931 then the TMA will be in fault only if all components are in fault.
932 debug : `bool`, optional
933 Whether to log debug messages. Defaults to False.
934 """
936 _UNINITIALIZED_VALUE: int = -999
938 def __init__(self, engineeringMode: bool = True, debug: bool = False):
939 self.engineeringMode = engineeringMode
940 self.log = logging.getLogger("lsst.summit.utils.tmaUtils.TMA")
941 if debug:
942 self.log.level = logging.DEBUG
943 self._mostRecentRowTime = -1
945 # the actual components of the TMA
946 self._parts = {
947 "azimuthInPosition": self._UNINITIALIZED_VALUE,
948 "azimuthMotionState": self._UNINITIALIZED_VALUE,
949 "azimuthSystemState": self._UNINITIALIZED_VALUE,
950 "elevationInPosition": self._UNINITIALIZED_VALUE,
951 "elevationMotionState": self._UNINITIALIZED_VALUE,
952 "elevationSystemState": self._UNINITIALIZED_VALUE,
953 }
954 systemKeys = ["azimuthSystemState", "elevationSystemState"]
955 positionKeys = ["azimuthInPosition", "elevationInPosition"]
956 motionKeys = ["azimuthMotionState", "elevationMotionState"]
958 # references to the _parts as conceptual groupings
959 self.system = ListViewOfDict(self._parts, systemKeys)
960 self.motion = ListViewOfDict(self._parts, motionKeys)
961 self.inPosition = ListViewOfDict(self._parts, positionKeys)
963 # tuples of states for state collapsing. Note that STOP_LIKE +
964 # MOVING_LIKE must cover the full set of AxisMotionState enums
965 self.STOP_LIKE = (AxisMotionState.STOPPING, AxisMotionState.STOPPED, AxisMotionState.TRACKING_PAUSED)
966 self.MOVING_LIKE = (
967 AxisMotionState.MOVING_POINT_TO_POINT,
968 AxisMotionState.JOGGING,
969 AxisMotionState.TRACKING,
970 )
971 # Likewise, ON_LIKE + OFF_LIKE must cover the full set of PowerState
972 # enums
973 self.OFF_LIKE = (PowerState.OFF, PowerState.TURNING_OFF)
974 self.ON_LIKE = (PowerState.ON, PowerState.TURNING_ON)
975 self.FAULT_LIKE = (PowerState.FAULT,) # note the trailing comma - this must be an iterable
977 def apply(self, row: pd.Series) -> None:
978 """Apply a row of data to the TMA state.
980 Checks that the row contains data for a later time than any data
981 previously applied, and applies the relevant column entry to the
982 relevant component.
984 Parameters
985 ----------
986 row : `pd.Series`
987 The row of data to apply to the state machine.
988 """
989 timestamp = row["private_efdStamp"]
990 if timestamp < self._mostRecentRowTime: # NB equals is OK, technically, though it never happens
991 raise ValueError(
992 "TMA evolution must be monotonic increasing in time, tried to apply a row which"
993 " predates the most previous one"
994 )
995 self._mostRecentRowTime = timestamp
997 rowFor = row["rowFor"] # e.g. elevationMotionState
998 axis, rowType = getAxisAndType(rowFor) # e.g. elevation, MotionState
999 value = self._getRowPayload(row, rowType, rowFor)
1000 self.log.debug(f"Setting {rowFor} to {repr(value)}")
1001 self._parts[rowFor] = value
1002 try:
1003 # touch the state property as this executes the sieving, to make
1004 # sure we don't fall through the sieve at any point in time
1005 _ = self.state
1006 except RuntimeError as e:
1007 # improve error reporting, but always reraise this, as this is a
1008 # full-blown failure
1009 raise RuntimeError(f"Failed to apply {value} to {axis}{rowType} with state {self._parts}") from e
1011 def _getRowPayload(
1012 self, row: pd.Series, rowType: str, rowFor: str
1013 ) -> bool | AxisMotionState | PowerState:
1014 """Get the relevant value from the row.
1016 Given the row, and which component it relates to, get the relevant
1017 value, as a bool or cast to the appropriate enum class.
1019 Parameters
1020 ----------
1021 row : `pd.Series`
1022 The row of data from the dataframe.
1023 rowType : `str`
1024 The type of the row, e.g. "MotionState", "SystemState",
1025 "InPosition".
1026 rowFor : `str`
1027 The component the row is for, e.g. "azimuth", "elevation".
1029 Returns
1030 -------
1031 value : `bool` or `enum`
1032 The value of the row, as a bool or enum, depending on the
1033 component, cast to the appropriate enum class or bool.
1034 """
1035 match rowType:
1036 case "MotionState":
1037 value = row[f"state_{rowFor}"]
1038 return AxisMotionState(value)
1039 case "SystemState":
1040 value = row[f"powerState_{rowFor}"]
1041 return PowerState(value)
1042 case "InPosition":
1043 value = row[f"inPosition_{rowFor}"]
1044 return bool(value)
1045 case _:
1046 raise ValueError(f"Failed to get row payload with {rowType=} and {row=}")
1048 @property
1049 def _isValid(self) -> bool:
1050 """Has the TMA had a value applied to all its components?
1052 If any component has not yet had a value applied, the TMA is not valid,
1053 as those components will be in an unknown state.
1055 Returns
1056 -------
1057 isValid : `bool`
1058 Whether the TMA is fully initialized.
1059 """
1060 return not any([v == self._UNINITIALIZED_VALUE for v in self._parts.values()])
1062 # state inspection properties - a high level way of inspecting the state as
1063 # an API
1064 @property
1065 def isMoving(self) -> bool:
1066 return self.state in [TMAState.TRACKING, TMAState.SLEWING]
1068 @property
1069 def isNotMoving(self) -> bool:
1070 return not self.isMoving
1072 @property
1073 def isTracking(self) -> bool:
1074 return self.state == TMAState.TRACKING
1076 @property
1077 def isSlewing(self) -> bool:
1078 return self.state == TMAState.SLEWING
1080 @property
1081 def canMove(self) -> bool:
1082 badStates = [PowerState.OFF, PowerState.TURNING_OFF, PowerState.FAULT, PowerState.UNKNOWN]
1083 return bool(
1084 self._isValid
1085 and self._parts["azimuthSystemState"] not in badStates
1086 and self._parts["elevationSystemState"] not in badStates
1087 )
1089 # Axis inspection properties, designed for internal use. These return
1090 # iterables so that they can be used in any() and all() calls, which make
1091 # the logic much easier to read, e.g. to see if anything is moving, we can
1092 # write `if not any(_axisInMotion):`
1093 @property
1094 def _axesInFault(self) -> list[bool]:
1095 return [x in self.FAULT_LIKE for x in self.system]
1097 @property
1098 def _axesOff(self) -> list[bool]:
1099 return [x in self.OFF_LIKE for x in self.system]
1101 @property
1102 def _axesOn(self) -> list[bool]:
1103 return [not x for x in self._axesOn]
1105 @property
1106 def _axesInMotion(self) -> list[bool]:
1107 return [x in self.MOVING_LIKE for x in self.motion]
1109 @property
1110 def _axesTRACKING(self) -> list[bool]:
1111 """Note this is deliberately named _axesTRACKING and not _axesTracking
1112 to make it clear that this is the AxisMotionState type of TRACKING and
1113 not the normal conceptual notion of tracking (the sky, i.e. as opposed
1114 to slewing).
1115 """
1116 return [x == AxisMotionState.TRACKING for x in self.motion]
1118 @property
1119 def _axesInPosition(self) -> list[bool]:
1120 return [x is True for x in self.inPosition]
1122 @property
1123 def state(self) -> TMAState:
1124 """The overall state of the TMA.
1126 Note that this is both a property, and also the method which applies
1127 the logic sieve to determine the state at a given point in time.
1129 Returns
1130 -------
1131 state : `lsst.summit.utils.tmaUtils.TMAState`
1132 The overall state of the TMA.
1133 """
1134 # first, check we're valid, and if not, return UNINITIALIZED state, as
1135 # things are unknown
1136 if not self._isValid:
1137 return TMAState.UNINITIALIZED
1139 # if we're not in engineering mode, i.e. we're under normal CSC
1140 # control, then if anything is in fault, we're in fault. If we're
1141 # engineering then some axes will move when others are in fault
1142 if not self.engineeringMode:
1143 if any(self._axesInFault):
1144 return TMAState.FAULT
1145 else:
1146 # we're in engineering mode, so return fault state if ALL are in
1147 # fault
1148 if all(self._axesInFault):
1149 return TMAState.FAULT
1151 # if all axes are off, the TMA is OFF
1152 if all(self._axesOff):
1153 return TMAState.OFF
1155 # we know we're valid and at least some axes are not off, so see if
1156 # we're in motion if no axes are moving, we're stopped
1157 if not any(self._axesInMotion):
1158 return TMAState.STOPPED
1160 # now we know we're initialized, and that at least one axis is moving
1161 # so check axes for motion and in position. If all axes are tracking
1162 # and all are in position, we're tracking the sky
1163 if all(self._axesTRACKING) and all(self._axesInPosition):
1164 return TMAState.TRACKING
1166 # we now know explicitly that not everything is in position, so we no
1167 # longer need to check that. We do actually know that something is in
1168 # motion, but confirm that's the case and return SLEWING
1169 if any(self._axesInMotion):
1170 return TMAState.SLEWING
1172 # if we want to differentiate between MOVING_POINT_TO_POINT moves,
1173 # JOGGING moves and regular slews, the logic in the step above needs to
1174 # be changed and the new steps added here.
1176 raise RuntimeError("State error: fell through the state sieve - rewrite your logic!")
class TMAEventMaker:
    """A class to create per-dayObs TMAEvents for the TMA's movements.

    If this class is being used in tests, make sure to pass the EFD client in,
    and create it with `makeEfdClient(testing=True)`. This ensures that the
    USDF EFD is "used" as this is the EFD which has the recorded data available
    in the test suite via `vcr`.

    Example usage:
    >>> dayObs = 20230630
    >>> eventMaker = TMAEventMaker()
    >>> events = eventMaker.getEvents(dayObs)
    >>> print(f'Found {len(events)} for {dayObs=}')

    Parameters
    ----------
    client : `lsst_efd_client.efd_helper.EfdClient`, optional
        The EFD client to use, created if not provided.
    """

    # the topics which need logical combination to determine the overall mount
    # state. Will need updating as new components are added to the system.

    # per-axis motion state topics
    # relevant column: 'state'
    _movingComponents: list[str] = [
        "lsst.sal.MTMount.logevent_azimuthMotionState",
        "lsst.sal.MTMount.logevent_elevationMotionState",
    ]

    # per-axis in-position topics
    # relevant column: 'inPosition'
    _inPositionComponents: list[str] = [
        "lsst.sal.MTMount.logevent_azimuthInPosition",
        "lsst.sal.MTMount.logevent_elevationInPosition",
    ]

    # the components which, if in fault, put the TMA into fault
    # relevant column: 'powerState'
    _stateComponents: list[str] = [
        "lsst.sal.MTMount.logevent_azimuthSystemState",
        "lsst.sal.MTMount.logevent_elevationSystemState",
    ]
1221 def __init__(self, client: EfdClient | None = None):
1222 if client is not None:
1223 self.client = client
1224 else:
1225 self.client = makeEfdClient()
1226 self.log = logging.getLogger(__name__)
1227 self._data = {}
    @dataclass(frozen=True)
    class ParsedState:
        # A single parsed event-span from the merged dataframe. Row numbers
        # (not times) are stored: see _statesToEventTuples, which constructs
        # these with eventStart=rowNum, so the previous `Time` annotation on
        # eventStart was incorrect.
        eventStart: int  # row number of the first row of the event
        eventEnd: int  # row number on which the event ended
        previousState: TMAState  # the state the TMA was in during the span
        state: TMAState  # the state which ended the span
1236 @staticmethod
1237 def isToday(dayObs: int) -> bool:
1238 """Find out if the specified dayObs is today, or in the past.
1240 If the day is today, the function returns ``True``, if it is in the
1241 past it returns ``False``. If the day is in the future, a
1242 ``ValueError`` is raised, as this indicates there is likely an
1243 off-by-one type error somewhere in the logic.
1245 Parameters
1246 ----------
1247 dayObs : `int`
1248 The dayObs to check, in the format YYYYMMDD.
1250 Returns
1251 -------
1252 isToday : `bool`
1253 ``True`` if the dayObs is today, ``False`` if it is in the past.
1255 Raises
1256 ValueError: if the dayObs is in the future.
1257 """
1258 todayDayObs = getCurrentDayObs_int()
1259 if dayObs == todayDayObs:
1260 return True
1261 if dayObs > todayDayObs:
1262 raise ValueError("dayObs is in the future")
1263 return False
1265 @staticmethod
1266 def _shortName(topic: str) -> str:
1267 """Get the short name of a topic.
1269 Parameters
1270 ----------
1271 topic : `str`
1272 The topic to get the short name of.
1274 Returns
1275 -------
1276 shortName : `str`
1277 The short name of the topic, e.g. 'azimuthInPosition'
1278 """
1279 # get, for example 'azimuthInPosition' from
1280 # lsst.sal.MTMount.logevent_azimuthInPosition
1281 return topic.split("_")[-1]
    def _mergeData(self, data: dict[str, pd.DataFrame]) -> pd.DataFrame:
        """Merge a dict of dataframes based on private_efdStamp, recording
        where each row came from.

        Given a dict of dataframes, keyed by topic, merge them into a single
        dataframe, adding a column to record which topic each row came from.

        Note that the input dataframes are modified in place: a ``rowFor``
        column is added to each non-empty dataframe.

        Parameters
        ----------
        data : `dict` of `str` : `pd.DataFrame`
            The dataframes to merge.

        Returns
        -------
        merged : `pd.DataFrame`
            The merged dataframe.
        """
        # these columns keep their original names; all others receive a
        # per-component suffix so the source of each column stays identifiable
        excludeColumns = ["private_efdStamp", "rowFor"]

        mergeArgs = {
            "how": "outer",
            "sort": True,
        }

        merged = None
        originalRowCounter = 0

        # Iterate over the keys and merge the corresponding DataFrames
        for key, df in data.items():
            if df.empty:
                # Must skip the df if it's empty, otherwise the merge will fail
                # due to lack of private_efdStamp. Because other axes might
                # still be in motion, so we still want to merge what we have
                continue

            originalRowCounter += len(df)
            component = self._shortName(key)  # Add suffix to column names to identify the source
            suffix = "_" + component

            df["rowFor"] = component

            columnsToSuffix = [col for col in df.columns if col not in excludeColumns]
            df_to_suffix = df[columnsToSuffix].add_suffix(suffix)
            df = pd.concat([df[excludeColumns], df_to_suffix], axis=1)

            if merged is None:
                merged = df.copy()
            else:
                merged = pd.merge(merged, df, **mergeArgs)

        # NOTE: the caller (_getEfdDataForDayObs) only invokes this when at
        # least one dataframe is non-empty, so ``merged`` is not None here
        merged = merged.loc[:, ~merged.columns.duplicated()]  # Remove duplicate columns after merge

        if len(merged) != originalRowCounter:
            self.log.warning(
                "Merged data has a different number of rows to the original data, some"
                " timestamps (rows) will contain more than one piece of actual information."
            )

        # if the index is still a DatetimeIndex here then we didn't actually
        # merge any data, so there is only data from a single component.
        # This is likely to result in no events, but not necessarily, and for
        # generality, instead we convert to a range index to ensure consistency
        # in the returned data, and allow processing to continue.
        if isinstance(merged.index, pd.DatetimeIndex):
            self.log.warning("Data was only found for a single component in the EFD.")
            merged.reset_index(drop=True, inplace=True)

        return merged
1352 def getEvent(self, dayObs: int, seqNum: int) -> TMAEvent | None:
1353 """Get a specific event for a given dayObs and seqNum.
1355 Repeated calls for the same ``dayObs`` will use the cached data if the
1356 day is in the past, and so will be much quicker. If the ``dayObs`` is
1357 the current day then the EFD will be queried for new data for each
1358 call, so a call which returns ``None`` on the first try might return an
1359 event on the next, if the TMA is still moving and thus generating
1360 events.
1362 Parameters
1363 ----------
1364 dayObs : `int`
1365 The dayObs to get the event for.
1366 seqNum : `int`
1367 The sequence number of the event to get.
1369 Returns
1370 -------
1371 event : `lsst.summit.utils.tmaUtils.TMAEvent`
1372 The event for the specified dayObs and seqNum, or `None` if the
1373 event was not found.
1374 """
1375 events = self.getEvents(dayObs)
1376 if seqNum <= len(events):
1377 event = events[seqNum]
1378 if event.seqNum != seqNum:
1379 # it's zero-indexed and contiguous so this must be true but
1380 # a sanity check doesn't hurt.
1381 raise AssertionError(f"Event sequence number mismatch: {event.seqNum} != {seqNum}")
1382 return event
1383 else:
1384 self.log.warning(f"Event {seqNum} not found for {dayObs}")
1385 return None
    def getEvents(self, dayObs: int, addBlockInfo: bool = True) -> list[TMAEvent]:
        """Get the TMA events for the specified dayObs.

        Gets the required mount data from the cache or the EFD as required,
        handling whether we're working with live vs historical data. The
        dataframes from the EFD are merged and applied to the TMAStateMachine,
        and that series of state changes is used to generate a list of
        TmaEvents for the day's data.

        If the data is for the current day, i.e. if new events can potentially
        land, then if the last event is "open" (meaning that the TMA appears to
        be in motion and thus the event is growing with time), then that event
        is excluded from the event list as it is expected to be changing with
        time, and will likely close eventually. However, if that situation
        occurs on a day in the past, then that event can never close, and the
        event is therefore included, but a warning about the open event is
        logged.

        Parameters
        ----------
        dayObs : `int`
            The dayObs for which to get the events.
        addBlockInfo : `bool`, optional
            Whether to add block information to the events. This allows
            skipping this step for speed when generating events for purposes
            which don't need block information.

        Returns
        -------
        events : `list` of `lsst.summit.utils.tmaUtils.TMAState`
            The events for the specified dayObs.
        """
        workingLive = self.isToday(dayObs)
        data = None

        if workingLive:
            # it's potentially updating data, so we must update the data
            # regardless of whether we have it already or not
            self.log.info(f"Updating mount data for {dayObs} from the EFD")
            self._getEfdDataForDayObs(dayObs)
            data = self._data[dayObs]
        elif dayObs in self._data:
            # data is in the cache and it's not being updated, so use it
            data = self._data[dayObs]
        elif dayObs not in self._data:
            # we don't have the data yet, but it's not growing, so put it in
            # the cache and use it from there
            self.log.info(f"Retrieving mount data for {dayObs} from the EFD")
            self._getEfdDataForDayObs(dayObs)
            data = self._data[dayObs]
        else:
            # the in/not-in branches above are exhaustive, so this cannot be
            # reached; it is kept as a guard against future edits
            raise RuntimeError("This should never happen")

        # if we don't have something to work with, log a warning and return
        if not self.dataFound(data):
            self.log.warning(f"No EFD data found for {dayObs=}")
            return []

        # applies the data to the state machine, and generates events from the
        # series of states which results
        events = self._calculateEventsFromMergedData(
            data, dayObs, dataIsForCurrentDay=workingLive, addBlockInfo=addBlockInfo
        )
        if not events:
            self.log.warning(f"Failed to calculate any events for {dayObs=} despite EFD data existing!")
        return events
1454 @staticmethod
1455 def dataFound(data: pd.DataFrame) -> bool:
1456 """Check if any data was found.
1458 Parameters
1459 ----------
1460 data : `pd.DataFrame`
1461 The merged dataframe to check.
1463 Returns
1464 -------
1465 dataFound : `bool`
1466 Whether data was found.
1467 """
1468 # You can't just compare to with data == NO_DATA_SENTINEL because
1469 # `data` is usually a dataframe, and you can't compare a dataframe to a
1470 # string directly.
1471 return not (isinstance(data, str) and data == NO_DATA_SENTINEL)
    def _getEfdDataForDayObs(self, dayObs: int) -> None:
        """Get the EFD data for the specified dayObs and store it in the cache.

        Gets the EFD data for all components, as a dict of dataframes keyed by
        component name. These are then merged into a single dataframe in time
        order, based on each row's `private_efdStamp`. This is then stored in
        self._data[dayObs].

        If no data is found, the value is set to ``NO_DATA_SENTINEL`` to
        differentiate this from ``None``, as this is what you'd get if you
        queried the cache with `self._data.get(dayObs)`. It also marks that we
        have already queried this day.

        Parameters
        ----------
        dayObs : `int`
            The dayObs to query.
        """
        data = {}
        # query each motion, in-position and system-state topic separately;
        # the results are merged into one time-ordered dataframe below
        for component in itertools.chain(
            self._movingComponents, self._inPositionComponents, self._stateComponents
        ):
            data[component] = getEfdData(self.client, component, dayObs=dayObs, warn=False)
            self.log.debug(f"Found {len(data[component])} for {component}")

        if all(dataframe.empty for dataframe in data.values()):
            # if every single dataframe is empty, set the sentinel and don't
            # try to merge anything, otherwise merge all the data we found
            self.log.debug(f"No data found for {dayObs=}")
            # a sentinel value that's not None
            self._data[dayObs] = NO_DATA_SENTINEL
        else:
            merged = self._mergeData(data)
            self._data[dayObs] = merged
1508 def _calculateEventsFromMergedData(
1509 self, data: pd.DataFrame, dayObs: int, dataIsForCurrentDay: bool, addBlockInfo: bool
1510 ) -> list[TMAEvent]:
1511 """Calculate the list of events from the merged data.
1513 Runs the merged data, row by row, through the TMA state machine (with
1514 ``tma.apply``) to get the overall TMA state at each row, building a
1515 dict of these states, keyed by row number.
1517 This time-series of TMA states are then looped over (in
1518 `_statesToEventTuples`), building a list of tuples representing the
1519 start and end of each event, the type of the event, and the reason for
1520 the event ending.
1522 This list of tuples is then passed to ``_makeEventsFromStateTuples``,
1523 which actually creates the ``TMAEvent`` objects.
1525 Parameters
1526 ----------
1527 data : `pd.DataFrame`
1528 The merged dataframe to use.
1529 dayObs : `int`
1530 The dayObs for the data.
1531 dataIsForCurrentDay : `bool`
1532 Whether the data is for the current day. Determines whether to
1533 allow an open last event or not.
1534 addBlockInfo : `bool`
1535 Whether to add block information to the events. This allows
1536 skipping this step for speed when generating events for purposes
1537 which don't need block information.
1539 Returns
1540 -------
1541 events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent`
1542 The events for the specified dayObs.
1543 """
1544 engineeringMode = True
1545 tma = TMAStateMachine(engineeringMode=engineeringMode)
1547 # For now, we assume that the TMA starts each day able to move, but
1548 # stationary. If this turns out to cause problems, we will need to
1549 # change to loading data from the previous day(s), and looking back
1550 # through it in time until a state change has been found for every
1551 # axis. For now though, Bruno et. al think this is acceptable and
1552 # preferable.
1553 _initializeTma(tma)
1555 tmaStates = {}
1556 for rowNum, row in data.iterrows():
1557 tma.apply(row)
1558 tmaStates[rowNum] = tma.state
1560 stateTuples = self._statesToEventTuples(tmaStates, dataIsForCurrentDay)
1561 events = self._makeEventsFromStateTuples(stateTuples, dayObs, data)
1562 if addBlockInfo:
1563 self.addBlockDataToEvents(dayObs, events)
1564 return events
    def _statesToEventTuples(
        self, states: dict[int, TMAState], dataIsForCurrentDay: bool
    ) -> list[ParsedState]:
        """Get the event-tuples from the dictionary of TMAStates.

        Chunks the states into blocks of the same state, so that we can create
        an event for each block in `_makeEventsFromStateTuples`. Off-type
        states are skipped over, with each event starting when the telescope
        next resumes motion or changes to a different type of motion state,
        i.e. from non-tracking type movement (MOVE_POINT_TO_POINT, JOGGING,
        TRACKING-but-not-in-position, i.e. slewing) to a tracking type
        movement, or vice versa.

        Parameters
        ----------
        states : `dict` of `int` : `lsst.summit.utils.tmaUtils.TMAState`
            The states of the TMA, keyed by row number.
        dataIsForCurrentDay : `bool`
            Whether the data is for the current day. Determines whether to
            allow an open last event or not.

        Returns
        -------
        parsedStates : `list` of `TMAEventMaker.ParsedState`
            The parsed states, each carrying
            ``(eventStart, eventEnd, previousState, state)``.
        """
        # Consider rewriting this with states as a list and using pop(0)?
        skipStates = (TMAState.STOPPED, TMAState.OFF, TMAState.FAULT)

        parsedStates = []
        eventStart = None
        rowNum = 0
        nRows = len(states)
        while rowNum < nRows:
            previousState = None
            state = states[rowNum]
            # if we're not in an event, fast forward through off-like rows
            # until a new event starts
            if eventStart is None and state in skipStates:
                rowNum += 1
                continue

            # we've started a new event, so walk through it and find the end
            eventStart = rowNum
            previousState = state
            rowNum += 1  # move to the next row before starting the while loop
            if rowNum == nRows:
                # we've reached the end of the data, and we're still in an
                # event, so don't return this presumably in-progress event
                self.log.warning("Reached the end of the data while starting a new event")
                break
            state = states[rowNum]
            # walk forward until the state changes; that row is the event end
            while state == previousState:
                rowNum += 1
                if rowNum == nRows:
                    break
                state = states[rowNum]
            parsedStates.append(
                self.ParsedState(
                    eventStart=eventStart, eventEnd=rowNum, previousState=previousState, state=state
                )
            )
            # a skip-type state ends the current event entirely, rather than
            # rolling straight into the next one
            if state in skipStates:
                eventStart = None

        # done parsing, just check the last event is valid
        if parsedStates:  # ensure we have at least one event
            lastEvent = parsedStates[-1]
            if lastEvent.eventEnd == nRows:
                # Generally, you *want* the timespan for an event to be the
                # first row of the next event, because you were in that state
                # right up until that state change. However, if that event is
                # a) the last one of the day and b) runs right up until the end
                # of the dataframe, then there isn't another row, so this will
                # overrun the array.
                #
                # If the data is for the current day then this isn't a worry,
                # as we're likely still taking data, and this event will likely
                # close yet, so we don't issue a warning, and simply drop the
                # event from the list.
                #
                # However, if the data is for a past day then no new data will
                # come to close the event, so allow the event to be "open", and
                # issue a warning
                if dataIsForCurrentDay:
                    self.log.info("Discarding open (likely in-progess) final event from current day's events")
                    parsedStates = parsedStates[:-1]
                else:
                    self.log.warning("Last event ends open, forcing it to end at end of the day's data")
                    # ParsedState is a frozen dataclass, so replace the final
                    # entry wholesale rather than mutating it
                    parsedStates[-1] = self.ParsedState(
                        eventStart=lastEvent.eventStart,
                        eventEnd=lastEvent.eventEnd - 1,
                        previousState=lastEvent.previousState,
                        state=lastEvent.state,
                    )

        return parsedStates
1666 def addBlockDataToEvents(
1667 self,
1668 dayObs: int,
1669 events: list[TMAEvent] | TMAEvent,
1670 ) -> None:
1671 """Find all the block data in the EFD for the specified events.
1673 Finds all the block data in the EFD relating to the events, parses it,
1674 from the rows of the dataframe, and adds it to the events in place.
1676 Parameters
1677 ----------
1678 events : `lsst.summit.utils.tmaUtils.TMAEvent` or
1679 `list` of `lsst.summit.utils.tmaUtils.TMAEvent`
1680 One or more events to get the block data for.
1681 """
1682 try:
1683 blockParser = BlockParser(dayObs, client=self.client)
1684 except Exception as e:
1685 # adding the block data should never cause a failure so if we can't
1686 # get the block data, log a warning and return. It is, however,
1687 # never expected, so use log.exception to get the full traceback
1688 # and scare users so it gets reported
1689 self.log.exception(f"Failed to parse block data for {dayObs=}, {e}")
1690 return
1691 blocks = blockParser.getBlockNums()
1692 blockDict = {}
1693 for block in blocks:
1694 blockDict[block] = blockParser.getSeqNums(block)
1696 for block, seqNums in blockDict.items():
1697 for seqNum in seqNums:
1698 blockInfo = blockParser.getBlockInfo(block=block, seqNum=seqNum)
1700 relatedEvents = blockParser.getEventsForBlock(events, block=block, seqNum=seqNum)
1701 for event in relatedEvents:
1702 toSet = [blockInfo]
1703 if event.blockInfos is not None:
1704 existingInfo = event.blockInfos
1705 existingInfo.append(blockInfo)
1706 toSet = existingInfo
1708 # Add the blockInfo to the TMAEvent. Because this is a
1709 # frozen dataclass, use object.__setattr__ to set the
1710 # attribute. This is the correct way to set a frozen
1711 # dataclass attribute after creation.
1712 object.__setattr__(event, "blockInfos", toSet)
1714 def _makeEventsFromStateTuples(
1715 self, states: list[tuple[Union[Time, int, TMAState]]], dayObs: int, data: pd.DataFrame
1716 ) -> list[TMAEvent]:
1717 """For the list of state-tuples, create a list of ``TMAEvent`` objects.
1719 Given the underlying data, and the start/stop points for each event,
1720 create the TMAEvent objects for the dayObs.
1722 Parameters
1723 ----------
1724 states : `list` of `tuple`
1725 The parsed states, as a list of tuples of the form:
1726 ``(eventStart, eventEnd, eventType, endReason)``
1727 dayObs : `int`
1728 The dayObs for the data.
1729 data : `pd.DataFrame`
1730 The merged dataframe.
1732 Returns
1733 -------
1734 events : `list` of `lsst.summit.utils.tmaUtils.TMAEvent`
1735 The events for the specified dayObs.
1736 """
1737 seqNum = 0
1738 events = []
1739 for parsedState in states:
1740 begin = data.iloc[parsedState.eventStart]["private_efdStamp"]
1741 end = data.iloc[parsedState.eventEnd]["private_efdStamp"]
1742 beginAstropy = efdTimestampToAstropy(begin)
1743 endAstropy = efdTimestampToAstropy(end)
1744 duration = end - begin
1745 event = TMAEvent(
1746 dayObs=dayObs,
1747 seqNum=seqNum,
1748 type=parsedState.previousState,
1749 endReason=parsedState.state,
1750 duration=duration,
1751 begin=beginAstropy,
1752 end=endAstropy,
1753 blockInfos=[], # this is added later
1754 _startRow=parsedState.eventStart,
1755 _endRow=parsedState.eventEnd,
1756 )
1757 events.append(event)
1758 seqNum += 1
1759 return events
1761 @staticmethod
1762 def printTmaDetailedState(tma: TMAStateMachine) -> None:
1763 """Print the full state of all the components of the TMA.
1765 Currently this is the azimuth and elevation axes' power and motion
1766 states, and their respective inPosition statuses.
1768 Parameters
1769 ----------
1770 tma : `lsst.summit.utils.tmaUtils.TMAStateMachine`
1771 The TMA state machine in the state we want to print.
1772 """
1773 axes = ["azimuth", "elevation"]
1774 p = tma._parts
1775 axisPad = len(max(axes, key=len)) # length of the longest axis string == 9 here, but this is general
1776 motionPad = max(len(s.name) for s in AxisMotionState)
1777 powerPad = max(len(s.name) for s in PowerState)
1779 # example output to show what's being done with the padding:
1780 # azimuth - Power: ON Motion: STOPPED InPosition: True # noqa: W505
1781 # elevation - Power: ON Motion: MOVING_POINT_TO_POINT InPosition: False # noqa: W505
1782 for axis in axes:
1783 print(
1784 f"{axis:>{axisPad}} - "
1785 f"Power: {p[f'{axis}SystemState'].name:>{powerPad}} "
1786 f"Motion: {p[f'{axis}MotionState'].name:>{motionPad}} "
1787 f"InPosition: {p[f'{axis}InPosition']}"
1788 )
1789 print(f"Overall system state: {tma.state.name}")
1791 def printFullDayStateEvolution(self, dayObs: int, taiOrUtc: str = "utc") -> None:
1792 """Print the full TMA state evolution for the specified dayObs.
1794 Replays all the data from the EFD for the specified dayObs through
1795 the TMA state machine, and prints both the overall and detailed state
1796 of the TMA for each row.
1798 Parameters
1799 ----------
1800 dayObs : `int`
1801 The dayObs for which to print the state evolution.
1802 taiOrUtc : `str`, optional
1803 Whether to print the timestamps in TAI or UTC. Default is UTC.
1804 """
1805 # create a fake event which spans the whole day, and then use
1806 # printEventDetails code while skipping the header to print the
1807 # evolution.
1808 _ = self.getEvents(dayObs) # ensure the data has been retrieved from the EFD
1809 data = self._data[dayObs]
1810 lastRowNum = len(data) - 1
1812 fakeEvent = TMAEvent(
1813 dayObs=dayObs,
1814 seqNum=-1, # anything will do
1815 type=TMAState.OFF, # anything will do
1816 endReason=TMAState.OFF, # anything will do
1817 duration=-1, # anything will do
1818 begin=efdTimestampToAstropy(data.iloc[0]["private_efdStamp"]),
1819 end=efdTimestampToAstropy(data.iloc[-1]["private_efdStamp"]),
1820 _startRow=0,
1821 _endRow=lastRowNum,
1822 )
1823 self.printEventDetails(fakeEvent, taiOrUtc=taiOrUtc, printHeader=False)
1825 def printEventDetails(
1826 self,
1827 event: TMAEvent,
1828 taiOrUtc: str = "tai",
1829 printHeader: bool = False,
1830 ) -> None:
1831 """Print a detailed breakdown of all state transitions during an event.
1833 Note: this is not the most efficient way to do this, but it is much the
1834 cleanest with respect to the actual state machine application and event
1835 generation code, and is easily fast enough for the cases it will be
1836 used for. It is not worth complicating the normal state machine logic
1837 to try to use this code.
1839 Parameters
1840 ----------
1841 event : `lsst.summit.utils.tmaUtils.TMAEvent`
1842 The event to display the details of.
1843 taiOrUtc : `str`, optional
1844 Whether to display time strings in TAI or UTC. Defaults to TAI.
1845 Case insensitive.
1846 printHeader : `bool`, optional
1847 Whether to print the event summary. Defaults to True. The primary
1848 reason for the existence of this option is so that this same
1849 printing function can be used to show the evolution of a whole day
1850 by supplying a fake event which spans the whole day, but this event
1851 necessarily has a meaningless summary, and so needs suppressing.
1852 """
1853 taiOrUtc = taiOrUtc.lower()
1854 if taiOrUtc not in ["tai", "utc"]:
1855 raise ValueError(f"Got unsuppoted value for {taiOrUtc=}")
1856 useUtc = taiOrUtc == "utc"
1858 if printHeader:
1859 print(
1860 f"Details for {event.duration:.2f}s {event.type.name} event dayObs={event.dayObs}"
1861 f" seqNum={event.seqNum}:"
1862 )
1863 print(f"- Event began at: {event.begin.utc.isot if useUtc else event.begin.isot}")
1864 print(f"- Event ended at: {event.end.utc.isot if useUtc else event.end.isot}")
1866 dayObs = event.dayObs
1867 data = self._data[dayObs]
1868 startRow = event._startRow
1869 endRow = event._endRow
1870 nRowsToApply = endRow - startRow + 1
1871 print(f"\nTotal number of rows in the merged dataframe: {len(data)}")
1872 if printHeader:
1873 print(f"of which rows {startRow} to {endRow} (inclusive) relate to this event.")
1875 # reconstruct all the states
1876 tma = TMAStateMachine(engineeringMode=True)
1877 _initializeTma(tma)
1879 tmaStates = {}
1880 firstAppliedRow = True # flag to print a header on the first row that's applied
1881 for rowNum, row in data.iterrows(): # must replay rows right from start to get full correct state
1882 if rowNum == startRow:
1883 # we've not yet applied this row, so this is the state just
1884 # before event
1885 print(f"\nBefore the event the TMA was in state {tma.state.name}:")
1886 self.printTmaDetailedState(tma)
1888 if rowNum >= startRow and rowNum <= endRow:
1889 if firstAppliedRow: # only print this intro on the first row we're applying
1890 print(
1891 f"\nThen, applying the {nRowsToApply} rows of data for this event, the state"
1892 " evolved as follows:\n"
1893 )
1894 firstAppliedRow = False
1896 # break the row down and print its details
1897 rowFor = row["rowFor"]
1898 axis, rowType = getAxisAndType(rowFor) # e.g. elevation, MotionState
1899 value = tma._getRowPayload(row, rowType, rowFor)
1900 valueStr = f"{str(value) if isinstance(value, bool) else value.name}"
1901 rowTime = efdTimestampToAstropy(row["private_efdStamp"])
1902 print(
1903 f"On row {rowNum} the {axis} axis had the {rowType} set to {valueStr} at"
1904 f" {rowTime.utc.isot if useUtc else rowTime.isot}"
1905 )
1907 # then apply it as usual, printing the state right afterwards
1908 tma.apply(row)
1909 tmaStates[rowNum] = tma.state
1910 self.printTmaDetailedState(tma)
1911 print()
1913 else:
1914 # if it's not in the range of interest then just apply it
1915 # silently as usual
1916 tma.apply(row)
1917 tmaStates[rowNum] = tma.state
    def findEvent(self, time: astropy.time.Time) -> TMAEvent | None:
        """Find the event which contains the specified time.

        If the specified time lies within an event, that event is returned. If
        it is at the exact start, that is logged, and if that start point is
        shared by the end of the previous event, that is logged too. If the
        event lies between events, the events either side are logged, but
        ``None`` is returned. If the time lies before the first event of the
        day a warning is logged, as for times after the last event of the day.

        Parameters
        ----------
        time : `astropy.time.Time`
            The time.

        Returns
        -------
        event : `lsst.summit.utils.tmaUtils.TMAEvent` or `None`
            The event which contains the specified time, or ``None`` if the
            time doesn't fall during an event.
        """
        # there are five possible cases:
        # 1) the time lies before the first event of the day
        # 2) the time lies after the last event of the day
        # 3) the time lies within an event
        # 3a) the time is exactly at the start of an event
        # 3b) if so, time can be shared by the end of the previous event if
        #     they are contiguous
        # 4) the time lies between two events
        # 5) the time is exactly at end of the last event of the day. This is
        #    an issue because event end times are exclusive, so this time is
        #    not technically in that event, it's the moment it closes (and if
        #    there *was* an event which followed contiguously, it would be in
        #    that event instead, which is what motivates this definition of
        #    lies within what event)

        dayObs = getDayObsForTime(time)
        # we know this is on the right day, and definitely before the specified
        # time, but sanity check this before continuing as this needs to be
        # true for this to give the correct answer
        assert getDayObsStartTime(dayObs) <= time
        assert getDayObsEndTime(dayObs) > time

        # command start to many log messages so define once here
        logStart = f"Specified time {time.isot} falls on {dayObs=}"

        events = self.getEvents(dayObs)
        if len(events) == 0:
            self.log.warning(f"There are no events found for {dayObs}")
            return None

        # check case 1)
        if time < events[0].begin:
            self.log.warning(f"{logStart} and is before the first event of the day")
            return None

        # check case 2)
        if time > events[-1].end:
            self.log.warning(f"{logStart} and is after the last event of the day")
            return None

        # check case 5)
        if time == events[-1].end:
            self.log.warning(
                f"{logStart} and is exactly at the end of the last event of the day"
                f" (seqnum={events[-1].seqNum}). Because event intervals are half-open, this"
                " time does not technically lie in any event"
            )
            return None

        # we are now either in an event, or between events. Walk through the
        # events, and if the end of the event is after the specified time, then
        # we're either in it or past it, so check if we're in.
        # NOTE(review): this walk assumes getEvents returns events in time
        # order — confirmed nowhere in this method, but required for the first
        # match below to be the right one.
        for eventNum, event in enumerate(events):
            if event.end > time:  # case 3) we are now into or past the right event
                # the event end encloses the time, so note the > and not >=,
                # this must be strictly greater, we check the overlap case
                # later
                if time >= event.begin:  # we're fully inside the event, so return it.
                    # 3a) before returning, check if we're exactly at the start
                    # of the event, and if so, log it. Then 3b) also check if
                    # we're at the exact end of the previous event, and if so,
                    # log that too.
                    if time == event.begin:
                        self.log.info(f"{logStart} and is exactly at the start of event" f" {eventNum}")
                        if eventNum == 0:  # I think this is actually impossible, but check anyway
                            return event  # can't check the previous event so return here
                        previousEvent = events[eventNum - 1]
                        if previousEvent.end == time:
                            self.log.info(
                                "Previous event is contiguous, so this time is also at the exact"
                                f" end of {eventNum - 1}"
                            )
                    return event
                else:  # case 4)
                    # the event end is past the time, but it's not inside the
                    # event, so we're between events. Log which we're between
                    # and return None
                    previousEvent = events[eventNum - 1]
                    timeAfterPrev = (time - previousEvent.end).to_datetime()
                    naturalTimeAfterPrev = humanize.naturaldelta(timeAfterPrev, minimum_unit="MICROSECONDS")
                    timeBeforeCurrent = (event.begin - time).to_datetime()
                    naturalTimeBeforeCurrent = humanize.naturaldelta(
                        timeBeforeCurrent, minimum_unit="MICROSECONDS"
                    )
                    self.log.info(
                        f"{logStart} and lies"
                        f" {naturalTimeAfterPrev} after the end of event {previousEvent.seqNum}"
                        f" and {naturalTimeBeforeCurrent} before the start of event {event.seqNum}."
                    )
                    return None

        # cases 1, 2 and 5 return above, and the loop handles 3 and 4, so
        # reaching here should be impossible
        raise RuntimeError(
            "Event finding logic fundamentally failed, which should never happen - the code" " needs fixing"
        )