Coverage for python/lsst/summit/utils/m1m3/inertia_compensation_system.py: 17%
174 statements
coverage.py v7.4.2, created at 2024-02-23 15:45 +0000
# This file is part of summit_utils.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import logging
from datetime import timedelta

import numpy as np
import pandas as pd
from astropy import units as u
from astropy.time import Time

from lsst.summit.utils.efdUtils import getEfdData
from lsst.summit.utils.tmaUtils import TMAEvent, TMAEventMaker
from lsst.ts.xml.tables.m1m3 import FATABLE_XFA, FATABLE_YFA, FATABLE_ZFA, HP_COUNT

HAS_EFD_CLIENT = True
try:
    from lsst_efd_client import EfdClient
except ImportError:
    EfdClient = None  # this is currently just for mypy
    HAS_EFD_CLIENT = False

__all__ = [
    "M1M3ICSAnalysis",
    "find_adjacent_true_regions",
    "evaluate_m1m3_ics_single_slew",
    "evaluate_m1m3_ics_day_obs",
]


class M1M3ICSAnalysis:
    """
    Evaluate the M1M3 Inertia Compensation System's performance by calculating
    the minima, maxima, and peak-to-peak values during a slew. In addition,
    it calculates the mean, median, and standard deviation when the slew has
    constant velocity or zero acceleration.

    Parameters
    ----------
    event : `lsst.summit.utils.tmaUtils.TMAEvent`
        Abstract representation of a slew event.
    efd_client : `EfdClient`
        Client to access the EFD.
    inner_pad : `float`, optional
        Time padding inside the stable time window of the slew.
    outer_pad : `float`, optional
        Time padding outside the slew time window.
    n_sigma : `float`, optional
        Number of standard deviations to use for the stable region.
    log : `logging.Logger`, optional
        Logger object to use for logging messages.
    """

    def __init__(
        self,
        event: TMAEvent,
        efd_client: EfdClient,
        inner_pad: float = 1.0,
        outer_pad: float = 1.0,
        n_sigma: float = 1.0,
        log: logging.Logger | None = None,
    ):
        self.log = (
            log.getChild(type(self).__name__)
            if log is not None
            else logging.getLogger(type(self).__name__)
        )

        self.event = event
        self.inner_pad = inner_pad * u.second
        self.outer_pad = outer_pad * u.second
        self.n_sigma = n_sigma
        self.client = efd_client

        self.number_of_hardpoints = HP_COUNT
        self.measured_forces_topics = [
            f"measuredForce{i}" for i in range(self.number_of_hardpoints)
        ]

        self.applied_forces_topics = (
            [f"xForces{actuator}" for actuator in range(FATABLE_XFA)]
            + [f"yForces{actuator}" for actuator in range(FATABLE_YFA)]
            + [f"zForces{actuator}" for actuator in range(FATABLE_ZFA)]
        )

        self.log.info(f"Querying datasets for {event.dayObs=} {event.seqNum=}")
        self.df = self.query_dataset()

        self.log.info("Calculating statistics")
        self.stats = self.get_stats()

        self.log.info("Packing results into a Series")
        self.stats = self.pack_stats_series()

    def find_stable_region(self) -> tuple[Time, Time]:
        """
        Find the stable region of the dataset. By stable, we mean the region
        where the torque is within `n_sigma` standard deviations of its mean.

        Returns
        -------
        stable_region : `tuple[Time, Time]`
            The begin and end times of the stable region.
        """
        az_torque = self.df["az_actual_torque"]
        az_torque_regions = find_adjacent_true_regions(
            np.abs(az_torque - az_torque.mean()) < self.n_sigma * az_torque.std()
        )

        el_torque = self.df["el_actual_torque"]
        el_torque_regions = find_adjacent_true_regions(
            np.abs(el_torque - el_torque.mean()) < self.n_sigma * el_torque.std()
        )

        if az_torque_regions and el_torque_regions:
            stable_begin = max(reg[0] for reg in az_torque_regions + el_torque_regions)
            stable_begin = Time(stable_begin, scale="utc")

            stable_end = min(reg[-1] for reg in az_torque_regions + el_torque_regions)
            stable_end = Time(stable_end, scale="utc")
        else:
            self.log.warning("No stable region found. Using full slew.")
            stable_begin = self.event.begin
            stable_end = self.event.end

        return stable_begin, stable_end
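
    # Illustrative note on the stability criterion used above (a sketch, with
    # hypothetical values): the boolean mask passed to
    # find_adjacent_true_regions is
    #
    #   mask = np.abs(torque - torque.mean()) < n_sigma * torque.std()
    #
    # so samples within n_sigma standard deviations of the mean torque are
    # flagged True, and the overlap of the azimuth and elevation True runs
    # bounds the stable window.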

    def query_dataset(self) -> pd.DataFrame:
        """
        Query all the relevant data, align the topics onto a common time
        index, and merge them into a single dataframe.

        Returns
        -------
        data : `pd.DataFrame`
            The merged dataset containing the hardpoint measured forces, the
            applied velocity/acceleration forces, and the TMA azimuth and
            elevation telemetry.
        """
        evt = self.event
        query_config = {
            "hp_measured_forces": {
                "topic": "lsst.sal.MTM1M3.hardpointActuatorData",
                "columns": self.measured_forces_topics,
                "err_msg": f"No hard-point data found for event {evt.seqNum} on {evt.dayObs}",
            },
            "m1m3_applied_velocity_forces": {
                "topic": "lsst.sal.MTM1M3.appliedVelocityForces",
                "columns": self.applied_forces_topics,
                "err_msg": None,
                "rename_columns": {col: f"avf_{col}" for col in self.applied_forces_topics},
            },
            "m1m3_applied_acceleration_forces": {
                "topic": "lsst.sal.MTM1M3.appliedAccelerationForces",
                "columns": self.applied_forces_topics,
                "err_msg": None,
                "rename_columns": {col: f"aaf_{col}" for col in self.applied_forces_topics},
            },
            "tma_az": {
                "topic": "lsst.sal.MTMount.azimuth",
                "columns": ["timestamp", "actualPosition", "actualVelocity", "actualTorque"],
                "err_msg": f"No TMA azimuth data found for event {evt.seqNum} on {evt.dayObs}",
                "reset_index": True,
                "rename_columns": {
                    "actualTorque": "az_actual_torque",
                    "actualVelocity": "az_actual_velocity",
                    "actualPosition": "az_actual_position",
                },
            },
            "tma_el": {
                "topic": "lsst.sal.MTMount.elevation",
                "columns": ["timestamp", "actualPosition", "actualVelocity", "actualTorque"],
                "err_msg": f"No TMA elevation data found for event {evt.seqNum} on {evt.dayObs}",
                "reset_index": True,
                "rename_columns": {
                    "actualPosition": "el_actual_position",
                    "actualTorque": "el_actual_torque",
                    "actualVelocity": "el_actual_velocity",
                },
            },
        }

        # Query datasets
        queries = {key: self.query_efd_data(**cfg) for key, cfg in query_config.items()}  # type: ignore

        # Merge datasets
        df = self.merge_datasets(queries)

        # Convert torque from Nm to kNm
        cols = ["az_actual_torque", "el_actual_torque"]
        df.loc[:, cols] *= 1e-3

        return df

    def merge_datasets(self, queries: dict[str, pd.DataFrame]) -> pd.DataFrame:
        """
        Merge multiple datasets based on their timestamps.

        Parameters
        ----------
        queries : `dict[str, pd.DataFrame]`
            A dictionary of dataframes to be merged.

        Returns
        -------
        df : `pd.DataFrame`
            A merged dataframe.
        """
        merge_cfg = {
            "left_index": True,
            "right_index": True,
            "tolerance": timedelta(seconds=1),
            "direction": "nearest",
        }

        self.log.info("Merging datasets")
        df_list = list(queries.values())
        merged_df = df_list[0]

        for df in df_list[1:]:
            merged_df = pd.merge_asof(merged_df, df, **merge_cfg)

        return merged_df
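
    # A sketch of how pd.merge_asof aligns two topics (hypothetical values):
    # with direction="nearest", each row of the left frame is paired with the
    # row of the right frame whose index is closest in time, provided the gap
    # is within the one-second tolerance.
    #
    #   left = pd.DataFrame(
    #       {"a": [1, 2]},
    #       index=pd.to_datetime(["2024-01-01 00:00:00.00", "2024-01-01 00:00:01.00"]),
    #   )
    #   right = pd.DataFrame(
    #       {"b": [10, 20]},
    #       index=pd.to_datetime(["2024-01-01 00:00:00.02", "2024-01-01 00:00:01.01"]),
    #   )
    #   pd.merge_asof(left, right, left_index=True, right_index=True,
    #                 tolerance=timedelta(seconds=1), direction="nearest")
    #   # -> pairs (a=1, b=10) and (a=2, b=20)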

    def query_efd_data(
        self,
        topic: str,
        columns: list[str],
        err_msg: str | None = None,
        reset_index: bool = False,
        rename_columns: dict | None = None,
        resample: float | None = None,
    ) -> pd.DataFrame:
        """
        Query the EFD data for a given topic and return a dataframe.

        Parameters
        ----------
        topic : `str`
            The topic to query.
        columns : `list[str]`
            The columns to query.
        err_msg : `str`, optional
            The error message to raise if no data is found. If None, a
            dataframe padded with zeros is returned instead.
        reset_index : `bool`, optional
            Whether to reset the index of the dataframe.
        rename_columns : `dict`, optional
            A dictionary of column names to rename.
        resample : `float`, optional
            The resampling frequency in seconds (currently unused).

        Returns
        -------
        df : `pd.DataFrame`
            A dataframe containing the queried data. If no data is found and
            `err_msg` is None, returns a dataframe padded with zeros.
        """
        self.log.info(f"Querying dataset: {topic}")
        df = getEfdData(
            self.client,
            topic,
            columns=columns,
            event=self.event,
            prePadding=self.outer_pad,
            postPadding=self.outer_pad,
            warn=False,
        )

        self.log.debug(f"Queried {df.index.size} rows from {topic}")
        if df.index.size == 0:
            if err_msg is not None:
                self.log.error(err_msg)
                raise ValueError(err_msg)
            else:
                self.log.warning(
                    f"Empty dataset for {topic}. Returning a zero-padded dataframe."
                )
                # Make the padded index tz-aware so it is compatible with the
                # tz-aware indexes of the other queried dataframes.
                begin_timestamp = pd.Timestamp(self.event.begin.unix, unit="s", tz="UTC")
                end_timestamp = pd.Timestamp(self.event.end.unix, unit="s", tz="UTC")
                index = pd.DatetimeIndex(
                    pd.date_range(begin_timestamp, end_timestamp, freq="1s")
                )
                df = pd.DataFrame(
                    columns=columns,
                    index=index,
                    data=np.zeros((index.size, len(columns))),
                )

        if rename_columns is not None:
            df = df.rename(columns=rename_columns)

        if reset_index:
            df["timestamp"] = Time(
                df["timestamp"], format="unix_tai", scale="utc"
            ).datetime
            df.set_index("timestamp", inplace=True)
            df.index = df.index.tz_localize("UTC")

        return df

    def get_midpoint(self) -> pd.Timestamp:
        """Return the timestamp at the midpoint of the retrieved dataset."""
        return self.df.index[len(self.df.index) // 2]

    def get_stats(self) -> pd.DataFrame:
        """
        Calculate the statistics for each column in the retrieved dataset.

        Returns
        -------
        data : `pd.DataFrame`
            A DataFrame containing calculated statistics for each column in
            the dataset. For each column, the statistics include the minimum,
            maximum, and peak-to-peak values over the full slew, plus the
            mean, median, and standard deviation within the stable window.

        Notes
        -----
        This function computes statistics for each column in the provided
        dataset. It uses `get_slew_minmax` for the minimum, maximum, and
        peak-to-peak values, and `get_stats_in_torqueless_interval` for the
        stable-window statistics.
        """
        cols = self.measured_forces_topics
        full_slew_stats = pd.DataFrame(
            data=[self.get_slew_minmax(self.df[col]) for col in cols], index=cols
        )
        self.log.info("Finding stable time window")
        begin, end = self.find_stable_region()

        self.log.debug("Updating begin and end times")
        begin = begin + self.inner_pad
        end = end - self.inner_pad

        self.log.debug("Calculating statistics in stable time window from M1M3")
        stable_slew_stats = pd.DataFrame(
            data=[
                self.get_stats_in_torqueless_interval(
                    self.df[col].loc[begin.isot : end.isot]
                )
                for col in cols
            ],
            index=cols,
        )

        self.log.debug("Concatenating statistics")
        stats = pd.concat((full_slew_stats, stable_slew_stats), axis=1)

        return stats
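
    # Shape of the frame returned by get_stats (illustrative; the numbers are
    # made up): one row per hardpoint column, e.g.
    #
    #                     min    max    ptp   mean  median   std
    #   measuredForce0  -12.3   15.1   27.4   0.02    0.01  0.50
    #
    # where min/max/ptp cover the full slew and mean/median/std only the
    # padded stable window.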

    @staticmethod
    def get_stats_in_torqueless_interval(s: pd.Series) -> pd.Series:
        """
        Calculates the statistical measures within a torqueless interval.

        This static method computes descriptive statistics for a given pandas
        Series within a torqueless interval. The torqueless interval
        represents a period of the data analysis when no external torque is
        applied.

        Parameters
        ----------
        s : `pd.Series`
            A pandas Series containing data values for analysis.

        Returns
        -------
        stats : `pd.Series`
            A pandas Series containing the following statistical measures:
            - Mean: The arithmetic mean of the data.
            - Median: The median value of the data.
            - Standard Deviation (Std): The standard deviation of the data.
        """
        result = pd.Series(
            data=[s.mean(), s.median(), s.std()],
            index=["mean", "median", "std"],
            name=s.name,
        )
        return result

    @staticmethod
    def get_slew_minmax(s: pd.Series) -> pd.Series:
        """
        Calculates the min, max, and peak-to-peak values for a data series.

        Parameters
        ----------
        s : `pd.Series`
            The input pandas Series containing data.

        Returns
        -------
        stats : `pd.Series`
            A Series containing the following values calculated from the
            input Series:
            - min: Minimum value of the Series.
            - max: Maximum value of the Series.
            - ptp: Peak-to-peak value of the Series (abs(max - min)).
        """
        result = pd.Series(
            data=[s.min(), s.max(), np.ptp(s)],
            index=["min", "max", "ptp"],
            name=s.name,
        )
        return result

    def pack_stats_series(self) -> pd.Series:
        """
        Packs the stats DataFrame into a Series with custom index labels.

        This method takes the DataFrame of statistics stored in the `stats`
        attribute of the current object and reshapes it into a Series whose
        index labels are generated from the column names and index positions.
        The resulting Series combines values from all columns of the
        DataFrame.

        Returns
        -------
        stats : `pd.Series`
            A Series with custom index labels based on the column names and
            index positions. The Series contains values from all columns of
            the DataFrame.
        """
        if isinstance(self.stats, pd.Series):
            self.log.info("Stats are already packed into a Series.")
            return self.stats

        self.log.info("Packing stats into a Series.")
        df = self.stats.transpose()

        # Define the prefix patterns
        column_prefixes = df.columns
        index_positions = df.index

        # Generate all combinations of prefixes and positions
        index_prefixes = [
            f"measuredForce{stat.capitalize()}{position}"
            for stat in index_positions
            for position, _ in enumerate(column_prefixes)
        ]

        # Flatten the DataFrame and set the new index
        result_series = df.stack().reset_index(drop=True)
        result_series.index = index_prefixes

        # Append the event information to the Series
        event_keys = [
            "dayObs",
            "seqNum",
            "version",
            "begin",
            "end",
            "duration",
            "type",
            "endReason",
        ]
        event_dict = vars(self.event)
        event_dict = {key: val for key, val in event_dict.items() if key in event_keys}

        # Create a pandas Series from the dictionary
        event_series = pd.Series(event_dict)

        # Create a new pandas Series summarizing the mount telemetry
        system_series = pd.Series(
            {
                "az_start": self.get_nearest_value("az_actual_torque", self.event.begin),
                "az_end": self.get_nearest_value("az_actual_torque", self.event.end),
                "az_extreme_vel": self.get_extreme_value("az_actual_velocity"),
                "az_extreme_torque": self.get_extreme_value("az_actual_torque"),
                "el_start": self.get_nearest_value("el_actual_torque", self.event.begin),
                "el_end": self.get_nearest_value("el_actual_torque", self.event.end),
                "el_extreme_vel": self.get_extreme_value("el_actual_velocity"),
                "el_extreme_torque": self.get_extreme_value("el_actual_torque"),
                "ics_enabled": self.get_ics_status(),
            }
        )

        system_series["az_diff"] = system_series["az_end"] - system_series["az_start"]
        system_series["el_diff"] = system_series["el_end"] - system_series["el_start"]

        # Concatenate the event, system, and statistics Series
        result_series = pd.concat([event_series, system_series, result_series])

        # Rename the event fields to snake_case labels
        result_series = result_series.rename(
            {
                "dayObs": "day_obs",
                "seqNum": "seq_num",
                "version": "version",
                "begin": "time_begin",
                "end": "time_end",
                "duration": "time_duration",
                "type": "slew_type",
                "endReason": "end_reason",
            }
        )

        # Return the resulting Series
        return result_series
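
    # Sketch of the packed Series produced above (illustrative labels): the
    # flattened statistics become entries such as "measuredForceMin0" ...
    # "measuredForceStd5", preceded by the event metadata (day_obs, seq_num,
    # time_begin, ...) and the az/el summary fields (az_start,
    # az_extreme_torque, ics_enabled, ...).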

    def get_extreme_value(self, column: str) -> float:
        """
        Returns the most extreme (either max or min) value from a given
        column.

        Parameters
        ----------
        column : `str`
            The column to query.

        Returns
        -------
        extreme_val : `float`
            The value with the largest absolute magnitude in the given
            column.
        """
        index_of_extreme = self.df[column].abs().idxmax()
        extreme_value = self.df.loc[index_of_extreme, column]
        return extreme_value

    def get_nearest_value(self, column: str, timestamp: Time) -> float:
        """
        Returns the value in a given column nearest to a given timestamp.

        Parameters
        ----------
        column : `str`
            The column to query.
        timestamp : `astropy.time.Time`
            The timestamp to query.

        Returns
        -------
        nearest_val : `float`
            The value from the given column whose time index is nearest to
            the given timestamp.
        """
        ts = pd.Timestamp(timestamp.iso, tz="UTC")
        time_diff = abs(self.df.index - ts)
        idx = time_diff.argmin()
        return self.df[column].iloc[idx]

    def get_ics_status(self, threshold: float = 1e-6) -> bool:
        """Get the status of the ICS for the given event.

        Evaluates the values of the applied velocity and acceleration forces
        within the padded slew time window. If these values are all zero,
        the ICS was not enabled and this function returns False. Otherwise,
        it returns True.

        Parameters
        ----------
        threshold : `float`, optional
            Threshold value used to determine whether the ICS is enabled. If
            all the values of the applied velocity and acceleration forces
            are below this threshold, the ICS is considered to be disabled.

        Returns
        -------
        status : `bool`
            True if the ICS is enabled, False otherwise.
        """
        avf_cols = [c for c in self.df.columns if "avf" in c]
        aaf_cols = [c for c in self.df.columns if "aaf" in c]
        avf_is_zero = (self.df[avf_cols].abs() < threshold).all().all()
        aaf_is_zero = (self.df[aaf_cols].abs() < threshold).all().all()
        return not (avf_is_zero and aaf_is_zero)
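
# Example usage of M1M3ICSAnalysis (a minimal sketch, assuming a reachable
# EFD; the dayObs value below is a placeholder):
#
#   event_maker = TMAEventMaker()
#   events = event_maker.getEvents(20230615)  # all slew events for that night
#   analysis = M1M3ICSAnalysis(events[0], event_maker.client)
#   print(analysis.stats)  # packed pd.Series of per-slew ICS statistics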


def find_adjacent_true_regions(
    series: pd.Series, min_adjacent: None | int = None
) -> list[tuple[pd.Timestamp, pd.Timestamp]]:
    """Find regions in a boolean Series containing adjacent True values.

    Parameters
    ----------
    series : `pd.Series`
        The boolean Series to search for regions.
    min_adjacent : `int`, optional
        Minimum number of adjacent True values in a region. Defaults to half
        the size of the series.

    Returns
    -------
    true_regions : `list[tuple[pd.Timestamp, pd.Timestamp]]`
        A list of tuples holding the first and last index of each region
        containing at least `min_adjacent` adjacent True values.
    """
    min_adjacent = min_adjacent if min_adjacent is not None else 0.5 * series.size
    regions = []
    for _, group in series.groupby((series != series.shift()).cumsum()):
        # Keep only runs of True values that are long enough. Note that the
        # group key is the cumulative-sum label, which is always truthy, so
        # the values themselves must be checked.
        if group.all() and len(group) >= min_adjacent:
            region_indices = group.index
            regions.append((region_indices.min(), region_indices.max()))
    return regions
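
# Illustrative example of find_adjacent_true_regions (hypothetical data): a
# six-sample boolean Series with one run of three True values. With
# min_adjacent=2, only that run qualifies, and its first and last index
# values are returned.
#
#   idx = pd.date_range("2024-01-01", periods=6, freq="1s")
#   s = pd.Series([False, True, True, True, False, True], index=idx)
#   find_adjacent_true_regions(s, min_adjacent=2)
#   # -> [(Timestamp('2024-01-01 00:00:01'), Timestamp('2024-01-01 00:00:03'))]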


def evaluate_m1m3_ics_single_slew(
    event: TMAEvent,
    efd_client: EfdClient,
    inner_pad: float = 1.0,
    outer_pad: float = 1.0,
    n_sigma: float = 1.0,
    log: logging.Logger | None = None,
) -> M1M3ICSAnalysis:
    """
    Evaluate the M1M3 Inertia Compensation System for a single TMAEvent.

    Parameters
    ----------
    event : `TMAEvent`
        The TMA event to analyze.
    efd_client : `EfdClient`
        The EFD client to use to retrieve data.
    inner_pad : `float`, optional
        Time padding inside the stable time window of the slew.
    outer_pad : `float`, optional
        Time padding outside the slew time window.
    n_sigma : `float`, optional
        Number of standard deviations to use for the stable region.
    log : `logging.Logger`, optional
        Logger object to use for logging messages.

    Returns
    -------
    result : `M1M3ICSAnalysis`
        The results of the analysis.

    Raises
    ------
    ValueError
        Raised if there is no hardpoint data for the specified event.
    """
    log = log.getChild(__name__) if log is not None else logging.getLogger(__name__)

    log.info("Starting inertia compensation system analysis.")
    performance_analysis = M1M3ICSAnalysis(
        event,
        efd_client,
        inner_pad=inner_pad,
        outer_pad=outer_pad,
        n_sigma=n_sigma,
        log=log,
    )

    return performance_analysis
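
# Example usage (a minimal sketch, assuming a reachable EFD; the dayObs value
# is a placeholder):
#
#   event_maker = TMAEventMaker()
#   event = event_maker.getEvents(20230615)[0]
#   result = evaluate_m1m3_ics_single_slew(event, event_maker.client)
#   print(result.stats["measuredForceMax0"])  # max force on hardpoint 0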


def evaluate_m1m3_ics_day_obs(
    day_obs: int,
    event_maker: TMAEventMaker,
    inner_pad: float = 1.0,
    outer_pad: float = 1.0,
    n_sigma: float = 1.0,
    log: logging.Logger | None = None,
) -> pd.DataFrame:
    """
    Evaluate the M1M3 Inertia Compensation System for every slew event in a
    `dayObs`.

    Parameters
    ----------
    day_obs : `int`
        Observation day in the YYYYMMDD format.
    event_maker : `TMAEventMaker`
        Object to retrieve TMA events.
    inner_pad : `float`, optional
        Time padding inside the stable time window of the slew.
    outer_pad : `float`, optional
        Time padding outside the slew time window.
    n_sigma : `float`, optional
        Number of standard deviations to use for the stable region.
    log : `logging.Logger`, optional
        Logger object to use for logging messages.

    Returns
    -------
    results : `pd.DataFrame`
        A dataframe containing the statistical summary of the analysis, one
        row per slew event, indexed by sequence number.
    """
    log = log.getChild(__name__) if log is not None else logging.getLogger(__name__)
    log.setLevel(logging.DEBUG)

    log.info("Retrieving TMA slew events.")
    events = event_maker.getEvents(day_obs)
    log.info(f"Found {len(events)} events for day {day_obs}")

    stats = None
    for event in events:
        log.info(f"Start inertia compensation system analysis on {event.seqNum}.")

        try:
            performance_analysis = M1M3ICSAnalysis(
                event,
                event_maker.client,
                inner_pad=inner_pad,
                outer_pad=outer_pad,
                n_sigma=n_sigma,
                log=log,
            )
            log.info(
                f"Complete inertia compensation system analysis on {event.seqNum}."
            )
        except ValueError:
            log.warning(f"Missing data for {event.seqNum} on {event.dayObs}")
            continue

        if stats is None:
            stats = performance_analysis.stats
        else:
            stats = pd.concat((stats.T, performance_analysis.stats), axis=1).T

    assert stats is not None
    if isinstance(stats, pd.Series):
        # A single successful event leaves a Series; promote it to a one-row
        # DataFrame so that set_index below works.
        stats = stats.to_frame().T
    stats = stats.set_index("seq_num", drop=False)
    return stats
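
# Example usage (a minimal sketch, assuming a reachable EFD; the dayObs value
# is a placeholder):
#
#   event_maker = TMAEventMaker()
#   df = evaluate_m1m3_ics_day_obs(20230615, event_maker)
#   df[["az_extreme_torque", "el_extreme_torque"]].describe()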