Coverage for python/lsst/summit/utils/m1m3/inertia_compensation_system.py: 17%
174 statements
# This file is part of summit_utils.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import logging
from datetime import timedelta

import numpy as np
import pandas as pd
from astropy import units as u
from astropy.time import Time

from lsst.summit.utils.efdUtils import getEfdData
from lsst.summit.utils.tmaUtils import TMAEvent, TMAEventMaker
from lsst.ts.xml.tables.m1m3 import FATABLE_XFA, FATABLE_YFA, FATABLE_ZFA, HP_COUNT

HAS_EFD_CLIENT = True
try:
    from lsst_efd_client import EfdClient
except ImportError:
    EfdClient = None  # this is currently just for mypy
    HAS_EFD_CLIENT = False

__all__ = [
    "M1M3ICSAnalysis",
    "find_adjacent_true_regions",
    "evaluate_m1m3_ics_single_slew",
    "evaluate_m1m3_ics_day_obs",
]


class M1M3ICSAnalysis:
    """
    Evaluate the M1M3 Inertia Compensation System's performance by calculating
    the minima, maxima and peak-to-peak values during a slew. In addition,
    calculates the mean, median and standard deviation when the slew has
    constant velocity or zero acceleration.

    Parameters
    ----------
    event : `lsst.summit.utils.tmaUtils.TMAEvent`
        Abstract representation of a slew event.
    efd_client : `EfdClient`
        Client to access the EFD.
    inner_pad : `float`, optional
        Time padding inside the stable time window of the slew.
    outer_pad : `float`, optional
        Time padding outside the slew time window.
    n_sigma : `float`, optional
        Number of standard deviations to use for the stable region.
    log : `logging.Logger`, optional
        Logger object to use for logging messages.
    """

    def __init__(
        self,
        event: TMAEvent,
        efd_client: EfdClient,
        inner_pad: float = 1.0,
        outer_pad: float = 1.0,
        n_sigma: float = 1.0,
        log: logging.Logger | None = None,
    ):
        self.log = (
            log.getChild(type(self).__name__) if log is not None else logging.getLogger(type(self).__name__)
        )

        self.event = event
        self.inner_pad = inner_pad * u.second
        self.outer_pad = outer_pad * u.second
        self.n_sigma = n_sigma
        self.client = efd_client

        self.number_of_hardpoints = HP_COUNT
        self.measured_forces_topics = [f"measuredForce{i}" for i in range(self.number_of_hardpoints)]

        self.applied_forces_topics = (
            [f"xForces{actuator}" for actuator in range(FATABLE_XFA)]
            + [f"yForces{actuator}" for actuator in range(FATABLE_YFA)]
            + [f"zForces{actuator}" for actuator in range(FATABLE_ZFA)]
        )

        self.log.info(f"Querying datasets for {event.dayObs=} {event.seqNum=}")
        self.df = self.query_dataset()

        self.log.info("Calculating statistics")
        self.stats = self.get_stats()

        self.log.info("Packing results into a Series")
        self.stats = self.pack_stats_series()

    def find_stable_region(self) -> tuple[Time, Time]:
        """
        Find the stable region of the dataset. By stable, we mean the region
        where the torque is within n_sigma of the mean.

        Returns
        -------
        stable_region : `tuple[Time, Time]`
            The begin and end times of the stable region.
        """
        az_torque = self.df["az_actual_torque"]
        az_torque_regions = find_adjacent_true_regions(
            np.abs(az_torque - az_torque.mean()) < self.n_sigma * az_torque.std()
        )

        el_torque = self.df["el_actual_torque"]
        el_torque_regions = find_adjacent_true_regions(
            np.abs(el_torque - el_torque.mean()) < self.n_sigma * el_torque.std()
        )

        if az_torque_regions and el_torque_regions:
            stable_begin = max([reg[0] for reg in az_torque_regions + el_torque_regions])
            stable_begin = Time(stable_begin, scale="utc")

            stable_end = min([reg[-1] for reg in az_torque_regions + el_torque_regions])
            stable_end = Time(stable_end, scale="utc")
        else:
            self.log.warning("No stable region found. Using full slew.")
            stable_begin = self.event.begin
            stable_end = self.event.end

        return stable_begin, stable_end
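
    # Illustrative sketch (not part of the original module): the masks passed
    # to find_adjacent_true_regions above are plain boolean Series, e.g. for a
    # toy torque trace:
    #
    #     torque = pd.Series([0.1, 0.2, 5.0, 0.15, 0.1])
    #     mask = np.abs(torque - torque.mean()) < 1.0 * torque.std()
    #     # mask -> [True, True, False, True, True]; the runs of True values
    #     # are the candidate stable regions.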

    def query_dataset(self) -> pd.DataFrame:
        """
        Query all the relevant data, resample them onto a common time base,
        and merge them into a single dataframe.

        Returns
        -------
        data : `pd.DataFrame`
            The merged data.
        """
        evt = self.event
        query_config = {
            "hp_measured_forces": {
                "topic": "lsst.sal.MTM1M3.hardpointActuatorData",
                "columns": self.measured_forces_topics,
                "err_msg": f"No hard-point data found for event {evt.seqNum} on {evt.dayObs}",
            },
            "m1m3_applied_velocity_forces": {
                "topic": "lsst.sal.MTM1M3.appliedVelocityForces",
                "columns": self.applied_forces_topics,
                "err_msg": None,
                "rename_columns": {col: f"avf_{col}" for col in self.applied_forces_topics},
            },
            "m1m3_applied_acceleration_forces": {
                "topic": "lsst.sal.MTM1M3.appliedAccelerationForces",
                "columns": self.applied_forces_topics,
                "err_msg": None,
                "rename_columns": {col: f"aaf_{col}" for col in self.applied_forces_topics},
            },
            "tma_az": {
                "topic": "lsst.sal.MTMount.azimuth",
                "columns": ["timestamp", "actualPosition", "actualVelocity", "actualTorque"],
                "err_msg": f"No TMA azimuth data found for event {evt.seqNum} on {evt.dayObs}",
                "reset_index": True,
                "rename_columns": {
                    "actualTorque": "az_actual_torque",
                    "actualVelocity": "az_actual_velocity",
                    "actualPosition": "az_actual_position",
                },
            },
            "tma_el": {
                "topic": "lsst.sal.MTMount.elevation",
                "columns": ["timestamp", "actualPosition", "actualVelocity", "actualTorque"],
                "err_msg": f"No TMA elevation data found for event {evt.seqNum} on {evt.dayObs}",
                "reset_index": True,
                "rename_columns": {
                    "actualPosition": "el_actual_position",
                    "actualTorque": "el_actual_torque",
                    "actualVelocity": "el_actual_velocity",
                },
            },
        }

        # Query datasets
        queries = {key: self.query_efd_data(**cfg) for key, cfg in query_config.items()}  # type: ignore

        # Merge datasets
        df = self.merge_datasets(queries)

        # Convert torque from Nm to kNm
        cols = ["az_actual_torque", "el_actual_torque"]
        df.loc[:, cols] *= 1e-3

        return df

    def merge_datasets(self, queries: dict[str, pd.DataFrame]) -> pd.DataFrame:
        """
        Merge multiple datasets based on their timestamps.

        Parameters
        ----------
        queries : `dict[str, pd.DataFrame]`
            A dictionary of dataframes to be merged.

        Returns
        -------
        df : `pd.DataFrame`
            A merged dataframe.
        """
        merge_cfg = {
            "left_index": True,
            "right_index": True,
            "tolerance": timedelta(seconds=1),
            "direction": "nearest",
        }

        self.log.info("Merging datasets")
        df_list = [df for _, df in queries.items()]
        merged_df = df_list[0]

        for df in df_list[1:]:
            merged_df = pd.merge_asof(merged_df, df, **merge_cfg)

        return merged_df
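
    # Illustrative sketch (not part of the original module): pd.merge_asof
    # with direction="nearest" and a 1 s tolerance pairs each left row with
    # the closest-in-time right row, which is how the differently-sampled EFD
    # topics are aligned here:
    #
    #     idx_a = pd.to_datetime(["2024-03-09 12:00:00.0", "2024-03-09 12:00:01.0"])
    #     idx_b = pd.to_datetime(["2024-03-09 12:00:00.4"])
    #     a = pd.DataFrame({"az": [1.0, 2.0]}, index=idx_a)
    #     b = pd.DataFrame({"el": [3.0]}, index=idx_b)
    #     pd.merge_asof(a, b, left_index=True, right_index=True,
    #                   tolerance=timedelta(seconds=1), direction="nearest")
    #     # -> both rows of ``a`` pick up el=3.0, since 12:00:00.4 is within
    #     #    1 s of each of them.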

    def query_efd_data(
        self,
        topic: str,
        columns: list[str],
        err_msg: str | None = None,
        reset_index: bool = False,
        rename_columns: dict | None = None,
        resample: float | None = None,
    ) -> pd.DataFrame:
        """
        Query the EFD data for a given topic and return a dataframe.

        Parameters
        ----------
        topic : `str`
            The topic to query.
        columns : `list[str]`
            The columns to query.
        err_msg : `str`, optional
            The error message to raise if no data is found. If None, it
            creates a dataframe padded with zeros.
        reset_index : `bool`, optional
            Whether to reset the index of the dataframe.
        rename_columns : `dict`, optional
            A dictionary of column names to rename.
        resample : `float`, optional
            The resampling frequency in seconds. Currently accepted but not
            applied in this implementation.

        Returns
        -------
        df : `pd.DataFrame`
            A dataframe containing the queried data. If no data is found and
            `err_msg` is None, returns a dataframe padded with zeros.
        """
        self.log.info(f"Querying dataset: {topic}")
        df = getEfdData(
            self.client,
            topic,
            columns=columns,
            event=self.event,
            prePadding=self.outer_pad,
            postPadding=self.outer_pad,
            warn=False,
        )

        self.log.debug(f"Queried {df.index.size} rows from {topic}")
        if df.index.size == 0:
            if err_msg is not None:
                self.log.error(err_msg)
                raise ValueError(err_msg)
            else:
                self.log.warning(f"Empty dataset for {topic}. Returning a zero-padded dataframe.")
                begin_timestamp = pd.Timestamp(self.event.begin.unix, unit="s")
                end_timestamp = pd.Timestamp(self.event.end.unix, unit="s")
                index = pd.DatetimeIndex(pd.date_range(begin_timestamp, end_timestamp, freq="1s"))
                df = pd.DataFrame(
                    columns=columns,
                    index=index,
                    data=np.zeros((index.size, len(columns))),
                )

        if rename_columns is not None:
            df = df.rename(columns=rename_columns)

        if reset_index:
            df["timestamp"] = Time(df["timestamp"], format="unix_tai", scale="utc").datetime
            df.set_index("timestamp", inplace=True)
            df.index = df.index.tz_localize("UTC")

        return df

    def get_midpoint(self) -> Time:
        """Return the halfway point between begin and end."""
        return self.df.index[len(self.df.index) // 2]

    def get_stats(self) -> pd.DataFrame:
        """
        Calculate the statistics for each column in the retrieved dataset.

        Returns
        -------
        data : `pd.DataFrame`
            A DataFrame containing calculated statistics for each column in
            the dataset. For each column, the statistics include minimum,
            maximum, and peak-to-peak values.

        Notes
        -----
        This function computes statistics for each column in the provided
        dataset. It uses the `get_slew_minmax` method to calculate minimum,
        maximum, and peak-to-peak values for each column's data.
        """
        cols = self.measured_forces_topics
        full_slew_stats = pd.DataFrame(data=[self.get_slew_minmax(self.df[col]) for col in cols], index=cols)

        self.log.info("Finding stable time window")
        begin, end = self.find_stable_region()

        self.log.debug("Updating begin and end times")
        begin = begin + self.inner_pad
        end = end - self.inner_pad

        self.log.debug("Calculating statistics in stable time window from M1M3")
        stable_slew_stats = pd.DataFrame(
            data=[
                self.get_stats_in_torqueless_interval(self.df[col].loc[begin.isot : end.isot]) for col in cols
            ],
            index=cols,
        )

        self.log.debug("Concatenating statistics")
        stats = pd.concat((full_slew_stats, stable_slew_stats), axis=1)

        return stats

    @staticmethod
    def get_stats_in_torqueless_interval(s: pd.Series) -> pd.Series:
        """
        Calculate the statistical measures within a torqueless interval.

        This static method computes descriptive statistics for a given pandas
        Series within a torqueless interval. The torqueless interval
        represents a period of the data analysis when no external torque is
        applied.

        Parameters
        ----------
        s : `pd.Series`
            A pandas Series containing data values for analysis.

        Returns
        -------
        stats : `pd.Series`
            A pandas Series containing the following statistical measures:
            - Mean: The arithmetic mean of the data.
            - Median: The median value of the data.
            - Standard Deviation (Std): The standard deviation of the data.
        """
        result = pd.Series(
            data=[s.mean(), s.median(), s.std()],
            index=["mean", "median", "std"],
            name=s.name,
        )
        return result
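
    # Illustrative sketch (not part of the original module):
    #
    #     s = pd.Series([1.0, 2.0, 3.0], name="measuredForce0")
    #     M1M3ICSAnalysis.get_stats_in_torqueless_interval(s)
    #     # -> mean 2.0, median 2.0, std 1.0, with the Series named
    #     #    "measuredForce0" so it lands on the right row in get_stats.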

    @staticmethod
    def get_slew_minmax(s: pd.Series) -> pd.Series:
        """
        Calculate the min, max, and peak-to-peak values for a data series.

        Parameters
        ----------
        s : `pd.Series`
            The input pandas Series containing data.

        Returns
        -------
        stats : `pd.Series`
            A Series containing the following values calculated from the
            input Series:
            - min: Minimum value of the Series.
            - max: Maximum value of the Series.
            - ptp: Peak-to-peak (ptp) value of the Series (abs(max - min)).
        """
        result = pd.Series(
            data=[s.min(), s.max(), np.ptp(s)],
            index=["min", "max", "ptp"],
            name=s.name,
        )
        return result
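
    # Illustrative sketch (not part of the original module):
    #
    #     s = pd.Series([-1.0, 4.0, 2.0], name="measuredForce0")
    #     M1M3ICSAnalysis.get_slew_minmax(s)
    #     # -> min=-1.0, max=4.0, ptp=5.0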

    def pack_stats_series(self) -> pd.Series:
        """
        Pack the stats DataFrame into a Series with custom index labels.

        This method takes the DataFrame of statistics stored in the `stats`
        attribute of the current object and reshapes it into a Series whose
        index labels are generated from the column names and index positions.
        The resulting Series combines values from all columns of the
        DataFrame.

        Returns
        -------
        stats : `pd.Series`
            A Series with custom index labels based on the column names and
            index positions. The Series contains values from all columns of
            the DataFrame.
        """
        if isinstance(self.stats, pd.Series):
            self.log.info("Stats are already packed into a Series.")
            return self.stats

        self.log.info("Packing stats into a Series.")
        df = self.stats.transpose()

        # Define the prefix patterns
        column_prefixes = df.columns
        index_positions = df.index

        # Generate all combinations of prefixes and positions
        index_prefixes = [
            f"measuredForce{stat.capitalize()}{position}"
            for stat in index_positions
            for position, _ in enumerate(column_prefixes)
        ]

        # Flatten the DataFrame and set the new index
        result_series = df.stack().reset_index(drop=True)
        result_series.index = index_prefixes

        # Append the event information to the Series
        event_keys = [
            "dayObs",
            "seqNum",
            "version",
            "begin",
            "end",
            "duration",
            "type",
            "endReason",
        ]
        event_dict = vars(self.event)
        event_dict = {key: val for key, val in event_dict.items() if key in event_keys}

        # Create a pandas Series from the dictionary
        event_series = pd.Series(event_dict)

        # Create a new pandas Series correlating event and system information
        system_series = pd.Series(
            {
                "az_start": self.get_nearest_value("az_actual_torque", self.event.begin),
                "az_end": self.get_nearest_value("az_actual_torque", self.event.end),
                "az_extreme_vel": self.get_extreme_value("az_actual_velocity"),
                "az_extreme_torque": self.get_extreme_value("az_actual_torque"),
                "el_start": self.get_nearest_value("el_actual_torque", self.event.begin),
                "el_end": self.get_nearest_value("el_actual_torque", self.event.end),
                "el_extreme_vel": self.get_extreme_value("el_actual_velocity"),
                "el_extreme_torque": self.get_extreme_value("el_actual_torque"),
                "ics_enabled": self.get_ics_status(),
            }
        )

        system_series["az_diff"] = system_series["az_end"] - system_series["az_start"]
        system_series["el_diff"] = system_series["el_end"] - system_series["el_start"]

        # Concatenate the two Series
        result_series = pd.concat([event_series, system_series, result_series])

        # Rename the Series index labels
        result_series = result_series.rename(
            {
                "dayObs": "day_obs",
                "seqNum": "seq_num",
                "version": "version",
                "begin": "time_begin",
                "end": "time_end",
                "duration": "time_duration",
                "type": "slew_type",
                "endReason": "end_reason",
            }
        )

        # Return the resulting Series
        return result_series
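
    # Illustrative sketch (not part of the original module): after the
    # transpose, stacking walks stat-row by stat-row across the hardpoint
    # columns, so a two-hardpoint frame
    #
    #     df = pd.DataFrame(
    #         {"measuredForce0": [1.0, 3.0], "measuredForce1": [2.0, 4.0]},
    #         index=["min", "max"],
    #     )
    #
    # flattens to a Series with index labels
    # ["measuredForceMin0", "measuredForceMin1",
    #  "measuredForceMax0", "measuredForceMax1"]
    # and values [1.0, 2.0, 3.0, 4.0].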

    def get_extreme_value(self, column: str) -> float:
        """
        Return the most extreme (either max or min) value from a given column.

        Parameters
        ----------
        column : `str`
            The column to query.

        Returns
        -------
        extreme_val : `float`
            The most extreme value from the given column.
        """
        index_of_extreme = self.df[column].abs().idxmax()
        extreme_value = self.df.loc[index_of_extreme, column]
        return extreme_value

    def get_nearest_value(self, column: str, timestamp: Time) -> float:
        """
        Return the value nearest to a given timestamp from a given column.

        Parameters
        ----------
        column : `str`
            The column to query.
        timestamp : `astropy.time.Time`
            The timestamp to query.

        Returns
        -------
        nearest_val : `float`
            The value in the given column nearest to the given timestamp.
        """
        ts = pd.Timestamp(timestamp.iso, tz="UTC")
        time_diff = abs(self.df.index - ts)
        idx = time_diff.argmin()
        return self.df[column].iloc[idx]

    def get_ics_status(self, threshold: float = 1e-6) -> bool:
        """Get the status of the ICS for the given event.

        Evaluates the values of the applied velocity and acceleration forces
        inside the padded stable time window. If the values are all zero, then
        this function will return False as the ICS was not enabled. Otherwise,
        it will return True.

        Parameters
        ----------
        threshold : `float`, optional
            Threshold value used to determine if the ICS is enabled or not. If
            all the values of the applied velocity and acceleration forces are
            below this threshold, then the ICS is considered to be disabled.

        Returns
        -------
        status : `bool`
            True if the ICS is enabled, False otherwise.
        """
        avf0 = (self.df[[c for c in self.df.columns if "avf" in c]].abs() < threshold).all().all()
        aaf0 = (self.df[[c for c in self.df.columns if "aaf" in c]].abs() < threshold).all().all()
        return not (avf0 and aaf0)
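
    # Illustrative sketch (not part of the original module): on a frame where
    # every applied-force column is ~0, the ICS is reported as disabled:
    #
    #     df = pd.DataFrame({"avf_xForces0": [0.0, 0.0], "aaf_zForces0": [0.0, 0.0]})
    #     # (df[cols].abs() < 1e-6).all().all() is True for both column
    #     # groups, so get_ics_status returns False (ICS off).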


def find_adjacent_true_regions(
    series: pd.Series, min_adjacent: None | int = None
) -> list[tuple[pd.Timestamp, pd.Timestamp]]:
    """Find regions in a boolean Series containing adjacent True values.

    Parameters
    ----------
    series : `pd.Series`
        The boolean Series to search for regions.
    min_adjacent : `int`, optional
        Minimum number of adjacent True values in a region. Defaults to half
        the size of the series.

    Returns
    -------
    true_regions : `list[tuple[pd.Timestamp, pd.Timestamp]]`
        A list of tuples holding the first and last index of each region
        containing at least min_adjacent adjacent True values.
    """
    min_adjacent = min_adjacent if min_adjacent is not None else 0.5 * series.size
    regions = []
    # Group consecutive runs of equal values, then keep only the True runs
    # that are at least min_adjacent samples long.
    for _, group in series.groupby((series != series.shift()).cumsum()):
        if group.iloc[0] and len(group) >= min_adjacent:
            region_indices = group.index
            regions.append((region_indices.min(), region_indices.max()))
    return regions
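
# Illustrative sketch (not part of the original module): with the default
# min_adjacent of half the series size, only runs of True values spanning at
# least half the samples are returned:
#
#     flags = pd.Series([False, True, True, True, True, False])
#     find_adjacent_true_regions(flags)
#     # -> [(1, 4)]: one region of four adjacent True values, reported as
#     #    its first and last index labels.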


def evaluate_m1m3_ics_single_slew(
    event: TMAEvent,
    efd_client: EfdClient,
    inner_pad: float = 1.0,
    outer_pad: float = 1.0,
    n_sigma: float = 1.0,
    log: logging.Logger | None = None,
) -> M1M3ICSAnalysis:
    """
    Evaluate the M1M3 Inertia Compensation System for a single TMAEvent.

    Parameters
    ----------
    event : `TMAEvent`
        The TMA event to analyze.
    efd_client : `EfdClient`
        The EFD client to use to retrieve data.
    inner_pad : `float`, optional
        Time padding inside the stable time window of the slew.
    outer_pad : `float`, optional
        Time padding outside the slew time window.
    n_sigma : `float`, optional
        Number of standard deviations to use for the stable region.
    log : `logging.Logger`, optional
        Logger object to use for logging messages.

    Returns
    -------
    result : `M1M3ICSAnalysis`
        The results of the analysis.

    Raises
    ------
    ValueError
        Raised if there is no hardpoint data for the specified event.
    """
    log = log.getChild(__name__) if log is not None else logging.getLogger(__name__)

    log.info("Starting inertia compensation system analysis.")
    performance_analysis = M1M3ICSAnalysis(
        event,
        efd_client,
        inner_pad=inner_pad,
        outer_pad=outer_pad,
        n_sigma=n_sigma,
        log=log,
    )

    return performance_analysis
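
# Hypothetical usage sketch (assumes a live EFD connection; the dayObs and
# event index are illustrative):
#
#     from lsst.summit.utils.tmaUtils import TMAEventMaker
#
#     event_maker = TMAEventMaker()
#     event = event_maker.getEvents(20230615)[0]
#     analysis = evaluate_m1m3_ics_single_slew(event, event_maker.client)
#     print(analysis.stats["measuredForcePtp0"])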


def evaluate_m1m3_ics_day_obs(
    day_obs: int,
    event_maker: TMAEventMaker,
    inner_pad: float = 1.0,
    outer_pad: float = 1.0,
    n_sigma: float = 1.0,
    log: logging.Logger | None = None,
) -> pd.DataFrame:
    """
    Evaluate the M1M3 Inertia Compensation System in every slew event during a
    `dayObs`.

    Parameters
    ----------
    day_obs : `int`
        Observation day in the YYYYMMDD format.
    event_maker : `TMAEventMaker`
        Object to retrieve TMA events.
    inner_pad : `float`, optional
        Time padding inside the stable time window of the slew.
    outer_pad : `float`, optional
        Time padding outside the slew time window.
    n_sigma : `float`, optional
        Number of standard deviations to use for the stable region.
    log : `logging.Logger`, optional
        Logger object to use for logging messages.

    Returns
    -------
    results : `pd.DataFrame`
        A dataframe containing the statistical summary of the analysis.
    """
    log = log.getChild(__name__) if log is not None else logging.getLogger(__name__)
    log.setLevel(logging.DEBUG)

    log.info("Retrieving TMA slew events.")
    events = event_maker.getEvents(day_obs)
    log.info(f"Found {len(events)} events for day {day_obs}")

    stats = None
    for event in events:
        log.info(f"Start inertia compensation system analysis on {event.seqNum}.")

        try:
            performance_analysis = M1M3ICSAnalysis(
                event,
                event_maker.client,
                inner_pad=inner_pad,
                outer_pad=outer_pad,
                n_sigma=n_sigma,
                log=log,
            )
            log.info(f"Complete inertia compensation system analysis on {event.seqNum}.")
        except ValueError:
            log.warning(f"Missing data for {event.seqNum} on {event.dayObs}")
            continue

        if stats is None:
            stats = performance_analysis.stats
        else:
            stats = pd.concat((stats.T, performance_analysis.stats), axis=1).T

    if isinstance(stats, pd.Series):
        # A day with a single successful event yields a bare Series; promote
        # it to a one-row DataFrame so the assert and indexing below hold.
        stats = stats.to_frame().T

    assert isinstance(stats, pd.DataFrame)
    stats = stats.set_index("seq_num", drop=False)
    return stats
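
# Hypothetical usage sketch (assumes a live EFD connection; the dayObs value
# is illustrative):
#
#     from lsst.summit.utils.tmaUtils import TMAEventMaker
#
#     event_maker = TMAEventMaker()
#     ics_stats = evaluate_m1m3_ics_day_obs(20230615, event_maker)
#     print(ics_stats[["time_duration", "ics_enabled"]])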