Coverage for python / lsst / summit / utils / guiders / seeing.py: 17%
146 statements
« prev ^ index » next coverage.py v7.13.5, created at 2026-04-23 09:09 +0000
« prev ^ index » next coverage.py v7.13.5, created at 2026-04-23 09:09 +0000
1# This file is part of summit_utils.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
21from __future__ import annotations
23from dataclasses import dataclass
25import numpy as np
26import pandas as pd
27from numpy.typing import NDArray
29from lsst.summit.utils.guiders.metrics import GuiderMetricsBuilder
__all__ = [
    "rotateCentroids",
    "groupStarsPerDetector",
    "fwhmVonkarman",
    "r0FromVariance",
    "calcCentroidStats",
    "getROICenters",
    "alignTimes",
    "getCorrectedXy",
    "GuiderSeeing",
    "CorrelationAnalysis",
]

# Focal-plane scale factor: 100 pixels per millimeter (i.e. 10 micron pixels).
# Used to convert mm-based focal-plane centroid offsets into pixel units.
# NOTE(review): presumably the guider pixel pitch -- confirm against hardware docs.
MM_TO_PIXEL = 100.0
def rotateCentroids(
    centroids1: NDArray[np.float64],
    centroids2: NDArray[np.float64],
    center1: NDArray[np.float64],
    center2: NDArray[np.float64],
) -> tuple[NDArray[np.float64], NDArray[np.float64]]:
    """Project two centroid sets into the local frame of their baseline.

    The baseline from ``center1`` to ``center2`` defines the local y-axis
    (parallel component); its clockwise perpendicular defines the local
    x-axis (perpendicular component). Both centroid sets are expressed in
    this shared frame.

    Parameters
    ----------
    centroids1 : `ndarray`
        Shape (N, 2). Original (x, y) centroids from the first detector, in
        focal-plane pixels.
    centroids2 : `ndarray`
        Shape (M, 2). Original (x, y) centroids from the second detector, in
        focal-plane pixels.
    center1 : `ndarray`
        Shape (2,). Mean (x, y) position of ``centroids1``, used to define
        the rotation axis.
    center2 : `ndarray`
        Shape (2,). Mean (x, y) position of ``centroids2``, used to define
        the rotation axis.

    Returns
    -------
    centroids1Rot : `ndarray`
        Shape (N, 2). ``centroids1`` in the common frame; columns are
        (x_perp, y_parallel).
    centroids2Rot : `ndarray`
        Shape (M, 2). ``centroids2`` in the same frame.
    """
    # Unit vector along the baseline: the local parallel (y) direction.
    baseline = np.asarray(center2, dtype=float) - np.asarray(center1, dtype=float)
    parallelHat = baseline / np.linalg.norm(baseline)
    # Perpendicular unit vector: the local x direction.
    perpHat = np.array([parallelHat[1], -parallelHat[0]], dtype=float)

    # Columns of the basis matrix are (xHat, yHat), so a right-multiply
    # yields (x_perp, y_parallel) per row.
    basis = np.column_stack((perpHat, parallelHat))
    centroids1Rot = np.asarray(centroids1, dtype=float) @ basis
    centroids2Rot = np.asarray(centroids2, dtype=float) @ basis
    return centroids1Rot, centroids2Rot
def groupStarsPerDetector(starsDetrended: pd.DataFrame, fluxCut: float) -> dict[str, pd.DataFrame]:
    """Split detrended star rows by detector, keeping bright detectors only.

    Parameters
    ----------
    starsDetrended : `pandas.DataFrame`
        Detrended star table containing at least columns ``'detector'`` and
        ``'flux'``.
    fluxCut : `float`
        Minimum mean flux required to keep a detector.

    Returns
    -------
    starList : `dict[str, pandas.DataFrame]`
        Mapping from detector ID to that detector's rows. Detectors whose
        mean flux does not exceed the threshold are omitted.
    """
    kept: dict[str, pd.DataFrame] = {}
    for detector, detectorDf in starsDetrended.groupby("detector", sort=False):
        fluxValues = pd.to_numeric(detectorDf["flux"], errors="coerce")
        # A NaN mean fails the comparison, so detectors with no valid flux
        # measurements are dropped as well.
        if float(np.nanmean(fluxValues)) > fluxCut:
            kept[detector] = detectorDf
    return kept
130def fwhmVonkarman(r0: float | NDArray[np.float64], outerScale: float = 25.0) -> NDArray[np.float64]:
131 """Atmospheric FWHM (Vonkármán PSF) from Fried parameter ``r0`` at 500 nm.
133 Parameters
134 ----------
135 r0 : `float` or `ndarray`
136 Fried parameter (meters) referenced to 500 nm.
137 outerScale : `float`, optional
138 Outer scale of turbulence (meters).
140 Returns
141 -------
142 fwhm : `ndarray`
143 Atmospheric seeing FWHM (arcsec) at 500 nm under Vonkármán model.
144 """
145 r0Arr = np.asarray(r0, dtype=float)
146 kolmTerm = (0.976 * 0.5) / (4.85 * r0Arr)
147 vkCorrection = np.sqrt(1.0 - 2.183 * (r0Arr / outerScale) ** 0.356)
148 return (kolmTerm * vkCorrection).astype(np.float64)
151def r0FromVariance(variance: float | NDArray[np.float64]) -> NDArray[np.float64]:
152 """Infer Fried parameter ``r0`` from centroid-motion variance.
154 Parameters
155 ----------
156 var : `float` or `ndarray`
157 Variance proxy for centroid motion (units consistent with model).
159 Returns
160 -------
161 r0 : `ndarray`
162 Fried parameter (same unit system as the 0.1 scaling in formula).
164 Notes
165 -----
166 Uses a emperically found scaling: ``r0 ≈ (var/0.1)^(-3/5) * 0.1``.
167 """
168 variance = np.asarray(variance, dtype=float)
169 return ((variance / 0.1) ** (-3.0 / 5.0) * 0.1).astype(np.float64)
172@dataclass(frozen=True, slots=True)
173class GuiderSeeing:
174 """Container for tomographic seeing results.
176 Attributes
177 ----------
178 total : `float`
179 Total seeing full width at half maximum (FWHM) in arcseconds.
180 low : `float`
181 Estimated FWHM contribution from the low-altitude turbulence layer
182 (typically < 0.5 km), inferred from longer baseline correlations.
183 mid : `float`
184 Estimated FWHM contribution from the mid-altitude turbulence layer
185 (~0.5-1.5 km), representing the residual between low- and high-layer
186 components.
187 high : `float`
188 Estimated FWHM contribution from the high-altitude turbulence layer (>
189 1.5 km), inferred from the shorter baseline correlations of centroid
190 motion.
191 """
193 total: float
194 low: float
195 mid: float
196 high: float
class CorrelationAnalysis:
    """Correlation measurement on guider star centroid motions.

    Provides utilities to:
    - filter detectors by flux threshold,
    - estimate and correct common drift across detectors, and
    - measure per-detector variance and pairwise correlations of
      corrected centroid motions.

    Parameters
    ----------
    stars : `pandas.DataFrame`
        Table of star measurements. Must contain columns:
        ``'detector'``, ``'stamp'`` (time), ``'xfp'``, ``'yfp'``, ``'flux'``,
        ``'expid'``, and ``'filter'``.
    expid : `int`
        Exposure ID to select. Only rows with this exposure ID are retained.
    fluxCut : `float`, optional
        Minimum mean flux required to keep a detector.

    Raises
    ------
    ValueError
        If no rows with the given ``expid`` are found in ``stars``.
    ValueError
        If more than one unique filter is present in the selected
        exposure.
    """

    def __init__(self, stars: pd.DataFrame, expid: int, fluxCut: float = 4e3) -> None:
        self.expid: int = expid
        self.fluxCut: float = fluxCut

        # Early exit if no data
        mask = stars["expid"].eq(expid)
        if not mask.any():
            raise ValueError(f"Stars dataframe has no data with expid={expid}")

        # Copy so downstream detrending cannot mutate the caller's frame.
        starsFiltered = stars[mask].copy()

        # Wavelength information from LSST bandpass centers (nm).
        # TODO: DM-52059 Use official filter wavelengths once they're available
        lamMap = {
            "u": 372.4,
            "g": 480.7,
            "r": 622.1,
            "i": 755.9,
            "z": 868.0,
            "y": 975.3,
        }
        band = starsFiltered["filter"].unique()
        if len(band) != 1:
            raise ValueError("cannot have more than one filter per exposure")
        # NOTE(review): a band outside the six keys above raises KeyError
        # here rather than ValueError -- confirm that is acceptable.
        self.wavelength: float = float(lamMap[str(band[0])])

        # Initialize metrics builder and detrend stars.
        # These classes/objects are external to this module.
        dummyNmissingStampPlaceholder = 0
        metricsBuilder = GuiderMetricsBuilder(starsFiltered, dummyNmissingStampPlaceholder)
        self.metrics = metricsBuilder.buildMetrics(self.expid)
        starsDetrended = metricsBuilder.detrendStars()

        # Build star list per detector with a flux threshold.
        self.starList: dict[str, pd.DataFrame] = groupStarsPerDetector(starsDetrended, fluxCut)
        self.detectors: list[str] = list(self.starList.keys())

    def measureVariance(self) -> list[float]:
        """Compute per-detector variances of corrected centroid motions.

        Requires that detrending has already been applied. Variances are scaled
        to units of (pix x MM_TO_PIXEL)^2.

        Returns
        -------
        out : `list[float]`
            Flattened list of per-detector variances: ``[vx_det1, vy_det1,
            vx_det2, vy_det2, ...]``. If a detector was excluded or lacks
            corrected coordinates, entries are ``NaN``.
        """
        # Verify detrending outputs are present (handled upstream).
        # Leaving this note instead of commented-out long lines.

        out: list[float] = []
        for df in self.starList.values():
            # Re-check the flux cut even though groupStarsPerDetector already
            # applied it at construction; failures become NaN placeholders so
            # the output stays aligned with self.starList ordering.
            meanFlux = float(np.nanmean(pd.to_numeric(df["flux"], errors="coerce")))
            hasCorr = "dxfp_corr" in df and "dyfp_corr" in df
            if meanFlux > self.fluxCut and hasCorr:
                # NOTE(review): np.nanvar defaults to ddof=0 here, while
                # calcCentroidStats uses ddof=1 -- confirm this is intended.
                vx = np.nanvar(pd.to_numeric(df["dxfp_corr"], errors="coerce")) * (MM_TO_PIXEL**2)
                vy = np.nanvar(pd.to_numeric(df["dyfp_corr"], errors="coerce")) * (MM_TO_PIXEL**2)
                out.extend([vx, vy])
            else:
                out.extend([np.nan, np.nan])
        return out

    @staticmethod
    def getBaselinePairs(kind: str) -> tuple[tuple[str, str], ...]:
        """Produce preset detector baseline pairs.

        Parameters
        ----------
        kind : `str`
            One of ``{'adjacent', 'cross', 'diagonal'}``.

        Returns
        -------
        pairs : `tuple` of `tuple`
            Immutable sequence of 2-element detector pairs ``(('R00_SG0',
            'R00_SG1'), ...)``.

        Raises
        ------
        KeyError
            If ``kind`` is not a recognized preset.
        """
        # Pairs are grouped by baseline length: same-raft (adjacent),
        # opposite-edge (cross), and corner-to-corner (diagonal).
        presets: dict[str, tuple[tuple[str, str], ...]] = {
            "adjacent": (
                ("R00_SG0", "R00_SG1"),
                ("R04_SG0", "R04_SG1"),
                ("R40_SG0", "R40_SG1"),
                ("R44_SG0", "R44_SG1"),
            ),
            "cross": (
                ("R00_SG0", "R40_SG1"),
                ("R04_SG1", "R44_SG0"),
                ("R00_SG1", "R04_SG0"),
                ("R40_SG0", "R44_SG1"),
            ),
            "diagonal": (
                ("R00_SG0", "R44_SG1"),
                ("R00_SG1", "R44_SG0"),
                ("R40_SG0", "R04_SG1"),
                ("R40_SG1", "R04_SG0"),
            ),
        }
        return presets[kind]

    def measurePairwiseStats(
        self, kind: str
    ) -> tuple[NDArray[np.float64], NDArray[np.float64], NDArray[np.float64]]:
        """Compute pairwise correlations, covariances, and differential
        variances between centroid motions of detector pairs.

        The detector pairs are chosen by a preset specified by ``kind``. For
        each pair of detectors, star centroids are rotated into the local
        (perpendicular, parallel) frame defined by the vector that connects
        their mean positions. The method then computes:
        - Pearson correlation coefficients,
        - covariance (scaled to centipix²), and
        - variance of the centroid differences (scaled to centipix²),
        separately for the perpendicular (x) and parallel (y) components.

        Parameters
        ----------
        kind : `str`
            Preset name passed to ``self.getBaselinePairs()``.

        Returns
        -------
        corrs : `ndarray`
            Shape (P, 2). Correlation coefficients per pair: columns
            ``[corrPerp, corrPar]``.
        covs : `ndarray`
            Shape (P, 2). Covariances per pair (scaled by ``MM_TO_PIXEL^2``):
            columns ``[covPerp, covPar]``.
        diffVars : `ndarray`
            Shape (P, 2). Variances of centroid differences per pair (scaled by
            ``MM_TO_PIXEL^2``): columns ``[diffVarPerp, diffVarPar]``.
        """
        pairs = self.getBaselinePairs(kind)

        corrs: list[list[float]] = []
        covs: list[list[float]] = []
        diffVars: list[list[float]] = []

        for det1, det2 in pairs:
            df1 = self.starList.get(det1)
            df2 = self.starList.get(det2)
            # Keep one NaN row per missing/unalignable pair so output rows
            # always line up with the preset pair ordering.
            if df1 is None or df2 is None:
                corrs.append([np.nan, np.nan])
                covs.append([np.nan, np.nan])
                diffVars.append([np.nan, np.nan])
                continue

            i1, i2 = alignTimes(df1, df2)
            if i1 is None or i2 is None:
                corrs.append([np.nan, np.nan])
                covs.append([np.nan, np.nan])
                diffVars.append([np.nan, np.nan])
                continue

            # Rotate both centroid sets into the shared baseline frame before
            # measuring statistics component by component.
            roiCenter1, roiCenter2 = getROICenters(df1, df2)
            starCentroid1 = getCorrectedXy(df1)
            starCentroid2 = getCorrectedXy(df2)
            alignedCentroid1, alignedCentroid2 = rotateCentroids(
                starCentroid1, starCentroid2, roiCenter1, roiCenter2
            )

            pairCorrs: list[float] = []
            pairCovs: list[float] = []
            pairDvars: list[float] = []
            for comp in (0, 1):  # 0 = ⟂ (x), 1 = ∥ (y)
                corr, cov, dvar = calcCentroidStats(alignedCentroid1[i1, comp], alignedCentroid2[i2, comp])
                pairCorrs.append(corr)
                pairCovs.append(cov)
                pairDvars.append(dvar)

            corrs.append(pairCorrs)
            covs.append(pairCovs)
            diffVars.append(pairDvars)

        return (
            np.asarray(corrs, dtype=np.float64),
            np.asarray(covs, dtype=np.float64),
            np.asarray(diffVars, dtype=np.float64),
        )

    def calcTomographicCoeffs(self) -> dict[str, tuple[tuple[float, float], ...]]:
        """Return nested coefficient tuples for tomographic seeing layer
        weights for the low and high layers.

        Structure mirrors the correlation data order: ((diag_x, diag_y),
        (cross_x, cross_y), (adj_x, adj_y)).

        Returns
        -------
        coeffs : `dict`
            Dictionary with keys ``'low'`` and ``'high'`` mapping to the
            corresponding coefficient tuples.
        """
        # TODO: add ROI position dependence
        # NOTE(review): these weights appear to be empirically derived;
        # no derivation is visible in this module.
        return {
            "low": ((0, 0.51), (0.49, 0), (0, 0)),
            "high": ((0, 0), (0, 0), (0.872, 0.128)),
        }

    def measureTomographicSeeing(self) -> GuiderSeeing:
        """Estimate tomographic seeing by splitting the total FWHM into
        low/mid/high altitude contributions.

        This method:
        1) computes pairwise correlations for three detector-pair presets
        (adjacent, cross, diagonal),
        2) estimates a total Fried parameter from the median centroid variance
        across detectors, and
        3) converts to total FWHM using a Vonkármán model and scales to the
        exposure wavelength,
        4) splits the total into low/mid/high layers using empirical weights
        based on correlations.

        Returns
        -------
        result : `GuiderSeeing`
            Dataclass containing total, low-, mid-, and high-altitude FWHM
            components (arcsec) for the exposure.
        """
        corrsAdj, _, _ = self.measurePairwiseStats("adjacent")
        corrsCross, _, _ = self.measurePairwiseStats("cross")
        corrsDiag, _, _ = self.measurePairwiseStats("diagonal")
        var = self.measureVariance()

        # NOTE(review): if every detector was excluded, var is all-NaN and
        # nanmedian emits a RuntimeWarning, propagating NaN downstream.
        r0 = r0FromVariance(float(np.nanmedian(var)))
        # Wavelength scaling: r0 ∝ λ^(6/5) ⇒ FWHM ∝ λ^(-1/5)
        fwhmTot = float(fwhmVonkarman(r0) * (self.wavelength / 500.0) ** (-1.0 / 5.0))

        # Combine median correlations per preset with the layer coefficients;
        # coefficient order is ((diag), (cross), (adj)) per calcTomographicCoeffs.
        coeffs = self.calcTomographicCoeffs()
        weightsLow = (
            coeffs["low"][0][0] * float(np.nanmedian(corrsDiag[:, 0]))
            + coeffs["low"][0][1] * float(np.nanmedian(corrsDiag[:, 1]))
            + coeffs["low"][1][0] * float(np.nanmedian(corrsCross[:, 0]))
            + coeffs["low"][1][1] * float(np.nanmedian(corrsCross[:, 1]))
            + coeffs["low"][2][0] * float(np.nanmedian(corrsAdj[:, 0]))
            + coeffs["low"][2][1] * float(np.nanmedian(corrsAdj[:, 1]))
        )
        weightsHigh = (
            1
            - coeffs["high"][0][0] * float(np.nanmedian(corrsDiag[:, 0]))
            - coeffs["high"][0][1] * float(np.nanmedian(corrsDiag[:, 1]))
            - coeffs["high"][1][0] * float(np.nanmedian(corrsCross[:, 0]))
            - coeffs["high"][1][1] * float(np.nanmedian(corrsCross[:, 1]))
            - coeffs["high"][2][0] * float(np.nanmedian(corrsAdj[:, 0]))
            - coeffs["high"][2][1] * float(np.nanmedian(corrsAdj[:, 1]))
        )
        # Mid layer is whatever remains after low and high are removed.
        weightsMid = 1.0 - weightsLow - weightsHigh

        def safeGuard(w: float) -> float:
            # Clamp negative weights to zero before the sqrt; pass NaN through.
            if np.isnan(w):
                return np.nan
            return np.sqrt(max(0.0, float(w)))

        return GuiderSeeing(
            total=fwhmTot,
            low=fwhmTot * safeGuard(weightsLow),
            mid=fwhmTot * safeGuard(weightsMid),
            high=fwhmTot * safeGuard(weightsHigh),
        )
def calcCentroidStats(a: np.ndarray, b: np.ndarray) -> tuple[float, float, float]:
    """Compute Pearson correlation, covariance, and (a-b) variance for arrays.

    Parameters
    ----------
    a : `ndarray`
        First 1D array (component of rotated centroids).
    b : `ndarray`
        Second 1D array (component of rotated centroids).

    Returns
    -------
    corr : `float`
        Pearson correlation coefficient.
    cov : `float`
        Covariance scaled to centipix² (i.e., multiplied by ``MM_TO_PIXEL^2``).
    dvar : `float`
        Variance of (a - b) scaled to centipix² (i.e., multiplied by
        ``MM_TO_PIXEL^2``).
    """
    # Bail out only when a whole input is NaN; partially-NaN inputs still
    # reach corrcoef/cov, which propagate NaN rather than ignoring it.
    # NOTE(review): presumably inputs are NaN-free by this point -- confirm.
    if np.all(np.isnan(a)) or np.all(np.isnan(b)):
        return np.nan, np.nan, np.nan

    scale = MM_TO_PIXEL**2
    pearson = float(np.corrcoef(a, b)[0, 1])
    covariance = float(np.cov(a, b, ddof=1)[0, 1]) * scale
    diffVariance = float(np.var(a - b, ddof=1)) * scale
    return pearson, covariance, diffVariance
def getROICenters(df1: pd.DataFrame, df2: pd.DataFrame) -> tuple[np.ndarray, np.ndarray]:
    """Compute mean (x, y) centers for two detector dataframes.

    Parameters
    ----------
    df1 : `pandas.DataFrame`
        First detector dataframe; must contain ``'xfp'``, ``'yfp'``.
    df2 : `pandas.DataFrame`
        Second detector dataframe; must contain ``'xfp'``, ``'yfp'``.

    Returns
    -------
    center1 : `ndarray`
        Shape (2,). Mean of (xfp, yfp) for ``df1``.
    center2 : `ndarray`
        Shape (2,). Mean of (xfp, yfp) for ``df2``.
    """

    def meanCenter(df: pd.DataFrame) -> np.ndarray:
        # Coerce to numeric first so non-numeric entries become NaN and are
        # ignored by nanmean.
        xMean = np.nanmean(pd.to_numeric(df["xfp"], errors="coerce"))
        yMean = np.nanmean(pd.to_numeric(df["yfp"], errors="coerce"))
        return np.array([xMean, yMean], float)

    return meanCenter(df1), meanCenter(df2)
558def alignTimes(df1: pd.DataFrame, df2: pd.DataFrame) -> tuple[np.ndarray, np.ndarray] | tuple[None, None]:
559 """Align two detector dataframes on common ``elapsed_time`` stamps.
561 Parameters
562 ----------
563 df1 : `pandas.DataFrame`
564 First detector dataframe; must contain ``'elapsed_time'``.
565 df2 : `pandas.DataFrame`
566 Second detector dataframe; must contain ``'elapsed_time'``.
568 Returns
569 -------
570 i1 : `ndarray` or `None`
571 Indices into ``df1`` for rows matching the common timestamps, or
572 ``None`` if no overlap.
573 i2 : `ndarray` or `None`
574 Indices into ``df2`` for rows matching the common timestamps, or
575 ``None`` if no overlap.
576 """
577 t, i1, i2 = np.intersect1d(df1["elapsed_time"], df2["elapsed_time"], return_indices=True)
578 return (i1, i2) if t.size else (None, None)
def getCorrectedXy(df: pd.DataFrame) -> np.ndarray:
    """Return corrected (x, y) centroid columns as a 2D array.

    Parameters
    ----------
    df : `pandas.DataFrame`
        Detector dataframe containing columns ``'dxfp_corr'`` and
        ``'dyfp_corr'``.

    Returns
    -------
    xy : `ndarray`
        Shape (N, 2) array with columns (dxfp_corr, dyfp_corr).
    """
    xValues = np.asarray(df["dxfp_corr"], dtype=np.float64)
    yValues = np.asarray(df["dyfp_corr"], dtype=np.float64)
    return np.stack((xValues, yValues), axis=1)