Coverage for python/lsst/faro/utils/stellar_locus.py: 14%
70 statements
coverage.py v7.4.4, created at 2024-04-23 09:36 +0000
# This file is part of faro.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import numpy as np
import scipy.stats as scipyStats

from lsst.pipe.base import Struct

__all__ = (
    "stellarLocusResid",
    "calcP1P2",
    "getCoeffs",
    "p1CoeffsFromP2x0y0",
    "p2p1CoeffsFromLinearFit",
    "calcQuartileClippedStats",
)

def stellarLocusResid(gmags, rmags, imags, **filterargs):
    """Compute stellar locus P1/P2 values from g, r, i magnitudes.

    Fits a line to the stellar locus in (g-r, r-i) color space, converts the
    fit to Ivezic et al. 2004 P1/P2 coefficients, then refits after clipping
    >3 sigma outliers in P2.

    Parameters
    ----------
    gmags, rmags, imags : `numpy.ndarray` of `float`
        Arrays of g, r, and i band magnitudes.

    Returns
    -------
    p1_fit, p2_fit : `numpy.ndarray` of `float`
        P1 and P2 values for the objects passing the P1 selection.
    p1coeffs, p2coeffs : `list` of `float`
        The six P1 and P2 coefficients (with zeros for the unused u and z bands).
    """
    gr = gmags - rmags
    ri = rmags - imags

    # Also trim large values of r-i, since those will skew the linear regression.
    okfitcolors = (
        (gr < 1.1)
        & (gr > 0.3)
        & (np.abs(ri) < 1.0)
        & np.isfinite(gmags)
        & np.isfinite(rmags)
        & np.isfinite(imags)
    )
    # Eventually switch to using orthogonal regression instead of linear (as in pipe_analysis)?
    slope, intercept, r_value, p_value, std_err = scipyStats.linregress(
        gr[okfitcolors], ri[okfitcolors]
    )
    p2p1coeffs = p2p1CoeffsFromLinearFit(slope, intercept, 0.3, slope * 0.3 + intercept)
    p1coeffs = p2p1coeffs.p1Coeffs.copy()
    # Insert zeros for the unused u- and z-band coefficients.
    p1coeffs.insert(0, 0.0)
    p1coeffs.insert(4, 0.0)
    p2coeffs = list(p2p1coeffs.p2Coeffs.copy())
    p2coeffs.insert(0, 0.0)
    p2coeffs.insert(4, 0.0)
    umags = np.zeros(len(gmags))
    zmags = np.zeros(len(gmags))
    p1_fit = calcP1P2([umags, gmags, rmags, imags, zmags], p1coeffs)
    p2_fit = calcP1P2([umags, gmags, rmags, imags, zmags], p2coeffs)
    okp1_fit = (p1_fit < 0.6) & (p1_fit > -0.2)

    # Do a second iteration, removing large (>3 sigma) outliers in P2:
    clippedStats = calcQuartileClippedStats(p2_fit[okp1_fit], 3.0)
    keep = np.abs(p2_fit) < clippedStats.clipValue

    slope, intercept, r_value, p_value, std_err = scipyStats.linregress(
        gr[okfitcolors & keep], ri[okfitcolors & keep]
    )
    p2p1coeffs = p2p1CoeffsFromLinearFit(slope, intercept, 0.3, slope * 0.3 + intercept)
    p1coeffs = p2p1coeffs.p1Coeffs.copy()
    # Insert zeros for the unused u- and z-band coefficients.
    p1coeffs.insert(0, 0.0)
    p1coeffs.insert(4, 0.0)
    p2coeffs = list(p2p1coeffs.p2Coeffs.copy())
    p2coeffs.insert(0, 0.0)
    p2coeffs.insert(4, 0.0)
    p1_fit = calcP1P2([umags, gmags, rmags, imags, zmags], p1coeffs)
    p2_fit = calcP1P2([umags, gmags, rmags, imags, zmags], p2coeffs)
    okp1_fit = (p1_fit < 0.6) & (p1_fit > -0.2)

    return p1_fit[okp1_fit], p2_fit[okp1_fit], p1coeffs, p2coeffs

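# Illustrative usage sketch (not part of the original faro module): run
# stellarLocusResid on a synthetic, roughly linear g-r/r-i stellar locus.
# The magnitudes and locus relation below are invented for demonstration.
def _exampleStellarLocusResid():
    rng = np.random.default_rng(42)
    gr = rng.uniform(0.4, 1.0, 500)    # g-r colors inside the fit window
    rmags = rng.uniform(17.0, 21.0, 500)
    gmags = rmags + gr
    imags = rmags - (0.5 * gr + 0.05)  # made-up linear locus: r-i = 0.5*(g-r) + 0.05
    p1, p2, p1coeffs, p2coeffs = stellarLocusResid(gmags, rmags, imags)
    # p2 holds the scatter perpendicular to the fitted locus for the selected stars.
    return p1, p2, p1coeffs, p2coeffs
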
def calcP1P2(mags, coeffs):
    # P1 = A'*u + B'*g + C'*r + D'*i + E'*z + F'
    # P2 = A*u + B*g + C*r + D*i + E*z + F
    p1p2 = (
        float(coeffs[0]) * mags[0]
        + float(coeffs[1]) * mags[1]
        + float(coeffs[2]) * mags[2]
        + float(coeffs[3]) * mags[3]
        + float(coeffs[4]) * mags[4]
        + float(coeffs[5])
    )
    return p1p2

def getCoeffs():
    # Coefficients from the Ivezic et al. 2004 paper. Warning: if possible, the
    # coefficients should be derived from a fit to the stellar locus rather
    # than these "fallback" values.
    ivezicCoeffs = {
        "P1s": [0.91, -0.495, -0.415, 0.0, 0.0, -1.28],
        "P1w": [0.0, 0.928, -0.556, -0.372, 0.0, -0.425],
        "P2s": [-0.249, 0.794, -0.555, 0.0, 0.0, 0.234],
        "P2w": [0.0, -0.227, 0.792, -0.567, 0.0, 0.050],
    }
    ivezicCoeffsHSC = {
        "P1s": [0.91, -0.495, -0.415, 0.0, 0.0, -1.28],
        "P1w": [0.0, 0.888, -0.427, -0.461, 0.0, -0.478],
        "P2s": [-0.249, 0.794, -0.555, 0.0, 0.0, 0.234],
        "P2w": [0.0, -0.274, 0.803, -0.529, 0.0, 0.041],
    }
    return ivezicCoeffs, ivezicCoeffsHSC

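# Illustrative usage sketch (not part of the original faro module): evaluate
# the "wide" P2 color of a single star with both the SDSS and HSC fallback
# coefficient sets. A P2 value near zero means the star lies close to the
# stellar locus. The magnitudes below are invented for demonstration.
def _exampleGetCoeffs():
    ivezicCoeffs, ivezicCoeffsHSC = getCoeffs()
    # u, g, r, i, z magnitudes for one made-up star.
    mags = [np.array([20.5]), np.array([19.0]), np.array([18.4]),
            np.array([18.2]), np.array([18.1])]
    p2wSdss = calcP1P2(mags, ivezicCoeffs["P2w"])
    p2wHsc = calcP1P2(mags, ivezicCoeffsHSC["P2w"])
    return p2wSdss, p2wHsc
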
# Everything below this is copied directly from pipe_analysis/utils.py.
# Should we move all those functions here once pipe_analysis is rewritten?

def p1CoeffsFromP2x0y0(p2Coeffs, x0, y0):
    """Compute Ivezic P1 coefficients using the P2 coefficients and the
    origin (x0, y0).

    Reference: Ivezic et al. 2004 (2004AN....325..583I)

    theta = arctan(mP1), where mP1 is the slope of the equivalent straight
    line (the P1 line) from the P2 coeffs in the (x, y) coordinate system
    and x = c1 - c2, y = c2 - c3:

    P1 = cos(theta)*c1 + (sin(theta) - cos(theta))*c2 - sin(theta)*c3 + deltaP1
    P1 = 0 at (x0, y0) ==> deltaP1 = -cos(theta)*x0 - sin(theta)*y0

    Parameters
    ----------
    p2Coeffs : `list` of `float`
        List of the four P2 coefficients from which, along with the origin
        point (``x0``, ``y0``), to compute/derive the associated P1
        coefficients.
    x0, y0 : `float`
        Coordinates at which to set P1 = 0 (i.e. the P1/P2 axis origin).

    Returns
    -------
    p1Coeffs : `list` of `float`
        The four P1 coefficients.
    """
    mP1 = p2Coeffs[0] / p2Coeffs[2]
    cosTheta = np.cos(np.arctan(mP1))
    sinTheta = np.sin(np.arctan(mP1))
    deltaP1 = -cosTheta * x0 - sinTheta * y0
    p1Coeffs = [cosTheta, sinTheta - cosTheta, -sinTheta, deltaP1]

    return p1Coeffs

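# Illustrative usage sketch (not part of the original faro module): build a set
# of P2 coefficients from a hypothetical slope/intercept and recover the
# matching P1 coefficients with the P1 = 0 origin on the fitted line at
# x0 = 0.3. All numbers are invented for demonstration.
def _exampleP1CoeffsFromP2x0y0():
    m, b = 0.5, 0.05  # hypothetical locus slope and intercept
    scale = np.sqrt(m ** 2 + 1.0)
    p2Coeffs = [-m / scale, (m + 1.0) / scale, -1.0 / scale, -b / scale]
    # Inside the helper, theta = arctan(p2Coeffs[0] / p2Coeffs[2]) = arctan(m).
    return p1CoeffsFromP2x0y0(p2Coeffs, 0.3, m * 0.3 + b)
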
def p2p1CoeffsFromLinearFit(m, b, x0, y0):
    """Derive the Ivezic et al. 2004 P2 and P1 equations based on a linear
    fit to the given region in color-color space.

    Reference: Ivezic et al. 2004 (2004AN....325..583I)

    For a y = m*x + b fit, where x = c1 - c2 and y = c2 - c3:

    P2 = (-m*c1 + (m + 1)*c2 - c3 - b)/sqrt(m**2 + 1)
    P2norm = P2/sqrt[(m**2 + (m + 1)**2 + 1**2)/(m**2 + 1)]

    P1 = cos(theta)*x + sin(theta)*y + deltaP1, theta = arctan(m)
    P1 = cos(theta)*(c1 - c2) + sin(theta)*(c2 - c3) + deltaP1
    P1 = cos(theta)*c1 + (sin(theta) - cos(theta))*c2 - sin(theta)*c3 + deltaP1
    P1 = 0 at (x0, y0) ==> deltaP1 = -cos(theta)*x0 - sin(theta)*y0

    Parameters
    ----------
    m : `float`
        Slope of the line to convert.
    b : `float`
        Intercept of the line to convert.
    x0, y0 : `float`
        Coordinates at which to set P1 = 0.

    Returns
    -------
    result : `lsst.pipe.base.Struct`
        Result struct with components:

        - ``p2Coeffs`` : four P2 equation coefficients (`list` of `float`).
        - ``p1Coeffs`` : four P1 equation coefficients (`list` of `float`).
    """
    # Compute the Ivezic P2 coefficients using the linear fit slope and intercept.
    scaleFact = np.sqrt(m ** 2 + 1.0)
    p2Coeffs = [-m / scaleFact, (m + 1.0) / scaleFact, -1.0 / scaleFact, -b / scaleFact]
    p2Norm = 0.0
    for coeff in p2Coeffs[:-1]:  # Omit the constant normalization term
        p2Norm += coeff ** 2
    p2Norm = np.sqrt(p2Norm)
    p2Coeffs /= p2Norm

    # Compute the Ivezic P1 coefficients using the linear fit slope and the
    # point (x0, y0) as the origin.
    p1Coeffs = p1CoeffsFromP2x0y0(p2Coeffs, x0, y0)

    return Struct(p2Coeffs=p2Coeffs, p1Coeffs=p1Coeffs)

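# Illustrative usage sketch (not part of the original faro module): derive the
# P2 and P1 coefficients from a hypothetical linear fit to the stellar locus in
# (g-r, r-i) space, anchoring P1 = 0 at the blue end of the fit range
# (x0 = 0.3), as stellarLocusResid does. The slope and intercept are invented.
def _exampleP2P1CoeffsFromLinearFit():
    m, b = 0.5, 0.05  # hypothetical locus slope and intercept
    coeffs = p2p1CoeffsFromLinearFit(m, b, 0.3, m * 0.3 + b)
    # coeffs.p2Coeffs measures the (normalized) perpendicular distance from the
    # fitted line; coeffs.p1Coeffs measures position along it.
    return coeffs.p2Coeffs, coeffs.p1Coeffs
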
def calcQuartileClippedStats(dataArray, nSigmaToClip=3.0):
    """Calculate the quartile-based clipped statistics of a data array.

    The difference between ``quartiles[2]`` and ``quartiles[0]`` is the
    interquartile distance. 0.74*interquartileDistance is an estimate of the
    standard deviation, so, in the case that ``dataArray`` has an
    approximately Gaussian distribution, this is equivalent to nSigma
    clipping.

    Parameters
    ----------
    dataArray : `list` or `numpy.ndarray` of `float`
        List or array containing the values for which the quartile-based
        clipped statistics are to be calculated.
    nSigmaToClip : `float`, optional
        Number of "sigma" outside of which to clip data when computing the
        statistics.

    Returns
    -------
    result : `lsst.pipe.base.Struct`
        The quartile-based clipped statistics with ``nSigmaToClip`` clipping.
        Attributes are:

        ``median``
            The median of the full ``dataArray`` (`float`).
        ``mean``
            The quartile-based clipped mean (`float`).
        ``stdDev``
            The quartile-based clipped standard deviation (`float`).
        ``rms``
            The quartile-based clipped root-mean-squared (`float`).
        ``clipValue``
            The value outside of which to clip the data before computing the
            statistics (`float`).
        ``goodArray``
            A boolean array indicating which data points in ``dataArray``
            were used in the calculation of the statistics, where `False`
            indicates a clipped data point (`numpy.ndarray` of `bool`).
    """
    quartiles = np.percentile(dataArray, [25, 50, 75])
    assert len(quartiles) == 3
    median = quartiles[1]
    interQuartileDistance = quartiles[2] - quartiles[0]
    clipValue = nSigmaToClip * 0.74 * interQuartileDistance
    good = np.logical_not(np.abs(dataArray - median) > clipValue)
    quartileClippedMean = dataArray[good].mean()
    quartileClippedStdDev = dataArray[good].std()
    quartileClippedRms = np.sqrt(np.mean(dataArray[good] ** 2))

    return Struct(
        median=median,
        mean=quartileClippedMean,
        stdDev=quartileClippedStdDev,
        rms=quartileClippedRms,
        clipValue=clipValue,
        goodArray=good,
    )

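# Illustrative usage sketch (not part of the original faro module): clip a
# Gaussian sample with one gross outlier injected. With nSigmaToClip=3.0 the
# outlier falls well outside clipValue and is flagged False in goodArray.
# All numbers are invented for demonstration.
def _exampleCalcQuartileClippedStats():
    rng = np.random.default_rng(0)
    data = rng.normal(0.0, 0.05, 1000)
    data[0] = 5.0  # gross outlier
    stats = calcQuartileClippedStats(data, nSigmaToClip=3.0)
    # stats.goodArray.sum() counts the unclipped points used for the statistics.
    return stats.median, stats.stdDev, int(stats.goodArray.sum())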