Coverage for python/lsst/cp/pipe/plotPtc.py: 6%

# This file is part of cp_pipe.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
__all__ = ['PlotPhotonTransferCurveTask']

import os
import pickle

import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
from matplotlib import gridspec
from matplotlib.backends.backend_pdf import PdfPages
from matplotlib.ticker import MaxNLocator

import lsst.ip.isr as isr
import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase

from .utils import (funcAstier, funcPolynomial, NonexistentDatasetTaskDataIdContainer,
                    calculateWeightedReducedChi2)
from .astierCovPtcFit import computeApproximateAcoeffs


class PlotPhotonTransferCurveTaskConfig(pexConfig.Config):
    """Config class for the photon transfer curve plotting task."""
    datasetFileName = pexConfig.Field(
        dtype=str,
        doc="datasetPtc file name (pkl)",
        default="",
    )
    linearizerFileName = pexConfig.Field(
        dtype=str,
        doc="linearizer file name (fits)",
        default="",
    )
    ccdKey = pexConfig.Field(
        dtype=str,
        doc="The key by which to pull a detector from a dataId, e.g. 'ccd' or 'detector'.",
        default='detector',
    )
    signalElectronsRelativeA = pexConfig.Field(
        dtype=float,
        doc="Signal value at which to evaluate the relative systematic bias between different "
            "methods of estimating a_ij (Fig. 15 of Astier+19).",
        default=75000,
    )
    plotNormalizedCovariancesNumberOfBins = pexConfig.Field(
        dtype=int,
        doc="Number of bins in the `plotNormalizedCovariances` function "
            "(Figs. 8 and 10 of Astier+19).",
        default=10,
    )
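

# Hedged config sketch: the values below are hypothetical placeholders, not
# defaults shipped with this package. They illustrate how the fields above
# would typically be overridden (e.g. via --config or a config file) before
# running the task.
#
#     config.datasetFileName = "/path/to/ptcDataset.pkl"
#     config.linearizerFileName = ""   # empty string: skip the linearizer plots
#     config.ccdKey = "detector"
#     config.signalElectronsRelativeA = 75000
#     config.plotNormalizedCovariancesNumberOfBins = 10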


class PlotPhotonTransferCurveTask(pipeBase.CmdLineTask):
    """A class to plot the dataset from MeasurePhotonTransferCurveTask.

    Parameters
    ----------
    *args: `list`
        Positional arguments passed to the Task constructor. None used at this
        time.
    **kwargs: `dict`
        Keyword arguments passed on to the Task constructor. None used at this
        time.
    """

    ConfigClass = PlotPhotonTransferCurveTaskConfig
    _DefaultName = "plotPhotonTransferCurve"

    def __init__(self, *args, **kwargs):
        pipeBase.CmdLineTask.__init__(self, *args, **kwargs)
        plt.interactive(False)  # stop windows popping up when plotting; when headless, use the 'agg' backend too
        self.config.validate()
        self.config.freeze()

    @classmethod
    def _makeArgumentParser(cls):
        """Augment argument parser for the PlotPhotonTransferCurveTask."""
        parser = pipeBase.ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", datasetType="photonTransferCurveDataset",
                               ContainerClass=NonexistentDatasetTaskDataIdContainer,
                               help="The ccds to use, e.g. --id ccd=0..100")
        return parser

    @pipeBase.timeMethod
    def runDataRef(self, dataRef):
        """Run the Photon Transfer Curve (PTC) plotting task.

        Parameters
        ----------
        dataRef : `lsst.daf.persistence.ButlerDataRef`
            dataRef for the detector whose PTC dataset is to be plotted.
        """
        datasetFile = self.config.datasetFileName
        with open(datasetFile, "rb") as f:
            datasetPtc = pickle.load(f)

        dirname = dataRef.getUri(datasetType='cpPipePlotRoot', write=True)
        if not os.path.exists(dirname):
            os.makedirs(dirname)

        detNum = dataRef.dataId[self.config.ccdKey]
        filename = f"PTC_det{detNum}.pdf"
        filenameFull = os.path.join(dirname, filename)

        if self.config.linearizerFileName:
            linearizer = isr.linearize.Linearizer.readFits(self.config.linearizerFileName)
        else:
            linearizer = None
        self.run(filenameFull, datasetPtc, linearizer=linearizer, log=self.log)

        return pipeBase.Struct(exitStatus=0)
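
    # Hedged usage sketch: as a CmdLineTask, runDataRef is normally driven from
    # the command line rather than called directly. The script name, repository
    # path, and dataId below are illustrative placeholders, not verified entry
    # points of this package:
    #
    #     plotPhotonTransferCurve.py /path/to/REPO --rerun myRerun \
    #         --id detector=0 --config datasetFileName=ptcDataset.pkl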

    def run(self, filenameFull, datasetPtc, linearizer=None, log=None):
        """Make the plots for the PTC task."""
        ptcFitType = datasetPtc.ptcFitType
        with PdfPages(filenameFull) as pdfPages:
            if ptcFitType in ["FULLCOVARIANCE", ]:
                self.covAstierMakeAllPlots(datasetPtc.covariancesFits, datasetPtc.covariancesFitsWithNoB,
                                           pdfPages, log=log)
            elif ptcFitType in ["EXPAPPROXIMATION", "POLYNOMIAL"]:
                self._plotStandardPtc(datasetPtc, ptcFitType, pdfPages)
            else:
                raise RuntimeError(f"The input dataset had an invalid dataset.ptcFitType: {ptcFitType}. \n" +
                                   "Options: 'FULLCOVARIANCE', 'EXPAPPROXIMATION', or 'POLYNOMIAL'.")
            if linearizer:
                self._plotLinearizer(datasetPtc, linearizer, pdfPages)

        return

    def covAstierMakeAllPlots(self, covFits, covFitsNoB, pdfPages,
                              log=None):
        """Make plots for MeasurePhotonTransferCurve task when doCovariancesAstier=True.

        This function calls other functions that mostly reproduce the plots in Astier+19.
        Most of the code is ported from Pierre Astier's repository https://github.com/PierreAstier/bfptc

        Parameters
        ----------
        covFits: `dict`
            Dictionary of CovFit objects, with amp names as keys.

        covFitsNoB: `dict`
            Dictionary of CovFit objects, with amp names as keys (b=0 in Eq. 20 of Astier+19).

        pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.

        log : `lsst.log.Log`, optional
            Logger to handle messages.
        """
        self.plotCovariances(covFits, pdfPages)
        self.plotNormalizedCovariances(covFits, covFitsNoB, 0, 0, pdfPages, offset=0.01, topPlot=True,
                                       numberOfBins=self.config.plotNormalizedCovariancesNumberOfBins,
                                       log=log)
        self.plotNormalizedCovariances(covFits, covFitsNoB, 0, 1, pdfPages,
                                       numberOfBins=self.config.plotNormalizedCovariancesNumberOfBins,
                                       log=log)
        self.plotNormalizedCovariances(covFits, covFitsNoB, 1, 0, pdfPages,
                                       numberOfBins=self.config.plotNormalizedCovariancesNumberOfBins,
                                       log=log)
        self.plot_a_b(covFits, pdfPages)
        self.ab_vs_dist(covFits, pdfPages, bRange=4)
        self.plotAcoeffsSum(covFits, pdfPages)
        self.plotRelativeBiasACoeffs(covFits, covFitsNoB, self.config.signalElectronsRelativeA, pdfPages,
                                     maxr=4)

        return

    @staticmethod
    def plotCovariances(covFits, pdfPages):
        """Plot covariances and models: Cov00, Cov10, Cov01.

        Figs. 6 and 7 of Astier+19

        Parameters
        ----------
        covFits: `dict`
            Dictionary of CovFit objects, with amp names as keys.

        pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.
        """
        legendFontSize = 7
        labelFontSize = 7
        titleFontSize = 9
        supTitleFontSize = 18
        markerSize = 25

        # General determination of the size of the plot grid
        nAmps = len(covFits)
        if nAmps == 2:
            nRows, nCols = 2, 1
        else:
            nRows = np.sqrt(nAmps)
            mantissa, _ = np.modf(nRows)
            if mantissa > 0:
                nRows = int(nRows) + 1
                nCols = nRows
            else:
                nRows = int(nRows)
                nCols = nRows

        f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        fResCov00, axResCov00 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row',
                                             figsize=(13, 10))
        fCov01, axCov01 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        fCov10, axCov10 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))

        for i, (fitPair, a, a2, aResVar, a3, a4) in enumerate(zip(covFits.items(), ax.flatten(),
                                                                  ax2.flatten(), axResCov00.flatten(),
                                                                  axCov01.flatten(), axCov10.flatten())):
            amp = fitPair[0]
            fit = fitPair[1]

            (meanVecOriginal, varVecOriginal, varVecModelOriginal,
                weightsOriginal, varMask) = fit.getFitData(0, 0)
            meanVecFinal, varVecFinal = meanVecOriginal[varMask], varVecOriginal[varMask]
            varVecModelFinal = varVecModelOriginal[varMask]
            meanVecOutliers = meanVecOriginal[np.invert(varMask)]
            varVecOutliers = varVecOriginal[np.invert(varMask)]
            varWeightsFinal = weightsOriginal[varMask]
            # Get weighted reduced chi2
            chi2FullModelVar = calculateWeightedReducedChi2(varVecFinal, varVecModelFinal,
                                                            varWeightsFinal, len(meanVecFinal), 4)

            (meanVecOrigCov01, varVecOrigCov01, varVecModelOrigCov01,
                _, maskCov01) = fit.getFitData(0, 1)
            meanVecFinalCov01, varVecFinalCov01 = meanVecOrigCov01[maskCov01], varVecOrigCov01[maskCov01]
            varVecModelFinalCov01 = varVecModelOrigCov01[maskCov01]
            meanVecOutliersCov01 = meanVecOrigCov01[np.invert(maskCov01)]
            varVecOutliersCov01 = varVecOrigCov01[np.invert(maskCov01)]

            (meanVecOrigCov10, varVecOrigCov10, varVecModelOrigCov10,
                _, maskCov10) = fit.getFitData(1, 0)
            meanVecFinalCov10, varVecFinalCov10 = meanVecOrigCov10[maskCov10], varVecOrigCov10[maskCov10]
            varVecModelFinalCov10 = varVecModelOrigCov10[maskCov10]
            meanVecOutliersCov10 = meanVecOrigCov10[np.invert(maskCov10)]
            varVecOutliersCov10 = varVecOrigCov10[np.invert(maskCov10)]

            # Quadratic fit for the residuals below
            par2 = np.polyfit(meanVecFinal, varVecFinal, 2, w=varWeightsFinal)
            varModelFinalQuadratic = np.polyval(par2, meanVecFinal)
            chi2QuadModelVar = calculateWeightedReducedChi2(varVecFinal, varModelFinalQuadratic,
                                                            varWeightsFinal, len(meanVecFinal), 3)

            # Fit with no 'b' coefficient (c = a*b in Eq. 20 of Astier+19)
            fitNoB = fit.copy()
            fitNoB.params['c'].fix(val=0)
            fitNoB.fitFullModel()
            (meanVecFinalNoB, varVecFinalNoB, varVecModelFinalNoB,
                varWeightsFinalNoB, maskNoB) = fitNoB.getFitData(0, 0, returnMasked=True)
            chi2FullModelNoBVar = calculateWeightedReducedChi2(varVecFinalNoB, varVecModelFinalNoB,
                                                               varWeightsFinalNoB, len(meanVecFinalNoB), 3)

            if len(meanVecFinal):  # Empty if the whole amp is bad, for example.
                stringLegend = (f"Gain: {fit.getGain():.4} e/DN \n" +
                                f"Noise: {fit.getRon():.4} e \n" +
                                r"$a_{00}$: %.3e 1/e" % fit.getA()[0, 0] +
                                "\n" + r"$b_{00}$: %.3e 1/e" % fit.getB()[0, 0])
                minMeanVecFinal = np.min(meanVecFinal)
                maxMeanVecFinal = np.max(meanVecFinal)
                deltaXlim = maxMeanVecFinal - minMeanVecFinal

                a.set_xlabel(r'Mean signal ($\mu$, DN)', fontsize=labelFontSize)
                a.set_ylabel(r'Variance (DN$^2$)', fontsize=labelFontSize)
                a.tick_params(labelsize=11)
                a.set_xscale('linear')
                a.set_yscale('linear')
                a.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
                a.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
                a.plot(meanVecFinal, varVecModelFinal, color='red', linestyle='-')
                a.text(0.03, 0.7, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
                a.set_title(amp, fontsize=titleFontSize)
                a.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])

                # Same as above, but in log-scale
                a2.set_xlabel(r'Mean Signal ($\mu$, DN)', fontsize=labelFontSize)
                a2.set_ylabel(r'Variance (DN$^2$)', fontsize=labelFontSize)
                a2.tick_params(labelsize=11)
                a2.set_xscale('log')
                a2.set_yscale('log')
                a2.plot(meanVecFinal, varVecModelFinal, color='red', linestyle='-')
                a2.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
                a2.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
                a2.text(0.03, 0.7, stringLegend, transform=a2.transAxes, fontsize=legendFontSize)
                a2.set_title(amp, fontsize=titleFontSize)
                a2.set_xlim([minMeanVecFinal, maxMeanVecFinal])

                # Residuals var - model
                aResVar.set_xlabel(r'Mean signal ($\mu$, DN)', fontsize=labelFontSize)
                aResVar.set_ylabel(r'Residuals (DN$^2$)', fontsize=labelFontSize)
                aResVar.tick_params(labelsize=11)
                aResVar.set_xscale('linear')
                aResVar.set_yscale('linear')
                aResVar.plot(meanVecFinal, varVecFinal - varVecModelFinal, color='blue', linestyle='-',
                             label=r'Full fit ($\chi_{\rm{red}}^2$: %g)' % chi2FullModelVar)
                aResVar.plot(meanVecFinal, varVecFinal - varModelFinalQuadratic, color='red', linestyle='-',
                             label=r'Quadratic fit ($\chi_{\rm{red}}^2$: %g)' % chi2QuadModelVar)
                aResVar.plot(meanVecFinalNoB, varVecFinalNoB - varVecModelFinalNoB, color='green',
                             linestyle='-',
                             label=r'Full fit (b=0) ($\chi_{\rm{red}}^2$: %g)' % chi2FullModelNoBVar)
                aResVar.axhline(color='black')
                aResVar.set_title(amp, fontsize=titleFontSize)
                aResVar.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
                aResVar.legend(fontsize=7)

                a3.set_xlabel(r'Mean signal ($\mu$, DN)', fontsize=labelFontSize)
                a3.set_ylabel(r'Cov01 (DN$^2$)', fontsize=labelFontSize)
                a3.tick_params(labelsize=11)
                a3.set_xscale('linear')
                a3.set_yscale('linear')
                a3.scatter(meanVecFinalCov01, varVecFinalCov01, c='blue', marker='o', s=markerSize)
                a3.scatter(meanVecOutliersCov01, varVecOutliersCov01, c='magenta', marker='s', s=markerSize)
                a3.plot(meanVecFinalCov01, varVecModelFinalCov01, color='red', linestyle='-')
                a3.set_title(amp, fontsize=titleFontSize)
                a3.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])

                a4.set_xlabel(r'Mean signal ($\mu$, DN)', fontsize=labelFontSize)
                a4.set_ylabel(r'Cov10 (DN$^2$)', fontsize=labelFontSize)
                a4.tick_params(labelsize=11)
                a4.set_xscale('linear')
                a4.set_yscale('linear')
                a4.scatter(meanVecFinalCov10, varVecFinalCov10, c='blue', marker='o', s=markerSize)
                a4.scatter(meanVecOutliersCov10, varVecOutliersCov10, c='magenta', marker='s', s=markerSize)
                a4.plot(meanVecFinalCov10, varVecModelFinalCov10, color='red', linestyle='-')
                a4.set_title(amp, fontsize=titleFontSize)
                a4.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])

            else:
                a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a3.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a4.set_title(f"{amp} (BAD)", fontsize=titleFontSize)

        f.suptitle("PTC from covariances as in Astier+19 \n Fit: Eq. 20, Astier+19",
                   fontsize=supTitleFontSize)
        pdfPages.savefig(f)
        f2.suptitle("PTC from covariances as in Astier+19 (log-log) \n Fit: Eq. 20, Astier+19",
                    fontsize=supTitleFontSize)
        pdfPages.savefig(f2)
        fResCov00.suptitle("Residuals (data-model) for Cov00 (Var)", fontsize=supTitleFontSize)
        pdfPages.savefig(fResCov00)
        fCov01.suptitle("Cov01 as in Astier+19 (nearest parallel neighbor covariance) \n" +
                        " Fit: Eq. 20, Astier+19", fontsize=supTitleFontSize)
        pdfPages.savefig(fCov01)
        fCov10.suptitle("Cov10 as in Astier+19 (nearest serial neighbor covariance) \n" +
                        "Fit: Eq. 20, Astier+19", fontsize=supTitleFontSize)
        pdfPages.savefig(fCov10)

        return

    def plotNormalizedCovariances(self, covFits, covFitsNoB, i, j, pdfPages, offset=0.004,
                                  numberOfBins=10, plotData=True, topPlot=False, log=None):
        """Plot C_ij/mu vs mu.

        Figs. 8, 10, and 11 of Astier+19

        Parameters
        ----------
        covFits: `dict`
            Dictionary of CovFit objects, with amp names as keys.

        covFitsNoB: `dict`
            Dictionary of CovFit objects, with amp names as keys (b=0 in Eq. 20 of Astier+19).

        i : `int`
            Covariance lag.

        j : `int`
            Covariance lag.

        pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.

        offset : `float`, optional
            Constant offset factor to plot covariances in same panel (so they don't overlap).

        numberOfBins : `int`, optional
            Number of bins for top and bottom plot.

        plotData : `bool`, optional
            Plot the data points?

        topPlot : `bool`, optional
            Plot the top plot with the covariances, and the bottom plot with the model residuals?

        log : `lsst.log.Log`, optional
            Logger to handle messages.
        """
        lchi2, la, lb, lcov = [], [], [], []

        if (not topPlot):
            fig = plt.figure(figsize=(8, 10))
            gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])
            gs.update(hspace=0)
            ax0 = plt.subplot(gs[0])
            plt.setp(ax0.get_xticklabels(), visible=False)
        else:
            fig = plt.figure(figsize=(8, 8))
            ax0 = plt.subplot(111)
            ax0.ticklabel_format(style='sci', axis='x', scilimits=(0, 0))
        ax0.tick_params(axis='both', labelsize='x-large')
        mue, rese, wce = [], [], []
        mueNoB, reseNoB, wceNoB = [], [], []
        for counter, (amp, fit) in enumerate(covFits.items()):
            mu, cov, model, weightCov, _ = fit.getFitData(i, j, divideByMu=True, returnMasked=True)
            wres = (cov - model)*weightCov
            chi2 = ((wres*wres).sum())/(len(mu) - 3)
            chi2bin = 0
            mue += list(mu)
            rese += list(cov - model)
            wce += list(weightCov)

            fitNoB = covFitsNoB[amp]
            (muNoB, covNoB, modelNoB,
                weightCovNoB, _) = fitNoB.getFitData(i, j, divideByMu=True, returnMasked=True)
            mueNoB += list(muNoB)
            reseNoB += list(covNoB - modelNoB)
            wceNoB += list(weightCovNoB)

            # The corresponding fit
            fit_curve, = plt.plot(mu, model + counter*offset, '-', linewidth=4.0)
            # Binned plot (numberOfBins = len(mu) would mean no binning)
            gind = self.indexForBins(mu, numberOfBins)

            xb, yb, wyb, sigyb = self.binData(mu, cov, gind, weightCov)
            chi2bin = (sigyb*wyb).mean()  # chi2 of enforcing the same value in each bin
            plt.errorbar(xb, yb + counter*offset, yerr=sigyb, marker='o', linestyle='none', markersize=6.5,
                         color=fit_curve.get_color(), label=f"{amp} (N: {len(mu)})")
            # Plot the data
            if plotData:
                points, = plt.plot(mu, cov + counter*offset, '.', color=fit_curve.get_color())
            plt.legend(loc='upper right', fontsize=8)
            aij = fit.getA()[i, j]
            bij = fit.getB()[i, j]
            la.append(aij)
            lb.append(bij)
            if fit.getACov() is not None:
                lcov.append(fit.getACov()[i, j, i, j])
            else:
                lcov.append(np.nan)
            lchi2.append(chi2)
            if log:
                log.info('Cov%d%d %s: slope %g b %g chi2 %f chi2bin %f' % (i, j, amp, aij, bij,
                                                                           chi2, chi2bin))
        # End loop on amps
        la = np.array(la)
        lb = np.array(lb)
        lcov = np.array(lcov)
        lchi2 = np.array(lchi2)
        mue = np.array(mue)
        rese = np.array(rese)
        wce = np.array(wce)
        mueNoB = np.array(mueNoB)
        reseNoB = np.array(reseNoB)
        wceNoB = np.array(wceNoB)

        plt.xlabel(r"$\mu (el)$", fontsize='x-large')
        plt.ylabel(r"$Cov{%d%d}/\mu + Cst (el)$" % (i, j), fontsize='x-large')
        if (not topPlot):
            gind = self.indexForBins(mue, numberOfBins)
            xb, yb, wyb, sigyb = self.binData(mue, rese, gind, wce)

            ax1 = plt.subplot(gs[1], sharex=ax0)
            ax1.errorbar(xb, yb, yerr=sigyb, marker='o', linestyle='none', label='Full fit')
            gindNoB = self.indexForBins(mueNoB, numberOfBins)
            xb2, yb2, wyb2, sigyb2 = self.binData(mueNoB, reseNoB, gindNoB, wceNoB)

            ax1.errorbar(xb2, yb2, yerr=sigyb2, marker='o', linestyle='none', label='b = 0')
            ax1.tick_params(axis='both', labelsize='x-large')
            plt.legend(loc='upper left', fontsize='large')
            # Horizontal line at zero
            plt.plot(xb, [0]*len(xb), '--', color='k')
            plt.ticklabel_format(style='sci', axis='x', scilimits=(0, 0))
            plt.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
            plt.xlabel(r'$\mu (el)$', fontsize='x-large')
            plt.ylabel(r'$Cov{%d%d}/\mu$ - model (el)' % (i, j), fontsize='x-large')
        plt.tight_layout()
        plt.suptitle(f"Nbins: {numberOfBins}")
        # Avoid overlapping y labels
        fig.canvas.draw()
        labels0 = [item.get_text() for item in ax0.get_yticklabels()]
        labels0[0] = ''
        ax0.set_yticklabels(labels0)
        pdfPages.savefig(fig)

        return

    @staticmethod
    def plot_a_b(covFits, pdfPages, bRange=3):
        """Fig. 12 of Astier+19

        Color display of the fitted a and b matrices, averaged over amplifiers.

        Parameters
        ----------
        covFits: `dict`
            Dictionary of CovFit objects, with amp names as keys.

        pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.

        bRange : `int`
            Maximum lag displayed for the b matrix.
        """
        a, b = [], []
        for amp, fit in covFits.items():
            a.append(fit.getA())
            b.append(fit.getB())
        a = np.array(a).mean(axis=0)
        b = np.array(b).mean(axis=0)
        fig = plt.figure(figsize=(7, 11))
        ax0 = fig.add_subplot(2, 1, 1)
        im0 = ax0.imshow(np.abs(a.transpose()), origin='lower', norm=mpl.colors.LogNorm())
        ax0.tick_params(axis='both', labelsize='x-large')
        ax0.set_title(r'$|a|$', fontsize='x-large')
        ax0.xaxis.set_ticks_position('bottom')
        cb0 = plt.colorbar(im0)
        cb0.ax.tick_params(labelsize='x-large')

        ax1 = fig.add_subplot(2, 1, 2)
        ax1.tick_params(axis='both', labelsize='x-large')
        ax1.yaxis.set_major_locator(MaxNLocator(integer=True))
        ax1.xaxis.set_major_locator(MaxNLocator(integer=True))
        im1 = ax1.imshow(1e6*b[:bRange, :bRange].transpose(), origin='lower')
        cb1 = plt.colorbar(im1)
        cb1.ax.tick_params(labelsize='x-large')
        ax1.set_title(r'$b \times 10^6$', fontsize='x-large')
        ax1.xaxis.set_ticks_position('bottom')
        plt.tight_layout()
        pdfPages.savefig(fig)

        return

    @staticmethod
    def ab_vs_dist(covFits, pdfPages, bRange=4):
        """Fig. 13 of Astier+19.

        Values of the fitted a and b coefficients, averaged over amplifiers, as a function of lag distance.

        Parameters
        ----------
        covFits: `dict`
            Dictionary of CovFit objects, with amp names as keys.

        pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.

        bRange : `int`
            Maximum lag for the b coefficients.
        """
        a = np.array([f.getA() for f in covFits.values()])
        y = a.mean(axis=0)
        sy = a.std(axis=0)/np.sqrt(len(covFits))
        i, j = np.indices(y.shape)
        upper = (i >= j).ravel()
        r = np.sqrt(i**2 + j**2).ravel()
        y = y.ravel()
        sy = sy.ravel()
        fig = plt.figure(figsize=(6, 9))
        ax = fig.add_subplot(211)
        ax.set_xlim([0.5, r.max() + 1])
        ax.errorbar(r[upper], y[upper], yerr=sy[upper], marker='o', linestyle='none', color='b',
                    label='$i>=j$')
        ax.errorbar(r[~upper], y[~upper], yerr=sy[~upper], marker='o', linestyle='none', color='r',
                    label='$i<j$')
        ax.legend(loc='upper center', fontsize='x-large')
        ax.set_xlabel(r'$\sqrt{i^2+j^2}$', fontsize='x-large')
        ax.set_ylabel(r'$a_{ij}$', fontsize='x-large')
        ax.set_yscale('log')
        ax.tick_params(axis='both', labelsize='x-large')

        axb = fig.add_subplot(212)
        b = np.array([f.getB() for f in covFits.values()])
        yb = b.mean(axis=0)
        syb = b.std(axis=0)/np.sqrt(len(covFits))
        ib, jb = np.indices(yb.shape)
        upper = (ib >= jb).ravel()
        rb = np.sqrt(ib**2 + jb**2).ravel()
        yb = yb.ravel()
        syb = syb.ravel()
        xmin = -0.2
        xmax = bRange
        axb.set_xlim([xmin, xmax + 0.2])
        cutu = (rb > xmin) & (rb < xmax) & (upper)
        cutl = (rb > xmin) & (rb < xmax) & (~upper)
        axb.errorbar(rb[cutu], yb[cutu], yerr=syb[cutu], marker='o', linestyle='none', color='b',
                     label='$i>=j$')
        axb.errorbar(rb[cutl], yb[cutl], yerr=syb[cutl], marker='o', linestyle='none', color='r',
                     label='$i<j$')
        plt.legend(loc='upper center', fontsize='x-large')
        axb.set_xlabel(r'$\sqrt{i^2+j^2}$', fontsize='x-large')
        axb.set_ylabel(r'$b_{ij}$', fontsize='x-large')
        axb.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
        axb.tick_params(axis='both', labelsize='x-large')
        plt.tight_layout()
        pdfPages.savefig(fig)

        return

    @staticmethod
    def plotAcoeffsSum(covFits, pdfPages):
        """Fig. 14 of Astier+19

        Cumulative sum of a_ij as a function of maximum separation. This plot displays the average over
        channels.

        Parameters
        ----------
        covFits: `dict`
            Dictionary of CovFit objects, with amp names as keys.

        pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.
        """
        a, b = [], []
        for amp, fit in covFits.items():
            a.append(fit.getA())
            b.append(fit.getB())
        a = np.array(a).mean(axis=0)
        b = np.array(b).mean(axis=0)
        fig = plt.figure(figsize=(7, 6))
        w = 4*np.ones_like(a)
        w[0, 1:] = 2
        w[1:, 0] = 2
        w[0, 0] = 1
        wa = w*a
        indices = range(1, a.shape[0] + 1)
        sums = np.array([wa[0:n, 0:n].sum() for n in indices])
        ax = plt.subplot(111)
        ax.plot(indices, sums/sums[0], 'o', color='b')
        ax.set_yscale('log')
        ax.set_xlim(indices[0] - 0.5, indices[-1] + 0.5)
        ax.set_ylim(None, 1.2)
        ax.set_ylabel(r'$[\sum_{|i|<n\ &\ |j|<n} a_{ij}] / |a_{00}|$', fontsize='x-large')
        ax.set_xlabel('n', fontsize='x-large')
        ax.tick_params(axis='both', labelsize='x-large')
        plt.tight_layout()
        pdfPages.savefig(fig)

        return
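
    # Note on the weights in plotAcoeffsSum above (an interpretation, not from
    # the original source): the a_ij are measured on a single quadrant of lags,
    # so when summing over the full lag plane each off-axis coefficient appears
    # 4 times, each on-axis coefficient (i=0 or j=0) twice, and a_00 once,
    # hence the weights 4, 2, and 1.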

    @staticmethod
    def plotRelativeBiasACoeffs(covFits, covFitsNoB, signalElectrons, pdfPages, maxr=None):
        """Fig. 15 in Astier+19.

        Illustrates systematic bias from estimating 'a'
        coefficients from the slope of correlations as opposed to the
        full model in Astier+19.

        Parameters
        ----------
        covFits : `dict`
            Dictionary of CovFit objects, with amp names as keys.

        covFitsNoB : `dict`
            Dictionary of CovFit objects, with amp names as keys (b=0 in Eq. 20 of Astier+19).

        signalElectrons : `float`
            Signal at which to evaluate the a_ij coefficients.

        pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.

        maxr : `int`, optional
            Maximum lag.
        """
        fig = plt.figure(figsize=(7, 11))
        title = [f"'a' relative bias at {signalElectrons} e", "'a' relative bias (b=0)"]
        data = [covFits, covFitsNoB]

        for k in range(2):
            diffs = []
            amean = []
            for fit in data[k].values():
                if fit is None:
                    continue
                aOld = computeApproximateAcoeffs(fit, signalElectrons)
                a = fit.getA()
                amean.append(a)
                diffs.append((aOld - a))
            amean = np.array(amean).mean(axis=0)
            diff = np.array(diffs).mean(axis=0)
            diff = diff/amean
            # The difference should be close to zero
            diff[0, 0] = 0
            if maxr is None:
                maxr = diff.shape[0]
            diff = diff[:maxr, :maxr]
            ax0 = fig.add_subplot(2, 1, k+1)
            im0 = ax0.imshow(diff.transpose(), origin='lower')
            ax0.yaxis.set_major_locator(MaxNLocator(integer=True))
            ax0.xaxis.set_major_locator(MaxNLocator(integer=True))
            ax0.tick_params(axis='both', labelsize='x-large')
            plt.colorbar(im0)
            ax0.set_title(title[k])

        plt.tight_layout()
        pdfPages.savefig(fig)

        return

    def _plotStandardPtc(self, dataset, ptcFitType, pdfPages):
        """Plot the PTC (linear and log-log) and variance/signal vs signal, per amplifier.

        Parameters
        ----------
        dataset : `lsst.cp.pipe.ptc.PhotonTransferCurveDataset`
            The dataset containing the means, variances, exposure times, and mask.

        ptcFitType : `str`
            Type of the model fit to the PTC. Options: 'EXPAPPROXIMATION' or 'POLYNOMIAL'.

        pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.
        """
        if ptcFitType == 'EXPAPPROXIMATION':
            ptcFunc = funcAstier
            stringTitle = (r"Var = $\frac{1}{2g^2a_{00}}(\exp (2a_{00} \mu g) - 1) + \frac{n_{00}}{g^2}$ ")
        elif ptcFitType == 'POLYNOMIAL':
            ptcFunc = funcPolynomial
            # All amps share the same polynomial degree; take it from the first entry.
            deg = len(next(iter(dataset.ptcFitPars.values()))) - 1
            stringTitle = r"Polynomial (degree: %g)" % (deg)
        else:
            raise RuntimeError(f"The input dataset had an invalid dataset.ptcFitType: {ptcFitType}. \n" +
                               "Options: 'EXPAPPROXIMATION' or 'POLYNOMIAL'.")

        legendFontSize = 8
        labelFontSize = 8
        titleFontSize = 9
        supTitleFontSize = 18
        markerSize = 25

        # General determination of the size of the plot grid
        nAmps = len(dataset.ampNames)
        if nAmps == 2:
            nRows, nCols = 2, 1
        else:
            nRows = np.sqrt(nAmps)
            mantissa, _ = np.modf(nRows)
            if mantissa > 0:
                nRows = int(nRows) + 1
                nCols = nRows
            else:
                nRows = int(nRows)
                nCols = nRows

        f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        f3, ax3 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))

        for i, (amp, a, a2, a3) in enumerate(zip(dataset.ampNames, ax.flatten(), ax2.flatten(),
                                                 ax3.flatten())):
            meanVecOriginal = np.array(dataset.rawMeans[amp])
            varVecOriginal = np.array(dataset.rawVars[amp])
            mask = dataset.visitMask[amp]
            if not len(mask):  # Empty if the whole amp is bad
                a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a3.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                continue
            meanVecFinal = meanVecOriginal[mask]
            varVecFinal = varVecOriginal[mask]
            meanVecOutliers = meanVecOriginal[np.invert(mask)]
            varVecOutliers = varVecOriginal[np.invert(mask)]
            pars, parsErr = dataset.ptcFitPars[amp], dataset.ptcFitParsError[amp]
            ptcRedChi2 = dataset.ptcFitReducedChiSquared[amp]
            if ptcFitType == 'EXPAPPROXIMATION':
                if len(meanVecFinal):
                    ptcA00, ptcA00error = pars[0], parsErr[0]
                    ptcGain, ptcGainError = pars[1], parsErr[1]
                    ptcNoise = np.sqrt(pars[2])  # pars[2] is in (e-)^2
                    ptcNoiseAdu = ptcNoise*(1./ptcGain)
                    ptcNoiseError = 0.5*(parsErr[2]/np.fabs(pars[2]))*np.sqrt(np.fabs(pars[2]))
                    stringLegend = (f"a00: {ptcA00:.2e}+/-{ptcA00error:.2e} 1/e"
                                    f"\nGain: {ptcGain:.4}+/-{ptcGainError:.2e} e/DN"
                                    f"\nNoise: {ptcNoise:.4}+/-{ptcNoiseError:.2e} e\n"
                                    r"$\chi^2_{\rm{red}}$: " + f"{ptcRedChi2:.4}")

            if ptcFitType == 'POLYNOMIAL':
                if len(meanVecFinal):
                    ptcGain, ptcGainError = 1./pars[1], np.fabs(1./pars[1])*(parsErr[1]/pars[1])
                    ptcNoiseAdu = np.sqrt(pars[0])  # pars[0] is in ADU^2
                    ptcNoise = ptcNoiseAdu*ptcGain
                    ptcNoiseError = (0.5*(parsErr[0]/np.fabs(pars[0]))*(np.sqrt(np.fabs(pars[0]))))*ptcGain
                    stringLegend = (f"Gain: {ptcGain:.4}+/-{ptcGainError:.2e} e/DN\n"
                                    f"Noise: {ptcNoise:.4}+/-{ptcNoiseError:.2e} e\n"
                                    r"$\chi^2_{\rm{red}}$: " + f"{ptcRedChi2:.4}")

            a.set_xlabel(r'Mean signal ($\mu$, DN)', fontsize=labelFontSize)
            a.set_ylabel(r'Variance (DN$^2$)', fontsize=labelFontSize)
            a.tick_params(labelsize=11)
            a.set_xscale('linear')
            a.set_yscale('linear')

            a2.set_xlabel(r'Mean Signal ($\mu$, DN)', fontsize=labelFontSize)
            a2.set_ylabel(r'Variance (DN$^2$)', fontsize=labelFontSize)
            a2.tick_params(labelsize=11)
            a2.set_xscale('log')
            a2.set_yscale('log')

            a3.set_xlabel(r'Mean signal ($\mu$, DN)', fontsize=labelFontSize)
            a3.set_ylabel(r'Variance/$\mu$ (DN)', fontsize=labelFontSize)
            a3.tick_params(labelsize=11)
            a3.set_xscale('linear')
            a3.set_yscale('linear')

            minMeanVecFinal = np.min(meanVecFinal)
            maxMeanVecFinal = np.max(meanVecFinal)
            meanVecFit = np.linspace(minMeanVecFinal, maxMeanVecFinal, 100*len(meanVecFinal))
            minMeanVecOriginal = np.min(meanVecOriginal)
            maxMeanVecOriginal = np.max(meanVecOriginal)
            deltaXlim = maxMeanVecOriginal - minMeanVecOriginal

            a.plot(meanVecFit, ptcFunc(pars, meanVecFit), color='red')
            a.plot(meanVecFinal, ptcNoiseAdu**2 + (1./ptcGain)*meanVecFinal, color='green',
                   linestyle='--')
            a.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
            a.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
            a.text(0.03, 0.7, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
            a.set_title(amp, fontsize=titleFontSize)
            a.set_xlim([minMeanVecOriginal - 0.2*deltaXlim, maxMeanVecOriginal + 0.2*deltaXlim])

            # Same, but in log-scale
            a2.plot(meanVecFit, ptcFunc(pars, meanVecFit), color='red')
            a2.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
            a2.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
            a2.text(0.03, 0.7, stringLegend, transform=a2.transAxes, fontsize=legendFontSize)
            a2.set_title(amp, fontsize=titleFontSize)
            a2.set_xlim([minMeanVecOriginal, maxMeanVecOriginal])

            # Var/mu vs mu
            a3.plot(meanVecFit, ptcFunc(pars, meanVecFit)/meanVecFit, color='red')
            a3.scatter(meanVecFinal, varVecFinal/meanVecFinal, c='blue', marker='o', s=markerSize)
            a3.scatter(meanVecOutliers, varVecOutliers/meanVecOutliers, c='magenta', marker='s',
                       s=markerSize)
            a3.text(0.2, 0.65, stringLegend, transform=a3.transAxes, fontsize=legendFontSize)
            a3.set_title(amp, fontsize=titleFontSize)
            a3.set_xlim([minMeanVecOriginal - 0.2*deltaXlim, maxMeanVecOriginal + 0.2*deltaXlim])

        f.suptitle("PTC \n Fit: " + stringTitle, fontsize=supTitleFontSize)
        pdfPages.savefig(f)
        f2.suptitle("PTC (log-log)", fontsize=supTitleFontSize)
        pdfPages.savefig(f2)
        f3.suptitle(r"Var/$\mu$", fontsize=supTitleFontSize)
        pdfPages.savefig(f3)

        return

    def _plotLinearizer(self, dataset, linearizer, pdfPages):
        """Plot linearity and linearity residual per amplifier.

        Parameters
        ----------
        dataset : `lsst.cp.pipe.ptc.PhotonTransferCurveDataset`
            The dataset containing the means, variances, exposure times, and mask.

        linearizer : `lsst.ip.isr.Linearizer`
            Linearizer object.

        pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.
        """
        legendFontSize = 7
        labelFontSize = 7
        titleFontSize = 9
        supTitleFontSize = 18

        # General determination of the size of the plot grid
        nAmps = len(dataset.ampNames)
        if nAmps == 2:
            nRows, nCols = 2, 1
        else:
            nRows = np.sqrt(nAmps)
            mantissa, _ = np.modf(nRows)
            if mantissa > 0:
                nRows = int(nRows) + 1
                nCols = nRows
            else:
                nRows = int(nRows)
                nCols = nRows

        # Plot mean vs time (f), and fractional nonlinearity residuals (f2)
        f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        for i, (amp, a, a2) in enumerate(zip(dataset.ampNames, ax.flatten(), ax2.flatten())):
            mask = dataset.visitMask[amp]
            if not len(mask):
                a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                continue
            meanVecFinal = np.array(dataset.rawMeans[amp])[mask]
            timeVecFinal = np.array(dataset.rawExpTimes[amp])[mask]

            a.set_xlabel('Time (sec)', fontsize=labelFontSize)
            a.set_ylabel(r'Mean signal ($\mu$, DN)', fontsize=labelFontSize)
            a.tick_params(labelsize=labelFontSize)
            a.set_xscale('linear')
            a.set_yscale('linear')

            a2.axhline(y=0, color='k')
            a2.axvline(x=0, color='k', linestyle='-')
            a2.set_xlabel(r'Mean signal ($\mu$, DN)', fontsize=labelFontSize)
            a2.set_ylabel('Fractional nonlinearity (%)', fontsize=labelFontSize)
            a2.tick_params(labelsize=labelFontSize)
            a2.set_xscale('linear')
            a2.set_yscale('linear')

            pars, parsErr = linearizer.fitParams[amp], linearizer.fitParamsErr[amp]
            k0, k0Error = pars[0], parsErr[0]
            k1, k1Error = pars[1], parsErr[1]
            k2, k2Error = pars[2], parsErr[2]
            linRedChi2 = linearizer.fitChiSq[amp]
            stringLegend = (f"k0: {k0:.4}+/-{k0Error:.2e} DN\nk1: {k1:.4}+/-{k1Error:.2e} DN/t"
                            f"\nk2: {k2:.2e}+/-{k2Error:.2e} DN/t^2\n"
                            r"$\chi^2_{\rm{red}}$: " + f"{linRedChi2:.4}")
            a.scatter(timeVecFinal, meanVecFinal)
            a.plot(timeVecFinal, funcPolynomial(pars, timeVecFinal), color='red')
            a.text(0.03, 0.75, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
            a.set_title(f"{amp}", fontsize=titleFontSize)

            linearPart = k0 + k1*timeVecFinal
            fracLinRes = 100*(linearPart - meanVecFinal)/linearPart
            a2.plot(meanVecFinal, fracLinRes, c='g')
            a2.set_title(f"{amp}", fontsize=titleFontSize)

        f.suptitle("Linearity \n Fit: Polynomial (degree: %g)"
                   % (len(pars) - 1),
                   fontsize=supTitleFontSize)
        f2.suptitle(r"Fractional NL residual" + "\n" +
                    r"$100\times \frac{(k_0 + k_1*Time-\mu)}{k_0+k_1*Time}$",
                    fontsize=supTitleFontSize)
        pdfPages.savefig(f)
        pdfPages.savefig(f2)

    @staticmethod
    def findGroups(x, maxDiff):
        """Group data into bins: a sorted value joins the current group while it stays
        within maxDiff of the group's running mean.

        Parameters
        ----------
        x: `list`
            Data to bin.

        maxDiff: `float`
            Maximum distance between a value and the running mean of its group.

        Returns
        -------
        index: `numpy.array`
            Bin index of each datum.
        """
        ix = np.argsort(x)
        xsort = np.sort(x)
        index = np.zeros_like(x, dtype=np.int32)
        xc = xsort[0]
        group = 0
        ng = 1

        for i in range(1, len(ix)):
            xval = xsort[i]
            if (xval - xc < maxDiff):
                # Extend the current group and update its running mean
                xc = (ng*xc + xval)/(ng + 1)
                ng += 1
                index[ix[i]] = group
            else:
                # Start a new group
                group += 1
                ng = 1
                index[ix[i]] = group
                xc = xval

        return index
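
    # Hedged worked example for findGroups (numbers are illustrative only):
    # sorted values are merged into the current group while they stay within
    # maxDiff of the group's running mean.
    #
    #     x = [1.0, 1.2, 5.0, 5.1, 9.0]
    #     PlotPhotonTransferCurveTask.findGroups(x, maxDiff=1.0)
    #     # -> array([0, 0, 1, 1, 2], dtype=int32)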

    @staticmethod
    def indexForBins(x, nBins):
        """Build an index with regular binning. The result can be fed into binData.

        Parameters
        ----------
        x: `numpy.array`
            Data to bin.

        nBins: `int`
            Number of bins.

        Returns
        -------
        np.digitize(x, bins): `numpy.array`
            Bin indices.
        """
        bins = np.linspace(x.min(), x.max() + abs(x.max() * 1e-7), nBins + 1)
        return np.digitize(x, bins)
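
    # Hedged worked example for indexForBins (numbers are illustrative only).
    # The small margin added to x.max() keeps the largest point inside the last
    # bin, so np.digitize returns 1-based indices in the range 1..nBins:
    #
    #     x = np.array([1.0, 2.0, 9.0, 10.0])
    #     PlotPhotonTransferCurveTask.indexForBins(x, nBins=2)
    #     # -> array([1, 1, 2, 2])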

    @staticmethod
    def binData(x, y, binIndex, wy=None):
        """Bin data (usually for display purposes).

        Parameters
        ----------
        x: `numpy.array`
            Data to bin.

        y: `numpy.array`
            Data to bin.

        binIndex: `list`
            Bin number of each datum.

        wy: `numpy.array`, optional
            Inverse rms of each datum to use when averaging (the actual weight is wy**2).

        Returns
        -------
        xbin: `numpy.array`
            Binned data in x.

        ybin: `numpy.array`
            Binned data in y.

        wybin: `numpy.array`
            Binned weights in y, computed from the wy's in each bin.

        sybin: `numpy.array`
            Uncertainty on the bin average, considering actual scatter, and ignoring weights.
        """
        if wy is None:
            wy = np.ones_like(x)
        binIndexSet = set(binIndex)
        w2 = wy*wy
        xw2 = x*w2
        xbin = np.array([xw2[binIndex == i].sum()/w2[binIndex == i].sum() for i in binIndexSet])

        yw2 = y*w2
        ybin = np.array([yw2[binIndex == i].sum()/w2[binIndex == i].sum() for i in binIndexSet])

        wybin = np.sqrt(np.array([w2[binIndex == i].sum() for i in binIndexSet]))
        sybin = np.array([y[binIndex == i].std()/np.sqrt((binIndex == i).sum())
                          for i in binIndexSet])

        return xbin, ybin, wybin, sybin
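

# Hedged usage sketch (illustrative comments only, nothing here is executed):
# indexForBins and binData are typically chained to overplot binned residuals,
# mirroring what plotNormalizedCovariances does internally. The arrays mu,
# residuals, and weights are hypothetical placeholders.
#
#     gind = PlotPhotonTransferCurveTask.indexForBins(mu, 10)
#     xb, yb, wyb, sigyb = PlotPhotonTransferCurveTask.binData(mu, residuals, gind, weights)
#     plt.errorbar(xb, yb, yerr=sigyb, marker='o', linestyle='none')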