Coverage for python/lsst/cp/pipe/plotPtc.py: 6%

1# This file is part of cp_pipe.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
21#
23__all__ = ['PlotPhotonTransferCurveTask']
25import numpy as np
26import matplotlib.pyplot as plt
27import matplotlib as mpl
28from matplotlib import gridspec
29import os
30from matplotlib.backends.backend_pdf import PdfPages
32import lsst.ip.isr as isr
33import lsst.pex.config as pexConfig
34import lsst.pipe.base as pipeBase
35import pickle
37from .utils import (funcAstier, funcPolynomial, NonexistentDatasetTaskDataIdContainer,
38 calculateWeightedReducedChi2)
39from matplotlib.ticker import MaxNLocator
41from .astierCovPtcFit import computeApproximateAcoeffs
44class PlotPhotonTransferCurveTaskConfig(pexConfig.Config):
45 """Config class for photon transfer curve measurement task"""
46 datasetFileName = pexConfig.Field(
47 dtype=str,
48 doc="datasetPtc file name (pkl)",
49 default="",
50 )
51 linearizerFileName = pexConfig.Field(
52 dtype=str,
53 doc="linearizer file name (fits)",
54 default="",
55 )
56 ccdKey = pexConfig.Field(
57 dtype=str,
58 doc="The key by which to pull a detector from a dataId, e.g. 'ccd' or 'detector'.",
59 default='detector',
60 )
61 signalElectronsRelativeA = pexConfig.Field(
62 dtype=float,
63 doc="Signal value for relative systematic bias between different methods of estimating a_ij "
64 "(Fig. 15 of Astier+19).",
65 default=75000,
66 )
67 plotNormalizedCovariancesNumberOfBins = pexConfig.Field(
68 dtype=int,
69 doc="Number of bins in `plotNormalizedCovariancesNumber` function "
70 "(Fig. 8, 10., of Astier+19).",
71 default=10,
72 )
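# Hedged usage sketch (not part of the original module): the fields above can be
# set through a standard Task config override; file names here are placeholders.
#
#     config.datasetFileName = "ptcDataset_det000.pkl"   # pickled PhotonTransferCurveDataset
#     config.linearizerFileName = ""                     # optional linearizer FITS file
#     config.signalElectronsRelativeA = 75000.
#     config.plotNormalizedCovariancesNumberOfBins = 10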
75class PlotPhotonTransferCurveTask(pipeBase.CmdLineTask):
76 """A class to plot the dataset from MeasurePhotonTransferCurveTask.
78 Parameters
79 ----------
81 *args: `list`
82 Positional arguments passed to the Task constructor. None used at this
83 time.
84 **kwargs: `dict`
85 Keyword arguments passed on to the Task constructor. None used at this
86 time.
88 """
90 ConfigClass = PlotPhotonTransferCurveTaskConfig
91 _DefaultName = "plotPhotonTransferCurve"
93 def __init__(self, *args, **kwargs):
94 pipeBase.CmdLineTask.__init__(self, *args, **kwargs)
95 plt.interactive(False) # stop windows popping up when plotting. When headless, use 'agg' backend too
96 self.config.validate()
97 self.config.freeze()
99 @classmethod
100 def _makeArgumentParser(cls):
101 """Augment argument parser for the MeasurePhotonTransferCurveTask."""
102 parser = pipeBase.ArgumentParser(name=cls._DefaultName)
103 parser.add_id_argument("--id", datasetType="photonTransferCurveDataset",
104 ContainerClass=NonexistentDatasetTaskDataIdContainer,
105 help="The ccds to use, e.g. --id ccd=0..100")
106 return parser
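# Hedged sketch of how this CmdLineTask is typically driven (REPO_PATH is a
# placeholder for a Gen2 Butler repository; parseAndRun is inherited from
# pipeBase.CmdLineTask):
#
#     PlotPhotonTransferCurveTask.parseAndRun(
#         args=["REPO_PATH", "--id", "detector=0",
#               "--config", "datasetFileName=ptcDataset_det000.pkl"])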
108 @pipeBase.timeMethod
109 def runDataRef(self, dataRef):
110 """Run the Photon Transfer Curve (PTC) plotting measurement task.
112 Parameters
113 ----------
114 dataRef : `lsst.daf.persistence.ButlerDataRef`
115 Data reference for the detector whose PTC dataset is to be plotted.
116 """
118 datasetFile = self.config.datasetFileName
120 with open(datasetFile, "rb") as f:
121 datasetPtc = pickle.load(f)
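# The pickle is expected to hold the PhotonTransferCurveDataset produced by
# MeasurePhotonTransferCurveTask (see the datasetFileName config field).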
123 dirname = dataRef.getUri(datasetType='cpPipePlotRoot', write=True)
124 if not os.path.exists(dirname):
125 os.makedirs(dirname)
127 detNum = dataRef.dataId[self.config.ccdKey]
128 filename = f"PTC_det{detNum}.pdf"
129 filenameFull = os.path.join(dirname, filename)
131 if self.config.linearizerFileName:
132 linearizer = isr.linearize.Linearizer.readFits(self.config.linearizerFileName)
133 else:
134 linearizer = None
135 self.run(filenameFull, datasetPtc, linearizer=linearizer, log=self.log)
137 return pipeBase.Struct(exitStatus=0)
139 def run(self, filenameFull, datasetPtc, linearizer=None, log=None):
140 """Make the plots for the PTC task"""
141 ptcFitType = datasetPtc.ptcFitType
142 with PdfPages(filenameFull) as pdfPages:
143 if ptcFitType in ["FULLCOVARIANCE", ]:
144 self.covAstierMakeAllPlots(datasetPtc.covariancesFits, datasetPtc.covariancesFitsWithNoB,
145 pdfPages, log=log)
146 elif ptcFitType in ["EXPAPPROXIMATION", "POLYNOMIAL"]:
147 self._plotStandardPtc(datasetPtc, ptcFitType, pdfPages)
148 else:
149 raise RuntimeError(f"The input dataset had an invalid dataset.ptcFitType: {ptcFitType}. \n" +
150 "Options: 'FULLCOVARIANCE', EXPAPPROXIMATION, or 'POLYNOMIAL'.")
151 if linearizer:
152 self._plotLinearizer(datasetPtc, linearizer, pdfPages)
154 return
156 def covAstierMakeAllPlots(self, covFits, covFitsNoB, pdfPages,
157 log=None):
158 """Make plots for MeasurePhotonTransferCurve task when doCovariancesAstier=True.
160 This function calls other functions that mostly reproduce the plots in Astier+19.
161 Most of the code is ported from Pierre Astier's repository https://github.com/PierreAstier/bfptc
163 Parameters
164 ----------
165 covFits: `dict`
166 Dictionary of CovFit objects, with amp names as keys.
168 covFitsNoB: `dict`
169 Dictionary of CovFit objects, with amp names as keys (b=0 in Eq. 20 of Astier+19).
171 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
172 PDF file where the plots will be saved.
174 log : `lsst.log.Log`, optional
175 Logger to handle messages
176 """
177 self.plotCovariances(covFits, pdfPages)
178 self.plotNormalizedCovariances(covFits, covFitsNoB, 0, 0, pdfPages, offset=0.01, topPlot=True,
179 numberOfBins=self.config.plotNormalizedCovariancesNumberOfBins,
180 log=log)
181 self.plotNormalizedCovariances(covFits, covFitsNoB, 0, 1, pdfPages,
182 numberOfBins=self.config.plotNormalizedCovariancesNumberOfBins,
183 log=log)
184 self.plotNormalizedCovariances(covFits, covFitsNoB, 1, 0, pdfPages,
185 numberOfBins=self.config.plotNormalizedCovariancesNumberOfBins,
186 log=log)
187 self.plot_a_b(covFits, pdfPages)
188 self.ab_vs_dist(covFits, pdfPages, bRange=4)
189 self.plotAcoeffsSum(covFits, pdfPages)
190 self.plotRelativeBiasACoeffs(covFits, covFitsNoB, self.config.signalElectronsRelativeA, pdfPages,
191 maxr=4)
193 return
195 @staticmethod
196 def plotCovariances(covFits, pdfPages):
197 """Plot covariances and models: Cov00, Cov10, Cov01.
199 Figs. 6 and 7 of Astier+19
201 Parameters
202 ----------
203 covFits: `dict`
204 Dictionary of CovFit objects, with amp names as keys.
206 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
207 PDF file where the plots will be saved.
208 """
210 legendFontSize = 7
211 labelFontSize = 7
212 titleFontSize = 9
213 supTitleFontSize = 18
214 markerSize = 25
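# General determination of the size of the plot grid: an approximately square
# nRows x nCols layout with one panel per amplifier.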
216 nAmps = len(covFits)
217 if nAmps == 2:
218 nRows, nCols = 2, 1
219 nRows = np.sqrt(nAmps)
220 mantissa, _ = np.modf(nRows)
221 if mantissa > 0:
222 nRows = int(nRows) + 1
223 nCols = nRows
224 else:
225 nRows = int(nRows)
226 nCols = nRows
228 f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
229 f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
230 fResCov00, axResCov00 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row',
231 figsize=(13, 10))
232 fCov01, axCov01 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
233 fCov10, axCov10 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
235 for i, (fitPair, a, a2, aResVar, a3, a4) in enumerate(zip(covFits.items(), ax.flatten(),
236 ax2.flatten(), axResCov00.flatten(),
237 axCov01.flatten(), axCov10.flatten())):
239 amp = fitPair[0]
240 fit = fitPair[1]
242 (meanVecOriginal, varVecOriginal, varVecModelOriginal,
243 weightsOriginal, varMask) = fit.getFitData(0, 0)
244 meanVecFinal, varVecFinal = meanVecOriginal[varMask], varVecOriginal[varMask]
245 varVecModelFinal = varVecModelOriginal[varMask]
246 meanVecOutliers = meanVecOriginal[np.invert(varMask)]
247 varVecOutliers = varVecOriginal[np.invert(varMask)]
248 varWeightsFinal = weightsOriginal[varMask]
249 # Get weighted reduced chi2
250 chi2FullModelVar = calculateWeightedReducedChi2(varVecFinal, varVecModelFinal,
251 varWeightsFinal, len(meanVecFinal), 4)
253 (meanVecOrigCov01, varVecOrigCov01, varVecModelOrigCov01,
254 _, maskCov01) = fit.getFitData(0, 1)
255 meanVecFinalCov01, varVecFinalCov01 = meanVecOrigCov01[maskCov01], varVecOrigCov01[maskCov01]
256 varVecModelFinalCov01 = varVecModelOrigCov01[maskCov01]
257 meanVecOutliersCov01 = meanVecOrigCov01[np.invert(maskCov01)]
258 varVecOutliersCov01 = varVecOrigCov01[np.invert(maskCov01)]
260 (meanVecOrigCov10, varVecOrigCov10, varVecModelOrigCov10,
261 _, maskCov10) = fit.getFitData(1, 0)
262 meanVecFinalCov10, varVecFinalCov10 = meanVecOrigCov10[maskCov10], varVecOrigCov10[maskCov10]
263 varVecModelFinalCov10 = varVecModelOrigCov10[maskCov10]
264 meanVecOutliersCov10 = meanVecOrigCov10[np.invert(maskCov10)]
265 varVecOutliersCov10 = varVecOrigCov10[np.invert(maskCov10)]
267 # quadratic fit for residuals below
268 par2 = np.polyfit(meanVecFinal, varVecFinal, 2, w=varWeightsFinal)
269 varModelFinalQuadratic = np.polyval(par2, meanVecFinal)
270 chi2QuadModelVar = calculateWeightedReducedChi2(varVecFinal, varModelFinalQuadratic,
271 varWeightsFinal, len(meanVecFinal), 3)
273 # fit with no 'b' coefficient (c = a*b in Eq. 20 of Astier+19)
274 fitNoB = fit.copy()
275 fitNoB.params['c'].fix(val=0)
276 fitNoB.fitFullModel()
277 (meanVecFinalNoB, varVecFinalNoB, varVecModelFinalNoB,
278 varWeightsFinalNoB, maskNoB) = fitNoB.getFitData(0, 0, returnMasked=True)
279 chi2FullModelNoBVar = calculateWeightedReducedChi2(varVecFinalNoB, varVecModelFinalNoB,
280 varWeightsFinalNoB, len(meanVecFinalNoB), 3)
282 if len(meanVecFinal): # Empty if the whole amp is bad, for example.
283 stringLegend = (f"Gain: {fit.getGain():.4} e/DN \n" +
284 f"Noise: {fit.getRon():.4} e \n" +
285 r"$a_{00}$: %.3e 1/e"%fit.getA()[0, 0] +
286 "\n" + r"$b_{00}$: %.3e 1/e"%fit.getB()[0, 0])
287 minMeanVecFinal = np.min(meanVecFinal)
288 maxMeanVecFinal = np.max(meanVecFinal)
289 deltaXlim = maxMeanVecFinal - minMeanVecFinal
291 a.set_xlabel(r'Mean signal ($\mu$, DN)', fontsize=labelFontSize)
292 a.set_ylabel(r'Variance (DN$^2$)', fontsize=labelFontSize)
293 a.tick_params(labelsize=11)
294 a.set_xscale('linear')
295 a.set_yscale('linear')
296 a.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
297 a.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
298 a.plot(meanVecFinal, varVecModelFinal, color='red', linestyle='-')
299 a.text(0.03, 0.7, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
300 a.set_title(amp, fontsize=titleFontSize)
301 a.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
303 # Same as above, but in log-scale
304 a2.set_xlabel(r'Mean Signal ($\mu$, DN)', fontsize=labelFontSize)
305 a2.set_ylabel(r'Variance (DN$^2$)', fontsize=labelFontSize)
306 a2.tick_params(labelsize=11)
307 a2.set_xscale('log')
308 a2.set_yscale('log')
309 a2.plot(meanVecFinal, varVecModelFinal, color='red', linestyle='-')
310 a2.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
311 a2.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
312 a2.text(0.03, 0.7, stringLegend, transform=a2.transAxes, fontsize=legendFontSize)
313 a2.set_title(amp, fontsize=titleFontSize)
314 a2.set_xlim([minMeanVecFinal, maxMeanVecFinal])
316 # Residuals var - model
317 aResVar.set_xlabel(r'Mean signal ($\mu$, DN)', fontsize=labelFontSize)
318 aResVar.set_ylabel(r'Residuals (DN$^2$)', fontsize=labelFontSize)
319 aResVar.tick_params(labelsize=11)
320 aResVar.set_xscale('linear')
321 aResVar.set_yscale('linear')
322 aResVar.plot(meanVecFinal, varVecFinal - varVecModelFinal, color='blue', linestyle='-',
323 label=r'Full fit ($\chi_{\rm{red}}^2$: %g)' % chi2FullModelVar)
324 aResVar.plot(meanVecFinal, varVecFinal - varModelFinalQuadratic, color='red', linestyle='-',
325 label=r'Quadratic fit ($\chi_{\rm{red}}^2$: %g)' % chi2QuadModelVar)
326 aResVar.plot(meanVecFinalNoB, varVecFinalNoB - varVecModelFinalNoB, color='green',
327 linestyle='-',
328 label=r'Full fit (b=0) ($\chi_{\rm{red}}^2$: %g)' % chi2FullModelNoBVar)
329 aResVar.axhline(color='black')
330 aResVar.set_title(amp, fontsize=titleFontSize)
331 aResVar.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
332 aResVar.legend(fontsize=7)
334 a3.set_xlabel(r'Mean signal ($\mu$, DN)', fontsize=labelFontSize)
335 a3.set_ylabel(r'Cov01 (DN$^2$)', fontsize=labelFontSize)
336 a3.tick_params(labelsize=11)
337 a3.set_xscale('linear')
338 a3.set_yscale('linear')
339 a3.scatter(meanVecFinalCov01, varVecFinalCov01, c='blue', marker='o', s=markerSize)
340 a3.scatter(meanVecOutliersCov01, varVecOutliersCov01, c='magenta', marker='s', s=markerSize)
341 a3.plot(meanVecFinalCov01, varVecModelFinalCov01, color='red', linestyle='-')
342 a3.set_title(amp, fontsize=titleFontSize)
343 a3.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
345 a4.set_xlabel(r'Mean signal ($\mu$, DN)', fontsize=labelFontSize)
346 a4.set_ylabel(r'Cov10 (DN$^2$)', fontsize=labelFontSize)
347 a4.tick_params(labelsize=11)
348 a4.set_xscale('linear')
349 a4.set_yscale('linear')
350 a4.scatter(meanVecFinalCov10, varVecFinalCov10, c='blue', marker='o', s=markerSize)
351 a4.scatter(meanVecOutliersCov10, varVecOutliersCov10, c='magenta', marker='s', s=markerSize)
352 a4.plot(meanVecFinalCov10, varVecModelFinalCov10, color='red', linestyle='-')
353 a4.set_title(amp, fontsize=titleFontSize)
354 a4.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
356 else:
357 a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
358 a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
359 a3.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
360 a4.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
362 f.suptitle("PTC from covariances as in Astier+19 \n Fit: Eq. 20, Astier+19",
363 fontsize=supTitleFontSize)
364 pdfPages.savefig(f)
365 f2.suptitle("PTC from covariances as in Astier+19 (log-log) \n Fit: Eq. 20, Astier+19",
366 fontsize=supTitleFontSize)
367 pdfPages.savefig(f2)
368 fResCov00.suptitle("Residuals (data - model) for Cov00 (Var)", fontsize=supTitleFontSize)
369 pdfPages.savefig(fResCov00)
370 fCov01.suptitle("Cov01 as in Astier+19 (nearest parallel neighbor covariance) \n" +
371 " Fit: Eq. 20, Astier+19", fontsize=supTitleFontSize)
372 pdfPages.savefig(fCov01)
373 fCov10.suptitle("Cov10 as in Astier+19 (nearest serial neighbor covariance) \n" +
374 "Fit: Eq. 20, Astier+19", fontsize=supTitleFontSize)
375 pdfPages.savefig(fCov10)
377 return
379 def plotNormalizedCovariances(self, covFits, covFitsNoB, i, j, pdfPages, offset=0.004,
380 numberOfBins=10, plotData=True, topPlot=False, log=None):
381 """Plot C_ij/mu vs mu.
383 Figs. 8, 10, and 11 of Astier+19
385 Parameters
386 ----------
387 covFits: `dict`
388 Dictionary of CovFit objects, with amp names as keys.
390 covFitsNoB: `dict`
391 Dictionary of CovFit objects, with amp names as keys (b=0 in Eq. 20 of Astier+19).
393 i : `int`
394 Covariance lag.
396 j : `int`
397 Covariance lag.
399 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
400 PDF file where the plots will be saved.
402 offset : `float`, optional
403 Constant vertical offset added per amplifier so the curves don't overlap in the same panel.
405 numberOfBins : `int`, optional
406 Number of bins for top and bottom plot.
408 plotData : `bool`, optional
409 Plot the data points?
411 topPlot : `bool`, optional
412 If `True`, plot only the single panel with the covariances; otherwise add a bottom panel with the model residuals.
414 log : `lsst.log.Log`, optional
415 Logger to handle messages.
416 """
418 lchi2, la, lb, lcov = [], [], [], []
420 if (not topPlot):
421 fig = plt.figure(figsize=(8, 10))
422 gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])
423 gs.update(hspace=0)
424 ax0 = plt.subplot(gs[0])
425 plt.setp(ax0.get_xticklabels(), visible=False)
426 else:
427 fig = plt.figure(figsize=(8, 8))
428 ax0 = plt.subplot(111)
429 ax0.ticklabel_format(style='sci', axis='x', scilimits=(0, 0))
430 ax0.tick_params(axis='both', labelsize='x-large')
431 mue, rese, wce = [], [], []
432 mueNoB, reseNoB, wceNoB = [], [], []
433 for counter, (amp, fit) in enumerate(covFits.items()):
434 mu, cov, model, weightCov, _ = fit.getFitData(i, j, divideByMu=True, returnMasked=True)
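# Weighted residuals for this lag and the reduced chi2 of the fit
# (normalised by len(mu) - 3).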
435 wres = (cov-model)*weightCov
436 chi2 = ((wres*wres).sum())/(len(mu)-3)
437 chi2bin = 0
438 mue += list(mu)
439 rese += list(cov - model)
440 wce += list(weightCov)
442 fitNoB = covFitsNoB[amp]
443 (muNoB, covNoB, modelNoB,
444 weightCovNoB, _) = fitNoB.getFitData(i, j, divideByMu=True, returnMasked=True)
445 mueNoB += list(muNoB)
446 reseNoB += list(covNoB - modelNoB)
447 wceNoB += list(weightCovNoB)
449 # the corresponding fit
450 fit_curve, = plt.plot(mu, model + counter*offset, '-', linewidth=4.0)
451 # bin plot. len(mu) = no binning
452 gind = self.indexForBins(mu, numberOfBins)
454 xb, yb, wyb, sigyb = self.binData(mu, cov, gind, weightCov)
455 chi2bin = (sigyb*wyb).mean() # chi2 of enforcing the same value in each bin
456 plt.errorbar(xb, yb+counter*offset, yerr=sigyb, marker='o', linestyle='none', markersize=6.5,
457 color=fit_curve.get_color(), label=f"{amp} (N: {len(mu)})")
458 # plot the data
459 if plotData:
460 points, = plt.plot(mu, cov + counter*offset, '.', color=fit_curve.get_color())
461 plt.legend(loc='upper right', fontsize=8)
462 aij = fit.getA()[i, j]
463 bij = fit.getB()[i, j]
464 la.append(aij)
465 lb.append(bij)
466 if fit.getACov() is not None:
467 lcov.append(fit.getACov()[i, j, i, j])
468 else:
469 lcov.append(np.nan)
470 lchi2.append(chi2)
471 log.info('Cov%d%d %s: slope %g b %g chi2 %f chi2bin %f'%(i, j, amp, aij, bij, chi2, chi2bin))
472 # end loop on amps
473 la = np.array(la)
474 lb = np.array(lb)
475 lcov = np.array(lcov)
476 lchi2 = np.array(lchi2)
477 mue = np.array(mue)
478 rese = np.array(rese)
479 wce = np.array(wce)
480 mueNoB = np.array(mueNoB)
481 reseNoB = np.array(reseNoB)
482 wceNoB = np.array(wceNoB)
484 plt.xlabel(r"$\mu (el)$", fontsize='x-large')
485 plt.ylabel(r"$Cov{%d%d}/\mu + Cst (el)$"%(i, j), fontsize='x-large')
486 if (not topPlot):
487 gind = self.indexForBins(mue, numberOfBins)
488 xb, yb, wyb, sigyb = self.binData(mue, rese, gind, wce)
490 ax1 = plt.subplot(gs[1], sharex=ax0)
491 ax1.errorbar(xb, yb, yerr=sigyb, marker='o', linestyle='none', label='Full fit')
492 gindNoB = self.indexForBins(mueNoB, numberOfBins)
493 xb2, yb2, wyb2, sigyb2 = self.binData(mueNoB, reseNoB, gindNoB, wceNoB)
495 ax1.errorbar(xb2, yb2, yerr=sigyb2, marker='o', linestyle='none', label='b = 0')
496 ax1.tick_params(axis='both', labelsize='x-large')
497 plt.legend(loc='upper left', fontsize='large')
498 # horizontal line at zero
499 plt.plot(xb, [0]*len(xb), '--', color='k')
500 plt.ticklabel_format(style='sci', axis='x', scilimits=(0, 0))
501 plt.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
502 plt.xlabel(r'$\mu (el)$', fontsize='x-large')
503 plt.ylabel(r'$Cov{%d%d}/\mu$ -model (el)'%(i, j), fontsize='x-large')
504 plt.tight_layout()
505 plt.suptitle(f"Nbins: {numberOfBins}")
506 # overlapping y labels:
507 fig.canvas.draw()
508 labels0 = [item.get_text() for item in ax0.get_yticklabels()]
509 labels0[0] = u''
510 ax0.set_yticklabels(labels0)
511 pdfPages.savefig(fig)
513 return
515 @staticmethod
516 def plot_a_b(covFits, pdfPages, bRange=3):
517 """Fig. 12 of Astier+19
519 Color display of the fitted a and b arrays, averaged over channels.
521 Parameters
522 ----------
523 covFits: `dict`
524 Dictionary of CovFit objects, with amp names as keys.
526 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
527 PDF file where the plots will be saved.
529 bRange : `int`
530 Maximum lag for b arrays.
531 """
532 a, b = [], []
533 for amp, fit in covFits.items():
534 a.append(fit.getA())
535 b.append(fit.getB())
536 a = np.array(a).mean(axis=0)
537 b = np.array(b).mean(axis=0)
538 fig = plt.figure(figsize=(7, 11))
539 ax0 = fig.add_subplot(2, 1, 1)
540 im0 = ax0.imshow(np.abs(a.transpose()), origin='lower', norm=mpl.colors.LogNorm())
541 ax0.tick_params(axis='both', labelsize='x-large')
542 ax0.set_title(r'$|a|$', fontsize='x-large')
543 ax0.xaxis.set_ticks_position('bottom')
544 cb0 = plt.colorbar(im0)
545 cb0.ax.tick_params(labelsize='x-large')
547 ax1 = fig.add_subplot(2, 1, 2)
548 ax1.tick_params(axis='both', labelsize='x-large')
549 ax1.yaxis.set_major_locator(MaxNLocator(integer=True))
550 ax1.xaxis.set_major_locator(MaxNLocator(integer=True))
551 im1 = ax1.imshow(1e6*b[:bRange, :bRange].transpose(), origin='lower')
552 cb1 = plt.colorbar(im1)
553 cb1.ax.tick_params(labelsize='x-large')
554 ax1.set_title(r'$b \times 10^6$', fontsize='x-large')
555 ax1.xaxis.set_ticks_position('bottom')
556 plt.tight_layout()
557 pdfPages.savefig(fig)
559 return
561 @staticmethod
562 def ab_vs_dist(covFits, pdfPages, bRange=4):
563 """Fig. 13 of Astier+19.
565 Values of the fitted a and b arrays, averaged over amplifiers, as a function of distance.
567 Parameters
568 ----------
569 covFits: `dict`
570 Dictionary of CovFit objects, with amp names as keys.
572 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
573 PDF file where the plots will be saved.
575 bRange : `int`
576 Maximum lag for b arrays.
577 """
578 a = np.array([f.getA() for f in covFits.values()])
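# Average the a matrix over amplifiers; sy is the standard error of that mean.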
579 y = a.mean(axis=0)
580 sy = a.std(axis=0)/np.sqrt(len(covFits))
581 i, j = np.indices(y.shape)
582 upper = (i >= j).ravel()
583 r = np.sqrt(i**2 + j**2).ravel()
584 y = y.ravel()
585 sy = sy.ravel()
586 fig = plt.figure(figsize=(6, 9))
587 ax = fig.add_subplot(211)
588 ax.set_xlim([0.5, r.max()+1])
589 ax.errorbar(r[upper], y[upper], yerr=sy[upper], marker='o', linestyle='none', color='b',
590 label='$i>=j$')
591 ax.errorbar(r[~upper], y[~upper], yerr=sy[~upper], marker='o', linestyle='none', color='r',
592 label='$i<j$')
593 ax.legend(loc='upper center', fontsize='x-large')
594 ax.set_xlabel(r'$\sqrt{i^2+j^2}$', fontsize='x-large')
595 ax.set_ylabel(r'$a_{ij}$', fontsize='x-large')
596 ax.set_yscale('log')
597 ax.tick_params(axis='both', labelsize='x-large')
599 #
600 axb = fig.add_subplot(212)
601 b = np.array([f.getB() for f in covFits.values()])
602 yb = b.mean(axis=0)
603 syb = b.std(axis=0)/np.sqrt(len(covFits))
604 ib, jb = np.indices(yb.shape)
605 upper = (ib > jb).ravel()
606 rb = np.sqrt(ib**2 + jb**2).ravel()
607 yb = yb.ravel()
608 syb = syb.ravel()
609 xmin = -0.2
610 xmax = bRange
611 axb.set_xlim([xmin, xmax+0.2])
612 cutu = (r > xmin) & (r < xmax) & (upper)
613 cutl = (r > xmin) & (r < xmax) & (~upper)
614 axb.errorbar(rb[cutu], yb[cutu], yerr=syb[cutu], marker='o', linestyle='none', color='b',
615 label='$i>=j$')
616 axb.errorbar(rb[cutl], yb[cutl], yerr=syb[cutl], marker='o', linestyle='none', color='r',
617 label='$i<j$')
618 plt.legend(loc='upper center', fontsize='x-large')
619 axb.set_xlabel(r'$\sqrt{i^2+j^2}$', fontsize='x-large')
620 axb.set_ylabel(r'$b_{ij}$', fontsize='x-large')
621 axb.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
622 axb.tick_params(axis='both', labelsize='x-large')
623 plt.tight_layout()
624 pdfPages.savefig(fig)
626 return
628 @staticmethod
629 def plotAcoeffsSum(covFits, pdfPages):
630 """Fig. 14. of Astier+19
632 Cumulative sum of a_ij as a function of maximum separation. This plot displays the average over
633 channels.
635 Parameters
636 ----------
637 covFits: `dict`
638 Dictionary of CovFit objects, with amp names as keys.
640 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
641 PDF file where the plots will be saved.
642 """
643 a, b = [], []
644 for amp, fit in covFits.items():
645 a.append(fit.getA())
646 b.append(fit.getB())
647 a = np.array(a).mean(axis=0)
648 b = np.array(b).mean(axis=0)
649 fig = plt.figure(figsize=(7, 6))
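# The weights below account for the symmetry of the a matrix when summing over
# |i| < n and |j| < n: an off-axis a_ij stands for four symmetric pixels, an
# on-axis one for two, and a_00 for one.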
650 w = 4*np.ones_like(a)
651 w[0, 1:] = 2
652 w[1:, 0] = 2
653 w[0, 0] = 1
654 wa = w*a
655 indices = range(1, a.shape[0]+1)
656 sums = np.array([wa[0:n, 0:n].sum() for n in indices])
657 ax = plt.subplot(111)
658 ax.plot(indices, sums/sums[0], 'o', color='b')
659 ax.set_yscale('log')
660 ax.set_xlim(indices[0]-0.5, indices[-1]+0.5)
661 ax.set_ylim(None, 1.2)
662 ax.set_ylabel(r'$[\sum_{|i|<n\ &\ |j|<n} a_{ij}] / |a_{00}|$', fontsize='x-large')
663 ax.set_xlabel('n', fontsize='x-large')
664 ax.tick_params(axis='both', labelsize='x-large')
665 plt.tight_layout()
666 pdfPages.savefig(fig)
668 return
670 @staticmethod
671 def plotRelativeBiasACoeffs(covFits, covFitsNoB, signalElectrons, pdfPages, maxr=None):
672 """Fig. 15 in Astier+19.
674 Illustrates systematic bias from estimating 'a'
675 coefficients from the slope of correlations as opposed to the
676 full model in Astier+19.
678 Parameters
679 ----------
680 covFits : `dict`
681 Dictionary of CovFit objects, with amp names as keys.
683 covFitsNoB : `dict`
684 Dictionary of CovFit objects, with amp names as keys (b=0 in Eq. 20 of Astier+19).
686 signalElectrons : `float`
687 Signal at which to evaluate the a_ij coefficients.
689 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
690 PDF file where the plots will be saved.
692 maxr : `int`, optional
693 Maximum lag.
694 """
696 fig = plt.figure(figsize=(7, 11))
697 title = [f"'a' relative bias at {signalElectrons} e", "'a' relative bias (b=0)"]
698 data = [covFits, covFitsNoB]
700 for k in range(2):
701 diffs = []
702 amean = []
703 for fit in data[k].values():
704 if fit is None:
705 continue
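# 'a' estimated from the slope of the correlations (approximate method),
# compared below with the full-model estimate fit.getA().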
706 aOld = computeApproximateAcoeffs(fit, signalElectrons)
707 a = fit.getA()
708 amean.append(a)
709 diffs.append((aOld-a))
710 amean = np.array(amean).mean(axis=0)
711 diff = np.array(diffs).mean(axis=0)
712 diff = diff/amean
713 # The difference should be close to zero
714 diff[0, 0] = 0
715 if maxr is None:
716 maxr = diff.shape[0]
717 diff = diff[:maxr, :maxr]
718 ax0 = fig.add_subplot(2, 1, k+1)
719 im0 = ax0.imshow(diff.transpose(), origin='lower')
720 ax0.yaxis.set_major_locator(MaxNLocator(integer=True))
721 ax0.xaxis.set_major_locator(MaxNLocator(integer=True))
722 ax0.tick_params(axis='both', labelsize='x-large')
723 plt.colorbar(im0)
724 ax0.set_title(title[k])
726 plt.tight_layout()
727 pdfPages.savefig(fig)
729 return
731 def _plotStandardPtc(self, dataset, ptcFitType, pdfPages):
732 """Plot PTC, linearity, and linearity residual per amplifier
734 Parameters
735 ----------
736 dataset : `lsst.cp.pipe.ptc.PhotonTransferCurveDataset`
737 The dataset containing the means, variances, exposure times, and mask.
739 ptcFitType : `str`
740 Type of the model fit to the PTC. Options: 'FULLCOVARIANCE', 'EXPAPPROXIMATION', or 'POLYNOMIAL'.
742 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
743 PDF file where the plots will be saved.
744 """
746 if ptcFitType == 'EXPAPPROXIMATION':
747 ptcFunc = funcAstier
748 stringTitle = (r"Var = $\frac{1}{2g^2a_{00}}(\exp (2a_{00} \mu g) - 1) + \frac{n_{00}}{g^2}$ ")
749 elif ptcFitType == 'POLYNOMIAL':
750 ptcFunc = funcPolynomial
751 for key in dataset.ptcFitPars:
752 deg = len(dataset.ptcFitPars[key]) - 1
753 break
754 stringTitle = r"Polynomial (degree: %g)" % (deg)
755 else:
756 raise RuntimeError(f"The input dataset had an invalid dataset.ptcFitType: {ptcFitType}. \n" +
757 "Options: 'FULLCOVARIANCE', EXPAPPROXIMATION, or 'POLYNOMIAL'.")
759 legendFontSize = 7
760 labelFontSize = 7
761 titleFontSize = 9
762 supTitleFontSize = 18
763 markerSize = 25
765 # General determination of the size of the plot grid
766 nAmps = len(dataset.ampNames)
767 if nAmps == 2:
768 nRows, nCols = 2, 1
769 nRows = np.sqrt(nAmps)
770 mantissa, _ = np.modf(nRows)
771 if mantissa > 0:
772 nRows = int(nRows) + 1
773 nCols = nRows
774 else:
775 nRows = int(nRows)
776 nCols = nRows
778 f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
779 f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
781 for i, (amp, a, a2) in enumerate(zip(dataset.ampNames, ax.flatten(), ax2.flatten())):
782 meanVecOriginal = np.array(dataset.rawMeans[amp])
783 varVecOriginal = np.array(dataset.rawVars[amp])
784 mask = dataset.visitMask[amp]
785 meanVecFinal = meanVecOriginal[mask]
786 varVecFinal = varVecOriginal[mask]
787 meanVecOutliers = meanVecOriginal[np.invert(mask)]
788 varVecOutliers = varVecOriginal[np.invert(mask)]
789 pars, parsErr = dataset.ptcFitPars[amp], dataset.ptcFitParsError[amp]
790 ptcRedChi2 = dataset.ptcFitReducedChiSquared[amp]
791 if ptcFitType == 'EXPAPPROXIMATION':
792 if len(meanVecFinal):
793 ptcA00, ptcA00error = pars[0], parsErr[0]
794 ptcGain, ptcGainError = pars[1], parsErr[1]
795 ptcNoise = np.sqrt((pars[2])) # pars[2] is in (e-)^2
796 ptcNoiseAdu = ptcNoise*(1./ptcGain)
797 ptcNoiseError = 0.5*(parsErr[2]/np.fabs(pars[2]))*np.sqrt(np.fabs(pars[2]))
798 stringLegend = (f"a00: {ptcA00:.2e}+/-{ptcA00error:.2e} 1/e"
799 f"\n Gain: {ptcGain:.4}+/-{ptcGainError:.2e} e/DN"
800 f"\n Noise: {ptcNoise:.4}+/-{ptcNoiseError:.2e} e \n"
801 r"$\chi^2_{\rm{red}}$: " + f"{ptcRedChi2:.4}")
803 if ptcFitType == 'POLYNOMIAL':
804 if len(meanVecFinal):
805 ptcGain, ptcGainError = 1./pars[1], np.fabs(1./pars[1])*(parsErr[1]/pars[1])
806 ptcNoiseAdu = np.sqrt((pars[0])) # pars[0] is in ADU^2
807 ptcNoise = ptcNoiseAdu*ptcGain
808 ptcNoiseError = (0.5*(parsErr[0]/np.fabs(pars[0]))*(np.sqrt(np.fabs(pars[0]))))*ptcGain
809 stringLegend = (f"Gain: {ptcGain:.4}+/-{ptcGainError:.2e} e/DN \n"
810 f"Noise: {ptcNoise:.4}+/-{ptcNoiseError:.2e} e \n"
811 r"$\chi^2_{\rm{red}}$: " + f"{ptcRedChi2:.4}")
813 a.set_xlabel(r'Mean signal ($\mu$, DN)', fontsize=labelFontSize)
814 a.set_ylabel(r'Variance (DN$^2$)', fontsize=labelFontSize)
815 a.tick_params(labelsize=11)
816 a.set_xscale('linear')
817 a.set_yscale('linear')
819 a2.set_xlabel(r'Mean Signal ($\mu$, DN)', fontsize=labelFontSize)
820 a2.set_ylabel(r'Variance (DN$^2$)', fontsize=labelFontSize)
821 a2.tick_params(labelsize=11)
822 a2.set_xscale('log')
823 a2.set_yscale('log')
825 if len(meanVecFinal): # Empty if the whole amp is bad, for example.
826 minMeanVecFinal = np.min(meanVecFinal)
827 maxMeanVecFinal = np.max(meanVecFinal)
828 meanVecFit = np.linspace(minMeanVecFinal, maxMeanVecFinal, 100*len(meanVecFinal))
829 minMeanVecOriginal = np.min(meanVecOriginal)
830 maxMeanVecOriginal = np.max(meanVecOriginal)
831 deltaXlim = maxMeanVecOriginal - minMeanVecOriginal
833 a.plot(meanVecFit, ptcFunc(pars, meanVecFit), color='red')
834 a.plot(meanVecFinal, ptcNoiseAdu**2 + (1./ptcGain)*meanVecFinal, color='green',
835 linestyle='--')
836 a.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
837 a.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
838 a.text(0.03, 0.7, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
839 a.set_title(amp, fontsize=titleFontSize)
840 a.set_xlim([minMeanVecOriginal - 0.2*deltaXlim, maxMeanVecOriginal + 0.2*deltaXlim])
842 # Same, but in log-scale
843 a2.plot(meanVecFit, ptcFunc(pars, meanVecFit), color='red')
844 a2.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
845 a2.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
846 a2.text(0.03, 0.7, stringLegend, transform=a2.transAxes, fontsize=legendFontSize)
847 a2.set_title(amp, fontsize=titleFontSize)
848 a2.set_xlim([minMeanVecOriginal, maxMeanVecOriginal])
849 else:
850 a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
851 a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
853 f.suptitle("PTC \n Fit: " + stringTitle, fontsize=supTitleFontSize)
854 pdfPages.savefig(f)
855 f2.suptitle("PTC (log-log)", fontsize=supTitleFontSize)
856 pdfPages.savefig(f2)
858 return
860 def _plotLinearizer(self, dataset, linearizer, pdfPages):
861 """Plot linearity and linearity residual per amplifier
863 Parameters
864 ----------
865 dataset : `lsst.cp.pipe.ptc.PhotonTransferCurveDataset`
866 The dataset containing the means, variances, exposure times, and mask.
868 linearizer : `lsst.ip.isr.Linearizer`
869 Linearizer object.
 pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
 PDF file where the plots will be saved.
870 """
871 legendFontSize = 7
872 labelFontSize = 7
873 titleFontSize = 9
874 supTitleFontSize = 18
876 # General determination of the size of the plot grid
877 nAmps = len(dataset.ampNames)
878 if nAmps == 2:
879 nRows, nCols = 2, 1
880 nRows = np.sqrt(nAmps)
881 mantissa, _ = np.modf(nRows)
882 if mantissa > 0:
883 nRows = int(nRows) + 1
884 nCols = nRows
885 else:
886 nRows = int(nRows)
887 nCols = nRows
889 # Plot mean vs time (f1), and fractional residuals (f2)
890 f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
891 f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
892 for i, (amp, a, a2) in enumerate(zip(dataset.ampNames, ax.flatten(), ax2.flatten())):
893 meanVecFinal = np.array(dataset.rawMeans[amp])[dataset.visitMask[amp]]
894 timeVecFinal = np.array(dataset.rawExpTimes[amp])[dataset.visitMask[amp]]
896 a.set_xlabel('Time (sec)', fontsize=labelFontSize)
897 a.set_ylabel(r'Mean signal ($\mu$, DN)', fontsize=labelFontSize)
898 a.tick_params(labelsize=labelFontSize)
899 a.set_xscale('linear')
900 a.set_yscale('linear')
902 a2.axhline(y=0, color='k')
903 a2.axvline(x=0, color='k', linestyle='-')
904 a2.set_xlabel(r'Mean signal ($\mu$, DN)', fontsize=labelFontSize)
905 a2.set_ylabel('Fractional nonlinearity (%)', fontsize=labelFontSize)
906 a2.tick_params(labelsize=labelFontSize)
907 a2.set_xscale('linear')
908 a2.set_yscale('linear')
910 if len(meanVecFinal):
911 pars, parsErr = linearizer.fitParams[amp], linearizer.fitParamsErr[amp]
912 k0, k0Error = pars[0], parsErr[0]
913 k1, k1Error = pars[1], parsErr[1]
914 k2, k2Error = pars[2], parsErr[2]
915 linRedChi2 = linearizer.linearityFitReducedChiSquared[amp]
916 stringLegend = (f"k0: {k0:.4}+/-{k0Error:.2e} DN\n k1: {k1:.4}+/-{k1Error:.2e} DN/t"
917 f"\n k2: {k2:.2e}+/-{k2Error:.2e} DN/t^2 \n"
918 r"$\chi^2_{\rm{red}}$: " + f"{linRedChi2:.4}")
919 a.scatter(timeVecFinal, meanVecFinal)
920 a.plot(timeVecFinal, funcPolynomial(pars, timeVecFinal), color='red')
921 a.text(0.03, 0.75, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
922 a.set_title(f"{amp}", fontsize=titleFontSize)
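# Fractional nonlinearity, also quoted in the figure suptitle below:
# 100*(k0 + k1*t - mu)/(k0 + k1*t), the percent departure of the measured
# mean from the linear part of the fit.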
924 linearPart = k0 + k1*timeVecFinal
925 fracLinRes = 100*(linearPart - meanVecFinal)/linearPart
926 a2.plot(meanVecFinal, fracLinRes, c='g')
927 a2.set_title(f"{amp}", fontsize=titleFontSize)
928 else:
929 a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
930 a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
932 f.suptitle("Linearity \n Fit: Polynomial (degree: %g)"
933 % (len(pars)-1),
934 fontsize=supTitleFontSize)
935 f2.suptitle(r"Fractional NL residual" + "\n" +
936 r"$100\times \frac{(k_0 + k_1*Time-\mu)}{k_0+k_1*Time}$",
937 fontsize=supTitleFontSize)
938 pdfPages.savefig(f)
939 pdfPages.savefig(f2)
941 @staticmethod
942 def findGroups(x, maxDiff):
943 """Group data into bins, with at most maxDiff distance between bins.
945 Parameters
946 ----------
947 x: `list`
948 Data to bin.
950 maxDiff: `int`
951 Maximum distance between bins.
953 Returns
954 -------
955 index: `list`
956 Bin indices.
957 """
958 ix = np.argsort(x)
959 xsort = np.sort(x)
960 index = np.zeros_like(x, dtype=np.int32)
961 xc = xsort[0]
962 group = 0
963 ng = 1
965 for i in range(1, len(ix)):
966 xval = xsort[i]
967 if (xval - xc < maxDiff):
968 xc = (ng*xc + xval)/(ng+1)
969 ng += 1
970 index[ix[i]] = group
971 else:
972 group += 1
973 ng = 1
974 index[ix[i]] = group
975 xc = xval
977 return index
979 @staticmethod
980 def indexForBins(x, nBins):
981 """Builds an index with regular binning. The result can be fed into binData.
983 Parameters
984 ----------
985 x: `numpy.array`
986 Data to bin.
987 nBins: `int`
988 Number of bins.
990 Returns
991 -------
992 np.digitize(x, bins): `numpy.array`
993 Bin indices.
994 """
996 bins = np.linspace(x.min(), x.max() + abs(x.max() * 1e-7), nBins + 1)
997 return np.digitize(x, bins)
999 @staticmethod
1000 def binData(x, y, binIndex, wy=None):
1001 """Bin data (usually for display purposes).
1003 Parameters
1004 ----------
1005 x: `numpy.array`
1006 Data to bin.
1008 y: `numpy.array`
1009 Data to bin.
1011 binIndex: `list`
1012 Bin number of each datum.
1014 wy: `numpy.array`
1015 Inverse rms of each datum to use when averaging (the actual weight is wy**2).
1017 Returns
1018 -------
1020 xbin: `numpy.array`
1021 Binned data in x.
1023 ybin: `numpy.array`
1024 Binned data in y.
1026 wybin: `numpy.array`
1027 Binned weights in y, computed from wy's in each bin.
1029 sybin: `numpy.array`
1030 Uncertainty on the bin average, considering actual scatter, and ignoring weights.
1031 """
1033 if wy is None:
1034 wy = np.ones_like(x)
1035 binIndexSet = set(binIndex)
1036 w2 = wy*wy
1037 xw2 = x*(w2)
1038 xbin = np.array([xw2[binIndex == i].sum()/w2[binIndex == i].sum() for i in binIndexSet])
1040 yw2 = y*w2
1041 ybin = np.array([yw2[binIndex == i].sum()/w2[binIndex == i].sum() for i in binIndexSet])
1043 wybin = np.sqrt(np.array([w2[binIndex == i].sum() for i in binIndexSet]))
1044 sybin = np.array([y[binIndex == i].std()/np.sqrt(np.array([binIndex == i]).sum())
1045 for i in binIndexSet])
1047 return xbin, ybin, wybin, sybin
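# Hedged usage sketch for the binning helpers above (a standalone numpy example;
# the data are made up for illustration):
#
#     import numpy as np
#     rng = np.random.default_rng(0)
#     mu = np.linspace(100., 50000., 200)          # mean signal (DN)
#     cov = 1.5*mu + rng.normal(0., 50., mu.size)  # toy covariance values
#     gind = PlotPhotonTransferCurveTask.indexForBins(mu, nBins=10)
#     xb, yb, wyb, sigyb = PlotPhotonTransferCurveTask.binData(mu, cov, gind)
#     # xb, yb are weighted bin means; sigyb is the scatter-based uncertainty
#     # on each bin mean, as used in plotNormalizedCovariances.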