Coverage for python/lsst/cp/pipe/ptc/plotPtc.py : 5%

1# This file is part of cp_pipe.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
21#
23__all__ = ['PlotPhotonTransferCurveTask']
25import numpy as np
26import matplotlib.pyplot as plt
27import matplotlib as mpl
28from matplotlib import gridspec
29import os
30from matplotlib.backends.backend_pdf import PdfPages
32import lsst.ip.isr as isr
34from lsst.cp.pipe.utils import (funcAstier, funcPolynomial,
35 calculateWeightedReducedChi2)
36from matplotlib.ticker import MaxNLocator
38from lsst.cp.pipe.ptc.astierCovPtcFit import computeApproximateAcoeffs
39from lsst.cp.pipe.ptc.astierCovPtcUtils import getFitDataFromCovariances
41from lsst.ip.isr import PhotonTransferCurveDataset
43import lsst.log as lsstLog
46class PlotPhotonTransferCurveTask():
47 """A class to plot the dataset from MeasurePhotonTransferCurveTask.
49 Parameters
50 ----------
52 datasetFilename : `str`
53 Name of the FITS file containing the PTC dataset
54 (`lsst.ip.isr.PhotonTransferCurveDataset`).
56 linearizerFileName : `str`, optional
57 Name of the FITS file containing the linearizer
58 (`lsst.ip.isr.Linearizer`).
60 outDir : `str`, optional
61 Path to the output directory where the final PDF will
62 be placed.
64 detNum : `int`, optional
65 Detector number.
67 signalElectronsRelativeA : `float`, optional
68 Signal value (in electrons) at which to evaluate the relative systematic
69 bias between different methods of estimating a_ij (Fig. 15 of Astier+19).
71 plotNormalizedCovariancesNumberOfBins : `int`, optional
72 Number of bins in the `plotNormalizedCovariances` function
73 (Figs. 8 and 10 of Astier+19).
75 Notes
76 -----
77 The plotting code in this file is almost identical to the code in
78 `plotPtcGen2.py`. If further changes are implemented in this file,
79 `plotPtcGen2.py` needs to be updated accordingly, and vice versa.
80 The file `plotPtcGen2.py` helps with maintaining backwards
81 compatibility with gen2 as we transition to gen3; the code
82 duplication is meant to last for only a few months from now
83 (Jan, 2021). At that point only this file, `plotPtc.py`, will
84 remain.
85 """
87 def __init__(self, datasetFilename, linearizerFileName=None,
88 outDir='.', detNum=999, signalElectronsRelativeA=75000,
89 plotNormalizedCovariancesNumberOfBins=10):
90 self.datasetFilename = datasetFilename
91 self.linearizerFileName = linearizerFileName
92 self.detNum = detNum
93 self.signalElectronsRelativeA = signalElectronsRelativeA
94 self.plotNormalizedCovariancesNumberOfBins = plotNormalizedCovariancesNumberOfBins
95 self.outDir = outDir
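# Illustrative usage of this class (a sketch, not part of this module; the
# file and directory names below are hypothetical):
#
#     task = PlotPhotonTransferCurveTask("ptcDataset_det999.fits",
#                                        linearizerFileName=None,
#                                        outDir="ptc_plots", detNum=999)
#     task.runDataRef()  # writes ptc_plots/PTC_det999.pdf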
97 def runDataRef(self):
98 """Run the Photon Transfer Curve (PTC) plotting measurement task.
99 """
101 datasetFile = self.datasetFilename
102 datasetPtc = PhotonTransferCurveDataset.readFits(datasetFile)
104 dirname = self.outDir
105 if not os.path.exists(dirname):
106 os.makedirs(dirname)
108 detNum = self.detNum
109 filename = f"PTC_det{detNum}.pdf"
110 filenameFull = os.path.join(dirname, filename)
112 if self.linearizerFileName:
113 linearizer = isr.linearize.Linearizer.readFits(self.linearizerFileName)
114 else:
115 linearizer = None
116 self.run(filenameFull, datasetPtc, linearizer=linearizer, log=lsstLog)
118 return
120 def run(self, filenameFull, datasetPtc, linearizer=None, log=None):
121 """Make the plots for the PTC task"""
122 ptcFitType = datasetPtc.ptcFitType
123 with PdfPages(filenameFull) as pdfPages:
124 if ptcFitType in ["FULLCOVARIANCE", ]:
125 self.covAstierMakeAllPlots(datasetPtc, pdfPages, log=log)
126 elif ptcFitType in ["EXPAPPROXIMATION", "POLYNOMIAL"]:
127 self._plotStandardPtc(datasetPtc, ptcFitType, pdfPages)
128 else:
129 raise RuntimeError(f"The input dataset had an invalid dataset.ptcFitType: {ptcFitType}. \n"
130 "Options: 'FULLCOVARIANCE', EXPAPPROXIMATION, or 'POLYNOMIAL'.")
131 if linearizer:
132 self._plotLinearizer(datasetPtc, linearizer, pdfPages)
134 return
136 def covAstierMakeAllPlots(self, dataset, pdfPages,
137 log=None):
138 """Make plots for MeasurePhotonTransferCurve task when doCovariancesAstier=True.
140 This function calls other functions that mostly reproduce the plots in Astier+19.
141 Most of the code is ported from Pierre Astier's repository https://github.com/PierreAstier/bfptc
143 Parameters
144 ----------
145 dataset : `lsst.ip.isr.ptcDataset.PhotonTransferCurveDataset`
146 The dataset containing the necessary information to produce the plots.
148 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
149 PDF file where the plots will be saved.
151 log : `lsst.log.Log`, optional
152 Logger to handle messages
153 """
154 mu = dataset.finalMeans
155 # dictionaries with ampNames as keys
156 fullCovs = dataset.covariances
157 fullCovsModel = dataset.covariancesModel
158 fullCovWeights = dataset.covariancesSqrtWeights
159 aDict = dataset.aMatrix
160 bDict = dataset.bMatrix
161 fullCovsNoB = dataset.covariances
162 fullCovsModelNoB = dataset.covariancesModelNoB
163 fullCovWeightsNoB = dataset.covariancesSqrtWeights
164 aDictNoB = dataset.aMatrixNoB
165 gainDict = dataset.gain
166 noiseDict = dataset.noise
168 self.plotCovariances(mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB, fullCovsModelNoB,
169 fullCovWeightsNoB, gainDict, noiseDict, aDict, bDict, pdfPages)
170 self.plotNormalizedCovariances(0, 0, mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB,
171 fullCovsModelNoB, fullCovWeightsNoB, pdfPages,
172 offset=0.01, topPlot=True,
173 numberOfBins=self.plotNormalizedCovariancesNumberOfBins,
174 log=log)
175 self.plotNormalizedCovariances(0, 1, mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB,
176 fullCovsModelNoB, fullCovWeightsNoB, pdfPages,
177 numberOfBins=self.plotNormalizedCovariancesNumberOfBins,
178 log=log)
179 self.plotNormalizedCovariances(1, 0, mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB,
180 fullCovsModelNoB, fullCovWeightsNoB, pdfPages,
181 numberOfBins=self.plotNormalizedCovariancesNumberOfBins,
182 log=log)
183 self.plot_a_b(aDict, bDict, pdfPages)
184 self.ab_vs_dist(aDict, bDict, pdfPages, bRange=4)
185 self.plotAcoeffsSum(aDict, bDict, pdfPages)
186 self.plotRelativeBiasACoeffs(aDict, aDictNoB, fullCovsModel, fullCovsModelNoB,
187 self.signalElectronsRelativeA, gainDict, pdfPages, maxr=4)
189 return
191 @staticmethod
192 def plotCovariances(mu, covs, covsModel, covsWeights, covsNoB, covsModelNoB, covsWeightsNoB,
193 gainDict, noiseDict, aDict, bDict, pdfPages):
194 """Plot covariances and models: Cov00, Cov10, Cov01.
196 Figs. 6 and 7 of Astier+19
198 Parameters
199 ----------
200 mu : `dict`, [`str`, `list`]
201 Dictionary keyed by amp name with mean signal values.
203 covs : `dict`, [`str`, `list`]
204 Dictionary keyed by amp names containing a list of measured covariances per mean flux.
206 covsModel : `dict`, [`str`, `list`]
207 Dictionary keyed by amp names containing the covariance model (Eq. 20 of Astier+19) per mean flux.
209 covsWeights : `dict`, [`str`, `list`]
210 Dictionary keyed by amp names containing the sqrt. of the covariance weights.
212 covsNoB : `dict`, [`str`, `list`]
213 Dictionary keyed by amp names containing a list of measured covariances per mean flux ('b'=0 in
214 Astier+19).
216 covsModelNoB : `dict`, [`str`, `list`]
217 Dictionary keyed by amp names containing covariances model (with 'b'=0 in Eq. 20 of Astier+19)
218 per mean flux.
220 covsWeightsNoB : `dict`, [`str`, `list`]
221 Dictionary keyed by amp names containing sqrt. of covariances weights ('b' = 0 in Eq. 20 of
222 Astier+19).
224 gainDict : `dict`, [`str`, `float`]
225 Dictionary keyed by amp names containing the gains in e-/ADU.
227 noiseDict : `dict`, [`str`, `float`]
228 Dictionary keyed by amp names containing the rms readout noise in e-.
230 aDict : `dict`, [`str`, `numpy.array`]
231 Dictionary keyed by amp names containing 'a' coefficients (Eq. 20 of Astier+19).
233 bDict : `dict`, [`str`, `numpy.array`]
234 Dictionary keyed by amp names containing 'b' coefficients (Eq. 20 of Astier+19).
236 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
237 PDF file where the plots will be saved.
238 """
240 legendFontSize = 6.5
241 labelFontSize = 7
242 titleFontSize = 9
243 supTitleFontSize = 18
244 markerSize = 25
246 nAmps = len(covs)
247 if nAmps == 2:
248 nRows, nCols = 2, 1
249 nRows = np.sqrt(nAmps)
250 mantissa, _ = np.modf(nRows)
251 if mantissa > 0:
252 nRows = int(nRows) + 1
253 nCols = nRows
254 else:
255 nRows = int(nRows)
256 nCols = nRows
258 f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
259 f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
260 fResCov00, axResCov00 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row',
261 figsize=(13, 10))
262 fCov01, axCov01 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
263 fCov10, axCov10 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
265 assert(len(covsModel) == nAmps)
266 assert(len(covsWeights) == nAmps)
268 assert(len(covsNoB) == nAmps)
269 assert(len(covsModelNoB) == nAmps)
270 assert(len(covsWeightsNoB) == nAmps)
272 for i, (amp, a, a2, aResVar, a3, a4) in enumerate(zip(covs, ax.flatten(),
273 ax2.flatten(), axResCov00.flatten(),
274 axCov01.flatten(), axCov10.flatten())):
276 muAmp, cov, model, weight = mu[amp], covs[amp], covsModel[amp], covsWeights[amp]
277 if not np.isnan(np.array(cov)).all(): # If all the entries are np.nan, this is a bad amp.
278 aCoeffs, bCoeffs = np.array(aDict[amp]), np.array(bDict[amp])
279 gain, noise = gainDict[amp], noiseDict[amp]
280 (meanVecFinal, varVecFinal, varVecModelFinal,
281 varWeightsFinal, _) = getFitDataFromCovariances(0, 0, muAmp, cov, model, weight,
282 returnMasked=True)
284 # Get weighted reduced chi2
285 chi2FullModelVar = calculateWeightedReducedChi2(varVecFinal, varVecModelFinal,
286 varWeightsFinal, len(meanVecFinal), 4)
288 (meanVecFinalCov01, varVecFinalCov01, varVecModelFinalCov01,
289 _, _) = getFitDataFromCovariances(0, 1, muAmp, cov, model, weight, returnMasked=True)
291 (meanVecFinalCov10, varVecFinalCov10, varVecModelFinalCov10,
292 _, _) = getFitDataFromCovariances(1, 0, muAmp, cov, model, weight, returnMasked=True)
294 # Quadratic fit for residuals below
295 par2 = np.polyfit(meanVecFinal, varVecFinal, 2, w=varWeightsFinal)
296 varModelFinalQuadratic = np.polyval(par2, meanVecFinal)
297 chi2QuadModelVar = calculateWeightedReducedChi2(varVecFinal, varModelFinalQuadratic,
298 varWeightsFinal, len(meanVecFinal), 3)
300 # fit with no 'b' coefficient (c = a*b in Eq. 20 of Astier+19)
301 covNoB, modelNoB, weightNoB = covsNoB[amp], covsModelNoB[amp], covsWeightsNoB[amp]
302 (meanVecFinalNoB, varVecFinalNoB, varVecModelFinalNoB,
303 varWeightsFinalNoB, _) = getFitDataFromCovariances(0, 0, muAmp, covNoB, modelNoB,
304 weightNoB, returnMasked=True)
306 chi2FullModelNoBVar = calculateWeightedReducedChi2(varVecFinalNoB, varVecModelFinalNoB,
307 varWeightsFinalNoB, len(meanVecFinalNoB),
308 3)
309 stringLegend = (f"Gain: {gain:.4} e/ADU \n"
310 f"Noise: {noise:.4} e \n"
311 + r"$a_{00}$: %.3e 1/e"%aCoeffs[0, 0] + "\n"
312 + r"$b_{00}$: %.3e 1/e"%bCoeffs[0, 0]
313 + f"\nLast in fit: {meanVecFinal[-1]:.7} ADU ")
314 minMeanVecFinal = np.nanmin(meanVecFinal)
315 maxMeanVecFinal = np.nanmax(meanVecFinal)
316 deltaXlim = maxMeanVecFinal - minMeanVecFinal
318 a.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
319 a.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
320 a.tick_params(labelsize=11)
321 a.set_xscale('linear')
322 a.set_yscale('linear')
323 a.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
324 a.plot(meanVecFinal, varVecModelFinal, color='red', linestyle='-')
325 a.text(0.03, 0.7, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
326 a.set_title(amp, fontsize=titleFontSize)
327 a.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
329 # Same as above, but in log-scale
330 a2.set_xlabel(r'Mean Signal ($\mu$, ADU)', fontsize=labelFontSize)
331 a2.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
332 a2.tick_params(labelsize=11)
333 a2.set_xscale('log')
334 a2.set_yscale('log')
335 a2.plot(meanVecFinal, varVecModelFinal, color='red', linestyle='-')
336 a2.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
337 a2.text(0.03, 0.7, stringLegend, transform=a2.transAxes, fontsize=legendFontSize)
338 a2.set_title(amp, fontsize=titleFontSize)
339 a2.set_xlim([minMeanVecFinal, maxMeanVecFinal])
341 # Residuals var - model
342 aResVar.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
343 aResVar.set_ylabel(r'Residuals (ADU$^2$)', fontsize=labelFontSize)
344 aResVar.tick_params(labelsize=11)
345 aResVar.set_xscale('linear')
346 aResVar.set_yscale('linear')
347 aResVar.plot(meanVecFinal, varVecFinal - varVecModelFinal, color='blue', linestyle='-',
348 label=r'Full fit ($\chi_{\rm{red}}^2$: %g)'%chi2FullModelVar)
349 aResVar.plot(meanVecFinal, varVecFinal - varModelFinalQuadratic, color='red', linestyle='-',
350 label=r'Quadratic fit ($\chi_{\rm{red}}^2$: %g)'%chi2QuadModelVar)
351 aResVar.plot(meanVecFinalNoB, varVecFinalNoB - varVecModelFinalNoB, color='green',
352 linestyle='-',
353 label=r'Full fit (b=0) ($\chi_{\rm{red}}^2$: %g)'%chi2FullModelNoBVar)
354 aResVar.axhline(color='black')
355 aResVar.set_title(amp, fontsize=titleFontSize)
356 aResVar.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
357 aResVar.legend(fontsize=7)
359 a3.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
360 a3.set_ylabel(r'Cov01 (ADU$^2$)', fontsize=labelFontSize)
361 a3.tick_params(labelsize=11)
362 a3.set_xscale('linear')
363 a3.set_yscale('linear')
364 a3.scatter(meanVecFinalCov01, varVecFinalCov01, c='blue', marker='o', s=markerSize)
365 a3.plot(meanVecFinalCov01, varVecModelFinalCov01, color='red', linestyle='-')
366 a3.set_title(amp, fontsize=titleFontSize)
367 a3.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
369 a4.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
370 a4.set_ylabel(r'Cov10 (ADU$^2$)', fontsize=labelFontSize)
371 a4.tick_params(labelsize=11)
372 a4.set_xscale('linear')
373 a4.set_yscale('linear')
374 a4.scatter(meanVecFinalCov10, varVecFinalCov10, c='blue', marker='o', s=markerSize)
375 a4.plot(meanVecFinalCov10, varVecModelFinalCov10, color='red', linestyle='-')
376 a4.set_title(amp, fontsize=titleFontSize)
377 a4.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
379 else:
380 a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
381 a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
382 a3.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
383 a4.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
385 f.suptitle("PTC from covariances as in Astier+19 \n Fit: Eq. 20, Astier+19",
386 fontsize=supTitleFontSize)
387 pdfPages.savefig(f)
388 f2.suptitle("PTC from covariances as in Astier+19 (log-log) \n Fit: Eq. 20, Astier+19",
389 fontsize=supTitleFontSize)
390 pdfPages.savefig(f2)
391 fResCov00.suptitle("Residuals (data-model) for Cov00 (Var)", fontsize=supTitleFontSize)
392 pdfPages.savefig(fResCov00)
393 fCov01.suptitle("Cov01 as in Astier+19 (nearest parallel neighbor covariance) \n"
394 " Fit: Eq. 20, Astier+19", fontsize=supTitleFontSize)
395 pdfPages.savefig(fCov01)
396 fCov10.suptitle("Cov10 as in Astier+19 (nearest serial neighbor covariance) \n"
397 "Fit: Eq. 20, Astier+19", fontsize=supTitleFontSize)
398 pdfPages.savefig(fCov10)
400 return
402 def plotNormalizedCovariances(self, i, j, inputMu, covs, covsModel, covsWeights, covsNoB, covsModelNoB,
403 covsWeightsNoB, pdfPages, offset=0.004,
404 numberOfBins=10, plotData=True, topPlot=False, log=None):
405 """Plot C_ij/mu vs mu.
407 Figs. 8, 10, and 11 of Astier+19
409 Parameters
410 ----------
411 i : `int`
412 Covariance lag
414 j : `int`
415 Covariance lag
417 inputMu : `dict`, [`str`, `list`]
418 Dictionary keyed by amp name with mean signal values.
420 covs : `dict`, [`str`, `list`]
421 Dictionary keyed by amp names containing a list of measured covariances per mean flux.
423 covsModel : `dict`, [`str`, `list`]
424 Dictionary keyed by amp names containing the covariance model (Eq. 20 of Astier+19) per mean flux.
426 covsWeights : `dict`, [`str`, `list`]
427 Dictionary keyed by amp names containing the sqrt. of the covariance weights.
429 covsNoB : `dict`, [`str`, `list`]
430 Dictionary keyed by amp names containing a list of measured covariances per mean flux ('b'=0 in
431 Astier+19).
433 covsModelNoB : `dict`, [`str`, `list`]
434 Dictionary keyed by amp names containing covariances model (with 'b'=0 in Eq. 20 of Astier+19)
435 per mean flux.
437 covsWeightsNoB : `dict`, [`str`, `list`]
438 Dictionary keyed by amp names containing sqrt. of covariances weights ('b' = 0 in Eq. 20 of
439 Astier+19).
444 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
445 PDF file where the plots will be saved.
447 offset : `float`, optional
448 Constant offset factor to plot covariances in same panel (so they don't overlap).
450 numberOfBins : `int`, optional
451 Number of bins for top and bottom plot.
453 plotData : `bool`, optional
454 Plot the data points?
456 topPlot : `bool`, optional
457 If True, plot only the top panel with the covariances; otherwise also plot a bottom panel with the binned model residuals.
459 log : `lsst.log.Log`, optional
460 Logger to handle messages.
461 """
462 if not topPlot:
463 fig = plt.figure(figsize=(8, 10))
464 gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])
465 gs.update(hspace=0)
466 ax0 = plt.subplot(gs[0])
467 plt.setp(ax0.get_xticklabels(), visible=False)
468 else:
469 fig = plt.figure(figsize=(8, 8))
470 ax0 = plt.subplot(111)
471 ax0.ticklabel_format(style='sci', axis='x', scilimits=(0, 0))
472 ax0.tick_params(axis='both', labelsize='x-large')
473 mue, rese, wce = [], [], []
474 mueNoB, reseNoB, wceNoB = [], [], []
475 for counter, amp in enumerate(covs):
476 muAmp, fullCov, fullCovModel, fullCovWeight = (inputMu[amp], covs[amp], covsModel[amp],
477 covsWeights[amp])
478 if len(fullCov) == 0:
479 continue
480 mu, cov, model, weightCov, _ = getFitDataFromCovariances(i, j, muAmp, fullCov, fullCovModel,
481 fullCovWeight, divideByMu=True,
482 returnMasked=True)
484 mue += list(mu)
485 rese += list(cov - model)
486 wce += list(weightCov)
488 fullCovNoB, fullCovModelNoB, fullCovWeightNoB = (covsNoB[amp], covsModelNoB[amp],
489 covsWeightsNoB[amp])
490 if len(fullCovNoB) == 0:
491 continue
492 (muNoB, covNoB, modelNoB,
493 weightCovNoB, _) = getFitDataFromCovariances(i, j, muAmp, fullCovNoB, fullCovModelNoB,
494 fullCovWeightNoB, divideByMu=True,
495 returnMasked=True)
497 mueNoB += list(muNoB)
498 reseNoB += list(covNoB - modelNoB)
499 wceNoB += list(weightCovNoB)
501 # the corresponding fit
502 fit_curve, = plt.plot(mu, model + counter*offset, '-', linewidth=4.0)
503 # Binned plot; numberOfBins = len(mu) corresponds to no binning.
504 gind = self.indexForBins(mu, numberOfBins)
506 xb, yb, wyb, sigyb = self.binData(mu, cov, gind, weightCov)
507 plt.errorbar(xb, yb+counter*offset, yerr=sigyb, marker='o', linestyle='none', markersize=6.5,
508 color=fit_curve.get_color(), label=f"{amp} (N: {len(mu)})")
509 # plot the data
510 if plotData:
511 points, = plt.plot(mu, cov + counter*offset, '.', color=fit_curve.get_color())
512 plt.legend(loc='upper right', fontsize=8)
513 # end loop on amps
514 mue = np.array(mue)
515 rese = np.array(rese)
516 wce = np.array(wce)
517 mueNoB = np.array(mueNoB)
518 reseNoB = np.array(reseNoB)
519 wceNoB = np.array(wceNoB)
521 plt.xlabel(r"$\mu (el)$", fontsize='x-large')
522 plt.ylabel(r"$Cov{%d%d}/\mu + Cst (el)$"%(i, j), fontsize='x-large')
523 if (not topPlot):
524 gind = self.indexForBins(mue, numberOfBins)
525 xb, yb, wyb, sigyb = self.binData(mue, rese, gind, wce)
527 ax1 = plt.subplot(gs[1], sharex=ax0)
528 ax1.errorbar(xb, yb, yerr=sigyb, marker='o', linestyle='none', label='Full fit')
529 gindNoB = self.indexForBins(mueNoB, numberOfBins)
530 xb2, yb2, wyb2, sigyb2 = self.binData(mueNoB, reseNoB, gindNoB, wceNoB)
532 ax1.errorbar(xb2, yb2, yerr=sigyb2, marker='o', linestyle='none', label='b = 0')
533 ax1.tick_params(axis='both', labelsize='x-large')
534 plt.legend(loc='upper left', fontsize='large')
535 # horizontal line at zero
536 plt.plot(xb, [0]*len(xb), '--', color='k')
537 plt.ticklabel_format(style='sci', axis='x', scilimits=(0, 0))
538 plt.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
539 plt.xlabel(r'$\mu (el)$', fontsize='x-large')
540 plt.ylabel(r'$Cov{%d%d}/\mu$ -model (el)'%(i, j), fontsize='x-large')
541 plt.tight_layout()
542 plt.suptitle(f"Nbins: {numberOfBins}")
543 # overlapping y labels:
544 fig.canvas.draw()
545 labels0 = [item.get_text() for item in ax0.get_yticklabels()]
546 labels0[0] = u''
547 ax0.set_yticklabels(labels0)
548 pdfPages.savefig(fig)
550 return
552 @staticmethod
553 def plot_a_b(aDict, bDict, pdfPages, bRange=3):
554 """Fig. 12 of Astier+19
556 Color display of the fitted 'a' and 'b' arrays, averaged over channels.
558 Parameters
559 ----------
560 aDict : `dict`, [`numpy.array`]
561 Dictionary keyed by amp names containing the fitted 'a' coefficients from the model
562 in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
564 bDict : `dict`, [`numpy.array`]
565 Dictionary keyed by amp names containing the fitted 'b' coefficients from the model
566 in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
568 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
569 PDF file where the plots will be saved.
571 bRange : `int`
572 Maximum lag for b arrays.
573 """
574 a, b = [], []
575 for amp in aDict:
576 if np.isnan(aDict[amp]).all():
577 continue
578 a.append(aDict[amp])
579 b.append(bDict[amp])
580 a = np.array(a).mean(axis=0)
581 b = np.array(b).mean(axis=0)
582 fig = plt.figure(figsize=(7, 11))
583 ax0 = fig.add_subplot(2, 1, 1)
584 im0 = ax0.imshow(np.abs(a.transpose()), origin='lower', norm=mpl.colors.LogNorm())
585 ax0.tick_params(axis='both', labelsize='x-large')
586 ax0.set_title(r'$|a|$', fontsize='x-large')
587 ax0.xaxis.set_ticks_position('bottom')
588 cb0 = plt.colorbar(im0)
589 cb0.ax.tick_params(labelsize='x-large')
591 ax1 = fig.add_subplot(2, 1, 2)
592 ax1.tick_params(axis='both', labelsize='x-large')
593 ax1.yaxis.set_major_locator(MaxNLocator(integer=True))
594 ax1.xaxis.set_major_locator(MaxNLocator(integer=True))
595 im1 = ax1.imshow(1e6*b[:bRange, :bRange].transpose(), origin='lower')
596 cb1 = plt.colorbar(im1)
597 cb1.ax.tick_params(labelsize='x-large')
598 ax1.set_title(r'$b \times 10^6$', fontsize='x-large')
599 ax1.xaxis.set_ticks_position('bottom')
600 plt.tight_layout()
601 pdfPages.savefig(fig)
603 return
605 @staticmethod
606 def ab_vs_dist(aDict, bDict, pdfPages, bRange=4):
607 """Fig. 13 of Astier+19.
609 Values of the fitted 'a' and 'b' arrays, averaged over amplifiers, as a function of distance.
611 Parameters
612 ----------
613 aDict : `dict`, [`numpy.array`]
614 Dictionary keyed by amp names containing the fitted 'a' coefficients from the model
615 in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
617 bDict : `dict`, [`numpy.array`]
618 Dictionary keyed by amp names containing the fitted 'b' coefficients from the model
619 in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
621 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
622 PDF file where the plots will be saved.
624 bRange : `int`
625 Maximum lag for b arrays.
626 """
627 assert (len(aDict) == len(bDict))
628 a = []
629 for amp in aDict:
630 if np.isnan(aDict[amp]).all():
631 continue
632 a.append(aDict[amp])
633 a = np.array(a)
634 y = a.mean(axis=0)
635 sy = a.std(axis=0)/np.sqrt(len(aDict))
636 i, j = np.indices(y.shape)
637 upper = (i >= j).ravel()
638 r = np.sqrt(i**2 + j**2).ravel()
639 y = y.ravel()
640 sy = sy.ravel()
641 fig = plt.figure(figsize=(6, 9))
642 ax = fig.add_subplot(211)
643 ax.set_xlim([0.5, r.max()+1])
644 ax.errorbar(r[upper], y[upper], yerr=sy[upper], marker='o', linestyle='none', color='b',
645 label='$i>=j$')
646 ax.errorbar(r[~upper], y[~upper], yerr=sy[~upper], marker='o', linestyle='none', color='r',
647 label='$i<j$')
648 ax.legend(loc='upper center', fontsize='x-large')
649 ax.set_xlabel(r'$\sqrt{i^2+j^2}$', fontsize='x-large')
650 ax.set_ylabel(r'$a_{ij}$', fontsize='x-large')
651 ax.set_yscale('log')
652 ax.tick_params(axis='both', labelsize='x-large')
654 #
655 axb = fig.add_subplot(212)
656 b = []
657 for amp in bDict:
658 if np.isnan(bDict[amp]).all():
659 continue
660 b.append(bDict[amp])
661 b = np.array(b)
662 yb = b.mean(axis=0)
663 syb = b.std(axis=0)/np.sqrt(len(bDict))
664 ib, jb = np.indices(yb.shape)
665 upper = (ib > jb).ravel()
666 rb = np.sqrt(i**2 + j**2).ravel()
667 yb = yb.ravel()
668 syb = syb.ravel()
669 xmin = -0.2
670 xmax = bRange
671 axb.set_xlim([xmin, xmax+0.2])
672 cutu = (r > xmin) & (r < xmax) & (upper)
673 cutl = (r > xmin) & (r < xmax) & (~upper)
674 axb.errorbar(rb[cutu], yb[cutu], yerr=syb[cutu], marker='o', linestyle='none', color='b',
675 label='$i>=j$')
676 axb.errorbar(rb[cutl], yb[cutl], yerr=syb[cutl], marker='o', linestyle='none', color='r',
677 label='$i<j$')
678 plt.legend(loc='upper center', fontsize='x-large')
679 axb.set_xlabel(r'$\sqrt{i^2+j^2}$', fontsize='x-large')
680 axb.set_ylabel(r'$b_{ij}$', fontsize='x-large')
681 axb.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
682 axb.tick_params(axis='both', labelsize='x-large')
683 plt.tight_layout()
684 pdfPages.savefig(fig)
686 return
688 @staticmethod
689 def plotAcoeffsSum(aDict, bDict, pdfPages):
690 """Fig. 14. of Astier+19
692 Cumulative sum of a_ij as a function of maximum separation. This plot displays the average over
693 channels.
695 Parameters
696 ----------
697 aDict : `dict`, [`numpy.array`]
698 Dictionary keyed by amp names containing the fitted 'a' coefficients from the model
699 in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
701 bDict : `dict`, [`numpy.array`]
702 Dictionary keyed by amp names containing the fitted 'b' coefficients from the model
703 in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
705 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
706 PDF file where the plots will be saved.
707 """
708 assert (len(aDict) == len(bDict))
709 a, b = [], []
710 for amp in aDict:
711 if np.isnan(aDict[amp]).all() or np.isnan(bDict[amp]).all():
712 continue
713 a.append(aDict[amp])
714 b.append(bDict[amp])
715 a = np.array(a).mean(axis=0)
716 b = np.array(b).mean(axis=0)
717 fig = plt.figure(figsize=(7, 6))
718 w = 4*np.ones_like(a)
719 w[0, 1:] = 2
720 w[1:, 0] = 2
721 w[0, 0] = 1
722 wa = w*a
723 indices = range(1, a.shape[0]+1)
724 sums = [wa[0:n, 0:n].sum() for n in indices]
725 ax = plt.subplot(111)
726 ax.plot(indices, sums/sums[0], 'o', color='b')
727 ax.set_yscale('log')
728 ax.set_xlim(indices[0]-0.5, indices[-1]+0.5)
729 ax.set_ylim(None, 1.2)
730 ax.set_ylabel(r'$[\sum_{|i|<n\ &\ |j|<n} a_{ij}] / |a_{00}|$', fontsize='x-large')
731 ax.set_xlabel('n', fontsize='x-large')
732 ax.tick_params(axis='both', labelsize='x-large')
733 plt.tight_layout()
734 pdfPages.savefig(fig)
736 return
738 @staticmethod
739 def plotRelativeBiasACoeffs(aDict, aDictNoB, fullCovsModel, fullCovsModelNoB, signalElectrons,
740 gainDict, pdfPages, maxr=None):
741 """Fig. 15 in Astier+19.
743 Illustrates systematic bias from estimating 'a'
744 coefficients from the slope of correlations as opposed to the
745 full model in Astier+19.
747 Parameters
748 ----------
749 aDict: `dict`
750 Dictionary of 'a' matrices (Eq. 20, Astier+19), with amp names as keys.
752 aDictNoB: `dict`
753 Dictionary of 'a' matrices ('b'= 0 in Eq. 20, Astier+19), with amp names as keys.
755 fullCovsModel : `dict`, [`str`, `list`]
756 Dictionary keyed by amp names containing covariances model per mean flux.
758 fullCovsModelNoB : `dict`, [`str`, `list`]
759 Dictionary keyed by amp names containing covariances model (with 'b'=0 in Eq. 20 of
760 Astier+19) per mean flux.
762 signalElectrons : `float`
763 Signal at which to evaluate the a_ij coefficients.
765 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
766 PDF file where the plots will be saved.
768 gainDict : `dict`, [`str`, `float`]
769 Dictionary keyed by amp names with the gains in e-/ADU.
771 maxr : `int`, optional
772 Maximum lag.
773 """
775 fig = plt.figure(figsize=(7, 11))
776 title = [f"'a' relative bias at {signalElectrons} e", "'a' relative bias (b=0)"]
777 data = [(aDict, fullCovsModel), (aDictNoB, fullCovsModelNoB)]
779 for k, pair in enumerate(data):
780 diffs = []
781 amean = []
782 for amp in pair[0]:
783 covModel = pair[1][amp]
784 if np.isnan(covModel).all():
785 continue
786 aOld = computeApproximateAcoeffs(covModel, signalElectrons, gainDict[amp])
787 a = pair[0][amp]
788 amean.append(a)
789 diffs.append((aOld-a))
790 amean = np.array(amean).mean(axis=0)
791 diff = np.array(diffs).mean(axis=0)
792 diff = diff/amean
793 diff = diff[:]
794 # The difference should be close to zero
795 diff[0, 0] = 0
796 if maxr is None:
797 maxr = diff.shape[0]
798 diff = diff[:maxr, :maxr]
799 ax0 = fig.add_subplot(2, 1, k+1)
800 im0 = ax0.imshow(diff.transpose(), origin='lower')
801 ax0.yaxis.set_major_locator(MaxNLocator(integer=True))
802 ax0.xaxis.set_major_locator(MaxNLocator(integer=True))
803 ax0.tick_params(axis='both', labelsize='x-large')
804 plt.colorbar(im0)
805 ax0.set_title(title[k])
807 plt.tight_layout()
808 pdfPages.savefig(fig)
810 return
812 def _plotStandardPtc(self, dataset, ptcFitType, pdfPages):
813 """Plot PTC, var/signal vs signal, linearity, and linearity residual per amplifier.
815 Parameters
816 ----------
817 dataset : `lsst.ip.isr.ptcDataset.PhotonTransferCurveDataset`
818 The dataset containing the means, variances, exposure times, and mask.
820 ptcFitType : `str`
821 Type of the model fitted to the PTC. Options: 'FULLCOVARIANCE', 'EXPAPPROXIMATION', or 'POLYNOMIAL'.
823 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
824 PDF file where the plots will be saved.
825 """
827 if ptcFitType == 'EXPAPPROXIMATION':
828 ptcFunc = funcAstier
829 stringTitle = (r"Var = $\frac{1}{2g^2a_{00}}(\exp (2a_{00} \mu g) - 1) + \frac{n_{00}}{g^2}$ ")
830 elif ptcFitType == 'POLYNOMIAL':
831 ptcFunc = funcPolynomial
832 for key in dataset.ptcFitPars:
833 deg = len(dataset.ptcFitPars[key]) - 1
834 break
835 stringTitle = r"Polynomial (degree: %g)" % (deg)
836 else:
837 raise RuntimeError(f"The input dataset had an invalid dataset.ptcFitType: {ptcFitType}. \n"
838 "Options: 'FULLCOVARIANCE', EXPAPPROXIMATION, or 'POLYNOMIAL'.")
840 legendFontSize = 6.5
841 labelFontSize = 8
842 titleFontSize = 9
843 supTitleFontSize = 18
844 markerSize = 25
846 # General determination of the size of the plot grid
847 nAmps = len(dataset.ampNames)
848 if nAmps == 2:
849 nRows, nCols = 2, 1
850 nRows = np.sqrt(nAmps)
851 mantissa, _ = np.modf(nRows)
852 if mantissa > 0:
853 nRows = int(nRows) + 1
854 nCols = nRows
855 else:
856 nRows = int(nRows)
857 nCols = nRows
859 f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
860 f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
861 f3, ax3 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
863 for i, (amp, a, a2, a3) in enumerate(zip(dataset.ampNames, ax.flatten(), ax2.flatten(),
864 ax3.flatten())):
865 meanVecOriginal = np.ravel(np.array(dataset.rawMeans[amp]))
866 varVecOriginal = np.ravel(np.array(dataset.rawVars[amp]))
867 mask = np.ravel(np.array(dataset.expIdMask[amp]))
868 if np.isnan(mask[0]): # An all-NaN mask means the whole amp is bad
869 a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
870 a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
871 a3.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
872 continue
873 else:
874 mask = mask.astype(bool)
875 meanVecFinal = meanVecOriginal[mask]
876 varVecFinal = varVecOriginal[mask]
877 meanVecOutliers = meanVecOriginal[np.invert(mask)]
878 varVecOutliers = varVecOriginal[np.invert(mask)]
879 pars, parsErr = np.array(dataset.ptcFitPars[amp]), np.array(dataset.ptcFitParsError[amp])
880 ptcRedChi2 = dataset.ptcFitChiSq[amp]
881 if ptcFitType == 'EXPAPPROXIMATION':
882 if len(meanVecFinal):
883 ptcA00, ptcA00error = pars[0], parsErr[0]
884 ptcGain, ptcGainError = pars[1], parsErr[1]
885 ptcNoise = np.sqrt((pars[2])) # pars[2] is in (e-)^2
886 ptcNoiseAdu = ptcNoise*(1./ptcGain)
887 ptcNoiseError = 0.5*(parsErr[2]/np.fabs(pars[2]))*np.sqrt(np.fabs(pars[2]))
888 stringLegend = (f"a00: {ptcA00:.2e}+/-{ptcA00error:.2e} 1/e"
889 f"\nGain: {ptcGain:.4}+/-{ptcGainError:.2e} e/ADU"
890 f"\nNoise: {ptcNoise:.4}+/-{ptcNoiseError:.2e} e\n"
891 r"$\chi^2_{\rm{red}}$: " + f"{ptcRedChi2:.4}"
892 f"\nLast in fit: {meanVecFinal[-1]:.7} ADU ")
894 if ptcFitType == 'POLYNOMIAL':
895 if len(meanVecFinal):
896 ptcGain, ptcGainError = 1./pars[1], np.fabs(1./pars[1])*(parsErr[1]/pars[1])
897 ptcNoiseAdu = np.sqrt((pars[0])) # pars[0] is in ADU^2
898 ptcNoise = ptcNoiseAdu*ptcGain
899 ptcNoiseError = (0.5*(parsErr[0]/np.fabs(pars[0]))*(np.sqrt(np.fabs(pars[0]))))*ptcGain
900 stringLegend = (f"Gain: {ptcGain:.4}+/-{ptcGainError:.2e} e/ADU\n"
901 f"Noise: {ptcNoise:.4}+/-{ptcNoiseError:.2e} e\n"
902 r"$\chi^2_{\rm{red}}$: " + f"{ptcRedChi2:.4}"
903 f"\nLast in fit: {meanVecFinal[-1]:.7} ADU ")
905 a.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
906 a.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
907 a.tick_params(labelsize=11)
908 a.set_xscale('linear')
909 a.set_yscale('linear')
911 a2.set_xlabel(r'Mean Signal ($\mu$, ADU)', fontsize=labelFontSize)
912 a2.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
913 a2.tick_params(labelsize=11)
914 a2.set_xscale('log')
915 a2.set_yscale('log')
917 a3.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
918 a3.set_ylabel(r'Variance/$\mu$ (ADU)', fontsize=labelFontSize)
919 a3.tick_params(labelsize=11)
920 a3.set_xscale('log')
921 a3.set_yscale('linear')
923 minMeanVecFinal = np.nanmin(meanVecFinal)
924 maxMeanVecFinal = np.nanmax(meanVecFinal)
925 meanVecFit = np.linspace(minMeanVecFinal, maxMeanVecFinal, 100*len(meanVecFinal))
926 minMeanVecOriginal = np.nanmin(meanVecOriginal)
927 maxMeanVecOriginal = np.nanmax(meanVecOriginal)
928 deltaXlim = maxMeanVecOriginal - minMeanVecOriginal
929 a.plot(meanVecFit, ptcFunc(pars, meanVecFit), color='red')
930 a.plot(meanVecFinal, ptcNoiseAdu**2 + (1./ptcGain)*meanVecFinal, color='green',
931 linestyle='--')
932 a.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
933 a.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
934 a.text(0.03, 0.66, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
935 a.set_title(amp, fontsize=titleFontSize)
936 a.set_xlim([minMeanVecOriginal - 0.2*deltaXlim, maxMeanVecOriginal + 0.2*deltaXlim])
938 # Same, but in log-scale
939 a2.plot(meanVecFit, ptcFunc(pars, meanVecFit), color='red')
940 a2.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
941 a2.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
942 a2.text(0.03, 0.66, stringLegend, transform=a2.transAxes, fontsize=legendFontSize)
943 a2.set_title(amp, fontsize=titleFontSize)
944 a2.set_xlim([minMeanVecOriginal, maxMeanVecOriginal])
946 # Var/mu vs mu
947 a3.plot(meanVecFit, ptcFunc(pars, meanVecFit)/meanVecFit, color='red')
948 a3.scatter(meanVecFinal, varVecFinal/meanVecFinal, c='blue', marker='o', s=markerSize)
949 a3.scatter(meanVecOutliers, varVecOutliers/meanVecOutliers, c='magenta', marker='s',
950 s=markerSize)
951 a3.text(0.05, 0.1, stringLegend, transform=a3.transAxes, fontsize=legendFontSize)
952 a3.set_title(amp, fontsize=titleFontSize)
953 a3.set_xlim([minMeanVecOriginal - 0.2*deltaXlim, maxMeanVecOriginal + 0.2*deltaXlim])
955 f.suptitle("PTC \n Fit: " + stringTitle, fontsize=supTitleFontSize)
956 pdfPages.savefig(f)
957 f2.suptitle("PTC (log-log)", fontsize=supTitleFontSize)
958 pdfPages.savefig(f2)
959 f3.suptitle(r"Var/$\mu$", fontsize=supTitleFontSize)
960 pdfPages.savefig(f3)
962 return
964 def _plotLinearizer(self, dataset, linearizer, pdfPages):
965 """Plot linearity and linearity residual per amplifier
967 Parameters
968 ----------
969 dataset : `lsst.ip.isr.ptcDataset.PhotonTransferCurveDataset`
970 The dataset containing the means, variances, exposure times, and mask.
972 linearizer : `lsst.ip.isr.Linearizer`
973 Linearizer object.
pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
PDF file where the plots will be saved.
974 """
975 legendFontSize = 7
976 labelFontSize = 7
977 titleFontSize = 9
978 supTitleFontSize = 18
980 # General determination of the size of the plot grid
981 nAmps = len(dataset.ampNames)
982 if nAmps == 2:
983 nRows, nCols = 2, 1
984 nRows = np.sqrt(nAmps)
985 mantissa, _ = np.modf(nRows)
986 if mantissa > 0:
987 nRows = int(nRows) + 1
988 nCols = nRows
989 else:
990 nRows = int(nRows)
991 nCols = nRows
993 # Plot mean vs time (f1), and fractional residuals (f2)
994 f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
995 f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
996 for i, (amp, a, a2) in enumerate(zip(dataset.ampNames, ax.flatten(), ax2.flatten())):
997 mask = dataset.expIdMask[amp]
998 if np.isnan(mask[0]):
999 a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
1000 a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
1001 continue
1002 else:
1003 mask = mask.astype(bool)
1004 meanVecFinal = np.array(dataset.rawMeans[amp])[mask]
1005 timeVecFinal = np.array(dataset.rawExpTimes[amp])[mask]
1007 a.set_xlabel('Time (sec)', fontsize=labelFontSize)
1008 a.set_ylabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
1009 a.tick_params(labelsize=labelFontSize)
1010 a.set_xscale('linear')
1011 a.set_yscale('linear')
1013 a2.axhline(y=0, color='k')
1014 a2.axvline(x=0, color='k', linestyle='-')
1015 a2.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
1016 a2.set_ylabel('Fractional nonlinearity (%)', fontsize=labelFontSize)
1017 a2.tick_params(labelsize=labelFontSize)
1018 a2.set_xscale('linear')
1019 a2.set_yscale('linear')
1021 pars, parsErr = linearizer.fitParams[amp], linearizer.fitParamsErr[amp]
1022 k0, k0Error = pars[0], parsErr[0]
1023 k1, k1Error = pars[1], parsErr[1]
1024 k2, k2Error = pars[2], parsErr[2]
1025 linRedChi2 = linearizer.fitChiSq[amp]
1026 stringLegend = (f"k0: {k0:.4}+/-{k0Error:.2e} ADU\nk1: {k1:.4}+/-{k1Error:.2e} ADU/t"
1027 f"\nk2: {k2:.2e}+/-{k2Error:.2e} ADU/t^2\n"
1028 r"$\chi^2_{\rm{red}}$: " + f"{linRedChi2:.4}")
1029 a.scatter(timeVecFinal, meanVecFinal)
1030 a.plot(timeVecFinal, funcPolynomial(pars, timeVecFinal), color='red')
1031 a.text(0.03, 0.75, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
1032 a.set_title(f"{amp}", fontsize=titleFontSize)
1034 linearPart = k0 + k1*timeVecFinal
1035 fracLinRes = 100*(linearPart - meanVecFinal)/linearPart
1036 a2.plot(meanVecFinal, fracLinRes, c='g')
1037 a2.set_title(f"{amp}", fontsize=titleFontSize)
1039 f.suptitle("Linearity \n Fit: Polynomial (degree: %g)"
1040 % (len(pars)-1),
1041 fontsize=supTitleFontSize)
1042 f2.suptitle(r"Fractional NL residual" "\n"
1043 r"$100\times \frac{(k_0 + k_1*Time-\mu)}{k_0+k_1*Time}$",
1044 fontsize=supTitleFontSize)
1045 pdfPages.savefig(f)
1046 pdfPages.savefig(f2)
1048 @staticmethod
1049 def findGroups(x, maxDiff):
1050 """Group data into bins, with at most maxDiff distance between bins.
1052 Parameters
1053 ----------
1054 x: `list`
1055 Data to bin.
1057 maxDiff: `int`
1058 Maximum distance between a value and the running mean of its bin.
1060 Returns
1061 -------
1062 index: `list`
1063 Bin indices.
1064 """
1065 ix = np.argsort(x)
1066 xsort = np.sort(x)
1067 index = np.zeros_like(x, dtype=np.int32)
1068 xc = xsort[0]
1069 group = 0
1070 ng = 1
1072 for i in range(1, len(ix)):
1073 xval = xsort[i]
1074 if (xval - xc < maxDiff):
1075 xc = (ng*xc + xval)/(ng+1)
1076 ng += 1
1077 index[ix[i]] = group
1078 else:
1079 group += 1
1080 ng = 1
1081 index[ix[i]] = group
1082 xc = xval
1084 return index
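# Illustrative example (hypothetical values): sorted values within maxDiff of
# the running mean of their current group share a bin index.
#
#     >>> PlotPhotonTransferCurveTask.findGroups([1.0, 1.2, 5.0, 5.1, 9.7], maxDiff=1)
#     array([0, 0, 1, 1, 2], dtype=int32)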
1086 @staticmethod
1087 def indexForBins(x, nBins):
1088 """Builds an index with regular binning. The result can be fed into binData.
1090 Parameters
1091 ----------
1092 x: `numpy.array`
1093 Data to bin.
1094 nBins: `int`
1095 Number of bins.
1097 Returns
1098 -------
1099 np.digitize(x, bins): `numpy.array`
1100 Bin indices.
1101 """
1103 bins = np.linspace(x.min(), x.max() + abs(x.max() * 1e-7), nBins + 1)
1104 return np.digitize(x, bins)
1106 @staticmethod
1107 def binData(x, y, binIndex, wy=None):
1108 """Bin data (usually for display purposes).
1110 Parameters
1111 ----------
1112 x: `numpy.array`
1113 Data to bin.
1115 y: `numpy.array`
1116 Data to bin.
1118 binIndex: `list`
1119 Bin number of each datum.
1121 wy: `numpy.array`
1122 Inverse rms of each datum to use when averaging (the actual weight is wy**2).
1124 Returns
1125 -------
1127 xbin: `numpy.array`
1128 Binned data in x.
1130 ybin: `numpy.array`
1131 Binned data in y.
1133 wybin: `numpy.array`
1134 Binned weights in y, computed from wy's in each bin.
1136 sybin: `numpy.array`
1137 Uncertainty on the bin average, considering actual scatter, and ignoring weights.
1138 """
1140 if wy is None:
1141 wy = np.ones_like(x)
1142 binIndexSet = set(binIndex)
1143 w2 = wy*wy
1144 xw2 = x*(w2)
1145 xbin = np.array([xw2[binIndex == i].sum()/w2[binIndex == i].sum() for i in binIndexSet])
1147 yw2 = y*w2
1148 ybin = np.array([yw2[binIndex == i].sum()/w2[binIndex == i].sum() for i in binIndexSet])
1150 wybin = np.sqrt(np.array([w2[binIndex == i].sum() for i in binIndexSet]))
1151 sybin = np.array([y[binIndex == i].std()/np.sqrt(np.array([binIndex == i]).sum())
1152 for i in binIndexSet])
1154 return xbin, ybin, wybin, sybin
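# Typical use of the two helpers above, mirroring plotNormalizedCovariances
# (an illustrative sketch with made-up arrays):
#
#     mu = np.linspace(100., 50000., 200)                     # mean signal, ADU
#     cov = 1.5e-6*mu + np.random.normal(0., 1e-3, mu.size)   # fake Cov/mu values
#     gind = PlotPhotonTransferCurveTask.indexForBins(mu, 10)
#     xb, yb, wyb, sigyb = PlotPhotonTransferCurveTask.binData(mu, cov, gind)
#     plt.errorbar(xb, yb, yerr=sigyb, marker='o', linestyle='none')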