Coverage for python/lsst/cp/pipe/ptc/plotPtc.py: 5%
593 statements
coverage.py v6.4.1, created at 2022-07-09 07:20 -0700
1# This file is part of cp_pipe.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
21#
23__all__ = ['PlotPhotonTransferCurveTask']
25import logging
26import numpy as np
27import matplotlib.pyplot as plt
28import matplotlib as mpl
29from matplotlib import gridspec
30import os
31from matplotlib.backends.backend_pdf import PdfPages
33import lsst.ip.isr as isr
35from lsst.cp.pipe.utils import (funcAstier, funcPolynomial,
36 calculateWeightedReducedChi2,
37 getFitDataFromCovariances)
38from matplotlib.ticker import MaxNLocator
39from lsst.ip.isr import PhotonTransferCurveDataset
42class PlotPhotonTransferCurveTask():
43 """A class to plot the dataset from MeasurePhotonTransferCurveTask.
45 Parameters
46 ----------
48 datasetFilename : `str`
49 Name of the FITS file containing the PTC dataset
50 (`lsst.ip.isr.PhotonTransferCurveDataset`).
52 linearizerFileName : `str`, optional
53 Name of the FITS file containing the linearizer
54 (`lsst.ip.isr.Linearizer`).
56 outDir : `str`, optional
57 Path to the output directory where the final PDF will
58 be placed.
60 detNum : `int`, optional
61 Detector number.
63 signalElectronsRelativeA : `float`, optional
64 Signal value for relative systematic bias between different
65 methods of estimating a_ij (Fig. 15 of Astier+19).
67 plotNormalizedCovariancesNumberOfBins : `int`, optional
68 Number of bins in the `plotNormalizedCovariances` function
69 (Figs. 8, 10, and 11 of Astier+19).
70 """
72 def __init__(self, datasetFilename, linearizerFileName=None,
73 outDir='.', detNum=999, signalElectronsRelativeA=75000,
74 plotNormalizedCovariancesNumberOfBins=10):
75 self.datasetFilename = datasetFilename
76 self.linearizerFileName = linearizerFileName
77 self.detNum = detNum
78 self.signalElectronsRelativeA = signalElectronsRelativeA
79 self.plotNormalizedCovariancesNumberOfBins = plotNormalizedCovariancesNumberOfBins
80 self.outDir = outDir
82 def runDataRef(self):
83 """Run the Photon Transfer Curve (PTC) plotting measurement task.
84 """
85 datasetFile = self.datasetFilename
86 datasetPtc = PhotonTransferCurveDataset.readFits(datasetFile)
88 dirname = self.outDir
89 if not os.path.exists(dirname):
90 os.makedirs(dirname)
92 detNum = self.detNum
93 filename = f"PTC_det{detNum}.pdf"
94 filenameFull = os.path.join(dirname, filename)
96 if self.linearizerFileName:
97 linearizer = isr.linearize.Linearizer.readFits(self.linearizerFileName)
98 else:
99 linearizer = None
100 self.run(filenameFull, datasetPtc, linearizer=linearizer, log=logging.getLogger(__name__))
102 return
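    # A minimal usage sketch (not part of the original file): assuming a PTC
    # dataset FITS file written by MeasurePhotonTransferCurveTask, the
    # hypothetical paths below would produce "<outDir>/PTC_det<detNum>.pdf",
    # as implemented in runDataRef above:
    #
    #     task = PlotPhotonTransferCurveTask("ptcDataset_det000.fits",
    #                                        linearizerFileName=None,
    #                                        outDir="./ptc_plots", detNum=0)
    #     task.runDataRef()   # writes ./ptc_plots/PTC_det0.pdf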
104 def run(self, filenameFull, datasetPtc, linearizer=None, log=None):
105 """Make the plots for the PTC task"""
106 ptcFitType = datasetPtc.ptcFitType
107 with PdfPages(filenameFull) as pdfPages:
108 if ptcFitType in ["FULLCOVARIANCE", ]:
109 self.covAstierMakeAllPlots(datasetPtc, pdfPages, log=log)
110 elif ptcFitType in ["EXPAPPROXIMATION", "POLYNOMIAL"]:
111 self._plotStandardPtc(datasetPtc, ptcFitType, pdfPages)
112 else:
113 raise RuntimeError(f"The input dataset had an invalid dataset.ptcFitType: {ptcFitType}. \n"
114 "Options: 'FULLCOVARIANCE', EXPAPPROXIMATION, or 'POLYNOMIAL'.")
115 if linearizer:
116 self._plotLinearizer(datasetPtc, linearizer, pdfPages)
118 return
120 def covAstierMakeAllPlots(self, dataset, pdfPages,
121 log=None):
122 """Make plots for MeasurePhotonTransferCurve task when
123 doCovariancesAstier=True.
125 This function calls other functions that mostly reproduce the
126 plots in Astier+19. Most of the code is ported from Pierre
127 Astier's repository https://github.com/PierreAstier/bfptc.
129 Parameters
130 ----------
131 dataset : `lsst.ip.isr.ptcDataset.PhotonTransferCurveDataset`
132 The dataset containing the necessary information to
133 produce the plots.
135 pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
136 PDF file where the plots will be saved.
138 log : `logging.Logger`, optional
139 Logger to handle messages
140 """
141 mu = dataset.finalMeans
142 # dictionaries with ampNames as keys
143 fullCovs = dataset.covariances
144 fullCovsModel = dataset.covariancesModel
145 fullCovWeights = dataset.covariancesSqrtWeights
146 aDict = dataset.aMatrix
147 bDict = dataset.bMatrix
148 fullCovsNoB = dataset.covariances
149 fullCovsModelNoB = dataset.covariancesModelNoB
150 fullCovWeightsNoB = dataset.covariancesSqrtWeights
151 aDictNoB = dataset.aMatrixNoB
152 gainDict = dataset.gain
153 noiseDict = dataset.noise
155 self.plotCovariances(mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB, fullCovsModelNoB,
156 fullCovWeightsNoB, gainDict, noiseDict, aDict, bDict, pdfPages)
157 self.plotNormalizedCovariances(0, 0, mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB,
158 fullCovsModelNoB, fullCovWeightsNoB, pdfPages,
159 offset=0.01, topPlot=True,
160 numberOfBins=self.plotNormalizedCovariancesNumberOfBins,
161 log=log)
162 self.plotNormalizedCovariances(0, 1, mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB,
163 fullCovsModelNoB, fullCovWeightsNoB, pdfPages,
164 numberOfBins=self.plotNormalizedCovariancesNumberOfBins,
165 log=log)
166 self.plotNormalizedCovariances(1, 0, mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB,
167 fullCovsModelNoB, fullCovWeightsNoB, pdfPages,
168 numberOfBins=self.plotNormalizedCovariancesNumberOfBins,
169 log=log)
170 self.plot_a_b(aDict, bDict, pdfPages)
171 self.ab_vs_dist(aDict, bDict, pdfPages, bRange=4)
172 self.plotAcoeffsSum(aDict, bDict, pdfPages)
173 self.plotRelativeBiasACoeffs(aDict, aDictNoB, fullCovsModel, fullCovsModelNoB,
174 self.signalElectronsRelativeA, gainDict, pdfPages, maxr=4)
176 return
178 @staticmethod
179 def plotCovariances(mu, covs, covsModel, covsWeights, covsNoB, covsModelNoB, covsWeightsNoB,
180 gainDict, noiseDict, aDict, bDict, pdfPages):
181 """Plot covariances and models: Cov00, Cov10, Cov01.
183 Figs. 6 and 7 of Astier+19
185 Parameters
186 ----------
187 mu : `dict` [`str`, `list`]
188 Dictionary keyed by amp name with mean signal values.
190 covs : `dict` [`str`, `list`]
191 Dictionary keyed by amp names containing a list of measured
192 covariances per mean flux.
194 covsModel : `dict` [`str`, `list`]
195 Dictionary keyed by amp names containing the covariances
196 model (Eq. 20 of Astier+19) per mean flux.
198 covsWeights : `dict` [`str`, `list`]
199 Dictionary keyed by amp names containing the sqrt. of
200 covariances weights.
202 covsNoB : `dict` [`str`, `list`]
203 Dictionary keyed by amp names containing a list of measured
204 covariances per mean flux ('b'=0 in Astier+19).
206 covsModelNoB : `dict` [`str`, `list`]
207 Dictionary keyed by amp names containing covariances model
208 (with 'b'=0 in Eq. 20 of Astier+19) per mean flux.
210 covsWeightsNoB : `dict` [`str`, `list`]
211 Dictionary keyed by amp names containing sqrt. of
212 covariances weights ('b' = 0 in Eq. 20 of Astier+19).
214 gainDict : `dict` [`str`, `float`]
215 Dictionary keyed by amp names containing the gains in e-/ADU.
217 noiseDict : `dict` [`str`, `float`]
218 Dictionary keyed by amp names containing the rms readout
219 noise in e-.
221 aDict : `dict` [`str`, `numpy.array`]
222 Dictionary keyed by amp names containing 'a' coefficients
223 (Eq. 20 of Astier+19).
225 bDict : `dict` [`str`, `numpy.array`]
226 Dictionary keyed by amp names containing 'b' coefficients
227 (Eq. 20 of Astier+19).
229 pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
230 PDF file where the plots will be saved.
231 """
232 legendFontSize = 6.5
233 labelFontSize = 7
234 titleFontSize = 9
235 supTitleFontSize = 18
236 markerSize = 25
238 nAmps = len(covs)
239 if nAmps == 2:
240 nRows, nCols = 2, 1
241 nRows = np.sqrt(nAmps)
242 mantissa, _ = np.modf(nRows)
243 if mantissa > 0:
244 nRows = int(nRows) + 1
245 nCols = nRows
246 else:
247 nRows = int(nRows)
248 nCols = nRows
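        # For example (illustration only): a 16-amp detector gives
        # np.sqrt(16) = 4 with zero fractional part, so the grid is 4x4;
        # 8 amps give np.sqrt(8) ~ 2.83, which is rounded up to a 3x3 grid.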
250 f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
251 f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
252 fResCov00, axResCov00 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row',
253 figsize=(13, 10))
254 fCov01, axCov01 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
255 fCov10, axCov10 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
257 assert(len(covsModel) == nAmps)
258 assert(len(covsWeights) == nAmps)
260 assert(len(covsNoB) == nAmps)
261 assert(len(covsModelNoB) == nAmps)
262 assert(len(covsWeightsNoB) == nAmps)
264 for i, (amp, a, a2, aResVar, a3, a4) in enumerate(zip(covs, ax.flatten(),
265 ax2.flatten(), axResCov00.flatten(),
266 axCov01.flatten(), axCov10.flatten())):
268 muAmp, cov, model, weight = mu[amp], covs[amp], covsModel[amp], covsWeights[amp]
269 if not np.isnan(np.array(cov)).all(): # If all the entries are np.nan, this is a bad amp.
270 aCoeffs, bCoeffs = np.array(aDict[amp]), np.array(bDict[amp])
271 gain, noise = gainDict[amp], noiseDict[amp]
272 (meanVecFinal, varVecFinal, varVecModelFinal,
273 varWeightsFinal, _) = getFitDataFromCovariances(0, 0, muAmp, cov, model, weight,
274 returnMasked=True)
276 # Get weighted reduced chi2
277 chi2FullModelVar = calculateWeightedReducedChi2(varVecFinal, varVecModelFinal,
278 varWeightsFinal, len(meanVecFinal), 4)
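            # (Added note) The trailing integer passed to
            # calculateWeightedReducedChi2 is the number of fit parameters; it
            # sets the degrees of freedom for the reduced chi^2 (4 here for the
            # full model, 3 for the quadratic and b=0 fits below).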
280 (meanVecFinalCov01, varVecFinalCov01, varVecModelFinalCov01,
281 _, _) = getFitDataFromCovariances(0, 1, muAmp, cov, model, weight, returnMasked=True)
283 (meanVecFinalCov10, varVecFinalCov10, varVecModelFinalCov10,
284 _, _) = getFitDataFromCovariances(1, 0, muAmp, cov, model, weight, returnMasked=True)
286 # Quadratic fit for residuals below
287 par2 = np.polyfit(meanVecFinal, varVecFinal, 2, w=varWeightsFinal)
288 varModelFinalQuadratic = np.polyval(par2, meanVecFinal)
289 chi2QuadModelVar = calculateWeightedReducedChi2(varVecFinal, varModelFinalQuadratic,
290 varWeightsFinal, len(meanVecFinal), 3)
292 # fit with no 'b' coefficient (c = a*b in Eq. 20 of Astier+19)
293 covNoB, modelNoB, weightNoB = covsNoB[amp], covsModelNoB[amp], covsWeightsNoB[amp]
294 (meanVecFinalNoB, varVecFinalNoB, varVecModelFinalNoB,
295 varWeightsFinalNoB, _) = getFitDataFromCovariances(0, 0, muAmp, covNoB, modelNoB,
296 weightNoB, returnMasked=True)
298 chi2FullModelNoBVar = calculateWeightedReducedChi2(varVecFinalNoB, varVecModelFinalNoB,
299 varWeightsFinalNoB, len(meanVecFinalNoB),
300 3)
301 stringLegend = (f"Gain: {gain:.4} e/ADU \n"
302 f"Noise: {noise:.4} e \n"
303 + r"$a_{00}$: %.3e 1/e"%aCoeffs[0, 0] + "\n"
304 + r"$b_{00}$: %.3e 1/e"%bCoeffs[0, 0]
305 + f"\nLast in fit: {meanVecFinal[-1]:.7} ADU ")
306 minMeanVecFinal = np.nanmin(meanVecFinal)
307 maxMeanVecFinal = np.nanmax(meanVecFinal)
308 deltaXlim = maxMeanVecFinal - minMeanVecFinal
310 a.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
311 a.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
312 a.tick_params(labelsize=11)
313 a.set_xscale('linear')
314 a.set_yscale('linear')
315 a.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
316 a.plot(meanVecFinal, varVecModelFinal, color='red', linestyle='-')
317 a.text(0.03, 0.7, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
318 a.set_title(amp, fontsize=titleFontSize)
319 a.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
321 # Same as above, but in log-scale
322 a2.set_xlabel(r'Mean Signal ($\mu$, ADU)', fontsize=labelFontSize)
323 a2.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
324 a2.tick_params(labelsize=11)
325 a2.set_xscale('log')
326 a2.set_yscale('log')
327 a2.plot(meanVecFinal, varVecModelFinal, color='red', linestyle='-')
328 a2.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
329 a2.text(0.03, 0.7, stringLegend, transform=a2.transAxes, fontsize=legendFontSize)
330 a2.set_title(amp, fontsize=titleFontSize)
331 a2.set_xlim([minMeanVecFinal, maxMeanVecFinal])
333 # Residuals var - model
334 aResVar.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
335 aResVar.set_ylabel(r'Residuals (ADU$^2$)', fontsize=labelFontSize)
336 aResVar.tick_params(labelsize=11)
337 aResVar.set_xscale('linear')
338 aResVar.set_yscale('linear')
339 aResVar.plot(meanVecFinal, varVecFinal - varVecModelFinal, color='blue', linestyle='-',
340 label=r'Full fit ($\chi_{\rm{red}}^2$: %g)'%chi2FullModelVar)
341 aResVar.plot(meanVecFinal, varVecFinal - varModelFinalQuadratic, color='red', linestyle='-',
342 label=r'Quadratic fit ($\chi_{\rm{red}}^2$: %g)'%chi2QuadModelVar)
343 aResVar.plot(meanVecFinalNoB, varVecFinalNoB - varVecModelFinalNoB, color='green',
344 linestyle='-',
345 label=r'Full fit (b=0) ($\chi_{\rm{red}}^2$: %g)'%chi2FullModelNoBVar)
346 aResVar.axhline(color='black')
347 aResVar.set_title(amp, fontsize=titleFontSize)
348 aResVar.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
349 aResVar.legend(fontsize=7)
351 a3.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
352 a3.set_ylabel(r'Cov01 (ADU$^2$)', fontsize=labelFontSize)
353 a3.tick_params(labelsize=11)
354 a3.set_xscale('linear')
355 a3.set_yscale('linear')
356 a3.scatter(meanVecFinalCov01, varVecFinalCov01, c='blue', marker='o', s=markerSize)
357 a3.plot(meanVecFinalCov01, varVecModelFinalCov01, color='red', linestyle='-')
358 a3.set_title(amp, fontsize=titleFontSize)
359 a3.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
361 a4.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
362 a4.set_ylabel(r'Cov10 (ADU$^2$)', fontsize=labelFontSize)
363 a4.tick_params(labelsize=11)
364 a4.set_xscale('linear')
365 a4.set_yscale('linear')
366 a4.scatter(meanVecFinalCov10, varVecFinalCov10, c='blue', marker='o', s=markerSize)
367 a4.plot(meanVecFinalCov10, varVecModelFinalCov10, color='red', linestyle='-')
368 a4.set_title(amp, fontsize=titleFontSize)
369 a4.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
371 else:
372 a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
373 a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
374 a3.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
375 a4.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
377 f.suptitle("PTC from covariances as in Astier+19 \n Fit: Eq. 20, Astier+19",
378 fontsize=supTitleFontSize)
379 pdfPages.savefig(f)
380 f2.suptitle("PTC from covariances as in Astier+19 (log-log) \n Fit: Eq. 20, Astier+19",
381 fontsize=supTitleFontSize)
382 pdfPages.savefig(f2)
383 fResCov00.suptitle("Residuals (data-model) for Cov00 (Var)", fontsize=supTitleFontSize)
384 pdfPages.savefig(fResCov00)
385 fCov01.suptitle("Cov01 as in Astier+19 (nearest parallel neighbor covariance) \n"
386 " Fit: Eq. 20, Astier+19", fontsize=supTitleFontSize)
387 pdfPages.savefig(fCov01)
388 fCov10.suptitle("Cov10 as in Astier+19 (nearest serial neighbor covariance) \n"
389 "Fit: Eq. 20, Astier+19", fontsize=supTitleFontSize)
390 pdfPages.savefig(fCov10)
392 return
394 def plotNormalizedCovariances(self, i, j, inputMu, covs, covsModel, covsWeights, covsNoB, covsModelNoB,
395 covsWeightsNoB, pdfPages, offset=0.004,
396 numberOfBins=10, plotData=True, topPlot=False, log=None):
397 """Plot C_ij/mu vs mu.
399 Figs. 8, 10, and 11 of Astier+19
401 Parameters
402 ----------
403 i : `int`
404 Covariance lag.
406 j : `int`
407 Covariance lag
409 inputMu : `dict` [`str`, `list`]
410 Dictionary keyed by amp name with mean signal values.
412 covs : `dict` [`str`, `list`]
413 Dictionary keyed by amp names containing a list of measured
414 covariances per mean flux.
416 covsModel : `dict` [`str`, `list`]
417 Dictionary keyed by amp names containing the covariances
418 model (Eq. 20 of Astier+19) per mean flux.
420 covsWeights : `dict` [`str`, `list`]
421 Dictionary keyed by amp names containing the sqrt. of
422 covariances weights.
424 covsNoB : `dict` [`str`, `list`]
425 Dictionary keyed by amp names containing a list of measured
426 covariances per mean flux ('b'=0 in Astier+19).
428 covsModelNoB : `dict` [`str`, `list`]
429 Dictionary keyed by amp names containing covariances model
430 (with 'b'=0 in Eq. 20 of Astier+19) per mean flux.
432 covsWeightsNoB : `dict` [`str`, `list`]
433 Dictionary keyed by amp names containing sqrt. of
434 covariances weights ('b' = 0 in Eq. 20 of Astier+19).
440 pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
441 PDF file where the plots will be saved.
443 offset : `float`, optional
444 Constant offset factor to plot covariances in same panel
445 (so they don't overlap).
447 numberOfBins : `int`, optional
448 Number of bins for top and bottom plot.
450 plotData : `bool`, optional
451 Plot the data points?
453 topPlot : `bool`, optional
454 If `True`, plot only the top panel with the covariances;
455 if `False`, also plot a bottom panel with the binned model residuals.
457 log : `logging.Logger`, optional
458 Logger to handle messages.
459 """
460 if not topPlot:
461 fig = plt.figure(figsize=(8, 10))
462 gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])
463 gs.update(hspace=0)
464 ax0 = plt.subplot(gs[0])
465 plt.setp(ax0.get_xticklabels(), visible=False)
466 else:
467 fig = plt.figure(figsize=(8, 8))
468 ax0 = plt.subplot(111)
469 ax0.ticklabel_format(style='sci', axis='x', scilimits=(0, 0))
470 ax0.tick_params(axis='both', labelsize='x-large')
471 mue, rese, wce = [], [], []
472 mueNoB, reseNoB, wceNoB = [], [], []
473 for counter, amp in enumerate(covs):
474 muAmp, fullCov, fullCovModel, fullCovWeight = (inputMu[amp], covs[amp], covsModel[amp],
475 covsWeights[amp])
476 if len(fullCov) == 0:
477 continue
478 mu, cov, model, weightCov, _ = getFitDataFromCovariances(i, j, muAmp, fullCov, fullCovModel,
479 fullCovWeight, divideByMu=True,
480 returnMasked=True)
482 mue += list(mu)
483 rese += list(cov - model)
484 wce += list(weightCov)
486 fullCovNoB, fullCovModelNoB, fullCovWeightNoB = (covsNoB[amp], covsModelNoB[amp],
487 covsWeightsNoB[amp])
488 if len(fullCovNoB) == 0:
489 continue
490 (muNoB, covNoB, modelNoB,
491 weightCovNoB, _) = getFitDataFromCovariances(i, j, muAmp, fullCovNoB, fullCovModelNoB,
492 fullCovWeightNoB, divideByMu=True,
493 returnMasked=True)
495 mueNoB += list(muNoB)
496 reseNoB += list(covNoB - modelNoB)
497 wceNoB += list(weightCovNoB)
499 # the corresponding fit
500 fit_curve, = plt.plot(mu, model + counter*offset, '-', linewidth=4.0)
501 # Binned plot; numberOfBins = len(mu) would correspond to no binning.
502 gind = self.indexForBins(mu, numberOfBins)
504 xb, yb, wyb, sigyb = self.binData(mu, cov, gind, weightCov)
505 plt.errorbar(xb, yb+counter*offset, yerr=sigyb, marker='o', linestyle='none', markersize=6.5,
506 color=fit_curve.get_color(), label=f"{amp} (N: {len(mu)})")
507 # plot the data
508 if plotData:
509 points, = plt.plot(mu, cov + counter*offset, '.', color=fit_curve.get_color())
510 plt.legend(loc='upper right', fontsize=8)
511 # end loop on amps
512 mue = np.array(mue)
513 rese = np.array(rese)
514 wce = np.array(wce)
515 mueNoB = np.array(mueNoB)
516 reseNoB = np.array(reseNoB)
517 wceNoB = np.array(wceNoB)
519 plt.xlabel(r"$\mu (el)$", fontsize='x-large')
520 plt.ylabel(r"$Cov{%d%d}/\mu + Cst (el)$"%(i, j), fontsize='x-large')
521 if (not topPlot):
522 gind = self.indexForBins(mue, numberOfBins)
523 xb, yb, wyb, sigyb = self.binData(mue, rese, gind, wce)
525 ax1 = plt.subplot(gs[1], sharex=ax0)
526 ax1.errorbar(xb, yb, yerr=sigyb, marker='o', linestyle='none', label='Full fit')
527 gindNoB = self.indexForBins(mueNoB, numberOfBins)
528 xb2, yb2, wyb2, sigyb2 = self.binData(mueNoB, reseNoB, gindNoB, wceNoB)
530 ax1.errorbar(xb2, yb2, yerr=sigyb2, marker='o', linestyle='none', label='b = 0')
531 ax1.tick_params(axis='both', labelsize='x-large')
532 plt.legend(loc='upper left', fontsize='large')
533 # horizontal line at zero
534 plt.plot(xb, [0]*len(xb), '--', color='k')
535 plt.ticklabel_format(style='sci', axis='x', scilimits=(0, 0))
536 plt.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
537 plt.xlabel(r'$\mu (el)$', fontsize='x-large')
538 plt.ylabel(r'$Cov{%d%d}/\mu$ -model (el)'%(i, j), fontsize='x-large')
539 plt.tight_layout()
540 plt.suptitle(f"Nbins: {numberOfBins}")
541 # Avoid overlapping y-axis tick labels: blank the lowest label of the top panel.
542 fig.canvas.draw()
543 labels0 = [item.get_text() for item in ax0.get_yticklabels()]
544 labels0[0] = u''
545 ax0.set_yticklabels(labels0)
546 pdfPages.savefig(fig)
548 return
550 @staticmethod
551 def plot_a_b(aDict, bDict, pdfPages, bRange=3):
552 """Fig. 12 of Astier+19
554 Color display of the fitted 'a' and 'b' arrays, averaged over channels.
556 Parameters
557 ----------
558 aDict : `dict` [`numpy.array`]
559 Dictionary keyed by amp names containing the fitted 'a'
560 coefficients from the model in Eq. 20 of Astier+19 (if
561 `ptcFitType` is `FULLCOVARIANCE`).
563 bDict : `dict` [`numpy.array`]
564 Dictionary keyed by amp names containing the fitted 'b'
565 coefficients from the model in Eq. 20 of Astier+19 (if
566 `ptcFitType` is `FULLCOVARIANCE`).
568 pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
569 PDF file where the plots will be saved.
571 bRange : `int`
572 Maximum lag for b arrays.
573 """
574 a, b = [], []
575 for amp in aDict:
576 if np.isnan(aDict[amp]).all():
577 continue
578 a.append(aDict[amp])
579 b.append(bDict[amp])
580 a = np.array(a).mean(axis=0)
581 b = np.array(b).mean(axis=0)
582 fig = plt.figure(figsize=(7, 11))
583 ax0 = fig.add_subplot(2, 1, 1)
584 im0 = ax0.imshow(np.abs(a.transpose()), origin='lower', norm=mpl.colors.LogNorm())
585 ax0.tick_params(axis='both', labelsize='x-large')
586 ax0.set_title(r'$|a|$', fontsize='x-large')
587 ax0.xaxis.set_ticks_position('bottom')
588 cb0 = plt.colorbar(im0)
589 cb0.ax.tick_params(labelsize='x-large')
591 ax1 = fig.add_subplot(2, 1, 2)
592 ax1.tick_params(axis='both', labelsize='x-large')
593 ax1.yaxis.set_major_locator(MaxNLocator(integer=True))
594 ax1.xaxis.set_major_locator(MaxNLocator(integer=True))
595 im1 = ax1.imshow(1e6*b[:bRange, :bRange].transpose(), origin='lower')
596 cb1 = plt.colorbar(im1)
597 cb1.ax.tick_params(labelsize='x-large')
598 ax1.set_title(r'$b \times 10^6$', fontsize='x-large')
599 ax1.xaxis.set_ticks_position('bottom')
600 plt.tight_layout()
601 pdfPages.savefig(fig)
603 return
605 @staticmethod
606 def ab_vs_dist(aDict, bDict, pdfPages, bRange=4):
607 """Fig. 13 of Astier+19.
609 Values of the fitted 'a' and 'b' arrays, averaged over amplifiers, as a
610 function of distance.
612 Parameters
613 ----------
614 aDict : `dict` [`numpy.array`]
615 Dictionary keyed by amp names containing the fitted 'a'
616 coefficients from the model in Eq. 20 of Astier+19 (if
617 `ptcFitType` is `FULLCOVARIANCE`).
619 bDict : `dict` [`numpy.array`]
620 Dictionary keyed by amp names containing the fitted 'b'
621 coefficients from the model in Eq. 20 of Astier+19 (if
622 `ptcFitType` is `FULLCOVARIANCE`).
624 pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
625 PDF file where the plots will be saved.
627 bRange : `int`
628 Maximum lag for b arrays.
629 """
630 assert (len(aDict) == len(bDict))
631 a = []
632 for amp in aDict:
633 if np.isnan(aDict[amp]).all():
634 continue
635 a.append(aDict[amp])
636 a = np.array(a)
637 y = a.mean(axis=0)
638 sy = a.std(axis=0)/np.sqrt(len(aDict))
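        # Scatter of the per-amp 'a' arrays, scaled to an approximate standard
        # error of the mean (note the denominator uses len(aDict), i.e. all amps,
        # including any skipped above as all-NaN).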
639 i, j = np.indices(y.shape)
640 upper = (i >= j).ravel()
641 r = np.sqrt(i**2 + j**2).ravel()
642 y = y.ravel()
643 sy = sy.ravel()
644 fig = plt.figure(figsize=(6, 9))
645 ax = fig.add_subplot(211)
646 ax.set_xlim([0.5, r.max()+1])
647 ax.errorbar(r[upper], y[upper], yerr=sy[upper], marker='o', linestyle='none', color='b',
648 label='$i>=j$')
649 ax.errorbar(r[~upper], y[~upper], yerr=sy[~upper], marker='o', linestyle='none', color='r',
650 label='$i<j$')
651 ax.legend(loc='upper center', fontsize='x-large')
652 ax.set_xlabel(r'$\sqrt{i^2+j^2}$', fontsize='x-large')
653 ax.set_ylabel(r'$a_{ij}$', fontsize='x-large')
654 ax.set_yscale('log')
655 ax.tick_params(axis='both', labelsize='x-large')
657 #
658 axb = fig.add_subplot(212)
659 b = []
660 for amp in bDict:
661 if np.isnan(bDict[amp]).all():
662 continue
663 b.append(bDict[amp])
664 b = np.array(b)
665 yb = b.mean(axis=0)
666 syb = b.std(axis=0)/np.sqrt(len(bDict))
667 ib, jb = np.indices(yb.shape)
668 upper = (ib > jb).ravel()
669 rb = np.sqrt(i**2 + j**2).ravel()
670 yb = yb.ravel()
671 syb = syb.ravel()
672 xmin = -0.2
673 xmax = bRange
674 axb.set_xlim([xmin, xmax+0.2])
675 cutu = (r > xmin) & (r < xmax) & (upper)
676 cutl = (r > xmin) & (r < xmax) & (~upper)
677 axb.errorbar(rb[cutu], yb[cutu], yerr=syb[cutu], marker='o', linestyle='none', color='b',
678 label='$i>=j$')
679 axb.errorbar(rb[cutl], yb[cutl], yerr=syb[cutl], marker='o', linestyle='none', color='r',
680 label='$i<j$')
681 plt.legend(loc='upper center', fontsize='x-large')
682 axb.set_xlabel(r'$\sqrt{i^2+j^2}$', fontsize='x-large')
683 axb.set_ylabel(r'$b_{ij}$', fontsize='x-large')
684 axb.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
685 axb.tick_params(axis='both', labelsize='x-large')
686 plt.tight_layout()
687 pdfPages.savefig(fig)
689 return
691 @staticmethod
692 def plotAcoeffsSum(aDict, bDict, pdfPages):
693 """Fig. 14. of Astier+19
695 Cumulative sum of a_ij as a function of maximum
696 separation. This plot displays the average over channels.
698 Parameters
699 ----------
700 aDict : `dict` [`numpy.array`]
701 Dictionary keyed by amp names containing the fitted 'a'
702 coefficients from the model in Eq. 20 of Astier+19 (if
703 `ptcFitType` is `FULLCOVARIANCE`).
705 bDict : `dict` [`numpy.array`]
706 Dictionary keyed by amp names containing the fitted 'b'
707 coefficients from the model in Eq. 20 of Astier+19 (if
708 `ptcFitType` is `FULLCOVARIANCE`).
710 pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
711 PDF file where the plots will be saved.
712 """
713 assert (len(aDict) == len(bDict))
714 a, b = [], []
715 for amp in aDict:
716 if np.isnan(aDict[amp]).all() or np.isnan(bDict[amp]).all():
717 continue
718 a.append(aDict[amp])
719 b.append(bDict[amp])
720 a = np.array(a).mean(axis=0)
721 b = np.array(b).mean(axis=0)
722 fig = plt.figure(figsize=(7, 6))
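        # (Added note) The weights built below are consistent with summing a_ij
        # over the whole plane given its assumed four-fold symmetry: lags with
        # i>0 and j>0 appear in four quadrants (weight 4), lags on one axis
        # appear twice (weight 2), and the (0, 0) lag appears once (weight 1).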
723 w = 4*np.ones_like(a)
724 w[0, 1:] = 2
725 w[1:, 0] = 2
726 w[0, 0] = 1
727 wa = w*a
728 indices = range(1, a.shape[0]+1)
729 sums = np.array([wa[0:n, 0:n].sum() for n in indices])
730 ax = plt.subplot(111)
731 ax.plot(indices, sums/sums[0], 'o', color='b')
732 ax.set_yscale('log')
733 ax.set_xlim(indices[0]-0.5, indices[-1]+0.5)
734 ax.set_ylim(None, 1.2)
735 ax.set_ylabel(r'$[\sum_{|i|<n\ &\ |j|<n} a_{ij}] / |a_{00}|$', fontsize='x-large')
736 ax.set_xlabel('n', fontsize='x-large')
737 ax.tick_params(axis='both', labelsize='x-large')
738 plt.tight_layout()
739 pdfPages.savefig(fig)
741 return
743 @staticmethod
744 def plotRelativeBiasACoeffs(aDict, aDictNoB, fullCovsModel, fullCovsModelNoB, signalElectrons,
745 gainDict, pdfPages, maxr=None):
746 """Fig. 15 in Astier+19.
748 Illustrates systematic bias from estimating 'a'
749 coefficients from the slope of correlations as opposed to the
750 full model in Astier+19.
752 Parameters
753 ----------
754 aDict : `dict`
755 Dictionary of 'a' matrices (Eq. 20, Astier+19), with amp
756 names as keys.
758 aDictNoB : `dict`
759 Dictionary of 'a' matrices ('b'= 0 in Eq. 20, Astier+19),
760 with amp names as keys.
762 fullCovsModel : `dict` [`str`, `list`]
763 Dictionary keyed by amp names containing covariances model
764 per mean flux.
766 fullCovsModelNoB : `dict` [`str`, `list`]
767 Dictionary keyed by amp names containing covariances model
768 (with 'b'=0 in Eq. 20 of Astier+19) per mean flux.
770 signalElectrons : `float`
771 Signal at which to evaluate the a_ij coefficients.
773 pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
774 PDF file where the plots will be saved.
776 gainDict : `dict` [`str`, `float`]
777 Dictionary keyed by amp names with the gains in e-/ADU.
779 maxr : `int`, optional
780 Maximum lag.
781 """
782 fig = plt.figure(figsize=(7, 11))
783 title = [f"'a' relative bias at {signalElectrons} e", "'a' relative bias (b=0)"]
784 data = [(aDict, fullCovsModel), (aDictNoB, fullCovsModelNoB)]
786 for k, pair in enumerate(data):
787 diffs = []
788 amean = []
789 for amp in pair[0]:
790 covModel = np.array(pair[1][amp])
791 if np.isnan(covModel).all():
792 continue
793 # Compute the "a" coefficients of the Antilogus+14
794 # (1402.0725) model as in Guyonnet+15 (1501.01577,
795 # eq. 16, the slope of cov/var at a given flux mu in
796 # electrons). Eq. 16 of 1501.01577 is an approximation
797 # to the more complete model in Astier+19
798 # (1905.08677).
799 var = covModel[0, 0, 0] # ADU^2
800 # For a result in electrons^-1, we have to use mu in electrons
801 aOld = covModel[0, :, :]/(var*signalElectrons)
802 a = pair[0][amp]
803 amean.append(a)
804 diffs.append((aOld-a))
805 amean = np.array(amean).mean(axis=0)
806 diff = np.array(diffs).mean(axis=0)
807 diff = diff/amean
808 diff = diff[:]
809 # The difference should be close to zero
810 diff[0, 0] = 0
811 if maxr is None:
812 maxr = diff.shape[0]
813 diff = diff[:maxr, :maxr]
814 ax0 = fig.add_subplot(2, 1, k+1)
815 im0 = ax0.imshow(diff.transpose(), origin='lower')
816 ax0.yaxis.set_major_locator(MaxNLocator(integer=True))
817 ax0.xaxis.set_major_locator(MaxNLocator(integer=True))
818 ax0.tick_params(axis='both', labelsize='x-large')
819 plt.colorbar(im0)
820 ax0.set_title(title[k])
822 plt.tight_layout()
823 pdfPages.savefig(fig)
825 return
827 def _plotStandardPtc(self, dataset, ptcFitType, pdfPages):
828 """Plot PTC, var/signal vs signal, linearity, and linearity residual
829 per amplifier.
831 Parameters
832 ----------
833 dataset : `lsst.ip.isr.ptcDataset.PhotonTransferCurveDataset`
834 The dataset containing the means, variances, exposure
835 times, and mask.
837 ptcFitType : `str`
838 Type of the model fit to the PTC. Options:
839 'FULLCOVARIANCE', 'EXPAPPROXIMATION', or 'POLYNOMIAL'.
841 pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
842 PDF file where the plots will be saved.
843 """
844 if ptcFitType == 'EXPAPPROXIMATION':
845 ptcFunc = funcAstier
846 stringTitle = (r"Var = $\frac{1}{2g^2a_{00}}(\exp (2a_{00} \mu g) - 1) + \frac{n_{00}}{g^2}$ ")
847 elif ptcFitType == 'POLYNOMIAL':
848 ptcFunc = funcPolynomial
849 for key in dataset.ptcFitPars:
850 deg = len(dataset.ptcFitPars[key]) - 1
851 break
852 stringTitle = r"Polynomial (degree: %g)" % (deg)
853 else:
854 raise RuntimeError(f"The input dataset had an invalid dataset.ptcFitType: {ptcFitType}. \n"
855 "Options: 'FULLCOVARIANCE', EXPAPPROXIMATION, or 'POLYNOMIAL'.")
857 legendFontSize = 6.5
858 labelFontSize = 8
859 titleFontSize = 9
860 supTitleFontSize = 18
861 markerSize = 25
863 # General determination of the size of the plot grid
864 nAmps = len(dataset.ampNames)
865 if nAmps == 2:
866 nRows, nCols = 2, 1
867 nRows = np.sqrt(nAmps)
868 mantissa, _ = np.modf(nRows)
869 if mantissa > 0:
870 nRows = int(nRows) + 1
871 nCols = nRows
872 else:
873 nRows = int(nRows)
874 nCols = nRows
876 f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
877 f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
878 f3, ax3 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
880 for i, (amp, a, a2, a3) in enumerate(zip(dataset.ampNames, ax.flatten(), ax2.flatten(),
881 ax3.flatten())):
882 meanVecOriginal = np.ravel(np.array(dataset.rawMeans[amp]))
883 varVecOriginal = np.ravel(np.array(dataset.rawVars[amp]))
884 mask = np.ravel(np.array(dataset.expIdMask[amp]))
885 if np.sum(mask) == 0: # The whole amp is bad
886 a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
887 a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
888 a3.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
889 continue
890 else:
891 mask = mask.astype(bool)
892 meanVecFinal = meanVecOriginal[mask]
893 varVecFinal = varVecOriginal[mask]
894 meanVecOutliers = meanVecOriginal[np.invert(mask)]
895 varVecOutliers = varVecOriginal[np.invert(mask)]
896 pars, parsErr = np.array(dataset.ptcFitPars[amp]), np.array(dataset.ptcFitParsError[amp])
897 ptcRedChi2 = dataset.ptcFitChiSq[amp]
898 if ptcFitType == 'EXPAPPROXIMATION':
899 if len(meanVecFinal):
900 ptcA00, ptcA00error = pars[0], parsErr[0]
901 ptcGain, ptcGainError = pars[1], parsErr[1]
902 ptcNoise = np.sqrt((pars[2])) # pars[2] is in (e-)^2
903 ptcNoiseAdu = ptcNoise*(1./ptcGain)
904 ptcNoiseError = 0.5*(parsErr[2]/np.fabs(pars[2]))*np.sqrt(np.fabs(pars[2]))
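                    # (Added note) Standard error propagation for a square root:
                    # sigma_noise = sigma_p / (2*sqrt(|p|)) with p = pars[2],
                    # written here as 0.5*(parsErr[2]/|pars[2]|)*sqrt(|pars[2]|).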
905 stringLegend = (f"a00: {ptcA00:.2e}+/-{ptcA00error:.2e} 1/e"
906 f"\nGain: {ptcGain:.4}+/-{ptcGainError:.2e} e/ADU"
907 f"\nNoise: {ptcNoise:.4}+/-{ptcNoiseError:.2e} e\n"
908 r"$\chi^2_{\rm{red}}$: " + f"{ptcRedChi2:.4}"
909 f"\nLast in fit: {meanVecFinal[-1]:.7} ADU ")
911 if ptcFitType == 'POLYNOMIAL':
912 if len(meanVecFinal):
913 ptcGain, ptcGainError = 1./pars[1], np.fabs(1./pars[1])*(parsErr[1]/pars[1])
914 ptcNoiseAdu = np.sqrt((pars[0])) # pars[0] is in ADU^2
915 ptcNoise = ptcNoiseAdu*ptcGain
916 ptcNoiseError = (0.5*(parsErr[0]/np.fabs(pars[0]))*(np.sqrt(np.fabs(pars[0]))))*ptcGain
917 stringLegend = (f"Gain: {ptcGain:.4}+/-{ptcGainError:.2e} e/ADU\n"
918 f"Noise: {ptcNoise:.4}+/-{ptcNoiseError:.2e} e\n"
919 r"$\chi^2_{\rm{red}}$: " + f"{ptcRedChi2:.4}"
920 f"\nLast in fit: {meanVecFinal[-1]:.7} ADU ")
921 a.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
922 a.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
923 a.tick_params(labelsize=11)
924 a.set_xscale('linear')
925 a.set_yscale('linear')
927 a2.set_xlabel(r'Mean Signal ($\mu$, ADU)', fontsize=labelFontSize)
928 a2.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
929 a2.tick_params(labelsize=11)
930 a2.set_xscale('log')
931 a2.set_yscale('log')
933 a3.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
934 a3.set_ylabel(r'Variance/$\mu$ (ADU)', fontsize=labelFontSize)
935 a3.tick_params(labelsize=11)
936 a3.set_xscale('log')
937 a3.set_yscale('linear')
938 minMeanVecFinal = np.nanmin(meanVecFinal)
939 maxMeanVecFinal = np.nanmax(meanVecFinal)
940 meanVecFit = np.linspace(minMeanVecFinal, maxMeanVecFinal, 100*len(meanVecFinal))
941 minMeanVecOriginal = np.nanmin(meanVecOriginal)
942 maxMeanVecOriginal = np.nanmax(meanVecOriginal)
943 deltaXlim = maxMeanVecOriginal - minMeanVecOriginal
944 a.plot(meanVecFit, ptcFunc(pars, meanVecFit), color='red')
945 a.plot(meanVecFinal, ptcNoiseAdu**2 + (1./ptcGain)*meanVecFinal, color='green',
946 linestyle='--')
947 a.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
948 a.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
949 a.text(0.03, 0.66, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
950 a.set_title(amp, fontsize=titleFontSize)
951 a.set_xlim([minMeanVecOriginal - 0.2*deltaXlim, maxMeanVecOriginal + 0.2*deltaXlim])
953 # Same, but in log-scale
954 a2.plot(meanVecFit, ptcFunc(pars, meanVecFit), color='red')
955 a2.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
956 a2.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
957 a2.text(0.03, 0.66, stringLegend, transform=a2.transAxes, fontsize=legendFontSize)
958 a2.set_title(amp, fontsize=titleFontSize)
959 a2.set_xlim([minMeanVecOriginal, maxMeanVecOriginal])
961 # Var/mu vs mu
962 a3.plot(meanVecFit, ptcFunc(pars, meanVecFit)/meanVecFit, color='red')
963 a3.scatter(meanVecFinal, varVecFinal/meanVecFinal, c='blue', marker='o', s=markerSize)
964 a3.scatter(meanVecOutliers, varVecOutliers/meanVecOutliers, c='magenta', marker='s',
965 s=markerSize)
966 a3.text(0.05, 0.1, stringLegend, transform=a3.transAxes, fontsize=legendFontSize)
967 a3.set_title(amp, fontsize=titleFontSize)
968 a3.set_xlim([minMeanVecOriginal - 0.2*deltaXlim, maxMeanVecOriginal + 0.2*deltaXlim])
969 f.suptitle("PTC \n Fit: " + stringTitle, fontsize=supTitleFontSize)
970 pdfPages.savefig(f)
971 f2.suptitle("PTC (log-log)", fontsize=supTitleFontSize)
972 pdfPages.savefig(f2)
973 f3.suptitle(r"Var/$\mu$", fontsize=supTitleFontSize)
974 pdfPages.savefig(f3)
976 return
978 def _plotLinearizer(self, dataset, linearizer, pdfPages):
979 """Plot linearity and linearity residual per amplifier
981 Parameters
982 ----------
983 dataset : `lsst.ip.isr.ptcDataset.PhotonTransferCurveDataset`
984 The dataset containing the means, variances, exposure
985 times, and mask.
987 linearizer : `lsst.ip.isr.Linearizer`
988 Linearizer object.

pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
PDF file where the plots will be saved.
989 """
990 legendFontSize = 7
991 labelFontSize = 7
992 titleFontSize = 9
993 supTitleFontSize = 18
995 # General determination of the size of the plot grid
996 nAmps = len(dataset.ampNames)
997 if nAmps == 2:
998 nRows, nCols = 2, 1
999 nRows = np.sqrt(nAmps)
1000 mantissa, _ = np.modf(nRows)
1001 if mantissa > 0:
1002 nRows = int(nRows) + 1
1003 nCols = nRows
1004 else:
1005 nRows = int(nRows)
1006 nCols = nRows
1008 # Plot mean vs time (f1), and fractional residuals (f2)
1009 f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
1010 f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
1011 for i, (amp, a, a2) in enumerate(zip(dataset.ampNames, ax.flatten(), ax2.flatten())):
1012 mask = dataset.expIdMask[amp]
1013 if np.sum(mask) == 0: # Bad amp
1014 a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
1015 a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
1016 continue
1017 else:
1018 mask = mask.astype(bool)
1019 meanVecFinal = np.array(dataset.rawMeans[amp])[mask]
1020 timeVecFinal = np.array(dataset.rawExpTimes[amp])[mask]
1022 a.set_xlabel('Time (sec)', fontsize=labelFontSize)
1023 a.set_ylabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
1024 a.tick_params(labelsize=labelFontSize)
1025 a.set_xscale('linear')
1026 a.set_yscale('linear')
1028 a2.axhline(y=0, color='k')
1029 a2.axvline(x=0, color='k', linestyle='-')
1030 a2.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
1031 a2.set_ylabel('Fractional nonlinearity (%)', fontsize=labelFontSize)
1032 a2.tick_params(labelsize=labelFontSize)
1033 a2.set_xscale('linear')
1034 a2.set_yscale('linear')
1036 pars, parsErr = linearizer.fitParams[amp], linearizer.fitParamsErr[amp]
1037 k0, k0Error = pars[0], parsErr[0]
1038 k1, k1Error = pars[1], parsErr[1]
1039 k2, k2Error = pars[2], parsErr[2]
1040 linRedChi2 = linearizer.fitChiSq[amp]
1041 stringLegend = (f"k0: {k0:.4}+/-{k0Error:.2e} ADU\nk1: {k1:.4}+/-{k1Error:.2e} ADU/t"
1042 f"\nk2: {k2:.2e}+/-{k2Error:.2e} ADU/t^2\n"
1043 r"$\chi^2_{\rm{red}}$: " + f"{linRedChi2:.4}")
1044 a.scatter(timeVecFinal, meanVecFinal)
1045 a.plot(timeVecFinal, funcPolynomial(pars, timeVecFinal), color='red')
1046 a.text(0.03, 0.75, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
1047 a.set_title(f"{amp}", fontsize=titleFontSize)
1049 linearPart = k0 + k1*timeVecFinal
1050 fracLinRes = 100*(linearPart - meanVecFinal)/linearPart
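            # (Added note) Fractional nonlinearity in percent,
            # 100*(k0 + k1*t - mu)/(k0 + k1*t): the deviation of the measured
            # mean signal from the linear part of the fit, matching the f2
            # suptitle below.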
1051 a2.plot(meanVecFinal, fracLinRes, c='g')
1052 a2.set_title(f"{amp}", fontsize=titleFontSize)
1054 f.suptitle("Linearity \n Fit: Polynomial (degree: %g)"
1055 % (len(pars)-1),
1056 fontsize=supTitleFontSize)
1057 f2.suptitle(r"Fractional NL residual" "\n"
1058 r"$100\times \frac{(k_0 + k_1*Time-\mu)}{k_0+k_1*Time}$",
1059 fontsize=supTitleFontSize)
1060 pdfPages.savefig(f)
1061 pdfPages.savefig(f2)
1063 @staticmethod
1064 def findGroups(x, maxDiff):
1065 """Group data into bins, with at most maxDiff distance between bins.
1067 Parameters
1068 ----------
1069 x : `list`
1070 Data to bin.
1072 maxDiff : `float`
1073 Maximum allowed distance between a value and the running mean of its bin.
1075 Returns
1076 -------
1077 index : `list`
1078 Bin indices.
1079 """
1080 ix = np.argsort(x)
1081 xsort = np.sort(x)
1082 index = np.zeros_like(x, dtype=np.int32)
1083 xc = xsort[0]
1084 group = 0
1085 ng = 1
1087 for i in range(1, len(ix)):
1088 xval = xsort[i]
1089 if (xval - xc < maxDiff):
1090 xc = (ng*xc + xval)/(ng+1)
1091 ng += 1
1092 index[ix[i]] = group
1093 else:
1094 group += 1
1095 ng = 1
1096 index[ix[i]] = group
1097 xc = xval
1099 return index
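    # Worked example (added for illustration): with x = [1.0, 1.2, 5.0, 5.1] and
    # maxDiff = 0.5, the first two values stay within 0.5 of the running group
    # mean (1.0, then 1.1) and the last two form a second group, so findGroups
    # returns [0, 0, 1, 1].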
1101 @staticmethod
1102 def indexForBins(x, nBins):
1103 """Builds an index with regular binning. The result can be fed into
1104 binData.
1106 Parameters
1107 ----------
1108 x : `numpy.array`
1109 Data to bin.
1110 nBins : `int`
1111 Number of bins.
1113 Returns
1114 -------
1115 np.digitize(x, bins): `numpy.array`
1116 Bin indices.
1117 """
1118 bins = np.linspace(x.min(), x.max() + abs(x.max() * 1e-7), nBins + 1)
1119 return np.digitize(x, bins)
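    # Worked example (added for illustration): for x = np.arange(10.) and
    # nBins = 2 the bin edges are [0, 4.5, 9.0000009], so np.digitize returns 1
    # for values in [0, 4.5) and 2 for values in [4.5, 9]; the tiny padding on
    # x.max() keeps the largest point inside the last bin.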
1121 @staticmethod
1122 def binData(x, y, binIndex, wy=None):
1123 """Bin data (usually for display purposes).
1125 Parameters
1126 ----------
1127 x : `numpy.array`
1128 Data to bin.
1130 y : `numpy.array`
1131 Data to bin.
1133 binIndex : `list`
1134 Bin number of each datum.
1136 wy : `numpy.array`, optional
1137 Inverse rms of each datum to use when averaging (the
1138 actual weight is wy**2).
1140 Returns
1141 -------
1142 xbin : `numpy.array`
1143 Binned data in x.
1145 ybin : `numpy.array`
1146 Binned data in y.
1148 wybin : `numpy.array`
1149 Binned weights in y, computed from wy's in each bin.
1151 sybin : `numpy.array`
1152 Uncertainty on the bin average, considering actual
1153 scatter, and ignoring weights.
1154 """
1155 if wy is None:
1156 wy = np.ones_like(x)
1157 binIndexSet = set(binIndex)
1158 w2 = wy*wy
1159 xw2 = x*(w2)
1160 xbin = np.array([xw2[binIndex == i].sum()/w2[binIndex == i].sum() for i in binIndexSet])
1162 yw2 = y*w2
1163 ybin = np.array([yw2[binIndex == i].sum()/w2[binIndex == i].sum() for i in binIndexSet])
1165 wybin = np.sqrt(np.array([w2[binIndex == i].sum() for i in binIndexSet]))
1166 sybin = np.array([y[binIndex == i].std()/np.sqrt(np.array([binIndex == i]).sum())
1167 for i in binIndexSet])
1169 return xbin, ybin, wybin, sybin
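# A small self-contained sketch (added; synthetic data, not part of the task)
# showing how the two static helpers above are intended to be used together:
#
#     import numpy as np
#     rng = np.random.default_rng(0)
#     x = np.linspace(100., 1000., 50)
#     y = 0.01*x + rng.normal(scale=0.05, size=x.size)
#     gind = PlotPhotonTransferCurveTask.indexForBins(x, nBins=5)
#     xb, yb, wyb, sigyb = PlotPhotonTransferCurveTask.binData(x, y, gind)
#     # xb, yb are weighted bin means; sigyb is the scatter-based uncertainty
#     # on each bin mean, as used for the error bars in
#     # plotNormalizedCovariances.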