# Public API of this module: only the PTC plotting task class is exported.
# NOTE(review): the leading "23" is an original-line-number artifact of a
# garbled extraction; the statement itself is
# __all__ = ['PlotPhotonTransferCurveTask'].
23 __all__ = [
'PlotPhotonTransferCurveTask']
26 import matplotlib.pyplot
as plt
27 import matplotlib
as mpl
28 from matplotlib
import gridspec
30 from matplotlib.backends.backend_pdf
import PdfPages
36 from .utils
import (funcAstier, funcPolynomial, NonexistentDatasetTaskDataIdContainer,
37 calculateWeightedReducedChi2)
38 from matplotlib.ticker
import MaxNLocator
40 from .astierCovPtcFit
import computeApproximateAcoeffs
41 from .astierCovPtcUtils
import getFitDataFromCovariances
47 """Config class for photon transfer curve measurement task"""
48 datasetFileName = pexConfig.Field(
50 doc=
"datasetPtc file name (pkl)",
53 linearizerFileName = pexConfig.Field(
55 doc=
"linearizer file name (fits)",
58 ccdKey = pexConfig.Field(
60 doc=
"The key by which to pull a detector from a dataId, e.g. 'ccd' or 'detector'.",
63 signalElectronsRelativeA = pexConfig.Field(
65 doc=
"Signal value for relative systematic bias between different methods of estimating a_ij "
66 "(Fig. 15 of Astier+19).",
69 plotNormalizedCovariancesNumberOfBins = pexConfig.Field(
71 doc=
"Number of bins in `plotNormalizedCovariancesNumber` function "
72 "(Fig. 8, 10., of Astier+19).",
78 """A class to plot the dataset from MeasurePhotonTransferCurveTask.
84 Positional arguments passed to the Task constructor. None used at this
87 Keyword arguments passed on to the Task constructor. None used at this
92 ConfigClass = PlotPhotonTransferCurveTaskConfig
93 _DefaultName =
"plotPhotonTransferCurve"
96 pipeBase.CmdLineTask.__init__(self, *args, **kwargs)
97 plt.interactive(
False)
98 self.config.validate()
# NOTE(review): garbled extraction -- leading integers are original line
# numbers, statements are split across lines, and original lines 104 and 108
# are elided here (presumably the ArgumentParser construction and the
# `return parser` statement -- confirm against upstream).  Code kept
# byte-identical; comments only added.
# Presumably decorated with @classmethod (decorator line not visible here).
102 def _makeArgumentParser(cls):
103 """Augment argument parser for the MeasurePhotonTransferCurveTask."""
# Register the dataId argument for the "photonTransferCurveDataset" dataset;
# the container class name suggests it tolerates nonexistent datasets -- verify.
105 parser.add_id_argument(
"--id", datasetType=
"photonTransferCurveDataset",
106 ContainerClass=NonexistentDatasetTaskDataIdContainer,
107 help=
"The ccds to use, e.g. --id ccd=0..100")
112 """Run the Photon Transfer Curve (PTC) plotting measurement task.
116 dataRef : list of lsst.daf.persistence.ButlerDataRef
117 dataRef for the detector for the expIds to be fit.
120 datasetFile = self.config.datasetFileName
121 datasetPtc = PhotonTransferCurveDataset.readFits(datasetFile)
123 dirname = dataRef.getUri(datasetType=
'cpPipePlotRoot', write=
True)
124 if not os.path.exists(dirname):
127 detNum = dataRef.dataId[self.config.ccdKey]
128 filename = f
"PTC_det{detNum}.pdf"
129 filenameFull = os.path.join(dirname, filename)
131 if self.config.linearizerFileName:
132 linearizer = isr.linearize.Linearizer.readFits(self.config.linearizerFileName)
135 self.
run(filenameFull, datasetPtc, linearizer=linearizer, log=self.log)
137 return pipeBase.Struct(exitStatus=0)
# NOTE(review): garbled extraction -- leading integers are original line
# numbers; statements are split across lines, and the bodies of the
# FULLCOVARIANCE and EXPAPPROXIMATION/POLYNOMIAL branches (original lines
# 144 and 146-147) are elided.  Presumably they dispatch to the covariance
# and standard PTC plotting helpers respectively -- confirm against upstream.
# Code kept byte-identical; comments only added.
139 def run(self, filenameFull, datasetPtc, linearizer=None, log=None):
140 """Make the plots for the PTC task"""
# Dispatch on the fit type recorded in the dataset; every figure is written
# into a single output PDF via the PdfPages context manager.
141 ptcFitType = datasetPtc.ptcFitType
142 with PdfPages(filenameFull)
as pdfPages:
143 if ptcFitType
in [
"FULLCOVARIANCE", ]:
145 elif ptcFitType
in [
"EXPAPPROXIMATION",
"POLYNOMIAL"]:
# Unknown fit type: fail loudly, listing the supported options.
148 raise RuntimeError(f
"The input dataset had an invalid dataset.ptcFitType: {ptcFitType}. \n" +
149 "Options: 'FULLCOVARIANCE', EXPAPPROXIMATION, or 'POLYNOMIAL'.")
157 """Make plots for MeasurePhotonTransferCurve task when doCovariancesAstier=True.
159 This function call other functions that mostly reproduce the plots in Astier+19.
160 Most of the code is ported from Pierre Astier's repository https://github.com/PierreAstier/bfptc
164 dataset : `lsst.ip.isr.ptcDataset.PhotonTransferCurveDataset`
165 The dataset containing the necessary information to produce the plots.
167 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
168 PDF file where the plots will be saved.
170 log : `lsst.log.Log`, optional
171 Logger to handle messages
173 mu = dataset.rawMeans
174 expIdMask = dataset.expIdMask
176 fullCovs = dataset.covariances
177 fullCovsModel = dataset.covariancesModel
178 fullCovWeights = dataset.covariancesSqrtWeights
179 aDict = dataset.aMatrix
180 bDict = dataset.bMatrix
181 fullCovsNoB = dataset.covariancesNoB
182 fullCovsModelNoB = dataset.covariancesModelNoB
183 fullCovWeightsNoB = dataset.covariancesSqrtWeightsNoB
184 aDictNoB = dataset.aMatrixNoB
185 gainDict = dataset.gain
186 noiseDict = dataset.noise
188 self.
plotCovariances(mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB, fullCovsModelNoB,
189 fullCovWeightsNoB, gainDict, noiseDict, aDict, bDict, expIdMask, pdfPages)
191 fullCovsModelNoB, fullCovWeightsNoB, expIdMask, pdfPages,
192 offset=0.01, topPlot=
True,
193 numberOfBins=self.config.plotNormalizedCovariancesNumberOfBins,
196 fullCovsModelNoB, fullCovWeightsNoB, expIdMask, pdfPages,
197 numberOfBins=self.config.plotNormalizedCovariancesNumberOfBins,
200 fullCovsModelNoB, fullCovWeightsNoB, expIdMask, pdfPages,
201 numberOfBins=self.config.plotNormalizedCovariancesNumberOfBins,
203 self.
plot_a_b(aDict, bDict, pdfPages)
204 self.
ab_vs_dist(aDict, bDict, pdfPages, bRange=4)
207 self.config.signalElectronsRelativeA, gainDict, pdfPages, maxr=4)
# NOTE(review): garbled extraction -- leading integers are original line
# numbers; statements are split across lines and many interior lines are
# elided (e.g. the left-hand continuations of the getFitDataFromCovariances
# calls at orig 306/317/324, the chi2 computations at orig 312-315/333-335,
# and several figure-setup lines).  Presumably a @staticmethod (decorator
# line not visible).  Code kept byte-identical; comments only added.
212 def plotCovariances(mu, covs, covsModel, covsWeights, covsNoB, covsModelNoB, covsWeightsNoB,
213 gainDict, noiseDict, aDict, bDict, expIdMask, pdfPages):
214 """Plot covariances and models: Cov00, Cov10, Cov01.
216 Figs. 6 and 7 of Astier+19
220 mu : `dict`, [`str`, `list`]
221 Dictionary keyed by amp name with mean signal values.
223 covs : `dict`, [`str`, `list`]
224 Dictionary keyed by amp names containing a list of measued covariances per mean flux.
226 covsModel : `dict`, [`str`, `list`]
227 Dictionary keyed by amp names containinging covariances model (Eq. 20 of Astier+19) per mean flux.
229 covsWeights : `dict`, [`str`, `list`]
230 Dictionary keyed by amp names containinging sqrt. of covariances weights.
232 covsNoB : `dict`, [`str`, `list`]
233 Dictionary keyed by amp names containing a list of measued covariances per mean flux ('b'=0 in
236 covsModelNoB : `dict`, [`str`, `list`]
237 Dictionary keyed by amp names containing covariances model (with 'b'=0 in Eq. 20 of Astier+19)
240 covsWeightsNoB : `dict`, [`str`, `list`]
241 Dictionary keyed by amp names containing sqrt. of covariances weights ('b' = 0 in Eq. 20 of
244 gainDict : `dict`, [`str`, `float`]
245 Dictionary keyed by amp names containing the gains in e-/ADU.
247 noiseDict : `dict`, [`str`, `float`]
248 Dictionary keyed by amp names containing the rms redout noise in e-.
250 aDict : `dict`, [`str`, `numpy.array`]
251 Dictionary keyed by amp names containing 'a' coefficients (Eq. 20 of Astier+19).
253 bDict : `dict`, [`str`, `numpy.array`]
254 Dictionary keyed by amp names containing 'b' coefficients (Eq. 20 of Astier+19).
256 expIdMask : `dict`, [`str`, `list`]
257 Dictionary keyed by amp names containing the masked exposure pairs.
259 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
260 PDF file where the plots will be saved.
# Layout: an approximately-square grid of per-amplifier panels (nRows from
# sqrt(nAmps), rounded up when fractional -- the rounding condition line is
# elided in this extraction).
266 supTitleFontSize = 18
272 nRows = np.sqrt(nAmps)
273 mantissa, _ = np.modf(nRows)
275 nRows = int(nRows) + 1
# Five figures sharing the same grid: PTC (linear), PTC (log-log -- per the
# suptitle below), Cov00 residuals, Cov01, and Cov10.
281 f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex=
'col', sharey=
'row', figsize=(13, 10))
282 f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex=
'col', sharey=
'row', figsize=(13, 10))
283 fResCov00, axResCov00 = plt.subplots(nrows=nRows, ncols=nCols, sharex=
'col', sharey=
'row',
285 fCov01, axCov01 = plt.subplots(nrows=nRows, ncols=nCols, sharex=
'col', sharey=
'row', figsize=(13, 10))
286 fCov10, axCov10 = plt.subplots(nrows=nRows, ncols=nCols, sharex=
'col', sharey=
'row', figsize=(13, 10))
# Sanity checks: every per-amp input dict must cover the same amplifiers.
# NOTE(review): assert is stripped under `python -O`; consider raising instead.
288 assert(len(covsModel) == nAmps)
289 assert(len(covsWeights) == nAmps)
291 assert(len(covsNoB) == nAmps)
292 assert(len(covsModelNoB) == nAmps)
293 assert(len(covsWeightsNoB) == nAmps)
# Per-amplifier loop: one axes object from each of the five grids per amp.
295 for i, (amp, a, a2, aResVar, a3, a4)
in enumerate(zip(covs, ax.flatten(),
296 ax2.flatten(), axResCov00.flatten(),
297 axCov01.flatten(), axCov10.flatten())):
299 muAmp, cov, model, weight = mu[amp], covs[amp], covsModel[amp], covsWeights[amp]
# An all-NaN covariance list marks a bad amplifier (handled by the BAD
# titles near the bottom of the loop).
300 if not np.isnan(np.array(cov)).all():
301 aCoeffs, bCoeffs = np.array(aDict[amp]), np.array(bDict[amp])
302 gain, noise = gainDict[amp], noiseDict[amp]
303 mask = expIdMask[amp]
305 (meanVecOriginal, varVecOriginal, varVecModelOriginal,
307 meanVecFinal, varVecFinal = meanVecOriginal[mask], varVecOriginal[mask]
308 varVecModelFinal = varVecModelOriginal[mask]
309 meanVecOutliers = meanVecOriginal[np.invert(mask)]
310 varVecOutliers = varVecOriginal[np.invert(mask)]
311 varWeightsFinal = weightsOriginal[mask]
314 varWeightsFinal, len(meanVecFinal), 4)
316 (meanVecOrigCov01, varVecOrigCov01, varVecModelOrigCov01,
318 meanVecFinalCov01, varVecFinalCov01 = meanVecOrigCov01[mask], varVecOrigCov01[mask]
319 varVecModelFinalCov01 = varVecModelOrigCov01[mask]
320 meanVecOutliersCov01 = meanVecOrigCov01[np.invert(mask)]
321 varVecOutliersCov01 = varVecOrigCov01[np.invert(mask)]
323 (meanVecOrigCov10, varVecOrigCov10, varVecModelOrigCov10,
325 meanVecFinalCov10, varVecFinalCov10 = meanVecOrigCov10[mask], varVecOrigCov10[mask]
326 varVecModelFinalCov10 = varVecModelOrigCov10[mask]
327 meanVecOutliersCov10 = meanVecOrigCov10[np.invert(mask)]
328 varVecOutliersCov10 = varVecOrigCov10[np.invert(mask)]
# Weighted quadratic fit of Var vs mu, used in the residuals panel for
# comparison against the full Astier+19 model.
331 par2 = np.polyfit(meanVecFinal, varVecFinal, 2, w=varWeightsFinal)
332 varModelFinalQuadratic = np.polyval(par2, meanVecFinal)
334 varWeightsFinal, len(meanVecFinal), 3)
337 covNoB, modelNoB, weightNoB = covsNoB[amp], covsModelNoB[amp], covsWeightsNoB[amp]
338 (meanVecOriginalNoB, varVecOriginalNoB, varVecModelOriginalNoB,
342 meanVecFinalNoB, varVecFinalNoB, varVecModelFinalNoB, varWeightsFinalNoB = (
343 meanVecOriginalNoB[mask], varVecOriginalNoB[mask], varVecModelOriginalNoB[mask],
344 varWeightsOriginalNoB[mask])
347 varWeightsFinalNoB, len(meanVecFinalNoB),
# Per-panel legend text: gain, noise, a00, b00 and last fitted signal level.
349 stringLegend = (f
"Gain: {gain:.4} e/ADU \n" +
350 f
"Noise: {noise:.4} e \n" +
351 r"$a_{00}$: %.3e 1/e"%aCoeffs[0, 0] +
352 "\n" +
r"$b_{00}$: %.3e 1/e"%bCoeffs[0, 0] +
353 f
"\nLast in fit: {meanVecFinal[-1]:.7} ADU ")
354 minMeanVecFinal = np.nanmin(meanVecFinal)
355 maxMeanVecFinal = np.nanmax(meanVecFinal)
356 deltaXlim = maxMeanVecFinal - minMeanVecFinal
# Panel 1 (f/ax): PTC, data + model, linear scales.
358 a.set_xlabel(
r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
359 a.set_ylabel(
r'Variance (ADU$^2$)', fontsize=labelFontSize)
360 a.tick_params(labelsize=11)
361 a.set_xscale(
'linear', fontsize=labelFontSize)
362 a.set_yscale(
'linear', fontsize=labelFontSize)
363 a.scatter(meanVecFinal, varVecFinal, c=
'blue', marker=
'o', s=markerSize)
# Masked-out exposure pairs plotted as magenta squares.
364 a.scatter(meanVecOutliers, varVecOutliers, c=
'magenta', marker=
's', s=markerSize)
# NOTE(review): `lineStyle` is not a standard matplotlib kwarg (expected
# `linestyle`/`ls`) -- verify against the matplotlib version in use.
365 a.plot(meanVecFinal, varVecModelFinal, color=
'red', lineStyle=
'-')
366 a.text(0.03, 0.7, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
367 a.set_title(amp, fontsize=titleFontSize)
368 a.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
# Panel 2 (f2/ax2): same data; the suptitle labels this figure log-log
# (the set_xscale/set_yscale('log') lines appear elided in this extraction).
371 a2.set_xlabel(
r'Mean Signal ($\mu$, ADU)', fontsize=labelFontSize)
372 a2.set_ylabel(
r'Variance (ADU$^2$)', fontsize=labelFontSize)
373 a2.tick_params(labelsize=11)
376 a2.plot(meanVecFinal, varVecModelFinal, color=
'red', lineStyle=
'-')
377 a2.scatter(meanVecFinal, varVecFinal, c=
'blue', marker=
'o', s=markerSize)
378 a2.scatter(meanVecOutliers, varVecOutliers, c=
'magenta', marker=
's', s=markerSize)
379 a2.text(0.03, 0.7, stringLegend, transform=a2.transAxes, fontsize=legendFontSize)
380 a2.set_title(amp, fontsize=titleFontSize)
381 a2.set_xlim([minMeanVecFinal, maxMeanVecFinal])
# Panel 3 (fResCov00/aResVar): Var residuals (data - model) for the full
# model, a quadratic fit, and the b=0 model, each labelled with reduced chi2.
384 aResVar.set_xlabel(
r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
385 aResVar.set_ylabel(
r'Residuals (ADU$^2$)', fontsize=labelFontSize)
386 aResVar.tick_params(labelsize=11)
387 aResVar.set_xscale(
'linear', fontsize=labelFontSize)
388 aResVar.set_yscale(
'linear', fontsize=labelFontSize)
389 aResVar.plot(meanVecFinal, varVecFinal - varVecModelFinal, color=
'blue', lineStyle=
'-',
390 label=
r'Full fit ($\chi_{\rm{red}}^2$: %g)'%chi2FullModelVar)
391 aResVar.plot(meanVecFinal, varVecFinal - varModelFinalQuadratic, color=
'red', lineStyle=
'-',
392 label=
r'Quadratic fit ($\chi_{\rm{red}}^2$: %g)'%chi2QuadModelVar)
393 aResVar.plot(meanVecFinalNoB, varVecFinalNoB - varVecModelFinalNoB, color=
'green',
395 label=
r'Full fit (b=0) ($\chi_{\rm{red}}^2$: %g)'%chi2FullModelNoBVar)
396 aResVar.axhline(color=
'black')
397 aResVar.set_title(amp, fontsize=titleFontSize)
398 aResVar.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
399 aResVar.legend(fontsize=7)
# Panel 4 (fCov01/a3): Cov01 data + model.
401 a3.set_xlabel(
r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
402 a3.set_ylabel(
r'Cov01 (ADU$^2$)', fontsize=labelFontSize)
403 a3.tick_params(labelsize=11)
404 a3.set_xscale(
'linear', fontsize=labelFontSize)
405 a3.set_yscale(
'linear', fontsize=labelFontSize)
406 a3.scatter(meanVecFinalCov01, varVecFinalCov01, c=
'blue', marker=
'o', s=markerSize)
407 a3.scatter(meanVecOutliersCov01, varVecOutliersCov01, c=
'magenta', marker=
's', s=markerSize)
408 a3.plot(meanVecFinalCov01, varVecModelFinalCov01, color=
'red', lineStyle=
'-')
409 a3.set_title(amp, fontsize=titleFontSize)
410 a3.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
# Panel 5 (fCov10/a4): Cov10 data + model.
412 a4.set_xlabel(
r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
413 a4.set_ylabel(
r'Cov10 (ADU$^2$)', fontsize=labelFontSize)
414 a4.tick_params(labelsize=11)
415 a4.set_xscale(
'linear', fontsize=labelFontSize)
416 a4.set_yscale(
'linear', fontsize=labelFontSize)
417 a4.scatter(meanVecFinalCov10, varVecFinalCov10, c=
'blue', marker=
'o', s=markerSize)
418 a4.scatter(meanVecOutliersCov10, varVecOutliersCov10, c=
'magenta', marker=
's', s=markerSize)
419 a4.plot(meanVecFinalCov10, varVecModelFinalCov10, color=
'red', lineStyle=
'-')
420 a4.set_title(amp, fontsize=titleFontSize)
421 a4.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
# Fallback branch (for the all-NaN covariance case above): mark the amp BAD
# in every panel title.
424 a.set_title(f
"{amp} (BAD)", fontsize=titleFontSize)
425 a2.set_title(f
"{amp} (BAD)", fontsize=titleFontSize)
426 a3.set_title(f
"{amp} (BAD)", fontsize=titleFontSize)
427 a4.set_title(f
"{amp} (BAD)", fontsize=titleFontSize)
# Figure titles and PDF output.  NOTE(review): savefig calls for f and f2
# (orig ~431, 434) appear elided in this extraction.
429 f.suptitle(
"PTC from covariances as in Astier+19 \n Fit: Eq. 20, Astier+19",
430 fontsize=supTitleFontSize)
432 f2.suptitle(
"PTC from covariances as in Astier+19 (log-log) \n Fit: Eq. 20, Astier+19",
433 fontsize=supTitleFontSize)
435 fResCov00.suptitle(
"Residuals (data-model) for Cov00 (Var)", fontsize=supTitleFontSize)
436 pdfPages.savefig(fResCov00)
437 fCov01.suptitle(
"Cov01 as in Astier+19 (nearest parallel neighbor covariance) \n" +
438 " Fit: Eq. 20, Astier+19", fontsize=supTitleFontSize)
439 pdfPages.savefig(fCov01)
440 fCov10.suptitle(
"Cov10 as in Astier+19 (nearest serial neighbor covariance) \n" +
441 "Fit: Eq. 20, Astier+19", fontsize=supTitleFontSize)
442 pdfPages.savefig(fCov10)
447 covsWeightsNoB, expIdMask, pdfPages, offset=0.004,
448 numberOfBins=10, plotData=True, topPlot=False, log=None):
449 """Plot C_ij/mu vs mu.
451 Figs. 8, 10, and 11 of Astier+19
461 inputMu : `dict`, [`str`, `list`]
462 Dictionary keyed by amp name with mean signal values.
464 covs : `dict`, [`str`, `list`]
465 Dictionary keyed by amp names containing a list of measued covariances per mean flux.
467 covsModel : `dict`, [`str`, `list`]
468 Dictionary keyed by amp names containinging covariances model (Eq. 20 of Astier+19) per mean flux.
470 covsWeights : `dict`, [`str`, `list`]
471 Dictionary keyed by amp names containinging sqrt. of covariances weights.
473 covsNoB : `dict`, [`str`, `list`]
474 Dictionary keyed by amp names containing a list of measued covariances per mean flux ('b'=0 in
477 covsModelNoB : `dict`, [`str`, `list`]
478 Dictionary keyed by amp names containing covariances model (with 'b'=0 in Eq. 20 of Astier+19)
481 covsWeightsNoB : `dict`, [`str`, `list`]
482 Dictionary keyed by amp names containing sqrt. of covariances weights ('b' = 0 in Eq. 20 of
485 expIdMask : `dict`, [`str`, `list`]
486 Dictionary keyed by amp names containing the masked exposure pairs.
488 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
489 PDF file where the plots will be saved.
491 offset : `float`, optional
492 Constant offset factor to plot covariances in same panel (so they don't overlap).
494 numberOfBins : `int`, optional
495 Number of bins for top and bottom plot.
497 plotData : `bool`, optional
498 Plot the data points?
500 topPlot : `bool`, optional
501 Plot the top plot with the covariances, and the bottom plot with the model residuals?
503 log : `lsst.log.Log`, optional
504 Logger to handle messages.
507 fig = plt.figure(figsize=(8, 10))
508 gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])
510 ax0 = plt.subplot(gs[0])
511 plt.setp(ax0.get_xticklabels(), visible=
False)
513 fig = plt.figure(figsize=(8, 8))
514 ax0 = plt.subplot(111)
515 ax0.ticklabel_format(style=
'sci', axis=
'x', scilimits=(0, 0))
516 ax0.tick_params(axis=
'both', labelsize=
'x-large')
517 mue, rese, wce = [], [], []
518 mueNoB, reseNoB, wceNoB = [], [], []
519 for counter, amp
in enumerate(covs):
520 mask = expIdMask[amp]
522 muAmp, fullCov, fullCovModel, fullCovWeight = (inputMu[amp], covs[amp], covsModel[amp],
524 if len(fullCov) == 0:
527 fullCovWeight, divideByMu=
True)
529 mu, cov, model, weightCov = mu[mask], cov[mask], model[mask], weightCov[mask]
532 rese += list(cov - model)
533 wce += list(weightCov)
535 fullCovNoB, fullCovModelNoB, fullCovWeightNoB = (covsNoB[amp], covsModelNoB[amp],
537 if len(fullCovNoB) == 0:
539 (muNoB, covNoB, modelNoB,
541 fullCovWeightNoB, divideByMu=
True)
543 muNoB, covNoB, modelNoB, weightCovNoB = (muNoB[mask], covNoB[mask], modelNoB[mask],
546 mueNoB += list(muNoB)
547 reseNoB += list(covNoB - modelNoB)
548 wceNoB += list(weightCovNoB)
551 fit_curve, = plt.plot(mu, model + counter*offset,
'-', linewidth=4.0)
555 xb, yb, wyb, sigyb = self.
binData(mu, cov, gind, weightCov)
556 plt.errorbar(xb, yb+counter*offset, yerr=sigyb, marker=
'o', linestyle=
'none', markersize=6.5,
557 color=fit_curve.get_color(), label=f
"{amp} (N: {len(mu)})")
560 points, = plt.plot(mu, cov + counter*offset,
'.', color=fit_curve.get_color())
561 plt.legend(loc=
'upper right', fontsize=8)
564 rese = np.array(rese)
566 mueNoB = np.array(mueNoB)
567 reseNoB = np.array(reseNoB)
568 wceNoB = np.array(wceNoB)
570 plt.xlabel(
r"$\mu (el)$", fontsize=
'x-large')
571 plt.ylabel(
r"$Cov{%d%d}/\mu + Cst (el)$"%(i, j), fontsize=
'x-large')
574 xb, yb, wyb, sigyb = self.
binData(mue, rese, gind, wce)
576 ax1 = plt.subplot(gs[1], sharex=ax0)
577 ax1.errorbar(xb, yb, yerr=sigyb, marker=
'o', linestyle=
'none', label=
'Full fit')
579 xb2, yb2, wyb2, sigyb2 = self.
binData(mueNoB, reseNoB, gindNoB, wceNoB)
581 ax1.errorbar(xb2, yb2, yerr=sigyb2, marker=
'o', linestyle=
'none', label=
'b = 0')
582 ax1.tick_params(axis=
'both', labelsize=
'x-large')
583 plt.legend(loc=
'upper left', fontsize=
'large')
585 plt.plot(xb, [0]*len(xb),
'--', color=
'k')
586 plt.ticklabel_format(style=
'sci', axis=
'x', scilimits=(0, 0))
587 plt.ticklabel_format(style=
'sci', axis=
'y', scilimits=(0, 0))
588 plt.xlabel(
r'$\mu (el)$', fontsize=
'x-large')
589 plt.ylabel(
r'$Cov{%d%d}/\mu$ -model (el)'%(i, j), fontsize=
'x-large')
591 plt.suptitle(f
"Nbins: {numberOfBins}")
594 labels0 = [item.get_text()
for item
in ax0.get_yticklabels()]
596 ax0.set_yticklabels(labels0)
597 pdfPages.savefig(fig)
603 """Fig. 12 of Astier+19
605 Color display of a and b arrays fits, averaged over channels.
609 aDict : `dict`, [`numpy.array`]
610 Dictionary keyed by amp names containing the fitted 'a' coefficients from the model
611 in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
613 bDict : `dict`, [`numpy.array`]
614 Dictionary keyed by amp names containing the fitted 'b' coefficients from the model
615 in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
617 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
618 PDF file where the plots will be saved.
621 Maximum lag for b arrays.
625 if np.isnan(aDict[amp]).all():
629 a = np.array(a).mean(axis=0)
630 b = np.array(b).mean(axis=0)
631 fig = plt.figure(figsize=(7, 11))
632 ax0 = fig.add_subplot(2, 1, 1)
633 im0 = ax0.imshow(np.abs(a.transpose()), origin=
'lower', norm=mpl.colors.LogNorm())
634 ax0.tick_params(axis=
'both', labelsize=
'x-large')
635 ax0.set_title(
r'$|a|$', fontsize=
'x-large')
636 ax0.xaxis.set_ticks_position(
'bottom')
637 cb0 = plt.colorbar(im0)
638 cb0.ax.tick_params(labelsize=
'x-large')
640 ax1 = fig.add_subplot(2, 1, 2)
641 ax1.tick_params(axis=
'both', labelsize=
'x-large')
642 ax1.yaxis.set_major_locator(MaxNLocator(integer=
True))
643 ax1.xaxis.set_major_locator(MaxNLocator(integer=
True))
644 im1 = ax1.imshow(1e6*b[:bRange, :bRange].transpose(), origin=
'lower')
645 cb1 = plt.colorbar(im1)
646 cb1.ax.tick_params(labelsize=
'x-large')
647 ax1.set_title(
r'$b \times 10^6$', fontsize=
'x-large')
648 ax1.xaxis.set_ticks_position(
'bottom')
650 pdfPages.savefig(fig)
656 """Fig. 13 of Astier+19.
658 Values of a and b arrays fits, averaged over amplifiers, as a function of distance.
662 aDict : `dict`, [`numpy.array`]
663 Dictionary keyed by amp names containing the fitted 'a' coefficients from the model
664 in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
666 bDict : `dict`, [`numpy.array`]
667 Dictionary keyed by amp names containing the fitted 'b' coefficients from the model
668 in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
670 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
671 PDF file where the plots will be saved.
674 Maximum lag for b arrays.
676 assert (len(aDict) == len(bDict))
679 if np.isnan(aDict[amp]).all():
684 sy = a.std(axis=0)/np.sqrt(len(aDict))
685 i, j = np.indices(y.shape)
686 upper = (i >= j).ravel()
687 r = np.sqrt(i**2 + j**2).ravel()
690 fig = plt.figure(figsize=(6, 9))
691 ax = fig.add_subplot(211)
692 ax.set_xlim([0.5, r.max()+1])
693 ax.errorbar(r[upper], y[upper], yerr=sy[upper], marker=
'o', linestyle=
'none', color=
'b',
695 ax.errorbar(r[~upper], y[~upper], yerr=sy[~upper], marker=
'o', linestyle=
'none', color=
'r',
697 ax.legend(loc=
'upper center', fontsize=
'x-large')
698 ax.set_xlabel(
r'$\sqrt{i^2+j^2}$', fontsize=
'x-large')
699 ax.set_ylabel(
r'$a_{ij}$', fontsize=
'x-large')
701 ax.tick_params(axis=
'both', labelsize=
'x-large')
704 axb = fig.add_subplot(212)
707 if np.isnan(bDict[amp]).all():
712 syb = b.std(axis=0)/np.sqrt(len(bDict))
713 ib, jb = np.indices(yb.shape)
714 upper = (ib > jb).ravel()
715 rb = np.sqrt(i**2 + j**2).ravel()
720 axb.set_xlim([xmin, xmax+0.2])
721 cutu = (r > xmin) & (r < xmax) & (upper)
722 cutl = (r > xmin) & (r < xmax) & (~upper)
723 axb.errorbar(rb[cutu], yb[cutu], yerr=syb[cutu], marker=
'o', linestyle=
'none', color=
'b',
725 axb.errorbar(rb[cutl], yb[cutl], yerr=syb[cutl], marker=
'o', linestyle=
'none', color=
'r',
727 plt.legend(loc=
'upper center', fontsize=
'x-large')
728 axb.set_xlabel(
r'$\sqrt{i^2+j^2}$', fontsize=
'x-large')
729 axb.set_ylabel(
r'$b_{ij}$', fontsize=
'x-large')
730 axb.ticklabel_format(style=
'sci', axis=
'y', scilimits=(0, 0))
731 axb.tick_params(axis=
'both', labelsize=
'x-large')
733 pdfPages.savefig(fig)
739 """Fig. 14. of Astier+19
741 Cumulative sum of a_ij as a function of maximum separation. This plot displays the average over
746 aDict : `dict`, [`numpy.array`]
747 Dictionary keyed by amp names containing the fitted 'a' coefficients from the model
748 in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
750 bDict : `dict`, [`numpy.array`]
751 Dictionary keyed by amp names containing the fitted 'b' coefficients from the model
752 in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
754 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
755 PDF file where the plots will be saved.
757 assert (len(aDict) == len(bDict))
760 if np.isnan(aDict[amp]).all()
or np.isnan(bDict[amp]).all():
764 a = np.array(a).mean(axis=0)
765 b = np.array(b).mean(axis=0)
766 fig = plt.figure(figsize=(7, 6))
767 w = 4*np.ones_like(a)
772 indices = range(1, a.shape[0]+1)
773 sums = [wa[0:n, 0:n].sum()
for n
in indices]
774 ax = plt.subplot(111)
775 ax.plot(indices, sums/sums[0],
'o', color=
'b')
777 ax.set_xlim(indices[0]-0.5, indices[-1]+0.5)
778 ax.set_ylim(
None, 1.2)
779 ax.set_ylabel(
r'$[\sum_{|i|<n\ &\ |j|<n} a_{ij}] / |a_{00}|$', fontsize=
'x-large')
780 ax.set_xlabel(
'n', fontsize=
'x-large')
781 ax.tick_params(axis=
'both', labelsize=
'x-large')
783 pdfPages.savefig(fig)
789 gainDict, pdfPages, maxr=None):
790 """Fig. 15 in Astier+19.
792 Illustrates systematic bias from estimating 'a'
793 coefficients from the slope of correlations as opposed to the
794 full model in Astier+19.
799 Dictionary of 'a' matrices (Eq. 20, Astier+19), with amp names as keys.
802 Dictionary of 'a' matrices ('b'= 0 in Eq. 20, Astier+19), with amp names as keys.
804 fullCovsModel : `dict`, [`str`, `list`]
805 Dictionary keyed by amp names containing covariances model per mean flux.
807 fullCovsModelNoB : `dict`, [`str`, `list`]
808 Dictionary keyed by amp names containing covariances model (with 'b'=0 in Eq. 20 of
809 Astier+19) per mean flux.
811 signalElectrons : `float`
812 Signal at which to evaluate the a_ij coefficients.
814 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
815 PDF file where the plots will be saved.
817 gainDict : `dict`, [`str`, `float`]
818 Dicgionary keyed by amp names with the gains in e-/ADU.
820 maxr : `int`, optional
824 fig = plt.figure(figsize=(7, 11))
825 title = [f
"'a' relative bias at {signalElectrons} e",
"'a' relative bias (b=0)"]
826 data = [(aDict, fullCovsModel), (aDictNoB, fullCovsModelNoB)]
828 for k, pair
in enumerate(data):
832 covModel = pair[1][amp]
833 if np.isnan(covModel).all():
838 diffs.append((aOld-a))
839 amean = np.array(amean).mean(axis=0)
840 diff = np.array(diffs).mean(axis=0)
847 diff = diff[:maxr, :maxr]
848 ax0 = fig.add_subplot(2, 1, k+1)
849 im0 = ax0.imshow(diff.transpose(), origin=
'lower')
850 ax0.yaxis.set_major_locator(MaxNLocator(integer=
True))
851 ax0.xaxis.set_major_locator(MaxNLocator(integer=
True))
852 ax0.tick_params(axis=
'both', labelsize=
'x-large')
854 ax0.set_title(title[k])
857 pdfPages.savefig(fig)
# NOTE(review): garbled extraction -- leading integers are original line
# numbers; statements are split across lines and several interior lines are
# elided (e.g. the EXPAPPROXIMATION `ptcFunc = funcAstier` assignment at
# orig ~877, font-size/marker constants around orig 888-895, the `continue`
# after the BAD titles at orig ~921-922, and the f/ax3 zip continuation at
# orig 913).  Code kept byte-identical; comments only added.
861 def _plotStandardPtc(self, dataset, ptcFitType, pdfPages):
862 """Plot PTC, var/signal vs signal, linearity, and linearity residual per amplifier.
866 dataset : `lsst.ip.isr.ptcDataset.PhotonTransferCurveDataset`
867 The dataset containing the means, variances, exposure times, and mask.
870 Type of the model fit to the PTC. Options: 'FULLCOVARIANCE', EXPAPPROXIMATION, or 'POLYNOMIAL'.
872 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
873 PDF file where the plots will be saved.
# Choose the model function and a LaTeX title string from the fit type;
# unknown types raise with the list of supported options.
876 if ptcFitType ==
'EXPAPPROXIMATION':
878 stringTitle = (
r"Var = $\frac{1}{2g^2a_{00}}(\exp (2a_{00} \mu g) - 1) + \frac{n_{00}}{g^2}$ ")
879 elif ptcFitType ==
'POLYNOMIAL':
880 ptcFunc = funcPolynomial
# Polynomial degree inferred from the stored fit-parameter count.
881 for key
in dataset.ptcFitPars:
882 deg = len(dataset.ptcFitPars[key]) - 1
884 stringTitle =
r"Polynomial (degree: %g)" % (deg)
886 raise RuntimeError(f
"The input dataset had an invalid dataset.ptcFitType: {ptcFitType}. \n" +
887 "Options: 'FULLCOVARIANCE', EXPAPPROXIMATION, or 'POLYNOMIAL'.")
# Grid layout: approximately-square per-amplifier panel grid (rounding
# condition line elided here, as in plotCovariances).
892 supTitleFontSize = 18
896 nAmps = len(dataset.ampNames)
899 nRows = np.sqrt(nAmps)
900 mantissa, _ = np.modf(nRows)
902 nRows = int(nRows) + 1
# Three figures: PTC (linear), PTC (log-log per suptitle), and Var/mu.
908 f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex=
'col', sharey=
'row', figsize=(13, 10))
909 f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex=
'col', sharey=
'row', figsize=(13, 10))
910 f3, ax3 = plt.subplots(nrows=nRows, ncols=nCols, sharex=
'col', sharey=
'row', figsize=(13, 10))
912 for i, (amp, a, a2, a3)
in enumerate(zip(dataset.ampNames, ax.flatten(), ax2.flatten(),
914 meanVecOriginal = np.array(dataset.rawMeans[amp])
915 varVecOriginal = np.array(dataset.rawVars[amp])
916 mask = np.array(dataset.expIdMask[amp])
# A NaN leading mask entry marks a bad amplifier: title the panels BAD
# (the `continue` that presumably follows is elided in this extraction).
917 if np.isnan(mask[0]):
918 a.set_title(f
"{amp} (BAD)", fontsize=titleFontSize)
919 a2.set_title(f
"{amp} (BAD)", fontsize=titleFontSize)
920 a3.set_title(f
"{amp} (BAD)", fontsize=titleFontSize)
# Split measurements into fitted points (mask) and rejected outliers (~mask).
923 mask = mask.astype(bool)
924 meanVecFinal = meanVecOriginal[mask]
925 varVecFinal = varVecOriginal[mask]
926 meanVecOutliers = meanVecOriginal[np.invert(mask)]
927 varVecOutliers = varVecOriginal[np.invert(mask)]
928 pars, parsErr = np.array(dataset.ptcFitPars[amp]), np.array(dataset.ptcFitParsError[amp])
929 ptcRedChi2 = np.array(dataset.ptcFitChiSq[amp])
# EXPAPPROXIMATION parameters: [a00, gain, noise^2]; noise error is
# propagated from the variance term via 0.5*sigma(p2)/sqrt(|p2|).
930 if ptcFitType ==
'EXPAPPROXIMATION':
931 if len(meanVecFinal):
932 ptcA00, ptcA00error = pars[0], parsErr[0]
933 ptcGain, ptcGainError = pars[1], parsErr[1]
934 ptcNoise = np.sqrt((pars[2]))
935 ptcNoiseAdu = ptcNoise*(1./ptcGain)
936 ptcNoiseError = 0.5*(parsErr[2]/np.fabs(pars[2]))*np.sqrt(np.fabs(pars[2]))
937 stringLegend = (f
"a00: {ptcA00:.2e}+/-{ptcA00error:.2e} 1/e"
938 f
"\nGain: {ptcGain:.4}+/-{ptcGainError:.2e} e/ADU"
939 f
"\nNoise: {ptcNoise:.4}+/-{ptcNoiseError:.2e} e\n"
940 r"$\chi^2_{\rm{red}}$: " + f
"{ptcRedChi2:.4}"
941 f
"\nLast in fit: {meanVecFinal[-1]:.7} ADU ")
# POLYNOMIAL parameters: gain = 1/p1, noise (ADU) = sqrt(p0); errors
# propagated from the corresponding parameter uncertainties.
943 if ptcFitType ==
'POLYNOMIAL':
944 if len(meanVecFinal):
945 ptcGain, ptcGainError = 1./pars[1], np.fabs(1./pars[1])*(parsErr[1]/pars[1])
946 ptcNoiseAdu = np.sqrt((pars[0]))
947 ptcNoise = ptcNoiseAdu*ptcGain
948 ptcNoiseError = (0.5*(parsErr[0]/np.fabs(pars[0]))*(np.sqrt(np.fabs(pars[0]))))*ptcGain
949 stringLegend = (f
"Gain: {ptcGain:.4}+/-{ptcGainError:.2e} e/ADU\n"
950 f
"Noise: {ptcNoise:.4}+/-{ptcNoiseError:.2e} e\n"
951 r"$\chi^2_{\rm{red}}$: " + f
"{ptcRedChi2:.4}"
952 f
"\nLast in fit: {meanVecFinal[-1]:.7} ADU ")
# Axis labels and scales for the three per-amp panels.
954 a.set_xlabel(
r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
955 a.set_ylabel(
r'Variance (ADU$^2$)', fontsize=labelFontSize)
956 a.tick_params(labelsize=11)
957 a.set_xscale(
'linear', fontsize=labelFontSize)
958 a.set_yscale(
'linear', fontsize=labelFontSize)
960 a2.set_xlabel(
r'Mean Signal ($\mu$, ADU)', fontsize=labelFontSize)
961 a2.set_ylabel(
r'Variance (ADU$^2$)', fontsize=labelFontSize)
962 a2.tick_params(labelsize=11)
966 a3.set_xlabel(
r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
967 a3.set_ylabel(
r'Variance/$\mu$ (ADU)', fontsize=labelFontSize)
968 a3.tick_params(labelsize=11)
970 a3.set_yscale(
'linear', fontsize=labelFontSize)
# Dense abscissa for drawing the smooth model curve over the fitted range.
972 minMeanVecFinal = np.nanmin(meanVecFinal)
973 maxMeanVecFinal = np.nanmax(meanVecFinal)
974 meanVecFit = np.linspace(minMeanVecFinal, maxMeanVecFinal, 100*len(meanVecFinal))
975 minMeanVecOriginal = np.nanmin(meanVecOriginal)
976 maxMeanVecOriginal = np.nanmax(meanVecOriginal)
977 deltaXlim = maxMeanVecOriginal - minMeanVecOriginal
# Panel 1: model curve (red), shot-noise-only line (green), fitted data
# (blue circles), outliers (magenta squares).
978 a.plot(meanVecFit, ptcFunc(pars, meanVecFit), color=
'red')
979 a.plot(meanVecFinal, ptcNoiseAdu**2 + (1./ptcGain)*meanVecFinal, color=
'green',
981 a.scatter(meanVecFinal, varVecFinal, c=
'blue', marker=
'o', s=markerSize)
982 a.scatter(meanVecOutliers, varVecOutliers, c=
'magenta', marker=
's', s=markerSize)
983 a.text(0.03, 0.66, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
984 a.set_title(amp, fontsize=titleFontSize)
985 a.set_xlim([minMeanVecOriginal - 0.2*deltaXlim, maxMeanVecOriginal + 0.2*deltaXlim])
# Panel 2: same content; log scaling presumably set on elided lines
# (orig 986-987) -- confirm against upstream.
988 a2.plot(meanVecFit, ptcFunc(pars, meanVecFit), color=
'red')
989 a2.scatter(meanVecFinal, varVecFinal, c=
'blue', marker=
'o', s=markerSize)
990 a2.scatter(meanVecOutliers, varVecOutliers, c=
'magenta', marker=
's', s=markerSize)
991 a2.text(0.03, 0.66, stringLegend, transform=a2.transAxes, fontsize=legendFontSize)
992 a2.set_title(amp, fontsize=titleFontSize)
993 a2.set_xlim([minMeanVecOriginal, maxMeanVecOriginal])
# Panel 3: Var/mu against mu (deviation from flatness shows nonlinearity
# of the variance with signal).
996 a3.plot(meanVecFit, ptcFunc(pars, meanVecFit)/meanVecFit, color=
'red')
997 a3.scatter(meanVecFinal, varVecFinal/meanVecFinal, c=
'blue', marker=
'o', s=markerSize)
998 a3.scatter(meanVecOutliers, varVecOutliers/meanVecOutliers, c=
'magenta', marker=
's',
1000 a3.text(0.05, 0.1, stringLegend, transform=a3.transAxes, fontsize=legendFontSize)
1001 a3.set_title(amp, fontsize=titleFontSize)
1002 a3.set_xlim([minMeanVecOriginal - 0.2*deltaXlim, maxMeanVecOriginal + 0.2*deltaXlim])
# Figure titles and PDF output.  NOTE(review): the savefig for `f`
# (orig ~1005) appears elided in this extraction.
1004 f.suptitle(
"PTC \n Fit: " + stringTitle, fontsize=supTitleFontSize)
1006 f2.suptitle(
"PTC (log-log)", fontsize=supTitleFontSize)
1007 pdfPages.savefig(f2)
1008 f3.suptitle(
r"Var/$\mu$", fontsize=supTitleFontSize)
1009 pdfPages.savefig(f3)
1013 def _plotLinearizer(self, dataset, linearizer, pdfPages):
1014 """Plot linearity and linearity residual per amplifier
1018 dataset : `lsst.ip.isr.ptcDataset.PhotonTransferCurveDataset`
1019 The dataset containing the means, variances, exposure times, and mask.
1021 linearizer : `lsst.ip.isr.Linearizer`
1027 supTitleFontSize = 18
1030 nAmps = len(dataset.ampNames)
1033 nRows = np.sqrt(nAmps)
1034 mantissa, _ = np.modf(nRows)
1036 nRows = int(nRows) + 1
1043 f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex=
'col', sharey=
'row', figsize=(13, 10))
1044 f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex=
'col', sharey=
'row', figsize=(13, 10))
1045 for i, (amp, a, a2)
in enumerate(zip(dataset.ampNames, ax.flatten(), ax2.flatten())):
1046 mask = dataset.expIdMask[amp]
1047 if np.isnan(mask[0]):
1048 a.set_title(f
"{amp} (BAD)", fontsize=titleFontSize)
1049 a2.set_title(f
"{amp} (BAD)", fontsize=titleFontSize)
1052 mask = mask.astype(bool)
1053 meanVecFinal = np.array(dataset.rawMeans[amp])[mask]
1054 timeVecFinal = np.array(dataset.rawExpTimes[amp])[mask]
1056 a.set_xlabel(
'Time (sec)', fontsize=labelFontSize)
1057 a.set_ylabel(
r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
1058 a.tick_params(labelsize=labelFontSize)
1059 a.set_xscale(
'linear', fontsize=labelFontSize)
1060 a.set_yscale(
'linear', fontsize=labelFontSize)
1062 a2.axhline(y=0, color=
'k')
1063 a2.axvline(x=0, color=
'k', linestyle=
'-')
1064 a2.set_xlabel(
r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
1065 a2.set_ylabel(
'Fractional nonlinearity (%)', fontsize=labelFontSize)
1066 a2.tick_params(labelsize=labelFontSize)
1067 a2.set_xscale(
'linear', fontsize=labelFontSize)
1068 a2.set_yscale(
'linear', fontsize=labelFontSize)
1070 pars, parsErr = linearizer.fitParams[amp], linearizer.fitParamsErr[amp]
1071 k0, k0Error = pars[0], parsErr[0]
1072 k1, k1Error = pars[1], parsErr[1]
1073 k2, k2Error = pars[2], parsErr[2]
1074 linRedChi2 = linearizer.fitChiSq[amp]
1075 stringLegend = (f
"k0: {k0:.4}+/-{k0Error:.2e} ADU\nk1: {k1:.4}+/-{k1Error:.2e} ADU/t"
1076 f
"\nk2: {k2:.2e}+/-{k2Error:.2e} ADU/t^2\n"
1077 r"$\chi^2_{\rm{red}}$: " + f
"{linRedChi2:.4}")
1078 a.scatter(timeVecFinal, meanVecFinal)
1079 a.plot(timeVecFinal,
funcPolynomial(pars, timeVecFinal), color=
'red')
1080 a.text(0.03, 0.75, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
1081 a.set_title(f
"{amp}", fontsize=titleFontSize)
1083 linearPart = k0 + k1*timeVecFinal
1084 fracLinRes = 100*(linearPart - meanVecFinal)/linearPart
1085 a2.plot(meanVecFinal, fracLinRes, c=
'g')
1086 a2.set_title(f
"{amp}", fontsize=titleFontSize)
1088 f.suptitle(
"Linearity \n Fit: Polynomial (degree: %g)"
1090 fontsize=supTitleFontSize)
1091 f2.suptitle(
r"Fractional NL residual" +
"\n" +
1092 r"$100\times \frac{(k_0 + k_1*Time-\mu)}{k_0+k_1*Time}$",
1093 fontsize=supTitleFontSize)
1095 pdfPages.savefig(f2)
1099 """Group data into bins, with at most maxDiff distance between bins.
1107 Maximum distance between bins.
1116 index = np.zeros_like(x, dtype=np.int32)
1121 for i
in range(1, len(ix)):
1123 if (xval - xc < maxDiff):
1124 xc = (ng*xc + xval)/(ng+1)
1126 index[ix[i]] = group
1130 index[ix[i]] = group
1137 """Builds an index with regular binning. The result can be fed into binData.
1148 np.digitize(x, bins): `numpy.array`
1152 bins = np.linspace(x.min(), x.max() + abs(x.max() * 1e-7), nBins + 1)
1153 return np.digitize(x, bins)
1157 """Bin data (usually for display purposes).
1168 Bin number of each datum.
1171 Inverse rms of each datum to use when averaging (the actual weight is wy**2).
1182 wybin: `numpy.array`
1183 Binned weights in y, computed from wy's in each bin.
1185 sybin: `numpy.array`
1186 Uncertainty on the bin average, considering actual scatter, and ignoring weights.
1190 wy = np.ones_like(x)
1191 binIndexSet = set(binIndex)
1194 xbin = np.array([xw2[binIndex == i].sum()/w2[binIndex == i].sum()
for i
in binIndexSet])
1197 ybin = np.array([yw2[binIndex == i].sum()/w2[binIndex == i].sum()
for i
in binIndexSet])
1199 wybin = np.sqrt(np.array([w2[binIndex == i].sum()
for i
in binIndexSet]))
1200 sybin = np.array([y[binIndex == i].
std()/np.sqrt(np.array([binIndex == i]).sum())
1201 for i
in binIndexSet])
1203 return xbin, ybin, wybin, sybin