__all__ = ['PlotPhotonTransferCurveTaskGen2']
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
from matplotlib import gridspec
import os
from matplotlib.backends.backend_pdf import PdfPages

import lsst.ip.isr as isr
import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase

from lsst.cp.pipe.utils import (funcAstier, funcPolynomial, NonexistentDatasetTaskDataIdContainer,
                                calculateWeightedReducedChi2)

from lsst.ip.isr import PhotonTransferCurveDataset

from matplotlib.ticker import MaxNLocator
47 """Config class for photon transfer curve measurement task"""
48 datasetFileName = pexConfig.Field(
50 doc=
"datasetPtc file name (fits)",
53 linearizerFileName = pexConfig.Field(
55 doc=
"linearizer file name (fits)",
58 ccdKey = pexConfig.Field(
60 doc=
"The key by which to pull a detector from a dataId, e.g. 'ccd' or 'detector'.",
63 signalElectronsRelativeA = pexConfig.Field(
65 doc=
"Signal value for relative systematic bias between different methods of estimating a_ij "
66 "(Fig. 15 of Astier+19).",
69 plotNormalizedCovariancesNumberOfBins = pexConfig.Field(
71 doc=
"Number of bins in `plotNormalizedCovariancesNumber` function "
72 "(Fig. 8, 10., of Astier+19).",
78 """A class to plot the dataset from MeasurePhotonTransferCurveTask.
84 Positional arguments passed to the Task constructor. None used at this
87 Keyword arguments passed on to the Task constructor. None used at this
92 The plotting code in this file is almost identical to the code in
93 `plotPtc.py`. If further changes are implemented in this file,
94 `plotPtc.py` needs to be updated accordingly, and vice versa.
95 This file (`plotPtcGen2.py`) helps with maintaining backwards
96 compatibility with gen2 as we transition to gen3; the code
97 duplication is meant to only last for few month from now
98 (Jan, 2021). At that point only the `plotPtc.py` file will
    ConfigClass = PlotPhotonTransferCurveTaskConfigGen2
    _DefaultName = "plotPhotonTransferCurve"

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        plt.interactive(False)  # keep matplotlib non-interactive so windows do not pop up while plotting
    @classmethod
    def _makeArgumentParser(cls):
        """Augment argument parser for the MeasurePhotonTransferCurveTask."""
        parser = pipeBase.ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", datasetType="photonTransferCurveDataset",
                               ContainerClass=NonexistentDatasetTaskDataIdContainer,
                               help="The ccds to use, e.g. --id ccd=0..100")
        return parser
120 """Run the Photon Transfer Curve (PTC) plotting measurement task.
124 dataRef : list of lsst.daf.persistence.ButlerDataRef
125 dataRef for the detector for the expIds to be fit.
128 datasetFile = self.config.datasetFileName
129 datasetPtc = PhotonTransferCurveDataset.readFits(datasetFile)
131 dirname = dataRef.getUri(datasetType=
'cpPipePlotRoot', write=
True)
132 if not os.path.exists(dirname):
135 detNum = dataRef.dataId[self.config.ccdKey]
136 filename = f
"PTC_det{detNum}.pdf"
137 filenameFull = os.path.join(dirname, filename)
139 if self.config.linearizerFileName:
140 linearizer = isr.linearize.Linearizer.readFits(self.config.linearizerFileName)
143 self.
runrun(filenameFull, datasetPtc, linearizer=linearizer, log=self.log)
145 return pipeBase.Struct(exitStatus=0)
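    # Invocation sketch (hedged; kept as a comment, and the repository path and dataId are
    # hypothetical): the task can be driven directly from Python with a gen2 Butler dataRef.
    #
    #   from lsst.daf.persistence import Butler
    #   butler = Butler("/path/to/gen2/repo")                               # hypothetical repo
    #   dataRef = butler.dataRef("photonTransferCurveDataset", detector=0)  # hypothetical dataId
    #   task = PlotPhotonTransferCurveTaskGen2()
    #   task.runDataRef(dataRef)    # writes PTC_det0.pdf under the 'cpPipePlotRoot' location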
    def run(self, filenameFull, datasetPtc, linearizer=None, log=None):
        """Make the plots for the PTC task."""
        ptcFitType = datasetPtc.ptcFitType
        with PdfPages(filenameFull) as pdfPages:
            if ptcFitType in ["FULLCOVARIANCE", ]:
                self.covAstierMakeAllPlots(datasetPtc, pdfPages, log=log)
            elif ptcFitType in ["EXPAPPROXIMATION", "POLYNOMIAL"]:
                self._plotStandardPtc(datasetPtc, ptcFitType, pdfPages)
            else:
                raise RuntimeError(f"The input dataset had an invalid dataset.ptcFitType: {ptcFitType}. \n"
                                   "Options: 'FULLCOVARIANCE', 'EXPAPPROXIMATION', or 'POLYNOMIAL'.")
165 """Make plots for MeasurePhotonTransferCurve task when doCovariancesAstier=True.
167 This function call other functions that mostly reproduce the plots in Astier+19.
168 Most of the code is ported from Pierre Astier's repository https://github.com/PierreAstier/bfptc
172 dataset : `lsst.ip.isr.ptcDataset.PhotonTransferCurveDataset`
173 The dataset containing the necessary information to produce the plots.
175 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
176 PDF file where the plots will be saved.
178 log : `lsst.log.Log`, optional
179 Logger to handle messages
        mu = dataset.finalMeans
        # Dictionaries keyed by amplifier name.
        fullCovs = dataset.covariances
        fullCovsModel = dataset.covariancesModel
        fullCovWeights = dataset.covariancesSqrtWeights
        aDict = dataset.aMatrix
        bDict = dataset.bMatrix
        fullCovsNoB = dataset.covariances
        fullCovsModelNoB = dataset.covariancesModelNoB
        fullCovWeightsNoB = dataset.covariancesSqrtWeights
        aDictNoB = dataset.aMatrixNoB
        gainDict = dataset.gain
        noiseDict = dataset.noise
        self.plotCovariances(mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB, fullCovsModelNoB,
                             fullCovWeightsNoB, gainDict, noiseDict, aDict, bDict, pdfPages)
        self.plotNormalizedCovariances(0, 0, mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB,
                                       fullCovsModelNoB, fullCovWeightsNoB, pdfPages,
                                       offset=0.01, topPlot=True,
                                       numberOfBins=self.config.plotNormalizedCovariancesNumberOfBins,
                                       log=log)
        self.plotNormalizedCovariances(0, 1, mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB,
                                       fullCovsModelNoB, fullCovWeightsNoB, pdfPages,
                                       numberOfBins=self.config.plotNormalizedCovariancesNumberOfBins,
                                       log=log)
        self.plotNormalizedCovariances(1, 0, mu, fullCovs, fullCovsModel, fullCovWeights, fullCovsNoB,
                                       fullCovsModelNoB, fullCovWeightsNoB, pdfPages,
                                       numberOfBins=self.config.plotNormalizedCovariancesNumberOfBins,
                                       log=log)
        self.plot_a_b(aDict, bDict, pdfPages)
        self.ab_vs_dist(aDict, bDict, pdfPages, bRange=4)
        self.plotAcoeffsSum(aDict, bDict, pdfPages)
        self.plotRelativeBiasACoeffs(aDict, aDictNoB, fullCovsModel, fullCovsModelNoB,
                                     self.config.signalElectronsRelativeA, gainDict, pdfPages, maxr=4)
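        # Data-layout sketch (hedged; values are hypothetical): every quantity pulled from
        # `dataset` above is a dictionary keyed by amplifier name, e.g.
        #
        #   dataset.gain        -> {'C00': 1.42, 'C01': 1.38, ...}          # e-/ADU
        #   dataset.aMatrix     -> {'C00': 8x8 numpy array of 'a' coeffs (Eq. 20, Astier+19), ...}
        #   dataset.covariances -> {'C00': [covariance matrix per exposure pair, ...], ...}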
    @staticmethod
    def plotCovariances(mu, covs, covsModel, covsWeights, covsNoB, covsModelNoB, covsWeightsNoB,
                        gainDict, noiseDict, aDict, bDict, pdfPages):
        """Plot covariances and models: Cov00, Cov10, Cov01.

        Figs. 6 and 7 of Astier+19.

        Parameters
        ----------
        mu : `dict`, [`str`, `list`]
            Dictionary keyed by amp name with the mean signal values.

        covs : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing a list of measured covariances per mean flux.

        covsModel : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing the covariance model (Eq. 20 of Astier+19) per mean flux.

        covsWeights : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing the sqrt. of the covariance weights.

        covsNoB : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing a list of measured covariances per mean flux ('b'=0 in
            Eq. 20 of Astier+19).

        covsModelNoB : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing the covariance model (with 'b'=0 in Eq. 20 of Astier+19)
            per mean flux.

        covsWeightsNoB : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing the sqrt. of the covariance weights ('b'=0 in Eq. 20 of
            Astier+19).

        gainDict : `dict`, [`str`, `float`]
            Dictionary keyed by amp names containing the gains in e-/ADU.

        noiseDict : `dict`, [`str`, `float`]
            Dictionary keyed by amp names containing the rms readout noise in e-.

        aDict : `dict`, [`str`, `numpy.array`]
            Dictionary keyed by amp names containing the 'a' coefficients (Eq. 20 of Astier+19).

        bDict : `dict`, [`str`, `numpy.array`]
            Dictionary keyed by amp names containing the 'b' coefficients (Eq. 20 of Astier+19).

        pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.
        """
        supTitleFontSize = 18

        nAmps = len(covs)
        nRows = np.sqrt(nAmps)
        mantissa, _ = np.modf(nRows)
        if mantissa > 0:
            nRows = int(nRows) + 1
            nCols = nRows
        else:
            nRows = int(nRows)
            nCols = nRows

        f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        fResCov00, axResCov00 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row',
                                             figsize=(13, 10))
        fCov01, axCov01 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        fCov10, axCov10 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))

        assert len(covsModel) == nAmps
        assert len(covsWeights) == nAmps
        assert len(covsNoB) == nAmps
        assert len(covsModelNoB) == nAmps
        assert len(covsWeightsNoB) == nAmps
        for i, (amp, a, a2, aResVar, a3, a4) in enumerate(zip(covs, ax.flatten(),
                                                              ax2.flatten(), axResCov00.flatten(),
                                                              axCov01.flatten(), axCov10.flatten())):
            muAmp, cov, model, weight = mu[amp], covs[amp], covsModel[amp], covsWeights[amp]
            if not np.isnan(np.array(cov)).all():
                aCoeffs, bCoeffs = np.array(aDict[amp]), np.array(bDict[amp])
                gain, noise = gainDict[amp], noiseDict[amp]
                (meanVecFinal, varVecFinal, varVecModelFinal,
                 varWeightsFinal) = getFitDataFromCovariances(0, 0, muAmp, cov, model, weight,
                                                              returnMasked=True)
                # Weighted reduced chi2 of the full-model fit to the variance (Cov00).
                chi2FullModelVar = calculateWeightedReducedChi2(varVecFinal, varVecModelFinal,
                                                                varWeightsFinal, len(meanVecFinal), 4)

                (meanVecFinalCov01, varVecFinalCov01, varVecModelFinalCov01,
                 varWeightsFinalCov01) = getFitDataFromCovariances(0, 1, muAmp, cov, model, weight,
                                                                   returnMasked=True)
                (meanVecFinalCov10, varVecFinalCov10, varVecModelFinalCov10,
                 varWeightsFinalCov10) = getFitDataFromCovariances(1, 0, muAmp, cov, model, weight,
                                                                   returnMasked=True)

                # Quadratic fit to the variance, for comparison with the full model.
                par2 = np.polyfit(meanVecFinal, varVecFinal, 2, w=varWeightsFinal)
                varModelFinalQuadratic = np.polyval(par2, meanVecFinal)
                chi2QuadModelVar = calculateWeightedReducedChi2(varVecFinal, varModelFinalQuadratic,
                                                                varWeightsFinal, len(meanVecFinal), 3)

                # Fit with no 'b' coefficient ('b' = 0 in Eq. 20 of Astier+19).
                covNoB, modelNoB, weightNoB = covsNoB[amp], covsModelNoB[amp], covsWeightsNoB[amp]
                (meanVecFinalNoB, varVecFinalNoB, varVecModelFinalNoB,
                 varWeightsFinalNoB) = getFitDataFromCovariances(0, 0, muAmp, covNoB, modelNoB,
                                                                 weightNoB, returnMasked=True)
                chi2FullModelNoBVar = calculateWeightedReducedChi2(varVecFinalNoB, varVecModelFinalNoB,
                                                                   varWeightsFinalNoB, len(meanVecFinalNoB),
                                                                   3)
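                # Sketch of the weighted reduced chi2 used above (hedged; the actual implementation
                # lives in lsst.cp.pipe.utils.calculateWeightedReducedChi2 and may differ in detail):
                #
                #   wRes = (measured - model)*weights
                #   chi2_red = np.sum(wRes**2)/(nData - nParsModel)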
                stringLegend = (f"Gain: {gain:.4} e/ADU \n"
                                f"Noise: {noise:.4} e \n"
                                + r"$a_{00}$: %.3e 1/e"%aCoeffs[0, 0] + "\n"
                                + r"$b_{00}$: %.3e 1/e"%bCoeffs[0, 0]
                                + f"\nLast in fit: {meanVecFinal[-1]:.7} ADU ")
                minMeanVecFinal = np.nanmin(meanVecFinal)
                maxMeanVecFinal = np.nanmax(meanVecFinal)
                deltaXlim = maxMeanVecFinal - minMeanVecFinal
                a.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
                a.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
                a.tick_params(labelsize=11)
                a.set_xscale('linear')
                a.set_yscale('linear')
                a.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
                a.plot(meanVecFinal, varVecModelFinal, color='red', linestyle='-')
                a.text(0.03, 0.7, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
                a.set_title(amp, fontsize=titleFontSize)
                a.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
                a2.set_xlabel(r'Mean Signal ($\mu$, ADU)', fontsize=labelFontSize)
                a2.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
                a2.tick_params(labelsize=11)
                a2.plot(meanVecFinal, varVecModelFinal, color='red', linestyle='-')
                a2.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
                a2.text(0.03, 0.7, stringLegend, transform=a2.transAxes, fontsize=legendFontSize)
                a2.set_title(amp, fontsize=titleFontSize)
                a2.set_xlim([minMeanVecFinal, maxMeanVecFinal])
                aResVar.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
                aResVar.set_ylabel(r'Residuals (ADU$^2$)', fontsize=labelFontSize)
                aResVar.tick_params(labelsize=11)
                aResVar.set_xscale('linear')
                aResVar.set_yscale('linear')
                aResVar.plot(meanVecFinal, varVecFinal - varVecModelFinal, color='blue', linestyle='-',
                             label=r'Full fit ($\chi_{\rm{red}}^2$: %g)'%chi2FullModelVar)
                aResVar.plot(meanVecFinal, varVecFinal - varModelFinalQuadratic, color='red', linestyle='-',
                             label=r'Quadratic fit ($\chi_{\rm{red}}^2$: %g)'%chi2QuadModelVar)
                aResVar.plot(meanVecFinalNoB, varVecFinalNoB - varVecModelFinalNoB, color='green',
                             label=r'Full fit (b=0) ($\chi_{\rm{red}}^2$: %g)'%chi2FullModelNoBVar)
                aResVar.axhline(color='black')
                aResVar.set_title(amp, fontsize=titleFontSize)
                aResVar.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
                aResVar.legend(fontsize=7)
                a3.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
                a3.set_ylabel(r'Cov01 (ADU$^2$)', fontsize=labelFontSize)
                a3.tick_params(labelsize=11)
                a3.set_xscale('linear')
                a3.set_yscale('linear')
                a3.scatter(meanVecFinalCov01, varVecFinalCov01, c='blue', marker='o', s=markerSize)
                a3.plot(meanVecFinalCov01, varVecModelFinalCov01, color='red', linestyle='-')
                a3.set_title(amp, fontsize=titleFontSize)
                a3.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
                a4.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
                a4.set_ylabel(r'Cov10 (ADU$^2$)', fontsize=labelFontSize)
                a4.tick_params(labelsize=11)
                a4.set_xscale('linear')
                a4.set_yscale('linear')
                a4.scatter(meanVecFinalCov10, varVecFinalCov10, c='blue', marker='o', s=markerSize)
                a4.plot(meanVecFinalCov10, varVecModelFinalCov10, color='red', linestyle='-')
                a4.set_title(amp, fontsize=titleFontSize)
                a4.set_xlim([minMeanVecFinal - 0.2*deltaXlim, maxMeanVecFinal + 0.2*deltaXlim])
            else:
                a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a3.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a4.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
        f.suptitle("PTC from covariances as in Astier+19 \n Fit: Eq. 20, Astier+19",
                   fontsize=supTitleFontSize)
        pdfPages.savefig(f)
        f2.suptitle("PTC from covariances as in Astier+19 (log-log) \n Fit: Eq. 20, Astier+19",
                    fontsize=supTitleFontSize)
        pdfPages.savefig(f2)
        fResCov00.suptitle("Residuals (data - model) for Cov00 (Var)", fontsize=supTitleFontSize)
        pdfPages.savefig(fResCov00)
        fCov01.suptitle("Cov01 as in Astier+19 (nearest parallel neighbor covariance) \n"
                        "Fit: Eq. 20, Astier+19", fontsize=supTitleFontSize)
        pdfPages.savefig(fCov01)
        fCov10.suptitle("Cov10 as in Astier+19 (nearest serial neighbor covariance) \n"
                        "Fit: Eq. 20, Astier+19", fontsize=supTitleFontSize)
        pdfPages.savefig(fCov10)
    def plotNormalizedCovariances(self, i, j, inputMu, covs, covsModel, covsWeights, covsNoB, covsModelNoB,
                                  covsWeightsNoB, pdfPages, offset=0.004,
                                  numberOfBins=10, plotData=True, topPlot=False, log=None):
        """Plot C_ij/mu vs mu.

        Figs. 8, 10, and 11 of Astier+19.

        Parameters
        ----------
        i : `int`
            Covariance lag index along one axis.

        j : `int`
            Covariance lag index along the other axis.

        inputMu : `dict`, [`str`, `list`]
            Dictionary keyed by amp name with the mean signal values.

        covs : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing a list of measured covariances per mean flux.

        covsModel : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing the covariance model (Eq. 20 of Astier+19) per mean flux.

        covsWeights : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing the sqrt. of the covariance weights.

        covsNoB : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing a list of measured covariances per mean flux ('b'=0 in
            Eq. 20 of Astier+19).

        covsModelNoB : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing the covariance model (with 'b'=0 in Eq. 20 of Astier+19)
            per mean flux.

        covsWeightsNoB : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing the sqrt. of the covariance weights ('b'=0 in Eq. 20 of
            Astier+19).

        pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.

        offset : `float`, optional
            Constant offset factor to plot the covariances in the same panel (so they don't overlap).

        numberOfBins : `int`, optional
            Number of bins for the top and bottom plots.

        plotData : `bool`, optional
            Plot the data points?

        topPlot : `bool`, optional
            Plot the top plot with the covariances, and the bottom plot with the model residuals?

        log : `lsst.log.Log`, optional
            Logger to handle messages.
        """
        if topPlot:
            fig = plt.figure(figsize=(8, 10))
            gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])
            ax0 = plt.subplot(gs[0])
            plt.setp(ax0.get_xticklabels(), visible=False)
        else:
            fig = plt.figure(figsize=(8, 8))
            ax0 = plt.subplot(111)
        ax0.ticklabel_format(style='sci', axis='x', scilimits=(0, 0))
        ax0.tick_params(axis='both', labelsize='x-large')
        mue, rese, wce = [], [], []
        mueNoB, reseNoB, wceNoB = [], [], []
        for counter, amp in enumerate(covs):
            muAmp, fullCov, fullCovModel, fullCovWeight = (inputMu[amp], covs[amp], covsModel[amp],
                                                           covsWeights[amp])
            if len(fullCov) == 0:
                continue
            (mu, cov, model, weightCov) = getFitDataFromCovariances(i, j, muAmp, fullCov, fullCovModel,
                                                                    fullCovWeight, divideByMu=True,
                                                                    returnMasked=True)
            mue += list(mu)
            rese += list(cov - model)
            wce += list(weightCov)

            fullCovNoB, fullCovModelNoB, fullCovWeightNoB = (covsNoB[amp], covsModelNoB[amp],
                                                             covsWeightsNoB[amp])
            if len(fullCovNoB) == 0:
                continue
            (muNoB, covNoB, modelNoB,
             weightCovNoB) = getFitDataFromCovariances(i, j, muAmp, fullCovNoB, fullCovModelNoB,
                                                       fullCovWeightNoB, divideByMu=True,
                                                       returnMasked=True)
            mueNoB += list(muNoB)
            reseNoB += list(covNoB - modelNoB)
            wceNoB += list(weightCovNoB)

            # Model curve for this amp, shifted vertically so the amps do not overlap.
            fit_curve, = plt.plot(mu, model + counter*offset, '-', linewidth=4.0)
            if plotData:
                gind = self.indexForBins(mu, numberOfBins)
                xb, yb, wyb, sigyb = self.binData(mu, cov, gind, weightCov)
                plt.errorbar(xb, yb + counter*offset, yerr=sigyb, marker='o', linestyle='none',
                             markersize=6.5, color=fit_curve.get_color(), label=f"{amp} (N: {len(mu)})")
            else:
                points, = plt.plot(mu, cov + counter*offset, '.', color=fit_curve.get_color())
            plt.legend(loc='upper right', fontsize=8)
        mue = np.array(mue)
        rese = np.array(rese)
        wce = np.array(wce)
        mueNoB = np.array(mueNoB)
        reseNoB = np.array(reseNoB)
        wceNoB = np.array(wceNoB)

        plt.xlabel(r"$\mu (el)$", fontsize='x-large')
        plt.ylabel(r"$Cov{%d%d}/\mu + Cst (el)$"%(i, j), fontsize='x-large')
        if topPlot:
            # Bottom panel: binned residuals (data - model) of C_ij/mu.
            gind = self.indexForBins(mue, numberOfBins)
            xb, yb, wyb, sigyb = self.binData(mue, rese, gind, wce)
            ax1 = plt.subplot(gs[1], sharex=ax0)
            ax1.errorbar(xb, yb, yerr=sigyb, marker='o', linestyle='none', label='Full fit')
            gindNoB = self.indexForBins(mueNoB, numberOfBins)
            xb2, yb2, wyb2, sigyb2 = self.binData(mueNoB, reseNoB, gindNoB, wceNoB)
            ax1.errorbar(xb2, yb2, yerr=sigyb2, marker='o', linestyle='none', label='b = 0')
            ax1.tick_params(axis='both', labelsize='x-large')
            plt.legend(loc='upper left', fontsize='large')
            # Reference line at zero residual.
            plt.plot(xb, [0]*len(xb), '--', color='k')
            plt.ticklabel_format(style='sci', axis='x', scilimits=(0, 0))
            plt.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
            plt.xlabel(r'$\mu (el)$', fontsize='x-large')
            plt.ylabel(r'$Cov{%d%d}/\mu$ - model (el)'%(i, j), fontsize='x-large')
        plt.suptitle(f"Nbins: {numberOfBins}")
        labels0 = [item.get_text() for item in ax0.get_yticklabels()]
        ax0.set_yticklabels(labels0)
        pdfPages.savefig(fig)
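        # Plotting-convention sketch (hedged): each amplifier's C_ij/mu curve is shifted
        # vertically by `counter*offset` purely so the curves do not overlap in one panel;
        # only the shape vs mu is meaningful, e.g.
        #
        #   for counter, amp in enumerate(covs):
        #       plt.plot(mu, model + counter*offset)   # offset is cosmetic (default 0.004 el)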
581 """Fig. 12 of Astier+19
583 Color display of a and b arrays fits, averaged over channels.
587 aDict : `dict`, [`numpy.array`]
588 Dictionary keyed by amp names containing the fitted 'a' coefficients from the model
589 in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
591 bDict : `dict`, [`numpy.array`]
592 Dictionary keyed by amp names containing the fitted 'b' coefficients from the model
593 in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
595 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
596 PDF file where the plots will be saved.
599 Maximum lag for b arrays.
        a, b = [], []
        for amp in aDict:
            if np.isnan(aDict[amp]).all():
                continue
            a.append(aDict[amp])
            b.append(bDict[amp])
        # Average the per-amplifier 'a' and 'b' matrices.
        a = np.array(a).mean(axis=0)
        b = np.array(b).mean(axis=0)
        fig = plt.figure(figsize=(7, 11))
        ax0 = fig.add_subplot(2, 1, 1)
        im0 = ax0.imshow(np.abs(a.transpose()), origin='lower', norm=mpl.colors.LogNorm())
        ax0.tick_params(axis='both', labelsize='x-large')
        ax0.set_title(r'$|a|$', fontsize='x-large')
        ax0.xaxis.set_ticks_position('bottom')
        cb0 = plt.colorbar(im0)
        cb0.ax.tick_params(labelsize='x-large')

        ax1 = fig.add_subplot(2, 1, 2)
        ax1.tick_params(axis='both', labelsize='x-large')
        ax1.yaxis.set_major_locator(MaxNLocator(integer=True))
        ax1.xaxis.set_major_locator(MaxNLocator(integer=True))
        im1 = ax1.imshow(1e6*b[:bRange, :bRange].transpose(), origin='lower')
        cb1 = plt.colorbar(im1)
        cb1.ax.tick_params(labelsize='x-large')
        ax1.set_title(r'$b \times 10^6$', fontsize='x-large')
        ax1.xaxis.set_ticks_position('bottom')
        pdfPages.savefig(fig)
634 """Fig. 13 of Astier+19.
636 Values of a and b arrays fits, averaged over amplifiers, as a function of distance.
640 aDict : `dict`, [`numpy.array`]
641 Dictionary keyed by amp names containing the fitted 'a' coefficients from the model
642 in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
644 bDict : `dict`, [`numpy.array`]
645 Dictionary keyed by amp names containing the fitted 'b' coefficients from the model
646 in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
648 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
649 PDF file where the plots will be saved.
652 Maximum lag for b arrays.
        assert len(aDict) == len(bDict)
        a = []
        for amp in aDict:
            if np.isnan(aDict[amp]).all():
                continue
            a.append(aDict[amp])
        a = np.array(a)
        y = a.mean(axis=0)
        sy = a.std(axis=0)/np.sqrt(len(aDict))
        i, j = np.indices(y.shape)
        upper = (i >= j).ravel()
        r = np.sqrt(i**2 + j**2).ravel()
        y = y.ravel()
        sy = sy.ravel()

        fig = plt.figure(figsize=(6, 9))
        ax = fig.add_subplot(211)
        ax.set_xlim([0.5, r.max()+1])
        ax.errorbar(r[upper], y[upper], yerr=sy[upper], marker='o', linestyle='none', color='b')
        ax.errorbar(r[~upper], y[~upper], yerr=sy[~upper], marker='o', linestyle='none', color='r')
        ax.legend(loc='upper center', fontsize='x-large')
        ax.set_xlabel(r'$\sqrt{i^2+j^2}$', fontsize='x-large')
        ax.set_ylabel(r'$a_{ij}$', fontsize='x-large')
        ax.tick_params(axis='both', labelsize='x-large')
        axb = fig.add_subplot(212)
        b = []
        for amp in bDict:
            if np.isnan(bDict[amp]).all():
                continue
            b.append(bDict[amp])
        b = np.array(b)
        yb = b.mean(axis=0)
        syb = b.std(axis=0)/np.sqrt(len(bDict))
        ib, jb = np.indices(yb.shape)
        upper = (ib > jb).ravel()
        rb = np.sqrt(i**2 + j**2).ravel()
        yb = yb.ravel()
        syb = syb.ravel()

        axb.set_xlim([xmin, xmax+0.2])
        cutu = (r > xmin) & (r < xmax) & (upper)
        cutl = (r > xmin) & (r < xmax) & (~upper)
        axb.errorbar(rb[cutu], yb[cutu], yerr=syb[cutu], marker='o', linestyle='none', color='b')
        axb.errorbar(rb[cutl], yb[cutl], yerr=syb[cutl], marker='o', linestyle='none', color='r')
        plt.legend(loc='upper center', fontsize='x-large')
        axb.set_xlabel(r'$\sqrt{i^2+j^2}$', fontsize='x-large')
        axb.set_ylabel(r'$b_{ij}$', fontsize='x-large')
        axb.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
        axb.tick_params(axis='both', labelsize='x-large')
        pdfPages.savefig(fig)
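        # Geometry sketch (hedged): the x axis above is the lag distance r = sqrt(i**2 + j**2),
        # with the (i, j) lag grid split into its two triangles, e.g.
        #
        #   i, j = np.indices((8, 8))
        #   r = np.sqrt(i**2 + j**2).ravel()   # 0, 1, 2, ... mixed with 1, 1.41, 2.24, ...
        #   upper = (i >= j).ravel()           # mask selecting one triangle of the lag plane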
717 """Fig. 14. of Astier+19
719 Cumulative sum of a_ij as a function of maximum separation. This plot displays the average over
724 aDict : `dict`, [`numpy.array`]
725 Dictionary keyed by amp names containing the fitted 'a' coefficients from the model
726 in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
728 bDict : `dict`, [`numpy.array`]
729 Dictionary keyed by amp names containing the fitted 'b' coefficients from the model
730 in Eq. 20 of Astier+19 (if `ptcFitType` is `FULLCOVARIANCE`).
732 pdfPages: `matplotlib.backends.backend_pdf.PdfPages`
733 PDF file where the plots will be saved.
        assert len(aDict) == len(bDict)
        a, b = [], []
        for amp in aDict:
            if np.isnan(aDict[amp]).all() or np.isnan(bDict[amp]).all():
                continue
            a.append(aDict[amp])
            b.append(bDict[amp])
        a = np.array(a).mean(axis=0)
        b = np.array(b).mean(axis=0)
        fig = plt.figure(figsize=(7, 6))
        # Weights account for the fourfold symmetry of the lags: off-axis lags are counted
        # four times, lags on the axes twice, and the central lag once.
        w = 4*np.ones_like(a)
        w[0, 1:] = 2
        w[1:, 0] = 2
        w[0, 0] = 1
        wa = w*a
        indices = range(1, a.shape[0]+1)
        sums = np.array([wa[0:n, 0:n].sum() for n in indices])
        ax = plt.subplot(111)
        ax.plot(indices, sums/sums[0], 'o', color='b')
        ax.set_xlim(indices[0]-0.5, indices[-1]+0.5)
        ax.set_ylim(None, 1.2)
        ax.set_ylabel(r'$[\sum_{|i|<n\ &\ |j|<n} a_{ij}] / |a_{00}|$', fontsize='x-large')
        ax.set_xlabel('n', fontsize='x-large')
        ax.tick_params(axis='both', labelsize='x-large')
        pdfPages.savefig(fig)
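        # Worked example of the cumulative sum above (hedged; toy 3x3 'a' matrix, using the
        # symmetry weights defined in this method):
        #
        #   a = np.array([[1.0e-6, 2.0e-7, 5.0e-8],
        #                 [2.0e-7, 1.0e-7, 3.0e-8],
        #                 [5.0e-8, 3.0e-8, 1.0e-8]])
        #   w = 4*np.ones_like(a); w[0, 1:] = 2; w[1:, 0] = 2; w[0, 0] = 1
        #   wa = w*a
        #   sums = np.array([wa[:n, :n].sum() for n in range(1, a.shape[0]+1)])
        #   sums/sums[0]   # -> approximately array([1., 2.2, 2.68]); shows the growth with n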
    @staticmethod
    def plotRelativeBiasACoeffs(aDict, aDictNoB, fullCovsModel, fullCovsModelNoB, signalElectrons,
                                gainDict, pdfPages, maxr=None):
        """Fig. 15 in Astier+19.

        Illustrates the systematic bias from estimating the 'a'
        coefficients from the slope of correlations as opposed to the
        full model in Astier+19.

        Parameters
        ----------
        aDict : `dict`, [`str`, `numpy.array`]
            Dictionary of 'a' matrices (Eq. 20, Astier+19), with amp names as keys.

        aDictNoB : `dict`, [`str`, `numpy.array`]
            Dictionary of 'a' matrices ('b' = 0 in Eq. 20, Astier+19), with amp names as keys.

        fullCovsModel : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing the covariance model per mean flux.

        fullCovsModelNoB : `dict`, [`str`, `list`]
            Dictionary keyed by amp names containing the covariance model (with 'b'=0 in Eq. 20 of
            Astier+19) per mean flux.

        signalElectrons : `float`
            Signal at which to evaluate the a_ij coefficients.

        pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.

        gainDict : `dict`, [`str`, `float`]
            Dictionary keyed by amp names with the gains in e-/ADU.

        maxr : `int`, optional
            Maximum lag.
        """
        fig = plt.figure(figsize=(7, 11))
        title = [f"'a' relative bias at {signalElectrons} e", "'a' relative bias (b=0)"]
        data = [(aDict, fullCovsModel), (aDictNoB, fullCovsModelNoB)]

        for k, pair in enumerate(data):
            diffs = []
            amean = []
            for amp in pair[0]:
                covModel = pair[1][amp]
                if np.isnan(covModel).all():
                    continue
                # 'a' coefficients estimated from the slope of the correlations.
                aOld = computeApproximateAcoeffs(covModel, signalElectrons, gainDict[amp])
                a = pair[0][amp]
                amean.append(a)
                diffs.append((aOld-a))
            amean = np.array(amean).mean(axis=0)
            diff = np.array(diffs).mean(axis=0)
            diff = diff[:maxr, :maxr]
            ax0 = fig.add_subplot(2, 1, k+1)
            im0 = ax0.imshow(diff.transpose(), origin='lower')
            ax0.yaxis.set_major_locator(MaxNLocator(integer=True))
            ax0.xaxis.set_major_locator(MaxNLocator(integer=True))
            ax0.tick_params(axis='both', labelsize='x-large')
            ax0.set_title(title[k])

        pdfPages.savefig(fig)
    def _plotStandardPtc(self, dataset, ptcFitType, pdfPages):
        """Plot PTC, var/signal vs signal, linearity, and linearity residual per amplifier.

        Parameters
        ----------
        dataset : `lsst.ip.isr.ptcDataset.PhotonTransferCurveDataset`
            The dataset containing the means, variances, exposure times, and mask.

        ptcFitType : `str`
            Type of the model fit to the PTC. Options: 'FULLCOVARIANCE', 'EXPAPPROXIMATION', or 'POLYNOMIAL'.

        pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.
        """
        if ptcFitType == 'EXPAPPROXIMATION':
            ptcFunc = funcAstier
            stringTitle = (r"Var = $\frac{1}{2g^2a_{00}}(\exp (2a_{00} \mu g) - 1) + \frac{n_{00}}{g^2}$ ")
        elif ptcFitType == 'POLYNOMIAL':
            ptcFunc = funcPolynomial
            for key in dataset.ptcFitPars:
                deg = len(dataset.ptcFitPars[key]) - 1
            stringTitle = r"Polynomial (degree: %g)" % (deg)
        else:
            raise RuntimeError(f"The input dataset had an invalid dataset.ptcFitType: {ptcFitType}. \n"
                               "Options: 'FULLCOVARIANCE', 'EXPAPPROXIMATION', or 'POLYNOMIAL'.")
        supTitleFontSize = 18

        nAmps = len(dataset.ampNames)
        nRows = np.sqrt(nAmps)
        mantissa, _ = np.modf(nRows)
        if mantissa > 0:
            nRows = int(nRows) + 1
            nCols = nRows
        else:
            nRows = int(nRows)
            nCols = nRows

        f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        f3, ax3 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        for i, (amp, a, a2, a3) in enumerate(zip(dataset.ampNames, ax.flatten(), ax2.flatten(),
                                                 ax3.flatten())):
            meanVecOriginal = np.ravel(np.array(dataset.rawMeans[amp]))
            varVecOriginal = np.ravel(np.array(dataset.rawVars[amp]))
            mask = np.ravel(np.array(dataset.expIdMask[amp]))
            if np.isnan(mask[0]):
                a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a3.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                continue
            mask = mask.astype(bool)
            meanVecFinal = meanVecOriginal[mask]
            varVecFinal = varVecOriginal[mask]
            meanVecOutliers = meanVecOriginal[np.invert(mask)]
            varVecOutliers = varVecOriginal[np.invert(mask)]
            pars, parsErr = np.array(dataset.ptcFitPars[amp]), np.array(dataset.ptcFitParsError[amp])
            ptcRedChi2 = dataset.ptcFitChiSq[amp]
            if ptcFitType == 'EXPAPPROXIMATION':
                if len(meanVecFinal):
                    ptcA00, ptcA00error = pars[0], parsErr[0]
                    ptcGain, ptcGainError = pars[1], parsErr[1]
                    ptcNoise = np.sqrt(pars[2])
                    ptcNoiseAdu = ptcNoise*(1./ptcGain)
                    ptcNoiseError = 0.5*(parsErr[2]/np.fabs(pars[2]))*np.sqrt(np.fabs(pars[2]))
                    stringLegend = (f"a00: {ptcA00:.2e}+/-{ptcA00error:.2e} 1/e"
                                    f"\nGain: {ptcGain:.4}+/-{ptcGainError:.2e} e/ADU"
                                    f"\nNoise: {ptcNoise:.4}+/-{ptcNoiseError:.2e} e\n"
                                    r"$\chi^2_{\rm{red}}$: " + f"{ptcRedChi2:.4}"
                                    f"\nLast in fit: {meanVecFinal[-1]:.7} ADU ")
            if ptcFitType == 'POLYNOMIAL':
                if len(meanVecFinal):
                    ptcGain, ptcGainError = 1./pars[1], np.fabs(1./pars[1])*(parsErr[1]/pars[1])
                    ptcNoiseAdu = np.sqrt(pars[0])
                    ptcNoise = ptcNoiseAdu*ptcGain
                    ptcNoiseError = (0.5*(parsErr[0]/np.fabs(pars[0]))*(np.sqrt(np.fabs(pars[0]))))*ptcGain
                    stringLegend = (f"Gain: {ptcGain:.4}+/-{ptcGainError:.2e} e/ADU\n"
                                    f"Noise: {ptcNoise:.4}+/-{ptcNoiseError:.2e} e\n"
                                    r"$\chi^2_{\rm{red}}$: " + f"{ptcRedChi2:.4}"
                                    f"\nLast in fit: {meanVecFinal[-1]:.7} ADU ")
            a.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
            a.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
            a.tick_params(labelsize=11)
            a.set_xscale('linear')
            a.set_yscale('linear')

            a2.set_xlabel(r'Mean Signal ($\mu$, ADU)', fontsize=labelFontSize)
            a2.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
            a2.tick_params(labelsize=11)
            a2.set_xscale('log')
            a2.set_yscale('log')

            a3.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
            a3.set_ylabel(r'Variance/$\mu$ (ADU)', fontsize=labelFontSize)
            a3.tick_params(labelsize=11)
            a3.set_yscale('linear')
            minMeanVecFinal = np.nanmin(meanVecFinal)
            maxMeanVecFinal = np.nanmax(meanVecFinal)
            meanVecFit = np.linspace(minMeanVecFinal, maxMeanVecFinal, 100*len(meanVecFinal))
            minMeanVecOriginal = np.nanmin(meanVecOriginal)
            maxMeanVecOriginal = np.nanmax(meanVecOriginal)
            deltaXlim = maxMeanVecOriginal - minMeanVecOriginal

            a.plot(meanVecFit, ptcFunc(pars, meanVecFit), color='red')
            a.plot(meanVecFinal, ptcNoiseAdu**2 + (1./ptcGain)*meanVecFinal, color='green')
            a.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
            a.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
            a.text(0.03, 0.66, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
            a.set_title(amp, fontsize=titleFontSize)
            a.set_xlim([minMeanVecOriginal - 0.2*deltaXlim, maxMeanVecOriginal + 0.2*deltaXlim])

            # Same PTC in log-log scale.
            a2.plot(meanVecFit, ptcFunc(pars, meanVecFit), color='red')
            a2.scatter(meanVecFinal, varVecFinal, c='blue', marker='o', s=markerSize)
            a2.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s', s=markerSize)
            a2.text(0.03, 0.66, stringLegend, transform=a2.transAxes, fontsize=legendFontSize)
            a2.set_title(amp, fontsize=titleFontSize)
            a2.set_xlim([minMeanVecOriginal, maxMeanVecOriginal])

            # Variance over mean signal.
            a3.plot(meanVecFit, ptcFunc(pars, meanVecFit)/meanVecFit, color='red')
            a3.scatter(meanVecFinal, varVecFinal/meanVecFinal, c='blue', marker='o', s=markerSize)
            a3.scatter(meanVecOutliers, varVecOutliers/meanVecOutliers, c='magenta', marker='s',
                       s=markerSize)
            a3.text(0.05, 0.1, stringLegend, transform=a3.transAxes, fontsize=legendFontSize)
            a3.set_title(amp, fontsize=titleFontSize)
            a3.set_xlim([minMeanVecOriginal - 0.2*deltaXlim, maxMeanVecOriginal + 0.2*deltaXlim])
        f.suptitle("PTC \n Fit: " + stringTitle, fontsize=supTitleFontSize)
        pdfPages.savefig(f)
        f2.suptitle("PTC (log-log)", fontsize=supTitleFontSize)
        pdfPages.savefig(f2)
        f3.suptitle(r"Var/$\mu$", fontsize=supTitleFontSize)
        pdfPages.savefig(f3)
    def _plotLinearizer(self, dataset, linearizer, pdfPages):
        """Plot linearity and linearity residual per amplifier.

        Parameters
        ----------
        dataset : `lsst.ip.isr.ptcDataset.PhotonTransferCurveDataset`
            The dataset containing the means, variances, exposure times, and mask.

        linearizer : `lsst.ip.isr.Linearizer`
            The linearizer to plot.

        pdfPages : `matplotlib.backends.backend_pdf.PdfPages`
            PDF file where the plots will be saved.
        """
        supTitleFontSize = 18

        nAmps = len(dataset.ampNames)
        nRows = np.sqrt(nAmps)
        mantissa, _ = np.modf(nRows)
        if mantissa > 0:
            nRows = int(nRows) + 1
            nCols = nRows
        else:
            nRows = int(nRows)
            nCols = nRows

        # Plot mean vs exposure time (f) and the fractional nonlinearity residual (f2).
        f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
        for i, (amp, a, a2) in enumerate(zip(dataset.ampNames, ax.flatten(), ax2.flatten())):
            mask = dataset.expIdMask[amp]
            if np.isnan(mask[0]):
                a.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                a2.set_title(f"{amp} (BAD)", fontsize=titleFontSize)
                continue
            mask = mask.astype(bool)
            meanVecFinal = np.array(dataset.rawMeans[amp])[mask]
            timeVecFinal = np.array(dataset.rawExpTimes[amp])[mask]
            a.set_xlabel('Time (sec)', fontsize=labelFontSize)
            a.set_ylabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
            a.tick_params(labelsize=labelFontSize)
            a.set_xscale('linear')
            a.set_yscale('linear')

            a2.axhline(y=0, color='k')
            a2.axvline(x=0, color='k', linestyle='-')
            a2.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
            a2.set_ylabel('Fractional nonlinearity (%)', fontsize=labelFontSize)
            a2.tick_params(labelsize=labelFontSize)
            a2.set_xscale('linear')
            a2.set_yscale('linear')
            pars, parsErr = linearizer.fitParams[amp], linearizer.fitParamsErr[amp]
            k0, k0Error = pars[0], parsErr[0]
            k1, k1Error = pars[1], parsErr[1]
            k2, k2Error = pars[2], parsErr[2]
            linRedChi2 = linearizer.fitChiSq[amp]
            stringLegend = (f"k0: {k0:.4}+/-{k0Error:.2e} ADU\nk1: {k1:.4}+/-{k1Error:.2e} ADU/t"
                            f"\nk2: {k2:.2e}+/-{k2Error:.2e} ADU/t^2\n"
                            r"$\chi^2_{\rm{red}}$: " + f"{linRedChi2:.4}")
            a.scatter(timeVecFinal, meanVecFinal)
            a.plot(timeVecFinal, funcPolynomial(pars, timeVecFinal), color='red')
            a.text(0.03, 0.75, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
            a.set_title(f"{amp}", fontsize=titleFontSize)

            linearPart = k0 + k1*timeVecFinal
            fracLinRes = 100*(linearPart - meanVecFinal)/linearPart
            a2.plot(meanVecFinal, fracLinRes, c='g')
            a2.set_title(f"{amp}", fontsize=titleFontSize)
        f.suptitle("Linearity \n Fit: Polynomial (degree: %g)" % (len(pars) - 1),  # degree from number of fit parameters
                   fontsize=supTitleFontSize)
        pdfPages.savefig(f)
        f2.suptitle(r"Fractional NL residual" "\n"
                    r"$100\times \frac{(k_0 + k_1*Time-\mu)}{k_0+k_1*Time}$",
                    fontsize=supTitleFontSize)
        pdfPages.savefig(f2)
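        # Worked example of the fractional-nonlinearity residual plotted above (hedged,
        # hypothetical numbers): with k0 = 10 ADU and k1 = 1000 ADU/s at t = 10 s, the linear
        # prediction is 10010 ADU; a measured mean of 9910 ADU gives
        #
        #   100*(10010. - 9910.)/10010.   # ~ 1.0 per cent nonlinearity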
1077 """Group data into bins, with at most maxDiff distance between bins.
1085 Maximum distance between bins.
1094 index = np.zeros_like(x, dtype=np.int32)
1099 for i
in range(1, len(ix)):
1101 if (xval - xc < maxDiff):
1102 xc = (ng*xc + xval)/(ng+1)
1104 index[ix[i]] = group
1108 index[ix[i]] = group
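        # Usage sketch (hedged, hypothetical numbers): values that stay within `maxDiff`
        # of the running group mean share a bin index, e.g.
        #
        #   x = np.array([1.0, 1.1, 5.0, 5.2, 9.9])
        #   PlotPhotonTransferCurveTaskGen2.findGroups(x, maxDiff=1.0)
        #   # -> array([0, 0, 1, 1, 2], dtype=int32)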
1115 """Builds an index with regular binning. The result can be fed into binData.
1126 np.digitize(x, bins): `numpy.array`
1130 bins = np.linspace(x.min(), x.max() + abs(x.max() * 1e-7), nBins + 1)
1131 return np.digitize(x, bins)
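        # Usage sketch (hedged, hypothetical numbers): np.digitize against nBins equally spaced
        # edges returns 1-based bin numbers suitable for `binData`; the tiny epsilon on the upper
        # edge keeps the maximum value inside the last bin, e.g.
        #
        #   x = np.array([0., 1., 2., 3., 4.])
        #   PlotPhotonTransferCurveTaskGen2.indexForBins(x, nBins=2)
        #   # -> array([1, 1, 1, 2, 2])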
1135 """Bin data (usually for display purposes).
1146 Bin number of each datum.
1149 Inverse rms of each datum to use when averaging (the actual weight is wy**2).
1160 wybin: `numpy.array`
1161 Binned weights in y, computed from wy's in each bin.
1163 sybin: `numpy.array`
1164 Uncertainty on the bin average, considering actual scatter, and ignoring weights.
1168 wy = np.ones_like(x)
1169 binIndexSet = set(binIndex)
1172 xbin = np.array([xw2[binIndex == i].sum()/w2[binIndex == i].sum()
for i
in binIndexSet])
1175 ybin = np.array([yw2[binIndex == i].sum()/w2[binIndex == i].sum()
for i
in binIndexSet])
1177 wybin = np.sqrt(np.array([w2[binIndex == i].sum()
for i
in binIndexSet]))
1178 sybin = np.array([y[binIndex == i].
std()/np.sqrt(np.array([binIndex == i]).sum())
1179 for i
in binIndexSet])
1181 return xbin, ybin, wybin, sybin
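        # Usage sketch (hedged, hypothetical numbers): combined with `indexForBins`, this
        # produces weighted bin averages and their uncertainties for display, e.g.
        #
        #   x = np.linspace(100., 1000., 50)
        #   y = 0.01*x + np.random.normal(0., 1., 50)
        #   gind = PlotPhotonTransferCurveTaskGen2.indexForBins(x, 10)
        #   xb, yb, wyb, syb = PlotPhotonTransferCurveTaskGen2.binData(x, y, gind)
        #   plt.errorbar(xb, yb, yerr=syb, marker='o', linestyle='none')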