lsst.cp.pipe  18.1.0-4-g9828021+6
ptc.py
1 # This file is part of cp_pipe.
2 #
3 # Developed for the LSST Data Management System.
4 # This product includes software developed by the LSST Project
5 # (https://www.lsst.org).
6 # See the COPYRIGHT file at the top-level directory of this distribution
7 # for details of code ownership.
8 #
9 # This program is free software: you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation, either version 3 of the License, or
12 # (at your option) any later version.
13 #
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
18 #
19 # You should have received a copy of the GNU General Public License
20 # along with this program. If not, see <https://www.gnu.org/licenses/>.
21 #
22 
23 __all__ = ['MeasurePhotonTransferCurveTask',
24  'MeasurePhotonTransferCurveTaskConfig', ]
25 
26 import numpy as np
27 import matplotlib.pyplot as plt
28 import os
29 from matplotlib.backends.backend_pdf import PdfPages
30 from sqlite3 import OperationalError
31 
32 import lsst.afw.math as afwMath
33 import lsst.pex.config as pexConfig
34 import lsst.pipe.base as pipeBase
35 from lsst.ip.isr import IsrTask
36 from .utils import (NonexistentDatasetTaskDataIdContainer, PairedVisitListTaskRunner,
37  checkExpLengthEqual, validateIsrConfig)
38 from scipy.optimize import leastsq
39 import numpy.polynomial.polynomial as poly
40 
41 
42 class MeasurePhotonTransferCurveTaskConfig(pexConfig.Config):
43  """Config class for photon transfer curve measurement task"""
44  isr = pexConfig.ConfigurableField(
45  target=IsrTask,
46  doc="""Task to perform instrumental signature removal.""",
47  )
48  isrMandatorySteps = pexConfig.ListField(
49  dtype=str,
50  doc="isr operations that must be performed for valid results. Raises if any of these are False.",
51  default=['doAssembleCcd']
52  )
53  isrForbiddenSteps = pexConfig.ListField(
54  dtype=str,
55  doc="isr operations that must NOT be performed for valid results. Raises if any of these are True",
56  default=['doFlat', 'doFringe', 'doAddDistortionModel', 'doBrighterFatter', 'doUseOpticsTransmission',
57  'doUseFilterTransmission', 'doUseSensorTransmission', 'doUseAtmosphereTransmission']
58  )
59  isrDesirableSteps = pexConfig.ListField(
60  dtype=str,
61  doc="isr operations that it is advisable to perform, but are not mission-critical." +
62  " WARNs are logged for any of these found to be False.",
63  default=['doBias', 'doDark', 'doCrosstalk', 'doDefect']
64  )
65  isrUndesirableSteps = pexConfig.ListField(
66  dtype=str,
67  doc="isr operations that it is *not* advisable to perform in the general case, but are not" +
68  " forbidden as some use-cases might warrant them." +
69  " WARNs are logged for any of these found to be True.",
70  default=['doLinearize']
71  )
72  ccdKey = pexConfig.Field(
73  dtype=str,
74  doc="The key by which to pull a detector from a dataId, e.g. 'ccd' or 'detector'.",
75  default='ccd',
76  )
77  makePlots = pexConfig.Field(
78  dtype=bool,
79  doc="Plot the PTC curves?.",
80  default=False,
81  )
82  ptcFitType = pexConfig.ChoiceField(
83  dtype=str,
84  doc="Fit PTC to approximation in Astier+19 (Equation 16) or to a polynomial.",
85  default="POLYNOMIAL",
86  allowed={
87  "POLYNOMIAL": "n-degree polynomial (use 'polynomialFitDegree' to set 'n').",
88  "ASTIERAPPROXIMATION": "Approximation in Astier+19 (Eq. 16)."
89  }
90  )
91  polynomialFitDegree = pexConfig.Field(
92  dtype=int,
93  doc="Degree of polynomial to fit the PTC, when 'ptcFitType'=POLYNOMIAL.",
94  default=2,
95  )
96  binSize = pexConfig.Field(
97  dtype=int,
98  doc="Bin the image by this factor in both dimensions.",
99  default=1,
100  )
101  minMeanSignal = pexConfig.Field(
102  dtype=float,
103  doc="Minimum value of mean signal (in ADU) to consider.",
104  default=0,
105  )
106  maxMeanSignal = pexConfig.Field(
107  dtype=float,
108  doc="Maximum value to of mean signal (in ADU) to consider.",
109  default=9e6,
110  )
111  sigmaCutPtcOutliers = pexConfig.Field(
112  dtype=float,
113  doc="Sigma cut for outlier rejection in PTC.",
114  default=4.0,
115  )
116  maxIterationsPtcOutliers = pexConfig.Field(
117  dtype=int,
118  doc="Maximum number of iterations for outlier rejection in PTC.",
119  default=2,
120  )
121  doFitBootstrap = pexConfig.Field(
122  dtype=bool,
123  doc="Use bootstrap for the PTC fit parameters and errors?.",
124  default=False,
125  )
126  linResidualTimeIndex = pexConfig.Field(
127  dtype=int,
128  doc="Index position in time array for reference time in linearity residual calculation.",
129  default=2,
130  )
131 
132 
133 class MeasurePhotonTransferCurveTask(pipeBase.CmdLineTask):
134  """A class to calculate, fit, and plot a PTC from a set of flat pairs.
135 
136  The Photon Transfer Curve (var(signal) vs mean(signal)) is a standard tool
137  used in the characterization of astronomical detectors (e.g., Janesick 2001,
138  Janesick 2007). This task calculates the PTC from a series of pairs of
139  flat-field images, each pair taken at the same exposure time. The
140  difference image of each pair is formed to eliminate fixed-pattern noise,
141  and the variance of that difference image, together with the average of the
142  two image means, is used to build the PTC. An n-degree polynomial or the
143  approximation in Equation 16 of Astier+19 ("The Shape of the Photon Transfer
144  Curve of CCD sensors", arXiv:1905.08677) can then be fitted to the PTC.
145  These models include parameters such as the gain (e/ADU) and readout noise.
146 
147  Parameters
148  ----------
149 
150  *args: `list`
151  Positional arguments passed to the Task constructor. None used at this
152  time.
153  **kwargs: `dict`
154  Keyword arguments passed on to the Task constructor. None used at this
155  time.
156 
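 Notes
 -----
 The doctest below is a minimal, standalone sketch (it does not use the
 butler or ISR machinery, and the parameter values are invented for
 illustration). It evaluates the Astier+19 Eq. 16 model fitted by this task
 and checks that, at low flux, it reduces to the familiar linear PTC with
 slope 1/gain:

 >>> import numpy as np
 >>> mu = np.linspace(100., 1000., 5)  # mean signal (ADU)
 >>> a00, gain, noise = -1.2e-6, 1.5, 8.  # illustrative values only
 >>> varAstier = 0.5/(a00*gain*gain)*(np.exp(2*a00*mu*gain) - 1) + noise/(gain*gain)
 >>> varLinear = mu/gain + noise/(gain*gain)  # low-flux limit of Eq. 16
 >>> bool(np.allclose(varAstier, varLinear, rtol=1e-2))
 True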
157  """
158 
159  RunnerClass = PairedVisitListTaskRunner
160  ConfigClass = MeasurePhotonTransferCurveTaskConfig
161  _DefaultName = "measurePhotonTransferCurve"
162 
163  def __init__(self, *args, **kwargs):
164  pipeBase.CmdLineTask.__init__(self, *args, **kwargs)
165  self.makeSubtask("isr")
166  plt.interactive(False) # stop windows popping up when plotting. When headless, use 'agg' backend too
167  validateIsrConfig(self.isr, self.config.isrMandatorySteps,
168  self.config.isrForbiddenSteps, self.config.isrDesirableSteps, checkTrim=False)
169  self.config.validate()
170  self.config.freeze()
171 
172  @classmethod
173  def _makeArgumentParser(cls):
174  """Augment argument parser for the MeasurePhotonTransferCurveTask."""
175  parser = pipeBase.ArgumentParser(name=cls._DefaultName)
176  parser.add_argument("--visit-pairs", dest="visitPairs", nargs="*",
177  help="Visit pairs to use. Each pair must be of the form INT,INT e.g. 123,456")
178  parser.add_id_argument("--id", datasetType="measurePhotonTransferCurveGainAndNoise",
179  ContainerClass=NonexistentDatasetTaskDataIdContainer,
180  help="The ccds to use, e.g. --id ccd=0..100")
181  return parser
182 
183  @pipeBase.timeMethod
184  def runDataRef(self, dataRef, visitPairs):
185  """Run the Photon Transfer Curve (PTC) measurement task.
186 
187  For a dataRef (which is each detector here),
188  and given a list of visit pairs at different exposure times,
189  measure the PTC.
190 
191  Parameters
192  ----------
193  dataRef : `lsst.daf.persistence.ButlerDataRef`
194  dataRef for the detector whose PTC is to be measured.
195  visitPairs : `iterable` of `tuple` of `int`
196  Pairs of visit numbers to be processed together
197  """
198 
199  # setup necessary objects
200  detNum = dataRef.dataId[self.config.ccdKey]
201  detector = dataRef.get('camera')[dataRef.dataId[self.config.ccdKey]]
202  # expand some missing fields that we need for lsstCam. This is a work-around
203  # for Gen2 problems that I (RHL) don't feel like solving. The calibs pipelines
204  # (which inherit from CalibTask) use addMissingKeys() to do basically the same thing
205  #
206  # Basically, the butler's trying to look up the fields in `raw_visit` which won't work
207  for name in dataRef.getButler().getKeys('bias'):
208  if name not in dataRef.dataId:
209  try:
210  dataRef.dataId[name] = \
211  dataRef.getButler().queryMetadata('raw', [name], detector=detNum)[0]
212  except OperationalError:
213  pass
214 
215  amps = detector.getAmplifiers()
216  ampNames = [amp.getName() for amp in amps]
217  dataDict = {key: {} for key in ampNames}
218  fitVectorsDict = {key: ([], [], []) for key in ampNames}
219 
220  self.log.info('Measuring PTC using %s visits for detector %s' % (visitPairs, detNum))
221 
222  for (v1, v2) in visitPairs:
223  # Perform ISR on each exposure
224  dataRef.dataId['visit'] = v1
225  exp1 = self.isr.runDataRef(dataRef).exposure
226  dataRef.dataId['visit'] = v2
227  exp2 = self.isr.runDataRef(dataRef).exposure
228  del dataRef.dataId['visit']
229 
230  checkExpLengthEqual(exp1, exp2, v1, v2, raiseWithMessage=True)
231  expTime = exp1.getInfo().getVisitInfo().getExposureTime()
232 
233  for amp in detector:
234  mu, varDiff = self.measureMeanVarPair(exp1, exp2, region=amp.getBBox())
235  data = dict(expTime=expTime, meanClip=mu, varClip=varDiff)
236  ampName = amp.getName()
237  dataDict[ampName][(v1, v2)] = data
238  fitVectorsDict[ampName][0].append(expTime)
239  fitVectorsDict[ampName][1].append(mu)
240  fitVectorsDict[ampName][2].append(varDiff)
241 
242  # Fit PTC and (non)linearity of signal vs time curve
243  fitPtcDict, nlDict, gainDict, noiseDict = self.fitPtcAndNl(fitVectorsDict,
244  ptcFitType=self.config.ptcFitType)
245  allDict = {"data": dataDict, "ptc": fitPtcDict, "nl": nlDict}
246  gainNoiseNlDict = {"gain": gainDict, "noise": noiseDict, "nl": nlDict}
247 
248  if self.config.makePlots:
249  self.plot(dataRef, fitPtcDict, nlDict, ptcFitType=self.config.ptcFitType)
250 
251  # Save data, PTC fit, and NL fit dictionaries
252  self.log.info(f"Writing PTC and NL data to {dataRef.getUri(write=True)}")
253  dataRef.put(gainNoiseNlDict, datasetType="measurePhotonTransferCurveGainAndNoise")
254  dataRef.put(allDict, datasetType="measurePhotonTransferCurveDatasetAll")
255 
256  self.log.info('Finished measuring PTC for detector %s' % detNum)
257 
258  return pipeBase.Struct(exitStatus=0)
259 
260  def measureMeanVarPair(self, exposure1, exposure2, region=None):
261  """Calculate the mean signal of two exposures and the variance of their difference.
262 
263  Parameters
264  ----------
265  exposure1 : `lsst.afw.image.exposure.exposure.ExposureF`
266  First exposure of flat field pair.
267 
268  exposure2 : `lsst.afw.image.exposure.exposure.ExposureF`
269  Second exposure of flat field pair.
270 
271  region : `lsst.geom.Box2I`
272  Region of each exposure where to perform the calculations (e.g, an amplifier).
273 
274  Returns
275  -------
276 
277  mu : `np.float`
278  0.5*(mu1 + mu2), where mu1 and mu2 are the clipped means of the regions in
279  both exposures.
280 
281  varDiff : `np.float`
282  Half of the clipped variance of the difference of the regions in the two input
283  exposures.
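
 Examples
 --------
 A plain-NumPy sketch of the same statistic (no clipping, no binning, no afw
 objects), using two synthetic flats with equal means; for independent
 images the returned variance is close to the per-image variance:

 >>> import numpy as np
 >>> rng = np.random.RandomState(42)
 >>> im1 = rng.normal(1000., 10., size=(200, 200))
 >>> im2 = rng.normal(1000., 10., size=(200, 200))
 >>> mu1, mu2 = im1.mean(), im2.mean()
 >>> mu = 0.5*(mu1 + mu2)
 >>> diff = (mu2*im1 - mu1*im2)/mu  # symmetric difference used by this method
 >>> varDiff = 0.5*np.var(diff)
 >>> bool(abs(varDiff - 100.) < 5.)
 True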
284  """
285 
286  if region is not None:
287  im1Area = exposure1.maskedImage[region]
288  im2Area = exposure2.maskedImage[region]
289  else:
290  im1Area = exposure1.maskedImage
291  im2Area = exposure2.maskedImage
292 
293  im1Area = afwMath.binImage(im1Area, self.config.binSize)
294  im2Area = afwMath.binImage(im2Area, self.config.binSize)
295 
296  # Clipped mean of images; then average of mean.
297  mu1 = afwMath.makeStatistics(im1Area, afwMath.MEANCLIP).getValue()
298  mu2 = afwMath.makeStatistics(im2Area, afwMath.MEANCLIP).getValue()
299  mu = 0.5*(mu1 + mu2)
300 
301  # Take difference of pairs
302  # symmetric formula: diff = (mu2*im1-mu1*im2)/(0.5*(mu1+mu2))
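 # (the rescaling by the two means compensates for small flux differences
 # between the two flats before the difference is taken)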
303  temp = im2Area.clone()
304  temp *= mu1
305  diffIm = im1Area.clone()
306  diffIm *= mu2
307  diffIm -= temp
308  diffIm /= mu
309 
310  varDiff = 0.5*(afwMath.makeStatistics(diffIm, afwMath.VARIANCECLIP).getValue())
311 
312  return mu, varDiff
313 
314  def _fitLeastSq(self, initialParams, dataX, dataY, function):
315  """Do a fit and estimate the parameter errors using using scipy.optimize.leastq.
316 
317  optimize.leastsq returns the fractional covariance matrix. To estimate the
318  standard deviation of the fit parameters, multiply the entries of this matrix
319  by the reduced chi squared and take the square root of the diagonal elements.
320 
321  Parameters
322  ----------
323  initialParams : list of np.float
324  initial values for fit parameters. For ptcFitType=POLYNOMIAL, its length
325  determines the degree of the polynomial.
326 
327  dataX : np.array of np.float
328  Data in the abscissa axis.
329 
330  dataY : np.array of np.float
331  Data in the ordinate axis.
332 
333  function : callable object (function)
334  Function to fit the data with.
335 
336  Returns
337  -------
338  pFitSingleLeastSquares : list of np.float
339  List with fitted parameters.
340 
341  pErrSingleLeastSquares : list of np.float
342  List with errors for fitted parameters.
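
 Examples
 --------
 A standalone sketch of the same error estimate applied to a toy
 straight-line fit (illustrative only; `line` below is not part of this
 task):

 >>> import numpy as np
 >>> from scipy.optimize import leastsq
 >>> def line(p, x):
 ...     return p[0] + p[1]*x
 >>> x = np.arange(10.)
 >>> y = 2. + 3.*x
 >>> y[0] += 0.1  # small perturbation so the reduced chi-squared is non-zero
 >>> pFit, pCov, _, _, _ = leastsq(lambda p: line(p, x) - y, [1., 1.], full_output=1)
 >>> redChiSq = ((line(pFit, x) - y)**2).sum()/(len(y) - len(pFit))
 >>> pErr = np.sqrt(np.diag(pCov*redChiSq))  # 1-sigma parameter errors
 >>> bool(np.all(np.abs(pFit - [2., 3.]) < 0.1))
 True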
343  """
344 
345  def errFunc(p, x, y):
346  return function(p, x) - y
347 
348  pFit, pCov, infoDict, errMessage, success = leastsq(errFunc, initialParams,
349  args=(dataX, dataY), full_output=1, epsfcn=0.0001)
350 
351  if (len(dataY) > len(initialParams)) and pCov is not None:
352  reducedChiSq = (errFunc(pFit, dataX, dataY)**2).sum()/(len(dataY)-len(initialParams))
353  pCov *= reducedChiSq
354  else:
355  pCov = np.full((len(initialParams), len(initialParams)), np.inf)  # per-parameter errors become inf below instead of raising
356 
357  errorVec = []
358  for i in range(len(pFit)):
359  errorVec.append(np.fabs(pCov[i][i])**0.5)
360 
361  pFitSingleLeastSquares = pFit
362  pErrSingleLeastSquares = np.array(errorVec)
363 
364  return pFitSingleLeastSquares, pErrSingleLeastSquares
365 
366  def _fitBootstrap(self, initialParams, dataX, dataY, function, confidenceSigma=1.):
367  """Do a fit using least squares and bootstrap to estimate parameter errors.
368 
369  The bootstrap error bars are calculated by refitting 100 synthetic data sets, each generated by perturbing the data with Gaussian noise scaled to the fit residuals.
370 
371  Parameters
372  ----------
373  initialParams : list of np.float
374  initial values for fit parameters. For ptcFitType=POLYNOMIAL, its length
375  determines the degree of the polynomial.
376 
377  dataX : np.array of np.float
378  Data in the abscissa axis.
379 
380  dataY : np.array of np.float
381  Data in the ordinate axis.
382 
383  function : callable object (function)
384  Function to fit the data with.
385 
386  confidenceSigma : np.float
387  Number of sigmas that determine confidence interval for the bootstrap errors.
388 
389  Returns
390  -------
391  pFitBootstrap : list of np.float
392  List with fitted parameters.
393 
394  pErrBootstrap : list of np.float
395  List with errors for fitted parameters.
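
 Examples
 --------
 A standalone sketch of the same resampling idea for a toy straight-line
 fit, with a fixed seed and only 20 synthetic data sets (illustrative only;
 `errLine` below is not part of this task):

 >>> import numpy as np
 >>> from scipy.optimize import leastsq
 >>> rng = np.random.RandomState(0)
 >>> x = np.arange(20.)
 >>> y = 2. + 3.*x + rng.normal(0., 0.5, len(x))
 >>> def errLine(p, yy):
 ...     return p[0] + p[1]*x - yy
 >>> pFit, _ = leastsq(errLine, [1., 1.], args=(y,))
 >>> sigma = np.std(errLine(pFit, y))  # scatter of the residuals
 >>> pars = []
 >>> for _ in range(20):
 ...     yRand = y + rng.normal(0., sigma, len(y))
 ...     pRand, _ = leastsq(errLine, [1., 1.], args=(yRand,))
 ...     pars.append(pRand)
 >>> pErr = np.std(np.array(pars), 0)  # 1-sigma bootstrap errors
 >>> bool(np.all(pErr < 1.))
 True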
396  """
397 
398  def errFunc(p, x, y):
399  return function(p, x) - y
400 
401  # Fit first time
402  pFit, _ = leastsq(errFunc, initialParams, args=(dataX, dataY), full_output=0)
403 
404  # Get the stdev of the residuals
405  residuals = errFunc(pFit, dataX, dataY)
406  sigmaErrTotal = np.std(residuals)
407 
408  # 100 random data sets are generated and fitted
409  pars = []
410  for i in range(100):
411  randomDelta = np.random.normal(0., sigmaErrTotal, len(dataY))
412  randomDataY = dataY + randomDelta
413  randomFit, _ = leastsq(errFunc, initialParams,
414  args=(dataX, randomDataY), full_output=0)
415  pars.append(randomFit)
416  pars = np.array(pars)
417  meanPfit = np.mean(pars, 0)
418 
419  # confidence interval for parameter estimates
420  nSigma = confidenceSigma
421  errPfit = nSigma*np.std(pars, 0)
422  pFitBootstrap = meanPfit
423  pErrBootstrap = errPfit
424  return pFitBootstrap, pErrBootstrap
425 
426  def funcPolynomial(self, pars, x):
427  """Polynomial function definition"""
428  return poly.polyval(x, [*pars])
429 
430  def funcAstier(self, pars, x):
431  """Single brighter-fatter parameter model for PTC; Equation 16 of Astier+19"""
432  a00, gain, noise = pars
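 # At low flux this reduces to the classical linear PTC, var = mu/gain + noise/gain**2;
 # a00 (typically negative) encodes the brighter-fatter-induced roll-off at high flux.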
433  return 0.5/(a00*gain*gain)*(np.exp(2*a00*x*gain)-1) + noise/(gain*gain)
434 
435  def fitPtcAndNl(self, fitVectorsDict, ptcFitType='POLYNOMIAL'):
436  """Function to fit PTC, and calculate linearity and linearity residual
437 
438  Parameters
439  ----------
440  fitVectorsDict : `dict`
441  Dictionary, keyed by amplifier name, of tuples of exposure-time, mean, and variance vectors.
442  ptcFitType : `str`
443  Fit a 'POLYNOMIAL' (degree: 'polynomialFitDegree') or an
444  'ASTIERAPPROXIMATION' to the PTC.
445 
446  Returns
447  -------
448  fitPtcDict : `dict`
449  Dictionary of the form fitPtcDict[amp] = (timeVecOriginal, meanVecOriginal,
450  varVecOriginal, timeVecFinal, meanVecFinal, varVecFinal, parsFit, parsFitErr)
451  nlDict : `dict`
452  Dictionary of the form nlDict[amp] =
453  (timeVecFinal, meanVecFinal, linResidual, parsFit, parsFitErr)
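
 Examples
 --------
 A minimal sketch of the linearity-residual definition used below
 (the numbers are invented and `ref` stands in for
 config.linResidualTimeIndex):

 >>> import numpy as np
 >>> timeVec = np.array([1., 2., 4., 8.])
 >>> meanVec = np.array([100., 200., 400., 780.])  # last point is 2.5% low
 >>> ref = 2
 >>> linResidual = 100*(1 - (meanVec[ref]/timeVec[ref])/(meanVec/timeVec))
 >>> float(np.round(linResidual[-1], 2))
 -2.56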
454  """
455  if ptcFitType == 'ASTIERAPPROXIMATION':
456  ptcFunc = self.funcAstier
457  parsIniPtc = [-1e-9, 1.0, 10.] # a00, gain, noise
458  if ptcFitType == 'POLYNOMIAL':
459  ptcFunc = self.funcPolynomial
460  parsIniPtc = np.repeat(1., self.config.polynomialFitDegree + 1)
461 
462  parsIniNl = [1., 1., 1.]
463  fitPtcDict = {key: {} for key in fitVectorsDict}
464  nlDict = {key: {} for key in fitVectorsDict}
465  gainDict = {key: {} for key in fitVectorsDict}
466  noiseDict = {key: {} for key in fitVectorsDict}
467 
468  def errFunc(p, x, y):
469  return ptcFunc(p, x) - y
470 
471  maxIterationsPtcOutliers = self.config.maxIterationsPtcOutliers
472  for amp in fitVectorsDict:
473  timeVec, meanVec, varVec = fitVectorsDict[amp]
474  timeVecOriginal = np.array(timeVec)
475  meanVecOriginal = np.array(meanVec)
476  varVecOriginal = np.array(varVec)
477  index0 = ((meanVecOriginal > self.config.minMeanSignal) &
478  (meanVecOriginal <= self.config.maxMeanSignal))
479  # Before bootstrap fit, do an iterative fit to get rid of outliers in PTC
480  count = 1
481  sigmaCutPtcOutliers = self.config.sigmaCutPtcOutliers
482  maxIterationsPtcOutliers = self.config.maxIterationsPtcOutliers
483  timeTempVec = timeVecOriginal[index0]
484  meanTempVec = meanVecOriginal[index0]
485  varTempVec = varVecOriginal[index0]
486  while count <= maxIterationsPtcOutliers:
487  pars, cov = leastsq(errFunc, parsIniPtc, args=(meanTempVec,
488  varTempVec), full_output=0)
489  sigResids = (varTempVec -
490  ptcFunc(pars, meanTempVec))/np.sqrt(varTempVec)
491  index = list(np.where(np.abs(sigResids) < sigmaCutPtcOutliers)[0])
492  timeTempVec = timeTempVec[index]
493  meanTempVec = meanTempVec[index]
494  varTempVec = varTempVec[index]
495  count += 1
496 
497  parsIniPtc = pars
498  timeVecFinal, meanVecFinal, varVecFinal = timeTempVec, meanTempVec, varTempVec
499  if (len(meanVecOriginal) - len(meanVecFinal)) > 0:
500  self.log.info((f"Number of points discarded in PTC of amplifier {amp}: " +
501  f"{len(meanVecOriginal)-len(meanVecFinal)} out of {len(meanVecOriginal)}"))
502 
503  if (len(meanVecFinal) < len(parsIniPtc)):
504  raise RuntimeError(f"Not enough data points ({len(meanVecFinal)}) compared to the number of" +
505  "parameters of the PTC model({len(parsIniPtc)}).")
506  # Fit the PTC
507  if self.config.doFitBootstrap:
508  parsFit, parsFitErr = self._fitBootstrap(parsIniPtc, meanVecFinal, varVecFinal, ptcFunc)
509  else:
510  parsFit, parsFitErr = self._fitLeastSq(parsIniPtc, meanVecFinal, varVecFinal, ptcFunc)
511 
512  fitPtcDict[amp] = (timeVecOriginal, meanVecOriginal, varVecOriginal, timeVecFinal,
513  meanVecFinal, varVecFinal, parsFit, parsFitErr)
514 
515  if ptcFitType == 'ASTIERAPPROXIMATION':
516  ptcGain = parsFit[1]
517  ptcGainErr = parsFitErr[1]
518  ptcNoise = np.sqrt(np.fabs(parsFit[2]))
519  ptcNoiseErr = 0.5*(parsFitErr[2]/np.fabs(parsFit[2]))*np.sqrt(np.fabs(parsFit[2]))
520  if ptcFitType == 'POLYNOMIAL':
521  ptcGain = 1./parsFit[1]
522  ptcGainErr = np.fabs(1./parsFit[1])*(parsFitErr[1]/parsFit[1])
523  ptcNoise = np.sqrt(np.fabs(parsFit[0]))*ptcGain
524  ptcNoiseErr = (0.5*(parsFitErr[0]/np.fabs(parsFit[0]))*(np.sqrt(np.fabs(parsFit[0]))))*ptcGain
525 
526  gainDict[amp] = (ptcGain, ptcGainErr)
527  noiseDict[amp] = (ptcNoise, ptcNoiseErr)
528 
529  # Non-linearity residuals (NL of mean vs time curve): percentage, and fit to a quadratic function
530  # In this case, len(parsIniNl) = 3 indicates that we want a quadratic fit
531  if self.config.doFitBootstrap:
532  parsFit, parsFitErr = self._fitBootstrap(parsIniNl, timeVecFinal, meanVecFinal,
533  self.funcPolynomial)
534  else:
535  parsFit, parsFitErr = self._fitLeastSq(parsIniNl, timeVecFinal, meanVecFinal,
536  self.funcPolynomial)
537  linResidualTimeIndex = self.config.linResidualTimeIndex
538  if timeVecFinal[linResidualTimeIndex] == 0.0:
539  raise RuntimeError("Reference time for linearity residual can't be 0.0")
540  linResidual = 100*(1 - ((meanVecFinal[linResidualTimeIndex] /
541  timeVecFinal[linResidualTimeIndex]) / (meanVecFinal/timeVecFinal)))
542  nlDict[amp] = (timeVecFinal, meanVecFinal, linResidual, parsFit, parsFitErr)
543 
544  return fitPtcDict, nlDict, gainDict, noiseDict
545 
546  def plot(self, dataRef, fitPtcDict, nlDict, ptcFitType='POLYNOMIAL'):
547  dirname = dataRef.getUri(datasetType='cpPipePlotRoot', write=True)
548  if not os.path.exists(dirname):
549  os.makedirs(dirname)
550 
551  detNum = dataRef.dataId[self.config.ccdKey]
552  filename = f"PTC_det{detNum}.pdf"
553  filenameFull = os.path.join(dirname, filename)
554  with PdfPages(filenameFull) as pdfPages:
555  self._plotPtc(fitPtcDict, nlDict, ptcFitType, pdfPages)
556 
557  def _plotPtc(self, fitPtcDict, nlDict, ptcFitType, pdfPages):
558  """Plot PTC, linearity, and linearity residual per amplifier"""
559 
560  if ptcFitType == 'ASTIERAPPROXIMATION':
561  ptcFunc = self.funcAstier
562  stringTitle = r"Var = $\frac{1}{2g^2a_{00}}(\exp (2a_{00} \mu g) - 1) + \frac{n_{00}}{g^2}$"
563 
564  if ptcFitType == 'POLYNOMIAL':
565  ptcFunc = self.funcPolynomial
566  stringTitle = f"Polynomial (degree: {self.config.polynomialFitDegree})"
567 
568  legendFontSize = 7.5
569  labelFontSize = 8
570  titleFontSize = 10
571  supTitleFontSize = 18
572 
573  # General determination of the size of the plot grid
574  nAmps = len(fitPtcDict)
575  nRows = np.sqrt(nAmps)
576  mantissa, _ = np.modf(nRows)
577  if mantissa > 0:
578  nRows = int(nRows) + 1
579  nCols = nRows
580  else:
581  nRows = int(nRows)
582  nCols = nRows
583  if nAmps == 2:  # two amplifiers read better in a single column
584  nRows, nCols = 2, 1
585 
586  f, ax = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
587  f2, ax2 = plt.subplots(nrows=nRows, ncols=nCols, sharex='col', sharey='row', figsize=(13, 10))
588 
589  # fitPtcDict[amp] = (timeVecOriginal, meanVecOriginal, varVecOriginal, timeVecFinal,
590  # meanVecFinal, varVecFinal, parsFit, parsFitErr)
591  for i, (amp, a, a2) in enumerate(zip(fitPtcDict, ax.flatten(), ax2.flatten())):
592  meanVecOriginal, varVecOriginal = fitPtcDict[amp][1], fitPtcDict[amp][2]
593  meanVecFinal, varVecFinal = fitPtcDict[amp][4], fitPtcDict[amp][5]
594  meanVecOutliers = np.setdiff1d(meanVecOriginal, meanVecFinal)
595  varVecOutliers = np.setdiff1d(varVecOriginal, varVecFinal)
596  pars, parsErr = fitPtcDict[amp][6], fitPtcDict[amp][7]
597 
598  if ptcFitType == 'ASTIERAPPROXIMATION':
599  ptcA00, ptcA00error = pars[0], parsErr[0]
600  ptcGain, ptcGainError = pars[1], parsErr[1]
601  ptcNoise = np.sqrt(np.fabs(pars[2]))
602  ptcNoiseError = 0.5*(parsErr[2]/np.fabs(pars[2]))*np.sqrt(np.fabs(pars[2]))
603  stringLegend = (f"a00: {ptcA00:.2e}+/-{ptcA00error:.2e}"
604  f"\n Gain: {ptcGain:.4}+/-{ptcGainError:.2e}"
605  f"\n Noise: {ptcNoise:.4}+/-{ptcNoiseError:.2e}")
606 
607  if ptcFitType == 'POLYNOMIAL':
608  ptcGain, ptcGainError = 1./pars[1], np.fabs(1./pars[1])*(parsErr[1]/pars[1])
609  ptcNoise = np.sqrt(np.fabs(pars[0]))*ptcGain
610  ptcNoiseError = (0.5*(parsErr[0]/np.fabs(pars[0]))*(np.sqrt(np.fabs(pars[0]))))*ptcGain
611  stringLegend = (f"Noise: {ptcNoise:.4}+/-{ptcNoiseError:.2e} \n"
612  f"Gain: {ptcGain:.4}+/-{ptcGainError:.2e}")
613 
614  minMeanVecFinal = np.min(meanVecFinal)
615  maxMeanVecFinal = np.max(meanVecFinal)
616  meanVecFit = np.linspace(minMeanVecFinal, maxMeanVecFinal, 100*len(meanVecFinal))
617  minMeanVecOriginal = np.min(meanVecOriginal)
618  maxMeanVecOriginal = np.max(meanVecOriginal)
619  deltaXlim = maxMeanVecOriginal - minMeanVecOriginal
620 
621  a.plot(meanVecFit, ptcFunc(pars, meanVecFit), color='red')
622  a.plot(meanVecFinal, pars[0] + pars[1]*meanVecFinal, color='green', linestyle='--')
623  a.scatter(meanVecFinal, varVecFinal, c='blue', marker='o')
624  a.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s')
625  a.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
626  a.set_xticks(meanVecOriginal)
627  a.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
628  a.tick_params(labelsize=11)
629  a.text(0.03, 0.8, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
630  a.set_xscale('linear', fontsize=labelFontSize)
631  a.set_yscale('linear', fontsize=labelFontSize)
632  a.set_title(amp, fontsize=titleFontSize)
633  a.set_xlim([minMeanVecOriginal - 0.2*deltaXlim, maxMeanVecOriginal + 0.2*deltaXlim])
634 
635  # Same, but in log-scale
636  a2.plot(meanVecFit, ptcFunc(pars, meanVecFit), color='red')
637  a2.scatter(meanVecFinal, varVecFinal, c='blue', marker='o')
638  a2.scatter(meanVecOutliers, varVecOutliers, c='magenta', marker='s')
639  a2.set_xlabel(r'Mean Signal ($\mu$, ADU)', fontsize=labelFontSize)
640  a2.set_ylabel(r'Variance (ADU$^2$)', fontsize=labelFontSize)
641  a2.tick_params(labelsize=11)
642  a2.text(0.03, 0.8, stringLegend, transform=a2.transAxes, fontsize=legendFontSize)
643  a2.set_xscale('log')
644  a2.set_yscale('log')
645  a2.set_title(amp, fontsize=titleFontSize)
646  a2.set_xlim([minMeanVecOriginal, maxMeanVecOriginal])
647 
648  f.suptitle(f"PTC \n Fit: " + stringTitle, fontsize=20)
649  pdfPages.savefig(f)
650  f2.suptitle(f"PTC (log-log)", fontsize=20)
651  pdfPages.savefig(f2)
652 
653  # Plot mean vs time
654  f, ax = plt.subplots(nrows=4, ncols=4, sharex='col', sharey='row', figsize=(13, 10))
655  for i, (amp, a) in enumerate(zip(fitPtcDict, ax.flatten())):
656  timeVecFinal, meanVecFinal = nlDict[amp][0], nlDict[amp][1]
657  pars, parsErr = nlDict[amp][3], nlDict[amp][4]
658  c0, c0Error = pars[0], parsErr[0]
659  c1, c1Error = pars[1], parsErr[1]
660  c2, c2Error = pars[2], parsErr[2]
661  stringLegend = f"c0: {c0:.4}+/-{c0Error:.2e}\n c1: {c1:.4}+/-{c1Error:.2e}" \
662  + f"\n c2(NL): {c2:.2e}+/-{c2Error:.2e}"
663  a.scatter(timeVecFinal, meanVecFinal)
664  a.plot(timeVecFinal, self.funcPolynomial(pars, timeVecFinal), color='red')
665  a.set_xlabel('Time (sec)', fontsize=labelFontSize)
666  a.set_xticks(timeVecFinal)
667  a.set_ylabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
668  a.tick_params(labelsize=labelFontSize)
669  a.text(0.03, 0.75, stringLegend, transform=a.transAxes, fontsize=legendFontSize)
670  a.set_xscale('linear', fontsize=labelFontSize)
671  a.set_yscale('linear', fontsize=labelFontSize)
672  a.set_title(amp, fontsize=titleFontSize)
673 
674  f.suptitle("Linearity \n Fit: " + r"$\mu = c_0 + c_1 t + c_2 t^2$", fontsize=supTitleFontSize)
675  pdfPages.savefig()
676 
677  # Plot linearity residual
678  f, ax = plt.subplots(nrows=4, ncols=4, sharex='col', sharey='row', figsize=(13, 10))
679  for i, (amp, a) in enumerate(zip(fitPtcDict, ax.flatten())):
680  meanVecFinal, linRes = nlDict[amp][1], nlDict[amp][2]
681  a.scatter(meanVecFinal, linRes)
682  a.axhline(y=0, color='k')
683  a.axvline(x=meanVecFinal[self.config.linResidualTimeIndex], color='g', linestyle='--')  # reference point
684  a.set_xlabel(r'Mean signal ($\mu$, ADU)', fontsize=labelFontSize)
685  a.set_xticks(meanVecFinal)
686  a.set_ylabel('LR (%)', fontsize=labelFontSize)
687  a.tick_params(labelsize=labelFontSize)
688  a.set_xscale('linear', fontsize=labelFontSize)
689  a.set_yscale('linear', fontsize=labelFontSize)
690  a.set_title(amp, fontsize=titleFontSize)
691 
692  f.suptitle(r"Linearity Residual: $100(1 - \mu_{\rm{ref}}/t_{\rm{ref}})/(\mu / t))$" + "\n" +
693  r"$t_{\rm{ref}}$: " + f"{timeVecFinal[2]} s", fontsize=supTitleFontSize)
694  pdfPages.savefig()
695 
696  return