import time

import numpy as np

import lsst.afw.image as afwImage
import lsst.afw.math as afwMath
import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
import lsst.pipe.base.connectionTypes as cT

from astro_metadata_translator import merge_headers, ObservationGroup
from astro_metadata_translator.serialize import dates_to_fits

from lsst.geom import Point2D
from lsst.log import Log
class CalibStatsConfig(pexConfig.Config):
    """Parameters controlling the measurement of background statistics."""

    stat = pexConfig.Field(
        dtype=int,
        default=int(afwMath.MEANCLIP),
        doc="Statistic to use to estimate background (from lsst.afw.math)",
    )
    clip = pexConfig.Field(
        dtype=float,
        doc="Clipping threshold for background",
    )
    nIter = pexConfig.Field(
        dtype=int,
        doc="Clipping iterations for background",
    )
    mask = pexConfig.ListField(
        dtype=str,
        default=["DETECTED", "BAD", "NO_DATA"],
        doc="Mask planes to reject",
    )
class CalibStatsTask(pipeBase.Task):
    """Measure statistics on the background.

    This can be useful for scaling the background, e.g., for flats and
    fringe frames.
    """

    ConfigClass = CalibStatsConfig

    def run(self, exposureOrImage):
        """Measure a particular statistic on an image (of some sort).

        Parameters
        ----------
        exposureOrImage : `lsst.afw.image.Exposure`, `lsst.afw.image.MaskedImage`, or `lsst.afw.image.Image`
            Exposure or image to calculate statistics on.

        Returns
        -------
        `float`
            Resulting statistic value.
        """
        stats = afwMath.StatisticsControl(self.config.clip, self.config.nIter,
                                          afwImage.Mask.getPlaneBitMask(self.config.mask))
        # Accept an Exposure, a MaskedImage, or a bare Image.
        try:
            image = exposureOrImage.getMaskedImage()
        except Exception:
            try:
                image = exposureOrImage.getImage()
            except Exception:
                image = exposureOrImage

        return afwMath.makeStatistics(image, self.config.stat, stats).getValue()
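

# Illustrative sketch (not part of the pipeline): one way CalibStatsTask might be
# exercised on a small in-memory image.  The image size, fill values, config
# settings, and task name below are assumptions for demonstration only; the task
# is normally constructed as a subtask via makeSubtask("stats").
def _exampleCalibStatsUsage():
    """Return the clipped background statistic of a uniform test image."""
    config = CalibStatsConfig()
    config.clip = 3.0   # assumed clipping threshold (sigma)
    config.nIter = 3    # assumed number of clipping iterations
    task = CalibStatsTask(config=config, name="stats")

    image = afwImage.MaskedImageF(32, 32)
    image.image.array[:] = 100.0
    image.variance.array[:] = 1.0
    # For a uniform image the clipped mean is simply the fill value.
    return task.run(image)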
class CalibCombineConnections(pipeBase.PipelineTaskConnections,
                              dimensions=("instrument", "detector")):
    inputExps = cT.Input(
        doc="Input pre-processed exposures to combine.",
        storageClass="ExposureF",
        dimensions=("instrument", "detector", "exposure"),
        multiple=True,
    )
    inputScales = cT.Input(
        doc="Input scale factors to use.",
        storageClass="StructuredDataDict",
        dimensions=("instrument", ),
    )

    outputData = cT.Output(
        doc="Output combined proposed calibration.",
        storageClass="ExposureF",
        dimensions=("instrument", "detector"),
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        if config and len(config.calibrationDimensions) != 0:
            newDimensions = tuple(config.calibrationDimensions)
            newOutputData = cT.Output(
                name=self.outputData.name,
                doc=self.outputData.doc,
                storageClass=self.outputData.storageClass,
                dimensions=self.allConnections['outputData'].dimensions + newDimensions,
            )
            self.dimensions.update(config.calibrationDimensions)
            self.outputData = newOutputData

        if config.exposureScaling == 'InputList':
            newInputScales = cT.PrerequisiteInput(
                name=self.inputScales.name,
                doc=self.inputScales.doc,
                storageClass=self.inputScales.storageClass,
                dimensions=self.allConnections['inputScales'].dimensions + newDimensions,
            )
            self.dimensions.update(config.calibrationDimensions)
            self.inputScales = newInputScales
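

# Illustrative sketch of the dimension bookkeeping done in
# CalibCombineConnections.__init__ above, reduced to plain tuples and sets so it
# can be checked without constructing a connections instance.  The extra
# dimension name used here ("physical_filter") is an arbitrary assumption.
def _exampleCalibrationDimensionUpdate(calibrationDimensions=("physical_filter",)):
    """Return the extended output dimensions and the updated task dimensions."""
    outputDimensions = ("instrument", "detector")    # as declared on outputData
    taskDimensions = {"instrument", "detector"}      # as declared on the connections class

    newDimensions = tuple(calibrationDimensions)
    outputDimensions = outputDimensions + newDimensions   # mirrors the rebuilt cT.Output
    taskDimensions.update(calibrationDimensions)          # mirrors self.dimensions.update(...)
    return outputDimensions, taskDimensions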
class CalibCombineConfig(pipeBase.PipelineTaskConfig,
                         pipelineConnections=CalibCombineConnections):
    """Configuration for combining calib exposures."""

    calibrationType = pexConfig.Field(
        dtype=str,
        default="calibration",
        doc="Name of calibration to be generated.",
    )
    calibrationDimensions = pexConfig.ListField(
        dtype=str,
        default=[],
        doc="List of updated dimensions to append to output.",
    )
    exposureScaling = pexConfig.ChoiceField(
        dtype=str,
        allowed={
            "None": "No scaling used.",
            "ExposureTime": "Scale inputs by their exposure time.",
            "DarkTime": "Scale inputs by their dark time.",
            "MeanStats": "Scale inputs based on their mean values.",
            "InputList": "Scale inputs based on a list of values.",
        },
        doc="Scaling to be applied to each input exposure.",
    )
    scalingLevel = pexConfig.ChoiceField(
        dtype=str,
        allowed={
            "DETECTOR": "Scale by detector.",
            "AMP": "Scale by amplifier.",
        },
        doc="Region to scale.",
    )
    maxVisitsToCalcErrorFromInputVariance = pexConfig.Field(
        dtype=int,
        doc="Maximum number of visits to estimate variance from input variance, not per-pixel spread",
    )

    doVignette = pexConfig.Field(
        dtype=bool,
        doc="Copy vignette polygon to output and censor vignetted pixels?",
    )

    mask = pexConfig.ListField(
        dtype=str,
        default=["SAT", "DETECTED", "INTRP"],
        doc="Mask planes to respect",
    )
    combine = pexConfig.Field(
        dtype=int,
        default=int(afwMath.MEANCLIP),
        doc="Statistic to use for combination (from lsst.afw.math)",
    )
    clip = pexConfig.Field(
        dtype=float,
        doc="Clipping threshold for combination",
    )
    nIter = pexConfig.Field(
        dtype=int,
        doc="Clipping iterations for combination",
    )
    stats = pexConfig.ConfigurableField(
        target=CalibStatsTask,
        doc="Background statistics configuration",
    )
class CalibCombineTask(pipeBase.PipelineTask,
                       pipeBase.CmdLineTask):
    """Task to combine calib exposures."""

    ConfigClass = CalibCombineConfig
    _DefaultName = 'cpCombine'

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.makeSubtask("stats")

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        # Record the data dimensions of each input so run() can look up scales.
        dimensions = [exp.dataId.byName() for exp in inputRefs.inputExps]
        inputs['inputDims'] = dimensions

        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)
    def run(self, inputExps, inputScales=None, inputDims=None):
        """Combine calib exposures for a single detector.

        Parameters
        ----------
        inputExps : `list` [`lsst.afw.image.Exposure`]
            Input list of exposures to combine.
        inputScales : `dict` [`dict` [`dict` [`float`]]], optional
            Dictionary of scales, indexed by detector (`int`),
            amplifier (`int`), and exposure (`int`).  Used for
            'InputList' scaling.
        inputDims : `list` [`dict`]
            List of dictionaries of input data dimensions/values.
            Each list entry should contain:

            ``"exposure"``
                exposure id value (`int`)
            ``"detector"``
                detector id value (`int`)

        Returns
        -------
        combinedExp : `lsst.afw.image.Exposure`
            Final combined exposure generated from the inputs.

        Raises
        ------
        RuntimeError
            Raised if no input data is found.  Also raised if
            config.exposureScaling == InputList and a necessary scale
            cannot be found.
        """
        width, height = self.getDimensions(inputExps)
        stats = afwMath.StatisticsControl(self.config.clip, self.config.nIter,
                                          afwImage.Mask.getPlaneBitMask(self.config.mask))
        numExps = len(inputExps)
        if numExps < 1:
            raise RuntimeError("No valid input data")
        if numExps < self.config.maxVisitsToCalcErrorFromInputVariance:
            stats.setCalcErrorFromInputVariance(True)

        # Create the output image that the inputs will be stacked into.
        combined = afwImage.MaskedImageF(width, height)
        combinedExp = afwImage.makeExposure(combined)

        # Determine the scale for each input exposure.
        expScales = []
        if inputDims is None:
            inputDims = [dict() for i in inputExps]

        for index, (exp, dims) in enumerate(zip(inputExps, inputDims)):
            scale = 1.0
            if exp is None:
                self.log.warn("Input %d is None (%s); unable to scale exp.", index, dims)
                continue

            if self.config.exposureScaling == "ExposureTime":
                scale = exp.getInfo().getVisitInfo().getExposureTime()
            elif self.config.exposureScaling == "DarkTime":
                scale = exp.getInfo().getVisitInfo().getDarkTime()
            elif self.config.exposureScaling == "MeanStats":
                scale = self.stats.run(exp)
            elif self.config.exposureScaling == "InputList":
                visitId = dims.get('exposure', None)
                detectorId = dims.get('detector', None)
                if visitId is None or detectorId is None:
                    raise RuntimeError(f"Could not identify scaling for input {index} ({dims})")
                if detectorId not in inputScales['expScale']:
                    raise RuntimeError(f"Could not identify a scaling for input {index}"
                                       f" detector {detectorId}")

                if self.config.scalingLevel == "DETECTOR":
                    if visitId not in inputScales['expScale'][detectorId]:
                        raise RuntimeError(f"Could not identify a scaling for input {index} "
                                           f"detector {detectorId} visit {visitId}")
                    scale = inputScales['expScale'][detectorId][visitId]
                elif self.config.scalingLevel == 'AMP':
                    scale = [inputScales['expScale'][detectorId][amp.getName()][visitId]
                             for amp in exp.getDetector()]
                else:
                    raise RuntimeError(f"Unknown scaling level: {self.config.scalingLevel}")
            elif self.config.exposureScaling == 'None':
                pass
            else:
                raise RuntimeError(f"Unknown scaling type: {self.config.exposureScaling}.")

            expScales.append(scale)
            self.log.info("Scaling input %d by %s", index, scale)
            self.applyScale(exp, scale)

        self.combine(combined, inputExps, stats)

        self.interpolateNans(combined)

        if self.config.doVignette:
            polygon = inputExps[0].getInfo().getValidPolygon()
            VignetteExposure(combined, polygon=polygon, doUpdateMask=True,
                             doSetValue=True, vignetteValue=0.0)

        # Combine the input headers into metadata for the output calibration.
        self.combineHeaders(inputExps, combinedExp,
                            calibType=self.config.calibrationType, scales=expScales)

        return pipeBase.Struct(
            outputData=combinedExp,
        )
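
    # Note on the inputScales layout consumed by "InputList" scaling: the nested
    # dictionary is keyed as
    #     inputScales["expScale"][detectorId][visitId]            (scalingLevel == "DETECTOR")
    #     inputScales["expScale"][detectorId][ampName][visitId]   (scalingLevel == "AMP")
    # For example (illustrative values only):
    #     {"expScale": {42: {2020111700123: 1.02, 2020111700124: 0.98}}}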
    def getDimensions(self, expList):
        """Get dimensions of the inputs.

        Parameters
        ----------
        expList : `list` [`lsst.afw.image.Exposure`]
            Exps to check the sizes of.

        Returns
        -------
        width, height : `int`
            Unique set of input dimensions.
        """
        dimList = [exp.getDimensions() for exp in expList if exp is not None]
        return self.getSize(dimList)
    def getSize(self, dimList):
        """Determine a consistent size, given a list of image sizes.

        Parameters
        ----------
        dimList : iterable of `tuple` (`int`, `int`)
            Dimensions of the input images.

        Raises
        ------
        RuntimeError
            If input dimensions are inconsistent.

        Returns
        -------
        width, height : `int`
            Common dimensions of the input images.
        """
        dim = set((w, h) for w, h in dimList)
        if len(dim) != 1:
            raise RuntimeError("Inconsistent dimensions: %s" % dim)
        return dim.pop()
    def applyScale(self, exposure, scale=None):
        """Apply scale to input exposure.

        This implementation applies a flux scaling: the input exposure is
        divided by the provided scale.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            Exposure to scale.
        scale : `float` or `list` [`float`], optional
            Constant scale to divide the exposure by.
        """
        if scale is not None:
            mi = exposure.getMaskedImage()
            if isinstance(scale, list):
                # Per-amplifier scales: divide each amplifier bounding box separately.
                for amp, ampScale in zip(exposure.getDetector(), scale):
                    ampIm = mi[amp.getBBox()]
                    ampIm /= ampScale
            else:
                mi /= scale
    def combine(self, target, expList, stats):
        """Combine multiple images.

        Parameters
        ----------
        target : `lsst.afw.image.Exposure`
            Output exposure to construct.
        expList : `list` [`lsst.afw.image.Exposure`]
            Input exposures to combine.
        stats : `lsst.afw.math.StatisticsControl`
            Control explaining how to combine the input images.
        """
        images = [img.getMaskedImage() for img in expList if img is not None]
        afwMath.statisticsStack(target, images, afwMath.Property(self.config.combine), stats)
    def combineHeaders(self, expList, calib, calibType="CALIB", scales=None):
        """Combine input headers to determine the set of common headers,
        supplemented by calibration inputs.

        Parameters
        ----------
        expList : `list` of `lsst.afw.image.Exposure`
            Input list of exposures to combine.
        calib : `lsst.afw.image.Exposure`
            Output calibration to construct headers for.
        calibType : `str`, optional
            OBSTYPE the output should claim.
        scales : `list` of `float`, optional
            Scale values applied to each input to record.

        Returns
        -------
        header : `lsst.daf.base.PropertyList`
            Constructed header for the output calibration.
        """
        # Header merging is done in place on the output calibration's metadata.
        header = calib.getMetadata()
        header.set("OBSTYPE", calibType)

        # Comments to attach to the standard date headers set below.
        comments = {"TIMESYS": "Time scale for all dates",
                    "DATE-OBS": "Start date of earliest input observation",
                    "MJD-OBS": "[d] Start MJD of earliest input observation",
                    "DATE-END": "End date of latest input observation",
                    "MJD-END": "[d] End MJD of latest input observation",
                    "MJD-AVG": "[d] MJD midpoint of all input observations",
                    "DATE-AVG": "Midpoint date of all input observations"}

        # Record the creation date of the calibration.
        now = time.localtime()
        calibDate = time.strftime("%Y-%m-%d", now)
        calibTime = time.strftime("%X %Z", now)
        header.set("CALIB_CREATE_DATE", calibDate)
        header.set("CALIB_CREATE_TIME", calibTime)

        # Merge input headers, keeping only keys with a common value.
        inputHeaders = [exp.getMetadata() for exp in expList if exp is not None]
        merged = merge_headers(inputHeaders, mode='drop')
        for k, v in merged.items():
            if k not in header:
                md = expList[0].getMetadata()
                comment = md.getComment(k) if k in md else None
                header.set(k, v, comment=comment)

        # Record the inputs that went into this calibration.
        visitInfoList = [exp.getInfo().getVisitInfo() for exp in expList if exp is not None]
        for i, visit in enumerate(visitInfoList):
            if visit is None:
                continue
            header.set("CPP_INPUT_%d" % (i,), visit.getExposureId())
            header.set("CPP_INPUT_DATE_%d" % (i,), str(visit.getDate()))
            header.set("CPP_INPUT_EXPT_%d" % (i,), visit.getExposureTime())
            if scales is not None:
                header.set("CPP_INPUT_SCALE_%d" % (i,), scales[i])

        # Set the standard date headers from the span of input observations.
        try:
            group = ObservationGroup(visitInfoList, pedantic=False)
        except Exception:
            self.log.warn("Exception making an obs group for headers. Continuing.")
            # Fall back to a DATE-OBS based on the calibration creation date.
            dateCards = {"DATE-OBS": "{}T00:00:00.00".format(calibDate)}
            comments["DATE-OBS"] = "Date of start of day of calibration midpoint"
        else:
            oldest, newest = group.extremes()
            dateCards = dates_to_fits(oldest.datetime_begin, newest.datetime_end)

        for k, v in dateCards.items():
            header.set(k, v, comment=comments.get(k, None))

        return header
    def interpolateNans(self, exp):
        """Interpolate over NANs in the combined image.

        NANs can result from masked areas on the CCD.  We don't want them
        getting into our science images, so we replace them with the median
        of the image.

        Parameters
        ----------
        exp : `lsst.afw.image.Exposure`
            Exp to check for NaNs.
        """
        array = exp.getImage().getArray()
        bad = np.isnan(array)

        median = np.median(array[np.logical_not(bad)])
        count = np.sum(bad)
        array[bad] = median
        if count > 0:
            self.log.warn("Found %s NAN pixels", count)
def VignetteExposure(exposure, polygon=None,
                     doUpdateMask=True, maskPlane='BAD',
                     doSetValue=False, vignetteValue=0.0,
                     log=None):
    """Apply vignetted polygon to image pixels.

    Parameters
    ----------
    exposure : `lsst.afw.image.Exposure`
        Exposure to modify.
    polygon : `lsst.afw.geom.Polygon`, optional
        Polygon defining the unvignetted region; taken from the exposure
        if not supplied.
    doUpdateMask : `bool`, optional
        Update the exposure mask for vignetted area?
    maskPlane : `str`, optional
        Mask plane to assign.
    doSetValue : `bool`, optional
        Set image value for vignetted area?
    vignetteValue : `float`, optional
        Value to assign to vignetted pixels.
    log : `lsst.log.Log`, optional
        Log to write to.

    Raises
    ------
    RuntimeError
        Raised if no valid polygon exists.
    """
    polygon = polygon if polygon else exposure.getInfo().getValidPolygon()
    if not polygon:
        raise RuntimeError("Could not find valid polygon!")
    log = log if log else Log.getLogger(__name__.partition(".")[2])

    # The exposure is fully illuminated if all four corners lie inside the polygon.
    fullyIlluminated = True
    for corner in exposure.getBBox().getCorners():
        if not polygon.contains(Point2D(corner)):
            fullyIlluminated = False

    log.info("Exposure is fully illuminated? %s", fullyIlluminated)

    if not fullyIlluminated:
        # Scan pixels, flagging those that fall outside the polygon.
        mask = exposure.getMask()
        numPixels = mask.getBBox().getArea()

        xx, yy = np.meshgrid(np.arange(0, mask.getWidth(), dtype=int),
                             np.arange(0, mask.getHeight(), dtype=int))

        vignMask = np.array([not polygon.contains(Point2D(x, y)) for x, y in
                             zip(xx.reshape(numPixels), yy.reshape(numPixels))])
        vignMask = vignMask.reshape(mask.getHeight(), mask.getWidth())

        if doUpdateMask:
            bitMask = mask.getPlaneBitMask(maskPlane)
            maskArray = mask.getArray()
            maskArray[vignMask] |= bitMask
        if doSetValue:
            imageArray = exposure.getImage().getArray()
            imageArray[vignMask] = vignetteValue
        log.info("Exposure contains %d vignetted pixels.",
                 np.count_nonzero(vignMask))
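

# Illustrative sketch of the pixel-scan strategy used in VignetteExposure: build a
# boolean mask over an (x, y) grid from a point-containment test.  A circular
# aperture stands in for polygon.contains(Point2D(x, y)) here so the sketch has
# no afw dependencies; the size and radius are arbitrary assumptions.
def _exampleVignetteMask(width=16, height=16, radius=6.0):
    """Return a boolean array that is True for pixels outside a circular aperture."""
    xx, yy = np.meshgrid(np.arange(0, width, dtype=int),
                         np.arange(0, height, dtype=int))
    centerX, centerY = (width - 1) / 2.0, (height - 1) / 2.0

    def contains(x, y):
        # Stand-in for the polygon containment test.
        return (x - centerX)**2 + (y - centerY)**2 <= radius**2

    numPixels = width * height
    vignMask = np.array([not contains(x, y) for x, y in
                         zip(xx.reshape(numPixels), yy.reshape(numPixels))])
    return vignMask.reshape(height, width)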