22__all__ = [
"DetectCoaddSourcesConfig",
"DetectCoaddSourcesTask",
23 "MeasureMergedCoaddSourcesConfig",
"MeasureMergedCoaddSourcesTask",
29from lsst.pipe.base
import (
30 AnnotatedPartialOutputsError,
34 PipelineTaskConnections
36import lsst.pipe.base.connectionTypes
as cT
40 ExceedsMaxVarianceScaleError,
41 InsufficientSourcesError,
43 ReferenceObjectLoader,
46 TooManyMaskedPixelsError,
50 SingleFrameMeasurementTask,
52 CatalogCalculationTask,
53 SkyMapIdGeneratorConfig,
55from lsst.meas.extensions.scarlet.io
import updateCatalogFootprints
65from .mergeDetections
import MergeDetectionsConfig, MergeDetectionsTask
66from .mergeMeasurements
import MergeMeasurementsConfig, MergeMeasurementsTask
67from .multiBandUtils
import CullPeaksConfig
68from .deblendCoaddSourcesPipeline
import DeblendCoaddSourcesMultiConfig
69from .deblendCoaddSourcesPipeline
import DeblendCoaddSourcesMultiTask
74* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
75* deepCoadd_mergeDet: merged detections (tract, patch)
76* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
77* deepCoadd_ref: reference sources (tract, patch)
78All of these have associated *_schema catalogs that require no data ID and hold no records.
80In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
81the mergeDet, meas, and ref dataset Footprints:
82* deepCoadd_peak_schema
88 dimensions=(
"tract",
"patch",
"band",
"skymap"),
89 defaultTemplates={
"inputCoaddName":
"deep",
"outputCoaddName":
"deep"}):
90 detectionSchema = cT.InitOutput(
91 doc=
"Schema of the detection catalog",
92 name=
"{outputCoaddName}Coadd_det_schema",
93 storageClass=
"SourceCatalog",
96 doc=
"Exposure on which detections are to be performed. ",
97 name=
"{inputCoaddName}Coadd",
98 storageClass=
"ExposureF",
99 dimensions=(
"tract",
"patch",
"band",
"skymap")
101 exposure_cells = cT.Input(
102 doc=
"Exposure on which detections are to be performed. ",
103 name=
"{inputCoaddName}CoaddCell",
104 storageClass=
"MultipleCellCoadd",
105 dimensions=(
"tract",
"patch",
"band",
"skymap"),
108 doc=
"Description of the skymap's tracts and patches.",
109 name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
110 storageClass=
"SkyMap",
111 dimensions=(
"skymap",),
113 outputBackgrounds = cT.Output(
114 doc=
"Output Backgrounds used in detection",
115 name=
"{outputCoaddName}Coadd_calexp_background",
116 storageClass=
"Background",
117 dimensions=(
"tract",
"patch",
"band",
"skymap")
119 outputSources = cT.Output(
120 doc=
"Detected sources catalog",
121 name=
"{outputCoaddName}Coadd_det",
122 storageClass=
"SourceCatalog",
123 dimensions=(
"tract",
"patch",
"band",
"skymap")
125 outputExposure = cT.Output(
126 doc=
"Exposure post detection",
127 name=
"{outputCoaddName}Coadd_calexp",
128 storageClass=
"ExposureF",
129 dimensions=(
"tract",
"patch",
"band",
"skymap")
# NOTE(review): mangled extraction — the numbers fused into each line are the
# original source line numbers, and the jumps (134->136, 136->139, 139->141,
# 141->143) show interior lines are missing.  Code left byte-identical.
132 def __init__(self, *, config=None):
    # Prune connections that this configuration will not use.
133 super().__init__(config=config)
134 assert isinstance(config, DetectCoaddSourcesConfig)
136 if config.useCellCoadds:
    # NOTE(review): with lines 137-138 missing, the branch structure here is
    # incomplete; presumably the cell-coadd case deletes the plain 'exposure'
    # input and an else-branch deletes 'exposure_cells' — confirm upstream.
139 del self.exposure_cells
141 if not self.config.forceExactBinning:
    # Background-only mode: drop every output except the backgrounds
    # (matches the writeOnlyBackgrounds doc: "only save the background models").
143 if self.config.writeOnlyBackgrounds:
144 del self.outputExposure
145 del self.outputSources
146 del self.detectionSchema
149class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
# NOTE(review): mangled extraction — original line numbers are embedded in the
# text and some interior lines (e.g. 151-152, 158-160, 165-167) are missing.
# Code left byte-identical; comments below are hedged accordingly.
150 """Configuration parameters for the DetectCoaddSourcesTask
# Rescale the coadd variance plane to match the empirically measured noise?
153 doScaleVariance = Field(dtype=bool, default=
True, doc=
"Scale variance plane using empirical noise?")
154 scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc=
"Variance rescaling")
155 detection = ConfigurableField(target=DynamicDetectionTask, doc=
"Source detection")
156 coaddName = Field(dtype=str, default=
"deep", doc=
"Name of coadd")
157 useCellCoadds = Field(dtype=bool, default=
False, doc=
"Whether to use cell coadds?")
# NOTE(review): the field name for original line ~158-161 is missing from this
# extraction; the doc text suggests a "fakes inserted" flag — confirm upstream.
161 doc=
"Should be set to True if fake sources have been inserted into the input data.",
163 idGenerator = SkyMapIdGeneratorConfig.make_field()
164 forceExactBinning = Field(
168 "Check that the background bin size evenly divides the patch inner region, and "
169 "crop the outer region to an integer number of bins."
172 writeOnlyBackgrounds = Field(dtype=bool, default=
False, doc=
"If true, only save the background models.")
173 writeEmptyBackgrounds = Field(
177 "If true, save a placeholder background with NaNs in all bins (but the right geometry) when "
178 "there are no pixels to compute a background from. This can be useful if a later task combines "
179 "backgrounds from multiple patches as input."
183 def setDefaults(self):
    # Detection defaults tuned for coadds: threshold in per-pixel standard
    # deviations, isotropic footprint growth, no background re-estimation
    # during detection, and a very coarse (4096 px) detection background.
184 super().setDefaults()
185 self.detection.thresholdType =
"pixel_stdev"
186 self.detection.isotropicGrow =
True
188 self.detection.reEstimateBackground =
False
189 self.detection.background.useApprox =
False
190 self.detection.background.binSize = 4096
191 self.detection.background.undersampleStyle =
'REDUCE_INTERP_ORDER'
192 self.detection.doTempWideBackground =
True
    # NOTE(review): presumably the ID packer needs no band dimension because
    # this task's IDs are already per-band — confirm against
    # SkyMapIdGeneratorConfig documentation.
195 self.idGenerator.packer.n_bands =
None
198class DetectCoaddSourcesTask(PipelineTask):
# NOTE(review): mangled extraction — original line numbers are embedded in the
# text; jumps in the numbering (e.g. 229->232, 258->262, 276->284, 362->364)
# show that interior lines (else-branches, try/except scaffolding, raise
# statements) are missing.  Code left byte-identical; comments are hedged.
199 """Detect sources on a single filter coadd.
201 Coadding individual visits requires each exposure to be warped. This
202 introduces covariance in the noise properties across pixels. Before
203 detection, we correct the coadd variance by scaling the variance plane in
204 the coadd to match the observed variance. This is an approximate
205 approach -- strictly, we should propagate the full covariance matrix --
206 but it is simple and works well in practice.
208 After scaling the variance plane, we detect sources and generate footprints
209 by delegating to the @ref SourceDetectionTask_ "detection" subtask.
211 DetectCoaddSourcesTask is meant to be run after assembling a coadded image
212 in a given band. The purpose of the task is to update the background,
213 detect all sources in a single band and generate a set of parent
214 footprints. Subsequent tasks in the multi-band processing procedure will
215 merge sources across bands and, eventually, perform forced photometry.
219 schema : `lsst.afw.table.Schema`, optional
220 Initial schema for the output catalog, modified-in place to include all
221 fields set by this task. If None, the source minimal schema will be used.
223 Additional keyword arguments.
226 _DefaultName =
"detectCoaddSources"
227 ConfigClass = DetectCoaddSourcesConfig
229 def __init__(self, schema=None, **kwargs):
    # Build the output schema (minimal source schema when none is supplied),
    # then construct the detection and (optionally) variance-scaling subtasks.
232 super().__init__(**kwargs)
234 schema = afwTable.SourceTable.makeMinimalSchema()
236 self.makeSubtask(
"detection", schema=self.schema)
237 if self.config.doScaleVariance:
238 self.makeSubtask(
"scaleVariance")
    # Empty catalog exposing the final schema as the init-output connection.
240 self.detectionSchema = afwTable.SourceCatalog(self.schema)
242 def runQuantum(self, butlerQC, inputRefs, outputRefs):
    # Fetch inputs, resolve the exposure (stitching cell coadds if
    # configured), run detection, and write outputs.  The error-handling
    # scaffolding around the run() call (originals ~259-283) is largely
    # missing from this extraction.
243 inputs = butlerQC.get(inputRefs)
244 idGenerator = self.config.idGenerator.apply(butlerQC.quantum.dataId)
246 if self.config.useCellCoadds:
247 multiple_cell_coadd = inputs.pop(
"exposure_cells")
248 exposure = multiple_cell_coadd.stitch().asExposure()
250 exposure = inputs.pop(
"exposure")
252 skyMap = inputs.pop(
"skyMap",
None)
253 if skyMap
is not None:
254 patchInfo = skyMap[butlerQC.quantum.dataId[
"tract"]][butlerQC.quantum.dataId[
"patch"]]
258 assert not inputs,
"runQuantum got more inputs than expected."
262 idFactory=idGenerator.make_table_id_factory(),
263 expId=idGenerator.catalog_id,
    # These detection-failure exceptions are presumably caught and converted
    # to partial outputs via AnnotatedPartialOutputsError below — the
    # enclosing try/except lines are missing from this extraction.
267 TooManyMaskedPixelsError,
268 ExceedsMaxVarianceScaleError,
269 InsufficientSourcesError,
273 if self.config.writeEmptyBackgrounds:
274 butlerQC.put(self._makeEmptyBackground(exposure, patchInfo), outputRefs.outputBackgrounds)
275 butlerQC.put(exposure, outputRefs.outputExposure)
276 error = AnnotatedPartialOutputsError.annotate(
284 butlerQC.put(outputs, outputRefs)
286 def run(self, exposure, idFactory, expId, patchInfo=None):
287 """Run detection on an exposure.
289 First scale the variance plane to match the observed variance
290 using ``ScaleVarianceTask``. Then invoke the ``SourceDetectionTask_`` "detection" subtask to
295 exposure : `lsst.afw.image.Exposure`
296 Exposure on which to detect (may be background-subtracted and scaled,
297 depending on configuration).
298 idFactory : `lsst.afw.table.IdFactory`
299 IdFactory to set source identifiers.
301 Exposure identifier (integer) for RNG seed.
302 patchInfo : `lsst.skymap.PatchInfo`, optional
303 Description of the patch geometry. Only needed if
304 `~DetectCoaddSourceConfig.forceExactBinning` is `True`.
308 result : `lsst.pipe.base.Struct`
309 Results as a struct with attributes:
312 Catalog of detections (`lsst.afw.table.SourceCatalog`).
314 List of backgrounds (`list`).
316 if self.config.forceExactBinning:
317 exposure = self._cropToExactBinning(exposure, patchInfo)
318 if self.config.doScaleVariance:
    # Record the applied scale factor in the exposure metadata so that
    # downstream consumers can see it.
319 varScale = self.scaleVariance.run(exposure.maskedImage)
320 exposure.getMetadata().add(
"VARIANCE_SCALE", varScale)
321 backgrounds = afwMath.BackgroundList()
322 table = afwTable.SourceTable.make(self.schema, idFactory)
323 detections = self.detection.run(table, exposure, expId=expId)
324 sources = detections.sources
325 if hasattr(detections,
"background")
and detections.background:
326 for bg
in detections.background:
327 backgrounds.append(bg)
    # Guarantee at least one (placeholder) background in the output list.
328 if len(backgrounds) == 0:
331 emptyBg = self._makeEmptyBackground(exposure, patchInfo)
332 backgrounds.append(emptyBg)
334 return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
336 def _cropToExactBinning(self, exposure, patchInfo):
337 """Crop a coadd `~lsst.afw.image.Exposure` instance to ensure exact
342 exposure : `lsst.afw.image.Exposure`
343 Exposure to crop, assumed to cover the patch outer bounding box.
344 patchInfo : `lsst.skymap.PatchInfo`
345 Description of the patch geometry.
349 cropped : `lsst.afw.image.Exposure`
350 View of ``exposure`` with background bins that evenly divide both
351 the full cropped image and the patch inner region. The bounding
352 box is guaranteed to contain the patch inner bounding box and be
353 contained by the patch outer bounding box.
358 Raised if the patch inner region width or height is not a multiple
359 of the background bin size.
361 bbox = patchInfo.getInnerBBox()
    # NOTE(review): the raise statements for these two checks (originals 363
    # and 368) are missing from this extraction; only the f-string message
    # fragments survive below.
362 if bbox.width % self.detection.background.binSizeX:
364 f
"Patch inner width {bbox.width} does not evenly "
365 f
"divide bin width {self.detection.background.binSizeX}."
367 if bbox.height % self.detection.background.binSizeY:
369 f
"Patch inner height {bbox.height} does not evenly "
370 f
"divide bin height {self.detection.background.binSizeY}."
    # Grow the inner box toward the outer box by a whole number of bins on
    # each side (integer division floors, so the result stays inside the
    # outer box); the grow call itself (originals ~375-380) is missing here.
372 outer_bbox = patchInfo.getOuterBBox()
373 n_bins_grow_x = (bbox.x.begin - outer_bbox.x.begin) // self.detection.background.binSizeX
374 n_bins_grow_y = (bbox.y.begin - outer_bbox.y.begin) // self.detection.background.binSizeY
377 n_bins_grow_x*self.detection.background.binSizeX,
378 n_bins_grow_y*self.detection.background.binSizeY,
381 assert outer_bbox.contains(bbox)
382 assert bbox.contains(patchInfo.getInnerBBox())
383 assert bbox.width % self.detection.background.binSizeX == 0
384 assert bbox.height % self.detection.background.binSizeY == 0
385 return exposure[bbox]
387 def _makeEmptyBackground(self, exposure, patchInfo=None):
388 """Construct an empty `lsst.afw.math.BackgroundList` with NaN values.
392 exposure : `lsst.afw.image.Exposure`
393 Exposure that the background should correspond to.
394 patchInfo : `lsst.skymap.PatchInfo`, optional
395 Description of the patch geometry. Only needed if
396 `~DetectCoaddSourceConfig.forceExactBinning` is `True`.
400 background : `lsst.afw.math.BackgroundList`
401 A background object with a single layer and the same bin geometry
402 that a background for that exposure would have had if it had enough
403 usable pixels. This object cannot actually be used for background
408 if self.config.forceExactBinning:
409 exposure = self._cropToExactBinning(exposure, patchInfo).clone()
    # NOTE(review): the assignment of bgLevel (original ~410-411, presumably
    # NaN per the docstring "with NaN values") is missing from this
    # extraction — confirm upstream.
412 bgStats = afwImage.MaskedImageF(1, 1)
413 bgStats.set(bgLevel, 0, bgLevel)
414 bg = afwMath.BackgroundMI(exposure.getBBox(), bgStats)
415 bgData = (bg, afwMath.Interpolate.LINEAR, afwMath.REDUCE_INTERP_ORDER,
416 afwMath.ApproximateControl.UNKNOWN, 0, 0,
False)
417 background = afwMath.BackgroundList()
418 background.append(bgData)
    # Mark every bin NO_DATA with zero variance so consumers can recognize
    # the placeholder geometry.
419 for bg, *_
in background:
420 stats = bg.getStatsImage()
421 stats.mask.array[:, :] = stats.mask.getPlaneBitMask(
"NO_DATA")
422 stats.variance.array[:, :] = 0.0
426class MeasureMergedCoaddSourcesConnections(
# NOTE(review): mangled extraction — original line numbers are embedded in the
# text; jumps in the numbering (e.g. 432->434, 454->457, 457->460, 590->594)
# show interior lines (several connection-name lines, else-branches) are
# missing.  Code left byte-identical; comments are hedged.
427 PipelineTaskConnections,
428 dimensions=(
"tract",
"patch",
"band",
"skymap"),
430 "inputCoaddName":
"deep",
431 "outputCoaddName":
"deep",
432 "deblendedCatalog":
"deblendedFlux",
434 deprecatedTemplates={
436 "deblendedCatalog":
"Support for old deblender outputs will be removed after v29."
439 inputSchema = cT.InitInput(
440 doc=
"Input schema for measure merged task produced by a deblender or detection task",
441 name=
"{inputCoaddName}Coadd_deblendedFlux_schema",
442 storageClass=
"SourceCatalog"
444 outputSchema = cT.InitOutput(
445 doc=
"Output schema after all new fields are added by task",
446 name=
"{inputCoaddName}Coadd_meas_schema",
447 storageClass=
"SourceCatalog"
450 refCat = cT.PrerequisiteInput(
451 doc=
"Reference catalog used to match measured sources against known sources",
453 storageClass=
"SimpleCatalog",
454 dimensions=(
"skypix",),
457 deprecated=
"Reference matching in measureCoaddSources will be removed after v29.",
# NOTE(review): the connection-name line for this input (original ~459,
# presumably "exposure = cT.Input(") is missing from this extraction.
460 doc=
"Input non-cell-based coadd image",
461 name=
"{inputCoaddName}Coadd_calexp",
462 storageClass=
"ExposureF",
463 dimensions=(
"tract",
"patch",
"band",
"skymap")
465 exposure_cells = cT.Input(
466 doc=
"Input cell-based coadd image",
467 name=
"{inputCoaddName}CoaddCell",
468 storageClass=
"MultipleCellCoadd",
469 dimensions=(
"tract",
"patch",
"band",
"skymap"),
471 background = cT.Input(
472 doc=
"Background to subtract from cell-based coadd image",
473 name=
"{inputCoaddName}Coadd_calexp_background",
474 storageClass=
"Background",
475 dimensions=(
"tract",
"patch",
"band",
"skymap")
# NOTE(review): the connection-name line here (original ~477, presumably
# "skyMap = cT.Input(") is missing from this extraction.
478 doc=
"SkyMap to use in processing",
479 name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
480 storageClass=
"SkyMap",
481 dimensions=(
"skymap",),
484 visitCatalogs = cT.Input(
485 doc=
"Deprecated and unused.",
487 dimensions=(
"instrument",
"visit",
"detector"),
488 storageClass=
"SourceCatalog",
490 deprecated=
"Deprecated and unused. Will be removed after v29.",
492 sourceTableHandles = cT.Input(
493 doc=(
"Source tables that are derived from the ``CalibrateTask`` sources. "
494 "These tables contain astrometry and photometry flags, and optionally "
496 name=
"sourceTable_visit",
497 storageClass=
"ArrowAstropy",
498 dimensions=(
"instrument",
"visit"),
502 finalizedSourceTableHandles = cT.Input(
503 doc=(
"Finalized source tables from ``FinalizeCalibrationTask``. These "
504 "tables contain PSF flags from the finalized PSF estimation."),
505 name=
"finalized_src_table",
506 storageClass=
"ArrowAstropy",
507 dimensions=(
"instrument",
"visit"),
511 finalVisitSummaryHandles = cT.Input(
512 doc=
"Final visit summary table",
513 name=
"finalVisitSummary",
514 storageClass=
"ExposureCatalog",
515 dimensions=(
"instrument",
"visit"),
520 inputCatalog = cT.Input(
521 doc=(
"Name of the input catalog to use."
522 "If the single band deblender was used this should be 'deblendedFlux."
523 "If the multi-band deblender was used this should be 'deblendedModel, "
524 "or deblendedFlux if the multiband deblender was configured to output "
525 "deblended flux catalogs. If no deblending was performed this should "
527 name=
"{inputCoaddName}Coadd_{deblendedCatalog}",
528 storageClass=
"SourceCatalog",
529 deprecated=
"Support for old deblender outputs will be removed after v29.",
530 dimensions=(
"tract",
"patch",
"band",
"skymap"),
532 scarletCatalog = cT.Input(
533 doc=
"Catalogs produced by multiband deblending",
534 name=
"{inputCoaddName}Coadd_deblendedCatalog",
535 storageClass=
"SourceCatalog",
536 dimensions=(
"tract",
"patch",
"skymap"),
538 scarletModels = cT.Input(
539 doc=
"Multiband scarlet models produced by the deblender",
540 name=
"{inputCoaddName}Coadd_scarletModelData",
541 storageClass=
"LsstScarletModelData",
542 dimensions=(
"tract",
"patch",
"skymap"),
544 outputSources = cT.Output(
545 doc=
"Source catalog containing all the measurement information generated in this task",
546 name=
"{outputCoaddName}Coadd_meas",
547 dimensions=(
"tract",
"patch",
"band",
"skymap"),
548 storageClass=
"SourceCatalog",
551 matchResult = cT.Output(
552 doc=
"Match catalog produced by configured matcher, optional on doMatchSources",
553 name=
"{outputCoaddName}Coadd_measMatch",
554 dimensions=(
"tract",
"patch",
"band",
"skymap"),
555 storageClass=
"Catalog",
556 deprecated=
"Reference matching in measureCoaddSources will be removed after v29.",
559 denormMatches = cT.Output(
560 doc=
"Denormalized Match catalog produced by configured matcher, optional on "
561 "doWriteMatchesDenormalized",
562 name=
"{outputCoaddName}Coadd_measMatchFull",
563 dimensions=(
"tract",
"patch",
"band",
"skymap"),
564 storageClass=
"Catalog",
565 deprecated=
"Reference matching in measureCoaddSources will be removed after v29.",
568 def __init__(self, *, config=None):
    # Prune optional connections according to the configuration.  Several
    # else-branches and intervening lines (originals 574-575, 585, 588-589,
    # 591-593, 596, 598-599) are missing from this extraction, so the exact
    # branch structure below is incomplete as shown.
569 super().__init__(config=config)
570 del self.visitCatalogs
571 if not config.doPropagateFlags:
572 del self.sourceTableHandles
573 del self.finalizedSourceTableHandles
576 if not config.propagateFlags.source_flags:
577 del self.sourceTableHandles
578 if not config.propagateFlags.finalized_source_flags:
579 del self.finalizedSourceTableHandles
581 if config.inputCatalog ==
"deblendedCatalog":
582 del self.inputCatalog
583 if not config.doAddFootprints:
584 del self.scarletModels
586 del self.deblendedCatalog
587 del self.scarletModels
590 if not config.doMatchSources:
594 if not config.doWriteMatchesDenormalized:
595 del self.denormMatches
597 if config.useCellCoadds:
600 del self.exposure_cells
604class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
# NOTE(review): mangled extraction — original line numbers are embedded in the
# text and interior lines (e.g. 607, 609, 611, 615, 617, 619, 621, 628, 631,
# several Field dtype/default lines, a @property decorator before
# refObjLoader) are missing.  Code left byte-identical; comments are hedged.
605 pipelineConnections=MeasureMergedCoaddSourcesConnections):
606 """Configuration parameters for the MeasureMergedCoaddSourcesTask
608 inputCatalog = ChoiceField(
610 default=
"deblendedCatalog",
612 "deblendedCatalog":
"Output catalog from ScarletDeblendTask",
613 "deblendedFlux":
"Output catalog from SourceDeblendTask",
614 "mergeDet":
"The merged detections before deblending."
616 doc=
"The name of the input catalog.",
618 deprecated=
"Support for old deblender outputs will be removed after v29.",
620 doAddFootprints = Field(dtype=bool,
622 doc=
"Whether or not to add footprints to the input catalog from scarlet models. "
623 "This should be true whenever using the multi-band deblender, "
624 "otherwise this should be False.")
625 doConserveFlux = Field(dtype=bool, default=
True,
626 doc=
"Whether to use the deblender models as templates to re-distribute the flux "
627 "from the 'exposure' (True), or to perform measurements on the deblender "
629 doStripFootprints = Field(dtype=bool, default=
True,
630 doc=
"Whether to strip footprints from the output catalog before "
632 "This is usually done when using scarlet models to save disk space.")
633 useCellCoadds = Field(dtype=bool, default=
False, doc=
"Whether to use cell coadds?")
634 measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc=
"Source measurement")
635 setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc=
"Set flags for primary tract/patch")
636 doPropagateFlags = Field(
637 dtype=bool, default=
True,
638 doc=
"Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
640 propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc=
"Propagate source flags to coadd")
641 doMatchSources = Field(
644 doc=
"Match sources to reference catalog?",
645 deprecated=
"Reference matching in measureCoaddSources will be removed after v29.",
647 match = ConfigurableField(
648 target=DirectMatchTask,
649 doc=
"Matching to reference catalog",
650 deprecated=
"Reference matching in measureCoaddSources will be removed after v29.",
652 doWriteMatchesDenormalized = Field(
655 doc=(
"Write reference matches in denormalized format? "
656 "This format uses more disk space, but is more convenient to read."),
657 deprecated=
"Reference matching in measureCoaddSources will be removed after v29.",
659 coaddName = Field(dtype=str, default=
"deep", doc=
"Name of coadd")
660 psfCache = Field(dtype=int, default=100, doc=
"Size of psfCache")
661 checkUnitsParseStrict = Field(
662 doc=
"Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
# NOTE(review): the field-name lines for the next two entries (originals
# ~667-668 "doApCorr = Field(" and ~687 "hasFakes = Field(" or similar) are
# missing from this extraction; only their doc strings survive.
669 doc=
"Apply aperture corrections"
671 applyApCorr = ConfigurableField(
672 target=ApplyApCorrTask,
673 doc=
"Subtask to apply aperture corrections"
675 doRunCatalogCalculation = Field(
678 doc=
'Run catalogCalculation task'
680 catalogCalculation = ConfigurableField(
681 target=CatalogCalculationTask,
682 doc=
"Subtask to run catalogCalculation plugins on catalog"
688 doc=
"Should be set to True if fake sources have been inserted into the input data."
690 idGenerator = SkyMapIdGeneratorConfig.make_field()
    # Convenience accessor delegating to the match subtask's loader config;
    # a decorator line (original ~692, presumably @property) is missing here.
693 def refObjLoader(self):
694 return self.match.refObjLoader
696 def setDefaults(self):
697 super().setDefaults()
    # Enable extra measurement plugins; additional plugin names
    # (originals 699, 701-705) are missing from this extraction.
698 self.measurement.plugins.names |= [
'base_InputCount',
700 'base_LocalPhotoCalib',
    # Coadd-specific mask planes considered by the PixelFlags plugin,
    # anywhere in the footprint and in its center respectively.
706 self.measurement.plugins[
'base_PixelFlags'].masksFpAnywhere = [
'CLIPPED',
'SENSOR_EDGE',
708 self.measurement.plugins[
'base_PixelFlags'].masksFpCenter = [
'CLIPPED',
'SENSOR_EDGE',
    # Denormalized matches require matching to be enabled at all.
714 if not self.doMatchSources
and self.doWriteMatchesDenormalized:
715 raise ValueError(
"Cannot set doWriteMatchesDenormalized if doMatchSources is False.")
718class MeasureMergedCoaddSourcesTask(PipelineTask):
# NOTE(review): mangled extraction — original line numbers are embedded in the
# text; jumps in the numbering (e.g. 768->770, 811->813, 859->864, 887->892,
# 986->992) show else-branches, call sites, and argument lists are missing.
# Code left byte-identical; comments below are hedged accordingly.
719 """Deblend sources from main catalog in each coadd seperately and measure.
721 Use peaks and footprints from a master catalog to perform deblending and
722 measurement in each coadd.
724 Given a master input catalog of sources (peaks and footprints) or deblender
725 outputs(including a HeavyFootprint in each band), measure each source on
726 the coadd. Repeating this procedure with the same master catalog across
727 multiple coadds will generate a consistent set of child sources.
729 The deblender retains all peaks and deblends any missing peaks (dropouts in
730 that band) as PSFs. Source properties are measured and the @c is-primary
731 flag (indicating sources with no children) is set. Visit flags are
732 propagated to the coadd sources.
734 Optionally, we can match the coadd sources to an external reference
737 After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we
738 have a set of per-band catalogs. The next stage in the multi-band
739 processing procedure will merge these measurements into a suitable catalog
740 for driving forced photometry.
744 schema : ``lsst.afw.table.Schema`, optional
745 The schema of the merged detection catalog used as input to this one.
746 peakSchema : ``lsst.afw.table.Schema`, optional
747 The schema of the PeakRecords in the Footprints in the merged detection catalog.
748 refObjLoader : `lsst.meas.algorithms.ReferenceObjectLoader`, optional
749 An instance of ReferenceObjectLoader that supplies an external reference
750 catalog. May be None if the loader can be constructed from the butler argument or all steps
751 requiring a reference catalog are disabled.
752 initInputs : `dict`, optional
753 Dictionary that can contain a key ``inputSchema`` containing the
754 input schema. If present will override the value of ``schema``.
756 Additional keyword arguments.
759 _DefaultName =
"measureCoaddSources"
760 ConfigClass = MeasureMergedCoaddSourcesConfig
762 def __init__(self, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
    # Resolve the input schema (initInputs wins over the schema argument),
    # build a SchemaMapper for the output schema, and construct the
    # configured subtasks.
764 super().__init__(**kwargs)
765 self.deblended = self.config.inputCatalog.startswith(
"deblended")
766 self.inputCatalog =
"Coadd_" + self.config.inputCatalog
767 if initInputs
is not None:
768 schema = initInputs[
'inputSchema'].schema
    # NOTE(review): the guard before this raise (original ~769, presumably
    # "if schema is None:") is missing from this extraction.
770 raise ValueError(
"Schema must be defined.")
771 self.schemaMapper = afwTable.SchemaMapper(schema)
772 self.schemaMapper.addMinimalSchema(schema)
773 self.schema = self.schemaMapper.getOutputSchema()
775 self.makeSubtask(
"measurement", schema=self.schema, algMetadata=self.algMetadata)
776 self.makeSubtask(
"setPrimaryFlags", schema=self.schema)
778 if self.config.doMatchSources:
779 self.makeSubtask(
"match", refObjLoader=refObjLoader)
780 if self.config.doPropagateFlags:
781 self.makeSubtask(
"propagateFlags", schema=self.schema)
782 self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
783 if self.config.doApCorr:
784 self.makeSubtask(
"applyApCorr", schema=self.schema)
785 if self.config.doRunCatalogCalculation:
786 self.makeSubtask(
"catalogCalculation", schema=self.schema)
    # Empty catalog exposing the final schema as the init-output connection.
788 self.outputSchema = afwTable.SourceCatalog(self.schema)
790 def runQuantum(self, butlerQC, inputRefs, outputRefs):
    # Assemble all inputs (exposure from either cell coadds or a plain
    # calexp, input catalog from either scarlet or the legacy deblender,
    # flag-propagation handle dicts), call run(), and write outputs.
791 inputs = butlerQC.get(inputRefs)
794 if self.config.doMatchSources:
795 refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId
for ref
in inputRefs.refCat],
796 inputs.pop(
'refCat'),
797 name=self.config.connections.refCat,
798 config=self.config.refObjLoader,
800 self.match.setRefObjLoader(refObjLoader)
802 if self.config.useCellCoadds:
    # Cell-coadd path: stitch cells into one exposure and subtract the
    # separately-stored background.
803 multiple_cell_coadd = inputs.pop(
"exposure_cells")
804 stitched_coadd = multiple_cell_coadd.stitch()
805 exposure = stitched_coadd.asExposure()
806 background = inputs.pop(
"background")
807 exposure.image -= background.getImage()
809 ccdInputs = stitched_coadd.ccds
810 apCorrMap = stitched_coadd.ap_corr_map
811 band = inputRefs.exposure_cells.dataId[
"band"]
    # NOTE(review): the else line (original ~812) is missing; the lines
    # below presumably form the non-cell branch.
813 exposure = inputs.pop(
"exposure")
816 exposure.getPsf().setCacheCapacity(self.config.psfCache)
818 ccdInputs = exposure.getInfo().getCoaddInputs().ccds
819 apCorrMap = exposure.getInfo().getApCorrMap()
820 band = inputRefs.exposure.dataId[
"band"]
824 idGenerator = self.config.idGenerator.apply(butlerQC.quantum.dataId)
827 table = afwTable.SourceTable.make(self.schema, idGenerator.make_table_id_factory())
828 sources = afwTable.SourceCatalog(table)
830 if "scarletCatalog" in inputs:
831 inputCatalog = inputs.pop(
"scarletCatalog")
832 catalogRef = inputRefs.scarletCatalog
834 inputCatalog = inputs.pop(
"inputCatalog")
835 catalogRef = inputRefs.inputCatalog
    # Map the input catalog into the (extended) output schema.
836 sources.extend(inputCatalog, self.schemaMapper)
839 if self.config.doAddFootprints:
840 modelData = inputs.pop(
'scarletModels')
841 if self.config.doConserveFlux:
842 imageForRedistribution = exposure
844 imageForRedistribution =
None
845 updateCatalogFootprints(
849 imageForRedistribution=imageForRedistribution,
850 removeScarletData=
True,
851 updateFluxColumns=
True,
853 table = sources.getTable()
854 table.setMetadata(self.algMetadata)
856 skyMap = inputs.pop(
'skyMap')
857 tractNumber = catalogRef.dataId[
'tract']
858 tractInfo = skyMap[tractNumber]
859 patchInfo = tractInfo.getPatchInfo(catalogRef.dataId[
'patch'])
    # These presumably feed a skyInfo Struct (originals 860-863 missing).
864 wcs=tractInfo.getWcs(),
865 bbox=patchInfo.getOuterBBox()
868 if self.config.doPropagateFlags:
    # Build visit->handle dicts for flag propagation; each connection may
    # have been pruned in the Connections class, hence the "in inputs" tests.
869 if "sourceTableHandles" in inputs:
870 sourceTableHandles = inputs.pop(
"sourceTableHandles")
871 sourceTableHandleDict = {handle.dataId[
"visit"]: handle
for handle
in sourceTableHandles}
873 sourceTableHandleDict =
None
874 if "finalizedSourceTableHandles" in inputs:
875 finalizedSourceTableHandles = inputs.pop(
"finalizedSourceTableHandles")
876 finalizedSourceTableHandleDict = {handle.dataId[
"visit"]: handle
877 for handle
in finalizedSourceTableHandles}
879 finalizedSourceTableHandleDict =
None
880 if "finalVisitSummaryHandles" in inputs:
881 finalVisitSummaryHandles = inputs.pop(
"finalVisitSummaryHandles")
882 finalVisitSummaryHandleDict = {handle.dataId[
"visit"]: handle
883 for handle
in finalVisitSummaryHandles}
885 finalVisitSummaryHandleDict =
None
885 assert not inputs, """ COMMENT-SAFE-GUARD """ if False else None