22__all__ = [
"DetectCoaddSourcesConfig",
"DetectCoaddSourcesTask"]
24from lsst.pipe.base
import (Struct, PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
25import lsst.pipe.base.connectionTypes
as cT
29 SingleFrameMeasurementTask,
31 CatalogCalculationTask,
32 SkyMapIdGeneratorConfig,
43from .mergeDetections
import MergeDetectionsConfig, MergeDetectionsTask
44from .mergeMeasurements
import MergeMeasurementsConfig, MergeMeasurementsTask
45from .multiBandUtils
import CullPeaksConfig
46from .deblendCoaddSourcesPipeline
import DeblendCoaddSourcesSingleConfig
47from .deblendCoaddSourcesPipeline
import DeblendCoaddSourcesSingleTask
48from .deblendCoaddSourcesPipeline
import DeblendCoaddSourcesMultiConfig
49from .deblendCoaddSourcesPipeline
import DeblendCoaddSourcesMultiTask
54* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
55* deepCoadd_mergeDet: merged detections (tract, patch)
56* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
57* deepCoadd_ref: reference sources (tract, patch)
58All of these have associated *_schema catalogs that require no data ID and hold no records.
60In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
61the mergeDet, meas, and ref dataset Footprints:
62* deepCoadd_peak_schema
68 dimensions=(
"tract",
"patch",
"band",
"skymap"),
69 defaultTemplates={
"inputCoaddName":
"deep",
"outputCoaddName":
"deep"}):
70 detectionSchema = cT.InitOutput(
71 doc=
"Schema of the detection catalog",
72 name=
"{outputCoaddName}Coadd_det_schema",
73 storageClass=
"SourceCatalog",
76 doc=
"Exposure on which detections are to be performed",
77 name=
"{inputCoaddName}Coadd",
78 storageClass=
"ExposureF",
79 dimensions=(
"tract",
"patch",
"band",
"skymap")
81 outputBackgrounds = cT.Output(
82 doc=
"Output Backgrounds used in detection",
83 name=
"{outputCoaddName}Coadd_calexp_background",
84 storageClass=
"Background",
85 dimensions=(
"tract",
"patch",
"band",
"skymap")
87 outputSources = cT.Output(
88 doc=
"Detected sources catalog",
89 name=
"{outputCoaddName}Coadd_det",
90 storageClass=
"SourceCatalog",
91 dimensions=(
"tract",
"patch",
"band",
"skymap")
93 outputExposure = cT.Output(
94 doc=
"Exposure post detection",
95 name=
"{outputCoaddName}Coadd_calexp",
96 storageClass=
"ExposureF",
97 dimensions=(
"tract",
"patch",
"band",
"skymap")
101class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
102 """Configuration parameters for the DetectCoaddSourcesTask
105 doScaleVariance = Field(dtype=bool, default=
True, doc=
"Scale variance plane using empirical noise?")
106 scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc=
"Variance rescaling")
107 detection = ConfigurableField(target=DynamicDetectionTask, doc=
"Source detection")
108 coaddName = Field(dtype=str, default=
"deep", doc=
"Name of coadd")
112 doc=
"Should be set to True if fake sources have been inserted into the input data.",
114 idGenerator = SkyMapIdGeneratorConfig.make_field()
def setDefaults(self):
    """Configure detection and ID-generation defaults appropriate for coadds."""
    super().setDefaults()
    det = self.detection
    det.thresholdType = "pixel_stdev"
    det.isotropicGrow = True
    # Coadds are built from background-subtracted inputs, so only a very
    # coarse background model is wanted during detection.
    det.reEstimateBackground = False
    det.background.useApprox = False
    det.background.binSize = 4096
    det.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
    # Suppress very large footprints that would overwhelm the deblender.
    det.doTempWideBackground = True
    # None means "as many bands as are defined" when packing data IDs into
    # object IDs (rather than the default of zero).
    self.idGenerator.packer.n_bands = None
131class DetectCoaddSourcesTask(PipelineTask):
132 """Detect sources on a single filter coadd.
134 Coadding individual visits requires each exposure to be warped. This
135 introduces covariance in the noise properties across pixels. Before
136 detection, we correct the coadd variance by scaling the variance plane in
137 the coadd to match the observed variance. This is an approximate
138 approach -- strictly, we should propagate the full covariance matrix --
139 but it is simple and works well in practice.
141 After scaling the variance plane, we detect sources and generate footprints
142 by delegating to the @ref SourceDetectionTask_ "detection" subtask.
144 DetectCoaddSourcesTask is meant to be run after assembling a coadded image
145 in a given band. The purpose of the task is to update the background,
146 detect all sources in a single band and generate a set of parent
147 footprints. Subsequent tasks in the multi-band processing procedure will
148 merge sources across bands and, eventually, perform forced photometry.
152 schema : `lsst.afw.table.Schema`, optional
153 Initial schema for the output catalog, modified-in place to include all
154 fields set by this task. If None, the source minimal schema will be used.
156 Additional keyword arguments.
159 _DefaultName =
"detectCoaddSources"
160 ConfigClass = DetectCoaddSourcesConfig
def __init__(self, schema=None, **kwargs):
    """Initialize the task, building the detection schema and subtasks.

    Parameters
    ----------
    schema : `lsst.afw.table.Schema`, optional
        Initial schema for the output catalog; if None, the minimal source
        schema is used.
    **kwargs
        Additional keyword arguments forwarded to `PipelineTask`.
    """
    super().__init__(**kwargs)
    # BUG FIX: the visible code called makeMinimalSchema() unconditionally
    # (ignoring a caller-supplied schema) and never assigned self.schema,
    # which is read below and in run(). Restore the None guard and the
    # attribute assignment.
    if schema is None:
        schema = afwTable.SourceTable.makeMinimalSchema()
    self.schema = schema
    self.makeSubtask("detection", schema=self.schema)
    if self.config.doScaleVariance:
        self.makeSubtask("scaleVariance")
    # Init-output connection: publishes the detection catalog schema.
    self.detectionSchema = afwTable.SourceCatalog(self.schema)
def runQuantum(self, butlerQC, inputRefs, outputRefs):
    """Fetch inputs, attach source-ID generation info, run, and store outputs."""
    loaded = butlerQC.get(inputRefs)
    id_gen = self.config.idGenerator.apply(butlerQC.quantum.dataId)
    # run() needs a factory for unique source IDs plus an integer exposure
    # identifier (used as an RNG seed), both derived from the data ID.
    loaded["idFactory"] = id_gen.make_table_id_factory()
    loaded["expId"] = id_gen.catalog_id
    butlerQC.put(self.run(**loaded), outputRefs)
def run(self, exposure, idFactory, expId):
    """Run detection on an exposure.

    First (optionally) scale the variance plane to match the observed
    variance using the ``scaleVariance`` subtask, then delegate detection
    to the ``detection`` subtask.

    Parameters
    ----------
    exposure : `lsst.afw.image.Exposure`
        Exposure on which to detect (may be background-subtracted and
        scaled, depending on configuration).
    idFactory : `lsst.afw.table.IdFactory`
        Factory used to assign source identifiers.
    expId : `int`
        Exposure identifier, used as an RNG seed.

    Returns
    -------
    result : `lsst.pipe.base.Struct`
        Results as a struct with attributes ``outputSources`` (catalog of
        detections, `lsst.afw.table.SourceCatalog`), ``outputBackgrounds``
        (`lsst.afw.math.BackgroundList`), and ``outputExposure`` (the
        input exposure, possibly modified in place).
    """
    if self.config.doScaleVariance:
        # Rescale the variance plane to the empirical noise level and
        # record the factor in the exposure metadata.
        scale = self.scaleVariance.run(exposure.maskedImage)
        exposure.getMetadata().add("VARIANCE_SCALE", scale)

    catalogTable = afwTable.SourceTable.make(self.schema, idFactory)
    detRes = self.detection.run(catalogTable, exposure, expId=expId)

    # Collect any backgrounds the detection subtask fitted.
    bgList = afwMath.BackgroundList()
    for bg in getattr(detRes, "background", None) or ():
        bgList.append(bg)

    return Struct(
        outputSources=detRes.sources,
        outputBackgrounds=bgList,
        outputExposure=exposure,
    )
223class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
224 dimensions=(
"tract",
"patch",
"band",
"skymap"),
225 defaultTemplates={
"inputCoaddName":
"deep",
226 "outputCoaddName":
"deep",
227 "deblendedCatalog":
"deblendedFlux"}):
228 inputSchema = cT.InitInput(
229 doc=
"Input schema for measure merged task produced by a deblender or detection task",
230 name=
"{inputCoaddName}Coadd_deblendedFlux_schema",
231 storageClass=
"SourceCatalog"
233 outputSchema = cT.InitOutput(
234 doc=
"Output schema after all new fields are added by task",
235 name=
"{inputCoaddName}Coadd_meas_schema",
236 storageClass=
"SourceCatalog"
238 refCat = cT.PrerequisiteInput(
239 doc=
"Reference catalog used to match measured sources against known sources",
241 storageClass=
"SimpleCatalog",
242 dimensions=(
"skypix",),
247 doc=
"Input coadd image",
248 name=
"{inputCoaddName}Coadd_calexp",
249 storageClass=
"ExposureF",
250 dimensions=(
"tract",
"patch",
"band",
"skymap")
253 doc=
"SkyMap to use in processing",
254 name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
255 storageClass=
"SkyMap",
256 dimensions=(
"skymap",),
258 visitCatalogs = cT.Input(
259 doc=
"Source catalogs for visits which overlap input tract, patch, band. Will be "
260 "further filtered in the task for the purpose of propagating flags from image calibration "
261 "and characterization to coadd objects. Only used in legacy PropagateVisitFlagsTask.",
263 dimensions=(
"instrument",
"visit",
"detector"),
264 storageClass=
"SourceCatalog",
267 sourceTableHandles = cT.Input(
268 doc=(
"Source tables that are derived from the ``CalibrateTask`` sources. "
269 "These tables contain astrometry and photometry flags, and optionally "
271 name=
"sourceTable_visit",
272 storageClass=
"DataFrame",
273 dimensions=(
"instrument",
"visit"),
277 finalizedSourceTableHandles = cT.Input(
278 doc=(
"Finalized source tables from ``FinalizeCalibrationTask``. These "
279 "tables contain PSF flags from the finalized PSF estimation."),
280 name=
"finalized_src_table",
281 storageClass=
"DataFrame",
282 dimensions=(
"instrument",
"visit"),
286 inputCatalog = cT.Input(
287 doc=(
"Name of the input catalog to use."
288 "If the single band deblender was used this should be 'deblendedFlux."
289 "If the multi-band deblender was used this should be 'deblendedModel, "
290 "or deblendedFlux if the multiband deblender was configured to output "
291 "deblended flux catalogs. If no deblending was performed this should "
293 name=
"{inputCoaddName}Coadd_{deblendedCatalog}",
294 storageClass=
"SourceCatalog",
295 dimensions=(
"tract",
"patch",
"band",
"skymap"),
297 scarletCatalog = cT.Input(
298 doc=
"Catalogs produced by multiband deblending",
299 name=
"{inputCoaddName}Coadd_deblendedCatalog",
300 storageClass=
"SourceCatalog",
301 dimensions=(
"tract",
"patch",
"skymap"),
303 scarletModels = cT.Input(
304 doc=
"Multiband scarlet models produced by the deblender",
305 name=
"{inputCoaddName}Coadd_scarletModelData",
306 storageClass=
"ScarletModelData",
307 dimensions=(
"tract",
"patch",
"skymap"),
309 outputSources = cT.Output(
310 doc=
"Source catalog containing all the measurement information generated in this task",
311 name=
"{outputCoaddName}Coadd_meas",
312 dimensions=(
"tract",
"patch",
"band",
"skymap"),
313 storageClass=
"SourceCatalog",
315 matchResult = cT.Output(
316 doc=
"Match catalog produced by configured matcher, optional on doMatchSources",
317 name=
"{outputCoaddName}Coadd_measMatch",
318 dimensions=(
"tract",
"patch",
"band",
"skymap"),
319 storageClass=
"Catalog",
321 denormMatches = cT.Output(
322 doc=
"Denormalized Match catalog produced by configured matcher, optional on "
323 "doWriteMatchesDenormalized",
324 name=
"{outputCoaddName}Coadd_measMatchFull",
325 dimensions=(
"tract",
"patch",
"band",
"skymap"),
326 storageClass=
"Catalog",
def __init__(self, *, config=None):
    """Trim connections that the given configuration does not use."""
    super().__init__(config=config)
    if config.doPropagateFlags is False:
        # No flag propagation at all: no visit-level inputs are needed.
        self.inputs -= set(("visitCatalogs",))
        self.inputs -= set(("sourceTableHandles",))
        self.inputs -= set(("finalizedSourceTableHandles",))
    elif config.propagateFlags.target == PropagateSourceFlagsTask:
        # The newer PropagateSourceFlagsTask works from source tables and
        # does not read visitCatalogs.
        self.inputs -= set(("visitCatalogs",))
        # Only request the tables whose flags are actually propagated.
        if not config.propagateFlags.source_flags:
            self.inputs -= set(("sourceTableHandles",))
        if not config.propagateFlags.finalized_source_flags:
            self.inputs -= set(("finalizedSourceTableHandles",))
    else:
        # Legacy PropagateVisitFlagsTask reads only visitCatalogs.
        self.inputs -= set(("sourceTableHandles",))
        self.inputs -= set(("finalizedSourceTableHandles",))

    if config.inputCatalog == "deblendedCatalog":
        self.inputs -= set(("inputCatalog",))
        # Scarlet models are only needed when re-attaching footprints.
        if not config.doAddFootprints:
            self.inputs -= set(("scarletModels",))
    else:
        # Not using the scarlet deblender: drop its catalog and models.
        # BUG FIX: the original wrote set(("deblendedCatalog")) — without
        # the trailing comma this subtracts the set of *characters* (a
        # no-op), and no connection is named "deblendedCatalog" anyway;
        # the scarlet catalog connection is "scarletCatalog".
        self.inputs -= set(("scarletCatalog",))
        self.inputs -= set(("scarletModels",))

    if config.doMatchSources is False:
        self.outputs -= set(("matchResult",))

    if config.doWriteMatchesDenormalized is False:
        self.outputs -= set(("denormMatches",))
364class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
365 pipelineConnections=MeasureMergedCoaddSourcesConnections):
366 """Configuration parameters for the MeasureMergedCoaddSourcesTask
368 inputCatalog = ChoiceField(
370 default=
"deblendedCatalog",
372 "deblendedCatalog":
"Output catalog from ScarletDeblendTask",
373 "deblendedFlux":
"Output catalog from SourceDeblendTask",
374 "mergeDet":
"The merged detections before deblending."
376 doc=
"The name of the input catalog.",
378 doAddFootprints = Field(dtype=bool,
380 doc=
"Whether or not to add footprints to the input catalog from scarlet models. "
381 "This should be true whenever using the multi-band deblender, "
382 "otherwise this should be False.")
383 doConserveFlux = Field(dtype=bool, default=
True,
384 doc=
"Whether to use the deblender models as templates to re-distribute the flux "
385 "from the 'exposure' (True), or to perform measurements on the deblender "
387 doStripFootprints = Field(dtype=bool, default=
True,
388 doc=
"Whether to strip footprints from the output catalog before "
390 "This is usually done when using scarlet models to save disk space.")
391 measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc=
"Source measurement")
392 setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc=
"Set flags for primary tract/patch")
393 doPropagateFlags = Field(
394 dtype=bool, default=
True,
395 doc=
"Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
397 propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc=
"Propagate source flags to coadd")
398 doMatchSources = Field(dtype=bool, default=
True, doc=
"Match sources to reference catalog?")
399 match = ConfigurableField(target=DirectMatchTask, doc=
"Matching to reference catalog")
400 doWriteMatchesDenormalized = Field(
403 doc=(
"Write reference matches in denormalized format? "
404 "This format uses more disk space, but is more convenient to read."),
406 coaddName = Field(dtype=str, default=
"deep", doc=
"Name of coadd")
407 psfCache = Field(dtype=int, default=100, doc=
"Size of psfCache")
408 checkUnitsParseStrict = Field(
409 doc=
"Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
416 doc=
"Apply aperture corrections"
418 applyApCorr = ConfigurableField(
419 target=ApplyApCorrTask,
420 doc=
"Subtask to apply aperture corrections"
422 doRunCatalogCalculation = Field(
425 doc=
'Run catalogCalculation task'
427 catalogCalculation = ConfigurableField(
428 target=CatalogCalculationTask,
429 doc=
"Subtask to run catalogCalculation plugins on catalog"
435 doc=
"Should be set to True if fake sources have been inserted into the input data."
437 idGenerator = SkyMapIdGeneratorConfig.make_field()
441 return self.match.refObjLoader
443 def setDefaults(self):
444 super().setDefaults()
445 self.measurement.plugins.names |= [
'base_InputCount',
447 'base_LocalPhotoCalib',
449 self.measurement.plugins[
'base_PixelFlags'].masksFpAnywhere = [
'CLIPPED',
'SENSOR_EDGE',
451 self.measurement.plugins[
'base_PixelFlags'].masksFpCenter = [
'CLIPPED',
'SENSOR_EDGE',
455class MeasureMergedCoaddSourcesTask(PipelineTask):
456 """Deblend sources from main catalog in each coadd separately and measure.
458 Use peaks and footprints from a master catalog to perform deblending and
459 measurement in each coadd.
461 Given a master input catalog of sources (peaks and footprints) or deblender
462 outputs(including a HeavyFootprint in each band), measure each source on
463 the coadd. Repeating this procedure with the same master catalog across
464 multiple coadds will generate a consistent set of child sources.
466 The deblender retains all peaks and deblends any missing peaks (dropouts in
467 that band) as PSFs. Source properties are measured and the @c is-primary
468 flag (indicating sources with no children) is set. Visit flags are
469 propagated to the coadd sources.
471 Optionally, we can match the coadd sources to an external reference
474 After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we
475 have a set of per-band catalogs. The next stage in the multi-band
476 processing procedure will merge these measurements into a suitable catalog
477 for driving forced photometry.
481 schema : `lsst.afw.table.Schema`, optional
482 The schema of the merged detection catalog used as input to this one.
483 peakSchema : `lsst.afw.table.Schema`, optional
484 The schema of the PeakRecords in the Footprints in the merged detection catalog.
485 refObjLoader : `lsst.meas.algorithms.ReferenceObjectLoader`, optional
486 An instance of ReferenceObjectLoader that supplies an external reference
487 catalog. May be None if the loader can be constructed from the butler argument or all steps
488 requiring a reference catalog are disabled.
489 initInputs : `dict`, optional
490 Dictionary that can contain a key ``inputSchema`` containing the
491 input schema. If present will override the value of ``schema``.
493 Additional keyword arguments.
496 _DefaultName =
"measureCoaddSources"
497 ConfigClass = MeasureMergedCoaddSourcesConfig
def __init__(self, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
             **kwargs):
    """Initialize the task, building the measurement schema and subtasks.

    Parameters
    ----------
    schema : `lsst.afw.table.Schema`, optional
        Schema of the merged detection catalog used as input.
    peakSchema : `lsst.afw.table.Schema`, optional
        Schema of the PeakRecords in the input Footprints (accepted for
        interface compatibility; not used in the visible code).
    refObjLoader : `lsst.meas.algorithms.ReferenceObjectLoader`, optional
        External reference-catalog loader; may be None if matching is
        disabled or a loader is built later.
    initInputs : `dict`, optional
        May contain ``inputSchema``, which overrides ``schema``.
    **kwargs
        Additional keyword arguments forwarded to `PipelineTask`.
    """
    super().__init__(**kwargs)
    self.deblended = self.config.inputCatalog.startswith("deblended")
    self.inputCatalog = "Coadd_" + self.config.inputCatalog
    if initInputs is not None:
        # A schema supplied by the pipeline framework takes precedence.
        schema = initInputs['inputSchema'].schema
    # BUG FIX: the visible code raised this error unconditionally, making
    # the rest of the constructor unreachable; raise only when no schema
    # was provided by either route.
    if schema is None:
        raise ValueError("Schema must be defined.")
    self.schemaMapper = afwTable.SchemaMapper(schema)
    self.schemaMapper.addMinimalSchema(schema)
    self.schema = self.schemaMapper.getOutputSchema()
    afwTable.CoordKey.addErrorFields(self.schema)
    # NOTE(review): self.algMetadata is consumed by the measurement subtask
    # and by run() (table.setMetadata), but its assignment was lost from
    # this excerpt; a PropertyList is the conventional container — confirm
    # against the full file. Local import because the excerpt's import
    # block is also incomplete.
    from lsst.daf.base import PropertyList
    self.algMetadata = PropertyList()
    self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
    self.makeSubtask("setPrimaryFlags", schema=self.schema)
    if self.config.doMatchSources:
        self.makeSubtask("match", refObjLoader=refObjLoader)
    if self.config.doPropagateFlags:
        self.makeSubtask("propagateFlags", schema=self.schema)
    self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
    if self.config.doApCorr:
        self.makeSubtask("applyApCorr", schema=self.schema)
    if self.config.doRunCatalogCalculation:
        self.makeSubtask("catalogCalculation", schema=self.schema)

    # Init-output connection: publishes the full measurement schema.
    self.outputSchema = afwTable.SourceCatalog(self.schema)
527 def runQuantum(self, butlerQC, inputRefs, outputRefs):
528 inputs = butlerQC.get(inputRefs)
530 refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId
for ref
in inputRefs.refCat],
531 inputs.pop(
'refCat'),
532 name=self.config.connections.refCat,
533 config=self.config.refObjLoader,
535 self.match.setRefObjLoader(refObjLoader)
539 inputs[
'exposure'].getPsf().setCacheCapacity(self.config.psfCache)
543 idGenerator = self.config.idGenerator.apply(butlerQC.quantum.dataId)
544 inputs[
'exposureId'] = idGenerator.catalog_id
547 table = afwTable.SourceTable.make(self.schema, idGenerator.make_table_id_factory())
548 sources = afwTable.SourceCatalog(table)
550 if "scarletCatalog" in inputs:
551 inputCatalog = inputs.pop(
"scarletCatalog")
552 catalogRef = inputRefs.scarletCatalog
554 inputCatalog = inputs.pop(
"inputCatalog")
555 catalogRef = inputRefs.inputCatalog
556 sources.extend(inputCatalog, self.schemaMapper)
559 if self.config.doAddFootprints:
560 modelData = inputs.pop(
'scarletModels')
561 if self.config.doConserveFlux:
562 redistributeImage = inputs[
'exposure'].image
564 redistributeImage =
None
565 modelData.updateCatalogFootprints(
567 band=inputRefs.exposure.dataId[
"band"],
568 psfModel=inputs[
'exposure'].getPsf(),
569 maskImage=inputs[
'exposure'].mask,
570 redistributeImage=redistributeImage,
571 removeScarletData=
True,
573 table = sources.getTable()
574 table.setMetadata(self.algMetadata)
575 inputs[
'sources'] = sources
577 skyMap = inputs.pop(
'skyMap')
578 tractNumber = catalogRef.dataId[
'tract']
579 tractInfo = skyMap[tractNumber]
580 patchInfo = tractInfo.getPatchInfo(catalogRef.dataId[
'patch'])
585 wcs=tractInfo.getWcs(),
586 bbox=patchInfo.getOuterBBox()
588 inputs[
'skyInfo'] = skyInfo
590 if self.config.doPropagateFlags:
591 if self.config.propagateFlags.target == PropagateSourceFlagsTask:
593 ccdInputs = inputs[
"exposure"].getInfo().getCoaddInputs().ccds
594 inputs[
"ccdInputs"] = ccdInputs
596 if "sourceTableHandles" in inputs:
597 sourceTableHandles = inputs.pop(
"sourceTableHandles")
598 sourceTableHandleDict = {handle.dataId[
"visit"]: handle
599 for handle
in sourceTableHandles}
600 inputs[
"sourceTableHandleDict"] = sourceTableHandleDict
601 if "finalizedSourceTableHandles" in inputs:
602 finalizedSourceTableHandles = inputs.pop(
"finalizedSourceTableHandles")
603 finalizedSourceTableHandleDict = {handle.dataId[
"visit"]: handle
604 for handle
in finalizedSourceTableHandles}
605 inputs[
"finalizedSourceTableHandleDict"] = finalizedSourceTableHandleDict
609 ccdInputs = inputs[
'exposure'].getInfo().getCoaddInputs().ccds
610 visitKey = ccdInputs.schema.find(
"visit").key
611 ccdKey = ccdInputs.schema.find(
"ccd").key
612 inputVisitIds = set()
614 for ccdRecord
in ccdInputs:
615 visit = ccdRecord.get(visitKey)
616 ccd = ccdRecord.get(ccdKey)
617 inputVisitIds.add((visit, ccd))
618 ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()
620 inputCatalogsToKeep = []
621 inputCatalogWcsUpdate = []
622 for i, dataRef
in enumerate(inputRefs.visitCatalogs):
623 key = (dataRef.dataId[
'visit'], dataRef.dataId[
'detector'])
624 if key
in inputVisitIds:
625 inputCatalogsToKeep.append(inputs[
'visitCatalogs'][i])
626 inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
627 inputs[
'visitCatalogs'] = inputCatalogsToKeep
628 inputs[
'wcsUpdates'] = inputCatalogWcsUpdate
629 inputs[
'ccdInputs'] = ccdInputs
631 outputs = self.run(**inputs)
633 sources = outputs.outputSources
634 butlerQC.put(outputs, outputRefs)
636 def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
637 sourceTableHandleDict=None, finalizedSourceTableHandleDict=None):
638 """Run measurement algorithms on the input exposure, and optionally populate the
639 resulting catalog with extra information.
643 exposure : `lsst.afw.exposure.Exposure`
644 The input exposure on which measurements are to be performed.
645 sources : `lsst.afw.table.SourceCatalog`
646 A catalog built from the results of merged detections, or
648 skyInfo : `lsst.pipe.base.Struct`
649 A struct containing information about the position of the input exposure within
650 a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box.
651 exposureId : `int` or `bytes`
652 Packed unique number or bytes unique to the input exposure.
653 ccdInputs : `lsst.afw.table.ExposureCatalog`, optional
654 Catalog containing information on the individual visits which went into making
656 visitCatalogs : `list` of `lsst.afw.table.SourceCatalogs`, optional
657 A list of source catalogs corresponding to measurements made on the individual
658 visits which went into the input exposure. If None and butler is `None` then
659 the task cannot propagate visit flags to the output catalog.
660 Deprecated, to be removed with PropagateVisitFlagsTask.
661 wcsUpdates : `list` of `lsst.afw.geom.SkyWcs`, optional
662 If visitCatalogs is not `None` this should be a list of wcs objects which correspond
663 to the input visits. Used to put all coordinates to common system. If `None` and
664 butler is `None` then the task cannot propagate visit flags to the output catalog.
665 Deprecated, to be removed with PropagateVisitFlagsTask.
666 sourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
667 Dict for sourceTable_visit handles (key is visit) for propagating flags.
668 These tables are derived from the ``CalibrateTask`` sources, and contain
669 astrometry and photometry flags, and optionally PSF flags.
670 finalizedSourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
671 Dict for finalized_src_table handles (key is visit) for propagating flags.
672 These tables are derived from ``FinalizeCalibrationTask`` and contain
673 PSF flags from the finalized PSF estimation.
677 results : `lsst.pipe.base.Struct`
678 Results of running measurement task. Will contain the catalog in the
679 sources attribute. Optionally will have results of matching to a
680 reference catalog in the matchResults attribute, and denormalized
681 matches in the denormMatches attribute.
683 self.measurement.run(sources, exposure, exposureId=exposureId)
685 if self.config.doApCorr:
686 self.applyApCorr.run(
688 apCorrMap=exposure.getInfo().getApCorrMap()
695 if not sources.isContiguous():
696 sources = sources.copy(deep=
True)
698 if self.config.doRunCatalogCalculation:
699 self.catalogCalculation.run(sources)
701 self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
702 patchInfo=skyInfo.patchInfo)
703 if self.config.doPropagateFlags:
704 if self.config.propagateFlags.target == PropagateSourceFlagsTask:
706 self.propagateFlags.run(
709 sourceTableHandleDict,
710 finalizedSourceTableHandleDict
714 self.propagateFlags.run(
724 if self.config.doMatchSources:
725 matchResult = self.match.run(sources, exposure.getInfo().getFilter().bandLabel)
726 matches = afwTable.packMatches(matchResult.matches)
727 matches.table.setMetadata(matchResult.matchMeta)
728 results.matchResult = matches
729 if self.config.doWriteMatchesDenormalized:
730 if matchResult.matches:
731 denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
733 self.log.warning(
"No matches, so generating dummy denormalized matches file")
734 denormMatches = afwTable.BaseCatalog(afwTable.Schema())
736 denormMatches.getMetadata().add(
"COMMENT",
737 "This catalog is empty because no matches were found.")
738 results.denormMatches = denormMatches
739 results.denormMatches = denormMatches
741 results.outputSources = sources