__all__ = ["DetectCoaddSourcesConfig", "DetectCoaddSourcesTask"]

from lsst.pipe.base import (Struct, PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
import lsst.pipe.base.connectionTypes as cT
from lsst.pex.config import Field, ConfigurableField, ChoiceField
from lsst.daf.base import PropertyList
from lsst.skymap import BaseSkyMap
import lsst.afw.math as afwMath
import lsst.afw.table as afwTable
from lsst.meas.algorithms import (
    DynamicDetectionTask,
    ReferenceObjectLoader,
    ScaleVarianceTask,
    SetPrimaryFlagsTask,
)
from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
from lsst.meas.base import (
    SingleFrameMeasurementTask,
    ApplyApCorrTask,
    CatalogCalculationTask,
    SkyMapIdGeneratorConfig,
)
from lsst.meas.extensions.scarlet.io import updateCatalogFootprints
from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask
from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask
from .multiBandUtils import CullPeaksConfig
from .deblendCoaddSourcesPipeline import (
    DeblendCoaddSourcesSingleConfig,
    DeblendCoaddSourcesSingleTask,
    DeblendCoaddSourcesMultiConfig,
    DeblendCoaddSourcesMultiTask,
)
from .propagateSourceFlags import PropagateSourceFlagsTask
"""
New dataset types:
* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
* deepCoadd_mergeDet: merged detections (tract, patch)
* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
* deepCoadd_ref: reference sources (tract, patch)
All of these have associated *_schema catalogs that require no data ID and hold no records.

In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
the mergeDet, meas, and ref dataset Footprints:
* deepCoadd_peak_schema
"""
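# A hedged sketch of how these datasets are typically retrieved (the repo
# path, collection, and data ID values below are illustrative, not defined
# by this module):
#
#     from lsst.daf.butler import Butler
#     butler = Butler("/repo", collections=["u/user/coadds"])
#     det = butler.get("deepCoadd_det", tract=0, patch=0, band="i", skymap="skymap")
#     print(det.getSchema())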
class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep",
                                                      "outputCoaddName": "deep"}):
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap"),
    )
class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """Configuration parameters for the DetectCoaddSourcesTask
    """

    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )
    idGenerator = SkyMapIdGeneratorConfig.make_field()

    def setDefaults(self):
        super().setDefaults()
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        # Coadds are made from background-subtracted inputs, so keep any
        # residual background model very coarse.
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True
        self.idGenerator.packer.n_bands = None
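# A minimal config-override sketch (values are illustrative): the detection
# subtask is a regular ConfigurableField, so its options can be adjusted the
# same way setDefaults() does above, e.g.
#
#     config = DetectCoaddSourcesConfig()
#     config.detection.thresholdValue = 5.0   # detection S/N threshold
#     config.doScaleVariance = False          # skip empirical variance scaling
#     config.validate()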
class DetectCoaddSourcesTask(PipelineTask):
    """Detect sources on a single filter coadd.

    Coadding individual visits requires each exposure to be warped. This
    introduces covariance in the noise properties across pixels. Before
    detection, we correct the coadd variance by scaling the variance plane in
    the coadd to match the observed variance. This is an approximate
    approach -- strictly, we should propagate the full covariance matrix --
    but it is simple and works well in practice.

    After scaling the variance plane, we detect sources and generate footprints
    by delegating to the ``detection`` subtask.

    DetectCoaddSourcesTask is meant to be run after assembling a coadded image
    in a given band. The purpose of the task is to update the background,
    detect all sources in a single band and generate a set of parent
    footprints. Subsequent tasks in the multi-band processing procedure will
    merge sources across bands and, eventually, perform forced photometry.

    Parameters
    ----------
    schema : `lsst.afw.table.Schema`, optional
        Initial schema for the output catalog, modified in place to include all
        fields set by this task. If None, the source minimal schema will be used.
    **kwargs
        Additional keyword arguments.
    """

    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig
    def __init__(self, schema=None, **kwargs):
        super().__init__(**kwargs)
        if schema is None:
            schema = afwTable.SourceTable.makeMinimalSchema()
        self.schema = schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")

        self.detectionSchema = afwTable.SourceCatalog(self.schema)
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        idGenerator = self.config.idGenerator.apply(butlerQC.quantum.dataId)
        inputs["idFactory"] = idGenerator.make_table_id_factory()
        inputs["expId"] = idGenerator.catalog_id
        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)
    def run(self, exposure, idFactory, expId):
        """Run detection on an exposure.

        First scale the variance plane to match the observed variance using
        ``ScaleVarianceTask``. Then invoke the ``detection`` subtask to detect
        sources.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            Exposure on which to detect (may be background-subtracted and scaled,
            depending on configuration).
        idFactory : `lsst.afw.table.IdFactory`
            IdFactory to set source identifiers.
        expId : `int`
            Exposure identifier (integer) for RNG seed.

        Returns
        -------
        result : `lsst.pipe.base.Struct`
            Results as a struct with attributes:

            ``outputSources``
                Catalog of detections (`lsst.afw.table.SourceCatalog`).
            ``outputBackgrounds``
                List of backgrounds (`list`).
            ``outputExposure``
                The input exposure, with detection mask planes set
                (`lsst.afw.image.Exposure`).
        """
        if self.config.doScaleVariance:
            varScale = self.scaleVariance.run(exposure.maskedImage)
            exposure.getMetadata().add("VARIANCE_SCALE", varScale)
        backgrounds = afwMath.BackgroundList()
        table = afwTable.SourceTable.make(self.schema, idFactory)
        detections = self.detection.run(table, exposure, expId=expId)
        sources = detections.sources
        if hasattr(detections, "background") and detections.background:
            for bg in detections.background:
                backgrounds.append(bg)
        return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
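# A hedged standalone-usage sketch (assumes "exposure" is an
# lsst.afw.image.ExposureF coadd obtained elsewhere, e.g. from a butler):
#
#     import lsst.afw.table as afwTable
#     task = DetectCoaddSourcesTask()
#     result = task.run(exposure,
#                       idFactory=afwTable.IdFactory.makeSimple(),
#                       expId=0)
#     print(len(result.outputSources), "sources detected")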
class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep",
                                                             "deblendedCatalog": "deblendedFlux"}):
    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        name="ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True,
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects. Only used in legacy PropagateVisitFlagsTask.",
        name="src",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True,
    )
    sourceTableHandles = cT.Input(
        doc=("Source tables that are derived from the ``CalibrateTask`` sources. "
             "These tables contain astrometry and photometry flags, and optionally "
             "PSF flags."),
        name="sourceTable_visit",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    finalizedSourceTableHandles = cT.Input(
        doc=("Finalized source tables from ``FinalizeCalibrationTask``. These "
             "tables contain PSF flags from the finalized PSF estimation."),
        name="finalized_src_table",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use. "
             "If the single band deblender was used this should be 'deblendedFlux'. "
             "If the multi-band deblender was used this should be 'deblendedModel', "
             "or 'deblendedFlux' if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'."),
        name="{inputCoaddName}Coadd_{deblendedCatalog}",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    scarletCatalog = cT.Input(
        doc="Catalogs produced by multiband deblending",
        name="{inputCoaddName}Coadd_deblendedCatalog",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    scarletModels = cT.Input(
        doc="Multiband scarlet models produced by the deblender",
        name="{inputCoaddName}Coadd_scarletModelData",
        storageClass="ScarletModelData",
        dimensions=("tract", "patch", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    def __init__(self, *, config=None):
        super().__init__(config=config)
        if config.doPropagateFlags is False:
            self.inputs -= set(("visitCatalogs",))
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))
        elif config.propagateFlags.target == PropagateSourceFlagsTask:
            # The new PropagateSourceFlagsTask does not use visitCatalogs.
            self.inputs -= set(("visitCatalogs",))
            # Drop the flag-table inputs that the configuration does not need.
            if not config.propagateFlags.source_flags:
                self.inputs -= set(("sourceTableHandles",))
            if not config.propagateFlags.finalized_source_flags:
                self.inputs -= set(("finalizedSourceTableHandles",))
        else:
            # The legacy PropagateVisitFlagsTask uses visitCatalogs only.
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))

        if config.inputCatalog == "deblendedCatalog":
            self.inputs -= set(("inputCatalog",))
            # If the scarlet models are not needed then remove them.
            if not config.doAddFootprints:
                self.inputs -= set(("scarletModels",))
        else:
            # Not using the scarlet deblender: remove its connections.
            self.inputs -= set(("scarletCatalog",))
            self.inputs -= set(("scarletModels",))

        if config.doMatchSources is False:
            self.outputs -= set(("matchResult",))

        if config.doWriteMatchesDenormalized is False:
            self.outputs -= set(("denormMatches",))
class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """Configuration parameters for the MeasureMergedCoaddSourcesTask
    """

    inputCatalog = ChoiceField(
        dtype=str,
        default="deblendedCatalog",
        allowed={
            "deblendedCatalog": "Output catalog from ScarletDeblendTask",
            "deblendedFlux": "Output catalog from SourceDeblendTask",
            "mergeDet": "The merged detections before deblending.",
        },
        doc="The name of the input catalog.",
    )
    doAddFootprints = Field(dtype=bool,
                            default=True,
                            doc="Whether or not to add footprints to the input catalog from scarlet models. "
                                "This should be true whenever using the multi-band deblender, "
                                "otherwise this should be False.")
    doConserveFlux = Field(dtype=bool, default=True,
                           doc="Whether to use the deblender models as templates to re-distribute the flux "
                               "from the 'exposure' (True), or to perform measurements on the deblender "
                               "model footprints (False).")
    doStripFootprints = Field(dtype=bool, default=True,
                              doc="Whether to strip footprints from the output catalog before "
                                  "saving to disk. "
                                  "This is usually done when using scarlet models to save disk space.")
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc="Propagate source flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str,
        default="raise",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )
    idGenerator = SkyMapIdGeneratorConfig.make_field()

    @property
    def refObjLoader(self):
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_Variance',
                                           'base_LocalPhotoCalib',
                                           'base_LocalWcs']
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                       'INEXACT_PSF', 'STREAK']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                     'INEXACT_PSF', 'STREAK']

    def validate(self):
        super().validate()

        if not self.doMatchSources and self.doWriteMatchesDenormalized:
            raise ValueError("Cannot set doWriteMatchesDenormalized if doMatchSources is False.")
class MeasureMergedCoaddSourcesTask(PipelineTask):
    """Deblend sources from main catalog in each coadd separately and measure.

    Use peaks and footprints from a master catalog to perform deblending and
    measurement in each coadd.

    Given a master input catalog of sources (peaks and footprints) or deblender
    outputs (including a HeavyFootprint in each band), measure each source on
    the coadd. Repeating this procedure with the same master catalog across
    multiple coadds will generate a consistent set of child sources.

    The deblender retains all peaks and deblends any missing peaks (dropouts in
    that band) as PSFs. Source properties are measured and the ``is-primary``
    flag (indicating sources with no children) is set. Visit flags are
    propagated to the coadd sources.

    Optionally, we can match the coadd sources to an external reference
    catalog.

    After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we
    have a set of per-band catalogs. The next stage in the multi-band
    processing procedure will merge these measurements into a suitable catalog
    for driving forced photometry.

    Parameters
    ----------
    schema : `lsst.afw.table.Schema`, optional
        The schema of the merged detection catalog used as input to this one.
    peakSchema : `lsst.afw.table.Schema`, optional
        The schema of the PeakRecords in the Footprints in the merged detection catalog.
    refObjLoader : `lsst.meas.algorithms.ReferenceObjectLoader`, optional
        An instance of ReferenceObjectLoader that supplies an external reference
        catalog. May be None if the loader can be constructed from the butler argument or all steps
        requiring a reference catalog are disabled.
    initInputs : `dict`, optional
        Dictionary that can contain a key ``inputSchema`` containing the
        input schema. If present will override the value of ``schema``.
    **kwargs
        Additional keyword arguments.
    """

    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig
    def __init__(self, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        super().__init__(**kwargs)
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        if initInputs is not None:
            schema = initInputs['inputSchema'].schema
        if schema is None:
            raise ValueError("Schema must be defined.")
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        afwTable.CoordKey.addErrorFields(self.schema)
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        self.outputSchema = afwTable.SourceCatalog(self.schema)
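# A hedged construction sketch: the task maps the upstream (deblended or
# merged-detection) schema into its own output schema. Using the minimal
# source schema as a stand-in for the real upstream schema:
#
#     import lsst.afw.table as afwTable
#     schema = afwTable.SourceTable.makeMinimalSchema()
#     task = MeasureMergedCoaddSourcesTask(schema=schema)
#     print(task.outputSchema.schema)  # includes all measurement fields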
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        if self.config.doMatchSources:
            refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                                 inputs.pop('refCat'),
                                                 name=self.config.connections.refCat,
                                                 config=self.config.refObjLoader,
                                                 log=self.log)
            self.match.setRefObjLoader(refObjLoader)

        # Set PSF cache size.
        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        # Get unique integer ID for IdFactory and RNG seeds.
        idGenerator = self.config.idGenerator.apply(butlerQC.quantum.dataId)
        inputs['exposureId'] = idGenerator.catalog_id

        # Transform inputCatalog.
        table = afwTable.SourceTable.make(self.schema, idGenerator.make_table_id_factory())
        sources = afwTable.SourceCatalog(table)
        # Load the correct input catalog.
        if "scarletCatalog" in inputs:
            inputCatalog = inputs.pop("scarletCatalog")
            catalogRef = inputRefs.scarletCatalog
        else:
            inputCatalog = inputs.pop("inputCatalog")
            catalogRef = inputRefs.inputCatalog
        sources.extend(inputCatalog, self.schemaMapper)
        # Add the HeavyFootprints to the deblended sources.
        if self.config.doAddFootprints:
            modelData = inputs.pop('scarletModels')
            if self.config.doConserveFlux:
                imageForRedistribution = inputs['exposure']
            else:
                imageForRedistribution = None
            updateCatalogFootprints(
                modelData=modelData,
                catalog=sources,
                band=inputRefs.exposure.dataId["band"],
                imageForRedistribution=imageForRedistribution,
                removeScarletData=True,
                updateFluxColumns=True,
            )
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.
        inputs['sources'] = sources

        skyMap = inputs.pop('skyMap')
        tractNumber = catalogRef.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(catalogRef.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New PropagateSourceFlagsTask: pass the coadd inputs and
                # per-visit flag tables keyed by visit.
                ccdInputs = inputs["exposure"].getInfo().getCoaddInputs().ccds
                inputs["ccdInputs"] = ccdInputs

                if "sourceTableHandles" in inputs:
                    sourceTableHandles = inputs.pop("sourceTableHandles")
                    sourceTableHandleDict = {handle.dataId["visit"]: handle
                                             for handle in sourceTableHandles}
                    inputs["sourceTableHandleDict"] = sourceTableHandleDict
                if "finalizedSourceTableHandles" in inputs:
                    finalizedSourceTableHandles = inputs.pop("finalizedSourceTableHandles")
                    finalizedSourceTableHandleDict = {handle.dataId["visit"]: handle
                                                      for handle in finalizedSourceTableHandles}
                    inputs["finalizedSourceTableHandleDict"] = finalizedSourceTableHandleDict
            else:
                # Legacy PropagateVisitFlagsTask: filter out any visit catalog
                # that is not a coadd input.
                ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
                visitKey = ccdInputs.schema.find("visit").key
                ccdKey = ccdInputs.schema.find("ccd").key
                inputVisitIds = set()
                ccdRecordsWcs = {}
                for ccdRecord in ccdInputs:
                    visit = ccdRecord.get(visitKey)
                    ccd = ccdRecord.get(ccdKey)
                    inputVisitIds.add((visit, ccd))
                    ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

                inputCatalogsToKeep = []
                inputCatalogWcsUpdate = []
                for i, dataRef in enumerate(inputRefs.visitCatalogs):
                    key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                    if key in inputVisitIds:
                        inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                        inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
                inputs['visitCatalogs'] = inputCatalogsToKeep
                inputs['wcsUpdates'] = inputCatalogWcsUpdate
                inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        sources = outputs.outputSources
        butlerQC.put(outputs, outputRefs)
    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
            sourceTableHandleDict=None, finalizedSourceTableHandleDict=None):
        """Run measurement algorithms on the input exposure, and optionally populate the
        resulting catalog with extra information.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The input exposure on which measurements are to be performed.
        sources : `lsst.afw.table.SourceCatalog`
            A catalog built from the results of merged detections, or
            deblender outputs.
        skyInfo : `lsst.pipe.base.Struct`
            A struct containing information about the position of the input exposure within
            a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box.
        exposureId : `int` or `bytes`
            Packed unique number or bytes unique to the input exposure.
        ccdInputs : `lsst.afw.table.ExposureCatalog`, optional
            Catalog containing information on the individual visits which went into making
            the coadd.
        visitCatalogs : `list` of `lsst.afw.table.SourceCatalog`, optional
            A list of source catalogs corresponding to measurements made on the individual
            visits which went into the input exposure. If None and butler is `None` then
            the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        wcsUpdates : `list` of `lsst.afw.geom.SkyWcs`, optional
            If visitCatalogs is not `None` this should be a list of wcs objects which correspond
            to the input visits. Used to put all coordinates to common system. If `None` and
            butler is `None` then the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        sourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for sourceTable_visit handles (key is visit) for propagating flags.
            These tables are derived from the ``CalibrateTask`` sources, and contain
            astrometry and photometry flags, and optionally PSF flags.
        finalizedSourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for finalized_src_table handles (key is visit) for propagating flags.
            These tables are derived from ``FinalizeCalibrationTask`` and contain
            PSF flags from the finalized PSF estimation.

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results of running measurement task. Will contain the catalog in the
            ``outputSources`` attribute. Optionally will have results of matching to a
            reference catalog in the ``matchResult`` attribute, and denormalized
            matches in the ``denormMatches`` attribute.
        """
        self.measurement.run(sources, exposure, exposureId=exposureId)

        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=sources,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        # The steps below require a contiguous catalog; make a deep copy if
        # the measurement outputs are not contiguous.
        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
                                 patchInfo=skyInfo.patchInfo)
        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version.
                self.propagateFlags.run(
                    sources,
                    ccdInputs,
                    sourceTableHandleDict,
                    finalizedSourceTableHandleDict
                )
            else:
                # Legacy deprecated version.
                self.propagateFlags.run(
                    sources,
                    ccdInputs,
                    exposure.getWcs(),
                    visitCatalogs,
                    wcsUpdates
                )

        results = Struct()

        if self.config.doMatchSources:
            matchResult = self.match.run(sources, exposure.getInfo().getFilter().bandLabel)
            matches = afwTable.packMatches(matchResult.matches)
            matches.table.setMetadata(matchResult.matchMeta)
            results.matchResult = matches
            if self.config.doWriteMatchesDenormalized:
                if matchResult.matches:
                    denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
                else:
                    self.log.warning("No matches, so generating dummy denormalized matches file")
                    denormMatches = afwTable.BaseCatalog(afwTable.Schema())
                    denormMatches.getMetadata().add("COMMENT",
                                                    "This catalog is empty because no matches were found.")
                results.denormMatches = denormMatches

        results.outputSources = sources
        return results
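# A hedged command-line sketch for running this task through the pipeline
# middleware (repository path, collections, and data ID values are
# illustrative):
#
#     pipetask run -b /repo -i u/user/coadds -o u/user/meas \
#         -t lsst.pipe.tasks.multiBand.MeasureMergedCoaddSourcesTask \
#         -d "tract=0 AND patch=0 AND band='i' AND skymap='skymap'"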