# Public API of this module: the coadd detection and merged-measurement tasks
# and their configuration classes.  (The Measure* names are defined later in
# this file and were missing from the garbled original list.)
__all__ = [
    "DetectCoaddSourcesConfig",
    "DetectCoaddSourcesTask",
    "MeasureMergedCoaddSourcesConfig",
    "MeasureMergedCoaddSourcesTask",
]
26from lsst.pipe.base
import (Struct, PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
27import lsst.pipe.base.connectionTypes
as cT
31 SingleFrameMeasurementTask,
33 CatalogCalculationTask,
34 SkyMapIdGeneratorConfig,
45from .mergeDetections
import MergeDetectionsConfig, MergeDetectionsTask
46from .mergeMeasurements
import MergeMeasurementsConfig, MergeMeasurementsTask
47from .multiBandUtils
import CullPeaksConfig
48from .deblendCoaddSourcesPipeline
import DeblendCoaddSourcesSingleConfig
49from .deblendCoaddSourcesPipeline
import DeblendCoaddSourcesSingleTask
50from .deblendCoaddSourcesPipeline
import DeblendCoaddSourcesMultiConfig
51from .deblendCoaddSourcesPipeline
import DeblendCoaddSourcesMultiTask
* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
* deepCoadd_mergeDet: merged detections (tract, patch)
* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
* deepCoadd_ref: reference sources (tract, patch)

All of these have associated *_schema catalogs that require no data ID and hold no records.

In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
the mergeDet, meas, and ref dataset Footprints:

* deepCoadd_peak_schema
class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep",
                                                      "outputCoaddName": "deep"}):
    """Butler connections for `DetectCoaddSourcesTask`.

    NOTE(review): the original ``class`` statement was lost in extraction; the
    class name is taken from the ``pipelineConnections=`` reference in
    ``DetectCoaddSourcesConfig`` below — confirm against upstream.
    """

    # Schema of the catalog this task will produce, published at init time so
    # downstream tasks can build against it before any data is processed.
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    # NOTE(review): the attribute name of this Input connection was elided in
    # extraction; "exposure" matches the keyword expected by
    # DetectCoaddSourcesTask.run(**inputs) — confirm upstream.
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap"),
    )
class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """Configuration parameters for the DetectCoaddSourcesTask."""

    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    # NOTE(review): the name and default of this field were elided in
    # extraction; "hasFakes"/False inferred from the doc text and the parallel
    # field in MeasureMergedCoaddSourcesConfig — confirm upstream.
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )
    idGenerator = SkyMapIdGeneratorConfig.make_field()

    def setDefaults(self):
        super().setDefaults()
        # Detect against the per-pixel standard deviation of the (rescaled)
        # variance plane, growing footprints isotropically.
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        # Coadds are built from background-subtracted inputs, so keep any
        # residual background handling minimal and coarse.
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True
        # None means "use as many bands as the skymap packer defines" when
        # packing data IDs into catalog object IDs.
        self.idGenerator.packer.n_bands = None
class DetectCoaddSourcesTask(PipelineTask):
    """Detect sources on a single filter coadd.

    Coadding individual visits requires each exposure to be warped. This
    introduces covariance in the noise properties across pixels. Before
    detection, we correct the coadd variance by scaling the variance plane in
    the coadd to match the observed variance. This is an approximate
    approach -- strictly, we should propagate the full covariance matrix --
    but it is simple and works well in practice.

    After scaling the variance plane, we detect sources and generate
    footprints by delegating to the @ref SourceDetectionTask_ "detection"
    subtask.

    DetectCoaddSourcesTask is meant to be run after assembling a coadded image
    in a given band. The purpose of the task is to update the background,
    detect all sources in a single band and generate a set of parent
    footprints. Subsequent tasks in the multi-band processing procedure will
    merge sources across bands and, eventually, perform forced photometry.

    Parameters
    ----------
    schema : `lsst.afw.table.Schema`, optional
        Initial schema for the output catalog, modified in place to include
        all fields set by this task. If None, the source minimal schema will
        be used.
    **kwargs
        Additional keyword arguments.
    """

    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig

    def __init__(self, schema=None, **kwargs):
        super().__init__(**kwargs)
        # NOTE(review): this None-guard was elided in extraction; restored
        # from the documented contract ("If None, the source minimal schema
        # will be used") — confirm upstream.
        if schema is None:
            schema = afwTable.SourceTable.makeMinimalSchema()
        self.schema = schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")
        # Publish the (possibly subtask-extended) schema as an init-output.
        self.detectionSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        # Derive a deterministic ID factory and RNG seed from the data ID.
        idGenerator = self.config.idGenerator.apply(butlerQC.quantum.dataId)
        inputs["idFactory"] = idGenerator.make_table_id_factory()
        inputs["expId"] = idGenerator.catalog_id
        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)

    def run(self, exposure, idFactory, expId):
        """Run detection on an exposure.

        First scale the variance plane to match the observed variance using
        ``ScaleVarianceTask``. Then invoke the ``SourceDetectionTask_``
        "detection" subtask to detect sources.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            Exposure on which to detect (may be background-subtracted and
            scaled, depending on configuration).
        idFactory : `lsst.afw.table.IdFactory`
            IdFactory to set source identifiers.
        expId : `int`
            Exposure identifier (integer) for RNG seed.

        Returns
        -------
        result : `lsst.pipe.base.Struct`
            Results as a struct with attributes:

            ``outputSources``
                Catalog of detections (`lsst.afw.table.SourceCatalog`).
            ``outputBackgrounds``
                List of backgrounds (`list`).
            ``outputExposure``
                The input exposure, updated in place by detection.
        """
        if self.config.doScaleVariance:
            varScale = self.scaleVariance.run(exposure.maskedImage)
            # Record the applied scale so downstream consumers can undo it.
            exposure.getMetadata().add("VARIANCE_SCALE", varScale)
        backgrounds = afwMath.BackgroundList()
        table = afwTable.SourceTable.make(self.schema, idFactory)
        detections = self.detection.run(table, exposure, expId=expId)
        sources = detections.sources
        # Detection may or may not produce a background estimate; collect any
        # that were made.
        if hasattr(detections, "background") and detections.background:
            for bg in detections.background:
                backgrounds.append(bg)
        return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep",
                                                             "deblendedCatalog": "deblendedFlux"}):
    """Butler connections for `MeasureMergedCoaddSourcesTask`."""

    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    # NOTE(review): the dataset name and loading options of this prerequisite
    # were elided in extraction; "cal_ref_cat" with deferLoad/multiple is the
    # conventional form — confirm upstream.
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        name="cal_ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True,
    )
    # Attribute name grounded by inputs['exposure'] usage in runQuantum.
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    # Attribute name grounded by inputs.pop('skyMap') in runQuantum.
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    # multiple=True is implied by the iteration over inputRefs.visitCatalogs
    # in runQuantum; the dataset name was elided — "src" assumed, confirm.
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects. Only used in legacy PropagateVisitFlagsTask.",
        name="src",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True,
    )
    # deferLoad/multiple elided in extraction; the "Handles" naming and the
    # per-visit dict built in runQuantum imply deferred, multiple loading.
    sourceTableHandles = cT.Input(
        doc=("Source tables that are derived from the ``CalibrateTask`` sources. "
             "These tables contain astrometry and photometry flags, and optionally "
             "PSF flags."),
        name="sourceTable_visit",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    finalizedSourceTableHandles = cT.Input(
        doc=("Finalized source tables from ``FinalizeCalibrationTask``. These "
             "tables contain PSF flags from the finalized PSF estimation."),
        name="finalized_src_table",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use."
             "If the single band deblender was used this should be 'deblendedFlux."
             "If the multi-band deblender was used this should be 'deblendedModel, "
             "or deblendedFlux if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'"),
        name="{inputCoaddName}Coadd_{deblendedCatalog}",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    scarletCatalog = cT.Input(
        doc="Catalogs produced by multiband deblending",
        name="{inputCoaddName}Coadd_deblendedCatalog",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    scarletModels = cT.Input(
        doc="Multiband scarlet models produced by the deblender",
        name="{inputCoaddName}Coadd_scarletModelData",
        storageClass="ScarletModelData",
        dimensions=("tract", "patch", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        if config.doPropagateFlags is False:
            # No flag propagation at all: drop every propagation input.
            self.inputs -= set(("visitCatalogs",))
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))
        elif config.propagateFlags.target == PropagateSourceFlagsTask:
            # The new PropagateSourceFlagsTask does not use visitCatalogs.
            self.inputs -= set(("visitCatalogs",))
            # Only request the flag tables the configuration actually uses.
            if not config.propagateFlags.source_flags:
                self.inputs -= set(("sourceTableHandles",))
            if not config.propagateFlags.finalized_source_flags:
                self.inputs -= set(("finalizedSourceTableHandles",))
        else:
            # Legacy PropagateVisitFlagsTask path: it uses visitCatalogs only.
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))

        if config.inputCatalog == "deblendedCatalog":
            self.inputs -= set(("inputCatalog",))
            if not config.doAddFootprints:
                self.inputs -= set(("scarletModels",))
        else:
            # BUG FIX: the original wrote set(("deblendedCatalog")) — no
            # trailing comma, so it built a set of single characters and the
            # discard was a silent no-op.  NOTE(review): no connection is
            # literally named "deblendedCatalog"; this likely should remove
            # "scarletCatalog" instead — confirm upstream.
            self.inputs -= set(("deblendedCatalog",))
            self.inputs -= set(("scarletModels",))

        if config.doMatchSources is False:
            self.outputs -= set(("matchResult",))

        if config.doWriteMatchesDenormalized is False:
            self.outputs -= set(("denormMatches",))
class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """Configuration parameters for the MeasureMergedCoaddSourcesTask."""

    inputCatalog = ChoiceField(
        dtype=str,
        default="deblendedCatalog",
        allowed={
            "deblendedCatalog": "Output catalog from ScarletDeblendTask",
            "deblendedFlux": "Output catalog from SourceDeblendTask",
            "mergeDet": "The merged detections before deblending.",
        },
        doc="The name of the input catalog.",
    )
    # NOTE(review): the default of doAddFootprints was elided in extraction;
    # True assumed (matches the multi-band-deblender default input) — confirm.
    doAddFootprints = Field(dtype=bool,
                            default=True,
                            doc="Whether or not to add footprints to the input catalog from scarlet models. "
                                "This should be true whenever using the multi-band deblender, "
                                "otherwise this should be False.")
    doConserveFlux = Field(dtype=bool, default=True,
                           doc="Whether to use the deblender models as templates to re-distribute the flux "
                               "from the 'exposure' (True), or to perform measurements on the deblender "
                               "model footprints.")
    doStripFootprints = Field(dtype=bool, default=True,
                              doc="Whether to strip footprints from the output catalog before "
                                  "saving to disk. "
                                  "This is usually done when using scarlet models to save disk space.")
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc="Propagate source flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str,
        default="raise",
    )
    # Field name grounded by `if self.config.doApCorr:` in the task __init__.
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )
    # NOTE(review): field name elided in extraction; "hasFakes" matches the
    # parallel field in DetectCoaddSourcesConfig — confirm upstream.
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )
    idGenerator = SkyMapIdGeneratorConfig.make_field()

    @property
    def refObjLoader(self):
        # Convenience alias: the reference loader lives on the match subtask.
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        # NOTE(review): some entries of these plugin/mask lists were elided in
        # extraction; 'base_Variance'/'base_LocalWcs' and 'INEXACT_PSF' are
        # restored from the conventional upstream defaults — confirm.
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_Variance',
                                           'base_LocalPhotoCalib',
                                           'base_LocalWcs']
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                       'INEXACT_PSF']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                     'INEXACT_PSF']
457class MeasureMergedCoaddSourcesTask(PipelineTask):
"""Deblend sources from main catalog in each coadd separately and measure.

Use peaks and footprints from a master catalog to perform deblending and
measurement in each coadd.

Given a master input catalog of sources (peaks and footprints) or deblender
outputs (including a HeavyFootprint in each band), measure each source on
the coadd. Repeating this procedure with the same master catalog across
multiple coadds will generate a consistent set of child sources.

The deblender retains all peaks and deblends any missing peaks (dropouts in
that band) as PSFs. Source properties are measured and the @c is-primary
flag (indicating sources with no children) is set. Visit flags are
propagated to the coadd sources.

Optionally, we can match the coadd sources to an external reference
catalog.

After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we
have a set of per-band catalogs. The next stage in the multi-band
processing procedure will merge these measurements into a suitable catalog
for driving forced photometry.

Parameters
----------
butler : `lsst.daf.butler.Butler` or `None`, optional
    A butler used to read the input schemas from disk or construct the
    reference catalog loader, if schema or peakSchema or refObjLoader is
    None.
schema : `lsst.afw.table.Schema`, optional
    The schema of the merged detection catalog used as input to this one.
peakSchema : `lsst.afw.table.Schema`, optional
    The schema of the PeakRecords in the Footprints in the merged detection
    catalog.
refObjLoader : `lsst.meas.algorithms.ReferenceObjectLoader`, optional
    An instance of ReferenceObjectLoader that supplies an external
    reference catalog. May be None if the loader can be constructed from
    the butler argument or all steps requiring a reference catalog are
    disabled.
initInputs : `dict`, optional
    Dictionary that can contain a key ``inputSchema`` containing the input
    schema. If present will override the value of ``schema``.
**kwargs
    Additional keyword arguments.
"""
501 _DefaultName = "measureCoaddSources"
502 ConfigClass = MeasureMergedCoaddSourcesConfig
def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
             **kwargs):
    """Build the measurement schema and construct all configured subtasks."""
    super().__init__(**kwargs)
    self.deblended = self.config.inputCatalog.startswith("deblended")
    self.inputCatalog = "Coadd_" + self.config.inputCatalog
    # Init-inputs (from the pipeline framework) take precedence over an
    # explicitly supplied schema.
    if initInputs is not None:
        schema = initInputs['inputSchema'].schema
    if schema is None:
        # Fall back to reading the schema dataset through the (Gen2) butler.
        assert butler is not None, "Neither butler nor schema is defined"
        schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema").schema
    self.schemaMapper = afwTable.SchemaMapper(schema)
    self.schemaMapper.addMinimalSchema(schema)
    self.schema = self.schemaMapper.getOutputSchema()
    # NOTE(review): the initialization of self.algMetadata was elided in
    # extraction; it is required by the measurement subtask below and is
    # conventionally an empty PropertyList — confirm upstream.
    self.algMetadata = PropertyList()
    self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
    self.makeSubtask("setPrimaryFlags", schema=self.schema)
    if self.config.doMatchSources:
        self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
    if self.config.doPropagateFlags:
        self.makeSubtask("propagateFlags", schema=self.schema)
    self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
    if self.config.doApCorr:
        self.makeSubtask("applyApCorr", schema=self.schema)
    if self.config.doRunCatalogCalculation:
        self.makeSubtask("catalogCalculation", schema=self.schema)
    # Publish the fully extended schema as an init-output.
    self.outputSchema = afwTable.SourceCatalog(self.schema)
def runQuantum(self, butlerQC, inputRefs, outputRefs):
    """Load inputs, adapt them to `run`'s signature, and store outputs."""
    inputs = butlerQC.get(inputRefs)

    # NOTE(review): this guard was elided in extraction; it is required
    # because self.match only exists when doMatchSources is True (__init__).
    if self.config.doMatchSources:
        refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                             inputs.pop('refCat'),
                                             name=self.config.connections.refCat,
                                             config=self.config.refObjLoader,
                                             )
        self.match.setRefObjLoader(refObjLoader)

    # Bound the PSF cache so repeated evaluations stay cheap.
    inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

    # Deterministic IDs/seeds derived from the data ID.
    idGenerator = self.config.idGenerator.apply(butlerQC.quantum.dataId)
    inputs['exposureId'] = idGenerator.catalog_id

    # Transfer the input catalog into a catalog with this task's schema.
    table = afwTable.SourceTable.make(self.schema, idGenerator.make_table_id_factory())
    sources = afwTable.SourceCatalog(table)
    if "scarletCatalog" in inputs:
        inputCatalog = inputs.pop("scarletCatalog")
        catalogRef = inputRefs.scarletCatalog
    else:
        inputCatalog = inputs.pop("inputCatalog")
        catalogRef = inputRefs.inputCatalog
    sources.extend(inputCatalog, self.schemaMapper)
    if self.config.doAddFootprints:
        modelData = inputs.pop('scarletModels')
        if self.config.doConserveFlux:
            redistributeImage = inputs['exposure'].image
        else:
            redistributeImage = None
        # NOTE(review): the catalog argument of this call was elided in
        # extraction; `sources` is the only catalog in scope — confirm.
        modelData.updateCatalogFootprints(
            catalog=sources,
            band=inputRefs.exposure.dataId["band"],
            psfModel=inputs['exposure'].getPsf(),
            redistributeImage=redistributeImage,
            removeScarletData=True,
        )
    table = sources.getTable()
    table.setMetadata(self.algMetadata)
    inputs['sources'] = sources

    # Assemble the skyInfo struct expected by run().
    skyMap = inputs.pop('skyMap')
    tractNumber = catalogRef.dataId['tract']
    tractInfo = skyMap[tractNumber]
    patchInfo = tractInfo.getPatchInfo(catalogRef.dataId['patch'])
    # NOTE(review): the construction of skyInfo was partially elided; only
    # the wcs/bbox keywords are visible — confirm remaining fields upstream.
    skyInfo = Struct(
        skyMap=skyMap,
        tractInfo=tractInfo,
        patchInfo=patchInfo,
        wcs=tractInfo.getWcs(),
        bbox=patchInfo.getOuterBBox()
    )
    inputs['skyInfo'] = skyInfo

    if self.config.doPropagateFlags:
        if self.config.propagateFlags.target == PropagateSourceFlagsTask:
            # New style flag propagation: per-visit tables keyed by visit ID.
            ccdInputs = inputs["exposure"].getInfo().getCoaddInputs().ccds
            inputs["ccdInputs"] = ccdInputs
            if "sourceTableHandles" in inputs:
                sourceTableHandles = inputs.pop("sourceTableHandles")
                sourceTableHandleDict = {handle.dataId["visit"]: handle
                                         for handle in sourceTableHandles}
                inputs["sourceTableHandleDict"] = sourceTableHandleDict
            if "finalizedSourceTableHandles" in inputs:
                finalizedSourceTableHandles = inputs.pop("finalizedSourceTableHandles")
                finalizedSourceTableHandleDict = {handle.dataId["visit"]: handle
                                                  for handle in finalizedSourceTableHandles}
                inputs["finalizedSourceTableHandleDict"] = finalizedSourceTableHandleDict
        else:
            # Legacy PropagateVisitFlagsTask path: filter visit catalogs down
            # to the (visit, ccd) pairs that actually fed this coadd.
            ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
            visitKey = ccdInputs.schema.find("visit").key
            ccdKey = ccdInputs.schema.find("ccd").key
            inputVisitIds = set()
            # NOTE(review): this dict initialization was elided in extraction;
            # required by the assignments/lookups below.
            ccdRecordsWcs = {}
            for ccdRecord in ccdInputs:
                visit = ccdRecord.get(visitKey)
                ccd = ccdRecord.get(ccdKey)
                inputVisitIds.add((visit, ccd))
                ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

            inputCatalogsToKeep = []
            inputCatalogWcsUpdate = []
            for i, dataRef in enumerate(inputRefs.visitCatalogs):
                key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                if key in inputVisitIds:
                    inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                    inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
            inputs['visitCatalogs'] = inputCatalogsToKeep
            inputs['wcsUpdates'] = inputCatalogWcsUpdate
            inputs['ccdInputs'] = ccdInputs

    outputs = self.run(**inputs)
    # NOTE(review): footprint-stripping logic between run() and put() was
    # elided in extraction (cf. config.doStripFootprints) — confirm upstream.
    sources = outputs.outputSources
    butlerQC.put(outputs, outputRefs)
640 def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
641 butler=None, sourceTableHandleDict=None, finalizedSourceTableHandleDict=None):
642 """Run measurement algorithms on the input exposure, and optionally populate the
643 resulting catalog with extra information.
647 exposure : `lsst.afw.exposure.Exposure`
648 The input exposure on which measurements are to be performed.
650 A catalog built
from the results of merged detections,
or
652 skyInfo : `lsst.pipe.base.Struct`
653 A struct containing information about the position of the input exposure within
654 a `SkyMap`, the `SkyMap`, its `Wcs`,
and its bounding box.
655 exposureId : `int`
or `bytes`
656 Packed unique number
or bytes unique to the input exposure.
658 Catalog containing information on the individual visits which went into making
660 visitCatalogs : `list` of `lsst.afw.table.SourceCatalogs`, optional
661 A list of source catalogs corresponding to measurements made on the individual
662 visits which went into the input exposure. If
None and butler
is `
None` then
663 the task cannot propagate visit flags to the output catalog.
664 Deprecated, to be removed
with PropagateVisitFlagsTask.
666 If visitCatalogs
is not `
None` this should be a list of wcs objects which correspond
667 to the input visits. Used to put all coordinates to common system. If `
None`
and
668 butler
is `
None` then the task cannot propagate visit flags to the output catalog.
669 Deprecated, to be removed
with PropagateVisitFlagsTask.
670 butler : `
None`, optional
671 This was a Gen2 butler used to load visit catalogs.
672 No longer used
and should
not be set. Will be removed
in the
674 sourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
675 Dict
for sourceTable_visit handles (key
is visit)
for propagating flags.
676 These tables are derived
from the ``CalibrateTask`` sources,
and contain
677 astrometry
and photometry flags,
and optionally PSF flags.
678 finalizedSourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
679 Dict
for finalized_src_table handles (key
is visit)
for propagating flags.
680 These tables are derived
from ``FinalizeCalibrationTask``
and contain
681 PSF flags
from the finalized PSF estimation.
685 results : `lsst.pipe.base.Struct`
686 Results of running measurement task. Will contain the catalog
in the
687 sources attribute. Optionally will have results of matching to a
688 reference catalog
in the matchResults attribute,
and denormalized
689 matches
in the denormMatches attribute.
691 if butler
is not None:
692 warnings.warn(
"The 'butler' parameter is no longer used and can be safely removed.",
693 category=FutureWarning, stacklevel=2)
696 self.measurement.run(sources, exposure, exposureId=exposureId)
698 if self.config.doApCorr:
699 self.applyApCorr.run(
701 apCorrMap=exposure.getInfo().getApCorrMap()
708 if not sources.isContiguous():
709 sources = sources.copy(deep=
True)
711 if self.config.doRunCatalogCalculation:
712 self.catalogCalculation.run(sources)
714 self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
715 patchInfo=skyInfo.patchInfo)
716 if self.config.doPropagateFlags:
717 if self.config.propagateFlags.target == PropagateSourceFlagsTask:
719 self.propagateFlags.run(
722 sourceTableHandleDict,
723 finalizedSourceTableHandleDict
727 self.propagateFlags.run(
738 if self.config.doMatchSources:
739 matchResult = self.match.run(sources, exposure.getInfo().getFilter().bandLabel)
740 matches = afwTable.packMatches(matchResult.matches)
741 matches.table.setMetadata(matchResult.matchMeta)
742 results.matchResult = matches
743 if self.config.doWriteMatchesDenormalized:
744 if matchResult.matches:
745 denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
747 self.log.warning(
"No matches, so generating dummy denormalized matches file")
748 denormMatches = afwTable.BaseCatalog(afwTable.Schema())
750 denormMatches.getMetadata().add(
"COMMENT",
751 "This catalog is empty because no matches were found.")
752 results.denormMatches = denormMatches
753 results.denormMatches = denormMatches
755 results.outputSources = sources