__all__ = ["DetectCoaddSourcesConfig", "DetectCoaddSourcesTask"]

# NOTE: several imports below are reconstructed from the names used in this
# module; the paths follow the standard LSST science pipelines layout.
import warnings

import lsst.afw.math as afwMath
import lsst.afw.table as afwTable
from lsst.daf.base import PropertyList
from lsst.pex.config import Field, ConfigurableField, ChoiceField
from lsst.pipe.base import (Struct, PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
import lsst.pipe.base.connectionTypes as cT
from lsst.skymap import BaseSkyMap
from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader, ScaleVarianceTask
from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
from lsst.meas.base import (
    SingleFrameMeasurementTask,
    ApplyApCorrTask,
    CatalogCalculationTask,
    SkyMapIdGeneratorConfig,
)
from .fakes import BaseFakeSourcesTask
from .setPrimaryFlags import SetPrimaryFlagsTask
from .propagateSourceFlags import PropagateSourceFlagsTask
from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask
from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask
from .multiBandUtils import CullPeaksConfig
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask

"""
New dataset types:
* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
* deepCoadd_mergeDet: merged detections (tract, patch)
* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
* deepCoadd_ref: reference sources (tract, patch)
All of these have associated *_schema catalogs that require no data ID and hold no records.

In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
the mergeDet, meas, and ref dataset Footprints:
* deepCoadd_peak_schema
"""
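
# A minimal, hedged sketch of reading these datasets back with the Gen3 butler
# (the repository path, collection, and data ID values are illustrative only):
#
#     from lsst.daf.butler import Butler
#     butler = Butler("/path/to/repo", collections="coadd/run")
#     detCat = butler.get("deepCoadd_det", tract=0, patch=42, band="r", skymap="skymapName")
#     mergeDet = butler.get("deepCoadd_mergeDet", tract=0, patch=42, skymap="skymapName")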


class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep",
                                                      "outputCoaddName": "deep"}):
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )


class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """Configuration parameters for the DetectCoaddSourcesTask
    """
    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    doInsertFakes = Field(dtype=bool, default=False,
                          doc="Run fake sources injection task",
                          deprecated=("doInsertFakes is no longer supported. This config will be removed "
                                      "after v24."))
    insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
                                    doc="Injection of fake sources for testing "
                                        "purposes (must be retargeted)",
                                    deprecated=("insertFakes is no longer supported. This config will "
                                                "be removed after v24."))
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )
    idGenerator = SkyMapIdGeneratorConfig.make_field()

    def setDefaults(self):
        super().setDefaults()
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        # Coadds are made from background-subtracted CCDs, so any further
        # background subtraction here should be very basic.
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True
        # Pack as many bands as are defined, rather than a fixed number.
        self.idGenerator.packer.n_bands = None
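
# A hedged sketch of customizing this config before running the task (the
# values are illustrative, not recommendations):
#
#     config = DetectCoaddSourcesConfig()
#     config.doScaleVariance = False           # skip empirical variance rescaling
#     config.detection.thresholdValue = 5.0    # detection threshold, in units of thresholdType
#     config.validate()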


class DetectCoaddSourcesTask(PipelineTask):
    """Detect sources on a single filter coadd.

    Coadding individual visits requires each exposure to be warped. This
    introduces covariance in the noise properties across pixels. Before
    detection, we correct the coadd variance by scaling the variance plane in
    the coadd to match the observed variance. This is an approximate
    approach -- strictly, we should propagate the full covariance matrix --
    but it is simple and works well in practice.

    After scaling the variance plane, we detect sources and generate footprints
    by delegating to the @ref SourceDetectionTask_ "detection" subtask.

    DetectCoaddSourcesTask is meant to be run after assembling a coadded image
    in a given band. The purpose of the task is to update the background,
    detect all sources in a single band and generate a set of parent
    footprints. Subsequent tasks in the multi-band processing procedure will
    merge sources across bands and, eventually, perform forced photometry.

    Parameters
    ----------
    schema : `lsst.afw.table.Schema`, optional
        Initial schema for the output catalog, modified in place to include all
        fields set by this task. If None, the source minimal schema will be used.
    **kwargs
        Additional keyword arguments.
    """

    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig

    def __init__(self, schema=None, **kwargs):
        super().__init__(**kwargs)
        if schema is None:
            schema = afwTable.SourceTable.makeMinimalSchema()
        self.schema = schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")

        self.detectionSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        idGenerator = self.config.idGenerator.apply(butlerQC.quantum.dataId)
        inputs["idFactory"] = idGenerator.make_table_id_factory()
        inputs["expId"] = idGenerator.catalog_id
        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)

    def run(self, exposure, idFactory, expId):
        """Run detection on an exposure.

        First scale the variance plane to match the observed variance
        using ``ScaleVarianceTask``. Then invoke the ``SourceDetectionTask_`` "detection" subtask to
        detect sources.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            Exposure on which to detect (may be background-subtracted and scaled,
            depending on configuration).
        idFactory : `lsst.afw.table.IdFactory`
            IdFactory to set source identifiers.
        expId : `int`
            Exposure identifier (integer) for RNG seed.

        Returns
        -------
        result : `lsst.pipe.base.Struct`
            Results as a struct with attributes:

            ``outputSources``
                Catalog of detections (`lsst.afw.table.SourceCatalog`).
            ``outputBackgrounds``
                List of backgrounds (`list`).
            ``outputExposure``
                The input exposure, possibly modified in place (`lsst.afw.image.Exposure`).
        """
        if self.config.doScaleVariance:
            varScale = self.scaleVariance.run(exposure.maskedImage)
            exposure.getMetadata().add("VARIANCE_SCALE", varScale)
        backgrounds = afwMath.BackgroundList()
        table = afwTable.SourceTable.make(self.schema, idFactory)
        detections = self.detection.run(table, exposure, expId=expId)
        sources = detections.sources
        if hasattr(detections, "background") and detections.background:
            for bg in detections.background:
                backgrounds.append(bg)
        return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
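
# A minimal, hedged sketch of running DetectCoaddSourcesTask outside the
# pipeline middleware (the coadd exposure and expId are illustrative; the
# middleware normally supplies a skymap-packed IdFactory instead):
#
#     task = DetectCoaddSourcesTask(config=DetectCoaddSourcesConfig())
#     # exposure = butler.get("deepCoadd", tract=0, patch=42, band="r", skymap="skymapName")
#     idFactory = afwTable.IdFactory.makeSimple()
#     result = task.run(exposure, idFactory=idFactory, expId=0)
#     detections, backgrounds = result.outputSources, result.outputBackgrounds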


class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep",
                                                             "deblendedCatalog": "deblendedFlux"}):
    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        name="cal_ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects. Only used in legacy PropagateVisitFlagsTask.",
        name="src",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True
    )
    sourceTableHandles = cT.Input(
        doc=("Source tables that are derived from the ``CalibrateTask`` sources. "
             "These tables contain astrometry and photometry flags, and optionally "
             "PSF flags."),
        name="sourceTable_visit",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    finalizedSourceTableHandles = cT.Input(
        doc=("Finalized source tables from ``FinalizeCalibrationTask``. These "
             "tables contain PSF flags from the finalized PSF estimation."),
        name="finalized_src_table",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use. "
             "If the single band deblender was used this should be 'deblendedFlux'. "
             "If the multi-band deblender was used this should be 'deblendedModel', "
             "or 'deblendedFlux' if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'."),
        name="{inputCoaddName}Coadd_{deblendedCatalog}",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    scarletCatalog = cT.Input(
        doc="Catalogs produced by multiband deblending",
        name="{inputCoaddName}Coadd_deblendedCatalog",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    scarletModels = cT.Input(
        doc="Multiband scarlet models produced by the deblender",
        name="{inputCoaddName}Coadd_scarletModelData",
        storageClass="ScarletModelData",
        dimensions=("tract", "patch", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        if config.doPropagateFlags is False:
            self.inputs -= set(("visitCatalogs",))
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))
        elif config.propagateFlags.target == PropagateSourceFlagsTask:
            # The new PropagateSourceFlagsTask does not use visitCatalogs.
            self.inputs -= set(("visitCatalogs",))
            # Only request the handle inputs whose flags are actually configured.
            if not config.propagateFlags.source_flags:
                self.inputs -= set(("sourceTableHandles",))
            if not config.propagateFlags.finalized_source_flags:
                self.inputs -= set(("finalizedSourceTableHandles",))
        else:
            # The legacy PropagateVisitFlagsTask uses only visitCatalogs.
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))

        if config.inputCatalog == "deblendedCatalog":
            self.inputs -= set(("inputCatalog",))
            # The scarlet models are only needed when adding footprints.
            if not config.doAddFootprints:
                self.inputs -= set(("scarletModels",))
        else:
            # The multiband deblender outputs are not used; the catalog
            # connection is named "scarletCatalog".
            self.inputs -= set(("scarletCatalog",))
            self.inputs -= set(("scarletModels",))

        if config.doMatchSources is False:
            self.prerequisiteInputs -= set(("refCat",))
            self.outputs -= set(("matchResult",))

        if config.doWriteMatchesDenormalized is False:
            self.outputs -= set(("denormMatches",))


class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """Configuration parameters for the MeasureMergedCoaddSourcesTask
    """
    inputCatalog = ChoiceField(
        dtype=str,
        default="deblendedCatalog",
        allowed={
            "deblendedCatalog": "Output catalog from ScarletDeblendTask",
            "deblendedFlux": "Output catalog from SourceDeblendTask",
            "mergeDet": "The merged detections before deblending."
        },
        doc="The name of the input catalog.",
    )
    doAddFootprints = Field(dtype=bool,
                            default=True,
                            doc="Whether or not to add footprints to the input catalog from scarlet models. "
                                "This should be true whenever using the multi-band deblender, "
                                "otherwise this should be False.")
    doConserveFlux = Field(dtype=bool, default=True,
                           doc="Whether to use the deblender models as templates to re-distribute the flux "
                               "from the 'exposure' (True), or to perform measurements on the deblender "
                               "model footprints (False).")
    doStripFootprints = Field(dtype=bool, default=True,
                              doc="Whether to strip footprints from the output catalog before "
                                  "saving to disk. "
                                  "This is usually done when using scarlet models to save disk space.")
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc="Propagate source flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str,
        default="raise",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )
    idGenerator = SkyMapIdGeneratorConfig.make_field()

    @property
    def refObjLoader(self):
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_Variance',
                                           'base_LocalPhotoCalib',
                                           'base_LocalWcs']
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                       'INEXACT_PSF']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                     'INEXACT_PSF']
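
# A hedged sketch of extending the measurement configuration further
# (base_FPPosition is a standard meas_base plugin; values are illustrative):
#
#     config = MeasureMergedCoaddSourcesConfig()
#     config.measurement.plugins.names |= ['base_FPPosition']
#     config.doWriteMatchesDenormalized = True
#     config.validate()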


class MeasureMergedCoaddSourcesTask(PipelineTask):
    """Deblend sources from main catalog in each coadd separately and measure.

    Use peaks and footprints from a master catalog to perform deblending and
    measurement in each coadd.

    Given a master input catalog of sources (peaks and footprints) or deblender
    outputs (including a HeavyFootprint in each band), measure each source on
    the coadd. Repeating this procedure with the same master catalog across
    multiple coadds will generate a consistent set of child sources.

    The deblender retains all peaks and deblends any missing peaks (dropouts in
    that band) as PSFs. Source properties are measured and the @c is-primary
    flag (indicating sources with no children) is set. Visit flags are
    propagated to the coadd sources.

    Optionally, we can match the coadd sources to an external reference
    catalog.

    After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we
    have a set of per-band catalogs. The next stage in the multi-band
    processing procedure will merge these measurements into a suitable catalog
    for driving forced photometry.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler` or `None`, optional
        A butler used to read the input schemas from disk or construct the reference
        catalog loader, if schema or peakSchema or refObjLoader is None.
    schema : `lsst.afw.table.Schema`, optional
        The schema of the merged detection catalog used as input to this one.
    peakSchema : `lsst.afw.table.Schema`, optional
        The schema of the PeakRecords in the Footprints in the merged detection catalog.
    refObjLoader : `lsst.meas.algorithms.ReferenceObjectLoader`, optional
        An instance of LoadReferenceObjectsTasks that supplies an external reference
        catalog. May be None if the loader can be constructed from the butler argument
        or all steps requiring a reference catalog are disabled.
    initInputs : `dict`, optional
        Dictionary that can contain a key ``inputSchema`` containing the
        input schema. If present will override the value of ``schema``.
    **kwargs
        Additional keyword arguments.
    """

    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig

    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        super().__init__(**kwargs)
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        if initInputs is not None:
            schema = initInputs['inputSchema'].schema
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema").schema
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        self.outputSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        if self.config.doMatchSources:
            refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                                 inputs.pop('refCat'),
                                                 name=self.config.connections.refCat,
                                                 config=self.config.refObjLoader,
                                                 log=self.log)
            self.match.setRefObjLoader(refObjLoader)

        # Set the PSF cache capacity before measurement.
        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        # Get a unique integer ID for the IdFactory and RNG seeds.
        idGenerator = self.config.idGenerator.apply(butlerQC.quantum.dataId)
        inputs['exposureId'] = idGenerator.catalog_id

        # Build an empty source catalog with the output schema; input records
        # are transferred into it below.
        table = afwTable.SourceTable.make(self.schema, idGenerator.make_table_id_factory())
        sources = afwTable.SourceCatalog(table)
565 if "scarletCatalog" in inputs:
566 inputCatalog = inputs.pop(
"scarletCatalog")
567 catalogRef = inputRefs.scarletCatalog
569 inputCatalog = inputs.pop(
"inputCatalog")
570 catalogRef = inputRefs.inputCatalog
571 sources.extend(inputCatalog, self.schemaMapper)
574 if self.config.doAddFootprints:
575 modelData = inputs.pop(
'scarletModels')
576 if self.config.doConserveFlux:
577 redistributeImage = inputs[
'exposure'].image
579 redistributeImage =
None
580 modelData.updateCatalogFootprints(
582 band=inputRefs.exposure.dataId[
"band"],
583 psfModel=inputs[
'exposure'].getPsf(),
584 redistributeImage=redistributeImage,
585 removeScarletData=
True,
587 table = sources.getTable()
588 table.setMetadata(self.algMetadata)
589 inputs[
'sources'] = sources
591 skyMap = inputs.pop(
'skyMap')
592 tractNumber = catalogRef.dataId[
'tract']
593 tractInfo = skyMap[tractNumber]
594 patchInfo = tractInfo.getPatchInfo(catalogRef.dataId[
'patch'])
599 wcs=tractInfo.getWcs(),
600 bbox=patchInfo.getOuterBBox()
602 inputs[
'skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version of the propagation task, using per-visit source
                # table handles.
                ccdInputs = inputs["exposure"].getInfo().getCoaddInputs().ccds
                inputs["ccdInputs"] = ccdInputs

                if "sourceTableHandles" in inputs:
                    sourceTableHandles = inputs.pop("sourceTableHandles")
                    sourceTableHandleDict = {handle.dataId["visit"]: handle
                                             for handle in sourceTableHandles}
                    inputs["sourceTableHandleDict"] = sourceTableHandleDict
                if "finalizedSourceTableHandles" in inputs:
                    finalizedSourceTableHandles = inputs.pop("finalizedSourceTableHandles")
                    finalizedSourceTableHandleDict = {handle.dataId["visit"]: handle
                                                      for handle in finalizedSourceTableHandles}
                    inputs["finalizedSourceTableHandleDict"] = finalizedSourceTableHandleDict
            else:
                # Legacy deprecated version, filtering the full visit source
                # catalogs down to the visit/detector pairs in the coadd.
                ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
                visitKey = ccdInputs.schema.find("visit").key
                ccdKey = ccdInputs.schema.find("ccd").key
                inputVisitIds = set()
                ccdRecordsWcs = {}
                for ccdRecord in ccdInputs:
                    visit = ccdRecord.get(visitKey)
                    ccd = ccdRecord.get(ccdKey)
                    inputVisitIds.add((visit, ccd))
                    ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

                inputCatalogsToKeep = []
                inputCatalogWcsUpdate = []
                for i, dataRef in enumerate(inputRefs.visitCatalogs):
                    key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                    if key in inputVisitIds:
                        inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                        inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
                inputs['visitCatalogs'] = inputCatalogsToKeep
                inputs['wcsUpdates'] = inputCatalogWcsUpdate
                inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        sources = outputs.outputSources
        butlerQC.put(outputs, outputRefs)

    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
            butler=None, sourceTableHandleDict=None, finalizedSourceTableHandleDict=None):
        """Run measurement algorithms on the input exposure, and optionally populate the
        resulting catalog with extra information.

        Parameters
        ----------
        exposure : `lsst.afw.exposure.Exposure`
            The input exposure on which measurements are to be performed.
        sources : `lsst.afw.table.SourceCatalog`
            A catalog built from the results of merged detections, or
            deblender outputs.
        skyInfo : `lsst.pipe.base.Struct`
            A struct containing information about the position of the input exposure within
            a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box.
        exposureId : `int` or `bytes`
            Packed unique number or bytes unique to the input exposure.
        ccdInputs : `lsst.afw.table.ExposureCatalog`, optional
            Catalog containing information on the individual visits which went into making
            the input exposure.
        visitCatalogs : `list` of `lsst.afw.table.SourceCatalog`, optional
            A list of source catalogs corresponding to measurements made on the individual
            visits which went into the input exposure. If None and butler is `None` then
            the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        wcsUpdates : `list` of `lsst.afw.geom.SkyWcs`, optional
            If visitCatalogs is not `None` this should be a list of wcs objects which correspond
            to the input visits. Used to put all coordinates to common system. If `None` and
            butler is `None` then the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        butler : `None`, optional
            This was a Gen2 butler used to load visit catalogs.
            No longer used and should not be set. Will be removed in the
            future.
        sourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for sourceTable_visit handles (key is visit) for propagating flags.
            These tables are derived from the ``CalibrateTask`` sources, and contain
            astrometry and photometry flags, and optionally PSF flags.
        finalizedSourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for finalized_src_table handles (key is visit) for propagating flags.
            These tables are derived from ``FinalizeCalibrationTask`` and contain
            PSF flags from the finalized PSF estimation.

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results of running measurement task. Will contain the catalog in the
            sources attribute. Optionally will have results of matching to a
            reference catalog in the matchResults attribute, and denormalized
            matches in the denormMatches attribute.
        """
        if butler is not None:
            warnings.warn("The 'butler' parameter is no longer used and can be safely removed.",
                          category=FutureWarning, stacklevel=2)
            butler = None

        self.measurement.run(sources, exposure, exposureId=exposureId)

        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=sources,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        # Ensure contiguity: several downstream operations require contiguous
        # catalogs, and copying here preserves views from this point on.
        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
                                 patchInfo=skyInfo.patchInfo)
        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # Propagate flags using per-visit source table handles.
                self.propagateFlags.run(
                    sources,
                    ccdInputs,
                    sourceTableHandleDict,
                    finalizedSourceTableHandleDict
                )
            else:
                # Legacy deprecated path calling PropagateVisitFlagsTask.run;
                # the argument list here is reconstructed from that task's
                # signature and may differ from the original in detail.
                self.propagateFlags.run(
                    butler,
                    sources,
                    ccdInputs,
                    exposure.getWcs(),
                    visitCatalogs,
                    wcsUpdates
                )

        results = Struct()

        if self.config.doMatchSources:
            matchResult = self.match.run(sources, exposure.getInfo().getFilter().bandLabel)
            matches = afwTable.packMatches(matchResult.matches)
            matches.table.setMetadata(matchResult.matchMeta)
            results.matchResult = matches
            if self.config.doWriteMatchesDenormalized:
                if matchResult.matches:
                    denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
                else:
                    self.log.warning("No matches, so generating dummy denormalized matches file")
                    denormMatches = afwTable.BaseCatalog(afwTable.Schema())
                    denormMatches.getMetadata().add("COMMENT",
                                                    "This catalog is empty because no matches were found.")
                results.denormMatches = denormMatches

        results.outputSources = sources
        return results
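
# A minimal, hedged sketch of running the measurement step standalone,
# assuming `inputSchema`, `exposure`, `sources`, and `skyInfo` were assembled
# as in runQuantum above (all four names are placeholders):
#
#     task = MeasureMergedCoaddSourcesTask(schema=inputSchema)
#     results = task.run(exposure, sources, skyInfo, exposureId=0)
#     measCat = results.outputSources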