Coverage for python/lsst/pipe/tasks/multiBand.py: 27%
285 statements
« prev ^ index » next coverage.py v6.5.0, created at 2023-01-28 02:52 -0800
1# This file is part of pipe_tasks.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
22__all__ = ["DetectCoaddSourcesConfig", "DetectCoaddSourcesTask"]
24import warnings
26from lsst.pipe.base import (Struct, PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
27import lsst.pipe.base.connectionTypes as cT
28from lsst.pex.config import Config, Field, ConfigurableField, ChoiceField
29from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader, ScaleVarianceTask
30from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
31from lsst.meas.deblender import SourceDeblendTask
32from lsst.meas.extensions.scarlet import ScarletDeblendTask
33from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
34from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
35from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
36from lsst.pipe.tasks.propagateSourceFlags import PropagateSourceFlagsTask
37import lsst.afw.table as afwTable
38import lsst.afw.math as afwMath
39from lsst.daf.base import PropertyList
40from lsst.skymap import BaseSkyMap
41from lsst.obs.base import ExposureIdInfo
43# NOTE: these imports are a convenience so multiband users only have to import this file.
44from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401
45from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401
46from .multiBandUtils import CullPeaksConfig # noqa: F401
47from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401
48from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401
49from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401
50from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401
53"""
54New set types:
55* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
56* deepCoadd_mergeDet: merged detections (tract, patch)
57* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
58* deepCoadd_ref: reference sources (tract, patch)
59All of these have associated *_schema catalogs that require no data ID and hold no records.
61In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
62the mergeDet, meas, and ref dataset Footprints:
63* deepCoadd_peak_schema
64"""
67##############################################################################################################
class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
    """Butler connections for `DetectCoaddSourcesTask`.

    One coadd exposure comes in per (tract, patch, band); the task writes
    back the detection catalog, the backgrounds used during detection, and
    the exposure updated by detection ("calexp").  The coadd dataset-name
    prefixes are controlled by the ``inputCoaddName``/``outputCoaddName``
    templates (both default to "deep").
    """

    # Schema of the output detection catalog, written once at init time.
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap")
    )
    # The input coadd with detection-driven updates (e.g. mask planes,
    # rescaled variance) applied in place.
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """Configuration parameters for the DetectCoaddSourcesTask
    """

    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    # The two fake-source fields below are deprecated no-ops retained only so
    # existing configs keep parsing; scheduled for removal after v24.
    doInsertFakes = Field(dtype=bool, default=False,
                          doc="Run fake sources injection task",
                          deprecated=("doInsertFakes is no longer supported. This config will be removed "
                                      "after v24."))
    insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
                                    doc="Injection of fake sources for testing "
                                        "purposes (must be retargeted)",
                                    deprecated=("insertFakes is no longer supported. This config will "
                                                "be removed after v24."))
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )

    def setDefaults(self):
        """Set detection defaults appropriate for coadds."""
        super().setDefaults()
        # Coadd noise is correlated by warping, so threshold on per-pixel
        # standard deviation rather than a global value.
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True  # Suppress large footprints that overwhelm the deblender
class DetectCoaddSourcesTask(PipelineTask):
    """Detect sources on a single-filter coadd.

    Warping individual visits into a coadd correlates the noise between
    neighbouring pixels, so the stored variance plane underestimates the
    observed variance.  Before detecting, the variance plane is therefore
    rescaled by an empirically measured factor -- an approximation to
    propagating the full covariance matrix that is simple and works well in
    practice.  Detection itself is delegated to the ``detection`` subtask,
    which produces the parent footprints consumed by the downstream
    merge/measure/forced-photometry stages of multi-band processing.

    Parameters
    ----------
    schema : `lsst.afw.table.Schema`, optional
        Initial schema for the output catalog, modified in place to include
        all fields set by this task.  If None, the source minimal schema
        will be used.
    **kwargs
        Additional keyword arguments.
    """

    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig

    def __init__(self, schema=None, **kwargs):
        # The PipelineTask multiple-inheritance init chain has been reviewed;
        # a plain super() call is the intended behaviour here.
        super().__init__(**kwargs)
        self.schema = afwTable.SourceTable.makeMinimalSchema() if schema is None else schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")

        self.detectionSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        # Derive a deterministic source-id factory and RNG seed from the
        # (tract, patch, band) data id.
        idInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch_band")
        inputs["idFactory"] = idInfo.makeSourceIdFactory()
        inputs["expId"] = idInfo.expId
        butlerQC.put(self.run(**inputs), outputRefs)

    def run(self, exposure, idFactory, expId):
        """Rescale the variance plane, then detect sources on an exposure.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            Exposure on which to detect (may be background-subtracted and
            scaled, depending on configuration).
        idFactory : `lsst.afw.table.IdFactory`
            IdFactory to set source identifiers.
        expId : `int`
            Exposure identifier (integer) for RNG seed.

        Returns
        -------
        result : `lsst.pipe.base.Struct`
            Struct with ``outputSources`` (the detection catalog,
            `lsst.afw.table.SourceCatalog`), ``outputBackgrounds`` (an
            `lsst.afw.math.BackgroundList`), and ``outputExposure`` (the
            input exposure, updated in place by detection).
        """
        if self.config.doScaleVariance:
            # Record the empirical rescaling factor in the exposure metadata.
            scaleFactor = self.scaleVariance.run(exposure.maskedImage)
            exposure.getMetadata().add("VARIANCE_SCALE", scaleFactor)
        sourceTable = afwTable.SourceTable.make(self.schema, idFactory)
        detections = self.detection.run(sourceTable, exposure, expId=expId)
        backgrounds = afwMath.BackgroundList()
        # The detection result may or may not carry temporary-background
        # estimates; collect whatever is present.
        for background in getattr(detections.fpSets, "background", None) or ():
            backgrounds.append(background)
        return Struct(outputSources=detections.sources,
                      outputBackgrounds=backgrounds,
                      outputExposure=exposure)
230##############################################################################################################
class DeblendCoaddSourcesConfig(Config):
    """Configuration parameters for the `DeblendCoaddSourcesTask`.
    """

    singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
                                          doc="Deblend sources separately in each band")
    multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
                                         doc="Deblend sources simultaneously across bands")
    simultaneous = Field(dtype=bool,
                         default=True,
                         doc="Simultaneously deblend all bands? "
                             "True uses `multibandDeblend` while False uses `singleBandDeblend`")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    hasFakes = Field(dtype=bool,
                     default=False,
                     doc="Should be set to True if fake sources have been inserted into the input data.")

    def setDefaults(self):
        # Use super() for consistency with the other config classes in this
        # module (this previously called Config.setDefaults(self) directly;
        # the behaviour is identical since Config is the sole base class).
        super().setDefaults()
        # Keep all merged peaks so per-band catalogs stay consistent even
        # when a peak is undetected in a given band.
        self.singleBandDeblend.propagateAllPeaks = True
class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep",
                                                             "deblendedCatalog": "deblendedFlux"}):
    """Butler connections for `MeasureMergedCoaddSourcesTask`.

    ``__init__`` prunes optional connections based on the configuration:
    flag-propagation inputs, the deblender catalogs, and the match outputs.
    """

    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        name="ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects. Only used in legacy PropagateVisitFlagsTask.",
        name="src",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True
    )
    sourceTableHandles = cT.Input(
        doc=("Source tables that are derived from the ``CalibrateTask`` sources. "
             "These tables contain astrometry and photometry flags, and optionally "
             "PSF flags."),
        name="sourceTable_visit",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    finalizedSourceTableHandles = cT.Input(
        doc=("Finalized source tables from ``FinalizeCalibrationTask``. These "
             "tables contain PSF flags from the finalized PSF estimation."),
        name="finalized_src_table",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use."
             "If the single band deblender was used this should be 'deblendedFlux."
             "If the multi-band deblender was used this should be 'deblendedModel, "
             "or deblendedFlux if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'"),
        name="{inputCoaddName}Coadd_{deblendedCatalog}",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    scarletCatalog = cT.Input(
        doc="Catalogs produced by multiband deblending",
        name="{inputCoaddName}Coadd_deblendedCatalog",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    scarletModels = cT.Input(
        doc="Multiband scarlet models produced by the deblender",
        name="{inputCoaddName}Coadd_scarletModelData",
        storageClass="ScarletModelData",
        dimensions=("tract", "patch", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        """Remove connections that the given config does not require."""
        super().__init__(config=config)
        if config.doPropagateFlags is False:
            # No flag propagation: none of the per-visit inputs are needed.
            self.inputs -= set(("visitCatalogs",))
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))
        elif config.propagateFlags.target == PropagateSourceFlagsTask:
            # New PropagateSourceFlagsTask does not use visitCatalogs.
            self.inputs -= set(("visitCatalogs",))
            # Check for types of flags required.
            if not config.propagateFlags.source_flags:
                self.inputs -= set(("sourceTableHandles",))
            if not config.propagateFlags.finalized_source_flags:
                self.inputs -= set(("finalizedSourceTableHandles",))
        else:
            # Deprecated PropagateVisitFlagsTask uses visitCatalogs.
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))

        if config.inputCatalog == "deblendedCatalog":
            # Using the scarlet catalog connection; drop the templated one.
            self.inputs -= set(("inputCatalog",))
            if not config.doAddFootprints:
                self.inputs -= set(("scarletModels",))
        else:
            # Not using the multiband deblender outputs: remove the scarlet
            # connections.  (Bug fix: this previously removed
            # set(("deblendedCatalog")) -- a set of *characters*, and the
            # wrong name besides -- so ``scarletCatalog`` was never pruned.)
            self.inputs -= set(("scarletCatalog",))
            self.inputs -= set(("scarletModels",))

        if config.doMatchSources is False:
            self.outputs -= set(("matchResult",))

        if config.doWriteMatchesDenormalized is False:
            self.outputs -= set(("denormMatches",))
class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """Configuration parameters for the MeasureMergedCoaddSourcesTask
    """

    inputCatalog = ChoiceField(
        dtype=str,
        default="deblendedCatalog",
        allowed={
            "deblendedCatalog": "Output catalog from ScarletDeblendTask",
            "deblendedFlux": "Output catalog from SourceDeblendTask",
            "mergeDet": "The merged detections before deblending."
        },
        doc="The name of the input catalog.",
    )
    doAddFootprints = Field(dtype=bool,
                            default=True,
                            doc="Whether or not to add footprints to the input catalog from scarlet models. "
                                "This should be true whenever using the multi-band deblender, "
                                "otherwise this should be False.")
    doConserveFlux = Field(dtype=bool, default=True,
                           doc="Whether to use the deblender models as templates to re-distribute the flux "
                               "from the 'exposure' (True), or to perform measurements on the deblender "
                               "model footprints.")
    doStripFootprints = Field(dtype=bool, default=True,
                              doc="Whether to strip footprints from the output catalog before "
                                  "saving to disk. "
                                  "This is usually done when using scarlet models to save disk space.")
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc="Propagate source flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str,
        default="raise",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )

    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    @property
    def refObjLoader(self):
        # Convenience alias: the reference-object loader config lives on the
        # ``match`` subtask config.
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        # Extra measurement plugins useful on coadds (input counts, local
        # variance/photoCalib/WCS evaluated at the source position).
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_Variance',
                                           'base_LocalPhotoCalib',
                                           'base_LocalWcs']
        # Coadd-specific mask planes to test for in pixel-flag plugins.
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                       'INEXACT_PSF']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                     'INEXACT_PSF']
class MeasureMergedCoaddSourcesTask(PipelineTask):
    """Deblend sources from main catalog in each coadd separately and measure.

    Use peaks and footprints from a master catalog to perform deblending and
    measurement in each coadd.

    Given a master input catalog of sources (peaks and footprints) or deblender
    outputs (including a HeavyFootprint in each band), measure each source on
    the coadd. Repeating this procedure with the same master catalog across
    multiple coadds will generate a consistent set of child sources.

    The deblender retains all peaks and deblends any missing peaks (dropouts in
    that band) as PSFs. Source properties are measured and the is-primary
    flag (indicating sources with no children) is set. Visit flags are
    propagated to the coadd sources.

    Optionally, we can match the coadd sources to an external reference
    catalog.

    After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we
    have a set of per-band catalogs. The next stage in the multi-band
    processing procedure will merge these measurements into a suitable catalog
    for driving forced photometry.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler` or `None`, optional
        A butler used to read the input schemas from disk or construct the reference
        catalog loader, if schema or peakSchema or refObjLoader is None.
    schema : `lsst.afw.table.Schema`, optional
        The schema of the merged detection catalog used as input to this one.
    peakSchema : `lsst.afw.table.Schema`, optional
        The schema of the PeakRecords in the Footprints in the merged detection catalog.
    refObjLoader : `lsst.meas.algorithms.ReferenceObjectLoader`, optional
        An instance of LoadReferenceObjectsTasks that supplies an external reference
        catalog. May be None if the loader can be constructed from the butler argument or all steps
        requiring a reference catalog are disabled.
    initInputs : `dict`, optional
        Dictionary that can contain a key ``inputSchema`` containing the
        input schema. If present will override the value of ``schema``.
    **kwargs
        Additional keyword arguments.
    """

    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig

    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        super().__init__(**kwargs)
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        if initInputs is not None:
            # Schema supplied by the pipeline framework takes precedence.
            schema = initInputs['inputSchema'].schema
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
        # Map the input schema through unchanged; subtasks below extend the
        # output schema with their own fields.
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        self.outputSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        # Pop the reference-catalog handles unconditionally so they are never
        # forwarded to run(); only build and attach the loader when matching
        # is enabled, since the ``match`` subtask exists only in that case
        # (bug fix: this was previously done unconditionally and raised
        # AttributeError when doMatchSources was False).
        refCatHandles = inputs.pop('refCat')
        if self.config.doMatchSources:
            refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                                 refCatHandles,
                                                 name=self.config.connections.refCat,
                                                 config=self.config.refObjLoader,
                                                 log=self.log)
            self.match.setRefObjLoader(refObjLoader)

        # Set psfcache
        # move this to run after gen2 deprecation
        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        # Get unique integer ID for IdFactory and RNG seeds
        exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch")
        inputs['exposureId'] = exposureIdInfo.expId
        idFactory = exposureIdInfo.makeSourceIdFactory()
        # Transform inputCatalog
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        # Load the correct input catalog
        if "scarletCatalog" in inputs:
            inputCatalog = inputs.pop("scarletCatalog")
            catalogRef = inputRefs.scarletCatalog
        else:
            inputCatalog = inputs.pop("inputCatalog")
            catalogRef = inputRefs.inputCatalog
        sources.extend(inputCatalog, self.schemaMapper)
        del inputCatalog
        # Add the HeavyFootprints to the deblended sources
        if self.config.doAddFootprints:
            modelData = inputs.pop('scarletModels')
            if self.config.doConserveFlux:
                redistributeImage = inputs['exposure'].image
            else:
                redistributeImage = None
            modelData.updateCatalogFootprints(
                catalog=sources,
                band=inputRefs.exposure.dataId["band"],
                psfModel=inputs['exposure'].getPsf(),
                redistributeImage=redistributeImage,
                removeScarletData=True,
            )
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.
        inputs['sources'] = sources

        skyMap = inputs.pop('skyMap')
        tractNumber = catalogRef.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(catalogRef.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                ccdInputs = inputs["exposure"].getInfo().getCoaddInputs().ccds
                inputs["ccdInputs"] = ccdInputs

                if "sourceTableHandles" in inputs:
                    sourceTableHandles = inputs.pop("sourceTableHandles")
                    sourceTableHandleDict = {handle.dataId["visit"]: handle
                                             for handle in sourceTableHandles}
                    inputs["sourceTableHandleDict"] = sourceTableHandleDict
                if "finalizedSourceTableHandles" in inputs:
                    finalizedSourceTableHandles = inputs.pop("finalizedSourceTableHandles")
                    finalizedSourceTableHandleDict = {handle.dataId["visit"]: handle
                                                      for handle in finalizedSourceTableHandles}
                    inputs["finalizedSourceTableHandleDict"] = finalizedSourceTableHandleDict
            else:
                # Deprecated legacy version
                # Filter out any visit catalog that is not coadd inputs
                ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
                visitKey = ccdInputs.schema.find("visit").key
                ccdKey = ccdInputs.schema.find("ccd").key
                inputVisitIds = set()
                ccdRecordsWcs = {}
                for ccdRecord in ccdInputs:
                    visit = ccdRecord.get(visitKey)
                    ccd = ccdRecord.get(ccdKey)
                    inputVisitIds.add((visit, ccd))
                    ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

                inputCatalogsToKeep = []
                inputCatalogWcsUpdate = []
                for i, dataRef in enumerate(inputRefs.visitCatalogs):
                    key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                    if key in inputVisitIds:
                        inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                        inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
                inputs['visitCatalogs'] = inputCatalogsToKeep
                inputs['wcsUpdates'] = inputCatalogWcsUpdate
                inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        # NOTE(review): the original code bound ``outputs.outputSources`` to a
        # local here under a "strip HeavyFootprints to save space" comment but
        # never acted on it, and ``config.doStripFootprints`` is unused in this
        # task -- presumably stripping happens (or should happen) elsewhere;
        # confirm before relying on it.
        butlerQC.put(outputs, outputRefs)

    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
            butler=None, sourceTableHandleDict=None, finalizedSourceTableHandleDict=None):
        """Run measurement algorithms on the input exposure, and optionally populate the
        resulting catalog with extra information.

        Parameters
        ----------
        exposure : `lsst.afw.exposure.Exposure`
            The input exposure on which measurements are to be performed.
        sources : `lsst.afw.table.SourceCatalog`
            A catalog built from the results of merged detections, or
            deblender outputs.
        skyInfo : `lsst.pipe.base.Struct`
            A struct containing information about the position of the input exposure within
            a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box.
        exposureId : `int` or `bytes`
            Packed unique number or bytes unique to the input exposure.
        ccdInputs : `lsst.afw.table.ExposureCatalog`, optional
            Catalog containing information on the individual visits which went into making
            the coadd.
        visitCatalogs : `list` of `lsst.afw.table.SourceCatalogs`, optional
            A list of source catalogs corresponding to measurements made on the individual
            visits which went into the input exposure. If None and butler is `None` then
            the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        wcsUpdates : `list` of `lsst.afw.geom.SkyWcs`, optional
            If visitCatalogs is not `None` this should be a list of wcs objects which correspond
            to the input visits. Used to put all coordinates to common system. If `None` and
            butler is `None` then the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        butler : `None`, optional
            This was a Gen2 butler used to load visit catalogs.
            No longer used and should not be set. Will be removed in the
            future.
        sourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for sourceTable_visit handles (key is visit) for propagating flags.
            These tables are derived from the ``CalibrateTask`` sources, and contain
            astrometry and photometry flags, and optionally PSF flags.
        finalizedSourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for finalized_src_table handles (key is visit) for propagating flags.
            These tables are derived from ``FinalizeCalibrationTask`` and contain
            PSF flags from the finalized PSF estimation.

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results of running measurement task. Will contain the catalog in the
            sources attribute. Optionally will have results of matching to a
            reference catalog in the matchResults attribute, and denormalized
            matches in the denormMatches attribute.
        """
        if butler is not None:
            warnings.warn("The 'butler' parameter is no longer used and can be safely removed.",
                          category=FutureWarning, stacklevel=2)
            butler = None

        self.measurement.run(sources, exposure, exposureId=exposureId)

        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=sources,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        # TODO DM-11568: this contiguous check-and-copy could go away if we
        # reserve enough space during SourceDetection and/or SourceDeblend.
        # NOTE: sourceSelectors require contiguous catalogs, so ensure
        # contiguity now, so views are preserved from here on.
        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
                                 patchInfo=skyInfo.patchInfo)
        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                self.propagateFlags.run(
                    sources,
                    ccdInputs,
                    sourceTableHandleDict,
                    finalizedSourceTableHandleDict
                )
            else:
                # Legacy deprecated version
                self.propagateFlags.run(
                    butler,
                    sources,
                    ccdInputs,
                    exposure.getWcs(),
                    visitCatalogs,
                    wcsUpdates
                )

        results = Struct()

        if self.config.doMatchSources:
            matchResult = self.match.run(sources, exposure.getInfo().getFilter().bandLabel)
            matches = afwTable.packMatches(matchResult.matches)
            matches.table.setMetadata(matchResult.matchMeta)
            results.matchResult = matches
            if self.config.doWriteMatchesDenormalized:
                if matchResult.matches:
                    denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
                else:
                    self.log.warning("No matches, so generating dummy denormalized matches file")
                    denormMatches = afwTable.BaseCatalog(afwTable.Schema())
                    denormMatches.setMetadata(PropertyList())
                    denormMatches.getMetadata().add("COMMENT",
                                                    "This catalog is empty because no matches were found.")
                # Bug fix: a second, unguarded copy of this assignment used to
                # follow the if-block and would raise NameError whenever
                # doWriteMatchesDenormalized was False; assign exactly once,
                # inside the guard.
                results.denormMatches = denormMatches

        results.outputSources = sources
        return results