Coverage for python/lsst/pipe/tasks/multiBand.py: 27%
287 statements
« prev ^ index » next coverage.py v6.5.0, created at 2022-11-16 01:24 -0800
« prev ^ index » next coverage.py v6.5.0, created at 2022-11-16 01:24 -0800
1# This file is part of pipe_tasks.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
22__all__ = ["DetectCoaddSourcesConfig", "DetectCoaddSourcesTask"]
24import warnings
26from lsst.pipe.base import (Struct, PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
27import lsst.pipe.base.connectionTypes as cT
28from lsst.pex.config import Config, Field, ConfigurableField, ChoiceField
29from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader, ScaleVarianceTask
30from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
31from lsst.meas.deblender import SourceDeblendTask
32from lsst.meas.extensions.scarlet import ScarletDeblendTask
33from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
34from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
35from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
36from lsst.pipe.tasks.propagateSourceFlags import PropagateSourceFlagsTask
37import lsst.afw.table as afwTable
38import lsst.afw.math as afwMath
39from lsst.daf.base import PropertyList
40from lsst.skymap import BaseSkyMap
41from lsst.obs.base import ExposureIdInfo
43# NOTE: these imports are a convenience so multiband users only have to import this file.
44from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401
45from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401
46from .multiBandUtils import CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401
47from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401
48from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401
49from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401
50from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401
53"""
54New set types:
55* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
56* deepCoadd_mergeDet: merged detections (tract, patch)
57* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
58* deepCoadd_ref: reference sources (tract, patch)
59All of these have associated *_schema catalogs that require no data ID and hold no records.
61In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
62the mergeDet, meas, and ref dataset Footprints:
63* deepCoadd_peak_schema
64"""
67##############################################################################################################
class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
    """Butler I/O connections for `DetectCoaddSourcesTask`.

    All per-band datasets share the (tract, patch, band, skymap)
    dimensions; the dataset-name templates default to the "deep" coadd.
    """
    # Schema of the output detection catalog, persisted as an init-output.
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """Configuration parameters for the DetectCoaddSourcesTask
    """

    # Empirical rescaling of the coadd variance plane before detection;
    # performed by the scaleVariance subtask when enabled.
    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    # Both fake-source fields are deprecated no-ops kept only for config
    # backward compatibility until v24.
    doInsertFakes = Field(dtype=bool, default=False,
                          doc="Run fake sources injection task",
                          deprecated=("doInsertFakes is no longer supported. This config will be removed "
                                      "after v24."))
    insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
                                    doc="Injection of fake sources for testing "
                                        "purposes (must be retargeted)",
                                    deprecated=("insertFakes is no longer supported. This config will "
                                                "be removed after v24."))
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )

    def setDefaults(self):
        """Tune the detection subtask defaults for coadd inputs."""
        super().setDefaults()
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True  # Suppress large footprints that overwhelm the deblender
class DetectCoaddSourcesTask(PipelineTask):
    """Detect sources on a single-band coadd.

    Warping individual visits onto the coadd grid correlates the noise
    between neighbouring pixels, so the nominal variance plane
    underestimates the observed pixel variance.  Before detecting, this
    task optionally rescales the variance plane to match the empirically
    measured noise (an approximation to propagating the full covariance
    matrix), then delegates detection and footprint generation to the
    "detection" subtask.

    This task is meant to run once per band after coadd assembly; later
    stages of the multi-band pipeline merge the per-band detections and
    eventually perform forced photometry.

    Parameters
    ----------
    schema : `lsst.afw.table.Schema`, optional
        Initial schema for the output catalog, modified in place to include
        all fields set by this task.  If `None`, the minimal source schema
        is used.
    **kwargs
        Additional keyword arguments forwarded to `PipelineTask`.
    """

    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig
    getSchemaCatalogs = _makeGetSchemaCatalogs("det")

    def __init__(self, schema=None, **kwargs):
        # Plain super() is intentional: the PipelineTask init chain has been
        # reviewed for this multiple-inheritance layout.
        super().__init__(**kwargs)
        self.schema = afwTable.SourceTable.makeMinimalSchema() if schema is None else schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")
        # Publish the final catalog schema as an init-output.
        self.detectionSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        # Derive a deterministic source-id factory and RNG seed from the data id.
        idInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch_band")
        inputs["idFactory"] = idInfo.makeSourceIdFactory()
        inputs["expId"] = idInfo.expId
        butlerQC.put(self.run(**inputs), outputRefs)

    def run(self, exposure, idFactory, expId):
        """Detect sources on an exposure, optionally rescaling its variance.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            Exposure on which to detect; modified in place (metadata and
            detection mask planes).
        idFactory : `lsst.afw.table.IdFactory`
            Factory used to assign source identifiers.
        expId : `int`
            Exposure identifier, used as an RNG seed by the detection subtask.

        Returns
        -------
        result : `lsst.pipe.base.Struct`
            Struct with attributes ``outputSources`` (the detection catalog,
            `lsst.afw.table.SourceCatalog`), ``outputBackgrounds``
            (`lsst.afw.math.BackgroundList`) and ``outputExposure`` (the
            input exposure, post-detection).
        """
        if self.config.doScaleVariance:
            scaleFactor = self.scaleVariance.run(exposure.maskedImage)
            # Record the applied factor so consumers can recover the original plane.
            exposure.getMetadata().add("VARIANCE_SCALE", scaleFactor)
        sourceTable = afwTable.SourceTable.make(self.schema, idFactory)
        detRes = self.detection.run(sourceTable, exposure, expId=expId)
        bgList = afwMath.BackgroundList()
        # The detection result may or may not carry background models.
        detBackgrounds = getattr(detRes.fpSets, "background", None)
        if detBackgrounds:
            for background in detBackgrounds:
                bgList.append(background)
        return Struct(outputSources=detRes.sources,
                      outputBackgrounds=bgList,
                      outputExposure=exposure)
231##############################################################################################################
class DeblendCoaddSourcesConfig(Config):
    """Configuration parameters for the `DeblendCoaddSourcesTask`.
    """

    singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
                                          doc="Deblend sources separately in each band")
    multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
                                         doc="Deblend sources simultaneously across bands")
    # Selects which of the two deblender subtasks above is actually run.
    simultaneous = Field(dtype=bool,
                         default=True,
                         doc="Simultaneously deblend all bands? "
                             "True uses `multibandDeblend` while False uses `singleBandDeblend`")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    hasFakes = Field(dtype=bool,
                     default=False,
                     doc="Should be set to True if fake sources have been inserted into the input data.")

    def setDefaults(self):
        Config.setDefaults(self)
        # Retain every peak from the merged detection catalog when
        # deblending band-by-band.
        self.singleBandDeblend.propagateAllPeaks = True
class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep",
                                                             "deblendedCatalog": "deblendedFlux"}):
    """Butler I/O connections for `MeasureMergedCoaddSourcesTask`.

    The optional inputs and outputs declared here are pruned in
    ``__init__`` according to the task configuration (flag propagation
    strategy, deblender choice, and reference-catalog matching).
    """
    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        name="ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects. Only used in legacy PropagateVisitFlagsTask.",
        name="src",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True
    )
    sourceTableHandles = cT.Input(
        doc=("Source tables that are derived from the ``CalibrateTask`` sources. "
             "These tables contain astrometry and photometry flags, and optionally "
             "PSF flags."),
        name="sourceTable_visit",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    finalizedSourceTableHandles = cT.Input(
        doc=("Finalized source tables from ``FinalizeCalibrationTask``. These "
             "tables contain PSF flags from the finalized PSF estimation."),
        name="finalized_src_table",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use."
             "If the single band deblender was used this should be 'deblendedFlux."
             "If the multi-band deblender was used this should be 'deblendedModel, "
             "or deblendedFlux if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'"),
        name="{inputCoaddName}Coadd_{deblendedCatalog}",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    scarletCatalog = cT.Input(
        doc="Catalogs produced by multiband deblending",
        name="{inputCoaddName}Coadd_deblendedCatalog",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    scarletModels = cT.Input(
        doc="Multiband scarlet models produced by the deblender",
        name="{inputCoaddName}Coadd_scarletModelData",
        storageClass="ScarletModelData",
        dimensions=("tract", "patch", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        """Prune optional connections based on the task configuration."""
        super().__init__(config=config)
        if not config.doPropagateFlags:
            # No flag propagation at all: none of the visit-level inputs are needed.
            self.inputs -= {"visitCatalogs", "sourceTableHandles", "finalizedSourceTableHandles"}
        elif config.propagateFlags.target == PropagateSourceFlagsTask:
            # New PropagateSourceFlagsTask does not use visitCatalogs.
            self.inputs -= {"visitCatalogs"}
            # Only request the parquet tables whose flags are actually configured.
            if not config.propagateFlags.source_flags:
                self.inputs -= {"sourceTableHandles"}
            if not config.propagateFlags.finalized_source_flags:
                self.inputs -= {"finalizedSourceTableHandles"}
        else:
            # Deprecated PropagateVisitFlagsTask uses visitCatalogs instead.
            self.inputs -= {"sourceTableHandles", "finalizedSourceTableHandles"}

        if config.inputCatalog == "deblendedCatalog":
            # Multiband (scarlet) deblender output: read scarletCatalog
            # (and optionally scarletModels) instead of inputCatalog.
            self.inputs -= {"inputCatalog"}
            if not config.doAddFootprints:
                self.inputs -= {"scarletModels"}
        else:
            # Single-band deblender (or no deblending): the scarlet
            # connections are not needed.
            # BUGFIX: the original ``set(("deblendedCatalog"))`` was missing
            # the tuple comma, so it subtracted the *characters* of the
            # string; the connection to drop is named "scarletCatalog".
            self.inputs -= {"scarletCatalog", "scarletModels"}

        if not config.doMatchSources:
            self.outputs -= {"matchResult"}
        if not config.doWriteMatchesDenormalized:
            self.outputs -= {"denormMatches"}
class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """Configuration parameters for the MeasureMergedCoaddSourcesTask
    """
    # Which upstream catalog feeds the measurement; the corresponding butler
    # connections are selected in MeasureMergedCoaddSourcesConnections.__init__.
    inputCatalog = ChoiceField(
        dtype=str,
        default="deblendedCatalog",
        allowed={
            "deblendedCatalog": "Output catalog from ScarletDeblendTask",
            "deblendedFlux": "Output catalog from SourceDeblendTask",
            "mergeDet": "The merged detections before deblending."
        },
        doc="The name of the input catalog.",
    )
    doAddFootprints = Field(dtype=bool,
                            default=True,
                            doc="Whether or not to add footprints to the input catalog from scarlet models. "
                                "This should be true whenever using the multi-band deblender, "
                                "otherwise this should be False.")
    doConserveFlux = Field(dtype=bool, default=True,
                           doc="Whether to use the deblender models as templates to re-distribute the flux "
                               "from the 'exposure' (True), or to perform measurements on the deblender "
                               "model footprints.")
    doStripFootprints = Field(dtype=bool, default=True,
                              doc="Whether to strip footprints from the output catalog before "
                                  "saving to disk. "
                                  "This is usually done when using scarlet models to save disk space.")
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc="Propagate source flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str,
        default="raise",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )

    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    @property
    def refObjLoader(self):
        # Convenience alias: the reference-object loader config lives on the
        # match subtask.
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_Variance',
                                           'base_LocalPhotoCalib',
                                           'base_LocalWcs']
        # Coadd-specific mask planes to flag when they fall anywhere in, or at
        # the center of, a source footprint.
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                       'INEXACT_PSF']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                     'INEXACT_PSF']
class MeasureMergedCoaddSourcesTask(PipelineTask):
    """Deblend sources from main catalog in each coadd separately and measure.

    Use peaks and footprints from a master catalog to perform deblending and
    measurement in each coadd.

    Given a master input catalog of sources (peaks and footprints) or deblender
    outputs (including a HeavyFootprint in each band), measure each source on
    the coadd. Repeating this procedure with the same master catalog across
    multiple coadds will generate a consistent set of child sources.

    The deblender retains all peaks and deblends any missing peaks (dropouts in
    that band) as PSFs. Source properties are measured and the is-primary
    flag (indicating sources with no children) is set. Visit flags are
    propagated to the coadd sources.

    Optionally, we can match the coadd sources to an external reference
    catalog.

    After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we
    have a set of per-band catalogs. The next stage in the multi-band
    processing procedure will merge these measurements into a suitable catalog
    for driving forced photometry.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler` or `None`, optional
        A butler used to read the input schemas from disk or construct the reference
        catalog loader, if schema or peakSchema or refObjLoader is None.
    schema : `lsst.afw.table.Schema`, optional
        The schema of the merged detection catalog used as input to this one.
    peakSchema : `lsst.afw.table.Schema`, optional
        The schema of the PeakRecords in the Footprints in the merged detection catalog.
    refObjLoader : `lsst.meas.algorithms.ReferenceObjectLoader`, optional
        An instance of LoadReferenceObjectsTasks that supplies an external reference
        catalog. May be None if the loader can be constructed from the butler argument or all steps
        requiring a reference catalog are disabled.
    initInputs : `dict`, optional
        Dictionary that can contain a key ``inputSchema`` containing the
        input schema. If present will override the value of ``schema``.
    **kwargs
        Additional keyword arguments.
    """

    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig
    getSchemaCatalogs = _makeGetSchemaCatalogs("meas")

    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        super().__init__(**kwargs)
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        if initInputs is not None:
            # A schema supplied via initInputs (pipeline execution) overrides
            # any schema argument.
            schema = initInputs['inputSchema'].schema
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
        # Map the input schema onto an output schema that measurement plugins
        # will extend with their own fields.
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        # Publish the final schema as an init-output.
        self.outputSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        # The refCat prerequisite is always retrieved; pop it from the run()
        # kwargs even when matching is disabled.
        refCat = inputs.pop('refCat')
        if self.config.doMatchSources:
            # BUGFIX: the loader was previously configured unconditionally,
            # but self.match only exists when doMatchSources is True.
            refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                                 refCat,
                                                 name=self.config.connections.refCat,
                                                 config=self.config.refObjLoader,
                                                 log=self.log)
            self.match.setRefObjLoader(refObjLoader)

        # Set psfcache
        # move this to run after gen2 deprecation
        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        # Get unique integer ID for IdFactory and RNG seeds
        exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch")
        inputs['exposureId'] = exposureIdInfo.expId
        idFactory = exposureIdInfo.makeSourceIdFactory()
        # Transform inputCatalog
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        # Load the correct input catalog: scarletCatalog if the multiband
        # deblender produced it, the configured inputCatalog otherwise.
        if "scarletCatalog" in inputs:
            inputCatalog = inputs.pop("scarletCatalog")
            catalogRef = inputRefs.scarletCatalog
        else:
            inputCatalog = inputs.pop("inputCatalog")
            catalogRef = inputRefs.inputCatalog
        sources.extend(inputCatalog, self.schemaMapper)
        del inputCatalog
        # Add the HeavyFootprints to the deblended sources.  Guard on the
        # presence of the connection: it is pruned when the multiband
        # deblender was not used, in which case there is nothing to pop.
        if self.config.doAddFootprints and "scarletModels" in inputs:
            modelData = inputs.pop('scarletModels')
            if self.config.doConserveFlux:
                redistributeImage = inputs['exposure'].image
            else:
                redistributeImage = None
            modelData.updateCatalogFootprints(
                catalog=sources,
                band=inputRefs.exposure.dataId["band"],
                psfModel=inputs['exposure'].getPsf(),
                redistributeImage=redistributeImage,
                removeScarletData=True,
            )
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.
        inputs['sources'] = sources

        # Build the skyInfo struct run() expects from the skyMap and the
        # tract/patch of the catalog we just loaded.
        skyMap = inputs.pop('skyMap')
        tractNumber = catalogRef.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(catalogRef.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                ccdInputs = inputs["exposure"].getInfo().getCoaddInputs().ccds
                inputs["ccdInputs"] = ccdInputs

                if "sourceTableHandles" in inputs:
                    sourceTableHandles = inputs.pop("sourceTableHandles")
                    sourceTableHandleDict = {handle.dataId["visit"]: handle
                                             for handle in sourceTableHandles}
                    inputs["sourceTableHandleDict"] = sourceTableHandleDict
                if "finalizedSourceTableHandles" in inputs:
                    finalizedSourceTableHandles = inputs.pop("finalizedSourceTableHandles")
                    finalizedSourceTableHandleDict = {handle.dataId["visit"]: handle
                                                      for handle in finalizedSourceTableHandles}
                    inputs["finalizedSourceTableHandleDict"] = finalizedSourceTableHandleDict
            else:
                # Deprecated legacy version
                # Filter out any visit catalog that is not coadd inputs
                ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
                visitKey = ccdInputs.schema.find("visit").key
                ccdKey = ccdInputs.schema.find("ccd").key
                inputVisitIds = set()
                ccdRecordsWcs = {}
                for ccdRecord in ccdInputs:
                    visit = ccdRecord.get(visitKey)
                    ccd = ccdRecord.get(ccdKey)
                    inputVisitIds.add((visit, ccd))
                    ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

                inputCatalogsToKeep = []
                inputCatalogWcsUpdate = []
                for i, dataRef in enumerate(inputRefs.visitCatalogs):
                    key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                    if key in inputVisitIds:
                        inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                        inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
                inputs['visitCatalogs'] = inputCatalogsToKeep
                inputs['wcsUpdates'] = inputCatalogWcsUpdate
                inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)

    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
            butler=None, sourceTableHandleDict=None, finalizedSourceTableHandleDict=None):
        """Run measurement algorithms on the input exposure, and optionally populate the
        resulting catalog with extra information.

        Parameters
        ----------
        exposure : `lsst.afw.exposure.Exposure`
            The input exposure on which measurements are to be performed.
        sources : `lsst.afw.table.SourceCatalog`
            A catalog built from the results of merged detections, or
            deblender outputs.
        skyInfo : `lsst.pipe.base.Struct`
            A struct containing information about the position of the input exposure within
            a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box.
        exposureId : `int` or `bytes`
            Packed unique number or bytes unique to the input exposure.
        ccdInputs : `lsst.afw.table.ExposureCatalog`, optional
            Catalog containing information on the individual visits which went into making
            the coadd.
        visitCatalogs : `list` of `lsst.afw.table.SourceCatalogs`, optional
            A list of source catalogs corresponding to measurements made on the individual
            visits which went into the input exposure. If None and butler is `None` then
            the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        wcsUpdates : `list` of `lsst.afw.geom.SkyWcs`, optional
            If visitCatalogs is not `None` this should be a list of wcs objects which correspond
            to the input visits. Used to put all coordinates to common system. If `None` and
            butler is `None` then the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        butler : `None`, optional
            This was a Gen2 butler used to load visit catalogs.
            No longer used and should not be set. Will be removed in the
            future.
        sourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for sourceTable_visit handles (key is visit) for propagating flags.
            These tables are derived from the ``CalibrateTask`` sources, and contain
            astrometry and photometry flags, and optionally PSF flags.
        finalizedSourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for finalized_src_table handles (key is visit) for propagating flags.
            These tables are derived from ``FinalizeCalibrationTask`` and contain
            PSF flags from the finalized PSF estimation.

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results of running measurement task. Will contain the catalog in the
            sources attribute. Optionally will have results of matching to a
            reference catalog in the matchResults attribute, and denormalized
            matches in the denormMatches attribute.
        """
        if butler is not None:
            warnings.warn("The 'butler' parameter is no longer used and can be safely removed.",
                          category=FutureWarning, stacklevel=2)
            butler = None

        self.measurement.run(sources, exposure, exposureId=exposureId)

        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=sources,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        # TODO DM-11568: this contiguous check-and-copy could go away if we
        # reserve enough space during SourceDetection and/or SourceDeblend.
        # NOTE: sourceSelectors require contiguous catalogs, so ensure
        # contiguity now, so views are preserved from here on.
        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
                                 patchInfo=skyInfo.patchInfo)
        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                self.propagateFlags.run(
                    sources,
                    ccdInputs,
                    sourceTableHandleDict,
                    finalizedSourceTableHandleDict
                )
            else:
                # Legacy deprecated version
                self.propagateFlags.run(
                    butler,
                    sources,
                    ccdInputs,
                    exposure.getWcs(),
                    visitCatalogs,
                    wcsUpdates
                )

        results = Struct()

        if self.config.doMatchSources:
            matchResult = self.match.run(sources, exposure.getInfo().getFilter().bandLabel)
            matches = afwTable.packMatches(matchResult.matches)
            matches.table.setMetadata(matchResult.matchMeta)
            results.matchResult = matches
            if self.config.doWriteMatchesDenormalized:
                if matchResult.matches:
                    denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
                else:
                    self.log.warning("No matches, so generating dummy denormalized matches file")
                    denormMatches = afwTable.BaseCatalog(afwTable.Schema())
                    denormMatches.setMetadata(PropertyList())
                    denormMatches.getMetadata().add("COMMENT",
                                                    "This catalog is empty because no matches were found.")
                # BUGFIX: this assignment appeared twice in the original;
                # once is sufficient.
                results.denormMatches = denormMatches

        results.outputSources = sources
        return results