Coverage for python/lsst/pipe/tasks/multiBand.py: 30%
287 statements
« prev ^ index » next coverage.py v6.4.4, created at 2022-08-18 20:09 +0000
1#!/usr/bin/env python
2#
3# LSST Data Management System
4# Copyright 2008-2015 AURA/LSST.
5#
6# This product includes software developed by the
7# LSST Project (http://www.lsst.org/).
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the LSST License Statement and
20# the GNU General Public License along with this program. If not,
21# see <https://www.lsstcorp.org/LegalNotices/>.
22#
23import warnings
25from lsst.pipe.base import (Struct, PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
26import lsst.pipe.base.connectionTypes as cT
27from lsst.pex.config import Config, Field, ConfigurableField, ChoiceField
28from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader, ScaleVarianceTask
29from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
30from lsst.meas.deblender import SourceDeblendTask
31from lsst.meas.extensions.scarlet import ScarletDeblendTask
32from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
33from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
34from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
35from lsst.pipe.tasks.propagateSourceFlags import PropagateSourceFlagsTask
36import lsst.afw.table as afwTable
37import lsst.afw.math as afwMath
38from lsst.daf.base import PropertyList
39from lsst.skymap import BaseSkyMap
40from lsst.obs.base import ExposureIdInfo
42# NOTE: these imports are a convenience so multiband users only have to import this file.
43from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401
44from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401
45from .multiBandUtils import CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401
46from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401
47from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401
48from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401
49from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401
52"""
New dataset types:
54* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
55* deepCoadd_mergeDet: merged detections (tract, patch)
56* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
57* deepCoadd_ref: reference sources (tract, patch)
58All of these have associated *_schema catalogs that require no data ID and hold no records.
60In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
61the mergeDet, meas, and ref dataset Footprints:
62* deepCoadd_peak_schema
63"""
66##############################################################################################################
class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
    """Butler connections for `DetectCoaddSourcesTask`.

    The ``inputCoaddName``/``outputCoaddName`` templates (both ``deep`` by
    default) select which coadd dataset family is read and written.
    """
    # Init-output: schema of the detection catalog, written once at task
    # initialization rather than per-quantum.
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    # The coadd image on which detection is run.
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    # Background models accumulated during detection.
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap")
    )
    # Catalog of detected sources.
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap")
    )
    # The input exposure written back out after detection (the task may have
    # rescaled its variance plane and added metadata).
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """!
    @anchor DetectCoaddSourcesConfig_

    @brief Configuration parameters for the DetectCoaddSourcesTask
    """
    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    # Deprecated: fake-source injection is no longer performed by this task.
    doInsertFakes = Field(dtype=bool, default=False,
                          doc="Run fake sources injection task",
                          deprecated=("doInsertFakes is no longer supported. This config will be removed "
                                      "after v24."))
    insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
                                    doc="Injection of fake sources for testing "
                                        "purposes (must be retargeted)",
                                    deprecated=("insertFakes is no longer supported. This config will "
                                                "be removed after v24."))
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )

    def setDefaults(self):
        """Set detection defaults appropriate for detection on coadds."""
        super().setDefaults()
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True  # Suppress large footprints that overwhelm the deblender
137## @addtogroup LSST_task_documentation
138## @{
139## @page page_DetectCoaddSourcesTask DetectCoaddSourcesTask
140## @ref DetectCoaddSourcesTask_ "DetectCoaddSourcesTask"
141## @copybrief DetectCoaddSourcesTask
142## @}
class DetectCoaddSourcesTask(PipelineTask):
    """Detect sources on a coadd.

    Optionally rescales the variance plane of the input exposure to match
    the empirical noise before detecting, and collects any background
    models produced by the detection subtask.
    """
    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig
    getSchemaCatalogs = _makeGetSchemaCatalogs("det")

    def __init__(self, schema=None, **kwargs):
        """Create the detection (and optional variance-scaling) subtasks.

        Parameters
        ----------
        schema : `lsst.afw.table.Schema`, optional
            Initial schema for the output catalog, modified in place to
            include all fields set by this task.  If None, the source
            minimal schema is used.
        **kwargs
            Forwarded to `lsst.pipe.base.PipelineTask.__init__`.
        """
        # super() is used deliberately here to handle the multiple
        # inheritance of PipelineTasks; the init call tree has been
        # reviewed to ensure this works as intended.
        super().__init__(**kwargs)
        self.schema = afwTable.SourceTable.makeMinimalSchema() if schema is None else schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")

        # Init-output catalog carrying the final detection schema.
        self.detectionSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        # Unique integer ID for the IdFactory and the RNG seed.
        idInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch_band")
        inputs["idFactory"] = idInfo.makeSourceIdFactory()
        inputs["expId"] = idInfo.expId
        butlerQC.put(self.run(**inputs), outputRefs)

    def run(self, exposure, idFactory, expId):
        """Run detection on an exposure.

        First scale the variance plane to match the observed variance using
        the ``scaleVariance`` subtask (when configured), then invoke the
        ``detection`` subtask to detect sources.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            Exposure on which to detect; modified in place (variance plane
            may be rescaled and metadata added).
        idFactory : `lsst.afw.table.IdFactory`
            Factory used to assign source identifiers.
        expId : `int`
            Exposure identifier used as the RNG seed.

        Returns
        -------
        result : `lsst.pipe.base.Struct`
            Struct with ``outputSources`` (detection catalog),
            ``outputBackgrounds`` (background list) and ``outputExposure``
            (the post-detection exposure).
        """
        if self.config.doScaleVariance:
            scaleFactor = self.scaleVariance.run(exposure.maskedImage)
            exposure.getMetadata().add("VARIANCE_SCALE", scaleFactor)
        backgrounds = afwMath.BackgroundList()
        sourceTable = afwTable.SourceTable.make(self.schema, idFactory)
        detRes = self.detection.run(sourceTable, exposure, expId=expId)
        # Collect any background models produced during detection; the
        # attribute may be absent or empty depending on configuration.
        for bg in getattr(detRes.fpSets, "background", None) or ():
            backgrounds.append(bg)
        return Struct(outputSources=detRes.sources,
                      outputBackgrounds=backgrounds,
                      outputExposure=exposure)
212##############################################################################################################
class DeblendCoaddSourcesConfig(Config):
    """DeblendCoaddSourcesConfig

    Configuration parameters for the `DeblendCoaddSourcesTask`.
    """
    # Deblender used when ``simultaneous`` is False (one band at a time).
    singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
                                          doc="Deblend sources separately in each band")
    # Deblender used when ``simultaneous`` is True (all bands at once).
    multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
                                         doc="Deblend sources simultaneously across bands")
    simultaneous = Field(dtype=bool,
                         default=True,
                         doc="Simultaneously deblend all bands? "
                             "True uses `multibandDeblend` while False uses `singleBandDeblend`")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    hasFakes = Field(dtype=bool,
                     default=False,
                     doc="Should be set to True if fake sources have been inserted into the input data.")

    def setDefaults(self):
        """Propagate all peaks by default in the single-band deblender."""
        Config.setDefaults(self)
        self.singleBandDeblend.propagateAllPeaks = True
class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep",
                                                             "deblendedCatalog": "deblendedFlux"}):
    """Butler connections for `MeasureMergedCoaddSourcesTask`.

    The set of active inputs/outputs is adjusted in ``__init__`` according
    to the task configuration (input catalog choice, flag propagation,
    reference matching).
    """
    # NOTE: this warning is emitted once, at class-definition (import) time,
    # as a deprecation notice for the ``deblendedCatalog`` template.
    warnings.warn("MeasureMergedCoaddSourcesConnections.defaultTemplates is deprecated and no longer used. "
                  "Use MeasureMergedCoaddSourcesConfig.inputCatalog.")
    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        name="ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects. Only used in legacy PropagateVisitFlagsTask.",
        name="src",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True
    )
    sourceTableHandles = cT.Input(
        doc=("Source tables that are derived from the ``CalibrateTask`` sources. "
             "These tables contain astrometry and photometry flags, and optionally "
             "PSF flags."),
        name="sourceTable_visit",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    finalizedSourceTableHandles = cT.Input(
        doc=("Finalized source tables from ``FinalizeCalibrationTask``. These "
             "tables contain PSF flags from the finalized PSF estimation."),
        name="finalized_src_table",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use."
             "If the single band deblender was used this should be 'deblendedFlux."
             "If the multi-band deblender was used this should be 'deblendedModel, "
             "or deblendedFlux if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'"),
        name="{inputCoaddName}Coadd_{deblendedCatalog}",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    scarletCatalog = cT.Input(
        doc="Catalogs produced by multiband deblending",
        name="{inputCoaddName}Coadd_deblendedCatalog",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    scarletModels = cT.Input(
        doc="Multiband scarlet models produced by the deblender",
        name="{inputCoaddName}Coadd_scarletModelData",
        storageClass="ScarletModelData",
        dimensions=("tract", "patch", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        """Prune connections that the given configuration does not use."""
        super().__init__(config=config)
        if config.doPropagateFlags is False:
            # No flag propagation at all: drop every visit-level input.
            self.inputs -= set(("visitCatalogs",))
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))
        elif config.propagateFlags.target == PropagateSourceFlagsTask:
            # New PropagateSourceFlagsTask does not use visitCatalogs.
            self.inputs -= set(("visitCatalogs",))
            # Check for types of flags required.
            if not config.propagateFlags.source_flags:
                self.inputs -= set(("sourceTableHandles",))
            if not config.propagateFlags.finalized_source_flags:
                self.inputs -= set(("finalizedSourceTableHandles",))
        else:
            # Deprecated PropagateVisitFlagsTask uses visitCatalogs.
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))

        if config.inputCatalog == "deblendedCatalog":
            # The scarlet catalog connection is used instead of the generic
            # (template-named) input catalog connection.
            self.inputs -= set(("inputCatalog",))

            if not config.doAddFootprints:
                self.inputs -= set(("scarletModels",))
        else:
            # BUG FIX: this previously subtracted set(("deblendedCatalog")) —
            # missing trailing comma, so it removed the *characters* of the
            # string, and no connection named "deblendedCatalog" exists
            # anyway.  The intended effect is to drop the scarlet-specific
            # connections when a non-scarlet input catalog is configured.
            self.inputs -= set(("scarletCatalog",))
            self.inputs -= set(("scarletModels",))

        if config.doMatchSources is False:
            self.outputs -= set(("matchResult",))

        if config.doWriteMatchesDenormalized is False:
            self.outputs -= set(("denormMatches",))
class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """!
    @anchor MeasureMergedCoaddSourcesConfig_

    @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
    """
    # Which catalog to measure: a deblender output or the raw merged detections.
    inputCatalog = ChoiceField(
        dtype=str,
        default="deblendedCatalog",
        allowed={
            "deblendedCatalog": "Output catalog from ScarletDeblendTask",
            "deblendedFlux": "Output catalog from SourceDeblendTask",
            "mergeDet": "The merged detections before deblending."
        },
        doc="The name of the input catalog.",
    )
    doAddFootprints = Field(dtype=bool,
                            default=True,
                            doc="Whether or not to add footprints to the input catalog from scarlet models. "
                                "This should be true whenever using the multi-band deblender, "
                                "otherwise this should be False.")
    doConserveFlux = Field(dtype=bool, default=True,
                           doc="Whether to use the deblender models as templates to re-distribute the flux "
                               "from the 'exposure' (True), or to perform measurements on the deblender "
                               "model footprints.")
    doStripFootprints = Field(dtype=bool, default=True,
                              doc="Whether to strip footprints from the output catalog before "
                                  "saving to disk. "
                                  "This is usually done when using scarlet models to save disk space.")
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc="Propagate source flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str,
        default="raise",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )

    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    @property
    def refObjLoader(self):
        # Convenience alias: the reference-loader config lives on the
        # ``match`` subtask config.
        return self.match.refObjLoader

    def setDefaults(self):
        """Enable extra measurement plugins and coadd-specific mask planes."""
        super().setDefaults()
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_Variance',
                                           'base_LocalPhotoCalib',
                                           'base_LocalWcs']
        # Mask planes checked by base_PixelFlags anywhere in / at the center
        # of each source footprint.
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                       'INEXACT_PSF']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                     'INEXACT_PSF']
474## @addtogroup LSST_task_documentation
475## @{
476## @page page_MeasureMergedCoaddSourcesTask MeasureMergedCoaddSourcesTask
477## @ref MeasureMergedCoaddSourcesTask_ "MeasureMergedCoaddSourcesTask"
478## @copybrief MeasureMergedCoaddSourcesTask
479## @}
class MeasureMergedCoaddSourcesTask(PipelineTask):
    """Deblend sources from main catalog in each coadd separately and measure."""
    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig
    getSchemaCatalogs = _makeGetSchemaCatalogs("meas")

    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        """Initialize the task.

        Parameters
        ----------
        butler : `lsst.daf.persistence.Butler`, optional
            A butler used to read the input schema from disk or construct the
            reference catalog loader, if ``schema`` or ``refObjLoader`` is None.
        schema : `lsst.afw.table.Schema`, optional
            The schema of the merged detection catalog used as input to this
            one.  Overridden by ``initInputs`` if that is provided; read from
            ``butler`` if both are None.
        peakSchema : `lsst.afw.table.Schema`, optional
            The schema of the PeakRecords in the Footprints in the merged
            detection catalog.
        refObjLoader : `lsst.meas.algorithms.ReferenceObjectLoader`, optional
            Loader that supplies an external reference catalog.  May be None
            if the loader can be constructed from ``butler`` or all steps
            requiring a reference catalog are disabled.
        initInputs : `dict`, optional
            If not None, must contain key ``inputSchema`` whose ``.schema``
            is used as the input schema (takes precedence over ``schema``).
        **kwargs
            Forwarded to `lsst.pipe.base.PipelineTask.__init__`.

        Notes
        -----
        The task sets its own ``self.schema`` attribute to the schema of the
        output measurement catalog: all fields from the input schema plus
        additional fields for all the configured measurements.
        """
        super().__init__(**kwargs)
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        if initInputs is not None:
            schema = initInputs['inputSchema'].schema
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        # Init-output catalog carrying the final measurement schema.
        self.outputSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        # refCat is a PrerequisiteInput, so it is present even when matching
        # is disabled; always pop it so it is not passed to run().
        # BUG FIX: the loader was previously configured unconditionally, but
        # the ``match`` subtask only exists when doMatchSources is True, so
        # disabling matching raised AttributeError here.
        refCatHandles = inputs.pop('refCat', None)
        if self.config.doMatchSources:
            refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                                 refCatHandles, config=self.config.refObjLoader,
                                                 log=self.log)
            self.match.setRefObjLoader(refObjLoader)

        # Set psfcache
        # move this to run after gen2 deprecation
        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        # Get unique integer ID for IdFactory and RNG seeds
        exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch")
        inputs['exposureId'] = exposureIdInfo.expId
        idFactory = exposureIdInfo.makeSourceIdFactory()
        # Transform inputCatalog
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        # Load the correct input catalog: the scarlet catalog when the
        # multiband deblender was configured, the generic one otherwise.
        if "scarletCatalog" in inputs:
            inputCatalog = inputs.pop("scarletCatalog")
            catalogRef = inputRefs.scarletCatalog
        else:
            inputCatalog = inputs.pop("inputCatalog")
            catalogRef = inputRefs.inputCatalog
        sources.extend(inputCatalog, self.schemaMapper)
        del inputCatalog
        # Add the HeavyFootprints to the deblended sources
        if self.config.doAddFootprints:
            modelData = inputs.pop('scarletModels')
            if self.config.doConserveFlux:
                redistributeImage = inputs['exposure'].image
            else:
                redistributeImage = None
            modelData.updateCatalogFootprints(
                catalog=sources,
                band=inputRefs.exposure.dataId["band"],
                psfModel=inputs['exposure'].getPsf(),
                redistributeImage=redistributeImage,
                removeScarletData=True,
            )
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.
        inputs['sources'] = sources

        skyMap = inputs.pop('skyMap')
        tractNumber = catalogRef.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(catalogRef.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                ccdInputs = inputs["exposure"].getInfo().getCoaddInputs().ccds
                inputs["ccdInputs"] = ccdInputs

                if "sourceTableHandles" in inputs:
                    sourceTableHandles = inputs.pop("sourceTableHandles")
                    sourceTableHandleDict = {handle.dataId["visit"]: handle
                                             for handle in sourceTableHandles}
                    inputs["sourceTableHandleDict"] = sourceTableHandleDict
                if "finalizedSourceTableHandles" in inputs:
                    finalizedSourceTableHandles = inputs.pop("finalizedSourceTableHandles")
                    finalizedSourceTableHandleDict = {handle.dataId["visit"]: handle
                                                      for handle in finalizedSourceTableHandles}
                    inputs["finalizedSourceTableHandleDict"] = finalizedSourceTableHandleDict
            else:
                # Deprecated legacy version
                # Filter out any visit catalog that is not coadd inputs
                ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
                visitKey = ccdInputs.schema.find("visit").key
                ccdKey = ccdInputs.schema.find("ccd").key
                inputVisitIds = set()
                ccdRecordsWcs = {}
                for ccdRecord in ccdInputs:
                    visit = ccdRecord.get(visitKey)
                    ccd = ccdRecord.get(ccdKey)
                    inputVisitIds.add((visit, ccd))
                    ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

                inputCatalogsToKeep = []
                inputCatalogWcsUpdate = []
                for i, dataRef in enumerate(inputRefs.visitCatalogs):
                    key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                    if key in inputVisitIds:
                        inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                        inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
                inputs['visitCatalogs'] = inputCatalogsToKeep
                inputs['wcsUpdates'] = inputCatalogWcsUpdate
                inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        # NOTE(review): config.doStripFootprints suggests HeavyFootprints
        # should be stripped before persisting, but no stripping is performed
        # here; the previous unused ``sources = outputs.outputSources``
        # assignment was removed.  Confirm whether stripping belongs here.
        butlerQC.put(outputs, outputRefs)

    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
            butler=None, sourceTableHandleDict=None, finalizedSourceTableHandleDict=None):
        """Run measurement algorithms on the input exposure, and optionally populate the
        resulting catalog with extra information.

        Parameters
        ----------
        exposure : `lsst.afw.exposure.Exposure`
            The input exposure on which measurements are to be performed
        sources : `lsst.afw.table.SourceCatalog`
            A catalog built from the results of merged detections, or
            deblender outputs.
        skyInfo : `lsst.pipe.base.Struct`
            A struct containing information about the position of the input exposure within
            a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
        exposureId : `int` or `bytes`
            packed unique number or bytes unique to the input exposure
        ccdInputs : `lsst.afw.table.ExposureCatalog`
            Catalog containing information on the individual visits which went into making
            the coadd.
        sourceTableHandleDict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
            Dict for sourceTable_visit handles (key is visit) for propagating flags.
            These tables are derived from the ``CalibrateTask`` sources, and contain
            astrometry and photometry flags, and optionally PSF flags.
        finalizedSourceTableHandleDict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for finalized_src_table handles (key is visit) for propagating flags.
            These tables are derived from ``FinalizeCalibrationTask`` and contain
            PSF flags from the finalized PSF estimation.
        visitCatalogs : list of `lsst.afw.table.SourceCatalogs`
            A list of source catalogs corresponding to measurements made on the individual
            visits which went into the input exposure. If None and butler is `None` then
            the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        wcsUpdates : list of `lsst.afw.geom.SkyWcs`
            If visitCatalogs is not `None` this should be a list of wcs objects which correspond
            to the input visits. Used to put all coordinates to common system. If `None` and
            butler is `None` then the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        butler : `None`
            This was a Gen2 butler used to load visit catalogs.
            No longer used and should not be set. Will be removed in the
            future.

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results of running measurement task. Will contain the catalog in the
            sources attribute. Optionally will have results of matching to a
            reference catalog in the matchResults attribute, and denormalized
            matches in the denormMatches attribute.
        """
        if butler is not None:
            warnings.warn("The 'butler' parameter is no longer used and can be safely removed.",
                          category=FutureWarning, stacklevel=2)
            butler = None

        self.measurement.run(sources, exposure, exposureId=exposureId)

        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=sources,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        # TODO DM-11568: this contiguous check-and-copy could go away if we
        # reserve enough space during SourceDetection and/or SourceDeblend.
        # NOTE: sourceSelectors require contiguous catalogs, so ensure
        # contiguity now, so views are preserved from here on.
        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
                                 patchInfo=skyInfo.patchInfo)
        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                self.propagateFlags.run(
                    sources,
                    ccdInputs,
                    sourceTableHandleDict,
                    finalizedSourceTableHandleDict
                )
            else:
                # Legacy deprecated version
                self.propagateFlags.run(
                    butler,
                    sources,
                    ccdInputs,
                    exposure.getWcs(),
                    visitCatalogs,
                    wcsUpdates
                )

        results = Struct()

        if self.config.doMatchSources:
            matchResult = self.match.run(sources, exposure.getInfo().getFilter().bandLabel)
            matches = afwTable.packMatches(matchResult.matches)
            matches.table.setMetadata(matchResult.matchMeta)
            results.matchResult = matches
            if self.config.doWriteMatchesDenormalized:
                if matchResult.matches:
                    denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
                else:
                    self.log.warning("No matches, so generating dummy denormalized matches file")
                    denormMatches = afwTable.BaseCatalog(afwTable.Schema())
                    denormMatches.setMetadata(PropertyList())
                    denormMatches.getMetadata().add("COMMENT",
                                                    "This catalog is empty because no matches were found.")
                # BUG FIX: the assignment was previously duplicated (once
                # inside the else branch and once here); a single assignment
                # after the if/else covers both paths.
                results.denormMatches = denormMatches

        results.outputSources = sources
        return results