Coverage for python/lsst/pipe/tasks/multiBand.py: 28%
288 statements
« prev ^ index » next coverage.py v6.5.0, created at 2022-10-12 03:21 -0700
1# This file is part of pipe_tasks.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
22__all__ = ["DetectCoaddSourcesConfig", "DetectCoaddSourcesTask"]
24import warnings
26from lsst.pipe.base import (Struct, PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
27import lsst.pipe.base.connectionTypes as cT
28from lsst.pex.config import Config, Field, ConfigurableField, ChoiceField
29from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader, ScaleVarianceTask
30from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
31from lsst.meas.deblender import SourceDeblendTask
32from lsst.meas.extensions.scarlet import ScarletDeblendTask
33from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
34from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
35from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
36from lsst.pipe.tasks.propagateSourceFlags import PropagateSourceFlagsTask
37import lsst.afw.table as afwTable
38import lsst.afw.math as afwMath
39from lsst.daf.base import PropertyList
40from lsst.skymap import BaseSkyMap
41from lsst.obs.base import ExposureIdInfo
43# NOTE: these imports are a convenience so multiband users only have to import this file.
44from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401
45from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401
46from .multiBandUtils import CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401
47from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401
48from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401
49from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401
50from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401
53"""
54New set types:
55* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
56* deepCoadd_mergeDet: merged detections (tract, patch)
57* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
58* deepCoadd_ref: reference sources (tract, patch)
59All of these have associated *_schema catalogs that require no data ID and hold no records.
61In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
62the mergeDet, meas, and ref dataset Footprints:
63* deepCoadd_peak_schema
64"""
67##############################################################################################################
class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
    """Butler connections for DetectCoaddSourcesTask.

    A single coadd exposure comes in; the detection catalog, the
    backgrounds fit during detection, and the post-detection exposure go
    out.  The init-output publishes the detection catalog schema.
    """

    # Schema-only init-output: carries no data dimensions.
    detectionSchema = cT.InitOutput(
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
        doc="Schema of the detection catalog",
    )
    exposure = cT.Input(
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap"),
        doc="Exposure on which detections are to be performed",
    )
    outputBackgrounds = cT.Output(
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap"),
        doc="Output Backgrounds used in detection",
    )
    outputSources = cT.Output(
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
        doc="Detected sources catalog",
    )
    outputExposure = cT.Output(
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap"),
        doc="Exposure post detection",
    )
class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """Configuration parameters for the DetectCoaddSourcesTask
    """

    doScaleVariance = Field(
        dtype=bool,
        default=True,
        doc="Scale variance plane using empirical noise?",
    )
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    # The two fake-source fields below are kept only so existing configs
    # still load; both are deprecated and slated for removal.
    doInsertFakes = Field(
        dtype=bool,
        default=False,
        doc="Run fake sources injection task",
        deprecated=("doInsertFakes is no longer supported. This config will be removed "
                    "after v24."),
    )
    insertFakes = ConfigurableField(
        target=BaseFakeSourcesTask,
        doc="Injection of fake sources for testing "
            "purposes (must be retargeted)",
        deprecated=("insertFakes is no longer supported. This config will "
                    "be removed after v24."),
    )
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )

    def setDefaults(self):
        super().setDefaults()
        # Threshold in units of the (rescaled) per-pixel standard deviation
        # and grow footprints isotropically.
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        # Coadds are built from background-subtracted CCDs, so only a very
        # coarse residual background model is appropriate here.
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        # Suppress large footprints that overwhelm the deblender.
        self.detection.doTempWideBackground = True
class DetectCoaddSourcesTask(PipelineTask):
    """Detect sources on a single filter coadd.

    Coadding warped visit images introduces covariance between neighboring
    pixels.  Rather than propagating the full covariance matrix, this task
    approximately corrects for it by rescaling the coadd variance plane to
    match the empirically observed variance before detection.

    Detection itself is delegated to the "detection" subtask, which
    produces footprints and a source catalog for a single band.  Later
    stages of multi-band processing merge detections across bands and
    eventually drive forced photometry.

    Parameters
    ----------
    schema : `lsst.afw.table.Schema`, optional
        Initial schema for the output catalog, modified-in place to include all
        fields set by this task. If None, the source minimal schema will be used.
    **kwargs
        Additional keyword arguments.
    """

    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig
    getSchemaCatalogs = _makeGetSchemaCatalogs("det")

    def __init__(self, schema=None, **kwargs):
        # super() resolves the PipelineTask multiple-inheritance init chain;
        # the call order has been reviewed carefully.
        super().__init__(**kwargs)
        if schema is None:
            schema = afwTable.SourceTable.makeMinimalSchema()
        self.schema = schema
        # NOTE: the detection subtask adds fields to self.schema, so it must
        # be constructed before the schema is frozen into detectionSchema.
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")

        self.detectionSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        idInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch_band")
        inputs["idFactory"] = idInfo.makeSourceIdFactory()
        inputs["expId"] = idInfo.expId
        butlerQC.put(self.run(**inputs), outputRefs)

    def run(self, exposure, idFactory, expId):
        """Run detection on an exposure.

        The variance plane is first rescaled to the observed variance via
        ``ScaleVarianceTask`` (if configured), then the "detection" subtask
        is invoked to find sources.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            Exposure on which to detect (may be backround-subtracted and scaled,
            depending on configuration).
        idFactory : `lsst.afw.table.IdFactory`
            IdFactory to set source identifiers.
        expId : `int`
            Exposure identifier (integer) for RNG seed.

        Returns
        -------
        result : `lsst.pipe.base.Struct`
            Results as a struct with attributes:

            ``sources``
                Catalog of detections (`lsst.afw.table.SourceCatalog`).
            ``backgrounds``
                List of backgrounds (`list`).
        """
        if self.config.doScaleVariance:
            scaleFactor = self.scaleVariance.run(exposure.maskedImage)
            # Record the applied scaling in the exposure metadata.
            exposure.getMetadata().add("VARIANCE_SCALE", scaleFactor)
        bgList = afwMath.BackgroundList()
        sourceTable = afwTable.SourceTable.make(self.schema, idFactory)
        detResult = self.detection.run(sourceTable, exposure, expId=expId)
        # Collect any backgrounds the detection subtask fit along the way.
        fpSets = detResult.fpSets
        for bg in (getattr(fpSets, "background", None) or []):
            bgList.append(bg)
        return Struct(outputSources=detResult.sources,
                      outputBackgrounds=bgList,
                      outputExposure=exposure)
231##############################################################################################################
class DeblendCoaddSourcesConfig(Config):
    """Configuration parameters for the `DeblendCoaddSourcesTask`.
    """

    singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
                                          doc="Deblend sources separately in each band")
    multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
                                         doc="Deblend sources simultaneously across bands")
    # Fixed doc string: previously referred to a nonexistent
    # `multibandDeblend` field (the field is `multiBandDeblend`).
    simultaneous = Field(dtype=bool,
                         default=True,
                         doc="Simultaneously deblend all bands? "
                             "True uses `multiBandDeblend` while False uses `singleBandDeblend`")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    hasFakes = Field(dtype=bool,
                     default=False,
                     doc="Should be set to True if fake sources have been inserted into the input data.")

    def setDefaults(self):
        # Use super() rather than the explicit Config.setDefaults(self)
        # call so the MRO is respected if this config is subclassed.
        super().setDefaults()
        # Keep every peak when deblending per band so the per-band
        # catalogs stay row-aligned with the merged detections.
        self.singleBandDeblend.propagateAllPeaks = True
class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep",
                                                             "deblendedCatalog": "deblendedFlux"}):
    """Butler connections for MeasureMergedCoaddSourcesTask.

    The per-visit and match-related connections are pruned in ``__init__``
    according to the configuration.
    """

    # NOTE: this fires once at import time, when the class body executes.
    warnings.warn("MeasureMergedCoaddSourcesConnections.defaultTemplates is deprecated and no longer used. "
                  "Use MeasureMergedCoaddSourcesConfig.inputCatalog.")
    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        name="ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects. Only used in legacy PropagateVisitFlagsTask.",
        name="src",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True
    )
    sourceTableHandles = cT.Input(
        doc=("Source tables that are derived from the ``CalibrateTask`` sources. "
             "These tables contain astrometry and photometry flags, and optionally "
             "PSF flags."),
        name="sourceTable_visit",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    finalizedSourceTableHandles = cT.Input(
        doc=("Finalized source tables from ``FinalizeCalibrationTask``. These "
             "tables contain PSF flags from the finalized PSF estimation."),
        name="finalized_src_table",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    # Doc string fixed: the original had unbalanced quotes and missing
    # spaces between the concatenated sentences.
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use. "
             "If the single band deblender was used this should be 'deblendedFlux'. "
             "If the multi-band deblender was used this should be 'deblendedModel', "
             "or 'deblendedFlux' if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'"),
        name="{inputCoaddName}Coadd_{deblendedCatalog}",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    scarletCatalog = cT.Input(
        doc="Catalogs produced by multiband deblending",
        name="{inputCoaddName}Coadd_deblendedCatalog",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    scarletModels = cT.Input(
        doc="Multiband scarlet models produced by the deblender",
        name="{inputCoaddName}Coadd_scarletModelData",
        storageClass="ScarletModelData",
        dimensions=("tract", "patch", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        if config.doPropagateFlags is False:
            # Flag propagation disabled: no per-visit inputs are needed.
            self.inputs -= {"visitCatalogs", "sourceTableHandles", "finalizedSourceTableHandles"}
        elif config.propagateFlags.target == PropagateSourceFlagsTask:
            # New PropagateSourceFlagsTask does not use visitCatalogs.
            self.inputs -= {"visitCatalogs"}
            # Check for types of flags required.
            if not config.propagateFlags.source_flags:
                self.inputs -= {"sourceTableHandles"}
            if not config.propagateFlags.finalized_source_flags:
                self.inputs -= {"finalizedSourceTableHandles"}
        else:
            # Deprecated PropagateVisitFlagsTask uses visitCatalogs.
            self.inputs -= {"sourceTableHandles", "finalizedSourceTableHandles"}

        if config.inputCatalog == "deblendedCatalog":
            self.inputs -= {"inputCatalog"}

            if not config.doAddFootprints:
                self.inputs -= {"scarletModels"}
        else:
            # Bug fix: the original subtracted set(("deblendedCatalog")) —
            # a missing tuple comma made that a set of *characters*, and
            # "deblendedCatalog" is not a connection name anyway.  The
            # connection to remove here is "scarletCatalog".
            self.inputs -= {"scarletCatalog", "scarletModels"}

        if config.doMatchSources is False:
            self.outputs -= {"matchResult"}

        if config.doWriteMatchesDenormalized is False:
            self.outputs -= {"denormMatches"}
class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """Configuration parameters for the MeasureMergedCoaddSourcesTask
    """

    inputCatalog = ChoiceField(
        dtype=str,
        default="deblendedCatalog",
        allowed={
            "deblendedCatalog": "Output catalog from ScarletDeblendTask",
            "deblendedFlux": "Output catalog from SourceDeblendTask",
            "mergeDet": "The merged detections before deblending."
        },
        doc="The name of the input catalog.",
    )
    doAddFootprints = Field(
        dtype=bool,
        default=True,
        doc="Whether or not to add footprints to the input catalog from scarlet models. "
            "This should be true whenever using the multi-band deblender, "
            "otherwise this should be False.",
    )
    doConserveFlux = Field(
        dtype=bool,
        default=True,
        doc="Whether to use the deblender models as templates to re-distribute the flux "
            "from the 'exposure' (True), or to perform measurements on the deblender "
            "model footprints.",
    )
    doStripFootprints = Field(
        dtype=bool,
        default=True,
        doc="Whether to strip footprints from the output catalog before "
            "saving to disk. "
            "This is usually done when using scarlet models to save disk space.",
    )
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool,
        default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)",
    )
    propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc="Propagate source flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        dtype=str,
        default="raise",
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections",
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections",
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task',
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog",
    )

    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    @property
    def refObjLoader(self):
        # Convenience alias for the matcher's reference-object loader config.
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        # Coadd measurement needs the local-calibration and input-count
        # plugins in addition to the single-frame defaults.
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_Variance',
                                           'base_LocalPhotoCalib',
                                           'base_LocalWcs']
        # Coadd-specific mask planes to test in the pixel-flags plugin;
        # assign a fresh list to each attribute to avoid sharing state.
        for maskAttr in ("masksFpAnywhere", "masksFpCenter"):
            setattr(self.measurement.plugins['base_PixelFlags'], maskAttr,
                    ['CLIPPED', 'SENSOR_EDGE', 'INEXACT_PSF'])
489class MeasureMergedCoaddSourcesTask(PipelineTask):
490 """Deblend sources from main catalog in each coadd seperately and measure.
492 Use peaks and footprints from a master catalog to perform deblending and
493 measurement in each coadd.
495 Given a master input catalog of sources (peaks and footprints) or deblender
496 outputs(including a HeavyFootprint in each band), measure each source on
497 the coadd. Repeating this procedure with the same master catalog across
498 multiple coadds will generate a consistent set of child sources.
500 The deblender retains all peaks and deblends any missing peaks (dropouts in
501 that band) as PSFs. Source properties are measured and the @c is-primary
502 flag (indicating sources with no children) is set. Visit flags are
503 propagated to the coadd sources.
505 Optionally, we can match the coadd sources to an external reference
506 catalog.
508 After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we
509 have a set of per-band catalogs. The next stage in the multi-band
510 processing procedure will merge these measurements into a suitable catalog
511 for driving forced photometry.
513 Parameters
514 ----------
515 butler : `lsst.daf.butler.Butler` or `None`, optional
516 A butler used to read the input schemas from disk or construct the reference
517 catalog loader, if schema or peakSchema or refObjLoader is None.
518 schema : ``lsst.afw.table.Schema`, optional
519 The schema of the merged detection catalog used as input to this one.
520 peakSchema : ``lsst.afw.table.Schema`, optional
521 The schema of the PeakRecords in the Footprints in the merged detection catalog.
522 refObjLoader : `lsst.meas.algorithms.ReferenceObjectLoader`, optional
523 An instance of LoadReferenceObjectsTasks that supplies an external reference
524 catalog. May be None if the loader can be constructed from the butler argument or all steps
525 requiring a reference catalog are disabled.
526 initInputs : `dict`, optional
527 Dictionary that can contain a key ``inputSchema`` containing the
528 input schema. If present will override the value of ``schema``.
529 **kwargs
530 Additional keyword arguments.
531 """
533 _DefaultName = "measureCoaddSources"
534 ConfigClass = MeasureMergedCoaddSourcesConfig
535 getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
537 def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
538 **kwargs):
539 super().__init__(**kwargs)
540 self.deblended = self.config.inputCatalog.startswith("deblended")
541 self.inputCatalog = "Coadd_" + self.config.inputCatalog
542 if initInputs is not None:
543 schema = initInputs['inputSchema'].schema
544 if schema is None:
545 assert butler is not None, "Neither butler nor schema is defined"
546 schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
547 self.schemaMapper = afwTable.SchemaMapper(schema)
548 self.schemaMapper.addMinimalSchema(schema)
549 self.schema = self.schemaMapper.getOutputSchema()
550 self.algMetadata = PropertyList()
551 self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
552 self.makeSubtask("setPrimaryFlags", schema=self.schema)
553 if self.config.doMatchSources:
554 self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
555 if self.config.doPropagateFlags:
556 self.makeSubtask("propagateFlags", schema=self.schema)
557 self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
558 if self.config.doApCorr:
559 self.makeSubtask("applyApCorr", schema=self.schema)
560 if self.config.doRunCatalogCalculation:
561 self.makeSubtask("catalogCalculation", schema=self.schema)
563 self.outputSchema = afwTable.SourceCatalog(self.schema)
565 def runQuantum(self, butlerQC, inputRefs, outputRefs):
566 inputs = butlerQC.get(inputRefs)
568 refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
569 inputs.pop('refCat'),
570 name=self.config.connections.refCat,
571 config=self.config.refObjLoader,
572 log=self.log)
573 self.match.setRefObjLoader(refObjLoader)
575 # Set psfcache
576 # move this to run after gen2 deprecation
577 inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)
579 # Get unique integer ID for IdFactory and RNG seeds
580 exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch")
581 inputs['exposureId'] = exposureIdInfo.expId
582 idFactory = exposureIdInfo.makeSourceIdFactory()
583 # Transform inputCatalog
584 table = afwTable.SourceTable.make(self.schema, idFactory)
585 sources = afwTable.SourceCatalog(table)
586 # Load the correct input catalog
587 if "scarletCatalog" in inputs:
588 inputCatalog = inputs.pop("scarletCatalog")
589 catalogRef = inputRefs.scarletCatalog
590 else:
591 inputCatalog = inputs.pop("inputCatalog")
592 catalogRef = inputRefs.inputCatalog
593 sources.extend(inputCatalog, self.schemaMapper)
594 del inputCatalog
595 # Add the HeavyFootprints to the deblended sources
596 if self.config.doAddFootprints:
597 modelData = inputs.pop('scarletModels')
598 if self.config.doConserveFlux:
599 redistributeImage = inputs['exposure'].image
600 else:
601 redistributeImage = None
602 modelData.updateCatalogFootprints(
603 catalog=sources,
604 band=inputRefs.exposure.dataId["band"],
605 psfModel=inputs['exposure'].getPsf(),
606 redistributeImage=redistributeImage,
607 removeScarletData=True,
608 )
609 table = sources.getTable()
610 table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
611 inputs['sources'] = sources
613 skyMap = inputs.pop('skyMap')
614 tractNumber = catalogRef.dataId['tract']
615 tractInfo = skyMap[tractNumber]
616 patchInfo = tractInfo.getPatchInfo(catalogRef.dataId['patch'])
617 skyInfo = Struct(
618 skyMap=skyMap,
619 tractInfo=tractInfo,
620 patchInfo=patchInfo,
621 wcs=tractInfo.getWcs(),
622 bbox=patchInfo.getOuterBBox()
623 )
624 inputs['skyInfo'] = skyInfo
626 if self.config.doPropagateFlags:
627 if self.config.propagateFlags.target == PropagateSourceFlagsTask:
628 # New version
629 ccdInputs = inputs["exposure"].getInfo().getCoaddInputs().ccds
630 inputs["ccdInputs"] = ccdInputs
632 if "sourceTableHandles" in inputs:
633 sourceTableHandles = inputs.pop("sourceTableHandles")
634 sourceTableHandleDict = {handle.dataId["visit"]: handle
635 for handle in sourceTableHandles}
636 inputs["sourceTableHandleDict"] = sourceTableHandleDict
637 if "finalizedSourceTableHandles" in inputs:
638 finalizedSourceTableHandles = inputs.pop("finalizedSourceTableHandles")
639 finalizedSourceTableHandleDict = {handle.dataId["visit"]: handle
640 for handle in finalizedSourceTableHandles}
641 inputs["finalizedSourceTableHandleDict"] = finalizedSourceTableHandleDict
642 else:
643 # Deprecated legacy version
644 # Filter out any visit catalog that is not coadd inputs
645 ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
646 visitKey = ccdInputs.schema.find("visit").key
647 ccdKey = ccdInputs.schema.find("ccd").key
648 inputVisitIds = set()
649 ccdRecordsWcs = {}
650 for ccdRecord in ccdInputs:
651 visit = ccdRecord.get(visitKey)
652 ccd = ccdRecord.get(ccdKey)
653 inputVisitIds.add((visit, ccd))
654 ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()
656 inputCatalogsToKeep = []
657 inputCatalogWcsUpdate = []
658 for i, dataRef in enumerate(inputRefs.visitCatalogs):
659 key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
660 if key in inputVisitIds:
661 inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
662 inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
663 inputs['visitCatalogs'] = inputCatalogsToKeep
664 inputs['wcsUpdates'] = inputCatalogWcsUpdate
665 inputs['ccdInputs'] = ccdInputs
667 outputs = self.run(**inputs)
668 # Strip HeavyFootprints to save space on disk
669 sources = outputs.outputSources
670 butlerQC.put(outputs, outputRefs)
672 def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
673 butler=None, sourceTableHandleDict=None, finalizedSourceTableHandleDict=None):
674 """Run measurement algorithms on the input exposure, and optionally populate the
675 resulting catalog with extra information.
677 Parameters
678 ----------
679 exposure : `lsst.afw.exposure.Exposure`
680 The input exposure on which measurements are to be performed.
681 sources : `lsst.afw.table.SourceCatalog`
682 A catalog built from the results of merged detections, or
683 deblender outputs.
684 skyInfo : `lsst.pipe.base.Struct`
685 A struct containing information about the position of the input exposure within
686 a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box.
687 exposureId : `int` or `bytes`
688 Packed unique number or bytes unique to the input exposure.
689 ccdInputs : `lsst.afw.table.ExposureCatalog`, optional
690 Catalog containing information on the individual visits which went into making
691 the coadd.
692 visitCatalogs : `list` of `lsst.afw.table.SourceCatalogs`, optional
693 A list of source catalogs corresponding to measurements made on the individual
694 visits which went into the input exposure. If None and butler is `None` then
695 the task cannot propagate visit flags to the output catalog.
696 Deprecated, to be removed with PropagateVisitFlagsTask.
697 wcsUpdates : `list` of `lsst.afw.geom.SkyWcs`, optional
698 If visitCatalogs is not `None` this should be a list of wcs objects which correspond
699 to the input visits. Used to put all coordinates to common system. If `None` and
700 butler is `None` then the task cannot propagate visit flags to the output catalog.
701 Deprecated, to be removed with PropagateVisitFlagsTask.
702 butler : `None`, optional
703 This was a Gen2 butler used to load visit catalogs.
704 No longer used and should not be set. Will be removed in the
705 future.
706 sourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
707 Dict for sourceTable_visit handles (key is visit) for propagating flags.
708 These tables are derived from the ``CalibrateTask`` sources, and contain
709 astrometry and photometry flags, and optionally PSF flags.
710 finalizedSourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
711 Dict for finalized_src_table handles (key is visit) for propagating flags.
712 These tables are derived from ``FinalizeCalibrationTask`` and contain
713 PSF flags from the finalized PSF estimation.
715 Returns
716 -------
717 results : `lsst.pipe.base.Struct`
718 Results of running measurement task. Will contain the catalog in the
719 sources attribute. Optionally will have results of matching to a
720 reference catalog in the matchResults attribute, and denormalized
721 matches in the denormMatches attribute.
722 """
723 if butler is not None:
724 warnings.warn("The 'butler' parameter is no longer used and can be safely removed.",
725 category=FutureWarning, stacklevel=2)
726 butler = None
728 self.measurement.run(sources, exposure, exposureId=exposureId)
730 if self.config.doApCorr:
731 self.applyApCorr.run(
732 catalog=sources,
733 apCorrMap=exposure.getInfo().getApCorrMap()
734 )
736 # TODO DM-11568: this contiguous check-and-copy could go away if we
737 # reserve enough space during SourceDetection and/or SourceDeblend.
738 # NOTE: sourceSelectors require contiguous catalogs, so ensure
739 # contiguity now, so views are preserved from here on.
740 if not sources.isContiguous():
741 sources = sources.copy(deep=True)
743 if self.config.doRunCatalogCalculation:
744 self.catalogCalculation.run(sources)
746 self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
747 patchInfo=skyInfo.patchInfo)
748 if self.config.doPropagateFlags:
749 if self.config.propagateFlags.target == PropagateSourceFlagsTask:
750 # New version
751 self.propagateFlags.run(
752 sources,
753 ccdInputs,
754 sourceTableHandleDict,
755 finalizedSourceTableHandleDict
756 )
757 else:
758 # Legacy deprecated version
759 self.propagateFlags.run(
760 butler,
761 sources,
762 ccdInputs,
763 exposure.getWcs(),
764 visitCatalogs,
765 wcsUpdates
766 )
768 results = Struct()
770 if self.config.doMatchSources:
771 matchResult = self.match.run(sources, exposure.getInfo().getFilter().bandLabel)
772 matches = afwTable.packMatches(matchResult.matches)
773 matches.table.setMetadata(matchResult.matchMeta)
774 results.matchResult = matches
775 if self.config.doWriteMatchesDenormalized:
776 if matchResult.matches:
777 denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
778 else:
779 self.log.warning("No matches, so generating dummy denormalized matches file")
780 denormMatches = afwTable.BaseCatalog(afwTable.Schema())
781 denormMatches.setMetadata(PropertyList())
782 denormMatches.getMetadata().add("COMMENT",
783 "This catalog is empty because no matches were found.")
784 results.denormMatches = denormMatches
785 results.denormMatches = denormMatches
787 results.outputSources = sources
788 return results