# multiBand.py — from lsst.pipe.tasks gb58c50b8ee+007c719058
# (Doxygen source-listing page chrome removed.)
1# This file is part of pipe_tasks.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
21
22__all__ = ["DetectCoaddSourcesConfig", "DetectCoaddSourcesTask"]
23
24import warnings
25
26from lsst.pipe.base import (Struct, PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
27import lsst.pipe.base.connectionTypes as cT
28from lsst.pex.config import Config, Field, ConfigurableField, ChoiceField
29from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader, ScaleVarianceTask
30from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
31from lsst.meas.deblender import SourceDeblendTask
32from lsst.meas.extensions.scarlet import ScarletDeblendTask
33from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
34from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
35from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
36from lsst.pipe.tasks.propagateSourceFlags import PropagateSourceFlagsTask
37import lsst.afw.table as afwTable
38import lsst.afw.math as afwMath
39from lsst.daf.base import PropertyList
40from lsst.skymap import BaseSkyMap
41from lsst.obs.base import ExposureIdInfo
42
43# NOTE: these imports are a convenience so multiband users only have to import this file.
44from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401
45from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401
46from .multiBandUtils import CullPeaksConfig # noqa: F401
47from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401
48from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401
49from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401
50from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401
51
52
"""
New set types:
* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
* deepCoadd_mergeDet: merged detections (tract, patch)
* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
* deepCoadd_ref: reference sources (tract, patch)
All of these have associated *_schema catalogs that require no data ID and hold no records.

In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
the mergeDet, meas, and ref dataset Footprints:
* deepCoadd_peak_schema
"""
65
66
67
class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
    """Butler connections for `DetectCoaddSourcesTask`.

    One quantum per (tract, patch, band): reads a coadd, writes the detection
    catalog, the background model used during detection, and the post-detection
    coadd exposure.
    """

    # Written at init time (no data ID) so downstream tasks can be configured
    # against the detection schema before any data is processed.
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
100
101
class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """Configuration parameters for the DetectCoaddSourcesTask
    """

    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    # The two fake-source fields below are deprecated no-ops kept only for
    # config-file compatibility; both are scheduled for removal after v24.
    doInsertFakes = Field(dtype=bool, default=False,
                          doc="Run fake sources injection task",
                          deprecated=("doInsertFakes is no longer supported. This config will be removed "
                                      "after v24."))
    insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
                                    doc="Injection of fake sources for testing "
                                        "purposes (must be retargeted)",
                                    deprecated=("insertFakes is no longer supported. This config will "
                                                "be removed after v24."))
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )

    def setDefaults(self):
        super().setDefaults()
        # Detect against per-pixel standard deviation: coadd warping
        # correlates noise, so the variance plane (rescaled above when
        # doScaleVariance is set) is the reliable threshold reference.
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True  # Suppress large footprints that overwhelm the deblender
135
136
class DetectCoaddSourcesTask(PipelineTask):
    """Detect sources on a single-filter coadd.

    Warping the individual visits that go into a coadd correlates the noise
    between neighboring pixels, so the nominal variance plane underestimates
    the observed pixel-to-pixel scatter. Before detecting, this task can
    rescale the variance plane to match the measured scatter (an approximation
    to propagating the full covariance matrix that works well in practice),
    then delegates detection and footprint generation to the ``detection``
    subtask.

    This task is intended to run after coadd assembly for a given band; later
    stages of the multi-band pipeline merge the resulting detections across
    bands and eventually perform forced photometry.

    Parameters
    ----------
    schema : `lsst.afw.table.Schema`, optional
        Initial schema for the output catalog, modified in place to include
        all fields set by this task. If `None`, the minimal source schema is
        used.
    **kwargs
        Additional keyword arguments forwarded to `PipelineTask`.
    """

    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig

    def __init__(self, schema=None, **kwargs):
        # super() is relied upon to resolve PipelineTask's multiple
        # inheritance; the init call chain has been reviewed for this.
        super().__init__(**kwargs)
        self.schema = afwTable.SourceTable.makeMinimalSchema() if schema is None else schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")

        # Init-output: an empty catalog carrying the final schema.
        self.detectionSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        quantumInputs = butlerQC.get(inputRefs)
        # Pack the data ID into a unique integer for source IDs and RNG seeds.
        idInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch_band")
        quantumInputs["idFactory"] = idInfo.makeSourceIdFactory()
        quantumInputs["expId"] = idInfo.expId
        butlerQC.put(self.run(**quantumInputs), outputRefs)

    def run(self, exposure, idFactory, expId):
        """Run detection on an exposure.

        When configured, first rescale the variance plane to match the
        observed pixel scatter (``scaleVariance`` subtask), then detect
        sources with the ``detection`` subtask.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            Exposure on which to detect (may be background-subtracted and
            scaled, depending on configuration).
        idFactory : `lsst.afw.table.IdFactory`
            IdFactory to set source identifiers.
        expId : `int`
            Exposure identifier (integer) for RNG seed.

        Returns
        -------
        result : `lsst.pipe.base.Struct`
            Results as a struct with attributes:

            ``outputSources``
                Catalog of detections (`lsst.afw.table.SourceCatalog`).
            ``outputBackgrounds``
                Backgrounds fit during detection (`lsst.afw.math.BackgroundList`).
            ``outputExposure``
                The input exposure, post detection.
        """
        if self.config.doScaleVariance:
            scaleFactor = self.scaleVariance.run(exposure.maskedImage)
            # Record the applied factor so downstream consumers can undo it.
            exposure.getMetadata().add("VARIANCE_SCALE", scaleFactor)

        backgroundList = afwMath.BackgroundList()
        sourceTable = afwTable.SourceTable.make(self.schema, idFactory)
        detectionResult = self.detection.run(sourceTable, exposure, expId=expId)
        # The detection subtask may or may not report backgrounds; collect
        # whatever is present.
        for background in (getattr(detectionResult, "background", None) or []):
            backgroundList.append(background)

        return Struct(outputSources=detectionResult.sources,
                      outputBackgrounds=backgroundList,
                      outputExposure=exposure)
227
228
229
230
231
class DeblendCoaddSourcesConfig(Config):
    """Configuration parameters for the `DeblendCoaddSourcesTask`.
    """

    singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
                                          doc="Deblend sources separately in each band")
    multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
                                         doc="Deblend sources simultaneously across bands")
    simultaneous = Field(dtype=bool,
                         default=True,
                         doc="Simultaneously deblend all bands? "
                             "True uses `multibandDeblend` while False uses `singleBandDeblend`")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    hasFakes = Field(dtype=bool,
                     default=False,
                     doc="Should be set to True if fake sources have been inserted into the input data.")

    def setDefaults(self):
        Config.setDefaults(self)
        # Keep every merged peak when single-band deblending, so the per-band
        # catalogs stay row-aligned with the merged detection catalog
        # (presumably including peaks only detected in other bands — behavior
        # defined by SourceDeblendTask).
        self.singleBandDeblend.propagateAllPeaks = True
252
253
class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep",
                                                             "deblendedCatalog": "deblendedFlux"}):
    """Butler connections for `MeasureMergedCoaddSourcesTask`.

    The set of active inputs/outputs is pruned in ``__init__`` according to
    the configuration: which flag-propagation implementation is targeted,
    which deblender produced the input catalog, and whether reference
    matching is enabled.
    """

    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        name="ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects. Only used in legacy PropagateVisitFlagsTask.",
        name="src",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True
    )
    sourceTableHandles = cT.Input(
        doc=("Source tables that are derived from the ``CalibrateTask`` sources. "
             "These tables contain astrometry and photometry flags, and optionally "
             "PSF flags."),
        name="sourceTable_visit",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    finalizedSourceTableHandles = cT.Input(
        doc=("Finalized source tables from ``FinalizeCalibrationTask``. These "
             "tables contain PSF flags from the finalized PSF estimation."),
        name="finalized_src_table",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use."
             "If the single band deblender was used this should be 'deblendedFlux."
             "If the multi-band deblender was used this should be 'deblendedModel, "
             "or deblendedFlux if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'"),
        name="{inputCoaddName}Coadd_{deblendedCatalog}",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    scarletCatalog = cT.Input(
        doc="Catalogs produced by multiband deblending",
        name="{inputCoaddName}Coadd_deblendedCatalog",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    scarletModels = cT.Input(
        doc="Multiband scarlet models produced by the deblender",
        name="{inputCoaddName}Coadd_scarletModelData",
        storageClass="ScarletModelData",
        dimensions=("tract", "patch", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        if config.doPropagateFlags is False:
            # No flag propagation: none of the visit-level inputs are needed.
            self.inputs -= set(("visitCatalogs",))
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))
        elif config.propagateFlags.target == PropagateSourceFlagsTask:
            # New PropagateSourceFlagsTask does not use visitCatalogs.
            self.inputs -= set(("visitCatalogs",))
            # Check for types of flags required.
            if not config.propagateFlags.source_flags:
                self.inputs -= set(("sourceTableHandles",))
            if not config.propagateFlags.finalized_source_flags:
                self.inputs -= set(("finalizedSourceTableHandles",))
        else:
            # Deprecated PropagateVisitFlagsTask uses visitCatalogs.
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))

        if config.inputCatalog == "deblendedCatalog":
            # Using the scarlet catalog: drop the generic inputCatalog
            # connection, and the scarlet models too unless footprints are
            # being reattached from them.
            self.inputs -= set(("inputCatalog",))

            if not config.doAddFootprints:
                self.inputs -= set(("scarletModels",))
        else:
            # BUG FIX: this previously read ``set(("deblendedCatalog"))`` —
            # missing the trailing comma, so it built the set of the string's
            # characters (a no-op against connection names) and, besides,
            # there is no connection named "deblendedCatalog". The connection
            # to drop when not using the scarlet catalog is ``scarletCatalog``;
            # leaving it active made the graph demand the scarlet dataset and
            # sent runQuantum down the scarlet branch.
            self.inputs -= set(("scarletCatalog",))
            self.inputs -= set(("scarletModels",))

        if config.doMatchSources is False:
            self.outputs -= set(("matchResult",))

        if config.doWriteMatchesDenormalized is False:
            self.outputs -= set(("denormMatches",))
392
393
class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """Configuration parameters for the MeasureMergedCoaddSourcesTask
    """
    inputCatalog = ChoiceField(
        dtype=str,
        default="deblendedCatalog",
        allowed={
            "deblendedCatalog": "Output catalog from ScarletDeblendTask",
            "deblendedFlux": "Output catalog from SourceDeblendTask",
            "mergeDet": "The merged detections before deblending."
        },
        doc="The name of the input catalog.",
    )
    doAddFootprints = Field(dtype=bool,
                            default=True,
                            doc="Whether or not to add footprints to the input catalog from scarlet models. "
                                "This should be true whenever using the multi-band deblender, "
                                "otherwise this should be False.")
    doConserveFlux = Field(dtype=bool, default=True,
                           doc="Whether to use the deblender models as templates to re-distribute the flux "
                               "from the 'exposure' (True), or to perform measurements on the deblender "
                               "model footprints.")
    doStripFootprints = Field(dtype=bool, default=True,
                              doc="Whether to strip footprints from the output catalog before "
                                  "saving to disk. "
                                  "This is usually done when using scarlet models to save disk space.")
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc="Propagate source flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str,
        default="raise",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )

    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    @property
    def refObjLoader(self):
        # Convenience alias: the reference-loader configuration lives on the
        # "match" subtask's config.
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_Variance',
                                           'base_LocalPhotoCalib',
                                           'base_LocalWcs']
        # Coadd-specific mask planes whose presence anywhere in (or at the
        # center of) a footprint should be flagged on the measurement.
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                       'INEXACT_PSF']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                     'INEXACT_PSF']
482
483
class MeasureMergedCoaddSourcesTask(PipelineTask):
    """Deblend sources from main catalog in each coadd separately and measure.

    Use peaks and footprints from a master catalog to perform deblending and
    measurement in each coadd.

    Given a master input catalog of sources (peaks and footprints) or deblender
    outputs (including a HeavyFootprint in each band), measure each source on
    the coadd. Repeating this procedure with the same master catalog across
    multiple coadds will generate a consistent set of child sources.

    The deblender retains all peaks and deblends any missing peaks (dropouts in
    that band) as PSFs. Source properties are measured and the is-primary
    flag (indicating sources with no children) is set. Visit flags are
    propagated to the coadd sources.

    Optionally, we can match the coadd sources to an external reference
    catalog.

    After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we
    have a set of per-band catalogs. The next stage in the multi-band
    processing procedure will merge these measurements into a suitable catalog
    for driving forced photometry.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler` or `None`, optional
        A butler used to read the input schemas from disk or construct the
        reference catalog loader, if schema or peakSchema or refObjLoader is
        None.
    schema : `lsst.afw.table.Schema`, optional
        The schema of the merged detection catalog used as input to this one.
    peakSchema : `lsst.afw.table.Schema`, optional
        The schema of the PeakRecords in the Footprints in the merged
        detection catalog.
    refObjLoader : `lsst.meas.algorithms.ReferenceObjectLoader`, optional
        An instance of LoadReferenceObjectsTasks that supplies an external
        reference catalog. May be None if the loader can be constructed from
        the butler argument or all steps requiring a reference catalog are
        disabled.
    initInputs : `dict`, optional
        Dictionary that can contain a key ``inputSchema`` containing the
        input schema. If present will override the value of ``schema``.
    **kwargs
        Additional keyword arguments.
    """

    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig

    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        super().__init__(**kwargs)
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        # Pipeline (Gen3) execution supplies the input schema via initInputs;
        # it takes precedence over the explicit ``schema`` argument.
        if initInputs is not None:
            schema = initInputs['inputSchema'].schema
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema").schema
        # Map the input schema onto the output schema so subtasks can append
        # their own fields without disturbing the input columns.
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        # Init-output: an empty catalog carrying the final schema.
        self.outputSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        # refCat is a prerequisite input and is always retrieved, but the
        # "match" subtask only exists when doMatchSources is set.
        # BUG FIX: the loader was previously built and attached
        # unconditionally, which raised AttributeError on self.match when
        # doMatchSources was False.
        refCats = inputs.pop('refCat')
        if self.config.doMatchSources:
            refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                                 refCats,
                                                 name=self.config.connections.refCat,
                                                 config=self.config.refObjLoader,
                                                 log=self.log)
            self.match.setRefObjLoader(refObjLoader)

        # Set psfcache
        # move this to run after gen2 deprecation
        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        # Get unique integer ID for IdFactory and RNG seeds
        exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch")
        inputs['exposureId'] = exposureIdInfo.expId
        idFactory = exposureIdInfo.makeSourceIdFactory()
        # Transform inputCatalog
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        # Load the correct input catalog: the connections class activates
        # exactly one of scarletCatalog / inputCatalog.
        if "scarletCatalog" in inputs:
            inputCatalog = inputs.pop("scarletCatalog")
            catalogRef = inputRefs.scarletCatalog
        else:
            inputCatalog = inputs.pop("inputCatalog")
            catalogRef = inputRefs.inputCatalog
        sources.extend(inputCatalog, self.schemaMapper)
        del inputCatalog
        # Add the HeavyFootprints to the deblended sources
        if self.config.doAddFootprints:
            modelData = inputs.pop('scarletModels')
            if self.config.doConserveFlux:
                redistributeImage = inputs['exposure'].image
            else:
                redistributeImage = None
            modelData.updateCatalogFootprints(
                catalog=sources,
                band=inputRefs.exposure.dataId["band"],
                psfModel=inputs['exposure'].getPsf(),
                redistributeImage=redistributeImage,
                removeScarletData=True,
            )
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.
        inputs['sources'] = sources

        skyMap = inputs.pop('skyMap')
        tractNumber = catalogRef.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(catalogRef.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                ccdInputs = inputs["exposure"].getInfo().getCoaddInputs().ccds
                inputs["ccdInputs"] = ccdInputs

                if "sourceTableHandles" in inputs:
                    sourceTableHandles = inputs.pop("sourceTableHandles")
                    sourceTableHandleDict = {handle.dataId["visit"]: handle
                                             for handle in sourceTableHandles}
                    inputs["sourceTableHandleDict"] = sourceTableHandleDict
                if "finalizedSourceTableHandles" in inputs:
                    finalizedSourceTableHandles = inputs.pop("finalizedSourceTableHandles")
                    finalizedSourceTableHandleDict = {handle.dataId["visit"]: handle
                                                      for handle in finalizedSourceTableHandles}
                    inputs["finalizedSourceTableHandleDict"] = finalizedSourceTableHandleDict
            else:
                # Deprecated legacy version
                # Filter out any visit catalog that is not coadd inputs
                ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
                visitKey = ccdInputs.schema.find("visit").key
                ccdKey = ccdInputs.schema.find("ccd").key
                inputVisitIds = set()
                ccdRecordsWcs = {}
                for ccdRecord in ccdInputs:
                    visit = ccdRecord.get(visitKey)
                    ccd = ccdRecord.get(ccdKey)
                    inputVisitIds.add((visit, ccd))
                    ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

                inputCatalogsToKeep = []
                inputCatalogWcsUpdate = []
                for i, dataRef in enumerate(inputRefs.visitCatalogs):
                    key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                    if key in inputVisitIds:
                        inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                        inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
                inputs['visitCatalogs'] = inputCatalogsToKeep
                inputs['wcsUpdates'] = inputCatalogWcsUpdate
                inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        # Strip HeavyFootprints to save space on disk.
        # BUG FIX: previously only the comment and a dead assignment were
        # here; nothing was stripped despite the doStripFootprints config.
        # Only child (deblended) sources carry the reattached model
        # footprints, so strip those.
        if self.config.doAddFootprints and self.config.doStripFootprints:
            sources = outputs.outputSources
            for source in sources[sources["parent"] != 0]:
                source.setFootprint(None)
        butlerQC.put(outputs, outputRefs)

    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
            butler=None, sourceTableHandleDict=None, finalizedSourceTableHandleDict=None):
        """Run measurement algorithms on the input exposure, and optionally populate the
        resulting catalog with extra information.

        Parameters
        ----------
        exposure : `lsst.afw.exposure.Exposure`
            The input exposure on which measurements are to be performed.
        sources : `lsst.afw.table.SourceCatalog`
            A catalog built from the results of merged detections, or
            deblender outputs.
        skyInfo : `lsst.pipe.base.Struct`
            A struct containing information about the position of the input exposure within
            a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box.
        exposureId : `int` or `bytes`
            Packed unique number or bytes unique to the input exposure.
        ccdInputs : `lsst.afw.table.ExposureCatalog`, optional
            Catalog containing information on the individual visits which went into making
            the coadd.
        visitCatalogs : `list` of `lsst.afw.table.SourceCatalogs`, optional
            A list of source catalogs corresponding to measurements made on the individual
            visits which went into the input exposure. If None and butler is `None` then
            the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        wcsUpdates : `list` of `lsst.afw.geom.SkyWcs`, optional
            If visitCatalogs is not `None` this should be a list of wcs objects which correspond
            to the input visits. Used to put all coordinates to common system. If `None` and
            butler is `None` then the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        butler : `None`, optional
            This was a Gen2 butler used to load visit catalogs.
            No longer used and should not be set. Will be removed in the
            future.
        sourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for sourceTable_visit handles (key is visit) for propagating flags.
            These tables are derived from the ``CalibrateTask`` sources, and contain
            astrometry and photometry flags, and optionally PSF flags.
        finalizedSourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for finalized_src_table handles (key is visit) for propagating flags.
            These tables are derived from ``FinalizeCalibrationTask`` and contain
            PSF flags from the finalized PSF estimation.

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results of running measurement task. Will contain the catalog in the
            sources attribute. Optionally will have results of matching to a
            reference catalog in the matchResults attribute, and denormalized
            matches in the denormMatches attribute.
        """
        if butler is not None:
            warnings.warn("The 'butler' parameter is no longer used and can be safely removed.",
                          category=FutureWarning, stacklevel=2)
            butler = None

        self.measurement.run(sources, exposure, exposureId=exposureId)

        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=sources,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        # TODO DM-11568: this contiguous check-and-copy could go away if we
        # reserve enough space during SourceDetection and/or SourceDeblend.
        # NOTE: sourceSelectors require contiguous catalogs, so ensure
        # contiguity now, so views are preserved from here on.
        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
                                 patchInfo=skyInfo.patchInfo)
        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                self.propagateFlags.run(
                    sources,
                    ccdInputs,
                    sourceTableHandleDict,
                    finalizedSourceTableHandleDict
                )
            else:
                # Legacy deprecated version
                self.propagateFlags.run(
                    butler,
                    sources,
                    ccdInputs,
                    exposure.getWcs(),
                    visitCatalogs,
                    wcsUpdates
                )

        results = Struct()

        if self.config.doMatchSources:
            matchResult = self.match.run(sources, exposure.getInfo().getFilter().bandLabel)
            matches = afwTable.packMatches(matchResult.matches)
            matches.table.setMetadata(matchResult.matchMeta)
            results.matchResult = matches
            if self.config.doWriteMatchesDenormalized:
                if matchResult.matches:
                    denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
                else:
                    self.log.warning("No matches, so generating dummy denormalized matches file")
                    denormMatches = afwTable.BaseCatalog(afwTable.Schema())
                    denormMatches.setMetadata(PropertyList())
                    denormMatches.getMetadata().add("COMMENT",
                                                    "This catalog is empty because no matches were found.")
                # BUG FIX: this assignment was duplicated in the original;
                # a single assignment inside the denormalized branch suffices.
                results.denormMatches = denormMatches

        results.outputSources = sources
        return results
783