lsst.pipe.tasks gb77071e687+26b37c131c
Loading...
Searching...
No Matches
multiBand.py
Go to the documentation of this file.
1# This file is part of pipe_tasks.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
21
22__all__ = ["DetectCoaddSourcesConfig", "DetectCoaddSourcesTask"]
23
24import warnings
25
26from lsst.pipe.base import (Struct, PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
27import lsst.pipe.base.connectionTypes as cT
28from lsst.pex.config import Config, Field, ConfigurableField, ChoiceField
29from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader, ScaleVarianceTask
30from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
31from lsst.meas.deblender import SourceDeblendTask
32from lsst.meas.extensions.scarlet import ScarletDeblendTask
33from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
34from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
35from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
36from lsst.pipe.tasks.propagateSourceFlags import PropagateSourceFlagsTask
37import lsst.afw.table as afwTable
38import lsst.afw.math as afwMath
39from lsst.daf.base import PropertyList
40from lsst.skymap import BaseSkyMap
41from lsst.obs.base import ExposureIdInfo
42
43# NOTE: these imports are a convenience so multiband users only have to import this file.
44from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401
45from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401
46from .multiBandUtils import CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401
47from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401
48from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401
49from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401
50from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401
51
52
53"""
54New set types:
55* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
56* deepCoadd_mergeDet: merged detections (tract, patch)
57* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
58* deepCoadd_ref: reference sources (tract, patch)
59All of these have associated *_schema catalogs that require no data ID and hold no records.
60
61In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
62the mergeDet, meas, and ref dataset Footprints:
63* deepCoadd_peak_schema
64"""
65
66
67
class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
    """Butler connections for DetectCoaddSourcesTask.

    One quantum runs per (tract, patch, band, skymap): it reads a single
    coadd exposure and writes the detection catalog, the background list
    used in detection, and the post-detection exposure.
    """

    # Schema of the detection catalog; persisted once at task-init time.
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap")
    )
    # The input coadd, with detection masks and metadata (e.g. the variance
    # scale) applied, written back out as the "calexp" coadd.
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
100
101
class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """Configuration parameters for the DetectCoaddSourcesTask
    """

    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    # The two fake-source fields below are retained only so that persisted
    # configs keep loading; both are deprecated and unused by the task.
    doInsertFakes = Field(dtype=bool, default=False,
                          doc="Run fake sources injection task",
                          deprecated=("doInsertFakes is no longer supported. This config will be removed "
                                      "after v24."))
    insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
                                    doc="Injection of fake sources for testing "
                                    "purposes (must be retargeted)",
                                    deprecated=("insertFakes is no longer supported. This config will "
                                                "be removed after v24."))
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )

    def setDefaults(self):
        """Set detection defaults tuned for coadds rather than single visits."""
        super().setDefaults()
        # Threshold in units of the per-pixel standard deviation taken from
        # the (rescaled) variance plane.
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True  # Suppress large footprints that overwhelm the deblender
135
136
class DetectCoaddSourcesTask(PipelineTask):
    """Detect sources on a single-filter coadd.

    Warping individual visits into a coadd correlates the noise between
    neighboring pixels, so the coadd's variance plane under-states the true
    per-pixel variance. Before detecting, the task (optionally) rescales the
    variance plane to match the empirically measured noise — an approximation
    to propagating the full covariance matrix that works well in practice.
    Detection and footprint generation are then delegated to the
    ``detection`` subtask.

    This task is intended to run right after coadd assembly for one band;
    later stages of multi-band processing merge the per-band detections and
    eventually perform forced photometry.

    Parameters
    ----------
    schema : `lsst.afw.table.Schema`, optional
        Initial schema for the output catalog, modified in place to include
        all fields set by this task. If `None`, the minimal source schema is
        used.
    **kwargs
        Additional keyword arguments forwarded to `PipelineTask`.
    """

    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig
    getSchemaCatalogs = _makeGetSchemaCatalogs("det")

    def __init__(self, schema=None, **kwargs):
        # super() is deliberate here: PipelineTask uses multiple inheritance
        # and the init chain has been reviewed to cooperate correctly.
        super().__init__(**kwargs)
        self.schema = afwTable.SourceTable.makeMinimalSchema() if schema is None else schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")
        # Empty catalog carrying the final schema, persisted as an init-output.
        self.detectionSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        # Deterministic id factory / RNG seed derived from the data id.
        idInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch_band")
        inputs["idFactory"] = idInfo.makeSourceIdFactory()
        inputs["expId"] = idInfo.expId
        butlerQC.put(self.run(**inputs), outputRefs)

    def run(self, exposure, idFactory, expId):
        """Run detection on an exposure.

        If configured, first rescale the variance plane with the
        ``scaleVariance`` subtask, then detect sources with the ``detection``
        subtask.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            Exposure on which to detect (may be background-subtracted and
            scaled, depending on configuration).
        idFactory : `lsst.afw.table.IdFactory`
            IdFactory to set source identifiers.
        expId : `int`
            Exposure identifier (integer) for RNG seed.

        Returns
        -------
        result : `lsst.pipe.base.Struct`
            Results as a struct with attributes:

            ``outputSources``
                Catalog of detections (`lsst.afw.table.SourceCatalog`).
            ``outputBackgrounds``
                Backgrounds used in detection (`lsst.afw.math.BackgroundList`).
            ``outputExposure``
                The input exposure, updated in place by detection
                (`lsst.afw.image.Exposure`).
        """
        if self.config.doScaleVariance:
            scale = self.scaleVariance.run(exposure.maskedImage)
            # Record the applied factor so downstream consumers can undo it.
            exposure.getMetadata().add("VARIANCE_SCALE", scale)

        bgList = afwMath.BackgroundList()
        sourceTable = afwTable.SourceTable.make(self.schema, idFactory)
        detResult = self.detection.run(sourceTable, exposure, expId=expId)
        fpSets = detResult.fpSets
        # Older/newer detection variants may or may not attach a background.
        for bg in getattr(fpSets, "background", None) or ():
            bgList.append(bg)

        return Struct(outputSources=detResult.sources,
                      outputBackgrounds=bgList,
                      outputExposure=exposure)
229
230
231
232
233
class DeblendCoaddSourcesConfig(Config):
    """Configuration parameters for the `DeblendCoaddSourcesTask`.
    """

    singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
                                          doc="Deblend sources separately in each band")
    multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
                                         doc="Deblend sources simultaneously across bands")
    simultaneous = Field(dtype=bool,
                         default=True,
                         # BUGFIX: the doc previously referenced `multibandDeblend`,
                         # which does not match the field name `multiBandDeblend`.
                         doc="Simultaneously deblend all bands? "
                             "True uses `multiBandDeblend` while False uses `singleBandDeblend`")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    hasFakes = Field(dtype=bool,
                     default=False,
                     doc="Should be set to True if fake sources have been inserted into the input data.")

    def setDefaults(self):
        # super() rather than the explicit Config.setDefaults(self) call;
        # behavior is identical for this single-inheritance class.
        super().setDefaults()
        # Propagate every merged peak when deblending band-by-band, so peaks
        # merged in from other bands still get a child (see field doc on
        # SourceDeblendTask.propagateAllPeaks).
        self.singleBandDeblend.propagateAllPeaks = True
254
255
class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep",
                                                             "deblendedCatalog": "deblendedFlux"}):
    """Butler connections for MeasureMergedCoaddSourcesTask.

    The active set of inputs/outputs is trimmed in ``__init__`` according to
    the configuration: which flag-propagation implementation is targeted,
    which deblender catalog is consumed, and whether reference matching and
    denormalized match output are enabled.
    """

    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        name="ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects. Only used in legacy PropagateVisitFlagsTask.",
        name="src",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True
    )
    sourceTableHandles = cT.Input(
        doc=("Source tables that are derived from the ``CalibrateTask`` sources. "
             "These tables contain astrometry and photometry flags, and optionally "
             "PSF flags."),
        name="sourceTable_visit",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    finalizedSourceTableHandles = cT.Input(
        doc=("Finalized source tables from ``FinalizeCalibrationTask``. These "
             "tables contain PSF flags from the finalized PSF estimation."),
        name="finalized_src_table",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use."
             "If the single band deblender was used this should be 'deblendedFlux."
             "If the multi-band deblender was used this should be 'deblendedModel, "
             "or deblendedFlux if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'"),
        name="{inputCoaddName}Coadd_{deblendedCatalog}",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    scarletCatalog = cT.Input(
        doc="Catalogs produced by multiband deblending",
        name="{inputCoaddName}Coadd_deblendedCatalog",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    scarletModels = cT.Input(
        doc="Multiband scarlet models produced by the deblender",
        name="{inputCoaddName}Coadd_scarletModelData",
        storageClass="ScarletModelData",
        dimensions=("tract", "patch", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        if config.doPropagateFlags is False:
            # No flag propagation at all: none of the visit-level inputs
            # are needed.
            self.inputs -= set(("visitCatalogs",))
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))
        elif config.propagateFlags.target == PropagateSourceFlagsTask:
            # New PropagateSourceFlagsTask does not use visitCatalogs.
            self.inputs -= set(("visitCatalogs",))
            # Only request the per-visit tables whose flags are configured.
            if not config.propagateFlags.source_flags:
                self.inputs -= set(("sourceTableHandles",))
            if not config.propagateFlags.finalized_source_flags:
                self.inputs -= set(("finalizedSourceTableHandles",))
        else:
            # Deprecated PropagateVisitFlagsTask uses visitCatalogs.
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))

        if config.inputCatalog == "deblendedCatalog":
            # The multiband (scarlet) catalog replaces the generic input
            # catalog connection.
            self.inputs -= set(("inputCatalog",))
            # Scarlet model data is only needed to re-attach footprints.
            # BUGFIX: this check must be nested in this branch; at top level
            # the default config (doAddFootprints=True) would have removed
            # scarletModels, which runQuantum later pops unconditionally.
            if not config.doAddFootprints:
                self.inputs -= set(("scarletModels",))
        else:
            # Not using the multiband deblender: drop both scarlet inputs.
            # BUGFIX: ``self.inputs`` holds connection *attribute* names; the
            # previous code removed "deblendedCatalog" (not a connection, and
            # missing the tuple comma, so it removed single characters) which
            # was a no-op and left scarletCatalog as a required input.
            self.inputs -= set(("scarletCatalog",))
            self.inputs -= set(("scarletModels",))

        if config.doMatchSources is False:
            self.outputs -= set(("matchResult",))

        if config.doWriteMatchesDenormalized is False:
            self.outputs -= set(("denormMatches",))
394
395
class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """Configuration parameters for the MeasureMergedCoaddSourcesTask
    """
    inputCatalog = ChoiceField(
        dtype=str,
        default="deblendedCatalog",
        allowed={
            "deblendedCatalog": "Output catalog from ScarletDeblendTask",
            "deblendedFlux": "Output catalog from SourceDeblendTask",
            "mergeDet": "The merged detections before deblending."
        },
        doc="The name of the input catalog.",
    )
    doAddFootprints = Field(dtype=bool,
                            default=True,
                            doc="Whether or not to add footprints to the input catalog from scarlet models. "
                                "This should be true whenever using the multi-band deblender, "
                                "otherwise this should be False.")
    doConserveFlux = Field(dtype=bool, default=True,
                           doc="Whether to use the deblender models as templates to re-distribute the flux "
                               "from the 'exposure' (True), or to perform measurements on the deblender "
                               "model footprints.")
    doStripFootprints = Field(dtype=bool, default=True,
                              doc="Whether to strip footprints from the output catalog before "
                                  "saving to disk. "
                                  "This is usually done when using scarlet models to save disk space.")
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc="Propagate source flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str,
        default="raise",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )

    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    @property
    def refObjLoader(self):
        # Convenience alias: the reference-loader config lives on the
        # ``match`` subtask config.
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        # Extra measurement plugins needed by downstream coadd processing.
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_Variance',
                                           'base_LocalPhotoCalib',
                                           'base_LocalWcs']
        # Coadd-specific mask planes to record in the pixel-flag columns.
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                      'INEXACT_PSF']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                    'INEXACT_PSF']
484
485
class MeasureMergedCoaddSourcesTask(PipelineTask):
    """Deblend sources from main catalog in each coadd separately and measure.

    Use peaks and footprints from a master catalog to perform deblending and
    measurement in each coadd.

    Given a master input catalog of sources (peaks and footprints) or deblender
    outputs (including a HeavyFootprint in each band), measure each source on
    the coadd. Repeating this procedure with the same master catalog across
    multiple coadds will generate a consistent set of child sources.

    The deblender retains all peaks and deblends any missing peaks (dropouts in
    that band) as PSFs. Source properties are measured and the is-primary
    flag (indicating sources with no children) is set. Visit flags are
    propagated to the coadd sources.

    Optionally, we can match the coadd sources to an external reference
    catalog.

    After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we
    have a set of per-band catalogs. The next stage in the multi-band
    processing procedure will merge these measurements into a suitable catalog
    for driving forced photometry.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler` or `None`, optional
        A butler used to read the input schemas from disk or construct the reference
        catalog loader, if schema or peakSchema or refObjLoader is None.
    schema : `lsst.afw.table.Schema`, optional
        The schema of the merged detection catalog used as input to this one.
    peakSchema : `lsst.afw.table.Schema`, optional
        The schema of the PeakRecords in the Footprints in the merged detection catalog.
    refObjLoader : `lsst.meas.algorithms.ReferenceObjectLoader`, optional
        An instance of LoadReferenceObjectsTasks that supplies an external reference
        catalog. May be None if the loader can be constructed from the butler argument or all steps
        requiring a reference catalog are disabled.
    initInputs : `dict`, optional
        Dictionary that can contain a key ``inputSchema`` containing the
        input schema. If present will override the value of ``schema``.
    **kwargs
        Additional keyword arguments.
    """

    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig
    getSchemaCatalogs = _makeGetSchemaCatalogs("meas")

    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        super().__init__(**kwargs)
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        # A schema provided via initInputs (butler init-inputs) takes
        # precedence over one passed directly by the caller.
        if initInputs is not None:
            schema = initInputs['inputSchema'].schema
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        # Empty catalog carrying the final schema, persisted as an init-output.
        self.outputSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        if self.config.doMatchSources:
            # BUGFIX: the ``match`` subtask only exists when doMatchSources is
            # True; previously this block ran unconditionally and raised
            # AttributeError when matching was disabled.
            refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                                 inputs.pop('refCat'),
                                                 name=self.config.connections.refCat,
                                                 config=self.config.refObjLoader,
                                                 log=self.log)
            self.match.setRefObjLoader(refObjLoader)
        else:
            # refCat is a prerequisite input either way; drop it so it is not
            # passed to run() as an unexpected keyword argument.
            inputs.pop('refCat', None)

        # Set psfcache
        # move this to run after gen2 deprecation
        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        # Get unique integer ID for IdFactory and RNG seeds
        exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch")
        inputs['exposureId'] = exposureIdInfo.expId
        idFactory = exposureIdInfo.makeSourceIdFactory()
        # Transform inputCatalog into a catalog with this task's output schema.
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        # Load the correct input catalog: the connections class provides
        # exactly one of scarletCatalog or inputCatalog.
        if "scarletCatalog" in inputs:
            inputCatalog = inputs.pop("scarletCatalog")
            catalogRef = inputRefs.scarletCatalog
        else:
            inputCatalog = inputs.pop("inputCatalog")
            catalogRef = inputRefs.inputCatalog
        sources.extend(inputCatalog, self.schemaMapper)
        del inputCatalog
        # Add the HeavyFootprints to the deblended sources
        if self.config.doAddFootprints:
            modelData = inputs.pop('scarletModels')
            if self.config.doConserveFlux:
                redistributeImage = inputs['exposure'].image
            else:
                redistributeImage = None
            modelData.updateCatalogFootprints(
                catalog=sources,
                band=inputRefs.exposure.dataId["band"],
                psfModel=inputs['exposure'].getPsf(),
                redistributeImage=redistributeImage,
                removeScarletData=True,
            )
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.
        inputs['sources'] = sources

        skyMap = inputs.pop('skyMap')
        tractNumber = catalogRef.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(catalogRef.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                ccdInputs = inputs["exposure"].getInfo().getCoaddInputs().ccds
                inputs["ccdInputs"] = ccdInputs

                if "sourceTableHandles" in inputs:
                    sourceTableHandles = inputs.pop("sourceTableHandles")
                    sourceTableHandleDict = {handle.dataId["visit"]: handle
                                             for handle in sourceTableHandles}
                    inputs["sourceTableHandleDict"] = sourceTableHandleDict
                if "finalizedSourceTableHandles" in inputs:
                    finalizedSourceTableHandles = inputs.pop("finalizedSourceTableHandles")
                    finalizedSourceTableHandleDict = {handle.dataId["visit"]: handle
                                                      for handle in finalizedSourceTableHandles}
                    inputs["finalizedSourceTableHandleDict"] = finalizedSourceTableHandleDict
            else:
                # Deprecated legacy version
                # Filter out any visit catalog that is not coadd inputs
                ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
                visitKey = ccdInputs.schema.find("visit").key
                ccdKey = ccdInputs.schema.find("ccd").key
                inputVisitIds = set()
                ccdRecordsWcs = {}
                for ccdRecord in ccdInputs:
                    visit = ccdRecord.get(visitKey)
                    ccd = ccdRecord.get(ccdKey)
                    inputVisitIds.add((visit, ccd))
                    ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

                inputCatalogsToKeep = []
                inputCatalogWcsUpdate = []
                for i, dataRef in enumerate(inputRefs.visitCatalogs):
                    key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                    if key in inputVisitIds:
                        inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                        inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
                inputs['visitCatalogs'] = inputCatalogsToKeep
                inputs['wcsUpdates'] = inputCatalogWcsUpdate
                inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        # Strip HeavyFootprints to save space on disk.
        # BUGFIX: previously the catalog was fetched but doStripFootprints was
        # never applied, so the config option was a silent no-op.
        if self.config.doStripFootprints:
            sources = outputs.outputSources
            # Only strip children (parent != 0); parents keep their footprints
            # so blend families remain reconstructable downstream.
            for source in sources[sources["parent"] != 0]:
                source.setFootprint(None)
        butlerQC.put(outputs, outputRefs)

    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
            butler=None, sourceTableHandleDict=None, finalizedSourceTableHandleDict=None):
        """Run measurement algorithms on the input exposure, and optionally populate the
        resulting catalog with extra information.

        Parameters
        ----------
        exposure : `lsst.afw.exposure.Exposure`
            The input exposure on which measurements are to be performed.
        sources : `lsst.afw.table.SourceCatalog`
            A catalog built from the results of merged detections, or
            deblender outputs.
        skyInfo : `lsst.pipe.base.Struct`
            A struct containing information about the position of the input exposure within
            a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box.
        exposureId : `int` or `bytes`
            Packed unique number or bytes unique to the input exposure.
        ccdInputs : `lsst.afw.table.ExposureCatalog`, optional
            Catalog containing information on the individual visits which went into making
            the coadd.
        visitCatalogs : `list` of `lsst.afw.table.SourceCatalogs`, optional
            A list of source catalogs corresponding to measurements made on the individual
            visits which went into the input exposure. If None and butler is `None` then
            the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        wcsUpdates : `list` of `lsst.afw.geom.SkyWcs`, optional
            If visitCatalogs is not `None` this should be a list of wcs objects which correspond
            to the input visits. Used to put all coordinates to common system. If `None` and
            butler is `None` then the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        butler : `None`, optional
            This was a Gen2 butler used to load visit catalogs.
            No longer used and should not be set. Will be removed in the
            future.
        sourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for sourceTable_visit handles (key is visit) for propagating flags.
            These tables are derived from the ``CalibrateTask`` sources, and contain
            astrometry and photometry flags, and optionally PSF flags.
        finalizedSourceTableHandleDict : `dict` [`int`, `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for finalized_src_table handles (key is visit) for propagating flags.
            These tables are derived from ``FinalizeCalibrationTask`` and contain
            PSF flags from the finalized PSF estimation.

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results of running measurement task. Will contain the catalog in the
            sources attribute. Optionally will have results of matching to a
            reference catalog in the matchResults attribute, and denormalized
            matches in the denormMatches attribute.
        """
        if butler is not None:
            warnings.warn("The 'butler' parameter is no longer used and can be safely removed.",
                          category=FutureWarning, stacklevel=2)
            butler = None

        self.measurement.run(sources, exposure, exposureId=exposureId)

        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=sources,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        # TODO DM-11568: this contiguous check-and-copy could go away if we
        # reserve enough space during SourceDetection and/or SourceDeblend.
        # NOTE: sourceSelectors require contiguous catalogs, so ensure
        # contiguity now, so views are preserved from here on.
        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
                                 patchInfo=skyInfo.patchInfo)
        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                self.propagateFlags.run(
                    sources,
                    ccdInputs,
                    sourceTableHandleDict,
                    finalizedSourceTableHandleDict
                )
            else:
                # Legacy deprecated version
                self.propagateFlags.run(
                    butler,
                    sources,
                    ccdInputs,
                    exposure.getWcs(),
                    visitCatalogs,
                    wcsUpdates
                )

        results = Struct()

        if self.config.doMatchSources:
            matchResult = self.match.run(sources, exposure.getInfo().getFilter().bandLabel)
            matches = afwTable.packMatches(matchResult.matches)
            matches.table.setMetadata(matchResult.matchMeta)
            results.matchResult = matches
            if self.config.doWriteMatchesDenormalized:
                if matchResult.matches:
                    denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
                else:
                    self.log.warning("No matches, so generating dummy denormalized matches file")
                    denormMatches = afwTable.BaseCatalog(afwTable.Schema())
                    denormMatches.setMetadata(PropertyList())
                    denormMatches.getMetadata().add("COMMENT",
                                                    "This catalog is empty because no matches were found.")
                # BUGFIX: this assignment was duplicated in the original.
                results.denormMatches = denormMatches

        results.outputSources = sources
        return results
786