lsst.pipe.tasks gcc9029db3c+eeb411f812
multiBand.py
Go to the documentation of this file.
1#!/usr/bin/env python
2#
3# LSST Data Management System
4# Copyright 2008-2015 AURA/LSST.
5#
6# This product includes software developed by the
7# LSST Project (http://www.lsst.org/).
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the LSST License Statement and
20# the GNU General Public License along with this program. If not,
21# see <https://www.lsstcorp.org/LegalNotices/>.
22#
23import warnings
24import numpy as np
25
26from lsst.coadd.utils.coaddDataIdContainer import ExistingCoaddDataIdContainer
27from lsst.coadd.utils.getGen3CoaddExposureId import getGen3CoaddExposureId
28from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner,
29 PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
30import lsst.pipe.base.connectionTypes as cT
31from lsst.pex.config import Config, Field, ConfigurableField, ChoiceField
32from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader, ScaleVarianceTask
33from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
34from lsst.meas.deblender import SourceDeblendTask
35from lsst.meas.extensions.scarlet import ScarletDeblendTask
36from lsst.pipe.tasks.coaddBase import getSkyInfo
37from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
38from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
39from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
40from lsst.pipe.tasks.propagateSourceFlags import PropagateSourceFlagsTask
41import lsst.afw.image as afwImage
42import lsst.afw.table as afwTable
43import lsst.afw.math as afwMath
44from lsst.daf.base import PropertyList
45from lsst.skymap import BaseSkyMap
46from lsst.obs.base import ExposureIdInfo
47
48# NOTE: these imports are a convenience so multiband users only have to import this file.
49from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401
50from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401
51from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401
52from .multiBandUtils import getInputSchema, readCatalog, _makeMakeIdFactory # noqa: F401
53from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401
54from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401
55from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401
56from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401
57
58
59"""
60New set types:
61* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
62* deepCoadd_mergeDet: merged detections (tract, patch)
63* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
64* deepCoadd_ref: reference sources (tract, patch)
65All of these have associated *_schema catalogs that require no data ID and hold no records.
66
67In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
68the mergeDet, meas, and ref dataset Footprints:
69* deepCoadd_peak_schema
70"""
71
72
73
class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
    """Butler I/O declarations for `DetectCoaddSourcesTask`.

    Consumes a coadd exposure and produces the detection catalog, the
    background model used during detection, and the post-detection
    (variance-scaled, background-subtracted) exposure.
    """

    # Empty catalog carrying the schema of the detection catalog;
    # persisted at task-initialization time.
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    # Coadd on which detection is run.
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    # Background models fitted while detecting.
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    # Catalog of detected (parent) sources.
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    # The input coadd after detection-time modifications (variance scaling,
    # temporary wide background subtraction).
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap"),
    )
106
107
class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """!
    @anchor DetectCoaddSourcesConfig_

    @brief Configuration parameters for the DetectCoaddSourcesTask
    """
    # Empirical rescaling of the coadd variance plane (see ScaleVarianceTask).
    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    # Detection subtask; DynamicDetectionTask tunes thresholds from the image itself.
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    # Both fake-source fields are deprecated no-ops kept for config compatibility.
    doInsertFakes = Field(dtype=bool, default=False,
                          doc="Run fake sources injection task",
                          deprecated=("doInsertFakes is no longer supported. This config will be removed "
                                      "after v24."))
    insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
                                    doc="Injection of fake sources for testing "
                                    "purposes (must be retargeted)",
                                    deprecated=("insertFakes is no longer supported. This config will "
                                                "be removed after v24."))
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )

    def setDefaults(self):
        """Set detection defaults appropriate for coadds."""
        super().setDefaults()
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        # Coadds are made from background-subtracted CCDs, so any further
        # background subtraction should be very basic.
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        # Suppress large footprints that overwhelm the deblender.
        self.detection.doTempWideBackground = True
143
144
150
151
class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
    """!
    @anchor DetectCoaddSourcesTask_

    @brief Detect sources on a coadd of exposures obtained with a single filter.

    Coadding individual visits requires each exposure to be warped, which
    introduces covariance between neighboring pixels. Before detection the
    coadd variance plane is therefore rescaled to match the observed variance
    (an approximation to propagating the full covariance matrix that works
    well in practice). Detection itself is delegated to the
    @ref SourceDetectionTask_ "detection" subtask, which may be retargeted.

    @par Inputs:
        deepCoadd{tract,patch,filter}: ExposureF
    @par Outputs:
        deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
        @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input
           exposure (ExposureF)
        @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList
    @par Data Unit:
        tract, patch, filter

    See @ref DetectCoaddSourcesConfig_ "DetectSourcesConfig" for configuration
    parameters. This task has no debug variables of its own; see the
    documentation for @ref SourceDetectionTask_ "SourceDetectionTask" instead.

    Typical command-line usage (one invocation per band), e.g. with
    [ci_hsc](https://github.com/lsst/ci_hsc) data:
    @code
    detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
    detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
    @endcode
    The per-band catalogs feed the next multi-band processing step,
    @ref MergeDetectionsTask_ "MergeDetectionsTask".
    """
    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig
    getSchemaCatalogs = _makeGetSchemaCatalogs("det")
    makeIdFactory = _makeMakeIdFactory("CoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        """Build the gen2 command-line argument parser."""
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                               ContainerClass=ExistingCoaddDataIdContainer)
        return parser

    def __init__(self, schema=None, **kwargs):
        """!
        @brief Initialize the task, creating the @ref SourceDetectionTask_ "detection" subtask.

        @param[in] schema: initial schema for the output catalog, modified in place to include all
            fields set by this task. If None, the source minimal schema will be used.
        @param[in] **kwargs: keyword arguments forwarded to lsst.pipe.base.task.Task.__init__
        """
        # N.B. super() is used here to handle the multiple inheritance of
        # PipelineTasks; the __init__ call structure has been reviewed
        # carefully to be sure super() works as intended.
        super().__init__(**kwargs)
        self.schema = schema if schema is not None else afwTable.SourceTable.makeMinimalSchema()
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")

        # Empty catalog used to persist the schema (init-output connection).
        self.detectionSchema = afwTable.SourceCatalog(self.schema)

    def runDataRef(self, patchRef):
        """!
        @brief Run detection on a coadd (gen2 entry point).

        Invokes @ref run and persists its results via @ref write.

        @param[in] patchRef: data reference for patch
        @return the Struct returned by @ref run
        """
        datasetName = self.config.coaddName + "Coadd"
        if self.config.hasFakes:
            datasetName = "fakes_" + datasetName
        exposure = patchRef.get(datasetName, immediate=True)
        expId = getGen3CoaddExposureId(patchRef, coaddName=self.config.coaddName, log=self.log)
        results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
        self.write(results, patchRef)
        return results

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        """Gen3 entry point: assemble inputs, run, and persist outputs."""
        inputs = butlerQC.get(inputRefs)
        idInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch_band")
        inputs["idFactory"] = idInfo.makeSourceIdFactory()
        inputs["expId"] = idInfo.expId
        butlerQC.put(self.run(**inputs), outputRefs)

    def run(self, exposure, idFactory, expId):
        """!
        @brief Run detection on an exposure.

        Optionally rescales the variance plane to match the observed variance
        (see @ref ScaleVarianceTask), then delegates detection to the
        @ref SourceDetectionTask_ "detection" subtask.

        @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and
            variance-scaled in place, depending on configuration).
        @param[in] idFactory: IdFactory used to set source identifiers
        @param[in] expId: Exposure identifier (integer) used as an RNG seed

        @return a pipe.base.Struct with fields
            - outputSources: catalog of detections
            - outputBackgrounds: list of backgrounds fitted during detection
            - outputExposure: the (possibly modified) input exposure
        """
        if self.config.doScaleVariance:
            scaleFactor = self.scaleVariance.run(exposure.maskedImage)
            # Record the applied factor so downstream consumers can undo it.
            exposure.getMetadata().add("VARIANCE_SCALE", scaleFactor)
        backgrounds = afwMath.BackgroundList()
        sourceTable = afwTable.SourceTable.make(self.schema, idFactory)
        detRes = self.detection.run(sourceTable, exposure, expId=expId)
        # Collect any backgrounds the detection subtask fitted (attribute is
        # optional on the returned fpSets struct).
        for bg in getattr(detRes.fpSets, "background", None) or []:
            backgrounds.append(bg)
        return Struct(outputSources=detRes.sources,
                      outputBackgrounds=backgrounds,
                      outputExposure=exposure)

    def write(self, results, patchRef):
        """!
        @brief Persist the results of @ref run (gen2 only).

        @param[in] results: Struct returned from @ref run
        @param[in] patchRef: data reference for patch
        """
        coaddName = self.config.coaddName + "Coadd"
        patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
        patchRef.put(results.outputSources, coaddName + "_det")
        prefix = "fakes_" if self.config.hasFakes else ""
        patchRef.put(results.outputExposure, prefix + coaddName + "_calexp")
348
349
350
351
class DeblendCoaddSourcesConfig(Config):
    """Configuration parameters for `DeblendCoaddSourcesTask`."""

    # Per-band (SDSS-style) deblender; used when ``simultaneous`` is False.
    singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
                                          doc="Deblend sources separately in each band")
    # Multi-band (scarlet) deblender; used when ``simultaneous`` is True.
    multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
                                         doc="Deblend sources simultaneously across bands")
    simultaneous = Field(dtype=bool,
                         default=True,
                         doc="Simultaneously deblend all bands? "
                             "True uses `multibandDeblend` while False uses `singleBandDeblend`")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    hasFakes = Field(dtype=bool,
                     default=False,
                     doc="Should be set to True if fake sources have been inserted into the input data.")

    def setDefaults(self):
        """Propagate all peaks by default when deblending per band."""
        Config.setDefaults(self)
        self.singleBandDeblend.propagateAllPeaks = True
373
374
class DeblendCoaddSourcesRunner(MergeSourcesRunner):
    """Task runner for `DeblendCoaddSourcesTask`.

    Required because the run method needs the full list of data references
    for a patch (one per band) rather than a single dataRef.
    """
    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        """Provide a list of patch references for each patch, tract, filter combo.

        Parameters
        ----------
        parsedCmd:
            The parsed command
        kwargs:
            Keyword arguments passed to the task

        Returns
        -------
        targetList: list
            List of tuples, where each tuple is a (dataRef, kwargs) pair.
        """
        refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
        # Forward the PSF cache size from the command line to each target.
        kwargs["psfCache"] = parsedCmd.psfCache
        targets = []
        for tractRefs in refDict.values():
            for patchRefs in tractRefs.values():
                targets.append((list(patchRefs.values()), kwargs))
        return targets
400
401
class DeblendCoaddSourcesTask(CmdLineTask):
    """Deblend the sources in a merged catalog.

    Deblend sources from the master catalog in each coadd.
    This can either be done separately in each band using the HSC-SDSS deblender
    (`DeblendCoaddSourcesTask.config.simultaneous==False`)
    or use SCARLET to simultaneously fit the blend in all bands
    (`DeblendCoaddSourcesTask.config.simultaneous==True`).
    The task will set its own `self.schema` attribute to the `Schema` of the
    output deblended catalog.
    This will include all fields from the input `Schema`, as well as additional
    fields from the deblender.

    Parameters
    ----------
    butler: `Butler`
        Butler used to read the input schemas from disk, if `schema` or
        `peakSchema` is not supplied.
    schema: `Schema`
        The schema of the merged detection catalog as an input to this task.
    peakSchema: `Schema`
        The schema of the `PeakRecord`s in the `Footprint`s in the merged
        detection catalog.
    """
    ConfigClass = DeblendCoaddSourcesConfig
    RunnerClass = DeblendCoaddSourcesRunner
    _DefaultName = "deblendCoaddSources"
    # Source ids come from the band-independent merged-coadd id space.
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId", includeBand=False)

    @classmethod
    def _makeArgumentParser(cls):
        """Build the gen2 command-line argument parser."""
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_calexp",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
                               ContainerClass=ExistingCoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser

    def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
        CmdLineTask.__init__(self, **kwargs)
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
        # Map the input (mergeDet) schema into the output schema so the
        # deblender can add its own fields on top of it.
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        if peakSchema is None:
            assert butler is not None, "Neither butler nor peakSchema is defined"
            peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema

        if self.config.simultaneous:
            self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
        else:
            self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)

    def getSchemaCatalogs(self):
        """Return a dict of empty catalogs for each catalog dataset produced by this task.

        Returns
        -------
        result: dict
            Dictionary of empty catalogs, with catalog names as keys.
        """
        catalog = afwTable.SourceCatalog(self.schema)
        return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
                self.config.coaddName + "Coadd_deblendedModel": catalog}

    def runDataRef(self, patchRefList, psfCache=100):
        """Deblend the patch.

        Deblend each source simultaneously or separately
        (depending on `DeblendCoaddSourcesTask.config.simultaneous`)
        and write the deblended sources out.

        Parameters
        ----------
        patchRefList: list
            List of data references for each filter
        psfCache: int
            Size of the CoaddPsf cache (single-band deblending only).
        """
        if self.config.hasFakes:
            coaddType = "fakes_" + self.config.coaddName
        else:
            coaddType = self.config.coaddName

        if self.config.simultaneous:
            # Use SCARLET to simultaneously deblend across filters.
            bands = []
            exposures = []
            for patchRef in patchRefList:
                calexp = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                filterLabel = patchRef.get(coaddType + "Coadd_filterLabel", immediate=True)
                bands.append(filterLabel.bandLabel)
                exposures.append(calexp)
            # Sort inputs by band to match the Gen3 ordering of inputs.
            # Sorting by index avoids comparing exposures/dataRefs if two
            # entries ever carried the same band label.
            order = sorted(range(len(bands)), key=bands.__getitem__)
            exposures = [exposures[i] for i in order]
            patchRefList = [patchRefList[i] for i in order]
            bands = [bands[i] for i in order]
            # The input sources are the same for all bands, since the catalog
            # is merged (and the id factory is band-independent).
            sources = self.readSources(patchRefList[0])
            mExposure = afwImage.MultibandExposure.fromExposures(bands, exposures)
            templateCatalogs, fluxCatalogs = self.multiBandDeblend.run(mExposure, sources)
            for n, patchRef in enumerate(patchRefList):
                self.write(patchRef, templateCatalogs[bands[n]], "Model")
                if bands[n] in fluxCatalogs:
                    self.write(patchRef, fluxCatalogs[bands[n]], "Flux")
        else:
            # Use the single-band deblender to deblend each band separately.
            for patchRef in patchRefList:
                calexp = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                calexp.getPsf().setCacheCapacity(psfCache)
                sources = self.readSources(patchRef)
                self.singleBandDeblend.run(calexp, sources)
                # Bug fix: write() requires a catalogType argument; the
                # single-band deblender produces the flux-conserved catalog
                # (persisted as <coaddName>Coadd_deblendedFlux).
                self.write(patchRef, sources, "Flux")

    def readSources(self, dataRef):
        """Read the merged detection catalog and prepare it for measurement.

        Parameters
        ----------
        dataRef: data reference
            Data reference for catalog of merged detections

        Returns
        -------
        sources: `SourceCatalog`
            List of sources in merged catalog

        Columns are added (via the schema mapper) to hold the measurements
        we're about to make, so we can measure in-place.
        """
        merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
        self.log.info("Read %d detections: %s", len(merged), dataRef.dataId)
        idFactory = self.makeIdFactory(dataRef)
        # There may be gaps in the mergeDet catalog, which would make newly
        # assigned source ids inconsistent, so seed the id factory with the
        # largest id already in the catalog.
        idFactory.notify(np.max(merged["id"]))
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(merged, self.schemaMapper)
        return sources

    def write(self, dataRef, sources, catalogType):
        """Write a deblended source catalog.

        Parameters
        ----------
        dataRef: Data Reference
            Reference to the output catalog.
        sources: `SourceCatalog`
            The catalog to persist.
        catalogType: `str`
            Either "Flux" (flux-conserved catalog) or "Model" (catalog using
            the multiband template models as footprints); selects the
            `<coaddName>Coadd_deblended<catalogType>` dataset.
        """
        dataRef.put(sources, self.config.coaddName + f"Coadd_deblended{catalogType}")
        self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId)

    def writeMetadata(self, dataRefList):
        """Write the metadata produced from processing the data.

        Parameters
        ----------
        dataRefList
            List of Butler data references used to write the metadata.
            The metadata is written to dataset type `CmdLineTask._getMetadataName`.
        """
        for dataRef in dataRefList:
            try:
                metadataName = self._getMetadataName()
                if metadataName is not None:
                    dataRef.put(self.getFullMetadata(), metadataName)
            except Exception as e:
                # Deliberately best-effort: failure to persist metadata must
                # not abort the processing run.
                self.log.warning("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)
586
587
class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep",
                                                             "deblendedCatalog": "deblendedFlux"}):
    """Butler I/O declarations for `MeasureMergedCoaddSourcesTask`.

    `__init__` prunes the declared inputs/outputs according to the
    configuration (flag propagation mode, deblender choice, matching options).
    """
    # NOTE: emitted once at import time (class-body statement) to flag the
    # deprecated template.
    warnings.warn("MeasureMergedCoaddSourcesConnections.defaultTemplates is deprecated and no longer used. "
                  "Use MeasureMergedCoaddSourcesConfig.inputCatalog.")
    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        name="ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects. Only used in legacy PropagateVisitFlagsTask.",
        name="src",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True
    )
    sourceTableHandles = cT.Input(
        doc=("Source tables that are derived from the ``CalibrateTask`` sources. "
             "These tables contain astrometry and photometry flags, and optionally "
             "PSF flags."),
        name="sourceTable_visit",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    finalizedSourceTableHandles = cT.Input(
        doc=("Finalized source tables from ``FinalizeCalibrationTask``. These "
             "tables contain PSF flags from the finalized PSF estimation."),
        name="finalized_src_table",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use."
             "If the single band deblender was used this should be 'deblendedFlux."
             "If the multi-band deblender was used this should be 'deblendedModel, "
             "or deblendedFlux if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'"),
        name="{inputCoaddName}Coadd_{deblendedCatalog}",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    scarletCatalog = cT.Input(
        doc="Catalogs produced by multiband deblending",
        name="{inputCoaddName}Coadd_deblendedCatalog",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    scarletModels = cT.Input(
        doc="Multiband scarlet models produced by the deblender",
        name="{inputCoaddName}Coadd_scarletModelData",
        storageClass="ScarletModelData",
        dimensions=("tract", "patch", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        """Prune connections not needed for the given configuration."""
        super().__init__(config=config)
        if config.doPropagateFlags is False:
            # No flag propagation: none of the visit-level inputs are needed.
            self.inputs -= set(("visitCatalogs",))
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))
        elif config.propagateFlags.target == PropagateSourceFlagsTask:
            # New PropagateSourceFlagsTask does not use visitCatalogs.
            self.inputs -= set(("visitCatalogs",))
            # Check for types of flags required.
            if not config.propagateFlags.source_flags:
                self.inputs -= set(("sourceTableHandles",))
            if not config.propagateFlags.finalized_source_flags:
                self.inputs -= set(("finalizedSourceTableHandles",))
        else:
            # Deprecated PropagateVisitFlagsTask uses visitCatalogs.
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))

        if config.inputCatalog == "deblendedCatalog":
            # Scarlet output: read scarletCatalog (+ models if adding
            # footprints) instead of the generic inputCatalog connection.
            self.inputs -= set(("inputCatalog",))
            if not config.doAddFootprints:
                self.inputs -= set(("scarletModels",))
        else:
            # Bug fix: the original subtracted set(("deblendedCatalog")) —
            # note the missing comma, which built a set of *characters* —
            # and in any case no connection is named "deblendedCatalog";
            # the scarlet inputs are the "scarletCatalog" and "scarletModels"
            # attributes, neither of which is needed here.
            self.inputs -= set(("scarletCatalog",))
            self.inputs -= set(("scarletModels",))

        if config.doMatchSources is False:
            self.outputs -= set(("matchResult",))

        if config.doWriteMatchesDenormalized is False:
            self.outputs -= set(("denormMatches",))
729
730
class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """!
    @anchor MeasureMergedCoaddSourcesConfig_

    @brief Configuration parameters for the MeasureMergedCoaddSourcesTask

    Selects which per-patch catalog feeds measurement (deblended or merged
    detections), configures the measurement, flag-propagation,
    aperture-correction, catalog-calculation and reference-matching subtasks,
    and carries miscellaneous settings (coadd name, PSF cache size, fakes).
    """
    # Which input catalog to measure: scarlet output, single-band deblender
    # output, or the raw merged detections before deblending.
    inputCatalog = ChoiceField(
        dtype=str,
        default="deblendedCatalog",
        allowed={
            "deblendedCatalog": "Output catalog from ScarletDeblendTask",
            "deblendedFlux": "Output catalog from SourceDeblendTask",
            "mergeDet": "The merged detections before deblending."
        },
        doc="The name of the input catalog.",
    )
    doAddFootprints = Field(dtype=bool,
                            default=True,
                            doc="Whether or not to add footprints to the input catalog from scarlet models. "
                                "This should be true whenever using the multi-band deblender, "
                                "otherwise this should be False.")
    doConserveFlux = Field(dtype=bool, default=True,
                           doc="Whether to use the deblender models as templates to re-distribute the flux "
                               "from the 'exposure' (True), or to perform measurements on the deblender "
                               "model footprints.")
    doStripFootprints = Field(dtype=bool, default=True,
                              doc="Whether to strip footprints from the output catalog before "
                                  "saving to disk. "
                                  "This is usually done when using scarlet models to save disk space.")
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc="Propagate source flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str,
        default="raise",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )

    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    @property
    def refObjLoader(self):
        """Reference-object loader config; an alias for the loader that
        actually lives on the ``match`` subtask configuration."""
        return self.match.refObjLoader

    def setDefaults(self):
        """Enable extra measurement plugins and add coadd-specific mask
        planes to the pixel-flag footprint checks."""
        super().setDefaults()
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_Variance',
                                           'base_LocalPhotoCalib',
                                           'base_LocalWcs']
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                       'INEXACT_PSF']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                     'INEXACT_PSF']

    def validate(self):
        """Validate the config, ensuring the Gen2 (``ref_dataset_name``) and
        Gen3 (``connections.refCat``) reference-catalog names stay in sync."""
        super().validate()
        refCatGen2 = getattr(self.refObjLoader, "ref_dataset_name", None)
        if refCatGen2 is not None and refCatGen2 != self.connections.refCat:
            raise ValueError(
                f"Gen2 ({refCatGen2}) and Gen3 ({self.connections.refCat}) reference catalogs "
                f"are different. These options must be kept in sync until Gen2 is retired."
            )
831
832
833
839
840
class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
    """Task runner that forwards the ``--psfCache`` command-line value.

    MeasureMergedCoaddSourcesTask needs the PSF cache size when processing
    each target, so it is injected here as an extra keyword argument.
    """
    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        """Build the target list, passing the parsed psfCache size along."""
        cacheSize = parsedCmd.psfCache
        return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=cacheSize)
846
847
class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask):
    r"""!
    @anchor MeasureMergedCoaddSourcesTask_

    @brief Deblend sources from master catalog in each coadd separately and measure.

    @section pipe_tasks_multiBand_Contents Contents

    - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
    - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
    - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
    - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
    - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
    - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description

    Command-line task that uses peaks and footprints from a master catalog to perform deblending and
    measurement in each coadd.

    Given a master input catalog of sources (peaks and footprints) or deblender outputs
    (including a HeavyFootprint in each band), measure each source on the
    coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
    consistent set of child sources.

    The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
    properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
    flags are propagated to the coadd sources.

    Optionally, we can match the coadd sources to an external reference catalog.

    @par Inputs:
        deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
        @n deepCoadd_calexp{tract,patch,filter}: ExposureF
    @par Outputs:
        deepCoadd_meas{tract,patch,filter}: SourceCatalog
    @par Data Unit:
        tract, patch, filter

    MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:

    <DL>
      <DT> @ref SingleFrameMeasurementTask_ "measurement"
      <DD> Measure source properties of deblended sources.</DD>
      <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
      <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are
      not at the edge of the field and that have either not been deblended or are the children of deblended
      sources</DD>
      <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
      <DD> Propagate flags set in individual visits to the coadd.</DD>
      <DT> @ref DirectMatchTask_ "match"
      <DD> Match input sources to a reference catalog (optional).
      </DD>
    </DL>
    These subtasks may be retargeted as required.

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization

    @copydoc \_\_init\_\_

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task

    @copydoc run

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters

    See @ref MeasureMergedCoaddSourcesConfig_

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables

    The command line task interface supports a
    flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
    files.

    MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
    the various sub-tasks. See the documentation for individual sub-tasks for more information.

    @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
    MeasureMergedCoaddSourcesTask

    After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
    The next stage in the multi-band processing procedure will merge these measurements into a suitable
    catalog for driving forced photometry.

    Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds
    to be processed.
    A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
    `--help` command line argument:
    @code
    measureCoaddSources.py --help
    @endcode

    To demonstrate usage of the MeasureMergedCoaddSourcesTask in the larger context of multi-band
    processing, we will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming
    one has finished step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in
    the HSC-I band coadd as follows:
    @code
    measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
    @endcode
    This will process the HSC-I band data. The results are written in
    `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`

    It is also necessary to run
    @code
    measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
    @endcode
    to generate the sources catalogs for the HSC-R band required by the next step in the multi-band
    procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
    """
    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig
    RunnerClass = MeasureMergedCoaddSourcesRunner
    getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
    # The IDs we already have are of this type
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId", includeBand=False)

    @classmethod
    def _makeArgumentParser(cls):
        """Build the Gen2 command-line argument parser, adding the coadd data
        ID and the --psfCache option forwarded by the runner."""
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_calexp",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                               ContainerClass=ExistingCoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser

    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        """!
        @brief Initialize the task.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
        @param[in] schema: the schema of the merged detection catalog used as input to this one
        @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
        @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external reference
            catalog. May be None if the loader can be constructed from the butler argument or all steps
            requiring a reference catalog are disabled.
        @param[in] butler: a butler used to read the input schemas from disk or construct the reference
            catalog loader, if schema or peakSchema or refObjLoader is None

        The task will set its own self.schema attribute to the schema of the output measurement catalog.
        This will include all fields from the input schema, as well as additional fields for all the
        measurements.
        """
        super().__init__(**kwargs)
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        # Prefer the schema supplied by the pipeline (Gen3); fall back to
        # reading it through the butler (Gen2).
        if initInputs is not None:
            schema = initInputs['inputSchema'].schema
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        self.outputSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        """Prepare Gen3 inputs (reference loader, IDs, input catalog,
        sky info, flag-propagation tables), delegate to run(), and persist
        the outputs."""
        inputs = butlerQC.get(inputRefs)

        # The "match" subtask only exists when doMatchSources is enabled
        # (see __init__), so only build and attach the loader in that case.
        refCat = inputs.pop('refCat', None)
        if self.config.doMatchSources:
            refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                                 refCat, config=self.config.refObjLoader,
                                                 log=self.log)
            self.match.setRefObjLoader(refObjLoader)

        # Set psfcache
        # move this to run after gen2 deprecation
        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        # Get unique integer ID for IdFactory and RNG seeds
        exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch")
        inputs['exposureId'] = exposureIdInfo.expId
        idFactory = exposureIdInfo.makeSourceIdFactory()
        # Transform inputCatalog
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        # Load the correct input catalog
        if "scarletCatalog" in inputs:
            inputCatalog = inputs.pop("scarletCatalog")
            catalogRef = inputRefs.scarletCatalog
        else:
            inputCatalog = inputs.pop("inputCatalog")
            catalogRef = inputRefs.inputCatalog
        sources.extend(inputCatalog, self.schemaMapper)
        del inputCatalog
        # Add the HeavyFootprints to the deblended sources
        if self.config.doAddFootprints:
            modelData = inputs.pop('scarletModels')
            if self.config.doConserveFlux:
                redistributeImage = inputs['exposure'].image
            else:
                redistributeImage = None
            modelData.updateCatalogFootprints(
                catalog=sources,
                band=inputRefs.exposure.dataId["band"],
                psfModel=inputs['exposure'].getPsf(),
                redistributeImage=redistributeImage,
                removeScarletData=True,
            )
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.
        inputs['sources'] = sources

        # Assemble the sky information run() needs from the skymap and the
        # tract/patch of the input catalog.
        skyMap = inputs.pop('skyMap')
        tractNumber = catalogRef.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(catalogRef.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                ccdInputs = inputs["exposure"].getInfo().getCoaddInputs().ccds
                inputs["ccdInputs"] = ccdInputs

                if "sourceTableHandles" in inputs:
                    sourceTableHandles = inputs.pop("sourceTableHandles")
                    sourceTableHandleDict = {handle.dataId["visit"]: handle
                                             for handle in sourceTableHandles}
                    inputs["sourceTableHandleDict"] = sourceTableHandleDict
                if "finalizedSourceTableHandles" in inputs:
                    finalizedSourceTableHandles = inputs.pop("finalizedSourceTableHandles")
                    finalizedSourceTableHandleDict = {handle.dataId["visit"]: handle
                                                      for handle in finalizedSourceTableHandles}
                    inputs["finalizedSourceTableHandleDict"] = finalizedSourceTableHandleDict
            else:
                # Deprecated legacy version
                # Filter out any visit catalog that is not coadd inputs
                ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
                visitKey = ccdInputs.schema.find("visit").key
                ccdKey = ccdInputs.schema.find("ccd").key
                inputVisitIds = set()
                ccdRecordsWcs = {}
                for ccdRecord in ccdInputs:
                    visit = ccdRecord.get(visitKey)
                    ccd = ccdRecord.get(ccdKey)
                    inputVisitIds.add((visit, ccd))
                    ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

                inputCatalogsToKeep = []
                inputCatalogWcsUpdate = []
                for i, dataRef in enumerate(inputRefs.visitCatalogs):
                    key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                    if key in inputVisitIds:
                        inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                        inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
                inputs['visitCatalogs'] = inputCatalogsToKeep
                inputs['wcsUpdates'] = inputCatalogWcsUpdate
                inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        # NOTE(review): config.doStripFootprints implies HeavyFootprints should be
        # stripped from outputs.outputSources before persisting, but no stripping
        # is performed in this code path — confirm against the pipeline definition.
        # (A previous dead assignment of outputs.outputSources was removed here.)
        butlerQC.put(outputs, outputRefs)

    def runDataRef(self, patchRef, psfCache=100):
        """!
        @brief Deblend and measure.

        @param[in] patchRef: Patch reference.
        @param[in] psfCache: Size of the CoaddPsf cache to set on the exposure's PSF.

        Set 'is-primary' and related flags. Propagate flags
        from individual visits. Optionally match the sources to a reference catalog and write the matches.
        Finally, write the deblended sources and measurements out.
        """
        if self.config.hasFakes:
            coaddType = "fakes_" + self.config.coaddName
        else:
            coaddType = self.config.coaddName
        exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
        exposure.getPsf().setCacheCapacity(psfCache)
        sources = self.readSources(patchRef)
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.
        skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)

        if self.config.doPropagateFlags:
            ccdInputs = self.propagateFlags.getCcdInputs(exposure)
        else:
            ccdInputs = None

        expId = getGen3CoaddExposureId(patchRef, coaddName=self.config.coaddName, includeBand=False,
                                       log=self.log)
        results = self.run(exposure=exposure, sources=sources, skyInfo=skyInfo, exposureId=expId,
                           ccdInputs=ccdInputs, butler=patchRef.getButler())

        if self.config.doMatchSources:
            self.writeMatches(patchRef, results)
        self.write(patchRef, results.outputSources)

    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
            butler=None, sourceTableHandleDict=None, finalizedSourceTableHandleDict=None):
        """Run measurement algorithms on the input exposure, and optionally populate the
        resulting catalog with extra information.

        Parameters
        ----------
        exposure : `lsst.afw.exposure.Exposure`
            The input exposure on which measurements are to be performed
        sources : `lsst.afw.table.SourceCatalog`
            A catalog built from the results of merged detections, or
            deblender outputs.
        skyInfo : `lsst.pipe.base.Struct`
            A struct containing information about the position of the input exposure within
            a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
        exposureId : `int` or `bytes`
            packed unique number or bytes unique to the input exposure
        ccdInputs : `lsst.afw.table.ExposureCatalog`
            Catalog containing information on the individual visits which went into making
            the coadd.
        sourceTableHandleDict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
            Dict for sourceTable_visit handles (key is visit) for propagating flags.
            These tables are derived from the ``CalibrateTask`` sources, and contain
            astrometry and photometry flags, and optionally PSF flags.
        finalizedSourceTableHandleDict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for finalized_src_table handles (key is visit) for propagating flags.
            These tables are derived from ``FinalizeCalibrationTask`` and contain
            PSF flags from the finalized PSF estimation.
        visitCatalogs : list of `lsst.afw.table.SourceCatalogs`
            A list of source catalogs corresponding to measurements made on the individual
            visits which went into the input exposure. If None and butler is `None` then
            the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        wcsUpdates : list of `lsst.afw.geom.SkyWcs`
            If visitCatalogs is not `None` this should be a list of wcs objects which correspond
            to the input visits. Used to put all coordinates to common system. If `None` and
            butler is `None` then the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        butler : `lsst.daf.persistence.Butler`
            A gen2 butler used to load visit catalogs.
            Deprecated, to be removed with Gen2.

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results of running measurement task. Will contain the catalog in the
            sources attribute. Optionally will have results of matching to a
            reference catalog in the matchResults attribute, and denormalized
            matches in the denormMatches attribute.
        """
        self.measurement.run(sources, exposure, exposureId=exposureId)

        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=sources,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        # TODO DM-11568: this contiguous check-and-copy could go away if we
        # reserve enough space during SourceDetection and/or SourceDeblend.
        # NOTE: sourceSelectors require contiguous catalogs, so ensure
        # contiguity now, so views are preserved from here on.
        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
                                 patchInfo=skyInfo.patchInfo)
        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New version
                self.propagateFlags.run(
                    sources,
                    ccdInputs,
                    sourceTableHandleDict,
                    finalizedSourceTableHandleDict
                )
            else:
                # Legacy deprecated version
                self.propagateFlags.run(
                    butler,
                    sources,
                    ccdInputs,
                    exposure.getWcs(),
                    visitCatalogs,
                    wcsUpdates
                )

        results = Struct()

        if self.config.doMatchSources:
            matchResult = self.match.run(sources, exposure.getInfo().getFilter().bandLabel)
            matches = afwTable.packMatches(matchResult.matches)
            matches.table.setMetadata(matchResult.matchMeta)
            results.matchResult = matches
            if self.config.doWriteMatchesDenormalized:
                if matchResult.matches:
                    denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
                else:
                    self.log.warning("No matches, so generating dummy denormalized matches file")
                    denormMatches = afwTable.BaseCatalog(afwTable.Schema())
                    denormMatches.setMetadata(PropertyList())
                    denormMatches.getMetadata().add("COMMENT",
                                                    "This catalog is empty because no matches were found.")
                # Single assignment covers both branches (a duplicated
                # assignment inside the else branch was removed).
                results.denormMatches = denormMatches

        results.outputSources = sources
        return results

    def readSources(self, dataRef):
        """!
        @brief Read input sources.

        @param[in] dataRef: Data reference for catalog of merged detections
        @return List of sources in merged catalog

        We also need to add columns to hold the measurements we're about to make so we can measure in-place.
        """
        merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
        self.log.info("Read %d detections: %s", len(merged), dataRef.dataId)
        idFactory = self.makeIdFactory(dataRef)
        # Reserve the existing IDs so the factory never re-issues them.
        for s in merged:
            idFactory.notify(s.getId())
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(merged, self.schemaMapper)
        return sources

    def writeMatches(self, dataRef, results):
        """!
        @brief Write matches of the sources to the astrometric reference catalog.

        @param[in] dataRef: data reference
        @param[in] results: results struct from run method
        """
        if hasattr(results, "matchResult"):
            dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch")
        if hasattr(results, "denormMatches"):
            dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull")

    def write(self, dataRef, sources):
        """!
        @brief Write the source catalog.

        @param[in] dataRef: data reference
        @param[in] sources: source catalog
        """
        dataRef.put(sources, self.config.coaddName + "Coadd_meas")
        self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId)
1309
def getSkyInfo(coaddName, patchRef)
Return the SkyMap, tract and patch information, wcs, and outer bbox of the patch to be coadded.
Definition: coaddBase.py:275
def writeMetadata(self, dataRefList)
No metadata to write, and not sure how to write it for a list of dataRefs.
def write(self, patchRef, catalog)
Write the output.