Coverage for python/lsst/pipe/tasks/multiBand.py: 27%
1#!/usr/bin/env python
2#
3# LSST Data Management System
4# Copyright 2008-2015 AURA/LSST.
5#
6# This product includes software developed by the
7# LSST Project (http://www.lsst.org/).
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the LSST License Statement and
20# the GNU General Public License along with this program. If not,
21# see <https://www.lsstcorp.org/LegalNotices/>.
22#
23import numpy as np
25from lsst.coadd.utils.coaddDataIdContainer import ExistingCoaddDataIdContainer
26from lsst.coadd.utils.getGen3CoaddExposureId import getGen3CoaddExposureId
27from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner,
28 PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
29import lsst.pipe.base.connectionTypes as cT
30from lsst.pex.config import Config, Field, ConfigurableField
31from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader, ScaleVarianceTask
32from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
33from lsst.meas.deblender import SourceDeblendTask
34from lsst.meas.extensions.scarlet import ScarletDeblendTask
35from lsst.pipe.tasks.coaddBase import getSkyInfo
36from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
37from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
38from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
39from lsst.pipe.tasks.propagateSourceFlags import PropagateSourceFlagsTask
40import lsst.afw.image as afwImage
41import lsst.afw.table as afwTable
42import lsst.afw.math as afwMath
43from lsst.daf.base import PropertyList
44from lsst.skymap import BaseSkyMap
45from lsst.obs.base import ExposureIdInfo
47# NOTE: these imports are a convenience so multiband users only have to import this file.
48from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401
49from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401
50from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401
51from .multiBandUtils import getInputSchema, readCatalog, _makeMakeIdFactory # noqa: F401
52from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401
53from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401
54from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401
55from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401
58"""
59New dataset types:
60* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
61* deepCoadd_mergeDet: merged detections (tract, patch)
62* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
63* deepCoadd_ref: reference sources (tract, patch)
64All of these have associated *_schema catalogs that require no data ID and hold no records.
66In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
67the mergeDet, meas, and ref dataset Footprints:
68* deepCoadd_peak_schema
69"""
72##############################################################################################################
73class DetectCoaddSourcesConnections(PipelineTaskConnections,
74 dimensions=("tract", "patch", "band", "skymap"),
75 defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
76 detectionSchema = cT.InitOutput(
77 doc="Schema of the detection catalog",
78 name="{outputCoaddName}Coadd_det_schema",
79 storageClass="SourceCatalog",
80 )
81 exposure = cT.Input(
82 doc="Exposure on which detections are to be performed",
83 name="{inputCoaddName}Coadd",
84 storageClass="ExposureF",
85 dimensions=("tract", "patch", "band", "skymap")
86 )
87 outputBackgrounds = cT.Output(
88 doc="Output Backgrounds used in detection",
89 name="{outputCoaddName}Coadd_calexp_background",
90 storageClass="Background",
91 dimensions=("tract", "patch", "band", "skymap")
92 )
93 outputSources = cT.Output(
94 doc="Detected sources catalog",
95 name="{outputCoaddName}Coadd_det",
96 storageClass="SourceCatalog",
97 dimensions=("tract", "patch", "band", "skymap")
98 )
99 outputExposure = cT.Output(
100 doc="Exposure post detection",
101 name="{outputCoaddName}Coadd_calexp",
102 storageClass="ExposureF",
103 dimensions=("tract", "patch", "band", "skymap")
104 )
107class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
108 """!
109 @anchor DetectCoaddSourcesConfig_
111 @brief Configuration parameters for the DetectCoaddSourcesTask
112 """
113 doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
114 scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
115 detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
116 coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
117 doInsertFakes = Field(dtype=bool, default=False,
118 doc="Run fake sources injection task",
119 deprecated=("doInsertFakes is no longer supported. This config will be removed "
120 "after v24."))
121 insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
122 doc="Injection of fake sources for testing "
123 "purposes (must be retargeted)",
124 deprecated=("insertFakes is no longer supported. This config will "
125 "be removed after v24."))
126 hasFakes = Field(
127 dtype=bool,
128 default=False,
129 doc="Should be set to True if fake sources have been inserted into the input data.",
130 )
132 def setDefaults(self):
133 super().setDefaults()
134 self.detection.thresholdType = "pixel_stdev"
135 self.detection.isotropicGrow = True
136 # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
137 self.detection.reEstimateBackground = False
138 self.detection.background.useApprox = False
139 self.detection.background.binSize = 4096
140 self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
141 self.detection.doTempWideBackground = True # Suppress large footprints that overwhelm the deblender
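# Illustrative sketch: these defaults can be tightened or relaxed further from a config
# override file or a pipeline definition; the values below are examples only, not
# recommendations of this task. ``thresholdValue`` is a field of the detection subtask's
# config (SourceDetectionConfig), in the units selected by ``thresholdType`` above.
def _exampleDetectionConfigOverride():
    config = DetectCoaddSourcesConfig()
    config.doScaleVariance = True          # rescale the coadd variance plane before detection
    config.detection.thresholdValue = 5.0  # example detection threshold
    config.validate()
    return config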
143## @addtogroup LSST_task_documentation
144## @{
145## @page page_DetectCoaddSourcesTask DetectCoaddSourcesTask
146## @ref DetectCoaddSourcesTask_ "DetectCoaddSourcesTask"
147## @copybrief DetectCoaddSourcesTask
148## @}
151class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
152 r"""!
153 @anchor DetectCoaddSourcesTask_
155 @brief Detect sources on a coadd
157 @section pipe_tasks_multiBand_Contents Contents
159 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
160 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
161 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
162 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
163 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
164 - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example
166 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description
168 Command-line task that detects sources on a coadd of exposures obtained with a single filter.
170 Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
171 properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
172 in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
173 propagate the full covariance matrix -- but it is simple and works well in practice.
175 After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref
176 SourceDetectionTask_ "detection" subtask.
178 @par Inputs:
179 deepCoadd{tract,patch,filter}: ExposureF
180 @par Outputs:
181 deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
182 @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input
183 exposure (ExposureF)
184 @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList
185 @par Data Unit:
186 tract, patch, filter
188 DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
189 You can retarget this subtask if you wish.
191 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization
193 @copydoc \_\_init\_\_
195 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task
197 @copydoc run
199 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters
201 See @ref DetectCoaddSourcesConfig_ "DetectCoaddSourcesConfig"
203 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables
205 The command line task interface supports a
206 flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
207 files.
209 DetectCoaddSourcesTask has no debug variables of its own because it delegates all the work to
210 @ref SourceDetectionTask_ "SourceDetectionTask"; see the documentation for
211 @ref SourceDetectionTask_ "SourceDetectionTask" for further information.
213 @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example
214 of using DetectCoaddSourcesTask
216 DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose of
217 the task is to update the background, detect all sources in a single band and generate a set of parent
218 footprints. Subsequent tasks in the multi-band processing procedure will merge sources across bands and,
219 eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects a data
220 reference to the coadd to be processed. A list of the available optional arguments can be obtained by
221 calling detectCoaddSources.py with the `--help` command line argument:
222 @code
223 detectCoaddSources.py --help
224 @endcode
226 To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
227 will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
228 steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:
229 @code
230 detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
231 @endcode
232 This will process the HSC-I band data. The results are written to
233 `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.
235 It is also necessary to run:
236 @code
237 detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
238 @endcode
239 to generate the source catalogs for the HSC-R band required by the next step in the multi-band
240 processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".
241 """
242 _DefaultName = "detectCoaddSources"
243 ConfigClass = DetectCoaddSourcesConfig
244 getSchemaCatalogs = _makeGetSchemaCatalogs("det")
245 makeIdFactory = _makeMakeIdFactory("CoaddId")
247 @classmethod
248 def _makeArgumentParser(cls):
249 parser = ArgumentParser(name=cls._DefaultName)
250 parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
251 ContainerClass=ExistingCoaddDataIdContainer)
252 return parser
254 def __init__(self, schema=None, **kwargs):
255 """!
256 @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.
258 Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
260 @param[in] schema: initial schema for the output catalog, modified in place to include all
261 fields set by this task. If None, the source minimal schema will be used.
262 @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
263 """
264 # N.B. Super is used here to handle the multiple inheritance of PipelineTasks; the init tree
265 # call structure has been reviewed carefully to be sure super will work as intended.
266 super().__init__(**kwargs)
267 if schema is None:
268 schema = afwTable.SourceTable.makeMinimalSchema()
269 self.schema = schema
270 self.makeSubtask("detection", schema=self.schema)
271 if self.config.doScaleVariance:
272 self.makeSubtask("scaleVariance")
274 self.detectionSchema = afwTable.SourceCatalog(self.schema)
276 def runDataRef(self, patchRef):
277 """!
278 @brief Run detection on a coadd.
280 Invokes @ref run and then uses @ref write to output the
281 results.
283 @param[in] patchRef: data reference for patch
284 """
285 if self.config.hasFakes:
286 exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True)
287 else:
288 exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
289 expId = getGen3CoaddExposureId(patchRef, coaddName=self.config.coaddName, log=self.log)
290 results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
291 self.write(results, patchRef)
292 return results
294 def runQuantum(self, butlerQC, inputRefs, outputRefs):
295 inputs = butlerQC.get(inputRefs)
296 exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch_band")
297 inputs["idFactory"] = exposureIdInfo.makeSourceIdFactory()
298 inputs["expId"] = exposureIdInfo.expId
299 outputs = self.run(**inputs)
300 butlerQC.put(outputs, outputRefs)
302 def run(self, exposure, idFactory, expId):
303 """!
304 @brief Run detection on an exposure.
306 First scale the variance plane to match the observed variance
307 using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
308 detect sources.
310 @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled,
311 depending on configuration).
312 @param[in] idFactory: IdFactory to set source identifiers
313 @param[in] expId: Exposure identifier (integer) for RNG seed
315 @return a pipe.base.Struct with fields
316 - sources: catalog of detections
317 - backgrounds: list of backgrounds
318 """
319 if self.config.doScaleVariance:
320 varScale = self.scaleVariance.run(exposure.maskedImage)
321 exposure.getMetadata().add("VARIANCE_SCALE", varScale)
322 backgrounds = afwMath.BackgroundList()
323 table = afwTable.SourceTable.make(self.schema, idFactory)
324 detections = self.detection.run(table, exposure, expId=expId)
325 sources = detections.sources
326 fpSets = detections.fpSets
327 if hasattr(fpSets, "background") and fpSets.background:
328 for bg in fpSets.background:
329 backgrounds.append(bg)
330 return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
332 def write(self, results, patchRef):
333 """!
334 @brief Write out results from run.
337 @param[in] results: Struct returned from run
338 @param[in] patchRef: data reference for patch
339 """
340 coaddName = self.config.coaddName + "Coadd"
341 patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
342 patchRef.put(results.outputSources, coaddName + "_det")
343 if self.config.hasFakes:
344 patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp")
345 else:
346 patchRef.put(results.outputExposure, coaddName + "_calexp")
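# Illustrative sketch: driving DetectCoaddSourcesTask.run directly on an already-loaded
# coadd, bypassing both the butler and the command line. ``coadd`` is assumed to be an
# lsst.afw.image.ExposureF with a valid PSF and variance plane; the expId below is an
# arbitrary placeholder used only to seed the detection RNG.
def _exampleDetectOnCoadd(coadd):
    task = DetectCoaddSourcesTask(config=DetectCoaddSourcesConfig())
    idFactory = afwTable.IdFactory.makeSimple()  # simple sequential source IDs
    result = task.run(coadd, idFactory, expId=0)
    # result.outputSources: catalog of parent Footprints
    # result.outputBackgrounds: BackgroundList accumulated during detection
    # result.outputExposure: the input exposure, variance-scaled if doScaleVariance is set
    return result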
348##############################################################################################################
351class DeblendCoaddSourcesConfig(Config):
352 """DeblendCoaddSourcesConfig
354 Configuration parameters for the `DeblendCoaddSourcesTask`.
355 """
356 singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
357 doc="Deblend sources separately in each band")
358 multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
359 doc="Deblend sources simultaneously across bands")
360 simultaneous = Field(dtype=bool,
361 default=True,
362 doc="Simultaneously deblend all bands? "
363 "True uses `multibandDeblend` while False uses `singleBandDeblend`")
364 coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
365 hasFakes = Field(dtype=bool,
366 default=False,
367 doc="Should be set to True if fake sources have been inserted into the input data.")
369 def setDefaults(self):
370 Config.setDefaults(self)
371 self.singleBandDeblend.propagateAllPeaks = True
374class DeblendCoaddSourcesRunner(MergeSourcesRunner):
375 """Task runner for the `MergeSourcesTask`
377 Required because the run method requires a list of
378 dataRefs rather than a single dataRef.
379 """
380 @staticmethod
381 def getTargetList(parsedCmd, **kwargs):
382 """Provide a list of patch references for each patch, tract, filter combo.
384 Parameters
385 ----------
386 parsedCmd:
387 The parsed command
388 kwargs:
389 Keyword arguments passed to the task
391 Returns
392 -------
393 targetList: list
394 List of tuples, where each tuple is a (dataRef, kwargs) pair.
395 """
396 refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
397 kwargs["psfCache"] = parsedCmd.psfCache
398 return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()]
401class DeblendCoaddSourcesTask(CmdLineTask):
402 """Deblend the sources in a merged catalog
404 Deblend sources from master catalog in each coadd.
405 This can either be done separately in each band using the HSC-SDSS deblender
406 (`DeblendCoaddSourcesTask.config.simultaneous==False`)
407 or use SCARLET to simultaneously fit the blend in all bands
408 (`DeblendCoaddSourcesTask.config.simultaneous==True`).
409 The task will set its own `self.schema` attribute to the `Schema` of the
410 output deblended catalog.
411 This will include all fields from the input `Schema`, as well as additional fields
412 from the deblender.
418 Parameters
419 ----------
420 butler: `Butler`
421 Butler used to read the input schemas from disk, if `schema` or
422 `peakSchema` is not provided.
423 schema: `Schema`
424 The schema of the merged detection catalog as an input to this task.
425 peakSchema: `Schema`
426 The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog
427 """
428 ConfigClass = DeblendCoaddSourcesConfig
429 RunnerClass = DeblendCoaddSourcesRunner
430 _DefaultName = "deblendCoaddSources"
431 makeIdFactory = _makeMakeIdFactory("MergedCoaddId", includeBand=False)
433 @classmethod
434 def _makeArgumentParser(cls):
435 parser = ArgumentParser(name=cls._DefaultName)
436 parser.add_id_argument("--id", "deepCoadd_calexp",
437 help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
438 ContainerClass=ExistingCoaddDataIdContainer)
439 parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
440 return parser
442 def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
443 CmdLineTask.__init__(self, **kwargs)
444 if schema is None:
445 assert butler is not None, "Neither butler nor schema is defined"
446 schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
447 self.schemaMapper = afwTable.SchemaMapper(schema)
448 self.schemaMapper.addMinimalSchema(schema)
449 self.schema = self.schemaMapper.getOutputSchema()
450 if peakSchema is None:
451 assert butler is not None, "Neither butler nor peakSchema is defined"
452 peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema
454 if self.config.simultaneous:
455 self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
456 else:
457 self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)
459 def getSchemaCatalogs(self):
460 """Return a dict of empty catalogs for each catalog dataset produced by this task.
462 Returns
463 -------
464 result: dict
465 Dictionary of empty catalogs, with catalog names as keys.
466 """
467 catalog = afwTable.SourceCatalog(self.schema)
468 return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
469 self.config.coaddName + "Coadd_deblendedModel": catalog}
471 def runDataRef(self, patchRefList, psfCache=100):
472 """Deblend the patch
474 Deblend each source simultaneously or separately
475 (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
478 Write the deblended sources out.
480 Parameters
481 ----------
482 patchRefList: list
483 List of data references for each filter
484 """
486 if self.config.hasFakes:
487 coaddType = "fakes_" + self.config.coaddName
488 else:
489 coaddType = self.config.coaddName
491 if self.config.simultaneous:
492 # Use SCARLET to simultaneously deblend across filters
493 filters = []
494 exposures = []
495 for patchRef in patchRefList:
496 exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
497 filter = patchRef.get(coaddType + "Coadd_filterLabel", immediate=True)
498 filters.append(filter.bandLabel)
499 exposures.append(exposure)
500 # Sort inputs by band to match Gen3 order of inputs
501 exposures = [exposure for _, exposure in sorted(zip(filters, exposures))]
502 patchRefList = [patchRef for _, patchRef in sorted(zip(filters, patchRefList))]
503 filters.sort()
504 # The input sources are the same for all bands, since it is a merged catalog
505 sources = self.readSources(patchRef)
506 exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
507 templateCatalogs, fluxCatalogs = self.multiBandDeblend.run(exposure, sources)
508 for n in range(len(patchRefList)):
509 self.write(patchRefList[n], templateCatalogs[filters[n]], "Model")
510 if filters[n] in fluxCatalogs:
511 self.write(patchRefList[n], fluxCatalogs[filters[n]], "Flux")
512 else:
513 # Use the single-band deblender to deblend each band separately
514 for patchRef in patchRefList:
515 exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
516 exposure.getPsf().setCacheCapacity(psfCache)
517 sources = self.readSources(patchRef)
518 self.singleBandDeblend.run(exposure, sources)
519 self.write(patchRef, sources, "Flux")
521 def readSources(self, dataRef):
522 """Read merged catalog
524 Read the catalog of merged detections and create a catalog
525 in a single band.
527 Parameters
528 ----------
529 dataRef: data reference
530 Data reference for catalog of merged detections
532 Returns
533 -------
534 sources: `SourceCatalog`
535 List of sources in merged catalog
537 We also need to add columns to hold the outputs the deblender is about to
538 produce, so we can deblend in-place.
539 """
540 merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
541 self.log.info("Read %d detections: %s", len(merged), dataRef.dataId)
542 idFactory = self.makeIdFactory(dataRef)
543 # There may be gaps in the mergeDet catalog, which will cause the
544 # source ids to be inconsistent. So we update the id factory
545 # with the largest id already in the catalog.
546 maxId = np.max(merged["id"])
547 idFactory.notify(maxId)
548 table = afwTable.SourceTable.make(self.schema, idFactory)
549 sources = afwTable.SourceCatalog(table)
550 sources.extend(merged, self.schemaMapper)
551 return sources
553 def write(self, dataRef, sources, catalogType):
554 """Write the source catalog(s)
556 Parameters
557 ----------
558 dataRef: Data Reference
559 Data reference identifying the output patch and filter.
560 sources: `SourceCatalog`
561 Source catalog to write. For the single-band deblender this is the
562 deblended (flux-conserved) catalog; for the multi-band deblender it is
563 either the template-model or the flux-conserved catalog, depending on
564 ``catalogType``.
565 catalogType: `str`
566 Suffix of the output dataset name, e.g. "Model" or "Flux".
567 """
568 dataRef.put(sources, self.config.coaddName + f"Coadd_deblended{catalogType}")
569 self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId)
571 def writeMetadata(self, dataRefList):
572 """Write the metadata produced from processing the data.
573 Parameters
574 ----------
575 dataRefList
576 List of Butler data references used to write the metadata.
577 The metadata is written to dataset type `CmdLineTask._getMetadataName`.
578 """
579 for dataRef in dataRefList:
580 try:
581 metadataName = self._getMetadataName()
582 if metadataName is not None:
583 dataRef.put(self.getFullMetadata(), metadataName)
584 except Exception as e:
585 self.log.warning("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)
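# Illustrative sketch of the band ordering performed in runDataRef above: exposures are
# sorted by band label before being stacked into a MultibandExposure, so the Gen2 path
# matches the Gen3 ordering of inputs. ``exposuresByBand`` is an assumed dict mapping a
# band label (e.g. "g", "r", "i") to its coadd ExposureF.
def _exampleBuildMultibandExposure(exposuresByBand):
    bands = sorted(exposuresByBand)  # deterministic, band-sorted order
    exposures = [exposuresByBand[band] for band in bands]
    mExposure = afwImage.MultibandExposure.fromExposures(bands, exposures)
    return bands, mExposure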
588class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
589 dimensions=("tract", "patch", "band", "skymap"),
590 defaultTemplates={"inputCoaddName": "deep",
591 "outputCoaddName": "deep",
592 "deblendedCatalog": "deblendedFlux"}):
593 inputSchema = cT.InitInput(
594 doc="Input schema for measure merged task produced by a deblender or detection task",
595 name="{inputCoaddName}Coadd_deblendedFlux_schema",
596 storageClass="SourceCatalog"
597 )
598 outputSchema = cT.InitOutput(
599 doc="Output schema after all new fields are added by task",
600 name="{inputCoaddName}Coadd_meas_schema",
601 storageClass="SourceCatalog"
602 )
603 refCat = cT.PrerequisiteInput(
604 doc="Reference catalog used to match measured sources against known sources",
605 name="ref_cat",
606 storageClass="SimpleCatalog",
607 dimensions=("skypix",),
608 deferLoad=True,
609 multiple=True
610 )
611 exposure = cT.Input(
612 doc="Input coadd image",
613 name="{inputCoaddName}Coadd_calexp",
614 storageClass="ExposureF",
615 dimensions=("tract", "patch", "band", "skymap")
616 )
617 skyMap = cT.Input(
618 doc="SkyMap to use in processing",
619 name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
620 storageClass="SkyMap",
621 dimensions=("skymap",),
622 )
623 visitCatalogs = cT.Input(
624 doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
625 "further filtered in the task for the purpose of propagating flags from image calibration "
626 "and characterization to coadd objects. Only used in legacy PropagateVisitFlagsTask.",
627 name="src",
628 dimensions=("instrument", "visit", "detector"),
629 storageClass="SourceCatalog",
630 multiple=True
631 )
632 sourceTableHandles = cT.Input(
633 doc=("Source tables that are derived from the ``CalibrateTask`` sources. "
634 "These tables contain astrometry and photometry flags, and optionally "
635 "PSF flags."),
636 name="sourceTable_visit",
637 storageClass="DataFrame",
638 dimensions=("instrument", "visit"),
639 multiple=True,
640 deferLoad=True,
641 )
642 finalizedSourceTableHandles = cT.Input(
643 doc=("Finalized source tables from ``FinalizeCalibrationTask``. These "
644 "tables contain PSF flags from the finalized PSF estimation."),
645 name="finalized_src_table",
646 storageClass="DataFrame",
647 dimensions=("instrument", "visit"),
648 multiple=True,
649 deferLoad=True,
650 )
651 inputCatalog = cT.Input(
652 doc=("Name of the input catalog to use."
653 "If the single band deblender was used this should be 'deblendedFlux."
654 "If the multi-band deblender was used this should be 'deblendedModel, "
655 "or deblendedFlux if the multiband deblender was configured to output "
656 "deblended flux catalogs. If no deblending was performed this should "
657 "be 'mergeDet'"),
658 name="{inputCoaddName}Coadd_{deblendedCatalog}",
659 storageClass="SourceCatalog",
660 dimensions=("tract", "patch", "band", "skymap"),
661 )
662 outputSources = cT.Output(
663 doc="Source catalog containing all the measurement information generated in this task",
664 name="{outputCoaddName}Coadd_meas",
665 dimensions=("tract", "patch", "band", "skymap"),
666 storageClass="SourceCatalog",
667 )
668 matchResult = cT.Output(
669 doc="Match catalog produced by configured matcher, optional on doMatchSources",
670 name="{outputCoaddName}Coadd_measMatch",
671 dimensions=("tract", "patch", "band", "skymap"),
672 storageClass="Catalog",
673 )
674 denormMatches = cT.Output(
675 doc="Denormalized Match catalog produced by configured matcher, optional on "
676 "doWriteMatchesDenormalized",
677 name="{outputCoaddName}Coadd_measMatchFull",
678 dimensions=("tract", "patch", "band", "skymap"),
679 storageClass="Catalog",
680 )
682 def __init__(self, *, config=None):
683 super().__init__(config=config)
684 if config.doPropagateFlags is False:
685 self.inputs -= set(("visitCatalogs",))
686 self.inputs -= set(("sourceTableHandles",))
687 self.inputs -= set(("finalizedSourceTableHandles",))
688 elif config.propagateFlags.target == PropagateSourceFlagsTask:
689 # New PropagateSourceFlagsTask does not use visitCatalogs.
690 self.inputs -= set(("visitCatalogs",))
691 # Check for types of flags required.
692 if not config.propagateFlags.source_flags:
693 self.inputs -= set(("sourceTableHandles",))
694 if not config.propagateFlags.finalized_source_flags:
695 self.inputs -= set(("finalizedSourceTableHandles",))
696 else:
697 # Deprecated PropagateVisitFlagsTask uses visitCatalogs.
698 self.inputs -= set(("sourceTableHandles",))
699 self.inputs -= set(("finalizedSourceTableHandles",))
701 if config.doMatchSources is False:
702 self.outputs -= set(("matchResult",))
704 if config.doWriteMatchesDenormalized is False:
705 self.outputs -= set(("denormMatches",))
708class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
709 pipelineConnections=MeasureMergedCoaddSourcesConnections):
710 """!
711 @anchor MeasureMergedCoaddSourcesConfig_
713 @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
714 """
715 inputCatalog = Field(dtype=str, default="deblendedFlux",
716 doc=("Name of the input catalog to use."
717 "If the single band deblender was used this should be 'deblendedFlux."
718 "If the multi-band deblender was used this should be 'deblendedModel."
719 "If no deblending was performed this should be 'mergeDet'"))
720 measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
721 setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
722 doPropagateFlags = Field(
723 dtype=bool, default=True,
724 doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
725 )
726 propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc="Propagate source flags to coadd")
727 doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
728 match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
729 doWriteMatchesDenormalized = Field(
730 dtype=bool,
731 default=False,
732 doc=("Write reference matches in denormalized format? "
733 "This format uses more disk space, but is more convenient to read."),
734 )
735 coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
736 psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
737 checkUnitsParseStrict = Field(
738 doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
739 dtype=str,
740 default="raise",
741 )
742 doApCorr = Field(
743 dtype=bool,
744 default=True,
745 doc="Apply aperture corrections"
746 )
747 applyApCorr = ConfigurableField(
748 target=ApplyApCorrTask,
749 doc="Subtask to apply aperture corrections"
750 )
751 doRunCatalogCalculation = Field(
752 dtype=bool,
753 default=True,
754 doc='Run catalogCalculation task'
755 )
756 catalogCalculation = ConfigurableField(
757 target=CatalogCalculationTask,
758 doc="Subtask to run catalogCalculation plugins on catalog"
759 )
761 hasFakes = Field(
762 dtype=bool,
763 default=False,
764 doc="Should be set to True if fake sources have been inserted into the input data."
765 )
767 @property
768 def refObjLoader(self):
769 return self.match.refObjLoader
771 def setDefaults(self):
772 super().setDefaults()
773 self.measurement.plugins.names |= ['base_InputCount',
774 'base_Variance',
775 'base_LocalPhotoCalib',
776 'base_LocalWcs']
777 self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
778 'INEXACT_PSF']
779 self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
780 'INEXACT_PSF']
782 def validate(self):
783 super().validate()
784 refCatGen2 = getattr(self.refObjLoader, "ref_dataset_name", None)
785 if refCatGen2 is not None and refCatGen2 != self.connections.refCat:
786 raise ValueError(
787 f"Gen2 ({refCatGen2}) and Gen3 ({self.connections.refCat}) reference catalogs "
788 f"are different. These options must be kept in sync until Gen2 is retired."
789 )
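# Illustrative sketch: the connection pruning in MeasureMergedCoaddSourcesConnections.__init__
# above is driven entirely by these config flags, so disabling an option drops the
# corresponding dataset from the quantum graph. Treating ``connections.outputs`` as a set of
# connection names follows the pattern used in that __init__; this is a sketch, not a test of
# the pipe_base API.
def _exampleConnectionPruning():
    config = MeasureMergedCoaddSourcesConfig()
    config.doMatchSources = False            # no reference-catalog matching
    config.doWriteMatchesDenormalized = False
    connections = MeasureMergedCoaddSourcesConnections(config=config)
    assert "matchResult" not in connections.outputs
    assert "denormMatches" not in connections.outputs
    return connections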
792## @addtogroup LSST_task_documentation
793## @{
794## @page page_MeasureMergedCoaddSourcesTask MeasureMergedCoaddSourcesTask
795## @ref MeasureMergedCoaddSourcesTask_ "MeasureMergedCoaddSourcesTask"
796## @copybrief MeasureMergedCoaddSourcesTask
797## @}
800class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
801 """Get the psfCache setting into MeasureMergedCoaddSourcesTask"""
802 @staticmethod
803 def getTargetList(parsedCmd, **kwargs):
804 return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache)
807class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask):
808 r"""!
809 @anchor MeasureMergedCoaddSourcesTask_
811 @brief Deblend sources from master catalog in each coadd separately and measure.
813 @section pipe_tasks_multiBand_Contents Contents
815 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
816 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
817 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
818 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
819 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
820 - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example
822 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description
824 Command-line task that uses peaks and footprints from a master catalog to perform deblending and
825 measurement in each coadd.
827 Given a master input catalog of sources (peaks and footprints) or deblender outputs
828 (including a HeavyFootprint in each band), measure each source on the
829 coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
830 consistent set of child sources.
832 The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
833 properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
834 flags are propagated to the coadd sources.
836 Optionally, we can match the coadd sources to an external reference catalog.
838 @par Inputs:
839 deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
840 @n deepCoadd_calexp{tract,patch,filter}: ExposureF
841 @par Outputs:
842 deepCoadd_meas{tract,patch,filter}: SourceCatalog
843 @par Data Unit:
844 tract, patch, filter
846 MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:
848 <DL>
849 <DT> @ref SingleFrameMeasurementTask_ "measurement"
850 <DD> Measure source properties of deblended sources.</DD>
851 <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
852 <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are
853 not at the edge of the field and that have either not been deblended or are the children of deblended
854 sources</DD>
855 <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
856 <DD> Propagate flags set in individual visits to the coadd.</DD>
857 <DT> @ref DirectMatchTask_ "match"
858 <DD> Match input sources to a reference catalog (optional).
859 </DD>
860 </DL>
861 These subtasks may be retargeted as required.
863 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization
865 @copydoc \_\_init\_\_
867 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task
869 @copydoc run
871 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters
873 See @ref MeasureMergedCoaddSourcesConfig_
875 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables
877 The command line task interface supports a
878 flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
879 files.
881 MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
882 the various sub-tasks. See the documentation for individual sub-tasks for more information.
884 @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
885 MeasureMergedCoaddSourcesTask
887 After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
888 The next stage in the multi-band processing procedure will merge these measurements into a suitable
889 catalog for driving forced photometry.
891 Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds
892 to be processed.
893 A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
894 `--help` command line argument:
895 @code
896 measureCoaddSources.py --help
897 @endcode
899 To demonstrate usage of the MeasureMergedCoaddSourcesTask in the larger context of multi-band processing, we
900 will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
901 step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I band
902 coadd as follows:
903 @code
904 measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
905 @endcode
906 This will process the HSC-I band data. The results are written to
907 `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`.
909 It is also necessary to run
910 @code
911 measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
912 @endcode
913 to generate the source catalogs for the HSC-R band required by the next step in the multi-band
914 procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
915 """
916 _DefaultName = "measureCoaddSources"
917 ConfigClass = MeasureMergedCoaddSourcesConfig
918 RunnerClass = MeasureMergedCoaddSourcesRunner
919 getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
920 # The IDs we already have are of this type
921 makeIdFactory = _makeMakeIdFactory("MergedCoaddId", includeBand=False)
923 @classmethod
924 def _makeArgumentParser(cls):
925 parser = ArgumentParser(name=cls._DefaultName)
926 parser.add_id_argument("--id", "deepCoadd_calexp",
927 help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
928 ContainerClass=ExistingCoaddDataIdContainer)
929 parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
930 return parser
932 def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
933 **kwargs):
934 """!
935 @brief Initialize the task.
937 Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
938 @param[in] schema: the schema of the merged detection catalog used as input to this one
939 @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
940 @param[in] refObjLoader: an instance of LoadReferenceObjectsTask that supplies an external reference
941 catalog. May be None if the loader can be constructed from the butler argument or all steps
942 requiring a reference catalog are disabled.
943 @param[in] butler: a butler used to read the input schemas from disk or construct the reference
944 catalog loader, if schema or peakSchema or refObjLoader is None
946 The task will set its own self.schema attribute to the schema of the output measurement catalog.
947 This will include all fields from the input schema, as well as additional fields for all the
948 measurements.
949 """
950 super().__init__(**kwargs)
951 self.deblended = self.config.inputCatalog.startswith("deblended")
952 self.inputCatalog = "Coadd_" + self.config.inputCatalog
953 if initInputs is not None:
954 schema = initInputs['inputSchema'].schema
955 if schema is None:
956 assert butler is not None, "Neither butler nor schema is defined"
957 schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
958 self.schemaMapper = afwTable.SchemaMapper(schema)
959 self.schemaMapper.addMinimalSchema(schema)
960 self.schema = self.schemaMapper.getOutputSchema()
961 self.algMetadata = PropertyList()
962 self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
963 self.makeSubtask("setPrimaryFlags", schema=self.schema)
964 if self.config.doMatchSources:
965 self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
966 if self.config.doPropagateFlags:
967 self.makeSubtask("propagateFlags", schema=self.schema)
968 self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
969 if self.config.doApCorr:
970 self.makeSubtask("applyApCorr", schema=self.schema)
971 if self.config.doRunCatalogCalculation:
972 self.makeSubtask("catalogCalculation", schema=self.schema)
974 self.outputSchema = afwTable.SourceCatalog(self.schema)
976 def runQuantum(self, butlerQC, inputRefs, outputRefs):
977 inputs = butlerQC.get(inputRefs)
979 refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
980 inputs.pop('refCat'), config=self.config.refObjLoader,
981 log=self.log)
982 self.match.setRefObjLoader(refObjLoader)
984 # Set psfcache
985 # move this to run after gen2 deprecation
986 inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)
988 # Get unique integer ID for IdFactory and RNG seeds
989 exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch")
990 inputs['exposureId'] = exposureIdInfo.expId
991 idFactory = exposureIdInfo.makeSourceIdFactory()
992 # Transform inputCatalog
993 table = afwTable.SourceTable.make(self.schema, idFactory)
994 sources = afwTable.SourceCatalog(table)
995 sources.extend(inputs.pop('inputCatalog'), self.schemaMapper)
996 table = sources.getTable()
997 table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
998 inputs['sources'] = sources
1000 skyMap = inputs.pop('skyMap')
1001 tractNumber = inputRefs.inputCatalog.dataId['tract']
1002 tractInfo = skyMap[tractNumber]
1003 patchInfo = tractInfo.getPatchInfo(inputRefs.inputCatalog.dataId['patch'])
1004 skyInfo = Struct(
1005 skyMap=skyMap,
1006 tractInfo=tractInfo,
1007 patchInfo=patchInfo,
1008 wcs=tractInfo.getWcs(),
1009 bbox=patchInfo.getOuterBBox()
1010 )
1011 inputs['skyInfo'] = skyInfo
1013 if self.config.doPropagateFlags:
1014 if self.config.propagateFlags.target == PropagateSourceFlagsTask:
1015 # New version
1016 ccdInputs = inputs["exposure"].getInfo().getCoaddInputs().ccds
1017 inputs["ccdInputs"] = ccdInputs
1019 if "sourceTableHandles" in inputs:
1020 sourceTableHandles = inputs.pop("sourceTableHandles")
1021 sourceTableHandleDict = {handle.dataId["visit"]: handle
1022 for handle in sourceTableHandles}
1023 inputs["sourceTableHandleDict"] = sourceTableHandleDict
1024 if "finalizedSourceTableHandles" in inputs:
1025 finalizedSourceTableHandles = inputs.pop("finalizedSourceTableHandles")
1026 finalizedSourceTableHandleDict = {handle.dataId["visit"]: handle
1027 for handle in finalizedSourceTableHandles}
1028 inputs["finalizedSourceTableHandleDict"] = finalizedSourceTableHandleDict
1029 else:
1030 # Deprecated legacy version
1031 # Filter out any visit catalog that is not coadd inputs
1032 ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
1033 visitKey = ccdInputs.schema.find("visit").key
1034 ccdKey = ccdInputs.schema.find("ccd").key
1035 inputVisitIds = set()
1036 ccdRecordsWcs = {}
1037 for ccdRecord in ccdInputs:
1038 visit = ccdRecord.get(visitKey)
1039 ccd = ccdRecord.get(ccdKey)
1040 inputVisitIds.add((visit, ccd))
1041 ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()
1043 inputCatalogsToKeep = []
1044 inputCatalogWcsUpdate = []
1045 for i, dataRef in enumerate(inputRefs.visitCatalogs):
1046 key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
1047 if key in inputVisitIds:
1048 inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
1049 inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
1050 inputs['visitCatalogs'] = inputCatalogsToKeep
1051 inputs['wcsUpdates'] = inputCatalogWcsUpdate
1052 inputs['ccdInputs'] = ccdInputs
1054 outputs = self.run(**inputs)
1055 butlerQC.put(outputs, outputRefs)
1057 def runDataRef(self, patchRef, psfCache=100):
1058 """!
1059 @brief Deblend and measure.
1061 @param[in] patchRef: Patch reference.
1063 Set 'is-primary' and related flags. Propagate flags
1064 from individual visits. Optionally match the sources to a reference catalog and write the matches.
1065 Finally, write the deblended sources and measurements out.
1066 """
1067 if self.config.hasFakes:
1068 coaddType = "fakes_" + self.config.coaddName
1069 else:
1070 coaddType = self.config.coaddName
1071 exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
1072 exposure.getPsf().setCacheCapacity(psfCache)
1073 sources = self.readSources(patchRef)
1074 table = sources.getTable()
1075 table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
1076 skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)
1078 if self.config.doPropagateFlags:
1079 ccdInputs = self.propagateFlags.getCcdInputs(exposure)
1080 else:
1081 ccdInputs = None
1083 expId = getGen3CoaddExposureId(patchRef, coaddName=self.config.coaddName, includeBand=False,
1084 log=self.log)
1085 results = self.run(exposure=exposure, sources=sources, skyInfo=skyInfo, exposureId=expId,
1086 ccdInputs=ccdInputs, butler=patchRef.getButler())
1088 if self.config.doMatchSources:
1089 self.writeMatches(patchRef, results)
1090 self.write(patchRef, results.outputSources)
1092 def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
1093 butler=None, sourceTableHandleDict=None, finalizedSourceTableHandleDict=None):
1094 """Run measurement algorithms on the input exposure, and optionally populate the
1095 resulting catalog with extra information.
1097 Parameters
1098 ----------
1099 exposure : `lsst.afw.image.Exposure`
1100 The input exposure on which measurements are to be performed
1101 sources : `lsst.afw.table.SourceCatalog`
1102 A catalog built from the results of merged detections, or
1103 deblender outputs.
1104 skyInfo : `lsst.pipe.base.Struct`
1105 A struct containing information about the position of the input exposure within
1106 a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
1107 exposureId : `int` or `bytes`
1108 packed unique number or bytes unique to the input exposure
1109 ccdInputs : `lsst.afw.table.ExposureCatalog`
1110 Catalog containing information on the individual visits which went into making
1111 the coadd.
1112 sourceTableHandleDict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
1113 Dict for sourceTable_visit handles (key is visit) for propagating flags.
1114 These tables are derived from the ``CalibrateTask`` sources, and contain
1115 astrometry and photometry flags, and optionally PSF flags.
1116 finalizedSourceTableHandleDict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`], optional
1117 Dict for finalized_src_table handles (key is visit) for propagating flags.
1118 These tables are derived from ``FinalizeCalibrationTask`` and contain
1119 PSF flags from the finalized PSF estimation.
1120 visitCatalogs : list of `lsst.afw.table.SourceCatalog`
1121 A list of source catalogs corresponding to measurements made on the individual
1122 visits which went into the input exposure. If None and butler is `None` then
1123 the task cannot propagate visit flags to the output catalog.
1124 Deprecated, to be removed with PropagateVisitFlagsTask.
1125 wcsUpdates : list of `lsst.afw.geom.SkyWcs`
1126 If visitCatalogs is not `None` this should be a list of wcs objects which correspond
1127 to the input visits. Used to put all coordinates to common system. If `None` and
1128 butler is `None` then the task cannot propagate visit flags to the output catalog.
1129 Deprecated, to be removed with PropagateVisitFlagsTask.
1130 butler : `lsst.daf.persistence.Butler`
1131 A gen2 butler used to load visit catalogs.
1132 Deprecated, to be removed with Gen2.
1134 Returns
1135 -------
1136 results : `lsst.pipe.base.Struct`
1137 Results of running measurement task. Will contain the catalog in the
1138 sources attribute. Optionally will have results of matching to a
1139 reference catalog in the matchResults attribute, and denormalized
1140 matches in the denormMatches attribute.
1141 """
1142 self.measurement.run(sources, exposure, exposureId=exposureId)
1144 if self.config.doApCorr:
1145 self.applyApCorr.run(
1146 catalog=sources,
1147 apCorrMap=exposure.getInfo().getApCorrMap()
1148 )
1150 # TODO DM-11568: this contiguous check-and-copy could go away if we
1151 # reserve enough space during SourceDetection and/or SourceDeblend.
1152 # NOTE: sourceSelectors require contiguous catalogs, so ensure
1153 # contiguity now, so views are preserved from here on.
1154 if not sources.isContiguous():
1155 sources = sources.copy(deep=True)
1157 if self.config.doRunCatalogCalculation:
1158 self.catalogCalculation.run(sources)
1160 self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
1161 patchInfo=skyInfo.patchInfo)
1162 if self.config.doPropagateFlags:
1163 if self.config.propagateFlags.target == PropagateSourceFlagsTask:
1164 # New version
1165 self.propagateFlags.run(
1166 sources,
1167 ccdInputs,
1168 sourceTableHandleDict,
1169 finalizedSourceTableHandleDict
1170 )
1171 else:
1172 # Legacy deprecated version
1173 self.propagateFlags.run(
1174 butler,
1175 sources,
1176 ccdInputs,
1177 exposure.getWcs(),
1178 visitCatalogs,
1179 wcsUpdates
1180 )
1182 results = Struct()
1184 if self.config.doMatchSources:
1185 matchResult = self.match.run(sources, exposure.getInfo().getFilterLabel().bandLabel)
1186 matches = afwTable.packMatches(matchResult.matches)
1187 matches.table.setMetadata(matchResult.matchMeta)
1188 results.matchResult = matches
1189 if self.config.doWriteMatchesDenormalized:
1190 if matchResult.matches:
1191 denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
1192 else:
1193 self.log.warning("No matches, so generating dummy denormalized matches file")
1194 denormMatches = afwTable.BaseCatalog(afwTable.Schema())
1195 denormMatches.setMetadata(PropertyList())
1196 denormMatches.getMetadata().add("COMMENT",
1197 "This catalog is empty because no matches were found.")
1198 results.denormMatches = denormMatches
1201 results.outputSources = sources
1202 return results
1204 def readSources(self, dataRef):
1205 """!
1206 @brief Read input sources.
1208 @param[in] dataRef: Data reference for catalog of merged detections
1209 @return List of sources in merged catalog
1211 We also need to add columns to hold the measurements we're about to make
1212 so we can measure in-place.
1213 """
1214 merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
1215 self.log.info("Read %d detections: %s", len(merged), dataRef.dataId)
1216 idFactory = self.makeIdFactory(dataRef)
1217 for s in merged:
1218 idFactory.notify(s.getId())
1219 table = afwTable.SourceTable.make(self.schema, idFactory)
1220 sources = afwTable.SourceCatalog(table)
1221 sources.extend(merged, self.schemaMapper)
1222 return sources
1224 def writeMatches(self, dataRef, results):
1225 """!
1226 @brief Write matches of the sources to the astrometric reference catalog.
1228 @param[in] dataRef: data reference
1229 @param[in] results: results struct from run method
1230 """
1231 if hasattr(results, "matchResult"):
1232 dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch")
1233 if hasattr(results, "denormMatches"):
1234 dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull")
1236 def write(self, dataRef, sources):
1237 """!
1238 @brief Write the source catalog.
1240 @param[in] dataRef: data reference
1241 @param[in] sources: source catalog
1242 """
1243 dataRef.put(sources, self.config.coaddName + "Coadd_meas")
1244 self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId)