Coverage for python/lsst/pipe/tasks/multiBand.py : 62%

Hot-keys on this page
r m x p toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
1#!/usr/bin/env python
2#
3# LSST Data Management System
4# Copyright 2008-2015 AURA/LSST.
5#
6# This product includes software developed by the
7# LSST Project (http://www.lsst.org/).
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the LSST License Statement and
20# the GNU General Public License along with this program. If not,
21# see <https://www.lsstcorp.org/LegalNotices/>.
22#
23from lsst.coadd.utils.coaddDataIdContainer import ExistingCoaddDataIdContainer
24from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner,
25 PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
26import lsst.pipe.base.connectionTypes as cT
27from lsst.pex.config import Config, Field, ConfigurableField
28from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader
29from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
30from lsst.meas.deblender import SourceDeblendTask
31from lsst.meas.extensions.scarlet import ScarletDeblendTask
32from lsst.pipe.tasks.coaddBase import getSkyInfo
33from lsst.pipe.tasks.scaleVariance import ScaleVarianceTask
34from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
35from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
36from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
37from lsst.pipe.tasks.propagateVisitFlags import PropagateVisitFlagsTask
38import lsst.afw.image as afwImage
39import lsst.afw.table as afwTable
40import lsst.afw.math as afwMath
41from lsst.daf.base import PropertyList
42from lsst.skymap import BaseSkyMap
44from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401
45from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401
46from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401
47from .multiBandUtils import getInputSchema, getShortFilterName, readCatalog, _makeMakeIdFactory # noqa: F401
48from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401
49from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401
50from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401
51from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401
"""
New set types:
* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
* deepCoadd_mergeDet: merged detections (tract, patch)
* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
* deepCoadd_ref: reference sources (tract, patch)
All of these have associated *_schema catalogs that require no data ID and hold no records.

In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
the mergeDet, meas, and ref dataset Footprints:
* deepCoadd_peak_schema
"""
68##############################################################################################################
class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
    """Butler dataset connections for `DetectCoaddSourcesTask`.

    One coadd exposure comes in; the detection catalog, the backgrounds used
    during detection, and the post-detection ("calexp") coadd exposure go out.
    The catalog schema is persisted as an init-output.
    """
    # Schema of the output detection catalog (init-output, no data ID).
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """!
    @anchor DetectCoaddSourcesConfig_

    @brief Configuration parameters for the DetectCoaddSourcesTask
    """
    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    doInsertFakes = Field(dtype=bool, default=False,
                          doc="Run fake sources injection task")
    insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
                                    doc="Injection of fake sources for testing "
                                        "purposes (must be retargeted)")
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    def setDefaults(self):
        super().setDefaults()
        # Threshold in units of the per-pixel noise standard deviation.
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True  # Suppress large footprints that overwhelm the deblender
135## @addtogroup LSST_task_documentation
136## @{
137## @page DetectCoaddSourcesTask
138## @ref DetectCoaddSourcesTask_ "DetectCoaddSourcesTask"
139## @copybrief DetectCoaddSourcesTask
140## @}
class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
    r"""!
    @anchor DetectCoaddSourcesTask_

    @brief Detect sources on a coadd of exposures obtained with a single filter.

    Coadding individual visits requires each exposure to be warped, which
    introduces covariance in the noise properties across pixels.  Before
    detection we therefore rescale the coadd variance plane to match the
    observed variance (an approximation to propagating the full covariance
    matrix, but simple and effective in practice).  Sources and their parent
    footprints are then detected by the @ref SourceDetectionTask_ "detection"
    subtask, which can be retargeted.

    @par Inputs:
        deepCoadd{tract,patch,filter}: ExposureF
    @par Outputs:
        deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
        @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input
           exposure (ExposureF)
        @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList
    @par Data Unit:
        tract, patch, filter

    See @ref DetectCoaddSourcesConfig_ "DetectCoaddSourcesConfig" for the
    configuration parameters.  This task has no debug variables of its own;
    see @ref SourceDetectionTask_ "SourceDetectionTask" for those of the
    detection subtask.
    """
    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig
    getSchemaCatalogs = _makeGetSchemaCatalogs("det")
    makeIdFactory = _makeMakeIdFactory("CoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                               ContainerClass=ExistingCoaddDataIdContainer)
        return parser

    def __init__(self, schema=None, **kwargs):
        """!
        @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):

        @param[in] schema: initial schema for the output catalog, modified in place to include all
            fields set by this task.  If None, the source minimal schema will be used.
        @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
        """
        # N.B. Super is used here to handle the multiple inheritance of PipelineTasks, the init tree
        # call structure has been reviewed carefully to be sure super will work as intended.
        super().__init__(**kwargs)
        if schema is None:
            schema = afwTable.SourceTable.makeMinimalSchema()
        if self.config.doInsertFakes:
            self.makeSubtask("insertFakes")
        self.schema = schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")
        # Empty catalog carrying the final schema; persisted via the
        # detectionSchema init-output connection.
        self.detectionSchema = afwTable.SourceCatalog(self.schema)

    def runDataRef(self, patchRef):
        """!
        @brief Run detection on a coadd (Gen2 entry point).

        Invokes @ref run and then uses @ref write to output the results.

        @param[in] patchRef: data reference for patch
        @return the Struct returned by @ref run
        """
        if self.config.hasFakes:
            exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True)
        else:
            exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
        expId = int(patchRef.get(self.config.coaddName + "CoaddId"))
        results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
        self.write(results, patchRef)
        return results

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        # Gen3 entry point: build the source IdFactory and RNG seed from the
        # packed tract/patch/band data ID before delegating to run().
        inputs = butlerQC.get(inputRefs)
        packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch_band", returnMaxBits=True)
        inputs["idFactory"] = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
        inputs["expId"] = packedId
        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)

    def run(self, exposure, idFactory, expId):
        """!
        @brief Run detection on an exposure.

        First scale the variance plane to match the observed variance
        using @ref ScaleVarianceTask.  Then invoke the
        @ref SourceDetectionTask_ "detection" subtask to detect sources.

        @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled,
            depending on configuration).
        @param[in] idFactory: IdFactory to set source identifiers
        @param[in] expId: Exposure identifier (integer) for RNG seed

        @return a pipe.base.Struct with fields
        - outputSources: catalog of detections
        - outputBackgrounds: list of backgrounds used/produced during detection
        - outputExposure: the input exposure, modified in place
        """
        if self.config.doScaleVariance:
            varScale = self.scaleVariance.run(exposure.maskedImage)
            # Record the applied scale so downstream consumers can undo it.
            exposure.getMetadata().add("VARIANCE_SCALE", varScale)
        backgrounds = afwMath.BackgroundList()
        if self.config.doInsertFakes:
            self.insertFakes.run(exposure, background=backgrounds)
        table = afwTable.SourceTable.make(self.schema, idFactory)
        detections = self.detection.run(table, exposure, expId=expId)
        sources = detections.sources
        fpSets = detections.fpSets
        # Collect any backgrounds the detection subtask fit (e.g. the
        # temporary wide background), if present.
        if hasattr(fpSets, "background") and fpSets.background:
            for bg in fpSets.background:
                backgrounds.append(bg)
        return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)

    def write(self, results, patchRef):
        """!
        @brief Write out results from @ref run.

        @param[in] results: Struct returned from @ref run
        @param[in] patchRef: data reference for patch
        """
        coaddName = self.config.coaddName + "Coadd"
        patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
        patchRef.put(results.outputSources, coaddName + "_det")
        if self.config.hasFakes:
            patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp")
        else:
            patchRef.put(results.outputExposure, coaddName + "_calexp")
344##############################################################################################################
class DeblendCoaddSourcesConfig(Config):
    """DeblendCoaddSourcesConfig

    Configuration parameters for the `DeblendCoaddSourcesTask`.
    """
    singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
                                          doc="Deblend sources separately in each band")
    multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
                                         doc="Deblend sources simultaneously across bands")
    simultaneous = Field(dtype=bool, default=False, doc="Simultaneously deblend all bands?")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    hasFakes = Field(dtype=bool,
                     default=False,
                     doc="Should be set to True if fake sources have been inserted into the input data.")

    def setDefaults(self):
        # Use super() for consistency with the other Config classes in this module.
        super().setDefaults()
        # Keep every peak from the merged detection catalog in the single-band
        # deblender's output (see SourceDeblendTask.propagateAllPeaks).
        self.singleBandDeblend.propagateAllPeaks = True
class DeblendCoaddSourcesRunner(MergeSourcesRunner):
    """Task runner for `DeblendCoaddSourcesTask`.

    Required because the run method takes a list of dataRefs
    (one per filter) rather than a single dataRef.
    """
    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        """Provide a list of patch references for each patch, tract, filter combo.

        Parameters
        ----------
        parsedCmd:
            The parsed command
        kwargs:
            Keyword arguments passed to the task

        Returns
        -------
        targetList: list
            List of tuples, where each tuple is a (dataRef, kwargs) pair.
        """
        refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
        kwargs["psfCache"] = parsedCmd.psfCache
        targets = []
        # refDict is keyed tract -> patch -> filter -> dataRef; emit one
        # target per (tract, patch) with the per-filter refs grouped together.
        for tractDict in refDict.values():
            for patchDict in tractDict.values():
                targets.append((list(patchDict.values()), kwargs))
        return targets
class DeblendCoaddSourcesTask(CmdLineTask):
    """Deblend the sources in a merged catalog.

    Deblend sources from the master catalog in each coadd.
    This can either be done separately in each band using the HSC-SDSS deblender
    (`DeblendCoaddSourcesTask.config.simultaneous==False`)
    or use SCARLET to simultaneously fit the blend in all bands
    (`DeblendCoaddSourcesTask.config.simultaneous==True`).
    The task will set its own `self.schema` attribute to the `Schema` of the
    output deblended catalog.
    This will include all fields from the input `Schema`, as well as additional fields
    from the deblender.

    Parameters
    ----------
    butler: `Butler`
        Butler used to read the input schemas from disk; required
        if `schema` or `peakSchema` is not supplied.
    schema: `Schema`
        The schema of the merged detection catalog as an input to this task.
    peakSchema: `Schema`
        The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog
    """
    ConfigClass = DeblendCoaddSourcesConfig
    RunnerClass = DeblendCoaddSourcesRunner
    _DefaultName = "deblendCoaddSources"
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_calexp",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
                               ContainerClass=ExistingCoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser

    def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
        CmdLineTask.__init__(self, **kwargs)
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
        # Map the input (mergeDet) schema into the output schema; the
        # deblender subtask adds its own fields on top of the minimal copy.
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        if peakSchema is None:
            assert butler is not None, "Neither butler nor peakSchema is defined"
            peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema

        if self.config.simultaneous:
            self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
        else:
            self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)

    def getSchemaCatalogs(self):
        """Return a dict of empty catalogs for each catalog dataset produced by this task.

        Returns
        -------
        result: dict
            Dictionary of empty catalogs, with catalog names as keys.
        """
        catalog = afwTable.SourceCatalog(self.schema)
        return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
                self.config.coaddName + "Coadd_deblendedModel": catalog}

    def runDataRef(self, patchRefList, psfCache=100):
        """Deblend the patch.

        Deblend each source simultaneously or separately
        (depending on `DeblendCoaddSourcesTask.config.simultaneous`)
        and write the deblended sources out.

        Parameters
        ----------
        patchRefList: list
            List of data references for each filter
        psfCache: int
            Size of the CoaddPsf cache (used in single-band mode only)
        """
        if self.config.hasFakes:
            coaddType = "fakes_" + self.config.coaddName
        else:
            coaddType = self.config.coaddName

        if self.config.simultaneous:
            # Use SCARLET to simultaneously deblend across filters
            filters = []
            exposures = []
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                filters.append(patchRef.dataId["filter"])
                exposures.append(exposure)
            # The input sources are the same for all bands, since it is a merged catalog
            sources = self.readSources(patchRef)
            exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
            templateCatalogs = self.multiBandDeblend.run(exposure, sources)
            for n in range(len(patchRefList)):
                self.write(patchRefList[n], templateCatalogs[filters[n]])
        else:
            # Use the single-band deblender to deblend each band separately
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                exposure.getPsf().setCacheCapacity(psfCache)
                sources = self.readSources(patchRef)
                self.singleBandDeblend.run(exposure, sources)
                self.write(patchRef, sources)

    def readSources(self, dataRef):
        """Read merged catalog.

        Read the catalog of merged detections and create a catalog
        in a single band.

        Parameters
        ----------
        dataRef: data reference
            Data reference for catalog of merged detections

        Returns
        -------
        sources: `SourceCatalog`
            List of sources in merged catalog

        We also need to add columns to hold the measurements we're about to make
        so we can measure in-place.
        """
        merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
        self.log.info("Read %d detections: %s", len(merged), dataRef.dataId)
        idFactory = self.makeIdFactory(dataRef)
        # Reserve the IDs already used by the merged catalog so new records
        # (deblended children) get fresh identifiers.
        for s in merged:
            idFactory.notify(s.getId())
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(merged, self.schemaMapper)
        return sources

    def write(self, dataRef, sources):
        """Write the source catalog.

        Parameters
        ----------
        dataRef: Data Reference
            Reference to the output catalog.
        sources: `SourceCatalog`
            Flux-conserved sources to write to file.
            If using the single band deblender, this is the catalog
            generated.
        """
        dataRef.put(sources, self.config.coaddName + "Coadd_deblendedFlux")
        self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId)

    def writeMetadata(self, dataRefList):
        """Write the metadata produced from processing the data.

        Parameters
        ----------
        dataRefList
            List of Butler data references used to write the metadata.
            The metadata is written to dataset type `CmdLineTask._getMetadataName`.
        """
        # Overridden because this task receives a *list* of dataRefs; write
        # metadata best-effort for each, never failing the whole task.
        for dataRef in dataRefList:
            try:
                metadataName = self._getMetadataName()
                if metadataName is not None:
                    dataRef.put(self.getFullMetadata(), metadataName)
            except Exception as e:
                self.log.warn("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)

    def getExposureId(self, dataRef):
        """Get the ExposureId from a data reference."""
        return int(dataRef.get(self.config.coaddName + "CoaddId"))
class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep"}):
    """Butler dataset connections for `MeasureMergedCoaddSourcesTask`.

    Optional connections are removed in ``__init__`` according to the
    ``doPropagateFlags``, ``doMatchSources`` and ``doWriteMatchesDenormalized``
    configuration flags.
    """
    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        name="ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects",
        name="src",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use."
             "If the single band deblender was used this should be 'deblendedFlux."
             "If the multi-band deblender was used this should be 'deblendedModel, "
             "or deblendedFlux if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'"),
        name="{inputCoaddName}Coadd_deblendedFlux",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # Drop optional connections the configuration disables, so the
        # middleware does not require the corresponding datasets.
        if not config.doPropagateFlags:
            self.inputs -= {"visitCatalogs"}

        if not config.doMatchSources:
            self.outputs -= {"matchResult"}

        if not config.doWriteMatchesDenormalized:
            self.outputs -= {"denormMatches"}
class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """!
    @anchor MeasureMergedCoaddSourcesConfig_

    @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
    """
    inputCatalog = Field(dtype=str, default="deblendedFlux",
                         doc=("Name of the input catalog to use."
                              "If the single band deblender was used this should be 'deblendedFlux."
                              "If the multi-band deblender was used this should be 'deblendedModel."
                              "If no deblending was performed this should be 'mergeDet'"))
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str,
        default="raise",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )

    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    @property
    def refObjLoader(self):
        # Convenience accessor for the reference-object loader configuration
        # nested inside the match subtask.
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        # Additional measurement plugins enabled for coadd processing.
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_Variance',
                                           'base_LocalPhotoCalib',
                                           'base_LocalWcs']
        # Coadd-specific mask planes to report via the pixel-flags plugin.
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                       'INEXACT_PSF']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                     'INEXACT_PSF']

    def validate(self):
        super().validate()
        # Gen2 names the reference catalog via refObjLoader.ref_dataset_name,
        # Gen3 via the connections' refCat; the two must agree.
        refCatGen2 = getattr(self.refObjLoader, "ref_dataset_name", None)
        if refCatGen2 is not None and refCatGen2 != self.connections.refCat:
            raise ValueError(
                f"Gen2 ({refCatGen2}) and Gen3 ({self.connections.refCat}) reference catalogs "
                f"are different. These options must be kept in sync until Gen2 is retired."
            )
746## @addtogroup LSST_task_documentation
747## @{
748## @page MeasureMergedCoaddSourcesTask
749## @ref MeasureMergedCoaddSourcesTask_ "MeasureMergedCoaddSourcesTask"
750## @copybrief MeasureMergedCoaddSourcesTask
751## @}
class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
    """Task runner that forwards the command-line ``psfCache`` setting
    into MeasureMergedCoaddSourcesTask.
    """
    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        # Delegate to the base implementation, injecting psfCache from the
        # parsed command line; any incoming **kwargs are ignored, exactly as
        # in the base-class call made here.
        psfCache = parsedCmd.psfCache
        return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=psfCache)
class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask):
    r"""!
    @anchor MeasureMergedCoaddSourcesTask_

    @brief Deblend sources from master catalog in each coadd separately and measure.

    @section pipe_tasks_multiBand_Contents Contents

      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
      - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description

    Command-line task that uses peaks and footprints from a master catalog to perform deblending and
    measurement in each coadd.

    Given a master input catalog of sources (peaks and footprints) or deblender outputs
    (including a HeavyFootprint in each band), measure each source on the
    coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
    consistent set of child sources.

    The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
    properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
    flags are propagated to the coadd sources.

    Optionally, we can match the coadd sources to an external reference catalog.

    @par Inputs:
        deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
        @n deepCoadd_calexp{tract,patch,filter}: ExposureF
    @par Outputs:
        deepCoadd_meas{tract,patch,filter}: SourceCatalog
    @par Data Unit:
        tract, patch, filter

    MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:

    <DL>
      <DT> @ref SingleFrameMeasurementTask_ "measurement"
      <DD> Measure source properties of deblended sources.</DD>
      <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
      <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are
      not at the edge of the field and that have either not been deblended or are the children of deblended
      sources</DD>
      <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
      <DD> Propagate flags set in individual visits to the coadd.</DD>
      <DT> @ref DirectMatchTask_ "match"
      <DD> Match input sources to a reference catalog (optional).
      </DD>
    </DL>
    These subtasks may be retargeted as required.

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization

    @copydoc \_\_init\_\_

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task

    @copydoc run

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters

    See @ref MeasureMergedCoaddSourcesConfig_

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables

    The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
    flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
    files.

    MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
    the various sub-tasks. See the documentation for individual sub-tasks for more information.

    @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
    MeasureMergedCoaddSourcesTask

    After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
    The next stage in the multi-band processing procedure will merge these measurements into a suitable
    catalog for driving forced photometry.

    Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds
    to be processed.
    A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
    `--help` command line argument:
    @code
    measureCoaddSources.py --help
    @endcode

    To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
    will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
    step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I band
    coadd as follows:
    @code
    measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
    @endcode
    This will process the HSC-I band data. The results are written in
    `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`

    It is also necessary to run
    @code
    measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
    @endcode
    to generate the sources catalogs for the HSC-R band required by the next step in the multi-band
    procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
    """
    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig
    RunnerClass = MeasureMergedCoaddSourcesRunner
    getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")  # The IDs we already have are of this type
    @classmethod
    def _makeArgumentParser(cls):
        # Gen2 command-line interface: a coadd data ID plus a --psfCache option
        # that MeasureMergedCoaddSourcesRunner forwards into runDataRef.
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_calexp",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                               ContainerClass=ExistingCoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser
    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        """!
        @brief Initialize the task.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
        @param[in] schema: the schema of the merged detection catalog used as input to this one
        @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
            (accepted for API compatibility; not used directly in this method)
        @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external reference
            catalog. May be None if the loader can be constructed from the butler argument or all steps
            requiring a reference catalog are disabled.
        @param[in] butler: a butler used to read the input schemas from disk or construct the reference
            catalog loader, if schema or peakSchema or refObjLoader is None
        @param[in] initInputs: Gen3 init-input dictionary; if provided, its 'inputSchema' entry overrides
            the schema argument

        The task will set its own self.schema attribute to the schema of the output measurement catalog.
        This will include all fields from the input schema, as well as additional fields for all the
        measurements.
        """
        super().__init__(**kwargs)
        # True when the input catalog is a deblender output rather than raw merged detections.
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        # Gen3: schema arrives via initInputs and takes precedence over the argument.
        if initInputs is not None:
            schema = initInputs['inputSchema'].schema
        # Gen2 fallback: read the input schema from the butler.
        # NOTE(review): assert is stripped under python -O; behavior kept as-is.
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
        # Map the input schema onto the output schema; measurement subtasks below
        # extend self.schema with their own fields.
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        # Validate units on the fully-assembled output schema.
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        # Empty catalog carrying the output schema (PipelineTask init output).
        self.outputSchema = afwTable.SourceCatalog(self.schema)
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        # Gen3 entry point: assemble the keyword arguments run() expects from
        # the quantum's inputs, invoke run, and persist its outputs.
        inputs = butlerQC.get(inputRefs)

        # Build a reference-object loader from the refCat dataset handles and
        # hand it to the match subtask.
        refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                             inputs.pop('refCat'), config=self.config.refObjLoader,
                                             log=self.log)
        self.match.setRefObjLoader(refObjLoader)

        # Set psfcache
        # move this to run after gen2 deprecation
        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        # Get unique integer ID for IdFactory and RNG seeds
        packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch", returnMaxBits=True)
        inputs['exposureId'] = packedId
        idFactory = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
        # Transform inputCatalog: copy the merged detections into a catalog
        # with the full measurement schema (measured in place by run()).
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(inputs.pop('inputCatalog'), self.schemaMapper)
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.
        inputs['sources'] = sources

        # Reconstruct the skyInfo struct run() expects from the skyMap dataset.
        skyMap = inputs.pop('skyMap')
        tractNumber = inputRefs.inputCatalog.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(inputRefs.inputCatalog.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            # Filter out any visit catalog that is not coadd inputs
            ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
            visitKey = ccdInputs.schema.find("visit").key
            ccdKey = ccdInputs.schema.find("ccd").key
            inputVisitIds = set()
            ccdRecordsWcs = {}
            for ccdRecord in ccdInputs:
                visit = ccdRecord.get(visitKey)
                ccd = ccdRecord.get(ccdKey)
                inputVisitIds.add((visit, ccd))
                ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

            # Keep only the visit catalogs whose (visit, detector) pair actually
            # contributed to this coadd, with the matching per-CCD WCS.
            inputCatalogsToKeep = []
            inputCatalogWcsUpdate = []
            for i, dataRef in enumerate(inputRefs.visitCatalogs):
                key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                if key in inputVisitIds:
                    inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                    inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
            inputs['visitCatalogs'] = inputCatalogsToKeep
            inputs['wcsUpdates'] = inputCatalogWcsUpdate
            inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)
    def runDataRef(self, patchRef, psfCache=100):
        """!
        @brief Deblend and measure.

        @param[in] patchRef: Patch reference.
        @param[in] psfCache: Capacity of the CoaddPsf evaluation cache.

        Set 'is-primary' and related flags. Propagate flags
        from individual visits. Optionally match the sources to a reference catalog and write the matches.
        Finally, write the deblended sources and measurements out.
        """
        # With fakes the coadd lives under a "fakes_"-prefixed dataset type.
        if self.config.hasFakes:
            coaddType = "fakes_" + self.config.coaddName
        else:
            coaddType = self.config.coaddName
        exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
        exposure.getPsf().setCacheCapacity(psfCache)
        sources = self.readSources(patchRef)
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.
        skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)

        if self.config.doPropagateFlags:
            ccdInputs = self.propagateFlags.getCcdInputs(exposure)
        else:
            ccdInputs = None

        results = self.run(exposure=exposure, sources=sources,
                           ccdInputs=ccdInputs,
                           skyInfo=skyInfo, butler=patchRef.getButler(),
                           exposureId=self.getExposureId(patchRef))

        if self.config.doMatchSources:
            self.writeMatches(patchRef, results)
        self.write(patchRef, results.outputSources)
1028 def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
1029 butler=None):
1030 """Run measurement algorithms on the input exposure, and optionally populate the
1031 resulting catalog with extra information.
1033 Parameters
1034 ----------
1035 exposure : `lsst.afw.exposure.Exposure`
1036 The input exposure on which measurements are to be performed
1037 sources : `lsst.afw.table.SourceCatalog`
1038 A catalog built from the results of merged detections, or
1039 deblender outputs.
1040 skyInfo : `lsst.pipe.base.Struct`
1041 A struct containing information about the position of the input exposure within
1042 a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
1043 exposureId : `int` or `bytes`
1044 packed unique number or bytes unique to the input exposure
1045 ccdInputs : `lsst.afw.table.ExposureCatalog`
1046 Catalog containing information on the individual visits which went into making
1047 the exposure
1048 visitCatalogs : list of `lsst.afw.table.SourceCatalogs` or `None`
1049 A list of source catalogs corresponding to measurements made on the individual
1050 visits which went into the input exposure. If None and butler is `None` then
1051 the task cannot propagate visit flags to the output catalog.
1052 wcsUpdates : list of `lsst.afw.geom.SkyWcs` or `None`
1053 If visitCatalogs is not `None` this should be a list of wcs objects which correspond
1054 to the input visits. Used to put all coordinates to common system. If `None` and
1055 butler is `None` then the task cannot propagate visit flags to the output catalog.
1056 butler : `lsst.daf.butler.Butler` or `lsst.daf.persistence.Butler`
1057 Either a gen2 or gen3 butler used to load visit catalogs
1059 Returns
1060 -------
1061 results : `lsst.pipe.base.Struct`
1062 Results of running measurement task. Will contain the catalog in the
1063 sources attribute. Optionally will have results of matching to a
1064 reference catalog in the matchResults attribute, and denormalized
1065 matches in the denormMatches attribute.
1066 """
1067 self.measurement.run(sources, exposure, exposureId=exposureId)
1069 if self.config.doApCorr: 1069 ↛ 1079line 1069 didn't jump to line 1079, because the condition on line 1069 was never false
1070 self.applyApCorr.run(
1071 catalog=sources,
1072 apCorrMap=exposure.getInfo().getApCorrMap()
1073 )
1075 # TODO DM-11568: this contiguous check-and-copy could go away if we
1076 # reserve enough space during SourceDetection and/or SourceDeblend.
1077 # NOTE: sourceSelectors require contiguous catalogs, so ensure
1078 # contiguity now, so views are preserved from here on.
1079 if not sources.isContiguous(): 1079 ↛ 1080line 1079 didn't jump to line 1080, because the condition on line 1079 was never true
1080 sources = sources.copy(deep=True)
1082 if self.config.doRunCatalogCalculation: 1082 ↛ 1085line 1082 didn't jump to line 1085, because the condition on line 1082 was never false
1083 self.catalogCalculation.run(sources)
1085 self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
1086 patchInfo=skyInfo.patchInfo, includeDeblend=self.deblended)
1087 if self.config.doPropagateFlags: 1087 ↛ 1090line 1087 didn't jump to line 1090, because the condition on line 1087 was never false
1088 self.propagateFlags.run(butler, sources, ccdInputs, exposure.getWcs(), visitCatalogs, wcsUpdates)
1090 results = Struct()
1092 if self.config.doMatchSources: 1092 ↛ 1093line 1092 didn't jump to line 1093, because the condition on line 1092 was never true
1093 matchResult = self.match.run(sources, exposure.getInfo().getFilter().getName())
1094 matches = afwTable.packMatches(matchResult.matches)
1095 matches.table.setMetadata(matchResult.matchMeta)
1096 results.matchResult = matches
1097 if self.config.doWriteMatchesDenormalized:
1098 if matchResult.matches:
1099 denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
1100 else:
1101 self.log.warn("No matches, so generating dummy denormalized matches file")
1102 denormMatches = afwTable.BaseCatalog(afwTable.Schema())
1103 denormMatches.setMetadata(PropertyList())
1104 denormMatches.getMetadata().add("COMMENT",
1105 "This catalog is empty because no matches were found.")
1106 results.denormMatches = denormMatches
1107 results.denormMatches = denormMatches
1109 results.outputSources = sources
1110 return results
1112 def readSources(self, dataRef):
1113 """!
1114 @brief Read input sources.
1116 @param[in] dataRef: Data reference for catalog of merged detections
1117 @return List of sources in merged catalog
1119 We also need to add columns to hold the measurements we're about to make
1120 so we can measure in-place.
1121 """
1122 merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
1123 self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
1124 idFactory = self.makeIdFactory(dataRef)
1125 for s in merged:
1126 idFactory.notify(s.getId())
1127 table = afwTable.SourceTable.make(self.schema, idFactory)
1128 sources = afwTable.SourceCatalog(table)
1129 sources.extend(merged, self.schemaMapper)
1130 return sources
1132 def writeMatches(self, dataRef, results):
1133 """!
1134 @brief Write matches of the sources to the astrometric reference catalog.
1136 @param[in] dataRef: data reference
1137 @param[in] results: results struct from run method
1138 """
1139 if hasattr(results, "matchResult"):
1140 dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch")
1141 if hasattr(results, "denormMatches"):
1142 dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull")
1144 def write(self, dataRef, sources):
1145 """!
1146 @brief Write the source catalog.
1148 @param[in] dataRef: data reference
1149 @param[in] sources: source catalog
1150 """
1151 dataRef.put(sources, self.config.coaddName + "Coadd_meas")
1152 self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId))
    def getExposureId(self, dataRef):
        # Packed integer uniquely identifying this coadd patch; used for
        # source IDs and RNG seeds in run().
        return int(dataRef.get(self.config.coaddName + "CoaddId"))