Coverage for python/lsst/pipe/tasks/multiBand.py: 29%
1#!/usr/bin/env python
2#
3# LSST Data Management System
4# Copyright 2008-2015 AURA/LSST.
5#
6# This product includes software developed by the
7# LSST Project (http://www.lsst.org/).
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the LSST License Statement and
20# the GNU General Public License along with this program. If not,
21# see <https://www.lsstcorp.org/LegalNotices/>.
22#
23import numpy as np
25from lsst.coadd.utils.coaddDataIdContainer import ExistingCoaddDataIdContainer
26from lsst.coadd.utils.getGen3CoaddExposureId import getGen3CoaddExposureId
27from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner,
28 PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
29import lsst.pipe.base.connectionTypes as cT
30from lsst.pex.config import Config, Field, ConfigurableField
31from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader
32from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
33from lsst.meas.deblender import SourceDeblendTask
34from lsst.meas.extensions.scarlet import ScarletDeblendTask
35from lsst.pipe.tasks.coaddBase import getSkyInfo
36from lsst.pipe.tasks.scaleVariance import ScaleVarianceTask
37from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
38from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
39from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
40from lsst.pipe.tasks.propagateVisitFlags import PropagateVisitFlagsTask
41import lsst.afw.image as afwImage
42import lsst.afw.table as afwTable
43import lsst.afw.math as afwMath
44from lsst.daf.base import PropertyList
45from lsst.skymap import BaseSkyMap
46from lsst.obs.base import ExposureIdInfo
48# NOTE: these imports are a convenience so multiband users only have to import this file.
49from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401
50from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401
51from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401
52from .multiBandUtils import getInputSchema, readCatalog, _makeMakeIdFactory # noqa: F401
53from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401
54from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401
55from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401
56from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401
59"""
 60New dataset types:
61* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
62* deepCoadd_mergeDet: merged detections (tract, patch)
63* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
64* deepCoadd_ref: reference sources (tract, patch)
65All of these have associated *_schema catalogs that require no data ID and hold no records.
67In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
68the mergeDet, meas, and ref dataset Footprints:
69* deepCoadd_peak_schema
70"""
73##############################################################################################################
74class DetectCoaddSourcesConnections(PipelineTaskConnections,
75 dimensions=("tract", "patch", "band", "skymap"),
76 defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
77 detectionSchema = cT.InitOutput(
78 doc="Schema of the detection catalog",
79 name="{outputCoaddName}Coadd_det_schema",
80 storageClass="SourceCatalog",
81 )
82 exposure = cT.Input(
83 doc="Exposure on which detections are to be performed",
84 name="{inputCoaddName}Coadd",
85 storageClass="ExposureF",
86 dimensions=("tract", "patch", "band", "skymap")
87 )
88 outputBackgrounds = cT.Output(
89 doc="Output Backgrounds used in detection",
90 name="{outputCoaddName}Coadd_calexp_background",
91 storageClass="Background",
92 dimensions=("tract", "patch", "band", "skymap")
93 )
94 outputSources = cT.Output(
95 doc="Detected sources catalog",
96 name="{outputCoaddName}Coadd_det",
97 storageClass="SourceCatalog",
98 dimensions=("tract", "patch", "band", "skymap")
99 )
100 outputExposure = cT.Output(
101 doc="Exposure post detection",
102 name="{outputCoaddName}Coadd_calexp",
103 storageClass="ExposureF",
104 dimensions=("tract", "patch", "band", "skymap")
105 )
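# The dataset names above are expanded from the "inputCoaddName"/"outputCoaddName"
# templates. A minimal config-override sketch for pointing the task at a
# differently named coadd; the "goodSeeing" value is only an illustrative placeholder:
#
#     config.connections.inputCoaddName = "goodSeeing"
#     config.connections.outputCoaddName = "goodSeeing"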
108class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
109 """!
110 @anchor DetectCoaddSourcesConfig_
112 @brief Configuration parameters for the DetectCoaddSourcesTask
113 """
114 doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
115 scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
116 detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
117 coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
118 doInsertFakes = Field(dtype=bool, default=False,
119 doc="Run fake sources injection task")
120 insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
121 doc="Injection of fake sources for testing "
122 "purposes (must be retargeted)")
123 hasFakes = Field(
124 dtype=bool,
125 default=False,
126 doc="Should be set to True if fake sources have been inserted into the input data."
127 )
129 def setDefaults(self):
130 super().setDefaults()
131 self.detection.thresholdType = "pixel_stdev"
132 self.detection.isotropicGrow = True
133 # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
134 self.detection.reEstimateBackground = False
135 self.detection.background.useApprox = False
136 self.detection.background.binSize = 4096
137 self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
138 self.detection.doTempWideBackground = True # Suppress large footprints that overwhelm the deblender
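# The detection defaults set above can be tuned further in a standard config
# override file; a minimal sketch, with an illustrative (not recommended)
# threshold value:
#
#     config.doScaleVariance = True
#     config.detection.thresholdValue = 5.0
#     config.detection.doTempWideBackground = False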
140## @addtogroup LSST_task_documentation
141## @{
142## @page page_DetectCoaddSourcesTask DetectCoaddSourcesTask
143## @ref DetectCoaddSourcesTask_ "DetectCoaddSourcesTask"
144## @copybrief DetectCoaddSourcesTask
145## @}
148class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
149 r"""!
150 @anchor DetectCoaddSourcesTask_
152 @brief Detect sources on a coadd
154 @section pipe_tasks_multiBand_Contents Contents
156 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
157 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
158 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
159 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
160 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
161 - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example
163 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description
165 Command-line task that detects sources on a coadd of exposures obtained with a single filter.
167 Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
168 properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
169 in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
170 propagate the full covariance matrix -- but it is simple and works well in practice.
172 After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref
173 SourceDetectionTask_ "detection" subtask.
175 @par Inputs:
176 deepCoadd{tract,patch,filter}: ExposureF
177 @par Outputs:
178 deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
179 @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input
180 exposure (ExposureF)
181 @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList
182 @par Data Unit:
183 tract, patch, filter
185 DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
186 You can retarget this subtask if you wish.
188 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization
190 @copydoc \_\_init\_\_
192 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task
194 @copydoc run
196 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters
 198 See @ref DetectCoaddSourcesConfig_ "DetectCoaddSourcesConfig"
200 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables
202 The command line task interface supports a
203 flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
204 files.
 206 DetectCoaddSourcesTask has no debug variables of its own because it delegates all the work to
 207 @ref SourceDetectionTask_ "SourceDetectionTask"; see the documentation for
208 @ref SourceDetectionTask_ "SourceDetectionTask" for further information.
210 @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example
211 of using DetectCoaddSourcesTask
213 DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose of
214 the task is to update the background, detect all sources in a single band and generate a set of parent
215 footprints. Subsequent tasks in the multi-band processing procedure will merge sources across bands and,
216 eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects a data
217 reference to the coadd to be processed. A list of the available optional arguments can be obtained by
218 calling detectCoaddSources.py with the `--help` command line argument:
219 @code
220 detectCoaddSources.py --help
221 @endcode
223 To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
224 will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
225 steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:
226 @code
227 detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
228 @endcode
 229 This will process the HSC-I band data. The results are written to
230 `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.
232 It is also necessary to run:
233 @code
234 detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
235 @endcode
236 to generate the sources catalogs for the HSC-R band required by the next step in the multi-band
237 processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".
238 """
239 _DefaultName = "detectCoaddSources"
240 ConfigClass = DetectCoaddSourcesConfig
241 getSchemaCatalogs = _makeGetSchemaCatalogs("det")
242 makeIdFactory = _makeMakeIdFactory("CoaddId")
244 @classmethod
245 def _makeArgumentParser(cls):
246 parser = ArgumentParser(name=cls._DefaultName)
247 parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
248 ContainerClass=ExistingCoaddDataIdContainer)
249 return parser
251 def __init__(self, schema=None, **kwargs):
252 """!
253 @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.
255 Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
 257 @param[in] schema: initial schema for the output catalog, modified in place to include all
258 fields set by this task. If None, the source minimal schema will be used.
259 @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
260 """
261 # N.B. Super is used here to handle the multiple inheritance of PipelineTasks, the init tree
262 # call structure has been reviewed carefully to be sure super will work as intended.
263 super().__init__(**kwargs)
264 if schema is None:
265 schema = afwTable.SourceTable.makeMinimalSchema()
266 if self.config.doInsertFakes:
267 self.makeSubtask("insertFakes")
268 self.schema = schema
269 self.makeSubtask("detection", schema=self.schema)
270 if self.config.doScaleVariance:
271 self.makeSubtask("scaleVariance")
273 self.detectionSchema = afwTable.SourceCatalog(self.schema)
275 def runDataRef(self, patchRef):
276 """!
277 @brief Run detection on a coadd.
279 Invokes @ref run and then uses @ref write to output the
280 results.
282 @param[in] patchRef: data reference for patch
283 """
284 if self.config.hasFakes:
285 exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True)
286 else:
287 exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
288 expId = getGen3CoaddExposureId(patchRef, coaddName=self.config.coaddName, log=self.log)
289 results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
290 self.write(results, patchRef)
291 return results
293 def runQuantum(self, butlerQC, inputRefs, outputRefs):
294 inputs = butlerQC.get(inputRefs)
295 exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch_band")
296 inputs["idFactory"] = exposureIdInfo.makeSourceIdFactory()
297 inputs["expId"] = exposureIdInfo.expId
298 outputs = self.run(**inputs)
299 butlerQC.put(outputs, outputRefs)
301 def run(self, exposure, idFactory, expId):
302 """!
303 @brief Run detection on an exposure.
305 First scale the variance plane to match the observed variance
306 using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
307 detect sources.
 309 @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled,
310 depending on configuration).
311 @param[in] idFactory: IdFactory to set source identifiers
312 @param[in] expId: Exposure identifier (integer) for RNG seed
314 @return a pipe.base.Struct with fields
315 - sources: catalog of detections
316 - backgrounds: list of backgrounds
317 """
318 if self.config.doScaleVariance:
319 varScale = self.scaleVariance.run(exposure.maskedImage)
320 exposure.getMetadata().add("VARIANCE_SCALE", varScale)
321 backgrounds = afwMath.BackgroundList()
322 if self.config.doInsertFakes:
323 self.insertFakes.run(exposure, background=backgrounds)
324 table = afwTable.SourceTable.make(self.schema, idFactory)
325 detections = self.detection.run(table, exposure, expId=expId)
326 sources = detections.sources
327 fpSets = detections.fpSets
328 if hasattr(fpSets, "background") and fpSets.background:
329 for bg in fpSets.background:
330 backgrounds.append(bg)
331 return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
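# A minimal sketch of driving `run` directly on an in-memory coadd, outside the
# butler/command-line machinery. The `exposure` object and the expId value are
# hypothetical; a simple IdFactory stands in for the one normally derived from
# the data ID:
#
#     import lsst.afw.table as afwTable
#     from lsst.pipe.tasks.multiBand import DetectCoaddSourcesTask
#
#     task = DetectCoaddSourcesTask()
#     idFactory = afwTable.IdFactory.makeSimple()
#     result = task.run(exposure, idFactory, expId=0)
#     catalog = result.outputSources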
333 def write(self, results, patchRef):
334 """!
 335 @brief Write out results from run.
 338 @param[in] results: Struct returned from run
 339 @param[in] patchRef: data reference for patch
340 """
341 coaddName = self.config.coaddName + "Coadd"
342 patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
343 patchRef.put(results.outputSources, coaddName + "_det")
344 if self.config.hasFakes:
345 patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp")
346 else:
347 patchRef.put(results.outputExposure, coaddName + "_calexp")
349##############################################################################################################
352class DeblendCoaddSourcesConfig(Config):
353 """DeblendCoaddSourcesConfig
355 Configuration parameters for the `DeblendCoaddSourcesTask`.
356 """
357 singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
358 doc="Deblend sources separately in each band")
359 multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
360 doc="Deblend sources simultaneously across bands")
361 simultaneous = Field(dtype=bool,
362 default=True,
363 doc="Simultaneously deblend all bands? "
364 "True uses `multibandDeblend` while False uses `singleBandDeblend`")
365 coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
366 hasFakes = Field(dtype=bool,
367 default=False,
368 doc="Should be set to True if fake sources have been inserted into the input data.")
370 def setDefaults(self):
371 Config.setDefaults(self)
372 self.singleBandDeblend.propagateAllPeaks = True
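# Whether the single-band or the multi-band (scarlet) deblender runs is
# controlled entirely by `simultaneous`. A minimal config-override sketch:
#
#     config.simultaneous = False                        # use singleBandDeblend
#     config.singleBandDeblend.propagateAllPeaks = True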
375class DeblendCoaddSourcesRunner(MergeSourcesRunner):
376 """Task runner for the `MergeSourcesTask`
378 Required because the run method requires a list of
379 dataRefs rather than a single dataRef.
380 """
381 @staticmethod
382 def getTargetList(parsedCmd, **kwargs):
383 """Provide a list of patch references for each patch, tract, filter combo.
385 Parameters
386 ----------
387 parsedCmd:
388 The parsed command
389 kwargs:
390 Keyword arguments passed to the task
392 Returns
393 -------
394 targetList: list
395 List of tuples, where each tuple is a (dataRef, kwargs) pair.
396 """
397 refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
398 kwargs["psfCache"] = parsedCmd.psfCache
399 return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()]
402class DeblendCoaddSourcesTask(CmdLineTask):
403 """Deblend the sources in a merged catalog
405 Deblend sources from master catalog in each coadd.
406 This can either be done separately in each band using the HSC-SDSS deblender
407 (`DeblendCoaddSourcesTask.config.simultaneous==False`)
408 or use SCARLET to simultaneously fit the blend in all bands
409 (`DeblendCoaddSourcesTask.config.simultaneous==True`).
 410 The task will set its own `self.schema` attribute to the `Schema` of the
411 output deblended catalog.
412 This will include all fields from the input `Schema`, as well as additional fields
413 from the deblender.
 415 `pipe.tasks.multiband.DeblendCoaddSourcesTask` Description
 416 ----------------------------------------------------------
419 Parameters
420 ----------
421 butler: `Butler`
 422 Butler used to read the input schemas from disk,
 423 if `schema` or `peakSchema` is `None`.
424 schema: `Schema`
425 The schema of the merged detection catalog as an input to this task.
426 peakSchema: `Schema`
427 The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog
428 """
429 ConfigClass = DeblendCoaddSourcesConfig
430 RunnerClass = DeblendCoaddSourcesRunner
431 _DefaultName = "deblendCoaddSources"
432 makeIdFactory = _makeMakeIdFactory("MergedCoaddId", includeBand=False)
434 @classmethod
435 def _makeArgumentParser(cls):
436 parser = ArgumentParser(name=cls._DefaultName)
437 parser.add_id_argument("--id", "deepCoadd_calexp",
438 help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
439 ContainerClass=ExistingCoaddDataIdContainer)
440 parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
441 return parser
443 def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
444 CmdLineTask.__init__(self, **kwargs)
445 if schema is None:
446 assert butler is not None, "Neither butler nor schema is defined"
447 schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
448 self.schemaMapper = afwTable.SchemaMapper(schema)
449 self.schemaMapper.addMinimalSchema(schema)
450 self.schema = self.schemaMapper.getOutputSchema()
451 if peakSchema is None:
452 assert butler is not None, "Neither butler nor peakSchema is defined"
453 peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema
455 if self.config.simultaneous:
456 self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
457 else:
458 self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)
460 def getSchemaCatalogs(self):
461 """Return a dict of empty catalogs for each catalog dataset produced by this task.
463 Returns
464 -------
465 result: dict
466 Dictionary of empty catalogs, with catalog names as keys.
467 """
468 catalog = afwTable.SourceCatalog(self.schema)
469 return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
470 self.config.coaddName + "Coadd_deblendedModel": catalog}
472 def runDataRef(self, patchRefList, psfCache=100):
473 """Deblend the patch
475 Deblend each source simultaneously or separately
476 (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
 479 Write the deblended sources out.
481 Parameters
482 ----------
483 patchRefList: list
484 List of data references for each filter
485 """
487 if self.config.hasFakes:
488 coaddType = "fakes_" + self.config.coaddName
489 else:
490 coaddType = self.config.coaddName
492 if self.config.simultaneous:
493 # Use SCARLET to simultaneously deblend across filters
494 filters = []
495 exposures = []
496 for patchRef in patchRefList:
497 exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
498 filter = patchRef.get(coaddType + "Coadd_filterLabel", immediate=True)
499 filters.append(filter.bandLabel)
500 exposures.append(exposure)
501 # Sort inputs by band to match Gen3 order of inputs
502 exposures = [exposure for _, exposure in sorted(zip(filters, exposures))]
503 patchRefList = [patchRef for _, patchRef in sorted(zip(filters, patchRefList))]
504 filters.sort()
505 # The input sources are the same for all bands, since it is a merged catalog
506 sources = self.readSources(patchRef)
507 exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
508 templateCatalogs, fluxCatalogs = self.multiBandDeblend.run(exposure, sources)
509 for n in range(len(patchRefList)):
510 self.write(patchRefList[n], templateCatalogs[filters[n]], "Model")
511 if filters[n] in fluxCatalogs:
512 self.write(patchRefList[n], fluxCatalogs[filters[n]], "Flux")
513 else:
 514 # Use the single-band deblender to deblend each band separately
515 for patchRef in patchRefList:
516 exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
517 exposure.getPsf().setCacheCapacity(psfCache)
518 sources = self.readSources(patchRef)
519 self.singleBandDeblend.run(exposure, sources)
 520 self.write(patchRef, sources, "Flux")
522 def readSources(self, dataRef):
523 """Read merged catalog
525 Read the catalog of merged detections and create a catalog
526 in a single band.
528 Parameters
529 ----------
530 dataRef: data reference
531 Data reference for catalog of merged detections
533 Returns
534 -------
535 sources: `SourceCatalog`
536 List of sources in merged catalog
538 We also need to add columns to hold the measurements we're about to make
539 so we can measure in-place.
540 """
541 merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
542 self.log.info("Read %d detections: %s", len(merged), dataRef.dataId)
543 idFactory = self.makeIdFactory(dataRef)
544 # There may be gaps in the mergeDet catalog, which will cause the
545 # source ids to be inconsistent. So we update the id factory
546 # with the largest id already in the catalog.
547 maxId = np.max(merged["id"])
548 idFactory.notify(maxId)
549 table = afwTable.SourceTable.make(self.schema, idFactory)
550 sources = afwTable.SourceCatalog(table)
551 sources.extend(merged, self.schemaMapper)
552 return sources
554 def write(self, dataRef, sources, catalogType):
555 """Write the source catalog(s)
557 Parameters
558 ----------
559 dataRef: Data Reference
560 Reference to the output catalog.
 561 sources: `SourceCatalog`
 562     Catalog of deblended sources to write. For the multi-band deblender
 563     this is either the flux-conserved ("Flux") or template-model ("Model")
 564     catalog; the single-band deblender produces only the flux catalog.
 565 catalogType: `str`
 566     Suffix appended to "<coaddName>Coadd_deblended" to form the output
 567     dataset name (e.g. "Flux" or "Model").
 568 """
569 dataRef.put(sources, self.config.coaddName + f"Coadd_deblended{catalogType}")
570 self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId)
572 def writeMetadata(self, dataRefList):
573 """Write the metadata produced from processing the data.
574 Parameters
575 ----------
576 dataRefList
577 List of Butler data references used to write the metadata.
578 The metadata is written to dataset type `CmdLineTask._getMetadataName`.
579 """
580 for dataRef in dataRefList:
581 try:
582 metadataName = self._getMetadataName()
583 if metadataName is not None:
584 dataRef.put(self.getFullMetadata(), metadataName)
585 except Exception as e:
586 self.log.warning("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)
589class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
590 dimensions=("tract", "patch", "band", "skymap"),
591 defaultTemplates={"inputCoaddName": "deep",
592 "outputCoaddName": "deep",
593 "deblendedCatalog": "deblendedFlux"}):
594 inputSchema = cT.InitInput(
595 doc="Input schema for measure merged task produced by a deblender or detection task",
596 name="{inputCoaddName}Coadd_deblendedFlux_schema",
597 storageClass="SourceCatalog"
598 )
599 outputSchema = cT.InitOutput(
600 doc="Output schema after all new fields are added by task",
601 name="{inputCoaddName}Coadd_meas_schema",
602 storageClass="SourceCatalog"
603 )
604 refCat = cT.PrerequisiteInput(
605 doc="Reference catalog used to match measured sources against known sources",
606 name="ref_cat",
607 storageClass="SimpleCatalog",
608 dimensions=("skypix",),
609 deferLoad=True,
610 multiple=True
611 )
612 exposure = cT.Input(
613 doc="Input coadd image",
614 name="{inputCoaddName}Coadd_calexp",
615 storageClass="ExposureF",
616 dimensions=("tract", "patch", "band", "skymap")
617 )
618 skyMap = cT.Input(
619 doc="SkyMap to use in processing",
620 name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
621 storageClass="SkyMap",
622 dimensions=("skymap",),
623 )
624 visitCatalogs = cT.Input(
625 doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
626 "further filtered in the task for the purpose of propagating flags from image calibration "
627 "and characterization to codd objects",
628 name="src",
629 dimensions=("instrument", "visit", "detector"),
630 storageClass="SourceCatalog",
631 multiple=True
632 )
633 inputCatalog = cT.Input(
634 doc=("Name of the input catalog to use."
635 "If the single band deblender was used this should be 'deblendedFlux."
636 "If the multi-band deblender was used this should be 'deblendedModel, "
637 "or deblendedFlux if the multiband deblender was configured to output "
638 "deblended flux catalogs. If no deblending was performed this should "
639 "be 'mergeDet'"),
640 name="{inputCoaddName}Coadd_{deblendedCatalog}",
641 storageClass="SourceCatalog",
642 dimensions=("tract", "patch", "band", "skymap"),
643 )
644 outputSources = cT.Output(
645 doc="Source catalog containing all the measurement information generated in this task",
646 name="{outputCoaddName}Coadd_meas",
647 dimensions=("tract", "patch", "band", "skymap"),
648 storageClass="SourceCatalog",
649 )
650 matchResult = cT.Output(
651 doc="Match catalog produced by configured matcher, optional on doMatchSources",
652 name="{outputCoaddName}Coadd_measMatch",
653 dimensions=("tract", "patch", "band", "skymap"),
654 storageClass="Catalog",
655 )
656 denormMatches = cT.Output(
657 doc="Denormalized Match catalog produced by configured matcher, optional on "
658 "doWriteMatchesDenormalized",
659 name="{outputCoaddName}Coadd_measMatchFull",
660 dimensions=("tract", "patch", "band", "skymap"),
661 storageClass="Catalog",
662 )
664 def __init__(self, *, config=None):
665 super().__init__(config=config)
666 if config.doPropagateFlags is False:
667 self.inputs -= set(("visitCatalogs",))
669 if config.doMatchSources is False:
670 self.outputs -= set(("matchResult",))
672 if config.doWriteMatchesDenormalized is False:
673 self.outputs -= set(("denormMatches",))
676class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
677 pipelineConnections=MeasureMergedCoaddSourcesConnections):
678 """!
679 @anchor MeasureMergedCoaddSourcesConfig_
681 @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
682 """
683 inputCatalog = Field(dtype=str, default="deblendedFlux",
684 doc=("Name of the input catalog to use."
685 "If the single band deblender was used this should be 'deblendedFlux."
686 "If the multi-band deblender was used this should be 'deblendedModel."
687 "If no deblending was performed this should be 'mergeDet'"))
688 measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
689 setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
690 doPropagateFlags = Field(
691 dtype=bool, default=True,
692 doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
693 )
694 propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
695 doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
696 match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
697 doWriteMatchesDenormalized = Field(
698 dtype=bool,
699 default=False,
700 doc=("Write reference matches in denormalized format? "
701 "This format uses more disk space, but is more convenient to read."),
702 )
703 coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
704 psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
705 checkUnitsParseStrict = Field(
706 doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
707 dtype=str,
708 default="raise",
709 )
710 doApCorr = Field(
711 dtype=bool,
712 default=True,
713 doc="Apply aperture corrections"
714 )
715 applyApCorr = ConfigurableField(
716 target=ApplyApCorrTask,
717 doc="Subtask to apply aperture corrections"
718 )
719 doRunCatalogCalculation = Field(
720 dtype=bool,
721 default=True,
722 doc='Run catalogCalculation task'
723 )
724 catalogCalculation = ConfigurableField(
725 target=CatalogCalculationTask,
726 doc="Subtask to run catalogCalculation plugins on catalog"
727 )
729 hasFakes = Field(
730 dtype=bool,
731 default=False,
732 doc="Should be set to True if fake sources have been inserted into the input data."
733 )
735 @property
736 def refObjLoader(self):
737 return self.match.refObjLoader
739 def setDefaults(self):
740 super().setDefaults()
741 self.measurement.plugins.names |= ['base_InputCount',
742 'base_Variance',
743 'base_LocalPhotoCalib',
744 'base_LocalWcs']
745 self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
746 'INEXACT_PSF']
747 self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
748 'INEXACT_PSF']
750 def validate(self):
751 super().validate()
752 refCatGen2 = getattr(self.refObjLoader, "ref_dataset_name", None)
753 if refCatGen2 is not None and refCatGen2 != self.connections.refCat:
754 raise ValueError(
755 f"Gen2 ({refCatGen2}) and Gen3 ({self.connections.refCat}) reference catalogs "
756 f"are different. These options must be kept in sync until Gen2 is retired."
757 )
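# validate() above requires the Gen2 and Gen3 reference-catalog names to agree.
# A minimal config-override sketch for keeping them in sync, assuming the Gen2
# loader in use exposes ref_dataset_name; the catalog name is illustrative only:
#
#     config.match.refObjLoader.ref_dataset_name = "ps1_pv3_3pi_20170110"
#     config.connections.refCat = "ps1_pv3_3pi_20170110"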
760## @addtogroup LSST_task_documentation
761## @{
762## @page page_MeasureMergedCoaddSourcesTask MeasureMergedCoaddSourcesTask
763## @ref MeasureMergedCoaddSourcesTask_ "MeasureMergedCoaddSourcesTask"
764## @copybrief MeasureMergedCoaddSourcesTask
765## @}
768class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
769 """Get the psfCache setting into MeasureMergedCoaddSourcesTask"""
770 @staticmethod
771 def getTargetList(parsedCmd, **kwargs):
772 return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache)
775class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask):
776 r"""!
777 @anchor MeasureMergedCoaddSourcesTask_
 779 @brief Deblend sources from master catalog in each coadd separately and measure.
781 @section pipe_tasks_multiBand_Contents Contents
783 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
784 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
785 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
786 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
787 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
788 - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example
790 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description
792 Command-line task that uses peaks and footprints from a master catalog to perform deblending and
793 measurement in each coadd.
795 Given a master input catalog of sources (peaks and footprints) or deblender outputs
796 (including a HeavyFootprint in each band), measure each source on the
797 coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
798 consistent set of child sources.
800 The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
801 properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
802 flags are propagated to the coadd sources.
804 Optionally, we can match the coadd sources to an external reference catalog.
806 @par Inputs:
807 deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
808 @n deepCoadd_calexp{tract,patch,filter}: ExposureF
809 @par Outputs:
810 deepCoadd_meas{tract,patch,filter}: SourceCatalog
811 @par Data Unit:
812 tract, patch, filter
814 MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:
816 <DL>
817 <DT> @ref SingleFrameMeasurementTask_ "measurement"
818 <DD> Measure source properties of deblended sources.</DD>
819 <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
820 <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are
821 not at the edge of the field and that have either not been deblended or are the children of deblended
822 sources</DD>
823 <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
824 <DD> Propagate flags set in individual visits to the coadd.</DD>
825 <DT> @ref DirectMatchTask_ "match"
826 <DD> Match input sources to a reference catalog (optional).
827 </DD>
828 </DL>
829 These subtasks may be retargeted as required.
831 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization
833 @copydoc \_\_init\_\_
835 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task
837 @copydoc run
839 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters
841 See @ref MeasureMergedCoaddSourcesConfig_
843 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables
845 The command line task interface supports a
846 flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
847 files.
849 MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
 850 the various sub-tasks. See the documentation for individual sub-tasks for more information.
852 @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
853 MeasureMergedCoaddSourcesTask
855 After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
856 The next stage in the multi-band processing procedure will merge these measurements into a suitable
857 catalog for driving forced photometry.
859 Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds
860 to be processed.
861 A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
862 `--help` command line argument:
863 @code
864 measureCoaddSources.py --help
865 @endcode
 867 To demonstrate usage of the MeasureMergedCoaddSourcesTask in the larger context of multi-band processing, we
868 will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
869 step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I band
870 coadd as follows:
871 @code
872 measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
873 @endcode
 874 This will process the HSC-I band data. The results are written to
 875 `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`.
877 It is also necessary to run
878 @code
879 measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
880 @endcode
881 to generate the sources catalogs for the HSC-R band required by the next step in the multi-band
882 procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
883 """
884 _DefaultName = "measureCoaddSources"
885 ConfigClass = MeasureMergedCoaddSourcesConfig
886 RunnerClass = MeasureMergedCoaddSourcesRunner
887 getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
888 # The IDs we already have are of this type
889 makeIdFactory = _makeMakeIdFactory("MergedCoaddId", includeBand=False)
891 @classmethod
892 def _makeArgumentParser(cls):
893 parser = ArgumentParser(name=cls._DefaultName)
894 parser.add_id_argument("--id", "deepCoadd_calexp",
895 help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
896 ContainerClass=ExistingCoaddDataIdContainer)
897 parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
898 return parser
900 def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
901 **kwargs):
902 """!
903 @brief Initialize the task.
905 Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
906 @param[in] schema: the schema of the merged detection catalog used as input to this one
907 @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
 908 @param[in] refObjLoader: an instance of LoadReferenceObjectsTask that supplies an external reference
909 catalog. May be None if the loader can be constructed from the butler argument or all steps
910 requiring a reference catalog are disabled.
911 @param[in] butler: a butler used to read the input schemas from disk or construct the reference
912 catalog loader, if schema or peakSchema or refObjLoader is None
914 The task will set its own self.schema attribute to the schema of the output measurement catalog.
915 This will include all fields from the input schema, as well as additional fields for all the
916 measurements.
917 """
918 super().__init__(**kwargs)
919 self.deblended = self.config.inputCatalog.startswith("deblended")
920 self.inputCatalog = "Coadd_" + self.config.inputCatalog
921 if initInputs is not None:
922 schema = initInputs['inputSchema'].schema
923 if schema is None:
924 assert butler is not None, "Neither butler nor schema is defined"
925 schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
926 self.schemaMapper = afwTable.SchemaMapper(schema)
927 self.schemaMapper.addMinimalSchema(schema)
928 self.schema = self.schemaMapper.getOutputSchema()
929 self.algMetadata = PropertyList()
930 self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
931 self.makeSubtask("setPrimaryFlags", schema=self.schema)
932 if self.config.doMatchSources:
933 self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
934 if self.config.doPropagateFlags:
935 self.makeSubtask("propagateFlags", schema=self.schema)
936 self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
937 if self.config.doApCorr:
938 self.makeSubtask("applyApCorr", schema=self.schema)
939 if self.config.doRunCatalogCalculation:
940 self.makeSubtask("catalogCalculation", schema=self.schema)
942 self.outputSchema = afwTable.SourceCatalog(self.schema)
944 def runQuantum(self, butlerQC, inputRefs, outputRefs):
945 inputs = butlerQC.get(inputRefs)
947 refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
948 inputs.pop('refCat'), config=self.config.refObjLoader,
949 log=self.log)
950 self.match.setRefObjLoader(refObjLoader)
952 # Set psfcache
953 # move this to run after gen2 deprecation
954 inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)
956 # Get unique integer ID for IdFactory and RNG seeds
957 exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch")
958 inputs['exposureId'] = exposureIdInfo.expId
959 idFactory = exposureIdInfo.makeSourceIdFactory()
960 # Transform inputCatalog
961 table = afwTable.SourceTable.make(self.schema, idFactory)
962 sources = afwTable.SourceCatalog(table)
963 sources.extend(inputs.pop('inputCatalog'), self.schemaMapper)
964 table = sources.getTable()
965 table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
966 inputs['sources'] = sources
968 skyMap = inputs.pop('skyMap')
969 tractNumber = inputRefs.inputCatalog.dataId['tract']
970 tractInfo = skyMap[tractNumber]
971 patchInfo = tractInfo.getPatchInfo(inputRefs.inputCatalog.dataId['patch'])
972 skyInfo = Struct(
973 skyMap=skyMap,
974 tractInfo=tractInfo,
975 patchInfo=patchInfo,
976 wcs=tractInfo.getWcs(),
977 bbox=patchInfo.getOuterBBox()
978 )
979 inputs['skyInfo'] = skyInfo
981 if self.config.doPropagateFlags:
982 # Filter out any visit catalog that is not coadd inputs
983 ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
984 visitKey = ccdInputs.schema.find("visit").key
985 ccdKey = ccdInputs.schema.find("ccd").key
986 inputVisitIds = set()
987 ccdRecordsWcs = {}
988 for ccdRecord in ccdInputs:
989 visit = ccdRecord.get(visitKey)
990 ccd = ccdRecord.get(ccdKey)
991 inputVisitIds.add((visit, ccd))
992 ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()
994 inputCatalogsToKeep = []
995 inputCatalogWcsUpdate = []
996 for i, dataRef in enumerate(inputRefs.visitCatalogs):
997 key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
998 if key in inputVisitIds:
999 inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
1000 inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
1001 inputs['visitCatalogs'] = inputCatalogsToKeep
1002 inputs['wcsUpdates'] = inputCatalogWcsUpdate
1003 inputs['ccdInputs'] = ccdInputs
1005 outputs = self.run(**inputs)
1006 butlerQC.put(outputs, outputRefs)
1008 def runDataRef(self, patchRef, psfCache=100):
1009 """!
1010 @brief Deblend and measure.
1012 @param[in] patchRef: Patch reference.
1014 Set 'is-primary' and related flags. Propagate flags
1015 from individual visits. Optionally match the sources to a reference catalog and write the matches.
1016 Finally, write the deblended sources and measurements out.
1017 """
1018 if self.config.hasFakes:
1019 coaddType = "fakes_" + self.config.coaddName
1020 else:
1021 coaddType = self.config.coaddName
1022 exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
1023 exposure.getPsf().setCacheCapacity(psfCache)
1024 sources = self.readSources(patchRef)
1025 table = sources.getTable()
1026 table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
1027 skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)
1029 if self.config.doPropagateFlags:
1030 ccdInputs = self.propagateFlags.getCcdInputs(exposure)
1031 else:
1032 ccdInputs = None
1034 expId = getGen3CoaddExposureId(patchRef, coaddName=self.config.coaddName, includeBand=False,
1035 log=self.log)
1036 results = self.run(exposure=exposure, sources=sources, skyInfo=skyInfo, exposureId=expId,
1037 ccdInputs=ccdInputs, butler=patchRef.getButler())
1039 if self.config.doMatchSources:
1040 self.writeMatches(patchRef, results)
1041 self.write(patchRef, results.outputSources)
1043 def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
1044 butler=None):
1045 """Run measurement algorithms on the input exposure, and optionally populate the
1046 resulting catalog with extra information.
1048 Parameters
1049 ----------
 1050 exposure : `lsst.afw.image.Exposure`
1051 The input exposure on which measurements are to be performed
1052 sources : `lsst.afw.table.SourceCatalog`
1053 A catalog built from the results of merged detections, or
1054 deblender outputs.
1055 skyInfo : `lsst.pipe.base.Struct`
1056 A struct containing information about the position of the input exposure within
1057 a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
1058 exposureId : `int` or `bytes`
1059 packed unique number or bytes unique to the input exposure
1060 ccdInputs : `lsst.afw.table.ExposureCatalog`
1061 Catalog containing information on the individual visits which went into making
1062 the exposure
1063 visitCatalogs : list of `lsst.afw.table.SourceCatalogs` or `None`
1064 A list of source catalogs corresponding to measurements made on the individual
1065 visits which went into the input exposure. If None and butler is `None` then
1066 the task cannot propagate visit flags to the output catalog.
1067 wcsUpdates : list of `lsst.afw.geom.SkyWcs` or `None`
1068 If visitCatalogs is not `None` this should be a list of wcs objects which correspond
 1069 to the input visits. Used to put all coordinates into a common system. If `None` and
1070 butler is `None` then the task cannot propagate visit flags to the output catalog.
1071 butler : `lsst.daf.butler.Butler` or `lsst.daf.persistence.Butler`
1072 Either a gen2 or gen3 butler used to load visit catalogs
1074 Returns
1075 -------
1076 results : `lsst.pipe.base.Struct`
1077 Results of running measurement task. Will contain the catalog in the
 1078 outputSources attribute. Optionally will have results of matching to a
 1079 reference catalog in the matchResult attribute, and denormalized
1080 matches in the denormMatches attribute.
1081 """
1082 self.measurement.run(sources, exposure, exposureId=exposureId)
1084 if self.config.doApCorr:
1085 self.applyApCorr.run(
1086 catalog=sources,
1087 apCorrMap=exposure.getInfo().getApCorrMap()
1088 )
1090 # TODO DM-11568: this contiguous check-and-copy could go away if we
1091 # reserve enough space during SourceDetection and/or SourceDeblend.
1092 # NOTE: sourceSelectors require contiguous catalogs, so ensure
1093 # contiguity now, so views are preserved from here on.
1094 if not sources.isContiguous():
1095 sources = sources.copy(deep=True)
1097 if self.config.doRunCatalogCalculation:
1098 self.catalogCalculation.run(sources)
1100 self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
1101 patchInfo=skyInfo.patchInfo)
1102 if self.config.doPropagateFlags:
1103 self.propagateFlags.run(butler, sources, ccdInputs, exposure.getWcs(), visitCatalogs, wcsUpdates)
1105 results = Struct()
1107 if self.config.doMatchSources:
1108 matchResult = self.match.run(sources, exposure.getInfo().getFilterLabel().bandLabel)
1109 matches = afwTable.packMatches(matchResult.matches)
1110 matches.table.setMetadata(matchResult.matchMeta)
1111 results.matchResult = matches
1112 if self.config.doWriteMatchesDenormalized:
1113 if matchResult.matches:
1114 denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
1115 else:
1116 self.log.warning("No matches, so generating dummy denormalized matches file")
1117 denormMatches = afwTable.BaseCatalog(afwTable.Schema())
1118 denormMatches.setMetadata(PropertyList())
1119 denormMatches.getMetadata().add("COMMENT",
1120 "This catalog is empty because no matches were found.")
 1122 results.denormMatches = denormMatches
1124 results.outputSources = sources
1125 return results
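# A minimal sketch of calling `run` directly, assuming hypothetical `exposure`,
# `sources`, `skyInfo`, and `inputSchema` objects (as described in the docstring
# above) are already in hand. Flag propagation and reference matching are
# disabled so that no butler or visit catalogs are needed:
#
#     from lsst.pipe.tasks.multiBand import MeasureMergedCoaddSourcesTask
#
#     config = MeasureMergedCoaddSourcesTask.ConfigClass()
#     config.doPropagateFlags = False
#     config.doMatchSources = False
#     task = MeasureMergedCoaddSourcesTask(config=config, schema=inputSchema)
#     result = task.run(exposure=exposure, sources=sources, skyInfo=skyInfo,
#                       exposureId=0)
#     measured = result.outputSources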
1127 def readSources(self, dataRef):
1128 """!
1129 @brief Read input sources.
1131 @param[in] dataRef: Data reference for catalog of merged detections
1132 @return List of sources in merged catalog
1134 We also need to add columns to hold the measurements we're about to make
1135 so we can measure in-place.
1136 """
1137 merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
1138 self.log.info("Read %d detections: %s", len(merged), dataRef.dataId)
1139 idFactory = self.makeIdFactory(dataRef)
1140 for s in merged:
1141 idFactory.notify(s.getId())
1142 table = afwTable.SourceTable.make(self.schema, idFactory)
1143 sources = afwTable.SourceCatalog(table)
1144 sources.extend(merged, self.schemaMapper)
1145 return sources
1147 def writeMatches(self, dataRef, results):
1148 """!
1149 @brief Write matches of the sources to the astrometric reference catalog.
1151 @param[in] dataRef: data reference
1152 @param[in] results: results struct from run method
1153 """
1154 if hasattr(results, "matchResult"):
1155 dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch")
1156 if hasattr(results, "denormMatches"):
1157 dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull")
1159 def write(self, dataRef, sources):
1160 """!
1161 @brief Write the source catalog.
1163 @param[in] dataRef: data reference
1164 @param[in] sources: source catalog
1165 """
1166 dataRef.put(sources, self.config.coaddName + "Coadd_meas")
1167 self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId)