lsst.pipe.tasks gb4d8b8e895+1304fb89ff
multiBand.py
1#!/usr/bin/env python
2#
3# LSST Data Management System
4# Copyright 2008-2015 AURA/LSST.
5#
6# This product includes software developed by the
7# LSST Project (http://www.lsst.org/).
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the LSST License Statement and
20# the GNU General Public License along with this program. If not,
21# see <https://www.lsstcorp.org/LegalNotices/>.
22#
23import numpy as np
24
25from lsst.coadd.utils.coaddDataIdContainer import ExistingCoaddDataIdContainer
26from lsst.coadd.utils.getGen3CoaddExposureId import getGen3CoaddExposureId
27from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner,
28 PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
29import lsst.pipe.base.connectionTypes as cT
30from lsst.pex.config import Config, Field, ConfigurableField
31from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader
32from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
33from lsst.meas.deblender import SourceDeblendTask
34from lsst.meas.extensions.scarlet import ScarletDeblendTask
35from lsst.pipe.tasks.coaddBase import getSkyInfo
36from lsst.pipe.tasks.scaleVariance import ScaleVarianceTask
37from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
38from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
39from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
40from lsst.pipe.tasks.propagateVisitFlags import PropagateVisitFlagsTask
41import lsst.afw.image as afwImage
42import lsst.afw.table as afwTable
43import lsst.afw.math as afwMath
44from lsst.daf.base import PropertyList
45from lsst.skymap import BaseSkyMap
46from lsst.obs.base import ExposureIdInfo
47
48# NOTE: these imports are a convenience so multiband users only have to import this file.
49from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401
50from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401
51from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401
52from .multiBandUtils import getInputSchema, readCatalog, _makeMakeIdFactory # noqa: F401
53from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401
54from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401
55from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401
56from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401
57
58
59"""
60New set types:
61* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
62* deepCoadd_mergeDet: merged detections (tract, patch)
63* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
64* deepCoadd_ref: reference sources (tract, patch)
65All of these have associated *_schema catalogs that require no data ID and hold no records.
66
67In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
68the mergeDet, meas, and ref dataset Footprints:
69* deepCoadd_peak_schema
70"""
71
72
73
74class DetectCoaddSourcesConnections(PipelineTaskConnections,
75 dimensions=("tract", "patch", "band", "skymap"),
76 defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
77 detectionSchema = cT.InitOutput(
78 doc="Schema of the detection catalog",
79 name="{outputCoaddName}Coadd_det_schema",
80 storageClass="SourceCatalog",
81 )
82 exposure = cT.Input(
83 doc="Exposure on which detections are to be performed",
84 name="{inputCoaddName}Coadd",
85 storageClass="ExposureF",
86 dimensions=("tract", "patch", "band", "skymap")
87 )
88 outputBackgrounds = cT.Output(
89 doc="Output Backgrounds used in detection",
90 name="{outputCoaddName}Coadd_calexp_background",
91 storageClass="Background",
92 dimensions=("tract", "patch", "band", "skymap")
93 )
94 outputSources = cT.Output(
95 doc="Detected sources catalog",
96 name="{outputCoaddName}Coadd_det",
97 storageClass="SourceCatalog",
98 dimensions=("tract", "patch", "band", "skymap")
99 )
100 outputExposure = cT.Output(
101 doc="Exposure post detection",
102 name="{outputCoaddName}Coadd_calexp",
103 storageClass="ExposureF",
104 dimensions=("tract", "patch", "band", "skymap")
105 )
106
107
108class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
109 """!
110 @anchor DetectCoaddSourcesConfig_
111
112 @brief Configuration parameters for the DetectCoaddSourcesTask
113 """
114 doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
115 scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
116 detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
117 coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
118 doInsertFakes = Field(dtype=bool, default=False,
119 doc="Run fake sources injection task")
120 insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
121 doc="Injection of fake sources for testing "
122 "purposes (must be retargeted)")
123 hasFakes = Field(
124 dtype=bool,
125 default=False,
126 doc="Should be set to True if fake sources have been inserted into the input data."
127 )
128
129 def setDefaults(self):
130 super().setDefaults()
131 self.detection.thresholdType = "pixel_stdev"
132 self.detection.isotropicGrow = True
133 # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
134 self.detection.reEstimateBackground = False
135 self.detection.background.useApprox = False
136 self.detection.background.binSize = 4096
137 self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
138 self.detection.doTempWideBackground = True # Suppress large footprints that overwhelm the deblender
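
# Editorial note: the detection-related defaults above can be adjusted from a config
# override file (e.g. passed to detectCoaddSources.py with --configfile, or attached to a
# pipeline). A hedged sketch; the values below are illustrative, not recommendations:
#
#     config.doScaleVariance = False            # skip the empirical variance rescaling
#     config.detection.thresholdValue = 10.0    # raise the detection threshold (sigma)
#     config.detection.doTempWideBackground = False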
139
140
146
147
148class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
149 r"""!
150 @anchor DetectCoaddSourcesTask_
151
152 @brief Detect sources on a coadd
153
154 @section pipe_tasks_multiBand_Contents Contents
155
156 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
157 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
158 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
159 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
160 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
161 - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example
162
163 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description
164
165 Command-line task that detects sources on a coadd of exposures obtained with a single filter.
166
167 Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
168 properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
169 in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
170 propagate the full covariance matrix -- but it is simple and works well in practice.
171
172 After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref
173 SourceDetectionTask_ "detection" subtask.
174
175 @par Inputs:
176 deepCoadd{tract,patch,filter}: ExposureF
177 @par Outputs:
178 deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
179 @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input
180 exposure (ExposureF)
181 @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList
182 @par Data Unit:
183 tract, patch, filter
184
185 DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
186 You can retarget this subtask if you wish.
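    For example, a config override file could retarget it to the plain single-frame detection task
    (an illustrative sketch only, not a recommended default):
    @code
    from lsst.meas.algorithms import SourceDetectionTask
    config.detection.retarget(SourceDetectionTask)
    @endcode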
187
188 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization
189
190 @copydoc \_\_init\_\_
191
192 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task
193
194 @copydoc run
195
196 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters
197
 198 See @ref DetectCoaddSourcesConfig_ "DetectCoaddSourcesConfig"
199
200 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables
201
202 The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
203 flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
204 files.
205
 206 DetectCoaddSourcesTask has no debug variables of its own because it delegates all the work to
 207 @ref SourceDetectionTask_ "SourceDetectionTask"; see the documentation for
208 @ref SourceDetectionTask_ "SourceDetectionTask" for further information.
209
210 @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example
211 of using DetectCoaddSourcesTask
212
213 DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose of
214 the task is to update the background, detect all sources in a single band and generate a set of parent
215 footprints. Subsequent tasks in the multi-band processing procedure will merge sources across bands and,
216 eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects a data
217 reference to the coadd to be processed. A list of the available optional arguments can be obtained by
218 calling detectCoaddSources.py with the `--help` command line argument:
219 @code
220 detectCoaddSources.py --help
221 @endcode
222
223 To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
224 will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
225 steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:
226 @code
227 detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
228 @endcode
 229 This will process the HSC-I band data. The results are written to
230 `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.
231
232 It is also necessary to run:
233 @code
234 detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
235 @endcode
236 to generate the sources catalogs for the HSC-R band required by the next step in the multi-band
237 processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".
238 """
239 _DefaultName = "detectCoaddSources"
240 ConfigClass = DetectCoaddSourcesConfig
241 getSchemaCatalogs = _makeGetSchemaCatalogs("det")
242 makeIdFactory = _makeMakeIdFactory("CoaddId")
243
244 @classmethod
245 def _makeArgumentParser(cls):
246 parser = ArgumentParser(name=cls._DefaultName)
247 parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
248 ContainerClass=ExistingCoaddDataIdContainer)
249 return parser
250
251 def __init__(self, schema=None, **kwargs):
252 """!
253 @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.
254
255 Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
256
257 @param[in] schema: initial schema for the output catalog, modified-in place to include all
258 fields set by this task. If None, the source minimal schema will be used.
259 @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
260 """
261 # N.B. Super is used here to handle the multiple inheritance of PipelineTasks, the init tree
262 # call structure has been reviewed carefully to be sure super will work as intended.
263 super().__init__(**kwargs)
264 if schema is None:
265 schema = afwTable.SourceTable.makeMinimalSchema()
266 if self.config.doInsertFakes:
267 self.makeSubtask("insertFakes")
268 self.schema = schema
269 self.makeSubtask("detection", schema=self.schema)
270 if self.config.doScaleVariance:
271 self.makeSubtask("scaleVariance")
272
273 self.detectionSchema = afwTable.SourceCatalog(self.schema)
274
275 def runDataRef(self, patchRef):
276 """!
277 @brief Run detection on a coadd.
278
279 Invokes @ref run and then uses @ref write to output the
280 results.
281
282 @param[in] patchRef: data reference for patch
283 """
284 if self.config.hasFakes:
285 exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True)
286 else:
287 exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
288 expId = getGen3CoaddExposureId(patchRef, coaddName=self.config.coaddName, log=self.log)
289 results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
290 self.write(results, patchRef)
291 return results
292
293 def runQuantum(self, butlerQC, inputRefs, outputRefs):
294 inputs = butlerQC.get(inputRefs)
295 exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch_band")
296 inputs["idFactory"] = exposureIdInfo.makeSourceIdFactory()
297 inputs["expId"] = exposureIdInfo.expId
298 outputs = self.run(**inputs)
299 butlerQC.put(outputs, outputRefs)
300
301 def run(self, exposure, idFactory, expId):
302 """!
303 @brief Run detection on an exposure.
304
305 First scale the variance plane to match the observed variance
306 using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
307 detect sources.
308
 309 @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled,
310 depending on configuration).
311 @param[in] idFactory: IdFactory to set source identifiers
312 @param[in] expId: Exposure identifier (integer) for RNG seed
313
314 @return a pipe.base.Struct with fields
 315 - outputSources: catalog of detections
 316 - outputBackgrounds: list of backgrounds
 - outputExposure: the input exposure, modified in place (variance scaled, detection mask planes set)
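    A rough sketch of direct use (here ``task`` is a constructed DetectCoaddSourcesTask; the
    exposure, idFactory, and expId are placeholders the caller must prepare, as runQuantum and
    runDataRef do):
    @code
    results = task.run(exposure, idFactory=idFactory, expId=expId)
    catalog = results.outputSources
    backgrounds = results.outputBackgrounds
    @endcode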
317 """
318 if self.config.doScaleVariance:
319 varScale = self.scaleVariance.run(exposure.maskedImage)
320 exposure.getMetadata().add("VARIANCE_SCALE", varScale)
321 backgrounds = afwMath.BackgroundList()
322 if self.config.doInsertFakes:
323 self.insertFakes.run(exposure, background=backgrounds)
324 table = afwTable.SourceTable.make(self.schema, idFactory)
325 detections = self.detection.run(table, exposure, expId=expId)
326 sources = detections.sources
327 fpSets = detections.fpSets
328 if hasattr(fpSets, "background") and fpSets.background:
329 for bg in fpSets.background:
330 backgrounds.append(bg)
331 return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
332
333 def write(self, results, patchRef):
334 """!
 335 @brief Write out results from run.
 336
 338 @param[in] results: Struct returned from run
339 @param[in] patchRef: data reference for patch
340 """
341 coaddName = self.config.coaddName + "Coadd"
342 patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
343 patchRef.put(results.outputSources, coaddName + "_det")
344 if self.config.hasFakes:
345 patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp")
346 else:
347 patchRef.put(results.outputExposure, coaddName + "_calexp")
348
349
350
351
352class DeblendCoaddSourcesConfig(Config):
353 """DeblendCoaddSourcesConfig
354
355 Configuration parameters for the `DeblendCoaddSourcesTask`.
356 """
357 singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
358 doc="Deblend sources separately in each band")
359 multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
360 doc="Deblend sources simultaneously across bands")
361 simultaneous = Field(dtype=bool,
362 default=True,
363 doc="Simultaneously deblend all bands? "
364 "True uses `multibandDeblend` while False uses `singleBandDeblend`")
365 coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
366 hasFakes = Field(dtype=bool,
367 default=False,
368 doc="Should be set to True if fake sources have been inserted into the input data.")
369
370 def setDefaults(self):
371 Config.setDefaults(self)
372 self.singleBandDeblend.propagateAllPeaks = True
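
# Editorial note: which deblender runs is controlled by `simultaneous` above; a config
# override sketch (illustrative values only, assuming the stock SourceDeblendTask config):
#
#     config.simultaneous = False                        # deblend each band separately
#     config.singleBandDeblend.maxFootprintArea = 10000  # illustrative limit, not a default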
373
374
375class DeblendCoaddSourcesRunner(MergeSourcesRunner):
376 """Task runner for the `MergeSourcesTask`
377
378 Required because the run method requires a list of
379 dataRefs rather than a single dataRef.
380 """
381 @staticmethod
382 def getTargetList(parsedCmd, **kwargs):
383 """Provide a list of patch references for each patch, tract, filter combo.
384
385 Parameters
386 ----------
387 parsedCmd:
388 The parsed command
389 kwargs:
390 Keyword arguments passed to the task
391
392 Returns
393 -------
394 targetList: list
395 List of tuples, where each tuple is a (dataRef, kwargs) pair.
396 """
397 refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
398 kwargs["psfCache"] = parsedCmd.psfCache
399 return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()]
400
401
402class DeblendCoaddSourcesTask(CmdLineTask):
403 """Deblend the sources in a merged catalog
404
405 Deblend sources from master catalog in each coadd.
406 This can either be done separately in each band using the HSC-SDSS deblender
407 (`DeblendCoaddSourcesTask.config.simultaneous==False`)
408 or use SCARLET to simultaneously fit the blend in all bands
409 (`DeblendCoaddSourcesTask.config.simultaneous==True`).
 410 The task will set its own `self.schema` attribute to the `Schema` of the
411 output deblended catalog.
412 This will include all fields from the input `Schema`, as well as additional fields
413 from the deblender.
414
418
419 Parameters
420 ----------
421 butler: `Butler`
 422 Butler used to read the input schemas from disk, if `schema` or
 423 `peakSchema` is None.
424 schema: `Schema`
425 The schema of the merged detection catalog as an input to this task.
426 peakSchema: `Schema`
427 The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog
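
    A gen2-style construction sketch (the ``butler`` and ``patchRefList`` below are placeholders
    supplied by the caller or the task runner):

        task = DeblendCoaddSourcesTask(butler=butler)
        task.runDataRef(patchRefList, psfCache=100)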
428 """
429 ConfigClass = DeblendCoaddSourcesConfig
430 RunnerClass = DeblendCoaddSourcesRunner
431 _DefaultName = "deblendCoaddSources"
432 makeIdFactory = _makeMakeIdFactory("MergedCoaddId", includeBand=False)
433
434 @classmethod
435 def _makeArgumentParser(cls):
436 parser = ArgumentParser(name=cls._DefaultName)
437 parser.add_id_argument("--id", "deepCoadd_calexp",
438 help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
439 ContainerClass=ExistingCoaddDataIdContainer)
440 parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
441 return parser
442
443 def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
444 CmdLineTask.__init__(self, **kwargs)
445 if schema is None:
446 assert butler is not None, "Neither butler nor schema is defined"
447 schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
448 self.schemaMapper = afwTable.SchemaMapper(schema)
449 self.schemaMapper.addMinimalSchema(schema)
450 self.schema = self.schemaMapper.getOutputSchema()
451 if peakSchema is None:
452 assert butler is not None, "Neither butler nor peakSchema is defined"
453 peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema
454
455 if self.config.simultaneous:
456 self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
457 else:
458 self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)
459
460 def getSchemaCatalogs(self):
461 """Return a dict of empty catalogs for each catalog dataset produced by this task.
462
463 Returns
464 -------
465 result: dict
466 Dictionary of empty catalogs, with catalog names as keys.
467 """
468 catalog = afwTable.SourceCatalog(self.schema)
469 return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
470 self.config.coaddName + "Coadd_deblendedModel": catalog}
471
472 def runDataRef(self, patchRefList, psfCache=100):
473 """Deblend the patch
474
475 Deblend each source simultaneously or separately
476 (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
477 Set `is-primary` and related flags.
478 Propagate flags from individual visits.
479 Write the deblended sources out.
480
481 Parameters
482 ----------
483 patchRefList: list
484 List of data references for each filter
485 """
486
487 if self.config.hasFakes:
488 coaddType = "fakes_" + self.config.coaddName
489 else:
490 coaddType = self.config.coaddName
491
492 if self.config.simultaneous:
493 # Use SCARLET to simultaneously deblend across filters
494 filters = []
495 exposures = []
496 for patchRef in patchRefList:
497 exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
 498 filterLabel = patchRef.get(coaddType + "Coadd_filterLabel", immediate=True)
 499 filters.append(filterLabel.bandLabel)
500 exposures.append(exposure)
501 # Sort inputs by band to match Gen3 order of inputs
502 exposures = [exposure for _, exposure in sorted(zip(filters, exposures))]
503 patchRefList = [patchRef for _, patchRef in sorted(zip(filters, patchRefList))]
504 filters.sort()
505 # The input sources are the same for all bands, since it is a merged catalog
506 sources = self.readSources(patchRef)
507 exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
508 templateCatalogs = self.multiBandDeblend.run(exposure, sources)
509 for n in range(len(patchRefList)):
510 self.write(patchRefList[n], templateCatalogs[filters[n]])
511 else:
 512 # Use the single-band deblender to deblend each band separately
513 for patchRef in patchRefList:
514 exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
515 exposure.getPsf().setCacheCapacity(psfCache)
516 sources = self.readSources(patchRef)
517 self.singleBandDeblend.run(exposure, sources)
518 self.write(patchRef, sources)
519
520 def readSources(self, dataRef):
521 """Read merged catalog
522
523 Read the catalog of merged detections and create a catalog
524 in a single band.
525
526 Parameters
527 ----------
528 dataRef: data reference
529 Data reference for catalog of merged detections
530
531 Returns
532 -------
533 sources: `SourceCatalog`
534 List of sources in merged catalog
535
536 We also need to add columns to hold the measurements we're about to make so we can measure in-place.
537 """
538 merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
539 self.log.info("Read %d detections: %s", len(merged), dataRef.dataId)
540 idFactory = self.makeIdFactory(dataRef)
541 # There may be gaps in the mergeDet catalog, which will cause the
542 # source ids to be inconsistent. So we update the id factory
543 # with the largest id already in the catalog.
544 maxId = np.max(merged["id"])
545 idFactory.notify(maxId)
546 table = afwTable.SourceTable.make(self.schema, idFactory)
547 sources = afwTable.SourceCatalog(table)
548 sources.extend(merged, self.schemaMapper)
549 return sources
550
551 def write(self, dataRef, sources):
552 """Write the source catalog(s)
553
554 Parameters
555 ----------
556 dataRef: Data Reference
557 Reference to the output catalog.
558 sources: `SourceCatalog`
 559 Flux-conserved deblended sources to write to file.
 560 If using the single-band deblender, this is the catalog
 561 generated by the deblender.
565 """
566 dataRef.put(sources, self.config.coaddName + "Coadd_deblendedFlux")
567 self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId)
568
569 def writeMetadata(self, dataRefList):
570 """Write the metadata produced from processing the data.
571 Parameters
572 ----------
573 dataRefList
574 List of Butler data references used to write the metadata.
575 The metadata is written to dataset type `CmdLineTask._getMetadataName`.
576 """
577 for dataRef in dataRefList:
578 try:
579 metadataName = self._getMetadataName()
580 if metadataName is not None:
581 dataRef.put(self.getFullMetadata(), metadataName)
582 except Exception as e:
583 self.log.warning("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)
584
585
586class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
587 dimensions=("tract", "patch", "band", "skymap"),
588 defaultTemplates={"inputCoaddName": "deep",
589 "outputCoaddName": "deep"}):
590 inputSchema = cT.InitInput(
591 doc="Input schema for measure merged task produced by a deblender or detection task",
592 name="{inputCoaddName}Coadd_deblendedFlux_schema",
593 storageClass="SourceCatalog"
594 )
595 outputSchema = cT.InitOutput(
596 doc="Output schema after all new fields are added by task",
597 name="{inputCoaddName}Coadd_meas_schema",
598 storageClass="SourceCatalog"
599 )
600 refCat = cT.PrerequisiteInput(
601 doc="Reference catalog used to match measured sources against known sources",
602 name="ref_cat",
603 storageClass="SimpleCatalog",
604 dimensions=("skypix",),
605 deferLoad=True,
606 multiple=True
607 )
608 exposure = cT.Input(
609 doc="Input coadd image",
610 name="{inputCoaddName}Coadd_calexp",
611 storageClass="ExposureF",
612 dimensions=("tract", "patch", "band", "skymap")
613 )
614 skyMap = cT.Input(
615 doc="SkyMap to use in processing",
616 name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
617 storageClass="SkyMap",
618 dimensions=("skymap",),
619 )
620 visitCatalogs = cT.Input(
621 doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
622 "further filtered in the task for the purpose of propagating flags from image calibration "
623 "and characterization to codd objects",
624 name="src",
625 dimensions=("instrument", "visit", "detector"),
626 storageClass="SourceCatalog",
627 multiple=True
628 )
629 inputCatalog = cT.Input(
630 doc=("Name of the input catalog to use."
631 "If the single band deblender was used this should be 'deblendedFlux."
632 "If the multi-band deblender was used this should be 'deblendedModel, "
633 "or deblendedFlux if the multiband deblender was configured to output "
634 "deblended flux catalogs. If no deblending was performed this should "
635 "be 'mergeDet'"),
636 name="{inputCoaddName}Coadd_deblendedFlux",
637 storageClass="SourceCatalog",
638 dimensions=("tract", "patch", "band", "skymap"),
639 )
640 outputSources = cT.Output(
641 doc="Source catalog containing all the measurement information generated in this task",
642 name="{outputCoaddName}Coadd_meas",
643 dimensions=("tract", "patch", "band", "skymap"),
644 storageClass="SourceCatalog",
645 )
646 matchResult = cT.Output(
647 doc="Match catalog produced by configured matcher, optional on doMatchSources",
648 name="{outputCoaddName}Coadd_measMatch",
649 dimensions=("tract", "patch", "band", "skymap"),
650 storageClass="Catalog",
651 )
652 denormMatches = cT.Output(
653 doc="Denormalized Match catalog produced by configured matcher, optional on "
654 "doWriteMatchesDenormalized",
655 name="{outputCoaddName}Coadd_measMatchFull",
656 dimensions=("tract", "patch", "band", "skymap"),
657 storageClass="Catalog",
658 )
659
660 def __init__(self, *, config=None):
661 super().__init__(config=config)
662 if config.doPropagateFlags is False:
663 self.inputs -= set(("visitCatalogs",))
664
665 if config.doMatchSources is False:
666 self.outputs -= set(("matchResult",))
667
668 if config.doWriteMatchesDenormalized is False:
669 self.outputs -= set(("denormMatches",))
670
671
672class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
673 pipelineConnections=MeasureMergedCoaddSourcesConnections):
674 """!
675 @anchor MeasureMergedCoaddSourcesConfig_
676
677 @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
678 """
679 inputCatalog = Field(dtype=str, default="deblendedFlux",
680 doc=("Name of the input catalog to use."
681 "If the single band deblender was used this should be 'deblendedFlux."
682 "If the multi-band deblender was used this should be 'deblendedModel."
683 "If no deblending was performed this should be 'mergeDet'"))
684 measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
685 setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
686 doPropagateFlags = Field(
687 dtype=bool, default=True,
688 doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
689 )
690 propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
691 doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
692 match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
693 doWriteMatchesDenormalized = Field(
694 dtype=bool,
695 default=False,
696 doc=("Write reference matches in denormalized format? "
697 "This format uses more disk space, but is more convenient to read."),
698 )
699 coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
700 psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
701 checkUnitsParseStrict = Field(
702 doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
703 dtype=str,
704 default="raise",
705 )
706 doApCorr = Field(
707 dtype=bool,
708 default=True,
709 doc="Apply aperture corrections"
710 )
711 applyApCorr = ConfigurableField(
712 target=ApplyApCorrTask,
713 doc="Subtask to apply aperture corrections"
714 )
715 doRunCatalogCalculation = Field(
716 dtype=bool,
717 default=True,
718 doc='Run catalogCalculation task'
719 )
720 catalogCalculation = ConfigurableField(
721 target=CatalogCalculationTask,
722 doc="Subtask to run catalogCalculation plugins on catalog"
723 )
724
725 hasFakes = Field(
726 dtype=bool,
727 default=False,
728 doc="Should be set to True if fake sources have been inserted into the input data."
729 )
730
731 @property
732 def refObjLoader(self):
733 return self.match.refObjLoader
734
735 def setDefaults(self):
736 super().setDefaults()
737 self.measurement.plugins.names |= ['base_InputCount',
738 'base_Variance',
739 'base_LocalPhotoCalib',
740 'base_LocalWcs']
741 self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
742 'INEXACT_PSF']
743 self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
744 'INEXACT_PSF']
745
746 def validate(self):
747 super().validate()
748 refCatGen2 = getattr(self.refObjLoader, "ref_dataset_name", None)
749 if refCatGen2 is not None and refCatGen2 != self.connections.refCat:
750 raise ValueError(
751 f"Gen2 ({refCatGen2}) and Gen3 ({self.connections.refCat}) reference catalogs "
752 f"are different. These options must be kept in sync until Gen2 is retired."
753 )
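
# Editorial note: a hedged sketch of a config override for the measurement stage
# (values illustrative only):
#
#     config.doMatchSources = False   # skip reference-catalog matching entirely
#     config.psfCache = 200           # enlarge the CoaddPsf cache used during measurement
#     config.measurement.plugins.names |= ["base_FootprintArea"]  # enable an extra plugin (assumed available)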
754
755
756
762
763
764class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
765 """Get the psfCache setting into MeasureMergedCoaddSourcesTask"""
766 @staticmethod
767 def getTargetList(parsedCmd, **kwargs):
768 return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache)
769
770
771class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask):
772 r"""!
773 @anchor MeasureMergedCoaddSourcesTask_
774
 775 @brief Deblend sources from master catalog in each coadd separately and measure.
776
777 @section pipe_tasks_multiBand_Contents Contents
778
779 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
780 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
781 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
782 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
783 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
784 - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example
785
786 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description
787
788 Command-line task that uses peaks and footprints from a master catalog to perform deblending and
789 measurement in each coadd.
790
791 Given a master input catalog of sources (peaks and footprints) or deblender outputs
792 (including a HeavyFootprint in each band), measure each source on the
793 coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
794 consistent set of child sources.
795
796 The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
797 properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
798 flags are propagated to the coadd sources.
799
800 Optionally, we can match the coadd sources to an external reference catalog.
801
802 @par Inputs:
803 deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
804 @n deepCoadd_calexp{tract,patch,filter}: ExposureF
805 @par Outputs:
806 deepCoadd_meas{tract,patch,filter}: SourceCatalog
807 @par Data Unit:
808 tract, patch, filter
809
810 MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:
811
812 <DL>
813 <DT> @ref SingleFrameMeasurementTask_ "measurement"
814 <DD> Measure source properties of deblended sources.</DD>
815 <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
816 <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are
817 not at the edge of the field and that have either not been deblended or are the children of deblended
818 sources</DD>
819 <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
820 <DD> Propagate flags set in individual visits to the coadd.</DD>
821 <DT> @ref DirectMatchTask_ "match"
822 <DD> Match input sources to a reference catalog (optional).
823 </DD>
824 </DL>
825 These subtasks may be retargeted as required.
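    For instance, a config override might tighten the reference matching radius (an illustrative
    value, assuming the default DirectMatchTask configuration):
    @code
    config.match.matchRadius = 0.5  # arcsec
    @endcode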
826
827 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization
828
829 @copydoc \_\_init\_\_
830
831 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task
832
833 @copydoc run
834
835 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters
836
837 See @ref MeasureMergedCoaddSourcesConfig_
838
839 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables
840
841 The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
842 flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
843 files.
844
845 MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
 846 the various sub-tasks. See the documentation for individual sub-tasks for more information.
847
848 @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
849 MeasureMergedCoaddSourcesTask
850
851 After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
852 The next stage in the multi-band processing procedure will merge these measurements into a suitable
853 catalog for driving forced photometry.
854
855 Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds
856 to be processed.
857 A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
858 `--help` command line argument:
859 @code
860 measureCoaddSources.py --help
861 @endcode
862
 863 To demonstrate usage of the MeasureMergedCoaddSourcesTask in the larger context of multi-band processing, we
864 will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
865 step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I band
866 coadd as follows:
867 @code
868 measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
869 @endcode
 870 This will process the HSC-I band data. The results are written to
 871 `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`.
872
873 It is also necessary to run
874 @code
875 measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
876 @endcode
877 to generate the sources catalogs for the HSC-R band required by the next step in the multi-band
878 procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
879 """
880 _DefaultName = "measureCoaddSources"
881 ConfigClass = MeasureMergedCoaddSourcesConfig
882 RunnerClass = MeasureMergedCoaddSourcesRunner
883 getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
884 # The IDs we already have are of this type
885 makeIdFactory = _makeMakeIdFactory("MergedCoaddId", includeBand=False)
886
887 @classmethod
888 def _makeArgumentParser(cls):
889 parser = ArgumentParser(name=cls._DefaultName)
890 parser.add_id_argument("--id", "deepCoadd_calexp",
891 help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
892 ContainerClass=ExistingCoaddDataIdContainer)
893 parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
894 return parser
895
896 def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
897 **kwargs):
898 """!
899 @brief Initialize the task.
900
901 Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
902 @param[in] schema: the schema of the merged detection catalog used as input to this one
903 @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
904 @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external reference
905 catalog. May be None if the loader can be constructed from the butler argument or all steps
906 requiring a reference catalog are disabled.
907 @param[in] butler: a butler used to read the input schemas from disk or construct the reference
908 catalog loader, if schema or peakSchema or refObjLoader is None
909
910 The task will set its own self.schema attribute to the schema of the output measurement catalog.
911 This will include all fields from the input schema, as well as additional fields for all the
912 measurements.
913 """
914 super().__init__(**kwargs)
915 self.deblended = self.config.inputCatalog.startswith("deblended")
916 self.inputCatalog = "Coadd_" + self.config.inputCatalog
917 if initInputs is not None:
918 schema = initInputs['inputSchema'].schema
919 if schema is None:
920 assert butler is not None, "Neither butler nor schema is defined"
921 schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
922 self.schemaMapper = afwTable.SchemaMapper(schema)
923 self.schemaMapper.addMinimalSchema(schema)
924 self.schema = self.schemaMapper.getOutputSchema()
925 self.algMetadata = PropertyList()
926 self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
927 self.makeSubtask("setPrimaryFlags", schema=self.schema)
928 if self.config.doMatchSources:
929 self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
930 if self.config.doPropagateFlags:
931 self.makeSubtask("propagateFlags", schema=self.schema)
932 self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
933 if self.config.doApCorr:
934 self.makeSubtask("applyApCorr", schema=self.schema)
935 if self.config.doRunCatalogCalculation:
936 self.makeSubtask("catalogCalculation", schema=self.schema)
937
938 self.outputSchema = afwTable.SourceCatalog(self.schema)
939
940 def runQuantum(self, butlerQC, inputRefs, outputRefs):
941 inputs = butlerQC.get(inputRefs)
942
943 refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
944 inputs.pop('refCat'), config=self.config.refObjLoader,
945 log=self.log)
946 self.match.setRefObjLoader(refObjLoader)
947
948 # Set psfcache
949 # move this to run after gen2 deprecation
950 inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)
951
952 # Get unique integer ID for IdFactory and RNG seeds
953 exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch")
954 inputs['exposureId'] = exposureIdInfo.expId
955 idFactory = exposureIdInfo.makeSourceIdFactory()
956 # Transform inputCatalog
957 table = afwTable.SourceTable.make(self.schema, idFactory)
958 sources = afwTable.SourceCatalog(table)
959 sources.extend(inputs.pop('inputCatalog'), self.schemaMapper)
960 table = sources.getTable()
961 table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
962 inputs['sources'] = sources
963
964 skyMap = inputs.pop('skyMap')
965 tractNumber = inputRefs.inputCatalog.dataId['tract']
966 tractInfo = skyMap[tractNumber]
967 patchInfo = tractInfo.getPatchInfo(inputRefs.inputCatalog.dataId['patch'])
968 skyInfo = Struct(
969 skyMap=skyMap,
970 tractInfo=tractInfo,
971 patchInfo=patchInfo,
972 wcs=tractInfo.getWcs(),
973 bbox=patchInfo.getOuterBBox()
974 )
975 inputs['skyInfo'] = skyInfo
976
977 if self.config.doPropagateFlags:
978 # Filter out any visit catalog that is not coadd inputs
979 ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
980 visitKey = ccdInputs.schema.find("visit").key
981 ccdKey = ccdInputs.schema.find("ccd").key
982 inputVisitIds = set()
983 ccdRecordsWcs = {}
984 for ccdRecord in ccdInputs:
985 visit = ccdRecord.get(visitKey)
986 ccd = ccdRecord.get(ccdKey)
987 inputVisitIds.add((visit, ccd))
988 ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()
989
990 inputCatalogsToKeep = []
991 inputCatalogWcsUpdate = []
992 for i, dataRef in enumerate(inputRefs.visitCatalogs):
993 key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
994 if key in inputVisitIds:
995 inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
996 inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
997 inputs['visitCatalogs'] = inputCatalogsToKeep
998 inputs['wcsUpdates'] = inputCatalogWcsUpdate
999 inputs['ccdInputs'] = ccdInputs
1000
1001 outputs = self.run(**inputs)
1002 butlerQC.put(outputs, outputRefs)
1003
1004 def runDataRef(self, patchRef, psfCache=100):
1005 """!
1006 @brief Deblend and measure.
1007
1008 @param[in] patchRef: Patch reference.
1009
1010 Set 'is-primary' and related flags. Propagate flags
1011 from individual visits. Optionally match the sources to a reference catalog and write the matches.
1012 Finally, write the deblended sources and measurements out.
1013 """
1014 if self.config.hasFakes:
1015 coaddType = "fakes_" + self.config.coaddName
1016 else:
1017 coaddType = self.config.coaddName
1018 exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
1019 exposure.getPsf().setCacheCapacity(psfCache)
1020 sources = self.readSources(patchRef)
1021 table = sources.getTable()
1022 table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
1023 skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)
1024
1025 if self.config.doPropagateFlags:
1026 ccdInputs = self.propagateFlags.getCcdInputs(exposure)
1027 else:
1028 ccdInputs = None
1029
1030 expId = getGen3CoaddExposureId(patchRef, coaddName=self.config.coaddName, includeBand=False,
1031 log=self.log)
1032 results = self.run(exposure=exposure, sources=sources, skyInfo=skyInfo, exposureId=expId,
1033 ccdInputs=ccdInputs, butler=patchRef.getButler())
1034
1035 if self.config.doMatchSources:
1036 self.writeMatches(patchRef, results)
1037 self.write(patchRef, results.outputSources)
1038
1039 def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
1040 butler=None):
1041 """Run measurement algorithms on the input exposure, and optionally populate the
1042 resulting catalog with extra information.
1043
1044 Parameters
1045 ----------
 1046 exposure : `lsst.afw.image.Exposure`
 1047 The input exposure on which measurements are to be performed
 1048 sources : `lsst.afw.table.SourceCatalog`
 1049 A catalog built from the results of merged detections, or
1050 deblender outputs.
1051 skyInfo : `lsst.pipe.base.Struct`
1052 A struct containing information about the position of the input exposure within
1053 a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
1054 exposureId : `int` or `bytes`
1055 packed unique number or bytes unique to the input exposure
1056 ccdInputs : `lsst.afw.table.ExposureCatalog`
1057 Catalog containing information on the individual visits which went into making
1058 the exposure
1059 visitCatalogs : list of `lsst.afw.table.SourceCatalogs` or `None`
1060 A list of source catalogs corresponding to measurements made on the individual
1061 visits which went into the input exposure. If None and butler is `None` then
1062 the task cannot propagate visit flags to the output catalog.
1063 wcsUpdates : list of `lsst.afw.geom.SkyWcs` or `None`
1064 If visitCatalogs is not `None` this should be a list of wcs objects which correspond
1065 to the input visits. Used to put all coordinates to common system. If `None` and
1066 butler is `None` then the task cannot propagate visit flags to the output catalog.
1067 butler : `lsst.daf.butler.Butler` or `lsst.daf.persistence.Butler`
1068 Either a gen2 or gen3 butler used to load visit catalogs
1069
1070 Returns
1071 -------
1072 results : `lsst.pipe.base.Struct`
1073 Results of running measurement task. Will contain the catalog in the
1074 sources attribute. Optionally will have results of matching to a
1075 reference catalog in the matchResults attribute, and denormalized
1076 matches in the denormMatches attribute.
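
        A rough sketch of direct use (here ``task`` is a constructed MeasureMergedCoaddSourcesTask;
        every input below is a placeholder prepared by the caller, as runQuantum and runDataRef do):

            results = task.run(exposure, sources, skyInfo, exposureId=expId,
                               ccdInputs=ccdInputs, butler=butler)
            measCat = results.outputSources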
1077 """
1078 self.measurement.run(sources, exposure, exposureId=exposureId)
1079
1080 if self.config.doApCorr:
1081 self.applyApCorr.run(
1082 catalog=sources,
1083 apCorrMap=exposure.getInfo().getApCorrMap()
1084 )
1085
1086 # TODO DM-11568: this contiguous check-and-copy could go away if we
1087 # reserve enough space during SourceDetection and/or SourceDeblend.
1088 # NOTE: sourceSelectors require contiguous catalogs, so ensure
1089 # contiguity now, so views are preserved from here on.
1090 if not sources.isContiguous():
1091 sources = sources.copy(deep=True)
1092
1093 if self.config.doRunCatalogCalculation:
1094 self.catalogCalculation.run(sources)
1095
1096 self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
1097 patchInfo=skyInfo.patchInfo)
1098 if self.config.doPropagateFlags:
1099 self.propagateFlags.run(butler, sources, ccdInputs, exposure.getWcs(), visitCatalogs, wcsUpdates)
1100
1101 results = Struct()
1102
1103 if self.config.doMatchSources:
1104 matchResult = self.match.run(sources, exposure.getInfo().getFilterLabel().bandLabel)
1105 matches = afwTable.packMatches(matchResult.matches)
1106 matches.table.setMetadata(matchResult.matchMeta)
1107 results.matchResult = matches
1108 if self.config.doWriteMatchesDenormalized:
1109 if matchResult.matches:
1110 denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
1111 else:
1112 self.log.warning("No matches, so generating dummy denormalized matches file")
1113 denormMatches = afwTable.BaseCatalog(afwTable.Schema())
1114 denormMatches.setMetadata(PropertyList())
1115 denormMatches.getMetadata().add("COMMENT",
1116 "This catalog is empty because no matches were found.")
 1118 results.denormMatches = denormMatches
1119
1120 results.outputSources = sources
1121 return results
1122
1123 def readSources(self, dataRef):
1124 """!
1125 @brief Read input sources.
1126
1127 @param[in] dataRef: Data reference for catalog of merged detections
1128 @return List of sources in merged catalog
1129
1130 We also need to add columns to hold the measurements we're about to make so we can measure in-place.
1131 """
1132 merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
1133 self.log.info("Read %d detections: %s", len(merged), dataRef.dataId)
1134 idFactory = self.makeIdFactory(dataRef)
1135 for s in merged:
1136 idFactory.notify(s.getId())
1137 table = afwTable.SourceTable.make(self.schema, idFactory)
1138 sources = afwTable.SourceCatalog(table)
1139 sources.extend(merged, self.schemaMapper)
1140 return sources
1141
1142 def writeMatches(self, dataRef, results):
1143 """!
1144 @brief Write matches of the sources to the astrometric reference catalog.
1145
1146 @param[in] dataRef: data reference
1147 @param[in] results: results struct from run method
1148 """
1149 if hasattr(results, "matchResult"):
1150 dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch")
1151 if hasattr(results, "denormMatches"):
1152 dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull")
1153
1154 def write(self, dataRef, sources):
1155 """!
1156 @brief Write the source catalog.
1157
1158 @param[in] dataRef: data reference
1159 @param[in] sources: source catalog
1160 """
1161 dataRef.put(sources, self.config.coaddName + "Coadd_meas")
1162 self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId)