lsst.pipe.tasks gb1d6de0934+940593f51e
multiBand.py
1#!/usr/bin/env python
2#
3# LSST Data Management System
4# Copyright 2008-2015 AURA/LSST.
5#
6# This product includes software developed by the
7# LSST Project (http://www.lsst.org/).
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the LSST License Statement and
20# the GNU General Public License along with this program. If not,
21# see <https://www.lsstcorp.org/LegalNotices/>.
22#
23import numpy as np
24
25from lsst.coadd.utils.coaddDataIdContainer import ExistingCoaddDataIdContainer
26from lsst.coadd.utils.getGen3CoaddExposureId import getGen3CoaddExposureId
27from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner,
28 PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
29import lsst.pipe.base.connectionTypes as cT
30from lsst.pex.config import Config, Field, ConfigurableField
31from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader, ScaleVarianceTask
32from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
33from lsst.meas.deblender import SourceDeblendTask
34from lsst.meas.extensions.scarlet import ScarletDeblendTask
35from lsst.pipe.tasks.coaddBase import getSkyInfo
36from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
37from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
38from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
39from lsst.pipe.tasks.propagateSourceFlags import PropagateSourceFlagsTask
40import lsst.afw.image as afwImage
41import lsst.afw.table as afwTable
42import lsst.afw.math as afwMath
43from lsst.daf.base import PropertyList
44from lsst.skymap import BaseSkyMap
45from lsst.obs.base import ExposureIdInfo
46
47# NOTE: these imports are a convenience so multiband users only have to import this file.
48from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401
49from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401
50from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401
51from .multiBandUtils import getInputSchema, readCatalog, _makeMakeIdFactory # noqa: F401
52from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401
53from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401
54from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401
55from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401
56
57
58"""
59 New dataset types:
60* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
61* deepCoadd_mergeDet: merged detections (tract, patch)
62* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
63* deepCoadd_ref: reference sources (tract, patch)
64All of these have associated *_schema catalogs that require no data ID and hold no records.
65
66In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
67the mergeDet, meas, and ref dataset Footprints:
68* deepCoadd_peak_schema
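For example (a sketch: the repository path, collection, and data ID values below are
hypothetical), these datasets can be fetched with the Gen3 Butler:

    from lsst.daf.butler import Butler

    butler = Butler("/path/to/repo", collections="HSC/runs/example")
    detCat = butler.get("deepCoadd_det", tract=0, patch=42, band="i", skymap="hsc_rings_v1")
    refCat = butler.get("deepCoadd_ref", tract=0, patch=42, skymap="hsc_rings_v1")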
69"""
70
71
72
73class DetectCoaddSourcesConnections(PipelineTaskConnections,
74 dimensions=("tract", "patch", "band", "skymap"),
75 defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
76 detectionSchema = cT.InitOutput(
77 doc="Schema of the detection catalog",
78 name="{outputCoaddName}Coadd_det_schema",
79 storageClass="SourceCatalog",
80 )
81 exposure = cT.Input(
82 doc="Exposure on which detections are to be performed",
83 name="{inputCoaddName}Coadd",
84 storageClass="ExposureF",
85 dimensions=("tract", "patch", "band", "skymap")
86 )
87 outputBackgrounds = cT.Output(
88 doc="Output Backgrounds used in detection",
89 name="{outputCoaddName}Coadd_calexp_background",
90 storageClass="Background",
91 dimensions=("tract", "patch", "band", "skymap")
92 )
93 outputSources = cT.Output(
94 doc="Detected sources catalog",
95 name="{outputCoaddName}Coadd_det",
96 storageClass="SourceCatalog",
97 dimensions=("tract", "patch", "band", "skymap")
98 )
99 outputExposure = cT.Output(
100 doc="Exposure post detection",
101 name="{outputCoaddName}Coadd_calexp",
102 storageClass="ExposureF",
103 dimensions=("tract", "patch", "band", "skymap")
104 )
105
106
107class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
108 """!
109 @anchor DetectCoaddSourcesConfig_
110
111 @brief Configuration parameters for the DetectCoaddSourcesTask
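For example, the empirical variance rescaling can be disabled from a config override
file (a minimal sketch using only the fields defined below):
@code
config.doScaleVariance = False
@endcode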
112 """
113 doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
114 scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
115 detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
116 coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
117 doInsertFakes = Field(dtype=bool, default=False,
118 doc="Run fake sources injection task",
119 deprecated=("doInsertFakes is no longer supported. This config will be removed "
120 "after v24."))
121 insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
122 doc="Injection of fake sources for testing "
123 "purposes (must be retargeted)",
124 deprecated=("insertFakes is no longer supported. This config will "
125 "be removed after v24."))
126 hasFakes = Field(
127 dtype=bool,
128 default=False,
129 doc="Should be set to True if fake sources have been inserted into the input data.",
130 )
131
132 def setDefaults(self):
133 super().setDefaults()
134 self.detection.thresholdType = "pixel_stdev"
135 self.detection.isotropicGrow = True
136 # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
137 self.detection.reEstimateBackground = False
138 self.detection.background.useApprox = False
139 self.detection.background.binSize = 4096
140 self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
141 self.detection.doTempWideBackground = True # Suppress large footprints that overwhelm the deblender
142
143
149
150
151class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
152 r"""!
153 @anchor DetectCoaddSourcesTask_
154
155 @brief Detect sources on a coadd
156
157 @section pipe_tasks_multiBand_Contents Contents
158
159 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
160 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
161 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
162 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
163 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
164 - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example
165
166 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description
167
168 Command-line task that detects sources on a coadd of exposures obtained with a single filter.
169
170 Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
171 properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
172 in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
173 propagate the full covariance matrix -- but it is simple and works well in practice.
174
175 After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref
176 SourceDetectionTask_ "detection" subtask.
177
178 @par Inputs:
179 deepCoadd{tract,patch,filter}: ExposureF
180 @par Outputs:
181 deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
182 @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input
183 exposure (ExposureF)
184 @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList
185 @par Data Unit:
186 tract, patch, filter
187
188 DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
189 You can retarget this subtask if you wish.
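For example, a config override file could retarget detection to the plain
SourceDetectionTask (a sketch; the threshold value shown is illustrative):
@code
from lsst.meas.algorithms import SourceDetectionTask
config.detection.retarget(SourceDetectionTask)
config.detection.thresholdValue = 5.0
@endcode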
190
191 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization
192
193 @copydoc \_\_init\_\_
194
195 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task
196
197 @copydoc run
198
199 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters
200
201 See @ref DetectCoaddSourcesConfig_ "DetectCoaddSourcesConfig"
202
203 @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables
204
205 The command line task interface supports a
206 flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
207 files.
208
209 DetectCoaddSourcesTask has no debug variables of its own because it delegates all the work to
210 @ref SourceDetectionTask_ "SourceDetectionTask"; see the documentation for
211 @ref SourceDetectionTask_ "SourceDetectionTask" for further information.
212
213 @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example
214 of using DetectCoaddSourcesTask
215
216 DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose of
217 the task is to update the background, detect all sources in a single band and generate a set of parent
218 footprints. Subsequent tasks in the multi-band processing procedure will merge sources across bands and,
219 eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects a data
220 reference to the coadd to be processed. A list of the available optional arguments can be obtained by
221 calling detectCoaddSources.py with the `--help` command line argument:
222 @code
223 detectCoaddSources.py --help
224 @endcode
225
226 To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
227 will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
228 steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:
229 @code
230 detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
231 @endcode
232 This will process the HSC-I band data. The results are written to
233 `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.
234
235 It is also necessary to run:
236 @code
237 detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
238 @endcode
239 to generate the source catalogs for the HSC-R band required by the next step in the multi-band
240 processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".
241 """
242 _DefaultName = "detectCoaddSources"
243 ConfigClass = DetectCoaddSourcesConfig
244 getSchemaCatalogs = _makeGetSchemaCatalogs("det")
245 makeIdFactory = _makeMakeIdFactory("CoaddId")
246
247 @classmethod
248 def _makeArgumentParser(cls):
249 parser = ArgumentParser(name=cls._DefaultName)
250 parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
251 ContainerClass=ExistingCoaddDataIdContainer)
252 return parser
253
254 def __init__(self, schema=None, **kwargs):
255 """!
256 @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.
257
258 Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
259
260 @param[in] schema: initial schema for the output catalog, modified in place to include all
261 fields set by this task. If None, the source minimal schema will be used.
262 @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
263 """
264 # N.B. Super is used here to handle the multiple inheritance of PipelineTasks, the init tree
265 # call structure has been reviewed carefully to be sure super will work as intended.
266 super().__init__(**kwargs)
267 if schema is None:
268 schema = afwTable.SourceTable.makeMinimalSchema()
269 self.schema = schema
270 self.makeSubtask("detection", schema=self.schema)
271 if self.config.doScaleVariance:
272 self.makeSubtask("scaleVariance")
273
274 self.detectionSchema = afwTable.SourceCatalog(self.schema)
275
276 def runDataRef(self, patchRef):
277 """!
278 @brief Run detection on a coadd.
279
280 Invokes @ref run and then uses @ref write to output the
281 results.
282
283 @param[in] patchRef: data reference for patch
284 """
285 if self.config.hasFakes:
286 exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True)
287 else:
288 exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
289 expId = getGen3CoaddExposureId(patchRef, coaddName=self.config.coaddName, log=self.log)
290 results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
291 self.write(results, patchRef)
292 return results
293
294 def runQuantum(self, butlerQC, inputRefs, outputRefs):
295 inputs = butlerQC.get(inputRefs)
296 exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch_band")
297 inputs["idFactory"] = exposureIdInfo.makeSourceIdFactory()
298 inputs["expId"] = exposureIdInfo.expId
299 outputs = self.run(**inputs)
300 butlerQC.put(outputs, outputRefs)
301
302 def run(self, exposure, idFactory, expId):
303 """!
304 @brief Run detection on an exposure.
305
306 First scale the variance plane to match the observed variance
307 using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
308 detect sources.
309
310 @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled,
311 depending on configuration).
312 @param[in] idFactory: IdFactory to set source identifiers
313 @param[in] expId: Exposure identifier (integer) for RNG seed
314
315 @return a pipe.base.Struct with fields
316 - outputSources: catalog of detections
317 - outputBackgrounds: list of backgrounds
- outputExposure: the input exposure (modified in place)
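A minimal interactive sketch (assumes @c exposure is a coadd ExposureF already in
memory; the simple IdFactory and the expId value are illustrative):
@code
import lsst.afw.table as afwTable

task = DetectCoaddSourcesTask()
result = task.run(exposure, afwTable.IdFactory.makeSimple(), expId=0)
print(len(result.outputSources), "sources detected")
@endcode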
318 """
319 if self.config.doScaleVariance:
320 varScale = self.scaleVariance.run(exposure.maskedImage)
321 exposure.getMetadata().add("VARIANCE_SCALE", varScale)
322 backgrounds = afwMath.BackgroundList()
323 table = afwTable.SourceTable.make(self.schema, idFactory)
324 detections = self.detection.run(table, exposure, expId=expId)
325 sources = detections.sources
326 fpSets = detections.fpSets
327 if hasattr(fpSets, "background") and fpSets.background:
328 for bg in fpSets.background:
329 backgrounds.append(bg)
330 return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
331
332 def write(self, results, patchRef):
333 """!
334 @brief Write out results from run.
335
336 @param[in] results: Struct returned from run
338 @param[in] patchRef: data reference for patch
339 """
340 coaddName = self.config.coaddName + "Coadd"
341 patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
342 patchRef.put(results.outputSources, coaddName + "_det")
343 if self.config.hasFakes:
344 patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp")
345 else:
346 patchRef.put(results.outputExposure, coaddName + "_calexp")
347
348
349
350
351class DeblendCoaddSourcesConfig(Config):
352 """DeblendCoaddSourcesConfig
353
354 Configuration parameters for the `DeblendCoaddSourcesTask`.
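For example, to fall back to the single-band SDSS-style deblender, a config override
file might set (a sketch using only the fields defined below):

    config.simultaneous = False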
355 """
356 singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
357 doc="Deblend sources separately in each band")
358 multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
359 doc="Deblend sources simultaneously across bands")
360 simultaneous = Field(dtype=bool,
361 default=True,
362 doc="Simultaneously deblend all bands? "
363 "True uses `multibandDeblend` while False uses `singleBandDeblend`")
364 coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
365 hasFakes = Field(dtype=bool,
366 default=False,
367 doc="Should be set to True if fake sources have been inserted into the input data.")
368
369 def setDefaults(self):
370 Config.setDefaults(self)
371 self.singleBandDeblend.propagateAllPeaks = True
372
373
374class DeblendCoaddSourcesRunner(MergeSourcesRunner):
375 """Task runner for the `MergeSourcesTask`
376
377 Required because the run method requires a list of
378 dataRefs rather than a single dataRef.
379 """
380 @staticmethod
381 def getTargetList(parsedCmd, **kwargs):
382 """Provide a list of patch references for each patch, tract, filter combo.
383
384 Parameters
385 ----------
386 parsedCmd:
387 The parsed command
388 kwargs:
389 Keyword arguments passed to the task
390
391 Returns
392 -------
393 targetList: list
394 List of tuples, where each tuple is a (dataRef, kwargs) pair.
395 """
396 refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
397 kwargs["psfCache"] = parsedCmd.psfCache
398 return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()]
399
400
401class DeblendCoaddSourcesTask(CmdLineTask):
402 """Deblend the sources in a merged catalog
403
404 Deblend sources from master catalog in each coadd.
405 This can either be done separately in each band using the HSC-SDSS deblender
406 (`DeblendCoaddSourcesTask.config.simultaneous==False`)
407 or use SCARLET to simultaneously fit the blend in all bands
408 (`DeblendCoaddSourcesTask.config.simultaneous==True`).
409 The task will set its own `self.schema` attribute to the `Schema` of the
410 output deblended catalog.
411 This will include all fields from the input `Schema`, as well as additional fields
412 from the deblender.
413
418 Parameters
419 ----------
420 butler: `Butler`
421 Butler used to read the input schemas from disk, if `schema` or
422 `peakSchema` is `None`
423 schema: `Schema`
424 The schema of the merged detection catalog as an input to this task.
425 peakSchema: `Schema`
426 The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog
427 """
428 ConfigClass = DeblendCoaddSourcesConfig
429 RunnerClass = DeblendCoaddSourcesRunner
430 _DefaultName = "deblendCoaddSources"
431 makeIdFactory = _makeMakeIdFactory("MergedCoaddId", includeBand=False)
432
433 @classmethod
434 def _makeArgumentParser(cls):
435 parser = ArgumentParser(name=cls._DefaultName)
436 parser.add_id_argument("--id", "deepCoadd_calexp",
437 help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
438 ContainerClass=ExistingCoaddDataIdContainer)
439 parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
440 return parser
441
442 def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
443 CmdLineTask.__init__(self, **kwargs)
444 if schema is None:
445 assert butler is not None, "Neither butler nor schema is defined"
446 schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
447 self.schemaMapper = afwTable.SchemaMapper(schema)
448 self.schemaMapper.addMinimalSchema(schema)
449 self.schema = self.schemaMapper.getOutputSchema()
450 if peakSchema is None:
451 assert butler is not None, "Neither butler nor peakSchema is defined"
452 peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema
453
454 if self.config.simultaneous:
455 self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
456 else:
457 self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)
458
459 def getSchemaCatalogs(self):
460 """Return a dict of empty catalogs for each catalog dataset produced by this task.
461
462 Returns
463 -------
464 result: dict
465 Dictionary of empty catalogs, with catalog names as keys.
466 """
467 catalog = afwTable.SourceCatalog(self.schema)
468 return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
469 self.config.coaddName + "Coadd_deblendedModel": catalog}
470
471 def runDataRef(self, patchRefList, psfCache=100):
472 """Deblend the patch
473
474 Deblend each source simultaneously or separately
475 (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
478 Write the deblended sources out.
479
480 Parameters
481 ----------
482 patchRefList: list
483 List of data references for each filter
484 """
485
486 if self.config.hasFakes:
487 coaddType = "fakes_" + self.config.coaddName
488 else:
489 coaddType = self.config.coaddName
490
491 if self.config.simultaneous:
492 # Use SCARLET to simultaneously deblend across filters
493 filters = []
494 exposures = []
495 for patchRef in patchRefList:
496 exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
497 filter = patchRef.get(coaddType + "Coadd_filterLabel", immediate=True)
498 filters.append(filter.bandLabel)
499 exposures.append(exposure)
500 # Sort inputs by band to match Gen3 order of inputs
501 exposures = [exposure for _, exposure in sorted(zip(filters, exposures))]
502 patchRefList = [patchRef for _, patchRef in sorted(zip(filters, patchRefList))]
503 filters.sort()
504 # The input sources are the same for all bands, since it is a merged catalog
505 sources = self.readSources(patchRef)
506 exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
507 templateCatalogs, fluxCatalogs = self.multiBandDeblend.run(exposure, sources)
508 for n in range(len(patchRefList)):
509 self.write(patchRefList[n], templateCatalogs[filters[n]], "Model")
510 if filters[n] in fluxCatalogs:
511 self.write(patchRefList[n], fluxCatalogs[filters[n]], "Flux")
512 else:
513 # Use the single-band deblender to deblend each band separately
514 for patchRef in patchRefList:
515 exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
516 exposure.getPsf().setCacheCapacity(psfCache)
517 sources = self.readSources(patchRef)
518 self.singleBandDeblend.run(exposure, sources)
519 self.write(patchRef, sources, "Flux")
520
521 def readSources(self, dataRef):
522 """Read merged catalog
523
524 Read the catalog of merged detections and create a catalog
525 in a single band.
526
527 Parameters
528 ----------
529 dataRef: data reference
530 Data reference for catalog of merged detections
531
532 Returns
533 -------
534 sources: `SourceCatalog`
535 List of sources in merged catalog
536
537 We also need to add columns to hold the deblender outputs so we can deblend in place.
538 """
539 merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
540 self.log.info("Read %d detections: %s", len(merged), dataRef.dataId)
541 idFactory = self.makeIdFactory(dataRef)
542 # There may be gaps in the mergeDet catalog, which will cause the
543 # source ids to be inconsistent. So we update the id factory
544 # with the largest id already in the catalog.
545 maxId = np.max(merged["id"])
546 idFactory.notify(maxId)
547 table = afwTable.SourceTable.make(self.schema, idFactory)
548 sources = afwTable.SourceCatalog(table)
549 sources.extend(merged, self.schemaMapper)
550 return sources
551
552 def write(self, dataRef, sources, catalogType):
553 """Write the source catalog(s)
554
555 Parameters
556 ----------
557 dataRef: Data Reference
558 Reference to the output catalog.
559 sources: `SourceCatalog`
560 Deblended sources to write to file. If using the single-band
561 deblender, this is the flux-conserved catalog that it generates.
562 catalogType: `str`
563 Suffix of the output dataset name: "Flux" for flux-conserved catalogs
564 or "Model" for catalogs using the multiband template models as footprints.
566 """
567 dataRef.put(sources, self.config.coaddName + f"Coadd_deblended{catalogType}")
568 self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId)
569
570 def writeMetadata(self, dataRefList):
571 """Write the metadata produced from processing the data.
572 Parameters
573 ----------
574 dataRefList
575 List of Butler data references used to write the metadata.
576 The metadata is written to dataset type `CmdLineTask._getMetadataName`.
577 """
578 for dataRef in dataRefList:
579 try:
580 metadataName = self._getMetadataName()
581 if metadataName is not None:
582 dataRef.put(self.getFullMetadata(), metadataName)
583 except Exception as e:
584 self.log.warning("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)
585
586
587class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
588 dimensions=("tract", "patch", "band", "skymap"),
589 defaultTemplates={"inputCoaddName": "deep",
590 "outputCoaddName": "deep",
591 "deblendedCatalog": "deblendedFlux"}):
592 inputSchema = cT.InitInput(
593 doc="Input schema for measure merged task produced by a deblender or detection task",
594 name="{inputCoaddName}Coadd_deblendedFlux_schema",
595 storageClass="SourceCatalog"
596 )
597 outputSchema = cT.InitOutput(
598 doc="Output schema after all new fields are added by task",
599 name="{inputCoaddName}Coadd_meas_schema",
600 storageClass="SourceCatalog"
601 )
602 refCat = cT.PrerequisiteInput(
603 doc="Reference catalog used to match measured sources against known sources",
604 name="ref_cat",
605 storageClass="SimpleCatalog",
606 dimensions=("skypix",),
607 deferLoad=True,
608 multiple=True
609 )
610 exposure = cT.Input(
611 doc="Input coadd image",
612 name="{inputCoaddName}Coadd_calexp",
613 storageClass="ExposureF",
614 dimensions=("tract", "patch", "band", "skymap")
615 )
616 skyMap = cT.Input(
617 doc="SkyMap to use in processing",
618 name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
619 storageClass="SkyMap",
620 dimensions=("skymap",),
621 )
622 visitCatalogs = cT.Input(
623 doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
624 "further filtered in the task for the purpose of propagating flags from image calibration "
625 "and characterization to coadd objects. Only used in legacy PropagateVisitFlagsTask.",
626 name="src",
627 dimensions=("instrument", "visit", "detector"),
628 storageClass="SourceCatalog",
629 multiple=True
630 )
631 sourceTableHandles = cT.Input(
632 doc=("Source tables that are derived from the ``CalibrateTask`` sources. "
633 "These tables contain astrometry and photometry flags, and optionally "
634 "PSF flags."),
635 name="sourceTable_visit",
636 storageClass="DataFrame",
637 dimensions=("instrument", "visit"),
638 multiple=True,
639 deferLoad=True,
640 )
641 finalizedSourceTableHandles = cT.Input(
642 doc=("Finalized source tables from ``FinalizeCalibrationTask``. These "
643 "tables contain PSF flags from the finalized PSF estimation."),
644 name="finalized_src_table",
645 storageClass="DataFrame",
646 dimensions=("instrument", "visit"),
647 multiple=True,
648 deferLoad=True,
649 )
650 inputCatalog = cT.Input(
651 doc=("Name of the input catalog to use."
652 "If the single band deblender was used this should be 'deblendedFlux."
653 "If the multi-band deblender was used this should be 'deblendedModel, "
654 "or deblendedFlux if the multiband deblender was configured to output "
655 "deblended flux catalogs. If no deblending was performed this should "
656 "be 'mergeDet'"),
657 name="{inputCoaddName}Coadd_{deblendedCatalog}",
658 storageClass="SourceCatalog",
659 dimensions=("tract", "patch", "band", "skymap"),
660 )
661 outputSources = cT.Output(
662 doc="Source catalog containing all the measurement information generated in this task",
663 name="{outputCoaddName}Coadd_meas",
664 dimensions=("tract", "patch", "band", "skymap"),
665 storageClass="SourceCatalog",
666 )
667 matchResult = cT.Output(
668 doc="Match catalog produced by configured matcher, optional on doMatchSources",
669 name="{outputCoaddName}Coadd_measMatch",
670 dimensions=("tract", "patch", "band", "skymap"),
671 storageClass="Catalog",
672 )
673 denormMatches = cT.Output(
674 doc="Denormalized Match catalog produced by configured matcher, optional on "
675 "doWriteMatchesDenormalized",
676 name="{outputCoaddName}Coadd_measMatchFull",
677 dimensions=("tract", "patch", "band", "skymap"),
678 storageClass="Catalog",
679 )
680
681 def __init__(self, *, config=None):
682 super().__init__(config=config)
683 if config.doPropagateFlags is False:
684 self.inputs -= set(("visitCatalogs",))
685 self.inputs -= set(("sourceTableHandles",))
686 self.inputs -= set(("finalizedSourceTableHandles",))
687 elif config.propagateFlags.target == PropagateSourceFlagsTask:
688 # New PropagateSourceFlagsTask does not use visitCatalogs.
689 self.inputs -= set(("visitCatalogs",))
690 # Check for types of flags required.
691 if not config.propagateFlags.source_flags:
692 self.inputs -= set(("sourceTableHandles",))
693 if not config.propagateFlags.finalized_source_flags:
694 self.inputs -= set(("finalizedSourceTableHandles",))
695 else:
696 # Deprecated PropagateVisitFlagsTask uses visitCatalogs.
697 self.inputs -= set(("sourceTableHandles",))
698 self.inputs -= set(("finalizedSourceTableHandles",))
699
700 if config.doMatchSources is False:
701 self.outputs -= set(("matchResult",))
702
703 if config.doWriteMatchesDenormalized is False:
704 self.outputs -= set(("denormMatches",))
705
706
707class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
708 pipelineConnections=MeasureMergedCoaddSourcesConnections):
709 """!
710 @anchor MeasureMergedCoaddSourcesConfig_
711
712 @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
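For example, to measure directly on the merged detections without deblender outputs,
a config override might set (a sketch; the connection template name is taken from
MeasureMergedCoaddSourcesConnections above):
@code
config.inputCatalog = "mergeDet"
config.connections.deblendedCatalog = "mergeDet"
@endcode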
713 """
714 inputCatalog = Field(dtype=str, default="deblendedFlux",
715 doc=("Name of the input catalog to use."
716 "If the single band deblender was used this should be 'deblendedFlux."
717 "If the multi-band deblender was used this should be 'deblendedModel."
718 "If no deblending was performed this should be 'mergeDet'"))
719 measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
720 setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
721 doPropagateFlags = Field(
722 dtype=bool, default=True,
723 doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
724 )
725 propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc="Propagate source flags to coadd")
726 doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
727 match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
728 doWriteMatchesDenormalized = Field(
729 dtype=bool,
730 default=False,
731 doc=("Write reference matches in denormalized format? "
732 "This format uses more disk space, but is more convenient to read."),
733 )
734 coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
735 psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
736 checkUnitsParseStrict = Field(
737 doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
738 dtype=str,
739 default="raise",
740 )
741 doApCorr = Field(
742 dtype=bool,
743 default=True,
744 doc="Apply aperture corrections"
745 )
746 applyApCorr = ConfigurableField(
747 target=ApplyApCorrTask,
748 doc="Subtask to apply aperture corrections"
749 )
750 doRunCatalogCalculation = Field(
751 dtype=bool,
752 default=True,
753 doc='Run catalogCalculation task'
754 )
755 catalogCalculation = ConfigurableField(
756 target=CatalogCalculationTask,
757 doc="Subtask to run catalogCalculation plugins on catalog"
758 )
759
760 hasFakes = Field(
761 dtype=bool,
762 default=False,
763 doc="Should be set to True if fake sources have been inserted into the input data."
764 )
765
766 @property
767 def refObjLoader(self):
768 return self.match.refObjLoader
769
770 def setDefaults(self):
771 super().setDefaults()
772 self.measurement.plugins.names |= ['base_InputCount',
773 'base_Variance',
774 'base_LocalPhotoCalib',
775 'base_LocalWcs']
776 self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
777 'INEXACT_PSF']
778 self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
779 'INEXACT_PSF']
780
781 def validate(self):
782 super().validate()
783 refCatGen2 = getattr(self.refObjLoader, "ref_dataset_name", None)
784 if refCatGen2 is not None and refCatGen2 != self.connections.refCat:
785 raise ValueError(
786 f"Gen2 ({refCatGen2}) and Gen3 ({self.connections.refCat}) reference catalogs "
787 f"are different. These options must be kept in sync until Gen2 is retired."
788 )
789
790
791
797
798
799class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
800 """Get the psfCache setting into MeasureMergedCoaddSourcesTask"""
801 @staticmethod
802 def getTargetList(parsedCmd, **kwargs):
803 return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache)
804
805
806class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask):
807 r"""!
808 @anchor MeasureMergedCoaddSourcesTask_
809
810 @brief Deblend sources from master catalog in each coadd separately and measure.
811
812 @section pipe_tasks_multiBand_Contents Contents
813
814 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
815 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
816 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
817 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
818 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
819 - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example
820
821 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description
822
823 Command-line task that uses peaks and footprints from a master catalog to perform deblending and
824 measurement in each coadd.
825
826 Given a master input catalog of sources (peaks and footprints) or deblender outputs
827 (including a HeavyFootprint in each band), measure each source on the
828 coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
829 consistent set of child sources.
830
831 The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
832 properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
833 flags are propagated to the coadd sources.
834
835 Optionally, we can match the coadd sources to an external reference catalog.
836
837 @par Inputs:
838 deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
839 @n deepCoadd_calexp{tract,patch,filter}: ExposureF
840 @par Outputs:
841 deepCoadd_meas{tract,patch,filter}: SourceCatalog
842 @par Data Unit:
843 tract, patch, filter
844
845 MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:
846
847 <DL>
848 <DT> @ref SingleFrameMeasurementTask_ "measurement"
849 <DD> Measure source properties of deblended sources.</DD>
850 <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
851 <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are
852 not at the edge of the field and that have either not been deblended or are the children of deblended
853 sources</DD>
854 <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
855 <DD> Propagate flags set in individual visits to the coadd.</DD>
856 <DT> @ref DirectMatchTask_ "match"
857 <DD> Match input sources to a reference catalog (optional).
858 </DD>
859 </DL>
860 These subtasks may be retargeted as required.
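For example, reference matching can be tuned from a config override file (a sketch;
matchRadius is assumed to be the DirectMatchConfig matching-radius field, in arcseconds):
@code
config.doMatchSources = True
config.match.matchRadius = 0.5
@endcode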
861
862 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization
863
864 @copydoc \_\_init\_\_
865
866 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task
867
868 @copydoc run
869
870 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters
871
872 See @ref MeasureMergedCoaddSourcesConfig_
873
874 @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables
875
876 The command line task interface supports a
877 flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
878 files.
879
880 MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
881 the various sub-tasks. See the documentation for individual sub-tasks for more information.
882
883 @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
884 MeasureMergedCoaddSourcesTask
885
886 After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
887 The next stage in the multi-band processing procedure will merge these measurements into a suitable
888 catalog for driving forced photometry.
889
890 Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds
891 to be processed.
892 A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
893 `--help` command line argument:
894 @code
895 measureCoaddSources.py --help
896 @endcode
897
898 To demonstrate usage of the MeasureMergedCoaddSourcesTask in the larger context of multi-band processing, we
899 will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
900 step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I band
901 coadd as follows:
902 @code
903 measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
904 @endcode
905 This will process the HSC-I band data. The results are written to
906 `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`.
907
908 It is also necessary to run
909 @code
910 measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
911 @endcode
912 to generate the source catalogs for the HSC-R band required by the next step in the multi-band
913 procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
914 """
915 _DefaultName = "measureCoaddSources"
916 ConfigClass = MeasureMergedCoaddSourcesConfig
917 RunnerClass = MeasureMergedCoaddSourcesRunner
918 getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
919 # The IDs we already have are of this type
920 makeIdFactory = _makeMakeIdFactory("MergedCoaddId", includeBand=False)
921
922 @classmethod
923 def _makeArgumentParser(cls):
924 parser = ArgumentParser(name=cls._DefaultName)
925 parser.add_id_argument("--id", "deepCoadd_calexp",
926 help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
927 ContainerClass=ExistingCoaddDataIdContainer)
928 parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
929 return parser
930
931 def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
932 **kwargs):
933 """!
934 @brief Initialize the task.
935
936 Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
937 @param[in] schema: the schema of the merged detection catalog used as input to this one
938 @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
939 @param[in] refObjLoader: an instance of LoadReferenceObjectsTask that supplies an external reference
940 catalog. May be None if the loader can be constructed from the butler argument or all steps
941 requiring a reference catalog are disabled.
942 @param[in] butler: a butler used to read the input schemas from disk or construct the reference
943 catalog loader, if schema or peakSchema or refObjLoader is None
944
945 The task will set its own self.schema attribute to the schema of the output measurement catalog.
946 This will include all fields from the input schema, as well as additional fields for all the
947 measurements.
948 """
949 super().__init__(**kwargs)
950 self.deblended = self.config.inputCatalog.startswith("deblended")
951 self.inputCatalog = "Coadd_" + self.config.inputCatalog
952 if initInputs is not None:
953 schema = initInputs['inputSchema'].schema
954 if schema is None:
955 assert butler is not None, "Neither butler nor schema is defined"
956 schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
957 self.schemaMapper = afwTable.SchemaMapper(schema)
958 self.schemaMapper.addMinimalSchema(schema)
959 self.schema = self.schemaMapper.getOutputSchema()
960 self.algMetadata = PropertyList()
961 self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
962 self.makeSubtask("setPrimaryFlags", schema=self.schema)
963 if self.config.doMatchSources:
964 self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
965 if self.config.doPropagateFlags:
966 self.makeSubtask("propagateFlags", schema=self.schema)
967 self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
968 if self.config.doApCorr:
969 self.makeSubtask("applyApCorr", schema=self.schema)
970 if self.config.doRunCatalogCalculation:
971 self.makeSubtask("catalogCalculation", schema=self.schema)
972
973 self.outputSchema = afwTable.SourceCatalog(self.schema)
974
975 def runQuantum(self, butlerQC, inputRefs, outputRefs):
976 inputs = butlerQC.get(inputRefs)
977
978 refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
979 inputs.pop('refCat'), config=self.config.refObjLoader,
980 log=self.log)
981 self.match.setRefObjLoader(refObjLoader)
982
983 # Set psfcache
984 # move this to run after gen2 deprecation
985 inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)
986
987 # Get unique integer ID for IdFactory and RNG seeds
988 exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch")
989 inputs['exposureId'] = exposureIdInfo.expId
990 idFactory = exposureIdInfo.makeSourceIdFactory()
991 # Transform inputCatalog
992 table = afwTable.SourceTable.make(self.schema, idFactory)
993 sources = afwTable.SourceCatalog(table)
994 sources.extend(inputs.pop('inputCatalog'), self.schemaMapper)
995 table = sources.getTable()
996 table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
997 inputs['sources'] = sources
998
999 skyMap = inputs.pop('skyMap')
1000 tractNumber = inputRefs.inputCatalog.dataId['tract']
1001 tractInfo = skyMap[tractNumber]
1002 patchInfo = tractInfo.getPatchInfo(inputRefs.inputCatalog.dataId['patch'])
1003 skyInfo = Struct(
1004 skyMap=skyMap,
1005 tractInfo=tractInfo,
1006 patchInfo=patchInfo,
1007 wcs=tractInfo.getWcs(),
1008 bbox=patchInfo.getOuterBBox()
1009 )
1010 inputs['skyInfo'] = skyInfo
1011
1012 if self.config.doPropagateFlags:
1013 if self.config.propagateFlags.target == PropagateSourceFlagsTask:
1014 # New version
1015 ccdInputs = inputs["exposure"].getInfo().getCoaddInputs().ccds
1016 inputs["ccdInputs"] = ccdInputs
1017
1018 if "sourceTableHandles" in inputs:
1019 sourceTableHandles = inputs.pop("sourceTableHandles")
1020 sourceTableHandleDict = {handle.dataId["visit"]: handle
1021 for handle in sourceTableHandles}
1022 inputs["sourceTableHandleDict"] = sourceTableHandleDict
1023 if "finalizedSourceTableHandles" in inputs:
1024 finalizedSourceTableHandles = inputs.pop("finalizedSourceTableHandles")
1025 finalizedSourceTableHandleDict = {handle.dataId["visit"]: handle
1026 for handle in finalizedSourceTableHandles}
1027 inputs["finalizedSourceTableHandleDict"] = finalizedSourceTableHandleDict
1028 else:
1029 # Deprecated legacy version
1030 # Filter out any visit catalog that is not coadd inputs
1031 ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
1032 visitKey = ccdInputs.schema.find("visit").key
1033 ccdKey = ccdInputs.schema.find("ccd").key
1034 inputVisitIds = set()
1035 ccdRecordsWcs = {}
1036 for ccdRecord in ccdInputs:
1037 visit = ccdRecord.get(visitKey)
1038 ccd = ccdRecord.get(ccdKey)
1039 inputVisitIds.add((visit, ccd))
1040 ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()
1041
1042 inputCatalogsToKeep = []
1043 inputCatalogWcsUpdate = []
1044 for i, dataRef in enumerate(inputRefs.visitCatalogs):
1045 key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
1046 if key in inputVisitIds:
1047 inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
1048 inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
1049 inputs['visitCatalogs'] = inputCatalogsToKeep
1050 inputs['wcsUpdates'] = inputCatalogWcsUpdate
1051 inputs['ccdInputs'] = ccdInputs
1052
1053 outputs = self.run(**inputs)
1054 butlerQC.put(outputs, outputRefs)
1055
1056 def runDataRef(self, patchRef, psfCache=100):
1057 """!
1058 @brief Deblend and measure.
1059
1060 @param[in] patchRef: Patch reference.
1061
1062 Set 'is-primary' and related flags. Propagate flags
1063 from individual visits. Optionally match the sources to a reference catalog and write the matches.
1064 Finally, write the deblended sources and measurements out.
1065 """
1066 if self.config.hasFakes:
1067 coaddType = "fakes_" + self.config.coaddName
1068 else:
1069 coaddType = self.config.coaddName
1070 exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
1071 exposure.getPsf().setCacheCapacity(psfCache)
1072 sources = self.readSources(patchRef)
1073 table = sources.getTable()
1074 table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
1075 skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)
1076
1077 if self.config.doPropagateFlags:
1078 ccdInputs = self.propagateFlags.getCcdInputs(exposure)
1079 else:
1080 ccdInputs = None
1081
1082 expId = getGen3CoaddExposureId(patchRef, coaddName=self.config.coaddName, includeBand=False,
1083 log=self.log)
1084 results = self.run(exposure=exposure, sources=sources, skyInfo=skyInfo, exposureId=expId,
1085 ccdInputs=ccdInputs, butler=patchRef.getButler())
1086
1087 if self.config.doMatchSources:
1088 self.writeMatches(patchRef, results)
1089 self.write(patchRef, results.outputSources)
1090
1091 def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
1092 butler=None, sourceTableHandleDict=None, finalizedSourceTableHandleDict=None):
1093 """Run measurement algorithms on the input exposure, and optionally populate the
1094 resulting catalog with extra information.
1095
1096 Parameters
1097 ----------
1098 exposure : `lsst.afw.image.Exposure`
1099 The input exposure on which measurements are to be performed
1100 sources : `lsst.afw.table.SourceCatalog`
1101 A catalog built from the results of merged detections, or
1102 deblender outputs.
1103 skyInfo : `lsst.pipe.base.Struct`
1104 A struct containing information about the position of the input exposure within
1105 a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
1106 exposureId : `int` or `bytes`
1107 packed unique number or bytes unique to the input exposure
1108 ccdInputs : `lsst.afw.table.ExposureCatalog`
1109 Catalog containing information on the individual visits which went into making
1110 the coadd.
1111 sourceTableHandleDict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
1112 Dict for sourceTable_visit handles (key is visit) for propagating flags.
1113 These tables are derived from the ``CalibrateTask`` sources, and contain
1114 astrometry and photometry flags, and optionally PSF flags.
1115 finalizedSourceTableHandleDict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`], optional
1116 Dict for finalized_src_table handles (key is visit) for propagating flags.
1117 These tables are derived from ``FinalizeCalibrationTask`` and contain
1118 PSF flags from the finalized PSF estimation.
1119 visitCatalogs : list of `lsst.afw.table.SourceCatalog`
1120 A list of source catalogs corresponding to measurements made on the individual
1121 visits which went into the input exposure. If None and butler is `None` then
1122 the task cannot propagate visit flags to the output catalog.
1123 Deprecated, to be removed with PropagateVisitFlagsTask.
1124 wcsUpdates : list of `lsst.afw.geom.SkyWcs`
1125 If visitCatalogs is not `None` this should be a list of wcs objects which correspond
1126 to the input visits. Used to put all coordinates to common system. If `None` and
1127 butler is `None` then the task cannot propagate visit flags to the output catalog.
1128 Deprecated, to be removed with PropagateVisitFlagsTask.
1129 butler : `lsst.daf.persistence.Butler`
1130 A gen2 butler used to load visit catalogs.
1131 Deprecated, to be removed with Gen2.
1132
1133 Returns
1134 -------
1135 results : `lsst.pipe.base.Struct`
1136 Results of running measurement task. Will contain the catalog in the
1137 sources attribute. Optionally will have results of matching to a
1138 reference catalog in the matchResults attribute, and denormalized
1139 matches in the denormMatches attribute.
1140 """
1141 self.measurement.run(sources, exposure, exposureId=exposureId)
1142
1143 if self.config.doApCorr:
1144 self.applyApCorr.run(
1145 catalog=sources,
1146 apCorrMap=exposure.getInfo().getApCorrMap()
1147 )
1148
1149 # TODO DM-11568: this contiguous check-and-copy could go away if we
1150 # reserve enough space during SourceDetection and/or SourceDeblend.
1151 # NOTE: sourceSelectors require contiguous catalogs, so ensure
1152 # contiguity now, so views are preserved from here on.
1153 if not sources.isContiguous():
1154 sources = sources.copy(deep=True)
1155
1156 if self.config.doRunCatalogCalculation:
1157 self.catalogCalculation.run(sources)
1158
1159 self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
1160 patchInfo=skyInfo.patchInfo)
1161 if self.config.doPropagateFlags:
1162 if self.config.propagateFlags.target == PropagateSourceFlagsTask:
1163 # New version
1164 self.propagateFlags.run(
1165 sources,
1166 ccdInputs,
1167 sourceTableHandleDict,
1168 finalizedSourceTableHandleDict
1169 )
1170 else:
1171 # Legacy deprecated version
1172 self.propagateFlags.run(
1173 butler,
1174 sources,
1175 ccdInputs,
1176 exposure.getWcs(),
1177 visitCatalogs,
1178 wcsUpdates
1179 )
1180
1181 results = Struct()
1182
1183 if self.config.doMatchSources:
1184 matchResult = self.match.run(sources, exposure.getInfo().getFilterLabel().bandLabel)
1185 matches = afwTable.packMatches(matchResult.matches)
1186 matches.table.setMetadata(matchResult.matchMeta)
1187 results.matchResult = matches
1188 if self.config.doWriteMatchesDenormalized:
1189 if matchResult.matches:
1190 denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
1191 else:
1192 self.log.warning("No matches, so generating dummy denormalized matches file")
1193 denormMatches = afwTable.BaseCatalog(afwTable.Schema())
1194 denormMatches.setMetadata(PropertyList())
1195 denormMatches.getMetadata().add("COMMENT",
1196 "This catalog is empty because no matches were found.")
1197 results.denormMatches = denormMatches
1199
1200 results.outputSources = sources
1201 return results
1202
1203 def readSources(self, dataRef):
1204 """!
1205 @brief Read input sources.
1206
1207 @param[in] dataRef: Data reference for catalog of merged detections
1208 @return List of sources in merged catalog
1209
1210 We also need to add columns to hold the measurements we're about to make so we can measure in-place.
1211 """
1212 merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
1213 self.log.info("Read %d detections: %s", len(merged), dataRef.dataId)
1214 idFactory = self.makeIdFactory(dataRef)
1215 for s in merged:
1216 idFactory.notify(s.getId())
1217 table = afwTable.SourceTable.make(self.schema, idFactory)
1218 sources = afwTable.SourceCatalog(table)
1219 sources.extend(merged, self.schemaMapper)
1220 return sources
1221
1222 def writeMatches(self, dataRef, results):
1223 """!
1224 @brief Write matches of the sources to the astrometric reference catalog.
1225
1226 @param[in] dataRef: data reference
1227 @param[in] results: results struct from run method
1228 """
1229 if hasattr(results, "matchResult"):
1230 dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch")
1231 if hasattr(results, "denormMatches"):
1232 dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull")
1233
1234 def write(self, dataRef, sources):
1235 """!
1236 @brief Write the source catalog.
1237
1238 @param[in] dataRef: data reference
1239 @param[in] sources: source catalog
1240 """
1241 dataRef.put(sources, self.config.coaddName + "Coadd_meas")
1242 self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId)