from .multiBandUtils import (CullPeaksConfig, MergeSourcesRunner, _makeMakeIdFactory, makeMergeArgumentParser,
                             getInputSchema, readCatalog)
import lsst.afw.detection as afwDetect
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable

from lsst.meas.algorithms import SkyObjectsTask
from lsst.skymap import BaseSkyMap
from lsst.pex.config import Config, Field, ListField, ConfigurableField, ConfigField
from lsst.pipe.base import (CmdLineTask, PipelineTask, PipelineTaskConfig, Struct,
                            PipelineTaskConnections)
import lsst.pipe.base.connectionTypes as cT
from .coaddBase import getSkyInfo


class MergeDetectionsConnections(PipelineTaskConnections,
                                 dimensions=("tract", "patch", "skymap"),
                                 defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
    schema = cT.InitInput(
        doc="Schema of the input detection catalog",
        name="{inputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog"
    )

    outputSchema = cT.InitOutput(
        doc="Schema of the merged detection catalog",
        name="{outputCoaddName}Coadd_mergeDet_schema",
        storageClass="SourceCatalog"
    )

    outputPeakSchema = cT.InitOutput(
        doc="Output schema of the Footprint peak catalog",
        name="{outputCoaddName}Coadd_peak_schema",
        storageClass="PeakCatalog"
    )

    catalogs = cT.Input(
        doc="Detection catalogs to be merged",
        name="{inputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True
    )

    skyMap = cT.Input(
        doc="SkyMap to be used in merging",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )

    outputCatalog = cT.Output(
        doc="Merged detection catalog",
        name="{outputCoaddName}Coadd_mergeDet",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )


class MergeDetectionsConfig(PipelineTaskConfig, pipelineConnections=MergeDetectionsConnections):
    """!
    @anchor MergeDetectionsConfig_

    @brief Configuration parameters for the MergeDetectionsTask.
    """
    minNewPeak = Field(dtype=float, default=1,
                       doc="Minimum distance from closest peak to create a new one (in arcsec).")

    maxSamePeak = Field(dtype=float, default=0.3,
                        doc="When adding new catalogs to the merge, all peaks less than this distance "
                            "(in arcsec) to an existing peak will be flagged as detected in that catalog.")
    cullPeaks = ConfigField(dtype=CullPeaksConfig, doc="Configuration for how to cull peaks.")

    skyFilterName = Field(dtype=str, default="sky",
                          doc="Name of `filter' used to label sky objects (e.g. flag merge_peak_sky is set)\n"
                              "(N.b. should be in MergeMeasurementsConfig.pseudoFilterList)")
    skyObjects = ConfigurableField(target=SkyObjectsTask, doc="Generate sky objects")
    priorityList = ListField(dtype=str, default=[],
                             doc="Priority-ordered list of filter bands for the merge.")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")

    def setDefaults(self):
        Config.setDefaults(self)
        self.skyObjects.avoidMask = ["DETECTED"]

    def validate(self):
        super().validate()
        if len(self.priorityList) == 0:
            raise RuntimeError("No priority list provided")


class MergeDetectionsTask(PipelineTask, CmdLineTask):
    r"""!
    @anchor MergeDetectionsTask_

    @brief Merge coadd detections from multiple bands.

    @section pipe_tasks_multiBand_Contents Contents

      - @ref pipe_tasks_multiBand_MergeDetectionsTask_Purpose
      - @ref pipe_tasks_multiBand_MergeDetectionsTask_Init
      - @ref pipe_tasks_multiBand_MergeDetectionsTask_Run
      - @ref pipe_tasks_multiBand_MergeDetectionsTask_Config
      - @ref pipe_tasks_multiBand_MergeDetectionsTask_Debug
      - @ref pipe_tasks_multiband_MergeDetectionsTask_Example

    @section pipe_tasks_multiBand_MergeDetectionsTask_Purpose Description

    Command-line task that merges sources detected in coadds of exposures obtained with different filters.

    To perform photometry consistently across coadds in multiple filter bands, we create a master catalog
    of sources from all bands by merging the sources (peaks & footprints) detected in each coadd, while
    keeping track of which band each source originates in.

    The catalog merge is performed by @ref getMergedSourceCatalog. Spurious peaks detected around bright
    objects are culled as described in @ref CullPeaksConfig_.

    @par Inputs:
        deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
    @par Outputs:
        deepCoadd_mergeDet{tract,patch}: SourceCatalog (only parent Footprints)

    @section pipe_tasks_multiBand_MergeDetectionsTask_Init Task initialisation

    @copydoc \_\_init\_\_

    @section pipe_tasks_multiBand_MergeDetectionsTask_Run Invoking the Task

    @section pipe_tasks_multiBand_MergeDetectionsTask_Config Configuration parameters

    See @ref MergeDetectionsConfig_

    @section pipe_tasks_multiBand_MergeDetectionsTask_Debug Debug variables

    The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a flag
    @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
    files.

    MergeDetectionsTask has no debug variables.

    @section pipe_tasks_multiband_MergeDetectionsTask_Example A complete example of using MergeDetectionsTask

    MergeDetectionsTask is meant to be run after detecting sources in coadds generated for the chosen
    subset of the available bands.
    The purpose of the task is to merge sources (peaks & footprints) detected in the coadds generated
    from the chosen subset of filters.
    Subsequent tasks in the multi-band processing procedure will deblend the generated master list of
    sources and, eventually, perform forced photometry.
    Command-line usage of MergeDetectionsTask expects data references for all the coadds to be processed.
    A list of the available optional arguments can be obtained by calling mergeCoaddDetections.py with the
    `--help` command line argument:
    @code
    mergeCoaddDetections.py --help
    @endcode

    To demonstrate usage of MergeDetectionsTask in the larger context of multi-band processing, we will
    process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
    step 5 at @ref pipeTasks_multiBand, one may merge the catalogs of sources from each coadd as follows:
    @code
    mergeCoaddDetections.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I^HSC-R
    @endcode
    This will merge the HSC-I and HSC-R band parent source catalogs and write the results to
    `$CI_HSC_DIR/DATA/deepCoadd-results/merged/0/5,4/mergeDet-0-5,4.fits`.

    The next step in the multi-band processing procedure is
    @ref MeasureMergedCoaddSourcesTask_ "MeasureMergedCoaddSourcesTask".
    """
    ConfigClass = MergeDetectionsConfig
    RunnerClass = MergeSourcesRunner
    _DefaultName = "mergeCoaddDetections"
    inputDataset = "det"
    outputDataset = "mergeDet"
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        return makeMergeArgumentParser(cls._DefaultName, cls.inputDataset)

    def getInputSchema(self, butler=None, schema=None):
        return getInputSchema(self, butler, schema)

    def __init__(self, butler=None, schema=None, initInputs=None, **kwargs):
        """!
        @brief Initialize the merge detections task.

        A @ref FootprintMergeList_ "FootprintMergeList" will be used to
        merge the source catalogs.

        @param[in] schema     the schema of the detection catalogs used as input to this one
        @param[in] butler     a butler used to read the input schema from disk, if schema is None
        @param[in] initInputs a PipelineTask-only argument that holds all inputs passed in
                              through the PipelineTask middleware
        @param[in] **kwargs   keyword arguments to be passed to CmdLineTask.__init__

        The task will set its own self.schema attribute to the schema of the output merged catalog.
        """
        super().__init__(**kwargs)
        if initInputs is not None:
            schema = initInputs['schema'].schema

        self.makeSubtask("skyObjects")
        self.schema = self.getInputSchema(butler=butler, schema=schema)

        filterNames = list(self.config.priorityList)
        filterNames.append(self.config.skyFilterName)
        self.merged = afwDetect.FootprintMergeList(self.schema, filterNames)
        self.outputSchema = afwTable.SourceCatalog(self.schema)
        self.outputPeakSchema = afwDetect.PeakCatalog(self.merged.getPeakSchema())
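
    # A construction sketch, assuming an explicit input schema rather than a butler (the band names
    # and the minimal schema are illustrative only; priorityList must be non-empty or validate()
    # raises RuntimeError):
    #
    #     import lsst.afw.table as afwTable
    #     config = MergeDetectionsConfig()
    #     config.priorityList = ["i", "r"]
    #     task = MergeDetectionsTask(schema=afwTable.SourceTable.makeMinimalSchema(), config=config)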

    def runDataRef(self, patchRefList):
        catalogs = dict(readCatalog(self, patchRef) for patchRef in patchRefList)
        skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRefList[0])
        idFactory = self.makeIdFactory(patchRefList[0])
        skySeed = patchRefList[0].get(self.config.coaddName + "MergedCoaddId")
        mergeCatalogStruct = self.run(catalogs, skyInfo, idFactory, skySeed)
        self.write(patchRefList[0], mergeCatalogStruct.outputCatalog)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch", returnMaxBits=True)
        inputs["skySeed"] = packedId
        inputs["idFactory"] = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
        catalogDict = {ref.dataId['band']: cat for ref, cat in zip(inputRefs.catalogs,
                                                                   inputs['catalogs'])}
        inputs['catalogs'] = catalogDict
        skyMap = inputs.pop('skyMap')
        # The first dataId is enough to identify the tract and patch being processed.
        tractNumber = inputRefs.catalogs[0].dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(inputRefs.catalogs[0].dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)
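
    # Illustration of the ID handling above: dataId.pack("tract_patch", returnMaxBits=True) returns an
    # integer that uniquely encodes this tract+patch, along with the number of bits it occupies, so
    #
    #     idFactory = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
    #
    # yields source IDs whose upper maxBits bits identify the patch and whose remaining bits count
    # sources within it, keeping IDs unique across patches. The same packed integer is reused as the
    # sky-object random seed ("skySeed").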

    def run(self, catalogs, skyInfo, idFactory, skySeed):
        r"""!
        @brief Merge multiple catalogs.

        After ordering the catalogs and filters in priority order,
        @ref getMergedSourceCatalog of the @ref FootprintMergeList_ "FootprintMergeList" created by
        @ref \_\_init\_\_ is used to perform the actual merging. Finally, @ref cullPeaks is used to remove
        garbage peaks detected around bright objects.

        @param[in]  catalogs    dict mapping band name to the detection catalog for that band
        @param[in]  skyInfo     description of the patch geometry (wcs, bounding box, patch info)
        @param[in]  idFactory   IdFactory used to assign IDs in the merged catalog
        @param[in]  skySeed     seed for the sky-object random number generator
        @param[out] mergedList  merged catalog of sources
        """
        # Convert the configured distances from arcseconds to pixels in the tract coordinate system.
        tractWcs = skyInfo.wcs
        peakDistance = self.config.minNewPeak / tractWcs.getPixelScale().asArcseconds()
        samePeakDistance = self.config.maxSamePeak / tractWcs.getPixelScale().asArcseconds()

        # Put the catalogs and filters in priority order.
        orderedCatalogs = [catalogs[band] for band in self.config.priorityList if band in catalogs.keys()]
        orderedBands = [band for band in self.config.priorityList if band in catalogs.keys()]

        mergedList = self.merged.getMergedSourceCatalog(orderedCatalogs, orderedBands, peakDistance,
                                                        self.schema, idFactory,
                                                        samePeakDistance)

        # Add extra sources that correspond to blank sky.
        skySourceFootprints = self.getSkySourceFootprints(mergedList, skyInfo, skySeed)
        if skySourceFootprints:
            key = mergedList.schema.find("merge_footprint_%s" % self.config.skyFilterName).key
            for foot in skySourceFootprints:
                s = mergedList.addNew()
                s.setFootprint(foot)
                s.set(key, True)

        # Sort the peaks from brightest to faintest.
        for record in mergedList:
            record.getFootprint().sortPeaks()
        self.log.info("Merged to %d sources" % len(mergedList))
        self.cullPeaks(mergedList)
        return Struct(outputCatalog=mergedList)
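
    # Usage sketch (band keys and catalog variables are placeholders): `catalogs` is keyed by the band
    # names listed in config.priorityList, and the result carries the merged catalog in a Struct:
    #
    #     catalogs = {"i": detCatI, "r": detCatR}   # per-band {coaddName}Coadd_det catalogs
    #     result = task.run(catalogs, skyInfo, idFactory, skySeed)
    #     merged = result.outputCatalog             # an lsst.afw.table.SourceCatalog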

    def cullPeaks(self, catalog):
        """!
        @brief Attempt to remove garbage peaks (mostly on the outskirts of large blends).

        @param[in] catalog Source catalog
        """
        keys = [item.key for item in self.merged.getPeakSchema().extract("merge_peak_*").values()]
        assert len(keys) > 0, "Error finding flags that associate peaks with their detection bands."
        totalPeaks = 0
        culledPeaks = 0
        for parentSource in catalog:
            # Make a copy of the peak list so the attached PeakCatalog can be cleared and the kept
            # peaks appended back to it (easier than deleting while iterating).
            keptPeaks = parentSource.getFootprint().getPeaks()
            oldPeaks = list(keptPeaks)
            keptPeaks.clear()
            familySize = len(oldPeaks)
            totalPeaks += familySize
            for rank, peak in enumerate(oldPeaks):
                if ((rank < self.config.cullPeaks.rankSufficient)
                        or (sum([peak.get(k) for k in keys]) >= self.config.cullPeaks.nBandsSufficient)
                        or (rank < self.config.cullPeaks.rankConsidered
                            and rank < self.config.cullPeaks.rankNormalizedConsidered * familySize)):
                    keptPeaks.append(peak)
                else:
                    culledPeaks += 1
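
        # Worked example of the keep/cull rule above, assuming (hypothetically) rankSufficient=20,
        # nBandsSufficient=2, rankConsidered=30, rankNormalizedConsidered=0.7 and a family of 40 peaks:
        #
        #     rank 5,  detected in 1 band  -> kept   (rank < rankSufficient)
        #     rank 25, detected in 2 bands -> kept   (detected in >= nBandsSufficient bands)
        #     rank 25, detected in 1 band  -> kept   (25 < 30 and 25 < 0.7 * 40 = 28)
        #     rank 35, detected in 1 band  -> culled (fails all three criteria)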
        self.log.info("Culled %d of %d peaks" % (culledPeaks, totalPeaks))

    def getSchemaCatalogs(self):
        """!
        Return a dict of empty catalogs for each catalog dataset produced by this task.

        @param[out] dictionary of empty catalogs
        """
        mergeDet = afwTable.SourceCatalog(self.schema)
        peak = afwDetect.PeakCatalog(self.merged.getPeakSchema())
        return {self.config.coaddName + "Coadd_mergeDet": mergeDet,
                self.config.coaddName + "Coadd_peak": peak}

    def getSkySourceFootprints(self, mergedList, skyInfo, seed):
        """!
        @brief Return a list of Footprints of sky objects which don't overlap with anything in mergedList.

        @param mergedList  The merged Footprints from all the input bands
        @param skyInfo     A description of the patch
        @param seed        Seed for the random number generator
        """
        mask = afwImage.Mask(skyInfo.patchInfo.getOuterBBox())
        detected = mask.getPlaneBitMask("DETECTED")
        for s in mergedList:
            s.getFootprint().spans.setMask(mask, detected)

        footprints = self.skyObjects.run(mask, seed)

        # Convert the peak catalogs to this task's peak schema so the merge_peak_<skyFilterName> flag
        # can be set on each sky object.
        schema = self.merged.getPeakSchema()
        mergeKey = schema.find("merge_peak_%s" % self.config.skyFilterName).key
        converted = []
        for oldFoot in footprints:
            assert len(oldFoot.getPeaks()) == 1, "Should be a single peak only"
            peak = oldFoot.getPeaks()[0]
            newFoot = afwDetect.Footprint(oldFoot.spans, schema)
            newFoot.addPeak(peak.getFx(), peak.getFy(), peak.getPeakValue())
            newFoot.getPeaks()[0].set(mergeKey, True)
            converted.append(newFoot)

        return converted

    def write(self, patchRef, catalog):
        """!
        @brief Write the output.

        @param[in] patchRef  data reference for patch
        @param[in] catalog   catalog

        We write as the dataset provided by the 'outputDataset' class variable.
        """
        patchRef.put(catalog, self.config.coaddName + "Coadd_" + self.outputDataset)
        # The filter is not part of the data ID for the dataset being written, so it is confusing to
        # see it in the log message, even though the butler simply ignores it.
        mergeDataId = patchRef.dataId.copy()
        del mergeDataId["filter"]
        self.log.info("Wrote merged catalog: %s" % (mergeDataId,))

    def writeMetadata(self, dataRefList):
        """!
        @brief No metadata to write, and not sure how to write it for a list of dataRefs.
        """
        pass