from .multiBandUtils import (CullPeaksConfig, MergeSourcesRunner, _makeMakeIdFactory, makeMergeArgumentParser,
                             getInputSchema, getShortFilterName, readCatalog)

import lsst.afw.detection as afwDetect
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable

from lsst.meas.algorithms import SkyObjectsTask
from lsst.pex.config import Config, Field, ListField, ConfigurableField, ConfigField
from lsst.pipe.base import (CmdLineTask, PipelineTask, PipelineTaskConfig, InitOutputDatasetField,
                            InputDatasetField, InitInputDatasetField, OutputDatasetField, Struct)
from .coaddBase import getSkyInfo


class MergeDetectionsConfig(PipelineTaskConfig):
    """!
    @anchor MergeDetectionsConfig_

    @brief Configuration parameters for the MergeDetectionsTask.
    """
    minNewPeak = Field(dtype=float, default=1,
                       doc="Minimum distance from closest peak to create a new one (in arcsec).")

    maxSamePeak = Field(dtype=float, default=0.3,
                        doc="When adding new catalogs to the merge, all peaks less than this distance "
                            "(in arcsec) to an existing peak will be flagged as detected in that catalog.")
    cullPeaks = ConfigField(dtype=CullPeaksConfig, doc="Configuration for how to cull peaks.")

    skyFilterName = Field(dtype=str, default="sky",
                          doc="Name of `filter' used to label sky objects (e.g. flag merge_peak_sky is set)\n"
                              "(N.b. should be in MergeMeasurementsConfig.pseudoFilterList)")
    skyObjects = ConfigurableField(target=SkyObjectsTask, doc="Generate sky objects")
    priorityList = ListField(dtype=str, default=[],
                             doc="Priority-ordered list of bands for the merge.")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")

    schema = InitInputDatasetField(
        doc="Schema of the input detection catalog",
        nameTemplate="{inputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog"
    )

    outputSchema = InitOutputDatasetField(
        doc="Schema of the merged detection catalog",
        nameTemplate="{outputCoaddName}Coadd_mergeDet_schema",
        storageClass="SourceCatalog"
    )

    outputPeakSchema = InitOutputDatasetField(
        doc="Output schema of the Footprint peak catalog",
        nameTemplate="{outputCoaddName}Coadd_peak_schema",
        storageClass="PeakCatalog"
    )

    catalogs = InputDatasetField(
        doc="Detection Catalogs to be merged",
        nameTemplate="{inputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("Tract", "Patch", "SkyMap", "AbstractFilter")
    )

    skyMap = InputDatasetField(
        doc="SkyMap to be used in merging",
        nameTemplate="{inputCoaddName}Coadd_skyMap",
        storageClass="SkyMap",
        dimensions=("SkyMap",)
    )

    outputCatalog = OutputDatasetField(
        doc="Merged Detection catalog",
        nameTemplate="{outputCoaddName}Coadd_mergeDet",
        storageClass="SourceCatalog",
        dimensions=("Tract", "Patch", "SkyMap")
    )

    def setDefaults(self):
        Config.setDefaults(self)
        self.formatTemplateNames({"inputCoaddName": "deep", "outputCoaddName": "deep"})
        self.quantum.dimensions = ("Tract", "Patch", "SkyMap")

    def validate(self):
        super().validate()
        if len(self.priorityList) == 0:
            raise RuntimeError("No priority list provided")


class MergeDetectionsTask(PipelineTask, CmdLineTask):
    r"""!
    @anchor MergeDetectionsTask_

    @brief Merge coadd detections from multiple bands.

    @section pipe_tasks_multiBand_Contents Contents

      - @ref pipe_tasks_multiBand_MergeDetectionsTask_Purpose
      - @ref pipe_tasks_multiBand_MergeDetectionsTask_Init
      - @ref pipe_tasks_multiBand_MergeDetectionsTask_Run
      - @ref pipe_tasks_multiBand_MergeDetectionsTask_Config
      - @ref pipe_tasks_multiBand_MergeDetectionsTask_Debug
      - @ref pipe_tasks_multiband_MergeDetectionsTask_Example

    @section pipe_tasks_multiBand_MergeDetectionsTask_Purpose Description

    Command-line task that merges sources detected in coadds of exposures obtained with different filters.

    To perform photometry consistently across coadds in multiple filter bands, we create a master catalog of
    sources from all bands by merging the sources (peaks & footprints) detected in each coadd, while keeping
    track of which band each source originates in.

    The catalog merge is performed by @ref getMergedSourceCatalog. Spurious peaks detected around bright
    objects are culled as described in @ref CullPeaksConfig_.

    @par Inputs:
        deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
    @par Outputs:
        deepCoadd_mergeDet{tract,patch}: SourceCatalog (only parent Footprints)

    @section pipe_tasks_multiBand_MergeDetectionsTask_Init Task initialisation

    @copydoc \_\_init\_\_

    @section pipe_tasks_multiBand_MergeDetectionsTask_Run Invoking the Task

    @copydoc run

    @section pipe_tasks_multiBand_MergeDetectionsTask_Config Configuration parameters

    See @ref MergeDetectionsConfig_

    @section pipe_tasks_multiBand_MergeDetectionsTask_Debug Debug variables

    The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a flag
    @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py files.

    MergeDetectionsTask has no debug variables.

    @section pipe_tasks_multiband_MergeDetectionsTask_Example A complete example of using MergeDetectionsTask

    MergeDetectionsTask is meant to be run after detecting sources in coadds generated for the chosen subset
    of the available bands.
    The purpose of the task is to merge sources (peaks & footprints) detected in the coadds generated from the
    chosen subset of filters.
    Subsequent tasks in the multi-band processing procedure will deblend the generated master list of sources
    and, eventually, perform forced photometry.
    Command-line usage of MergeDetectionsTask expects data references for all the coadds to be processed.
    A list of the available optional arguments can be obtained by calling mergeCoaddDetections.py with the
    `--help` command line argument:
    @code
    mergeCoaddDetections.py --help
    @endcode

    To demonstrate usage of the MergeDetectionsTask in the larger context of multi-band processing, we
    will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
    step 5 at @ref pipeTasks_multiBand, one may merge the catalogs of sources from each coadd as follows:
    @code
    mergeCoaddDetections.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I^HSC-R
    @endcode
    This will merge the HSC-I and HSC-R band parent source catalogs and write the results to
    `$CI_HSC_DIR/DATA/deepCoadd-results/merged/0/5,4/mergeDet-0-5,4.fits`.

    The next step in the multi-band processing procedure is
    @ref MeasureMergedCoaddSourcesTask_ "MeasureMergedCoaddSourcesTask".
    """
    ConfigClass = MergeDetectionsConfig
    RunnerClass = MergeSourcesRunner
    _DefaultName = "mergeCoaddDetections"
    inputDataset = "det"
    outputDataset = "mergeDet"
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        return makeMergeArgumentParser(cls._DefaultName, cls.inputDataset)

    def getInputSchema(self, butler=None, schema=None):
        return getInputSchema(self, butler, schema)

    def __init__(self, butler=None, schema=None, initInputs=None, **kwargs):
        """!
        @brief Initialize the merge detections task.

        A @ref FootprintMergeList_ "FootprintMergeList" will be used to
        merge the source catalogs.

        @param[in] schema     the schema of the detection catalogs used as input to this one
        @param[in] butler     a butler used to read the input schema from disk, if schema is None
        @param[in] initInputs This is a PipelineTask-only argument that holds all inputs passed in
                              through the PipelineTask middleware
        @param[in] **kwargs   keyword arguments to be passed to CmdLineTask.__init__

        The task will set its own self.schema attribute to the schema of the output merged catalog.
        """
        super().__init__(**kwargs)
        if initInputs is not None:
            schema = initInputs['schema'].schema

        self.makeSubtask("skyObjects")
        self.schema = self.getInputSchema(butler=butler, schema=schema)

        filterNames = [getShortFilterName(name) for name in self.config.priorityList]
        filterNames += [self.config.skyFilterName]
        self.merged = afwDetect.FootprintMergeList(self.schema, filterNames)

    def getInitOutputDatasets(self):
        return {"outputSchema": afwTable.SourceCatalog(self.schema),
                "outputPeakSchema": afwDetect.PeakCatalog(self.merged.getPeakSchema())}

    def runDataRef(self, patchRefList):
        catalogs = dict(readCatalog(self, patchRef) for patchRef in patchRefList)
        skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRefList[0])
        idFactory = self.makeIdFactory(patchRefList[0])
        skySeed = patchRefList[0].get(self.config.coaddName + "MergedCoaddId")
        mergeCatalogStruct = self.run(catalogs, skyInfo, idFactory, skySeed)
        self.write(patchRefList[0], mergeCatalogStruct.outputCatalog)

    def adaptArgsAndRun(self, inputData, inputDataIds, outputDataIds, butler):
        inputData["skySeed"] = 0
        inputData["idFactory"] = afwTable.IdFactory.makeSimple()
        catalogDict = {dataId['abstract_filter']: cat for dataId, cat in zip(inputDataIds['catalogs'],
                                                                             inputData['catalogs'])}
        inputData['catalogs'] = catalogDict
        skyMap = inputData.pop('skyMap')
        # Use the first dataId to find the tract and patch being worked on
        tractNumber = inputDataIds['catalogs'][0]['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(inputDataIds['catalogs'][0]['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputData['skyInfo'] = skyInfo

        return self.run(**inputData)

    def run(self, catalogs, skyInfo, idFactory, skySeed):
        r"""!
        @brief Merge multiple catalogs.

        After ordering the catalogs and filters in priority order,
        @ref getMergedSourceCatalog of the @ref FootprintMergeList_ "FootprintMergeList" created by
        @ref \_\_init\_\_ is used to perform the actual merging. Finally, @ref cullPeaks is used to remove
        garbage peaks detected around bright objects.

        @param[out] mergedList
        """
        # Convert distances from arcseconds to tract (pixel) coordinates
        tractWcs = skyInfo.wcs
        peakDistance = self.config.minNewPeak / tractWcs.getPixelScale().asArcseconds()
        samePeakDistance = self.config.maxSamePeak / tractWcs.getPixelScale().asArcseconds()
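        # For example (illustrative numbers only, not tied to any particular camera): with
        # minNewPeak = 1.0 arcsec and a coadd pixel scale of 0.2 arcsec/pixel, peakDistance is
        # 1.0 / 0.2 = 5 pixels, i.e. a peak in a lower-priority band must lie more than 5 pixels
        # from every already-merged peak before a new peak record is created.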

        # Put catalogs and filters in priority order
        orderedCatalogs = [catalogs[band] for band in self.config.priorityList if band in catalogs.keys()]
        orderedBands = [getShortFilterName(band) for band in self.config.priorityList
                        if band in catalogs.keys()]

        mergedList = self.merged.getMergedSourceCatalog(orderedCatalogs, orderedBands, peakDistance,
                                                        self.schema, idFactory,
                                                        samePeakDistance)

        # Add extra sources that correspond to blank sky
        skySourceFootprints = self.getSkySourceFootprints(mergedList, skyInfo, skySeed)
        if skySourceFootprints:
            key = mergedList.schema.find("merge_footprint_%s" % self.config.skyFilterName).key
            for foot in skySourceFootprints:
                s = mergedList.addNew()
                s.setFootprint(foot)
                s.set(key, True)

        # Sort peaks from brightest to faintest
        for record in mergedList:
            record.getFootprint().sortPeaks()
        self.log.info("Merged to %d sources" % len(mergedList))
        # Attempt to remove garbage peaks
        self.cullPeaks(mergedList)
        return Struct(outputCatalog=mergedList)
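
    # A worked illustration of the kept-peak criterion in cullPeaks below (the config values here
    # are hypothetical, not asserted defaults): with cullPeaks.rankSufficient = 20,
    # nBandsSufficient = 2, rankConsidered = 30 and rankNormalizedConsidered = 0.7, a peak at
    # rank 25 in a family of 40 peaks, detected in only one band, is kept because 25 < 30 and
    # 25 < 0.7 * 40 = 28; the same peak in a family of 30 would be culled, since 25 < 0.7 * 30 = 21
    # is false and neither of the first two clauses applies.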

    def cullPeaks(self, catalog):
        """!
        @brief Attempt to remove garbage peaks (mostly on the outskirts of large blends).

        @param[in] catalog Source catalog
        """
        keys = [item.key for item in self.merged.getPeakSchema().extract("merge_peak_*").values()]
        assert len(keys) > 0, "Error finding flags that associate peaks with their detection bands."
        totalPeaks = 0
        culledPeaks = 0
        for parentSource in catalog:
            # Make a list copy so we can clear the attached PeakCatalog and append the peaks we keep
            # (which is easier than deleting as we iterate).
            keptPeaks = parentSource.getFootprint().getPeaks()
            oldPeaks = list(keptPeaks)
            keptPeaks.clear()
            familySize = len(oldPeaks)
            totalPeaks += familySize
            for rank, peak in enumerate(oldPeaks):
                if ((rank < self.config.cullPeaks.rankSufficient) or
                        (sum([peak.get(k) for k in keys]) >= self.config.cullPeaks.nBandsSufficient) or
                        (rank < self.config.cullPeaks.rankConsidered and
                         rank < self.config.cullPeaks.rankNormalizedConsidered * familySize)):
                    keptPeaks.append(peak)
                else:
                    culledPeaks += 1
        self.log.info("Culled %d of %d peaks" % (culledPeaks, totalPeaks))

    def getSchemaCatalogs(self):
        """!
        Return a dict of empty catalogs for each catalog dataset produced by this task.

        @param[out] dictionary of empty catalogs
        """
        mergeDet = afwTable.SourceCatalog(self.schema)
        peak = afwDetect.PeakCatalog(self.merged.getPeakSchema())
        return {self.config.coaddName + "Coadd_mergeDet": mergeDet,
                self.config.coaddName + "Coadd_peak": peak}

    def getSkySourceFootprints(self, mergedList, skyInfo, seed):
        """!
        @brief Return a list of Footprints of sky objects which don't overlap with anything in mergedList

        @param mergedList  The merged Footprints from all the input bands
        @param skyInfo     A description of the patch
        @param seed        Seed for the random number generator
        """
        mask = afwImage.Mask(skyInfo.patchInfo.getOuterBBox())
        detected = mask.getPlaneBitMask("DETECTED")
        for s in mergedList:
            s.getFootprint().spans.setMask(mask, detected)

        footprints = self.skyObjects.run(mask, seed)

        # Convert the peaks to the merged peak schema so the sky-object flag can be set on them
        schema = self.merged.getPeakSchema()
        mergeKey = schema.find("merge_peak_%s" % self.config.skyFilterName).key
        converted = []
        for oldFoot in footprints:
            assert len(oldFoot.getPeaks()) == 1, "Should be a single peak only"
            peak = oldFoot.getPeaks()[0]
            newFoot = afwDetect.Footprint(oldFoot.spans, schema)
            newFoot.addPeak(peak.getFx(), peak.getFy(), peak.getPeakValue())
            newFoot.getPeaks()[0].set(mergeKey, True)
            converted.append(newFoot)

        return converted

    def write(self, patchRef, catalog):
        """!
        @brief Write the output.

        @param[in] patchRef  data reference for patch
        @param[in] catalog   catalog

        We write as the dataset provided by the 'outputDataset' class variable.
        """
        patchRef.put(catalog, self.config.coaddName + "Coadd_" + self.outputDataset)
        # The filter is not part of the data ID for the dataset being saved, so drop it from the
        # log message even though the butler would simply ignore it.
        mergeDataId = patchRef.dataId.copy()
        del mergeDataId["filter"]
        self.log.info("Wrote merged catalog: %s" % (mergeDataId,))

    def writeMetadata(self, dataRefList):
        """!
        @brief No metadata to write, and not sure how to write it for a list of dataRefs.
        """
        pass