from lsst.pipe.base import CmdLineTask, Struct, TaskRunner, ArgumentParser, ButlerInitializedTaskRunner
from lsst.pex.config import Config, Field, ListField, ConfigurableField, RangeField, ConfigField
from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
* deepCoadd_mergeDet: merged detections (tract, patch)
* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
* deepCoadd_ref: reference sources (tract, patch)

All of these have associated *_schema catalogs that require no data ID and hold no records.

In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
the mergeDet, meas, and ref dataset Footprints:
* deepCoadd_peak_schema


def _makeGetSchemaCatalogs(datasetSuffix):
    """Construct a getSchemaCatalogs instance method

    These are identical for most of the classes here, so we'll consolidate the code here.

    datasetSuffix: Suffix of dataset name, e.g., "src" for "deepCoadd_src"
    """

    def getSchemaCatalogs(self):
        """Return a dict of empty catalogs for each catalog dataset produced by this task."""
        src = afwTable.SourceCatalog(self.schema)
        if hasattr(self, "algMetadata"):
            src.getTable().setMetadata(self.algMetadata)
        return {self.config.coaddName + "Coadd_" + datasetSuffix: src}
    return getSchemaCatalogs
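# Usage sketch (illustrative, mirroring how the concrete tasks below consume this
# factory): the returned function is assigned as a class attribute, so each task
# gets a getSchemaCatalogs() bound method for its own dataset suffix, e.g.
#
#     class SomeCoaddTask(CmdLineTask):           # hypothetical task
#         getSchemaCatalogs = _makeGetSchemaCatalogs("det")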
def _makeMakeIdFactory(datasetName):
    """Construct a makeIdFactory instance method

    These are identical for all the classes here, so this consolidates the code here.

    datasetName: Dataset name without the coadd name prefix, e.g., "CoaddId" for "deepCoaddId"
    """

    def makeIdFactory(self, dataRef):
        """Return an IdFactory for setting the detection identifiers

        The actual parameters used in the IdFactory are provided by
        the butler (through the provided data reference).
        """
        expBits = dataRef.get(self.config.coaddName + datasetName + "_bits")
        expId = int(dataRef.get(self.config.coaddName + datasetName))
        return afwTable.IdFactory.makeSource(expId, 64 - expBits)
    return makeIdFactory
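# Illustrative sketch (plain Python, not the LSST API): how the 64-bit source IDs
# produced by the factory above are laid out. The butler-provided "<datasetName>_bits"
# value is the number of bits reserved for the coadd/exposure ID; the remaining low
# bits hold the per-source counter assigned by the IdFactory. The helper is hypothetical.
def _exampleSourceIdLayout(expId, expBits, sourceIndex):
    """Pack a coadd/exposure ID and a per-source counter into one 64-bit integer."""
    sourceBits = 64 - expBits          # bits left over for the per-source counter
    assert sourceIndex < (1 << sourceBits), "source counter would overflow its field"
    return (expId << sourceBits) | sourceIndex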
97 """Given a longer, camera-specific filter name (e.g. "HSC-I") return its shorthand name ("i"). 101 return afwImage.Filter(name).getFilterProperty().getName()
@anchor DetectCoaddSourcesConfig_

@brief Configuration parameters for the DetectCoaddSourcesTask

doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
doInsertFakes = Field(dtype=bool, default=False,
                      doc="Run fake sources injection task")
insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
                                doc="Injection of fake sources for testing "
                                    "purposes (must be retargeted)")
def setDefaults(self):
    Config.setDefaults(self)
    self.detection.thresholdType = "pixel_stdev"
    self.detection.reEstimateBackground = False
    self.detection.background.useApprox = False
    self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
    self.detection.doTempWideBackground = True


@anchor DetectCoaddSourcesTask_

@brief Detect sources on a coadd

@section pipe_tasks_multiBand_Contents Contents

 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
 - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
 - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example

@section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description

Command-line task that detects sources on a coadd of exposures obtained with a single filter.

Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
propagate the full covariance matrix -- but it is simple and works well in practice.

After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref
SourceDetectionTask_ "detection" subtask.

deepCoadd{tract,patch,filter}: ExposureF

deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
@n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input
@n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList

DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
You can retarget this subtask if you wish.

@section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization

@copydoc \_\_init\_\_

@section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task

@section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters

See @ref DetectCoaddSourcesConfig_ "DetectCoaddSourcesConfig"

@section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables

The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
files.

DetectCoaddSourcesTask has no debug variables of its own because it delegates all the work to
@ref SourceDetectionTask_ "SourceDetectionTask"; see the documentation for
@ref SourceDetectionTask_ "SourceDetectionTask" for further information.

@section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example of using DetectCoaddSourcesTask

DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose of
the task is to update the background, detect all sources in a single band and generate a set of parent
footprints. Subsequent tasks in the multi-band processing procedure will merge sources across bands and,
eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects a data
reference to the coadd to be processed. A list of the available optional arguments can be obtained by
calling detectCoaddSources.py with the `--help` command line argument:

    detectCoaddSources.py --help

To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:

    detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I

This will process the HSC-I band data. The results are written to
`$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.

It is also necessary to run:

    detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R

to generate the source catalogs for the HSC-R band required by the next step in the multi-band
processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".

_DefaultName = "detectCoaddSources"
ConfigClass = DetectCoaddSourcesConfig
getSchemaCatalogs = _makeGetSchemaCatalogs("det")
makeIdFactory = _makeMakeIdFactory("CoaddId")
def _makeArgumentParser(cls):
    parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                           ContainerClass=ExistingCoaddDataIdContainer)

def __init__(self, schema=None, **kwargs):
    """!
    @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.

    Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):

    @param[in] schema: initial schema for the output catalog, modified in place to include all
        fields set by this task. If None, the source minimal schema will be used.
    @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
    """
    CmdLineTask.__init__(self, **kwargs)
    if schema is None:
        schema = afwTable.SourceTable.makeMinimalSchema()
    if self.config.doInsertFakes:
        self.makeSubtask("insertFakes")
    self.schema = schema
    self.makeSubtask("detection", schema=self.schema)
    if self.config.doScaleVariance:
        self.makeSubtask("scaleVariance")
def runDataRef(self, patchRef):
    """!
    @brief Run detection on a coadd.

    Invokes @ref run and then uses @ref write to output the results.

    @param[in] patchRef: data reference for patch
    """
    exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
    expId = int(patchRef.get(self.config.coaddName + "CoaddId"))
    results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
    self.write(exposure, results, patchRef)
def run(self, exposure, idFactory, expId):
    """!
    @brief Run detection on an exposure.

    First scale the variance plane to match the observed variance
    using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
    detect sources.

    @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled,
        depending on configuration).
    @param[in] idFactory: IdFactory to set source identifiers
    @param[in] expId: Exposure identifier (integer) for RNG seed

    @return a pipe.base.Struct with fields
    - sources: catalog of detections
    - backgrounds: list of backgrounds
    """
    if self.config.doScaleVariance:
        varScale = self.scaleVariance.run(exposure.maskedImage)
        exposure.getMetadata().add("variance_scale", varScale)
    backgrounds = afwMath.BackgroundList()
    if self.config.doInsertFakes:
        self.insertFakes.run(exposure, background=backgrounds)
    table = afwTable.SourceTable.make(self.schema, idFactory)
    detections = self.detection.makeSourceCatalog(table, exposure, expId=expId)
    sources = detections.sources
    fpSets = detections.fpSets
    if hasattr(fpSets, "background") and fpSets.background:
        for bg in fpSets.background:
            backgrounds.append(bg)
    return Struct(sources=sources, backgrounds=backgrounds)
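# Illustrative sketch (plain numpy, not the ScaleVarianceTask API): one simple way to
# estimate the kind of variance-scale factor recorded in the "variance_scale" metadata
# above is to compare the pixel scatter actually observed in the image against the
# variance plane propagated by coaddition. The helper below is hypothetical.
def _exampleVarianceScale(image, variance):
    """Return the factor by which the variance plane underestimates the observed noise."""
    import numpy
    good = numpy.isfinite(image) & numpy.isfinite(variance) & (variance > 0)
    observed = numpy.var(image[good])          # crude empirical noise estimate (ignores objects)
    predicted = numpy.median(variance[good])   # typical value of the propagated variance plane
    return observed / predicted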
def write(self, exposure, results, patchRef):
    """!
    @brief Write out results from runDetection.

    @param[in] exposure: Exposure to write out
    @param[in] results: Struct returned from runDetection
    @param[in] patchRef: data reference for patch
    """
    coaddName = self.config.coaddName + "Coadd"
    patchRef.put(results.backgrounds, coaddName + "_calexp_background")
    patchRef.put(results.sources, coaddName + "_det")
    patchRef.put(exposure, coaddName + "_calexp")
328 """Task runner for the `MergeSourcesTask` 330 Required because the run method requires a list of 331 dataRefs rather than a single dataRef. 334 """Provide a butler to the Task constructor. 341 Tuple of a list of data references and kwargs (un-used) 346 Thrown if both `parsedCmd` & `args` are `None` 348 if parsedCmd
is not None:
349 butler = parsedCmd.butler
350 elif args
is not None:
351 dataRefList, kwargs = args
352 butler = dataRefList[0].getButler()
354 raise RuntimeError(
"Neither parsedCmd or args specified")
355 return self.TaskClass(config=self.config, log=self.log, butler=butler)
359 """Build a hierarchical dictionary of patch references 369 A reference dictionary of the form {patch: {tract: {filter: dataRef}}} 374 Thrown when multiple references are provided for the same 375 combination of tract, patch and filter 378 for ref
in parsedCmd.id.refList:
379 tract = ref.dataId[
"tract"]
380 patch = ref.dataId[
"patch"]
381 filter = ref.dataId[
"filter"]
382 if tract
not in refDict:
384 if patch
not in refDict[tract]:
385 refDict[tract][patch] = {}
386 if filter
in refDict[tract][patch]:
387 raise RuntimeError(
"Multiple versions of %s" % (ref.dataId,))
388 refDict[tract][patch][filter] = ref
393 """Provide a list of patch references for each patch, tract, filter combo. 400 Keyword arguments passed to the task 405 List of tuples, where each tuple is a (dataRef, kwargs) pair. 407 refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
408 return [(list(p.values()), kwargs)
for t
in refDict.values()
for p
in t.values()]
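# Illustrative sketch (plain Python, hypothetical data references): how the nested
# dictionary built by buildRefDict is flattened into per-patch argument tuples for the
# task runner. Each inner {filter: dataRef} dict becomes one list of data references.
def _exampleFlattenRefDict(refDict, kwargs):
    """Flatten {tract: {patch: {filter: dataRef}}} into [(dataRefList, kwargs), ...]."""
    return [(list(filters.values()), kwargs)
            for patches in refDict.values()
            for filters in patches.values()]

# e.g. _exampleFlattenRefDict({0: {"5,4": {"HSC-I": "refI", "HSC-R": "refR"}}}, {})
#      returns [(["refI", "refR"], {})]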
@anchor MergeSourcesConfig_

@brief Configuration for merging sources.

priorityList = ListField(dtype=str, default=[],
                         doc="Priority-ordered list of bands for the merge.")
coaddName = Field(dtype=str, default="deep", doc="Name of coadd")

def validate(self):
    Config.validate(self)
    if not self.priorityList:
        raise RuntimeError("No priority list provided")
@anchor MergeSourcesTask_

@brief A base class for merging source catalogs.

Merging detections (MergeDetectionsTask) and merging measurements (MergeMeasurementsTask) are
so similar that it makes sense to re-use the code, in the form of this abstract base class.

NB: Do not use this class directly. Instead use one of the child classes that inherit from
MergeSourcesTask such as @ref MergeDetectionsTask_ "MergeDetectionsTask" or @ref MergeMeasurementsTask_
"MergeMeasurementsTask"

Sub-classes should set the following class variables:
* `_DefaultName`: name of Task
* `inputDataset`: name of dataset to read
* `outputDataset`: name of dataset to write
* `getSchemaCatalogs` to the result of `_makeGetSchemaCatalogs(outputDataset)`

In addition, sub-classes must implement the run method.

ConfigClass = MergeSourcesConfig
RunnerClass = MergeSourcesRunner
getSchemaCatalogs = None

def _makeArgumentParser(cls):
    """!
    @brief Create a suitable ArgumentParser.

    We will use the ArgumentParser to provide a list of data
    references for patches; the RunnerClass will sort them into lists
    of data references for the same patch.
    """
    parser.add_id_argument("--id", "deepCoadd_" + cls.inputDataset,
                           ContainerClass=ExistingCoaddDataIdContainer,
                           help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i")
def getInputSchema(self, butler=None, schema=None):
    """!
    @brief Obtain the input schema either directly or from a butler reference.

    @param[in] butler butler reference to obtain the input schema from
    @param[in] schema the input schema
    """
    if schema is None:
        assert butler is not None, "Neither butler nor schema specified"
        schema = butler.get(self.config.coaddName + "Coadd_" + self.inputDataset + "_schema",
                            immediate=True).schema
    return schema
def __init__(self, butler=None, schema=None, **kwargs):
    """!
    @brief Initialize the task.

    Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
    @param[in] schema the schema of the detection catalogs used as input to this one
    @param[in] butler a butler used to read the input schema from disk, if schema is None

    Derived classes should use the getInputSchema() method to handle the additional
    arguments and retrieve the actual input schema.
    """
    CmdLineTask.__init__(self, **kwargs)
def runDataRef(self, patchRefList):
    """!
    @brief Merge coadd sources from multiple bands. Calls @ref `run` which must be defined in
    subclasses that inherit from MergeSourcesTask.

    @param[in] patchRefList list of data references for each filter
    """
    catalogs = dict(self.readCatalog(patchRef) for patchRef in patchRefList)
    mergedCatalog = self.run(catalogs, patchRefList[0])
    self.write(patchRefList[0], mergedCatalog)
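# Illustrative note on the pattern above: readCatalog() returns a (filterName, catalog)
# tuple per patch reference, so dict(...) assembles the {filter: catalog} mapping that
# run() expects. With hypothetical values:
#
#     pairs = [("HSC-I", "catalog_I"), ("HSC-R", "catalog_R")]
#     catalogs = dict(pairs)   # {"HSC-I": "catalog_I", "HSC-R": "catalog_R"}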
def readCatalog(self, patchRef):
    """!
    @brief Read input catalog.

    We read the input dataset provided by the 'inputDataset' class variable.

    @param[in] patchRef data reference for patch
    @return tuple consisting of the filter name and the catalog
    """
    filterName = patchRef.dataId["filter"]
    catalog = patchRef.get(self.config.coaddName + "Coadd_" + self.inputDataset, immediate=True)
    self.log.info("Read %d sources for filter %s: %s" % (len(catalog), filterName, patchRef.dataId))
    return filterName, catalog
def run(self, catalogs, patchRef):
    """!
    @brief Merge multiple catalogs. This function must be defined in all subclasses that inherit from
    MergeSourcesTask.

    @param[in] catalogs dict mapping filter name to source catalog
    @return merged catalog
    """
    raise NotImplementedError()
def write(self, patchRef, catalog):
    """!
    @brief Write the output.

    @param[in] patchRef data reference for patch
    @param[in] catalog catalog

    We write as the dataset provided by the 'outputDataset' class variable.
    """
    patchRef.put(catalog, self.config.coaddName + "Coadd_" + self.outputDataset)
    mergeDataId = patchRef.dataId.copy()
    del mergeDataId["filter"]
    self.log.info("Wrote merged catalog: %s" % (mergeDataId,))
def writeMetadata(self, dataRefList):
    """!
    @brief No metadata to write, and not sure how to write it for a list of dataRefs.
    """
    pass


class CullPeaksConfig(Config):
    """!
    @anchor CullPeaksConfig_

    @brief Configuration for culling garbage peaks after merging footprints.

    Peaks may also be culled after detection or during deblending; this configuration object
    only deals with culling after merging Footprints.

    These cuts are based on three quantities:
    - nBands: the number of bands in which the peak was detected
    - peakRank: the position of the peak within its family, sorted from brightest to faintest.
    - peakRankNormalized: the peak rank divided by the total number of peaks in the family.

    The formula that identifies peaks to cull is:

      nBands < nBandsSufficient
      AND (peakRank >= rankSufficient)
      AND (peakRank >= rankConsidered OR peakRankNormalized >= rankNormalizedConsidered)

    To disable peak culling, simply set nBandsSufficient=1.
    """

    nBandsSufficient = RangeField(dtype=int, default=2, min=1,
                                  doc="Always keep peaks detected in this many bands")
    rankSufficient = RangeField(dtype=int, default=20, min=1,
                                doc="Always keep this many peaks in each family")
    rankConsidered = RangeField(dtype=int, default=30, min=1,
                                doc=("Keep peaks with less than this rank that also match the "
                                     "rankNormalizedConsidered condition."))
    rankNormalizedConsidered = RangeField(dtype=float, default=0.7, min=0.0,
                                          doc=("Keep peaks with less than this normalized rank that"
                                               " also match the rankConsidered condition."))
@anchor MergeDetectionsConfig_

@brief Configuration parameters for the MergeDetectionsTask.

minNewPeak = Field(dtype=float, default=1,
                   doc="Minimum distance from closest peak to create a new one (in arcsec).")
maxSamePeak = Field(dtype=float, default=0.3,
                    doc="When adding new catalogs to the merge, all peaks less than this distance "
                        "(in arcsec) to an existing peak will be flagged as detected in that catalog.")
cullPeaks = ConfigField(dtype=CullPeaksConfig, doc="Configuration for how to cull peaks.")
skyFilterName = Field(dtype=str, default="sky",
                      doc="Name of `filter' used to label sky objects (e.g. flag merge_peak_sky is set)\n"
                          "(N.b. should be in MergeMeasurementsConfig.pseudoFilterList)")
skyObjects = ConfigurableField(target=SkyObjectsTask, doc="Generate sky objects")

def setDefaults(self):
    MergeSourcesConfig.setDefaults(self)
@anchor MergeDetectionsTask_

@brief Merge coadd detections from multiple bands.

@section pipe_tasks_multiBand_Contents Contents

 - @ref pipe_tasks_multiBand_MergeDetectionsTask_Purpose
 - @ref pipe_tasks_multiBand_MergeDetectionsTask_Init
 - @ref pipe_tasks_multiBand_MergeDetectionsTask_Run
 - @ref pipe_tasks_multiBand_MergeDetectionsTask_Config
 - @ref pipe_tasks_multiBand_MergeDetectionsTask_Debug
 - @ref pipe_tasks_multiband_MergeDetectionsTask_Example

@section pipe_tasks_multiBand_MergeDetectionsTask_Purpose Description

Command-line task that merges sources detected in coadds of exposures obtained with different filters.

To perform photometry consistently across coadds in multiple filter bands, we create a master catalog of
sources from all bands by merging the sources (peaks & footprints) detected in each coadd, while keeping
track of which band each source originates in.

The catalog merge is performed by @ref getMergedSourceCatalog. Spurious peaks detected around bright
objects are culled as described in @ref CullPeaksConfig_.

deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)

deepCoadd_mergeDet{tract,patch}: SourceCatalog (only parent Footprints)

MergeDetectionsTask subclasses @ref MergeSourcesTask_ "MergeSourcesTask".

@section pipe_tasks_multiBand_MergeDetectionsTask_Init Task initialisation

@copydoc \_\_init\_\_

@section pipe_tasks_multiBand_MergeDetectionsTask_Run Invoking the Task

@section pipe_tasks_multiBand_MergeDetectionsTask_Config Configuration parameters

See @ref MergeDetectionsConfig_

@section pipe_tasks_multiBand_MergeDetectionsTask_Debug Debug variables

The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a flag @c -d
to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py files.

MergeDetectionsTask has no debug variables.

@section pipe_tasks_multiband_MergeDetectionsTask_Example A complete example of using MergeDetectionsTask

MergeDetectionsTask is meant to be run after detecting sources in coadds generated for the chosen subset
of the available bands.
The purpose of the task is to merge sources (peaks & footprints) detected in the coadds generated from the
chosen subset of filters.
Subsequent tasks in the multi-band processing procedure will deblend the generated master list of sources
and, eventually, perform forced photometry.
Command-line usage of MergeDetectionsTask expects data references for all the coadds to be processed.
A list of the available optional arguments can be obtained by calling mergeCoaddDetections.py with the
`--help` command line argument:

    mergeCoaddDetections.py --help

To demonstrate usage of the MergeDetectionsTask in the larger context of multi-band processing, we
will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
step 5 at @ref pipeTasks_multiBand, one may merge the catalogs of sources from each coadd as follows:

    mergeCoaddDetections.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I^HSC-R

This will merge the HSC-I & -R band parent source catalogs and write the results to
`$CI_HSC_DIR/DATA/deepCoadd-results/merged/0/5,4/mergeDet-0-5,4.fits`.

The next step in the multi-band processing procedure is
@ref MeasureMergedCoaddSourcesTask_ "MeasureMergedCoaddSourcesTask"

ConfigClass = MergeDetectionsConfig
_DefaultName = "mergeCoaddDetections"
outputDataset = "mergeDet"
makeIdFactory = _makeMakeIdFactory("MergedCoaddId")
def __init__(self, butler=None, schema=None, **kwargs):
    """!
    @brief Initialize the merge detections task.

    A @ref FootprintMergeList_ "FootprintMergeList" will be used to
    merge the source catalogs.

    Additional keyword arguments (forwarded to MergeSourcesTask.__init__):
    @param[in] schema the schema of the detection catalogs used as input to this one
    @param[in] butler a butler used to read the input schema from disk, if schema is None
    @param[in] **kwargs keyword arguments to be passed to MergeSourcesTask.__init__

    The task will set its own self.schema attribute to the schema of the output merged catalog.
    """
    MergeSourcesTask.__init__(self, butler=butler, schema=schema, **kwargs)
    self.makeSubtask("skyObjects")
    filterNames = [getShortFilterName(name) for name in self.config.priorityList]
    filterNames += [self.config.skyFilterName]
def run(self, catalogs, patchRef):
    """!
    @brief Merge multiple catalogs.

    After ordering the catalogs and filters in priority order,
    @ref getMergedSourceCatalog of the @ref FootprintMergeList_ "FootprintMergeList" created by
    @ref \_\_init\_\_ is used to perform the actual merging. Finally, @ref cullPeaks is used to remove
    garbage peaks detected around bright objects.

    @param[out] mergedList
    """
    skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)
    tractWcs = skyInfo.wcs
    peakDistance = self.config.minNewPeak / tractWcs.getPixelScale().asArcseconds()
    samePeakDistance = self.config.maxSamePeak / tractWcs.getPixelScale().asArcseconds()

    orderedCatalogs = [catalogs[band] for band in self.config.priorityList if band in catalogs.keys()]
    orderedBands = [getShortFilterName(band) for band in self.config.priorityList
                    if band in catalogs.keys()]

    mergedList = self.merged.getMergedSourceCatalog(orderedCatalogs, orderedBands, peakDistance,
                                                    self.schema, self.makeIdFactory(patchRef),
                                                    samePeakDistance)

    skySeed = patchRef.get(self.config.coaddName + "MergedCoaddId")
    skySourceFootprints = self.getSkySourceFootprints(mergedList, skyInfo, skySeed)
    if skySourceFootprints:
        key = mergedList.schema.find("merge_footprint_%s" % self.config.skyFilterName).key
        for foot in skySourceFootprints:
            s = mergedList.addNew()
            s.setFootprint(foot)
            s.set(key, True)

    for record in mergedList:
        record.getFootprint().sortPeaks()
    self.log.info("Merged to %d sources" % len(mergedList))
def cullPeaks(self, catalog):
    """!
    @brief Attempt to remove garbage peaks (mostly on the outskirts of large blends).

    @param[in] catalog Source catalog
    """
    keys = [item.key for item in self.merged.getPeakSchema().extract("merge_peak_*").values()]
    assert len(keys) > 0, "Error finding flags that associate peaks with their detection bands."
    totalPeaks = 0
    culledPeaks = 0
    for parentSource in catalog:
        # Take a copy of the peak list, clear the catalog's PeakCatalog in place, and
        # re-append only the peaks that pass the keep conditions.
        keptPeaks = parentSource.getFootprint().getPeaks()
        oldPeaks = list(keptPeaks)
        keptPeaks.clear()
        familySize = len(oldPeaks)
        totalPeaks += familySize
        for rank, peak in enumerate(oldPeaks):
            if ((rank < self.config.cullPeaks.rankSufficient) or
                    (sum([peak.get(k) for k in keys]) >= self.config.cullPeaks.nBandsSufficient) or
                    (rank < self.config.cullPeaks.rankConsidered and
                     rank < self.config.cullPeaks.rankNormalizedConsidered * familySize)):
                keptPeaks.append(peak)
            else:
                culledPeaks += 1
    self.log.info("Culled %d of %d peaks" % (culledPeaks, totalPeaks))
def getSchemaCatalogs(self):
    """!
    Return a dict of empty catalogs for each catalog dataset produced by this task.

    @param[out] dictionary of empty catalogs
    """
    mergeDet = afwTable.SourceCatalog(self.schema)
    peak = afwDetect.PeakCatalog(self.merged.getPeakSchema())
    return {self.config.coaddName + "Coadd_mergeDet": mergeDet,
            self.config.coaddName + "Coadd_peak": peak}
def getSkySourceFootprints(self, mergedList, skyInfo, seed):
    """!
    @brief Return a list of Footprints of sky objects which don't overlap with anything in mergedList

    @param mergedList  The merged Footprints from all the input bands
    @param skyInfo     A description of the patch
    @param seed        Seed for the random number generator
    """
    mask = afwImage.Mask(skyInfo.patchInfo.getOuterBBox())
    detected = mask.getPlaneBitMask("DETECTED")
    for s in mergedList:
        s.getFootprint().spans.setMask(mask, detected)

    footprints = self.skyObjects.run(mask, seed)

    schema = self.merged.getPeakSchema()
    mergeKey = schema.find("merge_peak_%s" % self.config.skyFilterName).key
    converted = []
    for oldFoot in footprints:
        assert len(oldFoot.getPeaks()) == 1, "Should be a single peak only"
        peak = oldFoot.getPeaks()[0]
        newFoot = afwDetect.Footprint(oldFoot.spans, schema)
        newFoot.addPeak(peak.getFx(), peak.getFy(), peak.getPeakValue())
        newFoot.getPeaks()[0].set(mergeKey, True)
        converted.append(newFoot)
    return converted
854 """DeblendCoaddSourcesConfig 856 Configuration parameters for the `DeblendCoaddSourcesTask`. 858 singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
859 doc=
"Deblend sources separately in each band")
860 multiBandDeblend = ConfigurableField(target=MultibandDeblendTask,
861 doc=
"Deblend sources simultaneously across bands")
862 simultaneous = Field(dtype=bool, default=
False, doc=
"Simultaneously deblend all bands?")
863 coaddName = Field(dtype=str, default=
"deep", doc=
"Name of coadd")
866 Config.setDefaults(self)
871 """Task runner for the `MergeSourcesTask` 873 Required because the run method requires a list of 874 dataRefs rather than a single dataRef. 878 """Provide a list of patch references for each patch, tract, filter combo. 885 Keyword arguments passed to the task 890 List of tuples, where each tuple is a (dataRef, kwargs) pair. 892 refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
893 kwargs[
"psfCache"] = parsedCmd.psfCache
894 return [(list(p.values()), kwargs)
for t
in refDict.values()
for p
in t.values()]
898 """Deblend the sources in a merged catalog 900 Deblend sources from master catalog in each coadd. 901 This can either be done separately in each band using the HSC-SDSS deblender 902 (`DeblendCoaddSourcesTask.config.simultaneous==False`) 903 or use SCARLET to simultaneously fit the blend in all bands 904 (`DeblendCoaddSourcesTask.config.simultaneous==True`). 905 The task will set its own `self.schema` atribute to the `Schema` of the 906 output deblended catalog. 907 This will include all fields from the input `Schema`, as well as additional fields 910 `pipe.tasks.multiband.DeblendCoaddSourcesTask Description 911 --------------------------------------------------------- 917 Butler used to read the input schemas from disk or 918 construct the reference catalog loader, if `schema` or `peakSchema` or 920 The schema of the merged detection catalog as an input to this task. 922 The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog 924 ConfigClass = DeblendCoaddSourcesConfig
925 RunnerClass = DeblendCoaddSourcesRunner
926 _DefaultName =
"deblendCoaddSources" 927 makeIdFactory = _makeMakeIdFactory(
"MergedCoaddId")
def _makeArgumentParser(cls):
    parser.add_id_argument("--id", "deepCoadd_calexp",
                           help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
                           ContainerClass=ExistingCoaddDataIdContainer)
    parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
    CmdLineTask.__init__(self, **kwargs)
    if schema is None:
        assert butler is not None, "Neither butler nor schema is defined"
        schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
    if peakSchema is None:
        assert butler is not None, "Neither butler nor peakSchema is defined"
        peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema

    if self.config.simultaneous:
        self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
    else:
        self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)
956 """Return a dict of empty catalogs for each catalog dataset produced by this task. 961 Dictionary of empty catalogs, with catalog names as keys. 963 catalog = afwTable.SourceCatalog(self.
schema)
964 return {self.config.coaddName +
"Coadd_deblendedFlux": catalog,
965 self.config.coaddName +
"Coadd_deblendedModel": catalog}
def runDataRef(self, patchRefList, psfCache=100):
    """Deblend each source simultaneously or separately
    (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
    Set `is-primary` and related flags.
    Propagate flags from individual visits.
    Write the deblended sources out.

    patchRefList: List of data references for each filter
    """
    if self.config.simultaneous:
        # Deblend simultaneously across bands, using a multi-band exposure built
        # from the per-filter coadds.
        filters = []
        exposures = []
        for patchRef in patchRefList:
            exposure = patchRef.get(self.config.coaddName + "Coadd_calexp", immediate=True)
            filters.append(patchRef.dataId["filter"])
            exposures.append(exposure)
        sources = self.readSources(patchRef)
        exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
        fluxCatalogs, templateCatalogs = self.multiBandDeblend.run(exposure, sources)
        for n in range(len(patchRefList)):
            self.write(patchRefList[n], fluxCatalogs[filters[n]], templateCatalogs[filters[n]])
    else:
        # Deblend each band separately
        for patchRef in patchRefList:
            exposure = patchRef.get(self.config.coaddName + "Coadd_calexp", immediate=True)
            exposure.getPsf().setCacheCapacity(psfCache)
            sources = self.readSources(patchRef)
            self.singleBandDeblend.run(exposure, sources)
            self.write(patchRef, sources)
1005 """Read merged catalog 1007 Read the catalog of merged detections and create a catalog 1012 dataRef: data reference 1013 Data reference for catalog of merged detections 1017 sources: `SourceCatalog` 1018 List of sources in merged catalog 1020 We also need to add columns to hold the measurements we're about to make 1021 so we can measure in-place. 1023 merged = dataRef.get(self.config.coaddName +
"Coadd_mergeDet", immediate=
True)
1024 self.log.info(
"Read %d detections: %s" % (len(merged), dataRef.dataId))
1027 idFactory.notify(s.getId())
1028 table = afwTable.SourceTable.make(self.
schema, idFactory)
1029 sources = afwTable.SourceCatalog(table)
def write(self, dataRef, flux_sources, template_sources=None):
    """Write the source catalog(s)

    dataRef: Data Reference
        Reference to the output catalog.
    flux_sources: `SourceCatalog`
        Flux conserved sources to write to file.
        If using the single band deblender, this is the catalog generated.
    template_sources: `SourceCatalog`
        Source catalog using the multiband template models
    """
    if flux_sources is not None:
        assert not self.config.simultaneous or self.config.multiBandDeblend.conserveFlux
        dataRef.put(flux_sources, self.config.coaddName + "Coadd_deblendedFlux")
    if template_sources is not None:
        assert self.config.multiBandDeblend.saveTemplates
        dataRef.put(template_sources, self.config.coaddName + "Coadd_deblendedModel")
    self.log.info("Wrote %d sources: %s" % (len(flux_sources), dataRef.dataId))
1062 """Write the metadata produced from processing the data. 1066 List of Butler data references used to write the metadata. 1067 The metadata is written to dataset type `CmdLineTask._getMetadataName`. 1069 for dataRef
in dataRefList:
1071 metadataName = self._getMetadataName()
1072 if metadataName
is not None:
1073 dataRef.put(self.getFullMetadata(), metadataName)
1074 except Exception
as e:
1075 self.log.warn(
"Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)
1078 """Get the ExposureId from a data reference 1080 return int(dataRef.get(self.config.coaddName +
"CoaddId"))
@anchor MeasureMergedCoaddSourcesConfig_

@brief Configuration parameters for the MeasureMergedCoaddSourcesTask

inputCatalog = Field(dtype=str, default="deblendedFlux",
                     doc=("Name of the input catalog to use. "
                          "If the single band deblender was used this should be 'deblendedFlux'. "
                          "If the multi-band deblender was used this should be 'deblendedModel'. "
                          "If no deblending was performed this should be 'mergeDet'"))
measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
doPropagateFlags = Field(
    dtype=bool, default=True,
    doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
)
propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
doWriteMatchesDenormalized = Field(
    doc=("Write reference matches in denormalized format? "
         "This format uses more disk space, but is more convenient to read."),
)
coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
checkUnitsParseStrict = Field(
    doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
)
doApCorr = Field(
    doc="Apply aperture corrections"
)
applyApCorr = ConfigurableField(
    target=ApplyApCorrTask,
    doc="Subtask to apply aperture corrections"
)
doRunCatalogCalculation = Field(
    doc='Run catalogCalculation task'
)
catalogCalculation = ConfigurableField(
    target=CatalogCalculationTask,
    doc="Subtask to run catalogCalculation plugins on catalog"
)

def setDefaults(self):
    Config.setDefaults(self)
    self.measurement.plugins.names |= ['base_InputCount', 'base_Variance']
    self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE']
    self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE']


"""Get the psfCache setting into MeasureMergedCoaddSourcesTask"""

@staticmethod
def getTargetList(parsedCmd, **kwargs):
    return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache)
@anchor MeasureMergedCoaddSourcesTask_

@brief Deblend sources from master catalog in each coadd separately and measure.

@section pipe_tasks_multiBand_Contents Contents

 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
 - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
 - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example

@section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description

Command-line task that uses peaks and footprints from a master catalog to perform deblending and
measurement in each coadd.

Given a master input catalog of sources (peaks and footprints) or deblender outputs
(including a HeavyFootprint in each band), measure each source on the
coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
consistent set of child sources.

The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
flags are propagated to the coadd sources.

Optionally, we can match the coadd sources to an external reference catalog.

deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
@n deepCoadd_calexp{tract,patch,filter}: ExposureF

deepCoadd_meas{tract,patch,filter}: SourceCatalog

tract, patch, filter

MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:

<DL>
  <DT> @ref SingleFrameMeasurementTask_ "measurement"
  <DD> Measure source properties of deblended sources.</DD>
  <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
  <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are
  not at the edge of the field and that have either not been deblended or are the children of deblended
  sources.</DD>
  <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
  <DD> Propagate flags set in individual visits to the coadd.</DD>
  <DT> @ref DirectMatchTask_ "match"
  <DD> Match input sources to a reference catalog (optional).</DD>
</DL>

These subtasks may be retargeted as required.

@section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization

@copydoc \_\_init\_\_

@section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task

@section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters

See @ref MeasureMergedCoaddSourcesConfig_

@section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables

The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
files.

MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
the various sub-tasks. See the documentation for individual sub-tasks for more information.

@section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
MeasureMergedCoaddSourcesTask

After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
The next stage in the multi-band processing procedure will merge these measurements into a suitable
catalog for driving forced photometry.

Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds to be processed.
A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
`--help` command line argument:

    measureCoaddSources.py --help

To demonstrate usage of the MeasureMergedCoaddSourcesTask in the larger context of multi-band processing,
we will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I band
coadd as follows:

    measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I

This will process the HSC-I band data. The results are written in
`$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`.

It is also necessary to run

    measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R

to generate the source catalogs for the HSC-R band required by the next step in the multi-band
procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".

_DefaultName = "measureCoaddSources"
ConfigClass = MeasureMergedCoaddSourcesConfig
RunnerClass = MeasureMergedCoaddSourcesRunner
getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
makeIdFactory = _makeMakeIdFactory("MergedCoaddId")
def _makeArgumentParser(cls):
    parser.add_id_argument("--id", "deepCoadd_calexp",
                           help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                           ContainerClass=ExistingCoaddDataIdContainer)
    parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, **kwargs):
    """!
    @brief Initialize the task.

    Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
    @param[in] schema: the schema of the merged detection catalog used as input to this one
    @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
    @param[in] refObjLoader: an instance of LoadReferenceObjectsTask that supplies an external reference
        catalog. May be None if the loader can be constructed from the butler argument or all steps
        requiring a reference catalog are disabled.
    @param[in] butler: a butler used to read the input schemas from disk or construct the reference
        catalog loader, if schema or peakSchema or refObjLoader is None

    The task will set its own self.schema attribute to the schema of the output measurement catalog.
    This will include all fields from the input schema, as well as additional fields for all the
    measurements.
    """
    CmdLineTask.__init__(self, **kwargs)
    self.deblended = self.config.inputCatalog.startswith("deblended")
    if schema is None:
        assert butler is not None, "Neither butler nor schema is defined"
        schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
    self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
    self.makeSubtask("setPrimaryFlags", schema=self.schema)
    if self.config.doMatchSources:
        if refObjLoader is None:
            assert butler is not None, "Neither butler nor refObjLoader is defined"
        self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
    if self.config.doPropagateFlags:
        self.makeSubtask("propagateFlags", schema=self.schema)
    self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
    if self.config.doApCorr:
        self.makeSubtask("applyApCorr", schema=self.schema)
    if self.config.doRunCatalogCalculation:
        self.makeSubtask("catalogCalculation", schema=self.schema)
def runDataRef(self, patchRef, psfCache=100):
    """!
    @brief Deblend and measure.

    @param[in] patchRef: Patch reference.

    Set 'is-primary' and related flags. Propagate flags
    from individual visits. Optionally match the sources to a reference catalog and write the matches.
    Finally, write the deblended sources and measurements out.
    """
    exposure = patchRef.get(self.config.coaddName + "Coadd_calexp", immediate=True)
    exposure.getPsf().setCacheCapacity(psfCache)
    sources = self.readSources(patchRef)
    table = sources.getTable()
    self.measurement.run(sources, exposure, exposureId=self.getExposureId(patchRef))

    if self.config.doApCorr:
        self.applyApCorr.run(
            catalog=sources,
            apCorrMap=exposure.getInfo().getApCorrMap()
        )

    # Ensure contiguity, so that downstream column-based access works.
    if not sources.isContiguous():
        sources = sources.copy(deep=True)

    if self.config.doRunCatalogCalculation:
        self.catalogCalculation.run(sources)

    skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)
    self.setPrimaryFlags.run(sources, skyInfo.skyMap, skyInfo.tractInfo, skyInfo.patchInfo)
    if self.config.doPropagateFlags:
        self.propagateFlags.run(patchRef.getButler(), sources, self.propagateFlags.getCcdInputs(exposure))
    if self.config.doMatchSources:
        self.writeMatches(patchRef, exposure, sources)
    self.write(patchRef, sources)
def readSources(self, dataRef):
    """!
    @brief Read input sources.

    @param[in] dataRef: Data reference for catalog of merged detections
    @return List of sources in merged catalog

    We also need to add columns to hold the measurements we're about to make
    so we can measure in-place.
    """
    merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
    self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
    idFactory = self.makeIdFactory(dataRef)
    for s in merged:
        idFactory.notify(s.getId())
    table = afwTable.SourceTable.make(self.schema, idFactory)
    sources = afwTable.SourceCatalog(table)
    return sources
def writeMatches(self, dataRef, exposure, sources):
    """!
    @brief Write matches of the sources to the astrometric reference catalog.

    We use the Wcs in the exposure to match sources.

    @param[in] dataRef: data reference
    @param[in] exposure: exposure with Wcs
    @param[in] sources: source catalog
    """
    result = self.match.run(sources, exposure.getInfo().getFilter().getName())
    if result.matches:
        matches = afwTable.packMatches(result.matches)
        matches.table.setMetadata(result.matchMeta)
        dataRef.put(matches, self.config.coaddName + "Coadd_measMatch")
        if self.config.doWriteMatchesDenormalized:
            denormMatches = denormalizeMatches(result.matches, result.matchMeta)
            dataRef.put(denormMatches, self.config.coaddName + "Coadd_measMatchFull")
def write(self, dataRef, sources):
    """!
    @brief Write the source catalog.

    @param[in] dataRef: data reference
    @param[in] sources: source catalog
    """
    dataRef.put(sources, self.config.coaddName + "Coadd_meas")
    self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId))

def getExposureId(self, dataRef):
    return int(dataRef.get(self.config.coaddName + "CoaddId"))
@anchor MergeMeasurementsConfig_

@brief Configuration parameters for the MergeMeasurementsTask

pseudoFilterList = ListField(dtype=str, default=["sky"],
                             doc="Names of filters which may have no associated detection\n"
                                 "(N.b. should include MergeDetectionsConfig.skyFilterName)")
snName = Field(dtype=str, default="base_PsfFlux",
               doc="Name of flux measurement for calculating the S/N when choosing the reference band.")
minSN = Field(dtype=float, default=10.,
              doc="If the S/N from the priority band is below this value (and the S/N "
                  "is larger than minSNDiff compared to the priority band), use the band with "
                  "the largest S/N as the reference band.")
minSNDiff = Field(dtype=float, default=3.,
                  doc="If the difference in S/N between another band and the priority band is larger "
                      "than this value (and the S/N in the priority band is less than minSN) "
                      "use the band with the largest S/N as the reference band")
flags = ListField(dtype=str, doc="Require that these flags, if available, are not set",
                  default=["base_PixelFlags_flag_interpolatedCenter", "base_PsfFlux_flag",
                           "ext_photometryKron_KronFlux_flag", "modelfit_CModel_flag", ])
@anchor MergeMeasurementsTask_

@brief Merge measurements from multiple bands

@section pipe_tasks_multiBand_Contents Contents

 - @ref pipe_tasks_multiBand_MergeMeasurementsTask_Purpose
 - @ref pipe_tasks_multiBand_MergeMeasurementsTask_Initialize
 - @ref pipe_tasks_multiBand_MergeMeasurementsTask_Run
 - @ref pipe_tasks_multiBand_MergeMeasurementsTask_Config
 - @ref pipe_tasks_multiBand_MergeMeasurementsTask_Debug
 - @ref pipe_tasks_multiband_MergeMeasurementsTask_Example

@section pipe_tasks_multiBand_MergeMeasurementsTask_Purpose Description

Command-line task that merges measurements from multiple bands.

Combines consistent (i.e. with the same peaks and footprints) catalogs of sources from multiple filter
bands to construct a unified catalog that is suitable for driving forced photometry. Every source is
required to have centroid, shape and flux measurements in each band.

deepCoadd_meas{tract,patch,filter}: SourceCatalog

deepCoadd_ref{tract,patch}: SourceCatalog

MergeMeasurementsTask subclasses @ref MergeSourcesTask_ "MergeSourcesTask".

@section pipe_tasks_multiBand_MergeMeasurementsTask_Initialize Task initialization

@copydoc \_\_init\_\_

@section pipe_tasks_multiBand_MergeMeasurementsTask_Run Invoking the Task

@section pipe_tasks_multiBand_MergeMeasurementsTask_Config Configuration parameters

See @ref MergeMeasurementsConfig_

@section pipe_tasks_multiBand_MergeMeasurementsTask_Debug Debug variables

The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
files.

MergeMeasurementsTask has no debug variables.

@section pipe_tasks_multiband_MergeMeasurementsTask_Example A complete example
of using MergeMeasurementsTask

MergeMeasurementsTask is meant to be run after deblending & measuring sources in every band.
The purpose of the task is to generate a catalog of sources suitable for driving forced photometry in
coadds and individual exposures.
Command-line usage of MergeMeasurementsTask expects a data reference to the coadds to be processed. A list
of the available optional arguments can be obtained by calling mergeCoaddMeasurements.py with the `--help`
command line argument:

    mergeCoaddMeasurements.py --help

To demonstrate usage of the MergeMeasurementsTask in the larger context of multi-band processing, we
will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
step 7 at @ref pipeTasks_multiBand, one may merge the catalogs generated after deblending and measuring
as follows:

    mergeCoaddMeasurements.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I^HSC-R

This will merge the HSC-I & HSC-R band catalogs. The results are written in
`$CI_HSC_DIR/DATA/deepCoadd-results/`.

_DefaultName = "mergeCoaddMeasurements"
ConfigClass = MergeMeasurementsConfig
inputDataset = "meas"
outputDataset = "ref"
getSchemaCatalogs = _makeGetSchemaCatalogs("ref")
def __init__(self, butler=None, schema=None, **kwargs):
    """!
    Initialize the task.

    Additional keyword arguments (forwarded to MergeSourcesTask.__init__):
    @param[in] schema: the schema of the detection catalogs used as input to this one
    @param[in] butler: a butler used to read the input schema from disk, if schema is None

    The task will set its own self.schema attribute to the schema of the output merged catalog.
    """
    MergeSourcesTask.__init__(self, butler=butler, schema=schema, **kwargs)
    inputSchema = self.getInputSchema(butler=butler, schema=schema)
    self.fluxKey = inputSchema.find(self.config.snName + "_flux").getKey()
    self.fluxErrKey = inputSchema.find(self.config.snName + "_fluxErr").getKey()
    self.fluxFlagKey = inputSchema.find(self.config.snName + "_flag").getKey()

    self.flagKeys = {}
    for band in self.config.priorityList:
        short = getShortFilterName(band)
        outputKey = self.schemaMapper.editOutputSchema().addField(
            "merge_measurement_%s" % short,
            type="Flag",
            doc="Flag field set if the measurements here are from the %s filter" % band
        )
        peakKey = inputSchema.find("merge_peak_%s" % short).key
        footprintKey = inputSchema.find("merge_footprint_%s" % short).key
        self.flagKeys[band] = Struct(peak=peakKey, footprint=footprintKey, output=outputKey)

    self.pseudoFilterKeys = []
    for filt in self.config.pseudoFilterList:
        try:
            self.pseudoFilterKeys.append(self.schema.find("merge_peak_%s" % filt).getKey())
        except Exception as e:
            self.log.warn("merge_peak is not set for pseudo-filter %s: %s" % (filt, e))

    self.badFlags = {}
    for flag in self.config.flags:
        try:
            self.badFlags[flag] = self.schema.find(flag).getKey()
        except KeyError as exc:
            self.log.warn("Can't find flag %s in schema: %s" % (flag, exc,))
def run(self, catalogs, patchRef):
    """!
    Merge measurement catalogs to create a single reference catalog for forced photometry

    @param[in] catalogs: the catalogs to be merged
    @param[in] patchRef: patch reference for data

    For parent sources, we choose the first band in config.priorityList for which the
    merge_footprint flag for that band is True.

    For child sources, the logic is the same, except that we use the merge_peak flags.
    """
    orderedCatalogs = [catalogs[band] for band in self.config.priorityList if band in catalogs.keys()]
    orderedKeys = [self.flagKeys[band] for band in self.config.priorityList if band in catalogs.keys()]

    mergedCatalog = afwTable.SourceCatalog(self.schema)
    mergedCatalog.reserve(len(orderedCatalogs[0]))

    idKey = orderedCatalogs[0].table.getIdKey()
    for catalog in orderedCatalogs[1:]:
        if numpy.any(orderedCatalogs[0].get(idKey) != catalog.get(idKey)):
            raise ValueError("Error in inputs to MergeCoaddMeasurements: source IDs do not match")

    for orderedRecords in zip(*orderedCatalogs):

        maxSN = 0.
        maxSNRecord = None
        maxSNFlagKeys = None
        prioritySN = 0.
        priorityRecord = None
        priorityFlagKeys = None
        hasPseudoFilter = False

        # Iterate over the per-band records for this source, keeping track of the
        # priority band and the highest-S/N band.
        for inputRecord, flagKeys in zip(orderedRecords, orderedKeys):
            parent = (inputRecord.getParent() == 0 and inputRecord.get(flagKeys.footprint))
            child = (inputRecord.getParent() != 0 and inputRecord.get(flagKeys.peak))

            if not (parent or child):
                for pseudoFilterKey in self.pseudoFilterKeys:
                    if inputRecord.get(pseudoFilterKey):
                        hasPseudoFilter = True
                        priorityRecord = inputRecord
                        priorityFlagKeys = flagKeys
                        break
                if hasPseudoFilter:
                    break

            isBad = any(inputRecord.get(flag) for flag in self.badFlags)
            if isBad or inputRecord.get(self.fluxFlagKey):
                sn = 0.
            else:
                sn = inputRecord.get(self.fluxKey) / inputRecord.get(self.fluxErrKey)
            if numpy.isnan(sn) or sn < 0.:
                sn = 0.
            if (parent or child) and priorityRecord is None:
                priorityRecord = inputRecord
                priorityFlagKeys = flagKeys
                prioritySN = sn
            if sn > maxSN:
                maxSNRecord = inputRecord
                maxSNFlagKeys = flagKeys
                maxSN = sn

        # If the priority band has a low S/N, fall back to the highest-S/N band, but
        # only when it is significantly better than the priority band.
        if hasPseudoFilter:
            bestRecord = priorityRecord
            bestFlagKeys = priorityFlagKeys
        elif (prioritySN < self.config.minSN and (maxSN - prioritySN) > self.config.minSNDiff and
              maxSNRecord is not None):
            bestRecord = maxSNRecord
            bestFlagKeys = maxSNFlagKeys
        elif priorityRecord is not None:
            bestRecord = priorityRecord
            bestFlagKeys = priorityFlagKeys

        if bestRecord is not None and bestFlagKeys is not None:
            outputRecord = mergedCatalog.addNew()
            outputRecord.assign(bestRecord, self.schemaMapper)
            outputRecord.set(bestFlagKeys.output, True)
        else:
            raise ValueError("Error in inputs to MergeCoaddMeasurements: no valid reference for %s" %
                             inputRecord.getId())

    # Sanity check: all input catalogs should have the same length as the merged catalog.
    for inputCatalog in orderedCatalogs:
        if len(mergedCatalog) != len(inputCatalog):
            raise ValueError("Mismatch between catalog sizes: %s != %s" %
                             (len(mergedCatalog), len(inputCatalog)))

    return mergedCatalog
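# Illustrative sketch (plain Python) of the reference-band rule implemented above: take
# the first band in the priority list unless its S/N is below minSN and some other band
# beats it by more than minSNDiff, in which case take the highest-S/N band. The helper
# and its arguments are hypothetical simplifications of the per-record logic.
def _exampleChooseReferenceBand(snByBand, priorityList, minSN=10., minSNDiff=3.):
    """Return the band whose measurement should serve as the reference."""
    priorityBand = next(band for band in priorityList if band in snByBand)
    prioritySN = snByBand[priorityBand]
    maxBand = max(snByBand, key=snByBand.get)
    if prioritySN < minSN and (snByBand[maxBand] - prioritySN) > minSNDiff:
        return maxBand
    return priorityBand

# e.g. _exampleChooseReferenceBand({"HSC-I": 4.0, "HSC-R": 9.0}, ["HSC-I", "HSC-R"])
#      returns "HSC-R", because the priority band's S/N of 4.0 is below 10 and is beaten
#      by more than 3.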