import lsst.afw.image as afwImage
import lsst.afw.math as afwMath
import lsst.afw.table as afwTable
import lsst.pipe.base.connectionTypes as cT
from lsst.daf.base import PropertyList
from lsst.pex.config import Config, Field, ConfigurableField
from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner,
                            PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader
from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
from lsst.meas.deblender import SourceDeblendTask
from lsst.meas.extensions.scarlet import ScarletDeblendTask
from lsst.skymap import BaseSkyMap
from .coaddBase import ExistingCoaddDataIdContainer, getSkyInfo
from .fakes import BaseFakeSourcesTask
from .propagateVisitFlags import PropagateVisitFlagsTask
from .scaleVariance import ScaleVarianceTask
from .setPrimaryFlags import SetPrimaryFlagsTask
from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask
from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask
from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs
from .multiBandUtils import getInputSchema, getShortFilterName, readCatalog, _makeMakeIdFactory
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask

"""
* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
* deepCoadd_mergeDet: merged detections (tract, patch)
* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
* deepCoadd_ref: reference sources (tract, patch)
All of these have associated *_schema catalogs that require no data ID and hold no records.

In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
the mergeDet, meas, and ref dataset Footprints:
* deepCoadd_peak_schema
"""

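# The sketch below is illustrative only and is not used by the pipeline: it shows how the
# coadd data products listed above might be fetched with a Gen2 butler.  The tract, patch
# and band values are hypothetical placeholders.
def _exampleReadCoaddProducts(butler, tract=0, patch="5,4", band="HSC-I"):
    """Illustrative sketch: read the per-band and merged coadd catalogs for one patch."""
    det = butler.get("deepCoadd_det", tract=tract, patch=patch, filter=band)
    mergeDet = butler.get("deepCoadd_mergeDet", tract=tract, patch=patch)
    meas = butler.get("deepCoadd_meas", tract=tract, patch=patch, filter=band)
    ref = butler.get("deepCoadd_ref", tract=tract, patch=patch)
    return det, mergeDet, meas, ref

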
class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )

class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """!
    @anchor DetectCoaddSourcesConfig_

    @brief Configuration parameters for the DetectCoaddSourcesTask
    """
    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    doInsertFakes = Field(dtype=bool, default=False,
                          doc="Run fake sources injection task")
    insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
                                    doc="Injection of fake sources for testing "
                                        "purposes (must be retargeted)")
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    def setDefaults(self):
        super().setDefaults()
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        # Coadds are background-subtracted, so do not re-estimate the background during detection
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True  # Suppress large footprints that overwhelm the deblender

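# Minimal sketch (illustrative values, not recommendations) of tweaking this config in
# Python; equivalent overrides could be placed in a config file passed to the command-line
# task.  The threshold value below is a hypothetical example.
def _exampleDetectConfig():
    """Sketch: build a DetectCoaddSourcesConfig and adjust a couple of the defaults."""
    config = DetectCoaddSourcesConfig()
    config.doScaleVariance = True          # rescale the coadd variance plane before detection
    config.detection.thresholdValue = 5.0  # detection threshold, in units set by thresholdType
    return config

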
class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
    """!
    @anchor DetectCoaddSourcesTask_

    @brief Detect sources on a coadd

    @section pipe_tasks_multiBand_Contents Contents

      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
      - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description

    Command-line task that detects sources on a coadd of exposures obtained with a single filter.

    Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
    properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
    in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
    propagate the full covariance matrix -- but it is simple and works well in practice.

    After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref
    SourceDetectionTask_ "detection" subtask.

    Inputs:
        deepCoadd{tract,patch,filter}: ExposureF
    Outputs:
        deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
        @n deepCoadd_calexp{tract,patch,filter}: Variance-scaled, background-subtracted input exposure
        @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList

    DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
    You can retarget this subtask if you wish.

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization

    @copydoc \_\_init\_\_

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task

    @copydoc run

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters

    See @ref DetectCoaddSourcesConfig_ "DetectCoaddSourcesConfig"

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables

    The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
    flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
    files.

    DetectCoaddSourcesTask has no debug variables of its own because it delegates all the work to
    @ref SourceDetectionTask_ "SourceDetectionTask"; see the documentation for
    @ref SourceDetectionTask_ "SourceDetectionTask" for further information.

    @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example of using
    DetectCoaddSourcesTask

    DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose
    of the task is to update the background, detect all sources in a single band and generate a set of
    parent footprints. Subsequent tasks in the multi-band processing procedure will merge sources across
    bands and, eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects
    a data reference to the coadd to be processed. A list of the available optional arguments can be
    obtained by calling detectCoaddSources.py with the `--help` command line argument:

        detectCoaddSources.py --help

    To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
    will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
    steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:

        detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I

    This will process the HSC-I band data. The results are written to
    `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.

    It is also necessary to run:

        detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R

    to generate the source catalogs for the HSC-R band required by the next step in the multi-band
    processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".
    (A short Python sketch of calling the task's run method directly follows this class.)
    """
    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig
    getSchemaCatalogs = _makeGetSchemaCatalogs("det")
    makeIdFactory = _makeMakeIdFactory("CoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                               ContainerClass=ExistingCoaddDataIdContainer)
        return parser

    def __init__(self, schema=None, **kwargs):
        """!
        @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):

        @param[in] schema: initial schema for the output catalog, modified in place to include all
            fields set by this task. If None, the source minimal schema will be used.
        @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
        """
        super().__init__(**kwargs)
        if schema is None:
            schema = afwTable.SourceTable.makeMinimalSchema()
        if self.config.doInsertFakes:
            self.makeSubtask("insertFakes")
        self.schema = schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")

        self.detectionSchema = afwTable.SourceCatalog(self.schema)

    def runDataRef(self, patchRef):
        """!
        @brief Run detection on a coadd.

        Invokes @ref run and then uses @ref write to output the results.

        @param[in] patchRef: data reference for patch
        """
        if self.config.hasFakes:
            exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True)
        else:
            exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
        expId = int(patchRef.get(self.config.coaddName + "CoaddId"))
        results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
        self.write(results, patchRef)
        return results

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch_band", returnMaxBits=True)
        inputs["idFactory"] = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
        inputs["expId"] = packedId
        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)

    def run(self, exposure, idFactory, expId):
        """!
        @brief Run detection on an exposure.

        First scale the variance plane to match the observed variance
        using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
        detect sources.

        @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled,
            depending on configuration).
        @param[in] idFactory: IdFactory to set source identifiers
        @param[in] expId: Exposure identifier (integer) for RNG seed

        @return a pipe.base.Struct with fields
        - outputSources: catalog of detections
        - outputBackgrounds: list of backgrounds
        - outputExposure: the input exposure, possibly variance-scaled and background-subtracted
        """
        if self.config.doScaleVariance:
            varScale = self.scaleVariance.run(exposure.maskedImage)
            exposure.getMetadata().add("VARIANCE_SCALE", varScale)
        backgrounds = afwMath.BackgroundList()
        if self.config.doInsertFakes:
            self.insertFakes.run(exposure, background=backgrounds)
        table = afwTable.SourceTable.make(self.schema, idFactory)
        detections = self.detection.run(table, exposure, expId=expId)
        sources = detections.sources
        fpSets = detections.fpSets
        if hasattr(fpSets, "background") and fpSets.background:
            for bg in fpSets.background:
                backgrounds.append(bg)
        return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)

    def write(self, results, patchRef):
        """!
        @brief Write out results from run.

        @param[in] results: Struct returned from run
        @param[in] patchRef: data reference for patch
        """
        coaddName = self.config.coaddName + "Coadd"
        patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
        patchRef.put(results.outputSources, coaddName + "_det")
        if self.config.hasFakes:
            patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp")
        else:
            patchRef.put(results.outputExposure, coaddName + "_calexp")

class DeblendCoaddSourcesConfig(Config):
    """DeblendCoaddSourcesConfig

    Configuration parameters for the `DeblendCoaddSourcesTask`.
    """
    singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
                                          doc="Deblend sources separately in each band")
    multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
                                         doc="Deblend sources simultaneously across bands")
    simultaneous = Field(dtype=bool, default=False, doc="Simultaneously deblend all bands?")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    hasFakes = Field(dtype=bool, default=False,
                     doc="Should be set to True if fake sources have been inserted into the input data.")

    def setDefaults(self):
        Config.setDefaults(self)
        self.singleBandDeblend.propagateAllPeaks = True

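# Sketch (illustrative only) of choosing the deblending mode: with `simultaneous=True` the
# task uses the multi-band (scarlet) deblender configured above, otherwise the single-band
# deblender is used on each coadd separately.
def _exampleDeblendConfig(useScarlet=True):
    """Sketch: build a DeblendCoaddSourcesConfig selecting the deblending mode."""
    config = DeblendCoaddSourcesConfig()
    config.simultaneous = useScarlet
    return config

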
368 """Task runner for the `MergeSourcesTask`
370 Required because the run method requires a list of
371 dataRefs rather than a single dataRef.
374 def getTargetList(parsedCmd, **kwargs):
375 """Provide a list of patch references for each patch, tract, filter combo.
382 Keyword arguments passed to the task
387 List of tuples, where each tuple is a (dataRef, kwargs) pair.
389 refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
390 kwargs[
"psfCache"] = parsedCmd.psfCache
391 return [(list(p.values()), kwargs)
for t
in refDict.values()
for p
in t.values()]
class DeblendCoaddSourcesTask(CmdLineTask):
    """Deblend the sources in a merged catalog

    Deblend sources from the master catalog in each coadd.
    This can either be done separately in each band using the HSC-SDSS deblender
    (`DeblendCoaddSourcesTask.config.simultaneous==False`)
    or use SCARLET to simultaneously fit the blend in all bands
    (`DeblendCoaddSourcesTask.config.simultaneous==True`).
    The task will set its own `self.schema` attribute to the `Schema` of the
    output deblended catalog.
    This will include all fields from the input `Schema`, as well as additional fields
    from the deblender.

    Parameters
    ----------
    butler : `Butler`
        Butler used to read the input schemas from disk or
        construct the reference catalog loader, if `schema` or `peakSchema` is `None`.
    schema : `Schema`
        The schema of the merged detection catalog as an input to this task.
    peakSchema : `Schema`
        The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog.
    """
    ConfigClass = DeblendCoaddSourcesConfig
    RunnerClass = DeblendCoaddSourcesRunner
    _DefaultName = "deblendCoaddSources"
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_calexp",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
                               ContainerClass=ExistingCoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser

    def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
        CmdLineTask.__init__(self, **kwargs)
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        if peakSchema is None:
            assert butler is not None, "Neither butler nor peakSchema is defined"
            peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema

        if self.config.simultaneous:
            self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
        else:
            self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)

    def getSchemaCatalogs(self):
        """Return a dict of empty catalogs for each catalog dataset produced by this task.

        Returns
        -------
        result : `dict`
            Dictionary of empty catalogs, with catalog names as keys.
        """
        catalog = afwTable.SourceCatalog(self.schema)
        return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
                self.config.coaddName + "Coadd_deblendedModel": catalog}

    def runDataRef(self, patchRefList, psfCache=100):
        """Deblend each source simultaneously or separately
        (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
        Set `is-primary` and related flags.
        Propagate flags from individual visits.
        Write the deblended sources out.

        Parameters
        ----------
        patchRefList : `list`
            List of data references for each filter.
        psfCache : `int`
            Size of the CoaddPsf cache used when deblending band by band.
        """
        if self.config.hasFakes:
            coaddType = "fakes_" + self.config.coaddName
        else:
            coaddType = self.config.coaddName

        if self.config.simultaneous:
            # Use the multi-band (scarlet) deblender to fit all bands at once
            filters = []
            exposures = []
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                filters.append(patchRef.dataId["filter"])
                exposures.append(exposure)
            # The merged detection catalog is the same for all bands
            sources = self.readSources(patchRef)
            exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
            fluxCatalogs, templateCatalogs = self.multiBandDeblend.run(exposure, sources)
            for n in range(len(patchRefList)):
                fluxCat = fluxCatalogs if fluxCatalogs is None else fluxCatalogs[filters[n]]
                self.write(patchRefList[n], fluxCat, templateCatalogs[filters[n]])
        else:
            # Deblend each band separately with the single-band deblender
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                exposure.getPsf().setCacheCapacity(psfCache)
                sources = self.readSources(patchRef)
                self.singleBandDeblend.run(exposure, sources)
                self.write(patchRef, sources)

    def readSources(self, dataRef):
        """Read merged catalog

        Read the catalog of merged detections and create a catalog
        suitable for deblending in this band.

        Parameters
        ----------
        dataRef : data reference
            Data reference for catalog of merged detections

        Returns
        -------
        sources : `SourceCatalog`
            List of sources in merged catalog

        We also need to add columns to hold the measurements we're about to make
        so we can measure in-place.
        """
        merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
        self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
        idFactory = self.makeIdFactory(dataRef)
        for s in merged:
            idFactory.notify(s.getId())
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(merged, self.schemaMapper)
        return sources

    def write(self, dataRef, flux_sources, template_sources=None):
        """Write the source catalog(s)

        Parameters
        ----------
        dataRef : Data Reference
            Reference to the output catalog.
        flux_sources : `SourceCatalog`
            Flux conserved sources to write to file.
            If using the single band deblender, this is the catalog
            generated.
        template_sources : `SourceCatalog`
            Source catalog using the multiband template models
            as the model flux.
        """
        if flux_sources is not None:
            assert not self.config.simultaneous or self.config.multiBandDeblend.conserveFlux
            dataRef.put(flux_sources, self.config.coaddName + "Coadd_deblendedFlux")
            self.log.info("Wrote %d sources: %s" % (len(flux_sources), dataRef.dataId))
        if template_sources is not None:
            assert self.config.multiBandDeblend.saveTemplates
            dataRef.put(template_sources, self.config.coaddName + "Coadd_deblendedModel")
            self.log.info("Wrote %d sources: %s" % (len(template_sources), dataRef.dataId))

567 """Write the metadata produced from processing the data.
571 List of Butler data references used to write the metadata.
572 The metadata is written to dataset type `CmdLineTask._getMetadataName`.
574 for dataRef
in dataRefList:
576 metadataName = self._getMetadataName()
577 if metadataName
is not None:
578 dataRef.put(self.getFullMetadata(), metadataName)
579 except Exception
as e:
580 self.log.warn(
"Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)
582 def getExposureId(self, dataRef):
583 """Get the ExposureId from a data reference
585 return int(dataRef.get(self.config.coaddName +
"CoaddId"))
class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep"}):
    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use. "
             "If the single band deblender was used this should be 'deblendedFlux'. "
             "If the multi-band deblender was used this should be 'deblendedModel', "
             "or 'deblendedFlux' if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'."),
        name="{inputCoaddName}Coadd_deblendedFlux",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        if config.doPropagateFlags is False:
            self.inputs -= set(("visitCatalogs",))

        if config.doMatchSources is False:
            self.outputs -= set(("matchResult",))

        if config.doWriteMatchesDenormalized is False:
            self.outputs -= set(("denormMatches",))

class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """!
    @anchor MeasureMergedCoaddSourcesConfig_

    @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
    """
    inputCatalog = Field(dtype=str, default="deblendedFlux",
                         doc=("Name of the input catalog to use. "
                              "If the single band deblender was used this should be 'deblendedFlux'. "
                              "If the multi-band deblender was used this should be 'deblendedModel'. "
                              "If no deblending was performed this should be 'mergeDet'."))
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool, default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str, default="raise",
    )
    doApCorr = Field(
        dtype=bool, default=True,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool, default=True,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )
    hasFakes = Field(
        dtype=bool, default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    @property
    def refObjLoader(self):
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_LocalPhotoCalib']
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE']

    def validate(self):
        super().validate()
        refCatGen2 = getattr(self.refObjLoader, "ref_dataset_name", None)
        if refCatGen2 is not None and refCatGen2 != self.connections.refCat:
            raise ValueError(
                f"Gen2 ({refCatGen2}) and Gen3 ({self.connections.refCat}) reference catalogs "
                f"are different. These options must be kept in sync until Gen2 is retired."
            )

class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
    """Get the psfCache setting into MeasureMergedCoaddSourcesTask"""
    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache)

class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask):
    """!
    @anchor MeasureMergedCoaddSourcesTask_

    @brief Deblend sources from master catalog in each coadd separately and measure.

    @section pipe_tasks_multiBand_Contents Contents

      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
      - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description

    Command-line task that uses peaks and footprints from a master catalog to perform deblending and
    measurement in each coadd.

    Given a master input catalog of sources (peaks and footprints) or deblender outputs
    (including a HeavyFootprint in each band), measure each source on the
    coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
    consistent set of child sources.

    The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
    properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
    flags are propagated to the coadd sources.

    Optionally, we can match the coadd sources to an external reference catalog.

    Inputs:
        deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
        @n deepCoadd_calexp{tract,patch,filter}: ExposureF
    Outputs:
        deepCoadd_meas{tract,patch,filter}: SourceCatalog

    MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:

    <DL>
      <DT> @ref SingleFrameMeasurementTask_ "measurement"
      <DD> Measure source properties of deblended sources.</DD>
      <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
      <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that
      are not at the edge of the field and that have either not been deblended or are the children of
      deblended sources.</DD>
      <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
      <DD> Propagate flags set in individual visits to the coadd.</DD>
      <DT> @ref DirectMatchTask_ "match"
      <DD> Match input sources to a reference catalog (optional).</DD>
    </DL>

    These subtasks may be retargeted as required.

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization

    @copydoc \_\_init\_\_

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task

    @copydoc run

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters

    See @ref MeasureMergedCoaddSourcesConfig_

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables

    The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
    flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
    files.

    MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
    the various sub-tasks. See the documentation for individual sub-tasks for more information.

    @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
    MeasureMergedCoaddSourcesTask

    After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
    The next stage in the multi-band processing procedure will merge these measurements into a suitable
    catalog for driving forced photometry.

    Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds to be
    processed. A list of the available optional arguments can be obtained by calling measureCoaddSources.py
    with the `--help` command line argument:

        measureCoaddSources.py --help

    To demonstrate usage of the MeasureMergedCoaddSourcesTask in the larger context of multi-band
    processing, we will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming
    one has finished step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in
    the HSC-I band coadd as follows:

        measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I

    This will process the HSC-I band data. The results are written to
    `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`.

    It is also necessary to run

        measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R

    to generate the source catalogs for the HSC-R band required by the next step in the multi-band
    procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
    (A short sketch of invoking the task from Python follows this class.)
    """
    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig
    RunnerClass = MeasureMergedCoaddSourcesRunner
    getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_calexp",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                               ContainerClass=ExistingCoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser

    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        """!
        @brief Initialize the task.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
        @param[in] schema: the schema of the merged detection catalog used as input to this one
        @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
        @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external reference
            catalog. May be None if the loader can be constructed from the butler argument or all steps
            requiring a reference catalog are disabled.
        @param[in] butler: a butler used to read the input schemas from disk or construct the reference
            catalog loader, if schema or peakSchema or refObjLoader is None

        The task will set its own self.schema attribute to the schema of the output measurement catalog.
        This will include all fields from the input schema, as well as additional fields for all the
        measurements.
        """
        super().__init__(**kwargs)
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        if initInputs is not None:
            schema = initInputs['inputSchema'].schema
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        self.outputSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        if self.config.doMatchSources:
            refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                                 inputs.pop('refCat'), config=self.config.refObjLoader)
            self.match.setRefObjLoader(refObjLoader)

        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch", returnMaxBits=True)
        inputs['exposureId'] = packedId
        idFactory = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)

        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(inputs.pop('inputCatalog'), self.schemaMapper)
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # attach algorithm metadata to the output catalog
        inputs['sources'] = sources

        skyMap = inputs.pop('skyMap')
        tractNumber = inputRefs.inputCatalog.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(inputRefs.inputCatalog.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            # Keep only the visit catalogs corresponding to CCDs that went into this coadd
            ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
            visitKey = ccdInputs.schema.find("visit").key
            ccdKey = ccdInputs.schema.find("ccd").key
            inputVisitIds = set()
            ccdRecordsWcs = {}
            for ccdRecord in ccdInputs:
                visit = ccdRecord.get(visitKey)
                ccd = ccdRecord.get(ccdKey)
                inputVisitIds.add((visit, ccd))
                ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

            inputCatalogsToKeep = []
            inputCatalogWcsUpdate = []
            for i, dataRef in enumerate(inputRefs.visitCatalogs):
                key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                if key in inputVisitIds:
                    inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                    inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
            inputs['visitCatalogs'] = inputCatalogsToKeep
            inputs['wcsUpdates'] = inputCatalogWcsUpdate
            inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)

    def runDataRef(self, patchRef, psfCache=100):
        """!
        @brief Deblend and measure.

        @param[in] patchRef: Patch reference.

        Set 'is-primary' and related flags. Propagate flags
        from individual visits. Optionally match the sources to a reference catalog and write the matches.
        Finally, write the deblended sources and measurements out.
        """
        if self.config.hasFakes:
            coaddType = "fakes_" + self.config.coaddName
        else:
            coaddType = self.config.coaddName
        exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
        exposure.getPsf().setCacheCapacity(psfCache)
        sources = self.readSources(patchRef)
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # attach algorithm metadata to the output catalog
        skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)

        if self.config.doPropagateFlags:
            ccdInputs = self.propagateFlags.getCcdInputs(exposure)
        else:
            ccdInputs = None

        results = self.run(exposure=exposure, sources=sources,
                           ccdInputs=ccdInputs,
                           skyInfo=skyInfo, butler=patchRef.getButler(),
                           exposureId=self.getExposureId(patchRef))

        if self.config.doMatchSources:
            self.writeMatches(patchRef, results)
        self.write(patchRef, results.outputSources)

    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
            butler=None):
        """Run measurement algorithms on the input exposure, and optionally populate the
        resulting catalog with extra information.

        Parameters
        ----------
        exposure : `lsst.afw.exposure.Exposure`
            The input exposure on which measurements are to be performed
        sources : `lsst.afw.table.SourceCatalog`
            A catalog built from the results of merged detections, or
            deblender outputs.
        skyInfo : `lsst.pipe.base.Struct`
            A struct containing information about the position of the input exposure within
            a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
        exposureId : `int` or `bytes`
            packed unique number or bytes unique to the input exposure
        ccdInputs : `lsst.afw.table.ExposureCatalog`
            Catalog containing information on the individual visits which went into making
            the coadd exposure
        visitCatalogs : list of `lsst.afw.table.SourceCatalogs` or `None`
            A list of source catalogs corresponding to measurements made on the individual
            visits which went into the input exposure. If None and butler is `None` then
            the task cannot propagate visit flags to the output catalog.
        wcsUpdates : list of `lsst.afw.geom.SkyWcs` or `None`
            If visitCatalogs is not `None` this should be a list of wcs objects which correspond
            to the input visits. Used to put all coordinates on a common system. If `None` and
            butler is `None` then the task cannot propagate visit flags to the output catalog.
        butler : `lsst.daf.butler.Butler` or `lsst.daf.persistence.Butler`
            Either a gen2 or gen3 butler used to load visit catalogs

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results of running measurement task. Will contain the catalog in the
            sources attribute. Optionally will have results of matching to a
            reference catalog in the matchResults attribute, and denormalized
            matches in the denormMatches attribute.
        """
        self.measurement.run(sources, exposure, exposureId=exposureId)

        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=sources,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        # Ensure the catalog is contiguous before running catalog-level plugins
        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
                                 patchInfo=skyInfo.patchInfo, includeDeblend=self.deblended)
        if self.config.doPropagateFlags:
            self.propagateFlags.run(butler, sources, ccdInputs, exposure.getWcs(), visitCatalogs, wcsUpdates)

        results = Struct()

        if self.config.doMatchSources:
            matchResult = self.match.run(sources, exposure.getInfo().getFilter().getName())
            matches = afwTable.packMatches(matchResult.matches)
            matches.table.setMetadata(matchResult.matchMeta)
            results.matchResult = matches
            if self.config.doWriteMatchesDenormalized:
                if matchResult.matches:
                    denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
                else:
                    self.log.warn("No matches, so generating dummy denormalized matches file")
                    denormMatches = afwTable.BaseCatalog(afwTable.Schema())
                    denormMatches.setMetadata(PropertyList())
                    denormMatches.getMetadata().add("COMMENT",
                                                    "This catalog is empty because no matches were found.")
                results.denormMatches = denormMatches

        results.outputSources = sources
        return results

    def readSources(self, dataRef):
        """!
        @brief Read input sources.

        @param[in] dataRef: Data reference for catalog of merged detections
        @return List of sources in merged catalog

        We also need to add columns to hold the measurements we're about to make
        so we can measure in-place.
        """
        merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
        self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
        idFactory = self.makeIdFactory(dataRef)
        for s in merged:
            idFactory.notify(s.getId())
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(merged, self.schemaMapper)
        return sources

    def writeMatches(self, dataRef, results):
        """!
        @brief Write matches of the sources to the astrometric reference catalog.

        @param[in] dataRef: data reference
        @param[in] results: results struct from run method
        """
        if hasattr(results, "matchResult"):
            dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch")
        if hasattr(results, "denormMatches"):
            dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull")

    def write(self, dataRef, sources):
        """!
        @brief Write the source catalog.

        @param[in] dataRef: data reference
        @param[in] sources: source catalog
        """
        dataRef.put(sources, self.config.coaddName + "Coadd_meas")
        self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId))

    def getExposureId(self, dataRef):
        return int(dataRef.get(self.config.coaddName + "CoaddId"))
