from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner,
                            PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
import lsst.pipe.base.connectionTypes as cT
from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
from lsst.meas.extensions.scarlet import ScarletDeblendTask
from lsst.obs.base import ExposureIdInfo
from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask
from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask
from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs
from .multiBandUtils import getInputSchema, readCatalog, _makeMakeIdFactory
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask

* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
* deepCoadd_mergeDet: merged detections (tract, patch)
* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
* deepCoadd_ref: reference sources (tract, patch)
All of these have associated *_schema catalogs that require no data ID and hold no records.

In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
the mergeDet, meas, and ref dataset Footprints:
* deepCoadd_peak_schema


class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep",
                                                      "outputCoaddName": "deep"}):
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )


class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """!
    @anchor DetectCoaddSourcesConfig_

    @brief Configuration parameters for the DetectCoaddSourcesTask
    """
    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    doInsertFakes = Field(dtype=bool, default=False,
                          doc="Run fake sources injection task")
    insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
                                    doc="Injection of fake sources for testing "
                                        "purposes (must be retargeted)")
    hasFakes = Field(dtype=bool, default=False,
                     doc="Should be set to True if fake sources have been inserted into the input data.")

    def setDefaults(self):
        super().setDefaults()
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True
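
# Example override for DetectCoaddSourcesConfig (illustrative sketch only;
# ``thresholdValue`` is a standard SourceDetectionTask config field, and the values
# shown here are not recommendations):
#
#     config = DetectCoaddSourcesConfig()
#     config.detection.thresholdValue = 5.0   # detection threshold, in sigma
#     config.doScaleVariance = False          # skip the empirical variance rescaling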


class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
    r"""!
    @anchor DetectCoaddSourcesTask_

    @brief Detect sources on a coadd

    @section pipe_tasks_multiBand_Contents Contents

      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
      - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description

    Command-line task that detects sources on a coadd of exposures obtained with a single filter.

    Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
    properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
    in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
    propagate the full covariance matrix -- but it is simple and works well in practice.

    After scaling the variance plane, we detect sources and generate footprints by delegating to the
    @ref SourceDetectionTask_ "detection" subtask.

      deepCoadd{tract,patch,filter}: ExposureF

      deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
      @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input
      @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList

    DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
    You can retarget this subtask if you wish.

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization

    @copydoc \_\_init\_\_

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters

    See @ref DetectCoaddSourcesConfig_ "DetectCoaddSourcesConfig"

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables

    The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task @endlink interface supports a
    flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py

    DetectCoaddSourcesTask has no debug variables of its own because it delegates all the work to
    @ref SourceDetectionTask_ "SourceDetectionTask"; see the documentation for
    @ref SourceDetectionTask_ "SourceDetectionTask" for further information.

    @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example of using
    DetectCoaddSourcesTask

    DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose
    of the task is to update the background, detect all sources in a single band and generate a set of
    parent footprints. Subsequent tasks in the multi-band processing procedure will merge sources across
    bands and, eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects
    a data reference to the coadd to be processed. A list of the available optional arguments can be
    obtained by calling detectCoaddSources.py with the `--help` command line argument:

      detectCoaddSources.py --help

    To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
    will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
    steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:

      detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I

    which will process the HSC-I band data. The results are written to
    `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.

    It is also necessary to run:

      detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R

    to generate the source catalogs for the HSC-R band required by the next step in the multi-band
    processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".
    """
    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig
    getSchemaCatalogs = _makeGetSchemaCatalogs("det")
    makeIdFactory = _makeMakeIdFactory("CoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                               ContainerClass=ExistingCoaddDataIdContainer)
        return parser

    def __init__(self, schema=None, **kwargs):
        """!
        @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):

        @param[in] schema: initial schema for the output catalog, modified in place to include all
                           fields set by this task. If None, the source minimal schema will be used.
        @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
        """
        super().__init__(**kwargs)
        if schema is None:
            schema = afwTable.SourceTable.makeMinimalSchema()
        if self.config.doInsertFakes:
            self.makeSubtask("insertFakes")
        self.schema = schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")
        self.detectionSchema = afwTable.SourceCatalog(self.schema)

    def runDataRef(self, patchRef):
        """!
        @brief Run detection on a coadd.

        Invokes @ref run and then uses @ref write to output the results.

        @param[in] patchRef: data reference for patch
        """
        if self.config.hasFakes:
            exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True)
        else:
            exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
        expId = int(patchRef.get(self.config.coaddName + "CoaddId"))
        results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
        self.write(results, patchRef)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch_band")
        inputs["idFactory"] = exposureIdInfo.makeSourceIdFactory()
        inputs["expId"] = exposureIdInfo.expId
        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)
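
    # Note (added commentary): ``ExposureIdInfo.fromDataId`` packs the (tract, patch,
    # band) data ID into a single integer; that integer seeds the detection RNG via
    # ``expId`` and also drives the source ``idFactory`` used below.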

    def run(self, exposure, idFactory, expId):
        """!
        @brief Run detection on an exposure.

        First scale the variance plane to match the observed variance
        using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
        detect sources.

        @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled,
                                 depending on configuration).
        @param[in] idFactory: IdFactory to set source identifiers
        @param[in] expId: Exposure identifier (integer) for RNG seed

        @return a pipe.base.Struct with fields
        - sources: catalog of detections
        - backgrounds: list of backgrounds
        """
        if self.config.doScaleVariance:
            varScale = self.scaleVariance.run(exposure.maskedImage)
            exposure.getMetadata().add("VARIANCE_SCALE", varScale)
        backgrounds = afwMath.BackgroundList()
        if self.config.doInsertFakes:
            self.insertFakes.run(exposure, background=backgrounds)
        table = afwTable.SourceTable.make(self.schema, idFactory)
        detections = self.detection.run(table, exposure, expId=expId)
        sources = detections.sources
        fpSets = detections.fpSets
        if hasattr(fpSets, "background") and fpSets.background:
            for bg in fpSets.background:
                backgrounds.append(bg)
        return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
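
    # Example (sketch, not part of the pipeline): running detection directly on an
    # in-memory coadd ``coadd`` (an lsst.afw.image.ExposureF); the id factory and
    # expId values here are placeholders:
    #
    #     task = DetectCoaddSourcesTask(config=DetectCoaddSourcesConfig())
    #     result = task.run(coadd, idFactory=afwTable.IdFactory.makeSimple(), expId=0)
    #     detections = result.outputSources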

    def write(self, results, patchRef):
        """!
        @brief Write out results from runDetection.

        @param[in] results: Struct returned from runDetection
        @param[in] patchRef: data reference for patch
        """
        coaddName = self.config.coaddName + "Coadd"
        patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
        patchRef.put(results.outputSources, coaddName + "_det")
        if self.config.hasFakes:
            patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp")
        else:
            patchRef.put(results.outputExposure, coaddName + "_calexp")


class DeblendCoaddSourcesConfig(Config):
    """DeblendCoaddSourcesConfig

    Configuration parameters for the `DeblendCoaddSourcesTask`.
    """
    singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
                                          doc="Deblend sources separately in each band")
    multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
                                         doc="Deblend sources simultaneously across bands")
    simultaneous = Field(dtype=bool, default=True,
                         doc="Simultaneously deblend all bands? "
                             "True uses `multiBandDeblend` while False uses `singleBandDeblend`")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    hasFakes = Field(dtype=bool, default=False,
                     doc="Should be set to True if fake sources have been inserted into the input data.")

    def setDefaults(self):
        Config.setDefaults(self)
        self.singleBandDeblend.propagateAllPeaks = True
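
# Note (added commentary): ``simultaneous=True`` selects the scarlet-based
# ``multiBandDeblend`` subtask, which fits each blend across all bands at once;
# ``simultaneous=False`` runs ``singleBandDeblend`` (SourceDeblendTask) independently
# in each band.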


class DeblendCoaddSourcesRunner(MergeSourcesRunner):
    """Task runner for `DeblendCoaddSourcesTask`

    Required because the run method requires a list of
    dataRefs rather than a single dataRef.
    """
    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        """Provide a list of patch references for each patch, tract, filter combo.

        Parameters
        ----------
        kwargs:
            Keyword arguments passed to the task

        Returns
        -------
        targetList: list
            List of tuples, where each tuple is a (dataRef, kwargs) pair.
        """
        refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
        kwargs["psfCache"] = parsedCmd.psfCache
        return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()]


class DeblendCoaddSourcesTask(CmdLineTask):
    """Deblend the sources in a merged catalog

    Deblend sources from master catalog in each coadd.
    This can either be done separately in each band using the HSC-SDSS deblender
    (`DeblendCoaddSourcesTask.config.simultaneous==False`)
    or use SCARLET to simultaneously fit the blend in all bands
    (`DeblendCoaddSourcesTask.config.simultaneous==True`).
    The task will set its own `self.schema` attribute to the `Schema` of the
    output deblended catalog.
    This will include all fields from the input `Schema`, as well as additional fields
    from the deblender.

    `pipe.tasks.multiband.DeblendCoaddSourcesTask` Description
    ---------------------------------------------------------

    Parameters
    ----------
    butler: `Butler`
        Butler used to read the input schemas from disk or
        construct the reference catalog loader, if `schema` or `peakSchema` are
        `None`.
    schema: `Schema`
        The schema of the merged detection catalog as an input to this task.
    peakSchema: `Schema`
        The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog
    """
    ConfigClass = DeblendCoaddSourcesConfig
    RunnerClass = DeblendCoaddSourcesRunner
    _DefaultName = "deblendCoaddSources"
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_calexp",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
                               ContainerClass=ExistingCoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser

    def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
        CmdLineTask.__init__(self, **kwargs)
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        if peakSchema is None:
            assert butler is not None, "Neither butler nor peakSchema is defined"
            peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema

        if self.config.simultaneous:
            self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
        else:
            self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)

    def getSchemaCatalogs(self):
        """Return a dict of empty catalogs for each catalog dataset produced by this task.

        Returns
        -------
        result: dict
            Dictionary of empty catalogs, with catalog names as keys.
        """
        catalog = afwTable.SourceCatalog(self.schema)
        return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
                self.config.coaddName + "Coadd_deblendedModel": catalog}

    def runDataRef(self, patchRefList, psfCache=100):
        """Deblend each source simultaneously or separately
        (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
        Set `is-primary` and related flags.
        Propagate flags from individual visits.
        Write the deblended sources out.

        Parameters
        ----------
        patchRefList: list
            List of data references for each filter
        """
        if self.config.hasFakes:
            coaddType = "fakes_" + self.config.coaddName
        else:
            coaddType = self.config.coaddName

        if self.config.simultaneous:
            # Gather the per-band exposures for the multi-band deblender
            filters = []
            exposures = []
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                filter = patchRef.get(coaddType + "Coadd_filterLabel", immediate=True)
                filters.append(filter.bandLabel)
                exposures.append(exposure)
            sources = self.readSources(patchRef)
            exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
            templateCatalogs = self.multiBandDeblend.run(exposure, sources)
            for n in range(len(patchRefList)):
                self.write(patchRefList[n], templateCatalogs[filters[n]])
        else:
            # Deblend each band separately
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                exposure.getPsf().setCacheCapacity(psfCache)
                sources = self.readSources(patchRef)
                self.singleBandDeblend.run(exposure, sources)
                self.write(patchRef, sources)
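
    # Note (added commentary): in the simultaneous branch above, ``readSources`` is
    # called only once because the merged detection catalog has no band dimension
    # (deepCoadd_mergeDet is keyed by tract and patch); the same parent catalog is
    # used for the multi-band deblend of all the gathered exposures.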

    def readSources(self, dataRef):
        """Read merged catalog

        Read the catalog of merged detections and create a catalog
        in a single band.

        Parameters
        ----------
        dataRef: data reference
            Data reference for catalog of merged detections

        Returns
        -------
        sources: `SourceCatalog`
            List of sources in merged catalog

        We also need to add columns to hold the measurements we're about to make so we can measure in-place.
        """
        merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
        self.log.info("Read %d detections: %s", len(merged), dataRef.dataId)
        idFactory = self.makeIdFactory(dataRef)
        for s in merged:
            idFactory.notify(s.getId())
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(merged, self.schemaMapper)
        return sources

    def write(self, dataRef, sources):
        """Write the source catalog(s)

        Parameters
        ----------
        dataRef: Data Reference
            Reference to the output catalog.
        sources: `SourceCatalog`
            Flux conserved sources to write to file.
            If using the single band deblender, this is the catalog
            generated.
        template_sources: `SourceCatalog`
            Source catalog using the multiband template models.
        """
        dataRef.put(sources, self.config.coaddName + "Coadd_deblendedFlux")
        self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId)

    def writeMetadata(self, dataRefList):
        """Write the metadata produced from processing the data.

        Parameters
        ----------
        dataRefList: list
            List of Butler data references used to write the metadata.
            The metadata is written to dataset type `CmdLineTask._getMetadataName`.
        """
        for dataRef in dataRefList:
            try:
                metadataName = self._getMetadataName()
                if metadataName is not None:
                    dataRef.put(self.getFullMetadata(), metadataName)
            except Exception as e:
                self.log.warning("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)

    def getExposureId(self, dataRef):
        """Get the ExposureId from a data reference"""
        return int(dataRef.get(self.config.coaddName + "CoaddId"))


class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep"}):
    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        name="ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
        deferLoad=True,
        multiple=True
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects",
        name="src",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use. "
             "If the single band deblender was used this should be 'deblendedFlux'. "
             "If the multi-band deblender was used this should be 'deblendedModel', "
             "or 'deblendedFlux' if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'."),
        name="{inputCoaddName}Coadd_deblendedFlux",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        if config.doPropagateFlags is False:
            self.inputs -= set(("visitCatalogs",))

        if config.doMatchSources is False:
            self.outputs -= set(("matchResult",))

        if config.doWriteMatchesDenormalized is False:
            self.outputs -= set(("denormMatches",))


class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """!
    @anchor MeasureMergedCoaddSourcesConfig_

    @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
    """
    inputCatalog = Field(dtype=str, default="deblendedFlux",
                         doc=("Name of the input catalog to use. "
                              "If the single band deblender was used this should be 'deblendedFlux'. "
                              "If the multi-band deblender was used this should be 'deblendedModel'. "
                              "If no deblending was performed this should be 'mergeDet'"))
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str,
        default="raise",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    @property
    def refObjLoader(self):
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_LocalPhotoCalib',
                                           ]
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                       ]
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                     ]

    def validate(self):
        super().validate()
        refCatGen2 = getattr(self.refObjLoader, "ref_dataset_name", None)
        if refCatGen2 is not None and refCatGen2 != self.connections.refCat:
            raise ValueError(
                f"Gen2 ({refCatGen2}) and Gen3 ({self.connections.refCat}) reference catalogs "
                f"are different. These options must be kept in sync until Gen2 is retired."
            )


class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
    """Get the psfCache setting into MeasureMergedCoaddSourcesTask"""
    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache)
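
# Note (added commentary): the runner above forwards the ``--psfCache`` command-line
# value into ``MeasureMergedCoaddSourcesTask.runDataRef``, which uses it to size the
# CoaddPsf cache via ``exposure.getPsf().setCacheCapacity``.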


class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask):
    r"""!
    @anchor MeasureMergedCoaddSourcesTask_

    @brief Deblend sources from master catalog in each coadd separately and measure.

    @section pipe_tasks_multiBand_Contents Contents

      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
      - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description

    Command-line task that uses peaks and footprints from a master catalog to perform deblending and
    measurement in each coadd.

    Given a master input catalog of sources (peaks and footprints) or deblender outputs
    (including a HeavyFootprint in each band), measure each source on the
    coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
    consistent set of child sources.

    The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
    properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
    flags are propagated to the coadd sources.

    Optionally, we can match the coadd sources to an external reference catalog.

      deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
      @n deepCoadd_calexp{tract,patch,filter}: ExposureF

      deepCoadd_meas{tract,patch,filter}: SourceCatalog

    MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:

    <DL>
      <DT> @ref SingleFrameMeasurementTask_ "measurement"
      <DD> Measure source properties of deblended sources.</DD>
      <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
      <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are
      not at the edge of the field and that have either not been deblended or are the children of deblended
      sources.</DD>
      <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
      <DD> Propagate flags set in individual visits to the coadd.</DD>
      <DT> @ref DirectMatchTask_ "match"
      <DD> Match input sources to a reference catalog (optional).</DD>
    </DL>

    These subtasks may be retargeted as required.

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization

    @copydoc \_\_init\_\_

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters

    See @ref MeasureMergedCoaddSourcesConfig_

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables

    The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task @endlink interface supports a
    flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py

    MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
    the various sub-tasks. See the documentation for individual sub-tasks for more information.

    @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
    MeasureMergedCoaddSourcesTask

    After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
    The next stage in the multi-band processing procedure will merge these measurements into a suitable
    catalog for driving forced photometry.

    Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds to be
    processed.
    A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
    `--help` command line argument:

      measureCoaddSources.py --help

    To demonstrate usage of the MeasureMergedCoaddSourcesTask in the larger context of multi-band
    processing, we will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming
    one has finished step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in
    the HSC-I band coadd as follows:

      measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I

    This will process the HSC-I band data. The results are written in
    `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`.

    It is also necessary to run

      measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R

    to generate the source catalogs for the HSC-R band required by the next step in the multi-band
    procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
    """
    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig
    RunnerClass = MeasureMergedCoaddSourcesRunner
    getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_calexp",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                               ContainerClass=ExistingCoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser

    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        """!
        @brief Initialize the task.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
        @param[in] schema: the schema of the merged detection catalog used as input to this one
        @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
        @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external reference
            catalog. May be None if the loader can be constructed from the butler argument or all steps
            requiring a reference catalog are disabled.
        @param[in] butler: a butler used to read the input schemas from disk or construct the reference
            catalog loader, if schema or peakSchema or refObjLoader is None

        The task will set its own self.schema attribute to the schema of the output measurement catalog.
        This will include all fields from the input schema, as well as additional fields for all the
        measurements.
        """
        super().__init__(**kwargs)
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        if initInputs is not None:
            schema = initInputs['inputSchema'].schema
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        self.outputSchema = afwTable.SourceCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        if self.config.doMatchSources:
            refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                                 inputs.pop('refCat'),
                                                 config=self.config.refObjLoader)
            self.match.setRefObjLoader(refObjLoader)

        # Set the PSF cache capacity before measurement
        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        # Pack the data ID into an integer for RNG seeding and the source IdFactory
        exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch")
        inputs['exposureId'] = exposureIdInfo.expId
        idFactory = exposureIdInfo.makeSourceIdFactory()

        # Build the output source catalog from the input (merged or deblended) catalog
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(inputs.pop('inputCatalog'), self.schemaMapper)
        table = sources.getTable()
        table.setMetadata(self.algMetadata)
        inputs['sources'] = sources

        skyMap = inputs.pop('skyMap')
        tractNumber = inputRefs.inputCatalog.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(inputRefs.inputCatalog.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            # Keep only the visit catalogs that actually contributed to this coadd
            ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
            visitKey = ccdInputs.schema.find("visit").key
            ccdKey = ccdInputs.schema.find("ccd").key
            inputVisitIds = set()
            ccdRecordsWcs = {}
            for ccdRecord in ccdInputs:
                visit = ccdRecord.get(visitKey)
                ccd = ccdRecord.get(ccdKey)
                inputVisitIds.add((visit, ccd))
                ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

            inputCatalogsToKeep = []
            inputCatalogWcsUpdate = []
            for i, dataRef in enumerate(inputRefs.visitCatalogs):
                key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                if key in inputVisitIds:
                    inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                    inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
            inputs['visitCatalogs'] = inputCatalogsToKeep
            inputs['wcsUpdates'] = inputCatalogWcsUpdate
            inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)
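
    # Note (added commentary): in this Gen3 path, ``visitCatalogs`` and ``wcsUpdates``
    # stand in for the Gen2 butler when propagating visit flags; ``run`` below accepts
    # either form, as described in its docstring.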

    def runDataRef(self, patchRef, psfCache=100):
        """!
        @brief Deblend and measure.

        @param[in] patchRef: Patch reference.

        Set 'is-primary' and related flags. Propagate flags
        from individual visits. Optionally match the sources to a reference catalog and write the matches.
        Finally, write the deblended sources and measurements out.
        """
        if self.config.hasFakes:
            coaddType = "fakes_" + self.config.coaddName
        else:
            coaddType = self.config.coaddName
        exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
        exposure.getPsf().setCacheCapacity(psfCache)
        sources = self.readSources(patchRef)
        table = sources.getTable()
        table.setMetadata(self.algMetadata)
        skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)

        if self.config.doPropagateFlags:
            ccdInputs = self.propagateFlags.getCcdInputs(exposure)
        else:
            ccdInputs = None

        results = self.run(exposure=exposure, sources=sources,
                           ccdInputs=ccdInputs,
                           skyInfo=skyInfo, butler=patchRef.getButler(),
                           exposureId=self.getExposureId(patchRef))

        if self.config.doMatchSources:
            self.writeMatches(patchRef, results)
        self.write(patchRef, results.outputSources)

    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
            butler=None):
        """Run measurement algorithms on the input exposure, and optionally populate the
        resulting catalog with extra information.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The input exposure on which measurements are to be performed
        sources : `lsst.afw.table.SourceCatalog`
            A catalog built from the results of merged detections, or
            deblender outputs.
        skyInfo : `lsst.pipe.base.Struct`
            A struct containing information about the position of the input exposure within
            a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
        exposureId : `int` or `bytes`
            packed unique number or bytes unique to the input exposure
        ccdInputs : `lsst.afw.table.ExposureCatalog` or `None`
            Catalog containing information on the individual visits which went into making
            the input exposure
        visitCatalogs : list of `lsst.afw.table.SourceCatalogs` or `None`
            A list of source catalogs corresponding to measurements made on the individual
            visits which went into the input exposure. If None and butler is `None` then
            the task cannot propagate visit flags to the output catalog.
        wcsUpdates : list of `lsst.afw.geom.SkyWcs` or `None`
            If visitCatalogs is not `None` this should be a list of wcs objects which correspond
            to the input visits. Used to put all coordinates to common system. If `None` and
            butler is `None` then the task cannot propagate visit flags to the output catalog.
        butler : `lsst.daf.butler.Butler` or `lsst.daf.persistence.Butler`
            Either a gen2 or gen3 butler used to load visit catalogs

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results of running measurement task. Will contain the catalog in the
            sources attribute. Optionally will have results of matching to a
            reference catalog in the matchResults attribute, and denormalized
            matches in the denormMatches attribute.
        """
        self.measurement.run(sources, exposure, exposureId=exposureId)

        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=sources,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        # Ensure contiguity for the steps below, which access the catalog column-wise
        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
                                 patchInfo=skyInfo.patchInfo)
        if self.config.doPropagateFlags:
            self.propagateFlags.run(butler, sources, ccdInputs, exposure.getWcs(), visitCatalogs, wcsUpdates)

        results = Struct()

        if self.config.doMatchSources:
            matchResult = self.match.run(sources, exposure.getInfo().getFilterLabel().bandLabel)
            matches = afwTable.packMatches(matchResult.matches)
            matches.table.setMetadata(matchResult.matchMeta)
            results.matchResult = matches
            if self.config.doWriteMatchesDenormalized:
                if matchResult.matches:
                    denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
                else:
                    self.log.warning("No matches, so generating dummy denormalized matches file")
                    denormMatches = afwTable.BaseCatalog(afwTable.Schema())
                    denormMatches.setMetadata(PropertyList())
                    denormMatches.getMetadata().add("COMMENT",
                                                    "This catalog is empty because no matches were found.")
                results.denormMatches = denormMatches

        results.outputSources = sources
        return results

    def readSources(self, dataRef):
        """!
        @brief Read input sources.

        @param[in] dataRef: Data reference for catalog of merged detections
        @return List of sources in merged catalog

        We also need to add columns to hold the measurements we're about to make so we can measure in-place.
        """
        merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
        self.log.info("Read %d detections: %s", len(merged), dataRef.dataId)
        idFactory = self.makeIdFactory(dataRef)
        for s in merged:
            idFactory.notify(s.getId())
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(merged, self.schemaMapper)
        return sources
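
    # Note (added commentary): ``idFactory.notify`` above registers each ID already
    # present in the merged catalog with the factory, so that any records added later
    # through this table receive IDs that do not collide with existing ones.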

    def writeMatches(self, dataRef, results):
        """!
        @brief Write matches of the sources to the astrometric reference catalog.

        @param[in] dataRef: data reference
        @param[in] results: results struct from run method
        """
        if hasattr(results, "matchResult"):
            dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch")
        if hasattr(results, "denormMatches"):
            dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull")

    def write(self, dataRef, sources):
        """!
        @brief Write the source catalog.

        @param[in] dataRef: data reference
        @param[in] sources: source catalog
        """
        dataRef.put(sources, self.config.coaddName + "Coadd_meas")
        self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId)

    def getExposureId(self, dataRef):
        return int(dataRef.get(self.config.coaddName + "CoaddId"))