from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner,
                            PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
import lsst.pipe.base.connectionTypes as cT
from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
from lsst.meas.extensions.scarlet import ScarletDeblendTask
from lsst.obs.base import ExposureIdInfo
from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask
from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask
from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs
from .multiBandUtils import getInputSchema, readCatalog, _makeMakeIdFactory
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask
"""
* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
* deepCoadd_mergeDet: merged detections (tract, patch)
* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
* deepCoadd_ref: reference sources (tract, patch)
All of these have associated *_schema catalogs that require no data ID and hold no records.

In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
the mergeDet, meas, and ref dataset Footprints:
* deepCoadd_peak_schema
"""
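# A minimal retrieval sketch for the dataset types listed above, assuming a Gen2 butler and a
# hypothetical repository path and data ID (illustrative only, not taken from this module):
#
#     from lsst.daf.persistence import Butler
#     butler = Butler("/path/to/rerun")
#     mergeDet = butler.get("deepCoadd_mergeDet", tract=0, patch="5,4")
#     meas = butler.get("deepCoadd_meas", tract=0, patch="5,4", filter="HSC-I")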
class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep",
                                                      "outputCoaddName": "deep"}):
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """!
    @anchor DetectCoaddSourcesConfig_

    @brief Configuration parameters for the DetectCoaddSourcesTask
    """
    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    doInsertFakes = Field(dtype=bool, default=False,
                          doc="Run fake sources injection task")
    insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
                                    doc="Injection of fake sources for testing "
                                        "purposes (must be retargeted)")
    hasFakes = Field(
        dtype=bool,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    def setDefaults(self):
        super().setDefaults()
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True
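# A hedged config-override sketch for the defaults above, as one might place in a file passed to
# detectCoaddSources.py via --configfile (the specific values are illustrative assumptions, not
# recommendations from this module):
#
#     config.doScaleVariance = False
#     config.detection.thresholdValue = 5.0
#     config.detection.doTempWideBackground = False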
class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
    r"""!
    @anchor DetectCoaddSourcesTask_

    @brief Detect sources on a coadd

    @section pipe_tasks_multiBand_Contents Contents

      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
      - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description

    Command-line task that detects sources on a coadd of exposures obtained with a single filter.

    Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
    properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
    in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
    propagate the full covariance matrix -- but it is simple and works well in practice.

    After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref
    SourceDetectionTask_ "detection" subtask.

      deepCoadd{tract,patch,filter}: ExposureF

      deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
      @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input exposure
      @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList

    DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
    You can retarget this subtask if you wish.

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization

    @copydoc \_\_init\_\_

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters

    See @ref DetectCoaddSourcesConfig_ "DetectCoaddSourcesConfig"

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables

    The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task @endlink interface supports a
    flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py

    DetectCoaddSourcesTask has no debug variables of its own because it delegates all the work to
    @ref SourceDetectionTask_ "SourceDetectionTask"; see the documentation for
    @ref SourceDetectionTask_ "SourceDetectionTask" for further information.

    @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example
    of using DetectCoaddSourcesTask

    DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose
    of the task is to update the background, detect all sources in a single band and generate a set of parent
    footprints. Subsequent tasks in the multi-band processing procedure will merge sources across bands and,
    eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects a data
    reference to the coadd to be processed. A list of the available optional arguments can be obtained by
    calling detectCoaddSources.py with the `--help` command line argument:

        detectCoaddSources.py --help

    To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
    will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
    steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:

        detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I

    that will process the HSC-I band data. The results are written to
    `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.

    It is also necessary to run:

        detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R

    to generate the source catalogs for the HSC-R band required by the next step in the multi-band
    processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".
    """
    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig
    getSchemaCatalogs = _makeGetSchemaCatalogs("det")
    makeIdFactory = _makeMakeIdFactory("CoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                               ContainerClass=ExistingCoaddDataIdContainer)
        return parser
    def __init__(self, schema=None, **kwargs):
        """!
        @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):

        @param[in] schema: initial schema for the output catalog, modified in place to include all
            fields set by this task. If None, the source minimal schema will be used.
        @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
        """
        super().__init__(**kwargs)
        if schema is None:
            schema = afwTable.SourceTable.makeMinimalSchema()
        if self.config.doInsertFakes:
            self.makeSubtask("insertFakes")
        self.schema = schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")

        self.detectionSchema = afwTable.SourceCatalog(self.schema)
    def runDataRef(self, patchRef):
        """!
        @brief Run detection on a coadd.

        Invokes @ref run and then uses @ref write to output the results.

        @param[in] patchRef: data reference for patch
        """
        if self.config.hasFakes:
            exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True)
        else:
            exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
        expId = getGen3CoaddExposureId(patchRef, coaddName=self.config.coaddName, log=self.log)
        results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
        self.write(results, patchRef)
        return results
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        # Derive a unique integer exposure ID and an IdFactory for this tract/patch/band quantum
        exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch_band")
        inputs["idFactory"] = exposureIdInfo.makeSourceIdFactory()
        inputs["expId"] = exposureIdInfo.expId
        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)
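    # Sketch of the ID handling used in runQuantum above (the data ID is assumed to carry tract,
    # patch and band; "tract_patch_band" names the packer that turns it into one integer):
    #
    #     info = ExposureIdInfo.fromDataId(dataId, "tract_patch_band")
    #     idFactory = info.makeSourceIdFactory()   # hands out source IDs that cannot collide
    #     seed = info.expId                        # also usable as an RNG seed, as in run()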
    def run(self, exposure, idFactory, expId):
        """!
        @brief Run detection on an exposure.

        First scale the variance plane to match the observed variance
        using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
        detect sources.

        @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled,
            depending on configuration).
        @param[in] idFactory: IdFactory to set source identifiers
        @param[in] expId: Exposure identifier (integer) for RNG seed

        @return a pipe.base.Struct with fields
            - sources: catalog of detections
            - backgrounds: list of backgrounds
        """
        if self.config.doScaleVariance:
            varScale = self.scaleVariance.run(exposure.maskedImage)
            exposure.getMetadata().add("VARIANCE_SCALE", varScale)
        backgrounds = afwMath.BackgroundList()
        if self.config.doInsertFakes:
            self.insertFakes.run(exposure, background=backgrounds)
        table = afwTable.SourceTable.make(self.schema, idFactory)
        detections = self.detection.run(table, exposure, expId=expId)
        sources = detections.sources
        fpSets = detections.fpSets
        if hasattr(fpSets, "background") and fpSets.background:
            for bg in fpSets.background:
                backgrounds.append(bg)
        return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
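    # A direct-call sketch of run() under assumed inputs (the exposure variable and the expId value
    # are placeholders, not provided by this module):
    #
    #     task = DetectCoaddSourcesTask()
    #     idFactory = afwTable.IdFactory.makeSimple()
    #     result = task.run(exposure, idFactory, expId=0)
    #     print(len(result.outputSources), "sources;", len(result.outputBackgrounds), "backgrounds")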
    def write(self, results, patchRef):
        """!
        @brief Write out results from runDetection.

        @param[in] results: Struct returned from runDetection
        @param[in] patchRef: data reference for patch
        """
        coaddName = self.config.coaddName + "Coadd"
        patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
        patchRef.put(results.outputSources, coaddName + "_det")
        if self.config.hasFakes:
            patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp")
        else:
            patchRef.put(results.outputExposure, coaddName + "_calexp")
class DeblendCoaddSourcesConfig(Config):
    """DeblendCoaddSourcesConfig

    Configuration parameters for the `DeblendCoaddSourcesTask`.
    """
    singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
                                          doc="Deblend sources separately in each band")
    multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
                                         doc="Deblend sources simultaneously across bands")
    simultaneous = Field(dtype=bool,
                         doc="Simultaneously deblend all bands? "
                             "True uses `multiBandDeblend` while False uses `singleBandDeblend`")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    hasFakes = Field(dtype=bool,
                     doc="Should be set to True if fake sources have been inserted into the input data.")

    def setDefaults(self):
        Config.setDefaults(self)
        self.singleBandDeblend.propagateAllPeaks = True
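# A hedged override sketch for choosing the deblender via the config above (values illustrative):
#
#     config.simultaneous = True    # fit all bands at once with the SCARLET-based multiBandDeblend
#     config.simultaneous = False   # or deblend each band separately with singleBandDeblend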
376 """Task runner for the `MergeSourcesTask`
378 Required because the run method requires a list of
379 dataRefs rather than a single dataRef.
382 def getTargetList(parsedCmd, **kwargs):
383 """Provide a list of patch references for each patch, tract, filter combo.
390 Keyword arguments passed to the task
395 List of tuples, where each tuple is a (dataRef, kwargs) pair.
397 refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
398 kwargs["psfCache"] = parsedCmd.psfCache
399 return [(list(p.values()), kwargs)
for t
in refDict.values()
for p
in t.values()]
class DeblendCoaddSourcesTask(CmdLineTask):
    """Deblend the sources in a merged catalog

    Deblend sources from master catalog in each coadd.
    This can either be done separately in each band using the HSC-SDSS deblender
    (`DeblendCoaddSourcesTask.config.simultaneous==False`)
    or use SCARLET to simultaneously fit the blend in all bands
    (`DeblendCoaddSourcesTask.config.simultaneous==True`).
    The task will set its own `self.schema` attribute to the `Schema` of the
    output deblended catalog.
    This will include all fields from the input `Schema`, as well as additional fields
    from the deblender.

    pipe.tasks.multiband.DeblendCoaddSourcesTask Description
    ---------------------------------------------------------

    Parameters
    ----------
    butler: `Butler`
        Butler used to read the input schemas from disk or
        construct the reference catalog loader, if `schema` or `peakSchema`
        are `None`.
    schema: `Schema`
        The schema of the merged detection catalog as an input to this task.
    peakSchema: `Schema`
        The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog
    """
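    # A command-line sketch, by analogy with the sibling tasks in this module (repository path and
    # data ID values are illustrative assumptions):
    #
    #     deblendCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I^HSC-R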
    ConfigClass = DeblendCoaddSourcesConfig
    RunnerClass = DeblendCoaddSourcesRunner
    _DefaultName = "deblendCoaddSources"
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId", includeBand=False)
    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_calexp",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
                               ContainerClass=ExistingCoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser
    def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
        CmdLineTask.__init__(self, **kwargs)
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        if peakSchema is None:
            assert butler is not None, "Neither butler nor peakSchema is defined"
            peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema

        if self.config.simultaneous:
            self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
        else:
            self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)
    def getSchemaCatalogs(self):
        """Return a dict of empty catalogs for each catalog dataset produced by this task.

        Returns
        -------
        result: dict
            Dictionary of empty catalogs, with catalog names as keys.
        """
        catalog = afwTable.SourceCatalog(self.schema)
        return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
                self.config.coaddName + "Coadd_deblendedModel": catalog}
    def runDataRef(self, patchRefList, psfCache=100):
        """Deblend each source simultaneously or separately
        (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
        Set `is-primary` and related flags.
        Propagate flags from individual visits.
        Write the deblended sources out.

        Parameters
        ----------
        patchRefList: list
            List of data references for each filter
        """
        if self.config.hasFakes:
            coaddType = "fakes_" + self.config.coaddName
        else:
            coaddType = self.config.coaddName

        if self.config.simultaneous:
            # Deblend all bands at once with the multi-band deblender
            filters = []
            exposures = []
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                filter = patchRef.get(coaddType + "Coadd_filterLabel", immediate=True)
                filters.append(filter.bandLabel)
                exposures.append(exposure)
            # Keep the exposures, patch references, and band labels in a consistent (sorted) order
            exposures = [exposure for _, exposure in sorted(zip(filters, exposures))]
            patchRefList = [patchRef for _, patchRef in sorted(zip(filters, patchRefList))]
            filters.sort()
            sources = self.readSources(patchRef)
            exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
            templateCatalogs = self.multiBandDeblend.run(exposure, sources)
            for n in range(len(patchRefList)):
                self.write(patchRefList[n], templateCatalogs[filters[n]])
        else:
            # Deblend each band separately
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                exposure.getPsf().setCacheCapacity(psfCache)
                sources = self.readSources(patchRef)
                self.singleBandDeblend.run(exposure, sources)
                self.write(patchRef, sources)
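    # Sketch of the multi-band stacking used above (band labels and per-band exposures are
    # placeholders; the bands must be given in the same order as the exposures):
    #
    #     mExposure = afwImage.MultibandExposure.fromExposures(["g", "r", "i"], [expG, expR, expI])
    #     templateCatalogs = self.multiBandDeblend.run(mExposure, mergedSources)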
    def readSources(self, dataRef):
        """Read merged catalog

        Read the catalog of merged detections and create a catalog
        in a single band.

        Parameters
        ----------
        dataRef: data reference
            Data reference for catalog of merged detections

        Returns
        -------
        sources: `SourceCatalog`
            List of sources in merged catalog

        We also need to add columns to hold the measurements we're about to make
        so we can measure in-place.
        """
        merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
        self.log.info("Read %d detections: %s", len(merged), dataRef.dataId)
        idFactory = self.makeIdFactory(dataRef)
        # Ensure the IdFactory never reuses an identifier already present in the merged catalog
        maxId = np.max(merged["id"])
        idFactory.notify(maxId)
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(merged, self.schemaMapper)
        return sources
    def write(self, dataRef, sources):
        """Write the source catalog(s)

        Parameters
        ----------
        dataRef: Data Reference
            Reference to the output catalog.
        sources: `SourceCatalog`
            Flux conserved sources to write to file.
            If using the single band deblender, this is the catalog
            generated.
        """
        dataRef.put(sources, self.config.coaddName + "Coadd_deblendedFlux")
        self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId)
570 """Write the metadata produced from processing the data.
574 List of Butler data references used to write the metadata.
575 The metadata is written to dataset type `CmdLineTask._getMetadataName`.
577 for dataRef
in dataRefList:
579 metadataName = self._getMetadataName()
580 if metadataName
is not None:
581 dataRef.put(self.getFullMetadata(), metadataName)
582 except Exception
as e:
583 self.log.warning(
"Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)
class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep"}):
    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use. "
             "If the single band deblender was used this should be 'deblendedFlux'. "
             "If the multi-band deblender was used this should be 'deblendedModel', "
             "or 'deblendedFlux' if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'."),
        name="{inputCoaddName}Coadd_deblendedFlux",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        if config.doPropagateFlags is False:
            self.inputs -= set(("visitCatalogs",))
        if config.doMatchSources is False:
            self.outputs -= set(("matchResult",))
        if config.doWriteMatchesDenormalized is False:
            self.outputs -= set(("denormMatches",))
class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """!
    @anchor MeasureMergedCoaddSourcesConfig_

    @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
    """
    inputCatalog = Field(dtype=str, default="deblendedFlux",
                         doc=("Name of the input catalog to use. "
                              "If the single band deblender was used this should be 'deblendedFlux'. "
                              "If the multi-band deblender was used this should be 'deblendedModel'. "
                              "If no deblending was performed this should be 'mergeDet'"))
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        dtype=str,
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
    )
    doApCorr = Field(
        dtype=bool,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )
    hasFakes = Field(
        dtype=bool,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    @property
    def refObjLoader(self):
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_LocalPhotoCalib']
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE']

    def validate(self):
        super().validate()
        refCatGen2 = getattr(self.refObjLoader, "ref_dataset_name", None)
        if refCatGen2 is not None and refCatGen2 != self.connections.refCat:
            raise ValueError(
                f"Gen2 ({refCatGen2}) and Gen3 ({self.connections.refCat}) reference catalogs "
                f"are different. These options must be kept in sync until Gen2 is retired."
            )
class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
    """Get the psfCache setting into MeasureMergedCoaddSourcesTask"""

    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache)
class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask):
    r"""!
    @anchor MeasureMergedCoaddSourcesTask_

    @brief Deblend sources from master catalog in each coadd separately and measure.

    @section pipe_tasks_multiBand_Contents Contents

      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
      - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description

    Command-line task that uses peaks and footprints from a master catalog to perform deblending and
    measurement in each coadd.

    Given a master input catalog of sources (peaks and footprints) or deblender outputs
    (including a HeavyFootprint in each band), measure each source on the
    coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
    consistent set of child sources.

    The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
    properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
    flags are propagated to the coadd sources.

    Optionally, we can match the coadd sources to an external reference catalog.

      deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
      @n deepCoadd_calexp{tract,patch,filter}: ExposureF

      deepCoadd_meas{tract,patch,filter}: SourceCatalog

    MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:

    <DL>
      <DT> @ref SingleFrameMeasurementTask_ "measurement"
      <DD> Measure source properties of deblended sources.</DD>
      <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
      <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that
      are not at the edge of the field and that have either not been deblended or are the children of
      deblended sources.</DD>
      <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
      <DD> Propagate flags set in individual visits to the coadd.</DD>
      <DT> @ref DirectMatchTask_ "match"
      <DD> Match input sources to a reference catalog (optional).</DD>
    </DL>

    These subtasks may be retargeted as required.

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization

    @copydoc \_\_init\_\_

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters

    See @ref MeasureMergedCoaddSourcesConfig_

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables

    The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task @endlink interface supports a
    flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py

    MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
    the various sub-tasks. See the documentation for individual sub-tasks for more information.

    @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
    MeasureMergedCoaddSourcesTask

    After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
    The next stage in the multi-band processing procedure will merge these measurements into a suitable
    catalog for driving forced photometry.

    Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds
    to be processed.
    A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
    `--help` command line argument:

        measureCoaddSources.py --help

    To demonstrate usage of the MeasureMergedCoaddSourcesTask in the larger context of multi-band
    processing, we will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming
    one has finished step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in
    the HSC-I band coadd as follows:

        measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I

    This will process the HSC-I band data. The results are written in
    `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`.

    It is also necessary to run

        measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R

    to generate the source catalogs for the HSC-R band required by the next step in the multi-band
    procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
    """
    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig
    RunnerClass = MeasureMergedCoaddSourcesRunner
    getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId", includeBand=False)

    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_calexp",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                               ContainerClass=ExistingCoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser
    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        """!
        @brief Initialize the task.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
        @param[in] schema: the schema of the merged detection catalog used as input to this one
        @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection
            catalog
        @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external
            reference catalog. May be None if the loader can be constructed from the butler argument or all
            steps requiring a reference catalog are disabled.
        @param[in] butler: a butler used to read the input schemas from disk or construct the reference
            catalog loader, if schema or peakSchema or refObjLoader is None

        The task will set its own self.schema attribute to the schema of the output measurement catalog.
        This will include all fields from the input schema, as well as additional fields for all the
        measurements.
        """
        super().__init__(**kwargs)
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        if initInputs is not None:
            schema = initInputs['inputSchema'].schema
        elif schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        self.outputSchema = afwTable.SourceCatalog(self.schema)
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        if self.config.doMatchSources:
            refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                                 inputs.pop('refCat'), config=self.config.refObjLoader)
            self.match.setRefObjLoader(refObjLoader)

        # Set the PSF cache capacity on the coadd exposure
        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        # Derive an integer exposure ID and IdFactory unique to this tract/patch
        exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch")
        inputs['exposureId'] = exposureIdInfo.expId
        idFactory = exposureIdInfo.makeSourceIdFactory()

        # Transfer the input catalog into a catalog with the output schema
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(inputs.pop('inputCatalog'), self.schemaMapper)
        table = sources.getTable()
        table.setMetadata(self.algMetadata)
        inputs['sources'] = sources

        skyMap = inputs.pop('skyMap')
        tractNumber = inputRefs.inputCatalog.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(inputRefs.inputCatalog.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            # Keep only the visit catalogs that actually contributed to this coadd
            ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
            visitKey = ccdInputs.schema.find("visit").key
            ccdKey = ccdInputs.schema.find("ccd").key
            inputVisitIds = set()
            ccdRecordsWcs = {}
            for ccdRecord in ccdInputs:
                visit = ccdRecord.get(visitKey)
                ccd = ccdRecord.get(ccdKey)
                inputVisitIds.add((visit, ccd))
                ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

            inputCatalogsToKeep = []
            inputCatalogWcsUpdate = []
            for i, dataRef in enumerate(inputRefs.visitCatalogs):
                key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                if key in inputVisitIds:
                    inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                    inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
            inputs['visitCatalogs'] = inputCatalogsToKeep
            inputs['wcsUpdates'] = inputCatalogWcsUpdate
            inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)
    def runDataRef(self, patchRef, psfCache=100):
        """!
        @brief Deblend and measure.

        @param[in] patchRef: Patch reference.

        Set 'is-primary' and related flags. Propagate flags
        from individual visits. Optionally match the sources to a reference catalog and write the matches.
        Finally, write the deblended sources and measurements out.
        """
        if self.config.hasFakes:
            coaddType = "fakes_" + self.config.coaddName
        else:
            coaddType = self.config.coaddName
        exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
        exposure.getPsf().setCacheCapacity(psfCache)
        sources = self.readSources(patchRef)
        table = sources.getTable()
        table.setMetadata(self.algMetadata)
        skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)

        if self.config.doPropagateFlags:
            ccdInputs = self.propagateFlags.getCcdInputs(exposure)
        else:
            ccdInputs = None

        expId = getGen3CoaddExposureId(patchRef, coaddName=self.config.coaddName, includeBand=False,
                                       log=self.log)
        results = self.run(exposure=exposure, sources=sources, skyInfo=skyInfo, exposureId=expId,
                           ccdInputs=ccdInputs, butler=patchRef.getButler())

        if self.config.doMatchSources:
            self.writeMatches(patchRef, results)
        self.write(patchRef, results.outputSources)
    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
            butler=None):
        """Run measurement algorithms on the input exposure, and optionally populate the
        resulting catalog with extra information.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The input exposure on which measurements are to be performed
        sources : `SourceCatalog`
            A catalog built from the results of merged detections, or
            deblender outputs.
        skyInfo : `lsst.pipe.base.Struct`
            A struct containing information about the position of the input exposure within
            a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
        exposureId : `int` or `bytes`
            packed unique number or bytes unique to the input exposure
        ccdInputs : `ExposureCatalog` or `None`
            Catalog containing information on the individual visits which went into making
            the input exposure
        visitCatalogs : list of `lsst.afw.table.SourceCatalogs` or `None`
            A list of source catalogs corresponding to measurements made on the individual
            visits which went into the input exposure. If None and butler is `None` then
            the task cannot propagate visit flags to the output catalog.
        wcsUpdates : list or `None`
            If visitCatalogs is not `None` this should be a list of wcs objects which correspond
            to the input visits. Used to put all coordinates to common system. If `None` and
            butler is `None` then the task cannot propagate visit flags to the output catalog.
        butler : `lsst.daf.butler.Butler` or `lsst.daf.persistence.Butler`
            Either a gen2 or gen3 butler used to load visit catalogs

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results of running measurement task. Will contain the catalog in the
            sources attribute. Optionally will have results of matching to a
            reference catalog in the matchResults attribute, and denormalized
            matches in the denormMatches attribute.
        """
        self.measurement.run(sources, exposure, exposureId=exposureId)

        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=sources,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        # The catalog must be contiguous for the downstream plugins and matching to work in-place
        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
                                 patchInfo=skyInfo.patchInfo)
        if self.config.doPropagateFlags:
            self.propagateFlags.run(butler, sources, ccdInputs, exposure.getWcs(), visitCatalogs, wcsUpdates)

        results = Struct()

        if self.config.doMatchSources:
            matchResult = self.match.run(sources, exposure.getInfo().getFilterLabel().bandLabel)
            matches = afwTable.packMatches(matchResult.matches)
            matches.table.setMetadata(matchResult.matchMeta)
            results.matchResult = matches
            if self.config.doWriteMatchesDenormalized:
                if matchResult.matches:
                    denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
                else:
                    self.log.warning("No matches, so generating dummy denormalized matches file")
                    denormMatches = afwTable.BaseCatalog(afwTable.Schema())
                    denormMatches.getMetadata().add("COMMENT",
                                                    "This catalog is empty because no matches were found.")
                results.denormMatches = denormMatches

        results.outputSources = sources
        return results
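    # A minimal consumer sketch for the Struct returned by run() above (all inputs are assumed to
    # exist in the caller's scope; the exposureId value is a placeholder):
    #
    #     results = task.run(exposure=exposure, sources=sources, skyInfo=skyInfo, exposureId=0)
    #     measCat = results.outputSources
    #     if task.config.doMatchSources:
    #         packedMatches = results.matchResult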
    def readSources(self, dataRef):
        """!
        @brief Read input sources.

        @param[in] dataRef: Data reference for catalog of merged detections
        @return List of sources in merged catalog

        We also need to add columns to hold the measurements we're about to make
        so we can measure in-place.
        """
        merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
        self.log.info("Read %d detections: %s", len(merged), dataRef.dataId)
        idFactory = self.makeIdFactory(dataRef)
        # Tell the IdFactory about every existing ID so it never hands out a duplicate
        for s in merged:
            idFactory.notify(s.getId())
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(merged, self.schemaMapper)
        return sources
    def writeMatches(self, dataRef, results):
        """!
        @brief Write matches of the sources to the astrometric reference catalog.

        @param[in] dataRef: data reference
        @param[in] results: results struct from run method
        """
        if hasattr(results, "matchResult"):
            dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch")
        if hasattr(results, "denormMatches"):
            dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull")
    def write(self, dataRef, sources):
        """!
        @brief Write the source catalog.

        @param[in] dataRef: data reference
        @param[in] sources: source catalog
        """
        dataRef.put(sources, self.config.coaddName + "Coadd_meas")
        self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId)
    # getSkyInfo(coaddName, patchRef), used in runDataRef above, returns the SkyMap, tract and patch
    # information, wcs, and outer bbox of the patch to be coadded.

    def writeMetadata(self, dataRefList):
        """No metadata to write, and not sure how to write it for a list of dataRefs.
        """
        pass