from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner,
                            PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
from lsst.meas.extensions.scarlet import ScarletDeblendTask
from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask
from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask
from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs
from .multiBandUtils import getInputSchema, getShortFilterName, readCatalog, _makeMakeIdFactory
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask
* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
* deepCoadd_mergeDet: merged detections (tract, patch)
* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
* deepCoadd_ref: reference sources (tract, patch)
All of these have associated *_schema catalogs that require no data ID and hold no records.

In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
the mergeDet, meas, and ref dataset Footprints:
* deepCoadd_peak_schema
class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """!
    @anchor DetectCoaddSourcesConfig_

    @brief Configuration parameters for the DetectCoaddSourcesTask
    """
    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    doInsertFakes = Field(dtype=bool, default=False,
                          doc="Run fake sources injection task")
    insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
                                    doc="Injection of fake sources for testing "
                                        "purposes (must be retargeted)")
    hasFakes = Field(dtype=bool, default=False,
                     doc="Should be set to True if fake sources have been inserted into the input data.")

    def setDefaults(self):
        super().setDefaults()
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True
class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
    r"""!
    @anchor DetectCoaddSourcesTask_

    @brief Detect sources on a coadd

    @section pipe_tasks_multiBand_Contents Contents

      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
      - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description

    Command-line task that detects sources on a coadd of exposures obtained with a single filter.

    Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
    properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
    in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
    propagate the full covariance matrix -- but it is simple and works well in practice.

    After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref
    SourceDetectionTask_ "detection" subtask.

    @par Inputs:
        deepCoadd{tract,patch,filter}: ExposureF
    @par Outputs:
        deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
        @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input
           exposure (ExposureF)
        @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList

    DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
    You can retarget this subtask if you wish.

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization

    @copydoc \_\_init\_\_

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task

    @copydoc run

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters

    See @ref DetectCoaddSourcesConfig_ "DetectSourcesConfig"

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables

    The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
    flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py

    DetectCoaddSourcesTask has no debug variables of its own because it delegates all the work to
    @ref SourceDetectionTask_ "SourceDetectionTask"; see the documentation for
    @ref SourceDetectionTask_ "SourceDetectionTask" for further information.

    @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example
    of using DetectCoaddSourcesTask

    DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose of
    the task is to update the background, detect all sources in a single band and generate a set of parent
    footprints. Subsequent tasks in the multi-band processing procedure will merge sources across bands and,
    eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects a data
    reference to the coadd to be processed. A list of the available optional arguments can be obtained by
    calling detectCoaddSources.py with the `--help` command line argument:

        detectCoaddSources.py --help

    To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
    will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
    steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:

        detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I

    that will process the HSC-I band data. The results are written to
    `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.

    It is also necessary to run:

        detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R

    to generate the sources catalogs for the HSC-R band required by the next step in the multi-band
    processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".
    """
    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig
    getSchemaCatalogs = _makeGetSchemaCatalogs("det")
    makeIdFactory = _makeMakeIdFactory("CoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                               ContainerClass=ExistingCoaddDataIdContainer)
        return parser
    def __init__(self, schema=None, **kwargs):
        """!
        @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):

        @param[in] schema: initial schema for the output catalog, modified in-place to include all
            fields set by this task. If None, the source minimal schema will be used.
        @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
        """
        super().__init__(**kwargs)
        if schema is None:
            schema = afwTable.SourceTable.makeMinimalSchema()
        if self.config.doInsertFakes:
            self.makeSubtask("insertFakes")
        self.schema = schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")

        self.detectionSchema = afwTable.SourceCatalog(self.schema)
    def runDataRef(self, patchRef):
        """!
        @brief Run detection on a coadd.

        Invokes @ref run and then uses @ref write to output the
        results.

        @param[in] patchRef: data reference for patch
        """
        if self.config.hasFakes:
            exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True)
        else:
            exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
        expId = int(patchRef.get(self.config.coaddName + "CoaddId"))
        results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
        self.write(results, patchRef)
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch_band", returnMaxBits=True)
        inputs["idFactory"] = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
        inputs["expId"] = packedId
        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)
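
    # Informational note on runQuantum above: the data ID is packed into `maxBits` bits,
    # leaving `64 - maxBits` bits of each 64-bit source id for per-source counters.  For
    # example, if maxBits were 32 (a hypothetical value), IdFactory.makeSource(packedId, 32)
    # would reserve the upper 32 bits of every source id for the tract/patch/band packing.
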
    def run(self, exposure, idFactory, expId):
        """!
        @brief Run detection on an exposure.

        First scale the variance plane to match the observed variance
        using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
        detect sources.

        @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled,
            depending on configuration).
        @param[in] idFactory: IdFactory to set source identifiers
        @param[in] expId: Exposure identifier (integer) for RNG seed

        @return a pipe.base.Struct with fields
        - sources: catalog of detections
        - backgrounds: list of backgrounds
        """
        if self.config.doScaleVariance:
            varScale = self.scaleVariance.run(exposure.maskedImage)
            exposure.getMetadata().add("VARIANCE_SCALE", varScale)
        backgrounds = afwMath.BackgroundList()
        if self.config.doInsertFakes:
            self.insertFakes.run(exposure, background=backgrounds)
        table = afwTable.SourceTable.make(self.schema, idFactory)
        detections = self.detection.run(table, exposure, expId=expId)
        sources = detections.sources
        fpSets = detections.fpSets
        if hasattr(fpSets, "background") and fpSets.background:
            for bg in fpSets.background:
                backgrounds.append(bg)
        return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
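
    # Sketch of consuming the Struct returned by run() outside the butler I/O layer
    # (illustrative only; `task`, `exposure` and `idFactory` are assumed to already exist):
    #
    #   result = task.run(exposure, idFactory, expId=0)
    #   nDetections = len(result.outputSources)
    #   backgrounds = result.outputBackgrounds   # afwMath.BackgroundList
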
    def write(self, results, patchRef):
        """!
        @brief Write out results from runDetection.

        @param[in] results: Struct returned from runDetection
        @param[in] patchRef: data reference for patch
        """
        coaddName = self.config.coaddName + "Coadd"
        patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
        patchRef.put(results.outputSources, coaddName + "_det")
        if self.config.hasFakes:
            patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp")
        else:
            patchRef.put(results.outputExposure, coaddName + "_calexp")

class DeblendCoaddSourcesConfig(Config):
    """DeblendCoaddSourcesConfig

    Configuration parameters for the `DeblendCoaddSourcesTask`.
    """
    singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
                                          doc="Deblend sources separately in each band")
    multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
                                         doc="Deblend sources simultaneously across bands")
    simultaneous = Field(dtype=bool, default=True,
                         doc="Simultaneously deblend all bands? "
                             "True uses 'multiBandDeblend' while False uses 'singleBandDeblend'")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    hasFakes = Field(dtype=bool, default=False,
                     doc="Should be set to True if fake sources have been inserted into the input data.")

    def setDefaults(self):
        Config.setDefaults(self)
        self.singleBandDeblend.propagateAllPeaks = True
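
# Illustrative config override (hypothetical values): selecting the per-band HSC-SDSS
# deblender instead of the simultaneous scarlet deblender uses the `simultaneous` field
# defined above:
#
#   config.simultaneous = False
#   config.singleBandDeblend.maxFootprintArea = 10000   # hypothetical tuning value
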
class DeblendCoaddSourcesRunner(MergeSourcesRunner):
    """Task runner for the `MergeSourcesTask`

    Required because the run method requires a list of
    dataRefs rather than a single dataRef.
    """
    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        """Provide a list of patch references for each patch, tract, filter combo.

        Parameters
        ----------
        kwargs:
            Keyword arguments passed to the task

        Returns
        -------
        targetList: list
            List of tuples, where each tuple is a (dataRef, kwargs) pair.
        """
        refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
        kwargs["psfCache"] = parsedCmd.psfCache
        return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()]
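
# The nested comprehension in getTargetList flattens a dict keyed by tract, whose values
# are dicts keyed by patch, whose values are dicts of per-filter dataRefs, into one
# (dataRefList, kwargs) pair per patch.  A shape-only sketch with placeholder values:
#
#   refDict = {0: {"5,4": {"HSC-I": "refI", "HSC-R": "refR"}}}
#   [(list(p.values()), {}) for t in refDict.values() for p in t.values()]
#   # -> [(["refI", "refR"], {})]
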
class DeblendCoaddSourcesTask(CmdLineTask):
    """Deblend the sources in a merged catalog

    Deblend sources from master catalog in each coadd.
    This can either be done separately in each band using the HSC-SDSS deblender
    (`DeblendCoaddSourcesTask.config.simultaneous==False`)
    or use SCARLET to simultaneously fit the blend in all bands
    (`DeblendCoaddSourcesTask.config.simultaneous==True`).
    The task will set its own `self.schema` attribute to the `Schema` of the
    output deblended catalog.
    This will include all fields from the input `Schema`, as well as additional fields
    from the deblender.

    `pipe.tasks.multiband.DeblendCoaddSourcesTask` Description
    ----------------------------------------------------------

    Parameters
    ----------
    butler: `Butler`
        Butler used to read the input schemas from disk or
        construct the reference catalog loader, if `schema` or `peakSchema` or
        the reference catalog loader is not defined.
    schema: `Schema`
        The schema of the merged detection catalog as an input to this task.
    peakSchema: `Schema`
        The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog
    """
    ConfigClass = DeblendCoaddSourcesConfig
    RunnerClass = DeblendCoaddSourcesRunner
    _DefaultName = "deblendCoaddSources"
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")
    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_calexp",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
                               ContainerClass=ExistingCoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser
    def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
        CmdLineTask.__init__(self, **kwargs)
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        if peakSchema is None:
            assert butler is not None, "Neither butler nor peakSchema is defined"
            peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema

        if self.config.simultaneous:
            self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
        else:
            self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)
    def getSchemaCatalogs(self):
        """Return a dict of empty catalogs for each catalog dataset produced by this task.

        Returns
        -------
        result: dict
            Dictionary of empty catalogs, with catalog names as keys.
        """
        catalog = afwTable.SourceCatalog(self.schema)
        return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
                self.config.coaddName + "Coadd_deblendedModel": catalog}
    def runDataRef(self, patchRefList, psfCache=100):
        """Deblend each source simultaneously or separately
        (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
        Set `is-primary` and related flags.
        Propagate flags from individual visits.
        Write the deblended sources out.

        Parameters
        ----------
        patchRefList: list
            List of data references for each filter
        """
        if self.config.hasFakes:
            coaddType = "fakes_" + self.config.coaddName
        else:
            coaddType = self.config.coaddName

        if self.config.simultaneous:
            # Fit all bands at once with the multiband deblender
            filters = []
            exposures = []
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                filters.append(patchRef.dataId["filter"])
                exposures.append(exposure)
            # The merged detection catalog is the same for every band
            sources = self.readSources(patchRef)
            exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
            templateCatalogs = self.multiBandDeblend.run(exposure, sources)
            for n in range(len(patchRefList)):
                self.write(patchRefList[n], templateCatalogs[filters[n]])
        else:
            # Deblend each band separately
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                exposure.getPsf().setCacheCapacity(psfCache)
                sources = self.readSources(patchRef)
                self.singleBandDeblend.run(exposure, sources)
                self.write(patchRef, sources)
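
    # Note on the multiband branch above: `templateCatalogs` returned by the scarlet
    # deblender is indexed by filter name, so `templateCatalogs[filters[n]]` is the
    # deblended catalog corresponding to the n-th patch reference read in the loop.
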
    def readSources(self, dataRef):
        """Read merged catalog

        Read the catalog of merged detections and create a catalog
        in a single band.

        Parameters
        ----------
        dataRef: data reference
            Data reference for catalog of merged detections

        Returns
        -------
        sources: `SourceCatalog`
            List of sources in merged catalog

        We also need to add columns to hold the measurements we're about to make
        so we can measure in-place.
        """
        merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
        self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
        idFactory = self.makeIdFactory(dataRef)
        for s in merged:
            idFactory.notify(s.getId())
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(merged, self.schemaMapper)
        return sources
    def write(self, dataRef, sources):
        """Write the source catalog(s)

        Parameters
        ----------
        dataRef: Data Reference
            Reference to the output catalog.
        sources: `SourceCatalog`
            Flux conserved sources to write to file.
            If using the single band deblender, this is the catalog
            generated.
        template_sources: `SourceCatalog`
            Source catalog using the multiband template models
            as footprints.
        """
        dataRef.put(sources, self.config.coaddName + "Coadd_deblendedFlux")
        self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId))
    def writeMetadata(self, dataRefList):
        """Write the metadata produced from processing the data.

        Parameters
        ----------
        dataRefList : list
            List of Butler data references used to write the metadata.
            The metadata is written to dataset type `CmdLineTask._getMetadataName`.
        """
        for dataRef in dataRefList:
            try:
                metadataName = self._getMetadataName()
                if metadataName is not None:
                    dataRef.put(self.getFullMetadata(), metadataName)
            except Exception as e:
                self.log.warn("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)
    def getExposureId(self, dataRef):
        """Get the ExposureId from a data reference"""
        return int(dataRef.get(self.config.coaddName + "CoaddId"))

class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep"}):
    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use."
             "If the single band deblender was used this should be 'deblendedFlux."
             "If the multi-band deblender was used this should be 'deblendedModel, "
             "or deblendedFlux if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'"),
        name="{inputCoaddName}Coadd_deblendedFlux",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        if config.doPropagateFlags is False:
            self.inputs -= set(("visitCatalogs",))

        if config.doMatchSources is False:
            self.outputs -= set(("matchResult",))

        if config.doWriteMatchesDenormalized is False:
            self.outputs -= set(("denormMatches",))

class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """!
    @anchor MeasureMergedCoaddSourcesConfig_

    @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
    """
    inputCatalog = Field(dtype=str, default="deblendedFlux",
                         doc=("Name of the input catalog to use."
                              "If the single band deblender was used this should be 'deblendedFlux."
                              "If the multi-band deblender was used this should be 'deblendedModel."
                              "If no deblending was performed this should be 'mergeDet'"))
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool, default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str, default="raise",
    )
    doApCorr = Field(
        dtype=bool, default=True,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool, default=True,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )
    hasFakes = Field(
        dtype=bool, default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )
    @property
    def refObjLoader(self):
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_LocalPhotoCalib']
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE']

    def validate(self):
        super().validate()
        refCatGen2 = getattr(self.refObjLoader, "ref_dataset_name", None)
        if refCatGen2 is not None and refCatGen2 != self.connections.refCat:
            raise ValueError(
                f"Gen2 ({refCatGen2}) and Gen3 ({self.connections.refCat}) reference catalogs "
                f"are different. These options must be kept in sync until Gen2 is retired."
            )

class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
    """Get the psfCache setting into MeasureMergedCoaddSourcesTask"""
    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache)
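
# Minimal config-override sketch for MeasureMergedCoaddSourcesConfig (values hypothetical),
# together with its effect on the connections defined above: disabling matching prunes the
# optional match outputs from the quantum graph.
#
#   config = MeasureMergedCoaddSourcesConfig()
#   config.doMatchSources = False          # skip reference-catalog matching entirely
#   config.psfCache = 200                  # hypothetical larger CoaddPsf cache
#   connections = MeasureMergedCoaddSourcesConnections(config=config)
#   # "matchResult" is no longer among connections.outputs, and "denormMatches" is likewise
#   # dropped whenever doWriteMatchesDenormalized is False.
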
class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask):
    r"""!
    @anchor MeasureMergedCoaddSourcesTask_

    @brief Deblend sources from master catalog in each coadd separately and measure.

    @section pipe_tasks_multiBand_Contents Contents

      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
      - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description

    Command-line task that uses peaks and footprints from a master catalog to perform deblending and
    measurement in each coadd.

    Given a master input catalog of sources (peaks and footprints) or deblender outputs
    (including a HeavyFootprint in each band), measure each source on the
    coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
    consistent set of child sources.

    The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
    properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
    flags are propagated to the coadd sources.

    Optionally, we can match the coadd sources to an external reference catalog.

    @par Inputs:
        deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
        @n deepCoadd_calexp{tract,patch,filter}: ExposureF
    @par Outputs:
        deepCoadd_meas{tract,patch,filter}: SourceCatalog

    MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:

    <DL>
      <DT> @ref SingleFrameMeasurementTask_ "measurement"
      <DD> Measure source properties of deblended sources.</DD>
      <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
      <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are
      not at the edge of the field and that have either not been deblended or are the children of deblended
      sources.</DD>
      <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
      <DD> Propagate flags set in individual visits to the coadd.</DD>
      <DT> @ref DirectMatchTask_ "match"
      <DD> Match input sources to a reference catalog (optional).
      </DD>
    </DL>
    These subtasks may be retargeted as required.

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization

    @copydoc \_\_init\_\_

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task

    @copydoc run

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters

    See @ref MeasureMergedCoaddSourcesConfig_

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables

    The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
    flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py

    MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
    the various sub-tasks. See the documentation for individual sub-tasks for more information.

    @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
    MeasureMergedCoaddSourcesTask

    After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
    The next stage in the multi-band processing procedure will merge these measurements into a suitable
    catalog for driving forced photometry.

    Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds
    to be processed.
    A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
    `--help` command line argument:

        measureCoaddSources.py --help

    To demonstrate usage of the MeasureMergedCoaddSourcesTask in the larger context of multi-band processing,
    we will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has
    finished step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I
    band coadd as follows:

        measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I

    This will process the HSC-I band data. The results are written in
    `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`.

    It is also necessary to run

        measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R

    to generate the sources catalogs for the HSC-R band required by the next step in the multi-band
    procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
    """
    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig
    RunnerClass = MeasureMergedCoaddSourcesRunner
    getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")
    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_calexp",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                               ContainerClass=ExistingCoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser
    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        """!
        @brief Initialize the task.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
        @param[in] schema: the schema of the merged detection catalog used as input to this one
        @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
        @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external reference
            catalog. May be None if the loader can be constructed from the butler argument or all steps
            requiring a reference catalog are disabled.
        @param[in] butler: a butler used to read the input schemas from disk or construct the reference
            catalog loader, if schema or peakSchema or refObjLoader is None

        The task will set its own self.schema attribute to the schema of the output measurement catalog.
        This will include all fields from the input schema, as well as additional fields for all the
        measurements.
        """
        super().__init__(**kwargs)
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        if initInputs is not None:
            schema = initInputs['inputSchema'].schema
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        self.outputSchema = afwTable.SourceCatalog(self.schema)
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        if self.config.doMatchSources:
            refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                                 inputs.pop('refCat'), config=self.config.refObjLoader)
            self.match.setRefObjLoader(refObjLoader)

        # Set the PSF cache capacity before measurement
        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        # Get a unique integer ID for the IdFactory and RNG seeds
        packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch", returnMaxBits=True)
        inputs['exposureId'] = packedId
        idFactory = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
        # Transform the input catalog into a source catalog with the output schema
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(inputs.pop('inputCatalog'), self.schemaMapper)
        table = sources.getTable()
        table.setMetadata(self.algMetadata)
        inputs['sources'] = sources

        skyMap = inputs.pop('skyMap')
        tractNumber = inputRefs.inputCatalog.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(inputRefs.inputCatalog.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            # Filter out any visit catalog that is not a coadd input
            ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
            visitKey = ccdInputs.schema.find("visit").key
            ccdKey = ccdInputs.schema.find("ccd").key
            inputVisitIds = set()
            ccdRecordsWcs = {}
            for ccdRecord in ccdInputs:
                visit = ccdRecord.get(visitKey)
                ccd = ccdRecord.get(ccdKey)
                inputVisitIds.add((visit, ccd))
                ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

            inputCatalogsToKeep = []
            inputCatalogWcsUpdate = []
            for i, dataRef in enumerate(inputRefs.visitCatalogs):
                key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                if key in inputVisitIds:
                    inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                    inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
            inputs['visitCatalogs'] = inputCatalogsToKeep
            inputs['wcsUpdates'] = inputCatalogWcsUpdate
            inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)
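
    # Shape-only sketch of the visit-catalog filtering in runQuantum (ids are hypothetical):
    # only source catalogs whose (visit, detector) pair appears in the coadd's CoaddInputs
    # ccd table are kept, each paired with the WCS recorded for that ccd:
    #
    #   inputVisitIds = {(903334, 16), (903336, 24)}
    #   key = (903334, 16)      # from a visit-level source catalog dataRef
    #   key in inputVisitIds    # -> True, so that catalog and its ccd WCS are retained
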
    def runDataRef(self, patchRef, psfCache=100):
        """!
        @brief Deblend and measure.

        @param[in] patchRef: Patch reference.

        Set 'is-primary' and related flags. Propagate flags
        from individual visits. Optionally match the sources to a reference catalog and write the matches.
        Finally, write the deblended sources and measurements out.
        """
        if self.config.hasFakes:
            coaddType = "fakes_" + self.config.coaddName
        else:
            coaddType = self.config.coaddName
        exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
        exposure.getPsf().setCacheCapacity(psfCache)
        sources = self.readSources(patchRef)
        table = sources.getTable()
        table.setMetadata(self.algMetadata)
        skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)

        if self.config.doPropagateFlags:
            ccdInputs = self.propagateFlags.getCcdInputs(exposure)
        else:
            ccdInputs = None

        results = self.run(exposure=exposure, sources=sources,
                           ccdInputs=ccdInputs,
                           skyInfo=skyInfo, butler=patchRef.getButler(),
                           exposureId=self.getExposureId(patchRef))

        if self.config.doMatchSources:
            self.writeMatches(patchRef, results)
        self.write(patchRef, results.outputSources)
    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
            butler=None):
        """Run measurement algorithms on the input exposure, and optionally populate the
        resulting catalog with extra information.

        Parameters
        ----------
        exposure : `lsst.afw.exposure.Exposure`
            The input exposure on which measurements are to be performed
        sources : `lsst.afw.table.SourceCatalog`
            A catalog built from the results of merged detections, or
            deblender outputs.
        skyInfo : `lsst.pipe.base.Struct`
            A struct containing information about the position of the input exposure within
            a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
        exposureId : `int` or `bytes`
            packed unique number or bytes unique to the input exposure
        ccdInputs : `lsst.afw.table.ExposureCatalog`
            Catalog containing information on the individual visits which went into making
            the exposure
        visitCatalogs : list of `lsst.afw.table.SourceCatalogs` or `None`
            A list of source catalogs corresponding to measurements made on the individual
            visits which went into the input exposure. If None and butler is `None` then
            the task cannot propagate visit flags to the output catalog.
        wcsUpdates : list of `lsst.afw.geom.SkyWcs` or `None`
            If visitCatalogs is not `None` this should be a list of wcs objects which correspond
            to the input visits. Used to put all coordinates to common system. If `None` and
            butler is `None` then the task cannot propagate visit flags to the output catalog.
        butler : `lsst.daf.butler.Butler` or `lsst.daf.persistence.Butler`
            Either a gen2 or gen3 butler used to load visit catalogs

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results of running measurement task. Will contain the catalog in the
            sources attribute. Optionally will have results of matching to a
            reference catalog in the matchResults attribute, and denormalized
            matches in the denormMatches attribute.
        """
        self.measurement.run(sources, exposure, exposureId=exposureId)

        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=sources,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        # Source selectors require contiguous catalogs, so ensure contiguity now
        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
                                 patchInfo=skyInfo.patchInfo, includeDeblend=self.deblended)
        if self.config.doPropagateFlags:
            self.propagateFlags.run(butler, sources, ccdInputs, exposure.getWcs(), visitCatalogs, wcsUpdates)

        results = Struct()

        if self.config.doMatchSources:
            matchResult = self.match.run(sources, exposure.getInfo().getFilter().getName())
            matches = afwTable.packMatches(matchResult.matches)
            matches.table.setMetadata(matchResult.matchMeta)
            results.matchResult = matches
            if self.config.doWriteMatchesDenormalized:
                if matchResult.matches:
                    denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
                else:
                    self.log.warn("No matches, so generating dummy denormalized matches file")
                    denormMatches = afwTable.BaseCatalog(afwTable.Schema())
                    denormMatches.setMetadata(PropertyList())
                    denormMatches.getMetadata().add("COMMENT",
                                                    "This catalog is empty because no matches were found.")
                results.denormMatches = denormMatches

        results.outputSources = sources
        return results
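
    # Summary of the Struct returned by run() (field names as set above; availability
    # depends on the doMatchSources / doWriteMatchesDenormalized configuration):
    #
    #   results.outputSources   # measured SourceCatalog (always present)
    #   results.matchResult     # packed reference matches, only if doMatchSources
    #   results.denormMatches   # denormalized matches, only if doWriteMatchesDenormalized
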
    def readSources(self, dataRef):
        """!
        @brief Read input sources.

        @param[in] dataRef: Data reference for catalog of merged detections
        @return List of sources in merged catalog

        We also need to add columns to hold the measurements we're about to make
        so we can measure in-place.
        """
        merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
        self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
        idFactory = self.makeIdFactory(dataRef)
        for s in merged:
            idFactory.notify(s.getId())
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(merged, self.schemaMapper)
        return sources
    def writeMatches(self, dataRef, results):
        """!
        @brief Write matches of the sources to the astrometric reference catalog.

        @param[in] dataRef: data reference
        @param[in] results: results struct from run method
        """
        if hasattr(results, "matchResult"):
            dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch")
        if hasattr(results, "denormMatches"):
            dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull")
    def write(self, dataRef, sources):
        """!
        @brief Write the source catalog.

        @param[in] dataRef: data reference
        @param[in] sources: source catalog
        """
        dataRef.put(sources, self.config.coaddName + "Coadd_meas")
        self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId))
    def getExposureId(self, dataRef):
        return int(dataRef.get(self.config.coaddName + "CoaddId"))