from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner,
                            PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
from lsst.pex.config import Config, Field, ConfigurableField
from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
from lsst.daf.base import PropertyList
from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask
from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask
from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs
from .multiBandUtils import getInputSchema, getShortFilterName, readCatalog, _makeMakeIdFactory
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask


"""
* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
* deepCoadd_mergeDet: merged detections (tract, patch)
* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
* deepCoadd_ref: reference sources (tract, patch)

All of these have associated *_schema catalogs that require no data ID and hold no records.

In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
the mergeDet, meas, and ref dataset Footprints:

* deepCoadd_peak_schema
"""


class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "abstract_filter", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep",
                                                      "outputCoaddName": "deep"}):
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "abstract_filter", "skymap")
    )
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "abstract_filter", "skymap")
    )
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "abstract_filter", "skymap")
    )
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "abstract_filter", "skymap")
    )


class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """!
    @anchor DetectCoaddSourcesConfig_

    @brief Configuration parameters for the DetectCoaddSourcesTask
    """
    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    doInsertFakes = Field(dtype=bool, default=False,
                          doc="Run fake sources injection task")
    insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
                                    doc="Injection of fake sources for testing "
                                        "purposes (must be retargeted)")
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    def setDefaults(self):
        super().setDefaults()
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True


class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
    """!
    @anchor DetectCoaddSourcesTask_

    @brief Detect sources on a coadd

    @section pipe_tasks_multiBand_Contents Contents

      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
      - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description

    Command-line task that detects sources on a coadd of exposures obtained with a single filter.

    Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
    properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
    in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
    propagate the full covariance matrix -- but it is simple and works well in practice.

    After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref
    SourceDetectionTask_ "detection" subtask.

    @par Inputs:
      deepCoadd{tract,patch,filter}: ExposureF
    @par Outputs:
      deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
      @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input exposure
      @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList

    DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
    You can retarget this subtask if you wish.

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization

    @copydoc \_\_init\_\_

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters

    See @ref DetectCoaddSourcesConfig_ "DetectCoaddSourcesConfig"

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables

    The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
    flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py

    DetectCoaddSourcesTask has no debug variables of its own because it delegates all the work to
    @ref SourceDetectionTask_ "SourceDetectionTask"; see the documentation for
    @ref SourceDetectionTask_ "SourceDetectionTask" for further information.

    @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example
    of using DetectCoaddSourcesTask

    DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose
    of the task is to update the background, detect all sources in a single band and generate a set of parent
    footprints. Subsequent tasks in the multi-band processing procedure will merge sources across bands and,
    eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects a data
    reference to the coadd to be processed. A list of the available optional arguments can be obtained by
    calling detectCoaddSources.py with the `--help` command line argument:

      detectCoaddSources.py --help

    To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
    will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
    steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:

      detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I

    that will process the HSC-I band data. The results are written to
    `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.

    It is also necessary to run:

      detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R

    to generate the source catalogs for the HSC-R band required by the next step in the multi-band
    processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".
    """
    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig
    getSchemaCatalogs = _makeGetSchemaCatalogs("det")
    makeIdFactory = _makeMakeIdFactory("CoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                               ContainerClass=ExistingCoaddDataIdContainer)
        return parser

    def __init__(self, schema=None, **kwargs):
        """!
        @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):

        @param[in] schema: initial schema for the output catalog, modified in-place to include all
            fields set by this task. If None, the source minimal schema will be used.
        @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
        """
        super().__init__(**kwargs)
        if schema is None:
            schema = afwTable.SourceTable.makeMinimalSchema()
        if self.config.doInsertFakes:
            self.makeSubtask("insertFakes")
        self.schema = schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")

        self.detectionSchema = afwTable.SourceCatalog(self.schema)
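
    # Illustrative sketch (assumption, not from the original source): because "detection" is a
    # ConfigurableField, its configuration can be adjusted before the task is built; the
    # thresholdValue field and the value 5.0 below are illustrative.
    #
    #     config = DetectCoaddSourcesConfig()
    #     config.detection.thresholdValue = 5.0
    #     task = DetectCoaddSourcesTask(config=config)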

    def runDataRef(self, patchRef):
        """!
        @brief Run detection on a coadd.

        Invokes @ref run and then uses @ref write to output the results.

        @param[in] patchRef: data reference for patch
        """
        if self.config.hasFakes:
            exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True)
        else:
            exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
        expId = int(patchRef.get(self.config.coaddName + "CoaddId"))
        results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
        self.write(results, patchRef)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch_abstract_filter", returnMaxBits=True)
        inputs["idFactory"] = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
        inputs["expId"] = packedId
        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)
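
    # Illustrative sketch of the ID packing used above (the values are hypothetical): the packed
    # data ID occupies the high bits of each 64-bit source ID, leaving the low 64 - maxBits bits
    # for a per-source counter.
    #
    #     packedId, maxBits = 12345, 34    # stand-ins for dataId.pack(..., returnMaxBits=True)
    #     idFactory = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
    #     # every ID produced by idFactory carries packedId in its high bits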

    def run(self, exposure, idFactory, expId):
        """!
        @brief Run detection on an exposure.

        First scale the variance plane to match the observed variance
        using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
        detect sources.

        @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled,
            depending on configuration).
        @param[in] idFactory: IdFactory to set source identifiers
        @param[in] expId: Exposure identifier (integer) for RNG seed

        @return a pipe.base.Struct with fields
        - sources: catalog of detections
        - backgrounds: list of backgrounds
        """
        if self.config.doScaleVariance:
            varScale = self.scaleVariance.run(exposure.maskedImage)
            exposure.getMetadata().add("variance_scale", varScale)
        backgrounds = afwMath.BackgroundList()
        if self.config.doInsertFakes:
            self.insertFakes.run(exposure, background=backgrounds)
        table = afwTable.SourceTable.make(self.schema, idFactory)
        detections = self.detection.makeSourceCatalog(table, exposure, expId=expId)
        sources = detections.sources
        fpSets = detections.fpSets
        if hasattr(fpSets, "background") and fpSets.background:
            for bg in fpSets.background:
                backgrounds.append(bg)
        return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
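
    # Illustrative sketch (assumptions noted inline): run() can also be called directly on an
    # in-memory coadd; `coadd` below is assumed to be an already-loaded ExposureF and the expId
    # seed value is arbitrary.
    #
    #     task = DetectCoaddSourcesTask(config=DetectCoaddSourcesConfig())
    #     result = task.run(coadd, afwTable.IdFactory.makeSimple(), expId=0)
    #     print(len(result.outputSources), "sources detected")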

    def write(self, results, patchRef):
        """!
        @brief Write out results from run.

        @param[in] results: Struct returned from run
        @param[in] patchRef: data reference for patch
        """
        coaddName = self.config.coaddName + "Coadd"
        patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
        patchRef.put(results.outputSources, coaddName + "_det")
        if self.config.hasFakes:
            patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp")
        else:
            patchRef.put(results.outputExposure, coaddName + "_calexp")


class DeblendCoaddSourcesConfig(Config):
    """DeblendCoaddSourcesConfig

    Configuration parameters for the `DeblendCoaddSourcesTask`.
    """
    singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
                                          doc="Deblend sources separately in each band")
    multiBandDeblend = ConfigurableField(target=MultibandDeblendTask,
                                         doc="Deblend sources simultaneously across bands")
    simultaneous = Field(dtype=bool, default=False, doc="Simultaneously deblend all bands?")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    hasFakes = Field(dtype=bool,
                     default=False,
                     doc="Should be set to True if fake sources have been inserted into the input data.")

    def setDefaults(self):
        Config.setDefaults(self)
        self.singleBandDeblend.propagateAllPeaks = True


class DeblendCoaddSourcesRunner(MergeSourcesRunner):
    """Task runner for `DeblendCoaddSourcesTask`

    Required because the run method requires a list of
    dataRefs rather than a single dataRef.
    """
    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
372 """Provide a list of patch references for each patch, tract, filter combo. 379 Keyword arguments passed to the task 384 List of tuples, where each tuple is a (dataRef, kwargs) pair. 386 refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
387 kwargs[
"psfCache"] = parsedCmd.psfCache
388 return [(list(p.values()), kwargs)
for t
in refDict.values()
for p
in t.values()]
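
    # Illustrative sketch of the return shape (the data references are hypothetical): for a
    # single tract/patch observed in two bands, the list comprehension above yields one target
    # whose first element is the per-band list of patch references:
    #
    #     [([patchRef_HSC_R, patchRef_HSC_I], {"psfCache": 100})]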


class DeblendCoaddSourcesTask(CmdLineTask):
    """Deblend the sources in a merged catalog

    Deblend sources from master catalog in each coadd.
    This can either be done separately in each band using the HSC-SDSS deblender
    (`DeblendCoaddSourcesTask.config.simultaneous==False`)
    or use SCARLET to simultaneously fit the blend in all bands
    (`DeblendCoaddSourcesTask.config.simultaneous==True`).
    The task will set its own `self.schema` attribute to the `Schema` of the
    output deblended catalog.
    This will include all fields from the input `Schema`, as well as additional fields
    from the deblender.

    Parameters
    ----------
    butler: `Butler`
        Butler used to read the input schemas from disk or
        construct the reference catalog loader, if `schema` or `peakSchema` is None.
    schema: `Schema`
        The schema of the merged detection catalog as an input to this task.
    peakSchema: `Schema`
        The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog.
    """
    ConfigClass = DeblendCoaddSourcesConfig
    RunnerClass = DeblendCoaddSourcesRunner
    _DefaultName = "deblendCoaddSources"
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_calexp",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
                               ContainerClass=ExistingCoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser

    def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
        CmdLineTask.__init__(self, **kwargs)
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        if peakSchema is None:
            assert butler is not None, "Neither butler nor peakSchema is defined"
            peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema

        if self.config.simultaneous:
            self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
        else:
            self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)

    def getSchemaCatalogs(self):
        """Return a dict of empty catalogs for each catalog dataset produced by this task.

        Returns
        -------
        result: `dict`
            Dictionary of empty catalogs, with catalog names as keys.
        """
        catalog = afwTable.SourceCatalog(self.schema)
        return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
                self.config.coaddName + "Coadd_deblendedModel": catalog}

    def runDataRef(self, patchRefList, psfCache=100):
        """Deblend each source simultaneously or separately
        (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
        Set `is-primary` and related flags.
        Propagate flags from individual visits.
        Write the deblended sources out.

        Parameters
        ----------
        patchRefList: `list`
            List of data references for each filter
        """
        if self.config.hasFakes:
            coaddType = "fakes_" + self.config.coaddName
        else:
            coaddType = self.config.coaddName

        if self.config.simultaneous:
            # Deblend all bands at once with the multiband deblender
            filters = []
            exposures = []
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                filters.append(patchRef.dataId["filter"])
                exposures.append(exposure)
            sources = self.readSources(patchRef)
            exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
            fluxCatalogs, templateCatalogs = self.multiBandDeblend.run(exposure, sources)
            for n in range(len(patchRefList)):
                self.write(patchRefList[n], fluxCatalogs[filters[n]], templateCatalogs[filters[n]])
        else:
            # Deblend each band separately with the single band deblender
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                exposure.getPsf().setCacheCapacity(psfCache)
                sources = self.readSources(patchRef)
                self.singleBandDeblend.run(exposure, sources)
                self.write(patchRef, sources)
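
    # Illustrative sketch (not from the original source): the branch taken above is purely a
    # configuration choice; `butler` below is assumed to point at an existing data repository.
    #
    #     config = DeblendCoaddSourcesConfig()
    #     config.simultaneous = True   # deblend all bands at once with the multiband deblender
    #     task = DeblendCoaddSourcesTask(butler=butler, config=config)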

    def readSources(self, dataRef):
        """Read merged catalog

        Read the catalog of merged detections and create a catalog
        in a single band.

        Parameters
        ----------
        dataRef: data reference
            Data reference for catalog of merged detections

        Returns
        -------
        sources: `SourceCatalog`
            List of sources in merged catalog

        We also need to add columns to hold the measurements we're about to make
        so we can measure in-place.
        """
        merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
        self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
        idFactory = self.makeIdFactory(dataRef)
        for s in merged:
            idFactory.notify(s.getId())
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(merged, self.schemaMapper)
        return sources

    def write(self, dataRef, flux_sources, template_sources=None):
        """Write the source catalog(s)

        Parameters
        ----------
        dataRef: Data Reference
            Reference to the output catalog.
        flux_sources: `SourceCatalog`
            Flux conserved sources to write to file.
            If using the single band deblender, this is the catalog generated.
        template_sources: `SourceCatalog`
            Source catalog using the multiband template models.
        """
        if flux_sources is not None:
            assert not self.config.simultaneous or self.config.multiBandDeblend.conserveFlux
            dataRef.put(flux_sources, self.config.coaddName + "Coadd_deblendedFlux")
        if template_sources is not None:
            assert self.config.multiBandDeblend.saveTemplates
            dataRef.put(template_sources, self.config.coaddName + "Coadd_deblendedModel")
        self.log.info("Wrote %d sources: %s" % (len(flux_sources), dataRef.dataId))
562 """Write the metadata produced from processing the data. 566 List of Butler data references used to write the metadata. 567 The metadata is written to dataset type `CmdLineTask._getMetadataName`. 569 for dataRef
in dataRefList:
571 metadataName = self._getMetadataName()
572 if metadataName
is not None:
573 dataRef.put(self.getFullMetadata(), metadataName)
574 except Exception
as e:
575 self.log.warn(
"Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)
577 def getExposureId(self, dataRef):
578 """Get the ExposureId from a data reference 580 return int(dataRef.get(self.config.coaddName +
"CoaddId"))


class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "abstract_filter", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep"}):
    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "abstract_filter", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name="{inputCoaddName}Coadd_skyMap",
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, abstract_filter. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use. "
             "If the single band deblender was used this should be 'deblendedFlux'. "
             "If the multi-band deblender was used this should be 'deblendedModel', "
             "or 'deblendedFlux' if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'."),
        name="{inputCoaddName}Coadd_deblendedFlux",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "abstract_filter", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "abstract_filter", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "abstract_filter", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "abstract_filter", "skymap"),
        storageClass="Catalog",
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        if config.doPropagateFlags is False:
            self.inputs -= set(("visitCatalogs",))

        if config.doMatchSources is False:
            self.outputs -= set(("matchResult",))

        if config.doWriteMatchesDenormalized is False:
            self.outputs -= set(("denormMatches",))


class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """!
    @anchor MeasureMergedCoaddSourcesConfig_

    @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
    """
    inputCatalog = Field(dtype=str, default="deblendedFlux",
                         doc=("Name of the input catalog to use. "
                              "If the single band deblender was used this should be 'deblendedFlux'. "
                              "If the multi-band deblender was used this should be 'deblendedModel'. "
                              "If no deblending was performed this should be 'mergeDet'"))
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        default=False,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
        dtype=str,
        default="raise",
    )
    doApCorr = Field(
        dtype=bool,
        default=True,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        default=True,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    @property
    def refObjLoader(self):
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        self.measurement.plugins.names |= ['base_InputCount', 'base_Variance']
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE']

    def validate(self):
        super().validate()
        refCatGen2 = getattr(self.refObjLoader, "ref_dataset_name", None)
        if refCatGen2 is not None and refCatGen2 != self.connections.refCat:
            raise ValueError(
                f"Gen2 ({refCatGen2}) and Gen3 ({self.connections.refCat}) reference catalogs "
                "are different. These options must be kept in sync until Gen2 is retired."
            )


class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
758 """Get the psfCache setting into MeasureMergedCoaddSourcesTask""" 760 def getTargetList(parsedCmd, **kwargs):
761 return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache)


class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask):
    """!
    @anchor MeasureMergedCoaddSourcesTask_

    @brief Deblend sources from master catalog in each coadd separately and measure.

    @section pipe_tasks_multiBand_Contents Contents

      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
      - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description

    Command-line task that uses peaks and footprints from a master catalog to perform deblending and
    measurement in each coadd.

    Given a master input catalog of sources (peaks and footprints) or deblender outputs
    (including a HeavyFootprint in each band), measure each source on the
    coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
    consistent set of child sources.

    The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
    properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
    flags are propagated to the coadd sources.

    Optionally, we can match the coadd sources to an external reference catalog.

    @par Inputs:
      deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
      @n deepCoadd_calexp{tract,patch,filter}: ExposureF
    @par Outputs:
      deepCoadd_meas{tract,patch,filter}: SourceCatalog

    MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:

    <DL>
      <DT> @ref SingleFrameMeasurementTask_ "measurement"
      <DD> Measure source properties of deblended sources.</DD>
      <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
      <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that
      are not at the edge of the field and that have either not been deblended or are the children of
      deblended sources.</DD>
      <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
      <DD> Propagate flags set in individual visits to the coadd.</DD>
      <DT> @ref DirectMatchTask_ "match"
      <DD> Match input sources to a reference catalog (optional).</DD>
    </DL>
    These subtasks may be retargeted as required.

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization

    @copydoc \_\_init\_\_

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters

    See @ref MeasureMergedCoaddSourcesConfig_

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables

    The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
    flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py

    MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
    the various sub-tasks. See the documentation for individual sub-tasks for more information.

    @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
    MeasureMergedCoaddSourcesTask

    After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
    The next stage in the multi-band processing procedure will merge these measurements into a suitable
    catalog for driving forced photometry.

    Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds
    to be processed.
    A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
    `--help` command line argument:

      measureCoaddSources.py --help

    To demonstrate usage of the MeasureMergedCoaddSourcesTask in the larger context of multi-band
    processing, we will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming
    one has finished step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in
    the HSC-I band as follows:

      measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I

    This will process the HSC-I band data. The results are written to
    `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`.

    It is also necessary to run

      measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R

    to generate the source catalogs for the HSC-R band required by the next step in the multi-band
    procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
    """
    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig
    RunnerClass = MeasureMergedCoaddSourcesRunner
    getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_calexp",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                               ContainerClass=ExistingCoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser

    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        """!
        @brief Initialize the task.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
        @param[in] schema: the schema of the merged detection catalog used as input to this one
        @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection
            catalog
        @param[in] refObjLoader: an instance of LoadReferenceObjectsTask that supplies an external reference
            catalog. May be None if the loader can be constructed from the butler argument or all steps
            requiring a reference catalog are disabled.
        @param[in] butler: a butler used to read the input schemas from disk or construct the reference
            catalog loader, if schema or peakSchema or refObjLoader is None

        The task will set its own self.schema attribute to the schema of the output measurement catalog.
        This will include all fields from the input schema, as well as additional fields for all the
        measurements.
        """
        super().__init__(**kwargs)
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        if initInputs is not None:
            schema = initInputs['inputSchema'].schema
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema",
                                immediate=True).schema
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        self.outputSchema = afwTable.SourceCatalog(self.schema)
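
    # Illustrative sketch (Gen2; REPO_PATH is a placeholder): when only a butler is supplied,
    # the input schema is read from the "<coaddName>Coadd_deblendedFlux_schema" dataset as above.
    #
    #     from lsst.daf.persistence import Butler
    #     task = MeasureMergedCoaddSourcesTask(butler=Butler(REPO_PATH))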

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        if self.config.doMatchSources:
            refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                                 inputs.pop('refCat'), config=self.config.refObjLoader)
            self.match.setRefObjLoader(refObjLoader)

        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch", returnMaxBits=True)
        inputs['exposureId'] = packedId
        idFactory = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(inputs.pop('inputCatalog'), self.schemaMapper)
        table = sources.getTable()
        table.setMetadata(self.algMetadata)
        inputs['sources'] = sources

        skyMap = inputs.pop('skyMap')
        tractNumber = inputRefs.inputCatalog.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(inputRefs.inputCatalog.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
            visitKey = ccdInputs.schema.find("visit").key
            ccdKey = ccdInputs.schema.find("ccd").key
            inputVisitIds = set()
            ccdRecordsWcs = {}
            for ccdRecord in ccdInputs:
                visit = ccdRecord.get(visitKey)
                ccd = ccdRecord.get(ccdKey)
                inputVisitIds.add((visit, ccd))
                ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

            inputCatalogsToKeep = []
            inputCatalogWcsUpdate = []
            for i, dataRef in enumerate(inputRefs.visitCatalogs):
                key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                if key in inputVisitIds:
                    inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                    inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
            inputs['visitCatalogs'] = inputCatalogsToKeep
            inputs['wcsUpdates'] = inputCatalogWcsUpdate
            inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)

    def runDataRef(self, patchRef, psfCache=100):
        """!
        @brief Deblend and measure.

        @param[in] patchRef: Patch reference.

        Set 'is-primary' and related flags. Propagate flags
        from individual visits. Optionally match the sources to a reference catalog and write the matches.
        Finally, write the deblended sources and measurements out.
        """
        if self.config.hasFakes:
            coaddType = "fakes_" + self.config.coaddName
        else:
            coaddType = self.config.coaddName
        exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
        exposure.getPsf().setCacheCapacity(psfCache)
        sources = self.readSources(patchRef)
        table = sources.getTable()
        table.setMetadata(self.algMetadata)
        skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)

        if self.config.doPropagateFlags:
            ccdInputs = self.propagateFlags.getCcdInputs(exposure)
        else:
            ccdInputs = None

        results = self.run(exposure=exposure, sources=sources,
                           ccdInputs=ccdInputs,
                           skyInfo=skyInfo, butler=patchRef.getButler(),
                           exposureId=self.getExposureId(patchRef))

        if self.config.doMatchSources:
            self.writeMatches(patchRef, results)
        self.write(patchRef, results.outputSources)

    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None,
            wcsUpdates=None, butler=None):
        """Run measurement algorithms on the input exposure, and optionally populate the
        resulting catalog with extra information.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The input exposure on which measurements are to be performed
        sources : `lsst.afw.table.SourceCatalog`
            A catalog built from the results of merged detections, or
            deblender outputs.
        skyInfo : `lsst.pipe.base.Struct`
            A struct containing information about the position of the input exposure within
            a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
        exposureId : `int` or `bytes`
            packed unique number or bytes unique to the input exposure
        ccdInputs : `lsst.afw.table.ExposureCatalog`
            Catalog containing information on the individual visits which went into making
            the input exposure
        visitCatalogs : list of `lsst.afw.table.SourceCatalogs` or `None`
            A list of source catalogs corresponding to measurements made on the individual
            visits which went into the input exposure. If None and butler is `None` then
            the task cannot propagate visit flags to the output catalog.
        wcsUpdates : list of `lsst.afw.geom.SkyWcs` or `None`
            If visitCatalogs is not `None` this should be a list of wcs objects which correspond
            to the input visits. Used to put all coordinates into a common system. If `None` and
            butler is `None` then the task cannot propagate visit flags to the output catalog.
        butler : `lsst.daf.butler.Butler` or `lsst.daf.persistence.Butler`
            Either a gen2 or gen3 butler used to load visit catalogs

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results of running measurement task. Will contain the catalog in the
            sources attribute. Optionally will have results of matching to a
            reference catalog in the matchResults attribute, and denormalized
            matches in the denormMatches attribute.
        """
        self.measurement.run(sources, exposure, exposureId=exposureId)

        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=sources,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        self.setPrimaryFlags.run(sources, skyInfo.skyMap, skyInfo.tractInfo, skyInfo.patchInfo,
                                 includeDeblend=self.deblended)
        if self.config.doPropagateFlags:
            self.propagateFlags.run(butler, sources, ccdInputs, exposure.getWcs(), visitCatalogs, wcsUpdates)

        results = Struct()

        if self.config.doMatchSources:
            matchResult = self.match.run(sources, exposure.getInfo().getFilter().getName())
            matches = afwTable.packMatches(matchResult.matches)
            matches.table.setMetadata(matchResult.matchMeta)
            results.matchResult = matches
            if self.config.doWriteMatchesDenormalized:
                if matchResult.matches:
                    denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
                else:
                    self.log.warn("No matches, so generating dummy denormalized matches file")
                    denormMatches = afwTable.BaseCatalog(afwTable.Schema())
                    denormMatches.setMetadata(PropertyList())
                    denormMatches.getMetadata().add("COMMENT",
                                                    "This catalog is empty because no matches were found.")
                results.denormMatches = denormMatches

        results.outputSources = sources
        return results
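
    # Illustrative sketch (hypothetical call): the Struct returned by run() always carries
    # outputSources; the match products are attached only when matching is enabled.
    #
    #     results = task.run(exposure=exposure, sources=sources, skyInfo=skyInfo, exposureId=0)
    #     catalog = results.outputSources
    #     matches = getattr(results, "matchResult", None)   # None when doMatchSources is False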

    def readSources(self, dataRef):
        """!
        @brief Read input sources.

        @param[in] dataRef: Data reference for catalog of merged detections
        @return List of sources in merged catalog

        We also need to add columns to hold the measurements we're about to make
        so we can measure in-place.
        """
        merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
        self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
        idFactory = self.makeIdFactory(dataRef)
        for s in merged:
            idFactory.notify(s.getId())
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(merged, self.schemaMapper)
        return sources

    def writeMatches(self, dataRef, results):
        """!
        @brief Write matches of the sources to the astrometric reference catalog.

        @param[in] dataRef: data reference
        @param[in] results: results struct from run method
        """
        if hasattr(results, "matchResult"):
            dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch")
        if hasattr(results, "denormMatches"):
            dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull")

    def write(self, dataRef, sources):
        """!
        @brief Write the source catalog.

        @param[in] dataRef: data reference
        @param[in] sources: source catalog
        """
        dataRef.put(sources, self.config.coaddName + "Coadd_meas")
        self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId))

    def getExposureId(self, dataRef):
        return int(dataRef.get(self.config.coaddName + "CoaddId"))