import numpy as np
import warnings

import lsst.afw.table as afwTable
import lsst.afw.image as afwImage
import lsst.afw.math as afwMath
from lsst.daf.base import PropertyList
from lsst.skymap import BaseSkyMap
from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner,
                            PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
import lsst.pipe.base.connectionTypes as cT
from lsst.pex.config import Config, Field, ConfigurableField, ChoiceField
from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
from lsst.meas.deblender import SourceDeblendTask
from lsst.meas.extensions.scarlet import ScarletDeblendTask
from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
from lsst.obs.base import ExposureIdInfo
from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask
from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask
from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs
from .multiBandUtils import getInputSchema, readCatalog, _makeMakeIdFactory
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig
from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask
"""
* deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
* deepCoadd_mergeDet: merged detections (tract, patch)
* deepCoadd_meas: measurements of merged detections (tract, patch, filter)
* deepCoadd_ref: reference sources (tract, patch)
All of these have associated *_schema catalogs that require no data ID and hold no records.

In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
the mergeDet, meas, and ref dataset Footprints:
* deepCoadd_peak_schema
"""
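# A minimal sketch of fetching one of the datasets listed above with a Gen2 butler; the
# repository path and data ID values are illustrative only (they follow the ci_hsc example
# used in the task docstrings below) and are not defined in this module:
#
#     from lsst.daf.persistence import Butler
#     butler = Butler("$CI_HSC_DIR/DATA/rerun/example")
#     detCat = butler.get("deepCoadd_det", tract=0, patch="5,4", filter="HSC-I")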
class DetectCoaddSourcesConnections(PipelineTaskConnections,
                                    dimensions=("tract", "patch", "band", "skymap"),
                                    defaultTemplates={"inputCoaddName": "deep",
                                                      "outputCoaddName": "deep"}):
    detectionSchema = cT.InitOutput(
        doc="Schema of the detection catalog",
        name="{outputCoaddName}Coadd_det_schema",
        storageClass="SourceCatalog",
    )
    exposure = cT.Input(
        doc="Exposure on which detections are to be performed",
        name="{inputCoaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputBackgrounds = cT.Output(
        doc="Output Backgrounds used in detection",
        name="{outputCoaddName}Coadd_calexp_background",
        storageClass="Background",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputSources = cT.Output(
        doc="Detected sources catalog",
        name="{outputCoaddName}Coadd_det",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap")
    )
    outputExposure = cT.Output(
        doc="Exposure post detection",
        name="{outputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
    """!
    @anchor DetectCoaddSourcesConfig_

    @brief Configuration parameters for the DetectCoaddSourcesTask
    """
    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    doInsertFakes = Field(dtype=bool, default=False,
                          doc="Run fake sources injection task",
                          deprecated=("doInsertFakes is no longer supported. This config will be removed "
                                      "after v24."))
    insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
                                    doc="Injection of fake sources for testing "
                                        "purposes (must be retargeted)",
                                    deprecated=("insertFakes is no longer supported. This config will "
                                                "be removed after v24."))
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data.",
    )

    def setDefaults(self):
        super().setDefaults()
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True
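# A minimal config-override sketch for DetectCoaddSourcesConfig (e.g. a file passed to the
# command-line task with --configfile); the values are illustrative, not recommendations:
#
#     config.doScaleVariance = True
#     config.detection.thresholdValue = 5.0
#     config.detection.background.binSize = 4096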
class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
    r"""!
    @anchor DetectCoaddSourcesTask_

    @brief Detect sources on a coadd

    @section pipe_tasks_multiBand_Contents Contents

      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
      - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
      - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description

    Command-line task that detects sources on a coadd of exposures obtained with a single filter.

    Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
    properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
    in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
    propagate the full covariance matrix -- but it is simple and works well in practice.

    After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref
    SourceDetectionTask_ "detection" subtask.

      deepCoadd{tract,patch,filter}: ExposureF
      deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
      @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input exposure
      @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList

    DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
    You can retarget this subtask if you wish.

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization

    @copydoc \_\_init\_\_

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters

    See @ref DetectCoaddSourcesConfig_ "DetectCoaddSourcesConfig"

    @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables

    The command line task interface supports a flag @c -d to import @b debug.py from your @c PYTHONPATH;
    see @ref baseDebug for more about @b debug.py files.

    DetectCoaddSourcesTask has no debug variables of its own because it delegates all the work to
    @ref SourceDetectionTask_ "SourceDetectionTask"; see the documentation for
    @ref SourceDetectionTask_ "SourceDetectionTask" for further information.

    @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example of using
    DetectCoaddSourcesTask

    DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose
    of the task is to update the background, detect all sources in a single band and generate a set of
    parent footprints. Subsequent tasks in the multi-band processing procedure will merge sources across
    bands and, eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects
    a data reference to the coadd to be processed. A list of the available optional arguments can be
    obtained by calling detectCoaddSources.py with the `--help` command line argument:

        detectCoaddSources.py --help

    To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
    will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
    steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:

        detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I

    that will process the HSC-I band data. The results are written to
    `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.

    It is also necessary to run:

        detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R

    to generate the source catalogs for the HSC-R band required by the next step in the multi-band
    processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".
    """
    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig
    getSchemaCatalogs = _makeGetSchemaCatalogs("det")
    makeIdFactory = _makeMakeIdFactory("CoaddId")
    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                               ContainerClass=ExistingCoaddDataIdContainer)
        return parser
    def __init__(self, schema=None, **kwargs):
        """!
        @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):

        @param[in] schema: initial schema for the output catalog, modified in-place to include all
            fields set by this task. If None, the source minimal schema will be used.
        @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
        """
        super().__init__(**kwargs)
        if schema is None:
            schema = afwTable.SourceTable.makeMinimalSchema()
        self.schema = schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")

        self.detectionSchema = afwTable.SourceCatalog(self.schema)
    def runDataRef(self, patchRef):
        """!
        @brief Run detection on a coadd.

        Invokes @ref run and then uses @ref write to output the results.

        @param[in] patchRef: data reference for patch
        """
        if self.config.hasFakes:
            exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True)
        else:
            exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
        expId = getGen3CoaddExposureId(patchRef, coaddName=self.config.coaddName, log=self.log)
        results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
        self.write(results, patchRef)
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch_band")
        inputs["idFactory"] = exposureIdInfo.makeSourceIdFactory()
        inputs["expId"] = exposureIdInfo.expId
        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)
    def run(self, exposure, idFactory, expId):
        """!
        @brief Run detection on an exposure.

        First scale the variance plane to match the observed variance
        using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
        detect sources.

        @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled,
            depending on configuration).
        @param[in] idFactory: IdFactory to set source identifiers
        @param[in] expId: Exposure identifier (integer) for RNG seed

        @return a pipe.base.Struct with fields
        - sources: catalog of detections
        - backgrounds: list of backgrounds
        """
        if self.config.doScaleVariance:
            varScale = self.scaleVariance.run(exposure.maskedImage)
            exposure.getMetadata().add("VARIANCE_SCALE", varScale)
        backgrounds = afwMath.BackgroundList()
        table = afwTable.SourceTable.make(self.schema, idFactory)
        detections = self.detection.run(table, exposure, expId=expId)
        sources = detections.sources
        fpSets = detections.fpSets
        if hasattr(fpSets, "background") and fpSets.background:
            for bg in fpSets.background:
                backgrounds.append(bg)
        return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
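    # A minimal sketch of calling run() directly on an in-memory coadd, e.g. from a notebook;
    # `coaddExposure` is an illustrative placeholder, not something defined in this module:
    #
    #     task = DetectCoaddSourcesTask()
    #     result = task.run(coaddExposure, afwTable.IdFactory.makeSimple(), expId=0)
    #     detections = result.outputSources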
    def write(self, results, patchRef):
        """!
        @brief Write out results from runDetection.

        @param[in] results: Struct returned from runDetection
        @param[in] patchRef: data reference for patch
        """
        coaddName = self.config.coaddName + "Coadd"
        patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
        patchRef.put(results.outputSources, coaddName + "_det")
        if self.config.hasFakes:
            patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp")
        else:
            patchRef.put(results.outputExposure, coaddName + "_calexp")
class DeblendCoaddSourcesConfig(Config):
    """DeblendCoaddSourcesConfig

    Configuration parameters for the `DeblendCoaddSourcesTask`.
    """
    singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
                                          doc="Deblend sources separately in each band")
    multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
                                         doc="Deblend sources simultaneously across bands")
    simultaneous = Field(dtype=bool,
                         doc="Simultaneously deblend all bands? "
                             "True uses `multibandDeblend` while False uses `singleBandDeblend`")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    hasFakes = Field(dtype=bool,
                     default=False,
                     doc="Should be set to True if fake sources have been inserted into the input data.")

    def setDefaults(self):
        Config.setDefaults(self)
        self.singleBandDeblend.propagateAllPeaks = True
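# A minimal config-override sketch (hypothetical override file) selecting the single-band
# deblender instead of the simultaneous scarlet path:
#
#     config.simultaneous = False
#     config.singleBandDeblend.maxFootprintArea = 10000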
376 """Task runner for the `MergeSourcesTask`
378 Required because the run method requires a list of
379 dataRefs rather than a single dataRef.
382 def getTargetList(parsedCmd, **kwargs):
383 """Provide a list of patch references for each patch, tract, filter combo.
390 Keyword arguments passed to the task
395 List of tuples, where each tuple is a (dataRef, kwargs) pair.
397 refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
398 kwargs["psfCache"] = parsedCmd.psfCache
399 return [(list(p.values()), kwargs)
for t
in refDict.values()
for p
in t.values()]
class DeblendCoaddSourcesTask(CmdLineTask):
    """Deblend the sources in a merged catalog

    Deblend sources from master catalog in each coadd.
    This can either be done separately in each band using the HSC-SDSS deblender
    (`DeblendCoaddSourcesTask.config.simultaneous==False`)
    or use SCARLET to simultaneously fit the blend in all bands
    (`DeblendCoaddSourcesTask.config.simultaneous==True`).
    The task will set its own `self.schema` attribute to the `Schema` of the
    output deblended catalog.
    This will include all fields from the input `Schema`, as well as additional fields
    from the deblender.

    `pipe.tasks.multiband.DeblendCoaddSourcesTask` Description
    ----------------------------------------------------------

    Parameters
    ----------
    butler: `Butler`
        Butler used to read the input schemas from disk or
        construct the reference catalog loader, if `schema` or `peakSchema` are None.
    schema: `Schema`
        The schema of the merged detection catalog as an input to this task.
    peakSchema: `Schema`
        The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog.
    """
    ConfigClass = DeblendCoaddSourcesConfig
    RunnerClass = DeblendCoaddSourcesRunner
    _DefaultName = "deblendCoaddSources"
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId", includeBand=False)
    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_calexp",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
                               ContainerClass=ExistingCoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser
    def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
        CmdLineTask.__init__(self, **kwargs)
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        if peakSchema is None:
            assert butler is not None, "Neither butler nor peakSchema is defined"
            peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema

        if self.config.simultaneous:
            self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
        else:
            self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)
    def getSchemaCatalogs(self):
        """Return a dict of empty catalogs for each catalog dataset produced by this task.

        Returns
        -------
        result: dict
            Dictionary of empty catalogs, with catalog names as keys.
        """
        catalog = afwTable.SourceCatalog(self.schema)
        return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
                self.config.coaddName + "Coadd_deblendedModel": catalog}
    def runDataRef(self, patchRefList, psfCache=100):
        """Deblend each source simultaneously or separately
        (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
        Set `is-primary` and related flags.
        Propagate flags from individual visits.
        Write the deblended sources out.

        Parameters
        ----------
        patchRefList: list
            List of data references for each filter
        """
        if self.config.hasFakes:
            coaddType = "fakes_" + self.config.coaddName
        else:
            coaddType = self.config.coaddName

        if self.config.simultaneous:
            # Use the multiband deblender to fit all bands simultaneously
            filters = []
            exposures = []
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                filter = patchRef.get(coaddType + "Coadd_filterLabel", immediate=True)
                filters.append(filter.bandLabel)
                exposures.append(exposure)
            # Sort the exposures, patch references and filters consistently by band label
            exposures = [exposure for _, exposure in sorted(zip(filters, exposures))]
            patchRefList = [patchRef for _, patchRef in sorted(zip(filters, patchRefList))]
            filters.sort()
            sources = self.readSources(patchRef)
            exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
            templateCatalogs, fluxCatalogs = self.multiBandDeblend.run(exposure, sources)
            for n in range(len(patchRefList)):
                self.write(patchRefList[n], templateCatalogs[filters[n]], "Model")
                if filters[n] in fluxCatalogs:
                    self.write(patchRefList[n], fluxCatalogs[filters[n]], "Flux")
        else:
            for patchRef in patchRefList:
                exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
                exposure.getPsf().setCacheCapacity(psfCache)
                sources = self.readSources(patchRef)
                self.singleBandDeblend.run(exposure, sources)
                self.write(patchRef, sources, "Flux")
    def readSources(self, dataRef):
        """Read merged catalog

        Read the catalog of merged detections and create a catalog
        suitable for deblending.

        Parameters
        ----------
        dataRef: data reference
            Data reference for catalog of merged detections

        Returns
        -------
        sources: `SourceCatalog`
            List of sources in merged catalog

        We also need to add columns to hold the measurements we're about to make so we can measure in-place.
        """
        merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
        self.log.info("Read %d detections: %s", len(merged), dataRef.dataId)
        idFactory = self.makeIdFactory(dataRef)
        # Make sure new IDs do not collide with those already in the merged catalog
        maxId = np.max(merged["id"])
        idFactory.notify(maxId)
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(merged, self.schemaMapper)
        return sources
    def write(self, dataRef, sources, catalogType):
        """Write the source catalog(s)

        Parameters
        ----------
        dataRef: Data Reference
            Reference to the output catalog.
        sources: `SourceCatalog`
            Sources to write to file. If using the single band deblender,
            this is the flux conserved catalog.
        catalogType: `str`
            Suffix of the output dataset type ("Model" for the multiband template
            catalogs, "Flux" for the flux conserved catalogs).
        """
        dataRef.put(sources, self.config.coaddName + f"Coadd_deblended{catalogType}")
        self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId)
    def writeMetadata(self, dataRefList):
        """Write the metadata produced from processing the data.

        Parameters
        ----------
        dataRefList: list
            List of Butler data references used to write the metadata.
            The metadata is written to dataset type `CmdLineTask._getMetadataName`.
        """
        for dataRef in dataRefList:
            try:
                metadataName = self._getMetadataName()
                if metadataName is not None:
                    dataRef.put(self.getFullMetadata(), metadataName)
            except Exception as e:
                self.log.warning("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)
class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections,
                                           dimensions=("tract", "patch", "band", "skymap"),
                                           defaultTemplates={"inputCoaddName": "deep",
                                                             "outputCoaddName": "deep",
                                                             "deblendedCatalog": "deblendedFlux"}):
    warnings.warn("MeasureMergedCoaddSourcesConnections.defaultTemplates is deprecated and no longer used. "
                  "Use MeasureMergedCoaddSourcesConfig.inputCatalog.")
    inputSchema = cT.InitInput(
        doc="Input schema for measure merged task produced by a deblender or detection task",
        name="{inputCoaddName}Coadd_deblendedFlux_schema",
        storageClass="SourceCatalog"
    )
    outputSchema = cT.InitOutput(
        doc="Output schema after all new fields are added by task",
        name="{inputCoaddName}Coadd_meas_schema",
        storageClass="SourceCatalog"
    )
    refCat = cT.PrerequisiteInput(
        doc="Reference catalog used to match measured sources against known sources",
        name="ref_cat",
        storageClass="SimpleCatalog",
        dimensions=("skypix",),
    )
    exposure = cT.Input(
        doc="Input coadd image",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "band", "skymap")
    )
    skyMap = cT.Input(
        doc="SkyMap to use in processing",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    visitCatalogs = cT.Input(
        doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
            "further filtered in the task for the purpose of propagating flags from image calibration "
            "and characterization to coadd objects. Only used in legacy PropagateVisitFlagsTask.",
        name="src",
        dimensions=("instrument", "visit", "detector"),
        storageClass="SourceCatalog",
        multiple=True
    )
    sourceTableHandles = cT.Input(
        doc=("Source tables that are derived from the ``CalibrateTask`` sources. "
             "These tables contain astrometry and photometry flags, and optionally "
             "PSF flags."),
        name="sourceTable_visit",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    finalizedSourceTableHandles = cT.Input(
        doc=("Finalized source tables from ``FinalizeCalibrationTask``. These "
             "tables contain PSF flags from the finalized PSF estimation."),
        name="finalized_src_table",
        storageClass="DataFrame",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    inputCatalog = cT.Input(
        doc=("Name of the input catalog to use. "
             "If the single band deblender was used this should be 'deblendedFlux'. "
             "If the multi-band deblender was used this should be 'deblendedModel', "
             "or 'deblendedFlux' if the multiband deblender was configured to output "
             "deblended flux catalogs. If no deblending was performed this should "
             "be 'mergeDet'."),
        name="{inputCoaddName}Coadd_{deblendedCatalog}",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "band", "skymap"),
    )
    scarletCatalog = cT.Input(
        doc="Catalogs produced by multiband deblending",
        name="{inputCoaddName}Coadd_deblendedCatalog",
        storageClass="SourceCatalog",
        dimensions=("tract", "patch", "skymap"),
    )
    scarletModels = cT.Input(
        doc="Multiband scarlet models produced by the deblender",
        name="{inputCoaddName}Coadd_scarletModelData",
        storageClass="ScarletModelData",
        dimensions=("tract", "patch", "skymap"),
    )
    outputSources = cT.Output(
        doc="Source catalog containing all the measurement information generated in this task",
        name="{outputCoaddName}Coadd_meas",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="SourceCatalog",
    )
    matchResult = cT.Output(
        doc="Match catalog produced by configured matcher, optional on doMatchSources",
        name="{outputCoaddName}Coadd_measMatch",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    denormMatches = cT.Output(
        doc="Denormalized Match catalog produced by configured matcher, optional on "
            "doWriteMatchesDenormalized",
        name="{outputCoaddName}Coadd_measMatchFull",
        dimensions=("tract", "patch", "band", "skymap"),
        storageClass="Catalog",
    )
    def __init__(self, *, config=None):
        super().__init__(config=config)
        if config.doPropagateFlags is False:
            self.inputs -= set(("visitCatalogs",))
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))
        elif config.propagateFlags.target == PropagateSourceFlagsTask:
            # The new PropagateSourceFlagsTask does not use the legacy visit catalogs
            self.inputs -= set(("visitCatalogs",))
            if not config.propagateFlags.source_flags:
                self.inputs -= set(("sourceTableHandles",))
            if not config.propagateFlags.finalized_source_flags:
                self.inputs -= set(("finalizedSourceTableHandles",))
        else:
            # The legacy PropagateVisitFlagsTask only needs the visit catalogs
            self.inputs -= set(("sourceTableHandles",))
            self.inputs -= set(("finalizedSourceTableHandles",))

        if config.inputCatalog == "deblendedCatalog":
            self.inputs -= set(("inputCatalog",))
            if not config.doAddFootprints:
                self.inputs -= set(("scarletModels",))
        else:
            self.inputs -= set(("deblendedCatalog",))
            self.inputs -= set(("scarletModels",))

        if config.doMatchSources is False:
            self.outputs -= set(("matchResult",))

        if config.doWriteMatchesDenormalized is False:
            self.outputs -= set(("denormMatches",))
class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
                                      pipelineConnections=MeasureMergedCoaddSourcesConnections):
    """!
    @anchor MeasureMergedCoaddSourcesConfig_

    @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
    """
    inputCatalog = ChoiceField(
        dtype=str,
        default="deblendedCatalog",
        allowed={
            "deblendedCatalog": "Output catalog from ScarletDeblendTask",
            "deblendedFlux": "Output catalog from SourceDeblendTask",
            "mergeDet": "The merged detections before deblending."
        },
        doc="The name of the input catalog.",
    )
    doAddFootprints = Field(dtype=bool,
                            doc="Whether or not to add footprints to the input catalog from scarlet models. "
                                "This should be true whenever using the multi-band deblender, "
                                "otherwise this should be False.")
    doConserveFlux = Field(dtype=bool, default=True,
                           doc="Whether to use the deblender models as templates to re-distribute the flux "
                               "from the 'exposure' (True), or to perform measurements on the deblender "
                               "models (False).")
    doStripFootprints = Field(dtype=bool, default=True,
                              doc="Whether to strip footprints from the output catalog before "
                                  "saving to disk. "
                                  "This is usually done when using scarlet models to save disk space.")
    measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
    setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
    doPropagateFlags = Field(
        dtype=bool, default=True,
        doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
    )
    propagateFlags = ConfigurableField(target=PropagateSourceFlagsTask, doc="Propagate source flags to coadd")
    doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
    match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
    doWriteMatchesDenormalized = Field(
        dtype=bool,
        doc=("Write reference matches in denormalized format? "
             "This format uses more disk space, but is more convenient to read."),
    )
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
    checkUnitsParseStrict = Field(
        dtype=str,
        doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
    )
    doApCorr = Field(
        dtype=bool,
        doc="Apply aperture corrections"
    )
    applyApCorr = ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    doRunCatalogCalculation = Field(
        dtype=bool,
        doc='Run catalogCalculation task'
    )
    catalogCalculation = ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )
    hasFakes = Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    @property
    def refObjLoader(self):
        return self.match.refObjLoader

    def setDefaults(self):
        super().setDefaults()
        self.measurement.plugins.names |= ['base_InputCount',
                                           'base_LocalPhotoCalib']
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE']

    def validate(self):
        super().validate()
        refCatGen2 = getattr(self.refObjLoader, "ref_dataset_name", None)
        if refCatGen2 is not None and refCatGen2 != self.connections.refCat:
            raise ValueError(
                f"Gen2 ({refCatGen2}) and Gen3 ({self.connections.refCat}) reference catalogs "
                f"are different. These options must be kept in sync until Gen2 is retired."
            )
class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
    """Get the psfCache setting into MeasureMergedCoaddSourcesTask"""
    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache)
class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask):
    r"""!
    @anchor MeasureMergedCoaddSourcesTask_

    @brief Deblend sources from master catalog in each coadd separately and measure.

    @section pipe_tasks_multiBand_Contents Contents

      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
      - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
      - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description

    Command-line task that uses peaks and footprints from a master catalog to perform deblending and
    measurement in each coadd.

    Given a master input catalog of sources (peaks and footprints) or deblender outputs
    (including a HeavyFootprint in each band), measure each source on the
    coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
    consistent set of child sources.

    The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
    properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
    flags are propagated to the coadd sources.

    Optionally, we can match the coadd sources to an external reference catalog.

      deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
      @n deepCoadd_calexp{tract,patch,filter}: ExposureF

      deepCoadd_meas{tract,patch,filter}: SourceCatalog

    MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:

    <DL>
      <DT> @ref SingleFrameMeasurementTask_ "measurement"
      <DD> Measure source properties of deblended sources.</DD>
      <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
      <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that
      are not at the edge of the field and that have either not been deblended or are the children of
      deblended sources.</DD>
      <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
      <DD> Propagate flags set in individual visits to the coadd.</DD>
      <DT> @ref DirectMatchTask_ "match"
      <DD> Match input sources to a reference catalog (optional).</DD>
    </DL>
    These subtasks may be retargeted as required.

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization

    @copydoc \_\_init\_\_

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters

    See @ref MeasureMergedCoaddSourcesConfig_

    @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables

    The command line task interface supports a flag @c -d to import @b debug.py from your @c PYTHONPATH;
    see @ref baseDebug for more about @b debug.py files.

    MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
    the various sub-tasks. See the documentation for individual sub-tasks for more information.

    @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
    MeasureMergedCoaddSourcesTask

    After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
    The next stage in the multi-band processing procedure will merge these measurements into a suitable
    catalog for driving forced photometry.

    Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds to be
    processed.
    A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
    `--help` command line argument:

        measureCoaddSources.py --help

    To demonstrate usage of the MeasureMergedCoaddSourcesTask in the larger context of multi-band
    processing, we will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming
    one has finished step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in
    the HSC-I band coadd as follows:

        measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I

    This will process the HSC-I band data. The results are written in
    `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`.

    It is also necessary to run

        measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R

    to generate the source catalogs for the HSC-R band required by the next step in the multi-band
    procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
    """
    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig
    RunnerClass = MeasureMergedCoaddSourcesRunner
    getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId", includeBand=False)
    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_calexp",
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                               ContainerClass=ExistingCoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser
    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
                 **kwargs):
        """!
        @brief Initialize the task.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
        @param[in] schema: the schema of the merged detection catalog used as input to this one
        @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection
            catalog
        @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external
            reference catalog. May be None if the loader can be constructed from the butler argument or all
            steps requiring a reference catalog are disabled.
        @param[in] butler: a butler used to read the input schemas from disk or construct the reference
            catalog loader, if schema or peakSchema or refObjLoader is None

        The task will set its own self.schema attribute to the schema of the output measurement catalog.
        This will include all fields from the input schema, as well as additional fields for all the
        measurements.
        """
        super().__init__(**kwargs)
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        if initInputs is not None:
            schema = initInputs['inputSchema'].schema
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        self.algMetadata = PropertyList()
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)

        self.outputSchema = afwTable.SourceCatalog(self.schema)
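    # A minimal construction sketch for the Gen3 path, where the input schema arrives through
    # initInputs rather than a Gen2 butler; `inputSchema` is an illustrative placeholder for the
    # schema of the configured input catalog (a real pipeline passes this through the
    # PipelineTask machinery rather than by hand):
    #
    #     initInputs = {"inputSchema": afwTable.SourceCatalog(inputSchema)}
    #     task = MeasureMergedCoaddSourcesTask(initInputs=initInputs)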
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        if self.config.doMatchSources:
            refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
                                                 inputs.pop('refCat'), config=self.config.refObjLoader)
            self.match.setRefObjLoader(refObjLoader)

        # Set the PSF cache capacity on the coadd PSF before measurement
        inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)

        # Get a unique integer ID for the IdFactory and RNG seeds
        exposureIdInfo = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "tract_patch")
        inputs['exposureId'] = exposureIdInfo.expId
        idFactory = exposureIdInfo.makeSourceIdFactory()

        # Transform the input catalog into a catalog with the output schema
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        if "scarletCatalog" in inputs:
            inputCatalog = inputs.pop("scarletCatalog")
            catalogRef = inputRefs.scarletCatalog
        else:
            inputCatalog = inputs.pop("inputCatalog")
            catalogRef = inputRefs.inputCatalog
        sources.extend(inputCatalog, self.schemaMapper)

        # Add HeavyFootprints from the scarlet models to the deblended sources
        if self.config.doAddFootprints:
            modelData = inputs.pop('scarletModels')
            if self.config.doConserveFlux:
                redistributeImage = inputs['exposure'].image
            else:
                redistributeImage = None
            modelData.updateCatalogFootprints(
                catalog=sources,
                band=inputRefs.exposure.dataId["band"],
                psfModel=inputs['exposure'].getPsf(),
                maskImage=inputs['exposure'].mask,
                redistributeImage=redistributeImage,
                removeScarletData=True,
            )
        table = sources.getTable()
        table.setMetadata(self.algMetadata)
        inputs['sources'] = sources

        skyMap = inputs.pop('skyMap')
        tractNumber = catalogRef.dataId['tract']
        tractInfo = skyMap[tractNumber]
        patchInfo = tractInfo.getPatchInfo(catalogRef.dataId['patch'])
        skyInfo = Struct(
            skyMap=skyMap,
            tractInfo=tractInfo,
            patchInfo=patchInfo,
            wcs=tractInfo.getWcs(),
            bbox=patchInfo.getOuterBBox()
        )
        inputs['skyInfo'] = skyInfo

        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New-style flag propagation from the per-visit source tables
                ccdInputs = inputs["exposure"].getInfo().getCoaddInputs().ccds
                inputs["ccdInputs"] = ccdInputs

                if "sourceTableHandles" in inputs:
                    sourceTableHandles = inputs.pop("sourceTableHandles")
                    sourceTableHandleDict = {handle.dataId["visit"]: handle
                                             for handle in sourceTableHandles}
                    inputs["sourceTableHandleDict"] = sourceTableHandleDict
                if "finalizedSourceTableHandles" in inputs:
                    finalizedSourceTableHandles = inputs.pop("finalizedSourceTableHandles")
                    finalizedSourceTableHandleDict = {handle.dataId["visit"]: handle
                                                      for handle in finalizedSourceTableHandles}
                    inputs["finalizedSourceTableHandleDict"] = finalizedSourceTableHandleDict
            else:
                # Legacy flag propagation from the per-visit source catalogs
                ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
                visitKey = ccdInputs.schema.find("visit").key
                ccdKey = ccdInputs.schema.find("ccd").key
                inputVisitIds = set()
                ccdRecordsWcs = {}
                for ccdRecord in ccdInputs:
                    visit = ccdRecord.get(visitKey)
                    ccd = ccdRecord.get(ccdKey)
                    inputVisitIds.add((visit, ccd))
                    ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()

                inputCatalogsToKeep = []
                inputCatalogWcsUpdate = []
                for i, dataRef in enumerate(inputRefs.visitCatalogs):
                    key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
                    if key in inputVisitIds:
                        inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
                        inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
                inputs['visitCatalogs'] = inputCatalogsToKeep
                inputs['wcsUpdates'] = inputCatalogWcsUpdate
                inputs['ccdInputs'] = ccdInputs

        outputs = self.run(**inputs)
        sources = outputs.outputSources
        butlerQC.put(outputs, outputRefs)
    def runDataRef(self, patchRef, psfCache=100):
        """!
        @brief Deblend and measure.

        @param[in] patchRef: Patch reference.

        Set 'is-primary' and related flags. Propagate flags
        from individual visits. Optionally match the sources to a reference catalog and write the matches.
        Finally, write the deblended sources and measurements out.
        """
        if self.config.hasFakes:
            coaddType = "fakes_" + self.config.coaddName
        else:
            coaddType = self.config.coaddName
        exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
        exposure.getPsf().setCacheCapacity(psfCache)
        sources = self.readSources(patchRef)
        table = sources.getTable()
        table.setMetadata(self.algMetadata)
        skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)

        if self.config.doPropagateFlags:
            ccdInputs = self.propagateFlags.getCcdInputs(exposure)
        else:
            ccdInputs = None

        expId = getGen3CoaddExposureId(patchRef, coaddName=self.config.coaddName, includeBand=False,
                                       log=self.log)
        results = self.run(exposure=exposure, sources=sources, skyInfo=skyInfo, exposureId=expId,
                           ccdInputs=ccdInputs, butler=patchRef.getButler())

        if self.config.doMatchSources:
            self.writeMatches(patchRef, results)
        self.write(patchRef, results.outputSources)
    def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
            butler=None, sourceTableHandleDict=None, finalizedSourceTableHandleDict=None):
        """Run measurement algorithms on the input exposure, and optionally populate the
        resulting catalog with extra information.

        Parameters
        ----------
        exposure : `lsst.afw.exposure.Exposure`
            The input exposure on which measurements are to be performed
        sources : `lsst.afw.table.SourceCatalog`
            A catalog built from the results of merged detections, or
            deblender outputs.
        skyInfo : `lsst.pipe.base.Struct`
            A struct containing information about the position of the input exposure within
            a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
        exposureId : `int` or `bytes`
            packed unique number or bytes unique to the input exposure
        ccdInputs : `lsst.afw.table.ExposureCatalog`, optional
            Catalog containing information on the individual visits which went into making
            the coadd.
        sourceTableHandleDict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for sourceTable_visit handles (key is visit) for propagating flags.
            These tables are derived from the ``CalibrateTask`` sources, and contain
            astrometry and photometry flags, and optionally PSF flags.
        finalizedSourceTableHandleDict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`], optional
            Dict for finalized_src_table handles (key is visit) for propagating flags.
            These tables are derived from ``FinalizeCalibrationTask`` and contain
            PSF flags from the finalized PSF estimation.
        visitCatalogs : list of `lsst.afw.table.SourceCatalogs`, optional
            A list of source catalogs corresponding to measurements made on the individual
            visits which went into the input exposure. If None and butler is `None` then
            the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        wcsUpdates : list, optional
            If visitCatalogs is not `None` this should be a list of wcs objects which correspond
            to the input visits. Used to put all coordinates to common system. If `None` and
            butler is `None` then the task cannot propagate visit flags to the output catalog.
            Deprecated, to be removed with PropagateVisitFlagsTask.
        butler : `lsst.daf.persistence.Butler`, optional
            A gen2 butler used to load visit catalogs.
            Deprecated, to be removed with Gen2.

        Returns
        -------
        results : `lsst.pipe.base.Struct`
            Results of running measurement task. Will contain the catalog in the
            sources attribute. Optionally will have results of matching to a
            reference catalog in the matchResults attribute, and denormalized
            matches in the denormMatches attribute.
        """
        self.measurement.run(sources, exposure, exposureId=exposureId)

        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=sources,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        # Ensure the catalog is contiguous before running further algorithms
        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
                                 patchInfo=skyInfo.patchInfo)
        if self.config.doPropagateFlags:
            if self.config.propagateFlags.target == PropagateSourceFlagsTask:
                # New-style flag propagation from the per-visit source tables
                self.propagateFlags.run(
                    sources,
                    ccdInputs,
                    sourceTableHandleDict,
                    finalizedSourceTableHandleDict
                )
            else:
                # Legacy flag propagation from the per-visit source catalogs
                self.propagateFlags.run(
                    butler,
                    sources,
                    ccdInputs,
                    exposure.getWcs(),
                    visitCatalogs,
                    wcsUpdates
                )

        results = Struct()

        if self.config.doMatchSources:
            matchResult = self.match.run(sources, exposure.getInfo().getFilter().bandLabel)
            matches = afwTable.packMatches(matchResult.matches)
            matches.table.setMetadata(matchResult.matchMeta)
            results.matchResult = matches
            if self.config.doWriteMatchesDenormalized:
                if matchResult.matches:
                    denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
                else:
                    self.log.warning("No matches, so generating dummy denormalized matches file")
                    denormMatches = afwTable.BaseCatalog(afwTable.Schema())
                    denormMatches.setMetadata(PropertyList())
                    denormMatches.getMetadata().add("COMMENT",
                                                    "This catalog is empty because no matches were found.")
                    results.denormMatches = denormMatches
                results.denormMatches = denormMatches

        results.outputSources = sources
        return results
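    # A minimal sketch of consuming the Struct returned by run() when driving the task by hand;
    # `exposure`, `sources` and `skyInfo` are illustrative placeholders prepared as in runQuantum:
    #
    #     results = task.run(exposure=exposure, sources=sources, skyInfo=skyInfo, exposureId=0)
    #     measCat = results.outputSources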
    def readSources(self, dataRef):
        """!
        @brief Read input sources.

        @param[in] dataRef: Data reference for catalog of merged detections
        @return List of sources in merged catalog

        We also need to add columns to hold the measurements we're about to make so we can measure in-place.
        """
        merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
        self.log.info("Read %d detections: %s", len(merged), dataRef.dataId)
        idFactory = self.makeIdFactory(dataRef)
        for s in merged:
            idFactory.notify(s.getId())
        table = afwTable.SourceTable.make(self.schema, idFactory)
        sources = afwTable.SourceCatalog(table)
        sources.extend(merged, self.schemaMapper)
        return sources
    def writeMatches(self, dataRef, results):
        """!
        @brief Write matches of the sources to the astrometric reference catalog.

        @param[in] dataRef: data reference
        @param[in] results: results struct from run method
        """
        if hasattr(results, "matchResult"):
            dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch")
        if hasattr(results, "denormMatches"):
            dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull")
    def write(self, dataRef, sources):
        """!
        @brief Write the source catalog.

        @param[in] dataRef: data reference
        @param[in] sources: source catalog
        """
        dataRef.put(sources, self.config.coaddName + "Coadd_meas")
        self.log.info("Wrote %d sources: %s", len(sources), dataRef.dataId)