lsst.pipe.tasks  21.0.0-25-g85b8e57b+773e41f820
multiBand.py
1 #!/usr/bin/env python
2 #
3 # LSST Data Management System
4 # Copyright 2008-2015 AURA/LSST.
5 #
6 # This product includes software developed by the
7 # LSST Project (http://www.lsst.org/).
8 #
9 # This program is free software: you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation, either version 3 of the License, or
12 # (at your option) any later version.
13 #
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
18 #
19 # You should have received a copy of the LSST License Statement and
20 # the GNU General Public License along with this program. If not,
21 # see <https://www.lsstcorp.org/LegalNotices/>.
22 #
23 from lsst.coadd.utils.coaddDataIdContainer import ExistingCoaddDataIdContainer
24 from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner,
25  PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
26 import lsst.pipe.base.connectionTypes as cT
27 from lsst.pex.config import Config, Field, ConfigurableField
28 from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader
29 from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
30 from lsst.meas.deblender import SourceDeblendTask
31 from lsst.meas.extensions.scarlet import ScarletDeblendTask
32 from lsst.pipe.tasks.coaddBase import getSkyInfo
33 from lsst.pipe.tasks.scaleVariance import ScaleVarianceTask
34 from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
35 from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
36 from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
37 from lsst.pipe.tasks.propagateVisitFlags import PropagateVisitFlagsTask
38 import lsst.afw.image as afwImage
39 import lsst.afw.table as afwTable
40 import lsst.afw.math as afwMath
41 from lsst.daf.base import PropertyList
42 from lsst.skymap import BaseSkyMap
43 
44 from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401
45 from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401
46 from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401
47 from .multiBandUtils import getInputSchema, getShortFilterName, readCatalog, _makeMakeIdFactory # noqa: F401
48 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401
49 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401
50 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401
51 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401
52 
53 
54 """
55 New dataset types:
56 * deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
57 * deepCoadd_mergeDet: merged detections (tract, patch)
58 * deepCoadd_meas: measurements of merged detections (tract, patch, filter)
59 * deepCoadd_ref: reference sources (tract, patch)
60 All of these have associated *_schema catalogs that require no data ID and hold no records.
61 
62 In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
63 the mergeDet, meas, and ref dataset Footprints:
64 * deepCoadd_peak_schema
65 """
66 
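# Illustrative sketch (not part of the original module): retrieving the dataset types listed
# above from a Gen3 butler repository. The repository path, collection, and data ID values
# below are hypothetical placeholders.
def _exampleGetCoaddCatalogs(repo="/path/to/repo", collection="HSC/runs/example"):
    from lsst.daf.butler import Butler
    butler = Butler(repo, collections=[collection])
    dataId = dict(tract=0, patch=42, band="i", skymap="hsc_rings_v1")
    det = butler.get("deepCoadd_det", dataId)                 # per-band detections
    mergeDet = butler.get("deepCoadd_mergeDet", tract=0, patch=42, skymap="hsc_rings_v1")
    meas = butler.get("deepCoadd_meas", dataId)               # per-band measurements
    ref = butler.get("deepCoadd_ref", tract=0, patch=42, skymap="hsc_rings_v1")
    return det, mergeDet, meas, ref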
67 
68 
69 class DetectCoaddSourcesConnections(PipelineTaskConnections,
70  dimensions=("tract", "patch", "band", "skymap"),
71  defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
72  detectionSchema = cT.InitOutput(
73  doc="Schema of the detection catalog",
74  name="{outputCoaddName}Coadd_det_schema",
75  storageClass="SourceCatalog",
76  )
77  exposure = cT.Input(
78  doc="Exposure on which detections are to be performed",
79  name="{inputCoaddName}Coadd",
80  storageClass="ExposureF",
81  dimensions=("tract", "patch", "band", "skymap")
82  )
83  outputBackgrounds = cT.Output(
84  doc="Output Backgrounds used in detection",
85  name="{outputCoaddName}Coadd_calexp_background",
86  storageClass="Background",
87  dimensions=("tract", "patch", "band", "skymap")
88  )
89  outputSources = cT.Output(
90  doc="Detected sources catalog",
91  name="{outputCoaddName}Coadd_det",
92  storageClass="SourceCatalog",
93  dimensions=("tract", "patch", "band", "skymap")
94  )
95  outputExposure = cT.Output(
96  doc="Exposure post detection",
97  name="{outputCoaddName}Coadd_calexp",
98  storageClass="ExposureF",
99  dimensions=("tract", "patch", "band", "skymap")
100  )
101 
102 
103 class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
104  """!
105  @anchor DetectCoaddSourcesConfig_
106 
107  @brief Configuration parameters for the DetectCoaddSourcesTask
108  """
109  doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
110  scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
111  detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
112  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
113  doInsertFakes = Field(dtype=bool, default=False,
114  doc="Run fake sources injection task")
115  insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
116  doc="Injection of fake sources for testing "
117  "purposes (must be retargeted)")
118  hasFakes = Field(
119  dtype=bool,
120  default=False,
121  doc="Should be set to True if fake sources have been inserted into the input data."
122  )
123 
124  def setDefaults(self):
125  super().setDefaults()
126  self.detection.thresholdType = "pixel_stdev"
127  self.detection.isotropicGrow = True
128  # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
129  self.detection.reEstimateBackground = False
130  self.detection.background.useApprox = False
131  self.detection.background.binSize = 4096
132  self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
133  self.detection.doTempWideBackground = True # Suppress large footprints that overwhelm the deblender
134 
135 
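# Minimal configuration sketch (an assumption, not part of the original file): constructing
# and adjusting a DetectCoaddSourcesConfig in Python. The threshold value is purely
# illustrative; the same assignments could instead live in a --configfile override.
def _exampleDetectConfig():
    config = DetectCoaddSourcesConfig()
    config.doScaleVariance = True                    # rescale the coadd variance plane
    config.detection.thresholdValue = 5.0            # illustrative detection threshold (sigma)
    config.detection.reEstimateBackground = False    # coadds are already background subtracted
    config.validate()
    return config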
141 
142 
143 class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
144  r"""!
145  @anchor DetectCoaddSourcesTask_
146 
147  @brief Detect sources on a coadd
148 
149  @section pipe_tasks_multiBand_Contents Contents
150 
151  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
152  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
153  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
154  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
155  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
156  - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example
157 
158  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description
159 
160  Command-line task that detects sources on a coadd of exposures obtained with a single filter.
161 
162  Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
163  properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
164  in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
165  propagate the full covariance matrix -- but it is simple and works well in practice.
166 
167  After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref
168  SourceDetectionTask_ "detection" subtask.
169 
170  @par Inputs:
171  deepCoadd{tract,patch,filter}: ExposureF
172  @par Outputs:
173  deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
174  @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input
175  exposure (ExposureF)
176  @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList
177  @par Data Unit:
178  tract, patch, filter
179 
180  DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
181  You can retarget this subtask if you wish.
182 
183  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization
184 
185  @copydoc \_\_init\_\_
186 
187  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task
188 
189  @copydoc run
190 
191  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters
192 
193  See @ref DetectCoaddSourcesConfig_ "DetectCoaddSourcesConfig"
194 
195  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables
196 
197  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
198  flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
199  files.
200 
201  DetectCoaddSourcesTask has no debug variables of its own because it delegates all the work to
202  @ref SourceDetectionTask_ "SourceDetectionTask"; see the documentation for
203  @ref SourceDetectionTask_ "SourceDetectionTask" for further information.
204 
205  @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example
206  of using DetectCoaddSourcesTask
207 
208  DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose of
209  the task is to update the background, detect all sources in a single band and generate a set of parent
210  footprints. Subsequent tasks in the multi-band processing procedure will merge sources across bands and,
211  eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects a data
212  reference to the coadd to be processed. A list of the available optional arguments can be obtained by
213  calling detectCoaddSources.py with the `--help` command line argument:
214  @code
215  detectCoaddSources.py --help
216  @endcode
217 
218  To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
219  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
220  steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:
221  @code
222  detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
223  @endcode
224  This will process the HSC-I band data. The results are written to
225  `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.
226 
227  It is also necessary to run:
228  @code
229  detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
230  @endcode
231  to generate the source catalogs for the HSC-R band required by the next step in the multi-band
232  processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".
233  """
234  _DefaultName = "detectCoaddSources"
235  ConfigClass = DetectCoaddSourcesConfig
236  getSchemaCatalogs = _makeGetSchemaCatalogs("det")
237  makeIdFactory = _makeMakeIdFactory("CoaddId")
238 
239  @classmethod
240  def _makeArgumentParser(cls):
241  parser = ArgumentParser(name=cls._DefaultName)
242  parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
243  ContainerClass=ExistingCoaddDataIdContainer)
244  return parser
245 
246  def __init__(self, schema=None, **kwargs):
247  """!
248  @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.
249 
250  Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
251 
252  @param[in] schema: initial schema for the output catalog, modified in place to include all
253  fields set by this task. If None, the minimal source schema will be used.
254  @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
255  """
256  # N.B. Super is used here to handle the multiple inheritance of PipelineTasks, the init tree
257  # call structure has been reviewed carefully to be sure super will work as intended.
258  super().__init__(**kwargs)
259  if schema is None:
260  schema = afwTable.SourceTable.makeMinimalSchema()
261  if self.config.doInsertFakes:
262  self.makeSubtask("insertFakes")
263  self.schema = schema
264  self.makeSubtask("detection", schema=self.schema)
265  if self.config.doScaleVariance:
266  self.makeSubtask("scaleVariance")
267 
268  self.detectionSchema = afwTable.SourceCatalog(self.schema)
269 
270  def runDataRef(self, patchRef):
271  """!
272  @brief Run detection on a coadd.
273 
274  Invokes @ref run and then uses @ref write to output the
275  results.
276 
277  @param[in] patchRef: data reference for patch
278  """
279  if self.config.hasFakes:
280  exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True)
281  else:
282  exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
283  expId = int(patchRef.get(self.config.coaddName + "CoaddId"))
284  results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
285  self.write(results, patchRef)
286  return results
287 
288  def runQuantum(self, butlerQC, inputRefs, outputRefs):
289  inputs = butlerQC.get(inputRefs)
290  packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch_band", returnMaxBits=True)
291  inputs["idFactory"] = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
292  inputs["expId"] = packedId
293  outputs = self.run(**inputs)
294  butlerQC.put(outputs, outputRefs)
295 
296  def run(self, exposure, idFactory, expId):
297  """!
298  @brief Run detection on an exposure.
299 
300  First scale the variance plane to match the observed variance
301  using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
302  detect sources.
303 
304  @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled,
305  depending on configuration).
306  @param[in] idFactory: IdFactory to set source identifiers
307  @param[in] expId: Exposure identifier (integer) for RNG seed
308 
309  @return a pipe.base.Struct with fields
310  - sources: catalog of detections
311  - backgrounds: list of backgrounds
312  """
313  if self.config.doScaleVariance:
314  varScale = self.scaleVariance.run(exposure.maskedImage)
315  exposure.getMetadata().add("VARIANCE_SCALE", varScale)
316  backgrounds = afwMath.BackgroundList()
317  if self.config.doInsertFakes:
318  self.insertFakes.run(exposure, background=backgrounds)
319  table = afwTable.SourceTable.make(self.schema, idFactory)
320  detections = self.detection.run(table, exposure, expId=expId)
321  sources = detections.sources
322  fpSets = detections.fpSets
323  if hasattr(fpSets, "background") and fpSets.background:
324  for bg in fpSets.background:
325  backgrounds.append(bg)
326  return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
327 
328  def write(self, results, patchRef):
329  """!
330  @brief Write out results from run.
331 
332  @param[in] results: Struct returned from run
334  @param[in] patchRef: data reference for patch
335  """
336  coaddName = self.config.coaddName + "Coadd"
337  patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
338  patchRef.put(results.outputSources, coaddName + "_det")
339  if self.config.hasFakes:
340  patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp")
341  else:
342  patchRef.put(results.outputExposure, coaddName + "_calexp")
343 
344 
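# Minimal usage sketch (an assumption, not part of the original file): running detection on
# an already-loaded coadd exposure outside the butler-driven entry points. The exposure
# argument and expId value are hypothetical; in production the IdFactory is built from a
# packed tract/patch/band identifier, as in runQuantum/runDataRef above.
def _exampleDetect(exposure):
    task = DetectCoaddSourcesTask()
    idFactory = afwTable.IdFactory.makeSimple()   # stand-in for the packed coadd IdFactory
    result = task.run(exposure, idFactory, expId=0)
    return result.outputSources, result.outputBackgrounds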
345 
346 
347 class DeblendCoaddSourcesConfig(Config):
348  """DeblendCoaddSourcesConfig
349 
350  Configuration parameters for the `DeblendCoaddSourcesTask`.
351  """
352  singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
353  doc="Deblend sources separately in each band")
354  multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
355  doc="Deblend sources simultaneously across bands")
356  simultaneous = Field(dtype=bool,
357  default=True,
358  doc="Simultaneously deblend all bands? "
359  "True uses 'multiBandDeblend' while False uses 'singleBandDeblend'")
360  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
361  hasFakes = Field(dtype=bool,
362  default=False,
363  doc="Should be set to True if fake sources have been inserted into the input data.")
364 
365  def setDefaults(self):
366  Config.setDefaults(self)
367  self.singleBandDeblend.propagateAllPeaks = True
368 
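# Minimal configuration sketch (an assumption, not part of the original file): choosing the
# deblending mode. simultaneous=True runs the scarlet multi-band deblender on all bands at
# once; simultaneous=False runs the single-band (HSC-SDSS) deblender in each band separately.
def _exampleDeblendConfig(useScarlet=True):
    config = DeblendCoaddSourcesConfig()
    config.simultaneous = useScarlet
    config.validate()
    return config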
369 
370 class DeblendCoaddSourcesRunner(MergeSourcesRunner):
371  """Task runner for the `DeblendCoaddSourcesTask`
372 
373  Required because the run method requires a list of
374  dataRefs rather than a single dataRef.
375  """
376  @staticmethod
377  def getTargetList(parsedCmd, **kwargs):
378  """Provide a list of patch references for each patch, tract, filter combo.
379 
380  Parameters
381  ----------
382  parsedCmd:
383  The parsed command
384  kwargs:
385  Keyword arguments passed to the task
386 
387  Returns
388  -------
389  targetList: list
390  List of tuples, where each tuple is a (dataRef, kwargs) pair.
391  """
392  refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
393  kwargs["psfCache"] = parsedCmd.psfCache
394  return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()]
395 
396 
397 class DeblendCoaddSourcesTask(CmdLineTask):
398  """Deblend the sources in a merged catalog
399 
400  Deblend sources from master catalog in each coadd.
401  This can either be done separately in each band using the HSC-SDSS deblender
402  (`DeblendCoaddSourcesTask.config.simultaneous==False`)
403  or use SCARLET to simultaneously fit the blend in all bands
404  (`DeblendCoaddSourcesTask.config.simultaneous==True`).
405  The task will set its own `self.schema` attribute to the `Schema` of the
406  output deblended catalog.
407  This will include all fields from the input `Schema`, as well as additional fields
408  from the deblender.
409 
413 
414  Parameters
415  ----------
416  butler: `Butler`
417  Butler used to read the input schemas from disk, if `schema` or
418  `peakSchema` is None.
419  schema: `Schema`
420  The schema of the merged detection catalog as an input to this task.
421  peakSchema: `Schema`
422  The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog
423  """
424  ConfigClass = DeblendCoaddSourcesConfig
425  RunnerClass = DeblendCoaddSourcesRunner
426  _DefaultName = "deblendCoaddSources"
427  makeIdFactory = _makeMakeIdFactory("MergedCoaddId")
428 
429  @classmethod
430  def _makeArgumentParser(cls):
431  parser = ArgumentParser(name=cls._DefaultName)
432  parser.add_id_argument("--id", "deepCoadd_calexp",
433  help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
434  ContainerClass=ExistingCoaddDataIdContainer)
435  parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
436  return parser
437 
438  def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
439  CmdLineTask.__init__(self, **kwargs)
440  if schema is None:
441  assert butler is not None, "Neither butler nor schema is defined"
442  schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
443  self.schemaMapper = afwTable.SchemaMapper(schema)
444  self.schemaMapper.addMinimalSchema(schema)
445  self.schema = self.schemaMapper.getOutputSchema()
446  if peakSchema is None:
447  assert butler is not None, "Neither butler nor peakSchema is defined"
448  peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema
449 
450  if self.config.simultaneous:
451  self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
452  else:
453  self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)
454 
455  def getSchemaCatalogs(self):
456  """Return a dict of empty catalogs for each catalog dataset produced by this task.
457 
458  Returns
459  -------
460  result: dict
461  Dictionary of empty catalogs, with catalog names as keys.
462  """
463  catalog = afwTable.SourceCatalog(self.schema)
464  return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
465  self.config.coaddName + "Coadd_deblendedModel": catalog}
466 
467  def runDataRef(self, patchRefList, psfCache=100):
468  """Deblend the patch
469 
470  Deblend each source simultaneously or separately
471  (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
472  Write the deblended sources out.
475 
476  Parameters
477  ----------
478  patchRefList: list
479  List of data references for each filter
480  """
481 
482  if self.config.hasFakes:
483  coaddType = "fakes_" + self.config.coaddName
484  else:
485  coaddType = self.config.coaddName
486 
487  if self.config.simultaneous:
488  # Use SCARLET to simultaneously deblend across filters
489  filters = []
490  exposures = []
491  for patchRef in patchRefList:
492  exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
493  filters.append(patchRef.dataId["filter"])
494  exposures.append(exposure)
495  # The input sources are the same for all bands, since it is a merged catalog
496  sources = self.readSources(patchRef)
497  exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
498  templateCatalogs = self.multiBandDeblend.run(exposure, sources)
499  for n in range(len(patchRefList)):
500  self.write(patchRefList[n], templateCatalogs[filters[n]])
501  else:
502  # Use the single-band deblender to deblend each band separately
503  for patchRef in patchRefList:
504  exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
505  exposure.getPsf().setCacheCapacity(psfCache)
506  sources = self.readSources(patchRef)
507  self.singleBandDeblend.run(exposure, sources)
508  self.write(patchRef, sources)
509 
510  def readSources(self, dataRef):
511  """Read merged catalog
512 
513  Read the catalog of merged detections and create a catalog
514  in a single band.
515 
516  Parameters
517  ----------
518  dataRef: data reference
519  Data reference for catalog of merged detections
520 
521  Returns
522  -------
523  sources: `SourceCatalog`
524  List of sources in merged catalog
525 
526  We also need to add columns to hold the deblender outputs we are about to generate,
527  so we can deblend in place.
528  """
529  merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
530  self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
531  idFactory = self.makeIdFactory(dataRef)
532  for s in merged:
533  idFactory.notify(s.getId())
534  table = afwTable.SourceTable.make(self.schema, idFactory)
535  sources = afwTable.SourceCatalog(table)
536  sources.extend(merged, self.schemaMapper)
537  return sources
538 
539  def write(self, dataRef, sources):
540  """Write the source catalog(s)
541 
542  Parameters
543  ----------
544  dataRef: Data Reference
545  Reference to the output catalog.
546  sources: `SourceCatalog`
547  Flux-conserved sources to write to file. If the single-band
548  deblender was used, this is the catalog it generated.
553  """
554  dataRef.put(sources, self.config.coaddName + "Coadd_deblendedFlux")
555  self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId))
556 
557  def writeMetadata(self, dataRefList):
558  """Write the metadata produced from processing the data.
559  Parameters
560  ----------
561  dataRefList
562  List of Butler data references used to write the metadata.
563  The metadata is written to dataset type `CmdLineTask._getMetadataName`.
564  """
565  for dataRef in dataRefList:
566  try:
567  metadataName = self._getMetadataName()
568  if metadataName is not None:
569  dataRef.put(self.getFullMetadata(), metadataName)
570  except Exception as e:
571  self.log.warn("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)
572 
573  def getExposureId(self, dataRef):
574  """Get the ExposureId from a data reference
575  """
576  return int(dataRef.get(self.config.coaddName + "CoaddId"))
577 
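# Command-line sketch (an assumption, not part of the original file): deblending the merged
# detections in the ci_hsc test repository, analogous to the detectCoaddSources.py and
# measureCoaddSources.py examples shown in the docstrings in this file. The data ID values
# mirror those examples; the multi-band deblender needs all bands in one invocation, hence
# the filter list joined with '^'.
#
#   deblendCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I^HSC-R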
578 
579 class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections, dimensions=("tract", "patch", "band", "skymap"),
580  defaultTemplates={"inputCoaddName": "deep",
581  "outputCoaddName": "deep"}):
582  inputSchema = cT.InitInput(
583  doc="Input schema for measure merged task produced by a deblender or detection task",
584  name="{inputCoaddName}Coadd_deblendedFlux_schema",
585  storageClass="SourceCatalog"
586  )
587  outputSchema = cT.InitOutput(
588  doc="Output schema after all new fields are added by task",
589  name="{inputCoaddName}Coadd_meas_schema",
590  storageClass="SourceCatalog"
591  )
592  refCat = cT.PrerequisiteInput(
593  doc="Reference catalog used to match measured sources against known sources",
594  name="ref_cat",
595  storageClass="SimpleCatalog",
596  dimensions=("skypix",),
597  deferLoad=True,
598  multiple=True
599  )
600  exposure = cT.Input(
601  doc="Input coadd image",
602  name="{inputCoaddName}Coadd_calexp",
603  storageClass="ExposureF",
604  dimensions=("tract", "patch", "band", "skymap")
605  )
606  skyMap = cT.Input(
607  doc="SkyMap to use in processing",
608  name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
609  storageClass="SkyMap",
610  dimensions=("skymap",),
611  )
612  visitCatalogs = cT.Input(
613  doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
614  "further filtered in the task for the purpose of propagating flags from image calibration "
615  "and characterization to coadd objects",
616  name="src",
617  dimensions=("instrument", "visit", "detector"),
618  storageClass="SourceCatalog",
619  multiple=True
620  )
621  inputCatalog = cT.Input(
622  doc=("Name of the input catalog to use. "
623  "If the single-band deblender was used this should be 'deblendedFlux'. "
624  "If the multi-band deblender was used this should be 'deblendedModel', "
625  "or 'deblendedFlux' if the multi-band deblender was configured to output "
626  "deblended flux catalogs. If no deblending was performed this should "
627  "be 'mergeDet'"),
628  name="{inputCoaddName}Coadd_deblendedFlux",
629  storageClass="SourceCatalog",
630  dimensions=("tract", "patch", "band", "skymap"),
631  )
632  outputSources = cT.Output(
633  doc="Source catalog containing all the measurement information generated in this task",
634  name="{outputCoaddName}Coadd_meas",
635  dimensions=("tract", "patch", "band", "skymap"),
636  storageClass="SourceCatalog",
637  )
638  matchResult = cT.Output(
639  doc="Match catalog produced by configured matcher, optional on doMatchSources",
640  name="{outputCoaddName}Coadd_measMatch",
641  dimensions=("tract", "patch", "band", "skymap"),
642  storageClass="Catalog",
643  )
644  denormMatches = cT.Output(
645  doc="Denormalized Match catalog produced by configured matcher, optional on "
646  "doWriteMatchesDenormalized",
647  name="{outputCoaddName}Coadd_measMatchFull",
648  dimensions=("tract", "patch", "band", "skymap"),
649  storageClass="Catalog",
650  )
651 
652  def __init__(self, *, config=None):
653  super().__init__(config=config)
654  if config.doPropagateFlags is False:
655  self.inputs -= set(("visitCatalogs",))
656 
657  if config.doMatchSources is False:
658  self.outputs -= set(("matchResult",))
659 
660  if config.doWriteMatchesDenormalized is False:
661  self.outputs -= set(("denormMatches",))
662 
663 
664 class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
665  pipelineConnections=MeasureMergedCoaddSourcesConnections):
666  """!
667  @anchor MeasureMergedCoaddSourcesConfig_
668 
669  @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
670  """
671  inputCatalog = Field(dtype=str, default="deblendedFlux",
672  doc=("Name of the input catalog to use. "
673  "If the single-band deblender was used this should be 'deblendedFlux'. "
674  "If the multi-band deblender was used this should be 'deblendedModel'. "
675  "If no deblending was performed this should be 'mergeDet'"))
676  measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
677  setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
678  doPropagateFlags = Field(
679  dtype=bool, default=True,
680  doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
681  )
682  propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
683  doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
684  match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
685  doWriteMatchesDenormalized = Field(
686  dtype=bool,
687  default=False,
688  doc=("Write reference matches in denormalized format? "
689  "This format uses more disk space, but is more convenient to read."),
690  )
691  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
692  psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
693  checkUnitsParseStrict = Field(
694  doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
695  dtype=str,
696  default="raise",
697  )
698  doApCorr = Field(
699  dtype=bool,
700  default=True,
701  doc="Apply aperture corrections"
702  )
703  applyApCorr = ConfigurableField(
704  target=ApplyApCorrTask,
705  doc="Subtask to apply aperture corrections"
706  )
707  doRunCatalogCalculation = Field(
708  dtype=bool,
709  default=True,
710  doc='Run catalogCalculation task'
711  )
712  catalogCalculation = ConfigurableField(
713  target=CatalogCalculationTask,
714  doc="Subtask to run catalogCalculation plugins on catalog"
715  )
716 
717  hasFakes = Field(
718  dtype=bool,
719  default=False,
720  doc="Should be set to True if fake sources have been inserted into the input data."
721  )
722 
723  @property
724  def refObjLoader(self):
725  return self.match.refObjLoader
726 
727  def setDefaults(self):
728  super().setDefaults()
729  self.measurement.plugins.names |= ['base_InputCount',
730  'base_Variance',
731  'base_LocalPhotoCalib',
732  'base_LocalWcs']
733  self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
734  'INEXACT_PSF']
735  self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
736  'INEXACT_PSF']
737 
738  def validate(self):
739  super().validate()
740  refCatGen2 = getattr(self.refObjLoader, "ref_dataset_name", None)
741  if refCatGen2 is not None and refCatGen2 != self.connections.refCat:
742  raise ValueError(
743  f"Gen2 ({refCatGen2}) and Gen3 ({self.connections.refCat}) reference catalogs "
744  f"are different. These options must be kept in sync until Gen2 is retired."
745  )
746 
747 
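# Minimal configuration sketch (an assumption, not part of the original file): a typical
# adjustment of MeasureMergedCoaddSourcesConfig. The reference catalog name below is a
# hypothetical placeholder; as validate() above enforces, the Gen2 loader name and the Gen3
# refCat connection must stay in sync.
def _exampleMeasureConfig(refCatName="ps1_pv3_3pi_20170110"):
    config = MeasureMergedCoaddSourcesConfig()
    config.doMatchSources = True
    config.doWriteMatchesDenormalized = True                   # also write denormalized matches
    config.connections.refCat = refCatName                     # Gen3 reference catalog dataset type
    config.match.refObjLoader.ref_dataset_name = refCatName    # Gen2 loader, kept in sync
    return config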
748 
754 
755 
756 class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
757  """Get the psfCache setting into MeasureMergedCoaddSourcesTask"""
758  @staticmethod
759  def getTargetList(parsedCmd, **kwargs):
760  return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache)
761 
762 
763 class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask):
764  r"""!
765  @anchor MeasureMergedCoaddSourcesTask_
766 
767  @brief Deblend sources from the master catalog in each coadd separately and measure.
768 
769  @section pipe_tasks_multiBand_Contents Contents
770 
771  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
772  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
773  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
774  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
775  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
776  - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example
777 
778  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description
779 
780  Command-line task that uses peaks and footprints from a master catalog to perform deblending and
781  measurement in each coadd.
782 
783  Given a master input catalog of sources (peaks and footprints) or deblender outputs
784  (including a HeavyFootprint in each band), measure each source on the
785  coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
786  consistent set of child sources.
787 
788  The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
789  properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
790  flags are propagated to the coadd sources.
791 
792  Optionally, we can match the coadd sources to an external reference catalog.
793 
794  @par Inputs:
795  deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
796  @n deepCoadd_calexp{tract,patch,filter}: ExposureF
797  @par Outputs:
798  deepCoadd_meas{tract,patch,filter}: SourceCatalog
799  @par Data Unit:
800  tract, patch, filter
801 
802  MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:
803 
804  <DL>
805  <DT> @ref SingleFrameMeasurementTask_ "measurement"
806  <DD> Measure source properties of deblended sources.</DD>
807  <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
808  <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are
809  not at the edge of the field and that have either not been deblended or are the children of deblended
810  sources</DD>
811  <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
812  <DD> Propagate flags set in individual visits to the coadd.</DD>
813  <DT> @ref DirectMatchTask_ "match"
814  <DD> Match input sources to a reference catalog (optional).
815  </DD>
816  </DL>
817  These subtasks may be retargeted as required.
818 
819  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization
820 
821  @copydoc \_\_init\_\_
822 
823  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task
824 
825  @copydoc run
826 
827  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters
828 
829  See @ref MeasureMergedCoaddSourcesConfig_
830 
831  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables
832 
833  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
834  flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
835  files.
836 
837  MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
838  the various sub-tasks. See the documentation for individual sub-tasks for more information.
839 
840  @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
841  MeasureMergedCoaddSourcesTask
842 
843  After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
844  The next stage in the multi-band processing procedure will merge these measurements into a suitable
845  catalog for driving forced photometry.
846 
847  Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds
848  to be processed.
849  A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
850  `--help` command line argument:
851  @code
852  measureCoaddSources.py --help
853  @endcode
854 
855  To demonstrate usage of the MeasureMergedCoaddSourcesTask in the larger context of multi-band processing, we
856  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
857  step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I band
858  coadd as follows:
859  @code
860  measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
861  @endcode
862  This will process the HSC-I band data. The results are written in
863  `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`.
864 
865  It is also necessary to run
866  @code
867  measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
868  @endcode
869  to generate the source catalogs for the HSC-R band required by the next step in the multi-band
870  procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
871  """
872  _DefaultName = "measureCoaddSources"
873  ConfigClass = MeasureMergedCoaddSourcesConfig
874  RunnerClass = MeasureMergedCoaddSourcesRunner
875  getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
876  makeIdFactory = _makeMakeIdFactory("MergedCoaddId") # The IDs we already have are of this type
877 
878  @classmethod
879  def _makeArgumentParser(cls):
880  parser = ArgumentParser(name=cls._DefaultName)
881  parser.add_id_argument("--id", "deepCoadd_calexp",
882  help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
883  ContainerClass=ExistingCoaddDataIdContainer)
884  parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
885  return parser
886 
887  def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
888  **kwargs):
889  """!
890  @brief Initialize the task.
891 
892  Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
893  @param[in] schema: the schema of the merged detection catalog used as input to this one
894  @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
895  @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external reference
896  catalog. May be None if the loader can be constructed from the butler argument or all steps
897  requiring a reference catalog are disabled.
898  @param[in] butler: a butler used to read the input schemas from disk or construct the reference
899  catalog loader, if schema or peakSchema or refObjLoader is None
900 
901  The task will set its own self.schema attribute to the schema of the output measurement catalog.
902  This will include all fields from the input schema, as well as additional fields for all the
903  measurements.
904  """
905  super().__init__(**kwargs)
906  self.deblended = self.config.inputCatalog.startswith("deblended")
907  self.inputCatalog = "Coadd_" + self.config.inputCatalog
908  if initInputs is not None:
909  schema = initInputs['inputSchema'].schema
910  if schema is None:
911  assert butler is not None, "Neither butler nor schema is defined"
912  schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
913  self.schemaMapper = afwTable.SchemaMapper(schema)
914  self.schemaMapper.addMinimalSchema(schema)
915  self.schema = self.schemaMapper.getOutputSchema()
916  self.algMetadata = PropertyList()
917  self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
918  self.makeSubtask("setPrimaryFlags", schema=self.schema)
919  if self.config.doMatchSources:
920  self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
921  if self.config.doPropagateFlags:
922  self.makeSubtask("propagateFlags", schema=self.schema)
923  self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
924  if self.config.doApCorr:
925  self.makeSubtask("applyApCorr", schema=self.schema)
926  if self.config.doRunCatalogCalculation:
927  self.makeSubtask("catalogCalculation", schema=self.schema)
928 
929  self.outputSchema = afwTable.SourceCatalog(self.schema)
930 
931  def runQuantum(self, butlerQC, inputRefs, outputRefs):
932  inputs = butlerQC.get(inputRefs)
933 
934  refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
935  inputs.pop('refCat'), config=self.config.refObjLoader,
936  log=self.log)
937  self.match.setRefObjLoader(refObjLoader)
938 
939  # Set psfcache
940  # move this to run after gen2 deprecation
941  inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)
942 
943  # Get unique integer ID for IdFactory and RNG seeds
944  packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch", returnMaxBits=True)
945  inputs['exposureId'] = packedId
946  idFactory = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
947  # Transform inputCatalog
948  table = afwTable.SourceTable.make(self.schema, idFactory)
949  sources = afwTable.SourceCatalog(table)
950  sources.extend(inputs.pop('inputCatalog'), self.schemaMapper)
951  table = sources.getTable()
952  table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
953  inputs['sources'] = sources
954 
955  skyMap = inputs.pop('skyMap')
956  tractNumber = inputRefs.inputCatalog.dataId['tract']
957  tractInfo = skyMap[tractNumber]
958  patchInfo = tractInfo.getPatchInfo(inputRefs.inputCatalog.dataId['patch'])
959  skyInfo = Struct(
960  skyMap=skyMap,
961  tractInfo=tractInfo,
962  patchInfo=patchInfo,
963  wcs=tractInfo.getWcs(),
964  bbox=patchInfo.getOuterBBox()
965  )
966  inputs['skyInfo'] = skyInfo
967 
968  if self.config.doPropagateFlags:
969  # Filter out any visit catalog that is not coadd inputs
970  ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
971  visitKey = ccdInputs.schema.find("visit").key
972  ccdKey = ccdInputs.schema.find("ccd").key
973  inputVisitIds = set()
974  ccdRecordsWcs = {}
975  for ccdRecord in ccdInputs:
976  visit = ccdRecord.get(visitKey)
977  ccd = ccdRecord.get(ccdKey)
978  inputVisitIds.add((visit, ccd))
979  ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()
980 
981  inputCatalogsToKeep = []
982  inputCatalogWcsUpdate = []
983  for i, dataRef in enumerate(inputRefs.visitCatalogs):
984  key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
985  if key in inputVisitIds:
986  inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
987  inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
988  inputs['visitCatalogs'] = inputCatalogsToKeep
989  inputs['wcsUpdates'] = inputCatalogWcsUpdate
990  inputs['ccdInputs'] = ccdInputs
991 
992  outputs = self.run(**inputs)
993  butlerQC.put(outputs, outputRefs)
994 
995  def runDataRef(self, patchRef, psfCache=100):
996  """!
997  @brief Deblend and measure.
998 
999  @param[in] patchRef: Patch reference.
1000 
1001  Set 'is-primary' and related flags. Propagate flags
1002  from individual visits. Optionally match the sources to a reference catalog and write the matches.
1003  Finally, write the deblended sources and measurements out.
1004  """
1005  if self.config.hasFakes:
1006  coaddType = "fakes_" + self.config.coaddName
1007  else:
1008  coaddType = self.config.coaddName
1009  exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
1010  exposure.getPsf().setCacheCapacity(psfCache)
1011  sources = self.readSources(patchRef)
1012  table = sources.getTable()
1013  table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
1014  skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)
1015 
1016  if self.config.doPropagateFlags:
1017  ccdInputs = self.propagateFlags.getCcdInputs(exposure)
1018  else:
1019  ccdInputs = None
1020 
1021  results = self.run(exposure=exposure, sources=sources,
1022  ccdInputs=ccdInputs,
1023  skyInfo=skyInfo, butler=patchRef.getButler(),
1024  exposureId=self.getExposureId(patchRef))
1025 
1026  if self.config.doMatchSources:
1027  self.writeMatches(patchRef, results)
1028  self.write(patchRef, results.outputSources)
1029 
1030  def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
1031  butler=None):
1032  """Run measurement algorithms on the input exposure, and optionally populate the
1033  resulting catalog with extra information.
1034 
1035  Parameters
1036  ----------
1037  exposure : `lsst.afw.image.Exposure`
1038  The input exposure on which measurements are to be performed
1039  sources : `lsst.afw.table.SourceCatalog`
1040  A catalog built from the results of merged detections, or
1041  deblender outputs.
1042  skyInfo : `lsst.pipe.base.Struct`
1043  A struct containing information about the position of the input exposure within
1044  a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
1045  exposureId : `int` or `bytes`
1046  packed unique number or bytes unique to the input exposure
1047  ccdInputs : `lsst.afw.table.ExposureCatalog`
1048  Catalog containing information on the individual visits which went into making
1049  the exposure
1050  visitCatalogs : list of `lsst.afw.table.SourceCatalog` or `None`
1051  A list of source catalogs corresponding to measurements made on the individual
1052  visits which went into the input exposure. If None and butler is `None` then
1053  the task cannot propagate visit flags to the output catalog.
1054  wcsUpdates : list of `lsst.afw.geom.SkyWcs` or `None`
1055  If visitCatalogs is not `None` this should be a list of wcs objects which correspond
1056  to the input visits. Used to put all coordinates to common system. If `None` and
1057  butler is `None` then the task cannot propagate visit flags to the output catalog.
1058  butler : `lsst.daf.butler.Butler` or `lsst.daf.persistence.Butler`
1059  Either a gen2 or gen3 butler used to load visit catalogs
1060 
1061  Returns
1062  -------
1063  results : `lsst.pipe.base.Struct`
1064  Results of running measurement task. Will contain the catalog in the
1065  sources attribute. Optionally will have results of matching to a
1066  reference catalog in the matchResults attribute, and denormalized
1067  matches in the denormMatches attribute.
1068  """
1069  self.measurement.run(sources, exposure, exposureId=exposureId)
1070 
1071  if self.config.doApCorr:
1072  self.applyApCorr.run(
1073  catalog=sources,
1074  apCorrMap=exposure.getInfo().getApCorrMap()
1075  )
1076 
1077  # TODO DM-11568: this contiguous check-and-copy could go away if we
1078  # reserve enough space during SourceDetection and/or SourceDeblend.
1079  # NOTE: sourceSelectors require contiguous catalogs, so ensure
1080  # contiguity now, so views are preserved from here on.
1081  if not sources.isContiguous():
1082  sources = sources.copy(deep=True)
1083 
1084  if self.config.doRunCatalogCalculation:
1085  self.catalogCalculation.run(sources)
1086 
1087  self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
1088  patchInfo=skyInfo.patchInfo, includeDeblend=self.deblended)
1089  if self.config.doPropagateFlags:
1090  self.propagateFlags.run(butler, sources, ccdInputs, exposure.getWcs(), visitCatalogs, wcsUpdates)
1091 
1092  results = Struct()
1093 
1094  if self.config.doMatchSources:
1095  matchResult = self.match.run(sources, exposure.getInfo().getFilter().getName())
1096  matches = afwTable.packMatches(matchResult.matches)
1097  matches.table.setMetadata(matchResult.matchMeta)
1098  results.matchResult = matches
1099  if self.config.doWriteMatchesDenormalized:
1100  if matchResult.matches:
1101  denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
1102  else:
1103  self.log.warn("No matches, so generating dummy denormalized matches file")
1104  denormMatches = afwTable.BaseCatalog(afwTable.Schema())
1105  denormMatches.setMetadata(PropertyList())
1106  denormMatches.getMetadata().add("COMMENT",
1107  "This catalog is empty because no matches were found.")
1108  results.denormMatches = denormMatches
1110 
1111  results.outputSources = sources
1112  return results
1113 
1114  def readSources(self, dataRef):
1115  """!
1116  @brief Read input sources.
1117 
1118  @param[in] dataRef: Data reference for catalog of merged detections
1119  @return List of sources in merged catalog
1120 
1121  We also need to add columns to hold the measurements we're about to make
1122  so we can measure in-place.
1123  """
1124  merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
1125  self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
1126  idFactory = self.makeIdFactory(dataRef)
1127  for s in merged:
1128  idFactory.notify(s.getId())
1129  table = afwTable.SourceTable.make(self.schema, idFactory)
1130  sources = afwTable.SourceCatalog(table)
1131  sources.extend(merged, self.schemaMapper)
1132  return sources
1133 
1134  def writeMatches(self, dataRef, results):
1135  """!
1136  @brief Write matches of the sources to the astrometric reference catalog.
1137 
1138  @param[in] dataRef: data reference
1139  @param[in] results: results struct from run method
1140  """
1141  if hasattr(results, "matchResult"):
1142  dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch")
1143  if hasattr(results, "denormMatches"):
1144  dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull")
1145 
1146  def write(self, dataRef, sources):
1147  """!
1148  @brief Write the source catalog.
1149 
1150  @param[in] dataRef: data reference
1151  @param[in] sources: source catalog
1152  """
1153  dataRef.put(sources, self.config.coaddName + "Coadd_meas")
1154  self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId))
1155 
1156  def getExposureId(self, dataRef):
1157  return int(dataRef.get(self.config.coaddName + "CoaddId"))
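
# Minimal usage sketch (an assumption, not part of the original file): measuring a merged
# catalog on a coadd outside the butler-driven entry points. The task, exposure, sources,
# skyInfo, and exposureId arguments are assumed to have been assembled elsewhere (e.g. as in
# runQuantum above), and the task is assumed to have been configured with doPropagateFlags
# and doMatchSources disabled, since no visit catalogs or reference loader are supplied here.
def _exampleMeasure(task, exposure, sources, skyInfo, exposureId):
    results = task.run(exposure=exposure, sources=sources, skyInfo=skyInfo,
                       exposureId=exposureId, ccdInputs=None)
    return results.outputSources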