lsst.pipe.tasks  21.0.0-42-g5afaedd3+13d2e51252
multiBand.py
Go to the documentation of this file.
1 #!/usr/bin/env python
2 #
3 # LSST Data Management System
4 # Copyright 2008-2015 AURA/LSST.
5 #
6 # This product includes software developed by the
7 # LSST Project (http://www.lsst.org/).
8 #
9 # This program is free software: you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation, either version 3 of the License, or
12 # (at your option) any later version.
13 #
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
18 #
19 # You should have received a copy of the LSST License Statement and
20 # the GNU General Public License along with this program. If not,
21 # see <https://www.lsstcorp.org/LegalNotices/>.
22 #
23 from lsst.coadd.utils.coaddDataIdContainer import ExistingCoaddDataIdContainer
24 from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner,
25  PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
27 from lsst.pex.config import Config, Field, ConfigurableField
28 from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader
29 from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
30 from lsst.meas.deblender import SourceDeblendTask
31 from lsst.meas.extensions.scarlet import ScarletDeblendTask
32 from lsst.pipe.tasks.coaddBase import getSkyInfo
33 from lsst.pipe.tasks.scaleVariance import ScaleVarianceTask
34 from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
35 from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
36 from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
37 from lsst.pipe.tasks.propagateVisitFlags import PropagateVisitFlagsTask
38 import lsst.afw.image as afwImage
39 import lsst.afw.table as afwTable
40 import lsst.afw.math as afwMath
41 from lsst.daf.base import PropertyList
42 from lsst.skymap import BaseSkyMap
43 
44 # NOTE: these imports are a convenience so multiband users only have to import this file.
45 from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401
46 from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401
47 from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401
48 from .multiBandUtils import getInputSchema, readCatalog, _makeMakeIdFactory # noqa: F401
49 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401
50 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401
51 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401
52 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401
53 
54 
55 """
56 New set types:
57 * deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
58 * deepCoadd_mergeDet: merged detections (tract, patch)
59 * deepCoadd_meas: measurements of merged detections (tract, patch, filter)
60 * deepCoadd_ref: reference sources (tract, patch)
61 All of these have associated *_schema catalogs that require no data ID and hold no records.
62 
63 In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
64 the mergeDet, meas, and ref dataset Footprints:
65 * deepCoadd_peak_schema
66 """
67 
68 
69 
70 class DetectCoaddSourcesConnections(PipelineTaskConnections,
71  dimensions=("tract", "patch", "band", "skymap"),
72  defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
73  detectionSchema = cT.InitOutput(
74  doc="Schema of the detection catalog",
75  name="{outputCoaddName}Coadd_det_schema",
76  storageClass="SourceCatalog",
77  )
78  exposure = cT.Input(
79  doc="Exposure on which detections are to be performed",
80  name="{inputCoaddName}Coadd",
81  storageClass="ExposureF",
82  dimensions=("tract", "patch", "band", "skymap")
83  )
84  outputBackgrounds = cT.Output(
85  doc="Output Backgrounds used in detection",
86  name="{outputCoaddName}Coadd_calexp_background",
87  storageClass="Background",
88  dimensions=("tract", "patch", "band", "skymap")
89  )
90  outputSources = cT.Output(
91  doc="Detected sources catalog",
92  name="{outputCoaddName}Coadd_det",
93  storageClass="SourceCatalog",
94  dimensions=("tract", "patch", "band", "skymap")
95  )
96  outputExposure = cT.Output(
97  doc="Exposure post detection",
98  name="{outputCoaddName}Coadd_calexp",
99  storageClass="ExposureF",
100  dimensions=("tract", "patch", "band", "skymap")
101  )
102 
103 
104 class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
105  """!
106  @anchor DetectCoaddSourcesConfig_
107 
108  @brief Configuration parameters for the DetectCoaddSourcesTask
109  """
110  doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
111  scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
112  detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
113  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
114  doInsertFakes = Field(dtype=bool, default=False,
115  doc="Run fake sources injection task")
116  insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
117  doc="Injection of fake sources for testing "
118  "purposes (must be retargeted)")
119  hasFakes = Field(
120  dtype=bool,
121  default=False,
122  doc="Should be set to True if fake sources have been inserted into the input data."
123  )
124 
125  def setDefaults(self):
126  super().setDefaults()
127  self.detection.thresholdType = "pixel_stdev"
128  self.detection.isotropicGrow = True
129  # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
130  self.detection.reEstimateBackground = False
131  self.detection.background.useApprox = False
132  self.detection.background.binSize = 4096
133  self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
134  self.detection.doTempWideBackground = True # Suppress large footprints that overwhelm the deblender
135 
136 
142 
143 
144 class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
145  r"""!
146  @anchor DetectCoaddSourcesTask_
147 
148  @brief Detect sources on a coadd
149 
150  @section pipe_tasks_multiBand_Contents Contents
151 
152  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
153  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
154  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
155  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
156  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
157  - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example
158 
159  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description
160 
161  Command-line task that detects sources on a coadd of exposures obtained with a single filter.
162 
163  Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
164  properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
165  in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
166  propagate the full covariance matrix -- but it is simple and works well in practice.
167 
168  After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref
169  SourceDetectionTask_ "detection" subtask.
170 
171  @par Inputs:
172  deepCoadd{tract,patch,filter}: ExposureF
173  @par Outputs:
174  deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
175  @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input
176  exposure (ExposureF)
177  @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList
178  @par Data Unit:
179  tract, patch, filter
180 
181  DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
182  You can retarget this subtask if you wish.
183 
184  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization
185 
186  @copydoc \_\_init\_\_
187 
188  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task
189 
190  @copydoc run
191 
192  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters
193 
194  See @ref DetectCoaddSourcesConfig_ "DetectSourcesConfig"
195 
196  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables
197 
198  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
199  flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
200  files.
201 
202  DetectCoaddSourcesTask has no debug variables of its own because it relegates all the work to
203  @ref SourceDetectionTask_ "SourceDetectionTask"; see the documetation for
204  @ref SourceDetectionTask_ "SourceDetectionTask" for further information.
205 
206  @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example
207  of using DetectCoaddSourcesTask
208 
209  DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose of
210  the task is to update the background, detect all sources in a single band and generate a set of parent
211  footprints. Subsequent tasks in the multi-band processing procedure will merge sources across bands and,
212  eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects a data
213  reference to the coadd to be processed. A list of the available optional arguments can be obtained by
214  calling detectCoaddSources.py with the `--help` command line argument:
215  @code
216  detectCoaddSources.py --help
217  @endcode
218 
219  To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
220  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
221  steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:
222  @code
223  detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
224  @endcode
225  that will process the HSC-I band data. The results are written to
226  `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.
227 
228  It is also necessary to run:
229  @code
230  detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
231  @endcode
232  to generate the sources catalogs for the HSC-R band required by the next step in the multi-band
233  processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".
234  """
235  _DefaultName = "detectCoaddSources"
236  ConfigClass = DetectCoaddSourcesConfig
237  getSchemaCatalogs = _makeGetSchemaCatalogs("det")
238  makeIdFactory = _makeMakeIdFactory("CoaddId")
239 
240  @classmethod
241  def _makeArgumentParser(cls):
242  parser = ArgumentParser(name=cls._DefaultName)
243  parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
244  ContainerClass=ExistingCoaddDataIdContainer)
245  return parser
246 
247  def __init__(self, schema=None, **kwargs):
248  """!
249  @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.
250 
251  Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
252 
253  @param[in] schema: initial schema for the output catalog, modified-in place to include all
254  fields set by this task. If None, the source minimal schema will be used.
255  @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
256  """
257  # N.B. Super is used here to handle the multiple inheritance of PipelineTasks, the init tree
258  # call structure has been reviewed carefully to be sure super will work as intended.
259  super().__init__(**kwargs)
260  if schema is None:
261  schema = afwTable.SourceTable.makeMinimalSchema()
262  if self.config.doInsertFakes:
263  self.makeSubtask("insertFakes")
264  self.schema = schema
265  self.makeSubtask("detection", schema=self.schema)
266  if self.config.doScaleVariance:
267  self.makeSubtask("scaleVariance")
268 
269  self.detectionSchema = afwTable.SourceCatalog(self.schema)
270 
271  def runDataRef(self, patchRef):
272  """!
273  @brief Run detection on a coadd.
274 
275  Invokes @ref run and then uses @ref write to output the
276  results.
277 
278  @param[in] patchRef: data reference for patch
279  """
280  if self.config.hasFakes:
281  exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True)
282  else:
283  exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
284  expId = int(patchRef.get(self.config.coaddName + "CoaddId"))
285  results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
286  self.write(results, patchRef)
287  return results
288 
289  def runQuantum(self, butlerQC, inputRefs, outputRefs):
290  inputs = butlerQC.get(inputRefs)
291  packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch_band", returnMaxBits=True)
292  inputs["idFactory"] = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
293  inputs["expId"] = packedId
294  outputs = self.run(**inputs)
295  butlerQC.put(outputs, outputRefs)
296 
297  def run(self, exposure, idFactory, expId):
298  """!
299  @brief Run detection on an exposure.
300 
301  First scale the variance plane to match the observed variance
302  using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
303  detect sources.
304 
305  @param[in,out] exposure: Exposure on which to detect (may be backround-subtracted and scaled,
306  depending on configuration).
307  @param[in] idFactory: IdFactory to set source identifiers
308  @param[in] expId: Exposure identifier (integer) for RNG seed
309 
310  @return a pipe.base.Struct with fields
311  - sources: catalog of detections
312  - backgrounds: list of backgrounds
313  """
314  if self.config.doScaleVariance:
315  varScale = self.scaleVariance.run(exposure.maskedImage)
316  exposure.getMetadata().add("VARIANCE_SCALE", varScale)
317  backgrounds = afwMath.BackgroundList()
318  if self.config.doInsertFakes:
319  self.insertFakes.run(exposure, background=backgrounds)
320  table = afwTable.SourceTable.make(self.schema, idFactory)
321  detections = self.detection.run(table, exposure, expId=expId)
322  sources = detections.sources
323  fpSets = detections.fpSets
324  if hasattr(fpSets, "background") and fpSets.background:
325  for bg in fpSets.background:
326  backgrounds.append(bg)
327  return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
328 
329  def write(self, results, patchRef):
330  """!
331  @brief Write out results from runDetection.
332 
333  @param[in] exposure: Exposure to write out
334  @param[in] results: Struct returned from runDetection
335  @param[in] patchRef: data reference for patch
336  """
337  coaddName = self.config.coaddName + "Coadd"
338  patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
339  patchRef.put(results.outputSources, coaddName + "_det")
340  if self.config.hasFakes:
341  patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp")
342  else:
343  patchRef.put(results.outputExposure, coaddName + "_calexp")
344 
345 
346 
347 
348 class DeblendCoaddSourcesConfig(Config):
349  """DeblendCoaddSourcesConfig
350 
351  Configuration parameters for the `DeblendCoaddSourcesTask`.
352  """
353  singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
354  doc="Deblend sources separately in each band")
355  multiBandDeblend = ConfigurableField(target=ScarletDeblendTask,
356  doc="Deblend sources simultaneously across bands")
357  simultaneous = Field(dtype=bool,
358  default=True,
359  doc="Simultaneously deblend all bands? "
360  "True uses 'singleBandDeblend' while False uses 'multibandDeblend'")
361  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
362  hasFakes = Field(dtype=bool,
363  default=False,
364  doc="Should be set to True if fake sources have been inserted into the input data.")
365 
366  def setDefaults(self):
367  Config.setDefaults(self)
368  self.singleBandDeblend.propagateAllPeaks = True
369 
370 
371 class DeblendCoaddSourcesRunner(MergeSourcesRunner):
372  """Task runner for the `MergeSourcesTask`
373 
374  Required because the run method requires a list of
375  dataRefs rather than a single dataRef.
376  """
377  @staticmethod
378  def getTargetList(parsedCmd, **kwargs):
379  """Provide a list of patch references for each patch, tract, filter combo.
380 
381  Parameters
382  ----------
383  parsedCmd:
384  The parsed command
385  kwargs:
386  Keyword arguments passed to the task
387 
388  Returns
389  -------
390  targetList: list
391  List of tuples, where each tuple is a (dataRef, kwargs) pair.
392  """
393  refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
394  kwargs["psfCache"] = parsedCmd.psfCache
395  return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()]
396 
397 
398 class DeblendCoaddSourcesTask(CmdLineTask):
399  """Deblend the sources in a merged catalog
400 
401  Deblend sources from master catalog in each coadd.
402  This can either be done separately in each band using the HSC-SDSS deblender
403  (`DeblendCoaddSourcesTask.config.simultaneous==False`)
404  or use SCARLET to simultaneously fit the blend in all bands
405  (`DeblendCoaddSourcesTask.config.simultaneous==True`).
406  The task will set its own `self.schema` atribute to the `Schema` of the
407  output deblended catalog.
408  This will include all fields from the input `Schema`, as well as additional fields
409  from the deblender.
410 
411  `pipe.tasks.multiband.DeblendCoaddSourcesTask Description
412  ---------------------------------------------------------
413  `
414 
415  Parameters
416  ----------
417  butler: `Butler`
418  Butler used to read the input schemas from disk or
419  construct the reference catalog loader, if `schema` or `peakSchema` or
420  schema: `Schema`
421  The schema of the merged detection catalog as an input to this task.
422  peakSchema: `Schema`
423  The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog
424  """
425  ConfigClass = DeblendCoaddSourcesConfig
426  RunnerClass = DeblendCoaddSourcesRunner
427  _DefaultName = "deblendCoaddSources"
428  makeIdFactory = _makeMakeIdFactory("MergedCoaddId")
429 
430  @classmethod
431  def _makeArgumentParser(cls):
432  parser = ArgumentParser(name=cls._DefaultName)
433  parser.add_id_argument("--id", "deepCoadd_calexp",
434  help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
435  ContainerClass=ExistingCoaddDataIdContainer)
436  parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
437  return parser
438 
439  def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
440  CmdLineTask.__init__(self, **kwargs)
441  if schema is None:
442  assert butler is not None, "Neither butler nor schema is defined"
443  schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
444  self.schemaMapper = afwTable.SchemaMapper(schema)
445  self.schemaMapper.addMinimalSchema(schema)
446  self.schema = self.schemaMapper.getOutputSchema()
447  if peakSchema is None:
448  assert butler is not None, "Neither butler nor peakSchema is defined"
449  peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema
450 
451  if self.config.simultaneous:
452  self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
453  else:
454  self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)
455 
456  def getSchemaCatalogs(self):
457  """Return a dict of empty catalogs for each catalog dataset produced by this task.
458 
459  Returns
460  -------
461  result: dict
462  Dictionary of empty catalogs, with catalog names as keys.
463  """
464  catalog = afwTable.SourceCatalog(self.schema)
465  return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
466  self.config.coaddName + "Coadd_deblendedModel": catalog}
467 
468  def runDataRef(self, patchRefList, psfCache=100):
469  """Deblend the patch
470 
471  Deblend each source simultaneously or separately
472  (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
473  Set `is-primary` and related flags.
474  Propagate flags from individual visits.
475  Write the deblended sources out.
476 
477  Parameters
478  ----------
479  patchRefList: list
480  List of data references for each filter
481  """
482 
483  if self.config.hasFakes:
484  coaddType = "fakes_" + self.config.coaddName
485  else:
486  coaddType = self.config.coaddName
487 
488  if self.config.simultaneous:
489  # Use SCARLET to simultaneously deblend across filters
490  filters = []
491  exposures = []
492  for patchRef in patchRefList:
493  exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
494  filter = patchRef.get(coaddType + "Coadd_filterLabel", immediate=True)
495  filters.append(filter.bandLabel)
496  exposures.append(exposure)
497  # The input sources are the same for all bands, since it is a merged catalog
498  sources = self.readSources(patchRef)
499  exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
500  templateCatalogs = self.multiBandDeblend.run(exposure, sources)
501  for n in range(len(patchRefList)):
502  self.write(patchRefList[n], templateCatalogs[filters[n]])
503  else:
504  # Use the singeband deblender to deblend each band separately
505  for patchRef in patchRefList:
506  exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
507  exposure.getPsf().setCacheCapacity(psfCache)
508  sources = self.readSources(patchRef)
509  self.singleBandDeblend.run(exposure, sources)
510  self.write(patchRef, sources)
511 
512  def readSources(self, dataRef):
513  """Read merged catalog
514 
515  Read the catalog of merged detections and create a catalog
516  in a single band.
517 
518  Parameters
519  ----------
520  dataRef: data reference
521  Data reference for catalog of merged detections
522 
523  Returns
524  -------
525  sources: `SourceCatalog`
526  List of sources in merged catalog
527 
528  We also need to add columns to hold the measurements we're about to make
529  so we can measure in-place.
530  """
531  merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
532  self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
533  idFactory = self.makeIdFactory(dataRef)
534  for s in merged:
535  idFactory.notify(s.getId())
536  table = afwTable.SourceTable.make(self.schema, idFactory)
537  sources = afwTable.SourceCatalog(table)
538  sources.extend(merged, self.schemaMapper)
539  return sources
540 
541  def write(self, dataRef, sources):
542  """Write the source catalog(s)
543 
544  Parameters
545  ----------
546  dataRef: Data Reference
547  Reference to the output catalog.
548  sources: `SourceCatalog`
549  Flux conserved sources to write to file.
550  If using the single band deblender, this is the catalog
551  generated.
552  template_sources: `SourceCatalog`
553  Source catalog using the multiband template models
554  as footprints.
555  """
556  dataRef.put(sources, self.config.coaddName + "Coadd_deblendedFlux")
557  self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId))
558 
559  def writeMetadata(self, dataRefList):
560  """Write the metadata produced from processing the data.
561  Parameters
562  ----------
563  dataRefList
564  List of Butler data references used to write the metadata.
565  The metadata is written to dataset type `CmdLineTask._getMetadataName`.
566  """
567  for dataRef in dataRefList:
568  try:
569  metadataName = self._getMetadataName()
570  if metadataName is not None:
571  dataRef.put(self.getFullMetadata(), metadataName)
572  except Exception as e:
573  self.log.warn("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)
574 
575  def getExposureId(self, dataRef):
576  """Get the ExposureId from a data reference
577  """
578  return int(dataRef.get(self.config.coaddName + "CoaddId"))
579 
580 
581 class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections, dimensions=("tract", "patch", "band", "skymap"),
582  defaultTemplates={"inputCoaddName": "deep",
583  "outputCoaddName": "deep"}):
584  inputSchema = cT.InitInput(
585  doc="Input schema for measure merged task produced by a deblender or detection task",
586  name="{inputCoaddName}Coadd_deblendedFlux_schema",
587  storageClass="SourceCatalog"
588  )
589  outputSchema = cT.InitOutput(
590  doc="Output schema after all new fields are added by task",
591  name="{inputCoaddName}Coadd_meas_schema",
592  storageClass="SourceCatalog"
593  )
594  refCat = cT.PrerequisiteInput(
595  doc="Reference catalog used to match measured sources against known sources",
596  name="ref_cat",
597  storageClass="SimpleCatalog",
598  dimensions=("skypix",),
599  deferLoad=True,
600  multiple=True
601  )
602  exposure = cT.Input(
603  doc="Input coadd image",
604  name="{inputCoaddName}Coadd_calexp",
605  storageClass="ExposureF",
606  dimensions=("tract", "patch", "band", "skymap")
607  )
608  skyMap = cT.Input(
609  doc="SkyMap to use in processing",
610  name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
611  storageClass="SkyMap",
612  dimensions=("skymap",),
613  )
614  visitCatalogs = cT.Input(
615  doc="Source catalogs for visits which overlap input tract, patch, band. Will be "
616  "further filtered in the task for the purpose of propagating flags from image calibration "
617  "and characterization to codd objects",
618  name="src",
619  dimensions=("instrument", "visit", "detector"),
620  storageClass="SourceCatalog",
621  multiple=True
622  )
623  inputCatalog = cT.Input(
624  doc=("Name of the input catalog to use."
625  "If the single band deblender was used this should be 'deblendedFlux."
626  "If the multi-band deblender was used this should be 'deblendedModel, "
627  "or deblendedFlux if the multiband deblender was configured to output "
628  "deblended flux catalogs. If no deblending was performed this should "
629  "be 'mergeDet'"),
630  name="{inputCoaddName}Coadd_deblendedFlux",
631  storageClass="SourceCatalog",
632  dimensions=("tract", "patch", "band", "skymap"),
633  )
634  outputSources = cT.Output(
635  doc="Source catalog containing all the measurement information generated in this task",
636  name="{outputCoaddName}Coadd_meas",
637  dimensions=("tract", "patch", "band", "skymap"),
638  storageClass="SourceCatalog",
639  )
640  matchResult = cT.Output(
641  doc="Match catalog produced by configured matcher, optional on doMatchSources",
642  name="{outputCoaddName}Coadd_measMatch",
643  dimensions=("tract", "patch", "band", "skymap"),
644  storageClass="Catalog",
645  )
646  denormMatches = cT.Output(
647  doc="Denormalized Match catalog produced by configured matcher, optional on "
648  "doWriteMatchesDenormalized",
649  name="{outputCoaddName}Coadd_measMatchFull",
650  dimensions=("tract", "patch", "band", "skymap"),
651  storageClass="Catalog",
652  )
653 
654  def __init__(self, *, config=None):
655  super().__init__(config=config)
656  if config.doPropagateFlags is False:
657  self.inputs -= set(("visitCatalogs",))
658 
659  if config.doMatchSources is False:
660  self.outputs -= set(("matchResult",))
661 
662  if config.doWriteMatchesDenormalized is False:
663  self.outputs -= set(("denormMatches",))
664 
665 
666 class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
667  pipelineConnections=MeasureMergedCoaddSourcesConnections):
668  """!
669  @anchor MeasureMergedCoaddSourcesConfig_
670 
671  @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
672  """
673  inputCatalog = Field(dtype=str, default="deblendedFlux",
674  doc=("Name of the input catalog to use."
675  "If the single band deblender was used this should be 'deblendedFlux."
676  "If the multi-band deblender was used this should be 'deblendedModel."
677  "If no deblending was performed this should be 'mergeDet'"))
678  measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
679  setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
680  doPropagateFlags = Field(
681  dtype=bool, default=True,
682  doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
683  )
684  propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
685  doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
686  match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
687  doWriteMatchesDenormalized = Field(
688  dtype=bool,
689  default=False,
690  doc=("Write reference matches in denormalized format? "
691  "This format uses more disk space, but is more convenient to read."),
692  )
693  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
694  psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
695  checkUnitsParseStrict = Field(
696  doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
697  dtype=str,
698  default="raise",
699  )
700  doApCorr = Field(
701  dtype=bool,
702  default=True,
703  doc="Apply aperture corrections"
704  )
705  applyApCorr = ConfigurableField(
706  target=ApplyApCorrTask,
707  doc="Subtask to apply aperture corrections"
708  )
709  doRunCatalogCalculation = Field(
710  dtype=bool,
711  default=True,
712  doc='Run catalogCalculation task'
713  )
714  catalogCalculation = ConfigurableField(
715  target=CatalogCalculationTask,
716  doc="Subtask to run catalogCalculation plugins on catalog"
717  )
718 
719  hasFakes = Field(
720  dtype=bool,
721  default=False,
722  doc="Should be set to True if fake sources have been inserted into the input data."
723  )
724 
725  @property
726  def refObjLoader(self):
727  return self.match.refObjLoader
728 
729  def setDefaults(self):
730  super().setDefaults()
731  self.measurement.plugins.names |= ['base_InputCount',
732  'base_Variance',
733  'base_LocalPhotoCalib',
734  'base_LocalWcs']
735  self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
736  'INEXACT_PSF']
737  self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
738  'INEXACT_PSF']
739 
740  def validate(self):
741  super().validate()
742  refCatGen2 = getattr(self.refObjLoader, "ref_dataset_name", None)
743  if refCatGen2 is not None and refCatGen2 != self.connections.refCat:
744  raise ValueError(
745  f"Gen2 ({refCatGen2}) and Gen3 ({self.connections.refCat}) reference catalogs "
746  f"are different. These options must be kept in sync until Gen2 is retired."
747  )
748 
749 
750 
756 
757 
758 class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
759  """Get the psfCache setting into MeasureMergedCoaddSourcesTask"""
760  @staticmethod
761  def getTargetList(parsedCmd, **kwargs):
762  return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache)
763 
764 
765 class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask):
766  r"""!
767  @anchor MeasureMergedCoaddSourcesTask_
768 
769  @brief Deblend sources from master catalog in each coadd seperately and measure.
770 
771  @section pipe_tasks_multiBand_Contents Contents
772 
773  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
774  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
775  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
776  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
777  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
778  - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example
779 
780  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description
781 
782  Command-line task that uses peaks and footprints from a master catalog to perform deblending and
783  measurement in each coadd.
784 
785  Given a master input catalog of sources (peaks and footprints) or deblender outputs
786  (including a HeavyFootprint in each band), measure each source on the
787  coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
788  consistent set of child sources.
789 
790  The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
791  properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
792  flags are propagated to the coadd sources.
793 
794  Optionally, we can match the coadd sources to an external reference catalog.
795 
796  @par Inputs:
797  deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
798  @n deepCoadd_calexp{tract,patch,filter}: ExposureF
799  @par Outputs:
800  deepCoadd_meas{tract,patch,filter}: SourceCatalog
801  @par Data Unit:
802  tract, patch, filter
803 
804  MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:
805 
806  <DL>
807  <DT> @ref SingleFrameMeasurementTask_ "measurement"
808  <DD> Measure source properties of deblended sources.</DD>
809  <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
810  <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are
811  not at the edge of the field and that have either not been deblended or are the children of deblended
812  sources</DD>
813  <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
814  <DD> Propagate flags set in individual visits to the coadd.</DD>
815  <DT> @ref DirectMatchTask_ "match"
816  <DD> Match input sources to a reference catalog (optional).
817  </DD>
818  </DL>
819  These subtasks may be retargeted as required.
820 
821  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization
822 
823  @copydoc \_\_init\_\_
824 
825  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task
826 
827  @copydoc run
828 
829  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters
830 
831  See @ref MeasureMergedCoaddSourcesConfig_
832 
833  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables
834 
835  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
836  flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
837  files.
838 
839  MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
840  the various sub-tasks. See the documetation for individual sub-tasks for more information.
841 
842  @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
843  MeasureMergedCoaddSourcesTask
844 
845  After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
846  The next stage in the multi-band processing procedure will merge these measurements into a suitable
847  catalog for driving forced photometry.
848 
849  Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds
850  to be processed.
851  A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
852  `--help` command line argument:
853  @code
854  measureCoaddSources.py --help
855  @endcode
856 
857  To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
858  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
859  step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I band
860  coadd as follows:
861  @code
862  measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
863  @endcode
864  This will process the HSC-I band data. The results are written in
865  `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits
866 
867  It is also necessary to run
868  @code
869  measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
870  @endcode
871  to generate the sources catalogs for the HSC-R band required by the next step in the multi-band
872  procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
873  """
874  _DefaultName = "measureCoaddSources"
875  ConfigClass = MeasureMergedCoaddSourcesConfig
876  RunnerClass = MeasureMergedCoaddSourcesRunner
877  getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
878  makeIdFactory = _makeMakeIdFactory("MergedCoaddId") # The IDs we already have are of this type
879 
880  @classmethod
881  def _makeArgumentParser(cls):
882  parser = ArgumentParser(name=cls._DefaultName)
883  parser.add_id_argument("--id", "deepCoadd_calexp",
884  help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
885  ContainerClass=ExistingCoaddDataIdContainer)
886  parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
887  return parser
888 
889  def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
890  **kwargs):
891  """!
892  @brief Initialize the task.
893 
894  Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
895  @param[in] schema: the schema of the merged detection catalog used as input to this one
896  @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
897  @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external reference
898  catalog. May be None if the loader can be constructed from the butler argument or all steps
899  requiring a reference catalog are disabled.
900  @param[in] butler: a butler used to read the input schemas from disk or construct the reference
901  catalog loader, if schema or peakSchema or refObjLoader is None
902 
903  The task will set its own self.schema attribute to the schema of the output measurement catalog.
904  This will include all fields from the input schema, as well as additional fields for all the
905  measurements.
906  """
907  super().__init__(**kwargs)
908  self.deblended = self.config.inputCatalog.startswith("deblended")
909  self.inputCatalog = "Coadd_" + self.config.inputCatalog
910  if initInputs is not None:
911  schema = initInputs['inputSchema'].schema
912  if schema is None:
913  assert butler is not None, "Neither butler nor schema is defined"
914  schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
915  self.schemaMapper = afwTable.SchemaMapper(schema)
916  self.schemaMapper.addMinimalSchema(schema)
917  self.schema = self.schemaMapper.getOutputSchema()
918  self.algMetadata = PropertyList()
919  self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
920  self.makeSubtask("setPrimaryFlags", schema=self.schema)
921  if self.config.doMatchSources:
922  self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
923  if self.config.doPropagateFlags:
924  self.makeSubtask("propagateFlags", schema=self.schema)
925  self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
926  if self.config.doApCorr:
927  self.makeSubtask("applyApCorr", schema=self.schema)
928  if self.config.doRunCatalogCalculation:
929  self.makeSubtask("catalogCalculation", schema=self.schema)
930 
931  self.outputSchema = afwTable.SourceCatalog(self.schema)
932 
933  def runQuantum(self, butlerQC, inputRefs, outputRefs):
934  inputs = butlerQC.get(inputRefs)
935 
936  refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
937  inputs.pop('refCat'), config=self.config.refObjLoader,
938  log=self.log)
939  self.match.setRefObjLoader(refObjLoader)
940 
941  # Set psfcache
942  # move this to run after gen2 deprecation
943  inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)
944 
945  # Get unique integer ID for IdFactory and RNG seeds
946  packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch", returnMaxBits=True)
947  inputs['exposureId'] = packedId
948  idFactory = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
949  # Transform inputCatalog
950  table = afwTable.SourceTable.make(self.schema, idFactory)
951  sources = afwTable.SourceCatalog(table)
952  sources.extend(inputs.pop('inputCatalog'), self.schemaMapper)
953  table = sources.getTable()
954  table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
955  inputs['sources'] = sources
956 
957  skyMap = inputs.pop('skyMap')
958  tractNumber = inputRefs.inputCatalog.dataId['tract']
959  tractInfo = skyMap[tractNumber]
960  patchInfo = tractInfo.getPatchInfo(inputRefs.inputCatalog.dataId['patch'])
961  skyInfo = Struct(
962  skyMap=skyMap,
963  tractInfo=tractInfo,
964  patchInfo=patchInfo,
965  wcs=tractInfo.getWcs(),
966  bbox=patchInfo.getOuterBBox()
967  )
968  inputs['skyInfo'] = skyInfo
969 
970  if self.config.doPropagateFlags:
971  # Filter out any visit catalog that is not coadd inputs
972  ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
973  visitKey = ccdInputs.schema.find("visit").key
974  ccdKey = ccdInputs.schema.find("ccd").key
975  inputVisitIds = set()
976  ccdRecordsWcs = {}
977  for ccdRecord in ccdInputs:
978  visit = ccdRecord.get(visitKey)
979  ccd = ccdRecord.get(ccdKey)
980  inputVisitIds.add((visit, ccd))
981  ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()
982 
983  inputCatalogsToKeep = []
984  inputCatalogWcsUpdate = []
985  for i, dataRef in enumerate(inputRefs.visitCatalogs):
986  key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
987  if key in inputVisitIds:
988  inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
989  inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
990  inputs['visitCatalogs'] = inputCatalogsToKeep
991  inputs['wcsUpdates'] = inputCatalogWcsUpdate
992  inputs['ccdInputs'] = ccdInputs
993 
994  outputs = self.run(**inputs)
995  butlerQC.put(outputs, outputRefs)
996 
997  def runDataRef(self, patchRef, psfCache=100):
998  """!
999  @brief Deblend and measure.
1000 
1001  @param[in] patchRef: Patch reference.
1002 
1003  Set 'is-primary' and related flags. Propagate flags
1004  from individual visits. Optionally match the sources to a reference catalog and write the matches.
1005  Finally, write the deblended sources and measurements out.
1006  """
1007  if self.config.hasFakes:
1008  coaddType = "fakes_" + self.config.coaddName
1009  else:
1010  coaddType = self.config.coaddName
1011  exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
1012  exposure.getPsf().setCacheCapacity(psfCache)
1013  sources = self.readSources(patchRef)
1014  table = sources.getTable()
1015  table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
1016  skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)
1017 
1018  if self.config.doPropagateFlags:
1019  ccdInputs = self.propagateFlags.getCcdInputs(exposure)
1020  else:
1021  ccdInputs = None
1022 
1023  results = self.run(exposure=exposure, sources=sources,
1024  ccdInputs=ccdInputs,
1025  skyInfo=skyInfo, butler=patchRef.getButler(),
1026  exposureId=self.getExposureId(patchRef))
1027 
1028  if self.config.doMatchSources:
1029  self.writeMatches(patchRef, results)
1030  self.write(patchRef, results.outputSources)
1031 
1032  def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
1033  butler=None):
1034  """Run measurement algorithms on the input exposure, and optionally populate the
1035  resulting catalog with extra information.
1036 
1037  Parameters
1038  ----------
1039  exposure : `lsst.afw.exposure.Exposure`
1040  The input exposure on which measurements are to be performed
1041  sources : `lsst.afw.table.SourceCatalog`
1042  A catalog built from the results of merged detections, or
1043  deblender outputs.
1044  skyInfo : `lsst.pipe.base.Struct`
1045  A struct containing information about the position of the input exposure within
1046  a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
1047  exposureId : `int` or `bytes`
1048  packed unique number or bytes unique to the input exposure
1049  ccdInputs : `lsst.afw.table.ExposureCatalog`
1050  Catalog containing information on the individual visits which went into making
1051  the exposure
1052  visitCatalogs : list of `lsst.afw.table.SourceCatalogs` or `None`
1053  A list of source catalogs corresponding to measurements made on the individual
1054  visits which went into the input exposure. If None and butler is `None` then
1055  the task cannot propagate visit flags to the output catalog.
1056  wcsUpdates : list of `lsst.afw.geom.SkyWcs` or `None`
1057  If visitCatalogs is not `None` this should be a list of wcs objects which correspond
1058  to the input visits. Used to put all coordinates to common system. If `None` and
1059  butler is `None` then the task cannot propagate visit flags to the output catalog.
1060  butler : `lsst.daf.butler.Butler` or `lsst.daf.persistence.Butler`
1061  Either a gen2 or gen3 butler used to load visit catalogs
1062 
1063  Returns
1064  -------
1065  results : `lsst.pipe.base.Struct`
1066  Results of running measurement task. Will contain the catalog in the
1067  sources attribute. Optionally will have results of matching to a
1068  reference catalog in the matchResults attribute, and denormalized
1069  matches in the denormMatches attribute.
1070  """
1071  self.measurement.run(sources, exposure, exposureId=exposureId)
1072 
1073  if self.config.doApCorr:
1074  self.applyApCorr.run(
1075  catalog=sources,
1076  apCorrMap=exposure.getInfo().getApCorrMap()
1077  )
1078 
1079  # TODO DM-11568: this contiguous check-and-copy could go away if we
1080  # reserve enough space during SourceDetection and/or SourceDeblend.
1081  # NOTE: sourceSelectors require contiguous catalogs, so ensure
1082  # contiguity now, so views are preserved from here on.
1083  if not sources.isContiguous():
1084  sources = sources.copy(deep=True)
1085 
1086  if self.config.doRunCatalogCalculation:
1087  self.catalogCalculation.run(sources)
1088 
1089  self.setPrimaryFlags.run(sources, skyMap=skyInfo.skyMap, tractInfo=skyInfo.tractInfo,
1090  patchInfo=skyInfo.patchInfo, includeDeblend=self.deblended)
1091  if self.config.doPropagateFlags:
1092  self.propagateFlags.run(butler, sources, ccdInputs, exposure.getWcs(), visitCatalogs, wcsUpdates)
1093 
1094  results = Struct()
1095 
1096  if self.config.doMatchSources:
1097  matchResult = self.match.run(sources, exposure.getInfo().getFilterLabel().bandLabel)
1098  matches = afwTable.packMatches(matchResult.matches)
1099  matches.table.setMetadata(matchResult.matchMeta)
1100  results.matchResult = matches
1101  if self.config.doWriteMatchesDenormalized:
1102  if matchResult.matches:
1103  denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
1104  else:
1105  self.log.warn("No matches, so generating dummy denormalized matches file")
1106  denormMatches = afwTable.BaseCatalog(afwTable.Schema())
1107  denormMatches.setMetadata(PropertyList())
1108  denormMatches.getMetadata().add("COMMENT",
1109  "This catalog is empty because no matches were found.")
1110  results.denormMatches = denormMatches
1111  results.denormMatches = denormMatches
1112 
1113  results.outputSources = sources
1114  return results
1115 
1116  def readSources(self, dataRef):
1117  """!
1118  @brief Read input sources.
1119 
1120  @param[in] dataRef: Data reference for catalog of merged detections
1121  @return List of sources in merged catalog
1122 
1123  We also need to add columns to hold the measurements we're about to make
1124  so we can measure in-place.
1125  """
1126  merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
1127  self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
1128  idFactory = self.makeIdFactory(dataRef)
1129  for s in merged:
1130  idFactory.notify(s.getId())
1131  table = afwTable.SourceTable.make(self.schema, idFactory)
1132  sources = afwTable.SourceCatalog(table)
1133  sources.extend(merged, self.schemaMapper)
1134  return sources
1135 
1136  def writeMatches(self, dataRef, results):
1137  """!
1138  @brief Write matches of the sources to the astrometric reference catalog.
1139 
1140  @param[in] dataRef: data reference
1141  @param[in] results: results struct from run method
1142  """
1143  if hasattr(results, "matchResult"):
1144  dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch")
1145  if hasattr(results, "denormMatches"):
1146  dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull")
1147 
1148  def write(self, dataRef, sources):
1149  """!
1150  @brief Write the source catalog.
1151 
1152  @param[in] dataRef: data reference
1153  @param[in] sources: source catalog
1154  """
1155  dataRef.put(sources, self.config.coaddName + "Coadd_meas")
1156  self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId))
1157 
1158  def getExposureId(self, dataRef):
1159  return int(dataRef.get(self.config.coaddName + "CoaddId"))