lsst.pipe.tasks  18.1.0-21-gb3d55290+11
multiBand.py
1 #!/usr/bin/env python
2 #
3 # LSST Data Management System
4 # Copyright 2008-2015 AURA/LSST.
5 #
6 # This product includes software developed by the
7 # LSST Project (http://www.lsst.org/).
8 #
9 # This program is free software: you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation, either version 3 of the License, or
12 # (at your option) any later version.
13 #
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
18 #
19 # You should have received a copy of the LSST License Statement and
20 # the GNU General Public License along with this program. If not,
21 # see <https://www.lsstcorp.org/LegalNotices/>.
22 #
23 from lsst.coadd.utils.coaddDataIdContainer import ExistingCoaddDataIdContainer
24 from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner,
25  PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
 26 import lsst.pipe.base.connectionTypes as cT
 27 from lsst.pex.config import Config, Field, ConfigurableField
28 from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader
29 from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
30 from lsst.meas.deblender import SourceDeblendTask, MultibandDeblendTask
31 from lsst.pipe.tasks.coaddBase import getSkyInfo
32 from lsst.pipe.tasks.scaleVariance import ScaleVarianceTask
33 from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
34 from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
35 from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
36 from lsst.pipe.tasks.propagateVisitFlags import PropagateVisitFlagsTask
37 import lsst.afw.image as afwImage
38 import lsst.afw.table as afwTable
39 import lsst.afw.math as afwMath
40 from lsst.daf.base import PropertyList
41 
42 from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401
43 from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401
44 from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401
45 from .multiBandUtils import getInputSchema, getShortFilterName, readCatalog, _makeMakeIdFactory # noqa: F401
46 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401
47 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401
48 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401
49 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401
50 
51 
52 """
 53 New dataset types:
54 * deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
55 * deepCoadd_mergeDet: merged detections (tract, patch)
56 * deepCoadd_meas: measurements of merged detections (tract, patch, filter)
57 * deepCoadd_ref: reference sources (tract, patch)
58 All of these have associated *_schema catalogs that require no data ID and hold no records.
59 
60 In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
61 the mergeDet, meas, and ref dataset Footprints:
62 * deepCoadd_peak_schema
63 """
64 
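# Illustrative sketch only (not part of the pipeline code): how the dataset types listed in the
# docstring above might be retrieved with a Gen2 butler.  The repository path and data ID values
# below are hypothetical placeholders.
def _exampleReadCoaddOutputs(repoRoot="/path/to/rerun", tract=0, patch="5,4", filterName="HSC-I"):
    from lsst.daf.persistence import Butler  # Gen2 butler
    butler = Butler(repoRoot)
    dataId = dict(tract=tract, patch=patch, filter=filterName)
    det = butler.get("deepCoadd_det", dataId)            # per-band detections (parent Footprints)
    mergeDet = butler.get("deepCoadd_mergeDet", dataId)  # merged detections (tract, patch)
    meas = butler.get("deepCoadd_meas", dataId)          # per-band measurements
    ref = butler.get("deepCoadd_ref", dataId)            # reference sources (tract, patch)
    return det, mergeDet, meas, ref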
65 
66 
67 class DetectCoaddSourcesConnections(PipelineTaskConnections, dimensions=("tract", "patch", "abstract_filter", "skymap"),
68  defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
69  detectionSchema = cT.InitOutput(
70  doc="Schema of the detection catalog",
71  name="{outputCoaddName}Coadd_det_schema",
72  storageClass="SourceCatalog",
73  )
74  exposure = cT.Input(
75  doc="Exposure on which detections are to be performed",
76  name="{inputCoaddName}Coadd",
77  storageClass="ExposureF",
78  dimensions=("tract", "patch", "abstract_filter", "skymap")
79  )
80  outputBackgrounds = cT.Output(
81  doc="Output Backgrounds used in detection",
82  name="{outputCoaddName}Coadd_calexp_background",
83  storageClass="Background",
84  dimensions=("tract", "patch", "abstract_filter", "skymap")
85  )
86  outputSources = cT.Output(
87  doc="Detected sources catalog",
88  name="{outputCoaddName}Coadd_det",
89  storageClass="SourceCatalog",
90  dimensions=("tract", "patch", "abstract_filter", "skymap")
91  )
92  outputExposure = cT.Output(
93  doc="Exposure post detection",
94  name="{outputCoaddName}Coadd_calexp",
95  storageClass="ExposureF",
96  dimensions=("tract", "patch", "abstract_filter", "skymap")
97  )
98 
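# Illustrative sketch: with the defaultTemplates above (inputCoaddName == outputCoaddName == "deep"),
# the connection name templates resolve to the familiar dataset names.
def _exampleResolveConnectionNames(coaddName="deep"):
    templates = {"inputCoaddName": coaddName, "outputCoaddName": coaddName}
    return {
        "exposure": "{inputCoaddName}Coadd".format(**templates),                # "deepCoadd"
        "outputSources": "{outputCoaddName}Coadd_det".format(**templates),      # "deepCoadd_det"
        "outputExposure": "{outputCoaddName}Coadd_calexp".format(**templates),  # "deepCoadd_calexp"
    }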
99 
100 class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
101  """!
102  @anchor DetectCoaddSourcesConfig_
103 
104  @brief Configuration parameters for the DetectCoaddSourcesTask
105  """
106  doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
107  scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
108  detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
109  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
110  doInsertFakes = Field(dtype=bool, default=False,
111  doc="Run fake sources injection task")
112  insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
113  doc="Injection of fake sources for testing "
114  "purposes (must be retargeted)")
115  hasFakes = Field(
116  dtype=bool,
117  default=False,
118  doc="Should be set to True if fake sources have been inserted into the input data."
119  )
120 
121  def setDefaults(self):
122  super().setDefaults()
123  self.detection.thresholdType = "pixel_stdev"
124  self.detection.isotropicGrow = True
125  # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
126  self.detection.reEstimateBackground = False
127  self.detection.background.useApprox = False
128  self.detection.background.binSize = 4096
129  self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
130  self.detection.doTempWideBackground = True # Suppress large footprints that overwhelm the deblender
131 
132 
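# Illustrative config-override sketch (e.g. for use with --configfile): a function that tweaks a
# DetectCoaddSourcesConfig instance.  ``thresholdValue`` is assumed to be the usual
# detection-threshold field of the configured detection subtask.
def _exampleOverrideDetectionConfig(config):
    config.doScaleVariance = False                 # skip empirical variance rescaling
    config.detection.thresholdValue = 5.0          # assumed detection-threshold field
    config.detection.doTempWideBackground = False  # disable the temporary wide background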
138 
139 
140 class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
141  r"""!
142  @anchor DetectCoaddSourcesTask_
143 
144  @brief Detect sources on a coadd
145 
146  @section pipe_tasks_multiBand_Contents Contents
147 
148  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
149  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
150  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
151  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
152  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
153  - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example
154 
155  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description
156 
157  Command-line task that detects sources on a coadd of exposures obtained with a single filter.
158 
159  Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
160  properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
161  in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
162  propagate the full covariance matrix -- but it is simple and works well in practice.
163 
164  After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref
165  SourceDetectionTask_ "detection" subtask.
166 
167  @par Inputs:
168  deepCoadd{tract,patch,filter}: ExposureF
169  @par Outputs:
170  deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
171  @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input
172  exposure (ExposureF)
173  @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList
174  @par Data Unit:
175  tract, patch, filter
176 
177  DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
178  You can retarget this subtask if you wish.
179 
180  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization
181 
182  @copydoc \_\_init\_\_
183 
184  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task
185 
186  @copydoc run
187 
188  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters
189 
 190  See @ref DetectCoaddSourcesConfig_ "DetectCoaddSourcesConfig"
191 
192  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables
193 
194  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
195  flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
196  files.
197 
 198  DetectCoaddSourcesTask has no debug variables of its own because it delegates all the work to
 199  @ref SourceDetectionTask_ "SourceDetectionTask"; see the documentation for
 200  @ref SourceDetectionTask_ "SourceDetectionTask" for further information.
201 
202  @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example
203  of using DetectCoaddSourcesTask
204 
205  DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose of
206  the task is to update the background, detect all sources in a single band and generate a set of parent
207  footprints. Subsequent tasks in the multi-band processing procedure will merge sources across bands and,
208  eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects a data
209  reference to the coadd to be processed. A list of the available optional arguments can be obtained by
210  calling detectCoaddSources.py with the `--help` command line argument:
211  @code
212  detectCoaddSources.py --help
213  @endcode
214 
215  To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
216  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
217  steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:
218  @code
219  detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
220  @endcode
 221  This will process the HSC-I band data. The results are written to
222  `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.
223 
224  It is also necessary to run:
225  @code
226  detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
227  @endcode
 228  to generate the source catalogs for the HSC-R band required by the next step in the multi-band
229  processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".
230  """
231  _DefaultName = "detectCoaddSources"
232  ConfigClass = DetectCoaddSourcesConfig
233  getSchemaCatalogs = _makeGetSchemaCatalogs("det")
234  makeIdFactory = _makeMakeIdFactory("CoaddId")
235 
236  @classmethod
237  def _makeArgumentParser(cls):
238  parser = ArgumentParser(name=cls._DefaultName)
239  parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
240  ContainerClass=ExistingCoaddDataIdContainer)
241  return parser
242 
243  def __init__(self, schema=None, **kwargs):
244  """!
245  @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.
246 
247  Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
248 
 249  @param[in] schema: initial schema for the output catalog, modified in place to include all
250  fields set by this task. If None, the source minimal schema will be used.
251  @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
252  """
253  # N.B. Super is used here to handle the multiple inheritance of PipelineTasks, the init tree
254  # call structure has been reviewed carefully to be sure super will work as intended.
255  super().__init__(**kwargs)
256  if schema is None:
257  schema = afwTable.SourceTable.makeMinimalSchema()
258  if self.config.doInsertFakes:
259  self.makeSubtask("insertFakes")
260  self.schema = schema
261  self.makeSubtask("detection", schema=self.schema)
262  if self.config.doScaleVariance:
263  self.makeSubtask("scaleVariance")
264 
265  self.detectionSchema = afwTable.SourceCatalog(self.schema)
266 
267  def runDataRef(self, patchRef):
268  """!
269  @brief Run detection on a coadd.
270 
271  Invokes @ref run and then uses @ref write to output the
272  results.
273 
274  @param[in] patchRef: data reference for patch
275  """
276  if self.config.hasFakes:
277  exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True)
278  else:
279  exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
280  expId = int(patchRef.get(self.config.coaddName + "CoaddId"))
281  results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
282  self.write(results, patchRef)
283  return results
284 
285  def runQuantum(self, butlerQC, inputRefs, outputRefs):
286  inputs = butlerQC.get(inputRefs)
287  packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch_abstract_filter", returnMaxBits=True)
288  inputs["idFactory"] = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
289  inputs["expId"] = packedId
290  outputs = self.run(**inputs)
291  butlerQC.put(outputs, outputRefs)
292 
293  def run(self, exposure, idFactory, expId):
294  """!
295  @brief Run detection on an exposure.
296 
297  First scale the variance plane to match the observed variance
298  using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
299  detect sources.
300 
 301  @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled,
302  depending on configuration).
303  @param[in] idFactory: IdFactory to set source identifiers
304  @param[in] expId: Exposure identifier (integer) for RNG seed
305 
306  @return a pipe.base.Struct with fields
307  - sources: catalog of detections
308  - backgrounds: list of backgrounds
309  """
310  if self.config.doScaleVariance:
311  varScale = self.scaleVariance.run(exposure.maskedImage)
312  exposure.getMetadata().add("variance_scale", varScale)
313  backgrounds = afwMath.BackgroundList()
314  if self.config.doInsertFakes:
315  self.insertFakes.run(exposure, background=backgrounds)
316  table = afwTable.SourceTable.make(self.schema, idFactory)
317  detections = self.detection.makeSourceCatalog(table, exposure, expId=expId)
318  sources = detections.sources
319  fpSets = detections.fpSets
320  if hasattr(fpSets, "background") and fpSets.background:
321  for bg in fpSets.background:
322  backgrounds.append(bg)
323  return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
324 
325  def write(self, results, patchRef):
326  """!
 327  @brief Write out results from run.
 328 
 329  @param[in] results: Struct returned from run, containing the sources,
 330  backgrounds, and exposure to persist
331  @param[in] patchRef: data reference for patch
332  """
333  coaddName = self.config.coaddName + "Coadd"
334  patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
335  patchRef.put(results.outputSources, coaddName + "_det")
336  if self.config.hasFakes:
337  patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp")
338  else:
339  patchRef.put(results.outputExposure, coaddName + "_calexp")
340 
341 
342 
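# Minimal sketch (illustrative only): driving DetectCoaddSourcesTask.run directly on an
# in-memory coadd exposure, outside the command-line and PipelineTask machinery.  A simple
# IdFactory stands in for the butler-derived one used by runDataRef.
def _exampleDetectOnCoadd(exposure, expId=0):
    config = DetectCoaddSourcesConfig()
    task = DetectCoaddSourcesTask(config=config)
    idFactory = afwTable.IdFactory.makeSimple()  # stand-in for makeIdFactory(patchRef)
    result = task.run(exposure, idFactory, expId=expId)
    return result.outputSources, result.outputBackgrounds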
343 
344 class DeblendCoaddSourcesConfig(Config):
345  """DeblendCoaddSourcesConfig
346 
347  Configuration parameters for the `DeblendCoaddSourcesTask`.
348  """
349  singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
350  doc="Deblend sources separately in each band")
351  multiBandDeblend = ConfigurableField(target=MultibandDeblendTask,
352  doc="Deblend sources simultaneously across bands")
353  simultaneous = Field(dtype=bool, default=False, doc="Simultaneously deblend all bands?")
354  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
355  hasFakes = Field(dtype=bool,
356  default=False,
357  doc="Should be set to True if fake sources have been inserted into the input data.")
358 
359  def setDefaults(self):
360  Config.setDefaults(self)
361  self.singleBandDeblend.propagateAllPeaks = True
362 
363 
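# Illustrative config sketch: switching from the default per-band deblender to the simultaneous
# multi-band (SCARLET) path on a DeblendCoaddSourcesConfig instance.
def _exampleConfigureDeblend(config):
    config.simultaneous = True  # deblend all bands together with the multiBandDeblend subtask
    config.coaddName = "deep"   # default, shown for completeness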
364 class DeblendCoaddSourcesRunner(MergeSourcesRunner):
 365  """Task runner for the `DeblendCoaddSourcesTask`
366 
367  Required because the run method requires a list of
368  dataRefs rather than a single dataRef.
369  """
370  @staticmethod
371  def getTargetList(parsedCmd, **kwargs):
372  """Provide a list of patch references for each patch, tract, filter combo.
373 
374  Parameters
375  ----------
376  parsedCmd:
377  The parsed command
378  kwargs:
379  Keyword arguments passed to the task
380 
381  Returns
382  -------
383  targetList: list
384  List of tuples, where each tuple is a (dataRef, kwargs) pair.
385  """
386  refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
387  kwargs["psfCache"] = parsedCmd.psfCache
388  return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()]
389 
390 
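# Illustrative sketch of the grouping performed above: buildRefDict is assumed to return a
# nested mapping tract -> patch -> filter -> dataRef, and getTargetList flattens it so that each
# target is the list of per-filter dataRefs for one (tract, patch).
def _exampleGroupRefs():
    refDict = {  # hypothetical stand-in for MergeSourcesRunner.buildRefDict(parsedCmd)
        0: {"5,4": {"HSC-I": "dataRef(I)", "HSC-R": "dataRef(R)"}},
    }
    kwargs = {"psfCache": 100}
    # Same comprehension as getTargetList: one entry per patch, holding all its filters.
    return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()]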
391 class DeblendCoaddSourcesTask(CmdLineTask):
392  """Deblend the sources in a merged catalog
393 
394  Deblend sources from master catalog in each coadd.
395  This can either be done separately in each band using the HSC-SDSS deblender
396  (`DeblendCoaddSourcesTask.config.simultaneous==False`)
397  or use SCARLET to simultaneously fit the blend in all bands
398  (`DeblendCoaddSourcesTask.config.simultaneous==True`).
 399  The task will set its own `self.schema` attribute to the `Schema` of the
400  output deblended catalog.
401  This will include all fields from the input `Schema`, as well as additional fields
402  from the deblender.
403 
 404  pipe.tasks.multiband.DeblendCoaddSourcesTask Description
 405  ---------------------------------------------------------
407 
408  Parameters
409  ----------
410  butler: `Butler`
 411  Butler used to read the input schemas from disk,
 412  if `schema` or `peakSchema` is `None`.
413  schema: `Schema`
414  The schema of the merged detection catalog as an input to this task.
415  peakSchema: `Schema`
416  The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog
417  """
418  ConfigClass = DeblendCoaddSourcesConfig
419  RunnerClass = DeblendCoaddSourcesRunner
420  _DefaultName = "deblendCoaddSources"
421  makeIdFactory = _makeMakeIdFactory("MergedCoaddId")
422 
423  @classmethod
424  def _makeArgumentParser(cls):
425  parser = ArgumentParser(name=cls._DefaultName)
426  parser.add_id_argument("--id", "deepCoadd_calexp",
427  help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
428  ContainerClass=ExistingCoaddDataIdContainer)
429  parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
430  return parser
431 
432  def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
433  CmdLineTask.__init__(self, **kwargs)
434  if schema is None:
435  assert butler is not None, "Neither butler nor schema is defined"
436  schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
437  self.schemaMapper = afwTable.SchemaMapper(schema)
438  self.schemaMapper.addMinimalSchema(schema)
439  self.schema = self.schemaMapper.getOutputSchema()
440  if peakSchema is None:
441  assert butler is not None, "Neither butler nor peakSchema is defined"
442  peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema
443 
444  if self.config.simultaneous:
445  self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
446  else:
447  self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)
448 
449  def getSchemaCatalogs(self):
450  """Return a dict of empty catalogs for each catalog dataset produced by this task.
451 
452  Returns
453  -------
454  result: dict
455  Dictionary of empty catalogs, with catalog names as keys.
456  """
457  catalog = afwTable.SourceCatalog(self.schema)
458  return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
459  self.config.coaddName + "Coadd_deblendedModel": catalog}
460 
461  def runDataRef(self, patchRefList, psfCache=100):
462  """Deblend the patch
463 
464  Deblend each source simultaneously or separately
465  (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
466  Set `is-primary` and related flags.
467  Propagate flags from individual visits.
468  Write the deblended sources out.
469 
470  Parameters
471  ----------
472  patchRefList: list
473  List of data references for each filter
474  """
475 
476  if self.config.hasFakes:
477  coaddType = "fakes_" + self.config.coaddName
478  else:
479  coaddType = self.config.coaddName
480 
481  if self.config.simultaneous:
482  # Use SCARLET to simultaneously deblend across filters
483  filters = []
484  exposures = []
485  for patchRef in patchRefList:
486  exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
487  filters.append(patchRef.dataId["filter"])
488  exposures.append(exposure)
489  # The input sources are the same for all bands, since it is a merged catalog
490  sources = self.readSources(patchRef)
491  exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
492  fluxCatalogs, templateCatalogs = self.multiBandDeblend.run(exposure, sources)
493  for n in range(len(patchRefList)):
494  self.write(patchRefList[n], fluxCatalogs[filters[n]], templateCatalogs[filters[n]])
495  else:
 496  # Use the single-band deblender to deblend each band separately
497  for patchRef in patchRefList:
498  exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
499  exposure.getPsf().setCacheCapacity(psfCache)
500  sources = self.readSources(patchRef)
501  self.singleBandDeblend.run(exposure, sources)
502  self.write(patchRef, sources)
503 
504  def readSources(self, dataRef):
505  """Read merged catalog
506 
507  Read the catalog of merged detections and create a catalog
508  in a single band.
509 
510  Parameters
511  ----------
512  dataRef: data reference
513  Data reference for catalog of merged detections
514 
515  Returns
516  -------
517  sources: `SourceCatalog`
518  List of sources in merged catalog
519 
520  We also need to add columns to hold the measurements we're about to make
521  so we can measure in-place.
522  """
523  merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
524  self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
525  idFactory = self.makeIdFactory(dataRef)
526  for s in merged:
527  idFactory.notify(s.getId())
528  table = afwTable.SourceTable.make(self.schema, idFactory)
529  sources = afwTable.SourceCatalog(table)
530  sources.extend(merged, self.schemaMapper)
531  return sources
532 
533  def write(self, dataRef, flux_sources, template_sources=None):
534  """Write the source catalog(s)
535 
536  Parameters
537  ----------
538  dataRef: Data Reference
539  Reference to the output catalog.
540  flux_sources: `SourceCatalog`
541  Flux conserved sources to write to file.
542  If using the single band deblender, this is the catalog
543  generated.
544  template_sources: `SourceCatalog`
545  Source catalog using the multiband template models
546  as footprints.
547  """
548  # The multiband deblender does not have to conserve flux,
549  # so only write the flux conserved catalog if it exists
550  if flux_sources is not None:
551  assert not self.config.simultaneous or self.config.multiBandDeblend.conserveFlux
552  dataRef.put(flux_sources, self.config.coaddName + "Coadd_deblendedFlux")
553  # Only the multiband deblender has the option to output the
554  # template model catalog, which can optionally be used
555  # in MeasureMergedCoaddSources
556  if template_sources is not None:
557  assert self.config.multiBandDeblend.saveTemplates
558  dataRef.put(template_sources, self.config.coaddName + "Coadd_deblendedModel")
559  self.log.info("Wrote %d sources: %s" % (len(flux_sources), dataRef.dataId))
560 
561  def writeMetadata(self, dataRefList):
 562  """Write the metadata produced from processing the data.

563  Parameters
564  ----------
565  dataRefList
566  List of Butler data references used to write the metadata.
567  The metadata is written to dataset type `CmdLineTask._getMetadataName`.
568  """
569  for dataRef in dataRefList:
570  try:
571  metadataName = self._getMetadataName()
572  if metadataName is not None:
573  dataRef.put(self.getFullMetadata(), metadataName)
574  except Exception as e:
575  self.log.warn("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)
576 
577  def getExposureId(self, dataRef):
578  """Get the ExposureId from a data reference
579  """
580  return int(dataRef.get(self.config.coaddName + "CoaddId"))
581 
582 
583 class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections, dimensions=("tract", "patch", "abstract_filter", "skymap"),
584  defaultTemplates={"inputCoaddName": "deep",
585  "outputCoaddName": "deep"}):
586  inputSchema = cT.InitInput(
587  doc="Input schema for measure merged task produced by a deblender or detection task",
588  name="{inputCoaddName}Coadd_deblendedFlux_schema",
589  storageClass="SourceCatalog"
590  )
591  outputSchema = cT.InitOutput(
592  doc="Output schema after all new fields are added by task",
593  name="{inputCoaddName}Coadd_meas_schema",
594  storageClass="SourceCatalog"
595  )
596  refCat = cT.PrerequisiteInput(
597  doc="Reference catalog used to match measured sources against known sources",
598  name="ref_cat",
599  storageClass="SimpleCatalog",
600  dimensions=("skypix",),
601  deferLoad=True,
602  multiple=True
603  )
604  exposure = cT.Input(
605  doc="Input coadd image",
606  name="{inputCoaddName}Coadd_calexp",
607  storageClass="ExposureF",
608  dimensions=("tract", "patch", "abstract_filter", "skymap")
609  )
610  skyMap = cT.Input(
611  doc="SkyMap to use in processing",
612  name="{inputCoaddName}Coadd_skyMap",
613  storageClass="SkyMap",
614  dimensions=("skymap",),
615  )
616  visitCatalogs = cT.Input(
617  doc="Source catalogs for visits which overlap input tract, patch, abstract_filter. Will be "
618  "further filtered in the task for the purpose of propagating flags from image calibration "
 619  "and characterization to coadd objects",
620  name="src",
621  dimensions=("instrument", "visit", "detector"),
622  storageClass="SourceCatalog",
623  multiple=True
624  )
625  inputCatalog = cT.Input(
 626  doc=("Name of the input catalog to use. "
 627  "If the single band deblender was used this should be 'deblendedFlux'. "
 628  "If the multi-band deblender was used this should be 'deblendedModel', "
 629  "or 'deblendedFlux' if the multiband deblender was configured to output "
 630  "deblended flux catalogs. If no deblending was performed this should "
 631  "be 'mergeDet'."),
632  name="{inputCoaddName}Coadd_deblendedFlux",
633  storageClass="SourceCatalog",
634  dimensions=("tract", "patch", "abstract_filter", "skymap"),
635  )
636  outputSources = cT.Output(
637  doc="Source catalog containing all the measurement information generated in this task",
638  name="{outputCoaddName}Coadd_meas",
639  dimensions=("tract", "patch", "abstract_filter", "skymap"),
640  storageClass="SourceCatalog",
641  )
642  matchResult = cT.Output(
643  doc="Match catalog produced by configured matcher, optional on doMatchSources",
644  name="{outputCoaddName}Coadd_measMatch",
645  dimensions=("tract", "patch", "abstract_filter", "skymap"),
646  storageClass="Catalog",
647  )
648  denormMatches = cT.Output(
649  doc="Denormalized Match catalog produced by configured matcher, optional on "
650  "doWriteMatchesDenormalized",
651  name="{outputCoaddName}Coadd_measMatchFull",
652  dimensions=("tract", "patch", "abstract_filter", "skymap"),
653  storageClass="Catalog",
654  )
655 
656  def __init__(self, *, config=None):
657  super().__init__(config=config)
658  if config.doPropagateFlags is False:
659  self.inputs -= set(("visitCatalogs",))
660 
661  if config.doMatchSources is False:
662  self.outputs -= set(("matchResult",))
663 
664  if config.doWriteMatchesDenormalized is False:
665  self.outputs -= set(("denormMatches",))
666 
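# Illustrative sketch: because of the pruning in __init__ above, disabling a config option also
# drops the corresponding connection from the task's declared outputs.
def _exampleConnectionPruning():
    config = MeasureMergedCoaddSourcesConfig()
    config.doMatchSources = False
    connections = MeasureMergedCoaddSourcesConnections(config=config)
    return "matchResult" in connections.outputs  # expected to be False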
667 
668 class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
669  pipelineConnections=MeasureMergedCoaddSourcesConnections):
670  """!
671  @anchor MeasureMergedCoaddSourcesConfig_
672 
673  @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
674  """
675  inputCatalog = Field(dtype=str, default="deblendedFlux",
 676  doc=("Name of the input catalog to use. "
 677  "If the single band deblender was used this should be 'deblendedFlux'. "
 678  "If the multi-band deblender was used this should be 'deblendedModel'. "
 679  "If no deblending was performed this should be 'mergeDet'."))
680  measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
681  setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
682  doPropagateFlags = Field(
683  dtype=bool, default=True,
684  doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
685  )
686  propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
687  doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
688  match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
689  doWriteMatchesDenormalized = Field(
690  dtype=bool,
691  default=False,
692  doc=("Write reference matches in denormalized format? "
693  "This format uses more disk space, but is more convenient to read."),
694  )
695  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
696  psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
697  checkUnitsParseStrict = Field(
698  doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
699  dtype=str,
700  default="raise",
701  )
702  doApCorr = Field(
703  dtype=bool,
704  default=True,
705  doc="Apply aperture corrections"
706  )
707  applyApCorr = ConfigurableField(
708  target=ApplyApCorrTask,
709  doc="Subtask to apply aperture corrections"
710  )
711  doRunCatalogCalculation = Field(
712  dtype=bool,
713  default=True,
714  doc='Run catalogCalculation task'
715  )
716  catalogCalculation = ConfigurableField(
717  target=CatalogCalculationTask,
718  doc="Subtask to run catalogCalculation plugins on catalog"
719  )
720 
721  hasFakes = Field(
722  dtype=bool,
723  default=False,
724  doc="Should be set to True if fake sources have been inserted into the input data."
725  )
726 
727  @property
728  def refObjLoader(self):
729  return self.match.refObjLoader
730 
731  def setDefaults(self):
732  super().setDefaults()
733  self.measurement.plugins.names |= ['base_InputCount', 'base_Variance']
734  self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
735  'INEXACT_PSF']
736  self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
737  'INEXACT_PSF']
738 
739  def validate(self):
740  super().validate()
741  refCatGen2 = getattr(self.refObjLoader, "ref_dataset_name", None)
742  if refCatGen2 is not None and refCatGen2 != self.connections.refCat:
743  raise ValueError(
744  f"Gen2 ({refCatGen2}) and Gen3 ({self.connections.refCat}) reference catalogs "
745  f"are different. These options must be kept in sync until Gen2 is retired."
746  )
747 
748 
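# Illustrative sketch of keeping the Gen2 and Gen3 reference-catalog names in sync, as the
# validate() check above requires.  The catalog name here is a hypothetical placeholder.
def _exampleSyncRefCatNames(config, refCatName="my_ref_cat"):
    config.match.refObjLoader.ref_dataset_name = refCatName  # Gen2 loader dataset name
    config.connections.refCat = refCatName                   # Gen3 connection dataset name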
749 
755 
756 
757 class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
758  """Get the psfCache setting into MeasureMergedCoaddSourcesTask"""
759  @staticmethod
760  def getTargetList(parsedCmd, **kwargs):
761  return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache)
762 
763 
764 class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask):
765  r"""!
766  @anchor MeasureMergedCoaddSourcesTask_
767 
 768  @brief Deblend sources from master catalog in each coadd separately and measure.
769 
770  @section pipe_tasks_multiBand_Contents Contents
771 
772  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
773  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
774  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
775  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
776  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
777  - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example
778 
779  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description
780 
781  Command-line task that uses peaks and footprints from a master catalog to perform deblending and
782  measurement in each coadd.
783 
784  Given a master input catalog of sources (peaks and footprints) or deblender outputs
785  (including a HeavyFootprint in each band), measure each source on the
786  coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
787  consistent set of child sources.
788 
789  The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
790  properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
791  flags are propagated to the coadd sources.
792 
793  Optionally, we can match the coadd sources to an external reference catalog.
794 
795  @par Inputs:
796  deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
797  @n deepCoadd_calexp{tract,patch,filter}: ExposureF
798  @par Outputs:
799  deepCoadd_meas{tract,patch,filter}: SourceCatalog
800  @par Data Unit:
801  tract, patch, filter
802 
803  MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:
804 
805  <DL>
806  <DT> @ref SingleFrameMeasurementTask_ "measurement"
807  <DD> Measure source properties of deblended sources.</DD>
808  <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
809  <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are
810  not at the edge of the field and that have either not been deblended or are the children of deblended
811  sources</DD>
812  <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
813  <DD> Propagate flags set in individual visits to the coadd.</DD>
814  <DT> @ref DirectMatchTask_ "match"
815  <DD> Match input sources to a reference catalog (optional).
816  </DD>
817  </DL>
818  These subtasks may be retargeted as required.
819 
820  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization
821 
822  @copydoc \_\_init\_\_
823 
824  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task
825 
826  @copydoc run
827 
828  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters
829 
830  See @ref MeasureMergedCoaddSourcesConfig_
831 
832  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables
833 
834  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
835  flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
836  files.
837 
838  MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
 839  the various sub-tasks. See the documentation for individual sub-tasks for more information.
840 
841  @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
842  MeasureMergedCoaddSourcesTask
843 
844  After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
845  The next stage in the multi-band processing procedure will merge these measurements into a suitable
846  catalog for driving forced photometry.
847 
848  Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds
849  to be processed.
850  A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
851  `--help` command line argument:
852  @code
853  measureCoaddSources.py --help
854  @endcode
855 
 856  To demonstrate usage of the MeasureMergedCoaddSourcesTask in the larger context of multi-band processing, we
857  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
858  step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I band
859  coadd as follows:
860  @code
861  measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
862  @endcode
 863  This will process the HSC-I band data. The results are written to
 864  `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`.
865 
866  It is also necessary to run
867  @code
868  measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
869  @endcode
 870  to generate the source catalogs for the HSC-R band required by the next step in the multi-band
871  procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
872  """
873  _DefaultName = "measureCoaddSources"
874  ConfigClass = MeasureMergedCoaddSourcesConfig
875  RunnerClass = MeasureMergedCoaddSourcesRunner
876  getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
877  makeIdFactory = _makeMakeIdFactory("MergedCoaddId") # The IDs we already have are of this type
878 
879  @classmethod
880  def _makeArgumentParser(cls):
881  parser = ArgumentParser(name=cls._DefaultName)
882  parser.add_id_argument("--id", "deepCoadd_calexp",
883  help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
884  ContainerClass=ExistingCoaddDataIdContainer)
885  parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
886  return parser
887 
888  def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
889  **kwargs):
890  """!
891  @brief Initialize the task.
892 
893  Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
894  @param[in] schema: the schema of the merged detection catalog used as input to this one
895  @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
896  @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external reference
897  catalog. May be None if the loader can be constructed from the butler argument or all steps
898  requiring a reference catalog are disabled.
899  @param[in] butler: a butler used to read the input schemas from disk or construct the reference
900  catalog loader, if schema or peakSchema or refObjLoader is None
901 
902  The task will set its own self.schema attribute to the schema of the output measurement catalog.
903  This will include all fields from the input schema, as well as additional fields for all the
904  measurements.
905  """
906  super().__init__(**kwargs)
907  self.deblended = self.config.inputCatalog.startswith("deblended")
908  self.inputCatalog = "Coadd_" + self.config.inputCatalog
909  if initInputs is not None:
910  schema = initInputs['inputSchema'].schema
911  if schema is None:
912  assert butler is not None, "Neither butler nor schema is defined"
913  schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
914  self.schemaMapper = afwTable.SchemaMapper(schema)
915  self.schemaMapper.addMinimalSchema(schema)
916  self.schema = self.schemaMapper.getOutputSchema()
917  self.algMetadata = PropertyList()
918  self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
919  self.makeSubtask("setPrimaryFlags", schema=self.schema)
920  if self.config.doMatchSources:
921  self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
922  if self.config.doPropagateFlags:
923  self.makeSubtask("propagateFlags", schema=self.schema)
924  self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
925  if self.config.doApCorr:
926  self.makeSubtask("applyApCorr", schema=self.schema)
927  if self.config.doRunCatalogCalculation:
928  self.makeSubtask("catalogCalculation", schema=self.schema)
929 
930  self.outputSchema = afwTable.SourceCatalog(self.schema)
931 
932  def runQuantum(self, butlerQC, inputRefs, outputRefs):
933  inputs = butlerQC.get(inputRefs)
934 
935  refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
936  inputs.pop('refCat'), config=self.config.refObjLoader,
937  log=self.log)
938  self.match.setRefObjLoader(refObjLoader)
939 
940  # Set psfcache
941  # move this to run after gen2 deprecation
942  inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)
943 
944  # Get unique integer ID for IdFactory and RNG seeds
945  packedId, maxBits = butlerQC.quantum.dataId.pack("tract_patch", returnMaxBits=True)
946  inputs['exposureId'] = packedId
947  idFactory = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
948  # Transform inputCatalog
949  table = afwTable.SourceTable.make(self.schema, idFactory)
950  sources = afwTable.SourceCatalog(table)
951  sources.extend(inputs.pop('inputCatalog'), self.schemaMapper)
952  table = sources.getTable()
953  table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
954  inputs['sources'] = sources
955 
956  skyMap = inputs.pop('skyMap')
957  tractNumber = inputRefs.inputCatalog.dataId['tract']
958  tractInfo = skyMap[tractNumber]
959  patchInfo = tractInfo.getPatchInfo(inputRefs.inputCatalog.dataId['patch'])
960  skyInfo = Struct(
961  skyMap=skyMap,
962  tractInfo=tractInfo,
963  patchInfo=patchInfo,
964  wcs=tractInfo.getWcs(),
965  bbox=patchInfo.getOuterBBox()
966  )
967  inputs['skyInfo'] = skyInfo
968 
969  if self.config.doPropagateFlags:
970  # Filter out any visit catalog that is not coadd inputs
971  ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
972  visitKey = ccdInputs.schema.find("visit").key
973  ccdKey = ccdInputs.schema.find("ccd").key
974  inputVisitIds = set()
975  ccdRecordsWcs = {}
976  for ccdRecord in ccdInputs:
977  visit = ccdRecord.get(visitKey)
978  ccd = ccdRecord.get(ccdKey)
979  inputVisitIds.add((visit, ccd))
980  ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()
981 
982  inputCatalogsToKeep = []
983  inputCatalogWcsUpdate = []
984  for i, dataRef in enumerate(inputRefs.visitCatalogs):
985  key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
986  if key in inputVisitIds:
987  inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
988  inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
989  inputs['visitCatalogs'] = inputCatalogsToKeep
990  inputs['wcsUpdates'] = inputCatalogWcsUpdate
991  inputs['ccdInputs'] = ccdInputs
992 
993  outputs = self.run(**inputs)
994  butlerQC.put(outputs, outputRefs)
995 
996  def runDataRef(self, patchRef, psfCache=100):
997  """!
998  @brief Deblend and measure.
999 
1000  @param[in] patchRef: Patch reference.
1001 
1002  Set 'is-primary' and related flags. Propagate flags
1003  from individual visits. Optionally match the sources to a reference catalog and write the matches.
1004  Finally, write the deblended sources and measurements out.
1005  """
1006  if self.config.hasFakes:
1007  coaddType = "fakes_" + self.config.coaddName
1008  else:
1009  coaddType = self.config.coaddName
1010  exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
1011  exposure.getPsf().setCacheCapacity(psfCache)
1012  sources = self.readSources(patchRef)
1013  table = sources.getTable()
1014  table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
1015  skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)
1016 
1017  if self.config.doPropagateFlags:
1018  ccdInputs = self.propagateFlags.getCcdInputs(exposure)
1019  else:
1020  ccdInputs = None
1021 
1022  results = self.run(exposure=exposure, sources=sources,
1023  ccdInputs=ccdInputs,
1024  skyInfo=skyInfo, butler=patchRef.getButler(),
1025  exposureId=self.getExposureId(patchRef))
1026 
1027  if self.config.doMatchSources:
1028  self.writeMatches(patchRef, results)
1029  self.write(patchRef, results.outputSources)
1030 
1031  def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
1032  butler=None):
1033  """Run measurement algorithms on the input exposure, and optionally populate the
1034  resulting catalog with extra information.
1035 
1036  Parameters
1037  ----------
1038  exposure : `lsst.afw.exposure.Exposure`
1039  The input exposure on which measurements are to be performed
1040  sources : `lsst.afw.table.SourceCatalog`
1041  A catalog built from the results of merged detections, or
1042  deblender outputs.
1043  skyInfo : `lsst.pipe.base.Struct`
1044  A struct containing information about the position of the input exposure within
1045  a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
1046  exposureId : `int` or `bytes`
1047  packed unique number or bytes unique to the input exposure
1048  ccdInputs : `lsst.afw.table.ExposureCatalog`
1049  Catalog containing information on the individual visits which went into making
1050  the exposure
1051  visitCatalogs : list of `lsst.afw.table.SourceCatalogs` or `None`
1052  A list of source catalogs corresponding to measurements made on the individual
1053  visits which went into the input exposure. If None and butler is `None` then
1054  the task cannot propagate visit flags to the output catalog.
1055  wcsUpdates : list of `lsst.afw.geom.SkyWcs` or `None`
1056  If visitCatalogs is not `None` this should be a list of wcs objects which correspond
1057  to the input visits. Used to put all coordinates to common system. If `None` and
1058  butler is `None` then the task cannot propagate visit flags to the output catalog.
1059  butler : `lsst.daf.butler.Butler` or `lsst.daf.persistence.Butler`
1060  Either a gen2 or gen3 butler used to load visit catalogs
1061 
1062  Returns
1063  -------
1064  results : `lsst.pipe.base.Struct`
1065  Results of running measurement task. Will contain the catalog in the
1066  sources attribute. Optionally will have results of matching to a
1067  reference catalog in the matchResults attribute, and denormalized
1068  matches in the denormMatches attribute.
1069  """
1070  self.measurement.run(sources, exposure, exposureId=exposureId)
1071 
1072  if self.config.doApCorr:
1073  self.applyApCorr.run(
1074  catalog=sources,
1075  apCorrMap=exposure.getInfo().getApCorrMap()
1076  )
1077 
1078  # TODO DM-11568: this contiguous check-and-copy could go away if we
1079  # reserve enough space during SourceDetection and/or SourceDeblend.
1080  # NOTE: sourceSelectors require contiguous catalogs, so ensure
1081  # contiguity now, so views are preserved from here on.
1082  if not sources.isContiguous():
1083  sources = sources.copy(deep=True)
1084 
1085  if self.config.doRunCatalogCalculation:
1086  self.catalogCalculation.run(sources)
1087 
1088  self.setPrimaryFlags.run(sources, skyInfo.skyMap, skyInfo.tractInfo, skyInfo.patchInfo,
1089  includeDeblend=self.deblended)
1090  if self.config.doPropagateFlags:
1091  self.propagateFlags.run(butler, sources, ccdInputs, exposure.getWcs(), visitCatalogs, wcsUpdates)
1092 
1093  results = Struct()
1094 
1095  if self.config.doMatchSources:
1096  matchResult = self.match.run(sources, exposure.getInfo().getFilter().getName())
1097  matches = afwTable.packMatches(matchResult.matches)
1098  matches.table.setMetadata(matchResult.matchMeta)
1099  results.matchResult = matches
1100  if self.config.doWriteMatchesDenormalized:
1101  if matchResult.matches:
1102  denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
1103  else:
1104  self.log.warn("No matches, so generating dummy denormalized matches file")
1105  denormMatches = afwTable.BaseCatalog(afwTable.Schema())
1106  denormMatches.setMetadata(PropertyList())
1107  denormMatches.getMetadata().add("COMMENT",
1108  "This catalog is empty because no matches were found.")
1109  results.denormMatches = denormMatches
1110  results.denormMatches = denormMatches
1111 
1112  results.outputSources = sources
1113  return results
1114 
1115  def readSources(self, dataRef):
1116  """!
1117  @brief Read input sources.
1118 
1119  @param[in] dataRef: Data reference for catalog of merged detections
1120  @return List of sources in merged catalog
1121 
1122  We also need to add columns to hold the measurements we're about to make
1123  so we can measure in-place.
1124  """
1125  merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
1126  self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
1127  idFactory = self.makeIdFactory(dataRef)
1128  for s in merged:
1129  idFactory.notify(s.getId())
1130  table = afwTable.SourceTable.make(self.schema, idFactory)
1131  sources = afwTable.SourceCatalog(table)
1132  sources.extend(merged, self.schemaMapper)
1133  return sources
1134 
1135  def writeMatches(self, dataRef, results):
1136  """!
1137  @brief Write matches of the sources to the astrometric reference catalog.
1138 
1139  @param[in] dataRef: data reference
1140  @param[in] results: results struct from run method
1141  """
1142  if hasattr(results, "matchResult"):
1143  dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch")
1144  if hasattr(results, "denormMatches"):
1145  dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull")
1146 
1147  def write(self, dataRef, sources):
1148  """!
1149  @brief Write the source catalog.
1150 
1151  @param[in] dataRef: data reference
1152  @param[in] sources: source catalog
1153  """
1154  dataRef.put(sources, self.config.coaddName + "Coadd_meas")
1155  self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId))
1156 
1157  def getExposureId(self, dataRef):
1158  return int(dataRef.get(self.config.coaddName + "CoaddId"))
1159 