lsst.pipe.tasks  18.1.0-15-gc153667b
multiBand.py
Go to the documentation of this file.
1 #!/usr/bin/env python
2 #
3 # LSST Data Management System
4 # Copyright 2008-2015 AURA/LSST.
5 #
6 # This product includes software developed by the
7 # LSST Project (http://www.lsst.org/).
8 #
9 # This program is free software: you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation, either version 3 of the License, or
12 # (at your option) any later version.
13 #
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
18 #
19 # You should have received a copy of the LSST License Statement and
20 # the GNU General Public License along with this program. If not,
21 # see <https://www.lsstcorp.org/LegalNotices/>.
22 #
23 from lsst.coadd.utils.coaddDataIdContainer import ExistingCoaddDataIdContainer
24 from lsst.pipe.base import (CmdLineTask, Struct, ArgumentParser, ButlerInitializedTaskRunner,
25  PipelineTask, PipelineTaskConfig, PipelineTaskConnections)
27 from lsst.pex.config import Config, Field, ConfigurableField
28 from lsst.meas.algorithms import DynamicDetectionTask, ReferenceObjectLoader
29 from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
30 from lsst.meas.deblender import SourceDeblendTask, MultibandDeblendTask
31 from lsst.pipe.tasks.coaddBase import getSkyInfo
32 from lsst.pipe.tasks.scaleVariance import ScaleVarianceTask
33 from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
34 from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
35 from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
36 from lsst.pipe.tasks.propagateVisitFlags import PropagateVisitFlagsTask
37 import lsst.afw.image as afwImage
38 import lsst.afw.table as afwTable
39 import lsst.afw.math as afwMath
40 from lsst.daf.base import PropertyList
41 
42 from .mergeDetections import MergeDetectionsConfig, MergeDetectionsTask # noqa: F401
43 from .mergeMeasurements import MergeMeasurementsConfig, MergeMeasurementsTask # noqa: F401
44 from .multiBandUtils import MergeSourcesRunner, CullPeaksConfig, _makeGetSchemaCatalogs # noqa: F401
45 from .multiBandUtils import getInputSchema, getShortFilterName, readCatalog, _makeMakeIdFactory # noqa: F401
46 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleConfig # noqa: F401
47 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesSingleTask # noqa: F401
48 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiConfig # noqa: F401
49 from .deblendCoaddSourcesPipeline import DeblendCoaddSourcesMultiTask # noqa: F401
50 
51 
52 """
53 New set types:
54 * deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
55 * deepCoadd_mergeDet: merged detections (tract, patch)
56 * deepCoadd_meas: measurements of merged detections (tract, patch, filter)
57 * deepCoadd_ref: reference sources (tract, patch)
58 All of these have associated *_schema catalogs that require no data ID and hold no records.
59 
60 In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
61 the mergeDet, meas, and ref dataset Footprints:
62 * deepCoadd_peak_schema
63 """
64 
65 
66 
67 class DetectCoaddSourcesConnections(PipelineTaskConnections, dimensions=("tract", "patch", "abstract_filter", "skymap"),
68  defaultTemplates={"inputCoaddName": "deep", "outputCoaddName": "deep"}):
69  detectionSchema = cT.InitOutput(
70  doc="Schema of the detection catalog",
71  name="{outputCoaddName}Coadd_det_schema",
72  storageClass="SourceCatalog",
73  )
74  exposure = cT.Input(
75  doc="Exposure on which detections are to be performed",
76  name="{inputCoaddName}Coadd",
77  storageClass="ExposureF",
78  dimensions=("tract", "patch", "abstract_filter", "skymap")
79  )
80  outputBackgrounds = cT.Output(
81  doc="Output Backgrounds used in detection",
82  name="{outputCoaddName}Coadd_calexp_background",
83  storageClass="Background",
84  dimensions=("tract", "patch", "abstract_filter", "skymap")
85  )
86  outputSources = cT.Output(
87  doc="Detected sources catalog",
88  name="{outputCoaddName}Coadd_det",
89  storageClass="SourceCatalog",
90  dimensions=("tract", "patch", "abstract_filter", "skymap")
91  )
92  outputExposure = cT.Output(
93  doc="Exposure post detection",
94  name="{outputCoaddName}Coadd_calexp",
95  storageClass="ExposureF",
96  dimensions=("tract", "patch", "abstract_filter", "skymap")
97  )
98 
99 
100 class DetectCoaddSourcesConfig(PipelineTaskConfig, pipelineConnections=DetectCoaddSourcesConnections):
101  """!
102  @anchor DetectCoaddSourcesConfig_
103 
104  @brief Configuration parameters for the DetectCoaddSourcesTask
105  """
106  doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
107  scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
108  detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
109  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
110  doInsertFakes = Field(dtype=bool, default=False,
111  doc="Run fake sources injection task")
112  insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
113  doc="Injection of fake sources for testing "
114  "purposes (must be retargeted)")
115  hasFakes = Field(
116  dtype=bool,
117  default=False,
118  doc="Should be set to True if fake sources have been inserted into the input data."
119  )
120 
121  def setDefaults(self):
122  super().setDefaults()
123  self.detection.thresholdType = "pixel_stdev"
124  self.detection.isotropicGrow = True
125  # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
126  self.detection.reEstimateBackground = False
127  self.detection.background.useApprox = False
128  self.detection.background.binSize = 4096
129  self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
130  self.detection.doTempWideBackground = True # Suppress large footprints that overwhelm the deblender
131 
132 
138 
139 
140 class DetectCoaddSourcesTask(PipelineTask, CmdLineTask):
141  r"""!
142  @anchor DetectCoaddSourcesTask_
143 
144  @brief Detect sources on a coadd
145 
146  @section pipe_tasks_multiBand_Contents Contents
147 
148  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
149  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
150  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
151  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
152  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
153  - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example
154 
155  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description
156 
157  Command-line task that detects sources on a coadd of exposures obtained with a single filter.
158 
159  Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
160  properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
161  in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
162  propagate the full covariance matrix -- but it is simple and works well in practice.
163 
164  After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref
165  SourceDetectionTask_ "detection" subtask.
166 
167  @par Inputs:
168  deepCoadd{tract,patch,filter}: ExposureF
169  @par Outputs:
170  deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
171  @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input
172  exposure (ExposureF)
173  @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList
174  @par Data Unit:
175  tract, patch, filter
176 
177  DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
178  You can retarget this subtask if you wish.
179 
180  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization
181 
182  @copydoc \_\_init\_\_
183 
184  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task
185 
186  @copydoc run
187 
188  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters
189 
190  See @ref DetectCoaddSourcesConfig_ "DetectSourcesConfig"
191 
192  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables
193 
194  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
195  flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
196  files.
197 
198  DetectCoaddSourcesTask has no debug variables of its own because it relegates all the work to
199  @ref SourceDetectionTask_ "SourceDetectionTask"; see the documetation for
200  @ref SourceDetectionTask_ "SourceDetectionTask" for further information.
201 
202  @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example
203  of using DetectCoaddSourcesTask
204 
205  DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose of
206  the task is to update the background, detect all sources in a single band and generate a set of parent
207  footprints. Subsequent tasks in the multi-band processing procedure will merge sources across bands and,
208  eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects a data
209  reference to the coadd to be processed. A list of the available optional arguments can be obtained by
210  calling detectCoaddSources.py with the `--help` command line argument:
211  @code
212  detectCoaddSources.py --help
213  @endcode
214 
215  To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
216  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
217  steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:
218  @code
219  detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
220  @endcode
221  that will process the HSC-I band data. The results are written to
222  `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.
223 
224  It is also necessary to run:
225  @code
226  detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
227  @endcode
228  to generate the sources catalogs for the HSC-R band required by the next step in the multi-band
229  processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".
230  """
231  _DefaultName = "detectCoaddSources"
232  ConfigClass = DetectCoaddSourcesConfig
233  getSchemaCatalogs = _makeGetSchemaCatalogs("det")
234  makeIdFactory = _makeMakeIdFactory("CoaddId")
235 
236  @classmethod
237  def _makeArgumentParser(cls):
238  parser = ArgumentParser(name=cls._DefaultName)
239  parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
240  ContainerClass=ExistingCoaddDataIdContainer)
241  return parser
242 
243  def __init__(self, schema=None, **kwargs):
244  """!
245  @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.
246 
247  Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
248 
249  @param[in] schema: initial schema for the output catalog, modified-in place to include all
250  fields set by this task. If None, the source minimal schema will be used.
251  @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
252  """
253  # N.B. Super is used here to handle the multiple inheritance of PipelineTasks, the init tree
254  # call structure has been reviewed carefully to be sure super will work as intended.
255  super().__init__(**kwargs)
256  if schema is None:
257  schema = afwTable.SourceTable.makeMinimalSchema()
258  if self.config.doInsertFakes:
259  self.makeSubtask("insertFakes")
260  self.schema = schema
261  self.makeSubtask("detection", schema=self.schema)
262  if self.config.doScaleVariance:
263  self.makeSubtask("scaleVariance")
264 
265  self.detectionSchema = afwTable.SourceCatalog(self.schema)
266 
267  def runDataRef(self, patchRef):
268  """!
269  @brief Run detection on a coadd.
270 
271  Invokes @ref run and then uses @ref write to output the
272  results.
273 
274  @param[in] patchRef: data reference for patch
275  """
276  if self.config.hasFakes:
277  exposure = patchRef.get("fakes_" + self.config.coaddName + "Coadd", immediate=True)
278  else:
279  exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
280  expId = int(patchRef.get(self.config.coaddName + "CoaddId"))
281  results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
282  self.write(results, patchRef)
283  return results
284 
285  def runQuantum(self, butlerQC, inputRefs, outputRefs):
286  inputs = butlerQC.get(inputRefs)
287  packedId, maxBits = butlerQC.registry.packDataId("tract_patch_abstract_filter",
288  inputRefs.exposure.dataId,
289  returnMaxBits=True)
290  inputs["idFactory"] = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
291  inputs["expId"] = packedId
292  outputs = self.run(**inputs)
293  butlerQC.put(outputs, outputRefs)
294 
295  def run(self, exposure, idFactory, expId):
296  """!
297  @brief Run detection on an exposure.
298 
299  First scale the variance plane to match the observed variance
300  using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
301  detect sources.
302 
303  @param[in,out] exposure: Exposure on which to detect (may be backround-subtracted and scaled,
304  depending on configuration).
305  @param[in] idFactory: IdFactory to set source identifiers
306  @param[in] expId: Exposure identifier (integer) for RNG seed
307 
308  @return a pipe.base.Struct with fields
309  - sources: catalog of detections
310  - backgrounds: list of backgrounds
311  """
312  if self.config.doScaleVariance:
313  varScale = self.scaleVariance.run(exposure.maskedImage)
314  exposure.getMetadata().add("variance_scale", varScale)
315  backgrounds = afwMath.BackgroundList()
316  if self.config.doInsertFakes:
317  self.insertFakes.run(exposure, background=backgrounds)
318  table = afwTable.SourceTable.make(self.schema, idFactory)
319  detections = self.detection.makeSourceCatalog(table, exposure, expId=expId)
320  sources = detections.sources
321  fpSets = detections.fpSets
322  if hasattr(fpSets, "background") and fpSets.background:
323  for bg in fpSets.background:
324  backgrounds.append(bg)
325  return Struct(outputSources=sources, outputBackgrounds=backgrounds, outputExposure=exposure)
326 
327  def write(self, results, patchRef):
328  """!
329  @brief Write out results from runDetection.
330 
331  @param[in] exposure: Exposure to write out
332  @param[in] results: Struct returned from runDetection
333  @param[in] patchRef: data reference for patch
334  """
335  coaddName = self.config.coaddName + "Coadd"
336  patchRef.put(results.outputBackgrounds, coaddName + "_calexp_background")
337  patchRef.put(results.outputSources, coaddName + "_det")
338  if self.config.hasFakes:
339  patchRef.put(results.outputExposure, "fakes_" + coaddName + "_calexp")
340  else:
341  patchRef.put(results.outputExposure, coaddName + "_calexp")
342 
343 
344 
345 
346 class DeblendCoaddSourcesConfig(Config):
347  """DeblendCoaddSourcesConfig
348 
349  Configuration parameters for the `DeblendCoaddSourcesTask`.
350  """
351  singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
352  doc="Deblend sources separately in each band")
353  multiBandDeblend = ConfigurableField(target=MultibandDeblendTask,
354  doc="Deblend sources simultaneously across bands")
355  simultaneous = Field(dtype=bool, default=False, doc="Simultaneously deblend all bands?")
356  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
357  hasFakes = Field(dtype=bool,
358  default=False,
359  doc="Should be set to True if fake sources have been inserted into the input data.")
360 
361  def setDefaults(self):
362  Config.setDefaults(self)
363  self.singleBandDeblend.propagateAllPeaks = True
364 
365 
366 class DeblendCoaddSourcesRunner(MergeSourcesRunner):
367  """Task runner for the `MergeSourcesTask`
368 
369  Required because the run method requires a list of
370  dataRefs rather than a single dataRef.
371  """
372  @staticmethod
373  def getTargetList(parsedCmd, **kwargs):
374  """Provide a list of patch references for each patch, tract, filter combo.
375 
376  Parameters
377  ----------
378  parsedCmd:
379  The parsed command
380  kwargs:
381  Keyword arguments passed to the task
382 
383  Returns
384  -------
385  targetList: list
386  List of tuples, where each tuple is a (dataRef, kwargs) pair.
387  """
388  refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
389  kwargs["psfCache"] = parsedCmd.psfCache
390  return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()]
391 
392 
393 class DeblendCoaddSourcesTask(CmdLineTask):
394  """Deblend the sources in a merged catalog
395 
396  Deblend sources from master catalog in each coadd.
397  This can either be done separately in each band using the HSC-SDSS deblender
398  (`DeblendCoaddSourcesTask.config.simultaneous==False`)
399  or use SCARLET to simultaneously fit the blend in all bands
400  (`DeblendCoaddSourcesTask.config.simultaneous==True`).
401  The task will set its own `self.schema` atribute to the `Schema` of the
402  output deblended catalog.
403  This will include all fields from the input `Schema`, as well as additional fields
404  from the deblender.
405 
406  `pipe.tasks.multiband.DeblendCoaddSourcesTask Description
407  ---------------------------------------------------------
408  `
409 
410  Parameters
411  ----------
412  butler: `Butler`
413  Butler used to read the input schemas from disk or
414  construct the reference catalog loader, if `schema` or `peakSchema` or
415  schema: `Schema`
416  The schema of the merged detection catalog as an input to this task.
417  peakSchema: `Schema`
418  The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog
419  """
420  ConfigClass = DeblendCoaddSourcesConfig
421  RunnerClass = DeblendCoaddSourcesRunner
422  _DefaultName = "deblendCoaddSources"
423  makeIdFactory = _makeMakeIdFactory("MergedCoaddId")
424 
425  @classmethod
426  def _makeArgumentParser(cls):
427  parser = ArgumentParser(name=cls._DefaultName)
428  parser.add_id_argument("--id", "deepCoadd_calexp",
429  help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
430  ContainerClass=ExistingCoaddDataIdContainer)
431  parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
432  return parser
433 
434  def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
435  CmdLineTask.__init__(self, **kwargs)
436  if schema is None:
437  assert butler is not None, "Neither butler nor schema is defined"
438  schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
439  self.schemaMapper = afwTable.SchemaMapper(schema)
440  self.schemaMapper.addMinimalSchema(schema)
441  self.schema = self.schemaMapper.getOutputSchema()
442  if peakSchema is None:
443  assert butler is not None, "Neither butler nor peakSchema is defined"
444  peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema
445 
446  if self.config.simultaneous:
447  self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
448  else:
449  self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)
450 
451  def getSchemaCatalogs(self):
452  """Return a dict of empty catalogs for each catalog dataset produced by this task.
453 
454  Returns
455  -------
456  result: dict
457  Dictionary of empty catalogs, with catalog names as keys.
458  """
459  catalog = afwTable.SourceCatalog(self.schema)
460  return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
461  self.config.coaddName + "Coadd_deblendedModel": catalog}
462 
463  def runDataRef(self, patchRefList, psfCache=100):
464  """Deblend the patch
465 
466  Deblend each source simultaneously or separately
467  (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
468  Set `is-primary` and related flags.
469  Propagate flags from individual visits.
470  Write the deblended sources out.
471 
472  Parameters
473  ----------
474  patchRefList: list
475  List of data references for each filter
476  """
477 
478  if self.config.hasFakes:
479  coaddType = "fakes_" + self.config.coaddName
480  else:
481  coaddType = self.config.coaddName
482 
483  if self.config.simultaneous:
484  # Use SCARLET to simultaneously deblend across filters
485  filters = []
486  exposures = []
487  for patchRef in patchRefList:
488  exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
489  filters.append(patchRef.dataId["filter"])
490  exposures.append(exposure)
491  # The input sources are the same for all bands, since it is a merged catalog
492  sources = self.readSources(patchRef)
493  exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
494  fluxCatalogs, templateCatalogs = self.multiBandDeblend.run(exposure, sources)
495  for n in range(len(patchRefList)):
496  self.write(patchRefList[n], fluxCatalogs[filters[n]], templateCatalogs[filters[n]])
497  else:
498  # Use the singeband deblender to deblend each band separately
499  for patchRef in patchRefList:
500  exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
501  exposure.getPsf().setCacheCapacity(psfCache)
502  sources = self.readSources(patchRef)
503  self.singleBandDeblend.run(exposure, sources)
504  self.write(patchRef, sources)
505 
506  def readSources(self, dataRef):
507  """Read merged catalog
508 
509  Read the catalog of merged detections and create a catalog
510  in a single band.
511 
512  Parameters
513  ----------
514  dataRef: data reference
515  Data reference for catalog of merged detections
516 
517  Returns
518  -------
519  sources: `SourceCatalog`
520  List of sources in merged catalog
521 
522  We also need to add columns to hold the measurements we're about to make
523  so we can measure in-place.
524  """
525  merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
526  self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
527  idFactory = self.makeIdFactory(dataRef)
528  for s in merged:
529  idFactory.notify(s.getId())
530  table = afwTable.SourceTable.make(self.schema, idFactory)
531  sources = afwTable.SourceCatalog(table)
532  sources.extend(merged, self.schemaMapper)
533  return sources
534 
535  def write(self, dataRef, flux_sources, template_sources=None):
536  """Write the source catalog(s)
537 
538  Parameters
539  ----------
540  dataRef: Data Reference
541  Reference to the output catalog.
542  flux_sources: `SourceCatalog`
543  Flux conserved sources to write to file.
544  If using the single band deblender, this is the catalog
545  generated.
546  template_sources: `SourceCatalog`
547  Source catalog using the multiband template models
548  as footprints.
549  """
550  # The multiband deblender does not have to conserve flux,
551  # so only write the flux conserved catalog if it exists
552  if flux_sources is not None:
553  assert not self.config.simultaneous or self.config.multiBandDeblend.conserveFlux
554  dataRef.put(flux_sources, self.config.coaddName + "Coadd_deblendedFlux")
555  # Only the multiband deblender has the option to output the
556  # template model catalog, which can optionally be used
557  # in MeasureMergedCoaddSources
558  if template_sources is not None:
559  assert self.config.multiBandDeblend.saveTemplates
560  dataRef.put(template_sources, self.config.coaddName + "Coadd_deblendedModel")
561  self.log.info("Wrote %d sources: %s" % (len(flux_sources), dataRef.dataId))
562 
563  def writeMetadata(self, dataRefList):
564  """Write the metadata produced from processing the data.
565  Parameters
566  ----------
567  dataRefList
568  List of Butler data references used to write the metadata.
569  The metadata is written to dataset type `CmdLineTask._getMetadataName`.
570  """
571  for dataRef in dataRefList:
572  try:
573  metadataName = self._getMetadataName()
574  if metadataName is not None:
575  dataRef.put(self.getFullMetadata(), metadataName)
576  except Exception as e:
577  self.log.warn("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)
578 
579  def getExposureId(self, dataRef):
580  """Get the ExposureId from a data reference
581  """
582  return int(dataRef.get(self.config.coaddName + "CoaddId"))
583 
584 
585 class MeasureMergedCoaddSourcesConnections(PipelineTaskConnections, dimensions=("tract", "patch", "abstract_filter", "skymap"),
586  defaultTemplates={"inputCoaddName": "deep",
587  "outputCoaddName": "deep"}):
588  inputSchema = cT.InitInput(
589  doc="Input schema for measure merged task produced by a deblender or detection task",
590  name="{inputCoaddName}Coadd_deblendedFlux_schema",
591  storageClass="SourceCatalog"
592  )
593  outputSchema = cT.InitOutput(
594  doc="Output schema after all new fields are added by task",
595  name="{inputCoaddName}Coadd_meas_schema",
596  storageClass="SourceCatalog"
597  )
598  refCat = cT.PrerequisiteInput(
599  doc="Reference catalog used to match measured sources against known sources",
600  name="ref_cat",
601  storageClass="SimpleCatalog",
602  dimensions=("skypix",),
603  deferLoad=True,
604  multiple=True
605  )
606  exposure = cT.Input(
607  doc="Input coadd image",
608  name="{inputCoaddName}Coadd_calexp",
609  storageClass="ExposureF",
610  dimensions=("tract", "patch", "abstract_filter", "skymap")
611  )
612  skyMap = cT.Input(
613  doc="SkyMap to use in processing",
614  name="{inputCoaddName}Coadd_skyMap",
615  storageClass="SkyMap",
616  dimensions=("skymap",),
617  )
618  visitCatalogs = cT.Input(
619  doc="Source catalogs for visits which overlap input tract, patch, abstract_filter. Will be "
620  "further filtered in the task for the purpose of propagating flags from image calibration "
621  "and characterization to codd objects",
622  name="src",
623  dimensions=("instrument", "visit", "detector"),
624  storageClass="SourceCatalog",
625  multiple=True
626  )
627  inputCatalog = cT.Input(
628  doc=("Name of the input catalog to use."
629  "If the single band deblender was used this should be 'deblendedFlux."
630  "If the multi-band deblender was used this should be 'deblendedModel, "
631  "or deblendedFlux if the multiband deblender was configured to output "
632  "deblended flux catalogs. If no deblending was performed this should "
633  "be 'mergeDet'"),
634  name="{inputCoaddName}Coadd_deblendedFlux",
635  storageClass="SourceCatalog",
636  dimensions=("tract", "patch", "abstract_filter", "skymap"),
637  )
638  outputSources = cT.Output(
639  doc="Source catalog containing all the measurement information generated in this task",
640  name="{outputCoaddName}Coadd_meas",
641  dimensions=("tract", "patch", "abstract_filter", "skymap"),
642  storageClass="SourceCatalog",
643  )
644  matchResult = cT.Output(
645  doc="Match catalog produced by configured matcher, optional on doMatchSources",
646  name="{outputCoaddName}Coadd_measMatch",
647  dimensions=("tract", "patch", "abstract_filter", "skymap"),
648  storageClass="Catalog",
649  )
650  denormMatches = cT.Output(
651  doc="Denormalized Match catalog produced by configured matcher, optional on "
652  "doWriteMatchesDenormalized",
653  name="{outputCoaddName}Coadd_measMatchFull",
654  dimensions=("tract", "patch", "abstract_filter", "skymap"),
655  storageClass="Catalog",
656  )
657 
658  def __init__(self, *, config=None):
659  super().__init__(config=config)
660  if config.doPropagateFlags is False:
661  self.inputs -= set(("visitCatalogs",))
662 
663  if config.doMatchSources is False:
664  self.outputs -= set(("matchResult",))
665 
666  if config.doWriteMatchesDenormalized is False:
667  self.outputs -= set(("denormMatches",))
668 
669 
670 class MeasureMergedCoaddSourcesConfig(PipelineTaskConfig,
671  pipelineConnections=MeasureMergedCoaddSourcesConnections):
672  """!
673  @anchor MeasureMergedCoaddSourcesConfig_
674 
675  @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
676  """
677  inputCatalog = Field(dtype=str, default="deblendedFlux",
678  doc=("Name of the input catalog to use."
679  "If the single band deblender was used this should be 'deblendedFlux."
680  "If the multi-band deblender was used this should be 'deblendedModel."
681  "If no deblending was performed this should be 'mergeDet'"))
682  measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
683  setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
684  doPropagateFlags = Field(
685  dtype=bool, default=True,
686  doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
687  )
688  propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
689  doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
690  match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
691  doWriteMatchesDenormalized = Field(
692  dtype=bool,
693  default=False,
694  doc=("Write reference matches in denormalized format? "
695  "This format uses more disk space, but is more convenient to read."),
696  )
697  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
698  psfCache = Field(dtype=int, default=100, doc="Size of psfCache")
699  checkUnitsParseStrict = Field(
700  doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
701  dtype=str,
702  default="raise",
703  )
704  doApCorr = Field(
705  dtype=bool,
706  default=True,
707  doc="Apply aperture corrections"
708  )
709  applyApCorr = ConfigurableField(
710  target=ApplyApCorrTask,
711  doc="Subtask to apply aperture corrections"
712  )
713  doRunCatalogCalculation = Field(
714  dtype=bool,
715  default=True,
716  doc='Run catalogCalculation task'
717  )
718  catalogCalculation = ConfigurableField(
719  target=CatalogCalculationTask,
720  doc="Subtask to run catalogCalculation plugins on catalog"
721  )
722 
723  hasFakes = Field(
724  dtype=bool,
725  default=False,
726  doc="Should be set to True if fake sources have been inserted into the input data."
727  )
728 
729  @property
730  def refObjLoader(self):
731  return self.match.refObjLoader
732 
733  def setDefaults(self):
734  super().setDefaults()
735  self.measurement.plugins.names |= ['base_InputCount', 'base_Variance']
736  self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
737  'INEXACT_PSF']
738  self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
739  'INEXACT_PSF']
740 
741 
747 
748 
749 class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
750  """Get the psfCache setting into MeasureMergedCoaddSourcesTask"""
751  @staticmethod
752  def getTargetList(parsedCmd, **kwargs):
753  return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache)
754 
755 
756 class MeasureMergedCoaddSourcesTask(PipelineTask, CmdLineTask):
757  r"""!
758  @anchor MeasureMergedCoaddSourcesTask_
759 
760  @brief Deblend sources from master catalog in each coadd seperately and measure.
761 
762  @section pipe_tasks_multiBand_Contents Contents
763 
764  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
765  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
766  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
767  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
768  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
769  - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example
770 
771  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description
772 
773  Command-line task that uses peaks and footprints from a master catalog to perform deblending and
774  measurement in each coadd.
775 
776  Given a master input catalog of sources (peaks and footprints) or deblender outputs
777  (including a HeavyFootprint in each band), measure each source on the
778  coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
779  consistent set of child sources.
780 
781  The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
782  properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
783  flags are propagated to the coadd sources.
784 
785  Optionally, we can match the coadd sources to an external reference catalog.
786 
787  @par Inputs:
788  deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
789  @n deepCoadd_calexp{tract,patch,filter}: ExposureF
790  @par Outputs:
791  deepCoadd_meas{tract,patch,filter}: SourceCatalog
792  @par Data Unit:
793  tract, patch, filter
794 
795  MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:
796 
797  <DL>
798  <DT> @ref SingleFrameMeasurementTask_ "measurement"
799  <DD> Measure source properties of deblended sources.</DD>
800  <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
801  <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are
802  not at the edge of the field and that have either not been deblended or are the children of deblended
803  sources</DD>
804  <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
805  <DD> Propagate flags set in individual visits to the coadd.</DD>
806  <DT> @ref DirectMatchTask_ "match"
807  <DD> Match input sources to a reference catalog (optional).
808  </DD>
809  </DL>
810  These subtasks may be retargeted as required.
811 
812  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization
813 
814  @copydoc \_\_init\_\_
815 
816  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task
817 
818  @copydoc run
819 
820  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters
821 
822  See @ref MeasureMergedCoaddSourcesConfig_
823 
824  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables
825 
826  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
827  flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
828  files.
829 
830  MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
831  the various sub-tasks. See the documetation for individual sub-tasks for more information.
832 
833  @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
834  MeasureMergedCoaddSourcesTask
835 
836  After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
837  The next stage in the multi-band processing procedure will merge these measurements into a suitable
838  catalog for driving forced photometry.
839 
840  Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds
841  to be processed.
842  A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
843  `--help` command line argument:
844  @code
845  measureCoaddSources.py --help
846  @endcode
847 
848  To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
849  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
850  step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I band
851  coadd as follows:
852  @code
853  measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
854  @endcode
855  This will process the HSC-I band data. The results are written in
856  `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits
857 
858  It is also necessary to run
859  @code
860  measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
861  @endcode
862  to generate the sources catalogs for the HSC-R band required by the next step in the multi-band
863  procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
864  """
865  _DefaultName = "measureCoaddSources"
866  ConfigClass = MeasureMergedCoaddSourcesConfig
867  RunnerClass = MeasureMergedCoaddSourcesRunner
868  getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
869  makeIdFactory = _makeMakeIdFactory("MergedCoaddId") # The IDs we already have are of this type
870 
871  @classmethod
872  def _makeArgumentParser(cls):
873  parser = ArgumentParser(name=cls._DefaultName)
874  parser.add_id_argument("--id", "deepCoadd_calexp",
875  help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
876  ContainerClass=ExistingCoaddDataIdContainer)
877  parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
878  return parser
879 
880  def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, initInputs=None,
881  **kwargs):
882  """!
883  @brief Initialize the task.
884 
885  Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
886  @param[in] schema: the schema of the merged detection catalog used as input to this one
887  @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
888  @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external reference
889  catalog. May be None if the loader can be constructed from the butler argument or all steps
890  requiring a reference catalog are disabled.
891  @param[in] butler: a butler used to read the input schemas from disk or construct the reference
892  catalog loader, if schema or peakSchema or refObjLoader is None
893 
894  The task will set its own self.schema attribute to the schema of the output measurement catalog.
895  This will include all fields from the input schema, as well as additional fields for all the
896  measurements.
897  """
898  super().__init__(**kwargs)
899  self.deblended = self.config.inputCatalog.startswith("deblended")
900  self.inputCatalog = "Coadd_" + self.config.inputCatalog
901  if initInputs is not None:
902  schema = initInputs['inputSchema'].schema
903  if schema is None:
904  assert butler is not None, "Neither butler nor schema is defined"
905  schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
906  self.schemaMapper = afwTable.SchemaMapper(schema)
907  self.schemaMapper.addMinimalSchema(schema)
908  self.schema = self.schemaMapper.getOutputSchema()
909  self.algMetadata = PropertyList()
910  self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
911  self.makeSubtask("setPrimaryFlags", schema=self.schema)
912  if self.config.doMatchSources:
913  self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
914  if self.config.doPropagateFlags:
915  self.makeSubtask("propagateFlags", schema=self.schema)
916  self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
917  if self.config.doApCorr:
918  self.makeSubtask("applyApCorr", schema=self.schema)
919  if self.config.doRunCatalogCalculation:
920  self.makeSubtask("catalogCalculation", schema=self.schema)
921 
922  self.outputSchema = afwTable.SourceCatalog(self.schema)
923 
924  def runQuantum(self, butlerQC, inputRefs, outputRefs):
925  inputs = butlerQC.get(inputRefs)
926 
927  refObjLoader = ReferenceObjectLoader([ref.datasetRef.dataId for ref in inputRefs.refCat],
928  inputs.pop('refCat'), config=self.config.refObjLoader,
929  log=self.log)
930  self.match.setRefObjLoader(refObjLoader)
931 
932  # Set psfcache
933  # move this to run after gen2 deprecation
934  inputs['exposure'].getPsf().setCacheCapacity(self.config.psfCache)
935 
936  # Get unique integer ID for IdFactory and RNG seeds
937  packedId, maxBits = butlerQC.registry.packDataId("tract_patch", outputRefs.outputSources.dataId,
938  returnMaxBits=True)
939  inputs['exposureId'] = packedId
940  idFactory = afwTable.IdFactory.makeSource(packedId, 64 - maxBits)
941  # Transform inputCatalog
942  table = afwTable.SourceTable.make(self.schema, idFactory)
943  sources = afwTable.SourceCatalog(table)
944  sources.extend(inputs.pop('inputCatalog'), self.schemaMapper)
945  table = sources.getTable()
946  table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
947  inputs['sources'] = sources
948 
949  skyMap = inputs.pop('skyMap')
950  tractNumber = inputRefs.inputCatalog.dataId['tract']
951  tractInfo = skyMap[tractNumber]
952  patchInfo = tractInfo.getPatchInfo(inputRefs.inputCatalog.dataId['patch'])
953  skyInfo = Struct(
954  skyMap=skyMap,
955  tractInfo=tractInfo,
956  patchInfo=patchInfo,
957  wcs=tractInfo.getWcs(),
958  bbox=patchInfo.getOuterBBox()
959  )
960  inputs['skyInfo'] = skyInfo
961 
962  if self.config.doPropagateFlags:
963  # Filter out any visit catalog that is not coadd inputs
964  ccdInputs = inputs['exposure'].getInfo().getCoaddInputs().ccds
965  visitKey = ccdInputs.schema.find("visit").key
966  ccdKey = ccdInputs.schema.find("ccd").key
967  inputVisitIds = set()
968  ccdRecordsWcs = {}
969  for ccdRecord in ccdInputs:
970  visit = ccdRecord.get(visitKey)
971  ccd = ccdRecord.get(ccdKey)
972  inputVisitIds.add((visit, ccd))
973  ccdRecordsWcs[(visit, ccd)] = ccdRecord.getWcs()
974 
975  inputCatalogsToKeep = []
976  inputCatalogWcsUpdate = []
977  for i, dataRef in enumerate(inputRefs.visitCatalogs):
978  key = (dataRef.dataId['visit'], dataRef.dataId['detector'])
979  if key in inputVisitIds:
980  inputCatalogsToKeep.append(inputs['visitCatalogs'][i])
981  inputCatalogWcsUpdate.append(ccdRecordsWcs[key])
982  inputs['visitCatalogs'] = inputCatalogsToKeep
983  inputs['wcsUpdates'] = inputCatalogWcsUpdate
984  inputs['ccdInputs'] = ccdInputs
985 
986  outputs = self.run(**inputs)
987  butlerQC.put(outputs, outputRefs)
988 
989  def runDataRef(self, patchRef, psfCache=100):
990  """!
991  @brief Deblend and measure.
992 
993  @param[in] patchRef: Patch reference.
994 
995  Set 'is-primary' and related flags. Propagate flags
996  from individual visits. Optionally match the sources to a reference catalog and write the matches.
997  Finally, write the deblended sources and measurements out.
998  """
999  if self.config.hasFakes:
1000  coaddType = "fakes_" + self.config.coaddName
1001  else:
1002  coaddType = self.config.coaddName
1003  exposure = patchRef.get(coaddType + "Coadd_calexp", immediate=True)
1004  exposure.getPsf().setCacheCapacity(psfCache)
1005  sources = self.readSources(patchRef)
1006  table = sources.getTable()
1007  table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
1008  skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)
1009 
1010  if self.config.doPropagateFlags:
1011  ccdInputs = self.propagateFlags.getCcdInputs(exposure)
1012  else:
1013  ccdInputs = None
1014 
1015  results = self.run(exposure=exposure, sources=sources,
1016  ccdInputs=ccdInputs,
1017  skyInfo=skyInfo, butler=patchRef.getButler(),
1018  exposureId=self.getExposureId(patchRef))
1019 
1020  if self.config.doMatchSources:
1021  self.writeMatches(patchRef, results)
1022  self.write(patchRef, results.outputSources)
1023 
1024  def run(self, exposure, sources, skyInfo, exposureId, ccdInputs=None, visitCatalogs=None, wcsUpdates=None,
1025  butler=None):
1026  """Run measurement algorithms on the input exposure, and optionally populate the
1027  resulting catalog with extra information.
1028 
1029  Parameters
1030  ----------
1031  exposure : `lsst.afw.exposure.Exposure`
1032  The input exposure on which measurements are to be performed
1033  sources : `lsst.afw.table.SourceCatalog`
1034  A catalog built from the results of merged detections, or
1035  deblender outputs.
1036  skyInfo : `lsst.pipe.base.Struct`
1037  A struct containing information about the position of the input exposure within
1038  a `SkyMap`, the `SkyMap`, its `Wcs`, and its bounding box
1039  exposureId : `int` or `bytes`
1040  packed unique number or bytes unique to the input exposure
1041  ccdInputs : `lsst.afw.table.ExposureCatalog`
1042  Catalog containing information on the individual visits which went into making
1043  the exposure
1044  visitCatalogs : list of `lsst.afw.table.SourceCatalogs` or `None`
1045  A list of source catalogs corresponding to measurements made on the individual
1046  visits which went into the input exposure. If None and butler is `None` then
1047  the task cannot propagate visit flags to the output catalog.
1048  wcsUpdates : list of `lsst.afw.geom.SkyWcs` or `None`
1049  If visitCatalogs is not `None` this should be a list of wcs objects which correspond
1050  to the input visits. Used to put all coordinates to common system. If `None` and
1051  butler is `None` then the task cannot propagate visit flags to the output catalog.
1052  butler : `lsst.daf.butler.Butler` or `lsst.daf.persistence.Butler`
1053  Either a gen2 or gen3 butler used to load visit catalogs
1054 
1055  Returns
1056  -------
1057  results : `lsst.pipe.base.Struct`
1058  Results of running measurement task. Will contain the catalog in the
1059  sources attribute. Optionally will have results of matching to a
1060  reference catalog in the matchResults attribute, and denormalized
1061  matches in the denormMatches attribute.
1062  """
1063  self.measurement.run(sources, exposure, exposureId=exposureId)
1064 
1065  if self.config.doApCorr:
1066  self.applyApCorr.run(
1067  catalog=sources,
1068  apCorrMap=exposure.getInfo().getApCorrMap()
1069  )
1070 
1071  # TODO DM-11568: this contiguous check-and-copy could go away if we
1072  # reserve enough space during SourceDetection and/or SourceDeblend.
1073  # NOTE: sourceSelectors require contiguous catalogs, so ensure
1074  # contiguity now, so views are preserved from here on.
1075  if not sources.isContiguous():
1076  sources = sources.copy(deep=True)
1077 
1078  if self.config.doRunCatalogCalculation:
1079  self.catalogCalculation.run(sources)
1080 
1081  self.setPrimaryFlags.run(sources, skyInfo.skyMap, skyInfo.tractInfo, skyInfo.patchInfo,
1082  includeDeblend=self.deblended)
1083  if self.config.doPropagateFlags:
1084  self.propagateFlags.run(butler, sources, ccdInputs, exposure.getWcs(), visitCatalogs, wcsUpdates)
1085 
1086  results = Struct()
1087 
1088  if self.config.doMatchSources:
1089  matchResult = self.match.run(sources, exposure.getInfo().getFilter().getName())
1090  matches = afwTable.packMatches(matchResult.matches)
1091  matches.table.setMetadata(matchResult.matchMeta)
1092  results.matchResult = matches
1093  if self.config.doWriteMatchesDenormalized:
1094  if matchResult.matches:
1095  denormMatches = denormalizeMatches(matchResult.matches, matchResult.matchMeta)
1096  else:
1097  self.log.warn("No matches, so generating dummy denormalized matches file")
1098  denormMatches = afwTable.BaseCatalog(afwTable.Schema())
1099  denormMatches.setMetadata(PropertyList())
1100  denormMatches.getMetadata().add("COMMENT",
1101  "This catalog is empty because no matches were found.")
1102  results.denormMatches = denormMatches
1103  results.denormMatches = denormMatches
1104 
1105  results.outputSources = sources
1106  return results
1107 
1108  def readSources(self, dataRef):
1109  """!
1110  @brief Read input sources.
1111 
1112  @param[in] dataRef: Data reference for catalog of merged detections
1113  @return List of sources in merged catalog
1114 
1115  We also need to add columns to hold the measurements we're about to make
1116  so we can measure in-place.
1117  """
1118  merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
1119  self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
1120  idFactory = self.makeIdFactory(dataRef)
1121  for s in merged:
1122  idFactory.notify(s.getId())
1123  table = afwTable.SourceTable.make(self.schema, idFactory)
1124  sources = afwTable.SourceCatalog(table)
1125  sources.extend(merged, self.schemaMapper)
1126  return sources
1127 
1128  def writeMatches(self, dataRef, results):
1129  """!
1130  @brief Write matches of the sources to the astrometric reference catalog.
1131 
1132  @param[in] dataRef: data reference
1133  @param[in] results: results struct from run method
1134  """
1135  if hasattr(results, "matchResult"):
1136  dataRef.put(results.matchResult, self.config.coaddName + "Coadd_measMatch")
1137  if hasattr(results, "denormMatches"):
1138  dataRef.put(results.denormMatches, self.config.coaddName + "Coadd_measMatchFull")
1139 
1140  def write(self, dataRef, sources):
1141  """!
1142  @brief Write the source catalog.
1143 
1144  @param[in] dataRef: data reference
1145  @param[in] sources: source catalog
1146  """
1147  dataRef.put(sources, self.config.coaddName + "Coadd_meas")
1148  self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId))
1149 
1150  def getExposureId(self, dataRef):
1151  return int(dataRef.get(self.config.coaddName + "CoaddId"))
1152 
def write(self, patchRef, catalog)
Write the output.
def run(self, skyInfo, tempExpRefList, imageScalerList, weightList, altMaskList=None, mask=None, supplementaryData=None)
def getSkyInfo(coaddName, patchRef)
Return the SkyMap, tract and patch information, wcs, and outer bbox of the patch to be coadded...
Definition: coaddBase.py:231
def writeMetadata(self, dataRefList)
No metadata to write, and not sure how to write it for a list of dataRefs.