# lsst.pipe.tasks 16.0-10-g011c1fb2
# multiBand.py (documentation export)
1 #!/usr/bin/env python
2 #
3 # LSST Data Management System
4 # Copyright 2008-2015 AURA/LSST.
5 #
6 # This product includes software developed by the
7 # LSST Project (http://www.lsst.org/).
8 #
9 # This program is free software: you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation, either version 3 of the License, or
12 # (at your option) any later version.
13 #
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
18 #
19 # You should have received a copy of the LSST License Statement and
20 # the GNU General Public License along with this program. If not,
21 # see <https://www.lsstcorp.org/LegalNotices/>.
22 #
23 import numpy
24 
25 from lsst.coadd.utils.coaddDataIdContainer import ExistingCoaddDataIdContainer
26 from lsst.pipe.base import CmdLineTask, Struct, TaskRunner, ArgumentParser, ButlerInitializedTaskRunner
27 from lsst.pex.config import Config, Field, ListField, ConfigurableField, RangeField, ConfigField
28 from lsst.meas.algorithms import DynamicDetectionTask, SkyObjectsTask
29 from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
30 from lsst.meas.deblender import SourceDeblendTask, MultibandDeblendTask
31 from lsst.pipe.tasks.coaddBase import getSkyInfo
32 from lsst.pipe.tasks.scaleVariance import ScaleVarianceTask
33 from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
34 from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
35 from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
36 from lsst.pipe.tasks.propagateVisitFlags import PropagateVisitFlagsTask
37 import lsst.afw.image as afwImage
38 import lsst.afw.table as afwTable
39 import lsst.afw.math as afwMath
40 import lsst.afw.detection as afwDetect
41 from lsst.daf.base import PropertyList
42 
43 """
44 New dataset types:
45 * deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
46 * deepCoadd_mergeDet: merged detections (tract, patch)
47 * deepCoadd_meas: measurements of merged detections (tract, patch, filter)
48 * deepCoadd_ref: reference sources (tract, patch)
49 All of these have associated *_schema catalogs that require no data ID and hold no records.
50 
51 In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
52 the mergeDet, meas, and ref dataset Footprints:
53 * deepCoadd_peak_schema
54 """
55 
56 
57 def _makeGetSchemaCatalogs(datasetSuffix):
58  """Construct a getSchemaCatalogs instance method
59 
60  These are identical for most of the classes here, so we'll consolidate
61  the code.
62 
63  datasetSuffix: Suffix of dataset name, e.g., "src" for "deepCoadd_src"
64  """
65 
66  def getSchemaCatalogs(self):
67  """Return a dict of empty catalogs for each catalog dataset produced by this task."""
68  src = afwTable.SourceCatalog(self.schema)
69  if hasattr(self, "algMetadata"):
70  src.getTable().setMetadata(self.algMetadata)
71  return {self.config.coaddName + "Coadd_" + datasetSuffix: src}
72  return getSchemaCatalogs
73 
74 
75 def _makeMakeIdFactory(datasetName):
76  """Construct a makeIdFactory instance method
77 
78  These are identical for all the classes here, so this consolidates
79  the code.
80 
81  datasetName: Dataset name without the coadd name prefix, e.g., "CoaddId" for "deepCoaddId"
82  """
83 
84  def makeIdFactory(self, dataRef):
85  """Return an IdFactory for setting the detection identifiers
86 
87  The actual parameters used in the IdFactory are provided by
88  the butler (through the provided data reference.
89  """
90  expBits = dataRef.get(self.config.coaddName + datasetName + "_bits")
91  expId = int(dataRef.get(self.config.coaddName + datasetName))
92  return afwTable.IdFactory.makeSource(expId, 64 - expBits)
93  return makeIdFactory
94 
95 
def getShortFilterName(name):
    """Given a longer, camera-specific filter name (e.g. "HSC-I") return its shorthand name ("i").
    """
    # NOTE(review): the `def` line was missing from this export and has been
    # reconstructed from the call sites below (MergeDetectionsTask uses
    # getShortFilterName on config.priorityList entries) — confirm against upstream.
    # Going through the FilterProperty appears to be the supported way to map a
    # camera filter alias to its canonical short name.
    return afwImage.Filter(name).getFilterProperty().getName()
102 
103 
104 
105 
class DetectCoaddSourcesConfig(Config):
    """!
    @anchor DetectCoaddSourcesConfig_

    @brief Configuration parameters for the DetectCoaddSourcesTask
    """
    # NOTE(review): the `class` statement was missing from this export and has been
    # reconstructed (base `Config` is grounded in the Config.setDefaults call below
    # and in DetectCoaddSourcesTask.ConfigClass) — confirm against upstream.
    doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
    scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
    detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
    doInsertFakes = Field(dtype=bool, default=False,
                          doc="Run fake sources injection task")
    insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
                                    doc="Injection of fake sources for testing "
                                        "purposes (must be retargeted)")

    def setDefaults(self):
        Config.setDefaults(self)
        self.detection.thresholdType = "pixel_stdev"
        self.detection.isotropicGrow = True
        # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
        self.detection.reEstimateBackground = False
        self.detection.background.useApprox = False
        self.detection.background.binSize = 4096
        self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
        self.detection.doTempWideBackground = True  # Suppress large footprints that overwhelm the deblender
133 
139 
140 
class DetectCoaddSourcesTask(CmdLineTask):
    """!
    @anchor DetectCoaddSourcesTask_

    @brief Detect sources on a coadd.

    Command-line task that detects sources on a coadd of exposures obtained with a single filter.

    Coadding individual visits requires each exposure to be warped, which introduces covariance
    in the noise properties across pixels. Before detection, the coadd variance plane is therefore
    rescaled to match the observed variance. This is an approximate approach -- strictly, the full
    covariance matrix should be propagated -- but it is simple and works well in practice.

    After scaling the variance plane, sources are detected and footprints generated by the
    @ref SourceDetectionTask_ "detection" subtask, which may be retargeted.

    @par Inputs:
        deepCoadd{tract,patch,filter}: ExposureF
    @par Outputs:
        deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
        @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input
           exposure (ExposureF)
        @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList
    @par Data Unit:
        tract, patch, filter

    See @ref DetectCoaddSourcesConfig_ "DetectSourcesConfig" for configuration parameters,
    and the @ref pipeTasks_multiBand documentation for a complete multi-band processing
    example using ci_hsc data (run `detectCoaddSources.py --help` for command-line options).
    """
    _DefaultName = "detectCoaddSources"
    ConfigClass = DetectCoaddSourcesConfig
    getSchemaCatalogs = _makeGetSchemaCatalogs("det")
    makeIdFactory = _makeMakeIdFactory("CoaddId")

    @classmethod
    def _makeArgumentParser(cls):
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
                               ContainerClass=ExistingCoaddDataIdContainer)
        return parser

    def __init__(self, schema=None, **kwargs):
        """!
        @brief Initialize the task, creating the @ref SourceDetectionTask_ "detection" subtask
        (plus the optional fake-injection and variance-scaling subtasks).

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):

        @param[in] schema: initial schema for the output catalog, modified in place to include all
                   fields set by this task. If None, the source minimal schema will be used.
        @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
        """
        CmdLineTask.__init__(self, **kwargs)
        if schema is None:
            schema = afwTable.SourceTable.makeMinimalSchema()
        if self.config.doInsertFakes:
            self.makeSubtask("insertFakes")
        self.schema = schema
        self.makeSubtask("detection", schema=self.schema)
        if self.config.doScaleVariance:
            self.makeSubtask("scaleVariance")

    def runDataRef(self, patchRef):
        """!
        @brief Run detection on a coadd patch, then persist the results via @ref write.

        @param[in] patchRef: data reference for patch
        @return the Struct produced by @ref run
        """
        exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
        expId = int(patchRef.get(self.config.coaddName + "CoaddId"))
        results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
        self.write(exposure, results, patchRef)
        return results

    def run(self, exposure, idFactory, expId):
        """!
        @brief Run detection on an exposure.

        First scale the variance plane to match the observed variance using
        @ref ScaleVarianceTask (if configured), then invoke the
        @ref SourceDetectionTask_ "detection" subtask to detect sources.

        @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and
                       scaled, depending on configuration).
        @param[in] idFactory: IdFactory to set source identifiers
        @param[in] expId: Exposure identifier (integer) for RNG seed

        @return a pipe.base.Struct with fields
        - sources: catalog of detections
        - backgrounds: list of backgrounds
        """
        if self.config.doScaleVariance:
            scaleFactor = self.scaleVariance.run(exposure.maskedImage)
            # Record the applied factor so downstream consumers can see it.
            exposure.getMetadata().add("variance_scale", scaleFactor)
        bgList = afwMath.BackgroundList()
        if self.config.doInsertFakes:
            self.insertFakes.run(exposure, background=bgList)
        sourceTable = afwTable.SourceTable.make(self.schema, idFactory)
        detRes = self.detection.makeSourceCatalog(sourceTable, exposure, expId=expId)
        # Collect any backgrounds the detection subtask estimated.
        fpSets = detRes.fpSets
        if getattr(fpSets, "background", None):
            for bg in fpSets.background:
                bgList.append(bg)
        return Struct(sources=detRes.sources, backgrounds=bgList)

    def write(self, exposure, results, patchRef):
        """!
        @brief Write out the detections, backgrounds and calibrated exposure.

        @param[in] exposure: Exposure to write out
        @param[in] results: Struct returned from @ref run
        @param[in] patchRef: data reference for patch
        """
        prefix = self.config.coaddName + "Coadd"
        patchRef.put(results.backgrounds, prefix + "_calexp_background")
        patchRef.put(results.sources, prefix + "_det")
        patchRef.put(exposure, prefix + "_calexp")
323 
324 
325 
326 
class MergeSourcesRunner(TaskRunner):
    """Task runner for the `MergeSourcesTask`

    Required because the run method requires a list of
    dataRefs rather than a single dataRef.
    """
    def makeTask(self, parsedCmd=None, args=None):
        """Provide a butler to the Task constructor.

        Parameters
        ----------
        parsedCmd:
            The parsed command
        args: tuple
            Tuple of a list of data references and kwargs (un-used)

        Raises
        ------
        RuntimeError
            Thrown if both `parsedCmd` & `args` are `None`
        """
        if parsedCmd is not None:
            butler = parsedCmd.butler
        elif args is not None:
            # kwargs in the tuple are not used here; the first dataRef carries the butler.
            dataRefList, _ = args
            butler = dataRefList[0].getButler()
        else:
            raise RuntimeError("Neither parsedCmd or args specified")
        return self.TaskClass(config=self.config, log=self.log, butler=butler)

    @staticmethod
    def buildRefDict(parsedCmd):
        """Build a hierarchical dictionary of patch references

        Parameters
        ----------
        parsedCmd:
            The parsed command

        Returns
        -------
        refDict: dict
            A reference dictionary of the form {tract: {patch: {filter: dataRef}}}

        Raises
        ------
        RuntimeError
            Thrown when multiple references are provided for the same
            combination of tract, patch and filter
        """
        refDict = {}
        for ref in parsedCmd.id.refList:
            dataId = ref.dataId
            filterDict = refDict.setdefault(dataId["tract"], {}).setdefault(dataId["patch"], {})
            filterName = dataId["filter"]
            if filterName in filterDict:
                raise RuntimeError("Multiple versions of %s" % (dataId,))
            filterDict[filterName] = ref
        return refDict

    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        """Provide a list of patch references for each patch, tract, filter combo.

        Parameters
        ----------
        parsedCmd:
            The parsed command
        kwargs:
            Keyword arguments passed to the task

        Returns
        -------
        targetList: list
            List of tuples, where each tuple is a (dataRef, kwargs) pair.
        """
        refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
        return [(list(patchDict.values()), kwargs)
                for tractDict in refDict.values()
                for patchDict in tractDict.values()]
409 
410 
class MergeSourcesConfig(Config):
    """!
    @anchor MergeSourcesConfig_

    @brief Configuration for merging sources.
    """
    priorityList = ListField(dtype=str, default=[],
                             doc="Priority-ordered list of bands for the merge.")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")

    def validate(self):
        """Reject configurations with an empty priority list."""
        Config.validate(self)
        if not self.priorityList:
            raise RuntimeError("No priority list provided")
425 
426 
class MergeSourcesTask(CmdLineTask):
    """!
    @anchor MergeSourcesTask_

    @brief A base class for merging source catalogs.

    Merging detections (MergeDetectionsTask) and merging measurements (MergeMeasurementsTask) are
    so similar that it makes sense to re-use the code, in the form of this abstract base class.

    NB: Do not use this class directly. Instead use one of the child classes that inherit from
    MergeSourcesTask, such as @ref MergeDetectionsTask_ "MergeDetectionsTask" or
    @ref MergeMeasurementsTask_ "MergeMeasurementsTask".

    Sub-classes should set the following class variables:
    * `_DefaultName`: name of Task
    * `inputDataset`: name of dataset to read
    * `outputDataset`: name of dataset to write
    * `getSchemaCatalogs` to the result of `_makeGetSchemaCatalogs(outputDataset)`

    In addition, sub-classes must implement the run method.
    """
    _DefaultName = None
    ConfigClass = MergeSourcesConfig
    RunnerClass = MergeSourcesRunner
    inputDataset = None
    outputDataset = None
    getSchemaCatalogs = None

    @classmethod
    def _makeArgumentParser(cls):
        """!
        @brief Create a suitable ArgumentParser.

        The ArgumentParser provides a list of data references for patches;
        the RunnerClass sorts them into lists of data references for the
        same patch.
        """
        parser = ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_" + cls.inputDataset,
                               ContainerClass=ExistingCoaddDataIdContainer,
                               help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i")
        return parser

    def getInputSchema(self, butler=None, schema=None):
        """!
        @brief Obtain the input schema either directly or from a butler reference.

        @param[in] butler butler reference to obtain the input schema from
        @param[in] schema the input schema, if already known
        @return the input schema
        """
        if schema is not None:
            return schema
        assert butler is not None, "Neither butler nor schema specified"
        datasetName = self.config.coaddName + "Coadd_" + self.inputDataset + "_schema"
        return butler.get(datasetName, immediate=True).schema

    def __init__(self, butler=None, schema=None, **kwargs):
        """!
        @brief Initialize the task.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
        @param[in] schema the schema of the detection catalogs used as input to this one
        @param[in] butler a butler used to read the input schema from disk, if schema is None

        Derived classes should use the getInputSchema() method to handle these additional
        arguments and retrieve the actual input schema.
        """
        CmdLineTask.__init__(self, **kwargs)

    def runDataRef(self, patchRefList):
        """!
        @brief Merge coadd sources from multiple bands by delegating to @ref `run`,
        which must be defined in subclasses that inherit from MergeSourcesTask.

        @param[in] patchRefList list of data references, one for each filter
        """
        catalogs = dict(self.readCatalog(ref) for ref in patchRefList)
        merged = self.run(catalogs, patchRefList[0])
        self.write(patchRefList[0], merged)

    def readCatalog(self, patchRef):
        """!
        @brief Read the input catalog named by the 'inputDataset' class variable.

        @param[in] patchRef data reference for patch
        @return tuple consisting of the filter name and the catalog
        """
        filterName = patchRef.dataId["filter"]
        catalog = patchRef.get(self.config.coaddName + "Coadd_" + self.inputDataset, immediate=True)
        self.log.info("Read %d sources for filter %s: %s" % (len(catalog), filterName, patchRef.dataId))
        return filterName, catalog

    def run(self, catalogs, patchRef):
        """!
        @brief Merge multiple catalogs. This function must be defined in all subclasses that
        inherit from MergeSourcesTask.

        @param[in] catalogs dict mapping filter name to source catalog
        @param[in] patchRef data reference for patch

        @return merged catalog
        """
        raise NotImplementedError()

    def write(self, patchRef, catalog):
        """!
        @brief Write the output as the dataset named by the 'outputDataset' class variable.

        @param[in] patchRef data reference for patch
        @param[in] catalog catalog to write
        """
        patchRef.put(catalog, self.config.coaddName + "Coadd_" + self.outputDataset)
        # The filter isn't actually part of the data ID for the dataset we're saving, so
        # drop it from the log message to avoid confusion, even though the butler would
        # simply ignore it.
        mergeDataId = patchRef.dataId.copy()
        del mergeDataId["filter"]
        self.log.info("Wrote merged catalog: %s" % (mergeDataId,))

    def writeMetadata(self, dataRefList):
        """!
        @brief No metadata to write, and not sure how to write it for a list of dataRefs.
        """
        pass
555 
556 
class CullPeaksConfig(Config):
    """!
    @anchor CullPeaksConfig_

    @brief Configuration for culling garbage peaks after merging footprints.

    Peaks may also be culled after detection or during deblending; this configuration object
    only deals with culling after merging Footprints.

    These cuts are based on three quantities:
    - nBands: the number of bands in which the peak was detected
    - peakRank: the position of the peak within its family, sorted from brightest to faintest.
    - peakRankNormalized: the peak rank divided by the total number of peaks in the family.

    The formula that identifies peaks to cull is:

      nBands < nBandsSufficient
      AND (peakRank >= rankSufficient)
      AND (peakRank >= rankConsidered OR peakRankNormalized >= rankNormalizedConsidered)

    To disable peak culling, simply set nBandsSufficient=1.
    """

    # Peaks seen in at least this many bands are always kept, regardless of rank.
    nBandsSufficient = RangeField(dtype=int, default=2, min=1,
                                  doc="Always keep peaks detected in this many bands")
    # The brightest rankSufficient peaks of each family are always kept.
    rankSufficient = RangeField(dtype=int, default=20, min=1,
                                doc="Always keep this many peaks in each family")
    rankConsidered = RangeField(dtype=int, default=30, min=1,
                                doc=("Keep peaks with less than this rank that also match the "
                                     "rankNormalizedConsidered condition."))
    rankNormalizedConsidered = RangeField(dtype=float, default=0.7, min=0.0,
                                          doc=("Keep peaks with less than this normalized rank that"
                                               " also match the rankConsidered condition."))
590 
591 
class MergeDetectionsConfig(MergeSourcesConfig):
    """!
    @anchor MergeDetectionsConfig_

    @brief Configuration parameters for the MergeDetectionsTask.
    """
    # NOTE(review): the `class` statement was missing from this export and has been
    # reconstructed (base `MergeSourcesConfig` is grounded in the
    # MergeSourcesConfig.setDefaults call below) — confirm against upstream.
    minNewPeak = Field(dtype=float, default=1,
                       doc="Minimum distance from closest peak to create a new one (in arcsec).")

    maxSamePeak = Field(dtype=float, default=0.3,
                        doc="When adding new catalogs to the merge, all peaks less than this distance "
                            " (in arcsec) to an existing peak will be flagged as detected in that catalog.")
    cullPeaks = ConfigField(dtype=CullPeaksConfig, doc="Configuration for how to cull peaks.")

    skyFilterName = Field(dtype=str, default="sky",
                          doc="Name of `filter' used to label sky objects (e.g. flag merge_peak_sky is set)\n"
                              "(N.b. should be in MergeMeasurementsConfig.pseudoFilterList)")
    skyObjects = ConfigurableField(target=SkyObjectsTask, doc="Generate sky objects")

    def setDefaults(self):
        MergeSourcesConfig.setDefaults(self)
        self.skyObjects.avoidMask = ["DETECTED"]  # Nothing else is available in our custom mask
615 
616 
622 
623 
class MergeDetectionsTask(MergeSourcesTask):
    """!
    @anchor MergeDetectionsTask_

    @brief Merge coadd detections from multiple bands.

    Command-line task that merges sources detected in coadds of exposures obtained with
    different filters.

    To perform photometry consistently across coadds in multiple filter bands, we create a
    master catalog of sources from all bands by merging the sources (peaks & footprints)
    detected in each coadd, while keeping track of which band each source originates in.

    The catalog merge is performed by @ref getMergedSourceCatalog. Spurious peaks detected
    around bright objects are culled as described in @ref CullPeaksConfig_.

    @par Inputs:
        deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
    @par Outputs:
        deepCoadd_mergeDet{tract,patch}: SourceCatalog (only parent Footprints)
    @par Data Unit:
        tract, patch

    MergeDetectionsTask subclasses @ref MergeSourcesTask_ "MergeSourcesTask"; see
    @ref MergeDetectionsConfig_ for configuration parameters and the
    @ref pipeTasks_multiBand documentation for a complete multi-band processing example
    (run `mergeCoaddDetections.py --help` for command-line options).
    """
    # NOTE(review): the `class` statement was missing from this export and has been
    # reconstructed (base `MergeSourcesTask` is grounded in the
    # MergeSourcesTask.__init__ call in __init__ below) — confirm against upstream.
    ConfigClass = MergeDetectionsConfig
    _DefaultName = "mergeCoaddDetections"
    inputDataset = "det"          # per-band dataset read by MergeSourcesTask.readCatalog
    outputDataset = "mergeDet"    # merged dataset written by MergeSourcesTask.write
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")
710 
711  def __init__(self, butler=None, schema=None, **kwargs):
712  """!
713  @brief Initialize the merge detections task.
714 
715  A @ref FootprintMergeList_ "FootprintMergeList" will be used to
716  merge the source catalogs.
717 
718  Additional keyword arguments (forwarded to MergeSourcesTask.__init__):
719  @param[in] schema the schema of the detection catalogs used as input to this one
720  @param[in] butler a butler used to read the input schema from disk, if schema is None
721  @param[in] **kwargs keyword arguments to be passed to MergeSourcesTask.__init__
722 
723  The task will set its own self.schema attribute to the schema of the output merged catalog.
724  """
725  MergeSourcesTask.__init__(self, butler=butler, schema=schema, **kwargs)
726  self.makeSubtask("skyObjects")
727  self.schema = self.getInputSchema(butler=butler, schema=schema)
728 
729  filterNames = [getShortFilterName(name) for name in self.config.priorityList]
730  filterNames += [self.config.skyFilterName]
731  self.merged = afwDetect.FootprintMergeList(self.schema, filterNames)
732 
733  def run(self, catalogs, patchRef):
734  """!
735  @brief Merge multiple catalogs.
736 
737  After ordering the catalogs and filters in priority order,
738  @ref getMergedSourceCatalog of the @ref FootprintMergeList_ "FootprintMergeList" created by
739  @ref \_\_init\_\_ is used to perform the actual merging. Finally, @ref cullPeaks is used to remove
740  garbage peaks detected around bright objects.
741 
742  @param[in] catalogs
743  @param[in] patchRef
744  @param[out] mergedList
745  """
746 
747  # Convert distance to tract coordinate
748  skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)
749  tractWcs = skyInfo.wcs
750  peakDistance = self.config.minNewPeak / tractWcs.getPixelScale().asArcseconds()
751  samePeakDistance = self.config.maxSamePeak / tractWcs.getPixelScale().asArcseconds()
752 
753  # Put catalogs, filters in priority order
754  orderedCatalogs = [catalogs[band] for band in self.config.priorityList if band in catalogs.keys()]
755  orderedBands = [getShortFilterName(band) for band in self.config.priorityList
756  if band in catalogs.keys()]
757 
758  mergedList = self.merged.getMergedSourceCatalog(orderedCatalogs, orderedBands, peakDistance,
759  self.schema, self.makeIdFactory(patchRef),
760  samePeakDistance)
761 
762  #
763  # Add extra sources that correspond to blank sky
764  #
765  skySeed = patchRef.get(self.config.coaddName + "MergedCoaddId")
766  skySourceFootprints = self.getSkySourceFootprints(mergedList, skyInfo, skySeed)
767  if skySourceFootprints:
768  key = mergedList.schema.find("merge_footprint_%s" % self.config.skyFilterName).key
769  for foot in skySourceFootprints:
770  s = mergedList.addNew()
771  s.setFootprint(foot)
772  s.set(key, True)
773 
774  # Sort Peaks from brightest to faintest
775  for record in mergedList:
776  record.getFootprint().sortPeaks()
777  self.log.info("Merged to %d sources" % len(mergedList))
778  # Attempt to remove garbage peaks
779  self.cullPeaks(mergedList)
780  return mergedList
781 
782  def cullPeaks(self, catalog):
783  """!
784  @brief Attempt to remove garbage peaks (mostly on the outskirts of large blends).
785 
786  @param[in] catalog Source catalog
787  """
788  keys = [item.key for item in self.merged.getPeakSchema().extract("merge_peak_*").values()]
789  assert len(keys) > 0, "Error finding flags that associate peaks with their detection bands."
790  totalPeaks = 0
791  culledPeaks = 0
792  for parentSource in catalog:
793  # Make a list copy so we can clear the attached PeakCatalog and append the ones we're keeping
794  # to it (which is easier than deleting as we iterate).
795  keptPeaks = parentSource.getFootprint().getPeaks()
796  oldPeaks = list(keptPeaks)
797  keptPeaks.clear()
798  familySize = len(oldPeaks)
799  totalPeaks += familySize
800  for rank, peak in enumerate(oldPeaks):
801  if ((rank < self.config.cullPeaks.rankSufficient) or
802  (sum([peak.get(k) for k in keys]) >= self.config.cullPeaks.nBandsSufficient) or
803  (rank < self.config.cullPeaks.rankConsidered and
804  rank < self.config.cullPeaks.rankNormalizedConsidered * familySize)):
805  keptPeaks.append(peak)
806  else:
807  culledPeaks += 1
808  self.log.info("Culled %d of %d peaks" % (culledPeaks, totalPeaks))
809 
810  def getSchemaCatalogs(self):
811  """!
812  Return a dict of empty catalogs for each catalog dataset produced by this task.
813 
814  @param[out] dictionary of empty catalogs
815  """
816  mergeDet = afwTable.SourceCatalog(self.schema)
817  peak = afwDetect.PeakCatalog(self.merged.getPeakSchema())
818  return {self.config.coaddName + "Coadd_mergeDet": mergeDet,
819  self.config.coaddName + "Coadd_peak": peak}
820 
821  def getSkySourceFootprints(self, mergedList, skyInfo, seed):
822  """!
823  @brief Return a list of Footprints of sky objects which don't overlap with anything in mergedList
824 
825  @param mergedList The merged Footprints from all the input bands
826  @param skyInfo A description of the patch
827  @param seed Seed for the random number generator
828  """
829  mask = afwImage.Mask(skyInfo.patchInfo.getOuterBBox())
830  detected = mask.getPlaneBitMask("DETECTED")
831  for s in mergedList:
832  s.getFootprint().spans.setMask(mask, detected)
833 
834  footprints = self.skyObjects.run(mask, seed)
835  if not footprints:
836  return footprints
837 
838  # Need to convert the peak catalog's schema so we can set the "merge_peak_<skyFilterName>" flags
839  schema = self.merged.getPeakSchema()
840  mergeKey = schema.find("merge_peak_%s" % self.config.skyFilterName).key
841  converted = []
842  for oldFoot in footprints:
843  assert len(oldFoot.getPeaks()) == 1, "Should be a single peak only"
844  peak = oldFoot.getPeaks()[0]
845  newFoot = afwDetect.Footprint(oldFoot.spans, schema)
846  newFoot.addPeak(peak.getFx(), peak.getFy(), peak.getPeakValue())
847  newFoot.getPeaks()[0].set(mergeKey, True)
848  converted.append(newFoot)
849 
850  return converted
851 
852 
    """DeblendCoaddSourcesConfig

    Configuration parameters for the `DeblendCoaddSourcesTask`.
    """
    # Per-band deblender; used when ``simultaneous`` is False.
    singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
                                          doc="Deblend sources separately in each band")
    # Multi-band (SCARLET) deblender; used when ``simultaneous`` is True.
    multiBandDeblend = ConfigurableField(target=MultibandDeblendTask,
                                         doc="Deblend sources simultaneously across bands")
    simultaneous = Field(dtype=bool, default=False, doc="Simultaneously deblend all bands?")
    coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
864 
865 
867  """Task runner for the `MergeSourcesTask`
868 
869  Required because the run method requires a list of
870  dataRefs rather than a single dataRef.
871  """
872  @staticmethod
873  def getTargetList(parsedCmd, **kwargs):
874  """Provide a list of patch references for each patch, tract, filter combo.
875 
876  Parameters
877  ----------
878  parsedCmd:
879  The parsed command
880  kwargs:
881  Keyword arguments passed to the task
882 
883  Returns
884  -------
885  targetList: list
886  List of tuples, where each tuple is a (dataRef, kwargs) pair.
887  """
888  refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
889  kwargs["psfCache"] = parsedCmd.psfCache
890  return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()]
891 
892 
class DeblendCoaddSourcesTask(CmdLineTask):
    """Deblend the sources in a merged catalog

    Deblend sources from master catalog in each coadd.
    This can either be done separately in each band using the HSC-SDSS deblender
    (`DeblendCoaddSourcesTask.config.simultaneous==False`)
    or use SCARLET to simultaneously fit the blend in all bands
    (`DeblendCoaddSourcesTask.config.simultaneous==True`).
    The task will set its own `self.schema` attribute to the `Schema` of the
    output deblended catalog.
    This will include all fields from the input `Schema`, as well as additional fields
    from the deblender.

    `pipe.tasks.multiband.DeblendCoaddSourcesTask Description
    ---------------------------------------------------------
    `

    Parameters
    ----------
    butler: `Butler`
        Butler used to read the input schemas from disk or
        construct the reference catalog loader, if `schema` or `peakSchema` is None.
    schema: `Schema`
        The schema of the merged detection catalog as an input to this task.
    peakSchema: `Schema`
        The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog
    """
    ConfigClass = DeblendCoaddSourcesConfig
    RunnerClass = DeblendCoaddSourcesRunner
    _DefaultName = "deblendCoaddSources"
    # IdFactory seeded from the "<coaddName>MergedCoaddId" dataset.
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")
924 
925  @classmethod
926  def _makeArgumentParser(cls):
927  parser = ArgumentParser(name=cls._DefaultName)
928  parser.add_id_argument("--id", "deepCoadd_calexp",
929  help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
930  ContainerClass=ExistingCoaddDataIdContainer)
931  parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
932  return parser
933 
    def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
        """Initialize the task; see the class docstring for parameter descriptions."""
        CmdLineTask.__init__(self, **kwargs)
        if schema is None:
            # Without an explicit schema we need a butler to read the persisted one.
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
        # Output schema starts as a copy of the input schema; the deblender
        # subtask created below appends its own fields to it.
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        if peakSchema is None:
            assert butler is not None, "Neither butler nor peakSchema is defined"
            peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema

        if self.config.simultaneous:
            self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
        else:
            self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)
950 
951  def getSchemaCatalogs(self):
952  """Return a dict of empty catalogs for each catalog dataset produced by this task.
953 
954  Returns
955  -------
956  result: dict
957  Dictionary of empty catalogs, with catalog names as keys.
958  """
959  catalog = afwTable.SourceCatalog(self.schema)
960  return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
961  self.config.coaddName + "Coadd_deblendedModel": catalog}
962 
    def runDataRef(self, patchRefList, psfCache=100):
        """Deblend the patch

        Deblend each source simultaneously or separately
        (depending on `DeblendCoaddSourcesTask.config.simultaneous`).
        Set `is-primary` and related flags.
        Propagate flags from individual visits.
        Write the deblended sources out.

        Parameters
        ----------
        patchRefList: list
            List of data references for each filter
        psfCache: int
            Capacity for each exposure's CoaddPsf cache (single-band branch only).
        """
        if self.config.simultaneous:
            # Use SCARLET to simultaneously deblend across filters
            filters = []
            exposures = []
            for patchRef in patchRefList:
                exposure = patchRef.get(self.config.coaddName + "Coadd_calexp", immediate=True)
                filters.append(patchRef.dataId["filter"])
                exposures.append(exposure)
            # The input sources are the same for all bands, since it is a merged catalog,
            # so reading from the last patchRef of the loop is sufficient.
            sources = self.readSources(patchRef)
            exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
            fluxCatalogs, templateCatalogs = self.multiBandDeblend.run(exposure, sources)
            for n in range(len(patchRefList)):
                self.write(patchRefList[n], fluxCatalogs[filters[n]], templateCatalogs[filters[n]])
        else:
            # Use the single-band deblender to deblend each band separately
            for patchRef in patchRefList:
                exposure = patchRef.get(self.config.coaddName + "Coadd_calexp", immediate=True)
                exposure.getPsf().setCacheCapacity(psfCache)
                sources = self.readSources(patchRef)
                self.singleBandDeblend.run(exposure, sources)
                self.write(patchRef, sources)
999 
1000  def readSources(self, dataRef):
1001  """Read merged catalog
1002 
1003  Read the catalog of merged detections and create a catalog
1004  in a single band.
1005 
1006  Parameters
1007  ----------
1008  dataRef: data reference
1009  Data reference for catalog of merged detections
1010 
1011  Returns
1012  -------
1013  sources: `SourceCatalog`
1014  List of sources in merged catalog
1015 
1016  We also need to add columns to hold the measurements we're about to make
1017  so we can measure in-place.
1018  """
1019  merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
1020  self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
1021  idFactory = self.makeIdFactory(dataRef)
1022  for s in merged:
1023  idFactory.notify(s.getId())
1024  table = afwTable.SourceTable.make(self.schema, idFactory)
1025  sources = afwTable.SourceCatalog(table)
1026  sources.extend(merged, self.schemaMapper)
1027  return sources
1028 
1029  def write(self, dataRef, flux_sources, template_sources=None):
1030  """Write the source catalog(s)
1031 
1032  Parameters
1033  ----------
1034  dataRef: Data Reference
1035  Reference to the output catalog.
1036  flux_sources: `SourceCatalog`
1037  Flux conserved sources to write to file.
1038  If using the single band deblender, this is the catalog
1039  generated.
1040  template_sources: `SourceCatalog`
1041  Source catalog using the multiband template models
1042  as footprints.
1043  """
1044  # The multiband deblender does not have to conserve flux,
1045  # so only write the flux conserved catalog if it exists
1046  if flux_sources is not None:
1047  assert not self.config.simultaneous or self.config.multiBandDeblend.conserveFlux
1048  dataRef.put(flux_sources, self.config.coaddName + "Coadd_deblendedFlux")
1049  # Only the multiband deblender has the option to output the
1050  # template model catalog, which can optionally be used
1051  # in MeasureMergedCoaddSources
1052  if template_sources is not None:
1053  assert self.config.multiBandDeblend.saveTemplates
1054  dataRef.put(template_sources, self.config.coaddName + "Coadd_deblendedModel")
1055  self.log.info("Wrote %d sources: %s" % (len(flux_sources), dataRef.dataId))
1056 
1057  def writeMetadata(self, dataRefList):
1058  """Write the metadata produced from processing the data.
1059  Parameters
1060  ----------
1061  dataRefList
1062  List of Butler data references used to write the metadata.
1063  The metadata is written to dataset type `CmdLineTask._getMetadataName`.
1064  """
1065  for dataRef in dataRefList:
1066  try:
1067  metadataName = self._getMetadataName()
1068  if metadataName is not None:
1069  dataRef.put(self.getFullMetadata(), metadataName)
1070  except Exception as e:
1071  self.log.warn("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)
1072 
    def getExposureId(self, dataRef):
        """Get the ExposureId from a data reference

        Reads the "<coaddName>CoaddId" dataset and coerces it to `int`.
        """
        return int(dataRef.get(self.config.coaddName + "CoaddId"))
1077 
1078 
1080  """!
1081  @anchor MeasureMergedCoaddSourcesConfig_
1082 
1083  @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
1084  """
1085  inputCatalog = Field(dtype=str, default="deblendedFlux",
1086  doc=("Name of the input catalog to use."
1087  "If the single band deblender was used this should be 'deblendedFlux."
1088  "If the multi-band deblender was used this should be 'deblendedModel."
1089  "If no deblending was performed this should be 'mergeDet'"))
1090  measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
1091  setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
1092  doPropagateFlags = Field(
1093  dtype=bool, default=True,
1094  doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
1095  )
1096  propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
1097  doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
1098  match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
1099  doWriteMatchesDenormalized = Field(
1100  dtype=bool,
1101  default=False,
1102  doc=("Write reference matches in denormalized format? "
1103  "This format uses more disk space, but is more convenient to read."),
1104  )
1105  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
1106  checkUnitsParseStrict = Field(
1107  doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
1108  dtype=str,
1109  default="raise",
1110  )
1111  doApCorr = Field(
1112  dtype=bool,
1113  default=True,
1114  doc="Apply aperture corrections"
1115  )
1116  applyApCorr = ConfigurableField(
1117  target=ApplyApCorrTask,
1118  doc="Subtask to apply aperture corrections"
1119  )
1120  doRunCatalogCalculation = Field(
1121  dtype=bool,
1122  default=True,
1123  doc='Run catalogCalculation task'
1124  )
1125  catalogCalculation = ConfigurableField(
1126  target=CatalogCalculationTask,
1127  doc="Subtask to run catalogCalculation plugins on catalog"
1128  )
1129 
    def setDefaults(self):
        """Enable input-count/variance plugins and flag coadd-specific mask
        planes (CLIPPED, SENSOR_EDGE, INEXACT_PSF) in base_PixelFlags."""
        Config.setDefaults(self)
        self.measurement.plugins.names |= ['base_InputCount', 'base_Variance']
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                       'INEXACT_PSF']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                     'INEXACT_PSF']
1137 
1138 
1144 
1145 
class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
    """Get the psfCache setting into MeasureMergedCoaddSourcesTask"""

    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        # Forward only the psfCache value from the parsed command line;
        # any other incoming kwargs are intentionally not passed through.
        psfCache = parsedCmd.psfCache
        return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=psfCache)
1151 
1152 
1154  """!
1155  @anchor MeasureMergedCoaddSourcesTask_
1156 
 @brief Deblend sources from master catalog in each coadd separately and measure.
1158 
1159  @section pipe_tasks_multiBand_Contents Contents
1160 
1161  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
1162  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
1163  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
1164  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
1165  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
1166  - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example
1167 
1168  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description
1169 
1170  Command-line task that uses peaks and footprints from a master catalog to perform deblending and
1171  measurement in each coadd.
1172 
1173  Given a master input catalog of sources (peaks and footprints) or deblender outputs
1174  (including a HeavyFootprint in each band), measure each source on the
1175  coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
1176  consistent set of child sources.
1177 
1178  The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
1179  properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
1180  flags are propagated to the coadd sources.
1181 
1182  Optionally, we can match the coadd sources to an external reference catalog.
1183 
1184  @par Inputs:
1185  deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
1186  @n deepCoadd_calexp{tract,patch,filter}: ExposureF
1187  @par Outputs:
1188  deepCoadd_meas{tract,patch,filter}: SourceCatalog
1189  @par Data Unit:
1190  tract, patch, filter
1191 
1192  MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:
1193 
1194  <DL>
1195  <DT> @ref SingleFrameMeasurementTask_ "measurement"
1196  <DD> Measure source properties of deblended sources.</DD>
1197  <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
1198  <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are
1199  not at the edge of the field and that have either not been deblended or are the children of deblended
1200  sources</DD>
1201  <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
1202  <DD> Propagate flags set in individual visits to the coadd.</DD>
1203  <DT> @ref DirectMatchTask_ "match"
1204  <DD> Match input sources to a reference catalog (optional).
1205  </DD>
1206  </DL>
1207  These subtasks may be retargeted as required.
1208 
1209  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization
1210 
1211  @copydoc \_\_init\_\_
1212 
1213  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task
1214 
1215  @copydoc run
1216 
1217  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters
1218 
1219  See @ref MeasureMergedCoaddSourcesConfig_
1220 
1221  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables
1222 
1223  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
1224  flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
1225  files.
1226 
 MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
 the various sub-tasks. See the documentation for individual sub-tasks for more information.
1229 
1230  @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
1231  MeasureMergedCoaddSourcesTask
1232 
1233  After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
1234  The next stage in the multi-band processing procedure will merge these measurements into a suitable
1235  catalog for driving forced photometry.
1236 
1237  Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds
1238  to be processed.
1239  A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
1240  `--help` command line argument:
1241  @code
1242  measureCoaddSources.py --help
1243  @endcode
1244 
 To demonstrate usage of the MeasureMergedCoaddSourcesTask in the larger context of multi-band processing, we
1246  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
1247  step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I band
1248  coadd as follows:
1249  @code
1250  measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
1251  @endcode
 This will process the HSC-I band data. The results are written in
 `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`.
1254 
1255  It is also necessary to run
1256  @code
1257  measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
1258  @endcode
1259  to generate the sources catalogs for the HSC-R band required by the next step in the multi-band
1260  procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
1261  """
    _DefaultName = "measureCoaddSources"
    ConfigClass = MeasureMergedCoaddSourcesConfig
    RunnerClass = MeasureMergedCoaddSourcesRunner
    # Empty schema catalogs for the "meas" dataset.
    getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")  # The IDs we already have are of this type
1267 
1268  @classmethod
1269  def _makeArgumentParser(cls):
1270  parser = ArgumentParser(name=cls._DefaultName)
1271  parser.add_id_argument("--id", "deepCoadd_calexp",
1272  help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
1273  ContainerClass=ExistingCoaddDataIdContainer)
1274  parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
1275  return parser
1276 
    def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, **kwargs):
        """!
        @brief Initialize the task.

        Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
        @param[in] schema: the schema of the merged detection catalog used as input to this one
        @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
        @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external reference
            catalog. May be None if the loader can be constructed from the butler argument or all steps
            requiring a reference catalog are disabled.
        @param[in] butler: a butler used to read the input schemas from disk or construct the reference
            catalog loader, if schema or peakSchema or refObjLoader is None

        The task will set its own self.schema attribute to the schema of the output measurement catalog.
        This will include all fields from the input schema, as well as additional fields for all the
        measurements.
        """
        CmdLineTask.__init__(self, **kwargs)
        # Which deblender output (if any) will be measured: names starting with
        # "deblended" come from DeblendCoaddSourcesTask; "mergeDet" means no deblending.
        self.deblended = self.config.inputCatalog.startswith("deblended")
        self.inputCatalog = "Coadd_" + self.config.inputCatalog
        if schema is None:
            assert butler is not None, "Neither butler nor schema is defined"
            schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
        self.schemaMapper = afwTable.SchemaMapper(schema)
        self.schemaMapper.addMinimalSchema(schema)
        self.schema = self.schemaMapper.getOutputSchema()
        # NOTE(review): self.algMetadata must be initialized before this call
        # (upstream assigns a PropertyList here); the assignment appears to be
        # missing from this extraction — confirm against the original file.
        self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
        self.makeSubtask("setPrimaryFlags", schema=self.schema)
        if self.config.doMatchSources:
            if refObjLoader is None:
                assert butler is not None, "Neither butler nor refObjLoader is defined"
            self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
        if self.config.doPropagateFlags:
            self.makeSubtask("propagateFlags", schema=self.schema)
        self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.schema)
        if self.config.doRunCatalogCalculation:
            self.makeSubtask("catalogCalculation", schema=self.schema)
1317 
    def runDataRef(self, patchRef, psfCache=100):
        """!
        @brief Deblend and measure.

        @param[in] patchRef: Patch reference.
        @param[in] psfCache: Capacity for the CoaddPsf cache.

        Set 'is-primary' and related flags. Propagate flags
        from individual visits. Optionally match the sources to a reference catalog and write the matches.
        Finally, write the deblended sources and measurements out.
        """
        exposure = patchRef.get(self.config.coaddName + "Coadd_calexp", immediate=True)
        exposure.getPsf().setCacheCapacity(psfCache)
        sources = self.readSources(patchRef)
        table = sources.getTable()
        table.setMetadata(self.algMetadata)  # Capture algorithm metadata to write out to the source catalog.

        self.measurement.run(sources, exposure, exposureId=self.getExposureId(patchRef))

        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=sources,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )

        # TODO DM-11568: this contiguous check-and-copy could go away if we
        # reserve enough space during SourceDetection and/or SourceDeblend.
        # NOTE: sourceSelectors require contiguous catalogs, so ensure
        # contiguity now, so views are preserved from here on.
        if not sources.isContiguous():
            sources = sources.copy(deep=True)

        if self.config.doRunCatalogCalculation:
            self.catalogCalculation.run(sources)

        skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)
        self.setPrimaryFlags.run(sources, skyInfo.skyMap, skyInfo.tractInfo, skyInfo.patchInfo,
                                 includeDeblend=self.deblended)
        if self.config.doPropagateFlags:
            self.propagateFlags.run(patchRef.getButler(), sources, self.propagateFlags.getCcdInputs(exposure),
                                    exposure.getWcs())
        if self.config.doMatchSources:
            self.writeMatches(patchRef, exposure, sources)
        self.write(patchRef, sources)
1361 
1362  def readSources(self, dataRef):
1363  """!
1364  @brief Read input sources.
1365 
1366  @param[in] dataRef: Data reference for catalog of merged detections
1367  @return List of sources in merged catalog
1368 
1369  We also need to add columns to hold the measurements we're about to make
1370  so we can measure in-place.
1371  """
1372  merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
1373  self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
1374  idFactory = self.makeIdFactory(dataRef)
1375  for s in merged:
1376  idFactory.notify(s.getId())
1377  table = afwTable.SourceTable.make(self.schema, idFactory)
1378  sources = afwTable.SourceCatalog(table)
1379  sources.extend(merged, self.schemaMapper)
1380  return sources
1381 
1382  def writeMatches(self, dataRef, exposure, sources):
1383  """!
1384  @brief Write matches of the sources to the astrometric reference catalog.
1385 
1386  We use the Wcs in the exposure to match sources.
1387 
1388  @param[in] dataRef: data reference
1389  @param[in] exposure: exposure with Wcs
1390  @param[in] sources: source catalog
1391  """
1392  result = self.match.run(sources, exposure.getInfo().getFilter().getName())
1393  if result.matches:
1394  matches = afwTable.packMatches(result.matches)
1395  matches.table.setMetadata(result.matchMeta)
1396  dataRef.put(matches, self.config.coaddName + "Coadd_measMatch")
1397  if self.config.doWriteMatchesDenormalized:
1398  denormMatches = denormalizeMatches(result.matches, result.matchMeta)
1399  dataRef.put(denormMatches, self.config.coaddName + "Coadd_measMatchFull")
1400 
1401  def write(self, dataRef, sources):
1402  """!
1403  @brief Write the source catalog.
1404 
1405  @param[in] dataRef: data reference
1406  @param[in] sources: source catalog
1407  """
1408  dataRef.put(sources, self.config.coaddName + "Coadd_meas")
1409  self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId))
1410 
    def getExposureId(self, dataRef):
        """Return the integer exposure id read from the "<coaddName>CoaddId" dataset."""
        return int(dataRef.get(self.config.coaddName + "CoaddId"))
1413 
1414 
    """!
    @anchor MergeMeasurementsConfig_

    @brief Configuration parameters for the MergeMeasurementsTask
    """
    pseudoFilterList = ListField(dtype=str, default=["sky"],
                                 doc="Names of filters which may have no associated detection\n"
                                     "(N.b. should include MergeDetectionsConfig.skyFilterName)")
    # snName is the prefix of the flux field used for the S/N computation below.
    snName = Field(dtype=str, default="base_PsfFlux",
                   doc="Name of flux measurement for calculating the S/N when choosing the reference band.")
    minSN = Field(dtype=float, default=10.,
                  doc="If the S/N from the priority band is below this value (and the S/N "
                      "is larger than minSNDiff compared to the priority band), use the band with "
                      "the largest S/N as the reference band.")
    minSNDiff = Field(dtype=float, default=3.,
                      doc="If the difference in S/N between another band and the priority band is larger "
                          "than this value (and the S/N in the priority band is less than minSN) "
                          "use the band with the largest S/N as the reference band")
    # Measurements with any of these flags set are disfavored when choosing
    # the reference band.
    flags = ListField(dtype=str, doc="Require that these flags, if available, are not set",
                      default=["base_PixelFlags_flag_interpolatedCenter", "base_PsfFlux_flag",
                               "ext_photometryKron_KronFlux_flag", "modelfit_CModel_flag", ])
1437 
1438 
1444 
1445 
1447  """!
1448  @anchor MergeMeasurementsTask_
1449 
1450  @brief Merge measurements from multiple bands
1451 
1452  @section pipe_tasks_multiBand_Contents Contents
1453 
1454  - @ref pipe_tasks_multiBand_MergeMeasurementsTask_Purpose
1455  - @ref pipe_tasks_multiBand_MergeMeasurementsTask_Initialize
1456  - @ref pipe_tasks_multiBand_MergeMeasurementsTask_Run
1457  - @ref pipe_tasks_multiBand_MergeMeasurementsTask_Config
1458  - @ref pipe_tasks_multiBand_MergeMeasurementsTask_Debug
1459  - @ref pipe_tasks_multiband_MergeMeasurementsTask_Example
1460 
1461  @section pipe_tasks_multiBand_MergeMeasurementsTask_Purpose Description
1462 
1463  Command-line task that merges measurements from multiple bands.
1464 
1465  Combines consistent (i.e. with the same peaks and footprints) catalogs of sources from multiple filter
1466  bands to construct a unified catalog that is suitable for driving forced photometry. Every source is
1467  required to have centroid, shape and flux measurements in each band.
1468 
1469  @par Inputs:
1470  deepCoadd_meas{tract,patch,filter}: SourceCatalog
1471  @par Outputs:
1472  deepCoadd_ref{tract,patch}: SourceCatalog
1473  @par Data Unit:
1474  tract, patch
1475 
1476  MergeMeasurementsTask subclasses @ref MergeSourcesTask_ "MergeSourcesTask".
1477 
1478  @section pipe_tasks_multiBand_MergeMeasurementsTask_Initialize Task initialization
1479 
1480  @copydoc \_\_init\_\_
1481 
1482  @section pipe_tasks_multiBand_MergeMeasurementsTask_Run Invoking the Task
1483 
1484  @copydoc run
1485 
1486  @section pipe_tasks_multiBand_MergeMeasurementsTask_Config Configuration parameters
1487 
1488  See @ref MergeMeasurementsConfig_
1489 
1490  @section pipe_tasks_multiBand_MergeMeasurementsTask_Debug Debug variables
1491 
1492  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
1493  flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
1494  files.
1495 
1496  MergeMeasurementsTask has no debug variables.
1497 
1498  @section pipe_tasks_multiband_MergeMeasurementsTask_Example A complete example
1499  of using MergeMeasurementsTask
1500 
1501  MergeMeasurementsTask is meant to be run after deblending & measuring sources in every band.
1502  The purpose of the task is to generate a catalog of sources suitable for driving forced photometry in
1503  coadds and individual exposures.
1504  Command-line usage of MergeMeasurementsTask expects a data reference to the coadds to be processed. A list
1505  of the available optional arguments can be obtained by calling mergeCoaddMeasurements.py with the `--help`
1506  command line argument:
1507  @code
1508  mergeCoaddMeasurements.py --help
1509  @endcode
1510 
 To demonstrate usage of the MergeMeasurementsTask in the larger context of multi-band processing, we
1512  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
1513  step 7 at @ref pipeTasks_multiBand, one may merge the catalogs generated after deblending and measuring
1514  as follows:
1515  @code
1516  mergeCoaddMeasurements.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I^HSC-R
1517  @endcode
1518  This will merge the HSC-I & HSC-R band catalogs. The results are written in
1519  `$CI_HSC_DIR/DATA/deepCoadd-results/`.
1520  """
    _DefaultName = "mergeCoaddMeasurements"
    ConfigClass = MergeMeasurementsConfig
    # Dataset names: reads "<coaddName>Coadd_meas", writes "<coaddName>Coadd_ref".
    inputDataset = "meas"
    outputDataset = "ref"
    getSchemaCatalogs = _makeGetSchemaCatalogs("ref")
1526 
    def __init__(self, butler=None, schema=None, **kwargs):
        """!
        Initialize the task.

        Additional keyword arguments (forwarded to MergeSourcesTask.__init__):
        @param[in] schema: the schema of the detection catalogs used as input to this one
        @param[in] butler: a butler used to read the input schema from disk, if schema is None

        The task will set its own self.schema attribute to the schema of the output merged catalog.
        """
        MergeSourcesTask.__init__(self, butler=butler, schema=schema, **kwargs)
        inputSchema = self.getInputSchema(butler=butler, schema=schema)
        self.schemaMapper = afwTable.SchemaMapper(inputSchema, True)
        self.schemaMapper.addMinimalSchema(inputSchema, True)
        # Keys used to compute each band's S/N from the configured flux field.
        self.fluxKey = inputSchema.find(self.config.snName + "_flux").getKey()
        self.fluxErrKey = inputSchema.find(self.config.snName + "_fluxErr").getKey()
        self.fluxFlagKey = inputSchema.find(self.config.snName + "_flag").getKey()

        # Per-band flag keys recording which band supplied the reference measurement.
        self.flagKeys = {}
        for band in self.config.priorityList:
            short = getShortFilterName(band)
            outputKey = self.schemaMapper.editOutputSchema().addField(
                "merge_measurement_%s" % short,
                type="Flag",
                doc="Flag field set if the measurements here are from the %s filter" % band
            )
            peakKey = inputSchema.find("merge_peak_%s" % short).key
            footprintKey = inputSchema.find("merge_footprint_%s" % short).key
            self.flagKeys[band] = Struct(peak=peakKey, footprint=footprintKey, output=outputKey)
        self.schema = self.schemaMapper.getOutputSchema()

        # NOTE(review): self.pseudoFilterKeys is appended to below, but its
        # initialization (an empty list upstream) appears to be missing from
        # this extraction — confirm against the original file.
        for filt in self.config.pseudoFilterList:
            try:
                self.pseudoFilterKeys.append(self.schema.find("merge_peak_%s" % filt).getKey())
            except Exception as e:
                self.log.warn("merge_peak is not set for pseudo-filter %s: %s" % (filt, e))

        # Keys for flags that disqualify a band from being the reference band.
        self.badFlags = {}
        for flag in self.config.flags:
            try:
                self.badFlags[flag] = self.schema.find(flag).getKey()
            except KeyError as exc:
                self.log.warn("Can't find flag %s in schema: %s" % (flag, exc,))
1571 
def run(self, catalogs, patchRef):
    """!
    Merge measurement catalogs to create a single reference catalog for forced photometry

    @param[in] catalogs: the catalogs to be merged, as a dict mapping band name to catalog
    @param[in] patchRef: patch reference for data

    For parent sources, we choose the first band in config.priorityList for which the
    merge_footprint flag for that band is True.

    For child sources, the logic is the same, except that we use the merge_peak flags.
    """
    # Put catalogs, filters in priority order
    orderedCatalogs = [catalogs[band] for band in self.config.priorityList if band in catalogs]
    orderedKeys = [self.flagKeys[band] for band in self.config.priorityList if band in catalogs]

    mergedCatalog = afwTable.SourceCatalog(self.schema)
    mergedCatalog.reserve(len(orderedCatalogs[0]))

    # Sanity check: every input catalog must list the same sources in the same order.
    idKey = orderedCatalogs[0].table.getIdKey()
    for catalog in orderedCatalogs[1:]:
        if numpy.any(orderedCatalogs[0].get(idKey) != catalog.get(idKey)):
            raise ValueError("Error in inputs to MergeCoaddMeasurements: source IDs do not match")

    # This first zip iterates over all the catalogs simultaneously, yielding a sequence of one
    # record for each band, in priority order.
    for orderedRecords in zip(*orderedCatalogs):

        maxSNRecord = None
        maxSNFlagKeys = None
        maxSN = 0.
        priorityRecord = None
        priorityFlagKeys = None
        prioritySN = 0.
        hasPseudoFilter = False

        # Now we iterate over those record-band pairs, keeping track of the priority and the
        # largest S/N band.
        for inputRecord, flagKeys in zip(orderedRecords, orderedKeys):
            parent = (inputRecord.getParent() == 0 and inputRecord.get(flagKeys.footprint))
            child = (inputRecord.getParent() != 0 and inputRecord.get(flagKeys.peak))

            if not (parent or child):
                # Neither merge flag set: check whether this is a pseudo-filter object
                # (e.g. a sky object), which always takes the first priority band.
                for pseudoFilterKey in self.pseudoFilterKeys:
                    if inputRecord.get(pseudoFilterKey):
                        hasPseudoFilter = True
                        priorityRecord = inputRecord
                        priorityFlagKeys = flagKeys
                        break
                if hasPseudoFilter:
                    break

            # Records with any bad flag, a flagged flux, or a zero flux error get S/N 0
            # so they can never win the highest-S/N comparison.
            isBad = any(inputRecord.get(flag) for flag in self.badFlags)
            if isBad or inputRecord.get(self.fluxFlagKey) or inputRecord.get(self.fluxErrKey) == 0:
                sn = 0.
            else:
                sn = inputRecord.get(self.fluxKey)/inputRecord.get(self.fluxErrKey)
            if numpy.isnan(sn) or sn < 0.:
                sn = 0.
            if (parent or child) and priorityRecord is None:
                priorityRecord = inputRecord
                priorityFlagKeys = flagKeys
                prioritySN = sn
            if sn > maxSN:
                maxSNRecord = inputRecord
                maxSNFlagKeys = flagKeys
                maxSN = sn

        # If the priority band has a low S/N we would like to choose the band with the highest S/N as
        # the reference band instead. However, we only want to choose the highest S/N band if it is
        # significantly better than the priority band. Therefore, to choose a band other than the
        # priority, we require that the priority S/N is below the minimum threshold and that the
        # difference between the priority and highest S/N is larger than the difference threshold.
        #
        # For pseudo code objects we always choose the first band in the priority list.
        bestRecord = None
        bestFlagKeys = None
        if hasPseudoFilter:
            bestRecord = priorityRecord
            bestFlagKeys = priorityFlagKeys
        elif (prioritySN < self.config.minSN and (maxSN - prioritySN) > self.config.minSNDiff and
              maxSNRecord is not None):
            bestRecord = maxSNRecord
            bestFlagKeys = maxSNFlagKeys
        elif priorityRecord is not None:
            bestRecord = priorityRecord
            bestFlagKeys = priorityFlagKeys

        if bestRecord is not None and bestFlagKeys is not None:
            outputRecord = mergedCatalog.addNew()
            outputRecord.assign(bestRecord, self.schemaMapper)
            outputRecord.set(bestFlagKeys.output, True)
        else:  # if we didn't find any records
            raise ValueError("Error in inputs to MergeCoaddMeasurements: no valid reference for %s" %
                             inputRecord.getId())

    # more checking for sane inputs, since zip silently iterates over the smallest sequence
    for inputCatalog in orderedCatalogs:
        if len(mergedCatalog) != len(inputCatalog):
            # Fix: report the mismatched catalog's length, not the number of catalogs.
            raise ValueError("Mismatch between catalog sizes: %s != %s" %
                             (len(mergedCatalog), len(inputCatalog)))

    return mergedCatalog
def getSkySourceFootprints(self, mergedList, skyInfo, seed)
Return a list of Footprints of sky objects which don&#39;t overlap with anything in mergedList.
Definition: multiBand.py:821
Merge coadd detections from multiple bands.
Definition: multiBand.py:624
def makeTask(self, parsedCmd=None, args=None)
Definition: multiBand.py:333
def getInputSchema(self, butler=None, schema=None)
Obtain the input schema either directly or froma butler reference.
Definition: multiBand.py:470
def getSchemaCatalogs(self)
Return a dict of empty catalogs for each catalog dataset produced by this task.
Definition: multiBand.py:810
def runDataRef(self, patchRefList)
Merge coadd sources from multiple bands.
Definition: multiBand.py:496
def runDataRef(self, patchRef)
Run detection on a coadd.
Definition: multiBand.py:264
def cullPeaks(self, catalog)
Attempt to remove garbage peaks (mostly on the outskirts of large blends).
Definition: multiBand.py:782
def write(self, exposure, results, patchRef)
Write out results from runDetection.
Definition: multiBand.py:311
def __init__(self, butler=None, schema=None, kwargs)
Initialize the task.
Definition: multiBand.py:483
def __init__(self, butler=None, schema=None, kwargs)
Initialize the task.
Definition: multiBand.py:1527
Configuration parameters for the DetectCoaddSourcesTask.
Definition: multiBand.py:106
def runDataRef(self, patchRefList, psfCache=100)
Definition: multiBand.py:963
def __init__(self, schema=None, kwargs)
Initialize the task.
Definition: multiBand.py:244
Merge measurements from multiple bands.
Definition: multiBand.py:1446
Deblend sources from master catalog in each coadd seperately and measure.
Definition: multiBand.py:1153
def writeMatches(self, dataRef, exposure, sources)
Write matches of the sources to the astrometric reference catalog.
Definition: multiBand.py:1382
def __init__(self, butler=None, schema=None, peakSchema=None, kwargs)
Definition: multiBand.py:934
Configuration parameters for the MergeMeasurementsTask.
Definition: multiBand.py:1415
def readSources(self, dataRef)
Read input sources.
Definition: multiBand.py:1362
Configuration parameters for the MergeDetectionsTask.
Definition: multiBand.py:592
Configuration for merging sources.
Definition: multiBand.py:411
def run(self, catalogs, patchRef)
Merge measurement catalogs to create a single reference catalog for forced photometry.
Definition: multiBand.py:1572
def run(self, catalogs, patchRef)
Merge multiple catalogs.
Definition: multiBand.py:522
def write(self, patchRef, catalog)
Write the output.
Definition: multiBand.py:533
def write(self, dataRef, flux_sources, template_sources=None)
Definition: multiBand.py:1029
A base class for merging source catalogs.
Definition: multiBand.py:427
def write(self, dataRef, sources)
Write the source catalog.
Definition: multiBand.py:1401
def readCatalog(self, patchRef)
Read input catalog.
Definition: multiBand.py:507
Configuration parameters for the MeasureMergedCoaddSourcesTask.
Definition: multiBand.py:1079
def getSkyInfo(coaddName, patchRef)
Return the SkyMap, tract and patch information, wcs, and outer bbox of the patch to be coadded...
Definition: coaddBase.py:253
def writeMetadata(self, dataRefList)
No metadata to write, and not sure how to write it for a list of dataRefs.
Definition: multiBand.py:550
def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, kwargs)
Initialize the task.
Definition: multiBand.py:1277
def run(self, exposure, idFactory, expId)
Run detection on an exposure.
Definition: multiBand.py:279
def runDataRef(self, patchRef, psfCache=100)
Deblend and measure.
Definition: multiBand.py:1318
def __init__(self, butler=None, schema=None, kwargs)
Initialize the merge detections task.
Definition: multiBand.py:711
def run(self, catalogs, patchRef)
Merge multiple catalogs.
Definition: multiBand.py:733