lsst.pipe.tasks  16.0-18-g95848a16+5
multiBand.py
1 #!/usr/bin/env python
2 #
3 # LSST Data Management System
4 # Copyright 2008-2015 AURA/LSST.
5 #
6 # This product includes software developed by the
7 # LSST Project (http://www.lsst.org/).
8 #
9 # This program is free software: you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation, either version 3 of the License, or
12 # (at your option) any later version.
13 #
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
18 #
19 # You should have received a copy of the LSST License Statement and
20 # the GNU General Public License along with this program. If not,
21 # see <https://www.lsstcorp.org/LegalNotices/>.
22 #
23 import numpy
24 
25 from lsst.coadd.utils.coaddDataIdContainer import ExistingCoaddDataIdContainer
26 from lsst.pipe.base import CmdLineTask, Struct, TaskRunner, ArgumentParser, ButlerInitializedTaskRunner
27 from lsst.pex.config import Config, Field, ListField, ConfigurableField, RangeField, ConfigField
28 from lsst.meas.algorithms import DynamicDetectionTask, SkyObjectsTask
29 from lsst.meas.base import SingleFrameMeasurementTask, ApplyApCorrTask, CatalogCalculationTask
30 from lsst.meas.deblender import SourceDeblendTask, MultibandDeblendTask
31 from lsst.pipe.tasks.coaddBase import getSkyInfo
32 from lsst.pipe.tasks.scaleVariance import ScaleVarianceTask
33 from lsst.meas.astrom import DirectMatchTask, denormalizeMatches
34 from lsst.pipe.tasks.fakes import BaseFakeSourcesTask
35 from lsst.pipe.tasks.setPrimaryFlags import SetPrimaryFlagsTask
36 from lsst.pipe.tasks.propagateVisitFlags import PropagateVisitFlagsTask
37 import lsst.afw.image as afwImage
38 import lsst.afw.table as afwTable
39 import lsst.afw.math as afwMath
40 import lsst.afw.detection as afwDetect
41 from lsst.daf.base import PropertyList
42 
43 """
44 New dataset types:
45 * deepCoadd_det: detections from what used to be processCoadd (tract, patch, filter)
46 * deepCoadd_mergeDet: merged detections (tract, patch)
47 * deepCoadd_meas: measurements of merged detections (tract, patch, filter)
48 * deepCoadd_ref: reference sources (tract, patch)
49 All of these have associated *_schema catalogs that require no data ID and hold no records.
50 
51 In addition, we have a schema-only dataset, which saves the schema for the PeakRecords in
52 the mergeDet, meas, and ref dataset Footprints:
53 * deepCoadd_peak_schema
54 """
55 
56 
57 def _makeGetSchemaCatalogs(datasetSuffix):
58  """Construct a getSchemaCatalogs instance method
59 
60  These are identical for most of the classes here, so we'll consolidate
61  the code.
62 
63  datasetSuffix: Suffix of dataset name, e.g., "src" for "deepCoadd_src"
64  """
65 
66  def getSchemaCatalogs(self):
67  """Return a dict of empty catalogs for each catalog dataset produced by this task."""
68  src = afwTable.SourceCatalog(self.schema)
69  if hasattr(self, "algMetadata"):
70  src.getTable().setMetadata(self.algMetadata)
71  return {self.config.coaddName + "Coadd_" + datasetSuffix: src}
72  return getSchemaCatalogs
73 
74 
75 def _makeMakeIdFactory(datasetName):
76  """Construct a makeIdFactory instance method
77 
78  These are identical for all the classes here, so this consolidates
79  the code.
80 
81  datasetName: Dataset name without the coadd name prefix, e.g., "CoaddId" for "deepCoaddId"
82  """
83 
84  def makeIdFactory(self, dataRef):
85  """Return an IdFactory for setting the detection identifiers
86 
87  The actual parameters used in the IdFactory are provided by
88  the butler (through the provided data reference.
89  """
90  expBits = dataRef.get(self.config.coaddName + datasetName + "_bits")
91  expId = int(dataRef.get(self.config.coaddName + datasetName))
92  return afwTable.IdFactory.makeSource(expId, 64 - expBits)
93  return makeIdFactory
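
# Worked example (hypothetical numbers): if the butler reports deepCoaddId_bits = 34,
# then makeSource(expId, 64 - 34) packs the coadd/patch identifier into the upper 34 bits
# of each 64-bit source ID and reserves the remaining 30 bits for the per-source counter,
# i.e. up to 2**30 (~1.07e9) sources can be generated for that data product.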
94 
95 
97  """Given a longer, camera-specific filter name (e.g. "HSC-I") return its shorthand name ("i").
98  """
99  # I'm not sure if this is the way this is supposed to be implemented, but it seems to work,
100  # and its the only way I could get it to work.
101  return afwImage.Filter(name).getFilterProperty().getName()
102 
103 
104 
105 
107  """!
108  @anchor DetectCoaddSourcesConfig_
109 
110  @brief Configuration parameters for the DetectCoaddSourcesTask
111  """
112  doScaleVariance = Field(dtype=bool, default=True, doc="Scale variance plane using empirical noise?")
113  scaleVariance = ConfigurableField(target=ScaleVarianceTask, doc="Variance rescaling")
114  detection = ConfigurableField(target=DynamicDetectionTask, doc="Source detection")
115  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
116  doInsertFakes = Field(dtype=bool, default=False,
117  doc="Run fake sources injection task")
118  insertFakes = ConfigurableField(target=BaseFakeSourcesTask,
119  doc="Injection of fake sources for testing "
120  "purposes (must be retargeted)")
121 
122  def setDefaults(self):
123  Config.setDefaults(self)
124  self.detection.thresholdType = "pixel_stdev"
125  self.detection.isotropicGrow = True
126  # Coadds are made from background-subtracted CCDs, so any background subtraction should be very basic
127  self.detection.reEstimateBackground = False
128  self.detection.background.useApprox = False
129  self.detection.background.binSize = 4096
130  self.detection.background.undersampleStyle = 'REDUCE_INTERP_ORDER'
131  self.detection.doTempWideBackground = True # Suppress large footprints that overwhelm the deblender
132 
133 
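# Illustrative config overrides (a sketch, not defaults of the task): values a user might
# put in a file passed to detectCoaddSources.py via the standard --configfile option.
# The threshold value shown is a hypothetical example.
#
#     config.doScaleVariance = False          # skip the empirical variance rescaling
#     config.detection.thresholdValue = 10.0  # raise the detection threshold (in units of stdev)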
139 
140 
141 class DetectCoaddSourcesTask(CmdLineTask):
142  """!
143  @anchor DetectCoaddSourcesTask_
144 
145  @brief Detect sources on a coadd
146 
147  @section pipe_tasks_multiBand_Contents Contents
148 
149  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose
150  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize
151  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Run
152  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Config
153  - @ref pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug
154  - @ref pipe_tasks_multiband_DetectCoaddSourcesTask_Example
155 
156  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Purpose Description
157 
158  Command-line task that detects sources on a coadd of exposures obtained with a single filter.
159 
160  Coadding individual visits requires each exposure to be warped. This introduces covariance in the noise
161  properties across pixels. Before detection, we correct the coadd variance by scaling the variance plane
162  in the coadd to match the observed variance. This is an approximate approach -- strictly, we should
163  propagate the full covariance matrix -- but it is simple and works well in practice.
164 
165  After scaling the variance plane, we detect sources and generate footprints by delegating to the @ref
166  SourceDetectionTask_ "detection" subtask.
167 
168  @par Inputs:
169  deepCoadd{tract,patch,filter}: ExposureF
170  @par Outputs:
171  deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
172  @n deepCoadd_calexp{tract,patch,filter}: Variance scaled, background-subtracted input
173  exposure (ExposureF)
174  @n deepCoadd_calexp_background{tract,patch,filter}: BackgroundList
175  @par Data Unit:
176  tract, patch, filter
177 
178  DetectCoaddSourcesTask delegates most of its work to the @ref SourceDetectionTask_ "detection" subtask.
179  You can retarget this subtask if you wish.
180 
181  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Initialize Task initialization
182 
183  @copydoc \_\_init\_\_
184 
185  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Run Invoking the Task
186 
187  @copydoc run
188 
189  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Config Configuration parameters
190 
191  See @ref DetectCoaddSourcesConfig_ "DetectCoaddSourcesConfig"
192 
193  @section pipe_tasks_multiBand_DetectCoaddSourcesTask_Debug Debug variables
194 
195  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
196  flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
197  files.
198 
199  DetectCoaddSourcesTask has no debug variables of its own because it delegates all the work to
 200  @ref SourceDetectionTask_ "SourceDetectionTask"; see the documentation for
201  @ref SourceDetectionTask_ "SourceDetectionTask" for further information.
202 
203  @section pipe_tasks_multiband_DetectCoaddSourcesTask_Example A complete example
204  of using DetectCoaddSourcesTask
205 
206  DetectCoaddSourcesTask is meant to be run after assembling a coadded image in a given band. The purpose of
207  the task is to update the background, detect all sources in a single band and generate a set of parent
208  footprints. Subsequent tasks in the multi-band processing procedure will merge sources across bands and,
209  eventually, perform forced photometry. Command-line usage of DetectCoaddSourcesTask expects a data
210  reference to the coadd to be processed. A list of the available optional arguments can be obtained by
211  calling detectCoaddSources.py with the `--help` command line argument:
212  @code
213  detectCoaddSources.py --help
214  @endcode
215 
216  To demonstrate usage of the DetectCoaddSourcesTask in the larger context of multi-band processing, we
217  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has followed
218  steps 1 - 4 at @ref pipeTasks_multiBand, one may detect all the sources in each coadd as follows:
219  @code
220  detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
221  @endcode
222  This will process the HSC-I band data. The results are written to
223  `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I`.
224 
225  It is also necessary to run:
226  @code
227  detectCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
228  @endcode
229  to generate the sources catalogs for the HSC-R band required by the next step in the multi-band
230  processing procedure: @ref MergeDetectionsTask_ "MergeDetectionsTask".
231  """
232  _DefaultName = "detectCoaddSources"
233  ConfigClass = DetectCoaddSourcesConfig
234  getSchemaCatalogs = _makeGetSchemaCatalogs("det")
235  makeIdFactory = _makeMakeIdFactory("CoaddId")
236 
237  @classmethod
238  def _makeArgumentParser(cls):
239  parser = ArgumentParser(name=cls._DefaultName)
240  parser.add_id_argument("--id", "deepCoadd", help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
241  ContainerClass=ExistingCoaddDataIdContainer)
242  return parser
243 
244  def __init__(self, schema=None, **kwargs):
245  """!
246  @brief Initialize the task. Create the @ref SourceDetectionTask_ "detection" subtask.
247 
248  Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
249 
250  @param[in] schema: initial schema for the output catalog, modified in place to include all
251  fields set by this task. If None, the source minimal schema will be used.
252  @param[in] **kwargs: keyword arguments to be passed to lsst.pipe.base.task.Task.__init__
253  """
254  CmdLineTask.__init__(self, **kwargs)
255  if schema is None:
256  schema = afwTable.SourceTable.makeMinimalSchema()
257  if self.config.doInsertFakes:
258  self.makeSubtask("insertFakes")
259  self.schema = schema
260  self.makeSubtask("detection", schema=self.schema)
261  if self.config.doScaleVariance:
262  self.makeSubtask("scaleVariance")
263 
264  def runDataRef(self, patchRef):
265  """!
266  @brief Run detection on a coadd.
267 
268  Invokes @ref run and then uses @ref write to output the
269  results.
270 
271  @param[in] patchRef: data reference for patch
272  """
273  exposure = patchRef.get(self.config.coaddName + "Coadd", immediate=True)
274  expId = int(patchRef.get(self.config.coaddName + "CoaddId"))
275  results = self.run(exposure, self.makeIdFactory(patchRef), expId=expId)
276  self.write(exposure, results, patchRef)
277  return results
278 
279  def run(self, exposure, idFactory, expId):
280  """!
281  @brief Run detection on an exposure.
282 
283  First scale the variance plane to match the observed variance
284  using @ref ScaleVarianceTask. Then invoke the @ref SourceDetectionTask_ "detection" subtask to
285  detect sources.
286 
287  @param[in,out] exposure: Exposure on which to detect (may be background-subtracted and scaled,
288  depending on configuration).
289  @param[in] idFactory: IdFactory to set source identifiers
290  @param[in] expId: Exposure identifier (integer) for RNG seed
291 
292  @return a pipe.base.Struct with fields
293  - sources: catalog of detections
294  - backgrounds: list of backgrounds
295  """
296  if self.config.doScaleVariance:
297  varScale = self.scaleVariance.run(exposure.maskedImage)
298  exposure.getMetadata().add("variance_scale", varScale)
299  backgrounds = afwMath.BackgroundList()
300  if self.config.doInsertFakes:
301  self.insertFakes.run(exposure, background=backgrounds)
302  table = afwTable.SourceTable.make(self.schema, idFactory)
303  detections = self.detection.makeSourceCatalog(table, exposure, expId=expId)
304  sources = detections.sources
305  fpSets = detections.fpSets
306  if hasattr(fpSets, "background") and fpSets.background:
307  for bg in fpSets.background:
308  backgrounds.append(bg)
309  return Struct(sources=sources, backgrounds=backgrounds)
310 
311  def write(self, exposure, results, patchRef):
312  """!
313  @brief Write out results from run.
314 
315  @param[in] exposure: Exposure to write out
316  @param[in] results: Struct returned from run
317  @param[in] patchRef: data reference for patch
318  """
319  coaddName = self.config.coaddName + "Coadd"
320  patchRef.put(results.backgrounds, coaddName + "_calexp_background")
321  patchRef.put(results.sources, coaddName + "_det")
322  patchRef.put(exposure, coaddName + "_calexp")
323 
324 
325 
326 
327 class MergeSourcesRunner(TaskRunner):
328  """Task runner for the `MergeSourcesTask`
329 
330  Required because the run method requires a list of
331  dataRefs rather than a single dataRef.
332  """
333  def makeTask(self, parsedCmd=None, args=None):
334  """Provide a butler to the Task constructor.
335 
336  Parameters
337  ----------
338  parsedCmd:
339  The parsed command
340  args: tuple
341  Tuple of a list of data references and kwargs (un-used)
342 
343  Raises
344  ------
345  RuntimeError
346  Thrown if both `parsedCmd` & `args` are `None`
347  """
348  if parsedCmd is not None:
349  butler = parsedCmd.butler
350  elif args is not None:
351  dataRefList, kwargs = args
352  butler = dataRefList[0].getButler()
353  else:
354  raise RuntimeError("Neither parsedCmd nor args specified")
355  return self.TaskClass(config=self.config, log=self.log, butler=butler)
356 
357  @staticmethod
358  def buildRefDict(parsedCmd):
359  """Build a hierarchical dictionary of patch references
360 
361  Parameters
362  ----------
363  parsedCmd:
364  The parsed command
365 
366  Returns
367  -------
368  refDict: dict
369  A reference dictionary of the form {tract: {patch: {filter: dataRef}}}
370 
371  Raises
372  ------
373  RuntimeError
374  Thrown when multiple references are provided for the same
375  combination of tract, patch and filter
376  """
377  refDict = {} # Will index this as refDict[tract][patch][filter] = ref
378  for ref in parsedCmd.id.refList:
379  tract = ref.dataId["tract"]
380  patch = ref.dataId["patch"]
381  filter = ref.dataId["filter"]
382  if tract not in refDict:
383  refDict[tract] = {}
384  if patch not in refDict[tract]:
385  refDict[tract][patch] = {}
386  if filter in refDict[tract][patch]:
387  raise RuntimeError("Multiple versions of %s" % (ref.dataId,))
388  refDict[tract][patch][filter] = ref
389  return refDict
390 
391  @staticmethod
392  def getTargetList(parsedCmd, **kwargs):
393  """Provide a list of patch references for each patch, tract, filter combo.
394 
395  Parameters
396  ----------
397  parsedCmd:
398  The parsed command
399  kwargs:
400  Keyword arguments passed to the task
401 
402  Returns
403  -------
404  targetList: list
405  List of tuples, where each tuple is a (list of per-filter dataRefs, kwargs) pair.
406  """
407  refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
408  return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()]
409 
410 
411 class MergeSourcesConfig(Config):
412  """!
413  @anchor MergeSourcesConfig_
414 
415  @brief Configuration for merging sources.
416  """
417  priorityList = ListField(dtype=str, default=[],
418  doc="Priority-ordered list of bands for the merge.")
419  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
420 
421  def validate(self):
422  Config.validate(self)
423  if len(self.priorityList) == 0:
424  raise RuntimeError("No priority list provided")
425 
426 
427 class MergeSourcesTask(CmdLineTask):
428  """!
429  @anchor MergeSourcesTask_
430 
431  @brief A base class for merging source catalogs.
432 
433  Merging detections (MergeDetectionsTask) and merging measurements (MergeMeasurementsTask) are
434  so similar that it makes sense to re-use the code, in the form of this abstract base class.
435 
436  NB: Do not use this class directly. Instead use one of the child classes that inherit from
437  MergeSourcesTask such as @ref MergeDetectionsTask_ "MergeDetectionsTask" or @ref MergeMeasurementsTask_
438  "MergeMeasurementsTask"
439 
440  Sub-classes should set the following class variables:
441  * `_DefaultName`: name of Task
442  * `inputDataset`: name of dataset to read
443  * `outputDataset`: name of dataset to write
444  * `getSchemaCatalogs` to the result of `_makeGetSchemaCatalogs(outputDataset)`
445 
446  In addition, sub-classes must implement the run method.
447  """
448  _DefaultName = None
449  ConfigClass = MergeSourcesConfig
450  RunnerClass = MergeSourcesRunner
451  inputDataset = None
452  outputDataset = None
453  getSchemaCatalogs = None
454 
455  @classmethod
456  def _makeArgumentParser(cls):
457  """!
458  @brief Create a suitable ArgumentParser.
459 
460  We will use the ArgumentParser to provide a list of data
461  references for patches; the RunnerClass will sort them into lists
462  of data references for the same patch
463  """
464  parser = ArgumentParser(name=cls._DefaultName)
465  parser.add_id_argument("--id", "deepCoadd_" + cls.inputDataset,
466  ContainerClass=ExistingCoaddDataIdContainer,
467  help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i")
468  return parser
469 
470  def getInputSchema(self, butler=None, schema=None):
471  """!
472  @brief Obtain the input schema either directly or from a butler reference.
473 
474  @param[in] butler butler reference to obtain the input schema from
475  @param[in] schema the input schema
476  """
477  if schema is None:
478  assert butler is not None, "Neither butler nor schema specified"
479  schema = butler.get(self.config.coaddName + "Coadd_" + self.inputDataset + "_schema",
480  immediate=True).schema
481  return schema
482 
483  def __init__(self, butler=None, schema=None, **kwargs):
484  """!
485  @brief Initialize the task.
486 
487  Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
488  @param[in] schema the schema of the detection catalogs used as input to this one
489  @param[in] butler a butler used to read the input schema from disk, if schema is None
490 
491  Derived classes should use the getInputSchema() method to handle the additional
492  arguments and retrieve the actual input schema.
493  """
494  CmdLineTask.__init__(self, **kwargs)
495 
496  def runDataRef(self, patchRefList):
497  """!
498  @brief Merge coadd sources from multiple bands. Calls @ref `run` which must be defined in
499  subclasses that inherit from MergeSourcesTask.
500 
501  @param[in] patchRefList list of data references for each filter
502  """
503  catalogs = dict(self.readCatalog(patchRef) for patchRef in patchRefList)
504  mergedCatalog = self.run(catalogs, patchRefList[0])
505  self.write(patchRefList[0], mergedCatalog)
506 
507  def readCatalog(self, patchRef):
508  """!
509  @brief Read input catalog.
510 
511  We read the input dataset provided by the 'inputDataset'
512  class variable.
513 
514  @param[in] patchRef data reference for patch
515  @return tuple consisting of the filter name and the catalog
516  """
517  filterName = patchRef.dataId["filter"]
518  catalog = patchRef.get(self.config.coaddName + "Coadd_" + self.inputDataset, immediate=True)
519  self.log.info("Read %d sources for filter %s: %s" % (len(catalog), filterName, patchRef.dataId))
520  return filterName, catalog
521 
522  def run(self, catalogs, patchRef):
523  """!
524  @brief Merge multiple catalogs. This function must be defined in all subclasses that inherit from
525  MergeSourcesTask.
526 
527  @param[in] catalogs dict mapping filter name to source catalog
528 
529  @return merged catalog
530  """
531  raise NotImplementedError()
532 
533  def write(self, patchRef, catalog):
534  """!
535  @brief Write the output.
536 
537  @param[in] patchRef data reference for patch
538  @param[in] catalog catalog
539 
540  We write as the dataset provided by the 'outputDataset'
541  class variable.
542  """
543  patchRef.put(catalog, self.config.coaddName + "Coadd_" + self.outputDataset)
544  # since the filter isn't actually part of the data ID for the dataset we're saving,
545  # it's confusing to see it in the log message, even if the butler simply ignores it.
546  mergeDataId = patchRef.dataId.copy()
547  del mergeDataId["filter"]
548  self.log.info("Wrote merged catalog: %s" % (mergeDataId,))
549 
550  def writeMetadata(self, dataRefList):
551  """!
552  @brief No metadata to write, and not sure how to write it for a list of dataRefs.
553  """
554  pass
555 
556 
557 class CullPeaksConfig(Config):
558  """!
559  @anchor CullPeaksConfig_
560 
561  @brief Configuration for culling garbage peaks after merging footprints.
562 
563  Peaks may also be culled after detection or during deblending; this configuration object
564  only deals with culling after merging Footprints.
565 
566  These cuts are based on three quantities:
567  - nBands: the number of bands in which the peak was detected
568  - peakRank: the position of the peak within its family, sorted from brightest to faintest.
569  - peakRankNormalized: the peak rank divided by the total number of peaks in the family.
570 
571  The formula that identifies peaks to cull is:
572 
573  nBands < nBandsSufficient
574  AND (rank >= rankSufficient)
575  AND (rank >= rankConsidered OR peakRankNormalized >= rankNormalizedConsidered)
576 
577  To disable peak culling, simply set nBandsSufficient=1.
578  """
579 
580  nBandsSufficient = RangeField(dtype=int, default=2, min=1,
581  doc="Always keep peaks detected in this many bands")
582  rankSufficient = RangeField(dtype=int, default=20, min=1,
583  doc="Always keep this many peaks in each family")
584  rankConsidered = RangeField(dtype=int, default=30, min=1,
585  doc=("Keep peaks with less than this rank that also match the "
586  "rankNormalizedConsidered condition."))
587  rankNormalizedConsidered = RangeField(dtype=float, default=0.7, min=0.0,
588  doc=("Keep peaks with less than this normalized rank that"
589  " also match the rankConsidered condition."))
590 
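# Illustrative sketch (not called by any task in this file): the keep/cull decision that
# MergeDetectionsTask.cullPeaks applies below, restated as a standalone predicate over a
# CullPeaksConfig instance.
def _keepPeakExample(rank, nBands, familySize, cullConfig):
    """Return True if a peak with this rank, band count and family size survives culling."""
    return (rank < cullConfig.rankSufficient or
            nBands >= cullConfig.nBandsSufficient or
            (rank < cullConfig.rankConsidered and
             rank < cullConfig.rankNormalizedConsidered * familySize))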
591 
593  """!
594  @anchor MergeDetectionsConfig_
595 
596  @brief Configuration parameters for the MergeDetectionsTask.
597  """
598  minNewPeak = Field(dtype=float, default=1,
599  doc="Minimum distance from closest peak to create a new one (in arcsec).")
600 
601  maxSamePeak = Field(dtype=float, default=0.3,
602  doc="When adding new catalogs to the merge, all peaks less than this distance "
603  " (in arcsec) to an existing peak will be flagged as detected in that catalog.")
604  cullPeaks = ConfigField(dtype=CullPeaksConfig, doc="Configuration for how to cull peaks.")
605 
606  skyFilterName = Field(dtype=str, default="sky",
607  doc="Name of `filter' used to label sky objects (e.g. flag merge_peak_sky is set)\n"
608  "(N.b. should be in MergeMeasurementsConfig.pseudoFilterList)")
609  skyObjects = ConfigurableField(target=SkyObjectsTask, doc="Generate sky objects")
610 
611  def setDefaults(self):
612  MergeSourcesConfig.setDefaults(self)
613  self.skyObjects.avoidMask = ["DETECTED"] # Nothing else is available in our custom mask
614 
615 
616 
622 
623 
625  """!
626  @anchor MergeDetectionsTask_
627 
628  @brief Merge coadd detections from multiple bands.
629 
630  @section pipe_tasks_multiBand_Contents Contents
631 
632  - @ref pipe_tasks_multiBand_MergeDetectionsTask_Purpose
633  - @ref pipe_tasks_multiBand_MergeDetectionsTask_Init
634  - @ref pipe_tasks_multiBand_MergeDetectionsTask_Run
635  - @ref pipe_tasks_multiBand_MergeDetectionsTask_Config
636  - @ref pipe_tasks_multiBand_MergeDetectionsTask_Debug
637  - @ref pipe_tasks_multiband_MergeDetectionsTask_Example
638 
639  @section pipe_tasks_multiBand_MergeDetectionsTask_Purpose Description
640 
641  Command-line task that merges sources detected in coadds of exposures obtained with different filters.
642 
643  To perform photometry consistently across coadds in multiple filter bands, we create a master catalog of
644  sources from all bands by merging the sources (peaks & footprints) detected in each coadd, while keeping
645  track of which band each source originates in.
646 
647  The catalog merge is performed by @ref getMergedSourceCatalog. Spurious peaks detected around bright
648  objects are culled as described in @ref CullPeaksConfig_.
649 
650  @par Inputs:
651  deepCoadd_det{tract,patch,filter}: SourceCatalog (only parent Footprints)
652  @par Outputs:
653  deepCoadd_mergeDet{tract,patch}: SourceCatalog (only parent Footprints)
654  @par Data Unit:
655  tract, patch
656 
657  MergeDetectionsTask subclasses @ref MergeSourcesTask_ "MergeSourcesTask".
658 
659  @section pipe_tasks_multiBand_MergeDetectionsTask_Init Task initialization
660 
661  @copydoc \_\_init\_\_
662 
663  @section pipe_tasks_multiBand_MergeDetectionsTask_Run Invoking the Task
664 
665  @copydoc run
666 
667  @section pipe_tasks_multiBand_MergeDetectionsTask_Config Configuration parameters
668 
669  See @ref MergeDetectionsConfig_
670 
671  @section pipe_tasks_multiBand_MergeDetectionsTask_Debug Debug variables
672 
673  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a flag @c -d
674  to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py files.
675 
676  MergeDetectionsTask has no debug variables.
677 
678  @section pipe_tasks_multiband_MergeDetectionsTask_Example A complete example of using MergeDetectionsTask
679 
680  MergeDetectionsTask is meant to be run after detecting sources in coadds generated for the chosen subset
681  of the available bands.
682  The purpose of the task is to merge sources (peaks & footprints) detected in the coadds generated from the
683  chosen subset of filters.
684  Subsequent tasks in the multi-band processing procedure will deblend the generated master list of sources
685  and, eventually, perform forced photometry.
686  Command-line usage of MergeDetectionsTask expects data references for all the coadds to be processed.
687  A list of the available optional arguments can be obtained by calling mergeCoaddDetections.py with the
688  `--help` command line argument:
689  @code
690  mergeCoaddDetections.py --help
691  @endcode
692 
693  To demonstrate usage of the MergeDetectionsTask in the larger context of multi-band processing, we
694  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
695  step 5 at @ref pipeTasks_multiBand, one may merge the catalogs of sources from each coadd as follows:
696  @code
697  mergeCoaddDetections.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I^HSC-R
698  @endcode
699  This will merge the HSC-I & -R band parent source catalogs and write the results to
700  `$CI_HSC_DIR/DATA/deepCoadd-results/merged/0/5,4/mergeDet-0-5,4.fits`.
701 
702  The next step in the multi-band processing procedure is
703  @ref MeasureMergedCoaddSourcesTask_ "MeasureMergedCoaddSourcesTask"
704  """
705  ConfigClass = MergeDetectionsConfig
706  _DefaultName = "mergeCoaddDetections"
707  inputDataset = "det"
708  outputDataset = "mergeDet"
709  makeIdFactory = _makeMakeIdFactory("MergedCoaddId")
710 
711  def __init__(self, butler=None, schema=None, **kwargs):
712  """!
713  @brief Initialize the merge detections task.
714 
715  A @ref FootprintMergeList_ "FootprintMergeList" will be used to
716  merge the source catalogs.
717 
718  Additional keyword arguments (forwarded to MergeSourcesTask.__init__):
719  @param[in] schema the schema of the detection catalogs used as input to this one
720  @param[in] butler a butler used to read the input schema from disk, if schema is None
721  @param[in] **kwargs keyword arguments to be passed to MergeSourcesTask.__init__
722 
723  The task will set its own self.schema attribute to the schema of the output merged catalog.
724  """
725  MergeSourcesTask.__init__(self, butler=butler, schema=schema, **kwargs)
726  self.makeSubtask("skyObjects")
727  self.schema = self.getInputSchema(butler=butler, schema=schema)
728 
729  filterNames = [getShortFilterName(name) for name in self.config.priorityList]
730  filterNames += [self.config.skyFilterName]
731  self.merged = afwDetect.FootprintMergeList(self.schema, filterNames)
732 
733  def run(self, catalogs, patchRef):
734  """!
735  @brief Merge multiple catalogs.
736 
737  After ordering the catalogs and filters in priority order,
738  @ref getMergedSourceCatalog of the @ref FootprintMergeList_ "FootprintMergeList" created by
739  @ref \_\_init\_\_ is used to perform the actual merging. Finally, @ref cullPeaks is used to remove
740  garbage peaks detected around bright objects.
741 
742  @param[in] catalogs dict mapping filter name to source catalog
743  @param[in] patchRef data reference for patch
744  @return mergedList merged catalog of sources
745  """
746 
747  # Convert distance to tract coordinate
748  skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)
749  tractWcs = skyInfo.wcs
750  peakDistance = self.config.minNewPeak / tractWcs.getPixelScale().asArcseconds()
751  samePeakDistance = self.config.maxSamePeak / tractWcs.getPixelScale().asArcseconds()
752 
753  # Put catalogs, filters in priority order
754  orderedCatalogs = [catalogs[band] for band in self.config.priorityList if band in catalogs.keys()]
755  orderedBands = [getShortFilterName(band) for band in self.config.priorityList
756  if band in catalogs.keys()]
757 
758  mergedList = self.merged.getMergedSourceCatalog(orderedCatalogs, orderedBands, peakDistance,
759  self.schema, self.makeIdFactory(patchRef),
760  samePeakDistance)
761 
762  #
763  # Add extra sources that correspond to blank sky
764  #
765  skySeed = patchRef.get(self.config.coaddName + "MergedCoaddId")
766  skySourceFootprints = self.getSkySourceFootprints(mergedList, skyInfo, skySeed)
767  if skySourceFootprints:
768  key = mergedList.schema.find("merge_footprint_%s" % self.config.skyFilterName).key
769  for foot in skySourceFootprints:
770  s = mergedList.addNew()
771  s.setFootprint(foot)
772  s.set(key, True)
773 
774  # Sort Peaks from brightest to faintest
775  for record in mergedList:
776  record.getFootprint().sortPeaks()
777  self.log.info("Merged to %d sources" % len(mergedList))
778  # Attempt to remove garbage peaks
779  self.cullPeaks(mergedList)
780  return mergedList
781 
782  def cullPeaks(self, catalog):
783  """!
784  @brief Attempt to remove garbage peaks (mostly on the outskirts of large blends).
785 
786  @param[in] catalog Source catalog
787  """
788  keys = [item.key for item in self.merged.getPeakSchema().extract("merge_peak_*").values()]
789  assert len(keys) > 0, "Error finding flags that associate peaks with their detection bands."
790  totalPeaks = 0
791  culledPeaks = 0
792  for parentSource in catalog:
793  # Make a list copy so we can clear the attached PeakCatalog and append the ones we're keeping
794  # to it (which is easier than deleting as we iterate).
795  keptPeaks = parentSource.getFootprint().getPeaks()
796  oldPeaks = list(keptPeaks)
797  keptPeaks.clear()
798  familySize = len(oldPeaks)
799  totalPeaks += familySize
800  for rank, peak in enumerate(oldPeaks):
801  if ((rank < self.config.cullPeaks.rankSufficient) or
802  (sum([peak.get(k) for k in keys]) >= self.config.cullPeaks.nBandsSufficient) or
803  (rank < self.config.cullPeaks.rankConsidered and
804  rank < self.config.cullPeaks.rankNormalizedConsidered * familySize)):
805  keptPeaks.append(peak)
806  else:
807  culledPeaks += 1
808  self.log.info("Culled %d of %d peaks" % (culledPeaks, totalPeaks))
809 
810  def getSchemaCatalogs(self):
811  """!
812  Return a dict of empty catalogs for each catalog dataset produced by this task.
813 
814  @return dictionary of empty catalogs
815  """
816  mergeDet = afwTable.SourceCatalog(self.schema)
817  peak = afwDetect.PeakCatalog(self.merged.getPeakSchema())
818  return {self.config.coaddName + "Coadd_mergeDet": mergeDet,
819  self.config.coaddName + "Coadd_peak": peak}
820 
821  def getSkySourceFootprints(self, mergedList, skyInfo, seed):
822  """!
823  @brief Return a list of Footprints of sky objects which don't overlap with anything in mergedList
824 
825  @param mergedList The merged Footprints from all the input bands
826  @param skyInfo A description of the patch
827  @param seed Seed for the random number generator
828  """
829  mask = afwImage.Mask(skyInfo.patchInfo.getOuterBBox())
830  detected = mask.getPlaneBitMask("DETECTED")
831  for s in mergedList:
832  s.getFootprint().spans.setMask(mask, detected)
833 
834  footprints = self.skyObjects.run(mask, seed)
835  if not footprints:
836  return footprints
837 
838  # Need to convert the peak catalog's schema so we can set the "merge_peak_<skyFilterName>" flags
839  schema = self.merged.getPeakSchema()
840  mergeKey = schema.find("merge_peak_%s" % self.config.skyFilterName).key
841  converted = []
842  for oldFoot in footprints:
843  assert len(oldFoot.getPeaks()) == 1, "Should be a single peak only"
844  peak = oldFoot.getPeaks()[0]
845  newFoot = afwDetect.Footprint(oldFoot.spans, schema)
846  newFoot.addPeak(peak.getFx(), peak.getFy(), peak.getPeakValue())
847  newFoot.getPeaks()[0].set(mergeKey, True)
848  converted.append(newFoot)
849 
850  return converted
851 
852 
854  """DeblendCoaddSourcesConfig
855 
856  Configuration parameters for the `DeblendCoaddSourcesTask`.
857  """
858  singleBandDeblend = ConfigurableField(target=SourceDeblendTask,
859  doc="Deblend sources separately in each band")
860  multiBandDeblend = ConfigurableField(target=MultibandDeblendTask,
861  doc="Deblend sources simultaneously across bands")
862  simultaneous = Field(dtype=bool, default=False, doc="Simultaneously deblend all bands?")
863  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
864 
865  def setDefaults(self):
866  Config.setDefaults(self)
867  self.singleBandDeblend.propagateAllPeaks = True
868 
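# Illustrative override (a sketch): switching deblendCoaddSources.py to the simultaneous
# multi-band deblender (SCARLET) instead of the default per-band deblender, e.g. from a
# file passed via the standard --configfile option.
#
#     config.simultaneous = True   # use the multiBandDeblend subtask across all bands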
869 
871  """Task runner for the `MergeSourcesTask`
872 
873  Required because the run method requires a list of
874  dataRefs rather than a single dataRef.
875  """
876  @staticmethod
877  def getTargetList(parsedCmd, **kwargs):
878  """Provide a list of patch references for each patch, tract, filter combo.
879 
880  Parameters
881  ----------
882  parsedCmd:
883  The parsed command
884  kwargs:
885  Keyword arguments passed to the task
886 
887  Returns
888  -------
889  targetList: list
890  List of tuples, where each tuple is a (list of per-filter dataRefs, kwargs) pair.
891  """
892  refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
893  kwargs["psfCache"] = parsedCmd.psfCache
894  return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()]
895 
896 
897 class DeblendCoaddSourcesTask(CmdLineTask):
898  """Deblend the sources in a merged catalog
899 
900  Deblend sources from master catalog in each coadd.
901  This can either be done separately in each band using the HSC-SDSS deblender
902  (`DeblendCoaddSourcesTask.config.simultaneous==False`)
903  or use SCARLET to simultaneously fit the blend in all bands
904  (`DeblendCoaddSourcesTask.config.simultaneous==True`).
905  The task will set its own `self.schema` attribute to the `Schema` of the
906  output deblended catalog.
907  This will include all fields from the input `Schema`, as well as additional fields
908  from the deblender.
909 
910  pipe.tasks.multiband.DeblendCoaddSourcesTask Description
911  ---------------------------------------------------------
912 
913 
914  Parameters
915  ----------
916  butler: `Butler`
917  Butler used to read the input schemas from disk,
918  if `schema` or `peakSchema` is `None`
919  schema: `Schema`
920  The schema of the merged detection catalog as an input to this task.
921  peakSchema: `Schema`
922  The schema of the `PeakRecord`s in the `Footprint`s in the merged detection catalog
923  """
924  ConfigClass = DeblendCoaddSourcesConfig
925  RunnerClass = DeblendCoaddSourcesRunner
926  _DefaultName = "deblendCoaddSources"
927  makeIdFactory = _makeMakeIdFactory("MergedCoaddId")
928 
929  @classmethod
930  def _makeArgumentParser(cls):
931  parser = ArgumentParser(name=cls._DefaultName)
932  parser.add_id_argument("--id", "deepCoadd_calexp",
933  help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i",
934  ContainerClass=ExistingCoaddDataIdContainer)
935  parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
936  return parser
937 
938  def __init__(self, butler=None, schema=None, peakSchema=None, **kwargs):
939  CmdLineTask.__init__(self, **kwargs)
940  if schema is None:
941  assert butler is not None, "Neither butler nor schema is defined"
942  schema = butler.get(self.config.coaddName + "Coadd_mergeDet_schema", immediate=True).schema
943  self.schemaMapper = afwTable.SchemaMapper(schema)
944  self.schemaMapper.addMinimalSchema(schema)
945  self.schema = self.schemaMapper.getOutputSchema()
946  if peakSchema is None:
947  assert butler is not None, "Neither butler nor peakSchema is defined"
948  peakSchema = butler.get(self.config.coaddName + "Coadd_peak_schema", immediate=True).schema
949 
950  if self.config.simultaneous:
951  self.makeSubtask("multiBandDeblend", schema=self.schema, peakSchema=peakSchema)
952  else:
953  self.makeSubtask("singleBandDeblend", schema=self.schema, peakSchema=peakSchema)
954 
955  def getSchemaCatalogs(self):
956  """Return a dict of empty catalogs for each catalog dataset produced by this task.
957 
958  Returns
959  -------
960  result: dict
961  Dictionary of empty catalogs, with catalog names as keys.
962  """
963  catalog = afwTable.SourceCatalog(self.schema)
964  return {self.config.coaddName + "Coadd_deblendedFlux": catalog,
965  self.config.coaddName + "Coadd_deblendedModel": catalog}
966 
967  def runDataRef(self, patchRefList, psfCache=100):
968  """Deblend the patch
969 
970  Deblend each source simultaneously or separately
971  (depending on `DeblendCoaddSourcesTask.config.simultaneous`),
972  then write the deblended sources out. Setting the `is-primary` flag and
973  propagating visit flags are deferred to `MeasureMergedCoaddSourcesTask`.
975 
976  Parameters
977  ----------
978  patchRefList: list
979  List of data references for each filter
980  """
981  if self.config.simultaneous:
982  # Use SCARLET to simultaneously deblend across filters
983  filters = []
984  exposures = []
985  for patchRef in patchRefList:
986  exposure = patchRef.get(self.config.coaddName + "Coadd_calexp", immediate=True)
987  filters.append(patchRef.dataId["filter"])
988  exposures.append(exposure)
989  # The input sources are the same for all bands, since it is a merged catalog
990  sources = self.readSources(patchRef)
991  exposure = afwImage.MultibandExposure.fromExposures(filters, exposures)
992  fluxCatalogs, templateCatalogs = self.multiBandDeblend.run(exposure, sources)
993  for n in range(len(patchRefList)):
994  self.write(patchRefList[n], fluxCatalogs[filters[n]], templateCatalogs[filters[n]])
995  else:
996  # Use the single-band deblender to deblend each band separately
997  for patchRef in patchRefList:
998  exposure = patchRef.get(self.config.coaddName + "Coadd_calexp", immediate=True)
999  exposure.getPsf().setCacheCapacity(psfCache)
1000  sources = self.readSources(patchRef)
1001  self.singleBandDeblend.run(exposure, sources)
1002  self.write(patchRef, sources)
1003 
1004  def readSources(self, dataRef):
1005  """Read merged catalog
1006 
1007  Read the catalog of merged detections and create a catalog
1008  in a single band.
1009 
1010  Parameters
1011  ----------
1012  dataRef: data reference
1013  Data reference for catalog of merged detections
1014 
1015  Returns
1016  -------
1017  sources: `SourceCatalog`
1018  List of sources in merged catalog
1019 
1020  We also need to add columns to hold the measurements we're about to make
1021  so we can measure in-place.
1022  """
1023  merged = dataRef.get(self.config.coaddName + "Coadd_mergeDet", immediate=True)
1024  self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
1025  idFactory = self.makeIdFactory(dataRef)
1026  for s in merged:
1027  idFactory.notify(s.getId())
1028  table = afwTable.SourceTable.make(self.schema, idFactory)
1029  sources = afwTable.SourceCatalog(table)
1030  sources.extend(merged, self.schemaMapper)
1031  return sources
1032 
1033  def write(self, dataRef, flux_sources, template_sources=None):
1034  """Write the source catalog(s)
1035 
1036  Parameters
1037  ----------
1038  dataRef: Data Reference
1039  Reference to the output catalog.
1040  flux_sources: `SourceCatalog`
1041  Flux conserved sources to write to file.
1042  If using the single band deblender, this is the catalog
1043  generated.
1044  template_sources: `SourceCatalog`
1045  Source catalog using the multiband template models
1046  as footprints.
1047  """
1048  # The multiband deblender does not have to conserve flux,
1049  # so only write the flux conserved catalog if it exists
1050  if flux_sources is not None:
1051  assert not self.config.simultaneous or self.config.multiBandDeblend.conserveFlux
1052  dataRef.put(flux_sources, self.config.coaddName + "Coadd_deblendedFlux")
1053  # Only the multiband deblender has the option to output the
1054  # template model catalog, which can optionally be used
1055  # in MeasureMergedCoaddSources
1056  if template_sources is not None:
1057  assert self.config.multiBandDeblend.saveTemplates
1058  dataRef.put(template_sources, self.config.coaddName + "Coadd_deblendedModel")
1059  self.log.info("Wrote %d sources: %s" % (len(flux_sources), dataRef.dataId))
1060 
1061  def writeMetadata(self, dataRefList):
1062  """Write the metadata produced from processing the data.
1063  Parameters
1064  ----------
1065  dataRefList
1066  List of Butler data references used to write the metadata.
1067  The metadata is written to dataset type `CmdLineTask._getMetadataName`.
1068  """
1069  for dataRef in dataRefList:
1070  try:
1071  metadataName = self._getMetadataName()
1072  if metadataName is not None:
1073  dataRef.put(self.getFullMetadata(), metadataName)
1074  except Exception as e:
1075  self.log.warn("Could not persist metadata for dataId=%s: %s", dataRef.dataId, e)
1076 
1077  def getExposureId(self, dataRef):
1078  """Get the ExposureId from a data reference
1079  """
1080  return int(dataRef.get(self.config.coaddName + "CoaddId"))
1081 
1082 
1084  """!
1085  @anchor MeasureMergedCoaddSourcesConfig_
1086 
1087  @brief Configuration parameters for the MeasureMergedCoaddSourcesTask
1088  """
1089  inputCatalog = Field(dtype=str, default="deblendedFlux",
1090  doc=("Name of the input catalog to use. "
1091  "If the single band deblender was used this should be 'deblendedFlux'. "
1092  "If the multi-band deblender was used this should be 'deblendedModel'. "
1093  "If no deblending was performed this should be 'mergeDet'."))
1094  measurement = ConfigurableField(target=SingleFrameMeasurementTask, doc="Source measurement")
1095  setPrimaryFlags = ConfigurableField(target=SetPrimaryFlagsTask, doc="Set flags for primary tract/patch")
1096  doPropagateFlags = Field(
1097  dtype=bool, default=True,
1098  doc="Whether to match sources to CCD catalogs to propagate flags (to e.g. identify PSF stars)"
1099  )
1100  propagateFlags = ConfigurableField(target=PropagateVisitFlagsTask, doc="Propagate visit flags to coadd")
1101  doMatchSources = Field(dtype=bool, default=True, doc="Match sources to reference catalog?")
1102  match = ConfigurableField(target=DirectMatchTask, doc="Matching to reference catalog")
1103  doWriteMatchesDenormalized = Field(
1104  dtype=bool,
1105  default=False,
1106  doc=("Write reference matches in denormalized format? "
1107  "This format uses more disk space, but is more convenient to read."),
1108  )
1109  coaddName = Field(dtype=str, default="deep", doc="Name of coadd")
1110  checkUnitsParseStrict = Field(
1111  doc="Strictness of Astropy unit compatibility check, can be 'raise', 'warn' or 'silent'",
1112  dtype=str,
1113  default="raise",
1114  )
1115  doApCorr = Field(
1116  dtype=bool,
1117  default=True,
1118  doc="Apply aperture corrections"
1119  )
1120  applyApCorr = ConfigurableField(
1121  target=ApplyApCorrTask,
1122  doc="Subtask to apply aperture corrections"
1123  )
1124  doRunCatalogCalculation = Field(
1125  dtype=bool,
1126  default=True,
1127  doc='Run catalogCalculation task'
1128  )
1129  catalogCalculation = ConfigurableField(
1130  target=CatalogCalculationTask,
1131  doc="Subtask to run catalogCalculation plugins on catalog"
1132  )
1133 
1134  def setDefaults(self):
1135  Config.setDefaults(self)
1136  self.measurement.plugins.names |= ['base_InputCount', 'base_Variance']
1137  self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
1138  'INEXACT_PSF']
1139  self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
1140  'INEXACT_PSF']
1141 
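# Illustrative override (a sketch): choosing which catalog measureCoaddSources.py measures
# on, per the inputCatalog field above, e.g. from a file passed via --configfile.
#
#     config.inputCatalog = "deblendedModel"   # or "mergeDet" if no deblending was run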
1142 
1148 
1149 
1150 class MeasureMergedCoaddSourcesRunner(ButlerInitializedTaskRunner):
1151  """Get the psfCache setting into MeasureMergedCoaddSourcesTask"""
1152  @staticmethod
1153  def getTargetList(parsedCmd, **kwargs):
1154  return ButlerInitializedTaskRunner.getTargetList(parsedCmd, psfCache=parsedCmd.psfCache)
1155 
1156 
1158  """!
1159  @anchor MeasureMergedCoaddSourcesTask_
1160 
1161  @brief Deblend sources from master catalog in each coadd separately and measure.
1162 
1163  @section pipe_tasks_multiBand_Contents Contents
1164 
1165  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose
1166  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize
1167  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run
1168  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config
1169  - @ref pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug
1170  - @ref pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example
1171 
1172  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Purpose Description
1173 
1174  Command-line task that uses peaks and footprints from a master catalog to perform deblending and
1175  measurement in each coadd.
1176 
1177  Given a master input catalog of sources (peaks and footprints) or deblender outputs
1178  (including a HeavyFootprint in each band), measure each source on the
1179  coadd. Repeating this procedure with the same master catalog across multiple coadds will generate a
1180  consistent set of child sources.
1181 
1182  The deblender retains all peaks and deblends any missing peaks (dropouts in that band) as PSFs. Source
1183  properties are measured and the @c is-primary flag (indicating sources with no children) is set. Visit
1184  flags are propagated to the coadd sources.
1185 
1186  Optionally, we can match the coadd sources to an external reference catalog.
1187 
1188  @par Inputs:
1189  deepCoadd_mergeDet{tract,patch} or deepCoadd_deblend{tract,patch}: SourceCatalog
1190  @n deepCoadd_calexp{tract,patch,filter}: ExposureF
1191  @par Outputs:
1192  deepCoadd_meas{tract,patch,filter}: SourceCatalog
1193  @par Data Unit:
1194  tract, patch, filter
1195 
1196  MeasureMergedCoaddSourcesTask delegates most of its work to a set of sub-tasks:
1197 
1198  <DL>
1199  <DT> @ref SingleFrameMeasurementTask_ "measurement"
1200  <DD> Measure source properties of deblended sources.</DD>
1201  <DT> @ref SetPrimaryFlagsTask_ "setPrimaryFlags"
1202  <DD> Set flag 'is-primary' as well as related flags on sources. 'is-primary' is set for sources that are
1203  not at the edge of the field and that have either not been deblended or are the children of deblended
1204  sources</DD>
1205  <DT> @ref PropagateVisitFlagsTask_ "propagateFlags"
1206  <DD> Propagate flags set in individual visits to the coadd.</DD>
1207  <DT> @ref DirectMatchTask_ "match"
1208  <DD> Match input sources to a reference catalog (optional).
1209  </DD>
1210  </DL>
1211  These subtasks may be retargeted as required.
1212 
1213  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Initialize Task initialization
1214 
1215  @copydoc \_\_init\_\_
1216 
1217  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Run Invoking the Task
1218 
1219  @copydoc run
1220 
1221  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Config Configuration parameters
1222 
1223  See @ref MeasureMergedCoaddSourcesConfig_
1224 
1225  @section pipe_tasks_multiBand_MeasureMergedCoaddSourcesTask_Debug Debug variables
1226 
1227  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
1228  flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
1229  files.
1230 
1231  MeasureMergedCoaddSourcesTask has no debug variables of its own because it delegates all the work to
1232  the various sub-tasks. See the documentation for individual sub-tasks for more information.
1233 
1234  @section pipe_tasks_multiband_MeasureMergedCoaddSourcesTask_Example A complete example of using
1235  MeasureMergedCoaddSourcesTask
1236 
1237  After MeasureMergedCoaddSourcesTask has been run on multiple coadds, we have a set of per-band catalogs.
1238  The next stage in the multi-band processing procedure will merge these measurements into a suitable
1239  catalog for driving forced photometry.
1240 
1241  Command-line usage of MeasureMergedCoaddSourcesTask expects a data reference to the coadds
1242  to be processed.
1243  A list of the available optional arguments can be obtained by calling measureCoaddSources.py with the
1244  `--help` command line argument:
1245  @code
1246  measureCoaddSources.py --help
1247  @endcode
1248 
1249  To demonstrate usage of the MeasureMergedCoaddSourcesTask in the larger context of multi-band processing, we
1250  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
1251  step 6 at @ref pipeTasks_multiBand, one may perform deblending and measure sources in the HSC-I band
1252  coadd as follows:
1253  @code
1254  measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I
1255  @endcode
1256  This will process the HSC-I band data. The results are written to
1257  `$CI_HSC_DIR/DATA/deepCoadd-results/HSC-I/0/5,4/meas-HSC-I-0-5,4.fits`.
1258 
1259  It is also necessary to run
1260  @code
1261  measureCoaddSources.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-R
1262  @endcode
1263  to generate the sources catalogs for the HSC-R band required by the next step in the multi-band
1264  procedure: @ref MergeMeasurementsTask_ "MergeMeasurementsTask".
1265  """
1266  _DefaultName = "measureCoaddSources"
1267  ConfigClass = MeasureMergedCoaddSourcesConfig
1268  RunnerClass = MeasureMergedCoaddSourcesRunner
1269  getSchemaCatalogs = _makeGetSchemaCatalogs("meas")
1270  makeIdFactory = _makeMakeIdFactory("MergedCoaddId") # The IDs we already have are of this type
1271 
1272  @classmethod
1273  def _makeArgumentParser(cls):
1274  parser = ArgumentParser(name=cls._DefaultName)
1275  parser.add_id_argument("--id", "deepCoadd_calexp",
1276  help="data ID, e.g. --id tract=12345 patch=1,2 filter=r",
1277  ContainerClass=ExistingCoaddDataIdContainer)
1278  parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
1279  return parser
1280 
1281  def __init__(self, butler=None, schema=None, peakSchema=None, refObjLoader=None, **kwargs):
1282  """!
1283  @brief Initialize the task.
1284 
1285  Keyword arguments (in addition to those forwarded to CmdLineTask.__init__):
1286  @param[in] schema: the schema of the merged detection catalog used as input to this one
1287  @param[in] peakSchema: the schema of the PeakRecords in the Footprints in the merged detection catalog
1288  @param[in] refObjLoader: an instance of LoadReferenceObjectsTasks that supplies an external reference
1289  catalog. May be None if the loader can be constructed from the butler argument or all steps
1290  requiring a reference catalog are disabled.
1291  @param[in] butler: a butler used to read the input schemas from disk or construct the reference
1292  catalog loader, if schema or peakSchema or refObjLoader is None
1293 
1294  The task will set its own self.schema attribute to the schema of the output measurement catalog.
1295  This will include all fields from the input schema, as well as additional fields for all the
1296  measurements.
1297  """
1298  CmdLineTask.__init__(self, **kwargs)
1299  self.deblended = self.config.inputCatalog.startswith("deblended")
1300  self.inputCatalog = "Coadd_" + self.config.inputCatalog
1301  if schema is None:
1302  assert butler is not None, "Neither butler nor schema is defined"
1303  schema = butler.get(self.config.coaddName + self.inputCatalog + "_schema", immediate=True).schema
1304  self.schemaMapper = afwTable.SchemaMapper(schema)
1305  self.schemaMapper.addMinimalSchema(schema)
1306  self.schema = self.schemaMapper.getOutputSchema()
1307  self.algMetadata = PropertyList()
1308  self.makeSubtask("measurement", schema=self.schema, algMetadata=self.algMetadata)
1309  self.makeSubtask("setPrimaryFlags", schema=self.schema)
1310  if self.config.doMatchSources:
1311  if refObjLoader is None:
1312  assert butler is not None, "Neither butler nor refObjLoader is defined"
1313  self.makeSubtask("match", butler=butler, refObjLoader=refObjLoader)
1314  if self.config.doPropagateFlags:
1315  self.makeSubtask("propagateFlags", schema=self.schema)
1316  self.schema.checkUnits(parse_strict=self.config.checkUnitsParseStrict)
1317  if self.config.doApCorr:
1318  self.makeSubtask("applyApCorr", schema=self.schema)
1319  if self.config.doRunCatalogCalculation:
1320  self.makeSubtask("catalogCalculation", schema=self.schema)
1321 
1322  def runDataRef(self, patchRef, psfCache=100):
1323  """!
1324  @brief Deblend and measure.
1325 
1326  @param[in] patchRef: Patch reference.
1327 
1328  Set 'is-primary' and related flags. Propagate flags
1329  from individual visits. Optionally match the sources to a reference catalog and write the matches.
1330  Finally, write the deblended sources and measurements out.
1331  """
1332  exposure = patchRef.get(self.config.coaddName + "Coadd_calexp", immediate=True)
1333  exposure.getPsf().setCacheCapacity(psfCache)
1334  sources = self.readSources(patchRef)
1335  table = sources.getTable()
1336  table.setMetadata(self.algMetadata) # Capture algorithm metadata to write out to the source catalog.
1337 
1338  self.measurement.run(sources, exposure, exposureId=self.getExposureId(patchRef))
1339 
1340  if self.config.doApCorr:
1341  self.applyApCorr.run(
1342  catalog=sources,
1343  apCorrMap=exposure.getInfo().getApCorrMap()
1344  )
1345 
1346  # TODO DM-11568: this contiguous check-and-copy could go away if we
1347  # reserve enough space during SourceDetection and/or SourceDeblend.
1348  # NOTE: sourceSelectors require contiguous catalogs, so ensure
1349  # contiguity now, so views are preserved from here on.
1350  if not sources.isContiguous():
1351  sources = sources.copy(deep=True)
1352 
1353  if self.config.doRunCatalogCalculation:
1354  self.catalogCalculation.run(sources)
1355 
1356  skyInfo = getSkyInfo(coaddName=self.config.coaddName, patchRef=patchRef)
1357  self.setPrimaryFlags.run(sources, skyInfo.skyMap, skyInfo.tractInfo, skyInfo.patchInfo,
1358  includeDeblend=self.deblended)
1359  if self.config.doPropagateFlags:
1360  self.propagateFlags.run(patchRef.getButler(), sources, self.propagateFlags.getCcdInputs(exposure),
1361  exposure.getWcs())
1362  if self.config.doMatchSources:
1363  self.writeMatches(patchRef, exposure, sources)
1364  self.write(patchRef, sources)
1365 
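A hedged sketch of driving this method from Python instead of the command line, assuming a Gen2 repository at a hypothetical path "DATA" that already contains the outputs of the earlier multi-band steps, and the default configuration:

import lsst.daf.persistence as dafPersist
from lsst.pipe.tasks.multiBand import MeasureMergedCoaddSourcesTask

butler = dafPersist.Butler("DATA")  # hypothetical repository path
# Hypothetical data ID; any tract/patch/filter with a deepCoadd_calexp and merged detections works.
patchRef = butler.dataRef("deepCoadd_calexp", dataId=dict(tract=0, patch="5,4", filter="HSC-I"))

task = MeasureMergedCoaddSourcesTask(butler=butler)  # schemas and reference loader come from the butler
task.runDataRef(patchRef, psfCache=100)

In practice this step is normally run through its command-line driver (measureCoaddSources.py) rather than called directly.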
1366  def readSources(self, dataRef):
1367  """!
1368  @brief Read input sources.
1369 
1370  @param[in] dataRef: Data reference for catalog of merged detections
1371  @return List of sources in merged catalog
1372 
1373  We also need to add columns to hold the measurements we're about to make
1374  so we can measure in-place.
1375  """
1376  merged = dataRef.get(self.config.coaddName + self.inputCatalog, immediate=True)
1377  self.log.info("Read %d detections: %s" % (len(merged), dataRef.dataId))
1378  idFactory = self.makeIdFactory(dataRef)
1379  for s in merged:
1380  idFactory.notify(s.getId())
1381  table = afwTable.SourceTable.make(self.schema, idFactory)
1382  sources = afwTable.SourceCatalog(table)
1383  sources.extend(merged, self.schemaMapper)
1384  return sources
1385 
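The key points in readSources are that records are copied through the SchemaMapper into the wider measurement schema, and that the IdFactory is notified of every existing ID so later additions cannot collide. A small self-contained sketch of that copy step, with a toy input catalog in place of the merged detections:

import lsst.afw.table as afwTable

# Toy input catalog standing in for the merged detections.
inSchema = afwTable.SourceTable.makeMinimalSchema()
inCat = afwTable.SourceCatalog(inSchema)
inCat.addNew()

# Map the input schema onto the (possibly wider) output schema.
mapper = afwTable.SchemaMapper(inSchema)
mapper.addMinimalSchema(inSchema)

# Reserve the existing IDs, then copy the records through the mapper.
idFactory = afwTable.IdFactory.makeSimple()
for record in inCat:
    idFactory.notify(record.getId())
outTable = afwTable.SourceTable.make(mapper.getOutputSchema(), idFactory)
outCat = afwTable.SourceCatalog(outTable)
outCat.extend(inCat, mapper=mapper)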
1386  def writeMatches(self, dataRef, exposure, sources):
1387  """!
1388  @brief Write matches of the sources to the astrometric reference catalog.
1389 
1390  We use the Wcs in the exposure to match sources.
1391 
1392  @param[in] dataRef: data reference
1393  @param[in] exposure: exposure with Wcs
1394  @param[in] sources: source catalog
1395  """
1396  result = self.match.run(sources, exposure.getInfo().getFilter().getName())
1397  if result.matches:
1398  matches = afwTable.packMatches(result.matches)
1399  matches.table.setMetadata(result.matchMeta)
1400  dataRef.put(matches, self.config.coaddName + "Coadd_measMatch")
1401  if self.config.doWriteMatchesDenormalized:
1402  denormMatches = denormalizeMatches(result.matches, result.matchMeta)
1403  dataRef.put(denormMatches, self.config.coaddName + "Coadd_measMatchFull")
1404 
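The packed match table stores only the source and reference IDs plus the match distance, so readers must re-join it with the corresponding catalogs; the denormalized table written when doWriteMatchesDenormalized is set is self-contained. A hedged sketch of reading both back with a Gen2 butler (dataset names assume the default coaddName of "deep"; the repository path and data ID are hypothetical):

import lsst.daf.persistence as dafPersist

butler = dafPersist.Butler("DATA")  # hypothetical repository path
dataId = dict(tract=0, patch="5,4", filter="HSC-I")

packedMatches = butler.get("deepCoadd_measMatch", dataId)    # IDs and distance only
fullMatches = butler.get("deepCoadd_measMatchFull", dataId)  # denormalized, self-contained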
1405  def write(self, dataRef, sources):
1406  """!
1407  @brief Write the source catalog.
1408 
1409  @param[in] dataRef: data reference
1410  @param[in] sources: source catalog
1411  """
1412  dataRef.put(sources, self.config.coaddName + "Coadd_meas")
1413  self.log.info("Wrote %d sources: %s" % (len(sources), dataRef.dataId))
1414 
1415  def getExposureId(self, dataRef):
1416  return int(dataRef.get(self.config.coaddName + "CoaddId"))
1417 
1418 
1420  """!
1421  @anchor MergeMeasurementsConfig_
1422 
1423  @brief Configuration parameters for the MergeMeasurementsTask
1424  """
1425  pseudoFilterList = ListField(dtype=str, default=["sky"],
1426  doc="Names of filters which may have no associated detection\n"
1427  "(N.b. should include MergeDetectionsConfig.skyFilterName)")
1428  snName = Field(dtype=str, default="base_PsfFlux",
1429  doc="Name of flux measurement for calculating the S/N when choosing the reference band.")
1430  minSN = Field(dtype=float, default=10.,
1431  doc="If the S/N from the priority band is below this value (and the S/N "
1432  "is larger than minSNDiff compared to the priority band), use the band with "
1433  "the largest S/N as the reference band.")
1434  minSNDiff = Field(dtype=float, default=3.,
1435  doc="If the difference in S/N between another band and the priority band is larger "
1436  "than this value (and the S/N in the priority band is less than minSN) "
1437  "use the band with the largest S/N as the reference band")
1438  flags = ListField(dtype=str, doc="Require that these flags, if available, are not set",
1439  default=["base_PixelFlags_flag_interpolatedCenter", "base_PsfFlux_flag",
1440  "ext_photometryKron_KronFlux_flag", "modelfit_CModel_flag", ])
1441 
1442 
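A hedged sketch of overriding these parameters through a config file passed to mergeCoaddMeasurements.py with the standard --configfile option of command-line tasks; the file name and values are arbitrary examples, not recommendations:

# mergeMeasurementsOverrides.py  (hypothetical override file)
config.snName = "base_PsfFlux"
config.minSN = 8.0
config.minSNDiff = 2.0
config.flags.append("base_PixelFlags_flag_saturatedCenter")

It would then be applied with something like: mergeCoaddMeasurements.py DATA --id tract=0 patch=5,4 filter=HSC-I^HSC-R --configfile mergeMeasurementsOverrides.py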
1448 
1449 
1451  """!
1452  @anchor MergeMeasurementsTask_
1453 
1454  @brief Merge measurements from multiple bands
1455 
1456  @section pipe_tasks_multiBand_Contents Contents
1457 
1458  - @ref pipe_tasks_multiBand_MergeMeasurementsTask_Purpose
1459  - @ref pipe_tasks_multiBand_MergeMeasurementsTask_Initialize
1460  - @ref pipe_tasks_multiBand_MergeMeasurementsTask_Run
1461  - @ref pipe_tasks_multiBand_MergeMeasurementsTask_Config
1462  - @ref pipe_tasks_multiBand_MergeMeasurementsTask_Debug
1463  - @ref pipe_tasks_multiband_MergeMeasurementsTask_Example
1464 
1465  @section pipe_tasks_multiBand_MergeMeasurementsTask_Purpose Description
1466 
1467  Command-line task that merges measurements from multiple bands.
1468 
1469  Combines consistent (i.e. with the same peaks and footprints) catalogs of sources from multiple filter
1470  bands to construct a unified catalog that is suitable for driving forced photometry. Every source is
1471  required to have centroid, shape and flux measurements in each band.
1472 
1473  @par Inputs:
1474  deepCoadd_meas{tract,patch,filter}: SourceCatalog
1475  @par Outputs:
1476  deepCoadd_ref{tract,patch}: SourceCatalog
1477  @par Data Unit:
1478  tract, patch
1479 
1480  MergeMeasurementsTask subclasses @ref MergeSourcesTask_ "MergeSourcesTask".
1481 
1482  @section pipe_tasks_multiBand_MergeMeasurementsTask_Initialize Task initialization
1483 
1484  @copydoc \_\_init\_\_
1485 
1486  @section pipe_tasks_multiBand_MergeMeasurementsTask_Run Invoking the Task
1487 
1488  @copydoc run
1489 
1490  @section pipe_tasks_multiBand_MergeMeasurementsTask_Config Configuration parameters
1491 
1492  See @ref MergeMeasurementsConfig_
1493 
1494  @section pipe_tasks_multiBand_MergeMeasurementsTask_Debug Debug variables
1495 
1496  The @link lsst.pipe.base.cmdLineTask.CmdLineTask command line task@endlink interface supports a
1497  flag @c -d to import @b debug.py from your @c PYTHONPATH; see @ref baseDebug for more about @b debug.py
1498  files.
1499 
1500  MergeMeasurementsTask has no debug variables.
1501 
1502  @section pipe_tasks_multiband_MergeMeasurementsTask_Example A complete example
1503  of using MergeMeasurementsTask
1504 
1505  MergeMeasurementsTask is meant to be run after deblending & measuring sources in every band.
1506  The purpose of the task is to generate a catalog of sources suitable for driving forced photometry in
1507  coadds and individual exposures.
1508  Command-line usage of MergeMeasurementsTask expects a data reference to the coadds to be processed. A list
1509  of the available optional arguments can be obtained by calling mergeCoaddMeasurements.py with the `--help`
1510  command line argument:
1511  @code
1512  mergeCoaddMeasurements.py --help
1513  @endcode
1514 
1515  To demonstrate usage of the MergeMeasurementsTask in the larger context of multi-band processing, we
1516  will process HSC data in the [ci_hsc](https://github.com/lsst/ci_hsc) package. Assuming one has finished
1517  step 7 at @ref pipeTasks_multiBand, one may merge the catalogs generated after deblending and measuring
1518  as follows:
1519  @code
1520  mergeCoaddMeasurements.py $CI_HSC_DIR/DATA --id patch=5,4 tract=0 filter=HSC-I^HSC-R
1521  @endcode
1522  This will merge the HSC-I & HSC-R band catalogs. The results are written in
1523  `$CI_HSC_DIR/DATA/deepCoadd-results/`.
1524  """
1525  _DefaultName = "mergeCoaddMeasurements"
1526  ConfigClass = MergeMeasurementsConfig
1527  inputDataset = "meas"
1528  outputDataset = "ref"
1529  getSchemaCatalogs = _makeGetSchemaCatalogs("ref")
1530 
1531  def __init__(self, butler=None, schema=None, **kwargs):
1532  """!
1533  Initialize the task.
1534 
1535  Additional keyword arguments (forwarded to MergeSourcesTask.__init__):
1536  @param[in] schema: the schema of the measurement catalogs used as input to this one
1537  @param[in] butler: a butler used to read the input schema from disk, if schema is None
1538 
1539  The task will set its own self.schema attribute to the schema of the output merged catalog.
1540  """
1541  MergeSourcesTask.__init__(self, butler=butler, schema=schema, **kwargs)
1542  inputSchema = self.getInputSchema(butler=butler, schema=schema)
1543  self.schemaMapper = afwTable.SchemaMapper(inputSchema, True)
1544  self.schemaMapper.addMinimalSchema(inputSchema, True)
1545  self.instFluxKey = inputSchema.find(self.config.snName + "_instFlux").getKey()
1546  self.instFluxErrKey = inputSchema.find(self.config.snName + "_instFluxErr").getKey()
1547  self.fluxFlagKey = inputSchema.find(self.config.snName + "_flag").getKey()
1548 
1549  self.flagKeys = {}
1550  for band in self.config.priorityList:
1551  short = getShortFilterName(band)
1552  outputKey = self.schemaMapper.editOutputSchema().addField(
1553  "merge_measurement_%s" % short,
1554  type="Flag",
1555  doc="Flag field set if the measurements here are from the %s filter" % band
1556  )
1557  peakKey = inputSchema.find("merge_peak_%s" % short).key
1558  footprintKey = inputSchema.find("merge_footprint_%s" % short).key
1559  self.flagKeys[band] = Struct(peak=peakKey, footprint=footprintKey, output=outputKey)
1560  self.schema = self.schemaMapper.getOutputSchema()
1561 
1562  self.pseudoFilterKeys = []
1563  for filt in self.config.pseudoFilterList:
1564  try:
1565  self.pseudoFilterKeys.append(self.schema.find("merge_peak_%s" % filt).getKey())
1566  except Exception as e:
1567  self.log.warn("merge_peak is not set for pseudo-filter %s: %s" % (filt, e))
1568 
1569  self.badFlags = {}
1570  for flag in self.config.flags:
1571  try:
1572  self.badFlags[flag] = self.schema.find(flag).getKey()
1573  except KeyError as exc:
1574  self.log.warn("Can't find flag %s in schema: %s" % (flag, exc,))
1575 
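Each band in priorityList gets a merge_measurement_<band> flag in the output schema here, and run() below sets exactly one of them per output record. A hedged sketch of using those flags to tally which band ended up as the reference in a finished patch (the repository path and data ID are hypothetical; the dataset name assumes coaddName="deep"):

import collections

import lsst.daf.persistence as dafPersist

butler = dafPersist.Butler("DATA")  # hypothetical repository path
ref = butler.get("deepCoadd_ref", dict(tract=0, patch="5,4"))

counts = collections.Counter()
for name in ref.schema.getNames():
    if name.startswith("merge_measurement_"):
        counts[name] = int(ref[name].sum())  # flag columns read back as boolean arrays
print(counts)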
1576  def run(self, catalogs, patchRef):
1577  """!
1578  Merge measurement catalogs to create a single reference catalog for forced photometry
1579 
1580  @param[in] catalogs: the catalogs to be merged
1581  @param[in] patchRef: patch reference for data
1582 
1583  For parent sources, we choose the first band in config.priorityList for which the
1584  merge_footprint flag for that band is True.
1585 
1586  For child sources, the logic is the same, except that we use the merge_peak flags.
1587  """
1588  # Put catalogs, filters in priority order
1589  orderedCatalogs = [catalogs[band] for band in self.config.priorityList if band in catalogs.keys()]
1590  orderedKeys = [self.flagKeys[band] for band in self.config.priorityList if band in catalogs.keys()]
1591 
1592  mergedCatalog = afwTable.SourceCatalog(self.schema)
1593  mergedCatalog.reserve(len(orderedCatalogs[0]))
1594 
1595  idKey = orderedCatalogs[0].table.getIdKey()
1596  for catalog in orderedCatalogs[1:]:
1597  if numpy.any(orderedCatalogs[0].get(idKey) != catalog.get(idKey)):
1598  raise ValueError("Error in inputs to MergeCoaddMeasurements: source IDs do not match")
1599 
1600  # This first zip iterates over all the catalogs simultaneously, yielding a sequence of one
1601  # record for each band, in priority order.
1602  for orderedRecords in zip(*orderedCatalogs):
1603 
1604  maxSNRecord = None
1605  maxSNFlagKeys = None
1606  maxSN = 0.
1607  priorityRecord = None
1608  priorityFlagKeys = None
1609  prioritySN = 0.
1610  hasPseudoFilter = False
1611 
1612  # Now we iterate over those record-band pairs, keeping track of the priority and the
1613  # largest S/N band.
1614  for inputRecord, flagKeys in zip(orderedRecords, orderedKeys):
1615  parent = (inputRecord.getParent() == 0 and inputRecord.get(flagKeys.footprint))
1616  child = (inputRecord.getParent() != 0 and inputRecord.get(flagKeys.peak))
1617 
1618  if not (parent or child):
1619  for pseudoFilterKey in self.pseudoFilterKeys:
1620  if inputRecord.get(pseudoFilterKey):
1621  hasPseudoFilter = True
1622  priorityRecord = inputRecord
1623  priorityFlagKeys = flagKeys
1624  break
1625  if hasPseudoFilter:
1626  break
1627 
1628  isBad = any(inputRecord.get(flag) for flag in self.badFlags)
1629  if isBad or inputRecord.get(self.fluxFlagKey) or inputRecord.get(self.instFluxErrKey) == 0:
1630  sn = 0.
1631  else:
1632  sn = inputRecord.get(self.instFluxKey)/inputRecord.get(self.instFluxErrKey)
1633  if numpy.isnan(sn) or sn < 0.:
1634  sn = 0.
1635  if (parent or child) and priorityRecord is None:
1636  priorityRecord = inputRecord
1637  priorityFlagKeys = flagKeys
1638  prioritySN = sn
1639  if sn > maxSN:
1640  maxSNRecord = inputRecord
1641  maxSNFlagKeys = flagKeys
1642  maxSN = sn
1643 
1644  # If the priority band has a low S/N we would like to choose the band with the highest S/N as
1645  # the reference band instead. However, we only want to choose the highest S/N band if it is
1646  # significantly better than the priority band. Therefore, to choose a band other than the
1647  # priority, we require that the priority S/N is below the minimum threshold and that the
1648  # difference between the priority and highest S/N is larger than the difference threshold.
1649  #
1650  # For pseudo-filter objects (e.g. sky objects) we always choose the first band in the priority list.
1651  bestRecord = None
1652  bestFlagKeys = None
1653  if hasPseudoFilter:
1654  bestRecord = priorityRecord
1655  bestFlagKeys = priorityFlagKeys
1656  elif (prioritySN < self.config.minSN and (maxSN - prioritySN) > self.config.minSNDiff and
1657  maxSNRecord is not None):
1658  bestRecord = maxSNRecord
1659  bestFlagKeys = maxSNFlagKeys
1660  elif priorityRecord is not None:
1661  bestRecord = priorityRecord
1662  bestFlagKeys = priorityFlagKeys
1663 
1664  if bestRecord is not None and bestFlagKeys is not None:
1665  outputRecord = mergedCatalog.addNew()
1666  outputRecord.assign(bestRecord, self.schemaMapper)
1667  outputRecord.set(bestFlagKeys.output, True)
1668  else: # if we didn't find any records
1669  raise ValueError("Error in inputs to MergeCoaddMeasurements: no valid reference for %s" %
1670  inputRecord.getId())
1671 
1672  # More checking for sane inputs, since zip silently stops at the shortest sequence.
1673  for inputCatalog in orderedCatalogs:
1674  if len(mergedCatalog) != len(inputCatalog):
1675  raise ValueError("Mismatch between catalog sizes: %s != %s" %
1676  (len(mergedCatalog), len(inputCatalog)))
1677 
1678  return mergedCatalog
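The reference-band choice implemented in run() can be summarized as a small pure function. The sketch below is an illustrative restatement of the per-object logic, not code from the pipeline; pseudo-filter (sky) objects, which always take the first band that flags them, are handled separately above and omitted here.

def chooseReferenceBand(records, minSN=10.0, minSNDiff=3.0):
    """Illustrative restatement of the per-object logic in MergeMeasurementsTask.run.

    `records` is a list of (band, sn, isCandidate) tuples in priority order, where
    isCandidate means the merge_footprint (parent) or merge_peak (child) flag is set
    and sn is the already-computed S/N (0 for flagged or unusable measurements).
    """
    priority = None  # (band, sn) of the first candidate in priority order
    best = None      # (band, sn) of the candidate with the largest S/N
    for band, sn, isCandidate in records:
        if not isCandidate:
            continue
        if priority is None:
            priority = (band, sn)
        if best is None or sn > best[1]:
            best = (band, sn)
    if priority is None:
        raise ValueError("no valid reference band")
    # Abandon the priority band only if its S/N is low *and* another band is much better.
    if priority[1] < minSN and best[1] - priority[1] > minSNDiff:
        return best[0]
    return priority[0]


# Example: the priority band has S/N 4, another band is 9 higher, so it becomes the reference.
print(chooseReferenceBand([("HSC-I", 4.0, True), ("HSC-R", 13.0, True)]))  # prints "HSC-R"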