lsst.meas.deblender  16.0-8-g4dec96c+7
deblend.py
1 #
2 # LSST Data Management System
3 # Copyright 2008-2015 AURA/LSST.
4 #
5 # This product includes software developed by the
6 # LSST Project (http://www.lsst.org/).
7 #
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
12 #
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
17 #
18 # You should have received a copy of the LSST License Statement and
19 # the GNU General Public License along with this program. If not,
20 # see <http://www.lsstcorp.org/LegalNotices/>.
21 #
22 import math
23 import numpy as np
24 import time
25 
26 import scarlet
27 
28 import lsst.log
29 import lsst.pex.config as pexConfig
30 import lsst.pipe.base as pipeBase
31 import lsst.afw.math as afwMath
32 import lsst.afw.geom as afwGeom
33 import lsst.afw.geom.ellipses as afwEll
34 import lsst.afw.image as afwImage
35 import lsst.afw.detection as afwDet
36 import lsst.afw.table as afwTable
37 
38 logger = lsst.log.Log.getLogger("meas.deblender.deblend")
39 
40 __all__ = ["SourceDeblendConfig", "SourceDeblendTask", "MultibandDeblendConfig", "MultibandDeblendTask"]
41 
42 
43 class SourceDeblendConfig(pexConfig.Config):
44 
45  edgeHandling = pexConfig.ChoiceField(
46  doc='What to do when a peak to be deblended is close to the edge of the image',
47  dtype=str, default='ramp',
48  allowed={
49  'clip': 'Clip the template at the edge AND the mirror of the edge.',
50  'ramp': 'Ramp down flux at the image edge by the PSF',
51  'noclip': 'Ignore the edge when building the symmetric template.',
52  }
53  )
54 
55  strayFluxToPointSources = pexConfig.ChoiceField(
56  doc='When the deblender should attribute stray flux to point sources',
57  dtype=str, default='necessary',
58  allowed={
59  'necessary': 'When there is not an extended object in the footprint',
60  'always': 'Always',
61  'never': ('Never; stray flux will not be attributed to any deblended child '
62  'if the deblender thinks all peaks look like point sources'),
63  }
64  )
65 
66  assignStrayFlux = pexConfig.Field(dtype=bool, default=True,
67  doc='Assign stray flux (not claimed by any child in the deblender) '
68  'to deblend children.')
69 
70  strayFluxRule = pexConfig.ChoiceField(
71  doc='How to split flux among peaks',
72  dtype=str, default='trim',
73  allowed={
74  'r-to-peak': '~ 1/(1+R^2) to the peak',
75  'r-to-footprint': ('~ 1/(1+R^2) to the closest pixel in the footprint. '
76  'CAUTION: this can be computationally expensive on large footprints!'),
77  'nearest-footprint': ('Assign 100% to the nearest footprint (using L-1 norm aka '
78  'Manhattan distance)'),
79  'trim': ('Shrink the parent footprint to pixels that are not assigned to children')
80  }
81  )
82 
83  clipStrayFluxFraction = pexConfig.Field(dtype=float, default=0.001,
84  doc=('When splitting stray flux, clip fractions below '
85  'this value to zero.'))
86  psfChisq1 = pexConfig.Field(dtype=float, default=1.5, optional=False,
87  doc=('Chi-squared per DOF cut for deciding a source is '
88  'a PSF during deblending (un-shifted PSF model)'))
89  psfChisq2 = pexConfig.Field(dtype=float, default=1.5, optional=False,
90  doc=('Chi-squared per DOF cut for deciding a source is '
91  'PSF during deblending (shifted PSF model)'))
92  psfChisq2b = pexConfig.Field(dtype=float, default=1.5, optional=False,
93  doc=('Chi-squared per DOF cut for deciding a source is '
94  'a PSF during deblending (shifted PSF model #2)'))
95  maxNumberOfPeaks = pexConfig.Field(dtype=int, default=0,
96  doc=("Only deblend the brightest maxNumberOfPeaks peaks in the parent"
97  " (<= 0: unlimited)"))
98  maxFootprintArea = pexConfig.Field(dtype=int, default=1000000,
99  doc=("Maximum area for footprints before they are ignored as large; "
100  "non-positive means no threshold applied"))
101  maxFootprintSize = pexConfig.Field(dtype=int, default=0,
102  doc=("Maximum linear dimension for footprints before they are ignored "
103  "as large; non-positive means no threshold applied"))
104  minFootprintAxisRatio = pexConfig.Field(dtype=float, default=0.0,
105  doc=("Minimum axis ratio for footprints before they are ignored "
106  "as large; non-positive means no threshold applied"))
107  notDeblendedMask = pexConfig.Field(dtype=str, default="NOT_DEBLENDED", optional=True,
108  doc="Mask name for footprints not deblended, or None")
109 
110  tinyFootprintSize = pexConfig.RangeField(dtype=int, default=2, min=2, inclusiveMin=True,
111  doc=('Footprints smaller in width or height than this value '
112  'will be ignored; minimum of 2 due to PSF gradient '
113  'calculation.'))
114 
115  propagateAllPeaks = pexConfig.Field(dtype=bool, default=False,
116  doc=('Guarantee that all peaks produce a child source.'))
117  catchFailures = pexConfig.Field(
118  dtype=bool, default=False,
119  doc=("If True, catch exceptions thrown by the deblender, log them, "
120  "and set a flag on the parent, instead of letting them propagate up"))
121  maskPlanes = pexConfig.ListField(dtype=str, default=["SAT", "INTRP", "NO_DATA"],
122  doc="Mask planes to ignore when performing statistics")
123  maskLimits = pexConfig.DictField(
124  keytype=str,
125  itemtype=float,
126  default={},
127  doc=("Mask planes with the corresponding limit on the fraction of masked pixels. "
128  "Sources violating this limit will not be deblended."),
129  )
130  weightTemplates = pexConfig.Field(
131  dtype=bool, default=False,
132  doc=("If true, a least-squares fit of the templates will be done to the "
133  "full image. The templates will be re-weighted based on this fit."))
134  removeDegenerateTemplates = pexConfig.Field(dtype=bool, default=False,
135  doc=("Try to remove similar templates?"))
136  maxTempDotProd = pexConfig.Field(
137  dtype=float, default=0.5,
138  doc=("If the dot product between two templates is larger than this value, we consider them to be "
139  "describing the same object (i.e. they are degenerate). If one of the objects has been "
140  "labeled as a PSF it will be removed, otherwise the template with the lowest value will "
141  "be removed."))
142  medianSmoothTemplate = pexConfig.Field(dtype=bool, default=True,
143  doc="Apply a smoothing filter to all of the template images")
144 
145 
151 
152 
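# Example (not part of the original file): a minimal sketch of overriding a few
# SourceDeblendConfig fields from Python. The values shown are illustrative
# assumptions, not recommended defaults.
#
#     config = SourceDeblendConfig()
#     config.maxFootprintArea = 500000       # skip footprints covering > 5e5 pixels
#     config.strayFluxRule = "r-to-footprint"
#     config.propagateAllPeaks = True        # every peak yields a child source
#     config.catchFailures = True            # flag and log failures instead of raising
#     config.validate()
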
153 class SourceDeblendTask(pipeBase.Task):
154  """!
155  \anchor SourceDeblendTask_
156 
157  \brief Split blended sources into individual sources.
158 
159  This task has no return value; it only modifies the SourceCatalog in-place.
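
 A minimal usage sketch (illustrative; the exposure and the upstream detection
 task are assumptions, not part of this file). Note that the deblend task must be
 constructed before the source table is made, so that its fields are added to the schema:

     schema = afwTable.SourceTable.makeMinimalSchema()
     detectTask = SourceDetectionTask(schema=schema)  # e.g. from lsst.meas.algorithms
     deblendTask = SourceDeblendTask(schema=schema)
     table = afwTable.SourceTable.make(schema)
     detections = detectTask.run(table, exposure)
     deblendTask.run(exposure, detections.sources)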
160  """
161  ConfigClass = SourceDeblendConfig
162  _DefaultName = "sourceDeblend"
163 
164  def __init__(self, schema, peakSchema=None, **kwargs):
165  """!
166  Create the task, adding necessary fields to the given schema.
167 
168  @param[in,out] schema Schema object for measurement fields; will be modified in-place.
169  @param[in] peakSchema Schema of Footprint Peaks that will be passed to the deblender.
170  Any fields beyond the PeakTable minimal schema will be transferred
171  to the main source Schema. If None, no fields will be transferred
172  from the Peaks.
173  @param[in] **kwargs Passed to Task.__init__.
174  """
175  pipeBase.Task.__init__(self, **kwargs)
176  self.schema = schema
177  peakMinimalSchema = afwDet.PeakTable.makeMinimalSchema()
178  if peakSchema is None:
179  # In this case, the peakSchemaMapper will transfer nothing, but we'll still have one
180  # to simplify downstream code
181  self.peakSchemaMapper = afwTable.SchemaMapper(peakMinimalSchema, schema)
182  else:
183  self.peakSchemaMapper = afwTable.SchemaMapper(peakSchema, schema)
184  for item in peakSchema:
185  if item.key not in peakMinimalSchema:
186  self.peakSchemaMapper.addMapping(item.key, item.field)
187  # Because SchemaMapper makes a copy of the output schema you give its ctor, it isn't
188  # updating this Schema in place. That's probably a design flaw, but in the meantime,
189  # we'll keep that schema in sync with the peakSchemaMapper.getOutputSchema() manually,
190  # by adding the same fields to both.
191  schema.addField(item.field)
192  assert schema == self.peakSchemaMapper.getOutputSchema(), "Logic bug mapping schemas"
193  self.addSchemaKeys(schema)
194 
195  def addSchemaKeys(self, schema):
196  self.nChildKey = schema.addField('deblend_nChild', type=np.int32,
197  doc='Number of children this object has (defaults to 0)')
198  self.psfKey = schema.addField('deblend_deblendedAsPsf', type='Flag',
199  doc='Deblender thought this source looked like a PSF')
200  self.psfCenterKey = afwTable.Point2DKey.addFields(schema, 'deblend_psfCenter',
201  'If deblended-as-psf, the PSF centroid', "pixel")
202  self.psfFluxKey = schema.addField('deblend_psfFlux', type='D',
203  doc='If deblended-as-psf, the PSF flux')
204  self.tooManyPeaksKey = schema.addField('deblend_tooManyPeaks', type='Flag',
205  doc='Source had too many peaks; '
206  'only the brightest were included')
207  self.tooBigKey = schema.addField('deblend_parentTooBig', type='Flag',
208  doc='Parent footprint covered too many pixels')
209  self.maskedKey = schema.addField('deblend_masked', type='Flag',
210  doc='Parent footprint was predominantly masked')
211 
212  if self.config.catchFailures:
213  self.deblendFailedKey = schema.addField('deblend_failed', type='Flag',
214  doc="Deblending failed on source")
215 
216  self.deblendSkippedKey = schema.addField('deblend_skipped', type='Flag',
217  doc="Deblender skipped this source")
218 
219  self.deblendRampedTemplateKey = schema.addField(
220  'deblend_rampedTemplate', type='Flag',
221  doc=('This source was near an image edge and the deblender used '
222  '"ramp" edge-handling.'))
223 
224  self.deblendPatchedTemplateKey = schema.addField(
225  'deblend_patchedTemplate', type='Flag',
226  doc=('This source was near an image edge and the deblender used '
227  '"patched" edge-handling.'))
228 
229  self.hasStrayFluxKey = schema.addField(
230  'deblend_hasStrayFlux', type='Flag',
231  doc=('This source was assigned some stray flux'))
232 
233  self.log.trace('Added keys to schema: %s', ", ".join(str(x) for x in (
234  self.nChildKey, self.psfKey, self.psfCenterKey, self.psfFluxKey,
235  self.tooManyPeaksKey, self.tooBigKey)))
236 
237  @pipeBase.timeMethod
238  def run(self, exposure, sources):
239  """!
240  Get the psf from the provided exposure and then run deblend().
241 
242  @param[in] exposure Exposure to process
243  @param[in,out] sources SourceCatalog containing sources detected on this exposure.
244 
245  @return None
246  """
247  psf = exposure.getPsf()
248  assert sources.getSchema() == self.schema
249  self.deblend(exposure, sources, psf)
250 
251  def _getPsfFwhm(self, psf, bbox):
252  # It should be easier to get a PSF's fwhm;
253  # https://dev.lsstcorp.org/trac/ticket/3030
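 # 2.35 approximates the Gaussian sigma-to-FWHM factor 2*sqrt(2*ln(2)) ~= 2.3548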
254  return psf.computeShape().getDeterminantRadius() * 2.35
255 
256  @pipeBase.timeMethod
257  def deblend(self, exposure, srcs, psf):
258  """!
259  Deblend.
260 
261  @param[in] exposure Exposure to process
262  @param[in,out] srcs SourceCatalog containing sources detected on this exposure.
263  @param[in] psf PSF
264 
265  @return None
266  """
267  self.log.info("Deblending %d sources" % len(srcs))
268 
269  from lsst.meas.deblender.baseline import deblend
270 
271  # find the median stdev in the image...
272  mi = exposure.getMaskedImage()
273  statsCtrl = afwMath.StatisticsControl()
274  statsCtrl.setAndMask(mi.getMask().getPlaneBitMask(self.config.maskPlanes))
275  stats = afwMath.makeStatistics(mi.getVariance(), mi.getMask(), afwMath.MEDIAN, statsCtrl)
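 # sigma1 is the per-pixel noise estimate: the square root of the median variance over unmasked pixels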
276  sigma1 = math.sqrt(stats.getValue(afwMath.MEDIAN))
277  self.log.trace('sigma1: %g', sigma1)
278 
279  n0 = len(srcs)
280  nparents = 0
281  for i, src in enumerate(srcs):
282  # t0 = time.clock()
283 
284  fp = src.getFootprint()
285  pks = fp.getPeaks()
286 
287  # Since we use the first peak for the parent object, we should propagate its flags
288  # to the parent source.
289  src.assign(pks[0], self.peakSchemaMapper)
290 
291  if len(pks) < 2:
292  continue
293 
294  if self.isLargeFootprint(fp):
295  src.set(self.tooBigKey, True)
296  self.skipParent(src, mi.getMask())
297  self.log.trace('Parent %i: skipping large footprint', int(src.getId()))
298  continue
299  if self.isMasked(fp, exposure.getMaskedImage().getMask()):
300  src.set(self.maskedKey, True)
301  self.skipParent(src, mi.getMask())
302  self.log.trace('Parent %i: skipping masked footprint', int(src.getId()))
303  continue
304 
305  nparents += 1
306  bb = fp.getBBox()
307  psf_fwhm = self._getPsfFwhm(psf, bb)
308 
309  self.log.trace('Parent %i: deblending %i peaks', int(src.getId()), len(pks))
310 
311  self.preSingleDeblendHook(exposure, srcs, i, fp, psf, psf_fwhm, sigma1)
312  npre = len(srcs)
313 
314  # This should really be set in deblend, but deblend doesn't have access to the src
315  src.set(self.tooManyPeaksKey, 0 < self.config.maxNumberOfPeaks < len(fp.getPeaks()))
316 
317  try:
318  res = deblend(
319  fp, mi, psf, psf_fwhm, sigma1=sigma1,
320  psfChisqCut1=self.config.psfChisq1,
321  psfChisqCut2=self.config.psfChisq2,
322  psfChisqCut2b=self.config.psfChisq2b,
323  maxNumberOfPeaks=self.config.maxNumberOfPeaks,
324  strayFluxToPointSources=self.config.strayFluxToPointSources,
325  assignStrayFlux=self.config.assignStrayFlux,
326  strayFluxAssignment=self.config.strayFluxRule,
327  rampFluxAtEdge=(self.config.edgeHandling == 'ramp'),
328  patchEdges=(self.config.edgeHandling == 'noclip'),
329  tinyFootprintSize=self.config.tinyFootprintSize,
330  clipStrayFluxFraction=self.config.clipStrayFluxFraction,
331  weightTemplates=self.config.weightTemplates,
332  removeDegenerateTemplates=self.config.removeDegenerateTemplates,
333  maxTempDotProd=self.config.maxTempDotProd,
334  medianSmoothTemplate=self.config.medianSmoothTemplate
335  )
336  if self.config.catchFailures:
337  src.set(self.deblendFailedKey, False)
338  except Exception as e:
339  if self.config.catchFailures:
340  self.log.warn("Unable to deblend source %d: %s" % (src.getId(), e))
341  src.set(self.deblendFailedKey, True)
342  import traceback
343  traceback.print_exc()
344  continue
345  else:
346  raise
347 
348  kids = []
349  nchild = 0
350  for j, peak in enumerate(res.deblendedParents[0].peaks):
351  heavy = peak.getFluxPortion()
352  if heavy is None or peak.skip:
353  src.set(self.deblendSkippedKey, True)
354  if not self.config.propagateAllPeaks:
355  # Don't care
356  continue
357  # We need to preserve the peak: make sure we have enough info to create a minimal
358  # child src
359  self.log.trace("Peak at (%i,%i) failed. Using minimal default info for child.",
360  pks[j].getIx(), pks[j].getIy())
361  if heavy is None:
362  # copy the full footprint and strip out extra peaks
363  foot = afwDet.Footprint(src.getFootprint())
364  peakList = foot.getPeaks()
365  peakList.clear()
366  peakList.append(peak.peak)
367  zeroMimg = afwImage.MaskedImageF(foot.getBBox())
368  heavy = afwDet.makeHeavyFootprint(foot, zeroMimg)
369  if peak.deblendedAsPsf:
370  if peak.psfFitFlux is None:
371  peak.psfFitFlux = 0.0
372  if peak.psfFitCenter is None:
373  peak.psfFitCenter = (peak.peak.getIx(), peak.peak.getIy())
374 
375  assert len(heavy.getPeaks()) == 1
376 
377  src.set(self.deblendSkippedKey, False)
378  child = srcs.addNew()
379  nchild += 1
380  child.assign(heavy.getPeaks()[0], self.peakSchemaMapper)
381  child.setParent(src.getId())
382  child.setFootprint(heavy)
383  child.set(self.psfKey, peak.deblendedAsPsf)
384  child.set(self.hasStrayFluxKey, peak.strayFlux is not None)
385  if peak.deblendedAsPsf:
386  (cx, cy) = peak.psfFitCenter
387  child.set(self.psfCenterKey, afwGeom.Point2D(cx, cy))
388  child.set(self.psfFluxKey, peak.psfFitFlux)
389  child.set(self.deblendRampedTemplateKey, peak.hasRampedTemplate)
390  child.set(self.deblendPatchedTemplateKey, peak.patched)
391  kids.append(child)
392 
393  # Child footprints may extend beyond the full extent of their parent's footprint, which
394  # causes the replace-by-noise code to fail to reinstate these pixels
395  # to their original values. The following updates the parent footprint
396  # in-place to ensure it contains the full union of itself and all of its
397  # children's footprints.
398  spans = src.getFootprint().spans
399  for child in kids:
400  spans = spans.union(child.getFootprint().spans)
401  src.getFootprint().setSpans(spans)
402 
403  src.set(self.nChildKey, nchild)
404 
405  self.postSingleDeblendHook(exposure, srcs, i, npre, kids, fp, psf, psf_fwhm, sigma1, res)
406  # print('Deblending parent id', src.getId(), 'took', time.clock() - t0)
407 
408  n1 = len(srcs)
409  self.log.info('Deblended: of %i sources, %i were deblended, creating %i children, total %i sources'
410  % (n0, nparents, n1-n0, n1))
411 
412  def preSingleDeblendHook(self, exposure, srcs, i, fp, psf, psf_fwhm, sigma1):
413  pass
414 
415  def postSingleDeblendHook(self, exposure, srcs, i, npre, kids, fp, psf, psf_fwhm, sigma1, res):
416  pass
417 
418  def isLargeFootprint(self, footprint):
419  """Returns whether a Footprint is large
420 
421  'Large' is defined by thresholds on the area, size and axis ratio.
422  These may be disabled independently by configuring them to be non-positive.
423 
424  This is principally intended to get rid of satellite streaks, which the
425  deblender or other downstream processing can have trouble dealing with
426  (e.g., multiple large HeavyFootprints can chew up memory).
427  """
428  if self.config.maxFootprintArea > 0 and footprint.getArea() > self.config.maxFootprintArea:
429  return True
430  if self.config.maxFootprintSize > 0:
431  bbox = footprint.getBBox()
432  if max(bbox.getWidth(), bbox.getHeight()) > self.config.maxFootprintSize:
433  return True
434  if self.config.minFootprintAxisRatio > 0:
435  axes = afwEll.Axes(footprint.getShape())
436  if axes.getB() < self.config.minFootprintAxisRatio*axes.getA():
437  return True
438  return False
439 
440  def isMasked(self, footprint, mask):
441  """Returns whether the footprint violates the mask limits"""
442  size = float(footprint.getArea())
443  for maskName, limit in self.config.maskLimits.items():
444  maskVal = mask.getPlaneBitMask(maskName)
445  unmaskedSpan = footprint.spans.intersectNot(mask, maskVal) # spanset of unmasked pixels
446  if (size - unmaskedSpan.getArea())/size > limit:
447  return True
448  return False
449 
450  def skipParent(self, source, mask):
451  """Indicate that the parent source is not being deblended
452 
453  We set the appropriate flags and mask.
454 
455  @param source The source to flag as skipped
456  @param mask The mask to update
457  """
458  fp = source.getFootprint()
459  source.set(self.deblendSkippedKey, True)
460  source.set(self.nChildKey, len(fp.getPeaks())) # It would have this many if we deblended them all
461  if self.config.notDeblendedMask:
462  mask.addMaskPlane(self.config.notDeblendedMask)
463  fp.spans.setMask(mask, mask.getPlaneBitMask(self.config.notDeblendedMask))
464 
465 
466 class MultibandDeblendConfig(pexConfig.Config):
467  """MultibandDeblendConfig
468 
469  Configuration for the multiband deblender.
470  The parameters are organized by the parameter types, which are
471  - Stopping Criteria: Used to determine if the fit has converged
472  - Position Fitting Criteria: Used to fit the positions of the peaks
473  - Constraints: Used to apply constraints to the peaks and their components
474  - Other: Parameters that don't fit into the above categories
475  """
476  # Stopping Criteria
477  maxIter = pexConfig.Field(dtype=int, default=200,
478  doc=("Maximum number of iterations to deblend a single parent"))
479  relativeError = pexConfig.Field(dtype=float, default=1e-3,
480  doc=("Relative error to use when determining stopping criteria"))
481 
482  # Blend Configuration options
483  minTranslation = pexConfig.Field(dtype=float, default=1e-3,
484  doc=("A peak must be updated by at least 'minTranslation' (pixels) "
485  "or no update is performed. "
486  "This field is ignored if fitPositions is False."))
487  refinementSkip = pexConfig.Field(dtype=int, default=10,
488  doc=("If fitPositions is True, the positions and box sizes are "
489  "updated every 'refinementSkip' iterations."))
490  translationMethod = pexConfig.Field(dtype=str, default="default",
491  doc=("Method to use for fitting translations. "
492  "Currently 'default' is the only available option, "
493  "which performs a linear fit, but it is possible that we "
494  "will use galsim or some other method as a future option."))
495  edgeFluxThresh = pexConfig.Field(dtype=float, default=1.0,
496  doc=("Boxes are resized when the flux at an edge is "
497  "> edgeFluxThresh * background RMS"))
498  exactLipschitz = pexConfig.Field(dtype=bool, default=False,
499  doc=("Calculate the exact Lipschitz constant in every step "
500  "(True) or only calculate the approximate "
501  "Lipschitz constant when there are significant changes in A,S "
502  "(False)"))
503  stepSlack = pexConfig.Field(dtype=float, default=0.2,
504  doc=("A fractional measure of how much a value (like the exactLipschitz) "
505  "can change before it needs to be recalculated. "
506  "This must be between 0 and 1."))
507 
508  # Constraints
509  constraints = pexConfig.Field(dtype=str, default="1,+,S,M",
510  doc=("List of constraints to use for each object "
511  "(order does not matter). "
512  "Current options are all used by default:\n"
513  "S: symmetry\n"
514  "M: monotonicity\n"
515  "1: normalized SED to unity\n"
516  "+: non-negative morphology"))
517  symmetryThresh = pexConfig.Field(dtype=float, default=1.0,
518  doc=("Strictness of symmetry, from "
519  "0 (no symmetry enforced) to "
520  "1 (perfect symmetry required). "
521  "If 'S' is not in `constraints`, this argument is ignored"))
522  l0Thresh = pexConfig.Field(dtype=float, default=np.nan,
523  doc=("L0 threshold. NaN results in no L0 penalty."))
524  l1Thresh = pexConfig.Field(dtype=float, default=np.nan,
525  doc=("L1 threshold. NaN results in no L1 penalty."))
526  tvxThresh = pexConfig.Field(dtype=float, default=np.nan,
527  doc=("Threshold for TV (total variation) constraint in the x-direction. "
528  "NaN results in no TVx penalty."))
529  tvyThresh = pexConfig.Field(dtype=float, default=np.nan,
530  doc=("Threshold for TV (total variation) constraint in the y-direction. "
531  "NaN results in no TVy penalty."))
532 
533  # Other scarlet parameters
534  useWeights = pexConfig.Field(dtype=bool, default=False, doc="Use inverse variance as deblender weights")
535  bgScale = pexConfig.Field(
536  dtype=float, default=0.5,
537  doc=("Fraction of background RMS level to use as a "
538  "cutoff for defining the background of the image. "
539  "This is used to initialize the model for each source "
540  "and to set the size of the bounding box for each source "
541  "every `refinementSkip` iteration."))
542  usePsfConvolution = pexConfig.Field(
543  dtype=bool, default=True,
544  doc=("Whether or not to convolve the morphology with the "
545  "PSF in each band or use the same morphology in all bands"))
546  saveTemplates = pexConfig.Field(
547  dtype=bool, default=True,
548  doc="Whether or not to save the SEDs and templates")
549  processSingles = pexConfig.Field(
550  dtype=bool, default=False,
551  doc="Whether or not to process isolated sources in the deblender")
552  badMask = pexConfig.Field(
553  dtype=str, default="BAD,CR,NO_DATA,SAT,SUSPECT",
554  doc="Comma-separated list of mask planes that mark pixels as bad (ignored) by the deblender")
555  # Old deblender parameters used in this implementation (some of which might be removed later)
556 
557  maxNumberOfPeaks = pexConfig.Field(
558  dtype=int, default=0,
559  doc=("Only deblend the brightest maxNumberOfPeaks peaks in the parent"
560  " (<= 0: unlimited)"))
561  maxFootprintArea = pexConfig.Field(
562  dtype=int, default=1000000,
563  doc=("Maximum area for footprints before they are ignored as large; "
564  "non-positive means no threshold applied"))
565  maxFootprintSize = pexConfig.Field(
566  dtype=int, default=0,
567  doc=("Maximum linear dimension for footprints before they are ignored "
568  "as large; non-positive means no threshold applied"))
569  minFootprintAxisRatio = pexConfig.Field(
570  dtype=float, default=0.0,
571  doc=("Minimum axis ratio for footprints before they are ignored "
572  "as large; non-positive means no threshold applied"))
573  notDeblendedMask = pexConfig.Field(
574  dtype=str, default="NOT_DEBLENDED", optional=True,
575  doc="Mask name for footprints not deblended, or None")
576 
577  tinyFootprintSize = pexConfig.RangeField(
578  dtype=int, default=2, min=2, inclusiveMin=True,
579  doc=('Footprints smaller in width or height than this value will '
580  'be ignored; minimum of 2 due to PSF gradient calculation.'))
581  catchFailures = pexConfig.Field(
582  dtype=bool, default=False,
583  doc=("If True, catch exceptions thrown by the deblender, log them, "
584  "and set a flag on the parent, instead of letting them propagate up"))
585  propagateAllPeaks = pexConfig.Field(dtype=bool, default=False,
586  doc=('Guarantee that all peaks produce a child source.'))
587  maskPlanes = pexConfig.ListField(dtype=str, default=["SAT", "INTRP", "NO_DATA"],
588  doc="Mask planes to ignore when performing statistics")
589  maskLimits = pexConfig.DictField(
590  keytype=str,
591  itemtype=float,
592  default={},
593  doc=("Mask planes with the corresponding limit on the fraction of masked pixels. "
594  "Sources violating this limit will not be deblended."),
595  )
596 
597  edgeHandling = pexConfig.ChoiceField(
598  doc='What to do when a peak to be deblended is close to the edge of the image',
599  dtype=str, default='noclip',
600  allowed={
601  'clip': 'Clip the template at the edge AND the mirror of the edge.',
602  'ramp': 'Ramp down flux at the image edge by the PSF',
603  'noclip': 'Ignore the edge when building the symmetric template.',
604  }
605  )
606 
607  medianSmoothTemplate = pexConfig.Field(dtype=bool, default=False,
608  doc="Apply a smoothing filter to all of the template images")
609  medianFilterHalfsize = pexConfig.Field(dtype=float, default=2,
610  doc=('Half size of the median smoothing filter'))
611  clipFootprintToNonzero = pexConfig.Field(dtype=bool, default=False,
612  doc=("Clip non-zero spans in the footprints"))
613 
614  conserveFlux = pexConfig.Field(dtype=bool, default=True,
615  doc=("Reapportion flux to the footprints so that flux is conserved"))
616  weightTemplates = pexConfig.Field(
617  dtype=bool, default=False,
618  doc=("If true, a least-squares fit of the templates will be done to the "
619  "full image. The templates will be re-weighted based on this fit."))
620  strayFluxToPointSources = pexConfig.ChoiceField(
621  doc='When the deblender should attribute stray flux to point sources',
622  dtype=str, default='necessary',
623  allowed={
624  'necessary': 'When there is not an extended object in the footprint',
625  'always': 'Always',
626  'never': ('Never; stray flux will not be attributed to any deblended child '
627  'if the deblender thinks all peaks look like point sources'),
628  }
629  )
630 
631  assignStrayFlux = pexConfig.Field(dtype=bool, default=True,
632  doc='Assign stray flux (not claimed by any child in the deblender) '
633  'to deblend children.')
634 
635  strayFluxRule = pexConfig.ChoiceField(
636  doc='How to split flux among peaks',
637  dtype=str, default='trim',
638  allowed={
639  'r-to-peak': '~ 1/(1+R^2) to the peak',
640  'r-to-footprint': ('~ 1/(1+R^2) to the closest pixel in the footprint. '
641  'CAUTION: this can be computationally expensive on large footprints!'),
642  'nearest-footprint': ('Assign 100% to the nearest footprint (using L-1 norm aka '
643  'Manhattan distance)'),
644  'trim': ('Shrink the parent footprint to pixels that are not assigned to children')
645  }
646  )
647 
648  clipStrayFluxFraction = pexConfig.Field(dtype=float, default=0.001,
649  doc=('When splitting stray flux, clip fractions below '
650  'this value to zero.'))
651  getTemplateSum = pexConfig.Field(dtype=bool, default=False,
652  doc=("As part of the flux calculation, the sum of the templates is "
653  "calculated. If 'getTemplateSum==True' then the sum of the "
654  "templates is stored in the result (a 'PerFootprint')."))
655 
656 
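# Example (not part of the original file): a sketch of adjusting the multiband
# deblender constraints and penalties. The values are illustrative assumptions;
# see the field documentation above for their meaning.
#
#     config = MultibandDeblendConfig()
#     config.constraints = "M,+"   # keep monotonicity and positivity; drop symmetry and SED normalization
#     config.l1Thresh = 1e-2       # enable an L1 sparsity penalty
#     config.maxIter = 300
#     config.saveTemplates = True
#     config.conserveFlux = True
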
657 class MultibandDeblendTask(pipeBase.Task):
658  """MultibandDeblendTask
659 
660  Split blended sources into individual sources.
661 
662  This task modifies the input SourceCatalog in-place and returns per-band flux-conserved and/or template catalogs (see ``run``).
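
 A minimal usage sketch (illustrative; the MultibandExposure, the schema, and the
 merged detection catalog are assumed to come from upstream detection and merging):

     deblendTask = MultibandDeblendTask(schema=schema)
     fluxCatalogs, templateCatalogs = deblendTask.run(mExposure, mergedSources)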
663  """
664  ConfigClass = MultibandDeblendConfig
665  _DefaultName = "multibandDeblend"
666 
667  def __init__(self, schema, peakSchema=None, **kwargs):
668  """Create the task, adding necessary fields to the given schema.
669 
670  Parameters
671  ----------
672  schema: `lsst.afw.table.schema.schema.Schema`
673  Schema object for measurement fields; will be modified in-place.
674  peakSchema: `lsst.afw.table.schema.schema.Schema`
675  Schema of Footprint Peaks that will be passed to the deblender.
676  Any fields beyond the PeakTable minimal schema will be transferred
677  to the main source Schema. If None, no fields will be transferred
678  from the Peaks.
679  filters: list of str
680  Names of the filters used for the exposures. This is needed to store the SED as a field.
681  **kwargs
682  Passed to Task.__init__.
683  """
684  from lsst.meas.deblender import plugins
685 
686  pipeBase.Task.__init__(self, **kwargs)
687  if not self.config.conserveFlux and not self.config.saveTemplates:
688  raise ValueError("Either `conserveFlux` or `saveTemplates` must be True")
689 
690  peakMinimalSchema = afwDet.PeakTable.makeMinimalSchema()
691  if peakSchema is None:
692  # In this case, the peakSchemaMapper will transfer nothing, but we'll still have one
693  # to simplify downstream code
694  self.peakSchemaMapper = afwTable.SchemaMapper(peakMinimalSchema, schema)
695  else:
696  self.peakSchemaMapper = afwTable.SchemaMapper(peakSchema, schema)
697  for item in peakSchema:
698  if item.key not in peakMinimalSchema:
699  self.peakSchemaMapper.addMapping(item.key, item.field)
700  # Because SchemaMapper makes a copy of the output schema you give its ctor, it isn't
701  # updating this Schema in place. That's probably a design flaw, but in the meantime,
702  # we'll keep that schema in sync with the peakSchemaMapper.getOutputSchema() manually,
703  # by adding the same fields to both.
704  schema.addField(item.field)
705  assert schema == self.peakSchemaMapper.getOutputSchema(), "Logic bug mapping schemas"
706  self._addSchemaKeys(schema)
707  self.schema = schema
708 
709  # Create the plugins for multiband deblending using the Config options
710 
711  # Basic deblender configuration
712  config = scarlet.config.Config(
713  center_min_dist=self.config.minTranslation,
714  edge_flux_thresh=self.config.edgeFluxThresh,
715  exact_lipschitz=self.config.exactLipschitz,
716  refine_skip=self.config.refinementSkip,
717  slack=self.config.stepSlack,
718  )
719  if self.config.translationMethod != "default":
720  err = "Currently the only supported translationMethod is 'default', you entered '{0}'"
721  raise NotImplementedError(err.format(self.config.translationMethod))
722 
723  # If the default constraints are not used, set the constraints for
724  # all of the sources
725  constraints = None
726  _constraints = self.config.constraints.split(",")
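 # Note: np.isnan returns a numpy bool scalar here, so the bitwise ``~`` below acts as a logical negation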
727  if (sorted(_constraints) != ['+', '1', 'M', 'S'] or
728  ~np.isnan(self.config.l0Thresh) or
729  ~np.isnan(self.config.l1Thresh)):
730  constraintDict = {
731  "+": scarlet.constraint.PositivityConstraint,
732  "1": scarlet.constraint.SimpleConstraint,
733  "M": scarlet.constraint.DirectMonotonicityConstraint(use_nearest=False),
734  "S": scarlet.constraint.DirectSymmetryConstraint(sigma=self.config.symmetryThresh)
735  }
736  for c in _constraints:
737  if constraints is None:
738  constraints = [constraintDict[c]]
739  else:
740  constraints += [constraintDict[c]]
741  if constraints is None:
742  constraints = scarlet.constraint.MinimalConstraint()
743  if ~np.isnan(self.config.l0Thresh):
744  constraints += [scarlet.constraint.L0Constraint(self.config.l0Thresh)]
745  if ~np.isnan(self.config.l1Thresh):
746  constraints += [scarlet.constraint.L1Constraint(self.config.l1Thresh)]
747  if ~np.isnan(self.config.tvxThresh):
748  constraints += [scarlet.constraint.TVxConstraint(self.config.tvxThresh)]
749  if ~np.isnan(self.config.tvyThresh):
750  constraints += [scarlet.constraint.TVyConstraint(self.config.tvyThresh)]
751 
752  multiband_plugin = plugins.DeblenderPlugin(
753  plugins.buildMultibandTemplates,
754  useWeights=self.config.useWeights,
755  usePsf=self.config.usePsfConvolution,
756  constraints=constraints,
757  config=config,
758  maxIter=self.config.maxIter,
759  bgScale=self.config.bgScale,
760  relativeError=self.config.relativeError,
761  badMask=self.config.badMask.split(","),
762  )
763  self.plugins = [multiband_plugin]
764 
765  # Plugins from the old deblender for post-template processing
766  # (see lsst.meas_deblender.baseline.deblend)
767  if self.config.edgeHandling == 'ramp':
768  self.plugins.append(plugins.DeblenderPlugin(plugins.rampFluxAtEdge, patchEdges=False))
769  if self.config.medianSmoothTemplate:
770  self.plugins.append(plugins.DeblenderPlugin(
771  plugins.medianSmoothTemplates,
772  medianFilterHalfsize=self.config.medianFilterHalfsize))
773  if self.config.clipFootprintToNonzero:
774  self.plugins.append(plugins.DeblenderPlugin(plugins.clipFootprintsToNonzero))
775  if self.config.conserveFlux:
776  if self.config.weightTemplates:
777  self.plugins.append(plugins.DeblenderPlugin(plugins.weightTemplates))
778  self.plugins.append(plugins.DeblenderPlugin(
779  plugins.apportionFlux,
780  clipStrayFluxFraction=self.config.clipStrayFluxFraction,
781  assignStrayFlux=self.config.assignStrayFlux,
782  strayFluxAssignment=self.config.strayFluxRule,
783  strayFluxToPointSources=self.config.strayFluxToPointSources,
784  getTemplateSum=self.config.getTemplateSum))
785 
786  def _addSchemaKeys(self, schema):
787  """Add deblender specific keys to the schema
788  """
789  self.runtimeKey = schema.addField('runtime', type=np.float32, doc='runtime in ms')
790  # Keys from old Deblender that might be kept in the new deblender
791  self.nChildKey = schema.addField('deblend_nChild', type=np.int32,
792  doc='Number of children this object has (defaults to 0)')
793  self.psfKey = schema.addField('deblend_deblendedAsPsf', type='Flag',
794  doc='Deblender thought this source looked like a PSF')
795  self.tooManyPeaksKey = schema.addField('deblend_tooManyPeaks', type='Flag',
796  doc='Source had too many peaks; '
797  'only the brightest were included')
798  self.tooBigKey = schema.addField('deblend_parentTooBig', type='Flag',
799  doc='Parent footprint covered too many pixels')
800  self.maskedKey = schema.addField('deblend_masked', type='Flag',
801  doc='Parent footprint was predominantly masked')
802  self.deblendFailedKey = schema.addField('deblend_failed', type='Flag',
803  doc="Deblending failed on source")
804 
805  self.deblendSkippedKey = schema.addField('deblend_skipped', type='Flag',
806  doc="Deblender skipped this source")
807 
808  # Keys from the old Deblender that some measurement tasks require.
809  # TODO: Remove these when the old deblender is removed
810  self.psfCenterKey = afwTable.Point2DKey.addFields(schema, 'deblend_psfCenter',
811  'If deblended-as-psf, the PSF centroid', "pixel")
812  self.psfFluxKey = schema.addField('deblend_psfFlux', type='D',
813  doc='If deblended-as-psf, the PSF flux')
814  self.deblendRampedTemplateKey = schema.addField(
815  'deblend_rampedTemplate', type='Flag',
816  doc=('This source was near an image edge and the deblender used '
817  '"ramp" edge-handling.'))
818 
819  self.deblendPatchedTemplateKey = schema.addField(
820  'deblend_patchedTemplate', type='Flag',
821  doc=('This source was near an image edge and the deblender used '
822  '"patched" edge-handling.'))
823 
824  self.hasStrayFluxKey = schema.addField(
825  'deblend_hasStrayFlux', type='Flag',
826  doc=('This source was assigned some stray flux'))
827 
828  self.log.trace('Added keys to schema: %s', ", ".join(str(x) for x in (
829  self.nChildKey, self.psfKey, self.psfCenterKey, self.psfFluxKey,
830  self.tooManyPeaksKey, self.tooBigKey)))
831 
832  @pipeBase.timeMethod
833  def run(self, mExposure, mergedSources):
834  """Get the psf from each exposure and then run deblend().
835 
836  Parameters
837  ----------
838  mExposure: `MultibandExposure`
839  The exposures should be co-added images of the same
840  shape and region of the sky.
841  mergedSources: `SourceCatalog`
842  The merged `SourceCatalog` that contains parent footprints
843  to (potentially) deblend.
844 
845  Returns
846  -------
847  fluxCatalogs: dict or None
848  Keys are the names of the filters and the values are
849  `lsst.afw.table.source.source.SourceCatalog`'s.
850  These are the flux-conserved catalogs with heavy footprints with
851  the image data weighted by the multiband templates.
852  If `self.config.conserveFlux` is `False`, then this item will be None
853  templateCatalogs: dict or None
854  Keys are the names of the filters and the values are
855  `lsst.afw.table.source.source.SourceCatalog`'s.
856  These are catalogs with heavy footprints that are the templates
857  created by the multiband templates.
858  If `self.config.saveTemplates` is `False`, then this item will be None
859  """
860  psfs = {f: mExposure[f].getPsf() for f in mExposure.filters}
861  return self.deblend(mExposure, mergedSources, psfs)
862 
863  def _getPsfFwhm(self, psf, bbox):
864  return psf.computeShape().getDeterminantRadius() * 2.35
865 
866  def _addChild(self, parentId, peak, sources, heavy):
867  """Add a child to a catalog
868 
869  This creates a new child in the source catalog,
870  assigning it a parent id, adding a footprint,
871  and setting all appropriate flags based on the
872  deblender result.
873  """
874  assert len(heavy.getPeaks()) == 1
875  src = sources.addNew()
876  src.assign(heavy.getPeaks()[0], self.peakSchemaMapper)
877  src.setParent(parentId)
878  src.setFootprint(heavy)
879  src.set(self.psfKey, peak.deblendedAsPsf)
880  src.set(self.hasStrayFluxKey, peak.strayFlux is not None)
881  src.set(self.deblendRampedTemplateKey, peak.hasRampedTemplate)
882  src.set(self.deblendPatchedTemplateKey, peak.patched)
883  src.set(self.runtimeKey, 0)
884  return src
885 
886  @pipeBase.timeMethod
887  def deblend(self, mExposure, sources, psfs):
888  """Deblend a data cube of multiband images
889 
890  Parameters
891  ----------
892  mExposure: `MultibandExposure`
893  The exposures should be co-added images of the same
894  shape and region of the sky.
895  sources: `SourceCatalog`
896  The merged `SourceCatalog` that contains parent footprints
897  to (potentially) deblend.
898  psfs: dict
899  Keys are the names of the filters
900  (should be the same as `mExposure.filters`)
901  and the values are the PSFs in each band.
902 
903  Returns
904  -------
905  fluxCatalogs: dict or None
906  Keys are the names of the filters and the values are
907  `lsst.afw.table.source.source.SourceCatalog`'s.
908  These are the flux-conserved catalogs with heavy footprints with
909  the image data weighted by the multiband templates.
910  If `self.config.conserveFlux` is `False`, then this item will be None
911  templateCatalogs: dict or None
912  Keys are the names of the filters and the values are
913  `lsst.afw.table.source.source.SourceCatalog`'s.
914  These are catalogs with heavy footprints that are the templates
915  created by the multiband templates.
916  If `self.config.saveTemplates` is `False`, then this item will be None
917  """
918  from lsst.meas.deblender.baseline import newDeblend
919 
920  if tuple(psfs.keys()) != mExposure.filters:
921  msg = "PSF keys must be the same as mExposure.filters ({0}), got {1}"
922  raise ValueError(msg.format(mExposure.filters, psfs.keys()))
923 
924  filters = mExposure.filters
925  mMaskedImage = afwImage.MultibandMaskedImage(filters=mExposure.filters, image=mExposure.image,
926  mask=mExposure.mask, variance=mExposure.variance)
927  self.log.info("Deblending {0} sources in {1} exposures".format(len(sources), len(mExposure)))
928 
929  # find the median stdev in each image
930  sigmas = {}
931  for f in filters:
932  exposure = mExposure[f]
933  mi = exposure.getMaskedImage()
934  statsCtrl = afwMath.StatisticsControl()
935  statsCtrl.setAndMask(mi.getMask().getPlaneBitMask(self.config.maskPlanes))
936  stats = afwMath.makeStatistics(mi.getVariance(), mi.getMask(), afwMath.MEDIAN, statsCtrl)
937  sigma1 = math.sqrt(stats.getValue(afwMath.MEDIAN))
938  self.log.trace('Exposure {0}, sigma1: {1}'.format(f, sigma1))
939  sigmas[f] = sigma1
940 
941  # Create the output catalogs
942  if self.config.conserveFlux:
943  fluxCatalogs = {}
944  for f in filters:
945  _catalog = afwTable.SourceCatalog(sources.table.clone())
946  _catalog.extend(sources)
947  fluxCatalogs[f] = _catalog
948  else:
949  fluxCatalogs = None
950  if self.config.saveTemplates:
951  templateCatalogs = {}
952  for f in filters:
953  _catalog = afwTable.SourceCatalog(sources.table.clone())
954  _catalog.extend(sources)
955  templateCatalogs[f] = _catalog
956  else:
957  templateCatalogs = None
958 
959  n0 = len(sources)
960  nparents = 0
961  for pk, src in enumerate(sources):
962  foot = src.getFootprint()
963  logger.info("id: {0}".format(src["id"]))
964  peaks = foot.getPeaks()
965 
966  # Since we use the first peak for the parent object, we should propagate its flags
967  # to the parent source.
968  src.assign(peaks[0], self.peakSchemaMapper)
969 
970  # Block of Skipping conditions
971  if len(peaks) < 2 and not self.config.processSingles:
972  for f in filters:
973  if self.config.saveTemplates:
974  templateCatalogs[f][pk].set(self.runtimeKey, 0)
975  if self.config.conserveFlux:
976  fluxCatalogs[f][pk].set(self.runtimeKey, 0)
977  continue
978  if self.isLargeFootprint(foot):
979  src.set(self.tooBigKey, True)
980  self.skipParent(src, [mi.getMask() for mi in mMaskedImage])
981  self.log.trace('Parent %i: skipping large footprint', int(src.getId()))
982  continue
983  if self.isMasked(foot, exposure.getMaskedImage().getMask()):
984  src.set(self.maskedKey, True)
985  self.skipParent(src, [mi.getMask() for mi in mMaskedImage])
986  self.log.trace('Parent %i: skipping masked footprint', int(src.getId()))
987  continue
988  if 0 < self.config.maxNumberOfPeaks < len(peaks):
989  src.set(self.tooManyPeaksKey, True)
990  msg = 'Parent {0}: Too many peaks, using the first {1} peaks'
991  self.log.trace(msg.format(int(src.getId()), self.config.maxNumberOfPeaks))
992 
993  nparents += 1
994  bbox = foot.getBBox()
995  psf_fwhms = {f: self._getPsfFwhm(psf, bbox) for f, psf in psfs.items()}
996  self.log.trace('Parent %i: deblending %i peaks', int(src.getId()), len(peaks))
997  self.preSingleDeblendHook(mExposure.singles, sources, pk, foot, psfs, psf_fwhms, sigmas)
998  npre = len(sources)
999  # Run the deblender
1000  try:
1001  t0 = time.time()
1002  # Build the parameter lists with the same ordering
1003  images = mMaskedImage[:, bbox]
1004  psf_list = [psfs[f] for f in filters]
1005  fwhm_list = [psf_fwhms[f] for f in filters]
1006  avgNoise = [sigmas[f] for f in filters]
1007 
1008  result = newDeblend(debPlugins=self.plugins,
1009  footprint=foot,
1010  mMaskedImage=images,
1011  psfs=psf_list,
1012  psfFwhms=fwhm_list,
1013  avgNoise=avgNoise,
1014  maxNumberOfPeaks=self.config.maxNumberOfPeaks)
1015  tf = time.time()
1016  runtime = (tf-t0)*1000
1017  if result.failed:
1018  src.set(self.deblendFailedKey, True)
1019  src.set(self.runtimeKey, 0)
1020  continue
1021  except Exception as e:
1022  if self.config.catchFailures:
1023  self.log.warn("Unable to deblend source %d: %s" % (src.getId(), e))
1024  src.set(self.deblendFailedKey, True)
1025  src.set(self.runtimeKey, 0)
1026  import traceback
1027  traceback.print_exc()
1028  continue
1029  else:
1030  raise
1031 
1032  # Add the merged source as a parent in the catalog for each band
1033  templateParents = {}
1034  fluxParents = {}
1035  parentId = src.getId()
1036  for f in filters:
1037  if self.config.saveTemplates:
1038  templateParents[f] = templateCatalogs[f][pk]
1039  templateParents[f].set(self.runtimeKey, runtime)
1040  if self.config.conserveFlux:
1041  fluxParents[f] = fluxCatalogs[f][pk]
1042  fluxParents[f].set(self.runtimeKey, runtime)
1043 
1044  # Add each source to the catalogs in each band
1045  templateSpans = {f: afwGeom.SpanSet() for f in filters}
1046  fluxSpans = {f: afwGeom.SpanSet() for f in filters}
1047  nchild = 0
1048  for j, multiPeak in enumerate(result.peaks):
1049  heavy = {f: peak.getFluxPortion() for f, peak in multiPeak.deblendedPeaks.items()}
1050  no_flux = all([v is None for v in heavy.values()])
1051  skip_peak = all([peak.skip for peak in multiPeak.deblendedPeaks.values()])
1052  if no_flux or skip_peak:
1053  src.set(self.deblendSkippedKey, True)
1054  if not self.config.propagateAllPeaks:
1055  # We don't care
1056  continue
1057  # We need to preserve the peak: make sure we have enough info to create a minimal
1058  # child src
1059  msg = "Peak at {0} failed deblending. Using minimal default info for child."
1060  self.log.trace(msg.format(multiPeak.x, multiPeak.y))
1061 
1062  # copy the full footprint and strip out extra peaks
1063  pfoot = afwDet.Footprint(foot)
1064  peakList = pfoot.getPeaks()
1065  peakList.clear()
1066  pfoot.addPeak(multiPeak.x, multiPeak.y, 0)
1067  zeroMimg = afwImage.MaskedImageF(pfoot.getBBox())
1068  for f in filters:
1069  heavy[f] = afwDet.makeHeavyFootprint(pfoot, zeroMimg)
1070  else:
1071  src.set(self.deblendSkippedKey, False)
1072 
1073  # Add the peak to the source catalog in each band
1074  for f in filters:
1075  if len(heavy[f].getPeaks()) != 1:
1076  err = "Heavy footprint should have a single peak, got {0}"
1077  raise ValueError(err.format(len(heavy[f].getPeaks())))
1078  peak = multiPeak.deblendedPeaks[f]
1079  if self.config.saveTemplates:
1080  cat = templateCatalogs[f]
1081  tfoot = peak.templateFootprint
1082  timg = afwImage.MaskedImageF(peak.templateImage)
1083  tHeavy = afwDet.makeHeavyFootprint(tfoot, timg)
1084  child = self._addChild(parentId, peak, cat, tHeavy)
1085  if parentId == 0:
1086  child.setId(src.getId())
1087  child.set(self.runtimeKey, runtime)
1088  else:
1089  templateSpans[f] = templateSpans[f].union(tHeavy.getSpans())
1090  if self.config.conserveFlux:
1091  cat = fluxCatalogs[f]
1092  child = self._addChild(parentId, peak, cat, heavy[f])
1093  if parentId == 0:
1094  child.setId(src.getId())
1095  child.set(self.runtimeKey, runtime)
1096  else:
1097  fluxSpans[f] = fluxSpans[f].union(heavy[f].getSpans())
1098  nchild += 1
1099 
1100  # Child footprints may extend beyond the full extent of their parent's footprint, which
1101  # causes the replace-by-noise code to fail to reinstate these pixels
1102  # to their original values. The following updates the parent footprint
1103  # in-place to ensure it contains the full union of itself and all of its
1104  # children's footprints.
1105  for f in filters:
1106  if self.config.saveTemplates:
1107  templateParents[f].set(self.nChildKey, nchild)
1108  templateParents[f].getFootprint().setSpans(templateSpans[f])
1109  if self.config.conserveFlux:
1110  fluxParents[f].set(self.nChildKey, nchild)
1111  fluxParents[f].getFootprint().setSpans(fluxSpans[f])
1112 
1113  self.postSingleDeblendHook(mExposure.singles, fluxCatalogs, templateCatalogs,
1114  pk, npre, foot, psfs, psf_fwhms, sigmas, result)
1115 
1116  if fluxCatalogs is not None:
1117  n1 = len(list(fluxCatalogs.values())[0])
1118  else:
1119  n1 = len(list(templateCatalogs.values())[0])
1120  self.log.info('Deblended: of %i sources, %i were deblended, creating %i children, total %i sources'
1121  % (n0, nparents, n1-n0, n1))
1122  return fluxCatalogs, templateCatalogs
1123 
1124  def preSingleDeblendHook(self, exposures, sources, pk, fp, psfs, psf_fwhms, sigmas):
1125  pass
1126 
1127  def postSingleDeblendHook(self, exposures, fluxCatalogs, templateCatalogs,
1128  pk, npre, fp, psfs, psf_fwhms, sigmas, result):
1129  pass
1130 
1131  def isLargeFootprint(self, footprint):
1132  """Returns whether a Footprint is large
1133 
1134  'Large' is defined by thresholds on the area, size and axis ratio.
1135  These may be disabled independently by configuring them to be non-positive.
1136 
1137  This is principally intended to get rid of satellite streaks, which the
1138  deblender or other downstream processing can have trouble dealing with
1139  (e.g., multiple large HeavyFootprints can chew up memory).
1140  """
1141  if self.config.maxFootprintArea > 0 and footprint.getArea() > self.config.maxFootprintArea:
1142  return True
1143  if self.config.maxFootprintSize > 0:
1144  bbox = footprint.getBBox()
1145  if max(bbox.getWidth(), bbox.getHeight()) > self.config.maxFootprintSize:
1146  return True
1147  if self.config.minFootprintAxisRatio > 0:
1148  axes = afwEll.Axes(footprint.getShape())
1149  if axes.getB() < self.config.minFootprintAxisRatio*axes.getA():
1150  return True
1151  return False
1152 
1153  def isMasked(self, footprint, mask):
1154  """Returns whether the footprint violates the mask limits"""
1155  size = float(footprint.getArea())
1156  for maskName, limit in self.config.maskLimits.items():
1157  maskVal = mask.getPlaneBitMask(maskName)
1158  unmaskedSpan = footprint.spans.intersectNot(mask, maskVal) # spanset of unmasked pixels
1159  if (size - unmaskedSpan.getArea())/size > limit:
1160  return True
1161  return False
1162 
1163  def skipParent(self, source, masks):
1164  """Indicate that the parent source is not being deblended
1165 
1166  We set the appropriate flags and masks for each exposure.
1167 
1168  Parameters
1169  ----------
1170  source: `lsst.afw.table.source.source.SourceRecord`
1171  The source to flag as skipped
1172  masks: list of `lsst.afw.image.MaskX`
1173  The mask in each band to update with the non-detection
1174  """
1175  fp = source.getFootprint()
1176  source.set(self.deblendSkippedKey, True)
1177  source.set(self.nChildKey, len(fp.getPeaks())) # It would have this many if we deblended them all
1178  if self.config.notDeblendedMask:
1179  for mask in masks:
1180  mask.addMaskPlane(self.config.notDeblendedMask)
1181  fp.spans.setMask(mask, mask.getPlaneBitMask(self.config.notDeblendedMask))