lsst.meas.deblender  16.0-5-g2851537+3
deblend.py
1 #
2 # LSST Data Management System
3 # Copyright 2008-2015 AURA/LSST.
4 #
5 # This product includes software developed by the
6 # LSST Project (http://www.lsst.org/).
7 #
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
12 #
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
17 #
18 # You should have received a copy of the LSST License Statement and
19 # the GNU General Public License along with this program. If not,
20 # see <http://www.lsstcorp.org/LegalNotices/>.
21 #
22 import math
23 import numpy as np
24 import time
25 
26 import lsst.log
27 import lsst.pex.config as pexConfig
28 import lsst.pipe.base as pipeBase
29 import lsst.afw.math as afwMath
30 import lsst.afw.geom as afwGeom
31 import lsst.afw.geom.ellipses as afwEll
32 import lsst.afw.image as afwImage
33 import lsst.afw.detection as afwDet
34 import lsst.afw.table as afwTable
35 
36 logger = lsst.log.Log.getLogger("meas.deblender.deblend")
37 
38 __all__ = 'SourceDeblendConfig', 'SourceDeblendTask', 'MultibandDeblendConfig', 'MultibandDeblendTask'
39 
40 
41 class SourceDeblendConfig(pexConfig.Config):
42 
43  edgeHandling = pexConfig.ChoiceField(
44  doc='What to do when a peak to be deblended is close to the edge of the image',
45  dtype=str, default='ramp',
46  allowed={
47  'clip': 'Clip the template at the edge AND the mirror of the edge.',
48  'ramp': 'Ramp down flux at the image edge by the PSF',
49  'noclip': 'Ignore the edge when building the symmetric template.',
50  }
51  )
52 
53  strayFluxToPointSources = pexConfig.ChoiceField(
54  doc='When the deblender should attribute stray flux to point sources',
55  dtype=str, default='necessary',
56  allowed={
57  'necessary': 'When there is not an extended object in the footprint',
58  'always': 'Always',
59  'never': ('Never; stray flux will not be attributed to any deblended child '
60  'if the deblender thinks all peaks look like point sources'),
61  }
62  )
63 
64  assignStrayFlux = pexConfig.Field(dtype=bool, default=True,
65  doc='Assign stray flux (not claimed by any child in the deblender) '
66  'to deblend children.')
67 
68  strayFluxRule = pexConfig.ChoiceField(
69  doc='How to split flux among peaks',
70  dtype=str, default='trim',
71  allowed={
72  'r-to-peak': '~ 1/(1+R^2) to the peak',
73  'r-to-footprint': ('~ 1/(1+R^2) to the closest pixel in the footprint. '
74  'CAUTION: this can be computationally expensive on large footprints!'),
75  'nearest-footprint': ('Assign 100% to the nearest footprint (using L-1 norm aka '
76  'Manhattan distance)'),
77  'trim': ('Shrink the parent footprint to pixels that are not assigned to children')
78  }
79  )
80 
81  clipStrayFluxFraction = pexConfig.Field(dtype=float, default=0.001,
82  doc=('When splitting stray flux, clip fractions below '
83  'this value to zero.'))
84  psfChisq1 = pexConfig.Field(dtype=float, default=1.5, optional=False,
85  doc=('Chi-squared per DOF cut for deciding a source is '
86  'a PSF during deblending (un-shifted PSF model)'))
87  psfChisq2 = pexConfig.Field(dtype=float, default=1.5, optional=False,
88  doc=('Chi-squared per DOF cut for deciding a source is '
89  'PSF during deblending (shifted PSF model)'))
90  psfChisq2b = pexConfig.Field(dtype=float, default=1.5, optional=False,
91  doc=('Chi-squared per DOF cut for deciding a source is '
92  'a PSF during deblending (shifted PSF model #2)'))
93  maxNumberOfPeaks = pexConfig.Field(dtype=int, default=0,
94  doc=("Only deblend the brightest maxNumberOfPeaks peaks in the parent"
95  " (<= 0: unlimited)"))
96  maxFootprintArea = pexConfig.Field(dtype=int, default=1000000,
97  doc=("Maximum area for footprints before they are ignored as large; "
98  "non-positive means no threshold applied"))
99  maxFootprintSize = pexConfig.Field(dtype=int, default=0,
100  doc=("Maximum linear dimension for footprints before they are ignored "
101  "as large; non-positive means no threshold applied"))
102  minFootprintAxisRatio = pexConfig.Field(dtype=float, default=0.0,
103  doc=("Minimum axis ratio for footprints before they are ignored "
104  "as large; non-positive means no threshold applied"))
105  notDeblendedMask = pexConfig.Field(dtype=str, default="NOT_DEBLENDED", optional=True,
106  doc="Mask name for footprints not deblended, or None")
107 
108  tinyFootprintSize = pexConfig.RangeField(dtype=int, default=2, min=2, inclusiveMin=True,
109  doc=('Footprints smaller in width or height than this value will '
110  'be ignored; minimum of 2 due to PSF gradient calculation.'))
111 
112  propagateAllPeaks = pexConfig.Field(dtype=bool, default=False,
113  doc=('Guarantee that all peaks produce a child source.'))
114  catchFailures = pexConfig.Field(dtype=bool, default=False,
115  doc=("If True, catch exceptions thrown by the deblender, log them, "
116  "and set a flag on the parent, instead of letting them propagate up"))
117  maskPlanes = pexConfig.ListField(dtype=str, default=["SAT", "INTRP", "NO_DATA"],
118  doc="Mask planes to ignore when performing statistics")
119  maskLimits = pexConfig.DictField(
120  keytype=str,
121  itemtype=float,
122  default={},
123  doc=("Mask planes with the corresponding limit on the fraction of masked pixels. "
124  "Sources violating this limit will not be deblended."),
125  )
126  weightTemplates = pexConfig.Field(dtype=bool, default=False,
127  doc=("If true, a least-squares fit of the templates will be done to the "
128  "full image. The templates will be re-weighted based on this fit."))
129  removeDegenerateTemplates = pexConfig.Field(dtype=bool, default=False,
130  doc=("Try to remove similar templates?"))
131  maxTempDotProd = pexConfig.Field(dtype=float, default=0.5,
132  doc=("If the dot product between two templates is larger than this value"
133  ", we consider them to be describing the same object (i.e. they are "
134  "degenerate). If one of the objects has been labeled as a PSF it "
135  "will be removed, otherwise the template with the lowest value will "
136  "be removed."))
137  medianSmoothTemplate = pexConfig.Field(dtype=bool, default=True,
138  doc="Apply a smoothing filter to all of the template images")
139 
140 
146 
147 
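# --- Illustrative sketch (not part of deblend.py; the helper name is hypothetical) ---
# A minimal example of overriding a few of the SourceDeblendConfig fields defined above,
# assuming the usual pex_config workflow; the particular values are for demonstration only.
def _exampleSourceDeblendConfig():
    """Return a SourceDeblendConfig with a few commonly tuned fields overridden."""
    config = SourceDeblendConfig()
    config.maxFootprintArea = 500000    # skip parents covering more than 5e5 pixels
    config.catchFailures = True         # flag and log deblend failures instead of raising
    config.propagateAllPeaks = True     # every surviving peak yields a child record
    config.validate()                   # standard pex_config consistency check
    return config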
148 class SourceDeblendTask(pipeBase.Task):
149  """!
150  \anchor SourceDeblendTask_
151 
152  \brief Split blended sources into individual sources.
153 
154  This task has no return value; it only modifies the SourceCatalog in-place.
155  """
156  ConfigClass = SourceDeblendConfig
157  _DefaultName = "sourceDeblend"
158 
159  def __init__(self, schema, peakSchema=None, **kwargs):
160  """!
161  Create the task, adding necessary fields to the given schema.
162 
163  @param[in,out] schema Schema object for measurement fields; will be modified in-place.
164  @param[in] peakSchema Schema of Footprint Peaks that will be passed to the deblender.
165  Any fields beyond the PeakTable minimal schema will be transferred
166  to the main source Schema. If None, no fields will be transferred
167  from the Peaks.
168  @param[in] **kwargs Passed to Task.__init__.
169  """
170  pipeBase.Task.__init__(self, **kwargs)
171  self.schema = schema
172  peakMinimalSchema = afwDet.PeakTable.makeMinimalSchema()
173  if peakSchema is None:
174  # In this case, the peakSchemaMapper will transfer nothing, but we'll still have one
175  # to simplify downstream code
176  self.peakSchemaMapper = afwTable.SchemaMapper(peakMinimalSchema, schema)
177  else:
178  self.peakSchemaMapper = afwTable.SchemaMapper(peakSchema, schema)
179  for item in peakSchema:
180  if item.key not in peakMinimalSchema:
181  self.peakSchemaMapper.addMapping(item.key, item.field)
182  # Because SchemaMapper makes a copy of the output schema you give its ctor, it isn't
183  # updating this Schema in place. That's probably a design flaw, but in the meantime,
184  # we'll keep that schema in sync with the peakSchemaMapper.getOutputSchema() manually,
185  # by adding the same fields to both.
186  schema.addField(item.field)
187  assert schema == self.peakSchemaMapper.getOutputSchema(), "Logic bug mapping schemas"
188  self.addSchemaKeys(schema)
189 
190  def addSchemaKeys(self, schema):
191  self.nChildKey = schema.addField('deblend_nChild', type=np.int32,
192  doc='Number of children this object has (defaults to 0)')
193  self.psfKey = schema.addField('deblend_deblendedAsPsf', type='Flag',
194  doc='Deblender thought this source looked like a PSF')
195  self.psfCenterKey = afwTable.Point2DKey.addFields(schema, 'deblend_psfCenter',
196  'If deblended-as-psf, the PSF centroid', "pixel")
197  self.psfFluxKey = schema.addField('deblend_psfFlux', type='D',
198  doc='If deblended-as-psf, the PSF flux')
199  self.tooManyPeaksKey = schema.addField('deblend_tooManyPeaks', type='Flag',
200  doc='Source had too many peaks; '
201  'only the brightest were included')
202  self.tooBigKey = schema.addField('deblend_parentTooBig', type='Flag',
203  doc='Parent footprint covered too many pixels')
204  self.maskedKey = schema.addField('deblend_masked', type='Flag',
205  doc='Parent footprint was predominantly masked')
206 
207  if self.config.catchFailures:
208  self.deblendFailedKey = schema.addField('deblend_failed', type='Flag',
209  doc="Deblending failed on source")
210 
211  self.deblendSkippedKey = schema.addField('deblend_skipped', type='Flag',
212  doc="Deblender skipped this source")
213 
214  self.deblendRampedTemplateKey = schema.addField(
215  'deblend_rampedTemplate', type='Flag',
216  doc=('This source was near an image edge and the deblender used '
217  '"ramp" edge-handling.'))
218 
219  self.deblendPatchedTemplateKey = schema.addField(
220  'deblend_patchedTemplate', type='Flag',
221  doc=('This source was near an image edge and the deblender used '
222  '"patched" edge-handling.'))
223 
224  self.hasStrayFluxKey = schema.addField(
225  'deblend_hasStrayFlux', type='Flag',
226  doc=('This source was assigned some stray flux'))
227 
228  self.log.trace('Added keys to schema: %s', ", ".join(str(x) for x in (
229  self.nChildKey, self.psfKey, self.psfCenterKey, self.psfFluxKey,
230  self.tooManyPeaksKey, self.tooBigKey)))
231 
232  @pipeBase.timeMethod
233  def run(self, exposure, sources):
234  """!
235  Get the psf from the provided exposure and then run deblend().
236 
237  @param[in] exposure Exposure to process
238  @param[in,out] sources SourceCatalog containing sources detected on this exposure.
239 
240  @return None
241  """
242  psf = exposure.getPsf()
243  assert sources.getSchema() == self.schema
244  self.deblend(exposure, sources, psf)
245 
246  def _getPsfFwhm(self, psf, bbox):
247  # It should be easier to get a PSF's fwhm;
248  # https://dev.lsstcorp.org/trac/ticket/3030
249  return psf.computeShape().getDeterminantRadius() * 2.35
250 
251  @pipeBase.timeMethod
252  def deblend(self, exposure, srcs, psf):
253  """!
254  Deblend.
255 
256  @param[in] exposure Exposure to process
257  @param[in,out] srcs SourceCatalog containing sources detected on this exposure.
258  @param[in] psf PSF
259 
260  @return None
261  """
262  self.log.info("Deblending %d sources" % len(srcs))
263 
264  from lsst.meas.deblender.baseline import deblend
265 
266  # find the median stdev in the image...
267  mi = exposure.getMaskedImage()
268  statsCtrl = afwMath.StatisticsControl()
269  statsCtrl.setAndMask(mi.getMask().getPlaneBitMask(self.config.maskPlanes))
270  stats = afwMath.makeStatistics(mi.getVariance(), mi.getMask(), afwMath.MEDIAN, statsCtrl)
271  sigma1 = math.sqrt(stats.getValue(afwMath.MEDIAN))
272  self.log.trace('sigma1: %g', sigma1)
273 
274  n0 = len(srcs)
275  nparents = 0
276  for i, src in enumerate(srcs):
277  #t0 = time.clock()
278 
279  fp = src.getFootprint()
280  pks = fp.getPeaks()
281 
282  # Since we use the first peak for the parent object, we should propagate its flags
283  # to the parent source.
284  src.assign(pks[0], self.peakSchemaMapper)
285 
286  if len(pks) < 2:
287  continue
288 
289  if self.isLargeFootprint(fp):
290  src.set(self.tooBigKey, True)
291  self.skipParent(src, mi.getMask())
292  self.log.trace('Parent %i: skipping large footprint', int(src.getId()))
293  continue
294  if self.isMasked(fp, exposure.getMaskedImage().getMask()):
295  src.set(self.maskedKey, True)
296  self.skipParent(src, mi.getMask())
297  self.log.trace('Parent %i: skipping masked footprint', int(src.getId()))
298  continue
299 
300  nparents += 1
301  bb = fp.getBBox()
302  psf_fwhm = self._getPsfFwhm(psf, bb)
303 
304  self.log.trace('Parent %i: deblending %i peaks', int(src.getId()), len(pks))
305 
306  self.preSingleDeblendHook(exposure, srcs, i, fp, psf, psf_fwhm, sigma1)
307  npre = len(srcs)
308 
309  # This should really be set in deblend, but deblend doesn't have access to the src
310  src.set(self.tooManyPeaksKey, len(fp.getPeaks()) > self.config.maxNumberOfPeaks)
311 
312  try:
313  res = deblend(
314  fp, mi, psf, psf_fwhm, sigma1=sigma1,
315  psfChisqCut1=self.config.psfChisq1,
316  psfChisqCut2=self.config.psfChisq2,
317  psfChisqCut2b=self.config.psfChisq2b,
318  maxNumberOfPeaks=self.config.maxNumberOfPeaks,
319  strayFluxToPointSources=self.config.strayFluxToPointSources,
320  assignStrayFlux=self.config.assignStrayFlux,
321  strayFluxAssignment=self.config.strayFluxRule,
322  rampFluxAtEdge=(self.config.edgeHandling == 'ramp'),
323  patchEdges=(self.config.edgeHandling == 'noclip'),
324  tinyFootprintSize=self.config.tinyFootprintSize,
325  clipStrayFluxFraction=self.config.clipStrayFluxFraction,
326  weightTemplates=self.config.weightTemplates,
327  removeDegenerateTemplates=self.config.removeDegenerateTemplates,
328  maxTempDotProd=self.config.maxTempDotProd,
329  medianSmoothTemplate=self.config.medianSmoothTemplate
330  )
331  if self.config.catchFailures:
332  src.set(self.deblendFailedKey, False)
333  except Exception as e:
334  if self.config.catchFailures:
335  self.log.warn("Unable to deblend source %d: %s" % (src.getId(), e))
336  src.set(self.deblendFailedKey, True)
337  import traceback
338  traceback.print_exc()
339  continue
340  else:
341  raise
342 
343  kids = []
344  nchild = 0
345  for j, peak in enumerate(res.deblendedParents[0].peaks):
346  heavy = peak.getFluxPortion()
347  if heavy is None or peak.skip:
348  src.set(self.deblendSkippedKey, True)
349  if not self.config.propagateAllPeaks:
350  # Don't care
351  continue
352  # We need to preserve the peak: make sure we have enough info to create a minimal
353  # child src
354  self.log.trace("Peak at (%i,%i) failed. Using minimal default info for child.",
355  pks[j].getIx(), pks[j].getIy())
356  if heavy is None:
357  # copy the full footprint and strip out extra peaks
358  foot = afwDet.Footprint(src.getFootprint())
359  peakList = foot.getPeaks()
360  peakList.clear()
361  peakList.append(peak.peak)
362  zeroMimg = afwImage.MaskedImageF(foot.getBBox())
363  heavy = afwDet.makeHeavyFootprint(foot, zeroMimg)
364  if peak.deblendedAsPsf:
365  if peak.psfFitFlux is None:
366  peak.psfFitFlux = 0.0
367  if peak.psfFitCenter is None:
368  peak.psfFitCenter = (peak.peak.getIx(), peak.peak.getIy())
369 
370  assert(len(heavy.getPeaks()) == 1)
371 
372  src.set(self.deblendSkippedKey, False)
373  child = srcs.addNew()
374  nchild += 1
375  child.assign(heavy.getPeaks()[0], self.peakSchemaMapper)
376  child.setParent(src.getId())
377  child.setFootprint(heavy)
378  child.set(self.psfKey, peak.deblendedAsPsf)
379  child.set(self.hasStrayFluxKey, peak.strayFlux is not None)
380  if peak.deblendedAsPsf:
381  (cx, cy) = peak.psfFitCenter
382  child.set(self.psfCenterKey, afwGeom.Point2D(cx, cy))
383  child.set(self.psfFluxKey, peak.psfFitFlux)
384  child.set(self.deblendRampedTemplateKey, peak.hasRampedTemplate)
385  child.set(self.deblendPatchedTemplateKey, peak.patched)
386  kids.append(child)
387 
388  # Child footprints may extend beyond the full extent of their parent's which
389  # results in a failure of the replace-by-noise code to reinstate these pixels
390  # to their original values. The following updates the parent footprint
391  # in-place to ensure it contains the full union of itself and all of its
392  # children's footprints.
393  spans = src.getFootprint().spans
394  for child in kids:
395  spans = spans.union(child.getFootprint().spans)
396  src.getFootprint().setSpans(spans)
397 
398  src.set(self.nChildKey, nchild)
399 
400  self.postSingleDeblendHook(exposure, srcs, i, npre, kids, fp, psf, psf_fwhm, sigma1, res)
401  #print 'Deblending parent id', src.getId(), 'took', time.clock() - t0
402 
403  n1 = len(srcs)
404  self.log.info('Deblended: of %i sources, %i were deblended, creating %i children, total %i sources'
405  % (n0, nparents, n1-n0, n1))
406 
407  def preSingleDeblendHook(self, exposure, srcs, i, fp, psf, psf_fwhm, sigma1):
408  pass
409 
410  def postSingleDeblendHook(self, exposure, srcs, i, npre, kids, fp, psf, psf_fwhm, sigma1, res):
411  pass
412 
413  def isLargeFootprint(self, footprint):
414  """Returns whether a Footprint is large
415 
416  'Large' is defined by thresholds on the area, size and axis ratio.
417  These may be disabled independently by configuring them to be non-positive.
418 
419  This is principally intended to get rid of satellite streaks, which the
420  deblender or other downstream processing can have trouble dealing with
421  (e.g., multiple large HeavyFootprints can chew up memory).
422  """
423  if self.config.maxFootprintArea > 0 and footprint.getArea() > self.config.maxFootprintArea:
424  return True
425  if self.config.maxFootprintSize > 0:
426  bbox = footprint.getBBox()
427  if max(bbox.getWidth(), bbox.getHeight()) > self.config.maxFootprintSize:
428  return True
429  if self.config.minFootprintAxisRatio > 0:
430  axes = afwEll.Axes(footprint.getShape())
431  if axes.getB() < self.config.minFootprintAxisRatio*axes.getA():
432  return True
433  return False
434 
435  def isMasked(self, footprint, mask):
436  """Returns whether the footprint violates the mask limits"""
437  size = float(footprint.getArea())
438  for maskName, limit in self.config.maskLimits.items():
439  maskVal = mask.getPlaneBitMask(maskName)
440  unmaskedSpan = footprint.spans.intersectNot(mask, maskVal) # spanset of unmasked pixels
441  if (size - unmaskedSpan.getArea())/size > limit:
442  return True
443  return False
444 
445  def skipParent(self, source, mask):
446  """Indicate that the parent source is not being deblended
447 
448  We set the appropriate flags and mask.
449 
450  @param source The source to flag as skipped
451  @param mask The mask to update
452  """
453  fp = source.getFootprint()
454  source.set(self.deblendSkippedKey, True)
455  source.set(self.nChildKey, len(fp.getPeaks())) # It would have this many if we deblended them all
456  if self.config.notDeblendedMask:
457  mask.addMaskPlane(self.config.notDeblendedMask)
458  fp.spans.setMask(mask, mask.getPlaneBitMask(self.config.notDeblendedMask))
459 
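# --- Illustrative sketch (not part of deblend.py; the helper name is hypothetical) ---
# The intended calling pattern for SourceDeblendTask, as documented in __init__ and run()
# above. The detection step that fills `sources` is schematic and assumed to happen upstream.
def _exampleRunSourceDeblend(exposure):
    """Minimal sketch: build a catalog sharing the task's schema, then deblend it in place."""
    schema = afwTable.SourceTable.makeMinimalSchema()
    # Construct the task first: __init__ adds its deblend_* fields to `schema`.
    deblendTask = SourceDeblendTask(schema=schema)
    table = afwTable.SourceTable.make(schema)
    sources = afwTable.SourceCatalog(table)
    # ... a detection task would populate `sources` with parent records and footprints here ...
    deblendTask.run(exposure, sources)  # no return value; children are appended to `sources`
    return sources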
460 class MultibandDeblendConfig(pexConfig.Config):
461  """MultibandDeblendConfig
462 
463  Configuration for the multiband deblender.
464  The parameters are organized by the parameter types, which are
465  - Stopping Criteria: Used to determine if the fit has converged
466  - Position Fitting Criteria: Used to fit the positions of the peaks
467  - Constraints: Used to apply constraints to the peaks and their components
468  - Other: Parameters that don't fit into the above categories
469  """
470  # Stopping Criteria
471  maxIter = pexConfig.Field(dtype=int, default=200,
472  doc=("Maximum number of iterations to deblend a single parent"))
473  relativeError = pexConfig.Field(dtype=float, default=1e-3,
474  doc=("Relative error to use when determining stopping criteria"))
475 
476  # Blend Configuration options
477  minTranslation = pexConfig.Field(dtype=float, default=1e-3,
478  doc=("A peak must be updated by at least 'minTranslation' (pixels) "
479  "or no update is performed. "
480  "This field is ignored if fitPositions is False."))
481  refinementSkip = pexConfig.Field(dtype=int, default=10,
482  doc=("If fitPositions is True, the positions and box sizes are "
483  "updated on every 'refinementSkip' iterations."))
484  translationMethod = pexConfig.Field(dtype=str, default="default",
485  doc=("Method to use for fitting translations. "
486  "Currently 'default' is the only available option, "
487  "which performs a linear fit, but it is possible that we "
488  "will use galsim or some other method as a future option"))
489  edgeFluxThresh = pexConfig.Field(dtype=float, default=1.0,
490  doc=("Boxes are resized when the flux at an edge is "
491  "> edgeFluxThresh * background RMS"))
492  exactLipschitz = pexConfig.Field(dtype=bool, default=False,
493  doc=("Calculate exact Lipschitz constant in every step "
494  "(True) or only calculate the approximate "
495  "Lipschitz constant with significant changes in A,S "
496  "(False)"))
497  stepSlack = pexConfig.Field(dtype=float, default=0.2,
498  doc=("A fractional measure of how much a value (like the exactLipschitz) "
499  "can change before it needs to be recalculated. "
500  "This must be between 0 and 1."))
501 
502  # Constraints
503  constraints = pexConfig.Field(dtype=str, default="1,+,S,M",
504  doc=("List of constraints to use for each object "
505  "(order does not matter). "
506  "Current options are all used by default:\n"
507  "S: symmetry\n"
508  "M: monotonicity\n"
509  "1: normalized SED to unity\n"
510  "+: non-negative morphology"))
511  symmetryThresh = pexConfig.Field(dtype=float, default=1.0,
512  doc=("Strictness of symmetry, from "
513  "0 (no symmetry enforced) to "
514  "1 (perfect symmetry required). "
515  "If 'S' is not in `constraints`, this argument is ignored"))
516  l0Thresh = pexConfig.Field(dtype=float, default=np.nan,
517  doc=("L0 threshold. NaN results in no L0 penalty."))
518  l1Thresh = pexConfig.Field(dtype=float, default=np.nan,
519  doc=("L1 threshold. NaN results in no L1 penalty."))
520  tvxThresh = pexConfig.Field(dtype=float, default=np.nan,
521  doc=("Threshold for TV (total variation) constraint in the x-direction. "
522  "NaN results in no TVx penalty."))
523  tvyThresh = pexConfig.Field(dtype=float, default=np.nan,
524  doc=("Threshold for TV (total variation) constraint in the y-direction. "
525  "NaN results in no TVy penalty."))
526 
527  # Other scarlet parameters
528  useWeights = pexConfig.Field(dtype=bool, default=False, doc="Use inverse variance as deblender weights")
529  bgScale = pexConfig.Field(dtype=float, default=0.5,
530  doc=("Fraction of background RMS level to use as a "
531  "cutoff for defining the background of the image. "
532  "This is used to initialize the model for each source "
533  "and to set the size of the bounding box for each source "
534  "every `refinementSkip` iteration."))
535  usePsfConvolution = pexConfig.Field(dtype=bool, default=True,
536  doc=("Whether or not to convolve the morphology with the "
537  "PSF in each band or use the same morphology "
538  "in all bands"))
539  saveTemplates = pexConfig.Field(dtype=bool, default=True,
540  doc="Whether or not to save the SEDs and templates")
541  processSingles = pexConfig.Field(dtype=bool, default=False,
542  doc="Whether or not to process isolated sources in the deblender")
543  badMask = pexConfig.Field(dtype=str, default="BAD,CR,NO_DATA,SAT,SUSPECT",
544  doc="Comma-separated list of mask plane names treated as bad pixels by the deblender")
545  # Old deblender parameters used in this implementation (some of which might be removed later)
546 
547  maxNumberOfPeaks = pexConfig.Field(dtype=int, default=0,
548  doc=("Only deblend the brightest maxNumberOfPeaks peaks in the parent"
549  " (<= 0: unlimited)"))
550  maxFootprintArea = pexConfig.Field(dtype=int, default=1000000,
551  doc=("Maximum area for footprints before they are ignored as large; "
552  "non-positive means no threshold applied"))
553  maxFootprintSize = pexConfig.Field(dtype=int, default=0,
554  doc=("Maximum linear dimension for footprints before they are ignored "
555  "as large; non-positive means no threshold applied"))
556  minFootprintAxisRatio = pexConfig.Field(dtype=float, default=0.0,
557  doc=("Minimum axis ratio for footprints before they are ignored "
558  "as large; non-positive means no threshold applied"))
559  notDeblendedMask = pexConfig.Field(dtype=str, default="NOT_DEBLENDED", optional=True,
560  doc="Mask name for footprints not deblended, or None")
561 
562  tinyFootprintSize = pexConfig.RangeField(dtype=int, default=2, min=2, inclusiveMin=True,
563  doc=('Footprints smaller in width or height than this value will '
564  'be ignored; minimum of 2 due to PSF gradient calculation.'))
565  catchFailures = pexConfig.Field(dtype=bool, default=False,
566  doc=("If True, catch exceptions thrown by the deblender, log them, "
567  "and set a flag on the parent, instead of letting them propagate up"))
568  propagateAllPeaks = pexConfig.Field(dtype=bool, default=False,
569  doc=('Guarantee that all peaks produce a child source.'))
570  maskPlanes = pexConfig.ListField(dtype=str, default=["SAT", "INTRP", "NO_DATA"],
571  doc="Mask planes to ignore when performing statistics")
572  maskLimits = pexConfig.DictField(
573  keytype=str,
574  itemtype=float,
575  default={},
576  doc=("Mask planes with the corresponding limit on the fraction of masked pixels. "
577  "Sources violating this limit will not be deblended."),
578  )
579 
580  edgeHandling = pexConfig.ChoiceField(
581  doc='What to do when a peak to be deblended is close to the edge of the image',
582  dtype=str, default='ramp',
583  allowed={
584  'clip': 'Clip the template at the edge AND the mirror of the edge.',
585  'ramp': 'Ramp down flux at the image edge by the PSF',
586  'noclip': 'Ignore the edge when building the symmetric template.',
587  }
588  )
589 
590  medianSmoothTemplate = pexConfig.Field(dtype=bool, default=False,
591  doc="Apply a smoothing filter to all of the template images")
592  medianFilterHalfsize = pexConfig.Field(dtype=float, default=2,
593  doc=('Half size of the median smoothing filter'))
594  clipFootprintToNonzero = pexConfig.Field(dtype=bool, default=True,
595  doc=("Clip non-zero spans in the footprints"))
596 
597  conserveFlux = pexConfig.Field(dtype=bool, default=False,
598  doc=("Reapportion flux to the footprints so that flux is conserved"))
599  weightTemplates = pexConfig.Field(dtype=bool, default=False,
600  doc=("If true, a least-squares fit of the templates will be done to the "
601  "full image. The templates will be re-weighted based on this fit."))
602  strayFluxToPointSources = pexConfig.ChoiceField(
603  doc='When the deblender should attribute stray flux to point sources',
604  dtype=str, default='necessary',
605  allowed={
606  'necessary': 'When there is not an extended object in the footprint',
607  'always': 'Always',
608  'never': ('Never; stray flux will not be attributed to any deblended child '
609  'if the deblender thinks all peaks look like point sources'),
610  }
611  )
612 
613  assignStrayFlux = pexConfig.Field(dtype=bool, default=True,
614  doc='Assign stray flux (not claimed by any child in the deblender) '
615  'to deblend children.')
616 
617  strayFluxRule = pexConfig.ChoiceField(
618  doc='How to split flux among peaks',
619  dtype=str, default='trim',
620  allowed={
621  'r-to-peak': '~ 1/(1+R^2) to the peak',
622  'r-to-footprint': ('~ 1/(1+R^2) to the closest pixel in the footprint. '
623  'CAUTION: this can be computationally expensive on large footprints!'),
624  'nearest-footprint': ('Assign 100% to the nearest footprint (using L-1 norm aka '
625  'Manhattan distance)'),
626  'trim': ('Shrink the parent footprint to pixels that are not assigned to children')
627  }
628  )
629 
630  clipStrayFluxFraction = pexConfig.Field(dtype=float, default=0.001,
631  doc=('When splitting stray flux, clip fractions below '
632  'this value to zero.'))
633  getTemplateSum = pexConfig.Field(dtype=bool, default=False,
634  doc=("As part of the flux calculation, the sum of the templates is "
635  "calculated. If 'getTemplateSum==True' then the sum of the "
636  "templates is stored in the result (a 'PerFootprint')."))
637 
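# --- Illustrative sketch (not part of deblend.py; the helper name is hypothetical) ---
# An example override of the MultibandDeblendConfig defined above; the chosen values are
# assumptions for demonstration, and the field meanings are given in the docs above.
def _exampleMultibandDeblendConfig():
    """Return a MultibandDeblendConfig that keeps both template and flux-conserved catalogs."""
    config = MultibandDeblendConfig()
    config.saveTemplates = True     # keep the per-band template catalogs
    config.conserveFlux = True      # also produce flux-conserved heavy footprints
    config.constraints = "M,S"      # monotonicity and symmetry only (see `constraints` above)
    config.maxIter = 300            # allow more iterations before declaring non-convergence
    config.validate()
    return config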
638 class MultibandDeblendTask(pipeBase.Task):
639  """MultibandDeblendTask
640 
641  Split blended sources into individual sources.
642 
643  This task has no return value; it only modifies the SourceCatalog in-place.
644  """
645  ConfigClass = MultibandDeblendConfig
646  _DefaultName = "multibandDeblend"
647 
648  def __init__(self, schema, peakSchema=None, **kwargs):
649  """Create the task, adding necessary fields to the given schema.
650 
651  Parameters
652  ----------
653  schema: `lsst.afw.table.schema.schema.Schema`
654  Schema object for measurement fields; will be modified in-place.
655  peakSchema: `lsst.afw.table.schema.schema.Schema`
656  Schema of Footprint Peaks that will be passed to the deblender.
657  Any fields beyond the PeakTable minimal schema will be transferred
658  to the main source Schema. If None, no fields will be transferred
659  from the Peaks.
660  filters: list of str
661  Names of the filters used for the exposures. This is needed to store the SED as a field
662  **kwargs
663  Passed to Task.__init__.
664  """
665  from lsst.meas.deblender import plugins
666  import scarlet
667 
668  pipeBase.Task.__init__(self, **kwargs)
669  if not self.config.conserveFlux and not self.config.saveTemplates:
670  raise ValueError("Either `conserveFlux` or `saveTemplates` must be True")
671 
672  peakMinimalSchema = afwDet.PeakTable.makeMinimalSchema()
673  if peakSchema is None:
674  # In this case, the peakSchemaMapper will transfer nothing, but we'll still have one
675  # to simplify downstream code
676  self.peakSchemaMapper = afwTable.SchemaMapper(peakMinimalSchema, schema)
677  else:
678  self.peakSchemaMapper = afwTable.SchemaMapper(peakSchema, schema)
679  for item in peakSchema:
680  if item.key not in peakMinimalSchema:
681  self.peakSchemaMapper.addMapping(item.key, item.field)
682  # Because SchemaMapper makes a copy of the output schema you give its ctor, it isn't
683  # updating this Schema in place. That's probably a design flaw, but in the meantime,
684  # we'll keep that schema in sync with the peakSchemaMapper.getOutputSchema() manually,
685  # by adding the same fields to both.
686  schema.addField(item.field)
687  assert schema == self.peakSchemaMapper.getOutputSchema(), "Logic bug mapping schemas"
688  self._addSchemaKeys(schema)
689 
690  # Create the plugins for multiband deblending using the Config options
691 
692  # Basic deblender configuration
693  config = scarlet.config.Config(
694  center_min_dist=self.config.minTranslation,
695  edge_flux_thresh=self.config.edgeFluxThresh,
696  exact_lipschitz=self.config.exactLipschitz,
697  refine_skip=self.config.refinementSkip,
698  slack=self.config.stepSlack,
699  )
700  if self.config.translationMethod != "default":
701  err = "Currently the only supported translationMethod is 'default', you entered '{0}'"
702  raise NotImplementedError(err.format(self.config.translationMethod))
703 
704  # If the default constraints are not used, set the constraints for
705  # all of the sources
706  constraints = None
707  _constraints = self.config.constraints.split(",")
708  if (sorted(_constraints) != ['+', '1', 'M', 'S']
709  or ~np.isnan(self.config.l0Thresh)
710  or ~np.isnan(self.config.l1Thresh)
711  ):
712  constraintDict = {
713  "+": scarlet.constraint.PositivityConstraint,
714  "1": scarlet.constraint.SimpleConstraint,
715  "M": scarlet.constraint.DirectMonotonicityConstraint(use_nearest=False),
716  "S": scarlet.constraint.DirectSymmetryConstraint(sigma=self.config.symmetryThresh)
717  }
718  for c in _constraints:
719  if constraints is None:
720  constraints = [constraintDict[c]]
721  else:
722  constraints += [constraintDict[c]]
723  if constraints is None:
724  constraints = scarlet.constraint.MinimalConstraint()
725  if ~np.isnan(self.config.l0Thresh):
726  constraints += [scarlet.constraint.L0Constraint(self.config.l0Thresh)]
727  if ~np.isnan(self.config.l1Thresh):
728  constraints += [scarlet.constraint.L1Constraint(self.config.l1Thresh)]
729  if ~np.isnan(self.config.tvxThresh):
730  constraints += [scarlet.constraint.TVxConstraint(self.config.tvxThresh)]
731  if ~np.isnan(self.config.tvyThresh):
732  constraints += [scarlet.constraint.TVyConstraint(self.config.tvyThresh)]
733 
734  multiband_plugin = plugins.DeblenderPlugin(
735  plugins.buildMultibandTemplates,
736  useWeights=self.config.useWeights,
737  usePsf=self.config.usePsfConvolution,
738  constraints=constraints,
739  config=config,
740  maxIter=self.config.maxIter,
741  bgScale=self.config.bgScale,
742  relativeError=self.config.relativeError,
743  badMask=self.config.badMask.split(","),
744  )
745  self.plugins = [multiband_plugin]
746 
747  # Plugins from the old deblender for post-template processing
748  # (see lsst.meas_deblender.baseline.deblend)
749  patchEdges = self.config.edgeHandling == 'noclip'
750  if self.config.edgeHandling == 'ramp':
751  self.plugins.append(plugins.DeblenderPlugin(plugins.rampFluxAtEdge, patchEdges=False))
752  if self.config.medianSmoothTemplate:
753  self.plugins.append(plugins.DeblenderPlugin(plugins.medianSmoothTemplates,
754  medianFilterHalfsize=self.config.medianFilterHalfsize))
755  if self.config.clipFootprintToNonzero:
756  self.plugins.append(plugins.DeblenderPlugin(plugins.clipFootprintsToNonzero))
757  if self.config.conserveFlux:
758  if self.config.weightTemplates:
759  self.plugins.append(plugins.DeblenderPlugin(plugins.weightTemplates))
760  self.plugins.append(plugins.DeblenderPlugin(plugins.apportionFlux,
761  clipStrayFluxFraction=self.config.clipStrayFluxFraction,
762  assignStrayFlux=self.config.assignStrayFlux,
763  strayFluxAssignment=self.config.strayFluxRule,
764  strayFluxToPointSources=self.config.strayFluxToPointSources,
765  getTemplateSum=self.config.getTemplateSum))
766 
767 
768  def _addSchemaKeys(self, schema):
769  """Add deblender specific keys to the schema
770  """
771  self.runtimeKey = schema.addField('runtime', type=np.float32, doc='runtime in ms')
772  # Keys from old Deblender that might be kept in the new deblender
773  self.nChildKey = schema.addField('deblend_nChild', type=np.int32,
774  doc='Number of children this object has (defaults to 0)')
775  self.psfKey = schema.addField('deblend_deblendedAsPsf', type='Flag',
776  doc='Deblender thought this source looked like a PSF')
777  self.tooManyPeaksKey = schema.addField('deblend_tooManyPeaks', type='Flag',
778  doc='Source had too many peaks; '
779  'only the brightest were included')
780  self.tooBigKey = schema.addField('deblend_parentTooBig', type='Flag',
781  doc='Parent footprint covered too many pixels')
782  self.maskedKey = schema.addField('deblend_masked', type='Flag',
783  doc='Parent footprint was predominantly masked')
784  self.deblendFailedKey = schema.addField('deblend_failed', type='Flag',
785  doc="Deblending failed on source")
786 
787  self.deblendSkippedKey = schema.addField('deblend_skipped', type='Flag',
788  doc="Deblender skipped this source")
789 
790  # Keys from the old Deblender that are likely to be removed for the new deblender
791  # TODO: Remove these if they remain unused
792  self.psfCenterKey = afwTable.Point2DKey.addFields(schema, 'deblend_psfCenter',
793  'If deblended-as-psf, the PSF centroid', "pixel")
794  self.psfFluxKey = schema.addField('deblend_psfFlux', type='D',
795  doc='If deblended-as-psf, the PSF flux')
796  self.deblendRampedTemplateKey = schema.addField(
797  'deblend_rampedTemplate', type='Flag',
798  doc=('This source was near an image edge and the deblender used '
799  '"ramp" edge-handling.'))
800 
801  self.deblendPatchedTemplateKey = schema.addField(
802  'deblend_patchedTemplate', type='Flag',
803  doc=('This source was near an image edge and the deblender used '
804  '"patched" edge-handling.'))
805 
806  self.hasStrayFluxKey = schema.addField(
807  'deblend_hasStrayFlux', type='Flag',
808  doc=('This source was assigned some stray flux'))
809 
810  self.log.trace('Added keys to schema: %s', ", ".join(str(x) for x in (
811  self.nChildKey, self.psfKey, self.psfCenterKey, self.psfFluxKey,
812  self.tooManyPeaksKey, self.tooBigKey)))
813 
814  @pipeBase.timeMethod
815  def run(self, mExposure, sources):
816  """Get the psf from each exposure and then run deblend().
817 
818  Parameters
819  ----------
820  mExposure: `MultibandExposure`
821  The exposures should be co-added images of the same
822  shape and region of the sky.
823  sources: dict
824  Keys are the names of the filters and the values are
825  `lsst.afw.table.source.source.SourceCatalog`'s, which
826  should be a merged catalog of the sources in each band.
827 
828  Returns
829  -------
830  flux_catalogs: dict or None
831  Keys are the names of the filters and the values are
832  `lsst.afw.table.source.source.SourceCatalog`'s.
833  These are the flux-conserved catalogs with heavy footprints with
834  the image data weighted by the multiband templates.
835  If `self.config.conserveFlux` is `False`, then this item will be None
836  template_catalogs: dict or None
837  Keys are the names of the filters and the values are
838  `lsst.afw.table.source.source.SourceCatalog`'s.
839  These are catalogs with heavy footprints that are the templates
840  created by the multiband templates.
841  If `self.config.saveTemplates` is `False`, then this item will be None
842  """
843  psfs = {f:mExposure[f].getPsf() for f in mExposure.filters}
844  return self.deblend(mExposure, sources, psfs)
845 
846  def _getPsfFwhm(self, psf, bbox):
847  return psf.computeShape().getDeterminantRadius() * 2.35
848 
849  def _addChild(self, parentId, peak, sources, heavy):
850  """Add a child to a catalog
851 
852  This creates a new child in the source catalog,
853  assigning it a parent id, adding a footprint,
854  and setting all appropriate flags based on the
855  deblender result.
856  """
857  assert len(heavy.getPeaks())==1
858  src = sources.addNew()
859  src.assign(heavy.getPeaks()[0], self.peakSchemaMapper)
860  src.setParent(parentId)
861  src.setFootprint(heavy)
862  src.set(self.psfKey, peak.deblendedAsPsf)
863  src.set(self.hasStrayFluxKey, peak.strayFlux is not None)
864  src.set(self.deblendRampedTemplateKey, peak.hasRampedTemplate)
865  src.set(self.deblendPatchedTemplateKey, peak.patched)
866  src.set(self.runtimeKey, 0)
867  return src
868 
869  @pipeBase.timeMethod
870  def deblend(self, mExposure, sources, psfs):
871  """Deblend a data cube of multiband images
872 
873  Parameters
874  ----------
875  mExposure: `MultibandExposure`
876  The exposures should be co-added images of the same
877  shape and region of the sky.
878  sources: dict
879  Keys are the names of the filters
880  (should be the same as `mExposure.filters`) and the values are
881  `lsst.afw.table.source.source.SourceCatalog`'s, which
882  should be a merged catalog of the sources in each band
883  ('deepCoadd_mergeDet').
884  psfs: dict
885  Keys are the names of the filters
886  (should be the same as `mExposure.filters`)
887  and the values are the PSFs in each band.
888 
889  Returns
890  -------
891  flux_catalogs: dict or None
892  Keys are the names of the filters and the values are
893  `lsst.afw.table.source.source.SourceCatalog`'s.
894  These are the flux-conserved catalogs with heavy footprints with
895  the image data weighted by the multiband templates.
896  If `self.config.conserveFlux` is `False`, then this item will be None
897  template_catalogs: dict or None
898  Keys are the names of the filters and the values are
899  `lsst.afw.table.source.source.SourceCatalog`'s.
900  These are catalogs with heavy footprints that are the templates
901  created by the multiband templates.
902  If `self.config.saveTemplates` is `False`, then this item will be None
903  """
904  from lsst.meas.deblender.baseline import newDeblend
905  import deblender
906 
907  msg = "{0} keys must be the same as mExposure.filters ({1}), got {2}"
908  if sources.keys() != mExposure.filters:
909  raise ValueError(msg.format("Source", mExposure.filters, sources.keys()))
910  if psfs.keys() != mExposure.filters:
911  raise ValueError(msg.format("PSF", mExposure.filters, psfs.keys()))
912 
913  filters = mExposure.filters
914  mMaskedImage = afwImage.MultibandMaskedImage(filters=mExposure.filters, image=mExposure.image,
915  mask=mExposure.mask, variance=mExposure.variance)
916  self.log.info("Deblending {0} sources in {1} exposures".format(len(sources), len(mExposure)))
917 
918  # find the median stdev in each image
919  sigmas = {}
920  for f in filters:
921  exposure = mExposure[f]
922  mi = exposure.getMaskedImage()
923  statsCtrl = afwMath.StatisticsControl()
924  statsCtrl.setAndMask(mi.getMask().getPlaneBitMask(self.config.maskPlanes))
925  stats = afwMath.makeStatistics(mi.getVariance(), mi.getMask(), afwMath.MEDIAN, statsCtrl)
926  sigma1 = math.sqrt(stats.getValue(afwMath.MEDIAN))
927  self.log.trace('Exposure {0}, sigma1: {1}'.format(f, sigma1))
928  sigmas[f] = sigma1
929 
930  # Create the output catalogs
931  if self.config.conserveFlux:
932  flux_catalogs = {f:afwTable.SourceCatalog(sources.clone()) for f in filters}
933  else:
934  flux_catalogs = None
935  if self.config.saveTemplates:
936  template_catalogs = {f:afwTable.SourceCatalog(sources.clone()) for f in filters}
937  else:
938  template_catalogs = None
939 
940  n0 = len(sources)
941  nparents = 0
942  for pk, src in enumerate(sources):
943  foot = src.getFootprint()
944  logger.info("id: {0}".format(src["id"]))
945  peaks = foot.getPeaks()
946 
947  # Since we use the first peak for the parent object, we should propagate its flags
948  # to the parent source.
949  src.assign(peaks[0], self.peakSchemaMapper)
950 
951  # Block of Skipping conditions
952  if len(peaks) < 2 and not self.config.processSingles:
953  for f in filters:
954  if self.config.saveTemplates:
955  tsrc = template_catalogs[f].addNew()
956  tsrc.assign(src)
957  tsrc.set(self.runtimeKey, 0)
959  if self.config.conserveFlux:
960  tsrc = flux_catalogs[f].addNew()
961  tsrc.assign(src)
962  tsrc.set(self.runtimeKey, 0)
964  continue
965  if self.isLargeFootprint(foot):
966  src.set(self.tooBigKey, True)
967  self.skipParent(src, [mi.getMask() for mi in mMaskedImage])
968  self.log.trace('Parent %i: skipping large footprint', int(src.getId()))
969  continue
970  if self.isMasked(foot, exposure.getMaskedImage().getMask()):
971  src.set(self.maskedKey, True)
972  self.skipParent(src, [mi.getMask() for mi in mMaskedImage])
973  self.log.trace('Parent %i: skipping masked footprint', int(src.getId()))
974  continue
975  if len(peaks) > self.config.maxNumberOfPeaks:
976  src.set(self.tooManyPeaksKey, True)
977  msg = 'Parent {0}: Too many peaks, using the first {1} peaks'
978  self.log.trace(msg.format(int(src.getId()), self.config.maxNumberOfPeaks))
979 
980  nparents += 1
981  bbox = foot.getBBox()
982  psf_fwhms = {f:self._getPsfFwhm(psf, bbox) for f, psf in psfs.items()}
983  self.log.trace('Parent %i: deblending %i peaks', int(src.getId()), len(peaks))
984  self.preSingleDeblendHook(mExposure.singles, sources, pk, foot, psfs, psf_fwhms, sigmas)
985  npre = len(sources)
986  # Run the deblender
987  try:
988  t0=time.time()
989  PARENT = afwImage.PARENT
990  # Build the parameter lists with the same ordering
991  images = mMaskedImage[:, bbox]
992  psf_list = [psfs[f] for f in filters]
993  fwhm_list = [psf_fwhms[f] for f in filters]
994  avgNoise = [sigmas[f] for f in filters]
995 
996  result = newDeblend(debPlugins=self.plugins,
997  footprint=foot,
998  mMaskedImage=images,
999  psfs=psf_list,
1000  psfFwhms=fwhm_list,
1001  avgNoise=avgNoise,
1002  maxNumberOfPeaks=self.config.maxNumberOfPeaks
1003  )
1004  tf=time.time()
1005  runtime = (tf-t0)*1000
1006  if result.failed:
1007  src.set(self.deblendFailedKey, True)
1008  src.set(self.runtimeKey, 0)
1009  continue
1010  except Exception as e:
1011  if self.config.catchFailures:
1012  self.log.warn("Unable to deblend source %d: %s" % (src.getId(), e))
1013  src.set(self.deblendFailedKey, True)
1014  src.set(self.runtimeKey, 0)
1015  import traceback
1016  traceback.print_exc()
1017  continue
1018  else:
1019  raise
1020 
1021  # Add the merged source as a parent in the catalog for each band
1022  templateParents = {}
1023  fluxParents = {}
1024  parentId = src.getId()
1025  for f in filters:
1026  if self.config.saveTemplates:
1027  tsrc = template_catalogs[f].addNew()
1028  tsrc.assign(src)
1029  tsrc.set("id", parentId)
1030  tsrc.set(self.runtimeKey, runtime)
1031  _fp = afwDet.Footprint()
1032  _fp.setPeakSchema(src.getFootprint().getPeaks().getSchema())
1033  tsrc.setFootprint(_fp)
1034  templateParents[f] = tsrc
1035  if self.config.conserveFlux:
1036  tsrc = flux_catalogs[f].addNew()
1037  tsrc.assign(src)
1038  tsrc.set(self.runtimeKey, runtime)
1039  tsrc.set("id", parentId)
1040  _fp = afwDet.Footprint()
1041  _fp.setPeakSchema(src.getFootprint().getPeaks().getSchema())
1042  tsrc.setFootprint(_fp)
1043  fluxParents[f] = tsrc
1044 
1045  # Add each source to the catalogs in each band
1046  templateSpans = {f:afwGeom.SpanSet() for f in filters}
1047  fluxSpans = {f:afwGeom.SpanSet() for f in filters}
1048  nchild = 0
1049  for j, multiPeak in enumerate(result.peaks):
1050  heavy = {f:peak.getFluxPortion() for f, peak in multiPeak.deblendedPeaks.items()}
1051  no_flux = all([v is None for v in heavy.values()])
1052  skip_peak = all([peak.skip for peak in multiPeak.deblendedPeaks.values()])
1053  if no_flux or skip_peak:
1054  src.set(self.deblendSkippedKey, True)
1055  if not self.config.propagateAllPeaks:
1056  # We don't care
1057  continue
1058  # We need to preserve the peak: make sure we have enough info to create a minimal
1059  # child src
1060  msg = "Peak at {0} failed deblending. Using minimal default info for child."
1061  self.log.trace(msg.format(multiPeak.x, multiPeak.y))
1062 
1063  # copy the full footprint and strip out extra peaks
1064  pfoot = afwDet.Footprint(foot)
1065  peakList = pfoot.getPeaks()
1066  peakList.clear()
1067  pfoot.addPeak(multiPeak.x, multiPeak.y, 0)
1068  zeroMimg = afwImage.MaskedImageF(pfoot.getBBox())
1069  for f in filters:
1070  heavy[f] = afwDet.makeHeavyFootprint(pfoot, zeroMimg)
1071  else:
1072  src.set(self.deblendSkippedKey, False)
1073 
1074  # Add the peak to the source catalog in each band
1075  for f in filters:
1076  if len(heavy[f].getPeaks()) != 1:
1077  raise ValueError("Heavy footprint has multiple peaks, expected 1")
1078  peak = multiPeak.deblendedPeaks[f]
1079  if self.config.saveTemplates:
1080  cat = template_catalogs[f]
1081  tfoot = peak.templateFootprint
1082  timg = afwImage.MaskedImageF(peak.templateImage)
1083  tHeavy = afwDet.makeHeavyFootprint(tfoot, timg)
1084  child = self._addChild(parentId, peak, cat, tHeavy)
1085  if parentId==0:
1086  child.setId(src.getId())
1087  child.set(self.runtimeKey, runtime)
1088  else:
1089  _peak = tHeavy.getPeaks()[0]
1090  templateParents[f].getFootprint().addPeak(_peak.getFx(), _peak.getFy(),
1091  _peak.getPeakValue())
1092  templateSpans[f] = templateSpans[f].union(tHeavy.getSpans())
1093  if self.config.conserveFlux:
1094  cat = flux_catalogs[f]
1095  child = self._addChild(parentId, peak, cat, heavy[f])
1096  if parentId==0:
1097  child.setId(src.getId())
1098  child.set(self.runtimeKey, runtime)
1099  else:
1100  _peak = heavy[f].getPeaks()[0]
1101  fluxParents[f].getFootprint().addPeak(_peak.getFx(), _peak.getFy(),
1102  _peak.getPeakValue())
1103  fluxSpans[f] = fluxSpans[f].union(heavy[f].getSpans())
1104  nchild += 1
1105 
1106  # Child footprints may extend beyond the full extent of their parent's which
1107  # results in a failure of the replace-by-noise code to reinstate these pixels
1108  # to their original values. The following updates the parent footprint
1109  # in-place to ensure it contains the full union of itself and all of its
1110  # children's footprints.
1111  for f in filters:
1112  if self.config.saveTemplates:
1113  templateParents[f].set(self.nChildKey, nchild)
1114  templateParents[f].getFootprint().setSpans(templateSpans[f])
1115  if self.config.conserveFlux:
1116  fluxParents[f].set(self.nChildKey, nchild)
1117  fluxParents[f].getFootprint().setSpans(fluxSpans[f])
1118 
1119  self.postSingleDeblendHook(mExposure.singles, flux_catalogs, template_catalogs,
1120  pk, npre, foot, psfs, psf_fwhms, sigmas, result)
1121 
1122  if flux_catalogs is not None:
1123  n1 = len(list(flux_catalogs.values())[0])
1124  else:
1125  n1 = len(list(template_catalogs.values())[0])
1126  self.log.info('Deblended: of %i sources, %i were deblended, creating %i children, total %i sources'
1127  % (n0, nparents, n1-n0, n1))
1128  return flux_catalogs, template_catalogs
1129 
1130  def preSingleDeblendHook(self, exposures, sources, pk, fp, psfs, psf_fwhms, sigmas):
1131  pass
1132 
1133  def postSingleDeblendHook(self, exposures, flux_catalogs, template_catalogs,
1134  pk, npre, fp, psfs, psf_fwhms, sigmas, result):
1135  pass
1136 
1137  def isLargeFootprint(self, footprint):
1138  """Returns whether a Footprint is large
1139 
1140  'Large' is defined by thresholds on the area, size and axis ratio.
1141  These may be disabled independently by configuring them to be non-positive.
1142 
1143  This is principally intended to get rid of satellite streaks, which the
1144  deblender or other downstream processing can have trouble dealing with
1145  (e.g., multiple large HeavyFootprints can chew up memory).
1146  """
1147  if self.config.maxFootprintArea > 0 and footprint.getArea() > self.config.maxFootprintArea:
1148  return True
1149  if self.config.maxFootprintSize > 0:
1150  bbox = footprint.getBBox()
1151  if max(bbox.getWidth(), bbox.getHeight()) > self.config.maxFootprintSize:
1152  return True
1153  if self.config.minFootprintAxisRatio > 0:
1154  axes = afwEll.Axes(footprint.getShape())
1155  if axes.getB() < self.config.minFootprintAxisRatio*axes.getA():
1156  return True
1157  return False
1158 
1159  def isMasked(self, footprint, mask):
1160  """Returns whether the footprint violates the mask limits"""
1161  size = float(footprint.getArea())
1162  for maskName, limit in self.config.maskLimits.items():
1163  maskVal = mask.getPlaneBitMask(maskName)
1164  unmaskedSpan = footprint.spans.intersectNot(mask, maskVal) # spanset of unmasked pixels
1165  if (size - unmaskedSpan.getArea())/size > limit:
1166  return True
1167  return False
1168 
1169  def skipParent(self, source, masks):
1170  """Indicate that the parent source is not being deblended
1171 
1172  We set the appropriate flags and masks for each exposure.
1173 
1174  Parameters
1175  ----------
1176  source: `lsst.afw.table.source.source.SourceRecord`
1177  The source to flag as skipped
1178  masks: list of `lsst.afw.image.MaskX`
1179  The mask in each band to update with the non-detection
1180  """
1181  fp = source.getFootprint()
1182  source.set(self.deblendSkippedKey, True)
1183  source.set(self.nChildKey, len(fp.getPeaks())) # It would have this many if we deblended them all
1184  if self.config.notDeblendedMask:
1185  for mask in masks:
1186  mask.addMaskPlane(self.config.notDeblendedMask)
1187  fp.spans.setMask(mask, mask.getPlaneBitMask(self.config.notDeblendedMask))
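The sketch below (not part of deblend.py) illustrates how MultibandDeblendTask might be constructed with an overridden configuration. It assumes the scarlet package imported in __init__ is available, and it leaves the inputs to run() schematic, since the multiband exposure and merged detection catalog come from upstream coaddition, detection, and merging steps.

def _exampleMultibandDeblendSetup():
    """Minimal sketch: construct MultibandDeblendTask with an overridden config."""
    config = MultibandDeblendConfig()
    config.conserveFlux = True      # at least one of conserveFlux / saveTemplates must be True,
    config.saveTemplates = True     # otherwise __init__ raises ValueError
    schema = afwTable.SourceTable.makeMinimalSchema()
    # Construction adds the deblend_* fields to `schema` and builds the deblender plugins.
    deblendTask = MultibandDeblendTask(schema=schema, config=config)
    # A subsequent call such as
    #     flux_catalogs, template_catalogs = deblendTask.run(mExposure, sources)
    # returns the two (possibly None) per-filter catalog dictionaries described in the
    # run() docstring; mExposure and sources are assumed to come from upstream processing.
    return deblendTask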