# This file is part of meas_extensions_scarlet.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import numpy as np
import scarlet
from scarlet.psf import ImagePSF, GaussianPSF
from scarlet import Blend, Frame, Observation
from scarlet.initialization import initAllSources

import lsst.log
import lsst.pex.config as pexConfig
from lsst.pex.exceptions import InvalidParameterError
import lsst.pipe.base as pipeBase
from lsst.geom import Point2I, Box2I, Point2D
import lsst.afw.geom as afwGeom
import lsst.afw.geom.ellipses as afwEll
import lsst.afw.image.utils
import lsst.afw.image as afwImage
import lsst.afw.detection as afwDet
import lsst.afw.table as afwTable

from .source import modelToHeavy

__all__ = ["deblend", "ScarletDeblendConfig", "ScarletDeblendTask"]

logger = lsst.log.Log.getLogger("meas.deblender.deblend")


class IncompleteDataError(Exception):
    """The PSF could not be computed due to incomplete data
    """
    pass


class ScarletGradientError(Exception):
    """An error occurred during optimization

    This error occurs when the optimizer encounters
    a NaN value while calculating the gradient.
    """
    def __init__(self, iterations, sources):
        self.iterations = iterations
        self.sources = sources
        msg = ("ScarletGradientError in iteration {0}. "
               "NaN values introduced in sources {1}")
        self.message = msg.format(iterations, sources)

    def __str__(self):
        return self.message


def _checkBlendConvergence(blend, f_rel):
    """Check whether or not a blend has converged

    The blend is considered converged when the absolute change in the
    loss between the final two iterations is less than ``f_rel`` times
    the absolute value of the final loss.
    """
    deltaLoss = np.abs(blend.loss[-2] - blend.loss[-1])
    convergence = f_rel * np.abs(blend.loss[-1])
    return deltaLoss < convergence
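
# For intuition, a minimal sketch of the criterion above with a toy loss
# history (`FakeBlend` is hypothetical, standing in for a fitted
# `scarlet.Blend`):
#
#     class FakeBlend:
#         loss = [100.0, 10.0, 9.9999]
#
#     _checkBlendConvergence(FakeBlend(), f_rel=1e-2)  # True: |dLoss| < 0.1
#     _checkBlendConvergence(FakeBlend(), f_rel=1e-6)  # False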


def _getPsfFwhm(psf):
    """Calculate the FWHM of the `psf`
    """
    # FWHM = 2*sqrt(2*ln(2))*sigma ~ 2.355*sigma for a Gaussian profile
    return psf.computeShape().getDeterminantRadius() * 2.35


def _computePsfImage(self, position=None):
    """Get a multiband PSF image

    The PSF kernel image is computed for each band
    and combined into a (filter, y, x) array.
    The result is not cached, so if the same PSF is expected
    to be used multiple times it is a good idea to store the
    result in another variable.

    Note: this is a temporary fix during the deblender sprint.
    In the future this function will replace the current method
    in `afw.MultibandExposure.computePsfImage` (DM-19789).

    Parameters
    ----------
    position : `Point2D` or `tuple`
        Coordinates to evaluate the PSF. If `position` is `None`
        then `Psf.getAveragePosition()` is used.

    Returns
    -------
    psfImage : `lsst.afw.image.MultibandImage`
        The multiband PSF image.
    """
    psfs = []
    # Make the coordinates into a Point2D (if necessary)
    if not isinstance(position, Point2D) and position is not None:
        position = Point2D(position[0], position[1])

    for bidx, single in enumerate(self.singles):
        try:
            if position is None:
                psf = single.getPsf().computeImage()
            else:
                psf = single.getPsf().computeImage(position)
            psfs.append(psf)
        except InvalidParameterError:
            # This band failed to compute the PSF due to incomplete data
            # at that location. This is unlikely to be a problem for Rubin,
            # however the edges of some HSC COSMOS fields contain incomplete
            # data in some bands, so we track this error to distinguish it
            # from unknown errors.
            msg = "Failed to compute PSF at {} in band {}"
            raise IncompleteDataError(msg.format(position, self.filters[bidx]))

    # Project each single-band PSF image onto the union of their bounding boxes
    left = np.min([psf.getBBox().getMinX() for psf in psfs])
    bottom = np.min([psf.getBBox().getMinY() for psf in psfs])
    right = np.max([psf.getBBox().getMaxX() for psf in psfs])
    top = np.max([psf.getBBox().getMaxY() for psf in psfs])
    bbox = Box2I(Point2I(left, bottom), Point2I(right, top))
    psfs = [afwImage.utils.projectImage(psf, bbox) for psf in psfs]
    psfImage = afwImage.MultibandImage.fromImages(self.filters, psfs)
    return psfImage
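
# A minimal usage sketch (assuming `mExposure` is a loaded
# `lsst.afw.image.MultibandExposure`; the call mirrors how `deblend` below
# evaluates the PSF at a footprint centroid):
#
#     psfImage = _computePsfImage(mExposure, position=Point2D(150.0, 200.0))
#     psfArray = psfImage.array  # shape (nBands, height, width)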


def getFootprintMask(footprint, mExposure):
    """Mask pixels outside the footprint

    Parameters
    ----------
    footprint : `lsst.afw.detection.Footprint`
        The footprint of the parent to deblend
    mExposure : `lsst.afw.image.MultibandExposure`
        The multiband exposure containing the image,
        mask, and variance data

    Returns
    -------
    footprintMask : array
        Boolean array with pixels outside the footprint set to `True`.
    """
    bbox = footprint.getBBox()
    fpMask = afwImage.Mask(bbox)
    footprint.spans.setMask(fpMask, 1)
    fpMask = ~fpMask.getArray().astype(bool)
    return fpMask
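
# A minimal sketch of how this mask is combined with inverse-variance
# weights (this mirrors the logic in `deblend` below; `mExposure` and
# `footprint` are assumed inputs):
#
#     bbox = footprint.getBBox()
#     weights = 1 / mExposure.variance[:, bbox].array
#     weights *= ~getFootprintMask(footprint, mExposure)  # zero outside fp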


def deblend(mExposure, footprint, config):
    """Deblend a parent footprint

    Parameters
    ----------
    mExposure : `lsst.afw.image.MultibandExposure`
        The multiband exposure containing the image,
        mask, and variance data
    footprint : `lsst.afw.detection.Footprint`
        The footprint of the parent to deblend
    config : `ScarletDeblendConfig`
        Configuration of the deblending task

    Returns
    -------
    blend : `scarlet.Blend`
        The fitted blend containing the deblended sources.
    skipped : `list` of `int`
        Indices of the peaks that could not be initialized.
    """
    # Extract coordinates from each MultiColorPeak
    bbox = footprint.getBBox()

    # Create the data array from the masked images
    images = mExposure.image[:, bbox].array

    # Use the inverse variance as the weights
    if config.useWeights:
        weights = 1/mExposure.variance[:, bbox].array
    else:
        weights = np.ones_like(images)
    badPixels = mExposure.mask.getPlaneBitMask(config.badMask)
    mask = mExposure.mask[:, bbox].array & badPixels
    weights[mask > 0] = 0

    # Mask out the pixels outside the footprint
    mask = getFootprintMask(footprint, mExposure)
    weights *= ~mask

    psfs = _computePsfImage(mExposure, footprint.getCentroid()).array.astype(np.float32)
    psfs = ImagePSF(psfs)
    model_psf = GaussianPSF(sigma=(config.modelPsfSigma,)*len(mExposure.filters))

    frame = Frame(images.shape, psfs=model_psf, channels=mExposure.filters)
    observation = Observation(images, psfs=psfs, weights=weights, channels=mExposure.filters)
    observation.match(frame)

    assert config.sourceModel in ["single", "double", "compact", "point", "fit"]

    # Set the appropriate number of components
    if config.sourceModel == "single":
        maxComponents = 1
    elif config.sourceModel == "double":
        maxComponents = 2
    elif config.sourceModel == "compact":
        raise NotImplementedError("CompactSource initialization has not yet been ported "
                                  "to the stack version of scarlet")
    elif config.sourceModel == "point":
        maxComponents = 0
    elif config.sourceModel == "fit":
        # It is likely in the future that there will be some heuristic
        # used to determine what type of model to use for each source,
        # but that has not yet been implemented (see DM-22551)
        raise NotImplementedError("sourceModel 'fit' has not been implemented yet")

    # Convert the peaks into local (y, x) pixel coordinates
    xmin = bbox.getMinX()
    ymin = bbox.getMinY()
    centers = [np.array([peak.getIy()-ymin, peak.getIx()-xmin], dtype=int) for peak in footprint.peaks]

    # Only deblend sources that can be initialized
    sources, skipped = initAllSources(
        frame=frame,
        centers=centers,
        observation=observation,
        symmetric=config.symmetric,
        monotonic=config.monotonic,
        thresh=config.morphThresh,
        maxComponents=maxComponents,
        edgeDistance=config.edgeDistance,
        shifting=False,
        downgrade=config.downgrade,
        fallback=config.fallback,
    )

    # Attach the peak to all of the initialized sources
    srcIndex = 0
    for k, center in enumerate(centers):
        if k not in skipped:
            # This is just to make sure that there isn't a coding bug
            assert np.all(sources[srcIndex].center == center)
            # Store the record for the peak with the appropriate source
            sources[srcIndex].detectedPeak = footprint.peaks[k]
            srcIndex += 1

    # Create the blend and attempt to optimize it
    blend = Blend(sources, observation)
    try:
        blend.fit(max_iter=config.maxIter, e_rel=config.relativeError)
    except ArithmeticError:
        # This occurs when a gradient update produces a NaN value
        # This is usually due to a source initialized with a
        # negative SED or no flux, often because the peak
        # is a noise fluctuation in one band and not a real source.
        iterations = len(blend.loss)
        failedSources = []
        for k, src in enumerate(sources):
            if np.any(~np.isfinite(src.get_model())):
                failedSources.append(k)
        raise ScarletGradientError(iterations, failedSources)

    return blend, skipped
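
# A minimal sketch of calling `deblend` directly (assuming `mExposure` and a
# parent `footprint` from a merged detection catalog are already in hand):
#
#     config = ScarletDeblendConfig()
#     try:
#         blend, skipped = deblend(mExposure, footprint, config)
#     except ScarletGradientError as e:
#         print(f"Optimization diverged after {e.iterations} iterations")
#     else:
#         print(f"Fit {len(blend.sources)} sources, skipped {len(skipped)}")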


class ScarletDeblendConfig(pexConfig.Config):
    """ScarletDeblendConfig

    Configuration for the multiband deblender.
    The parameters are organized by the parameter types, which are
    - Stopping Criteria: Used to determine if the fit has converged
    - Position Fitting Criteria: Used to fit the positions of the peaks
    - Constraints: Used to apply constraints to the peaks and their components
    - Other: Parameters that don't fit into the above categories
    """
    # Stopping Criteria
    maxIter = pexConfig.Field(dtype=int, default=300,
                              doc=("Maximum number of iterations to deblend a single parent"))
    relativeError = pexConfig.Field(dtype=float, default=1e-4,
                                    doc=("Change in the loss function between "
                                         "iterations to exit fitter"))

    # Blend Configuration options
    edgeDistance = pexConfig.Field(dtype=int, default=1,
                                   doc="All sources with flux within `edgeDistance` from the edge "
                                       "will be considered edge sources.")

    # Constraints
    morphThresh = pexConfig.Field(dtype=float, default=1,
                                  doc="Fraction of background RMS a pixel must have "
                                      "to be included in the initial morphology")
    monotonic = pexConfig.Field(dtype=bool, default=True, doc="Make models monotonic")
    symmetric = pexConfig.Field(dtype=bool, default=False, doc="Make models symmetric")

    # Other scarlet parameters
    useWeights = pexConfig.Field(
        dtype=bool, default=True,
        doc=("Whether or not to use inverse variance weighting. "
             "If `useWeights` is `False` then flat weights are used"))
    modelPsfSize = pexConfig.Field(
        dtype=int, default=11,
        doc="Model PSF side length in pixels")
    modelPsfSigma = pexConfig.Field(
        dtype=float, default=0.8,
        doc="Define sigma for the model frame PSF")
    saveTemplates = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to save the SEDs and templates")
    processSingles = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to process isolated sources in the deblender")
    sourceModel = pexConfig.Field(
        dtype=str, default="single",
        doc=("How to determine which model to use for sources, from\n"
             "- 'single': use a single component for all sources\n"
             "- 'double': use a bulge disk model for all sources\n"
             "- 'compact': use a single component model, initialized with a point source morphology, "
             "  for all sources\n"
             "- 'point': use a point-source model for all sources\n"
             "- 'fit': use a PSF fitting model to determine the number of components (not yet implemented)")
    )
    downgrade = pexConfig.Field(
        dtype=bool, default=False,
        doc="Whether or not to downgrade the number of components for sources in small bounding boxes"
    )

    # Mask-plane restrictions
    badMask = pexConfig.ListField(
        dtype=str, default=["BAD", "CR", "NO_DATA", "SAT", "SUSPECT"],
        doc="Mask planes that mark pixels to be given zero weight in the fit")
    statsMask = pexConfig.ListField(dtype=str, default=["SAT", "INTRP", "NO_DATA"],
                                    doc="Mask planes to ignore when performing statistics")
    maskLimits = pexConfig.DictField(
        keytype=str,
        itemtype=float,
        default={},
        doc=("Mask planes with the corresponding limit on the fraction of masked pixels. "
             "Sources violating this limit will not be deblended."),
    )

    # Size restrictions
    maxNumberOfPeaks = pexConfig.Field(
        dtype=int, default=0,
        doc=("Only deblend the brightest maxNumberOfPeaks peaks in the parent"
             " (<= 0: unlimited)"))
    maxFootprintArea = pexConfig.Field(
        dtype=int, default=1000000,
        doc=("Maximum area for footprints before they are ignored as large; "
             "non-positive means no threshold applied"))
    maxFootprintSize = pexConfig.Field(
        dtype=int, default=0,
        doc=("Maximum linear dimension for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))
    minFootprintAxisRatio = pexConfig.Field(
        dtype=float, default=0.0,
        doc=("Minimum axis ratio for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))

    # Failure modes
    fallback = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to fallback to a smaller number of components if a source does not initialize"
    )
    notDeblendedMask = pexConfig.Field(
        dtype=str, default="NOT_DEBLENDED", optional=True,
        doc="Mask name for footprints not deblended, or None")
    catchFailures = pexConfig.Field(
        dtype=bool, default=True,
        doc=("If True, catch exceptions thrown by the deblender, log them, "
             "and set a flag on the parent, instead of letting them propagate up"))
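
# A minimal sketch of customizing the config before constructing the task
# (field names are those defined above; the override values are illustrative):
#
#     config = ScarletDeblendConfig()
#     config.maxIter = 500
#     config.sourceModel = "double"
#     config.maskLimits = {"NO_DATA": 0.25}  # skip parents >25% NO_DATA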


class ScarletDeblendTask(pipeBase.Task):
    """ScarletDeblendTask

    Split blended sources into individual sources.

    This task modifies the input SourceCatalog in place and also returns
    per-band catalogs of deblended model footprints.
    """
    ConfigClass = ScarletDeblendConfig
    _DefaultName = "scarletDeblend"

    def __init__(self, schema, peakSchema=None, **kwargs):
        """Create the task, adding necessary fields to the given schema.

        Parameters
        ----------
        schema : `lsst.afw.table.schema.schema.Schema`
            Schema object for measurement fields; will be modified in-place.
        peakSchema : `lsst.afw.table.schema.schema.Schema`
            Schema of Footprint Peaks that will be passed to the deblender.
            Any fields beyond the PeakTable minimal schema will be transferred
            to the main source Schema. If None, no fields will be transferred
            from the Peaks.
        **kwargs
            Passed to Task.__init__.
        """
        pipeBase.Task.__init__(self, **kwargs)

        peakMinimalSchema = afwDet.PeakTable.makeMinimalSchema()
        if peakSchema is None:
            # In this case, the peakSchemaMapper will transfer nothing, but
            # we'll still have one to simplify downstream code
            self.peakSchemaMapper = afwTable.SchemaMapper(peakMinimalSchema, schema)
        else:
            self.peakSchemaMapper = afwTable.SchemaMapper(peakSchema, schema)
            for item in peakSchema:
                if item.key not in peakMinimalSchema:
                    self.peakSchemaMapper.addMapping(item.key, item.field)
                    # Because SchemaMapper makes a copy of the output schema
                    # you give its ctor, it isn't updating this Schema in
                    # place. That's probably a design flaw, but in the
                    # meantime, we'll keep that schema in sync with the
                    # peakSchemaMapper.getOutputSchema() manually, by adding
                    # the same fields to both.
                    schema.addField(item.field)
            assert schema == self.peakSchemaMapper.getOutputSchema(), "Logic bug mapping schemas"
        self._addSchemaKeys(schema)
        self.schema = schema

    def _addSchemaKeys(self, schema):
        """Add deblender specific keys to the schema
        """
        self.runtimeKey = schema.addField('deblend_runtime', type=np.float32, doc='runtime in ms')

        self.iterKey = schema.addField('deblend_iterations', type=np.int32, doc='iterations to converge')

        self.nChildKey = schema.addField('deblend_nChild', type=np.int32,
                                         doc='Number of children this object has (defaults to 0)')
        self.psfKey = schema.addField('deblend_deblendedAsPsf', type='Flag',
                                      doc='Deblender thought this source looked like a PSF')
        self.tooManyPeaksKey = schema.addField('deblend_tooManyPeaks', type='Flag',
                                               doc='Source had too many peaks; '
                                                   'only the brightest were included')
        self.tooBigKey = schema.addField('deblend_parentTooBig', type='Flag',
                                         doc='Parent footprint covered too many pixels')
        self.maskedKey = schema.addField('deblend_masked', type='Flag',
                                         doc='Parent footprint was predominantly masked')
        self.sedNotConvergedKey = schema.addField('deblend_sedConvergenceFailed', type='Flag',
                                                  doc='scarlet sed optimization did not converge before '
                                                      'config.maxIter')
        self.morphNotConvergedKey = schema.addField('deblend_morphConvergenceFailed', type='Flag',
                                                    doc='scarlet morph optimization did not converge before '
                                                        'config.maxIter')
        self.blendConvergenceFailedFlagKey = schema.addField('deblend_blendConvergenceFailedFlag',
                                                             type='Flag',
                                                             doc='at least one source in the blend '
                                                                 'failed to converge')
        self.edgePixelsKey = schema.addField('deblend_edgePixels', type='Flag',
                                             doc='Source had flux on the edge of the parent footprint')
        self.deblendFailedKey = schema.addField('deblend_failed', type='Flag',
                                                doc="Deblending failed on source")
        self.deblendErrorKey = schema.addField('deblend_error', type="String", size=25,
                                               doc='Name of error if the blend failed')
        self.deblendSkippedKey = schema.addField('deblend_skipped', type='Flag',
                                                 doc="Deblender skipped this source")
        self.peakCenter = afwTable.Point2IKey.addFields(schema, name="deblend_peak_center",
                                                        doc="Center used to apply constraints in scarlet",
                                                        unit="pixel")
        self.peakIdKey = schema.addField("deblend_peakId", type=np.int32,
                                         doc="ID of the peak in the parent footprint. "
                                             "This is not unique, but the combination of 'parent' "
                                             "and 'peakId' should be for all child sources. "
                                             "Top level blends with no parents have 'peakId=0'")
        self.modelCenterFlux = schema.addField('deblend_peak_instFlux', type=float, units='count',
                                               doc="The instFlux at the peak position of the "
                                                   "deblended model")
        self.modelTypeKey = schema.addField("deblend_modelType", type="String", size=20,
                                            doc="The type of model used, for example "
                                                "MultiExtendedSource, SingleExtendedSource, PointSource")
        self.edgeFluxFlagKey = schema.addField("deblend_edgeFluxFlag", type="Flag",
                                               doc="Source has flux on the edge of the image")
        self.nPeaksKey = schema.addField("deblend_nPeaks", type=np.int32,
                                         doc="Number of initial peaks in the blend. "
                                             "This includes peaks that may have been culled "
                                             "during deblending or failed to deblend")
        self.parentNPeaksKey = schema.addField("deblend_parentNPeaks", type=np.int32,
                                               doc="Same as deblend_nPeaks, but the number of peaks "
                                                   "in the parent footprint")
        self.scarletFluxKey = schema.addField("deblend_scarletFlux", type=np.float32,
                                              doc="Flux measurement from scarlet")
        self.scarletLogLKey = schema.addField("deblend_logL", type=np.float32,
                                              doc="Final logL, used to identify regressions in scarlet.")

        # self.log.trace('Added keys to schema: %s', ", ".join(str(x) for x in
        #                (self.nChildKey, self.tooManyPeaksKey, self.tooBigKey))
        #                )

    @pipeBase.timeMethod
    def run(self, mExposure, mergedSources):
        """Get the psf from each exposure and then run deblend().

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        mergedSources : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.

        Returns
        -------
        fluxCatalogs : dict or None
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are the flux-conserved catalogs with heavy footprints with
            the image data weighted by the multiband templates.
            If `self.config.conserveFlux` is `False`, then this item will be
            None.
        templateCatalogs : dict or None
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are catalogs with heavy footprints that are the templates
            created by the multiband templates.
            If `self.config.saveTemplates` is `False`, then this item will be
            None.
        """
        return self.deblend(mExposure, mergedSources)
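
    # A minimal sketch of driving the task (`mExposure` is a
    # `MultibandExposure` and `catalog` a merged detection catalog with
    # parent footprints; both are assumed to exist already):
    #
    #     schema = afwTable.SourceTable.makeMinimalSchema()
    #     task = ScarletDeblendTask(schema=schema)
    #     fluxCatalogs, templateCatalogs = task.run(mExposure, catalog)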

    @pipeBase.timeMethod
    def deblend(self, mExposure, sources):
        """Deblend a data cube of multiband images

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        sources : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.

        Returns
        -------
        fluxCatalogs : dict or None
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are the flux-conserved catalogs with heavy footprints with
            the image data weighted by the multiband templates.
            If `self.config.conserveFlux` is `False`, then this item will be
            None.
        templateCatalogs : dict or None
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are catalogs with heavy footprints that are the templates
            created by the multiband templates.
            If `self.config.saveTemplates` is `False`, then this item will be
            None.
        """
        import time

        filters = mExposure.filters
        self.log.info("Deblending {0} sources in {1} exposure bands".format(len(sources), len(mExposure)))

        # Create the output catalogs
        templateCatalogs = {}
        # This must be returned but is not calculated right now, setting it to
        # None to be consistent with doc string
        fluxCatalogs = None
        for f in filters:
            _catalog = afwTable.SourceCatalog(sources.table.clone())
            _catalog.extend(sources)
            templateCatalogs[f] = _catalog

        n0 = len(sources)
        nparents = 0
        for pk, src in enumerate(sources):
            foot = src.getFootprint()
            bbox = foot.getBBox()
            logger.info("id: {0}".format(src["id"]))
            peaks = foot.getPeaks()

            # Since we use the first peak for the parent object, we should
            # propagate its flags to the parent source.
            src.assign(peaks[0], self.peakSchemaMapper)

            # Block of skipping conditions
            if len(peaks) < 2 and not self.config.processSingles:
                for f in filters:
                    templateCatalogs[f][pk].set(self.runtimeKey, 0)
                continue
            if self._isLargeFootprint(foot):
                src.set(self.tooBigKey, True)
                self._skipParent(src, mExposure.mask)
                self.log.trace('Parent %i: skipping large footprint', int(src.getId()))
                continue
            if self._isMasked(foot, mExposure):
                src.set(self.maskedKey, True)
                mask = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0)
                mask = afwImage.MaskX(mask, xy0=bbox.getMin())
                self._skipParent(src, mask)
                self.log.trace('Parent %i: skipping masked footprint', int(src.getId()))
                continue
            if self.config.maxNumberOfPeaks > 0 and len(peaks) > self.config.maxNumberOfPeaks:
                src.set(self.tooManyPeaksKey, True)
                self._skipParent(src, mExposure.mask)
                msg = 'Parent {0}: Too many peaks, skipping blend'
                self.log.trace(msg.format(int(src.getId())))
                # Unlike meas_deblender, in scarlet we skip the entire blend
                # if the number of peaks exceeds max peaks, since neglecting
                # to model any peaks often results in catastrophic failure
                # of scarlet to generate models for the brighter sources.
                continue

            nparents += 1
            self.log.trace('Parent %i: deblending %i peaks', int(src.getId()), len(peaks))
            # Run the deblender
            blendError = None
            try:
                t0 = time.time()
                # Build the parameter lists with the same ordering
                blend, skipped = deblend(mExposure, foot, self.config)
                tf = time.time()
                runtime = (tf-t0)*1000
                src.set(self.deblendFailedKey, False)
                src.set(self.runtimeKey, runtime)
                converged = _checkBlendConvergence(blend, self.config.relativeError)
                src.set(self.blendConvergenceFailedFlagKey, not converged)
                # Use a separate name for the deblended source list so that
                # the `sources` catalog being iterated is not shadowed
                blendSources = list(blend.sources)
                # Re-insert place holders for skipped sources
                # to propagate them in the catalog so
                # that the peaks stay consistent
                for k in skipped:
                    blendSources.insert(k, None)
            # Catch all errors and filter out the ones that we know about
            except Exception as e:
                blendError = type(e).__name__
                if isinstance(e, ScarletGradientError):
                    src.set(self.iterKey, e.iterations)
                elif not isinstance(e, IncompleteDataError):
                    blendError = "UnknownError"
                    self._skipParent(src, mExposure.mask)
                    if self.config.catchFailures:
                        # Make it easy to find UnknownErrors in the log file
                        self.log.warn("UnknownError")
                        import traceback
                        traceback.print_exc()
                    else:
                        raise

                self.log.warn("Unable to deblend source %d: %s" % (src.getId(), blendError))
                src.set(self.deblendFailedKey, True)
                src.set(self.deblendErrorKey, blendError)
                self._skipParent(src, mExposure.mask)
                continue

            # Add the merged source as a parent in the catalog for each band
            templateParents = {}
            parentId = src.getId()
            for f in filters:
                templateParents[f] = templateCatalogs[f][pk]
                templateParents[f].set(self.nPeaksKey, len(foot.peaks))
                templateParents[f].set(self.runtimeKey, runtime)
                templateParents[f].set(self.iterKey, len(blend.loss))
                logL = blend.loss[-1]-blend.observations[0].log_norm
                templateParents[f].set(self.scarletLogLKey, logL)

            # Add each source to the catalogs in each band
            templateSpans = {f: afwGeom.SpanSet() for f in filters}
            nchild = 0
            for k, source in enumerate(blendSources):
                # Skip any sources with no flux or that scarlet skipped because
                # it could not initialize
                if k in skipped:
                    # No need to propagate anything
                    continue
                else:
                    src.set(self.deblendSkippedKey, False)
                    models = modelToHeavy(source, filters, xy0=bbox.getMin(),
                                          observation=blend.observations[0])

                flux = scarlet.measure.flux(source)
                for fidx, f in enumerate(filters):
                    if len(models[f].getPeaks()) != 1:
                        err = "Heavy footprint should have a single peak, got {0}"
                        raise ValueError(err.format(len(models[f].peaks)))
                    cat = templateCatalogs[f]
                    child = self._addChild(parentId, cat, models[f], source, converged,
                                           xy0=bbox.getMin(), flux=flux[fidx])
                    if parentId == 0:
                        child.setId(src.getId())
                        child.set(self.runtimeKey, runtime)
                    else:
                        templateSpans[f] = templateSpans[f].union(models[f].getSpans())
                nchild += 1

            # Child footprints may extend beyond the full extent of their
            # parent's which results in a failure of the replace-by-noise code
            # to reinstate these pixels to their original values. The
            # following updates the parent footprint in-place to ensure it
            # contains the full union of itself and all of its
            # children's footprints.
            for f in filters:
                templateParents[f].set(self.nChildKey, nchild)
                templateParents[f].getFootprint().setSpans(templateSpans[f])

        K = len(list(templateCatalogs.values())[0])
        self.log.info('Deblended: of %i sources, %i were deblended, creating %i children, total %i sources'
                      % (n0, nparents, K-n0, K))
        return fluxCatalogs, templateCatalogs

    def _isLargeFootprint(self, footprint):
        """Returns whether a Footprint is large

        'Large' is defined by thresholds on the area, size and axis ratio.
        These may be disabled independently by configuring them to be
        non-positive.

        This is principally intended to get rid of satellite streaks, which the
        deblender or other downstream processing can have trouble dealing with
        (e.g., multiple large HeavyFootprints can chew up memory).
        """
        if self.config.maxFootprintArea > 0 and footprint.getArea() > self.config.maxFootprintArea:
            return True
        if self.config.maxFootprintSize > 0:
            bbox = footprint.getBBox()
            if max(bbox.getWidth(), bbox.getHeight()) > self.config.maxFootprintSize:
                return True
        if self.config.minFootprintAxisRatio > 0:
            axes = afwEll.Axes(footprint.getShape())
            if axes.getB() < self.config.minFootprintAxisRatio*axes.getA():
                return True
        return False

    def _isMasked(self, footprint, mExposure):
        """Returns whether the footprint violates the mask limits"""
        bbox = footprint.getBBox()
        mask = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0)
        size = float(footprint.getArea())
        for maskName, limit in self.config.maskLimits.items():
            maskVal = mExposure.mask.getPlaneBitMask(maskName)
            _mask = afwImage.MaskX(mask & maskVal, xy0=bbox.getMin())
            unmaskedSpan = footprint.spans.intersectNot(_mask)  # spanset of unmasked pixels
            if (size - unmaskedSpan.getArea())/size > limit:
                return True
        return False
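
    # For example, with `maskLimits={"NO_DATA": 0.25}`, a footprint whose
    # pixels are more than 25% NO_DATA is flagged as masked and skipped:
    #
    #     maskedFraction = (size - unmaskedSpan.getArea()) / size
    #     skip = maskedFraction > 0.25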

    def _skipParent(self, source, masks):
        """Indicate that the parent source is not being deblended

        We set the appropriate flags and masks for each exposure.

        Parameters
        ----------
        source : `lsst.afw.table.source.source.SourceRecord`
            The source to flag as skipped
        masks : list of `lsst.afw.image.MaskX`
            The mask in each band to update with the non-detection
        """
        fp = source.getFootprint()
        source.set(self.deblendSkippedKey, True)
        if self.config.notDeblendedMask:
            for mask in masks:
                mask.addMaskPlane(self.config.notDeblendedMask)
                fp.spans.setMask(mask, mask.getPlaneBitMask(self.config.notDeblendedMask))
        # The deblender didn't run on this source, so it has zero runtime
        source.set(self.runtimeKey, 0)
        # Set the center of the parent
        bbox = fp.getBBox()
        centerX = int(bbox.getMinX()+bbox.getWidth()/2)
        centerY = int(bbox.getMinY()+bbox.getHeight()/2)
        source.set(self.peakCenter, Point2I(centerX, centerY))
        # There are no deblended children, so nChild = 0
        source.set(self.nChildKey, 0)
        # But we also want to know how many peaks that we would have
        # deblended if the parent wasn't skipped.
        source.set(self.nPeaksKey, len(fp.peaks))
        # The blend was skipped, so it didn't take any iterations
        source.set(self.iterKey, 0)
        # Top level parents are not a detected peak, so they have no peakId
        source.set(self.peakIdKey, 0)
        # Top level parents also have no parentNPeaks
        source.set(self.parentNPeaksKey, 0)

    def _addChild(self, parentId, sources, heavy, scarletSource, blend_converged, xy0, flux):
        """Add a child to a catalog

        This creates a new child in the source catalog,
        assigning it a parent id, adding a footprint,
        and setting all appropriate flags based on the
        deblender result.
        """
        assert len(heavy.getPeaks()) == 1
        src = sources.addNew()
        src.assign(heavy.getPeaks()[0], self.peakSchemaMapper)
        src.setParent(parentId)
        src.setFootprint(heavy)
        # Set the psf key based on whether or not the source was
        # deblended using the PointSource model.
        # This key is not that useful anymore since we now keep track of
        # `modelType`, but we continue to propagate it in case code downstream
        # is expecting it.
        src.set(self.psfKey, scarletSource.__class__.__name__ == "PointSource")
        src.set(self.runtimeKey, 0)
        src.set(self.blendConvergenceFailedFlagKey, not blend_converged)

        # Set the position of the peak from the parent footprint
        # This will make it easier to match the same source across
        # deblenders and across observations, where the peak
        # position is unlikely to change unless enough time passes
        # for a source to move on the sky.
        peak = scarletSource.detectedPeak
        src.set(self.peakCenter, Point2I(peak["i_x"], peak["i_y"]))
        src.set(self.peakIdKey, peak["id"])

        # The children have a single peak
        src.set(self.nPeaksKey, 1)

        # Store the flux at the center of the model and the total
        # scarlet flux measurement.
        morph = afwDet.multiband.heavyFootprintToImage(heavy).image.array

        # Set the flux at the center of the model (for SNR)
        try:
            cy, cx = scarletSource.center
            # Clip the center to the bounds of the morphology image
            cy = np.max([np.min([int(np.round(cy)), morph.shape[0]-1]), 0])
            cx = np.max([np.min([int(np.round(cx)), morph.shape[1]-1]), 0])
            src.set(self.modelCenterFlux, morph[cy, cx])
        except AttributeError:
            msg = "Did not recognize coordinates for source type of `{0}`, "
            msg += "could not write coordinates or center flux. "
            msg += "Add `{0}` to meas_extensions_scarlet to properly persist this information."
            logger.warning(msg.format(type(scarletSource)))

        src.set(self.modelTypeKey, scarletSource.__class__.__name__)
        src.set(self.edgeFluxFlagKey, scarletSource.isEdge)
        # Include the source flux in the model space in the catalog.
        # This uses the narrower model PSF, which ensures that all sources
        # not located on an edge have all of their flux included in the
        # measurement.
        src.set(self.scarletFluxKey, flux)
        return src