Coverage for python/lsst/meas/extensions/scarlet/scarletDeblendTask.py: 15%

# This file is part of meas_extensions_scarlet.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import logging
import numpy as np
import scarlet
from scarlet.psf import ImagePSF, GaussianPSF
from scarlet import Blend, Frame, Observation
from scarlet.renderer import ConvolutionRenderer
from scarlet.initialization import init_all_sources

import lsst.log
import lsst.pex.config as pexConfig
from lsst.pex.exceptions import InvalidParameterError
import lsst.pipe.base as pipeBase
from lsst.geom import Point2I, Box2I, Point2D
import lsst.afw.geom.ellipses as afwEll
import lsst.afw.image.utils
import lsst.afw.image as afwImage
import lsst.afw.detection as afwDet
import lsst.afw.table as afwTable

from .source import modelToHeavy

# scarlet initialization allows the user to specify the maximum number
# of components for a source but will fall back to fewer components or
# an initial PSF morphology depending on the S/N. If either of those
# happens, scarlet currently warns that the type of source created by the
# user was modified. This is not ideal behavior, as it creates a lot of
# unnecessary warnings for expected behavior and the information is
# already persisted due to the change in source type.
# So we silence all of the initialization warnings here to prevent
# polluting the log files.
scarletInitLogger = logging.getLogger("scarlet.initialisation")
scarletSourceLogger = logging.getLogger("scarlet.source")
scarletInitLogger.setLevel(logging.ERROR)
scarletSourceLogger.setLevel(logging.ERROR)

__all__ = ["deblend", "ScarletDeblendConfig", "ScarletDeblendTask"]

logger = lsst.log.Log.getLogger("meas.deblender.deblend")


class IncompleteDataError(Exception):
    """The PSF could not be computed due to incomplete data
    """
    pass


class ScarletGradientError(Exception):
    """An error occurred during optimization

    This error occurs when the optimizer encounters
    a NaN value while calculating the gradient.
    """
    def __init__(self, iterations, sources):
        self.iterations = iterations
        self.sources = sources
        msg = ("ScarletGradientError in iteration {0}. "
               "NaN values introduced in sources {1}")
        self.message = msg.format(iterations, sources)

    def __str__(self):
        return self.message


def _checkBlendConvergence(blend, f_rel):
    """Check whether or not a blend has converged
    """
    deltaLoss = np.abs(blend.loss[-2] - blend.loss[-1])
    convergence = f_rel * np.abs(blend.loss[-1])
    return deltaLoss < convergence
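

# A minimal sketch (not part of the original task) illustrating the stopping
# rule above: the fit is considered converged when the change in the loss
# between the last two iterations drops below ``f_rel`` times the magnitude
# of the current loss. The ``SimpleNamespace`` stand-in for a scarlet blend
# is an illustrative assumption, not scarlet API.
def _demoCheckBlendConvergence():
    from types import SimpleNamespace
    blend = SimpleNamespace(loss=[-100.0, -100.005])
    # |(-100.0) - (-100.005)| = 0.005 < 1e-4 * 100.005 ~= 0.01, so converged
    return _checkBlendConvergence(blend, f_rel=1e-4)  # True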


def _getPsfFwhm(psf):
    """Calculate the FWHM of the `psf`
    """
    return psf.computeShape().getDeterminantRadius() * 2.35
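

# The factor 2.35 above is the usual Gaussian sigma-to-FWHM approximation.
# A quick check of the exact factor (a sketch, not used by the task):
def _demoSigmaToFwhmFactor():
    # FWHM = 2 * sqrt(2 * ln 2) * sigma ~= 2.3548 * sigma
    return 2.0 * np.sqrt(2.0 * np.log(2.0))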


def _computePsfImage(self, position=None):
    """Get a multiband PSF image

    The PSF kernel image is computed for each band and combined
    into a (filter, y, x) array, which is returned.
    The result is not cached, so if the same PSF is expected
    to be used multiple times it is a good idea to store the
    result in another variable.

    Note: this is a temporary fix during the deblender sprint.
    In the future this function will replace the current method
    in `afw.MultibandExposure.computePsfImage` (DM-19789).

    Parameters
    ----------
    position : `Point2D` or `tuple`
        Coordinates to evaluate the PSF. If `position` is `None`
        then `Psf.getAveragePosition()` is used.

    Returns
    -------
    psfImage : array
        The multiband PSF image.
    """
    psfs = []
    # Make the coordinates into a Point2D (if necessary)
    if not isinstance(position, Point2D) and position is not None:
        position = Point2D(position[0], position[1])
    for bidx, single in enumerate(self.singles):
        try:
            if position is None:
                psf = single.getPsf().computeImage()
            else:
                psf = single.getPsf().computeKernelImage(position)
            psfs.append(psf)
        except InvalidParameterError:
            # This band failed to compute the PSF due to incomplete data
            # at that location. This is unlikely to be a problem for Rubin,
            # however the edges of some HSC COSMOS fields contain incomplete
            # data in some bands, so we track this error to distinguish it
            # from unknown errors.
            msg = "Failed to compute PSF at {} in band {}"
            raise IncompleteDataError(msg.format(position, self.filters[bidx]))

    left = np.min([psf.getBBox().getMinX() for psf in psfs])
    bottom = np.min([psf.getBBox().getMinY() for psf in psfs])
    right = np.max([psf.getBBox().getMaxX() for psf in psfs])
    top = np.max([psf.getBBox().getMaxY() for psf in psfs])
    bbox = Box2I(Point2I(left, bottom), Point2I(right, top))
    psfs = [afwImage.utils.projectImage(psf, bbox) for psf in psfs]
    psfImage = afwImage.MultibandImage.fromImages(self.filters, psfs)
    return psfImage
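

# A usage sketch (assumed, not part of the original module): because the
# helper above takes the exposure as its first argument, it can be called as
# a plain function on a `MultibandExposure` until DM-19789 lands.
def _demoComputePsfImage(mExposure, x=None, y=None):
    # Evaluate at the average PSF position, or at an explicit point
    position = None if x is None else Point2D(x, y)
    return _computePsfImage(mExposure, position)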


def getFootprintMask(footprint, mExposure):
    """Mask pixels outside the footprint

    Parameters
    ----------
    mExposure : `lsst.afw.image.MultibandExposure`
        The multiband exposure containing the image,
        mask, and variance data
    footprint : `lsst.afw.detection.Footprint`
        The footprint of the parent to deblend

    Returns
    -------
    footprintMask : array
        Boolean array with pixels outside the footprint set to `True`.
    """
    bbox = footprint.getBBox()
    fpMask = afwImage.Mask(bbox)
    footprint.spans.setMask(fpMask, 1)
    fpMask = ~fpMask.getArray().astype(bool)
    return fpMask
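

# A small sketch (illustrative, not called by the task): build a footprint
# covering a 3x3 box and verify that the returned mask is False everywhere
# inside the footprint. `mExposure` is unused by getFootprintMask, so None
# suffices here.
def _demoFootprintMask():
    import lsst.afw.geom as afwGeom
    box = Box2I(Point2I(0, 0), Point2I(2, 2))
    footprint = afwDet.Footprint(afwGeom.SpanSet(box))
    mask = getFootprintMask(footprint, None)
    return mask.any()  # False: every pixel in the box is inside the footprint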


def deblend(mExposure, footprint, config):
    """Deblend a parent footprint

    Parameters
    ----------
    mExposure : `lsst.afw.image.MultibandExposure`
        The multiband exposure containing the image,
        mask, and variance data
    footprint : `lsst.afw.detection.Footprint`
        The footprint of the parent to deblend
    config : `ScarletDeblendConfig`
        Configuration of the deblending task

    Returns
    -------
    blend : `scarlet.Blend`
        The fitted blend containing the deblended sources.
    skipped : list of int
        Indices of the peaks that could not be initialized.
    """
    # Extract coordinates from each MultiColorPeak
    bbox = footprint.getBBox()

    # Create the data array from the masked images
    images = mExposure.image[:, bbox].array

    # Use the inverse variance as the weights
    if config.useWeights:
        weights = 1/mExposure.variance[:, bbox].array
    else:
        weights = np.ones_like(images)
    badPixels = mExposure.mask.getPlaneBitMask(config.badMask)
    mask = mExposure.mask[:, bbox].array & badPixels
    weights[mask > 0] = 0

    # Mask out the pixels outside the footprint
    mask = getFootprintMask(footprint, mExposure)
    weights *= ~mask

    psfs = _computePsfImage(mExposure, footprint.getCentroid()).array.astype(np.float32)
    psfs = ImagePSF(psfs)
    model_psf = GaussianPSF(sigma=(config.modelPsfSigma,)*len(mExposure.filters))

    frame = Frame(images.shape, psf=model_psf, channels=mExposure.filters)
    observation = Observation(images, psf=psfs, weights=weights, channels=mExposure.filters)
    if config.convolutionType == "fft":
        observation.match(frame)
    elif config.convolutionType == "real":
        renderer = ConvolutionRenderer(observation, frame, convolution_type="real")
        observation.match(frame, renderer=renderer)
    else:
        raise ValueError("Unrecognized convolution type {}".format(config.convolutionType))

    assert config.sourceModel in ["single", "double", "compact", "fit"]

    # Set the appropriate number of components
    if config.sourceModel == "single":
        maxComponents = 1
    elif config.sourceModel == "double":
        maxComponents = 2
    elif config.sourceModel == "compact":
        maxComponents = 0
    elif config.sourceModel == "point":
        raise NotImplementedError("Point source photometry is currently not implemented")
    elif config.sourceModel == "fit":
        # It is likely in the future that there will be some heuristic
        # used to determine what type of model to use for each source,
        # but that has not yet been implemented (see DM-22551)
        raise NotImplementedError("sourceModel 'fit' has not been implemented yet")

    # Convert the centers to pixel coordinates
    xmin = bbox.getMinX()
    ymin = bbox.getMinY()
    centers = [np.array([peak.getIy()-ymin, peak.getIx()-xmin], dtype=int) for peak in footprint.peaks]

    # Only deblend sources that can be initialized
    sources, skipped = init_all_sources(
        frame=frame,
        centers=centers,
        observations=observation,
        thresh=config.morphThresh,
        max_components=maxComponents,
        min_snr=config.minSNR,
        shifting=False,
        fallback=config.fallback,
        silent=config.catchFailures,
        set_spectra=config.setSpectra,
    )

    # Attach the peak to all of the initialized sources
    srcIndex = 0
    for k, center in enumerate(centers):
        if k not in skipped:
            # This is just to make sure that there isn't a coding bug
            assert np.all(sources[srcIndex].center == center)
            # Store the record for the peak with the appropriate source
            sources[srcIndex].detectedPeak = footprint.peaks[k]
            srcIndex += 1

    # Create the blend and attempt to optimize it
    blend = Blend(sources, observation)
    try:
        blend.fit(max_iter=config.maxIter, e_rel=config.relativeError)
    except ArithmeticError:
        # This occurs when a gradient update produces a NaN value.
        # This is usually due to a source initialized with a
        # negative SED or no flux, often because the peak
        # is a noise fluctuation in one band and not a real source.
        iterations = len(blend.loss)
        failedSources = []
        for k, src in enumerate(sources):
            if np.any(~np.isfinite(src.get_model())):
                failedSources.append(k)
        raise ScarletGradientError(iterations, failedSources)

    return blend, skipped
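

# A usage sketch (assumed, not LSST pipeline code): given a multiband
# exposure and a parent footprint from detection, run the module-level
# deblender above and check convergence. `mExposure` and `footprint` are
# placeholders the caller must supply.
def _demoDeblendFootprint(mExposure, footprint):
    config = ScarletDeblendConfig()
    config.maxIter = 200           # stop earlier than the default 300
    config.sourceModel = "single"  # one component per source
    blend, skipped = deblend(mExposure, footprint, config)
    converged = _checkBlendConvergence(blend, config.relativeError)
    return blend, skipped, converged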


class ScarletDeblendConfig(pexConfig.Config):
    """MultibandDeblendConfig

    Configuration for the multiband deblender.
    The parameters are organized by the parameter types, which are

    - Stopping Criteria: Used to determine if the fit has converged
    - Position Fitting Criteria: Used to fit the positions of the peaks
    - Constraints: Used to apply constraints to the peaks and their components
    - Other: Parameters that don't fit into the above categories
    """
    # Stopping Criteria
    maxIter = pexConfig.Field(dtype=int, default=300,
                              doc=("Maximum number of iterations to deblend a single parent"))
    relativeError = pexConfig.Field(dtype=float, default=1e-4,
                                    doc=("Relative change in the loss function between "
                                         "iterations required to exit the fitter"))

    # Constraints
    morphThresh = pexConfig.Field(dtype=float, default=1,
                                  doc="Fraction of background RMS a pixel must have "
                                      "to be included in the initial morphology")
    # Other scarlet parameters
    useWeights = pexConfig.Field(
        dtype=bool, default=True,
        doc=("Whether or not to use inverse variance weighting. "
             "If `useWeights` is `False` then flat weights are used"))
    modelPsfSize = pexConfig.Field(
        dtype=int, default=11,
        doc="Model PSF side length in pixels")
    modelPsfSigma = pexConfig.Field(
        dtype=float, default=0.8,
        doc="Define sigma for the model frame PSF")
    minSNR = pexConfig.Field(
        dtype=float, default=50,
        doc="Minimum signal-to-noise ratio to accept the source. "
            "Sources with lower flux will be initialized with the PSF but updated "
            "like an ordinary ExtendedSource (known in scarlet as a `CompactSource`).")
    saveTemplates = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to save the SEDs and templates")
    processSingles = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to process isolated sources in the deblender")
    convolutionType = pexConfig.Field(
        dtype=str, default="fft",
        doc="Type of convolution to render the model to the observations.\n"
            "- 'fft': perform convolutions in Fourier space\n"
            "- 'real': perform convolutions in real space.")
    sourceModel = pexConfig.Field(
        dtype=str, default="double",
        doc=("How to determine which model to use for sources, from\n"
             "- 'single': use a single component for all sources\n"
             "- 'double': use a bulge-disk model for all sources\n"
             "- 'compact': use a single component model, initialized with a point source morphology, "
             "for all sources\n"
             "- 'point': use a point-source model for all sources\n"
             "- 'fit': use a PSF fitting model to determine the number of components (not yet implemented)")
    )
    setSpectra = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to solve for the best-fit spectra during initialization. "
            "This makes initialization slightly longer, as it requires a convolution "
            "to set the optimal spectra, but results in a much better initial log-likelihood "
            "and reduced total runtime, with convergence in fewer iterations.")

    # Mask-plane restrictions
    badMask = pexConfig.ListField(
        dtype=str, default=["BAD", "CR", "NO_DATA", "SAT", "SUSPECT", "EDGE"],
        doc="Mask planes marking pixels to exclude (given zero weight) "
            "during deblending")
    statsMask = pexConfig.ListField(dtype=str, default=["SAT", "INTRP", "NO_DATA"],
                                    doc="Mask planes to ignore when performing statistics")
    maskLimits = pexConfig.DictField(
        keytype=str,
        itemtype=float,
        default={},
        doc=("Mask planes with the corresponding limit on the fraction of masked pixels. "
             "Sources violating this limit will not be deblended."),
    )

    # Size restrictions
    maxNumberOfPeaks = pexConfig.Field(
        dtype=int, default=0,
        doc=("Only deblend the brightest maxNumberOfPeaks peaks in the parent"
             " (<= 0: unlimited)"))
    maxFootprintArea = pexConfig.Field(
        dtype=int, default=1000000,
        doc=("Maximum area for footprints before they are ignored as large; "
             "non-positive means no threshold applied"))
    maxFootprintSize = pexConfig.Field(
        dtype=int, default=0,
        doc=("Maximum linear dimension for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))
    minFootprintAxisRatio = pexConfig.Field(
        dtype=float, default=0.0,
        doc=("Minimum axis ratio for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))

    # Failure modes
    fallback = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to fall back to a smaller number of components if a source does not initialize"
    )
    notDeblendedMask = pexConfig.Field(
        dtype=str, default="NOT_DEBLENDED", optional=True,
        doc="Mask name for footprints not deblended, or None")
    catchFailures = pexConfig.Field(
        dtype=bool, default=True,
        doc=("If True, catch exceptions thrown by the deblender, log them, "
             "and set a flag on the parent, instead of letting them propagate up"))


class ScarletDeblendTask(pipeBase.Task):
    """ScarletDeblendTask

    Split blended sources into individual sources.

    This task has no return value; it only modifies the SourceCatalog in-place.
    """
    ConfigClass = ScarletDeblendConfig
    _DefaultName = "scarletDeblend"

    def __init__(self, schema, peakSchema=None, **kwargs):
        """Create the task, adding necessary fields to the given schema.

        Parameters
        ----------
        schema : `lsst.afw.table.schema.schema.Schema`
            Schema object for measurement fields; will be modified in-place.
        peakSchema : `lsst.afw.table.schema.schema.Schema`
            Schema of Footprint Peaks that will be passed to the deblender.
            Any fields beyond the PeakTable minimal schema will be transferred
            to the main source Schema. If None, no fields will be transferred
            from the Peaks.
        **kwargs
            Passed to Task.__init__.
        """
        pipeBase.Task.__init__(self, **kwargs)

        peakMinimalSchema = afwDet.PeakTable.makeMinimalSchema()
        if peakSchema is None:
            # In this case, the peakSchemaMapper will transfer nothing, but
            # we'll still have one to simplify downstream code
            self.peakSchemaMapper = afwTable.SchemaMapper(peakMinimalSchema, schema)
        else:
            self.peakSchemaMapper = afwTable.SchemaMapper(peakSchema, schema)
            for item in peakSchema:
                if item.key not in peakMinimalSchema:
                    self.peakSchemaMapper.addMapping(item.key, item.field)
                    # Because SchemaMapper makes a copy of the output schema
                    # you give its ctor, it isn't updating this Schema in
                    # place. That's probably a design flaw, but in the
                    # meantime, we'll keep that schema in sync with the
                    # peakSchemaMapper.getOutputSchema() manually, by adding
                    # the same fields to both.
                    schema.addField(item.field)
            assert schema == self.peakSchemaMapper.getOutputSchema(), "Logic bug mapping schemas"
        self._addSchemaKeys(schema)
        self.schema = schema
        self.toCopyFromParent = [item.key for item in self.schema
                                 if item.field.getName().startswith("merge_footprint")]

    def _addSchemaKeys(self, schema):
        """Add deblender specific keys to the schema
        """
        self.runtimeKey = schema.addField('deblend_runtime', type=np.float32, doc='runtime in ms')

        self.iterKey = schema.addField('deblend_iterations', type=np.int32, doc='iterations to converge')

        self.nChildKey = schema.addField('deblend_nChild', type=np.int32,
                                         doc='Number of children this object has (defaults to 0)')
        self.psfKey = schema.addField('deblend_deblendedAsPsf', type='Flag',
                                      doc='Deblender thought this source looked like a PSF')
        self.tooManyPeaksKey = schema.addField('deblend_tooManyPeaks', type='Flag',
                                               doc='Source had too many peaks; '
                                                   'only the brightest were included')
        self.tooBigKey = schema.addField('deblend_parentTooBig', type='Flag',
                                         doc='Parent footprint covered too many pixels')
        self.maskedKey = schema.addField('deblend_masked', type='Flag',
                                         doc='Parent footprint was predominantly masked')
        self.sedNotConvergedKey = schema.addField('deblend_sedConvergenceFailed', type='Flag',
                                                  doc='scarlet sed optimization did not converge before '
                                                      'config.maxIter')
        self.morphNotConvergedKey = schema.addField('deblend_morphConvergenceFailed', type='Flag',
                                                    doc='scarlet morph optimization did not converge before '
                                                        'config.maxIter')
        self.blendConvergenceFailedFlagKey = schema.addField('deblend_blendConvergenceFailedFlag',
                                                             type='Flag',
                                                             doc='at least one source in the blend '
                                                                 'failed to converge')
        self.edgePixelsKey = schema.addField('deblend_edgePixels', type='Flag',
                                             doc='Source had flux on the edge of the parent footprint')
        self.deblendFailedKey = schema.addField('deblend_failed', type='Flag',
                                                doc="Deblending failed on source")
        self.deblendErrorKey = schema.addField('deblend_error', type="String", size=25,
                                               doc='Name of error if the blend failed')
        self.deblendSkippedKey = schema.addField('deblend_skipped', type='Flag',
                                                 doc="Deblender skipped this source")
        self.peakCenter = afwTable.Point2IKey.addFields(schema, name="deblend_peak_center",
                                                        doc="Center used to apply constraints in scarlet",
                                                        unit="pixel")
        self.peakIdKey = schema.addField("deblend_peakId", type=np.int32,
                                         doc="ID of the peak in the parent footprint. "
                                             "This is not unique, but the combination of 'parent' "
                                             "and 'peakId' should be unique for all child sources. "
                                             "Top level blends with no parents have 'peakId=0'")
        self.modelCenterFlux = schema.addField('deblend_peak_instFlux', type=float, units='count',
                                               doc="The instFlux at the peak position of the "
                                                   "deblended model")
        self.modelTypeKey = schema.addField("deblend_modelType", type="String", size=25,
                                            doc="The type of model used, for example "
                                                "MultiExtendedSource, SingleExtendedSource, PointSource")
        self.nPeaksKey = schema.addField("deblend_nPeaks", type=np.int32,
                                         doc="Number of initial peaks in the blend. "
                                             "This includes peaks that may have been culled "
                                             "during deblending or failed to deblend")
        self.parentNPeaksKey = schema.addField("deblend_parentNPeaks", type=np.int32,
                                               doc="Same as deblend_nPeaks, but the number of peaks "
                                                   "in the parent footprint")
        self.scarletFluxKey = schema.addField("deblend_scarletFlux", type=np.float32,
                                              doc="Flux measurement from scarlet")
        self.scarletLogLKey = schema.addField("deblend_logL", type=np.float32,
                                              doc="Final logL, used to identify regressions in scarlet.")

        # self.log.trace('Added keys to schema: %s', ", ".join(str(x) for x in
        #                (self.nChildKey, self.tooManyPeaksKey, self.tooBigKey))
        #                )
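
    # A reading sketch (illustrative assumption, not part of the original
    # task): once the task has run, the fields added above can be read back
    # from any record in the output catalogs using the stored keys.
    def _demoReadDeblendFlags(self, record):
        return {
            "runtime_ms": record.get(self.runtimeKey),
            "nChild": record.get(self.nChildKey),
            "skipped": record.get(self.deblendSkippedKey),
            "modelType": record.get(self.modelTypeKey),
        }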

    @pipeBase.timeMethod
    def run(self, mExposure, mergedSources):
        """Get the psf from each exposure and then run deblend().

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        mergedSources : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.

        Returns
        -------
        templateCatalogs : dict
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are catalogs with heavy footprints that are the templates
            created by the multiband templates.
        """
        return self.deblend(mExposure, mergedSources)

    @pipeBase.timeMethod
    def deblend(self, mExposure, sources):
        """Deblend a data cube of multiband images

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        sources : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.

        Returns
        -------
        templateCatalogs : dict or None
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are catalogs with heavy footprints that are the templates
            created by the multiband templates.
        """
        import time

        filters = mExposure.filters
        self.log.info("Deblending {0} sources in {1} exposure bands".format(len(sources), len(mExposure)))

        # Create the output catalogs, seeding each band's catalog with a
        # clone of the input sources
        templateCatalogs = {}
        for f in filters:
            _catalog = afwTable.SourceCatalog(sources.table.clone())
            _catalog.extend(sources)
            templateCatalogs[f] = _catalog

        n0 = len(sources)
        nparents = 0
        for pk, src in enumerate(sources):
            foot = src.getFootprint()
            bbox = foot.getBBox()
            peaks = foot.getPeaks()

            # Since we use the first peak for the parent object, we should
            # propagate its flags to the parent source.
            src.assign(peaks[0], self.peakSchemaMapper)

            # Block of skipping conditions
            if len(peaks) < 2 and not self.config.processSingles:
                for f in filters:
                    templateCatalogs[f][pk].set(self.runtimeKey, 0)
                continue
            if self._isLargeFootprint(foot):
                src.set(self.tooBigKey, True)
                self._skipParent(src, mExposure.mask)
                self.log.trace('Parent %i: skipping large footprint', int(src.getId()))
                continue
            if self._isMasked(foot, mExposure):
                src.set(self.maskedKey, True)
                mask = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0)
                mask = afwImage.MaskX(mask, xy0=bbox.getMin())
                self._skipParent(src, mask)
                self.log.trace('Parent %i: skipping masked footprint', int(src.getId()))
                continue
            if self.config.maxNumberOfPeaks > 0 and len(peaks) > self.config.maxNumberOfPeaks:
                src.set(self.tooManyPeaksKey, True)
                self._skipParent(src, mExposure.mask)
                msg = 'Parent {0}: Too many peaks, skipping blend'
                self.log.trace(msg.format(int(src.getId())))
                # Unlike meas_deblender, in scarlet we skip the entire blend
                # if the number of peaks exceeds max peaks, since neglecting
                # to model any peaks often results in catastrophic failure
                # of scarlet to generate models for the brighter sources.
                continue

            nparents += 1
            self.log.trace('Parent %i: deblending %i peaks', int(src.getId()), len(peaks))
            # Run the deblender
            blendError = None
            try:
                t0 = time.time()
                # Build the parameter lists with the same ordering
                blend, skipped = deblend(mExposure, foot, self.config)
                tf = time.time()
                runtime = (tf-t0)*1000
                src.set(self.deblendFailedKey, False)
                src.set(self.runtimeKey, runtime)
                converged = _checkBlendConvergence(blend, self.config.relativeError)
                # The flag records a convergence *failure*, so it must be
                # the negation of the convergence check
                src.set(self.blendConvergenceFailedFlagKey, not converged)
                # Use a distinct name so the input catalog `sources` is
                # not shadowed
                blendSources = list(blend.sources)
                # Re-insert place holders for skipped sources
                # to propagate them in the catalog so
                # that the peaks stay consistent
                for k in skipped:
                    blendSources.insert(k, None)
            # Catch all errors and filter out the ones that we know about
            except Exception as e:
                blendError = type(e).__name__
                if isinstance(e, ScarletGradientError):
                    src.set(self.iterKey, e.iterations)
                elif not isinstance(e, IncompleteDataError):
                    blendError = "UnknownError"
                    self._skipParent(src, mExposure.mask)
                    if self.config.catchFailures:
                        # Make it easy to find UnknownErrors in the log file
                        self.log.warn("UnknownError")
                        import traceback
                        traceback.print_exc()
                    else:
                        raise

                self.log.warn("Unable to deblend source %d: %s" % (src.getId(), blendError))
                src.set(self.deblendFailedKey, True)
                src.set(self.deblendErrorKey, blendError)
                self._skipParent(src, mExposure.mask)
                continue

            # Add the merged source as a parent in the catalog for each band
            templateParents = {}
            parentId = src.getId()
            for f in filters:
                templateParents[f] = templateCatalogs[f][pk]
                templateParents[f].set(self.nPeaksKey, len(foot.peaks))
                templateParents[f].set(self.runtimeKey, runtime)
                templateParents[f].set(self.iterKey, len(blend.loss))
                logL = blend.loss[-1]-blend.observations[0].log_norm
                templateParents[f].set(self.scarletLogLKey, logL)

            # Add each source to the catalogs in each band
            nchild = 0
            for k, source in enumerate(blendSources):
                # Skip any sources with no flux or that scarlet skipped because
                # it could not initialize
                if k in skipped:
                    # No need to propagate anything
                    continue
                else:
                    src.set(self.deblendSkippedKey, False)
                    models = modelToHeavy(source, filters, xy0=bbox.getMin(),
                                          observation=blend.observations[0])

                flux = scarlet.measure.flux(source)
                for fidx, f in enumerate(filters):
                    if len(models[f].getPeaks()) != 1:
                        err = "Heavy footprint should have a single peak, got {0}"
                        raise ValueError(err.format(len(models[f].peaks)))
                    cat = templateCatalogs[f]
                    child = self._addChild(src, cat, models[f], source, converged,
                                           xy0=bbox.getMin(), flux=flux[fidx])
                    if parentId == 0:
                        child.setId(src.getId())
                        child.set(self.runtimeKey, runtime)
                nchild += 1

            # Set the number of children for each parent
            for f in filters:
                templateParents[f].set(self.nChildKey, nchild)

        K = len(list(templateCatalogs.values())[0])
        self.log.info('Deblended: of %i sources, %i were deblended, creating %i children, total %i sources'
                      % (n0, nparents, K-n0, K))
        return templateCatalogs

    def _isLargeFootprint(self, footprint):
        """Returns whether a Footprint is large

        'Large' is defined by thresholds on the area, size and axis ratio.
        These may be disabled independently by configuring them to be
        non-positive.

        This is principally intended to get rid of satellite streaks, which the
        deblender or other downstream processing can have trouble dealing with
        (e.g., multiple large HeavyFootprints can chew up memory).
        """
        if self.config.maxFootprintArea > 0 and footprint.getArea() > self.config.maxFootprintArea:
            return True
        if self.config.maxFootprintSize > 0:
            bbox = footprint.getBBox()
            if max(bbox.getWidth(), bbox.getHeight()) > self.config.maxFootprintSize:
                return True
        if self.config.minFootprintAxisRatio > 0:
            axes = afwEll.Axes(footprint.getShape())
            if axes.getB() < self.config.minFootprintAxisRatio*axes.getA():
                return True
        return False

    def _isMasked(self, footprint, mExposure):
        """Returns whether the footprint violates the mask limits"""
        bbox = footprint.getBBox()
        mask = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0)
        size = float(footprint.getArea())
        for maskName, limit in self.config.maskLimits.items():
            maskVal = mExposure.mask.getPlaneBitMask(maskName)
            _mask = afwImage.MaskX(mask & maskVal, xy0=bbox.getMin())
            unmaskedSpan = footprint.spans.intersectNot(_mask)  # spanset of unmasked pixels
            if (size - unmaskedSpan.getArea())/size > limit:
                return True
        return False
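
    # A worked example (illustrative, not called by the task): with
    # maskLimits={"NO_DATA": 0.25}, a footprint of 1000 pixels with 300
    # NO_DATA pixels leaves 700 unmasked, so the masked fraction is
    # (1000 - 700) / 1000 = 0.3 > 0.25 and the parent is skipped.
    @staticmethod
    def _demoMaskLimitCheck(size=1000.0, unmasked=700.0, limit=0.25):
        return (size - unmasked)/size > limit  # True in this example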

    def _skipParent(self, source, masks):
        """Indicate that the parent source is not being deblended

        We set the appropriate flags and masks for each exposure.

        Parameters
        ----------
        source : `lsst.afw.table.source.source.SourceRecord`
            The source to flag as skipped
        masks : list of `lsst.afw.image.MaskX`
            The mask in each band to update with the non-detection
        """
        fp = source.getFootprint()
        source.set(self.deblendSkippedKey, True)
        if self.config.notDeblendedMask:
            for mask in masks:
                mask.addMaskPlane(self.config.notDeblendedMask)
                fp.spans.setMask(mask, mask.getPlaneBitMask(self.config.notDeblendedMask))
        # The deblender didn't run on this source, so it has zero runtime
        source.set(self.runtimeKey, 0)
        # Set the center of the parent
        bbox = fp.getBBox()
        centerX = int(bbox.getMinX()+bbox.getWidth()/2)
        centerY = int(bbox.getMinY()+bbox.getHeight()/2)
        source.set(self.peakCenter, Point2I(centerX, centerY))
        # There are no deblended children, so nChild = 0
        source.set(self.nChildKey, 0)
        # But we also want to know how many peaks we would have
        # deblended if the parent wasn't skipped.
        source.set(self.nPeaksKey, len(fp.peaks))
        # The blend was skipped, so it didn't take any iterations
        source.set(self.iterKey, 0)
        # Top level parents are not a detected peak, so they have no peakId
        source.set(self.peakIdKey, 0)
        # Top level parents also have no parentNPeaks
        source.set(self.parentNPeaksKey, 0)

    def _addChild(self, parent, sources, heavy, scarletSource, blend_converged, xy0, flux):
        """Add a child to a catalog

        This creates a new child in the source catalog,
        assigning it a parent id, adding a footprint,
        and setting all appropriate flags based on the
        deblender result.
        """
        assert len(heavy.getPeaks()) == 1
        src = sources.addNew()
        for key in self.toCopyFromParent:
            src.set(key, parent.get(key))
        src.assign(heavy.getPeaks()[0], self.peakSchemaMapper)
        src.setParent(parent.getId())
        src.setFootprint(heavy)
        # Set the psf key based on whether or not the source was
        # deblended using the PointSource model.
        # This key is not that useful anymore since we now keep track of
        # `modelType`, but we continue to propagate it in case code downstream
        # is expecting it.
        src.set(self.psfKey, scarletSource.__class__.__name__ == "PointSource")
        src.set(self.runtimeKey, 0)
        src.set(self.blendConvergenceFailedFlagKey, not blend_converged)

        # Set the position of the peak from the parent footprint.
        # This will make it easier to match the same source across
        # deblenders and across observations, where the peak
        # position is unlikely to change unless enough time passes
        # for a source to move on the sky.
        peak = scarletSource.detectedPeak
        src.set(self.peakCenter, Point2I(peak["i_x"], peak["i_y"]))
        src.set(self.peakIdKey, peak["id"])

        # The children have a single peak
        src.set(self.nPeaksKey, 1)

        # Store the flux at the center of the model and the total
        # scarlet flux measurement.
        morph = afwDet.multiband.heavyFootprintToImage(heavy).image.array

        # Set the flux at the center of the model (for SNR)
        try:
            cy, cx = scarletSource.center
            cy = np.max([np.min([int(np.round(cy)), morph.shape[0]-1]), 0])
            cx = np.max([np.min([int(np.round(cx)), morph.shape[1]-1]), 0])
            src.set(self.modelCenterFlux, morph[cy, cx])
        except AttributeError:
            msg = "Did not recognize coordinates for source type of `{0}`, "
            msg += "could not write coordinates or center flux. "
            msg += "Add `{0}` to meas_extensions_scarlet to properly persist this information."
            logger.warning(msg.format(type(scarletSource)))

        src.set(self.modelTypeKey, scarletSource.__class__.__name__)
        # Include the source flux in the model space in the catalog.
        # This uses the narrower model PSF, which ensures that all sources
        # not located on an edge have all of their flux included in the
        # measurement.
        src.set(self.scarletFluxKey, flux)
        return src
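

# A minimal end-to-end sketch (assumed usage, not part of this module):
# construct the task so it adds its fields to the source schema, then deblend
# a catalog of merged detections against a multiband exposure. `mExposure`
# and `mergedSources` are placeholders the caller must supply, and the merged
# catalog must have been built from this same schema object so that the
# deblend fields exist on its records.
def _demoRunScarletDeblendTask(mExposure, mergedSources):
    schema = afwTable.SourceTable.makeMinimalSchema()
    task = ScarletDeblendTask(schema=schema)
    # Returns one template catalog per band, keyed by filter name
    templateCatalogs = task.run(mExposure, mergedSources)
    return templateCatalogs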