Coverage for python/lsst/meas/extensions/scarlet/scarletDeblendTask.py: 15%
# This file is part of meas_extensions_scarlet.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

from functools import partial
import logging
import numpy as np
import scarlet
from scarlet.psf import ImagePSF, GaussianPSF
from scarlet import Blend, Frame, Observation
from scarlet.renderer import ConvolutionRenderer
from scarlet.detect import get_detect_wavelets
from scarlet.initialization import init_all_sources
from scarlet import lite

import lsst.pex.config as pexConfig
from lsst.pex.exceptions import InvalidParameterError
import lsst.pipe.base as pipeBase
from lsst.geom import Point2I, Box2I, Point2D
import lsst.afw.geom.ellipses as afwEll
import lsst.afw.image as afwImage
import lsst.afw.detection as afwDet
import lsst.afw.table as afwTable
from lsst.utils.timer import timeMethod

from .source import bboxToScarletBox, modelToHeavy, liteModelToHeavy

# Scarlet and proxmin have a different definition of log levels than the stack,
# so even "warnings" occur far more often than we would like.
# For now we only display scarlet and proxmin errors, as all other
# scarlet output would be considered "TRACE" by our standards.
scarletLogger = logging.getLogger("scarlet")
scarletLogger.setLevel(logging.ERROR)
proxminLogger = logging.getLogger("proxmin")
proxminLogger.setLevel(logging.ERROR)

__all__ = ["deblend", "ScarletDeblendConfig", "ScarletDeblendTask"]

logger = logging.getLogger(__name__)


class IncompleteDataError(Exception):
    """The PSF could not be computed due to incomplete data
    """
    pass


class ScarletGradientError(Exception):
    """An error occurred during optimization

    This error occurs when the optimizer encounters
    a NaN value while calculating the gradient.
    """
    def __init__(self, iterations, sources):
        self.iterations = iterations
        self.sources = sources
        msg = ("ScarletGradientError in iteration {0}. "
               "NaN values introduced in sources {1}")
        self.message = msg.format(iterations, sources)

    def __str__(self):
        return self.message


def _checkBlendConvergence(blend, f_rel):
    """Check whether or not a blend has converged
    """
    deltaLoss = np.abs(blend.loss[-2] - blend.loss[-1])
    convergence = f_rel * np.abs(blend.loss[-1])
    return deltaLoss < convergence
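
# The test above is a relative-change criterion:
# |loss[-2] - loss[-1]| < f_rel * |loss[-1]|.
# A minimal sketch of how it behaves, using hypothetical loss values
# (`FakeBlend` is illustrative only, not a real scarlet class):
#
#     >>> class FakeBlend:
#     ...     loss = [-100.0, -100.5]
#     >>> _checkBlendConvergence(FakeBlend(), f_rel=1e-2)
#     True
#     >>> _checkBlendConvergence(FakeBlend(), f_rel=1e-3)
#     False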


def _getPsfFwhm(psf):
    """Calculate the FWHM of the `psf`
    """
    return psf.computeShape().getDeterminantRadius() * 2.35
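
# The factor 2.35 approximates the Gaussian sigma-to-FWHM conversion
# 2 * sqrt(2 * ln(2)) ~= 2.3548, so this estimate implicitly assumes an
# approximately Gaussian PSF core:
#
#     >>> import numpy as np
#     >>> 2 * np.sqrt(2 * np.log(2))
#     2.3548200450309493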


def _computePsfImage(self, position=None):
    """Get a multiband PSF image

    The PSF kernel image is computed for each band
    and combined into a (filter, y, x) array.
    The result is not cached, so if the same PSF is expected
    to be used multiple times it is a good idea to store the
    result in another variable.

    Note: this is a temporary fix during the deblender sprint.
    In the future this function will replace the current method
    in `afw.MultibandExposure.computePsfImage` (DM-19789).

    Parameters
    ----------
    position : `Point2D` or `tuple`
        Coordinates to evaluate the PSF. If `position` is `None`
        then `Psf.getAveragePosition()` is used.

    Returns
    -------
    psfImage : array
        The multiband PSF image.
    """
    psfs = []
    # Make the coordinates into a Point2D (if necessary)
    if not isinstance(position, Point2D) and position is not None:
        position = Point2D(position[0], position[1])

    for bidx, single in enumerate(self.singles):
        try:
            if position is None:
                psf = single.getPsf().computeImage()
                psfs.append(psf)
            else:
                psf = single.getPsf().computeKernelImage(position)
                psfs.append(psf)
        except InvalidParameterError:
            # This band failed to compute the PSF due to incomplete data
            # at that location. This is unlikely to be a problem for Rubin,
            # however the edges of some HSC COSMOS fields contain incomplete
            # data in some bands, so we track this error to distinguish it
            # from unknown errors.
            msg = "Failed to compute PSF at {} in band {}"
            raise IncompleteDataError(msg.format(position, self.filters[bidx]))

    left = np.min([psf.getBBox().getMinX() for psf in psfs])
    bottom = np.min([psf.getBBox().getMinY() for psf in psfs])
    right = np.max([psf.getBBox().getMaxX() for psf in psfs])
    top = np.max([psf.getBBox().getMaxY() for psf in psfs])
    bbox = Box2I(Point2I(left, bottom), Point2I(right, top))
    psfs = [afwImage.utils.projectImage(psf, bbox) for psf in psfs]
    psfImage = afwImage.MultibandImage.fromImages(self.filters, psfs)
    return psfImage
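
# Although written with a `self` argument (to ease the planned move onto
# `afw.MultibandExposure`, see the note above), `_computePsfImage` is a
# module-level function and is called with a `MultibandExposure` as its
# first argument, as in `deblend` and `deblend_lite` below:
#
#     >>> psfs = _computePsfImage(mExposure, footprint.getCentroid()).array
#
# where `mExposure` and `footprint` stand in for a real multiband exposure
# and a parent footprint.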


def getFootprintMask(footprint, mExposure):
    """Mask pixels outside the footprint

    Parameters
    ----------
    mExposure : `lsst.image.MultibandExposure`
        The multiband exposure containing the image,
        mask, and variance data
    footprint : `lsst.detection.Footprint`
        The footprint of the parent to deblend

    Returns
    -------
    footprintMask : array
        Boolean array with pixels not in the footprint set to one.
    """
    bbox = footprint.getBBox()
    fpMask = afwImage.Mask(bbox)
    footprint.spans.setMask(fpMask, 1)
    fpMask = ~fpMask.getArray().astype(bool)
    return fpMask


def isPseudoSource(source, pseudoColumns):
    """Check if a source is a pseudo source.

    This is mostly for skipping sky objects,
    but any other column can also be added to disable
    deblending on a parent or individual source when
    set to `True`.

    Parameters
    ----------
    source : `lsst.afw.table.source.source.SourceRecord`
        The source to check for the pseudo bit.
    pseudoColumns : `list` of `str`
        A list of columns to check for pseudo sources.
    """
    isPseudo = False
    for col in pseudoColumns:
        try:
            isPseudo |= source[col]
        except KeyError:
            pass
    return isPseudo
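
# `isPseudoSource` accepts anything that supports column lookup by name,
# so below it is used both with parent `SourceRecord`s and with the
# `PeakRecord`s of a footprint. A hedged sketch with the default columns
# (`parent` stands in for a real record):
#
#     >>> isPseudoSource(parent, ["merge_peak_sky", "sky_source"])
#
# This returns `True` if either flag is set and silently ignores columns
# missing from the record's schema.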


def deblend(mExposure, footprint, config):
    """Deblend a parent footprint

    Parameters
    ----------
    mExposure : `lsst.image.MultibandExposure`
        The multiband exposure containing the image,
        mask, and variance data
    footprint : `lsst.detection.Footprint`
        The footprint of the parent to deblend
    config : `ScarletDeblendConfig`
        Configuration of the deblending task

    Returns
    -------
    blend : `scarlet.Blend`
        The scarlet blend class that contains all of the information
        about the parameters and results from scarlet
    skipped : `list` of `int`
        The indices of any children that failed to initialize
        and were skipped.
    spectrumInit : `bool`
        Whether or not all of the sources were initialized by jointly
        fitting their SEDs. This provides a better initialization
        but can create memory issues when a blend is too large or
        contains too many sources.
    """
    # Extract coordinates from each MultiColorPeak
    bbox = footprint.getBBox()

    # Create the data array from the masked images
    images = mExposure.image[:, bbox].array

    # Use the inverse variance as the weights
    if config.useWeights:
        weights = 1/mExposure.variance[:, bbox].array
    else:
        weights = np.ones_like(images)
    badPixels = mExposure.mask.getPlaneBitMask(config.badMask)
    mask = mExposure.mask[:, bbox].array & badPixels
    weights[mask > 0] = 0

    # Mask out the pixels outside the footprint
    mask = getFootprintMask(footprint, mExposure)
    weights *= ~mask

    psfs = _computePsfImage(mExposure, footprint.getCentroid()).array.astype(np.float32)
    psfs = ImagePSF(psfs)
    model_psf = GaussianPSF(sigma=(config.modelPsfSigma,)*len(mExposure.filters))

    frame = Frame(images.shape, psf=model_psf, channels=mExposure.filters)
    observation = Observation(images, psf=psfs, weights=weights, channels=mExposure.filters)
    if config.convolutionType == "fft":
        observation.match(frame)
    elif config.convolutionType == "real":
        renderer = ConvolutionRenderer(observation, frame, convolution_type="real")
        observation.match(frame, renderer=renderer)
    else:
        raise ValueError("Unrecognized convolution type {}".format(config.convolutionType))
    # "point" is included so that its branch below is reachable
    assert config.sourceModel in ["single", "double", "compact", "point", "fit"]

    # Set the appropriate number of components
    if config.sourceModel == "single":
        maxComponents = 1
    elif config.sourceModel == "double":
        maxComponents = 2
    elif config.sourceModel == "compact":
        maxComponents = 0
    elif config.sourceModel == "point":
        raise NotImplementedError("Point source photometry is currently not implemented")
    elif config.sourceModel == "fit":
        # It is likely in the future that there will be some heuristic
        # used to determine what type of model to use for each source,
        # but that has not yet been implemented (see DM-22551)
        raise NotImplementedError("sourceModel 'fit' has not been implemented yet")
    # Convert the centers to pixel coordinates
    xmin = bbox.getMinX()
    ymin = bbox.getMinY()
    centers = [
        np.array([peak.getIy() - ymin, peak.getIx() - xmin], dtype=int)
        for peak in footprint.peaks
        if not isPseudoSource(peak, config.pseudoColumns)
    ]

    # Choose whether or not to use the improved spectral initialization
    if config.setSpectra:
        if config.maxSpectrumCutoff <= 0:
            spectrumInit = True
        else:
            spectrumInit = len(centers) * bbox.getArea() < config.maxSpectrumCutoff
    else:
        spectrumInit = False
    # Only deblend sources that can be initialized
    sources, skipped = init_all_sources(
        frame=frame,
        centers=centers,
        observations=observation,
        thresh=config.morphThresh,
        max_components=maxComponents,
        min_snr=config.minSNR,
        shifting=False,
        fallback=config.fallback,
        silent=config.catchFailures,
        set_spectra=spectrumInit,
    )

    # Attach the peak to all of the initialized sources
    srcIndex = 0
    for k, center in enumerate(centers):
        if k not in skipped:
            # This is just to make sure that there isn't a coding bug
            assert np.all(sources[srcIndex].center == center)
            # Store the record for the peak with the appropriate source
            sources[srcIndex].detectedPeak = footprint.peaks[k]
            srcIndex += 1
    # Create the blend and attempt to optimize it
    blend = Blend(sources, observation)
    try:
        blend.fit(max_iter=config.maxIter, e_rel=config.relativeError)
    except ArithmeticError:
        # This occurs when a gradient update produces a NaN value.
        # This is usually due to a source initialized with a
        # negative SED or no flux, often because the peak
        # is a noise fluctuation in one band and not a real source.
        iterations = len(blend.loss)
        failedSources = []
        for k, src in enumerate(sources):
            if np.any(~np.isfinite(src.get_model())):
                failedSources.append(k)
        raise ScarletGradientError(iterations, failedSources)

    return blend, skipped, spectrumInit
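
# A hedged sketch of calling `deblend` directly (normally this is done by
# `ScarletDeblendTask.deblend` below); `mExposure` and `footprint` stand in
# for real inputs:
#
#     >>> config = ScarletDeblendConfig()
#     >>> blend, skipped, spectrumInit = deblend(mExposure, footprint, config)
#
# `skipped` holds the indices of peaks that failed to initialize, so
# `len(blend.sources)` can be smaller than the number of peaks.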


def deblend_lite(mExposure, footprint, config, wavelets=None):
    """Deblend a parent footprint

    Parameters
    ----------
    mExposure : `lsst.image.MultibandExposure`
        The multiband exposure containing the image,
        mask, and variance data
    footprint : `lsst.detection.Footprint`
        The footprint of the parent to deblend
    config : `ScarletDeblendConfig`
        Configuration of the deblending task
    wavelets : array, optional
        The wavelet decomposition of the full image,
        used when `config.morphImage` is "wavelet".

    Returns
    -------
    blend : `scarlet.lite.LiteBlend`
        The blend containing the fitted sources.
    skipped : `list`
        The sources that could not be initialized.
    spectrumInit : `bool`
        Whether or not the sources were initialized with
        their best-fit spectra.
    """
    # Extract coordinates from each MultiColorPeak
    bbox = footprint.getBBox()

    # Create the data array from the masked images
    images = mExposure.image[:, bbox].array
    variance = mExposure.variance[:, bbox].array

    # Use the inverse variance as the weights
    if config.useWeights:
        weights = 1/mExposure.variance[:, bbox].array
    else:
        weights = np.ones_like(images)
    badPixels = mExposure.mask.getPlaneBitMask(config.badMask)
    mask = mExposure.mask[:, bbox].array & badPixels
    weights[mask > 0] = 0

    # Mask out the pixels outside the footprint
    mask = getFootprintMask(footprint, mExposure)
    weights *= ~mask

    psfs = _computePsfImage(mExposure, footprint.getCentroid()).array.astype(np.float32)
    modelPsf = lite.integrated_circular_gaussian(sigma=config.modelPsfSigma)

    observation = lite.LiteObservation(
        images=images,
        variance=variance,
        weights=weights,
        psfs=psfs,
        model_psf=modelPsf[None, :, :],
        convolution_mode=config.convolutionType,
    )
    # Convert the centers to pixel coordinates
    xmin = bbox.getMinX()
    ymin = bbox.getMinY()
    centers = [
        np.array([peak.getIy() - ymin, peak.getIx() - xmin], dtype=int)
        for peak in footprint.peaks
        if not isPseudoSource(peak, config.pseudoColumns)
    ]

    # Initialize the sources
    if config.morphImage == "chi2":
        sources = lite.init_all_sources_main(
            observation,
            centers,
            min_snr=config.minSNR,
            thresh=config.morphThresh,
        )
    elif config.morphImage == "wavelet":
        _bbox = bboxToScarletBox(len(mExposure.filters), bbox, bbox.getMin())
        _wavelets = wavelets[(slice(None), *_bbox[1:].slices)]
        sources = lite.init_all_sources_wavelets(
            observation,
            centers,
            use_psf=False,
            wavelets=_wavelets,
            min_snr=config.minSNR,
        )
    else:
        raise ValueError("morphImage must be either 'chi2' or 'wavelet'.")
    # Set the optimizer
    if config.optimizer == "adaprox":
        parameterization = partial(
            lite.init_adaprox_component,
            bg_thresh=config.backgroundThresh,
            max_prox_iter=config.maxProxIter,
        )
    elif config.optimizer == "fista":
        parameterization = partial(
            lite.init_fista_component,
            bg_thresh=config.backgroundThresh,
        )
    else:
        raise ValueError("Unrecognized optimizer. Must be either 'adaprox' or 'fista'.")
    sources = lite.parameterize_sources(sources, observation, parameterization)
    # Attach the peak to all of the initialized sources
    for k, center in enumerate(centers):
        # This is just to make sure that there isn't a coding bug
        if len(sources[k].components) > 0 and np.any(sources[k].center != center):
            raise ValueError(f"Misaligned center, expected {center} but got {sources[k].center}")
        # Store the record for the peak with the appropriate source
        sources[k].detectedPeak = footprint.peaks[k]
    blend = lite.LiteBlend(sources, observation)

    # Initialize each source with its best fit spectrum.
    # This significantly cuts down on the number of iterations
    # that the optimizer needs and usually results in a better
    # fit, but using least squares on a very large blend causes memory issues.
    # This is typically the most expensive operation in deblending, memorywise.
    spectrumInit = False
    if config.setSpectra:
        if config.maxSpectrumCutoff <= 0 or len(centers) * bbox.getArea() < config.maxSpectrumCutoff:
            spectrumInit = True
            blend.fit_spectra()

    # Set the sources that could not be initialized and were skipped
    skipped = [src for src in sources if src.is_null]

    blend.fit(max_iter=config.maxIter, e_rel=config.relativeError, min_iter=config.minIter)

    return blend, skipped, spectrumInit
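
# A hedged sketch of inspecting the result (names as above; `use_flux=True`
# returns the flux-weighted model, exactly as used when building the
# weighted catalogs in `ScarletDeblendTask.deblend` below):
#
#     >>> blend, skipped, spectrumInit = deblend_lite(mExposure, foot, config)
#     >>> models = [src.get_model() for src in blend.sources if not src.is_null]
#     >>> fluxes = [src.get_model(use_flux=True).sum(axis=(1, 2))
#     ...           for src in blend.sources if not src.is_null]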


class ScarletDeblendConfig(pexConfig.Config):
    """ScarletDeblendConfig

    Configuration for the multiband deblender.
    The parameters are organized by the parameter types, which are
    - Stopping Criteria: Used to determine if the fit has converged
    - Position Fitting Criteria: Used to fit the positions of the peaks
    - Constraints: Used to apply constraints to the peaks and their components
    - Other: Parameters that don't fit into the above categories
    """
    # Stopping Criteria
    minIter = pexConfig.Field(dtype=int, default=1,
                              doc="Minimum number of iterations before the optimizer is allowed to stop.")
    maxIter = pexConfig.Field(dtype=int, default=300,
                              doc=("Maximum number of iterations to deblend a single parent"))
    relativeError = pexConfig.Field(dtype=float, default=1e-2,
                                    doc=("Change in the loss function between iterations to exit fitter. "
                                         "Typically this is `1e-2` if measurements will be made on the "
                                         "flux re-distributed models and `1e-4` when making measurements "
                                         "on the models themselves."))

    # Constraints
    morphThresh = pexConfig.Field(dtype=float, default=1,
                                  doc="Fraction of background RMS a pixel must have "
                                      "to be included in the initial morphology")
    # Lite Parameters
    # All of these parameters (except version) are only valid if version='lite'
    version = pexConfig.ChoiceField(
        dtype=str,
        default="lite",
        allowed={
            "scarlet": "main scarlet version (likely to be deprecated soon)",
            "lite": "Optimized version of scarlet for survey data from a single instrument",
        },
        doc="The version of scarlet to use.",
    )
    optimizer = pexConfig.ChoiceField(
        dtype=str,
        default="adaprox",
        allowed={
            "adaprox": "Proximal ADAM optimization",
            "fista": "Accelerated proximal gradient method",
        },
        doc="The optimizer to use for fitting parameters; only used when version='lite'",
    )
    morphImage = pexConfig.ChoiceField(
        dtype=str,
        default="chi2",
        allowed={
            "chi2": "Initialize sources on a chi^2 image made from all available bands",
            "wavelet": "Initialize sources using a wavelet decomposition of the chi^2 image",
        },
        doc="The type of image to use for initializing the morphology. "
            "Must be either 'chi2' or 'wavelet'."
    )
    backgroundThresh = pexConfig.Field(
        dtype=float,
        default=0.25,
        doc="Fraction of background to use for a sparsity threshold. "
            "This prevents sources from growing unrealistically outside "
            "the parent footprint while still modeling flux correctly "
            "for bright sources."
    )
    maxProxIter = pexConfig.Field(
        dtype=int,
        default=1,
        doc="Maximum number of proximal operator iterations inside of each "
            "iteration of the optimizer. "
            "This config field is only used if version='lite' and optimizer='adaprox'."
    )
    waveletScales = pexConfig.Field(
        dtype=int,
        default=5,
        doc="Number of wavelet scales to use for wavelet initialization. "
            "This field is only used when `version`='lite' and `morphImage`='wavelet'."
    )
    # Other scarlet parameters
    useWeights = pexConfig.Field(
        dtype=bool, default=True,
        doc=("Whether or not to use inverse variance weighting. "
             "If `useWeights` is `False` then flat weights are used"))
    modelPsfSize = pexConfig.Field(
        dtype=int, default=11,
        doc="Model PSF side length in pixels")
    modelPsfSigma = pexConfig.Field(
        dtype=float, default=0.8,
        doc="Define sigma for the model frame PSF")
    minSNR = pexConfig.Field(
        dtype=float, default=50,
        doc="Minimum signal to noise to accept the source. "
            "Sources with lower flux will be initialized with the PSF but updated "
            "like an ordinary ExtendedSource (known in scarlet as a `CompactSource`).")
    saveTemplates = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to save the SEDs and templates")
    processSingles = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to process isolated sources in the deblender")
    convolutionType = pexConfig.Field(
        dtype=str, default="fft",
        doc="Type of convolution to render the model to the observations.\n"
            "- 'fft': perform convolutions in Fourier space\n"
            "- 'real': perform convolutions in real space.")
    sourceModel = pexConfig.Field(
        dtype=str, default="double",
        doc=("How to determine which model to use for sources, from\n"
             "- 'single': use a single component for all sources\n"
             "- 'double': use a bulge disk model for all sources\n"
             "- 'compact': use a single component model, initialized with a point source morphology, "
             " for all sources\n"
             "- 'point': use a point-source model for all sources\n"
             "- 'fit': use a PSF fitting model to determine the number of components "
             "(not yet implemented)"),
        deprecated="This field will be deprecated when the default for `version` is changed to `lite`.",
    )
    setSpectra = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to solve for the best-fit spectra during initialization. "
            "This makes initialization slightly longer, as it requires a convolution "
            "to set the optimal spectra, but results in a much better initial log-likelihood "
            "and reduced total runtime, with convergence in fewer iterations. "
            "Only blends with peaks*area < `maxSpectrumCutoff` will use the "
            "improved initialization.")
    # Mask-plane restrictions
    badMask = pexConfig.ListField(
        dtype=str, default=["BAD", "CR", "NO_DATA", "SAT", "SUSPECT", "EDGE"],
        doc="Mask planes whose pixels are given zero weight (ignored) during deblending")
    statsMask = pexConfig.ListField(dtype=str, default=["SAT", "INTRP", "NO_DATA"],
                                    doc="Mask planes to ignore when performing statistics")
    maskLimits = pexConfig.DictField(
        keytype=str,
        itemtype=float,
        default={},
        doc=("Mask planes with the corresponding limit on the fraction of masked pixels. "
             "Sources violating this limit will not be deblended."),
    )
    # Size restrictions
    maxNumberOfPeaks = pexConfig.Field(
        dtype=int, default=0,
        doc=("Only deblend the brightest maxNumberOfPeaks peaks in the parent"
             " (<= 0: unlimited)"))
    maxFootprintArea = pexConfig.Field(
        dtype=int, default=1000000,
        doc=("Maximum area for footprints before they are ignored as large; "
             "non-positive means no threshold applied"))
    maxFootprintSize = pexConfig.Field(
        dtype=int, default=0,
        doc=("Maximum linear dimension for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))
    minFootprintAxisRatio = pexConfig.Field(
        dtype=float, default=0.0,
        doc=("Minimum axis ratio for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))
    maxSpectrumCutoff = pexConfig.Field(
        dtype=int, default=1000000,
        doc=("Maximum number of pixels * number of sources in a blend. "
             "This is different than `maxFootprintArea` because this isn't "
             "the footprint area but the area of the bounding box that "
             "contains the footprint, and is also multiplied by the number of "
             "sources in the footprint. This prevents large skinny blends with "
             "a high density of sources from running out of memory. "
             "A non-positive `maxSpectrumCutoff` disables the cutoff.")
    )
    # Failure modes
    fallback = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to fallback to a smaller number of components if a source does not initialize"
    )
    notDeblendedMask = pexConfig.Field(
        dtype=str, default="NOT_DEBLENDED", optional=True,
        doc="Mask name for footprints not deblended, or None")
    catchFailures = pexConfig.Field(
        dtype=bool, default=True,
        doc=("If True, catch exceptions thrown by the deblender, log them, "
             "and set a flag on the parent, instead of letting them propagate up"))
    # Other options
    columnInheritance = pexConfig.DictField(
        keytype=str, itemtype=str, default={
            "deblend_nChild": "deblend_parentNChild",
            "deblend_nPeaks": "deblend_parentNPeaks",
            "deblend_spectrumInitFlag": "deblend_spectrumInitFlag",
            "deblend_blendConvergenceFailedFlag": "deblend_blendConvergenceFailedFlag",
        },
        doc="Columns to pass from the parent to the child. "
            "The key is the name of the column for the parent record, "
            "the value is the name of the column to use for the child."
    )
    pseudoColumns = pexConfig.ListField(
        dtype=str, default=['merge_peak_sky', 'sky_source'],
        doc="Names of flags which should never be deblended."
    )

    # Logging option(s)
    loggingInterval = pexConfig.Field(
        dtype=int, default=600,
        doc="Interval (in seconds) to log messages (at VERBOSE level) while deblending sources."
    )
    # Testing options
    # Some obs packages and ci packages run the full pipeline on a small
    # subset of data to test that the pipeline is functioning properly.
    # This is not meant as scientific validation, so it can be useful
    # to only run on a small subset of the data that is large enough to
    # test the desired pipeline features but not so long that the deblender
    # is the tall pole in terms of execution times.
    useCiLimits = pexConfig.Field(
        dtype=bool, default=False,
        doc="Limit the number of sources deblended for CI to prevent long build times")
    ciDeblendChildRange = pexConfig.ListField(
        dtype=int, default=[5, 10],
        doc="Only deblend parent Footprints with a number of peaks in the (inclusive) range indicated. "
            "If `useCiLimits==False` then this parameter is ignored.")
    ciNumParentsToDeblend = pexConfig.Field(
        dtype=int, default=10,
        doc="Only use the first `ciNumParentsToDeblend` parent footprints with a total peak count "
            "within `ciDeblendChildRange`. "
            "If `useCiLimits==False` then this parameter is ignored.")


class ScarletDeblendTask(pipeBase.Task):
    """ScarletDeblendTask

    Split blended sources into individual sources.

    This task appends the deblended children to the input SourceCatalog
    in-place and returns per-band catalogs of the deblended models.
    """
    ConfigClass = ScarletDeblendConfig
    _DefaultName = "scarletDeblend"
    def __init__(self, schema, peakSchema=None, **kwargs):
        """Create the task, adding necessary fields to the given schema.

        Parameters
        ----------
        schema : `lsst.afw.table.schema.schema.Schema`
            Schema object for measurement fields; will be modified in-place.
        peakSchema : `lsst.afw.table.schema.schema.Schema`
            Schema of Footprint Peaks that will be passed to the deblender.
            Any fields beyond the PeakTable minimal schema will be transferred
            to the main source Schema. If None, no fields will be transferred
            from the Peaks.
        **kwargs
            Passed to Task.__init__.
        """
        pipeBase.Task.__init__(self, **kwargs)

        peakMinimalSchema = afwDet.PeakTable.makeMinimalSchema()
        if peakSchema is None:
            # In this case, the peakSchemaMapper will transfer nothing, but
            # we'll still have one to simplify downstream code
            self.peakSchemaMapper = afwTable.SchemaMapper(peakMinimalSchema, schema)
        else:
            self.peakSchemaMapper = afwTable.SchemaMapper(peakSchema, schema)
            for item in peakSchema:
                if item.key not in peakMinimalSchema:
                    self.peakSchemaMapper.addMapping(item.key, item.field)
                    # Because SchemaMapper makes a copy of the output schema
                    # you give its ctor, it isn't updating this Schema in
                    # place. That's probably a design flaw, but in the
                    # meantime, we'll keep that schema in sync with the
                    # peakSchemaMapper.getOutputSchema() manually, by adding
                    # the same fields to both.
                    schema.addField(item.field)
            assert schema == self.peakSchemaMapper.getOutputSchema(), "Logic bug mapping schemas"
        self._addSchemaKeys(schema)
        self.schema = schema
        self.toCopyFromParent = [item.key for item in self.schema
                                 if item.field.getName().startswith("merge_footprint")]
    def _addSchemaKeys(self, schema):
        """Add deblender specific keys to the schema
        """
        self.runtimeKey = schema.addField('deblend_runtime', type=np.float32, doc='runtime in ms')

        self.iterKey = schema.addField('deblend_iterations', type=np.int32, doc='iterations to converge')

        self.nChildKey = schema.addField('deblend_nChild', type=np.int32,
                                         doc='Number of children this object has (defaults to 0)')
        self.psfKey = schema.addField('deblend_deblendedAsPsf', type='Flag',
                                      doc='Deblender thought this source looked like a PSF')
        self.tooManyPeaksKey = schema.addField('deblend_tooManyPeaks', type='Flag',
                                               doc='Source had too many peaks; '
                                                   'only the brightest were included')
        self.tooBigKey = schema.addField('deblend_parentTooBig', type='Flag',
                                         doc='Parent footprint covered too many pixels')
        self.maskedKey = schema.addField('deblend_masked', type='Flag',
                                         doc='Parent footprint was predominantly masked')
        self.sedNotConvergedKey = schema.addField('deblend_sedConvergenceFailed', type='Flag',
                                                  doc='scarlet sed optimization did not converge before '
                                                      'config.maxIter')
        self.morphNotConvergedKey = schema.addField('deblend_morphConvergenceFailed', type='Flag',
                                                    doc='scarlet morph optimization did not converge before '
                                                        'config.maxIter')
        self.blendConvergenceFailedFlagKey = schema.addField('deblend_blendConvergenceFailedFlag',
                                                             type='Flag',
                                                             doc='at least one source in the blend '
                                                                 'failed to converge')
        self.edgePixelsKey = schema.addField('deblend_edgePixels', type='Flag',
                                             doc='Source had flux on the edge of the parent footprint')
        self.deblendFailedKey = schema.addField('deblend_failed', type='Flag',
                                                doc="Deblending failed on source")
        self.deblendErrorKey = schema.addField('deblend_error', type="String", size=25,
                                               doc='Name of error if the blend failed')
        self.deblendSkippedKey = schema.addField('deblend_skipped', type='Flag',
                                                 doc="Deblender skipped this source")
        self.peakCenter = afwTable.Point2IKey.addFields(schema, name="deblend_peak_center",
                                                        doc="Center used to apply constraints in scarlet",
                                                        unit="pixel")
        self.peakIdKey = schema.addField("deblend_peakId", type=np.int32,
                                         doc="ID of the peak in the parent footprint. "
                                             "This is not unique, but the combination of 'parent' "
                                             "and 'peakId' should be for all child sources. "
                                             "Top level blends with no parents have 'peakId=0'")
        self.modelCenterFlux = schema.addField('deblend_peak_instFlux', type=float, units='count',
                                               doc="The instFlux at the peak position of the "
                                                   "deblended model")
        self.modelTypeKey = schema.addField("deblend_modelType", type="String", size=25,
                                            doc="The type of model used, for example "
                                                "MultiExtendedSource, SingleExtendedSource, PointSource")
        self.nPeaksKey = schema.addField("deblend_nPeaks", type=np.int32,
                                         doc="Number of initial peaks in the blend. "
                                             "This includes peaks that may have been culled "
                                             "during deblending or failed to deblend")
        self.parentNPeaksKey = schema.addField("deblend_parentNPeaks", type=np.int32,
                                               doc="deblend_nPeaks from this record's parent.")
        self.parentNChildKey = schema.addField("deblend_parentNChild", type=np.int32,
                                               doc="deblend_nChild from this record's parent.")
        self.scarletFluxKey = schema.addField("deblend_scarletFlux", type=np.float32,
                                              doc="Flux measurement from scarlet")
        self.scarletLogLKey = schema.addField("deblend_logL", type=np.float32,
                                              doc="Final logL, used to identify regressions in scarlet.")
        self.scarletSpectrumInitKey = schema.addField("deblend_spectrumInitFlag", type='Flag',
                                                      doc="True when scarlet initializes sources "
                                                          "in the blend with a more accurate spectrum. "
                                                          "The algorithm uses a lot of memory, "
                                                          "so large dense blends will use "
                                                          "a less accurate initialization.")

        # self.log.trace('Added keys to schema: %s', ", ".join(str(x) for x in
        #                (self.nChildKey, self.tooManyPeaksKey, self.tooBigKey))
        #                )
    @timeMethod
    def run(self, mExposure, mergedSources):
        """Get the psf from each exposure and then run deblend().

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        mergedSources : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.

        Returns
        -------
        templateCatalogs : dict
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are catalogs with heavy footprints that are the templates
            created by the multiband templates.
        """
        return self.deblend(mExposure, mergedSources)
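
    # A hedged sketch of typical task usage (`schema`, `mExposure`, and
    # `mergedSources` stand in for real pipeline objects):
    #
    #     >>> config = ScarletDeblendConfig()
    #     >>> task = ScarletDeblendTask(schema, config=config)
    #     >>> catalogs, weightedCatalogs = task.run(mExposure, mergedSources)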
    @timeMethod
    def deblend(self, mExposure, catalog):
        """Deblend a data cube of multiband images

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        catalog : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend. The new deblended sources are
            appended to this catalog in place.

        Returns
        -------
        catalogs : `dict` or `None`
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are catalogs with heavy footprints that are the templates
            created by the multiband templates.
        """
        import time

        # Cull footprints if required by ci
        if self.config.useCiLimits:
            self.log.info("Using CI catalog limits, the original number of sources to deblend was %d.",
                          len(catalog))
            # Select parents with a number of children in the range
            # config.ciDeblendChildRange
            minChildren, maxChildren = self.config.ciDeblendChildRange
            nPeaks = np.array([len(src.getFootprint().peaks) for src in catalog])
            childrenInRange = np.where((nPeaks >= minChildren) & (nPeaks <= maxChildren))[0]
            if len(childrenInRange) < self.config.ciNumParentsToDeblend:
                raise ValueError("Fewer than ciNumParentsToDeblend children were contained in the range "
                                 "indicated by ciDeblendChildRange. Adjust this range to include more "
                                 "parents.")
            # Keep all of the isolated parents and the first
            # `ciNumParentsToDeblend` children
            parents = nPeaks == 1
            children = np.zeros((len(catalog),), dtype=bool)
            children[childrenInRange[:self.config.ciNumParentsToDeblend]] = True
            catalog = catalog[parents | children]
            # We need to update the IdFactory, otherwise the source ids
            # will not be sequential
            idFactory = catalog.getIdFactory()
            maxId = np.max(catalog["id"])
            idFactory.notify(maxId)
        filters = mExposure.filters
        self.log.info("Deblending %d sources in %d exposure bands", len(catalog), len(mExposure))
        nextLogTime = time.time() + self.config.loggingInterval

        # Create a set of wavelet coefficients if using wavelet initialization
        if self.config.version == "lite" and self.config.morphImage == "wavelet":
            images = mExposure.image.array
            variance = mExposure.variance.array
            wavelets = get_detect_wavelets(images, variance, scales=self.config.waveletScales)
        else:
            wavelets = None

        # Add the NOT_DEBLENDED mask to the mask plane in each band
        if self.config.notDeblendedMask:
            for mask in mExposure.mask:
                mask.addMaskPlane(self.config.notDeblendedMask)

        nParents = len(catalog)
        nDeblendedParents = 0
        skippedParents = []
        multibandColumns = {
            "heavies": [],
            "fluxes": [],
            "centerFluxes": [],
        }
        weightedColumns = {
            "heavies": [],
            "fluxes": [],
            "centerFluxes": [],
        }
        for parentIndex in range(nParents):
            parent = catalog[parentIndex]
            foot = parent.getFootprint()
            bbox = foot.getBBox()
            peaks = foot.getPeaks()

            # Since we use the first peak for the parent object, we should
            # propagate its flags to the parent source.
            parent.assign(peaks[0], self.peakSchemaMapper)

            # Skip isolated sources unless processSingles is turned on.
            # Note: this does not flag isolated sources as skipped or
            # set the NOT_DEBLENDED mask in the exposure,
            # since these aren't really skipped blends.
            # We also skip pseudo sources, like sky objects, which
            # are intended to be skipped.
            if ((len(peaks) < 2 and not self.config.processSingles)
                    or isPseudoSource(parent, self.config.pseudoColumns)):
                self._updateParentRecord(
                    parent=parent,
                    nPeaks=len(peaks),
                    nChild=0,
                    runtime=np.nan,
                    iterations=0,
                    logL=np.nan,
                    spectrumInit=False,
                    converged=False,
                )
                continue
            # Block of conditions for skipping a parent with multiple children
            skipKey = None
            if self._isLargeFootprint(foot):
                # The footprint is above the maximum footprint size limit
                skipKey = self.tooBigKey
                skipMessage = f"Parent {parent.getId()}: skipping large footprint"
            elif self._isMasked(foot, mExposure):
                # The footprint exceeds the maximum number of masked pixels
                skipKey = self.maskedKey
                skipMessage = f"Parent {parent.getId()}: skipping masked footprint"
            elif self.config.maxNumberOfPeaks > 0 and len(peaks) > self.config.maxNumberOfPeaks:
                # Unlike meas_deblender, in scarlet we skip the entire blend
                # if the number of peaks exceeds max peaks, since neglecting
                # to model any peaks often results in catastrophic failure
                # of scarlet to generate models for the brighter sources.
                skipKey = self.tooManyPeaksKey
                skipMessage = f"Parent {parent.getId()}: Too many peaks, skipping blend"
            if skipKey is not None:
                self._skipParent(
                    parent=parent,
                    skipKey=skipKey,
                    logMessage=skipMessage,
                )
                skippedParents.append(parentIndex)
                continue
            nDeblendedParents += 1
            self.log.trace("Parent %d: deblending %d peaks", parent.getId(), len(peaks))
            # Run the deblender
            blendError = None
            try:
                t0 = time.monotonic()
                # Build the parameter lists with the same ordering
                if self.config.version == "scarlet":
                    blend, skipped, spectrumInit = deblend(mExposure, foot, self.config)
                elif self.config.version == "lite":
                    blend, skipped, spectrumInit = deblend_lite(mExposure, foot, self.config, wavelets)
                tf = time.monotonic()
                runtime = (tf-t0)*1000
                converged = _checkBlendConvergence(blend, self.config.relativeError)

                scarletSources = [src for src in blend.sources]
                nChild = len(scarletSources)
            # Catch all errors and filter out the ones that we know about
            except Exception as e:
                blendError = type(e).__name__
                if isinstance(e, ScarletGradientError):
                    parent.set(self.iterKey, e.iterations)
                elif not isinstance(e, IncompleteDataError):
                    blendError = "UnknownError"
                    if self.config.catchFailures:
                        # Make it easy to find UnknownErrors in the log file
                        self.log.warning("UnknownError")
                        import traceback
                        traceback.print_exc()
                    else:
                        raise

                self._skipParent(
                    parent=parent,
                    skipKey=self.deblendFailedKey,
                    logMessage=f"Unable to deblend source {parent.getId()}: {blendError}",
                )
                parent.set(self.deblendErrorKey, blendError)
                skippedParents.append(parentIndex)
                continue
            # Update the parent record with the deblending results
            if self.config.version == "scarlet":
                logL = -blend.loss[-1] + blend.observations[0].log_norm
            elif self.config.version == "lite":
                logL = blend.loss[-1]
            self._updateParentRecord(
                parent=parent,
                nPeaks=len(peaks),
                nChild=nChild,
                runtime=runtime,
                iterations=len(blend.loss),
                logL=logL,
                spectrumInit=spectrumInit,
                converged=converged,
            )
            # Add each deblended source to the catalog
            for k, scarletSource in enumerate(scarletSources):
                # Skip any sources with no flux or that scarlet skipped because
                # it could not initialize
                if k in skipped or (self.config.version == "lite" and scarletSource.is_null):
                    # No need to propagate anything
                    continue
                parent.set(self.deblendSkippedKey, False)
                if self.config.version == "lite":
                    mHeavy = liteModelToHeavy(scarletSource, mExposure, blend, xy0=bbox.getMin())
                    weightedHeavy = liteModelToHeavy(
                        scarletSource, mExposure, blend, xy0=bbox.getMin(), useFlux=True)
                    weightedColumns["heavies"].append(weightedHeavy)
                    flux = scarletSource.get_model(use_flux=True).sum(axis=(1, 2))
                    weightedColumns["fluxes"].append({
                        filters[fidx]: _flux
                        for fidx, _flux in enumerate(flux)
                    })
                    centerFlux = self._getCenterFlux(weightedHeavy, scarletSource, xy0=bbox.getMin())
                    weightedColumns["centerFluxes"].append(centerFlux)
                else:
                    mHeavy = modelToHeavy(scarletSource, mExposure, blend, xy0=bbox.getMin())
                multibandColumns["heavies"].append(mHeavy)
                flux = scarlet.measure.flux(scarletSource)
                multibandColumns["fluxes"].append({
                    filters[fidx]: _flux
                    for fidx, _flux in enumerate(flux)
                })
                centerFlux = self._getCenterFlux(mHeavy, scarletSource, xy0=bbox.getMin())
                multibandColumns["centerFluxes"].append(centerFlux)

                # Add all fields except the HeavyFootprint to the
                # source record
                self._addChild(
                    parent=parent,
                    mHeavy=mHeavy,
                    catalog=catalog,
                    scarletSource=scarletSource,
                )
            # Log a message if it has been a while since the last log.
            if (currentTime := time.time()) > nextLogTime:
                nextLogTime = currentTime + self.config.loggingInterval
                self.log.verbose("Deblended %d parent sources out of %d", parentIndex + 1, nParents)

        # Clear the cached values in scarlet to clear out memory
        scarlet.cache.Cache._cache = {}

        # Make sure that the number of new sources matches the number of
        # entries in each of the band dependent columns.
        # This should never trigger and is just a sanity check.
        nChildren = len(catalog) - nParents
        if np.any([len(meas) != nChildren for meas in multibandColumns.values()]):
            msg = f"Added {len(catalog)-nParents} new sources, but have "
            msg += ", ".join([
                f"{len(value)} {key}"
                for key, value in multibandColumns.items()
            ])
            raise RuntimeError(msg)
        # Make a copy of the catalog in each band and update the footprints
        catalogs = {}
        for f in filters:
            _catalog = afwTable.SourceCatalog(catalog.table.clone())
            _catalog.extend(catalog, deep=True)

            # Update the footprints and columns that are different
            # for each filter
            for sourceIndex, source in enumerate(_catalog[nParents:]):
                source.setFootprint(multibandColumns["heavies"][sourceIndex][f])
                source.set(self.scarletFluxKey, multibandColumns["fluxes"][sourceIndex][f])
                source.set(self.modelCenterFlux, multibandColumns["centerFluxes"][sourceIndex][f])
            catalogs[f] = _catalog
        weightedCatalogs = {}
        if self.config.version == "lite":
            # Also create a catalog by reweighting the flux
            for f in filters:
                _catalog = afwTable.SourceCatalog(catalog.table.clone())
                _catalog.extend(catalog, deep=True)

                # Update the footprints and columns that are different
                # for each filter
                for sourceIndex, source in enumerate(_catalog[nParents:]):
                    source.setFootprint(weightedColumns["heavies"][sourceIndex][f])
                    source.set(self.scarletFluxKey, weightedColumns["fluxes"][sourceIndex][f])
                    source.set(self.modelCenterFlux, weightedColumns["centerFluxes"][sourceIndex][f])
                weightedCatalogs[f] = _catalog
        # Update the mExposure mask with the footprint of skipped parents
        if self.config.notDeblendedMask:
            for mask in mExposure.mask:
                for parentIndex in skippedParents:
                    fp = catalog[parentIndex].getFootprint()
                    fp.spans.setMask(mask, mask.getPlaneBitMask(self.config.notDeblendedMask))

        self.log.info("Deblender results: of %d parent sources, %d were deblended, "
                      "creating %d children, for a total of %d sources",
                      nParents, nDeblendedParents, nChildren, len(catalog))
        return catalogs, weightedCatalogs
    def _isLargeFootprint(self, footprint):
        """Returns whether a Footprint is large

        'Large' is defined by thresholds on the area, size and axis ratio.
        These may be disabled independently by configuring them to be
        non-positive.

        This is principally intended to get rid of satellite streaks, which
        the deblender or other downstream processing can have trouble dealing
        with (e.g., multiple large HeavyFootprints can chew up memory).
        """
        if self.config.maxFootprintArea > 0 and footprint.getArea() > self.config.maxFootprintArea:
            return True
        if self.config.maxFootprintSize > 0:
            bbox = footprint.getBBox()
            if max(bbox.getWidth(), bbox.getHeight()) > self.config.maxFootprintSize:
                return True
        if self.config.minFootprintAxisRatio > 0:
            axes = afwEll.Axes(footprint.getShape())
            if axes.getB() < self.config.minFootprintAxisRatio*axes.getA():
                return True
        return False
    def _isMasked(self, footprint, mExposure):
        """Returns whether the footprint violates the mask limits"""
        bbox = footprint.getBBox()
        mask = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0)
        size = float(footprint.getArea())
        for maskName, limit in self.config.maskLimits.items():
            maskVal = mExposure.mask.getPlaneBitMask(maskName)
            _mask = afwImage.MaskX(mask & maskVal, xy0=bbox.getMin())
            unmaskedSpan = footprint.spans.intersectNot(_mask)  # spanset of unmasked pixels
            if (size - unmaskedSpan.getArea())/size > limit:
                return True
        return False
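
    # For example, with `maskLimits={"SAT": 0.5}` (hypothetical values), a
    # footprint of 1000 pixels with 600 of them flagged SAT leaves 400
    # unmasked pixels, giving a masked fraction of
    # (1000 - 400) / 1000 = 0.6 > 0.5, so `_isMasked` returns `True` and
    # the parent is skipped.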
    def _skipParent(self, parent, skipKey, logMessage):
        """Update a parent record that is not being deblended.

        This is a fairly trivial function but is implemented to ensure
        that a skipped parent updates the appropriate columns
        consistently, and always has a flag to mark the reason that
        it is being skipped.

        Parameters
        ----------
        parent : `lsst.afw.table.source.source.SourceRecord`
            The parent record to flag as skipped.
        skipKey : `lsst.afw.table.Key`
            The schema key of the flag to mark the reason for skipping.
        logMessage : `str`
            The message to display in a log.trace when a source
            is skipped.
        """
        if logMessage is not None:
            self.log.trace(logMessage)
        self._updateParentRecord(
            parent=parent,
            nPeaks=len(parent.getFootprint().peaks),
            nChild=0,
            runtime=np.nan,
            iterations=0,
            logL=np.nan,
            spectrumInit=False,
            converged=False,
        )

        # Mark the source as skipped by the deblender and
        # flag the reason why.
        parent.set(self.deblendSkippedKey, True)
        parent.set(skipKey, True)
    def _updateParentRecord(self, parent, nPeaks, nChild,
                            runtime, iterations, logL, spectrumInit, converged):
        """Update a parent record in all of the single band catalogs.

        Ensure that all locations that update a parent record,
        whether it is skipped or updated after deblending,
        update all of the appropriate columns.

        Parameters
        ----------
        parent : `lsst.afw.table.source.source.SourceRecord`
            The parent record to update.
        nPeaks : `int`
            Number of peaks in the parent footprint.
        nChild : `int`
            Number of children deblended from the parent.
            This may differ from `nPeaks` if some of the peaks
            were culled and have no deblended model.
        runtime : `float`
            Total runtime for deblending.
        iterations : `int`
            Total number of iterations in scarlet before convergence.
        logL : `float`
            Final log likelihood of the blend.
        spectrumInit : `bool`
            True when scarlet used `set_spectra` to initialize all
            sources with better initial intensities.
        converged : `bool`
            True when the optimizer reached convergence before
            reaching the maximum number of iterations.
        """
        parent.set(self.nPeaksKey, nPeaks)
        parent.set(self.nChildKey, nChild)
        parent.set(self.runtimeKey, runtime)
        parent.set(self.iterKey, iterations)
        parent.set(self.scarletLogLKey, logL)
        parent.set(self.scarletSpectrumInitKey, spectrumInit)
        # The flag records a convergence *failure*, so store the negation
        parent.set(self.blendConvergenceFailedFlagKey, not converged)
    def _addChild(self, parent, mHeavy, catalog, scarletSource):
        """Add a child to a catalog.

        This creates a new child in the source catalog,
        assigning it a parent id, and adding all columns
        that are independent across all filter bands.

        Parameters
        ----------
        parent : `lsst.afw.table.source.source.SourceRecord`
            The parent of the new child record.
        mHeavy : `lsst.detection.MultibandFootprint`
            The multi-band footprint containing the model and
            peak catalog for the new child record.
        catalog : `lsst.afw.table.source.source.SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.
        scarletSource : `scarlet.Component`
            The scarlet model for the new source record.
        """
        src = catalog.addNew()
        for key in self.toCopyFromParent:
            src.set(key, parent.get(key))
        # The peak catalog is the same for all bands,
        # so we just use the first peak catalog
        peaks = mHeavy[mHeavy.filters[0]].peaks
        src.assign(peaks[0], self.peakSchemaMapper)
        src.setParent(parent.getId())
        # Currently all children only have a single peak,
        # but it's possible in the future that there will be hierarchical
        # deblending, so we use the footprint to set the number of peaks
        # for each child.
        src.set(self.nPeaksKey, len(peaks))
        # Set the psf key based on whether or not the source was
        # deblended using the PointSource model.
        # This key is not that useful anymore since we now keep track of
        # `modelType`, but we continue to propagate it in case code downstream
        # is expecting it.
        src.set(self.psfKey, scarletSource.__class__.__name__ == "PointSource")
        src.set(self.modelTypeKey, scarletSource.__class__.__name__)
        # We set the runtime to zero so that summing up the
        # runtime column will give the total time spent
        # running the deblender for the catalog.
        src.set(self.runtimeKey, 0)

        # Set the position of the peak from the parent footprint.
        # This will make it easier to match the same source across
        # deblenders and across observations, where the peak
        # position is unlikely to change unless enough time passes
        # for a source to move on the sky.
        peak = scarletSource.detectedPeak
        src.set(self.peakCenter, Point2I(peak["i_x"], peak["i_y"]))
        src.set(self.peakIdKey, peak["id"])

        # Propagate columns from the parent to the child
        for parentColumn, childColumn in self.config.columnInheritance.items():
            src.set(childColumn, parent.get(parentColumn))
    def _getCenterFlux(self, mHeavy, scarletSource, xy0):
        """Get the flux at the center of a HeavyFootprint

        Parameters
        ----------
        mHeavy : `lsst.detection.MultibandFootprint`
            The multi-band footprint containing the model for the source.
        scarletSource : `scarlet.Component`
            The scarlet model for the heavy footprint.
        xy0 : `Point2I`
            The origin of the parent bounding box, used to shift the
            scarlet source center into image coordinates.
        """
        # Store the flux at the center of the model and the total
        # scarlet flux measurement.
        mImage = mHeavy.getImage(fill=0.0).image

        # Set the flux at the center of the model (for SNR)
        try:
            cy, cx = scarletSource.center
            cy += xy0.y
            cx += xy0.x
            return mImage[:, cx, cy]
        except AttributeError:
            msg = "Did not recognize coordinates for source type of `{0}`, "
            msg += "could not write coordinates or center flux. "
            msg += "Add `{0}` to meas_extensions_scarlet to properly persist this information."
            logger.warning(msg.format(type(scarletSource)))
            return {f: np.nan for f in mImage.filters}